batch : add option for sequential equal split (#14511)

ggml-ci
Author:       Georgi Gerganov
Date:         2025-07-04 09:08:59 +03:00
Committed by: GitHub
Parent:       7b50f7c025
Commit:       67d1ef23c6
5 changed files with 26 additions and 5 deletions


@@ -70,7 +70,7 @@ llama_memory_context_ptr llama_memory_hybrid::init_batch(llama_batch_allocr & ba
         // if all tokens are output, split by sequence
         ubatch = balloc.split_seq(n_ubatch);
     } else {
-        ubatch = balloc.split_equal(n_ubatch);
+        ubatch = balloc.split_equal(n_ubatch, false);
     }
     if (ubatch.n_tokens == 0) {
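
For context, the call site above now passes a second boolean to split_equal(). Below is a toy sketch of the general idea, assuming the flag chooses between keeping the batch's original token order and packing tokens sequence by sequence before chunking into micro-batches; the helper names and the exact packing rule are illustrative assumptions, not llama_batch_allocr's actual logic.

// Toy sketch only -- not the llama.cpp implementation. Assumes the boolean
// selects between keeping the batch's original token order (false) and
// grouping tokens sequence by sequence before chunking (true).
#include <algorithm>
#include <cstdio>
#include <vector>

struct token { int seq_id; int pos; };

// Split `tokens` into micro-batches of at most n_ubatch tokens each.
static std::vector<std::vector<token>> split_equal_sketch(
        std::vector<token> tokens, size_t n_ubatch, bool sequential) {
    if (sequential) {
        // group tokens by sequence id, preserving per-sequence order
        std::stable_sort(tokens.begin(), tokens.end(),
                [](const token & a, const token & b) { return a.seq_id < b.seq_id; });
    }
    std::vector<std::vector<token>> ubatches;
    for (size_t i = 0; i < tokens.size(); i += n_ubatch) {
        const size_t end = std::min(tokens.size(), i + n_ubatch);
        ubatches.emplace_back(tokens.begin() + i, tokens.begin() + end);
    }
    return ubatches;
}

int main() {
    // two sequences with interleaved tokens
    const std::vector<token> batch = {
        {0, 0}, {1, 0}, {0, 1}, {1, 1}, {0, 2}, {1, 2},
    };
    for (bool sequential : {false, true}) {
        std::printf("sequential=%d\n", sequential);
        for (const auto & ub : split_equal_sketch(batch, 2, sequential)) {
            for (const auto & t : ub) std::printf("  (seq %d, pos %d)", t.seq_id, t.pos);
            std::printf("\n");
        }
    }
    return 0;
}

The hybrid-memory path in the diff keeps the previous behavior by passing false, so only callers that explicitly request a sequential equal split opt into the new ordering.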