batch : add optional flag for sequential equal split (#14511)

ggml-ci
This commit is contained in:
Georgi Gerganov
2025-07-04 09:08:59 +03:00
committed by GitHub
parent 7b50f7c025
commit 67d1ef23c6
5 changed files with 26 additions and 5 deletions

View File

@@ -374,7 +374,7 @@ llama_memory_context_ptr llama_memory_recurrent::init_batch(llama_batch_allocr &
// if all tokens are output, split by sequence
ubatch = balloc.split_seq(n_ubatch);
} else {
ubatch = balloc.split_equal(n_ubatch);
ubatch = balloc.split_equal(n_ubatch, false);
}
if (balloc.get_n_used() < balloc.get_n_tokens()) {