diff --git a/src/llama-batch.cpp b/src/llama-batch.cpp
index 8698d89ac..55d89eca0 100644
--- a/src/llama-batch.cpp
+++ b/src/llama-batch.cpp
@@ -477,7 +477,7 @@ llama_ubatch llama_batch_allocr::split_simple(uint32_t n_ubatch) {
 
 llama_ubatch llama_batch_allocr::split_equal(uint32_t n_ubatch, bool sequential) {
     if (sequential && has_cpl) {
-        LLAMA_LOG_ERROR("%s: sequential split is not supported when there are coupled sequences in the input batch\n", __func__);
+        LLAMA_LOG_ERROR("%s: sequential split is not supported when there are coupled sequences in the input batch (you may need to use the -kvu flag)\n", __func__);
 
         return {};
     }