From c82d48ec23fb8749c341d0838f6891fd5f6b6da0 Mon Sep 17 00:00:00 2001
From: Molly Sophia
Date: Mon, 21 Jul 2025 17:38:36 +0800
Subject: [PATCH] llama : fix `--reverse-prompt` crashing issue (#14794)

Signed-off-by: Molly Sophia
---
 tools/main/main.cpp | 17 ++++++++++-------
 1 file changed, 10 insertions(+), 7 deletions(-)

diff --git a/tools/main/main.cpp b/tools/main/main.cpp
index 516bf0965..eb36c6884 100644
--- a/tools/main/main.cpp
+++ b/tools/main/main.cpp
@@ -785,14 +785,17 @@ int main(int argc, char ** argv) {
                 }
 
                 // check for reverse prompt using special tokens
-                llama_token last_token = common_sampler_last(smpl);
-                for (auto token : antiprompt_token) {
-                    if (token == last_token) {
-                        if (params.interactive) {
-                            is_interacting = true;
+                // avoid calling common_sampler_last() if last_output is empty
+                if (!last_output.empty()) {
+                    llama_token last_token = common_sampler_last(smpl);
+                    for (auto token : antiprompt_token) {
+                        if (token == last_token) {
+                            if (params.interactive) {
+                                is_interacting = true;
+                            }
+                            is_antiprompt = true;
+                            break;
                         }
-                        is_antiprompt = true;
-                        break;
                     }
                 }
 
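
For context, a minimal standalone sketch of the failure mode this patch guards against: asking a sampler for its "last sampled token" before anything has been sampled reads from an empty history, which is an invalid access. The patch avoids that by only calling common_sampler_last() when last_output (the recent output already fetched from the sampler) is non-empty. The SamplerHistory type below is hypothetical and only illustrates the pattern; it is not the real llama.cpp sampler API.

// Hypothetical illustration of the guard added in this patch.
// SamplerHistory is NOT a real llama.cpp type; it only mimics a
// "previously sampled tokens" buffer whose last() must not be
// called while the buffer is empty.
#include <cstdint>
#include <cstdio>
#include <vector>

using llama_token = int32_t;

struct SamplerHistory {
    std::vector<llama_token> prev; // previously sampled tokens

    // Precondition: !prev.empty(). Calling this on an empty history
    // is the kind of invalid access the patch avoids.
    llama_token last() const { return prev.back(); }
};

int main() {
    SamplerHistory smpl;                                   // nothing sampled yet
    std::vector<llama_token> antiprompt_token = { 2 };     // arbitrary example token id
    bool is_antiprompt = false;

    // Guarded check, mirroring the shape of the patched code: only look at
    // the last token once there is actually some sampled output.
    if (!smpl.prev.empty()) {
        const llama_token last_token = smpl.last();
        for (auto token : antiprompt_token) {
            if (token == last_token) {
                is_antiprompt = true;
                break;
            }
        }
    }

    std::printf("is_antiprompt = %s\n", is_antiprompt ? "true" : "false");
    return 0;
}

Presumably an empty last_output means the sampler has produced no previous tokens yet, so reusing that string as the guard keeps the fix local to main.cpp without changing the sampler API.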