llama : fix --reverse-prompt crashing issue (#14794)
Signed-off-by: Molly Sophia <mollysophia379@gmail.com>
@@ -785,6 +785,8 @@ int main(int argc, char ** argv) {
                 }

                 // check for reverse prompt using special tokens
+                // avoid calling common_sampler_last() if last_output is empty
+                if (!last_output.empty()) {
                 llama_token last_token = common_sampler_last(smpl);
                 for (auto token : antiprompt_token) {
                     if (token == last_token) {
@@ -795,6 +797,7 @@ int main(int argc, char ** argv) {
                         break;
                     }
                 }
+                }

                 if (is_antiprompt) {
                     LOG_DBG("found antiprompt: %s\n", last_output.c_str());
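The guard follows the comment added in the patch: before any token has been generated, last_output is empty and, presumably, the sampler's token history is empty as well, so asking common_sampler_last() for the most recent token can read a non-existent entry and crash. Below is a minimal, self-contained sketch of the same check-before-read pattern; toy_sampler and last_token_unchecked are hypothetical stand-ins for illustration only, not llama.cpp APIs.

// Illustration of the guard pattern, not llama.cpp code.
#include <cassert>
#include <cstdint>
#include <cstdio>
#include <string>
#include <vector>

using llama_token = std::int32_t;

// Hypothetical stand-in for a sampler that records the tokens it has accepted.
struct toy_sampler {
    std::vector<llama_token> prev; // empty until the first token is sampled
};

// The risky operation: fetching the last token without checking the history.
llama_token last_token_unchecked(const toy_sampler & s) {
    assert(!s.prev.empty() && "no tokens sampled yet"); // fires (or worse) on an empty history
    return s.prev.back();
}

int main() {
    toy_sampler s;                                       // nothing sampled yet
    std::string last_output;                             // empty, like last_output before any generation
    std::vector<llama_token> antiprompt_token = { 42 };  // pretend reverse-prompt token

    // The guarded pattern from the patch: only consult the sampler history
    // when there is recent output to compare against.
    if (!last_output.empty()) {
        llama_token last = last_token_unchecked(s);
        for (llama_token token : antiprompt_token) {
            if (token == last) {
                std::puts("antiprompt token hit");
                break;
            }
        }
    } else {
        std::puts("no output yet, skipping antiprompt token check");
    }
    return 0;
}

With an empty last_output the program takes the else branch and never touches the empty history, which is what the added if (!last_output.empty()) achieves in the patch.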