examples : fix is_first logic for tokenization (#14329)

ggml-ci
Author: Georgi Gerganov
Date: 2025-06-22 20:10:07 +03:00
Committed by: GitHub
Parent: af3373f1ad
Commit: f1f5e82df6
2 changed files with 2 additions and 2 deletions


@@ -98,7 +98,7 @@ int main(int argc, char ** argv) {
     auto generate = [&](const std::string & prompt) {
         std::string response;
-        const bool is_first = llama_memory_seq_pos_max(llama_get_memory(ctx), 0) == 0;
+        const bool is_first = llama_memory_seq_pos_max(llama_get_memory(ctx), 0) == -1;
         // tokenize the prompt
         const int n_prompt_tokens = -llama_tokenize(vocab, prompt.c_str(), prompt.size(), NULL, 0, is_first, true);
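
Why the change is correct: llama_memory_seq_pos_max() returns the largest position stored for a sequence, or -1 when the sequence is empty. The old comparison against 0 was therefore false on the very first turn (empty memory, pos_max == -1), so the BOS token was never added to the first prompt. Below is a minimal sketch of the corrected pattern, assuming an initialized llama_context * ctx and llama_vocab * vocab as in the simple-chat example; the helper name tokenize_prompt is hypothetical and not part of the commit.

#include "llama.h"
#include <string>
#include <vector>

// Decide whether this is the first turn by checking if sequence 0 is empty:
// llama_memory_seq_pos_max() yields the highest stored position, or -1 when
// nothing has been decoded yet, hence the == -1 test fixed by this commit.
static std::vector<llama_token> tokenize_prompt(llama_context * ctx, const llama_vocab * vocab, const std::string & prompt) {
    const bool is_first = llama_memory_seq_pos_max(llama_get_memory(ctx), 0) == -1;

    // first call with a NULL buffer returns the negated required token count
    const int n_prompt_tokens = -llama_tokenize(vocab, prompt.c_str(), prompt.size(), NULL, 0, is_first, true);

    // add_special = is_first: BOS is inserted only on the first turn, since
    // later turns append to a conversation that already starts with BOS
    std::vector<llama_token> tokens(n_prompt_tokens);
    llama_tokenize(vocab, prompt.c_str(), prompt.size(), tokens.data(), tokens.size(), is_first, true);
    return tokens;
}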