Mirror of https://github.com/ggml-org/llama.cpp.git, synced 2025-06-27 12:05:03 +00:00.
fix assistant prefilling when content is an array
This commit is contained in:
@@ -779,7 +779,13 @@ static json oaicompat_chat_params_parse(
         /* Append assistant prefilled message */
         if (prefill_assistant_message) {
-            chat_params.prompt += last_message.content;
+            if (last_message.content.is_array()) {
+                // OpenAI-style content can be an array of parts; concatenate each part's "text" field.
+                for (auto & p : last_message.content) {
+                    chat_params.prompt += p["text"];
+                }
+            } else {
+                chat_params.prompt += last_message.content;
+            }
         }

         llama_params["chat_format"] = static_cast<int>(chat_params.format);
|
Reference in New Issue
Block a user