llama-chat : fix multiple system message for gemma, orion (#14246)

Author: Xuan-Son Nguyen
Date: 2025-06-18 09:58:43 +02:00
Committed by: GitHub
Parent: 3865cff4f5
Commit: 95402553a5


@@ -333,7 +333,7 @@ int32_t llm_chat_apply_template(
             std::string role(message->role);
             if (role == "system") {
                 // there is no system message for gemma, but we will merge it with user prompt, so nothing is broken
-                system_prompt = trim(message->content);
+                system_prompt += trim(message->content);
                 continue;
             }
             // in gemma, "assistant" is "model"
@@ -355,7 +355,7 @@ int32_t llm_chat_apply_template(
             std::string role(message->role);
             if (role == "system") {
                 // there is no system message support, we will merge it with user prompt
-                system_prompt = message->content;
+                system_prompt += message->content;
                 continue;
             } else if (role == "user") {
                 ss << "Human: ";