#pragma once

#include <string>
#include <vector>
#include <cstdint>

enum llm_chat_template {
    LLM_CHAT_TEMPLATE_CHATML,
    LLM_CHAT_TEMPLATE_LLAMA_2,
    LLM_CHAT_TEMPLATE_LLAMA_2_SYS,
    LLM_CHAT_TEMPLATE_LLAMA_2_SYS_BOS,
    LLM_CHAT_TEMPLATE_LLAMA_2_SYS_STRIP,
    LLM_CHAT_TEMPLATE_MISTRAL_V1,
    LLM_CHAT_TEMPLATE_MISTRAL_V3,
    LLM_CHAT_TEMPLATE_MISTRAL_V3_TEKKEN,
    LLM_CHAT_TEMPLATE_MISTRAL_V7,
    LLM_CHAT_TEMPLATE_MISTRAL_V7_TEKKEN,
    LLM_CHAT_TEMPLATE_PHI_3,
    LLM_CHAT_TEMPLATE_PHI_4,
    LLM_CHAT_TEMPLATE_FALCON_3,
    LLM_CHAT_TEMPLATE_ZEPHYR,
    LLM_CHAT_TEMPLATE_MONARCH,
    LLM_CHAT_TEMPLATE_GEMMA,
    LLM_CHAT_TEMPLATE_ORION,
    LLM_CHAT_TEMPLATE_OPENCHAT,
    LLM_CHAT_TEMPLATE_VICUNA,
    LLM_CHAT_TEMPLATE_VICUNA_ORCA,
    LLM_CHAT_TEMPLATE_DEEPSEEK,
    LLM_CHAT_TEMPLATE_DEEPSEEK_2,
    LLM_CHAT_TEMPLATE_DEEPSEEK_3,
    LLM_CHAT_TEMPLATE_COMMAND_R,
    LLM_CHAT_TEMPLATE_LLAMA_3,
    LLM_CHAT_TEMPLATE_CHATGLM_3,
    LLM_CHAT_TEMPLATE_CHATGLM_4,
    LLM_CHAT_TEMPLATE_GLMEDGE,
    LLM_CHAT_TEMPLATE_MINICPM,
    LLM_CHAT_TEMPLATE_EXAONE_3,
    LLM_CHAT_TEMPLATE_RWKV_WORLD,
    LLM_CHAT_TEMPLATE_GRANITE,
    LLM_CHAT_TEMPLATE_GIGACHAT,
    LLM_CHAT_TEMPLATE_MEGREZ,
    LLM_CHAT_TEMPLATE_YANDEX,
    LLM_CHAT_TEMPLATE_BAILING,
    LLM_CHAT_TEMPLATE_LLAMA4,
    LLM_CHAT_TEMPLATE_SMOLVLM,
    LLM_CHAT_TEMPLATE_UNKNOWN,
};
struct llama_chat_message;

// Map a registered chat template name to the corresponding enum value.
llm_chat_template llm_chat_template_from_str(const std::string & name);

// Detect the template type from a template string, e.g. the Jinja source
// stored under the model's "tokenizer.chat_template" metadata key.
llm_chat_template llm_chat_detect_template(const std::string & tmpl);

// Render `chat` into `dest` using the given template. `add_ass` controls
// whether the prompt for the next assistant turn is appended at the end.
// Returns the length of the formatted output, or a negative value on failure.
int32_t llm_chat_apply_template(
    llm_chat_template tmpl,
    const std::vector<const llama_chat_message *> & chat,
    std::string & dest, bool add_ass);
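// A minimal usage sketch, not a definitive example of the library's API.
// Assumptions: `llama_chat_message` is the struct from llama.h with
// `role`/`content` C-string fields, "chatml" is one of the registered
// template names, and `jinja_source` stands for template text read from
// a model's metadata.
//
//   llama_chat_message sys  = { "system", "You are a helpful assistant." };
//   llama_chat_message user = { "user",   "Hello!" };
//   std::vector<const llama_chat_message *> chat = { &sys, &user };
//
//   // Either look the template up by name or detect it from its source:
//   llm_chat_template tmpl = llm_chat_template_from_str("chatml");
//   // llm_chat_template tmpl = llm_chat_detect_template(jinja_source);
//
//   std::string prompt;
//   int32_t res = llm_chat_apply_template(tmpl, chat, prompt, /*add_ass=*/true);
//   if (res >= 0) {
//       // `prompt` now holds the formatted conversation, ending with the
//       // assistant turn prefix so the model replies as the assistant.
//   }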