From e434e69183fd9e1031f4445002083178c331a28b Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Sigbj=C3=B8rn=20Skj=C3=A6ret?=
Date: Mon, 16 Jun 2025 21:58:42 +0200
Subject: [PATCH] common : suggest --jinja when autodetection fails (#14222)

---
 common/chat.cpp | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/common/chat.cpp b/common/chat.cpp
index 0dad14fba..7d9aaeb12 100644
--- a/common/chat.cpp
+++ b/common/chat.cpp
@@ -1838,7 +1838,7 @@ static common_chat_params common_chat_templates_apply_legacy(
     if (res < 0) {
         // if the custom "tmpl" is not supported, we throw an error
         // this is a bit redundant (for good), since we're not sure if user validated the custom template with llama_chat_verify_template()
-        throw std::runtime_error("this custom template is not supported");
+        throw std::runtime_error("this custom template is not supported, try using --jinja");
     }
 
     // if it turns out that our buffer is too small, we resize it
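
The sketch below is a minimal standalone illustration of the error-handling pattern this patch touches, not the llama.cpp API itself: the helper names (apply_legacy_template, apply_template_or_throw) are hypothetical, and only the thrown message mirrors the patched line in common/chat.cpp. It shows how a caller would see the new hint when legacy template autodetection fails.

// Minimal standalone sketch (hypothetical names, not the llama.cpp API).
#include <cstdio>
#include <stdexcept>
#include <string>

// Stand-in for the legacy autodetection path; returns < 0 for unknown templates.
static int apply_legacy_template(const std::string & tmpl) {
    return tmpl == "chatml" ? 0 : -1; // hypothetical: only "chatml" is recognized here
}

static void apply_template_or_throw(const std::string & tmpl) {
    int res = apply_legacy_template(tmpl);
    if (res < 0) {
        // mirrors the message changed by this patch
        throw std::runtime_error("this custom template is not supported, try using --jinja");
    }
}

int main() {
    try {
        apply_template_or_throw("my-custom-template");
    } catch (const std::runtime_error & e) {
        // a front end would surface this to the user, who can then retry with --jinja
        std::fprintf(stderr, "error: %s\n", e.what());
    }
    return 0;
}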