From 7ad0779f5de84a68143b2c00ab5dc94a948925d3 Mon Sep 17 00:00:00 2001
From: Florent BENOIT
Date: Sun, 23 Feb 2025 18:15:51 +0100
Subject: [PATCH] run: allow to customize prompt by env var LLAMA_PROMPT_PREFIX
 (#12041)

Signed-off-by: Florent Benoit
---
 examples/run/run.cpp | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/examples/run/run.cpp b/examples/run/run.cpp
index de736c7d5..38407d519 100644
--- a/examples/run/run.cpp
+++ b/examples/run/run.cpp
@@ -977,7 +977,8 @@ static int generate(LlamaData & llama_data, const std::string & prompt, std::str
 }
 
 static int read_user_input(std::string & user_input) {
-    static const char * prompt_prefix = "> ";
+    static const char * prompt_prefix_env = std::getenv("LLAMA_PROMPT_PREFIX");
+    static const char * prompt_prefix = prompt_prefix_env ? prompt_prefix_env : "> ";
 #ifdef WIN32
     printf("\r" LOG_CLR_TO_EOL LOG_COL_DEFAULT "%s", prompt_prefix);
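
The change is the usual getenv-with-fallback idiom. The standalone sketch below shows the same pattern in isolation; main() and the demo output are illustrative only and not part of run.cpp, while LLAMA_PROMPT_PREFIX and the "> " default come from the diff above.

    #include <cstdio>
    #include <cstdlib>

    int main() {
        // std::getenv returns nullptr when LLAMA_PROMPT_PREFIX is unset,
        // so fall back to the default "> " prompt prefix in that case.
        const char * prompt_prefix_env = std::getenv("LLAMA_PROMPT_PREFIX");
        const char * prompt_prefix     = prompt_prefix_env ? prompt_prefix_env : "> ";
        printf("%s", prompt_prefix);
        return 0;
    }

With the patch applied, exporting the variable before starting the example (e.g. LLAMA_PROMPT_PREFIX='llama> ', assuming the binary built from examples/run) should replace the default "> " shown before each interactive input.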