From 09651d09ffc1e941bd1be23163abf5495c416547 Mon Sep 17 00:00:00 2001
From: Nexes the Elder <124105151+Nexesenex@users.noreply.github.com>
Date: Fri, 18 Jul 2025 06:25:54 +0200
Subject: [PATCH] graph : Pass the graph placeholder message in debug mode
 (#14748)

Without this condition, the debug log clutters the screen for every batch
processed during prompt processing, or for every token generated in
Kobold.cpp.
---
 src/llama-graph.cpp | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/src/llama-graph.cpp b/src/llama-graph.cpp
index f47538ef0..e27f78c2a 100644
--- a/src/llama-graph.cpp
+++ b/src/llama-graph.cpp
@@ -467,7 +467,9 @@ bool llm_graph_result::can_reuse(const llm_graph_params & params) {
     for (auto & input : inputs) {
         const bool cur = input->can_reuse(params);
 
-        LLAMA_LOG_DEBUG(" %s: can_reuse = %d\n", "placeholder", cur);
+        if (debug > 1) {
+            LLAMA_LOG_DEBUG("%s: can_reuse = %d\n", "placeholder", cur);
+        }
 
         res = res && cur;
     }
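
Note: the sketch below is not part of the patch; it is a minimal, self-contained
illustration of the verbosity-gating pattern the hunk applies. The patch itself
does not show how the `debug` member of llm_graph_result is initialized, so the
struct name, the GRAPH_DEBUG_LEVEL environment variable, and the helper function
here are hypothetical stand-ins, not the library's actual API.

// Minimal sketch of a verbosity-gated debug log, assuming the level is read
// from a (hypothetical) GRAPH_DEBUG_LEVEL environment variable.
#include <cstdio>
#include <cstdlib>

struct graph_result_sketch {
    int debug = 0;

    graph_result_sketch() {
        // assumption for illustration: verbosity comes from the environment
        if (const char * env = std::getenv("GRAPH_DEBUG_LEVEL")) {
            debug = std::atoi(env);
        }
    }

    bool check_input(bool cur) const {
        // only emit the per-input message at the highest verbosity,
        // mirroring the `if (debug > 1)` guard added by the patch
        if (debug > 1) {
            std::fprintf(stderr, "%s: can_reuse = %d\n", "placeholder", cur);
        }
        return cur;
    }
};

int main() {
    graph_result_sketch res;
    res.check_input(true);   // silent unless GRAPH_DEBUG_LEVEL > 1
    res.check_input(false);
    return 0;
}

With the guard in place, the per-input line is printed only when the user has
explicitly raised the debug level, so normal prompt processing and token
generation stay quiet.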