From 15dea7bbdf7d180201fb55bebd0b5c9f6bdd9b16 Mon Sep 17 00:00:00 2001
From: Georgi Gerganov
Date: Fri, 25 Apr 2025 11:55:49 +0300
Subject: [PATCH] opt : remove print [no ci]

---
 src/llama-context.cpp | 2 --
 1 file changed, 2 deletions(-)

diff --git a/src/llama-context.cpp b/src/llama-context.cpp
index f921211f5..769313906 100644
--- a/src/llama-context.cpp
+++ b/src/llama-context.cpp
@@ -1957,8 +1957,6 @@ void llama_context::opt_epoch_iter(
 
         n_outputs = ubatch.n_tokens;
 
-        printf("ubatch.n_tokens = %d\n", ubatch.n_tokens);
-
         // TODO: not sure if this is needed
         if (!kv_self->find_slot(ubatch)) {
             LLAMA_LOG_WARN("%s: failed to find KV cache slot for ubatch of size %d\n", __func__, ubatch.n_tokens);