From 98ce93e776d8d86f842ce05e0288f79efbf4fd2d Mon Sep 17 00:00:00 2001
From: Xuan Son Nguyen
Date: Tue, 6 May 2025 10:51:01 +0200
Subject: [PATCH] llama : fix build_ffn without gate

---
 src/llama-graph.cpp | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/llama-graph.cpp b/src/llama-graph.cpp
index 0da4e7d2b..0b004a8ba 100644
--- a/src/llama-graph.cpp
+++ b/src/llama-graph.cpp
@@ -782,7 +782,7 @@ ggml_tensor * llm_graph_context::build_ffn(
             } break;
     }
 
-    if (type_gate == LLM_FFN_PAR) {
+    if (gate && type_gate == LLM_FFN_PAR) {
        cur = ggml_mul(ctx0, cur, tmp);
        cb(cur, "ffn_gate_par", il);
     }
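
For context, a minimal C++ sketch of the control flow this guard affects. It is not the real build_ffn: ggml tensors are replaced with plain vectors, and the names (ffn_forward, elemwise_mul, silu, FFN_PAR) are illustrative stand-ins, not the library API. It only illustrates why the parallel-gate multiply must be skipped when the model has no gate projection.

// Simplified stand-in for llm_graph_context::build_ffn, using plain vectors
// instead of ggml tensors. All names here are illustrative, not the real API.
#include <cmath>
#include <vector>

using Vec = std::vector<float>;

enum ffn_gate_type { FFN_SEQ, FFN_PAR }; // mirrors LLM_FFN_SEQ / LLM_FFN_PAR

static Vec elemwise_mul(const Vec & a, const Vec & b) {
    Vec out(a.size());
    for (size_t i = 0; i < a.size(); ++i) {
        out[i] = a[i] * b[i];
    }
    return out;
}

static Vec silu(const Vec & x) {
    Vec out(x.size());
    for (size_t i = 0; i < x.size(); ++i) {
        out[i] = x[i] / (1.0f + std::exp(-x[i]));
    }
    return out;
}

// up_out  : result of the up projection (always present)
// gate_out: result of the gate projection, or nullptr when the model has no gate
static Vec ffn_forward(const Vec & up_out, const Vec * gate_out, ffn_gate_type type_gate) {
    Vec tmp = up_out;                     // kept around for the parallel multiply
    Vec cur = gate_out ? *gate_out : tmp; // with no gate, cur is just the up output

    cur = silu(cur);                      // activation (type_op)

    // Before the patch this branch checked only type_gate, so a gate-less FFN
    // configured as "parallel" would compute silu(up(x)) * up(x). Checking the
    // gate as well restricts the multiply to models that really have a gate,
    // which is what the added `gate &&` condition does in llama-graph.cpp.
    if (gate_out && type_gate == FFN_PAR) {
        cur = elemwise_mul(cur, tmp);
    }
    return cur;
}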