From 8936d0ce585e3bbf1b4e75c56808d0f1f8f70301 Mon Sep 17 00:00:00 2001 From: "Rodrigo Rodriguez (Pragmatismo)" Date: Thu, 11 Sep 2025 16:15:06 -0300 Subject: [PATCH] -Llama.cpp hardcoded config tests. --- src/services/llm_local.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/services/llm_local.rs b/src/services/llm_local.rs index 4fd144f..264efaa 100644 --- a/src/services/llm_local.rs +++ b/src/services/llm_local.rs @@ -191,7 +191,7 @@ async fn start_llm_server( let mut cmd = tokio::process::Command::new("sh"); cmd.arg("-c").arg(format!( - "cd {} && numactl --interleave=all ./llama-server -m {} --host 0.0.0.0 --port {} --threads 20 --threads-batch 40 --temp 0.7 --parallel 1 --repeat-penalty 1.1 --ctx-size 10000 --batch-size 10000 -n 4096 --mlock --no-mmap --prompt-cache-all --flash-attn --no-kv-offload --tensor-split 100 --no-mmap &", + "cd {} && numactl --interleave=all ./llama-server -m {} --host 0.0.0.0 --port {} --threads 20 --threads-batch 40 --temp 0.7 --parallel 1 --repeat-penalty 1.1 --ctx-size 8192 --batch-size 8192 -n 4096 --mlock --no-mmap --flash-attn --no-kv-offload --no-mmap &", llama_cpp_path, model_path, port ));