From f10522bf76ae6cbc0ccc403723dac2e0e1b304e2 Mon Sep 17 00:00:00 2001 From: "Rodrigo Rodriguez (Pragmatismo)" Date: Fri, 12 Sep 2025 09:22:24 -0300 Subject: [PATCH] Simplify llama.cpp server launch flags: drop --mlock, --no-mmap, and thread tuning. --- src/services/llm_local.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/services/llm_local.rs b/src/services/llm_local.rs index 4e32c8b..2dca6e5 100644 --- a/src/services/llm_local.rs +++ b/src/services/llm_local.rs @@ -193,7 +193,7 @@ async fn start_llm_server( let mut cmd = tokio::process::Command::new("sh"); cmd.arg("-c").arg(format!( - "cd {} && ./llama-server -m {} --host 0.0.0.0 --port {} --n-gpu-layers 99 --mlock --no-mmap --threads 20 --threads-batch 40 &", + "cd {} && ./llama-server -m {} --host 0.0.0.0 --port {} --n-gpu-layers 99 &", llama_cpp_path, model_path, port ));