- Split AIConfig into separate LLMConfig and embedding config structs
- Update create_site.rs to use config.llm instead of config.ai
- Improve config loading comments in bootstrap manager
- Add new LLM-related environment variables with defaults
- Maintain backward compatibility with existing config loading
- Clean up unused AIConfig struct and related code

This change better organizes the AI-related configuration by separating the LLM and embedding settings, making the code more maintainable and flexible for future AI service integrations.
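A rough sketch of what the split might look like; the struct and field names below are assumptions inferred from the config keys in the CSV, not the actual botserver definitions:

```rust
// Hypothetical sketch of the AIConfig split. Field names and defaults are
// inferred from the config keys in the CSV below, not copied from botserver.
#[derive(Debug, Clone)]
pub struct LLMConfig {
    pub key: Option<String>, // llm-key
    pub url: String,         // llm-url
    pub model: String,       // llm-model
}

impl Default for LLMConfig {
    fn default() -> Self {
        Self {
            key: None,
            url: "http://localhost:8080/v1".into(),
            model: String::new(),
        }
    }
}

#[derive(Debug, Clone)]
pub struct EmbeddingConfig {
    pub url: String,   // embedding-url
    pub model: String, // embedding-model
}

impl Default for EmbeddingConfig {
    fn default() -> Self {
        Self {
            url: "http://localhost:8082".into(),
            model: String::new(),
        }
    }
}

#[derive(Debug, Clone, Default)]
pub struct AppConfig {
    // Call sites such as create_site.rs now read config.llm.* instead of the
    // old config.ai.*.
    pub llm: LLMConfig,
    pub embedding: EmbeddingConfig,
}
```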
CSV, 37 lines, 863 B
name,value

server_host,0.0.0.0
server_port,8080
sites_root,/tmp

llm-key,none
llm-url,http://localhost:8080/v1
llm-model,botserver-stack/data/llm/DeepSeek-R1-Distill-Qwen-1.5B-Q3_K_M.gguf

embedding-url,http://localhost:8082
embedding-model,botserver-stack/data/llm/bge-small-en-v1.5-f32.gguf

llm-server,false
llm-server-path,botserver-stack/bin/llm/
llm-server-model,botserver-stack/data/llm/DeepSeek-R1-Distill-Qwen-1.5B-Q3_K_M.gguf
llm-server-host,0.0.0.0
llm-server-port,8080
llm-server-gpu-layers,35
llm-server-n-moe,4
llm-server-ctx-size,2048
llm-server-parallel,4
llm-server-cont-batching,true
llm-server-mlock,true
llm-server-no-mmap,true

email-from,from@domain.com
email-server,mail.domain.com
email-port,587
email-user,user@domain.com
email-pass,

custom-server,localhost
custom-port,5432
custom-database,mycustomdb
custom-username,
custom-password,
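For illustration, a minimal sketch of loading such a name,value file with environment-variable overrides and built-in defaults; the env var names, file path, and helper functions are hypothetical, not the ones introduced by this change:

```rust
// Minimal sketch: load the name,value CSV into a map, then resolve each
// setting as env var -> CSV entry -> default. The env var names (LLM_URL,
// EMBEDDING_URL) and the "config.csv" path are assumptions for this example.
use std::collections::HashMap;
use std::env;
use std::fs;

fn load_csv(path: &str) -> HashMap<String, String> {
    let mut map = HashMap::new();
    if let Ok(contents) = fs::read_to_string(path) {
        // Skip the "name,value" header; blank section-separator lines are ignored.
        for line in contents.lines().skip(1) {
            if let Some((name, value)) = line.split_once(',') {
                if !name.trim().is_empty() {
                    map.insert(name.trim().to_string(), value.trim().to_string());
                }
            }
        }
    }
    map
}

// Resolution order: environment variable, then CSV entry, then built-in default.
fn resolve(map: &HashMap<String, String>, env_key: &str, csv_key: &str, default: &str) -> String {
    env::var(env_key)
        .ok()
        .or_else(|| map.get(csv_key).cloned())
        .unwrap_or_else(|| default.to_string())
}

fn main() {
    let values = load_csv("config.csv"); // hypothetical file name
    let llm_url = resolve(&values, "LLM_URL", "llm-url", "http://localhost:8080/v1");
    let embedding_url = resolve(&values, "EMBEDDING_URL", "embedding-url", "http://localhost:8082");
    println!("llm-url = {llm_url}");
    println!("embedding-url = {embedding_url}");
}
```

Keeping the CSV entry as a fallback behind the environment variables is one way to preserve backward compatibility with the existing config loading while still allowing the new LLM-related variables to take precedence.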