# BotServer Embedded Configuration
# For Orange Pi, Raspberry Pi, and other ARM SBCs

# Server
HOST=0.0.0.0
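# 0.0.0.0 listens on all interfaces; set 127.0.0.1 to accept local connections only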
PORT=8088
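# Standard RUST_LOG levels: error, warn, info, debug, trace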
RUST_LOG=info

# Database (SQLite for embedded, no PostgreSQL needed)
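# Make sure /opt/botserver/data exists and is writable by the user running BotServer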
DATABASE_URL=sqlite:///opt/botserver/data/botserver.db

# LLM Configuration - Local llama.cpp
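# Expects a llama.cpp server listening on the URL below; illustrative launch (model path is a placeholder):
#   llama-server -m /path/to/tinyllama.gguf --host 127.0.0.1 --port 8080 -c 2048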
LLM_PROVIDER=llamacpp
LLM_API_URL=http://127.0.0.1:8080
LLM_MODEL=tinyllama

# Alternative: Use remote API
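# Remote providers need an API key; keep real keys out of version control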
# LLM_PROVIDER=openai
# LLM_API_URL=https://api.openai.com/v1
# LLM_API_KEY=sk-...

# Alternative: Ollama (if installed)
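# Pull the model first if needed, e.g.: ollama pull tinyllama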
# LLM_PROVIDER=ollama
# LLM_API_URL=http://127.0.0.1:11434
# LLM_MODEL=tinyllama

# Memory limits for embedded
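# Larger contexts increase the model's KV-cache memory; lower MAX_CONTEXT_TOKENS further on boards with very little RAM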
MAX_CONTEXT_TOKENS=2048
MAX_RESPONSE_TOKENS=512
STREAMING_ENABLED=true

# Embedded UI
STATIC_FILES_PATH=/opt/botserver/ui
DEFAULT_UI=embedded

# WebSocket
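# Keepalive ping interval and idle timeout (values assumed to be seconds)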
WS_PING_INTERVAL=30
WS_TIMEOUT=300

# Security (change in production!)
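# Generate a strong secret, e.g.: openssl rand -hex 32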
JWT_SECRET=embedded-change-me-in-production
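# Restrict CORS in production, e.g.: CORS_ORIGINS=https://bots.example.com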
CORS_ORIGINS=*

# Logging
LOG_FILE=/opt/botserver/data/botserver.log
LOG_MAX_SIZE=10M
LOG_RETENTION=7

# Performance tuning for low-memory devices
# Uncomment for <2GB RAM devices
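# RUST_BACKTRACE=0 skips panic backtrace capture; MALLOC_ARENA_MAX=2 caps glibc malloc arenas to cut memory overhead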
# RUST_BACKTRACE=0
# MALLOC_ARENA_MAX=2