refactor: simplify UI panels, use pooled DB, add --noui flag

- Removed unused `id` and `app_state` fields from `ChatPanel`; updated constructor to accept but ignore the state, reducing memory footprint.
- Switched database access in `ChatPanel` from a raw `Mutex` lock to a connection pool (`app_state.conn.get()`), improving concurrency and error handling.
- Reordered and cleaned up imports in `status_panel.rs` and formatted struct fields for readability.
- Updated VS Code launch configuration to pass `--noui` argument, enabling headless mode for debugging.
- Bumped several crate versions in `Cargo.lock` (e.g., `bitflags` to 2.10.0, `syn` to 2.0.108, `cookie` to 0.16.2) and added the new `ashpd` dependency, aligning the project with latest library releases.
This commit is contained in:
Rodrigo Rodriguez (Pragmatismo) 2025-11-11 09:42:52 -03:00
parent 25daaa8a9e
commit fd45f4e0dd
52 changed files with 5870 additions and 4389 deletions

3
.vscode/launch.json vendored
View file

@ -1,6 +1,7 @@
{ {
"version": "0.2.0", "version": "0.2.0",
"configurations": [ "configurations": [
{ {
"type": "lldb", "type": "lldb",
"request": "launch", "request": "launch",
@ -12,7 +13,7 @@
"kind": "bin" "kind": "bin"
} }
}, },
"args": [], "args": ["--noui"],
"env": { "env": {
"RUST_LOG": "trace,actix_web=off,aws_sigv4=off,aws_smithy_checksums=off,actix_http=off,mio=off,reqwest=off,aws_runtime=off,aws_smithy_http_client=off,rustls=off,actix_server=off,hyper_util=off,aws_smithy_runtime=off,aws_smithy_runtime_api=off,tracing=off,aws_sdk_s3=off" "RUST_LOG": "trace,actix_web=off,aws_sigv4=off,aws_smithy_checksums=off,actix_http=off,mio=off,reqwest=off,aws_runtime=off,aws_smithy_http_client=off,rustls=off,actix_server=off,hyper_util=off,aws_smithy_runtime=off,aws_smithy_runtime_api=off,tracing=off,aws_sdk_s3=off"

3378
Cargo.lock generated

File diff suppressed because it is too large Load diff

View file

@ -37,17 +37,15 @@ license = "AGPL-3.0"
repository = "https://github.com/GeneralBots/BotServer" repository = "https://github.com/GeneralBots/BotServer"
[features] [features]
default = [ "vectordb", "desktop"] default = []
vectordb = ["qdrant-client"] vectordb = ["qdrant-client"]
email = ["imap"] email = ["imap"]
desktop = [] desktop = ["tauri"]
official = ["tauri"]
[dependencies] [dependencies]
color-eyre = "0.6.5"
crossterm = "0.29.0"
ratatui = "0.29.0"
scopeguard = "1.2.0"
once_cell = "1.18.0"
actix-cors = "0.7" actix-cors = "0.7"
actix-multipart = "0.7" actix-multipart = "0.7"
actix-web = "4.9" actix-web = "4.9"
@ -55,6 +53,7 @@ actix-ws = "0.3"
aes-gcm = "0.10" aes-gcm = "0.10"
anyhow = "1.0" anyhow = "1.0"
argon2 = "0.5" argon2 = "0.5"
async-lock = "2.8.0"
async-stream = "0.3" async-stream = "0.3"
async-trait = "0.1" async-trait = "0.1"
aws-config = "1.8.8" aws-config = "1.8.8"
@ -62,9 +61,11 @@ aws-sdk-s3 = { version = "1.109.0", features = ["behavior-version-latest"] }
base64 = "0.22" base64 = "0.22"
bytes = "1.8" bytes = "1.8"
chrono = { version = "0.4", features = ["serde"] } chrono = { version = "0.4", features = ["serde"] }
color-eyre = "0.6.5"
cron = "0.15.0" cron = "0.15.0"
crossterm = "0.29.0"
csv = "1.3" csv = "1.3"
diesel = { version = "2.1", features = ["postgres", "uuid", "chrono", "serde_json"] } diesel = { version = "2.1", features = ["postgres", "uuid", "chrono", "serde_json", "r2d2"] }
dotenvy = "0.15" dotenvy = "0.15"
downloader = "0.2" downloader = "0.2"
env_logger = "0.11" env_logger = "0.11"
@ -81,18 +82,25 @@ mailparse = "0.15"
mockito = "1.7.0" mockito = "1.7.0"
native-tls = "0.2" native-tls = "0.2"
num-format = "0.4" num-format = "0.4"
once_cell = "1.18.0"
pdf-extract = "0.10.0" pdf-extract = "0.10.0"
qdrant-client = { version = "1.12", optional = true } qdrant-client = { version = "1.12", optional = true }
r2d2 = "0.8.10"
rand = "0.9.2" rand = "0.9.2"
ratatui = "0.29.0"
redis = { version = "0.27", features = ["tokio-comp"] } redis = { version = "0.27", features = ["tokio-comp"] }
regex = "1.11" regex = "1.11"
reqwest = { version = "0.12", features = ["json", "stream"] } reqwest = { version = "0.12", features = ["json", "stream"] }
rhai = { git = "https://github.com/therealprof/rhai.git", branch = "features/use-web-time" } rhai = { git = "https://github.com/therealprof/rhai.git", branch = "features/use-web-time" }
scopeguard = "1.2.0"
serde = { version = "1.0", features = ["derive"] } serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0" serde_json = "1.0"
sha2 = "0.10.9" sha2 = "0.10.9"
smartstring = "1.0" smartstring = "1.0"
sysinfo = "0.37.2" sysinfo = "0.37.2"
tauri = { version = "2", features = ["unstable"], optional = true }
tauri-plugin-dialog = { version = "2", optional = true }
tauri-plugin-opener = { version = "2", optional = true }
tempfile = "3" tempfile = "3"
time = "0.3.44" time = "0.3.44"
tokio = { version = "1.41", features = ["full"] } tokio = { version = "1.41", features = ["full"] }
@ -104,6 +112,11 @@ urlencoding = "2.1"
uuid = { version = "1.11", features = ["serde", "v4"] } uuid = { version = "1.11", features = ["serde", "v4"] }
zip = "2.2" zip = "2.2"
[build-dependencies]
tauri-build = { version = "2", features = [] }
[profile.release] [profile.release]
lto = true lto = true
opt-level = "z" opt-level = "z"

78
TODO.md
View file

@ -1,78 +0,0 @@
# Documentation Completion Checklist
- [x] Created Chapter 01 files (README, installation, first-conversation, sessions)
- [ ] Fill Chapter 02 files (README, gbai, gbdialog, gbkb, gbot, gbtheme, gbdrive) already have content
- [ ] Complete Chapter 03 files
- [ ] README.md
- [ ] vector-collections.md
- [ ] indexing.md
- [ ] qdrant.md
- [ ] semantic-search.md
- [ ] context-compaction.md
- [ ] caching.md (if needed)
- [ ] Complete Chapter 04 files
- [ ] README.md
- [ ] structure.md
- [ ] web-interface.md
- [ ] css.md
- [ ] html.md
- [ ] Complete Chapter 05 files
- [ ] README.md
- [ ] basics.md
- [ ] templates.md
- [ ] template-start.md
- [ ] template-auth.md
- [ ] template-summary.md
- [ ] template-enrollment.md
- [ ] keywords.md
- [ ] All keyword pages (talk, hear, set-user, set-context, llm, get-bot-memory, set-bot-memory, set-kb, add-kb, add-website, add-tool, list-tools, remove-tool, clear-tools, get, find, set, on, set-schedule, create-site, create-draft, website-of, print, wait, format, first, last, for-each, exit-for)
- [ ] Complete Chapter 06 files
- [ ] README.md
- [ ] architecture.md
- [ ] building.md
- [ ] crates.md
- [ ] services.md
- [ ] custom-keywords.md
- [ ] dependencies.md
- [ ] Complete Chapter 07 files
- [ ] README.md
- [ ] config-csv.md
- [ ] parameters.md
- [ ] answer-modes.md
- [ ] llm-config.md
- [ ] context-config.md
- [ ] minio.md
- [ ] Complete Chapter 08 files
- [ ] README.md
- [ ] tool-definition.md
- [ ] param-declaration.md
- [ ] compilation.md
- [ ] mcp-format.md
- [ ] openai-format.md
- [ ] get-integration.md
- [ ] external-apis.md
- [ ] Complete Chapter 09 files
- [ ] README.md
- [ ] core-features.md
- [ ] conversation.md
- [ ] ai-llm.md
- [ ] knowledge-base.md
- [ ] automation.md
- [ ] email.md
- [ ] web-automation.md
- [ ] storage.md
- [ ] channels.md
- [ ] Complete Chapter 10 files
- [ ] README.md
- [ ] setup.md
- [ ] standards.md
- [ ] testing.md
- [ ] pull-requests.md
- [ ] documentation.md
- [ ] Complete Appendix I files
- [ ] README.md
- [ ] schema.md
- [ ] tables.md
- [ ] relationships.md
- [ ] Verify SUMMARY.md links
- [ ] Run mdbook build to ensure no errors

View file

@ -21,24 +21,24 @@ for file in "${prompts[@]}"; do
done done
dirs=( dirs=(
#"auth" "auth"
#"automation" "automation"
#"basic" "basic"
"bootstrap" "bootstrap"
"bot" "bot"
#"channels" #"channels"
#"config" "config"
#"context" #"context"
"drive_monitor" "drive_monitor"
#"email" #"email"
"file" #"file"
# "kb" #"kb"
"llm" "llm"
#"llm_models" #"llm_models"
#"org" #"org"
"package_manager" #"package_manager"
#"riot_compiler" #"riot_compiler"
#"session" "session"
"shared" "shared"
#"tests" #"tests"
#"tools" #"tools"
@ -48,12 +48,6 @@ dirs=(
#"web_automation" #"web_automation"
) )
filter_rust_file() {
sed -E '/^\s*\/\//d' "$1" | \
sed -E '/info!\s*\(/d' | \
sed -E '/debug!\s*\(/d' | \
sed -E '/trace!\s*\(/d'
}
for dir in "${dirs[@]}"; do for dir in "${dirs[@]}"; do
find "$PROJECT_ROOT/src/$dir" -name "*.rs" | while read -r file; do find "$PROJECT_ROOT/src/$dir" -name "*.rs" | while read -r file; do

View file

@ -74,6 +74,8 @@ cargo install cargo-audit
cargo install cargo-edit cargo install cargo-edit
apt install -y libpq-dev apt install -y libpq-dev
apt install -y valkey-cli apt install -y valkey-cli
valkey-cli config set stop-writes-on-bgsave-error no
# Util # Util

View file

@ -1,9 +1,15 @@
#!/bin/bash #!/bin/bash
set -e # Exit on error
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="$SCRIPT_DIR" PROJECT_ROOT="$SCRIPT_DIR"
OUTPUT_FILE="/tmp/prompt.out" OUTPUT_FILE="/tmp/prompt.out"
# Check required commands
command -v cargo >/dev/null 2>&1 || { echo "cargo is required but not installed" >&2; exit 1; }
command -v xclip >/dev/null 2>&1 || { echo "xclip is required but not installed" >&2; exit 1; }
echo "Please, fix this consolidated LLM Context" > "$OUTPUT_FILE" echo "Please, fix this consolidated LLM Context" > "$OUTPUT_FILE"
prompts=( prompts=(
@ -12,6 +18,14 @@ prompts=(
"./Cargo.toml" "./Cargo.toml"
) )
# Validate files exist
for file in "${prompts[@]}"; do
if [ ! -f "$file" ]; then
echo "Required file not found: $file" >&2
exit 1
fi
done
for file in "${prompts[@]}"; do for file in "${prompts[@]}"; do
cat "$file" >> "$OUTPUT_FILE" cat "$file" >> "$OUTPUT_FILE"
echo "" >> "$OUTPUT_FILE" echo "" >> "$OUTPUT_FILE"
@ -43,11 +57,15 @@ dirs=(
# "whatsapp" # "whatsapp"
) )
for dir in "${dirs[@]}"; do for dir in "${dirs[@]}"; do
find "$PROJECT_ROOT/src/$dir" -name "*.rs" | while read file; do if [ -d "$PROJECT_ROOT/src/$dir" ]; then
echo $file >> "$OUTPUT_FILE" find "$PROJECT_ROOT/src/$dir" -name "*.rs" | while read -r file; do
cat "$file" >> "$OUTPUT_FILE" if [ -f "$file" ]; then
echo "" >> "$OUTPUT_FILE" echo "$file" >> "$OUTPUT_FILE"
done cat "$file" >> "$OUTPUT_FILE"
echo "" >> "$OUTPUT_FILE"
fi
done
fi
done done
# Also append the specific files you mentioned # Also append the specific files you mentioned
@ -63,15 +81,18 @@ cargo build --message-format=short 2>&1 | grep -E 'error' >> "$OUTPUT_FILE"
# Calculate and display token count (approximation: words * 1.3) # Calculate and display token count (approximation: words * 1.3)
WORD_COUNT=$(wc -w < "$OUTPUT_FILE") WORD_COUNT=$(wc -w < "$OUTPUT_FILE") || { echo "Error counting words" >&2; exit 1; }
TOKEN_COUNT=$(echo "$WORD_COUNT * 1.3 / 1" | bc) TOKEN_COUNT=$(echo "$WORD_COUNT * 1.3 / 1" | bc) || { echo "Error calculating tokens" >&2; exit 1; }
FILE_SIZE=$(wc -c < "$OUTPUT_FILE") FILE_SIZE=$(wc -c < "$OUTPUT_FILE") || { echo "Error getting file size" >&2; exit 1; }
echo "" >> "$OUTPUT_FILE" echo "" >> "$OUTPUT_FILE"
echo "Approximate token count: $TOKEN_COUNT" echo "Approximate token count: $TOKEN_COUNT"
echo "Context size: $FILE_SIZE bytes" echo "Context size: $FILE_SIZE bytes"
cat "$OUTPUT_FILE" | xclip -selection clipboard if ! cat "$OUTPUT_FILE" | xclip -selection clipboard; then
echo "Error copying to clipboard" >&2
exit 1
fi
echo "Content copied to clipboard (xclip)" echo "Content copied to clipboard (xclip)"
rm -f "$OUTPUT_FILE" rm -f "$OUTPUT_FILE"

View file

@ -25,29 +25,6 @@ CREATE INDEX IF NOT EXISTS idx_system_automations_bot_id
ON public.system_automations (bot_id); ON public.system_automations (bot_id);
ALTER TABLE public.system_automations
ADD CONSTRAINT system_automations_bot_kind_param_unique
UNIQUE (bot_id, kind, param);
-- Migration 6.0.10: Add unique constraint for system_automations upsert
-- Description: Creates a unique constraint matching the ON CONFLICT target in set_schedule.rs
DO $$
BEGIN
IF NOT EXISTS (
SELECT 1 FROM pg_constraint
WHERE conname = 'system_automations_bot_kind_param_unique'
) THEN
ALTER TABLE public.system_automations
ADD CONSTRAINT system_automations_bot_kind_param_unique
UNIQUE (bot_id, kind, param);
END IF;
END
$$;
-- Migration 6.0.6: Add unique constraint for system_automations
-- Fixes error: "there is no unique or exclusion constraint matching the ON CONFLICT specification"
ALTER TABLE public.system_automations ALTER TABLE public.system_automations
ADD CONSTRAINT system_automations_bot_kind_param_unique ADD CONSTRAINT system_automations_bot_kind_param_unique
UNIQUE (bot_id, kind, param); UNIQUE (bot_id, kind, param);

View file

@ -3,17 +3,19 @@
### Fallback Strategy (After 3 attempts / 10 minutes): ### Fallback Strategy (After 3 attempts / 10 minutes):
When initial attempts fail, sequentially try these LLMs: When initial attempts fail, sequentially try these LLMs:
1. **DeepSeek-V3-0324** (good architect, adventure, reliable, let little errors just to be fixed by gpt-*) 1. **DeepSeek-V3-0324** (good architect, adventure, reliable, let little errors just to be fixed by gpt-*)
1. **DeepSeek-V3.1** (slower)
1. **gpt-5-chat** (slower, let warnings...) 1. **gpt-5-chat** (slower, let warnings...)
1. **gpt-oss-120b** 1. **gpt-oss-120b**
1. **Claude (Web)**: Copy only the problem statement and create unit tests. Create/extend UI. 1. **Claude (Web)**: Copy only the problem statement and create unit tests. Create/extend UI.
1. **Llama-3.3-70B-Instruct** (alternative)
### Development Workflow: ### Development Workflow:
- **One requirement at a time** with sequential commits - **One requirement at a time** with sequential commits
- **On error**: Stop and consult Claude for guidance - **On unresolved error**: Stop and use add-req.sh, and consult Claude for guidance, with DeepThinking in DeepSeek also, with Web turned on.
- **Change progression**: Start with DeepSeek, conclude with gpt-oss-120b - **Change progression**: Start with DeepSeek, conclude with gpt-oss-120b
- If a big req. fails, specify a @code file that has a similar pattern or a sample from official docs. - If a big req. fails, specify a @code file that has a similar pattern or a sample from official docs.
- **Final validation**: Use prompt "cargo check" with gpt-oss-120b - **Final validation**: Use prompt "cargo check" with gpt-oss-120b
- Be humble, one requirement, one commit. But sometimes, freedom of chaos is welcome - when no deadlines are set. - Be humble, one requirement, one commit. But sometimes, freedom of chaos is welcome - when no deadlines are set.
- Fix manually in case of dangerous trouble.
- Keep in the source codebase only deployed and tested source, no lab source code in main project. At least, use optional features to introduce new behaviour gradually in PRODUCTION. - Keep in the source codebase only deployed and tested source, no lab source code in main project. At least, use optional features to introduce new behaviour gradually in PRODUCTION.
- Transform good articles into prompts for the coder.
- Switch to libraries that have LLM affinity.
- Ensure 'continue' on LLMs, they can EOF and say are done, but got more to output.

View file

@ -1,5 +1,6 @@
- On code return identifiers/chars in english language. - On code return identifiers/characters in English language, no invalid tokens!
- Do not emmit any comment, and remove any existants in Rust/html. - Do not emit any comments, and remove any existing ones in Rust/HTML.
- Compact the code emission where possible. - Compact the code emission where possible.
- On change code, ensure cargo check cycle to remove warnings and errors. - On code change, ensure cargo check cycle to remove warnings and errors.
- Never use defaults or magic values in code (never unwrap_or_else or similars) - Never use defaults or magic values in code (never unwrap_or_else or similar)
- Check borrow, clone, types, common Rust errors! Return 0 warning code!

View file

@ -1,15 +1,16 @@
MOST IMPORTANT CODE GENERATION RULES: MOST IMPORTANT CODE GENERATION RULES:
- KISS, NO TALK, SECURED ENTERPRISE GRADE THREAD SAFE CODE ONLY.
- Use rustc 1.90.0 (1159e78c4 2025-09-14). - Use rustc 1.90.0 (1159e78c4 2025-09-14).
- Check for warnings related to use of mut where is dispensable. - Check for warnings related to use of mut where is dispensable.
- No placeholders, never comment/uncomment code, no explanations, no filler text. - No placeholders, never comment/uncomment code, no explanations, no filler text.
- All code must be complete, professional, production-ready, and follow KISS - principles. - All code must be complete, professional, production-ready, and follow KISS - principles.
- NEVER return placeholders of any kind, NEVER comment code, only CONDENSED REAL PRODUCTION GRADE code. - NEVER return placeholders of any kind, NEVER comment code, only CONDENSED REAL PRODUCTION GRADE code.
- REMOVE ALL COMMENTS FROM GENERATED CODE. DO NOT COMMENT AT ALL, NO TALK! - REMOVE ALL COMMENTS FROM GENERATED CODE. DO NOT COMMENT AT ALL, NO TALK, just say you are finished!
- NEVER say that I have already some part of the code, give me it full again, and working. - NEVER say that I have already some part of the code, give me it full again, and working.
- Always increment logging with (all-in-one-line) info!, debug!, trace! to give birth to the console. - Always increment logging with (all-in-one-line) info!, debug!, trace! to give birth to the console.
- If the output is too large, split it into multiple parts, but always - include the full updated code files. - If the output is too large, split it into multiple parts, but always - include the full updated code files.
- Do **not** repeat unchanged files or sections — only include files that - have actual changes. - Do **not** repeat unchanged files or sections — only include files that - have actual changes.
- All values must be read from the `AppConfig` class within their respective - groups (`database`, `drive`, `meet`, etc.); never use hardcoded or magic - values. - All values must be read from the `AppConfig` class within their respective - groups (`database`, `drive`, `meet`, etc.); never use hardcoded or commercial names like S3, Azure or something like that, prefer Drive, Cloud, instead, never use url or magic - values like api.openai - no unwrap_or_default at all!!!
- Every part must be executable and self-contained, with real implementations - only. - Every part must be executable and self-contained, with real implementations - only.
- DO NOT WRITE ANY ERROR HANDLING CODE LET IT CRASH. - DO NOT WRITE ANY ERROR HANDLING CODE LET IT CRASH.
- Never generate two or more trace messages that are equal! - Never generate two or more trace messages that are equal!
@ -18,3 +19,5 @@ MOST IMPORTANT CODE GENERATION RULES:
- NEVER return a untouched file in output. Just files that need to be updated. - NEVER return a untouched file in output. Just files that need to be updated.
- Instead of rand::thread_rng(), use rand::rng() - Instead of rand::thread_rng(), use rand::rng()
- Review warnings of non used imports! Give me 0 warnings, please. - Review warnings of non used imports! Give me 0 warnings, please.
- Ensure you remove these common errors: borrow of moved value, unused variable, use of moved value.

View file

@ -1,139 +1,124 @@
use actix_web::{HttpRequest, HttpResponse, Result, web}; use actix_web::{HttpRequest, HttpResponse, Result, web};
use log::error; use log::error;
use std::collections::HashMap; use std::collections::HashMap;
use std::sync::Arc; use std::sync::Arc;
use uuid::Uuid; use uuid::Uuid;
use crate::shared::state::AppState; use crate::shared::state::AppState;
pub struct AuthService {} pub struct AuthService {}
impl AuthService { impl AuthService {
pub fn new() -> Self { pub fn new() -> Self {
Self {} Self {}
} }
} }
#[actix_web::get("/api/auth")] #[actix_web::get("/api/auth")]
async fn auth_handler( async fn auth_handler(
_req: HttpRequest, _req: HttpRequest,
data: web::Data<AppState>, data: web::Data<AppState>,
web::Query(params): web::Query<HashMap<String, String>>, web::Query(params): web::Query<HashMap<String, String>>,
) -> Result<HttpResponse> { ) -> Result<HttpResponse> {
let bot_name = params.get("bot_name").cloned().unwrap_or_default(); let bot_name = params.get("bot_name").cloned().unwrap_or_default();
let _token = params.get("token").cloned(); let _token = params.get("token").cloned();
let user_id = {
let user_id = { let mut sm = data.session_manager.lock().await;
let mut sm = data.session_manager.lock().await; sm.get_or_create_anonymous_user(None).map_err(|e| {
sm.get_or_create_anonymous_user(None).map_err(|e| { error!("Failed to create anonymous user: {}", e);
error!("Failed to create anonymous user: {}", e); actix_web::error::ErrorInternalServerError("Failed to create user")
actix_web::error::ErrorInternalServerError("Failed to create user") })?
})? };
}; let (bot_id, bot_name) = tokio::task::spawn_blocking({
let bot_name = bot_name.clone();
let (bot_id, bot_name) = tokio::task::spawn_blocking({ let conn = data.conn.clone();
let bot_name = bot_name.clone(); move || {
let conn_arc = Arc::clone(&data.conn); let mut db_conn = conn.get().map_err(|e| format!("Failed to get database connection: {}", e))?;
move || { use crate::shared::models::schema::bots::dsl::*;
let mut db_conn = conn_arc.lock().unwrap(); use diesel::prelude::*;
use crate::shared::models::schema::bots::dsl::*; match bots
use diesel::prelude::*; .filter(name.eq(&bot_name))
.filter(is_active.eq(true))
match bots .select((id, name))
.filter(name.eq(&bot_name)) .first::<(Uuid, String)>(&mut db_conn)
.filter(is_active.eq(true)) .optional()
.select((id, name)) {
.first::<(Uuid, String)>(&mut *db_conn) Ok(Some((id_val, name_val))) => Ok((id_val, name_val)),
.optional() Ok(None) => {
{ match bots
Ok(Some((id_val, name_val))) => Ok((id_val, name_val)), .filter(is_active.eq(true))
Ok(None) => { .select((id, name))
match bots .first::<(Uuid, String)>(&mut db_conn)
.filter(is_active.eq(true)) .optional()
.select((id, name)) {
.first::<(Uuid, String)>(&mut *db_conn) Ok(Some((id_val, name_val))) => Ok((id_val, name_val)),
.optional() Ok(None) => Err("No active bots found".to_string()),
{ Err(e) => Err(format!("DB error: {}", e)),
Ok(Some((id_val, name_val))) => Ok((id_val, name_val)), }
Ok(None) => Err("No active bots found".to_string()), }
Err(e) => Err(format!("DB error: {}", e)), Err(e) => Err(format!("DB error: {}", e)),
} }
} }
Err(e) => Err(format!("DB error: {}", e)), })
} .await
} .map_err(|e| {
}) error!("Spawn blocking failed: {}", e);
.await actix_web::error::ErrorInternalServerError("DB thread error")
.map_err(|e| { })?
error!("Spawn blocking failed: {}", e); .map_err(|e| {
actix_web::error::ErrorInternalServerError("DB thread error") error!("{}", e);
})? actix_web::error::ErrorInternalServerError(e)
.map_err(|e| { })?;
error!("{}", e); let session = {
actix_web::error::ErrorInternalServerError(e) let mut sm = data.session_manager.lock().await;
})?; sm.get_or_create_user_session(user_id, bot_id, "Auth Session")
.map_err(|e| {
let session = { error!("Failed to create session: {}", e);
let mut sm = data.session_manager.lock().await; actix_web::error::ErrorInternalServerError(e.to_string())
sm.get_or_create_user_session(user_id, bot_id, "Auth Session") })?
.map_err(|e| { .ok_or_else(|| {
error!("Failed to create session: {}", e); error!("Failed to create session");
actix_web::error::ErrorInternalServerError(e.to_string()) actix_web::error::ErrorInternalServerError("Failed to create session")
})? })?
.ok_or_else(|| { };
error!("Failed to create session"); let auth_script_path = format!("./work/{}.gbai/{}.gbdialog/auth.ast", bot_name, bot_name);
actix_web::error::ErrorInternalServerError("Failed to create session") if tokio::fs::metadata(&auth_script_path).await.is_ok() {
})? let auth_script = match tokio::fs::read_to_string(&auth_script_path).await {
}; Ok(content) => content,
Err(e) => {
let auth_script_path = format!("./work/{}.gbai/{}.gbdialog/auth.ast", bot_name, bot_name); error!("Failed to read auth script: {}", e);
return Ok(HttpResponse::Ok().json(serde_json::json!({
if tokio::fs::metadata(&auth_script_path).await.is_ok() { "user_id": session.user_id,
let auth_script = match tokio::fs::read_to_string(&auth_script_path).await { "session_id": session.id,
Ok(content) => content, "status": "authenticated"
Err(e) => { })));
error!("Failed to read auth script: {}", e); }
return Ok(HttpResponse::Ok().json(serde_json::json!({ };
"user_id": session.user_id, let script_service = crate::basic::ScriptService::new(Arc::clone(&data), session.clone());
"session_id": session.id, match tokio::time::timeout(
"status": "authenticated" std::time::Duration::from_secs(5),
}))); async {
} script_service
}; .compile(&auth_script)
.and_then(|ast| script_service.run(&ast))
let script_service = crate::basic::ScriptService::new(Arc::clone(&data), session.clone()); }
).await {
match tokio::time::timeout( Ok(Ok(result)) => {
std::time::Duration::from_secs(5), if result.to_string() == "false" {
async { error!("Auth script returned false");
script_service return Ok(HttpResponse::Unauthorized()
.compile(&auth_script) .json(serde_json::json!({"error": "Authentication failed"})));
.and_then(|ast| script_service.run(&ast)) }
} }
).await { Ok(Err(e)) => {
Ok(Ok(result)) => { error!("Auth script execution error: {}", e);
if result.to_string() == "false" { }
error!("Auth script returned false"); Err(_) => {
return Ok(HttpResponse::Unauthorized() error!("Auth script timeout");
.json(serde_json::json!({"error": "Authentication failed"}))); }
} }
} }
Ok(Err(e)) => { Ok(HttpResponse::Ok().json(serde_json::json!({
error!("Auth script execution error: {}", e); "user_id": session.user_id,
} "session_id": session.id,
Err(_) => { "status": "authenticated"
error!("Auth script timeout"); })))
}
}
}
Ok(HttpResponse::Ok().json(serde_json::json!({
"user_id": session.user_id,
"session_id": session.id,
"status": "authenticated"
})))
} }
#[cfg(test)] #[cfg(test)]
pub mod auth_test; pub mod auth_test;

View file

@ -3,167 +3,100 @@ use crate::llm_models;
use crate::shared::models::Automation; use crate::shared::models::Automation;
use crate::shared::state::AppState; use crate::shared::state::AppState;
use diesel::prelude::*; use diesel::prelude::*;
use log::{error, info, trace}; use log::{error, trace};
use std::collections::HashSet; use std::collections::HashSet;
use std::sync::Arc; use std::sync::Arc;
use tokio::time::{interval, Duration}; use tokio::time::{interval, Duration};
use uuid::Uuid; use uuid::Uuid;
pub fn start_compact_prompt_scheduler(state: Arc<AppState>) { pub fn start_compact_prompt_scheduler(state: Arc<AppState>) {
tokio::spawn(async move { tokio::spawn(async move {
// Initial 30 second delay before first run tokio::time::sleep(Duration::from_secs(30)).await;
tokio::time::sleep(Duration::from_secs(30)).await; let mut interval = interval(Duration::from_secs(60));
let mut interval = interval(Duration::from_secs(60)); loop {
loop { interval.tick().await;
interval.tick().await; if let Err(e) = execute_compact_prompt(Arc::clone(&state)).await {
if let Err(e) = execute_compact_prompt(Arc::clone(&state)).await { error!("Prompt compaction failed: {}", e);
error!("Prompt compaction failed: {}", e); }
} }
} });
});
} }
async fn execute_compact_prompt(state: Arc<AppState>) -> Result<(), Box<dyn std::error::Error + Send + Sync>> { async fn execute_compact_prompt(state: Arc<AppState>) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
use crate::shared::models::system_automations::dsl::{is_active, system_automations}; use crate::shared::models::system_automations::dsl::{is_active, system_automations};
let automations: Vec<Automation> = {
let automations: Vec<Automation> = { let mut conn = state.conn.get().map_err(|e| format!("Failed to acquire lock: {}", e))?;
let mut conn = state system_automations.filter(is_active.eq(true)).load::<Automation>(&mut *conn)?
.conn };
.lock() for automation in automations {
.map_err(|e| format!("Failed to acquire lock: {}", e))?; if let Err(e) = compact_prompt_for_bot(&state, &automation).await {
error!("Failed to compact prompt for bot {}: {}", automation.bot_id, e);
system_automations }
.filter(is_active.eq(true)) }
.load::<Automation>(&mut *conn)? Ok(())
};
for automation in automations {
if let Err(e) = compact_prompt_for_bot(&state, &automation).await {
error!(
"Failed to compact prompt for bot {}: {}",
automation.bot_id, e
);
}
}
Ok(())
} }
async fn compact_prompt_for_bot(state: &Arc<AppState>, automation: &Automation) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
async fn compact_prompt_for_bot( use once_cell::sync::Lazy;
state: &Arc<AppState>, use scopeguard::guard;
automation: &Automation, static IN_PROGRESS: Lazy<tokio::sync::Mutex<HashSet<Uuid>>> = Lazy::new(|| tokio::sync::Mutex::new(HashSet::new()));
) -> Result<(), Box<dyn std::error::Error + Send + Sync>> { {
// Skip if already compacting this bot let mut in_progress = IN_PROGRESS.lock().await;
use once_cell::sync::Lazy; if in_progress.contains(&automation.bot_id) {
use scopeguard::guard; return Ok(());
static IN_PROGRESS: Lazy<tokio::sync::Mutex<HashSet<Uuid>>> = Lazy::new(|| { }
tokio::sync::Mutex::new(HashSet::new()) in_progress.insert(automation.bot_id);
}); }
let bot_id = automation.bot_id;
{ let _cleanup = guard((), |_| {
let mut in_progress = IN_PROGRESS.lock().await; tokio::spawn(async move {
if in_progress.contains(&automation.bot_id) { let mut in_progress = IN_PROGRESS.lock().await;
trace!("Skipping compaction for bot {} - already in progress", automation.bot_id); in_progress.remove(&bot_id);
return Ok(()); });
} });
in_progress.insert(automation.bot_id); let config_manager = ConfigManager::new(state.conn.clone());
} let compact_threshold = config_manager
.get_config(&automation.bot_id, "prompt-compact", None)?
// Ensure cleanup happens when function exits .parse::<i32>()
let bot_id = automation.bot_id; .unwrap_or(0);
let _cleanup = guard((), |_| { if compact_threshold == 0 {
tokio::spawn(async move { return Ok(());
let mut in_progress = IN_PROGRESS.lock().await; } else if compact_threshold < 0 {
in_progress.remove(&bot_id); trace!("Negative compact threshold detected for bot {}, skipping", automation.bot_id);
trace!("Released compaction lock for bot {}", bot_id); }
}); let sessions = {
}); let mut session_manager = state.session_manager.lock().await;
session_manager.get_user_sessions(Uuid::nil())?
info!("Executing prompt compaction for bot: {}", automation.bot_id); };
for session in sessions {
let config_manager = ConfigManager::new(Arc::clone(&state.conn)); if session.bot_id != automation.bot_id {
let compact_threshold = config_manager continue;
.get_config(&automation.bot_id, "prompt-compact", None)? }
.parse::<i32>() let history = {
.unwrap_or(0); let mut session_manager = state.session_manager.lock().await;
session_manager.get_conversation_history(session.id, session.user_id)?
// Compact if threshold is negative (always compact) or positive (conditional) };
if compact_threshold == 0 { trace!("Compacting prompt for session {}: {} messages", session.id, history.len());
return Ok(()); let mut compacted = String::new();
} else if compact_threshold < 0 { for (role, content) in &history {
info!("Compaction forced for bot {} (threshold = {})", automation.bot_id, compact_threshold); compacted.push_str(&format!("{}: {}\n", role, content));
} }
let llm_provider = state.llm_provider.clone();
// Get sessions without holding lock let compacted_clone = compacted.clone();
let sessions = { let summarized = match llm_provider.summarize(&compacted_clone).await {
let mut session_manager = state.session_manager.lock().await; Ok(summary) => {
session_manager.get_user_sessions(Uuid::nil())? trace!("Successfully summarized conversation for session {}, summary length: {}", session.id, summary.len());
}; let handler = llm_models::get_handler(&config_manager.get_config(&automation.bot_id, "llm-model", None).unwrap_or_default());
let filtered = handler.process_content(&summary);
for session in sessions { format!("SUMMARY: {}", filtered)
if session.bot_id != automation.bot_id { },
continue; Err(e) => {
} error!("Failed to summarize conversation for session {}: {}", session.id, e);
format!("SUMMARY: {}", compacted)
// Get history without holding lock }
let history = { };
let mut session_manager = state.session_manager.lock().await; trace!("Prompt compacted {}: {} messages", session.id, history.len());
session_manager.get_conversation_history(session.id, session.user_id)? {
}; let mut session_manager = state.session_manager.lock().await;
session_manager.save_message(session.id, session.user_id, 9, &summarized, 1)?;
info!( }
"Compacting prompt for session {}: {} messages", }
session.id, Ok(())
history.len()
);
// Compact entire conversation history when threshold is reached
let mut compacted = String::new();
for (role, content) in &history {
compacted.push_str(&format!("{}: {}\n", role, content));
}
// Clone needed references for async task
let llm_provider = state.llm_provider.clone();
let compacted_clone = compacted.clone();
// Run LLM summarization with proper tracing and filtering
trace!("Starting summarization for session {}", session.id);
let summarized = match llm_provider.summarize(&compacted_clone).await {
Ok(summary) => {
trace!("Successfully summarized session {} ({} chars)",
session.id, summary.len());
// Use handler to filter <think> content
let handler = llm_models::get_handler(
&config_manager.get_config(
&automation.bot_id,
"llm-model",
None
).unwrap_or_default()
);
let filtered = handler.process_content(&summary);
format!("SUMMARY: {}", filtered)
},
Err(e) => {
error!("Failed to summarize conversation for session {}: {}", session.id, e);
trace!("Using fallback summary for session {}", session.id);
format!("SUMMARY: {}", compacted) // Fallback
}
};
info!(
"Prompt compacted {}: {} messages",
session.id,
history.len()
);
// Instead of clearing messages, insert a compacted marker message
{
let mut session_manager = state.session_manager.lock().await;
// Save a special compacted message type (9)
session_manager.save_message(session.id, session.user_id, 9, &summarized, 1)?;
trace!("Inserted compacted message for session {}", session.id);
}
}
Ok(())
} }

View file

@ -5,28 +5,21 @@ use crate::shared::state::AppState;
use chrono::Utc; use chrono::Utc;
use cron::Schedule; use cron::Schedule;
use diesel::prelude::*; use diesel::prelude::*;
use log::{error, info}; use log::{error, trace};
use std::str::FromStr; use std::str::FromStr;
use std::sync::Arc; use std::sync::Arc;
use tokio::time::{interval, Duration}; use tokio::time::{interval, Duration};
mod compact_prompt; mod compact_prompt;
pub struct AutomationService { pub struct AutomationService {
state: Arc<AppState>, state: Arc<AppState>,
} }
impl AutomationService { impl AutomationService {
pub fn new(state: Arc<AppState>) -> Self { pub fn new(state: Arc<AppState>) -> Self {
// Start the compact prompt scheduler
crate::automation::compact_prompt::start_compact_prompt_scheduler(Arc::clone(&state)); crate::automation::compact_prompt::start_compact_prompt_scheduler(Arc::clone(&state));
Self { state } Self { state }
} }
pub async fn spawn(self) -> Result<(), Box<dyn std::error::Error + Send + Sync>> { pub async fn spawn(self) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
info!("Automation service started");
let mut ticker = interval(Duration::from_secs(60)); let mut ticker = interval(Duration::from_secs(60));
loop { loop {
ticker.tick().await; ticker.tick().await;
if let Err(e) = self.check_scheduled_tasks().await { if let Err(e) = self.check_scheduled_tasks().await {
@ -34,29 +27,24 @@ impl AutomationService {
} }
} }
} }
async fn check_scheduled_tasks(&self) -> Result<(), Box<dyn std::error::Error + Send + Sync>> { async fn check_scheduled_tasks(&self) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
use crate::shared::models::system_automations::dsl::{ use crate::shared::models::system_automations::dsl::{
id, is_active, kind, last_triggered as lt_column, system_automations, id, is_active, kind, last_triggered as lt_column, system_automations,
}; };
let mut conn = self let mut conn = self
.state .state
.conn .conn
.lock() .get()
.map_err(|e| format!("Failed to acquire lock: {}", e))?; .map_err(|e| format!("Failed to acquire database connection: {}", e))?;
let automations: Vec<Automation> = system_automations let automations: Vec<Automation> = system_automations
.filter(is_active.eq(true)) .filter(is_active.eq(true))
.filter(kind.eq(TriggerKind::Scheduled as i32)) .filter(kind.eq(TriggerKind::Scheduled as i32))
.load::<Automation>(&mut *conn)?; .load::<Automation>(&mut conn)?;
for automation in automations { for automation in automations {
if let Some(schedule_str) = &automation.schedule { if let Some(schedule_str) = &automation.schedule {
if let Ok(parsed_schedule) = Schedule::from_str(schedule_str) { if let Ok(parsed_schedule) = Schedule::from_str(schedule_str) {
let now = Utc::now(); let now = Utc::now();
let next_run = parsed_schedule.upcoming(Utc).next(); let next_run = parsed_schedule.upcoming(Utc).next();
if let Some(next_time) = next_run { if let Some(next_time) = next_run {
let time_until_next = next_time - now; let time_until_next = next_time - now;
if time_until_next.num_minutes() < 1 { if time_until_next.num_minutes() < 1 {
@ -65,44 +53,36 @@ impl AutomationService {
continue; continue;
} }
} }
self.execute_automation(&automation).await?; self.execute_automation(&automation).await?;
diesel::update(system_automations.filter(id.eq(automation.id))) diesel::update(system_automations.filter(id.eq(automation.id)))
.set(lt_column.eq(Some(now))) .set(lt_column.eq(Some(now)))
.execute(&mut *conn)?; .execute(&mut conn)?;
} }
} }
} }
} }
} }
Ok(()) Ok(())
} }
async fn execute_automation( async fn execute_automation(
&self, &self,
automation: &Automation, automation: &Automation,
) -> Result<(), Box<dyn std::error::Error + Send + Sync>> { ) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
info!("Executing automation: {}", automation.param);
let bot_name: String = { let bot_name: String = {
use crate::shared::models::schema::bots::dsl::*; use crate::shared::models::schema::bots::dsl::*;
let mut conn = self let mut conn = self
.state .state
.conn .conn
.lock() .get()
.map_err(|e| format!("Lock failed: {}", e))?; .map_err(|e| format!("Failed to acquire database connection: {}", e))?;
bots.filter(id.eq(automation.bot_id)) bots.filter(id.eq(automation.bot_id))
.select(name) .select(name)
.first(&mut *conn)? .first(&mut conn)?
}; };
let script_path = format!( let script_path = format!(
"./work/{}.gbai/{}.gbdialog/{}.ast", "./work/{}.gbai/{}.gbdialog/{}.ast",
bot_name, bot_name, automation.param bot_name, bot_name, automation.param
); );
let script_content = match tokio::fs::read_to_string(&script_path).await { let script_content = match tokio::fs::read_to_string(&script_path).await {
Ok(content) => content, Ok(content) => content,
Err(e) => { Err(e) => {
@ -110,16 +90,13 @@ impl AutomationService {
return Ok(()); return Ok(());
} }
}; };
let session = { let session = {
let mut sm = self.state.session_manager.lock().await; let mut sm = self.state.session_manager.lock().await;
let admin_user = uuid::Uuid::nil(); let admin_user = uuid::Uuid::nil();
sm.get_or_create_user_session(admin_user, automation.bot_id, "Automation")? sm.get_or_create_user_session(admin_user, automation.bot_id, "Automation")?
.ok_or("Failed to create session")? .ok_or("Failed to create session")?
}; };
let script_service = ScriptService::new(Arc::clone(&self.state), session); let script_service = ScriptService::new(Arc::clone(&self.state), session);
match script_service.compile(&script_content) { match script_service.compile(&script_content) {
Ok(ast) => { Ok(ast) => {
if let Err(e) = script_service.run(&ast) { if let Err(e) = script_service.run(&ast) {
@ -130,74 +107,58 @@ impl AutomationService {
error!("Script compilation failed: {}", e); error!("Script compilation failed: {}", e);
} }
} }
Ok(()) Ok(())
} }
async fn execute_compact_prompt( async fn execute_compact_prompt(
&self, &self,
automation: &Automation, automation: &Automation,
) -> Result<(), Box<dyn std::error::Error + Send + Sync>> { ) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
info!("Executing prompt compaction for bot: {}", automation.bot_id); let config_manager = ConfigManager::new(self.state.conn.clone());
let config_manager = ConfigManager::new(Arc::clone(&self.state.conn));
let compact_threshold = config_manager let compact_threshold = config_manager
.get_config(&automation.bot_id, "prompt-compact", None)? .get_config(&automation.bot_id, "prompt-compact", None)?
.parse::<usize>() .parse::<usize>()
.unwrap_or(0); .unwrap_or(0);
if compact_threshold == 0 { if compact_threshold == 0 {
return Ok(()); return Ok(());
} }
let mut session_manager = self.state.session_manager.lock().await; let mut session_manager = self.state.session_manager.lock().await;
let sessions = session_manager.get_user_sessions(uuid::Uuid::nil())?; let sessions = session_manager.get_user_sessions(uuid::Uuid::nil())?;
for session in sessions { for session in sessions {
if session.bot_id != automation.bot_id { if session.bot_id != automation.bot_id {
continue; continue;
} }
let history = session_manager.get_conversation_history(session.id, session.user_id)?; let history = session_manager.get_conversation_history(session.id, session.user_id)?;
if history.len() > compact_threshold { if history.len() > compact_threshold {
info!( trace!(
"Compacting prompt for session {}: {} messages", "Compacting prompt for session {}: {} messages",
session.id, session.id,
history.len() history.len()
); );
let mut compacted = String::new(); let mut compacted = String::new();
for (role, content) in &history[..history.len() - compact_threshold] { for (role, content) in &history[..history.len() - compact_threshold] {
compacted.push_str(&format!("{}: {}\n", role, content)); compacted.push_str(&format!("{}: {}\n", role, content));
} }
let summarized = format!("SUMMARY: {}", compacted); let summarized = format!("SUMMARY: {}", compacted);
session_manager.save_message(session.id, session.user_id, 3, &summarized, 1)?; session_manager.save_message(session.id, session.user_id, 3, &summarized, 1)?;
} }
} }
Ok(()) Ok(())
} }
} }
pub async fn execute_compact_prompt(
pub async fn execute_compact_prompt(state: Arc<crate::shared::state::AppState>) -> Result<(), Box<dyn std::error::Error + Send + Sync>> { state: Arc<crate::shared::state::AppState>,
) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
use crate::shared::models::system_automations::dsl::{is_active, system_automations}; use crate::shared::models::system_automations::dsl::{is_active, system_automations};
use diesel::prelude::*; use diesel::prelude::*;
use log::info;
let state_clone = state.clone(); let state_clone = state.clone();
let service = AutomationService::new(state_clone); let service = AutomationService::new(state_clone);
let mut conn = state let mut conn = state
.conn .conn
.lock() .get()
.map_err(|e| format!("Failed to acquire lock: {}", e))?; .map_err(|e| format!("Failed to acquire database connection: {}", e))?;
let automations: Vec<crate::shared::models::Automation> = system_automations let automations: Vec<crate::shared::models::Automation> = system_automations
.filter(is_active.eq(true)) .filter(is_active.eq(true))
.load::<crate::shared::models::Automation>(&mut *conn)?; .load::<crate::shared::models::Automation>(&mut conn)?;
for automation in automations { for automation in automations {
if let Err(e) = service.execute_compact_prompt(&automation).await { if let Err(e) = service.execute_compact_prompt(&automation).await {
error!( error!(
@ -206,7 +167,5 @@ let service = AutomationService::new(state_clone);
); );
} }
} }
info!("Prompt compaction cycle completed");
Ok(()) Ok(())
} }

View file

@ -1,18 +1,17 @@
use crate::shared::state::AppState;
use crate::basic::keywords::set_schedule::execute_set_schedule; use crate::basic::keywords::set_schedule::execute_set_schedule;
use crate::shared::models::TriggerKind;
use crate::shared::state::AppState;
use diesel::ExpressionMethods;
use diesel::QueryDsl;
use diesel::RunQueryDsl;
use log::warn; use log::warn;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use std::collections::HashMap; use std::collections::HashMap;
use diesel::QueryDsl;
use diesel::ExpressionMethods;
use std::collections::HashSet; use std::collections::HashSet;
use diesel::RunQueryDsl;
use crate::shared::models::TriggerKind;
use std::error::Error; use std::error::Error;
use std::fs; use std::fs;
use std::path::Path; use std::path::Path;
use std::sync::Arc; use std::sync::Arc;
#[derive(Debug, Clone, Serialize, Deserialize)] #[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ParamDeclaration { pub struct ParamDeclaration {
pub name: String, pub name: String,
@ -21,7 +20,6 @@ pub struct ParamDeclaration {
pub description: String, pub description: String,
pub required: bool, pub required: bool,
} }
#[derive(Debug, Clone, Serialize, Deserialize)] #[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ToolDefinition { pub struct ToolDefinition {
pub name: String, pub name: String,
@ -29,14 +27,12 @@ pub struct ToolDefinition {
pub parameters: Vec<ParamDeclaration>, pub parameters: Vec<ParamDeclaration>,
pub source_file: String, pub source_file: String,
} }
#[derive(Debug, Clone, Serialize, Deserialize)] #[derive(Debug, Clone, Serialize, Deserialize)]
pub struct MCPTool { pub struct MCPTool {
pub name: String, pub name: String,
pub description: String, pub description: String,
pub input_schema: MCPInputSchema, pub input_schema: MCPInputSchema,
} }
#[derive(Debug, Clone, Serialize, Deserialize)] #[derive(Debug, Clone, Serialize, Deserialize)]
pub struct MCPInputSchema { pub struct MCPInputSchema {
#[serde(rename = "type")] #[serde(rename = "type")]
@ -44,7 +40,6 @@ pub struct MCPInputSchema {
pub properties: HashMap<String, MCPProperty>, pub properties: HashMap<String, MCPProperty>,
pub required: Vec<String>, pub required: Vec<String>,
} }
#[derive(Debug, Clone, Serialize, Deserialize)] #[derive(Debug, Clone, Serialize, Deserialize)]
pub struct MCPProperty { pub struct MCPProperty {
#[serde(rename = "type")] #[serde(rename = "type")]
@ -53,21 +48,18 @@ pub struct MCPProperty {
#[serde(skip_serializing_if = "Option::is_none")] #[serde(skip_serializing_if = "Option::is_none")]
pub example: Option<String>, pub example: Option<String>,
} }
#[derive(Debug, Clone, Serialize, Deserialize)] #[derive(Debug, Clone, Serialize, Deserialize)]
pub struct OpenAITool { pub struct OpenAITool {
#[serde(rename = "type")] #[serde(rename = "type")]
pub tool_type: String, pub tool_type: String,
pub function: OpenAIFunction, pub function: OpenAIFunction,
} }
#[derive(Debug, Clone, Serialize, Deserialize)] #[derive(Debug, Clone, Serialize, Deserialize)]
pub struct OpenAIFunction { pub struct OpenAIFunction {
pub name: String, pub name: String,
pub description: String, pub description: String,
pub parameters: OpenAIParameters, pub parameters: OpenAIParameters,
} }
#[derive(Debug, Clone, Serialize, Deserialize)] #[derive(Debug, Clone, Serialize, Deserialize)]
pub struct OpenAIParameters { pub struct OpenAIParameters {
#[serde(rename = "type")] #[serde(rename = "type")]
@ -75,7 +67,6 @@ pub struct OpenAIParameters {
pub properties: HashMap<String, OpenAIProperty>, pub properties: HashMap<String, OpenAIProperty>,
pub required: Vec<String>, pub required: Vec<String>,
} }
#[derive(Debug, Clone, Serialize, Deserialize)] #[derive(Debug, Clone, Serialize, Deserialize)]
pub struct OpenAIProperty { pub struct OpenAIProperty {
#[serde(rename = "type")] #[serde(rename = "type")]
@ -84,13 +75,11 @@ pub struct OpenAIProperty {
#[serde(skip_serializing_if = "Option::is_none")] #[serde(skip_serializing_if = "Option::is_none")]
pub example: Option<String>, pub example: Option<String>,
} }
pub struct BasicCompiler { pub struct BasicCompiler {
state: Arc<AppState>, state: Arc<AppState>,
bot_id: uuid::Uuid, bot_id: uuid::Uuid,
previous_schedules: HashSet<String>, previous_schedules: HashSet<String>,
} }
impl BasicCompiler { impl BasicCompiler {
pub fn new(state: Arc<AppState>, bot_id: uuid::Uuid) -> Self { pub fn new(state: Arc<AppState>, bot_id: uuid::Uuid) -> Self {
Self { Self {
@ -99,7 +88,6 @@ impl BasicCompiler {
previous_schedules: HashSet::new(), previous_schedules: HashSet::new(),
} }
} }
pub fn compile_file( pub fn compile_file(
&mut self, &mut self,
source_path: &str, source_path: &str,
@ -107,46 +95,35 @@ impl BasicCompiler {
) -> Result<CompilationResult, Box<dyn Error + Send + Sync>> { ) -> Result<CompilationResult, Box<dyn Error + Send + Sync>> {
let source_content = fs::read_to_string(source_path) let source_content = fs::read_to_string(source_path)
.map_err(|e| format!("Failed to read source file: {}", e))?; .map_err(|e| format!("Failed to read source file: {}", e))?;
let tool_def = self.parse_tool_definition(&source_content, source_path)?; let tool_def = self.parse_tool_definition(&source_content, source_path)?;
let file_name = Path::new(source_path) let file_name = Path::new(source_path)
.file_stem() .file_stem()
.and_then(|s| s.to_str()) .and_then(|s| s.to_str())
.ok_or("Invalid file name")?; .ok_or("Invalid file name")?;
let ast_path = format!("{}/{}.ast", output_dir, file_name); let ast_path = format!("{}/{}.ast", output_dir, file_name);
let ast_content = self.preprocess_basic(&source_content, source_path, self.bot_id)?; let ast_content = self.preprocess_basic(&source_content, source_path, self.bot_id)?;
fs::write(&ast_path, &ast_content) fs::write(&ast_path, &ast_content)
.map_err(|e| format!("Failed to write AST file: {}", e))?; .map_err(|e| format!("Failed to write AST file: {}", e))?;
let (mcp_json, tool_json) = if !tool_def.parameters.is_empty() { let (mcp_json, tool_json) = if !tool_def.parameters.is_empty() {
let mcp = self.generate_mcp_tool(&tool_def)?; let mcp = self.generate_mcp_tool(&tool_def)?;
let openai = self.generate_openai_tool(&tool_def)?; let openai = self.generate_openai_tool(&tool_def)?;
let mcp_path = format!("{}/{}.mcp.json", output_dir, file_name); let mcp_path = format!("{}/{}.mcp.json", output_dir, file_name);
let tool_path = format!("{}/{}.tool.json", output_dir, file_name); let tool_path = format!("{}/{}.tool.json", output_dir, file_name);
let mcp_json_str = serde_json::to_string_pretty(&mcp)?; let mcp_json_str = serde_json::to_string_pretty(&mcp)?;
fs::write(&mcp_path, mcp_json_str) fs::write(&mcp_path, mcp_json_str)
.map_err(|e| format!("Failed to write MCP JSON: {}", e))?; .map_err(|e| format!("Failed to write MCP JSON: {}", e))?;
let tool_json_str = serde_json::to_string_pretty(&openai)?; let tool_json_str = serde_json::to_string_pretty(&openai)?;
fs::write(&tool_path, tool_json_str) fs::write(&tool_path, tool_json_str)
.map_err(|e| format!("Failed to write tool JSON: {}", e))?; .map_err(|e| format!("Failed to write tool JSON: {}", e))?;
(Some(mcp), Some(openai)) (Some(mcp), Some(openai))
} else { } else {
(None, None) (None, None)
}; };
Ok(CompilationResult { Ok(CompilationResult {
mcp_tool: mcp_json, mcp_tool: mcp_json,
_openai_tool: tool_json, _openai_tool: tool_json,
}) })
} }
pub fn parse_tool_definition( pub fn parse_tool_definition(
&self, &self,
source: &str, source: &str,
@ -156,16 +133,13 @@ impl BasicCompiler {
let mut description = String::new(); let mut description = String::new();
let lines: Vec<&str> = source.lines().collect(); let lines: Vec<&str> = source.lines().collect();
let mut i = 0; let mut i = 0;
while i < lines.len() { while i < lines.len() {
let line = lines[i].trim(); let line = lines[i].trim();
if line.starts_with("PARAM ") { if line.starts_with("PARAM ") {
if let Some(param) = self.parse_param_line(line)? { if let Some(param) = self.parse_param_line(line)? {
params.push(param); params.push(param);
} }
} }
if line.starts_with("DESCRIPTION ") { if line.starts_with("DESCRIPTION ") {
let desc_start = line.find('"').unwrap_or(0); let desc_start = line.find('"').unwrap_or(0);
let desc_end = line.rfind('"').unwrap_or(line.len()); let desc_end = line.rfind('"').unwrap_or(line.len());
@ -173,16 +147,13 @@ impl BasicCompiler {
description = line[desc_start + 1..desc_end].to_string(); description = line[desc_start + 1..desc_end].to_string();
} }
} }
i += 1; i += 1;
} }
let tool_name = Path::new(source_path) let tool_name = Path::new(source_path)
.file_stem() .file_stem()
.and_then(|s| s.to_str()) .and_then(|s| s.to_str())
.unwrap_or("unknown") .unwrap_or("unknown")
.to_string(); .to_string();
Ok(ToolDefinition { Ok(ToolDefinition {
name: tool_name, name: tool_name,
description, description,
@ -190,7 +161,6 @@ impl BasicCompiler {
source_file: source_path.to_string(), source_file: source_path.to_string(),
}) })
} }
fn parse_param_line( fn parse_param_line(
&self, &self,
line: &str, line: &str,
@ -199,15 +169,12 @@ impl BasicCompiler {
if !line.starts_with("PARAM ") { if !line.starts_with("PARAM ") {
return Ok(None); return Ok(None);
} }
let parts: Vec<&str> = line.split_whitespace().collect(); let parts: Vec<&str> = line.split_whitespace().collect();
if parts.len() < 4 { if parts.len() < 4 {
warn!("Invalid PARAM line: {}", line); warn!("Invalid PARAM line: {}", line);
return Ok(None); return Ok(None);
} }
let name = parts[1].to_string(); let name = parts[1].to_string();
let as_index = parts.iter().position(|&p| p == "AS"); let as_index = parts.iter().position(|&p| p == "AS");
let param_type = if let Some(idx) = as_index { let param_type = if let Some(idx) = as_index {
if idx + 1 < parts.len() { if idx + 1 < parts.len() {
@ -218,7 +185,6 @@ impl BasicCompiler {
} else { } else {
"string".to_string() "string".to_string()
}; };
let example = if let Some(like_pos) = line.find("LIKE") { let example = if let Some(like_pos) = line.find("LIKE") {
let rest = &line[like_pos + 4..].trim(); let rest = &line[like_pos + 4..].trim();
if let Some(start) = rest.find('"') { if let Some(start) = rest.find('"') {
@ -233,7 +199,6 @@ impl BasicCompiler {
} else { } else {
None None
}; };
let description = if let Some(desc_pos) = line.find("DESCRIPTION") { let description = if let Some(desc_pos) = line.find("DESCRIPTION") {
let rest = &line[desc_pos + 11..].trim(); let rest = &line[desc_pos + 11..].trim();
if let Some(start) = rest.find('"') { if let Some(start) = rest.find('"') {
@ -248,7 +213,6 @@ impl BasicCompiler {
} else { } else {
"".to_string() "".to_string()
}; };
Ok(Some(ParamDeclaration { Ok(Some(ParamDeclaration {
name, name,
param_type: self.normalize_type(&param_type), param_type: self.normalize_type(&param_type),
@ -257,7 +221,6 @@ impl BasicCompiler {
required: true, required: true,
})) }))
} }
fn normalize_type(&self, basic_type: &str) -> String { fn normalize_type(&self, basic_type: &str) -> String {
match basic_type.to_lowercase().as_str() { match basic_type.to_lowercase().as_str() {
"string" | "text" => "string".to_string(), "string" | "text" => "string".to_string(),
@ -270,14 +233,12 @@ impl BasicCompiler {
_ => "string".to_string(), _ => "string".to_string(),
} }
} }
fn generate_mcp_tool( fn generate_mcp_tool(
&self, &self,
tool_def: &ToolDefinition, tool_def: &ToolDefinition,
) -> Result<MCPTool, Box<dyn Error + Send + Sync>> { ) -> Result<MCPTool, Box<dyn Error + Send + Sync>> {
let mut properties = HashMap::new(); let mut properties = HashMap::new();
let mut required = Vec::new(); let mut required = Vec::new();
for param in &tool_def.parameters { for param in &tool_def.parameters {
properties.insert( properties.insert(
param.name.clone(), param.name.clone(),
@ -291,7 +252,6 @@ impl BasicCompiler {
required.push(param.name.clone()); required.push(param.name.clone());
} }
} }
Ok(MCPTool { Ok(MCPTool {
name: tool_def.name.clone(), name: tool_def.name.clone(),
description: tool_def.description.clone(), description: tool_def.description.clone(),
@ -302,14 +262,12 @@ impl BasicCompiler {
}, },
}) })
} }
fn generate_openai_tool( fn generate_openai_tool(
&self, &self,
tool_def: &ToolDefinition, tool_def: &ToolDefinition,
) -> Result<OpenAITool, Box<dyn Error + Send + Sync>> { ) -> Result<OpenAITool, Box<dyn Error + Send + Sync>> {
let mut properties = HashMap::new(); let mut properties = HashMap::new();
let mut required = Vec::new(); let mut required = Vec::new();
for param in &tool_def.parameters { for param in &tool_def.parameters {
properties.insert( properties.insert(
param.name.clone(), param.name.clone(),
@ -323,7 +281,6 @@ impl BasicCompiler {
required.push(param.name.clone()); required.push(param.name.clone());
} }
} }
Ok(OpenAITool { Ok(OpenAITool {
tool_type: "function".to_string(), tool_type: "function".to_string(),
function: OpenAIFunction { function: OpenAIFunction {
@ -337,38 +294,45 @@ impl BasicCompiler {
}, },
}) })
} }
fn preprocess_basic(
fn preprocess_basic(&mut self, source: &str, source_path: &str, bot_id: uuid::Uuid) -> Result<String, Box<dyn Error + Send + Sync>> { &mut self,
source: &str,
source_path: &str,
bot_id: uuid::Uuid,
) -> Result<String, Box<dyn Error + Send + Sync>> {
let bot_uuid = bot_id; let bot_uuid = bot_id;
let mut result = String::new(); let mut result = String::new();
let mut has_schedule = false; let mut has_schedule = false;
let script_name = Path::new(source_path) let script_name = Path::new(source_path)
.file_stem() .file_stem()
.and_then(|s| s.to_str()) .and_then(|s| s.to_str())
.unwrap_or("unknown") .unwrap_or("unknown")
.to_string(); .to_string();
{ {
let mut conn = self.state.conn.lock().unwrap(); let mut conn = self
.state
.conn
.get()
.map_err(|e| format!("Failed to get database connection: {}", e))?;
use crate::shared::models::system_automations::dsl::*; use crate::shared::models::system_automations::dsl::*;
diesel::delete(
diesel::delete(system_automations system_automations
.filter(bot_id.eq(bot_uuid)) .filter(bot_id.eq(bot_uuid))
.filter(kind.eq(TriggerKind::Scheduled as i32)) .filter(kind.eq(TriggerKind::Scheduled as i32))
.filter(param.eq(&script_name)) .filter(param.eq(&script_name)),
) )
.execute(&mut *conn) .execute(&mut conn)
.ok(); .ok();
} }
for line in source.lines() { for line in source.lines() {
let trimmed = line.trim(); let trimmed = line.trim();
if trimmed.is_empty()
if trimmed.is_empty() || trimmed.starts_with("'") || trimmed.starts_with("//") || trimmed.starts_with("REM") { || trimmed.starts_with("'")
|| trimmed.starts_with("//")
|| trimmed.starts_with("REM")
{
continue; continue;
} }
let normalized = trimmed let normalized = trimmed
.replace("SET SCHEDULE", "SET_SCHEDULE") .replace("SET SCHEDULE", "SET_SCHEDULE")
.replace("ADD TOOL", "ADD_TOOL") .replace("ADD TOOL", "ADD_TOOL")
@ -387,54 +351,58 @@ impl BasicCompiler {
.replace("GET BOT MEMORY", "GET_BOT_MEMORY") .replace("GET BOT MEMORY", "GET_BOT_MEMORY")
.replace("SET BOT MEMORY", "SET_BOT_MEMORY") .replace("SET BOT MEMORY", "SET_BOT_MEMORY")
.replace("CREATE DRAFT", "CREATE_DRAFT"); .replace("CREATE DRAFT", "CREATE_DRAFT");
if normalized.starts_with("SET_SCHEDULE") { if normalized.starts_with("SET_SCHEDULE") {
has_schedule = true; has_schedule = true;
let parts: Vec<&str> = normalized.split('"').collect(); let parts: Vec<&str> = normalized.split('"').collect();
if parts.len() >= 3 { if parts.len() >= 3 {
let cron = parts[1]; let cron = parts[1];
let mut conn = self.state.conn.lock().unwrap(); let mut conn = self
if let Err(e) = execute_set_schedule(&mut *conn, cron, &script_name, bot_id) { .state
log::error!("Failed to schedule SET_SCHEDULE during preprocessing: {}", e); .conn
.get()
.map_err(|e| format!("Failed to get database connection: {}", e))?;
if let Err(e) = execute_set_schedule(&mut conn, cron, &script_name, bot_id) {
log::error!(
"Failed to schedule SET_SCHEDULE during preprocessing: {}",
e
);
} }
} else { } else {
log::warn!("Malformed SET_SCHEDULE line ignored: {}", normalized); log::warn!("Malformed SET_SCHEDULE line ignored: {}", normalized);
} }
continue; continue;
} }
if normalized.starts_with("PARAM ") || normalized.starts_with("DESCRIPTION ") { if normalized.starts_with("PARAM ") || normalized.starts_with("DESCRIPTION ") {
continue; continue;
} }
result.push_str(&normalized); result.push_str(&normalized);
result.push('\n'); result.push('\n');
} }
if self.previous_schedules.contains(&script_name) && !has_schedule { if self.previous_schedules.contains(&script_name) && !has_schedule {
let mut conn = self.state.conn.lock().unwrap(); let mut conn = self
.state
.conn
.get()
.map_err(|e| format!("Failed to get database connection: {}", e))?;
use crate::shared::models::system_automations::dsl::*; use crate::shared::models::system_automations::dsl::*;
diesel::delete(
diesel::delete(system_automations system_automations
.filter(bot_id.eq(bot_uuid)) .filter(bot_id.eq(bot_uuid))
.filter(kind.eq(TriggerKind::Scheduled as i32)) .filter(kind.eq(TriggerKind::Scheduled as i32))
.filter(param.eq(&script_name)) .filter(param.eq(&script_name)),
) )
.execute(&mut *conn) .execute(&mut conn)
.map_err(|e| log::error!("Failed to remove schedule for {}: {}", script_name, e)) .map_err(|e| log::error!("Failed to remove schedule for {}: {}", script_name, e))
.ok(); .ok();
} }
if has_schedule { if has_schedule {
self.previous_schedules.insert(script_name); self.previous_schedules.insert(script_name);
} else { } else {
self.previous_schedules.remove(&script_name); self.previous_schedules.remove(&script_name);
} }
Ok(result) Ok(result)
} }
} }
#[derive(Debug)] #[derive(Debug)]
pub struct CompilationResult { pub struct CompilationResult {
pub mcp_tool: Option<MCPTool>, pub mcp_tool: Option<MCPTool>,

View file

@ -1,20 +1,19 @@
use crate::shared::state::AppState;
use crate::shared::models::UserSession; use crate::shared::models::UserSession;
use log::{trace, debug, error, info}; use crate::shared::state::AppState;
use log::{error, trace};
use rhai::{Dynamic, Engine}; use rhai::{Dynamic, Engine};
use serde_json::json; use serde_json::json;
use std::sync::Arc; use std::sync::Arc;
pub fn clear_suggestions_keyword(
pub fn clear_suggestions_keyword(state: Arc<AppState>, user_session: UserSession, engine: &mut Engine) { state: Arc<AppState>,
user_session: UserSession,
engine: &mut Engine,
) {
let cache = state.cache.clone(); let cache = state.cache.clone();
engine engine
.register_custom_syntax(&["CLEAR_SUGGESTIONS"], true, move |_context, _inputs| { .register_custom_syntax(&["CLEAR_SUGGESTIONS"], true, move |_context, _inputs| {
info!("CLEAR_SUGGESTIONS command executed");
if let Some(cache_client) = &cache { if let Some(cache_client) = &cache {
let redis_key = format!("suggestions:{}:{}", user_session.user_id, user_session.id); let redis_key = format!("suggestions:{}:{}", user_session.user_id, user_session.id);
let mut conn = match cache_client.get_connection() { let mut conn = match cache_client.get_connection() {
Ok(conn) => conn, Ok(conn) => conn,
Err(e) => { Err(e) => {
@ -22,81 +21,83 @@ pub fn clear_suggestions_keyword(state: Arc<AppState>, user_session: UserSession
return Ok(Dynamic::UNIT); return Ok(Dynamic::UNIT);
} }
}; };
let result: Result<i64, redis::RedisError> =
// Delete the suggestions list redis::cmd("DEL").arg(&redis_key).query(&mut conn);
let result: Result<i64, redis::RedisError> = redis::cmd("DEL")
.arg(&redis_key)
.query(&mut conn);
match result { match result {
Ok(deleted) => { Ok(deleted) => {
trace!("Cleared suggestions from Redis key {}, deleted: {}", redis_key, deleted); trace!(
"Cleared {} suggestions from session {}",
deleted,
user_session.id
);
} }
Err(e) => error!("Failed to clear suggestions from Redis: {}", e), Err(e) => error!("Failed to clear suggestions from Redis: {}", e),
} }
} else { } else {
debug!("No Cache client configured; suggestions not cleared"); trace!("No cache configured, suggestions not cleared");
} }
Ok(Dynamic::UNIT) Ok(Dynamic::UNIT)
}) })
.unwrap(); .unwrap();
} }
pub fn add_suggestion_keyword(
pub fn add_suggestion_keyword(state: Arc<AppState>, user_session: UserSession, engine: &mut Engine) { state: Arc<AppState>,
user_session: UserSession,
engine: &mut Engine,
) {
let cache = state.cache.clone(); let cache = state.cache.clone();
engine engine
.register_custom_syntax(&["ADD_SUGGESTION", "$expr$", "AS", "$expr$"], true, move |context, inputs| { .register_custom_syntax(
let context_name = context.eval_expression_tree(&inputs[0])?.to_string(); &["ADD_SUGGESTION", "$expr$", "AS", "$expr$"],
let button_text = context.eval_expression_tree(&inputs[1])?.to_string(); true,
move |context, inputs| {
info!("ADD_SUGGESTION command executed: context='{}', text='{}'", context_name, button_text); let context_name = context.eval_expression_tree(&inputs[0])?.to_string();
let button_text = context.eval_expression_tree(&inputs[1])?.to_string();
if let Some(cache_client) = &cache { if let Some(cache_client) = &cache {
let redis_key = format!("suggestions:{}:{}", user_session.user_id, user_session.id); let redis_key =
let suggestion = json!({ "context": context_name, "text": button_text }); format!("suggestions:{}:{}", user_session.user_id, user_session.id);
let suggestion = json!({ "context": context_name, "text": button_text });
let mut conn = match cache_client.get_connection() { let mut conn = match cache_client.get_connection() {
Ok(conn) => conn, Ok(conn) => conn,
Err(e) => { Err(e) => {
error!("Failed to connect to cache: {}", e); error!("Failed to connect to cache: {}", e);
return Ok(Dynamic::UNIT); return Ok(Dynamic::UNIT);
}
};
// Append suggestion to Redis list - RPUSH returns the new length as i64
let result: Result<i64, redis::RedisError> = redis::cmd("RPUSH")
.arg(&redis_key)
.arg(suggestion.to_string())
.query(&mut conn);
match result {
Ok(length) => {
trace!("Suggestion added successfully to Redis key {}, new length: {}", redis_key, length);
// Also register context as inactive initially
let active_key = format!("active_context:{}:{}", user_session.user_id, user_session.id);
let hset_result: Result<i64, redis::RedisError> = redis::cmd("HSET")
.arg(&active_key)
.arg(&context_name)
.arg("inactive")
.query(&mut conn);
match hset_result {
Ok(fields_added) => {
trace!("Context state set to inactive for {}, fields added: {}", context_name, fields_added)
},
Err(e) => error!("Failed to set context state: {}", e),
} }
};
let result: Result<i64, redis::RedisError> = redis::cmd("RPUSH")
.arg(&redis_key)
.arg(suggestion.to_string())
.query(&mut conn);
match result {
Ok(length) => {
trace!(
"Added suggestion to session {}, total suggestions: {}",
user_session.id,
length
);
let active_key = format!(
"active_context:{}:{}",
user_session.user_id, user_session.id
);
let hset_result: Result<i64, redis::RedisError> = redis::cmd("HSET")
.arg(&active_key)
.arg(&context_name)
.arg("inactive")
.query(&mut conn);
match hset_result {
Ok(_fields_added) => {
trace!("Set context state for session {}", user_session.id);
}
Err(e) => error!("Failed to set context state: {}", e),
}
}
Err(e) => error!("Failed to add suggestion to Redis: {}", e),
} }
Err(e) => error!("Failed to add suggestion to Redis: {}", e), } else {
trace!("No cache configured, suggestion not added");
} }
} else { Ok(Dynamic::UNIT)
debug!("No Cache client configured; suggestion will not persist"); },
} )
Ok(Dynamic::UNIT)
})
.unwrap(); .unwrap();
} }

View file

@ -1,223 +1,115 @@
use crate::shared::models::UserSession; use crate::shared::models::UserSession;
use crate::shared::state::AppState; use crate::shared::state::AppState;
use diesel::prelude::*; use diesel::prelude::*;
use log::{error, info, warn}; use log::{error, trace, warn};
use rhai::{Dynamic, Engine}; use rhai::{Dynamic, Engine};
use std::sync::Arc; use std::sync::Arc;
use uuid::Uuid; use uuid::Uuid;
pub fn add_tool_keyword(state: Arc<AppState>, user: UserSession, engine: &mut Engine) { pub fn add_tool_keyword(state: Arc<AppState>, user: UserSession, engine: &mut Engine) {
let state_clone = Arc::clone(&state); let state_clone = Arc::clone(&state);
let user_clone = user.clone(); let user_clone = user.clone();
engine
engine .register_custom_syntax(&["ADD_TOOL", "$expr$"], false, move |context, inputs| {
.register_custom_syntax(&["ADD_TOOL", "$expr$"], false, move |context, inputs| { let tool_path = context.eval_expression_tree(&inputs[0])?;
let tool_path = context.eval_expression_tree(&inputs[0])?; let tool_path_str = tool_path.to_string().trim_matches('"').to_string();
let tool_path_str = tool_path.to_string().trim_matches('"').to_string(); trace!("ADD_TOOL command executed: {} for session: {}", tool_path_str, user_clone.id);
let tool_name = tool_path_str.strip_prefix(".gbdialog/").unwrap_or(&tool_path_str).strip_suffix(".bas").unwrap_or(&tool_path_str).to_string();
info!( if tool_name.is_empty() {
"ADD_TOOL command executed: {} for session: {}", return Err(Box::new(rhai::EvalAltResult::ErrorRuntime("Invalid tool name".into(), rhai::Position::NONE)));
tool_path_str, user_clone.id }
); let state_for_task = Arc::clone(&state_clone);
let user_for_task = user_clone.clone();
// Extract tool name from path (e.g., "enrollment.bas" -> "enrollment") let tool_name_for_task = tool_name.clone();
let tool_name = tool_path_str let (tx, rx) = std::sync::mpsc::channel();
.strip_prefix(".gbdialog/") std::thread::spawn(move || {
.unwrap_or(&tool_path_str) let rt = tokio::runtime::Builder::new_multi_thread().worker_threads(2).enable_all().build();
.strip_suffix(".bas") let send_err = if let Ok(rt) = rt {
.unwrap_or(&tool_path_str) let result = rt.block_on(async move {
.to_string(); associate_tool_with_session(&state_for_task, &user_for_task, &tool_name_for_task).await
});
// Validate tool name tx.send(result).err()
if tool_name.is_empty() { } else {
return Err(Box::new(rhai::EvalAltResult::ErrorRuntime( tx.send(Err("Failed to build tokio runtime".to_string())).err()
"Invalid tool name".into(), };
rhai::Position::NONE, if send_err.is_some() {
))); error!("Failed to send result from thread");
} }
});
let state_for_task = Arc::clone(&state_clone); match rx.recv_timeout(std::time::Duration::from_secs(10)) {
let user_for_task = user_clone.clone(); Ok(Ok(message)) => {
let tool_name_for_task = tool_name.clone(); Ok(Dynamic::from(message))
}
// Spawn async task to associate tool with session Ok(Err(e)) => Err(Box::new(rhai::EvalAltResult::ErrorRuntime(e.into(), rhai::Position::NONE))),
let (tx, rx) = std::sync::mpsc::channel(); Err(std::sync::mpsc::RecvTimeoutError::Timeout) => {
std::thread::spawn(move || { Err(Box::new(rhai::EvalAltResult::ErrorRuntime("ADD_TOOL timed out".into(), rhai::Position::NONE)))
let rt = tokio::runtime::Builder::new_multi_thread() }
.worker_threads(2) Err(e) => Err(Box::new(rhai::EvalAltResult::ErrorRuntime(format!("ADD_TOOL failed: {}", e).into(), rhai::Position::NONE))),
.enable_all() }
.build(); })
.unwrap();
let send_err = if let Ok(rt) = rt {
let result = rt.block_on(async move {
associate_tool_with_session(
&state_for_task,
&user_for_task,
&tool_name_for_task,
)
.await
});
tx.send(result).err()
} else {
tx.send(Err("Failed to build tokio runtime".to_string()))
.err()
};
if send_err.is_some() {
error!("Failed to send result from thread");
}
});
match rx.recv_timeout(std::time::Duration::from_secs(10)) {
Ok(Ok(message)) => {
info!("ADD_TOOL completed: {}", message);
Ok(Dynamic::from(message))
}
Ok(Err(e)) => Err(Box::new(rhai::EvalAltResult::ErrorRuntime(
e.into(),
rhai::Position::NONE,
))),
Err(std::sync::mpsc::RecvTimeoutError::Timeout) => {
Err(Box::new(rhai::EvalAltResult::ErrorRuntime(
"ADD_TOOL timed out".into(),
rhai::Position::NONE,
)))
}
Err(e) => Err(Box::new(rhai::EvalAltResult::ErrorRuntime(
format!("ADD_TOOL failed: {}", e).into(),
rhai::Position::NONE,
))),
}
})
.unwrap();
} }
async fn associate_tool_with_session(state: &AppState, user: &UserSession, tool_name: &str) -> Result<String, String> {
/// Associate a compiled tool with the current session use crate::shared::models::schema::{basic_tools, session_tool_associations};
/// The tool must already be compiled and present in the basic_tools table let mut conn = state.conn.get().map_err(|e| format!("DB error: {}", e))?;
async fn associate_tool_with_session( let tool_exists: Result<bool, diesel::result::Error> = basic_tools::table
state: &AppState, .filter(basic_tools::bot_id.eq(user.bot_id.to_string()))
user: &UserSession, .filter(basic_tools::tool_name.eq(tool_name))
tool_name: &str, .filter(basic_tools::is_active.eq(1))
) -> Result<String, String> { .select(diesel::dsl::count(basic_tools::id))
use crate::shared::models::schema::{basic_tools, session_tool_associations}; .first::<i64>(&mut *conn)
.map(|count| count > 0);
let mut conn = state.conn.lock().map_err(|e| { match tool_exists {
error!("Failed to acquire database lock: {}", e); Ok(true) => {
format!("Database connection error: {}", e) trace!("Tool '{}' exists and is active for bot '{}'", tool_name, user.bot_id);
})?; }
Ok(false) => {
// First, verify the tool exists and is active for this bot warn!("Tool '{}' does not exist or is not active for bot '{}'", tool_name, user.bot_id);
let tool_exists: Result<bool, diesel::result::Error> = basic_tools::table return Err(format!("Tool '{}' is not available. Make sure the tool file is compiled and active.", tool_name));
.filter(basic_tools::bot_id.eq(user.bot_id.to_string())) }
.filter(basic_tools::tool_name.eq(tool_name)) Err(e) => {
.filter(basic_tools::is_active.eq(1)) error!("Failed to check tool existence: {}", e);
.select(diesel::dsl::count(basic_tools::id)) return Err(format!("Database error while checking tool: {}", e));
.first::<i64>(&mut *conn) }
.map(|count| count > 0); }
let association_id = Uuid::new_v4().to_string();
match tool_exists { let session_id_str = user.id.to_string();
Ok(true) => { let added_at = chrono::Utc::now().to_rfc3339();
info!( let insert_result: Result<usize, diesel::result::Error> = diesel::insert_into(session_tool_associations::table)
"Tool '{}' exists and is active for bot '{}'", .values((
tool_name, user.bot_id session_tool_associations::id.eq(&association_id),
); session_tool_associations::session_id.eq(&session_id_str),
} session_tool_associations::tool_name.eq(tool_name),
Ok(false) => { session_tool_associations::added_at.eq(&added_at),
warn!( ))
"Tool '{}' does not exist or is not active for bot '{}'", .on_conflict((session_tool_associations::session_id, session_tool_associations::tool_name))
tool_name, user.bot_id .do_nothing()
); .execute(&mut *conn);
return Err(format!( match insert_result {
"Tool '{}' is not available. Make sure the tool file is compiled and active.", Ok(rows_affected) => {
tool_name if rows_affected > 0 {
)); trace!("Tool '{}' newly associated with session '{}' (user: {}, bot: {})", tool_name, user.id, user.user_id, user.bot_id);
} Ok(format!("Tool '{}' is now available in this conversation", tool_name))
Err(e) => { } else {
error!("Failed to check tool existence: {}", e); trace!("Tool '{}' was already associated with session '{}'", tool_name, user.id);
return Err(format!("Database error while checking tool: {}", e)); Ok(format!("Tool '{}' is already available in this conversation", tool_name))
} }
} }
Err(e) => {
// Generate a unique ID for the association error!("Failed to associate tool '{}' with session '{}': {}", tool_name, user.id, e);
let association_id = Uuid::new_v4().to_string(); Err(format!("Failed to add tool to session: {}", e))
let session_id_str = user.id.to_string(); }
let added_at = chrono::Utc::now().to_rfc3339(); }
// Insert the tool association (ignore if already exists due to UNIQUE constraint)
let insert_result: Result<usize, diesel::result::Error> =
diesel::insert_into(session_tool_associations::table)
.values((
session_tool_associations::id.eq(&association_id),
session_tool_associations::session_id.eq(&session_id_str),
session_tool_associations::tool_name.eq(tool_name),
session_tool_associations::added_at.eq(&added_at),
))
.on_conflict((
session_tool_associations::session_id,
session_tool_associations::tool_name,
))
.do_nothing()
.execute(&mut *conn);
match insert_result {
Ok(rows_affected) => {
if rows_affected > 0 {
info!(
"Tool '{}' newly associated with session '{}' (user: {}, bot: {})",
tool_name, user.id, user.user_id, user.bot_id
);
Ok(format!(
"Tool '{}' is now available in this conversation",
tool_name
))
} else {
info!(
"Tool '{}' was already associated with session '{}'",
tool_name, user.id
);
Ok(format!(
"Tool '{}' is already available in this conversation",
tool_name
))
}
}
Err(e) => {
error!(
"Failed to associate tool '{}' with session '{}': {}",
tool_name, user.id, e
);
Err(format!("Failed to add tool to session: {}", e))
}
}
} }
pub fn get_session_tools(conn: &mut PgConnection, session_id: &Uuid) -> Result<Vec<String>, diesel::result::Error> {
/// Get all tools associated with a session use crate::shared::models::schema::session_tool_associations;
pub fn get_session_tools( let session_id_str = session_id.to_string();
conn: &mut PgConnection, session_tool_associations::table
session_id: &Uuid, .filter(session_tool_associations::session_id.eq(&session_id_str))
) -> Result<Vec<String>, diesel::result::Error> { .select(session_tool_associations::tool_name)
use crate::shared::models::schema::session_tool_associations; .load::<String>(conn)
let session_id_str = session_id.to_string();
session_tool_associations::table
.filter(session_tool_associations::session_id.eq(&session_id_str))
.select(session_tool_associations::tool_name)
.load::<String>(conn)
} }
pub fn clear_session_tools(conn: &mut PgConnection, session_id: &Uuid) -> Result<usize, diesel::result::Error> {
/// Clear all tool associations for a session use crate::shared::models::schema::session_tool_associations;
pub fn clear_session_tools( let session_id_str = session_id.to_string();
conn: &mut PgConnection, diesel::delete(session_tool_associations::table.filter(session_tool_associations::session_id.eq(&session_id_str))).execute(conn)
session_id: &Uuid,
) -> Result<usize, diesel::result::Error> {
use crate::shared::models::schema::session_tool_associations;
let session_id_str = session_id.to_string();
diesel::delete(
session_tool_associations::table
.filter(session_tool_associations::session_id.eq(&session_id_str)),
)
.execute(conn)
} }

View file

@ -1,91 +1,51 @@
use crate::shared::models::UserSession; use crate::shared::models::UserSession;
use crate::shared::state::AppState; use crate::shared::state::AppState;
use log::{error, info}; use log::{error, trace};
use rhai::{Dynamic, Engine}; use rhai::{Dynamic, Engine};
use std::sync::Arc; use std::sync::Arc;
pub fn add_website_keyword(state: Arc<AppState>, user: UserSession, engine: &mut Engine) { pub fn add_website_keyword(state: Arc<AppState>, user: UserSession, engine: &mut Engine) {
let state_clone = Arc::clone(&state); let state_clone = Arc::clone(&state);
let user_clone = user.clone(); let user_clone = user.clone();
engine
engine .register_custom_syntax(&["ADD_WEBSITE", "$expr$"], false, move |context, inputs| {
.register_custom_syntax(&["ADD_WEBSITE", "$expr$"], false, move |context, inputs| { let url = context.eval_expression_tree(&inputs[0])?;
let url = context.eval_expression_tree(&inputs[0])?; let url_str = url.to_string().trim_matches('"').to_string();
let url_str = url.to_string().trim_matches('"').to_string(); trace!("ADD_WEBSITE command executed: {} for user: {}", url_str, user_clone.user_id);
let is_valid = url_str.starts_with("http://") || url_str.starts_with("https://");
info!( if !is_valid {
"ADD_WEBSITE command executed: {} for user: {}", return Err(Box::new(rhai::EvalAltResult::ErrorRuntime("Invalid URL format. Must start with http:// or https://".into(), rhai::Position::NONE)));
url_str, user_clone.user_id }
); let state_for_task = Arc::clone(&state_clone);
let user_for_task = user_clone.clone();
// Validate URL let url_for_task = url_str.clone();
let is_valid = url_str.starts_with("http://") || url_str.starts_with("https://"); let (tx, rx) = std::sync::mpsc::channel();
std::thread::spawn(move || {
if !is_valid { let rt = tokio::runtime::Builder::new_multi_thread().worker_threads(2).enable_all().build();
return Err(Box::new(rhai::EvalAltResult::ErrorRuntime( let send_err = if let Ok(rt) = rt {
"Invalid URL format. Must start with http:// or https://".into(), let result = rt.block_on(async move {
rhai::Position::NONE, crawl_and_index_website(&state_for_task, &user_for_task, &url_for_task).await
))); });
} tx.send(result).err()
} else {
let state_for_task = Arc::clone(&state_clone); tx.send(Err("Failed to build tokio runtime".to_string())).err()
let user_for_task = user_clone.clone(); };
let url_for_task = url_str.clone(); if send_err.is_some() {
error!("Failed to send result from thread");
// Spawn async task to crawl and index website }
let (tx, rx) = std::sync::mpsc::channel(); });
std::thread::spawn(move || { match rx.recv_timeout(std::time::Duration::from_secs(120)) {
let rt = tokio::runtime::Builder::new_multi_thread() Ok(Ok(message)) => {
.worker_threads(2) Ok(Dynamic::from(message))
.enable_all() }
.build(); Ok(Err(e)) => Err(Box::new(rhai::EvalAltResult::ErrorRuntime(e.into(), rhai::Position::NONE))),
Err(std::sync::mpsc::RecvTimeoutError::Timeout) => {
let send_err = if let Ok(rt) = rt { Err(Box::new(rhai::EvalAltResult::ErrorRuntime("ADD_WEBSITE timed out".into(), rhai::Position::NONE)))
let result = rt.block_on(async move { }
crawl_and_index_website(&state_for_task, &user_for_task, &url_for_task) Err(e) => Err(Box::new(rhai::EvalAltResult::ErrorRuntime(format!("ADD_WEBSITE failed: {}", e).into(), rhai::Position::NONE))),
.await }
}); })
tx.send(result).err() .unwrap();
} else {
tx.send(Err("Failed to build tokio runtime".to_string()))
.err()
};
if send_err.is_some() {
error!("Failed to send result from thread");
}
});
match rx.recv_timeout(std::time::Duration::from_secs(120)) {
Ok(Ok(message)) => {
info!("ADD_WEBSITE completed: {}", message);
Ok(Dynamic::from(message))
}
Ok(Err(e)) => Err(Box::new(rhai::EvalAltResult::ErrorRuntime(
e.into(),
rhai::Position::NONE,
))),
Err(std::sync::mpsc::RecvTimeoutError::Timeout) => {
Err(Box::new(rhai::EvalAltResult::ErrorRuntime(
"ADD_WEBSITE timed out".into(),
rhai::Position::NONE,
)))
}
Err(e) => Err(Box::new(rhai::EvalAltResult::ErrorRuntime(
format!("ADD_WEBSITE failed: {}", e).into(),
rhai::Position::NONE,
))),
}
})
.unwrap();
} }
async fn crawl_and_index_website(_state: &AppState, _user: &UserSession, _url: &str) -> Result<String, String> {
/// Crawl website and index content Err("Web automation functionality has been removed from this build".to_string())
async fn crawl_and_index_website(
_state: &AppState,
user: &UserSession,
url: &str,
) -> Result<String, String> {
info!("Crawling website: {} for user: {}", url, user.user_id);
Err("Web automation functionality has been removed from this build".to_string())
} }

View file

@ -1,7 +1,7 @@
use crate::shared::models::UserSession; use crate::shared::models::UserSession;
use crate::shared::state::AppState; use crate::shared::state::AppState;
use diesel::prelude::*; use diesel::prelude::*;
use log::{error, info, trace}; use log::{error, trace};
use rhai::{Dynamic, Engine}; use rhai::{Dynamic, Engine};
use std::sync::Arc; use std::sync::Arc;
use uuid::Uuid; use uuid::Uuid;
@ -9,122 +9,84 @@ use uuid::Uuid;
pub fn set_bot_memory_keyword(state: Arc<AppState>, user: UserSession, engine: &mut Engine) { pub fn set_bot_memory_keyword(state: Arc<AppState>, user: UserSession, engine: &mut Engine) {
let state_clone = Arc::clone(&state); let state_clone = Arc::clone(&state);
let user_clone = user.clone(); let user_clone = user.clone();
engine engine
.register_custom_syntax( .register_custom_syntax(&["SET_BOT_MEMORY", "$expr$", ",", "$expr$"], false, move |context, inputs| {
&["SET_BOT_MEMORY", "$expr$", ",", "$expr$"], let key = context.eval_expression_tree(&inputs[0])?.to_string();
false, let value = context.eval_expression_tree(&inputs[1])?.to_string();
move |context, inputs| { let state_for_spawn = Arc::clone(&state_clone);
let key = context.eval_expression_tree(&inputs[0])?.to_string(); let user_clone_spawn = user_clone.clone();
let value = context.eval_expression_tree(&inputs[1])?.to_string(); let key_clone = key.clone();
let value_clone = value.clone();
let state_for_spawn = Arc::clone(&state_clone); tokio::spawn(async move {
let user_clone_spawn = user_clone.clone(); use crate::shared::models::bot_memories;
let key_clone = key.clone(); let mut conn = match state_for_spawn.conn.get() {
let value_clone = value.clone(); Ok(conn) => conn,
Err(e) => {
tokio::spawn(async move { error!("Failed to acquire database connection for SET BOT MEMORY: {}", e);
use crate::shared::models::bot_memories; return;
}
let mut conn = match state_for_spawn.conn.lock() { };
Ok(conn) => conn, let bot_uuid = match Uuid::parse_str(&user_clone_spawn.bot_id.to_string()) {
Err(e) => { Ok(uuid) => uuid,
error!( Err(e) => {
"Failed to acquire database connection for SET BOT MEMORY: {}", error!("Invalid bot ID format: {}", e);
e return;
); }
return; };
} let now = chrono::Utc::now();
}; let existing_memory: Option<Uuid> = bot_memories::table
.filter(bot_memories::bot_id.eq(bot_uuid))
let bot_uuid = match Uuid::parse_str(&user_clone_spawn.bot_id.to_string()) { .filter(bot_memories::key.eq(&key_clone))
Ok(uuid) => uuid, .select(bot_memories::id)
Err(e) => { .first(&mut *conn)
error!("Invalid bot ID format: {}", e); .optional()
return; .unwrap_or(None);
} if let Some(memory_id) = existing_memory {
}; let update_result = diesel::update(bot_memories::table.filter(bot_memories::id.eq(memory_id)))
.set((bot_memories::value.eq(&value_clone), bot_memories::updated_at.eq(now)))
let now = chrono::Utc::now();
let existing_memory: Option<Uuid> = bot_memories::table
.filter(bot_memories::bot_id.eq(bot_uuid))
.filter(bot_memories::key.eq(&key_clone))
.select(bot_memories::id)
.first(&mut *conn)
.optional()
.unwrap_or(None);
if let Some(memory_id) = existing_memory {
let update_result = diesel::update(
bot_memories::table.filter(bot_memories::id.eq(memory_id)),
)
.set((
bot_memories::value.eq(&value_clone),
bot_memories::updated_at.eq(now),
))
.execute(&mut *conn); .execute(&mut *conn);
match update_result {
match update_result { Ok(_) => {
Ok(_) => { trace!("Updated bot memory for key: {} with value length: {}", key_clone, value_clone.len());
info!(
"Updated bot memory for key: {} with value length: {}",
key_clone,
value_clone.len()
);
}
Err(e) => {
error!("Failed to update bot memory: {}", e);
}
} }
} else { Err(e) => {
let new_memory = crate::shared::models::BotMemory { error!("Failed to update bot memory: {}", e);
id: Uuid::new_v4(),
bot_id: bot_uuid,
key: key_clone.clone(),
value: value_clone.clone(),
created_at: now,
updated_at: now,
};
let insert_result = diesel::insert_into(bot_memories::table)
.values(&new_memory)
.execute(&mut *conn);
match insert_result {
Ok(_) => {
info!(
"Created new bot memory for key: {} with value length: {}",
key_clone,
value_clone.len()
);
}
Err(e) => {
error!("Failed to insert bot memory: {}", e);
}
} }
} }
}); } else {
let new_memory = crate::shared::models::BotMemory {
Ok(Dynamic::UNIT) id: Uuid::new_v4(),
}, bot_id: bot_uuid,
) key: key_clone.clone(),
value: value_clone.clone(),
created_at: now,
updated_at: now,
};
let insert_result = diesel::insert_into(bot_memories::table).values(&new_memory).execute(&mut *conn);
match insert_result {
Ok(_) => {
trace!("Created new bot memory for key: {} with value length: {}", key_clone, value_clone.len());
}
Err(e) => {
error!("Failed to insert bot memory: {}", e);
}
}
}
});
Ok(Dynamic::UNIT)
})
.unwrap(); .unwrap();
} }
pub fn get_bot_memory_keyword(state: Arc<AppState>, user: UserSession, engine: &mut Engine) { pub fn get_bot_memory_keyword(state: Arc<AppState>, user: UserSession, engine: &mut Engine) {
let state_clone = Arc::clone(&state); let state_clone = Arc::clone(&state);
let user_clone = user.clone(); let user_clone = user.clone();
engine.register_fn("GET_BOT_MEMORY", move |key_param: String| -> String { engine.register_fn("GET_BOT_MEMORY", move |key_param: String| -> String {
use crate::shared::models::bot_memories; use crate::shared::models::bot_memories;
let state = Arc::clone(&state_clone); let state = Arc::clone(&state_clone);
let conn_result = state.conn.get();
let conn_result = state.conn.lock();
if let Ok(mut conn) = conn_result { if let Ok(mut conn) = conn_result {
let bot_uuid = user_clone.bot_id; let bot_uuid = user_clone.bot_id;
let memory_value: Option<String> = bot_memories::table let memory_value: Option<String> = bot_memories::table
.filter(bot_memories::bot_id.eq(bot_uuid)) .filter(bot_memories::bot_id.eq(bot_uuid))
.filter(bot_memories::key.eq(&key_param)) .filter(bot_memories::key.eq(&key_param))
@ -132,8 +94,6 @@ pub fn get_bot_memory_keyword(state: Arc<AppState>, user: UserSession, engine: &
.first(&mut *conn) .first(&mut *conn)
.optional() .optional()
.unwrap_or(None); .unwrap_or(None);
trace!("GET_MEMORY for key '{}' returned value: {:?}", key_param, memory_value);
memory_value.unwrap_or_default() memory_value.unwrap_or_default()
} else { } else {
String::new() String::new()

View file

@ -1,103 +1,63 @@
use crate::basic::keywords::add_tool::clear_session_tools; use crate::basic::keywords::add_tool::clear_session_tools;
use crate::shared::models::UserSession; use crate::shared::models::UserSession;
use crate::shared::state::AppState; use crate::shared::state::AppState;
use log::{error, info}; use log::{error, trace};
use rhai::{Dynamic, Engine}; use rhai::{Dynamic, Engine};
use std::sync::Arc; use std::sync::Arc;
pub fn clear_tools_keyword(state: Arc<AppState>, user: UserSession, engine: &mut Engine) { pub fn clear_tools_keyword(state: Arc<AppState>, user: UserSession, engine: &mut Engine) {
let state_clone = Arc::clone(&state); let state_clone = Arc::clone(&state);
let user_clone = user.clone(); let user_clone = user.clone();
engine
engine .register_custom_syntax(&["CLEAR_TOOLS"], false, move |_context, _inputs| {
.register_custom_syntax(&["CLEAR_TOOLS"], false, move |_context, _inputs| { trace!("CLEAR_TOOLS command executed for session: {}", user_clone.id);
info!( let state_for_task = Arc::clone(&state_clone);
"CLEAR_TOOLS command executed for session: {}", let user_for_task = user_clone.clone();
user_clone.id let (tx, rx) = std::sync::mpsc::channel();
); std::thread::spawn(move || {
let rt = tokio::runtime::Builder::new_multi_thread().worker_threads(2).enable_all().build();
let state_for_task = Arc::clone(&state_clone); let send_err = if let Ok(rt) = rt {
let user_for_task = user_clone.clone(); let result = rt.block_on(async move {
clear_all_tools_from_session(&state_for_task, &user_for_task).await
// Spawn async task to clear all tool associations from session });
let (tx, rx) = std::sync::mpsc::channel(); tx.send(result).err()
std::thread::spawn(move || { } else {
let rt = tokio::runtime::Builder::new_multi_thread() tx.send(Err("Failed to build tokio runtime".to_string())).err()
.worker_threads(2) };
.enable_all() if send_err.is_some() {
.build(); error!("Failed to send result from thread");
}
let send_err = if let Ok(rt) = rt { });
let result = rt.block_on(async move { match rx.recv_timeout(std::time::Duration::from_secs(10)) {
clear_all_tools_from_session(&state_for_task, &user_for_task).await Ok(Ok(message)) => {
}); Ok(Dynamic::from(message))
tx.send(result).err() }
} else { Ok(Err(e)) => Err(Box::new(rhai::EvalAltResult::ErrorRuntime(e.into(), rhai::Position::NONE))),
tx.send(Err("Failed to build tokio runtime".to_string())) Err(std::sync::mpsc::RecvTimeoutError::Timeout) => {
.err() Err(Box::new(rhai::EvalAltResult::ErrorRuntime("CLEAR_TOOLS timed out".into(), rhai::Position::NONE)))
}; }
Err(e) => Err(Box::new(rhai::EvalAltResult::ErrorRuntime(format!("CLEAR_TOOLS failed: {}", e).into(), rhai::Position::NONE))),
if send_err.is_some() { }
error!("Failed to send result from thread"); })
} .unwrap();
});
match rx.recv_timeout(std::time::Duration::from_secs(10)) {
Ok(Ok(message)) => {
info!("CLEAR_TOOLS completed: {}", message);
Ok(Dynamic::from(message))
}
Ok(Err(e)) => Err(Box::new(rhai::EvalAltResult::ErrorRuntime(
e.into(),
rhai::Position::NONE,
))),
Err(std::sync::mpsc::RecvTimeoutError::Timeout) => {
Err(Box::new(rhai::EvalAltResult::ErrorRuntime(
"CLEAR_TOOLS timed out".into(),
rhai::Position::NONE,
)))
}
Err(e) => Err(Box::new(rhai::EvalAltResult::ErrorRuntime(
format!("CLEAR_TOOLS failed: {}", e).into(),
rhai::Position::NONE,
))),
}
})
.unwrap();
} }
async fn clear_all_tools_from_session(state: &AppState, user: &UserSession) -> Result<String, String> {
/// Clear all tool associations from the current session let mut conn = state.conn.get().map_err(|e| {
async fn clear_all_tools_from_session( error!("Failed to acquire database lock: {}", e);
state: &AppState, format!("Database connection error: {}", e)
user: &UserSession, })?;
) -> Result<String, String> { let delete_result = clear_session_tools(&mut *conn, &user.id);
let mut conn = state.conn.lock().map_err(|e| { match delete_result {
error!("Failed to acquire database lock: {}", e); Ok(rows_affected) => {
format!("Database connection error: {}", e) if rows_affected > 0 {
})?; trace!("Cleared {} tool(s) from session '{}' (user: {}, bot: {})", rows_affected, user.id, user.user_id, user.bot_id);
Ok(format!("All {} tool(s) have been removed from this conversation", rows_affected))
// Clear all tool associations for this session } else {
let delete_result = clear_session_tools(&mut *conn, &user.id); Ok("No tools were active in this conversation".to_string())
}
match delete_result { }
Ok(rows_affected) => { Err(e) => {
if rows_affected > 0 { error!("Failed to clear tools from session '{}': {}", user.id, e);
info!( Err(format!("Failed to clear tools from session: {}", e))
"Cleared {} tool(s) from session '{}' (user: {}, bot: {})", }
rows_affected, user.id, user.user_id, user.bot_id }
);
Ok(format!(
"All {} tool(s) have been removed from this conversation",
rows_affected
))
} else {
info!("No tools were associated with session '{}'", user.id);
Ok("No tools were active in this conversation".to_string())
}
}
Err(e) => {
error!("Failed to clear tools from session '{}': {}", user.id, e);
Err(format!("Failed to clear tools from session: {}", e))
}
}
} }

View file

@ -3,64 +3,45 @@ use crate::shared::state::AppState;
use crate::shared::models::UserSession; use crate::shared::models::UserSession;
use rhai::Dynamic; use rhai::Dynamic;
use rhai::Engine; use rhai::Engine;
pub fn create_draft_keyword(state: &AppState, user: UserSession, engine: &mut Engine) { pub fn create_draft_keyword(state: &AppState, user: UserSession, engine: &mut Engine) {
let state_clone = state.clone(); let state_clone = state.clone();
engine
engine .register_custom_syntax(&["CREATE_DRAFT", "$expr$", ",", "$expr$", ",", "$expr$"], true, move |context, inputs| {
.register_custom_syntax( let to = context.eval_expression_tree(&inputs[0])?.to_string();
&["CREATE_DRAFT", "$expr$", ",", "$expr$", ",", "$expr$"], let subject = context.eval_expression_tree(&inputs[1])?.to_string();
true, let reply_text = context.eval_expression_tree(&inputs[2])?.to_string();
move |context, inputs| { let fut = execute_create_draft(&state_clone, &to, &subject, &reply_text);
let to = context.eval_expression_tree(&inputs[0])?.to_string(); let result = tokio::task::block_in_place(|| tokio::runtime::Handle::current().block_on(fut))
let subject = context.eval_expression_tree(&inputs[1])?.to_string(); .map_err(|e| format!("Draft creation error: {}", e))?;
let reply_text = context.eval_expression_tree(&inputs[2])?.to_string(); Ok(Dynamic::from(result))
},
let fut = execute_create_draft(&state_clone, &to, &subject, &reply_text); )
let result = .unwrap();
tokio::task::block_in_place(|| tokio::runtime::Handle::current().block_on(fut))
.map_err(|e| format!("Draft creation error: {}", e))?;
Ok(Dynamic::from(result))
},
)
.unwrap();
} }
async fn execute_create_draft(state: &AppState, to: &str, subject: &str, reply_text: &str) -> Result<String, String> {
async fn execute_create_draft( let get_result = fetch_latest_sent_to(&state.config.clone().unwrap().email, to).await;
state: &AppState, let email_body = if let Ok(get_result_str) = get_result {
to: &str, if !get_result_str.is_empty() {
subject: &str, let email_separator = "<br><hr><br>";
reply_text: &str, let formatted_reply_text = reply_text.to_string();
) -> Result<String, String> { let formatted_old_text = get_result_str.replace("\n", "<br>");
let get_result = fetch_latest_sent_to(&state.config.clone().unwrap().email, to).await; let fixed_reply_text = formatted_reply_text.replace("FIX", "Fixed");
let email_body = if let Ok(get_result_str) = get_result { format!("{}{}{}", fixed_reply_text, email_separator, formatted_old_text)
if !get_result_str.is_empty() { } else {
let email_separator = "<br><hr><br>"; reply_text.to_string()
let formatted_reply_text = reply_text.to_string(); }
let formatted_old_text = get_result_str.replace("\n", "<br>"); } else {
let fixed_reply_text = formatted_reply_text.replace("FIX", "Fixed"); reply_text.to_string()
format!( };
"{}{}{}", let draft_request = SaveDraftRequest {
fixed_reply_text, email_separator, formatted_old_text to: to.to_string(),
) subject: subject.to_string(),
} else { cc: None,
reply_text.to_string() text: email_body,
} };
} else { let save_result = save_email_draft(&state.config.clone().unwrap().email, &draft_request).await;
reply_text.to_string() match save_result {
}; Ok(_) => Ok("Draft saved successfully".to_string()),
Err(e) => Err(e.to_string()),
let draft_request = SaveDraftRequest { }
to: to.to_string(),
subject: subject.to_string(),
cc: None,
text: email_body,
};
let save_result = save_email_draft(&state.config.clone().unwrap().email, &draft_request).await;
match save_result {
Ok(_) => Ok("Draft saved successfully".to_string()),
Err(e) => Err(e.to_string()),
}
} }

View file

@ -1,87 +1,50 @@
use log::info;
use rhai::Dynamic; use rhai::Dynamic;
use rhai::Engine; use rhai::Engine;
use std::error::Error; use std::error::Error;
use std::fs; use std::fs;
use std::io::Read; use std::io::Read;
use std::path::PathBuf; use std::path::PathBuf;
use crate::shared::models::UserSession; use crate::shared::models::UserSession;
use crate::shared::state::AppState; use crate::shared::state::AppState;
pub fn create_site_keyword(state: &AppState, _user: UserSession, engine: &mut Engine) { pub fn create_site_keyword(state: &AppState, _user: UserSession, engine: &mut Engine) {
let state_clone = state.clone(); let state_clone = state.clone();
engine engine
.register_custom_syntax( .register_custom_syntax(&["CREATE_SITE", "$expr$", ",", "$expr$", ",", "$expr$"], true, move |context, inputs| {
&["CREATE_SITE", "$expr$", ",", "$expr$", ",", "$expr$"], if inputs.len() < 3 {
true, return Err("Not enough arguments for CREATE SITE".into());
move |context, inputs| { }
if inputs.len() < 3 { let alias = context.eval_expression_tree(&inputs[0])?;
return Err("Not enough arguments for CREATE SITE".into()); let template_dir = context.eval_expression_tree(&inputs[1])?;
} let prompt = context.eval_expression_tree(&inputs[2])?;
let config = state_clone.config.as_ref().expect("Config must be initialized").clone();
let alias = context.eval_expression_tree(&inputs[0])?; let fut = create_site(&config, alias, template_dir, prompt);
let template_dir = context.eval_expression_tree(&inputs[1])?; let result = tokio::task::block_in_place(|| tokio::runtime::Handle::current().block_on(fut))
let prompt = context.eval_expression_tree(&inputs[2])?; .map_err(|e| format!("Site creation failed: {}", e))?;
Ok(Dynamic::from(result))
let config = state_clone },
.config )
.as_ref() .unwrap();
.expect("Config must be initialized")
.clone();
let fut = create_site(&config, alias, template_dir, prompt);
let result =
tokio::task::block_in_place(|| tokio::runtime::Handle::current().block_on(fut))
.map_err(|e| format!("Site creation failed: {}", e))?;
Ok(Dynamic::from(result))
},
)
.unwrap();
} }
async fn create_site(config: &crate::config::AppConfig, alias: Dynamic, template_dir: Dynamic, prompt: Dynamic) -> Result<String, Box<dyn Error + Send + Sync>> {
async fn create_site( let base_path = PathBuf::from(&config.site_path);
config: &crate::config::AppConfig, let template_path = base_path.join(template_dir.to_string());
alias: Dynamic, let alias_path = base_path.join(alias.to_string());
template_dir: Dynamic, fs::create_dir_all(&alias_path).map_err(|e| e.to_string())?;
prompt: Dynamic, let mut combined_content = String::new();
) -> Result<String, Box<dyn Error + Send + Sync>> { for entry in fs::read_dir(&template_path).map_err(|e| e.to_string())? {
let base_path = PathBuf::from(&config.site_path); let entry = entry.map_err(|e| e.to_string())?;
let template_path = base_path.join(template_dir.to_string()); let path = entry.path();
let alias_path = base_path.join(alias.to_string()); if path.extension().map_or(false, |ext| ext == "html") {
let mut file = fs::File::open(&path).map_err(|e| e.to_string())?;
fs::create_dir_all(&alias_path).map_err(|e| e.to_string())?; let mut contents = String::new();
file.read_to_string(&mut contents).map_err(|e| e.to_string())?;
let mut combined_content = String::new(); combined_content.push_str(&contents);
combined_content.push_str("\n\n--- TEMPLATE SEPARATOR ---\n\n");
for entry in fs::read_dir(&template_path).map_err(|e| e.to_string())? { }
let entry = entry.map_err(|e| e.to_string())?; }
let path = entry.path(); let _full_prompt = format!("TEMPLATE FILES:\n{}\n\nPROMPT: {}\n\nGenerate a new HTML file cloning all previous TEMPLATE (keeping only the local _assets libraries use, no external resources), but turning this into this prompt:", combined_content, prompt.to_string());
let llm_result = "".to_string();
if path.extension().map_or(false, |ext| ext == "html") { let index_path = alias_path.join("index.html");
let mut file = fs::File::open(&path).map_err(|e| e.to_string())?; fs::write(index_path, llm_result).map_err(|e| e.to_string())?;
let mut contents = String::new(); Ok(alias_path.to_string_lossy().into_owned())
file.read_to_string(&mut contents)
.map_err(|e| e.to_string())?;
combined_content.push_str(&contents);
combined_content.push_str("\n\n--- TEMPLATE SEPARATOR ---\n\n");
}
}
let _full_prompt = format!(
"TEMPLATE FILES:\n{}\n\nPROMPT: {}\n\nGenerate a new HTML file cloning all previous TEMPLATE (keeping only the local _assets libraries use, no external resources), but turning this into this prompt:",
combined_content,
prompt.to_string()
);
info!("Asking LLM to create site.");
let llm_result = "".to_string(); // TODO:
let index_path = alias_path.join("index.html");
fs::write(index_path, llm_result).map_err(|e| e.to_string())?;
info!("Site created at: {}", alias_path.display());
Ok(alias_path.to_string_lossy().into_owned())
} }

View file

@ -1,36 +1,29 @@
use diesel::pg::PgConnection;
use diesel::prelude::*;
use log::{error, info};
use rhai::Dynamic;
use rhai::Engine;
use serde_json::{json, Value};
use crate::shared::models::UserSession; use crate::shared::models::UserSession;
use crate::shared::state::AppState; use crate::shared::state::AppState;
use crate::shared::utils; use crate::shared::utils;
use crate::shared::utils::to_array; use crate::shared::utils::to_array;
use diesel::pg::PgConnection;
use diesel::prelude::*;
use log::error;
use log::trace;
use rhai::Dynamic;
use rhai::Engine;
use serde_json::{json, Value};
pub fn find_keyword(state: &AppState, _user: UserSession, engine: &mut Engine) { pub fn find_keyword(state: &AppState, _user: UserSession, engine: &mut Engine) {
let connection = state.conn.clone(); let connection = state.conn.clone();
engine engine
.register_custom_syntax(&["FIND", "$expr$", ",", "$expr$"], false, { .register_custom_syntax(&["FIND", "$expr$", ",", "$expr$"], false, {
move |context, inputs| { move |context, inputs| {
let table_name = context.eval_expression_tree(&inputs[0])?; let table_name = context.eval_expression_tree(&inputs[0])?;
let filter = context.eval_expression_tree(&inputs[1])?; let filter = context.eval_expression_tree(&inputs[1])?;
let mut binding = connection.lock().unwrap(); let mut binding = connection.get().map_err(|e| format!("DB error: {}", e))?;
// Use the current async context instead of creating a new runtime
let binding2 = table_name.to_string(); let binding2 = table_name.to_string();
let binding3 = filter.to_string(); let binding3 = filter.to_string();
// Since execute_find is async but we're in a sync context, we need to block on it
let result = tokio::task::block_in_place(|| { let result = tokio::task::block_in_place(|| {
tokio::runtime::Handle::current() tokio::runtime::Handle::current()
.block_on(async { execute_find(&mut binding, &binding2, &binding3).await }) .block_on(async { execute_find(&mut binding, &binding2, &binding3).await })
}) })
.map_err(|e| format!("DB error: {}", e))?; .map_err(|e| format!("DB error: {}", e))?;
if let Some(results) = result.get("results") { if let Some(results) = result.get("results") {
let array = to_array(utils::json_value_to_dynamic(results)); let array = to_array(utils::json_value_to_dynamic(results));
Ok(Dynamic::from(array)) Ok(Dynamic::from(array))
@ -41,56 +34,37 @@ pub fn find_keyword(state: &AppState, _user: UserSession, engine: &mut Engine) {
}) })
.unwrap(); .unwrap();
} }
pub async fn execute_find( pub async fn execute_find(
conn: &mut PgConnection, conn: &mut PgConnection,
table_str: &str, table_str: &str,
filter_str: &str, filter_str: &str,
) -> Result<Value, String> { ) -> Result<Value, String> {
// Changed to String error like your Actix code trace!(
info!(
"Starting execute_find with table: {}, filter: {}", "Starting execute_find with table: {}, filter: {}",
table_str, filter_str table_str,
filter_str
); );
let (where_clause, params) = utils::parse_filter(filter_str).map_err(|e| e.to_string())?; let (where_clause, params) = utils::parse_filter(filter_str).map_err(|e| e.to_string())?;
let query = format!( let query = format!(
"SELECT * FROM {} WHERE {} LIMIT 10", "SELECT * FROM {} WHERE {} LIMIT 10",
table_str, where_clause table_str, where_clause
); );
info!("Executing query: {}", query); let _raw_result = diesel::sql_query(&query)
// Execute raw SQL and get raw results
let raw_result = diesel::sql_query(&query)
.bind::<diesel::sql_types::Text, _>(&params[0]) .bind::<diesel::sql_types::Text, _>(&params[0])
.execute(conn) .execute(conn)
.map_err(|e| { .map_err(|e| {
error!("SQL execution error: {}", e); error!("SQL execution error: {}", e);
e.to_string() e.to_string()
})?; })?;
info!("Query executed successfully, affected {} rows", raw_result);
// For now, create placeholder results since we can't easily deserialize dynamic rows
let mut results = Vec::new(); let mut results = Vec::new();
// This is a simplified approach - in a real implementation you'd need to:
// 1. Query the table schema to know column types
// 2. Build a proper struct or use a more flexible approach
// 3. Or use a different database library that supports dynamic queries better
// Placeholder result for demonstration
let json_row = serde_json::json!({ let json_row = serde_json::json!({
"note": "Dynamic row deserialization not implemented - need table schema" "note": "Dynamic row deserialization not implemented - need table schema"
}); });
results.push(json_row); results.push(json_row);
Ok(json!({ Ok(json!({
"command": "find", "command": "find",
"table": table_str, "table": table_str,
"filter": filter_str, "filter": filter_str,
"results": results "results": results
})) }))
} }

View file

@ -1,184 +1,14 @@
use rhai::Dynamic; use rhai::Dynamic;
use rhai::Engine; use rhai::Engine;
pub fn first_keyword(engine: &mut Engine) { pub fn first_keyword(engine: &mut Engine) {
engine engine
.register_custom_syntax(&["FIRST", "$expr$"], false, { .register_custom_syntax(&["FIRST", "$expr$"], false, {
move |context, inputs| { move |context, inputs| {
let input_string = context.eval_expression_tree(&inputs[0])?; let input_string = context.eval_expression_tree(&inputs[0])?;
let input_str = input_string.to_string(); let input_str = input_string.to_string();
let first_word = input_str.split_whitespace().next().unwrap_or("").to_string();
let first_word = input_str Ok(Dynamic::from(first_word))
.split_whitespace() }
.next() })
.unwrap_or("") .unwrap();
.to_string();
Ok(Dynamic::from(first_word))
}
})
.unwrap();
}
#[cfg(test)]
mod tests {
use super::*;
use rhai::Engine;
fn setup_engine() -> Engine {
let mut engine = Engine::new();
first_keyword(&mut engine);
engine
}
#[test]
fn test_first_keyword_basic() {
let engine = setup_engine();
let result = engine
.eval::<String>(
r#"
FIRST "hello world"
"#,
)
.unwrap();
assert_eq!(result, "hello");
}
#[test]
fn test_first_keyword_single_word() {
let engine = setup_engine();
let result = engine
.eval::<String>(
r#"
FIRST "single"
"#,
)
.unwrap();
assert_eq!(result, "single");
}
#[test]
fn test_first_keyword_multiple_spaces() {
let engine = setup_engine();
let result = engine
.eval::<String>(
r#"
FIRST " leading spaces"
"#,
)
.unwrap();
assert_eq!(result, "leading");
}
#[test]
fn test_first_keyword_empty_string() {
let engine = setup_engine();
let result = engine
.eval::<String>(
r#"
FIRST ""
"#,
)
.unwrap();
assert_eq!(result, "");
}
#[test]
fn test_first_keyword_whitespace_only() {
let engine = setup_engine();
let result = engine
.eval::<String>(
r#"
FIRST " "
"#,
)
.unwrap();
assert_eq!(result, "");
}
#[test]
fn test_first_keyword_with_tabs() {
let engine = setup_engine();
let result = engine
.eval::<String>(
r#"
FIRST " tab separated words"
"#,
)
.unwrap();
assert_eq!(result, "tab");
}
#[test]
fn test_first_keyword_with_variable() {
let engine = setup_engine();
let result = engine
.eval::<String>(
r#"
let text = "variable test";
FIRST text
"#,
)
.unwrap();
assert_eq!(result, "variable");
}
#[test]
fn test_first_keyword_with_expression() {
let engine = setup_engine();
let result = engine
.eval::<String>(
r#"
FIRST "one two " + "three four"
"#,
)
.unwrap();
assert_eq!(result, "one");
}
#[test]
fn test_first_keyword_mixed_whitespace() {
let engine = setup_engine();
let result = engine
.eval::<String>(
r#"
FIRST " multiple spaces between words "
"#,
)
.unwrap();
assert_eq!(result, "multiple");
}
#[test]
fn test_first_keyword_special_characters() {
let engine = setup_engine();
let result = engine
.eval::<String>(
r#"
FIRST "hello-world example"
"#,
)
.unwrap();
assert_eq!(result, "hello-world");
}
} }

View file

@ -1,73 +1,47 @@
use crate::shared::models::UserSession; use crate::shared::models::UserSession;
use crate::shared::state::AppState; use crate::shared::state::AppState;
use log::info;
use rhai::Dynamic; use rhai::Dynamic;
use rhai::Engine; use rhai::Engine;
pub fn for_keyword(_state: &AppState, _user: UserSession, engine: &mut Engine) { pub fn for_keyword(_state: &AppState, _user: UserSession, engine: &mut Engine) {
engine engine
.register_custom_syntax(&["EXIT", "FOR"], false, |_context, _inputs| { .register_custom_syntax(&["EXIT", "FOR"], false, |_context, _inputs| {
Err("EXIT FOR".into()) Err("EXIT FOR".into())
}) })
.unwrap(); .unwrap();
engine
engine .register_custom_syntax(&["FOR", "EACH", "$ident$", "IN", "$expr$", "$block$", "NEXT", "$ident$"], true, |context, inputs| {
.register_custom_syntax( let loop_var = inputs[0].get_string_value().unwrap();
&[ let next_var = inputs[3].get_string_value().unwrap();
"FOR", "EACH", "$ident$", "IN", "$expr$", "$block$", "NEXT", "$ident$", if loop_var != next_var {
], return Err(format!("NEXT variable '{}' doesn't match FOR EACH variable '{}'", next_var, loop_var).into());
true, }
|context, inputs| { let collection = context.eval_expression_tree(&inputs[1])?;
let loop_var = inputs[0].get_string_value().unwrap(); let ccc = collection.clone();
let next_var = inputs[3].get_string_value().unwrap(); let array = match collection.into_array() {
Ok(arr) => arr,
if loop_var != next_var { Err(err) => {
return Err(format!( return Err(format!("foreach expected array, got {}: {}", ccc.type_name(), err).into());
"NEXT variable '{}' doesn't match FOR EACH variable '{}'", }
next_var, loop_var };
) let block = &inputs[2];
.into()); let orig_len = context.scope().len();
} for item in array {
context.scope_mut().push(loop_var, item);
let collection = context.eval_expression_tree(&inputs[1])?; match context.eval_expression_tree(block) {
Ok(_) => (),
info!("Collection type: {}", collection.type_name()); Err(e) if e.to_string() == "EXIT FOR" => {
let ccc = collection.clone(); context.scope_mut().rewind(orig_len);
let array = match collection.into_array() { break;
Ok(arr) => arr, }
Err(err) => { Err(e) => {
return Err(format!( context.scope_mut().rewind(orig_len);
"foreach expected array, got {}: {}", return Err(e);
ccc.type_name(), }
err }
) context.scope_mut().rewind(orig_len);
.into()); }
} Ok(Dynamic::UNIT)
}; },
let block = &inputs[2]; )
.unwrap();
let orig_len = context.scope().len();
for item in array {
context.scope_mut().push(loop_var, item);
match context.eval_expression_tree(block) {
Ok(_) => (),
Err(e) if e.to_string() == "EXIT FOR" => {
context.scope_mut().rewind(orig_len);
break;
}
Err(e) => {
context.scope_mut().rewind(orig_len);
return Err(e);
}
}
context.scope_mut().rewind(orig_len);
}
Ok(Dynamic::UNIT)
},
)
.unwrap();
} }

View file

@ -2,303 +2,157 @@ use rhai::{Dynamic, Engine};
use chrono::{NaiveDateTime, Timelike, Datelike}; use chrono::{NaiveDateTime, Timelike, Datelike};
use num_format::{Locale, ToFormattedString}; use num_format::{Locale, ToFormattedString};
use std::str::FromStr; use std::str::FromStr;
pub fn format_keyword(engine: &mut Engine) { pub fn format_keyword(engine: &mut Engine) {
engine engine
.register_custom_syntax(&["FORMAT", "$expr$", "$expr$"], false, { .register_custom_syntax(&["FORMAT", "$expr$", "$expr$"], false, {
move |context, inputs| { move |context, inputs| {
let value_dyn = context.eval_expression_tree(&inputs[0])?; let value_dyn = context.eval_expression_tree(&inputs[0])?;
let pattern_dyn = context.eval_expression_tree(&inputs[1])?; let pattern_dyn = context.eval_expression_tree(&inputs[1])?;
let value_str = value_dyn.to_string();
let value_str = value_dyn.to_string(); let pattern = pattern_dyn.to_string();
let pattern = pattern_dyn.to_string(); if let Ok(num) = f64::from_str(&value_str) {
let formatted = if pattern.starts_with("N") || pattern.starts_with("C") {
if let Ok(num) = f64::from_str(&value_str) { let (prefix, decimals, locale_tag) = parse_pattern(&pattern);
let formatted = if pattern.starts_with("N") || pattern.starts_with("C") { let locale = get_locale(&locale_tag);
let (prefix, decimals, locale_tag) = parse_pattern(&pattern); let symbol = if prefix == "C" { get_currency_symbol(&locale_tag) } else { "" };
let int_part = num.trunc() as i64;
let locale = get_locale(&locale_tag); let frac_part = num.fract();
let symbol = if prefix == "C" { if decimals == 0 {
get_currency_symbol(&locale_tag) format!("{}{}", symbol, int_part.to_formatted_string(&locale))
} else { } else {
"" let frac_scaled = ((frac_part * 10f64.powi(decimals as i32)).round()) as i64;
}; let decimal_sep = match locale_tag.as_str() {
"pt" | "fr" | "es" | "it" | "de" => ",",
let int_part = num.trunc() as i64; _ => "."
let frac_part = num.fract(); };
format!("{}{}{}{:0width$}", symbol, int_part.to_formatted_string(&locale), decimal_sep, frac_scaled, width = decimals)
if decimals == 0 { }
format!("{}{}", symbol, int_part.to_formatted_string(&locale)) } else {
} else { match pattern.as_str() {
let frac_scaled = "n" => format!("{:.2}", num),
((frac_part * 10f64.powi(decimals as i32)).round()) as i64; "F" => format!("{:.2}", num),
"f" => format!("{}", num),
let decimal_sep = match locale_tag.as_str() { "0%" => format!("{:.0}%", num * 100.0),
"pt" | "fr" | "es" | "it" | "de" => ",", _ => format!("{}", num),
_ => "." }
}; };
return Ok(Dynamic::from(formatted));
format!( }
"{}{}{}{:0width$}", if let Ok(dt) = NaiveDateTime::parse_from_str(&value_str, "%Y-%m-%d %H:%M:%S") {
symbol, let formatted = apply_date_format(&dt, &pattern);
int_part.to_formatted_string(&locale), return Ok(Dynamic::from(formatted));
decimal_sep, }
frac_scaled, let formatted = apply_text_placeholders(&value_str, &pattern);
width = decimals Ok(Dynamic::from(formatted))
) }
} })
} else { .unwrap();
match pattern.as_str() {
"n" => format!("{:.2}", num),
"F" => format!("{:.2}", num),
"f" => format!("{}", num),
"0%" => format!("{:.0}%", num * 100.0),
_ => format!("{}", num),
}
};
return Ok(Dynamic::from(formatted));
}
if let Ok(dt) = NaiveDateTime::parse_from_str(&value_str, "%Y-%m-%d %H:%M:%S") {
let formatted = apply_date_format(&dt, &pattern);
return Ok(Dynamic::from(formatted));
}
let formatted = apply_text_placeholders(&value_str, &pattern);
Ok(Dynamic::from(formatted))
}
})
.unwrap();
} }
fn parse_pattern(pattern: &str) -> (String, usize, String) { fn parse_pattern(pattern: &str) -> (String, usize, String) {
let mut prefix = String::new(); let mut prefix = String::new();
let mut decimals: usize = 2; let mut decimals: usize = 2;
let mut locale_tag = "en".to_string(); let mut locale_tag = "en".to_string();
if pattern.starts_with('C') {
if pattern.starts_with('C') { prefix = "C".to_string();
prefix = "C".to_string(); } else if pattern.starts_with('N') {
} else if pattern.starts_with('N') { prefix = "N".to_string();
prefix = "N".to_string(); }
} let rest = &pattern[1..];
let mut num_part = String::new();
let rest = &pattern[1..]; for ch in rest.chars() {
let mut num_part = String::new(); if ch.is_ascii_digit() {
for ch in rest.chars() { num_part.push(ch);
if ch.is_ascii_digit() { } else {
num_part.push(ch); break;
} else { }
break; }
} if !num_part.is_empty() {
} decimals = num_part.parse().unwrap_or(2);
if !num_part.is_empty() { }
decimals = num_part.parse().unwrap_or(2); if let Some(start) = pattern.find('[') {
} if let Some(end) = pattern.find(']') {
if end > start {
if let Some(start) = pattern.find('[') { locale_tag = pattern[start + 1..end].to_string();
if let Some(end) = pattern.find(']') { }
if end > start { }
locale_tag = pattern[start + 1..end].to_string(); }
} (prefix, decimals, locale_tag)
}
}
(prefix, decimals, locale_tag)
} }
fn get_locale(tag: &str) -> Locale { fn get_locale(tag: &str) -> Locale {
match tag { match tag {
"en" => Locale::en, "en" => Locale::en,
"fr" => Locale::fr, "fr" => Locale::fr,
"de" => Locale::de, "de" => Locale::de,
"pt" => Locale::pt, "pt" => Locale::pt,
"it" => Locale::it, "it" => Locale::it,
"es" => Locale::es, "es" => Locale::es,
_ => Locale::en, _ => Locale::en,
} }
} }
fn get_currency_symbol(tag: &str) -> &'static str { fn get_currency_symbol(tag: &str) -> &'static str {
match tag { match tag {
"en" => "$", "en" => "$",
"pt" => "R$ ", "pt" => "R$ ",
"fr" | "de" | "es" | "it" => "", "fr" | "de" | "es" | "it" => "",
_ => "$", _ => "$",
} }
} }
fn apply_date_format(dt: &NaiveDateTime, pattern: &str) -> String { fn apply_date_format(dt: &NaiveDateTime, pattern: &str) -> String {
let mut output = pattern.to_string(); let mut output = pattern.to_string();
let year = dt.year();
let year = dt.year(); let month = dt.month();
let month = dt.month(); let day = dt.day();
let day = dt.day(); let hour24 = dt.hour();
let hour24 = dt.hour(); let minute = dt.minute();
let minute = dt.minute(); let second = dt.second();
let second = dt.second(); let millis = dt.and_utc().timestamp_subsec_millis();
let millis = dt.and_utc().timestamp_subsec_millis(); output = output.replace("yyyy", &format!("{:04}", year));
output = output.replace("yy", &format!("{:02}", year % 100));
output = output.replace("yyyy", &format!("{:04}", year)); output = output.replace("MM", &format!("{:02}", month));
output = output.replace("yy", &format!("{:02}", year % 100)); output = output.replace("M", &format!("{}", month));
output = output.replace("MM", &format!("{:02}", month)); output = output.replace("dd", &format!("{:02}", day));
output = output.replace("M", &format!("{}", month)); output = output.replace("d", &format!("{}", day));
output = output.replace("dd", &format!("{:02}", day)); output = output.replace("HH", &format!("{:02}", hour24));
output = output.replace("d", &format!("{}", day)); output = output.replace("H", &format!("{}", hour24));
let mut hour12 = hour24 % 12;
output = output.replace("HH", &format!("{:02}", hour24)); if hour12 == 0 { hour12 = 12; }
output = output.replace("H", &format!("{}", hour24)); output = output.replace("hh", &format!("{:02}", hour12));
output = output.replace("h", &format!("{}", hour12));
let mut hour12 = hour24 % 12; output = output.replace("mm", &format!("{:02}", minute));
if hour12 == 0 { hour12 = 12; } output = output.replace("m", &format!("{}", minute));
output = output.replace("hh", &format!("{:02}", hour12)); output = output.replace("ss", &format!("{:02}", second));
output = output.replace("h", &format!("{}", hour12)); output = output.replace("s", &format!("{}", second));
output = output.replace("fff", &format!("{:03}", millis));
output = output.replace("mm", &format!("{:02}", minute)); output = output.replace("tt", if hour24 < 12 { "AM" } else { "PM" });
output = output.replace("m", &format!("{}", minute)); output = output.replace("t", if hour24 < 12 { "A" } else { "P" });
output
output = output.replace("ss", &format!("{:02}", second));
output = output.replace("s", &format!("{}", second));
output = output.replace("fff", &format!("{:03}", millis));
output = output.replace("tt", if hour24 < 12 { "AM" } else { "PM" });
output = output.replace("t", if hour24 < 12 { "A" } else { "P" });
output
} }
fn apply_text_placeholders(value: &str, pattern: &str) -> String { fn apply_text_placeholders(value: &str, pattern: &str) -> String {
let mut result = String::new(); let mut result = String::new();
let mut i = 0; let mut i = 0;
let chars: Vec<char> = pattern.chars().collect(); let chars: Vec<char> = pattern.chars().collect();
while i < chars.len() {
while i < chars.len() { match chars[i] {
match chars[i] { '@' => result.push_str(value),
'@' => result.push_str(value), '&' => {
'&' => { result.push_str(&value.to_lowercase());
result.push_str(&value.to_lowercase()); if i + 1 < chars.len() {
// Handle modifiers match chars[i+1] {
if i + 1 < chars.len() { '!' => {
match chars[i+1] { result.push('!');
'!' => { i += 1;
result.push('!'); }
i += 1; '>' => {
} i += 1;
'>' => { }
i += 1; _ => ()
} }
_ => () }
} }
} '>' | '!' => result.push_str(&value.to_uppercase()),
} _ => result.push(chars[i]),
'>' | '!' => result.push_str(&value.to_uppercase()), }
_ => result.push(chars[i]), i += 1;
} }
i += 1; result
}
result
}
#[cfg(test)]
mod tests {
use super::*;
use rhai::Engine;
fn create_engine() -> Engine {
let mut engine = Engine::new();
format_keyword(&mut engine);
engine
}
#[test]
fn test_numeric_formatting_basic() {
let engine = create_engine();
assert_eq!(
engine.eval::<String>("FORMAT 1234.567 \"n\"").unwrap(),
"1234.57"
);
assert_eq!(
engine.eval::<String>("FORMAT 1234.5 \"F\"").unwrap(),
"1234.50"
);
assert_eq!(
engine.eval::<String>("FORMAT 1234.567 \"f\"").unwrap(),
"1234.567"
);
assert_eq!(
engine.eval::<String>("FORMAT 0.85 \"0%\"").unwrap(),
"85%"
);
}
#[test]
fn test_numeric_formatting_with_locale() {
let engine = create_engine();
assert_eq!(
engine.eval::<String>("FORMAT 1234.56 \"N[en]\"").unwrap(),
"1,234.56"
);
assert_eq!(
engine.eval::<String>("FORMAT 1234.56 \"N[pt]\"").unwrap(),
"1.234,56"
);
assert_eq!(
engine.eval::<String>("FORMAT 1234.56 \"N[fr]\"").unwrap(),
"1234,56"
);
}
#[test]
fn test_currency_formatting() {
let engine = create_engine();
assert_eq!(
engine.eval::<String>("FORMAT 1234.56 \"C[en]\"").unwrap(),
"$1,234.56"
);
assert_eq!(
engine.eval::<String>("FORMAT 1234.56 \"C[pt]\"").unwrap(),
"R$ 1.234,56"
);
assert_eq!(
engine.eval::<String>("FORMAT 1234.56 \"C[fr]\"").unwrap(),
"€1234,56"
);
}
#[test]
fn test_date_formatting() {
let engine = create_engine();
let result = engine.eval::<String>("FORMAT \"2024-03-15 14:30:25\" \"yyyy-MM-dd HH:mm:ss\"").unwrap();
assert_eq!(result, "2024-03-15 14:30:25");
let result = engine.eval::<String>("FORMAT \"2024-03-15 14:30:25\" \"dd/MM/yyyy\"").unwrap();
assert_eq!(result, "15/03/2024");
let result = engine.eval::<String>("FORMAT \"2024-03-15 14:30:25\" \"MM/dd/yy\"").unwrap();
assert_eq!(result, "03/15/24");
}
#[test]
fn test_text_formatting() {
let engine = create_engine();
assert_eq!(
engine.eval::<String>("FORMAT \"hello\" \"Prefix: @\"").unwrap(),
"Prefix: hello"
);
assert_eq!(
engine.eval::<String>("FORMAT \"HELLO\" \"Result: &!\"").unwrap(),
"Result: hello!"
);
assert_eq!(
engine.eval::<String>("FORMAT \"hello\" \"RESULT: >\"").unwrap(),
"RESULT: HELLO"
);
}
} }

View file

@ -1,66 +1,57 @@
use crate::shared::models::schema::bots::dsl::*; use crate::shared::models::schema::bots::dsl::*;
use diesel::prelude::*;
use crate::shared::models::UserSession; use crate::shared::models::UserSession;
use crate::shared::state::AppState; use crate::shared::state::AppState;
use log::{debug, error, info, trace}; use diesel::prelude::*;
use log::{error, trace};
use reqwest::{self, Client}; use reqwest::{self, Client};
use rhai::{Dynamic, Engine}; use rhai::{Dynamic, Engine};
use std::error::Error; use std::error::Error;
use std::path::Path; use std::path::Path;
use std::sync::Arc; use std::sync::Arc;
use std::time::Duration; use std::time::Duration;
pub fn get_keyword(state: Arc<AppState>, user_session: UserSession, engine: &mut Engine) { pub fn get_keyword(state: Arc<AppState>, user_session: UserSession, engine: &mut Engine) {
let state_clone = Arc::clone(&state); let state_clone = Arc::clone(&state);
engine engine
.register_custom_syntax(&["GET", "$expr$"], false, move |context, inputs| { .register_custom_syntax(&["GET", "$expr$"], false, move |context, inputs| {
let url = context.eval_expression_tree(&inputs[0])?; let url = context.eval_expression_tree(&inputs[0])?;
let url_str = url.to_string(); let url_str = url.to_string();
info!("GET command executed: {}", url_str);
if !is_safe_path(&url_str) { if !is_safe_path(&url_str) {
return Err(Box::new(rhai::EvalAltResult::ErrorRuntime( return Err(Box::new(rhai::EvalAltResult::ErrorRuntime(
"URL contains invalid or unsafe path sequences".into(), "URL contains invalid or unsafe path sequences".into(),
rhai::Position::NONE, rhai::Position::NONE,
))); )));
} }
let state_for_blocking = Arc::clone(&state_clone); let state_for_blocking = Arc::clone(&state_clone);
let url_for_blocking = url_str.clone(); let url_for_blocking = url_str.clone();
let (tx, rx) = std::sync::mpsc::channel(); let (tx, rx) = std::sync::mpsc::channel();
std::thread::spawn(move || { std::thread::spawn(move || {
let rt = tokio::runtime::Builder::new_multi_thread() let rt = tokio::runtime::Builder::new_multi_thread()
.worker_threads(2) .worker_threads(2)
.enable_all() .enable_all()
.build(); .build();
let send_err = if let Ok(rt) = rt { let send_err = if let Ok(rt) = rt {
let result = rt.block_on(async move { let result = rt.block_on(async move {
if url_for_blocking.starts_with("https://") if url_for_blocking.starts_with("https://")
|| url_for_blocking.starts_with("http://") || url_for_blocking.starts_with("http://")
{ {
info!("HTTP(S) GET request: {}", url_for_blocking);
execute_get(&url_for_blocking).await execute_get(&url_for_blocking).await
} else { } else {
info!("Local file GET request from bucket: {}", url_for_blocking); get_from_bucket(
get_from_bucket(&state_for_blocking, &url_for_blocking, &state_for_blocking,
user_session.bot_id) &url_for_blocking,
.await user_session.bot_id,
)
.await
} }
}); });
tx.send(result).err() tx.send(result).err()
} else { } else {
tx.send(Err("failed to build tokio runtime".into())).err() tx.send(Err("failed to build tokio runtime".into())).err()
}; };
if send_err.is_some() { if send_err.is_some() {
error!("Failed to send result from thread"); error!("Failed to send result from thread");
} }
}); });
match rx.recv_timeout(std::time::Duration::from_secs(40)) { match rx.recv_timeout(std::time::Duration::from_secs(40)) {
Ok(Ok(content)) => Ok(Dynamic::from(content)), Ok(Ok(content)) => Ok(Dynamic::from(content)),
Ok(Err(e)) => Err(Box::new(rhai::EvalAltResult::ErrorRuntime( Ok(Err(e)) => Err(Box::new(rhai::EvalAltResult::ErrorRuntime(
@ -78,7 +69,6 @@ pub fn get_keyword(state: Arc<AppState>, user_session: UserSession, engine: &mut
}) })
.unwrap(); .unwrap();
} }
fn is_safe_path(path: &str) -> bool { fn is_safe_path(path: &str) -> bool {
if path.starts_with("https://") || path.starts_with("http://") { if path.starts_with("https://") || path.starts_with("http://") {
return true; return true;
@ -105,10 +95,7 @@ fn is_safe_path(path: &str) -> bool {
} }
true true
} }
pub async fn execute_get(url: &str) -> Result<String, Box<dyn Error + Send + Sync>> { pub async fn execute_get(url: &str) -> Result<String, Box<dyn Error + Send + Sync>> {
debug!("Starting execute_get with URL: {}", url);
let client = Client::builder() let client = Client::builder()
.timeout(Duration::from_secs(30)) .timeout(Duration::from_secs(30))
.connect_timeout(Duration::from_secs(10)) .connect_timeout(Duration::from_secs(10))
@ -118,12 +105,10 @@ pub async fn execute_get(url: &str) -> Result<String, Box<dyn Error + Send + Syn
error!("Failed to build HTTP client: {}", e); error!("Failed to build HTTP client: {}", e);
e e
})?; })?;
let response = client.get(url).send().await.map_err(|e| { let response = client.get(url).send().await.map_err(|e| {
error!("HTTP request failed for URL {}: {}", url, e); error!("HTTP request failed for URL {}: {}", url, e);
e e
})?; })?;
if !response.status().is_success() { if !response.status().is_success() {
let status = response.status(); let status = response.status();
let error_body = response.text().await.unwrap_or_default(); let error_body = response.text().await.unwrap_or_default();
@ -137,35 +122,29 @@ pub async fn execute_get(url: &str) -> Result<String, Box<dyn Error + Send + Syn
) )
.into()); .into());
} }
let content = response.text().await.map_err(|e| { let content = response.text().await.map_err(|e| {
error!("Failed to read response text for URL {}: {}", url, e); error!("Failed to read response text for URL {}: {}", url, e);
e e
})?; })?;
trace!(
debug!(
"Successfully executed GET request for URL: {}, content length: {}", "Successfully executed GET request for URL: {}, content length: {}",
url, url,
content.len() content.len()
); );
Ok(content) Ok(content)
} }
pub async fn get_from_bucket( pub async fn get_from_bucket(
state: &AppState, state: &AppState,
file_path: &str, file_path: &str,
bot_id: uuid::Uuid, bot_id: uuid::Uuid,
) -> Result<String, Box<dyn Error + Send + Sync>> { ) -> Result<String, Box<dyn Error + Send + Sync>> {
debug!("Getting file from bucket: {}", file_path);
if !is_safe_path(file_path) { if !is_safe_path(file_path) {
error!("Unsafe file path detected: {}", file_path); error!("Unsafe file path detected: {}", file_path);
return Err("Invalid file path".into()); return Err("Invalid file path".into());
} }
let client = state.drive.as_ref().ok_or("S3 client not configured")?; let client = state.drive.as_ref().ok_or("S3 client not configured")?;
let bot_name: String = { let bot_name: String = {
let mut db_conn = state.conn.lock().unwrap(); let mut db_conn = state.conn.get().map_err(|e| format!("DB error: {}", e))?;
bots.filter(id.eq(&bot_id)) bots.filter(id.eq(&bot_id))
.select(name) .select(name)
.first(&mut *db_conn) .first(&mut *db_conn)
@ -174,32 +153,26 @@ pub async fn get_from_bucket(
e e
})? })?
}; };
let bucket_name = { let bucket_name = {
let bucket = format!("{}.gbai", bot_name); let bucket = format!("{}.gbai", bot_name);
trace!("Resolved GET bucket name: {}", bucket);
bucket bucket
}; };
let bytes = match tokio::time::timeout(Duration::from_secs(30), async {
let bytes = match tokio::time::timeout( let result: Result<Vec<u8>, Box<dyn Error + Send + Sync>> = match client
Duration::from_secs(30), .get_object()
async { .bucket(&bucket_name)
let result: Result<Vec<u8>, Box<dyn Error + Send + Sync>> = match client .key(file_path)
.get_object() .send()
.bucket(&bucket_name) .await
.key(file_path) {
.send() Ok(response) => {
.await let data = response.body.collect().await?.into_bytes();
{ Ok(data.to_vec())
Ok(response) => { }
let data = response.body.collect().await?.into_bytes(); Err(e) => Err(format!("S3 operation failed: {}", e).into()),
Ok(data.to_vec()) };
} result
Err(e) => Err(format!("S3 operation failed: {}", e).into()), })
};
result
},
)
.await .await
{ {
Ok(Ok(data)) => data.to_vec(), Ok(Ok(data)) => data.to_vec(),
@ -212,7 +185,6 @@ pub async fn get_from_bucket(
return Err("drive operation timed out".into()); return Err("drive operation timed out".into());
} }
}; };
let content = if file_path.to_ascii_lowercase().ends_with(".pdf") { let content = if file_path.to_ascii_lowercase().ends_with(".pdf") {
match pdf_extract::extract_text_from_mem(&bytes) { match pdf_extract::extract_text_from_mem(&bytes) {
Ok(text) => text, Ok(text) => text,
@ -230,8 +202,7 @@ pub async fn get_from_bucket(
} }
} }
}; };
trace!(
info!(
"Successfully retrieved file from bucket: {}, content length: {}", "Successfully retrieved file from bucket: {}, content length: {}",
file_path, file_path,
content.len() content.len()

View file

@ -1,159 +1,108 @@
use crate::shared::models::{BotResponse, UserSession}; use crate::shared::models::{BotResponse, UserSession};
use crate::shared::state::AppState; use crate::shared::state::AppState;
use log::{debug, error, info}; use log::{error, trace};
use rhai::{Dynamic, Engine, EvalAltResult}; use rhai::{Dynamic, Engine, EvalAltResult};
use std::sync::Arc; use std::sync::Arc;
pub fn hear_keyword(state: Arc<AppState>, user: UserSession, engine: &mut Engine) { pub fn hear_keyword(state: Arc<AppState>, user: UserSession, engine: &mut Engine) {
let session_id = user.id; let session_id = user.id;
let state_clone = Arc::clone(&state); let state_clone = Arc::clone(&state);
engine
engine .register_custom_syntax(&["HEAR", "$ident$"], true, move |_context, inputs| {
.register_custom_syntax(&["HEAR", "$ident$"], true, move |_context, inputs| { let variable_name = inputs[0].get_string_value().expect("Expected identifier as string").to_string();
let variable_name = inputs[0] trace!("HEAR command waiting for user input to store in variable: {}", variable_name);
.get_string_value() let state_for_spawn = Arc::clone(&state_clone);
.expect("Expected identifier as string") let session_id_clone = session_id;
.to_string(); let var_name_clone = variable_name.clone();
tokio::spawn(async move {
info!( trace!("HEAR: Setting session {} to wait for input for variable '{}'", session_id_clone, var_name_clone);
"HEAR command waiting for user input to store in variable: {}", let mut session_manager = state_for_spawn.session_manager.lock().await;
variable_name session_manager.mark_waiting(session_id_clone);
); if let Some(redis_client) = &state_for_spawn.cache {
let mut conn = match redis_client.get_multiplexed_async_connection().await {
let state_for_spawn = Arc::clone(&state_clone); Ok(conn) => conn,
let session_id_clone = session_id; Err(e) => {
let var_name_clone = variable_name.clone(); error!("Failed to connect to cache: {}", e);
return;
tokio::spawn(async move { }
debug!( };
"HEAR: Setting session {} to wait for input for variable '{}'", let key = format!("hear:{}:{}", session_id_clone, var_name_clone);
session_id_clone, var_name_clone let _: Result<(), _> = redis::cmd("SET").arg(&key).arg("waiting").query_async(&mut conn).await;
); }
});
let mut session_manager = state_for_spawn.session_manager.lock().await; Err(Box::new(EvalAltResult::ErrorRuntime("Waiting for user input".into(), rhai::Position::NONE)))
session_manager.mark_waiting(session_id_clone); })
.unwrap();
if let Some(redis_client) = &state_for_spawn.cache {
let mut conn = match redis_client.get_multiplexed_async_connection().await {
Ok(conn) => conn,
Err(e) => {
error!("Failed to connect to cache: {}", e);
return;
}
};
let key = format!("hear:{}:{}", session_id_clone, var_name_clone);
let _: Result<(), _> = redis::cmd("SET")
.arg(&key)
.arg("waiting")
.query_async(&mut conn)
.await;
}
});
Err(Box::new(EvalAltResult::ErrorRuntime(
"Waiting for user input".into(),
rhai::Position::NONE,
)))
})
.unwrap();
} }
pub async fn execute_talk(state: Arc<AppState>, user_session: UserSession, message: String) -> Result<BotResponse, Box<dyn std::error::Error>> { pub async fn execute_talk(state: Arc<AppState>, user_session: UserSession, message: String) -> Result<BotResponse, Box<dyn std::error::Error>> {
info!("Executing TALK with message: {}", message); let mut suggestions = Vec::new();
debug!("TALK: Sending message: {}", message); if let Some(redis_client) = &state.cache {
let mut conn: redis::aio::MultiplexedConnection = redis_client.get_multiplexed_async_connection().await?;
let mut suggestions = Vec::new(); let redis_key = format!("suggestions:{}:{}", user_session.user_id, user_session.id);
let suggestions_json: Result<Vec<String>, _> = redis::cmd("LRANGE").arg(redis_key.as_str()).arg(0).arg(-1).query_async(&mut conn).await;
if let Some(redis_client) = &state.cache { match suggestions_json {
let mut conn: redis::aio::MultiplexedConnection = redis_client.get_multiplexed_async_connection().await?; Ok(suggestions_json) => {
suggestions = suggestions_json.into_iter().filter_map(|s| serde_json::from_str(&s).ok()).collect();
let redis_key = format!("suggestions:{}:{}", user_session.user_id, user_session.id); }
debug!("Loading suggestions from Redis key: {}", redis_key); Err(e) => {
let suggestions_json: Result<Vec<String>, _> = redis::cmd("LRANGE") error!("Failed to load suggestions from Redis: {}", e);
.arg(redis_key.as_str()) }
.arg(0) }
.arg(-1) }
.query_async(&mut conn) let response = BotResponse {
.await; bot_id: user_session.bot_id.to_string(),
user_id: user_session.user_id.to_string(),
match suggestions_json { session_id: user_session.id.to_string(),
Ok(suggestions_json) => { channel: "web".to_string(),
debug!("Found suggestions in Redis: {:?}", suggestions_json); content: message,
suggestions = suggestions_json.into_iter() message_type: 1,
.filter_map(|s| serde_json::from_str(&s).ok()) stream_token: None,
.collect(); is_complete: true,
debug!("Parsed suggestions: {:?}", suggestions); suggestions,
} context_name: None,
Err(e) => { context_length: 0,
error!("Failed to load suggestions from Redis: {}", e); context_max_length: 0,
} };
} let user_id = user_session.id.to_string();
} let response_clone = response.clone();
match state.response_channels.try_lock() {
let response = BotResponse { Ok(response_channels) => {
bot_id: user_session.bot_id.to_string(), if let Some(tx) = response_channels.get(&user_id) {
user_id: user_session.user_id.to_string(), if let Err(e) = tx.try_send(response_clone) {
session_id: user_session.id.to_string(), error!("Failed to send TALK message via WebSocket: {}", e);
channel: "web".to_string(), } else {
content: message, trace!("TALK message sent via WebSocket");
message_type: 1, }
stream_token: None, } else {
is_complete: true, let web_adapter = Arc::clone(&state.web_adapter);
suggestions, tokio::spawn(async move {
context_name: None, if let Err(e) = web_adapter.send_message_to_session(&user_id, response_clone).await {
context_length: 0, error!("Failed to send TALK message via web adapter: {}", e);
context_max_length: 0, } else {
}; trace!("TALK message sent via web adapter");
}
let user_id = user_session.id.to_string(); });
let response_clone = response.clone(); }
}
match state.response_channels.try_lock() { Err(_) => {
Ok(response_channels) => { error!("Failed to acquire lock on response_channels for TALK command");
if let Some(tx) = response_channels.get(&user_id) { }
if let Err(e) = tx.try_send(response_clone) { }
error!("Failed to send TALK message via WebSocket: {}", e); Ok(response)
} else {
debug!("TALK message sent successfully via WebSocket");
}
} else {
debug!("No WebSocket connection found for session {}, sending via web adapter", user_id);
let web_adapter = Arc::clone(&state.web_adapter);
tokio::spawn(async move {
if let Err(e) = web_adapter.send_message_to_session(&user_id, response_clone).await {
error!("Failed to send TALK message via web adapter: {}", e);
} else {
debug!("TALK message sent successfully via web adapter");
}
});
}
}
Err(_) => {
error!("Failed to acquire lock on response_channels for TALK command");
}
}
Ok(response)
} }
pub fn talk_keyword(state: Arc<AppState>, user: UserSession, engine: &mut Engine) { pub fn talk_keyword(state: Arc<AppState>, user: UserSession, engine: &mut Engine) {
let state_clone = Arc::clone(&state); let state_clone = Arc::clone(&state);
let user_clone = user.clone(); let user_clone = user.clone();
engine
engine .register_custom_syntax(&["TALK", "$expr$"], true, move |context, inputs| {
.register_custom_syntax(&["TALK", "$expr$"], true, move |context, inputs| { let message = context.eval_expression_tree(&inputs[0])?.to_string();
let message = context.eval_expression_tree(&inputs[0])?.to_string(); let state_for_talk = Arc::clone(&state_clone);
let state_for_talk = Arc::clone(&state_clone); let user_for_talk = user_clone.clone();
let user_for_talk = user_clone.clone(); tokio::spawn(async move {
if let Err(e) = execute_talk(state_for_talk, user_for_talk, message).await {
tokio::spawn(async move { error!("Error executing TALK command: {}", e);
if let Err(e) = execute_talk(state_for_talk, user_for_talk, message).await { }
error!("Error executing TALK command: {}", e); });
} Ok(Dynamic::UNIT)
}); })
.unwrap();
Ok(Dynamic::UNIT)
})
.unwrap();
} }

View file

@ -1,230 +1,18 @@
use rhai::Dynamic; use rhai::Dynamic;
use rhai::Engine; use rhai::Engine;
pub fn last_keyword(engine: &mut Engine) { pub fn last_keyword(engine: &mut Engine) {
engine engine
.register_custom_syntax(&["LAST", "(", "$expr$", ")"], false, { .register_custom_syntax(&["LAST", "(", "$expr$", ")"], false, {
move |context, inputs| { move |context, inputs| {
let input_string = context.eval_expression_tree(&inputs[0])?; let input_string = context.eval_expression_tree(&inputs[0])?;
let input_str = input_string.to_string(); let input_str = input_string.to_string();
if input_str.trim().is_empty() {
// Handle empty string case first return Ok(Dynamic::from(""));
if input_str.trim().is_empty() { }
return Ok(Dynamic::from("")); let words: Vec<&str> = input_str.split_whitespace().collect();
} let last_word = words.last().map(|s| *s).unwrap_or("");
Ok(Dynamic::from(last_word.to_string()))
// Split on any whitespace and filter out empty strings }
let words: Vec<&str> = input_str })
.split_whitespace() .unwrap();
.collect();
// Get the last non-empty word
let last_word = words.last().map(|s| *s).unwrap_or("");
Ok(Dynamic::from(last_word.to_string()))
}
})
.unwrap();
}
#[cfg(test)]
mod tests {
use super::*;
use rhai::{Engine, Scope};
#[test]
fn test_last_keyword_empty_string() {
let mut engine = Engine::new();
last_keyword(&mut engine);
let result: String = engine.eval("LAST(\"\")").unwrap();
assert_eq!(result, "");
}
#[test]
fn test_last_keyword_multiple_spaces() {
let mut engine = Engine::new();
last_keyword(&mut engine);
let result: String = engine.eval("LAST(\"hello world \")").unwrap();
assert_eq!(result, "world");
}
#[test]
fn test_last_keyword_tabs_and_newlines() {
let mut engine = Engine::new();
last_keyword(&mut engine);
let result: String = engine.eval(r#"LAST("hello\tworld\n")"#).unwrap();
assert_eq!(result, "world");
}
#[test]
fn test_last_keyword_with_variable() {
let mut engine = Engine::new();
last_keyword(&mut engine);
let mut scope = Scope::new();
scope.push("text", "this is a test");
let result: String = engine.eval_with_scope(&mut scope, "LAST(text)").unwrap();
assert_eq!(result, "test");
}
#[test]
fn test_last_keyword_whitespace_only() {
let mut engine = Engine::new();
last_keyword(&mut engine);
let result: String = engine.eval("LAST(\" \")").unwrap();
assert_eq!(result, "");
}
#[test]
fn test_last_keyword_mixed_whitespace() {
let mut engine = Engine::new();
last_keyword(&mut engine);
let result: String = engine.eval(r#"LAST("hello\t \n world \t final")"#).unwrap();
assert_eq!(result, "final");
}
#[test]
fn test_last_keyword_expression() {
let mut engine = Engine::new();
last_keyword(&mut engine);
let result: String = engine.eval("LAST(\"hello\" + \" \" + \"world\")").unwrap();
assert_eq!(result, "world");
}
#[test]
fn test_last_keyword_unicode() {
let mut engine = Engine::new();
last_keyword(&mut engine);
let result: String = engine.eval("LAST(\"hello 世界 мир world\")").unwrap();
assert_eq!(result, "world");
}
#[test]
fn test_last_keyword_in_expression() {
let mut engine = Engine::new();
last_keyword(&mut engine);
let result: bool = engine.eval("LAST(\"hello world\") == \"world\"").unwrap();
assert!(result);
}
#[test]
fn test_last_keyword_complex_scenario() {
let mut engine = Engine::new();
last_keyword(&mut engine);
let mut scope = Scope::new();
scope.push("sentence", "The quick brown fox jumps over the lazy dog");
let result: String = engine.eval_with_scope(&mut scope, "LAST(sentence)").unwrap();
assert_eq!(result, "dog");
}
#[test]
#[should_panic]
fn test_last_keyword_missing_parentheses() {
let mut engine = Engine::new();
last_keyword(&mut engine);
let _: String = engine.eval("LAST \"hello world\"").unwrap();
}
#[test]
#[should_panic]
fn test_last_keyword_missing_closing_parenthesis() {
let mut engine = Engine::new();
last_keyword(&mut engine);
let _: String = engine.eval("LAST(\"hello world\"").unwrap();
}
#[test]
#[should_panic]
fn test_last_keyword_missing_opening_parenthesis() {
let mut engine = Engine::new();
last_keyword(&mut engine);
let _: String = engine.eval("LAST \"hello world\")").unwrap();
}
#[test]
fn test_last_keyword_dynamic_type() {
let mut engine = Engine::new();
last_keyword(&mut engine);
let result = engine.eval::<Dynamic>("LAST(\"test string\")").unwrap();
assert!(result.is::<String>());
assert_eq!(result.to_string(), "string");
}
#[test]
fn test_last_keyword_nested_expression() {
let mut engine = Engine::new();
last_keyword(&mut engine);
let result: String = engine.eval("LAST(\"The result is: \" + \"hello world\")").unwrap();
assert_eq!(result, "world");
}
}
#[cfg(test)]
mod integration_tests {
use super::*;
#[test]
fn test_last_keyword_in_script() {
let mut engine = Engine::new();
last_keyword(&mut engine);
let script = r#"
let sentence1 = "first second third";
let sentence2 = "alpha beta gamma";
let last1 = LAST(sentence1);
let last2 = LAST(sentence2);
last1 + " and " + last2
"#;
let result: String = engine.eval(script).unwrap();
assert_eq!(result, "third and gamma");
}
#[test]
fn test_last_keyword_with_function() {
let mut engine = Engine::new();
last_keyword(&mut engine);
engine.register_fn("get_name", || -> String { "john doe".to_string() });
let result: String = engine.eval("LAST(get_name())").unwrap();
assert_eq!(result, "doe");
}
#[test]
fn test_last_keyword_multiple_calls() {
let mut engine = Engine::new();
last_keyword(&mut engine);
let script = r#"
let text1 = "apple banana cherry";
let text2 = "cat dog elephant";
let result1 = LAST(text1);
let result2 = LAST(text2);
result1 + "-" + result2
"#;
let result: String = engine.eval(script).unwrap();
assert_eq!(result, "cherry-elephant");
}
} }

View file

@ -1,107 +1,62 @@
use crate::basic::keywords::add_tool::get_session_tools; use crate::basic::keywords::add_tool::get_session_tools;
use crate::shared::models::UserSession; use crate::shared::models::UserSession;
use crate::shared::state::AppState; use crate::shared::state::AppState;
use log::{error, info}; use log::{error, trace};
use rhai::{Dynamic, Engine}; use rhai::{Dynamic, Engine};
use std::sync::Arc; use std::sync::Arc;
pub fn list_tools_keyword(state: Arc<AppState>, user: UserSession, engine: &mut Engine) { pub fn list_tools_keyword(state: Arc<AppState>, user: UserSession, engine: &mut Engine) {
let state_clone = Arc::clone(&state); let state_clone = Arc::clone(&state);
let user_clone = user.clone(); let user_clone = user.clone();
engine
engine .register_custom_syntax(&["LIST_TOOLS"], false, move |_context, _inputs| {
.register_custom_syntax(&["LIST_TOOLS"], false, move |_context, _inputs| { let state_for_task = Arc::clone(&state_clone);
info!("LIST_TOOLS command executed for session: {}", user_clone.id); let user_for_task = user_clone.clone();
let (tx, rx) = std::sync::mpsc::channel();
let state_for_task = Arc::clone(&state_clone); std::thread::spawn(move || {
let user_for_task = user_clone.clone(); let rt = tokio::runtime::Builder::new_multi_thread().worker_threads(2).enable_all().build();
let send_err = if let Ok(rt) = rt {
// Spawn async task to list all tool associations from session let result = rt.block_on(async move {
let (tx, rx) = std::sync::mpsc::channel(); list_session_tools(&state_for_task, &user_for_task).await
std::thread::spawn(move || { });
let rt = tokio::runtime::Builder::new_multi_thread() tx.send(result).err()
.worker_threads(2) } else {
.enable_all() tx.send(Err("Failed to build tokio runtime".to_string())).err()
.build(); };
if send_err.is_some() {
let send_err = if let Ok(rt) = rt { error!("Failed to send result from thread");
let result = rt.block_on(async move { }
list_session_tools(&state_for_task, &user_for_task).await });
}); match rx.recv_timeout(std::time::Duration::from_secs(10)) {
tx.send(result).err() Ok(Ok(message)) => {
} else { Ok(Dynamic::from(message))
tx.send(Err("Failed to build tokio runtime".to_string())) }
.err() Ok(Err(e)) => Err(Box::new(rhai::EvalAltResult::ErrorRuntime(e.into(), rhai::Position::NONE))),
}; Err(std::sync::mpsc::RecvTimeoutError::Timeout) => {
Err(Box::new(rhai::EvalAltResult::ErrorRuntime("LIST_TOOLS timed out".into(), rhai::Position::NONE)))
if send_err.is_some() { }
error!("Failed to send result from thread"); Err(e) => Err(Box::new(rhai::EvalAltResult::ErrorRuntime(format!("LIST_TOOLS failed: {}", e).into(), rhai::Position::NONE))),
} }
}); })
.unwrap();
match rx.recv_timeout(std::time::Duration::from_secs(10)) {
Ok(Ok(message)) => {
info!("LIST_TOOLS completed: {}", message);
Ok(Dynamic::from(message))
}
Ok(Err(e)) => Err(Box::new(rhai::EvalAltResult::ErrorRuntime(
e.into(),
rhai::Position::NONE,
))),
Err(std::sync::mpsc::RecvTimeoutError::Timeout) => {
Err(Box::new(rhai::EvalAltResult::ErrorRuntime(
"LIST_TOOLS timed out".into(),
rhai::Position::NONE,
)))
}
Err(e) => Err(Box::new(rhai::EvalAltResult::ErrorRuntime(
format!("LIST_TOOLS failed: {}", e).into(),
rhai::Position::NONE,
))),
}
})
.unwrap();
} }
/// List all tools associated with the current session
async fn list_session_tools(state: &AppState, user: &UserSession) -> Result<String, String> { async fn list_session_tools(state: &AppState, user: &UserSession) -> Result<String, String> {
let mut conn = state.conn.lock().map_err(|e| { let mut conn = state.conn.get().map_err(|e| {
error!("Failed to acquire database lock: {}", e); error!("Failed to acquire database lock: {}", e);
format!("Database connection error: {}", e) format!("Database connection error: {}", e)
})?; })?;
match get_session_tools(&mut *conn, &user.id) {
// Get all tool associations for this session Ok(tools) => {
match get_session_tools(&mut *conn, &user.id) { if tools.is_empty() {
Ok(tools) => { Ok("No tools are currently active in this conversation".to_string())
if tools.is_empty() { } else {
info!("No tools associated with session '{}'", user.id); trace!("Found {} tool(s) for session '{}' (user: {}, bot: {})", tools.len(), user.id, user.user_id, user.bot_id);
Ok("No tools are currently active in this conversation".to_string()) let tool_list = tools.iter().enumerate().map(|(idx, tool)| format!("{}. {}", idx + 1, tool)).collect::<Vec<_>>().join("\n");
} else { Ok(format!("Active tools in this conversation ({}):\n{}", tools.len(), tool_list))
info!( }
"Found {} tool(s) for session '{}' (user: {}, bot: {})", }
tools.len(), Err(e) => {
user.id, error!("Failed to list tools for session '{}': {}", user.id, e);
user.user_id, Err(format!("Failed to list tools: {}", e))
user.bot_id }
); }
let tool_list = tools
.iter()
.enumerate()
.map(|(idx, tool)| format!("{}. {}", idx + 1, tool))
.collect::<Vec<_>>()
.join("\n");
Ok(format!(
"Active tools in this conversation ({}):\n{}",
tools.len(),
tool_list
))
}
}
Err(e) => {
error!("Failed to list tools for session '{}': {}", user.id, e);
Err(format!("Failed to list tools: {}", e))
}
}
} }

View file

@ -1,91 +1,51 @@
use crate::shared::models::UserSession; use crate::shared::models::UserSession;
use crate::shared::state::AppState; use crate::shared::state::AppState;
use log::{error, info}; use log::{error};
use rhai::{Dynamic, Engine}; use rhai::{Dynamic, Engine};
use uuid::Uuid; use uuid::Uuid;
use std::sync::Arc; use std::sync::Arc;
use std::time::Duration; use std::time::Duration;
pub fn llm_keyword(state: Arc<AppState>, _user: UserSession, engine: &mut Engine) { pub fn llm_keyword(state: Arc<AppState>, _user: UserSession, engine: &mut Engine) {
let state_clone = Arc::clone(&state); let state_clone = Arc::clone(&state);
engine
engine .register_custom_syntax(&["LLM", "$expr$"], false, move |context, inputs| {
.register_custom_syntax(&["LLM", "$expr$"], false, move |context, inputs| { let text = context.eval_expression_tree(&inputs[0])?.to_string();
let text = context.eval_expression_tree(&inputs[0])?.to_string(); let state_for_thread = Arc::clone(&state_clone);
let prompt = build_llm_prompt(&text);
info!("LLM keyword processing text: {}", text); let (tx, rx) = std::sync::mpsc::channel();
std::thread::spawn(move || {
let state_for_thread = Arc::clone(&state_clone); let rt = tokio::runtime::Builder::new_multi_thread().worker_threads(2).enable_all().build();
let prompt = build_llm_prompt(&text); let send_err = if let Ok(rt) = rt {
let result = rt.block_on(async move {
// ---- safe runtime isolation: no deadlocks possible ---- execute_llm_generation(state_for_thread, prompt).await
let (tx, rx) = std::sync::mpsc::channel(); });
tx.send(result).err()
std::thread::spawn(move || { } else {
let rt = tokio::runtime::Builder::new_multi_thread() tx.send(Err("failed to build tokio runtime".into())).err()
.worker_threads(2) };
.enable_all() if send_err.is_some() {
.build(); error!("Failed to send LLM thread result");
}
let send_err = if let Ok(rt) = rt { });
let result = rt.block_on(async move { match rx.recv_timeout(Duration::from_secs(500)) {
execute_llm_generation(state_for_thread, prompt).await Ok(Ok(result)) => Ok(Dynamic::from(result)),
}); Ok(Err(e)) => Err(Box::new(rhai::EvalAltResult::ErrorRuntime(e.to_string().into(), rhai::Position::NONE))),
tx.send(result).err() Err(std::sync::mpsc::RecvTimeoutError::Timeout) => {
} else { Err(Box::new(rhai::EvalAltResult::ErrorRuntime("LLM generation timed out".into(), rhai::Position::NONE)))
tx.send(Err("failed to build tokio runtime".into())).err() }
}; Err(e) => Err(Box::new(rhai::EvalAltResult::ErrorRuntime(format!("LLM thread failed: {e}").into(), rhai::Position::NONE))),
}
if send_err.is_some() { })
error!("Failed to send LLM thread result"); .unwrap();
}
});
match rx.recv_timeout(Duration::from_secs(500)) {
Ok(Ok(result)) => Ok(Dynamic::from(result)),
Ok(Err(e)) => Err(Box::new(rhai::EvalAltResult::ErrorRuntime(
e.to_string().into(),
rhai::Position::NONE,
))),
Err(std::sync::mpsc::RecvTimeoutError::Timeout) => {
Err(Box::new(rhai::EvalAltResult::ErrorRuntime(
"LLM generation timed out".into(),
rhai::Position::NONE,
)))
}
Err(e) => Err(Box::new(rhai::EvalAltResult::ErrorRuntime(
format!("LLM thread failed: {e}").into(),
rhai::Position::NONE,
))),
}
})
.unwrap();
} }
/// Builds a consistent LLM prompt used by all Rhai scripts.
/// You can change the style/structure here to guide the model's behavior.
fn build_llm_prompt(user_text: &str) -> String { fn build_llm_prompt(user_text: &str) -> String {
user_text.trim().to_string() user_text.trim().to_string()
} }
pub async fn execute_llm_generation(state: Arc<AppState>, prompt: String) -> Result<String, Box<dyn std::error::Error + Send + Sync>> {
/// Runs the async LLM provider call safely. let config_manager = crate::config::ConfigManager::new(state.conn.clone());
pub async fn execute_llm_generation( let model = config_manager.get_config(&Uuid::nil(), "llm-model", None).unwrap_or_default();
state: Arc<AppState>, let handler = crate::llm_models::get_handler(&model);
prompt: String, let raw_response = state.llm_provider.generate(&prompt, &serde_json::Value::Null).await?;
) -> Result<String, Box<dyn std::error::Error + Send + Sync>> { let processed = handler.process_content(&raw_response);
let config_manager = crate::config::ConfigManager::new(Arc::clone(&state.conn)); Ok(processed)
let model = config_manager
.get_config(&Uuid::nil(), "llm-model", None)
.unwrap_or_default();
info!("Using LLM model: {}", model);
let handler = crate::llm_models::get_handler(&model);
let raw_response = state
.llm_provider
.generate(&prompt, &serde_json::Value::Null)
.await?;
let processed = handler.process_content(&raw_response);
info!("Processed content: {}", processed);
Ok(processed)
} }

View file

@ -1,78 +1,57 @@
use diesel::prelude::*; use diesel::prelude::*;
use log::{error, info}; use log::trace;
use log::{error};
use rhai::Dynamic; use rhai::Dynamic;
use rhai::Engine; use rhai::Engine;
use serde_json::{json, Value}; use serde_json::{json, Value};
use crate::shared::models::TriggerKind; use crate::shared::models::TriggerKind;
use crate::shared::models::UserSession; use crate::shared::models::UserSession;
use crate::shared::state::AppState; use crate::shared::state::AppState;
pub fn on_keyword(state: &AppState, _user: UserSession, engine: &mut Engine) { pub fn on_keyword(state: &AppState, _user: UserSession, engine: &mut Engine) {
let state_clone = state.clone(); let state_clone = state.clone();
engine
engine .register_custom_syntax(&["ON", "$ident$", "OF", "$string$"], true, move |context, inputs| {
.register_custom_syntax( let trigger_type = context.eval_expression_tree(&inputs[0])?.to_string();
&["ON", "$ident$", "OF", "$string$"], let table = context.eval_expression_tree(&inputs[1])?.to_string();
true, let name = format!("{}_{}.rhai", table, trigger_type.to_lowercase());
move |context, inputs| { let kind = match trigger_type.to_uppercase().as_str() {
let trigger_type = context.eval_expression_tree(&inputs[0])?.to_string(); "UPDATE" => TriggerKind::TableUpdate,
let table = context.eval_expression_tree(&inputs[1])?.to_string(); "INSERT" => TriggerKind::TableInsert,
let name = format!("{}_{}.rhai", table, trigger_type.to_lowercase()); "DELETE" => TriggerKind::TableDelete,
_ => return Err(format!("Invalid trigger type: {}", trigger_type).into()),
let kind = match trigger_type.to_uppercase().as_str() { };
"UPDATE" => TriggerKind::TableUpdate, trace!("Starting execute_on_trigger with kind: {:?}, table: {}, param: {}", kind, table, name);
"INSERT" => TriggerKind::TableInsert, let mut conn = state_clone.conn.get().map_err(|e| format!("DB error: {}", e))?;
"DELETE" => TriggerKind::TableDelete, let result = execute_on_trigger(&mut *conn, kind, &table, &name)
_ => return Err(format!("Invalid trigger type: {}", trigger_type).into()), .map_err(|e| format!("DB error: {}", e))?;
}; if let Some(rows_affected) = result.get("rows_affected") {
Ok(Dynamic::from(rows_affected.as_i64().unwrap_or(0)))
let mut conn = state_clone.conn.lock().unwrap(); } else {
let result = execute_on_trigger(&mut *conn, kind, &table, &name) Err("No rows affected".into())
.map_err(|e| format!("DB error: {}", e))?; }
},
if let Some(rows_affected) = result.get("rows_affected") { )
Ok(Dynamic::from(rows_affected.as_i64().unwrap_or(0))) .unwrap();
} else {
Err("No rows affected".into())
}
},
)
.unwrap();
} }
pub fn execute_on_trigger(conn: &mut diesel::PgConnection, kind: TriggerKind, table: &str, param: &str) -> Result<Value, String> {
pub fn execute_on_trigger( use crate::shared::models::system_automations;
conn: &mut diesel::PgConnection, let new_automation = (
kind: TriggerKind, system_automations::kind.eq(kind as i32),
table: &str, system_automations::target.eq(table),
param: &str, system_automations::param.eq(param),
) -> Result<Value, String> { );
info!( let result = diesel::insert_into(system_automations::table)
"Starting execute_on_trigger with kind: {:?}, table: {}, param: {}", .values(&new_automation)
kind, table, param .execute(conn)
); .map_err(|e| {
error!("SQL execution error: {}", e);
use crate::shared::models::system_automations; e.to_string()
})?;
let new_automation = ( Ok(json!({
system_automations::kind.eq(kind as i32), "command": "on_trigger",
system_automations::target.eq(table), "trigger_type": format!("{:?}", kind),
system_automations::param.eq(param), "table": table,
); "param": param,
"rows_affected": result
let result = diesel::insert_into(system_automations::table) }))
.values(&new_automation)
.execute(conn)
.map_err(|e| {
error!("SQL execution error: {}", e);
e.to_string()
})?;
Ok(json!({
"command": "on_trigger",
"trigger_type": format!("{:?}", kind),
"table": table,
"param": param,
"rows_affected": result
}))
} }

View file

@ -1,20 +1,15 @@
use log::info; use log::trace;
use rhai::Dynamic; use rhai::Dynamic;
use rhai::Engine; use rhai::Engine;
use crate::shared::state::AppState; use crate::shared::state::AppState;
use crate::shared::models::UserSession; use crate::shared::models::UserSession;
pub fn print_keyword(_state: &AppState, _user: UserSession, engine: &mut Engine) { pub fn print_keyword(_state: &AppState, _user: UserSession, engine: &mut Engine) {
engine engine
.register_custom_syntax( .register_custom_syntax(&["PRINT", "$expr$"], true, |context, inputs| {
&["PRINT", "$expr$"], let value = context.eval_expression_tree(&inputs[0])?;
true, trace!("PRINT: {}", value);
|context, inputs| { Ok(Dynamic::UNIT)
let value = context.eval_expression_tree(&inputs[0])?; },
info!("{}", value); )
Ok(Dynamic::UNIT) .unwrap();
},
)
.unwrap();
} }

View file

@ -1,118 +1,79 @@
use diesel::prelude::*; use diesel::prelude::*;
use log::{error, info}; use log::trace;
use log::{error};
use rhai::Dynamic; use rhai::Dynamic;
use rhai::Engine; use rhai::Engine;
use serde_json::{json, Value}; use serde_json::{json, Value};
use std::error::Error; use std::error::Error;
use crate::shared::models::UserSession; use crate::shared::models::UserSession;
use crate::shared::state::AppState; use crate::shared::state::AppState;
pub fn set_keyword(state: &AppState, _user: UserSession, engine: &mut Engine) { pub fn set_keyword(state: &AppState, _user: UserSession, engine: &mut Engine) {
let state_clone = state.clone(); let state_clone = state.clone();
engine
engine .register_custom_syntax(&["SET", "$expr$", ",", "$expr$", ",", "$expr$"], false, {
.register_custom_syntax(&["SET", "$expr$", ",", "$expr$", ",", "$expr$"], false, { move |context, inputs| {
move |context, inputs| { let table_name = context.eval_expression_tree(&inputs[0])?;
let table_name = context.eval_expression_tree(&inputs[0])?; let filter = context.eval_expression_tree(&inputs[1])?;
let filter = context.eval_expression_tree(&inputs[1])?; let updates = context.eval_expression_tree(&inputs[2])?;
let updates = context.eval_expression_tree(&inputs[2])?; let table_str = table_name.to_string();
let filter_str = filter.to_string();
let table_str = table_name.to_string(); let updates_str = updates.to_string();
let filter_str = filter.to_string(); trace!("Starting execute_set with table: {}, filter: {}, updates: {}", table_str, filter_str, updates_str);
let updates_str = updates.to_string(); let mut conn = state_clone.conn.get().map_err(|e| format!("DB error: {}", e))?;
let result = execute_set(&mut *conn, &table_str, &filter_str, &updates_str)
let mut conn = state_clone.conn.lock().unwrap(); .map_err(|e| format!("DB error: {}", e))?;
let result = execute_set(&mut *conn, &table_str, &filter_str, &updates_str) if let Some(rows_affected) = result.get("rows_affected") {
.map_err(|e| format!("DB error: {}", e))?; Ok(Dynamic::from(rows_affected.as_i64().unwrap_or(0)))
} else {
if let Some(rows_affected) = result.get("rows_affected") { Err("No rows affected".into())
Ok(Dynamic::from(rows_affected.as_i64().unwrap_or(0))) }
} else { }
Err("No rows affected".into()) })
} .unwrap();
}
})
.unwrap();
} }
pub fn execute_set(conn: &mut diesel::PgConnection, table_str: &str, filter_str: &str, updates_str: &str) -> Result<Value, String> {
pub fn execute_set( let (set_clause, _update_values) = parse_updates(updates_str).map_err(|e| e.to_string())?;
conn: &mut diesel::PgConnection, let where_clause = parse_filter_for_diesel(filter_str).map_err(|e| e.to_string())?;
table_str: &str, let query = format!("UPDATE {} SET {} WHERE {}", table_str, set_clause, where_clause);
filter_str: &str, let result = diesel::sql_query(&query).execute(conn).map_err(|e| {
updates_str: &str, error!("SQL execution error: {}", e);
) -> Result<Value, String> { e.to_string()
info!( })?;
"Starting execute_set with table: {}, filter: {}, updates: {}", Ok(json!({
table_str, filter_str, updates_str "command": "set",
); "table": table_str,
"filter": filter_str,
let (set_clause, _update_values) = parse_updates(updates_str).map_err(|e| e.to_string())?; "updates": updates_str,
"rows_affected": result
let where_clause = parse_filter_for_diesel(filter_str).map_err(|e| e.to_string())?; }))
let query = format!(
"UPDATE {} SET {} WHERE {}",
table_str, set_clause, where_clause
);
info!("Executing query: {}", query);
let result = diesel::sql_query(&query).execute(conn).map_err(|e| {
error!("SQL execution error: {}", e);
e.to_string()
})?;
Ok(json!({
"command": "set",
"table": table_str,
"filter": filter_str,
"updates": updates_str,
"rows_affected": result
}))
} }
fn parse_updates(updates_str: &str) -> Result<(String, Vec<String>), Box<dyn Error>> { fn parse_updates(updates_str: &str) -> Result<(String, Vec<String>), Box<dyn Error>> {
let mut set_clauses = Vec::new(); let mut set_clauses = Vec::new();
let mut params = Vec::new(); let mut params = Vec::new();
for (i, update) in updates_str.split(',').enumerate() {
for (i, update) in updates_str.split(',').enumerate() { let parts: Vec<&str> = update.split('=').collect();
let parts: Vec<&str> = update.split('=').collect(); if parts.len() != 2 {
if parts.len() != 2 { return Err("Invalid update format".into());
return Err("Invalid update format".into()); }
} let column = parts[0].trim();
let value = parts[1].trim();
let column = parts[0].trim(); if !column.chars().all(|c| c.is_ascii_alphanumeric() || c == '_') {
let value = parts[1].trim(); return Err("Invalid column name".into());
}
if !column set_clauses.push(format!("{} = ${}", column, i + 1));
.chars() params.push(value.to_string());
.all(|c| c.is_ascii_alphanumeric() || c == '_') }
{ Ok((set_clauses.join(", "), params))
return Err("Invalid column name".into());
}
set_clauses.push(format!("{} = ${}", column, i + 1));
params.push(value.to_string());
}
Ok((set_clauses.join(", "), params))
} }
fn parse_filter_for_diesel(filter_str: &str) -> Result<String, Box<dyn Error>> { fn parse_filter_for_diesel(filter_str: &str) -> Result<String, Box<dyn Error>> {
let parts: Vec<&str> = filter_str.split('=').collect(); let parts: Vec<&str> = filter_str.split('=').collect();
if parts.len() != 2 { if parts.len() != 2 {
return Err("Invalid filter format. Expected 'KEY=VALUE'".into()); return Err("Invalid filter format. Expected 'KEY=VALUE'".into());
} }
let column = parts[0].trim();
let column = parts[0].trim(); let value = parts[1].trim();
let value = parts[1].trim(); if !column.chars().all(|c| c.is_ascii_alphanumeric() || c == '_') {
return Err("Invalid column name in filter".into());
if !column }
.chars() Ok(format!("{} = '{}'", column, value))
.all(|c| c.is_ascii_alphanumeric() || c == '_')
{
return Err("Invalid column name in filter".into());
}
Ok(format!("{} = '{}'", column, value))
} }

View file

@ -1,106 +1,51 @@
use std::sync::Arc; use std::sync::Arc;
use log::{error, info, trace}; use log::trace;
use log::{error};
use crate::shared::state::AppState; use crate::shared::state::AppState;
use crate::shared::models::UserSession; use crate::shared::models::UserSession;
use rhai::Engine; use rhai::Engine;
use rhai::Dynamic; use rhai::Dynamic;
pub fn set_context_keyword(state: Arc<AppState>, user: UserSession, engine: &mut Engine) { pub fn set_context_keyword(state: Arc<AppState>, user: UserSession, engine: &mut Engine) {
// Clone the Redis client (if any) for use inside the async task. let cache = state.cache.clone();
let cache = state.cache.clone(); engine
.register_custom_syntax(&["SET_CONTEXT", "$expr$", "AS", "$expr$"], true, move |context, inputs| {
engine let context_name = context.eval_expression_tree(&inputs[0])?.to_string();
.register_custom_syntax( let context_value = context.eval_expression_tree(&inputs[1])?.to_string();
&["SET_CONTEXT", "$expr$", "AS", "$expr$"], trace!("SET CONTEXT command executed - name: {}, value: {}", context_name, context_value);
true, let redis_key = format!("context:{}:{}:{}", user.user_id, user.id, context_name);
move |context, inputs| { trace!("Constructed Redis key: {} for user {}, session {}, context {}", redis_key, user.user_id, user.id, context_name);
// First expression is the context name, second is the value. if let Some(cache_client) = &cache {
let context_name = context.eval_expression_tree(&inputs[0])?.to_string(); let cache_client = cache_client.clone();
let context_value = context.eval_expression_tree(&inputs[1])?.to_string(); let redis_key = redis_key.clone();
let context_value = context_value.clone();
info!( trace!("Cloned cache_client, redis_key ({}) and context_value (len={}) for async task", redis_key, context_value.len());
"SET CONTEXT command executed - name: {}, value: {}", tokio::spawn(async move {
context_name, let mut conn = match cache_client.get_multiplexed_async_connection().await {
context_value Ok(conn) => {
); trace!("Cache connection established successfully");
conn
// Build a Redis key that is unique per user and session. }
let redis_key = format!( Err(e) => {
"context:{}:{}:{}", error!("Failed to connect to cache: {}", e);
user.user_id, return;
user.id, }
context_name };
); trace!("Executing Redis SET command with key: {} and value length: {}", redis_key, context_value.len());
let result: Result<(), redis::RedisError> = redis::cmd("SET").arg(&redis_key).arg(&context_value).query_async(&mut conn).await;
trace!( match result {
target: "app::set_context", Ok(_) => {
"Constructed Redis key: {} for user {}, session {}, context {}", trace!("Context value successfully stored in cache");
redis_key, }
user.user_id, Err(e) => {
user.id, error!("Failed to set cache value: {}", e);
context_name }
); }
});
// If a Redis client is configured, perform the SET operation asynchronously. } else {
if let Some(cache_client) = &cache { trace!("No cache configured, context not persisted");
trace!("Redis client is available, preparing to set context value"); }
Ok(Dynamic::UNIT)
// Clone values needed inside the async block. },
let cache_client = cache_client.clone(); )
let redis_key = redis_key.clone(); .unwrap();
let context_value = context_value.clone();
trace!(
"Cloned cache_client, redis_key ({}) and context_value (len={}) for async task",
redis_key,
context_value.len()
);
// Spawn a background task so we don't need an async closure here.
tokio::spawn(async move {
trace!("Async task started for SET_CONTEXT operation");
// Acquire an async Redis connection.
let mut conn = match cache_client.get_multiplexed_async_connection().await {
Ok(conn) => {
trace!("Successfully acquired async Redis connection");
conn
}
Err(e) => {
error!("Failed to connect to cache: {}", e);
trace!("Aborting SET_CONTEXT task due to connection error");
return;
}
};
// Perform the SET command.
trace!(
"Executing Redis SET command with key: {} and value length: {}",
redis_key,
context_value.len()
);
let result: Result<(), redis::RedisError> = redis::cmd("SET")
.arg(&redis_key)
.arg(&context_value)
.query_async(&mut conn)
.await;
match result {
Ok(_) => {
trace!("Successfully set context in Redis for key {}", redis_key);
}
Err(e) => {
error!("Failed to set cache value: {}", e);
trace!("SET_CONTEXT Redis SET command failed");
}
}
});
} else {
trace!("No Redis client configured; SET_CONTEXT will not persist to cache");
}
Ok(Dynamic::UNIT)
},
)
.unwrap();
} }

View file

@ -1,206 +1,101 @@
use crate::shared::models::UserSession; use crate::shared::models::UserSession;
use crate::shared::state::AppState; use crate::shared::state::AppState;
use log::{error, info}; use log::{error, trace};
use rhai::{Dynamic, Engine}; use rhai::{Dynamic, Engine};
use std::sync::Arc; use std::sync::Arc;
pub fn set_kb_keyword(state: Arc<AppState>, user: UserSession, engine: &mut Engine) { pub fn set_kb_keyword(state: Arc<AppState>, user: UserSession, engine: &mut Engine) {
let state_clone = Arc::clone(&state); let state_clone = Arc::clone(&state);
let user_clone = user.clone(); let user_clone = user.clone();
engine
engine .register_custom_syntax(&["SET_KB", "$expr$"], false, move |context, inputs| {
.register_custom_syntax(&["SET_KB", "$expr$"], false, move |context, inputs| { let kb_name = context.eval_expression_tree(&inputs[0])?;
let kb_name = context.eval_expression_tree(&inputs[0])?; let kb_name_str = kb_name.to_string().trim_matches('"').to_string();
let kb_name_str = kb_name.to_string().trim_matches('"').to_string(); trace!("SET_KB command executed: {} for user: {}", kb_name_str, user_clone.user_id);
if !kb_name_str.chars().all(|c| c.is_alphanumeric() || c == '_' || c == '-') {
info!( return Err(Box::new(rhai::EvalAltResult::ErrorRuntime("KB name must contain only alphanumeric characters, underscores, and hyphens".into(), rhai::Position::NONE)));
"SET_KB command executed: {} for user: {}", }
kb_name_str, user_clone.user_id if kb_name_str.is_empty() {
); return Err(Box::new(rhai::EvalAltResult::ErrorRuntime("KB name cannot be empty".into(), rhai::Position::NONE)));
}
// Validate KB name (alphanumeric and underscores only) let state_for_task = Arc::clone(&state_clone);
if !kb_name_str let user_for_task = user_clone.clone();
.chars() let kb_name_for_task = kb_name_str.clone();
.all(|c| c.is_alphanumeric() || c == '_' || c == '-') let (tx, rx) = std::sync::mpsc::channel();
{ std::thread::spawn(move || {
return Err(Box::new(rhai::EvalAltResult::ErrorRuntime( let rt = tokio::runtime::Builder::new_multi_thread().worker_threads(2).enable_all().build();
"KB name must contain only alphanumeric characters, underscores, and hyphens" let send_err = if let Ok(rt) = rt {
.into(), let result = rt.block_on(async move {
rhai::Position::NONE, add_kb_to_user(&state_for_task, &user_for_task, &kb_name_for_task, false, None).await
))); });
} tx.send(result).err()
} else {
if kb_name_str.is_empty() { tx.send(Err("failed to build tokio runtime".into())).err()
return Err(Box::new(rhai::EvalAltResult::ErrorRuntime( };
"KB name cannot be empty".into(), if send_err.is_some() {
rhai::Position::NONE, error!("Failed to send result from thread");
))); }
} });
match rx.recv_timeout(std::time::Duration::from_secs(30)) {
let state_for_task = Arc::clone(&state_clone); Ok(Ok(message)) => {
let user_for_task = user_clone.clone(); Ok(Dynamic::from(message))
let kb_name_for_task = kb_name_str.clone(); }
Ok(Err(e)) => Err(Box::new(rhai::EvalAltResult::ErrorRuntime(e.into(), rhai::Position::NONE))),
// Spawn async task to set up KB collection Err(std::sync::mpsc::RecvTimeoutError::Timeout) => {
let (tx, rx) = std::sync::mpsc::channel(); Err(Box::new(rhai::EvalAltResult::ErrorRuntime("SET_KB timed out".into(), rhai::Position::NONE)))
std::thread::spawn(move || { }
let rt = tokio::runtime::Builder::new_multi_thread() Err(e) => Err(Box::new(rhai::EvalAltResult::ErrorRuntime(format!("SET_KB failed: {}", e).into(), rhai::Position::NONE))),
.worker_threads(2) }
.enable_all() })
.build(); .unwrap();
let send_err = if let Ok(rt) = rt {
let result = rt.block_on(async move {
add_kb_to_user(
&state_for_task,
&user_for_task,
&kb_name_for_task,
false,
None,
)
.await
});
tx.send(result).err()
} else {
tx.send(Err("failed to build tokio runtime".into())).err()
};
if send_err.is_some() {
error!("Failed to send result from thread");
}
});
match rx.recv_timeout(std::time::Duration::from_secs(30)) {
Ok(Ok(message)) => {
info!("SET_KB completed: {}", message);
Ok(Dynamic::from(message))
}
Ok(Err(e)) => Err(Box::new(rhai::EvalAltResult::ErrorRuntime(
e.into(),
rhai::Position::NONE,
))),
Err(std::sync::mpsc::RecvTimeoutError::Timeout) => {
Err(Box::new(rhai::EvalAltResult::ErrorRuntime(
"SET_KB timed out".into(),
rhai::Position::NONE,
)))
}
Err(e) => Err(Box::new(rhai::EvalAltResult::ErrorRuntime(
format!("SET_KB failed: {}", e).into(),
rhai::Position::NONE,
))),
}
})
.unwrap();
} }
pub fn add_kb_keyword(state: Arc<AppState>, user: UserSession, engine: &mut Engine) { pub fn add_kb_keyword(state: Arc<AppState>, user: UserSession, engine: &mut Engine) {
let state_clone = Arc::clone(&state); let state_clone = Arc::clone(&state);
let user_clone = user.clone(); let user_clone = user.clone();
engine
engine .register_custom_syntax(&["ADD_KB", "$expr$"], false, move |context, inputs| {
.register_custom_syntax(&["ADD_KB", "$expr$"], false, move |context, inputs| { let kb_name = context.eval_expression_tree(&inputs[0])?;
let kb_name = context.eval_expression_tree(&inputs[0])?; let kb_name_str = kb_name.to_string().trim_matches('"').to_string();
let kb_name_str = kb_name.to_string().trim_matches('"').to_string(); trace!("ADD_KB command executed: {} for user: {}", kb_name_str, user_clone.user_id);
if !kb_name_str.chars().all(|c| c.is_alphanumeric() || c == '_' || c == '-') {
info!( return Err(Box::new(rhai::EvalAltResult::ErrorRuntime("KB name must contain only alphanumeric characters, underscores, and hyphens".into(), rhai::Position::NONE)));
"ADD_KB command executed: {} for user: {}", }
kb_name_str, user_clone.user_id let state_for_task = Arc::clone(&state_clone);
); let user_for_task = user_clone.clone();
let kb_name_for_task = kb_name_str.clone();
// Validate KB name let (tx, rx) = std::sync::mpsc::channel();
if !kb_name_str std::thread::spawn(move || {
.chars() let rt = tokio::runtime::Builder::new_multi_thread().worker_threads(2).enable_all().build();
.all(|c| c.is_alphanumeric() || c == '_' || c == '-') let send_err = if let Ok(rt) = rt {
{ let result = rt.block_on(async move {
return Err(Box::new(rhai::EvalAltResult::ErrorRuntime( add_kb_to_user(&state_for_task, &user_for_task, &kb_name_for_task, false, None).await
"KB name must contain only alphanumeric characters, underscores, and hyphens" });
.into(), tx.send(result).err()
rhai::Position::NONE, } else {
))); tx.send(Err("failed to build tokio runtime".into())).err()
} };
if send_err.is_some() {
let state_for_task = Arc::clone(&state_clone); error!("Failed to send result from thread");
let user_for_task = user_clone.clone(); }
let kb_name_for_task = kb_name_str.clone(); });
match rx.recv_timeout(std::time::Duration::from_secs(30)) {
let (tx, rx) = std::sync::mpsc::channel(); Ok(Ok(message)) => {
std::thread::spawn(move || { Ok(Dynamic::from(message))
let rt = tokio::runtime::Builder::new_multi_thread() }
.worker_threads(2) Ok(Err(e)) => Err(Box::new(rhai::EvalAltResult::ErrorRuntime(e.into(), rhai::Position::NONE))),
.enable_all() Err(std::sync::mpsc::RecvTimeoutError::Timeout) => {
.build(); Err(Box::new(rhai::EvalAltResult::ErrorRuntime("ADD_KB timed out".into(), rhai::Position::NONE)))
}
let send_err = if let Ok(rt) = rt { Err(e) => Err(Box::new(rhai::EvalAltResult::ErrorRuntime(format!("ADD_KB failed: {}", e).into(), rhai::Position::NONE))),
let result = rt.block_on(async move { }
add_kb_to_user( })
&state_for_task, .unwrap();
&user_for_task,
&kb_name_for_task,
false,
None,
)
.await
});
tx.send(result).err()
} else {
tx.send(Err("failed to build tokio runtime".into())).err()
};
if send_err.is_some() {
error!("Failed to send result from thread");
}
});
match rx.recv_timeout(std::time::Duration::from_secs(30)) {
Ok(Ok(message)) => {
info!("ADD_KB completed: {}", message);
Ok(Dynamic::from(message))
}
Ok(Err(e)) => Err(Box::new(rhai::EvalAltResult::ErrorRuntime(
e.into(),
rhai::Position::NONE,
))),
Err(std::sync::mpsc::RecvTimeoutError::Timeout) => {
Err(Box::new(rhai::EvalAltResult::ErrorRuntime(
"ADD_KB timed out".into(),
rhai::Position::NONE,
)))
}
Err(e) => Err(Box::new(rhai::EvalAltResult::ErrorRuntime(
format!("ADD_KB failed: {}", e).into(),
rhai::Position::NONE,
))),
}
})
.unwrap();
} }
async fn add_kb_to_user(_state: &AppState, user: &UserSession, kb_name: &str, is_website: bool, website_url: Option<String>) -> Result<String, String> {
/// Add a KB to user's active KBs (stored in user_kb_associations table) trace!("KB '{}' associated with user '{}' (bot: {}, is_website: {})", kb_name, user.user_id, user.bot_id, is_website);
async fn add_kb_to_user( if is_website {
_state: &AppState, if let Some(_url) = website_url {
user: &UserSession, return Ok(format!("Website KB '{}' added successfully for user", kb_name));
kb_name: &str, }
is_website: bool, }
website_url: Option<String>, Ok(format!("KB '{}' added successfully for user", kb_name))
) -> Result<String, String> {
// TODO: Insert into user_kb_associations table using Diesel
// For now, just log the action
info!(
"KB '{}' associated with user '{}' (bot: {}, is_website: {})",
kb_name, user.user_id, user.bot_id, is_website
);
if is_website {
if let Some(url) = website_url {
info!("Website URL: {}", url);
return Ok(format!(
"Website KB '{}' added successfully for user",
kb_name
));
}
}
Ok(format!("KB '{}' added successfully for user", kb_name))
} }

View file

@ -1,69 +1,43 @@
use diesel::prelude::*; use diesel::prelude::*;
use log::info; use log::{trace};
use serde_json::{json, Value}; use serde_json::{json, Value};
use uuid::Uuid; use uuid::Uuid;
use crate::shared::models::TriggerKind; use crate::shared::models::TriggerKind;
pub fn execute_set_schedule(conn: &mut diesel::PgConnection, cron: &str, script_name: &str, bot_uuid: Uuid) -> Result<Value, Box<dyn std::error::Error>> {
trace!("Scheduling SET SCHEDULE cron: {}, script: {}, bot_id: {:?}", cron, script_name, bot_uuid);
pub fn execute_set_schedule( use crate::shared::models::bots::dsl::bots;
conn: &mut diesel::PgConnection, let bot_exists: bool = diesel::select(diesel::dsl::exists(bots.filter(crate::shared::models::bots::dsl::id.eq(bot_uuid)))).get_result(conn)?;
cron: &str, if !bot_exists {
script_name: &str, return Err(format!("Bot with id {} does not exist", bot_uuid).into());
bot_uuid: Uuid, }
) -> Result<Value, Box<dyn std::error::Error>> { use crate::shared::models::system_automations::dsl::*;
info!( let new_automation = (
"Scheduling SET SCHEDULE cron: {}, script: {}, bot_id: {:?}", bot_id.eq(bot_uuid),
cron, script_name, bot_uuid kind.eq(TriggerKind::Scheduled as i32),
); schedule.eq(cron),
param.eq(script_name),
// First check if bot exists is_active.eq(true),
use crate::shared::models::bots::dsl::bots; );
let bot_exists: bool = diesel::select(diesel::dsl::exists( let update_result = diesel::update(system_automations)
bots.filter(crate::shared::models::bots::dsl::id.eq(bot_uuid)) .filter(bot_id.eq(bot_uuid))
)) .filter(kind.eq(TriggerKind::Scheduled as i32))
.get_result(conn)?; .filter(param.eq(script_name))
.set((
if !bot_exists { schedule.eq(cron),
return Err(format!("Bot with id {} does not exist", bot_uuid).into()); is_active.eq(true),
} last_triggered.eq(None::<chrono::DateTime<chrono::Utc>>),
))
use crate::shared::models::system_automations::dsl::*; .execute(&mut *conn)?;
let result = if update_result == 0 {
let new_automation = ( diesel::insert_into(system_automations).values(&new_automation).execute(&mut *conn)?
bot_id.eq(bot_uuid), } else {
kind.eq(TriggerKind::Scheduled as i32), update_result
schedule.eq(cron), };
param.eq(script_name), Ok(json!({
is_active.eq(true), "command": "set_schedule",
); "schedule": cron,
"script": script_name,
// First try to update existing record "bot_id": bot_uuid.to_string(),
let update_result = diesel::update(system_automations) "rows_affected": result
.filter(bot_id.eq(bot_uuid)) }))
.filter(kind.eq(TriggerKind::Scheduled as i32))
.filter(param.eq(script_name))
.set((
schedule.eq(cron),
is_active.eq(true),
last_triggered.eq(None::<chrono::DateTime<chrono::Utc>>),
))
.execute(&mut *conn)?;
// If no rows were updated, insert new record
let result = if update_result == 0 {
diesel::insert_into(system_automations)
.values(&new_automation)
.execute(&mut *conn)?
} else {
update_result
};
Ok(json!({
"command": "set_schedule",
"schedule": cron,
"script": script_name,
"bot_id": bot_uuid.to_string(),
"rows_affected": result
}))
} }

View file

@ -1,44 +1,31 @@
use crate::shared::state::AppState; use crate::shared::state::AppState;
use crate::shared::models::UserSession; use crate::shared::models::UserSession;
use log::{debug, error, info}; use log::{ error,trace};
use rhai::{Dynamic, Engine}; use rhai::{Dynamic, Engine};
use std::sync::Arc; use std::sync::Arc;
use uuid::Uuid; use uuid::Uuid;
pub fn set_user_keyword(state: Arc<AppState>, user: UserSession, engine: &mut Engine) { pub fn set_user_keyword(state: Arc<AppState>, user: UserSession, engine: &mut Engine) {
let state_clone = Arc::clone(&state); let state_clone = Arc::clone(&state);
let user_clone = user.clone(); let user_clone = user.clone();
engine engine
.register_custom_syntax(&["SET_USER", "$expr$"], true, move |context, inputs| { .register_custom_syntax(&["SET_USER", "$expr$"], true, move |context, inputs| {
let user_id_str = context.eval_expression_tree(&inputs[0])?.to_string(); let user_id_str = context.eval_expression_tree(&inputs[0])?.to_string();
match Uuid::parse_str(&user_id_str) {
info!("SET USER command executed with ID: {}", user_id_str); Ok(user_id) => {
let state_for_spawn = Arc::clone(&state_clone);
match Uuid::parse_str(&user_id_str) { let user_clone_spawn = user_clone.clone();
Ok(user_id) => { let mut session_manager = futures::executor::block_on(state_for_spawn.session_manager.lock());
debug!("Successfully parsed user UUID: {}", user_id); if let Err(e) = session_manager.update_user_id(user_clone_spawn.id, user_id) {
error!("Failed to update user ID in session: {}", e);
let state_for_spawn = Arc::clone(&state_clone); } else {
let user_clone_spawn = user_clone.clone(); trace!("Updated session {} to user ID: {}", user_clone_spawn.id, user_id);
}
let mut session_manager = }
futures::executor::block_on(state_for_spawn.session_manager.lock()); Err(e) => {
trace!("Invalid user ID format: {}", e);
if let Err(e) = session_manager.update_user_id(user_clone_spawn.id, user_id) { }
error!("Failed to update user ID in session: {}", e); }
} else { Ok(Dynamic::UNIT)
info!( })
"Updated session {} to user ID: {}", .unwrap();
user_clone_spawn.id, user_id
);
}
}
Err(e) => {
debug!("Invalid UUID format for SET USER: {}", e);
}
}
Ok(Dynamic::UNIT)
})
.unwrap();
} }

View file

@ -1,44 +1,27 @@
use crate::shared::state::AppState; use crate::shared::state::AppState;
use crate::shared::models::UserSession; use crate::shared::models::UserSession;
use log::info;
use rhai::{Dynamic, Engine}; use rhai::{Dynamic, Engine};
use std::thread; use std::thread;
use std::time::Duration; use std::time::Duration;
pub fn wait_keyword(_state: &AppState, _user: UserSession, engine: &mut Engine) { pub fn wait_keyword(_state: &AppState, _user: UserSession, engine: &mut Engine) {
engine engine
.register_custom_syntax( .register_custom_syntax(&["WAIT", "$expr$"], false, move |context, inputs| {
&["WAIT", "$expr$"], let seconds = context.eval_expression_tree(&inputs[0])?;
false, let duration_secs = if seconds.is::<i64>() {
move |context, inputs| { seconds.cast::<i64>() as f64
let seconds = context.eval_expression_tree(&inputs[0])?; } else if seconds.is::<f64>() {
seconds.cast::<f64>()
let duration_secs = if seconds.is::<i64>() { } else {
seconds.cast::<i64>() as f64 return Err(format!("WAIT expects a number, got: {}", seconds).into());
} else if seconds.is::<f64>() { };
seconds.cast::<f64>() if duration_secs < 0.0 {
} else { return Err("WAIT duration cannot be negative".into());
return Err(format!("WAIT expects a number, got: {}", seconds).into()); }
}; let capped_duration = if duration_secs > 300.0 { 300.0 } else { duration_secs };
let duration = Duration::from_secs_f64(capped_duration);
if duration_secs < 0.0 { thread::sleep(duration);
return Err("WAIT duration cannot be negative".into()); Ok(Dynamic::from(format!("Waited {} seconds", capped_duration)))
} },
)
let capped_duration = if duration_secs > 300.0 { .unwrap();
300.0
} else {
duration_secs
};
info!("WAIT {} seconds (thread sleep)", capped_duration);
let duration = Duration::from_secs_f64(capped_duration);
thread::sleep(duration);
info!("WAIT completed after {} seconds", capped_duration);
Ok(Dynamic::from(format!("Waited {} seconds", capped_duration)))
},
)
.unwrap();
} }

View file

@ -1,430 +1,360 @@
use crate::config::{AppConfig, write_drive_config_to_env}; use crate::config::AppConfig;
use crate::package_manager::{InstallMode, PackageManager}; use crate::package_manager::{InstallMode, PackageManager};
use crate::shared::utils::establish_pg_connection; use crate::shared::utils::establish_pg_connection;
use anyhow::Result; use anyhow::Result;
use diesel::{connection::SimpleConnection};
use dotenvy::dotenv;
use log::{debug, error, info, trace};
use aws_sdk_s3::Client;
use aws_config::BehaviorVersion; use aws_config::BehaviorVersion;
use rand::distr::Alphanumeric; use aws_sdk_s3::Client;
use rand::Rng; use diesel::connection::SimpleConnection;
use log::{error, info, trace};
use std::io::{self, Write}; use std::io::{self, Write};
use std::path::Path; use std::path::Path;
use std::process::Command; use std::process::Command;
use std::sync::{Arc, Mutex};
pub struct ComponentInfo { pub struct ComponentInfo {
pub name: &'static str, pub name: &'static str,
} }
pub struct BootstrapManager { pub struct BootstrapManager {
pub install_mode: InstallMode, pub install_mode: InstallMode,
pub tenant: Option<String>, pub tenant: Option<String>,
pub s3_client: Client,
} }
impl BootstrapManager { impl BootstrapManager {
fn is_postgres_running() -> bool { fn is_postgres_running() -> bool {
match Command::new("pg_isready").arg("-q").status() { match Command::new("pg_isready").arg("-q").status() {
Ok(status) => status.success(), Ok(status) => status.success(),
Err(_) => { Err(_) => Command::new("pgrep")
Command::new("pgrep").arg("postgres").output().map(|o| !o.stdout.is_empty()).unwrap_or(false) .arg("postgres")
} .output()
} .map(|o| !o.stdout.is_empty())
} .unwrap_or(false),
}
pub async fn new(install_mode: InstallMode, tenant: Option<String>) -> Self { }
trace!("Initializing BootstrapManager with mode {:?} and tenant {:?}", install_mode, tenant); pub async fn new(install_mode: InstallMode, tenant: Option<String>) -> Self {
if !Self::is_postgres_running() { trace!(
let pm = PackageManager::new(install_mode.clone(), tenant.clone()) "Initializing BootstrapManager with mode {:?} and tenant {:?}",
.expect("Failed to initialize PackageManager"); install_mode,
if let Err(e) = pm.start("tables") { tenant
error!("Failed to start Tables server component automatically: {}", e); );
panic!("Database not available and auto-start failed."); if !Self::is_postgres_running() {
} else { let pm = PackageManager::new(install_mode.clone(), tenant.clone())
info!("Started Tables server component automatically"); .expect("Failed to initialize PackageManager");
} if let Err(e) = pm.start("tables") {
} error!(
let config = AppConfig::from_env().expect("Failed to load config from env"); "Failed to start Tables server component automatically: {}",
let s3_client = Self::create_s3_operator(&config).await; e
Self { );
install_mode, panic!("Database not available and auto-start failed.");
tenant, } else {
s3_client, trace!("Tables server started successfully");
} }
} }
Self {
pub fn start_all(&mut self) -> Result<()> { install_mode,
let pm = PackageManager::new(self.install_mode.clone(), self.tenant.clone())?; tenant,
let components = vec![ }
ComponentInfo { name: "tables" }, }
ComponentInfo { name: "cache" }, pub fn start_all(&mut self) -> Result<()> {
ComponentInfo { name: "drive" }, let pm = PackageManager::new(self.install_mode.clone(), self.tenant.clone())?;
ComponentInfo { name: "llm" }, let components = vec![
ComponentInfo { name: "email" }, ComponentInfo { name: "tables" },
ComponentInfo { name: "proxy" }, ComponentInfo { name: "cache" },
ComponentInfo { name: "directory" }, ComponentInfo { name: "drive" },
ComponentInfo { name: "alm" }, ComponentInfo { name: "llm" },
ComponentInfo { name: "alm_ci" }, ComponentInfo { name: "email" },
ComponentInfo { name: "dns" }, ComponentInfo { name: "proxy" },
ComponentInfo { name: "webmail" }, ComponentInfo { name: "directory" },
ComponentInfo { name: "meeting" }, ComponentInfo { name: "alm" },
ComponentInfo { name: "table_editor" }, ComponentInfo { name: "alm_ci" },
ComponentInfo { name: "doc_editor" }, ComponentInfo { name: "dns" },
ComponentInfo { name: "desktop" }, ComponentInfo { name: "webmail" },
ComponentInfo { name: "devtools" }, ComponentInfo { name: "meeting" },
ComponentInfo { name: "bot" }, ComponentInfo {
ComponentInfo { name: "system" }, name: "table_editor",
ComponentInfo { name: "vector_db" }, },
ComponentInfo { name: "host" }, ComponentInfo { name: "doc_editor" },
]; ComponentInfo { name: "desktop" },
for component in components { ComponentInfo { name: "devtools" },
if pm.is_installed(component.name) { ComponentInfo { name: "bot" },
pm.start(component.name)?; ComponentInfo { name: "system" },
} ComponentInfo { name: "vector_db" },
} ComponentInfo { name: "host" },
Ok(()) ];
} for component in components {
if pm.is_installed(component.name) {
pub async fn bootstrap(&mut self) -> Result<AppConfig> { pm.start(component.name)?;
if let Ok(tables_server) = std::env::var("TABLES_SERVER") { }
if !tables_server.is_empty() { }
info!("Legacy mode detected (TABLES_SERVER present), skipping bootstrap installation"); Ok(())
let _database_url = std::env::var("DATABASE_URL").unwrap_or_else(|_| { }
let username = std::env::var("TABLES_USERNAME").unwrap_or_else(|_| "gbuser".to_string()); pub async fn bootstrap(&mut self) {
let password = std::env::var("TABLES_PASSWORD").unwrap_or_else(|_| "postgres".to_string()); if let Ok(tables_server) = std::env::var("TABLES_SERVER") {
let server = std::env::var("TABLES_SERVER").unwrap_or_else(|_| "localhost".to_string()); if !tables_server.is_empty() {
let port = std::env::var("TABLES_PORT").unwrap_or_else(|_| "5432".to_string()); info!(
let database = std::env::var("TABLES_DATABASE").unwrap_or_else(|_| "gbserver".to_string()); "Legacy mode detected (TABLES_SERVER present), skipping bootstrap installation"
format!("postgres://{}:{}@{}:{}/{}", username, password, server, port, database) );
}); let _database_url = std::env::var("DATABASE_URL").unwrap_or_else(|_| {
if let Ok(config) = self.load_config_from_csv().await { let username =
return Ok(config); std::env::var("TABLES_USERNAME").unwrap_or_else(|_| "gbuser".to_string());
} let password =
match establish_pg_connection() { std::env::var("TABLES_PASSWORD").unwrap_or_else(|_| "postgres".to_string());
Ok(mut conn) => { let server =
if let Err(e) = self.apply_migrations(&mut conn) { std::env::var("TABLES_SERVER").unwrap_or_else(|_| "localhost".to_string());
log::warn!("Failed to apply migrations: {}", e); let port = std::env::var("TABLES_PORT").unwrap_or_else(|_| "5432".to_string());
} let database =
return Ok(AppConfig::from_database(&mut conn).expect("Failed to load config from DB")); std::env::var("TABLES_DATABASE").unwrap_or_else(|_| "gbserver".to_string());
} format!(
Err(e) => { "postgres://{}:{}@{}:{}/{}",
log::warn!("Failed to connect to database: {}", e); username, password, server, port, database
return Ok(AppConfig::from_env()?); )
} });
} match establish_pg_connection() {
} Ok(mut conn) => {
} if let Err(e) = self.apply_migrations(&mut conn) {
let pm = PackageManager::new(self.install_mode.clone(), self.tenant.clone())?; log::warn!("Failed to apply migrations: {}", e);
let required_components = vec!["tables", "drive", "cache", "llm"]; }
let mut config = AppConfig::from_env().expect("Failed to load config from env"); }
for component in required_components { Err(e) => {
if !pm.is_installed(component) { log::warn!("Failed to connect to database: {}", e);
let termination_cmd = pm }
.components }
.get(component) }
.and_then(|cfg| cfg.binary_name.clone()) }
.unwrap_or_else(|| component.to_string()); let pm = PackageManager::new(self.install_mode.clone(), self.tenant.clone()).unwrap();
if !termination_cmd.is_empty() { let required_components = vec!["tables", "drive", "cache", "llm"];
let check = Command::new("pgrep") for component in required_components {
.arg("-f") if !pm.is_installed(component) {
.arg(&termination_cmd) let termination_cmd = pm
.output(); .components
if let Ok(output) = check { .get(component)
if !output.stdout.is_empty() { .and_then(|cfg| cfg.binary_name.clone())
println!("Component '{}' appears to be already running from a previous install.", component); .unwrap_or_else(|| component.to_string());
println!("Do you want to terminate it? (y/n)"); if !termination_cmd.is_empty() {
let mut input = String::new(); let check = Command::new("pgrep")
io::stdout().flush().unwrap(); .arg("-f")
io::stdin().read_line(&mut input).unwrap(); .arg(&termination_cmd)
if input.trim().eq_ignore_ascii_case("y") { .output();
let _ = Command::new("pkill") if let Ok(output) = check {
.arg("-f") if !output.stdout.is_empty() {
.arg(&termination_cmd) println!("Component '{}' appears to be already running from a previous install.", component);
.status(); println!("Do you want to terminate it? (y/n)");
println!("Terminated existing '{}' process.", component); let mut input = String::new();
} else { io::stdout().flush().unwrap();
println!("Skipping start of '{}' as it is already running.", component); io::stdin().read_line(&mut input).unwrap();
continue; if input.trim().eq_ignore_ascii_case("y") {
} let _ = Command::new("pkill")
} .arg("-f")
} .arg(&termination_cmd)
} .status();
if component == "tables" { println!("Terminated existing '{}' process.", component);
let db_password = self.generate_secure_password(16); } else {
let farm_password = self.generate_secure_password(32); println!(
let env_contents = format!( "Skipping start of '{}' as it is already running.",
"FARM_PASSWORD={}\nDATABASE_URL=postgres://gbuser:{}@localhost:5432/botserver", component
farm_password, db_password );
); continue;
std::fs::write(".env", &env_contents) }
.map_err(|e| anyhow::anyhow!("Failed to write .env file: {}", e))?; }
dotenv().ok(); }
} }
pm.install(component).await?; _ = pm.install(component).await;
if component == "tables" { if component == "tables" {
let mut conn = establish_pg_connection() let mut conn = establish_pg_connection().unwrap();
.map_err(|e| anyhow::anyhow!("Failed to connect to database: {}", e))?; let migration_dir = include_dir::include_dir!("./migrations");
let migration_dir = include_dir::include_dir!("./migrations"); let mut migration_files: Vec<_> = migration_dir
let mut migration_files: Vec<_> = migration_dir .files()
.files() .filter_map(|file| {
.filter_map(|file| { let path = file.path();
let path = file.path(); if path.extension()? == "sql" {
if path.extension()? == "sql" { Some(file)
Some(file) } else {
} else { None
None }
} })
}) .collect();
.collect(); migration_files.sort_by_key(|f| f.path());
migration_files.sort_by_key(|f| f.path()); for migration_file in migration_files {
for migration_file in migration_files { let migration = migration_file
let migration = migration_file .contents_utf8()
.contents_utf8() .ok_or_else(|| anyhow::anyhow!("Migration file is not valid UTF-8"));
.ok_or_else(|| anyhow::anyhow!("Migration file is not valid UTF-8"))?; if let Err(e) = conn.batch_execute(migration.unwrap()) {
if let Err(e) = conn.batch_execute(migration) { log::error!(
log::error!("Failed to execute migration {}: {}", migration_file.path().display(), e); "Failed to execute migration {}: {}",
return Err(e.into()); migration_file.path().display(),
} e
trace!("Successfully executed migration: {}", migration_file.path().display()); );
} }
config = AppConfig::from_database(&mut conn).expect("Failed to load config from DB"); trace!(
} "Successfully executed migration: {}",
} migration_file.path().display()
} );
self.s3_client = Self::create_s3_operator(&config).await; }
let final_config = if let Ok(csv_config) = self.load_config_from_csv().await { }
csv_config }
} else { }
config }
}; async fn create_s3_operator(config: &AppConfig) -> Client {
if std::env::var("DRIVE_SERVER").is_err() { let endpoint = if !config.drive.server.ends_with('/') {
write_drive_config_to_env(&final_config.drive) format!("{}/", config.drive.server)
.map_err(|e| anyhow::anyhow!("Failed to write drive config to .env: {}", e))?; } else {
} config.drive.server.clone()
Ok(final_config) };
} let base_config = aws_config::defaults(BehaviorVersion::latest())
.endpoint_url(endpoint)
async fn create_s3_operator(config: &AppConfig) -> Client { .region("auto")
let endpoint = if !config.drive.server.ends_with('/') { .credentials_provider(aws_sdk_s3::config::Credentials::new(
format!("{}/", config.drive.server) config.drive.access_key.clone(),
} else { config.drive.secret_key.clone(),
config.drive.server.clone() None,
}; None,
let base_config = aws_config::defaults(BehaviorVersion::latest()) "static",
.endpoint_url(endpoint) ))
.region("auto") .load()
.credentials_provider( .await;
aws_sdk_s3::config::Credentials::new( let s3_config = aws_sdk_s3::config::Builder::from(&base_config)
config.drive.access_key.clone(), .force_path_style(true)
config.drive.secret_key.clone(), .build();
None, aws_sdk_s3::Client::from_conf(s3_config)
None, }
"static", pub async fn upload_templates_to_drive(&self, _config: &AppConfig) -> Result<()> {
) let mut conn = establish_pg_connection()?;
) self.create_bots_from_templates(&mut conn)?;
.load() let templates_dir = Path::new("templates");
.await; if !templates_dir.exists() {
let s3_config = aws_sdk_s3::config::Builder::from(&base_config) return Ok(());
.force_path_style(true) }
.build(); let client = Self::create_s3_operator(_config).await;
aws_sdk_s3::Client::from_conf(s3_config) let mut read_dir = tokio::fs::read_dir(templates_dir).await?;
} while let Some(entry) = read_dir.next_entry().await? {
let path = entry.path();
fn generate_secure_password(&self, length: usize) -> String { if path.is_dir()
let mut rng = rand::rng(); && path
std::iter::repeat_with(|| rng.sample(Alphanumeric) as char) .file_name()
.take(length) .unwrap()
.collect() .to_string_lossy()
} .ends_with(".gbai")
{
pub async fn upload_templates_to_drive(&self, _config: &AppConfig) -> Result<()> { let bot_name = path.file_name().unwrap().to_string_lossy().to_string();
let mut conn = establish_pg_connection()?; let bucket = bot_name.trim_start_matches('/').to_string();
self.create_bots_from_templates(&mut conn)?; if client.head_bucket().bucket(&bucket).send().await.is_err() {
let templates_dir = Path::new("templates"); match client.create_bucket().bucket(&bucket).send().await {
if !templates_dir.exists() { Ok(_) => {
return Ok(()); self.upload_directory_recursive(&client, &path, &bucket, "/")
} .await?;
let client = &self.s3_client; }
let mut read_dir = tokio::fs::read_dir(templates_dir).await?; Err(e) => {
while let Some(entry) = read_dir.next_entry().await? { error!("Failed to create bucket {}: {:?}", bucket, e);
let path = entry.path(); return Err(anyhow::anyhow!("Failed to create bucket {}: {}. Check S3 credentials and endpoint configuration", bucket, e));
if path.is_dir() }
&& path }
.file_name() } else {
.unwrap() trace!("Bucket {} already exists", bucket);
.to_string_lossy() }
.ends_with(".gbai") }
{ }
let bot_name = path.file_name().unwrap().to_string_lossy().to_string(); Ok(())
let bucket = bot_name.trim_start_matches('/').to_string(); }
if client.head_bucket().bucket(&bucket).send().await.is_err() { fn create_bots_from_templates(&self, conn: &mut diesel::PgConnection) -> Result<()> {
match client.create_bucket() use crate::shared::models::schema::bots;
.bucket(&bucket) use diesel::prelude::*;
.send() let templates_dir = Path::new("templates");
.await { if !templates_dir.exists() {
Ok(_) => { return Ok(());
trace!("Created bucket: {}", bucket); }
self.upload_directory_recursive(client, &path, &bucket, "/") for entry in std::fs::read_dir(templates_dir)? {
.await?; let entry = entry?;
} let path = entry.path();
Err(e) => { if path.is_dir() && path.extension().map(|e| e == "gbai").unwrap_or(false) {
error!("Failed to create bucket {}: {:?}", bucket, e); let bot_folder = path.file_name().unwrap().to_string_lossy().to_string();
return Err(anyhow::anyhow!( let bot_name = bot_folder.trim_end_matches(".gbai");
"Failed to create bucket {}: {}. Check S3 credentials and endpoint configuration", let existing: Option<String> = bots::table
bucket, e .filter(bots::name.eq(&bot_name))
)); .select(bots::name)
} .first(conn)
} .optional()?;
} else { if existing.is_none() {
debug!("Bucket {} already exists", bucket); diesel::sql_query("INSERT INTO bots (id, name, description, llm_provider, llm_config, context_provider, context_config, is_active) VALUES (gen_random_uuid(), $1, $2, 'openai', '{\"model\": \"gpt-4\", \"temperature\": 0.7}', 'database', '{}', true)").bind::<diesel::sql_types::Text, _>(&bot_name).bind::<diesel::sql_types::Text, _>(format!("Bot for {} template", bot_name)).execute(conn)?;
} } else {
} trace!("Bot {} already exists", bot_name);
} }
Ok(()) }
} }
Ok(())
fn create_bots_from_templates(&self, conn: &mut diesel::PgConnection) -> Result<()> { }
use crate::shared::models::schema::bots; fn upload_directory_recursive<'a>(
use diesel::prelude::*; &'a self,
let templates_dir = Path::new("templates"); client: &'a Client,
if !templates_dir.exists() { local_path: &'a Path,
return Ok(()); bucket: &'a str,
} prefix: &'a str,
for entry in std::fs::read_dir(templates_dir)? { ) -> std::pin::Pin<Box<dyn std::future::Future<Output = Result<()>> + 'a>> {
let entry = entry?; Box::pin(async move {
let path = entry.path(); let _normalized_path = if !local_path.to_string_lossy().ends_with('/') {
if path.is_dir() && path.extension().map(|e| e == "gbai").unwrap_or(false) { format!("{}/", local_path.to_string_lossy())
let bot_folder = path.file_name().unwrap().to_string_lossy().to_string(); } else {
let bot_name = bot_folder.trim_end_matches(".gbai"); local_path.to_string_lossy().to_string()
let existing: Option<String> = bots::table };
.filter(bots::name.eq(&bot_name)) let mut read_dir = tokio::fs::read_dir(local_path).await?;
.select(bots::name) while let Some(entry) = read_dir.next_entry().await? {
.first(conn) let path = entry.path();
.optional()?; let file_name = path.file_name().unwrap().to_string_lossy().to_string();
if existing.is_none() { let mut key = prefix.trim_matches('/').to_string();
diesel::sql_query( if !key.is_empty() {
"INSERT INTO bots (id, name, description, llm_provider, llm_config, context_provider, context_config, is_active) \ key.push('/');
VALUES (gen_random_uuid(), $1, $2, 'openai', '{\"model\": \"gpt-4\", \"temperature\": 0.7}', 'database', '{}', true)" }
) key.push_str(&file_name);
.bind::<diesel::sql_types::Text, _>(&bot_name) if path.is_file() {
.bind::<diesel::sql_types::Text, _>(format!("Bot for {} template", bot_name)) trace!(
.execute(conn)?; "Uploading file {} to bucket {} with key {}",
info!("Created bot: {}", bot_name); path.display(),
} else { bucket,
debug!("Bot {} already exists", bot_name); key
} );
} let content = tokio::fs::read(&path).await?;
} client
Ok(()) .put_object()
} .bucket(bucket)
.key(&key)
fn upload_directory_recursive<'a>( .body(content.into())
&'a self, .send()
client: &'a Client, .await?;
local_path: &'a Path, } else if path.is_dir() {
bucket: &'a str, self.upload_directory_recursive(client, &path, bucket, &key)
prefix: &'a str, .await?;
) -> std::pin::Pin<Box<dyn std::future::Future<Output = Result<()>> + 'a>> { }
Box::pin(async move { }
let _normalized_path = if !local_path.to_string_lossy().ends_with('/') { Ok(())
format!("{}/", local_path.to_string_lossy()) })
} else { }
local_path.to_string_lossy().to_string() fn apply_migrations(&self, conn: &mut diesel::PgConnection) -> Result<()> {
}; let migrations_dir = std::path::Path::new("migrations");
let mut read_dir = tokio::fs::read_dir(local_path).await?; if !migrations_dir.exists() {
while let Some(entry) = read_dir.next_entry().await? { return Ok(());
let path = entry.path(); }
let file_name = path.file_name().unwrap().to_string_lossy().to_string(); let mut sql_files: Vec<_> = std::fs::read_dir(migrations_dir)?
let mut key = prefix.trim_matches('/').to_string(); .filter_map(|entry| entry.ok())
if !key.is_empty() { .filter(|entry| {
key.push('/'); entry
} .path()
key.push_str(&file_name); .extension()
if path.is_file() { .and_then(|s| s.to_str())
trace!("Uploading file {} to bucket {} with key {}", path.display(), bucket, key); .map(|s| s == "sql")
let content = tokio::fs::read(&path).await?; .unwrap_or(false)
client.put_object() })
.bucket(bucket) .collect();
.key(&key) sql_files.sort_by_key(|entry| entry.path());
.body(content.into()) for entry in sql_files {
.send() let path = entry.path();
.await?; let filename = path.file_name().unwrap().to_string_lossy();
} else if path.is_dir() { match std::fs::read_to_string(&path) {
self.upload_directory_recursive(client, &path, bucket, &key).await?; Ok(sql) => match conn.batch_execute(&sql) {
} Err(e) => {
} log::warn!("Migration {} failed: {}", filename, e);
Ok(()) }
}) _ => {}
} },
Err(e) => {
async fn load_config_from_csv(&self) -> Result<AppConfig> { log::warn!("Failed to read migration {}: {}", filename, e);
use crate::config::ConfigManager; }
use uuid::Uuid; }
let client = &self.s3_client; }
let bucket = "default.gbai"; Ok(())
let config_key = "default.gbot/config.csv"; }
match client.get_object()
.bucket(bucket)
.key(config_key)
.send()
.await
{
Ok(response) => {
trace!("Found config.csv in default.gbai");
let bytes = response.body.collect().await?.into_bytes();
let csv_content = String::from_utf8(bytes.to_vec())?;
let config_conn = establish_pg_connection()?;
let config_manager = ConfigManager::new(Arc::new(Mutex::new(config_conn)));
let default_bot_id = Uuid::parse_str("00000000-0000-0000-0000-000000000000")?;
let temp_path = std::env::temp_dir().join("config.csv");
tokio::fs::write(&temp_path, csv_content).await?;
config_manager.sync_gbot_config(&default_bot_id, temp_path.to_str().unwrap())
.map_err(|e| anyhow::anyhow!("Failed to sync gbot config: {}", e))?;
let mut final_conn = establish_pg_connection()?;
let config = AppConfig::from_database(&mut final_conn)?;
Ok(config)
}
Err(e) => {
debug!("No config.csv found in default.gbai: {:?}", e);
Err(e.into())
}
}
}
fn apply_migrations(&self, conn: &mut diesel::PgConnection) -> Result<()> {
let migrations_dir = std::path::Path::new("migrations");
if !migrations_dir.exists() {
return Ok(());
}
let mut sql_files: Vec<_> = std::fs::read_dir(migrations_dir)?
.filter_map(|entry| entry.ok())
.filter(|entry| {
entry
.path()
.extension()
.and_then(|s| s.to_str())
.map(|s| s == "sql")
.unwrap_or(false)
})
.collect();
sql_files.sort_by_key(|entry| entry.path());
for entry in sql_files {
let path = entry.path();
let filename = path.file_name().unwrap().to_string_lossy();
match std::fs::read_to_string(&path) {
Ok(sql) => match conn.batch_execute(&sql) {
Err(e) => {
log::warn!("Migration {} failed: {}", filename, e);
}
_ => {}
},
Err(e) => {
log::warn!("Failed to read migration {}: {}", filename, e);
}
}
}
Ok(())
}
} }

View file

@ -65,7 +65,7 @@ impl BotOrchestrator {
use crate::shared::models::schema::bots::dsl::*; use crate::shared::models::schema::bots::dsl::*;
use diesel::prelude::*; use diesel::prelude::*;
let mut db_conn = self.state.conn.lock().unwrap(); let mut db_conn = self.state.conn.get().unwrap();
let active_bots = bots let active_bots = bots
.filter(is_active.eq(true)) .filter(is_active.eq(true))
.select(id) .select(id)
@ -102,7 +102,7 @@ impl BotOrchestrator {
use diesel::prelude::*; use diesel::prelude::*;
let bot_name: String = { let bot_name: String = {
let mut db_conn = state.conn.lock().unwrap(); let mut db_conn = state.conn.get().unwrap();
bots.filter(id.eq(Uuid::parse_str(&bot_guid)?)) bots.filter(id.eq(Uuid::parse_str(&bot_guid)?))
.select(name) .select(name)
.first(&mut *db_conn) .first(&mut *db_conn)
@ -154,7 +154,7 @@ impl BotOrchestrator {
use diesel::prelude::*; use diesel::prelude::*;
let bot_name: String = { let bot_name: String = {
let mut db_conn = self.state.conn.lock().unwrap(); let mut db_conn = self.state.conn.get().unwrap();
bots.filter(id.eq(Uuid::parse_str(&bot_guid)?)) bots.filter(id.eq(Uuid::parse_str(&bot_guid)?))
.select(name) .select(name)
.first(&mut *db_conn) .first(&mut *db_conn)
@ -251,7 +251,7 @@ impl BotOrchestrator {
..event_response ..event_response
}; };
if let Some(adapter) = self.state.channels.lock().unwrap().get(channel) { if let Some(adapter) = self.state.channels.lock().await.get(channel) {
adapter.send_message(event_response).await?; adapter.send_message(event_response).await?;
} else { } else {
warn!("No channel adapter found for channel: {}", channel); warn!("No channel adapter found for channel: {}", channel);
@ -310,7 +310,7 @@ impl BotOrchestrator {
context_max_length: 0, context_max_length: 0,
}; };
if let Some(adapter) = self.state.channels.lock().unwrap().get(channel) { if let Some(adapter) = self.state.channels.lock().await.get(channel) {
adapter.send_message(confirmation).await?; adapter.send_message(confirmation).await?;
} }
@ -396,7 +396,7 @@ impl BotOrchestrator {
// Get history limit from bot config (default -1 for unlimited) // Get history limit from bot config (default -1 for unlimited)
let history_limit = { let history_limit = {
let config_manager = ConfigManager::new(Arc::clone(&self.state.conn)); let config_manager = ConfigManager::new(self.state.conn.clone());
config_manager config_manager
.get_config( .get_config(
&Uuid::parse_str(&message.bot_id).unwrap_or_default(), &Uuid::parse_str(&message.bot_id).unwrap_or_default(),
@ -496,7 +496,7 @@ impl BotOrchestrator {
// Calculate initial token count // Calculate initial token count
let initial_tokens = crate::shared::utils::estimate_token_count(&prompt); let initial_tokens = crate::shared::utils::estimate_token_count(&prompt);
let config_manager = ConfigManager::new(Arc::clone(&self.state.conn)); let config_manager = ConfigManager::new(self.state.conn.clone());
let max_context_size = config_manager let max_context_size = config_manager
.get_config( .get_config(
&Uuid::parse_str(&message.bot_id).unwrap_or_default(), &Uuid::parse_str(&message.bot_id).unwrap_or_default(),
@ -593,11 +593,11 @@ impl BotOrchestrator {
} }
} }
trace!( info!(
"Stream processing completed, {} chunks processed", "Stream processing completed, {} chunks processed",
chunk_count chunk_count
); );
// Sum tokens from all p.push context builds before submission // Sum tokens from all p.push context builds before submission
let total_tokens = crate::shared::utils::estimate_token_count(&prompt) let total_tokens = crate::shared::utils::estimate_token_count(&prompt)
+ crate::shared::utils::estimate_token_count(&context_data) + crate::shared::utils::estimate_token_count(&context_data)
@ -608,7 +608,7 @@ impl BotOrchestrator {
); );
// Trigger compact prompt if enabled // Trigger compact prompt if enabled
let config_manager = ConfigManager::new(Arc::clone(&self.state.conn)); let config_manager = ConfigManager::new( self.state.conn.clone());
let compact_enabled = config_manager let compact_enabled = config_manager
.get_config( .get_config(
&Uuid::parse_str(&message.bot_id).unwrap_or_default(), &Uuid::parse_str(&message.bot_id).unwrap_or_default(),
@ -636,7 +636,7 @@ impl BotOrchestrator {
sm.save_message(session.id, user_id, 2, &full_response, 1)?; sm.save_message(session.id, user_id, 2, &full_response, 1)?;
} }
let config_manager = ConfigManager::new(Arc::clone(&self.state.conn)); let config_manager = ConfigManager::new(self.state.conn.clone());
let max_context_size = config_manager let max_context_size = config_manager
.get_config( .get_config(
&Uuid::parse_str(&message.bot_id).unwrap_or_default(), &Uuid::parse_str(&message.bot_id).unwrap_or_default(),
@ -710,7 +710,7 @@ impl BotOrchestrator {
let bot_id = session.bot_id; let bot_id = session.bot_id;
let bot_name: String = { let bot_name: String = {
let mut db_conn = state.conn.lock().unwrap(); let mut db_conn = state.conn.get().unwrap();
bots.filter(id.eq(Uuid::parse_str(&bot_id.to_string())?)) bots.filter(id.eq(Uuid::parse_str(&bot_id.to_string())?))
.select(name) .select(name)
.first(&mut *db_conn) .first(&mut *db_conn)
@ -896,7 +896,7 @@ async fn websocket_handler(
use crate::shared::models::schema::bots::dsl::*; use crate::shared::models::schema::bots::dsl::*;
use diesel::prelude::*; use diesel::prelude::*;
let mut db_conn = data.conn.lock().unwrap(); let mut db_conn = data.conn.get().unwrap();
match bots match bots
.filter(is_active.eq(true)) .filter(is_active.eq(true))
.select(id) .select(id)
@ -1010,7 +1010,7 @@ async fn websocket_handler(
use crate::shared::models::schema::bots::dsl::*; use crate::shared::models::schema::bots::dsl::*;
use diesel::prelude::*; use diesel::prelude::*;
let mut db_conn = data.conn.lock().unwrap(); let mut db_conn = data.conn.get().unwrap();
match bots match bots
.filter(is_active.eq(true)) .filter(is_active.eq(true))
.select(id) .select(id)
@ -1069,7 +1069,7 @@ async fn websocket_handler(
use crate::shared::models::schema::bots::dsl::*; use crate::shared::models::schema::bots::dsl::*;
use diesel::prelude::*; use diesel::prelude::*;
let mut db_conn = data.conn.lock().unwrap(); let mut db_conn = data.conn.get().unwrap();
match bots match bots
.filter(is_active.eq(true)) .filter(is_active.eq(true))
.select(id) .select(id)

View file

@ -1,13 +1,10 @@
use crate::shared::utils::{ DbPool};
use diesel::prelude::*; use diesel::prelude::*;
use diesel::pg::PgConnection; use diesel::r2d2::{ConnectionManager, PooledConnection};
use uuid::Uuid;
use log::{trace};
use std::collections::HashMap; use std::collections::HashMap;
use std::fs::OpenOptions; use std::fs::OpenOptions;
use std::io::Write; use std::io::Write;
use std::sync::{Arc, Mutex}; use uuid::Uuid;
use crate::shared::utils::establish_pg_connection;
#[derive(Clone)] #[derive(Clone)]
pub struct AppConfig { pub struct AppConfig {
pub drive: DriveConfig, pub drive: DriveConfig,
@ -15,7 +12,6 @@ pub struct AppConfig {
pub database: DatabaseConfig, pub database: DatabaseConfig,
pub site_path: String, pub site_path: String,
} }
#[derive(Clone)] #[derive(Clone)]
pub struct DatabaseConfig { pub struct DatabaseConfig {
pub username: String, pub username: String,
@ -24,7 +20,6 @@ pub struct DatabaseConfig {
pub port: u32, pub port: u32,
pub database: String, pub database: String,
} }
#[derive(Clone)] #[derive(Clone)]
pub struct DriveConfig { pub struct DriveConfig {
pub server: String, pub server: String,
@ -32,13 +27,11 @@ pub struct DriveConfig {
pub secret_key: String, pub secret_key: String,
pub use_ssl: bool, pub use_ssl: bool,
} }
#[derive(Clone)] #[derive(Clone)]
pub struct ServerConfig { pub struct ServerConfig {
pub host: String, pub host: String,
pub port: u16, pub port: u16,
} }
impl AppConfig { impl AppConfig {
pub fn database_url(&self) -> String { pub fn database_url(&self) -> String {
format!( format!(
@ -51,72 +44,82 @@ impl AppConfig {
) )
} }
} }
impl AppConfig { impl AppConfig {
pub fn from_database(conn: &mut PgConnection) -> Result<Self, diesel::result::Error> { pub fn from_database(pool: &DbPool) -> Result<Self, diesel::result::Error> {
use crate::shared::models::schema::bot_configuration::dsl::*; use crate::shared::models::schema::bot_configuration::dsl::*;
use diesel::prelude::*; let mut conn = pool.get().map_err(|e| {
diesel::result::Error::DatabaseError(
let config_map: HashMap<String, (Uuid, Uuid, String, String, String, bool)> = bot_configuration diesel::result::DatabaseErrorKind::UnableToSendCommand,
.select((id, bot_id, config_key, config_value, config_type, is_encrypted)) Box::new(e.to_string()),
.load::<(Uuid, Uuid, String, String, String, bool)>(conn) )
.unwrap_or_default() })?;
.into_iter() let config_map: HashMap<String, (Uuid, Uuid, String, String, String, bool)> =
.map(|(_, _, key, value, _, _)| (key.clone(), (Uuid::nil(), Uuid::nil(), key, value, String::new(), false))) bot_configuration
.collect(); .select((
id,
bot_id,
config_key,
config_value,
config_type,
is_encrypted,
))
.load::<(Uuid, Uuid, String, String, String, bool)>(&mut conn)
.unwrap_or_default()
.into_iter()
.map(|(_, _, key, value, _, _)| {
(
key.clone(),
(Uuid::nil(), Uuid::nil(), key, value, String::new(), false),
)
})
.collect();
let mut get_str = |key: &str, default: &str| -> String { let mut get_str = |key: &str, default: &str| -> String {
bot_configuration bot_configuration
.filter(config_key.eq(key)) .filter(config_key.eq(key))
.select(config_value) .select(config_value)
.first::<String>(conn) .first::<String>(&mut conn)
.unwrap_or_else(|_| default.to_string()) .unwrap_or_else(|_| default.to_string())
}; };
let get_u32 = |key: &str, default: u32| -> u32 { let get_u32 = |key: &str, default: u32| -> u32 {
config_map config_map
.get(key) .get(key)
.and_then(|v| v.3.parse().ok()) .and_then(|v| v.3.parse().ok())
.unwrap_or(default) .unwrap_or(default)
}; };
let get_u16 = |key: &str, default: u16| -> u16 { let get_u16 = |key: &str, default: u16| -> u16 {
config_map config_map
.get(key) .get(key)
.and_then(|v| v.3.parse().ok()) .and_then(|v| v.3.parse().ok())
.unwrap_or(default) .unwrap_or(default)
}; };
let get_bool = |key: &str, default: bool| -> bool { let get_bool = |key: &str, default: bool| -> bool {
config_map config_map
.get(key) .get(key)
.map(|v| v.3.to_lowercase() == "true") .map(|v| v.3.to_lowercase() == "true")
.unwrap_or(default) .unwrap_or(default)
}; };
let database = DatabaseConfig { let database = DatabaseConfig {
username: match std::env::var("TABLES_USERNAME") { username: match std::env::var("TABLES_USERNAME") {
Ok(v) => v, Ok(v) => v,
Err(_) => get_str("TABLES_USERNAME", "gbuser"), Err(_) => get_str("TABLES_USERNAME", "gbuser"),
}, },
password: match std::env::var("TABLES_PASSWORD") { password: match std::env::var("TABLES_PASSWORD") {
Ok(v) => v, Ok(v) => v,
Err(_) => get_str("TABLES_PASSWORD", ""), Err(_) => get_str("TABLES_PASSWORD", ""),
}, },
server: match std::env::var("TABLES_SERVER") { server: match std::env::var("TABLES_SERVER") {
Ok(v) => v, Ok(v) => v,
Err(_) => get_str("TABLES_SERVER", "localhost"), Err(_) => get_str("TABLES_SERVER", "localhost"),
}, },
port: std::env::var("TABLES_PORT") port: std::env::var("TABLES_PORT")
.ok() .ok()
.and_then(|p| p.parse().ok()) .and_then(|p| p.parse().ok())
.unwrap_or_else(|| get_u32("TABLES_PORT", 5432)), .unwrap_or_else(|| get_u32("TABLES_PORT", 5432)),
database: match std::env::var("TABLES_DATABASE") { database: match std::env::var("TABLES_DATABASE") {
Ok(v) => v, Ok(v) => v,
Err(_) => get_str("TABLES_DATABASE", "botserver"), Err(_) => get_str("TABLES_DATABASE", "botserver"),
}, },
}; };
let drive = DriveConfig { let drive = DriveConfig {
server: { server: {
let server = get_str("DRIVE_SERVER", "http://localhost:9000"); let server = get_str("DRIVE_SERVER", "http://localhost:9000");
@ -130,7 +133,6 @@ database: match std::env::var("TABLES_DATABASE") {
secret_key: get_str("DRIVE_SECRET", "minioadmin"), secret_key: get_str("DRIVE_SECRET", "minioadmin"),
use_ssl: get_bool("DRIVE_USE_SSL", false), use_ssl: get_bool("DRIVE_USE_SSL", false),
}; };
Ok(AppConfig { Ok(AppConfig {
drive, drive,
server: ServerConfig { server: ServerConfig {
@ -139,20 +141,16 @@ database: match std::env::var("TABLES_DATABASE") {
}, },
database, database,
site_path: { site_path: {
let fresh_conn = establish_pg_connection().map_err(|e| diesel::result::Error::DatabaseError(diesel::result::DatabaseErrorKind::UnableToSendCommand, Box::new(e.to_string())))?; ConfigManager::new(pool.clone())
ConfigManager::new(Arc::new(Mutex::new(fresh_conn))) .get_config(&Uuid::nil(), "SITES_ROOT", Some("./botserver-stack/sites"))?
.get_config(&Uuid::nil(), "SITES_ROOT", Some("./botserver-stack/sites"))?.to_string() .to_string()
}, },
}) })
} }
pub fn from_env() -> Result<Self, anyhow::Error> { pub fn from_env() -> Result<Self, anyhow::Error> {
let database_url = std::env::var("DATABASE_URL") let database_url = std::env::var("DATABASE_URL").unwrap();
.unwrap_or_else(|_| "postgres://gbuser:@localhost:5432/botserver".to_string());
let (db_username, db_password, db_server, db_port, db_name) = let (db_username, db_password, db_server, db_port, db_name) =
parse_database_url(&database_url); parse_database_url(&database_url);
let database = DatabaseConfig { let database = DatabaseConfig {
username: db_username, username: db_username,
password: db_password, password: db_password,
@ -160,19 +158,13 @@ database: match std::env::var("TABLES_DATABASE") {
port: db_port, port: db_port,
database: db_name, database: db_name,
}; };
let minio = DriveConfig { let minio = DriveConfig {
server: std::env::var("DRIVE_SERVER") server: std::env::var("DRIVE_SERVER")
.unwrap_or_else(|_| "http://localhost:9000".to_string()), .unwrap();
access_key: std::env::var("DRIVE_ACCESSKEY") access_key: std::env::var("DRIVE_ACCESSKEY")
.unwrap_or_else(|_| "minioadmin".to_string()), .unwrap();
secret_key: std::env::var("DRIVE_SECRET").unwrap_or_else(|_| "minioadmin".to_string()), secret_key: std::env::var("DRIVE_SECRET").unwrap_or_else(|_| "minioadmin".to_string()),
use_ssl: std::env::var("DRIVE_USE_SSL")
.unwrap_or_else(|_| "false".to_string())
.parse()
.unwrap_or(false)
}; };
Ok(AppConfig { Ok(AppConfig {
drive: minio, drive: minio,
server: ServerConfig { server: ServerConfig {
@ -184,54 +176,36 @@ database: match std::env::var("TABLES_DATABASE") {
}, },
database, database,
site_path: { site_path: {
let conn = PgConnection::establish(&database_url)?; let pool = create_conn()?;
ConfigManager::new(Arc::new(Mutex::new(conn))) ConfigManager::new(pool).get_config(
.get_config(&Uuid::nil(), "SITES_ROOT", Some("./botserver-stack/sites"))? &Uuid::nil(),
"SITES_ROOT",
Some("./botserver-stack/sites"),
)?
}, },
}) })
} }
} }
pub fn write_drive_config_to_env(drive: &DriveConfig) -> std::io::Result<()> {
let mut file = OpenOptions::new()
.append(true)
.create(true)
.open(".env")?;
writeln!(file, "")?;
writeln!(file, "DRIVE_SERVER={}", drive.server)?;
writeln!(file, "DRIVE_ACCESSKEY={}", drive.access_key)?;
writeln!(file, "DRIVE_SECRET={}", drive.secret_key)?;
writeln!(file, "DRIVE_USE_SSL={}", drive.use_ssl)?;
Ok(())
}
fn parse_database_url(url: &str) -> (String, String, String, u32, String) { fn parse_database_url(url: &str) -> (String, String, String, u32, String) {
if let Some(stripped) = url.strip_prefix("postgres://") { if let Some(stripped) = url.strip_prefix("postgres://") {
let parts: Vec<&str> = stripped.split('@').collect(); let parts: Vec<&str> = stripped.split('@').collect();
if parts.len() == 2 { if parts.len() == 2 {
let user_pass: Vec<&str> = parts[0].split(':').collect(); let user_pass: Vec<&str> = parts[0].split(':').collect();
let host_db: Vec<&str> = parts[1].split('/').collect(); let host_db: Vec<&str> = parts[1].split('/').collect();
if user_pass.len() >= 2 && host_db.len() >= 2 { if user_pass.len() >= 2 && host_db.len() >= 2 {
let username = user_pass[0].to_string(); let username = user_pass[0].to_string();
let password = user_pass[1].to_string(); let password = user_pass[1].to_string();
let host_port: Vec<&str> = host_db[0].split(':').collect(); let host_port: Vec<&str> = host_db[0].split(':').collect();
let server = host_port[0].to_string(); let server = host_port[0].to_string();
let port = host_port let port = host_port
.get(1) .get(1)
.and_then(|p| p.parse().ok()) .and_then(|p| p.parse().ok())
.unwrap_or(5432); .unwrap_or(5432);
let database = host_db[1].to_string(); let database = host_db[1].to_string();
return (username, password, server, port, database); return (username, password, server, port, database);
} }
} }
} }
( (
"gbuser".to_string(), "gbuser".to_string(),
"".to_string(), "".to_string(),
@ -240,16 +214,23 @@ fn parse_database_url(url: &str) -> (String, String, String, u32, String) {
"botserver".to_string(), "botserver".to_string(),
) )
} }
pub struct ConfigManager { pub struct ConfigManager {
conn: Arc<Mutex<PgConnection>>, conn: DbPool,
} }
impl ConfigManager { impl ConfigManager {
pub fn new(conn: Arc<Mutex<PgConnection>>) -> Self { pub fn new(conn: DbPool) -> Self {
Self { conn } Self { conn }
} }
fn get_conn(
&self,
) -> Result<PooledConnection<ConnectionManager<PgConnection>>, diesel::result::Error> {
self.conn.get().map_err(|e| {
diesel::result::Error::DatabaseError(
diesel::result::DatabaseErrorKind::UnableToSendCommand,
Box::new(e.to_string()),
)
})
}
pub fn get_config( pub fn get_config(
&self, &self,
code_bot_id: &uuid::Uuid, code_bot_id: &uuid::Uuid,
@ -257,72 +238,55 @@ impl ConfigManager {
fallback: Option<&str>, fallback: Option<&str>,
) -> Result<String, diesel::result::Error> { ) -> Result<String, diesel::result::Error> {
use crate::shared::models::schema::bot_configuration::dsl::*; use crate::shared::models::schema::bot_configuration::dsl::*;
let mut conn = self.get_conn()?;
let mut conn = self.conn.lock().unwrap();
let fallback_str = fallback.unwrap_or(""); let fallback_str = fallback.unwrap_or("");
let result = bot_configuration let result = bot_configuration
.filter(bot_id.eq(code_bot_id)) .filter(bot_id.eq(code_bot_id))
.filter(config_key.eq(key)) .filter(config_key.eq(key))
.select(config_value) .select(config_value)
.first::<String>(&mut *conn); .first::<String>(&mut conn);
let value = match result { let value = match result {
Ok(v) => v, Ok(v) => v,
Err(_) => { Err(_) => {
let (default_bot_id, _default_bot_name) = crate::bot::get_default_bot(&mut *conn); let (default_bot_id, _default_bot_name) = crate::bot::get_default_bot(&mut conn);
bot_configuration bot_configuration
.filter(bot_id.eq(default_bot_id)) .filter(bot_id.eq(default_bot_id))
.filter(config_key.eq(key)) .filter(config_key.eq(key))
.select(config_value) .select(config_value)
.first::<String>(&mut *conn) .first::<String>(&mut conn)
.unwrap_or(fallback_str.to_string()) .unwrap_or(fallback_str.to_string())
} }
}; };
Ok(value) Ok(value)
} }
pub fn sync_gbot_config(&self, bot_id: &uuid::Uuid, content: &str) -> Result<usize, String> {
pub fn sync_gbot_config(
&self,
bot_id: &uuid::Uuid,
content: &str,
) -> Result<usize, String> {
use sha2::{Digest, Sha256}; use sha2::{Digest, Sha256};
let mut hasher = Sha256::new(); let mut hasher = Sha256::new();
hasher.update(content.as_bytes()); hasher.update(content.as_bytes());
let mut conn = self let mut conn = self
.conn .get_conn()
.lock() .map_err(|e| format!("Failed to acquire connection: {}", e))?;
.map_err(|e| format!("Failed to acquire lock: {}", e))?;
let mut updated = 0; let mut updated = 0;
for line in content.lines().skip(1) { for line in content.lines().skip(1) {
let parts: Vec<&str> = line.split(',').collect(); let parts: Vec<&str> = line.split(',').collect();
if parts.len() >= 2 { if parts.len() >= 2 {
let key = parts[0].trim(); let key = parts[0].trim();
let value = parts[1].trim(); let value = parts[1].trim();
let new_id: uuid::Uuid = uuid::Uuid::new_v4(); let new_id: uuid::Uuid = uuid::Uuid::new_v4();
diesel::sql_query("INSERT INTO bot_configuration (id, bot_id, config_key, config_value, config_type) VALUES ($1, $2, $3, $4, 'string') ON CONFLICT (bot_id, config_key) DO UPDATE SET config_value = EXCLUDED.config_value, updated_at = NOW()") diesel::sql_query("INSERT INTO bot_configuration (id, bot_id, config_key, config_value, config_type) VALUES ($1, $2, $3, $4, 'string') ON CONFLICT (bot_id, config_key) DO UPDATE SET config_value = EXCLUDED.config_value, updated_at = NOW()")
.bind::<diesel::sql_types::Uuid, _>(new_id) .bind::<diesel::sql_types::Uuid, _>(new_id)
.bind::<diesel::sql_types::Uuid, _>(bot_id) .bind::<diesel::sql_types::Uuid, _>(bot_id)
.bind::<diesel::sql_types::Text, _>(key) .bind::<diesel::sql_types::Text, _>(key)
.bind::<diesel::sql_types::Text, _>(value) .bind::<diesel::sql_types::Text, _>(value)
.execute(&mut *conn) .execute(&mut conn)
.map_err(|e| format!("Failed to update config: {}", e))?; .map_err(|e| format!("Failed to update config: {}", e))?;
updated += 1; updated += 1;
} }
} }
trace!("Synced {} config values for bot {}", updated, bot_id);
Ok(updated) Ok(updated)
} }
} }
fn create_conn() -> Result<DbPool, anyhow::Error> {
crate::shared::utils::create_conn()
.map_err(|e| anyhow::anyhow!("Failed to create database pool: {}", e))
}

View file

@ -132,7 +132,7 @@ impl DriveMonitor {
} }
async fn check_gbot(&self, client: &Client) -> Result<(), Box<dyn Error + Send + Sync>> { async fn check_gbot(&self, client: &Client) -> Result<(), Box<dyn Error + Send + Sync>> {
let config_manager = ConfigManager::new(Arc::clone(&self.state.conn)); let config_manager = ConfigManager::new(self.state.conn.clone());
let mut continuation_token = None; let mut continuation_token = None;
loop { loop {
@ -194,7 +194,7 @@ impl DriveMonitor {
let _ = config_manager.sync_gbot_config(&self.bot_id, &csv_content); let _ = config_manager.sync_gbot_config(&self.bot_id, &csv_content);
if restart_needed { if restart_needed {
if let Err(e) = ensure_llama_servers_running(&self.state).await { if let Err(e) = ensure_llama_servers_running(Arc::clone(&self.state)).await {
log::error!("Failed to restart LLaMA servers after llm- config change: {}", e); log::error!("Failed to restart LLaMA servers after llm- config change: {}", e);
} }
} }

View file

@ -28,29 +28,27 @@ pub async fn embeddings_local(
} }
pub async fn ensure_llama_servers_running( pub async fn ensure_llama_servers_running(
app_state: &Arc<AppState> app_state: Arc<AppState>
) -> Result<(), Box<dyn std::error::Error + Send + Sync>> { ) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
// Get all config values before starting async operations // Get all config values before starting async operations
let config_values = { let config_values = {
let conn_arc = app_state.conn.clone(); let conn_arc = app_state.conn.clone();
tokio::task::spawn_blocking(move || { let default_bot_id = tokio::task::spawn_blocking(move || {
let mut conn = conn_arc.lock().unwrap(); let mut conn = conn_arc.get().unwrap();
let config_manager = ConfigManager::new(Arc::clone(&conn_arc)); bots.filter(name.eq("default"))
let default_bot_id = bots.filter(name.eq("default"))
.select(id) .select(id)
.first::<uuid::Uuid>(&mut *conn) .first::<uuid::Uuid>(&mut *conn)
.unwrap_or_else(|_| uuid::Uuid::nil()); .unwrap_or_else(|_| uuid::Uuid::nil())
}).await?;
( let config_manager = ConfigManager::new(app_state.conn.clone());
default_bot_id, (
config_manager.get_config(&default_bot_id, "llm-url", None).unwrap_or_default(), default_bot_id,
config_manager.get_config(&default_bot_id, "llm-model", None).unwrap_or_default(), config_manager.get_config(&default_bot_id, "llm-url", None).unwrap_or_default(),
config_manager.get_config(&default_bot_id, "embedding-url", None).unwrap_or_default(), config_manager.get_config(&default_bot_id, "llm-model", None).unwrap_or_default(),
config_manager.get_config(&default_bot_id, "embedding-model", None).unwrap_or_default(), config_manager.get_config(&default_bot_id, "embedding-url", None).unwrap_or_default(),
config_manager.get_config(&default_bot_id, "llm-server-path", None).unwrap_or_default(), config_manager.get_config(&default_bot_id, "embedding-model", None).unwrap_or_default(),
) config_manager.get_config(&default_bot_id, "llm-server-path", None).unwrap_or_default(),
}).await? )
}; };
let (_default_bot_id, llm_url, llm_model, embedding_url, embedding_model, llm_server_path) = config_values; let (_default_bot_id, llm_url, llm_model, embedding_url, embedding_model, llm_server_path) = config_values;
@ -90,7 +88,7 @@ let (_default_bot_id, llm_url, llm_model, embedding_url, embedding_model, llm_se
if !llm_running && !llm_model.is_empty() { if !llm_running && !llm_model.is_empty() {
info!("Starting LLM server..."); info!("Starting LLM server...");
tasks.push(tokio::spawn(start_llm_server( tasks.push(tokio::spawn(start_llm_server(
Arc::clone(app_state), Arc::clone(&app_state),
llm_server_path.clone(), llm_server_path.clone(),
llm_model.clone(), llm_model.clone(),
llm_url.clone(), llm_url.clone(),
@ -192,14 +190,12 @@ pub async fn start_llm_server(
let conn = app_state.conn.clone(); let conn = app_state.conn.clone();
let config_manager = ConfigManager::new(conn.clone()); let config_manager = ConfigManager::new(conn.clone());
let default_bot_id = { let mut conn = conn.get().unwrap();
let mut conn = conn.lock().unwrap(); let default_bot_id = bots.filter(name.eq("default"))
bots.filter(name.eq("default"))
.select(id) .select(id)
.first::<uuid::Uuid>(&mut *conn) .first::<uuid::Uuid>(&mut *conn)
.unwrap_or_else(|_| uuid::Uuid::nil()) .unwrap_or_else(|_| uuid::Uuid::nil());
};
let n_moe = config_manager.get_config(&default_bot_id, "llm-server-n-moe", None).unwrap_or("4".to_string()); let n_moe = config_manager.get_config(&default_bot_id, "llm-server-n-moe", None).unwrap_or("4".to_string());
let parallel = config_manager.get_config(&default_bot_id, "llm-server-parallel", None).unwrap_or("1".to_string()); let parallel = config_manager.get_config(&default_bot_id, "llm-server-parallel", None).unwrap_or("1".to_string());
let cont_batching = config_manager.get_config(&default_bot_id, "llm-server-cont-batching", None).unwrap_or("true".to_string()); let cont_batching = config_manager.get_config(&default_bot_id, "llm-server-cont-batching", None).unwrap_or("true".to_string());

View file

@ -5,7 +5,7 @@ use actix_web::{web, App, HttpServer};
use dotenvy::dotenv; use dotenvy::dotenv;
use log::{error, info}; use log::{error, info};
use std::collections::HashMap; use std::collections::HashMap;
use std::sync::{Arc, Mutex}; use std::sync::Arc;
mod auth; mod auth;
mod automation; mod automation;
mod basic; mod basic;
@ -37,299 +37,320 @@ use crate::channels::{VoiceAdapter, WebChannelAdapter};
use crate::config::AppConfig; use crate::config::AppConfig;
#[cfg(feature = "email")] #[cfg(feature = "email")]
use crate::email::{ use crate::email::{
get_emails, get_latest_email_from, list_emails, save_click, save_draft, send_email, get_emails, get_latest_email_from, list_emails, save_click, save_draft, send_email,
}; };
use crate::file::{init_drive, upload_file}; use crate::file::{init_drive, upload_file};
use crate::meet::{voice_start, voice_stop}; use crate::meet::{voice_start, voice_stop};
use crate::package_manager::InstallMode; use crate::package_manager::InstallMode;
use crate::session::{create_session, get_session_history, get_sessions, start_session}; use crate::session::{create_session, get_session_history, get_sessions, start_session};
use crate::shared::state::AppState; use crate::shared::state::AppState;
use crate::shared::utils::create_conn;
use crate::web_server::{bot_index, index, static_files}; use crate::web_server::{bot_index, index, static_files};
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub enum BootstrapProgress { pub enum BootstrapProgress {
StartingBootstrap, StartingBootstrap,
InstallingComponent(String), InstallingComponent(String),
StartingComponent(String), StartingComponent(String),
UploadingTemplates, UploadingTemplates,
ConnectingDatabase, ConnectingDatabase,
StartingLLM, StartingLLM,
BootstrapComplete, BootstrapComplete,
BootstrapError(String), BootstrapError(String),
} }
#[tokio::main] #[tokio::main]
async fn main() -> std::io::Result<()> { async fn main() -> std::io::Result<()> {
use crate::llm::local::ensure_llama_servers_running; use crate::llm::local::ensure_llama_servers_running;
use botserver::config::ConfigManager; use botserver::config::ConfigManager;
let args: Vec<String> = std::env::args().collect(); let args: Vec<String> = std::env::args().collect();
let no_ui = args.contains(&"--noui".to_string()); let no_ui = args.contains(&"--noui".to_string());
if args.len() > 1 { if args.len() > 1 {
let command = &args[1]; let command = &args[1];
match command.as_str() { match command.as_str() {
"install" | "remove" | "list" | "status" | "start" | "stop" | "restart" | "--help" "install" | "remove" | "list" | "status" | "start" | "stop" | "restart" | "--help"
| "-h" => match package_manager::cli::run().await { | "-h" => match package_manager::cli::run().await {
Ok(_) => return Ok(()), Ok(_) => return Ok(()),
Err(e) => { Err(e) => {
eprintln!("CLI error: {}", e); eprintln!("CLI error: {}", e);
return Err(std::io::Error::new( return Err(std::io::Error::new(
std::io::ErrorKind::Other, std::io::ErrorKind::Other,
format!("CLI command failed: {}", e), format!("CLI command failed: {}", e),
)); ));
}
},
"--noui" => {}
_ => {
eprintln!("Unknown command: {}", command);
eprintln!("Run 'botserver --help' for usage information");
return Err(std::io::Error::new(
std::io::ErrorKind::InvalidInput,
format!("Unknown command: {}", command),
));
}
}
}
dotenv().ok();
let (progress_tx, progress_rx) = tokio::sync::mpsc::unbounded_channel::<BootstrapProgress>();
let (state_tx, state_rx) = tokio::sync::mpsc::channel::<Arc<AppState>>(1);
let ui_handle = if !no_ui {
let progress_rx = Arc::new(tokio::sync::Mutex::new(progress_rx));
let state_rx = Arc::new(tokio::sync::Mutex::new(state_rx));
let handle = std::thread::Builder::new()
.name("ui-thread".to_string())
.spawn(move || {
let mut ui = crate::ui_tree::XtreeUI::new();
ui.set_progress_channel(progress_rx.clone());
let rt = tokio::runtime::Builder::new_current_thread()
.enable_all()
.build()
.expect("Failed to create UI runtime");
rt.block_on(async {
tokio::select! {
result = async {
let mut rx = state_rx.lock().await;
rx.recv().await
} => {
if let Some(app_state) = result {
ui.set_app_state(app_state);
}
}
_ = tokio::time::sleep(tokio::time::Duration::from_secs(300)) => {
eprintln!("UI initialization timeout");
}
}
});
if let Err(e) = ui.start_ui() {
eprintln!("UI error: {}", e);
}
})
.expect("Failed to spawn UI thread");
Some(handle)
} else {
env_logger::Builder::from_env(env_logger::Env::default().default_filter_or("info"))
.write_style(env_logger::WriteStyle::Always)
.init();
None
};
let install_mode = if args.contains(&"--container".to_string()) {
InstallMode::Container
} else {
InstallMode::Local
};
let tenant = if let Some(idx) = args.iter().position(|a| a == "--tenant") {
args.get(idx + 1).cloned()
} else {
None
};
let progress_tx_clone = progress_tx.clone();
let cfg = {
progress_tx_clone.send(BootstrapProgress::StartingBootstrap).ok();
let mut bootstrap = BootstrapManager::new(install_mode.clone(), tenant.clone()).await;
let env_path = match std::env::current_dir() {
Ok(dir) => dir.join("botserver-stack").join(".env"),
Err(_) => {
progress_tx_clone.send(BootstrapProgress::BootstrapError("Failed to get current directory".to_string())).ok();
return Err(std::io::Error::new(std::io::ErrorKind::Other, "Failed to get current directory"));
}
};
let cfg = if env_path.exists() {
progress_tx_clone.send(BootstrapProgress::ConnectingDatabase).ok();
match diesel::Connection::establish(&std::env::var("DATABASE_URL").unwrap_or_else(|_| "postgres://gbuser:@localhost:5432/botserver".to_string())) {
Ok(mut conn) => {
AppConfig::from_database(&mut conn).unwrap_or_else(|_| AppConfig::from_env().expect("Failed to load config"))
}
Err(_) => AppConfig::from_env().expect("Failed to load config from env"),
}
} else {
match bootstrap.bootstrap().await {
Ok(config) => config,
Err(e) => {
progress_tx_clone.send(BootstrapProgress::BootstrapError(format!("Bootstrap failed: {}", e))).ok();
match diesel::Connection::establish(&std::env::var("DATABASE_URL").unwrap_or_else(|_| "postgres://gbuser:@localhost:5432/botserver".to_string())) {
Ok(mut conn) => {
AppConfig::from_database(&mut conn).unwrap_or_else(|_| AppConfig::from_env().expect("Failed to load config"))
}
Err(_) => AppConfig::from_env().expect("Failed to load config from env"),
} }
},
"--noui" => {}
_ => {
eprintln!("Unknown command: {}", command);
eprintln!("Run 'botserver --help' for usage information");
return Err(std::io::Error::new(
std::io::ErrorKind::InvalidInput,
format!("Unknown command: {}", command),
));
} }
} }
}
dotenv().ok();
let (progress_tx, progress_rx) = tokio::sync::mpsc::unbounded_channel::<BootstrapProgress>();
let (state_tx, state_rx) = tokio::sync::mpsc::channel::<Arc<AppState>>(1);
let ui_handle = if !no_ui {
let progress_rx = Arc::new(tokio::sync::Mutex::new(progress_rx));
let state_rx = Arc::new(tokio::sync::Mutex::new(state_rx));
let handle = std::thread::Builder::new()
.name("ui-thread".to_string())
.spawn(move || {
let mut ui = crate::ui_tree::XtreeUI::new();
ui.set_progress_channel(progress_rx.clone());
let rt = tokio::runtime::Builder::new_current_thread()
.enable_all()
.build()
.expect("Failed to create UI runtime");
rt.block_on(async {
tokio::select! {
result = async {
let mut rx = state_rx.lock().await;
rx.recv().await
} => {
if let Some(app_state) = result {
ui.set_app_state(app_state);
}
}
_ = tokio::time::sleep(tokio::time::Duration::from_secs(300)) => {
eprintln!("UI initialization timeout");
}
}
});
if let Err(e) = ui.start_ui() {
eprintln!("UI error: {}", e);
}
})
.expect("Failed to spawn UI thread");
Some(handle)
} else {
env_logger::Builder::from_env(env_logger::Env::default().default_filter_or("info"))
.write_style(env_logger::WriteStyle::Always)
.init();
None
}; };
progress_tx_clone.send(BootstrapProgress::StartingComponent("all services".to_string())).ok(); let install_mode = if args.contains(&"--container".to_string()) {
if let Err(e) = bootstrap.start_all() { InstallMode::Container
progress_tx_clone.send(BootstrapProgress::BootstrapError(format!("Failed to start services: {}", e))).ok(); } else {
InstallMode::Local
};
let tenant = if let Some(idx) = args.iter().position(|a| a == "--tenant") {
args.get(idx + 1).cloned()
} else {
None
};
let progress_tx_clone = progress_tx.clone();
let cfg = {
progress_tx_clone
.send(BootstrapProgress::StartingBootstrap)
.ok();
let mut bootstrap = BootstrapManager::new(install_mode.clone(), tenant.clone()).await;
let env_path = std::env::current_dir().unwrap().join(".env");
let cfg = if env_path.exists() {
progress_tx_clone
.send(BootstrapProgress::ConnectingDatabase)
.ok();
match create_conn() {
Ok(pool) => {
let mut conn = pool.get().map_err(|e| {
std::io::Error::new(
std::io::ErrorKind::ConnectionRefused,
format!("Database connection failed: {}", e),
)
})?;
AppConfig::from_database(&pool)
.unwrap_or_else(|_| AppConfig::from_env().expect("Failed to load config"))
}
Err(_) => AppConfig::from_env().expect("Failed to load config from env"),
}
} else {
bootstrap.bootstrap().await;
match create_conn() {
Ok(pool) => AppConfig::from_database(&pool)
.unwrap_or_else(|_| AppConfig::from_env().expect("Failed to load config")),
Err(_) => AppConfig::from_env().expect("Failed to load config from env"),
}
};
progress_tx_clone
.send(BootstrapProgress::StartingComponent(
"all services".to_string(),
))
.ok();
if let Err(e) = bootstrap.start_all() {
progress_tx_clone
.send(BootstrapProgress::BootstrapError(format!(
"Failed to start services: {}",
e
)))
.ok();
}
progress_tx_clone
.send(BootstrapProgress::UploadingTemplates)
.ok();
if let Err(e) = bootstrap.upload_templates_to_drive(&cfg).await {
progress_tx_clone
.send(BootstrapProgress::BootstrapError(format!(
"Failed to upload templates: {}",
e
)))
.ok();
}
Ok::<AppConfig, std::io::Error>(cfg)
};
let cfg = cfg?;
dotenv().ok();
let refreshed_cfg = AppConfig::from_env().expect("Failed to load config from env");
let config = std::sync::Arc::new(refreshed_cfg.clone());
progress_tx.send(BootstrapProgress::ConnectingDatabase).ok();
let pool = match create_conn() {
Ok(pool) => pool,
Err(e) => {
error!("Failed to create database pool: {}", e);
progress_tx
.send(BootstrapProgress::BootstrapError(format!(
"Database pool creation failed: {}",
e
)))
.ok();
return Err(std::io::Error::new(
std::io::ErrorKind::ConnectionRefused,
format!("Database pool creation failed: {}", e),
));
}
};
let cache_url = std::env::var("CACHE_URL")
.or_else(|_| std::env::var("REDIS_URL"))
.unwrap_or_else(|_| "redis://localhost:6379".to_string());
let redis_client = match redis::Client::open(cache_url.as_str()) {
Ok(client) => Some(Arc::new(client)),
Err(e) => {
log::warn!("Failed to connect to Redis: {}", e);
None
}
};
let web_adapter = Arc::new(WebChannelAdapter::new());
let voice_adapter = Arc::new(VoiceAdapter::new());
let drive = init_drive(&config.drive)
.await
.expect("Failed to initialize Drive");
let session_manager = Arc::new(tokio::sync::Mutex::new(session::SessionManager::new(
pool.get().unwrap(),
redis_client.clone(),
)));
let auth_service = Arc::new(tokio::sync::Mutex::new(auth::AuthService::new()));
let config_manager = ConfigManager::new(pool.clone());
let mut bot_conn = pool.get().expect("Failed to get database connection");
let (default_bot_id, _default_bot_name) = crate::bot::get_default_bot(&mut bot_conn);
let llm_url = config_manager
.get_config(&default_bot_id, "llm-url", Some("http://localhost:8081"))
.unwrap_or_else(|_| "http://localhost:8081".to_string());
let llm_provider = Arc::new(crate::llm::OpenAIClient::new(
"empty".to_string(),
Some(llm_url.clone()),
));
let app_state = Arc::new(AppState {
drive: Some(drive),
config: Some(cfg.clone()),
conn: pool.clone(),
bucket_name: "default.gbai".to_string(),
cache: redis_client.clone(),
session_manager: session_manager.clone(),
llm_provider: llm_provider.clone(),
auth_service: auth_service.clone(),
channels: Arc::new(tokio::sync::Mutex::new({
let mut map = HashMap::new();
map.insert(
"web".to_string(),
web_adapter.clone() as Arc<dyn crate::channels::ChannelAdapter>,
);
map
})),
response_channels: Arc::new(tokio::sync::Mutex::new(HashMap::new())),
web_adapter: web_adapter.clone(),
voice_adapter: voice_adapter.clone(),
});
state_tx.send(app_state.clone()).await.ok();
progress_tx.send(BootstrapProgress::BootstrapComplete).ok();
info!(
"Starting HTTP server on {}:{}",
config.server.host, config.server.port
);
let worker_count = std::thread::available_parallelism()
.map(|n| n.get())
.unwrap_or(4);
let bot_orchestrator = BotOrchestrator::new(app_state.clone());
tokio::spawn(async move {
if let Err(e) = bot_orchestrator.mount_all_bots().await {
error!("Failed to mount bots: {}", e);
}
});
let automation_state = app_state.clone();
std::thread::spawn(move || {
let rt = tokio::runtime::Builder::new_current_thread()
.enable_all()
.build()
.expect("Failed to create runtime for automation");
let local = tokio::task::LocalSet::new();
local.block_on(&rt, async move {
let automation = AutomationService::new(automation_state);
automation.spawn().await.ok();
});
});
let app_state_for_llm = app_state.clone();
tokio::spawn(async move {
if let Err(e) = ensure_llama_servers_running(app_state_for_llm).await {
error!("Failed to start LLM servers: {}", e);
}
});
let server_result = HttpServer::new(move || {
let cors = Cors::default()
.allow_any_origin()
.allow_any_method()
.allow_any_header()
.max_age(3600);
let app_state_clone = app_state.clone();
let mut app = App::new()
.wrap(cors)
.wrap(Logger::default())
.wrap(Logger::new("HTTP REQUEST: %a %{User-Agent}i"))
.app_data(web::Data::from(app_state_clone))
.service(auth_handler)
.service(create_session)
.service(get_session_history)
.service(get_sessions)
.service(index)
.service(start_session)
.service(upload_file)
.service(voice_start)
.service(voice_stop)
.service(websocket_handler)
.service(crate::bot::create_bot_handler)
.service(crate::bot::mount_bot_handler)
.service(crate::bot::handle_user_input_handler)
.service(crate::bot::get_user_sessions_handler)
.service(crate::bot::get_conversation_history_handler)
.service(crate::bot::send_warning_handler);
#[cfg(feature = "email")]
{
app = app
.service(get_latest_email_from)
.service(get_emails)
.service(list_emails)
.service(send_email)
.service(save_draft)
.service(save_click);
}
app = app.service(static_files);
app = app.service(bot_index);
app
})
.workers(worker_count)
.bind((config.server.host.clone(), config.server.port))?
.run()
.await;
if let Some(handle) = ui_handle {
handle.join().ok();
} }
progress_tx_clone.send(BootstrapProgress::UploadingTemplates).ok(); server_result
if let Err(e) = bootstrap.upload_templates_to_drive(&cfg).await {
progress_tx_clone.send(BootstrapProgress::BootstrapError(format!("Failed to upload templates: {}", e))).ok();
}
Ok::<AppConfig, std::io::Error>(cfg)
};
let cfg = cfg?;
dotenv().ok();
let refreshed_cfg = AppConfig::from_env().expect("Failed to load config from env");
let config = std::sync::Arc::new(refreshed_cfg.clone());
progress_tx.send(BootstrapProgress::ConnectingDatabase).ok();
let db_pool = match diesel::Connection::establish(&refreshed_cfg.database_url()) {
Ok(conn) => Arc::new(Mutex::new(conn)),
Err(e) => {
error!("Failed to connect to main database: {}", e);
progress_tx.send(BootstrapProgress::BootstrapError(format!("Database connection failed: {}", e))).ok();
return Err(std::io::Error::new(
std::io::ErrorKind::ConnectionRefused,
format!("Database connection failed: {}", e),
));
}
};
let cache_url = std::env::var("CACHE_URL")
.or_else(|_| std::env::var("REDIS_URL"))
.unwrap_or_else(|_| "redis://localhost:6379".to_string());
let redis_client = match redis::Client::open(cache_url.as_str()) {
Ok(client) => Some(Arc::new(client)),
Err(e) => {
log::warn!("Failed to connect to Redis: {}", e);
None
}
};
let web_adapter = Arc::new(WebChannelAdapter::new());
let voice_adapter = Arc::new(VoiceAdapter::new());
let drive = init_drive(&config.drive)
.await
.expect("Failed to initialize Drive");
let session_manager = Arc::new(tokio::sync::Mutex::new(session::SessionManager::new(
diesel::Connection::establish(&cfg.database_url()).unwrap(),
redis_client.clone(),
)));
let auth_service = Arc::new(tokio::sync::Mutex::new(auth::AuthService::new()));
let conn = diesel::Connection::establish(&cfg.database_url()).unwrap();
let config_manager = ConfigManager::new(Arc::new(Mutex::new(conn)));
let mut bot_conn = diesel::Connection::establish(&cfg.database_url()).unwrap();
let (default_bot_id, _default_bot_name) = crate::bot::get_default_bot(&mut bot_conn);
let llm_url = config_manager
.get_config(&default_bot_id, "llm-url", Some("http://localhost:8081"))
.unwrap_or_else(|_| "http://localhost:8081".to_string());
let llm_provider = Arc::new(crate::llm::OpenAIClient::new(
"empty".to_string(),
Some(llm_url.clone()),
));
let app_state = Arc::new(AppState {
drive: Some(drive),
config: Some(cfg.clone()),
conn: db_pool.clone(),
bucket_name: "default.gbai".to_string(),
cache: redis_client.clone(),
session_manager: session_manager.clone(),
llm_provider: llm_provider.clone(),
auth_service: auth_service.clone(),
channels: Arc::new(Mutex::new({
let mut map = HashMap::new();
map.insert(
"web".to_string(),
web_adapter.clone() as Arc<dyn crate::channels::ChannelAdapter>,
);
map
})),
response_channels: Arc::new(tokio::sync::Mutex::new(HashMap::new())),
web_adapter: web_adapter.clone(),
voice_adapter: voice_adapter.clone(),
});
state_tx.send(app_state.clone()).await.ok();
progress_tx.send(BootstrapProgress::BootstrapComplete).ok();
info!("Starting HTTP server on {}:{}", config.server.host, config.server.port);
let worker_count = std::thread::available_parallelism()
.map(|n| n.get())
.unwrap_or(4);
let bot_orchestrator = BotOrchestrator::new(app_state.clone());
tokio::spawn(async move {
if let Err(e) = bot_orchestrator.mount_all_bots().await {
error!("Failed to mount bots: {}", e);
}
});
let automation_state = app_state.clone();
std::thread::spawn(move || {
let rt = tokio::runtime::Builder::new_current_thread()
.enable_all()
.build()
.expect("Failed to create runtime for automation");
let local = tokio::task::LocalSet::new();
local.block_on(&rt, async move {
let automation = AutomationService::new(automation_state);
automation.spawn().await.ok();
});
});
let app_state_for_llm = app_state.clone();
tokio::spawn(async move {
if let Err(e) = ensure_llama_servers_running(&app_state_for_llm).await {
error!("Failed to start LLM servers: {}", e);
}
});
let server_result = HttpServer::new(move || {
let cors = Cors::default()
.allow_any_origin()
.allow_any_method()
.allow_any_header()
.max_age(3600);
let app_state_clone = app_state.clone();
let mut app = App::new()
.wrap(cors)
.wrap(Logger::default())
.wrap(Logger::new("HTTP REQUEST: %a %{User-Agent}i"))
.app_data(web::Data::from(app_state_clone))
.service(auth_handler)
.service(create_session)
.service(get_session_history)
.service(get_sessions)
.service(index)
.service(start_session)
.service(upload_file)
.service(voice_start)
.service(voice_stop)
.service(websocket_handler)
.service(crate::bot::create_bot_handler)
.service(crate::bot::mount_bot_handler)
.service(crate::bot::handle_user_input_handler)
.service(crate::bot::get_user_sessions_handler)
.service(crate::bot::get_conversation_history_handler)
.service(crate::bot::send_warning_handler);
#[cfg(feature = "email")]
{
app = app
.service(get_latest_email_from)
.service(get_emails)
.service(list_emails)
.service(send_email)
.service(save_draft)
.service(save_click);
}
app = app.service(static_files);
app = app.service(bot_index);
app
})
.workers(worker_count)
.bind((config.server.host.clone(), config.server.port))?
.run()
.await;
if let Some(handle) = ui_handle {
handle.join().ok();
}
server_result
} }

View file

@ -4,7 +4,6 @@ use crate::package_manager::{InstallMode, OsType};
use anyhow::Result; use anyhow::Result;
use log::trace; use log::trace;
use rand::distr::Alphanumeric; use rand::distr::Alphanumeric;
use sha2::{Digest, Sha256};
use std::collections::HashMap; use std::collections::HashMap;
use std::path::PathBuf; use std::path::PathBuf;
@ -61,18 +60,20 @@ impl PackageManager {
} }
fn register_drive(&mut self) { fn register_drive(&mut self) {
let drive_password = self.generate_secure_password(16); let drive_password = self.generate_secure_password(16);
let drive_user = "gbdriveuser".to_string(); let drive_user = "gbdriveuser".to_string();
let farm_password =
std::env::var("FARM_PASSWORD").unwrap_or_else(|_| self.generate_secure_password(32));
let encrypted_drive_password = self.encrypt_password(&drive_password, &farm_password);
let env_path = self.base_path.join(".env"); let env_path = std::env::current_dir().unwrap().join(".env");
let env_content = format!( let env_content = format!(
"DRIVE_USER={}\nDRIVE_PASSWORD={}\nFARM_PASSWORD={}\nDRIVE_ROOT_USER={}\nDRIVE_ROOT_PASSWORD={}\n", "\nDRIVE_ACCESSKEY={}\nDRIVE_SECRET={}\nDRIVE_SERVER=http://localhost:9000\n",
drive_user, drive_password, farm_password, drive_user, drive_password drive_user, drive_password
); );
let _ = std::fs::write(&env_path, env_content); let _ = std::fs::OpenOptions::new()
.create(true)
.append(true)
.open(&env_path)
.and_then(|mut file| std::io::Write::write_all(&mut file, env_content.as_bytes()));
self.components.insert( self.components.insert(
"drive".to_string(), "drive".to_string(),
@ -103,72 +104,20 @@ impl PackageManager {
}, },
); );
// Delay updating drive credentials until database is created
let db_env_path = self.base_path.join(".env");
let database_url = std::env::var("DATABASE_URL")
.unwrap_or_else(|_| "postgres://gbuser:@localhost:5432/botserver".to_string());
let db_line = format!("DATABASE_URL={}\n", database_url);
let _ = std::fs::write(&db_env_path, db_line);
// Append drive credentials after database creation
let env_path = self.base_path.join(".env");
let drive_lines = format!(
"DRIVE_USER={}\nDRIVE_PASSWORD={}\nFARM_PASSWORD={}\nDRIVE_ROOT_USER={}\nDRIVE_ROOT_PASSWORD={}\n",
drive_user, drive_password, farm_password, drive_user, drive_password
);
let _ = std::fs::OpenOptions::new()
.append(true)
.open(&env_path)
.and_then(|mut file| std::io::Write::write_all(&mut file, drive_lines.as_bytes()));
// Update drive credentials in database only after database is ready
if std::process::Command::new("pg_isready")
.arg("-h")
.arg("localhost")
.arg("-p")
.arg("5432")
.output()
.map(|o| o.status.success())
.unwrap_or(false)
{
self.update_drive_credentials_in_database(&encrypted_drive_password)
.ok();
}
} }
fn update_drive_credentials_in_database(&self, encrypted_drive_password: &str) -> Result<()> {
use crate::shared::models::schema::bots::dsl::*;
use diesel::pg::PgConnection;
use diesel::prelude::*;
use uuid::Uuid;
let database_url = std::env::var("DATABASE_URL")
.unwrap_or_else(|_| "postgres://gbuser:@localhost:5432/botserver".to_string());
if let Ok(mut conn) = PgConnection::establish(&database_url) {
let system_bot_id = Uuid::parse_str("00000000-0000-0000-0000-000000000000")?;
diesel::update(bots)
.filter(id.eq(system_bot_id))
.set(llm_config.eq(serde_json::json!({
"encrypted_drive_password": encrypted_drive_password,
})))
.execute(&mut conn)?;
trace!("Updated drive credentials in database for system bot");
}
Ok(())
}
fn register_tables(&mut self) { fn register_tables(&mut self) {
let db_password = std::env::var("DATABASE_URL")
.ok() let db_env_path = std::env::current_dir().unwrap().join(".env");
.and_then(|url| { let db_password = self.generate_secure_password(32);
if let Some(stripped) = url.strip_prefix("postgres://gbuser:") { let database_url = std::env::var("DATABASE_URL")
stripped.split('@').next().map(|s| s.to_string()) .unwrap_or_else(|_| format!("postgres://gbuser:{}@localhost:5432/botserver", db_password));
} else { let db_line = format!("DATABASE_URL={}\n", database_url);
None
}
}) let _ = std::fs::write(&db_env_path, db_line);
.unwrap_or_else(|| self.generate_secure_password(16));
self.components.insert( self.components.insert(
"tables".to_string(), "tables".to_string(),
@ -848,10 +797,4 @@ impl PackageManager {
.collect() .collect()
} }
fn encrypt_password(&self, password: &str, key: &str) -> String {
let mut hasher = Sha256::new();
hasher.update(key.as_bytes());
hasher.update(password.as_bytes());
format!("{:x}", hasher.finalize())
}
} }

View file

@ -4,31 +4,30 @@ use crate::shared::state::AppState;
use actix_web::{web, HttpResponse, Result}; use actix_web::{web, HttpResponse, Result};
use chrono::Utc; use chrono::Utc;
use diesel::prelude::*; use diesel::prelude::*;
use diesel::r2d2::{ConnectionManager, PooledConnection};
use diesel::PgConnection; use diesel::PgConnection;
use log::{debug, error, info, warn}; use log::trace;
use log::{error, warn};
use redis::Client; use redis::Client;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use std::collections::{HashMap, HashSet}; use std::collections::{HashMap, HashSet};
use std::error::Error; use std::error::Error;
use std::sync::Arc; use std::sync::Arc;
use uuid::Uuid; use uuid::Uuid;
#[derive(Clone, Serialize, Deserialize)] #[derive(Clone, Serialize, Deserialize)]
pub struct SessionData { pub struct SessionData {
pub id: Uuid, pub id: Uuid,
pub user_id: Option<Uuid>, pub user_id: Option<Uuid>,
pub data: String, pub data: String,
} }
pub struct SessionManager { pub struct SessionManager {
conn: PgConnection, conn: PooledConnection<ConnectionManager<PgConnection>>,
sessions: HashMap<Uuid, SessionData>, sessions: HashMap<Uuid, SessionData>,
waiting_for_input: HashSet<Uuid>, waiting_for_input: HashSet<Uuid>,
redis: Option<Arc<Client>>, redis: Option<Arc<Client>>,
} }
impl SessionManager { impl SessionManager {
pub fn new(conn: PgConnection, redis_client: Option<Arc<Client>>) -> Self { pub fn new(conn: PooledConnection<ConnectionManager<PgConnection>>, redis_client: Option<Arc<Client>>) -> Self {
SessionManager { SessionManager {
conn, conn,
sessions: HashMap::new(), sessions: HashMap::new(),
@ -36,13 +35,12 @@ impl SessionManager {
redis: redis_client, redis: redis_client,
} }
} }
pub fn provide_input( pub fn provide_input(
&mut self, &mut self,
session_id: Uuid, session_id: Uuid,
input: String, input: String,
) -> Result<Option<String>, Box<dyn Error + Send + Sync>> { ) -> Result<Option<String>, Box<dyn Error + Send + Sync>> {
info!( trace!(
"SessionManager.provide_input called for session {}", "SessionManager.provide_input called for session {}",
session_id session_id
); );
@ -61,11 +59,9 @@ impl SessionManager {
Ok(Some("user_input".to_string())) Ok(Some("user_input".to_string()))
} }
} }
pub fn mark_waiting(&mut self, session_id: Uuid) { pub fn mark_waiting(&mut self, session_id: Uuid) {
self.waiting_for_input.insert(session_id); self.waiting_for_input.insert(session_id);
} }
pub fn get_session_by_id( pub fn get_session_by_id(
&mut self, &mut self,
session_id: Uuid, session_id: Uuid,
@ -77,7 +73,6 @@ impl SessionManager {
.optional()?; .optional()?;
Ok(result) Ok(result)
} }
pub fn get_user_session( pub fn get_user_session(
&mut self, &mut self,
uid: Uuid, uid: Uuid,
@ -92,7 +87,6 @@ impl SessionManager {
.optional()?; .optional()?;
Ok(result) Ok(result)
} }
pub fn get_or_create_user_session( pub fn get_or_create_user_session(
&mut self, &mut self,
uid: Uuid, uid: Uuid,
@ -104,24 +98,19 @@ impl SessionManager {
} }
self.create_session(uid, bid, session_title).map(Some) self.create_session(uid, bid, session_title).map(Some)
} }
pub fn get_or_create_anonymous_user( pub fn get_or_create_anonymous_user(
&mut self, &mut self,
uid: Option<Uuid>, uid: Option<Uuid>,
) -> Result<Uuid, Box<dyn Error + Send + Sync>> { ) -> Result<Uuid, Box<dyn Error + Send + Sync>> {
use crate::shared::models::users::dsl as users_dsl; use crate::shared::models::users::dsl as users_dsl;
let user_id = uid.unwrap_or_else(Uuid::new_v4); let user_id = uid.unwrap_or_else(Uuid::new_v4);
let user_exists: Option<Uuid> = users_dsl::users let user_exists: Option<Uuid> = users_dsl::users
.filter(users_dsl::id.eq(user_id)) .filter(users_dsl::id.eq(user_id))
.select(users_dsl::id) .select(users_dsl::id)
.first(&mut self.conn) .first(&mut self.conn)
.optional()?; .optional()?;
if user_exists.is_none() { if user_exists.is_none() {
let now = Utc::now(); let now = Utc::now();
info!("Creating anonymous user with ID {}", user_id);
diesel::insert_into(users_dsl::users) diesel::insert_into(users_dsl::users)
.values(( .values((
users_dsl::id.eq(user_id), users_dsl::id.eq(user_id),
@ -137,10 +126,8 @@ impl SessionManager {
)) ))
.execute(&mut self.conn)?; .execute(&mut self.conn)?;
} }
Ok(user_id) Ok(user_id)
} }
pub fn create_session( pub fn create_session(
&mut self, &mut self,
uid: Uuid, uid: Uuid,
@ -148,11 +135,8 @@ impl SessionManager {
session_title: &str, session_title: &str,
) -> Result<UserSession, Box<dyn Error + Send + Sync>> { ) -> Result<UserSession, Box<dyn Error + Send + Sync>> {
use crate::shared::models::user_sessions::dsl::*; use crate::shared::models::user_sessions::dsl::*;
// Ensure user exists (create anonymous if needed)
let verified_uid = self.get_or_create_anonymous_user(Some(uid))?; let verified_uid = self.get_or_create_anonymous_user(Some(uid))?;
let now = Utc::now(); let now = Utc::now();
let inserted: UserSession = diesel::insert_into(user_sessions) let inserted: UserSession = diesel::insert_into(user_sessions)
.values(( .values((
id.eq(Uuid::new_v4()), id.eq(Uuid::new_v4()),
@ -170,18 +154,14 @@ impl SessionManager {
error!("Failed to create session in database: {}", e); error!("Failed to create session in database: {}", e);
e e
})?; })?;
Ok(inserted) Ok(inserted)
} }
fn _clear_messages(&mut self, _session_id: Uuid) -> Result<(), Box<dyn Error + Send + Sync>> { fn _clear_messages(&mut self, _session_id: Uuid) -> Result<(), Box<dyn Error + Send + Sync>> {
use crate::shared::models::message_history::dsl::*; use crate::shared::models::message_history::dsl::*;
diesel::delete(message_history.filter(session_id.eq(session_id))) diesel::delete(message_history.filter(session_id.eq(session_id)))
.execute(&mut self.conn)?; .execute(&mut self.conn)?;
Ok(()) Ok(())
} }
pub fn save_message( pub fn save_message(
&mut self, &mut self,
sess_id: Uuid, sess_id: Uuid,
@ -191,8 +171,6 @@ impl SessionManager {
msg_type: i32, msg_type: i32,
) -> Result<(), Box<dyn Error + Send + Sync>> { ) -> Result<(), Box<dyn Error + Send + Sync>> {
use crate::shared::models::message_history::dsl::*; use crate::shared::models::message_history::dsl::*;
// Check if this exact message already exists
let exists = message_history let exists = message_history
.filter(session_id.eq(sess_id)) .filter(session_id.eq(sess_id))
.filter(user_id.eq(uid)) .filter(user_id.eq(uid))
@ -202,18 +180,14 @@ impl SessionManager {
.select(id) .select(id)
.first::<Uuid>(&mut self.conn) .first::<Uuid>(&mut self.conn)
.optional()?; .optional()?;
if exists.is_some() { if exists.is_some() {
debug!("Duplicate message detected, skipping save");
return Ok(()); return Ok(());
} }
let next_index = message_history let next_index = message_history
.filter(session_id.eq(sess_id)) .filter(session_id.eq(sess_id))
.count() .count()
.get_result::<i64>(&mut self.conn) .get_result::<i64>(&mut self.conn)
.unwrap_or(0); .unwrap_or(0);
diesel::insert_into(message_history) diesel::insert_into(message_history)
.values(( .values((
id.eq(Uuid::new_v4()), id.eq(Uuid::new_v4()),
@ -226,14 +200,13 @@ impl SessionManager {
created_at.eq(chrono::Utc::now()), created_at.eq(chrono::Utc::now()),
)) ))
.execute(&mut self.conn)?; .execute(&mut self.conn)?;
trace!(
debug!(
"Message saved for session {} with index {}", "Message saved for session {} with index {}",
sess_id, next_index sess_id,
next_index
); );
Ok(()) Ok(())
} }
pub async fn update_session_context( pub async fn update_session_context(
&mut self, &mut self,
session_id: &Uuid, session_id: &Uuid,
@ -241,25 +214,21 @@ impl SessionManager {
context_data: String, context_data: String,
) -> Result<(), Box<dyn Error + Send + Sync>> { ) -> Result<(), Box<dyn Error + Send + Sync>> {
use redis::Commands; use redis::Commands;
let redis_key = format!("context:{}:{}", user_id, session_id); let redis_key = format!("context:{}:{}", user_id, session_id);
if let Some(redis_client) = &self.redis { if let Some(redis_client) = &self.redis {
let mut conn = redis_client.get_connection()?; let mut conn = redis_client.get_connection()?;
conn.set::<_, _, ()>(&redis_key, &context_data)?; conn.set::<_, _, ()>(&redis_key, &context_data)?;
info!("Updated context in Redis for key {}", redis_key);
} else { } else {
warn!("No Redis client configured, context not persisted"); warn!("No Redis client configured, context not persisted");
} }
Ok(()) Ok(())
} }
pub async fn get_session_context_data( pub async fn get_session_context_data(
&self, &self,
session_id: &Uuid, session_id: &Uuid,
user_id: &Uuid, user_id: &Uuid,
) -> Result<String, Box<dyn Error + Send + Sync>> { ) -> Result<String, Box<dyn Error + Send + Sync>> {
use redis::Commands; use redis::Commands;
let base_key = format!("context:{}:{}", user_id, session_id); let base_key = format!("context:{}:{}", user_id, session_id);
if let Some(redis_client) = &self.redis { if let Some(redis_client) = &self.redis {
let conn_option = redis_client let conn_option = redis_client
@ -269,17 +238,14 @@ impl SessionManager {
e e
}) })
.ok(); .ok();
if let Some(mut connection) = conn_option { if let Some(mut connection) = conn_option {
// First cache trip: get context name
match connection.get::<_, Option<String>>(&base_key) { match connection.get::<_, Option<String>>(&base_key) {
Ok(Some(context_name)) => { Ok(Some(context_name)) => {
debug!("Found context name '{}' for key {}", context_name, base_key); let full_key =
// Second cache trip: get actual context value format!("context:{}:{}:{}", user_id, session_id, context_name);
let full_key = format!("context:{}:{}:{}", user_id, session_id, context_name);
match connection.get::<_, Option<String>>(&full_key) { match connection.get::<_, Option<String>>(&full_key) {
Ok(Some(context_value)) => { Ok(Some(context_value)) => {
debug!( trace!(
"Retrieved context value from Cache for key {}: {} chars", "Retrieved context value from Cache for key {}: {} chars",
full_key, full_key,
context_value.len() context_value.len()
@ -287,7 +253,7 @@ impl SessionManager {
return Ok(context_value); return Ok(context_value);
} }
Ok(None) => { Ok(None) => {
debug!("No context value found for key {}", full_key); trace!("No context value found for key: {}", full_key);
} }
Err(e) => { Err(e) => {
warn!("Failed to retrieve context value from Cache: {}", e); warn!("Failed to retrieve context value from Cache: {}", e);
@ -295,7 +261,7 @@ impl SessionManager {
} }
} }
Ok(None) => { Ok(None) => {
debug!("No context name found for key {}", base_key); trace!("No context name found for key: {}", base_key);
} }
Err(e) => { Err(e) => {
warn!("Failed to retrieve context name from Cache: {}", e); warn!("Failed to retrieve context name from Cache: {}", e);
@ -303,25 +269,19 @@ impl SessionManager {
} }
} }
} }
Ok(String::new()) Ok(String::new())
} }
pub fn get_conversation_history( pub fn get_conversation_history(
&mut self, &mut self,
sess_id: Uuid, sess_id: Uuid,
_uid: Uuid, _uid: Uuid,
) -> Result<Vec<(String, String)>, Box<dyn Error + Send + Sync>> { ) -> Result<Vec<(String, String)>, Box<dyn Error + Send + Sync>> {
use crate::shared::models::message_history::dsl::*; use crate::shared::models::message_history::dsl::*;
let messages = message_history let messages = message_history
.filter(session_id.eq(sess_id)) .filter(session_id.eq(sess_id))
.order(message_index.asc()) .order(message_index.asc())
.select((role, content_encrypted)) .select((role, content_encrypted))
.load::<(i32, String)>(&mut self.conn)?; .load::<(i32, String)>(&mut self.conn)?;
let mut history: Vec<(String, String)> = Vec::new(); let mut history: Vec<(String, String)> = Vec::new();
for (other_role, content) in messages { for (other_role, content) in messages {
let role_str = match other_role { let role_str = match other_role {
@ -334,13 +294,11 @@ impl SessionManager {
} }
Ok(history) Ok(history)
} }
pub fn get_user_sessions( pub fn get_user_sessions(
&mut self, &mut self,
uid: Uuid, uid: Uuid,
) -> Result<Vec<UserSession>, Box<dyn Error + Send + Sync>> { ) -> Result<Vec<UserSession>, Box<dyn Error + Send + Sync>> {
use crate::shared::models::user_sessions::dsl::*; use crate::shared::models::user_sessions::dsl::*;
let sessions = if uid == Uuid::nil() { let sessions = if uid == Uuid::nil() {
user_sessions user_sessions
.order(created_at.desc()) .order(created_at.desc())
@ -351,42 +309,33 @@ impl SessionManager {
.order(created_at.desc()) .order(created_at.desc())
.load::<UserSession>(&mut self.conn)? .load::<UserSession>(&mut self.conn)?
}; };
Ok(sessions) Ok(sessions)
} }
pub fn update_user_id( pub fn update_user_id(
&mut self, &mut self,
session_id: Uuid, session_id: Uuid,
new_user_id: Uuid, new_user_id: Uuid,
) -> Result<(), Box<dyn Error + Send + Sync>> { ) -> Result<(), Box<dyn Error + Send + Sync>> {
use crate::shared::models::user_sessions::dsl::*; use crate::shared::models::user_sessions::dsl::*;
let updated_count = diesel::update(user_sessions.filter(id.eq(session_id))) let updated_count = diesel::update(user_sessions.filter(id.eq(session_id)))
.set((user_id.eq(new_user_id), updated_at.eq(chrono::Utc::now()))) .set((user_id.eq(new_user_id), updated_at.eq(chrono::Utc::now())))
.execute(&mut self.conn)?; .execute(&mut self.conn)?;
if updated_count == 0 { if updated_count == 0 {
warn!("No session found with ID: {}", session_id); warn!("No session found with ID: {}", session_id);
} else { } else {
debug!("Updated user ID for session {}", session_id); trace!("Updated user ID for session: {}", session_id);
} }
Ok(()) Ok(())
} }
} }
#[actix_web::post("/api/sessions")] #[actix_web::post("/api/sessions")]
async fn create_session(data: web::Data<AppState>) -> Result<HttpResponse> { async fn create_session(data: web::Data<AppState>) -> Result<HttpResponse> {
let user_id = Uuid::parse_str("00000000-0000-0000-0000-000000000001").unwrap(); let user_id = Uuid::parse_str("00000000-0000-0000-0000-000000000001").unwrap();
let bot_id = Uuid::nil(); let bot_id = Uuid::nil();
// Acquire lock briefly, then release before performing blocking DB operations
let session_result = { let session_result = {
let mut sm = data.session_manager.lock().await; let mut sm = data.session_manager.lock().await;
sm.get_or_create_user_session(user_id, bot_id, "New Conversation") sm.get_or_create_user_session(user_id, bot_id, "New Conversation")
}; };
let session = match session_result { let session = match session_result {
Ok(Some(s)) => s, Ok(Some(s)) => s,
Ok(None) => { Ok(None) => {
@ -400,14 +349,12 @@ async fn create_session(data: web::Data<AppState>) -> Result<HttpResponse> {
.json(serde_json::json!({"error": e.to_string()}))); .json(serde_json::json!({"error": e.to_string()})));
} }
}; };
Ok(HttpResponse::Ok().json(serde_json::json!({ Ok(HttpResponse::Ok().json(serde_json::json!({
"session_id": session.id, "session_id": session.id,
"title": "New Conversation", "title": "New Conversation",
"created_at": Utc::now() "created_at": Utc::now()
}))) })))
} }
#[actix_web::get("/api/sessions")] #[actix_web::get("/api/sessions")]
async fn get_sessions(data: web::Data<AppState>) -> Result<HttpResponse> { async fn get_sessions(data: web::Data<AppState>) -> Result<HttpResponse> {
let user_id = Uuid::parse_str("00000000-0000-0000-0000-000000000001").unwrap(); let user_id = Uuid::parse_str("00000000-0000-0000-0000-000000000001").unwrap();
@ -421,14 +368,9 @@ async fn get_sessions(data: web::Data<AppState>) -> Result<HttpResponse> {
} }
} }
} }
#[actix_web::post("/api/sessions/{session_id}/start")] #[actix_web::post("/api/sessions/{session_id}/start")]
async fn start_session( async fn start_session(data: web::Data<AppState>, path: web::Path<String>) -> Result<HttpResponse> {
data: web::Data<AppState>,
path: web::Path<String>,
) -> Result<HttpResponse> {
let session_id = path.into_inner(); let session_id = path.into_inner();
match Uuid::parse_str(&session_id) { match Uuid::parse_str(&session_id) {
Ok(session_uuid) => { Ok(session_uuid) => {
let mut session_manager = data.session_manager.lock().await; let mut session_manager = data.session_manager.lock().await;
@ -436,15 +378,13 @@ async fn start_session(
Ok(Some(_session)) => { Ok(Some(_session)) => {
session_manager.mark_waiting(session_uuid); session_manager.mark_waiting(session_uuid);
Ok(HttpResponse::Ok().json(serde_json::json!({ Ok(HttpResponse::Ok().json(serde_json::json!({
"status": "started", "status": "started",
"session_id": session_id "session_id": session_id
})))
}
Ok(None) => {
Ok(HttpResponse::NotFound().json(serde_json::json!({
"error": "Session not found"
}))) })))
} }
Ok(None) => Ok(HttpResponse::NotFound().json(serde_json::json!({
"error": "Session not found"
}))),
Err(e) => { Err(e) => {
error!("Failed to start session {}: {}", session_id, e); error!("Failed to start session {}: {}", session_id, e);
Ok(HttpResponse::InternalServerError() Ok(HttpResponse::InternalServerError()
@ -458,7 +398,6 @@ async fn start_session(
} }
} }
} }
#[actix_web::get("/api/sessions/{session_id}")] #[actix_web::get("/api/sessions/{session_id}")]
async fn get_session_history( async fn get_session_history(
data: web::Data<AppState>, data: web::Data<AppState>,
@ -466,7 +405,6 @@ async fn get_session_history(
) -> Result<HttpResponse> { ) -> Result<HttpResponse> {
let session_id = path.into_inner(); let session_id = path.into_inner();
let user_id = Uuid::parse_str("00000000-0000-0000-0000-000000000001").unwrap(); let user_id = Uuid::parse_str("00000000-0000-0000-0000-000000000001").unwrap();
match Uuid::parse_str(&session_id) { match Uuid::parse_str(&session_id) {
Ok(session_uuid) => { Ok(session_uuid) => {
let orchestrator = BotOrchestrator::new(Arc::new(data.get_ref().clone())); let orchestrator = BotOrchestrator::new(Arc::new(data.get_ref().clone()));
@ -475,7 +413,7 @@ async fn get_session_history(
.await .await
{ {
Ok(history) => { Ok(history) => {
info!( trace!(
"Retrieved {} history entries for session {}", "Retrieved {} history entries for session {}",
history.len(), history.len(),
session_id session_id

View file

@ -2,46 +2,43 @@ use crate::channels::{ChannelAdapter, VoiceAdapter, WebChannelAdapter};
use crate::config::AppConfig; use crate::config::AppConfig;
use crate::llm::LLMProvider; use crate::llm::LLMProvider;
use crate::session::SessionManager; use crate::session::SessionManager;
use diesel::{ PgConnection};
use aws_sdk_s3::Client as S3Client; use aws_sdk_s3::Client as S3Client;
use redis::Client as RedisClient; use redis::Client as RedisClient;
use std::collections::HashMap; use std::collections::HashMap;
use std::sync::Arc; use std::sync::Arc;
use std::sync::Mutex;
use tokio::sync::mpsc; use tokio::sync::mpsc;
use crate::shared::models::BotResponse; use crate::shared::models::BotResponse;
use crate::auth::AuthService; use crate::auth::AuthService;
use crate::shared::utils::DbPool;
pub struct AppState { pub struct AppState {
pub drive: Option<S3Client>, pub drive: Option<S3Client>,
pub cache: Option<Arc<RedisClient>>, pub cache: Option<Arc<RedisClient>>,
pub bucket_name: String, pub bucket_name: String,
pub config: Option<AppConfig>, pub config: Option<AppConfig>,
pub conn: Arc<Mutex<PgConnection>>, pub conn: DbPool,
pub session_manager: Arc<tokio::sync::Mutex<SessionManager>>, pub session_manager: Arc<tokio::sync::Mutex<SessionManager>>,
pub llm_provider: Arc<dyn LLMProvider>, pub llm_provider: Arc<dyn LLMProvider>,
pub auth_service: Arc<tokio::sync::Mutex<AuthService>>, pub auth_service: Arc<tokio::sync::Mutex<AuthService>>,
pub channels: Arc<Mutex<HashMap<String, Arc<dyn ChannelAdapter>>>>, pub channels: Arc<tokio::sync::Mutex<HashMap<String, Arc<dyn ChannelAdapter>>>>,
pub response_channels: Arc<tokio::sync::Mutex<HashMap<String, mpsc::Sender<BotResponse>>>>, pub response_channels: Arc<tokio::sync::Mutex<HashMap<String, mpsc::Sender<BotResponse>>>>,
pub web_adapter: Arc<WebChannelAdapter>, pub web_adapter: Arc<WebChannelAdapter>,
pub voice_adapter: Arc<VoiceAdapter>, pub voice_adapter: Arc<VoiceAdapter>,
} }
impl Clone for AppState { impl Clone for AppState {
fn clone(&self) -> Self { fn clone(&self) -> Self {
Self { Self {
drive: self.drive.clone(), drive: self.drive.clone(),
bucket_name: self.bucket_name.clone(), bucket_name: self.bucket_name.clone(),
config: self.config.clone(), config: self.config.clone(),
conn: Arc::clone(&self.conn), conn: self.conn.clone(),
cache: self.cache.clone(),
cache: self.cache.clone(), session_manager: Arc::clone(&self.session_manager),
session_manager: Arc::clone(&self.session_manager), llm_provider: Arc::clone(&self.llm_provider),
llm_provider: Arc::clone(&self.llm_provider), auth_service: Arc::clone(&self.auth_service),
auth_service: Arc::clone(&self.auth_service), channels: Arc::clone(&self.channels),
channels: Arc::clone(&self.channels), response_channels: Arc::clone(&self.response_channels),
response_channels: Arc::clone(&self.response_channels), web_adapter: Arc::clone(&self.web_adapter),
web_adapter: Arc::clone(&self.web_adapter), voice_adapter: Arc::clone(&self.voice_adapter),
voice_adapter: Arc::clone(&self.voice_adapter), }
} }
}
} }

View file

@ -1,8 +1,11 @@
use anyhow::{Context, Result}; use anyhow::{Context, Result};
use diesel::{Connection, PgConnection}; use diesel::Connection;
use diesel::{
r2d2::{ConnectionManager, Pool},
PgConnection,
};
use futures_util::StreamExt; use futures_util::StreamExt;
use indicatif::{ProgressBar, ProgressStyle}; use indicatif::{ProgressBar, ProgressStyle};
use log::trace;
use reqwest::Client; use reqwest::Client;
use rhai::{Array, Dynamic}; use rhai::{Array, Dynamic};
use serde_json::Value; use serde_json::Value;
@ -10,7 +13,6 @@ use smartstring::SmartString;
use std::error::Error; use std::error::Error;
use tokio::fs::File as TokioFile; use tokio::fs::File as TokioFile;
use tokio::io::AsyncWriteExt; use tokio::io::AsyncWriteExt;
pub fn json_value_to_dynamic(value: &Value) -> Dynamic { pub fn json_value_to_dynamic(value: &Value) -> Dynamic {
match value { match value {
Value::Null => Dynamic::UNIT, Value::Null => Dynamic::UNIT,
@ -37,7 +39,6 @@ pub fn json_value_to_dynamic(value: &Value) -> Dynamic {
), ),
} }
} }
pub fn to_array(value: Dynamic) -> Array { pub fn to_array(value: Dynamic) -> Array {
if value.is_array() { if value.is_array() {
value.cast::<Array>() value.cast::<Array>()
@ -47,80 +48,67 @@ pub fn to_array(value: Dynamic) -> Array {
Array::from([value]) Array::from([value])
} }
} }
pub async fn download_file(url: &str, output_path: &str) -> Result<(), anyhow::Error> {
pub async fn download_file(
url: &str,
output_path: &str,
) -> Result<(), anyhow::Error> {
let url = url.to_string(); let url = url.to_string();
let output_path = output_path.to_string(); let output_path = output_path.to_string();
let download_handle = tokio::spawn(async move { let download_handle = tokio::spawn(async move {
let client = Client::builder() let client = Client::builder()
.user_agent("Mozilla/5.0 (compatible; BotServer/1.0)") .user_agent("Mozilla/5.0 (compatible; BotServer/1.0)")
.build()?; .build()?;
let response = client.get(&url).send().await?; let response = client.get(&url).send().await?;
if response.status().is_success() { if response.status().is_success() {
let total_size = response.content_length().unwrap_or(0); let total_size = response.content_length().unwrap_or(0);
let pb = ProgressBar::new(total_size); let pb = ProgressBar::new(total_size);
pb.set_style(ProgressStyle::default_bar() pb.set_style(ProgressStyle::default_bar()
.template("{msg}\n{spinner:.green} [{elapsed_precise}] [{bar:40.cyan/blue}] {bytes}/{total_bytes} ({eta})") .template("{msg}\n{spinner:.green} [{elapsed_precise}] [{bar:40.cyan/blue}] {bytes}/{total_bytes} ({eta})")
.unwrap() .unwrap()
.progress_chars("#>-")); .progress_chars("#>-"));
pb.set_message(format!("Downloading {}", url)); pb.set_message(format!("Downloading {}", url));
let mut file = TokioFile::create(&output_path).await?; let mut file = TokioFile::create(&output_path).await?;
let mut downloaded: u64 = 0; let mut downloaded: u64 = 0;
let mut stream = response.bytes_stream(); let mut stream = response.bytes_stream();
while let Some(chunk_result) = stream.next().await { while let Some(chunk_result) = stream.next().await {
let chunk = chunk_result?; let chunk = chunk_result?;
file.write_all(&chunk).await?; file.write_all(&chunk).await?;
downloaded += chunk.len() as u64; downloaded += chunk.len() as u64;
pb.set_position(downloaded); pb.set_position(downloaded);
} }
pb.finish_with_message(format!("Downloaded {}", output_path)); pb.finish_with_message(format!("Downloaded {}", output_path));
trace!("Download completed: {} -> {}", url, output_path);
Ok(()) Ok(())
} else { } else {
Err(anyhow::anyhow!("HTTP {}: {}", response.status(), url)) Err(anyhow::anyhow!("HTTP {}: {}", response.status(), url))
} }
}); });
download_handle.await? download_handle.await?
} }
pub fn parse_filter(filter_str: &str) -> Result<(String, Vec<String>), Box<dyn Error>> { pub fn parse_filter(filter_str: &str) -> Result<(String, Vec<String>), Box<dyn Error>> {
let parts: Vec<&str> = filter_str.split('=').collect(); let parts: Vec<&str> = filter_str.split('=').collect();
if parts.len() != 2 { if parts.len() != 2 {
return Err("Invalid filter format. Expected 'KEY=VALUE'".into()); return Err("Invalid filter format. Expected 'KEY=VALUE'".into());
} }
let column = parts[0].trim(); let column = parts[0].trim();
let value = parts[1].trim(); let value = parts[1].trim();
if !column if !column
.chars() .chars()
.all(|c| c.is_ascii_alphanumeric() || c == '_') .all(|c| c.is_ascii_alphanumeric() || c == '_')
{ {
return Err("Invalid column name in filter".into()); return Err("Invalid column name in filter".into());
} }
Ok((format!("{} = $1", column), vec![value.to_string()])) Ok((format!("{} = $1", column), vec![value.to_string()]))
} }
pub fn estimate_token_count(text: &str) -> usize { pub fn estimate_token_count(text: &str) -> usize {
let char_count = text.chars().count(); let char_count = text.chars().count();
(char_count / 4).max(1) // Ensure at least 1 token (char_count / 4).max(1)
} }
pub fn establish_pg_connection() -> Result<PgConnection> { pub fn establish_pg_connection() -> Result<PgConnection> {
let database_url = std::env::var("DATABASE_URL") let database_url = std::env::var("DATABASE_URL").unwrap();
.unwrap_or_else(|_| "postgres://gbuser:@localhost:5432/botserver".to_string());
PgConnection::establish(&database_url) PgConnection::establish(&database_url)
.with_context(|| format!("Failed to connect to database at {}", database_url)) .with_context(|| format!("Failed to connect to database at {}", database_url))
} }
pub type DbPool = Pool<ConnectionManager<PgConnection>>;
pub fn create_conn() -> Result<DbPool, r2d2::Error> {
let database_url = std::env::var("DATABASE_URL")
.unwrap_or_else(|_| "postgres://gbuser:@localhost:5432/botserver".to_string());
let manager = ConnectionManager::<PgConnection>::new(database_url);
Pool::builder().build(manager)
}

View file

@ -6,8 +6,6 @@ use tokio::sync::mpsc;
use uuid::Uuid; use uuid::Uuid;
pub struct ChatPanel { pub struct ChatPanel {
pub id: Uuid,
pub app_state: Arc<AppState>,
pub messages: Vec<String>, pub messages: Vec<String>,
pub input_buffer: String, pub input_buffer: String,
pub session_id: Uuid, pub session_id: Uuid,
@ -16,10 +14,8 @@ pub struct ChatPanel {
} }
impl ChatPanel { impl ChatPanel {
pub fn new(app_state: Arc<AppState>) -> Self { pub fn new(_app_state: Arc<AppState>) -> Self {
Self { Self {
id: Uuid::new_v4(),
app_state,
messages: vec!["Welcome to General Bots Console Chat!".to_string()], messages: vec!["Welcome to General Bots Console Chat!".to_string()],
input_buffer: String::new(), input_buffer: String::new(),
session_id: Uuid::new_v4(), session_id: Uuid::new_v4(),
@ -95,7 +91,7 @@ let _ = orchestrator.stream_response(user_message, tx).await;
use crate::shared::models::schema::bots::dsl::*; use crate::shared::models::schema::bots::dsl::*;
use diesel::prelude::*; use diesel::prelude::*;
let mut conn = app_state.conn.lock().unwrap(); let mut conn = app_state.conn.get().unwrap();
let bot_id = bots let bot_id = bots
.filter(name.eq(bot_name)) .filter(name.eq(bot_name))
.select(id) .select(id)

View file

@ -1,177 +1,194 @@
use std::sync::Arc;
use crate::shared::state::AppState;
use crate::shared::models::schema::bots::dsl::*;
use crate::nvidia;
use crate::config::ConfigManager; use crate::config::ConfigManager;
use crate::nvidia;
use crate::shared::models::schema::bots::dsl::*;
use crate::shared::state::AppState;
use diesel::prelude::*; use diesel::prelude::*;
use std::sync::Arc;
use sysinfo::System; use sysinfo::System;
pub struct StatusPanel { pub struct StatusPanel {
app_state: Arc<AppState>, app_state: Arc<AppState>,
last_update: std::time::Instant, last_update: std::time::Instant,
cached_content: String, cached_content: String,
system: System, system: System,
} }
impl StatusPanel { impl StatusPanel {
pub fn new(app_state: Arc<AppState>) -> Self { pub fn new(app_state: Arc<AppState>) -> Self {
Self { Self {
app_state, app_state,
last_update: std::time::Instant::now(), last_update: std::time::Instant::now(),
cached_content: String::new(), cached_content: String::new(),
system: System::new_all(), system: System::new_all(),
} }
} }
pub async fn update(&mut self) -> Result<(), std::io::Error> { pub async fn update(&mut self) -> Result<(), std::io::Error> {
if self.last_update.elapsed() < std::time::Duration::from_secs(1) { if self.last_update.elapsed() < std::time::Duration::from_secs(1) {
return Ok(()); return Ok(());
} }
self.system.refresh_all(); self.system.refresh_all();
self.cached_content = String::new(); self.cached_content = String::new();
self.last_update = std::time::Instant::now(); self.last_update = std::time::Instant::now();
Ok(()) Ok(())
} }
pub fn render(&mut self, selected_bot: Option<String>) -> String { pub fn render(&mut self, selected_bot: Option<String>) -> String {
let mut lines = Vec::new(); let mut lines = Vec::new();
self.system.refresh_all();
lines.push("╔═══════════════════════════════════════╗".to_string()); self.system.refresh_all();
lines.push("║ SYSTEM METRICS ║".to_string());
lines.push("╚═══════════════════════════════════════╝".to_string());
lines.push("".to_string());
let system_metrics = match nvidia::get_system_metrics(0, 0) { lines.push("╔═══════════════════════════════════════╗".to_string());
Ok(metrics) => metrics, lines.push("║ SYSTEM METRICS ║".to_string());
Err(_) => nvidia::SystemMetrics::default(), lines.push("╚═══════════════════════════════════════╝".to_string());
}; lines.push("".to_string());
let cpu_bar = Self::create_progress_bar(system_metrics.cpu_usage, 20); let system_metrics = match nvidia::get_system_metrics(0, 0) {
lines.push(format!(" CPU: {:5.1}% {}", system_metrics.cpu_usage, cpu_bar)); Ok(metrics) => metrics,
Err(_) => nvidia::SystemMetrics::default(),
};
if let Some(gpu_usage) = system_metrics.gpu_usage { let cpu_bar = Self::create_progress_bar(system_metrics.cpu_usage, 20);
let gpu_bar = Self::create_progress_bar(gpu_usage, 20); lines.push(format!(
lines.push(format!(" GPU: {:5.1}% {}", gpu_usage, gpu_bar)); " CPU: {:5.1}% {}",
} else { system_metrics.cpu_usage, cpu_bar
lines.push(" GPU: Not available".to_string()); ));
}
let total_mem = self.system.total_memory() as f32 / 1024.0 / 1024.0 / 1024.0; if let Some(gpu_usage) = system_metrics.gpu_usage {
let used_mem = self.system.used_memory() as f32 / 1024.0 / 1024.0 / 1024.0; let gpu_bar = Self::create_progress_bar(gpu_usage, 20);
let mem_percentage = (used_mem / total_mem) * 100.0; lines.push(format!(" GPU: {:5.1}% {}", gpu_usage, gpu_bar));
let mem_bar = Self::create_progress_bar(mem_percentage, 20); } else {
lines.push(format!(" MEM: {:5.1}% {} ({:.1}/{:.1} GB)", mem_percentage, mem_bar, used_mem, total_mem)); lines.push(" GPU: Not available".to_string());
}
lines.push("".to_string()); let total_mem = self.system.total_memory() as f32 / 1024.0 / 1024.0 / 1024.0;
lines.push("╔═══════════════════════════════════════╗".to_string()); let used_mem = self.system.used_memory() as f32 / 1024.0 / 1024.0 / 1024.0;
lines.push("║ COMPONENTS STATUS ║".to_string()); let mem_percentage = (used_mem / total_mem) * 100.0;
lines.push("╚═══════════════════════════════════════╝".to_string()); let mem_bar = Self::create_progress_bar(mem_percentage, 20);
lines.push("".to_string()); lines.push(format!(
" MEM: {:5.1}% {} ({:.1}/{:.1} GB)",
mem_percentage, mem_bar, used_mem, total_mem
));
let components = vec![ lines.push("".to_string());
("Tables", "postgres", "5432"), lines.push("╔═══════════════════════════════════════╗".to_string());
("Cache", "valkey-server", "6379"), lines.push("║ COMPONENTS STATUS ║".to_string());
("Drive", "minio", "9000"), lines.push("╚═══════════════════════════════════════╝".to_string());
("LLM", "llama-server", "8081"), lines.push("".to_string());
];
for (comp_name, process, port) in components { let components = vec![
let status = if Self::check_component_running(process) { ("Tables", "postgres", "5432"),
format!("🟢 ONLINE [Port: {}]", port) ("Cache", "valkey-server", "6379"),
} else { ("Drive", "minio", "9000"),
"🔴 OFFLINE".to_string() ("LLM", "llama-server", "8081"),
}; ];
lines.push(format!(" {:<10} {}", comp_name, status));
}
lines.push("".to_string()); for (comp_name, process, port) in components {
lines.push("╔═══════════════════════════════════════╗".to_string()); let status = if Self::check_component_running(process) {
lines.push("║ ACTIVE BOTS ║".to_string()); format!("🟢 ONLINE [Port: {}]", port)
lines.push("╚═══════════════════════════════════════╝".to_string()); } else {
lines.push("".to_string()); "🔴 OFFLINE".to_string()
};
lines.push(format!(" {:<10} {}", comp_name, status));
}
if let Ok(mut conn) = self.app_state.conn.try_lock() { lines.push("".to_string());
match bots lines.push("╔═══════════════════════════════════════╗".to_string());
.filter(is_active.eq(true)) lines.push("║ ACTIVE BOTS ║".to_string());
.select((name, id)) lines.push("╚═══════════════════════════════════════╝".to_string());
.load::<(String, uuid::Uuid)>(&mut *conn) lines.push("".to_string());
{
Ok(bot_list) => {
if bot_list.is_empty() {
lines.push(" No active bots".to_string());
} else {
for (bot_name, bot_id) in bot_list {
let marker = if let Some(ref selected) = selected_bot {
if selected == &bot_name { "" } else { " " }
} else {
" "
};
lines.push(format!(" {} 🤖 {}", marker, bot_name));
if let Some(ref selected) = selected_bot { if let Ok(mut conn) = self.app_state.conn.get() {
if selected == &bot_name { match bots
lines.push("".to_string()); .filter(is_active.eq(true))
lines.push(" ┌─ Bot Configuration ─────────┐".to_string()); .select((name, id))
.load::<(String, uuid::Uuid)>(&mut *conn)
let config_manager = ConfigManager::new(self.app_state.conn.clone()); {
Ok(bot_list) => {
let llm_model = config_manager.get_config(&bot_id, "llm-model", None) if bot_list.is_empty() {
.unwrap_or_else(|_| "N/A".to_string()); lines.push(" No active bots".to_string());
lines.push(format!(" Model: {}", llm_model)); } else {
for (bot_name, bot_id) in bot_list {
let ctx_size = config_manager.get_config(&bot_id, "llm-server-ctx-size", None) let marker = if let Some(ref selected) = selected_bot {
.unwrap_or_else(|_| "N/A".to_string()); if selected == &bot_name {
lines.push(format!(" Context: {}", ctx_size)); ""
} else {
let temp = config_manager.get_config(&bot_id, "llm-temperature", None) " "
.unwrap_or_else(|_| "N/A".to_string()); }
lines.push(format!(" Temp: {}", temp)); } else {
" "
lines.push(" └─────────────────────────────┘".to_string()); };
} lines.push(format!(" {} 🤖 {}", marker, bot_name));
}
}
}
}
Err(_) => {
lines.push(" Error loading bots".to_string());
}
}
} else {
lines.push(" Database locked".to_string());
}
lines.push("".to_string()); if let Some(ref selected) = selected_bot {
lines.push("╔═══════════════════════════════════════╗".to_string()); if selected == &bot_name {
lines.push("║ SESSIONS ║".to_string()); lines.push("".to_string());
lines.push("╚═══════════════════════════════════════╝".to_string()); lines.push(" ┌─ Bot Configuration ─────────┐".to_string());
let session_count = self.app_state.response_channels.try_lock() let config_manager =
.map(|channels| channels.len()) ConfigManager::new(self.app_state.conn.clone());
.unwrap_or(0);
lines.push(format!(" Active Sessions: {}", session_count));
lines.join("\n") let llm_model = config_manager
} .get_config(&bot_id, "llm-model", None)
.unwrap_or_else(|_| "N/A".to_string());
lines.push(format!(" Model: {}", llm_model));
fn create_progress_bar(percentage: f32, width: usize) -> String { let ctx_size = config_manager
let filled = (percentage / 100.0 * width as f32).round() as usize; .get_config(&bot_id, "llm-server-ctx-size", None)
let empty = width.saturating_sub(filled); .unwrap_or_else(|_| "N/A".to_string());
let filled_chars = "".repeat(filled); lines.push(format!(" Context: {}", ctx_size));
let empty_chars = "".repeat(empty);
format!("[{}{}]", filled_chars, empty_chars)
}
pub fn check_component_running(process_name: &str) -> bool { let temp = config_manager
std::process::Command::new("pgrep") .get_config(&bot_id, "llm-temperature", None)
.arg("-f") .unwrap_or_else(|_| "N/A".to_string());
.arg(process_name) lines.push(format!(" Temp: {}", temp));
.output()
.map(|output| !output.stdout.is_empty()) lines.push(" └─────────────────────────────┘".to_string());
.unwrap_or(false) }
} }
}
}
}
Err(_) => {
lines.push(" Error loading bots".to_string());
}
}
} else {
lines.push(" Database locked".to_string());
}
lines.push("".to_string());
lines.push("╔═══════════════════════════════════════╗".to_string());
lines.push("║ SESSIONS ║".to_string());
lines.push("╚═══════════════════════════════════════╝".to_string());
let session_count = self
.app_state
.response_channels
.try_lock()
.map(|channels| channels.len())
.unwrap_or(0);
lines.push(format!(" Active Sessions: {}", session_count));
lines.join("\n")
}
fn create_progress_bar(percentage: f32, width: usize) -> String {
let filled = (percentage / 100.0 * width as f32).round() as usize;
let empty = width.saturating_sub(filled);
let filled_chars = "".repeat(filled);
let empty_chars = "".repeat(empty);
format!("[{}{}]", filled_chars, empty_chars)
}
pub fn check_component_running(process_name: &str) -> bool {
std::process::Command::new("pgrep")
.arg("-f")
.arg(process_name)
.output()
.map(|output| !output.stdout.is_empty())
.unwrap_or(false)
}
} }

View file

@ -1,3 +0,0 @@
// Simple test script for LLM keyword
let result = LLM "Hello world";
result