WIP: Local changes before merging master into main

This commit is contained in:
Rodrigo Rodriguez (Pragmatismo) 2026-03-01 07:40:11 -03:00
parent bbdf243c86
commit 8f495c75ec
30 changed files with 1630 additions and 265 deletions

View file

@ -63,14 +63,14 @@ sha256 = ""
[components.directory] [components.directory]
name = "Zitadel Identity Provider" name = "Zitadel Identity Provider"
url = "https://github.com/zitadel/zitadel/releases/download/v2.70.4/zitadel-linux-amd64.tar.gz" url = "https://github.com/zitadel/zitadel/releases/download/v4.11.1/zitadel-linux-amd64.tar.gz"
filename = "zitadel-linux-amd64.tar.gz" filename = "zitadel-linux-amd64.tar.gz"
sha256 = "" sha256 = ""
[components.alm] [components.alm]
name = "Forgejo Git Server" name = "Forgejo Git Server"
url = "https://codeberg.org/forgejo/forgejo/releases/download/v10.0.2/forgejo-10.0.2-linux-amd64" url = "https://codeberg.org/forgejo/forgejo/releases/download/v14.0.2/forgejo-14.0.2-linux-amd64"
filename = "forgejo-10.0.2-linux-amd64" filename = "forgejo-14.0.2-linux-amd64"
sha256 = "" sha256 = ""
[components.alm_ci] [components.alm_ci]

View file

@ -16,7 +16,7 @@ use chrono::Utc;
use diesel::prelude::*; use diesel::prelude::*;
use diesel::sql_query; use diesel::sql_query;
use diesel::sql_types::{Text, Uuid as DieselUuid}; use diesel::sql_types::{Text, Uuid as DieselUuid};
use log::{error, info, trace}; use log::{error, info, trace, warn};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use std::sync::Arc; use std::sync::Arc;
use uuid::Uuid; use uuid::Uuid;
@ -413,8 +413,13 @@ pub async fn classify_intent_handler(
let session = match get_current_session(&state) { let session = match get_current_session(&state) {
Ok(s) => s, Ok(s) => s,
Err(e) => { Err(e) => {
warn!("No active session for classify, bootstrapping default: {}", e);
match bootstrap_default_session(&state) {
Ok(s) => s,
Err(e2) => {
error!("Failed to bootstrap session: {}", e2);
return ( return (
StatusCode::UNAUTHORIZED, StatusCode::INTERNAL_SERVER_ERROR,
Json(ClassifyIntentResponse { Json(ClassifyIntentResponse {
success: false, success: false,
intent_type: "UNKNOWN".to_string(), intent_type: "UNKNOWN".to_string(),
@ -423,10 +428,12 @@ pub async fn classify_intent_handler(
requires_clarification: false, requires_clarification: false,
clarification_question: None, clarification_question: None,
result: None, result: None,
error: Some(format!("Authentication error: {}", e)), error: Some(format!("No session available: {e2}")),
}), }),
); );
} }
}
}
}; };
let classifier = IntentClassifier::new(Arc::clone(&state)); let classifier = IntentClassifier::new(Arc::clone(&state));
@ -1364,6 +1371,49 @@ fn get_current_session(
Ok(session) Ok(session)
} }
fn bootstrap_default_session(
state: &Arc<AppState>,
) -> Result<crate::core::shared::models::UserSession, Box<dyn std::error::Error + Send + Sync>> {
use diesel::prelude::*;
let mut conn = state
.conn
.get()
.map_err(|e| format!("DB connection error: {}", e))?;
#[derive(QueryableByName)]
struct BotRow {
#[diesel(sql_type = diesel::sql_types::Uuid)]
id: uuid::Uuid,
}
let bots: Vec<BotRow> = diesel::sql_query("SELECT id FROM bots LIMIT 1")
.get_results(&mut conn)
.unwrap_or_default();
let bot_id = bots
.first()
.map(|b| b.id)
.unwrap_or_else(uuid::Uuid::nil);
let session_id = uuid::Uuid::new_v4();
let user_id = uuid::Uuid::nil();
diesel::sql_query(
"INSERT INTO user_sessions (id, bot_id, user_id, channel, created_at, updated_at)
VALUES ($1, $2, $3, 'vibe', NOW(), NOW())
ON CONFLICT DO NOTHING"
)
.bind::<diesel::sql_types::Uuid, _>(session_id)
.bind::<diesel::sql_types::Uuid, _>(bot_id)
.bind::<diesel::sql_types::Uuid, _>(user_id)
.execute(&mut conn)
.map_err(|e| format!("Failed to create bootstrap session: {}", e))?;
get_current_session(state)
}
fn create_auto_task_from_plan( fn create_auto_task_from_plan(
_state: &Arc<AppState>, _state: &Arc<AppState>,
session: &crate::core::shared::models::UserSession, session: &crate::core::shared::models::UserSession,

View file

@ -1,4 +1,3 @@
use crate::auto_task::app_generator::AppGenerator;
use crate::auto_task::intent_compiler::IntentCompiler; use crate::auto_task::intent_compiler::IntentCompiler;
use crate::basic::ScriptService; use crate::basic::ScriptService;
@ -10,7 +9,7 @@ use chrono::{DateTime, Utc};
use diesel::prelude::*; use diesel::prelude::*;
use diesel::sql_query; use diesel::sql_query;
use diesel::sql_types::{Text, Uuid as DieselUuid}; use diesel::sql_types::{Text, Uuid as DieselUuid};
use log::{error, info, trace, warn}; use log::{info, trace, warn};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use std::sync::Arc; use std::sync::Arc;
use uuid::Uuid; use uuid::Uuid;
@ -512,112 +511,44 @@ Respond with JSON only:
session: &UserSession, session: &UserSession,
task_id: Option<String>, task_id: Option<String>,
) -> Result<IntentResult, Box<dyn std::error::Error + Send + Sync>> { ) -> Result<IntentResult, Box<dyn std::error::Error + Send + Sync>> {
info!("Handling APP_CREATE intent"); info!("Handling APP_CREATE intent via Orchestrator pipeline");
// [AGENT MODE] Initialize the LXC container session for real terminal output let mut orchestrator = if let Some(tid) = task_id {
let t_id = task_id.clone().unwrap_or_else(|| uuid::Uuid::new_v4().to_string()); crate::auto_task::orchestrator::Orchestrator::with_task_id(
let mut executor = crate::auto_task::AgentExecutor::new(self.state.clone(), &session.id.to_string(), &t_id); self.state.clone(),
tid,
if let Err(e) = executor.initialize().await { )
log::warn!("Failed to initialize LXC container for agent: {}", e);
} else { } else {
executor.broadcast_thought("Analyzing the user prompt and setting up a dedicated LXC workspace..."); crate::auto_task::orchestrator::Orchestrator::new(self.state.clone())
let _ = executor.execute_shell_command("echo 'Initializing Agent Workspace...' && date && mkdir -p /root/app").await;
}
let mut app_generator = if let Some(tid) = task_id {
AppGenerator::with_task_id(self.state.clone(), tid)
} else {
AppGenerator::new(self.state.clone())
}; };
match app_generator let result = orchestrator
.generate_app(&classification.original_text, session) .execute_pipeline(classification, session)
.await .await?;
{
Ok(app) => {
let mut resources = Vec::new();
// Track created tables Ok(IntentResult {
for table in &app.tables { success: result.success,
resources.push(CreatedResource {
resource_type: "table".to_string(),
name: table.name.clone(),
path: Some("tables.bas".to_string()),
});
}
for page in &app.pages {
resources.push(CreatedResource {
resource_type: "page".to_string(),
name: page.filename.clone(),
path: Some(page.filename.clone()),
});
}
for tool in &app.tools {
resources.push(CreatedResource {
resource_type: "tool".to_string(),
name: tool.filename.clone(),
path: Some(tool.filename.clone()),
});
}
let app_url = format!("/apps/{}", app.name.to_lowercase().replace(' ', "-"));
let res = Ok(IntentResult {
success: true,
intent_type: IntentType::AppCreate, intent_type: IntentType::AppCreate,
message: format!( message: result.message,
"Done:\n{}\nApp available at {}", created_resources: result
resources .created_resources
.iter() .into_iter()
.filter(|r| r.resource_type == "table") .map(|r| CreatedResource {
.map(|r| format!("{} table created", r.name)) resource_type: r.resource_type,
.collect::<Vec<_>>() name: r.name,
.join("\n"), path: r.path,
app_url })
), .collect(),
created_resources: resources, app_url: result.app_url,
app_url: Some(app_url), task_id: Some(result.task_id),
task_id: None,
schedule_id: None, schedule_id: None,
tool_triggers: Vec::new(), tool_triggers: Vec::new(),
next_steps: vec![ next_steps: vec![
"Open the app to start using it".to_string(), "Open the app to start using it".to_string(),
"Use Designer to customize the app".to_string(), "Use Designer to customize the app".to_string(),
], ],
error: None, error: result.error,
}); })
// [AGENT MODE] Cleanup the LXC container
executor.broadcast_thought("Generation complete. Terminating LXC sandbox.");
executor.cleanup().await;
res
}
Err(e) => {
error!("Failed to generate app: {e}");
let res = Ok(IntentResult {
success: false,
intent_type: IntentType::AppCreate,
message: "Failed to create the application".to_string(),
created_resources: Vec::new(),
app_url: None,
task_id: None,
schedule_id: None,
tool_triggers: Vec::new(),
next_steps: vec!["Try again with more details".to_string()],
error: Some(e.to_string()),
});
// [AGENT MODE] Cleanup the LXC container on error
if let Err(_) = executor.execute_shell_command("echo 'Build failed' >&2").await {}
executor.cleanup().await;
res
}
}
} }
fn handle_todo( fn handle_todo(

View file

@ -5,6 +5,7 @@ pub mod autotask_api;
pub mod designer_ai; pub mod designer_ai;
pub mod intent_classifier; pub mod intent_classifier;
pub mod intent_compiler; pub mod intent_compiler;
pub mod orchestrator;
pub mod safety_layer; pub mod safety_layer;
pub mod task_manifest; pub mod task_manifest;
pub mod task_types; pub mod task_types;

File diff suppressed because it is too large Load diff

View file

@ -1,4 +1,4 @@
use log::info; use log::{info, trace};
pub fn convert_mail_line_with_substitution(line: &str) -> String { pub fn convert_mail_line_with_substitution(line: &str) -> String {
let mut result = String::new(); let mut result = String::new();
@ -68,7 +68,7 @@ pub fn convert_mail_line_with_substitution(line: &str) -> String {
} }
} }
info!("Converted mail line: '{}' → '{}'", line, result); trace!("Converted mail line: '{}' → '{}'", line, result);
result result
} }
@ -138,6 +138,6 @@ pub fn convert_mail_block(recipient: &str, lines: &[String]) -> String {
}; };
result.push_str(&format!("send_mail({}, \"{}\", {}, []);\n", recipient_expr, subject, body_expr)); result.push_str(&format!("send_mail({}, \"{}\", {}, []);\n", recipient_expr, subject, body_expr));
info!("Converted MAIL block → {}", result); trace!("Converted MAIL block → {}", result);
result result
} }

View file

@ -4,7 +4,7 @@ pub mod talk;
pub use mail::convert_mail_block; pub use mail::convert_mail_block;
pub use talk::convert_talk_block; pub use talk::convert_talk_block;
use log::info; use log::{info, trace};
pub fn convert_begin_blocks(script: &str) -> String { pub fn convert_begin_blocks(script: &str) -> String {
let mut result = String::new(); let mut result = String::new();
@ -23,14 +23,14 @@ pub fn convert_begin_blocks(script: &str) -> String {
} }
if upper == "BEGIN TALK" { if upper == "BEGIN TALK" {
info!("Converting BEGIN TALK statement"); trace!("Converting BEGIN TALK statement");
in_talk_block = true; in_talk_block = true;
talk_block_lines.clear(); talk_block_lines.clear();
continue; continue;
} }
if upper == "END TALK" { if upper == "END TALK" {
info!("Converting END TALK statement, processing {} lines", talk_block_lines.len()); trace!("Converting END TALK statement, processing {} lines", talk_block_lines.len());
in_talk_block = false; in_talk_block = false;
let converted = convert_talk_block(&talk_block_lines); let converted = convert_talk_block(&talk_block_lines);
result.push_str(&converted); result.push_str(&converted);
@ -45,7 +45,7 @@ pub fn convert_begin_blocks(script: &str) -> String {
if upper.starts_with("BEGIN MAIL ") { if upper.starts_with("BEGIN MAIL ") {
let recipient = &trimmed[11..].trim(); let recipient = &trimmed[11..].trim();
info!("Converting BEGIN MAIL statement: recipient='{}'", recipient); trace!("Converting BEGIN MAIL statement: recipient='{}'", recipient);
mail_recipient = recipient.to_string(); mail_recipient = recipient.to_string();
in_mail_block = true; in_mail_block = true;
mail_block_lines.clear(); mail_block_lines.clear();
@ -53,7 +53,7 @@ pub fn convert_begin_blocks(script: &str) -> String {
} }
if upper == "END MAIL" { if upper == "END MAIL" {
info!("Converting END MAIL statement, processing {} lines", mail_block_lines.len()); trace!("Converting END MAIL statement, processing {} lines", mail_block_lines.len());
in_mail_block = false; in_mail_block = false;
let converted = convert_mail_block(&mail_recipient, &mail_block_lines); let converted = convert_mail_block(&mail_recipient, &mail_block_lines);
result.push_str(&converted); result.push_str(&converted);

View file

@ -1,4 +1,4 @@
use log::info; use log::{info, trace};
pub fn convert_talk_line_with_substitution(line: &str) -> String { pub fn convert_talk_line_with_substitution(line: &str) -> String {
let mut result = String::new(); let mut result = String::new();
@ -109,7 +109,7 @@ pub fn convert_talk_line_with_substitution(line: &str) -> String {
result = "TALK \"\"".to_string(); result = "TALK \"\"".to_string();
} }
info!("Converted TALK line: '{}' → '{}'", line, result); trace!("Converted TALK line: '{}' → '{}'", line, result);
result result
} }

View file

@ -521,7 +521,7 @@ impl BasicCompiler {
{ {
log::error!("Failed to register WEBHOOK during preprocessing: {}", e); log::error!("Failed to register WEBHOOK during preprocessing: {}", e);
} else { } else {
log::info!( log::trace!(
"Registered webhook endpoint {} for script {} during preprocessing", "Registered webhook endpoint {} for script {} during preprocessing",
endpoint, endpoint,
script_name script_name
@ -550,7 +550,7 @@ impl BasicCompiler {
{ {
log::error!("Failed to register USE_WEBSITE during preprocessing: {}", e); log::error!("Failed to register USE_WEBSITE during preprocessing: {}", e);
} else { } else {
log::info!( log::trace!(
"Registered website {} for crawling during preprocessing (refresh: {})", "Registered website {} for crawling during preprocessing (refresh: {})",
url, refresh url, refresh
); );
@ -676,7 +676,7 @@ impl BasicCompiler {
let table_name = table_name.trim_matches('"'); let table_name = table_name.trim_matches('"');
// Debug log to see what we're querying // Debug log to see what we're querying
log::info!("Converting SAVE for table: '{}' (original: '{}')", table_name, &parts[0]); log::trace!("Converting SAVE for table: '{}' (original: '{}')", table_name, &parts[0]);
// Get column names from TABLE definition (preserves order from .bas file) // Get column names from TABLE definition (preserves order from .bas file)
let column_names = self.get_table_columns_for_save(table_name, bot_id)?; let column_names = self.get_table_columns_for_save(table_name, bot_id)?;
@ -685,7 +685,7 @@ impl BasicCompiler {
let values: Vec<&String> = parts.iter().skip(1).collect(); let values: Vec<&String> = parts.iter().skip(1).collect();
let mut map_pairs = Vec::new(); let mut map_pairs = Vec::new();
log::info!("Matching {} variables to {} columns", values.len(), column_names.len()); log::trace!("Matching {} variables to {} columns", values.len(), column_names.len());
for value_var in values.iter() { for value_var in values.iter() {
// Find the column that matches this variable (case-insensitive) // Find the column that matches this variable (case-insensitive)
@ -753,7 +753,7 @@ impl BasicCompiler {
// Try to parse TABLE definition from the bot's .bas files to get correct field order // Try to parse TABLE definition from the bot's .bas files to get correct field order
if let Ok(columns) = self.get_columns_from_table_definition(table_name, bot_id) { if let Ok(columns) = self.get_columns_from_table_definition(table_name, bot_id) {
if !columns.is_empty() { if !columns.is_empty() {
log::info!("Using TABLE definition for '{}': {} columns", table_name, columns.len()); log::trace!("Using TABLE definition for '{}': {} columns", table_name, columns.len());
return Ok(columns); return Ok(columns);
} }
} }
@ -880,7 +880,7 @@ impl BasicCompiler {
match sql_query(&bot_query).load(&mut *bot_conn) { match sql_query(&bot_query).load(&mut *bot_conn) {
Ok(bot_cols) => { Ok(bot_cols) => {
log::info!("Found {} columns for table '{}' in bot database", bot_cols.len(), table_name); log::trace!("Found {} columns for table '{}' in bot database", bot_cols.len(), table_name);
bot_cols.into_iter() bot_cols.into_iter()
.map(|c: ColumnRow| c.column_name) .map(|c: ColumnRow| c.column_name)
.collect() .collect()
@ -895,7 +895,7 @@ impl BasicCompiler {
Vec::new() Vec::new()
} }
} else { } else {
log::info!("Found {} columns for table '{}' in main database", cols.len(), table_name); log::trace!("Found {} columns for table '{}' in main database", cols.len(), table_name);
cols.into_iter() cols.into_iter()
.map(|c: ColumnRow| c.column_name) .map(|c: ColumnRow| c.column_name)
.collect() .collect()
@ -919,7 +919,7 @@ impl BasicCompiler {
match sql_query(&bot_query).load(&mut *bot_conn) { match sql_query(&bot_query).load(&mut *bot_conn) {
Ok(cols) => { Ok(cols) => {
log::info!("Found {} columns for table '{}' in bot database", cols.len(), table_name); log::trace!("Found {} columns for table '{}' in bot database", cols.len(), table_name);
cols.into_iter() cols.into_iter()
.filter(|c: &ColumnRow| c.column_name != "id") .filter(|c: &ColumnRow| c.column_name != "id")
.map(|c: ColumnRow| c.column_name) .map(|c: ColumnRow| c.column_name)

View file

@ -120,7 +120,7 @@ impl Default for McpConnection {
fn default() -> Self { fn default() -> Self {
Self { Self {
connection_type: ConnectionType::Http, connection_type: ConnectionType::Http,
url: "http://localhost:9000".to_string(), url: "http://localhost:8080".to_string(),
port: None, port: None,
timeout_seconds: 30, timeout_seconds: 30,
max_retries: 3, max_retries: 3,

View file

@ -388,7 +388,7 @@ impl McpCsvLoader {
.first() .first()
.map(|s| (*s).to_string()) .map(|s| (*s).to_string())
.unwrap_or_else(|| "localhost".to_string()); .unwrap_or_else(|| "localhost".to_string());
let port: u16 = parts.get(1).and_then(|p| p.parse().ok()).unwrap_or(9000); let port: u16 = parts.get(1).and_then(|p| p.parse().ok()).unwrap_or(8080);
McpConnection { McpConnection {
connection_type: ConnectionType::Tcp, connection_type: ConnectionType::Tcp,
url: host, url: host,

View file

@ -6,7 +6,7 @@ use crate::basic::keywords::switch_case::switch_keyword;
use crate::core::shared::models::UserSession; use crate::core::shared::models::UserSession;
use crate::core::shared::state::AppState; use crate::core::shared::state::AppState;
use diesel::prelude::*; use diesel::prelude::*;
use log::info; use log::{info, trace};
use rhai::{Dynamic, Engine, EvalAltResult, Scope}; use rhai::{Dynamic, Engine, EvalAltResult, Scope};
use std::collections::HashMap; use std::collections::HashMap;
use std::sync::Arc; use std::sync::Arc;
@ -560,7 +560,7 @@ impl ScriptService {
} }
pub fn compile(&self, script: &str) -> Result<rhai::AST, Box<EvalAltResult>> { pub fn compile(&self, script: &str) -> Result<rhai::AST, Box<EvalAltResult>> {
let processed_script = self.preprocess_basic_script(script); let processed_script = self.preprocess_basic_script(script);
info!("Processed Script:\n{}", processed_script); trace!("Processed Script:\n{}", processed_script);
match self.engine.compile(&processed_script) { match self.engine.compile(&processed_script) {
Ok(ast) => Ok(ast), Ok(ast) => Ok(ast),
Err(parse_error) => Err(Box::new(parse_error.into())), Err(parse_error) => Err(Box::new(parse_error.into())),
@ -587,7 +587,7 @@ impl ScriptService {
.collect::<Vec<&str>>() .collect::<Vec<&str>>()
.join("\n"); .join("\n");
info!("Filtered tool metadata: {} -> {} chars", script.len(), executable_script.len()); trace!("Filtered tool metadata: {} -> {} chars", script.len(), executable_script.len());
// Apply minimal preprocessing for tools (skip variable normalization to avoid breaking multi-line strings) // Apply minimal preprocessing for tools (skip variable normalization to avoid breaking multi-line strings)
let script = preprocess_switch(&executable_script); let script = preprocess_switch(&executable_script);
@ -597,7 +597,7 @@ impl ScriptService {
// let script = Self::convert_format_syntax(&script); // let script = Self::convert_format_syntax(&script);
// Skip normalize_variables_to_lowercase for tools - it breaks multi-line strings // Skip normalize_variables_to_lowercase for tools - it breaks multi-line strings
info!("Preprocessed tool script for Rhai compilation"); trace!("Preprocessed tool script for Rhai compilation");
// Convert SAVE statements with field lists to map-based SAVE (simplified version for tools) // Convert SAVE statements with field lists to map-based SAVE (simplified version for tools)
let script = Self::convert_save_for_tools(&script); let script = Self::convert_save_for_tools(&script);
// Convert BEGIN TALK and BEGIN MAIL blocks to single calls // Convert BEGIN TALK and BEGIN MAIL blocks to single calls
@ -986,7 +986,7 @@ impl ScriptService {
}; };
result.push_str(&format!("send_mail({}, \"{}\", {}, []);\n", recipient_expr, subject, body_expr)); result.push_str(&format!("send_mail({}, \"{}\", {}, []);\n", recipient_expr, subject, body_expr));
log::info!("Converted MAIL block → {}", result); log::trace!("Converted MAIL block → {}", result);
result result
} }
@ -1086,7 +1086,7 @@ impl ScriptService {
let mut mail_block_lines: Vec<String> = Vec::new(); let mut mail_block_lines: Vec<String> = Vec::new();
let mut in_line_continuation = false; let mut in_line_continuation = false;
log::info!("Converting IF/THEN syntax, input has {} lines", script.lines().count()); log::trace!("Converting IF/THEN syntax, input has {} lines", script.lines().count());
for line in script.lines() { for line in script.lines() {
let trimmed = line.trim(); let trimmed = line.trim();
@ -1119,7 +1119,7 @@ impl ScriptService {
} else { } else {
condition.to_string() condition.to_string()
}; };
log::info!("Converting IF statement: condition='{}'", condition); log::trace!("Converting IF statement: condition='{}'", condition);
result.push_str("if "); result.push_str("if ");
result.push_str(&condition); result.push_str(&condition);
result.push_str(" {\n"); result.push_str(" {\n");
@ -1129,7 +1129,7 @@ impl ScriptService {
// Handle ELSE // Handle ELSE
if upper == "ELSE" { if upper == "ELSE" {
log::info!("Converting ELSE statement"); log::trace!("Converting ELSE statement");
result.push_str("} else {\n"); result.push_str("} else {\n");
continue; continue;
} }
@ -1152,7 +1152,7 @@ impl ScriptService {
} else { } else {
condition.to_string() condition.to_string()
}; };
log::info!("Converting ELSEIF statement: condition='{}'", condition); log::trace!("Converting ELSEIF statement: condition='{}'", condition);
result.push_str("} else if "); result.push_str("} else if ");
result.push_str(&condition); result.push_str(&condition);
result.push_str(" {\n"); result.push_str(" {\n");
@ -1161,7 +1161,7 @@ impl ScriptService {
// Handle END IF // Handle END IF
if upper == "END IF" { if upper == "END IF" {
log::info!("Converting END IF statement"); log::trace!("Converting END IF statement");
if if_stack.pop().is_some() { if if_stack.pop().is_some() {
result.push_str("}\n"); result.push_str("}\n");
} }
@ -1171,7 +1171,7 @@ impl ScriptService {
// Handle WITH ... END WITH (BASIC object creation) // Handle WITH ... END WITH (BASIC object creation)
if upper.starts_with("WITH ") { if upper.starts_with("WITH ") {
let object_name = &trimmed[5..].trim(); let object_name = &trimmed[5..].trim();
log::info!("Converting WITH statement: object='{}'", object_name); log::trace!("Converting WITH statement: object='{}'", object_name);
// Convert WITH obj → let obj = #{ (start object literal) // Convert WITH obj → let obj = #{ (start object literal)
result.push_str("let "); result.push_str("let ");
result.push_str(object_name); result.push_str(object_name);
@ -1181,7 +1181,7 @@ impl ScriptService {
} }
if upper == "END WITH" { if upper == "END WITH" {
log::info!("Converting END WITH statement"); log::trace!("Converting END WITH statement");
result.push_str("};\n"); result.push_str("};\n");
in_with_block = false; in_with_block = false;
continue; continue;
@ -1189,14 +1189,14 @@ impl ScriptService {
// Handle BEGIN TALK ... END TALK (multi-line TALK with ${} substitution) // Handle BEGIN TALK ... END TALK (multi-line TALK with ${} substitution)
if upper == "BEGIN TALK" { if upper == "BEGIN TALK" {
log::info!("Converting BEGIN TALK statement"); log::trace!("Converting BEGIN TALK statement");
in_talk_block = true; in_talk_block = true;
talk_block_lines.clear(); talk_block_lines.clear();
continue; continue;
} }
if upper == "END TALK" { if upper == "END TALK" {
log::info!("Converting END TALK statement, processing {} lines", talk_block_lines.len()); log::trace!("Converting END TALK statement, processing {} lines", talk_block_lines.len());
in_talk_block = false; in_talk_block = false;
// Split into multiple TALK statements to avoid expression complexity limit // Split into multiple TALK statements to avoid expression complexity limit
@ -1239,7 +1239,7 @@ impl ScriptService {
// Handle BEGIN MAIL ... END MAIL (multi-line email with ${} substitution) // Handle BEGIN MAIL ... END MAIL (multi-line email with ${} substitution)
if upper.starts_with("BEGIN MAIL ") { if upper.starts_with("BEGIN MAIL ") {
let recipient = &trimmed[11..].trim(); // Skip "BEGIN MAIL " let recipient = &trimmed[11..].trim(); // Skip "BEGIN MAIL "
log::info!("Converting BEGIN MAIL statement: recipient='{}'", recipient); log::trace!("Converting BEGIN MAIL statement: recipient='{}'", recipient);
mail_recipient = recipient.to_string(); mail_recipient = recipient.to_string();
in_mail_block = true; in_mail_block = true;
mail_block_lines.clear(); mail_block_lines.clear();
@ -1247,7 +1247,7 @@ impl ScriptService {
} }
if upper == "END MAIL" { if upper == "END MAIL" {
log::info!("Converting END MAIL statement, processing {} lines", mail_block_lines.len()); log::trace!("Converting END MAIL statement, processing {} lines", mail_block_lines.len());
in_mail_block = false; in_mail_block = false;
// Process the mail block and convert to SEND EMAIL // Process the mail block and convert to SEND EMAIL
@ -1287,11 +1287,11 @@ impl ScriptService {
// Handle SAVE table, field1, field2, ... → INSERT "table", #{field1: value1, field2: value2, ...} // Handle SAVE table, field1, field2, ... → INSERT "table", #{field1: value1, field2: value2, ...}
if upper.starts_with("SAVE") && upper.contains(',') { if upper.starts_with("SAVE") && upper.contains(',') {
log::info!("Processing SAVE line: '{}'", trimmed); log::trace!("Processing SAVE line: '{}'", trimmed);
// Extract the part after "SAVE" // Extract the part after "SAVE"
let after_save = &trimmed[4..].trim(); // Skip "SAVE" let after_save = &trimmed[4..].trim(); // Skip "SAVE"
let parts: Vec<&str> = after_save.split(',').collect(); let parts: Vec<&str> = after_save.split(',').collect();
log::info!("SAVE parts: {:?}", parts); log::trace!("SAVE parts: {:?}", parts);
if parts.len() >= 2 { if parts.len() >= 2 {
// First part is the table name (in quotes) // First part is the table name (in quotes)
@ -1301,7 +1301,7 @@ impl ScriptService {
if parts.len() == 2 { if parts.len() == 2 {
let object_name = parts[1].trim().trim_end_matches(';'); let object_name = parts[1].trim().trim_end_matches(';');
let converted = format!("INSERT \"{}\", {};\n", table, object_name); let converted = format!("INSERT \"{}\", {};\n", table, object_name);
log::info!("Converted SAVE to INSERT (old syntax): '{}'", converted); log::trace!("Converted SAVE to INSERT (old syntax): '{}'", converted);
result.push_str(&converted); result.push_str(&converted);
continue; continue;
} }
@ -1310,7 +1310,7 @@ impl ScriptService {
// The runtime SAVE handler will match them to database columns by position // The runtime SAVE handler will match them to database columns by position
let values = parts[1..].join(", "); let values = parts[1..].join(", ");
let converted = format!("SAVE \"{}\", {};\n", table, values); let converted = format!("SAVE \"{}\", {};\n", table, values);
log::info!("Keeping SAVE syntax (modern): '{}'", converted); log::trace!("Keeping SAVE syntax (modern): '{}'", converted);
result.push_str(&converted); result.push_str(&converted);
continue; continue;
} }
@ -1319,17 +1319,17 @@ impl ScriptService {
// Handle SEND EMAIL → send_mail (function call style) // Handle SEND EMAIL → send_mail (function call style)
// Syntax: SEND EMAIL to, subject, body → send_mail(to, subject, body, []) // Syntax: SEND EMAIL to, subject, body → send_mail(to, subject, body, [])
if upper.starts_with("SEND EMAIL") { if upper.starts_with("SEND EMAIL") {
log::info!("Processing SEND EMAIL line: '{}'", trimmed); log::trace!("Processing SEND EMAIL line: '{}'", trimmed);
let after_send = &trimmed[11..].trim(); // Skip "SEND EMAIL " (10 chars + space = 11) let after_send = &trimmed[11..].trim(); // Skip "SEND EMAIL " (10 chars + space = 11)
let parts: Vec<&str> = after_send.split(',').collect(); let parts: Vec<&str> = after_send.split(',').collect();
log::info!("SEND EMAIL parts: {:?}", parts); log::trace!("SEND EMAIL parts: {:?}", parts);
if parts.len() == 3 { if parts.len() == 3 {
let to = parts[0].trim(); let to = parts[0].trim();
let subject = parts[1].trim(); let subject = parts[1].trim();
let body = parts[2].trim().trim_end_matches(';'); let body = parts[2].trim().trim_end_matches(';');
// Convert to send_mail(to, subject, body, []) function call // Convert to send_mail(to, subject, body, []) function call
let converted = format!("send_mail({}, {}, {}, []);\n", to, subject, body); let converted = format!("send_mail({}, {}, {}, []);\n", to, subject, body);
log::info!("Converted SEND EMAIL to: '{}'", converted); log::trace!("Converted SEND EMAIL to: '{}'", converted);
result.push_str(&converted); result.push_str(&converted);
continue; continue;
} }
@ -1397,7 +1397,7 @@ impl ScriptService {
} }
} }
log::info!("IF/THEN conversion complete, output has {} lines", result.lines().count()); log::trace!("IF/THEN conversion complete, output has {} lines", result.lines().count());
// Convert BASIC <> (not equal) to Rhai != globally // Convert BASIC <> (not equal) to Rhai != globally
@ -1417,7 +1417,7 @@ impl ScriptService {
let lines: Vec<&str> = script.lines().collect(); let lines: Vec<&str> = script.lines().collect();
let mut i = 0; let mut i = 0;
log::info!("Converting SELECT/CASE syntax to if-else chains"); log::trace!("Converting SELECT/CASE syntax to if-else chains");
// Helper function to strip 'let ' from the beginning of a line // Helper function to strip 'let ' from the beginning of a line
// This is needed because convert_if_then_syntax adds 'let' to all assignments, // This is needed because convert_if_then_syntax adds 'let' to all assignments,
@ -1441,7 +1441,7 @@ impl ScriptService {
if upper.starts_with("SELECT ") && !upper.contains(" THEN") { if upper.starts_with("SELECT ") && !upper.contains(" THEN") {
// Extract the variable being selected // Extract the variable being selected
let select_var = trimmed[7..].trim(); // Skip "SELECT " let select_var = trimmed[7..].trim(); // Skip "SELECT "
log::info!("Converting SELECT statement for variable: '{}'", select_var); log::trace!("Converting SELECT statement for variable: '{}'", select_var);
// Skip the SELECT line // Skip the SELECT line
i += 1; i += 1;

View file

@ -17,6 +17,8 @@ pub fn get_processes_to_kill() -> Vec<(&'static str, Vec<&'static str>)> {
("botserver-stack/bin/meeting", vec!["-9", "-f"]), ("botserver-stack/bin/meeting", vec!["-9", "-f"]),
("botserver-stack/bin/vector_db", vec!["-9", "-f"]), ("botserver-stack/bin/vector_db", vec!["-9", "-f"]),
("botserver-stack/bin/zitadel", vec!["-9", "-f"]), ("botserver-stack/bin/zitadel", vec!["-9", "-f"]),
("botserver-stack/bin/alm", vec!["-9", "-f"]),
("forgejo", vec!["-9", "-f"]),
("caddy", vec!["-9", "-f"]), ("caddy", vec!["-9", "-f"]),
("postgres", vec!["-9", "-f"]), ("postgres", vec!["-9", "-f"]),
("minio", vec!["-9", "-f"]), ("minio", vec!["-9", "-f"]),
@ -201,3 +203,25 @@ pub enum BotExistsResult {
} }
/// Check if the Zitadel identity provider is healthy.
///
/// Probe strategy:
/// 1. Query Zitadel's readiness endpoint (`/debug/ready`) via `curl`; a
///    successful exit status means the service reported itself ready.
/// 2. If the HTTP probe fails (curl missing, timeout, non-2xx), fall back
///    to a plain TCP connect on 127.0.0.1:9000 so we at least know the
///    port is listening.
///
/// Returns `true` when either probe succeeds.
pub fn zitadel_health_check() -> bool {
    // Preferred probe: the readiness endpoint distinguishes "port open"
    // from "actually ready to serve requests".
    if let Ok(output) = Command::new("curl")
        .args(["-f", "-s", "--connect-timeout", "2", "http://localhost:9000/debug/ready"])
        .output()
    {
        if output.status.success() {
            return true;
        }
    }

    // Fallback: raw TCP connect. This replaces the previous `nc -z` probe so
    // the health check no longer depends on netcat being installed on the
    // host; semantics are the same (connect with a ~1s timeout).
    let addr = std::net::SocketAddr::from(([127, 0, 0, 1], 9000));
    std::net::TcpStream::connect_timeout(&addr, std::time::Duration::from_secs(1)).is_ok()
}

View file

@ -294,7 +294,7 @@ impl AppConfig {
let port = std::env::var("PORT") let port = std::env::var("PORT")
.ok() .ok()
.and_then(|v| v.parse::<u16>().ok()) .and_then(|v| v.parse::<u16>().ok())
.unwrap_or_else(|| get_u16("server_port", 9000)); .unwrap_or_else(|| get_u16("server_port", 8080));
Ok(Self { Ok(Self {
drive, drive,
@ -302,7 +302,7 @@ impl AppConfig {
server: ServerConfig { server: ServerConfig {
host: get_str("server_host", "0.0.0.0"), host: get_str("server_host", "0.0.0.0"),
port, port,
base_url: config_map.get("server_base_url").cloned().unwrap_or_else(|| "http://localhost:9000".to_string()), base_url: config_map.get("server_base_url").cloned().unwrap_or_else(|| "http://localhost:8080".to_string()),
}, },
site_path: { site_path: {
ConfigManager::new(pool.clone()).get_config( ConfigManager::new(pool.clone()).get_config(
@ -332,7 +332,7 @@ impl AppConfig {
let port = std::env::var("PORT") let port = std::env::var("PORT")
.ok() .ok()
.and_then(|v| v.parse::<u16>().ok()) .and_then(|v| v.parse::<u16>().ok())
.unwrap_or(9000); .unwrap_or(8080);
Ok(Self { Ok(Self {
drive: minio, drive: minio,
@ -340,7 +340,7 @@ impl AppConfig {
server: ServerConfig { server: ServerConfig {
host: "0.0.0.0".to_string(), host: "0.0.0.0".to_string(),
port, port,
base_url: "http://localhost:9000".to_string(), base_url: "http://localhost:8080".to_string(),
}, },
site_path: "./botserver-stack/sites".to_string(), site_path: "./botserver-stack/sites".to_string(),

View file

@ -81,7 +81,7 @@ pub async fn provision_user_handler(
.config .config
.as_ref() .as_ref()
.map(|c| c.server.base_url.clone()) .map(|c| c.server.base_url.clone())
.unwrap_or_else(|| "http://localhost:8300".to_string()); .unwrap_or_else(|| "http://localhost:9000".to_string());
let provisioning = UserProvisioningService::new(state.conn.clone(), s3_client, base_url); let provisioning = UserProvisioningService::new(state.conn.clone(), s3_client, base_url);
@ -114,7 +114,7 @@ pub async fn deprovision_user_handler(
.config .config
.as_ref() .as_ref()
.map(|c| c.server.base_url.clone()) .map(|c| c.server.base_url.clone())
.unwrap_or_else(|| "http://localhost:8300".to_string()); .unwrap_or_else(|| "http://localhost:9000".to_string());
let provisioning = UserProvisioningService::new(state.conn.clone(), s3_client, base_url); let provisioning = UserProvisioningService::new(state.conn.clone(), s3_client, base_url);
@ -257,7 +257,7 @@ pub async fn check_services_status(State(state): State<Arc<AppState>>) -> impl I
let client = create_tls_client(Some(2)); let client = create_tls_client(Some(2));
if let Ok(response) = client.get("https://localhost:8300/healthz").send().await { if let Ok(response) = client.get("https://localhost:9000/healthz").send().await {
status.directory = response.status().is_success(); status.directory = response.status().is_success();
} }

View file

@ -439,7 +439,7 @@ async fn get_bot_config(state: &AppState) -> HashMap<String, String> {
fn get_base_url(state: &AppState) -> String { fn get_base_url(state: &AppState) -> String {
let _ = state; let _ = state;
"http://localhost:8300".to_string() "http://localhost:9000".to_string()
} }
async fn create_or_get_oauth_user( async fn create_or_get_oauth_user(

View file

@ -443,7 +443,7 @@ impl PackageManager {
"directory".to_string(), "directory".to_string(),
ComponentConfig { ComponentConfig {
name: "directory".to_string(), name: "directory".to_string(),
ports: vec![8300], ports: vec![9000],
dependencies: vec!["tables".to_string()], dependencies: vec!["tables".to_string()],
linux_packages: vec![], linux_packages: vec![],
macos_packages: vec![], macos_packages: vec![],
@ -455,6 +455,8 @@ impl PackageManager {
"mkdir -p {{LOGS_PATH}}".to_string(), "mkdir -p {{LOGS_PATH}}".to_string(),
], ],
post_install_cmds_linux: vec![ post_install_cmds_linux: vec![
"cat > {{CONF_PATH}}/directory/steps.yaml << 'EOF'\n---\nDatabase:\n postgres:\n Host: localhost\n Port: 5432\n Database: zitadel\n User:\n Username: zitadel\n Password: zitadel\n SSL:\n Mode: disable\n Admin:\n Username: gbuser\n Password: {{DB_PASSWORD}}\n SSL:\n Mode: disable\nEOF".to_string(),
"cat > {{CONF_PATH}}/directory/zitadel.yaml << 'EOF'\nLog:\n Level: info\n\nDatabase:\n postgres:\n Host: localhost\n Port: 5432\n Database: zitadel\n User:\n Username: zitadel\n Password: zitadel\n SSL:\n Mode: disable\n Admin:\n Username: gbuser\n Password: {{DB_PASSWORD}}\n SSL:\n Mode: disable\n\nMachine:\n Identification:\n Hostname: localhost\n WebhookAddress: http://localhost:8080\n\nPort: 8300\nExternalDomain: localhost\nExternalPort: 8300\nExternalSecure: false\n\nTLS:\n Enabled: false\nEOF".to_string(),
@ -471,12 +473,12 @@ impl PackageManager {
env_vars: HashMap::from([ env_vars: HashMap::from([
("ZITADEL_EXTERNALSECURE".to_string(), "false".to_string()), ("ZITADEL_EXTERNALSECURE".to_string(), "false".to_string()),
("ZITADEL_EXTERNALDOMAIN".to_string(), "localhost".to_string()), ("ZITADEL_EXTERNALDOMAIN".to_string(), "localhost".to_string()),
("ZITADEL_EXTERNALPORT".to_string(), "8300".to_string()), ("ZITADEL_EXTERNALPORT".to_string(), "9000".to_string()),
("ZITADEL_TLS_ENABLED".to_string(), "false".to_string()), ("ZITADEL_TLS_ENABLED".to_string(), "false".to_string()),
]), ]),
data_download_list: Vec::new(), data_download_list: Vec::new(),
exec_cmd: "ZITADEL_MASTERKEY=$(VAULT_ADDR=https://localhost:8200 VAULT_CACERT={{CONF_PATH}}/system/certificates/ca/ca.crt vault kv get -field=masterkey secret/gbo/directory 2>/dev/null || echo 'MasterkeyNeedsToHave32Characters') nohup {{BIN_PATH}}/zitadel start --config {{CONF_PATH}}/directory/zitadel.yaml --masterkeyFromEnv --tlsMode disabled > {{LOGS_PATH}}/zitadel.log 2>&1 &".to_string(), exec_cmd: "ZITADEL_MASTERKEY=$(VAULT_ADDR=https://localhost:8200 VAULT_CACERT={{CONF_PATH}}/system/certificates/ca/ca.crt vault kv get -field=masterkey secret/gbo/directory 2>/dev/null || echo 'MasterkeyNeedsToHave32Characters') nohup {{BIN_PATH}}/zitadel start --config {{CONF_PATH}}/directory/zitadel.yaml --masterkeyFromEnv --tlsMode disabled > {{LOGS_PATH}}/zitadel.log 2>&1 &".to_string(),
check_cmd: "curl -f --connect-timeout 2 -m 5 http://localhost:8300/healthz >/dev/null 2>&1".to_string(), check_cmd: "curl -f --connect-timeout 2 -m 5 http://localhost:9000/healthz >/dev/null 2>&1".to_string(),
}, },
); );
} }

View file

@ -2,6 +2,6 @@ pub mod directory_setup;
pub mod email_setup; pub mod email_setup;
pub mod vector_db_setup; pub mod vector_db_setup;
pub use directory_setup::{DirectorySetup, DefaultUser, CreateUserParams}; pub use directory_setup::{DirectorySetup, DirectoryConfig, DefaultUser, CreateUserParams};
pub use email_setup::EmailSetup; pub use email_setup::EmailSetup;
pub use vector_db_setup::VectorDbSetup; pub use vector_db_setup::VectorDbSetup;

View file

@ -224,7 +224,7 @@ impl SecretsManager {
Ok(( Ok((
s.get("url") s.get("url")
.cloned() .cloned()
.unwrap_or_else(|| "http://localhost:8300".into()), .unwrap_or_else(|| "http://localhost:9000".into()),
s.get("project_id").cloned().unwrap_or_default(), s.get("project_id").cloned().unwrap_or_default(),
s.get("client_id").cloned().unwrap_or_default(), s.get("client_id").cloned().unwrap_or_default(),
s.get("client_secret").cloned().unwrap_or_default(), s.get("client_secret").cloned().unwrap_or_default(),
@ -338,7 +338,7 @@ impl SecretsManager {
secrets.insert("password".into(), "changeme".into()); secrets.insert("password".into(), "changeme".into());
} }
SecretPaths::DIRECTORY => { SecretPaths::DIRECTORY => {
secrets.insert("url".into(), "http://localhost:8300".into()); secrets.insert("url".into(), "http://localhost:9000".into());
secrets.insert("project_id".into(), String::new()); secrets.insert("project_id".into(), String::new());
secrets.insert("client_id".into(), String::new()); secrets.insert("client_id".into(), String::new());
secrets.insert("client_secret".into(), String::new()); secrets.insert("client_secret".into(), String::new());

View file

@ -482,7 +482,7 @@ impl InternalUrls {
pub const DIRECTORY_BASE: &'static str = "http://localhost:9000"; pub const DIRECTORY_BASE: &'static str = "http://localhost:9000";
pub const DATABASE: &'static str = "postgres://localhost:5432"; pub const DATABASE: &'static str = "postgres://localhost:5432";
pub const CACHE: &'static str = "redis://localhost:6379"; pub const CACHE: &'static str = "redis://localhost:6379";
pub const DRIVE: &'static str = "https://localhost:9000"; pub const DRIVE: &'static str = "https://localhost:9100";
pub const EMAIL: &'static str = "http://localhost:8025"; pub const EMAIL: &'static str = "http://localhost:8025";
pub const LLM: &'static str = "http://localhost:8081"; pub const LLM: &'static str = "http://localhost:8081";
pub const EMBEDDING: &'static str = "http://localhost:8082"; pub const EMBEDDING: &'static str = "http://localhost:8082";

View file

@ -147,21 +147,30 @@ pub async fn login(
) )
})?; })?;
// Try to get admin token: first PAT file, then OAuth client credentials
let pat_path = std::path::Path::new("./botserver-stack/conf/directory/admin-pat.txt"); let pat_path = std::path::Path::new("./botserver-stack/conf/directory/admin-pat.txt");
let admin_token = std::fs::read_to_string(pat_path) let admin_token = std::fs::read_to_string(pat_path)
.map(|s| s.trim().to_string()) .map(|s| s.trim().to_string())
.unwrap_or_default(); .unwrap_or_default();
if admin_token.is_empty() { let admin_token = if admin_token.is_empty() {
error!("Admin PAT token not found"); info!("Admin PAT token not found, using OAuth client credentials flow");
match get_oauth_token(&http_client, &client).await {
Ok(token) => token,
Err(e) => {
error!("Failed to get OAuth token: {}", e);
return Err(( return Err((
StatusCode::INTERNAL_SERVER_ERROR, StatusCode::INTERNAL_SERVER_ERROR,
Json(ErrorResponse { Json(ErrorResponse {
error: "Authentication service not configured".to_string(), error: "Authentication service not configured".to_string(),
details: None, details: Some("OAuth client credentials not available".to_string()),
}), }),
)); ));
} }
}
} else {
admin_token
};
let search_url = format!("{}/v2/users", client.api_url()); let search_url = format!("{}/v2/users", client.api_url());
let search_body = serde_json::json!({ let search_body = serde_json::json!({
@ -765,3 +774,43 @@ async fn create_organization(
Err(format!("Failed to create organization: {}", error_text)) Err(format!("Failed to create organization: {}", error_text))
} }
} }
/// Obtain an admin access token from Zitadel using the OAuth2
/// client-credentials grant.
///
/// Used as a fallback when no admin PAT file is available. Returns the raw
/// bearer token on success, or a human-readable error message on failure.
async fn get_oauth_token(
    http_client: &reqwest::Client,
    client: &crate::directory::client::ZitadelClient,
) -> Result<String, String> {
    let token_url = format!("{}/oauth/v2/token", client.api_url());

    // Standard client-credentials form body; the extra Zitadel scope grants
    // the token access to the management-API audience.
    let form = [
        ("grant_type", "client_credentials".to_string()),
        ("client_id", client.client_id().to_string()),
        ("client_secret", client.client_secret().to_string()),
        ("scope", "openid profile email urn:zitadel:iam:org:project:id:zitadel:aud".to_string()),
    ];

    let resp = match http_client.post(&token_url).form(&form).send().await {
        Ok(r) => r,
        Err(e) => return Err(format!("Failed to request OAuth token: {}", e)),
    };

    if !resp.status().is_success() {
        let error_text = resp.text().await.unwrap_or_default();
        return Err(format!("OAuth token request failed: {}", error_text));
    }

    let token_data = resp
        .json::<serde_json::Value>()
        .await
        .map_err(|e| format!("Failed to parse OAuth token response: {}", e))?;

    match token_data.get("access_token").and_then(|t| t.as_str()) {
        Some(token) => {
            info!("Successfully obtained OAuth access token via client credentials");
            Ok(token.to_string())
        }
        None => Err("No access_token in OAuth response".to_string()),
    }
}

View file

@ -144,6 +144,23 @@ async fn create_bootstrap_admin(client: &ZitadelClient) -> Result<BootstrapResul
} }
}; };
// Create a PAT token for the admin user for API access
// Note: This requires OAuth client credentials. If not available, PAT creation
// will be skipped, but admin user can still login with username/password.
info!("Creating admin PAT token for API access...");
match client.create_pat(&user_id, "Admin API Token", None).await {
Ok(pat_token) => {
info!("Admin PAT token created successfully");
save_admin_pat_token(&pat_token);
}
Err(e) => {
// PAT creation failure is not critical - user can still login
// This happens when Zitadel doesn't have OAuth client configured yet
warn!("Failed to create admin PAT token (non-critical): {}", e);
info!("Admin user can still login with username/password. PAT can be created later via UI.");
}
}
let base_url = client.api_url(); let base_url = client.api_url();
let setup_url = format!("{}/ui/login", base_url); let setup_url = format!("{}/ui/login", base_url);
@ -250,7 +267,7 @@ fn save_setup_credentials(result: &BootstrapResult) {
Password: {:<46} Password: {:<46}
Email: {:<46} Email: {:<46}
🌐 LOGIN NOW: http://localhost:9000/suite/login ║ 🌐 LOGIN NOW: http://localhost:8080/suite/login ║
@ -280,6 +297,32 @@ fn save_setup_credentials(result: &BootstrapResult) {
} }
} }
fn save_admin_pat_token(pat_token: &str) {
// Create directory if it doesn't exist
let pat_dir = std::path::Path::new("./botserver-stack/conf/directory");
if let Err(e) = fs::create_dir_all(pat_dir) {
error!("Failed to create PAT directory: {}", e);
return;
}
let pat_path = pat_dir.join("admin-pat.txt");
match fs::write(&pat_path, pat_token) {
Ok(_) => {
#[cfg(unix)]
{
if let Err(e) = fs::set_permissions(&pat_path, fs::Permissions::from_mode(0o600)) {
warn!("Failed to set PAT file permissions: {}", e);
}
}
info!("Admin PAT token saved to: {}", pat_path.display());
}
Err(e) => {
error!("Failed to save admin PAT token: {}", e);
}
}
}
fn print_bootstrap_credentials(result: &BootstrapResult) { fn print_bootstrap_credentials(result: &BootstrapResult) {
let separator = "".repeat(60); let separator = "".repeat(60);
@ -313,7 +356,7 @@ fn print_bootstrap_credentials(result: &BootstrapResult) {
println!("{:^60}", ""); println!("{:^60}", "");
println!("{:56}", "🌐 LOGIN NOW:"); println!("{:56}", "🌐 LOGIN NOW:");
println!("{:^60}", ""); println!("{:^60}", "");
println!("{:56}", "http://localhost:9000/suite/login"); println!("{:56}", "http://localhost:8080/suite/login");
println!("{:^60}", ""); println!("{:^60}", "");
println!("{}", separator); println!("{}", separator);
println!("{:^60}", ""); println!("{:^60}", "");

View file

@ -21,6 +21,8 @@ pub struct ZitadelClient {
http_client: reqwest::Client, http_client: reqwest::Client,
access_token: Arc<RwLock<Option<String>>>, access_token: Arc<RwLock<Option<String>>>,
pat_token: Option<String>, pat_token: Option<String>,
/// Username and password for password grant OAuth flow
password_credentials: Option<(String, String)>,
} }
impl ZitadelClient { impl ZitadelClient {
@ -35,6 +37,28 @@ impl ZitadelClient {
http_client, http_client,
access_token: Arc::new(RwLock::new(None)), access_token: Arc::new(RwLock::new(None)),
pat_token: None, pat_token: None,
password_credentials: None,
})
}
/// Create a client that uses password grant OAuth flow
/// This is used for initial bootstrap with Zitadel's default admin user
pub fn with_password_grant(
config: ZitadelConfig,
username: String,
password: String,
) -> Result<Self> {
let http_client = reqwest::Client::builder()
.timeout(std::time::Duration::from_secs(30))
.build()
.map_err(|e| anyhow!("Failed to create HTTP client: {}", e))?;
Ok(Self {
config,
http_client,
access_token: Arc::new(RwLock::new(None)),
pat_token: None,
password_credentials: Some((username, password)),
}) })
} }
@ -49,6 +73,7 @@ impl ZitadelClient {
http_client, http_client,
access_token: Arc::new(RwLock::new(None)), access_token: Arc::new(RwLock::new(None)),
pat_token: Some(pat_token), pat_token: Some(pat_token),
password_credentials: None,
}) })
} }
@ -108,13 +133,24 @@ impl ZitadelClient {
let token_url = format!("{}/oauth/v2/token", self.config.api_url); let token_url = format!("{}/oauth/v2/token", self.config.api_url);
log::info!("Requesting access token from: {}", token_url); log::info!("Requesting access token from: {}", token_url);
let params = [ // Build params dynamically based on auth method
("grant_type", "client_credentials"), let mut params: Vec<(&str, String)> = vec![
("client_id", &self.config.client_id), ("client_id", self.config.client_id.clone()),
("client_secret", &self.config.client_secret), ("client_secret", self.config.client_secret.clone()),
("scope", "openid profile email"),
]; ];
if let Some((username, password)) = &self.password_credentials {
// Use password grant flow
params.push(("grant_type", "password".to_string()));
params.push(("username", username.clone()));
params.push(("password", password.clone()));
params.push(("scope", "openid profile email urn:zitadel:iam:org:project:id:zitadel:aud".to_string()));
} else {
// Use client credentials flow
params.push(("grant_type", "client_credentials".to_string()));
params.push(("scope", "openid profile email".to_string()));
}
let response = self let response = self
.http_client .http_client
.post(&token_url) .post(&token_url)
@ -533,4 +569,47 @@ impl ZitadelClient {
Ok(()) Ok(())
} }
pub async fn create_pat(&self, user_id: &str, display_name: &str, expiration_date: Option<&str>) -> Result<String> {
let token = self.get_access_token().await?;
let url = format!("{}/v2/users/{}/pat", self.config.api_url, user_id);
let body = if let Some(expiry) = expiration_date {
serde_json::json!({
"displayName": display_name,
"expirationDate": expiry
})
} else {
serde_json::json!({
"displayName": display_name
})
};
let response = self
.http_client
.post(&url)
.bearer_auth(&token)
.json(&body)
.send()
.await
.map_err(|e| anyhow!("Failed to create PAT: {}", e))?;
if !response.status().is_success() {
let error_text = response.text().await.unwrap_or_default();
return Err(anyhow!("Failed to create PAT: {}", error_text));
}
let data: serde_json::Value = response
.json()
.await
.map_err(|e| anyhow!("Failed to parse PAT response: {}", e))?;
let pat_token = data
.get("token")
.and_then(|t| t.as_str())
.ok_or_else(|| anyhow!("No token in PAT response"))?
.to_string();
Ok(pat_token)
}
} }

View file

@ -96,7 +96,7 @@ impl DriveMonitor {
let mut file_states = self.file_states.write().await; let mut file_states = self.file_states.write().await;
let count = states.len(); let count = states.len();
*file_states = states; *file_states = states;
info!( trace!(
"[DRIVE_MONITOR] Loaded {} file states from disk for bot {}", "[DRIVE_MONITOR] Loaded {} file states from disk for bot {}",
count, count,
self.bot_id self.bot_id
@ -236,7 +236,7 @@ impl DriveMonitor {
.store(true, std::sync::atomic::Ordering::SeqCst); .store(true, std::sync::atomic::Ordering::SeqCst);
trace!("start_monitoring: calling check_for_changes..."); trace!("start_monitoring: calling check_for_changes...");
info!("Calling initial check_for_changes..."); trace!("Calling initial check_for_changes...");
match tokio::time::timeout(Duration::from_secs(300), self.check_for_changes()).await { match tokio::time::timeout(Duration::from_secs(300), self.check_for_changes()).await {
Ok(Ok(_)) => { Ok(Ok(_)) => {
@ -262,15 +262,15 @@ impl DriveMonitor {
// Force enable periodic monitoring regardless of initial check result // Force enable periodic monitoring regardless of initial check result
self.is_processing.store(true, std::sync::atomic::Ordering::SeqCst); self.is_processing.store(true, std::sync::atomic::Ordering::SeqCst);
info!("Forced is_processing to true for periodic monitoring"); trace!("Forced is_processing to true for periodic monitoring");
let self_clone = self.clone(); // Don't wrap in Arc::new - that creates a copy let self_clone = self.clone(); // Don't wrap in Arc::new - that creates a copy
tokio::spawn(async move { tokio::spawn(async move {
let mut consecutive_processing_failures = 0; let mut consecutive_processing_failures = 0;
info!("Starting periodic monitoring loop for bot {}", self_clone.bot_id); trace!("Starting periodic monitoring loop for bot {}", self_clone.bot_id);
let is_processing_state = self_clone.is_processing.load(std::sync::atomic::Ordering::SeqCst); let is_processing_state = self_clone.is_processing.load(std::sync::atomic::Ordering::SeqCst);
info!("is_processing state at loop start: {} for bot {}", is_processing_state, self_clone.bot_id); trace!("is_processing state at loop start: {} for bot {}", is_processing_state, self_clone.bot_id);
while self_clone while self_clone
.is_processing .is_processing
@ -304,7 +304,7 @@ impl DriveMonitor {
self_clone.consecutive_failures.swap(0, Ordering::Relaxed); self_clone.consecutive_failures.swap(0, Ordering::Relaxed);
consecutive_processing_failures = 0; consecutive_processing_failures = 0;
if prev_failures > 0 { if prev_failures > 0 {
info!("S3/MinIO recovered for bucket {} after {} failures", trace!("S3/MinIO recovered for bucket {} after {} failures",
self_clone.bucket_name, prev_failures); self_clone.bucket_name, prev_failures);
} }
} }
@ -336,15 +336,15 @@ impl DriveMonitor {
} }
} }
info!("Monitoring loop ended for bot {}", self_clone.bot_id); trace!("Monitoring loop ended for bot {}", self_clone.bot_id);
}); });
info!("DriveMonitor started for bot {}", self.bot_id); trace!("DriveMonitor started for bot {}", self.bot_id);
Ok(()) Ok(())
} }
pub async fn stop_monitoring(&self) -> Result<(), Box<dyn std::error::Error + Send + Sync>> { pub async fn stop_monitoring(&self) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
info!("Stopping DriveMonitor for bot {}", self.bot_id); trace!("Stopping DriveMonitor for bot {}", self.bot_id);
self.is_processing self.is_processing
.store(false, std::sync::atomic::Ordering::SeqCst); .store(false, std::sync::atomic::Ordering::SeqCst);
@ -352,12 +352,12 @@ impl DriveMonitor {
self.file_states.write().await.clear(); self.file_states.write().await.clear();
self.consecutive_failures.store(0, Ordering::Relaxed); self.consecutive_failures.store(0, Ordering::Relaxed);
info!("DriveMonitor stopped for bot {}", self.bot_id); trace!("DriveMonitor stopped for bot {}", self.bot_id);
Ok(()) Ok(())
} }
pub fn spawn(self: Arc<Self>) -> tokio::task::JoinHandle<()> { pub fn spawn(self: Arc<Self>) -> tokio::task::JoinHandle<()> {
tokio::spawn(async move { tokio::spawn(async move {
info!( trace!(
"Drive Monitor service started for bucket: {}", "Drive Monitor service started for bucket: {}",
self.bucket_name self.bucket_name
); );
@ -387,7 +387,7 @@ impl DriveMonitor {
Ok(_) => { Ok(_) => {
let prev_failures = self.consecutive_failures.swap(0, Ordering::Relaxed); let prev_failures = self.consecutive_failures.swap(0, Ordering::Relaxed);
if prev_failures > 0 { if prev_failures > 0 {
info!("S3/MinIO recovered for bucket {} after {} failures", trace!("S3/MinIO recovered for bucket {} after {} failures",
self.bucket_name, prev_failures); self.bucket_name, prev_failures);
} }
} }
@ -634,7 +634,7 @@ impl DriveMonitor {
let normalized_new_value = Self::normalize_config_value(new_value); let normalized_new_value = Self::normalize_config_value(new_value);
if normalized_old_value != normalized_new_value { if normalized_old_value != normalized_new_value {
info!( trace!(
"Detected change in {} (old: {}, new: {})", "Detected change in {} (old: {}, new: {})",
key, normalized_old_value, normalized_new_value key, normalized_old_value, normalized_new_value
); );
@ -656,7 +656,7 @@ impl DriveMonitor {
} }
if llm_url_changed { if llm_url_changed {
info!("Broadcasting LLM configuration refresh"); trace!("Broadcasting LLM configuration refresh");
let effective_url = if !new_llm_url.is_empty() { let effective_url = if !new_llm_url.is_empty() {
new_llm_url new_llm_url
} else { } else {
@ -672,7 +672,7 @@ impl DriveMonitor {
.unwrap_or_default() .unwrap_or_default()
}; };
info!( trace!(
"LLM configuration changed to: URL={}, Model={}", "LLM configuration changed to: URL={}, Model={}",
effective_url, effective_model effective_url, effective_model
); );
@ -697,7 +697,7 @@ impl DriveMonitor {
Some(effective_endpoint_path), Some(effective_endpoint_path),
) )
.await; .await;
info!("Dynamic LLM provider updated with new configuration"); trace!("Dynamic LLM provider updated with new configuration");
} else { } else {
warn!("Dynamic LLM provider not available - config change ignored"); warn!("Dynamic LLM provider not available - config change ignored");
} }
@ -788,7 +788,7 @@ impl DriveMonitor {
client: &Client, client: &Client,
file_path: &str, file_path: &str,
) -> Result<(), Box<dyn Error + Send + Sync>> { ) -> Result<(), Box<dyn Error + Send + Sync>> {
info!( trace!(
"Fetching object from Drive: bucket={}, key={}", "Fetching object from Drive: bucket={}, key={}",
&self.bucket_name, file_path &self.bucket_name, file_path
); );
@ -800,7 +800,7 @@ impl DriveMonitor {
.await .await
{ {
Ok(res) => { Ok(res) => {
info!( trace!(
"Successfully fetched object from Drive: bucket={}, key={}, size={}", "Successfully fetched object from Drive: bucket={}, key={}, size={}",
&self.bucket_name, &self.bucket_name,
file_path, file_path,
@ -838,22 +838,26 @@ impl DriveMonitor {
let tool_name_clone = tool_name.clone(); let tool_name_clone = tool_name.clone();
let source_content_clone = source_content.clone(); let source_content_clone = source_content.clone();
let bot_id = self.bot_id; let bot_id = self.bot_id;
tokio::task::spawn_blocking(move || { let elapsed_ms = tokio::task::spawn_blocking(move || {
std::fs::create_dir_all(&work_dir_clone)?; std::fs::create_dir_all(&work_dir_clone)?;
let local_source_path = format!("{}/{}.bas", work_dir_clone, tool_name_clone); let local_source_path = format!("{}/{}.bas", work_dir_clone, tool_name_clone);
std::fs::write(&local_source_path, &source_content_clone)?; std::fs::write(&local_source_path, &source_content_clone)?;
let mut compiler = BasicCompiler::new(state_clone, bot_id); let mut compiler = BasicCompiler::new(state_clone, bot_id);
let start_time = std::time::Instant::now();
let result = compiler.compile_file(&local_source_path, &work_dir_str)?; let result = compiler.compile_file(&local_source_path, &work_dir_str)?;
let elapsed = start_time.elapsed().as_millis();
if let Some(mcp_tool) = result.mcp_tool { if let Some(mcp_tool) = result.mcp_tool {
info!( trace!(
"MCP tool definition generated with {} parameters", "MCP tool definition generated with {} parameters",
mcp_tool.input_schema.properties.len() mcp_tool.input_schema.properties.len()
); );
} }
Ok::<(), Box<dyn Error + Send + Sync>>(()) Ok::<u128, Box<dyn Error + Send + Sync>>(elapsed)
}) })
.await??; .await??;
info!("Successfully compiled {} in {} ms", tool_name, elapsed_ms);
// Check for USE WEBSITE commands and trigger immediate crawling // Check for USE WEBSITE commands and trigger immediate crawling
if source_content.contains("USE WEBSITE") { if source_content.contains("USE WEBSITE") {
self.trigger_immediate_website_crawl(&source_content).await?; self.trigger_immediate_website_crawl(&source_content).await?;
@ -892,7 +896,7 @@ impl DriveMonitor {
let refresh_str = cap.get(2).map(|m| m.as_str()).unwrap_or("1m"); let refresh_str = cap.get(2).map(|m| m.as_str()).unwrap_or("1m");
info!("Found USE WEBSITE command for {}, checking if crawl needed", url_str); trace!("Found USE WEBSITE command for {}, checking if crawl needed", url_str);
// Check if crawl is already in progress or recently completed // Check if crawl is already in progress or recently completed
let mut conn = self.state.conn.get() let mut conn = self.state.conn.get()
@ -1098,13 +1102,13 @@ impl DriveMonitor {
if is_new || is_modified { if is_new || is_modified {
if path.to_lowercase().ends_with(".pdf") { if path.to_lowercase().ends_with(".pdf") {
pdf_files_found += 1; pdf_files_found += 1;
info!( trace!(
"Detected {} PDF in .gbkb: {} (will extract text for vectordb)", "Detected {} PDF in .gbkb: {} (will extract text for vectordb)",
if is_new { "new" } else { "changed" }, if is_new { "new" } else { "changed" },
path path
); );
} else { } else {
info!( trace!(
"Detected {} in .gbkb: {}", "Detected {} in .gbkb: {}",
if is_new { "new file" } else { "change" }, if is_new { "new file" } else { "change" },
path path
@ -1148,7 +1152,7 @@ impl DriveMonitor {
#[cfg(any(feature = "research", feature = "llm"))] #[cfg(any(feature = "research", feature = "llm"))]
{ {
if !is_embedding_server_ready() { if !is_embedding_server_ready() {
info!("Embedding server not ready, deferring KB indexing for {}", kb_folder_path.display()); trace!("Embedding server not ready, deferring KB indexing for {}", kb_folder_path.display());
continue; continue;
} }
@ -1178,7 +1182,7 @@ impl DriveMonitor {
let kb_key_owned = kb_key.clone(); let kb_key_owned = kb_key.clone();
tokio::spawn(async move { tokio::spawn(async move {
info!( trace!(
"Triggering KB indexing for folder: {} (PDF text extraction enabled)", "Triggering KB indexing for folder: {} (PDF text extraction enabled)",
kb_folder_owned.display() kb_folder_owned.display()
); );
@ -1246,7 +1250,7 @@ impl DriveMonitor {
} }
if files_processed > 0 { if files_processed > 0 {
info!( trace!(
"Processed {} .gbkb files (including {} PDFs for text extraction)", "Processed {} .gbkb files (including {} PDFs for text extraction)",
files_processed, pdf_files_found files_processed, pdf_files_found
); );
@ -1264,7 +1268,7 @@ impl DriveMonitor {
}); });
for path in paths_to_remove { for path in paths_to_remove {
info!("Detected deletion in .gbkb: {}", path); trace!("Detected deletion in .gbkb: {}", path);
file_states.remove(&path); file_states.remove(&path);
let path_parts: Vec<&str> = path.split('/').collect(); let path_parts: Vec<&str> = path.split('/').collect();
@ -1321,7 +1325,7 @@ impl DriveMonitor {
let bytes = response.body.collect().await?.into_bytes(); let bytes = response.body.collect().await?.into_bytes();
tokio::fs::write(&local_path, bytes).await?; tokio::fs::write(&local_path, bytes).await?;
info!( trace!(
"Downloaded .gbkb file {} to {}", "Downloaded .gbkb file {} to {}",
file_path, file_path,
local_path.display() local_path.display()

View file

@ -1,7 +1,7 @@
use crate::basic::compiler::BasicCompiler; use crate::basic::compiler::BasicCompiler;
use crate::core::shared::state::AppState; use crate::core::shared::state::AppState;
use diesel::prelude::*; use diesel::prelude::*;
use log::{debug, error, info, warn}; use log::{debug, error, info, trace, warn};
use std::collections::HashMap; use std::collections::HashMap;
use std::error::Error; use std::error::Error;
use std::path::{Path, PathBuf}; use std::path::{Path, PathBuf};
@ -38,7 +38,7 @@ impl LocalFileMonitor {
// Use /opt/gbo/data as the base directory for source files // Use /opt/gbo/data as the base directory for source files
let data_dir = PathBuf::from("/opt/gbo/data"); let data_dir = PathBuf::from("/opt/gbo/data");
info!("Initializing with data_dir: {:?}, work_root: {:?}", data_dir, work_root); trace!("Initializing with data_dir: {:?}, work_root: {:?}", data_dir, work_root);
Self { Self {
state, state,
@ -50,7 +50,7 @@ impl LocalFileMonitor {
} }
pub async fn start_monitoring(&self) -> Result<(), Box<dyn Error + Send + Sync>> { pub async fn start_monitoring(&self) -> Result<(), Box<dyn Error + Send + Sync>> {
info!("Starting local file monitor for /opt/gbo/data/*.gbai directories"); trace!("Starting local file monitor for /opt/gbo/data/*.gbai directories");
// Create data directory if it doesn't exist // Create data directory if it doesn't exist
if let Err(e) = tokio::fs::create_dir_all(&self.data_dir).await { if let Err(e) = tokio::fs::create_dir_all(&self.data_dir).await {
@ -68,12 +68,12 @@ impl LocalFileMonitor {
monitor.monitoring_loop().await; monitor.monitoring_loop().await;
}); });
info!("Local file monitor started"); trace!("Local file monitor started");
Ok(()) Ok(())
} }
async fn monitoring_loop(&self) { async fn monitoring_loop(&self) {
info!("Starting monitoring loop"); trace!("Starting monitoring loop");
// Try to create a file system watcher // Try to create a file system watcher
let (tx, mut rx) = tokio::sync::mpsc::channel(100); let (tx, mut rx) = tokio::sync::mpsc::channel(100);
@ -105,7 +105,7 @@ impl LocalFileMonitor {
return; return;
} }
info!("Watching directory: {:?}", self.data_dir); trace!("Watching directory: {:?}", self.data_dir);
while self.is_processing.load(Ordering::SeqCst) { while self.is_processing.load(Ordering::SeqCst) {
tokio::time::sleep(Duration::from_secs(5)).await; tokio::time::sleep(Duration::from_secs(5)).await;
@ -116,7 +116,7 @@ impl LocalFileMonitor {
EventKind::Create(_) | EventKind::Modify(_) | EventKind::Any => { EventKind::Create(_) | EventKind::Modify(_) | EventKind::Any => {
for path in &event.paths { for path in &event.paths {
if self.is_gbdialog_file(path) { if self.is_gbdialog_file(path) {
info!("Detected change: {:?}", path); trace!("Detected change: {:?}", path);
if let Err(e) = self.compile_local_file(path).await { if let Err(e) = self.compile_local_file(path).await {
error!("Failed to compile {:?}: {}", path, e); error!("Failed to compile {:?}: {}", path, e);
} }
@ -126,7 +126,7 @@ impl LocalFileMonitor {
EventKind::Remove(_) => { EventKind::Remove(_) => {
for path in &event.paths { for path in &event.paths {
if self.is_gbdialog_file(path) { if self.is_gbdialog_file(path) {
info!("File removed: {:?}", path); trace!("File removed: {:?}", path);
self.remove_file_state(path).await; self.remove_file_state(path).await;
} }
} }
@ -141,11 +141,11 @@ impl LocalFileMonitor {
} }
} }
info!("Monitoring loop ended"); trace!("Monitoring loop ended");
} }
async fn polling_loop(&self) { async fn polling_loop(&self) {
info!("Using polling fallback (checking every 10s)"); trace!("Using polling fallback (checking every 10s)");
while self.is_processing.load(Ordering::SeqCst) { while self.is_processing.load(Ordering::SeqCst) {
tokio::time::sleep(Duration::from_secs(10)).await; tokio::time::sleep(Duration::from_secs(10)).await;
@ -231,7 +231,7 @@ impl LocalFileMonitor {
}; };
if should_compile { if should_compile {
info!("Compiling: {:?}", path); trace!("Compiling: {:?}", path);
if let Err(e) = self.compile_local_file(&path).await { if let Err(e) = self.compile_local_file(&path).await {
error!("Failed to compile {:?}: {}", path, e); error!("Failed to compile {:?}: {}", path, e);
} }
@ -285,7 +285,7 @@ impl LocalFileMonitor {
.map_err(|e| format!("Failed to get bot_id for '{}': {}", bot_name_clone, e))? .map_err(|e| format!("Failed to get bot_id for '{}': {}", bot_name_clone, e))?
}; };
tokio::task::spawn_blocking(move || { let elapsed_ms = tokio::task::spawn_blocking(move || {
std::fs::create_dir_all(&work_dir_clone)?; std::fs::create_dir_all(&work_dir_clone)?;
let local_source_path = work_dir_clone.join(format!("{}.bas", tool_name_clone)); let local_source_path = work_dir_clone.join(format!("{}.bas", tool_name_clone));
std::fs::write(&local_source_path, &source_content_clone)?; std::fs::write(&local_source_path, &source_content_clone)?;
@ -294,19 +294,21 @@ impl LocalFileMonitor {
.ok_or_else(|| "Invalid UTF-8 in local source path".to_string())?; .ok_or_else(|| "Invalid UTF-8 in local source path".to_string())?;
let work_dir_str = work_dir_clone.to_str() let work_dir_str = work_dir_clone.to_str()
.ok_or_else(|| "Invalid UTF-8 in work directory path".to_string())?; .ok_or_else(|| "Invalid UTF-8 in work directory path".to_string())?;
let start_time = std::time::Instant::now();
let result = compiler.compile_file(local_source_str, work_dir_str)?; let result = compiler.compile_file(local_source_str, work_dir_str)?;
let elapsed_ms = start_time.elapsed().as_millis();
if let Some(mcp_tool) = result.mcp_tool { if let Some(mcp_tool) = result.mcp_tool {
info!( trace!(
"[LOCAL_MONITOR] MCP tool generated with {} parameters for bot {}", "[LOCAL_MONITOR] MCP tool generated with {} parameters for bot {}",
mcp_tool.input_schema.properties.len(), mcp_tool.input_schema.properties.len(),
bot_name_clone bot_name_clone
); );
} }
Ok::<(), Box<dyn Error + Send + Sync>>(()) Ok::<u128, Box<dyn Error + Send + Sync>>(elapsed_ms)
}) })
.await??; .await??;
info!("Successfully compiled: {:?}", file_path); info!("Successfully compiled: {:?} in {} ms", file_path, elapsed_ms);
Ok(()) Ok(())
} }
@ -317,7 +319,7 @@ impl LocalFileMonitor {
} }
pub async fn stop_monitoring(&self) { pub async fn stop_monitoring(&self) {
info!("Stopping local file monitor"); trace!("Stopping local file monitor");
self.is_processing.store(false, Ordering::SeqCst); self.is_processing.store(false, Ordering::SeqCst);
self.file_states.write().await.clear(); self.file_states.write().await.clear();
} }

View file

@ -1,5 +1,4 @@
use crate::core::config::ConfigManager; use crate::core::config::ConfigManager;
use crate::core::kb::embedding_generator::set_embedding_server_ready;
use crate::core::shared::memory_monitor::{log_jemalloc_stats, MemoryStats}; use crate::core::shared::memory_monitor::{log_jemalloc_stats, MemoryStats};
use crate::security::command_guard::SafeCommand; use crate::security::command_guard::SafeCommand;
use crate::core::shared::models::schema::bots::dsl::*; use crate::core::shared::models::schema::bots::dsl::*;
@ -7,7 +6,6 @@ use crate::core::shared::state::AppState;
use diesel::prelude::*; use diesel::prelude::*;
use log::{error, info, trace, warn}; use log::{error, info, trace, warn};
use reqwest; use reqwest;
use std::fmt::Write;
use std::sync::Arc; use std::sync::Arc;
use tokio; use tokio;
@ -95,9 +93,10 @@ pub async fn ensure_llama_servers_running(
}; };
// For llama-server startup, use path relative to botserver root // For llama-server startup, use path relative to botserver root
// The models are in ./data/llm/ and the llama-server runs from botserver root // The models are in <stack_path>/data/llm/ and the llama-server runs from botserver root
let llm_model_path = format!("./data/llm/{}", llm_model); let stack_path = std::env::var("BOTSERVER_STACK_PATH").unwrap_or_else(|_| "./botserver-stack".to_string());
let embedding_model_path = format!("./data/llm/{}", embedding_model); let llm_model_path = format!("{stack_path}/data/llm/{}", llm_model);
let embedding_model_path = format!("{stack_path}/data/llm/{}", embedding_model);
if !llm_server_enabled { if !llm_server_enabled {
info!("Local LLM server management disabled (llm-server=false). Using external endpoints."); info!("Local LLM server management disabled (llm-server=false). Using external endpoints.");
info!(" LLM URL: {llm_url}"); info!(" LLM URL: {llm_url}");
@ -188,6 +187,37 @@ pub async fn ensure_llama_servers_running(
} else if embedding_model.is_empty() { } else if embedding_model.is_empty() {
info!("EMBEDDING_MODEL not set, skipping Embedding server"); info!("EMBEDDING_MODEL not set, skipping Embedding server");
} }
// Start servers in background - don't block HTTP server startup
if !tasks.is_empty() {
info!("LLM servers starting in background (non-blocking mode)");
tokio::spawn(async move {
for task in tasks {
if let Err(e) = task.await {
error!("LLM server task failed: {}", e);
}
}
info!("LLM server startup tasks completed");
});
}
// Return immediately - don't wait for servers to be ready
info!("LLM server initialization initiated (will start in background)");
info!("HTTP server can start without waiting for LLM servers");
trace!("ensure_llama_servers_running returning early (non-blocking)");
let end_mem = MemoryStats::current();
trace!(
"[LLM_LOCAL] ensure_llama_servers_running END (non-blocking), RSS={} (total delta={})",
MemoryStats::format_bytes(end_mem.rss_bytes),
MemoryStats::format_bytes(end_mem.rss_bytes.saturating_sub(start_mem.rss_bytes))
);
log_jemalloc_stats();
trace!("ensure_llama_servers_running EXIT OK (non-blocking)");
return Ok(());
// OLD BLOCKING CODE - REMOVED TO PREVENT HTTP SERVER BLOCKING
/*
for task in tasks { for task in tasks {
task.await??; task.await??;
} }
@ -202,7 +232,7 @@ pub async fn ensure_llama_servers_running(
let mut llm_ready = llm_running || llm_model.is_empty(); let mut llm_ready = llm_running || llm_model.is_empty();
let mut embedding_ready = embedding_running || embedding_model.is_empty(); let mut embedding_ready = embedding_running || embedding_model.is_empty();
let mut attempts = 0; let mut attempts = 0;
let max_attempts = 120; let max_attempts = 15; // Reduced from 120 to 15 (30 seconds instead of 240)
while attempts < max_attempts && (!llm_ready || !embedding_ready) { while attempts < max_attempts && (!llm_ready || !embedding_ready) {
trace!("Wait loop iteration {}", attempts); trace!("Wait loop iteration {}", attempts);
tokio::time::sleep(tokio::time::Duration::from_secs(2)).await; tokio::time::sleep(tokio::time::Duration::from_secs(2)).await;
@ -301,6 +331,7 @@ pub async fn ensure_llama_servers_running(
} }
Err(error_msg.into()) Err(error_msg.into())
} }
*/ // END OF OLD BLOCKING CODE
} }
pub async fn is_server_running(url: &str) -> bool { pub async fn is_server_running(url: &str) -> bool {
let client = reqwest::Client::builder() let client = reqwest::Client::builder()
@ -377,8 +408,8 @@ pub fn start_llm_server(
let gpu_layers = config_manager let gpu_layers = config_manager
.get_config(&default_bot_id, "llm-server-gpu-layers", None) .get_config(&default_bot_id, "llm-server-gpu-layers", None)
.unwrap_or_else(|_| "20".to_string()); .unwrap_or_else(|_| "0".to_string());
let gpu_layers = if gpu_layers.is_empty() { "20".to_string() } else { gpu_layers }; let gpu_layers = if gpu_layers.is_empty() { "0".to_string() } else { gpu_layers };
let reasoning_format = config_manager let reasoning_format = config_manager
.get_config(&default_bot_id, "llm-server-reasoning-format", None) .get_config(&default_bot_id, "llm-server-reasoning-format", None)
@ -501,7 +532,7 @@ pub async fn start_embedding_server(
.arg("--host").arg("0.0.0.0") .arg("--host").arg("0.0.0.0")
.arg("--port").arg(port) .arg("--port").arg(port)
.arg("--embedding") .arg("--embedding")
.arg("--n-gpu-layers").arg("99") .arg("--n-gpu-layers").arg("0")
.arg("--verbose"); .arg("--verbose");
if !cfg!(windows) { if !cfg!(windows) {

View file

@ -607,8 +607,11 @@ pub async fn create_app_state(
fn init_directory_service() -> Result<(Arc<Mutex<crate::directory::AuthService>>, crate::directory::ZitadelConfig), std::io::Error> { fn init_directory_service() -> Result<(Arc<Mutex<crate::directory::AuthService>>, crate::directory::ZitadelConfig), std::io::Error> {
let zitadel_config = { let zitadel_config = {
// Try to load from directory_config.json first // Try to load from directory_config.json first
let config_path = "./config/directory_config.json"; // Use same path as DirectorySetup saves to (BOTSERVER_STACK_PATH/conf/system/directory_config.json)
if let Ok(content) = std::fs::read_to_string(config_path) { let stack_path = std::env::var("BOTSERVER_STACK_PATH")
.unwrap_or_else(|_| "./botserver-stack".to_string());
let config_path = format!("{}/conf/system/directory_config.json", stack_path);
if let Ok(content) = std::fs::read_to_string(&config_path) {
if let Ok(json) = serde_json::from_str::<serde_json::Value>(&content) { if let Ok(json) = serde_json::from_str::<serde_json::Value>(&content) {
let base_url = json let base_url = json
.get("base_url") .get("base_url")

View file

@ -101,14 +101,14 @@ impl CorsConfig {
Self { Self {
allowed_origins: vec![ allowed_origins: vec![
"http://localhost:3000".to_string(), "http://localhost:3000".to_string(),
"http://localhost:8080".to_string(),
"http://localhost:9000".to_string(), "http://localhost:9000".to_string(),
"http://localhost:8300".to_string(),
"http://127.0.0.1:3000".to_string(), "http://127.0.0.1:3000".to_string(),
"http://127.0.0.1:8080".to_string(),
"http://127.0.0.1:9000".to_string(), "http://127.0.0.1:9000".to_string(),
"http://127.0.0.1:8300".to_string(),
"https://localhost:3000".to_string(), "https://localhost:3000".to_string(),
"https://localhost:8080".to_string(),
"https://localhost:9000".to_string(), "https://localhost:9000".to_string(),
"https://localhost:8300".to_string(),
], ],
allowed_methods: vec![ allowed_methods: vec![
Method::GET, Method::GET,

View file

@ -35,9 +35,9 @@ impl TlsIntegration {
services.insert( services.insert(
"api".to_string(), "api".to_string(),
ServiceUrls { ServiceUrls {
original: "http://localhost:9000".to_string(), original: "http://localhost:8080".to_string(),
secure: "https://localhost:8443".to_string(), secure: "https://localhost:8443".to_string(),
port: 9000, port: 8080,
tls_port: 8443, tls_port: 8443,
}, },
); );
@ -95,10 +95,10 @@ impl TlsIntegration {
services.insert( services.insert(
"minio".to_string(), "minio".to_string(),
ServiceUrls { ServiceUrls {
original: "https://localhost:9000".to_string(), original: "https://localhost:9100".to_string(),
secure: "https://localhost:9000".to_string(), secure: "https://localhost:9100".to_string(),
port: 9000, port: 9100,
tls_port: 9000, tls_port: 9100,
}, },
); );

View file

@ -28,8 +28,8 @@ pub struct ZitadelAuthConfig {
impl Default for ZitadelAuthConfig { impl Default for ZitadelAuthConfig {
fn default() -> Self { fn default() -> Self {
Self { Self {
issuer_url: "http://localhost:8300".to_string(), issuer_url: "http://localhost:9000".to_string(),
api_url: "http://localhost:8300".to_string(), api_url: "http://localhost:9000".to_string(),
client_id: String::new(), client_id: String::new(),
client_secret: String::new(), client_secret: String::new(),
project_id: String::new(), project_id: String::new(),