feat(bootstrap): enable bootstrap and package_manager modules

Uncommented the bootstrap and package_manager directories in add-req.sh so they are included in the build process. Refactored the bootstrap module for cleaner initialization and improved component-handling logic.
Rodrigo Rodriguez (Pragmatismo) 2025-11-08 07:04:44 -03:00
parent 332dbe7420
commit 25daaa8a9e
14 changed files with 1627 additions and 1353 deletions
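At a high level, the startup flow this commit enables looks roughly like the following sketch (simplified from the entrypoint changes further down; types come from the crate's bootstrap and config modules, and error handling plus progress reporting are omitted):

// Simplified sketch of the startup sequence enabled by this commit;
// the real wiring lives in the entrypoint diff below.
async fn boot(install_mode: InstallMode, tenant: Option<String>) -> anyhow::Result<()> {
    // new() auto-starts the Tables (Postgres) component when it is not running.
    let mut bootstrap = BootstrapManager::new(install_mode, tenant).await;
    // bootstrap() installs required components and applies embedded migrations.
    let cfg = bootstrap.bootstrap().await?;
    // start_all() starts every installed component through the package manager.
    bootstrap.start_all()?;
    // Template *.gbai folders are mirrored into Drive (S3) buckets.
    bootstrap.upload_templates_to_drive(&cfg).await?;
    Ok(())
}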


@@ -24,7 +24,7 @@ dirs=(
   #"auth"
   #"automation"
   #"basic"
-  #"bootstrap"
+  "bootstrap"
   "bot"
   #"channels"
   #"config"
@@ -36,7 +36,7 @@ dirs=(
   "llm"
   #"llm_models"
   #"org"
-  #"package_manager"
+  "package_manager"
   #"riot_compiler"
   #"session"
   "shared"


@@ -14,567 +14,417 @@ use std::path::Path;
use std::process::Command;
use std::sync::{Arc, Mutex};

pub struct ComponentInfo {
    pub name: &'static str,
}

pub struct BootstrapManager {
    pub install_mode: InstallMode,
    pub tenant: Option<String>,
    pub s3_client: Client,
}

impl BootstrapManager {
    fn is_postgres_running() -> bool {
        match Command::new("pg_isready").arg("-q").status() {
            Ok(status) => status.success(),
            Err(_) => {
                // Fallback check with pgrep when pg_isready is not available
                Command::new("pgrep")
                    .arg("postgres")
                    .output()
                    .map(|o| !o.stdout.is_empty())
                    .unwrap_or(false)
            }
        }
    }

    pub async fn new(install_mode: InstallMode, tenant: Option<String>) -> Self {
        trace!("Initializing BootstrapManager with mode {:?} and tenant {:?}", install_mode, tenant);
        if !Self::is_postgres_running() {
            let pm = PackageManager::new(install_mode.clone(), tenant.clone())
                .expect("Failed to initialize PackageManager");
            if let Err(e) = pm.start("tables") {
                error!("Failed to start Tables server component automatically: {}", e);
                panic!("Database not available and auto-start failed.");
            } else {
                info!("Started Tables server component automatically");
            }
        }
        let config = AppConfig::from_env().expect("Failed to load config from env");
        let s3_client = Self::create_s3_operator(&config).await;
        Self {
            install_mode,
            tenant,
            s3_client,
        }
    }

    pub fn start_all(&mut self) -> Result<()> {
        let pm = PackageManager::new(self.install_mode.clone(), self.tenant.clone())?;
        let components = vec![
            ComponentInfo { name: "tables" },
            ComponentInfo { name: "cache" },
            ComponentInfo { name: "drive" },
            ComponentInfo { name: "llm" },
            ComponentInfo { name: "email" },
            ComponentInfo { name: "proxy" },
            ComponentInfo { name: "directory" },
            ComponentInfo { name: "alm" },
            ComponentInfo { name: "alm_ci" },
            ComponentInfo { name: "dns" },
            ComponentInfo { name: "webmail" },
            ComponentInfo { name: "meeting" },
            ComponentInfo { name: "table_editor" },
            ComponentInfo { name: "doc_editor" },
            ComponentInfo { name: "desktop" },
            ComponentInfo { name: "devtools" },
            ComponentInfo { name: "bot" },
            ComponentInfo { name: "system" },
            ComponentInfo { name: "vector_db" },
            ComponentInfo { name: "host" },
        ];
        for component in components {
            if pm.is_installed(component.name) {
                pm.start(component.name)?;
            }
        }
        Ok(())
    }

    pub async fn bootstrap(&mut self) -> Result<AppConfig> {
        if let Ok(tables_server) = std::env::var("TABLES_SERVER") {
            if !tables_server.is_empty() {
                info!("Legacy mode detected (TABLES_SERVER present), skipping bootstrap installation");
                let _database_url = std::env::var("DATABASE_URL").unwrap_or_else(|_| {
                    let username = std::env::var("TABLES_USERNAME").unwrap_or_else(|_| "gbuser".to_string());
                    let password = std::env::var("TABLES_PASSWORD").unwrap_or_else(|_| "postgres".to_string());
                    let server = std::env::var("TABLES_SERVER").unwrap_or_else(|_| "localhost".to_string());
                    let port = std::env::var("TABLES_PORT").unwrap_or_else(|_| "5432".to_string());
                    let database = std::env::var("TABLES_DATABASE").unwrap_or_else(|_| "gbserver".to_string());
                    format!("postgres://{}:{}@{}:{}/{}", username, password, server, port, database)
                });
                if let Ok(config) = self.load_config_from_csv().await {
                    return Ok(config);
                }
                match establish_pg_connection() {
                    Ok(mut conn) => {
                        if let Err(e) = self.apply_migrations(&mut conn) {
                            log::warn!("Failed to apply migrations: {}", e);
                        }
                        return Ok(AppConfig::from_database(&mut conn).expect("Failed to load config from DB"));
                    }
                    Err(e) => {
                        log::warn!("Failed to connect to database: {}", e);
                        return Ok(AppConfig::from_env()?);
                    }
                }
            }
        }
        let pm = PackageManager::new(self.install_mode.clone(), self.tenant.clone())?;
        let required_components = vec!["tables", "drive", "cache", "llm"];
        let mut config = AppConfig::from_env().expect("Failed to load config from env");
        for component in required_components {
            if !pm.is_installed(component) {
                let termination_cmd = pm
                    .components
                    .get(component)
                    .and_then(|cfg| cfg.binary_name.clone())
                    .unwrap_or_else(|| component.to_string());
                if !termination_cmd.is_empty() {
                    let check = Command::new("pgrep")
                        .arg("-f")
                        .arg(&termination_cmd)
                        .output();
                    if let Ok(output) = check {
                        if !output.stdout.is_empty() {
                            println!("Component '{}' appears to be already running from a previous install.", component);
                            println!("Do you want to terminate it? (y/n)");
                            let mut input = String::new();
                            io::stdout().flush().unwrap();
                            io::stdin().read_line(&mut input).unwrap();
                            if input.trim().eq_ignore_ascii_case("y") {
                                let _ = Command::new("pkill")
                                    .arg("-f")
                                    .arg(&termination_cmd)
                                    .status();
                                println!("Terminated existing '{}' process.", component);
                            } else {
                                println!("Skipping start of '{}' as it is already running.", component);
                                continue;
                            }
                        }
                    }
                }
                if component == "tables" {
                    let db_password = self.generate_secure_password(16);
                    let farm_password = self.generate_secure_password(32);
                    let env_contents = format!(
                        "FARM_PASSWORD={}\nDATABASE_URL=postgres://gbuser:{}@localhost:5432/botserver",
                        farm_password, db_password
                    );
                    std::fs::write(".env", &env_contents)
                        .map_err(|e| anyhow::anyhow!("Failed to write .env file: {}", e))?;
                    dotenv().ok();
                }
                pm.install(component).await?;
                if component == "tables" {
                    let mut conn = establish_pg_connection()
                        .map_err(|e| anyhow::anyhow!("Failed to connect to database: {}", e))?;
                    let migration_dir = include_dir::include_dir!("./migrations");
                    let mut migration_files: Vec<_> = migration_dir
                        .files()
                        .filter_map(|file| {
                            let path = file.path();
                            if path.extension()? == "sql" {
                                Some(file)
                            } else {
                                None
                            }
                        })
                        .collect();
                    migration_files.sort_by_key(|f| f.path());
                    for migration_file in migration_files {
                        let migration = migration_file
                            .contents_utf8()
                            .ok_or_else(|| anyhow::anyhow!("Migration file is not valid UTF-8"))?;
                        if let Err(e) = conn.batch_execute(migration) {
                            log::error!("Failed to execute migration {}: {}", migration_file.path().display(), e);
                            return Err(e.into());
                        }
                        trace!("Successfully executed migration: {}", migration_file.path().display());
                    }
                    config = AppConfig::from_database(&mut conn).expect("Failed to load config from DB");
                }
            }
        }
        self.s3_client = Self::create_s3_operator(&config).await;
        let final_config = if let Ok(csv_config) = self.load_config_from_csv().await {
            csv_config
        } else {
            config
        };
        if std::env::var("DRIVE_SERVER").is_err() {
            write_drive_config_to_env(&final_config.drive)
                .map_err(|e| anyhow::anyhow!("Failed to write drive config to .env: {}", e))?;
        }
        Ok(final_config)
    }

    async fn create_s3_operator(config: &AppConfig) -> Client {
        let endpoint = if !config.drive.server.ends_with('/') {
            format!("{}/", config.drive.server)
        } else {
            config.drive.server.clone()
        };
        let base_config = aws_config::defaults(BehaviorVersion::latest())
            .endpoint_url(endpoint)
            .region("auto")
            .credentials_provider(
                aws_sdk_s3::config::Credentials::new(
                    config.drive.access_key.clone(),
                    config.drive.secret_key.clone(),
                    None,
                    None,
                    "static",
                )
            )
            .load()
            .await;
        let s3_config = aws_sdk_s3::config::Builder::from(&base_config)
            .force_path_style(true)
            .build();
        aws_sdk_s3::Client::from_conf(s3_config)
    }

    fn generate_secure_password(&self, length: usize) -> String {
        let mut rng = rand::rng();
        std::iter::repeat_with(|| rng.sample(Alphanumeric) as char)
            .take(length)
            .collect()
    }

    pub async fn upload_templates_to_drive(&self, _config: &AppConfig) -> Result<()> {
        let mut conn = establish_pg_connection()?;
        self.create_bots_from_templates(&mut conn)?;
        let templates_dir = Path::new("templates");
        if !templates_dir.exists() {
            return Ok(());
        }
        let client = &self.s3_client;
        let mut read_dir = tokio::fs::read_dir(templates_dir).await?;
        while let Some(entry) = read_dir.next_entry().await? {
            let path = entry.path();
            if path.is_dir()
                && path
                    .file_name()
                    .unwrap()
                    .to_string_lossy()
                    .ends_with(".gbai")
            {
                let bot_name = path.file_name().unwrap().to_string_lossy().to_string();
                let bucket = bot_name.trim_start_matches('/').to_string();
                if client.head_bucket().bucket(&bucket).send().await.is_err() {
                    match client.create_bucket()
                        .bucket(&bucket)
                        .send()
                        .await {
                        Ok(_) => {
                            trace!("Created bucket: {}", bucket);
                            self.upload_directory_recursive(client, &path, &bucket, "/")
                                .await?;
                        }
                        Err(e) => {
                            error!("Failed to create bucket {}: {:?}", bucket, e);
                            return Err(anyhow::anyhow!(
                                "Failed to create bucket {}: {}. Check S3 credentials and endpoint configuration",
                                bucket, e
                            ));
                        }
                    }
                } else {
                    debug!("Bucket {} already exists", bucket);
                }
            }
        }
        Ok(())
    }

    fn create_bots_from_templates(&self, conn: &mut diesel::PgConnection) -> Result<()> {
        use crate::shared::models::schema::bots;
        use diesel::prelude::*;
        let templates_dir = Path::new("templates");
        if !templates_dir.exists() {
            return Ok(());
        }
        for entry in std::fs::read_dir(templates_dir)? {
            let entry = entry?;
            let path = entry.path();
            if path.is_dir() && path.extension().map(|e| e == "gbai").unwrap_or(false) {
                let bot_folder = path.file_name().unwrap().to_string_lossy().to_string();
                let bot_name = bot_folder.trim_end_matches(".gbai");
                let existing: Option<String> = bots::table
                    .filter(bots::name.eq(&bot_name))
                    .select(bots::name)
                    .first(conn)
                    .optional()?;
                if existing.is_none() {
                    diesel::sql_query(
                        "INSERT INTO bots (id, name, description, llm_provider, llm_config, context_provider, context_config, is_active) \
                         VALUES (gen_random_uuid(), $1, $2, 'openai', '{\"model\": \"gpt-4\", \"temperature\": 0.7}', 'database', '{}', true)"
                    )
                    .bind::<diesel::sql_types::Text, _>(&bot_name)
                    .bind::<diesel::sql_types::Text, _>(format!("Bot for {} template", bot_name))
                    .execute(conn)?;
                    info!("Created bot: {}", bot_name);
                } else {
                    debug!("Bot {} already exists", bot_name);
                }
            }
        }
        Ok(())
    }

    fn upload_directory_recursive<'a>(
        &'a self,
        client: &'a Client,
        local_path: &'a Path,
        bucket: &'a str,
        prefix: &'a str,
    ) -> std::pin::Pin<Box<dyn std::future::Future<Output = Result<()>> + 'a>> {
        Box::pin(async move {
            let _normalized_path = if !local_path.to_string_lossy().ends_with('/') {
                format!("{}/", local_path.to_string_lossy())
            } else {
                local_path.to_string_lossy().to_string()
            };
            let mut read_dir = tokio::fs::read_dir(local_path).await?;
            while let Some(entry) = read_dir.next_entry().await? {
                let path = entry.path();
                let file_name = path.file_name().unwrap().to_string_lossy().to_string();
                let mut key = prefix.trim_matches('/').to_string();
                if !key.is_empty() {
                    key.push('/');
                }
                key.push_str(&file_name);
                if path.is_file() {
                    trace!("Uploading file {} to bucket {} with key {}", path.display(), bucket, key);
                    let content = tokio::fs::read(&path).await?;
                    client.put_object()
                        .bucket(bucket)
                        .key(&key)
                        .body(content.into())
                        .send()
                        .await?;
                } else if path.is_dir() {
                    self.upload_directory_recursive(client, &path, bucket, &key).await?;
                }
            }
            Ok(())
        })
    }

    async fn load_config_from_csv(&self) -> Result<AppConfig> {
        use crate::config::ConfigManager;
        use uuid::Uuid;
        let client = &self.s3_client;
        let bucket = "default.gbai";
        let config_key = "default.gbot/config.csv";
        match client.get_object()
            .bucket(bucket)
            .key(config_key)
            .send()
            .await
        {
            Ok(response) => {
                trace!("Found config.csv in default.gbai");
                let bytes = response.body.collect().await?.into_bytes();
                let csv_content = String::from_utf8(bytes.to_vec())?;
                let config_conn = establish_pg_connection()?;
                let config_manager = ConfigManager::new(Arc::new(Mutex::new(config_conn)));
                let default_bot_id = Uuid::parse_str("00000000-0000-0000-0000-000000000000")?;
                let temp_path = std::env::temp_dir().join("config.csv");
                tokio::fs::write(&temp_path, csv_content).await?;
                config_manager.sync_gbot_config(&default_bot_id, temp_path.to_str().unwrap())
                    .map_err(|e| anyhow::anyhow!("Failed to sync gbot config: {}", e))?;
                let mut final_conn = establish_pg_connection()?;
                let config = AppConfig::from_database(&mut final_conn)?;
                Ok(config)
            }
            Err(e) => {
                debug!("No config.csv found in default.gbai: {:?}", e);
                Err(e.into())
            }
        }
    }

    fn apply_migrations(&self, conn: &mut diesel::PgConnection) -> Result<()> {
        let migrations_dir = std::path::Path::new("migrations");
        if !migrations_dir.exists() {
            return Ok(());
        }
        let mut sql_files: Vec<_> = std::fs::read_dir(migrations_dir)?
            .filter_map(|entry| entry.ok())
            .filter(|entry| {
                entry
                    .path()
                    .extension()
                    .and_then(|s| s.to_str())
                    .map(|s| s == "sql")
                    .unwrap_or(false)
            })
            .collect();
        sql_files.sort_by_key(|entry| entry.path());
        for entry in sql_files {
            let path = entry.path();
            let filename = path.file_name().unwrap().to_string_lossy();
            match std::fs::read_to_string(&path) {
                Ok(sql) => match conn.batch_execute(&sql) {
                    Err(e) => {
                        log::warn!("Migration {} failed: {}", filename, e);
                    }
                    _ => {}
                },
                Err(e) => {
                    log::warn!("Failed to read migration {}: {}", filename, e);
                }
            }
        }
        Ok(())
    }
}
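One detail from the refactor above: migrations for the tables component are embedded into the binary with include_dir! and executed through Diesel's batch_execute. A minimal standalone sketch of that pattern, assuming a migrations/ directory at the crate root and a working PgConnection:

use diesel::connection::SimpleConnection;
use diesel::PgConnection;
use include_dir::{include_dir, Dir};

// Embed every file under ./migrations into the binary at compile time.
static MIGRATIONS: Dir<'static> = include_dir!("./migrations");

fn run_embedded_migrations(conn: &mut PgConnection) -> anyhow::Result<()> {
    // Keep only .sql files and run them in path order, as the bootstrap code does.
    let mut files: Vec<_> = MIGRATIONS
        .files()
        .filter(|f| f.path().extension().map(|e| e == "sql").unwrap_or(false))
        .collect();
    files.sort_by(|a, b| a.path().cmp(b.path()));
    for file in files {
        let sql = file
            .contents_utf8()
            .ok_or_else(|| anyhow::anyhow!("migration {} is not UTF-8", file.path().display()))?;
        conn.batch_execute(sql)?;
    }
    Ok(())
}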


@@ -235,22 +235,20 @@ impl BotOrchestrator {
            channel
        );
        let event_response = BotResponse::from_string_ids(
            bot_id,
            session_id,
            user_id,
            serde_json::to_string(&serde_json::json!({
                "event": event_type,
                "data": data
            }))?,
            channel.to_string(),
        )?;
        let event_response = BotResponse {
            message_type: 2,
            is_complete: true,
            ..event_response
        };
        if let Some(adapter) = self.state.channels.lock().unwrap().get(channel) {
@@ -510,7 +508,7 @@ impl BotOrchestrator {
            .unwrap_or(0);
        // Show initial progress
        if let Ok(_metrics) = get_system_metrics(initial_tokens, max_context_size) {
        }
        let model = config_manager
            .get_config(
@@ -563,11 +561,11 @@ impl BotOrchestrator {
            let current_tokens =
                initial_tokens + crate::shared::utils::estimate_token_count(&full_response);
            if let Ok(metrics) = get_system_metrics(current_tokens, max_context_size) {
                let _gpu_bar =
                    "".repeat((metrics.gpu_usage.unwrap_or(0.0) / 5.0).round() as usize);
                let _cpu_bar = "".repeat((metrics.cpu_usage / 5.0).round() as usize);
                let token_ratio = current_tokens as f64 / max_context_size.max(1) as f64;
                let _token_bar = "".repeat((token_ratio * 20.0).round() as usize);
                let mut ui = BotUI::new().unwrap();
                ui.render_progress(current_tokens, max_context_size).unwrap();
            }


@@ -297,19 +297,30 @@ impl DriveMonitor {
        let bot_name = self.bucket_name.strip_suffix(".gbai").unwrap_or(&self.bucket_name);
        let work_dir = format!("./work/{}.gbai/{}.gbdialog", bot_name, bot_name);
        // Offload the blocking compilation work to a blocking thread pool
        let state_clone = Arc::clone(&self.state);
        let work_dir_clone = work_dir.clone();
        let tool_name_clone = tool_name.clone();
        let source_content_clone = source_content.clone();
        let bot_id = self.bot_id;
        tokio::task::spawn_blocking(move || {
            std::fs::create_dir_all(&work_dir_clone)?;
            let local_source_path = format!("{}/{}.bas", work_dir_clone, tool_name_clone);
            std::fs::write(&local_source_path, &source_content_clone)?;
            let mut compiler = BasicCompiler::new(state_clone, bot_id);
            let result = compiler.compile_file(&local_source_path, &work_dir_clone)?;
            if let Some(mcp_tool) = result.mcp_tool {
                info!("MCP tool definition generated with {} parameters",
                    mcp_tool.input_schema.properties.len());
            }
            Ok::<(), Box<dyn Error + Send + Sync>>(())
        }).await??;
        Ok(())
    }
}
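The change above moves directory creation, the source write, and the BASIC compilation onto Tokio's blocking thread pool. The double question mark in .await?? unwraps first the JoinError from the spawned task and then the closure's own error. A minimal sketch of the same pattern, with placeholder work and a placeholder /tmp path standing in for the real compiler and work directory:

use tokio::task;

async fn compile_off_the_executor(source: String) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
    // spawn_blocking runs the closure on the blocking pool; awaiting it yields
    // Result<T, JoinError>, and the closure itself also returns a Result,
    // hence the two `?` operators below.
    task::spawn_blocking(move || {
        std::fs::write("/tmp/example.bas", &source)?; // placeholder for the real work dir
        Ok::<(), Box<dyn std::error::Error + Send + Sync>>(())
    })
    .await??;
    Ok(())
}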


@@ -30,22 +30,29 @@ pub async fn embeddings_local(
pub async fn ensure_llama_servers_running(
    app_state: &Arc<AppState>
) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
    // Get all config values before starting async operations
    let config_values = {
        let conn_arc = app_state.conn.clone();
        tokio::task::spawn_blocking(move || {
            let mut conn = conn_arc.lock().unwrap();
            let config_manager = ConfigManager::new(Arc::clone(&conn_arc));
            let default_bot_id = bots.filter(name.eq("default"))
                .select(id)
                .first::<uuid::Uuid>(&mut *conn)
                .unwrap_or_else(|_| uuid::Uuid::nil());
            (
                default_bot_id,
                config_manager.get_config(&default_bot_id, "llm-url", None).unwrap_or_default(),
                config_manager.get_config(&default_bot_id, "llm-model", None).unwrap_or_default(),
                config_manager.get_config(&default_bot_id, "embedding-url", None).unwrap_or_default(),
                config_manager.get_config(&default_bot_id, "embedding-model", None).unwrap_or_default(),
                config_manager.get_config(&default_bot_id, "llm-server-path", None).unwrap_or_default(),
            )
        }).await?
    };
    let (_default_bot_id, llm_url, llm_model, embedding_url, embedding_model, llm_server_path) = config_values;
    info!("Starting LLM servers...");
    info!("Configuration:");


@@ -3,8 +3,7 @@ use actix_cors::Cors;
use actix_web::middleware::Logger;
use actix_web::{web, App, HttpServer};
use dotenvy::dotenv;
use log::{error, info};
use std::collections::HashMap;
use std::sync::{Arc, Mutex};
mod auth;
@@ -38,7 +37,7 @@ use crate::channels::{VoiceAdapter, WebChannelAdapter};
use crate::config::AppConfig;
#[cfg(feature = "email")]
use crate::email::{
    get_emails, get_latest_email_from, list_emails, save_click, save_draft, send_email,
};
use crate::file::{init_drive, upload_file};
use crate::meet::{voice_start, voice_stop};
@@ -47,83 +46,117 @@ use crate::session::{create_session, get_session_history, get_sessions, start_se
use crate::shared::state::AppState;
use crate::web_server::{bot_index, index, static_files};

#[derive(Debug, Clone)]
pub enum BootstrapProgress {
    StartingBootstrap,
    InstallingComponent(String),
    StartingComponent(String),
    UploadingTemplates,
    ConnectingDatabase,
    StartingLLM,
    BootstrapComplete,
    BootstrapError(String),
}

#[tokio::main]
async fn main() -> std::io::Result<()> {
    use crate::llm::local::ensure_llama_servers_running;
    use botserver::config::ConfigManager;
    let args: Vec<String> = std::env::args().collect();
    let no_ui = args.contains(&"--noui".to_string());
    if args.len() > 1 {
        let command = &args[1];
        match command.as_str() {
            "install" | "remove" | "list" | "status" | "start" | "stop" | "restart" | "--help"
            | "-h" => match package_manager::cli::run().await {
                Ok(_) => return Ok(()),
                Err(e) => {
                    eprintln!("CLI error: {}", e);
                    return Err(std::io::Error::new(
                        std::io::ErrorKind::Other,
                        format!("CLI command failed: {}", e),
                    ));
                }
            },
            "--noui" => {}
            _ => {
                eprintln!("Unknown command: {}", command);
                eprintln!("Run 'botserver --help' for usage information");
                return Err(std::io::Error::new(
                    std::io::ErrorKind::InvalidInput,
                    format!("Unknown command: {}", command),
                ));
            }
        }
    }
    dotenv().ok();
    let (progress_tx, progress_rx) = tokio::sync::mpsc::unbounded_channel::<BootstrapProgress>();
    let (state_tx, state_rx) = tokio::sync::mpsc::channel::<Arc<AppState>>(1);
    let ui_handle = if !no_ui {
        let progress_rx = Arc::new(tokio::sync::Mutex::new(progress_rx));
        let state_rx = Arc::new(tokio::sync::Mutex::new(state_rx));
        let handle = std::thread::Builder::new()
            .name("ui-thread".to_string())
            .spawn(move || {
                let mut ui = crate::ui_tree::XtreeUI::new();
                ui.set_progress_channel(progress_rx.clone());
                let rt = tokio::runtime::Builder::new_current_thread()
                    .enable_all()
                    .build()
                    .expect("Failed to create UI runtime");
                rt.block_on(async {
                    tokio::select! {
                        result = async {
                            let mut rx = state_rx.lock().await;
                            rx.recv().await
                        } => {
                            if let Some(app_state) = result {
                                ui.set_app_state(app_state);
                            }
                        }
                        _ = tokio::time::sleep(tokio::time::Duration::from_secs(300)) => {
                            eprintln!("UI initialization timeout");
                        }
                    }
                });
                if let Err(e) = ui.start_ui() {
                    eprintln!("UI error: {}", e);
                }
            })
            .expect("Failed to spawn UI thread");
        Some(handle)
    } else {
        env_logger::Builder::from_env(env_logger::Env::default().default_filter_or("info"))
            .write_style(env_logger::WriteStyle::Always)
            .init();
        None
    };
    let install_mode = if args.contains(&"--container".to_string()) {
        InstallMode::Container
    } else {
        InstallMode::Local
    };
    let tenant = if let Some(idx) = args.iter().position(|a| a == "--tenant") {
        args.get(idx + 1).cloned()
    } else {
        None
    };
    let progress_tx_clone = progress_tx.clone();
    let cfg = {
        progress_tx_clone.send(BootstrapProgress::StartingBootstrap).ok();
        let mut bootstrap = BootstrapManager::new(install_mode.clone(), tenant.clone()).await;
        let env_path = match std::env::current_dir() {
            Ok(dir) => dir.join("botserver-stack").join(".env"),
            Err(_) => {
                progress_tx_clone.send(BootstrapProgress::BootstrapError("Failed to get current directory".to_string())).ok();
                return Err(std::io::Error::new(std::io::ErrorKind::Other, "Failed to get current directory"));
            }
        };
        let cfg = if env_path.exists() {
            progress_tx_clone.send(BootstrapProgress::ConnectingDatabase).ok();
            match diesel::Connection::establish(&std::env::var("DATABASE_URL").unwrap_or_else(|_| "postgres://gbuser:@localhost:5432/botserver".to_string())) {
                Ok(mut conn) => {
                    AppConfig::from_database(&mut conn).unwrap_or_else(|_| AppConfig::from_env().expect("Failed to load config"))
                }
                Err(_) => AppConfig::from_env().expect("Failed to load config from env"),
            }
@@ -131,174 +164,172 @@ async fn main() -> std::io::Result<()> {
            match bootstrap.bootstrap().await {
                Ok(config) => config,
                Err(e) => {
                    progress_tx_clone.send(BootstrapProgress::BootstrapError(format!("Bootstrap failed: {}", e))).ok();
                    match diesel::Connection::establish(&std::env::var("DATABASE_URL").unwrap_or_else(|_| "postgres://gbuser:@localhost:5432/botserver".to_string())) {
                        Ok(mut conn) => {
                            AppConfig::from_database(&mut conn).unwrap_or_else(|_| AppConfig::from_env().expect("Failed to load config"))
                        }
                        Err(_) => AppConfig::from_env().expect("Failed to load config from env"),
                    }
                }
            }
        };
        progress_tx_clone.send(BootstrapProgress::StartingComponent("all services".to_string())).ok();
        if let Err(e) = bootstrap.start_all() {
            progress_tx_clone.send(BootstrapProgress::BootstrapError(format!("Failed to start services: {}", e))).ok();
        }
        progress_tx_clone.send(BootstrapProgress::UploadingTemplates).ok();
        if let Err(e) = bootstrap.upload_templates_to_drive(&cfg).await {
            progress_tx_clone.send(BootstrapProgress::BootstrapError(format!("Failed to upload templates: {}", e))).ok();
        }
        Ok::<AppConfig, std::io::Error>(cfg)
    };
    let cfg = cfg?;
    dotenv().ok();
    let refreshed_cfg = AppConfig::from_env().expect("Failed to load config from env");
    let config = std::sync::Arc::new(refreshed_cfg.clone());
    progress_tx.send(BootstrapProgress::ConnectingDatabase).ok();
    let db_pool = match diesel::Connection::establish(&refreshed_cfg.database_url()) {
        Ok(conn) => Arc::new(Mutex::new(conn)),
        Err(e) => {
            error!("Failed to connect to main database: {}", e);
            progress_tx.send(BootstrapProgress::BootstrapError(format!("Database connection failed: {}", e))).ok();
            return Err(std::io::Error::new(
                std::io::ErrorKind::ConnectionRefused,
                format!("Database connection failed: {}", e),
            ));
        }
    };
    let cache_url = std::env::var("CACHE_URL")
        .or_else(|_| std::env::var("REDIS_URL"))
        .unwrap_or_else(|_| "redis://localhost:6379".to_string());
    let redis_client = match redis::Client::open(cache_url.as_str()) {
        Ok(client) => Some(Arc::new(client)),
        Err(e) => {
            log::warn!("Failed to connect to Redis: {}", e);
            None
        }
    };
    let web_adapter = Arc::new(WebChannelAdapter::new());
    let voice_adapter = Arc::new(VoiceAdapter::new());
    let drive = init_drive(&config.drive)
        .await
        .expect("Failed to initialize Drive");
    let session_manager = Arc::new(tokio::sync::Mutex::new(session::SessionManager::new(
        diesel::Connection::establish(&cfg.database_url()).unwrap(),
        redis_client.clone(),
    )));
    let auth_service = Arc::new(tokio::sync::Mutex::new(auth::AuthService::new()));
    let conn = diesel::Connection::establish(&cfg.database_url()).unwrap();
    let config_manager = ConfigManager::new(Arc::new(Mutex::new(conn)));
    let mut bot_conn = diesel::Connection::establish(&cfg.database_url()).unwrap();
    let (default_bot_id, _default_bot_name) = crate::bot::get_default_bot(&mut bot_conn);
    let llm_url = config_manager
        .get_config(&default_bot_id, "llm-url", Some("http://localhost:8081"))
        .unwrap_or_else(|_| "http://localhost:8081".to_string());
    let llm_provider = Arc::new(crate::llm::OpenAIClient::new(
        "empty".to_string(),
        Some(llm_url.clone()),
    ));
    let app_state = Arc::new(AppState {
        drive: Some(drive),
        config: Some(cfg.clone()),
        conn: db_pool.clone(),
        bucket_name: "default.gbai".to_string(),
        cache: redis_client.clone(),
        session_manager: session_manager.clone(),
        llm_provider: llm_provider.clone(),
        auth_service: auth_service.clone(),
        channels: Arc::new(Mutex::new({
            let mut map = HashMap::new();
            map.insert(
                "web".to_string(),
                web_adapter.clone() as Arc<dyn crate::channels::ChannelAdapter>,
            );
            map
        })),
        response_channels: Arc::new(tokio::sync::Mutex::new(HashMap::new())),
        web_adapter: web_adapter.clone(),
        voice_adapter: voice_adapter.clone(),
    });
    state_tx.send(app_state.clone()).await.ok();
    progress_tx.send(BootstrapProgress::BootstrapComplete).ok();
    info!("Starting HTTP server on {}:{}", config.server.host, config.server.port);
    let worker_count = std::thread::available_parallelism()
        .map(|n| n.get())
        .unwrap_or(4);
    let bot_orchestrator = BotOrchestrator::new(app_state.clone());
    tokio::spawn(async move {
        if let Err(e) = bot_orchestrator.mount_all_bots().await {
            error!("Failed to mount bots: {}", e);
        }
    });
    let automation_state = app_state.clone();
    std::thread::spawn(move || {
        let rt = tokio::runtime::Builder::new_current_thread()
            .enable_all()
            .build()
            .expect("Failed to create runtime for automation");
        let local = tokio::task::LocalSet::new();
        local.block_on(&rt, async move {
            let automation = AutomationService::new(automation_state);
            automation.spawn().await.ok();
        });
    });
    let app_state_for_llm = app_state.clone();
    tokio::spawn(async move {
        if let Err(e) = ensure_llama_servers_running(&app_state_for_llm).await {
            error!("Failed to start LLM servers: {}", e);
        }
    });
    let server_result = HttpServer::new(move || {
        let cors = Cors::default()
            .allow_any_origin()
            .allow_any_method()
            .allow_any_header()
            .max_age(3600);
        let app_state_clone = app_state.clone();
        let mut app = App::new()
            .wrap(cors)
            .wrap(Logger::default())
            .wrap(Logger::new("HTTP REQUEST: %a %{User-Agent}i"))
            .app_data(web::Data::from(app_state_clone))
            .service(auth_handler)
            .service(create_session)
            .service(get_session_history)
            .service(get_sessions)
            .service(index)
            .service(start_session)
            .service(upload_file)
            .service(voice_start)
            .service(voice_stop)
            .service(websocket_handler)
            .service(crate::bot::create_bot_handler)
            .service(crate::bot::mount_bot_handler)
            .service(crate::bot::handle_user_input_handler)
            .service(crate::bot::get_user_sessions_handler)
            .service(crate::bot::get_conversation_history_handler)
            .service(crate::bot::send_warning_handler);
        #[cfg(feature = "email")]
        {
            app = app
                .service(get_latest_email_from)
                .service(get_emails)
                .service(list_emails)
                .service(send_email)
                .service(save_draft)
                .service(save_click);
        }
        app = app.service(static_files);
        app = app.service(bot_index);
        app
    })
    .workers(worker_count)
    .bind((config.server.host.clone(), config.server.port))?
    .run()
    .await;
    if let Some(handle) = ui_handle {
        handle.join().ok();
    }
    server_result
}
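The new BootstrapProgress enum travels over an unbounded channel so the UI thread can report startup progress without blocking main(); send(...).ok() deliberately ignores the error when no receiver is attached. A self-contained sketch of that producer/consumer shape, reusing a subset of the variants above (the consumer loop stands in for the UI):

use tokio::sync::mpsc;

#[derive(Debug, Clone)]
enum BootstrapProgress {
    StartingBootstrap,
    StartingComponent(String),
    BootstrapComplete,
}

#[tokio::main]
async fn main() {
    let (tx, mut rx) = mpsc::unbounded_channel::<BootstrapProgress>();
    // Producer side: fire-and-forget sends, exactly like main() above;
    // .ok() drops the SendError if the receiver (the UI) is gone.
    tx.send(BootstrapProgress::StartingBootstrap).ok();
    tx.send(BootstrapProgress::StartingComponent("tables".into())).ok();
    tx.send(BootstrapProgress::BootstrapComplete).ok();
    drop(tx);
    // Consumer side: a UI (or logger) drains events as they arrive.
    while let Some(event) = rx.recv().await {
        println!("progress: {:?}", event);
    }
}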


@@ -1,5 +1,4 @@
use anyhow::Result;
use std::collections::HashMap;
use sysinfo::{System};


@@ -88,6 +88,31 @@ pub struct BotResponse {
    pub context_max_length: usize,
}

impl BotResponse {
    pub fn from_string_ids(
        bot_id: &str,
        session_id: &str,
        user_id: &str,
        content: String,
        channel: String,
    ) -> Result<Self, anyhow::Error> {
        Ok(Self {
            bot_id: bot_id.to_string(),
            user_id: user_id.to_string(),
            session_id: session_id.to_string(),
            channel,
            content,
            message_type: 2,
            stream_token: None,
            is_complete: true,
            suggestions: Vec::new(),
            context_name: None,
            context_length: 0,
            context_max_length: 0,
        })
    }
}

#[derive(Debug, Clone, Serialize, Deserialize, Queryable, Identifiable, Insertable)]
#[diesel(table_name = bot_memories)]
pub struct BotMemory {
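A hypothetical call site combining the new constructor with struct-update syntax, mirroring the orchestrator hunk earlier in this commit (the IDs and payload are illustrative, and the type comes from this module):

use crate::shared::models::BotResponse;

fn build_event_response() -> Result<BotResponse, anyhow::Error> {
    // Illustrative IDs; real callers pass the orchestrator's values.
    let base = BotResponse::from_string_ids(
        "bot-1",
        "session-1",
        "user-1",
        r#"{"event":"ping","data":{}}"#.to_string(),
        "web".to_string(),
    )?;
    // Struct-update syntax copies every remaining field from `base`.
    Ok(BotResponse {
        message_type: 2,
        is_complete: true,
        ..base
    })
}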

src/ui_tree/chat_panel.rs (new file, 139 lines)

@@ -0,0 +1,139 @@
use color_eyre::Result;
use std::sync::Arc;
use crate::shared::state::AppState;
use crate::shared::models::BotResponse;
use tokio::sync::mpsc;
use uuid::Uuid;

pub struct ChatPanel {
    pub id: Uuid,
    pub app_state: Arc<AppState>,
    pub messages: Vec<String>,
    pub input_buffer: String,
    pub session_id: Uuid,
    pub user_id: Uuid,
    pub response_rx: Option<mpsc::Receiver<BotResponse>>,
}

impl ChatPanel {
    pub fn new(app_state: Arc<AppState>) -> Self {
        Self {
            id: Uuid::new_v4(),
            app_state,
            messages: vec!["Welcome to General Bots Console Chat!".to_string()],
            input_buffer: String::new(),
            session_id: Uuid::new_v4(),
            user_id: Uuid::new_v4(),
            response_rx: None,
        }
    }

    pub fn add_char(&mut self, c: char) {
        self.input_buffer.push(c);
    }

    pub fn backspace(&mut self) {
        self.input_buffer.pop();
    }

    pub async fn send_message(&mut self, bot_name: &str, app_state: &Arc<AppState>) -> Result<()> {
        if self.input_buffer.trim().is_empty() {
            return Ok(());
        }
        let message = self.input_buffer.clone();
        self.messages.push(format!("You: {}", message));
        self.input_buffer.clear();
        let bot_id = self.get_bot_id(bot_name, app_state).await?;
        let user_message = crate::shared::models::UserMessage {
            bot_id: bot_id.to_string(),
            user_id: self.user_id.to_string(),
            session_id: self.session_id.to_string(),
            channel: "console".to_string(),
            content: message,
            message_type: 1,
            media_url: None,
            timestamp: chrono::Utc::now(),
            context_name: None,
        };
        let (tx, rx) = mpsc::channel::<BotResponse>(100);
        self.response_rx = Some(rx);
        let orchestrator = crate::bot::BotOrchestrator::new(app_state.clone());
        let _ = orchestrator.stream_response(user_message, tx).await;
        Ok(())
    }

    pub async fn poll_response(&mut self, _bot_name: &str) -> Result<()> {
        if let Some(rx) = &mut self.response_rx {
            while let Ok(response) = rx.try_recv() {
                if !response.content.is_empty() && !response.is_complete {
                    if let Some(last_msg) = self.messages.last_mut() {
                        if last_msg.starts_with("Bot: ") {
                            last_msg.push_str(&response.content);
                        } else {
                            self.messages.push(format!("Bot: {}", response.content));
                        }
                    } else {
                        self.messages.push(format!("Bot: {}", response.content));
                    }
                }
                if response.is_complete && response.content.is_empty() {
                    break;
                }
            }
        }
        Ok(())
    }

    async fn get_bot_id(&self, bot_name: &str, app_state: &Arc<AppState>) -> Result<Uuid> {
        use crate::shared::models::schema::bots::dsl::*;
        use diesel::prelude::*;
        let mut conn = app_state.conn.lock().unwrap();
        let bot_id = bots
            .filter(name.eq(bot_name))
            .select(id)
            .first::<Uuid>(&mut *conn)?;
        Ok(bot_id)
    }

    pub fn render(&self) -> String {
        let mut lines = Vec::new();
        lines.push("╔═══════════════════════════════════════╗".to_string());
        lines.push("║ CONVERSATION ║".to_string());
        lines.push("╚═══════════════════════════════════════╝".to_string());
        lines.push("".to_string());
        let visible_start = if self.messages.len() > 15 {
            self.messages.len() - 15
        } else {
            0
        };
        for msg in &self.messages[visible_start..] {
            if msg.starts_with("You: ") {
                lines.push(format!(" {}", msg));
            } else if msg.starts_with("Bot: ") {
                lines.push(format!(" {}", msg));
            } else {
                lines.push(format!(" {}", msg));
            }
        }
        lines.push("".to_string());
        lines.push("─────────────────────────────────────────".to_string());
        lines.push(format!(" > {}_", self.input_buffer));
        lines.push("".to_string());
        lines.push(" Enter: Send | Tab: Switch Panel".to_string());
        lines.join("\n")
    }
}
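A rough usage sketch for the new panel, assuming an already-built Arc<AppState>, a bot named "default", and that the module is reachable as crate::ui_tree::chat_panel (all three are assumptions; the real call sites live in the UI tree):

use std::sync::Arc;
use crate::shared::state::AppState;
use crate::ui_tree::chat_panel::ChatPanel;

async fn demo_chat(app_state: Arc<AppState>) -> color_eyre::Result<()> {
    let mut panel = ChatPanel::new(app_state.clone());
    // Type a message character by character, as the key handler would.
    for c in "hello".chars() {
        panel.add_char(c);
    }
    // Send it to the "default" bot, then drain any streamed response chunks.
    panel.send_message("default", &app_state).await?;
    panel.poll_response("default").await?;
    println!("{}", panel.render());
    Ok(())
}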


@@ -54,7 +54,7 @@ impl Editor {
        &self.file_path
    }

-    pub fn render(&self) -> String {
+    pub fn render(&self, cursor_blink: bool) -> String {
        let lines: Vec<&str> = self.content.lines().collect();
        let total_lines = lines.len().max(1);
        let visible_lines = 25;
@@ -67,26 +67,35 @@ impl Editor {
        let start = self.scroll_offset;
        let end = (start + visible_lines).min(total_lines);

        let mut display_lines = Vec::new();
        for i in start..end {
            let line_num = i + 1;
            let line_content = if i < lines.len() { lines[i] } else { "" };
            let is_cursor_line = i == cursor_line;
-            let line_marker = if is_cursor_line { "▶" } else { " " };
-            display_lines.push(format!("{} {:4} │ {}", line_marker, line_num, line_content));
+            let cursor_indicator = if is_cursor_line && cursor_blink {
+                let spaces = " ".repeat(cursor_col);
+                format!("{}█", spaces)
+            } else {
+                String::new()
+            };
+            display_lines.push(format!(" {:4} │ {}{}", line_num, line_content, cursor_indicator));
        }

        if display_lines.is_empty() {
-            display_lines.push(" ▶ 1 │ ".to_string());
+            let cursor_indicator = if cursor_blink { "█" } else { "" };
+            display_lines.push(format!(" 1 │ {}", cursor_indicator));
        }

        display_lines.push("".to_string());
        display_lines.push("─────────────────────────────────────────────────────────────".to_string());
-        let status = if self.modified { "●" } else { "○" };
+        let status = if self.modified { "MODIFIED" } else { "SAVED" };
        display_lines.push(format!(" {} {} │ Line: {}, Col: {}",
            status, self.file_path, cursor_line + 1, cursor_col + 1));
        display_lines.push(" Ctrl+S: Save │ Ctrl+W: Close │ Esc: Close without saving".to_string());

        display_lines.join("\n")
    }
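
The signature change above moves blink state out of the editor: the caller now passes cursor_blink on every draw. A rough sketch of how that flag might be maintained by the UI loop (the BlinkState name and the 500 ms period are assumptions, not part of this diff):

// Hypothetical blink tracker owned by the TUI loop; the bool it returns is
// forwarded as the `cursor_blink` argument of Editor::render.
struct BlinkState {
    on: bool,
    last_toggle: std::time::Instant,
}

impl BlinkState {
    fn tick(&mut self) -> bool {
        // Flip roughly twice a second and report the current state.
        if self.last_toggle.elapsed() >= std::time::Duration::from_millis(500) {
            self.on = !self.on;
            self.last_toggle = std::time::Instant::now();
        }
        self.on
    }
}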

View file

@@ -32,6 +32,7 @@ impl FileTree {
        self.items.clear();
        self.current_bucket = None;
        self.current_path.clear();
+
        if let Some(drive) = &self.app_state.drive {
            let result = drive.list_buckets().send().await;
            match result {
@@ -52,6 +53,7 @@ impl FileTree {
        } else {
            self.items.push(("✗ Drive not connected".to_string(), TreeNode::Bucket { name: String::new() }));
        }
+
        if self.items.is_empty() {
            self.items.push(("(no buckets found)".to_string(), TreeNode::Bucket { name: String::new() }));
        }
@@ -121,6 +123,7 @@ impl FileTree {
            if let Some(token) = continuation_token {
                request = request.continuation_token(token);
            }
+
            let result = request.send().await?;
            for obj in result.contents() {
@@ -142,14 +145,17 @@ impl FileTree {
                if key == normalized_prefix {
                    continue;
                }
+
                let relative = if !normalized_prefix.is_empty() && key.starts_with(&normalized_prefix) {
                    &key[normalized_prefix.len()..]
                } else {
                    &key
                };
+
                if relative.is_empty() {
                    continue;
                }
+
                if let Some(slash_pos) = relative.find('/') {
                    let folder_name = &relative[..slash_pos];
                    if !folder_name.is_empty() {
@@ -162,7 +168,6 @@ impl FileTree {
        let mut folder_vec: Vec<String> = folders.into_iter().collect();
        folder_vec.sort();
-
        for folder_name in folder_vec {
            let full_path = if normalized_prefix.is_empty() {
                folder_name.clone()
@@ -178,7 +183,6 @@ impl FileTree {
        }

        files.sort_by(|(a, _), (b, _)| a.cmp(b));
-
        for (name, full_path) in files {
            let icon = if name.ends_with(".bas") {
                "⚙️"
@@ -226,6 +230,27 @@ impl FileTree {
        self.items.get(self.selected).map(|(_, node)| node)
    }

+    pub fn get_selected_bot(&self) -> Option<String> {
+        if let Some(bucket) = &self.current_bucket {
+            if bucket.ends_with(".gbai") {
+                return Some(bucket.trim_end_matches(".gbai").to_string());
+            }
+        }
+
+        if let Some((_, node)) = self.items.get(self.selected) {
+            match node {
+                TreeNode::Bucket { name } => {
+                    if name.ends_with(".gbai") {
+                        return Some(name.trim_end_matches(".gbai").to_string());
+                    }
+                }
+                _ => {}
+            }
+        }
+
+        None
+    }
+
    pub fn move_up(&mut self) {
        if self.selected > 0 {
            self.selected -= 1;
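
The new get_selected_bot helper maps a ".gbai" bucket (or bucket selection) to a bot name, which pairs with the status panel change later in this diff where render now takes a selected_bot. A small sketch of the likely glue in the UI layer; the function and argument names here are assumptions, not code from this commit.

// Hypothetical glue: feed the tree selection into the status panel's render.
fn draw_status(file_tree: &FileTree, status_panel: &mut StatusPanel) -> String {
    // ".gbai" buckets resolve to a bot name; anything else yields None.
    let selected_bot = file_tree.get_selected_bot();
    status_panel.render(selected_bot)
}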

View file

@@ -46,11 +46,11 @@ impl Log for UiLogger {
        if self.enabled(record.metadata()) {
            let timestamp = Local::now().format("%H:%M:%S");
            let level_icon = match record.level() {
-                log::Level::Error => "❌",
-                log::Level::Warn => "⚠️",
-                log::Level::Info => "ℹ️",
-                log::Level::Debug => "🔍",
-                log::Level::Trace => "📝",
+                log::Level::Error => "ERR",
+                log::Level::Warn => "WRN",
+                log::Level::Info => "INF",
+                log::Level::Debug => "DBG",
+                log::Level::Trace => "TRC",
            };
            let log_entry = format!("[{}] {} {}", timestamp, level_icon, record.args());
            if let Ok(mut panel) = self.log_panel.lock() {

File diff suppressed because it is too large

View file

@@ -2,12 +2,15 @@ use std::sync::Arc;
use crate::shared::state::AppState;
use crate::shared::models::schema::bots::dsl::*;
use crate::nvidia;
+use crate::config::ConfigManager;
use diesel::prelude::*;
+use sysinfo::System;

pub struct StatusPanel {
    app_state: Arc<AppState>,
    last_update: std::time::Instant,
    cached_content: String,
+    system: System,
}

impl StatusPanel {
@@ -16,61 +19,40 @@ impl StatusPanel {
            app_state,
            last_update: std::time::Instant::now(),
            cached_content: String::new(),
+            system: System::new_all(),
        }
    }

    pub async fn update(&mut self) -> Result<(), std::io::Error> {
-        if self.last_update.elapsed() < std::time::Duration::from_secs(2) {
+        if self.last_update.elapsed() < std::time::Duration::from_secs(1) {
            return Ok(());
        }
+        self.system.refresh_all();
+        self.cached_content = String::new();
+        self.last_update = std::time::Instant::now();
+        Ok(())
+    }
+
+    pub fn render(&mut self, selected_bot: Option<String>) -> String {
        let mut lines = Vec::new();
-        lines.push("═══════════════════════════════════════".to_string());
-        lines.push(" COMPONENT STATUS".to_string());
-        lines.push("═══════════════════════════════════════".to_string());
+        self.system.refresh_all();
+        lines.push("╔═══════════════════════════════════════╗".to_string());
+        lines.push("║ SYSTEM METRICS ║".to_string());
+        lines.push("╚═══════════════════════════════════════╝".to_string());
        lines.push("".to_string());
-        let db_status = if self.app_state.conn.try_lock().is_ok() {
-            "🟢 ONLINE"
-        } else {
-            "🔴 OFFLINE"
-        };
-        lines.push(format!(" Database: {}", db_status));
-        let cache_status = if self.app_state.cache.is_some() {
-            "🟢 ONLINE"
-        } else {
-            "🟡 DISABLED"
-        };
-        lines.push(format!(" Cache: {}", cache_status));
-        let drive_status = if self.app_state.drive.is_some() {
-            "🟢 ONLINE"
-        } else {
-            "🔴 OFFLINE"
-        };
-        lines.push(format!(" Drive: {}", drive_status));
-        let llm_status = "🟢 ONLINE";
-        lines.push(format!(" LLM: {}", llm_status));
-        // Get system metrics
        let system_metrics = match nvidia::get_system_metrics(0, 0) {
            Ok(metrics) => metrics,
            Err(_) => nvidia::SystemMetrics::default(),
        };
-        // Add system metrics with progress bars
-        lines.push("".to_string());
-        lines.push("───────────────────────────────────────".to_string());
-        lines.push(" SYSTEM METRICS".to_string());
-        lines.push("───────────────────────────────────────".to_string());
-        // CPU usage with progress bar
        let cpu_bar = Self::create_progress_bar(system_metrics.cpu_usage, 20);
        lines.push(format!(" CPU: {:5.1}% {}", system_metrics.cpu_usage, cpu_bar));
-        // GPU usage with progress bar (if available)
        if let Some(gpu_usage) = system_metrics.gpu_usage {
            let gpu_bar = Self::create_progress_bar(gpu_usage, 20);
            lines.push(format!(" GPU: {:5.1}% {}", gpu_usage, gpu_bar));
@@ -78,10 +60,39 @@ impl StatusPanel {
            lines.push(" GPU: Not available".to_string());
        }

+        let total_mem = self.system.total_memory() as f32 / 1024.0 / 1024.0 / 1024.0;
+        let used_mem = self.system.used_memory() as f32 / 1024.0 / 1024.0 / 1024.0;
+        let mem_percentage = (used_mem / total_mem) * 100.0;
+        let mem_bar = Self::create_progress_bar(mem_percentage, 20);
+        lines.push(format!(" MEM: {:5.1}% {} ({:.1}/{:.1} GB)", mem_percentage, mem_bar, used_mem, total_mem));
+        lines.push("".to_string());
+        lines.push("╔═══════════════════════════════════════╗".to_string());
+        lines.push("║ COMPONENTS STATUS ║".to_string());
+        lines.push("╚═══════════════════════════════════════╝".to_string());
+        lines.push("".to_string());
+        let components = vec![
+            ("Tables", "postgres", "5432"),
+            ("Cache", "valkey-server", "6379"),
+            ("Drive", "minio", "9000"),
+            ("LLM", "llama-server", "8081"),
+        ];
+        for (comp_name, process, port) in components {
+            let status = if Self::check_component_running(process) {
+                format!("🟢 ONLINE [Port: {}]", port)
+            } else {
+                "🔴 OFFLINE".to_string()
+            };
+            lines.push(format!(" {:<10} {}", comp_name, status));
+        }
+        lines.push("".to_string());
+        lines.push("╔═══════════════════════════════════════╗".to_string());
+        lines.push("║ ACTIVE BOTS ║".to_string());
+        lines.push("╚═══════════════════════════════════════╝".to_string());
        lines.push("".to_string());
-        lines.push("───────────────────────────────────────".to_string());
-        lines.push(" ACTIVE BOTS".to_string());
-        lines.push("───────────────────────────────────────".to_string());
        if let Ok(mut conn) = self.app_state.conn.try_lock() {
            match bots
@@ -93,8 +104,36 @@ impl StatusPanel {
                    if bot_list.is_empty() {
                        lines.push(" No active bots".to_string());
                    } else {
-                        for (bot_name, _bot_id) in bot_list {
-                            lines.push(format!(" 🤖 {}", bot_name));
+                        for (bot_name, bot_id) in bot_list {
+                            let marker = if let Some(ref selected) = selected_bot {
+                                if selected == &bot_name { "▶" } else { " " }
+                            } else {
+                                " "
+                            };
+                            lines.push(format!(" {} 🤖 {}", marker, bot_name));
+                            if let Some(ref selected) = selected_bot {
+                                if selected == &bot_name {
+                                    lines.push("".to_string());
+                                    lines.push(" ┌─ Bot Configuration ─────────┐".to_string());
+                                    let config_manager = ConfigManager::new(self.app_state.conn.clone());
+                                    let llm_model = config_manager.get_config(&bot_id, "llm-model", None)
+                                        .unwrap_or_else(|_| "N/A".to_string());
+                                    lines.push(format!(" Model: {}", llm_model));
+                                    let ctx_size = config_manager.get_config(&bot_id, "llm-server-ctx-size", None)
+                                        .unwrap_or_else(|_| "N/A".to_string());
+                                    lines.push(format!(" Context: {}", ctx_size));
+                                    let temp = config_manager.get_config(&bot_id, "llm-temperature", None)
+                                        .unwrap_or_else(|_| "N/A".to_string());
+                                    lines.push(format!(" Temp: {}", temp));
+                                    lines.push(" └─────────────────────────────┘".to_string());
+                                }
+                            }
                        }
                    }
                }
@@ -107,35 +146,32 @@ impl StatusPanel {
        }

        lines.push("".to_string());
-        lines.push("───────────────────────────────────────".to_string());
+        lines.push("╔═══════════════════════════════════════╗".to_string());
        lines.push(" SESSIONS".to_string());
-        lines.push("───────────────────────────────────────".to_string());
+        lines.push("╚═══════════════════════════════════════╝".to_string());
        let session_count = self.app_state.response_channels.try_lock()
            .map(|channels| channels.len())
            .unwrap_or(0);
-        lines.push(format!(" Active: {}", session_count));
-        lines.push("".to_string());
-        lines.push("═══════════════════════════════════════".to_string());
-        self.cached_content = lines.join("\n");
-        self.last_update = std::time::Instant::now();
-        Ok(())
+        lines.push(format!(" Active Sessions: {}", session_count));
+        lines.join("\n")
    }

-    pub fn render(&self) -> String {
-        self.cached_content.clone()
-    }
-
-    /// Creates a visual progress bar for percentage values
    fn create_progress_bar(percentage: f32, width: usize) -> String {
        let filled = (percentage / 100.0 * width as f32).round() as usize;
        let empty = width.saturating_sub(filled);
        let filled_chars = "█".repeat(filled);
        let empty_chars = "░".repeat(empty);
        format!("[{}{}]", filled_chars, empty_chars)
    }
+
+    pub fn check_component_running(process_name: &str) -> bool {
+        std::process::Command::new("pgrep")
+            .arg("-f")
+            .arg(process_name)
+            .output()
+            .map(|output| !output.stdout.is_empty())
+            .unwrap_or(false)
+    }
}
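
To make the two helpers at the bottom concrete (the numbers below are only an illustration, not output from this commit): create_progress_bar fills round(p / 100 × width) cells, and check_component_running reports ONLINE whenever pgrep -f finds any process whose command line matches the pattern, whether or not it is the instance this panel manages.

// Example only; create_progress_bar is private, so this would live inside the same module.
fn example_usage() {
    // 37.5% of a 20-cell bar rounds to 8 filled cells.
    let bar = StatusPanel::create_progress_bar(37.5, 20);
    let tables_up = StatusPanel::check_component_running("postgres");
    println!("CPU sample: {}  Tables running: {}", bar, tables_up);
}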