use crate::config::{write_drive_config_to_env, AppConfig};
use crate::package_manager::{InstallMode, PackageManager};
use crate::shared::utils::establish_pg_connection;
use anyhow::Result;
use aws_config::BehaviorVersion;
use aws_sdk_s3::Client;
use diesel::connection::SimpleConnection;
use dotenvy::dotenv;
use log::{debug, error, info, trace, warn};
use rand::distr::Alphanumeric;
use rand::Rng;
use std::io::{self, Write};
use std::path::Path;
use std::process::Command;
use std::sync::{Arc, Mutex};

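/// A component the bootstrapper can install and start, identified by the
/// registry name that `PackageManager` understands.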
pub struct ComponentInfo {
    pub name: &'static str,
}

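/// Orchestrates first-run setup: verifies PostgreSQL, installs required
/// components, applies migrations, uploads bot templates to the
/// S3-compatible Drive, and resolves the effective `AppConfig`.
///
/// A minimal usage sketch (assuming a Tokio runtime; `InstallMode::Local`
/// is a hypothetical variant used for illustration):
///
/// ```ignore
/// let mut manager = BootstrapManager::new(InstallMode::Local, None).await;
/// let config = manager.bootstrap().await?;
/// manager.start_all()?;
/// ```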
pub struct BootstrapManager {
|
2025-10-19 11:08:23 -03:00
|
|
|
pub install_mode: InstallMode,
|
|
|
|
|
pub tenant: Option<String>,
|
2025-10-30 12:35:25 -03:00
|
|
|
pub s3_client: Client,
|
2025-10-18 19:08:00 -03:00
|
|
|
}
|
|
|
|
|
|
|
|
|
|
impl BootstrapManager {
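    /// Returns `true` when a local PostgreSQL server is accepting
    /// connections, trying `pg_isready -q` first and falling back to
    /// scanning the process table when that binary is unavailable.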
|
2025-11-07 09:37:45 -03:00
|
|
|
fn is_postgres_running() -> bool {
|
|
|
|
|
match Command::new("pg_isready").arg("-q").status() {
|
|
|
|
|
Ok(status) => status.success(),
|
|
|
|
|
Err(_) => {
|
|
|
|
|
// fallback check using pgrep
|
|
|
|
|
Command::new("pgrep").arg("postgres").output().map(|o| !o.stdout.is_empty()).unwrap_or(false)
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2025-10-30 12:35:25 -03:00
|
|
|
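    /// Creates a manager for the given install mode and tenant, starting the
    /// "tables" (PostgreSQL) component if no database is reachable. Panics
    /// when the database cannot be started or the environment config is
    /// incomplete, since bootstrap cannot proceed without them.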
    pub async fn new(install_mode: InstallMode, tenant: Option<String>) -> Self {
        info!(
            "Initializing BootstrapManager with mode {:?} and tenant {:?}",
            install_mode, tenant
        );

        if !Self::is_postgres_running() {
            warn!("PostgreSQL server is not running. Attempting to start 'tables' component...");
            let pm = PackageManager::new(install_mode.clone(), tenant.clone())
                .expect("Failed to initialize PackageManager");
            if let Err(e) = pm.start("tables") {
                error!("Failed to start PostgreSQL component automatically: {}", e);
                panic!("Database not available and auto-start failed.");
            } else {
                info!("PostgreSQL component started successfully.");
            }
        }

        let config = AppConfig::from_env().expect("Failed to load config from env");
        // `new` is already async, so await the client directly rather than
        // blocking the executor with futures::executor::block_on.
        let s3_client = Self::create_s3_operator(&config).await;
        Self {
            install_mode,
            tenant,
            s3_client,
        }
    }

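    /// Starts every installed component, in a fixed order that begins with
    /// "tables" (PostgreSQL). Components that are not installed are skipped.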
    pub fn start_all(&mut self) -> Result<()> {
        let pm = PackageManager::new(self.install_mode.clone(), self.tenant.clone())?;
        let components = vec![
            ComponentInfo { name: "tables" },
            ComponentInfo { name: "cache" },
            ComponentInfo { name: "drive" },
            ComponentInfo { name: "llm" },
            ComponentInfo { name: "email" },
            ComponentInfo { name: "proxy" },
            ComponentInfo { name: "directory" },
            ComponentInfo { name: "alm" },
            ComponentInfo { name: "alm_ci" },
            ComponentInfo { name: "dns" },
            ComponentInfo { name: "webmail" },
            ComponentInfo { name: "meeting" },
            ComponentInfo { name: "table_editor" },
            ComponentInfo { name: "doc_editor" },
            ComponentInfo { name: "desktop" },
            ComponentInfo { name: "devtools" },
            ComponentInfo { name: "bot" },
            ComponentInfo { name: "system" },
            ComponentInfo { name: "vector_db" },
            ComponentInfo { name: "host" },
        ];

        info!("Starting all installed components...");
        for component in components {
            if pm.is_installed(component.name) {
                debug!("Starting component: {}", component.name);
                pm.start(component.name)?;
            }
        }

        Ok(())
    }

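    /// Resolves the application configuration, installing anything missing.
    ///
    /// Legacy mode (a non-empty `TABLES_SERVER`) skips installation and
    /// loads config from Drive, the database, or the environment. Otherwise
    /// the required components are installed, embedded migrations applied,
    /// and `config.csv` from Drive takes precedence when present.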
    pub async fn bootstrap(&mut self) -> Result<AppConfig> {
        // First check for legacy mode.
        if let Ok(tables_server) = std::env::var("TABLES_SERVER") {
            if !tables_server.is_empty() {
                info!(
                    "Legacy mode detected (TABLES_SERVER present), skipping bootstrap installation"
                );
                let _database_url = std::env::var("DATABASE_URL").unwrap_or_else(|_| {
                    let username =
                        std::env::var("TABLES_USERNAME").unwrap_or_else(|_| "gbuser".to_string());
                    let password =
                        std::env::var("TABLES_PASSWORD").unwrap_or_else(|_| "postgres".to_string());
                    let server =
                        std::env::var("TABLES_SERVER").unwrap_or_else(|_| "localhost".to_string());
                    let port = std::env::var("TABLES_PORT").unwrap_or_else(|_| "5432".to_string());
                    let database =
                        std::env::var("TABLES_DATABASE").unwrap_or_else(|_| "gbserver".to_string());
                    format!(
                        "postgres://{}:{}@{}:{}/{}",
                        username, password, server, port, database
                    )
                });

                // In legacy mode, still try to load config.csv if available.
                if let Ok(config) = self.load_config_from_csv().await {
                    return Ok(config);
                }

                match establish_pg_connection() {
                    Ok(mut conn) => {
                        if let Err(e) = self.apply_migrations(&mut conn) {
                            warn!("Failed to apply migrations: {}", e);
                        }
                        return Ok(AppConfig::from_database(&mut conn)
                            .expect("Failed to load config from DB"));
                    }
                    Err(e) => {
                        warn!("Failed to connect to database: {}", e);
                        return Ok(AppConfig::from_env()?);
                    }
                }
            }
        }

        let pm = PackageManager::new(self.install_mode.clone(), self.tenant.clone())?;
        let required_components = vec!["tables", "drive", "cache", "llm"];
        let mut config = AppConfig::from_env().expect("Failed to load config from env");

        for component in required_components {
            if !pm.is_installed(component) {
                let termination_cmd = pm
                    .components
                    .get(component)
                    .and_then(|cfg| cfg.binary_name.clone())
                    .unwrap_or_else(|| component.to_string());

                if !termination_cmd.is_empty() {
                    let check = Command::new("pgrep")
                        .arg("-f")
                        .arg(&termination_cmd)
                        .output();
                    if let Ok(output) = check {
                        if !output.stdout.is_empty() {
                            println!(
                                "Component '{}' appears to be already running from a previous install.",
                                component
                            );
                            println!("Do you want to terminate it? (y/n)");
                            let mut input = String::new();
                            io::stdout().flush().unwrap();
                            io::stdin().read_line(&mut input).unwrap();
                            if input.trim().eq_ignore_ascii_case("y") {
                                let _ = Command::new("pkill")
                                    .arg("-f")
                                    .arg(&termination_cmd)
                                    .status();
                                println!("Terminated existing '{}' process.", component);
                            } else {
                                println!(
                                    "Skipping start of '{}' as it is already running.",
                                    component
                                );
                                continue;
                            }
                        }
                    }
                }

                if component == "tables" {
                    // Seed credentials before the database component first starts.
                    let db_password = self.generate_secure_password(16);
                    let farm_password = self.generate_secure_password(32);
                    let env_contents = format!(
                        "FARM_PASSWORD={}\nDATABASE_URL=postgres://gbuser:{}@localhost:5432/botserver",
                        farm_password, db_password
                    );
                    std::fs::write(".env", &env_contents)
                        .map_err(|e| anyhow::anyhow!("Failed to write .env file: {}", e))?;
                    dotenv().ok();
                }

                // `bootstrap` is async, so await the install directly instead of
                // blocking the executor with futures::executor::block_on.
                pm.install(component).await?;

                if component == "tables" {
                    let mut conn = establish_pg_connection()
                        .map_err(|e| anyhow::anyhow!("Failed to connect to database: {}", e))?;

                    // Migrations are embedded into the binary at compile time.
                    let migration_dir = include_dir::include_dir!("./migrations");
                    let mut migration_files: Vec<_> = migration_dir
                        .files()
                        .filter_map(|file| {
                            let path = file.path();
                            if path.extension()? == "sql" {
                                Some(file)
                            } else {
                                None
                            }
                        })
                        .collect();

                    // Apply migrations in lexicographic filename order.
                    migration_files.sort_by_key(|f| f.path());

                    for migration_file in migration_files {
                        let migration = migration_file
                            .contents_utf8()
                            .ok_or_else(|| anyhow::anyhow!("Migration file is not valid UTF-8"))?;

                        if let Err(e) = conn.batch_execute(migration) {
                            error!(
                                "Failed to execute migration {}: {}",
                                migration_file.path().display(),
                                e
                            );
                            return Err(e.into());
                        }
                        info!(
                            "Successfully executed migration: {}",
                            migration_file.path().display()
                        );
                    }

                    config = AppConfig::from_database(&mut conn)
                        .expect("Failed to load config from DB");
                }
            }
        }

        // Rebuild the S3 client now that the configuration may have changed.
        self.s3_client = Self::create_s3_operator(&config).await;

        // Load config from CSV if available.
        let final_config = if let Ok(csv_config) = self.load_config_from_csv().await {
            csv_config
        } else {
            config
        };

        // Write drive config to the .env file if not already present (first bootstrap).
        if std::env::var("DRIVE_SERVER").is_err() {
            write_drive_config_to_env(&final_config.drive)
                .map_err(|e| anyhow::anyhow!("Failed to write drive config to .env: {}", e))?;
        }

        Ok(final_config)
    }

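    /// Builds an S3 client for the Drive service from `AppConfig`, using
    /// static credentials and path-style addressing.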
    async fn create_s3_operator(config: &AppConfig) -> Client {
        // Normalize the endpoint so it always ends with a trailing slash.
        let endpoint = if !config.drive.server.ends_with('/') {
            format!("{}/", config.drive.server)
        } else {
            config.drive.server.clone()
        };

        let base_config = aws_config::defaults(BehaviorVersion::latest())
            .endpoint_url(endpoint)
            .region("auto")
            .credentials_provider(aws_sdk_s3::config::Credentials::new(
                config.drive.access_key.clone(),
                config.drive.secret_key.clone(),
                None,
                None,
                "static",
            ))
            .load()
            .await;

        // Path-style addressing is needed by most S3-compatible servers,
        // which do not serve virtual-hosted bucket URLs.
        let s3_config = aws_sdk_s3::config::Builder::from(&base_config)
            .force_path_style(true)
            .build();

        aws_sdk_s3::Client::from_conf(s3_config)
    }

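    /// Generates a random alphanumeric password of the given length.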
    fn generate_secure_password(&self, length: usize) -> String {
        let mut rng = rand::rng();
        std::iter::repeat_with(|| rng.sample(Alphanumeric) as char)
            .take(length)
            .collect()
    }

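    /// Registers a bot for every `templates/*.gbai` folder and mirrors each
    /// folder into a Drive bucket of the same name; the upload only happens
    /// when the bucket is newly created.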
    pub async fn upload_templates_to_drive(&self, _config: &AppConfig) -> Result<()> {
        let mut conn = establish_pg_connection()?;
        self.create_bots_from_templates(&mut conn)?;

        let templates_dir = Path::new("templates");
        if !templates_dir.exists() {
            return Ok(());
        }

        let client = &self.s3_client;
        for entry in std::fs::read_dir(templates_dir)? {
            let entry = entry?;
            let path = entry.path();
            if path.is_dir()
                && path
                    .file_name()
                    .unwrap()
                    .to_string_lossy()
                    .ends_with(".gbai")
            {
                let bot_name = path.file_name().unwrap().to_string_lossy().to_string();
                let bucket = bot_name.trim_start_matches('/').to_string();
                info!("Checking template {} for Drive bucket {}", bot_name, bucket);

                // Check if the bucket exists.
                if client.head_bucket().bucket(&bucket).send().await.is_err() {
                    info!(
                        "Bucket {} not found, creating it and uploading template",
                        bucket
                    );
                    match client.create_bucket().bucket(&bucket).send().await {
                        Ok(_) => {
                            debug!("Bucket {} created successfully", bucket);
                            // Only upload the template if the bucket was just created.
                            self.upload_directory_recursive(client, &path, &bucket, "/")
                                .await?;
                            info!("Uploaded template {} to Drive bucket {}", bot_name, bucket);
                        }
                        Err(e) => {
                            error!("Failed to create bucket {}: {:?}", bucket, e);
                            return Err(anyhow::anyhow!(
                                "Failed to create bucket {}: {}. Check S3 credentials and endpoint configuration",
                                bucket,
                                e
                            ));
                        }
                    }
                } else {
                    info!("Bucket {} already exists, skipping template upload", bucket);
                }
            }
        }
        Ok(())
    }

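    /// Inserts a `bots` row with default LLM settings for every
    /// `templates/*.gbai` folder that does not already have one.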
    fn create_bots_from_templates(&self, conn: &mut diesel::PgConnection) -> Result<()> {
        use crate::shared::models::schema::bots;
        use diesel::prelude::*;

        let templates_dir = Path::new("templates");
        if !templates_dir.exists() {
            return Ok(());
        }

        for entry in std::fs::read_dir(templates_dir)? {
            let entry = entry?;
            let path = entry.path();
            if path.is_dir() && path.extension().map(|e| e == "gbai").unwrap_or(false) {
                let bot_folder = path.file_name().unwrap().to_string_lossy().to_string();
                let bot_name = bot_folder.trim_end_matches(".gbai");

                let existing: Option<String> = bots::table
                    .filter(bots::name.eq(&bot_name))
                    .select(bots::name)
                    .first(conn)
                    .optional()?;

                if existing.is_none() {
                    diesel::sql_query(
                        "INSERT INTO bots (id, name, description, llm_provider, llm_config, context_provider, context_config, is_active) \
                         VALUES (gen_random_uuid(), $1, $2, 'openai', '{\"model\": \"gpt-4\", \"temperature\": 0.7}', 'database', '{}', true)"
                    )
                    .bind::<diesel::sql_types::Text, _>(&bot_name)
                    .bind::<diesel::sql_types::Text, _>(format!("Bot for {} template", bot_name))
                    .execute(conn)?;
                } else {
                    trace!("Bot {} already exists", bot_name);
                }
            }
        }

        Ok(())
    }

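    /// Recursively uploads a local directory tree into `bucket` under
    /// `prefix`. Returns a boxed future because recursive async functions
    /// need pointer indirection.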
    fn upload_directory_recursive<'a>(
        &'a self,
        client: &'a Client,
        local_path: &'a Path,
        bucket: &'a str,
        prefix: &'a str,
    ) -> std::pin::Pin<Box<dyn std::future::Future<Output = Result<()>> + 'a>> {
        Box::pin(async move {
            let normalized_path = if !local_path.to_string_lossy().ends_with('/') {
                format!("{}/", local_path.to_string_lossy())
            } else {
                local_path.to_string_lossy().to_string()
            };
            trace!("Starting upload from local path: {}", normalized_path);
            for entry in std::fs::read_dir(local_path)? {
                let entry = entry?;
                let path = entry.path();
                let file_name = path.file_name().unwrap().to_string_lossy().to_string();

                // Construct key path, ensuring no duplicate slashes.
                let mut key = prefix.trim_matches('/').to_string();
                if !key.is_empty() {
                    key.push('/');
                }
                key.push_str(&file_name);

                if path.is_file() {
                    info!(
                        "Uploading file: {} to bucket {} with key: {}",
                        path.display(),
                        bucket,
                        key
                    );
                    let content = std::fs::read(&path)?;
                    client
                        .put_object()
                        .bucket(bucket)
                        .key(&key)
                        .body(content.into())
                        .send()
                        .await?;
                } else if path.is_dir() {
                    self.upload_directory_recursive(client, &path, bucket, &key)
                        .await?;
                }
            }
            Ok(())
        })
    }

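    /// Tries to fetch `default.gbot/config.csv` from the `default.gbai`
    /// bucket, sync it into the database for the default bot, and load the
    /// resulting `AppConfig`.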
    async fn load_config_from_csv(&self) -> Result<AppConfig> {
        use crate::config::ConfigManager;
        use uuid::Uuid;

        let client = &self.s3_client;
        let bucket = "default.gbai";
        let config_key = "default.gbot/config.csv";

        match client.get_object().bucket(bucket).key(config_key).send().await {
            Ok(response) => {
                let bytes = response.body.collect().await?.into_bytes();
                let csv_content = String::from_utf8(bytes.to_vec())?;

                // Create a new connection for config loading.
                let config_conn = establish_pg_connection()?;
                let config_manager = ConfigManager::new(Arc::new(Mutex::new(config_conn)));

                // Use the default bot ID (the nil UUID).
                let default_bot_id = Uuid::parse_str("00000000-0000-0000-0000-000000000000")?;

                // Write the CSV to a temp file for ConfigManager.
                let temp_path = std::env::temp_dir().join("config.csv");
                std::fs::write(&temp_path, csv_content)?;

                // First sync the CSV to the database.
                config_manager
                    .sync_gbot_config(&default_bot_id, temp_path.to_str().unwrap())
                    .map_err(|e| anyhow::anyhow!("Failed to sync gbot config: {}", e))?;

                // Create a fresh connection for the final config load.
                let mut final_conn = establish_pg_connection()?;
                let config = AppConfig::from_database(&mut final_conn)?;
                info!("Successfully loaded config from CSV with LLM settings");
                Ok(config)
            }
            Err(e) => {
                debug!("No config.csv found: {}", e);
                Err(e.into())
            }
        }
    }

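    /// Applies every `.sql` file in the local `migrations` directory in
    /// lexicographic order; individual failures are logged and skipped
    /// rather than aborting the bootstrap.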
    fn apply_migrations(&self, conn: &mut diesel::PgConnection) -> Result<()> {
        let migrations_dir = std::path::Path::new("migrations");
        if !migrations_dir.exists() {
            return Ok(());
        }

        let mut sql_files: Vec<_> = std::fs::read_dir(migrations_dir)?
            .filter_map(|entry| entry.ok())
            .filter(|entry| {
                entry
                    .path()
                    .extension()
                    .and_then(|s| s.to_str())
                    .map(|s| s == "sql")
                    .unwrap_or(false)
            })
            .collect();

        sql_files.sort_by_key(|entry| entry.path());

        for entry in sql_files {
            let path = entry.path();
            let filename = path.file_name().unwrap().to_string_lossy();
            match std::fs::read_to_string(&path) {
                Ok(sql) => {
                    if let Err(e) = conn.batch_execute(&sql) {
                        warn!("Migration {} failed: {}", filename, e);
                    }
                }
                Err(e) => {
                    warn!("Failed to read migration {}: {}", filename, e);
                }
            }
        }

        Ok(())
    }
}