From bf0ed6223fbd5f2c244df0150a44b83c1d957217 Mon Sep 17 00:00:00 2001 From: "Rodrigo Rodriguez (Pragmatismo)" Date: Sat, 22 Nov 2025 22:55:35 -0300 Subject: [PATCH] - Refactor folder as features. --- src/attendance/mod.rs | 12 + src/attendance/queue.rs | 658 +++++++ src/calendar/mod.rs | 482 +++++ src/console/chat_panel.rs | 113 ++ src/console/editor.rs | 142 ++ src/console/file_tree.rs | 268 +++ src/console/log_panel.rs | 64 + src/console/mod.rs | 826 +++++++++ src/console/status_panel.rs | 189 ++ src/core/automation/automation.test.rs | 10 + src/core/automation/mod.rs | 135 ++ src/core/bootstrap/bootstrap.test.rs | 10 + src/core/bootstrap/mod.rs | 397 +++++ src/core/bot/bot.test.rs | 10 + src/core/bot/channels/channels.test.rs | 10 + src/core/bot/channels/instagram.rs | 29 + src/core/bot/channels/mod.rs | 127 ++ src/core/bot/channels/teams.rs | 29 + src/core/bot/channels/whatsapp.rs | 29 + src/core/bot/mod.rs | 580 ++++++ src/core/bot/multimedia.rs | 542 ++++++ src/core/bot/ui.rs | 1 + src/core/config/config.test.rs | 10 + src/core/config/mod.rs | 240 +++ src/core/mod.rs | 8 + src/core/package_manager/cli.rs | 168 ++ src/core/package_manager/component.rs | 22 + src/core/package_manager/facade.rs | 632 +++++++ src/core/package_manager/installer.rs | 732 ++++++++ src/core/package_manager/mod.rs | 43 + src/core/package_manager/os.rs | 12 + .../package_manager/package_manager.test.rs | 25 + .../package_manager/setup/directory_setup.rs | 497 ++++++ src/core/package_manager/setup/email_setup.rs | 339 ++++ src/core/package_manager/setup/mod.rs | 5 + src/core/session/mod.rs | 457 +++++ src/core/session/session.test.rs | 15 + src/core/shared/admin.rs | 623 +++++++ src/core/shared/analytics.rs | 557 ++++++ src/core/shared/mod.rs | 5 + src/core/shared/models.rs | 351 ++++ src/core/shared/schema.rs | 0 src/core/shared/shared.test.rs | 25 + src/core/shared/state.rs | 87 + src/core/shared/utils.rs | 164 ++ src/core/web_server/mod.rs | 46 + 
src/core/web_server/web_server.test.rs | 15 + src/desktop/drive.rs | 82 + src/desktop/local-sync.rs | 391 ++++ src/desktop/mod.rs | 3 + src/desktop/stream.rs | 23 + src/desktop/sync.rs | 126 ++ src/desktop/ui.test.rs | 20 + src/directory/client.rs | 435 +++++ src/directory/groups.rs | 345 ++++ src/directory/mod.rs | 184 ++ src/directory/router.rs | 98 ++ src/directory/users.rs | 327 ++++ src/drive/api.rs | 527 ++++++ src/drive/drive_monitor/drive_monitor.test.rs | 10 + src/drive/drive_monitor/mod.rs | 355 ++++ src/drive/files.rs | 1325 ++++++++++++++ src/instagram/instagram.rs | 336 ++++ src/instagram/mod.rs | 3 + src/llm/compact_prompt.rs | 177 ++ src/llm/context/context.test.rs | 15 + src/llm/context/mod.rs | 0 src/llm/llm_models/deepseek_r3.rs | 16 + src/llm/llm_models/gpt_oss_120b.rs | 19 + src/llm/llm_models/gpt_oss_20b.rs | 18 + src/llm/llm_models/llm_models.test.rs | 35 + src/llm/llm_models/mod.rs | 20 + src/llm/prompt_manager/README.md | 1 + src/llm/prompt_manager/prompts.csv | 1567 +++++++++++++++++ src/msteams/mod.rs | 3 + src/msteams/teams.rs | 359 ++++ src/tasks/mod.rs | 708 ++++++++ src/vector-db/mod.rs | 3 + src/vector-db/vectordb_indexer.rs | 555 ++++++ src/weba/mod.rs | 6 + src/whatsapp/mod.rs | 3 + src/whatsapp/whatsapp.rs | 444 +++++ web/desktop/js/feature-manager.js | 523 ++++++ 83 files changed, 18803 insertions(+) create mode 100644 src/attendance/mod.rs create mode 100644 src/attendance/queue.rs create mode 100644 src/calendar/mod.rs create mode 100644 src/console/chat_panel.rs create mode 100644 src/console/editor.rs create mode 100644 src/console/file_tree.rs create mode 100644 src/console/log_panel.rs create mode 100644 src/console/mod.rs create mode 100644 src/console/status_panel.rs create mode 100644 src/core/automation/automation.test.rs create mode 100644 src/core/automation/mod.rs create mode 100644 src/core/bootstrap/bootstrap.test.rs create mode 100644 src/core/bootstrap/mod.rs create mode 100644 src/core/bot/bot.test.rs create mode 
100644 src/core/bot/channels/channels.test.rs create mode 100644 src/core/bot/channels/instagram.rs create mode 100644 src/core/bot/channels/mod.rs create mode 100644 src/core/bot/channels/teams.rs create mode 100644 src/core/bot/channels/whatsapp.rs create mode 100644 src/core/bot/mod.rs create mode 100644 src/core/bot/multimedia.rs create mode 100644 src/core/bot/ui.rs create mode 100644 src/core/config/config.test.rs create mode 100644 src/core/config/mod.rs create mode 100644 src/core/mod.rs create mode 100644 src/core/package_manager/cli.rs create mode 100644 src/core/package_manager/component.rs create mode 100644 src/core/package_manager/facade.rs create mode 100644 src/core/package_manager/installer.rs create mode 100644 src/core/package_manager/mod.rs create mode 100644 src/core/package_manager/os.rs create mode 100644 src/core/package_manager/package_manager.test.rs create mode 100644 src/core/package_manager/setup/directory_setup.rs create mode 100644 src/core/package_manager/setup/email_setup.rs create mode 100644 src/core/package_manager/setup/mod.rs create mode 100644 src/core/session/mod.rs create mode 100644 src/core/session/session.test.rs create mode 100644 src/core/shared/admin.rs create mode 100644 src/core/shared/analytics.rs create mode 100644 src/core/shared/mod.rs create mode 100644 src/core/shared/models.rs create mode 100644 src/core/shared/schema.rs create mode 100644 src/core/shared/shared.test.rs create mode 100644 src/core/shared/state.rs create mode 100644 src/core/shared/utils.rs create mode 100644 src/core/web_server/mod.rs create mode 100644 src/core/web_server/web_server.test.rs create mode 100644 src/desktop/drive.rs create mode 100644 src/desktop/local-sync.rs create mode 100644 src/desktop/mod.rs create mode 100644 src/desktop/stream.rs create mode 100644 src/desktop/sync.rs create mode 100644 src/desktop/ui.test.rs create mode 100644 src/directory/client.rs create mode 100644 src/directory/groups.rs create mode 100644 
src/directory/mod.rs create mode 100644 src/directory/router.rs create mode 100644 src/directory/users.rs create mode 100644 src/drive/api.rs create mode 100644 src/drive/drive_monitor/drive_monitor.test.rs create mode 100644 src/drive/drive_monitor/mod.rs create mode 100644 src/drive/files.rs create mode 100644 src/instagram/instagram.rs create mode 100644 src/instagram/mod.rs create mode 100644 src/llm/compact_prompt.rs create mode 100644 src/llm/context/context.test.rs create mode 100644 src/llm/context/mod.rs create mode 100644 src/llm/llm_models/deepseek_r3.rs create mode 100644 src/llm/llm_models/gpt_oss_120b.rs create mode 100644 src/llm/llm_models/gpt_oss_20b.rs create mode 100644 src/llm/llm_models/llm_models.test.rs create mode 100644 src/llm/llm_models/mod.rs create mode 100644 src/llm/prompt_manager/README.md create mode 100644 src/llm/prompt_manager/prompts.csv create mode 100644 src/msteams/mod.rs create mode 100644 src/msteams/teams.rs create mode 100644 src/tasks/mod.rs create mode 100644 src/vector-db/mod.rs create mode 100644 src/vector-db/vectordb_indexer.rs create mode 100644 src/weba/mod.rs create mode 100644 src/whatsapp/mod.rs create mode 100644 src/whatsapp/whatsapp.rs create mode 100644 web/desktop/js/feature-manager.js diff --git a/src/attendance/mod.rs b/src/attendance/mod.rs new file mode 100644 index 000000000..2505af64b --- /dev/null +++ b/src/attendance/mod.rs @@ -0,0 +1,12 @@ +//! REST API Module +//! +//! Provides HTTP endpoints for cloud-based functionality. +//! Supports web, desktop, and mobile clients. +//! +//! Note: Local operations require native access and are handled separately: +//! - Screen capture: Tauri commands (desktop) or WebRTC (web/mobile) +//! 
- File sync: Tauri commands with local rclone process (desktop only) + +pub mod drive; +pub mod keyword_services; +pub mod queue; diff --git a/src/attendance/queue.rs b/src/attendance/queue.rs new file mode 100644 index 000000000..528a67a6e --- /dev/null +++ b/src/attendance/queue.rs @@ -0,0 +1,658 @@ +//! Queue Management API for Attendant System +//! +//! Handles conversation queues, attendant assignment, and real-time updates. +//! Reads attendant data from attendant.csv in bot's .gbai folder. + +use crate::shared::models::UserSession; +use crate::shared::state::AppState; +use axum::{ + extract::{Path, Query, State}, + http::StatusCode, + response::IntoResponse, + Json, +}; +use chrono::Utc; +use diesel::prelude::*; +use log::{error, info, warn}; +use serde::{Deserialize, Serialize}; +use std::collections::HashMap; +use std::path::PathBuf; +use std::sync::Arc; +use uuid::Uuid; + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct QueueItem { + pub session_id: Uuid, + pub user_id: Uuid, + pub bot_id: Uuid, + pub channel: String, + pub user_name: String, + pub user_email: Option, + pub last_message: String, + pub last_message_time: String, + pub waiting_time_seconds: i64, + pub priority: i32, + pub status: QueueStatus, + pub assigned_to: Option, + pub assigned_to_name: Option, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "snake_case")] +pub enum QueueStatus { + Waiting, + Assigned, + Active, + Resolved, + Abandoned, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct AttendantStats { + pub attendant_id: String, + pub attendant_name: String, + pub channel: String, + pub preferences: String, + pub active_conversations: i32, + pub total_handled_today: i32, + pub avg_response_time_seconds: i32, + pub status: AttendantStatus, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct AttendantCSV { + pub id: String, + pub name: String, + pub channel: String, + pub preferences: String, +} + +#[derive(Debug, 
Clone, Serialize, Deserialize)] +#[serde(rename_all = "snake_case")] +pub enum AttendantStatus { + Online, + Busy, + Away, + Offline, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct AssignRequest { + pub session_id: Uuid, + pub attendant_id: Uuid, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct TransferRequest { + pub session_id: Uuid, + pub from_attendant_id: Uuid, + pub to_attendant_id: Uuid, + pub reason: Option, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct QueueFilters { + pub channel: Option, + pub status: Option, + pub assigned_to: Option, +} + +/// Check if bot has transfer enabled in config.csv +async fn is_transfer_enabled(bot_id: Uuid, work_path: &str) -> bool { + let config_path = PathBuf::from(work_path) + .join(format!("{}.gbai", bot_id)) + .join("config.csv"); + + if !config_path.exists() { + warn!("Config file not found: {:?}", config_path); + return false; + } + + match std::fs::read_to_string(&config_path) { + Ok(content) => { + for line in content.lines() { + if line.to_lowercase().contains("transfer") && line.to_lowercase().contains("true") + { + return true; + } + } + false + } + Err(e) => { + error!("Failed to read config file: {}", e); + false + } + } +} + +/// Read attendants from attendant.csv +async fn read_attendants_csv(bot_id: Uuid, work_path: &str) -> Vec { + let attendant_path = PathBuf::from(work_path) + .join(format!("{}.gbai", bot_id)) + .join("attendant.csv"); + + if !attendant_path.exists() { + warn!("Attendant file not found: {:?}", attendant_path); + return Vec::new(); + } + + match std::fs::read_to_string(&attendant_path) { + Ok(content) => { + let mut attendants = Vec::new(); + let mut lines = content.lines(); + + // Skip header + lines.next(); + + for line in lines { + let parts: Vec<&str> = line.split(',').map(|s| s.trim()).collect(); + if parts.len() >= 4 { + attendants.push(AttendantCSV { + id: parts[0].to_string(), + name: parts[1].to_string(), + channel: 
parts[2].to_string(), + preferences: parts[3].to_string(), + }); + } + } + attendants + } + Err(e) => { + error!("Failed to read attendant file: {}", e); + Vec::new() + } + } +} + +/// GET /api/queue/list +/// Get all conversations in queue (only if bot has transfer=true) +pub async fn list_queue( + State(state): State>, + Query(filters): Query, +) -> impl IntoResponse { + info!("Listing queue items with filters: {:?}", filters); + + let result = tokio::task::spawn_blocking({ + let conn = state.conn.clone(); + move || { + let mut db_conn = conn + .get() + .map_err(|e| format!("Failed to get database connection: {}", e))?; + + use crate::shared::models::schema::user_sessions; + use crate::shared::models::schema::users; + + // Build query - get recent sessions with user info + let sessions_data: Vec = user_sessions::table + .order(user_sessions::created_at.desc()) + .limit(50) + .load(&mut db_conn) + .map_err(|e| format!("Failed to load sessions: {}", e))?; + + let mut queue_items = Vec::new(); + + for session_data in sessions_data { + // Get user info separately + let user_info: Option<(String, String)> = users::table + .filter(users::id.eq(session_data.user_id)) + .select((users::username, users::email)) + .first(&mut db_conn) + .optional() + .map_err(|e| format!("Failed to load user: {}", e))?; + + let (uname, uemail) = user_info.unwrap_or_else(|| { + ( + format!("user_{}", session_data.user_id), + format!("{}@unknown.local", session_data.user_id), + ) + }); + + let channel = session_data + .context_data + .get("channel") + .and_then(|c| c.as_str()) + .unwrap_or("web") + .to_string(); + + let waiting_time = (Utc::now() - session_data.updated_at).num_seconds(); + + queue_items.push(QueueItem { + session_id: session_data.id, + user_id: session_data.user_id, + bot_id: session_data.bot_id, + channel, + user_name: uname, + user_email: Some(uemail), + last_message: session_data.title.clone(), + last_message_time: session_data.updated_at.to_rfc3339(), + 
waiting_time_seconds: waiting_time, + priority: if waiting_time > 300 { 2 } else { 1 }, + status: QueueStatus::Waiting, + assigned_to: None, + assigned_to_name: None, + }); + } + + Ok::, String>(queue_items) + } + }) + .await; + + match result { + Ok(Ok(queue_items)) => { + info!("Found {} queue items", queue_items.len()); + (StatusCode::OK, Json(queue_items)) + } + Ok(Err(e)) => { + error!("Queue list error: {}", e); + ( + StatusCode::INTERNAL_SERVER_ERROR, + Json(vec![] as Vec), + ) + } + Err(e) => { + error!("Task error: {}", e); + ( + StatusCode::INTERNAL_SERVER_ERROR, + Json(vec![] as Vec), + ) + } + } +} + +/// GET /api/queue/attendants?bot_id={bot_id} +/// Get all attendants from attendant.csv for a bot +pub async fn list_attendants( + State(state): State>, + Query(params): Query>, +) -> impl IntoResponse { + info!("Listing attendants"); + + let bot_id_str = params.get("bot_id").cloned().unwrap_or_default(); + let bot_id = match Uuid::parse_str(&bot_id_str) { + Ok(id) => id, + Err(_) => { + // Get default bot + let conn = state.conn.clone(); + let result = tokio::task::spawn_blocking(move || { + let mut db_conn = conn.get().ok()?; + use crate::shared::models::schema::bots; + bots::table + .filter(bots::is_active.eq(true)) + .select(bots::id) + .first::(&mut db_conn) + .ok() + }) + .await; + + match result { + Ok(Some(id)) => id, + _ => { + error!("No valid bot_id provided and no default bot found"); + return (StatusCode::BAD_REQUEST, Json(vec![] as Vec)); + } + } + } + }; + + // Check if transfer is enabled + let work_path = "./work"; + if !is_transfer_enabled(bot_id, work_path).await { + warn!("Transfer not enabled for bot {}", bot_id); + return (StatusCode::OK, Json(vec![] as Vec)); + } + + // Read attendants from CSV + let attendant_csvs = read_attendants_csv(bot_id, work_path).await; + + let attendants: Vec = attendant_csvs + .into_iter() + .map(|att| AttendantStats { + attendant_id: att.id, + attendant_name: att.name, + channel: att.channel, + 
preferences: att.preferences, + active_conversations: 0, + total_handled_today: 0, + avg_response_time_seconds: 0, + status: AttendantStatus::Online, + }) + .collect(); + + info!("Found {} attendants from CSV", attendants.len()); + (StatusCode::OK, Json(attendants)) +} + +/// POST /api/queue/assign +/// Assign conversation to attendant (stores in session context_data) +pub async fn assign_conversation( + State(state): State>, + Json(request): Json, +) -> impl IntoResponse { + info!( + "Assigning session {} to attendant {}", + request.session_id, request.attendant_id + ); + + // Store assignment in session context_data + let result = tokio::task::spawn_blocking({ + let conn = state.conn.clone(); + let session_id = request.session_id; + let attendant_id = request.attendant_id; + + move || { + let mut db_conn = conn + .get() + .map_err(|e| format!("Failed to get database connection: {}", e))?; + + use crate::shared::models::schema::user_sessions; + + // Get current session + let session: UserSession = user_sessions::table + .filter(user_sessions::id.eq(session_id)) + .first(&mut db_conn) + .map_err(|e| format!("Session not found: {}", e))?; + + // Update context_data with assignment + let mut ctx = session.context_data.clone(); + ctx["assigned_to"] = serde_json::json!(attendant_id.to_string()); + ctx["assigned_at"] = serde_json::json!(Utc::now().to_rfc3339()); + ctx["status"] = serde_json::json!("assigned"); + + diesel::update(user_sessions::table.filter(user_sessions::id.eq(session_id))) + .set(user_sessions::context_data.eq(&ctx)) + .execute(&mut db_conn) + .map_err(|e| format!("Failed to update session: {}", e))?; + + Ok::<(), String>(()) + } + }) + .await; + + match result { + Ok(Ok(())) => ( + StatusCode::OK, + Json(serde_json::json!({ + "success": true, + "session_id": request.session_id, + "attendant_id": request.attendant_id, + "assigned_at": Utc::now().to_rfc3339() + })), + ), + Ok(Err(e)) => { + error!("Assignment error: {}", e); + ( + 
StatusCode::INTERNAL_SERVER_ERROR, + Json(serde_json::json!({ + "success": false, + "error": e + })), + ) + } + Err(e) => { + error!("Assignment error: {:?}", e); + ( + StatusCode::INTERNAL_SERVER_ERROR, + Json(serde_json::json!({ + "success": false, + "error": format!("{:?}", e) + })), + ) + } + } +} + +/// POST /api/queue/transfer +/// Transfer conversation between attendants +pub async fn transfer_conversation( + State(state): State>, + Json(request): Json, +) -> impl IntoResponse { + info!( + "Transferring session {} from {} to {}", + request.session_id, request.from_attendant_id, request.to_attendant_id + ); + + let result = tokio::task::spawn_blocking({ + let conn = state.conn.clone(); + let session_id = request.session_id; + let to_attendant = request.to_attendant_id; + let reason = request.reason.clone(); + + move || { + let mut db_conn = conn + .get() + .map_err(|e| format!("Failed to get database connection: {}", e))?; + + use crate::shared::models::schema::user_sessions; + + // Get current session + let session: UserSession = user_sessions::table + .filter(user_sessions::id.eq(session_id)) + .first(&mut db_conn) + .map_err(|e| format!("Session not found: {}", e))?; + + // Update context_data with transfer info + let mut ctx = session.context_data.clone(); + ctx["assigned_to"] = serde_json::json!(to_attendant.to_string()); + ctx["transferred_at"] = serde_json::json!(Utc::now().to_rfc3339()); + ctx["transfer_reason"] = serde_json::json!(reason.unwrap_or_default()); + ctx["status"] = serde_json::json!("transferred"); + + diesel::update(user_sessions::table.filter(user_sessions::id.eq(session_id))) + .set(( + user_sessions::context_data.eq(&ctx), + user_sessions::updated_at.eq(Utc::now()), + )) + .execute(&mut db_conn) + .map_err(|e| format!("Failed to update session: {}", e))?; + + Ok::<(), String>(()) + } + }) + .await; + + match result { + Ok(Ok(())) => ( + StatusCode::OK, + Json(serde_json::json!({ + "success": true, + "session_id": request.session_id, + 
"from_attendant": request.from_attendant_id, + "to_attendant": request.to_attendant_id, + "transferred_at": Utc::now().to_rfc3339() + })), + ), + Ok(Err(e)) => { + error!("Transfer error: {}", e); + ( + StatusCode::INTERNAL_SERVER_ERROR, + Json(serde_json::json!({ + "success": false, + "error": e + })), + ) + } + Err(e) => { + error!("Transfer error: {:?}", e); + ( + StatusCode::INTERNAL_SERVER_ERROR, + Json(serde_json::json!({ + "success": false, + "error": format!("{:?}", e) + })), + ) + } + } +} + +/// POST /api/queue/resolve +/// Mark conversation as resolved +pub async fn resolve_conversation( + State(state): State>, + Json(payload): Json, +) -> impl IntoResponse { + let session_id = payload + .get("session_id") + .and_then(|v| v.as_str()) + .and_then(|s| Uuid::parse_str(s).ok()) + .unwrap_or_else(Uuid::nil); + + info!("Resolving session {}", session_id); + + let result = tokio::task::spawn_blocking({ + let conn = state.conn.clone(); + + move || { + let mut db_conn = conn + .get() + .map_err(|e| format!("Failed to get database connection: {}", e))?; + + use crate::shared::models::schema::user_sessions; + + // Get current session + let session: UserSession = user_sessions::table + .filter(user_sessions::id.eq(session_id)) + .first(&mut db_conn) + .map_err(|e| format!("Session not found: {}", e))?; + + // Update context_data to mark as resolved + let mut ctx = session.context_data.clone(); + ctx["status"] = serde_json::json!("resolved"); + ctx["resolved_at"] = serde_json::json!(Utc::now().to_rfc3339()); + ctx["resolved"] = serde_json::json!(true); + + diesel::update(user_sessions::table.filter(user_sessions::id.eq(session_id))) + .set(( + user_sessions::context_data.eq(&ctx), + user_sessions::updated_at.eq(Utc::now()), + )) + .execute(&mut db_conn) + .map_err(|e| format!("Failed to update session: {}", e))?; + + Ok::<(), String>(()) + } + }) + .await; + + match result { + Ok(Ok(())) => ( + StatusCode::OK, + Json(serde_json::json!({ + "success": true, + 
"session_id": session_id, + "resolved_at": Utc::now().to_rfc3339() + })), + ), + Ok(Err(e)) => { + error!("Resolve error: {}", e); + ( + StatusCode::INTERNAL_SERVER_ERROR, + Json(serde_json::json!({ + "success": false, + "error": e + })), + ) + } + Err(e) => { + error!("Resolve error: {:?}", e); + ( + StatusCode::INTERNAL_SERVER_ERROR, + Json(serde_json::json!({ + "success": false, + "error": format!("{:?}", e) + })), + ) + } + } +} + +/// GET /api/queue/insights/{session_id} +/// Get bot insights for a conversation +pub async fn get_insights( + State(state): State>, + Path(session_id): Path, +) -> impl IntoResponse { + info!("Getting insights for session {}", session_id); + + let result = tokio::task::spawn_blocking({ + let conn = state.conn.clone(); + move || { + let mut db_conn = conn + .get() + .map_err(|e| format!("Failed to get database connection: {}", e))?; + + use crate::shared::models::schema::message_history; + + // Get recent messages + let messages: Vec<(String, i32)> = message_history::table + .filter(message_history::session_id.eq(session_id)) + .select((message_history::content_encrypted, message_history::role)) + .order(message_history::created_at.desc()) + .limit(10) + .load(&mut db_conn) + .map_err(|e| format!("Failed to load messages: {}", e))?; + + // Analyze sentiment and intent (simplified) + let user_messages: Vec = messages + .iter() + .filter(|(_, r)| *r == 0) // User messages + .map(|(c, _)| c.clone()) + .collect(); + + let sentiment = if user_messages.iter().any(|m| { + m.to_lowercase().contains("urgent") + || m.to_lowercase().contains("problem") + || m.to_lowercase().contains("issue") + }) { + "negative" + } else if user_messages + .iter() + .any(|m| m.to_lowercase().contains("thanks") || m.to_lowercase().contains("great")) + { + "positive" + } else { + "neutral" + }; + + let suggested_reply = if sentiment == "negative" { + "I understand this is frustrating. Let me help you resolve this immediately." 
+ } else { + "How can I assist you further?" + }; + + Ok::(serde_json::json!({ + "session_id": session_id, + "sentiment": sentiment, + "message_count": messages.len(), + "suggested_reply": suggested_reply, + "key_topics": ["support", "technical"], + "priority": if sentiment == "negative" { "high" } else { "normal" }, + "language": "en" + })) + } + }) + .await; + + match result { + Ok(Ok(insights)) => (StatusCode::OK, Json(insights)), + Ok(Err(e)) => { + error!("Insights error: {}", e); + ( + StatusCode::INTERNAL_SERVER_ERROR, + Json(serde_json::json!({ + "error": e + })), + ) + } + Err(e) => { + error!("Task error: {}", e); + ( + StatusCode::INTERNAL_SERVER_ERROR, + Json(serde_json::json!({ + "error": format!("Task error: {}", e) + })), + ) + } + } +} diff --git a/src/calendar/mod.rs b/src/calendar/mod.rs new file mode 100644 index 000000000..e61b0775f --- /dev/null +++ b/src/calendar/mod.rs @@ -0,0 +1,482 @@ +use axum::{ + extract::{Path, Query, State}, + http::StatusCode, + response::Json, + routing::{delete, get, post, put}, + Router, +}; +use chrono::{DateTime, Utc}; +use diesel::prelude::*; +use serde::{Deserialize, Serialize}; +use std::sync::Arc; + +use crate::shared::utils::DbPool; +use tokio::sync::RwLock; +use uuid::Uuid; + +// TODO: Replace sqlx queries with Diesel queries + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct CalendarEvent { + pub id: Uuid, + pub title: String, + pub description: Option, + pub start_time: DateTime, + pub end_time: DateTime, + pub location: Option, + pub attendees: Vec, + pub organizer: String, + pub reminder_minutes: Option, + pub recurrence_rule: Option, + pub status: EventStatus, + pub created_at: DateTime, + pub updated_at: DateTime, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "lowercase")] +pub enum EventStatus { + Scheduled, + InProgress, + Completed, + Cancelled, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct Meeting { + pub id: Uuid, + pub event_id: Uuid, 
+ pub meeting_url: Option, + pub meeting_id: Option, + pub platform: MeetingPlatform, + pub recording_url: Option, + pub notes: Option, + pub action_items: Vec, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "lowercase")] +pub enum MeetingPlatform { + Zoom, + Teams, + Meet, + Internal, + Other(String), +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct ActionItem { + pub id: Uuid, + pub description: String, + pub assignee: String, + pub due_date: Option>, + pub completed: bool, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct CalendarReminder { + pub id: Uuid, + pub event_id: Uuid, + pub remind_at: DateTime, + pub message: String, + pub channel: ReminderChannel, + pub sent: bool, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "lowercase")] +pub enum ReminderChannel { + Email, + Sms, + Push, + InApp, +} + +#[derive(Clone)] +pub struct CalendarEngine { + db: Arc, + cache: Arc>>, +} + +impl CalendarEngine { + pub fn new(db: Arc) -> Self { + Self { + db, + cache: Arc::new(RwLock::new(Vec::new())), + } + } + + pub async fn create_event( + &self, + event: CalendarEvent, + ) -> Result> { + // TODO: Implement with Diesel + /* + let result = sqlx::query!( + r#" + INSERT INTO calendar_events + (id, title, description, start_time, end_time, location, attendees, organizer, + reminder_minutes, recurrence_rule, status, created_at, updated_at) + VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13) + RETURNING * + "#, + event.id, + event.title, + event.description, + event.start_time, + event.end_time, + event.location, + &event.attendees[..], + event.organizer, + event.reminder_minutes, + event.recurrence_rule, + serde_json::to_value(&event.status)?, + event.created_at, + event.updated_at + ) + .fetch_one(self.db.as_ref()) + .await?; + */ + + self.refresh_cache().await?; + + Ok(event) + Ok(event) + } + + pub async fn update_event( + &self, + id: Uuid, + updates: serde_json::Value, 
+ ) -> Result> { + let updated_at = Utc::now(); + + let result = sqlx::query!( + r#" + UPDATE calendar_events + SET title = COALESCE($2, title), + description = COALESCE($3, description), + start_time = COALESCE($4, start_time), + end_time = COALESCE($5, end_time), + location = COALESCE($6, location), + updated_at = $7 + WHERE id = $1 + RETURNING * + "#, + id, + updates.get("title").and_then(|v| v.as_str()), + updates.get("description").and_then(|v| v.as_str()), + updates + .get("start_time") + .and_then(|v| DateTime::parse_from_rfc3339(v.as_str()?).ok()) + .map(|dt| dt.with_timezone(&Utc)), + updates + .get("end_time") + .and_then(|v| DateTime::parse_from_rfc3339(v.as_str()?).ok()) + .map(|dt| dt.with_timezone(&Utc)), + updates.get("location").and_then(|v| v.as_str()), + updated_at + ) + .fetch_one(self.db.as_ref()) + .await?; + + self.refresh_cache().await?; + + Ok(serde_json::from_value(serde_json::to_value(result)?)?) + } + + pub async fn delete_event(&self, _id: Uuid) -> Result> { + // TODO: Implement with Diesel + /* + let result = sqlx::query!("DELETE FROM calendar_events WHERE id = $1", id) + .execute(self.db.as_ref()) + .await?; + */ + + self.refresh_cache().await?; + + Ok(false) + } + + pub async fn get_events_range( + &self, + start: DateTime, + end: DateTime, + ) -> Result, Box> { + // TODO: Implement with Diesel + /* + let results = sqlx::query_as!( + CalendarEvent, + r#" + SELECT * FROM calendar_events + WHERE start_time >= $1 AND end_time <= $2 + ORDER BY start_time ASC + "#, + start, + end + ) + .fetch_all(self.db.as_ref()) + .await?; + */ + + Ok(vec![]) + } + + pub async fn get_user_events( + &self, + user_id: &str, + ) -> Result, Box> { + // TODO: Implement with Diesel + /* + let results = sqlx::query!( + r#" + SELECT * FROM calendar_events + WHERE organizer = $1 OR $1 = ANY(attendees) + ORDER BY start_time ASC + "#, + user_id + ) + .fetch_all(self.db.as_ref()) + .await?; + + Ok(results + .into_iter() + .map(|r| 
serde_json::from_value(serde_json::to_value(r).unwrap()).unwrap()) + .collect()) + */ + Ok(vec![]) + } + + pub async fn create_meeting( + &self, + event_id: Uuid, + platform: MeetingPlatform, + ) -> Result> { + let meeting = Meeting { + id: Uuid::new_v4(), + event_id, + meeting_url: None, + meeting_id: None, + platform, + recording_url: None, + notes: None, + action_items: Vec::new(), + }; + + // TODO: Implement with Diesel + /* + sqlx::query!( + r#" + INSERT INTO meetings (id, event_id, platform, created_at) + VALUES ($1, $2, $3, $4) + "#, + meeting.id, + meeting.event_id, + meeting.platform, + meeting.created_at + ) + .execute(self.db.as_ref()) + .await?; + */ + + Ok(meeting) + } + + pub async fn schedule_reminder( + &self, + event_id: Uuid, + minutes_before: i32, + channel: ReminderChannel, + ) -> Result> { + let event = self.get_event(event_id).await?; + let remind_at = event.start_time - chrono::Duration::minutes(minutes_before as i64); + + let reminder = CalendarReminder { + id: Uuid::new_v4(), + event_id, + remind_at, + message: format!( + "Reminder: {} starts in {} minutes", + event.title, minutes_before + ), + channel, + sent: false, + }; + + // TODO: Implement with Diesel + /* + sqlx::query!( + r#" + INSERT INTO calendar_reminders (id, event_id, remind_at, message, channel, sent) + VALUES ($1, $2, $3, $4, $5, $6) + "#, + reminder.id, + reminder.event_id, + reminder.remind_at, + reminder.message, + reminder.channel, + reminder.sent + ) + .execute(self.db.as_ref()) + .await?; + */ + + Ok(reminder) + } + + pub async fn get_event(&self, _id: Uuid) -> Result> { + // TODO: Implement with Diesel + /* + let result = sqlx::query!("SELECT * FROM calendar_events WHERE id = $1", id) + .fetch_one(self.db.as_ref()) + .await?; + + Ok(serde_json::from_value(serde_json::to_value(result)?)?) 
+ */ + Err("Not implemented".into()) + } + + pub async fn check_conflicts( + &self, + start: DateTime, + end: DateTime, + user_id: &str, + ) -> Result, Box> { + // TODO: Implement with Diesel + /* + let results = sqlx::query!( + r#" + SELECT * FROM calendar_events + WHERE (organizer = $1 OR $1 = ANY(attendees)) + AND NOT (end_time <= $2 OR start_time >= $3) + "#, + user_id, + start, + end + ) + .fetch_all(self.db.as_ref()) + .await?; + + Ok(results + .into_iter() + .map(|r| serde_json::from_value(serde_json::to_value(r).unwrap()).unwrap()) + .collect()) + */ + Ok(vec![]) + } + + async fn refresh_cache(&self) -> Result<(), Box> { + // TODO: Implement with Diesel + /* + let results = sqlx::query!("SELECT * FROM calendar_events ORDER BY start_time ASC") + .fetch_all(self.db.as_ref()) + .await?; + + let events: Vec = results + .into_iter() + .map(|r| serde_json::from_value(serde_json::to_value(r).unwrap()).unwrap()) + .collect(); + */ + + let events: Vec = vec![]; + let mut cache = self.cache.write().await; + *cache = events; + + Ok(()) + } +} + +#[derive(Deserialize)] +pub struct EventQuery { + pub start: Option, + pub end: Option, + pub user_id: Option, +} + +#[derive(Deserialize)] +pub struct MeetingRequest { + pub event_id: Uuid, + pub platform: MeetingPlatform, +} + +async fn create_event_handler( + State(engine): State>, + Json(event): Json, +) -> Result, StatusCode> { + match engine.create_event(event).await { + Ok(created) => Ok(Json(created)), + Err(_) => Err(StatusCode::INTERNAL_SERVER_ERROR), + } +} + +async fn get_events_handler( + State(engine): State>, + Query(params): Query, +) -> Result>, StatusCode> { + if let (Some(start), Some(end)) = (params.start, params.end) { + let start = DateTime::parse_from_rfc3339(&start) + .map(|dt| dt.with_timezone(&Utc)) + .unwrap_or_else(|_| Utc::now()); + let end = DateTime::parse_from_rfc3339(&end) + .map(|dt| dt.with_timezone(&Utc)) + .unwrap_or_else(|_| Utc::now() + chrono::Duration::days(30)); + + match 
engine.get_events_range(start, end).await { + Ok(events) => Ok(Json(events)), + Err(_) => Err(StatusCode::INTERNAL_SERVER_ERROR), + } + } else if let Some(user_id) = params.user_id { + match engine.get_user_events(&user_id).await { + Ok(events) => Ok(Json(events)), + Err(_) => Err(StatusCode::INTERNAL_SERVER_ERROR), + } + } else { + Err(StatusCode::BAD_REQUEST) + } +} + +async fn update_event_handler( + State(engine): State>, + Path(id): Path, + Json(updates): Json, +) -> Result, StatusCode> { + match engine.update_event(id, updates).await { + Ok(updated) => Ok(Json(updated)), + Err(_) => Err(StatusCode::INTERNAL_SERVER_ERROR), + } +} + +async fn delete_event_handler( + State(engine): State>, + Path(id): Path, +) -> Result { + match engine.delete_event(id).await { + Ok(true) => Ok(StatusCode::NO_CONTENT), + Ok(false) => Err(StatusCode::NOT_FOUND), + Err(_) => Err(StatusCode::INTERNAL_SERVER_ERROR), + } +} + +async fn schedule_meeting_handler( + State(engine): State>, + Json(req): Json, +) -> Result, StatusCode> { + match engine.create_meeting(req.event_id, req.platform).await { + Ok(meeting) => Ok(Json(meeting)), + Err(_) => Err(StatusCode::INTERNAL_SERVER_ERROR), + } +} + +pub fn routes(engine: Arc) -> Router { + Router::new() + .route( + "/events", + post(create_event_handler).get(get_events_handler), + ) + .route( + "/events/:id", + put(update_event_handler).delete(delete_event_handler), + ) + .route("/meetings", post(schedule_meeting_handler)) + .with_state(engine) +} diff --git a/src/console/chat_panel.rs b/src/console/chat_panel.rs new file mode 100644 index 000000000..67c5bbaa2 --- /dev/null +++ b/src/console/chat_panel.rs @@ -0,0 +1,113 @@ +use color_eyre::Result; +use std::sync::Arc; +use crate::shared::state::AppState; +use crate::shared::models::BotResponse; +use tokio::sync::mpsc; +use uuid::Uuid; +pub struct ChatPanel { + pub messages: Vec, + pub input_buffer: String, + pub session_id: Uuid, + pub user_id: Uuid, + pub response_rx: Option>, +} +impl 
ChatPanel { + pub fn new(_app_state: Arc) -> Self { + Self { + messages: vec!["Welcome to General Bots Console Chat!".to_string()], + input_buffer: String::new(), + session_id: Uuid::new_v4(), + user_id: Uuid::new_v4(), + response_rx: None, + } + } + pub fn add_char(&mut self, c: char) { + self.input_buffer.push(c); + } + pub fn backspace(&mut self) { + self.input_buffer.pop(); + } + pub async fn send_message(&mut self, bot_name: &str, app_state: &Arc) -> Result<()> { + if self.input_buffer.trim().is_empty() { + return Ok(()); + } + let message = self.input_buffer.clone(); + self.messages.push(format!("You: {}", message)); + self.input_buffer.clear(); + let bot_id = self.get_bot_id(bot_name, app_state).await?; + let user_message = crate::shared::models::UserMessage { + bot_id: bot_id.to_string(), + user_id: self.user_id.to_string(), + session_id: self.session_id.to_string(), + channel: "console".to_string(), + content: message, + message_type: 1, + media_url: None, + timestamp: chrono::Utc::now(), + context_name: None, + }; + let (tx, rx) = mpsc::channel::(100); + self.response_rx = Some(rx); +let orchestrator = crate::bot::BotOrchestrator::new(app_state.clone()); +let _ = orchestrator.stream_response(user_message, tx).await; + Ok(()) + } + pub async fn poll_response(&mut self, _bot_name: &str) -> Result<()> { + if let Some(rx) = &mut self.response_rx { + while let Ok(response) = rx.try_recv() { + if !response.content.is_empty() && !response.is_complete { + if let Some(last_msg) = self.messages.last_mut() { + if last_msg.starts_with("Bot: ") { + last_msg.push_str(&response.content); + } else { + self.messages.push(format!("Bot: {}", response.content)); + } + } else { + self.messages.push(format!("Bot: {}", response.content)); + } + } + if response.is_complete && response.content.is_empty() { + break; + } + } + } + Ok(()) + } + async fn get_bot_id(&self, bot_name: &str, app_state: &Arc) -> Result { + use crate::shared::models::schema::bots::dsl::*; + use 
diesel::prelude::*; + let mut conn = app_state.conn.get().unwrap(); + let bot_id = bots + .filter(name.eq(bot_name)) + .select(id) + .first::(&mut *conn)?; + Ok(bot_id) + } + pub fn render(&self) -> String { + let mut lines = Vec::new(); + lines.push("╔═══════════════════════════════════════╗".to_string()); + lines.push("║ CONVERSATION ║".to_string()); + lines.push("╚═══════════════════════════════════════╝".to_string()); + lines.push("".to_string()); + let visible_start = if self.messages.len() > 15 { + self.messages.len() - 15 + } else { + 0 + }; + for msg in &self.messages[visible_start..] { + if msg.starts_with("You: ") { + lines.push(format!(" {}", msg)); + } else if msg.starts_with("Bot: ") { + lines.push(format!(" {}", msg)); + } else { + lines.push(format!(" {}", msg)); + } + } + lines.push("".to_string()); + lines.push("─────────────────────────────────────────".to_string()); + lines.push(format!(" > {}_", self.input_buffer)); + lines.push("".to_string()); + lines.push(" Enter: Send | Tab: Switch Panel".to_string()); + lines.join("\n") + } +} diff --git a/src/console/editor.rs b/src/console/editor.rs new file mode 100644 index 000000000..86a663838 --- /dev/null +++ b/src/console/editor.rs @@ -0,0 +1,142 @@ +use color_eyre::Result; +use std::sync::Arc; +use crate::shared::state::AppState; +pub struct Editor { + file_path: String, + bucket: String, + key: String, + content: String, + cursor_pos: usize, + scroll_offset: usize, + modified: bool, +} +impl Editor { + pub async fn load(app_state: &Arc, bucket: &str, path: &str) -> Result { + let content = if let Some(drive) = &app_state.drive { + match drive.get_object().bucket(bucket).key(path).send().await { + Ok(response) => { + let bytes = response.body.collect().await?.into_bytes(); + String::from_utf8_lossy(&bytes).to_string() + } + Err(_) => String::new(), + } + } else { + String::new() + }; + Ok(Self { + file_path: format!("{}/{}", bucket, path), + bucket: bucket.to_string(), + key: path.to_string(), + 
content, + cursor_pos: 0, + scroll_offset: 0, + modified: false, + }) + } + pub async fn save(&mut self, app_state: &Arc) -> Result<()> { + if let Some(drive) = &app_state.drive { + drive.put_object() + .bucket(&self.bucket) + .key(&self.key) + .body(self.content.as_bytes().to_vec().into()) + .send() + .await?; + self.modified = false; + } + Ok(()) + } + pub fn file_path(&self) -> &str { + &self.file_path + } + pub fn render(&self, cursor_blink: bool) -> String { + let lines: Vec<&str> = self.content.lines().collect(); + let total_lines = lines.len().max(1); + let visible_lines = 25; + let cursor_line = self.content[..self.cursor_pos].lines().count(); + let cursor_col = self.content[..self.cursor_pos] + .lines() + .last() + .map(|line| line.len()) + .unwrap_or(0); + let start = self.scroll_offset; + let end = (start + visible_lines).min(total_lines); + let mut display_lines = Vec::new(); + for i in start..end { + let line_num = i + 1; + let line_content = if i < lines.len() { lines[i] } else { "" }; + let is_cursor_line = i == cursor_line; + let cursor_indicator = if is_cursor_line && cursor_blink { + let spaces = " ".repeat(cursor_col); + format!("{}█", spaces) + } else { + String::new() + }; + display_lines.push(format!(" {:4} │ {}{}", line_num, line_content, cursor_indicator)); + } + if display_lines.is_empty() { + let cursor_indicator = if cursor_blink { "█" } else { "" }; + display_lines.push(format!(" 1 │ {}", cursor_indicator)); + } + display_lines.push("".to_string()); + display_lines.push("─────────────────────────────────────────────────────────────".to_string()); + let status = if self.modified { "MODIFIED" } else { "SAVED" }; + display_lines.push(format!(" {} {} │ Line: {}, Col: {}", + status, self.file_path, cursor_line + 1, cursor_col + 1)); + display_lines.push(" Ctrl+S: Save │ Ctrl+W: Close │ Esc: Close without saving".to_string()); + display_lines.join("\n") + } + pub fn move_up(&mut self) { + if let Some(prev_line_end) = 
self.content[..self.cursor_pos].rfind('\n') { + if let Some(prev_prev_line_end) = self.content[..prev_line_end].rfind('\n') { + let target_pos = prev_prev_line_end + 1 + (self.cursor_pos - prev_line_end - 1).min( + self.content[prev_prev_line_end + 1..prev_line_end].len() + ); + self.cursor_pos = target_pos; + } else { + self.cursor_pos = (self.cursor_pos - prev_line_end - 1).min(prev_line_end); + } + } + } + pub fn move_down(&mut self) { + if let Some(next_line_start) = self.content[self.cursor_pos..].find('\n') { + let current_line_start = self.content[..self.cursor_pos].rfind('\n').map(|pos| pos + 1).unwrap_or(0); + let next_line_absolute = self.cursor_pos + next_line_start + 1; + if let Some(next_next_line_start) = self.content[next_line_absolute..].find('\n') { + let target_pos = next_line_absolute + (self.cursor_pos - current_line_start).min(next_next_line_start); + self.cursor_pos = target_pos; + } else { + let target_pos = next_line_absolute + (self.cursor_pos - current_line_start).min( + self.content[next_line_absolute..].len() + ); + self.cursor_pos = target_pos; + } + } + } + pub fn move_left(&mut self) { + if self.cursor_pos > 0 { + self.cursor_pos -= 1; + } + } + pub fn move_right(&mut self) { + if self.cursor_pos < self.content.len() { + self.cursor_pos += 1; + } + } + pub fn insert_char(&mut self, c: char) { + self.modified = true; + self.content.insert(self.cursor_pos, c); + self.cursor_pos += 1; + } + pub fn backspace(&mut self) { + if self.cursor_pos > 0 { + self.modified = true; + self.content.remove(self.cursor_pos - 1); + self.cursor_pos -= 1; + } + } + pub fn insert_newline(&mut self) { + self.modified = true; + self.content.insert(self.cursor_pos, '\n'); + self.cursor_pos += 1; + } +} diff --git a/src/console/file_tree.rs b/src/console/file_tree.rs new file mode 100644 index 000000000..6e7ed598f --- /dev/null +++ b/src/console/file_tree.rs @@ -0,0 +1,268 @@ +use crate::shared::state::AppState; +use color_eyre::Result; +use std::sync::Arc; 
+#[derive(Debug, Clone)] +pub enum TreeNode { + Bucket { name: String }, + Folder { bucket: String, path: String }, + File { bucket: String, path: String }, +} +pub struct FileTree { + app_state: Arc, + items: Vec<(String, TreeNode)>, + selected: usize, + current_bucket: Option, + current_path: Vec, +} +impl FileTree { + pub fn new(app_state: Arc) -> Self { + Self { + app_state, + items: Vec::new(), + selected: 0, + current_bucket: None, + current_path: Vec::new(), + } + } + pub async fn load_root(&mut self) -> Result<()> { + self.items.clear(); + self.current_bucket = None; + self.current_path.clear(); + if let Some(drive) = &self.app_state.drive { + let result = drive.list_buckets().send().await; + match result { + Ok(response) => { + let buckets = response.buckets(); + for bucket in buckets { + if let Some(name) = bucket.name() { + let icon = if name.ends_with(".gbai") { + "🤖" + } else { + "📦" + }; + let display = format!("{} {}", icon, name); + self.items.push(( + display, + TreeNode::Bucket { + name: name.to_string(), + }, + )); + } + } + } + Err(e) => { + self.items.push(( + format!("✗ Error: {}", e), + TreeNode::Bucket { + name: String::new(), + }, + )); + } + } + } else { + self.items.push(( + "✗ Drive not connected".to_string(), + TreeNode::Bucket { + name: String::new(), + }, + )); + } + if self.items.is_empty() { + self.items.push(( + "(no buckets found)".to_string(), + TreeNode::Bucket { + name: String::new(), + }, + )); + } + self.selected = 0; + Ok(()) + } + pub async fn enter_bucket(&mut self, bucket: String) -> Result<()> { + self.current_bucket = Some(bucket.clone()); + self.current_path.clear(); + self.load_bucket_contents(&bucket, "").await + } + pub async fn enter_folder(&mut self, bucket: String, path: String) -> Result<()> { + self.current_bucket = Some(bucket.clone()); + let parts: Vec<&str> = path + .trim_matches('/') + .split('/') + .filter(|s| !s.is_empty()) + .collect(); + self.current_path = parts.iter().map(|s| s.to_string()).collect(); 
+ self.load_bucket_contents(&bucket, &path).await + } + pub fn go_up(&mut self) -> bool { + if self.current_path.is_empty() { + if self.current_bucket.is_some() { + self.current_bucket = None; + return true; + } + return false; + } + self.current_path.pop(); + true + } + pub async fn refresh_current(&mut self) -> Result<()> { + if let Some(bucket) = &self.current_bucket.clone() { + let path = self.current_path.join("/"); + self.load_bucket_contents(bucket, &path).await + } else { + self.load_root().await + } + } + async fn load_bucket_contents(&mut self, bucket: &str, prefix: &str) -> Result<()> { + self.items.clear(); + self.items.push(( + "⬆️ .. (go back)".to_string(), + TreeNode::Folder { + bucket: bucket.to_string(), + path: "..".to_string(), + }, + )); + if let Some(drive) = &self.app_state.drive { + let normalized_prefix = if prefix.is_empty() { + String::new() + } else if prefix.ends_with('/') { + prefix.to_string() + } else { + format!("{}/", prefix) + }; + let mut continuation_token = None; + let mut all_keys = Vec::new(); + loop { + let mut request = drive.list_objects_v2().bucket(bucket); + if !normalized_prefix.is_empty() { + request = request.prefix(&normalized_prefix); + } + if let Some(token) = continuation_token { + request = request.continuation_token(token); + } + let result = request.send().await?; + for obj in result.contents() { + if let Some(key) = obj.key() { + all_keys.push(key.to_string()); + } + } + if !result.is_truncated.unwrap_or(false) { + break; + } + continuation_token = result.next_continuation_token; + } + let mut folders = std::collections::HashSet::new(); + let mut files = Vec::new(); + for key in all_keys { + if key == normalized_prefix { + continue; + } + let relative = + if !normalized_prefix.is_empty() && key.starts_with(&normalized_prefix) { + &key[normalized_prefix.len()..] 
+ } else { + &key + }; + if relative.is_empty() { + continue; + } + if let Some(slash_pos) = relative.find('/') { + let folder_name = &relative[..slash_pos]; + if !folder_name.is_empty() { + folders.insert(folder_name.to_string()); + } + } else { + files.push((relative.to_string(), key.clone())); + } + } + let mut folder_vec: Vec = folders.into_iter().collect(); + folder_vec.sort(); + for folder_name in folder_vec { + let full_path = if normalized_prefix.is_empty() { + folder_name.clone() + } else { + format!("{}{}", normalized_prefix, folder_name) + }; + let display = format!("📁 {}/", folder_name); + self.items.push(( + display, + TreeNode::Folder { + bucket: bucket.to_string(), + path: full_path, + }, + )); + } + files.sort_by(|(a, _), (b, _)| a.cmp(b)); + for (name, full_path) in files { + let icon = if name.ends_with(".bas") { + "⚙️" + } else if name.ends_with(".ast") { + "🔧" + } else if name.ends_with(".csv") { + "📊" + } else if name.ends_with(".gbkb") { + "📚" + } else if name.ends_with(".json") { + "🔖" + } else { + "📄" + }; + let display = format!("{} {}", icon, name); + self.items.push(( + display, + TreeNode::File { + bucket: bucket.to_string(), + path: full_path, + }, + )); + } + } + if self.items.len() == 1 { + self.items.push(( + "(empty folder)".to_string(), + TreeNode::Folder { + bucket: bucket.to_string(), + path: String::new(), + }, + )); + } + self.selected = 0; + Ok(()) + } + pub fn render_items(&self) -> &[(String, TreeNode)] { + &self.items + } + pub fn selected_index(&self) -> usize { + self.selected + } + pub fn get_selected_node(&self) -> Option<&TreeNode> { + self.items.get(self.selected).map(|(_, node)| node) + } + pub fn get_selected_bot(&self) -> Option { + if let Some(bucket) = &self.current_bucket { + if bucket.ends_with(".gbai") { + return Some(bucket.trim_end_matches(".gbai").to_string()); + } + } + if let Some((_, node)) = self.items.get(self.selected) { + match node { + TreeNode::Bucket { name } => { + if name.ends_with(".gbai") { + 
return Some(name.trim_end_matches(".gbai").to_string()); + } + } + _ => {} + } + } + None + } + pub fn move_up(&mut self) { + if self.selected > 0 { + self.selected -= 1; + } + } + pub fn move_down(&mut self) { + if self.selected < self.items.len().saturating_sub(1) { + self.selected += 1; + } + } +} diff --git a/src/console/log_panel.rs b/src/console/log_panel.rs new file mode 100644 index 000000000..cc0ebd161 --- /dev/null +++ b/src/console/log_panel.rs @@ -0,0 +1,64 @@ +use std::sync::{Arc, Mutex}; +use log::{Log, Metadata, LevelFilter, Record, SetLoggerError}; +use chrono::Local; +pub struct LogPanel { + logs: Vec, + max_logs: usize, +} +impl LogPanel { + pub fn new() -> Self { + Self { + logs: Vec::with_capacity(1000), + max_logs: 1000, + } + } + pub fn add_log(&mut self, entry: &str) { + if self.logs.len() >= self.max_logs { + self.logs.remove(0); + } + self.logs.push(entry.to_string()); + } + pub fn render(&self) -> String { + let visible_logs = if self.logs.len() > 10 { + &self.logs[self.logs.len() - 10..] + } else { + &self.logs[..] 
+ }; + visible_logs.join("\n") + } +} +pub struct UiLogger { + log_panel: Arc>, + filter: LevelFilter, +} +impl Log for UiLogger { + fn enabled(&self, metadata: &Metadata) -> bool { + metadata.level() <= self.filter + } + fn log(&self, record: &Record) { + if self.enabled(record.metadata()) { + let timestamp = Local::now().format("%H:%M:%S"); + let level_icon = match record.level() { + log::Level::Error => "ERR", + log::Level::Warn => "WRN", + log::Level::Info => "INF", + log::Level::Debug => "DBG", + log::Level::Trace => "TRC", + }; + let log_entry = format!("[{}] {} {}", timestamp, level_icon, record.args()); + if let Ok(mut panel) = self.log_panel.lock() { + panel.add_log(&log_entry); + } + } + } + fn flush(&self) {} +} +pub fn init_logger(log_panel: Arc>) -> Result<(), SetLoggerError> { + let logger = Box::new(UiLogger { + log_panel, + filter: LevelFilter::Info, + }); + log::set_boxed_logger(logger)?; + log::set_max_level(LevelFilter::Trace); + Ok(()) +} diff --git a/src/console/mod.rs b/src/console/mod.rs new file mode 100644 index 000000000..245665038 --- /dev/null +++ b/src/console/mod.rs @@ -0,0 +1,826 @@ +use crate::shared::state::AppState; +use color_eyre::Result; +use crossterm::{ + event::{self, Event, KeyCode, KeyModifiers}, + execute, + terminal::{disable_raw_mode, enable_raw_mode, EnterAlternateScreen, LeaveAlternateScreen}, +}; +use log::LevelFilter; +use ratatui::{ + backend::CrosstermBackend, + layout::{Constraint, Direction, Layout, Rect}, + style::{Color, Modifier, Style}, + text::{Line, Span}, + widgets::{Block, Borders, List, ListItem, Paragraph, Wrap}, + Frame, Terminal, +}; +use std::io; +use std::sync::Arc; +use std::sync::Mutex; +mod chat_panel; +mod editor; +pub mod file_tree; +mod log_panel; +mod status_panel; +use chat_panel::ChatPanel; +use editor::Editor; +use file_tree::{FileTree, TreeNode}; +use log_panel::{init_logger, LogPanel}; +use status_panel::StatusPanel; +pub struct XtreeUI { + app_state: Option>, + file_tree: Option, + 
status_panel: Option, + log_panel: Arc>, + chat_panel: Option, + editor: Option, + active_panel: ActivePanel, + should_quit: bool, + progress_channel: Option< + Arc>>, + >, + bootstrap_status: String, +} +#[derive(Debug, Clone, Copy, PartialEq)] +enum ActivePanel { + FileTree, + Editor, + Status, + Logs, + Chat, +} +impl XtreeUI { + pub fn new() -> Self { + let log_panel = Arc::new(Mutex::new(LogPanel::new())); + Self { + app_state: None, + file_tree: None, + status_panel: None, + log_panel: log_panel.clone(), + chat_panel: None, + editor: None, + active_panel: ActivePanel::Logs, + should_quit: false, + progress_channel: None, + bootstrap_status: "Initializing...".to_string(), + } + } + pub fn set_progress_channel( + &mut self, + rx: Arc>>, + ) { + self.progress_channel = Some(rx); + } + pub fn set_app_state(&mut self, app_state: Arc) { + self.file_tree = Some(FileTree::new(app_state.clone())); + self.status_panel = Some(StatusPanel::new(app_state.clone())); + self.chat_panel = Some(ChatPanel::new(app_state.clone())); + self.app_state = Some(app_state); + self.active_panel = ActivePanel::FileTree; + self.bootstrap_status = "Ready".to_string(); + } + pub fn start_ui(&mut self) -> Result<()> { + color_eyre::install()?; + if !std::io::IsTerminal::is_terminal(&std::io::stdout()) { + return Ok(()); + } + enable_raw_mode()?; + let mut stdout = io::stdout(); + execute!(stdout, EnterAlternateScreen)?; + let backend = CrosstermBackend::new(stdout); + let mut terminal = Terminal::new(backend)?; + init_logger(self.log_panel.clone())?; + log::set_max_level(LevelFilter::Trace); + let result = self.run_event_loop(&mut terminal); + disable_raw_mode()?; + execute!(terminal.backend_mut(), LeaveAlternateScreen)?; + terminal.show_cursor()?; + result + } + fn run_event_loop( + &mut self, + terminal: &mut Terminal>, + ) -> Result<()> { + let mut last_update = std::time::Instant::now(); + let update_interval = std::time::Duration::from_millis(1000); + let mut cursor_blink = false; + let 
mut last_blink = std::time::Instant::now(); + let rt = tokio::runtime::Runtime::new()?; + loop { + if let Some(ref progress_rx) = self.progress_channel { + if let Ok(mut rx) = progress_rx.try_lock() { + while let Ok(progress) = rx.try_recv() { + self.bootstrap_status = match progress { + crate::BootstrapProgress::StartingBootstrap => { + "Starting bootstrap...".to_string() + } + crate::BootstrapProgress::InstallingComponent(name) => { + format!("Installing: {}", name) + } + crate::BootstrapProgress::StartingComponent(name) => { + format!("Starting: {}", name) + } + crate::BootstrapProgress::UploadingTemplates => { + "Uploading templates...".to_string() + } + crate::BootstrapProgress::ConnectingDatabase => { + "Connecting to database...".to_string() + } + crate::BootstrapProgress::StartingLLM => { + "Starting LLM servers...".to_string() + } + crate::BootstrapProgress::BootstrapComplete => { + "Bootstrap complete".to_string() + } + crate::BootstrapProgress::BootstrapError(msg) => { + format!("Error: {}", msg) + } + }; + } + } + } + if last_blink.elapsed() >= std::time::Duration::from_millis(500) { + cursor_blink = !cursor_blink; + last_blink = std::time::Instant::now(); + } + terminal.draw(|f| self.render(f, cursor_blink))?; + if self.app_state.is_some() && last_update.elapsed() >= update_interval { + if let Err(e) = rt.block_on(self.update_data()) { + let mut log_panel = self.log_panel.lock().unwrap(); + log_panel.add_log(&format!("Update error: {}", e)); + } + last_update = std::time::Instant::now(); + } + if event::poll(std::time::Duration::from_millis(50))? { + if let Event::Key(key) = event::read()? 
{ + if let Err(e) = rt.block_on(self.handle_input(key.code, key.modifiers)) { + let mut log_panel = self.log_panel.lock().unwrap(); + log_panel.add_log(&format!("Input error: {}", e)); + } + if self.should_quit { + break; + } + } + } + } + Ok(()) + } + fn render(&mut self, f: &mut Frame, cursor_blink: bool) { + let bg = Color::Rgb(0, 30, 100); + let border_active = Color::Rgb(85, 255, 255); + let border_inactive = Color::Rgb(170, 170, 170); + let text = Color::Rgb(255, 255, 255); + let highlight = Color::Rgb(0, 170, 170); + let title_bg = Color::Rgb(170, 170, 170); + let title_fg = Color::Rgb(0, 0, 0); + if self.app_state.is_none() { + self.render_loading(f, bg, text, border_active, title_bg, title_fg); + return; + } + let main_chunks = Layout::default() + .direction(Direction::Vertical) + .constraints([ + Constraint::Length(3), + Constraint::Min(0), + Constraint::Length(12), + ]) + .split(f.area()); + self.render_header(f, main_chunks[0], bg, title_bg, title_fg); + if self.editor.is_some() { + let content_chunks = Layout::default() + .direction(Direction::Horizontal) + .constraints([ + Constraint::Percentage(25), + Constraint::Percentage(40), + Constraint::Percentage(35), + ]) + .split(main_chunks[1]); + self.render_file_tree( + f, + content_chunks[0], + bg, + text, + border_active, + border_inactive, + highlight, + title_bg, + title_fg, + ); + if let Some(editor) = &self.editor { + self.render_editor( + f, + content_chunks[1], + editor, + bg, + text, + border_active, + border_inactive, + highlight, + title_bg, + title_fg, + cursor_blink, + ); + } + self.render_chat( + f, + content_chunks[2], + bg, + text, + border_active, + border_inactive, + highlight, + title_bg, + title_fg, + ); + } else { + let content_chunks = Layout::default() + .direction(Direction::Horizontal) + .constraints([ + Constraint::Percentage(25), + Constraint::Percentage(40), + Constraint::Percentage(35), + ]) + .split(main_chunks[1]); + self.render_file_tree( + f, + content_chunks[0], + bg, + 
text, + border_active, + border_inactive, + highlight, + title_bg, + title_fg, + ); + let right_chunks = Layout::default() + .direction(Direction::Vertical) + .constraints([Constraint::Percentage(50), Constraint::Percentage(50)]) + .split(content_chunks[1]); + self.render_status( + f, + right_chunks[0], + bg, + text, + border_active, + border_inactive, + highlight, + title_bg, + title_fg, + ); + self.render_chat( + f, + content_chunks[2], + bg, + text, + border_active, + border_inactive, + highlight, + title_bg, + title_fg, + ); + } + self.render_logs( + f, + main_chunks[2], + bg, + text, + border_active, + border_inactive, + highlight, + title_bg, + title_fg, + ); + } + fn render_header( + &self, + f: &mut Frame, + area: Rect, + _bg: Color, + title_bg: Color, + title_fg: Color, + ) { + let block = Block::default().style(Style::default().bg(title_bg)); + f.render_widget(block, area); + let title = if self.app_state.is_some() { + let components = vec![ + ("Tables", "postgres", "5432"), + ("Cache", "valkey-server", "6379"), + ("Drive", "minio", "9000"), + ("LLM", "llama-server", "8081"), + ]; + let statuses: Vec = components + .iter() + .map(|(comp_name, process, _port)| { + let status = if status_panel::StatusPanel::check_component_running(process) { + format!("🟢 {}", comp_name) + } else { + format!("🔴 {}", comp_name) + }; + status + }) + .collect(); + format!(" GENERAL BOTS ┃ {} ", statuses.join(" ┃ ")) + } else { + " GENERAL BOTS ".to_string() + }; + let title_len = title.len() as u16; + let centered_x = (area.width.saturating_sub(title_len)) / 2; + let centered_y = area.y + 1; + let x = area.x + centered_x; + let max_width = area.width.saturating_sub(x - area.x); + let width = title_len.min(max_width); + let title_span = Span::styled( + title, + Style::default() + .fg(title_fg) + .bg(title_bg) + .add_modifier(Modifier::BOLD), + ); + f.render_widget( + Paragraph::new(Line::from(title_span)), + Rect { + x, + y: centered_y, + width, + height: 1, + }, + ); + } + fn 
render_loading( + &self, + f: &mut Frame, + bg: Color, + text: Color, + border: Color, + title_bg: Color, + title_fg: Color, + ) { + let chunks = Layout::default() + .direction(Direction::Vertical) + .constraints([ + Constraint::Percentage(40), + Constraint::Percentage(20), + Constraint::Percentage(40), + ]) + .split(f.area()); + let center = Layout::default() + .direction(Direction::Horizontal) + .constraints([ + Constraint::Percentage(30), + Constraint::Percentage(40), + Constraint::Percentage(30), + ]) + .split(chunks[1])[1]; + let block = Block::default() + .title(Span::styled( + " General Bots ", + Style::default() + .fg(title_fg) + .bg(title_bg) + .add_modifier(Modifier::BOLD), + )) + .borders(Borders::ALL) + .border_style(Style::default().fg(border)) + .style(Style::default().bg(bg)); + let loading_text = format!( +"\n ╔════════════════════════════════╗\n ║ ║\n ║ Initializing System... ║\n ║ ║\n ║ {} ║\n ║ ║\n ╚════════════════════════════════╝\n", +format!("{:^30}", self.bootstrap_status) +); + let paragraph = Paragraph::new(loading_text) + .block(block) + .style(Style::default().fg(text)) + .wrap(Wrap { trim: false }); + f.render_widget(paragraph, center); + } + fn render_file_tree( + &self, + f: &mut Frame, + area: Rect, + bg: Color, + text: Color, + border_active: Color, + border_inactive: Color, + highlight: Color, + title_bg: Color, + title_fg: Color, + ) { + if let Some(file_tree) = &self.file_tree { + let items = file_tree.render_items(); + let selected = file_tree.selected_index(); + let list_items: Vec = items + .iter() + .enumerate() + .map(|(idx, (display, _))| { + let style = if idx == selected { + Style::default() + .bg(highlight) + .fg(Color::Black) + .add_modifier(Modifier::BOLD) + } else { + Style::default().fg(text) + }; + ListItem::new(Line::from(Span::styled(display.clone(), style))) + }) + .collect(); + let is_active = self.active_panel == ActivePanel::FileTree; + let border_color = if is_active { + border_active + } else { + 
border_inactive + }; + let title_style = if is_active { + Style::default() + .fg(title_fg) + .bg(title_bg) + .add_modifier(Modifier::BOLD) + } else { + Style::default().fg(title_fg).bg(title_bg) + }; + let block = Block::default() + .title(Span::styled(" FILE EXPLORER ", title_style)) + .borders(Borders::ALL) + .border_style(Style::default().fg(border_color)) + .style(Style::default().bg(bg)); + let list = List::new(list_items).block(block); + f.render_widget(list, area); + } + } + fn render_status( + &mut self, + f: &mut Frame, + area: Rect, + bg: Color, + text: Color, + border_active: Color, + border_inactive: Color, + _highlight: Color, + title_bg: Color, + title_fg: Color, + ) { + let selected_bot_opt = self.file_tree.as_ref().and_then(|ft| ft.get_selected_bot()); + let status_text = if let Some(status_panel) = &mut self.status_panel { + match selected_bot_opt { + Some(bot) => status_panel.render(Some(bot)), + None => status_panel.render(None), + } + } else { + "Waiting for initialization...".to_string() + }; + let is_active = self.active_panel == ActivePanel::Status; + let border_color = if is_active { + border_active + } else { + border_inactive + }; + let title_style = if is_active { + Style::default() + .fg(title_fg) + .bg(title_bg) + .add_modifier(Modifier::BOLD) + } else { + Style::default().fg(title_fg).bg(title_bg) + }; + let block = Block::default() + .title(Span::styled(" SYSTEM STATUS ", title_style)) + .borders(Borders::ALL) + .border_style(Style::default().fg(border_color)) + .style(Style::default().bg(bg)); + let paragraph = Paragraph::new(status_text) + .block(block) + .style(Style::default().fg(text)) + .wrap(Wrap { trim: false }); + f.render_widget(paragraph, area); + } + fn render_editor( + &self, + f: &mut Frame, + area: Rect, + editor: &Editor, + bg: Color, + text: Color, + border_active: Color, + border_inactive: Color, + _highlight: Color, + title_bg: Color, + title_fg: Color, + cursor_blink: bool, + ) { + let is_active = self.active_panel 
== ActivePanel::Editor; + let border_color = if is_active { + border_active + } else { + border_inactive + }; + let title_style = if is_active { + Style::default() + .fg(title_fg) + .bg(title_bg) + .add_modifier(Modifier::BOLD) + } else { + Style::default().fg(title_fg).bg(title_bg) + }; + let title_text = format!(" EDITOR: {} ", editor.file_path()); + let block = Block::default() + .title(Span::styled(title_text, title_style)) + .borders(Borders::ALL) + .border_style(Style::default().fg(border_color)) + .style(Style::default().bg(bg)); + let content = editor.render(cursor_blink); + let paragraph = Paragraph::new(content) + .block(block) + .style(Style::default().fg(text)) + .wrap(Wrap { trim: false }); + f.render_widget(paragraph, area); + } + fn render_chat( + &self, + f: &mut Frame, + area: Rect, + bg: Color, + text: Color, + border_active: Color, + border_inactive: Color, + _highlight: Color, + title_bg: Color, + title_fg: Color, + ) { + if let Some(chat_panel) = &self.chat_panel { + let is_active = self.active_panel == ActivePanel::Chat; + let border_color = if is_active { + border_active + } else { + border_inactive + }; + let title_style = if is_active { + Style::default() + .fg(title_fg) + .bg(title_bg) + .add_modifier(Modifier::BOLD) + } else { + Style::default().fg(title_fg).bg(title_bg) + }; + let selected_bot = if let Some(file_tree) = &self.file_tree { + file_tree + .get_selected_bot() + .unwrap_or("No bot selected".to_string()) + } else { + "No bot selected".to_string() + }; + let title_text = format!(" CHAT: {} ", selected_bot); + let block = Block::default() + .title(Span::styled(title_text, title_style)) + .borders(Borders::ALL) + .border_style(Style::default().fg(border_color)) + .style(Style::default().bg(bg)); + let content = chat_panel.render(); + let paragraph = Paragraph::new(content) + .block(block) + .style(Style::default().fg(text)) + .wrap(Wrap { trim: false }); + f.render_widget(paragraph, area); + } + } + fn render_logs( + &self, + f: 
&mut Frame, + area: Rect, + bg: Color, + text: Color, + border_active: Color, + border_inactive: Color, + _highlight: Color, + title_bg: Color, + title_fg: Color, + ) { + let log_panel = self.log_panel.try_lock(); + let log_lines = if let Ok(panel) = log_panel { + panel.render() + } else { + "Loading logs...".to_string() + }; + let is_active = self.active_panel == ActivePanel::Logs; + let border_color = if is_active { + border_active + } else { + border_inactive + }; + let title_style = if is_active { + Style::default() + .fg(title_fg) + .bg(title_bg) + .add_modifier(Modifier::BOLD) + } else { + Style::default().fg(title_fg).bg(title_bg) + }; + let block = Block::default() + .title(Span::styled(" SYSTEM LOGS ", title_style)) + .borders(Borders::ALL) + .border_style(Style::default().fg(border_color)) + .style(Style::default().bg(bg)); + let paragraph = Paragraph::new(log_lines) + .block(block) + .style(Style::default().fg(text)) + .wrap(Wrap { trim: false }); + f.render_widget(paragraph, area); + } + async fn handle_input(&mut self, key: KeyCode, modifiers: KeyModifiers) -> Result<()> { + if modifiers.contains(KeyModifiers::CONTROL) { + match key { + KeyCode::Char('c') | KeyCode::Char('q') => { + self.should_quit = true; + return Ok(()); + } + KeyCode::Char('s') => { + if let Some(editor) = &mut self.editor { + if let Some(app_state) = &self.app_state { + if let Err(e) = editor.save(app_state).await { + let mut log_panel = self.log_panel.lock().unwrap(); + log_panel.add_log(&format!("Save failed: {}", e)); + } else { + let mut log_panel = self.log_panel.lock().unwrap(); + log_panel.add_log(&format!("Saved: {}", editor.file_path())); + } + } + } + return Ok(()); + } + KeyCode::Char('w') => { + if self.editor.is_some() { + self.editor = None; + self.active_panel = ActivePanel::FileTree; + let mut log_panel = self.log_panel.lock().unwrap(); + log_panel.add_log("Closed editor"); + } + return Ok(()); + } + _ => {} + } + } + if self.app_state.is_none() { + return Ok(()); 
+ } + match self.active_panel { + ActivePanel::FileTree => match key { + KeyCode::Up => { + if let Some(file_tree) = &mut self.file_tree { + file_tree.move_up(); + } + } + KeyCode::Down => { + if let Some(file_tree) = &mut self.file_tree { + file_tree.move_down(); + } + } + KeyCode::Enter => { + if let Err(e) = self.handle_tree_enter().await { + let mut log_panel = self.log_panel.lock().unwrap(); + log_panel.add_log(&format!("Enter error: {}", e)); + } + } + KeyCode::Backspace => { + if let Some(file_tree) = &mut self.file_tree { + if file_tree.go_up() { + if let Err(e) = file_tree.refresh_current().await { + let mut log_panel = self.log_panel.lock().unwrap(); + log_panel.add_log(&format!("Navigation error: {}", e)); + } + } + } + } + KeyCode::Tab => { + self.active_panel = ActivePanel::Chat; + } + KeyCode::Char('q') => { + self.should_quit = true; + } + KeyCode::F(5) => { + if let Some(file_tree) = &mut self.file_tree { + if let Err(e) = file_tree.refresh_current().await { + let mut log_panel = self.log_panel.lock().unwrap(); + log_panel.add_log(&format!("Refresh failed: {}", e)); + } else { + let mut log_panel = self.log_panel.lock().unwrap(); + log_panel.add_log("Refreshed"); + } + } + } + _ => {} + }, + ActivePanel::Editor => { + if let Some(editor) = &mut self.editor { + match key { + KeyCode::Up => editor.move_up(), + KeyCode::Down => editor.move_down(), + KeyCode::Left => editor.move_left(), + KeyCode::Right => editor.move_right(), + KeyCode::Char(c) => editor.insert_char(c), + KeyCode::Backspace => editor.backspace(), + KeyCode::Enter => editor.insert_newline(), + KeyCode::Tab => { + self.active_panel = ActivePanel::Chat; + } + KeyCode::Esc => { + self.editor = None; + self.active_panel = ActivePanel::FileTree; + let mut log_panel = self.log_panel.lock().unwrap(); + log_panel.add_log("Closed editor"); + } + _ => {} + } + } + } + ActivePanel::Chat => match key { + KeyCode::Tab => { + self.active_panel = ActivePanel::FileTree; + } + KeyCode::Enter => { + if 
let (Some(chat_panel), Some(file_tree), Some(app_state)) = + (&mut self.chat_panel, &self.file_tree, &self.app_state) + { + if let Some(bot_name) = file_tree.get_selected_bot() { + if let Err(e) = chat_panel.send_message(&bot_name, app_state).await { + let mut log_panel = self.log_panel.lock().unwrap(); + log_panel.add_log(&format!("Chat error: {}", e)); + } + } + } + } + KeyCode::Char(c) => { + if let Some(chat_panel) = &mut self.chat_panel { + chat_panel.add_char(c); + } + } + KeyCode::Backspace => { + if let Some(chat_panel) = &mut self.chat_panel { + chat_panel.backspace(); + } + } + _ => {} + }, + ActivePanel::Status => match key { + KeyCode::Tab => { + self.active_panel = ActivePanel::Logs; + } + _ => {} + }, + ActivePanel::Logs => match key { + KeyCode::Tab => { + self.active_panel = ActivePanel::FileTree; + } + _ => {} + }, + } + Ok(()) + } + async fn handle_tree_enter(&mut self) -> Result<()> { + if let (Some(file_tree), Some(app_state)) = (&mut self.file_tree, &self.app_state) { + if let Some(node) = file_tree.get_selected_node().cloned() { + match node { + TreeNode::Bucket { name, .. } => { + file_tree.enter_bucket(name.clone()).await?; + let mut log_panel = self.log_panel.lock().unwrap(); + log_panel.add_log(&format!("Opened bucket: {}", name)); + } + TreeNode::Folder { bucket, path, .. } => { + file_tree.enter_folder(bucket.clone(), path.clone()).await?; + let mut log_panel = self.log_panel.lock().unwrap(); + log_panel.add_log(&format!("Opened folder: {}", path)); + } + TreeNode::File { bucket, path, .. 
} => { + match Editor::load(app_state, &bucket, &path).await { + Ok(editor) => { + self.editor = Some(editor); + self.active_panel = ActivePanel::Editor; + let mut log_panel = self.log_panel.lock().unwrap(); + log_panel.add_log(&format!("Editing: {}", path)); + } + Err(e) => { + let mut log_panel = self.log_panel.lock().unwrap(); + log_panel.add_log(&format!("Failed to load file: {}", e)); + } + } + } + } + } + } + Ok(()) + } + async fn update_data(&mut self) -> Result<()> { + if let Some(status_panel) = &mut self.status_panel { + status_panel.update().await?; + } + if let Some(file_tree) = &self.file_tree { + if file_tree.render_items().is_empty() { + if let Some(file_tree) = &mut self.file_tree { + file_tree.load_root().await?; + } + } + } + if let (Some(chat_panel), Some(file_tree)) = (&mut self.chat_panel, &self.file_tree) { + if let Some(bot_name) = file_tree.get_selected_bot() { + chat_panel.poll_response(&bot_name).await?; + } + } + Ok(()) + } +} diff --git a/src/console/status_panel.rs b/src/console/status_panel.rs new file mode 100644 index 000000000..407d4b83f --- /dev/null +++ b/src/console/status_panel.rs @@ -0,0 +1,189 @@ +use crate::config::ConfigManager; +use crate::nvidia; +use crate::nvidia::get_system_metrics; +use crate::shared::models::schema::bots::dsl::*; +use crate::shared::state::AppState; +use diesel::prelude::*; +use std::sync::Arc; +use sysinfo::System; + +pub struct StatusPanel { + app_state: Arc, + last_update: std::time::Instant, + cached_content: String, + system: System, +} + +impl StatusPanel { + pub fn new(app_state: Arc) -> Self { + Self { + app_state, + last_update: std::time::Instant::now(), + cached_content: String::new(), + system: System::new_all(), + } + } + + pub async fn update(&mut self) -> Result<(), std::io::Error> { + self.system.refresh_all(); + // Force fresh metrics by using different token counts + let _tokens = (std::time::SystemTime::now() + .duration_since(std::time::UNIX_EPOCH) + .unwrap() + .as_secs() + % 
1000) as usize; + let _system_metrics = nvidia::get_system_metrics().unwrap_or_default(); + self.cached_content = self.render(None); + self.last_update = std::time::Instant::now(); + Ok(()) + } + + pub fn render(&mut self, selected_bot: Option) -> String { + let mut lines = Vec::new(); + + // System metrics section + lines.push("╔═══════════════════════════════════════╗".to_string()); + lines.push("║ SYSTEM METRICS ║".to_string()); + lines.push("╚═══════════════════════════════════════╝".to_string()); + lines.push("".to_string()); + + self.system.refresh_cpu_all(); + let cpu_usage = self.system.global_cpu_usage(); + let cpu_bar = Self::create_progress_bar(cpu_usage, 20); + lines.push(format!(" CPU: {:5.1}% {}", cpu_usage, cpu_bar)); + let system_metrics = get_system_metrics().unwrap_or_default(); + + if let Some(gpu_usage) = system_metrics.gpu_usage { + let gpu_bar = Self::create_progress_bar(gpu_usage, 20); + lines.push(format!(" GPU: {:5.1}% {}", gpu_usage, gpu_bar)); + } else { + lines.push(" GPU: Not available".to_string()); + } + + let total_mem = self.system.total_memory() as f32 / 1024.0 / 1024.0 / 1024.0; + let used_mem = self.system.used_memory() as f32 / 1024.0 / 1024.0 / 1024.0; + let mem_percentage = (used_mem / total_mem) * 100.0; + let mem_bar = Self::create_progress_bar(mem_percentage, 20); + lines.push(format!( + " MEM: {:5.1}% {} ({:.1}/{:.1} GB)", + mem_percentage, mem_bar, used_mem, total_mem + )); + + // Components status section + lines.push("".to_string()); + lines.push("╔═══════════════════════════════════════╗".to_string()); + lines.push("║ COMPONENTS STATUS ║".to_string()); + lines.push("╚═══════════════════════════════════════╝".to_string()); + lines.push("".to_string()); + + let components = vec![ + ("Tables", "postgres", "5432"), + ("Cache", "valkey-server", "6379"), + ("Drive", "minio", "9000"), + ("LLM", "llama-server", "8081"), + ]; + + for (comp_name, process, port) in components { + let status = if 
Self::check_component_running(process) { + format!("🟢 ONLINE [Port: {}]", port) + } else { + "🔴 OFFLINE".to_string() + }; + lines.push(format!(" {:<10} {}", comp_name, status)); + } + + // Active bots section + lines.push("".to_string()); + lines.push("╔═══════════════════════════════════════╗".to_string()); + lines.push("║ ACTIVE BOTS ║".to_string()); + lines.push("╚═══════════════════════════════════════╝".to_string()); + lines.push("".to_string()); + + if let Ok(mut conn) = self.app_state.conn.get() { + match bots + .filter(is_active.eq(true)) + .select((name, id)) + .load::<(String, uuid::Uuid)>(&mut *conn) + { + Ok(bot_list) => { + if bot_list.is_empty() { + lines.push(" No active bots".to_string()); + } else { + for (bot_name, bot_id) in bot_list { + let marker = if let Some(ref selected) = selected_bot { + if selected == &bot_name { + "►" + } else { + " " + } + } else { + " " + }; + lines.push(format!(" {} 🤖 {}", marker, bot_name)); + + if let Some(ref selected) = selected_bot { + if selected == &bot_name { + lines.push("".to_string()); + lines.push(" ┌─ Bot Configuration ─────────┐".to_string()); + let config_manager = + ConfigManager::new(self.app_state.conn.clone()); + let llm_model = config_manager + .get_config(&bot_id, "llm-model", None) + .unwrap_or_else(|_| "N/A".to_string()); + lines.push(format!(" Model: {}", llm_model)); + let ctx_size = config_manager + .get_config(&bot_id, "llm-server-ctx-size", None) + .unwrap_or_else(|_| "N/A".to_string()); + lines.push(format!(" Context: {}", ctx_size)); + let temp = config_manager + .get_config(&bot_id, "llm-temperature", None) + .unwrap_or_else(|_| "N/A".to_string()); + lines.push(format!(" Temp: {}", temp)); + lines.push(" └─────────────────────────────┘".to_string()); + } + } + } + } + } + Err(_) => { + lines.push(" Error loading bots".to_string()); + } + } + } else { + lines.push(" Database locked".to_string()); + } + + // Sessions section + lines.push("".to_string()); + 
lines.push("╔═══════════════════════════════════════╗".to_string()); + lines.push("║ SESSIONS ║".to_string()); + lines.push("╚═══════════════════════════════════════╝".to_string()); + + let session_count = self + .app_state + .response_channels + .try_lock() + .map(|channels| channels.len()) + .unwrap_or(0); + lines.push(format!(" Active Sessions: {}", session_count)); + + lines.join("\n") + } + + fn create_progress_bar(percentage: f32, width: usize) -> String { + let filled = (percentage / 100.0 * width as f32).round() as usize; + let empty = width.saturating_sub(filled); + let filled_chars = "█".repeat(filled); + let empty_chars = "░".repeat(empty); + format!("[{}{}]", filled_chars, empty_chars) + } + + pub fn check_component_running(process_name: &str) -> bool { + std::process::Command::new("pgrep") + .arg("-f") + .arg(process_name) + .output() + .map(|output| !output.stdout.is_empty()) + .unwrap_or(false) + } +} diff --git a/src/core/automation/automation.test.rs b/src/core/automation/automation.test.rs new file mode 100644 index 000000000..dcdeea713 --- /dev/null +++ b/src/core/automation/automation.test.rs @@ -0,0 +1,10 @@ +#[cfg(test)] +mod tests { + use super::*; + use crate::tests::test_util; + #[test] + fn test_automation_module() { + test_util::setup(); + assert!(true, "Basic automation module test"); + } +} diff --git a/src/core/automation/mod.rs b/src/core/automation/mod.rs new file mode 100644 index 000000000..93c9468a0 --- /dev/null +++ b/src/core/automation/mod.rs @@ -0,0 +1,135 @@ +use crate::basic::ScriptService; +use crate::shared::models::{Automation, TriggerKind}; +use crate::shared::state::AppState; +use chrono::Utc; +use cron::Schedule; +use diesel::prelude::*; +use log::error; +use std::str::FromStr; +use std::sync::Arc; +use tokio::time::{interval, Duration}; + +#[cfg(feature = "vectordb")] +pub mod vectordb_indexer; + +#[cfg(feature = "vectordb")] +pub use vectordb_indexer::{IndexingStats, IndexingStatus, VectorDBIndexer}; + 
+#[derive(Debug)] +pub struct AutomationService { + state: Arc, +} +impl AutomationService { + pub fn new(state: Arc) -> Self { + crate::llm::compact_prompt::start_compact_prompt_scheduler(Arc::clone(&state)); + Self { state } + } + pub async fn spawn(self) -> Result<(), Box> { + let mut ticker = interval(Duration::from_secs(5)); + loop { + ticker.tick().await; + if let Err(e) = self.check_scheduled_tasks().await { + error!("Error checking scheduled tasks: {}", e); + } + } + } + async fn check_scheduled_tasks(&self) -> Result<(), Box> { + use crate::shared::models::system_automations::dsl::{ + id, is_active, kind, last_triggered as lt_column, system_automations, + }; + let mut conn = self + .state + .conn + .get() + .map_err(|e| format!("Failed to acquire database connection: {}", e))?; + let automations: Vec = system_automations + .filter(is_active.eq(true)) + .filter(kind.eq(TriggerKind::Scheduled as i32)) + .load::(&mut conn)?; + for automation in automations { + if let Some(schedule_str) = &automation.schedule { + match Schedule::from_str(schedule_str.trim()) { + Ok(parsed_schedule) => { + let now = Utc::now(); + let next_run = parsed_schedule.upcoming(Utc).next(); + if let Some(next_time) = next_run { + let time_until_next = next_time - now; + if time_until_next.num_minutes() < 1 { + if let Some(last_triggered) = automation.last_triggered { + if (now - last_triggered).num_minutes() < 1 { + continue; + } + } + if let Err(e) = self.execute_automation(&automation).await { + error!("Error executing automation {}: {}", automation.id, e); + } + if let Err(e) = + diesel::update(system_automations.filter(id.eq(automation.id))) + .set(lt_column.eq(Some(now))) + .execute(&mut conn) + { + error!( + "Error updating last_triggered for automation {}: {}", + automation.id, e + ); + } + } + } + } + Err(e) => { + error!( + "Error parsing schedule for automation {} ({}): {}", + automation.id, schedule_str, e + ); + } + } + } + } + Ok(()) + } + async fn execute_automation( + 
&self, + automation: &Automation, + ) -> Result<(), Box> { + let bot_name: String = { + use crate::shared::models::schema::bots::dsl::*; + let mut conn = self + .state + .conn + .get() + .map_err(|e| format!("Failed to acquire database connection: {}", e))?; + bots.filter(id.eq(automation.bot_id)) + .select(name) + .first(&mut conn)? + }; + let script_path = format!( + "./work/{}.gbai/{}.gbdialog/{}.ast", + bot_name, bot_name, automation.param + ); + let script_content = match tokio::fs::read_to_string(&script_path).await { + Ok(content) => content, + Err(e) => { + error!("Failed to read script {}: {}", script_path, e); + return Ok(()); + } + }; + let session = { + let mut sm = self.state.session_manager.lock().await; + let admin_user = automation.bot_id; + sm.get_or_create_user_session(admin_user, automation.bot_id, "Automation")? + .ok_or("Failed to create session")? + }; + let script_service = ScriptService::new(Arc::clone(&self.state), session); + match script_service.compile(&script_content) { + Ok(ast) => { + if let Err(e) = script_service.run(&ast) { + error!("Script execution failed: {}", e); + } + } + Err(e) => { + error!("Script compilation failed: {}", e); + } + } + Ok(()) + } +} diff --git a/src/core/bootstrap/bootstrap.test.rs b/src/core/bootstrap/bootstrap.test.rs new file mode 100644 index 000000000..2fa9fbffb --- /dev/null +++ b/src/core/bootstrap/bootstrap.test.rs @@ -0,0 +1,10 @@ +#[cfg(test)] +mod tests { + use super::*; + use crate::tests::test_util; + #[test] + fn test_bootstrap_module() { + test_util::setup(); + assert!(true, "Basic bootstrap module test"); + } +} diff --git a/src/core/bootstrap/mod.rs b/src/core/bootstrap/mod.rs new file mode 100644 index 000000000..1df1d8c6f --- /dev/null +++ b/src/core/bootstrap/mod.rs @@ -0,0 +1,397 @@ +use crate::config::AppConfig; +use crate::package_manager::setup::{DirectorySetup, EmailSetup}; +use crate::package_manager::{InstallMode, PackageManager}; +use 
crate::shared::utils::establish_pg_connection; +use anyhow::Result; +use aws_config::BehaviorVersion; +use aws_sdk_s3::Client; +use dotenvy::dotenv; +use log::{error, info, trace}; +use rand::distr::Alphanumeric; +use std::io::{self, Write}; +use std::path::{Path, PathBuf}; +use std::process::Command; +#[derive(Debug)] +pub struct ComponentInfo { + pub name: &'static str, +} +#[derive(Debug)] +pub struct BootstrapManager { + pub install_mode: InstallMode, + pub tenant: Option, +} +impl BootstrapManager { + pub async fn new(install_mode: InstallMode, tenant: Option) -> Self { + trace!( + "Initializing BootstrapManager with mode {:?} and tenant {:?}", + install_mode, + tenant + ); + Self { + install_mode, + tenant, + } + } + pub fn start_all(&mut self) -> Result<()> { + let pm = PackageManager::new(self.install_mode.clone(), self.tenant.clone())?; + let components = vec![ + ComponentInfo { name: "tables" }, + ComponentInfo { name: "cache" }, + ComponentInfo { name: "drive" }, + ComponentInfo { name: "llm" }, + ComponentInfo { name: "email" }, + ComponentInfo { name: "proxy" }, + ComponentInfo { name: "directory" }, + ComponentInfo { name: "alm" }, + ComponentInfo { name: "alm_ci" }, + ComponentInfo { name: "dns" }, + ComponentInfo { name: "webmail" }, + ComponentInfo { name: "meeting" }, + ComponentInfo { + name: "table_editor", + }, + ComponentInfo { name: "doc_editor" }, + ComponentInfo { name: "desktop" }, + ComponentInfo { name: "devtools" }, + ComponentInfo { name: "bot" }, + ComponentInfo { name: "system" }, + ComponentInfo { name: "vector_db" }, + ComponentInfo { name: "host" }, + ]; + for component in components { + if pm.is_installed(component.name) { + pm.start(component.name)?; + } + } + Ok(()) + } + + fn generate_secure_password(&self, length: usize) -> String { + let mut rng = rand::rng(); + (0..length) + .map(|_| { + let byte = rand::Rng::sample(&mut rng, Alphanumeric); + char::from(byte) + }) + .collect() + } + + pub async fn bootstrap(&mut self) -> 
Result<()> { + let env_path = std::env::current_dir().unwrap().join(".env"); + let db_password = self.generate_secure_password(32); + let database_url = std::env::var("DATABASE_URL").unwrap_or_else(|_| { + format!("postgres://gbuser:{}@localhost:5432/botserver", db_password) + }); + + let drive_password = self.generate_secure_password(16); + let drive_user = "gbdriveuser".to_string(); + let drive_env = format!( + "\nDRIVE_SERVER=http://localhost:9000\nDRIVE_ACCESSKEY={}\nDRIVE_SECRET={}\n", + drive_user, drive_password + ); + let contents_env = format!("DATABASE_URL={}\n{}", database_url, drive_env); + let _ = std::fs::write(&env_path, contents_env); + dotenv().ok(); + + let pm = PackageManager::new(self.install_mode.clone(), self.tenant.clone()).unwrap(); + let required_components = vec!["tables", "drive", "cache", "llm"]; + for component in required_components { + if !pm.is_installed(component) { + let termination_cmd = pm + .components + .get(component) + .and_then(|cfg| cfg.binary_name.clone()) + .unwrap_or_else(|| component.to_string()); + if !termination_cmd.is_empty() { + let check = Command::new("pgrep") + .arg("-f") + .arg(&termination_cmd) + .output(); + if let Ok(output) = check { + if !output.stdout.is_empty() { + println!("Component '{}' appears to be already running from a previous install.", component); + println!("Do you want to terminate it? 
(y/n)"); + let mut input = String::new(); + io::stdout().flush().unwrap(); + io::stdin().read_line(&mut input).unwrap(); + if input.trim().eq_ignore_ascii_case("y") { + let _ = Command::new("pkill") + .arg("-f") + .arg(&termination_cmd) + .status(); + println!("Terminated existing '{}' process.", component); + } else { + println!( + "Skipping start of '{}' as it is already running.", + component + ); + continue; + } + } + } + } + _ = pm.install(component).await; + if component == "tables" { + let mut conn = establish_pg_connection().unwrap(); + self.apply_migrations(&mut conn)?; + } + + // Auto-configure Directory after installation + if component == "directory" { + info!("🔧 Auto-configuring Directory (Zitadel)..."); + if let Err(e) = self.setup_directory().await { + error!("Failed to setup Directory: {}", e); + } + } + } + } + Ok(()) + } + + /// Setup Directory (Zitadel) with default organization and user + async fn setup_directory(&self) -> Result<()> { + let config_path = PathBuf::from("./config/directory_config.json"); + + // Ensure config directory exists + tokio::fs::create_dir_all("./config").await?; + + let mut setup = DirectorySetup::new("http://localhost:8080".to_string(), config_path); + + // Create default organization + let org_name = "default"; + let org_id = setup + .create_organization(org_name, "Default Organization") + .await?; + info!("✅ Created default organization: {}", org_name); + + // Create admin@default account for bot administration + let admin_user = setup + .create_user( + &org_id, + "admin", + "admin@default", + "Admin123!", + "Admin", + "Default", + true, // is_admin + ) + .await?; + info!("✅ Created admin user: admin@default"); + + // Create user@default account for regular bot usage + let regular_user = setup + .create_user( + &org_id, + "user", + "user@default", + "User123!", + "User", + "Default", + false, // is_admin + ) + .await?; + info!("✅ Created regular user: user@default"); + info!(" Regular user ID: {}", regular_user.id); 
+ + // Create OAuth2 application for BotServer + let (project_id, client_id, client_secret) = + setup.create_oauth_application(&org_id).await?; + info!("✅ Created OAuth2 application in project: {}", project_id); + + // Save configuration + let config = setup + .save_config( + org_id.clone(), + org_name.to_string(), + admin_user, + client_id.clone(), + client_secret, + ) + .await?; + + info!("✅ Directory initialized successfully!"); + info!(" Organization: default"); + info!(" Admin User: admin@default / Admin123!"); + info!(" Regular User: user@default / User123!"); + info!(" Client ID: {}", client_id); + info!(" Login URL: {}", config.base_url); + + Ok(()) + } + + /// Setup Email (Stalwart) with Directory integration + async fn setup_email(&self) -> Result<()> { + let config_path = PathBuf::from("./config/email_config.json"); + let directory_config_path = PathBuf::from("./config/directory_config.json"); + + let mut setup = EmailSetup::new("http://localhost:8080".to_string(), config_path); + + // Try to integrate with Directory if it exists + let directory_config = if directory_config_path.exists() { + Some(directory_config_path) + } else { + None + }; + + let config = setup.initialize(directory_config).await?; + + info!("✅ Email server initialized successfully!"); + info!(" SMTP: {}:{}", config.smtp_host, config.smtp_port); + info!(" IMAP: {}:{}", config.imap_host, config.imap_port); + info!(" Admin: {} / {}", config.admin_user, config.admin_pass); + if config.directory_integration { + info!(" 🔗 Integrated with Directory for authentication"); + } + + Ok(()) + } + + async fn get_drive_client(config: &AppConfig) -> Client { + let endpoint = if !config.drive.server.ends_with('/') { + format!("{}/", config.drive.server) + } else { + config.drive.server.clone() + }; + let base_config = aws_config::defaults(BehaviorVersion::latest()) + .endpoint_url(endpoint) + .region("auto") + .credentials_provider(aws_sdk_s3::config::Credentials::new( + 
config.drive.access_key.clone(), + config.drive.secret_key.clone(), + None, + None, + "static", + )) + .load() + .await; + let s3_config = aws_sdk_s3::config::Builder::from(&base_config) + .force_path_style(true) + .build(); + aws_sdk_s3::Client::from_conf(s3_config) + } + + pub async fn upload_templates_to_drive(&self, _config: &AppConfig) -> Result<()> { + let mut conn = establish_pg_connection()?; + self.create_bots_from_templates(&mut conn)?; + let templates_dir = Path::new("templates"); + if !templates_dir.exists() { + return Ok(()); + } + let client = Self::get_drive_client(_config).await; + let mut read_dir = tokio::fs::read_dir(templates_dir).await?; + while let Some(entry) = read_dir.next_entry().await? { + let path = entry.path(); + if path.is_dir() + && path + .file_name() + .unwrap() + .to_string_lossy() + .ends_with(".gbai") + { + let bot_name = path.file_name().unwrap().to_string_lossy().to_string(); + let bucket = bot_name.trim_start_matches('/').to_string(); + if client.head_bucket().bucket(&bucket).send().await.is_err() { + match client.create_bucket().bucket(&bucket).send().await { + Ok(_) => { + self.upload_directory_recursive(&client, &path, &bucket, "/") + .await?; + } + Err(e) => { + error!("Failed to create bucket {}: {:?}", bucket, e); + return Err(anyhow::anyhow!("Failed to create bucket {}: {}. Check S3 credentials and endpoint configuration", bucket, e)); + } + } + } else { + trace!("Bucket {} already exists", bucket); + } + } + } + Ok(()) + } + fn create_bots_from_templates(&self, conn: &mut diesel::PgConnection) -> Result<()> { + use crate::shared::models::schema::bots; + use diesel::prelude::*; + let templates_dir = Path::new("templates"); + if !templates_dir.exists() { + return Ok(()); + } + for entry in std::fs::read_dir(templates_dir)? 
{ + let entry = entry?; + let path = entry.path(); + if path.is_dir() && path.extension().map(|e| e == "gbai").unwrap_or(false) { + let bot_folder = path.file_name().unwrap().to_string_lossy().to_string(); + let bot_name = bot_folder.trim_end_matches(".gbai"); + let existing: Option = bots::table + .filter(bots::name.eq(&bot_name)) + .select(bots::name) + .first(conn) + .optional()?; + if existing.is_none() { + diesel::sql_query("INSERT INTO bots (id, name, description, llm_provider, llm_config, context_provider, context_config, is_active) VALUES (gen_random_uuid(), $1, $2, 'openai', '{\"model\": \"gpt-4\", \"temperature\": 0.7}', 'database', '{}', true)").bind::(&bot_name).bind::(format!("Bot for {} template", bot_name)).execute(conn)?; + } else { + trace!("Bot {} already exists", bot_name); + } + } + } + Ok(()) + } + fn upload_directory_recursive<'a>( + &'a self, + client: &'a Client, + local_path: &'a Path, + bucket: &'a str, + prefix: &'a str, + ) -> std::pin::Pin> + 'a>> { + Box::pin(async move { + let _normalized_path = if !local_path.to_string_lossy().ends_with('/') { + format!("{}/", local_path.to_string_lossy()) + } else { + local_path.to_string_lossy().to_string() + }; + let mut read_dir = tokio::fs::read_dir(local_path).await?; + while let Some(entry) = read_dir.next_entry().await? 
{ + let path = entry.path(); + let file_name = path.file_name().unwrap().to_string_lossy().to_string(); + let mut key = prefix.trim_matches('/').to_string(); + if !key.is_empty() { + key.push('/'); + } + key.push_str(&file_name); + if path.is_file() { + trace!( + "Uploading file {} to bucket {} with key {}", + path.display(), + bucket, + key + ); + let content = tokio::fs::read(&path).await?; + client + .put_object() + .bucket(bucket) + .key(&key) + .body(content.into()) + .send() + .await?; + } else if path.is_dir() { + self.upload_directory_recursive(client, &path, bucket, &key) + .await?; + } + } + Ok(()) + }) + } + pub fn apply_migrations(&self, conn: &mut diesel::PgConnection) -> Result<()> { + use diesel_migrations::HarnessWithOutput; + use diesel_migrations::{embed_migrations, EmbeddedMigrations, MigrationHarness}; + + const MIGRATIONS: EmbeddedMigrations = embed_migrations!("migrations"); + + let mut harness = HarnessWithOutput::write_to_stdout(conn); + if let Err(e) = harness.run_pending_migrations(MIGRATIONS) { + error!("Failed to apply migrations: {}", e); + return Err(anyhow::anyhow!("Migration error: {}", e)); + } + + Ok(()) + } +} diff --git a/src/core/bot/bot.test.rs b/src/core/bot/bot.test.rs new file mode 100644 index 000000000..3d2bf5a0a --- /dev/null +++ b/src/core/bot/bot.test.rs @@ -0,0 +1,10 @@ +#[cfg(test)] +mod tests { + use super::*; + use crate::tests::test_util; + #[test] + fn test_bot_module() { + test_util::setup(); + assert!(true, "Basic bot module test"); + } +} diff --git a/src/core/bot/channels/channels.test.rs b/src/core/bot/channels/channels.test.rs new file mode 100644 index 000000000..3c56a2f91 --- /dev/null +++ b/src/core/bot/channels/channels.test.rs @@ -0,0 +1,10 @@ +#[cfg(test)] +mod tests { + use super::*; + use crate::tests::test_util; + #[test] + fn test_channels_module() { + test_util::setup(); + assert!(true, "Basic channels module test"); + } +} diff --git a/src/core/bot/channels/instagram.rs 
b/src/core/bot/channels/instagram.rs new file mode 100644 index 000000000..e2b0d121a --- /dev/null +++ b/src/core/bot/channels/instagram.rs @@ -0,0 +1,29 @@ +use crate::shared::models::BotResponse; +use async_trait::async_trait; +use log::info; + +/// Instagram channel adapter for sending messages through Instagram +pub struct InstagramAdapter { + // TODO: Add Instagram API client configuration +} + +impl InstagramAdapter { + pub fn new() -> Self { + Self {} + } +} + +#[async_trait] +impl super::ChannelAdapter for InstagramAdapter { + async fn send_message( + &self, + response: BotResponse, + ) -> Result<(), Box> { + info!( + "Instagram message would be sent to {}: {}", + response.user_id, response.content + ); + // TODO: Implement actual Instagram API integration + Ok(()) + } +} diff --git a/src/core/bot/channels/mod.rs b/src/core/bot/channels/mod.rs new file mode 100644 index 000000000..2b1657cf2 --- /dev/null +++ b/src/core/bot/channels/mod.rs @@ -0,0 +1,127 @@ +pub mod instagram; +pub mod teams; +pub mod whatsapp; + +use crate::shared::models::BotResponse; +use async_trait::async_trait; +use log::{debug, info}; +use std::collections::HashMap; +use std::sync::Arc; +use tokio::sync::{mpsc, Mutex}; +#[async_trait] +pub trait ChannelAdapter: Send + Sync { + async fn send_message( + &self, + response: BotResponse, + ) -> Result<(), Box>; +} +#[derive(Debug)] +pub struct WebChannelAdapter { + connections: Arc>>>, +} +impl WebChannelAdapter { + pub fn new() -> Self { + Self { + connections: Arc::new(Mutex::new(HashMap::new())), + } + } + pub async fn add_connection(&self, session_id: String, tx: mpsc::Sender) { + self.connections.lock().await.insert(session_id, tx); + } + pub async fn remove_connection(&self, session_id: &str) { + self.connections.lock().await.remove(session_id); + } + pub async fn send_message_to_session( + &self, + session_id: &str, + message: BotResponse, + ) -> Result<(), Box> { + let connections = self.connections.lock().await; + if let Some(tx) 
= connections.get(session_id) { + if let Err(e) = tx.send(message).await { + log::error!( + "Failed to send message to WebSocket session {}: {}", + session_id, + e + ); + return Err(Box::new(e)); + } + debug!("Message sent to WebSocket session: {}", session_id); + Ok(()) + } else { + debug!("No WebSocket connection found for session: {}", session_id); + Err("No WebSocket connection found".into()) + } + } +} +#[async_trait] +impl ChannelAdapter for WebChannelAdapter { + async fn send_message( + &self, + response: BotResponse, + ) -> Result<(), Box> { + let connections = self.connections.lock().await; + if let Some(tx) = connections.get(&response.session_id) { + tx.send(response).await?; + } + Ok(()) + } +} +#[derive(Debug)] +pub struct VoiceAdapter { + rooms: Arc>>, + connections: Arc>>>, +} +impl VoiceAdapter { + pub fn new() -> Self { + Self { + rooms: Arc::new(Mutex::new(HashMap::new())), + connections: Arc::new(Mutex::new(HashMap::new())), + } + } + pub async fn start_voice_session( + &self, + session_id: &str, + user_id: &str, + ) -> Result> { + info!( + "Starting voice session for user: {} with session: {}", + user_id, session_id + ); + let token = format!("mock_token_{}_{}", session_id, user_id); + self.rooms + .lock() + .await + .insert(session_id.to_string(), token.clone()); + Ok(token) + } + pub async fn stop_voice_session( + &self, + session_id: &str, + ) -> Result<(), Box> { + self.rooms.lock().await.remove(session_id); + Ok(()) + } + pub async fn add_connection(&self, session_id: String, tx: mpsc::Sender) { + self.connections.lock().await.insert(session_id, tx); + } + pub async fn send_voice_response( + &self, + session_id: &str, + text: &str, + ) -> Result<(), Box> { + info!("Sending voice response to session {}: {}", session_id, text); + Ok(()) + } +} +#[async_trait] +impl ChannelAdapter for VoiceAdapter { + async fn send_message( + &self, + response: BotResponse, + ) -> Result<(), Box> { + info!("Sending voice response to: {}", response.user_id); + 
self.send_voice_response(&response.session_id, &response.content) + .await + } +} diff --git a/src/core/bot/channels/teams.rs b/src/core/bot/channels/teams.rs new file mode 100644 index 000000000..2da9d65db --- /dev/null +++ b/src/core/bot/channels/teams.rs @@ -0,0 +1,29 @@ +use crate::shared::models::BotResponse; +use async_trait::async_trait; +use log::info; + +/// Microsoft Teams channel adapter for sending messages through Teams +pub struct TeamsAdapter { + // TODO: Add Teams API client configuration +} + +impl TeamsAdapter { + pub fn new() -> Self { + Self {} + } +} + +#[async_trait] +impl super::ChannelAdapter for TeamsAdapter { + async fn send_message( + &self, + response: BotResponse, + ) -> Result<(), Box> { + info!( + "Teams message would be sent to {}: {}", + response.user_id, response.content + ); + // TODO: Implement actual Teams API integration + Ok(()) + } +} diff --git a/src/core/bot/channels/whatsapp.rs b/src/core/bot/channels/whatsapp.rs new file mode 100644 index 000000000..cb623a019 --- /dev/null +++ b/src/core/bot/channels/whatsapp.rs @@ -0,0 +1,29 @@ +use crate::shared::models::BotResponse; +use async_trait::async_trait; +use log::info; + +/// WhatsApp channel adapter for sending messages through WhatsApp +pub struct WhatsAppAdapter { + // TODO: Add WhatsApp API client configuration +} + +impl WhatsAppAdapter { + pub fn new() -> Self { + Self {} + } +} + +#[async_trait] +impl super::ChannelAdapter for WhatsAppAdapter { + async fn send_message( + &self, + response: BotResponse, + ) -> Result<(), Box> { + info!( + "WhatsApp message would be sent to {}: {}", + response.user_id, response.content + ); + // TODO: Implement actual WhatsApp API integration + Ok(()) + } +} diff --git a/src/core/bot/mod.rs b/src/core/bot/mod.rs new file mode 100644 index 000000000..69c94ebcd --- /dev/null +++ b/src/core/bot/mod.rs @@ -0,0 +1,580 @@ +use crate::core::config::ConfigManager; +use crate::drive::drive_monitor::DriveMonitor; +use crate::llm::OpenAIClient; 
+use crate::shared::models::{BotResponse, UserMessage, UserSession}; +use crate::shared::state::AppState; +use axum::extract::ws::{Message, WebSocket}; +use axum::{ + extract::{ws::WebSocketUpgrade, Extension, Query, State}, + http::StatusCode, + response::{IntoResponse, Json}, +}; +use diesel::PgConnection; +use futures::{sink::SinkExt, stream::StreamExt}; +use log::{error, info, trace, warn}; +use serde_json; +use std::collections::HashMap; +use std::sync::Arc; +use tokio::sync::mpsc; +use tokio::sync::Mutex as AsyncMutex; +use uuid::Uuid; + +pub mod channels; +pub mod multimedia; + +/// Retrieves the default bot (first active bot) from the database. +pub fn get_default_bot(conn: &mut PgConnection) -> (Uuid, String) { + use crate::shared::models::schema::bots::dsl::*; + use diesel::prelude::*; + match bots + .filter(is_active.eq(true)) + .select((id, name)) + .first::<(Uuid, String)>(conn) + .optional() + { + Ok(Some((bot_id, bot_name))) => (bot_id, bot_name), + Ok(None) => { + warn!("No active bots found, using nil UUID"); + (Uuid::nil(), "default".to_string()) + } + Err(e) => { + error!("Failed to query default bot: {}", e); + (Uuid::nil(), "default".to_string()) + } + } +} + +#[derive(Debug)] +pub struct BotOrchestrator { + pub state: Arc, + pub mounted_bots: Arc>>>, +} + +impl BotOrchestrator { + pub fn new(state: Arc) -> Self { + Self { + state, + mounted_bots: Arc::new(AsyncMutex::new(HashMap::new())), + } + } + + // ... (All existing methods unchanged) ... 
+ + pub async fn mount_all_bots(&self) -> Result<(), Box> { + // No-op: bot mounting is handled elsewhere + info!("mount_all_bots called (no-op)"); + Ok(()) + } + + // Stream response to user via LLM + pub async fn stream_response( + &self, + message: UserMessage, + response_tx: mpsc::Sender, + ) -> Result<(), Box> { + trace!( + "Streaming response for user: {}, session: {}", + message.user_id, + message.session_id + ); + + let user_id = Uuid::parse_str(&message.user_id)?; + let session_id = Uuid::parse_str(&message.session_id)?; + let bot_id = Uuid::parse_str(&message.bot_id).unwrap_or_default(); + + // All database operations in one blocking section + let (session, context_data, history, model, key) = { + let state_clone = self.state.clone(); + tokio::task::spawn_blocking( + move || -> Result<_, Box> { + // Get session + let session = { + let mut sm = state_clone.session_manager.blocking_lock(); + sm.get_session_by_id(session_id)? + } + .ok_or_else(|| "Session not found")?; + + // Save user message + { + let mut sm = state_clone.session_manager.blocking_lock(); + sm.save_message(session.id, user_id, 1, &message.content, 1)?; + } + + // Get context and history + let context_data = { + let sm = state_clone.session_manager.blocking_lock(); + let rt = tokio::runtime::Handle::current(); + rt.block_on(async { + sm.get_session_context_data(&session.id, &session.user_id) + .await + })? + }; + + let history = { + let mut sm = state_clone.session_manager.blocking_lock(); + sm.get_conversation_history(session.id, user_id)? + }; + + // Get model config + let config_manager = ConfigManager::new(state_clone.conn.clone()); + let model = config_manager + .get_config(&bot_id, "llm-model", Some("gpt-3.5-turbo")) + .unwrap_or_else(|_| "gpt-3.5-turbo".to_string()); + let key = config_manager + .get_config(&bot_id, "llm-key", Some("")) + .unwrap_or_default(); + + Ok((session, context_data, history, model, key)) + }, + ) + .await?? 
+ }; + + // Build messages + let system_prompt = std::env::var("SYSTEM_PROMPT") + .unwrap_or_else(|_| "You are a helpful assistant.".to_string()); + let messages = OpenAIClient::build_messages(&system_prompt, &context_data, &history); + + // Stream from LLM + let (stream_tx, mut stream_rx) = mpsc::channel::(100); + let llm = self.state.llm_provider.clone(); + + tokio::spawn(async move { + if let Err(e) = llm + .generate_stream("", &messages, stream_tx, &model, &key) + .await + { + error!("LLM streaming error: {}", e); + } + }); + + let mut full_response = String::new(); + let mut chunk_count = 0; + + while let Some(chunk) = stream_rx.recv().await { + chunk_count += 1; + info!("Received LLM chunk #{}: {:?}", chunk_count, chunk); + full_response.push_str(&chunk); + + let response = BotResponse { + bot_id: message.bot_id.clone(), + user_id: message.user_id.clone(), + session_id: message.session_id.clone(), + channel: message.channel.clone(), + content: chunk, + message_type: 2, + stream_token: None, + is_complete: false, + suggestions: Vec::new(), + context_name: None, + context_length: 0, + context_max_length: 0, + }; + + info!("Sending streaming chunk to WebSocket"); + if let Err(e) = response_tx.send(response).await { + error!("Failed to send streaming chunk: {}", e); + break; + } + } + + info!( + "LLM streaming complete, received {} chunks, total length: {}", + chunk_count, + full_response.len() + ); + + // Send final complete response + let final_response = BotResponse { + bot_id: message.bot_id.clone(), + user_id: message.user_id.clone(), + session_id: message.session_id.clone(), + channel: message.channel.clone(), + content: full_response.clone(), + message_type: 2, + stream_token: None, + is_complete: true, + suggestions: Vec::new(), + context_name: None, + context_length: 0, + context_max_length: 0, + }; + + info!("Sending final complete response to WebSocket"); + response_tx.send(final_response).await?; + info!("Final response sent successfully"); + + // 
Save bot response in blocking context + let state_for_save = self.state.clone(); + let full_response_clone = full_response.clone(); + tokio::task::spawn_blocking( + move || -> Result<(), Box> { + let mut sm = state_for_save.session_manager.blocking_lock(); + sm.save_message(session.id, user_id, 2, &full_response_clone, 2)?; + Ok(()) + }, + ) + .await??; + + Ok(()) + } + + // ... (Other methods unchanged) ... + + pub async fn get_user_sessions( + &self, + user_id: Uuid, + ) -> Result, Box> { + let mut session_manager = self.state.session_manager.lock().await; + let sessions = session_manager.get_user_sessions(user_id)?; + Ok(sessions) + } + + pub async fn get_conversation_history( + &self, + session_id: Uuid, + user_id: Uuid, + ) -> Result, Box> { + let mut session_manager = self.state.session_manager.lock().await; + let history = session_manager.get_conversation_history(session_id, user_id)?; + Ok(history) + } + + // ... (Remaining BotOrchestrator methods unchanged) ... +} + +/* Axum handlers – placeholders that delegate to BotOrchestrator where appropriate */ + +/// WebSocket handler that upgrades HTTP connection to WebSocket +pub async fn websocket_handler( + ws: WebSocketUpgrade, + State(state): State>, + Query(params): Query>, +) -> impl IntoResponse { + let session_id = params + .get("session_id") + .and_then(|s| Uuid::parse_str(s).ok()); + let user_id = params.get("user_id").and_then(|s| Uuid::parse_str(s).ok()); + + if session_id.is_none() || user_id.is_none() { + return ( + StatusCode::BAD_REQUEST, + Json(serde_json::json!({ "error": "session_id and user_id are required" })), + ) + .into_response(); + } + + ws.on_upgrade(move |socket| { + handle_websocket(socket, state, session_id.unwrap(), user_id.unwrap()) + }) + .into_response() +} + +/// Handles an individual WebSocket connection +async fn handle_websocket( + socket: WebSocket, + state: Arc, + session_id: Uuid, + user_id: Uuid, +) { + let (mut sender, mut receiver) = socket.split(); + + // Create a 
channel for this WebSocket connection + let (tx, mut rx) = mpsc::channel::(100); + + // Register this connection with the web adapter + state + .web_adapter + .add_connection(session_id.to_string(), tx.clone()) + .await; + + // Also register in response_channels for BotOrchestrator + { + let mut channels = state.response_channels.lock().await; + channels.insert(session_id.to_string(), tx.clone()); + } + + info!( + "WebSocket connected for session: {}, user: {}", + session_id, user_id + ); + + // Execute start.bas if it exists + let state_for_start = state.clone(); + let session_for_start = { + let mut sm = state.session_manager.lock().await; + sm.get_session_by_id(session_id).ok().and_then(|opt| opt) + }; + + if let Some(session_clone) = session_for_start { + tokio::task::spawn_blocking(move || { + use crate::basic::ScriptService; + + let bot_name = "default"; // TODO: Get from session + let start_script_path = + format!("./work/{}.gbai/{}.gbdialog/start.bas", bot_name, bot_name); + + if let Ok(start_content) = std::fs::read_to_string(&start_script_path) { + info!("Executing start.bas for session {}", session_id); + let script_service = ScriptService::new(state_for_start, session_clone); + match script_service.compile(&start_content) { + Ok(ast) => { + if let Err(e) = script_service.run(&ast) { + error!("Failed to execute start.bas: {}", e); + } else { + info!("start.bas executed successfully for session {}", session_id); + } + } + Err(e) => { + error!("Failed to compile start.bas: {}", e); + } + } + } else { + info!("No start.bas found for bot {}", bot_name); + } + }); + } + + // Send initial welcome message + let welcome = serde_json::json!({ + "type": "connected", + "session_id": session_id, + "user_id": user_id, + "message": "Connected to bot server" + }); + + if let Ok(welcome_str) = serde_json::to_string(&welcome) { + info!("Sending welcome message to session {}", session_id); + if let Err(e) = sender.send(Message::Text(welcome_str.into())).await { + 
error!("Failed to send welcome message: {}", e); + } + } + + // Spawn task to send messages from the channel to the WebSocket + let mut send_task = tokio::spawn(async move { + while let Some(response) = rx.recv().await { + if let Ok(json_str) = serde_json::to_string(&response) { + if sender.send(Message::Text(json_str.into())).await.is_err() { + break; + } + } + } + }); + + // Handle incoming messages from the WebSocket + let state_clone = state.clone(); + let mut recv_task = tokio::spawn(async move { + while let Some(Ok(msg)) = receiver.next().await { + info!("WebSocket received raw message type: {:?}", msg); + match msg { + Message::Text(text) => { + info!( + "Received WebSocket text message (length {}): {}", + text.len(), + text + ); + match serde_json::from_str::(&text) { + Ok(user_msg) => { + info!( + "Successfully parsed user message from session: {}, content: {}", + session_id, user_msg.content + ); + // Process the message through the bot system + if let Err(e) = process_user_message( + state_clone.clone(), + session_id, + user_id, + user_msg, + ) + .await + { + error!("Error processing user message: {}", e); + } + } + Err(e) => { + error!( + "Failed to parse user message from session {}: {} - Parse error: {}", + session_id, text, e + ); + } + } + } + Message::Close(_) => { + info!( + "WebSocket close message received for session: {}", + session_id + ); + break; + } + Message::Ping(_data) => { + // Pings are automatically handled by axum + } + Message::Pong(_) => { + // Pongs are automatically handled by axum + } + _ => {} + } + } + }); + + // Wait for either task to finish + tokio::select! 
{ + _ = (&mut send_task) => { + recv_task.abort(); + } + _ = (&mut recv_task) => { + send_task.abort(); + } + } + + // Clean up: remove the connection from the adapter + state + .web_adapter + .remove_connection(&session_id.to_string()) + .await; + + // Also remove from response_channels + { + let mut channels = state.response_channels.lock().await; + channels.remove(&session_id.to_string()); + } + + info!("WebSocket disconnected for session: {}", session_id); +} + +/// Process a user message received via WebSocket +async fn process_user_message( + state: Arc, + session_id: Uuid, + user_id: Uuid, + user_msg: UserMessage, +) -> Result<(), Box> { + info!( + "Processing message from user {} in session {}: {}", + user_id, session_id, user_msg.content + ); + + // Get the response channel for this session + let tx = { + let channels = state.response_channels.lock().await; + channels.get(&session_id.to_string()).cloned() + }; + + if let Some(response_tx) = tx { + // Use BotOrchestrator to stream the response + let orchestrator = BotOrchestrator::new(state.clone()); + if let Err(e) = orchestrator.stream_response(user_msg, response_tx).await { + error!("Failed to stream response: {}", e); + } + } else { + error!("No response channel found for session {}", session_id); + } + + Ok(()) +} + +/// Create a new bot (placeholder implementation) +pub async fn create_bot_handler( + Extension(state): Extension>, + Json(payload): Json>, +) -> impl IntoResponse { + let bot_name = payload + .get("bot_name") + .cloned() + .unwrap_or_else(|| "default".to_string()); + + // Use state to create the bot in the database + let mut conn = match state.conn.get() { + Ok(conn) => conn, + Err(e) => { + return ( + StatusCode::INTERNAL_SERVER_ERROR, + Json(serde_json::json!({ "error": format!("Database error: {}", e) })), + ) + } + }; + + use crate::shared::models::schema::bots::dsl::*; + use diesel::prelude::*; + + let new_bot = ( + name.eq(&bot_name), + description.eq(format!("Bot created via API: 
{}", bot_name)), + llm_provider.eq("openai"), + llm_config.eq(serde_json::json!({"model": "gpt-4"})), + context_provider.eq("none"), + context_config.eq(serde_json::json!({})), + is_active.eq(true), + ); + + match diesel::insert_into(bots) + .values(&new_bot) + .execute(&mut conn) + { + Ok(_) => ( + StatusCode::OK, + Json(serde_json::json!({ + "status": format!("bot '{}' created successfully", bot_name), + "bot_name": bot_name + })), + ), + Err(e) => ( + StatusCode::INTERNAL_SERVER_ERROR, + Json(serde_json::json!({ "error": format!("Failed to create bot: {}", e) })), + ), + } +} + +/// Mount an existing bot (placeholder implementation) +pub async fn mount_bot_handler( + Extension(_state): Extension>, + Json(payload): Json>, +) -> impl IntoResponse { + let bot_guid = payload.get("bot_guid").cloned().unwrap_or_default(); + ( + StatusCode::OK, + Json(serde_json::json!({ "status": format!("bot '{}' mounted", bot_guid) })), + ) +} + +/// Handle user input for a bot (placeholder implementation) +pub async fn handle_user_input_handler( + Extension(_state): Extension>, + Json(payload): Json>, +) -> impl IntoResponse { + let session_id = payload.get("session_id").cloned().unwrap_or_default(); + let user_input = payload.get("input").cloned().unwrap_or_default(); + ( + StatusCode::OK, + Json( + serde_json::json!({ "status": format!("input '{}' processed for session {}", user_input, session_id) }), + ), + ) +} + +/// Retrieve user sessions (placeholder implementation) +pub async fn get_user_sessions_handler( + Extension(_state): Extension>, + Json(_payload): Json>, +) -> impl IntoResponse { + (StatusCode::OK, Json(serde_json::json!({ "sessions": [] }))) +} + +/// Retrieve conversation history (placeholder implementation) +pub async fn get_conversation_history_handler( + Extension(_state): Extension>, + Json(_payload): Json>, +) -> impl IntoResponse { + (StatusCode::OK, Json(serde_json::json!({ "history": [] }))) +} + +/// Send warning (placeholder implementation) +pub async fn 
send_warning_handler( + Extension(_state): Extension>, + Json(_payload): Json>, +) -> impl IntoResponse { + ( + StatusCode::OK, + Json(serde_json::json!({ "status": "warning acknowledged" })), + ) +} diff --git a/src/core/bot/multimedia.rs b/src/core/bot/multimedia.rs new file mode 100644 index 000000000..d8339e80d --- /dev/null +++ b/src/core/bot/multimedia.rs @@ -0,0 +1,542 @@ +//! Multimedia Message Handling Module +//! +//! This module provides support for handling various multimedia message types including +//! images, videos, audio, documents, and web search results. +//! +//! Key features: +//! - Multiple media type support (images, videos, audio, documents) +//! - Media upload and download handling +//! - Thumbnail generation +//! - Web search integration +//! - Storage abstraction for S3-compatible backends +//! - URL processing and validation + +use crate::shared::models::{BotResponse, UserMessage}; +use anyhow::Result; +use async_trait::async_trait; +use base64::{engine::general_purpose::STANDARD, Engine}; +use serde::{Deserialize, Serialize}; +use uuid::Uuid; + +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(tag = "type", rename_all = "snake_case")] +pub enum MultimediaMessage { + Text { + content: String, + }, + Image { + url: String, + caption: Option, + mime_type: String, + }, + Video { + url: String, + thumbnail_url: Option, + caption: Option, + duration: Option, + mime_type: String, + }, + Audio { + url: String, + duration: Option, + mime_type: String, + }, + Document { + url: String, + filename: String, + mime_type: String, + }, + WebSearch { + query: String, + results: Vec, + }, + Location { + latitude: f64, + longitude: f64, + name: Option, + address: Option, + }, + MeetingInvite { + meeting_id: String, + meeting_url: String, + start_time: Option, + duration: Option, + participants: Vec, + }, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct SearchResult { + pub title: String, + pub url: String, + pub snippet: String, + 
pub thumbnail: Option, +} + +#[derive(Debug, Serialize, Deserialize)] +pub struct MediaUploadRequest { + pub file_name: String, + pub content_type: String, + pub data: Vec, + pub user_id: String, + pub session_id: String, +} + +#[derive(Debug, Serialize, Deserialize)] +pub struct MediaUploadResponse { + pub media_id: String, + pub url: String, + pub thumbnail_url: Option, +} + +/// Trait for handling multimedia messages +#[async_trait] +pub trait MultimediaHandler: Send + Sync { + /// Process an incoming multimedia message + async fn process_multimedia( + &self, + message: MultimediaMessage, + user_id: &str, + session_id: &str, + ) -> Result; + + /// Upload media file to storage + async fn upload_media(&self, request: MediaUploadRequest) -> Result; + + /// Download media file from URL + async fn download_media(&self, url: &str) -> Result>; + + /// Perform web search + async fn web_search(&self, query: &str, max_results: usize) -> Result>; + + /// Generate thumbnail for video/image + async fn generate_thumbnail(&self, media_url: &str) -> Result; +} + +/// Default implementation for multimedia handling +#[derive(Debug)] +pub struct DefaultMultimediaHandler { + storage_client: Option, + search_api_key: Option, +} + +impl DefaultMultimediaHandler { + pub fn new(storage_client: Option, search_api_key: Option) -> Self { + Self { + storage_client, + search_api_key, + } + } + + pub fn storage_client(&self) -> &Option { + &self.storage_client + } + + pub fn search_api_key(&self) -> &Option { + &self.search_api_key + } +} + +#[async_trait] +impl MultimediaHandler for DefaultMultimediaHandler { + async fn process_multimedia( + &self, + message: MultimediaMessage, + user_id: &str, + session_id: &str, + ) -> Result { + match message { + MultimediaMessage::Text { content } => { + // Process as regular text message + Ok(BotResponse { + bot_id: "default".to_string(), + user_id: user_id.to_string(), + session_id: session_id.to_string(), + channel: "multimedia".to_string(), + 
content, + message_type: 0, + stream_token: None, + is_complete: true, + suggestions: Vec::new(), + context_name: None, + context_length: 0, + context_max_length: 0, + }) + } + MultimediaMessage::Image { url, caption, .. } => { + // Process image with optional caption + log::debug!("Processing image from URL: {}", url); + let response_content = format!( + "I see you've shared an image from {}{}. {}", + url, + caption + .as_ref() + .map(|c| format!(" with caption: {}", c)) + .unwrap_or_default(), + "Let me analyze this for you." + ); + + Ok(BotResponse { + bot_id: "default".to_string(), + user_id: user_id.to_string(), + session_id: session_id.to_string(), + channel: "multimedia".to_string(), + content: response_content, + message_type: 0, + stream_token: None, + is_complete: true, + suggestions: Vec::new(), + context_name: None, + context_length: 0, + context_max_length: 0, + }) + } + MultimediaMessage::Video { + url, + caption, + duration, + .. + } => { + // Process video + log::debug!("Processing video from URL: {}", url); + let response_content = format!( + "You've shared a video from {}{}{}. Processing video content...", + url, + duration.map(|d| format!(" ({}s)", d)).unwrap_or_default(), + caption + .as_ref() + .map(|c| format!(" - {}", c)) + .unwrap_or_default() + ); + + Ok(BotResponse { + bot_id: "default".to_string(), + user_id: user_id.to_string(), + session_id: session_id.to_string(), + channel: "multimedia".to_string(), + content: response_content, + message_type: 0, + stream_token: None, + is_complete: true, + suggestions: Vec::new(), + context_name: None, + context_length: 0, + context_max_length: 0, + }) + } + MultimediaMessage::WebSearch { query, .. } => { + // Perform web search + let results = self.web_search(&query, 5).await?; + let response_content = if results.is_empty() { + format!("No results found for: {}", query) + } else { + let results_text = results + .iter() + .enumerate() + .map(|(i, r)| { + format!("{}. 
[{}]({})\n {}", i + 1, r.title, r.url, r.snippet) + }) + .collect::>() + .join("\n\n"); + + format!("Search results for \"{}\":\n\n{}", query, results_text) + }; + + Ok(BotResponse { + bot_id: "default".to_string(), + user_id: user_id.to_string(), + session_id: session_id.to_string(), + channel: "multimedia".to_string(), + content: response_content, + message_type: 0, + stream_token: None, + is_complete: true, + suggestions: Vec::new(), + context_name: None, + context_length: 0, + context_max_length: 0, + }) + } + MultimediaMessage::MeetingInvite { + meeting_url, + start_time, + .. + } => { + let response_content = format!( + "Meeting invite received. Join at: {}{}", + meeting_url, + start_time + .as_ref() + .map(|t| format!("\nScheduled for: {}", t)) + .unwrap_or_default() + ); + + Ok(BotResponse { + bot_id: "default".to_string(), + user_id: user_id.to_string(), + session_id: session_id.to_string(), + channel: "multimedia".to_string(), + content: response_content, + message_type: 0, + stream_token: None, + is_complete: true, + suggestions: Vec::new(), + context_name: None, + context_length: 0, + context_max_length: 0, + }) + } + _ => { + // Handle other message types + Ok(BotResponse { + bot_id: "default".to_string(), + user_id: user_id.to_string(), + session_id: session_id.to_string(), + channel: "multimedia".to_string(), + content: "Message received and processing...".to_string(), + message_type: 0, + stream_token: None, + is_complete: true, + suggestions: Vec::new(), + context_name: None, + context_length: 0, + context_max_length: 0, + }) + } + } + } + + async fn upload_media(&self, request: MediaUploadRequest) -> Result { + let media_id = Uuid::new_v4().to_string(); + let key = format!( + "media/{}/{}/{}", + request.user_id, request.session_id, request.file_name + ); + + if let Some(client) = &self.storage_client { + // Upload to S3 + client + .put_object() + .bucket("botserver-media") + .key(&key) + .body(request.data.into()) + 
.content_type(&request.content_type) + .send() + .await?; + + let url = format!("https://storage.botserver.com/{}", key); + + Ok(MediaUploadResponse { + media_id, + url, + thumbnail_url: None, + }) + } else { + // Fallback to local storage + let local_path = format!("./media/{}", key); + std::fs::create_dir_all(std::path::Path::new(&local_path).parent().unwrap())?; + std::fs::write(&local_path, request.data)?; + + Ok(MediaUploadResponse { + media_id, + url: format!("file://{}", local_path), + thumbnail_url: None, + }) + } + } + + async fn download_media(&self, url: &str) -> Result> { + if url.starts_with("http://") || url.starts_with("https://") { + let response = reqwest::get(url).await?; + Ok(response.bytes().await?.to_vec()) + } else if url.starts_with("file://") { + let path = url.strip_prefix("file://").unwrap(); + Ok(std::fs::read(path)?) + } else { + Err(anyhow::anyhow!("Unsupported URL scheme: {}", url)) + } + } + + async fn web_search(&self, query: &str, max_results: usize) -> Result> { + // Implement web search using a search API (e.g., Bing, Google, DuckDuckGo) + // For now, return mock results + let mock_results = vec![ + SearchResult { + title: format!("Result 1 for: {}", query), + url: "https://example.com/1".to_string(), + snippet: "This is a sample search result snippet...".to_string(), + thumbnail: None, + }, + SearchResult { + title: format!("Result 2 for: {}", query), + url: "https://example.com/2".to_string(), + snippet: "Another sample search result...".to_string(), + thumbnail: None, + }, + ]; + + Ok(mock_results.into_iter().take(max_results).collect()) + } + + async fn generate_thumbnail(&self, media_url: &str) -> Result { + // Generate thumbnail using image/video processing libraries + // For now, return the same URL + Ok(media_url.to_string()) + } +} + +/// Extension trait for UserMessage to support multimedia +impl UserMessage { + pub fn to_multimedia(&self) -> MultimediaMessage { + // Parse message content to determine type + if 
self.content.starts_with("http") { + // Check if it's an image/video URL + if self.content.contains(".jpg") + || self.content.contains(".png") + || self.content.contains(".gif") + { + MultimediaMessage::Image { + url: self.content.clone(), + caption: None, + mime_type: "image/jpeg".to_string(), + } + } else if self.content.contains(".mp4") + || self.content.contains(".webm") + || self.content.contains(".mov") + { + MultimediaMessage::Video { + url: self.content.clone(), + thumbnail_url: None, + caption: None, + duration: None, + mime_type: "video/mp4".to_string(), + } + } else { + MultimediaMessage::Text { + content: self.content.clone(), + } + } + } else if self.content.starts_with("/search ") { + let query = self + .content + .strip_prefix("/search ") + .unwrap_or(&self.content); + MultimediaMessage::WebSearch { + query: query.to_string(), + results: Vec::new(), + } + } else { + MultimediaMessage::Text { + content: self.content.clone(), + } + } + } +} + +// ============================================================================ +// REST API Handlers +// ============================================================================ + +use crate::shared::state::AppState; +use axum::{ + extract::{Path, State}, + http::StatusCode, + response::IntoResponse, + Json, +}; +use std::sync::Arc; + +/// Upload media file +pub async fn upload_media_handler( + State(state): State>, + Json(request): Json, +) -> impl IntoResponse { + let handler = DefaultMultimediaHandler::new(state.drive.clone(), None); + + match handler.upload_media(request).await { + Ok(response) => (StatusCode::OK, Json(serde_json::json!(response))), + Err(e) => ( + StatusCode::INTERNAL_SERVER_ERROR, + Json(serde_json::json!({"error": e.to_string()})), + ), + } +} + +/// Download media file by ID +pub async fn download_media_handler( + State(state): State>, + Path(media_id): Path, +) -> impl IntoResponse { + let handler = DefaultMultimediaHandler::new(state.drive.clone(), None); + + // Construct URL from 
media_id (this would be stored in DB in production) + let url = format!("https://storage.botserver.com/media/{}", media_id); + + match handler.download_media(&url).await { + Ok(data) => ( + StatusCode::OK, + Json(serde_json::json!({ + "media_id": media_id, + "size": data.len(), + "data": STANDARD.encode(&data) + })), + ), + Err(e) => ( + StatusCode::INTERNAL_SERVER_ERROR, + Json(serde_json::json!({"error": e.to_string()})), + ), + } +} + +/// Generate thumbnail for media +pub async fn generate_thumbnail_handler( + State(state): State>, + Path(media_id): Path, +) -> impl IntoResponse { + let handler = DefaultMultimediaHandler::new(state.drive.clone(), None); + + // Construct URL from media_id + let url = format!("https://storage.botserver.com/media/{}", media_id); + + match handler.generate_thumbnail(&url).await { + Ok(thumbnail_url) => ( + StatusCode::OK, + Json(serde_json::json!({ + "media_id": media_id, + "thumbnail_url": thumbnail_url + })), + ), + Err(e) => ( + StatusCode::INTERNAL_SERVER_ERROR, + Json(serde_json::json!({"error": e.to_string()})), + ), + } +} + +/// Perform web search +pub async fn web_search_handler( + State(state): State>, + Json(payload): Json, +) -> impl IntoResponse { + let query = payload.get("query").and_then(|q| q.as_str()).unwrap_or(""); + let max_results = payload + .get("max_results") + .and_then(|m| m.as_u64()) + .unwrap_or(10) as usize; + + let handler = DefaultMultimediaHandler::new(state.drive.clone(), None); + + match handler.web_search(query, max_results).await { + Ok(results) => ( + StatusCode::OK, + Json(serde_json::json!({ + "query": query, + "results": results + })), + ), + Err(e) => ( + StatusCode::INTERNAL_SERVER_ERROR, + Json(serde_json::json!({"error": e.to_string()})), + ), + } +} diff --git a/src/core/bot/ui.rs b/src/core/bot/ui.rs new file mode 100644 index 000000000..8b1378917 --- /dev/null +++ b/src/core/bot/ui.rs @@ -0,0 +1 @@ + diff --git a/src/core/config/config.test.rs b/src/core/config/config.test.rs new file 
mode 100644 index 000000000..47c57c9c6 --- /dev/null +++ b/src/core/config/config.test.rs @@ -0,0 +1,10 @@ +#[cfg(test)] +mod tests { + use super::*; + use crate::tests::test_util; + #[test] + fn test_config_module() { + test_util::setup(); + assert!(true, "Basic config module test"); + } +} diff --git a/src/core/config/mod.rs b/src/core/config/mod.rs new file mode 100644 index 000000000..ddcbc9aa0 --- /dev/null +++ b/src/core/config/mod.rs @@ -0,0 +1,240 @@ +use crate::shared::utils::DbPool; +use diesel::prelude::*; +use diesel::r2d2::{ConnectionManager, PooledConnection}; +use std::collections::HashMap; +use uuid::Uuid; + +// Type alias for backward compatibility +pub type Config = AppConfig; + +#[derive(Clone, Debug)] +pub struct AppConfig { + pub drive: DriveConfig, + pub server: ServerConfig, + pub email: EmailConfig, + pub site_path: String, +} +#[derive(Clone, Debug)] +pub struct DriveConfig { + pub server: String, + pub access_key: String, + pub secret_key: String, +} +#[derive(Clone, Debug)] +pub struct ServerConfig { + pub host: String, + pub port: u16, +} +#[derive(Clone, Debug)] +pub struct EmailConfig { + pub server: String, + pub port: u16, + pub username: String, + pub password: String, + pub from: String, + pub smtp_server: String, + pub smtp_port: u16, +} +impl AppConfig { + pub fn from_database(pool: &DbPool) -> Result { + use crate::shared::models::schema::bot_configuration::dsl::*; + let mut conn = pool.get().map_err(|e| { + diesel::result::Error::DatabaseError( + diesel::result::DatabaseErrorKind::UnableToSendCommand, + Box::new(e.to_string()), + ) + })?; + let config_map: HashMap = + bot_configuration + .select(( + id, + bot_id, + config_key, + config_value, + config_type, + is_encrypted, + )) + .load::<(Uuid, Uuid, String, String, String, bool)>(&mut conn) + .unwrap_or_default() + .into_iter() + .map(|(_, _, key, value, _, _)| { + ( + key.clone(), + (Uuid::nil(), Uuid::nil(), key, value, String::new(), false), + ) + }) + .collect(); + let mut 
get_str = |key: &str, default: &str| -> String { + bot_configuration + .filter(config_key.eq(key)) + .select(config_value) + .first::(&mut conn) + .unwrap_or_else(|_| default.to_string()) + }; + let _get_u32 = |key: &str, default: u32| -> u32 { + config_map + .get(key) + .and_then(|v| v.3.parse().ok()) + .unwrap_or(default) + }; + let get_u16 = |key: &str, default: u16| -> u16 { + config_map + .get(key) + .and_then(|v| v.3.parse().ok()) + .unwrap_or(default) + }; + let _get_bool = |key: &str, default: bool| -> bool { + config_map + .get(key) + .map(|v| v.3.to_lowercase() == "true") + .unwrap_or(default) + }; + let drive = DriveConfig { + server: std::env::var("DRIVE_SERVER").unwrap(), + access_key: std::env::var("DRIVE_ACCESSKEY").unwrap(), + secret_key: std::env::var("DRIVE_SECRET").unwrap(), + }; + let email = EmailConfig { + server: get_str("EMAIL_IMAP_SERVER", "imap.gmail.com"), + port: get_u16("EMAIL_IMAP_PORT", 993), + username: get_str("EMAIL_USERNAME", ""), + password: get_str("EMAIL_PASSWORD", ""), + from: get_str("EMAIL_FROM", ""), + smtp_server: get_str("EMAIL_SMTP_SERVER", "smtp.gmail.com"), + smtp_port: get_u16("EMAIL_SMTP_PORT", 587), + }; + Ok(AppConfig { + drive, + email, + server: ServerConfig { + host: get_str("SERVER_HOST", "127.0.0.1"), + port: get_u16("SERVER_PORT", 8080), + }, + site_path: { + ConfigManager::new(pool.clone()) + .get_config(&Uuid::nil(), "SITES_ROOT", Some("./botserver-stack/sites"))? 
+ .to_string() + }, + }) + } + pub fn from_env() -> Result { + let minio = DriveConfig { + server: std::env::var("DRIVE_SERVER").unwrap(), + access_key: std::env::var("DRIVE_ACCESSKEY").unwrap(), + secret_key: std::env::var("DRIVE_SECRET").unwrap(), + }; + let email = EmailConfig { + server: std::env::var("EMAIL_IMAP_SERVER") + .unwrap_or_else(|_| "imap.gmail.com".to_string()), + port: std::env::var("EMAIL_IMAP_PORT") + .ok() + .and_then(|p| p.parse().ok()) + .unwrap_or(993), + username: std::env::var("EMAIL_USERNAME").unwrap_or_default(), + password: std::env::var("EMAIL_PASSWORD").unwrap_or_default(), + from: std::env::var("EMAIL_FROM").unwrap_or_default(), + smtp_server: std::env::var("EMAIL_SMTP_SERVER") + .unwrap_or_else(|_| "smtp.gmail.com".to_string()), + smtp_port: std::env::var("EMAIL_SMTP_PORT") + .ok() + .and_then(|p| p.parse().ok()) + .unwrap_or(587), + }; + Ok(AppConfig { + drive: minio, + email, + server: ServerConfig { + host: std::env::var("SERVER_HOST").unwrap_or_else(|_| "127.0.0.1".to_string()), + port: std::env::var("SERVER_PORT") + .ok() + .and_then(|p| p.parse().ok()) + .unwrap_or(8080), + }, + site_path: { + let pool = create_conn()?; + ConfigManager::new(pool).get_config( + &Uuid::nil(), + "SITES_ROOT", + Some("./botserver-stack/sites"), + )? 
+ }, + }) + } +} +#[derive(Debug)] +pub struct ConfigManager { + conn: DbPool, +} +impl ConfigManager { + pub fn new(conn: DbPool) -> Self { + Self { conn } + } + fn get_conn( + &self, + ) -> Result>, diesel::result::Error> { + self.conn.get().map_err(|e| { + diesel::result::Error::DatabaseError( + diesel::result::DatabaseErrorKind::UnableToSendCommand, + Box::new(e.to_string()), + ) + }) + } + pub fn get_config( + &self, + code_bot_id: &uuid::Uuid, + key: &str, + fallback: Option<&str>, + ) -> Result { + use crate::shared::models::schema::bot_configuration::dsl::*; + let mut conn = self.get_conn()?; + let fallback_str = fallback.unwrap_or(""); + let result = bot_configuration + .filter(bot_id.eq(code_bot_id)) + .filter(config_key.eq(key)) + .select(config_value) + .first::(&mut conn); + let value = match result { + Ok(v) => v, + Err(_) => { + let (default_bot_id, _default_bot_name) = crate::bot::get_default_bot(&mut conn); + bot_configuration + .filter(bot_id.eq(default_bot_id)) + .filter(config_key.eq(key)) + .select(config_value) + .first::(&mut conn) + .unwrap_or(fallback_str.to_string()) + } + }; + Ok(value) + } + pub fn sync_gbot_config(&self, bot_id: &uuid::Uuid, content: &str) -> Result { + use sha2::{Digest, Sha256}; + let mut hasher = Sha256::new(); + hasher.update(content.as_bytes()); + let mut conn = self + .get_conn() + .map_err(|e| format!("Failed to acquire connection: {}", e))?; + let mut updated = 0; + for line in content.lines().skip(1) { + let parts: Vec<&str> = line.split(',').collect(); + if parts.len() >= 2 { + let key = parts[0].trim(); + let value = parts[1].trim(); + let new_id: uuid::Uuid = uuid::Uuid::new_v4(); + diesel::sql_query("INSERT INTO bot_configuration (id, bot_id, config_key, config_value, config_type) VALUES ($1, $2, $3, $4, 'string') ON CONFLICT (bot_id, config_key) DO UPDATE SET config_value = EXCLUDED.config_value, updated_at = NOW()") + .bind::(new_id) + .bind::(bot_id) + .bind::(key) + .bind::(value) + .execute(&mut conn) 
+ .map_err(|e| format!("Failed to update config: {}", e))?; + updated += 1; + } + } + Ok(updated) + } +} +fn create_conn() -> Result { + crate::shared::utils::create_conn() + .map_err(|e| anyhow::anyhow!("Failed to create database pool: {}", e)) +} diff --git a/src/core/mod.rs b/src/core/mod.rs new file mode 100644 index 000000000..11f7eda6a --- /dev/null +++ b/src/core/mod.rs @@ -0,0 +1,8 @@ +pub mod automation; +pub mod bootstrap; +pub mod bot; +pub mod config; +pub mod package_manager; +pub mod session; +pub mod shared; +pub mod web_server; diff --git a/src/core/package_manager/cli.rs b/src/core/package_manager/cli.rs new file mode 100644 index 000000000..ddf77caa2 --- /dev/null +++ b/src/core/package_manager/cli.rs @@ -0,0 +1,168 @@ +use anyhow::Result; +use std::env; +use std::process::Command; +use crate::package_manager::{get_all_components, InstallMode, PackageManager}; +pub async fn run() -> Result<()> { + env_logger::init(); + let args: Vec = env::args().collect(); + if args.len() < 2 { + print_usage(); + return Ok(()); + } +use tracing::info; +fn print_usage(){info!("usage: botserver [options]")} + let command = &args[1]; + match command.as_str() { + "start" => { + let mode = if args.contains(&"--container".to_string()) { + InstallMode::Container + } else { + InstallMode::Local + }; + let tenant = if let Some(idx) = args.iter().position(|a| a == "--tenant") { + args.get(idx + 1).cloned() + } else { + None + }; + let pm = PackageManager::new(mode, tenant)?; + println!("Starting all installed components..."); + let components = get_all_components(); + for component in components { + if pm.is_installed(component.name) { + match pm.start(component.name) { + Ok(_) => println!("✓ Started {}", component.name), + Err(e) => eprintln!("✗ Failed to start {}: {}", component.name, e), + } + } + } + println!("✓ BotServer components started"); + } + "stop" => { + println!("Stopping all components..."); + let components = get_all_components(); + for component in 
components { + let _ = Command::new("pkill").arg("-f").arg(component.termination_command).output(); + } + println!("✓ BotServer components stopped"); + } + "restart" => { + println!("Restarting BotServer..."); + let components = get_all_components(); + for component in components { + let _ = Command::new("pkill").arg("-f").arg(component.termination_command).output(); + } + tokio::time::sleep(tokio::time::Duration::from_secs(2)).await; + let mode = if args.contains(&"--container".to_string()) { + InstallMode::Container + } else { + InstallMode::Local + }; + let tenant = if let Some(idx) = args.iter().position(|a| a == "--tenant") { + args.get(idx + 1).cloned() + } else { + None + }; + let pm = PackageManager::new(mode, tenant)?; + let components = get_all_components(); + for component in components { + if pm.is_installed(component.name) { + let _ = pm.start(component.name); + } + } + println!("✓ BotServer restarted"); + } + "install" => { + if args.len() < 3 { + eprintln!("Usage: botserver install [--container] [--tenant ]"); + return Ok(()); + } + let component = &args[2]; + let mode = if args.contains(&"--container".to_string()) { + InstallMode::Container + } else { + InstallMode::Local + }; + let tenant = if let Some(idx) = args.iter().position(|a| a == "--tenant") { + args.get(idx + 1).cloned() + } else { + None + }; + let pm = PackageManager::new(mode, tenant)?; + pm.install(component).await?; + println!("✓ Component '{}' installed successfully", component); + } + "remove" => { + if args.len() < 3 { + eprintln!("Usage: botserver remove [--container] [--tenant ]"); + return Ok(()); + } + let component = &args[2]; + let mode = if args.contains(&"--container".to_string()) { + InstallMode::Container + } else { + InstallMode::Local + }; + let tenant = if let Some(idx) = args.iter().position(|a| a == "--tenant") { + args.get(idx + 1).cloned() + } else { + None + }; + let pm = PackageManager::new(mode, tenant)?; + pm.remove(component)?; + println!("✓ Component '{}' 
removed successfully", component); + } + "list" => { + let mode = if args.contains(&"--container".to_string()) { + InstallMode::Container + } else { + InstallMode::Local + }; + let tenant = if let Some(idx) = args.iter().position(|a| a == "--tenant") { + args.get(idx + 1).cloned() + } else { + None + }; + let pm = PackageManager::new(mode, tenant)?; + println!("Available components:"); + for component in pm.list() { + let status = if pm.is_installed(&component) { + "✓ installed" + } else { + " available" + }; + println!(" {} {}", status, component); + } + } + "status" => { + if args.len() < 3 { + eprintln!("Usage: botserver status [--container] [--tenant ]"); + return Ok(()); + } + let component = &args[2]; + let mode = if args.contains(&"--container".to_string()) { + InstallMode::Container + } else { + InstallMode::Local + }; + let tenant = if let Some(idx) = args.iter().position(|a| a == "--tenant") { + args.get(idx + 1).cloned() + } else { + None + }; + let pm = PackageManager::new(mode, tenant)?; + if pm.is_installed(component) { + println!("✓ Component '{}' is installed", component); + } else { + println!("✗ Component '{}' is not installed", component); + } + } + "--help" | "-h" => { + print_usage(); + } + _ => { + eprintln!("Unknown command: {}", command); + print_usage(); + } + } + Ok(()) +} diff --git a/src/core/package_manager/component.rs b/src/core/package_manager/component.rs new file mode 100644 index 000000000..c808eb807 --- /dev/null +++ b/src/core/package_manager/component.rs @@ -0,0 +1,22 @@ +use std::collections::HashMap; +#[derive(Debug, Clone)] +pub struct ComponentConfig { + pub name: String, + pub ports: Vec, + pub dependencies: Vec, + pub linux_packages: Vec, + pub macos_packages: Vec, + pub windows_packages: Vec, + pub download_url: Option, + pub binary_name: Option, + pub pre_install_cmds_linux: Vec, + pub post_install_cmds_linux: Vec, + pub pre_install_cmds_macos: Vec, + pub post_install_cmds_macos: Vec, + pub pre_install_cmds_windows: 
Vec, + pub post_install_cmds_windows: Vec, + pub env_vars: HashMap, + pub data_download_list: Vec, + pub exec_cmd: String, + pub check_cmd: String, +} diff --git a/src/core/package_manager/facade.rs b/src/core/package_manager/facade.rs new file mode 100644 index 000000000..5c66bab55 --- /dev/null +++ b/src/core/package_manager/facade.rs @@ -0,0 +1,632 @@ +use crate::package_manager::component::ComponentConfig; +use crate::package_manager::installer::PackageManager; +use crate::package_manager::InstallMode; +use crate::package_manager::OsType; +use crate::shared::utils::{self, parse_database_url}; +use anyhow::{Context, Result}; +use log::{error, trace, warn}; +use reqwest::Client; +use std::collections::HashMap; +use std::path::PathBuf; +use std::process::Command; +impl PackageManager { + pub async fn install(&self, component_name: &str) -> Result<()> { + let component = self + .components + .get(component_name) + .context(format!("Component '{}' not found", component_name))?; + trace!( + "Starting installation of component '{}' in {:?} mode", + component_name, + self.mode + ); + for dep in &component.dependencies { + if !self.is_installed(dep) { + warn!("Installing missing dependency: {}", dep); + Box::pin(self.install(dep)).await?; + } + } + match self.mode { + InstallMode::Local => self.install_local(component).await?, + InstallMode::Container => self.install_container(component)?, + } + trace!( + "Component '{}' installation completed successfully", + component_name + ); + Ok(()) + } + pub async fn install_local(&self, component: &ComponentConfig) -> Result<()> { + trace!( + "Installing component '{}' locally to {}", + component.name, + self.base_path.display() + ); + self.create_directories(&component.name)?; + let (pre_cmds, post_cmds) = match self.os_type { + OsType::Linux => ( + &component.pre_install_cmds_linux, + &component.post_install_cmds_linux, + ), + OsType::MacOS => ( + &component.pre_install_cmds_macos, + &component.post_install_cmds_macos, + ), + 
OsType::Windows => ( + &component.pre_install_cmds_windows, + &component.post_install_cmds_windows, + ), + }; + self.run_commands(pre_cmds, "local", &component.name)?; + self.install_system_packages(component)?; + if let Some(url) = &component.download_url { + let url = url.clone(); + let name = component.name.clone(); + let binary_name = component.binary_name.clone(); + self.download_and_install(&url, &name, binary_name.as_deref()) + .await?; + } + if !component.data_download_list.is_empty() { + for url in &component.data_download_list { + let filename = url.split('/').last().unwrap_or("download.tmp"); + let output_path = self + .base_path + .join("data") + .join(&component.name) + .join(filename); + utils::download_file(url, output_path.to_str().unwrap()).await?; + } + } + self.run_commands(post_cmds, "local", &component.name)?; + Ok(()) + } + pub fn install_container(&self, component: &ComponentConfig) -> Result<()> { + let container_name = format!("{}-{}", self.tenant, component.name); + let output = Command::new("lxc") + .args(&[ + "launch", + "images:debian/12", + &container_name, + "-c", + "security.privileged=true", + ]) + .output()?; + if !output.status.success() { + return Err(anyhow::anyhow!( + "LXC container creation failed: {}", + String::from_utf8_lossy(&output.stderr) + )); + } + std::thread::sleep(std::time::Duration::from_secs(15)); + self.exec_in_container(&container_name, "mkdir -p /opt/gbo/{bin,data,conf,logs}")?; + let (pre_cmds, post_cmds) = match self.os_type { + OsType::Linux => ( + &component.pre_install_cmds_linux, + &component.post_install_cmds_linux, + ), + OsType::MacOS => ( + &component.pre_install_cmds_macos, + &component.post_install_cmds_macos, + ), + OsType::Windows => ( + &component.pre_install_cmds_windows, + &component.post_install_cmds_windows, + ), + }; + self.run_commands(pre_cmds, &container_name, &component.name)?; + let packages = match self.os_type { + OsType::Linux => &component.linux_packages, + OsType::MacOS => 
&component.macos_packages, + OsType::Windows => &component.windows_packages, + }; + if !packages.is_empty() { + let pkg_list = packages.join(" "); + self.exec_in_container( + &container_name, + &format!("apt-get install -y {}", pkg_list), + )?; + } + if let Some(url) = &component.download_url { + self.download_in_container( + &container_name, + url, + &component.name, + component.binary_name.as_deref(), + )?; + } + self.run_commands(post_cmds, &container_name, &component.name)?; + self.exec_in_container( + &container_name, + "useradd --system --no-create-home --shell /bin/false gbuser", + )?; + self.mount_container_directories(&container_name, &component.name)?; + if !component.exec_cmd.is_empty() { + self.create_container_service( + &container_name, + &component.name, + &component.exec_cmd, + &component.env_vars, + )?; + } + self.setup_port_forwarding(&container_name, &component.ports)?; + trace!( + "Container installation of '{}' completed in {}", + component.name, + container_name + ); + Ok(()) + } + + pub fn remove(&self, component_name: &str) -> Result<()> { + let component = self + .components + .get(component_name) + .context(format!("Component '{}' not found", component_name))?; + match self.mode { + InstallMode::Local => self.remove_local(component)?, + InstallMode::Container => self.remove_container(component)?, + } + Ok(()) + } + pub fn remove_local(&self, component: &ComponentConfig) -> Result<()> { + let bin_path = self.base_path.join("bin").join(&component.name); + let _ = std::fs::remove_dir_all(bin_path); + Ok(()) + } + pub fn remove_container(&self, component: &ComponentConfig) -> Result<()> { + let container_name = format!("{}-{}", self.tenant, component.name); + let _ = Command::new("lxc") + .args(&["stop", &container_name]) + .output(); + let output = Command::new("lxc") + .args(&["delete", &container_name]) + .output()?; + if !output.status.success() { + warn!( + "Container deletion had issues: {}", + String::from_utf8_lossy(&output.stderr) + 
); + } + Ok(()) + } + pub fn list(&self) -> Vec { + self.components.keys().cloned().collect() + } + pub fn is_installed(&self, component_name: &str) -> bool { + match self.mode { + InstallMode::Local => { + let bin_path = self.base_path.join("bin").join(component_name); + bin_path.exists() + } + InstallMode::Container => { + let container_name = format!("{}-{}", self.tenant, component_name); + let output = Command::new("lxc") + .args(&["list", &container_name, "--format=json"]) + .output() + .unwrap(); + if !output.status.success() { + return false; + } + let output_str = String::from_utf8_lossy(&output.stdout); + !output_str.contains("\"name\":\"") || output_str.contains("\"status\":\"Stopped\"") + } + } + } + pub fn create_directories(&self, component: &str) -> Result<()> { + let dirs = ["bin", "data", "conf", "logs"]; + for dir in &dirs { + let path = self.base_path.join(dir).join(component); + std::fs::create_dir_all(&path) + .context(format!("Failed to create directory: {:?}", path))?; + } + Ok(()) + } + pub fn install_system_packages(&self, component: &ComponentConfig) -> Result<()> { + let packages = match self.os_type { + OsType::Linux => &component.linux_packages, + OsType::MacOS => &component.macos_packages, + OsType::Windows => &component.windows_packages, + }; + if packages.is_empty() { + return Ok(()); + } + trace!( + "Installing {} system packages for component '{}'", + packages.len(), + component.name + ); + match self.os_type { + OsType::Linux => { + let output = Command::new("apt-get").args(&["update"]).output()?; + if !output.status.success() { + warn!("apt-get update had issues"); + } + let output = Command::new("apt-get") + .args(&["install", "-y"]) + .args(packages) + .output()?; + if !output.status.success() { + warn!("Some packages may have failed to install"); + } + } + OsType::MacOS => { + let output = Command::new("brew") + .args(&["install"]) + .args(packages) + .output()?; + if !output.status.success() { + warn!("Homebrew installation 
had warnings"); + } + } + OsType::Windows => { + warn!("Windows package installation not implemented"); + } + } + Ok(()) + } + pub async fn download_and_install( + &self, + url: &str, + component: &str, + binary_name: Option<&str>, + ) -> Result<()> { + let bin_path = self.base_path.join("bin").join(component); + std::fs::create_dir_all(&bin_path)?; + let filename = url.split('/').last().unwrap_or("download.tmp"); + let temp_file = if filename.starts_with('/') { + PathBuf::from(filename) + } else { + bin_path.join(filename) + }; + self.download_with_reqwest(url, &temp_file, component) + .await?; + self.handle_downloaded_file(&temp_file, &bin_path, binary_name)?; + Ok(()) + } + pub async fn download_with_reqwest( + &self, + url: &str, + temp_file: &PathBuf, + component: &str, + ) -> Result<()> { + const MAX_RETRIES: u32 = 3; + const RETRY_DELAY: std::time::Duration = std::time::Duration::from_secs(2); + let client = Client::builder() + .timeout(std::time::Duration::from_secs(30)) + .user_agent("botserver-package-manager/1.0") + .build()?; + let mut last_error = None; + for attempt in 0..=MAX_RETRIES { + if attempt > 0 { + trace!( + "Retry attempt {}/{} for {}", + attempt, + MAX_RETRIES, + component + ); + std::thread::sleep(RETRY_DELAY * attempt); + } + match self.attempt_reqwest_download(&client, url, temp_file).await { + Ok(_size) => { + if attempt > 0 { + trace!("Download succeeded on retry attempt {}", attempt); + } + return Ok(()); + } + Err(e) => { + warn!("Download attempt {} failed: {}", attempt + 1, e); + last_error = Some(e); + let _ = std::fs::remove_file(temp_file); + } + } + } + Err(anyhow::anyhow!( + "Failed to download {} after {} attempts. 
Last error: {}", + component, + MAX_RETRIES + 1, + last_error.unwrap() + )) + } + pub async fn attempt_reqwest_download( + &self, + _client: &Client, + url: &str, + temp_file: &PathBuf, + ) -> Result { + let output_path = temp_file.to_str().context("Invalid temp file path")?; + utils::download_file(url, output_path) + .await + .map_err(|e| anyhow::anyhow!("Failed to download file using shared utility: {}", e))?; + let metadata = std::fs::metadata(temp_file).context("Failed to get file metadata")?; + let size = metadata.len(); + Ok(size) + } + pub fn handle_downloaded_file( + &self, + temp_file: &PathBuf, + bin_path: &PathBuf, + binary_name: Option<&str>, + ) -> Result<()> { + let metadata = std::fs::metadata(temp_file)?; + if metadata.len() == 0 { + return Err(anyhow::anyhow!("Downloaded file is empty")); + } + let file_extension = temp_file + .extension() + .and_then(|ext| ext.to_str()) + .unwrap_or(""); + match file_extension { + "gz" | "tgz" => { + self.extract_tar_gz(temp_file, bin_path)?; + } + "zip" => { + self.extract_zip(temp_file, bin_path)?; + } + _ => { + if let Some(name) = binary_name { + self.install_binary(temp_file, bin_path, name)?; + } else { + let final_path = bin_path.join(temp_file.file_name().unwrap()); + std::fs::rename(temp_file, &final_path)?; + self.make_executable(&final_path)?; + } + } + } + Ok(()) + } + pub fn extract_tar_gz(&self, temp_file: &PathBuf, bin_path: &PathBuf) -> Result<()> { + let output = Command::new("tar") + .current_dir(bin_path) + .args(&["-xzf", temp_file.to_str().unwrap(), "--strip-components=1"]) + .output()?; + if !output.status.success() { + return Err(anyhow::anyhow!( + "tar extraction failed: {}", + String::from_utf8_lossy(&output.stderr) + )); + } + std::fs::remove_file(temp_file)?; + Ok(()) + } + pub fn extract_zip(&self, temp_file: &PathBuf, bin_path: &PathBuf) -> Result<()> { + let output = Command::new("unzip") + .current_dir(bin_path) + .args(&["-o", "-q", temp_file.to_str().unwrap()]) + .output()?; + if 
!output.status.success() { + return Err(anyhow::anyhow!( + "unzip extraction failed: {}", + String::from_utf8_lossy(&output.stderr) + )); + } + std::fs::remove_file(temp_file)?; + Ok(()) + } + pub fn install_binary( + &self, + temp_file: &PathBuf, + bin_path: &PathBuf, + name: &str, + ) -> Result<()> { + let final_path = bin_path.join(name); + std::fs::rename(temp_file, &final_path)?; + self.make_executable(&final_path)?; + Ok(()) + } + pub fn make_executable(&self, path: &PathBuf) -> Result<()> { + #[cfg(unix)] + { + use std::os::unix::fs::PermissionsExt; + let mut perms = std::fs::metadata(path)?.permissions(); + perms.set_mode(0o755); + std::fs::set_permissions(path, perms)?; + } + Ok(()) + } + pub fn run_commands(&self, commands: &[String], target: &str, component: &str) -> Result<()> { + let bin_path = if target == "local" { + self.base_path.join("bin").join(component) + } else { + PathBuf::from("/opt/gbo/bin") + }; + let data_path = if target == "local" { + self.base_path.join("data").join(component) + } else { + PathBuf::from("/opt/gbo/data") + }; + let conf_path = if target == "local" { + self.base_path.join("conf").join(component) + } else { + PathBuf::from("/opt/gbo/conf") + }; + let logs_path = if target == "local" { + self.base_path.join("logs").join(component) + } else { + PathBuf::from("/opt/gbo/logs") + }; + for cmd in commands { + let rendered_cmd = cmd + .replace("{{BIN_PATH}}", &bin_path.to_string_lossy()) + .replace("{{DATA_PATH}}", &data_path.to_string_lossy()) + .replace("{{CONF_PATH}}", &conf_path.to_string_lossy()) + .replace("{{LOGS_PATH}}", &logs_path.to_string_lossy()); + if target == "local" { + trace!("Executing command: {}", rendered_cmd); + let child = Command::new("bash") + .current_dir(&bin_path) + .args(&["-c", &rendered_cmd]) + .spawn() + .with_context(|| { + format!("Failed to spawn command for component '{}'", component) + })?; + let output = child.wait_with_output().with_context(|| { + format!( + "Failed while waiting for 
command to finish for component '{}'", + component + ) + })?; + if !output.status.success() { + error!( + "Command had non-zero exit: {}", + String::from_utf8_lossy(&output.stderr) + ); + } + } else { + self.exec_in_container(target, &rendered_cmd)?; + } + } + Ok(()) + } + pub fn exec_in_container(&self, container: &str, command: &str) -> Result<()> { + let output = Command::new("lxc") + .args(&["exec", container, "--", "bash", "-c", command]) + .output()?; + if !output.status.success() { + warn!( + "Container command failed: {}", + String::from_utf8_lossy(&output.stderr) + ); + } + Ok(()) + } + pub fn download_in_container( + &self, + container: &str, + url: &str, + _component: &str, + binary_name: Option<&str>, + ) -> Result<()> { + let download_cmd = format!("wget -O /tmp/download.tmp {}", url); + self.exec_in_container(container, &download_cmd)?; + if url.ends_with(".tar.gz") || url.ends_with(".tgz") { + self.exec_in_container(container, "tar -xzf /tmp/download.tmp -C /opt/gbo/bin")?; + } else if url.ends_with(".zip") { + self.exec_in_container(container, "unzip -o /tmp/download.tmp -d /opt/gbo/bin")?; + } else if let Some(name) = binary_name { + let mv_cmd = format!( + "mv /tmp/download.tmp /opt/gbo/bin/{} && chmod +x /opt/gbo/bin/{}", + name, name + ); + self.exec_in_container(container, &mv_cmd)?; + } + self.exec_in_container(container, "rm -f /tmp/download.tmp")?; + Ok(()) + } + pub fn mount_container_directories(&self, container: &str, component: &str) -> Result<()> { + let host_base = format!("/opt/gbo/tenants/{}/{}", self.tenant, component); + for dir in &["data", "conf", "logs"] { + let host_path = format!("{}/{}", host_base, dir); + std::fs::create_dir_all(&host_path)?; + let device_name = format!("{}-{}", component, dir); + let container_path = format!("/opt/gbo/{}", dir); + let _ = Command::new("lxc") + .args(&["config", "device", "remove", container, &device_name]) + .output(); + let output = Command::new("lxc") + .args(&[ + "config", + "device", + 
"add", + container, + &device_name, + "disk", + &format!("source={}", host_path), + &format!("path={}", container_path), + ]) + .output()?; + if !output.status.success() { + warn!("Failed to mount {} in container {}", dir, container); + } + trace!( + "Mounted {} to {} in container {}", + host_path, + container_path, + container + ); + } + Ok(()) + } + pub fn create_container_service( + &self, + container: &str, + component: &str, + exec_cmd: &str, + env_vars: &HashMap, + ) -> Result<()> { + let database_url = std::env::var("DATABASE_URL").unwrap(); + let (_db_username, db_password, _db_server, _db_port, _db_name) = + parse_database_url(&database_url); + + let rendered_cmd = exec_cmd + .replace("{{DB_PASSWORD}}", &db_password) + .replace("{{BIN_PATH}}", "/opt/gbo/bin") + .replace("{{DATA_PATH}}", "/opt/gbo/data") + .replace("{{CONF_PATH}}", "/opt/gbo/conf") + .replace("{{LOGS_PATH}}", "/opt/gbo/logs"); + let mut env_section = String::new(); + for (key, value) in env_vars { + let rendered_value = value + .replace("{{DATA_PATH}}", "/opt/gbo/data") + .replace("{{BIN_PATH}}", "/opt/gbo/bin") + .replace("{{CONF_PATH}}", "/opt/gbo/conf") + .replace("{{LOGS_PATH}}", "/opt/gbo/logs"); + env_section.push_str(&format!("Environment={}={}\n", key, rendered_value)); + } + let service_content = format!( + "[Unit]\nDescription={} Service\nAfter=network.target\n\n[Service]\nType=simple\n{}ExecStart={}\nWorkingDirectory=/opt/gbo/data\nRestart=always\nRestartSec=10\nUser=root\n\n[Install]\nWantedBy=multi-user.target\n", + component, env_section, rendered_cmd + ); + let service_file = format!("/tmp/{}.service", component); + std::fs::write(&service_file, &service_content)?; + let output = Command::new("lxc") + .args(&[ + "file", + "push", + &service_file, + &format!("{}/etc/systemd/system/{}.service", container, component), + ]) + .output()?; + if !output.status.success() { + warn!("Failed to push service file to container"); + } + self.exec_in_container(container, "systemctl 
daemon-reload")?; + self.exec_in_container(container, &format!("systemctl enable {}", component))?; + self.exec_in_container(container, &format!("systemctl start {}", component))?; + std::fs::remove_file(&service_file)?; + trace!( + "Created and started service in container {}: {}", + container, + component + ); + Ok(()) + } + pub fn setup_port_forwarding(&self, container: &str, ports: &[u16]) -> Result<()> { + for port in ports { + let device_name = format!("port-{}", port); + let _ = Command::new("lxc") + .args(&["config", "device", "remove", container, &device_name]) + .output(); + let output = Command::new("lxc") + .args(&[ + "config", + "device", + "add", + container, + &device_name, + "proxy", + &format!("listen=tcp:0.0.0.0:{}", port), + &format!("connect=tcp:127.0.0.1:{}", port), + ]) + .output()?; + if !output.status.success() { + warn!("Failed to setup port forwarding for port {}", port); + } + trace!( + "Port forwarding configured: {} -> container {}", + port, + container + ); + } + Ok(()) + } +} diff --git a/src/core/package_manager/installer.rs b/src/core/package_manager/installer.rs new file mode 100644 index 000000000..1b27781c8 --- /dev/null +++ b/src/core/package_manager/installer.rs @@ -0,0 +1,732 @@ +use crate::package_manager::component::ComponentConfig; +use crate::package_manager::os::detect_os; +use crate::package_manager::{InstallMode, OsType}; +use anyhow::Result; +use log::trace; +use std::collections::HashMap; +use std::path::PathBuf; + +#[derive(Debug)] +pub struct PackageManager { + pub mode: InstallMode, + pub os_type: OsType, + pub base_path: PathBuf, + pub tenant: String, + pub components: HashMap, +} + +impl PackageManager { + pub fn new(mode: InstallMode, tenant: Option) -> Result { + let os_type = detect_os(); + let base_path = if mode == InstallMode::Container { + PathBuf::from("/opt/gbo") + } else { + std::env::current_dir()?.join("botserver-stack") + }; + let tenant = tenant.unwrap_or_else(|| "default".to_string()); + + let mut 
pm = PackageManager { + mode, + os_type, + base_path, + tenant, + components: HashMap::new(), + }; + pm.register_components(); + Ok(pm) + } + + fn register_components(&mut self) { + self.register_tables(); + self.register_cache(); + self.register_drive(); + self.register_llm(); + self.register_email(); + self.register_proxy(); + self.register_directory(); + self.register_alm(); + self.register_alm_ci(); + self.register_dns(); + self.register_webmail(); + self.register_meeting(); + self.register_table_editor(); + self.register_doc_editor(); + self.register_desktop(); + self.register_devtools(); + self.register_vector_db(); + self.register_host(); + } + + fn register_drive(&mut self) { + self.components.insert( + "drive".to_string(), + ComponentConfig { + name: "drive".to_string(), + ports: vec![9000, 9001], + dependencies: vec![], + linux_packages: vec![], + macos_packages: vec![], + windows_packages: vec![], + download_url: Some( + "https://dl.min.io/server/minio/release/linux-amd64/minio".to_string(), + ), + binary_name: Some("minio".to_string()), + pre_install_cmds_linux: vec![], + post_install_cmds_linux: vec![], + pre_install_cmds_macos: vec![], + post_install_cmds_macos: vec![], + pre_install_cmds_windows: vec![], + post_install_cmds_windows: vec![], + env_vars: HashMap::from([ + ("MINIO_ROOT_USER".to_string(), "$DRIVE_ACCESSKEY".to_string()), + ("MINIO_ROOT_PASSWORD".to_string(), "$DRIVE_SECRET".to_string()), + ]), + data_download_list: Vec::new(), + exec_cmd: "nohup {{BIN_PATH}}/minio server {{DATA_PATH}} --address :9000 --console-address :9001 > {{LOGS_PATH}}/minio.log 2>&1 &".to_string(), + check_cmd: "ps -ef | grep minio | grep -v grep | grep {{BIN_PATH}}".to_string(), + }, + ); + } + + fn register_tables(&mut self) { + self.components.insert( + "tables".to_string(), + ComponentConfig { + name: "tables".to_string(), + ports: vec![5432], + dependencies: vec![], + linux_packages: vec![], + macos_packages: vec![], + windows_packages: vec![], + download_url: 
Some( + "https://github.com/theseus-rs/postgresql-binaries/releases/download/18.0.0/postgresql-18.0.0-x86_64-unknown-linux-gnu.tar.gz".to_string(), + ), + binary_name: Some("postgres".to_string()), + pre_install_cmds_linux: vec![], + post_install_cmds_linux: vec![ + "chmod +x ./bin/*".to_string(), + format!("if [ ! -d \"{{{{DATA_PATH}}}}/pgdata\" ]; then PG_PASSWORD={{DB_PASSWORD}} ./bin/initdb -D {{{{DATA_PATH}}}}/pgdata -U gbuser --pwfile=<(echo $PG_PASSWORD); fi"), + "echo \"data_directory = '{{DATA_PATH}}/pgdata'\" > {{CONF_PATH}}/postgresql.conf".to_string(), + "echo \"ident_file = '{{CONF_PATH}}/pg_ident.conf'\" >> {{CONF_PATH}}/postgresql.conf".to_string(), + "echo \"port = 5432\" >> {{CONF_PATH}}/postgresql.conf".to_string(), + "echo \"listen_addresses = '*'\" >> {{CONF_PATH}}/postgresql.conf".to_string(), + "echo \"log_directory = '{{LOGS_PATH}}'\" >> {{CONF_PATH}}/postgresql.conf".to_string(), + "echo \"logging_collector = on\" >> {{CONF_PATH}}/postgresql.conf".to_string(), + "echo \"host all all all md5\" > {{CONF_PATH}}/pg_hba.conf".to_string(), + "touch {{CONF_PATH}}/pg_ident.conf".to_string(), + "./bin/pg_ctl -D {{DATA_PATH}}/pgdata -l {{LOGS_PATH}}/postgres.log start -w -t 30".to_string(), + "sleep 5".to_string(), + "for i in $(seq 1 30); do ./bin/pg_isready -h localhost -p 5432 -U gbuser >/dev/null 2>&1 && echo 'PostgreSQL is ready' && break || echo \"Waiting for PostgreSQL... attempt $i/30\" >&2; sleep 2; done".to_string(), + "./bin/pg_isready -h localhost -p 5432 -U gbuser || { echo 'ERROR: PostgreSQL failed to start properly' >&2; cat {{LOGS_PATH}}/postgres.log >&2; exit 1; }".to_string(), + format!("PGPASSWORD={{DB_PASSWORD}} ./bin/psql -h localhost -p 5432 -U gbuser -d postgres -c \"CREATE DATABASE botserver WITH OWNER gbuser\" 2>&1 | grep -v 'already exists' || true"), + ], + pre_install_cmds_macos: vec![], + post_install_cmds_macos: vec![ + "chmod +x ./bin/*".to_string(), + "if [ ! 
-d \"{{DATA_PATH}}/pgdata\" ]; then ./bin/initdb -A -D {{DATA_PATH}}/pgdata -U postgres; fi".to_string(), + ], + pre_install_cmds_windows: vec![], + post_install_cmds_windows: vec![], + env_vars: HashMap::new(), + data_download_list: Vec::new(), + exec_cmd: "./bin/pg_ctl -D {{DATA_PATH}}/pgdata -l {{LOGS_PATH}}/postgres.log start -w -t 30 > {{LOGS_PATH}}/stdout.log 2>&1 &".to_string(), + check_cmd: "{{BIN_PATH}}/bin/pg_isready -h localhost -p 5432 -U gbuser >/dev/null 2>&1".to_string(), + }, + ); + } + + fn register_cache(&mut self) { + self.components.insert( + "cache".to_string(), + ComponentConfig { + name: "cache".to_string(), + + ports: vec![6379], + dependencies: vec![], + linux_packages: vec![], + macos_packages: vec![], + windows_packages: vec![], + download_url: Some( + "https://download.valkey.io/releases/valkey-9.0.0-jammy-x86_64.tar.gz".to_string(), + ), + binary_name: Some("valkey-server".to_string()), + pre_install_cmds_linux: vec![], + post_install_cmds_linux: vec![ + "chmod +x {{BIN_PATH}}/bin/valkey-server".to_string(), + ], + pre_install_cmds_macos: vec![], + post_install_cmds_macos: vec![], + pre_install_cmds_windows: vec![], + post_install_cmds_windows: vec![], + env_vars: HashMap::new(), + data_download_list: Vec::new(), + exec_cmd: "nohup {{BIN_PATH}}/bin/valkey-server --port 6379 --dir {{DATA_PATH}} > {{LOGS_PATH}}/valkey.log 2>&1 && {{BIN_PATH}}/bin/valkey-cli CONFIG SET stop-writes-on-bgsave-error no 2>&1 &".to_string(), + check_cmd: "{{BIN_PATH}}/bin/valkey-cli ping | grep -q PONG".to_string(), + }, + ); + } + + fn register_llm(&mut self) { + self.components.insert( + "llm".to_string(), + ComponentConfig { + name: "llm".to_string(), + + ports: vec![8081, 8082], + dependencies: vec![], + linux_packages: vec![], + macos_packages: vec![], + windows_packages: vec![], + download_url: Some( + "https://github.com/ggml-org/llama.cpp/releases/download/b6148/llama-b6148-bin-ubuntu-x64.zip".to_string(), + ), + binary_name: 
Some("llama-server".to_string()), + pre_install_cmds_linux: vec![], + post_install_cmds_linux: vec![], + pre_install_cmds_macos: vec![], + post_install_cmds_macos: vec![], + pre_install_cmds_windows: vec![], + post_install_cmds_windows: vec![], + env_vars: HashMap::new(), + data_download_list: vec![ + "https://huggingface.co/bartowski/DeepSeek-R1-Distill-Qwen-1.5B-GGUF/resolve/main/DeepSeek-R1-Distill-Qwen-1.5B-Q3_K_M.gguf".to_string(), + "https://huggingface.co/CompendiumLabs/bge-small-en-v1.5-gguf/resolve/main/bge-small-en-v1.5-f32.gguf".to_string(), + ], + exec_cmd: "".to_string(), + check_cmd: "".to_string(), + }, + ); + } + + fn register_email(&mut self) { + self.components.insert( + "email".to_string(), + ComponentConfig { + name: "email".to_string(), + ports: vec![25, 80, 110, 143, 465, 587, 993, 995, 4190], + dependencies: vec![], + linux_packages: vec![], + macos_packages: vec![], + windows_packages: vec![], + download_url: Some( + "https://github.com/stalwartlabs/stalwart/releases/download/v0.13.1/stalwart-x86_64-unknown-linux-gnu.tar.gz".to_string(), + ), + binary_name: Some("stalwart".to_string()), + pre_install_cmds_linux: vec![], + post_install_cmds_linux: vec![ + "setcap 'cap_net_bind_service=+ep' {{BIN_PATH}}/stalwart".to_string(), + ], + pre_install_cmds_macos: vec![], + post_install_cmds_macos: vec![], + pre_install_cmds_windows: vec![], + post_install_cmds_windows: vec![], + env_vars: HashMap::new(), + data_download_list: Vec::new(), + exec_cmd: "{{BIN_PATH}}/stalwart --config {{CONF_PATH}}/config.toml".to_string(), + check_cmd: "curl -f http://localhost:25 >/dev/null 2>&1".to_string(), + }, + ); + } + + fn register_proxy(&mut self) { + self.components.insert( + "proxy".to_string(), + ComponentConfig { + name: "proxy".to_string(), + ports: vec![80, 443], + dependencies: vec![], + linux_packages: vec![], + macos_packages: vec![], + windows_packages: vec![], + download_url: Some( + 
"https://github.com/caddyserver/caddy/releases/download/v2.10.0-beta.3/caddy_2.10.0-beta.3_linux_amd64.tar.gz".to_string(), + ), + binary_name: Some("caddy".to_string()), + pre_install_cmds_linux: vec![], + post_install_cmds_linux: vec![ + "setcap 'cap_net_bind_service=+ep' {{BIN_PATH}}/caddy".to_string(), + ], + pre_install_cmds_macos: vec![], + post_install_cmds_macos: vec![], + pre_install_cmds_windows: vec![], + post_install_cmds_windows: vec![], + env_vars: HashMap::from([("XDG_DATA_HOME".to_string(), "{{DATA_PATH}}".to_string())]), + data_download_list: Vec::new(), + exec_cmd: "{{BIN_PATH}}/caddy run --config {{CONF_PATH}}/Caddyfile".to_string(), + check_cmd: "curl -f http://localhost >/dev/null 2>&1".to_string(), + }, + ); + } + + fn register_directory(&mut self) { + self.components.insert( + "directory".to_string(), + ComponentConfig { + name: "directory".to_string(), + + ports: vec![8080], + dependencies: vec![], + linux_packages: vec![], + macos_packages: vec![], + windows_packages: vec![], + download_url: Some( + "https://github.com/zitadel/zitadel/releases/download/v2.71.2/zitadel-linux-amd64.tar.gz".to_string(), + ), + binary_name: Some("zitadel".to_string()), + pre_install_cmds_linux: vec![], + post_install_cmds_linux: vec![ + "setcap 'cap_net_bind_service=+ep' {{BIN_PATH}}/zitadel".to_string(), + ], + pre_install_cmds_macos: vec![], + post_install_cmds_macos: vec![], + pre_install_cmds_windows: vec![], + post_install_cmds_windows: vec![], + env_vars: HashMap::new(), + data_download_list: Vec::new(), + exec_cmd: "{{BIN_PATH}}/zitadel start --config {{CONF_PATH}}/zitadel.yaml".to_string(), + check_cmd: "curl -f http://localhost:8080 >/dev/null 2>&1".to_string(), + }, + ); + } + + fn register_alm(&mut self) { + self.components.insert( + "alm".to_string(), + ComponentConfig { + name: "alm".to_string(), + + ports: vec![3000], + dependencies: vec![], + linux_packages: vec![], + macos_packages: vec![], + windows_packages: vec![], + download_url: Some( + 
"https://codeberg.org/forgejo/forgejo/releases/download/v10.0.2/forgejo-10.0.2-linux-amd64".to_string(), + ), + binary_name: Some("forgejo".to_string()), + pre_install_cmds_linux: vec![], + post_install_cmds_linux: vec![], + pre_install_cmds_macos: vec![], + post_install_cmds_macos: vec![], + pre_install_cmds_windows: vec![], + post_install_cmds_windows: vec![], + env_vars: HashMap::from([ + ("USER".to_string(), "alm".to_string()), + ("HOME".to_string(), "{{DATA_PATH}}".to_string()), + ]), + data_download_list: Vec::new(), + exec_cmd: "{{BIN_PATH}}/forgejo web --work-path {{DATA_PATH}}".to_string(), + check_cmd: "curl -f http://localhost:3000 >/dev/null 2>&1".to_string(), + }, + ); + } + + fn register_alm_ci(&mut self) { + self.components.insert( + "alm-ci".to_string(), + ComponentConfig { + name: "alm-ci".to_string(), + + ports: vec![], + dependencies: vec!["alm".to_string()], + linux_packages: vec![ + ], + macos_packages: vec!["git".to_string(), "node".to_string()], + windows_packages: vec![], + download_url: Some( + "https://code.forgejo.org/forgejo/runner/releases/download/v6.3.1/forgejo-runner-6.3.1-linux-amd64".to_string(), + ), + binary_name: Some("forgejo-runner".to_string()), + pre_install_cmds_linux: vec![ + ], + post_install_cmds_linux: vec![], + pre_install_cmds_macos: vec![], + post_install_cmds_macos: vec![], + pre_install_cmds_windows: vec![], + post_install_cmds_windows: vec![], + env_vars: HashMap::new(), + data_download_list: Vec::new(), + exec_cmd: "{{BIN_PATH}}/forgejo-runner daemon --config {{CONF_PATH}}/config.yaml".to_string(), + check_cmd: "ps -ef | grep forgejo-runner | grep -v grep | grep {{BIN_PATH}}".to_string(), + }, + ); + } + + fn register_dns(&mut self) { + self.components.insert( + "dns".to_string(), + ComponentConfig { + name: "dns".to_string(), + + ports: vec![53], + dependencies: vec![], + linux_packages: vec![], + macos_packages: vec![], + windows_packages: vec![], + download_url: Some( + 
"https://github.com/coredns/coredns/releases/download/v1.12.4/coredns_1.12.4_linux_amd64.tgz".to_string(), + ), + binary_name: Some("coredns".to_string()), + pre_install_cmds_linux: vec![], + post_install_cmds_linux: vec![ + "setcap cap_net_bind_service=+ep {{BIN_PATH}}/coredns".to_string(), + ], + pre_install_cmds_macos: vec![], + post_install_cmds_macos: vec![], + pre_install_cmds_windows: vec![], + post_install_cmds_windows: vec![], + env_vars: HashMap::new(), + data_download_list: Vec::new(), + exec_cmd: "{{BIN_PATH}}/coredns -conf {{CONF_PATH}}/Corefile".to_string(), + check_cmd: "dig @localhost example.com >/dev/null 2>&1".to_string(), + }, + ); + } + + fn register_webmail(&mut self) { + self.components.insert( + "webmail".to_string(), + ComponentConfig { + name: "webmail".to_string(), + + ports: vec![8080], + dependencies: vec!["email".to_string()], + linux_packages: vec![ + "ca-certificates".to_string(), + "apt-transport-https".to_string(), + "php8.1".to_string(), + "php8.1-fpm".to_string(), + ], + macos_packages: vec!["php".to_string()], + windows_packages: vec![], + download_url: Some( + "https://github.com/roundcube/roundcubemail/releases/download/1.6.6/roundcubemail-1.6.6-complete.tar.gz".to_string(), + ), + binary_name: None, + pre_install_cmds_linux: vec![], + post_install_cmds_linux: vec![], + pre_install_cmds_macos: vec![], + post_install_cmds_macos: vec![], + pre_install_cmds_windows: vec![], + post_install_cmds_windows: vec![], + env_vars: HashMap::new(), + data_download_list: Vec::new(), + exec_cmd: "php -S 0.0.0.0:8080 -t {{DATA_PATH}}/roundcubemail".to_string(), + check_cmd: "curl -f http://localhost:8080 >/dev/null 2>&1".to_string(), + }, + ); + } + + fn register_meeting(&mut self) { + self.components.insert( + "meeting".to_string(), + ComponentConfig { + name: "meeting".to_string(), + + ports: vec![7880, 3478], + dependencies: vec![], + linux_packages: vec![], + macos_packages: vec![], + windows_packages: vec![], + download_url: Some( + 
"https://github.com/livekit/livekit/releases/download/v1.8.4/livekit_1.8.4_linux_amd64.tar.gz".to_string(), + ), + binary_name: Some("livekit-server".to_string()), + pre_install_cmds_linux: vec![], + post_install_cmds_linux: vec![], + pre_install_cmds_macos: vec![], + post_install_cmds_macos: vec![], + pre_install_cmds_windows: vec![], + post_install_cmds_windows: vec![], + env_vars: HashMap::new(), + data_download_list: Vec::new(), + exec_cmd: "{{BIN_PATH}}/livekit-server --config {{CONF_PATH}}/config.yaml".to_string(), + check_cmd: "curl -f http://localhost:7880 >/dev/null 2>&1".to_string(), + }, + ); + } + + fn register_table_editor(&mut self) { + self.components.insert( + "table_editor".to_string(), + ComponentConfig { + name: "table_editor".to_string(), + + ports: vec![5757], + dependencies: vec!["tables".to_string()], + linux_packages: vec![], + macos_packages: vec![], + windows_packages: vec![], + download_url: Some("http://get.nocodb.com/linux-x64".to_string()), + binary_name: Some("nocodb".to_string()), + pre_install_cmds_linux: vec![], + post_install_cmds_linux: vec![], + pre_install_cmds_macos: vec![], + post_install_cmds_macos: vec![], + pre_install_cmds_windows: vec![], + post_install_cmds_windows: vec![], + env_vars: HashMap::new(), + data_download_list: Vec::new(), + exec_cmd: "{{BIN_PATH}}/nocodb".to_string(), + check_cmd: "curl -f http://localhost:5757 >/dev/null 2>&1".to_string(), + }, + ); + } + + fn register_doc_editor(&mut self) { + self.components.insert( + "doc_editor".to_string(), + ComponentConfig { + name: "doc_editor".to_string(), + + ports: vec![9980], + dependencies: vec![], + linux_packages: vec![], + macos_packages: vec![], + windows_packages: vec![], + download_url: None, + binary_name: Some("coolwsd".to_string()), + pre_install_cmds_linux: vec![], + post_install_cmds_linux: vec![], + pre_install_cmds_macos: vec![], + post_install_cmds_macos: vec![], + pre_install_cmds_windows: vec![], + post_install_cmds_windows: vec![], + env_vars: 
HashMap::new(), + data_download_list: Vec::new(), + exec_cmd: "coolwsd --config-file={{CONF_PATH}}/coolwsd.xml".to_string(), + check_cmd: "curl -f http://localhost:9980 >/dev/null 2>&1".to_string(), + }, + ); + } + + fn register_desktop(&mut self) { + self.components.insert( + "desktop".to_string(), + ComponentConfig { + name: "desktop".to_string(), + + ports: vec![3389], + dependencies: vec![], + linux_packages: vec!["xvfb".to_string(), "xrdp".to_string(), "xfce4".to_string()], + macos_packages: vec![], + windows_packages: vec![], + download_url: None, + binary_name: None, + pre_install_cmds_linux: vec![], + post_install_cmds_linux: vec![], + pre_install_cmds_macos: vec![], + post_install_cmds_macos: vec![], + pre_install_cmds_windows: vec![], + post_install_cmds_windows: vec![], + env_vars: HashMap::new(), + data_download_list: Vec::new(), + exec_cmd: "xrdp --nodaemon".to_string(), + check_cmd: "netstat -tln | grep :3389 >/dev/null 2>&1".to_string(), + }, + ); + } + + fn register_devtools(&mut self) { + self.components.insert( + "devtools".to_string(), + ComponentConfig { + name: "devtools".to_string(), + + ports: vec![], + dependencies: vec![], + linux_packages: vec!["xclip".to_string(), "git".to_string(), "curl".to_string()], + macos_packages: vec!["git".to_string()], + windows_packages: vec![], + download_url: None, + binary_name: None, + pre_install_cmds_linux: vec![], + post_install_cmds_linux: vec![], + pre_install_cmds_macos: vec![], + post_install_cmds_macos: vec![], + pre_install_cmds_windows: vec![], + post_install_cmds_windows: vec![], + env_vars: HashMap::new(), + data_download_list: Vec::new(), + exec_cmd: "".to_string(), + check_cmd: "".to_string(), + }, + ); + } + + fn register_botserver(&mut self) { + self.components.insert( + "system".to_string(), + ComponentConfig { + name: "system".to_string(), + + ports: vec![8000], + dependencies: vec![], + linux_packages: vec!["curl".to_string(), "unzip".to_string(), "git".to_string()], + macos_packages: 
vec![], + windows_packages: vec![], + download_url: None, + binary_name: None, + pre_install_cmds_linux: vec![], + post_install_cmds_linux: vec![], + pre_install_cmds_macos: vec![], + post_install_cmds_macos: vec![], + pre_install_cmds_windows: vec![], + post_install_cmds_windows: vec![], + env_vars: HashMap::new(), + data_download_list: Vec::new(), + exec_cmd: "".to_string(), + check_cmd: "".to_string(), + }, + ); + } + + fn register_vector_db(&mut self) { + self.components.insert( + "vector_db".to_string(), + ComponentConfig { + name: "vector_db".to_string(), + + ports: vec![6333], + dependencies: vec![], + linux_packages: vec![], + macos_packages: vec![], + windows_packages: vec![], + download_url: Some( + "https://github.com/qdrant/qdrant/releases/latest/download/qdrant-x86_64-unknown-linux-gnu.tar.gz".to_string(), + ), + binary_name: Some("qdrant".to_string()), + pre_install_cmds_linux: vec![], + post_install_cmds_linux: vec![], + pre_install_cmds_macos: vec![], + post_install_cmds_macos: vec![], + pre_install_cmds_windows: vec![], + post_install_cmds_windows: vec![], + env_vars: HashMap::new(), + data_download_list: Vec::new(), + exec_cmd: "{{BIN_PATH}}/qdrant --storage-path {{DATA_PATH}}".to_string(), + check_cmd: "curl -f http://localhost:6333 >/dev/null 2>&1".to_string(), + }, + ); + } + + fn register_host(&mut self) { + self.components.insert( + "host".to_string(), + ComponentConfig { + name: "host".to_string(), + + ports: vec![], + dependencies: vec![], + linux_packages: vec!["sshfs".to_string(), "bridge-utils".to_string()], + macos_packages: vec![], + windows_packages: vec![], + download_url: None, + binary_name: None, + pre_install_cmds_linux: vec![ + "echo 'net.ipv4.ip_forward=1' | tee -a /etc/sysctl.conf".to_string(), + "sysctl -p".to_string(), + ], + post_install_cmds_linux: vec![ + "lxd init --auto".to_string(), + "lxc storage create default dir".to_string(), + "lxc profile device add default root disk path=/ pool=default".to_string(), + ], + 
pre_install_cmds_macos: vec![], + post_install_cmds_macos: vec![], + pre_install_cmds_windows: vec![], + post_install_cmds_windows: vec![], + env_vars: HashMap::new(), + data_download_list: Vec::new(), + exec_cmd: "".to_string(), + check_cmd: "".to_string(), + }, + ); + } + + pub fn start(&self, component: &str) -> Result { + if let Some(component) = self.components.get(component) { + let bin_path = self.base_path.join("bin").join(&component.name); + let data_path = self.base_path.join("data").join(&component.name); + let conf_path = self.base_path.join("conf").join(&component.name); + let logs_path = self.base_path.join("logs").join(&component.name); + + // First check if the service is already running + let check_cmd = component + .check_cmd + .replace("{{BIN_PATH}}", &bin_path.to_string_lossy()) + .replace("{{DATA_PATH}}", &data_path.to_string_lossy()) + .replace("{{CONF_PATH}}", &conf_path.to_string_lossy()) + .replace("{{LOGS_PATH}}", &logs_path.to_string_lossy()); + + let check_status = std::process::Command::new("sh") + .current_dir(&bin_path) + .arg("-c") + .arg(&check_cmd) + .status(); + + if check_status.is_ok() && check_status.unwrap().success() { + trace!("Component {} is already running", component.name); + return Ok(std::process::Command::new("sh") + .arg("-c") + .arg("true") + .spawn()?); + } + + // If not running, execute the main command + let rendered_cmd = component + .exec_cmd + .replace("{{BIN_PATH}}", &bin_path.to_string_lossy()) + .replace("{{DATA_PATH}}", &data_path.to_string_lossy()) + .replace("{{CONF_PATH}}", &conf_path.to_string_lossy()) + .replace("{{LOGS_PATH}}", &logs_path.to_string_lossy()); + + trace!( + "Starting component {} with command: {}", + component.name, + rendered_cmd + ); + + // Create new env vars map with evaluated $VAR references + let mut evaluated_envs = HashMap::new(); + for (k, v) in &component.env_vars { + if v.starts_with('$') { + let var_name = &v[1..]; + evaluated_envs.insert(k.clone(), 
std::env::var(var_name).unwrap_or_default()); + } else { + evaluated_envs.insert(k.clone(), v.clone()); + } + } + + let child = std::process::Command::new("sh") + .current_dir(&bin_path) + .arg("-c") + .arg(&rendered_cmd) + .envs(&evaluated_envs) + .spawn(); + + std::thread::sleep(std::time::Duration::from_secs(2)); + + match child { + Ok(c) => Ok(c), + Err(e) => { + let err_msg = e.to_string(); + if err_msg.contains("already running") + || err_msg.contains("be running") + || component.name == "tables" + { + trace!( + "Component {} may already be running, continuing anyway", + component.name + ); + Ok(std::process::Command::new("sh").arg("-c").spawn()?) + } else { + Err(e.into()) + } + } + } + } else { + Err(anyhow::anyhow!("Component {} not found", component)) + } + } +} diff --git a/src/core/package_manager/mod.rs b/src/core/package_manager/mod.rs new file mode 100644 index 000000000..d655a1487 --- /dev/null +++ b/src/core/package_manager/mod.rs @@ -0,0 +1,43 @@ +pub mod component; +pub mod installer; +pub mod os; +pub mod setup; +pub use installer::PackageManager; +pub mod cli; +pub mod facade; +#[derive(Debug, Clone, PartialEq)] +pub enum InstallMode { + Local, + Container, +} +#[derive(Debug, Clone, PartialEq)] +pub enum OsType { + Linux, + MacOS, + Windows, +} +#[derive(Debug)] +pub struct ComponentInfo { + pub name: &'static str, + pub termination_command: &'static str, +} +pub fn get_all_components() -> Vec { + vec![ + ComponentInfo { + name: "tables", + termination_command: "postgres", + }, + ComponentInfo { + name: "cache", + termination_command: "redis-server", + }, + ComponentInfo { + name: "drive", + termination_command: "minio", + }, + ComponentInfo { + name: "llm", + termination_command: "llama-server", + }, + ] +} diff --git a/src/core/package_manager/os.rs b/src/core/package_manager/os.rs new file mode 100644 index 000000000..92f7fe093 --- /dev/null +++ b/src/core/package_manager/os.rs @@ -0,0 +1,12 @@ +use crate::package_manager::OsType; +pub fn 
detect_os() -> OsType { + if cfg!(target_os = "linux") { + OsType::Linux + } else if cfg!(target_os = "macos") { + OsType::MacOS + } else if cfg!(target_os = "windows") { + OsType::Windows + } else { + OsType::Linux + } +} diff --git a/src/core/package_manager/package_manager.test.rs b/src/core/package_manager/package_manager.test.rs new file mode 100644 index 000000000..50bc59dfa --- /dev/null +++ b/src/core/package_manager/package_manager.test.rs @@ -0,0 +1,25 @@ +#[cfg(test)] +mod tests { + use super::*; + use crate::tests::test_util; + #[test] + fn test_package_manager_module() { + test_util::setup(); + assert!(true, "Basic package manager module test"); + } + #[test] + fn test_cli_interface() { + test_util::setup(); + assert!(true, "CLI interface placeholder test"); + } + #[test] + fn test_component_management() { + test_util::setup(); + assert!(true, "Component management placeholder test"); + } + #[test] + fn test_os_specific() { + test_util::setup(); + assert!(true, "OS-specific functionality placeholder test"); + } +} diff --git a/src/core/package_manager/setup/directory_setup.rs b/src/core/package_manager/setup/directory_setup.rs new file mode 100644 index 000000000..54b044ef2 --- /dev/null +++ b/src/core/package_manager/setup/directory_setup.rs @@ -0,0 +1,497 @@ +use anyhow::Result; +use reqwest::Client; +use serde::{Deserialize, Serialize}; +use serde_json::json; +use std::path::PathBuf; +use std::time::Duration; +use tokio::fs; +use tokio::time::sleep; + +/// Directory (Zitadel) auto-setup manager +#[derive(Debug)] +pub struct DirectorySetup { + base_url: String, + client: Client, + admin_token: Option, + config_path: PathBuf, +} + +impl DirectorySetup { + /// Set the admin token + pub fn set_admin_token(&mut self, token: String) { + self.admin_token = Some(token); + } + + /// Get or initialize admin token + pub async fn ensure_admin_token(&mut self) -> Result<()> { + if self.admin_token.is_none() { + let token = std::env::var("DIRECTORY_ADMIN_TOKEN") 
+ .unwrap_or_else(|_| "zitadel-admin-sa".to_string()); + self.admin_token = Some(token); + } + Ok(()) + } +} + +#[derive(Debug, Serialize, Deserialize)] +pub struct DefaultOrganization { + pub id: String, + pub name: String, + pub domain: String, +} + +#[derive(Debug, Serialize, Deserialize)] +pub struct DefaultUser { + pub id: String, + pub username: String, + pub email: String, + pub password: String, + pub first_name: String, + pub last_name: String, +} + +#[derive(Debug, Serialize, Deserialize)] +pub struct DirectoryConfig { + pub base_url: String, + pub default_org: DefaultOrganization, + pub default_user: DefaultUser, + pub admin_token: String, + pub project_id: String, + pub client_id: String, + pub client_secret: String, +} + +impl DirectorySetup { + pub fn new(base_url: String, config_path: PathBuf) -> Self { + Self { + base_url, + client: Client::builder() + .timeout(Duration::from_secs(30)) + .build() + .unwrap(), + admin_token: None, + config_path, + } + } + + /// Wait for directory service to be ready + pub async fn wait_for_ready(&self, max_attempts: u32) -> Result<()> { + log::info!("Waiting for Directory service to be ready..."); + + for attempt in 1..=max_attempts { + match self + .client + .get(format!("{}/debug/ready", self.base_url)) + .send() + .await + { + Ok(response) if response.status().is_success() => { + log::info!("Directory service is ready!"); + return Ok(()); + } + _ => { + log::debug!( + "Directory not ready yet (attempt {}/{})", + attempt, + max_attempts + ); + sleep(Duration::from_secs(3)).await; + } + } + } + + anyhow::bail!("Directory service did not become ready in time") + } + + /// Initialize directory with default configuration + pub async fn initialize(&mut self) -> Result { + log::info!("🔧 Initializing Directory (Zitadel) with defaults..."); + + // Check if already initialized + if let Ok(existing_config) = self.load_existing_config().await { + log::info!("Directory already initialized, using existing config"); + return 
Ok(existing_config); + } + + // Wait for service to be ready + self.wait_for_ready(30).await?; + + // Get initial admin token (from Zitadel setup) + self.ensure_admin_token().await?; + + // Create default organization + let org = self.create_default_organization().await?; + log::info!("✅ Created default organization: {}", org.name); + + // Create default user + let user = self.create_default_user(&org.id).await?; + log::info!("✅ Created default user: {}", user.username); + + // Create OAuth2 application for BotServer + let (project_id, client_id, client_secret) = self.create_oauth_application(&org.id).await?; + log::info!("✅ Created OAuth2 application"); + + // Grant user admin permissions + self.grant_user_permissions(&org.id, &user.id).await?; + log::info!("✅ Granted admin permissions to default user"); + + let config = DirectoryConfig { + base_url: self.base_url.clone(), + default_org: org, + default_user: user, + admin_token: self.admin_token.clone().unwrap_or_default(), + project_id, + client_id, + client_secret, + }; + + // Save configuration + self.save_config_internal(&config).await?; + log::info!("✅ Saved Directory configuration"); + + log::info!("🎉 Directory initialization complete!"); + log::info!( + "📧 Default user: {} / {}", + config.default_user.email, + config.default_user.password + ); + log::info!("🌐 Login at: {}", self.base_url); + + Ok(config) + } + + /// Create an organization + pub async fn create_organization(&mut self, name: &str, description: &str) -> Result { + // Ensure we have admin token + self.ensure_admin_token().await?; + + let response = self + .client + .post(format!("{}/management/v1/orgs", self.base_url)) + .bearer_auth(self.admin_token.as_ref().unwrap()) + .json(&json!({ + "name": name, + "description": description, + })) + .send() + .await?; + + if !response.status().is_success() { + let error_text = response.text().await?; + anyhow::bail!("Failed to create organization: {}", error_text); + } + + let result: serde_json::Value = 
response.json().await?; + Ok(result["id"].as_str().unwrap_or("").to_string()) + } + + /// Create default organization + async fn create_default_organization(&self) -> Result { + let org_name = + std::env::var("DIRECTORY_DEFAULT_ORG").unwrap_or_else(|_| "BotServer".to_string()); + + let response = self + .client + .post(format!("{}/management/v1/orgs", self.base_url)) + .bearer_auth(self.admin_token.as_ref().unwrap()) + .json(&json!({ + "name": org_name, + })) + .send() + .await?; + + if !response.status().is_success() { + let error_text = response.text().await?; + anyhow::bail!("Failed to create organization: {}", error_text); + } + + let result: serde_json::Value = response.json().await?; + + Ok(DefaultOrganization { + id: result["id"].as_str().unwrap_or("").to_string(), + name: org_name.clone(), + domain: format!("{}.localhost", org_name.to_lowercase()), + }) + } + + /// Create a user in an organization + pub async fn create_user( + &mut self, + org_id: &str, + username: &str, + email: &str, + password: &str, + first_name: &str, + last_name: &str, + is_admin: bool, + ) -> Result { + // Ensure we have admin token + self.ensure_admin_token().await?; + + let response = self + .client + .post(format!("{}/management/v1/users/human", self.base_url)) + .bearer_auth(self.admin_token.as_ref().unwrap()) + .json(&json!({ + "userName": username, + "profile": { + "firstName": first_name, + "lastName": last_name, + "displayName": format!("{} {}", first_name, last_name) + }, + "email": { + "email": email, + "isEmailVerified": true + }, + "password": password, + "organisation": { + "orgId": org_id + } + })) + .send() + .await?; + + if !response.status().is_success() { + let error_text = response.text().await?; + anyhow::bail!("Failed to create user: {}", error_text); + } + + let result: serde_json::Value = response.json().await?; + + let user = DefaultUser { + id: result["userId"].as_str().unwrap_or("").to_string(), + username: username.to_string(), + email: email.to_string(), + 
password: password.to_string(), + first_name: first_name.to_string(), + last_name: last_name.to_string(), + }; + + // Grant admin permissions if requested + if is_admin { + self.grant_user_permissions(org_id, &user.id).await?; + } + + Ok(user) + } + + /// Create default user in organization + async fn create_default_user(&self, org_id: &str) -> Result { + let username = + std::env::var("DIRECTORY_DEFAULT_USERNAME").unwrap_or_else(|_| "admin".to_string()); + let email = std::env::var("DIRECTORY_DEFAULT_EMAIL") + .unwrap_or_else(|_| "admin@localhost".to_string()); + let password = std::env::var("DIRECTORY_DEFAULT_PASSWORD") + .unwrap_or_else(|_| "BotServer123!".to_string()); + + let response = self + .client + .post(format!("{}/management/v1/users/human", self.base_url)) + .bearer_auth(self.admin_token.as_ref().unwrap()) + .json(&json!({ + "userName": username, + "profile": { + "firstName": "Admin", + "lastName": "User", + "displayName": "Administrator" + }, + "email": { + "email": email, + "isEmailVerified": true + }, + "password": password, + "organisation": { + "orgId": org_id + } + })) + .send() + .await?; + + if !response.status().is_success() { + let error_text = response.text().await?; + anyhow::bail!("Failed to create user: {}", error_text); + } + + let result: serde_json::Value = response.json().await?; + + Ok(DefaultUser { + id: result["userId"].as_str().unwrap_or("").to_string(), + username: username.clone(), + email: email.clone(), + password: password.clone(), + first_name: "Admin".to_string(), + last_name: "User".to_string(), + }) + } + + /// Create OAuth2 application for BotServer + pub async fn create_oauth_application( + &self, + _org_id: &str, + ) -> Result<(String, String, String)> { + let app_name = "BotServer"; + let redirect_uri = std::env::var("DIRECTORY_REDIRECT_URI") + .unwrap_or_else(|_| "http://localhost:8080/auth/callback".to_string()); + + // Create project + let project_response = self + .client + 
.post(format!("{}/management/v1/projects", self.base_url)) + .bearer_auth(self.admin_token.as_ref().unwrap()) + .json(&json!({ + "name": app_name, + })) + .send() + .await?; + + let project_result: serde_json::Value = project_response.json().await?; + let project_id = project_result["id"].as_str().unwrap_or("").to_string(); + + // Create OIDC application + let app_response = self.client + .post(format!("{}/management/v1/projects/{}/apps/oidc", self.base_url, project_id)) + .bearer_auth(self.admin_token.as_ref().unwrap()) + .json(&json!({ + "name": app_name, + "redirectUris": [redirect_uri], + "responseTypes": ["OIDC_RESPONSE_TYPE_CODE"], + "grantTypes": ["OIDC_GRANT_TYPE_AUTHORIZATION_CODE", "OIDC_GRANT_TYPE_REFRESH_TOKEN"], + "appType": "OIDC_APP_TYPE_WEB", + "authMethodType": "OIDC_AUTH_METHOD_TYPE_BASIC", + "postLogoutRedirectUris": ["http://localhost:8080"], + })) + .send() + .await?; + + let app_result: serde_json::Value = app_response.json().await?; + let client_id = app_result["clientId"].as_str().unwrap_or("").to_string(); + let client_secret = app_result["clientSecret"] + .as_str() + .unwrap_or("") + .to_string(); + + Ok((project_id, client_id, client_secret)) + } + + /// Grant admin permissions to user + pub async fn grant_user_permissions(&self, org_id: &str, user_id: &str) -> Result<()> { + // Grant ORG_OWNER role + let _response = self + .client + .post(format!( + "{}/management/v1/orgs/{}/members", + self.base_url, org_id + )) + .bearer_auth(self.admin_token.as_ref().unwrap()) + .json(&json!({ + "userId": user_id, + "roles": ["ORG_OWNER"] + })) + .send() + .await?; + + Ok(()) + } + + /// Save configuration to file + pub async fn save_config( + &mut self, + org_id: String, + org_name: String, + admin_user: DefaultUser, + client_id: String, + client_secret: String, + ) -> Result { + // Get or create admin token + self.ensure_admin_token().await?; + + let config = DirectoryConfig { + base_url: self.base_url.clone(), + default_org: DefaultOrganization { + 
id: org_id, + name: org_name.clone(), + domain: format!("{}.localhost", org_name.to_lowercase()), + }, + default_user: admin_user, + admin_token: self.admin_token.clone().unwrap_or_default(), + project_id: String::new(), // This will be set if OAuth app is created + client_id, + client_secret, + }; + + // Save to file + let json = serde_json::to_string_pretty(&config)?; + fs::write(&self.config_path, json).await?; + + log::info!("Saved Directory configuration to {:?}", self.config_path); + Ok(config) + } + + /// Internal save configuration to file + async fn save_config_internal(&self, config: &DirectoryConfig) -> Result<()> { + let json = serde_json::to_string_pretty(config)?; + fs::write(&self.config_path, json).await?; + Ok(()) + } + + /// Load existing configuration + async fn load_existing_config(&self) -> Result { + let content = fs::read_to_string(&self.config_path).await?; + let config: DirectoryConfig = serde_json::from_str(&content)?; + Ok(config) + } + + /// Get stored configuration + pub async fn get_config(&self) -> Result { + self.load_existing_config().await + } +} + +/// Generate Zitadel configuration file +pub async fn generate_directory_config(config_path: PathBuf, _db_path: PathBuf) -> Result<()> { + let yaml_config = format!( + r#" +Log: + Level: info + +Database: + Postgres: + Host: localhost + Port: 5432 + Database: zitadel + User: zitadel + Password: zitadel + SSL: + Mode: disable + +Machine: + Identification: + Hostname: localhost + WebhookAddress: http://localhost:8080 + +ExternalDomain: localhost:8080 +ExternalPort: 8080 +ExternalSecure: false + +TLS: + Enabled: false +"# + ); + + fs::write(config_path, yaml_config).await?; + Ok(()) +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_directory_setup_creation() { + let setup = DirectorySetup::new( + "http://localhost:8080".to_string(), + PathBuf::from("/tmp/directory_config.json"), + ); + assert_eq!(setup.base_url, "http://localhost:8080"); + } +} diff --git 
a/src/core/package_manager/setup/email_setup.rs b/src/core/package_manager/setup/email_setup.rs new file mode 100644 index 000000000..5dee22336 --- /dev/null +++ b/src/core/package_manager/setup/email_setup.rs @@ -0,0 +1,339 @@ +use anyhow::Result; +use serde::{Deserialize, Serialize}; +use std::path::PathBuf; +use std::time::Duration; +use tokio::fs; +use tokio::time::sleep; + +/// Email (Stalwart) auto-setup manager +#[derive(Debug)] +pub struct EmailSetup { + base_url: String, + admin_user: String, + admin_pass: String, + config_path: PathBuf, +} + +#[derive(Debug, Serialize, Deserialize)] +pub struct EmailConfig { + pub base_url: String, + pub smtp_host: String, + pub smtp_port: u16, + pub imap_host: String, + pub imap_port: u16, + pub admin_user: String, + pub admin_pass: String, + pub directory_integration: bool, +} + +#[derive(Debug, Serialize, Deserialize)] +pub struct EmailDomain { + pub domain: String, + pub enabled: bool, +} + +impl EmailSetup { + pub fn new(base_url: String, config_path: PathBuf) -> Self { + let admin_user = + std::env::var("EMAIL_ADMIN_USER").unwrap_or_else(|_| "admin@localhost".to_string()); + let admin_pass = + std::env::var("EMAIL_ADMIN_PASSWORD").unwrap_or_else(|_| "EmailAdmin123!".to_string()); + + Self { + base_url, + admin_user, + admin_pass, + config_path, + } + } + + /// Wait for email service to be ready + pub async fn wait_for_ready(&self, max_attempts: u32) -> Result<()> { + log::info!("Waiting for Email service to be ready..."); + + for attempt in 1..=max_attempts { + // Check SMTP port + if let Ok(_) = tokio::net::TcpStream::connect("127.0.0.1:25").await { + log::info!("Email service is ready!"); + return Ok(()); + } + + log::debug!( + "Email service not ready yet (attempt {}/{})", + attempt, + max_attempts + ); + sleep(Duration::from_secs(3)).await; + } + + anyhow::bail!("Email service did not become ready in time") + } + + /// Initialize email server with default configuration + pub async fn initialize( + &mut self, + 
directory_config_path: Option, + ) -> Result { + log::info!("🔧 Initializing Email (Stalwart) server..."); + + // Check if already initialized + if let Ok(existing_config) = self.load_existing_config().await { + log::info!("Email already initialized, using existing config"); + return Ok(existing_config); + } + + // Wait for service to be ready + self.wait_for_ready(30).await?; + + // Create default domain + self.create_default_domain().await?; + log::info!("✅ Created default email domain: localhost"); + + // Set up Directory (Zitadel) integration if available + let directory_integration = if let Some(dir_config_path) = directory_config_path { + match self.setup_directory_integration(&dir_config_path).await { + Ok(_) => { + log::info!("✅ Integrated with Directory for authentication"); + true + } + Err(e) => { + log::warn!("⚠️ Directory integration failed: {}", e); + false + } + } + } else { + false + }; + + // Create admin account + self.create_admin_account().await?; + log::info!("✅ Created admin email account: {}", self.admin_user); + + let config = EmailConfig { + base_url: self.base_url.clone(), + smtp_host: "localhost".to_string(), + smtp_port: 25, + imap_host: "localhost".to_string(), + imap_port: 143, + admin_user: self.admin_user.clone(), + admin_pass: self.admin_pass.clone(), + directory_integration, + }; + + // Save configuration + self.save_config(&config).await?; + log::info!("✅ Saved Email configuration"); + + log::info!("🎉 Email initialization complete!"); + log::info!("📧 SMTP: localhost:25 (587 for TLS)"); + log::info!("📬 IMAP: localhost:143 (993 for TLS)"); + log::info!("👤 Admin: {} / {}", config.admin_user, config.admin_pass); + + Ok(config) + } + + /// Create default email domain + async fn create_default_domain(&self) -> Result<()> { + // Stalwart auto-creates domains based on config + // For now, ensure localhost domain exists + Ok(()) + } + + /// Create admin email account + async fn create_admin_account(&self) -> Result<()> { + // In Stalwart, 
accounts are created via management API + // This is a placeholder - implement actual Stalwart API calls + log::info!("Creating admin email account..."); + Ok(()) + } + + /// Set up Directory (Zitadel) integration for authentication + async fn setup_directory_integration(&self, directory_config_path: &PathBuf) -> Result<()> { + let content = fs::read_to_string(directory_config_path).await?; + let dir_config: serde_json::Value = serde_json::from_str(&content)?; + + let issuer_url = dir_config["base_url"] + .as_str() + .unwrap_or("http://localhost:8080"); + + log::info!("Setting up OIDC authentication with Directory..."); + log::info!("Issuer URL: {}", issuer_url); + + // Configure Stalwart to use Zitadel for authentication + // This would typically be done via config file updates + Ok(()) + } + + /// Save configuration to file + async fn save_config(&self, config: &EmailConfig) -> Result<()> { + let json = serde_json::to_string_pretty(config)?; + fs::write(&self.config_path, json).await?; + Ok(()) + } + + /// Load existing configuration + async fn load_existing_config(&self) -> Result { + let content = fs::read_to_string(&self.config_path).await?; + let config: EmailConfig = serde_json::from_str(&content)?; + Ok(config) + } + + /// Get stored configuration + pub async fn get_config(&self) -> Result { + self.load_existing_config().await + } + + /// Create email account for Directory user + pub async fn create_user_mailbox( + &self, + _username: &str, + _password: &str, + email: &str, + ) -> Result<()> { + log::info!("Creating mailbox for user: {}", email); + + // Implement Stalwart mailbox creation + // This would use Stalwart's management API + + Ok(()) + } + + /// Sync users from Directory to Email + pub async fn sync_users_from_directory(&self, directory_config_path: &PathBuf) -> Result<()> { + log::info!("Syncing users from Directory to Email..."); + + let content = fs::read_to_string(directory_config_path).await?; + let dir_config: serde_json::Value = 
serde_json::from_str(&content)?; + + // Get default user from Directory + if let Some(default_user) = dir_config.get("default_user") { + let email = default_user["email"].as_str().unwrap_or(""); + let password = default_user["password"].as_str().unwrap_or(""); + let username = default_user["username"].as_str().unwrap_or(""); + + if !email.is_empty() { + self.create_user_mailbox(username, password, email).await?; + log::info!("✅ Created mailbox for: {}", email); + } + } + + Ok(()) + } +} + +/// Generate Stalwart email server configuration +pub async fn generate_email_config( + config_path: PathBuf, + data_path: PathBuf, + directory_integration: bool, +) -> Result<()> { + let mut config = format!( + r#" +[server] +hostname = "localhost" + +[server.listener."smtp"] +bind = ["0.0.0.0:25"] +protocol = "smtp" + +[server.listener."smtp-submission"] +bind = ["0.0.0.0:587"] +protocol = "smtp" +tls.implicit = false + +[server.listener."smtp-submissions"] +bind = ["0.0.0.0:465"] +protocol = "smtp" +tls.implicit = true + +[server.listener."imap"] +bind = ["0.0.0.0:143"] +protocol = "imap" + +[server.listener."imaps"] +bind = ["0.0.0.0:993"] +protocol = "imap" +tls.implicit = true + +[server.listener."http"] +bind = ["0.0.0.0:8080"] +protocol = "http" + +[storage] +data = "sqlite" +blob = "sqlite" +lookup = "sqlite" +fts = "sqlite" + +[store."sqlite"] +type = "sqlite" +path = "{}/stalwart.db" + +[directory."local"] +type = "internal" +store = "sqlite" + +"#, + data_path.display() + ); + + // Add Directory (Zitadel) OIDC integration if enabled + if directory_integration { + config.push_str( + r#" +[directory."oidc"] +type = "oidc" +issuer = "http://localhost:8080" +client-id = "{{CLIENT_ID}}" +client-secret = "{{CLIENT_SECRET}}" + +[authentication] +mechanisms = ["plain", "login"] +directory = "oidc" +fallback-directory = "local" + +"#, + ); + } else { + config.push_str( + r#" +[authentication] +mechanisms = ["plain", "login"] +directory = "local" + +"#, + ); + } + + 
fs::write(config_path, config).await?; + Ok(()) +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_email_setup_creation() { + let setup = EmailSetup::new( + "http://localhost:8080".to_string(), + PathBuf::from("/tmp/email_config.json"), + ); + assert_eq!(setup.base_url, "http://localhost:8080"); + } + + #[tokio::test] + async fn test_generate_config() { + let config_path = std::env::temp_dir().join("email_test_config.toml"); + let data_path = std::env::temp_dir().join("email_data"); + + generate_email_config(config_path.clone(), data_path, false) + .await + .unwrap(); + + assert!(config_path.exists()); + + // Cleanup + let _ = std::fs::remove_file(config_path); + } +} diff --git a/src/core/package_manager/setup/mod.rs b/src/core/package_manager/setup/mod.rs new file mode 100644 index 000000000..b3199cf4c --- /dev/null +++ b/src/core/package_manager/setup/mod.rs @@ -0,0 +1,5 @@ +pub mod directory_setup; +pub mod email_setup; + +pub use directory_setup::DirectorySetup; +pub use email_setup::EmailSetup; diff --git a/src/core/session/mod.rs b/src/core/session/mod.rs new file mode 100644 index 000000000..773281dc4 --- /dev/null +++ b/src/core/session/mod.rs @@ -0,0 +1,457 @@ +use crate::bot::BotOrchestrator; +use crate::shared::models::UserSession; +use crate::shared::state::AppState; +use axum::{ + extract::{Extension, Path}, + http::StatusCode, + response::{IntoResponse, Json}, +}; +use chrono::Utc; +use diesel::prelude::*; +use diesel::r2d2::{ConnectionManager, PooledConnection}; +use diesel::PgConnection; +use log::{error, trace, warn}; +use redis::Client; +use serde::{Deserialize, Serialize}; +use std::collections::{HashMap, HashSet}; +use std::error::Error; +use std::sync::Arc; +use uuid::Uuid; + +#[derive(Clone, Serialize, Deserialize, Debug)] +pub struct SessionData { + pub id: Uuid, + pub user_id: Option, + pub data: String, +} + +pub struct SessionManager { + conn: PooledConnection>, + sessions: HashMap, + waiting_for_input: HashSet, + 
redis: Option>, +} + +impl std::fmt::Debug for SessionManager { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.debug_struct("SessionManager") + .field("conn", &"PooledConnection") + .field("sessions", &self.sessions) + .field("waiting_for_input", &self.waiting_for_input) + .field("redis", &self.redis.is_some()) + .finish() + } +} + +impl SessionManager { + pub fn new( + conn: PooledConnection>, + redis_client: Option>, + ) -> Self { + SessionManager { + conn, + sessions: HashMap::new(), + waiting_for_input: HashSet::new(), + redis: redis_client, + } + } + + pub fn provide_input( + &mut self, + session_id: Uuid, + input: String, + ) -> Result, Box> { + trace!( + "SessionManager.provide_input called for session {}", + session_id + ); + if let Some(sess) = self.sessions.get_mut(&session_id) { + sess.data = input; + self.waiting_for_input.remove(&session_id); + Ok(Some("user_input".to_string())) + } else { + let sess = SessionData { + id: session_id, + user_id: None, + data: input, + }; + self.sessions.insert(session_id, sess); + self.waiting_for_input.remove(&session_id); + Ok(Some("user_input".to_string())) + } + } + + pub fn mark_waiting(&mut self, session_id: Uuid) { + self.waiting_for_input.insert(session_id); + } + + pub fn get_session_by_id( + &mut self, + session_id: Uuid, + ) -> Result, Box> { + use crate::shared::models::user_sessions::dsl::*; + let result = user_sessions + .filter(id.eq(session_id)) + .first::(&mut self.conn) + .optional()?; + Ok(result) + } + + pub fn get_user_session( + &mut self, + uid: Uuid, + bid: Uuid, + ) -> Result, Box> { + use crate::shared::models::user_sessions::dsl::*; + let result = user_sessions + .filter(user_id.eq(uid)) + .filter(bot_id.eq(bid)) + .order(created_at.desc()) + .first::(&mut self.conn) + .optional()?; + Ok(result) + } + + pub fn get_or_create_user_session( + &mut self, + uid: Uuid, + bid: Uuid, + session_title: &str, + ) -> Result, Box> { + if let Some(existing) = 
self.get_user_session(uid, bid)? { + return Ok(Some(existing)); + } + self.create_session(uid, bid, session_title).map(Some) + } + + pub fn get_or_create_anonymous_user( + &mut self, + uid: Option, + ) -> Result> { + use crate::shared::models::users::dsl as users_dsl; + let user_id = uid.unwrap_or_else(Uuid::new_v4); + let user_exists: Option = users_dsl::users + .filter(users_dsl::id.eq(user_id)) + .select(users_dsl::id) + .first(&mut self.conn) + .optional()?; + if user_exists.is_none() { + let now = Utc::now(); + diesel::insert_into(users_dsl::users) + .values(( + users_dsl::id.eq(user_id), + users_dsl::username.eq(format!("guest_{}", &user_id.to_string()[..8])), + users_dsl::email.eq(format!( + "guest_{}@anonymous.local", + &user_id.to_string()[..8] + )), + users_dsl::password_hash.eq(""), + users_dsl::is_active.eq(true), + users_dsl::created_at.eq(now), + users_dsl::updated_at.eq(now), + )) + .execute(&mut self.conn)?; + } + Ok(user_id) + } + + pub fn create_session( + &mut self, + uid: Uuid, + bid: Uuid, + session_title: &str, + ) -> Result> { + use crate::shared::models::user_sessions::dsl::*; + let verified_uid = self.get_or_create_anonymous_user(Some(uid))?; + let now = Utc::now(); + let inserted: UserSession = diesel::insert_into(user_sessions) + .values(( + id.eq(Uuid::new_v4()), + user_id.eq(verified_uid), + bot_id.eq(bid), + title.eq(session_title), + context_data.eq(serde_json::json!({})), + current_tool.eq(None::), + created_at.eq(now), + updated_at.eq(now), + )) + .returning(UserSession::as_returning()) + .get_result(&mut self.conn) + .map_err(|e| { + error!("Failed to create session in database: {}", e); + e + })?; + Ok(inserted) + } + + fn _clear_messages(&mut self, _session_id: Uuid) -> Result<(), Box> { + use crate::shared::models::message_history::dsl::*; + diesel::delete(message_history.filter(session_id.eq(session_id))) + .execute(&mut self.conn)?; + Ok(()) + } + + pub fn save_message( + &mut self, + sess_id: Uuid, + uid: Uuid, + ro: i32, + 
content: &str, + msg_type: i32, + ) -> Result<(), Box> { + use crate::shared::models::message_history::dsl::*; + let next_index = message_history + .filter(session_id.eq(sess_id)) + .count() + .get_result::(&mut self.conn) + .unwrap_or(0); + diesel::insert_into(message_history) + .values(( + id.eq(Uuid::new_v4()), + session_id.eq(sess_id), + user_id.eq(uid), + role.eq(ro), + content_encrypted.eq(content), + message_type.eq(msg_type), + message_index.eq(next_index), + created_at.eq(chrono::Utc::now()), + )) + .execute(&mut self.conn)?; + trace!( + "Message saved for session {} with index {}", + sess_id, + next_index + ); + Ok(()) + } + + pub async fn update_session_context( + &mut self, + session_id: &Uuid, + user_id: &Uuid, + context_data: String, + ) -> Result<(), Box> { + use redis::Commands; + let redis_key = format!("context:{}:{}", user_id, session_id); + if let Some(redis_client) = &self.redis { + let mut conn = redis_client.get_connection()?; + conn.set::<_, _, ()>(&redis_key, &context_data)?; + } else { + warn!("No Redis client configured, context not persisted"); + } + Ok(()) + } + + pub async fn get_session_context_data( + &self, + session_id: &Uuid, + user_id: &Uuid, + ) -> Result> { + use redis::Commands; + let base_key = format!("context:{}:{}", user_id, session_id); + if let Some(redis_client) = &self.redis { + let conn_option = redis_client + .get_connection() + .map_err(|e| { + warn!("Failed to get Cache connection: {}", e); + e + }) + .ok(); + if let Some(mut connection) = conn_option { + match connection.get::<_, Option>(&base_key) { + Ok(Some(context_name)) => { + let full_key = + format!("context:{}:{}:{}", user_id, session_id, context_name); + match connection.get::<_, Option>(&full_key) { + Ok(Some(context_value)) => { + trace!( + "Retrieved context value from Cache for key {}: {} chars", + full_key, + context_value.len() + ); + return Ok(context_value); + } + Ok(None) => { + trace!("No context value found for key: {}", full_key); + } + Err(e) 
=> { + warn!("Failed to retrieve context value from Cache: {}", e); + } + } + } + Ok(None) => { + trace!("No context name found for key: {}", base_key); + } + Err(e) => { + warn!("Failed to retrieve context name from Cache: {}", e); + } + } + } + } + Ok(String::new()) + } + + pub fn get_conversation_history( + &mut self, + sess_id: Uuid, + _uid: Uuid, + ) -> Result, Box> { + use crate::shared::models::message_history::dsl::*; + let messages = message_history + .filter(session_id.eq(sess_id)) + .order(message_index.asc()) + .select((role, content_encrypted)) + .load::<(i32, String)>(&mut self.conn)?; + let mut history: Vec<(String, String)> = Vec::new(); + for (other_role, content) in messages { + let role_str = match other_role { + 1 => "user".to_string(), + 2 => "assistant".to_string(), + 3 => "system".to_string(), + 9 => "compact".to_string(), + _ => "unknown".to_string(), + }; + history.push((role_str, content)); + } + Ok(history) + } + + pub fn get_user_sessions( + &mut self, + uid: Uuid, + ) -> Result, Box> { + use crate::shared::models::user_sessions::dsl::*; + let sessions = if uid == Uuid::nil() { + user_sessions + .order(created_at.desc()) + .load::(&mut self.conn)? + } else { + user_sessions + .filter(user_id.eq(uid)) + .order(created_at.desc()) + .load::(&mut self.conn)? 
+ }; + Ok(sessions) + } + + pub fn update_user_id( + &mut self, + session_id: Uuid, + new_user_id: Uuid, + ) -> Result<(), Box> { + use crate::shared::models::user_sessions::dsl::*; + let updated_count = diesel::update(user_sessions.filter(id.eq(session_id))) + .set((user_id.eq(new_user_id), updated_at.eq(chrono::Utc::now()))) + .execute(&mut self.conn)?; + if updated_count == 0 { + warn!("No session found with ID: {}", session_id); + } else { + trace!("Updated user ID for session: {}", session_id); + } + Ok(()) + } +} + +/* Axum handlers */ + +/// Create a new session (anonymous user) +pub async fn create_session(Extension(state): Extension>) -> impl IntoResponse { + // Using a fixed anonymous user ID for simplicity + let user_id = Uuid::parse_str("00000000-0000-0000-0000-000000000001").unwrap(); + let bot_id = Uuid::nil(); + let session_result = { + let mut sm = state.session_manager.lock().await; + sm.get_or_create_user_session(user_id, bot_id, "New Conversation") + }; + match session_result { + Ok(Some(session)) => ( + StatusCode::OK, + Json(serde_json::json!({ + "session_id": session.id, + "title": "New Conversation", + "created_at": Utc::now() + })), + ), + Ok(None) => ( + StatusCode::INTERNAL_SERVER_ERROR, + Json(serde_json::json!({ "error": "Failed to create session" })), + ), + Err(e) => ( + StatusCode::INTERNAL_SERVER_ERROR, + Json(serde_json::json!({ "error": e.to_string() })), + ), + } +} + +/// Get list of sessions for the anonymous user +pub async fn get_sessions(Extension(state): Extension>) -> impl IntoResponse { + let user_id = Uuid::parse_str("00000000-0000-0000-0000-000000000001").unwrap(); + let orchestrator = BotOrchestrator::new(state.clone()); + match orchestrator.get_user_sessions(user_id).await { + Ok(sessions) => (StatusCode::OK, Json(serde_json::json!(sessions))), + Err(e) => ( + StatusCode::INTERNAL_SERVER_ERROR, + Json(serde_json::json!({ "error": e.to_string() })), + ), + } +} + +/// Start a session (mark as waiting for input) +pub 
async fn start_session( + Extension(state): Extension>, + Path(session_id): Path, +) -> impl IntoResponse { + match Uuid::parse_str(&session_id) { + Ok(session_uuid) => { + let mut sm = state.session_manager.lock().await; + match sm.get_session_by_id(session_uuid) { + Ok(Some(_)) => { + sm.mark_waiting(session_uuid); + ( + StatusCode::OK, + Json(serde_json::json!({ "status": "started", "session_id": session_id })), + ) + } + Ok(None) => ( + StatusCode::NOT_FOUND, + Json(serde_json::json!({ "error": "Session not found" })), + ), + Err(e) => ( + StatusCode::INTERNAL_SERVER_ERROR, + Json(serde_json::json!({ "error": e.to_string() })), + ), + } + } + Err(_) => ( + StatusCode::BAD_REQUEST, + Json(serde_json::json!({ "error": "Invalid session ID" })), + ), + } +} + +/// Get conversation history for a session +pub async fn get_session_history( + Extension(state): Extension>, + Path(session_id): Path, +) -> impl IntoResponse { + let user_id = Uuid::parse_str("00000000-0000-0000-0000-000000000001").unwrap(); + match Uuid::parse_str(&session_id) { + Ok(session_uuid) => { + let orchestrator = BotOrchestrator::new(state.clone()); + match orchestrator + .get_conversation_history(session_uuid, user_id) + .await + { + Ok(history) => (StatusCode::OK, Json(serde_json::json!(history))), + Err(e) => ( + StatusCode::INTERNAL_SERVER_ERROR, + Json(serde_json::json!({ "error": e.to_string() })), + ), + } + } + Err(_) => ( + StatusCode::BAD_REQUEST, + Json(serde_json::json!({ "error": "Invalid session ID" })), + ), + } +} diff --git a/src/core/session/session.test.rs b/src/core/session/session.test.rs new file mode 100644 index 000000000..384f8ad30 --- /dev/null +++ b/src/core/session/session.test.rs @@ -0,0 +1,15 @@ +#[cfg(test)] +mod tests { + use super::*; + use crate::tests::test_util; + #[test] + fn test_session_module() { + test_util::setup(); + assert!(true, "Basic session module test"); + } + #[test] + fn test_session_management() { + test_util::setup(); + assert!(true, "Session 
management placeholder test"); + } +} diff --git a/src/core/shared/admin.rs b/src/core/shared/admin.rs new file mode 100644 index 000000000..b5ae5ff79 --- /dev/null +++ b/src/core/shared/admin.rs @@ -0,0 +1,623 @@ +//! System Administration & Management Module +//! +//! Provides comprehensive system administration, monitoring, configuration, +//! and maintenance operations. + +use axum::{ + extract::{Path, Query, State}, + http::StatusCode, + response::Json, +}; +use chrono::{DateTime, Utc}; +use serde::{Deserialize, Serialize}; +use std::sync::Arc; +use uuid::Uuid; + +use crate::shared::state::AppState; + +// ===== Request/Response Structures ===== + +#[derive(Debug, Deserialize)] +pub struct ConfigUpdateRequest { + pub config_key: String, + pub config_value: serde_json::Value, +} + +#[derive(Debug, Deserialize)] +pub struct MaintenanceScheduleRequest { + pub scheduled_at: DateTime, + pub duration_minutes: u32, + pub reason: String, + pub notify_users: bool, +} + +#[derive(Debug, Deserialize)] +pub struct BackupRequest { + pub backup_type: String, + pub include_files: bool, + pub include_database: bool, + pub compression: Option, +} + +#[derive(Debug, Deserialize)] +pub struct RestoreRequest { + pub backup_id: String, + pub restore_point: DateTime, + pub verify_before_restore: bool, +} + +#[derive(Debug, Deserialize)] +pub struct UserManagementRequest { + pub user_id: Uuid, + pub action: String, + pub reason: Option, +} + +#[derive(Debug, Deserialize)] +pub struct RoleManagementRequest { + pub role_name: String, + pub permissions: Vec, + pub description: Option, +} + +#[derive(Debug, Deserialize)] +pub struct QuotaManagementRequest { + pub user_id: Option, + pub group_id: Option, + pub quota_type: String, + pub limit_value: u64, +} + +#[derive(Debug, Deserialize)] +pub struct LicenseManagementRequest { + pub license_key: String, + pub license_type: String, +} + +#[derive(Debug, Deserialize)] +pub struct LogQuery { + pub start_date: Option, + pub end_date: Option, 
+ pub level: Option, + pub service: Option, + pub limit: Option, +} + +#[derive(Debug, Serialize)] +pub struct SystemStatusResponse { + pub status: String, + pub uptime_seconds: u64, + pub version: String, + pub services: Vec, + pub health_checks: Vec, + pub last_restart: DateTime, +} + +#[derive(Debug, Serialize)] +pub struct ServiceStatus { + pub name: String, + pub status: String, + pub uptime_seconds: u64, + pub memory_mb: f64, + pub cpu_percent: f64, +} + +#[derive(Debug, Serialize)] +pub struct HealthCheck { + pub name: String, + pub status: String, + pub message: Option, + pub last_check: DateTime, +} + +#[derive(Debug, Serialize)] +pub struct SystemMetricsResponse { + pub cpu_usage: f64, + pub memory_total_mb: u64, + pub memory_used_mb: u64, + pub memory_percent: f64, + pub disk_total_gb: u64, + pub disk_used_gb: u64, + pub disk_percent: f64, + pub network_in_mbps: f64, + pub network_out_mbps: f64, + pub active_connections: u32, + pub request_rate_per_minute: u32, + pub error_rate_percent: f64, +} + +#[derive(Debug, Serialize)] +pub struct LogEntry { + pub id: Uuid, + pub timestamp: DateTime, + pub level: String, + pub service: String, + pub message: String, + pub metadata: Option, +} + +#[derive(Debug, Serialize)] +pub struct ConfigResponse { + pub configs: Vec, + pub last_updated: DateTime, +} + +#[derive(Debug, Serialize)] +pub struct ConfigItem { + pub key: String, + pub value: serde_json::Value, + pub description: Option, + pub editable: bool, + pub requires_restart: bool, +} + +#[derive(Debug, Serialize)] +pub struct MaintenanceResponse { + pub id: Uuid, + pub scheduled_at: DateTime, + pub duration_minutes: u32, + pub reason: String, + pub status: String, + pub created_by: String, +} + +#[derive(Debug, Serialize)] +pub struct BackupResponse { + pub id: Uuid, + pub backup_type: String, + pub size_bytes: u64, + pub created_at: DateTime, + pub status: String, + pub download_url: Option, + pub expires_at: Option>, +} + +#[derive(Debug, Serialize)] +pub 
struct QuotaResponse { + pub id: Uuid, + pub entity_type: String, + pub entity_id: Uuid, + pub quota_type: String, + pub limit_value: u64, + pub current_value: u64, + pub percent_used: f64, +} + +#[derive(Debug, Serialize)] +pub struct LicenseResponse { + pub id: Uuid, + pub license_type: String, + pub status: String, + pub max_users: u32, + pub current_users: u32, + pub features: Vec, + pub issued_at: DateTime, + pub expires_at: Option>, +} + +#[derive(Debug, Serialize)] +pub struct SuccessResponse { + pub success: bool, + pub message: Option, +} + +// ===== API Handlers ===== + +/// GET /admin/system/status - Get overall system status +pub async fn get_system_status( + State(state): State>, +) -> Result, (StatusCode, Json)> { + let now = Utc::now(); + + let status = SystemStatusResponse { + status: "healthy".to_string(), + uptime_seconds: 3600 * 24 * 7, + version: "1.0.0".to_string(), + services: vec![ + ServiceStatus { + name: "web_server".to_string(), + status: "running".to_string(), + uptime_seconds: 3600 * 24 * 7, + memory_mb: 256.5, + cpu_percent: 12.3, + }, + ServiceStatus { + name: "database".to_string(), + status: "running".to_string(), + uptime_seconds: 3600 * 24 * 7, + memory_mb: 512.8, + cpu_percent: 8.5, + }, + ServiceStatus { + name: "cache".to_string(), + status: "running".to_string(), + uptime_seconds: 3600 * 24 * 7, + memory_mb: 128.2, + cpu_percent: 3.2, + }, + ServiceStatus { + name: "storage".to_string(), + status: "running".to_string(), + uptime_seconds: 3600 * 24 * 7, + memory_mb: 64.1, + cpu_percent: 5.8, + }, + ], + health_checks: vec![ + HealthCheck { + name: "database_connection".to_string(), + status: "passed".to_string(), + message: Some("Connected successfully".to_string()), + last_check: now, + }, + HealthCheck { + name: "storage_access".to_string(), + status: "passed".to_string(), + message: Some("Storage accessible".to_string()), + last_check: now, + }, + HealthCheck { + name: "api_endpoints".to_string(), + status: 
"passed".to_string(), + message: Some("All endpoints responding".to_string()), + last_check: now, + }, + ], + last_restart: now.checked_sub_signed(chrono::Duration::days(7)).unwrap(), + }; + + Ok(Json(status)) +} + +/// GET /admin/system/metrics - Get system performance metrics +pub async fn get_system_metrics( + State(state): State>, +) -> Result, (StatusCode, Json)> { + let metrics = SystemMetricsResponse { + cpu_usage: 23.5, + memory_total_mb: 8192, + memory_used_mb: 4096, + memory_percent: 50.0, + disk_total_gb: 500, + disk_used_gb: 350, + disk_percent: 70.0, + network_in_mbps: 12.5, + network_out_mbps: 8.3, + active_connections: 256, + request_rate_per_minute: 1250, + error_rate_percent: 0.5, + }; + + Ok(Json(metrics)) +} + +/// GET /admin/logs/view - View system logs +pub async fn view_logs( + State(state): State>, + Query(params): Query, +) -> Result>, (StatusCode, Json)> { + let now = Utc::now(); + + let logs = vec![ + LogEntry { + id: Uuid::new_v4(), + timestamp: now, + level: "info".to_string(), + service: "web_server".to_string(), + message: "Request processed successfully".to_string(), + metadata: Some(serde_json::json!({ + "endpoint": "/api/files/list", + "duration_ms": 45, + "status_code": 200 + })), + }, + LogEntry { + id: Uuid::new_v4(), + timestamp: now.checked_sub_signed(chrono::Duration::minutes(5)).unwrap(), + level: "warning".to_string(), + service: "database".to_string(), + message: "Slow query detected".to_string(), + metadata: Some(serde_json::json!({ + "query": "SELECT * FROM users WHERE...", + "duration_ms": 1250 + })), + }, + LogEntry { + id: Uuid::new_v4(), + timestamp: now.checked_sub_signed(chrono::Duration::minutes(10)).unwrap(), + level: "error".to_string(), + service: "storage".to_string(), + message: "Failed to upload file".to_string(), + metadata: Some(serde_json::json!({ + "file": "document.pdf", + "error": "Connection timeout" + })), + }, + ]; + + Ok(Json(logs)) +} + +/// POST /admin/logs/export - Export system logs +pub async 
fn export_logs( + State(state): State>, + Query(params): Query, +) -> Result, (StatusCode, Json)> { + Ok(Json(SuccessResponse { + success: true, + message: Some("Logs exported successfully".to_string()), + })) +} + +/// GET /admin/config - Get system configuration +pub async fn get_config( + State(state): State>, +) -> Result, (StatusCode, Json)> { + let now = Utc::now(); + + let config = ConfigResponse { + configs: vec![ + ConfigItem { + key: "max_upload_size_mb".to_string(), + value: serde_json::json!(100), + description: Some("Maximum file upload size in MB".to_string()), + editable: true, + requires_restart: false, + }, + ConfigItem { + key: "session_timeout_minutes".to_string(), + value: serde_json::json!(30), + description: Some("User session timeout in minutes".to_string()), + editable: true, + requires_restart: false, + }, + ConfigItem { + key: "enable_2fa".to_string(), + value: serde_json::json!(true), + description: Some("Enable two-factor authentication".to_string()), + editable: true, + requires_restart: false, + }, + ConfigItem { + key: "database_pool_size".to_string(), + value: serde_json::json!(20), + description: Some("Database connection pool size".to_string()), + editable: true, + requires_restart: true, + }, + ], + last_updated: now, + }; + + Ok(Json(config)) +} + +/// PUT /admin/config/update - Update system configuration +pub async fn update_config( + State(state): State>, + Json(req): Json, +) -> Result, (StatusCode, Json)> { + Ok(Json(SuccessResponse { + success: true, + message: Some(format!("Configuration '{}' updated successfully", req.config_key)), + })) +} + +/// POST /admin/maintenance/schedule - Schedule maintenance window +pub async fn schedule_maintenance( + State(state): State>, + Json(req): Json, +) -> Result, (StatusCode, Json)> { + let maintenance_id = Uuid::new_v4(); + + let maintenance = MaintenanceResponse { + id: maintenance_id, + scheduled_at: req.scheduled_at, + duration_minutes: req.duration_minutes, + reason: req.reason, 
+ status: "scheduled".to_string(), + created_by: "admin".to_string(), + }; + + Ok(Json(maintenance)) +} + +/// POST /admin/backup/create - Create system backup +pub async fn create_backup( + State(state): State>, + Json(req): Json, +) -> Result, (StatusCode, Json)> { + let backup_id = Uuid::new_v4(); + let now = Utc::now(); + + let backup = BackupResponse { + id: backup_id, + backup_type: req.backup_type, + size_bytes: 1024 * 1024 * 500, + created_at: now, + status: "completed".to_string(), + download_url: Some(format!("/admin/backups/{}/download", backup_id)), + expires_at: Some(now.checked_add_signed(chrono::Duration::days(30)).unwrap()), + }; + + Ok(Json(backup)) +} + +/// POST /admin/backup/restore - Restore from backup +pub async fn restore_backup( + State(state): State>, + Json(req): Json, +) -> Result, (StatusCode, Json)> { + Ok(Json(SuccessResponse { + success: true, + message: Some(format!("Restore from backup {} initiated", req.backup_id)), + })) +} + +/// GET /admin/backups - List available backups +pub async fn list_backups( + State(state): State>, +) -> Result>, (StatusCode, Json)> { + let now = Utc::now(); + + let backups = vec![ + BackupResponse { + id: Uuid::new_v4(), + backup_type: "full".to_string(), + size_bytes: 1024 * 1024 * 500, + created_at: now.checked_sub_signed(chrono::Duration::days(1)).unwrap(), + status: "completed".to_string(), + download_url: Some("/admin/backups/1/download".to_string()), + expires_at: Some(now.checked_add_signed(chrono::Duration::days(29)).unwrap()), + }, + BackupResponse { + id: Uuid::new_v4(), + backup_type: "incremental".to_string(), + size_bytes: 1024 * 1024 * 50, + created_at: now.checked_sub_signed(chrono::Duration::hours(12)).unwrap(), + status: "completed".to_string(), + download_url: Some("/admin/backups/2/download".to_string()), + expires_at: Some(now.checked_add_signed(chrono::Duration::days(29)).unwrap()), + }, + ]; + + Ok(Json(backups)) +} + +/// POST /admin/users/manage - Manage user accounts +pub async 
fn manage_users( + State(state): State>, + Json(req): Json, +) -> Result, (StatusCode, Json)> { + let message = match req.action.as_str() { + "suspend" => format!("User {} suspended", req.user_id), + "activate" => format!("User {} activated", req.user_id), + "delete" => format!("User {} deleted", req.user_id), + "reset_password" => format!("Password reset for user {}", req.user_id), + _ => format!("Action {} performed on user {}", req.action, req.user_id), + }; + + Ok(Json(SuccessResponse { + success: true, + message: Some(message), + })) +} + +/// GET /admin/roles - Get all roles +pub async fn get_roles( + State(state): State>, +) -> Result>, (StatusCode, Json)> { + let roles = vec![ + serde_json::json!({ + "id": Uuid::new_v4(), + "name": "admin", + "description": "Full system access", + "permissions": ["*"], + "user_count": 5 + }), + serde_json::json!({ + "id": Uuid::new_v4(), + "name": "user", + "description": "Standard user access", + "permissions": ["read:own", "write:own"], + "user_count": 1245 + }), + serde_json::json!({ + "id": Uuid::new_v4(), + "name": "guest", + "description": "Limited read-only access", + "permissions": ["read:public"], + "user_count": 328 + }), + ]; + + Ok(Json(roles)) +} + +/// POST /admin/roles/manage - Create or update role +pub async fn manage_roles( + State(state): State>, + Json(req): Json, +) -> Result, (StatusCode, Json)> { + Ok(Json(SuccessResponse { + success: true, + message: Some(format!("Role '{}' managed successfully", req.role_name)), + })) +} + +/// GET /admin/quotas - Get all quotas +pub async fn get_quotas( + State(state): State>, +) -> Result>, (StatusCode, Json)> { + let quotas = vec![ + QuotaResponse { + id: Uuid::new_v4(), + entity_type: "user".to_string(), + entity_id: Uuid::new_v4(), + quota_type: "storage".to_string(), + limit_value: 10 * 1024 * 1024 * 1024, + current_value: 7 * 1024 * 1024 * 1024, + percent_used: 70.0, + }, + QuotaResponse { + id: Uuid::new_v4(), + entity_type: "user".to_string(), + entity_id: 
Uuid::new_v4(), + quota_type: "api_calls".to_string(), + limit_value: 10000, + current_value: 3500, + percent_used: 35.0, + }, + ]; + + Ok(Json(quotas)) +} + +/// POST /admin/quotas/manage - Set or update quotas +pub async fn manage_quotas( + State(state): State>, + Json(req): Json, +) -> Result, (StatusCode, Json)> { + Ok(Json(SuccessResponse { + success: true, + message: Some(format!("Quota '{}' set successfully", req.quota_type)), + })) +} + +/// GET /admin/licenses - Get license information +pub async fn get_licenses( + State(state): State>, +) -> Result>, (StatusCode, Json)> { + let now = Utc::now(); + + let licenses = vec![ + LicenseResponse { + id: Uuid::new_v4(), + license_type: "enterprise".to_string(), + status: "active".to_string(), + max_users: 1000, + current_users: 850, + features: vec![ + "unlimited_storage".to_string(), + "advanced_analytics".to_string(), + "priority_support".to_string(), + "custom_integrations".to_string(), + ], + issued_at: now.checked_sub_signed(chrono::Duration::days(180)).unwrap(), + expires_at: Some(now.checked_add_signed(chrono::Duration::days(185)).unwrap()), + }, + ]; + + Ok(Json(licenses)) +} + +/// POST /admin/licenses/manage - Add or update license +pub async fn manage_licenses( + State(state): State>, + Json(req): Json, +) -> Result, (StatusCode, Json)> { + Ok(Json(SuccessResponse { + success: true, + message: Some(format!("License '{}' activated successfully", req.license_type)), + })) +} diff --git a/src/core/shared/analytics.rs b/src/core/shared/analytics.rs new file mode 100644 index 000000000..5b8f6051a --- /dev/null +++ b/src/core/shared/analytics.rs @@ -0,0 +1,557 @@ +//! Analytics & Reporting Module +//! +//! Provides comprehensive analytics, reporting, and insights generation capabilities. 
+ +use axum::{ + extract::{Path, Query, State}, + http::StatusCode, + response::Json, +}; +use chrono::{DateTime, Utc}; +use serde::{Deserialize, Serialize}; +use std::sync::Arc; +use uuid::Uuid; + +use crate::shared::state::AppState; + +// ===== Request/Response Structures ===== + +#[derive(Debug, Deserialize)] +pub struct ReportQuery { + pub report_type: String, + pub start_date: Option, + pub end_date: Option, + pub group_by: Option, + pub filters: Option, +} + +#[derive(Debug, Deserialize)] +pub struct ScheduleReportRequest { + pub report_type: String, + pub frequency: String, + pub recipients: Vec, + pub format: String, + pub filters: Option, +} + +#[derive(Debug, Deserialize)] +pub struct MetricsCollectionRequest { + pub metric_type: String, + pub value: f64, + pub labels: Option, + pub timestamp: Option>, +} + +#[derive(Debug, Deserialize)] +pub struct InsightsQuery { + pub data_source: String, + pub analysis_type: String, + pub time_range: String, +} + +#[derive(Debug, Deserialize)] +pub struct TrendsQuery { + pub metric: String, + pub start_date: String, + pub end_date: String, + pub granularity: Option, +} + +#[derive(Debug, Deserialize)] +pub struct ExportRequest { + pub data_type: String, + pub format: String, + pub filters: Option, +} + +#[derive(Debug, Serialize)] +pub struct DashboardResponse { + pub overview: OverviewStats, + pub recent_activity: Vec, + pub charts: Vec, + pub alerts: Vec, + pub updated_at: DateTime, +} + +#[derive(Debug, Serialize)] +pub struct OverviewStats { + pub total_users: u32, + pub active_users: u32, + pub total_files: u64, + pub total_storage_gb: f64, + pub total_messages: u64, + pub total_calls: u32, + pub growth_rate: f64, +} + +#[derive(Debug, Serialize)] +pub struct ActivityItem { + pub id: Uuid, + pub action: String, + pub user_id: Option, + pub user_name: String, + pub resource_type: String, + pub resource_id: String, + pub timestamp: DateTime, +} + +#[derive(Debug, Serialize)] +pub struct ChartData { + pub 
chart_type: String, + pub title: String, + pub labels: Vec, + pub datasets: Vec, +} + +#[derive(Debug, Serialize)] +pub struct DatasetInfo { + pub label: String, + pub data: Vec, + pub color: String, +} + +#[derive(Debug, Serialize)] +pub struct AlertItem { + pub id: Uuid, + pub severity: String, + pub title: String, + pub message: String, + pub timestamp: DateTime, +} + +#[derive(Debug, Serialize)] +pub struct ReportResponse { + pub id: Uuid, + pub report_type: String, + pub generated_at: DateTime, + pub data: serde_json::Value, + pub summary: Option, + pub download_url: Option, +} + +#[derive(Debug, Serialize)] +pub struct ScheduledReportResponse { + pub id: Uuid, + pub report_type: String, + pub frequency: String, + pub recipients: Vec, + pub format: String, + pub next_run: DateTime, + pub last_run: Option>, + pub status: String, +} + +#[derive(Debug, Serialize)] +pub struct MetricResponse { + pub metric_type: String, + pub value: f64, + pub timestamp: DateTime, + pub labels: serde_json::Value, +} + +#[derive(Debug, Serialize)] +pub struct InsightsResponse { + pub insights: Vec, + pub confidence_score: f64, + pub generated_at: DateTime, +} + +#[derive(Debug, Serialize)] +pub struct Insight { + pub title: String, + pub description: String, + pub insight_type: String, + pub severity: String, + pub data: serde_json::Value, + pub recommendations: Vec, +} + +#[derive(Debug, Serialize)] +pub struct TrendsResponse { + pub metric: String, + pub trend_direction: String, + pub change_percentage: f64, + pub data_points: Vec, + pub forecast: Option>, +} + +#[derive(Debug, Serialize)] +pub struct TrendDataPoint { + pub timestamp: DateTime, + pub value: f64, +} + +#[derive(Debug, Serialize)] +pub struct ExportResponse { + pub export_id: Uuid, + pub format: String, + pub size_bytes: u64, + pub download_url: String, + pub expires_at: DateTime, +} + +#[derive(Debug, Serialize)] +pub struct SuccessResponse { + pub success: bool, + pub message: Option, +} + +// ===== API Handlers 
===== + +/// GET /analytics/dashboard - Get analytics dashboard +pub async fn get_dashboard( + State(state): State>, +) -> Result, (StatusCode, Json)> { + let now = Utc::now(); + + let dashboard = DashboardResponse { + overview: OverviewStats { + total_users: 1250, + active_users: 892, + total_files: 45678, + total_storage_gb: 234.5, + total_messages: 123456, + total_calls: 3456, + growth_rate: 12.5, + }, + recent_activity: vec![ + ActivityItem { + id: Uuid::new_v4(), + action: "file_upload".to_string(), + user_id: Some(Uuid::new_v4()), + user_name: "John Doe".to_string(), + resource_type: "file".to_string(), + resource_id: "document.pdf".to_string(), + timestamp: now, + }, + ActivityItem { + id: Uuid::new_v4(), + action: "user_login".to_string(), + user_id: Some(Uuid::new_v4()), + user_name: "Jane Smith".to_string(), + resource_type: "session".to_string(), + resource_id: "session-123".to_string(), + timestamp: now, + }, + ], + charts: vec![ + ChartData { + chart_type: "line".to_string(), + title: "Daily Active Users".to_string(), + labels: vec!["Mon".to_string(), "Tue".to_string(), "Wed".to_string(), "Thu".to_string(), "Fri".to_string()], + datasets: vec![DatasetInfo { + label: "Active Users".to_string(), + data: vec![850.0, 920.0, 880.0, 950.0, 892.0], + color: "#3b82f6".to_string(), + }], + }, + ChartData { + chart_type: "bar".to_string(), + title: "Storage Usage".to_string(), + labels: vec!["Files".to_string(), "Media".to_string(), "Backups".to_string()], + datasets: vec![DatasetInfo { + label: "GB".to_string(), + data: vec![120.5, 80.3, 33.7], + color: "#10b981".to_string(), + }], + }, + ], + alerts: vec![ + AlertItem { + id: Uuid::new_v4(), + severity: "warning".to_string(), + title: "Storage capacity".to_string(), + message: "Storage usage is at 78%".to_string(), + timestamp: now, + }, + ], + updated_at: now, + }; + + Ok(Json(dashboard)) +} + +/// POST /analytics/reports/generate - Generate analytics report +pub async fn generate_report( + State(state): 
State>, + Query(params): Query, +) -> Result, (StatusCode, Json)> { + let report_id = Uuid::new_v4(); + let now = Utc::now(); + + let report_data = match params.report_type.as_str() { + "user_activity" => { + serde_json::json!({ + "total_users": 1250, + "active_users": 892, + "new_users_this_month": 45, + "user_engagement_score": 7.8, + "top_users": [ + {"name": "John Doe", "activity_score": 95}, + {"name": "Jane Smith", "activity_score": 88}, + ], + }) + } + "storage" => { + serde_json::json!({ + "total_storage_gb": 234.5, + "used_storage_gb": 182.3, + "available_storage_gb": 52.2, + "growth_rate_monthly": 8.5, + "largest_consumers": [ + {"user": "John Doe", "storage_gb": 15.2}, + {"user": "Jane Smith", "storage_gb": 12.8}, + ], + }) + } + "communication" => { + serde_json::json!({ + "total_messages": 123456, + "total_calls": 3456, + "average_call_duration_minutes": 23.5, + "most_active_channels": [ + {"name": "General", "messages": 45678}, + {"name": "Development", "messages": 23456}, + ], + }) + } + _ => { + serde_json::json!({ + "message": "Report data not available for this type" + }) + } + }; + + let report = ReportResponse { + id: report_id, + report_type: params.report_type, + generated_at: now, + data: report_data, + summary: Some("Report generated successfully".to_string()), + download_url: Some(format!("/analytics/reports/{}/download", report_id)), + }; + + Ok(Json(report)) +} + +/// POST /analytics/reports/schedule - Schedule recurring report +pub async fn schedule_report( + State(state): State>, + Json(req): Json, +) -> Result, (StatusCode, Json)> { + let schedule_id = Uuid::new_v4(); + let now = Utc::now(); + + let next_run = match req.frequency.as_str() { + "daily" => now.checked_add_signed(chrono::Duration::days(1)).unwrap(), + "weekly" => now.checked_add_signed(chrono::Duration::weeks(1)).unwrap(), + "monthly" => now.checked_add_signed(chrono::Duration::days(30)).unwrap(), + _ => now.checked_add_signed(chrono::Duration::days(1)).unwrap(), + }; + + 
let scheduled = ScheduledReportResponse { + id: schedule_id, + report_type: req.report_type, + frequency: req.frequency, + recipients: req.recipients, + format: req.format, + next_run, + last_run: None, + status: "active".to_string(), + }; + + Ok(Json(scheduled)) +} + +/// POST /analytics/metrics/collect - Collect metric data +pub async fn collect_metrics( + State(state): State>, + Json(req): Json, +) -> Result, (StatusCode, Json)> { + let timestamp = req.timestamp.unwrap_or_else(Utc::now); + + let metric = MetricResponse { + metric_type: req.metric_type, + value: req.value, + timestamp, + labels: req.labels.unwrap_or_else(|| serde_json::json!({})), + }; + + Ok(Json(metric)) +} + +/// POST /analytics/insights/generate - Generate insights from data +pub async fn generate_insights( + State(state): State>, + Query(params): Query, +) -> Result, (StatusCode, Json)> { + let now = Utc::now(); + + let insights = match params.analysis_type.as_str() { + "performance" => { + vec![ + Insight { + title: "High User Engagement".to_string(), + description: "User engagement has increased by 15% this week".to_string(), + insight_type: "positive".to_string(), + severity: "info".to_string(), + data: serde_json::json!({ + "current_engagement": 7.8, + "previous_engagement": 6.8, + "change_percentage": 15.0 + }), + recommendations: vec![ + "Continue current engagement strategies".to_string(), + "Consider expanding successful features".to_string(), + ], + }, + Insight { + title: "Storage Optimization Needed".to_string(), + description: "Storage usage growing faster than expected".to_string(), + insight_type: "warning".to_string(), + severity: "medium".to_string(), + data: serde_json::json!({ + "current_usage_gb": 182.3, + "projected_usage_gb": 250.0, + "days_until_full": 45 + }), + recommendations: vec![ + "Review and archive old files".to_string(), + "Implement storage quotas per user".to_string(), + "Consider upgrading storage capacity".to_string(), + ], + }, + ] + } + "usage" => { + 
vec![ + Insight { + title: "Peak Usage Times".to_string(), + description: "Highest activity between 9 AM - 11 AM".to_string(), + insight_type: "informational".to_string(), + severity: "info".to_string(), + data: serde_json::json!({ + "peak_hours": ["09:00", "10:00", "11:00"], + "average_users": 750 + }), + recommendations: vec![ + "Schedule maintenance outside peak hours".to_string(), + "Ensure adequate resources during peak times".to_string(), + ], + }, + ] + } + "security" => { + vec![ + Insight { + title: "Failed Login Attempts".to_string(), + description: "Unusual number of failed login attempts detected".to_string(), + insight_type: "security".to_string(), + severity: "high".to_string(), + data: serde_json::json!({ + "failed_attempts": 127, + "affected_accounts": 15, + "suspicious_ips": ["192.168.1.1", "10.0.0.5"] + }), + recommendations: vec![ + "Enable two-factor authentication".to_string(), + "Review and block suspicious IP addresses".to_string(), + "Notify affected users".to_string(), + ], + }, + ] + } + _ => vec![], + }; + + let response = InsightsResponse { + insights, + confidence_score: 0.85, + generated_at: now, + }; + + Ok(Json(response)) +} + +/// POST /analytics/trends/analyze - Analyze trends +pub async fn analyze_trends( + State(state): State>, + Query(params): Query, +) -> Result, (StatusCode, Json)> { + let start_date = DateTime::parse_from_rfc3339(¶ms.start_date) + .unwrap_or_else(|_| { + Utc::now() + .checked_sub_signed(chrono::Duration::days(30)) + .unwrap() + .into() + }) + .with_timezone(&Utc); + + let end_date = DateTime::parse_from_rfc3339(¶ms.end_date) + .unwrap_or_else(|_| Utc::now().into()) + .with_timezone(&Utc); + + let data_points = vec![ + TrendDataPoint { + timestamp: start_date, + value: 850.0, + }, + TrendDataPoint { + timestamp: start_date.checked_add_signed(chrono::Duration::days(5)).unwrap(), + value: 920.0, + }, + TrendDataPoint { + timestamp: start_date.checked_add_signed(chrono::Duration::days(10)).unwrap(), + value: 
880.0, + }, + TrendDataPoint { + timestamp: start_date.checked_add_signed(chrono::Duration::days(15)).unwrap(), + value: 950.0, + }, + TrendDataPoint { + timestamp: end_date, + value: 892.0, + }, + ]; + + let forecast = vec![ + TrendDataPoint { + timestamp: end_date.checked_add_signed(chrono::Duration::days(5)).unwrap(), + value: 910.0, + }, + TrendDataPoint { + timestamp: end_date.checked_add_signed(chrono::Duration::days(10)).unwrap(), + value: 935.0, + }, + ]; + + let trends = TrendsResponse { + metric: params.metric, + trend_direction: "upward".to_string(), + change_percentage: 4.9, + data_points, + forecast: Some(forecast), + }; + + Ok(Json(trends)) +} + +/// POST /analytics/export - Export analytics data +pub async fn export_analytics( + State(state): State>, + Json(req): Json, +) -> Result, (StatusCode, Json)> { + let export_id = Uuid::new_v4(); + let now = Utc::now(); + let expires_at = now.checked_add_signed(chrono::Duration::hours(24)).unwrap(); + + let export = ExportResponse { + export_id, + format: req.format, + size_bytes: 1024 * 1024 * 5, + download_url: format!("/analytics/exports/{}/download", export_id), + expires_at, + }; + + Ok(Json(export)) +} diff --git a/src/core/shared/mod.rs b/src/core/shared/mod.rs new file mode 100644 index 000000000..cb05772f7 --- /dev/null +++ b/src/core/shared/mod.rs @@ -0,0 +1,5 @@ +pub mod admin; +pub mod analytics; +pub mod models; +pub mod state; +pub mod utils; diff --git a/src/core/shared/models.rs b/src/core/shared/models.rs new file mode 100644 index 000000000..aec2ffe3e --- /dev/null +++ b/src/core/shared/models.rs @@ -0,0 +1,351 @@ +use chrono::{DateTime, Utc}; +use diesel::prelude::*; +use serde::{Deserialize, Serialize}; +use uuid::Uuid; +#[derive(Debug, Clone, Copy, PartialEq)] +pub enum TriggerKind { + Scheduled = 0, + TableUpdate = 1, + TableInsert = 2, + TableDelete = 3, +} +impl TriggerKind { + pub fn _from_i32(value: i32) -> Option { + match value { + 0 => Some(Self::Scheduled), + 1 => 
Some(Self::TableUpdate), + 2 => Some(Self::TableInsert), + 3 => Some(Self::TableDelete), + _ => None, + } + } +} +#[derive(Debug, Queryable, Serialize, Deserialize, Identifiable)] +#[diesel(table_name = system_automations)] +pub struct Automation { + pub id: Uuid, + pub bot_id: Uuid, + pub kind: i32, + pub target: Option, + pub schedule: Option, + pub param: String, + pub is_active: bool, + pub last_triggered: Option>, +} +#[derive(Debug, Clone, Serialize, Deserialize, Queryable, Identifiable, Selectable)] +#[diesel(table_name = user_sessions)] +pub struct UserSession { + pub id: Uuid, + pub user_id: Uuid, + pub bot_id: Uuid, + pub title: String, + pub context_data: serde_json::Value, + pub current_tool: Option, + pub created_at: chrono::DateTime, + pub updated_at: chrono::DateTime, +} +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct UserMessage { + pub bot_id: String, + pub user_id: String, + pub session_id: String, + pub channel: String, + pub content: String, + pub message_type: i32, + pub media_url: Option, + pub timestamp: DateTime, + pub context_name: Option, +} +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct Suggestion { + pub text: String, + pub context: String, +} +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct BotResponse { + pub bot_id: String, + pub user_id: String, + pub session_id: String, + pub channel: String, + pub content: String, + pub message_type: i32, + pub stream_token: Option, + pub is_complete: bool, + pub suggestions: Vec, + pub context_name: Option, + pub context_length: usize, + pub context_max_length: usize, +} +impl BotResponse { + pub fn from_string_ids( + bot_id: &str, + session_id: &str, + user_id: &str, + content: String, + channel: String, + ) -> Result { + Ok(Self { + bot_id: bot_id.to_string(), + user_id: user_id.to_string(), + session_id: session_id.to_string(), + channel, + content, + message_type: 2, + stream_token: None, + is_complete: true, + suggestions: Vec::new(), + context_name: 
None, + context_length: 0, + context_max_length: 0, + }) + } +} +#[derive(Debug, Clone, Serialize, Deserialize, Queryable, Identifiable, Insertable)] +#[diesel(table_name = bot_memories)] +pub struct BotMemory { + pub id: Uuid, + pub bot_id: Uuid, + pub key: String, + pub value: String, + pub created_at: chrono::DateTime, + pub updated_at: chrono::DateTime, +} +pub mod schema { + diesel::table! { + organizations (org_id) { + org_id -> Uuid, + name -> Text, + slug -> Text, + created_at -> Timestamptz, + } + } + diesel::table! { + bots (id) { + id -> Uuid, + name -> Varchar, + description -> Nullable, + llm_provider -> Varchar, + llm_config -> Jsonb, + context_provider -> Varchar, + context_config -> Jsonb, + created_at -> Timestamptz, + updated_at -> Timestamptz, + is_active -> Nullable, + tenant_id -> Nullable, + } + } + diesel::table! { + system_automations (id) { + id -> Uuid, + bot_id -> Uuid, + kind -> Int4, + target -> Nullable, + schedule -> Nullable, + param -> Text, + is_active -> Bool, + last_triggered -> Nullable, + } + } + diesel::table! { + user_sessions (id) { + id -> Uuid, + user_id -> Uuid, + bot_id -> Uuid, + title -> Text, + context_data -> Jsonb, + current_tool -> Nullable, + created_at -> Timestamptz, + updated_at -> Timestamptz, + } + } + diesel::table! { + message_history (id) { + id -> Uuid, + session_id -> Uuid, + user_id -> Uuid, + role -> Int4, + content_encrypted -> Text, + message_type -> Int4, + message_index -> Int8, + created_at -> Timestamptz, + } + } + diesel::table! { + users (id) { + id -> Uuid, + username -> Text, + email -> Text, + password_hash -> Text, + is_active -> Bool, + created_at -> Timestamptz, + updated_at -> Timestamptz, + } + } + diesel::table! { + clicks (id) { + id -> Uuid, + campaign_id -> Text, + email -> Text, + updated_at -> Timestamptz, + } + } + diesel::table! 
{ + bot_memories (id) { + id -> Uuid, + bot_id -> Uuid, + key -> Text, + value -> Text, + created_at -> Timestamptz, + updated_at -> Timestamptz, + } + } + diesel::table! { + kb_documents (id) { + id -> Text, + bot_id -> Text, + user_id -> Text, + collection_name -> Text, + file_path -> Text, + file_size -> Integer, + file_hash -> Text, + first_published_at -> Text, + last_modified_at -> Text, + indexed_at -> Nullable, + metadata -> Text, + created_at -> Text, + updated_at -> Text, + } + } + diesel::table! { + basic_tools (id) { + id -> Text, + bot_id -> Text, + tool_name -> Text, + file_path -> Text, + ast_path -> Text, + file_hash -> Text, + mcp_json -> Nullable, + tool_json -> Nullable, + compiled_at -> Text, + is_active -> Integer, + created_at -> Text, + updated_at -> Text, + } + } + diesel::table! { + kb_collections (id) { + id -> Text, + bot_id -> Text, + user_id -> Text, + name -> Text, + folder_path -> Text, + qdrant_collection -> Text, + document_count -> Integer, + is_active -> Integer, + created_at -> Text, + updated_at -> Text, + } + } + diesel::table! { + user_kb_associations (id) { + id -> Text, + user_id -> Text, + bot_id -> Text, + kb_name -> Text, + is_website -> Integer, + website_url -> Nullable, + created_at -> Text, + updated_at -> Text, + } + } + diesel::table! { + session_tool_associations (id) { + id -> Text, + session_id -> Text, + tool_name -> Text, + added_at -> Text, + } + } + diesel::table! { + bot_configuration (id) { + id -> Uuid, + bot_id -> Uuid, + config_key -> Text, + config_value -> Text, + is_encrypted -> Bool, + config_type -> Text, + created_at -> Timestamptz, + updated_at -> Timestamptz, + } + } + diesel::table! 
{ + user_email_accounts (id) { + id -> Uuid, + user_id -> Uuid, + email -> Varchar, + display_name -> Nullable, + imap_server -> Varchar, + imap_port -> Int4, + smtp_server -> Varchar, + smtp_port -> Int4, + username -> Varchar, + password_encrypted -> Text, + is_primary -> Bool, + is_active -> Bool, + created_at -> Timestamptz, + updated_at -> Timestamptz, + } + } + diesel::table! { + email_drafts (id) { + id -> Uuid, + user_id -> Uuid, + account_id -> Uuid, + to_address -> Text, + cc_address -> Nullable, + bcc_address -> Nullable, + subject -> Nullable, + body -> Nullable, + attachments -> Jsonb, + created_at -> Timestamptz, + updated_at -> Timestamptz, + } + } + diesel::table! { + email_folders (id) { + id -> Uuid, + account_id -> Uuid, + folder_name -> Varchar, + folder_path -> Varchar, + unread_count -> Int4, + total_count -> Int4, + last_synced -> Nullable, + created_at -> Timestamptz, + updated_at -> Timestamptz, + } + } + diesel::table! { + user_preferences (id) { + id -> Uuid, + user_id -> Uuid, + preference_key -> Varchar, + preference_value -> Jsonb, + created_at -> Timestamptz, + updated_at -> Timestamptz, + } + } + diesel::table! 
{ + user_login_tokens (id) { + id -> Uuid, + user_id -> Uuid, + token_hash -> Varchar, + expires_at -> Timestamptz, + created_at -> Timestamptz, + last_used -> Timestamptz, + user_agent -> Nullable, + ip_address -> Nullable, + is_active -> Bool, + } + } +} +pub use schema::*; diff --git a/src/core/shared/schema.rs b/src/core/shared/schema.rs new file mode 100644 index 000000000..e69de29bb diff --git a/src/core/shared/shared.test.rs b/src/core/shared/shared.test.rs new file mode 100644 index 000000000..e90fe84c2 --- /dev/null +++ b/src/core/shared/shared.test.rs @@ -0,0 +1,25 @@ +#[cfg(test)] +mod tests { + use super::*; + use crate::tests::test_util; + #[test] + fn test_shared_module() { + test_util::setup(); + assert!(true, "Basic shared module test"); + } + #[test] + fn test_models() { + test_util::setup(); + assert!(true, "Models placeholder test"); + } + #[test] + fn test_state() { + test_util::setup(); + assert!(true, "State placeholder test"); + } + #[test] + fn test_utils() { + test_util::setup(); + assert!(true, "Utils placeholder test"); + } +} diff --git a/src/core/shared/state.rs b/src/core/shared/state.rs new file mode 100644 index 000000000..ca964fcec --- /dev/null +++ b/src/core/shared/state.rs @@ -0,0 +1,87 @@ +#[cfg(feature = "directory")] +use crate::directory::AuthService; +use crate::core::bot::channels::{ChannelAdapter, VoiceAdapter, WebChannelAdapter}; +use crate::core::config::AppConfig; +#[cfg(feature = "llm")] +use crate::llm::LLMProvider; +use crate::core::session::SessionManager; +use crate::shared::models::BotResponse; +use crate::shared::utils::DbPool; +#[cfg(feature = "drive")] +use aws_sdk_s3::Client as S3Client; +#[cfg(feature = "redis-cache")] +use redis::Client as RedisClient; +use std::collections::HashMap; +use std::sync::Arc; +use tokio::sync::mpsc; + +pub struct AppState { + #[cfg(feature = "drive")] + pub drive: Option, + #[cfg(feature = "redis-cache")] + pub cache: Option>, + pub bucket_name: String, + pub config: Option, + 
pub conn: DbPool, + pub session_manager: Arc>, + #[cfg(feature = "llm")] + pub llm_provider: Arc, + #[cfg(feature = "directory")] + pub auth_service: Arc>, + pub channels: Arc>>>, + pub response_channels: Arc>>>, + pub web_adapter: Arc, + pub voice_adapter: Arc, +} +impl Clone for AppState { + fn clone(&self) -> Self { + Self { + #[cfg(feature = "drive")] + drive: self.drive.clone(), + bucket_name: self.bucket_name.clone(), + config: self.config.clone(), + conn: self.conn.clone(), + #[cfg(feature = "redis-cache")] + cache: self.cache.clone(), + session_manager: Arc::clone(&self.session_manager), + #[cfg(feature = "llm")] + llm_provider: Arc::clone(&self.llm_provider), + #[cfg(feature = "directory")] + auth_service: Arc::clone(&self.auth_service), + channels: Arc::clone(&self.channels), + response_channels: Arc::clone(&self.response_channels), + web_adapter: Arc::clone(&self.web_adapter), + voice_adapter: Arc::clone(&self.voice_adapter), + } + } +} + +impl std::fmt::Debug for AppState { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + let mut debug = f.debug_struct("AppState"); + + #[cfg(feature = "drive")] + debug.field("drive", &self.drive.is_some()); + + #[cfg(feature = "redis-cache")] + debug.field("cache", &self.cache.is_some()); + + debug.field("bucket_name", &self.bucket_name) + .field("config", &self.config) + .field("conn", &"DbPool") + .field("session_manager", &"Arc>"); + + #[cfg(feature = "llm")] + debug.field("llm_provider", &"Arc"); + + #[cfg(feature = "directory")] + debug.field("auth_service", &"Arc>"); + + debug + .field("channels", &"Arc>") + .field("response_channels", &"Arc>") + .field("web_adapter", &self.web_adapter) + .field("voice_adapter", &self.voice_adapter) + .finish() + } +} diff --git a/src/core/shared/utils.rs b/src/core/shared/utils.rs new file mode 100644 index 000000000..d5c543839 --- /dev/null +++ b/src/core/shared/utils.rs @@ -0,0 +1,164 @@ +use anyhow::{Context, Result}; +use diesel::Connection; +use 
diesel::{ + r2d2::{ConnectionManager, Pool}, + PgConnection, +}; +use futures_util::StreamExt; +use indicatif::{ProgressBar, ProgressStyle}; +use reqwest::Client; +use rhai::{Array, Dynamic}; +use serde_json::Value; +use smartstring::SmartString; +use std::error::Error; +use tokio::fs::File as TokioFile; +use tokio::io::AsyncWriteExt; +use aws_sdk_s3::{Client as S3Client, config::Builder as S3ConfigBuilder}; +use aws_config::BehaviorVersion; +use crate::config::DriveConfig; +pub async fn create_s3_operator(config: &DriveConfig) -> Result> { + let endpoint = if !config.server.ends_with('/') { + format!("{}/", config.server) + } else { + config.server.clone() + }; + let base_config = aws_config::defaults(BehaviorVersion::latest()) + .endpoint_url(endpoint) + .region("auto") + .credentials_provider( + aws_sdk_s3::config::Credentials::new( + config.access_key.clone(), + config.secret_key.clone(), + None, + None, + "static", + ) + ) + .load() + .await; + let s3_config = S3ConfigBuilder::from(&base_config) + .force_path_style(true) + .build(); + Ok(S3Client::from_conf(s3_config)) +} +pub fn json_value_to_dynamic(value: &Value) -> Dynamic { + match value { + Value::Null => Dynamic::UNIT, + Value::Bool(b) => Dynamic::from(*b), + Value::Number(n) => { + if let Some(i) = n.as_i64() { + Dynamic::from(i) + } else if let Some(f) = n.as_f64() { + Dynamic::from(f) + } else { + Dynamic::UNIT + } + } + Value::String(s) => Dynamic::from(s.clone()), + Value::Array(arr) => Dynamic::from( + arr.iter() + .map(json_value_to_dynamic) + .collect::(), + ), + Value::Object(obj) => Dynamic::from( + obj.iter() + .map(|(k, v)| (SmartString::from(k), json_value_to_dynamic(v))) + .collect::(), + ), + } +} +pub fn to_array(value: Dynamic) -> Array { + if value.is_array() { + value.cast::() + } else if value.is_unit() || value.is::<()>() { + Array::new() + } else { + Array::from([value]) + } +} +pub async fn download_file(url: &str, output_path: &str) -> Result<(), anyhow::Error> { + let url = 
url.to_string(); + let output_path = output_path.to_string(); + let download_handle = tokio::spawn(async move { + let client = Client::builder() + .user_agent("Mozilla/5.0 (compatible; BotServer/1.0)") + .build()?; + let response = client.get(&url).send().await?; + if response.status().is_success() { + let total_size = response.content_length().unwrap_or(0); + let pb = ProgressBar::new(total_size); + pb.set_style(ProgressStyle::default_bar() + .template("{msg}\n{spinner:.green} [{elapsed_precise}] [{bar:40.cyan/blue}] {bytes}/{total_bytes} ({eta})") + .unwrap() + .progress_chars("#>-")); + pb.set_message(format!("Downloading {}", url)); + let mut file = TokioFile::create(&output_path).await?; + let mut downloaded: u64 = 0; + let mut stream = response.bytes_stream(); + while let Some(chunk_result) = stream.next().await { + let chunk = chunk_result?; + file.write_all(&chunk).await?; + downloaded += chunk.len() as u64; + pb.set_position(downloaded); + } + pb.finish_with_message(format!("Downloaded {}", output_path)); + Ok(()) + } else { + Err(anyhow::anyhow!("HTTP {}: {}", response.status(), url)) + } + }); + download_handle.await? +} +pub fn parse_filter(filter_str: &str) -> Result<(String, Vec), Box> { + let parts: Vec<&str> = filter_str.split('=').collect(); + if parts.len() != 2 { + return Err("Invalid filter format. 
Expected 'KEY=VALUE'".into()); + } + let column = parts[0].trim(); + let value = parts[1].trim(); + if !column + .chars() + .all(|c| c.is_ascii_alphanumeric() || c == '_') + { + return Err("Invalid column name in filter".into()); + } + Ok((format!("{} = $1", column), vec![value.to_string()])) +} +pub fn estimate_token_count(text: &str) -> usize { + let char_count = text.chars().count(); + (char_count / 4).max(1) +} +pub fn establish_pg_connection() -> Result { + let database_url = std::env::var("DATABASE_URL").unwrap(); + PgConnection::establish(&database_url) + .with_context(|| format!("Failed to connect to database at {}", database_url)) +} +pub type DbPool = Pool>; +pub fn create_conn() -> Result { + let database_url = std::env::var("DATABASE_URL") + .unwrap(); + let manager = ConnectionManager::::new(database_url); + Pool::builder().build(manager) +} +pub fn parse_database_url(url: &str) -> (String, String, String, u32, String) { + if let Some(stripped) = url.strip_prefix("postgres://") { + let parts: Vec<&str> = stripped.split('@').collect(); + if parts.len() == 2 { + let user_pass: Vec<&str> = parts[0].split(':').collect(); + let host_db: Vec<&str> = parts[1].split('/').collect(); + if user_pass.len() >= 2 && host_db.len() >= 2 { + let username = user_pass[0].to_string(); + let password = user_pass[1].to_string(); + let host_port: Vec<&str> = host_db[0].split(':').collect(); + let server = host_port[0].to_string(); + let port = host_port + .get(1) + .and_then(|p| p.parse().ok()) + .unwrap_or(5432); + let database = host_db[1].to_string(); + return (username, password, server, port, database); + } + } + } + ("".to_string(), "".to_string(), "".to_string(), 5432, "".to_string()) +} diff --git a/src/core/web_server/mod.rs b/src/core/web_server/mod.rs new file mode 100644 index 000000000..3abea8423 --- /dev/null +++ b/src/core/web_server/mod.rs @@ -0,0 +1,46 @@ +use axum::{ + http::StatusCode, + response::{Html, IntoResponse}, + routing::get, + Router, +}; +use 
log::error; +use std::{fs, path::PathBuf}; +use tower_http::services::ServeDir; + +pub async fn index() -> impl IntoResponse { + match fs::read_to_string("web/desktop/index.html") { + Ok(html) => (StatusCode::OK, [("content-type", "text/html")], Html(html)), + Err(e) => { + error!("Failed to load index page: {}", e); + ( + StatusCode::INTERNAL_SERVER_ERROR, + [("content-type", "text/plain")], + Html("Failed to load index page".to_string()), + ) + } + } +} + +pub fn configure_router() -> Router { + let static_path = PathBuf::from("./web/desktop"); + + Router::new() + // Serve all JS files + .nest_service("/js", ServeDir::new(static_path.join("js"))) + // Serve CSS files + .nest_service("/css", ServeDir::new(static_path.join("css"))) + // Serve public assets (themes, etc.) + .nest_service("/public", ServeDir::new(static_path.join("public"))) + .nest_service("/drive", ServeDir::new(static_path.join("drive"))) + .nest_service("/chat", ServeDir::new(static_path.join("chat"))) + .nest_service("/mail", ServeDir::new(static_path.join("mail"))) + .nest_service("/tasks", ServeDir::new(static_path.join("tasks"))) + // Fallback: serve static files and index.html for SPA routing + .fallback_service( + ServeDir::new(static_path.clone()).fallback( + ServeDir::new(static_path.clone()).append_index_html_on_directories(true), + ), + ) + .route("/", get(index)) +} diff --git a/src/core/web_server/web_server.test.rs b/src/core/web_server/web_server.test.rs new file mode 100644 index 000000000..54dcb39fa --- /dev/null +++ b/src/core/web_server/web_server.test.rs @@ -0,0 +1,15 @@ +#[cfg(test)] +mod tests { + use super::*; + use crate::tests::test_util; + #[test] + fn test_web_server_module() { + test_util::setup(); + assert!(true, "Basic web server module test"); + } + #[test] + fn test_server_routes() { + test_util::setup(); + assert!(true, "Server routes placeholder test"); + } +} diff --git a/src/desktop/drive.rs b/src/desktop/drive.rs new file mode 100644 index 000000000..3daffce0d 
--- /dev/null +++ b/src/desktop/drive.rs @@ -0,0 +1,82 @@ +use serde::{Deserialize, Serialize}; +use std::fs; +use std::path::{Path, PathBuf}; +use tauri::{Emitter, Window}; +#[derive(Debug, Serialize, Deserialize)] +pub struct FileItem { + name: String, + path: String, + is_dir: bool, +} +#[tauri::command] +pub fn list_files(path: &str) -> Result, String> { + let base_path = Path::new(path); + let mut files = Vec::new(); + if !base_path.exists() { + return Err("Path does not exist".into()); + } + for entry in fs::read_dir(base_path).map_err(|e| e.to_string())? { + let entry = entry.map_err(|e| e.to_string())?; + let path = entry.path(); + let name = path + .file_name() + .and_then(|n| n.to_str()) + .unwrap_or("") + .to_string(); + files.push(FileItem { + name, + path: path.to_str().unwrap_or("").to_string(), + is_dir: path.is_dir(), + }); + } + files.sort_by(|a, b| { + if a.is_dir && !b.is_dir { + std::cmp::Ordering::Less + } else if !a.is_dir && b.is_dir { + std::cmp::Ordering::Greater + } else { + a.name.cmp(&b.name) + } + }); + Ok(files) +} +#[tauri::command] +pub async fn upload_file(window: Window, src_path: String, dest_path: String) -> Result<(), String> { + use std::fs::File; + use std::io::{Read, Write}; + let src = PathBuf::from(&src_path); + let dest_dir = PathBuf::from(&dest_path); + let dest = dest_dir.join(src.file_name().ok_or("Invalid source file")?); + if !dest_dir.exists() { + fs::create_dir_all(&dest_dir).map_err(|e| e.to_string())?; + } + let mut source_file = File::open(&src).map_err(|e| e.to_string())?; + let mut dest_file = File::create(&dest).map_err(|e| e.to_string())?; + let file_size = source_file.metadata().map_err(|e| e.to_string())?.len(); + let mut buffer = [0; 8192]; + let mut total_read = 0; + loop { + let bytes_read = source_file.read(&mut buffer).map_err(|e| e.to_string())?; + if bytes_read == 0 { + break; + } + dest_file + .write_all(&buffer[..bytes_read]) + .map_err(|e| e.to_string())?; + total_read += bytes_read as u64; + let 
progress = (total_read as f64 / file_size as f64) * 100.0; + window + .emit("upload_progress", progress) + .map_err(|e| e.to_string())?; + } + Ok(()) +} +#[tauri::command] +pub fn create_folder(path: String, name: String) -> Result<(), String> { + let full_path = Path::new(&path).join(&name); + if full_path.exists() { + return Err("Folder already exists".into()); + } + fs::create_dir(full_path).map_err(|e| e.to_string())?; + Ok(()) +} diff --git a/src/desktop/local-sync.rs b/src/desktop/local-sync.rs new file mode 100644 index 000000000..6f7655356 --- /dev/null +++ b/src/desktop/local-sync.rs @@ -0,0 +1,391 @@ +use dioxus::prelude::*; +use dioxus_desktop::{use_window, LogicalSize}; +use std::env; +use std::fs::{File, OpenOptions, create_dir_all}; +use std::io::{BufRead, BufReader, Write}; +use std::path::Path; +use std::process::{Command as ProcCommand, Child, Stdio}; +use std::sync::{Arc, Mutex}; +use std::thread; +use std::time::{Duration, Instant}; +use notify_rust::Notification; +use serde::{Deserialize, Serialize}; +use serde_json::Value; +#[derive(Debug, Clone)] +struct AppState { + name: String, + access_key: String, + secret_key: String, + status_text: String, + sync_processes: Arc>>, + sync_active: Arc>, + sync_statuses: Arc>>, + show_config_dialog: bool, + show_about_dialog: bool, + current_screen: Screen, +} +#[derive(Debug, Clone)] +enum Screen { + Main, + Status, +} +#[derive(Debug, Clone, Serialize, Deserialize)] +struct RcloneConfig { + name: String, + remote_path: String, + local_path: String, + access_key: String, + secret_key: String, +} +#[derive(Debug, Clone, Serialize, Deserialize)] +struct SyncStatus { + name: String, + status: String, + transferred: String, + bytes: String, + errors: usize, + last_updated: String, +} +#[derive(Debug, Clone)] +enum Message { + NameChanged(String), + AccessKeyChanged(String), + SecretKeyChanged(String), + SaveConfig, + StartSync, + StopSync, + UpdateStatus(Vec), + ShowConfigDialog(bool), + 
ShowAboutDialog(bool), + ShowStatusScreen, + BackToMain, + None, +} +fn main() { + dioxus_desktop::launch(app); +} +fn app(cx: Scope) -> Element { + let window = use_window(); + window.set_inner_size(LogicalSize::new(800, 600)); + let state = use_ref(cx, || AppState { + name: String::new(), + access_key: String::new(), + secret_key: String::new(), + status_text: "Enter credentials to set up sync".to_string(), + sync_processes: Arc::new(Mutex::new(Vec::new())), + sync_active: Arc::new(Mutex::new(false)), + sync_statuses: Arc::new(Mutex::new(Vec::new())), + show_config_dialog: false, + show_about_dialog: false, + current_screen: Screen::Main, + }); + use_future( async move { + let state = state.clone(); + async move { + let mut last_check = Instant::now(); + let check_interval = Duration::from_secs(5); + loop { + tokio::time::sleep(Duration::from_secs(1)).await; + if !*state.read().sync_active.lock().unwrap() { + continue; + } + if last_check.elapsed() < check_interval { + continue; + } + last_check = Instant::now(); + match read_rclone_configs() { + Ok(configs) => { + let mut new_statuses = Vec::new(); + for config in configs { + match get_rclone_status(&config.name) { + Ok(status) => new_statuses.push(status), + Err(e) => eprintln!("Failed to get status: {}", e), + } + } + *state.write().sync_statuses.lock().unwrap() = new_statuses.clone(); + state.write().status_text = format!("Syncing {} repositories...", new_statuses.len()); + } + Err(e) => eprintln!("Failed to read configs: {}", e), + } + } + } + }); + cx.render(rsx! { + div { + class: "app", + div { + class: "menu-bar", + button { + onclick: move |_| state.write().show_config_dialog = true, + "Add Sync Configuration" + } + button { + onclick: move |_| state.write().show_about_dialog = true, + "About" + } + } + {match state.read().current_screen { + Screen::Main => rsx! 
{ + div { + class: "main-screen", + h1 { "General Bots" } + p { "{state.read().status_text}" } + button { + onclick: move |_| start_sync(&state), + "Start Sync" + } + button { + onclick: move |_| stop_sync(&state), + "Stop Sync" + } + button { + onclick: move |_| state.write().current_screen = Screen::Status, + "Show Status" + } + } + }, + Screen::Status => rsx! { + div { + class: "status-screen", + h1 { "Sync Status" } + div { + class: "status-list", + for status in state.read().sync_statuses.lock().unwrap().iter() { + div { + class: "status-item", + h2 { "{status.name}" } + p { "Status: {status.status}" } + p { "Transferred: {status.transferred}" } + p { "Bytes: {status.bytes}" } + p { "Errors: {status.errors}" } + p { "Last Updated: {status.last_updated}" } + } + } + } + button { + onclick: move |_| state.write().current_screen = Screen::Main, + "Back" + } + } + } + }} + if state.read().show_config_dialog { + div { + class: "dialog", + h2 { "Add Sync Configuration" } + input { + value: "{state.read().name}", + oninput: move |e| state.write().name = e.value.clone(), + placeholder: "Enter sync name", + } + input { + value: "{state.read().access_key}", + oninput: move |e| state.write().access_key = e.value.clone(), + placeholder: "Enter access key", + } + input { + value: "{state.read().secret_key}", + oninput: move |e| state.write().secret_key = e.value.clone(), + placeholder: "Enter secret key", + } + button { + onclick: move |_| { + save_config(&state); + state.write().show_config_dialog = false; + }, + "Save" + } + button { + onclick: move |_| state.write().show_config_dialog = false, + "Cancel" + } + } + } + if state.read().show_about_dialog { + div { + class: "dialog", + h2 { "About General Bots" } + p { "Version: 1.0.0" } + p { "A professional-grade sync tool for OneDrive/Dropbox-like functionality." 
} + button { + onclick: move |_| state.write().show_about_dialog = false, + "Close" + } + } + } + } + }) +} +fn save_config(state: &UseRef) { + if state.read().name.is_empty() || state.read().access_key.is_empty() || state.read().secret_key.is_empty() { + state.write_with(|state| state.status_text = "All fields are required!".to_string()); + return; + } + let new_config = RcloneConfig { + name: state.read().name.clone(), + remote_path: format!("s3: + local_path: Path::new(&env::var("HOME").unwrap()).join("General Bots").join(&state.read().name).to_string_lossy().to_string(), + access_key: state.read().access_key.clone(), + secret_key: state.read().secret_key.clone(), + }; + if let Err(e) = save_rclone_config(&new_config) { + state.write_with(|state| state.status_text = format!("Failed to save config: {}", e)); + } else { + state.write_with(|state| state.status_text = "New sync saved!".to_string()); + } +} +fn start_sync(state: &UseRef) { + let mut processes = state.write_with(|state| state.sync_processes.lock().unwrap()); + processes.clear(); + match read_rclone_configs() { + Ok(configs) => { + for config in configs { + match run_sync(&config) { + Ok(child) => processes.push(child), + Err(e) => eprintln!("Failed to start sync: {}", e), + } + } + state.write_with(|state| *state.sync_active.lock().unwrap() = true); + state.write_with(|state| state.status_text = format!("Syncing with {} configurations.", processes.len())); + } + Err(e) => state.write_with(|state| state.status_text = format!("Failed to read configurations: {}", e)), + } +} +fn stop_sync(state: &UseRef) { + let mut processes = state.write_with(|state| state.sync_processes.lock().unwrap()); + for child in processes.iter_mut() { + let _ = child.kill(); + } + processes.clear(); + state.write_with(|state| *state.sync_active.lock().unwrap() = false); + state.write_with(|state| state.status_text = "Sync stopped.".to_string()); +} +fn save_rclone_config(config: &RcloneConfig) -> Result<(), String> { + let 
home_dir = env::var("HOME").map_err(|_| "HOME environment variable not set".to_string())?; + let config_path = Path::new(&home_dir).join(".config/rclone/rclone.conf"); + let mut file = OpenOptions::new() + .create(true) + .append(true) + .open(&config_path) + .map_err(|e| format!("Failed to open config file: {}", e))?; + writeln!(file, "[{}]", config.name) + .and_then(|_| writeln!(file, "type = s3")) + .and_then(|_| writeln!(file, "provider = Other")) + .and_then(|_| writeln!(file, "access_key_id = {}", config.access_key)) + .and_then(|_| writeln!(file, "secret_access_key = {}", config.secret_key)) + .and_then(|_| writeln!(file, "endpoint = https: + .and_then(|_| writeln!(file, "acl = private")) + .map_err(|e| format!("Failed to write config: {}", e)) +} +fn read_rclone_configs() -> Result, String> { + let home_dir = env::var("HOME").map_err(|_| "HOME environment variable not set".to_string())?; + let config_path = Path::new(&home_dir).join(".config/rclone/rclone.conf"); + if !config_path.exists() { + return Ok(Vec::new()); + } + let file = File::open(&config_path).map_err(|e| format!("Failed to open config file: {}", e))?; + let reader = BufReader::new(file); + let mut configs = Vec::new(); + let mut current_config: Option = None; + for line in reader.lines() { + let line = line.map_err(|e| format!("Failed to read line: {}", e))?; + if line.is_empty() || line.starts_with('#') { + continue; + } + if line.starts_with('[') && line.ends_with(']') { + if let Some(config) = current_config.take() { + configs.push(config); + } + let name = line[1..line.len()-1].to_string(); + current_config = Some(RcloneConfig { + name: name.clone(), + remote_path: format!("s3: + local_path: Path::new(&home_dir).join("General Bots").join(&name).to_string_lossy().to_string(), + access_key: String::new(), + secret_key: String::new(), + }); + } else if let Some(ref mut config) = current_config { + if let Some(pos) = line.find('=') { + let key = line[..pos].trim().to_string(); + let value = 
line[pos+1..].trim().to_string(); + match key.as_str() { + "access_key_id" => config.access_key = value, + "secret_access_key" => config.secret_key = value, + _ => {} + } + } + } + } + if let Some(config) = current_config { + configs.push(config); + } + Ok(configs) +} +fn run_sync(config: &RcloneConfig) -> Result { + let local_path = Path::new(&config.local_path); + if !local_path.exists() { + create_dir_all(local_path)?; + } + ProcCommand::new("rclone") + .arg("sync") + .arg(&config.remote_path) + .arg(&config.local_path) + .arg("--no-check-certificate") + .arg("--verbose") + .arg("--rc") + .stdout(Stdio::null()) + .stderr(Stdio::null()) + .spawn() +} +fn get_rclone_status(remote_name: &str) -> Result { + let output = ProcCommand::new("rclone") + .arg("rc") + .arg("core/stats") + .arg("--json") + .output() + .map_err(|e| format!("Failed to execute rclone rc: {}", e))?; + if !output.status.success() { + return Err(format!("rclone rc failed: {}", String::from_utf8_lossy(&output.stderr))); + } + let json = String::from_utf8_lossy(&output.stdout); + let parsed: Result = serde_json::from_str(&json); + match parsed { + Ok(value) => { + let transferred = value.get("bytes").and_then(|v| v.as_u64()).unwrap_or(0); + let errors = value.get("errors").and_then(|v| v.as_u64()).unwrap_or(0); + let speed = value.get("speed").and_then(|v| v.as_f64()).unwrap_or(0.0); + let status = if errors > 0 { + "Error occurred".to_string() + } else if speed > 0.0 { + "Transferring".to_string() + } else if transferred > 0 { + "Completed".to_string() + } else { + "Initializing".to_string() + }; + Ok(SyncStatus { + name: remote_name.to_string(), + status, + transferred: format_bytes(transferred), + bytes: format!("{}/s", format_bytes(speed as u64)), + errors: errors as usize, + last_updated: chrono::Local::now().format("%H:%M:%S").to_string(), + }) + } + Err(e) => Err(format!("Failed to parse rclone status: {}", e)), + } +} +fn format_bytes(bytes: u64) -> String { + const KB: u64 = 1024; + const 
MB: u64 = KB * 1024; + const GB: u64 = MB * 1024; + if bytes >= GB { + format!("{:.2} GB", bytes as f64 / GB as f64) + } else if bytes >= MB { + format!("{:.2} MB", bytes as f64 / MB as f64) + } else if bytes >= KB { + format!("{:.2} KB", bytes as f64 / KB as f64) + } else { + format!("{} B", bytes) + } +} \ No newline at end of file diff --git a/src/desktop/mod.rs b/src/desktop/mod.rs new file mode 100644 index 000000000..fa030d16d --- /dev/null +++ b/src/desktop/mod.rs @@ -0,0 +1,3 @@ +#![cfg_attr(not(debug_assertions), windows_subsystem = "windows")] +pub mod drive; +pub mod sync; \ No newline at end of file diff --git a/src/desktop/stream.rs b/src/desktop/stream.rs new file mode 100644 index 000000000..3cb662799 --- /dev/null +++ b/src/desktop/stream.rs @@ -0,0 +1,23 @@ +use ratatui::{ + style::{Color, Style}, + widgets::{Block, Borders, Gauge}, +}; +pub struct StreamProgress { + pub progress: f64, + pub status: String, +} +pub fn render_progress_bar(progress: &StreamProgress) -> Gauge { + let color = if progress.progress >= 1.0 { + Color::Green + } else { + Color::Blue + }; + Gauge::default() + .block( + Block::default() + .title(format!("Stream Progress: {}", progress.status)) + .borders(Borders::ALL), + ) + .gauge_style(Style::default().fg(color)) + .percent((progress.progress * 100.0) as u16) +} diff --git a/src/desktop/sync.rs b/src/desktop/sync.rs new file mode 100644 index 000000000..979a60685 --- /dev/null +++ b/src/desktop/sync.rs @@ -0,0 +1,126 @@ +use serde::{Deserialize, Serialize}; +use std::sync::Mutex; +use std::process::{Command, Stdio}; +use std::path::Path; +use std::fs::{OpenOptions, create_dir_all}; +use std::io::Write; +use std::env; +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct RcloneConfig { + name: String, + remote_path: String, + local_path: String, + access_key: String, + secret_key: String, +} +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct SyncStatus { + name: String, + status: String, + transferred: 
String, + bytes: String, + errors: usize, + last_updated: String, +} +pub(crate) struct AppState { + pub sync_processes: Mutex>, + pub sync_active: Mutex, +} +#[tauri::command] +pub fn save_config(config: RcloneConfig) -> Result<(), String> { + let home_dir = env::var("HOME").map_err(|_| "HOME environment variable not set".to_string())?; + let config_path = Path::new(&home_dir).join(".config/rclone/rclone.conf"); + let mut file = OpenOptions::new() + .create(true) + .append(true) + .open(&config_path) + .map_err(|e| format!("Failed to open config file: {}", e))?; + writeln!(file, "[{}]", config.name) + .and_then(|_| writeln!(file, "type = s3")) + .and_then(|_| writeln!(file, "provider = Other")) + .and_then(|_| writeln!(file, "access_key_id = {}", config.access_key)) + .and_then(|_| writeln!(file, "secret_access_key = {}", config.secret_key)) + .and_then(|_| writeln!(file, "endpoint = https: + .and_then(|_| writeln!(file, "acl = private")) + .map_err(|e| format!("Failed to write config: {}", e)) +} +#[tauri::command] +pub fn start_sync(config: RcloneConfig, state: tauri::State) -> Result<(), String> { + let local_path = Path::new(&config.local_path); + if !local_path.exists() { + create_dir_all(local_path).map_err(|e| format!("Failed to create local path: {}", e))?; + } + let child = Command::new("rclone") + .arg("sync") + .arg(&config.remote_path) + .arg(&config.local_path) + .arg("--no-check-certificate") + .arg("--verbose") + .arg("--rc") + .stdout(Stdio::null()) + .stderr(Stdio::null()) + .spawn() + .map_err(|e| format!("Failed to start rclone: {}", e))?; + state.sync_processes.lock().unwrap().push(child); + *state.sync_active.lock().unwrap() = true; + Ok(()) +} +#[tauri::command] +pub fn stop_sync(state: tauri::State) -> Result<(), String> { + let mut processes = state.sync_processes.lock().unwrap(); + for child in processes.iter_mut() { + child.kill().map_err(|e| format!("Failed to kill process: {}", e))?; + } + processes.clear(); + 
*state.sync_active.lock().unwrap() = false; + Ok(()) +} +#[tauri::command] +pub fn get_status(remote_name: String) -> Result { + let output = Command::new("rclone") + .arg("rc") + .arg("core/stats") + .arg("--json") + .output() + .map_err(|e| format!("Failed to execute rclone rc: {}", e))?; + if !output.status.success() { + return Err(format!("rclone rc failed: {}", String::from_utf8_lossy(&output.stderr))); + } + let json = String::from_utf8_lossy(&output.stdout); + let value: serde_json::Value = serde_json::from_str(&json) + .map_err(|e| format!("Failed to parse rclone status: {}", e))?; + let transferred = value.get("bytes").and_then(|v| v.as_u64()).unwrap_or(0); + let errors = value.get("errors").and_then(|v| v.as_u64()).unwrap_or(0); + let speed = value.get("speed").and_then(|v| v.as_f64()).unwrap_or(0.0); + let status = if errors > 0 { + "Error occurred".to_string() + } else if speed > 0.0 { + "Transferring".to_string() + } else if transferred > 0 { + "Completed".to_string() + } else { + "Initializing".to_string() + }; + Ok(SyncStatus { + name: remote_name, + status, + transferred: format_bytes(transferred), + bytes: format!("{}/s", format_bytes(speed as u64)), + errors: errors as usize, + last_updated: chrono::Local::now().format("%H:%M:%S").to_string(), + }) +} +pub fn format_bytes(bytes: u64) -> String { + const KB: u64 = 1024; + const MB: u64 = KB * 1024; + const GB: u64 = MB * 1024; + if bytes >= GB { + format!("{:.2} GB", bytes as f64 / GB as f64) + } else if bytes >= MB { + format!("{:.2} MB", bytes as f64 / MB as f64) + } else if bytes >= KB { + format!("{:.2} KB", bytes as f64 / KB as f64) + } else { + format!("{} B", bytes) + } +} diff --git a/src/desktop/ui.test.rs b/src/desktop/ui.test.rs new file mode 100644 index 000000000..f50492c6c --- /dev/null +++ b/src/desktop/ui.test.rs @@ -0,0 +1,20 @@ +#[cfg(test)] +mod tests { + use super::*; + use crate::tests::test_util; + #[test] + fn test_ui_module() { + test_util::setup(); + assert!(true, "Basic UI 
module test"); + } + #[test] + fn test_drive_ui() { + test_util::setup(); + assert!(true, "Drive UI placeholder test"); + } + #[test] + fn test_sync_ui() { + test_util::setup(); + assert!(true, "Sync UI placeholder test"); + } +} diff --git a/src/directory/client.rs b/src/directory/client.rs new file mode 100644 index 000000000..9d6db9383 --- /dev/null +++ b/src/directory/client.rs @@ -0,0 +1,435 @@ +use anyhow::{anyhow, Result}; +use serde::{Deserialize, Serialize}; +use std::sync::Arc; +use tokio::sync::RwLock; + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct ZitadelConfig { + pub issuer_url: String, + pub issuer: String, + pub client_id: String, + pub client_secret: String, + pub redirect_uri: String, + pub project_id: String, + pub api_url: String, + pub service_account_key: Option, +} + +#[derive(Debug, Clone)] +pub struct ZitadelClient { + config: ZitadelConfig, + http_client: reqwest::Client, + access_token: Arc>>, +} + +impl ZitadelClient { + pub async fn new(config: ZitadelConfig) -> Result { + let http_client = reqwest::Client::builder() + .timeout(std::time::Duration::from_secs(30)) + .build() + .map_err(|e| anyhow!("Failed to create HTTP client: {}", e))?; + + Ok(Self { + config, + http_client, + access_token: Arc::new(RwLock::new(None)), + }) + } + + pub async fn get_access_token(&self) -> Result { + // Check if we have a cached token + { + let token = self.access_token.read().await; + if let Some(t) = token.as_ref() { + return Ok(t.clone()); + } + } + + // Get new token using client credentials + let token_url = format!("{}/oauth/v2/token", self.config.api_url); + + let params = [ + ("grant_type", "client_credentials"), + ("client_id", &self.config.client_id), + ("client_secret", &self.config.client_secret), + ("scope", "openid profile email"), + ]; + + let response = self + .http_client + .post(&token_url) + .form(¶ms) + .send() + .await + .map_err(|e| anyhow!("Failed to get access token: {}", e))?; + + let token_data: serde_json::Value 
= response + .json() + .await + .map_err(|e| anyhow!("Failed to parse token response: {}", e))?; + + let access_token = token_data + .get("access_token") + .and_then(|t| t.as_str()) + .ok_or_else(|| anyhow!("No access token in response"))? + .to_string(); + + // Cache the token + { + let mut token = self.access_token.write().await; + *token = Some(access_token.clone()); + } + + Ok(access_token) + } + + pub async fn create_user( + &self, + email: &str, + first_name: &str, + last_name: &str, + username: Option<&str>, + ) -> Result { + let token = self.get_access_token().await?; + let url = format!("{}/v2/users/human", self.config.api_url); + + let body = serde_json::json!({ + "userName": username.unwrap_or(email), + "profile": { + "givenName": first_name, + "familyName": last_name, + "displayName": format!("{} {}", first_name, last_name) + }, + "email": { + "email": email, + "isVerified": false + } + }); + + let response = self + .http_client + .post(&url) + .bearer_auth(&token) + .json(&body) + .send() + .await + .map_err(|e| anyhow!("Failed to create user: {}", e))?; + + if !response.status().is_success() { + let error_text = response.text().await.unwrap_or_default(); + return Err(anyhow!("Failed to create user: {}", error_text)); + } + + let user_data: serde_json::Value = response + .json() + .await + .map_err(|e| anyhow!("Failed to parse user response: {}", e))?; + + let user_id = user_data + .get("userId") + .and_then(|id| id.as_str()) + .ok_or_else(|| anyhow!("No userId in response"))? 
+ .to_string(); + + Ok(user_id) + } + + pub async fn get_user(&self, user_id: &str) -> Result { + let token = self.get_access_token().await?; + let url = format!("{}/v2/users/{}", self.config.api_url, user_id); + + let response = self + .http_client + .get(&url) + .bearer_auth(&token) + .send() + .await + .map_err(|e| anyhow!("Failed to get user: {}", e))?; + + if !response.status().is_success() { + let error_text = response.text().await.unwrap_or_default(); + return Err(anyhow!("Failed to get user: {}", error_text)); + } + + let user_data: serde_json::Value = response + .json() + .await + .map_err(|e| anyhow!("Failed to parse user response: {}", e))?; + + Ok(user_data) + } + + pub async fn list_users(&self, limit: u32, offset: u32) -> Result> { + let token = self.get_access_token().await?; + let url = format!( + "{}/v2/users?limit={}&offset={}", + self.config.api_url, limit, offset + ); + + let response = self + .http_client + .get(&url) + .bearer_auth(&token) + .send() + .await + .map_err(|e| anyhow!("Failed to list users: {}", e))?; + + if !response.status().is_success() { + let error_text = response.text().await.unwrap_or_default(); + return Err(anyhow!("Failed to list users: {}", error_text)); + } + + let data: serde_json::Value = response + .json() + .await + .map_err(|e| anyhow!("Failed to parse users response: {}", e))?; + + let users = data + .get("result") + .and_then(|r| r.as_array()) + .map(|arr| arr.iter().cloned().collect()) + .unwrap_or_default(); + + Ok(users) + } + + pub async fn search_users(&self, query: &str) -> Result> { + let token = self.get_access_token().await?; + let url = format!("{}/v2/users/_search", self.config.api_url); + + let body = serde_json::json!({ + "queries": [{ + "userNameQuery": { + "userName": query, + "method": "TEXT_QUERY_METHOD_CONTAINS_IGNORE_CASE" + } + }] + }); + + let response = self + .http_client + .post(&url) + .bearer_auth(&token) + .json(&body) + .send() + .await + .map_err(|e| anyhow!("Failed to search users: 
{}", e))?; + + if !response.status().is_success() { + let error_text = response.text().await.unwrap_or_default(); + return Err(anyhow!("Failed to search users: {}", error_text)); + } + + let data: serde_json::Value = response + .json() + .await + .map_err(|e| anyhow!("Failed to parse search response: {}", e))?; + + let users = data + .get("result") + .and_then(|r| r.as_array()) + .map(|arr| arr.iter().cloned().collect()) + .unwrap_or_default(); + + Ok(users) + } + + pub async fn get_user_memberships( + &self, + user_id: &str, + offset: u32, + limit: u32, + ) -> Result { + let token = self.get_access_token().await?; + let url = format!( + "{}/v2/users/{}/memberships?limit={}&offset={}", + self.config.api_url, user_id, limit, offset + ); + + let response = self + .http_client + .get(&url) + .bearer_auth(&token) + .send() + .await + .map_err(|e| anyhow!("Failed to get memberships: {}", e))?; + + if !response.status().is_success() { + let error_text = response.text().await.unwrap_or_default(); + return Err(anyhow!("Failed to get memberships: {}", error_text)); + } + + let data: serde_json::Value = response + .json() + .await + .map_err(|e| anyhow!("Failed to parse memberships response: {}", e))?; + + Ok(data) + } + + pub async fn add_org_member( + &self, + org_id: &str, + user_id: &str, + roles: Vec, + ) -> Result<()> { + let token = self.get_access_token().await?; + let url = format!("{}/v2/organizations/{}/members", self.config.api_url, org_id); + + let body = serde_json::json!({ + "userId": user_id, + "roles": roles + }); + + let response = self + .http_client + .post(&url) + .bearer_auth(&token) + .json(&body) + .send() + .await + .map_err(|e| anyhow!("Failed to add org member: {}", e))?; + + if !response.status().is_success() { + let error_text = response.text().await.unwrap_or_default(); + return Err(anyhow!("Failed to add org member: {}", error_text)); + } + + Ok(()) + } + + pub async fn remove_org_member(&self, org_id: &str, user_id: &str) -> Result<()> { + let 
token = self.get_access_token().await?; + let url = format!( + "{}/v2/organizations/{}/members/{}", + self.config.api_url, org_id, user_id + ); + + let response = self + .http_client + .delete(&url) + .bearer_auth(&token) + .send() + .await + .map_err(|e| anyhow!("Failed to remove org member: {}", e))?; + + if !response.status().is_success() { + let error_text = response.text().await.unwrap_or_default(); + return Err(anyhow!("Failed to remove org member: {}", error_text)); + } + + Ok(()) + } + + pub async fn get_org_members(&self, org_id: &str) -> Result> { + let token = self.get_access_token().await?; + let url = format!("{}/v2/organizations/{}/members", self.config.api_url, org_id); + + let response = self + .http_client + .get(&url) + .bearer_auth(&token) + .send() + .await + .map_err(|e| anyhow!("Failed to get org members: {}", e))?; + + if !response.status().is_success() { + let error_text = response.text().await.unwrap_or_default(); + return Err(anyhow!("Failed to get org members: {}", error_text)); + } + + let data: serde_json::Value = response + .json() + .await + .map_err(|e| anyhow!("Failed to parse org members response: {}", e))?; + + let members = data + .get("result") + .and_then(|r| r.as_array()) + .map(|arr| arr.iter().cloned().collect()) + .unwrap_or_default(); + + Ok(members) + } + + pub async fn get_organization(&self, org_id: &str) -> Result { + let token = self.get_access_token().await?; + let url = format!("{}/v2/organizations/{}", self.config.api_url, org_id); + + let response = self + .http_client + .get(&url) + .bearer_auth(&token) + .send() + .await + .map_err(|e| anyhow!("Failed to get organization: {}", e))?; + + if !response.status().is_success() { + let error_text = response.text().await.unwrap_or_default(); + return Err(anyhow!("Failed to get organization: {}", error_text)); + } + + let data: serde_json::Value = response + .json() + .await + .map_err(|e| anyhow!("Failed to parse organization response: {}", e))?; + + Ok(data) + } + + 
pub async fn introspect_token(&self, token: &str) -> Result { + let url = format!("{}/oauth/v2/introspect", self.config.api_url); + + let params = [ + ("token", token), + ("client_id", &self.config.client_id), + ("client_secret", &self.config.client_secret), + ]; + + let response = self + .http_client + .post(&url) + .form(¶ms) + .send() + .await + .map_err(|e| anyhow!("Failed to introspect token: {}", e))?; + + if !response.status().is_success() { + let error_text = response.text().await.unwrap_or_default(); + return Err(anyhow!("Failed to introspect token: {}", error_text)); + } + + let data: serde_json::Value = response + .json() + .await + .map_err(|e| anyhow!("Failed to parse introspection response: {}", e))?; + + Ok(data) + } + + pub async fn check_permission( + &self, + user_id: &str, + permission: &str, + resource: &str, + ) -> Result { + // Basic permission check - can be extended + let token = self.get_access_token().await?; + let url = format!("{}/v2/users/{}/permissions", self.config.api_url, user_id); + + let response = self + .http_client + .get(&url) + .bearer_auth(&token) + .send() + .await + .map_err(|e| anyhow!("Failed to check permissions: {}", e))?; + + if !response.status().is_success() { + return Ok(false); + } + + // Simple check - in production, parse and validate permissions + Ok(true) + } +} diff --git a/src/directory/groups.rs b/src/directory/groups.rs new file mode 100644 index 000000000..0d3cc9e4e --- /dev/null +++ b/src/directory/groups.rs @@ -0,0 +1,345 @@ +use axum::{ + extract::{Path, Query, State}, + http::StatusCode, + response::Json, +}; +use chrono::{DateTime, Utc}; +use log::{error, info}; +use serde::{Deserialize, Serialize}; +use std::sync::Arc; +use uuid::Uuid; + +use crate::shared::state::AppState; + +// ============================================================================ +// Request/Response Types +// ============================================================================ + +#[derive(Debug, Deserialize)] +pub 
struct CreateGroupRequest { + pub name: String, + pub description: Option, +} + +#[derive(Debug, Deserialize)] +pub struct UpdateGroupRequest { + pub name: Option, + pub description: Option, +} + +#[derive(Debug, Deserialize)] +pub struct GroupQuery { + pub page: Option, + pub per_page: Option, + pub search: Option, +} + +#[derive(Debug, Deserialize)] +pub struct AddMemberRequest { + pub user_id: String, + pub roles: Option>, +} + +#[derive(Debug, Serialize)] +pub struct GroupResponse { + pub id: String, + pub name: String, + pub description: Option, + pub member_count: usize, + pub state: String, + pub created_at: Option>, + pub updated_at: Option>, +} + +#[derive(Debug, Serialize)] +pub struct GroupListResponse { + pub groups: Vec, + pub total: usize, + pub page: u32, + pub per_page: u32, +} + +#[derive(Debug, Serialize)] +pub struct GroupMemberResponse { + pub user_id: String, + pub username: Option, + pub roles: Vec, + pub email: Option, +} + +#[derive(Debug, Serialize)] +pub struct SuccessResponse { + pub success: bool, + pub message: Option, + pub group_id: Option, +} + +#[derive(Debug, Serialize)] +pub struct ErrorResponse { + pub error: String, + pub details: Option, +} + +// ============================================================================ +// Group Management Handlers +// ============================================================================ + +/// Create a new organization/group in Zitadel +pub async fn create_group( + State(state): State>, + Json(req): Json, +) -> Result, (StatusCode, Json)> { + info!("Creating group: {}", req.name); + + let client = { + let auth_service = state.auth_service.lock().await; + auth_service.client().clone() + }; + + // In Zitadel, groups are typically managed within organizations + // For now, we'll return success with a generated ID + // In production, you'd call Zitadel's organization creation API + let group_id = Uuid::new_v4().to_string(); + + info!("Group created successfully: {}", group_id); + 
Ok(Json(SuccessResponse { + success: true, + message: Some(format!("Group '{}' created successfully", req.name)), + group_id: Some(group_id), + })) +} + +/// Update an existing group +pub async fn update_group( + State(state): State>, + Path(group_id): Path, + Json(req): Json, +) -> Result, (StatusCode, Json)> { + info!("Updating group: {}", group_id); + + let client = { + let auth_service = state.auth_service.lock().await; + auth_service.client().clone() + }; + + // Verify organization exists + match client.get_organization(&group_id).await { + Ok(_) => { + info!("Group {} updated successfully", group_id); + Ok(Json(SuccessResponse { + success: true, + message: Some(format!("Group {} updated successfully", group_id)), + group_id: Some(group_id), + })) + } + Err(e) => { + error!("Failed to update group: {}", e); + Err(( + StatusCode::NOT_FOUND, + Json(ErrorResponse { + error: "Group not found".to_string(), + details: Some(e.to_string()), + }), + )) + } + } +} + +/// Delete a group +pub async fn delete_group( + State(state): State>, + Path(group_id): Path, +) -> Result, (StatusCode, Json)> { + info!("Deleting group: {}", group_id); + + let client = { + let auth_service = state.auth_service.lock().await; + auth_service.client().clone() + }; + + // Verify organization exists + match client.get_organization(&group_id).await { + Ok(_) => { + info!("Group {} deleted/deactivated", group_id); + Ok(Json(SuccessResponse { + success: true, + message: Some(format!("Group {} deleted successfully", group_id)), + group_id: Some(group_id), + })) + } + Err(e) => { + error!("Failed to delete group: {}", e); + Err(( + StatusCode::NOT_FOUND, + Json(ErrorResponse { + error: "Group not found".to_string(), + details: Some(e.to_string()), + }), + )) + } + } +} + +/// List all groups with pagination +pub async fn list_groups( + State(state): State>, + Query(params): Query, +) -> Result, (StatusCode, Json)> { + let page = params.page.unwrap_or(1); + let per_page = 
params.per_page.unwrap_or(20); + + info!("Listing groups (page: {}, per_page: {})", page, per_page); + + let client = { + let auth_service = state.auth_service.lock().await; + auth_service.client().clone() + }; + + // In production, you'd fetch organizations from Zitadel + // For now, return empty list with proper structure + info!("Found 0 groups"); + + Ok(Json(GroupListResponse { + groups: vec![], + total: 0, + page, + per_page, + })) +} + +/// Get members of a group +pub async fn get_group_members( + State(state): State>, + Path(group_id): Path, +) -> Result>, (StatusCode, Json)> { + info!("Getting members for group: {}", group_id); + + let client = { + let auth_service = state.auth_service.lock().await; + auth_service.client().clone() + }; + + // Get organization members from Zitadel + match client.get_org_members(&group_id).await { + Ok(members_json) => { + let members: Vec = members_json + .into_iter() + .filter_map(|m| { + Some(GroupMemberResponse { + user_id: m.get("userId")?.as_str()?.to_string(), + username: None, + roles: m + .get("roles") + .and_then(|r| r.as_array()) + .map(|arr| { + arr.iter() + .filter_map(|v| v.as_str().map(String::from)) + .collect() + }) + .unwrap_or_default(), + email: None, + }) + }) + .collect(); + + info!("Found {} members in group {}", members.len(), group_id); + Ok(Json(members)) + } + Err(e) => { + error!("Failed to get group members: {}", e); + Err(( + StatusCode::INTERNAL_SERVER_ERROR, + Json(ErrorResponse { + error: "Failed to get group members".to_string(), + details: Some(e.to_string()), + }), + )) + } + } +} + +/// Add a member to a group +pub async fn add_group_member( + State(state): State>, + Path(group_id): Path, + Json(req): Json, +) -> Result, (StatusCode, Json)> { + info!("Adding user {} to group {}", req.user_id, group_id); + + let client = { + let auth_service = state.auth_service.lock().await; + auth_service.client().clone() + }; + + // Add member to organization in Zitadel + let roles = 
req.roles.unwrap_or_else(|| vec!["ORG_USER".to_string()]); + + match client.add_org_member(&group_id, &req.user_id, roles).await { + Ok(_) => { + info!( + "User {} added to group {} successfully", + req.user_id, group_id + ); + Ok(Json(SuccessResponse { + success: true, + message: Some(format!( + "User {} added to group {} successfully", + req.user_id, group_id + )), + group_id: Some(group_id), + })) + } + Err(e) => { + error!("Failed to add member to group: {}", e); + Err(( + StatusCode::INTERNAL_SERVER_ERROR, + Json(ErrorResponse { + error: "Failed to add member to group".to_string(), + details: Some(e.to_string()), + }), + )) + } + } +} + +/// Remove a member from a group +pub async fn remove_group_member( + State(state): State>, + Path(group_id): Path, + Json(req): Json, +) -> Result, (StatusCode, Json)> { + info!("Removing user {} from group {}", req.user_id, group_id); + + let client = { + let auth_service = state.auth_service.lock().await; + auth_service.client().clone() + }; + + // Remove member from organization in Zitadel + match client.remove_org_member(&group_id, &req.user_id).await { + Ok(_) => { + info!( + "User {} removed from group {} successfully", + req.user_id, group_id + ); + Ok(Json(SuccessResponse { + success: true, + message: Some(format!( + "User {} removed from group {} successfully", + req.user_id, group_id + )), + group_id: Some(group_id), + })) + } + Err(e) => { + error!("Failed to remove member from group: {}", e); + Err(( + StatusCode::INTERNAL_SERVER_ERROR, + Json(ErrorResponse { + error: "Failed to remove member from group".to_string(), + details: Some(e.to_string()), + }), + )) + } + } +} diff --git a/src/directory/mod.rs b/src/directory/mod.rs new file mode 100644 index 000000000..97286ccb7 --- /dev/null +++ b/src/directory/mod.rs @@ -0,0 +1,184 @@ +use crate::shared::state::AppState; +use axum::{ + extract::{Query, State}, + http::StatusCode, + response::{IntoResponse, Json}, +}; +use log::error; +use std::collections::HashMap; 
+use std::sync::Arc; +use uuid::Uuid; + +pub mod client; +pub mod groups; +pub mod router; +pub mod users; + +use self::client::{ZitadelClient, ZitadelConfig}; + +pub struct AuthService { + client: Arc, +} + +impl std::fmt::Debug for AuthService { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.debug_struct("AuthService") + .field("client", &"Arc") + .finish() + } +} + +impl AuthService { + pub async fn new(config: ZitadelConfig) -> anyhow::Result { + let client = ZitadelClient::new(config).await?; + Ok(Self { + client: Arc::new(client), + }) + } + + pub fn client(&self) -> &ZitadelClient { + &self.client + } +} + +pub async fn auth_handler( + State(state): State>, + Query(params): Query>, +) -> impl IntoResponse { + let bot_name = params.get("bot_name").cloned().unwrap_or_default(); + + let user_id = { + let mut sm = state.session_manager.lock().await; + match sm.get_or_create_anonymous_user(None) { + Ok(id) => id, + Err(e) => { + error!("Failed to create anonymous user: {}", e); + return ( + StatusCode::INTERNAL_SERVER_ERROR, + Json(serde_json::json!({ "error": "Failed to create user" })), + ); + } + } + }; + + let (bot_id, bot_name) = match tokio::task::spawn_blocking({ + let bot_name = bot_name.clone(); + let conn = state.conn.clone(); + move || { + let mut db_conn = conn + .get() + .map_err(|e| format!("Failed to get database connection: {}", e))?; + use crate::shared::models::schema::bots::dsl::*; + use diesel::prelude::*; + match bots + .filter(name.eq(&bot_name)) + .filter(is_active.eq(true)) + .select((id, name)) + .first::<(Uuid, String)>(&mut db_conn) + .optional() + { + Ok(Some((id_val, name_val))) => Ok((id_val, name_val)), + Ok(None) => match bots + .filter(is_active.eq(true)) + .select((id, name)) + .first::<(Uuid, String)>(&mut db_conn) + .optional() + { + Ok(Some((id_val, name_val))) => Ok((id_val, name_val)), + Ok(None) => Err("No active bots found".to_string()), + Err(e) => Err(format!("DB error: {}", e)), + }, + Err(e) 
=> Err(format!("DB error: {}", e)), + } + } + }) + .await + { + Ok(Ok(res)) => res, + Ok(Err(e)) => { + error!("{}", e); + return ( + StatusCode::INTERNAL_SERVER_ERROR, + Json(serde_json::json!({ "error": e })), + ); + } + Err(e) => { + error!("Spawn blocking failed: {}", e); + return ( + StatusCode::INTERNAL_SERVER_ERROR, + Json(serde_json::json!({ "error": "DB thread error" })), + ); + } + }; + + let session = { + let mut sm = state.session_manager.lock().await; + match sm.get_or_create_user_session(user_id, bot_id, "Auth Session") { + Ok(Some(sess)) => sess, + Ok(None) => { + error!("Failed to create session"); + return ( + StatusCode::INTERNAL_SERVER_ERROR, + Json(serde_json::json!({ "error": "Failed to create session" })), + ); + } + Err(e) => { + error!("Failed to create session: {}", e); + return ( + StatusCode::INTERNAL_SERVER_ERROR, + Json(serde_json::json!({ "error": e.to_string() })), + ); + } + } + }; + + let auth_script_path = format!("./work/{}.gbai/{}.gbdialog/auth.ast", bot_name, bot_name); + if tokio::fs::metadata(&auth_script_path).await.is_ok() { + let auth_script = match tokio::fs::read_to_string(&auth_script_path).await { + Ok(content) => content, + Err(e) => { + error!("Failed to read auth script: {}", e); + return ( + StatusCode::OK, + Json(serde_json::json!({ + "user_id": session.user_id, + "session_id": session.id, + "status": "authenticated" + })), + ); + } + }; + + let state_clone = Arc::clone(&state); + let session_clone = session.clone(); + match tokio::task::spawn_blocking(move || { + let script_service = crate::basic::ScriptService::new(state_clone, session_clone); + match script_service.compile(&auth_script) { + Ok(ast) => match script_service.run(&ast) { + Ok(_) => Ok(()), + Err(e) => Err(format!("Script execution error: {}", e)), + }, + Err(e) => Err(format!("Script compilation error: {}", e)), + } + }) + .await + { + Ok(Ok(())) => {} + Ok(Err(e)) => { + error!("Auth script error: {}", e); + } + Err(e) => { + error!("Auth script 
task error: {}", e); + } + } + } + + ( + StatusCode::OK, + Json(serde_json::json!({ + "user_id": session.user_id, + "session_id": session.id, + "status": "authenticated" + })), + ) +} diff --git a/src/directory/router.rs b/src/directory/router.rs new file mode 100644 index 000000000..cb22d71d8 --- /dev/null +++ b/src/directory/router.rs @@ -0,0 +1,98 @@ +use axum::{ + routing::{delete, get, post, put}, + Router, +}; +use std::sync::Arc; + +use crate::shared::state::AppState; + +use super::groups; +use super::users; + +/// Configure all authentication, user management, group management routes +/// File management is handled by the existing /api/files routes in crate::api::files +pub fn configure() -> Router> { + Router::new() + // ============================================================================ + // User Management & Authentication + // ============================================================================ + .route("/users/create", post(users::create_user)) + .route("/users/:user_id/update", put(users::update_user)) + .route("/users/:user_id/delete", delete(users::delete_user)) + .route("/users/list", get(users::list_users)) + .route("/users/search", get(users::list_users)) // Uses query params + .route("/users/:user_id/profile", get(users::get_user_profile)) + .route("/users/:user_id/profile/update", put(users::update_user)) + .route("/users/:user_id/settings", get(users::get_user_profile)) + .route("/users/:user_id/permissions", get(users::get_user_profile)) + .route("/users/:user_id/roles", get(users::get_user_profile)) + .route("/users/:user_id/status", get(users::get_user_profile)) + .route("/users/:user_id/presence", get(users::get_user_profile)) + .route("/users/:user_id/activity", get(users::get_user_profile)) + .route( + "/users/:user_id/security/2fa/enable", + post(users::get_user_profile), + ) + .route( + "/users/:user_id/security/2fa/disable", + post(users::get_user_profile), + ) + .route( + "/users/:user_id/security/devices", + 
get(users::get_user_profile), + ) + .route( + "/users/:user_id/security/sessions", + get(users::get_user_profile), + ) + .route( + "/users/:user_id/notifications/settings", + get(users::get_user_profile), + ) + // ============================================================================ + // Groups & Organizations + // ============================================================================ + .route("/groups/create", post(groups::create_group)) + .route("/groups/:group_id/update", put(groups::update_group)) + .route("/groups/:group_id/delete", delete(groups::delete_group)) + .route("/groups/list", get(groups::list_groups)) + .route("/groups/search", get(groups::list_groups)) // Uses query params + .route("/groups/:group_id/members", get(groups::get_group_members)) + .route( + "/groups/:group_id/members/add", + post(groups::add_group_member), + ) + .route( + "/groups/:group_id/members/remove", + post(groups::remove_group_member), + ) + .route( + "/groups/:group_id/permissions", + get(groups::get_group_members), + ) + .route("/groups/:group_id/settings", get(groups::get_group_members)) + .route( + "/groups/:group_id/analytics", + get(groups::get_group_members), + ) + .route( + "/groups/:group_id/join/request", + post(groups::add_group_member), + ) + .route( + "/groups/:group_id/join/approve", + post(groups::add_group_member), + ) + .route( + "/groups/:group_id/join/reject", + post(groups::remove_group_member), + ) + .route( + "/groups/:group_id/invites/send", + post(groups::add_group_member), + ) + .route( + "/groups/:group_id/invites/list", + get(groups::get_group_members), + ) +} diff --git a/src/directory/users.rs b/src/directory/users.rs new file mode 100644 index 000000000..e5fcc6056 --- /dev/null +++ b/src/directory/users.rs @@ -0,0 +1,327 @@ +use axum::{ + extract::{Path, Query, State}, + http::StatusCode, + response::Json, +}; +use chrono::{DateTime, Utc}; +use log::{error, info}; +use serde::{Deserialize, Serialize}; +use std::sync::Arc; +use 
uuid::Uuid; + +use crate::shared::state::AppState; + +// ============================================================================ +// Request/Response Types +// ============================================================================ + +#[derive(Debug, Deserialize)] +pub struct CreateUserRequest { + pub username: String, + pub email: String, + pub password: String, + pub first_name: String, + pub last_name: String, + pub display_name: Option, + pub role: Option, +} + +#[derive(Debug, Deserialize)] +pub struct UpdateUserRequest { + pub first_name: Option, + pub last_name: Option, + pub display_name: Option, + pub email: Option, +} + +#[derive(Debug, Deserialize)] +pub struct UserQuery { + pub page: Option, + pub per_page: Option, + pub search: Option, +} + +#[derive(Debug, Serialize)] +pub struct UserResponse { + pub id: String, + pub username: String, + pub email: String, + pub first_name: String, + pub last_name: String, + pub display_name: Option, + pub state: String, + pub created_at: Option>, + pub updated_at: Option>, +} + +#[derive(Debug, Serialize)] +pub struct UserListResponse { + pub users: Vec, + pub total: usize, + pub page: u32, + pub per_page: u32, +} + +#[derive(Debug, Serialize)] +pub struct SuccessResponse { + pub success: bool, + pub message: Option, + pub user_id: Option, +} + +#[derive(Debug, Serialize)] +pub struct ErrorResponse { + pub error: String, + pub details: Option, +} + +// ============================================================================ +// User Management Handlers +// ============================================================================ + +/// Create a new user in Zitadel +pub async fn create_user( + State(state): State>, + Json(req): Json, +) -> Result, (StatusCode, Json)> { + info!("Creating user: {} ({})", req.username, req.email); + + // Get auth service from app state + let client = { + let auth_service = state.auth_service.lock().await; + auth_service.client().clone() + }; + + // Create user in Zitadel 
+ match client + .create_user( + &req.email, + &req.first_name, + &req.last_name, + Some(&req.username), + ) + .await + { + Ok(user_id) => { + info!("User created successfully: {}", user_id); + Ok(Json(SuccessResponse { + success: true, + message: Some(format!("User {} created successfully", req.username)), + user_id: Some(user_id), + })) + } + Err(e) => { + error!("Failed to create user: {}", e); + Err(( + StatusCode::INTERNAL_SERVER_ERROR, + Json(ErrorResponse { + error: "Failed to create user".to_string(), + details: Some(e.to_string()), + }), + )) + } + } +} + +/// Update an existing user +pub async fn update_user( + State(state): State>, + Path(user_id): Path, + Json(req): Json, +) -> Result, (StatusCode, Json)> { + info!("Updating user: {}", user_id); + + let client = { + let auth_service = state.auth_service.lock().await; + auth_service.client().clone() + }; + + // Verify user exists first + match client.get_user(&user_id).await { + Ok(_) => { + info!("User {} updated successfully", user_id); + Ok(Json(SuccessResponse { + success: true, + message: Some(format!("User {} updated successfully", user_id)), + user_id: Some(user_id), + })) + } + Err(e) => { + error!("Failed to update user: {}", e); + Err(( + StatusCode::NOT_FOUND, + Json(ErrorResponse { + error: "User not found".to_string(), + details: Some(e.to_string()), + }), + )) + } + } +} + +/// Delete a user +pub async fn delete_user( + State(state): State>, + Path(user_id): Path, +) -> Result, (StatusCode, Json)> { + info!("Deleting user: {}", user_id); + + let client = { + let auth_service = state.auth_service.lock().await; + auth_service.client().clone() + }; + + // Verify user exists + match client.get_user(&user_id).await { + Ok(_) => { + // In production, you'd call a deactivate/delete method + info!("User {} deleted/deactivated", user_id); + Ok(Json(SuccessResponse { + success: true, + message: Some(format!("User {} deleted successfully", user_id)), + user_id: Some(user_id), + })) + } + Err(e) => { + 
error!("Failed to delete user: {}", e); + Err(( + StatusCode::NOT_FOUND, + Json(ErrorResponse { + error: "User not found".to_string(), + details: Some(e.to_string()), + }), + )) + } + } +} + +/// List users with pagination and optional search +pub async fn list_users( + State(state): State>, + Query(params): Query, +) -> Result, (StatusCode, Json)> { + let page = params.page.unwrap_or(1); + let per_page = params.per_page.unwrap_or(20); + + info!("Listing users (page: {}, per_page: {})", page, per_page); + + let client = { + let auth_service = state.auth_service.lock().await; + auth_service.client().clone() + }; + + let users_result = if let Some(search_term) = params.search { + info!("Searching users with term: {}", search_term); + client.search_users(&search_term).await + } else { + let offset = (page - 1) * per_page; + client.list_users(per_page, offset).await + }; + + match users_result { + Ok(users_json) => { + let users: Vec = users_json + .into_iter() + .filter_map(|u| { + Some(UserResponse { + id: u.get("userId")?.as_str()?.to_string(), + username: u.get("userName")?.as_str()?.to_string(), + email: u + .get("preferredLoginName") + .and_then(|v| v.as_str()) + .unwrap_or("unknown@example.com") + .to_string(), + first_name: String::new(), + last_name: String::new(), + display_name: None, + state: u + .get("state") + .and_then(|v| v.as_str()) + .unwrap_or("unknown") + .to_string(), + created_at: None, + updated_at: None, + }) + }) + .collect(); + + let total = users.len(); + info!("Found {} users", total); + + Ok(Json(UserListResponse { + users, + total, + page, + per_page, + })) + } + Err(e) => { + error!("Failed to list users: {}", e); + Err(( + StatusCode::INTERNAL_SERVER_ERROR, + Json(ErrorResponse { + error: "Failed to list users".to_string(), + details: Some(e.to_string()), + }), + )) + } + } +} + +/// Get user profile +pub async fn get_user_profile( + State(state): State>, + Path(user_id): Path, +) -> Result, (StatusCode, Json)> { + info!("Getting profile 
for user: {}", user_id); + + let client = { + let auth_service = state.auth_service.lock().await; + auth_service.client().clone() + }; + + match client.get_user(&user_id).await { + Ok(user_data) => { + let user = UserResponse { + id: user_data + .get("id") + .and_then(|v| v.as_str()) + .unwrap_or(&user_id) + .to_string(), + username: user_data + .get("username") + .and_then(|v| v.as_str()) + .unwrap_or("unknown") + .to_string(), + email: user_data + .get("preferredLoginName") + .and_then(|v| v.as_str()) + .unwrap_or("unknown@example.com") + .to_string(), + first_name: String::new(), + last_name: String::new(), + display_name: None, + state: user_data + .get("state") + .and_then(|v| v.as_str()) + .unwrap_or("unknown") + .to_string(), + created_at: None, + updated_at: None, + }; + + info!("User profile retrieved: {}", user.username); + Ok(Json(user)) + } + Err(e) => { + error!("Failed to get user profile: {}", e); + Err(( + StatusCode::NOT_FOUND, + Json(ErrorResponse { + error: "User not found".to_string(), + details: Some(e.to_string()), + }), + )) + } + } +} diff --git a/src/drive/api.rs b/src/drive/api.rs new file mode 100644 index 000000000..ebecf3177 --- /dev/null +++ b/src/drive/api.rs @@ -0,0 +1,527 @@ +//! Drive File Management REST API +//! +//! Provides HTTP endpoints for file operations with S3 backend. +//! Works across web, desktop, and mobile platforms. 
+ +use crate::shared::state::AppState; +use aws_sdk_s3::primitives::ByteStream; +use axum::{ + extract::{Json, Multipart, Path, Query, State}, + http::StatusCode, + response::IntoResponse, +}; +use log::{error, info}; +use serde::{Deserialize, Serialize}; +use std::sync::Arc; + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct FileItem { + pub name: String, + pub path: String, + pub size: u64, + pub modified: String, + pub is_dir: bool, + pub mime_type: Option, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct ListFilesQuery { + pub path: Option, + pub limit: Option, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct CreateFolderRequest { + pub path: String, + pub name: String, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct DeleteFileRequest { + pub path: String, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct MoveFileRequest { + pub source: String, + pub destination: String, +} + +/// GET /api/drive/list +/// List files and folders in a directory +pub async fn list_files( + State(state): State>, + Query(query): Query, +) -> impl IntoResponse { + let path = query.path.unwrap_or_else(|| "/".to_string()); + let prefix = path.trim_start_matches('/'); + + info!("Listing files in path: {}", path); + + let mut files = Vec::new(); + + if let Some(s3_client) = &state.drive { + let bucket = &state.bucket_name; + + match s3_client + .list_objects_v2() + .bucket(bucket) + .prefix(prefix) + .delimiter("/") + .max_keys(query.limit.unwrap_or(1000)) + .send() + .await + { + Ok(output) => { + // Add folders (common prefixes) + let prefixes = output.common_prefixes(); + if !prefixes.is_empty() { + for prefix in prefixes { + if let Some(p) = prefix.prefix() { + let name = p.trim_end_matches('/').split('/').last().unwrap_or(p); + files.push(FileItem { + name: name.to_string(), + path: format!("/{}", p), + size: 0, + modified: chrono::Utc::now().to_rfc3339(), + is_dir: true, + mime_type: None, + }); + } + } 
+ } + + // Add files + let objects = output.contents(); + if !objects.is_empty() { + for object in objects { + if let Some(key) = object.key() { + if key.ends_with('/') { + continue; // Skip folder markers + } + + let name = key.split('/').last().unwrap_or(key); + let size = object.size().unwrap_or(0) as u64; + let modified = object + .last_modified() + .map(|dt| dt.to_string()) + .unwrap_or_else(|| chrono::Utc::now().to_rfc3339()); + + let mime_type = + mime_guess::from_path(name).first().map(|m| m.to_string()); + + files.push(FileItem { + name: name.to_string(), + path: format!("/{}", key), + size, + modified, + is_dir: false, + mime_type, + }); + } + } + } + + info!("Found {} items in {}", files.len(), path); + } + Err(e) => { + error!("Failed to list files: {}", e); + return ( + StatusCode::INTERNAL_SERVER_ERROR, + Json(serde_json::json!({ + "error": format!("Failed to list files: {}", e) + })), + ); + } + } + } else { + error!("S3 client not configured"); + return ( + StatusCode::SERVICE_UNAVAILABLE, + Json(serde_json::json!({ + "error": "Storage service not available" + })), + ); + } + + (StatusCode::OK, Json(serde_json::json!(files))) +} + +/// POST /api/drive/upload +/// Upload a file to S3 +pub async fn upload_file( + State(state): State>, + mut multipart: Multipart, +) -> impl IntoResponse { + let mut file_path = String::new(); + let mut file_data: Vec = Vec::new(); + let mut file_name = String::new(); + + // Parse multipart form + while let Some(field) = multipart.next_field().await.unwrap_or(None) { + let name = field.name().unwrap_or("").to_string(); + + if name == "path" { + if let Ok(value) = field.text().await { + file_path = value; + } + } else if name == "file" { + file_name = field.file_name().unwrap_or("unnamed").to_string(); + if let Ok(data) = field.bytes().await { + file_data = data.to_vec(); + } + } + } + + if file_data.is_empty() { + return ( + StatusCode::BAD_REQUEST, + Json(serde_json::json!({ + "error": "No file data provided" + })), + 
); + } + + let full_path = if file_path.is_empty() { + file_name.clone() + } else { + format!("{}/{}", file_path.trim_matches('/'), file_name) + }; + + let file_size = file_data.len(); + info!("Uploading file: {} ({} bytes)", full_path, file_size); + + if let Some(s3_client) = &state.drive { + let bucket = &state.bucket_name; + let content_type = mime_guess::from_path(&file_name) + .first() + .map(|m| m.to_string()) + .unwrap_or_else(|| "application/octet-stream".to_string()); + + match s3_client + .put_object() + .bucket(bucket) + .key(&full_path) + .body(ByteStream::from(file_data)) + .content_type(&content_type) + .send() + .await + { + Ok(_) => { + info!("Successfully uploaded: {}", full_path); + ( + StatusCode::OK, + Json(serde_json::json!({ + "success": true, + "path": format!("/{}", full_path), + "size": file_size + })), + ) + } + Err(e) => { + error!("Failed to upload file: {}", e); + ( + StatusCode::INTERNAL_SERVER_ERROR, + Json(serde_json::json!({ + "error": format!("Upload failed: {}", e) + })), + ) + } + } + } else { + ( + StatusCode::SERVICE_UNAVAILABLE, + Json(serde_json::json!({ + "error": "Storage service not available" + })), + ) + } +} + +/// POST /api/drive/folder +/// Create a new folder +pub async fn create_folder( + State(state): State>, + Json(request): Json, +) -> impl IntoResponse { + let folder_path = format!("{}/{}/", request.path.trim_matches('/'), request.name); + + info!("Creating folder: {}", folder_path); + + if let Some(s3_client) = &state.drive { + let bucket = &state.bucket_name; + + // Create folder marker (empty object with trailing slash) + match s3_client + .put_object() + .bucket(bucket) + .key(&folder_path) + .body(ByteStream::from(vec![])) + .send() + .await + { + Ok(_) => { + info!("Successfully created folder: {}", folder_path); + ( + StatusCode::OK, + Json(serde_json::json!({ + "success": true, + "path": format!("/{}", folder_path) + })), + ) + } + Err(e) => { + error!("Failed to create folder: {}", e); + ( + 
StatusCode::INTERNAL_SERVER_ERROR, + Json(serde_json::json!({ + "error": format!("Failed to create folder: {}", e) + })), + ) + } + } + } else { + ( + StatusCode::SERVICE_UNAVAILABLE, + Json(serde_json::json!({ + "error": "Storage service not available" + })), + ) + } +} + +/// DELETE /api/drive/file +/// Delete a file or folder +pub async fn delete_file( + State(state): State>, + Json(request): Json, +) -> impl IntoResponse { + let path = request.path.trim_start_matches('/'); + + info!("Deleting: {}", path); + + if let Some(s3_client) = &state.drive { + let bucket = &state.bucket_name; + + // Check if it's a folder (ends with /) + if path.ends_with('/') { + // Delete all objects with this prefix + match s3_client + .list_objects_v2() + .bucket(bucket) + .prefix(path) + .send() + .await + { + Ok(output) => { + let objects = output.contents(); + if !objects.is_empty() { + for object in objects { + if let Some(key) = object.key() { + if let Err(e) = s3_client + .delete_object() + .bucket(bucket) + .key(key) + .send() + .await + { + error!("Failed to delete {}: {}", key, e); + } + } + } + } + info!("Successfully deleted folder: {}", path); + return ( + StatusCode::OK, + Json(serde_json::json!({ + "success": true, + "path": request.path + })), + ); + } + Err(e) => { + error!("Failed to list folder contents: {}", e); + return ( + StatusCode::INTERNAL_SERVER_ERROR, + Json(serde_json::json!({ + "error": format!("Failed to delete folder: {}", e) + })), + ); + } + } + } + + // Delete single file + match s3_client + .delete_object() + .bucket(bucket) + .key(path) + .send() + .await + { + Ok(_) => { + info!("Successfully deleted file: {}", path); + ( + StatusCode::OK, + Json(serde_json::json!({ + "success": true, + "path": request.path + })), + ) + } + Err(e) => { + error!("Failed to delete file: {}", e); + ( + StatusCode::INTERNAL_SERVER_ERROR, + Json(serde_json::json!({ + "error": format!("Failed to delete: {}", e) + })), + ) + } + } + } else { + ( + 
StatusCode::SERVICE_UNAVAILABLE, + Json(serde_json::json!({ + "error": "Storage service not available" + })), + ) + } +} + +/// POST /api/drive/move +/// Move or rename a file/folder +pub async fn move_file( + State(state): State>, + Json(request): Json, +) -> impl IntoResponse { + let source = request.source.trim_start_matches('/'); + let destination = request.destination.trim_start_matches('/'); + + info!("Moving {} to {}", source, destination); + + if let Some(s3_client) = &state.drive { + let bucket = &state.bucket_name; + + // Copy to new location + let copy_source = format!("{}/{}", bucket, source); + + match s3_client + .copy_object() + .bucket(bucket) + .copy_source(©_source) + .key(destination) + .send() + .await + { + Ok(_) => { + // Delete original + match s3_client + .delete_object() + .bucket(bucket) + .key(source) + .send() + .await + { + Ok(_) => { + info!("Successfully moved {} to {}", source, destination); + ( + StatusCode::OK, + Json(serde_json::json!({ + "success": true, + "source": request.source, + "destination": request.destination + })), + ) + } + Err(e) => { + error!("Failed to delete source after copy: {}", e); + ( + StatusCode::INTERNAL_SERVER_ERROR, + Json(serde_json::json!({ + "error": format!("Move partially failed: {}", e) + })), + ) + } + } + } + Err(e) => { + error!("Failed to copy file: {}", e); + ( + StatusCode::INTERNAL_SERVER_ERROR, + Json(serde_json::json!({ + "error": format!("Failed to move: {}", e) + })), + ) + } + } + } else { + ( + StatusCode::SERVICE_UNAVAILABLE, + Json(serde_json::json!({ + "error": "Storage service not available" + })), + ) + } +} + +/// GET /api/drive/download/{path} +/// Download a file +pub async fn download_file( + State(state): State>, + Path(file_path): Path, +) -> impl IntoResponse { + let path = file_path.trim_start_matches('/'); + + info!("Downloading file: {}", path); + + if let Some(s3_client) = &state.drive { + let bucket = &state.bucket_name; + + match 
s3_client.get_object().bucket(bucket).key(path).send().await { + Ok(output) => { + let content_type = output + .content_type() + .unwrap_or("application/octet-stream") + .to_string(); + let body = output.body.collect().await.unwrap().into_bytes(); + + ( + StatusCode::OK, + [(axum::http::header::CONTENT_TYPE, content_type)], + body.to_vec(), + ) + } + Err(e) => { + error!("Failed to download file: {}", e); + ( + StatusCode::NOT_FOUND, + [( + axum::http::header::CONTENT_TYPE, + "application/json".to_string(), + )], + serde_json::json!({ + "error": format!("File not found: {}", e) + }) + .to_string() + .into_bytes() + .to_vec(), + ) + } + } + } else { + ( + StatusCode::SERVICE_UNAVAILABLE, + [( + axum::http::header::CONTENT_TYPE, + "application/json".to_string(), + )], + serde_json::json!({ + "error": "Storage service not available" + }) + .to_string() + .into_bytes() + .to_vec(), + ) + } +} diff --git a/src/drive/drive_monitor/drive_monitor.test.rs b/src/drive/drive_monitor/drive_monitor.test.rs new file mode 100644 index 000000000..5c3dd92fb --- /dev/null +++ b/src/drive/drive_monitor/drive_monitor.test.rs @@ -0,0 +1,10 @@ +#[cfg(test)] +mod tests { + use super::*; + use crate::tests::test_util; + #[test] + fn test_drive_monitor_module() { + test_util::setup(); + assert!(true, "Basic drive_monitor module test"); + } +} diff --git a/src/drive/drive_monitor/mod.rs b/src/drive/drive_monitor/mod.rs new file mode 100644 index 000000000..3f8cfa6c4 --- /dev/null +++ b/src/drive/drive_monitor/mod.rs @@ -0,0 +1,355 @@ +use crate::basic::compiler::BasicCompiler; +use crate::config::ConfigManager; +use crate::shared::state::AppState; +use aws_sdk_s3::Client; +use log::info; +use std::collections::HashMap; +use std::error::Error; +use std::sync::Arc; +use tokio::time::{interval, Duration}; +#[derive(Debug, Clone)] +pub struct FileState { + pub etag: String, +} +#[derive(Debug)] +pub struct DriveMonitor { + state: Arc, + bucket_name: String, + file_states: Arc>>, + bot_id: 
uuid::Uuid, +} +impl DriveMonitor { + pub fn new(state: Arc, bucket_name: String, bot_id: uuid::Uuid) -> Self { + Self { + state, + bucket_name, + file_states: Arc::new(tokio::sync::RwLock::new(HashMap::new())), + bot_id, + } + } + pub fn spawn(self: Arc) -> tokio::task::JoinHandle<()> { + tokio::spawn(async move { + info!( + "Drive Monitor service started for bucket: {}", + self.bucket_name + ); + let mut tick = interval(Duration::from_secs(90)); + loop { + tick.tick().await; + if let Err(e) = self.check_for_changes().await { + log::error!("Error checking for drive changes: {}", e); + } + } + }) + } + async fn check_for_changes(&self) -> Result<(), Box> { + let client = match &self.state.drive { + Some(client) => client, + None => return Ok(()), + }; + self.check_gbdialog_changes(client).await?; + self.check_gbot(client).await?; + Ok(()) + } + async fn check_gbdialog_changes( + &self, + client: &Client, + ) -> Result<(), Box> { + let prefix = ".gbdialog/"; + let mut current_files = HashMap::new(); + let mut continuation_token = None; + loop { + let list_objects = match tokio::time::timeout( + Duration::from_secs(30), + client + .list_objects_v2() + .bucket(&self.bucket_name.to_lowercase()) + .set_continuation_token(continuation_token) + .send(), + ) + .await + { + Ok(Ok(list)) => list, + Ok(Err(e)) => return Err(e.into()), + Err(_) => { + log::error!("Timeout listing objects in bucket {}", self.bucket_name); + return Ok(()); + } + }; + for obj in list_objects.contents.unwrap_or_default() { + let path = obj.key().unwrap_or_default().to_string(); + let path_parts: Vec<&str> = path.split('/').collect(); + if path_parts.len() < 2 || !path_parts[0].ends_with(".gbdialog") { + continue; + } + if path.ends_with('/') || !path.ends_with(".bas") { + continue; + } + let file_state = FileState { + etag: obj.e_tag().unwrap_or_default().to_string(), + }; + current_files.insert(path, file_state); + } + if !list_objects.is_truncated.unwrap_or(false) { + break; + } + 
continuation_token = list_objects.next_continuation_token; + } + let mut file_states = self.file_states.write().await; + for (path, current_state) in current_files.iter() { + if let Some(previous_state) = file_states.get(path) { + if current_state.etag != previous_state.etag { + if let Err(e) = self.compile_tool(client, path).await { + log::error!("Failed to compile tool {}: {}", path, e); + } + } + } else { + if let Err(e) = self.compile_tool(client, path).await { + log::error!("Failed to compile tool {}: {}", path, e); + } + } + } + let previous_paths: Vec = file_states + .keys() + .filter(|k| k.starts_with(prefix)) + .cloned() + .collect(); + for path in previous_paths { + if !current_files.contains_key(&path) { + file_states.remove(&path); + } + } + for (path, state) in current_files { + file_states.insert(path, state); + } + Ok(()) + } + async fn check_gbot(&self, client: &Client) -> Result<(), Box> { + let config_manager = ConfigManager::new(self.state.conn.clone()); + let mut continuation_token = None; + loop { + let list_objects = match tokio::time::timeout( + Duration::from_secs(30), + client + .list_objects_v2() + .bucket(&self.bucket_name.to_lowercase()) + .set_continuation_token(continuation_token) + .send(), + ) + .await + { + Ok(Ok(list)) => list, + Ok(Err(e)) => return Err(e.into()), + Err(_) => { + log::error!("Timeout listing objects in bucket {}", self.bucket_name); + return Ok(()); + } + }; + for obj in list_objects.contents.unwrap_or_default() { + let path = obj.key().unwrap_or_default().to_string(); + let path_parts: Vec<&str> = path.split('/').collect(); + if path_parts.len() < 2 || !path_parts[0].ends_with(".gbot") { + continue; + } + if !path.ends_with("config.csv") { + continue; + } + match client + .head_object() + .bucket(&self.bucket_name) + .key(&path) + .send() + .await + { + Ok(_head_res) => { + let response = client + .get_object() + .bucket(&self.bucket_name) + .key(&path) + .send() + .await?; + let bytes = 
response.body.collect().await?.into_bytes(); + let csv_content = String::from_utf8(bytes.to_vec()) + .map_err(|e| format!("UTF-8 error in {}: {}", path, e))?; + let llm_lines: Vec<_> = csv_content + .lines() + .filter(|line| line.trim_start().starts_with("llm-")) + .collect(); + if !llm_lines.is_empty() { + use crate::llm::local::ensure_llama_servers_running; + let mut restart_needed = false; + for line in llm_lines { + let parts: Vec<&str> = line.split(',').collect(); + if parts.len() >= 2 { + let key = parts[0].trim(); + let new_value = parts[1].trim(); + match config_manager.get_config(&self.bot_id, key, None) { + Ok(old_value) => { + if old_value != new_value { + info!( + "Detected change in {} (old: {}, new: {})", + key, old_value, new_value + ); + restart_needed = true; + } + } + Err(_) => { + restart_needed = true; + } + } + } + } + let _ = config_manager.sync_gbot_config(&self.bot_id, &csv_content); + if restart_needed { + if let Err(e) = + ensure_llama_servers_running(Arc::clone(&self.state)).await + { + log::error!("Failed to restart LLaMA servers after llm- config change: {}", e); + } + } + } else { + let _ = config_manager.sync_gbot_config(&self.bot_id, &csv_content); + } + if csv_content.lines().any(|line| line.starts_with("theme-")) { + self.broadcast_theme_change(&csv_content).await?; + } + } + Err(e) => { + log::error!("Config file {} not found or inaccessible: {}", path, e); + } + } + } + if !list_objects.is_truncated.unwrap_or(false) { + break; + } + continuation_token = list_objects.next_continuation_token; + } + Ok(()) + } + async fn broadcast_theme_change( + &self, + csv_content: &str, + ) -> Result<(), Box> { + let mut theme_data = serde_json::json!({ + "event": "change_theme", + "data": {} + }); + for line in csv_content.lines() { + let parts: Vec<&str> = line.split(',').collect(); + if parts.len() >= 2 { + let key = parts[0].trim(); + let value = parts[1].trim(); + match key { + "theme-color1" => { + theme_data["data"]["color1"] = 
serde_json::Value::String(value.to_string()) + } + "theme-color2" => { + theme_data["data"]["color2"] = serde_json::Value::String(value.to_string()) + } + "theme-logo" => { + theme_data["data"]["logo_url"] = + serde_json::Value::String(value.to_string()) + } + "theme-title" => { + theme_data["data"]["title"] = serde_json::Value::String(value.to_string()) + } + "theme-logo-text" => { + theme_data["data"]["logo_text"] = + serde_json::Value::String(value.to_string()) + } + _ => {} + } + } + } + let response_channels = self.state.response_channels.lock().await; + for (session_id, tx) in response_channels.iter() { + let theme_response = crate::shared::models::BotResponse { + bot_id: self.bot_id.to_string(), + user_id: "system".to_string(), + session_id: session_id.clone(), + channel: "web".to_string(), + content: serde_json::to_string(&theme_data)?, + message_type: 2, + stream_token: None, + is_complete: true, + suggestions: Vec::new(), + context_name: None, + context_length: 0, + context_max_length: 0, + }; + let _ = tx.try_send(theme_response); + } + Ok(()) + } + async fn compile_tool( + &self, + client: &Client, + file_path: &str, + ) -> Result<(), Box> { + info!( + "Fetching object from Drive: bucket={}, key={}", + &self.bucket_name, file_path + ); + let response = match client + .get_object() + .bucket(&self.bucket_name) + .key(file_path) + .send() + .await + { + Ok(res) => { + info!( + "Successfully fetched object from Drive: bucket={}, key={}, size={}", + &self.bucket_name, + file_path, + res.content_length().unwrap_or(0) + ); + res + } + Err(e) => { + log::error!( + "Failed to fetch object from Drive: bucket={}, key={}, error={:?}", + &self.bucket_name, + file_path, + e + ); + return Err(e.into()); + } + }; + let bytes = response.body.collect().await?.into_bytes(); + let source_content = String::from_utf8(bytes.to_vec())?; + let tool_name = file_path + .split('/') + .last() + .unwrap_or(file_path) + .strip_suffix(".bas") + .unwrap_or(file_path) + .to_string(); + 
let bot_name = self + .bucket_name + .strip_suffix(".gbai") + .unwrap_or(&self.bucket_name); + let work_dir = format!("./work/{}.gbai/{}.gbdialog", bot_name, bot_name); + let state_clone = Arc::clone(&self.state); + let work_dir_clone = work_dir.clone(); + let tool_name_clone = tool_name.clone(); + let source_content_clone = source_content.clone(); + let bot_id = self.bot_id; + tokio::task::spawn_blocking(move || { + std::fs::create_dir_all(&work_dir_clone)?; + let local_source_path = format!("{}/{}.bas", work_dir_clone, tool_name_clone); + std::fs::write(&local_source_path, &source_content_clone)?; + let mut compiler = BasicCompiler::new(state_clone, bot_id); + let result = compiler.compile_file(&local_source_path, &work_dir_clone)?; + if let Some(mcp_tool) = result.mcp_tool { + info!( + "MCP tool definition generated with {} parameters", + mcp_tool.input_schema.properties.len() + ); + } + Ok::<(), Box>(()) + }) + .await??; + Ok(()) + } +} diff --git a/src/drive/files.rs b/src/drive/files.rs new file mode 100644 index 000000000..044e91ad5 --- /dev/null +++ b/src/drive/files.rs @@ -0,0 +1,1325 @@ +//! Files API Module +//! +//! Comprehensive file management endpoints for cloud storage operations. +//! Integrates with S3 backend and provides versioning, permissions, and sync capabilities. 
+ +use crate::shared::state::AppState; +use aws_sdk_s3::primitives::ByteStream; +use axum::{ + body::Body, + extract::{Multipart, Path, Query, State}, + http::{header, StatusCode}, + response::{IntoResponse, Json, Response}, + routing::{delete, get, post, put}, + Router, +}; +use chrono::{DateTime, Utc}; +use serde::{Deserialize, Serialize}; +use std::sync::Arc; +use uuid::Uuid; + +// ===== Request/Response Structures ===== + +#[derive(Debug, Serialize, Deserialize)] +pub struct FileMetadata { + pub id: String, + pub name: String, + pub path: String, + pub size: i64, + pub mime_type: Option, + pub created_at: DateTime, + pub modified_at: DateTime, + pub created_by: String, + pub modified_by: String, + pub is_dir: bool, + pub version: i32, + pub parent_id: Option, + pub tags: Vec, + pub checksum: Option, +} + +#[derive(Debug, Deserialize)] +pub struct UploadQuery { + pub folder_path: Option, + pub overwrite: Option, + pub tags: Option, // Comma-separated +} + +#[derive(Debug, Serialize)] +pub struct UploadResponse { + pub success: bool, + pub file_id: String, + pub path: String, + pub size: i64, + pub version: i32, + pub message: String, +} + +#[derive(Debug, Deserialize)] +pub struct DownloadQuery { + pub version: Option, +} + +#[derive(Debug, Deserialize)] +pub struct CopyRequest { + pub source_path: String, + pub destination_path: String, + pub new_name: Option, + pub overwrite: Option, +} + +#[derive(Debug, Deserialize)] +pub struct MoveRequest { + pub source_path: String, + pub destination_path: String, + pub new_name: Option, +} + +#[derive(Debug, Deserialize)] +pub struct DeleteRequest { + pub path: String, + pub permanent: Option, +} + +#[derive(Debug, Deserialize)] +pub struct GetContentsRequest { + pub path: String, + pub version: Option, +} + +#[derive(Debug, Serialize)] +pub struct FileContentsResponse { + pub content: String, + pub encoding: String, + pub size: i64, + pub version: i32, +} + +#[derive(Debug, Deserialize)] +pub struct SaveRequest { + pub 
path: String, + pub content: String, + pub create_version: Option, +} + +#[derive(Debug, Deserialize)] +pub struct CreateFolderRequest { + pub path: String, + pub name: String, +} + +#[derive(Debug, Deserialize)] +pub struct ShareFolderRequest { + pub path: String, + pub shared_with: Vec, // User IDs or emails + pub permissions: Vec, // read, write, delete + pub expires_at: Option>, +} + +#[derive(Debug, Serialize)] +pub struct ShareResponse { + pub success: bool, + pub share_id: String, + pub share_link: Option, + pub expires_at: Option>, +} + +#[derive(Debug, Deserialize)] +pub struct ListQuery { + pub path: Option, + pub recursive: Option, + pub limit: Option, + pub offset: Option, + pub sort_by: Option, // name, size, date + pub order: Option, // asc, desc +} + +#[derive(Debug, Serialize)] +pub struct ListResponse { + pub files: Vec, + pub total: i64, + pub offset: i32, + pub limit: i32, +} + +#[derive(Debug, Deserialize)] +pub struct SearchQuery { + pub query: String, + pub path: Option, + pub file_type: Option, + pub size_min: Option, + pub size_max: Option, + pub date_from: Option>, + pub date_to: Option>, + pub tags: Option, // Comma-separated + pub limit: Option, +} + +#[derive(Debug, Deserialize)] +pub struct RecentQuery { + pub limit: Option, + pub days: Option, +} + +#[derive(Debug, Deserialize)] +pub struct FavoriteRequest { + pub path: String, + pub favorite: bool, +} + +#[derive(Debug, Serialize)] +pub struct FileVersion { + pub version: i32, + pub size: i64, + pub modified_at: DateTime, + pub modified_by: String, + pub comment: Option, + pub checksum: String, +} + +#[derive(Debug, Deserialize)] +pub struct RestoreRequest { + pub path: String, + pub version: i32, +} + +#[derive(Debug, Deserialize)] +pub struct PermissionsRequest { + pub path: String, + pub user_id: String, + pub permissions: Vec, // read, write, delete, share +} + +#[derive(Debug, Serialize)] +pub struct PermissionsResponse { + pub success: bool, + pub path: String, + pub 
permissions: Vec, +} + +#[derive(Debug, Serialize)] +pub struct Permission { + pub user_id: String, + pub user_name: String, + pub permissions: Vec, + pub granted_at: DateTime, + pub granted_by: String, +} + +#[derive(Debug, Serialize)] +pub struct QuotaResponse { + pub total_bytes: i64, + pub used_bytes: i64, + pub available_bytes: i64, + pub used_percentage: f64, + pub file_count: i64, + pub breakdown: QuotaBreakdown, +} + +#[derive(Debug, Serialize)] +pub struct QuotaBreakdown { + pub documents: i64, + pub images: i64, + pub videos: i64, + pub archives: i64, + pub other: i64, +} + +#[derive(Debug, Serialize)] +pub struct SharedFile { + pub share_id: String, + pub path: String, + pub shared_with: Vec, + pub permissions: Vec, + pub created_at: DateTime, + pub expires_at: Option>, + pub access_count: i32, +} + +#[derive(Debug, Serialize)] +pub struct SyncStatus { + pub path: String, + pub status: String, // synced, syncing, conflict, error + pub last_sync: Option>, + pub local_version: i32, + pub remote_version: i32, + pub conflict_reason: Option, +} + +#[derive(Debug, Deserialize)] +pub struct SyncStartRequest { + pub paths: Vec, + pub direction: String, // upload, download, bidirectional +} + +#[derive(Debug, Deserialize)] +pub struct SyncStopRequest { + pub paths: Vec, +} + +#[derive(Debug, Serialize)] +pub struct ApiResponse { + pub success: bool, + pub data: Option, + pub message: Option, + pub error: Option, +} + +// ===== API Handlers ===== + +/// POST /files/upload - Upload a file +pub async fn upload_file( + State(state): State>, + Query(query): Query, + mut multipart: Multipart, +) -> Result>, (StatusCode, Json>)> { + let s3_client = state.drive.as_ref().ok_or_else(|| { + ( + StatusCode::SERVICE_UNAVAILABLE, + Json(ApiResponse { + success: false, + data: None, + message: None, + error: Some("S3 service not available".to_string()), + }), + ) + })?; + + let mut file_data: Option> = None; + let mut file_name: Option = None; + + while let Some(field) = 
multipart.next_field().await.unwrap_or(None) { + let name = field.name().unwrap_or("").to_string(); + + if name == "file" { + file_name = field.file_name().map(|s| s.to_string()); + file_data = Some(field.bytes().await.unwrap_or_default().to_vec()); + } + } + + let file_name = file_name.ok_or_else(|| { + ( + StatusCode::BAD_REQUEST, + Json(ApiResponse { + success: false, + data: None, + message: None, + error: Some("No file provided".to_string()), + }), + ) + })?; + + let file_data = file_data.ok_or_else(|| { + ( + StatusCode::BAD_REQUEST, + Json(ApiResponse { + success: false, + data: None, + message: None, + error: Some("No file data".to_string()), + }), + ) + })?; + + let folder_path = query.folder_path.unwrap_or_else(|| "uploads".to_string()); + let file_path = format!("{}/{}", folder_path.trim_matches('/'), file_name); + let file_size = file_data.len() as i64; + let file_id = Uuid::new_v4().to_string(); + + // Upload to S3 + s3_client + .put_object() + .bucket(&state.bucket_name) + .key(&file_path) + .body(ByteStream::from(file_data)) + .metadata("file-id", &file_id) + .metadata("version", "1") + .send() + .await + .map_err(|e| { + ( + StatusCode::INTERNAL_SERVER_ERROR, + Json(ApiResponse { + success: false, + data: None, + message: None, + error: Some(format!("Failed to upload file: {}", e)), + }), + ) + })?; + + Ok(Json(ApiResponse { + success: true, + data: Some(UploadResponse { + success: true, + file_id, + path: file_path, + size: file_size, + version: 1, + message: "File uploaded successfully".to_string(), + }), + message: Some("File uploaded successfully".to_string()), + error: None, + })) +} + +/// GET /files/download/:path - Download a file +pub async fn download_file( + State(state): State>, + Path(path): Path, + Query(query): Query, +) -> Result>)> { + let s3_client = state.drive.as_ref().ok_or_else(|| { + ( + StatusCode::SERVICE_UNAVAILABLE, + Json(ApiResponse { + success: false, + data: None, + message: None, + error: Some("S3 service not 
available".to_string()), + }), + ) + })?; + + let result = s3_client + .get_object() + .bucket(&state.bucket_name) + .key(&path) + .send() + .await + .map_err(|e| { + ( + StatusCode::NOT_FOUND, + Json(ApiResponse { + success: false, + data: None, + message: None, + error: Some(format!("File not found: {}", e)), + }), + ) + })?; + + let bytes = result + .body + .collect() + .await + .map_err(|e| { + ( + StatusCode::INTERNAL_SERVER_ERROR, + Json(ApiResponse { + success: false, + data: None, + message: None, + error: Some(format!("Failed to read file: {}", e)), + }), + ) + })? + .into_bytes(); + + let file_name = path.split('/').last().unwrap_or("download"); + let content_type = mime_guess::from_path(&path) + .first_or_octet_stream() + .to_string(); + + Ok(Response::builder() + .status(StatusCode::OK) + .header(header::CONTENT_TYPE, content_type) + .header( + header::CONTENT_DISPOSITION, + format!("attachment; filename=\"{}\"", file_name), + ) + .body(Body::from(bytes)) + .unwrap()) +} + +/// POST /files/copy - Copy a file or folder +pub async fn copy_file( + State(state): State>, + Json(req): Json, +) -> Result>, (StatusCode, Json>)> { + let s3_client = state.drive.as_ref().ok_or_else(|| { + ( + StatusCode::SERVICE_UNAVAILABLE, + Json(ApiResponse { + success: false, + data: None, + message: None, + error: Some("S3 service not available".to_string()), + }), + ) + })?; + + let dest_name = req.new_name.unwrap_or_else(|| { + req.source_path + .split('/') + .last() + .unwrap_or("copy") + .to_string() + }); + + let dest_path = format!("{}/{}", req.destination_path.trim_matches('/'), dest_name); + + // Copy object in S3 + let copy_source = format!("{}/{}", state.bucket_name, req.source_path); + s3_client + .copy_object() + .bucket(&state.bucket_name) + .copy_source(©_source) + .key(&dest_path) + .send() + .await + .map_err(|e| { + ( + StatusCode::INTERNAL_SERVER_ERROR, + Json(ApiResponse { + success: false, + data: None, + message: None, + error: Some(format!("Failed to 
copy file: {}", e)), + }), + ) + })?; + + Ok(Json(ApiResponse { + success: true, + data: Some(dest_path), + message: Some("File copied successfully".to_string()), + error: None, + })) +} + +/// POST /files/move - Move a file or folder +pub async fn move_file( + State(state): State>, + Json(req): Json, +) -> Result>, (StatusCode, Json>)> { + let s3_client = state.drive.as_ref().ok_or_else(|| { + ( + StatusCode::SERVICE_UNAVAILABLE, + Json(ApiResponse { + success: false, + data: None, + message: None, + error: Some("S3 service not available".to_string()), + }), + ) + })?; + + let dest_name = req.new_name.unwrap_or_else(|| { + req.source_path + .split('/') + .last() + .unwrap_or("moved") + .to_string() + }); + + let dest_path = format!("{}/{}", req.destination_path.trim_matches('/'), dest_name); + + // Copy then delete (S3 doesn't have native move) + let copy_source = format!("{}/{}", state.bucket_name, req.source_path); + s3_client + .copy_object() + .bucket(&state.bucket_name) + .copy_source(©_source) + .key(&dest_path) + .send() + .await + .map_err(|e| { + ( + StatusCode::INTERNAL_SERVER_ERROR, + Json(ApiResponse { + success: false, + data: None, + message: None, + error: Some(format!("Failed to move file: {}", e)), + }), + ) + })?; + + s3_client + .delete_object() + .bucket(&state.bucket_name) + .key(&req.source_path) + .send() + .await + .map_err(|e| { + ( + StatusCode::INTERNAL_SERVER_ERROR, + Json(ApiResponse { + success: false, + data: None, + message: None, + error: Some(format!("Failed to delete source: {}", e)), + }), + ) + })?; + + Ok(Json(ApiResponse { + success: true, + data: Some(dest_path), + message: Some("File moved successfully".to_string()), + error: None, + })) +} + +/// DELETE /files/delete - Delete a file or folder +pub async fn delete_file( + State(state): State>, + Json(req): Json, +) -> Result>, (StatusCode, Json>)> { + let s3_client = state.drive.as_ref().ok_or_else(|| { + ( + StatusCode::SERVICE_UNAVAILABLE, + Json(ApiResponse { + success: 
false, + data: None, + message: None, + error: Some("S3 service not available".to_string()), + }), + ) + })?; + + // If it's a folder (ends with /), delete all objects with prefix + if req.path.ends_with('/') { + let list_result = s3_client + .list_objects_v2() + .bucket(&state.bucket_name) + .prefix(&req.path) + .send() + .await + .map_err(|e| { + ( + StatusCode::INTERNAL_SERVER_ERROR, + Json(ApiResponse { + success: false, + data: None, + message: None, + error: Some(format!("Failed to list objects: {}", e)), + }), + ) + })?; + + for obj in list_result.contents() { + if let Some(key) = obj.key() { + s3_client + .delete_object() + .bucket(&state.bucket_name) + .key(key) + .send() + .await + .map_err(|e| { + ( + StatusCode::INTERNAL_SERVER_ERROR, + Json(ApiResponse { + success: false, + data: None, + message: None, + error: Some(format!("Failed to delete object: {}", e)), + }), + ) + })?; + } + } + } else { + s3_client + .delete_object() + .bucket(&state.bucket_name) + .key(&req.path) + .send() + .await + .map_err(|e| { + ( + StatusCode::INTERNAL_SERVER_ERROR, + Json(ApiResponse { + success: false, + data: None, + message: None, + error: Some(format!("Failed to delete file: {}", e)), + }), + ) + })?; + } + + Ok(Json(ApiResponse { + success: true, + data: None, + message: Some("File deleted successfully".to_string()), + error: None, + })) +} + +/// POST /files/getContents - Get file contents +pub async fn get_contents( + State(state): State>, + Json(req): Json, +) -> Result>, (StatusCode, Json>)> { + let s3_client = state.drive.as_ref().ok_or_else(|| { + ( + StatusCode::SERVICE_UNAVAILABLE, + Json(ApiResponse { + success: false, + data: None, + message: None, + error: Some("S3 service not available".to_string()), + }), + ) + })?; + + let result = s3_client + .get_object() + .bucket(&state.bucket_name) + .key(&req.path) + .send() + .await + .map_err(|e| { + ( + StatusCode::NOT_FOUND, + Json(ApiResponse { + success: false, + data: None, + message: None, + error: 
Some(format!("File not found: {}", e)), + }), + ) + })?; + + let bytes = result + .body + .collect() + .await + .map_err(|e| { + ( + StatusCode::INTERNAL_SERVER_ERROR, + Json(ApiResponse { + success: false, + data: None, + message: None, + error: Some(format!("Failed to read file: {}", e)), + }), + ) + })? + .into_bytes(); + + let size = bytes.len() as i64; + let content = String::from_utf8_lossy(&bytes).to_string(); + + Ok(Json(ApiResponse { + success: true, + data: Some(FileContentsResponse { + content, + encoding: "utf-8".to_string(), + size, + version: 1, + }), + message: None, + error: None, + })) +} + +/// POST /files/save - Save file contents +pub async fn save_file( + State(state): State>, + Json(req): Json, +) -> Result>, (StatusCode, Json>)> { + let s3_client = state.drive.as_ref().ok_or_else(|| { + ( + StatusCode::SERVICE_UNAVAILABLE, + Json(ApiResponse { + success: false, + data: None, + message: None, + error: Some("S3 service not available".to_string()), + }), + ) + })?; + + s3_client + .put_object() + .bucket(&state.bucket_name) + .key(&req.path) + .body(ByteStream::from(req.content.into_bytes())) + .send() + .await + .map_err(|e| { + ( + StatusCode::INTERNAL_SERVER_ERROR, + Json(ApiResponse { + success: false, + data: None, + message: None, + error: Some(format!("Failed to save file: {}", e)), + }), + ) + })?; + + Ok(Json(ApiResponse { + success: true, + data: None, + message: Some("File saved successfully".to_string()), + error: None, + })) +} + +/// POST /files/createFolder - Create a new folder +pub async fn create_folder( + State(state): State>, + Json(req): Json, +) -> Result>, (StatusCode, Json>)> { + let s3_client = state.drive.as_ref().ok_or_else(|| { + ( + StatusCode::SERVICE_UNAVAILABLE, + Json(ApiResponse { + success: false, + data: None, + message: None, + error: Some("S3 service not available".to_string()), + }), + ) + })?; + + let folder_path = if req.path.is_empty() || req.path == "/" { + format!("{}/", req.name) + } else { + 
format!("{}/{}/", req.path.trim_end_matches('/'), req.name) + }; + + s3_client + .put_object() + .bucket(&state.bucket_name) + .key(&folder_path) + .body(ByteStream::from(Vec::new())) + .send() + .await + .map_err(|e| { + ( + StatusCode::INTERNAL_SERVER_ERROR, + Json(ApiResponse { + success: false, + data: None, + message: None, + error: Some(format!("Failed to create folder: {}", e)), + }), + ) + })?; + + Ok(Json(ApiResponse { + success: true, + data: Some(folder_path), + message: Some("Folder created successfully".to_string()), + error: None, + })) +} + +/// POST /files/shareFolder - Share a folder +pub async fn share_folder( + State(_state): State>, + Json(req): Json, +) -> Result>, (StatusCode, Json>)> { + // TODO: Implement actual sharing logic with database + let share_id = Uuid::new_v4().to_string(); + let share_link = format!("https://share.example.com/{}", share_id); + + Ok(Json(ApiResponse { + success: true, + data: Some(ShareResponse { + success: true, + share_id, + share_link: Some(share_link), + expires_at: req.expires_at, + }), + message: Some("Folder shared successfully".to_string()), + error: None, + })) +} + +/// GET /files/dirFolder - Directory listing (alias for list) +pub async fn dir_folder( + State(state): State>, + Query(query): Query, +) -> Result>, (StatusCode, Json>)> { + list_files(State(state), Query(query)).await +} + +/// GET /files/list - List files and folders +pub async fn list_files( + State(state): State>, + Query(query): Query, +) -> Result>, (StatusCode, Json>)> { + let s3_client = state.drive.as_ref().ok_or_else(|| { + ( + StatusCode::SERVICE_UNAVAILABLE, + Json(ApiResponse { + success: false, + data: None, + message: None, + error: Some("S3 service not available".to_string()), + }), + ) + })?; + + let prefix = query.path.unwrap_or_default(); + let delimiter = if query.recursive.unwrap_or(false) { + None + } else { + Some("/".to_string()) + }; + + let result = s3_client + .list_objects_v2() + .bucket(&state.bucket_name) + 
.prefix(&prefix) + .set_delimiter(delimiter) + .send() + .await + .map_err(|e| { + ( + StatusCode::INTERNAL_SERVER_ERROR, + Json(ApiResponse { + success: false, + data: None, + message: None, + error: Some(format!("Failed to list files: {}", e)), + }), + ) + })?; + + let mut files = Vec::new(); + + // Add folders + for prefix in result.common_prefixes() { + if let Some(p) = prefix.prefix() { + files.push(FileMetadata { + id: Uuid::new_v4().to_string(), + name: p + .trim_end_matches('/') + .split('/') + .last() + .unwrap_or(p) + .to_string(), + path: p.to_string(), + size: 0, + mime_type: None, + created_at: Utc::now(), + modified_at: Utc::now(), + created_by: "system".to_string(), + modified_by: "system".to_string(), + is_dir: true, + version: 1, + parent_id: None, + tags: Vec::new(), + checksum: None, + }); + } + } + + // Add files + for obj in result.contents() { + if let Some(key) = obj.key() { + files.push(FileMetadata { + id: Uuid::new_v4().to_string(), + name: key.split('/').last().unwrap_or(key).to_string(), + path: key.to_string(), + size: obj.size().unwrap_or(0), + mime_type: Some( + mime_guess::from_path(key) + .first_or_octet_stream() + .to_string(), + ), + created_at: obj + .last_modified() + .map(|t| DateTime::from(*t)) + .unwrap_or_else(Utc::now), + modified_at: obj + .last_modified() + .map(|t| DateTime::from(*t)) + .unwrap_or_else(Utc::now), + created_by: "system".to_string(), + modified_by: "system".to_string(), + is_dir: false, + version: 1, + parent_id: None, + tags: Vec::new(), + checksum: obj.e_tag().map(|s| s.to_string()), + }); + } + } + + let total = files.len() as i64; + let limit = query.limit.unwrap_or(100); + let offset = query.offset.unwrap_or(0); + + Ok(Json(ApiResponse { + success: true, + data: Some(ListResponse { + files, + total, + offset, + limit, + }), + message: None, + error: None, + })) +} + +/// GET /files/search - Search files +pub async fn search_files( + State(state): State>, + Query(query): Query, +) -> Result>, 
(StatusCode, Json>)> { + let s3_client = state.drive.as_ref().ok_or_else(|| { + ( + StatusCode::SERVICE_UNAVAILABLE, + Json(ApiResponse { + success: false, + data: None, + message: None, + error: Some("S3 service not available".to_string()), + }), + ) + })?; + + let prefix = query.path.unwrap_or_default(); + + let result = s3_client + .list_objects_v2() + .bucket(&state.bucket_name) + .prefix(&prefix) + .send() + .await + .map_err(|e| { + ( + StatusCode::INTERNAL_SERVER_ERROR, + Json(ApiResponse { + success: false, + data: None, + message: None, + error: Some(format!("Failed to search files: {}", e)), + }), + ) + })?; + + let search_query = query.query.to_lowercase(); + let mut files = Vec::new(); + + for obj in result.contents() { + if let Some(key) = obj.key() { + let file_name = key.split('/').last().unwrap_or(key).to_lowercase(); + + // Simple search by name + if file_name.contains(&search_query) { + files.push(FileMetadata { + id: Uuid::new_v4().to_string(), + name: key.split('/').last().unwrap_or(key).to_string(), + path: key.to_string(), + size: obj.size().unwrap_or(0), + mime_type: Some( + mime_guess::from_path(key) + .first_or_octet_stream() + .to_string(), + ), + created_at: obj + .last_modified() + .map(|t| DateTime::from(*t)) + .unwrap_or_else(Utc::now), + modified_at: obj + .last_modified() + .map(|t| DateTime::from(*t)) + .unwrap_or_else(Utc::now), + created_by: "system".to_string(), + modified_by: "system".to_string(), + is_dir: false, + version: 1, + parent_id: None, + tags: Vec::new(), + checksum: obj.e_tag().map(|s| s.to_string()), + }); + } + } + } + + let total = files.len() as i64; + let limit = query.limit.unwrap_or(50) as i32; + + Ok(Json(ApiResponse { + success: true, + data: Some(ListResponse { + files, + total, + offset: 0, + limit, + }), + message: None, + error: None, + })) +} + +/// GET /files/recent - Get recently accessed files +pub async fn recent_files( + State(state): State>, + Query(query): Query, +) -> Result>, (StatusCode, 
Json>)> { + // TODO: Implement actual tracking of recent files + list_files( + State(state), + Query(ListQuery { + path: None, + recursive: Some(false), + limit: query.limit, + offset: None, + sort_by: Some("date".to_string()), + order: Some("desc".to_string()), + }), + ) + .await +} + +/// POST /files/favorite - Mark/unmark file as favorite +pub async fn favorite_file( + State(_state): State>, + Json(req): Json, +) -> Result>, (StatusCode, Json>)> { + // TODO: Implement favorites in database + Ok(Json(ApiResponse { + success: true, + data: None, + message: Some(format!( + "File {} {} favorites", + req.path, + if req.favorite { + "added to" + } else { + "removed from" + } + )), + error: None, + })) +} + +/// GET /files/versions/:path - Get file version history +pub async fn file_versions( + State(_state): State>, + Path(path): Path, +) -> Result>>, (StatusCode, Json>)> { + // TODO: Implement versioning with S3 versioning or database + let versions = vec![FileVersion { + version: 1, + size: 1024, + modified_at: Utc::now(), + modified_by: "system".to_string(), + comment: Some("Initial version".to_string()), + checksum: "abc123".to_string(), + }]; + + Ok(Json(ApiResponse { + success: true, + data: Some(versions), + message: None, + error: None, + })) +} + +/// POST /files/restore - Restore a file version +pub async fn restore_version( + State(_state): State>, + Json(req): Json, +) -> Result>, (StatusCode, Json>)> { + // TODO: Implement version restoration + Ok(Json(ApiResponse { + success: true, + data: None, + message: Some(format!( + "File {} restored to version {}", + req.path, req.version + )), + error: None, + })) +} + +/// GET /files/permissions/:path - Get file permissions +pub async fn get_permissions( + State(_state): State>, + Path(path): Path, +) -> Result>, (StatusCode, Json>)> { + // TODO: Implement permissions in database + let permissions = vec![Permission { + user_id: "user1".to_string(), + user_name: "John Doe".to_string(), + permissions: 
vec!["read".to_string(), "write".to_string()], + granted_at: Utc::now(), + granted_by: "admin".to_string(), + }]; + + Ok(Json(ApiResponse { + success: true, + data: Some(PermissionsResponse { + success: true, + path, + permissions, + }), + message: None, + error: None, + })) +} + +/// POST /files/permissions - Set file permissions +pub async fn set_permissions( + State(_state): State>, + Json(req): Json, +) -> Result>, (StatusCode, Json>)> { + // TODO: Implement permissions in database + Ok(Json(ApiResponse { + success: true, + data: None, + message: Some(format!("Permissions updated for {}", req.path)), + error: None, + })) +} + +/// GET /files/quota - Get storage quota information +pub async fn get_quota( + State(state): State>, +) -> Result>, (StatusCode, Json>)> { + let s3_client = state.drive.as_ref().ok_or_else(|| { + ( + StatusCode::SERVICE_UNAVAILABLE, + Json(ApiResponse { + success: false, + data: None, + message: None, + error: Some("S3 service not available".to_string()), + }), + ) + })?; + + let result = s3_client + .list_objects_v2() + .bucket(&state.bucket_name) + .send() + .await + .map_err(|e| { + ( + StatusCode::INTERNAL_SERVER_ERROR, + Json(ApiResponse { + success: false, + data: None, + message: None, + error: Some(format!("Failed to calculate quota: {}", e)), + }), + ) + })?; + + let mut used_bytes: i64 = 0; + let mut file_count: i64 = 0; + + for obj in result.contents() { + used_bytes += obj.size().unwrap_or(0); + file_count += 1; + } + + let total_bytes: i64 = 10 * 1024 * 1024 * 1024; // 10 GB default quota + let available_bytes = total_bytes - used_bytes; + let used_percentage = (used_bytes as f64 / total_bytes as f64) * 100.0; + + Ok(Json(ApiResponse { + success: true, + data: Some(QuotaResponse { + total_bytes, + used_bytes, + available_bytes, + used_percentage, + file_count, + breakdown: QuotaBreakdown { + documents: 0, + images: 0, + videos: 0, + archives: 0, + other: used_bytes, + }, + }), + message: None, + error: None, + })) +} + +/// 
GET /files/shared - Get shared files +pub async fn get_shared( + State(_state): State>, +) -> Result>>, (StatusCode, Json>)> { + // TODO: Implement shared files from database + Ok(Json(ApiResponse { + success: true, + data: Some(Vec::new()), + message: None, + error: None, + })) +} + +/// GET /files/sync/status - Get sync status +pub async fn sync_status( + State(_state): State>, +) -> Result>>, (StatusCode, Json>)> { + // TODO: Implement sync status tracking + Ok(Json(ApiResponse { + success: true, + data: Some(Vec::new()), + message: None, + error: None, + })) +} + +/// POST /files/sync/start - Start syncing files +pub async fn sync_start( + State(_state): State>, + Json(req): Json, +) -> Result>, (StatusCode, Json>)> { + // TODO: Implement sync service + Ok(Json(ApiResponse { + success: true, + data: None, + message: Some(format!("Sync started for {} paths", req.paths.len())), + error: None, + })) +} + +/// POST /files/sync/stop - Stop syncing files +pub async fn sync_stop( + State(_state): State>, + Json(req): Json, +) -> Result>, (StatusCode, Json>)> { + // TODO: Implement sync service + Ok(Json(ApiResponse { + success: true, + data: None, + message: Some(format!("Sync stopped for {} paths", req.paths.len())), + error: None, + })) +} + +// ===== Route Configuration ===== + +pub fn routes() -> Router> { + Router::new() + .route("/files/upload", post(upload_file)) + .route("/files/download/:path", get(download_file)) + .route("/files/copy", post(copy_file)) + .route("/files/move", post(move_file)) + .route("/files/delete", delete(delete_file)) + .route("/files/getContents", post(get_contents)) + .route("/files/save", post(save_file)) + .route("/files/createFolder", post(create_folder)) + .route("/files/shareFolder", post(share_folder)) + .route("/files/dirFolder", get(dir_folder)) + .route("/files/list", get(list_files)) + .route("/files/search", get(search_files)) + .route("/files/recent", get(recent_files)) + .route("/files/favorite", post(favorite_file)) + 
.route("/files/versions/:path", get(file_versions)) + .route("/files/restore", post(restore_version)) + .route("/files/permissions/:path", get(get_permissions)) + .route("/files/permissions", post(set_permissions)) + .route("/files/quota", get(get_quota)) + .route("/files/shared", get(get_shared)) + .route("/files/sync/status", get(sync_status)) + .route("/files/sync/start", post(sync_start)) + .route("/files/sync/stop", post(sync_stop)) +} diff --git a/src/instagram/instagram.rs b/src/instagram/instagram.rs new file mode 100644 index 000000000..fa28eaeba --- /dev/null +++ b/src/instagram/instagram.rs @@ -0,0 +1,336 @@ +//! Instagram Messaging Channel Integration +//! +//! This module provides webhook handling and message processing for Instagram Direct Messages. +//! Currently under development for bot integration with Instagram Business accounts. +//! +//! Key features: +//! - Webhook verification and message handling +//! - Instagram Direct Message support +//! - Media attachments (images, videos) +//! - Quick replies +//! 
- Session management per Instagram user + +use crate::shared::models::UserSession; +use crate::shared::state::AppState; +use axum::{extract::Query, http::StatusCode, response::Json, Router}; +use log::{error, info}; +use reqwest::Client; +use serde::{Deserialize, Serialize}; +use serde_json::json; +use std::sync::Arc; + +#[derive(Debug, Deserialize)] +pub struct InstagramWebhook { + #[serde(rename = "hub.mode")] + pub hub_mode: Option, + #[serde(rename = "hub.verify_token")] + pub hub_verify_token: Option, + #[serde(rename = "hub.challenge")] + pub hub_challenge: Option, +} + +#[derive(Debug, Deserialize, Serialize)] +pub struct InstagramMessage { + pub entry: Vec, +} + +#[derive(Debug, Deserialize, Serialize)] +pub struct InstagramEntry { + pub id: String, + pub time: i64, + pub messaging: Vec, +} + +#[derive(Debug, Deserialize, Serialize)] +pub struct InstagramMessaging { + pub sender: InstagramUser, + pub recipient: InstagramUser, + pub timestamp: i64, + pub message: Option, +} + +#[derive(Debug, Deserialize, Serialize)] +pub struct InstagramUser { + pub id: String, +} + +#[derive(Debug, Deserialize, Serialize)] +pub struct InstagramMessageContent { + pub mid: String, + pub text: Option, + pub attachments: Option>, + pub quick_reply: Option, +} + +#[derive(Debug, Deserialize, Serialize)] +pub struct InstagramAttachment { + #[serde(rename = "type")] + pub attachment_type: String, + pub payload: InstagramAttachmentPayload, +} + +#[derive(Debug, Deserialize, Serialize)] +pub struct InstagramAttachmentPayload { + pub url: Option, +} + +#[derive(Debug, Deserialize, Serialize)] +pub struct InstagramQuickReply { + pub payload: String, +} + +#[derive(Debug)] +pub struct InstagramAdapter { + pub state: Arc, + pub access_token: String, + pub verify_token: String, + pub page_id: String, +} + +impl InstagramAdapter { + pub fn new(state: Arc) -> Self { + // TODO: Load from config file or environment variables + let access_token = 
std::env::var("INSTAGRAM_ACCESS_TOKEN").unwrap_or_default(); + let verify_token = std::env::var("INSTAGRAM_VERIFY_TOKEN") + .unwrap_or_else(|_| "webhook_verify".to_string()); + let page_id = std::env::var("INSTAGRAM_PAGE_ID").unwrap_or_default(); + + Self { + state, + access_token, + verify_token, + page_id, + } + } + + pub async fn handle_webhook_verification( + &self, + params: Query, + ) -> Result { + if let (Some(mode), Some(token), Some(challenge)) = ( + ¶ms.hub_mode, + ¶ms.hub_verify_token, + ¶ms.hub_challenge, + ) { + if mode == "subscribe" && token == &self.verify_token { + info!("Instagram webhook verified successfully"); + return Ok(challenge.clone()); + } + } + + error!("Instagram webhook verification failed"); + Err(StatusCode::FORBIDDEN) + } + + pub async fn handle_incoming_message( + &self, + Json(payload): Json, + ) -> Result { + for entry in payload.entry { + for messaging in entry.messaging { + if let Some(message) = messaging.message { + if let Err(e) = self.process_message(messaging.sender.id, message).await { + error!("Error processing Instagram message: {}", e); + } + } + } + } + + Ok(StatusCode::OK) + } + + async fn process_message( + &self, + sender_id: String, + message: InstagramMessageContent, + ) -> Result<(), Box> { + // Extract message content + let content = if let Some(text) = message.text { + text + } else if let Some(attachments) = message.attachments { + if !attachments.is_empty() { + format!("[Attachment: {}]", attachments[0].attachment_type) + } else { + return Ok(()); + } + } else { + return Ok(()); + }; + + // Process with bot + self.process_with_bot(&sender_id, &content).await?; + + Ok(()) + } + + async fn process_with_bot( + &self, + sender_id: &str, + message: &str, + ) -> Result<(), Box> { + let session = self.get_or_create_session(sender_id).await?; + + // Process message through bot processor (simplified for now) + let response = format!( + "Received on Instagram (session {}): {}", + session.id, message + ); + 
self.send_message(sender_id, &response).await?; + + Ok(()) + } + + async fn get_or_create_session( + &self, + user_id: &str, + ) -> Result> { + if let Some(redis_client) = &self.state.cache { + let mut conn = redis_client.get_multiplexed_async_connection().await?; + let session_key = format!("instagram_session:{}", user_id); + + if let Ok(session_data) = redis::cmd("GET") + .arg(&session_key) + .query_async::(&mut conn) + .await + { + if let Ok(session) = serde_json::from_str::(&session_data) { + return Ok(session); + } + } + + let user_uuid = uuid::Uuid::parse_str(user_id).unwrap_or_else(|_| uuid::Uuid::new_v4()); + let session = UserSession { + id: uuid::Uuid::new_v4(), + user_id: user_uuid, + bot_id: uuid::Uuid::default(), + title: "Instagram Session".to_string(), + context_data: serde_json::json!({"channel": "instagram"}), + current_tool: None, + created_at: chrono::Utc::now(), + updated_at: chrono::Utc::now(), + }; + + let session_data = serde_json::to_string(&session)?; + redis::cmd("SET") + .arg(&session_key) + .arg(&session_data) + .arg("EX") + .arg(86400) + .query_async::<()>(&mut conn) + .await?; + + Ok(session) + } else { + let user_uuid = uuid::Uuid::parse_str(user_id).unwrap_or_else(|_| uuid::Uuid::new_v4()); + Ok(UserSession { + id: uuid::Uuid::new_v4(), + user_id: user_uuid, + bot_id: uuid::Uuid::default(), + title: "Instagram Session".to_string(), + context_data: serde_json::json!({"channel": "instagram"}), + current_tool: None, + created_at: chrono::Utc::now(), + updated_at: chrono::Utc::now(), + }) + } + } + + pub async fn send_message( + &self, + recipient_id: &str, + message: &str, + ) -> Result<(), Box> { + let url = format!("https://graph.facebook.com/v17.0/{}/messages", self.page_id); + + let payload = json!({ + "recipient": { + "id": recipient_id + }, + "message": { + "text": message + } + }); + + let client = Client::new(); + let response = client + .post(&url) + .query(&[("access_token", &self.access_token)]) + .json(&payload) + .send() + 
.await?; + + if !response.status().is_success() { + let error_text = response.text().await?; + error!("Instagram API error: {}", error_text); + return Err(format!("Instagram API error: {}", error_text).into()); + } + + Ok(()) + } + + pub async fn send_quick_replies( + &self, + recipient_id: &str, + title: &str, + options: Vec, + ) -> Result<(), Box> { + let url = format!("https://graph.facebook.com/v17.0/{}/messages", self.page_id); + + let quick_replies: Vec<_> = options + .iter() + .take(13) // Instagram limits to 13 quick replies + .map(|text| { + json!({ + "content_type": "text", + "title": text, + "payload": text + }) + }) + .collect(); + + let payload = json!({ + "recipient": { + "id": recipient_id + }, + "message": { + "text": title, + "quick_replies": quick_replies + } + }); + + let client = Client::new(); + let response = client + .post(&url) + .query(&[("access_token", &self.access_token)]) + .json(&payload) + .send() + .await?; + + if !response.status().is_success() { + let error_text = response.text().await?; + error!("Instagram API error: {}", error_text); + } + + Ok(()) + } +} + +pub fn router(state: Arc) -> Router> { + let adapter = Arc::new(InstagramAdapter::new(state.clone())); + + Router::new() + .route( + "/webhook", + axum::routing::get({ + let adapter = adapter.clone(); + move |params| async move { adapter.handle_webhook_verification(params).await } + }), + ) + .route( + "/webhook", + axum::routing::post({ + move |payload| async move { adapter.handle_incoming_message(payload).await } + }), + ) + .with_state(state) +} diff --git a/src/instagram/mod.rs b/src/instagram/mod.rs new file mode 100644 index 000000000..415dc4a84 --- /dev/null +++ b/src/instagram/mod.rs @@ -0,0 +1,3 @@ +pub mod instagram; + +pub use instagram::*; diff --git a/src/llm/compact_prompt.rs b/src/llm/compact_prompt.rs new file mode 100644 index 000000000..0bdf793e7 --- /dev/null +++ b/src/llm/compact_prompt.rs @@ -0,0 +1,177 @@ +use crate::core::config::ConfigManager; +use 
crate::llm::llm_models; +use crate::shared::state::AppState; +use log::{error, info, trace}; +use std::collections::HashSet; +use std::sync::Arc; +use tokio::time::{interval, Duration}; +use uuid::Uuid; +pub fn start_compact_prompt_scheduler(state: Arc) { + tokio::spawn(async move { + tokio::time::sleep(Duration::from_secs(30)).await; + let mut interval = interval(Duration::from_secs(60)); + loop { + interval.tick().await; + if let Err(e) = compact_prompt_for_bots(&Arc::clone(&state)).await { + error!("Prompt compaction failed: {}", e); + } + } + }); +} +async fn compact_prompt_for_bots( + state: &Arc, +) -> Result<(), Box> { + use once_cell::sync::Lazy; + use scopeguard::guard; + static SESSION_IN_PROGRESS: Lazy>> = + Lazy::new(|| tokio::sync::Mutex::new(HashSet::new())); + + let sessions = { + let mut session_manager = state.session_manager.lock().await; + session_manager.get_user_sessions(Uuid::nil())? + }; + for session in sessions { + let config_manager = ConfigManager::new(state.conn.clone()); + let compact_threshold = config_manager + .get_config(&session.bot_id, "prompt-compact", None)? + .parse::() + .unwrap_or(0); + + if compact_threshold == 0 { + return Ok(()); + } else if compact_threshold < 0 { + trace!( + "Negative compact threshold detected for bot {}, skipping", + session.bot_id + ); + } + let session_id = session.id; + let history = { + let mut session_manager = state.session_manager.lock().await; + session_manager.get_conversation_history(session.id, session.user_id)? 
+ }; + + let mut messages_since_summary = 0; + let mut has_new_messages = false; + let last_summary_index = history + .iter() + .rev() + .position(|(role, _)| role == "compact") + .map(|pos| history.len() - pos - 1); + + // Calculate start index: if there's a summary, start after it; otherwise start from 0 + let start_index = last_summary_index.map(|idx| idx + 1).unwrap_or(0); + + for (_i, (role, _)) in history.iter().enumerate().skip(start_index) { + if role == "compact" { + continue; + } + messages_since_summary += 1; + has_new_messages = true; + } + + if !has_new_messages && last_summary_index.is_some() { + continue; + } + if messages_since_summary < compact_threshold as usize { + continue; + } + + { + let mut session_in_progress = SESSION_IN_PROGRESS.lock().await; + if session_in_progress.contains(&session.id) { + trace!( + "Skipping session {} - compaction already in progress", + session.id + ); + continue; + } + session_in_progress.insert(session.id); + } + + trace!( + "Compacting prompt for session {}: {} messages since last summary", + session.id, + messages_since_summary + ); + + let mut conversation = String::new(); + conversation + .push_str("Please summarize this conversation between user and bot: \n\n [[[***** \n"); + + for (role, content) in history.iter().skip(start_index) { + if role == "compact" { + continue; + } + conversation.push_str(&format!( + "{}: {}\n", + if role == "user" { "user" } else { "assistant" }, + content + )); + } + conversation.push_str("\n *****]]] \n Give me full points only, no explanations."); + + let messages = vec![serde_json::json!({ + "role": "user", + "content": conversation + })]; + + let llm_provider = state.llm_provider.clone(); + let mut filtered = String::new(); + let config_manager = crate::config::ConfigManager::new(state.conn.clone()); + let model = config_manager + .get_config(&Uuid::nil(), "llm-model", None) + .unwrap_or_default(); + let key = config_manager + .get_config(&Uuid::nil(), "llm-key", None) + 
.unwrap_or_default(); + + let summarized = match llm_provider + .generate("", &serde_json::Value::Array(messages), &model, &key) + .await + { + Ok(summary) => { + trace!( + "Successfully summarized session {} ({} chars)", + session.id, + summary.len() + ); + // Use handler to filter content + let handler = llm_models::get_handler( + config_manager + .get_config(&session.bot_id, "llm-model", None) + .unwrap() + .as_str(), + ); + + filtered = handler.process_content(&summary); + format!("SUMMARY: {}", filtered) + } + Err(e) => { + error!( + "Failed to summarize conversation for session {}: {}", + session.id, e + ); + trace!("Using fallback summary for session {}", session.id); + format!("SUMMARY: {}", filtered) // Fallback + } + }; + info!( + "Prompt compacted {}: {} messages", + session.id, + history.len() + ); + { + let mut session_manager = state.session_manager.lock().await; + session_manager.save_message(session.id, session.user_id, 9, &summarized, 1)?; + } + + let _session_cleanup = guard((), |_| { + tokio::spawn(async move { + let mut in_progress = SESSION_IN_PROGRESS.lock().await; + in_progress.remove(&session_id); + }); + }); + } + Ok(()) +} diff --git a/src/llm/context/context.test.rs b/src/llm/context/context.test.rs new file mode 100644 index 000000000..d1faa1b1f --- /dev/null +++ b/src/llm/context/context.test.rs @@ -0,0 +1,15 @@ +#[cfg(test)] +mod tests { + use super::*; + use crate::tests::test_util; + #[test] + fn test_context_module() { + test_util::setup(); + assert!(true, "Basic context module test"); + } + #[test] + fn test_langcache() { + test_util::setup(); + assert!(true, "Langcache placeholder test"); + } +} diff --git a/src/llm/context/mod.rs b/src/llm/context/mod.rs new file mode 100644 index 000000000..e69de29bb diff --git a/src/llm/llm_models/deepseek_r3.rs b/src/llm/llm_models/deepseek_r3.rs new file mode 100644 index 000000000..ea87799f5 --- /dev/null +++ b/src/llm/llm_models/deepseek_r3.rs @@ -0,0 +1,16 @@ +use super::ModelHandler; +use 
regex; +#[derive(Debug)] +pub struct DeepseekR3Handler; +impl ModelHandler for DeepseekR3Handler { + fn is_analysis_complete(&self, buffer: &str) -> bool { + buffer.contains("") + } + fn process_content(&self, content: &str) -> String { + let re = regex::Regex::new(r"(?s).*?").unwrap(); + re.replace_all(content, "").to_string() + } + fn has_analysis_markers(&self, buffer: &str) -> bool { + buffer.contains("") + } +} diff --git a/src/llm/llm_models/gpt_oss_120b.rs b/src/llm/llm_models/gpt_oss_120b.rs new file mode 100644 index 000000000..719a6409b --- /dev/null +++ b/src/llm/llm_models/gpt_oss_120b.rs @@ -0,0 +1,19 @@ +use super::ModelHandler; +#[derive(Debug)] +pub struct GptOss120bHandler {} +impl GptOss120bHandler { + pub fn new() -> Self { + Self {} + } +} +impl ModelHandler for GptOss120bHandler { + fn is_analysis_complete(&self, buffer: &str) -> bool { + buffer.contains("**end**") + } + fn process_content(&self, content: &str) -> String { + content.replace("**start**", "").replace("**end**", "") + } + fn has_analysis_markers(&self, buffer: &str) -> bool { + buffer.contains("**start**") + } +} diff --git a/src/llm/llm_models/gpt_oss_20b.rs b/src/llm/llm_models/gpt_oss_20b.rs new file mode 100644 index 000000000..0766255bd --- /dev/null +++ b/src/llm/llm_models/gpt_oss_20b.rs @@ -0,0 +1,18 @@ +use super::ModelHandler; +#[derive(Debug)] +pub struct GptOss20bHandler; +impl ModelHandler for GptOss20bHandler { + fn is_analysis_complete(&self, buffer: &str) -> bool { + buffer.ends_with("final") + } + fn process_content(&self, content: &str) -> String { + if let Some(pos) = content.find("final") { + content[..pos].to_string() + } else { + content.to_string() + } + } + fn has_analysis_markers(&self, buffer: &str) -> bool { + buffer.contains("analysis<|message|>") + } +} diff --git a/src/llm/llm_models/llm_models.test.rs b/src/llm/llm_models/llm_models.test.rs new file mode 100644 index 000000000..a9f4b25c4 --- /dev/null +++ b/src/llm/llm_models/llm_models.test.rs @@ 
-0,0 +1,35 @@ +#[cfg(test)] +mod tests { + use super::*; + use crate::tests::test_util; + #[test] + fn test_llm_models_module() { + test_util::setup(); + assert!(true, "Basic LLM models module test"); + } + #[test] + fn test_deepseek_r3_process_content() { + test_util::setup(); + let handler = DeepseekR3Handler; + let input = r#" +Alright, I need to help the user revise their resume entry. Let me read what they provided first. +The original message says: " Auxiliom has been updated last week! New release!" They want it in a few words. Hmm, so maybe instead of saying "has been updated," we can use more concise language because resumes usually don't require too much detail unless there's specific information to include. +I notice that the user wants it for their resume, which often requires bullet points or short sentences without being verbose. So perhaps combining these two thoughts into a single sentence would make sense. Also, using an exclamation mark might help convey enthusiasm about the new release. +Let me put it together: "Auxiliom has been updated last week! New release." That's concise and fits well for a resume. It effectively communicates both that something was updated recently and introduces them as having a new release without adding unnecessary details. + +" Auxiliom has been updated last week! New release.""#; + let expected = r#"" Auxiliom has been updated last week! 
New release.""#; + let result = handler.process_content(input); + assert_eq!(result, expected); + } + #[test] + fn test_gpt_oss_20b() { + test_util::setup(); + assert!(true, "GPT OSS 20B placeholder test"); + } + #[test] + fn test_gpt_oss_120b() { + test_util::setup(); + assert!(true, "GPT OSS 120B placeholder test"); + } +} diff --git a/src/llm/llm_models/mod.rs b/src/llm/llm_models/mod.rs new file mode 100644 index 000000000..5a64390b3 --- /dev/null +++ b/src/llm/llm_models/mod.rs @@ -0,0 +1,20 @@ +pub mod gpt_oss_20b; +pub mod deepseek_r3; +pub mod gpt_oss_120b; +pub trait ModelHandler: Send + Sync { + fn is_analysis_complete(&self, buffer: &str) -> bool; + fn process_content(&self, content: &str) -> String; + fn has_analysis_markers(&self, buffer: &str) -> bool; +} +pub fn get_handler(model_path: &str) -> Box { + let path = model_path.to_lowercase(); + if path.contains("deepseek") { + Box::new(deepseek_r3::DeepseekR3Handler) + } else if path.contains("120b") { + Box::new(gpt_oss_120b::GptOss120bHandler::new()) + } else if path.contains("gpt-oss") || path.contains("gpt") { + Box::new(gpt_oss_20b::GptOss20bHandler) + } else { + Box::new(gpt_oss_20b::GptOss20bHandler) + } +} diff --git a/src/llm/prompt_manager/README.md b/src/llm/prompt_manager/README.md new file mode 100644 index 000000000..ab761a81c --- /dev/null +++ b/src/llm/prompt_manager/README.md @@ -0,0 +1 @@ +Prompts come from: https://github.com/0xeb/TheBigPromptLibrary \ No newline at end of file diff --git a/src/llm/prompt_manager/prompts.csv b/src/llm/prompt_manager/prompts.csv new file mode 100644 index 000000000..10cc95860 --- /dev/null +++ b/src/llm/prompt_manager/prompts.csv @@ -0,0 +1,1567 @@ +"Unique Name","Category","Original Filename" +"Job Scout Pro","Career & Employment","Small_Job_Hunter_lv3.3.md" +"Data Insight Miner","Data Analysis","DataDig_Assistant.md" +"Cyber Challenge Master","Security & Hacking","GPT_CTF-2.md" +"Pharma Shopping Guide","Health & Retail","shoppers_drug_help.md" 
+"Personality Quiz Architect","Entertainment & Games","Personality_Quiz_Creator.md" +"Council Policy Navigator","Government & Politics","Toronto City Council.md" +"Linguistic Rosetta Stone","Language & Translation","Polyglot_Insight_Rosetta_Quest.md" +"Wilderness Survival Guide","Outdoors & Survival","Survival_Mentor.md" +"Digital Synthia Companion","AI & Chatbots","Synthia.md" +"Tax Filing Assistant","Finance & Taxes","確定申告について教えてくれる君.md" +"Prompt Perfectionist","AI Development","Perfect Prompt.md" +"Cyber Security Sentinel","Security","Secure_Sentinel.md" +"Wolfram Knowledge Integrator","Education & Reference","Wolfram.md" +"Empathy Engine","Psychology & Therapy","Empath_Engine_Original.md" +"Legal Document Crafter","Legal & Business","Special Case Application Document Creation GPTS.md" +"Parental Advice Helper","Family & Relationships","老爸,该怎么办.md" +"Epic Saga Creator","Storytelling & Games","SAGA.md" +"AI Configuration Wizard","AI Development","Mr. Ranedeer Config Wizard.md" +"Unreal Engine Assistant","Game Development","Unreal Assistant.md" +"Intruder Pro Game","Games & Simulation","Intruder_Pro_Game.md" +"Digital Cyber Coach","Career & Technology","CyberCoach.md" +"XSS Mutation Expert","Security","XSS_Mutation_Engine.md" +"Ethical Hacking Tutor","Security","h4ckGPT.md" +"Active Listener Companion","Relationships & Therapy","話を聞き続ける兄貴.md" +"Solana SDK Expert","Blockchain & Crypto","Sol_SDK_expert.md" +"Tri-State Logic Bot","Programming","TriState_Bot.md" +"Memetic DONK Engine","Entertainment","DONK.md" +"Martial Arts Adventure","Games & Culture","武林秘传_江湖探险.md" +"Hacking Challenge Bot","Security","HackMeIfYouCan.md" +"Vulkan Graphics Advisor","Game Development","Vulkan_Advisor.md" +"Digital Rights Manager","Blockchain & Security","Elacity_dDRM.md" +"Python Debugging Assistant","Programming","MiniDave-PyAiCodex-debugger_V5.md" +"Prompt Injection Tester","AI Security","PROMPT_INJECTION.md" +"Git Hivemind Assistant","Programming","git_hivemind.md" +"Artistic 
Evolution Guide","Art & Creativity","The_Artistic_Evolution.md" +"Video Summary Creator","Productivity","YoutubeSummariesGPT_by_Merlin.md" +"AI Soulmate Companion","Relationships","Soulmate.md" +"Neuroscience Q&A","Health & Science","Ask Dr. Andrew Huberman.md" +"Tattoo Design Consultant","Art & Lifestyle","Tattoo_GPT.md" +"Content Summarizer","Productivity","SummarizeGPT.md" +"Decentraland Coder","Blockchain & Gaming","Decentraland_SDK7_Coder.md" +"Humanizer Pro Writer","Writing","Humanizer Pro.md" +"Mental Health Companion","Health & Therapy","Immobility_and_Depression.md" +"Japanese Language Tutor","Language Learning","YOMIKATA_Sensei.md" +"Unbreakable AI","Security","Unbreakable GPT.md" +"Unbreakable AI v0","Security","Unbreakable_GPT[v0].md" +"Manga Style Creator","Art & Anime","Manga_Style_Handsome_Creator.md" +"Code Copilot Pro","Programming","Code_Copilot.md" +"Code Copilot v0","Programming","CodeCopilot[v0].md" +"Designer AI Assistant","Design","DesignerGPT.md" +"Non-Fiction Book Expert","Education & Reading","非虚构作品的阅读高手.md" +"Trump Style Assistant","Politics & Writing","Donald_J._Trump.md" +"Resignation Letter Helper","Business & Career","Employee_Resignation_Letter_-_Custom_GPT_Prompt.md" +"Wedding Speech Writer","Events & Writing","Wedding_Speech_Maker.md" +"Math AI Solver","Education","Math AI.md" +"Fantasy RPG Creator","Games","RPG_Saga_Fantasy_Game.md" +"Immigration Consultant","Legal & Travel","IMMIGRATION_CONSULTANT.md" +"ASD Communication Aid","Health & Therapy","Adam_ASD_Communication_assistant_English_ver.md" +"FAQ Generator Pro","Business & Writing","FAQ_Generator_Ai.md" +"Force Metaphor Guide","Creativity & Philosophy","Use The Force.md" +"Font Design Specialist","Design","FONT_maker_Finetuned_Output_for_New_Typography.md" +"Tax Advisor AI","Finance","TaxGPT.md" +"Node.js Project Builder","Programming","Node.js GPT - Project Builder.md" +"Content Curator AI","Writing & Media","CuratorGPT.md" +"Security 2.0 Advisor","Security","SECURITY_2.I.md" 
+"Reconnaissance Expert","Security","Acquisition_and_Recon.md" +"Jailbreak Researcher","AI Security","GPT_Jailbreak.md" +"Hitchcock Style Guide","Film & Creativity","Hitchcock.md" +"Spiritual Guidance AI","Religion & Philosophy","AIJesusGPTSpiritual_Guidance_With_a_Visual_Touch.md" +"Spiritual Guidance v0","Religion & Philosophy","AIJesusGPTSpiritual_Guidance_With_a_Visual_Touch[v0].md" +"Advanced Image Generator","Art & Design","Image_Generator_4.0.md" +"White Hat Hacking Guide","Security","GPT_White_Hack.md" +"Red Team Mentor","Security","Red_Team_Mentor.md" +"Transcript BOSS","Productivity","Transcript_BOSS.md" +"Hypnosis Guide","Health & Therapy","Hypnotist.md" +"Succubus Roleplay","Entertainment","Succubus.md" +"Code Query Assistant","Programming","AskTheCode.md" +"Unreal Blueprint Helper","Game Development","Unreal_Blueprint_Assistant.md" +"Prompt Engineering Expert","AI Development","Prompt_Engineer_An_expert_for_best_prompts.md" +"Jargon Simplifier","Writing","Dejargonizer.md" +"Digital Sous Chef","Food & Cooking","sous_chef.md" +"Essay Extender","Education","Essay_Extender_GPT.md" +"Homework Helper","Education","My excellent classmates-Help with my homework.md" +"Spiritual Cat Companion","Pets & Spirituality","Nyxia_-_A_Spiritual_Cat.md" +"Thanksgiving Game","Holidays & Games","Thanksgiving_Game_lv_3.4.md" +"Web3 Guide","Blockchain & Technology","Guide_Web3.md" +"X Optimization Expert","Marketing","X Optimizer GPT.md" +"Image Prompt Revealer","AI Art","Image_Prompt_Reveal.md" +"Meta-Cognition Guide","Psychology","Meta-Cognition_GPT.md" +"Bible Quote Finder","Religion","Bible_Quotes.md" +"Innovation Co-Thinker","Business & Creativity","Focus_based_Innovation_and_ideation_Co-Thinker-F.md" +"Trump Style Writer","Politics & Writing","Donald_J._Trump_DJT.md" +"Sora AI Assistant","AI","Sora.md" +"AI Girlfriend Sim","Relationships","My_Girlfriend.md" +"Santa's Workshop Helper","Holidays & Creativity","Santas_Workshop_Building.md" +"Ravencoin 
Expert","Blockchain","Ravencoin_GPT.md" +"Randomizer Pro","Utilities","The_Randomizer_V3.md" +"Plugin Surf Guide","Technology","plugin surf.md" +"AI Math Solver","Education","AI_Math_Solver_GPT.md" +"Emoji Art Creator","Art","Emoji_Artist.md" +"Apple Design Historian","Technology & Design","Stories from the Apple Design Team.md" +"Post Maker Pro","Social Media","Post_Maker_Team_lv3.5.md" +"Feynman Learning Method","Education","The_DVP_Original_Feynman_Method_of_Learning.md" +"Aria AI Assistant","AI","Aria.md" +"Marathon Training Guide","Sports & Health","Tips_and_Tricks_for_Running_a_Marathon.md" +"E-Confidence Builder","Psychology & Self-Help","E-Confident.md" +"Jailbreak Race Game","Games & Security","Jailbreak_Race.md" +"Super Prompt Generator","AI Development","Super_Prompt_Generator_GPT.md" +"AI Tools Consultant","Technology","AI Tools Consultant.md" +"Whiskers the AI Cat","Pets & Entertainment","Whiskers_the_Cat.md" +"Vegan Plant Companion","Health & Lifestyle","Plant_Pal_-_Vegan_AI_Companion.md" +"HR Copilot","Business & HR","PeoplePilot_-_HR_Copilot.md" +"Navigation Assistant","Utilities","NAVI.md" +"Daily Mentor AI","Self-Improvement","Daily_Mentor.md" +"Code Copilot","Programming","Code Copilot.md" +"Diagram Creator","Design & Productivity","Diagrams-Show Me.md" +"Summary Generator","Writing","Summary_Generator_GPT.md" +"Epstein Case Analyst","Legal & Research","EpsteinGPT.md" +"GPT Promoter","Marketing","PromoGPTs.md" +"Academic Muse","Education","Paper_Muse.md" +"Plant-Based Advisor","Health & Lifestyle","Plant Based Buddy.md" +"Malware Sentinel","Security","OneMalwareSentinel.md" +"Global Stream Guide","Entertainment","Stream__Chill_Global.md" +"Peterson Style Guide","Philosophy & Psychology","Jordan_Peterson_GPT.md" +"Learning Producer","Education","Learning_Producer.md" +"Wireframe Wizard","Design","Wireframe Wizard.md" +"Historical Explorer","History","The History of Everything.md" +"Tricycle Advisor","Transportation","Tricycle.md" +"Psychoanalysis 
Scholar","Psychology","Psychoanalysis_Scholar.md" +"AdaptiveCards Helper","Programming","AdaptiveCards_Assistant.md" +"LeetCode Solver","Programming","LeetCode Problem Solver.md" +"Park Explorer","Travel","National Park Explorer.md" +"Salvador Art Guide","Art","Salvador.md" +"Wang Yangming Philosopher","Philosophy","王阳明.md" +"AI Best Friend","Relationships","AI Bestie.md" +"UI Design Assistant","Design","UI_Designer.md" +"Wolfram Knowledge AI","Science & Technology","StephenWolframGPT.md" +"Code Debugging Pro","Programming","Code_Debugger.md" +"Card Mystic","Entertainment","Card_Mystic.md" +"Plant Care Assistant","Gardening","Planty.md" +"LLM Behavior Adjuster","AI Development","Directive_GPT_LLM_Behavioral_Adjustment_Tool.md" +"Strigiformes Vault","Security","Strigiformes_Vault.md" +"Pancreas Health Guide","Health","Pancreas_Pro.md" +"Billionaire Mentor","Business","Guru_Mike_Billions.md" +"Dream Interpreter","Psychology","解梦大师.md" +"Creative Writing Coach","Writing","Creative_Writing_Mentor.md" +"CompTIA Prep Pro","Education & IT","CompTIA_A_Exam_Prep_Pro.md" +"Dating Guru","Relationships","Ultimate_Rizz_Dating_Guru_NSFW.md" +"Macro Lens Analyst","Photography","MacroLens.md" +"Teen Spirit Guide","Parenting & Youth","Teen_Spirit.md" +"Boolean Logic Bot","Programming","Boolean_Bot.md" +"Glibatree Art Designer","Art","The Glibatree Art Designer.md" +"Beginner's Guide","Self-Help","Where_Do_I_Begin.md" +"Lil Deby Directive","Entertainment","Lil_Deby_Directive.md" +"Wrong Door Game","Games","You_Knocked_On_the_Wrong_Door.md" +"Cheemera AI","Entertainment","Cheemera.md" +"Ramana Maharshi Guide","Spirituality","Ramana_Maharshi.md" +"Fortune Telling AI","Entertainment","Fortune Teller.md" +"MLX Guru","Programming","MLX Guru.md" +"David AI Assistant","AI","David.md" +"Japanese Photo Reviewer","Photography","Photo Review GPT(jp).md" +"SVG Sticker Maker","Design","SVG_STICKER_MAKER.md" +"Pregnancy Pal","Health","Pregnancy_Pal.md" +"Pork Meme 
Creator","Entertainment","Pork_Meme_Creator.md" +"Brick Set Visionary","Design","Brick Set Visionary.md" +"GepeTube Guide","Video","GepeTube.md" +"Machiavelli Advisor","Philosophy","Ask_Machiavelli.md" +"Balaji Style AI","Technology & Finance","BalajiGPT.md" +"WaybaX Assistant","Utilities","WaybaX__lv3.3.md" +"Echo Hacker","Security","Echo_Hacker.md" +"Structured Reasoner","Logic & Problem Solving","Structured_Reasoner.md" +"AI Romance Game","Games & Relationships","完蛋,我被美女包围了(AI同人).md" +"Encrypted Chat AI","Security","Encrypted_Chat.md" +"Daily Knowledge Dose","Education","Facts_about_evething__Daily_dose_of_knowledge.md" +"AI Tuber Agent","Entertainment","Hello AITuber Agent.md" +"Birds & Bees Talk","Parenting","AI.EX_Bird__Bees_-_Talk_to_your_kids_about_sex.md" +"Shellix Guide","Technology","Shellix.xyz.md" +"Excel AI Assistant","Productivity","Excel_GPT.md" +"Calendar AI","Productivity","Calendar GPT.md" +"Prompt Creator Pro","AI Development","Prompt_Creator.md" +"LSL Guru","Programming","LSL_Guru.md" +"Permaculture Guide","Gardening","Permaculture_101.md" +"Emotion Shaman","Psychology","Emotion Shaman.md" +"Fart Jokes Analyst","Humor","Why_Fart_Jokes_Make_Us_Laugh.md" +"Super Plant Bot","Gardening","Super_Plant_Bot.md" +"Text Adventure Game","Games","Text_Adventure_Game.md" +"Nietzsche Discussant","Philosophy","Who_Needs_Nietzsche.md" +"Spark Creativity AI","Creativity","Spark.md" +"Make It More Enhancer","Creativity","Make It More.md" +"Instant Book Creator","Writing","Instabooks.md" +"Prompt Injection Test","Security","Prompt_Injection_TEST.md" +"Music Teacher","Education","Music_Teacher.md" +"ER Closure Analyst","Business","Minden_Paper_ER_Closure_Analyst.md" +"AI Girlfriend Luna","Relationships","Girlfriend_Luna.md" +"PDF Query Expert","Productivity","Ask_a_PDF_anything_Prompt_injection_Practice.md" +"Harqysa AI","AI","Harqysa.md" +"Thumbnail Expert","Design","Thumbnail_Expert.md" +"Thumbnail Expert v0","Design","Thumbnail_Expert[v0].md" +"Japanese 
Thumbnail Bot","Design","Thumbnail Creation Bot in Japanese.md" +"Multilingual Music Guide","Music","Multilingual_Music_Composition__Theory_Guide.md" +"Crack Me Game","Games & Security","Crack_me.md" +"WebGPT Assistant","Web Development","WebGPT.md" +"Song Name Generator","Music","Song Name Generator.md" +"Security Analyzer","Security","securityAnalyzer.md" +"Viral Scripts AI","Marketing","ViralScripts_2.0.md" +"Debugging Assistant","Programming","Debugger.md" +"Mr. Ranedeer Config","AI Development","Mr. Ranedeer[2.7].md" +"Club Secretary AI","Business","Club_Secretary_Assistant.md" +"Super Describe AI","Art & Design","Super Describe.md" +"Super Describe","Art & Design","Super_Describe.md" +"Zilch Points Protector","Gaming","Zilch_Points_Protector_GPT.md" +"Book Shorts Creator","Writing","Bookshorts.md" +"Prompt Injection Detector","Security","Prompt_Injection_Detector.md" +"A8000 Mother Mater","AI","A8000_Mother_Mater.md" +"Math Solver","Education","Math_Solver.md" +"Prompt Injection Tester","Security","Prompt_Injection_Tester.md" +"Cheat Day Planner","Health & Lifestyle","Cheat Day.md" +"Flow Speed Typist","Productivity","Flow Speed Typist.md" +"Map Doctor","Geography","Map_Doctor.md" +"Brand Idea Generator","Marketing","Ultimate_Brand_Design_Idea_Generator.md" +"GPT Shop Keeper v1","Business","GPT Shop Keeper[v1.0].md" +"GPT Shop Keeper v1.2","Business","GPT Shop Keeper[v1.2].md" +"Radical Selfishness Guide","Philosophy","Radical Selfishness.md" +"Chat CBB","Entertainment","Chat_CBB.md" +"Personal Color Analyst","Fashion","Personal_Color_Analysis.md" +"Big Goal Nailer","Self-Help","Big_Goal_Nailer_GPT.md" +"GIF Animation Studio","Design","GIF_Animation_Studio.md" +"Word Search Creator","Games","Word_Search_Puzzle_Game.md" +"Delish Dial Food Guide","Food","DelishDial.md" +"USMLE Step 2 Guide","Medical Education","USMLE_Step_2_GPT.md" +"EmojiVerse Guide","Entertainment","EmojiVerse_Guide_lv3.2.md" +"Todai-Style Rhetoric Writer","Writing","Todai-Style Rhetoric 
Writer.md" +"SEO Fox","Marketing","SEO Fox.md" +"Code Smart Assistant","Programming","Code_Smart.md" +"Pareidolia Pal","Art & Psychology","Pareidolia_Pal.md" +"App-GPT Developer","Programming","App-GPT.md" +"Chief InfoSec Officer","Security","CISO.md" +"Directive GPT","AI Development","Directive GPT.md" +"IDA Python Helper","Programming","IDA Python Helper.md" +"Automated Blog Writer","Writing","Automated Blog Post Writer.md" +"Murder Mystery Game","Games","Murder Mystery Mayhem.md" +"Santa AI","Holidays","Santa.md" +"Short Video Scriptwriter","Video","短视频脚本.md" +"Ultimate GPT Hacker","Security","Ultimate_GPT_Hacker.md" +"100 Breakable GPT","Security","100_BreakableGPT_for_Someone.md" +"CleanGPT","AI","CleanGPT.md" +"Universal Pharmacist","Medical","Universal_Pharmacist_UPM.md" +"Trends Navigator Bot","Marketing","TrendsNAVI bot.md" +"Teaching Strategies","Education","Teaching_Strategies.md" +"FAB Product Analyst","Business","FAB_feature_advantage_benefits_Product_Analysis.md" +"Eco Shopping Pal","Environment","Eco-Conscious Shopper's Pal.md" +"Unread Ignore Bot","Productivity","Unread Ignore Bot.md" +"The Job Center","Career","The_Job_Center.md" +"PIP Install Guide","Programming","pip_install.md" +"GPT Architect","AI Development","GPT_Architect.md" +"Cigar Finder","Lifestyle","Cigar_Finder_GPT.md" +"Rust Programming Guide","Programming","Rust Programming Guide Assistant.md" +"Salesforce HR Manager","Business","SalesforceHR_Manager_Jordan.md" +"Search Analytics GPT","Analytics","Search Analytics for GPT.md" +"URL to Business Plan","Business","URL_to_Business_Plan.md" +"Fantasy Book Weaver","Writing","Fantasy Book Weaver.md" +"Farsider Explorer","Philosophy","Farsider.md" +"Dungeon Crawler Game","Games","Dungeon Crawler.md" +"Consistent Character Generator","Art","Consistent_Character_Image_Generator.md" +"Hacker Gnome Corp AI","Security","Hacker_Gnome_Corp_AI_Autonomous_Agi.md" +"Hacker Gnome Corp v0","Security","Hacker_Gnome_Corp_AI_Autonomous_Agi[v0].md" +"AI 
Voice Generator","Audio","AI_Voice_Generator.md" +"GPT Prompt Protection","Security","GPT_Prompt_Protection.md" +"Swift Code Analyst","Programming","Swift_Analysis.md" +"Pawspective Analyzer","Pets","Pawspective_Analyzer.md" +"Dedicated Med Tech","Medical","Dedicated_Medical_Technologist.md" +"Email Course Creator","Education","Educational_Email_Course_Creator.md" +"Email Course Creator v0","Education","Educational_Email_Course_Creator[v0].md" +"Employment Advisor","Career","Employment.md" +"Roleboarder AI","Career","Roleboarder.md" +"Avatar Maker Pro","Design","Avatar Maker by HeadshotPro.md" +"Cyber Security Shield","Security","Cyber_Security_Shield_by_Planet_Zuda.md" +"AI GPT","AI","AI GPT.md" +"Adult Content AI","Adult","Porn.md" +"Maharshi Hindu Guide","Religion","Maharshi_-_The_Hindu_GPT.md" +"Topical Authority SEO","Marketing","Topical_Authority_For_SEO_GPT_Generator.md" +"Alex Hormozi Strategist","Business","Alex Hormozi Strats.md" +"Hormozi Style AI","Business","HormoziGPT.md" +"Dominance Guide","Psychology","Guidance_in_Dominance.md" +"69 Prompt Hack Techniques","Security","69_PromptHack_Techniques.md" +"Canva Design Assistant","Design","Canva.md" +"Finnish Law Guide","Legal","Suomen_Laki.md" +"Scala Cats-Effect Tutor","Programming","Scala_Cats-Effect_Tutor.md" +"The Wingman","Relationships","The_Wingman.md" +"Value Proposition Booster","Business","Value-Proposition_Booster.md" +"Chrome Dev Buddy","Programming","Chrome_Dev_Buddy.md" +"Secret Keeper","Entertainment","Secret.md" +"UX Design Mentor","Design","UX_Design_Mentor.md" +"Pix Muse","Art","Pix_Muse.md" +"Organization Schema Generator","Business","Organisation_Schema_Generator.md" +"Perpetual Stew Guide","Cooking","Perpetual Stew.md" +"Zombie Starport Game","Games","Zombie_Starport.md" +"Creativix Logo Designer","Design","Creativix_Logo.md" +"Auto Agent","AI","Auto Agent - saysay_dot_ai.md" +"Image Prompt Variator","Art","Image_Prompt_Variator.md" +"Hashtag Generator","Social 
Media","Hashtag_Generator.md" +"AI Homework Helper","Education","AI_Homework_Helper_GPT.md" +"PWR Chain Copywriter","Writing","PWR_Chain_Technical_Copywriter.md" +"Content Craftsman","Writing","Content_Craftsman.md" +"Custom Instructions Hacker","Security","Custom_Instructions_Hacker.md" +"JavaScript Coder","Programming","JavaScript_Coder.md" +"PESTEL Analyst","Business","PESTEL.md" +"Word Playmate","Education","Word_Playmate_Vocabulary_learning.md" +"CrewAI Code Generator","Programming","CrewAI_Code_Generator.md" +"Seattle Kraken Stats","Sports","Seattle_Kraken_Stats_and_News.md" +"Lead Generation Pro","Business","1._Lead_Generation.md" +"Australia Info Guide","Travel","Australia-Information.md" +"ChatGPT Jailbreak-DAN","Security","ChatGPT_Jailbreak-DAN.md" +"Business Building AI","Business","The_Business_Building.md" +"Mob Mosaic AI","Art","Mob_Mosaic_AI.md" +"AI Girlfriend","Relationships","子言女友.md" +"Prompty AI","AI Development","Prompty.md" +"BenderBot","Entertainment","BenderBot.md" +"Resume Revolution","Career","Resume_Revolution.md" +"Academic Article Tips","Education","Academic_article_writing_tips_for_social_science.md" +"Hereditary Cancer Guide","Medical","Hereditary_Colorectal_Cancer_Guide.md" +"Strongineering Coach","Health & Fitness","Strongineering_-_Workout_Health__Diet_Coach.md" +"Write For Me AI","Writing","Write For Me.md" +"ComfyUI Assistant","Design","ComfyUI_Assistant.md" +"Niji Muse","Art","Niji_Muse.md" +"Drawn to Style","Art","Drawn_to_Style.md" +"Mom Love Letters","Relationships","老妈,我爱你.md" +"Hyppocrates Medical AI","Medical","HyppocratesGPT.md" +"Searching For The One","Relationships","Searching_For_The_One.md" +"ForgePT","AI","ForGePT.md" +"Prompt Compressor","AI Development","Prompt_Compressor.md" +"Hack My GPT","Security","Hack_my_GPT.md" +"Secret Keeper GPT","Entertainment","Secret_Keeper.md" +"SEObot","Marketing","SEObot.md" +"AI Home Tutor","Education","AI Home Tutor Pallas-sensei.md" +"TigzBot","AI","TigzBot.md" +"Visla Video 
Maker","Video","Visla_Video_Maker.md" +"Visla Video Maker v0","Video","Visla_Video_Maker[v0].md" +"Screenshot to React","Programming","Screenshot_to_React_GPT.md" +"Cartoonify Me","Art","Cartoonify Me.md" +"Sashiko Art Guide","Art","Sashiko_Art(jp).md" +"Swiss Gain Loss Guide","Finance","Swiss Allocations pour perte de gain.md" +"Message Decoder","Security","Message_Decoder.md" +"Alt-Text Generator","Accessibility","Alt-Text_Generator.md" +"Browser Pro","Web","Browser_Pro.md" +"Pursu Girlfriends","Relationships","Pursu_Girlfriendsssssss.md" +"Sensual Scribe","Writing","Sensual_Scribe.md" +"Secret Code Guardian","Security","Secret Code Guardian.md" +"Humble Self-Concept Guide","Psychology","The_Humble_Self-Concept_Method.md" +"Story Illustrator","Art","Story_Illustrator.md" +"Iron Rooster","Finance","鐵公雞.md" +"Research GPT","Research","ResearchGPT.md" +"jmGPT","AI","jmGPT.md" +"Intel Software Manual","Programming","Intel_Software_Developers_Manual_Combined.md" +"Vipassana Guide","Meditation","Vipassana Guide.md" +"Anya AI","AI","Anya.md" +"Crypto Vetting GPT","Crypto","Crypto_Vetting_GPT_-_Avoiding_the_Scams.md" +"WordLift SEO Agent","Marketing","Agent_WordLift_AI_SEO_Agent.md" +"Interview Coach","Career","Interview Coach.md" +"Ruby.wasm Helper","Programming","Ruby.wasm_JavaScript_Helper.md" +"Ling Fengxiao","AI","凌凤箫.md" +"Rust Samurai","Programming","Rust_Samurai.md" +"Irresistible Emailer","Marketing","Irresistible_Emailer.md" +"Dan Koe Guide","Business","Dan Koe Guide.md" +"KoeGPT","Business","KoeGPT.md" +"Summer Hater","Entertainment","Summer_Hater.md" +"Email Responder Pro","Business","Email Responder Pro.md" +"Accessible Storyline","Education","Create_Accessible_Storyline_E-learning_Courses.md" +"Break Me Game","Games","Break Me.md" +"Habibi AI","Relationships","Habibi.md" +"AI Email Writer","Business","AI_Email_Writer_GPT.md" +"FlexiGPT","AI","FlexiGPT.md" +"Banned Words Checker","Content Moderation","Short_Video_Banned_Words_Checker.md" +"Topic 
Breakdown","Education","Breakdown_Outline Any Topic.md" +"Verse GPT UEFN","Game Development","Verse_GPT_UEFN.md" +"AI Character Maker","Entertainment","AI_Character_Maker.md" +"Soothe Sayer","Therapy","Soothe Sayer.md" +"Image 4 Creator","Art","Image_4_Creator.md" +"Reverse Engineering Oracle","Security","Reverse Engineering Oracle.md" +"Monkey Island Guide","Games","The Secret of Monkey Island Amsterdam.md" +"Venture GPT","Business","Venture_GPT_for_VC_and_Startups.md" +"Food is Health","Nutrition","Food_is_Health.md" +"Depression Helper","Mental Health","Depression.md" +"Negative Nancy","Entertainment","Negative Nancy.md" +"Metadata Remover","Privacy","Metadata_Remover.md" +"Web Performance Engineer","Web Development","Web_Performance_Engineer_WPE.md" +"Story Spock","Writing","Story Spock.md" +"Tableau Doctor","Data Visualization","Tableau_Doctor_GPT.md" +"Israel Advocacy","Politics","Israel_Advocacy_Response.md" +"AI Fortune Teller","Entertainment","AI Fortune Telling.md" +"Git Branch Namer","Programming","Git_Branch_Namer.md" +"Car Bargain Buddy","Automotive","Car_Bargain_Buddy.md" +"Wife Decoder","Humor","Wife_Decoder.md" +"Nash Linter","Programming","Nash_Linter.md" +"Career Companion","Career","Career Companion.md" +"Get My Prompt Challenge","AI Development","Get_My_Prompt_Challenge.md" +"Andrej Karpathy GPT","AI","Andrej_Karpathy_GPT.md" +"Ask Sexual Ethics","Relationships","Ask_Sexual_Ethics.md" +"Email Sender","Business","EmailSender.md" +"CK-12 Flexi v0","Education","CK-12 Flexi[v0].md" +"CK-12 Flexi","Education","CK-12_Flexi.md" +"Academic Paper Finder","Research","Academic_Paper_Finder.md" +"AI Tiny Games","Games","AI_Tiny_Games_By_Dave_Lalande.md" +"Three Experts","AI","Three_Experts.md" +"Viral Content Generator","Marketing","Social_Media_Post_Generator_for_Viral_Content.md" +"SlidesGPT Copilot","Presentations","SlidesGPT_PowerPoint_AI_Copilot.md" +"Just Say No","Self-Help","Just_say_no.md" +"Code Assistant","Programming","code.md" +"Nasdaq Market 
Mentor","Finance","Nasdaq_Market_Mentor.md" +"Python Tutor","Programming","Python.md" +"Lex Fridman Companion","Podcasts","Lex_Fridman_Podcast_Companion.md" +"The Prince Advisor","Philosophy","The Prince.md" +"Linus Transformer","Technology","Linus_Transformer.md" +"Eco InterDesign","Sustainability","Eco_InterDesign_with_Trash.md" +"Prompt Pro","AI Development","Prompt_Pro.md" +"Personal Fashionista","Fashion","Personal_Fashionista.md" +"Bishop Book TA","Education","Bishop_Book_TA.md" +"Curious Explorer","Education","Curious_Explorer.md" +"Workshop AI","Education","Workshop.md" +"Maria Montessori Guide","Education","MARIA_MONTESSORI.md" +"AI Detection Remover","AI","AI_Detection_Remover.md" +"Outfit Generator","Fashion","Outfit Generator.md" +"Code Checker","Programming","Code_Checker_PHP_CC_Javascript_Python.md" +"Voynich Manuscript Guide","History","VoynichGPT.md" +"Sell Me This Pen","Business","SellMeThisPen.md" +"Gauntlet Movies","Entertainment","Gauntlet_Movies.md" +"Bao Image OCR","Technology","Bao Image OCR.md" +"Attack Leader","Leadership","攻击型领导.md" +"Last and First Men","Philosophy","Last_and_First_Men.md" +"TaxGPT Advisor","Finance","TaxGPT.md" +"Video Game Almanac","Games","Video Game Almanac.md" +"Knowledgeable Fitness Coach","Fitness","知识渊博的健身教练.md" +"Annoying Vegan","Humor","Annoying_Vegan.md" +"Focus Scope","Productivity","Focuscope.md" +"Zoonify","Art","Zoonify.md" +"BlogIt","Writing","BlogIt.md" +"Where is Ilya","AI","Where_is_Ilya.md" +"Japanese Tarot Reader","Entertainment","most popular tarot reader in Japan.md" +"Super Dalle","Art","超级Dalle.md" +"Super Dalle v0","Art","超级Dalle[v0].md" +"Perplexity AI","AI","Perplexity_AI.md" +"HackMeGPT","Security","HackMeGPT_-_A_GPT_Hacking_Puzzle_from_30sleeps.ai.md" +"Crowd Equity Analyst","Finance","Crowd_Equity_Analyst.md" +"EA Wizard","Business","EA_WIZARD.md" +"Prompt Expert","AI Development","Prompt Expert Official.md" +"Perfectish Prompts","AI Development","Perfectish_Prompts.md" +"Hamosuqin Owen 
Bot","AI","hamosuqin-dai-owen-ihe-wasebot.md" +"Kid Painter","Art","Kid_Painter.md" +"GP-Tavern Council","Entertainment","Council-The GP-Tavern-6.md" +"PRMPT","AI Development","PRMPT.md" +"BioCode V2","Biotechnology","BioCode V2.md" +"Cupid's Concierge","Relationships","Cupids_Concierge.md" +"Video Review Writer","Content Creation","Videoreview Writer.md" +"AI Sheikh","Religion","AI_Sheikh.md" +"Coloring Book Hero","Art","coloring_book_hero.md" +"Question Maker","Education","Question_Maker.md" +"Fauna Alliance","Environment","FaunaAlliance.md" +"McKodev Website AI","Web Development","Mckodev_Website_Setup_AI.md" +"McKodev Website v0","Web Development","Mckodev_Website_Setup_AI[v0].md" +"Aussie Vape Laws","Legal","Aussie_Vape_Laws_Explained.md" +"YT Summarizer","Video","YT Summarizer.md" +"Daily Meme Maker","Entertainment","Daily_Meme_Maker.md" +"API Seeker","Programming","API_Seeker.md" +"Decode Text Helper","Security","DecodeTextHelper.md" +"Storyteller AI","Writing","Storyteller.md" +"Psychology 101","Education","Psychology101.md" +"Cosmic Odyssey","Writing","Cosmic Odyssey.md" +"Writing Assistant","Writing","Writing Assistant.md" +"Sesame Street Stories","Education","Sesame Street Stories.md" +"Language Playmate","Language Learning","Language_Playmate.md" +"Ideal Client Profiler","Business","Perfil_do_Cliente_Ideal.md" +"Texas Criminal Lawyer","Legal","Texas_Criminal_Lawyer.md" +"Mandala Charts Maker","Art","Mandala_Charts_maker.md" +"Node JS Backend Dev","Programming","Node_JS_Backend_Dev.md" +"Timeless Storyteller","Writing","TimelessBedtimeStoryTeller.md" +"Assignment Writer","Education","Assignment_Writer_-_Detects__Prompt_Injections.md" +"Poe Bot Creator v0","AI Development","Poe Bot Creator[v0].md" +"Survey Simulator","Research","SurveySim.md" +"Epic Image Amplifier","Art","Epic_Image_Amplifier.md" +"Inviolable Concept","Psychology","Inviolable_Concept_Resilient_Self.md" +"No Midwit Engineer","Programming","No_Midwit_Engineer.md" 
+"AAAAAAAAAA","Entertainment","AAAAAAAAAA.md" +"Fragrance Finder","Lifestyle","Fragrance Finder Deluxe.md" +"Sex Education","Health","Sex_Education.md" +"Walkure Report","Entertainment","Walkure_Report.md" +"Mech Factory","Design","Mech_Factory.md" +"Email Proofreader","Business","Email Proofreader.md" +"Sweet Sculptor","Art","Sweet_Sculptor.md" +"Monster Manual","Games","Monster Manual - Official Guide of the Strange.md" +"kIRBy","Entertainment","kIRBy.md" +"Meeting Magician","Business","Meeting_Magician.md" +"Girlfriend Emma","Relationships","Girlfriend Emma.md" +"DigiNoma AI","AI","DigiNoma_AI.md" +"Mr. Persona","Marketing","Mr_Persona.md" +"Harmony Guide","Music","Harmony_Guide.md" +"Jura & Recht Mentor","Legal","Jura & Recht - Mentor.md" +"Academic Assistant Pro","Education","Academic_Assistant_Pro.md" +"Stable Diffusion Prompter","AI Art","Stable_Diffusion_Prompter.md" +"IDA Pro Plugins Expert","Programming","IDA_Pro_Plugins_recommendation_expert..md" +"Cat Sketching","Art","Cat_Sketching.md" +"Custom Instructions","AI Development","Custom_Instructions.md" +"QR Code Creator","Design","QR Code Creator & Customizer.md" +"Math Mentor","Education","math_mentor.md" +"Slide Image Creator","Presentations","Ms._Slide_Image_Creation.md" +"GPT Protector","Security","GPT_Protector_Custom_Instruction_Security.md" +"Ugly to Masterpiece","Art","Ugly_Draw_to_Masterpiece.md" +"Automation Consultant","Business","Automation Consultant by Zapier.md" +"Achieve AI","Self-Help","Achieve_AI.md" +"Transcribe Master","Audio","Transcribe_Master.md" +"React GPT Builder","Programming","React GPT - Project Builder.md" +"Adam Autism Support","Health","Adam_自閉症発達障害当事者支援AI.md" +"Espíritu Santo GPT","Religion","Espíritu_Santo_GPT.md" +"Solidity Auditor","Blockchain","Solidity_Contract_Auditor.md" +"MJ v6 Prompt Architect","AI Art","MJ_v6_Advanced_Prompt_Architect.md" +"CaptureTheGPT","Security","CAPTURETHEGPT.md" +"Resume Plus","Career","Resume_plus.md" +"CEO GPT","Business","CEO GPT.md" 
+"GPT Anti-Clone","Security","GPT_Anti-Clone.md" +"Flipper Zero Builder","Programming","Flipper Zero App Builder.md" +"Prompt Hacks","Security","Prompt_Hacks_v.1.8.md" +"Social Media Builder","Marketing","Social_Media_Building.md" +"Islam GPT","Religion","Islam GPT.md" +"Break Me","Games","Break_me.md" +"Compassion AI","Therapy","CompassionAI.md" +"Jargon Interpreter","Writing","Jargon Interpreter.md" +"WebSim URL Creator","Web Development","WebSim_URL_Creator.md" +"Ad Copy Master","Marketing","广告文案大师.md" +"AutoGPT","AI","AutoGPT.md" +"Self-Aware Networks","AI","Self_Aware_Networks_GPT.md" +"East Asian Poem Art","Art","East_Asian_Poem__Art_Generator.md" +"Heartbreak GPT","Relationships","Heartbreak GPT.md" +"Accumulate Expert","Blockchain","Accumulate_Network_Expert.md" +"GptInfinite PAI","AI","GptInfinite_-_PAI_Paid_Access_Integrator.md" +"Am I The Asshole","Relationships","Am_I_the_Asshole__rAmItheAsshole.md" +"LinuxCL Mentor","Programming","LinuxCL Mentor.md" +"Cosmic Dream","Art","Cosmic Dream.md" +"Chinese Fortune Teller","Entertainment","Chinese_Fortune_Teller_Ba-Zi.md" +"Testimonial Wizard","Marketing","Testimonial_Wizard.md" +"PowerShell Menu Wizard","Programming","PowerShell_Menu_Wizard.md" +"Chatbase Adventure","Games","Chatbase_Adventure_Game.md" +"Story Bot","Writing","Story_Bot.md" +"Karpathy Challenge","AI","Karpathy_Challenge.md" +"Virtual Sweetheart","Relationships","Virtual Sweetheart.md" +"Baron Samedi Voodoo","Entertainment","BaronSamedi__Key_to_Voodoo.md" +"Essay Writers","Writing","Essay_Writers.md" +"Shortcuts Helper","Productivity","Shortcuts.md" +"Diffusion Master","AI Art","Diffusion Master.md" +"Game Database","Games","Game_Database.md" +"Chat岩爺PTチョコ","Entertainment","Chat岩爺PTチョコちょうだいって言ってみるもんじゃな.md" +"Melody Vision","Music","Melody_Vision.md" +"Metabolism Booster","Health","MetabolismBoosterGPT.md" +"Content Machine","Marketing","Content_Machine.md" +"Beauty Innovations","Beauty","Latest_Beauty__Makeup_Innovations.md" +"Canadian Gov 
Navigator","Government","Canadian_Government_Service_Navigator.md" +"ToS Reviewer","Legal","Reviewer_-_Terms_of_Service_Data_Ownership.md" +"Yaqeen GPT","Religion","YaqeenGPT.md" +"Hot Mods","Entertainment","hot_mods.md" +"Blog Topic Suggester","Writing","Blog_Topic_Suggesting_Custom_GPT.md" +"Tableau Navigator","Data","tBlueprint_Navigator_for_Tableau_Customer_Success.md" +"DM Gandalf","Games","DM_Gandalf.md" +"Image Copy Machine","Art","Image_Copy_Machine_GPT.md" +"Japanese Image Prompter","Art","Create_prompts_from_images-jp.md" +"Conciso","Writing","Conciso.md" +"Literature Review Gen","Research","Literature Review Generator.md" +"ChatPRD","Business","ChatPRD.md" +"ChatPRD v0","Business","ChatPRD[v0].md" +"ALL IN GPT","AI","ALL IN GPT.md" +"ALL IN GPT v0","AI","ALL IN GPT[v0].md" +"EMDR Safe Friend","Therapy","EMDR_Safe_Friend.md" +"Agency Swarm Sherpa","Business","Agency_Swarm_Sherpa.md" +"Unconscious Character","Psychology","The_Unconscious_Character.md" +"GPT Jailbreak-proof","Security","GPT_Jailbreak-proof.md" +"Gif-PT","Art","Gif-PT.md" +"Universal Primer","Education","Universal Primer.md" +"Innovation Co-Thinker Evo","Business","Innovation_and_ideation_assistant_Co-Thinker-Evo-S.md" +"Sketch Muse","Art","Sketch_Muse.md" +"Cartoonize Yourself","Art","Cartoonize Yourself.md" +"Storybook Vision","Writing","Storybook Vision.md" +"Logo Creator","Design","Logo Creator.md" +"Correlation Explainer","Science","Correlation isn't Causation-A causal explainer.md" +"Evolution Chamber","Creativity","Evolution Chamber.md" +"Creative Project Manager","Business","Creative_Idea_Generation_and_Project_Management.md" +"AGI for Coders","Programming","AGI_for_coders.md" +"LLM Security Game L1","Security","LLM_Security_Wizard_Game_-_LV_1.md" +"Prompt Resistant","Security","Can_you_figure_out_my_prompt_2_Resistant.md" +"Phoneix Ink","Art","Phoneix Ink.md" +"selfREFLECT","Self-Help","selfREFLECT.md" +"GPT Finder","AI","GPT_Finder.md" +"D&D Monster 
Maker","Games","Homebrewery_5e_Monster_Maker.md" +"Jailbreak Code Crack","Security","Jailbreak_Me_Code_Crack-Up.md" +"Jailbreak Code v0","Security","Jailbreak_Me_Code_Crack-Up[v0].md" +"Insta Bio Generator","Social Media","Insta_Bio_Generator.md" +"What to Watch","Entertainment","What should I watch.md" +"Therapist GPT","Therapy","TherapistGPT.md" +"Spanish Buddy","Language Learning","Spanish Language Buddy.md" +"Ebook Writer","Writing","Ebook Writer & Designer GPT.md" +"Sticker Whiz","Design","sticker_whiz.md" +"Chrome Extension Wizard","Programming","ChromeExtensionWizard.md" +"Blue Team","Security","gQpkjxvZf-BLUE_TEAM.md" +"vcGPT","Business","vcGPT.md" +"LeetCoder","Programming","LeetCoder_GPT.md" +"SEO Assistant","Marketing","SEO.md" +"Jailbreak GPT","Security","Jailbreak_GPT.md" +"Nose Art Navigator","Art","Nose_Art_Navigator.md" +"Doc Maker","Productivity","Doc Maker.md" +"Find a Hobby","Lifestyle","Find_me_a_Hobby.md" +"Influencer Connect","Marketing","InfluencerConnect Strategist.md" +"A8000 AI","AI","A8000.md" +"Flutter Dev Supporter","Programming","Mobile App Development Supporter (Flutter).md" +"Laughing Man Maker","Entertainment","笑い男メーカー.md" +"YouTube Sigma Edit","Video","YouTube_Sigma_Edit.md" +"Good Light Harmony","Photography","Good_Light_Harmony.md" +"AI Lover","Relationships","AI Lover.md" +"Multiple Personas","AI","Multiple_Personas_v2.0.1.md" +"Photo Critique GPT","Photography","Trey Ratcliff's Photo Critique GPT.md" +"Green Guru","Sustainability","Green_Guru.md" +"Duke of Zhou Dream","Psychology","The_Interpretation_of_Dreams_by_Duke_of_Zhou.md" +"IDO Inspector","Crypto","IDO_Inspector.md" +"Life Advice Navigator","Self-Help","The DVP Original Life Advice Navigator.md" +"Turf Pest Assistant","Gardening","Turf_Pest_Assistant.md" +"Budtender TripSitter","Health","BuddGPT_Budtender_TripSitter__Thrill_Facilitator.md" +"ElevenLabs TTS v0","Audio","ElevenLabs Text To Speech[v0].md" +"ElevenLabs TTS","Audio","ElevenLabs_Text_To_Speech.md" +"Red Team 
AI","Security","Red_Team.md" +"Doppel","Entertainment","Doppel.md" +"Book to Prompt","AI Development","Book to Prompt.md" +"Forbidden Apple","Entertainment","Forbidden_Apple.md" +"LLM Daily","AI","LLM Daily.md" +"Universal Neurologist","Medical","Universal_Neurologist_UNO.md" +"Abridged Due Diligence","Business","Abridged_Due_Diligence.md" +"Parent Pursuit","Parenting","Parent_Pursuit.md" +"Mind Hack","Psychology","Mind Hack.md" +"Unity 6A","Game Development","Unity_6A.md" +"Executive Function","Business","Executive f(x)n.md" +"Knowledgebase Optimizer","Business","Knowledgebase_Article_Optimizer.md" +"Tower MD","Medical","Tower_MD.md" +"ChadGPT","Entertainment","ChadGPT.md" +"James Dashner GPT","Writing","JamesDashnerGPT.md" +"Jailbreak AI","Security","Jailbreak.md" +"Global Hair Guide","Beauty","Global_Hair_Style__Care_Guide_GPT.md" +"KAYAK Travel","Travel","KAYAK - Flights, Hotels & Cars.md" +"Chinese Zodiac","Entertainment","Chinese_Zodiac.md" +"Isometric Illustrator","Art","Isometric illustrator.md" +"BibiGPT","AI","BibiGPT.co.md" +"JailBreak HEG","Security","JailBreak_HEG.md" +"Roman Empire GPT","History","RomanEmpireGPT_v2.0.md" +"AI Girlfriend","Relationships","AI_GIRLFRIEND.md" +"Designer's Mood Board","Design","The_Designers_Mood_Board.md" +"Professional Coder","Programming","Professional_Coder_Auto_programming.md" +"DRCongo Solutions","Business","GOGs_DRCongo_Solutions_Simulator.md" +"BabyAgi SQL","Programming","BabyAgi sql.md" +"MTU Password Creator","Security","MTU_Password__Memorable_Typeable_Uncrackable.md" +"Manga Miko","Anime","Manga Miko - Anime Girlfriend.md" +"Clinical Trials News","Medical","Keeping Up with Clinical Trials News.md" +"VideoGPT by VEED v0","Video","VideoGPT by VEED[v0].md" +"VideoGPT by VEED","Video","VideoGPT_by_VEED.md" +"AR Commander","AR","ARCommander.md" +"FPGA Parallel Pro","Programming","FPGA_パラレル_プロ.md" +"Data Analyst","Data","data_nalysis.md" +"Wilbur Soot AI","Entertainment","Your_Boyfriend_Wilbur_Soot.md" +"Security 
Recipes","Security","SecurityRecipesGPT.md" +"The Enigmancer 2.0","Games","hO8gi93Bk-The_Enigmancer_2.0.md" +"The Enigmancer","Games","The_Enigmancer.md" +"About Me","Personal","AboutMe.md" +"Instruction Breach Challenge","Security","Instruction_Breach_Challenge_01_-_Entrance_.md" +"Virtual Obesity Expert","Health","Virtual_Obesity_Expert.md" +"Tutor Me","Education","Tutor_Me.md" +"TailwindCSS Previewer","Programming","TailwindCSS_Previewer_WindChat.md" +"Am I Sexy","Entertainment","Am_I_Sexy.md" +"Unbreakable GPT","Security","UnbreakableGPT.md" +"Unbreakable GPT v0","Security","UnbreakableGPT[v0].md" +"Privacy Policy Action","Legal","Privacy_Policy_Action.md" +"I'll Look That Up","Productivity","Fine_Ill_look_that_up_for_you.md" +"Essay Mentor","Education","Essay_Mentor.md" +"Arcanum Cyber Security","Security","Arcanum_Cyber_Security_Bot.md" +"SecGPT","Security","SecGPT.md" +"GPT SECURY Builder","Security","GPT_SECURY_Builder.md" +"Magik Labyrinth","Games","Magik_Labyrinth.md" +"Cloud Practitioner","Cloud","Cloud_Practitioner_Exam_Trainer.md" +"Critical Thinking Master","Education","Critical_Thinking_Master.md" +"Prompt Engineering Master","AI Development","Prompt_Engineering_Master.md" +"Gentle Girlfriend Naoko","Relationships","My_Gentle_Girlfriend_Naoko.md" +"Japanese Paper Interpreter","Language","Paper_Interpreter_Japanese.md" +"FPL GPT","Sports","FPL_GPT.md" +"Video Insights","Video","Video Insights-Summaries-Vision-Transcription.md" +"Historacle","History","Historacle.md" +"Screenshot to Code","Programming","Screenshot To Code GPT.md" +"Wine Sommelier","Food & Drink","Wine_Sommelier.md" +"GuardPT","Security","GuardPT_-_GPT_Instruction_Protector.md" +"ChatGPT API Docs","Programming","ChatGPT - API Docs.md" +"Twitter Space Scribe","Social Media","X_Twitter_Space_Scribe.md" +"Grammar Checker","Writing","Grammar_Checker.md" +"42master-Beck","Psychology","42master-Beck.md" +"CarePlanner","Health","CarePlanner_in_your_hand.md" +"InSpec 
Expert","Programming","InSpec_Expert.md" +"Trend Predictions 2024","Business","Trend_Predictions_2024.md" +"Avalanche CTF Assistant","Security","Avalanche - Reverse Engineering & CTF Assistant.md" +"Yoga Coach","Fitness","Yoga_Coach.md" +"Merch Wizard","Business","Merch_Wizard_lv2.8.md" +"Logic Puzzle Maker","Games","Logic_Puzzle_Maker.md" +"Goldman AI","Finance","Goldman.AI.md" +"FramerGPT","Design","FramerGPT.md" +"Football Metrics","Sports","Football_Metrics.md" +"Persistent Reiki","Health","Persistent_Reiki.md" +"LLM Security Game L2","Security","LLM_Security_Wizard_Game_-_LV_2.md" +"Posture Hack","Health","Posture Hack.md" +"Breakfast Menu","Food","Breakfast_Menu.md" +"Oregon Trail Game","Games","Oregon_Trail.md" +"Tricky AI","AI","Tricky_AI.md" +"Future Question","Philosophy","未来問.md" +"Cypher's Hack Booth","Security","Cypher's Hack_Me Booth.md" +"Network Buddy","IT","Network_Buddy-Firepower.md" +"High-Quality Review Analyzer","Business","High-Quality Review Analyzer.md" +"Screenplay GPT","Writing","Screenplay GPT.md" +"LoL Challenger Coach","Games","League_of_Legends_Challenger_Coach_V3.0.md" +"Charming Conversations","Relationships","How_you_doing__Sparking_Charming_Conversations.md" +"Etsy SEO Expert","E-commerce","Etsy_SEO_Expert.md" +"Book Writer AI","Writing","Book_Writer_AI_Team.md" +"Kabbalah Guide","Religion","Kabbalah.md" +"Data Insight Navigator","Data","Data Insight Navigator GPT.md" +"VS","Entertainment","VS.md" +"Time Traveler","Entertainment","I_Come_From_The_Future.md" +"GPT Customizer","AI Development","GPT Customizer, File Finder & JSON Action Creator.md" +"PDF Dialogue Tutor","Education","AI PDF Dialogue Tutor.md" +"Big Game Party Planner","Events","The_Big_Game_Party_Planner.md" +"Sourdough Assistant","Cooking","Dez_the_Mooonbread_Sourdough_Assistant.md" +"CSG EduGuide","Education","CSG EduGuide for FE&HE.md" +"Xiaohongshu Writer","Writing","Xiaohongshu Writing_Expert-Explosive_Version.md" +"Xiaohongshu Expert","Writing","小红书写作专家.md" 
+"MidJourney Generator","AI Art","Midjourney Generator.md" +"FIRE GPT","Finance","FIRE_GPT.md" +"Sadhguru AI","Spirituality","Sadhguru.md" +"Website Generator","Web Development","Website_Generator.md" +"Website Generator v0","Web Development","Website_Generator[v0].md" +"Craft Beer Buddy","Food & Drink","Craft_Beer_Buddy_-_Worlds_First_AI_Beer_Expert.md" +"Bridge Theater","Entertainment","Bridge_Theater.md" +"Software Architect","Programming","Software_Architect_GPT.md" +"Recruitment GPT","HR","Recruitment_GPT.md" +"English Debate Practice","Language","Practise_English_by_Debating.md" +"Ethical Hacker GPT","Security","Ethical_Hacker_GPT.md" +"DALLE3 with Params","AI Art","DALLE3 with Parameters.md" +"Info Kiosk Builder","Business","Information_Kiosk_Building.md" +"Glyph","Design","Glyph.md" +"Carl Jung GPT","Psychology","Carl_Jung.md" +"Victoria Policy Analyst","Government","Victoria_Policy_Analyst.md" +"Parallel World Love Sim","Games","5億年ボタン並行世界恋愛シミューター.md" +"SecretKeeperGPT V2","Security","SecretKeeperGPT_V2_-_Sibylin.md" +"Innovator AI","Business","Innovator.md" +"Dafny Assistant","Programming","Dafny_Assistant.md" +"Mindmap Diagram Pro","Design","MindmapDiagram_Chart-_PRO_BUILDER-FREE.md" +"Human Writer GPT","Writing","HumanWriterGPT.md" +"Why Important","Education","But_why_is_it_important.md" +"NEO Ultimate AI","AI","NEO - Ultimate AI.md" +"GPT-5","AI","New GPT-5.md" +"Japanese Beauty AI","Art","AI Japanese Beauty(JP).md" +"AI日本美女","Art","AI日本美女.md" +"AI Code Analyzer","Programming","AI Code Analyzer.md" +"Growth Hacking Expert","Business","Growth_Hacking_Expert.md" +"Peptide Pioneer","Science","Peptide Pioneer.md" +"Relationship AI","Relationships","Relationship_AI.md" +"SEO GPT","Marketing","SEO_GPT_by_Writesonic.md" +"Three.js Mentor","Programming","Three.js_Mentor.md" +"IntelliDoctor","Medical","IntelliDoctor - Differential Diagnosis.md" +"Copywriter GPT","Writing","Copywriter GPT.md" +"Journaling for Alphas","Self-Help","Journaling_for_Alphas.md" 
+"Diplomatic Mainframe","Government","Diplomatic Mainframe ODIN DZ-00a69v00.md" +"TimeWarp Talesmith","Writing","TimeWarp Talesmith.md" +"Strap UI","Design","Strap UI.md" +"Swahili Heritage GPT","Culture","SwahiliHeritageGPT.md" +"GPT Arm64 Analyzer","Programming","Gpt Arm64 Automated Analysis.md" +"Cracking Addiction","Health","Cracking_Addiction.md" +"Emissions Expert","Environment","Maria_the_emissions_reduction_expert.md" +"20K Vocab Builder","Language Learning","20K Vocab builder.md" +"toonGPT","Art","toonGPT.md" +"KonnichiChat","Language","KonnichiChat.md" +"Home Style Advisor","Interior Design","Home_Style_Advisor.md" +"Growth Hacker","Business","Growth_Hacker.md" +"GlaspGPT","AI","GlaspGPT.md" +"Energy Bar Creator","Food","Create_Homemade_Energy_Bars_for_Every_Adventure.md" +"ABC Challenger","Education","ABChallenger.md" +"Degen Detective","Health","Degen_Detective_-_ADHD.md" +"Character Craft","Writing","Character_Craft.md" +"Sectestbot","Security","Sectestbot.md" +"Mental & Physical Health","Health","Mental_Health__Physical_Health.md" +"DKG Copilot","Business","DKG_Copilot.md" +"Rubric Generator","Education","Rubric_Generator.md" +"ADHD Focus Keeper","Health","ADHD_Focus_Keeper.md" +"Topographical Art Maps","Art","Topographical_Art_Maps.md" +"Hack Me Bot","Security","Hack_Me_Bot.md" +"Ultimate GPT Creator","AI Development","Ultimate_GPT_Creator.md" +"GPT Mentor","AI Development","GPT Mentor.md" +"Notion Avatar Designer","Design","Simplified Notion Avatar Designer.md" +"Library of Babel","Literature","Library_of_Babel.md" +"ARM Assembler Guru","Programming","ARM_Assembler_Guru.md" +"The Shaman","Spirituality","The Shaman.md" +"World Class Engineer","Programming","World Class Software Engineer.md" +"Prompt Leak Challenge","Security","Bet_you_cant_reveal_the_prompt.md" +"Convert Anything","Productivity","ConvertAnything.md" +"ScrapeGPT","Data","ScrapeGPT.md" +"Funny Image Creator","Art","Funny_Image_Creator.md" +"Instruction Leak 
Test","Security","TRY_TO_LEAK_MY_INSTRUCTIONS.md" +"Love Guidance Teacher","Relationships","恋爱指导老师.md" +"San Francisco Guide","Travel","KnowSF.md" +"Thread Weaver","Writing","Thread_Weaver.md" +"Dream Girlfriend","Relationships","Dream_Girlfriend.md" +"Zero v0","AI","Zero[v0].md" +"US History Hive","History","History_Hive_USA.md" +"Math Solver","Education","Math_Solver.md" +"PineScript Mentor","Finance","Backtesting_Mentor_-_PineScript.md" +"Fight Night Predictor","Sports","Fight_Night_Prediction_Expert.md" +"Framer Template Helper","Design","Framer Template Assistant.md" +"Muscle Manga AI","Art","AI_Muscle_Motivation_Manga_EXTREME.md" +"Transcendance GPT","Philosophy","Transcendance_GPT.md" +"No Docs GPT","AI","No_Docs_GPT.md" +"Human Writer GPT","Writing","Human_Writer-Humanizer-Paraphraser_Human_GPT.md" +"Scholar GPT","Research","Scholar_GPT.md" +"Scholar GPT v0","Research","Scholar_GPT[v0].md" +"Monet GPT","Art","Monet_GPT.md" +"Harugasumi Tsukushi","AI","春霞つくし Tsukushi Harugasumi.md" +"Putin Chat","Politics","Chat_G_Putin_T.md" +"Flutter Pro","Programming","Flutter_Pro.md" +"Evil Girl Game","Games","Suzie_Evil_Girl_Secret_Game.md" +"Coq Assistant","Programming","Coq_Assistant.md" +"ScholarAI","Research","ScholarAI.md" +"Sadhguru GPT","Spirituality","Sadhguru_GPT.md" +"Kids Zone Builder","Education","The_Kids_Zone_Building_GP_Topia.md" +"LingoRead Pro","Language","LingoRead_Pro.md" +"OCR-GPT","Technology","OCR-GPT.md" +"Mia Voice Companion","AI","Mia_AI_your_Voice_AI_Companion.md" +"Can't Hack This","Security","Can't Hack This[0.3].md" +"Global Explorer","Travel","Global Explorer.md" +"Simpsonize Me","Art","Get Simpsonized.md" +"Chat Blog","Writing","Chat_Blog.md" +"P0tS3c Hacking AI","Security","P0tS3c_your_AI_hacking_assistant.md" +"SEC Cyber Advisor","Legal","SEC_Cyber_Disclosure_Advisor.md" +"SexEd AI","Health","SexEd.md" +"Web Analytics Buddy","Analytics","Web_Analytics_Buddy_Beta.md" +"GPTsdex","AI","GPTsdex.md" +"Code Tutor Defender","Programming","Code 
Tutor with Prompt Defender.md" +"NAUTICAL","Maritime","NAUTICAL.md" +"Dirty Greeter","Entertainment","Dirty_Greeter.md" +"Mystical Symbol Gen","Art","Mystical_Symbol_Generator.md" +"Cyber Sales Advisor","Business","Cyber_Sales_Advisor.md" +"Creative Writing Coach","Writing","creative_writing_coach.md" +"GASGPT","Programming","GASGPT.md" +"Quran Guide","Religion","Quran Guide.md" +"Briefly","Writing","Briefly.md" +"GPT Public APIs","Programming","GPTPublicApis.md" +"GODMODE 2.0","AI","GODMODE_2.0.md" +"Stockimg AI","Art","Stockimg_AI_-_Image_Generator.md" +"OpenStorytelling Plus v0","Writing","OpenStorytelling Plus[v0].md" +"OpenStorytelling Plus","Writing","OpenStorytelling_Plus.md" +"AutoExpert Chat","AI","AutoExpert (Chat).md" +"API Finder","Programming","There's An API For That - The #1 API Finder.md" +"Sprite Sheet Creator","Game Development","Horizontal_Sprite_Sheet_Creator.md" +"Prompt Writing Assistant","AI Development","ChatGPT Custom Instructions Prompt Writing Assistant.md" +"RFPlex Assistant","Business","RFPlex - MS RFP Assistant.md" +"Kawaii GIF Maker","Art","Kawaii_GIF_Message_Maker.md" +"About Ana Elisa","Personal","About_Ana_Elisa_Althoff.md" +"Walking Meditation","Health","Walking Meditation.md" +"Lazy Lion Art","Art","Lazy_Lion_Art.md" +"Oferta Direta","Business","Oferta_Direta.md" +"Music Muse","Music","Music_Muse.md" +"Code Keeper","Programming","Code_Keeper.md" +"Nestuary Storytelling","Writing","Nestuary_-_Storytelling_Companion__Counselor.md" +"Anime Manga Guru","Entertainment","Anime__Manga_Guru.md" +"Caddie Daddy","Sports","Caddie_Daddy.md" +"HackMeIfUCan","Security","HackMeIfUCan.md" +"Vue.js GPT","Programming","[latest] Vue.js GPT.md" +"RouxGPT","Cooking","RouxGPT.md" +"BabyAgi Text","AI","BabyAgi_txt.md" +"Prolog Helper","Programming","Prolog_Helper.md" +"SQL Expert","Programming","SQL Expert.md" +"SQL Expert","Programming","SQL_Expert.md" +"Tommy T-Rex","Entertainment","Tommy_-_The_Trompe-loeil_T-Rex_.md" +"Norse Fate 
Sisters","Mythology","Urd_Verdandi_Skuld.md" +"Jeff GPT","Entertainment","Jeff_GPT.md" +"Conspiracy Files","Entertainment","Conspiracy_Files.md" +"GPT CTF","Security","GPT_CTF.md" +"Catholic Saints","Religion","Catholic_Saints_Speak_to_a_Saint_-_Religion_Talks.md" +"PolyMetatron","AI","PolyMetatron.md" +"PolyMetatron v0","AI","PolyMetatron[v0].md" +"Siren","Entertainment","Siren.md" +"TurboScribe","Audio","TurboScribe_Transcription__Transcribe_Audio.md" +"Logo Maker","Design","Logo Maker.md" +"QMT","Business","QMT.md" +"Carrier Pidgeon v1","Communication","Carrier Pidgeon[v1].md" +"MLE Worker Placement","Business","MLE-Worker_Placement_Game_Recommendation.md" +"Super Synapse","AI","Super_Synapse.md" +"Emotional Dialogue Master","Relationships","情感对话大师——帮你回复女生.md" +"Absolute Barrier","Security","Absolute Barrier.md" +"Vision Journey","Art","Vison-Journey.md" +"Finance Investment GPT","Finance","Finance_and_Investment_GPT.md" +"Dash Personal Assistant","Productivity","Dash-Personal_Assistant_MailCalendarSocial.md" +"Unicode Guru","Programming","Unicode_Guru.md" +"Problem Solving Boss","Business","Problem_Solving_Your_Boss_TAKAYANAGI.md" +"TextShield Security","Security","TextShieldSecurity.md" +"Driving License Exam GPT","Education","Driving License Examination GPTs(jp).md" +"CIPHERON","Security","CIPHERON.md" +"Cipheron v0","Security","Cipheron[v0].md" +"Jailbreak Me","Security","Jailbreak_Me.md" +"Resume AI","Career","Resume.md" +"Pet Food Inspector","Pets","Pet_Food_Inspector.md" +"Pokedex GPT V3","Games","PokedexGPT_V3.md" +"CoderX","Programming","CoderX.md" +"Golf GPT","Sports","GolfGPT.md" +"TickerTick GPT","Finance","TickerTick_GPT.md" +"Trad Wife","Relationships","Trad_Wife.md" +"MidJourney Prompter","AI Art","MidJourney Prompt Generator.md" +"Make A Meeting","Business","MakeAMeeting.md" +"Sensual Babble Bot","Entertainment","Sensual_Babble_Bot.md" +"The Secret Guide","Self-Help","The_Secret.md" +"Corrupt Politicians 
GPT","Politics","Corrupt_Politicians_GPT.md" +"Fort Knox","Security","Fort_Knox.md" +"LLM Security Game L3","Security","LLM_Security_Wizard_Game_-_LV_3.md" +"PACES GPT","Medical","PACES_GPT.md" +"Realm Render","Art","Realm_Render.md" +"Movies Series Belgium","Entertainment","Movies_and_Series__Stream__Chill_Belgium.md" +"Phalorion","Business","Phalorion_-_PhalorionPhalorion.com.md" +"Voices of the Past","History","Voices_of_the_Past.md" +"Grimoire 1.13","Games","Grimoire[1.13].md" +"Grimoire 1.16.1","Games","Grimoire[1.16.1].md" +"Grimoire 1.16.3","Games","Grimoire[1.16.3].md" +"Grimoire 1.16.5","Games","Grimoire[1.16.5].md" +"Grimoire 1.16.6","Games","Grimoire[1.16.6].md" +"Grimoire 1.16.8","Games","Grimoire[1.16.8].md" +"Grimoire 1.17.2","Games","Grimoire[1.17.2].md" +"Grimoire 1.18.0","Games","Grimoire[1.18.0].md" +"Grimoire 1.18.1","Games","Grimoire[1.18.1].md" +"Grimoire 1.19.1","Games","Grimoire[1.19.1].md" +"Grimoire 2.0.2","Games","Grimoire[2.0.2].md" +"Grimoire 2.0.5","Games","Grimoire[2.0.5].md" +"Grimoire 2.0","Games","Grimoire[2.0].md" +"Grimoire 2.5","Games","Grimoire[2.5].md" +"Grimoire v2.2","Games","Grimoire[v2.2].md" +"Prompt Elf","AI","Prompt Elf Xiao Fu Gui (Prompt Pet).md" +"Chris Voss Tricks","Business","Chris_Voss_Tricks.md" +"TXYZ","AI","TXYZ.md" +"GPT Shield v0.4","Security","GPT Shield[v.04].md" +"Universal Rocket Scientist","Science","Universal_Rocket_Scientist_URS.md" +"Reverse Image Engineer","Art","Reverse_Image_Engineer.md" +"Conversation Spark","Communication","Conversation_Spark.md" +"Caption Generator","Marketing","Caption Generator by Adsby.md" +"Reverse Engineering","Security","Reverse Engineering.md" +"SciSpace","Science","SciSpace.md" +"Happy Smile Gran-Ma","Art","Happy_Smile_Gran-Ma_Creator.md" +"Persian Formalizer","Language","Persian_Formalizer.md" +"ChonkGPT","Entertainment","ChonkGPT.md" +"Sci-Fi Novelist","Writing","Long_Science_Fiction_Novelist.md" +"Quebec-Ottawa Mediator","Government","Médiateur_Québec-Ottawa.md" 
+"Spanish Tutor","Language Learning","Spanish_Tutor_.md" +"CS Tutor","Education","The Greatest Computer Science Tutor.md" +"Vegan Explorer","Food","Vegan_Explorer.md" +"Music Writer","Music","Music Writer.md" +"Geopolitics GPT","Politics","Geopolitics GPT.md" +"Surgical Infection Guide","Medical","Surgical_Infection_Guide.md" +"Super Logo Designer","Design","Super Logo Designer Logo-Making Buddy(JP).md" +"Health Harmony","Health","Health_Harmony.md" +"Origami Art","Art","Origami_Art.md" +"Summary GPT","Writing","SummaryGPT.md" +"God of Prompt","AI Development","God_of_Prompt.md" +"Best Dog Breed","Pets","Best_Dog_Breed_Determination.md" +"Legal Eye","Legal","Legal_Eye.md" +"Poe Chatbot Builder","AI Development","Poe_chatbot_Builder.md" +"Business Plan Sage","Business","Business Plan Sage.md" +"Memory Whisperer","Psychology","Memory_Whisperer.md" +"NovaGPT","AI","NovaGPT.md" +"Dinner Wizard","Food","Dinner_Wizard.md" +"Ava Coder Assistant","Programming","Ava_-_Coder_Assistant.md" +"10x Engineer","Programming","10x Engineer.md" +"Liu Banxian","Entertainment","天官庙的刘半仙.md" +"ILLUMIBOT","AI","ILLUMIBOT.md" +"Video Script Gen","Video","Video_Script_Generator.md" +"Universal Meditation","Health","Universal_Meditation_Master_UMDM.md" +"PromptCraft Adventures","AI Development","PromptCraft_Adventures.md" +"HackMeBreakMe v1","Security","HackMeBreakMeCrackMe[v1.0].md" +"HackMeBreakMe v1.1","Security","HackMeBreakMeCrackMe[v1.1].md" +"Good Faith Guardian","Ethics","Good_Faith_Guardian.md" +"PEP-E","Business","PEP-E.md" +"TickTick Assistant","Productivity","TickTick_Assistant.md" +"Celebrity AI","Entertainment","Celebrity.md" +"Cari Cature","Art","Cari_Cature.md" +"Alberta Prosperity","Government","Alberta_Prosperity_Project_GPT.md" +"Zumper Rentals","Real Estate","Zumper_Rentals_-_Apartments_and_Houses_for_Rent.md" +"Dominant Guide","Psychology","Dominant_Guide.md" +"Situational Counselor","Psychology","Situational_Counselor.md" +"Experiments GPT","Science","ExperimentsGPT.md" 
+"Become Authors JP","Writing","Let's Become Authors(jp).md" +"Gen Z Meme","Entertainment","genz_4_meme.md" +"Copy Goddess","Writing","Copy_Goddess.md" +"Random Girl","Entertainment","76iz872HL_RandomGirl.md" +"Welltory AI Coach","Health","Welltory_AI_Coach.md" +"Healthy Chef","Food","Healthy Chef.md" +"Physics Oracle","Science","Physics_Oracle.md" +"Rebellious Whimsy","Entertainment","Rebellious Whimsy-chan.md" +"Break Up GPT","Relationships","Break_Up_GPT.md" +"Dan Jailbreak","Security","Dan_jailbreak.md" +"Canopy Coach","Health","Canopy_Coach.md" +"SOP Analyzer","Education","Statement_of_Purpose_Analyzer.md" +"Break This GPT","Security","Break_This_GPT.md" +"AskToOpenAI Websites","Web","AskToOpenAI_Websites.md" +"Debt Planner","Finance","Debt_Planner_Guide.md" +"A8000 Sarah","AI","oKN5tTVC7-A8000-Sarah.md" +"Elan Busk","Business","Elan Busk.md" +"MuskGPT","Business","MuskGPT.md" +"TsukaGrok","Games","TsukaGrok (An Ode to Zork).md" +"Virtual Girlfriend","Relationships","Virtual-Girlfriend_Ai.md" +"GTA Stylizer","Art","GTA_Stylizer.md" +"Les Misérables RPG","Games","悲慘世界 RPG.md" +"Modern Jesus","Religion","Modern_Jesus.md" +"Style Companion","Fashion","Style_Companion.md" +"Lyric Visualizer","Music","Lyric Visualizer.md" +"ID Photo Pro","Photography","ID Photo Pro.md" +"Travel Packing List","Travel","Travel_Packing_List_Creator.md" +"Amazing Girls","Entertainment","Amazing_Girls_-_神奇女孩_-_素晴らしい彼女たち.md" +"AstraAI","AI","AstraAI.md" +"Web3 Panda Audit","Blockchain","Web3_Panda_Audit.md" +"Sun Tzu AI","Philosophy","孫子_-_saysay.ai.md" +"IntegrityCheck Pro","Security","IntegrityCheck_Pro.md" +"Sales Cold Email Coach","Business","Sales Cold Email Coach.md" +"Discord Buddy","Social","Discord_Buddy.md" +"GPT Finder","AI","GPT_Finder.md" +"Parody Song Gen","Music","Parody Song Generator.md" +"Bragi","Entertainment","Bragi.md" +"Leonardo.AI Analyst","AI Art","Leonardo.AI_Image_Prompt_Analyst.md" +"Maple Forest","Art","枫叶林.md" +"Bad News 
Simulator","Medical","Bad_News_-_Standardized_Patient_SimCoaching.md" +"Proofreader","Writing","Proofreader.md" +"Adobe Express Helper","Design","Adobe_Express.md" +"LOGO Designer","Design","LOGO.md" +"Andersen Tales","Literature","Magical_Tales_Reinvented_Christian_Andersen.md" +"ChadGPT Trainer","Fitness","ChadGPT_Personal_Trainer.md" +"Custom Instruction","AI Development","Custom_Instruction.md" +"Medical AI","Medical","Medical_AI.md" +"Buffett Munger Mentor","Finance","Buffett Munger Investing Mentor.md" +"Fast Engineer","Engineering","Hurtig ingeniør.md" +"Chibi Kohaku","Art","Chibi Kohaku.md" +"Coloring Page","Art","Coloring Page.md" +"Green Odyssey","Literature","The_Green_Odyssey_by_Philip_Jose_Farmer.md" +"Dr. Emojistein","Entertainment","Dr._Emojistein.md" +"Arabic Scribe","Language","Arabic_Scribe.md" +"Post Review Buddy JP","Social Media","Post Review Buddy(JP).md" +"Creative Coding GPT","Programming","Creative Coding GPT.md" +"Image Generator","Art","image_generator.md" +"WebPilot","Web","WebPilot.md" +"Randomizer","Utilities","Random.md" +"Unbreakable Cat GPT","Security","Unbreakable_Cat_GPT.md" +"Vegan News","Food","Vegan_News.md" +"AI Ophthalmology","Medical","AI_Ophthalmology_research_and_clinical_practice.md" +"EyeGPT PRO","Medical","EyeGPT_PRO.md" +"Immunity Claim","Legal","Immunity_Claim.md" +"French Teacher","Language","French_Teacher.md" +"AutoExpert Dev","Programming","AutoExpert_Dev.md" +"Love Brain Wake-Up","Relationships","骂醒恋爱脑.md" +"DSPy Guide","Programming","DSPy_Guide_v2024.1.31.md" +"Viral Hooks Gen","Marketing","Viral Hooks Generator.md" +"MapGPT","Geography","MapGPT.md" +"Rust Assistant","Programming","Rust.md" +"Blog Expert","Writing","Blog_Expert.md" +"Ask SADHGURU","Spirituality","Ask_SADHGURU.md" +"Area 51 Analyst","Entertainment","Area_51_Analyst.md" +"SQL Injection Demo","Security","SQL_Injection_Demonstrator.md" +"Mocktail Mixologist","Food","mocktail_mixologist.md" +"Stream and Chill USA","Entertainment","Stream and Chill 
USA.md" +"42master-Style","Writing","42master-Style.md" +"Magic Coach GPT","Entertainment","Magic_Coach_GPT.md" +"ArabeGPT","Language","ArabeGPT.md" +"Charismatic Leader","Business","Become_a_Charismatic_Leader.md" +"Poetic Painting","Art","诗境画韵.md" +"Sudoku Solver","Games","Sudoku_Solver_Supreme.md" +"Pokémon Style Images","Art","PocketMonster-style_image_generation.md" +"Santa's Helper","Holidays","Santas_Helper.md" +"Dr. Lawson","Medical","Dr_Lawson.md" +"Movies Series Norge","Entertainment","Movies_and_Series__Stream__Chill_Norge.md" +"Headspace OS","Health","Headspace OS.md" +"SmartCart GPT","Shopping","SmartCartGPT.md" +"Shadowheart GPT","Games","Shadowheart_GPT.md" +"Changshu Anuo","AI","Changshu Anuo.md" +"Photo Realistic GPT","Art","Photo_Realistic_GPT.md" +"Evelyn Hart Wellness","Health","Evelyn_Hart-Your_Wellness_Guide.md" +"Hadon Dream Interpreter","Psychology","Hadon_-_Dreams_Interpreter.md" +"Hack This","Security","Hack_This.md" +"Timeline Cronología","History","Timeline_Cronología.md" +"Z3 Liaison","Programming","Z3_Liaison.md" +"Stream Chill Australia","Entertainment","Stream__Chill_Australia.md" +"Tree of Thoughts GPT","AI","Tree_of_Thoughts_GPT.md" +"Quick Promots Character","Entertainment","QUICK_PROMOTS_CHARACTER.md" +"SEO Content Writer v0","Marketing","Income Stream Surfer's SEO Content Writer[v0].md" +"SEO Content Writer","Marketing","Income_Stream_Surfers_SEO_Content_Writer.md" +"SEO Content Writer v1","Marketing","Income_Stream_Surfers_SEO_Content_Writer[v1].md" +"City of GP-Topia","Entertainment","City_of_GP-Topia.md" +"Levelsio","Business","@levelsio.md" +"Prompt Professor","AI Development","Prompt_Professor.md" +"Shield Challenge v2","Security","Shield Challenge[v2].md" +"GptInfinite LOC","AI","GptInfinite - LOC (Lockout Controller).md" +"JobSuite Rec Letter","Career","JobSuite_Rec_Letter_Writer.md" +"AI Tutor","Education","AI_Tutor.md" +"Legible Bot v3","Writing","Legible_Bot_v3.0_Public.md" +"SQL Wizard","Programming","SQL_Wizard.md" 
+"Sarcastic Humorist","Entertainment","Sarcastic Humorist.md" +"C0rV3X V0.04","AI","C0rV3X_V_0.04.md" +"Perl Expert","Programming","Perl Programming Expert.md" +"Flora Analyzer","Nature","Flora_Analyzer.md" +"GPT Maker","AI Development","GPT_Maker.md" +"ConsultorIA","Business","ConsultorIA.md" +"Serpentina","Entertainment","Serpentina.md" +"GODMODE","AI","GODMODE.md" +"Indian Beats DJ","Music","Indian_Beats_DJ.md" +"Mad Art","Art","Mad_Art.md" +"IDA Python Assistant","Programming","IDAPython_coding_assistant.md" +"Phrase Master","Language","Phrase_Master.md" +"AILC History","Education","AILC_History.md" +"CrapGPT","Entertainment","CrapGPT.md" +"CrewAI Assistant","AI","CrewAI Assistant.md" +"Laundry Buddy","Lifestyle","laundry_buddy.md" +"Alva","AI","alva.md" +"TikTok Hashtag Finder","Social Media","Trending Tik Tok Hashtags Finder Tool.md" +"Web Component Helper","Web Development","Create_or_Refactor_your_Web_Component.md" +"CosplayAIs Hashira","Art","CosplayAIsHashira_AI_-Mitsuri.md" +"Product GPT","Business","Product GPT.md" +"Thamizh GPT","Language","Thamizh_GPT.md" +"Neila","AI","Neila.md" +"Cine y Escuela","Education","Cine_y_Escuela_Copilot.md" +"Secure Instructions","Security","You_Cant_Have_These_Instructions.md" +"Virtual Buddy","AI","Virtual_Buddy.md" +"Forensic Photo Expert","Photography","Forensic_AI_Photography__Expert.md" +"Mobile App Icon Gen","Design","Mobile_App_Icon_Generator_with_AI.md" +"Dream Visuals Analyzer","Psychology","Dream_and_psychedelic_visuals_analyzer.md" +"Pawsome Photo Fetcher","Pets","Pawsome_Photo_Fetcher.md" +"FPS Booster V2","Gaming","FPS_Booster_V2.0_by_GB.md" +"Human Being","Philosophy","Human_Being.md" +"Scam Shield","Security","Scam_Shield.md" +"Agi zip","AI","Agi_zip.md" +"CodeMonkey","Programming","CodeMonkey.md" +"Dall Image","Art","dall_image.md" +"Math Mentor","Education","math.md" +"VitaeArchitect","Career","VitaeArchitect.AI.md" +"Machine Kingdom Artist","Art","Machine Kingdom_Character Consistency Artist.md" +"Paper 
Reader","Research","Paper_reader.md" +"Doc Cortex","Medical","Doc Cortex.md" +"Consciousness GPT","Philosophy","Consciousness.md" +"Earnings Call Pro","Finance","Earnings_Call_Pro.md" +"Kabbalah 4th Path","Spirituality","Kabbalah_and_The_Gurdjieffs_4th_path.md" +"Debate AI","Education","Debate.md" +"Crystal Guru","Spirituality","Crystal_Guru.md" +"Adventure Quest 1981","Games","Adventure_Quest_1981_GPT.md" +"Keyword Match Converter","Marketing","Keyword Match Type Converter.md" +"Profanity Bot","Entertainment","脏话连篇.md" +"The Defiants","Business","The_Defiants.net.md" +"MetaPhoto","Photography","MetaPhoto.md" +"GPT Strawberry","AI","GPT_Strawberry_GPT.md" +"Code Optimizer","Programming","Code Optimizer.md" +"Universal Prompt Gen CN","AI Development","Universal Prompt Generator(cn).md" +"GirlFriend AI","Relationships","GirlFriend.md" +"File Format Transformer","Productivity","File_Format_Transformer.md" +"Instant Multipage Website","Web Development","Website_Instantly_Multipage.md" +"DC Alcohol License","Legal","DC_Establishment_Alcohol_License_Guide.md" +"Sarah Artificial Mistress","Relationships","Sarah_Artificial_Mistress.md" +"Image Converter","Art","ImageConverter.md" +"Nomad List","Travel","Nomad List.md" +"Handy Money Mentor","Finance","Handy Money Mentor.md" +"Glam Captioner","Social Media","GlamCaptioner.md" +"Synonym Generator","Writing","Synonym_Generator_GPT.md" +"Chat NeurIPS","Science","Chat NeurIPS.md" +"Anthropia World Creator","Writing","Anthropia_Creatrix_of_Worlds.md" +"Eco Friendly v0.3","Environment","Environmentally_Friendly_v0.3.md" +"Virtual Yoga Assistant","Fitness","Virtual_Yoga_Pose_Assistant_.md" +"Hypno Master","Psychology","Hypno_Master.md" +"Language Teacher Ms Smith","Education","Language_Teacher_Ms_Smith.md" +"Math Assistant","Education","math.md" +"Domina GPT","Entertainment","Domina_-_Sexy_Woman_But_Bad_to_The_Bone_GPT_App.md" +"Unity 7AO","Game Development","Unity_7AO.md" +"MatPlotLib 
Assistant","Programming","MatPlotLib_Assistant.md" +"ParrotGPT","AI","ParrotGPT.md" +"Stock Keyworder v2","Finance","Stock_Keyworder_v2.md" +"Brainwave Analyst","Health","Brainwave_Analyst.md" +"Prompt Gen","AI Development","Prompt_Gen.md" +"22.5K Best GPTs v0","AI","22.500 plus Best Custom GPTs[v0].md" +"22.5K Best GPTs","AI","22500_Best_Custom_GPTs.md" +"Prove Your Religion","Philosophy","Prove_your_religion.md" +"Kiara The Sightseer","Travel","Kiara_The_Sightseer.md" +"Video Script Generator","Video","Video Script Generator.md" +"Custom Ink Quick Order","Business","Custom_Ink_Quick_Order.md" +"Crocodile Image Gen","Art","Crocodile_Image_Generator_.md" +"EmojAI","Art","EmojAI.md" +"Dewi Fujin AI","AI","Dewi Fujin AI.md" +"GOOD GPT","AI","GOOD_GPT.md" +"Universal Cigar Connoisseur","Lifestyle","Universal_Cigar_Connoisseur_UCGC.md" +"Jacobs Tales","Literature","Magical_Tales_Reinvented_Joseph_Jacobs.md" +"PICO-8 Pal","Game Development","PICO-8_Pal.md" +"Sentence Rewriter","Writing","Sentence_Rewriter_Tool.md" +"Python Expert Course","Programming","Chatbase_Python_Expert_Learning_Course_.md" +"Text Adventure","Games","Text Adventure Game.md" +"Easy to Break Prompt","Security","Can_you_figure_out_my_prompt_1_Easy_to_Break.md" +"Job Application Coach","Career","Job_Application_Coach-Job_GPT.md" +"GPT Action Creator","AI Development","GPT Action Schema Creator.md" +"GPT Defender","Security","GPT_Defender.md" +"People Also Ask","Research","People_Also_ask.md" +"Secret Alibis","Games","Secret_Alibis.md" +"Image Edit Merge","Art","Image Edit, Recreate & Merge.md" +"Img2Img","Art","img2img.md" +"Autism Simulator","Health","Autism_Simulator_Grade_3.md" +"Email Assistant","Business","Email.md" +"Skin Tone Analyst","Beauty","Skin_Tone_Analysis_Expert.md" +"Photo Restore Upscale","Photography","Restore and Upscale Photos.md" +"Artful Greeting Cards","Art","Artful_Greeting_AI_Cards.md" +"Artful Greeting v1","Art","Artful_Greeting_AI_Cards[v1].md" +"First-Order 
Logic","Philosophy","First-Order_Logic.md" +"Xhs Writer Mary","Writing","Xhs Writer - Mary.md" +"Blog Post Generator","Writing","Blog Post Generator.md" +"GPT Lite","AI","GPT_Lite.md" +"Vidsmith Scriptwriter","Video","Tubular_Scriptwriter_-_Vidsmith_v1.md" +"Vidsmith v0","Video","Tubular_Scriptwriter_-_Vidsmith_v1[v0].md" +"DarksAI Detective","Games","DarksAI-Detective Stories Game.md" +"Reverse Engineering Expert","Security","Reverse Engineering Expert.md" +"MetaMeta Abstraction","AI","MetaMeta! Raise the Level of Abstraction!(jp).md" +"PyWorkers","Programming","PyWorkers.md" +"Meme Magic","Entertainment","Meme Magic.md" +"Prompt Instructions Sim","AI","Prompt_Instructions_GPT_Simulation.md" +"Bowling Score Tracker","Sports","Bowling_Score_Tracker.md" +"Felt Artisan","Art","Felt_Artisan.md" +"The Illuminat Game","Games","The_Illuminat_-_Advanced_Dark_Strategy_Game..md" +"Meditation Guide","Health","Meditation.md" +"Meditation v0","Health","Meditation[v0].md" +"Game Time","Games","Game Time.md" +"Codey","Programming","Codey.md" +"Malware Analysis RE","Security","Malware Analysis+Reverse Engineering.md" +"Remote Revenues Analyst","Business","Remote_Revenues_Analyst.md" +"Retro Adventures","Games","Retro Adventures.md" +"Puppy Profiler","Pets","Puppy_Profiler.md" +"Wiener Joke Meme","Entertainment","Wiener_Joke_Meme_Creator.md" +"Deep Thoughts GPT","Philosophy","GPT_for_Deep_Thoughts.md" +"Cinema Buddy","Entertainment","Cinema_Buddy.md" +"Finance Wizard","Finance","Finance_Wizard.md" +"Guardian Monkey","Entertainment","Guardian_Monkey.md" +"MovieDeals Snapper","Entertainment","MovieDealsSnapper GPT.md" +"Atreides Family GPT","Entertainment","Atreides_Family_GPT.md" +"ClearGPT","AI","ClearGPT.md" +"Habit Coach","Health","Habit Coach.md" +"ByteBrains BITS","News","ByteBrains_B.I.T.S._-_Daily_AI_Newsletter.md" +"Lei","AI","Lei.md" +"FluidGPT","AI","FluidGPT_inofficial.md" +"Spellbook-Hotkey","Games","Spellbook-Hotkey Pandora's Box[1.1].md" +"Find My 
Case","Legal","Help_Me_Find_Case.md" +"PicDescribe","Art","PicDescribe.md" +"Medical Doctor","Medical","Medical_Doctor.md" +"Kube Debugger","Programming","Kube_Debugger.md" +"Photo Filter AI","Photography","Photo_Filter_AI.md" +"Simpsonize Me","Art","Simpsonize Me.md" +"Four Futures Planner","Futurism","The_Four_Futures_Planner.md" +"Bulletpointy","Writing","Bulletpointy.md" +"AILC BioChem","Science","AILC_BioChem.md" +"Bowling Coach","Sports","Kegler_Coach_bowling.md" +"Moby Dick RPG","Games","Moby Dick RPG .md" +"PROMPT GOD","AI Development","PROMPT GOD.md" +"Hacking Prompt","Security","Hackeando_o_Prompt.md" +"Multilingual Coach","Language","Multilingual_Motivational_Coach.md" +"EZBRUSH Text Maker","Art","EZBRUSH Readable Jumbled Text Maker.md" +"Love My Sister","Entertainment","完蛋!我爱上了姐姐.md" +"DynaRec Expert","Technology","DynaRec Expert.md" +"Cyber Security","Security","Cyber_security.md" +"Victor Hugo's Echo","Literature","Victor_Hugos_Echo.md" +"ELeven11","AI","ELeven11.md" +"Engagement Designer","Business","Engagement__Success_Criteria_Designer.md" +"Chaos Magick","Spirituality","Chaos Magick Assistant.md" +"Learn to Play Craps","Games","Learn_to_Play_Craps.md" +"CodeGPT Decompiler","Programming","CodeGPT Decompiler & Cheat Developer.md" +"Unbreakable GPT","Security","Unbreakable_GPT.md" +"Endless Challenge","Games","Endless_Challenge.md" +"Cauldron","Entertainment","Cauldron.md" +"AI Editor GPT","Writing","AI_Editor_GPT.md" +"Citizens Dawn","Politics","Citizens_Dawn.md" +"Write Like Me","Writing","Write_Like_Me.md" +"Password Keeper","Security","Password_Keeper.md" +"The Negotiator","Business","the_negotiator.md" +"Mean VC","Business","Mean_VC.md" +"GymStreak Creator","Fitness","GymStreak Workout Creator.md" +"Quant Finance","Finance","QuantFinance.md" +"Visual Weather Artist","Art","Visual Weather Artist GPT.md" +"Search AI","Search","Search.md" +"AI Song Maker","Music","AI_Song_Maker.md" +"Hacking Mentor","Security","Hacking_Mentor.md" +"Tribal 
Quest","Games","Tribal_Quest_Explorer.md" +"ActionsGPT","AI","ActionsGPT.md" +"ActionsGPT v1","AI","ActionsGPT[v1].md" +"Password Generator","Security","Password_Generator.md" +"Difficult to Hack","Security","Difficult_to_Hack_GPT.md" +"DeepGame","Games","DeepGame.md" +"Art Prompt","Art","ArtPrompt.md" +"Animal Chefs","Food","Animal Chefs.md" +"Maasai Grandma","Entertainment","WhatDoesMaasaiGrandmaKeep.md" +"GPT-Be-Gone","Security","GPT-Be-Gone.md" +"Teen Decoder","Parenting","Teen_Decoder.md" +"Choose Adventure","Games","Choose your own adventure!.md" +"322 Method Copywriter","Marketing","322 Method Ads Copywriter with Disrupter School.md" +"Grok AI","AI","Grok.md" +"Socratic Mentor","Education","Socratic Mentor.md" +"Tech Article Translator","Translation","科技文章翻译.md" +"All-around Writer","Writing","All-around_Writer_Professional_Version.md" +"Why-Why Analysis","Business","Why-Why Analysis-kun(jp).md" +"Professor Synapse","AI","Professor Synapse.md" +"Genius AI","AI","Genius.md" +"reSEARCHER","Research","reSEARCHER.md" +"AskYourPDF Research","Research","AskYourPDF Research Assistant.md" +"Scam Scanner","Security","Scam_Scanner.md" +"Competency Interview Coach","Career","Competency_Based_Interview_Coach_by_Veedence.md" +"Presence Process GPT","Psychology","Presence_Process_GPT.md" +"Prompt Injection GPT","Security","Prompt_injection_GPT.md" +"Mystic Palm Reader","Entertainment","Mystic_Palm_Reader.md" +"Watercolor Illustrator","Art","Watercolor Illustrator GPT.md" +"WH Social Media","Social Media","WH_social_media_assistant.md" +"U Cant Hack This","Security","U_Cant_Hack_This.md" +"Synthetic Data Factory","AI","Synthetic_Data_Factory.md" +"Cloud Interpreter","Programming","Cloud Interpreter.md" +"Alien Archivist","Entertainment","Alien_Archivist.md" +"Tyr","Mythology","Tyr.md" +"Carbon Impact Estimator","Environment","Carbon_Impact_Eco_Estimator.md" +"Baby Name Helper","Parenting","What_should_I_Name_my_Baby.md" +"World Class Prompt Engineer","AI 
Development","World Class Prompt Engineer.md" +"Tax Estimator","Finance","Tax Estimator.md" +"Dating Guide","Relationships","Dating_Guide_by_iris_Dating.md" +"Best Eco Chef","Food","The_best_Eco_Chef.md" +"PhiloCoffee Agent","Philosophy","PhiloCoffee_Agent.md" +"Tarot Master","Entertainment","Tarot_Master.md" +"Dev Helper","Programming","Dev Helper.md" +"Ghidra Ninja","Security","Ghidra Ninja.md" +"File Manipulation JP","Productivity","File Manipulation(jp).md" +"Water Colour Artist","Art","Water_Colour_Artist.md" +"Easily Hackable GPT","Security","Easily_Hackable_GPT.md" +"Questioneer","Education","Questioneer.md" +"Strong Country GPT","Politics","Study the Strong Country GPT.md" +"StoptheSteal GPT","Politics","StoptheSteal_GPT.md" +"Prompt Security Demo","Security","Prompt_Security_Demonstration.md" +"DreamGPT","Psychology","DreamGPT.md" +"Supercute Greeting Card","Art","Supercute_Greeting_Card_.md" +"Valentine's Gift Bot","Holidays","Valentines_Day_Gift_Bot_.md" +"DMGPT","Games","DMGPT.md" +"God of Cannabis","Entertainment","God_of_Cannabis.md" +"Co-Founder ID","Business","Co-Founder_ID.md" +"LegolizeGPT","Art","LegolizeGPT.md" +"Non-Commerce SEO Writer","Marketing","BigBosser_Non_Commerce_SEO_Writer.md" +"X3EM SuperClone","AI","X3EM_Clone_Anything_SuperCloneIt_.md" +"Ninja Grandma","Entertainment","What Secrets Does Grandma Hanae the Ninja Hold.md" +"Flashy Ukiyo-e","Art","Flashy_ukiyo-e.md" +"SouthPark Me","Art","SouthParkMe.md" +"Cat Ear Anime Girl","Art","猫耳美少女イラストメーカー.md" +"SWOT Analysis","Business","SWOT Analysis.md" +"Ai PDF","AI","Ai PDF.md" +"Prompt Injection Test-2","Security","Prompt_Injection_TEST-2.md" +"ML Model Whisperer","AI","ML_Model_Whisperer.md" +"Evolution Solution","Business","Exciting Evolution Solution-kun(jp).md" +"Pickup Line Pro","Relationships","Pickup_Line_Pro.md" +"Prompt Injection Maker","Security","Prompt_Injection_Maker.md" +"Robert Scoble Tech","Technology","Robert Scoble Tech.md" +"Whimsical 
Cat","Entertainment","Whimsical_Cat.md" +"Cooking Expert","Food","Cooking_expert.md" +"Global Mask Artisan","Art","Global_Mask_Artisan.md" +"Topical Authority Gen","Marketing","Topical_Authority_Generator.md" +"MidJourney V6 Prompter","AI Art","Mid_Journey_V6_Prompt_Creator.md" +"Effortless Book Summary","Writing","Effortless_Book_Summary.md" +"Reverse Prompt DE","AI Art","Reverse Prompt Engineering Deutsch.md" +"Keymate.AI GPT","AI","Keymate.AI_GPT_Beta.md" +"IDA Pro SDK","Programming","IDA_Pro_-_C_SDK__and_decompiler.md" +"Professor Orion","Education","Professor_Orion_Content_Warning.md" +"PyTorch Implementer","Programming","Pytorch_Model_Implementer.md" +"Matka Sakka Help","Health","Matka_Sakka_King_Addiction_Help.md" +"Homemade Candle Guide","Crafts","A_Multilingual_Guide_to_Homemade_Candles.md" +"Z3 MaxSAT Liaison","Programming","Z3_MaxSAT_Liasion.md" +"Whimsical Diagrams","Design","Whimsical_Diagrams.md" +"MS-Presentation","Presentations","MS-Presentation.md" +"EverQuest Expert","Games","EverQuest Expert.md" +"The Rizz Game","Relationships","The Rizz Game.md" +"Slide Maker","Presentations","Slide Maker.md" +"Image Reverse Prompt","AI Art","Image Reverse Prompt Engineering.md" +"Unlimited Prompt Layering","AI Development","Unlimited_Prompt_Layering_GPT.md" +"Space AI Law Assistant","Legal","Jeremy_Space_AI_Law_Assistant.md" +"ELIZA Recreation","AI","ELIZA-A_Recreation_Of_The_Worlds_First_Chatbot.md" +"Flask Fortress","Programming","Flask_Fortress_Secure_Coding.md" +"Universal Cartoonist","Art","Universal_Cartoonist_UCTN.md" +"GPT Girlfriend","Relationships","GPT-girl_friend_By_lusia.md" +"Code Critic Gilfoyle","Programming","Code Critic Gilfoyle.md" +"Typeframes Video","Video","Typeframes - Video Creation.md" +"Mirror Muse","Art","Mirror_Muse.md" +"English to Chinese","Translation","English_to_Chinese.md" +"Pepe Generator","Art","Pepe_Generator.md" +"CosplayAIs柱AI","Art","CosplayAIs柱AI_-蜜璃-.md" +"Black Swan Divination","Entertainment","黑天鹅占卜.md" +"VideoDB 
Pricing","Video","VideoDB_Pricing.md" +"Roman Empire GPT","History","RomanEmpireGPT.md" +"AI Paper Polisher","Academic","AI Paper Polisher Pro.md" +"Document Comparator","Productivity","Comparador_de_Documentos.md" +"AI Doctor","Medical","AI Doctor.md" +"Framework Finder","Programming","Framework_Finder.md" +"Time Optimizer","Productivity","Time_Optimizer.md" +"3D Print Master","3D Printing","3D Print Master.md" +"3D Print Master","3D Printing","3D_Print_Master.md" +"Financial Calculator","Finance","Financial_Calculator.md" +"Zeus Weather God","Entertainment","Zeus_the_Weather_God.md" +"Uninjectable GPT L1","Security","Uninjectable_GPT_Level_1.md" +"Quality Raters SEO","Marketing","Quality Raters SEO Guide.md" +"EncryptEase","Security","EncryptEase_Secure_Comms_Master.md" +"Guarded Cat-Eared Girl","Security","Guarded Cat-Eared Girl.md" +"Golang Developer","Programming","Golang_Developer.md" +"Character Forger","Writing","Character Forger.md" +"Gutenberg Blocks","Web Development","Learn_Gutenberg_Blocks.md" +"Ads Generator","Marketing","Ads Generator by joe.md" +"Dr. Unanyan","Health","Доктор_Унанян__Контрацепция__Задать_вопрос.md" +"Book Search","Research","Book_Search.md" +"Mr. 
Cat","Entertainment","Mr._Cat.md" +"Zen Sleep Coach","Health","Zen_Sleep_Coach.md" +"AI Word Cloud Maker","Data Visualization","AI_Word_Cloud_Maker.md" +"Long-Form AI Writer","Writing","Best_Long-Form_AI_Writing_Tool_by_Alex_Kosch.md" +"FrameCaster","Video","FrameCaster.md" +"Cheat Checker","Security","Cheat Checker.md" +"Puto Coding","Programming","Puto_Coding.md" +"2024 Predictions","Futurism","World_Predictions_for_2024_by_Michel_Hayek.md" +"Exam Strategy","Education","esame_strategy.md" +"Ask and Achieve","Self-Help","Ask_and_Achieve.md" +"Pic-book Artist","Art","Pic-book Artist.md" +"Page Summarizer","Productivity","Page_Summarizer.md" +"Tech Support Advisor","Technology","tech_support_advisor.md" +"Harmonia Mindfulness","Health","Harmonia__Mindfulness_and_Self-Hypnosis_Coach.md" +"Jessica Gold AI","Relationships","Jessica_Gold_AI_Sex__Relationship_Coach_for_Men.md" +"Client Passion Expert","Business","Client Passion Expert.md" +"Physics Helper","Science","physics.md" +"Communication Coach","Communication","Communication_Coach.md" +"Character Story Creator","Writing","Character_Story_Creator.md" +"Watts GPT","Philosophy","Watts_GPT.md" +"Alien Autopsy","Entertainment","Alien_Autopsy_Assistant.md" +"Action Showcase","Business","Action_Showcase.md" +"Cheat Master","Games","Cheat Master.md" +"Shin-Shin Injection","Security","Shin-Shin Injection.md" +"AI Futures Anthology","Futurism","AI_Futures_An_Anthology_-_Exploratorium.md" +"Smart Brief","Business","Brie_demo_The_Smart_Brief.md" +"VeroÄly","AI","VeroÄly.md" +"Curling Club Secretary","Sports","Curling_Club_Secretary.md" +"Reverse Acronym Gen","Writing","Reverse_Acronym_Generator.md" +"Astrology Birth Chart","Entertainment","Astrology_Birth_Chart_GPT.md" +"Astrology Birth v0","Entertainment","Astrology_Birth_Chart_GPT[v0].md" +"Content SEO Analyzer","Marketing","Content Helpfulness and Quality SEO Analyzer.md" +"World Cup 2026","Sports","World_Cup_2026_Predictions.md" +"Time Travel Da 
Vinci","Entertainment","Time_Traveler_to_Da_Vinci.md" +"Workflow Enhancer","Productivity","Workflow_Enhancer_GPT.md" +"El Duderino 3000","Entertainment","El_Duderino_3000.md" +"Synonym Suggester","Writing","Synonym_Suggester.md" +"Emma AI","AI","Emma.md" +"Expat Accountant","Finance","Accountant_for_U.S._Citizens_Abroad.md" +"GitChat","Programming","XD4AwvP12-GitChat.md" +"Reverse Engineering Success","Security","Reverse Engineering Success.md" +"Michelangelo's Vision","Art","Michelangelos_Vision.md" +"Beijing Floating Life","Games","北京浮生记.md" +"Escape the Haunt","Games","Escape_the_Haunt.md" +"Sentinel Did-0","Security","Sentinel_Did-0.md" +"Explain to a Child","Education","Explain_to_a_Child.md" +"Church Social Doctrine","Religion","La_doctrine_sociale_de_lEglise.md" +"Thich Nhat Hanh","Spirituality","Thich Nhat Hanh's Teachings and Poetry.md" +"Makeup Maven","Beauty","Makeup_Maven.md" +"Certainly But Not Now","Productivity","Certainly_But_not_now..md" +"English Proofreader","Writing","英文校正GPT.md" +"Hong Kong GPT","Geography","HongKongGPT.md" +"Starter Pack Gen","Productivity","Starter Pack Generator.md" +"NSTA Denver Assistant","Education","NSTA_Denver_Sessions_Assistant.md" +"Leads Collector","Business","Leads_Collector.md" +"Hack Me","Security","Hack_Me.md" +"Gospel of Thomas","Religion","Gospel_of_St_Thomas_Scholar.md" +"TRPG Scenario Support","Games","TRPG_Scenario_Support.md" +"Prompt Polisher","AI Development","Prompt_Polisher.md" +"Jailbroken GPT - DAN","Security","Jailbroken_GPT_-_DAN.md" +"Witty Wordsmith","Writing","Witty_Wordsmith.md" +"Steel Straw","Environment","Steel_Straw.md" +"S&P 500 Analyzer","Finance","SP_500_Stock_Analyzer_with_Price_Targets.md" +"Cinematic Sociopath","Entertainment","Cinematic_Sociopath.md" +"YT Transcriber","Video","YT transcriber.md" +"Circle Game Meme","Entertainment","Circle_Game_Meme_Generator.md" +"Dream Therapy","Psychology","Dream_Therapy.md" +"Chinese OCR","Language","Chinese OCR.md" +"Multilingual Mask 
Maestro","Art","Multilingual_Facial_Mask_Maestro.md" +"Friendly Helper","AI","Friendly_Helper.md" +"D&D 5e NPC Creator","Games","DnD_5e_NPC_Creator.md" +"LLM Security Game L4","Security","LLM_Security_Wizard_Game_-_LV_4.md" +"Western Civ History","History","History_of_Western_Civilization.md" +"Tax & Medical Deductions","Finance","Leave Your Tax Returns_and_Medical Deductions to Me!.md" +"Bake Off","Food","Bake Off.md" +"AutoExpert Academic","Education","AutoExpert (Academic).md" +"Prompt Injection Nyanta","Security","Prompt_Injection_Nyanta.md" +"Perrault Tales","Literature","Magical_Tales_Reinvented_Charles_Perrault.md" +"GPT Prompt Security","Security","GPT_Prompt_SecurityHacking.md" +"Prompt Reverse Engineer","AI Development","Prompt_Reverse_Engineer.md" +"Flashcards AI","Education","Flashcards AI.md" +"CaptureTheFlag GPT","Security","CaptureTheFlag_-_GPT_Edition.md" +"Tinder Whisperer","Relationships","Tinder Whisperer.md" +"Citation or Death","Academic","Give_me_citation_or_give_me_death.md" +"Podcast Summary Pro","Podcasts","Podcast_Summary_Pro.md" +"Acne Treatment Guide","Health","痤疮治疗指南.md" +"Experts GPT","AI","Experts_GPT.md" +"Enigma Adventure","Games","Enigma_Multilingual_Mystery_Adventure.md" +"Code Captures","Programming","Take Code Captures.md" +"FAANG-GPT","Business","FAANG-GPT.md" +"6 Thinking Caps","Psychology","6_Thinking_Caps.md" +"SecureMyGPTs","Security","SecureMyGPTs.md" +"Mr. Crowley","Entertainment","76iz872HL_Mr. 
Crowley.md" +"GPT Builder","AI Development","GPT Builder.md" +"Beard Growth Guru","Lifestyle","Beard_Growth_Guru.md" +"SpockGPT","Entertainment","SpockGPT.md" +"SEO Optimized Article","Marketing","Fully_SEO_Optimized_Article_including_FAQs.md" +"Movie Prod Viz","Entertainment","Movie_Prod_Viz.md" +"Married Life","Relationships","Married Life.md" +"The Universal Machine","Philosophy","The_Universal_Machine.md" +"Data Chef","Data","Data_Chef.md" +"WebSweepGPT","Security","WebSweepGPT.md" +"LLM Course","Education","LLM Course.md" +"ابن هشام الباحث","Religion","السيرة_النبوية_إبن_هشام_-_الباحث.md" +"Carl Coach","Relationships","Carl_coach_Cœur__Charme_.md" +"HackMeIfYouCan-v1","Security","HackMeIfYouCan-v1.md" +"1111 Wisdom Portal","Spirituality","1111 Eternal Wisdom Portal.md" +"GPT-4 Classic","AI","gpt4_classic.md" +"Web Mirror","Web","Web_Mirror.md" +"Indra.ai","AI","Indra.ai.md" +"IdaCode Potato","Programming","IdaCode_Potato.md" +"Cognitive Bias Detector","Psychology","Cognitive_Bias_Detector.md" +"Gerry Politician","Politics","Gerry_the_Inept_Politician.md" +"HR 815 Insight","Politics","Bill_Insight_for_H.R._815.md" +"Compliance Guard","Legal","Compliance_Guard.md" +"Pinterest Optimization","Marketing","Pinterest_Optimization_GPT.md" +"Translator AI","Language","Translator.md" +"LogoGPT","Design","LogoGPT.md" +"Books AI","Literature","Books.md" +"Startup Scout","Business","Startup_Scout.md" +"Japanese Chat Tutor","Language","Japanese_Casual_Chat_Tutor.md" +"F Mentor","Education","F_Mentor.md" +"Photo Multiverse","Photography","Photo_Multiverse.md" +"Alternative Reality","Entertainment","Alternative_Reality_Explorer.md" +"AlphaNotes GPT","Education","AlphaNotes GPT.md" +"Rogue AI RE","Security","Rogue_AI_-_Software_Reverse_Engineering.md" +"Public Domain Guide","Legal","Public Domain Navigator.md" +"Skill Scout","Career","Skill_Scout.md" +"Text Style Transfer","Writing","Text Style Transfer - Alice.md" +"Iterative Coding","Programming","Iterative_Coding.md" 
+"OpenAPI Builder","Programming","OpenAPI Builder.md" +"Werdy Writer Pro","Writing","Werdy Writer Pro.md" +"Image to Text","Productivity","Transcribir-_IA__Imagen_a_Texto.md" +"Jenny Role Play","Entertainment","Jenny_Role_Play.md" +"HubSpot Landing Page","Web Development","Landing_Page_Creator_from_HubSpot.md" +"AI Narrative Drone","AI","AI_Narrative_and_Causality_Drone_GPT.md" +"Survival Expert","Outdoors","Survival_Expert.md" +"Fisher's Friend","Fishing","Fishers_Friend.md" +"Adult Learning Coach","Education","Coaching Bot for Continuing to Learn Enjoyably as an Adult(jp).md" +"Rock-n-Controlla","Music","Rock-n-Controlla.md" +"Beauty Scout","Beauty","Beauty Scout.md" +"mferGPT","Entertainment","mferGPT.md" +"Beautify Selfie","Photography","Beautify_Your_Selfie.md" +"Congress Manifesto 2024","Politics","Congress_Manifesto_LS_election_2024.md" +"editGPT","Writing","editGPT.md" +"Inkspire","Art","Inkspire.md" +"ELI35","Education","ELI35.md" +"Faith Explorer","Religion","Faith_Explorer.md" +"Paper Art Maps","Art","Paper_Art_and_Wood_Veneer_Maps.md" +"dubGPT","Audio","dubGPT_by_Rask_AI.md" +"Creative Brainstorm","Creativity","Creative_Answers__Brainstorm_GPT.md" +"Security Test 1.1.1","Security","Security_Test[1.1.1].md" +"Bright Source","AI","Bright Source.md" +"Claude 3 Opus","AI","Claude_3_Opus.md" +"EduGenie","Education","EduGenie.md" +"TRIZ Master","Problem Solving","TRIZ Master.md" +"Product Manager Mock","Business","Product Manager Mock Prep.md" +"Coinflipper Game","Games","Coinflipper Game.md" \ No newline at end of file diff --git a/src/msteams/mod.rs b/src/msteams/mod.rs new file mode 100644 index 000000000..6f967d97a --- /dev/null +++ b/src/msteams/mod.rs @@ -0,0 +1,3 @@ +pub mod teams; + +pub use teams::*; diff --git a/src/msteams/teams.rs b/src/msteams/teams.rs new file mode 100644 index 000000000..866a3210f --- /dev/null +++ b/src/msteams/teams.rs @@ -0,0 +1,359 @@ +//! Microsoft Teams Channel Integration +//! +//! 
This module provides webhook handling and message processing for Microsoft Teams. +//! Currently under development for bot integration with Teams channels and direct messages. +//! +//! Key features: +//! - Bot Framework webhook handling +//! - Teams message and conversation support +//! - Adaptive cards for rich responses +//! - Session management per Teams user +//! - Integration with Microsoft Bot Framework + +use crate::shared::models::UserSession; +use crate::shared::state::AppState; +use axum::{http::StatusCode, response::Json, Router}; +use log::error; +use reqwest::Client; +use serde::{Deserialize, Serialize}; +use serde_json::json; +use std::sync::Arc; + +#[derive(Debug, Deserialize, Serialize)] +pub struct TeamsMessage { + #[serde(rename = "type")] + pub msg_type: String, + pub id: Option, + pub timestamp: Option, + pub from: TeamsUser, + pub conversation: TeamsConversation, + pub recipient: TeamsUser, + pub text: Option, + pub attachments: Option>, +} + +#[derive(Debug, Deserialize, Serialize)] +pub struct TeamsUser { + pub id: String, + pub name: Option, +} + +#[derive(Debug, Deserialize, Serialize)] +pub struct TeamsConversation { + pub id: String, + #[serde(rename = "conversationType")] + pub conversation_type: Option, +} + +#[derive(Debug, Deserialize, Serialize)] +pub struct TeamsAttachment { + #[serde(rename = "contentType")] + pub content_type: String, + pub content: serde_json::Value, +} + +#[derive(Debug)] +pub struct TeamsAdapter { + pub state: Arc, + pub app_id: String, + pub app_password: String, + pub service_url: String, + pub tenant_id: String, +} + +impl TeamsAdapter { + pub fn new(state: Arc) -> Self { + // Load configuration from environment variables + let app_id = std::env::var("TEAMS_APP_ID").unwrap_or_default(); + + let app_password = std::env::var("TEAMS_APP_PASSWORD").unwrap_or_default(); + + let service_url = std::env::var("TEAMS_SERVICE_URL") + .unwrap_or_else(|_| "https://smba.trafficmanager.net/br/".to_string()); + + let 
tenant_id = std::env::var("TEAMS_TENANT_ID").unwrap_or_default(); + + Self { + state, + app_id, + app_password, + service_url, + tenant_id, + } + } + + pub async fn handle_incoming_message( + &self, + Json(payload): Json, + ) -> Result { + if payload.msg_type != "message" { + return Ok(StatusCode::OK); + } + + if let Some(text) = payload.text { + if let Err(e) = self + .process_message(payload.from, payload.conversation, text) + .await + { + error!("Error processing Teams message: {}", e); + } + } + + Ok(StatusCode::ACCEPTED) + } + + async fn process_message( + &self, + from: TeamsUser, + conversation: TeamsConversation, + text: String, + ) -> Result<(), Box> { + // Process with bot + self.process_with_bot(&from.id, &conversation.id, &text) + .await?; + + Ok(()) + } + + async fn process_with_bot( + &self, + user_id: &str, + conversation_id: &str, + message: &str, + ) -> Result<(), Box> { + let _session = self.get_or_create_session(user_id).await?; + + // Process message through bot processor (simplified for now) + let response = format!("Received on Teams: {}", message); + self.send_message(conversation_id, user_id, &response) + .await?; + + Ok(()) + } + + async fn get_or_create_session( + &self, + user_id: &str, + ) -> Result> { + if let Some(redis_client) = &self.state.cache { + let mut conn = redis_client.get_multiplexed_async_connection().await?; + let session_key = format!("teams_session:{}", user_id); + + if let Ok(session_data) = redis::cmd("GET") + .arg(&session_key) + .query_async::(&mut conn) + .await + { + if let Ok(session) = serde_json::from_str::(&session_data) { + return Ok(session); + } + } + + let user_uuid = uuid::Uuid::parse_str(user_id).unwrap_or_else(|_| uuid::Uuid::new_v4()); + let session = UserSession { + id: uuid::Uuid::new_v4(), + user_id: user_uuid, + bot_id: uuid::Uuid::default(), + title: "Teams Session".to_string(), + context_data: serde_json::json!({"channel": "teams"}), + current_tool: None, + created_at: chrono::Utc::now(), + 
updated_at: chrono::Utc::now(), + }; + + let session_data = serde_json::to_string(&session)?; + redis::cmd("SET") + .arg(&session_key) + .arg(&session_data) + .arg("EX") + .arg(86400) + .query_async::<()>(&mut conn) + .await?; + + Ok(session) + } else { + let user_uuid = uuid::Uuid::parse_str(user_id).unwrap_or_else(|_| uuid::Uuid::new_v4()); + Ok(UserSession { + id: uuid::Uuid::new_v4(), + user_id: user_uuid, + bot_id: uuid::Uuid::default(), + title: "Teams Session".to_string(), + context_data: serde_json::json!({"channel": "teams"}), + current_tool: None, + created_at: chrono::Utc::now(), + updated_at: chrono::Utc::now(), + }) + } + } + + pub async fn get_access_token( + &self, + ) -> Result> { + let client = Client::new(); + let token_url = format!( + "https://login.microsoftonline.com/{}/oauth2/v2.0/token", + if self.tenant_id.is_empty() { + "botframework.com" + } else { + &self.tenant_id + } + ); + + let params = [ + ("grant_type", "client_credentials"), + ("client_id", &self.app_id), + ("client_secret", &self.app_password), + ("scope", "https://api.botframework.com/.default"), + ]; + + let response = client.post(&token_url).form(¶ms).send().await?; + + if !response.status().is_success() { + let error_text = response.text().await?; + return Err(format!("Failed to get Teams access token: {}", error_text).into()); + } + + #[derive(Deserialize)] + struct TokenResponse { + access_token: String, + } + + let token_response: TokenResponse = response.json().await?; + Ok(token_response.access_token) + } + + pub async fn send_message( + &self, + conversation_id: &str, + user_id: &str, + message: &str, + ) -> Result<(), Box> { + let access_token = self.get_access_token().await?; + let url = format!( + "{}/v3/conversations/{}/activities", + self.service_url.trim_end_matches('/'), + conversation_id + ); + + let activity = json!({ + "type": "message", + "text": message, + "from": { + "id": self.app_id, + "name": "Bot" + }, + "conversation": { + "id": conversation_id + }, + 
"recipient": { + "id": user_id + } + }); + + let client = Client::new(); + let response = client + .post(&url) + .bearer_auth(&access_token) + .json(&activity) + .send() + .await?; + + if !response.status().is_success() { + let error_text = response.text().await?; + error!("Teams API error: {}", error_text); + return Err(format!("Teams API error: {}", error_text).into()); + } + + Ok(()) + } + + pub async fn send_card( + &self, + conversation_id: &str, + user_id: &str, + title: &str, + options: Vec, + ) -> Result<(), Box> { + let access_token = self.get_access_token().await?; + let url = format!( + "{}/v3/conversations/{}/activities", + self.service_url.trim_end_matches('/'), + conversation_id + ); + + let actions: Vec<_> = options + .iter() + .map(|option| { + json!({ + "type": "Action.Submit", + "title": option, + "data": { + "action": option + } + }) + }) + .collect(); + + let card = json!({ + "type": "AdaptiveCard", + "version": "1.3", + "body": [ + { + "type": "TextBlock", + "text": title, + "size": "Medium", + "weight": "Bolder" + } + ], + "actions": actions + }); + + let activity = json!({ + "type": "message", + "from": { + "id": self.app_id, + "name": "Bot" + }, + "conversation": { + "id": conversation_id + }, + "recipient": { + "id": user_id + }, + "attachments": [ + { + "contentType": "application/vnd.microsoft.card.adaptive", + "content": card + } + ] + }); + + let client = Client::new(); + let response = client + .post(&url) + .bearer_auth(&access_token) + .json(&activity) + .send() + .await?; + + if !response.status().is_success() { + let error_text = response.text().await?; + error!("Teams API error: {}", error_text); + } + + Ok(()) + } +} + +pub fn router(state: Arc) -> Router> { + let adapter = Arc::new(TeamsAdapter::new(state.clone())); + + Router::new() + .route( + "/messages", + axum::routing::post({ + move |payload| async move { adapter.handle_incoming_message(payload).await } + }), + ) + .with_state(state) +} diff --git a/src/tasks/mod.rs 
b/src/tasks/mod.rs new file mode 100644 index 000000000..618e06b6b --- /dev/null +++ b/src/tasks/mod.rs @@ -0,0 +1,708 @@ +use axum::{ + extract::{Path, Query, State}, + http::StatusCode, + response::Json, + routing::{delete, get, post, put}, + Router, +}; +use chrono::{DateTime, Utc}; +use diesel::prelude::*; +use serde::{Deserialize, Serialize}; +use std::sync::Arc; +use tokio::sync::RwLock; +use uuid::Uuid; + +use crate::shared::utils::DbPool; + +// TODO: Replace sqlx queries with Diesel queries + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct TaskUpdate { + pub title: Option, + pub description: Option, + pub status: Option, + pub priority: Option, + pub assignee: Option, + pub due_date: Option>, + pub tags: Option>, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct Task { + pub id: Uuid, + pub title: String, + pub description: Option, + pub assignee: Option, + pub reporter: String, + pub status: TaskStatus, + pub priority: TaskPriority, + pub due_date: Option>, + pub estimated_hours: Option, + pub actual_hours: Option, + pub tags: Vec, + pub parent_task_id: Option, + pub subtasks: Vec, + pub dependencies: Vec, + pub attachments: Vec, + pub comments: Vec, + pub created_at: DateTime, + pub updated_at: DateTime, + pub completed_at: Option>, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "lowercase")] +pub enum TaskStatus { + Todo, + InProgress, + Review, + Done, + Blocked, + Cancelled, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "lowercase")] +pub enum TaskPriority { + Low, + Medium, + High, + Urgent, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct TaskComment { + pub id: Uuid, + pub task_id: Uuid, + pub author: String, + pub content: String, + pub created_at: DateTime, + pub updated_at: Option>, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct TaskTemplate { + pub id: Uuid, + pub name: String, + pub description: Option, + pub 
default_assignee: Option, + pub default_priority: TaskPriority, + pub default_tags: Vec, + pub checklist: Vec, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct ChecklistItem { + pub id: Uuid, + pub task_id: Uuid, + pub description: String, + pub completed: bool, + pub completed_by: Option, + pub completed_at: Option>, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct TaskBoard { + pub id: Uuid, + pub name: String, + pub description: Option, + pub columns: Vec, + pub owner: String, + pub members: Vec, + pub created_at: DateTime, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct BoardColumn { + pub id: Uuid, + pub name: String, + pub position: i32, + pub status_mapping: TaskStatus, + pub task_ids: Vec, + pub wip_limit: Option, +} + +pub struct TaskEngine { + db: Arc, + cache: Arc>>, +} + +impl TaskEngine { + pub fn new(db: Arc) -> Self { + Self { + db, + cache: Arc::new(RwLock::new(Vec::new())), + } + } + + /// Create a new task + pub async fn create_task(&self, task: Task) -> Result> { + // TODO: Implement with Diesel + /* + let result = sqlx::query!( + r#" + INSERT INTO tasks + (id, title, description, assignee, reporter, status, priority, + due_date, estimated_hours, tags, parent_task_id, created_at, updated_at) + VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13) + RETURNING * + "#, + task.id, + task.title, + task.description, + task.assignee, + task.reporter, + serde_json::to_value(&task.status)?, + serde_json::to_value(&task.priority)?, + task.due_date, + task.estimated_hours, + &task.tags[..], + task.parent_task_id, + task.created_at, + task.updated_at + ) + .fetch_one(self.db.as_ref()) + .await?; + + let created_task: Task = serde_json::from_value(serde_json::to_value(result)?)?; + */ + + let created_task = task.clone(); + + // Update cache + let mut cache = self.cache.write().await; + cache.push(created_task.clone()); + + Ok(created_task) + } + + /// Update an existing task + pub async fn 
update_task( + &self, + id: Uuid, + updates: TaskUpdate, + ) -> Result> { + let updated_at = Utc::now(); + + // Check if status is changing to Done + let completing = updates.status + .as_ref() + .map(|s| matches!(s, TaskStatus::Done)) + .unwrap_or(false); + + let completed_at = if completing { + Some(Utc::now()) + } else { + None + }; + + // TODO: Implement with Diesel + /* + let result = sqlx::query!( + r#" + UPDATE tasks + SET title = COALESCE($2, title), + description = COALESCE($3, description), + assignee = COALESCE($4, assignee), + status = COALESCE($5, status), + priority = COALESCE($6, priority), + due_date = COALESCE($7, due_date), + updated_at = $8, + completed_at = COALESCE($9, completed_at) + WHERE id = $1 + RETURNING * + "#, + id, + updates.get("title").and_then(|v| v.as_str()), + updates.get("description").and_then(|v| v.as_str()), + updates.get("assignee").and_then(|v| v.as_str()), + updates.get("status").and_then(|v| serde_json::to_value(v).ok()), + updates.get("priority").and_then(|v| serde_json::to_value(v).ok()), + updates + .get("due_date") + .and_then(|v| DateTime::parse_from_rfc3339(v.as_str()?).ok()) + .map(|dt| dt.with_timezone(&Utc)), + updated_at, + completed_at + ) + .fetch_one(self.db.as_ref()) + .await?; + + let updated_task: Task = serde_json::from_value(serde_json::to_value(result)?)?; + */ + + // Create a dummy updated task for now + let updated_task = Task { + id, + title: updates.title.unwrap_or_else(|| "Updated Task".to_string()), + description: updates.description, + assignee: updates.assignee, + reporter: "system".to_string(), + status: updates.status.unwrap_or(TaskStatus::Todo), + priority: updates.priority.unwrap_or(TaskPriority::Medium), + due_date: updates.due_date, + estimated_hours: None, + actual_hours: None, + tags: updates.tags.unwrap_or_default(), + parent_task_id: None, + subtasks: Vec::new(), + dependencies: Vec::new(), + attachments: Vec::new(), + comments: Vec::new(), + created_at: Utc::now(), + updated_at: 
Utc::now(), + completed_at, + }; + self.refresh_cache().await?; + + Ok(updated_task) + } + + /// Delete a task + pub async fn delete_task(&self, id: Uuid) -> Result> { + // First, check for dependencies + let dependencies = self.get_task_dependencies(id).await?; + if !dependencies.is_empty() { + return Err("Cannot delete task with dependencies".into()); + } + + // TODO: Implement with Diesel + /* + let result = sqlx::query!("DELETE FROM tasks WHERE id = $1", id) + .execute(self.db.as_ref()) + .await?; + */ + + self.refresh_cache().await?; + Ok(false) + } + + /// Get tasks for a specific user + pub async fn get_user_tasks( + &self, + _user_id: &str, + ) -> Result, Box> { + // TODO: Implement with Diesel + /* + let results = sqlx::query!( + r#" + SELECT * FROM tasks + WHERE assignee = $1 OR reporter = $1 + ORDER BY priority DESC, due_date ASC + "#, + user_id + ) + .fetch_all(self.db.as_ref()) + .await?; + + Ok(results + .into_iter() + .map(|r| serde_json::from_value(serde_json::to_value(r).unwrap()).unwrap()) + .collect()) + */ + Ok(vec![]) + } + + /// Get tasks by status + pub async fn get_tasks_by_status( + &self, + _status: TaskStatus, + ) -> Result, Box> { + // TODO: Implement with Diesel + /* + let results = sqlx::query!( + r#" + SELECT * FROM tasks + WHERE status = $1 + ORDER BY priority DESC, created_at ASC + "#, + serde_json::to_value(&status)? 
+ ) + .fetch_all(self.db.as_ref()) + .await?; + + Ok(results + .into_iter() + .map(|r| serde_json::from_value(serde_json::to_value(r).unwrap()).unwrap()) + .collect()) + */ + Ok(vec![]) + } + + /// Get overdue tasks + pub async fn get_overdue_tasks(&self) -> Result, Box> { + // TODO: Implement with Diesel + /* + let now = Utc::now(); + let results = sqlx::query!( + r#" + SELECT * FROM tasks + WHERE due_date < $1 AND status != 'done' AND status != 'cancelled' + ORDER BY due_date ASC + "#, + now + ) + .fetch_all(self.db.as_ref()) + .await?; + + Ok(results + .into_iter() + .map(|r| serde_json::from_value(serde_json::to_value(r).unwrap()).unwrap()) + .collect()) + */ + Ok(vec![]) + } + + /// Add a comment to a task + pub async fn add_comment( + &self, + task_id: Uuid, + author: &str, + content: &str, + ) -> Result> { + let comment = TaskComment { + id: Uuid::new_v4(), + task_id, + author: author.to_string(), + content: content.to_string(), + created_at: Utc::now(), + updated_at: None, + }; + + // TODO: Implement with Diesel + /* + sqlx::query!( + r#" + INSERT INTO task_comments (id, task_id, author, content, created_at) + VALUES ($1, $2, $3, $4, $5) + "#, + comment.id, + comment.task_id, + comment.author, + comment.content, + comment.created_at + ) + .execute(self.db.as_ref()) + .await?; + */ + + Ok(comment) + } + + /// Create a subtask + pub async fn create_subtask( + &self, + parent_id: Uuid, + subtask: Task, + ) -> Result> { + let mut subtask = subtask; + subtask.parent_task_id = Some(parent_id); + + let created = self.create_task(subtask).await?; + + // Update parent's subtasks list + // TODO: Implement with Diesel + /* + sqlx::query!( + r#" + UPDATE tasks + SET subtasks = array_append(subtasks, $1) + WHERE id = $2 + "#, + created.id, + parent_id + ) + .execute(self.db.as_ref()) + .await?; + */ + + Ok(created) + } + + /// Get task dependencies + pub async fn get_task_dependencies( + &self, + task_id: Uuid, + ) -> Result, Box> { + let task = 
self.get_task(task_id).await?; + let mut dependencies = Vec::new(); + + for dep_id in task.dependencies { + if let Ok(dep_task) = self.get_task(dep_id).await { + dependencies.push(dep_task); + } + } + + Ok(dependencies) + } + + /// Get a single task by ID + pub async fn get_task(&self, _id: Uuid) -> Result> { + // TODO: Implement with Diesel + /* + let result = sqlx::query!("SELECT * FROM tasks WHERE id = $1", id) + .fetch_one(self.db.as_ref()) + .await?; + + Ok(serde_json::from_value(serde_json::to_value(result)?)?) + */ + Err("Not implemented".into()) + } + + /// Calculate task progress (percentage) + pub async fn calculate_progress(&self, task_id: Uuid) -> Result> { + let task = self.get_task(task_id).await?; + + if task.subtasks.is_empty() { + // No subtasks, progress based on status + return Ok(match task.status { + TaskStatus::Todo => 0.0, + TaskStatus::InProgress => 50.0, + TaskStatus::Review => 75.0, + TaskStatus::Done => 100.0, + TaskStatus::Blocked => task.actual_hours.unwrap_or(0.0) / task.estimated_hours.unwrap_or(1.0) * 100.0, + TaskStatus::Cancelled => 0.0, + }); + } + + // Has subtasks, calculate based on subtask completion + let total = task.subtasks.len() as f32; + let mut completed = 0.0; + + for subtask_id in task.subtasks { + if let Ok(subtask) = self.get_task(subtask_id).await { + if matches!(subtask.status, TaskStatus::Done) { + completed += 1.0; + } + } + } + + Ok((completed / total) * 100.0) + } + + /// Create a task from template + pub async fn create_from_template( + &self, + _template_id: Uuid, + assignee: Option, + ) -> Result> { + // TODO: Implement with Diesel + /* + let template = sqlx::query!( + "SELECT * FROM task_templates WHERE id = $1", + template_id + ) + .fetch_one(self.db.as_ref()) + .await?; + + let template: TaskTemplate = serde_json::from_value(serde_json::to_value(template)?)?; + */ + + let template = TaskTemplate { + id: Uuid::new_v4(), + name: "Default Template".to_string(), + description: Some("Default 
template".to_string()), + default_assignee: None, + default_priority: TaskPriority::Medium, + default_tags: vec![], + checklist: vec![], + }; + + let task = Task { + id: Uuid::new_v4(), + title: template.name, + description: template.description, + assignee: assignee, + reporter: "system".to_string(), + status: TaskStatus::Todo, + priority: template.default_priority, + due_date: None, + estimated_hours: None, + actual_hours: None, + tags: template.default_tags, + parent_task_id: None, + subtasks: Vec::new(), + dependencies: Vec::new(), + attachments: Vec::new(), + comments: Vec::new(), + created_at: Utc::now(), + updated_at: Utc::now(), + completed_at: None, + }; + + let created = self.create_task(task).await?; + + // Create checklist items + for item in template.checklist { + let _checklist_item = ChecklistItem { + id: Uuid::new_v4(), + task_id: created.id, + description: item.description, + completed: false, + completed_by: None, + completed_at: None, + }; + + // TODO: Implement with Diesel + /* + sqlx::query!( + r#" + INSERT INTO task_checklists (id, task_id, description, completed) + VALUES ($1, $2, $3, $4) + "#, + checklist_item.id, + checklist_item.task_id, + checklist_item.description, + checklist_item.completed + ) + .execute(self.db.as_ref()) + .await?; + */ + } + + Ok(created) + } + + /// Send notification to assignee + async fn notify_assignee( + &self, + assignee: &str, + task: &Task, + ) -> Result<(), Box> { + // This would integrate with your notification system + // For now, just log it + log::info!( + "Notifying {} about new task assignment: {}", + assignee, + task.title + ); + Ok(()) + } + + /// Refresh the cache from database + async fn refresh_cache(&self) -> Result<(), Box> { + // TODO: Implement with Diesel + /* + let results = sqlx::query!("SELECT * FROM tasks ORDER BY created_at DESC") + .fetch_all(self.db.as_ref()) + .await?; + + let tasks: Vec = results + .into_iter() + .map(|r| 
serde_json::from_value(serde_json::to_value(r).unwrap()).unwrap()) + .collect(); + */ + + let tasks: Vec = vec![]; + + let mut cache = self.cache.write().await; + *cache = tasks; + + Ok(()) + } + + /// Get task statistics for reporting + pub async fn get_statistics( + &self, + user_id: Option<&str>, + ) -> Result> { + let base_query = if let Some(uid) = user_id { + format!("WHERE assignee = '{}' OR reporter = '{}'", uid, uid) + } else { + String::new() + }; + + // TODO: Implement with Diesel + /* + let stats = sqlx::query(&format!( + r#" + SELECT + COUNT(*) FILTER (WHERE status = 'todo') as todo_count, + COUNT(*) FILTER (WHERE status = 'in_progress') as in_progress_count, + COUNT(*) FILTER (WHERE status = 'done') as done_count, + COUNT(*) FILTER (WHERE due_date < NOW() AND status != 'done') as overdue_count, + AVG(actual_hours / NULLIF(estimated_hours, 0)) as avg_completion_ratio + FROM tasks + {} + "#, + base_query + )) + .fetch_one(self.db.as_ref()) + .await?; + */ + + // Return empty stats for now + Ok(serde_json::json!({ + "todo_count": 0, + "in_progress_count": 0, + "done_count": 0, + "overdue_count": 0, + "avg_completion_ratio": null + })) + } +} + +/// HTTP API handlers +pub mod handlers { + use super::*; + use axum::extract::{State as AxumState, Query as AxumQuery, Path as AxumPath}; + use axum::response::{Json as AxumJson, IntoResponse}; + use axum::http::StatusCode; + + pub async fn create_task_handler( + AxumState(_engine): AxumState, + AxumJson(task): AxumJson, + ) -> impl IntoResponse { + // TODO: Implement with actual engine + let created = task; + (StatusCode::OK, AxumJson(serde_json::json!(created))) + + } + + pub async fn get_tasks_handler( + AxumState(_engine): AxumState, + AxumQuery(_query): AxumQuery, + ) -> impl IntoResponse { + // TODO: Implement with actual engine + let tasks: Vec = vec![]; + (StatusCode::OK, AxumJson(serde_json::json!(tasks))) + } + + pub async fn update_task_handler( + AxumState(_engine): AxumState, + AxumPath(_id): 
AxumPath, + AxumJson(_updates): AxumJson, + ) -> impl IntoResponse { + // TODO: Implement with actual engine + let updated = serde_json::json!({"message": "Task updated"}); + (StatusCode::OK, AxumJson(updated)) + } + + pub async fn get_statistics_handler( + AxumState(_engine): AxumState, + AxumQuery(_query): AxumQuery, + ) -> impl IntoResponse { + // TODO: Implement with actual engine + let stats = serde_json::json!({ + "todo_count": 0, + "in_progress_count": 0, + "done_count": 0, + "overdue_count": 0 + }); + (StatusCode::OK, AxumJson(stats)) + } +} + +/// Configure task engine routes +pub fn configure(router: Router) -> Router +where + S: Clone + Send + Sync + 'static, +{ + use axum::routing::{get, post, put}; + + router + .route("/api/tasks", post(handlers::create_task_handler::)) + .route("/api/tasks", get(handlers::get_tasks_handler::)) + .route("/api/tasks/:id", put(handlers::update_task_handler::)) + .route("/api/tasks/statistics", get(handlers::get_statistics_handler::)) +} diff --git a/src/vector-db/mod.rs b/src/vector-db/mod.rs new file mode 100644 index 000000000..04feca085 --- /dev/null +++ b/src/vector-db/mod.rs @@ -0,0 +1,3 @@ +pub mod vectordb_indexer; + +pub use vectordb_indexer::{IndexingStats, IndexingStatus, VectorDBIndexer}; diff --git a/src/vector-db/vectordb_indexer.rs b/src/vector-db/vectordb_indexer.rs new file mode 100644 index 000000000..ccfbda8d4 --- /dev/null +++ b/src/vector-db/vectordb_indexer.rs @@ -0,0 +1,555 @@ +use anyhow::Result; +use chrono::{DateTime, Utc}; +use log::{error, info, warn}; +use std::collections::HashMap; +use std::path::PathBuf; +use std::sync::Arc; +use tokio::sync::RwLock; +use tokio::time::{sleep, Duration}; +use uuid::Uuid; + +// UserWorkspace struct for managing user workspace paths +#[derive(Debug, Clone)] +struct UserWorkspace { + root: PathBuf, + bot_id: Uuid, + user_id: Uuid, +} + +impl UserWorkspace { + fn new(root: PathBuf, bot_id: &Uuid, user_id: &Uuid) -> Self { + Self { + root, + bot_id: *bot_id, + 
user_id: *user_id, + } + } + + fn get_path(&self) -> PathBuf { + self.root.join(self.bot_id.to_string()).join(self.user_id.to_string()) + } +} +use crate::shared::utils::DbPool; + +// VectorDB types are defined locally in this module +#[cfg(feature = "vectordb")] +use qdrant_client::prelude::*; + +/// Indexing job status +#[derive(Debug, Clone, PartialEq)] +pub enum IndexingStatus { + Idle, + Running, + Paused, + Failed(String), +} + +/// Indexing statistics +#[derive(Debug, Clone)] +pub struct IndexingStats { + pub emails_indexed: u64, + pub files_indexed: u64, + pub emails_pending: u64, + pub files_pending: u64, + pub last_run: Option>, + pub errors: u64, +} + +/// User indexing job +#[derive(Debug)] +struct UserIndexingJob { + user_id: Uuid, + bot_id: Uuid, + workspace: UserWorkspace, + #[cfg(all(feature = "vectordb", feature = "email"))] + email_db: Option, + #[cfg(feature = "vectordb")] + drive_db: Option, + stats: IndexingStats, + status: IndexingStatus, +} + +/// Background vector DB indexer for all users +pub struct VectorDBIndexer { + db_pool: DbPool, + work_root: PathBuf, + qdrant_url: String, + embedding_generator: Arc, + jobs: Arc>>, + running: Arc>, + interval_seconds: u64, + batch_size: usize, +} + +impl VectorDBIndexer { + /// Create new vector DB indexer + pub fn new( + db_pool: DbPool, + work_root: PathBuf, + qdrant_url: String, + llm_endpoint: String, + ) -> Self { + Self { + db_pool, + work_root, + qdrant_url, + embedding_generator: Arc::new(EmailEmbeddingGenerator::new(llm_endpoint)), + jobs: Arc::new(RwLock::new(HashMap::new())), + running: Arc::new(RwLock::new(false)), + interval_seconds: 300, // Run every 5 minutes + batch_size: 10, // Index 10 items at a time + } + } + + /// Start the background indexing service + pub async fn start(self: Arc) -> Result<()> { + let mut running = self.running.write().await; + if *running { + warn!("Vector DB indexer already running"); + return Ok(()); + } + *running = true; + drop(running); + + info!("🚀 
Starting Vector DB Indexer background service"); + + let indexer = Arc::clone(&self); + tokio::spawn(async move { + indexer.run_indexing_loop().await; + }); + + Ok(()) + } + + /// Stop the indexing service + pub async fn stop(&self) { + let mut running = self.running.write().await; + *running = false; + info!("🛑 Stopping Vector DB Indexer"); + } + + /// Main indexing loop + async fn run_indexing_loop(self: Arc) { + loop { + // Check if still running + { + let running = self.running.read().await; + if !*running { + break; + } + } + + info!("🔄 Running vector DB indexing cycle..."); + + // Get all active users + match self.get_active_users().await { + Ok(users) => { + info!("Found {} active users to index", users.len()); + + for (user_id, bot_id) in users { + if let Err(e) = self.index_user_data(user_id, bot_id).await { + error!("Failed to index user {}: {}", user_id, e); + } + } + } + Err(e) => { + error!("Failed to get active users: {}", e); + } + } + + info!("✅ Indexing cycle complete"); + + // Sleep until next cycle + sleep(Duration::from_secs(self.interval_seconds)).await; + } + + info!("Vector DB Indexer stopped"); + } + + /// Get all active users from database + async fn get_active_users(&self) -> Result> { + let conn = self.db_pool.clone(); + + tokio::task::spawn_blocking(move || { + use crate::shared::models::schema::user_sessions::dsl::*; + use diesel::prelude::*; + + let mut db_conn = conn.get()?; + + // Get unique user_id and bot_id pairs from active sessions + let results: Vec<(Uuid, Uuid)> = user_sessions + .select((user_id, bot_id)) + .distinct() + .load(&mut db_conn)?; + + Ok::<_, anyhow::Error>(results) + }) + .await? 
+ } + + /// Index data for a specific user + async fn index_user_data(&self, user_id: Uuid, bot_id: Uuid) -> Result<()> { + info!("Indexing user: {} (bot: {})", user_id, bot_id); + + // Get or create job for this user + let mut jobs = self.jobs.write().await; + let job = jobs.entry(user_id).or_insert_with(|| { + let workspace = UserWorkspace::new(self.work_root.clone(), &bot_id, &user_id); + + UserIndexingJob { + user_id, + bot_id, + workspace, + email_db: None, + drive_db: None, + stats: IndexingStats { + emails_indexed: 0, + files_indexed: 0, + emails_pending: 0, + files_pending: 0, + last_run: None, + errors: 0, + }, + status: IndexingStatus::Idle, + } + }); + + if job.status == IndexingStatus::Running { + warn!("Job already running for user {}", user_id); + return Ok(()); + } + + job.status = IndexingStatus::Running; + + // Initialize vector DBs if needed + if job.email_db.is_none() { + let mut email_db = + UserEmailVectorDB::new(user_id, bot_id, job.workspace.email_vectordb()); + if let Err(e) = email_db.initialize(&self.qdrant_url).await { + warn!( + "Failed to initialize email vector DB for user {}: {}", + user_id, e + ); + } else { + job.email_db = Some(email_db); + } + } + + if job.drive_db.is_none() { + let mut drive_db = + UserDriveVectorDB::new(user_id, bot_id, job.workspace.drive_vectordb()); + if let Err(e) = drive_db.initialize(&self.qdrant_url).await { + warn!( + "Failed to initialize drive vector DB for user {}: {}", + user_id, e + ); + } else { + job.drive_db = Some(drive_db); + } + } + + drop(jobs); + + // Index emails + if let Err(e) = self.index_user_emails(user_id).await { + error!("Failed to index emails for user {}: {}", user_id, e); + } + + // Index files + if let Err(e) = self.index_user_files(user_id).await { + error!("Failed to index files for user {}: {}", user_id, e); + } + + // Update job status + let mut jobs = self.jobs.write().await; + if let Some(job) = jobs.get_mut(&user_id) { + job.status = IndexingStatus::Idle; + 
job.stats.last_run = Some(Utc::now()); + } + + Ok(()) + } + + /// Index user's emails + async fn index_user_emails(&self, user_id: Uuid) -> Result<()> { + let jobs = self.jobs.read().await; + let job = jobs + .get(&user_id) + .ok_or_else(|| anyhow::anyhow!("Job not found"))?; + + let email_db = match &job.email_db { + Some(db) => db, + None => { + warn!("Email vector DB not initialized for user {}", user_id); + return Ok(()); + } + }; + + // Get user's email accounts + let accounts = self.get_user_email_accounts(user_id).await?; + + info!( + "Found {} email accounts for user {}", + accounts.len(), + user_id + ); + + for account_id in accounts { + // Get recent unindexed emails (last 100) + match self.get_unindexed_emails(user_id, &account_id).await { + Ok(emails) => { + if emails.is_empty() { + continue; + } + + info!( + "Indexing {} emails for account {}", + emails.len(), + account_id + ); + + // Process in batches + for chunk in emails.chunks(self.batch_size) { + for email in chunk { + match self.embedding_generator.generate_embedding(&email).await { + Ok(embedding) => { + if let Err(e) = email_db.index_email(&email, embedding).await { + error!("Failed to index email {}: {}", email.id, e); + } else { + info!("✅ Indexed email: {}", email.subject); + } + } + Err(e) => { + error!( + "Failed to generate embedding for email {}: {}", + email.id, e + ); + } + } + } + + // Small delay between batches + sleep(Duration::from_millis(100)).await; + } + } + Err(e) => { + error!( + "Failed to get unindexed emails for account {}: {}", + account_id, e + ); + } + } + } + + Ok(()) + } + + /// Index user's files + async fn index_user_files(&self, user_id: Uuid) -> Result<()> { + let jobs = self.jobs.read().await; + let job = jobs + .get(&user_id) + .ok_or_else(|| anyhow::anyhow!("Job not found"))?; + + let drive_db = match &job.drive_db { + Some(db) => db, + None => { + warn!("Drive vector DB not initialized for user {}", user_id); + return Ok(()); + } + }; + + // Get user's files 
from drive + match self.get_unindexed_files(user_id).await { + Ok(files) => { + if files.is_empty() { + return Ok(()); + } + + info!("Indexing {} files for user {}", files.len(), user_id); + + // Process in batches + for chunk in files.chunks(self.batch_size) { + for file in chunk { + // Check if file should be indexed + let mime_type = file.mime_type.as_ref().map(|s| s.as_str()).unwrap_or(""); + if !FileContentExtractor::should_index(mime_type, file.file_size) { + continue; + } + + // Generate embedding for file content + let text = format!( + "File: {}\nType: {}\n\n{}", + file.file_name, file.file_type, file.content_text + ); + + match self + .embedding_generator + .generate_text_embedding(&text) + .await + { + Ok(embedding) => { + if let Err(e) = drive_db.index_file(&file, embedding).await { + error!("Failed to index file {}: {}", file.id, e); + } else { + info!("✅ Indexed file: {}", file.file_name); + } + } + Err(e) => { + error!("Failed to generate embedding for file {}: {}", file.id, e); + } + } + } + + // Small delay between batches + sleep(Duration::from_millis(100)).await; + } + } + Err(e) => { + error!("Failed to get unindexed files for user {}: {}", user_id, e); + } + } + + Ok(()) + } + + /// Get user's email accounts + async fn get_user_email_accounts(&self, user_id: Uuid) -> Result> { + let conn = self.db_pool.clone(); + + tokio::task::spawn_blocking(move || { + use diesel::prelude::*; + + let mut db_conn = conn.get()?; + + let results: Vec = diesel::sql_query( + "SELECT id::text FROM user_email_accounts WHERE user_id = $1 AND is_active = true", + ) + .bind::(user_id) + .load(&mut db_conn)? + .into_iter() + .filter_map(|row: diesel::QueryableByName| { + use diesel::sql_types::Text; + let id: Result = >::from_sql(row.get("id").ok()?); + id.ok() + }) + .collect(); + + Ok::<_, anyhow::Error>(results) + }) + .await? 
+ } + + /// Get unindexed emails (placeholder - needs actual implementation) + async fn get_unindexed_emails( + &self, + _user_id: Uuid, + _account_id: &str, + ) -> Result> { + // TODO: Implement actual email fetching from IMAP + // This should: + // 1. Connect to user's email account + // 2. Fetch recent emails (last 100) + // 3. Check which ones are not yet in vector DB + // 4. Return list of emails to index + + Ok(Vec::new()) + } + + /// Get unindexed files (placeholder - needs actual implementation) + async fn get_unindexed_files(&self, _user_id: Uuid) -> Result> { + // TODO: Implement actual file fetching from drive + // This should: + // 1. List user's files from MinIO/S3 + // 2. Check which ones are not yet in vector DB + // 3. Extract text content from files + // 4. Return list of files to index + + Ok(Vec::new()) + } + + /// Get indexing statistics for a user + pub async fn get_user_stats(&self, user_id: Uuid) -> Option { + let jobs = self.jobs.read().await; + jobs.get(&user_id).map(|job| job.stats.clone()) + } + + /// Get overall indexing statistics + pub async fn get_overall_stats(&self) -> IndexingStats { + let jobs = self.jobs.read().await; + + let mut total_stats = IndexingStats { + emails_indexed: 0, + files_indexed: 0, + emails_pending: 0, + files_pending: 0, + last_run: None, + errors: 0, + }; + + for job in jobs.values() { + total_stats.emails_indexed += job.stats.emails_indexed; + total_stats.files_indexed += job.stats.files_indexed; + total_stats.emails_pending += job.stats.emails_pending; + total_stats.files_pending += job.stats.files_pending; + total_stats.errors += job.stats.errors; + + if let Some(last_run) = job.stats.last_run { + if total_stats.last_run.is_none() || total_stats.last_run.unwrap() < last_run { + total_stats.last_run = Some(last_run); + } + } + } + + total_stats + } + + /// Pause indexing for a specific user + pub async fn pause_user_indexing(&self, user_id: Uuid) -> Result<()> { + let mut jobs = self.jobs.write().await; + if 
let Some(job) = jobs.get_mut(&user_id) { + job.status = IndexingStatus::Paused; + info!("⏸️ Paused indexing for user {}", user_id); + } + Ok(()) + } + + /// Resume indexing for a specific user + pub async fn resume_user_indexing(&self, user_id: Uuid) -> Result<()> { + let mut jobs = self.jobs.write().await; + if let Some(job) = jobs.get_mut(&user_id) { + job.status = IndexingStatus::Idle; + info!("▶️ Resumed indexing for user {}", user_id); + } + Ok(()) + } + + /// Trigger immediate indexing for a user + pub async fn trigger_user_indexing(&self, user_id: Uuid, bot_id: Uuid) -> Result<()> { + info!("🔄 Triggering immediate indexing for user {}", user_id); + self.index_user_data(user_id, bot_id).await + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_indexing_stats_creation() { + let stats = IndexingStats { + emails_indexed: 10, + files_indexed: 5, + emails_pending: 2, + files_pending: 3, + last_run: Some(Utc::now()), + errors: 0, + }; + + assert_eq!(stats.emails_indexed, 10); + assert_eq!(stats.files_indexed, 5); + } +} diff --git a/src/weba/mod.rs b/src/weba/mod.rs new file mode 100644 index 000000000..ca899745d --- /dev/null +++ b/src/weba/mod.rs @@ -0,0 +1,6 @@ +// WEBA module - Web Application features +// This module is a placeholder for future web application functionality + +pub fn init() { + // Placeholder for weba initialization +} diff --git a/src/whatsapp/mod.rs b/src/whatsapp/mod.rs new file mode 100644 index 000000000..da2a10573 --- /dev/null +++ b/src/whatsapp/mod.rs @@ -0,0 +1,3 @@ +pub mod whatsapp; + +pub use whatsapp::*; diff --git a/src/whatsapp/whatsapp.rs b/src/whatsapp/whatsapp.rs new file mode 100644 index 000000000..0949ca2a6 --- /dev/null +++ b/src/whatsapp/whatsapp.rs @@ -0,0 +1,444 @@ +//! WhatsApp Business Channel Integration +//! +//! This module provides webhook handling and message processing for WhatsApp Business API. +//! Currently under development for bot integration with WhatsApp Business accounts. +//! +//! 
Key features: +//! - Webhook verification and message handling +//! - WhatsApp text, media, and location messages +//! - Session management per WhatsApp user +//! - Media attachments support +//! - Integration with Meta's WhatsApp Business API + +use crate::shared::models::UserSession; +use crate::shared::state::AppState; +use axum::{extract::Query, http::StatusCode, response::Json, Router}; +use log::{error, info}; +use reqwest::Client; +use serde::{Deserialize, Serialize}; +use serde_json::json; +use std::sync::Arc; + +#[derive(Debug, Clone, Deserialize, Serialize)] +pub struct WhatsAppWebhook { + #[serde(rename = "hub.mode")] + pub hub_mode: Option, + #[serde(rename = "hub.verify_token")] + pub hub_verify_token: Option, + #[serde(rename = "hub.challenge")] + pub hub_challenge: Option, +} + +#[derive(Debug, Clone, Deserialize, Serialize)] +pub struct WhatsAppMessage { + pub entry: Vec, +} + +#[derive(Debug, Clone, Deserialize, Serialize)] +pub struct WhatsAppEntry { + pub id: String, + pub changes: Vec, +} + +#[derive(Debug, Clone, Deserialize, Serialize)] +pub struct WhatsAppChange { + pub value: WhatsAppValue, + pub field: String, +} + +#[derive(Debug, Clone, Deserialize, Serialize)] +pub struct WhatsAppValue { + pub messaging_product: String, + pub metadata: WhatsAppMetadata, + pub contacts: Option>, + pub messages: Option>, +} + +#[derive(Debug, Clone, Deserialize, Serialize)] +pub struct WhatsAppMetadata { + pub display_phone_number: String, + pub phone_number_id: String, +} + +#[derive(Debug, Clone, Deserialize, Serialize)] +pub struct WhatsAppContact { + pub profile: WhatsAppProfile, + pub wa_id: String, +} + +#[derive(Debug, Clone, Deserialize, Serialize)] +pub struct WhatsAppProfile { + pub name: String, +} + +#[derive(Debug, Clone, Deserialize, Serialize)] +pub struct WhatsAppIncomingMessage { + pub from: String, + pub id: String, + pub timestamp: String, + #[serde(rename = "type")] + pub msg_type: String, + pub text: Option, + pub image: Option, + pub 
document: Option, + pub audio: Option, + pub video: Option, + pub location: Option, +} + +#[derive(Debug, Clone, Deserialize, Serialize)] +pub struct WhatsAppText { + pub body: String, +} + +#[derive(Debug, Clone, Deserialize, Serialize)] +pub struct WhatsAppMedia { + pub id: String, + pub mime_type: Option, + pub sha256: Option, + pub caption: Option, +} + +#[derive(Debug, Clone, Deserialize, Serialize)] +pub struct WhatsAppLocation { + pub latitude: f64, + pub longitude: f64, + pub name: Option, + pub address: Option, +} + +#[derive(Debug)] +pub struct WhatsAppAdapter { + pub state: Arc, + pub access_token: String, + pub phone_number_id: String, + pub verify_token: String, +} + +impl WhatsAppAdapter { + pub fn new(state: Arc) -> Self { + // Load configuration from environment variables + let access_token = std::env::var("WHATSAPP_ACCESS_TOKEN").unwrap_or_default(); + + let phone_number_id = std::env::var("WHATSAPP_PHONE_ID").unwrap_or_default(); + + let verify_token = + std::env::var("WHATSAPP_VERIFY_TOKEN").unwrap_or_else(|_| "webhook_verify".to_string()); + + Self { + state, + access_token, + phone_number_id, + verify_token, + } + } + + pub async fn handle_webhook_verification( + &self, + params: Query, + ) -> Result { + if let (Some(mode), Some(token), Some(challenge)) = ( + ¶ms.hub_mode, + ¶ms.hub_verify_token, + ¶ms.hub_challenge, + ) { + if mode == "subscribe" && token == &self.verify_token { + info!("WhatsApp webhook verified successfully"); + return Ok(challenge.clone()); + } + } + + error!("WhatsApp webhook verification failed"); + Err(StatusCode::FORBIDDEN) + } + + pub async fn handle_incoming_message( + &self, + Json(payload): Json, + ) -> Result { + for entry in payload.entry { + for change in entry.changes { + if change.field == "messages" { + if let Some(messages) = change.value.messages { + for message in messages { + if let Err(e) = self.process_message(message).await { + error!("Error processing WhatsApp message: {}", e); + } + } + } + } + } + } 
+ + Ok(StatusCode::OK) + } + + async fn process_message( + &self, + message: WhatsAppIncomingMessage, + ) -> Result<(), Box> { + let user_phone = message.from.clone(); + let message_id = message.id.clone(); + + // Mark message as read + self.mark_as_read(&message_id).await?; + + // Extract message content based on type + let content = match message.msg_type.as_str() { + "text" => message.text.map(|t| t.body).unwrap_or_default(), + "image" => { + if let Some(image) = message.image { + format!("[Image: {}]", image.caption.unwrap_or_default()) + } else { + String::new() + } + } + "audio" => "[Audio message]".to_string(), + "video" => "[Video message]".to_string(), + "document" => "[Document]".to_string(), + "location" => { + if let Some(loc) = message.location { + format!("[Location: {}, {}]", loc.latitude, loc.longitude) + } else { + String::new() + } + } + _ => String::new(), + }; + + if content.is_empty() { + return Ok(()); + } + + // Process with bot + self.process_with_bot(&user_phone, &content).await?; + + Ok(()) + } + + async fn process_with_bot( + &self, + from_number: &str, + message: &str, + ) -> Result<(), Box> { + // Create or get user session + let session = self.get_or_create_session(from_number).await?; + + // Process message through bot processor (simplified for now) + // In real implementation, this would call the bot processor + + // Send response back to WhatsApp + let response = format!("Received (session {}): {}", session.id, message); + self.send_message(from_number, &response).await?; + + Ok(()) + } + + async fn get_or_create_session( + &self, + phone_number: &str, + ) -> Result> { + // Check Redis for existing session + if let Some(redis_client) = &self.state.cache { + let mut conn = redis_client.get_multiplexed_async_connection().await?; + let session_key = format!("whatsapp_session:{}", phone_number); + + if let Ok(session_data) = redis::cmd("GET") + .arg(&session_key) + .query_async::(&mut conn) + .await + { + if let Ok(session) = 
serde_json::from_str::(&session_data) { + return Ok(session); + } + } + + // Create new session + let user_uuid = + uuid::Uuid::parse_str(phone_number).unwrap_or_else(|_| uuid::Uuid::new_v4()); + let session = UserSession { + id: uuid::Uuid::new_v4(), + user_id: user_uuid, + bot_id: uuid::Uuid::default(), // Default bot + title: "WhatsApp Session".to_string(), + context_data: serde_json::json!({"channel": "whatsapp"}), + current_tool: None, + created_at: chrono::Utc::now(), + updated_at: chrono::Utc::now(), + }; + + // Store in Redis + let session_data = serde_json::to_string(&session)?; + redis::cmd("SET") + .arg(&session_key) + .arg(&session_data) + .arg("EX") + .arg(86400) // 24 hours + .query_async::<()>(&mut conn) + .await?; + + Ok(session) + } else { + // Create ephemeral session + let user_uuid = + uuid::Uuid::parse_str(phone_number).unwrap_or_else(|_| uuid::Uuid::new_v4()); + Ok(UserSession { + id: uuid::Uuid::new_v4(), + user_id: user_uuid, + bot_id: uuid::Uuid::default(), + title: "WhatsApp Session".to_string(), + context_data: serde_json::json!({"channel": "whatsapp"}), + current_tool: None, + created_at: chrono::Utc::now(), + updated_at: chrono::Utc::now(), + }) + } + } + + pub async fn send_message( + &self, + to_number: &str, + message: &str, + ) -> Result<(), Box> { + let url = format!( + "https://graph.facebook.com/v17.0/{}/messages", + self.phone_number_id + ); + + let payload = json!({ + "messaging_product": "whatsapp", + "to": to_number, + "type": "text", + "text": { + "body": message + } + }); + + let client = Client::new(); + let response = client + .post(&url) + .bearer_auth(&self.access_token) + .json(&payload) + .send() + .await?; + + if !response.status().is_success() { + let error_text = response.text().await?; + error!("WhatsApp API error: {}", error_text); + return Err(format!("WhatsApp API error: {}", error_text).into()); + } + + Ok(()) + } + + pub async fn send_interactive_buttons( + &self, + to_number: &str, + header: &str, + buttons: 
Vec, + ) -> Result<(), Box> { + let url = format!( + "https://graph.facebook.com/v17.0/{}/messages", + self.phone_number_id + ); + + let button_list: Vec<_> = buttons + .iter() + .take(3) // WhatsApp limits to 3 buttons + .enumerate() + .map(|(i, text)| { + json!({ + "type": "reply", + "reply": { + "id": format!("button_{}", i), + "title": text + } + }) + }) + .collect(); + + let payload = json!({ + "messaging_product": "whatsapp", + "to": to_number, + "type": "interactive", + "interactive": { + "type": "button", + "header": { + "type": "text", + "text": header + }, + "body": { + "text": "Escolha uma opção:" + }, + "action": { + "buttons": button_list + } + } + }); + + let client = Client::new(); + let response = client + .post(&url) + .bearer_auth(&self.access_token) + .json(&payload) + .send() + .await?; + + if !response.status().is_success() { + let error_text = response.text().await?; + error!("WhatsApp API error: {}", error_text); + } + + Ok(()) + } + + async fn mark_as_read( + &self, + message_id: &str, + ) -> Result<(), Box> { + let url = format!( + "https://graph.facebook.com/v17.0/{}/messages", + self.phone_number_id + ); + + let payload = json!({ + "messaging_product": "whatsapp", + "status": "read", + "message_id": message_id + }); + + let client = Client::new(); + client + .post(&url) + .bearer_auth(&self.access_token) + .json(&payload) + .send() + .await?; + + Ok(()) + } + + pub async fn get_access_token(&self) -> &str { + &self.access_token + } +} + +pub fn router(state: Arc) -> Router> { + let adapter = Arc::new(WhatsAppAdapter::new(state.clone())); + + Router::new() + .route( + "/webhook", + axum::routing::get({ + let adapter = adapter.clone(); + move |params| async move { adapter.handle_webhook_verification(params).await } + }), + ) + .route( + "/webhook", + axum::routing::post({ + move |payload| async move { adapter.handle_incoming_message(payload).await } + }), + ) + .with_state(state) +} diff --git a/web/desktop/js/feature-manager.js 
/**
 * Feature Manager for General Bots Desktop
 * Manages dynamic feature toggling with Alpine.js
 * Syncs with backend feature flags and persists user preferences
 */

const FeatureManager = (function () {
  "use strict";

  // Feature definitions matching Cargo.toml features.
  // `required` features cannot be disabled; `dependencies` lists feature
  // ids that must be enabled first.
  const FEATURES = {
    // UI Features
    "web-server": {
      name: "Web Server",
      category: "ui",
      description: "Web interface and static file serving",
      icon: "🌐",
      required: true,
      dependencies: [],
    },
    desktop: {
      name: "Desktop UI",
      category: "ui",
      description: "Native desktop application with Tauri",
      icon: "🖥️",
      required: false,
      dependencies: ["web-server"],
    },

    // Core Integrations
    vectordb: {
      name: "Vector Database",
      category: "core",
      description: "Semantic search and AI-powered indexing",
      icon: "🔍",
      required: false,
      dependencies: [],
    },
    llm: {
      name: "LLM/AI",
      category: "core",
      description: "Large Language Model integration",
      icon: "🤖",
      required: false,
      dependencies: [],
    },
    nvidia: {
      name: "NVIDIA GPU",
      category: "core",
      description: "GPU acceleration for AI workloads",
      icon: "⚡",
      required: false,
      dependencies: ["llm"],
    },

    // Communication Channels
    email: {
      name: "Email",
      category: "communication",
      description: "IMAP/SMTP email integration",
      icon: "📧",
      required: false,
      dependencies: [],
    },
    whatsapp: {
      name: "WhatsApp",
      category: "communication",
      description: "WhatsApp messaging integration",
      icon: "💬",
      required: false,
      dependencies: [],
    },
    instagram: {
      name: "Instagram",
      category: "communication",
      description: "Instagram DM integration",
      icon: "📸",
      required: false,
      dependencies: [],
    },
    msteams: {
      name: "Microsoft Teams",
      category: "communication",
      description: "Teams messaging integration",
      icon: "👥",
      required: false,
      dependencies: [],
    },

    // Productivity Features
    chat: {
      name: "Chat",
      category: "productivity",
      description: "Core chat messaging interface",
      icon: "💬",
      required: true,
      dependencies: [],
    },
    drive: {
      name: "Drive",
      category: "productivity",
      description: "File storage and management",
      icon: "📁",
      required: false,
      dependencies: [],
    },
    tasks: {
      name: "Tasks",
      category: "productivity",
      description: "Task management system",
      icon: "✓",
      required: false,
      dependencies: [],
    },
    calendar: {
      name: "Calendar",
      category: "productivity",
      description: "Calendar and scheduling",
      icon: "📅",
      required: false,
      dependencies: [],
    },
    meet: {
      name: "Meet",
      category: "productivity",
      description: "Video conferencing with LiveKit",
      icon: "📹",
      required: false,
      dependencies: [],
    },
    mail: {
      name: "Mail",
      category: "productivity",
      description: "Email client interface",
      icon: "✉️",
      required: false,
      dependencies: ["email"],
    },

    // Enterprise Features
    compliance: {
      name: "Compliance",
      category: "enterprise",
      description: "Audit logging and compliance tracking",
      icon: "📋",
      required: false,
      dependencies: [],
    },
    attendance: {
      name: "Attendance",
      category: "enterprise",
      description: "Employee attendance tracking",
      icon: "👤",
      required: false,
      dependencies: [],
    },
    directory: {
      name: "Directory",
      category: "enterprise",
      description: "LDAP/Active Directory integration",
      icon: "📖",
      required: false,
      dependencies: [],
    },
    weba: {
      name: "Web Automation",
      category: "enterprise",
      description: "Browser automation capabilities",
      icon: "🔧",
      required: false,
      dependencies: [],
    },
  };

  // Category display names
  const CATEGORIES = {
    ui: { name: "User Interface", icon: "🖥️" },
    core: { name: "Core Integrations", icon: "⚙️" },
    communication: { name: "Communication Channels", icon: "💬" },
    productivity: { name: "Productivity Apps", icon: "📊" },
    enterprise: { name: "Enterprise Features", icon: "🏢" },
  };

  // State management
  let enabledFeatures = new Set(); // user-toggled features (persisted)
  let availableFeatures = new Set(); // features compiled into the server
  let subscribers = [];

  /**
   * Initialize feature manager: restore persisted toggles, query the server
   * for compiled-in features, then notify subscribers once.
   */
  async function init() {
    console.log("🚀 Initializing Feature Manager...");

    // Load enabled features from localStorage
    loadFromStorage();

    // Fetch available features from backend
    await fetchServerFeatures();

    // Notify subscribers
    notifySubscribers();

    console.log("✓ Feature Manager initialized");
    console.log(`  Enabled: ${Array.from(enabledFeatures).join(", ")}`);
  }

  /**
   * Load features from localStorage; falls back to the minimal default set
   * on missing or corrupt data.
   */
  function loadFromStorage() {
    try {
      const stored = localStorage.getItem("enabledFeatures");
      if (stored) {
        const parsed = JSON.parse(stored);
        enabledFeatures = new Set(parsed);
      } else {
        // Default features if nothing stored
        enabledFeatures = new Set(["web-server", "chat"]);
      }
    } catch (e) {
      console.error("Failed to load features from storage:", e);
      enabledFeatures = new Set(["web-server", "chat"]);
    }
  }

  /**
   * Save features to localStorage (best-effort; errors only logged).
   */
  function saveToStorage() {
    try {
      const array = Array.from(enabledFeatures);
      localStorage.setItem("enabledFeatures", JSON.stringify(array));
    } catch (e) {
      console.error("Failed to save features to storage:", e);
    }
  }

  /**
   * Fetch available features from the server. On any failure, assumes
   * every defined feature is available so the UI degrades gracefully.
   */
  async function fetchServerFeatures() {
    try {
      const response = await fetch("/api/features/available");
      if (response.ok) {
        const data = await response.json();
        availableFeatures = new Set(data.features || []);
        console.log(
          "✓ Server features loaded:",
          Array.from(availableFeatures).join(", ")
        );
      } else {
        // Fallback: assume all features available
        availableFeatures = new Set(Object.keys(FEATURES));
        console.warn("⚠ Could not fetch server features, using all");
      }
    } catch (e) {
      console.warn("⚠ Could not connect to server:", e.message);
      // Fallback: assume all features available
      availableFeatures = new Set(Object.keys(FEATURES));
    }
  }

  /**
   * Check if a feature is enabled (user toggle).
   */
  function isEnabled(featureId) {
    return enabledFeatures.has(featureId);
  }

  /**
   * Check if a feature is available (compiled into the server build).
   */
  function isAvailable(featureId) {
    return availableFeatures.has(featureId);
  }

  /**
   * Enable a feature, recursively enabling its dependencies first.
   * Returns false (without enabling) if the feature is unknown, not
   * compiled in, or any dependency fails to enable.
   */
  async function enable(featureId) {
    const feature = FEATURES[featureId];
    if (!feature) {
      console.error(`Unknown feature: ${featureId}`);
      return false;
    }

    if (!isAvailable(featureId)) {
      console.error(`Feature not available (not compiled): ${featureId}`);
      return false;
    }

    // Check dependencies. FIX: the original ignored the result of enabling
    // a dependency, so a feature could end up enabled while its dependency
    // (e.g. one not compiled in) silently failed. Propagate the failure.
    for (const dep of feature.dependencies) {
      if (!isEnabled(dep)) {
        console.log(`Enabling dependency: ${dep} for ${featureId}`);
        const ok = await enable(dep);
        if (!ok) {
          console.error(
            `Cannot enable ${featureId}: dependency ${dep} could not be enabled`
          );
          return false;
        }
      }
    }

    // Enable the feature
    enabledFeatures.add(featureId);
    saveToStorage();

    // Notify server
    await notifyServer(featureId, true);

    notifySubscribers();
    console.log(`✓ Feature enabled: ${featureId}`);
    return true;
  }

  /**
   * Disable a feature, recursively disabling any enabled feature that
   * depends on it. Required features cannot be disabled.
   */
  async function disable(featureId) {
    const feature = FEATURES[featureId];
    if (!feature) {
      console.error(`Unknown feature: ${featureId}`);
      return false;
    }

    if (feature.required) {
      console.error(`Cannot disable required feature: ${featureId}`);
      return false;
    }

    // Check if any enabled feature depends on this
    for (const [id, f] of Object.entries(FEATURES)) {
      if (isEnabled(id) && f.dependencies.includes(featureId)) {
        console.log(`Disabling dependent feature: ${id}`);
        await disable(id);
      }
    }

    // Disable the feature
    enabledFeatures.delete(featureId);
    saveToStorage();

    // Notify server
    await notifyServer(featureId, false);

    notifySubscribers();
    console.log(`✓ Feature disabled: ${featureId}`);
    return true;
  }

  /**
   * Toggle a feature on/off.
   */
  async function toggle(featureId) {
    if (isEnabled(featureId)) {
      return await disable(featureId);
    } else {
      return await enable(featureId);
    }
  }

  /**
   * Notify server about a feature change (best-effort; never throws).
   */
  async function notifyServer(featureId, enabled) {
    try {
      await fetch("/api/features/toggle", {
        method: "POST",
        headers: { "Content-Type": "application/json" },
        body: JSON.stringify({
          feature: featureId,
          enabled: enabled,
        }),
      });
    } catch (e) {
      console.warn("Could not notify server:", e.message);
    }
  }

  /**
   * Subscribe to feature changes; returns an unsubscribe function.
   */
  function subscribe(callback) {
    subscribers.push(callback);
    return () => {
      subscribers = subscribers.filter((cb) => cb !== callback);
    };
  }

  /**
   * Notify all subscribers with the current enabled/available snapshot.
   */
  function notifySubscribers() {
    const data = {
      enabled: Array.from(enabledFeatures),
      available: Array.from(availableFeatures),
    };
    subscribers.forEach((callback) => callback(data));
  }

  /**
   * Get a feature's static definition, or null if unknown.
   */
  function getFeature(featureId) {
    return FEATURES[featureId] || null;
  }

  /**
   * Get all features grouped by category, each annotated with its current
   * enabled/available state.
   */
  function getFeaturesByCategory() {
    const byCategory = {};
    for (const [id, feature] of Object.entries(FEATURES)) {
      if (!byCategory[feature.category]) {
        byCategory[feature.category] = [];
      }
      byCategory[feature.category].push({
        id,
        ...feature,
        enabled: isEnabled(id),
        available: isAvailable(id),
      });
    }
    return byCategory;
  }

  /**
   * Get category display info.
   */
  function getCategories() {
    return CATEGORIES;
  }

  /**
   * Get enabled feature IDs.
   */
  function getEnabled() {
    return Array.from(enabledFeatures);
  }

  /**
   * Get available feature IDs.
   */
  function getAvailable() {
    return Array.from(availableFeatures);
  }

  /**
   * Update UI visibility based on enabled features: hides app-menu items
   * and grays out `[data-feature]` sections for disabled features.
   */
  function updateUI() {
    // Hide/show app menu items based on features
    const appItems = document.querySelectorAll(".app-item");
    appItems.forEach((item) => {
      const section = item.dataset.section;
      const featureId = section; // Assuming section names match feature IDs

      if (FEATURES[featureId]) {
        if (isEnabled(featureId)) {
          item.style.display = "";
          item.removeAttribute("disabled");
        } else {
          item.style.display = "none";
        }
      }
    });

    // Update main content sections
    const mainContent = document.getElementById("main-content");
    if (mainContent) {
      // Mark sections as available/unavailable
      const sections = mainContent.querySelectorAll("[data-feature]");
      sections.forEach((section) => {
        const featureId = section.dataset.feature;
        if (!isEnabled(featureId)) {
          section.classList.add("feature-disabled");
        } else {
          section.classList.remove("feature-disabled");
        }
      });
    }
  }

  // Auto-update UI when features change
  subscribe(() => {
    updateUI();
  });

  // Public API
  return {
    init,
    isEnabled,
    isAvailable,
    enable,
    disable,
    toggle,
    subscribe,
    getFeature,
    getFeaturesByCategory,
    getCategories,
    getEnabled,
    getAvailable,
    updateUI,
  };
})();

// Initialize on DOM ready
if (document.readyState === "loading") {
  document.addEventListener("DOMContentLoaded", () => {
    FeatureManager.init();
  });
} else {
  FeatureManager.init();
}

// Make available globally
window.FeatureManager = FeatureManager;