- Refactor folders into feature modules.

This commit is contained in:
Rodrigo Rodriguez (Pragmatismo) 2025-11-22 22:55:35 -03:00
parent 33c9df8418
commit bf0ed6223f
83 changed files with 18803 additions and 0 deletions

12
src/attendance/mod.rs Normal file
View file

@ -0,0 +1,12 @@
//! Attendance Feature Module
//!
//! Groups the attendance-related HTTP API submodules: drive access,
//! keyword services, and the conversation queue for human attendants.
//! Supports web, desktop, and mobile clients.
//!
//! Note: Local operations require native access and are handled separately:
//! - Screen capture: Tauri commands (desktop) or WebRTC (web/mobile)
//! - File sync: Tauri commands with local rclone process (desktop only)
pub mod drive;
pub mod keyword_services;
pub mod queue;

658
src/attendance/queue.rs Normal file
View file

@ -0,0 +1,658 @@
//! Queue Management API for Attendant System
//!
//! Handles conversation queues, attendant assignment, and real-time updates.
//! Reads attendant data from attendant.csv in bot's .gbai folder.
use crate::shared::models::UserSession;
use crate::shared::state::AppState;
use axum::{
extract::{Path, Query, State},
http::StatusCode,
response::IntoResponse,
Json,
};
use chrono::Utc;
use diesel::prelude::*;
use log::{error, info, warn};
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use std::path::PathBuf;
use std::sync::Arc;
use uuid::Uuid;
/// A conversation waiting in (or assigned from) the attendant queue,
/// as returned by `GET /api/queue/list`.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct QueueItem {
    /// Session (conversation) identifier.
    pub session_id: Uuid,
    /// Owner of the session.
    pub user_id: Uuid,
    /// Bot the conversation belongs to.
    pub bot_id: Uuid,
    /// Channel the conversation arrived on (e.g. "web" when unknown).
    pub channel: String,
    /// Display name of the user (synthetic fallback when the user row is gone).
    pub user_name: String,
    pub user_email: Option<String>,
    /// Last message preview (currently the session title).
    pub last_message: String,
    /// RFC 3339 timestamp of the last activity.
    pub last_message_time: String,
    /// Seconds elapsed since the session was last touched.
    pub waiting_time_seconds: i64,
    /// 1 = normal, 2 = escalated (waiting more than 5 minutes).
    pub priority: i32,
    pub status: QueueStatus,
    /// Attendant currently handling the conversation, if any.
    pub assigned_to: Option<Uuid>,
    pub assigned_to_name: Option<String>,
}
/// Lifecycle state of a queued conversation.
/// Serialized in snake_case (e.g. "waiting") for API payloads.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
pub enum QueueStatus {
    /// No attendant has picked the conversation up yet.
    Waiting,
    /// An attendant has been assigned but has not engaged.
    Assigned,
    /// Conversation actively being handled.
    Active,
    /// Conversation closed successfully.
    Resolved,
    /// User left before being helped.
    Abandoned,
}
/// Attendant summary returned by `GET /api/queue/attendants`.
///
/// Identity fields come from attendant.csv; the counters are currently
/// placeholders (always zero) until live stats are wired up.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AttendantStats {
    pub attendant_id: String,
    pub attendant_name: String,
    /// Channel the attendant covers (from the CSV).
    pub channel: String,
    /// Free-form preference string (from the CSV).
    pub preferences: String,
    pub active_conversations: i32,
    pub total_handled_today: i32,
    pub avg_response_time_seconds: i32,
    pub status: AttendantStatus,
}
/// One row of a bot's `attendant.csv` file
/// (columns: id, name, channel, preferences).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AttendantCSV {
    pub id: String,
    pub name: String,
    pub channel: String,
    pub preferences: String,
}
/// Presence state of an attendant, serialized in snake_case.
/// Currently every CSV attendant is reported as `Online`.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
pub enum AttendantStatus {
    Online,
    Busy,
    Away,
    Offline,
}
/// Request body for `POST /api/queue/assign`.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AssignRequest {
    /// Conversation to hand over.
    pub session_id: Uuid,
    /// Attendant receiving it.
    pub attendant_id: Uuid,
}
/// Request body for `POST /api/queue/transfer`.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TransferRequest {
    pub session_id: Uuid,
    /// Attendant currently holding the conversation.
    pub from_attendant_id: Uuid,
    /// Attendant the conversation moves to.
    pub to_attendant_id: Uuid,
    /// Optional free-text reason recorded in the session context.
    pub reason: Option<String>,
}
/// Query-string filters accepted by `GET /api/queue/list`.
/// NOTE(review): these are parsed and logged but not yet applied to the
/// database query in `list_queue` — confirm whether filtering is still TODO.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct QueueFilters {
    pub channel: Option<String>,
    pub status: Option<String>,
    pub assigned_to: Option<Uuid>,
}
/// Check whether the bot has attendant transfer enabled in its `config.csv`.
///
/// Reads `<work_path>/<bot_id>.gbai/config.csv` and looks for a row whose
/// first column is `transfer` and second column is `true` (both compared
/// case-insensitively). Returns `false` when the file is missing or
/// unreadable.
///
/// Parsing per-column fixes the previous substring check, which treated any
/// line containing both "transfer" and "true" anywhere as enabled (e.g. a
/// `transfer,false` row with an unrelated "true" token elsewhere).
///
/// NOTE(review): this uses blocking `std::fs` inside an async fn; fine for a
/// tiny config file, but consider `spawn_blocking` if it shows in profiles.
async fn is_transfer_enabled(bot_id: Uuid, work_path: &str) -> bool {
    let config_path = PathBuf::from(work_path)
        .join(format!("{}.gbai", bot_id))
        .join("config.csv");
    if !config_path.exists() {
        warn!("Config file not found: {:?}", config_path);
        return false;
    }
    match std::fs::read_to_string(&config_path) {
        Ok(content) => content.lines().any(|line| {
            // Treat each row as `key,value,...` per the .csv convention
            // used elsewhere (see attendant.csv parsing).
            let mut fields = line.split(',').map(str::trim);
            matches!(
                (fields.next(), fields.next()),
                (Some(key), Some(value))
                    if key.eq_ignore_ascii_case("transfer")
                        && value.eq_ignore_ascii_case("true")
            )
        }),
        Err(e) => {
            error!("Failed to read config file: {}", e);
            false
        }
    }
}
/// Read attendants from attendant.csv
///
/// Loads `<work_path>/<bot_id>.gbai/attendant.csv`, skips the header row,
/// and keeps every row with at least four columns (id, name, channel,
/// preferences). Missing or unreadable files yield an empty list.
async fn read_attendants_csv(bot_id: Uuid, work_path: &str) -> Vec<AttendantCSV> {
    let attendant_path = PathBuf::from(work_path)
        .join(format!("{}.gbai", bot_id))
        .join("attendant.csv");
    if !attendant_path.exists() {
        warn!("Attendant file not found: {:?}", attendant_path);
        return Vec::new();
    }
    let content = match std::fs::read_to_string(&attendant_path) {
        Ok(text) => text,
        Err(e) => {
            error!("Failed to read attendant file: {}", e);
            return Vec::new();
        }
    };
    // First line is the CSV header; every remaining row that has the four
    // required columns becomes one attendant record.
    content
        .lines()
        .skip(1)
        .filter_map(|row| {
            let cols: Vec<&str> = row.split(',').map(str::trim).collect();
            if cols.len() >= 4 {
                Some(AttendantCSV {
                    id: cols[0].to_string(),
                    name: cols[1].to_string(),
                    channel: cols[2].to_string(),
                    preferences: cols[3].to_string(),
                })
            } else {
                None
            }
        })
        .collect()
}
/// GET /api/queue/list
/// Get all conversations in queue (only if bot has transfer=true)
///
/// Returns up to the 50 most recent user sessions as `QueueItem`s,
/// enriched with the owning user's name/email and a computed waiting time.
/// Database work runs on a blocking thread via `spawn_blocking` because
/// Diesel connections are synchronous. On any failure an empty list is
/// returned with HTTP 500.
pub async fn list_queue(
    State(state): State<Arc<AppState>>,
    Query(filters): Query<QueueFilters>,
) -> impl IntoResponse {
    info!("Listing queue items with filters: {:?}", filters);
    // NOTE(review): `filters` is logged but never applied to the query —
    // confirm whether channel/status/assigned_to filtering is still TODO.
    let result = tokio::task::spawn_blocking({
        let conn = state.conn.clone();
        move || {
            let mut db_conn = conn
                .get()
                .map_err(|e| format!("Failed to get database connection: {}", e))?;
            use crate::shared::models::schema::user_sessions;
            use crate::shared::models::schema::users;
            // Build query - get recent sessions with user info
            let sessions_data: Vec<UserSession> = user_sessions::table
                .order(user_sessions::created_at.desc())
                .limit(50)
                .load(&mut db_conn)
                .map_err(|e| format!("Failed to load sessions: {}", e))?;
            let mut queue_items = Vec::new();
            for session_data in sessions_data {
                // Get user info separately (one extra lookup per session).
                let user_info: Option<(String, String)> = users::table
                    .filter(users::id.eq(session_data.user_id))
                    .select((users::username, users::email))
                    .first(&mut db_conn)
                    .optional()
                    .map_err(|e| format!("Failed to load user: {}", e))?;
                // Fall back to synthetic identifiers when the user row is gone.
                let (uname, uemail) = user_info.unwrap_or_else(|| {
                    (
                        format!("user_{}", session_data.user_id),
                        format!("{}@unknown.local", session_data.user_id),
                    )
                });
                // Channel lives in the session's JSON context; default "web".
                let channel = session_data
                    .context_data
                    .get("channel")
                    .and_then(|c| c.as_str())
                    .unwrap_or("web")
                    .to_string();
                // Waiting time = seconds since the session was last touched.
                let waiting_time = (Utc::now() - session_data.updated_at).num_seconds();
                queue_items.push(QueueItem {
                    session_id: session_data.id,
                    user_id: session_data.user_id,
                    bot_id: session_data.bot_id,
                    channel,
                    user_name: uname,
                    user_email: Some(uemail),
                    last_message: session_data.title.clone(),
                    last_message_time: session_data.updated_at.to_rfc3339(),
                    waiting_time_seconds: waiting_time,
                    // Escalate priority after five minutes of waiting.
                    priority: if waiting_time > 300 { 2 } else { 1 },
                    status: QueueStatus::Waiting,
                    assigned_to: None,
                    assigned_to_name: None,
                });
            }
            Ok::<Vec<QueueItem>, String>(queue_items)
        }
    })
    .await;
    match result {
        Ok(Ok(queue_items)) => {
            info!("Found {} queue items", queue_items.len());
            (StatusCode::OK, Json(queue_items))
        }
        Ok(Err(e)) => {
            error!("Queue list error: {}", e);
            (
                StatusCode::INTERNAL_SERVER_ERROR,
                Json(vec![] as Vec<QueueItem>),
            )
        }
        Err(e) => {
            // spawn_blocking join error (worker panicked or was cancelled).
            error!("Task error: {}", e);
            (
                StatusCode::INTERNAL_SERVER_ERROR,
                Json(vec![] as Vec<QueueItem>),
            )
        }
    }
}
/// GET /api/queue/attendants?bot_id={bot_id}
/// Get all attendants from attendant.csv for a bot
///
/// When `bot_id` is missing or unparsable, falls back to the first active
/// bot in the database; responds 400 when neither is available. Returns an
/// empty 200 list when transfer is disabled for the bot.
pub async fn list_attendants(
    State(state): State<Arc<AppState>>,
    Query(params): Query<HashMap<String, String>>,
) -> impl IntoResponse {
    info!("Listing attendants");
    let bot_id_str = params.get("bot_id").cloned().unwrap_or_default();
    let bot_id = match Uuid::parse_str(&bot_id_str) {
        Ok(id) => id,
        Err(_) => {
            // Get default bot
            let conn = state.conn.clone();
            let result = tokio::task::spawn_blocking(move || {
                let mut db_conn = conn.get().ok()?;
                use crate::shared::models::schema::bots;
                bots::table
                    .filter(bots::is_active.eq(true))
                    .select(bots::id)
                    .first::<Uuid>(&mut db_conn)
                    .ok()
            })
            .await;
            match result {
                Ok(Some(id)) => id,
                _ => {
                    error!("No valid bot_id provided and no default bot found");
                    return (StatusCode::BAD_REQUEST, Json(vec![] as Vec<AttendantStats>));
                }
            }
        }
    };
    // Check if transfer is enabled
    // NOTE(review): the bot work directory is hardcoded to "./work" —
    // confirm this matches the configured work path used elsewhere.
    let work_path = "./work";
    if !is_transfer_enabled(bot_id, work_path).await {
        warn!("Transfer not enabled for bot {}", bot_id);
        return (StatusCode::OK, Json(vec![] as Vec<AttendantStats>));
    }
    // Read attendants from CSV
    let attendant_csvs = read_attendants_csv(bot_id, work_path).await;
    // Stats counters are placeholders; every attendant is reported Online.
    let attendants: Vec<AttendantStats> = attendant_csvs
        .into_iter()
        .map(|att| AttendantStats {
            attendant_id: att.id,
            attendant_name: att.name,
            channel: att.channel,
            preferences: att.preferences,
            active_conversations: 0,
            total_handled_today: 0,
            avg_response_time_seconds: 0,
            status: AttendantStatus::Online,
        })
        .collect();
    info!("Found {} attendants from CSV", attendants.len());
    (StatusCode::OK, Json(attendants))
}
/// POST /api/queue/assign
/// Assign conversation to attendant (stores in session context_data)
pub async fn assign_conversation(
State(state): State<Arc<AppState>>,
Json(request): Json<AssignRequest>,
) -> impl IntoResponse {
info!(
"Assigning session {} to attendant {}",
request.session_id, request.attendant_id
);
// Store assignment in session context_data
let result = tokio::task::spawn_blocking({
let conn = state.conn.clone();
let session_id = request.session_id;
let attendant_id = request.attendant_id;
move || {
let mut db_conn = conn
.get()
.map_err(|e| format!("Failed to get database connection: {}", e))?;
use crate::shared::models::schema::user_sessions;
// Get current session
let session: UserSession = user_sessions::table
.filter(user_sessions::id.eq(session_id))
.first(&mut db_conn)
.map_err(|e| format!("Session not found: {}", e))?;
// Update context_data with assignment
let mut ctx = session.context_data.clone();
ctx["assigned_to"] = serde_json::json!(attendant_id.to_string());
ctx["assigned_at"] = serde_json::json!(Utc::now().to_rfc3339());
ctx["status"] = serde_json::json!("assigned");
diesel::update(user_sessions::table.filter(user_sessions::id.eq(session_id)))
.set(user_sessions::context_data.eq(&ctx))
.execute(&mut db_conn)
.map_err(|e| format!("Failed to update session: {}", e))?;
Ok::<(), String>(())
}
})
.await;
match result {
Ok(Ok(())) => (
StatusCode::OK,
Json(serde_json::json!({
"success": true,
"session_id": request.session_id,
"attendant_id": request.attendant_id,
"assigned_at": Utc::now().to_rfc3339()
})),
),
Ok(Err(e)) => {
error!("Assignment error: {}", e);
(
StatusCode::INTERNAL_SERVER_ERROR,
Json(serde_json::json!({
"success": false,
"error": e
})),
)
}
Err(e) => {
error!("Assignment error: {:?}", e);
(
StatusCode::INTERNAL_SERVER_ERROR,
Json(serde_json::json!({
"success": false,
"error": format!("{:?}", e)
})),
)
}
}
}
/// POST /api/queue/transfer
/// Transfer conversation between attendants
///
/// Records the new attendant, timestamp, optional reason, and
/// `status = "transferred"` in the session's JSON context, and bumps
/// `updated_at`. The `from_attendant_id` is only echoed in the response;
/// it is not validated against the current assignment.
pub async fn transfer_conversation(
    State(state): State<Arc<AppState>>,
    Json(request): Json<TransferRequest>,
) -> impl IntoResponse {
    info!(
        "Transferring session {} from {} to {}",
        request.session_id, request.from_attendant_id, request.to_attendant_id
    );
    let result = tokio::task::spawn_blocking({
        let conn = state.conn.clone();
        let session_id = request.session_id;
        let to_attendant = request.to_attendant_id;
        let reason = request.reason.clone();
        move || {
            let mut db_conn = conn
                .get()
                .map_err(|e| format!("Failed to get database connection: {}", e))?;
            use crate::shared::models::schema::user_sessions;
            // Get current session
            let session: UserSession = user_sessions::table
                .filter(user_sessions::id.eq(session_id))
                .first(&mut db_conn)
                .map_err(|e| format!("Session not found: {}", e))?;
            // Update context_data with transfer info
            let mut ctx = session.context_data.clone();
            ctx["assigned_to"] = serde_json::json!(to_attendant.to_string());
            ctx["transferred_at"] = serde_json::json!(Utc::now().to_rfc3339());
            // A missing reason is stored as an empty string.
            ctx["transfer_reason"] = serde_json::json!(reason.unwrap_or_default());
            ctx["status"] = serde_json::json!("transferred");
            diesel::update(user_sessions::table.filter(user_sessions::id.eq(session_id)))
                .set((
                    user_sessions::context_data.eq(&ctx),
                    user_sessions::updated_at.eq(Utc::now()),
                ))
                .execute(&mut db_conn)
                .map_err(|e| format!("Failed to update session: {}", e))?;
            Ok::<(), String>(())
        }
    })
    .await;
    match result {
        Ok(Ok(())) => (
            StatusCode::OK,
            Json(serde_json::json!({
                "success": true,
                "session_id": request.session_id,
                "from_attendant": request.from_attendant_id,
                "to_attendant": request.to_attendant_id,
                "transferred_at": Utc::now().to_rfc3339()
            })),
        ),
        Ok(Err(e)) => {
            error!("Transfer error: {}", e);
            (
                StatusCode::INTERNAL_SERVER_ERROR,
                Json(serde_json::json!({
                    "success": false,
                    "error": e
                })),
            )
        }
        Err(e) => {
            // spawn_blocking join error (worker panicked or was cancelled).
            error!("Transfer error: {:?}", e);
            (
                StatusCode::INTERNAL_SERVER_ERROR,
                Json(serde_json::json!({
                    "success": false,
                    "error": format!("{:?}", e)
                })),
            )
        }
    }
}
/// POST /api/queue/resolve
/// Mark conversation as resolved
pub async fn resolve_conversation(
State(state): State<Arc<AppState>>,
Json(payload): Json<serde_json::Value>,
) -> impl IntoResponse {
let session_id = payload
.get("session_id")
.and_then(|v| v.as_str())
.and_then(|s| Uuid::parse_str(s).ok())
.unwrap_or_else(Uuid::nil);
info!("Resolving session {}", session_id);
let result = tokio::task::spawn_blocking({
let conn = state.conn.clone();
move || {
let mut db_conn = conn
.get()
.map_err(|e| format!("Failed to get database connection: {}", e))?;
use crate::shared::models::schema::user_sessions;
// Get current session
let session: UserSession = user_sessions::table
.filter(user_sessions::id.eq(session_id))
.first(&mut db_conn)
.map_err(|e| format!("Session not found: {}", e))?;
// Update context_data to mark as resolved
let mut ctx = session.context_data.clone();
ctx["status"] = serde_json::json!("resolved");
ctx["resolved_at"] = serde_json::json!(Utc::now().to_rfc3339());
ctx["resolved"] = serde_json::json!(true);
diesel::update(user_sessions::table.filter(user_sessions::id.eq(session_id)))
.set((
user_sessions::context_data.eq(&ctx),
user_sessions::updated_at.eq(Utc::now()),
))
.execute(&mut db_conn)
.map_err(|e| format!("Failed to update session: {}", e))?;
Ok::<(), String>(())
}
})
.await;
match result {
Ok(Ok(())) => (
StatusCode::OK,
Json(serde_json::json!({
"success": true,
"session_id": session_id,
"resolved_at": Utc::now().to_rfc3339()
})),
),
Ok(Err(e)) => {
error!("Resolve error: {}", e);
(
StatusCode::INTERNAL_SERVER_ERROR,
Json(serde_json::json!({
"success": false,
"error": e
})),
)
}
Err(e) => {
error!("Resolve error: {:?}", e);
(
StatusCode::INTERNAL_SERVER_ERROR,
Json(serde_json::json!({
"success": false,
"error": format!("{:?}", e)
})),
)
}
}
}
/// GET /api/queue/insights/{session_id}
/// Get bot insights for a conversation
///
/// Loads the last 10 messages of the session and derives a naive
/// keyword-based sentiment plus a canned suggested reply.
pub async fn get_insights(
    State(state): State<Arc<AppState>>,
    Path(session_id): Path<Uuid>,
) -> impl IntoResponse {
    info!("Getting insights for session {}", session_id);
    let result = tokio::task::spawn_blocking({
        let conn = state.conn.clone();
        move || {
            let mut db_conn = conn
                .get()
                .map_err(|e| format!("Failed to get database connection: {}", e))?;
            use crate::shared::models::schema::message_history;
            // Get recent messages (content + role), newest first.
            let messages: Vec<(String, i32)> = message_history::table
                .filter(message_history::session_id.eq(session_id))
                .select((message_history::content_encrypted, message_history::role))
                .order(message_history::created_at.desc())
                .limit(10)
                .load(&mut db_conn)
                .map_err(|e| format!("Failed to load messages: {}", e))?;
            // NOTE(review): `content_encrypted` is matched as plain text
            // below — confirm the column holds readable content here.
            // Lowercase each user message once instead of re-lowercasing
            // on every keyword check.
            let user_messages: Vec<String> = messages
                .iter()
                .filter(|(_, r)| *r == 0) // role 0 = user messages
                .map(|(c, _)| c.to_lowercase())
                .collect();
            let sentiment = if user_messages
                .iter()
                .any(|m| m.contains("urgent") || m.contains("problem") || m.contains("issue"))
            {
                "negative"
            } else if user_messages
                .iter()
                .any(|m| m.contains("thanks") || m.contains("great"))
            {
                "positive"
            } else {
                "neutral"
            };
            let suggested_reply = if sentiment == "negative" {
                "I understand this is frustrating. Let me help you resolve this immediately."
            } else {
                "How can I assist you further?"
            };
            Ok::<serde_json::Value, String>(serde_json::json!({
                "session_id": session_id,
                "sentiment": sentiment,
                "message_count": messages.len(),
                "suggested_reply": suggested_reply,
                "key_topics": ["support", "technical"],
                "priority": if sentiment == "negative" { "high" } else { "normal" },
                "language": "en"
            }))
        }
    })
    .await;
    match result {
        Ok(Ok(insights)) => (StatusCode::OK, Json(insights)),
        Ok(Err(e)) => {
            error!("Insights error: {}", e);
            (
                StatusCode::INTERNAL_SERVER_ERROR,
                Json(serde_json::json!({
                    "error": e
                })),
            )
        }
        Err(e) => {
            // spawn_blocking join error (worker panicked or was cancelled).
            error!("Task error: {}", e);
            (
                StatusCode::INTERNAL_SERVER_ERROR,
                Json(serde_json::json!({
                    "error": format!("Task error: {}", e)
                })),
            )
        }
    }
}

482
src/calendar/mod.rs Normal file
View file

@ -0,0 +1,482 @@
use axum::{
extract::{Path, Query, State},
http::StatusCode,
response::Json,
routing::{delete, get, post, put},
Router,
};
use chrono::{DateTime, Utc};
use diesel::prelude::*;
use serde::{Deserialize, Serialize};
use std::sync::Arc;
use crate::shared::utils::DbPool;
use tokio::sync::RwLock;
use uuid::Uuid;
// TODO: Replace sqlx queries with Diesel queries
/// A calendar event as stored and served by the calendar API.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CalendarEvent {
    pub id: Uuid,
    pub title: String,
    pub description: Option<String>,
    pub start_time: DateTime<Utc>,
    pub end_time: DateTime<Utc>,
    pub location: Option<String>,
    /// Attendee identifiers (format defined by callers; see get_user_events,
    /// which matches them against a user id).
    pub attendees: Vec<String>,
    /// Identifier of the user who created the event.
    pub organizer: String,
    /// Minutes before start_time to fire a reminder, if any.
    pub reminder_minutes: Option<i32>,
    /// Recurrence rule string, if the event repeats (format not enforced here).
    pub recurrence_rule: Option<String>,
    pub status: EventStatus,
    pub created_at: DateTime<Utc>,
    pub updated_at: DateTime<Utc>,
}
/// Lifecycle state of a calendar event; serialized in lowercase.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum EventStatus {
    Scheduled,
    InProgress,
    Completed,
    Cancelled,
}
/// Online-meeting details attached to a calendar event.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Meeting {
    pub id: Uuid,
    /// The calendar event this meeting belongs to.
    pub event_id: Uuid,
    /// Join URL, once provisioned by the platform.
    pub meeting_url: Option<String>,
    /// Platform-specific meeting identifier.
    pub meeting_id: Option<String>,
    pub platform: MeetingPlatform,
    pub recording_url: Option<String>,
    pub notes: Option<String>,
    pub action_items: Vec<ActionItem>,
}
/// Conferencing platform hosting a meeting; serialized in lowercase.
/// NOTE(review): `Other(String)` serializes externally tagged as
/// `{"other": "..."}` rather than a bare string — confirm clients expect that.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum MeetingPlatform {
    Zoom,
    Teams,
    Meet,
    Internal,
    Other(String),
}
/// A follow-up task captured during a meeting.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ActionItem {
    pub id: Uuid,
    pub description: String,
    /// Identifier of the person responsible.
    pub assignee: String,
    pub due_date: Option<DateTime<Utc>>,
    pub completed: bool,
}
/// A scheduled reminder for a calendar event.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CalendarReminder {
    pub id: Uuid,
    pub event_id: Uuid,
    /// When the reminder should fire.
    pub remind_at: DateTime<Utc>,
    pub message: String,
    pub channel: ReminderChannel,
    /// Set once the reminder has been delivered.
    pub sent: bool,
}
/// Delivery channel for reminders; serialized in lowercase.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum ReminderChannel {
    Email,
    Sms,
    Push,
    InApp,
}
/// Calendar service: database-backed event storage plus an in-memory
/// cache of all events, shared behind Arc so clones are cheap.
#[derive(Clone)]
pub struct CalendarEngine {
    db: Arc<DbPool>,
    // Refreshed by refresh_cache() after each mutation.
    cache: Arc<RwLock<Vec<CalendarEvent>>>,
}
impl CalendarEngine {
pub fn new(db: Arc<PgPool>) -> Self {
Self {
db,
cache: Arc::new(RwLock::new(Vec::new())),
}
}
pub async fn create_event(
&self,
event: CalendarEvent,
) -> Result<CalendarEvent, Box<dyn std::error::Error>> {
// TODO: Implement with Diesel
/*
let result = sqlx::query!(
r#"
INSERT INTO calendar_events
(id, title, description, start_time, end_time, location, attendees, organizer,
reminder_minutes, recurrence_rule, status, created_at, updated_at)
VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13)
RETURNING *
"#,
event.id,
event.title,
event.description,
event.start_time,
event.end_time,
event.location,
&event.attendees[..],
event.organizer,
event.reminder_minutes,
event.recurrence_rule,
serde_json::to_value(&event.status)?,
event.created_at,
event.updated_at
)
.fetch_one(self.db.as_ref())
.await?;
*/
self.refresh_cache().await?;
Ok(event)
Ok(event)
}
pub async fn update_event(
&self,
id: Uuid,
updates: serde_json::Value,
) -> Result<CalendarEvent, Box<dyn std::error::Error>> {
let updated_at = Utc::now();
let result = sqlx::query!(
r#"
UPDATE calendar_events
SET title = COALESCE($2, title),
description = COALESCE($3, description),
start_time = COALESCE($4, start_time),
end_time = COALESCE($5, end_time),
location = COALESCE($6, location),
updated_at = $7
WHERE id = $1
RETURNING *
"#,
id,
updates.get("title").and_then(|v| v.as_str()),
updates.get("description").and_then(|v| v.as_str()),
updates
.get("start_time")
.and_then(|v| DateTime::parse_from_rfc3339(v.as_str()?).ok())
.map(|dt| dt.with_timezone(&Utc)),
updates
.get("end_time")
.and_then(|v| DateTime::parse_from_rfc3339(v.as_str()?).ok())
.map(|dt| dt.with_timezone(&Utc)),
updates.get("location").and_then(|v| v.as_str()),
updated_at
)
.fetch_one(self.db.as_ref())
.await?;
self.refresh_cache().await?;
Ok(serde_json::from_value(serde_json::to_value(result)?)?)
}
pub async fn delete_event(&self, _id: Uuid) -> Result<bool, Box<dyn std::error::Error>> {
// TODO: Implement with Diesel
/*
let result = sqlx::query!("DELETE FROM calendar_events WHERE id = $1", id)
.execute(self.db.as_ref())
.await?;
*/
self.refresh_cache().await?;
Ok(false)
}
pub async fn get_events_range(
&self,
start: DateTime<Utc>,
end: DateTime<Utc>,
) -> Result<Vec<CalendarEvent>, Box<dyn std::error::Error>> {
// TODO: Implement with Diesel
/*
let results = sqlx::query_as!(
CalendarEvent,
r#"
SELECT * FROM calendar_events
WHERE start_time >= $1 AND end_time <= $2
ORDER BY start_time ASC
"#,
start,
end
)
.fetch_all(self.db.as_ref())
.await?;
*/
Ok(vec![])
}
pub async fn get_user_events(
&self,
user_id: &str,
) -> Result<Vec<CalendarEvent>, Box<dyn std::error::Error>> {
// TODO: Implement with Diesel
/*
let results = sqlx::query!(
r#"
SELECT * FROM calendar_events
WHERE organizer = $1 OR $1 = ANY(attendees)
ORDER BY start_time ASC
"#,
user_id
)
.fetch_all(self.db.as_ref())
.await?;
Ok(results
.into_iter()
.map(|r| serde_json::from_value(serde_json::to_value(r).unwrap()).unwrap())
.collect())
*/
Ok(vec![])
}
pub async fn create_meeting(
&self,
event_id: Uuid,
platform: MeetingPlatform,
) -> Result<Meeting, Box<dyn std::error::Error>> {
let meeting = Meeting {
id: Uuid::new_v4(),
event_id,
meeting_url: None,
meeting_id: None,
platform,
recording_url: None,
notes: None,
action_items: Vec::new(),
};
// TODO: Implement with Diesel
/*
sqlx::query!(
r#"
INSERT INTO meetings (id, event_id, platform, created_at)
VALUES ($1, $2, $3, $4)
"#,
meeting.id,
meeting.event_id,
meeting.platform,
meeting.created_at
)
.execute(self.db.as_ref())
.await?;
*/
Ok(meeting)
}
pub async fn schedule_reminder(
&self,
event_id: Uuid,
minutes_before: i32,
channel: ReminderChannel,
) -> Result<CalendarReminder, Box<dyn std::error::Error>> {
let event = self.get_event(event_id).await?;
let remind_at = event.start_time - chrono::Duration::minutes(minutes_before as i64);
let reminder = CalendarReminder {
id: Uuid::new_v4(),
event_id,
remind_at,
message: format!(
"Reminder: {} starts in {} minutes",
event.title, minutes_before
),
channel,
sent: false,
};
// TODO: Implement with Diesel
/*
sqlx::query!(
r#"
INSERT INTO calendar_reminders (id, event_id, remind_at, message, channel, sent)
VALUES ($1, $2, $3, $4, $5, $6)
"#,
reminder.id,
reminder.event_id,
reminder.remind_at,
reminder.message,
reminder.channel,
reminder.sent
)
.execute(self.db.as_ref())
.await?;
*/
Ok(reminder)
}
pub async fn get_event(&self, _id: Uuid) -> Result<CalendarEvent, Box<dyn std::error::Error>> {
// TODO: Implement with Diesel
/*
let result = sqlx::query!("SELECT * FROM calendar_events WHERE id = $1", id)
.fetch_one(self.db.as_ref())
.await?;
Ok(serde_json::from_value(serde_json::to_value(result)?)?)
*/
Err("Not implemented".into())
}
pub async fn check_conflicts(
&self,
start: DateTime<Utc>,
end: DateTime<Utc>,
user_id: &str,
) -> Result<Vec<CalendarEvent>, Box<dyn std::error::Error>> {
// TODO: Implement with Diesel
/*
let results = sqlx::query!(
r#"
SELECT * FROM calendar_events
WHERE (organizer = $1 OR $1 = ANY(attendees))
AND NOT (end_time <= $2 OR start_time >= $3)
"#,
user_id,
start,
end
)
.fetch_all(self.db.as_ref())
.await?;
Ok(results
.into_iter()
.map(|r| serde_json::from_value(serde_json::to_value(r).unwrap()).unwrap())
.collect())
*/
Ok(vec![])
}
async fn refresh_cache(&self) -> Result<(), Box<dyn std::error::Error>> {
// TODO: Implement with Diesel
/*
let results = sqlx::query!("SELECT * FROM calendar_events ORDER BY start_time ASC")
.fetch_all(self.db.as_ref())
.await?;
let events: Vec<CalendarEvent> = results
.into_iter()
.map(|r| serde_json::from_value(serde_json::to_value(r).unwrap()).unwrap())
.collect();
*/
let events: Vec<CalendarEvent> = vec![];
let mut cache = self.cache.write().await;
*cache = events;
Ok(())
}
}
/// Query parameters for GET /events: either a (start, end) RFC 3339 time
/// range, or a user_id; supplying neither is a 400.
#[derive(Deserialize)]
pub struct EventQuery {
    pub start: Option<String>,
    pub end: Option<String>,
    pub user_id: Option<String>,
}
/// Request body for POST /meetings.
#[derive(Deserialize)]
pub struct MeetingRequest {
    pub event_id: Uuid,
    pub platform: MeetingPlatform,
}
async fn create_event_handler(
State(engine): State<Arc<CalendarEngine>>,
Json(event): Json<CalendarEvent>,
) -> Result<Json<CalendarEvent>, StatusCode> {
match engine.create_event(event).await {
Ok(created) => Ok(Json(created)),
Err(_) => Err(StatusCode::INTERNAL_SERVER_ERROR),
}
}
async fn get_events_handler(
State(engine): State<Arc<CalendarEngine>>,
Query(params): Query<EventQuery>,
) -> Result<Json<Vec<CalendarEvent>>, StatusCode> {
if let (Some(start), Some(end)) = (params.start, params.end) {
let start = DateTime::parse_from_rfc3339(&start)
.map(|dt| dt.with_timezone(&Utc))
.unwrap_or_else(|_| Utc::now());
let end = DateTime::parse_from_rfc3339(&end)
.map(|dt| dt.with_timezone(&Utc))
.unwrap_or_else(|_| Utc::now() + chrono::Duration::days(30));
match engine.get_events_range(start, end).await {
Ok(events) => Ok(Json(events)),
Err(_) => Err(StatusCode::INTERNAL_SERVER_ERROR),
}
} else if let Some(user_id) = params.user_id {
match engine.get_user_events(&user_id).await {
Ok(events) => Ok(Json(events)),
Err(_) => Err(StatusCode::INTERNAL_SERVER_ERROR),
}
} else {
Err(StatusCode::BAD_REQUEST)
}
}
async fn update_event_handler(
State(engine): State<Arc<CalendarEngine>>,
Path(id): Path<Uuid>,
Json(updates): Json<serde_json::Value>,
) -> Result<Json<CalendarEvent>, StatusCode> {
match engine.update_event(id, updates).await {
Ok(updated) => Ok(Json(updated)),
Err(_) => Err(StatusCode::INTERNAL_SERVER_ERROR),
}
}
/// DELETE /events/:id — 204 when deleted, 404 when nothing matched,
/// 500 on engine failure.
async fn delete_event_handler(
    State(engine): State<Arc<CalendarEngine>>,
    Path(id): Path<Uuid>,
) -> Result<StatusCode, StatusCode> {
    let deleted = engine
        .delete_event(id)
        .await
        .map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?;
    if deleted {
        Ok(StatusCode::NO_CONTENT)
    } else {
        Err(StatusCode::NOT_FOUND)
    }
}
async fn schedule_meeting_handler(
State(engine): State<Arc<CalendarEngine>>,
Json(req): Json<MeetingRequest>,
) -> Result<Json<Meeting>, StatusCode> {
match engine.create_meeting(req.event_id, req.platform).await {
Ok(meeting) => Ok(Json(meeting)),
Err(_) => Err(StatusCode::INTERNAL_SERVER_ERROR),
}
}
/// Build the calendar API router:
/// - POST/GET `/events`
/// - PUT/DELETE `/events/:id`
/// - POST `/meetings`
/// The engine is shared with every handler via axum state.
pub fn routes(engine: Arc<CalendarEngine>) -> Router {
    Router::new()
        .route(
            "/events",
            post(create_event_handler).get(get_events_handler),
        )
        .route(
            "/events/:id",
            put(update_event_handler).delete(delete_event_handler),
        )
        .route("/meetings", post(schedule_meeting_handler))
        .with_state(engine)
}

113
src/console/chat_panel.rs Normal file
View file

@ -0,0 +1,113 @@
use color_eyre::Result;
use std::sync::Arc;
use crate::shared::state::AppState;
use crate::shared::models::BotResponse;
use tokio::sync::mpsc;
use uuid::Uuid;
/// Console chat panel state: transcript lines, the in-progress input line,
/// a per-panel session/user identity, and the channel used to receive
/// streamed bot responses.
pub struct ChatPanel {
    // Rendered transcript; entries are prefixed "You: " / "Bot: ".
    pub messages: Vec<String>,
    // Characters typed but not yet sent.
    pub input_buffer: String,
    pub session_id: Uuid,
    pub user_id: Uuid,
    // Installed by send_message, drained by poll_response.
    pub response_rx: Option<mpsc::Receiver<BotResponse>>,
}
impl ChatPanel {
    /// Build a fresh panel with a new random session/user identity and a
    /// welcome banner.
    pub fn new(_app_state: Arc<AppState>) -> Self {
        Self {
            messages: vec!["Welcome to General Bots Console Chat!".to_string()],
            input_buffer: String::new(),
            session_id: Uuid::new_v4(),
            user_id: Uuid::new_v4(),
            response_rx: None,
        }
    }
    /// Append a typed character to the input line.
    pub fn add_char(&mut self, c: char) {
        self.input_buffer.push(c);
    }
    /// Remove the last character of the input line (no-op when empty).
    pub fn backspace(&mut self) {
        self.input_buffer.pop();
    }
    /// Send the current input buffer to the bot and start streaming the reply.
    ///
    /// The message is echoed into the transcript, the buffer cleared, and a
    /// channel installed in `response_rx` for `poll_response` to drain.
    /// Empty/whitespace-only input is ignored.
    pub async fn send_message(&mut self, bot_name: &str, app_state: &Arc<AppState>) -> Result<()> {
        if self.input_buffer.trim().is_empty() {
            return Ok(());
        }
        let message = self.input_buffer.clone();
        self.messages.push(format!("You: {}", message));
        self.input_buffer.clear();
        let bot_id = self.get_bot_id(bot_name, app_state).await?;
        let user_message = crate::shared::models::UserMessage {
            bot_id: bot_id.to_string(),
            user_id: self.user_id.to_string(),
            session_id: self.session_id.to_string(),
            channel: "console".to_string(),
            content: message,
            message_type: 1,
            media_url: None,
            timestamp: chrono::Utc::now(),
            context_name: None,
        };
        let (tx, rx) = mpsc::channel::<BotResponse>(100);
        self.response_rx = Some(rx);
        let orchestrator = crate::bot::BotOrchestrator::new(app_state.clone());
        // Streaming errors are intentionally ignored; any partial output
        // still arrives through `rx`.
        let _ = orchestrator.stream_response(user_message, tx).await;
        Ok(())
    }
    /// Drain pending streamed chunks into the transcript.
    ///
    /// Non-final chunks with content extend the trailing "Bot: " line so the
    /// reply grows in place; a final empty chunk ends the drain loop.
    /// NOTE(review): a final chunk that carries content is dropped — confirm
    /// the orchestrator always sends the completion marker with empty content.
    pub async fn poll_response(&mut self, _bot_name: &str) -> Result<()> {
        if let Some(rx) = &mut self.response_rx {
            while let Ok(response) = rx.try_recv() {
                if !response.content.is_empty() && !response.is_complete {
                    if let Some(last_msg) = self.messages.last_mut() {
                        if last_msg.starts_with("Bot: ") {
                            last_msg.push_str(&response.content);
                        } else {
                            self.messages.push(format!("Bot: {}", response.content));
                        }
                    } else {
                        self.messages.push(format!("Bot: {}", response.content));
                    }
                }
                if response.is_complete && response.content.is_empty() {
                    break;
                }
            }
        }
        Ok(())
    }
    /// Look up a bot's UUID by name.
    ///
    /// Pool and query errors now propagate to the caller via `?` instead of
    /// the previous `unwrap()`, which would panic the whole console when the
    /// connection pool was exhausted.
    async fn get_bot_id(&self, bot_name: &str, app_state: &Arc<AppState>) -> Result<Uuid> {
        use crate::shared::models::schema::bots::dsl::*;
        use diesel::prelude::*;
        let mut conn = app_state.conn.get()?;
        let bot_id = bots
            .filter(name.eq(bot_name))
            .select(id)
            .first::<Uuid>(&mut *conn)?;
        Ok(bot_id)
    }
    /// Render the transcript plus input line as plain text, showing at most
    /// the last 15 messages.
    pub fn render(&self) -> String {
        let mut lines = Vec::new();
        lines.push("╔═══════════════════════════════════════╗".to_string());
        lines.push("║ CONVERSATION ║".to_string());
        lines.push("╚═══════════════════════════════════════╝".to_string());
        lines.push("".to_string());
        // Only show the tail of the transcript so it fits the panel.
        let visible_start = self.messages.len().saturating_sub(15);
        // NOTE(review): the original branched on "You: "/"Bot: " prefixes
        // but formatted all three cases identically, so one format suffices.
        for msg in &self.messages[visible_start..] {
            lines.push(format!(" {}", msg));
        }
        lines.push("".to_string());
        lines.push("─────────────────────────────────────────".to_string());
        lines.push(format!(" > {}_", self.input_buffer));
        lines.push("".to_string());
        lines.push(" Enter: Send | Tab: Switch Panel".to_string());
        lines.join("\n")
    }
}

142
src/console/editor.rs Normal file
View file

@ -0,0 +1,142 @@
use color_eyre::Result;
use std::sync::Arc;
use crate::shared::state::AppState;
/// Minimal in-memory text editor for a single drive-backed file.
pub struct Editor {
    file_path: String,    // display path, "<bucket>/<key>"
    bucket: String,       // drive bucket holding the file
    key: String,          // object key within the bucket
    content: String,      // full file contents held in memory
    cursor_pos: usize,    // cursor position as a BYTE offset into `content`
    scroll_offset: usize, // first visible line index when rendering
    modified: bool,       // true while there are unsaved changes
}
impl Editor {
/// Load `bucket/path` from the drive into a new editor.
///
/// A fetch error (e.g. missing object) or an absent drive client opens an
/// empty buffer instead of failing, so new files can be created; only
/// failures while reading the response body propagate as errors.
pub async fn load(app_state: &Arc<AppState>, bucket: &str, path: &str) -> Result<Self> {
    let content = if let Some(drive) = &app_state.drive {
        match drive.get_object().bucket(bucket).key(path).send().await {
            Ok(response) => {
                let bytes = response.body.collect().await?.into_bytes();
                // Invalid UTF-8 is replaced rather than rejected.
                String::from_utf8_lossy(&bytes).to_string()
            }
            Err(_) => String::new(),
        }
    } else {
        String::new()
    };
    Ok(Self {
        file_path: format!("{}/{}", bucket, path),
        bucket: bucket.to_string(),
        key: path.to_string(),
        content,
        cursor_pos: 0,
        scroll_offset: 0,
        modified: false,
    })
}
/// Write the buffer back to the drive and clear the modified flag.
///
/// Silently does nothing when no drive client is configured — the
/// modified flag then stays set.
pub async fn save(&mut self, app_state: &Arc<AppState>) -> Result<()> {
    if let Some(drive) = &app_state.drive {
        drive.put_object()
            .bucket(&self.bucket)
            .key(&self.key)
            .body(self.content.as_bytes().to_vec().into())
            .send()
            .await?;
        self.modified = false;
    }
    Ok(())
}
/// Display path of the open file ("bucket/key").
pub fn file_path(&self) -> &str {
    self.file_path.as_str()
}
/// Render the visible window of the buffer (25 lines from scroll_offset)
/// plus a status bar, as plain text. `cursor_blink` toggles the cursor
/// indicator on the cursor's line.
///
/// NOTE(review): `cursor_pos` is sliced as a byte offset — assumes the
/// cursor always sits on a UTF-8 boundary; confirm for non-ASCII input.
/// NOTE(review): `cursor_line` is `lines().count()` of the text before the
/// cursor, which is one past the 0-based line index except at the start of
/// a line — verify the highlighted row matches the real cursor row.
pub fn render(&self, cursor_blink: bool) -> String {
    let lines: Vec<&str> = self.content.lines().collect();
    let total_lines = lines.len().max(1);
    let visible_lines = 25;
    let cursor_line = self.content[..self.cursor_pos].lines().count();
    // Column = length of the last (partial) line before the cursor.
    let cursor_col = self.content[..self.cursor_pos]
        .lines()
        .last()
        .map(|line| line.len())
        .unwrap_or(0);
    let start = self.scroll_offset;
    let end = (start + visible_lines).min(total_lines);
    let mut display_lines = Vec::new();
    for i in start..end {
        let line_num = i + 1;
        let line_content = if i < lines.len() { lines[i] } else { "" };
        let is_cursor_line = i == cursor_line;
        // Pad up to the cursor column when blinking on the cursor line.
        let cursor_indicator = if is_cursor_line && cursor_blink {
            let spaces = " ".repeat(cursor_col);
            format!("{}", spaces)
        } else {
            String::new()
        };
        display_lines.push(format!(" {:4}{}{}", line_num, line_content, cursor_indicator));
    }
    if display_lines.is_empty() {
        // Empty buffer: show a bare line-1 gutter.
        let cursor_indicator = if cursor_blink { "" } else { "" };
        display_lines.push(format!(" 1 │ {}", cursor_indicator));
    }
    display_lines.push("".to_string());
    display_lines.push("─────────────────────────────────────────────────────────────".to_string());
    let status = if self.modified { "MODIFIED" } else { "SAVED" };
    display_lines.push(format!(" {} {} │ Line: {}, Col: {}",
        status, self.file_path, cursor_line + 1, cursor_col + 1));
    display_lines.push(" Ctrl+S: Save │ Ctrl+W: Close │ Esc: Close without saving".to_string());
    display_lines.join("\n")
}
pub fn move_up(&mut self) {
if let Some(prev_line_end) = self.content[..self.cursor_pos].rfind('\n') {
if let Some(prev_prev_line_end) = self.content[..prev_line_end].rfind('\n') {
let target_pos = prev_prev_line_end + 1 + (self.cursor_pos - prev_line_end - 1).min(
self.content[prev_prev_line_end + 1..prev_line_end].len()
);
self.cursor_pos = target_pos;
} else {
self.cursor_pos = (self.cursor_pos - prev_line_end - 1).min(prev_line_end);
}
}
}
pub fn move_down(&mut self) {
if let Some(next_line_start) = self.content[self.cursor_pos..].find('\n') {
let current_line_start = self.content[..self.cursor_pos].rfind('\n').map(|pos| pos + 1).unwrap_or(0);
let next_line_absolute = self.cursor_pos + next_line_start + 1;
if let Some(next_next_line_start) = self.content[next_line_absolute..].find('\n') {
let target_pos = next_line_absolute + (self.cursor_pos - current_line_start).min(next_next_line_start);
self.cursor_pos = target_pos;
} else {
let target_pos = next_line_absolute + (self.cursor_pos - current_line_start).min(
self.content[next_line_absolute..].len()
);
self.cursor_pos = target_pos;
}
}
}
pub fn move_left(&mut self) {
if self.cursor_pos > 0 {
self.cursor_pos -= 1;
}
}
pub fn move_right(&mut self) {
if self.cursor_pos < self.content.len() {
self.cursor_pos += 1;
}
}
pub fn insert_char(&mut self, c: char) {
self.modified = true;
self.content.insert(self.cursor_pos, c);
self.cursor_pos += 1;
}
pub fn backspace(&mut self) {
if self.cursor_pos > 0 {
self.modified = true;
self.content.remove(self.cursor_pos - 1);
self.cursor_pos -= 1;
}
}
pub fn insert_newline(&mut self) {
self.modified = true;
self.content.insert(self.cursor_pos, '\n');
self.cursor_pos += 1;
}
}

268
src/console/file_tree.rs Normal file
View file

@ -0,0 +1,268 @@
use crate::shared::state::AppState;
use color_eyre::Result;
use std::sync::Arc;
/// A selectable row in the file-explorer panel.
#[derive(Debug, Clone)]
pub enum TreeNode {
    // A top-level storage bucket (".gbai" buckets represent bots).
    Bucket { name: String },
    // A pseudo-folder derived from object-key prefixes within a bucket.
    Folder { bucket: String, path: String },
    // A leaf object (file) within a bucket.
    File { bucket: String, path: String },
}
/// Navigable view over the drive's buckets and objects.
pub struct FileTree {
    app_state: Arc<AppState>,
    // Rows to display: (rendered label, node it stands for).
    items: Vec<(String, TreeNode)>,
    // Index of the highlighted row in `items`.
    selected: usize,
    // Bucket currently opened, or `None` at the bucket-list root.
    current_bucket: Option<String>,
    // Path segments of the currently opened folder inside `current_bucket`.
    current_path: Vec<String>,
}
impl FileTree {
    /// Creates an empty tree; call `load_root` to populate it.
    pub fn new(app_state: Arc<AppState>) -> Self {
        Self {
            app_state,
            items: Vec::new(),
            selected: 0,
            current_bucket: None,
            current_path: Vec::new(),
        }
    }

    /// Lists all buckets at the top level. Errors and the "no drive" case are
    /// rendered as placeholder rows instead of failing the UI.
    pub async fn load_root(&mut self) -> Result<()> {
        self.items.clear();
        self.current_bucket = None;
        self.current_path.clear();
        if let Some(drive) = &self.app_state.drive {
            let result = drive.list_buckets().send().await;
            match result {
                Ok(response) => {
                    let buckets = response.buckets();
                    for bucket in buckets {
                        if let Some(name) = bucket.name() {
                            // ".gbai" buckets are bot packages; mark them with a bot icon.
                            let icon = if name.ends_with(".gbai") {
                                "🤖"
                            } else {
                                "📦"
                            };
                            let display = format!("{} {}", icon, name);
                            self.items.push((
                                display,
                                TreeNode::Bucket {
                                    name: name.to_string(),
                                },
                            ));
                        }
                    }
                }
                Err(e) => {
                    // Show the error as a (non-navigable) row.
                    self.items.push((
                        format!("✗ Error: {}", e),
                        TreeNode::Bucket {
                            name: String::new(),
                        },
                    ));
                }
            }
        } else {
            self.items.push((
                "✗ Drive not connected".to_string(),
                TreeNode::Bucket {
                    name: String::new(),
                },
            ));
        }
        if self.items.is_empty() {
            self.items.push((
                "(no buckets found)".to_string(),
                TreeNode::Bucket {
                    name: String::new(),
                },
            ));
        }
        self.selected = 0;
        Ok(())
    }

    /// Opens `bucket` at its root and lists its contents.
    pub async fn enter_bucket(&mut self, bucket: String) -> Result<()> {
        self.current_bucket = Some(bucket.clone());
        self.current_path.clear();
        self.load_bucket_contents(&bucket, "").await
    }

    /// Opens the folder `path` inside `bucket` and lists its contents.
    pub async fn enter_folder(&mut self, bucket: String, path: String) -> Result<()> {
        self.current_bucket = Some(bucket.clone());
        // Track the folder as normalized path segments (no empty pieces).
        let parts: Vec<&str> = path
            .trim_matches('/')
            .split('/')
            .filter(|s| !s.is_empty())
            .collect();
        self.current_path = parts.iter().map(|s| s.to_string()).collect();
        self.load_bucket_contents(&bucket, &path).await
    }

    /// Steps one level up (folder → parent folder → bucket list).
    /// Returns true when navigation changed; the caller is expected to call
    /// `refresh_current` afterwards to reload the listing.
    pub fn go_up(&mut self) -> bool {
        if self.current_path.is_empty() {
            if self.current_bucket.is_some() {
                self.current_bucket = None;
                return true;
            }
            return false;
        }
        self.current_path.pop();
        true
    }

    /// Reloads whatever level is currently open.
    pub async fn refresh_current(&mut self) -> Result<()> {
        if let Some(bucket) = &self.current_bucket.clone() {
            let path = self.current_path.join("/");
            self.load_bucket_contents(bucket, &path).await
        } else {
            self.load_root().await
        }
    }

    /// Lists one "directory level" of `bucket` under `prefix`, synthesizing
    /// folders from key prefixes and decorating files by extension.
    async fn load_bucket_contents(&mut self, bucket: &str, prefix: &str) -> Result<()> {
        self.items.clear();
        // Always offer a way back up as the first row.
        self.items.push((
            "⬆️ .. (go back)".to_string(),
            TreeNode::Folder {
                bucket: bucket.to_string(),
                path: "..".to_string(),
            },
        ));
        if let Some(drive) = &self.app_state.drive {
            // S3 prefixes must end in '/' to act as a folder boundary.
            let normalized_prefix = if prefix.is_empty() {
                String::new()
            } else if prefix.ends_with('/') {
                prefix.to_string()
            } else {
                format!("{}/", prefix)
            };
            // Page through list_objects_v2 until the listing is complete.
            let mut continuation_token = None;
            let mut all_keys = Vec::new();
            loop {
                let mut request = drive.list_objects_v2().bucket(bucket);
                if !normalized_prefix.is_empty() {
                    request = request.prefix(&normalized_prefix);
                }
                if let Some(token) = continuation_token {
                    request = request.continuation_token(token);
                }
                let result = request.send().await?;
                for obj in result.contents() {
                    if let Some(key) = obj.key() {
                        all_keys.push(key.to_string());
                    }
                }
                if !result.is_truncated.unwrap_or(false) {
                    break;
                }
                continuation_token = result.next_continuation_token;
            }
            // Split keys into immediate subfolders vs. files at this level.
            let mut folders = std::collections::HashSet::new();
            let mut files = Vec::new();
            for key in all_keys {
                // Skip the placeholder object for the folder itself.
                if key == normalized_prefix {
                    continue;
                }
                let relative =
                    if !normalized_prefix.is_empty() && key.starts_with(&normalized_prefix) {
                        &key[normalized_prefix.len()..]
                    } else {
                        &key
                    };
                if relative.is_empty() {
                    continue;
                }
                if let Some(slash_pos) = relative.find('/') {
                    // Anything with a deeper '/' contributes only its first segment.
                    let folder_name = &relative[..slash_pos];
                    if !folder_name.is_empty() {
                        folders.insert(folder_name.to_string());
                    }
                } else {
                    files.push((relative.to_string(), key.clone()));
                }
            }
            // Folders first (sorted), then files (sorted).
            let mut folder_vec: Vec<String> = folders.into_iter().collect();
            folder_vec.sort();
            for folder_name in folder_vec {
                let full_path = if normalized_prefix.is_empty() {
                    folder_name.clone()
                } else {
                    format!("{}{}", normalized_prefix, folder_name)
                };
                let display = format!("📁 {}/", folder_name);
                self.items.push((
                    display,
                    TreeNode::Folder {
                        bucket: bucket.to_string(),
                        path: full_path,
                    },
                ));
            }
            files.sort_by(|(a, _), (b, _)| a.cmp(b));
            for (name, full_path) in files {
                // Pick an icon from the file extension.
                let icon = if name.ends_with(".bas") {
                    "⚙️"
                } else if name.ends_with(".ast") {
                    "🔧"
                } else if name.ends_with(".csv") {
                    "📊"
                } else if name.ends_with(".gbkb") {
                    "📚"
                } else if name.ends_with(".json") {
                    "🔖"
                } else {
                    "📄"
                };
                let display = format!("{} {}", icon, name);
                self.items.push((
                    display,
                    TreeNode::File {
                        bucket: bucket.to_string(),
                        path: full_path,
                    },
                ));
            }
        }
        // Only the ".." row present → the folder is empty; say so.
        if self.items.len() == 1 {
            self.items.push((
                "(empty folder)".to_string(),
                TreeNode::Folder {
                    bucket: bucket.to_string(),
                    path: String::new(),
                },
            ));
        }
        self.selected = 0;
        Ok(())
    }

    /// Rows to display, in order.
    pub fn render_items(&self) -> &[(String, TreeNode)] {
        &self.items
    }

    /// Index of the highlighted row.
    pub fn selected_index(&self) -> usize {
        self.selected
    }

    /// Node behind the highlighted row, if any.
    pub fn get_selected_node(&self) -> Option<&TreeNode> {
        self.items.get(self.selected).map(|(_, node)| node)
    }

    /// Bot name implied by the current position: the opened ".gbai" bucket,
    /// or a highlighted ".gbai" bucket row; `None` otherwise.
    pub fn get_selected_bot(&self) -> Option<String> {
        if let Some(bucket) = &self.current_bucket {
            if bucket.ends_with(".gbai") {
                return Some(bucket.trim_end_matches(".gbai").to_string());
            }
        }
        if let Some((_, node)) = self.items.get(self.selected) {
            match node {
                TreeNode::Bucket { name } => {
                    if name.ends_with(".gbai") {
                        return Some(name.trim_end_matches(".gbai").to_string());
                    }
                }
                _ => {}
            }
        }
        None
    }

    /// Moves the highlight up one row (stops at the top).
    pub fn move_up(&mut self) {
        if self.selected > 0 {
            self.selected -= 1;
        }
    }

    /// Moves the highlight down one row (stops at the bottom).
    pub fn move_down(&mut self) {
        if self.selected < self.items.len().saturating_sub(1) {
            self.selected += 1;
        }
    }
}

64
src/console/log_panel.rs Normal file
View file

@ -0,0 +1,64 @@
use std::sync::{Arc, Mutex};
use log::{Log, Metadata, LevelFilter, Record, SetLoggerError};
use chrono::Local;
/// Rolling buffer of formatted log lines shown in the UI's log pane.
pub struct LogPanel {
    logs: Vec<String>,
    max_logs: usize,
}

impl LogPanel {
    /// Creates an empty panel capped at 1000 retained entries.
    pub fn new() -> Self {
        let capacity = 1000;
        Self {
            logs: Vec::with_capacity(capacity),
            max_logs: capacity,
        }
    }

    /// Appends one entry, evicting the oldest once the cap is reached.
    pub fn add_log(&mut self, entry: &str) {
        while self.logs.len() >= self.max_logs {
            self.logs.remove(0);
        }
        self.logs.push(entry.to_string());
    }

    /// Renders the ten most recent entries (oldest first) joined by newlines.
    pub fn render(&self) -> String {
        let start = self.logs.len().saturating_sub(10);
        self.logs[start..].join("\n")
    }
}
/// `log::Log` implementation that forwards records into the shared `LogPanel`.
pub struct UiLogger {
    // Panel shared with the UI thread; appended to under its mutex.
    log_panel: Arc<Mutex<LogPanel>>,
    // Records above this level are dropped.
    filter: LevelFilter,
}
impl Log for UiLogger {
    /// A record passes when its level is at or above the configured filter.
    fn enabled(&self, metadata: &Metadata) -> bool {
        metadata.level() <= self.filter
    }

    /// Formats an accepted record as "[HH:MM:SS] LVL message" and appends it
    /// to the shared panel. If the panel lock is unavailable the record is
    /// silently dropped rather than blocking the logging call site.
    fn log(&self, record: &Record) {
        if !self.enabled(record.metadata()) {
            return;
        }
        let level_icon = match record.level() {
            log::Level::Error => "ERR",
            log::Level::Warn => "WRN",
            log::Level::Info => "INF",
            log::Level::Debug => "DBG",
            log::Level::Trace => "TRC",
        };
        let timestamp = Local::now().format("%H:%M:%S");
        let log_entry = format!("[{}] {} {}", timestamp, level_icon, record.args());
        if let Ok(mut panel) = self.log_panel.lock() {
            panel.add_log(&log_entry);
        }
    }

    fn flush(&self) {}
}
/// Installs a `UiLogger` (Info-filtered) as the process-wide logger.
///
/// The global max level is set to `Trace`; the Info-level cut happens inside
/// `UiLogger::enabled`, so all records reach the logger first.
pub fn init_logger(log_panel: Arc<Mutex<LogPanel>>) -> Result<(), SetLoggerError> {
    let ui_logger = UiLogger {
        log_panel,
        filter: LevelFilter::Info,
    };
    log::set_boxed_logger(Box::new(ui_logger))?;
    log::set_max_level(LevelFilter::Trace);
    Ok(())
}

826
src/console/mod.rs Normal file
View file

@ -0,0 +1,826 @@
use crate::shared::state::AppState;
use color_eyre::Result;
use crossterm::{
event::{self, Event, KeyCode, KeyModifiers},
execute,
terminal::{disable_raw_mode, enable_raw_mode, EnterAlternateScreen, LeaveAlternateScreen},
};
use log::LevelFilter;
use ratatui::{
backend::CrosstermBackend,
layout::{Constraint, Direction, Layout, Rect},
style::{Color, Modifier, Style},
text::{Line, Span},
widgets::{Block, Borders, List, ListItem, Paragraph, Wrap},
Frame, Terminal,
};
use std::io;
use std::sync::Arc;
use std::sync::Mutex;
mod chat_panel;
mod editor;
pub mod file_tree;
mod log_panel;
mod status_panel;
use chat_panel::ChatPanel;
use editor::Editor;
use file_tree::{FileTree, TreeNode};
use log_panel::{init_logger, LogPanel};
use status_panel::StatusPanel;
/// Top-level terminal UI: owns every panel and routes input between them.
pub struct XtreeUI {
    // Backend state; `None` until bootstrap completes (loading screen shown).
    app_state: Option<Arc<AppState>>,
    file_tree: Option<FileTree>,
    status_panel: Option<StatusPanel>,
    // Shared with the global logger, which appends from any thread.
    log_panel: Arc<Mutex<LogPanel>>,
    chat_panel: Option<ChatPanel>,
    // `Some` while a file is open; its presence switches the middle column.
    editor: Option<Editor>,
    active_panel: ActivePanel,
    should_quit: bool,
    // Receives bootstrap progress events from the startup task.
    progress_channel: Option<
        Arc<tokio::sync::Mutex<tokio::sync::mpsc::UnboundedReceiver<crate::BootstrapProgress>>>,
    >,
    // Human-readable bootstrap phase shown on the loading screen.
    bootstrap_status: String,
}
/// Which panel currently receives keyboard input.
#[derive(Debug, Clone, Copy, PartialEq)]
enum ActivePanel {
    FileTree,
    Editor,
    Status,
    Logs,
    Chat,
}
impl XtreeUI {
    /// Creates the UI shell before any backend state exists; panels that need
    /// `AppState` stay `None` until `set_app_state` is called.
    pub fn new() -> Self {
        let log_panel = Arc::new(Mutex::new(LogPanel::new()));
        Self {
            app_state: None,
            file_tree: None,
            status_panel: None,
            log_panel: log_panel.clone(),
            chat_panel: None,
            editor: None,
            // Logs is the only meaningful panel until bootstrap finishes.
            active_panel: ActivePanel::Logs,
            should_quit: false,
            progress_channel: None,
            bootstrap_status: "Initializing...".to_string(),
        }
    }
    /// Wires in the receiver that delivers bootstrap progress events; they are
    /// drained each pass of the event loop to update the loading screen.
    pub fn set_progress_channel(
        &mut self,
        rx: Arc<tokio::sync::Mutex<tokio::sync::mpsc::UnboundedReceiver<crate::BootstrapProgress>>>,
    ) {
        self.progress_channel = Some(rx);
    }
    /// Installs the bootstrapped backend state, builds the state-dependent
    /// panels, and switches focus from the loading view to the file tree.
    pub fn set_app_state(&mut self, app_state: Arc<AppState>) {
        self.file_tree = Some(FileTree::new(app_state.clone()));
        self.status_panel = Some(StatusPanel::new(app_state.clone()));
        self.chat_panel = Some(ChatPanel::new(app_state.clone()));
        self.app_state = Some(app_state);
        self.active_panel = ActivePanel::FileTree;
        self.bootstrap_status = "Ready".to_string();
    }
    /// Enters the terminal UI: installs error hooks, switches to the alternate
    /// screen in raw mode, runs the event loop, then restores the terminal.
    ///
    /// Returns immediately (without error) when stdout is not a TTY, so the
    /// process can also run headless.
    pub fn start_ui(&mut self) -> Result<()> {
        color_eyre::install()?;
        if !std::io::IsTerminal::is_terminal(&std::io::stdout()) {
            return Ok(());
        }
        enable_raw_mode()?;
        let mut stdout = io::stdout();
        execute!(stdout, EnterAlternateScreen)?;
        let backend = CrosstermBackend::new(stdout);
        let mut terminal = Terminal::new(backend)?;
        init_logger(self.log_panel.clone())?;
        log::set_max_level(LevelFilter::Trace);
        // Run the loop, then restore the terminal even if the loop errored,
        // and only afterwards surface the loop's result.
        let result = self.run_event_loop(&mut terminal);
        disable_raw_mode()?;
        execute!(terminal.backend_mut(), LeaveAlternateScreen)?;
        terminal.show_cursor()?;
        result
    }
    /// Main synchronous draw/input loop. Async work (data refresh, input
    /// handlers) is bridged through a dedicated tokio runtime via `block_on`.
    /// NOTE(review): blocking the UI thread on async handlers can stall
    /// rendering if a handler is slow — confirm this is acceptable.
    fn run_event_loop(
        &mut self,
        terminal: &mut Terminal<CrosstermBackend<io::Stdout>>,
    ) -> Result<()> {
        let mut last_update = std::time::Instant::now();
        let update_interval = std::time::Duration::from_millis(1000);
        let mut cursor_blink = false;
        let mut last_blink = std::time::Instant::now();
        let rt = tokio::runtime::Runtime::new()?;
        loop {
            // Drain any pending bootstrap progress messages without blocking.
            if let Some(ref progress_rx) = self.progress_channel {
                if let Ok(mut rx) = progress_rx.try_lock() {
                    while let Ok(progress) = rx.try_recv() {
                        self.bootstrap_status = match progress {
                            crate::BootstrapProgress::StartingBootstrap => {
                                "Starting bootstrap...".to_string()
                            }
                            crate::BootstrapProgress::InstallingComponent(name) => {
                                format!("Installing: {}", name)
                            }
                            crate::BootstrapProgress::StartingComponent(name) => {
                                format!("Starting: {}", name)
                            }
                            crate::BootstrapProgress::UploadingTemplates => {
                                "Uploading templates...".to_string()
                            }
                            crate::BootstrapProgress::ConnectingDatabase => {
                                "Connecting to database...".to_string()
                            }
                            crate::BootstrapProgress::StartingLLM => {
                                "Starting LLM servers...".to_string()
                            }
                            crate::BootstrapProgress::BootstrapComplete => {
                                "Bootstrap complete".to_string()
                            }
                            crate::BootstrapProgress::BootstrapError(msg) => {
                                format!("Error: {}", msg)
                            }
                        };
                    }
                }
            }
            // Toggle the editor cursor every 500 ms.
            if last_blink.elapsed() >= std::time::Duration::from_millis(500) {
                cursor_blink = !cursor_blink;
                last_blink = std::time::Instant::now();
            }
            terminal.draw(|f| self.render(f, cursor_blink))?;
            // Refresh panel data at most once per second, once state exists.
            if self.app_state.is_some() && last_update.elapsed() >= update_interval {
                if let Err(e) = rt.block_on(self.update_data()) {
                    let mut log_panel = self.log_panel.lock().unwrap();
                    log_panel.add_log(&format!("Update error: {}", e));
                }
                last_update = std::time::Instant::now();
            }
            // Poll input with a 50 ms timeout so the loop stays responsive.
            if event::poll(std::time::Duration::from_millis(50))? {
                if let Event::Key(key) = event::read()? {
                    if let Err(e) = rt.block_on(self.handle_input(key.code, key.modifiers)) {
                        let mut log_panel = self.log_panel.lock().unwrap();
                        log_panel.add_log(&format!("Input error: {}", e));
                    }
                    if self.should_quit {
                        break;
                    }
                }
            }
        }
        Ok(())
    }
fn render(&mut self, f: &mut Frame, cursor_blink: bool) {
let bg = Color::Rgb(0, 30, 100);
let border_active = Color::Rgb(85, 255, 255);
let border_inactive = Color::Rgb(170, 170, 170);
let text = Color::Rgb(255, 255, 255);
let highlight = Color::Rgb(0, 170, 170);
let title_bg = Color::Rgb(170, 170, 170);
let title_fg = Color::Rgb(0, 0, 0);
if self.app_state.is_none() {
self.render_loading(f, bg, text, border_active, title_bg, title_fg);
return;
}
let main_chunks = Layout::default()
.direction(Direction::Vertical)
.constraints([
Constraint::Length(3),
Constraint::Min(0),
Constraint::Length(12),
])
.split(f.area());
self.render_header(f, main_chunks[0], bg, title_bg, title_fg);
if self.editor.is_some() {
let content_chunks = Layout::default()
.direction(Direction::Horizontal)
.constraints([
Constraint::Percentage(25),
Constraint::Percentage(40),
Constraint::Percentage(35),
])
.split(main_chunks[1]);
self.render_file_tree(
f,
content_chunks[0],
bg,
text,
border_active,
border_inactive,
highlight,
title_bg,
title_fg,
);
if let Some(editor) = &self.editor {
self.render_editor(
f,
content_chunks[1],
editor,
bg,
text,
border_active,
border_inactive,
highlight,
title_bg,
title_fg,
cursor_blink,
);
}
self.render_chat(
f,
content_chunks[2],
bg,
text,
border_active,
border_inactive,
highlight,
title_bg,
title_fg,
);
} else {
let content_chunks = Layout::default()
.direction(Direction::Horizontal)
.constraints([
Constraint::Percentage(25),
Constraint::Percentage(40),
Constraint::Percentage(35),
])
.split(main_chunks[1]);
self.render_file_tree(
f,
content_chunks[0],
bg,
text,
border_active,
border_inactive,
highlight,
title_bg,
title_fg,
);
let right_chunks = Layout::default()
.direction(Direction::Vertical)
.constraints([Constraint::Percentage(50), Constraint::Percentage(50)])
.split(content_chunks[1]);
self.render_status(
f,
right_chunks[0],
bg,
text,
border_active,
border_inactive,
highlight,
title_bg,
title_fg,
);
self.render_chat(
f,
content_chunks[2],
bg,
text,
border_active,
border_inactive,
highlight,
title_bg,
title_fg,
);
}
self.render_logs(
f,
main_chunks[2],
bg,
text,
border_active,
border_inactive,
highlight,
title_bg,
title_fg,
);
}
fn render_header(
&self,
f: &mut Frame,
area: Rect,
_bg: Color,
title_bg: Color,
title_fg: Color,
) {
let block = Block::default().style(Style::default().bg(title_bg));
f.render_widget(block, area);
let title = if self.app_state.is_some() {
let components = vec![
("Tables", "postgres", "5432"),
("Cache", "valkey-server", "6379"),
("Drive", "minio", "9000"),
("LLM", "llama-server", "8081"),
];
let statuses: Vec<String> = components
.iter()
.map(|(comp_name, process, _port)| {
let status = if status_panel::StatusPanel::check_component_running(process) {
format!("🟢 {}", comp_name)
} else {
format!("🔴 {}", comp_name)
};
status
})
.collect();
format!(" GENERAL BOTS ┃ {} ", statuses.join(""))
} else {
" GENERAL BOTS ".to_string()
};
let title_len = title.len() as u16;
let centered_x = (area.width.saturating_sub(title_len)) / 2;
let centered_y = area.y + 1;
let x = area.x + centered_x;
let max_width = area.width.saturating_sub(x - area.x);
let width = title_len.min(max_width);
let title_span = Span::styled(
title,
Style::default()
.fg(title_fg)
.bg(title_bg)
.add_modifier(Modifier::BOLD),
);
f.render_widget(
Paragraph::new(Line::from(title_span)),
Rect {
x,
y: centered_y,
width,
height: 1,
},
);
}
    /// Full-screen loading view shown until `set_app_state` is called; the
    /// boxed message reflects the latest bootstrap progress event.
    fn render_loading(
        &self,
        f: &mut Frame,
        bg: Color,
        text: Color,
        border: Color,
        title_bg: Color,
        title_fg: Color,
    ) {
        // Center a box: take the middle vertical band, then its middle 40%.
        let chunks = Layout::default()
            .direction(Direction::Vertical)
            .constraints([
                Constraint::Percentage(40),
                Constraint::Percentage(20),
                Constraint::Percentage(40),
            ])
            .split(f.area());
        let center = Layout::default()
            .direction(Direction::Horizontal)
            .constraints([
                Constraint::Percentage(30),
                Constraint::Percentage(40),
                Constraint::Percentage(30),
            ])
            .split(chunks[1])[1];
        let block = Block::default()
            .title(Span::styled(
                " General Bots ",
                Style::default()
                    .fg(title_fg)
                    .bg(title_bg)
                    .add_modifier(Modifier::BOLD),
            ))
            .borders(Borders::ALL)
            .border_style(Style::default().fg(border))
            .style(Style::default().bg(bg));
        // Pad the status to 30 columns so the ASCII box stays aligned.
        let loading_text = format!(
            "\n ╔════════════════════════════════╗\n ║ ║\n ║ Initializing System... ║\n ║ ║\n ║ {} ║\n ║ ║\n ╚════════════════════════════════╝\n",
            format!("{:^30}", self.bootstrap_status)
        );
        let paragraph = Paragraph::new(loading_text)
            .block(block)
            .style(Style::default().fg(text))
            .wrap(Wrap { trim: false });
        f.render_widget(paragraph, center);
    }
    /// Draws the file-explorer list in the left column, highlighting the
    /// selected row and brightening the border when the panel has focus.
    fn render_file_tree(
        &self,
        f: &mut Frame,
        area: Rect,
        bg: Color,
        text: Color,
        border_active: Color,
        border_inactive: Color,
        highlight: Color,
        title_bg: Color,
        title_fg: Color,
    ) {
        if let Some(file_tree) = &self.file_tree {
            let items = file_tree.render_items();
            let selected = file_tree.selected_index();
            let list_items: Vec<ListItem> = items
                .iter()
                .enumerate()
                .map(|(idx, (display, _))| {
                    // Invert the selected row so it reads as a cursor.
                    let style = if idx == selected {
                        Style::default()
                            .bg(highlight)
                            .fg(Color::Black)
                            .add_modifier(Modifier::BOLD)
                    } else {
                        Style::default().fg(text)
                    };
                    ListItem::new(Line::from(Span::styled(display.clone(), style)))
                })
                .collect();
            let is_active = self.active_panel == ActivePanel::FileTree;
            let border_color = if is_active {
                border_active
            } else {
                border_inactive
            };
            let title_style = if is_active {
                Style::default()
                    .fg(title_fg)
                    .bg(title_bg)
                    .add_modifier(Modifier::BOLD)
            } else {
                Style::default().fg(title_fg).bg(title_bg)
            };
            let block = Block::default()
                .title(Span::styled(" FILE EXPLORER ", title_style))
                .borders(Borders::ALL)
                .border_style(Style::default().fg(border_color))
                .style(Style::default().bg(bg));
            let list = List::new(list_items).block(block);
            f.render_widget(list, area);
        }
    }
fn render_status(
&mut self,
f: &mut Frame,
area: Rect,
bg: Color,
text: Color,
border_active: Color,
border_inactive: Color,
_highlight: Color,
title_bg: Color,
title_fg: Color,
) {
let selected_bot_opt = self.file_tree.as_ref().and_then(|ft| ft.get_selected_bot());
let status_text = if let Some(status_panel) = &mut self.status_panel {
match selected_bot_opt {
Some(bot) => status_panel.render(Some(bot)),
None => status_panel.render(None),
}
} else {
"Waiting for initialization...".to_string()
};
let is_active = self.active_panel == ActivePanel::Status;
let border_color = if is_active {
border_active
} else {
border_inactive
};
let title_style = if is_active {
Style::default()
.fg(title_fg)
.bg(title_bg)
.add_modifier(Modifier::BOLD)
} else {
Style::default().fg(title_fg).bg(title_bg)
};
let block = Block::default()
.title(Span::styled(" SYSTEM STATUS ", title_style))
.borders(Borders::ALL)
.border_style(Style::default().fg(border_color))
.style(Style::default().bg(bg));
let paragraph = Paragraph::new(status_text)
.block(block)
.style(Style::default().fg(text))
.wrap(Wrap { trim: false });
f.render_widget(paragraph, area);
}
    /// Draws the open editor buffer in the middle column; the title shows the
    /// file path and `cursor_blink` drives the editor's cursor indicator.
    fn render_editor(
        &self,
        f: &mut Frame,
        area: Rect,
        editor: &Editor,
        bg: Color,
        text: Color,
        border_active: Color,
        border_inactive: Color,
        _highlight: Color,
        title_bg: Color,
        title_fg: Color,
        cursor_blink: bool,
    ) {
        let is_active = self.active_panel == ActivePanel::Editor;
        let border_color = if is_active {
            border_active
        } else {
            border_inactive
        };
        let title_style = if is_active {
            Style::default()
                .fg(title_fg)
                .bg(title_bg)
                .add_modifier(Modifier::BOLD)
        } else {
            Style::default().fg(title_fg).bg(title_bg)
        };
        let title_text = format!(" EDITOR: {} ", editor.file_path());
        let block = Block::default()
            .title(Span::styled(title_text, title_style))
            .borders(Borders::ALL)
            .border_style(Style::default().fg(border_color))
            .style(Style::default().bg(bg));
        let content = editor.render(cursor_blink);
        let paragraph = Paragraph::new(content)
            .block(block)
            .style(Style::default().fg(text))
            .wrap(Wrap { trim: false });
        f.render_widget(paragraph, area);
    }
fn render_chat(
&self,
f: &mut Frame,
area: Rect,
bg: Color,
text: Color,
border_active: Color,
border_inactive: Color,
_highlight: Color,
title_bg: Color,
title_fg: Color,
) {
if let Some(chat_panel) = &self.chat_panel {
let is_active = self.active_panel == ActivePanel::Chat;
let border_color = if is_active {
border_active
} else {
border_inactive
};
let title_style = if is_active {
Style::default()
.fg(title_fg)
.bg(title_bg)
.add_modifier(Modifier::BOLD)
} else {
Style::default().fg(title_fg).bg(title_bg)
};
let selected_bot = if let Some(file_tree) = &self.file_tree {
file_tree
.get_selected_bot()
.unwrap_or("No bot selected".to_string())
} else {
"No bot selected".to_string()
};
let title_text = format!(" CHAT: {} ", selected_bot);
let block = Block::default()
.title(Span::styled(title_text, title_style))
.borders(Borders::ALL)
.border_style(Style::default().fg(border_color))
.style(Style::default().bg(bg));
let content = chat_panel.render();
let paragraph = Paragraph::new(content)
.block(block)
.style(Style::default().fg(text))
.wrap(Wrap { trim: false });
f.render_widget(paragraph, area);
}
}
    /// Draws the bottom log strip with the most recent log entries.
    /// Uses `try_lock` so a logger writing from another thread never blocks
    /// the render pass; a contended lock just shows a placeholder this frame.
    fn render_logs(
        &self,
        f: &mut Frame,
        area: Rect,
        bg: Color,
        text: Color,
        border_active: Color,
        border_inactive: Color,
        _highlight: Color,
        title_bg: Color,
        title_fg: Color,
    ) {
        let log_panel = self.log_panel.try_lock();
        let log_lines = if let Ok(panel) = log_panel {
            panel.render()
        } else {
            "Loading logs...".to_string()
        };
        let is_active = self.active_panel == ActivePanel::Logs;
        let border_color = if is_active {
            border_active
        } else {
            border_inactive
        };
        let title_style = if is_active {
            Style::default()
                .fg(title_fg)
                .bg(title_bg)
                .add_modifier(Modifier::BOLD)
        } else {
            Style::default().fg(title_fg).bg(title_bg)
        };
        let block = Block::default()
            .title(Span::styled(" SYSTEM LOGS ", title_style))
            .borders(Borders::ALL)
            .border_style(Style::default().fg(border_color))
            .style(Style::default().bg(bg));
        let paragraph = Paragraph::new(log_lines)
            .block(block)
            .style(Style::default().fg(text))
            .wrap(Wrap { trim: false });
        f.render_widget(paragraph, area);
    }
    /// Routes a key event. Ctrl shortcuts (quit / save / close) are global;
    /// every other key goes to whichever panel currently has focus. Tab
    /// cycles focus between panels.
    async fn handle_input(&mut self, key: KeyCode, modifiers: KeyModifiers) -> Result<()> {
        if modifiers.contains(KeyModifiers::CONTROL) {
            match key {
                KeyCode::Char('c') | KeyCode::Char('q') => {
                    self.should_quit = true;
                    return Ok(());
                }
                KeyCode::Char('s') => {
                    // Save the open editor buffer back to the drive.
                    if let Some(editor) = &mut self.editor {
                        if let Some(app_state) = &self.app_state {
                            if let Err(e) = editor.save(app_state).await {
                                let mut log_panel = self.log_panel.lock().unwrap();
                                log_panel.add_log(&format!("Save failed: {}", e));
                            } else {
                                let mut log_panel = self.log_panel.lock().unwrap();
                                log_panel.add_log(&format!("Saved: {}", editor.file_path()));
                            }
                        }
                    }
                    return Ok(());
                }
                KeyCode::Char('w') => {
                    // Close the editor, discarding unsaved changes.
                    if self.editor.is_some() {
                        self.editor = None;
                        self.active_panel = ActivePanel::FileTree;
                        let mut log_panel = self.log_panel.lock().unwrap();
                        log_panel.add_log("Closed editor");
                    }
                    return Ok(());
                }
                _ => {}
            }
        }
        // Until bootstrap supplies the app state, ignore non-Ctrl keys.
        if self.app_state.is_none() {
            return Ok(());
        }
        match self.active_panel {
            ActivePanel::FileTree => match key {
                KeyCode::Up => {
                    if let Some(file_tree) = &mut self.file_tree {
                        file_tree.move_up();
                    }
                }
                KeyCode::Down => {
                    if let Some(file_tree) = &mut self.file_tree {
                        file_tree.move_down();
                    }
                }
                KeyCode::Enter => {
                    // Open the selected bucket/folder/file.
                    if let Err(e) = self.handle_tree_enter().await {
                        let mut log_panel = self.log_panel.lock().unwrap();
                        log_panel.add_log(&format!("Enter error: {}", e));
                    }
                }
                KeyCode::Backspace => {
                    // Go up one level, then reload that level's listing.
                    if let Some(file_tree) = &mut self.file_tree {
                        if file_tree.go_up() {
                            if let Err(e) = file_tree.refresh_current().await {
                                let mut log_panel = self.log_panel.lock().unwrap();
                                log_panel.add_log(&format!("Navigation error: {}", e));
                            }
                        }
                    }
                }
                KeyCode::Tab => {
                    self.active_panel = ActivePanel::Chat;
                }
                KeyCode::Char('q') => {
                    self.should_quit = true;
                }
                KeyCode::F(5) => {
                    // F5: reload the current listing.
                    if let Some(file_tree) = &mut self.file_tree {
                        if let Err(e) = file_tree.refresh_current().await {
                            let mut log_panel = self.log_panel.lock().unwrap();
                            log_panel.add_log(&format!("Refresh failed: {}", e));
                        } else {
                            let mut log_panel = self.log_panel.lock().unwrap();
                            log_panel.add_log("Refreshed");
                        }
                    }
                }
                _ => {}
            },
            ActivePanel::Editor => {
                if let Some(editor) = &mut self.editor {
                    match key {
                        KeyCode::Up => editor.move_up(),
                        KeyCode::Down => editor.move_down(),
                        KeyCode::Left => editor.move_left(),
                        KeyCode::Right => editor.move_right(),
                        KeyCode::Char(c) => editor.insert_char(c),
                        KeyCode::Backspace => editor.backspace(),
                        KeyCode::Enter => editor.insert_newline(),
                        KeyCode::Tab => {
                            self.active_panel = ActivePanel::Chat;
                        }
                        KeyCode::Esc => {
                            // Close without saving.
                            self.editor = None;
                            self.active_panel = ActivePanel::FileTree;
                            let mut log_panel = self.log_panel.lock().unwrap();
                            log_panel.add_log("Closed editor");
                        }
                        _ => {}
                    }
                }
            }
            ActivePanel::Chat => match key {
                KeyCode::Tab => {
                    self.active_panel = ActivePanel::FileTree;
                }
                KeyCode::Enter => {
                    // Send the typed message to the bot selected in the tree.
                    if let (Some(chat_panel), Some(file_tree), Some(app_state)) =
                        (&mut self.chat_panel, &self.file_tree, &self.app_state)
                    {
                        if let Some(bot_name) = file_tree.get_selected_bot() {
                            if let Err(e) = chat_panel.send_message(&bot_name, app_state).await {
                                let mut log_panel = self.log_panel.lock().unwrap();
                                log_panel.add_log(&format!("Chat error: {}", e));
                            }
                        }
                    }
                }
                KeyCode::Char(c) => {
                    if let Some(chat_panel) = &mut self.chat_panel {
                        chat_panel.add_char(c);
                    }
                }
                KeyCode::Backspace => {
                    if let Some(chat_panel) = &mut self.chat_panel {
                        chat_panel.backspace();
                    }
                }
                _ => {}
            },
            ActivePanel::Status => match key {
                KeyCode::Tab => {
                    self.active_panel = ActivePanel::Logs;
                }
                _ => {}
            },
            ActivePanel::Logs => match key {
                KeyCode::Tab => {
                    self.active_panel = ActivePanel::FileTree;
                }
                _ => {}
            },
        }
        Ok(())
    }
    /// Acts on Enter in the file tree: descends into buckets/folders, or
    /// opens a file in the editor panel.
    async fn handle_tree_enter(&mut self) -> Result<()> {
        if let (Some(file_tree), Some(app_state)) = (&mut self.file_tree, &self.app_state) {
            // Clone the node so the tree can be mutated while we act on it.
            if let Some(node) = file_tree.get_selected_node().cloned() {
                match node {
                    TreeNode::Bucket { name, .. } => {
                        file_tree.enter_bucket(name.clone()).await?;
                        let mut log_panel = self.log_panel.lock().unwrap();
                        log_panel.add_log(&format!("Opened bucket: {}", name));
                    }
                    TreeNode::Folder { bucket, path, .. } => {
                        file_tree.enter_folder(bucket.clone(), path.clone()).await?;
                        let mut log_panel = self.log_panel.lock().unwrap();
                        log_panel.add_log(&format!("Opened folder: {}", path));
                    }
                    TreeNode::File { bucket, path, .. } => {
                        // A failed load is reported but leaves the UI usable.
                        match Editor::load(app_state, &bucket, &path).await {
                            Ok(editor) => {
                                self.editor = Some(editor);
                                self.active_panel = ActivePanel::Editor;
                                let mut log_panel = self.log_panel.lock().unwrap();
                                log_panel.add_log(&format!("Editing: {}", path));
                            }
                            Err(e) => {
                                let mut log_panel = self.log_panel.lock().unwrap();
                                log_panel.add_log(&format!("Failed to load file: {}", e));
                            }
                        }
                    }
                }
            }
        }
        Ok(())
    }
async fn update_data(&mut self) -> Result<()> {
if let Some(status_panel) = &mut self.status_panel {
status_panel.update().await?;
}
if let Some(file_tree) = &self.file_tree {
if file_tree.render_items().is_empty() {
if let Some(file_tree) = &mut self.file_tree {
file_tree.load_root().await?;
}
}
}
if let (Some(chat_panel), Some(file_tree)) = (&mut self.chat_panel, &self.file_tree) {
if let Some(bot_name) = file_tree.get_selected_bot() {
chat_panel.poll_response(&bot_name).await?;
}
}
Ok(())
}
}

189
src/console/status_panel.rs Normal file
View file

@ -0,0 +1,189 @@
use crate::config::ConfigManager;
use crate::nvidia;
use crate::nvidia::get_system_metrics;
use crate::shared::models::schema::bots::dsl::*;
use crate::shared::state::AppState;
use diesel::prelude::*;
use std::sync::Arc;
use sysinfo::System;
/// Renders system metrics, component health, bot and session information.
pub struct StatusPanel {
    app_state: Arc<AppState>,
    // When `update` last ran.
    last_update: std::time::Instant,
    // Output of the last render, kept for cheap redraws.
    cached_content: String,
    // sysinfo handle reused across refreshes (cheaper than re-creating it).
    system: System,
}
impl StatusPanel {
    /// Creates a panel with a fully initialized sysinfo snapshot.
    pub fn new(app_state: Arc<AppState>) -> Self {
        Self {
            app_state,
            last_update: std::time::Instant::now(),
            cached_content: String::new(),
            system: System::new_all(),
        }
    }
    /// Refreshes system metrics and re-renders the cached panel content.
    pub async fn update(&mut self) -> Result<(), std::io::Error> {
        self.system.refresh_all();
        // Force fresh metrics by using different token counts
        // NOTE(review): `_tokens` and `_system_metrics` are computed and then
        // discarded — confirm whether the nvidia call has a needed side
        // effect, otherwise both lines look removable.
        let _tokens = (std::time::SystemTime::now()
            .duration_since(std::time::UNIX_EPOCH)
            .unwrap()
            .as_secs()
            % 1000) as usize;
        let _system_metrics = nvidia::get_system_metrics().unwrap_or_default();
        self.cached_content = self.render(None);
        self.last_update = std::time::Instant::now();
        Ok(())
    }
/// Renders the monitor dashboard as one newline-joined string.
///
/// Sections, in order: system metrics (CPU/GPU/memory), component status
/// (probed via `pgrep`), active bots loaded from the database — the bot named
/// in `selected_bot`, if any, gets an expanded configuration panel — and the
/// count of live response channels.
pub fn render(&mut self, selected_bot: Option<String>) -> String {
    let mut lines = Vec::new();
    // System metrics section
    lines.push("╔═══════════════════════════════════════╗".to_string());
    lines.push("║ SYSTEM METRICS ║".to_string());
    lines.push("╚═══════════════════════════════════════╝".to_string());
    lines.push("".to_string());
    // Refresh CPU counters before sampling; sysinfo otherwise returns stale data.
    self.system.refresh_cpu_all();
    let cpu_usage = self.system.global_cpu_usage();
    let cpu_bar = Self::create_progress_bar(cpu_usage, 20);
    lines.push(format!(" CPU: {:5.1}% {}", cpu_usage, cpu_bar));
    // GPU usage comes from the NVIDIA helper; None on machines without a GPU.
    let system_metrics = get_system_metrics().unwrap_or_default();
    if let Some(gpu_usage) = system_metrics.gpu_usage {
        let gpu_bar = Self::create_progress_bar(gpu_usage, 20);
        lines.push(format!(" GPU: {:5.1}% {}", gpu_usage, gpu_bar));
    } else {
        lines.push(" GPU: Not available".to_string());
    }
    // Memory converted to GB (assumes sysinfo reports bytes — TODO confirm
    // against the pinned sysinfo version).
    let total_mem = self.system.total_memory() as f32 / 1024.0 / 1024.0 / 1024.0;
    let used_mem = self.system.used_memory() as f32 / 1024.0 / 1024.0 / 1024.0;
    let mem_percentage = (used_mem / total_mem) * 100.0;
    let mem_bar = Self::create_progress_bar(mem_percentage, 20);
    lines.push(format!(
        " MEM: {:5.1}% {} ({:.1}/{:.1} GB)",
        mem_percentage, mem_bar, used_mem, total_mem
    ));
    // Components status section
    lines.push("".to_string());
    lines.push("╔═══════════════════════════════════════╗".to_string());
    lines.push("║ COMPONENTS STATUS ║".to_string());
    lines.push("╚═══════════════════════════════════════╝".to_string());
    lines.push("".to_string());
    // (display name, process name to pgrep for, conventional port)
    let components = vec![
        ("Tables", "postgres", "5432"),
        ("Cache", "valkey-server", "6379"),
        ("Drive", "minio", "9000"),
        ("LLM", "llama-server", "8081"),
    ];
    for (comp_name, process, port) in components {
        let status = if Self::check_component_running(process) {
            format!("🟢 ONLINE [Port: {}]", port)
        } else {
            "🔴 OFFLINE".to_string()
        };
        lines.push(format!(" {:<10} {}", comp_name, status));
    }
    // Active bots section
    lines.push("".to_string());
    lines.push("╔═══════════════════════════════════════╗".to_string());
    lines.push("║ ACTIVE BOTS ║".to_string());
    lines.push("╚═══════════════════════════════════════╝".to_string());
    lines.push("".to_string());
    if let Ok(mut conn) = self.app_state.conn.get() {
        match bots
            .filter(is_active.eq(true))
            .select((name, id))
            .load::<(String, uuid::Uuid)>(&mut *conn)
        {
            Ok(bot_list) => {
                if bot_list.is_empty() {
                    lines.push(" No active bots".to_string());
                } else {
                    for (bot_name, bot_id) in bot_list {
                        // NOTE(review): the "selected" marker below is an empty
                        // string — it looks like a glyph was lost; confirm the
                        // intended marker character.
                        let marker = if let Some(ref selected) = selected_bot {
                            if selected == &bot_name {
                                ""
                            } else {
                                " "
                            }
                        } else {
                            " "
                        };
                        lines.push(format!(" {} 🤖 {}", marker, bot_name));
                        // Expand an inline configuration panel for the
                        // currently selected bot only.
                        if let Some(ref selected) = selected_bot {
                            if selected == &bot_name {
                                lines.push("".to_string());
                                lines.push(" ┌─ Bot Configuration ─────────┐".to_string());
                                let config_manager =
                                    ConfigManager::new(self.app_state.conn.clone());
                                let llm_model = config_manager
                                    .get_config(&bot_id, "llm-model", None)
                                    .unwrap_or_else(|_| "N/A".to_string());
                                lines.push(format!(" Model: {}", llm_model));
                                let ctx_size = config_manager
                                    .get_config(&bot_id, "llm-server-ctx-size", None)
                                    .unwrap_or_else(|_| "N/A".to_string());
                                lines.push(format!(" Context: {}", ctx_size));
                                let temp = config_manager
                                    .get_config(&bot_id, "llm-temperature", None)
                                    .unwrap_or_else(|_| "N/A".to_string());
                                lines.push(format!(" Temp: {}", temp));
                                lines.push(" └─────────────────────────────┘".to_string());
                            }
                        }
                    }
                }
            }
            Err(_) => {
                lines.push(" Error loading bots".to_string());
            }
        }
    } else {
        lines.push(" Database locked".to_string());
    }
    // Sessions section
    lines.push("".to_string());
    lines.push("╔═══════════════════════════════════════╗".to_string());
    lines.push("║ SESSIONS ║".to_string());
    lines.push("╚═══════════════════════════════════════╝".to_string());
    // try_lock so rendering never blocks on a busy channel map; on contention
    // we simply report zero rather than stalling the UI.
    let session_count = self
        .app_state
        .response_channels
        .try_lock()
        .map(|channels| channels.len())
        .unwrap_or(0);
    lines.push(format!(" Active Sessions: {}", session_count));
    lines.join("\n")
}
/// Renders a textual progress bar like `[██████░░░░░░░░░░░░░░]`.
///
/// `percentage` is the fill level in the 0–100 range; `width` is the number
/// of glyph cells between the brackets. Values above 100 are clamped so the
/// bar never exceeds `width` cells.
fn create_progress_bar(percentage: f32, width: usize) -> String {
    // Bug fix: the repeated glyphs were empty strings, so the bar always
    // rendered as "[]". Use solid/light block glyphs for filled/empty cells.
    let filled = ((percentage / 100.0 * width as f32).round() as usize).min(width);
    let empty = width.saturating_sub(filled);
    format!("[{}{}]", "█".repeat(filled), "░".repeat(empty))
}
/// Returns `true` when a process whose command line matches `process_name`
/// is currently running, as reported by `pgrep -f`.
///
/// Any failure to spawn `pgrep` (e.g. the binary is missing) is treated as
/// "not running".
pub fn check_component_running(process_name: &str) -> bool {
    let probe = std::process::Command::new("pgrep")
        .arg("-f")
        .arg(process_name)
        .output();
    match probe {
        Ok(result) => !result.stdout.is_empty(),
        Err(_) => false,
    }
}
}

View file

@ -0,0 +1,10 @@
#[cfg(test)]
mod tests {
    use super::*;
    use crate::tests::test_util;

    /// Smoke test: verifies the module compiles and the shared test fixture
    /// initializes. No automation behavior is exercised yet.
    #[test]
    fn test_automation_module() {
        test_util::setup();
        assert!(true, "Basic automation module test");
    }
}

135
src/core/automation/mod.rs Normal file
View file

@ -0,0 +1,135 @@
use crate::basic::ScriptService;
use crate::shared::models::{Automation, TriggerKind};
use crate::shared::state::AppState;
use chrono::Utc;
use cron::Schedule;
use diesel::prelude::*;
use log::error;
use std::str::FromStr;
use std::sync::Arc;
use tokio::time::{interval, Duration};
#[cfg(feature = "vectordb")]
pub mod vectordb_indexer;
#[cfg(feature = "vectordb")]
pub use vectordb_indexer::{IndexingStats, IndexingStatus, VectorDBIndexer};
/// Background service that polls the database for scheduled automations and
/// runs their associated BASIC scripts when they come due.
#[derive(Debug)]
pub struct AutomationService {
    /// Shared application state (DB pool, session manager, etc.).
    state: Arc<AppState>,
}
impl AutomationService {
/// Creates the service and — as a side effect — starts the global
/// compact-prompt scheduler for the given application state.
pub fn new(state: Arc<AppState>) -> Self {
    // NOTE(review): starting a background scheduler inside a constructor is a
    // hidden side effect; every construction of this type spawns work.
    crate::llm::compact_prompt::start_compact_prompt_scheduler(Arc::clone(&state));
    Self { state }
}
/// Runs the scheduling loop forever, checking for due automations every
/// five seconds.
///
/// This future never resolves: the `Ok(())` return exists only to satisfy
/// callers' error plumbing. Per-tick errors are logged and swallowed so a
/// transient DB failure does not kill the scheduler.
pub async fn spawn(self) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
    let mut ticker = interval(Duration::from_secs(5));
    loop {
        ticker.tick().await;
        if let Err(e) = self.check_scheduled_tasks().await {
            error!("Error checking scheduled tasks: {}", e);
        }
    }
}
/// Loads every active, cron-scheduled automation and triggers those whose
/// next occurrence is less than one minute away.
///
/// Re-triggering within the same minute is debounced via `last_triggered`,
/// which is updated after each execution attempt. Parse and execution errors
/// are logged per-automation and do not abort the sweep.
async fn check_scheduled_tasks(&self) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
    use crate::shared::models::system_automations::dsl::{
        id, is_active, kind, last_triggered as lt_column, system_automations,
    };
    let mut conn = self
        .state
        .conn
        .get()
        .map_err(|e| format!("Failed to acquire database connection: {}", e))?;
    let automations: Vec<Automation> = system_automations
        .filter(is_active.eq(true))
        .filter(kind.eq(TriggerKind::Scheduled as i32))
        .load::<Automation>(&mut conn)?;
    for automation in automations {
        if let Some(schedule_str) = &automation.schedule {
            match Schedule::from_str(schedule_str.trim()) {
                Ok(parsed_schedule) => {
                    let now = Utc::now();
                    let next_run = parsed_schedule.upcoming(Utc).next();
                    if let Some(next_time) = next_run {
                        let time_until_next = next_time - now;
                        // NOTE(review): this fires when the next occurrence is
                        // within the coming minute, i.e. up to ~1 minute BEFORE
                        // the cron time rather than at/after it — confirm that
                        // early firing is intended.
                        if time_until_next.num_minutes() < 1 {
                            // Debounce: skip if we already fired in the last minute.
                            if let Some(last_triggered) = automation.last_triggered {
                                if (now - last_triggered).num_minutes() < 1 {
                                    continue;
                                }
                            }
                            if let Err(e) = self.execute_automation(&automation).await {
                                error!("Error executing automation {}: {}", automation.id, e);
                            }
                            // last_triggered is stamped even when execution failed,
                            // so a failing script is not retried every tick.
                            if let Err(e) =
                                diesel::update(system_automations.filter(id.eq(automation.id)))
                                    .set(lt_column.eq(Some(now)))
                                    .execute(&mut conn)
                            {
                                error!(
                                    "Error updating last_triggered for automation {}: {}",
                                    automation.id, e
                                );
                            }
                        }
                    }
                }
                Err(e) => {
                    error!(
                        "Error parsing schedule for automation {} ({}): {}",
                        automation.id, schedule_str, e
                    );
                }
            }
        }
    }
    Ok(())
}
/// Executes one automation: resolves its bot's name, loads the compiled
/// script `./work/<bot>.gbai/<bot>.gbdialog/<param>.ast`, and runs it under
/// a session created for the automation.
///
/// A missing/unreadable script is logged and reported as `Ok(())` so the
/// scheduler keeps running; compile/runtime script errors are likewise only
/// logged.
async fn execute_automation(
    &self,
    automation: &Automation,
) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
    let bot_name: String = {
        use crate::shared::models::schema::bots::dsl::*;
        let mut conn = self
            .state
            .conn
            .get()
            .map_err(|e| format!("Failed to acquire database connection: {}", e))?;
        bots.filter(id.eq(automation.bot_id))
            .select(name)
            .first(&mut conn)?
    };
    let script_path = format!(
        "./work/{}.gbai/{}.gbdialog/{}.ast",
        bot_name, bot_name, automation.param
    );
    let script_content = match tokio::fs::read_to_string(&script_path).await {
        Ok(content) => content,
        Err(e) => {
            // Deliberately non-fatal: log and let the scheduler continue.
            error!("Failed to read script {}: {}", script_path, e);
            return Ok(());
        }
    };
    let session = {
        let mut sm = self.state.session_manager.lock().await;
        // NOTE(review): the bot's own id is used as the "user" id for the
        // automation session — confirm this is the intended convention.
        let admin_user = automation.bot_id;
        sm.get_or_create_user_session(admin_user, automation.bot_id, "Automation")?
            .ok_or("Failed to create session")?
    };
    let script_service = ScriptService::new(Arc::clone(&self.state), session);
    match script_service.compile(&script_content) {
        Ok(ast) => {
            if let Err(e) = script_service.run(&ast) {
                error!("Script execution failed: {}", e);
            }
        }
        Err(e) => {
            error!("Script compilation failed: {}", e);
        }
    }
    Ok(())
}
}

View file

@ -0,0 +1,10 @@
#[cfg(test)]
mod tests {
    use super::*;
    use crate::tests::test_util;

    /// Smoke test: verifies the module compiles and the shared test fixture
    /// initializes. No bootstrap behavior is exercised yet.
    #[test]
    fn test_bootstrap_module() {
        test_util::setup();
        assert!(true, "Basic bootstrap module test");
    }
}

397
src/core/bootstrap/mod.rs Normal file
View file

@ -0,0 +1,397 @@
use crate::config::AppConfig;
use crate::package_manager::setup::{DirectorySetup, EmailSetup};
use crate::package_manager::{InstallMode, PackageManager};
use crate::shared::utils::establish_pg_connection;
use anyhow::Result;
use aws_config::BehaviorVersion;
use aws_sdk_s3::Client;
use dotenvy::dotenv;
use log::{error, info, trace};
use rand::distr::Alphanumeric;
use std::io::{self, Write};
use std::path::{Path, PathBuf};
use std::process::Command;
/// Descriptor for a managed component; currently just the package-manager
/// name used for install/start lookups.
#[derive(Debug)]
pub struct ComponentInfo {
    pub name: &'static str,
}
/// Orchestrates first-run installation and startup of the server's
/// components (database, cache, drive, LLM, ...).
#[derive(Debug)]
pub struct BootstrapManager {
    /// How components are installed (passed through to `PackageManager`).
    pub install_mode: InstallMode,
    /// Optional tenant identifier scoping the installation.
    pub tenant: Option<String>,
}
impl BootstrapManager {
/// Creates a manager for the given install mode and optional tenant.
///
/// NOTE(review): declared `async` but performs no awaits — kept as-is since
/// callers already `.await` it; confirm before changing the signature.
pub async fn new(install_mode: InstallMode, tenant: Option<String>) -> Self {
    trace!(
        "Initializing BootstrapManager with mode {:?} and tenant {:?}",
        install_mode,
        tenant
    );
    Self {
        install_mode,
        tenant,
    }
}
/// Starts every component that is already installed, in the listed order.
///
/// Components not yet installed are skipped silently; the first failure to
/// start a component aborts and propagates the error.
pub fn start_all(&mut self) -> Result<()> {
    let pm = PackageManager::new(self.install_mode.clone(), self.tenant.clone())?;
    let names = [
        "tables", "cache", "drive", "llm", "email", "proxy", "directory",
        "alm", "alm_ci", "dns", "webmail", "meeting", "table_editor",
        "doc_editor", "desktop", "devtools", "bot", "system", "vector_db",
        "host",
    ];
    for name in names {
        let component = ComponentInfo { name };
        if pm.is_installed(component.name) {
            pm.start(component.name)?;
        }
    }
    Ok(())
}
/// Produces a random alphanumeric password of exactly `length` characters.
fn generate_secure_password(&self, length: usize) -> String {
    let mut rng = rand::rng();
    std::iter::repeat_with(|| char::from(rand::Rng::sample(&mut rng, Alphanumeric)))
        .take(length)
        .collect()
}
pub async fn bootstrap(&mut self) -> Result<()> {
let env_path = std::env::current_dir().unwrap().join(".env");
let db_password = self.generate_secure_password(32);
let database_url = std::env::var("DATABASE_URL").unwrap_or_else(|_| {
format!("postgres://gbuser:{}@localhost:5432/botserver", db_password)
});
let drive_password = self.generate_secure_password(16);
let drive_user = "gbdriveuser".to_string();
let drive_env = format!(
"\nDRIVE_SERVER=http://localhost:9000\nDRIVE_ACCESSKEY={}\nDRIVE_SECRET={}\n",
drive_user, drive_password
);
let contents_env = format!("DATABASE_URL={}\n{}", database_url, drive_env);
let _ = std::fs::write(&env_path, contents_env);
dotenv().ok();
let pm = PackageManager::new(self.install_mode.clone(), self.tenant.clone()).unwrap();
let required_components = vec!["tables", "drive", "cache", "llm"];
for component in required_components {
if !pm.is_installed(component) {
let termination_cmd = pm
.components
.get(component)
.and_then(|cfg| cfg.binary_name.clone())
.unwrap_or_else(|| component.to_string());
if !termination_cmd.is_empty() {
let check = Command::new("pgrep")
.arg("-f")
.arg(&termination_cmd)
.output();
if let Ok(output) = check {
if !output.stdout.is_empty() {
println!("Component '{}' appears to be already running from a previous install.", component);
println!("Do you want to terminate it? (y/n)");
let mut input = String::new();
io::stdout().flush().unwrap();
io::stdin().read_line(&mut input).unwrap();
if input.trim().eq_ignore_ascii_case("y") {
let _ = Command::new("pkill")
.arg("-f")
.arg(&termination_cmd)
.status();
println!("Terminated existing '{}' process.", component);
} else {
println!(
"Skipping start of '{}' as it is already running.",
component
);
continue;
}
}
}
}
_ = pm.install(component).await;
if component == "tables" {
let mut conn = establish_pg_connection().unwrap();
self.apply_migrations(&mut conn)?;
}
// Auto-configure Directory after installation
if component == "directory" {
info!("🔧 Auto-configuring Directory (Zitadel)...");
if let Err(e) = self.setup_directory().await {
error!("Failed to setup Directory: {}", e);
}
}
}
}
Ok(())
}
/// Setup Directory (Zitadel) with default organization and user.
///
/// Creates the "default" organization, an `admin@default` and a
/// `user@default` account, and an OAuth2 application, then persists the
/// resulting configuration to `./config/directory_config.json`.
///
/// NOTE(review): the account passwords below are hard-coded defaults and are
/// also echoed to the log — rotate/parameterize before production use.
async fn setup_directory(&self) -> Result<()> {
    let config_path = PathBuf::from("./config/directory_config.json");
    // Ensure config directory exists
    tokio::fs::create_dir_all("./config").await?;
    let mut setup = DirectorySetup::new("http://localhost:8080".to_string(), config_path);
    // Create default organization
    let org_name = "default";
    let org_id = setup
        .create_organization(org_name, "Default Organization")
        .await?;
    info!("✅ Created default organization: {}", org_name);
    // Create admin@default account for bot administration
    let admin_user = setup
        .create_user(
            &org_id,
            "admin",
            "admin@default",
            "Admin123!",
            "Admin",
            "Default",
            true, // is_admin
        )
        .await?;
    info!("✅ Created admin user: admin@default");
    // Create user@default account for regular bot usage
    let regular_user = setup
        .create_user(
            &org_id,
            "user",
            "user@default",
            "User123!",
            "User",
            "Default",
            false, // is_admin
        )
        .await?;
    info!("✅ Created regular user: user@default");
    info!(" Regular user ID: {}", regular_user.id);
    // Create OAuth2 application for BotServer
    let (project_id, client_id, client_secret) =
        setup.create_oauth_application(&org_id).await?;
    info!("✅ Created OAuth2 application in project: {}", project_id);
    // Save configuration
    let config = setup
        .save_config(
            org_id.clone(),
            org_name.to_string(),
            admin_user,
            client_id.clone(),
            client_secret,
        )
        .await?;
    info!("✅ Directory initialized successfully!");
    info!(" Organization: default");
    info!(" Admin User: admin@default / Admin123!");
    info!(" Regular User: user@default / User123!");
    info!(" Client ID: {}", client_id);
    info!(" Login URL: {}", config.base_url);
    Ok(())
}
/// Setup Email (Stalwart) with Directory integration.
///
/// Initializes the mail server configuration at `./config/email_config.json`,
/// wiring it to the Directory for authentication when a directory
/// configuration file already exists.
///
/// NOTE(review): no caller is visible in this file chunk — confirm this
/// method is invoked from the bootstrap flow.
async fn setup_email(&self) -> Result<()> {
    let config_path = PathBuf::from("./config/email_config.json");
    let directory_config_path = PathBuf::from("./config/directory_config.json");
    let mut setup = EmailSetup::new("http://localhost:8080".to_string(), config_path);
    // Try to integrate with Directory if it exists
    let directory_config = if directory_config_path.exists() {
        Some(directory_config_path)
    } else {
        None
    };
    let config = setup.initialize(directory_config).await?;
    info!("✅ Email server initialized successfully!");
    info!(" SMTP: {}:{}", config.smtp_host, config.smtp_port);
    info!(" IMAP: {}:{}", config.imap_host, config.imap_port);
    info!(" Admin: {} / {}", config.admin_user, config.admin_pass);
    if config.directory_integration {
        info!(" 🔗 Integrated with Directory for authentication");
    }
    Ok(())
}
/// Builds an S3-compatible client pointed at the configured drive endpoint.
///
/// Uses the static access/secret keys from `config.drive` and forces
/// path-style addressing (required by MinIO-style servers).
async fn get_drive_client(config: &AppConfig) -> Client {
    // The endpoint URL must carry a trailing slash.
    let mut endpoint = config.drive.server.clone();
    if !endpoint.ends_with('/') {
        endpoint.push('/');
    }
    let credentials = aws_sdk_s3::config::Credentials::new(
        config.drive.access_key.clone(),
        config.drive.secret_key.clone(),
        None,
        None,
        "static",
    );
    let base_config = aws_config::defaults(BehaviorVersion::latest())
        .endpoint_url(endpoint)
        .region("auto")
        .credentials_provider(credentials)
        .load()
        .await;
    let s3_config = aws_sdk_s3::config::Builder::from(&base_config)
        .force_path_style(true)
        .build();
    aws_sdk_s3::Client::from_conf(s3_config)
}
/// Registers template bots in the database, then mirrors every local
/// `templates/*.gbai` directory into an S3 bucket of the same name.
///
/// A missing `templates/` directory is not an error. Buckets that already
/// exist are left untouched.
///
/// NOTE(review): contents are uploaded only when the bucket is freshly
/// created — existing buckets are never re-synced; confirm this is intended.
pub async fn upload_templates_to_drive(&self, _config: &AppConfig) -> Result<()> {
    let mut conn = establish_pg_connection()?;
    self.create_bots_from_templates(&mut conn)?;
    let templates_dir = Path::new("templates");
    if !templates_dir.exists() {
        return Ok(());
    }
    let client = Self::get_drive_client(_config).await;
    let mut read_dir = tokio::fs::read_dir(templates_dir).await?;
    while let Some(entry) = read_dir.next_entry().await? {
        let path = entry.path();
        // Only directories named "*.gbai" are treated as bot templates.
        if path.is_dir()
            && path
                .file_name()
                .unwrap()
                .to_string_lossy()
                .ends_with(".gbai")
        {
            let bot_name = path.file_name().unwrap().to_string_lossy().to_string();
            let bucket = bot_name.trim_start_matches('/').to_string();
            // head_bucket failing is used as "bucket does not exist".
            if client.head_bucket().bucket(&bucket).send().await.is_err() {
                match client.create_bucket().bucket(&bucket).send().await {
                    Ok(_) => {
                        self.upload_directory_recursive(&client, &path, &bucket, "/")
                            .await?;
                    }
                    Err(e) => {
                        error!("Failed to create bucket {}: {:?}", bucket, e);
                        return Err(anyhow::anyhow!("Failed to create bucket {}: {}. Check S3 credentials and endpoint configuration", bucket, e));
                    }
                }
            } else {
                trace!("Bucket {} already exists", bucket);
            }
        }
    }
    Ok(())
}
/// Inserts a `bots` row for every `templates/*.gbai` directory that does not
/// already have a bot of the same (extension-stripped) name.
///
/// Insertion uses parameterized SQL (`$1`/`$2` binds), so template names are
/// not interpolated into the query text.
fn create_bots_from_templates(&self, conn: &mut diesel::PgConnection) -> Result<()> {
    use crate::shared::models::schema::bots;
    use diesel::prelude::*;
    let templates_dir = Path::new("templates");
    if !templates_dir.exists() {
        return Ok(());
    }
    for entry in std::fs::read_dir(templates_dir)? {
        let entry = entry?;
        let path = entry.path();
        if path.is_dir() && path.extension().map(|e| e == "gbai").unwrap_or(false) {
            let bot_folder = path.file_name().unwrap().to_string_lossy().to_string();
            // "mybot.gbai" -> bot name "mybot"
            let bot_name = bot_folder.trim_end_matches(".gbai");
            let existing: Option<String> = bots::table
                .filter(bots::name.eq(&bot_name))
                .select(bots::name)
                .first(conn)
                .optional()?;
            if existing.is_none() {
                diesel::sql_query("INSERT INTO bots (id, name, description, llm_provider, llm_config, context_provider, context_config, is_active) VALUES (gen_random_uuid(), $1, $2, 'openai', '{\"model\": \"gpt-4\", \"temperature\": 0.7}', 'database', '{}', true)").bind::<diesel::sql_types::Text, _>(&bot_name).bind::<diesel::sql_types::Text, _>(format!("Bot for {} template", bot_name)).execute(conn)?;
            } else {
                trace!("Bot {} already exists", bot_name);
            }
        }
    }
    Ok(())
}
/// Recursively uploads the contents of `local_path` into `bucket`, prefixing
/// object keys with `prefix`.
///
/// Returns a boxed future because the function recurses in async position
/// (async fns cannot be directly self-referential).
fn upload_directory_recursive<'a>(
    &'a self,
    client: &'a Client,
    local_path: &'a Path,
    bucket: &'a str,
    prefix: &'a str,
) -> std::pin::Pin<Box<dyn std::future::Future<Output = Result<()>> + 'a>> {
    Box::pin(async move {
        // NOTE(review): this normalized path is computed but never used.
        let _normalized_path = if !local_path.to_string_lossy().ends_with('/') {
            format!("{}/", local_path.to_string_lossy())
        } else {
            local_path.to_string_lossy().to_string()
        };
        let mut read_dir = tokio::fs::read_dir(local_path).await?;
        while let Some(entry) = read_dir.next_entry().await? {
            let path = entry.path();
            let file_name = path.file_name().unwrap().to_string_lossy().to_string();
            // Build "prefix/file_name" without leading/trailing slashes.
            let mut key = prefix.trim_matches('/').to_string();
            if !key.is_empty() {
                key.push('/');
            }
            key.push_str(&file_name);
            if path.is_file() {
                trace!(
                    "Uploading file {} to bucket {} with key {}",
                    path.display(),
                    bucket,
                    key
                );
                // Whole file is buffered in memory before upload.
                let content = tokio::fs::read(&path).await?;
                client
                    .put_object()
                    .bucket(bucket)
                    .key(&key)
                    .body(content.into())
                    .send()
                    .await?;
            } else if path.is_dir() {
                self.upload_directory_recursive(client, &path, bucket, &key)
                    .await?;
            }
        }
        Ok(())
    })
}
pub fn apply_migrations(&self, conn: &mut diesel::PgConnection) -> Result<()> {
use diesel_migrations::HarnessWithOutput;
use diesel_migrations::{embed_migrations, EmbeddedMigrations, MigrationHarness};
const MIGRATIONS: EmbeddedMigrations = embed_migrations!("migrations");
let mut harness = HarnessWithOutput::write_to_stdout(conn);
if let Err(e) = harness.run_pending_migrations(MIGRATIONS) {
error!("Failed to apply migrations: {}", e);
return Err(anyhow::anyhow!("Migration error: {}", e));
}
Ok(())
}
}

10
src/core/bot/bot.test.rs Normal file
View file

@ -0,0 +1,10 @@
#[cfg(test)]
mod tests {
    use super::*;
    use crate::tests::test_util;

    /// Smoke test: verifies the module compiles and the shared test fixture
    /// initializes. No bot behavior is exercised yet.
    #[test]
    fn test_bot_module() {
        test_util::setup();
        assert!(true, "Basic bot module test");
    }
}

View file

@ -0,0 +1,10 @@
#[cfg(test)]
mod tests {
    use super::*;
    use crate::tests::test_util;

    /// Smoke test: verifies the module compiles and the shared test fixture
    /// initializes. No channel behavior is exercised yet.
    #[test]
    fn test_channels_module() {
        test_util::setup();
        assert!(true, "Basic channels module test");
    }
}

View file

@ -0,0 +1,29 @@
use crate::shared::models::BotResponse;
use async_trait::async_trait;
use log::info;
/// Instagram channel adapter for sending messages through Instagram.
///
/// Currently a stub: messages are logged, not delivered.
pub struct InstagramAdapter {
    // TODO: Add Instagram API client configuration
}
impl InstagramAdapter {
    /// Creates a new, unconfigured adapter.
    pub fn new() -> Self {
        Self {}
    }
}
#[async_trait]
impl super::ChannelAdapter for InstagramAdapter {
    /// Stub delivery: logs the outgoing message and always succeeds.
    async fn send_message(
        &self,
        response: BotResponse,
    ) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
        info!(
            "Instagram message would be sent to {}: {}",
            response.user_id, response.content
        );
        // TODO: Implement actual Instagram API integration
        Ok(())
    }
}

View file

@ -0,0 +1,127 @@
pub mod instagram;
pub mod teams;
pub mod whatsapp;
use crate::shared::models::BotResponse;
use async_trait::async_trait;
use log::{debug, info};
use std::collections::HashMap;
use std::sync::Arc;
use tokio::sync::{mpsc, Mutex};
/// Common interface implemented by every outbound message channel
/// (web socket, voice, WhatsApp, Teams, Instagram, ...).
#[async_trait]
pub trait ChannelAdapter: Send + Sync {
    /// Delivers `response` to its recipient over this channel.
    async fn send_message(
        &self,
        response: BotResponse,
    ) -> Result<(), Box<dyn std::error::Error + Send + Sync>>;
}
/// Channel adapter that fans responses out to connected WebSocket clients,
/// keyed by session id.
#[derive(Debug)]
pub struct WebChannelAdapter {
    /// session_id -> sender side of that session's response channel.
    connections: Arc<Mutex<HashMap<String, mpsc::Sender<BotResponse>>>>,
}
impl WebChannelAdapter {
    /// Creates an adapter with no registered connections.
    pub fn new() -> Self {
        Self {
            connections: Arc::new(Mutex::new(HashMap::new())),
        }
    }
    /// Registers (or replaces) the sender for a session.
    pub async fn add_connection(&self, session_id: String, tx: mpsc::Sender<BotResponse>) {
        self.connections.lock().await.insert(session_id, tx);
    }
    /// Drops the sender for a session, if present.
    pub async fn remove_connection(&self, session_id: &str) {
        self.connections.lock().await.remove(session_id);
    }
    /// Sends `message` to one session, returning an error when the session has
    /// no registered connection or the channel send fails.
    pub async fn send_message_to_session(
        &self,
        session_id: &str,
        message: BotResponse,
    ) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
        let connections = self.connections.lock().await;
        if let Some(tx) = connections.get(session_id) {
            if let Err(e) = tx.send(message).await {
                log::error!(
                    "Failed to send message to WebSocket session {}: {}",
                    session_id,
                    e
                );
                return Err(Box::new(e));
            }
            debug!("Message sent to WebSocket session: {}", session_id);
            Ok(())
        } else {
            debug!("No WebSocket connection found for session: {}", session_id);
            Err("No WebSocket connection found".into())
        }
    }
}
#[async_trait]
impl ChannelAdapter for WebChannelAdapter {
    /// Trait-level delivery keyed by `response.session_id`.
    ///
    /// NOTE(review): unlike `send_message_to_session`, a missing connection is
    /// silently ignored here (returns Ok) — confirm the asymmetry is intended.
    async fn send_message(
        &self,
        response: BotResponse,
    ) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
        let connections = self.connections.lock().await;
        if let Some(tx) = connections.get(&response.session_id) {
            tx.send(response).await?;
        }
        Ok(())
    }
}
/// Channel adapter for voice sessions.
///
/// Currently a mock: session tokens are fabricated strings and "sending" a
/// voice response only logs it.
#[derive(Debug)]
pub struct VoiceAdapter {
    /// session_id -> mock room token.
    rooms: Arc<Mutex<HashMap<String, String>>>,
    /// session_id -> sender side of that session's response channel.
    connections: Arc<Mutex<HashMap<String, mpsc::Sender<BotResponse>>>>,
}
impl VoiceAdapter {
    /// Creates an adapter with no rooms or connections.
    pub fn new() -> Self {
        Self {
            rooms: Arc::new(Mutex::new(HashMap::new())),
            connections: Arc::new(Mutex::new(HashMap::new())),
        }
    }
    /// Starts a (mock) voice session and returns its token.
    pub async fn start_voice_session(
        &self,
        session_id: &str,
        user_id: &str,
    ) -> Result<String, Box<dyn std::error::Error + Send + Sync>> {
        info!(
            "Starting voice session for user: {} with session: {}",
            user_id, session_id
        );
        // Placeholder token; a real implementation would mint one from the
        // voice backend.
        let token = format!("mock_token_{}_{}", session_id, user_id);
        self.rooms
            .lock()
            .await
            .insert(session_id.to_string(), token.clone());
        Ok(token)
    }
    /// Removes the session's room entry; idempotent.
    pub async fn stop_voice_session(
        &self,
        session_id: &str,
    ) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
        self.rooms.lock().await.remove(session_id);
        Ok(())
    }
    /// Registers (or replaces) the sender for a session.
    pub async fn add_connection(&self, session_id: String, tx: mpsc::Sender<BotResponse>) {
        self.connections.lock().await.insert(session_id, tx);
    }
    /// Mock delivery: logs the text instead of synthesizing speech.
    pub async fn send_voice_response(
        &self,
        session_id: &str,
        text: &str,
    ) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
        info!("Sending voice response to session {}: {}", session_id, text);
        Ok(())
    }
}
#[async_trait]
impl ChannelAdapter for VoiceAdapter {
    /// Routes the response content through `send_voice_response`.
    async fn send_message(
        &self,
        response: BotResponse,
    ) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
        info!("Sending voice response to: {}", response.user_id);
        self.send_voice_response(&response.session_id, &response.content)
            .await
    }
}

View file

@ -0,0 +1,29 @@
use crate::shared::models::BotResponse;
use async_trait::async_trait;
use log::info;
/// Microsoft Teams channel adapter for sending messages through Teams.
///
/// Currently a stub: messages are logged, not delivered.
pub struct TeamsAdapter {
    // TODO: Add Teams API client configuration
}
impl TeamsAdapter {
    /// Creates a new, unconfigured adapter.
    pub fn new() -> Self {
        Self {}
    }
}
#[async_trait]
impl super::ChannelAdapter for TeamsAdapter {
    /// Stub delivery: logs the outgoing message and always succeeds.
    async fn send_message(
        &self,
        response: BotResponse,
    ) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
        info!(
            "Teams message would be sent to {}: {}",
            response.user_id, response.content
        );
        // TODO: Implement actual Teams API integration
        Ok(())
    }
}

View file

@ -0,0 +1,29 @@
use crate::shared::models::BotResponse;
use async_trait::async_trait;
use log::info;
/// WhatsApp channel adapter for sending messages through WhatsApp.
///
/// Currently a stub: messages are logged, not delivered.
pub struct WhatsAppAdapter {
    // TODO: Add WhatsApp API client configuration
}
impl WhatsAppAdapter {
    /// Creates a new, unconfigured adapter.
    pub fn new() -> Self {
        Self {}
    }
}
#[async_trait]
impl super::ChannelAdapter for WhatsAppAdapter {
    /// Stub delivery: logs the outgoing message and always succeeds.
    async fn send_message(
        &self,
        response: BotResponse,
    ) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
        info!(
            "WhatsApp message would be sent to {}: {}",
            response.user_id, response.content
        );
        // TODO: Implement actual WhatsApp API integration
        Ok(())
    }
}

580
src/core/bot/mod.rs Normal file
View file

@ -0,0 +1,580 @@
use crate::core::config::ConfigManager;
use crate::drive::drive_monitor::DriveMonitor;
use crate::llm::OpenAIClient;
use crate::shared::models::{BotResponse, UserMessage, UserSession};
use crate::shared::state::AppState;
use axum::extract::ws::{Message, WebSocket};
use axum::{
extract::{ws::WebSocketUpgrade, Extension, Query, State},
http::StatusCode,
response::{IntoResponse, Json},
};
use diesel::PgConnection;
use futures::{sink::SinkExt, stream::StreamExt};
use log::{error, info, trace, warn};
use serde_json;
use std::collections::HashMap;
use std::sync::Arc;
use tokio::sync::mpsc;
use tokio::sync::Mutex as AsyncMutex;
use uuid::Uuid;
pub mod channels;
pub mod multimedia;
/// Retrieves the default bot (first active bot) from the database.
///
/// Falls back to `(Uuid::nil(), "default")` when no active bot exists or the
/// query fails; both fallback paths are logged.
pub fn get_default_bot(conn: &mut PgConnection) -> (Uuid, String) {
    use crate::shared::models::schema::bots::dsl::*;
    use diesel::prelude::*;
    let fallback = || (Uuid::nil(), "default".to_string());
    let lookup = bots
        .filter(is_active.eq(true))
        .select((id, name))
        .first::<(Uuid, String)>(conn)
        .optional();
    match lookup {
        Ok(Some(found)) => found,
        Ok(None) => {
            warn!("No active bots found, using nil UUID");
            fallback()
        }
        Err(e) => {
            error!("Failed to query default bot: {}", e);
            fallback()
        }
    }
}
/// Central coordinator for bot conversations: routes user messages to the
/// LLM, streams responses back over channel adapters, and tracks mounted
/// bot drives.
#[derive(Debug)]
pub struct BotOrchestrator {
    /// Shared application state (DB pool, session manager, LLM provider, ...).
    pub state: Arc<AppState>,
    /// bot name -> drive monitor for bots whose storage is mounted.
    pub mounted_bots: Arc<AsyncMutex<HashMap<String, Arc<DriveMonitor>>>>,
}
impl BotOrchestrator {
/// Creates an orchestrator with no mounted bots.
pub fn new(state: Arc<AppState>) -> Self {
    Self {
        state,
        mounted_bots: Arc::new(AsyncMutex::new(HashMap::new())),
    }
}
// ... (All existing methods unchanged) ...
/// No-op retained for API compatibility: bot mounting is handled elsewhere.
/// Logs the call and returns `Ok(())`.
pub async fn mount_all_bots(&self) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
    // No-op: bot mounting is handled elsewhere
    info!("mount_all_bots called (no-op)");
    Ok(())
}
// Stream response to user via LLM
/// Streams an LLM response for `message` to the caller via `response_tx`.
///
/// Flow: (1) gather session, context, history, and per-bot model config in a
/// single blocking task; (2) launch the LLM stream; (3) forward each chunk as
/// a partial `BotResponse` (is_complete = false), then send one final
/// complete response with the full text; (4) persist the bot response.
///
/// # Errors
/// Fails when the ids in `message` don't parse as UUIDs, the session is
/// missing, or channel sends / DB writes fail.
pub async fn stream_response(
    &self,
    message: UserMessage,
    response_tx: mpsc::Sender<BotResponse>,
) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
    trace!(
        "Streaming response for user: {}, session: {}",
        message.user_id,
        message.session_id
    );
    let user_id = Uuid::parse_str(&message.user_id)?;
    let session_id = Uuid::parse_str(&message.session_id)?;
    // NOTE(review): an unparseable bot_id silently becomes the nil UUID,
    // which then drives the config lookup below — confirm that's intended.
    let bot_id = Uuid::parse_str(&message.bot_id).unwrap_or_default();
    // All database operations in one blocking section
    let (session, context_data, history, model, key) = {
        let state_clone = self.state.clone();
        tokio::task::spawn_blocking(
            move || -> Result<_, Box<dyn std::error::Error + Send + Sync>> {
                // Get session
                let session = {
                    let mut sm = state_clone.session_manager.blocking_lock();
                    sm.get_session_by_id(session_id)?
                }
                .ok_or_else(|| "Session not found")?;
                // Save user message
                {
                    let mut sm = state_clone.session_manager.blocking_lock();
                    sm.save_message(session.id, user_id, 1, &message.content, 1)?;
                }
                // Get context and history.
                // NOTE(review): block_on(Handle::current()) inside
                // spawn_blocking re-enters the runtime from a blocking thread;
                // verify this cannot deadlock under load.
                let context_data = {
                    let sm = state_clone.session_manager.blocking_lock();
                    let rt = tokio::runtime::Handle::current();
                    rt.block_on(async {
                        sm.get_session_context_data(&session.id, &session.user_id)
                            .await
                    })?
                };
                let history = {
                    let mut sm = state_clone.session_manager.blocking_lock();
                    sm.get_conversation_history(session.id, user_id)?
                };
                // Get model config
                let config_manager = ConfigManager::new(state_clone.conn.clone());
                let model = config_manager
                    .get_config(&bot_id, "llm-model", Some("gpt-3.5-turbo"))
                    .unwrap_or_else(|_| "gpt-3.5-turbo".to_string());
                let key = config_manager
                    .get_config(&bot_id, "llm-key", Some(""))
                    .unwrap_or_default();
                Ok((session, context_data, history, model, key))
            },
        )
        .await??
    };
    // Build messages
    let system_prompt = std::env::var("SYSTEM_PROMPT")
        .unwrap_or_else(|_| "You are a helpful assistant.".to_string());
    let messages = OpenAIClient::build_messages(&system_prompt, &context_data, &history);
    // Stream from LLM; generation errors are logged inside the task and
    // simply end the chunk stream.
    let (stream_tx, mut stream_rx) = mpsc::channel::<String>(100);
    let llm = self.state.llm_provider.clone();
    tokio::spawn(async move {
        if let Err(e) = llm
            .generate_stream("", &messages, stream_tx, &model, &key)
            .await
        {
            error!("LLM streaming error: {}", e);
        }
    });
    let mut full_response = String::new();
    let mut chunk_count = 0;
    while let Some(chunk) = stream_rx.recv().await {
        chunk_count += 1;
        info!("Received LLM chunk #{}: {:?}", chunk_count, chunk);
        full_response.push_str(&chunk);
        // Partial response: is_complete = false tells the client to keep
        // appending.
        let response = BotResponse {
            bot_id: message.bot_id.clone(),
            user_id: message.user_id.clone(),
            session_id: message.session_id.clone(),
            channel: message.channel.clone(),
            content: chunk,
            message_type: 2,
            stream_token: None,
            is_complete: false,
            suggestions: Vec::new(),
            context_name: None,
            context_length: 0,
            context_max_length: 0,
        };
        info!("Sending streaming chunk to WebSocket");
        if let Err(e) = response_tx.send(response).await {
            error!("Failed to send streaming chunk: {}", e);
            break;
        }
    }
    info!(
        "LLM streaming complete, received {} chunks, total length: {}",
        chunk_count,
        full_response.len()
    );
    // Send final complete response carrying the accumulated full text.
    let final_response = BotResponse {
        bot_id: message.bot_id.clone(),
        user_id: message.user_id.clone(),
        session_id: message.session_id.clone(),
        channel: message.channel.clone(),
        content: full_response.clone(),
        message_type: 2,
        stream_token: None,
        is_complete: true,
        suggestions: Vec::new(),
        context_name: None,
        context_length: 0,
        context_max_length: 0,
    };
    info!("Sending final complete response to WebSocket");
    response_tx.send(final_response).await?;
    info!("Final response sent successfully");
    // Save bot response in blocking context
    let state_for_save = self.state.clone();
    let full_response_clone = full_response.clone();
    tokio::task::spawn_blocking(
        move || -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
            let mut sm = state_for_save.session_manager.blocking_lock();
            sm.save_message(session.id, user_id, 2, &full_response_clone, 2)?;
            Ok(())
        },
    )
    .await??;
    Ok(())
}
// ... (Other methods unchanged) ...
/// Returns every stored session belonging to `user_id`.
pub async fn get_user_sessions(
    &self,
    user_id: Uuid,
) -> Result<Vec<UserSession>, Box<dyn std::error::Error + Send + Sync>> {
    let sessions = {
        let mut session_manager = self.state.session_manager.lock().await;
        session_manager.get_user_sessions(user_id)?
    };
    Ok(sessions)
}
/// Returns the stored conversation history for the given session/user pair.
pub async fn get_conversation_history(
    &self,
    session_id: Uuid,
    user_id: Uuid,
) -> Result<Vec<(String, String)>, Box<dyn std::error::Error + Send + Sync>> {
    let history = {
        let mut session_manager = self.state.session_manager.lock().await;
        session_manager.get_conversation_history(session_id, user_id)?
    };
    Ok(history)
}
// ... (Remaining BotOrchestrator methods unchanged) ...
}
/* Axum handlers placeholders that delegate to BotOrchestrator where appropriate */
/// WebSocket handler that upgrades HTTP connection to WebSocket.
///
/// Requires `session_id` and `user_id` query parameters (both UUIDs);
/// responds 400 when either is missing or unparseable, otherwise hands the
/// socket to `handle_websocket`.
pub async fn websocket_handler(
    ws: WebSocketUpgrade,
    State(state): State<Arc<AppState>>,
    Query(params): Query<HashMap<String, String>>,
) -> impl IntoResponse {
    let session_id = params
        .get("session_id")
        .and_then(|s| Uuid::parse_str(s).ok());
    let user_id = params.get("user_id").and_then(|s| Uuid::parse_str(s).ok());
    if session_id.is_none() || user_id.is_none() {
        return (
            StatusCode::BAD_REQUEST,
            Json(serde_json::json!({ "error": "session_id and user_id are required" })),
        )
            .into_response();
    }
    // unwrap is safe: both Options were checked just above.
    ws.on_upgrade(move |socket| {
        handle_websocket(socket, state, session_id.unwrap(), user_id.unwrap())
    })
    .into_response()
}
/// Handles an individual WebSocket connection
///
/// Lifecycle:
/// 1. Split the socket and register a bounded mpsc sender for this session
///    with both the web adapter and the orchestrator's `response_channels`.
/// 2. Fire-and-forget execution of the bot's `start.bas` script, if present.
/// 3. Send a JSON "connected" welcome frame.
/// 4. Pump messages in both directions until either side terminates, then
///    deregister the session from both maps.
async fn handle_websocket(
    socket: WebSocket,
    state: Arc<AppState>,
    session_id: Uuid,
    user_id: Uuid,
) {
    let (mut sender, mut receiver) = socket.split();
    // Create a channel for this WebSocket connection; outbound bot responses
    // are queued here (bounded at 100) and forwarded to the socket below.
    let (tx, mut rx) = mpsc::channel::<BotResponse>(100);
    // Register this connection with the web adapter
    state
        .web_adapter
        .add_connection(session_id.to_string(), tx.clone())
        .await;
    // Also register in response_channels for BotOrchestrator
    {
        let mut channels = state.response_channels.lock().await;
        channels.insert(session_id.to_string(), tx.clone());
    }
    info!(
        "WebSocket connected for session: {}, user: {}",
        session_id, user_id
    );
    // Execute start.bas if it exists
    let state_for_start = state.clone();
    let session_for_start = {
        let mut sm = state.session_manager.lock().await;
        sm.get_session_by_id(session_id).ok().and_then(|opt| opt)
    };
    if let Some(session_clone) = session_for_start {
        // Script compilation/execution is synchronous, so run it off the
        // async runtime's worker threads.
        tokio::task::spawn_blocking(move || {
            use crate::basic::ScriptService;
            let bot_name = "default"; // TODO: Get from session
            let start_script_path =
                format!("./work/{}.gbai/{}.gbdialog/start.bas", bot_name, bot_name);
            if let Ok(start_content) = std::fs::read_to_string(&start_script_path) {
                info!("Executing start.bas for session {}", session_id);
                let script_service = ScriptService::new(state_for_start, session_clone);
                match script_service.compile(&start_content) {
                    Ok(ast) => {
                        if let Err(e) = script_service.run(&ast) {
                            error!("Failed to execute start.bas: {}", e);
                        } else {
                            info!("start.bas executed successfully for session {}", session_id);
                        }
                    }
                    Err(e) => {
                        error!("Failed to compile start.bas: {}", e);
                    }
                }
            } else {
                info!("No start.bas found for bot {}", bot_name);
            }
        });
    }
    // Send initial welcome message
    let welcome = serde_json::json!({
        "type": "connected",
        "session_id": session_id,
        "user_id": user_id,
        "message": "Connected to bot server"
    });
    if let Ok(welcome_str) = serde_json::to_string(&welcome) {
        info!("Sending welcome message to session {}", session_id);
        if let Err(e) = sender.send(Message::Text(welcome_str.into())).await {
            error!("Failed to send welcome message: {}", e);
        }
    }
    // Spawn task to send messages from the channel to the WebSocket
    let mut send_task = tokio::spawn(async move {
        while let Some(response) = rx.recv().await {
            if let Ok(json_str) = serde_json::to_string(&response) {
                // A send error means the socket is gone; stop draining.
                if sender.send(Message::Text(json_str.into())).await.is_err() {
                    break;
                }
            }
        }
    });
    // Handle incoming messages from the WebSocket
    let state_clone = state.clone();
    let mut recv_task = tokio::spawn(async move {
        while let Some(Ok(msg)) = receiver.next().await {
            info!("WebSocket received raw message type: {:?}", msg);
            match msg {
                Message::Text(text) => {
                    info!(
                        "Received WebSocket text message (length {}): {}",
                        text.len(),
                        text
                    );
                    match serde_json::from_str::<UserMessage>(&text) {
                        Ok(user_msg) => {
                            info!(
                                "Successfully parsed user message from session: {}, content: {}",
                                session_id, user_msg.content
                            );
                            // Process the message through the bot system
                            if let Err(e) = process_user_message(
                                state_clone.clone(),
                                session_id,
                                user_id,
                                user_msg,
                            )
                            .await
                            {
                                error!("Error processing user message: {}", e);
                            }
                        }
                        Err(e) => {
                            error!(
                                "Failed to parse user message from session {}: {} - Parse error: {}",
                                session_id, text, e
                            );
                        }
                    }
                }
                Message::Close(_) => {
                    info!(
                        "WebSocket close message received for session: {}",
                        session_id
                    );
                    break;
                }
                Message::Ping(_data) => {
                    // Pings are automatically handled by axum
                }
                Message::Pong(_) => {
                    // Pongs are automatically handled by axum
                }
                _ => {}
            }
        }
    });
    // Wait for either task to finish; abort the other so neither pump task
    // is leaked after the connection ends.
    tokio::select! {
        _ = (&mut send_task) => {
            recv_task.abort();
        }
        _ = (&mut recv_task) => {
            send_task.abort();
        }
    }
    // Clean up: remove the connection from the adapter
    state
        .web_adapter
        .remove_connection(&session_id.to_string())
        .await;
    // Also remove from response_channels
    {
        let mut channels = state.response_channels.lock().await;
        channels.remove(&session_id.to_string());
    }
    info!("WebSocket disconnected for session: {}", session_id);
}
/// Process a user message received via WebSocket
async fn process_user_message(
state: Arc<AppState>,
session_id: Uuid,
user_id: Uuid,
user_msg: UserMessage,
) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
info!(
"Processing message from user {} in session {}: {}",
user_id, session_id, user_msg.content
);
// Get the response channel for this session
let tx = {
let channels = state.response_channels.lock().await;
channels.get(&session_id.to_string()).cloned()
};
if let Some(response_tx) = tx {
// Use BotOrchestrator to stream the response
let orchestrator = BotOrchestrator::new(state.clone());
if let Err(e) = orchestrator.stream_response(user_msg, response_tx).await {
error!("Failed to stream response: {}", e);
}
} else {
error!("No response channel found for session {}", session_id);
}
Ok(())
}
/// Create a new bot (placeholder implementation)
///
/// Reads `bot_name` from the JSON payload (defaults to "default") and
/// inserts an active bot row with default OpenAI LLM settings.
pub async fn create_bot_handler(
    Extension(state): Extension<Arc<AppState>>,
    Json(payload): Json<HashMap<String, String>>,
) -> impl IntoResponse {
    let bot_name = match payload.get("bot_name") {
        Some(n) => n.clone(),
        None => "default".to_string(),
    };
    // Check out a pooled database connection; bail out with 500 on failure.
    let mut conn = match state.conn.get() {
        Ok(c) => c,
        Err(e) => {
            return (
                StatusCode::INTERNAL_SERVER_ERROR,
                Json(serde_json::json!({ "error": format!("Database error: {}", e) })),
            )
        }
    };
    use crate::shared::models::schema::bots::dsl::*;
    use diesel::prelude::*;
    // Minimal bot row: active, OpenAI provider, gpt-4 model, no context.
    let row = (
        name.eq(&bot_name),
        description.eq(format!("Bot created via API: {}", bot_name)),
        llm_provider.eq("openai"),
        llm_config.eq(serde_json::json!({"model": "gpt-4"})),
        context_provider.eq("none"),
        context_config.eq(serde_json::json!({})),
        is_active.eq(true),
    );
    let insert_result = diesel::insert_into(bots).values(&row).execute(&mut conn);
    match insert_result {
        Ok(_) => (
            StatusCode::OK,
            Json(serde_json::json!({
                "status": format!("bot '{}' created successfully", bot_name),
                "bot_name": bot_name
            })),
        ),
        Err(e) => (
            StatusCode::INTERNAL_SERVER_ERROR,
            Json(serde_json::json!({ "error": format!("Failed to create bot: {}", e) })),
        ),
    }
}
/// Mount an existing bot (placeholder implementation)
///
/// Echoes the requested `bot_guid` (empty string if absent) without doing
/// any actual mounting yet.
pub async fn mount_bot_handler(
    Extension(_state): Extension<Arc<AppState>>,
    Json(payload): Json<HashMap<String, String>>,
) -> impl IntoResponse {
    let bot_guid = match payload.get("bot_guid") {
        Some(g) => g.clone(),
        None => String::new(),
    };
    (
        StatusCode::OK,
        Json(serde_json::json!({ "status": format!("bot '{}' mounted", bot_guid) })),
    )
}
/// Handle user input for a bot (placeholder implementation)
///
/// Acknowledges `session_id` and `input` from the payload (missing keys
/// default to empty strings) without processing anything yet.
pub async fn handle_user_input_handler(
    Extension(_state): Extension<Arc<AppState>>,
    Json(payload): Json<HashMap<String, String>>,
) -> impl IntoResponse {
    let fetch = |key: &str| payload.get(key).cloned().unwrap_or_default();
    let session_id = fetch("session_id");
    let user_input = fetch("input");
    (
        StatusCode::OK,
        Json(
            serde_json::json!({ "status": format!("input '{}' processed for session {}", user_input, session_id) }),
        ),
    )
}
/// Retrieve user sessions (placeholder implementation)
///
/// Always reports an empty session list for now.
pub async fn get_user_sessions_handler(
    Extension(_state): Extension<Arc<AppState>>,
    Json(_payload): Json<HashMap<String, String>>,
) -> impl IntoResponse {
    let body = serde_json::json!({ "sessions": [] });
    (StatusCode::OK, Json(body))
}
/// Retrieve conversation history (placeholder implementation)
///
/// Always reports an empty history for now.
pub async fn get_conversation_history_handler(
    Extension(_state): Extension<Arc<AppState>>,
    Json(_payload): Json<HashMap<String, String>>,
) -> impl IntoResponse {
    let body = serde_json::json!({ "history": [] });
    (StatusCode::OK, Json(body))
}
/// Send warning (placeholder implementation)
///
/// Acknowledges the request without acting on the payload.
pub async fn send_warning_handler(
    Extension(_state): Extension<Arc<AppState>>,
    Json(_payload): Json<HashMap<String, String>>,
) -> impl IntoResponse {
    let body = serde_json::json!({ "status": "warning acknowledged" });
    (StatusCode::OK, Json(body))
}

542
src/core/bot/multimedia.rs Normal file
View file

@ -0,0 +1,542 @@
//! Multimedia Message Handling Module
//!
//! This module provides support for handling various multimedia message types including
//! images, videos, audio, documents, and web search results.
//!
//! Key features:
//! - Multiple media type support (images, videos, audio, documents)
//! - Media upload and download handling
//! - Thumbnail generation
//! - Web search integration
//! - Storage abstraction for S3-compatible backends
//! - URL processing and validation
use crate::shared::models::{BotResponse, UserMessage};
use anyhow::Result;
use async_trait::async_trait;
use base64::{engine::general_purpose::STANDARD, Engine};
use serde::{Deserialize, Serialize};
use uuid::Uuid;
/// All message payload variants the multimedia pipeline understands.
///
/// Serialized with an internal `"type"` tag in snake_case, e.g.
/// `{"type": "image", "url": "..."}`.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(tag = "type", rename_all = "snake_case")]
pub enum MultimediaMessage {
    /// Plain text content.
    Text {
        content: String,
    },
    /// A still image referenced by URL.
    Image {
        url: String,
        caption: Option<String>,
        mime_type: String,
    },
    /// A video referenced by URL with optional thumbnail and duration.
    Video {
        url: String,
        thumbnail_url: Option<String>,
        caption: Option<String>,
        duration: Option<u32>,
        mime_type: String,
    },
    /// An audio clip referenced by URL with optional duration.
    Audio {
        url: String,
        duration: Option<u32>,
        mime_type: String,
    },
    /// An arbitrary file attachment.
    Document {
        url: String,
        filename: String,
        mime_type: String,
    },
    /// A web-search request; `results` may be empty until the search runs.
    WebSearch {
        query: String,
        results: Vec<SearchResult>,
    },
    /// A geographic coordinate with optional place metadata.
    Location {
        latitude: f64,
        longitude: f64,
        name: Option<String>,
        address: Option<String>,
    },
    /// An invitation to join an online meeting.
    /// NOTE(review): duration unit (seconds vs minutes) is not established
    /// here — confirm against producers of this variant.
    MeetingInvite {
        meeting_id: String,
        meeting_url: String,
        start_time: Option<String>,
        duration: Option<u32>,
        participants: Vec<String>,
    },
}
/// A single hit returned by [`MultimediaHandler::web_search`].
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SearchResult {
    pub title: String,
    pub url: String,
    // Short excerpt of the matching page content.
    pub snippet: String,
    // Optional preview image URL.
    pub thumbnail: Option<String>,
}
/// Payload for uploading a media file.
#[derive(Debug, Serialize, Deserialize)]
pub struct MediaUploadRequest {
    pub file_name: String,
    // MIME content type of `data`.
    pub content_type: String,
    // Raw file bytes.
    pub data: Vec<u8>,
    // Owner and session, used to namespace the storage key.
    pub user_id: String,
    pub session_id: String,
}
/// Result of a successful media upload.
#[derive(Debug, Serialize, Deserialize)]
pub struct MediaUploadResponse {
    // Server-generated UUID identifying the stored object.
    pub media_id: String,
    // Location of the stored object (https:// or file:// scheme).
    pub url: String,
    pub thumbnail_url: Option<String>,
}
/// Trait for handling multimedia messages
///
/// Implementations are shared across async tasks, hence `Send + Sync`.
#[async_trait]
pub trait MultimediaHandler: Send + Sync {
    /// Process an incoming multimedia message and produce a bot response.
    async fn process_multimedia(
        &self,
        message: MultimediaMessage,
        user_id: &str,
        session_id: &str,
    ) -> Result<BotResponse>;
    /// Upload media file to storage, returning its id/URL metadata.
    async fn upload_media(&self, request: MediaUploadRequest) -> Result<MediaUploadResponse>;
    /// Download media file from URL as raw bytes.
    async fn download_media(&self, url: &str) -> Result<Vec<u8>>;
    /// Perform web search, returning at most `max_results` hits.
    async fn web_search(&self, query: &str, max_results: usize) -> Result<Vec<SearchResult>>;
    /// Generate thumbnail for video/image, returning the thumbnail's URL.
    async fn generate_thumbnail(&self, media_url: &str) -> Result<String>;
}
/// Default implementation for multimedia handling
///
/// Both dependencies are optional: without `storage_client`, uploads fall
/// back to the local filesystem; `search_api_key` is currently unused by
/// the mock search implementation.
#[derive(Debug)]
pub struct DefaultMultimediaHandler {
    // S3-compatible client; None means local-filesystem fallback.
    storage_client: Option<aws_sdk_s3::Client>,
    // Reserved for a real search backend.
    search_api_key: Option<String>,
}
impl DefaultMultimediaHandler {
    /// Creates a handler. Both arguments are optional: without a storage
    /// client, uploads fall back to local disk; the search key is reserved
    /// for a real search backend (current search is mocked).
    pub fn new(storage_client: Option<aws_sdk_s3::Client>, search_api_key: Option<String>) -> Self {
        Self {
            storage_client,
            search_api_key,
        }
    }
    /// The configured S3-compatible client, if any.
    pub fn storage_client(&self) -> &Option<aws_sdk_s3::Client> {
        &self.storage_client
    }
    /// The configured search API key, if any.
    pub fn search_api_key(&self) -> &Option<String> {
        &self.search_api_key
    }
}
/// Builds a complete, non-streaming text `BotResponse` on the "multimedia"
/// channel; shared by every branch of `process_multimedia` so the
/// twelve-field struct literal is written only once.
fn multimedia_text_response(user_id: &str, session_id: &str, content: String) -> BotResponse {
    BotResponse {
        bot_id: "default".to_string(),
        user_id: user_id.to_string(),
        session_id: session_id.to_string(),
        channel: "multimedia".to_string(),
        content,
        message_type: 0,
        stream_token: None,
        is_complete: true,
        suggestions: Vec::new(),
        context_name: None,
        context_length: 0,
        context_max_length: 0,
    }
}
#[async_trait]
impl MultimediaHandler for DefaultMultimediaHandler {
    /// Turns an incoming multimedia message into a textual bot response.
    ///
    /// # Errors
    /// Propagates failures from `web_search` for `WebSearch` messages;
    /// all other variants are infallible.
    async fn process_multimedia(
        &self,
        message: MultimediaMessage,
        user_id: &str,
        session_id: &str,
    ) -> Result<BotResponse> {
        // Each branch only computes the response text; the response envelope
        // is identical for all variants.
        let content = match message {
            // Plain text passes through unchanged.
            MultimediaMessage::Text { content } => content,
            MultimediaMessage::Image { url, caption, .. } => {
                log::debug!("Processing image from URL: {}", url);
                format!(
                    "I see you've shared an image from {}{}. {}",
                    url,
                    caption
                        .as_ref()
                        .map(|c| format!(" with caption: {}", c))
                        .unwrap_or_default(),
                    "Let me analyze this for you."
                )
            }
            MultimediaMessage::Video {
                url,
                caption,
                duration,
                ..
            } => {
                log::debug!("Processing video from URL: {}", url);
                format!(
                    "You've shared a video from {}{}{}. Processing video content...",
                    url,
                    duration.map(|d| format!(" ({}s)", d)).unwrap_or_default(),
                    caption
                        .as_ref()
                        .map(|c| format!(" - {}", c))
                        .unwrap_or_default()
                )
            }
            MultimediaMessage::WebSearch { query, .. } => {
                // Run the search now; any pre-populated results are ignored.
                let results = self.web_search(&query, 5).await?;
                if results.is_empty() {
                    format!("No results found for: {}", query)
                } else {
                    let results_text = results
                        .iter()
                        .enumerate()
                        .map(|(i, r)| {
                            format!("{}. [{}]({})\n   {}", i + 1, r.title, r.url, r.snippet)
                        })
                        .collect::<Vec<_>>()
                        .join("\n\n");
                    format!("Search results for \"{}\":\n\n{}", query, results_text)
                }
            }
            MultimediaMessage::MeetingInvite {
                meeting_url,
                start_time,
                ..
            } => format!(
                "Meeting invite received. Join at: {}{}",
                meeting_url,
                start_time
                    .as_ref()
                    .map(|t| format!("\nScheduled for: {}", t))
                    .unwrap_or_default()
            ),
            // Audio, Document, Location: acknowledged but not yet processed.
            _ => "Message received and processing...".to_string(),
        };
        Ok(multimedia_text_response(user_id, session_id, content))
    }
    /// Stores a media payload under `media/<user>/<session>/<file>`, using
    /// S3 when a storage client is configured and the local filesystem
    /// otherwise.
    ///
    /// # Errors
    /// Fails on S3 upload errors or local filesystem I/O errors.
    async fn upload_media(&self, request: MediaUploadRequest) -> Result<MediaUploadResponse> {
        let media_id = Uuid::new_v4().to_string();
        let key = format!(
            "media/{}/{}/{}",
            request.user_id, request.session_id, request.file_name
        );
        if let Some(client) = &self.storage_client {
            // Upload to S3
            client
                .put_object()
                .bucket("botserver-media")
                .key(&key)
                .body(request.data.into())
                .content_type(&request.content_type)
                .send()
                .await?;
            let url = format!("https://storage.botserver.com/{}", key);
            Ok(MediaUploadResponse {
                media_id,
                url,
                thumbnail_url: None,
            })
        } else {
            // Fallback to local storage; `parent()` is always Some because
            // the key contains at least one path separator.
            let local_path = format!("./media/{}", key);
            std::fs::create_dir_all(std::path::Path::new(&local_path).parent().unwrap())?;
            std::fs::write(&local_path, request.data)?;
            Ok(MediaUploadResponse {
                media_id,
                url: format!("file://{}", local_path),
                thumbnail_url: None,
            })
        }
    }
    /// Fetches raw bytes for an `http(s)://` or `file://` URL.
    ///
    /// # Errors
    /// Fails on network/IO errors or an unsupported URL scheme.
    async fn download_media(&self, url: &str) -> Result<Vec<u8>> {
        if url.starts_with("http://") || url.starts_with("https://") {
            let response = reqwest::get(url).await?;
            Ok(response.bytes().await?.to_vec())
        } else if url.starts_with("file://") {
            let path = url.strip_prefix("file://").unwrap();
            Ok(std::fs::read(path)?)
        } else {
            Err(anyhow::anyhow!("Unsupported URL scheme: {}", url))
        }
    }
    /// Returns up to `max_results` search hits.
    ///
    /// Currently a mock: no external search API is called yet.
    async fn web_search(&self, query: &str, max_results: usize) -> Result<Vec<SearchResult>> {
        let mock_results = vec![
            SearchResult {
                title: format!("Result 1 for: {}", query),
                url: "https://example.com/1".to_string(),
                snippet: "This is a sample search result snippet...".to_string(),
                thumbnail: None,
            },
            SearchResult {
                title: format!("Result 2 for: {}", query),
                url: "https://example.com/2".to_string(),
                snippet: "Another sample search result...".to_string(),
                thumbnail: None,
            },
        ];
        Ok(mock_results.into_iter().take(max_results).collect())
    }
    /// Returns a thumbnail URL for the given media.
    ///
    /// Currently a pass-through: real thumbnail generation is not yet
    /// implemented, so the original URL is returned unchanged.
    async fn generate_thumbnail(&self, media_url: &str) -> Result<String> {
        Ok(media_url.to_string())
    }
}
/// Inherent extension of `UserMessage` for multimedia classification.
impl UserMessage {
    /// Classifies the raw message content into a `MultimediaMessage`.
    ///
    /// Rules, in order:
    /// - content starting with "http" containing a known image extension
    ///   (`.jpg`, `.png`, `.gif`) becomes `Image`, with a MIME type matching
    ///   the extension (previously everything was mislabeled `image/jpeg`);
    /// - a known video extension (`.mp4`, `.webm`, `.mov`) becomes `Video`,
    ///   again with a matching MIME type;
    /// - content starting with "/search " becomes `WebSearch` (empty results);
    /// - anything else becomes `Text`.
    pub fn to_multimedia(&self) -> MultimediaMessage {
        if self.content.starts_with("http") {
            if self.content.contains(".jpg")
                || self.content.contains(".png")
                || self.content.contains(".gif")
            {
                // Pick the MIME type that actually matches the extension.
                let mime_type = if self.content.contains(".png") {
                    "image/png"
                } else if self.content.contains(".gif") {
                    "image/gif"
                } else {
                    "image/jpeg"
                };
                MultimediaMessage::Image {
                    url: self.content.clone(),
                    caption: None,
                    mime_type: mime_type.to_string(),
                }
            } else if self.content.contains(".mp4")
                || self.content.contains(".webm")
                || self.content.contains(".mov")
            {
                let mime_type = if self.content.contains(".webm") {
                    "video/webm"
                } else if self.content.contains(".mov") {
                    "video/quicktime"
                } else {
                    "video/mp4"
                };
                MultimediaMessage::Video {
                    url: self.content.clone(),
                    thumbnail_url: None,
                    caption: None,
                    duration: None,
                    mime_type: mime_type.to_string(),
                }
            } else {
                MultimediaMessage::Text {
                    content: self.content.clone(),
                }
            }
        } else if self.content.starts_with("/search ") {
            let query = self
                .content
                .strip_prefix("/search ")
                .unwrap_or(&self.content);
            MultimediaMessage::WebSearch {
                query: query.to_string(),
                results: Vec::new(),
            }
        } else {
            MultimediaMessage::Text {
                content: self.content.clone(),
            }
        }
    }
}
// ============================================================================
// REST API Handlers
// ============================================================================
use crate::shared::state::AppState;
use axum::{
extract::{Path, State},
http::StatusCode,
response::IntoResponse,
Json,
};
use std::sync::Arc;
/// Upload media file
///
/// Stores the posted payload via the multimedia handler and returns the
/// resulting metadata, or a 500 with the error text.
pub async fn upload_media_handler(
    State(state): State<Arc<AppState>>,
    Json(request): Json<MediaUploadRequest>,
) -> impl IntoResponse {
    let handler = DefaultMultimediaHandler::new(state.drive.clone(), None);
    let outcome = handler.upload_media(request).await;
    match outcome {
        Err(e) => (
            StatusCode::INTERNAL_SERVER_ERROR,
            Json(serde_json::json!({"error": e.to_string()})),
        ),
        Ok(response) => (StatusCode::OK, Json(serde_json::json!(response))),
    }
}
/// Download media file by ID
///
/// Resolves the storage URL from the id (production would consult persisted
/// metadata) and returns the bytes base64-encoded in a JSON envelope.
pub async fn download_media_handler(
    State(state): State<Arc<AppState>>,
    Path(media_id): Path<String>,
) -> impl IntoResponse {
    let handler = DefaultMultimediaHandler::new(state.drive.clone(), None);
    let url = format!("https://storage.botserver.com/media/{}", media_id);
    match handler.download_media(&url).await {
        Err(e) => (
            StatusCode::INTERNAL_SERVER_ERROR,
            Json(serde_json::json!({"error": e.to_string()})),
        ),
        Ok(data) => (
            StatusCode::OK,
            Json(serde_json::json!({
                "media_id": media_id,
                "size": data.len(),
                "data": STANDARD.encode(&data)
            })),
        ),
    }
}
/// Generate thumbnail for media
///
/// Resolves the storage URL from the id and asks the handler for a
/// thumbnail URL (currently a pass-through of the original URL).
pub async fn generate_thumbnail_handler(
    State(state): State<Arc<AppState>>,
    Path(media_id): Path<String>,
) -> impl IntoResponse {
    let handler = DefaultMultimediaHandler::new(state.drive.clone(), None);
    let url = format!("https://storage.botserver.com/media/{}", media_id);
    match handler.generate_thumbnail(&url).await {
        Err(e) => (
            StatusCode::INTERNAL_SERVER_ERROR,
            Json(serde_json::json!({"error": e.to_string()})),
        ),
        Ok(thumbnail_url) => (
            StatusCode::OK,
            Json(serde_json::json!({
                "media_id": media_id,
                "thumbnail_url": thumbnail_url
            })),
        ),
    }
}
/// Perform web search
///
/// Reads `query` (default: empty) and `max_results` (default: 10) from a
/// free-form JSON payload and returns the handler's search hits.
pub async fn web_search_handler(
    State(state): State<Arc<AppState>>,
    Json(payload): Json<serde_json::Value>,
) -> impl IntoResponse {
    let query = payload
        .get("query")
        .and_then(|q| q.as_str())
        .unwrap_or_default();
    let max_results = payload
        .get("max_results")
        .and_then(|m| m.as_u64())
        .unwrap_or(10) as usize;
    let handler = DefaultMultimediaHandler::new(state.drive.clone(), None);
    match handler.web_search(query, max_results).await {
        Err(e) => (
            StatusCode::INTERNAL_SERVER_ERROR,
            Json(serde_json::json!({"error": e.to_string()})),
        ),
        Ok(results) => (
            StatusCode::OK,
            Json(serde_json::json!({
                "query": query,
                "results": results
            })),
        ),
    }
}

1
src/core/bot/ui.rs Normal file
View file

@ -0,0 +1 @@

View file

@ -0,0 +1,10 @@
#[cfg(test)]
mod tests {
    use super::*;
    use crate::tests::test_util;
    // Smoke test: verifies the shared test environment initializes without
    // panicking; the trailing assertion is a deliberate placeholder.
    #[test]
    fn test_config_module() {
        test_util::setup();
        assert!(true, "Basic config module test");
    }
}

240
src/core/config/mod.rs Normal file
View file

@ -0,0 +1,240 @@
use crate::shared::utils::DbPool;
use diesel::prelude::*;
use diesel::r2d2::{ConnectionManager, PooledConnection};
use std::collections::HashMap;
use uuid::Uuid;
/// Legacy alias kept for backward compatibility; prefer [`AppConfig`] in new code.
pub type Config = AppConfig;
/// Aggregated application configuration.
///
/// Built either from the `bot_configuration` table (`from_database`) or
/// purely from environment variables (`from_env`).
#[derive(Clone, Debug)]
pub struct AppConfig {
    pub drive: DriveConfig,
    pub server: ServerConfig,
    pub email: EmailConfig,
    // Root directory for served sites (config key SITES_ROOT).
    pub site_path: String,
}
/// Object-storage ("drive") credentials, read from the environment
/// variables DRIVE_SERVER / DRIVE_ACCESSKEY / DRIVE_SECRET.
#[derive(Clone, Debug)]
pub struct DriveConfig {
    pub server: String,
    pub access_key: String,
    pub secret_key: String,
}
/// HTTP server bind address (config keys SERVER_HOST / SERVER_PORT).
#[derive(Clone, Debug)]
pub struct ServerConfig {
    pub host: String,
    pub port: u16,
}
/// Mail settings, combining IMAP (receive) and SMTP (send) endpoints
/// (config keys EMAIL_*).
#[derive(Clone, Debug)]
pub struct EmailConfig {
    // IMAP server and port.
    pub server: String,
    pub port: u16,
    pub username: String,
    pub password: String,
    // "From" address used on outgoing mail.
    pub from: String,
    // SMTP server and port used for sending.
    pub smtp_server: String,
    pub smtp_port: u16,
}
impl AppConfig {
pub fn from_database(pool: &DbPool) -> Result<Self, diesel::result::Error> {
use crate::shared::models::schema::bot_configuration::dsl::*;
let mut conn = pool.get().map_err(|e| {
diesel::result::Error::DatabaseError(
diesel::result::DatabaseErrorKind::UnableToSendCommand,
Box::new(e.to_string()),
)
})?;
let config_map: HashMap<String, (Uuid, Uuid, String, String, String, bool)> =
bot_configuration
.select((
id,
bot_id,
config_key,
config_value,
config_type,
is_encrypted,
))
.load::<(Uuid, Uuid, String, String, String, bool)>(&mut conn)
.unwrap_or_default()
.into_iter()
.map(|(_, _, key, value, _, _)| {
(
key.clone(),
(Uuid::nil(), Uuid::nil(), key, value, String::new(), false),
)
})
.collect();
let mut get_str = |key: &str, default: &str| -> String {
bot_configuration
.filter(config_key.eq(key))
.select(config_value)
.first::<String>(&mut conn)
.unwrap_or_else(|_| default.to_string())
};
let _get_u32 = |key: &str, default: u32| -> u32 {
config_map
.get(key)
.and_then(|v| v.3.parse().ok())
.unwrap_or(default)
};
let get_u16 = |key: &str, default: u16| -> u16 {
config_map
.get(key)
.and_then(|v| v.3.parse().ok())
.unwrap_or(default)
};
let _get_bool = |key: &str, default: bool| -> bool {
config_map
.get(key)
.map(|v| v.3.to_lowercase() == "true")
.unwrap_or(default)
};
let drive = DriveConfig {
server: std::env::var("DRIVE_SERVER").unwrap(),
access_key: std::env::var("DRIVE_ACCESSKEY").unwrap(),
secret_key: std::env::var("DRIVE_SECRET").unwrap(),
};
let email = EmailConfig {
server: get_str("EMAIL_IMAP_SERVER", "imap.gmail.com"),
port: get_u16("EMAIL_IMAP_PORT", 993),
username: get_str("EMAIL_USERNAME", ""),
password: get_str("EMAIL_PASSWORD", ""),
from: get_str("EMAIL_FROM", ""),
smtp_server: get_str("EMAIL_SMTP_SERVER", "smtp.gmail.com"),
smtp_port: get_u16("EMAIL_SMTP_PORT", 587),
};
Ok(AppConfig {
drive,
email,
server: ServerConfig {
host: get_str("SERVER_HOST", "127.0.0.1"),
port: get_u16("SERVER_PORT", 8080),
},
site_path: {
ConfigManager::new(pool.clone())
.get_config(&Uuid::nil(), "SITES_ROOT", Some("./botserver-stack/sites"))?
.to_string()
},
})
}
pub fn from_env() -> Result<Self, anyhow::Error> {
let minio = DriveConfig {
server: std::env::var("DRIVE_SERVER").unwrap(),
access_key: std::env::var("DRIVE_ACCESSKEY").unwrap(),
secret_key: std::env::var("DRIVE_SECRET").unwrap(),
};
let email = EmailConfig {
server: std::env::var("EMAIL_IMAP_SERVER")
.unwrap_or_else(|_| "imap.gmail.com".to_string()),
port: std::env::var("EMAIL_IMAP_PORT")
.ok()
.and_then(|p| p.parse().ok())
.unwrap_or(993),
username: std::env::var("EMAIL_USERNAME").unwrap_or_default(),
password: std::env::var("EMAIL_PASSWORD").unwrap_or_default(),
from: std::env::var("EMAIL_FROM").unwrap_or_default(),
smtp_server: std::env::var("EMAIL_SMTP_SERVER")
.unwrap_or_else(|_| "smtp.gmail.com".to_string()),
smtp_port: std::env::var("EMAIL_SMTP_PORT")
.ok()
.and_then(|p| p.parse().ok())
.unwrap_or(587),
};
Ok(AppConfig {
drive: minio,
email,
server: ServerConfig {
host: std::env::var("SERVER_HOST").unwrap_or_else(|_| "127.0.0.1".to_string()),
port: std::env::var("SERVER_PORT")
.ok()
.and_then(|p| p.parse().ok())
.unwrap_or(8080),
},
site_path: {
let pool = create_conn()?;
ConfigManager::new(pool).get_config(
&Uuid::nil(),
"SITES_ROOT",
Some("./botserver-stack/sites"),
)?
},
})
}
}
/// Database-backed accessor for per-bot configuration key/value pairs.
#[derive(Debug)]
pub struct ConfigManager {
    // Pooled PostgreSQL connections.
    conn: DbPool,
}
impl ConfigManager {
    /// Wraps a connection pool for configuration lookups.
    pub fn new(conn: DbPool) -> Self {
        Self { conn }
    }
    /// Checks out a pooled connection, converting pool errors into the
    /// diesel error type used throughout this module.
    fn get_conn(
        &self,
    ) -> Result<PooledConnection<ConnectionManager<PgConnection>>, diesel::result::Error> {
        self.conn.get().map_err(|e| {
            diesel::result::Error::DatabaseError(
                diesel::result::DatabaseErrorKind::UnableToSendCommand,
                Box::new(e.to_string()),
            )
        })
    }
    /// Resolves `key` for `code_bot_id`, falling back first to the default
    /// bot's value and finally to `fallback` (or the empty string).
    ///
    /// # Errors
    /// Fails only when no pooled connection is available.
    pub fn get_config(
        &self,
        code_bot_id: &uuid::Uuid,
        key: &str,
        fallback: Option<&str>,
    ) -> Result<String, diesel::result::Error> {
        use crate::shared::models::schema::bot_configuration::dsl::*;
        let mut conn = self.get_conn()?;
        let fallback_str = fallback.unwrap_or("");
        let result = bot_configuration
            .filter(bot_id.eq(code_bot_id))
            .filter(config_key.eq(key))
            .select(config_value)
            .first::<String>(&mut conn);
        let value = match result {
            Ok(v) => v,
            Err(_) => {
                // Key absent for this bot: consult the default bot before
                // returning the caller-provided fallback.
                let (default_bot_id, _default_bot_name) = crate::bot::get_default_bot(&mut conn);
                bot_configuration
                    .filter(bot_id.eq(default_bot_id))
                    .filter(config_key.eq(key))
                    .select(config_value)
                    .first::<String>(&mut conn)
                    .unwrap_or(fallback_str.to_string())
            }
        };
        Ok(value)
    }
    /// Upserts key/value pairs parsed from a `.gbot` CSV body (header row
    /// skipped; first two comma-separated fields per line) and returns the
    /// number of rows written.
    ///
    /// The previous version computed a SHA-256 digest of `content` that was
    /// never used; that dead code has been removed.
    pub fn sync_gbot_config(&self, bot_id: &uuid::Uuid, content: &str) -> Result<usize, String> {
        let mut conn = self
            .get_conn()
            .map_err(|e| format!("Failed to acquire connection: {}", e))?;
        let mut updated = 0;
        for line in content.lines().skip(1) {
            // Naive CSV split: values containing commas are not supported.
            let parts: Vec<&str> = line.split(',').collect();
            if parts.len() >= 2 {
                let key = parts[0].trim();
                let value = parts[1].trim();
                let new_id: uuid::Uuid = uuid::Uuid::new_v4();
                diesel::sql_query("INSERT INTO bot_configuration (id, bot_id, config_key, config_value, config_type) VALUES ($1, $2, $3, $4, 'string') ON CONFLICT (bot_id, config_key) DO UPDATE SET config_value = EXCLUDED.config_value, updated_at = NOW()")
                    .bind::<diesel::sql_types::Uuid, _>(new_id)
                    .bind::<diesel::sql_types::Uuid, _>(bot_id)
                    .bind::<diesel::sql_types::Text, _>(key)
                    .bind::<diesel::sql_types::Text, _>(value)
                    .execute(&mut conn)
                    .map_err(|e| format!("Failed to update config: {}", e))?;
                updated += 1;
            }
        }
        Ok(updated)
    }
}
/// Builds a database connection pool, translating pool-creation failures
/// into `anyhow` errors.
fn create_conn() -> Result<DbPool, anyhow::Error> {
    match crate::shared::utils::create_conn() {
        Ok(pool) => Ok(pool),
        Err(e) => Err(anyhow::anyhow!("Failed to create database pool: {}", e)),
    }
}

8
src/core/mod.rs Normal file
View file

@ -0,0 +1,8 @@
pub mod automation;
pub mod bootstrap;
pub mod bot;
pub mod config;
pub mod package_manager;
pub mod session;
pub mod shared;
pub mod web_server;

View file

@ -0,0 +1,168 @@
use anyhow::Result;
use std::env;
use std::process::Command;
use crate::package_manager::{get_all_components, InstallMode, PackageManager};
pub async fn run() -> Result<()> {
env_logger::init();
let args: Vec<String> = env::args().collect();
if args.len() < 2 {
print_usage();
return Ok(());
}
use tracing::info;
fn print_usage(){info!("usage: botserver <command> [options]")}
let command = &args[1];
match command.as_str() {
"start" => {
let mode = if args.contains(&"--container".to_string()) {
InstallMode::Container
} else {
InstallMode::Local
};
let tenant = if let Some(idx) = args.iter().position(|a| a == "--tenant") {
args.get(idx + 1).cloned()
} else {
None
};
let pm = PackageManager::new(mode, tenant)?;
println!("Starting all installed components...");
let components = get_all_components();
for component in components {
if pm.is_installed(component.name) {
match pm.start(component.name) {
Ok(_) => println!("✓ Started {}", component.name),
Err(e) => eprintln!("✗ Failed to start {}: {}", component.name, e),
}
}
}
println!("✓ BotServer components started");
}
"stop" => {
println!("Stopping all components...");
let components = get_all_components();
for component in components {
let _ = Command::new("pkill").arg("-f").arg(component.termination_command).output();
}
println!("✓ BotServer components stopped");
}
"restart" => {
println!("Restarting BotServer...");
let components = get_all_components();
for component in components {
let _ = Command::new("pkill").arg("-f").arg(component.termination_command).output();
}
tokio::time::sleep(tokio::time::Duration::from_secs(2)).await;
let mode = if args.contains(&"--container".to_string()) {
InstallMode::Container
} else {
InstallMode::Local
};
let tenant = if let Some(idx) = args.iter().position(|a| a == "--tenant") {
args.get(idx + 1).cloned()
} else {
None
};
let pm = PackageManager::new(mode, tenant)?;
let components = get_all_components();
for component in components {
if pm.is_installed(component.name) {
let _ = pm.start(component.name);
}
}
println!("✓ BotServer restarted");
}
"install" => {
if args.len() < 3 {
eprintln!("Usage: botserver install <component> [--container] [--tenant <name>]");
return Ok(());
}
let component = &args[2];
let mode = if args.contains(&"--container".to_string()) {
InstallMode::Container
} else {
InstallMode::Local
};
let tenant = if let Some(idx) = args.iter().position(|a| a == "--tenant") {
args.get(idx + 1).cloned()
} else {
None
};
let pm = PackageManager::new(mode, tenant)?;
pm.install(component).await?;
println!("✓ Component '{}' installed successfully", component);
}
"remove" => {
if args.len() < 3 {
eprintln!("Usage: botserver remove <component> [--container] [--tenant <name>]");
return Ok(());
}
let component = &args[2];
let mode = if args.contains(&"--container".to_string()) {
InstallMode::Container
} else {
InstallMode::Local
};
let tenant = if let Some(idx) = args.iter().position(|a| a == "--tenant") {
args.get(idx + 1).cloned()
} else {
None
};
let pm = PackageManager::new(mode, tenant)?;
pm.remove(component)?;
println!("✓ Component '{}' removed successfully", component);
}
"list" => {
let mode = if args.contains(&"--container".to_string()) {
InstallMode::Container
} else {
InstallMode::Local
};
let tenant = if let Some(idx) = args.iter().position(|a| a == "--tenant") {
args.get(idx + 1).cloned()
} else {
None
};
let pm = PackageManager::new(mode, tenant)?;
println!("Available components:");
for component in pm.list() {
let status = if pm.is_installed(&component) {
"✓ installed"
} else {
" available"
};
println!(" {} {}", status, component);
}
}
"status" => {
if args.len() < 3 {
eprintln!("Usage: botserver status <component> [--container] [--tenant <name>]");
return Ok(());
}
let component = &args[2];
let mode = if args.contains(&"--container".to_string()) {
InstallMode::Container
} else {
InstallMode::Local
};
let tenant = if let Some(idx) = args.iter().position(|a| a == "--tenant") {
args.get(idx + 1).cloned()
} else {
None
};
let pm = PackageManager::new(mode, tenant)?;
if pm.is_installed(component) {
println!("✓ Component '{}' is installed", component);
} else {
println!("✗ Component '{}' is not installed", component);
}
}
"--help" | "-h" => {
print_usage();
}
_ => {
eprintln!("Unknown command: {}", command);
print_usage();
}
}
Ok(())
}

View file

@ -0,0 +1,22 @@
use std::collections::HashMap;
/// Static description of an installable stack component: what it needs,
/// how to fetch it, and how to run it.
#[derive(Debug, Clone)]
pub struct ComponentConfig {
    // Unique component identifier used throughout the package manager.
    pub name: String,
    // TCP ports the component uses.
    pub ports: Vec<u16>,
    // Names of other components that must be installed first.
    pub dependencies: Vec<String>,
    // OS package-manager packages per platform.
    pub linux_packages: Vec<String>,
    pub macos_packages: Vec<String>,
    pub windows_packages: Vec<String>,
    // Optional direct binary download (URL and resulting binary name).
    pub download_url: Option<String>,
    pub binary_name: Option<String>,
    // Shell commands run before/after installation, per platform.
    pub pre_install_cmds_linux: Vec<String>,
    pub post_install_cmds_linux: Vec<String>,
    pub pre_install_cmds_macos: Vec<String>,
    pub post_install_cmds_macos: Vec<String>,
    pub pre_install_cmds_windows: Vec<String>,
    pub post_install_cmds_windows: Vec<String>,
    // Environment variables associated with the component — presumably
    // exported when it runs; confirm against the start path.
    pub env_vars: HashMap<String, String>,
    // Auxiliary data files downloaded into the component's data directory.
    pub data_download_list: Vec<String>,
    // Command line used to launch the component — presumably consumed by
    // `start()`; confirm.
    pub exec_cmd: String,
    // Command used to probe the component — presumably a health/installed
    // check; confirm.
    pub check_cmd: String,
}

View file

@ -0,0 +1,632 @@
use crate::package_manager::component::ComponentConfig;
use crate::package_manager::installer::PackageManager;
use crate::package_manager::InstallMode;
use crate::package_manager::OsType;
use crate::shared::utils::{self, parse_database_url};
use anyhow::{Context, Result};
use log::{error, trace, warn};
use reqwest::Client;
use std::collections::HashMap;
use std::path::PathBuf;
use std::process::Command;
impl PackageManager {
    /// Installs `component_name`, first recursively installing any missing
    /// dependencies, then dispatching to the local or container install path.
    ///
    /// # Errors
    /// Fails if the component is unknown or any install step fails.
    pub async fn install(&self, component_name: &str) -> Result<()> {
        let component = self
            .components
            .get(component_name)
            .context(format!("Component '{}' not found", component_name))?;
        trace!(
            "Starting installation of component '{}' in {:?} mode",
            component_name,
            self.mode
        );
        for dep in &component.dependencies {
            if !self.is_installed(dep) {
                warn!("Installing missing dependency: {}", dep);
                // Box::pin breaks the infinitely-sized future produced by
                // async recursion.
                Box::pin(self.install(dep)).await?;
            }
        }
        match self.mode {
            InstallMode::Local => self.install_local(component).await?,
            InstallMode::Container => self.install_container(component)?,
        }
        trace!(
            "Component '{}' installation completed successfully",
            component_name
        );
        Ok(())
    }
    /// Installs a component directly onto the host under `base_path`.
    ///
    /// Step order matters: directories → pre-install commands → system
    /// packages → binary download → data downloads → post-install commands.
    pub async fn install_local(&self, component: &ComponentConfig) -> Result<()> {
        trace!(
            "Installing component '{}' locally to {}",
            component.name,
            self.base_path.display()
        );
        self.create_directories(&component.name)?;
        // Pick the pre/post command lists for the host OS.
        let (pre_cmds, post_cmds) = match self.os_type {
            OsType::Linux => (
                &component.pre_install_cmds_linux,
                &component.post_install_cmds_linux,
            ),
            OsType::MacOS => (
                &component.pre_install_cmds_macos,
                &component.post_install_cmds_macos,
            ),
            OsType::Windows => (
                &component.pre_install_cmds_windows,
                &component.post_install_cmds_windows,
            ),
        };
        self.run_commands(pre_cmds, "local", &component.name)?;
        self.install_system_packages(component)?;
        if let Some(url) = &component.download_url {
            // Clone so the borrow of `component` does not cross the await.
            let url = url.clone();
            let name = component.name.clone();
            let binary_name = component.binary_name.clone();
            self.download_and_install(&url, &name, binary_name.as_deref())
                .await?;
        }
        if !component.data_download_list.is_empty() {
            for url in &component.data_download_list {
                // Name the data file after the last URL segment.
                let filename = url.split('/').last().unwrap_or("download.tmp");
                let output_path = self
                    .base_path
                    .join("data")
                    .join(&component.name)
                    .join(filename);
                utils::download_file(url, output_path.to_str().unwrap()).await?;
            }
        }
        self.run_commands(post_cmds, "local", &component.name)?;
        Ok(())
    }
    /// Provisions a Debian 12 LXC container named `{tenant}-{component}` and
    /// installs the component inside it: packages, binary download, service
    /// unit, host bind mounts and port forwarding.
    pub fn install_container(&self, component: &ComponentConfig) -> Result<()> {
        let container_name = format!("{}-{}", self.tenant, component.name);
        let output = Command::new("lxc")
            .args(&[
                "launch",
                "images:debian/12",
                &container_name,
                "-c",
                "security.privileged=true",
            ])
            .output()?;
        if !output.status.success() {
            return Err(anyhow::anyhow!(
                "LXC container creation failed: {}",
                String::from_utf8_lossy(&output.stderr)
            ));
        }
        // Heuristic wait for the container to finish booting; there is no
        // readiness probe here.
        std::thread::sleep(std::time::Duration::from_secs(15));
        self.exec_in_container(&container_name, "mkdir -p /opt/gbo/{bin,data,conf,logs}")?;
        // NOTE(review): command/package selection uses the HOST os_type even
        // though the container is always Debian — only correct on Linux
        // hosts; confirm intended.
        let (pre_cmds, post_cmds) = match self.os_type {
            OsType::Linux => (
                &component.pre_install_cmds_linux,
                &component.post_install_cmds_linux,
            ),
            OsType::MacOS => (
                &component.pre_install_cmds_macos,
                &component.post_install_cmds_macos,
            ),
            OsType::Windows => (
                &component.pre_install_cmds_windows,
                &component.post_install_cmds_windows,
            ),
        };
        self.run_commands(pre_cmds, &container_name, &component.name)?;
        let packages = match self.os_type {
            OsType::Linux => &component.linux_packages,
            OsType::MacOS => &component.macos_packages,
            OsType::Windows => &component.windows_packages,
        };
        if !packages.is_empty() {
            let pkg_list = packages.join(" ");
            self.exec_in_container(
                &container_name,
                &format!("apt-get install -y {}", pkg_list),
            )?;
        }
        if let Some(url) = &component.download_url {
            self.download_in_container(
                &container_name,
                url,
                &component.name,
                component.binary_name.as_deref(),
            )?;
        }
        self.run_commands(post_cmds, &container_name, &component.name)?;
        // Dedicated unprivileged user for the service inside the container.
        self.exec_in_container(
            &container_name,
            "useradd --system --no-create-home --shell /bin/false gbuser",
        )?;
        self.mount_container_directories(&container_name, &component.name)?;
        if !component.exec_cmd.is_empty() {
            self.create_container_service(
                &container_name,
                &component.name,
                &component.exec_cmd,
                &component.env_vars,
            )?;
        }
        self.setup_port_forwarding(&container_name, &component.ports)?;
        trace!(
            "Container installation of '{}' completed in {}",
            component.name,
            container_name
        );
        Ok(())
    }
pub fn remove(&self, component_name: &str) -> Result<()> {
let component = self
.components
.get(component_name)
.context(format!("Component '{}' not found", component_name))?;
match self.mode {
InstallMode::Local => self.remove_local(component)?,
InstallMode::Container => self.remove_container(component)?,
}
Ok(())
}
pub fn remove_local(&self, component: &ComponentConfig) -> Result<()> {
let bin_path = self.base_path.join("bin").join(&component.name);
let _ = std::fs::remove_dir_all(bin_path);
Ok(())
}
pub fn remove_container(&self, component: &ComponentConfig) -> Result<()> {
let container_name = format!("{}-{}", self.tenant, component.name);
let _ = Command::new("lxc")
.args(&["stop", &container_name])
.output();
let output = Command::new("lxc")
.args(&["delete", &container_name])
.output()?;
if !output.status.success() {
warn!(
"Container deletion had issues: {}",
String::from_utf8_lossy(&output.stderr)
);
}
Ok(())
}
pub fn list(&self) -> Vec<String> {
self.components.keys().cloned().collect()
}
pub fn is_installed(&self, component_name: &str) -> bool {
match self.mode {
InstallMode::Local => {
let bin_path = self.base_path.join("bin").join(component_name);
bin_path.exists()
}
InstallMode::Container => {
let container_name = format!("{}-{}", self.tenant, component_name);
let output = Command::new("lxc")
.args(&["list", &container_name, "--format=json"])
.output()
.unwrap();
if !output.status.success() {
return false;
}
let output_str = String::from_utf8_lossy(&output.stdout);
!output_str.contains("\"name\":\"") || output_str.contains("\"status\":\"Stopped\"")
}
}
}
pub fn create_directories(&self, component: &str) -> Result<()> {
let dirs = ["bin", "data", "conf", "logs"];
for dir in &dirs {
let path = self.base_path.join(dir).join(component);
std::fs::create_dir_all(&path)
.context(format!("Failed to create directory: {:?}", path))?;
}
Ok(())
}
    /// Install the component's OS-level package list via the platform package
    /// manager (apt-get on Linux, Homebrew on macOS; Windows is a stub).
    ///
    /// Package-manager failures are only logged as warnings — the install
    /// sequence deliberately continues.
    pub fn install_system_packages(&self, component: &ComponentConfig) -> Result<()> {
        let packages = match self.os_type {
            OsType::Linux => &component.linux_packages,
            OsType::MacOS => &component.macos_packages,
            OsType::Windows => &component.windows_packages,
        };
        if packages.is_empty() {
            return Ok(());
        }
        trace!(
            "Installing {} system packages for component '{}'",
            packages.len(),
            component.name
        );
        match self.os_type {
            OsType::Linux => {
                let output = Command::new("apt-get").args(&["update"]).output()?;
                if !output.status.success() {
                    warn!("apt-get update had issues");
                }
                let output = Command::new("apt-get")
                    .args(&["install", "-y"])
                    .args(packages)
                    .output()?;
                if !output.status.success() {
                    warn!("Some packages may have failed to install");
                }
            }
            OsType::MacOS => {
                let output = Command::new("brew")
                    .args(&["install"])
                    .args(packages)
                    .output()?;
                if !output.status.success() {
                    warn!("Homebrew installation had warnings");
                }
            }
            OsType::Windows => {
                warn!("Windows package installation not implemented");
            }
        }
        Ok(())
    }
    /// Download `url` into the component's bin directory and install it:
    /// archives are extracted, single files are renamed/made executable.
    pub async fn download_and_install(
        &self,
        url: &str,
        component: &str,
        binary_name: Option<&str>,
    ) -> Result<()> {
        let bin_path = self.base_path.join("bin").join(component);
        std::fs::create_dir_all(&bin_path)?;
        // Temp file named after the last URL segment.
        let filename = url.split('/').last().unwrap_or("download.tmp");
        // NOTE(review): a `split('/')` segment can never start with '/', so
        // this branch looks unreachable — confirm before relying on it.
        let temp_file = if filename.starts_with('/') {
            PathBuf::from(filename)
        } else {
            bin_path.join(filename)
        };
        self.download_with_reqwest(url, &temp_file, component)
            .await?;
        self.handle_downloaded_file(&temp_file, &bin_path, binary_name)?;
        Ok(())
    }
    /// Download `url` to `temp_file`, retrying up to `MAX_RETRIES` times with
    /// a linearly growing delay; any partial file is removed between attempts.
    ///
    /// NOTE(review): the configured `Client` is passed on but ignored by
    /// `attempt_reqwest_download`, and `std::thread::sleep` blocks the async
    /// executor thread during back-off — consider `tokio::time::sleep`.
    pub async fn download_with_reqwest(
        &self,
        url: &str,
        temp_file: &PathBuf,
        component: &str,
    ) -> Result<()> {
        const MAX_RETRIES: u32 = 3;
        const RETRY_DELAY: std::time::Duration = std::time::Duration::from_secs(2);
        let client = Client::builder()
            .timeout(std::time::Duration::from_secs(30))
            .user_agent("botserver-package-manager/1.0")
            .build()?;
        let mut last_error = None;
        for attempt in 0..=MAX_RETRIES {
            if attempt > 0 {
                trace!(
                    "Retry attempt {}/{} for {}",
                    attempt,
                    MAX_RETRIES,
                    component
                );
                // Back-off grows with the attempt number (2s, 4s, 6s).
                std::thread::sleep(RETRY_DELAY * attempt);
            }
            match self.attempt_reqwest_download(&client, url, temp_file).await {
                Ok(_size) => {
                    if attempt > 0 {
                        trace!("Download succeeded on retry attempt {}", attempt);
                    }
                    return Ok(());
                }
                Err(e) => {
                    warn!("Download attempt {} failed: {}", attempt + 1, e);
                    last_error = Some(e);
                    // Drop any partial download before retrying.
                    let _ = std::fs::remove_file(temp_file);
                }
            }
        }
        Err(anyhow::anyhow!(
            "Failed to download {} after {} attempts. Last error: {}",
            component,
            MAX_RETRIES + 1,
            last_error.unwrap()
        ))
    }
pub async fn attempt_reqwest_download(
&self,
_client: &Client,
url: &str,
temp_file: &PathBuf,
) -> Result<u64> {
let output_path = temp_file.to_str().context("Invalid temp file path")?;
utils::download_file(url, output_path)
.await
.map_err(|e| anyhow::anyhow!("Failed to download file using shared utility: {}", e))?;
let metadata = std::fs::metadata(temp_file).context("Failed to get file metadata")?;
let size = metadata.len();
Ok(size)
}
pub fn handle_downloaded_file(
&self,
temp_file: &PathBuf,
bin_path: &PathBuf,
binary_name: Option<&str>,
) -> Result<()> {
let metadata = std::fs::metadata(temp_file)?;
if metadata.len() == 0 {
return Err(anyhow::anyhow!("Downloaded file is empty"));
}
let file_extension = temp_file
.extension()
.and_then(|ext| ext.to_str())
.unwrap_or("");
match file_extension {
"gz" | "tgz" => {
self.extract_tar_gz(temp_file, bin_path)?;
}
"zip" => {
self.extract_zip(temp_file, bin_path)?;
}
_ => {
if let Some(name) = binary_name {
self.install_binary(temp_file, bin_path, name)?;
} else {
let final_path = bin_path.join(temp_file.file_name().unwrap());
std::fs::rename(temp_file, &final_path)?;
self.make_executable(&final_path)?;
}
}
}
Ok(())
}
pub fn extract_tar_gz(&self, temp_file: &PathBuf, bin_path: &PathBuf) -> Result<()> {
let output = Command::new("tar")
.current_dir(bin_path)
.args(&["-xzf", temp_file.to_str().unwrap(), "--strip-components=1"])
.output()?;
if !output.status.success() {
return Err(anyhow::anyhow!(
"tar extraction failed: {}",
String::from_utf8_lossy(&output.stderr)
));
}
std::fs::remove_file(temp_file)?;
Ok(())
}
pub fn extract_zip(&self, temp_file: &PathBuf, bin_path: &PathBuf) -> Result<()> {
let output = Command::new("unzip")
.current_dir(bin_path)
.args(&["-o", "-q", temp_file.to_str().unwrap()])
.output()?;
if !output.status.success() {
return Err(anyhow::anyhow!(
"unzip extraction failed: {}",
String::from_utf8_lossy(&output.stderr)
));
}
std::fs::remove_file(temp_file)?;
Ok(())
}
pub fn install_binary(
&self,
temp_file: &PathBuf,
bin_path: &PathBuf,
name: &str,
) -> Result<()> {
let final_path = bin_path.join(name);
std::fs::rename(temp_file, &final_path)?;
self.make_executable(&final_path)?;
Ok(())
}
pub fn make_executable(&self, path: &PathBuf) -> Result<()> {
#[cfg(unix)]
{
use std::os::unix::fs::PermissionsExt;
let mut perms = std::fs::metadata(path)?.permissions();
perms.set_mode(0o755);
std::fs::set_permissions(path, perms)?;
}
Ok(())
}
pub fn run_commands(&self, commands: &[String], target: &str, component: &str) -> Result<()> {
let bin_path = if target == "local" {
self.base_path.join("bin").join(component)
} else {
PathBuf::from("/opt/gbo/bin")
};
let data_path = if target == "local" {
self.base_path.join("data").join(component)
} else {
PathBuf::from("/opt/gbo/data")
};
let conf_path = if target == "local" {
self.base_path.join("conf").join(component)
} else {
PathBuf::from("/opt/gbo/conf")
};
let logs_path = if target == "local" {
self.base_path.join("logs").join(component)
} else {
PathBuf::from("/opt/gbo/logs")
};
for cmd in commands {
let rendered_cmd = cmd
.replace("{{BIN_PATH}}", &bin_path.to_string_lossy())
.replace("{{DATA_PATH}}", &data_path.to_string_lossy())
.replace("{{CONF_PATH}}", &conf_path.to_string_lossy())
.replace("{{LOGS_PATH}}", &logs_path.to_string_lossy());
if target == "local" {
trace!("Executing command: {}", rendered_cmd);
let child = Command::new("bash")
.current_dir(&bin_path)
.args(&["-c", &rendered_cmd])
.spawn()
.with_context(|| {
format!("Failed to spawn command for component '{}'", component)
})?;
let output = child.wait_with_output().with_context(|| {
format!(
"Failed while waiting for command to finish for component '{}'",
component
)
})?;
if !output.status.success() {
error!(
"Command had non-zero exit: {}",
String::from_utf8_lossy(&output.stderr)
);
}
} else {
self.exec_in_container(target, &rendered_cmd)?;
}
}
Ok(())
}
pub fn exec_in_container(&self, container: &str, command: &str) -> Result<()> {
let output = Command::new("lxc")
.args(&["exec", container, "--", "bash", "-c", command])
.output()?;
if !output.status.success() {
warn!(
"Container command failed: {}",
String::from_utf8_lossy(&output.stderr)
);
}
Ok(())
}
    /// Download `url` inside the container with wget, then extract archives
    /// into /opt/gbo/bin or install a single binary under `binary_name`.
    ///
    /// NOTE(review): `url` is interpolated unquoted into a `bash -c` string —
    /// shell metacharacters in a URL would be interpreted. URLs come from the
    /// static component registry today, but confirm before accepting
    /// user-supplied URLs.
    pub fn download_in_container(
        &self,
        container: &str,
        url: &str,
        _component: &str,
        binary_name: Option<&str>,
    ) -> Result<()> {
        let download_cmd = format!("wget -O /tmp/download.tmp {}", url);
        self.exec_in_container(container, &download_cmd)?;
        if url.ends_with(".tar.gz") || url.ends_with(".tgz") {
            self.exec_in_container(container, "tar -xzf /tmp/download.tmp -C /opt/gbo/bin")?;
        } else if url.ends_with(".zip") {
            self.exec_in_container(container, "unzip -o /tmp/download.tmp -d /opt/gbo/bin")?;
        } else if let Some(name) = binary_name {
            let mv_cmd = format!(
                "mv /tmp/download.tmp /opt/gbo/bin/{} && chmod +x /opt/gbo/bin/{}",
                name, name
            );
            self.exec_in_container(container, &mv_cmd)?;
        }
        // Clean up the temp download regardless of which branch ran.
        self.exec_in_container(container, "rm -f /tmp/download.tmp")?;
        Ok(())
    }
    /// Bind-mount per-tenant host directories (data/conf/logs) into the
    /// container at /opt/gbo/<dir> using LXC disk devices.
    ///
    /// Existing devices with the same name are removed first so the call is
    /// idempotent; a failed mount is logged as a warning, not an error.
    pub fn mount_container_directories(&self, container: &str, component: &str) -> Result<()> {
        let host_base = format!("/opt/gbo/tenants/{}/{}", self.tenant, component);
        for dir in &["data", "conf", "logs"] {
            let host_path = format!("{}/{}", host_base, dir);
            std::fs::create_dir_all(&host_path)?;
            let device_name = format!("{}-{}", component, dir);
            let container_path = format!("/opt/gbo/{}", dir);
            // Best-effort removal of a stale device of the same name.
            let _ = Command::new("lxc")
                .args(&["config", "device", "remove", container, &device_name])
                .output();
            let output = Command::new("lxc")
                .args(&[
                    "config",
                    "device",
                    "add",
                    container,
                    &device_name,
                    "disk",
                    &format!("source={}", host_path),
                    &format!("path={}", container_path),
                ])
                .output()?;
            if !output.status.success() {
                warn!("Failed to mount {} in container {}", dir, container);
            }
            trace!(
                "Mounted {} to {} in container {}",
                host_path,
                container_path,
                container
            );
        }
        Ok(())
    }
pub fn create_container_service(
&self,
container: &str,
component: &str,
exec_cmd: &str,
env_vars: &HashMap<String, String>,
) -> Result<()> {
let database_url = std::env::var("DATABASE_URL").unwrap();
let (_db_username, db_password, _db_server, _db_port, _db_name) =
parse_database_url(&database_url);
let rendered_cmd = exec_cmd
.replace("{{DB_PASSWORD}}", &db_password)
.replace("{{BIN_PATH}}", "/opt/gbo/bin")
.replace("{{DATA_PATH}}", "/opt/gbo/data")
.replace("{{CONF_PATH}}", "/opt/gbo/conf")
.replace("{{LOGS_PATH}}", "/opt/gbo/logs");
let mut env_section = String::new();
for (key, value) in env_vars {
let rendered_value = value
.replace("{{DATA_PATH}}", "/opt/gbo/data")
.replace("{{BIN_PATH}}", "/opt/gbo/bin")
.replace("{{CONF_PATH}}", "/opt/gbo/conf")
.replace("{{LOGS_PATH}}", "/opt/gbo/logs");
env_section.push_str(&format!("Environment={}={}\n", key, rendered_value));
}
let service_content = format!(
"[Unit]\nDescription={} Service\nAfter=network.target\n\n[Service]\nType=simple\n{}ExecStart={}\nWorkingDirectory=/opt/gbo/data\nRestart=always\nRestartSec=10\nUser=root\n\n[Install]\nWantedBy=multi-user.target\n",
component, env_section, rendered_cmd
);
let service_file = format!("/tmp/{}.service", component);
std::fs::write(&service_file, &service_content)?;
let output = Command::new("lxc")
.args(&[
"file",
"push",
&service_file,
&format!("{}/etc/systemd/system/{}.service", container, component),
])
.output()?;
if !output.status.success() {
warn!("Failed to push service file to container");
}
self.exec_in_container(container, "systemctl daemon-reload")?;
self.exec_in_container(container, &format!("systemctl enable {}", component))?;
self.exec_in_container(container, &format!("systemctl start {}", component))?;
std::fs::remove_file(&service_file)?;
trace!(
"Created and started service in container {}: {}",
container,
component
);
Ok(())
}
pub fn setup_port_forwarding(&self, container: &str, ports: &[u16]) -> Result<()> {
for port in ports {
let device_name = format!("port-{}", port);
let _ = Command::new("lxc")
.args(&["config", "device", "remove", container, &device_name])
.output();
let output = Command::new("lxc")
.args(&[
"config",
"device",
"add",
container,
&device_name,
"proxy",
&format!("listen=tcp:0.0.0.0:{}", port),
&format!("connect=tcp:127.0.0.1:{}", port),
])
.output()?;
if !output.status.success() {
warn!("Failed to setup port forwarding for port {}", port);
}
trace!(
"Port forwarding configured: {} -> container {}",
port,
container
);
}
Ok(())
}
}

View file

@ -0,0 +1,732 @@
use crate::package_manager::component::ComponentConfig;
use crate::package_manager::os::detect_os;
use crate::package_manager::{InstallMode, OsType};
use anyhow::Result;
use log::trace;
use std::collections::HashMap;
use std::path::PathBuf;
#[derive(Debug)]
/// Installs, removes and inspects stack components either directly on the
/// host ("local" mode, under `base_path`) or in per-tenant LXC containers.
pub struct PackageManager {
    // Local vs container installation strategy.
    pub mode: InstallMode,
    // Host OS, detected at construction time.
    pub os_type: OsType,
    // Root directory for bin/data/conf/logs trees (local mode).
    pub base_path: PathBuf,
    // Tenant name used to prefix container names; "default" if unset.
    pub tenant: String,
    // Registry of all known components, keyed by component name.
    pub components: HashMap<String, ComponentConfig>,
}
impl PackageManager {
pub fn new(mode: InstallMode, tenant: Option<String>) -> Result<Self> {
let os_type = detect_os();
let base_path = if mode == InstallMode::Container {
PathBuf::from("/opt/gbo")
} else {
std::env::current_dir()?.join("botserver-stack")
};
let tenant = tenant.unwrap_or_else(|| "default".to_string());
let mut pm = PackageManager {
mode,
os_type,
base_path,
tenant,
components: HashMap::new(),
};
pm.register_components();
Ok(pm)
}
    /// Populate the component registry with every known stack component.
    /// Each `register_*` helper inserts one `ComponentConfig` keyed by name.
    fn register_components(&mut self) {
        self.register_tables();
        self.register_cache();
        self.register_drive();
        self.register_llm();
        self.register_email();
        self.register_proxy();
        self.register_directory();
        self.register_alm();
        self.register_alm_ci();
        self.register_dns();
        self.register_webmail();
        self.register_meeting();
        self.register_table_editor();
        self.register_doc_editor();
        self.register_desktop();
        self.register_devtools();
        self.register_vector_db();
        self.register_host();
    }
    /// Object storage: MinIO server on ports 9000 (API) / 9001 (console).
    fn register_drive(&mut self) {
        self.components.insert(
            "drive".to_string(),
            ComponentConfig {
                name: "drive".to_string(),
                ports: vec![9000, 9001],
                dependencies: vec![],
                linux_packages: vec![],
                macos_packages: vec![],
                windows_packages: vec![],
                download_url: Some(
                    "https://dl.min.io/server/minio/release/linux-amd64/minio".to_string(),
                ),
                binary_name: Some("minio".to_string()),
                pre_install_cmds_linux: vec![],
                post_install_cmds_linux: vec![],
                pre_install_cmds_macos: vec![],
                post_install_cmds_macos: vec![],
                pre_install_cmds_windows: vec![],
                post_install_cmds_windows: vec![],
                // NOTE(review): "$DRIVE_ACCESSKEY"/"$DRIVE_SECRET" are written
                // literally into the systemd unit; systemd Environment= does
                // not expand "$VAR" — confirm where these get substituted.
                env_vars: HashMap::from([
                    ("MINIO_ROOT_USER".to_string(), "$DRIVE_ACCESSKEY".to_string()),
                    ("MINIO_ROOT_PASSWORD".to_string(), "$DRIVE_SECRET".to_string()),
                ]),
                data_download_list: Vec::new(),
                exec_cmd: "nohup {{BIN_PATH}}/minio server {{DATA_PATH}} --address :9000 --console-address :9001 > {{LOGS_PATH}}/minio.log 2>&1 &".to_string(),
                check_cmd: "ps -ef | grep minio | grep -v grep | grep {{BIN_PATH}}".to_string(),
            },
        );
    }
    /// Relational database: standalone PostgreSQL 18 binaries on port 5432;
    /// post-install initializes the cluster, writes config, starts the
    /// server and creates the "botserver" database.
    fn register_tables(&mut self) {
        self.components.insert(
            "tables".to_string(),
            ComponentConfig {
                name: "tables".to_string(),
                ports: vec![5432],
                dependencies: vec![],
                linux_packages: vec![],
                macos_packages: vec![],
                windows_packages: vec![],
                download_url: Some(
                    "https://github.com/theseus-rs/postgresql-binaries/releases/download/18.0.0/postgresql-18.0.0-x86_64-unknown-linux-gnu.tar.gz".to_string(),
                ),
                binary_name: Some("postgres".to_string()),
                pre_install_cmds_linux: vec![],
                // NOTE(review): inside format!, "{{DB_PASSWORD}}" renders as
                // single-brace "{DB_PASSWORD}", and run_commands substitutes
                // only the *_PATH placeholders — so the password placeholder
                // reaches the shell literally. Verify substitution path.
                post_install_cmds_linux: vec![
                    "chmod +x ./bin/*".to_string(),
                    format!("if [ ! -d \"{{{{DATA_PATH}}}}/pgdata\" ]; then PG_PASSWORD={{DB_PASSWORD}} ./bin/initdb -D {{{{DATA_PATH}}}}/pgdata -U gbuser --pwfile=<(echo $PG_PASSWORD); fi"),
                    "echo \"data_directory = '{{DATA_PATH}}/pgdata'\" > {{CONF_PATH}}/postgresql.conf".to_string(),
                    "echo \"ident_file = '{{CONF_PATH}}/pg_ident.conf'\" >> {{CONF_PATH}}/postgresql.conf".to_string(),
                    "echo \"port = 5432\" >> {{CONF_PATH}}/postgresql.conf".to_string(),
                    "echo \"listen_addresses = '*'\" >> {{CONF_PATH}}/postgresql.conf".to_string(),
                    "echo \"log_directory = '{{LOGS_PATH}}'\" >> {{CONF_PATH}}/postgresql.conf".to_string(),
                    "echo \"logging_collector = on\" >> {{CONF_PATH}}/postgresql.conf".to_string(),
                    "echo \"host all all all md5\" > {{CONF_PATH}}/pg_hba.conf".to_string(),
                    "touch {{CONF_PATH}}/pg_ident.conf".to_string(),
                    "./bin/pg_ctl -D {{DATA_PATH}}/pgdata -l {{LOGS_PATH}}/postgres.log start -w -t 30".to_string(),
                    "sleep 5".to_string(),
                    "for i in $(seq 1 30); do ./bin/pg_isready -h localhost -p 5432 -U gbuser >/dev/null 2>&1 && echo 'PostgreSQL is ready' && break || echo \"Waiting for PostgreSQL... attempt $i/30\" >&2; sleep 2; done".to_string(),
                    "./bin/pg_isready -h localhost -p 5432 -U gbuser || { echo 'ERROR: PostgreSQL failed to start properly' >&2; cat {{LOGS_PATH}}/postgres.log >&2; exit 1; }".to_string(),
                    format!("PGPASSWORD={{DB_PASSWORD}} ./bin/psql -h localhost -p 5432 -U gbuser -d postgres -c \"CREATE DATABASE botserver WITH OWNER gbuser\" 2>&1 | grep -v 'already exists' || true"),
                ],
                pre_install_cmds_macos: vec![],
                post_install_cmds_macos: vec![
                    "chmod +x ./bin/*".to_string(),
                    "if [ ! -d \"{{DATA_PATH}}/pgdata\" ]; then ./bin/initdb -A -D {{DATA_PATH}}/pgdata -U postgres; fi".to_string(),
                ],
                pre_install_cmds_windows: vec![],
                post_install_cmds_windows: vec![],
                env_vars: HashMap::new(),
                data_download_list: Vec::new(),
                exec_cmd: "./bin/pg_ctl -D {{DATA_PATH}}/pgdata -l {{LOGS_PATH}}/postgres.log start -w -t 30 > {{LOGS_PATH}}/stdout.log 2>&1 &".to_string(),
                check_cmd: "{{BIN_PATH}}/bin/pg_isready -h localhost -p 5432 -U gbuser >/dev/null 2>&1".to_string(),
            },
        );
    }
fn register_cache(&mut self) {
self.components.insert(
"cache".to_string(),
ComponentConfig {
name: "cache".to_string(),
ports: vec![6379],
dependencies: vec![],
linux_packages: vec![],
macos_packages: vec![],
windows_packages: vec![],
download_url: Some(
"https://download.valkey.io/releases/valkey-9.0.0-jammy-x86_64.tar.gz".to_string(),
),
binary_name: Some("valkey-server".to_string()),
pre_install_cmds_linux: vec![],
post_install_cmds_linux: vec![
"chmod +x {{BIN_PATH}}/bin/valkey-server".to_string(),
],
pre_install_cmds_macos: vec![],
post_install_cmds_macos: vec![],
pre_install_cmds_windows: vec![],
post_install_cmds_windows: vec![],
env_vars: HashMap::new(),
data_download_list: Vec::new(),
exec_cmd: "nohup {{BIN_PATH}}/bin/valkey-server --port 6379 --dir {{DATA_PATH}} > {{LOGS_PATH}}/valkey.log 2>&1 && {{BIN_PATH}}/bin/valkey-cli CONFIG SET stop-writes-on-bgsave-error no 2>&1 &".to_string(),
check_cmd: "{{BIN_PATH}}/bin/valkey-cli ping | grep -q PONG".to_string(),
},
);
}
    /// Local LLM runtime: llama.cpp server binaries plus GGUF model files
    /// (chat + embedding) downloaded into the data directory. No exec/check
    /// commands — the server is launched elsewhere.
    fn register_llm(&mut self) {
        self.components.insert(
            "llm".to_string(),
            ComponentConfig {
                name: "llm".to_string(),
                ports: vec![8081, 8082],
                dependencies: vec![],
                linux_packages: vec![],
                macos_packages: vec![],
                windows_packages: vec![],
                download_url: Some(
                    "https://github.com/ggml-org/llama.cpp/releases/download/b6148/llama-b6148-bin-ubuntu-x64.zip".to_string(),
                ),
                binary_name: Some("llama-server".to_string()),
                pre_install_cmds_linux: vec![],
                post_install_cmds_linux: vec![],
                pre_install_cmds_macos: vec![],
                post_install_cmds_macos: vec![],
                pre_install_cmds_windows: vec![],
                post_install_cmds_windows: vec![],
                env_vars: HashMap::new(),
                data_download_list: vec![
                    "https://huggingface.co/bartowski/DeepSeek-R1-Distill-Qwen-1.5B-GGUF/resolve/main/DeepSeek-R1-Distill-Qwen-1.5B-Q3_K_M.gguf".to_string(),
                    "https://huggingface.co/CompendiumLabs/bge-small-en-v1.5-gguf/resolve/main/bge-small-en-v1.5-f32.gguf".to_string(),
                ],
                exec_cmd: "".to_string(),
                check_cmd: "".to_string(),
            },
        );
    }
    /// Mail server: Stalwart covering SMTP/IMAP/POP3/Sieve ports; setcap
    /// allows binding privileged ports without root.
    fn register_email(&mut self) {
        self.components.insert(
            "email".to_string(),
            ComponentConfig {
                name: "email".to_string(),
                ports: vec![25, 80, 110, 143, 465, 587, 993, 995, 4190],
                dependencies: vec![],
                linux_packages: vec![],
                macos_packages: vec![],
                windows_packages: vec![],
                download_url: Some(
                    "https://github.com/stalwartlabs/stalwart/releases/download/v0.13.1/stalwart-x86_64-unknown-linux-gnu.tar.gz".to_string(),
                ),
                binary_name: Some("stalwart".to_string()),
                pre_install_cmds_linux: vec![],
                post_install_cmds_linux: vec![
                    "setcap 'cap_net_bind_service=+ep' {{BIN_PATH}}/stalwart".to_string(),
                ],
                pre_install_cmds_macos: vec![],
                post_install_cmds_macos: vec![],
                pre_install_cmds_windows: vec![],
                post_install_cmds_windows: vec![],
                env_vars: HashMap::new(),
                data_download_list: Vec::new(),
                exec_cmd: "{{BIN_PATH}}/stalwart --config {{CONF_PATH}}/config.toml".to_string(),
                check_cmd: "curl -f http://localhost:25 >/dev/null 2>&1".to_string(),
            },
        );
    }
    /// Reverse proxy: Caddy on ports 80/443; setcap allows binding
    /// privileged ports, XDG_DATA_HOME keeps cert state in the data dir.
    fn register_proxy(&mut self) {
        self.components.insert(
            "proxy".to_string(),
            ComponentConfig {
                name: "proxy".to_string(),
                ports: vec![80, 443],
                dependencies: vec![],
                linux_packages: vec![],
                macos_packages: vec![],
                windows_packages: vec![],
                download_url: Some(
                    "https://github.com/caddyserver/caddy/releases/download/v2.10.0-beta.3/caddy_2.10.0-beta.3_linux_amd64.tar.gz".to_string(),
                ),
                binary_name: Some("caddy".to_string()),
                pre_install_cmds_linux: vec![],
                post_install_cmds_linux: vec![
                    "setcap 'cap_net_bind_service=+ep' {{BIN_PATH}}/caddy".to_string(),
                ],
                pre_install_cmds_macos: vec![],
                post_install_cmds_macos: vec![],
                pre_install_cmds_windows: vec![],
                post_install_cmds_windows: vec![],
                env_vars: HashMap::from([("XDG_DATA_HOME".to_string(), "{{DATA_PATH}}".to_string())]),
                data_download_list: Vec::new(),
                exec_cmd: "{{BIN_PATH}}/caddy run --config {{CONF_PATH}}/Caddyfile".to_string(),
                check_cmd: "curl -f http://localhost >/dev/null 2>&1".to_string(),
            },
        );
    }
    /// Identity provider: Zitadel on port 8080.
    fn register_directory(&mut self) {
        self.components.insert(
            "directory".to_string(),
            ComponentConfig {
                name: "directory".to_string(),
                ports: vec![8080],
                dependencies: vec![],
                linux_packages: vec![],
                macos_packages: vec![],
                windows_packages: vec![],
                download_url: Some(
                    "https://github.com/zitadel/zitadel/releases/download/v2.71.2/zitadel-linux-amd64.tar.gz".to_string(),
                ),
                binary_name: Some("zitadel".to_string()),
                pre_install_cmds_linux: vec![],
                post_install_cmds_linux: vec![
                    "setcap 'cap_net_bind_service=+ep' {{BIN_PATH}}/zitadel".to_string(),
                ],
                pre_install_cmds_macos: vec![],
                post_install_cmds_macos: vec![],
                pre_install_cmds_windows: vec![],
                post_install_cmds_windows: vec![],
                env_vars: HashMap::new(),
                data_download_list: Vec::new(),
                exec_cmd: "{{BIN_PATH}}/zitadel start --config {{CONF_PATH}}/zitadel.yaml".to_string(),
                check_cmd: "curl -f http://localhost:8080 >/dev/null 2>&1".to_string(),
            },
        );
    }
    /// Code forge (ALM): Forgejo web UI on port 3000.
    fn register_alm(&mut self) {
        self.components.insert(
            "alm".to_string(),
            ComponentConfig {
                name: "alm".to_string(),
                ports: vec![3000],
                dependencies: vec![],
                linux_packages: vec![],
                macos_packages: vec![],
                windows_packages: vec![],
                download_url: Some(
                    "https://codeberg.org/forgejo/forgejo/releases/download/v10.0.2/forgejo-10.0.2-linux-amd64".to_string(),
                ),
                binary_name: Some("forgejo".to_string()),
                pre_install_cmds_linux: vec![],
                post_install_cmds_linux: vec![],
                pre_install_cmds_macos: vec![],
                post_install_cmds_macos: vec![],
                pre_install_cmds_windows: vec![],
                post_install_cmds_windows: vec![],
                env_vars: HashMap::from([
                    ("USER".to_string(), "alm".to_string()),
                    ("HOME".to_string(), "{{DATA_PATH}}".to_string()),
                ]),
                data_download_list: Vec::new(),
                exec_cmd: "{{BIN_PATH}}/forgejo web --work-path {{DATA_PATH}}".to_string(),
                check_cmd: "curl -f http://localhost:3000 >/dev/null 2>&1".to_string(),
            },
        );
    }
    /// CI runner for the code forge: Forgejo runner daemon; depends on "alm".
    fn register_alm_ci(&mut self) {
        self.components.insert(
            "alm-ci".to_string(),
            ComponentConfig {
                name: "alm-ci".to_string(),
                ports: vec![],
                dependencies: vec!["alm".to_string()],
                linux_packages: vec![
                ],
                macos_packages: vec!["git".to_string(), "node".to_string()],
                windows_packages: vec![],
                download_url: Some(
                    "https://code.forgejo.org/forgejo/runner/releases/download/v6.3.1/forgejo-runner-6.3.1-linux-amd64".to_string(),
                ),
                binary_name: Some("forgejo-runner".to_string()),
                pre_install_cmds_linux: vec![
                ],
                post_install_cmds_linux: vec![],
                pre_install_cmds_macos: vec![],
                post_install_cmds_macos: vec![],
                pre_install_cmds_windows: vec![],
                post_install_cmds_windows: vec![],
                env_vars: HashMap::new(),
                data_download_list: Vec::new(),
                exec_cmd: "{{BIN_PATH}}/forgejo-runner daemon --config {{CONF_PATH}}/config.yaml".to_string(),
                check_cmd: "ps -ef | grep forgejo-runner | grep -v grep | grep {{BIN_PATH}}".to_string(),
            },
        );
    }
    /// DNS server: CoreDNS on port 53; setcap allows binding the privileged
    /// port without root.
    fn register_dns(&mut self) {
        self.components.insert(
            "dns".to_string(),
            ComponentConfig {
                name: "dns".to_string(),
                ports: vec![53],
                dependencies: vec![],
                linux_packages: vec![],
                macos_packages: vec![],
                windows_packages: vec![],
                download_url: Some(
                    "https://github.com/coredns/coredns/releases/download/v1.12.4/coredns_1.12.4_linux_amd64.tgz".to_string(),
                ),
                binary_name: Some("coredns".to_string()),
                pre_install_cmds_linux: vec![],
                post_install_cmds_linux: vec![
                    "setcap cap_net_bind_service=+ep {{BIN_PATH}}/coredns".to_string(),
                ],
                pre_install_cmds_macos: vec![],
                post_install_cmds_macos: vec![],
                pre_install_cmds_windows: vec![],
                post_install_cmds_windows: vec![],
                env_vars: HashMap::new(),
                data_download_list: Vec::new(),
                exec_cmd: "{{BIN_PATH}}/coredns -conf {{CONF_PATH}}/Corefile".to_string(),
                check_cmd: "dig @localhost example.com >/dev/null 2>&1".to_string(),
            },
        );
    }
    /// Webmail UI: Roundcube served by PHP's built-in server; depends on
    /// "email".
    ///
    /// NOTE(review): port 8080 collides with the "directory" component —
    /// confirm they are never installed on the same host.
    fn register_webmail(&mut self) {
        self.components.insert(
            "webmail".to_string(),
            ComponentConfig {
                name: "webmail".to_string(),
                ports: vec![8080],
                dependencies: vec!["email".to_string()],
                linux_packages: vec![
                    "ca-certificates".to_string(),
                    "apt-transport-https".to_string(),
                    "php8.1".to_string(),
                    "php8.1-fpm".to_string(),
                ],
                macos_packages: vec!["php".to_string()],
                windows_packages: vec![],
                download_url: Some(
                    "https://github.com/roundcube/roundcubemail/releases/download/1.6.6/roundcubemail-1.6.6-complete.tar.gz".to_string(),
                ),
                binary_name: None,
                pre_install_cmds_linux: vec![],
                post_install_cmds_linux: vec![],
                pre_install_cmds_macos: vec![],
                post_install_cmds_macos: vec![],
                pre_install_cmds_windows: vec![],
                post_install_cmds_windows: vec![],
                env_vars: HashMap::new(),
                data_download_list: Vec::new(),
                exec_cmd: "php -S 0.0.0.0:8080 -t {{DATA_PATH}}/roundcubemail".to_string(),
                check_cmd: "curl -f http://localhost:8080 >/dev/null 2>&1".to_string(),
            },
        );
    }
    /// Video conferencing: LiveKit server (HTTP 7880, TURN 3478).
    fn register_meeting(&mut self) {
        self.components.insert(
            "meeting".to_string(),
            ComponentConfig {
                name: "meeting".to_string(),
                ports: vec![7880, 3478],
                dependencies: vec![],
                linux_packages: vec![],
                macos_packages: vec![],
                windows_packages: vec![],
                download_url: Some(
                    "https://github.com/livekit/livekit/releases/download/v1.8.4/livekit_1.8.4_linux_amd64.tar.gz".to_string(),
                ),
                binary_name: Some("livekit-server".to_string()),
                pre_install_cmds_linux: vec![],
                post_install_cmds_linux: vec![],
                pre_install_cmds_macos: vec![],
                post_install_cmds_macos: vec![],
                pre_install_cmds_windows: vec![],
                post_install_cmds_windows: vec![],
                env_vars: HashMap::new(),
                data_download_list: Vec::new(),
                exec_cmd: "{{BIN_PATH}}/livekit-server --config {{CONF_PATH}}/config.yaml".to_string(),
                check_cmd: "curl -f http://localhost:7880 >/dev/null 2>&1".to_string(),
            },
        );
    }
    /// Spreadsheet-style DB UI: NocoDB on port 5757; depends on "tables".
    fn register_table_editor(&mut self) {
        self.components.insert(
            "table_editor".to_string(),
            ComponentConfig {
                name: "table_editor".to_string(),
                ports: vec![5757],
                dependencies: vec!["tables".to_string()],
                linux_packages: vec![],
                macos_packages: vec![],
                windows_packages: vec![],
                download_url: Some("http://get.nocodb.com/linux-x64".to_string()),
                binary_name: Some("nocodb".to_string()),
                pre_install_cmds_linux: vec![],
                post_install_cmds_linux: vec![],
                pre_install_cmds_macos: vec![],
                post_install_cmds_macos: vec![],
                pre_install_cmds_windows: vec![],
                post_install_cmds_windows: vec![],
                env_vars: HashMap::new(),
                data_download_list: Vec::new(),
                exec_cmd: "{{BIN_PATH}}/nocodb".to_string(),
                check_cmd: "curl -f http://localhost:5757 >/dev/null 2>&1".to_string(),
            },
        );
    }
/// Register the "doc_editor" component (coolwsd); no download URL — the
/// binary is expected to be provided by other means.
fn register_doc_editor(&mut self) {
    let key = String::from("doc_editor");
    let config = ComponentConfig {
        name: key.clone(),
        ports: vec![9980],
        dependencies: Vec::new(),
        linux_packages: Vec::new(),
        macos_packages: Vec::new(),
        windows_packages: Vec::new(),
        download_url: None,
        binary_name: Some(String::from("coolwsd")),
        pre_install_cmds_linux: Vec::new(),
        post_install_cmds_linux: Vec::new(),
        pre_install_cmds_macos: Vec::new(),
        post_install_cmds_macos: Vec::new(),
        pre_install_cmds_windows: Vec::new(),
        post_install_cmds_windows: Vec::new(),
        env_vars: HashMap::new(),
        data_download_list: Vec::new(),
        // Note: exec_cmd invokes a bare `coolwsd`, not {{BIN_PATH}}/coolwsd.
        exec_cmd: String::from("coolwsd --config-file={{CONF_PATH}}/coolwsd.xml"),
        check_cmd: String::from("curl -f http://localhost:9980 >/dev/null 2>&1"),
    };
    self.components.insert(key, config);
}
/// Register the "desktop" component: xrdp-based remote desktop on port 3389.
fn register_desktop(&mut self) {
    let key = String::from("desktop");
    let config = ComponentConfig {
        name: key.clone(),
        ports: vec![3389],
        dependencies: Vec::new(),
        // Installed entirely from distro packages; no direct download.
        linux_packages: vec![
            String::from("xvfb"),
            String::from("xrdp"),
            String::from("xfce4"),
        ],
        macos_packages: Vec::new(),
        windows_packages: Vec::new(),
        download_url: None,
        binary_name: None,
        pre_install_cmds_linux: Vec::new(),
        post_install_cmds_linux: Vec::new(),
        pre_install_cmds_macos: Vec::new(),
        post_install_cmds_macos: Vec::new(),
        pre_install_cmds_windows: Vec::new(),
        post_install_cmds_windows: Vec::new(),
        env_vars: HashMap::new(),
        data_download_list: Vec::new(),
        exec_cmd: String::from("xrdp --nodaemon"),
        // No HTTP endpoint: readiness is a TCP listener on the RDP port.
        check_cmd: String::from("netstat -tln | grep :3389 >/dev/null 2>&1"),
    };
    self.components.insert(key, config);
}
/// Register the "devtools" component: package-only (no ports, no process),
/// so exec_cmd and check_cmd are intentionally empty.
fn register_devtools(&mut self) {
    let key = String::from("devtools");
    let config = ComponentConfig {
        name: key.clone(),
        ports: Vec::new(),
        dependencies: Vec::new(),
        linux_packages: vec![
            String::from("xclip"),
            String::from("git"),
            String::from("curl"),
        ],
        macos_packages: vec![String::from("git")],
        windows_packages: Vec::new(),
        download_url: None,
        binary_name: None,
        pre_install_cmds_linux: Vec::new(),
        post_install_cmds_linux: Vec::new(),
        pre_install_cmds_macos: Vec::new(),
        post_install_cmds_macos: Vec::new(),
        pre_install_cmds_windows: Vec::new(),
        post_install_cmds_windows: Vec::new(),
        env_vars: HashMap::new(),
        data_download_list: Vec::new(),
        exec_cmd: String::new(),
        check_cmd: String::new(),
    };
    self.components.insert(key, config);
}
/// Register the bot server under the component key "system".
///
/// NOTE(review): the method is named `register_botserver` but registers the
/// key/name "system" — confirm the mismatch is intentional.
fn register_botserver(&mut self) {
    self.components.insert(
        "system".to_string(),
        ComponentConfig {
            name: "system".to_string(),
            ports: vec![8000],
            dependencies: vec![],
            // Base tooling used for fetching/unpacking resources.
            linux_packages: vec!["curl".to_string(), "unzip".to_string(), "git".to_string()],
            macos_packages: vec![],
            windows_packages: vec![],
            download_url: None,
            binary_name: None,
            pre_install_cmds_linux: vec![],
            post_install_cmds_linux: vec![],
            pre_install_cmds_macos: vec![],
            post_install_cmds_macos: vec![],
            pre_install_cmds_windows: vec![],
            post_install_cmds_windows: vec![],
            env_vars: HashMap::new(),
            data_download_list: Vec::new(),
            // Empty exec/check: no managed child process for this component.
            exec_cmd: "".to_string(),
            check_cmd: "".to_string(),
        },
    );
}
/// Register the "vector_db" component (Qdrant).
fn register_vector_db(&mut self) {
    let key = String::from("vector_db");
    let config = ComponentConfig {
        name: key.clone(),
        ports: vec![6333],
        dependencies: Vec::new(),
        linux_packages: Vec::new(),
        macos_packages: Vec::new(),
        windows_packages: Vec::new(),
        download_url: Some(String::from(
            "https://github.com/qdrant/qdrant/releases/latest/download/qdrant-x86_64-unknown-linux-gnu.tar.gz",
        )),
        binary_name: Some(String::from("qdrant")),
        pre_install_cmds_linux: Vec::new(),
        post_install_cmds_linux: Vec::new(),
        pre_install_cmds_macos: Vec::new(),
        post_install_cmds_macos: Vec::new(),
        pre_install_cmds_windows: Vec::new(),
        post_install_cmds_windows: Vec::new(),
        env_vars: HashMap::new(),
        data_download_list: Vec::new(),
        exec_cmd: String::from("{{BIN_PATH}}/qdrant --storage-path {{DATA_PATH}}"),
        check_cmd: String::from("curl -f http://localhost:6333 >/dev/null 2>&1"),
    };
    self.components.insert(key, config);
}
/// Register the "host" component: host-level networking/virtualization
/// preparation with no long-running process of its own.
fn register_host(&mut self) {
    let key = String::from("host");
    let config = ComponentConfig {
        name: key.clone(),
        ports: Vec::new(),
        dependencies: Vec::new(),
        linux_packages: vec![String::from("sshfs"), String::from("bridge-utils")],
        macos_packages: Vec::new(),
        windows_packages: Vec::new(),
        download_url: None,
        binary_name: None,
        // Enable IPv4 forwarding before install.
        pre_install_cmds_linux: vec![
            String::from("echo 'net.ipv4.ip_forward=1' | tee -a /etc/sysctl.conf"),
            String::from("sysctl -p"),
        ],
        // Initialize LXD with a default dir-backed storage pool/profile.
        // NOTE: assumes `lxd`/`lxc` are already available on the host.
        post_install_cmds_linux: vec![
            String::from("lxd init --auto"),
            String::from("lxc storage create default dir"),
            String::from("lxc profile device add default root disk path=/ pool=default"),
        ],
        pre_install_cmds_macos: Vec::new(),
        post_install_cmds_macos: Vec::new(),
        pre_install_cmds_windows: Vec::new(),
        post_install_cmds_windows: Vec::new(),
        env_vars: HashMap::new(),
        data_download_list: Vec::new(),
        exec_cmd: String::new(),
        check_cmd: String::new(),
    };
    self.components.insert(key, config);
}
/// Start a registered component by name.
///
/// Renders the `{{BIN_PATH}}`/`{{DATA_PATH}}`/`{{CONF_PATH}}`/`{{LOGS_PATH}}`
/// placeholders in the component's commands. If `check_cmd` reports the
/// service is already up, a no-op `sh -c true` child is returned instead of
/// spawning a duplicate. Otherwise `exec_cmd` is spawned via `sh -c`, with
/// `$VAR`-style env values resolved from the current process environment.
///
/// # Errors
/// Returns an error when the component name is unknown, or when spawning
/// fails for a reason other than the service already running.
pub fn start(&self, component: &str) -> Result<std::process::Child> {
    if let Some(cfg) = self.components.get(component) {
        let bin_path = self.base_path.join("bin").join(&cfg.name);
        let data_path = self.base_path.join("data").join(&cfg.name);
        let conf_path = self.base_path.join("conf").join(&cfg.name);
        let logs_path = self.base_path.join("logs").join(&cfg.name);
        // check_cmd and exec_cmd share the same placeholder set, so render
        // both through one closure instead of duplicating the chain.
        let render = |template: &str| {
            template
                .replace("{{BIN_PATH}}", &bin_path.to_string_lossy())
                .replace("{{DATA_PATH}}", &data_path.to_string_lossy())
                .replace("{{CONF_PATH}}", &conf_path.to_string_lossy())
                .replace("{{LOGS_PATH}}", &logs_path.to_string_lossy())
        };
        // First check if the service is already running.
        let check_status = std::process::Command::new("sh")
            .current_dir(&bin_path)
            .arg("-c")
            .arg(render(&cfg.check_cmd))
            .status();
        if matches!(check_status, Ok(status) if status.success()) {
            trace!("Component {} is already running", cfg.name);
            // Hand back a trivially-successful child so callers get a handle.
            return Ok(std::process::Command::new("sh")
                .arg("-c")
                .arg("true")
                .spawn()?);
        }
        // Not running: launch the component's main command.
        let rendered_cmd = render(&cfg.exec_cmd);
        trace!(
            "Starting component {} with command: {}",
            cfg.name,
            rendered_cmd
        );
        // Resolve `$VAR` references in env values against the environment;
        // missing variables become empty strings.
        let evaluated_envs: HashMap<String, String> = cfg
            .env_vars
            .iter()
            .map(|(k, v)| {
                let value = match v.strip_prefix('$') {
                    Some(var_name) => std::env::var(var_name).unwrap_or_default(),
                    None => v.clone(),
                };
                (k.clone(), value)
            })
            .collect();
        let child = std::process::Command::new("sh")
            .current_dir(&bin_path)
            .arg("-c")
            .arg(&rendered_cmd)
            .envs(&evaluated_envs)
            .spawn();
        // Give the process a moment to come up (or fail fast).
        std::thread::sleep(std::time::Duration::from_secs(2));
        match child {
            Ok(c) => Ok(c),
            Err(e) => {
                let err_msg = e.to_string();
                if err_msg.contains("already running")
                    || err_msg.contains("be running")
                    || cfg.name == "tables"
                {
                    trace!(
                        "Component {} may already be running, continuing anyway",
                        cfg.name
                    );
                    // BUG FIX: the original spawned `sh -c` with no command
                    // argument, which fails at runtime; spawn the same
                    // no-op child used in the already-running path above.
                    Ok(std::process::Command::new("sh")
                        .arg("-c")
                        .arg("true")
                        .spawn()?)
                } else {
                    Err(e.into())
                }
            }
        }
    } else {
        Err(anyhow::anyhow!("Component {} not found", component))
    }
}
}

View file

@ -0,0 +1,43 @@
pub mod component;
pub mod installer;
pub mod os;
pub mod setup;
pub use installer::PackageManager;
pub mod cli;
pub mod facade;
/// Installation strategy for components.
#[derive(Debug, Clone, PartialEq)]
pub enum InstallMode {
    /// Install directly on the local host.
    Local,
    /// Install inside a container.
    Container,
}
/// Operating-system family a component is being installed on.
#[derive(Debug, Clone, PartialEq)]
pub enum OsType {
    Linux,
    MacOS,
    Windows,
}
/// Descriptor for a managed component: its registry name and the process
/// name used when terminating it.
#[derive(Debug)]
pub struct ComponentInfo {
    pub name: &'static str,
    pub termination_command: &'static str,
}

/// List every known component with its associated termination process name.
pub fn get_all_components() -> Vec<ComponentInfo> {
    // (registry name, process name to terminate)
    let entries = [
        ("tables", "postgres"),
        ("cache", "redis-server"),
        ("drive", "minio"),
        ("llm", "llama-server"),
    ];
    entries
        .iter()
        .map(|&(name, termination_command)| ComponentInfo {
            name,
            termination_command,
        })
        .collect()
}

View file

@ -0,0 +1,12 @@
use crate::package_manager::OsType;
/// Detect the operating system at compile time. Anything that is not
/// macOS or Windows (including unrecognized targets) maps to Linux, which
/// matches the original fallback behavior.
pub fn detect_os() -> OsType {
    if cfg!(target_os = "macos") {
        OsType::MacOS
    } else if cfg!(target_os = "windows") {
        OsType::Windows
    } else {
        OsType::Linux
    }
}

View file

@ -0,0 +1,25 @@
// Placeholder test scaffolding for the package-manager module. Every test
// only runs the shared setup and asserts `true`; real coverage is TODO.
#[cfg(test)]
mod tests {
    use super::*;
    use crate::tests::test_util;
    #[test]
    fn test_package_manager_module() {
        test_util::setup();
        // TODO: replace placeholder assertion with real checks.
        assert!(true, "Basic package manager module test");
    }
    #[test]
    fn test_cli_interface() {
        test_util::setup();
        // TODO: exercise the CLI module.
        assert!(true, "CLI interface placeholder test");
    }
    #[test]
    fn test_component_management() {
        test_util::setup();
        // TODO: exercise component registration/start.
        assert!(true, "Component management placeholder test");
    }
    #[test]
    fn test_os_specific() {
        test_util::setup();
        // TODO: exercise detect_os and OS-specific install paths.
        assert!(true, "OS-specific functionality placeholder test");
    }
}

View file

@ -0,0 +1,497 @@
use anyhow::Result;
use reqwest::Client;
use serde::{Deserialize, Serialize};
use serde_json::json;
use std::path::PathBuf;
use std::time::Duration;
use tokio::fs;
use tokio::time::sleep;
/// Directory (Zitadel) auto-setup manager.
///
/// Drives first-run provisioning of the identity service: waits for
/// readiness, creates the default org/user/OAuth app, and persists the
/// resulting configuration to `config_path`.
#[derive(Debug)]
pub struct DirectorySetup {
    // Base URL of the Zitadel HTTP API (e.g. "http://localhost:8080").
    base_url: String,
    // Shared HTTP client; built with a 30s request timeout in `new`.
    client: Client,
    // Management-API bearer token; populated lazily by `ensure_admin_token`
    // from DIRECTORY_ADMIN_TOKEN (default "zitadel-admin-sa").
    admin_token: Option<String>,
    // Location of the persisted DirectoryConfig JSON.
    config_path: PathBuf,
}
impl DirectorySetup {
    /// Overwrite the management-API admin token.
    pub fn set_admin_token(&mut self, token: String) {
        self.admin_token = Some(token);
    }

    /// Populate the admin token if unset, reading `DIRECTORY_ADMIN_TOKEN`
    /// from the environment and falling back to Zitadel's default
    /// service-account name. Always succeeds.
    pub async fn ensure_admin_token(&mut self) -> Result<()> {
        self.admin_token.get_or_insert_with(|| {
            std::env::var("DIRECTORY_ADMIN_TOKEN")
                .unwrap_or_else(|_| "zitadel-admin-sa".to_string())
        });
        Ok(())
    }
}
/// Organization created during first-run provisioning.
#[derive(Debug, Serialize, Deserialize)]
pub struct DefaultOrganization {
    // Zitadel organization id.
    pub id: String,
    pub name: String,
    // Derived as "<lowercased name>.localhost" by the setup code.
    pub domain: String,
}
/// User created during first-run provisioning.
///
/// NOTE(review): the password is stored (and later serialized to the
/// config JSON) in plaintext — confirm this is acceptable for this
/// bootstrap flow.
#[derive(Debug, Serialize, Deserialize)]
pub struct DefaultUser {
    pub id: String,
    pub username: String,
    pub email: String,
    pub password: String,
    pub first_name: String,
    pub last_name: String,
}
/// Persisted result of Directory provisioning, written to `config_path`
/// as pretty-printed JSON and reloaded on subsequent runs.
#[derive(Debug, Serialize, Deserialize)]
pub struct DirectoryConfig {
    pub base_url: String,
    pub default_org: DefaultOrganization,
    pub default_user: DefaultUser,
    pub admin_token: String,
    // OAuth2 application coordinates for BotServer.
    pub project_id: String,
    pub client_id: String,
    pub client_secret: String,
}
impl DirectorySetup {
/// Create a setup manager targeting the Zitadel API at `base_url`,
/// persisting generated configuration to `config_path`.
pub fn new(base_url: String, config_path: PathBuf) -> Self {
    Self {
        base_url,
        client: Client::builder()
            .timeout(Duration::from_secs(30))
            .build()
            // Building a default client only fails on a broken local TLS
            // backend; surface that as an explicit setup bug, not a bare
            // unwrap panic.
            .expect("failed to build HTTP client for Directory setup"),
        admin_token: None,
        config_path,
    }
}
/// Wait for the directory service to answer `GET {base_url}/debug/ready`
/// with a success status, sleeping 3s between attempts.
///
/// # Errors
/// Fails after `max_attempts` unsuccessful probes.
pub async fn wait_for_ready(&self, max_attempts: u32) -> Result<()> {
    log::info!("Waiting for Directory service to be ready...");
    for attempt in 1..=max_attempts {
        let probe = self
            .client
            .get(format!("{}/debug/ready", self.base_url))
            .send()
            .await;
        if let Ok(response) = probe {
            if response.status().is_success() {
                log::info!("Directory service is ready!");
                return Ok(());
            }
        }
        // Transport errors and non-2xx statuses both count as "not ready".
        log::debug!(
            "Directory not ready yet (attempt {}/{})",
            attempt,
            max_attempts
        );
        sleep(Duration::from_secs(3)).await;
    }
    anyhow::bail!("Directory service did not become ready in time")
}
/// Initialize directory with default configuration.
///
/// Idempotent: if a saved `DirectoryConfig` already exists at
/// `config_path` it is returned unchanged. Otherwise waits for the
/// service, ensures an admin token, creates the default org, user and
/// OAuth2 app, grants the user admin rights, and persists the result.
///
/// # Errors
/// Fails if the service never becomes ready or any provisioning call
/// returns a non-success status.
pub async fn initialize(&mut self) -> Result<DirectoryConfig> {
    log::info!("🔧 Initializing Directory (Zitadel) with defaults...");
    // Check if already initialized (any readable config short-circuits).
    if let Ok(existing_config) = self.load_existing_config().await {
        log::info!("Directory already initialized, using existing config");
        return Ok(existing_config);
    }
    // Wait for service to be ready (up to 30 attempts, 3s apart).
    self.wait_for_ready(30).await?;
    // Get initial admin token (from Zitadel setup)
    self.ensure_admin_token().await?;
    // Create default organization
    let org = self.create_default_organization().await?;
    log::info!("✅ Created default organization: {}", org.name);
    // Create default user
    let user = self.create_default_user(&org.id).await?;
    log::info!("✅ Created default user: {}", user.username);
    // Create OAuth2 application for BotServer
    let (project_id, client_id, client_secret) = self.create_oauth_application(&org.id).await?;
    log::info!("✅ Created OAuth2 application");
    // Grant user admin permissions
    self.grant_user_permissions(&org.id, &user.id).await?;
    log::info!("✅ Granted admin permissions to default user");
    let config = DirectoryConfig {
        base_url: self.base_url.clone(),
        default_org: org,
        default_user: user,
        admin_token: self.admin_token.clone().unwrap_or_default(),
        project_id,
        client_id,
        client_secret,
    };
    // Save configuration
    self.save_config_internal(&config).await?;
    log::info!("✅ Saved Directory configuration");
    log::info!("🎉 Directory initialization complete!");
    log::info!(
        "📧 Default user: {} / {}",
        config.default_user.email,
        config.default_user.password
    );
    log::info!("🌐 Login at: {}", self.base_url);
    Ok(config)
}
/// Create an organization.
///
/// Posts `name`/`description` to `management/v1/orgs` and returns the new
/// organization's id (empty string if the response has no `id` field).
///
/// # Errors
/// Fails when the request errors or the API returns a non-success status.
pub async fn create_organization(&mut self, name: &str, description: &str) -> Result<String> {
    // Ensure we have admin token (guarantees the unwrap below is safe).
    self.ensure_admin_token().await?;
    let response = self
        .client
        .post(format!("{}/management/v1/orgs", self.base_url))
        .bearer_auth(self.admin_token.as_ref().unwrap())
        .json(&json!({
            "name": name,
            "description": description,
        }))
        .send()
        .await?;
    if !response.status().is_success() {
        let error_text = response.text().await?;
        anyhow::bail!("Failed to create organization: {}", error_text);
    }
    let result: serde_json::Value = response.json().await?;
    Ok(result["id"].as_str().unwrap_or("").to_string())
}
/// Create default organization.
///
/// The name comes from `DIRECTORY_DEFAULT_ORG` (default "BotServer"); the
/// reported domain is "<lowercased name>.localhost".
///
/// Precondition: `admin_token` must already be set (callers run
/// `ensure_admin_token` first; the `unwrap` panics otherwise).
async fn create_default_organization(&self) -> Result<DefaultOrganization> {
    let org_name =
        std::env::var("DIRECTORY_DEFAULT_ORG").unwrap_or_else(|_| "BotServer".to_string());
    let response = self
        .client
        .post(format!("{}/management/v1/orgs", self.base_url))
        .bearer_auth(self.admin_token.as_ref().unwrap())
        .json(&json!({
            "name": org_name,
        }))
        .send()
        .await?;
    if !response.status().is_success() {
        let error_text = response.text().await?;
        anyhow::bail!("Failed to create organization: {}", error_text);
    }
    let result: serde_json::Value = response.json().await?;
    Ok(DefaultOrganization {
        id: result["id"].as_str().unwrap_or("").to_string(),
        name: org_name.clone(),
        domain: format!("{}.localhost", org_name.to_lowercase()),
    })
}
/// Create a user in an organization.
///
/// Posts to `management/v1/users/human` with the email marked verified,
/// then grants ORG_OWNER via `grant_user_permissions` when `is_admin`.
/// The returned `DefaultUser` echoes the inputs plus the new user id.
///
/// # Errors
/// Fails when the request errors or the API returns a non-success status.
pub async fn create_user(
    &mut self,
    org_id: &str,
    username: &str,
    email: &str,
    password: &str,
    first_name: &str,
    last_name: &str,
    is_admin: bool,
) -> Result<DefaultUser> {
    // Ensure we have admin token (guarantees the unwrap below is safe).
    self.ensure_admin_token().await?;
    let response = self
        .client
        .post(format!("{}/management/v1/users/human", self.base_url))
        .bearer_auth(self.admin_token.as_ref().unwrap())
        .json(&json!({
            "userName": username,
            "profile": {
                "firstName": first_name,
                "lastName": last_name,
                "displayName": format!("{} {}", first_name, last_name)
            },
            "email": {
                "email": email,
                "isEmailVerified": true
            },
            "password": password,
            "organisation": {
                "orgId": org_id
            }
        }))
        .send()
        .await?;
    if !response.status().is_success() {
        let error_text = response.text().await?;
        anyhow::bail!("Failed to create user: {}", error_text);
    }
    let result: serde_json::Value = response.json().await?;
    let user = DefaultUser {
        id: result["userId"].as_str().unwrap_or("").to_string(),
        username: username.to_string(),
        email: email.to_string(),
        password: password.to_string(),
        first_name: first_name.to_string(),
        last_name: last_name.to_string(),
    };
    // Grant admin permissions if requested
    if is_admin {
        self.grant_user_permissions(org_id, &user.id).await?;
    }
    Ok(user)
}
/// Create default user in organization.
///
/// Credentials come from `DIRECTORY_DEFAULT_USERNAME` / `_EMAIL` /
/// `_PASSWORD` (defaults: admin / admin@localhost / BotServer123!).
///
/// Precondition: `admin_token` must already be set (the `unwrap` panics
/// otherwise).
async fn create_default_user(&self, org_id: &str) -> Result<DefaultUser> {
    let username =
        std::env::var("DIRECTORY_DEFAULT_USERNAME").unwrap_or_else(|_| "admin".to_string());
    let email = std::env::var("DIRECTORY_DEFAULT_EMAIL")
        .unwrap_or_else(|_| "admin@localhost".to_string());
    let password = std::env::var("DIRECTORY_DEFAULT_PASSWORD")
        .unwrap_or_else(|_| "BotServer123!".to_string());
    let response = self
        .client
        .post(format!("{}/management/v1/users/human", self.base_url))
        .bearer_auth(self.admin_token.as_ref().unwrap())
        .json(&json!({
            "userName": username,
            "profile": {
                "firstName": "Admin",
                "lastName": "User",
                "displayName": "Administrator"
            },
            "email": {
                "email": email,
                "isEmailVerified": true
            },
            "password": password,
            "organisation": {
                "orgId": org_id
            }
        }))
        .send()
        .await?;
    if !response.status().is_success() {
        let error_text = response.text().await?;
        anyhow::bail!("Failed to create user: {}", error_text);
    }
    let result: serde_json::Value = response.json().await?;
    Ok(DefaultUser {
        id: result["userId"].as_str().unwrap_or("").to_string(),
        username: username.clone(),
        email: email.clone(),
        password: password.clone(),
        first_name: "Admin".to_string(),
        last_name: "User".to_string(),
    })
}
/// Create OAuth2 application for BotServer.
///
/// Creates a Zitadel project, then an OIDC web app with the
/// authorization-code + refresh-token grants. Returns
/// `(project_id, client_id, client_secret)`.
///
/// NOTE(review): unlike the other calls, neither response's HTTP status is
/// checked before parsing — a failed project creation yields empty ids
/// rather than an error. Confirm whether that is intentional.
pub async fn create_oauth_application(
    &self,
    _org_id: &str,
) -> Result<(String, String, String)> {
    let app_name = "BotServer";
    let redirect_uri = std::env::var("DIRECTORY_REDIRECT_URI")
        .unwrap_or_else(|_| "http://localhost:8080/auth/callback".to_string());
    // Create project
    let project_response = self
        .client
        .post(format!("{}/management/v1/projects", self.base_url))
        .bearer_auth(self.admin_token.as_ref().unwrap())
        .json(&json!({
            "name": app_name,
        }))
        .send()
        .await?;
    let project_result: serde_json::Value = project_response.json().await?;
    let project_id = project_result["id"].as_str().unwrap_or("").to_string();
    // Create OIDC application
    let app_response = self.client
        .post(format!("{}/management/v1/projects/{}/apps/oidc", self.base_url, project_id))
        .bearer_auth(self.admin_token.as_ref().unwrap())
        .json(&json!({
            "name": app_name,
            "redirectUris": [redirect_uri],
            "responseTypes": ["OIDC_RESPONSE_TYPE_CODE"],
            "grantTypes": ["OIDC_GRANT_TYPE_AUTHORIZATION_CODE", "OIDC_GRANT_TYPE_REFRESH_TOKEN"],
            "appType": "OIDC_APP_TYPE_WEB",
            "authMethodType": "OIDC_AUTH_METHOD_TYPE_BASIC",
            "postLogoutRedirectUris": ["http://localhost:8080"],
        }))
        .send()
        .await?;
    let app_result: serde_json::Value = app_response.json().await?;
    let client_id = app_result["clientId"].as_str().unwrap_or("").to_string();
    let client_secret = app_result["clientSecret"]
        .as_str()
        .unwrap_or("")
        .to_string();
    Ok((project_id, client_id, client_secret))
}
/// Grant admin permissions to user.
///
/// Adds the user as an org member with the ORG_OWNER role.
///
/// NOTE(review): the response is ignored (`_response`), so a failed grant
/// is silent — confirm this best-effort behavior is intended.
pub async fn grant_user_permissions(&self, org_id: &str, user_id: &str) -> Result<()> {
    // Grant ORG_OWNER role
    let _response = self
        .client
        .post(format!(
            "{}/management/v1/orgs/{}/members",
            self.base_url, org_id
        ))
        .bearer_auth(self.admin_token.as_ref().unwrap())
        .json(&json!({
            "userId": user_id,
            "roles": ["ORG_OWNER"]
        }))
        .send()
        .await?;
    Ok(())
}
/// Save configuration to file.
///
/// Builds a `DirectoryConfig` from externally-provisioned values and
/// writes it as pretty JSON to `config_path`. `project_id` is left empty
/// here (it is only populated when the OAuth app is created via
/// `initialize`).
pub async fn save_config(
    &mut self,
    org_id: String,
    org_name: String,
    admin_user: DefaultUser,
    client_id: String,
    client_secret: String,
) -> Result<DirectoryConfig> {
    // Get or create admin token
    self.ensure_admin_token().await?;
    let config = DirectoryConfig {
        base_url: self.base_url.clone(),
        default_org: DefaultOrganization {
            id: org_id,
            name: org_name.clone(),
            domain: format!("{}.localhost", org_name.to_lowercase()),
        },
        default_user: admin_user,
        admin_token: self.admin_token.clone().unwrap_or_default(),
        project_id: String::new(), // This will be set if OAuth app is created
        client_id,
        client_secret,
    };
    // Save to file
    let json = serde_json::to_string_pretty(&config)?;
    fs::write(&self.config_path, json).await?;
    log::info!("Saved Directory configuration to {:?}", self.config_path);
    Ok(config)
}
/// Serialize `config` as pretty-printed JSON and write it to `config_path`.
async fn save_config_internal(&self, config: &DirectoryConfig) -> Result<()> {
    fs::write(&self.config_path, serde_json::to_string_pretty(config)?).await?;
    Ok(())
}
/// Read and deserialize a previously saved `DirectoryConfig`; errors if
/// the file is missing or malformed.
async fn load_existing_config(&self) -> Result<DirectoryConfig> {
    let raw = fs::read_to_string(&self.config_path).await?;
    Ok(serde_json::from_str(&raw)?)
}
/// Get stored configuration.
///
/// Thin read-only wrapper around `load_existing_config`; errors when no
/// configuration has been saved yet.
pub async fn get_config(&self) -> Result<DirectoryConfig> {
    self.load_existing_config().await
}
}
/// Generate Zitadel configuration file.
///
/// Writes a static YAML config to `config_path` pointing Zitadel at a
/// local Postgres and exposing it on localhost:8080 without TLS.
/// `_db_path` is currently unused.
///
/// NOTE(review): the YAML nesting relies on the literal's indentation —
/// verify the emitted file parses as intended.
pub async fn generate_directory_config(config_path: PathBuf, _db_path: PathBuf) -> Result<()> {
    // The template has no placeholders, so a plain literal replaces the
    // original no-arg `format!` (clippy: useless_format).
    let yaml_config = r#"
Log:
Level: info
Database:
Postgres:
Host: localhost
Port: 5432
Database: zitadel
User: zitadel
Password: zitadel
SSL:
Mode: disable
Machine:
Identification:
Hostname: localhost
WebhookAddress: http://localhost:8080
ExternalDomain: localhost:8080
ExternalPort: 8080
ExternalSecure: false
TLS:
Enabled: false
"#;
    fs::write(config_path, yaml_config).await?;
    Ok(())
}
#[cfg(test)]
mod tests {
    use super::*;
    // Smoke test: construction stores the base URL verbatim (no network).
    #[test]
    fn test_directory_setup_creation() {
        let setup = DirectorySetup::new(
            "http://localhost:8080".to_string(),
            PathBuf::from("/tmp/directory_config.json"),
        );
        assert_eq!(setup.base_url, "http://localhost:8080");
    }
}

View file

@ -0,0 +1,339 @@
use anyhow::Result;
use serde::{Deserialize, Serialize};
use std::path::PathBuf;
use std::time::Duration;
use tokio::fs;
use tokio::time::sleep;
/// Email (Stalwart) auto-setup manager.
///
/// Waits for the local SMTP listener, provisions the default domain and
/// admin account, and persists an `EmailConfig` to `config_path`.
#[derive(Debug)]
pub struct EmailSetup {
    // Base URL of the mail server's HTTP interface.
    base_url: String,
    // Admin credentials from EMAIL_ADMIN_USER / EMAIL_ADMIN_PASSWORD
    // (defaults set in `new`).
    admin_user: String,
    admin_pass: String,
    // Location of the persisted EmailConfig JSON.
    config_path: PathBuf,
}
/// Persisted result of email provisioning (hosts, ports, admin creds, and
/// whether Directory/OIDC integration was set up).
#[derive(Debug, Serialize, Deserialize)]
pub struct EmailConfig {
    pub base_url: String,
    pub smtp_host: String,
    pub smtp_port: u16,
    pub imap_host: String,
    pub imap_port: u16,
    pub admin_user: String,
    pub admin_pass: String,
    pub directory_integration: bool,
}
/// A mail domain and its enabled flag.
///
/// NOTE(review): not referenced by the visible setup code — presumably
/// reserved for future domain management; confirm before removing.
#[derive(Debug, Serialize, Deserialize)]
pub struct EmailDomain {
    pub domain: String,
    pub enabled: bool,
}
impl EmailSetup {
/// Build an email setup manager. Admin credentials are read from
/// `EMAIL_ADMIN_USER` / `EMAIL_ADMIN_PASSWORD`, defaulting to
/// "admin@localhost" / "EmailAdmin123!".
pub fn new(base_url: String, config_path: PathBuf) -> Self {
    Self {
        base_url,
        admin_user: std::env::var("EMAIL_ADMIN_USER")
            .unwrap_or_else(|_| "admin@localhost".to_string()),
        admin_pass: std::env::var("EMAIL_ADMIN_PASSWORD")
            .unwrap_or_else(|_| "EmailAdmin123!".to_string()),
        config_path,
    }
}
/// Wait for email service to be ready.
///
/// Readiness is approximated by a successful TCP connect to the local
/// SMTP port (127.0.0.1:25), retried every 3s.
///
/// # Errors
/// Fails after `max_attempts` unsuccessful connection attempts.
pub async fn wait_for_ready(&self, max_attempts: u32) -> Result<()> {
    log::info!("Waiting for Email service to be ready...");
    for attempt in 1..=max_attempts {
        // Check SMTP port (`.is_ok()` replaces the original redundant
        // `if let Ok(_)` pattern match).
        if tokio::net::TcpStream::connect("127.0.0.1:25").await.is_ok() {
            log::info!("Email service is ready!");
            return Ok(());
        }
        log::debug!(
            "Email service not ready yet (attempt {}/{})",
            attempt,
            max_attempts
        );
        sleep(Duration::from_secs(3)).await;
    }
    anyhow::bail!("Email service did not become ready in time")
}
/// Initialize email server with default configuration.
///
/// Idempotent: an existing saved `EmailConfig` is returned unchanged.
/// Otherwise waits for SMTP, creates the default domain and admin
/// account, optionally wires up Directory (OIDC) integration when a
/// directory config path is supplied (failures there are logged and
/// downgrade to `directory_integration = false`), then persists the
/// resulting config.
pub async fn initialize(
    &mut self,
    directory_config_path: Option<PathBuf>,
) -> Result<EmailConfig> {
    log::info!("🔧 Initializing Email (Stalwart) server...");
    // Check if already initialized
    if let Ok(existing_config) = self.load_existing_config().await {
        log::info!("Email already initialized, using existing config");
        return Ok(existing_config);
    }
    // Wait for service to be ready (up to 30 attempts, 3s apart).
    self.wait_for_ready(30).await?;
    // Create default domain
    self.create_default_domain().await?;
    log::info!("✅ Created default email domain: localhost");
    // Set up Directory (Zitadel) integration if available; best-effort.
    let directory_integration = if let Some(dir_config_path) = directory_config_path {
        match self.setup_directory_integration(&dir_config_path).await {
            Ok(_) => {
                log::info!("✅ Integrated with Directory for authentication");
                true
            }
            Err(e) => {
                log::warn!("⚠️ Directory integration failed: {}", e);
                false
            }
        }
    } else {
        false
    };
    // Create admin account
    self.create_admin_account().await?;
    log::info!("✅ Created admin email account: {}", self.admin_user);
    let config = EmailConfig {
        base_url: self.base_url.clone(),
        smtp_host: "localhost".to_string(),
        smtp_port: 25,
        imap_host: "localhost".to_string(),
        imap_port: 143,
        admin_user: self.admin_user.clone(),
        admin_pass: self.admin_pass.clone(),
        directory_integration,
    };
    // Save configuration
    self.save_config(&config).await?;
    log::info!("✅ Saved Email configuration");
    log::info!("🎉 Email initialization complete!");
    log::info!("📧 SMTP: localhost:25 (587 for TLS)");
    log::info!("📬 IMAP: localhost:143 (993 for TLS)");
    log::info!("👤 Admin: {} / {}", config.admin_user, config.admin_pass);
    Ok(config)
}
/// Create default email domain.
///
/// Currently a no-op placeholder: per the original note, Stalwart
/// auto-creates domains from its configuration.
async fn create_default_domain(&self) -> Result<()> {
    // Stalwart auto-creates domains based on config
    // For now, ensure localhost domain exists
    Ok(())
}
/// Create admin email account.
///
/// Placeholder: only logs. The actual Stalwart management-API call is
/// not implemented yet.
async fn create_admin_account(&self) -> Result<()> {
    // In Stalwart, accounts are created via management API
    // This is a placeholder - implement actual Stalwart API calls
    log::info!("Creating admin email account...");
    Ok(())
}
/// Set up Directory (Zitadel) integration for authentication.
///
/// Reads the saved directory config JSON and logs the OIDC issuer URL.
/// The actual Stalwart config update is not implemented yet — this only
/// validates that the directory config file is readable and parseable.
async fn setup_directory_integration(&self, directory_config_path: &PathBuf) -> Result<()> {
    let content = fs::read_to_string(directory_config_path).await?;
    let dir_config: serde_json::Value = serde_json::from_str(&content)?;
    let issuer_url = dir_config["base_url"]
        .as_str()
        .unwrap_or("http://localhost:8080");
    log::info!("Setting up OIDC authentication with Directory...");
    log::info!("Issuer URL: {}", issuer_url);
    // Configure Stalwart to use Zitadel for authentication
    // This would typically be done via config file updates
    Ok(())
}
/// Persist the email configuration as pretty-printed JSON at `config_path`.
async fn save_config(&self, config: &EmailConfig) -> Result<()> {
    fs::write(&self.config_path, serde_json::to_string_pretty(config)?).await?;
    Ok(())
}
/// Read and deserialize a previously saved `EmailConfig`; errors if the
/// file is missing or malformed.
async fn load_existing_config(&self) -> Result<EmailConfig> {
    let raw = fs::read_to_string(&self.config_path).await?;
    Ok(serde_json::from_str(&raw)?)
}
/// Get stored configuration.
///
/// Thin read-only wrapper around `load_existing_config`.
pub async fn get_config(&self) -> Result<EmailConfig> {
    self.load_existing_config().await
}
/// Create email account for Directory user.
///
/// Placeholder: only logs. `_username` and `_password` are currently
/// unused; the Stalwart management-API call is not implemented yet.
pub async fn create_user_mailbox(
    &self,
    _username: &str,
    _password: &str,
    email: &str,
) -> Result<()> {
    log::info!("Creating mailbox for user: {}", email);
    // Implement Stalwart mailbox creation
    // This would use Stalwart's management API
    Ok(())
}
/// Sync users from Directory to Email.
///
/// Reads the directory config JSON and creates a mailbox for its
/// `default_user` (only — no other users are synced). Entries with an
/// empty email are skipped silently.
pub async fn sync_users_from_directory(&self, directory_config_path: &PathBuf) -> Result<()> {
    log::info!("Syncing users from Directory to Email...");
    let content = fs::read_to_string(directory_config_path).await?;
    let dir_config: serde_json::Value = serde_json::from_str(&content)?;
    // Get default user from Directory
    if let Some(default_user) = dir_config.get("default_user") {
        let email = default_user["email"].as_str().unwrap_or("");
        let password = default_user["password"].as_str().unwrap_or("");
        let username = default_user["username"].as_str().unwrap_or("");
        if !email.is_empty() {
            self.create_user_mailbox(username, password, email).await?;
            log::info!("✅ Created mailbox for: {}", email);
        }
    }
    Ok(())
}
}
/// Generate Stalwart email server configuration.
///
/// Writes a TOML config to `config_path` with SMTP (25/587/465), IMAP
/// (143/993) and HTTP (8080) listeners backed by a SQLite store under
/// `data_path`. When `directory_integration` is true an OIDC directory
/// section is appended with the local store as fallback; otherwise only
/// local authentication is configured.
///
/// NOTE(review): the `{{CLIENT_ID}}`/`{{CLIENT_SECRET}}` placeholders in
/// the OIDC section are written verbatim and never substituted here —
/// confirm a later step fills them in.
pub async fn generate_email_config(
    config_path: PathBuf,
    data_path: PathBuf,
    directory_integration: bool,
) -> Result<()> {
    // Base config: the single `{}` is filled with data_path for SQLite.
    let mut config = format!(
        r#"
[server]
hostname = "localhost"
[server.listener."smtp"]
bind = ["0.0.0.0:25"]
protocol = "smtp"
[server.listener."smtp-submission"]
bind = ["0.0.0.0:587"]
protocol = "smtp"
tls.implicit = false
[server.listener."smtp-submissions"]
bind = ["0.0.0.0:465"]
protocol = "smtp"
tls.implicit = true
[server.listener."imap"]
bind = ["0.0.0.0:143"]
protocol = "imap"
[server.listener."imaps"]
bind = ["0.0.0.0:993"]
protocol = "imap"
tls.implicit = true
[server.listener."http"]
bind = ["0.0.0.0:8080"]
protocol = "http"
[storage]
data = "sqlite"
blob = "sqlite"
lookup = "sqlite"
fts = "sqlite"
[store."sqlite"]
type = "sqlite"
path = "{}/stalwart.db"
[directory."local"]
type = "internal"
store = "sqlite"
"#,
        data_path.display()
    );
    // Add Directory (Zitadel) OIDC integration if enabled
    if directory_integration {
        config.push_str(
            r#"
[directory."oidc"]
type = "oidc"
issuer = "http://localhost:8080"
client-id = "{{CLIENT_ID}}"
client-secret = "{{CLIENT_SECRET}}"
[authentication]
mechanisms = ["plain", "login"]
directory = "oidc"
fallback-directory = "local"
"#,
        );
    } else {
        config.push_str(
            r#"
[authentication]
mechanisms = ["plain", "login"]
directory = "local"
"#,
        );
    }
    fs::write(config_path, config).await?;
    Ok(())
}
#[cfg(test)]
mod tests {
    use super::*;
    // Smoke test: construction stores the base URL verbatim (no network).
    #[test]
    fn test_email_setup_creation() {
        let setup = EmailSetup::new(
            "http://localhost:8080".to_string(),
            PathBuf::from("/tmp/email_config.json"),
        );
        assert_eq!(setup.base_url, "http://localhost:8080");
    }
    // Verifies the non-integrated config file is actually written to disk.
    #[tokio::test]
    async fn test_generate_config() {
        let config_path = std::env::temp_dir().join("email_test_config.toml");
        let data_path = std::env::temp_dir().join("email_data");
        generate_email_config(config_path.clone(), data_path, false)
            .await
            .unwrap();
        assert!(config_path.exists());
        // Cleanup
        let _ = std::fs::remove_file(config_path);
    }
}

View file

@ -0,0 +1,5 @@
pub mod directory_setup;
pub mod email_setup;
pub use directory_setup::DirectorySetup;
pub use email_setup::EmailSetup;

457
src/core/session/mod.rs Normal file
View file

@ -0,0 +1,457 @@
use crate::bot::BotOrchestrator;
use crate::shared::models::UserSession;
use crate::shared::state::AppState;
use axum::{
extract::{Extension, Path},
http::StatusCode,
response::{IntoResponse, Json},
};
use chrono::Utc;
use diesel::prelude::*;
use diesel::r2d2::{ConnectionManager, PooledConnection};
use diesel::PgConnection;
use log::{error, trace, warn};
use redis::Client;
use serde::{Deserialize, Serialize};
use std::collections::{HashMap, HashSet};
use std::error::Error;
use std::sync::Arc;
use uuid::Uuid;
/// In-memory record of a session's most recent user-provided input.
#[derive(Clone, Serialize, Deserialize, Debug)]
pub struct SessionData {
    pub id: Uuid,
    // None for inputs recorded before the user is known.
    pub user_id: Option<Uuid>,
    // Latest raw input text for this session.
    pub data: String,
}
/// Manages chat sessions: persistent rows via Diesel/Postgres plus an
/// in-memory input buffer and waiting-for-input flags.
pub struct SessionManager {
    // Pooled Postgres connection used for all session/message queries.
    conn: PooledConnection<ConnectionManager<PgConnection>>,
    // In-memory per-session input data (see provide_input).
    sessions: HashMap<Uuid, SessionData>,
    // Sessions currently blocked waiting on user input.
    waiting_for_input: HashSet<Uuid>,
    // Optional Redis client; not used by the methods visible here.
    redis: Option<Arc<Client>>,
}
// Manual Debug impl: the pooled DB connection does not implement Debug,
// so it is rendered as a placeholder and the redis client as a bool.
impl std::fmt::Debug for SessionManager {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("SessionManager")
            .field("conn", &"PooledConnection<PgConnection>")
            .field("sessions", &self.sessions)
            .field("waiting_for_input", &self.waiting_for_input)
            .field("redis", &self.redis.is_some())
            .finish()
    }
}
impl SessionManager {
pub fn new(
conn: PooledConnection<ConnectionManager<PgConnection>>,
redis_client: Option<Arc<Client>>,
) -> Self {
SessionManager {
conn,
sessions: HashMap::new(),
waiting_for_input: HashSet::new(),
redis: redis_client,
}
}
/// Record `input` for the given session, creating an in-memory record
/// (with no user id) when the session is unknown, and clear the session's
/// waiting-for-input flag. Always returns `Ok(Some("user_input"))`.
pub fn provide_input(
    &mut self,
    session_id: Uuid,
    input: String,
) -> Result<Option<String>, Box<dyn Error + Send + Sync>> {
    trace!(
        "SessionManager.provide_input called for session {}",
        session_id
    );
    // Both the known- and unknown-session paths end with the same state:
    // the entry API collapses the original duplicated branches.
    self.sessions
        .entry(session_id)
        .or_insert_with(|| SessionData {
            id: session_id,
            user_id: None,
            data: String::new(),
        })
        .data = input;
    self.waiting_for_input.remove(&session_id);
    Ok(Some("user_input".to_string()))
}
/// Flag a session as blocked waiting for user input (cleared again by
/// `provide_input`).
pub fn mark_waiting(&mut self, session_id: Uuid) {
    self.waiting_for_input.insert(session_id);
}
/// Look up a session row by its primary key; `Ok(None)` when absent.
pub fn get_session_by_id(
    &mut self,
    session_id: Uuid,
) -> Result<Option<UserSession>, Box<dyn Error + Send + Sync>> {
    use crate::shared::models::user_sessions::dsl::*;
    let result = user_sessions
        .filter(id.eq(session_id))
        .first::<UserSession>(&mut self.conn)
        .optional()?;
    Ok(result)
}
/// Fetch the most recent session (by `created_at` descending) for a
/// user/bot pair; `Ok(None)` when the pair has no sessions.
pub fn get_user_session(
    &mut self,
    uid: Uuid,
    bid: Uuid,
) -> Result<Option<UserSession>, Box<dyn Error + Send + Sync>> {
    use crate::shared::models::user_sessions::dsl::*;
    let result = user_sessions
        .filter(user_id.eq(uid))
        .filter(bot_id.eq(bid))
        .order(created_at.desc())
        .first::<UserSession>(&mut self.conn)
        .optional()?;
    Ok(result)
}
/// Return the user/bot pair's most recent session, creating a new one
/// titled `session_title` when none exists yet.
pub fn get_or_create_user_session(
    &mut self,
    uid: Uuid,
    bid: Uuid,
    session_title: &str,
) -> Result<Option<UserSession>, Box<dyn Error + Send + Sync>> {
    match self.get_user_session(uid, bid)? {
        Some(existing) => Ok(Some(existing)),
        None => Ok(Some(self.create_session(uid, bid, session_title)?)),
    }
}
/// Ensure a `users` row exists for `uid` (a fresh UUID is generated when
/// none is supplied) and return the id.
///
/// Missing users are inserted as active guests named
/// `guest_<first-8-uuid-chars>` with a matching `@anonymous.local` email
/// and an empty password hash.
pub fn get_or_create_anonymous_user(
    &mut self,
    uid: Option<Uuid>,
) -> Result<Uuid, Box<dyn Error + Send + Sync>> {
    use crate::shared::models::users::dsl as users_dsl;
    let user_id = uid.unwrap_or_else(Uuid::new_v4);
    // Probe by primary key; select only the id to keep the query cheap.
    let user_exists: Option<Uuid> = users_dsl::users
        .filter(users_dsl::id.eq(user_id))
        .select(users_dsl::id)
        .first(&mut self.conn)
        .optional()?;
    if user_exists.is_none() {
        let now = Utc::now();
        diesel::insert_into(users_dsl::users)
            .values((
                users_dsl::id.eq(user_id),
                users_dsl::username.eq(format!("guest_{}", &user_id.to_string()[..8])),
                users_dsl::email.eq(format!(
                    "guest_{}@anonymous.local",
                    &user_id.to_string()[..8]
                )),
                users_dsl::password_hash.eq(""),
                users_dsl::is_active.eq(true),
                users_dsl::created_at.eq(now),
                users_dsl::updated_at.eq(now),
            ))
            .execute(&mut self.conn)?;
    }
    Ok(user_id)
}
    /// Insert a fresh session row for the user/bot pair and return it.
    ///
    /// The user id is first routed through `get_or_create_anonymous_user`,
    /// so a missing user row is created rather than violating the FK.
    /// Starts with empty JSON context and no current tool.
    pub fn create_session(
        &mut self,
        uid: Uuid,
        bid: Uuid,
        session_title: &str,
    ) -> Result<UserSession, Box<dyn Error + Send + Sync>> {
        use crate::shared::models::user_sessions::dsl::*;
        // Guarantees the referenced user exists (may differ only in that
        // it was just created as a guest account).
        let verified_uid = self.get_or_create_anonymous_user(Some(uid))?;
        let now = Utc::now();
        let inserted: UserSession = diesel::insert_into(user_sessions)
            .values((
                id.eq(Uuid::new_v4()),
                user_id.eq(verified_uid),
                bot_id.eq(bid),
                title.eq(session_title),
                context_data.eq(serde_json::json!({})),
                current_tool.eq(None::<String>),
                created_at.eq(now),
                updated_at.eq(now),
            ))
            .returning(UserSession::as_returning())
            .get_result(&mut self.conn)
            .map_err(|e| {
                // Log before propagating so the DB failure is visible even
                // if callers discard the error detail.
                error!("Failed to create session in database: {}", e);
                e
            })?;
        Ok(inserted)
    }
fn _clear_messages(&mut self, _session_id: Uuid) -> Result<(), Box<dyn Error + Send + Sync>> {
use crate::shared::models::message_history::dsl::*;
diesel::delete(message_history.filter(session_id.eq(session_id)))
.execute(&mut self.conn)?;
Ok(())
}
    /// Append one message to a session's history.
    ///
    /// `ro` is the numeric role code (per `get_conversation_history`:
    /// 1 = user, 2 = assistant, 3 = system, 9 = compact); `msg_type` is a
    /// caller-defined message-type code. The message index is the current
    /// row count for the session.
    pub fn save_message(
        &mut self,
        sess_id: Uuid,
        uid: Uuid,
        ro: i32,
        content: &str,
        msg_type: i32,
    ) -> Result<(), Box<dyn Error + Send + Sync>> {
        use crate::shared::models::message_history::dsl::*;
        // NOTE(review): `unwrap_or(0)` silently maps a failed COUNT to
        // index 0, and count-then-insert is racy if two writers append to
        // the same session concurrently — confirm both are acceptable.
        let next_index = message_history
            .filter(session_id.eq(sess_id))
            .count()
            .get_result::<i64>(&mut self.conn)
            .unwrap_or(0);
        diesel::insert_into(message_history)
            .values((
                id.eq(Uuid::new_v4()),
                session_id.eq(sess_id),
                user_id.eq(uid),
                role.eq(ro),
                content_encrypted.eq(content),
                message_type.eq(msg_type),
                message_index.eq(next_index),
                created_at.eq(chrono::Utc::now()),
            ))
            .execute(&mut self.conn)?;
        trace!(
            "Message saved for session {} with index {}",
            sess_id,
            next_index
        );
        Ok(())
    }
    /// Persist session context data to Redis at `context:{user}:{session}`.
    /// A warning-only no-op when no Redis client is configured.
    ///
    /// NOTE(review): `get_session_context_data` reads this base key as a
    /// context *name* and then dereferences
    /// `context:{user}:{session}:{name}` — confirm both sides agree on
    /// what the base key holds.
    pub async fn update_session_context(
        &mut self,
        session_id: &Uuid,
        user_id: &Uuid,
        context_data: String,
    ) -> Result<(), Box<dyn Error + Send + Sync>> {
        use redis::Commands;
        let redis_key = format!("context:{}:{}", user_id, session_id);
        if let Some(redis_client) = &self.redis {
            let mut conn = redis_client.get_connection()?;
            conn.set::<_, _, ()>(&redis_key, &context_data)?;
        } else {
            warn!("No Redis client configured, context not persisted");
        }
        Ok(())
    }
    /// Resolve the current context value for a session from Redis.
    ///
    /// Two-step lookup: `context:{user}:{session}` is read as the active
    /// context *name*, then `context:{user}:{session}:{name}` is read for
    /// the value. Any miss or Redis failure is logged and collapses to an
    /// empty string — this method never propagates cache errors.
    pub async fn get_session_context_data(
        &self,
        session_id: &Uuid,
        user_id: &Uuid,
    ) -> Result<String, Box<dyn Error + Send + Sync>> {
        use redis::Commands;
        let base_key = format!("context:{}:{}", user_id, session_id);
        if let Some(redis_client) = &self.redis {
            // Connection failure is tolerated: fall through to Ok("").
            let conn_option = redis_client
                .get_connection()
                .map_err(|e| {
                    warn!("Failed to get Cache connection: {}", e);
                    e
                })
                .ok();
            if let Some(mut connection) = conn_option {
                // Step 1: base key -> active context name.
                match connection.get::<_, Option<String>>(&base_key) {
                    Ok(Some(context_name)) => {
                        // Step 2: name-qualified key -> context value.
                        let full_key =
                            format!("context:{}:{}:{}", user_id, session_id, context_name);
                        match connection.get::<_, Option<String>>(&full_key) {
                            Ok(Some(context_value)) => {
                                trace!(
                                    "Retrieved context value from Cache for key {}: {} chars",
                                    full_key,
                                    context_value.len()
                                );
                                return Ok(context_value);
                            }
                            Ok(None) => {
                                trace!("No context value found for key: {}", full_key);
                            }
                            Err(e) => {
                                warn!("Failed to retrieve context value from Cache: {}", e);
                            }
                        }
                    }
                    Ok(None) => {
                        trace!("No context name found for key: {}", base_key);
                    }
                    Err(e) => {
                        warn!("Failed to retrieve context name from Cache: {}", e);
                    }
                }
            }
        }
        // No Redis, no key, or an error along the way: empty context.
        Ok(String::new())
    }
pub fn get_conversation_history(
&mut self,
sess_id: Uuid,
_uid: Uuid,
) -> Result<Vec<(String, String)>, Box<dyn Error + Send + Sync>> {
use crate::shared::models::message_history::dsl::*;
let messages = message_history
.filter(session_id.eq(sess_id))
.order(message_index.asc())
.select((role, content_encrypted))
.load::<(i32, String)>(&mut self.conn)?;
let mut history: Vec<(String, String)> = Vec::new();
for (other_role, content) in messages {
let role_str = match other_role {
1 => "user".to_string(),
2 => "assistant".to_string(),
3 => "system".to_string(),
9 => "compact".to_string(),
_ => "unknown".to_string(),
};
history.push((role_str, content));
}
Ok(history)
}
pub fn get_user_sessions(
&mut self,
uid: Uuid,
) -> Result<Vec<UserSession>, Box<dyn Error + Send + Sync>> {
use crate::shared::models::user_sessions::dsl::*;
let sessions = if uid == Uuid::nil() {
user_sessions
.order(created_at.desc())
.load::<UserSession>(&mut self.conn)?
} else {
user_sessions
.filter(user_id.eq(uid))
.order(created_at.desc())
.load::<UserSession>(&mut self.conn)?
};
Ok(sessions)
}
pub fn update_user_id(
&mut self,
session_id: Uuid,
new_user_id: Uuid,
) -> Result<(), Box<dyn Error + Send + Sync>> {
use crate::shared::models::user_sessions::dsl::*;
let updated_count = diesel::update(user_sessions.filter(id.eq(session_id)))
.set((user_id.eq(new_user_id), updated_at.eq(chrono::Utc::now())))
.execute(&mut self.conn)?;
if updated_count == 0 {
warn!("No session found with ID: {}", session_id);
} else {
trace!("Updated user ID for session: {}", session_id);
}
Ok(())
}
}
/* Axum handlers */
/// Create a new session (anonymous user)
pub async fn create_session(Extension(state): Extension<Arc<AppState>>) -> impl IntoResponse {
// Using a fixed anonymous user ID for simplicity
let user_id = Uuid::parse_str("00000000-0000-0000-0000-000000000001").unwrap();
let bot_id = Uuid::nil();
let session_result = {
let mut sm = state.session_manager.lock().await;
sm.get_or_create_user_session(user_id, bot_id, "New Conversation")
};
match session_result {
Ok(Some(session)) => (
StatusCode::OK,
Json(serde_json::json!({
"session_id": session.id,
"title": "New Conversation",
"created_at": Utc::now()
})),
),
Ok(None) => (
StatusCode::INTERNAL_SERVER_ERROR,
Json(serde_json::json!({ "error": "Failed to create session" })),
),
Err(e) => (
StatusCode::INTERNAL_SERVER_ERROR,
Json(serde_json::json!({ "error": e.to_string() })),
),
}
}
/// Get list of sessions for the anonymous user
pub async fn get_sessions(Extension(state): Extension<Arc<AppState>>) -> impl IntoResponse {
let user_id = Uuid::parse_str("00000000-0000-0000-0000-000000000001").unwrap();
let orchestrator = BotOrchestrator::new(state.clone());
match orchestrator.get_user_sessions(user_id).await {
Ok(sessions) => (StatusCode::OK, Json(serde_json::json!(sessions))),
Err(e) => (
StatusCode::INTERNAL_SERVER_ERROR,
Json(serde_json::json!({ "error": e.to_string() })),
),
}
}
/// Start a session (mark as waiting for input)
pub async fn start_session(
    Extension(state): Extension<Arc<AppState>>,
    Path(session_id): Path<String>,
) -> impl IntoResponse {
    // Guard clause: reject malformed ids before touching the manager.
    let session_uuid = match Uuid::parse_str(&session_id) {
        Ok(parsed) => parsed,
        Err(_) => {
            return (
                StatusCode::BAD_REQUEST,
                Json(serde_json::json!({ "error": "Invalid session ID" })),
            )
        }
    };
    let mut sm = state.session_manager.lock().await;
    match sm.get_session_by_id(session_uuid) {
        Ok(Some(_)) => {
            sm.mark_waiting(session_uuid);
            (
                StatusCode::OK,
                Json(serde_json::json!({ "status": "started", "session_id": session_id })),
            )
        }
        Ok(None) => (
            StatusCode::NOT_FOUND,
            Json(serde_json::json!({ "error": "Session not found" })),
        ),
        Err(e) => (
            StatusCode::INTERNAL_SERVER_ERROR,
            Json(serde_json::json!({ "error": e.to_string() })),
        ),
    }
}
/// Get conversation history for a session
pub async fn get_session_history(
    Extension(state): Extension<Arc<AppState>>,
    Path(session_id): Path<String>,
) -> impl IntoResponse {
    let user_id = Uuid::parse_str("00000000-0000-0000-0000-000000000001").unwrap();
    // Guard clause: reject malformed ids up front.
    let session_uuid = match Uuid::parse_str(&session_id) {
        Ok(parsed) => parsed,
        Err(_) => {
            return (
                StatusCode::BAD_REQUEST,
                Json(serde_json::json!({ "error": "Invalid session ID" })),
            )
        }
    };
    let orchestrator = BotOrchestrator::new(state.clone());
    match orchestrator
        .get_conversation_history(session_uuid, user_id)
        .await
    {
        Ok(history) => (StatusCode::OK, Json(serde_json::json!(history))),
        Err(e) => (
            StatusCode::INTERNAL_SERVER_ERROR,
            Json(serde_json::json!({ "error": e.to_string() })),
        ),
    }
}

View file

@ -0,0 +1,15 @@
#[cfg(test)]
mod tests {
    use super::*;
    use crate::tests::test_util;
    // Placeholder smoke test: only verifies the shared test setup runs
    // without panicking.
    #[test]
    fn test_session_module() {
        test_util::setup();
        assert!(true, "Basic session module test");
    }
    // TODO(review): replace these `assert!(true)` placeholders with real
    // assertions against session-manager behavior.
    #[test]
    fn test_session_management() {
        test_util::setup();
        assert!(true, "Session management placeholder test");
    }
}

623
src/core/shared/admin.rs Normal file
View file

@ -0,0 +1,623 @@
//! System Administration & Management Module
//!
//! Provides comprehensive system administration, monitoring, configuration,
//! and maintenance operations.
use axum::{
extract::{Path, Query, State},
http::StatusCode,
response::Json,
};
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use std::sync::Arc;
use uuid::Uuid;
use crate::shared::state::AppState;
// ===== Request/Response Structures =====
/// Body for `PUT /admin/config/update`: a single key/value to change.
#[derive(Debug, Deserialize)]
pub struct ConfigUpdateRequest {
    pub config_key: String,
    pub config_value: serde_json::Value,
}
/// Body for `POST /admin/maintenance/schedule`.
#[derive(Debug, Deserialize)]
pub struct MaintenanceScheduleRequest {
    pub scheduled_at: DateTime<Utc>,
    pub duration_minutes: u32,
    pub reason: String,
    pub notify_users: bool,
}
/// Body for `POST /admin/backup/create`.
#[derive(Debug, Deserialize)]
pub struct BackupRequest {
    pub backup_type: String,
    pub include_files: bool,
    pub include_database: bool,
    pub compression: Option<String>,
}
/// Body for `POST /admin/backup/restore`.
#[derive(Debug, Deserialize)]
pub struct RestoreRequest {
    pub backup_id: String,
    pub restore_point: DateTime<Utc>,
    pub verify_before_restore: bool,
}
/// Body for `POST /admin/users/manage`; `action` is one of
/// "suspend", "activate", "delete", "reset_password" (see `manage_users`).
#[derive(Debug, Deserialize)]
pub struct UserManagementRequest {
    pub user_id: Uuid,
    pub action: String,
    pub reason: Option<String>,
}
/// Body for `POST /admin/roles/manage`.
#[derive(Debug, Deserialize)]
pub struct RoleManagementRequest {
    pub role_name: String,
    pub permissions: Vec<String>,
    pub description: Option<String>,
}
/// Body for `POST /admin/quotas/manage`; targets a user or a group.
#[derive(Debug, Deserialize)]
pub struct QuotaManagementRequest {
    pub user_id: Option<Uuid>,
    pub group_id: Option<Uuid>,
    pub quota_type: String,
    pub limit_value: u64,
}
/// Body for `POST /admin/licenses/manage`.
#[derive(Debug, Deserialize)]
pub struct LicenseManagementRequest {
    pub license_key: String,
    pub license_type: String,
}
/// Query parameters shared by the log viewing/export endpoints.
#[derive(Debug, Deserialize)]
pub struct LogQuery {
    pub start_date: Option<String>,
    pub end_date: Option<String>,
    pub level: Option<String>,
    pub service: Option<String>,
    pub limit: Option<u32>,
}
/// Response for `GET /admin/system/status`.
#[derive(Debug, Serialize)]
pub struct SystemStatusResponse {
    pub status: String,
    pub uptime_seconds: u64,
    pub version: String,
    pub services: Vec<ServiceStatus>,
    pub health_checks: Vec<HealthCheck>,
    pub last_restart: DateTime<Utc>,
}
/// Per-service status entry inside `SystemStatusResponse`.
#[derive(Debug, Serialize)]
pub struct ServiceStatus {
    pub name: String,
    pub status: String,
    pub uptime_seconds: u64,
    pub memory_mb: f64,
    pub cpu_percent: f64,
}
/// Result of one health probe inside `SystemStatusResponse`.
#[derive(Debug, Serialize)]
pub struct HealthCheck {
    pub name: String,
    pub status: String,
    pub message: Option<String>,
    pub last_check: DateTime<Utc>,
}
/// Response for `GET /admin/system/metrics`.
#[derive(Debug, Serialize)]
pub struct SystemMetricsResponse {
    pub cpu_usage: f64,
    pub memory_total_mb: u64,
    pub memory_used_mb: u64,
    pub memory_percent: f64,
    pub disk_total_gb: u64,
    pub disk_used_gb: u64,
    pub disk_percent: f64,
    pub network_in_mbps: f64,
    pub network_out_mbps: f64,
    pub active_connections: u32,
    pub request_rate_per_minute: u32,
    pub error_rate_percent: f64,
}
/// One log record returned by `GET /admin/logs/view`.
#[derive(Debug, Serialize)]
pub struct LogEntry {
    pub id: Uuid,
    pub timestamp: DateTime<Utc>,
    pub level: String,
    pub service: String,
    pub message: String,
    pub metadata: Option<serde_json::Value>,
}
/// Response for `GET /admin/config`.
#[derive(Debug, Serialize)]
pub struct ConfigResponse {
    pub configs: Vec<ConfigItem>,
    pub last_updated: DateTime<Utc>,
}
/// One configuration entry inside `ConfigResponse`.
#[derive(Debug, Serialize)]
pub struct ConfigItem {
    pub key: String,
    pub value: serde_json::Value,
    pub description: Option<String>,
    pub editable: bool,
    pub requires_restart: bool,
}
/// Response for `POST /admin/maintenance/schedule`.
#[derive(Debug, Serialize)]
pub struct MaintenanceResponse {
    pub id: Uuid,
    pub scheduled_at: DateTime<Utc>,
    pub duration_minutes: u32,
    pub reason: String,
    pub status: String,
    pub created_by: String,
}
/// Backup descriptor used by the create/list backup endpoints.
#[derive(Debug, Serialize)]
pub struct BackupResponse {
    pub id: Uuid,
    pub backup_type: String,
    pub size_bytes: u64,
    pub created_at: DateTime<Utc>,
    pub status: String,
    pub download_url: Option<String>,
    pub expires_at: Option<DateTime<Utc>>,
}
/// One quota entry returned by `GET /admin/quotas`.
#[derive(Debug, Serialize)]
pub struct QuotaResponse {
    pub id: Uuid,
    pub entity_type: String,
    pub entity_id: Uuid,
    pub quota_type: String,
    pub limit_value: u64,
    pub current_value: u64,
    pub percent_used: f64,
}
/// One license entry returned by `GET /admin/licenses`.
#[derive(Debug, Serialize)]
pub struct LicenseResponse {
    pub id: Uuid,
    pub license_type: String,
    pub status: String,
    pub max_users: u32,
    pub current_users: u32,
    pub features: Vec<String>,
    pub issued_at: DateTime<Utc>,
    pub expires_at: Option<DateTime<Utc>>,
}
/// Generic success/failure acknowledgement.
#[derive(Debug, Serialize)]
pub struct SuccessResponse {
    pub success: bool,
    pub message: Option<String>,
}
// ===== API Handlers =====
/// GET /admin/system/status - Get overall system status
///
/// NOTE(review): returns hard-coded placeholder data (uptime, service
/// stats, health checks) — not yet wired to real monitoring, and `state`
/// is currently unused.
pub async fn get_system_status(
    State(state): State<Arc<AppState>>,
) -> Result<Json<SystemStatusResponse>, (StatusCode, Json<serde_json::Value>)> {
    let now = Utc::now();
    let status = SystemStatusResponse {
        status: "healthy".to_string(),
        uptime_seconds: 3600 * 24 * 7,
        version: "1.0.0".to_string(),
        services: vec![
            ServiceStatus {
                name: "web_server".to_string(),
                status: "running".to_string(),
                uptime_seconds: 3600 * 24 * 7,
                memory_mb: 256.5,
                cpu_percent: 12.3,
            },
            ServiceStatus {
                name: "database".to_string(),
                status: "running".to_string(),
                uptime_seconds: 3600 * 24 * 7,
                memory_mb: 512.8,
                cpu_percent: 8.5,
            },
            ServiceStatus {
                name: "cache".to_string(),
                status: "running".to_string(),
                uptime_seconds: 3600 * 24 * 7,
                memory_mb: 128.2,
                cpu_percent: 3.2,
            },
            ServiceStatus {
                name: "storage".to_string(),
                status: "running".to_string(),
                uptime_seconds: 3600 * 24 * 7,
                memory_mb: 64.1,
                cpu_percent: 5.8,
            },
        ],
        health_checks: vec![
            HealthCheck {
                name: "database_connection".to_string(),
                status: "passed".to_string(),
                message: Some("Connected successfully".to_string()),
                last_check: now,
            },
            HealthCheck {
                name: "storage_access".to_string(),
                status: "passed".to_string(),
                message: Some("Storage accessible".to_string()),
                last_check: now,
            },
            HealthCheck {
                name: "api_endpoints".to_string(),
                status: "passed".to_string(),
                message: Some("All endpoints responding".to_string()),
                last_check: now,
            },
        ],
        // Fixed "7 days ago" placeholder restart time.
        last_restart: now.checked_sub_signed(chrono::Duration::days(7)).unwrap(),
    };
    Ok(Json(status))
}
/// GET /admin/system/metrics - Get system performance metrics
///
/// NOTE(review): returns hard-coded sample metrics; `state` is unused
/// until real metrics collection is wired in.
pub async fn get_system_metrics(
    State(state): State<Arc<AppState>>,
) -> Result<Json<SystemMetricsResponse>, (StatusCode, Json<serde_json::Value>)> {
    let metrics = SystemMetricsResponse {
        cpu_usage: 23.5,
        memory_total_mb: 8192,
        memory_used_mb: 4096,
        memory_percent: 50.0,
        disk_total_gb: 500,
        disk_used_gb: 350,
        disk_percent: 70.0,
        network_in_mbps: 12.5,
        network_out_mbps: 8.3,
        active_connections: 256,
        request_rate_per_minute: 1250,
        error_rate_percent: 0.5,
    };
    Ok(Json(metrics))
}
/// GET /admin/logs/view - View system logs
///
/// NOTE(review): the `params` filters (date range, level, service, limit)
/// are accepted but ignored — three hard-coded sample entries are returned.
pub async fn view_logs(
    State(state): State<Arc<AppState>>,
    Query(params): Query<LogQuery>,
) -> Result<Json<Vec<LogEntry>>, (StatusCode, Json<serde_json::Value>)> {
    let now = Utc::now();
    let logs = vec![
        LogEntry {
            id: Uuid::new_v4(),
            timestamp: now,
            level: "info".to_string(),
            service: "web_server".to_string(),
            message: "Request processed successfully".to_string(),
            metadata: Some(serde_json::json!({
                "endpoint": "/api/files/list",
                "duration_ms": 45,
                "status_code": 200
            })),
        },
        LogEntry {
            id: Uuid::new_v4(),
            timestamp: now.checked_sub_signed(chrono::Duration::minutes(5)).unwrap(),
            level: "warning".to_string(),
            service: "database".to_string(),
            message: "Slow query detected".to_string(),
            metadata: Some(serde_json::json!({
                "query": "SELECT * FROM users WHERE...",
                "duration_ms": 1250
            })),
        },
        LogEntry {
            id: Uuid::new_v4(),
            timestamp: now.checked_sub_signed(chrono::Duration::minutes(10)).unwrap(),
            level: "error".to_string(),
            service: "storage".to_string(),
            message: "Failed to upload file".to_string(),
            metadata: Some(serde_json::json!({
                "file": "document.pdf",
                "error": "Connection timeout"
            })),
        },
    ];
    Ok(Json(logs))
}
/// POST /admin/logs/export - Export system logs
pub async fn export_logs(
    State(state): State<Arc<AppState>>,
    Query(params): Query<LogQuery>,
) -> Result<Json<SuccessResponse>, (StatusCode, Json<serde_json::Value>)> {
    // Stub: acknowledges the request without performing a real export.
    let ack = SuccessResponse {
        success: true,
        message: Some("Logs exported successfully".to_string()),
    };
    Ok(Json(ack))
}
/// GET /admin/config - Get system configuration
///
/// NOTE(review): returns a hard-coded sample configuration set; `state`
/// is unused until a real config store is wired in.
pub async fn get_config(
    State(state): State<Arc<AppState>>,
) -> Result<Json<ConfigResponse>, (StatusCode, Json<serde_json::Value>)> {
    let now = Utc::now();
    let config = ConfigResponse {
        configs: vec![
            ConfigItem {
                key: "max_upload_size_mb".to_string(),
                value: serde_json::json!(100),
                description: Some("Maximum file upload size in MB".to_string()),
                editable: true,
                requires_restart: false,
            },
            ConfigItem {
                key: "session_timeout_minutes".to_string(),
                value: serde_json::json!(30),
                description: Some("User session timeout in minutes".to_string()),
                editable: true,
                requires_restart: false,
            },
            ConfigItem {
                key: "enable_2fa".to_string(),
                value: serde_json::json!(true),
                description: Some("Enable two-factor authentication".to_string()),
                editable: true,
                requires_restart: false,
            },
            ConfigItem {
                key: "database_pool_size".to_string(),
                value: serde_json::json!(20),
                description: Some("Database connection pool size".to_string()),
                editable: true,
                requires_restart: true,
            },
        ],
        last_updated: now,
    };
    Ok(Json(config))
}
/// PUT /admin/config/update - Update system configuration
pub async fn update_config(
    State(state): State<Arc<AppState>>,
    Json(req): Json<ConfigUpdateRequest>,
) -> Result<Json<SuccessResponse>, (StatusCode, Json<serde_json::Value>)> {
    // Stub: echoes success without persisting the new value.
    let message = format!("Configuration '{}' updated successfully", req.config_key);
    Ok(Json(SuccessResponse {
        success: true,
        message: Some(message),
    }))
}
/// POST /admin/maintenance/schedule - Schedule maintenance window
pub async fn schedule_maintenance(
    State(state): State<Arc<AppState>>,
    Json(req): Json<MaintenanceScheduleRequest>,
) -> Result<Json<MaintenanceResponse>, (StatusCode, Json<serde_json::Value>)> {
    // Stub: echoes the request back as a freshly-ided "scheduled" window.
    Ok(Json(MaintenanceResponse {
        id: Uuid::new_v4(),
        scheduled_at: req.scheduled_at,
        duration_minutes: req.duration_minutes,
        reason: req.reason,
        status: "scheduled".to_string(),
        created_by: "admin".to_string(),
    }))
}
/// POST /admin/backup/create - Create system backup
pub async fn create_backup(
    State(state): State<Arc<AppState>>,
    Json(req): Json<BackupRequest>,
) -> Result<Json<BackupResponse>, (StatusCode, Json<serde_json::Value>)> {
    // Stub: reports a fixed-size, already-completed backup that expires
    // in 30 days.
    let backup_id = Uuid::new_v4();
    let created_at = Utc::now();
    let download_url = format!("/admin/backups/{}/download", backup_id);
    let expires_at = created_at
        .checked_add_signed(chrono::Duration::days(30))
        .unwrap();
    Ok(Json(BackupResponse {
        id: backup_id,
        backup_type: req.backup_type,
        size_bytes: 1024 * 1024 * 500,
        created_at,
        status: "completed".to_string(),
        download_url: Some(download_url),
        expires_at: Some(expires_at),
    }))
}
/// POST /admin/backup/restore - Restore from backup
pub async fn restore_backup(
    State(state): State<Arc<AppState>>,
    Json(req): Json<RestoreRequest>,
) -> Result<Json<SuccessResponse>, (StatusCode, Json<serde_json::Value>)> {
    // Stub: acknowledges the restore request without doing any work.
    let message = format!("Restore from backup {} initiated", req.backup_id);
    Ok(Json(SuccessResponse {
        success: true,
        message: Some(message),
    }))
}
/// GET /admin/backups - List available backups
///
/// NOTE(review): returns two hard-coded sample backups; `state` is unused.
pub async fn list_backups(
    State(state): State<Arc<AppState>>,
) -> Result<Json<Vec<BackupResponse>>, (StatusCode, Json<serde_json::Value>)> {
    let now = Utc::now();
    let backups = vec![
        BackupResponse {
            id: Uuid::new_v4(),
            backup_type: "full".to_string(),
            size_bytes: 1024 * 1024 * 500,
            created_at: now.checked_sub_signed(chrono::Duration::days(1)).unwrap(),
            status: "completed".to_string(),
            download_url: Some("/admin/backups/1/download".to_string()),
            expires_at: Some(now.checked_add_signed(chrono::Duration::days(29)).unwrap()),
        },
        BackupResponse {
            id: Uuid::new_v4(),
            backup_type: "incremental".to_string(),
            size_bytes: 1024 * 1024 * 50,
            created_at: now.checked_sub_signed(chrono::Duration::hours(12)).unwrap(),
            status: "completed".to_string(),
            download_url: Some("/admin/backups/2/download".to_string()),
            expires_at: Some(now.checked_add_signed(chrono::Duration::days(29)).unwrap()),
        },
    ];
    Ok(Json(backups))
}
/// POST /admin/users/manage - Manage user accounts
pub async fn manage_users(
State(state): State<Arc<AppState>>,
Json(req): Json<UserManagementRequest>,
) -> Result<Json<SuccessResponse>, (StatusCode, Json<serde_json::Value>)> {
let message = match req.action.as_str() {
"suspend" => format!("User {} suspended", req.user_id),
"activate" => format!("User {} activated", req.user_id),
"delete" => format!("User {} deleted", req.user_id),
"reset_password" => format!("Password reset for user {}", req.user_id),
_ => format!("Action {} performed on user {}", req.action, req.user_id),
};
Ok(Json(SuccessResponse {
success: true,
message: Some(message),
}))
}
/// GET /admin/roles - Get all roles
///
/// NOTE(review): returns hard-coded sample roles with fresh random ids on
/// every call; `state` is unused.
pub async fn get_roles(
    State(state): State<Arc<AppState>>,
) -> Result<Json<Vec<serde_json::Value>>, (StatusCode, Json<serde_json::Value>)> {
    let roles = vec![
        serde_json::json!({
            "id": Uuid::new_v4(),
            "name": "admin",
            "description": "Full system access",
            "permissions": ["*"],
            "user_count": 5
        }),
        serde_json::json!({
            "id": Uuid::new_v4(),
            "name": "user",
            "description": "Standard user access",
            "permissions": ["read:own", "write:own"],
            "user_count": 1245
        }),
        serde_json::json!({
            "id": Uuid::new_v4(),
            "name": "guest",
            "description": "Limited read-only access",
            "permissions": ["read:public"],
            "user_count": 328
        }),
    ];
    Ok(Json(roles))
}
/// POST /admin/roles/manage - Create or update role
pub async fn manage_roles(
    State(state): State<Arc<AppState>>,
    Json(req): Json<RoleManagementRequest>,
) -> Result<Json<SuccessResponse>, (StatusCode, Json<serde_json::Value>)> {
    // Stub: acknowledges without creating or updating anything.
    let message = format!("Role '{}' managed successfully", req.role_name);
    Ok(Json(SuccessResponse {
        success: true,
        message: Some(message),
    }))
}
/// GET /admin/quotas - Get all quotas
///
/// NOTE(review): returns hard-coded sample quotas; `state` is unused.
pub async fn get_quotas(
    State(state): State<Arc<AppState>>,
) -> Result<Json<Vec<QuotaResponse>>, (StatusCode, Json<serde_json::Value>)> {
    let quotas = vec![
        QuotaResponse {
            id: Uuid::new_v4(),
            entity_type: "user".to_string(),
            entity_id: Uuid::new_v4(),
            quota_type: "storage".to_string(),
            limit_value: 10 * 1024 * 1024 * 1024,
            current_value: 7 * 1024 * 1024 * 1024,
            percent_used: 70.0,
        },
        QuotaResponse {
            id: Uuid::new_v4(),
            entity_type: "user".to_string(),
            entity_id: Uuid::new_v4(),
            quota_type: "api_calls".to_string(),
            limit_value: 10000,
            current_value: 3500,
            percent_used: 35.0,
        },
    ];
    Ok(Json(quotas))
}
/// POST /admin/quotas/manage - Set or update quotas
pub async fn manage_quotas(
    State(state): State<Arc<AppState>>,
    Json(req): Json<QuotaManagementRequest>,
) -> Result<Json<SuccessResponse>, (StatusCode, Json<serde_json::Value>)> {
    // Stub: acknowledges without persisting a quota.
    let message = format!("Quota '{}' set successfully", req.quota_type);
    Ok(Json(SuccessResponse {
        success: true,
        message: Some(message),
    }))
}
/// GET /admin/licenses - Get license information
///
/// NOTE(review): returns a hard-coded sample enterprise license; `state`
/// is unused.
pub async fn get_licenses(
    State(state): State<Arc<AppState>>,
) -> Result<Json<Vec<LicenseResponse>>, (StatusCode, Json<serde_json::Value>)> {
    let now = Utc::now();
    let licenses = vec![
        LicenseResponse {
            id: Uuid::new_v4(),
            license_type: "enterprise".to_string(),
            status: "active".to_string(),
            max_users: 1000,
            current_users: 850,
            features: vec![
                "unlimited_storage".to_string(),
                "advanced_analytics".to_string(),
                "priority_support".to_string(),
                "custom_integrations".to_string(),
            ],
            issued_at: now.checked_sub_signed(chrono::Duration::days(180)).unwrap(),
            expires_at: Some(now.checked_add_signed(chrono::Duration::days(185)).unwrap()),
        },
    ];
    Ok(Json(licenses))
}
/// POST /admin/licenses/manage - Add or update license
pub async fn manage_licenses(
    State(state): State<Arc<AppState>>,
    Json(req): Json<LicenseManagementRequest>,
) -> Result<Json<SuccessResponse>, (StatusCode, Json<serde_json::Value>)> {
    // Stub: acknowledges without validating or storing the license key.
    let message = format!("License '{}' activated successfully", req.license_type);
    Ok(Json(SuccessResponse {
        success: true,
        message: Some(message),
    }))
}

View file

@ -0,0 +1,557 @@
//! Analytics & Reporting Module
//!
//! Provides comprehensive analytics, reporting, and insights generation capabilities.
use axum::{
extract::{Path, Query, State},
http::StatusCode,
response::Json,
};
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use std::sync::Arc;
use uuid::Uuid;
use crate::shared::state::AppState;
// ===== Request/Response Structures =====
/// Query parameters for `POST /analytics/reports/generate`.
#[derive(Debug, Deserialize)]
pub struct ReportQuery {
    pub report_type: String,
    pub start_date: Option<String>,
    pub end_date: Option<String>,
    pub group_by: Option<String>,
    pub filters: Option<serde_json::Value>,
}
/// Body for `POST /analytics/reports/schedule`.
#[derive(Debug, Deserialize)]
pub struct ScheduleReportRequest {
    pub report_type: String,
    pub frequency: String,
    pub recipients: Vec<String>,
    pub format: String,
    pub filters: Option<serde_json::Value>,
}
/// Body for `POST /analytics/metrics/collect`; `timestamp` defaults to now.
#[derive(Debug, Deserialize)]
pub struct MetricsCollectionRequest {
    pub metric_type: String,
    pub value: f64,
    pub labels: Option<serde_json::Value>,
    pub timestamp: Option<DateTime<Utc>>,
}
/// Query parameters for `POST /analytics/insights/generate`.
#[derive(Debug, Deserialize)]
pub struct InsightsQuery {
    pub data_source: String,
    pub analysis_type: String,
    pub time_range: String,
}
/// Query parameters for trend analysis endpoints.
#[derive(Debug, Deserialize)]
pub struct TrendsQuery {
    pub metric: String,
    pub start_date: String,
    pub end_date: String,
    pub granularity: Option<String>,
}
/// Body for analytics data-export requests.
#[derive(Debug, Deserialize)]
pub struct ExportRequest {
    pub data_type: String,
    pub format: String,
    pub filters: Option<serde_json::Value>,
}
/// Response for `GET /analytics/dashboard`.
#[derive(Debug, Serialize)]
pub struct DashboardResponse {
    pub overview: OverviewStats,
    pub recent_activity: Vec<ActivityItem>,
    pub charts: Vec<ChartData>,
    pub alerts: Vec<AlertItem>,
    pub updated_at: DateTime<Utc>,
}
/// Headline counters shown on the dashboard.
#[derive(Debug, Serialize)]
pub struct OverviewStats {
    pub total_users: u32,
    pub active_users: u32,
    pub total_files: u64,
    pub total_storage_gb: f64,
    pub total_messages: u64,
    pub total_calls: u32,
    pub growth_rate: f64,
}
/// One entry in the dashboard's recent-activity feed.
#[derive(Debug, Serialize)]
pub struct ActivityItem {
    pub id: Uuid,
    pub action: String,
    pub user_id: Option<Uuid>,
    pub user_name: String,
    pub resource_type: String,
    pub resource_id: String,
    pub timestamp: DateTime<Utc>,
}
/// One renderable chart on the dashboard.
#[derive(Debug, Serialize)]
pub struct ChartData {
    pub chart_type: String,
    pub title: String,
    pub labels: Vec<String>,
    pub datasets: Vec<DatasetInfo>,
}
/// A single series within a `ChartData`.
#[derive(Debug, Serialize)]
pub struct DatasetInfo {
    pub label: String,
    pub data: Vec<f64>,
    pub color: String,
}
/// One alert banner shown on the dashboard.
#[derive(Debug, Serialize)]
pub struct AlertItem {
    pub id: Uuid,
    pub severity: String,
    pub title: String,
    pub message: String,
    pub timestamp: DateTime<Utc>,
}
/// Response for `POST /analytics/reports/generate`.
#[derive(Debug, Serialize)]
pub struct ReportResponse {
    pub id: Uuid,
    pub report_type: String,
    pub generated_at: DateTime<Utc>,
    pub data: serde_json::Value,
    pub summary: Option<String>,
    pub download_url: Option<String>,
}
/// Response for `POST /analytics/reports/schedule`.
#[derive(Debug, Serialize)]
pub struct ScheduledReportResponse {
    pub id: Uuid,
    pub report_type: String,
    pub frequency: String,
    pub recipients: Vec<String>,
    pub format: String,
    pub next_run: DateTime<Utc>,
    pub last_run: Option<DateTime<Utc>>,
    pub status: String,
}
/// Echo of a collected metric (`POST /analytics/metrics/collect`).
#[derive(Debug, Serialize)]
pub struct MetricResponse {
    pub metric_type: String,
    pub value: f64,
    pub timestamp: DateTime<Utc>,
    pub labels: serde_json::Value,
}
/// Response for `POST /analytics/insights/generate`.
#[derive(Debug, Serialize)]
pub struct InsightsResponse {
    pub insights: Vec<Insight>,
    pub confidence_score: f64,
    pub generated_at: DateTime<Utc>,
}
/// One generated insight with its supporting data and suggestions.
#[derive(Debug, Serialize)]
pub struct Insight {
    pub title: String,
    pub description: String,
    pub insight_type: String,
    pub severity: String,
    pub data: serde_json::Value,
    pub recommendations: Vec<String>,
}
/// Trend-analysis result for a single metric.
#[derive(Debug, Serialize)]
pub struct TrendsResponse {
    pub metric: String,
    pub trend_direction: String,
    pub change_percentage: f64,
    pub data_points: Vec<TrendDataPoint>,
    pub forecast: Option<Vec<TrendDataPoint>>,
}
/// A single (timestamp, value) sample in a trend series.
#[derive(Debug, Serialize)]
pub struct TrendDataPoint {
    pub timestamp: DateTime<Utc>,
    pub value: f64,
}
/// Descriptor for a completed analytics export.
#[derive(Debug, Serialize)]
pub struct ExportResponse {
    pub export_id: Uuid,
    pub format: String,
    pub size_bytes: u64,
    pub download_url: String,
    pub expires_at: DateTime<Utc>,
}
/// Generic success/failure acknowledgement.
#[derive(Debug, Serialize)]
pub struct SuccessResponse {
    pub success: bool,
    pub message: Option<String>,
}
// ===== API Handlers =====
/// GET /analytics/dashboard - Get analytics dashboard
///
/// NOTE(review): assembles a fully hard-coded sample dashboard (stats,
/// activity, charts, alerts); `state` is unused until real analytics are
/// wired in.
pub async fn get_dashboard(
    State(state): State<Arc<AppState>>,
) -> Result<Json<DashboardResponse>, (StatusCode, Json<serde_json::Value>)> {
    let now = Utc::now();
    let dashboard = DashboardResponse {
        overview: OverviewStats {
            total_users: 1250,
            active_users: 892,
            total_files: 45678,
            total_storage_gb: 234.5,
            total_messages: 123456,
            total_calls: 3456,
            growth_rate: 12.5,
        },
        recent_activity: vec![
            ActivityItem {
                id: Uuid::new_v4(),
                action: "file_upload".to_string(),
                user_id: Some(Uuid::new_v4()),
                user_name: "John Doe".to_string(),
                resource_type: "file".to_string(),
                resource_id: "document.pdf".to_string(),
                timestamp: now,
            },
            ActivityItem {
                id: Uuid::new_v4(),
                action: "user_login".to_string(),
                user_id: Some(Uuid::new_v4()),
                user_name: "Jane Smith".to_string(),
                resource_type: "session".to_string(),
                resource_id: "session-123".to_string(),
                timestamp: now,
            },
        ],
        charts: vec![
            ChartData {
                chart_type: "line".to_string(),
                title: "Daily Active Users".to_string(),
                labels: vec!["Mon".to_string(), "Tue".to_string(), "Wed".to_string(), "Thu".to_string(), "Fri".to_string()],
                datasets: vec![DatasetInfo {
                    label: "Active Users".to_string(),
                    data: vec![850.0, 920.0, 880.0, 950.0, 892.0],
                    color: "#3b82f6".to_string(),
                }],
            },
            ChartData {
                chart_type: "bar".to_string(),
                title: "Storage Usage".to_string(),
                labels: vec!["Files".to_string(), "Media".to_string(), "Backups".to_string()],
                datasets: vec![DatasetInfo {
                    label: "GB".to_string(),
                    data: vec![120.5, 80.3, 33.7],
                    color: "#10b981".to_string(),
                }],
            },
        ],
        alerts: vec![
            AlertItem {
                id: Uuid::new_v4(),
                severity: "warning".to_string(),
                title: "Storage capacity".to_string(),
                message: "Storage usage is at 78%".to_string(),
                timestamp: now,
            },
        ],
        updated_at: now,
    };
    Ok(Json(dashboard))
}
/// POST /analytics/reports/generate - Generate analytics report
///
/// Picks a canned payload by `report_type` ("user_activity", "storage",
/// "communication"); any other type yields a "not available" payload.
/// NOTE(review): date range, group_by and filters in `params` are
/// currently ignored, and `state` is unused.
pub async fn generate_report(
    State(state): State<Arc<AppState>>,
    Query(params): Query<ReportQuery>,
) -> Result<Json<ReportResponse>, (StatusCode, Json<serde_json::Value>)> {
    let report_id = Uuid::new_v4();
    let now = Utc::now();
    let report_data = match params.report_type.as_str() {
        "user_activity" => {
            serde_json::json!({
                "total_users": 1250,
                "active_users": 892,
                "new_users_this_month": 45,
                "user_engagement_score": 7.8,
                "top_users": [
                    {"name": "John Doe", "activity_score": 95},
                    {"name": "Jane Smith", "activity_score": 88},
                ],
            })
        }
        "storage" => {
            serde_json::json!({
                "total_storage_gb": 234.5,
                "used_storage_gb": 182.3,
                "available_storage_gb": 52.2,
                "growth_rate_monthly": 8.5,
                "largest_consumers": [
                    {"user": "John Doe", "storage_gb": 15.2},
                    {"user": "Jane Smith", "storage_gb": 12.8},
                ],
            })
        }
        "communication" => {
            serde_json::json!({
                "total_messages": 123456,
                "total_calls": 3456,
                "average_call_duration_minutes": 23.5,
                "most_active_channels": [
                    {"name": "General", "messages": 45678},
                    {"name": "Development", "messages": 23456},
                ],
            })
        }
        _ => {
            serde_json::json!({
                "message": "Report data not available for this type"
            })
        }
    };
    let report = ReportResponse {
        id: report_id,
        report_type: params.report_type,
        generated_at: now,
        data: report_data,
        summary: Some("Report generated successfully".to_string()),
        download_url: Some(format!("/analytics/reports/{}/download", report_id)),
    };
    Ok(Json(report))
}
/// POST /analytics/reports/schedule - Schedule recurring report
pub async fn schedule_report(
    State(state): State<Arc<AppState>>,
    Json(req): Json<ScheduleReportRequest>,
) -> Result<Json<ScheduledReportResponse>, (StatusCode, Json<serde_json::Value>)> {
    let now = Utc::now();
    // Unknown frequencies fall back to daily; "monthly" approximates a
    // month as 30 days.
    let interval = match req.frequency.as_str() {
        "weekly" => chrono::Duration::weeks(1),
        "monthly" => chrono::Duration::days(30),
        _ => chrono::Duration::days(1),
    };
    Ok(Json(ScheduledReportResponse {
        id: Uuid::new_v4(),
        report_type: req.report_type,
        frequency: req.frequency,
        recipients: req.recipients,
        format: req.format,
        next_run: now.checked_add_signed(interval).unwrap(),
        last_run: None,
        status: "active".to_string(),
    }))
}
/// POST /analytics/metrics/collect - Collect metric data.
///
/// Echoes back a normalized metric sample: a missing timestamp defaults to
/// the server clock and a missing label set becomes an empty JSON object.
/// `state` is currently unused.
pub async fn collect_metrics(
    State(state): State<Arc<AppState>>,
    Json(req): Json<MetricsCollectionRequest>,
) -> Result<Json<MetricResponse>, (StatusCode, Json<serde_json::Value>)> {
    // Fall back to the server clock when the client did not stamp the sample.
    let recorded_at = match req.timestamp {
        Some(ts) => ts,
        None => Utc::now(),
    };
    // Normalize an absent label set to an empty JSON object.
    let labels = req.labels.unwrap_or_else(|| serde_json::json!({}));
    Ok(Json(MetricResponse {
        metric_type: req.metric_type,
        value: req.value,
        timestamp: recorded_at,
        labels,
    }))
}
/// POST /analytics/insights/generate - Generate insights from data
///
/// Returns a canned insight set keyed on `analysis_type`
/// ("performance" | "usage" | "security"); any other value yields an empty
/// list. All figures below are hard-coded sample data — no live metrics are
/// read. `state` is currently unused.
pub async fn generate_insights(
    State(state): State<Arc<AppState>>,
    Query(params): Query<InsightsQuery>,
) -> Result<Json<InsightsResponse>, (StatusCode, Json<serde_json::Value>)> {
    let now = Utc::now();
    // Select the static insight set for the requested analysis type.
    let insights = match params.analysis_type.as_str() {
        "performance" => {
            vec![
                Insight {
                    title: "High User Engagement".to_string(),
                    description: "User engagement has increased by 15% this week".to_string(),
                    insight_type: "positive".to_string(),
                    severity: "info".to_string(),
                    data: serde_json::json!({
                        "current_engagement": 7.8,
                        "previous_engagement": 6.8,
                        "change_percentage": 15.0
                    }),
                    recommendations: vec![
                        "Continue current engagement strategies".to_string(),
                        "Consider expanding successful features".to_string(),
                    ],
                },
                Insight {
                    title: "Storage Optimization Needed".to_string(),
                    description: "Storage usage growing faster than expected".to_string(),
                    insight_type: "warning".to_string(),
                    severity: "medium".to_string(),
                    data: serde_json::json!({
                        "current_usage_gb": 182.3,
                        "projected_usage_gb": 250.0,
                        "days_until_full": 45
                    }),
                    recommendations: vec![
                        "Review and archive old files".to_string(),
                        "Implement storage quotas per user".to_string(),
                        "Consider upgrading storage capacity".to_string(),
                    ],
                },
            ]
        }
        "usage" => {
            vec![
                Insight {
                    title: "Peak Usage Times".to_string(),
                    description: "Highest activity between 9 AM - 11 AM".to_string(),
                    insight_type: "informational".to_string(),
                    severity: "info".to_string(),
                    data: serde_json::json!({
                        "peak_hours": ["09:00", "10:00", "11:00"],
                        "average_users": 750
                    }),
                    recommendations: vec![
                        "Schedule maintenance outside peak hours".to_string(),
                        "Ensure adequate resources during peak times".to_string(),
                    ],
                },
            ]
        }
        "security" => {
            vec![
                Insight {
                    title: "Failed Login Attempts".to_string(),
                    description: "Unusual number of failed login attempts detected".to_string(),
                    insight_type: "security".to_string(),
                    severity: "high".to_string(),
                    data: serde_json::json!({
                        "failed_attempts": 127,
                        "affected_accounts": 15,
                        "suspicious_ips": ["192.168.1.1", "10.0.0.5"]
                    }),
                    recommendations: vec![
                        "Enable two-factor authentication".to_string(),
                        "Review and block suspicious IP addresses".to_string(),
                        "Notify affected users".to_string(),
                    ],
                },
            ]
        }
        // Unknown analysis types return no insights rather than an error.
        _ => vec![],
    };
    let response = InsightsResponse {
        insights,
        // Fixed placeholder confidence; not derived from the data.
        confidence_score: 0.85,
        generated_at: now,
    };
    Ok(Json(response))
}
/// POST /analytics/trends/analyze - Analyze trends
///
/// Parses the requested window from RFC 3339 strings: an unparseable
/// `start_date` falls back to 30 days ago and an unparseable `end_date`
/// falls back to now. The observed data points and the forecast are
/// hard-coded sample values — no real metric history is consulted.
/// `state` is currently unused.
pub async fn analyze_trends(
    State(state): State<Arc<AppState>>,
    Query(params): Query<TrendsQuery>,
) -> Result<Json<TrendsResponse>, (StatusCode, Json<serde_json::Value>)> {
    let start_date = DateTime::parse_from_rfc3339(&params.start_date)
        .unwrap_or_else(|_| {
            Utc::now()
                .checked_sub_signed(chrono::Duration::days(30))
                .unwrap()
                .into()
        })
        .with_timezone(&Utc);
    let end_date = DateTime::parse_from_rfc3339(&params.end_date)
        .unwrap_or_else(|_| Utc::now().into())
        .with_timezone(&Utc);
    // Synthetic observed series spanning the requested window.
    let data_points = vec![
        TrendDataPoint {
            timestamp: start_date,
            value: 850.0,
        },
        TrendDataPoint {
            timestamp: start_date.checked_add_signed(chrono::Duration::days(5)).unwrap(),
            value: 920.0,
        },
        TrendDataPoint {
            timestamp: start_date.checked_add_signed(chrono::Duration::days(10)).unwrap(),
            value: 880.0,
        },
        TrendDataPoint {
            timestamp: start_date.checked_add_signed(chrono::Duration::days(15)).unwrap(),
            value: 950.0,
        },
        TrendDataPoint {
            timestamp: end_date,
            value: 892.0,
        },
    ];
    // Synthetic 10-day forecast beyond the window's end.
    let forecast = vec![
        TrendDataPoint {
            timestamp: end_date.checked_add_signed(chrono::Duration::days(5)).unwrap(),
            value: 910.0,
        },
        TrendDataPoint {
            timestamp: end_date.checked_add_signed(chrono::Duration::days(10)).unwrap(),
            value: 935.0,
        },
    ];
    let trends = TrendsResponse {
        metric: params.metric,
        trend_direction: "upward".to_string(),
        change_percentage: 4.9,
        data_points,
        forecast: Some(forecast),
    };
    Ok(Json(trends))
}
/// POST /analytics/export - Export analytics data.
///
/// Issues an export descriptor with a download URL that stays valid for 24
/// hours. The reported size is a fixed placeholder; `state` is currently
/// unused.
pub async fn export_analytics(
    State(state): State<Arc<AppState>>,
    Json(req): Json<ExportRequest>,
) -> Result<Json<ExportResponse>, (StatusCode, Json<serde_json::Value>)> {
    let export_id = Uuid::new_v4();
    let created = Utc::now();
    // Download links expire 24 hours after creation.
    let expires_at = created
        .checked_add_signed(chrono::Duration::hours(24))
        .unwrap();
    Ok(Json(ExportResponse {
        export_id,
        format: req.format,
        size_bytes: 5 * 1024 * 1024, // placeholder payload size: 5 MiB
        download_url: format!("/analytics/exports/{}/download", export_id),
        expires_at,
    }))
}

5
src/core/shared/mod.rs Normal file
View file

@ -0,0 +1,5 @@
pub mod admin;
pub mod analytics;
pub mod models;
pub mod state;
pub mod utils;

351
src/core/shared/models.rs Normal file
View file

@ -0,0 +1,351 @@
use chrono::{DateTime, Utc};
use diesel::prelude::*;
use serde::{Deserialize, Serialize};
use uuid::Uuid;
/// Kind of automation trigger, persisted in the database as its `i32`
/// discriminant (see `Automation::kind`).
#[derive(Debug, Clone, Copy, PartialEq)]
pub enum TriggerKind {
    Scheduled = 0,
    TableUpdate = 1,
    TableInsert = 2,
    TableDelete = 3,
}
impl TriggerKind {
    /// Decodes a raw database discriminant back into a `TriggerKind`.
    /// Returns `None` for values outside the known 0..=3 range.
    pub fn _from_i32(value: i32) -> Option<Self> {
        const VARIANTS: [TriggerKind; 4] = [
            TriggerKind::Scheduled,
            TriggerKind::TableUpdate,
            TriggerKind::TableInsert,
            TriggerKind::TableDelete,
        ];
        usize::try_from(value).ok().and_then(|i| VARIANTS.get(i).copied())
    }
}
/// A row of `system_automations`: one configured automation for a bot.
#[derive(Debug, Queryable, Serialize, Deserialize, Identifiable)]
#[diesel(table_name = system_automations)]
pub struct Automation {
    pub id: Uuid,
    pub bot_id: Uuid,
    // Raw discriminant of `TriggerKind` (decode with `TriggerKind::_from_i32`).
    pub kind: i32,
    // Target of table-based triggers — presumably a table name; confirm with callers.
    pub target: Option<String>,
    // Schedule expression for `Scheduled` triggers (format not visible here).
    pub schedule: Option<String>,
    pub param: String,
    pub is_active: bool,
    // Last time this automation fired, if ever.
    pub last_triggered: Option<chrono::DateTime<chrono::Utc>>,
}
/// A row of `user_sessions`: one conversation session between a user and a bot.
#[derive(Debug, Clone, Serialize, Deserialize, Queryable, Identifiable, Selectable)]
#[diesel(table_name = user_sessions)]
pub struct UserSession {
    pub id: Uuid,
    pub user_id: Uuid,
    pub bot_id: Uuid,
    pub title: String,
    // Free-form JSON context carried across the session.
    pub context_data: serde_json::Value,
    // Name of the tool currently in use, if any.
    pub current_tool: Option<String>,
    pub created_at: chrono::DateTime<Utc>,
    pub updated_at: chrono::DateTime<Utc>,
}
/// An inbound message from a user, as passed between channel adapters and
/// the bot pipeline. Ids are string-typed here (unlike the Uuid-typed DB
/// rows above).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct UserMessage {
    pub bot_id: String,
    pub user_id: String,
    pub session_id: String,
    // Originating channel (e.g. web, voice — exact values not visible here).
    pub channel: String,
    pub content: String,
    // Numeric message-type code; semantics defined by the messaging layer.
    pub message_type: i32,
    // Attachment URL when the message carries media.
    pub media_url: Option<String>,
    pub timestamp: DateTime<Utc>,
    pub context_name: Option<String>,
}
/// A quick-reply suggestion offered alongside a bot response.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Suggestion {
    pub text: String,
    pub context: String,
}
/// An outbound bot response, possibly one chunk of a stream.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct BotResponse {
    pub bot_id: String,
    pub user_id: String,
    pub session_id: String,
    pub channel: String,
    pub content: String,
    // Numeric message-type code; `from_string_ids` uses 2 for plain completions.
    pub message_type: i32,
    // Present while streaming; None for complete responses.
    pub stream_token: Option<String>,
    // True when this is the final chunk (or the whole response).
    pub is_complete: bool,
    pub suggestions: Vec<Suggestion>,
    pub context_name: Option<String>,
    // Current / maximum context size — units not visible here (presumably
    // tokens or characters; confirm against the session layer).
    pub context_length: usize,
    pub context_max_length: usize,
}
impl BotResponse {
    /// Builds a complete (non-streaming) response from string-typed ids.
    ///
    /// The result has `message_type` 2, is marked complete, and carries no
    /// suggestions, stream token, or context counters. Always returns `Ok`;
    /// the `Result` return is kept for interface compatibility.
    pub fn from_string_ids(
        bot_id: &str,
        session_id: &str,
        user_id: &str,
        content: String,
        channel: String,
    ) -> Result<Self, anyhow::Error> {
        let response = Self {
            bot_id: bot_id.to_owned(),
            user_id: user_id.to_owned(),
            session_id: session_id.to_owned(),
            channel,
            content,
            message_type: 2,
            stream_token: None,
            is_complete: true,
            suggestions: Vec::new(),
            context_name: None,
            context_length: 0,
            context_max_length: 0,
        };
        Ok(response)
    }
}
/// A row of `bot_memories`: one key/value memory entry for a bot.
#[derive(Debug, Clone, Serialize, Deserialize, Queryable, Identifiable, Insertable)]
#[diesel(table_name = bot_memories)]
pub struct BotMemory {
    pub id: Uuid,
    pub bot_id: Uuid,
    pub key: String,
    pub value: String,
    pub created_at: chrono::DateTime<Utc>,
    pub updated_at: chrono::DateTime<Utc>,
}
/// Diesel table definitions for the application's database schema.
///
/// NOTE(review): column typing is inconsistent across tables — some use
/// `Uuid`/`Timestamptz` while others (`kb_documents`, `basic_tools`,
/// `kb_collections`, `user_kb_associations`, `session_tool_associations`)
/// store ids, flags and timestamps as `Text`/`Integer`. Confirm this matches
/// the actual migrations before changing it.
pub mod schema {
    // Tenancy root.
    diesel::table! {
        organizations (org_id) {
            org_id -> Uuid,
            name -> Text,
            slug -> Text,
            created_at -> Timestamptz,
        }
    }
    // Bot definitions and their LLM/context provider configuration.
    diesel::table! {
        bots (id) {
            id -> Uuid,
            name -> Varchar,
            description -> Nullable<Text>,
            llm_provider -> Varchar,
            llm_config -> Jsonb,
            context_provider -> Varchar,
            context_config -> Jsonb,
            created_at -> Timestamptz,
            updated_at -> Timestamptz,
            is_active -> Nullable<Bool>,
            tenant_id -> Nullable<Uuid>,
        }
    }
    // Automations (see `Automation` / `TriggerKind`).
    diesel::table! {
        system_automations (id) {
            id -> Uuid,
            bot_id -> Uuid,
            kind -> Int4,
            target -> Nullable<Text>,
            schedule -> Nullable<Text>,
            param -> Text,
            is_active -> Bool,
            last_triggered -> Nullable<Timestamptz>,
        }
    }
    // Conversation sessions (see `UserSession`).
    diesel::table! {
        user_sessions (id) {
            id -> Uuid,
            user_id -> Uuid,
            bot_id -> Uuid,
            title -> Text,
            context_data -> Jsonb,
            current_tool -> Nullable<Text>,
            created_at -> Timestamptz,
            updated_at -> Timestamptz,
        }
    }
    // Per-session message log; content is stored encrypted.
    diesel::table! {
        message_history (id) {
            id -> Uuid,
            session_id -> Uuid,
            user_id -> Uuid,
            role -> Int4,
            content_encrypted -> Text,
            message_type -> Int4,
            message_index -> Int8,
            created_at -> Timestamptz,
        }
    }
    diesel::table! {
        users (id) {
            id -> Uuid,
            username -> Text,
            email -> Text,
            password_hash -> Text,
            is_active -> Bool,
            created_at -> Timestamptz,
            updated_at -> Timestamptz,
        }
    }
    // Campaign click tracking.
    diesel::table! {
        clicks (id) {
            id -> Uuid,
            campaign_id -> Text,
            email -> Text,
            updated_at -> Timestamptz,
        }
    }
    // Key/value bot memory (see `BotMemory`).
    diesel::table! {
        bot_memories (id) {
            id -> Uuid,
            bot_id -> Uuid,
            key -> Text,
            value -> Text,
            created_at -> Timestamptz,
            updated_at -> Timestamptz,
        }
    }
    // Knowledge-base documents tracked for indexing.
    diesel::table! {
        kb_documents (id) {
            id -> Text,
            bot_id -> Text,
            user_id -> Text,
            collection_name -> Text,
            file_path -> Text,
            file_size -> Integer,
            file_hash -> Text,
            first_published_at -> Text,
            last_modified_at -> Text,
            indexed_at -> Nullable<Text>,
            metadata -> Text,
            created_at -> Text,
            updated_at -> Text,
        }
    }
    // Compiled BASIC tools and their generated artifacts.
    diesel::table! {
        basic_tools (id) {
            id -> Text,
            bot_id -> Text,
            tool_name -> Text,
            file_path -> Text,
            ast_path -> Text,
            file_hash -> Text,
            mcp_json -> Nullable<Text>,
            tool_json -> Nullable<Text>,
            compiled_at -> Text,
            is_active -> Integer,
            created_at -> Text,
            updated_at -> Text,
        }
    }
    // Knowledge-base collections backed by a Qdrant collection.
    diesel::table! {
        kb_collections (id) {
            id -> Text,
            bot_id -> Text,
            user_id -> Text,
            name -> Text,
            folder_path -> Text,
            qdrant_collection -> Text,
            document_count -> Integer,
            is_active -> Integer,
            created_at -> Text,
            updated_at -> Text,
        }
    }
    diesel::table! {
        user_kb_associations (id) {
            id -> Text,
            user_id -> Text,
            bot_id -> Text,
            kb_name -> Text,
            is_website -> Integer,
            website_url -> Nullable<Text>,
            created_at -> Text,
            updated_at -> Text,
        }
    }
    diesel::table! {
        session_tool_associations (id) {
            id -> Text,
            session_id -> Text,
            tool_name -> Text,
            added_at -> Text,
        }
    }
    // Per-bot configuration entries; values may be stored encrypted.
    diesel::table! {
        bot_configuration (id) {
            id -> Uuid,
            bot_id -> Uuid,
            config_key -> Text,
            config_value -> Text,
            is_encrypted -> Bool,
            config_type -> Text,
            created_at -> Timestamptz,
            updated_at -> Timestamptz,
        }
    }
    // Mail: user accounts with IMAP/SMTP settings.
    diesel::table! {
        user_email_accounts (id) {
            id -> Uuid,
            user_id -> Uuid,
            email -> Varchar,
            display_name -> Nullable<Varchar>,
            imap_server -> Varchar,
            imap_port -> Int4,
            smtp_server -> Varchar,
            smtp_port -> Int4,
            username -> Varchar,
            password_encrypted -> Text,
            is_primary -> Bool,
            is_active -> Bool,
            created_at -> Timestamptz,
            updated_at -> Timestamptz,
        }
    }
    diesel::table! {
        email_drafts (id) {
            id -> Uuid,
            user_id -> Uuid,
            account_id -> Uuid,
            to_address -> Text,
            cc_address -> Nullable<Text>,
            bcc_address -> Nullable<Text>,
            subject -> Nullable<Varchar>,
            body -> Nullable<Text>,
            attachments -> Jsonb,
            created_at -> Timestamptz,
            updated_at -> Timestamptz,
        }
    }
    diesel::table! {
        email_folders (id) {
            id -> Uuid,
            account_id -> Uuid,
            folder_name -> Varchar,
            folder_path -> Varchar,
            unread_count -> Int4,
            total_count -> Int4,
            last_synced -> Nullable<Timestamptz>,
            created_at -> Timestamptz,
            updated_at -> Timestamptz,
        }
    }
    diesel::table! {
        user_preferences (id) {
            id -> Uuid,
            user_id -> Uuid,
            preference_key -> Varchar,
            preference_value -> Jsonb,
            created_at -> Timestamptz,
            updated_at -> Timestamptz,
        }
    }
    // Login tokens are stored hashed, with client metadata for auditing.
    diesel::table! {
        user_login_tokens (id) {
            id -> Uuid,
            user_id -> Uuid,
            token_hash -> Varchar,
            expires_at -> Timestamptz,
            created_at -> Timestamptz,
            last_used -> Timestamptz,
            user_agent -> Nullable<Text>,
            ip_address -> Nullable<Varchar>,
            is_active -> Bool,
        }
    }
}
// Re-export every table DSL at this module's root for convenient imports.
pub use schema::*;

View file

View file

@ -0,0 +1,25 @@
#[cfg(test)]
mod tests {
    use super::*;
    use crate::tests::test_util;
    // NOTE(review): these are scaffolding tests — they only verify the module
    // compiles and the shared test setup runs; the `assert!(true, ..)` bodies
    // assert nothing about behavior yet.
    #[test]
    fn test_shared_module() {
        test_util::setup();
        assert!(true, "Basic shared module test");
    }
    #[test]
    fn test_models() {
        test_util::setup();
        assert!(true, "Models placeholder test");
    }
    #[test]
    fn test_state() {
        test_util::setup();
        assert!(true, "State placeholder test");
    }
    #[test]
    fn test_utils() {
        test_util::setup();
        assert!(true, "Utils placeholder test");
    }
}

87
src/core/shared/state.rs Normal file
View file

@ -0,0 +1,87 @@
#[cfg(feature = "directory")]
use crate::directory::AuthService;
use crate::core::bot::channels::{ChannelAdapter, VoiceAdapter, WebChannelAdapter};
use crate::core::config::AppConfig;
#[cfg(feature = "llm")]
use crate::llm::LLMProvider;
use crate::core::session::SessionManager;
use crate::shared::models::BotResponse;
use crate::shared::utils::DbPool;
#[cfg(feature = "drive")]
use aws_sdk_s3::Client as S3Client;
#[cfg(feature = "redis-cache")]
use redis::Client as RedisClient;
use std::collections::HashMap;
use std::sync::Arc;
use tokio::sync::mpsc;
/// Shared application state handed to every request handler (typically via
/// `Arc<AppState>`).
///
/// Feature-gated fields only exist when the corresponding Cargo feature is
/// enabled, so code touching them must carry the same `#[cfg(...)]` guard.
pub struct AppState {
    // S3-compatible object-storage client; None until configured.
    #[cfg(feature = "drive")]
    pub drive: Option<S3Client>,
    // Optional Redis cache client.
    #[cfg(feature = "redis-cache")]
    pub cache: Option<Arc<RedisClient>>,
    // Default bucket used by drive operations.
    pub bucket_name: String,
    pub config: Option<AppConfig>,
    // r2d2 Postgres connection pool.
    pub conn: DbPool,
    pub session_manager: Arc<tokio::sync::Mutex<SessionManager>>,
    #[cfg(feature = "llm")]
    pub llm_provider: Arc<dyn LLMProvider>,
    #[cfg(feature = "directory")]
    pub auth_service: Arc<tokio::sync::Mutex<AuthService>>,
    // Channel adapters, keyed by String — presumably the channel name; confirm
    // against the registration site.
    pub channels: Arc<tokio::sync::Mutex<HashMap<String, Arc<dyn ChannelAdapter>>>>,
    // Senders used to push BotResponses back to waiting clients, keyed by
    // String — presumably a session id; confirm against callers.
    pub response_channels: Arc<tokio::sync::Mutex<HashMap<String, mpsc::Sender<BotResponse>>>>,
    pub web_adapter: Arc<WebChannelAdapter>,
    pub voice_adapter: Arc<VoiceAdapter>,
}
// Hand-written Clone: `Arc` fields are cheap reference-counted clones that
// share the underlying state, while `bucket_name`/`config`/`conn` are value
// clones (cloning an r2d2 pool clones a handle, not the connections).
impl Clone for AppState {
    fn clone(&self) -> Self {
        Self {
            #[cfg(feature = "drive")]
            drive: self.drive.clone(),
            bucket_name: self.bucket_name.clone(),
            config: self.config.clone(),
            conn: self.conn.clone(),
            #[cfg(feature = "redis-cache")]
            cache: self.cache.clone(),
            session_manager: Arc::clone(&self.session_manager),
            #[cfg(feature = "llm")]
            llm_provider: Arc::clone(&self.llm_provider),
            #[cfg(feature = "directory")]
            auth_service: Arc::clone(&self.auth_service),
            channels: Arc::clone(&self.channels),
            response_channels: Arc::clone(&self.response_channels),
            web_adapter: Arc::clone(&self.web_adapter),
            voice_adapter: Arc::clone(&self.voice_adapter),
        }
    }
}
// Hand-written Debug: fields whose types do not implement Debug (pools,
// trait objects, locked maps) are rendered as static placeholder strings,
// and optional clients are shown only by presence (`is_some()`).
impl std::fmt::Debug for AppState {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let mut debug = f.debug_struct("AppState");
        #[cfg(feature = "drive")]
        debug.field("drive", &self.drive.is_some());
        #[cfg(feature = "redis-cache")]
        debug.field("cache", &self.cache.is_some());
        debug.field("bucket_name", &self.bucket_name)
            .field("config", &self.config)
            .field("conn", &"DbPool")
            .field("session_manager", &"Arc<Mutex<SessionManager>>");
        #[cfg(feature = "llm")]
        debug.field("llm_provider", &"Arc<dyn LLMProvider>");
        #[cfg(feature = "directory")]
        debug.field("auth_service", &"Arc<Mutex<AuthService>>");
        debug
            .field("channels", &"Arc<Mutex<HashMap>>")
            .field("response_channels", &"Arc<Mutex<HashMap>>")
            .field("web_adapter", &self.web_adapter)
            .field("voice_adapter", &self.voice_adapter)
            .finish()
    }
}

164
src/core/shared/utils.rs Normal file
View file

@ -0,0 +1,164 @@
use anyhow::{Context, Result};
use diesel::Connection;
use diesel::{
r2d2::{ConnectionManager, Pool},
PgConnection,
};
use futures_util::StreamExt;
use indicatif::{ProgressBar, ProgressStyle};
use reqwest::Client;
use rhai::{Array, Dynamic};
use serde_json::Value;
use smartstring::SmartString;
use std::error::Error;
use tokio::fs::File as TokioFile;
use tokio::io::AsyncWriteExt;
use aws_sdk_s3::{Client as S3Client, config::Builder as S3ConfigBuilder};
use aws_config::BehaviorVersion;
use crate::config::DriveConfig;
/// Builds an S3 client for an S3-compatible endpoint using static
/// credentials from `DriveConfig`.
///
/// The endpoint is normalized to end with `/`, the region is fixed to
/// "auto", and path-style addressing is forced — settings commonly required
/// by non-AWS S3 servers (e.g. MinIO).
pub async fn create_s3_operator(config: &DriveConfig) -> Result<S3Client, Box<dyn std::error::Error>> {
    // Ensure a trailing slash so the endpoint URL joins cleanly.
    let endpoint = if !config.server.ends_with('/') {
        format!("{}/", config.server)
    } else {
        config.server.clone()
    };
    let base_config = aws_config::defaults(BehaviorVersion::latest())
        .endpoint_url(endpoint)
        .region("auto")
        .credentials_provider(
            aws_sdk_s3::config::Credentials::new(
                config.access_key.clone(),
                config.secret_key.clone(),
                None,
                None,
                "static",
            )
        )
        .load()
        .await;
    // Path-style URLs (host/bucket/key) instead of virtual-hosted buckets.
    let s3_config = S3ConfigBuilder::from(&base_config)
        .force_path_style(true)
        .build();
    Ok(S3Client::from_conf(s3_config))
}
/// Recursively converts a `serde_json::Value` into a Rhai `Dynamic`.
///
/// Null maps to unit; numbers prefer `i64` and fall back to `f64`, and a
/// number representable as neither becomes unit as well. Arrays and objects
/// convert element-by-element into `rhai::Array` / `rhai::Map`.
pub fn json_value_to_dynamic(value: &Value) -> Dynamic {
    match value {
        Value::Null => Dynamic::UNIT,
        Value::Bool(flag) => Dynamic::from(*flag),
        Value::Number(num) => num
            .as_i64()
            .map(Dynamic::from)
            .or_else(|| num.as_f64().map(Dynamic::from))
            .unwrap_or(Dynamic::UNIT),
        Value::String(text) => Dynamic::from(text.clone()),
        Value::Array(items) => {
            let converted: rhai::Array = items.iter().map(json_value_to_dynamic).collect();
            Dynamic::from(converted)
        }
        Value::Object(fields) => {
            let converted: rhai::Map = fields
                .iter()
                .map(|(key, val)| (SmartString::from(key), json_value_to_dynamic(val)))
                .collect();
            Dynamic::from(converted)
        }
    }
}
/// Coerces a Rhai `Dynamic` into an `Array`.
///
/// Arrays pass through unchanged, unit becomes an empty array, and any
/// other value is wrapped as a single-element array.
pub fn to_array(value: Dynamic) -> Array {
    if value.is_array() {
        value.cast::<Array>()
    } else if value.is_unit() {
        // `is_unit()` already means the value's type is `()`; the previous
        // extra `value.is::<()>()` check was redundant.
        Array::new()
    } else {
        Array::from([value])
    }
}
/// Downloads `url` to `output_path`, streaming chunks to disk while
/// rendering an indicatif progress bar.
///
/// The transfer runs inside a spawned tokio task so it owns its data; the
/// final `await?` surfaces both task join errors and transfer errors.
/// Non-2xx responses become errors. When the server sends no
/// `Content-Length`, the bar total is 0 and only the byte counter is
/// meaningful.
pub async fn download_file(url: &str, output_path: &str) -> Result<(), anyhow::Error> {
    let url = url.to_string();
    let output_path = output_path.to_string();
    let download_handle = tokio::spawn(async move {
        // Some servers reject requests without a browser-like user agent.
        let client = Client::builder()
            .user_agent("Mozilla/5.0 (compatible; BotServer/1.0)")
            .build()?;
        let response = client.get(&url).send().await?;
        if response.status().is_success() {
            let total_size = response.content_length().unwrap_or(0);
            let pb = ProgressBar::new(total_size);
            pb.set_style(ProgressStyle::default_bar()
                .template("{msg}\n{spinner:.green} [{elapsed_precise}] [{bar:40.cyan/blue}] {bytes}/{total_bytes} ({eta})")
                .unwrap()
                .progress_chars("#>-"));
            pb.set_message(format!("Downloading {}", url));
            let mut file = TokioFile::create(&output_path).await?;
            let mut downloaded: u64 = 0;
            // Stream chunk-by-chunk so large files never sit fully in memory.
            let mut stream = response.bytes_stream();
            while let Some(chunk_result) = stream.next().await {
                let chunk = chunk_result?;
                file.write_all(&chunk).await?;
                downloaded += chunk.len() as u64;
                pb.set_position(downloaded);
            }
            pb.finish_with_message(format!("Downloaded {}", output_path));
            Ok(())
        } else {
            Err(anyhow::anyhow!("HTTP {}: {}", response.status(), url))
        }
    });
    download_handle.await?
}
/// Parses a `KEY=VALUE` filter string into a parameterized SQL fragment
/// (`"KEY = $1"`) plus its bound value.
///
/// The column name is whitelisted to ASCII alphanumerics and underscores to
/// prevent SQL injection; the value side is passed through untouched as a
/// bind parameter and may itself contain `=` (the previous `split('=')`
/// rejected such values).
///
/// # Errors
/// Returns an error when the input has no `=`, the column name is empty
/// (previously accepted, producing the invalid fragment `" = $1"`), or the
/// column contains disallowed characters.
pub fn parse_filter(filter_str: &str) -> Result<(String, Vec<String>), Box<dyn Error>> {
    // splitn(2, ..) keeps any '=' inside the value intact.
    let mut parts = filter_str.splitn(2, '=');
    let column = parts.next().unwrap_or("").trim();
    let value = match parts.next() {
        Some(v) => v.trim(),
        None => return Err("Invalid filter format. Expected 'KEY=VALUE'".into()),
    };
    if column.is_empty()
        || !column
            .chars()
            .all(|c| c.is_ascii_alphanumeric() || c == '_')
    {
        return Err("Invalid column name in filter".into());
    }
    Ok((format!("{} = $1", column), vec![value.to_string()]))
}
/// Rough token-count heuristic: about four characters per token, never
/// reporting fewer than one token (even for the empty string).
pub fn estimate_token_count(text: &str) -> usize {
    let chars = text.chars().count();
    std::cmp::max(chars / 4, 1)
}
pub fn establish_pg_connection() -> Result<PgConnection> {
let database_url = std::env::var("DATABASE_URL").unwrap();
PgConnection::establish(&database_url)
.with_context(|| format!("Failed to connect to database at {}", database_url))
}
/// Shared r2d2 connection-pool type for Postgres.
pub type DbPool = Pool<ConnectionManager<PgConnection>>;
/// Builds an r2d2 connection pool from `DATABASE_URL`.
///
/// # Panics
/// Panics when `DATABASE_URL` is unset — the error type here cannot carry an
/// env error, so the bare `unwrap()` is replaced with an `expect` that names
/// the missing variable.
pub fn create_conn() -> Result<DbPool, diesel::r2d2::PoolError> {
    let database_url = std::env::var("DATABASE_URL")
        .expect("DATABASE_URL environment variable must be set");
    let manager = ConnectionManager::<PgConnection>::new(database_url);
    Pool::builder().build(manager)
}
/// Splits a `postgres://user:pass@host:port/db` URL into
/// `(username, password, host, port, database)`.
///
/// Improvements over the previous naive splitting: both `postgres://` and
/// `postgresql://` schemes are accepted; credentials end at the *last* `@`
/// so passwords containing `@` survive; user/password and host/port split
/// only on the first `:` so passwords containing `:` are preserved.
///
/// Returns empty strings and the default port 5432 when the URL does not
/// match the expected shape (same fallback as before).
pub fn parse_database_url(url: &str) -> (String, String, String, u32, String) {
    // Inner helper so every malformed shape funnels into one fallback.
    fn parts(url: &str) -> Option<(String, String, String, u32, String)> {
        let rest = url
            .strip_prefix("postgres://")
            .or_else(|| url.strip_prefix("postgresql://"))?;
        // Credentials end at the last '@' so '@' inside the password survives.
        let (creds, host_db) = rest.rsplit_once('@')?;
        // Only the first ':' separates user from password.
        let (username, password) = creds.split_once(':')?;
        let (host_port, database) = host_db.split_once('/')?;
        let (server, port) = match host_port.split_once(':') {
            // An unparseable port falls back to 5432, as before.
            Some((host, port_str)) => (host, port_str.parse().unwrap_or(5432)),
            None => (host_port, 5432),
        };
        Some((
            username.to_string(),
            password.to_string(),
            server.to_string(),
            port,
            database.to_string(),
        ))
    }
    // Historical fallback: empty fields and the default port.
    parts(url).unwrap_or_else(|| {
        ("".to_string(), "".to_string(), "".to_string(), 5432, "".to_string())
    })
}

View file

@ -0,0 +1,46 @@
use axum::{
http::StatusCode,
response::{Html, IntoResponse},
routing::get,
Router,
};
use log::error;
use std::{fs, path::PathBuf};
use tower_http::services::ServeDir;
/// GET / — serves the desktop SPA entry point from disk on every request.
///
/// Reading the file per request keeps edits live without a rebuild; on read
/// failure it logs the error and returns a 500 with a plain-text body.
pub async fn index() -> impl IntoResponse {
    match fs::read_to_string("web/desktop/index.html") {
        Ok(html) => (StatusCode::OK, [("content-type", "text/html")], Html(html)),
        Err(e) => {
            error!("Failed to load index page: {}", e);
            (
                StatusCode::INTERNAL_SERVER_ERROR,
                [("content-type", "text/plain")],
                Html("Failed to load index page".to_string()),
            )
        }
    }
}
/// Builds the static-asset router for the desktop web UI rooted at
/// `./web/desktop`: dedicated mounts for js/css/public and each app
/// section, a SPA fallback, and `/` handled by [`index`].
pub fn configure_router() -> Router {
    let static_path = PathBuf::from("./web/desktop");
    Router::new()
        // Serve all JS files
        .nest_service("/js", ServeDir::new(static_path.join("js")))
        // Serve CSS files
        .nest_service("/css", ServeDir::new(static_path.join("css")))
        // Serve public assets (themes, etc.)
        .nest_service("/public", ServeDir::new(static_path.join("public")))
        .nest_service("/drive", ServeDir::new(static_path.join("drive")))
        .nest_service("/chat", ServeDir::new(static_path.join("chat")))
        .nest_service("/mail", ServeDir::new(static_path.join("mail")))
        .nest_service("/tasks", ServeDir::new(static_path.join("tasks")))
        // Fallback: serve static files and index.html for SPA routing
        .fallback_service(
            ServeDir::new(static_path.clone()).fallback(
                ServeDir::new(static_path.clone()).append_index_html_on_directories(true),
            ),
        )
        .route("/", get(index))
}

View file

@ -0,0 +1,15 @@
#[cfg(test)]
mod tests {
    use super::*;
    use crate::tests::test_util;
    // NOTE(review): scaffolding tests only — they verify compilation and the
    // shared setup; the `assert!(true, ..)` bodies check no behavior yet.
    #[test]
    fn test_web_server_module() {
        test_util::setup();
        assert!(true, "Basic web server module test");
    }
    #[test]
    fn test_server_routes() {
        test_util::setup();
        assert!(true, "Server routes placeholder test");
    }
}

82
src/desktop/drive.rs Normal file
View file

@ -0,0 +1,82 @@
use serde::{Deserialize, Serialize};
use std::fs;
use std::path::{Path, PathBuf};
use tauri::{Emitter, Window};
/// A single directory entry returned to the frontend by `list_files`.
#[derive(Debug, Serialize, Deserialize)]
pub struct FileItem {
    // Entry name; empty when the OS name is not valid UTF-8.
    name: String,
    // Full path as a string; empty when not valid UTF-8.
    path: String,
    is_dir: bool,
}
/// Tauri command: lists the entries of `path`, directories first, then
/// case-sensitive name order. Fails when the path does not exist or a
/// directory entry cannot be read.
#[tauri::command]
pub fn list_files(path: &str) -> Result<Vec<FileItem>, String> {
    let root = Path::new(path);
    if !root.exists() {
        return Err("Path does not exist".into());
    }
    let mut files: Vec<FileItem> = Vec::new();
    for entry in fs::read_dir(root).map_err(|e| e.to_string())? {
        let entry = entry.map_err(|e| e.to_string())?;
        let entry_path = entry.path();
        // Non-UTF-8 names degrade to "" instead of failing the whole listing.
        let display_name = entry_path
            .file_name()
            .and_then(|n| n.to_str())
            .unwrap_or("")
            .to_string();
        files.push(FileItem {
            name: display_name,
            path: entry_path.to_str().unwrap_or("").to_string(),
            is_dir: entry_path.is_dir(),
        });
    }
    // (!is_dir, name) sorts directories before files, then by name — the
    // same ordering as the previous explicit comparator.
    files.sort_by_key(|item| (!item.is_dir, item.name.clone()));
    Ok(files)
}
/// Tauri command: copies `src_path` into directory `dest_path` (created if
/// missing), emitting `upload_progress` percentage events to the window as
/// chunks are written.
///
/// # Errors
/// Any I/O or event-emission failure is returned as a string.
#[tauri::command]
pub async fn upload_file(window: Window, src_path: String, dest_path: String) -> Result<(), String> {
    use std::fs::File;
    use std::io::{Read, Write};
    let src = PathBuf::from(&src_path);
    let dest_dir = PathBuf::from(&dest_path);
    // Destination keeps the source's file name inside the target directory.
    let dest = dest_dir.join(src.file_name().ok_or("Invalid source file")?);
    if !dest_dir.exists() {
        fs::create_dir_all(&dest_dir).map_err(|e| e.to_string())?;
    }
    let mut source_file = File::open(&src).map_err(|e| e.to_string())?;
    let mut dest_file = File::create(&dest).map_err(|e| e.to_string())?;
    let file_size = source_file.metadata().map_err(|e| e.to_string())?.len();
    let mut buffer = [0; 8192];
    let mut total_read: u64 = 0;
    loop {
        let bytes_read = source_file.read(&mut buffer).map_err(|e| e.to_string())?;
        if bytes_read == 0 {
            break;
        }
        dest_file
            .write_all(&buffer[..bytes_read])
            .map_err(|e| e.to_string())?;
        total_read += bytes_read as u64;
        // Guard the division: special files can report a 0-byte metadata size
        // while still yielding data, which previously produced a non-finite
        // progress value.
        let progress = if file_size > 0 {
            (total_read as f64 / file_size as f64) * 100.0
        } else {
            100.0
        };
        window
            .emit("upload_progress", progress)
            .map_err(|e| e.to_string())?;
    }
    // Zero-byte files never enter the loop; still signal completion.
    if total_read == 0 {
        window
            .emit("upload_progress", 100.0)
            .map_err(|e| e.to_string())?;
    }
    Ok(())
}
/// Tauri command: creates folder `name` under `path`, failing with
/// "Folder already exists" when the target is already present.
///
/// NOTE(review): the exists-then-create sequence is racy (TOCTOU); mapping
/// `create_dir`'s `AlreadyExists` error would be atomic — confirm whether
/// the custom message is worth keeping.
#[tauri::command]
pub fn create_folder(path: String, name: String) -> Result<(), String> {
    let full_path = Path::new(&path).join(&name);
    if full_path.exists() {
        return Err("Folder already exists".into());
    }
    fs::create_dir(full_path).map_err(|e| e.to_string())?;
    Ok(())
}

391
src/desktop/local-sync.rs Normal file
View file

@ -0,0 +1,391 @@
use dioxus::prelude::*;
use dioxus_desktop::{use_window, LogicalSize};
use std::env;
use std::fs::{File, OpenOptions, create_dir_all};
use std::io::{BufRead, BufReader, Write};
use std::path::Path;
use std::process::{Command as ProcCommand, Child, Stdio};
use std::sync::{Arc, Mutex};
use std::thread;
use std::time::{Duration, Instant};
use notify_rust::Notification;
use serde::{Deserialize, Serialize};
use serde_json::Value;
/// UI + runtime state for the local-sync desktop app.
#[derive(Debug, Clone)]
struct AppState {
    // Config-dialog inputs.
    name: String,
    access_key: String,
    secret_key: String,
    // Status line shown on the main screen.
    status_text: String,
    // Spawned `rclone sync` child processes, one per configuration.
    sync_processes: Arc<Mutex<Vec<Child>>>,
    sync_active: Arc<Mutex<bool>>,
    // Latest per-remote status snapshots rendered on the status screen.
    sync_statuses: Arc<Mutex<Vec<SyncStatus>>>,
    show_config_dialog: bool,
    show_about_dialog: bool,
    current_screen: Screen,
}
/// Which top-level screen the app is showing.
#[derive(Debug, Clone)]
enum Screen {
    Main,
    Status,
}
/// One rclone remote configuration, mirrored to/from `rclone.conf`.
#[derive(Debug, Clone, Serialize, Deserialize)]
struct RcloneConfig {
    name: String,
    remote_path: String,
    local_path: String,
    access_key: String,
    secret_key: String,
}
/// A snapshot of one remote's sync progress, as read from rclone's RC API.
#[derive(Debug, Clone, Serialize, Deserialize)]
struct SyncStatus {
    name: String,
    status: String,
    transferred: String,
    bytes: String,
    errors: usize,
    last_updated: String,
}
// NOTE(review): this Message enum is not referenced by any visible handler —
// it looks like a leftover from an Elm-style (e.g. iced) architecture;
// confirm whether it can be removed.
#[derive(Debug, Clone)]
enum Message {
    NameChanged(String),
    AccessKeyChanged(String),
    SecretKeyChanged(String),
    SaveConfig,
    StartSync,
    StopSync,
    UpdateStatus(Vec<SyncStatus>),
    ShowConfigDialog(bool),
    ShowAboutDialog(bool),
    ShowStatusScreen,
    BackToMain,
    None,
}
/// Entry point: launches the Dioxus desktop app with `app` as the root component.
fn main() {
    dioxus_desktop::launch(app);
}
/// Root Dioxus component: renders the main/status screens plus the config
/// and about dialogs, and runs a background future that refreshes rclone
/// sync status roughly every 5 seconds while syncing is active.
///
/// NOTE(review): `use_future( async move { ... })` does not match the usual
/// dioxus `use_future(cx, deps, |_| ...)` signature, and the outer closure
/// captures `state` by move before cloning it — confirm this compiles
/// against the pinned dioxus version.
fn app(cx: Scope) -> Element {
    let window = use_window();
    window.set_inner_size(LogicalSize::new(800, 600));
    let state = use_ref(cx, || AppState {
        name: String::new(),
        access_key: String::new(),
        secret_key: String::new(),
        status_text: "Enter credentials to set up sync".to_string(),
        sync_processes: Arc::new(Mutex::new(Vec::new())),
        sync_active: Arc::new(Mutex::new(false)),
        sync_statuses: Arc::new(Mutex::new(Vec::new())),
        show_config_dialog: false,
        show_about_dialog: false,
        current_screen: Screen::Main,
    });
    // Background poller: every second, check whether sync is active and — at
    // most once per 5s — re-read the configs and refresh per-remote status.
    use_future( async move {
        let state = state.clone();
        async move {
            let mut last_check = Instant::now();
            let check_interval = Duration::from_secs(5);
            loop {
                tokio::time::sleep(Duration::from_secs(1)).await;
                if !*state.read().sync_active.lock().unwrap() {
                    continue;
                }
                if last_check.elapsed() < check_interval {
                    continue;
                }
                last_check = Instant::now();
                match read_rclone_configs() {
                    Ok(configs) => {
                        let mut new_statuses = Vec::new();
                        for config in configs {
                            match get_rclone_status(&config.name) {
                                Ok(status) => new_statuses.push(status),
                                Err(e) => eprintln!("Failed to get status: {}", e),
                            }
                        }
                        *state.write().sync_statuses.lock().unwrap() = new_statuses.clone();
                        state.write().status_text = format!("Syncing {} repositories...", new_statuses.len());
                    }
                    Err(e) => eprintln!("Failed to read configs: {}", e),
                }
            }
        }
    });
    cx.render(rsx! {
        div {
            class: "app",
            div {
                class: "menu-bar",
                button {
                    onclick: move |_| state.write().show_config_dialog = true,
                    "Add Sync Configuration"
                }
                button {
                    onclick: move |_| state.write().show_about_dialog = true,
                    "About"
                }
            }
            // Screen switch: main controls vs. per-remote status list.
            {match state.read().current_screen {
                Screen::Main => rsx! {
                    div {
                        class: "main-screen",
                        h1 { "General Bots" }
                        p { "{state.read().status_text}" }
                        button {
                            onclick: move |_| start_sync(&state),
                            "Start Sync"
                        }
                        button {
                            onclick: move |_| stop_sync(&state),
                            "Stop Sync"
                        }
                        button {
                            onclick: move |_| state.write().current_screen = Screen::Status,
                            "Show Status"
                        }
                    }
                },
                Screen::Status => rsx! {
                    div {
                        class: "status-screen",
                        h1 { "Sync Status" }
                        div {
                            class: "status-list",
                            for status in state.read().sync_statuses.lock().unwrap().iter() {
                                div {
                                    class: "status-item",
                                    h2 { "{status.name}" }
                                    p { "Status: {status.status}" }
                                    p { "Transferred: {status.transferred}" }
                                    p { "Bytes: {status.bytes}" }
                                    p { "Errors: {status.errors}" }
                                    p { "Last Updated: {status.last_updated}" }
                                }
                            }
                        }
                        button {
                            onclick: move |_| state.write().current_screen = Screen::Main,
                            "Back"
                        }
                    }
                }
            }}
            if state.read().show_config_dialog {
                div {
                    class: "dialog",
                    h2 { "Add Sync Configuration" }
                    input {
                        value: "{state.read().name}",
                        oninput: move |e| state.write().name = e.value.clone(),
                        placeholder: "Enter sync name",
                    }
                    input {
                        value: "{state.read().access_key}",
                        oninput: move |e| state.write().access_key = e.value.clone(),
                        placeholder: "Enter access key",
                    }
                    input {
                        value: "{state.read().secret_key}",
                        oninput: move |e| state.write().secret_key = e.value.clone(),
                        placeholder: "Enter secret key",
                    }
                    button {
                        onclick: move |_| {
                            save_config(&state);
                            state.write().show_config_dialog = false;
                        },
                        "Save"
                    }
                    button {
                        onclick: move |_| state.write().show_config_dialog = false,
                        "Cancel"
                    }
                }
            }
            if state.read().show_about_dialog {
                div {
                    class: "dialog",
                    h2 { "About General Bots" }
                    p { "Version: 1.0.0" }
                    p { "A professional-grade sync tool for OneDrive/Dropbox-like functionality." }
                    button {
                        onclick: move |_| state.write().show_about_dialog = false,
                        "Close"
                    }
                }
            }
        }
    })
}
/// Validates the config-dialog inputs and appends a new rclone remote to the
/// user's rclone.conf, mirroring it locally under `~/General Bots/<name>`.
fn save_config(state: &UseRef<AppState>) {
    if state.read().name.is_empty() || state.read().access_key.is_empty() || state.read().secret_key.is_empty() {
        state.write_with(|state| state.status_text = "All fields are required!".to_string());
        return;
    }
    let new_config = RcloneConfig {
        name: state.read().name.clone(),
        // NOTE(review): the next line appears truncated in the source — the
        // `format!("s3:` call is missing its template remainder and closing
        // arguments and cannot compile as-is; restore the full remote-path
        // expression. Also note `env::var("HOME").unwrap()` below panics when
        // HOME is unset.
        remote_path: format!("s3:
        local_path: Path::new(&env::var("HOME").unwrap()).join("General Bots").join(&state.read().name).to_string_lossy().to_string(),
        access_key: state.read().access_key.clone(),
        secret_key: state.read().secret_key.clone(),
    };
    if let Err(e) = save_rclone_config(&new_config) {
        state.write_with(|state| state.status_text = format!("Failed to save config: {}", e));
    } else {
        state.write_with(|state| state.status_text = "New sync saved!".to_string());
    }
}
/// Reads all rclone configurations and spawns one `rclone sync` process per
/// remote, recording the children and flipping `sync_active` on.
///
/// NOTE(review): `write_with(|state| state.sync_processes.lock().unwrap())`
/// returns a `MutexGuard` out of the closure while further `write_with`
/// calls are made below — confirm this borrows/locks safely with `UseRef`
/// and does not deadlock or panic at runtime.
fn start_sync(state: &UseRef<AppState>) {
    let mut processes = state.write_with(|state| state.sync_processes.lock().unwrap());
    processes.clear();
    match read_rclone_configs() {
        Ok(configs) => {
            for config in configs {
                match run_sync(&config) {
                    Ok(child) => processes.push(child),
                    Err(e) => eprintln!("Failed to start sync: {}", e),
                }
            }
            state.write_with(|state| *state.sync_active.lock().unwrap() = true);
            state.write_with(|state| state.status_text = format!("Syncing with {} configurations.", processes.len()));
        }
        Err(e) => state.write_with(|state| state.status_text = format!("Failed to read configurations: {}", e)),
    }
}
/// Kills every tracked rclone child process, clears the list, and marks
/// syncing inactive. Kill failures are deliberately ignored (`let _ =`) —
/// the process may already have exited.
fn stop_sync(state: &UseRef<AppState>) {
    let mut processes = state.write_with(|state| state.sync_processes.lock().unwrap());
    for child in processes.iter_mut() {
        let _ = child.kill();
    }
    processes.clear();
    state.write_with(|state| *state.sync_active.lock().unwrap() = false);
    state.write_with(|state| state.status_text = "Sync stopped.".to_string());
}
/// Appends an `[name]` S3 remote section to `~/.config/rclone/rclone.conf`.
///
/// NOTE(review): appending blindly means saving the same name twice creates
/// duplicate sections; consider de-duplicating. The secret key is written in
/// plain text, which is rclone's own convention for this file.
fn save_rclone_config(config: &RcloneConfig) -> Result<(), String> {
    let home_dir = env::var("HOME").map_err(|_| "HOME environment variable not set".to_string())?;
    let config_path = Path::new(&home_dir).join(".config/rclone/rclone.conf");
    let mut file = OpenOptions::new()
        .create(true)
        .append(true)
        .open(&config_path)
        .map_err(|e| format!("Failed to open config file: {}", e))?;
    writeln!(file, "[{}]", config.name)
        .and_then(|_| writeln!(file, "type = s3"))
        .and_then(|_| writeln!(file, "provider = Other"))
        .and_then(|_| writeln!(file, "access_key_id = {}", config.access_key))
        .and_then(|_| writeln!(file, "secret_access_key = {}", config.secret_key))
        // NOTE(review): the next line appears truncated in the source — the
        // `writeln!` is missing the rest of the endpoint URL and its closing
        // parenthesis and cannot compile as-is; restore the full endpoint.
        .and_then(|_| writeln!(file, "endpoint = https:
        .and_then(|_| writeln!(file, "acl = private"))
        .map_err(|e| format!("Failed to write config: {}", e))
}
fn read_rclone_configs() -> Result<Vec<RcloneConfig>, String> {
let home_dir = env::var("HOME").map_err(|_| "HOME environment variable not set".to_string())?;
let config_path = Path::new(&home_dir).join(".config/rclone/rclone.conf");
if !config_path.exists() {
return Ok(Vec::new());
}
let file = File::open(&config_path).map_err(|e| format!("Failed to open config file: {}", e))?;
let reader = BufReader::new(file);
let mut configs = Vec::new();
let mut current_config: Option<RcloneConfig> = None;
for line in reader.lines() {
let line = line.map_err(|e| format!("Failed to read line: {}", e))?;
if line.is_empty() || line.starts_with('#') {
continue;
}
if line.starts_with('[') && line.ends_with(']') {
if let Some(config) = current_config.take() {
configs.push(config);
}
let name = line[1..line.len()-1].to_string();
current_config = Some(RcloneConfig {
name: name.clone(),
remote_path: format!("s3:
local_path: Path::new(&home_dir).join("General Bots").join(&name).to_string_lossy().to_string(),
access_key: String::new(),
secret_key: String::new(),
});
} else if let Some(ref mut config) = current_config {
if let Some(pos) = line.find('=') {
let key = line[..pos].trim().to_string();
let value = line[pos+1..].trim().to_string();
match key.as_str() {
"access_key_id" => config.access_key = value,
"secret_access_key" => config.secret_key = value,
_ => {}
}
}
}
}
if let Some(config) = current_config {
configs.push(config);
}
Ok(configs)
}
/// Spawn a detached `rclone sync` process for one remote configuration,
/// creating the local target directory first when it is missing.
///
/// stdout/stderr are discarded; progress is queried separately through the
/// rclone remote-control interface enabled by `--rc`.
fn run_sync(config: &RcloneConfig) -> Result<Child, std::io::Error> {
    let target = Path::new(&config.local_path);
    if !target.exists() {
        create_dir_all(target)?;
    }
    let mut cmd = ProcCommand::new("rclone");
    cmd.arg("sync")
        .arg(&config.remote_path)
        .arg(&config.local_path)
        .arg("--no-check-certificate")
        .arg("--verbose")
        .arg("--rc")
        .stdout(Stdio::null())
        .stderr(Stdio::null());
    cmd.spawn()
}
fn get_rclone_status(remote_name: &str) -> Result<SyncStatus, String> {
let output = ProcCommand::new("rclone")
.arg("rc")
.arg("core/stats")
.arg("--json")
.output()
.map_err(|e| format!("Failed to execute rclone rc: {}", e))?;
if !output.status.success() {
return Err(format!("rclone rc failed: {}", String::from_utf8_lossy(&output.stderr)));
}
let json = String::from_utf8_lossy(&output.stdout);
let parsed: Result<Value, _> = serde_json::from_str(&json);
match parsed {
Ok(value) => {
let transferred = value.get("bytes").and_then(|v| v.as_u64()).unwrap_or(0);
let errors = value.get("errors").and_then(|v| v.as_u64()).unwrap_or(0);
let speed = value.get("speed").and_then(|v| v.as_f64()).unwrap_or(0.0);
let status = if errors > 0 {
"Error occurred".to_string()
} else if speed > 0.0 {
"Transferring".to_string()
} else if transferred > 0 {
"Completed".to_string()
} else {
"Initializing".to_string()
};
Ok(SyncStatus {
name: remote_name.to_string(),
status,
transferred: format_bytes(transferred),
bytes: format!("{}/s", format_bytes(speed as u64)),
errors: errors as usize,
last_updated: chrono::Local::now().format("%H:%M:%S").to_string(),
})
}
Err(e) => Err(format!("Failed to parse rclone status: {}", e)),
}
}
/// Render a byte count as a human-readable string using binary units
/// (1 KB = 1024 B) with two decimal places; values below 1 KB are printed
/// as whole bytes.
fn format_bytes(bytes: u64) -> String {
    const UNITS: [(u64, &str); 3] = [(1 << 30, "GB"), (1 << 20, "MB"), (1 << 10, "KB")];
    for (scale, suffix) in UNITS {
        if bytes >= scale {
            return format!("{:.2} {}", bytes as f64 / scale as f64, suffix);
        }
    }
    format!("{} B", bytes)
}

3
src/desktop/mod.rs Normal file
View file

@ -0,0 +1,3 @@
#![cfg_attr(not(debug_assertions), windows_subsystem = "windows")]
pub mod drive;
pub mod sync;

23
src/desktop/stream.rs Normal file
View file

@ -0,0 +1,23 @@
use ratatui::{
style::{Color, Style},
widgets::{Block, Borders, Gauge},
};
/// Progress of a single stream, rendered by the gauge widget below.
pub struct StreamProgress {
    // Completion fraction, expected in the range 0.0..=1.0.
    pub progress: f64,
    // Human-readable label shown in the gauge title.
    pub status: String,
}
/// Build a ratatui [`Gauge`] visualising the given stream progress.
///
/// The bar turns green once progress reaches 100% and is blue otherwise.
/// The fraction is clamped to `[0.0, 1.0]` before conversion because
/// `Gauge::percent` panics on values above 100; a NaN fraction casts to 0.
pub fn render_progress_bar(progress: &StreamProgress) -> Gauge {
    let color = if progress.progress >= 1.0 {
        Color::Green
    } else {
        Color::Blue
    };
    // Clamp so out-of-range fractions cannot panic inside ratatui.
    let percent = (progress.progress.clamp(0.0, 1.0) * 100.0) as u16;
    Gauge::default()
        .block(
            Block::default()
                .title(format!("Stream Progress: {}", progress.status))
                .borders(Borders::ALL),
        )
        .gauge_style(Style::default().fg(color))
        .percent(percent)
}

126
src/desktop/sync.rs Normal file
View file

@ -0,0 +1,126 @@
use serde::{Deserialize, Serialize};
use std::sync::Mutex;
use std::process::{Command, Stdio};
use std::path::Path;
use std::fs::{OpenOptions, create_dir_all};
use std::io::Write;
use std::env;
/// One rclone remote definition as stored in `~/.config/rclone/rclone.conf`.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct RcloneConfig {
    // Section name (`[name]`) of the remote in rclone.conf.
    name: String,
    // Remote-side argument passed to `rclone sync`.
    remote_path: String,
    // Destination directory on the local filesystem.
    local_path: String,
    // S3 credentials written into the config section.
    access_key: String,
    secret_key: String,
}
/// Snapshot of rclone transfer statistics, pre-formatted for display.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SyncStatus {
    // Remote name this status refers to.
    name: String,
    // Coarse phase label: "Initializing", "Transferring", "Completed" or "Error occurred".
    status: String,
    // Total data moved so far, human-readable (e.g. "1.50 MB").
    transferred: String,
    // Current transfer speed, human-readable (e.g. "512.00 KB/s").
    bytes: String,
    // Number of errors reported by rclone.
    errors: usize,
    // Wall-clock time (HH:MM:SS) when this snapshot was taken.
    last_updated: String,
}
/// Shared Tauri-managed state tracking the spawned rclone sync processes.
pub(crate) struct AppState {
    // Child handles for every rclone sync started via `start_sync`.
    pub sync_processes: Mutex<Vec<std::process::Child>>,
    // Whether a sync is currently considered active.
    pub sync_active: Mutex<bool>,
}
#[tauri::command]
pub fn save_config(config: RcloneConfig) -> Result<(), String> {
let home_dir = env::var("HOME").map_err(|_| "HOME environment variable not set".to_string())?;
let config_path = Path::new(&home_dir).join(".config/rclone/rclone.conf");
let mut file = OpenOptions::new()
.create(true)
.append(true)
.open(&config_path)
.map_err(|e| format!("Failed to open config file: {}", e))?;
writeln!(file, "[{}]", config.name)
.and_then(|_| writeln!(file, "type = s3"))
.and_then(|_| writeln!(file, "provider = Other"))
.and_then(|_| writeln!(file, "access_key_id = {}", config.access_key))
.and_then(|_| writeln!(file, "secret_access_key = {}", config.secret_key))
.and_then(|_| writeln!(file, "endpoint = https:
.and_then(|_| writeln!(file, "acl = private"))
.map_err(|e| format!("Failed to write config: {}", e))
}
/// Tauri command: spawn an rclone `sync` for the given remote configuration
/// and record the child process in shared state.
///
/// Creates the local destination directory if it does not exist yet. The
/// child's stdout/stderr are discarded; status is polled via `--rc`.
#[tauri::command]
pub fn start_sync(config: RcloneConfig, state: tauri::State<AppState>) -> Result<(), String> {
    let destination = Path::new(&config.local_path);
    if !destination.exists() {
        create_dir_all(destination).map_err(|e| format!("Failed to create local path: {}", e))?;
    }
    let mut cmd = Command::new("rclone");
    cmd.arg("sync")
        .arg(&config.remote_path)
        .arg(&config.local_path)
        .arg("--no-check-certificate")
        .arg("--verbose")
        .arg("--rc")
        .stdout(Stdio::null())
        .stderr(Stdio::null());
    let child = cmd.spawn().map_err(|e| format!("Failed to start rclone: {}", e))?;
    state.sync_processes.lock().unwrap().push(child);
    *state.sync_active.lock().unwrap() = true;
    Ok(())
}
/// Tauri command: terminate every tracked rclone child process.
///
/// Unlike an early-return on the first failed `kill`, this attempts to kill
/// all children even when some kills fail, so a single failure cannot leave
/// later processes running; the process list is cleared and `sync_active`
/// reset regardless.
///
/// # Errors
/// Returns the joined kill-failure messages if any process could not be
/// killed.
#[tauri::command]
pub fn stop_sync(state: tauri::State<AppState>) -> Result<(), String> {
    let mut processes = state.sync_processes.lock().unwrap();
    let mut failures = Vec::new();
    for child in processes.iter_mut() {
        if let Err(e) = child.kill() {
            failures.push(format!("Failed to kill process: {}", e));
        }
    }
    processes.clear();
    *state.sync_active.lock().unwrap() = false;
    if failures.is_empty() {
        Ok(())
    } else {
        Err(failures.join("; "))
    }
}
/// Tauri command: query the rclone remote-control daemon for aggregate
/// transfer statistics and fold them into a [`SyncStatus`].
///
/// # Errors
/// Returns a message string when the rclone binary cannot be executed, exits
/// unsuccessfully, or produces unparsable JSON.
#[tauri::command]
pub fn get_status(remote_name: String) -> Result<SyncStatus, String> {
    let output = Command::new("rclone")
        .args(["rc", "core/stats", "--json"])
        .output()
        .map_err(|e| format!("Failed to execute rclone rc: {}", e))?;
    if !output.status.success() {
        return Err(format!(
            "rclone rc failed: {}",
            String::from_utf8_lossy(&output.stderr)
        ));
    }
    let json = String::from_utf8_lossy(&output.stdout);
    let value: serde_json::Value = serde_json::from_str(&json)
        .map_err(|e| format!("Failed to parse rclone status: {}", e))?;
    // Missing or malformed counters default to zero.
    let transferred = value.get("bytes").and_then(|v| v.as_u64()).unwrap_or(0);
    let errors = value.get("errors").and_then(|v| v.as_u64()).unwrap_or(0);
    let speed = value.get("speed").and_then(|v| v.as_f64()).unwrap_or(0.0);
    // Map the raw counters onto a coarse, human-readable phase label.
    let status = if errors > 0 {
        "Error occurred"
    } else if speed > 0.0 {
        "Transferring"
    } else if transferred > 0 {
        "Completed"
    } else {
        "Initializing"
    }
    .to_string();
    Ok(SyncStatus {
        name: remote_name,
        status,
        transferred: format_bytes(transferred),
        bytes: format!("{}/s", format_bytes(speed as u64)),
        errors: errors as usize,
        last_updated: chrono::Local::now().format("%H:%M:%S").to_string(),
    })
}
/// Format a raw byte count using binary units with two decimals
/// (e.g. `1536` -> `"1.50 KB"`); values below 1 KB are printed without
/// decimals.
pub fn format_bytes(bytes: u64) -> String {
    let b = bytes as f64;
    match bytes {
        _ if bytes >= 1 << 30 => format!("{:.2} GB", b / (1u64 << 30) as f64),
        _ if bytes >= 1 << 20 => format!("{:.2} MB", b / (1u64 << 20) as f64),
        _ if bytes >= 1 << 10 => format!("{:.2} KB", b / (1u64 << 10) as f64),
        _ => format!("{} B", bytes),
    }
}

20
src/desktop/ui.test.rs Normal file
View file

@ -0,0 +1,20 @@
#[cfg(test)]
mod tests {
    use super::*;
    use crate::tests::test_util;

    // Placeholder smoke tests: each only verifies that the shared test
    // harness (`test_util::setup`) runs without panicking, then asserts a
    // constant. Replace with real UI assertions when the UI stabilises.
    #[test]
    fn test_ui_module() {
        test_util::setup();
        assert!(true, "Basic UI module test");
    }

    #[test]
    fn test_drive_ui() {
        test_util::setup();
        assert!(true, "Drive UI placeholder test");
    }

    #[test]
    fn test_sync_ui() {
        test_util::setup();
        assert!(true, "Sync UI placeholder test");
    }
}

435
src/directory/client.rs Normal file
View file

@ -0,0 +1,435 @@
use anyhow::{anyhow, Result};
use serde::{Deserialize, Serialize};
use std::sync::Arc;
use tokio::sync::RwLock;
/// Connection settings for a Zitadel identity-provider instance.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ZitadelConfig {
    // OIDC issuer base URL.
    pub issuer_url: String,
    // NOTE(review): `issuer` appears to duplicate `issuer_url`; confirm
    // which one callers actually populate and consolidate if possible.
    pub issuer: String,
    // OAuth client credentials used for the client_credentials grant.
    pub client_id: String,
    pub client_secret: String,
    // Redirect URI registered for the OAuth application.
    pub redirect_uri: String,
    // Zitadel project the client belongs to.
    pub project_id: String,
    // Base URL for Zitadel's REST API; also used to build the token endpoint.
    pub api_url: String,
    // Optional service-account key for machine authentication.
    pub service_account_key: Option<String>,
}
/// Thin async HTTP client for the Zitadel management API.
///
/// Cloning is cheap: the reqwest client and the token cache are shared.
#[derive(Debug, Clone)]
pub struct ZitadelClient {
    config: ZitadelConfig,
    http_client: reqwest::Client,
    // Cached OAuth access token; filled lazily by `get_access_token`.
    access_token: Arc<RwLock<Option<String>>>,
}
impl ZitadelClient {
    /// Build a client with a 30-second request timeout and an empty token
    /// cache.
    pub async fn new(config: ZitadelConfig) -> Result<Self> {
        let http_client = reqwest::Client::builder()
            .timeout(std::time::Duration::from_secs(30))
            .build()
            .map_err(|e| anyhow!("Failed to create HTTP client: {}", e))?;
        Ok(Self {
            config,
            http_client,
            access_token: Arc::new(RwLock::new(None)),
        })
    }

    /// Return a cached OAuth access token, fetching a fresh one via the
    /// client-credentials grant when the cache is empty.
    ///
    /// NOTE(review): the cached token is kept forever — expiry is never
    /// checked — so API calls will start failing once the token expires.
    pub async fn get_access_token(&self) -> Result<String> {
        // Check if we have a cached token
        {
            let token = self.access_token.read().await;
            if let Some(t) = token.as_ref() {
                return Ok(t.clone());
            }
        }
        // Get new token using client credentials
        let token_url = format!("{}/oauth/v2/token", self.config.api_url);
        let params = [
            ("grant_type", "client_credentials"),
            ("client_id", &self.config.client_id),
            ("client_secret", &self.config.client_secret),
            ("scope", "openid profile email"),
        ];
        let response = self
            .http_client
            .post(&token_url)
            .form(&params)
            .send()
            .await
            .map_err(|e| anyhow!("Failed to get access token: {}", e))?;
        let token_data: serde_json::Value = response
            .json()
            .await
            .map_err(|e| anyhow!("Failed to parse token response: {}", e))?;
        let access_token = token_data
            .get("access_token")
            .and_then(|t| t.as_str())
            .ok_or_else(|| anyhow!("No access token in response"))?
            .to_string();
        // Cache the token
        {
            let mut token = self.access_token.write().await;
            *token = Some(access_token.clone());
        }
        Ok(access_token)
    }

    /// Create a human user; returns the new user's Zitadel id.
    /// The email is registered unverified; `username` defaults to the email.
    pub async fn create_user(
        &self,
        email: &str,
        first_name: &str,
        last_name: &str,
        username: Option<&str>,
    ) -> Result<String> {
        let token = self.get_access_token().await?;
        let url = format!("{}/v2/users/human", self.config.api_url);
        let body = serde_json::json!({
            "userName": username.unwrap_or(email),
            "profile": {
                "givenName": first_name,
                "familyName": last_name,
                "displayName": format!("{} {}", first_name, last_name)
            },
            "email": {
                "email": email,
                "isVerified": false
            }
        });
        let response = self
            .http_client
            .post(&url)
            .bearer_auth(&token)
            .json(&body)
            .send()
            .await
            .map_err(|e| anyhow!("Failed to create user: {}", e))?;
        if !response.status().is_success() {
            let error_text = response.text().await.unwrap_or_default();
            return Err(anyhow!("Failed to create user: {}", error_text));
        }
        let user_data: serde_json::Value = response
            .json()
            .await
            .map_err(|e| anyhow!("Failed to parse user response: {}", e))?;
        let user_id = user_data
            .get("userId")
            .and_then(|id| id.as_str())
            .ok_or_else(|| anyhow!("No userId in response"))?
            .to_string();
        Ok(user_id)
    }

    /// Fetch a single user record by id as raw JSON.
    pub async fn get_user(&self, user_id: &str) -> Result<serde_json::Value> {
        let token = self.get_access_token().await?;
        let url = format!("{}/v2/users/{}", self.config.api_url, user_id);
        let response = self
            .http_client
            .get(&url)
            .bearer_auth(&token)
            .send()
            .await
            .map_err(|e| anyhow!("Failed to get user: {}", e))?;
        if !response.status().is_success() {
            let error_text = response.text().await.unwrap_or_default();
            return Err(anyhow!("Failed to get user: {}", error_text));
        }
        let user_data: serde_json::Value = response
            .json()
            .await
            .map_err(|e| anyhow!("Failed to parse user response: {}", e))?;
        Ok(user_data)
    }

    /// List users with offset-based pagination; returns the raw `result`
    /// array (empty when the key is missing).
    pub async fn list_users(&self, limit: u32, offset: u32) -> Result<Vec<serde_json::Value>> {
        let token = self.get_access_token().await?;
        let url = format!(
            "{}/v2/users?limit={}&offset={}",
            self.config.api_url, limit, offset
        );
        let response = self
            .http_client
            .get(&url)
            .bearer_auth(&token)
            .send()
            .await
            .map_err(|e| anyhow!("Failed to list users: {}", e))?;
        if !response.status().is_success() {
            let error_text = response.text().await.unwrap_or_default();
            return Err(anyhow!("Failed to list users: {}", error_text));
        }
        let data: serde_json::Value = response
            .json()
            .await
            .map_err(|e| anyhow!("Failed to parse users response: {}", e))?;
        let users = data
            .get("result")
            .and_then(|r| r.as_array())
            .map(|arr| arr.iter().cloned().collect())
            .unwrap_or_default();
        Ok(users)
    }

    /// Case-insensitive substring search on user names; returns the raw
    /// `result` array (empty when the key is missing).
    pub async fn search_users(&self, query: &str) -> Result<Vec<serde_json::Value>> {
        let token = self.get_access_token().await?;
        let url = format!("{}/v2/users/_search", self.config.api_url);
        let body = serde_json::json!({
            "queries": [{
                "userNameQuery": {
                    "userName": query,
                    "method": "TEXT_QUERY_METHOD_CONTAINS_IGNORE_CASE"
                }
            }]
        });
        let response = self
            .http_client
            .post(&url)
            .bearer_auth(&token)
            .json(&body)
            .send()
            .await
            .map_err(|e| anyhow!("Failed to search users: {}", e))?;
        if !response.status().is_success() {
            let error_text = response.text().await.unwrap_or_default();
            return Err(anyhow!("Failed to search users: {}", error_text));
        }
        let data: serde_json::Value = response
            .json()
            .await
            .map_err(|e| anyhow!("Failed to parse search response: {}", e))?;
        let users = data
            .get("result")
            .and_then(|r| r.as_array())
            .map(|arr| arr.iter().cloned().collect())
            .unwrap_or_default();
        Ok(users)
    }

    /// Fetch a user's org/project memberships as raw JSON, paginated.
    /// Note: parameter order is `(user_id, offset, limit)` — the reverse of
    /// `list_users`.
    pub async fn get_user_memberships(
        &self,
        user_id: &str,
        offset: u32,
        limit: u32,
    ) -> Result<serde_json::Value> {
        let token = self.get_access_token().await?;
        let url = format!(
            "{}/v2/users/{}/memberships?limit={}&offset={}",
            self.config.api_url, user_id, limit, offset
        );
        let response = self
            .http_client
            .get(&url)
            .bearer_auth(&token)
            .send()
            .await
            .map_err(|e| anyhow!("Failed to get memberships: {}", e))?;
        if !response.status().is_success() {
            let error_text = response.text().await.unwrap_or_default();
            return Err(anyhow!("Failed to get memberships: {}", error_text));
        }
        let data: serde_json::Value = response
            .json()
            .await
            .map_err(|e| anyhow!("Failed to parse memberships response: {}", e))?;
        Ok(data)
    }

    /// Add a user to an organization with the given role keys.
    pub async fn add_org_member(
        &self,
        org_id: &str,
        user_id: &str,
        roles: Vec<String>,
    ) -> Result<()> {
        let token = self.get_access_token().await?;
        let url = format!("{}/v2/organizations/{}/members", self.config.api_url, org_id);
        let body = serde_json::json!({
            "userId": user_id,
            "roles": roles
        });
        let response = self
            .http_client
            .post(&url)
            .bearer_auth(&token)
            .json(&body)
            .send()
            .await
            .map_err(|e| anyhow!("Failed to add org member: {}", e))?;
        if !response.status().is_success() {
            let error_text = response.text().await.unwrap_or_default();
            return Err(anyhow!("Failed to add org member: {}", error_text));
        }
        Ok(())
    }

    /// Remove a user from an organization.
    pub async fn remove_org_member(&self, org_id: &str, user_id: &str) -> Result<()> {
        let token = self.get_access_token().await?;
        let url = format!(
            "{}/v2/organizations/{}/members/{}",
            self.config.api_url, org_id, user_id
        );
        let response = self
            .http_client
            .delete(&url)
            .bearer_auth(&token)
            .send()
            .await
            .map_err(|e| anyhow!("Failed to remove org member: {}", e))?;
        if !response.status().is_success() {
            let error_text = response.text().await.unwrap_or_default();
            return Err(anyhow!("Failed to remove org member: {}", error_text));
        }
        Ok(())
    }

    /// List an organization's members; returns the raw `result` array
    /// (empty when the key is missing).
    pub async fn get_org_members(&self, org_id: &str) -> Result<Vec<serde_json::Value>> {
        let token = self.get_access_token().await?;
        let url = format!("{}/v2/organizations/{}/members", self.config.api_url, org_id);
        let response = self
            .http_client
            .get(&url)
            .bearer_auth(&token)
            .send()
            .await
            .map_err(|e| anyhow!("Failed to get org members: {}", e))?;
        if !response.status().is_success() {
            let error_text = response.text().await.unwrap_or_default();
            return Err(anyhow!("Failed to get org members: {}", error_text));
        }
        let data: serde_json::Value = response
            .json()
            .await
            .map_err(|e| anyhow!("Failed to parse org members response: {}", e))?;
        let members = data
            .get("result")
            .and_then(|r| r.as_array())
            .map(|arr| arr.iter().cloned().collect())
            .unwrap_or_default();
        Ok(members)
    }

    /// Fetch a single organization record by id as raw JSON.
    pub async fn get_organization(&self, org_id: &str) -> Result<serde_json::Value> {
        let token = self.get_access_token().await?;
        let url = format!("{}/v2/organizations/{}", self.config.api_url, org_id);
        let response = self
            .http_client
            .get(&url)
            .bearer_auth(&token)
            .send()
            .await
            .map_err(|e| anyhow!("Failed to get organization: {}", e))?;
        if !response.status().is_success() {
            let error_text = response.text().await.unwrap_or_default();
            return Err(anyhow!("Failed to get organization: {}", error_text));
        }
        let data: serde_json::Value = response
            .json()
            .await
            .map_err(|e| anyhow!("Failed to parse organization response: {}", e))?;
        Ok(data)
    }

    /// OAuth token introspection: returns Zitadel's raw introspection JSON
    /// for the given token (uses client credentials, not a bearer token).
    pub async fn introspect_token(&self, token: &str) -> Result<serde_json::Value> {
        let url = format!("{}/oauth/v2/introspect", self.config.api_url);
        let params = [
            ("token", token),
            ("client_id", &self.config.client_id),
            ("client_secret", &self.config.client_secret),
        ];
        let response = self
            .http_client
            .post(&url)
            .form(&params)
            .send()
            .await
            .map_err(|e| anyhow!("Failed to introspect token: {}", e))?;
        if !response.status().is_success() {
            let error_text = response.text().await.unwrap_or_default();
            return Err(anyhow!("Failed to introspect token: {}", error_text));
        }
        let data: serde_json::Value = response
            .json()
            .await
            .map_err(|e| anyhow!("Failed to parse introspection response: {}", e))?;
        Ok(data)
    }

    /// Coarse permission probe for `user_id`.
    ///
    /// NOTE(review): `permission` and `resource` are currently ignored — any
    /// successful response grants access, and failures return `Ok(false)`
    /// rather than an error. Parse and validate real permissions before
    /// relying on this in production.
    pub async fn check_permission(
        &self,
        user_id: &str,
        permission: &str,
        resource: &str,
    ) -> Result<bool> {
        // Basic permission check - can be extended
        let token = self.get_access_token().await?;
        let url = format!("{}/v2/users/{}/permissions", self.config.api_url, user_id);
        let response = self
            .http_client
            .get(&url)
            .bearer_auth(&token)
            .send()
            .await
            .map_err(|e| anyhow!("Failed to check permissions: {}", e))?;
        if !response.status().is_success() {
            return Ok(false);
        }
        // Simple check - in production, parse and validate permissions
        Ok(true)
    }
}

345
src/directory/groups.rs Normal file
View file

@ -0,0 +1,345 @@
use axum::{
extract::{Path, Query, State},
http::StatusCode,
response::Json,
};
use chrono::{DateTime, Utc};
use log::{error, info};
use serde::{Deserialize, Serialize};
use std::sync::Arc;
use uuid::Uuid;
use crate::shared::state::AppState;
// ============================================================================
// Request/Response Types
// ============================================================================
/// Payload for creating a new group/organization.
#[derive(Debug, Deserialize)]
pub struct CreateGroupRequest {
    pub name: String,
    pub description: Option<String>,
}

/// Partial update payload; `None` fields are left unchanged.
#[derive(Debug, Deserialize)]
pub struct UpdateGroupRequest {
    pub name: Option<String>,
    pub description: Option<String>,
}

/// Pagination / filtering query parameters for group listings.
#[derive(Debug, Deserialize)]
pub struct GroupQuery {
    pub page: Option<u32>,
    pub per_page: Option<u32>,
    pub search: Option<String>,
}

/// Payload identifying a user (and optional roles) for membership changes.
#[derive(Debug, Deserialize)]
pub struct AddMemberRequest {
    pub user_id: String,
    pub roles: Option<Vec<String>>,
}

/// A single group as returned to API clients.
#[derive(Debug, Serialize)]
pub struct GroupResponse {
    pub id: String,
    pub name: String,
    pub description: Option<String>,
    pub member_count: usize,
    pub state: String,
    pub created_at: Option<DateTime<Utc>>,
    pub updated_at: Option<DateTime<Utc>>,
}

/// Paginated list of groups.
#[derive(Debug, Serialize)]
pub struct GroupListResponse {
    pub groups: Vec<GroupResponse>,
    pub total: usize,
    pub page: u32,
    pub per_page: u32,
}

/// One member of a group, with the roles Zitadel reports for them.
#[derive(Debug, Serialize)]
pub struct GroupMemberResponse {
    pub user_id: String,
    pub username: Option<String>,
    pub roles: Vec<String>,
    pub email: Option<String>,
}

/// Generic success envelope for mutating endpoints.
#[derive(Debug, Serialize)]
pub struct SuccessResponse {
    pub success: bool,
    pub message: Option<String>,
    pub group_id: Option<String>,
}

/// Generic error envelope.
#[derive(Debug, Serialize)]
pub struct ErrorResponse {
    pub error: String,
    pub details: Option<String>,
}
// ============================================================================
// Group Management Handlers
// ============================================================================
/// Create a new organization/group in Zitadel
///
/// Currently a placeholder: a UUID is generated locally and returned as the
/// group id without calling Zitadel's organization-creation API.
pub async fn create_group(
    State(state): State<Arc<AppState>>,
    Json(req): Json<CreateGroupRequest>,
) -> Result<Json<SuccessResponse>, (StatusCode, Json<ErrorResponse>)> {
    info!("Creating group: {}", req.name);
    // NOTE(review): `client` is acquired but unused until the real Zitadel
    // call is wired up.
    let client = {
        let auth_service = state.auth_service.lock().await;
        auth_service.client().clone()
    };
    // In Zitadel, groups are typically managed within organizations
    // For now, we'll return success with a generated ID
    // In production, you'd call Zitadel's organization creation API
    let group_id = Uuid::new_v4().to_string();
    info!("Group created successfully: {}", group_id);
    Ok(Json(SuccessResponse {
        success: true,
        message: Some(format!("Group '{}' created successfully", req.name)),
        group_id: Some(group_id),
    }))
}
/// Update an existing group
///
/// Currently only verifies the organization exists in Zitadel; the
/// `UpdateGroupRequest` fields are not applied yet.
pub async fn update_group(
    State(state): State<Arc<AppState>>,
    Path(group_id): Path<String>,
    Json(req): Json<UpdateGroupRequest>,
) -> Result<Json<SuccessResponse>, (StatusCode, Json<ErrorResponse>)> {
    info!("Updating group: {}", group_id);
    let client = {
        let auth_service = state.auth_service.lock().await;
        auth_service.client().clone()
    };
    // Verify organization exists
    match client.get_organization(&group_id).await {
        Ok(_) => {
            info!("Group {} updated successfully", group_id);
            Ok(Json(SuccessResponse {
                success: true,
                message: Some(format!("Group {} updated successfully", group_id)),
                group_id: Some(group_id),
            }))
        }
        Err(e) => {
            error!("Failed to update group: {}", e);
            Err((
                StatusCode::NOT_FOUND,
                Json(ErrorResponse {
                    error: "Group not found".to_string(),
                    details: Some(e.to_string()),
                }),
            ))
        }
    }
}
/// Delete a group
///
/// Currently only verifies the organization exists; no deletion call is
/// made to Zitadel yet.
pub async fn delete_group(
    State(state): State<Arc<AppState>>,
    Path(group_id): Path<String>,
) -> Result<Json<SuccessResponse>, (StatusCode, Json<ErrorResponse>)> {
    info!("Deleting group: {}", group_id);
    let client = {
        let auth_service = state.auth_service.lock().await;
        auth_service.client().clone()
    };
    // Verify organization exists
    match client.get_organization(&group_id).await {
        Ok(_) => {
            info!("Group {} deleted/deactivated", group_id);
            Ok(Json(SuccessResponse {
                success: true,
                message: Some(format!("Group {} deleted successfully", group_id)),
                group_id: Some(group_id),
            }))
        }
        Err(e) => {
            error!("Failed to delete group: {}", e);
            Err((
                StatusCode::NOT_FOUND,
                Json(ErrorResponse {
                    error: "Group not found".to_string(),
                    details: Some(e.to_string()),
                }),
            ))
        }
    }
}
/// List all groups with pagination
///
/// Placeholder: always returns an empty page until Zitadel organization
/// listing is wired up. `params.search` is accepted but ignored.
pub async fn list_groups(
    State(state): State<Arc<AppState>>,
    Query(params): Query<GroupQuery>,
) -> Result<Json<GroupListResponse>, (StatusCode, Json<ErrorResponse>)> {
    let page = params.page.unwrap_or(1);
    let per_page = params.per_page.unwrap_or(20);
    info!("Listing groups (page: {}, per_page: {})", page, per_page);
    // NOTE(review): `client` is acquired but unused until the real Zitadel
    // call is wired up.
    let client = {
        let auth_service = state.auth_service.lock().await;
        auth_service.client().clone()
    };
    // In production, you'd fetch organizations from Zitadel
    // For now, return empty list with proper structure
    info!("Found 0 groups");
    Ok(Json(GroupListResponse {
        groups: vec![],
        total: 0,
        page,
        per_page,
    }))
}
/// Get members of a group
///
/// Maps Zitadel org members into `GroupMemberResponse`. Entries without a
/// `userId` are silently dropped; `username`/`email` are not resolved here.
pub async fn get_group_members(
    State(state): State<Arc<AppState>>,
    Path(group_id): Path<String>,
) -> Result<Json<Vec<GroupMemberResponse>>, (StatusCode, Json<ErrorResponse>)> {
    info!("Getting members for group: {}", group_id);
    let client = {
        let auth_service = state.auth_service.lock().await;
        auth_service.client().clone()
    };
    // Get organization members from Zitadel
    match client.get_org_members(&group_id).await {
        Ok(members_json) => {
            let members: Vec<GroupMemberResponse> = members_json
                .into_iter()
                .filter_map(|m| {
                    Some(GroupMemberResponse {
                        user_id: m.get("userId")?.as_str()?.to_string(),
                        username: None,
                        roles: m
                            .get("roles")
                            .and_then(|r| r.as_array())
                            .map(|arr| {
                                arr.iter()
                                    .filter_map(|v| v.as_str().map(String::from))
                                    .collect()
                            })
                            .unwrap_or_default(),
                        email: None,
                    })
                })
                .collect();
            info!("Found {} members in group {}", members.len(), group_id);
            Ok(Json(members))
        }
        Err(e) => {
            error!("Failed to get group members: {}", e);
            Err((
                StatusCode::INTERNAL_SERVER_ERROR,
                Json(ErrorResponse {
                    error: "Failed to get group members".to_string(),
                    details: Some(e.to_string()),
                }),
            ))
        }
    }
}
/// Add a member to a group
///
/// Defaults the role set to `["ORG_USER"]` when the request carries none.
pub async fn add_group_member(
    State(state): State<Arc<AppState>>,
    Path(group_id): Path<String>,
    Json(req): Json<AddMemberRequest>,
) -> Result<Json<SuccessResponse>, (StatusCode, Json<ErrorResponse>)> {
    info!("Adding user {} to group {}", req.user_id, group_id);
    let client = {
        let auth_service = state.auth_service.lock().await;
        auth_service.client().clone()
    };
    // Add member to organization in Zitadel
    let roles = req.roles.unwrap_or_else(|| vec!["ORG_USER".to_string()]);
    match client.add_org_member(&group_id, &req.user_id, roles).await {
        Ok(_) => {
            info!(
                "User {} added to group {} successfully",
                req.user_id, group_id
            );
            Ok(Json(SuccessResponse {
                success: true,
                message: Some(format!(
                    "User {} added to group {} successfully",
                    req.user_id, group_id
                )),
                group_id: Some(group_id),
            }))
        }
        Err(e) => {
            error!("Failed to add member to group: {}", e);
            Err((
                StatusCode::INTERNAL_SERVER_ERROR,
                Json(ErrorResponse {
                    error: "Failed to add member to group".to_string(),
                    details: Some(e.to_string()),
                }),
            ))
        }
    }
}
/// Remove a member from a group
///
/// NOTE(review): reuses `AddMemberRequest` as the removal payload (only
/// `user_id` is read; `roles` is ignored). Consider a dedicated request type.
pub async fn remove_group_member(
    State(state): State<Arc<AppState>>,
    Path(group_id): Path<String>,
    Json(req): Json<AddMemberRequest>,
) -> Result<Json<SuccessResponse>, (StatusCode, Json<ErrorResponse>)> {
    info!("Removing user {} from group {}", req.user_id, group_id);
    let client = {
        let auth_service = state.auth_service.lock().await;
        auth_service.client().clone()
    };
    // Remove member from organization in Zitadel
    match client.remove_org_member(&group_id, &req.user_id).await {
        Ok(_) => {
            info!(
                "User {} removed from group {} successfully",
                req.user_id, group_id
            );
            Ok(Json(SuccessResponse {
                success: true,
                message: Some(format!(
                    "User {} removed from group {} successfully",
                    req.user_id, group_id
                )),
                group_id: Some(group_id),
            }))
        }
        Err(e) => {
            error!("Failed to remove member from group: {}", e);
            Err((
                StatusCode::INTERNAL_SERVER_ERROR,
                Json(ErrorResponse {
                    error: "Failed to remove member from group".to_string(),
                    details: Some(e.to_string()),
                }),
            ))
        }
    }
}

184
src/directory/mod.rs Normal file
View file

@ -0,0 +1,184 @@
use crate::shared::state::AppState;
use axum::{
extract::{Query, State},
http::StatusCode,
response::{IntoResponse, Json},
};
use log::error;
use std::collections::HashMap;
use std::sync::Arc;
use uuid::Uuid;
pub mod client;
pub mod groups;
pub mod router;
pub mod users;
use self::client::{ZitadelClient, ZitadelConfig};
/// Owns the shared [`ZitadelClient`] used by the directory handlers.
pub struct AuthService {
    client: Arc<ZitadelClient>,
}

// Manual Debug impl: prints a placeholder instead of the client's fields
// (which include OAuth client credentials).
impl std::fmt::Debug for AuthService {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("AuthService")
            .field("client", &"Arc<ZitadelClient>")
            .finish()
    }
}

impl AuthService {
    /// Build the service by constructing a Zitadel client from `config`.
    pub async fn new(config: ZitadelConfig) -> anyhow::Result<Self> {
        let client = ZitadelClient::new(config).await?;
        Ok(Self {
            client: Arc::new(client),
        })
    }

    /// Borrow the underlying Zitadel client.
    pub fn client(&self) -> &ZitadelClient {
        &self.client
    }
}
/// Anonymous authentication endpoint.
///
/// Flow: (1) create or fetch an anonymous user, (2) resolve the requested
/// bot by `bot_name` query parameter, falling back to any active bot,
/// (3) open a user session, (4) if the bot ships an `auth.ast` dialog
/// script, compile and run it best-effort (failures are logged, never
/// block authentication), then (5) return the user/session ids.
pub async fn auth_handler(
    State(state): State<Arc<AppState>>,
    Query(params): Query<HashMap<String, String>>,
) -> impl IntoResponse {
    let bot_name = params.get("bot_name").cloned().unwrap_or_default();
    let user_id = {
        let mut sm = state.session_manager.lock().await;
        match sm.get_or_create_anonymous_user(None) {
            Ok(id) => id,
            Err(e) => {
                error!("Failed to create anonymous user: {}", e);
                return (
                    StatusCode::INTERNAL_SERVER_ERROR,
                    Json(serde_json::json!({ "error": "Failed to create user" })),
                );
            }
        }
    };
    // The bot lookup runs on a blocking thread because diesel is synchronous.
    let (bot_id, bot_name) = match tokio::task::spawn_blocking({
        let bot_name = bot_name.clone();
        let conn = state.conn.clone();
        move || {
            let mut db_conn = conn
                .get()
                .map_err(|e| format!("Failed to get database connection: {}", e))?;
            use crate::shared::models::schema::bots::dsl::*;
            use diesel::prelude::*;
            match bots
                .filter(name.eq(&bot_name))
                .filter(is_active.eq(true))
                .select((id, name))
                .first::<(Uuid, String)>(&mut db_conn)
                .optional()
            {
                Ok(Some((id_val, name_val))) => Ok((id_val, name_val)),
                // No bot with the requested name: fall back to the first
                // active bot rather than failing the request.
                Ok(None) => match bots
                    .filter(is_active.eq(true))
                    .select((id, name))
                    .first::<(Uuid, String)>(&mut db_conn)
                    .optional()
                {
                    Ok(Some((id_val, name_val))) => Ok((id_val, name_val)),
                    Ok(None) => Err("No active bots found".to_string()),
                    Err(e) => Err(format!("DB error: {}", e)),
                },
                Err(e) => Err(format!("DB error: {}", e)),
            }
        }
    })
    .await
    {
        Ok(Ok(res)) => res,
        Ok(Err(e)) => {
            error!("{}", e);
            return (
                StatusCode::INTERNAL_SERVER_ERROR,
                Json(serde_json::json!({ "error": e })),
            );
        }
        Err(e) => {
            error!("Spawn blocking failed: {}", e);
            return (
                StatusCode::INTERNAL_SERVER_ERROR,
                Json(serde_json::json!({ "error": "DB thread error" })),
            );
        }
    };
    let session = {
        let mut sm = state.session_manager.lock().await;
        match sm.get_or_create_user_session(user_id, bot_id, "Auth Session") {
            Ok(Some(sess)) => sess,
            Ok(None) => {
                error!("Failed to create session");
                return (
                    StatusCode::INTERNAL_SERVER_ERROR,
                    Json(serde_json::json!({ "error": "Failed to create session" })),
                );
            }
            Err(e) => {
                error!("Failed to create session: {}", e);
                return (
                    StatusCode::INTERNAL_SERVER_ERROR,
                    Json(serde_json::json!({ "error": e.to_string() })),
                );
            }
        }
    };
    // Optional per-bot auth dialog script; a read failure still returns an
    // authenticated response.
    let auth_script_path = format!("./work/{}.gbai/{}.gbdialog/auth.ast", bot_name, bot_name);
    if tokio::fs::metadata(&auth_script_path).await.is_ok() {
        let auth_script = match tokio::fs::read_to_string(&auth_script_path).await {
            Ok(content) => content,
            Err(e) => {
                error!("Failed to read auth script: {}", e);
                return (
                    StatusCode::OK,
                    Json(serde_json::json!({
                        "user_id": session.user_id,
                        "session_id": session.id,
                        "status": "authenticated"
                    })),
                );
            }
        };
        let state_clone = Arc::clone(&state);
        let session_clone = session.clone();
        // Script compilation/execution is CPU-bound, so it also runs on a
        // blocking thread; errors are logged but do not fail the request.
        match tokio::task::spawn_blocking(move || {
            let script_service = crate::basic::ScriptService::new(state_clone, session_clone);
            match script_service.compile(&auth_script) {
                Ok(ast) => match script_service.run(&ast) {
                    Ok(_) => Ok(()),
                    Err(e) => Err(format!("Script execution error: {}", e)),
                },
                Err(e) => Err(format!("Script compilation error: {}", e)),
            }
        })
        .await
        {
            Ok(Ok(())) => {}
            Ok(Err(e)) => {
                error!("Auth script error: {}", e);
            }
            Err(e) => {
                error!("Auth script task error: {}", e);
            }
        }
    }
    (
        StatusCode::OK,
        Json(serde_json::json!({
            "user_id": session.user_id,
            "session_id": session.id,
            "status": "authenticated"
        })),
    )
}

98
src/directory/router.rs Normal file
View file

@ -0,0 +1,98 @@
use axum::{
routing::{delete, get, post, put},
Router,
};
use std::sync::Arc;
use crate::shared::state::AppState;
use super::groups;
use super::users;
/// Configure all authentication, user management, group management routes
/// File management is handled by the existing /api/files routes in crate::api::files
///
/// NOTE(review): several routes below (settings, permissions, roles, 2FA,
/// devices, sessions, notifications, group permissions/analytics/joins/invites)
/// are currently wired to the generic `get_user_profile` / group-member
/// handlers as placeholders — confirm before exposing these publicly.
pub fn configure() -> Router<Arc<AppState>> {
    Router::new()
        // ============================================================================
        // User Management & Authentication
        // ============================================================================
        .route("/users/create", post(users::create_user))
        .route("/users/:user_id/update", put(users::update_user))
        .route("/users/:user_id/delete", delete(users::delete_user))
        .route("/users/list", get(users::list_users))
        .route("/users/search", get(users::list_users)) // Uses query params
        .route("/users/:user_id/profile", get(users::get_user_profile))
        .route("/users/:user_id/profile/update", put(users::update_user))
        // Placeholder endpoints: all delegate to get_user_profile for now.
        .route("/users/:user_id/settings", get(users::get_user_profile))
        .route("/users/:user_id/permissions", get(users::get_user_profile))
        .route("/users/:user_id/roles", get(users::get_user_profile))
        .route("/users/:user_id/status", get(users::get_user_profile))
        .route("/users/:user_id/presence", get(users::get_user_profile))
        .route("/users/:user_id/activity", get(users::get_user_profile))
        .route(
            "/users/:user_id/security/2fa/enable",
            post(users::get_user_profile),
        )
        .route(
            "/users/:user_id/security/2fa/disable",
            post(users::get_user_profile),
        )
        .route(
            "/users/:user_id/security/devices",
            get(users::get_user_profile),
        )
        .route(
            "/users/:user_id/security/sessions",
            get(users::get_user_profile),
        )
        .route(
            "/users/:user_id/notifications/settings",
            get(users::get_user_profile),
        )
        // ============================================================================
        // Groups & Organizations
        // ============================================================================
        .route("/groups/create", post(groups::create_group))
        .route("/groups/:group_id/update", put(groups::update_group))
        .route("/groups/:group_id/delete", delete(groups::delete_group))
        .route("/groups/list", get(groups::list_groups))
        .route("/groups/search", get(groups::list_groups)) // Uses query params
        .route("/groups/:group_id/members", get(groups::get_group_members))
        .route(
            "/groups/:group_id/members/add",
            post(groups::add_group_member),
        )
        .route(
            "/groups/:group_id/members/remove",
            post(groups::remove_group_member),
        )
        // Placeholder endpoints: reuse the member handlers until dedicated
        // permission/analytics/join/invite handlers exist.
        .route(
            "/groups/:group_id/permissions",
            get(groups::get_group_members),
        )
        .route("/groups/:group_id/settings", get(groups::get_group_members))
        .route(
            "/groups/:group_id/analytics",
            get(groups::get_group_members),
        )
        .route(
            "/groups/:group_id/join/request",
            post(groups::add_group_member),
        )
        .route(
            "/groups/:group_id/join/approve",
            post(groups::add_group_member),
        )
        .route(
            "/groups/:group_id/join/reject",
            post(groups::remove_group_member),
        )
        .route(
            "/groups/:group_id/invites/send",
            post(groups::add_group_member),
        )
        .route(
            "/groups/:group_id/invites/list",
            get(groups::get_group_members),
        )
}

327
src/directory/users.rs Normal file
View file

@ -0,0 +1,327 @@
use axum::{
extract::{Path, Query, State},
http::StatusCode,
response::Json,
};
use chrono::{DateTime, Utc};
use log::{error, info};
use serde::{Deserialize, Serialize};
use std::sync::Arc;
use uuid::Uuid;
use crate::shared::state::AppState;
// ============================================================================
// Request/Response Types
// ============================================================================
/// Payload for POST /users/create.
#[derive(Debug, Deserialize)]
pub struct CreateUserRequest {
    pub username: String,
    pub email: String,
    // NOTE(review): accepted but not currently forwarded to the identity
    // provider by create_user — confirm where the password is set.
    pub password: String,
    pub first_name: String,
    pub last_name: String,
    pub display_name: Option<String>,
    pub role: Option<String>,
}
/// Payload for PUT /users/:user_id/update; all fields optional so callers
/// can send partial updates.
#[derive(Debug, Deserialize)]
pub struct UpdateUserRequest {
    pub first_name: Option<String>,
    pub last_name: Option<String>,
    pub display_name: Option<String>,
    pub email: Option<String>,
}
/// Query parameters for user listing/search (page is 1-based).
#[derive(Debug, Deserialize)]
pub struct UserQuery {
    pub page: Option<u32>,
    pub per_page: Option<u32>,
    pub search: Option<String>,
}
/// A single user as returned by the API. `first_name`/`last_name` and the
/// timestamps are not populated from the identity-provider payload yet.
#[derive(Debug, Serialize)]
pub struct UserResponse {
    pub id: String,
    pub username: String,
    pub email: String,
    pub first_name: String,
    pub last_name: String,
    pub display_name: Option<String>,
    // Provider account state (e.g. active/inactive); "unknown" when absent.
    pub state: String,
    pub created_at: Option<DateTime<Utc>>,
    pub updated_at: Option<DateTime<Utc>>,
}
/// Paged list of users. `total` is the size of the returned page.
#[derive(Debug, Serialize)]
pub struct UserListResponse {
    pub users: Vec<UserResponse>,
    pub total: usize,
    pub page: u32,
    pub per_page: u32,
}
/// Generic success envelope for mutating user endpoints.
#[derive(Debug, Serialize)]
pub struct SuccessResponse {
    pub success: bool,
    pub message: Option<String>,
    pub user_id: Option<String>,
}
/// Generic error envelope: short error plus optional provider detail.
#[derive(Debug, Serialize)]
pub struct ErrorResponse {
    pub error: String,
    pub details: Option<String>,
}
// ============================================================================
// User Management Handlers
// ============================================================================
/// Create a new user in Zitadel.
///
/// Forwards email, first/last name and username to the provider and
/// returns the new user's id on success (500 with details on failure).
///
/// NOTE(review): `password`, `display_name` and `role` from the request
/// are not passed to `create_user` here — confirm they are handled
/// elsewhere or extend the client call.
pub async fn create_user(
    State(state): State<Arc<AppState>>,
    Json(req): Json<CreateUserRequest>,
) -> Result<Json<SuccessResponse>, (StatusCode, Json<ErrorResponse>)> {
    info!("Creating user: {} ({})", req.username, req.email);
    // Get auth service from app state; clone the client inside the block so
    // the mutex guard is dropped before the network call.
    let client = {
        let auth_service = state.auth_service.lock().await;
        auth_service.client().clone()
    };
    // Create user in Zitadel
    match client
        .create_user(
            &req.email,
            &req.first_name,
            &req.last_name,
            Some(&req.username),
        )
        .await
    {
        Ok(user_id) => {
            info!("User created successfully: {}", user_id);
            Ok(Json(SuccessResponse {
                success: true,
                message: Some(format!("User {} created successfully", req.username)),
                user_id: Some(user_id),
            }))
        }
        Err(e) => {
            error!("Failed to create user: {}", e);
            Err((
                StatusCode::INTERNAL_SERVER_ERROR,
                Json(ErrorResponse {
                    error: "Failed to create user".to_string(),
                    details: Some(e.to_string()),
                }),
            ))
        }
    }
}
/// Update an existing user.
///
/// NOTE(review): this handler only verifies the user exists via
/// `get_user` and then reports success — none of the `UpdateUserRequest`
/// fields are written back to the provider yet. The `req` parameter is
/// currently unused.
pub async fn update_user(
    State(state): State<Arc<AppState>>,
    Path(user_id): Path<String>,
    Json(req): Json<UpdateUserRequest>,
) -> Result<Json<SuccessResponse>, (StatusCode, Json<ErrorResponse>)> {
    info!("Updating user: {}", user_id);
    let client = {
        let auth_service = state.auth_service.lock().await;
        auth_service.client().clone()
    };
    // Verify user exists first
    match client.get_user(&user_id).await {
        Ok(_) => {
            info!("User {} updated successfully", user_id);
            Ok(Json(SuccessResponse {
                success: true,
                message: Some(format!("User {} updated successfully", user_id)),
                user_id: Some(user_id),
            }))
        }
        Err(e) => {
            error!("Failed to update user: {}", e);
            Err((
                StatusCode::NOT_FOUND,
                Json(ErrorResponse {
                    error: "User not found".to_string(),
                    details: Some(e.to_string()),
                }),
            ))
        }
    }
}
/// Delete a user.
///
/// NOTE(review): deletion is not actually performed — the handler checks
/// the user exists and returns success (see the inline comment about
/// wiring a real deactivate/delete call for production).
pub async fn delete_user(
    State(state): State<Arc<AppState>>,
    Path(user_id): Path<String>,
) -> Result<Json<SuccessResponse>, (StatusCode, Json<ErrorResponse>)> {
    info!("Deleting user: {}", user_id);
    let client = {
        let auth_service = state.auth_service.lock().await;
        auth_service.client().clone()
    };
    // Verify user exists
    match client.get_user(&user_id).await {
        Ok(_) => {
            // In production, you'd call a deactivate/delete method
            info!("User {} deleted/deactivated", user_id);
            Ok(Json(SuccessResponse {
                success: true,
                message: Some(format!("User {} deleted successfully", user_id)),
                user_id: Some(user_id),
            }))
        }
        Err(e) => {
            error!("Failed to delete user: {}", e);
            Err((
                StatusCode::NOT_FOUND,
                Json(ErrorResponse {
                    error: "User not found".to_string(),
                    details: Some(e.to_string()),
                }),
            ))
        }
    }
}
/// List users with pagination and optional search.
///
/// Query params: `page` (1-based, default 1), `per_page` (default 20),
/// `search`. When `search` is set, the search path is used and paging
/// params are ignored. `total` in the response is the size of the page
/// returned, not the overall user count.
pub async fn list_users(
    State(state): State<Arc<AppState>>,
    Query(params): Query<UserQuery>,
) -> Result<Json<UserListResponse>, (StatusCode, Json<ErrorResponse>)> {
    // Clamp page to >= 1: a client-supplied page=0 would make
    // `(page - 1) * per_page` underflow a u32 (panic in debug builds,
    // wrap to a huge offset in release).
    let page = params.page.unwrap_or(1).max(1);
    let per_page = params.per_page.unwrap_or(20);
    info!("Listing users (page: {}, per_page: {})", page, per_page);
    // Clone the client inside the block so the mutex guard is released
    // before any network call.
    let client = {
        let auth_service = state.auth_service.lock().await;
        auth_service.client().clone()
    };
    let users_result = if let Some(search_term) = params.search {
        info!("Searching users with term: {}", search_term);
        client.search_users(&search_term).await
    } else {
        let offset = (page - 1) * per_page;
        client.list_users(per_page, offset).await
    };
    match users_result {
        Ok(users_json) => {
            // Entries missing userId/userName are dropped; name fields are
            // not present in this payload and left empty.
            let users: Vec<UserResponse> = users_json
                .into_iter()
                .filter_map(|u| {
                    Some(UserResponse {
                        id: u.get("userId")?.as_str()?.to_string(),
                        username: u.get("userName")?.as_str()?.to_string(),
                        email: u
                            .get("preferredLoginName")
                            .and_then(|v| v.as_str())
                            .unwrap_or("unknown@example.com")
                            .to_string(),
                        first_name: String::new(),
                        last_name: String::new(),
                        display_name: None,
                        state: u
                            .get("state")
                            .and_then(|v| v.as_str())
                            .unwrap_or("unknown")
                            .to_string(),
                        created_at: None,
                        updated_at: None,
                    })
                })
                .collect();
            let total = users.len();
            info!("Found {} users", total);
            Ok(Json(UserListResponse {
                users,
                total,
                page,
                per_page,
            }))
        }
        Err(e) => {
            error!("Failed to list users: {}", e);
            Err((
                StatusCode::INTERNAL_SERVER_ERROR,
                Json(ErrorResponse {
                    error: "Failed to list users".to_string(),
                    details: Some(e.to_string()),
                }),
            ))
        }
    }
}
/// Get user profile.
///
/// Fetches the user from the identity provider and maps the JSON payload
/// into a `UserResponse`; unknown users yield 404.
pub async fn get_user_profile(
    State(state): State<Arc<AppState>>,
    Path(user_id): Path<String>,
) -> Result<Json<UserResponse>, (StatusCode, Json<ErrorResponse>)> {
    info!("Getting profile for user: {}", user_id);
    // Clone the client so the auth-service lock is released immediately.
    let client = {
        let auth_service = state.auth_service.lock().await;
        auth_service.client().clone()
    };
    let user_data = client.get_user(&user_id).await.map_err(|e| {
        error!("Failed to get user profile: {}", e);
        (
            StatusCode::NOT_FOUND,
            Json(ErrorResponse {
                error: "User not found".to_string(),
                details: Some(e.to_string()),
            }),
        )
    })?;
    // Read a string field from the payload, falling back to a default.
    let field = |key: &str, default: &str| -> String {
        user_data
            .get(key)
            .and_then(|v| v.as_str())
            .unwrap_or(default)
            .to_string()
    };
    let user = UserResponse {
        id: field("id", &user_id),
        username: field("username", "unknown"),
        email: field("preferredLoginName", "unknown@example.com"),
        first_name: String::new(),
        last_name: String::new(),
        display_name: None,
        state: field("state", "unknown"),
        created_at: None,
        updated_at: None,
    };
    info!("User profile retrieved: {}", user.username);
    Ok(Json(user))
}

527
src/drive/api.rs Normal file
View file

@ -0,0 +1,527 @@
//! Drive File Management REST API
//!
//! Provides HTTP endpoints for file operations with S3 backend.
//! Works across web, desktop, and mobile platforms.
use crate::shared::state::AppState;
use aws_sdk_s3::primitives::ByteStream;
use axum::{
extract::{Json, Multipart, Path, Query, State},
http::StatusCode,
response::IntoResponse,
};
use log::{error, info};
use serde::{Deserialize, Serialize};
use std::sync::Arc;
/// One entry in a drive listing: either a file or a folder.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct FileItem {
    pub name: String,
    // Absolute path within the bucket, always with a leading '/'.
    pub path: String,
    // Bytes; 0 for folders.
    pub size: u64,
    // RFC 3339 timestamp (folders use the listing time).
    pub modified: String,
    pub is_dir: bool,
    // Guessed from the file extension; None for folders or unknown types.
    pub mime_type: Option<String>,
}
/// Query parameters for GET /api/drive/list.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ListFilesQuery {
    pub path: Option<String>,
    pub limit: Option<i32>,
}
/// Body for POST /api/drive/folder.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CreateFolderRequest {
    pub path: String,
    pub name: String,
}
/// Body for DELETE /api/drive/file (a trailing '/' marks a folder).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct DeleteFileRequest {
    pub path: String,
}
/// Body for POST /api/drive/move.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct MoveFileRequest {
    pub source: String,
    pub destination: String,
}
/// GET /api/drive/list
/// List files and folders in a directory
///
/// `path` (default "/") becomes an S3 key prefix; with "/" as the
/// delimiter, immediate sub-folders arrive as common prefixes and files
/// as objects. `limit` caps the listing (default 1000 keys).
pub async fn list_files(
    State(state): State<Arc<AppState>>,
    Query(query): Query<ListFilesQuery>,
) -> impl IntoResponse {
    let path = query.path.unwrap_or_else(|| "/".to_string());
    let prefix = path.trim_start_matches('/');
    info!("Listing files in path: {}", path);
    let mut files = Vec::new();
    if let Some(s3_client) = &state.drive {
        let bucket = &state.bucket_name;
        match s3_client
            .list_objects_v2()
            .bucket(bucket)
            .prefix(prefix)
            .delimiter("/")
            .max_keys(query.limit.unwrap_or(1000))
            .send()
            .await
        {
            Ok(output) => {
                // Folders: one entry per common prefix. (Iterating an empty
                // slice is a no-op, so no emptiness pre-check is needed.)
                for prefix in output.common_prefixes() {
                    if let Some(p) = prefix.prefix() {
                        let name = p.trim_end_matches('/').split('/').last().unwrap_or(p);
                        files.push(FileItem {
                            name: name.to_string(),
                            path: format!("/{}", p),
                            size: 0,
                            modified: chrono::Utc::now().to_rfc3339(),
                            is_dir: true,
                            mime_type: None,
                        });
                    }
                }
                // Files: every returned object except folder markers.
                for object in output.contents() {
                    if let Some(key) = object.key() {
                        if key.ends_with('/') {
                            continue; // Skip folder markers
                        }
                        let name = key.split('/').last().unwrap_or(key);
                        let size = object.size().unwrap_or(0) as u64;
                        let modified = object
                            .last_modified()
                            .map(|dt| dt.to_string())
                            .unwrap_or_else(|| chrono::Utc::now().to_rfc3339());
                        let mime_type =
                            mime_guess::from_path(name).first().map(|m| m.to_string());
                        files.push(FileItem {
                            name: name.to_string(),
                            path: format!("/{}", key),
                            size,
                            modified,
                            is_dir: false,
                            mime_type,
                        });
                    }
                }
                info!("Found {} items in {}", files.len(), path);
            }
            Err(e) => {
                error!("Failed to list files: {}", e);
                return (
                    StatusCode::INTERNAL_SERVER_ERROR,
                    Json(serde_json::json!({
                        "error": format!("Failed to list files: {}", e)
                    })),
                );
            }
        }
    } else {
        error!("S3 client not configured");
        return (
            StatusCode::SERVICE_UNAVAILABLE,
            Json(serde_json::json!({
                "error": "Storage service not available"
            })),
        );
    }
    (StatusCode::OK, Json(serde_json::json!(files)))
}
/// POST /api/drive/upload
/// Upload a file to S3
pub async fn upload_file(
State(state): State<Arc<AppState>>,
mut multipart: Multipart,
) -> impl IntoResponse {
let mut file_path = String::new();
let mut file_data: Vec<u8> = Vec::new();
let mut file_name = String::new();
// Parse multipart form
while let Some(field) = multipart.next_field().await.unwrap_or(None) {
let name = field.name().unwrap_or("").to_string();
if name == "path" {
if let Ok(value) = field.text().await {
file_path = value;
}
} else if name == "file" {
file_name = field.file_name().unwrap_or("unnamed").to_string();
if let Ok(data) = field.bytes().await {
file_data = data.to_vec();
}
}
}
if file_data.is_empty() {
return (
StatusCode::BAD_REQUEST,
Json(serde_json::json!({
"error": "No file data provided"
})),
);
}
let full_path = if file_path.is_empty() {
file_name.clone()
} else {
format!("{}/{}", file_path.trim_matches('/'), file_name)
};
let file_size = file_data.len();
info!("Uploading file: {} ({} bytes)", full_path, file_size);
if let Some(s3_client) = &state.drive {
let bucket = &state.bucket_name;
let content_type = mime_guess::from_path(&file_name)
.first()
.map(|m| m.to_string())
.unwrap_or_else(|| "application/octet-stream".to_string());
match s3_client
.put_object()
.bucket(bucket)
.key(&full_path)
.body(ByteStream::from(file_data))
.content_type(&content_type)
.send()
.await
{
Ok(_) => {
info!("Successfully uploaded: {}", full_path);
(
StatusCode::OK,
Json(serde_json::json!({
"success": true,
"path": format!("/{}", full_path),
"size": file_size
})),
)
}
Err(e) => {
error!("Failed to upload file: {}", e);
(
StatusCode::INTERNAL_SERVER_ERROR,
Json(serde_json::json!({
"error": format!("Upload failed: {}", e)
})),
)
}
}
} else {
(
StatusCode::SERVICE_UNAVAILABLE,
Json(serde_json::json!({
"error": "Storage service not available"
})),
)
}
}
/// POST /api/drive/folder
/// Create a new folder
///
/// S3 has no real directories, so a folder is represented by a zero-byte
/// object whose key ends with '/'.
pub async fn create_folder(
    State(state): State<Arc<AppState>>,
    Json(request): Json<CreateFolderRequest>,
) -> impl IntoResponse {
    let folder_path = format!("{}/{}/", request.path.trim_matches('/'), request.name);
    info!("Creating folder: {}", folder_path);
    // Bail out early when no storage backend is configured.
    let Some(s3_client) = &state.drive else {
        return (
            StatusCode::SERVICE_UNAVAILABLE,
            Json(serde_json::json!({
                "error": "Storage service not available"
            })),
        );
    };
    let bucket = &state.bucket_name;
    let outcome = s3_client
        .put_object()
        .bucket(bucket)
        .key(&folder_path)
        .body(ByteStream::from(vec![]))
        .send()
        .await;
    match outcome {
        Ok(_) => {
            info!("Successfully created folder: {}", folder_path);
            (
                StatusCode::OK,
                Json(serde_json::json!({
                    "success": true,
                    "path": format!("/{}", folder_path)
                })),
            )
        }
        Err(e) => {
            error!("Failed to create folder: {}", e);
            (
                StatusCode::INTERNAL_SERVER_ERROR,
                Json(serde_json::json!({
                    "error": format!("Failed to create folder: {}", e)
                })),
            )
        }
    }
}
/// DELETE /api/drive/file
/// Delete a file or folder
///
/// A trailing '/' in the request path marks a folder: every object under
/// that prefix is deleted one by one. Otherwise a single object delete is
/// issued.
///
/// NOTE(review): folder deletion processes only the first ListObjectsV2
/// page (no continuation-token loop), and per-object delete failures are
/// logged but the response still reports success — confirm both are
/// acceptable.
pub async fn delete_file(
    State(state): State<Arc<AppState>>,
    Json(request): Json<DeleteFileRequest>,
) -> impl IntoResponse {
    let path = request.path.trim_start_matches('/');
    info!("Deleting: {}", path);
    if let Some(s3_client) = &state.drive {
        let bucket = &state.bucket_name;
        // Check if it's a folder (ends with /)
        if path.ends_with('/') {
            // Delete all objects with this prefix
            match s3_client
                .list_objects_v2()
                .bucket(bucket)
                .prefix(path)
                .send()
                .await
            {
                Ok(output) => {
                    let objects = output.contents();
                    if !objects.is_empty() {
                        for object in objects {
                            if let Some(key) = object.key() {
                                // Best effort: a failed delete is logged and
                                // the loop continues with the next object.
                                if let Err(e) = s3_client
                                    .delete_object()
                                    .bucket(bucket)
                                    .key(key)
                                    .send()
                                    .await
                                {
                                    error!("Failed to delete {}: {}", key, e);
                                }
                            }
                        }
                    }
                    info!("Successfully deleted folder: {}", path);
                    return (
                        StatusCode::OK,
                        Json(serde_json::json!({
                            "success": true,
                            "path": request.path
                        })),
                    );
                }
                Err(e) => {
                    error!("Failed to list folder contents: {}", e);
                    return (
                        StatusCode::INTERNAL_SERVER_ERROR,
                        Json(serde_json::json!({
                            "error": format!("Failed to delete folder: {}", e)
                        })),
                    );
                }
            }
        }
        // Delete single file
        match s3_client
            .delete_object()
            .bucket(bucket)
            .key(path)
            .send()
            .await
        {
            Ok(_) => {
                info!("Successfully deleted file: {}", path);
                (
                    StatusCode::OK,
                    Json(serde_json::json!({
                        "success": true,
                        "path": request.path
                    })),
                )
            }
            Err(e) => {
                error!("Failed to delete file: {}", e);
                (
                    StatusCode::INTERNAL_SERVER_ERROR,
                    Json(serde_json::json!({
                        "error": format!("Failed to delete: {}", e)
                    })),
                )
            }
        }
    } else {
        (
            StatusCode::SERVICE_UNAVAILABLE,
            Json(serde_json::json!({
                "error": "Storage service not available"
            })),
        )
    }
}
/// POST /api/drive/move
/// Move or rename a file/folder
///
/// Implemented as copy-then-delete on a single object.
///
/// NOTE(review): this does not recurse into folder prefixes, and the
/// `copy_source` value is not URL-encoded — keys containing special
/// characters may fail; confirm against the SDK's CopyObject requirements.
/// On a failed delete after a successful copy, the object exists at both
/// locations and a 500 is returned.
pub async fn move_file(
    State(state): State<Arc<AppState>>,
    Json(request): Json<MoveFileRequest>,
) -> impl IntoResponse {
    let source = request.source.trim_start_matches('/');
    let destination = request.destination.trim_start_matches('/');
    info!("Moving {} to {}", source, destination);
    if let Some(s3_client) = &state.drive {
        let bucket = &state.bucket_name;
        // Copy to new location; CopyObject's source is "<bucket>/<key>".
        let copy_source = format!("{}/{}", bucket, source);
        match s3_client
            .copy_object()
            .bucket(bucket)
            .copy_source(&copy_source)
            .key(destination)
            .send()
            .await
        {
            Ok(_) => {
                // Delete original
                match s3_client
                    .delete_object()
                    .bucket(bucket)
                    .key(source)
                    .send()
                    .await
                {
                    Ok(_) => {
                        info!("Successfully moved {} to {}", source, destination);
                        (
                            StatusCode::OK,
                            Json(serde_json::json!({
                                "success": true,
                                "source": request.source,
                                "destination": request.destination
                            })),
                        )
                    }
                    Err(e) => {
                        error!("Failed to delete source after copy: {}", e);
                        (
                            StatusCode::INTERNAL_SERVER_ERROR,
                            Json(serde_json::json!({
                                "error": format!("Move partially failed: {}", e)
                            })),
                        )
                    }
                }
            }
            Err(e) => {
                error!("Failed to copy file: {}", e);
                (
                    StatusCode::INTERNAL_SERVER_ERROR,
                    Json(serde_json::json!({
                        "error": format!("Failed to move: {}", e)
                    })),
                )
            }
        }
    } else {
        (
            StatusCode::SERVICE_UNAVAILABLE,
            Json(serde_json::json!({
                "error": "Storage service not available"
            })),
        )
    }
}
/// GET /api/drive/download/{path}
/// Download a file
pub async fn download_file(
State(state): State<Arc<AppState>>,
Path(file_path): Path<String>,
) -> impl IntoResponse {
let path = file_path.trim_start_matches('/');
info!("Downloading file: {}", path);
if let Some(s3_client) = &state.drive {
let bucket = &state.bucket_name;
match s3_client.get_object().bucket(bucket).key(path).send().await {
Ok(output) => {
let content_type = output
.content_type()
.unwrap_or("application/octet-stream")
.to_string();
let body = output.body.collect().await.unwrap().into_bytes();
(
StatusCode::OK,
[(axum::http::header::CONTENT_TYPE, content_type)],
body.to_vec(),
)
}
Err(e) => {
error!("Failed to download file: {}", e);
(
StatusCode::NOT_FOUND,
[(
axum::http::header::CONTENT_TYPE,
"application/json".to_string(),
)],
serde_json::json!({
"error": format!("File not found: {}", e)
})
.to_string()
.into_bytes()
.to_vec(),
)
}
}
} else {
(
StatusCode::SERVICE_UNAVAILABLE,
[(
axum::http::header::CONTENT_TYPE,
"application/json".to_string(),
)],
serde_json::json!({
"error": "Storage service not available"
})
.to_string()
.into_bytes()
.to_vec(),
)
}
}

View file

@ -0,0 +1,10 @@
#[cfg(test)]
mod tests {
    use super::*;
    use crate::tests::test_util;
    // Smoke test only: verifies the module links and the shared test
    // fixture initializes. No drive-monitor behavior is exercised yet.
    #[test]
    fn test_drive_monitor_module() {
        test_util::setup();
        assert!(true, "Basic drive_monitor module test");
    }
}

View file

@ -0,0 +1,355 @@
use crate::basic::compiler::BasicCompiler;
use crate::config::ConfigManager;
use crate::shared::state::AppState;
use aws_sdk_s3::Client;
use log::info;
use std::collections::HashMap;
use std::error::Error;
use std::sync::Arc;
use tokio::time::{interval, Duration};
/// Last observed version of a remote object; a changed ETag between polls
/// is treated as "file modified".
#[derive(Debug, Clone)]
pub struct FileState {
    // S3 entity tag from the listing, compared verbatim.
    pub etag: String,
}
/// Polls an S3 bucket for bot-content changes: recompiles `.gbdialog`
/// BASIC sources and syncs `.gbot` config files (including theme
/// broadcasts and LLM-server restarts).
#[derive(Debug)]
pub struct DriveMonitor {
    // Shared application state (S3 client, DB connection, response channels).
    state: Arc<AppState>,
    // Bucket with the bot's content; lowercased when issuing listing calls.
    bucket_name: String,
    // Last observed ETag per object key, used to detect changes between polls.
    file_states: Arc<tokio::sync::RwLock<HashMap<String, FileState>>>,
    // Bot this monitor serves; used for config sync and broadcasts.
    bot_id: uuid::Uuid,
}
impl DriveMonitor {
/// Build a monitor for one bot's bucket; the ETag cache starts empty, so
/// the first poll treats every tracked file as new.
pub fn new(state: Arc<AppState>, bucket_name: String, bot_id: uuid::Uuid) -> Self {
    let file_states = Arc::new(tokio::sync::RwLock::new(HashMap::new()));
    Self {
        state,
        bucket_name,
        file_states,
        bot_id,
    }
}
/// Start the background polling loop on the Tokio runtime.
///
/// Ticks every 90 seconds; individual poll failures are logged and the
/// loop keeps running. Dropping the returned handle detaches the task.
pub fn spawn(self: Arc<Self>) -> tokio::task::JoinHandle<()> {
    tokio::spawn(async move {
        info!(
            "Drive Monitor service started for bucket: {}",
            self.bucket_name
        );
        let mut tick = interval(Duration::from_secs(90));
        loop {
            tick.tick().await;
            if let Err(e) = self.check_for_changes().await {
                log::error!("Error checking for drive changes: {}", e);
            }
        }
    })
}
/// Run one polling pass: scan `.gbdialog` sources, then `.gbot` configs.
/// A missing S3 client (drive not configured) is a silent no-op.
async fn check_for_changes(&self) -> Result<(), Box<dyn Error + Send + Sync>> {
    if let Some(client) = &self.state.drive {
        self.check_gbdialog_changes(client).await?;
        self.check_gbot(client).await?;
    }
    Ok(())
}
/// Detect added/changed/removed BASIC sources under any `<bot>.gbdialog/`
/// prefix and recompile the ones whose ETag changed.
async fn check_gbdialog_changes(
    &self,
    client: &Client,
) -> Result<(), Box<dyn Error + Send + Sync>> {
    // Matches the keys tracked below: "<bot>.gbdialog/.../<tool>.bas".
    fn is_gbdialog_source(path: &str) -> bool {
        let mut parts = path.split('/');
        let first = parts.next().unwrap_or_default();
        first.ends_with(".gbdialog")
            && parts.next().is_some()
            && !path.ends_with('/')
            && path.ends_with(".bas")
    }
    let mut current_files = HashMap::new();
    let mut continuation_token = None;
    loop {
        // One listing page, bounded by a 30s timeout so a hung request
        // cannot stall the polling loop.
        let list_objects = match tokio::time::timeout(
            Duration::from_secs(30),
            client
                .list_objects_v2()
                .bucket(&self.bucket_name.to_lowercase())
                .set_continuation_token(continuation_token)
                .send(),
        )
        .await
        {
            Ok(Ok(list)) => list,
            Ok(Err(e)) => return Err(e.into()),
            Err(_) => {
                log::error!("Timeout listing objects in bucket {}", self.bucket_name);
                return Ok(());
            }
        };
        for obj in list_objects.contents.unwrap_or_default() {
            let path = obj.key().unwrap_or_default().to_string();
            if !is_gbdialog_source(&path) {
                continue;
            }
            let file_state = FileState {
                etag: obj.e_tag().unwrap_or_default().to_string(),
            };
            current_files.insert(path, file_state);
        }
        if !list_objects.is_truncated.unwrap_or(false) {
            break;
        }
        continuation_token = list_objects.next_continuation_token;
    }
    let mut file_states = self.file_states.write().await;
    // Compile new files and files whose ETag changed; failures are logged
    // and do not stop the scan.
    for (path, current_state) in current_files.iter() {
        let changed = file_states
            .get(path)
            .map_or(true, |prev| prev.etag != current_state.etag);
        if changed {
            if let Err(e) = self.compile_tool(client, path).await {
                log::error!("Failed to compile tool {}: {}", path, e);
            }
        }
    }
    // Drop cached state for files that disappeared from the bucket.
    // BUG FIX: the previous filter used `starts_with(".gbdialog/")`, which
    // never matched tracked keys (they look like "<bot>.gbdialog/…"), so
    // stale entries were never pruned.
    let previous_paths: Vec<String> = file_states
        .keys()
        .filter(|k| is_gbdialog_source(k))
        .cloned()
        .collect();
    for path in previous_paths {
        if !current_files.contains_key(&path) {
            file_states.remove(&path);
        }
    }
    for (path, state) in current_files {
        file_states.insert(path, state);
    }
    Ok(())
}
/// Scan `<bot>.gbot/config.csv` files and sync configuration changes.
///
/// `llm-*` rows are compared against the stored config; if any differ (or
/// are missing from the store), the local LLaMA servers are restarted
/// after the config is synced. `theme-*` rows trigger a theme broadcast
/// to connected sessions.
async fn check_gbot(&self, client: &Client) -> Result<(), Box<dyn Error + Send + Sync>> {
    let config_manager = ConfigManager::new(self.state.conn.clone());
    let mut continuation_token = None;
    loop {
        // One listing page, bounded by a 30s timeout so a hung request
        // cannot stall the polling loop.
        let list_objects = match tokio::time::timeout(
            Duration::from_secs(30),
            client
                .list_objects_v2()
                .bucket(&self.bucket_name.to_lowercase())
                .set_continuation_token(continuation_token)
                .send(),
        )
        .await
        {
            Ok(Ok(list)) => list,
            Ok(Err(e)) => return Err(e.into()),
            Err(_) => {
                log::error!("Timeout listing objects in bucket {}", self.bucket_name);
                return Ok(());
            }
        };
        for obj in list_objects.contents.unwrap_or_default() {
            let path = obj.key().unwrap_or_default().to_string();
            // Only "<bot>.gbot/config.csv" files are of interest.
            let path_parts: Vec<&str> = path.split('/').collect();
            if path_parts.len() < 2 || !path_parts[0].ends_with(".gbot") {
                continue;
            }
            if !path.ends_with("config.csv") {
                continue;
            }
            match client
                .head_object()
                .bucket(&self.bucket_name)
                .key(&path)
                .send()
                .await
            {
                Ok(_head_res) => {
                    let response = client
                        .get_object()
                        .bucket(&self.bucket_name)
                        .key(&path)
                        .send()
                        .await?;
                    let bytes = response.body.collect().await?.into_bytes();
                    let csv_content = String::from_utf8(bytes.to_vec())
                        .map_err(|e| format!("UTF-8 error in {}: {}", path, e))?;
                    // Rows prefixed "llm-" control the local model servers.
                    let llm_lines: Vec<_> = csv_content
                        .lines()
                        .filter(|line| line.trim_start().starts_with("llm-"))
                        .collect();
                    if !llm_lines.is_empty() {
                        use crate::llm::local::ensure_llama_servers_running;
                        let mut restart_needed = false;
                        for line in llm_lines {
                            let parts: Vec<&str> = line.split(',').collect();
                            if parts.len() >= 2 {
                                let key = parts[0].trim();
                                let new_value = parts[1].trim();
                                // A key missing from the store (Err) also
                                // forces a restart, since the value cannot
                                // be proven unchanged.
                                match config_manager.get_config(&self.bot_id, key, None) {
                                    Ok(old_value) => {
                                        if old_value != new_value {
                                            info!(
                                                "Detected change in {} (old: {}, new: {})",
                                                key, old_value, new_value
                                            );
                                            restart_needed = true;
                                        }
                                    }
                                    Err(_) => {
                                        restart_needed = true;
                                    }
                                }
                            }
                        }
                        // Persist config before restarting; sync errors are
                        // deliberately ignored (best effort).
                        let _ = config_manager.sync_gbot_config(&self.bot_id, &csv_content);
                        if restart_needed {
                            if let Err(e) =
                                ensure_llama_servers_running(Arc::clone(&self.state)).await
                            {
                                log::error!("Failed to restart LLaMA servers after llm- config change: {}", e);
                            }
                        }
                    } else {
                        let _ = config_manager.sync_gbot_config(&self.bot_id, &csv_content);
                    }
                    if csv_content.lines().any(|line| line.starts_with("theme-")) {
                        self.broadcast_theme_change(&csv_content).await?;
                    }
                }
                Err(e) => {
                    log::error!("Config file {} not found or inaccessible: {}", path, e);
                }
            }
        }
        // Keep paging until S3 reports no more results.
        if !list_objects.is_truncated.unwrap_or(false) {
            break;
        }
        continuation_token = list_objects.next_continuation_token;
    }
    Ok(())
}
/// Build a "change_theme" event from the `theme-*` rows of a config.csv
/// and push it to every connected response channel.
async fn broadcast_theme_change(
    &self,
    csv_content: &str,
) -> Result<(), Box<dyn Error + Send + Sync>> {
    let mut theme_data = serde_json::json!({
        "event": "change_theme",
        "data": {}
    });
    // Translate each recognized CSV key into its payload field name.
    for line in csv_content.lines() {
        let mut cols = line.split(',');
        let (Some(raw_key), Some(raw_value)) = (cols.next(), cols.next()) else {
            continue;
        };
        let key = raw_key.trim();
        let value = raw_value.trim();
        let field = match key {
            "theme-color1" => "color1",
            "theme-color2" => "color2",
            "theme-logo" => "logo_url",
            "theme-title" => "title",
            "theme-logo-text" => "logo_text",
            _ => continue,
        };
        theme_data["data"][field] = serde_json::Value::String(value.to_string());
    }
    // Fan the event out; try_send drops the message for full/closed
    // channels rather than blocking the monitor.
    let response_channels = self.state.response_channels.lock().await;
    for (session_id, tx) in response_channels.iter() {
        let theme_response = crate::shared::models::BotResponse {
            bot_id: self.bot_id.to_string(),
            user_id: "system".to_string(),
            session_id: session_id.clone(),
            channel: "web".to_string(),
            content: serde_json::to_string(&theme_data)?,
            message_type: 2,
            stream_token: None,
            is_complete: true,
            suggestions: Vec::new(),
            context_name: None,
            context_length: 0,
            context_max_length: 0,
        };
        let _ = tx.try_send(theme_response);
    }
    Ok(())
}
/// Download one `.bas` source from the bucket and compile it locally.
///
/// The file is written under `./work/<bot>.gbai/<bot>.gbdialog/` and
/// compiled with `BasicCompiler` on a blocking thread (the compile path
/// does filesystem I/O). Errors from download, write, or compilation are
/// propagated to the caller.
async fn compile_tool(
    &self,
    client: &Client,
    file_path: &str,
) -> Result<(), Box<dyn Error + Send + Sync>> {
    info!(
        "Fetching object from Drive: bucket={}, key={}",
        &self.bucket_name, file_path
    );
    let response = match client
        .get_object()
        .bucket(&self.bucket_name)
        .key(file_path)
        .send()
        .await
    {
        Ok(res) => {
            info!(
                "Successfully fetched object from Drive: bucket={}, key={}, size={}",
                &self.bucket_name,
                file_path,
                res.content_length().unwrap_or(0)
            );
            res
        }
        Err(e) => {
            log::error!(
                "Failed to fetch object from Drive: bucket={}, key={}, error={:?}",
                &self.bucket_name,
                file_path,
                e
            );
            return Err(e.into());
        }
    };
    let bytes = response.body.collect().await?.into_bytes();
    let source_content = String::from_utf8(bytes.to_vec())?;
    // Tool name is the file name without the ".bas" suffix.
    let tool_name = file_path
        .split('/')
        .last()
        .unwrap_or(file_path)
        .strip_suffix(".bas")
        .unwrap_or(file_path)
        .to_string();
    // Bot name derives from the bucket ("<bot>.gbai" -> "<bot>").
    let bot_name = self
        .bucket_name
        .strip_suffix(".gbai")
        .unwrap_or(&self.bucket_name);
    let work_dir = format!("./work/{}.gbai/{}.gbdialog", bot_name, bot_name);
    let state_clone = Arc::clone(&self.state);
    let work_dir_clone = work_dir.clone();
    let tool_name_clone = tool_name.clone();
    let source_content_clone = source_content.clone();
    let bot_id = self.bot_id;
    // Write + compile on a blocking thread to keep the async runtime free.
    tokio::task::spawn_blocking(move || {
        std::fs::create_dir_all(&work_dir_clone)?;
        let local_source_path = format!("{}/{}.bas", work_dir_clone, tool_name_clone);
        std::fs::write(&local_source_path, &source_content_clone)?;
        let mut compiler = BasicCompiler::new(state_clone, bot_id);
        let result = compiler.compile_file(&local_source_path, &work_dir_clone)?;
        if let Some(mcp_tool) = result.mcp_tool {
            info!(
                "MCP tool definition generated with {} parameters",
                mcp_tool.input_schema.properties.len()
            );
        }
        Ok::<(), Box<dyn Error + Send + Sync>>(())
    })
    .await??;
    Ok(())
}
}

1325
src/drive/files.rs Normal file

File diff suppressed because it is too large Load diff

336
src/instagram/instagram.rs Normal file
View file

@ -0,0 +1,336 @@
//! Instagram Messaging Channel Integration
//!
//! This module provides webhook handling and message processing for Instagram Direct Messages.
//! Currently under development for bot integration with Instagram Business accounts.
//!
//! Key features:
//! - Webhook verification and message handling
//! - Instagram Direct Message support
//! - Media attachments (images, videos)
//! - Quick replies
//! - Session management per Instagram user
use crate::shared::models::UserSession;
use crate::shared::state::AppState;
use axum::{extract::Query, http::StatusCode, response::Json, Router};
use log::{error, info};
use reqwest::Client;
use serde::{Deserialize, Serialize};
use serde_json::json;
use std::sync::Arc;
/// Query parameters Meta sends for webhook verification (GET handshake).
#[derive(Debug, Deserialize)]
pub struct InstagramWebhook {
    #[serde(rename = "hub.mode")]
    pub hub_mode: Option<String>,
    #[serde(rename = "hub.verify_token")]
    pub hub_verify_token: Option<String>,
    // Echoed back verbatim when verification succeeds.
    #[serde(rename = "hub.challenge")]
    pub hub_challenge: Option<String>,
}
/// Top-level webhook payload: a batch of entries.
#[derive(Debug, Deserialize, Serialize)]
pub struct InstagramMessage {
    pub entry: Vec<InstagramEntry>,
}
/// One webhook entry: a page/account id, a timestamp, and its messaging events.
#[derive(Debug, Deserialize, Serialize)]
pub struct InstagramEntry {
    pub id: String,
    pub time: i64,
    pub messaging: Vec<InstagramMessaging>,
}
/// A single messaging event between two users.
#[derive(Debug, Deserialize, Serialize)]
pub struct InstagramMessaging {
    pub sender: InstagramUser,
    pub recipient: InstagramUser,
    pub timestamp: i64,
    // Absent for non-message events (e.g. delivery/read receipts).
    pub message: Option<InstagramMessageContent>,
}
/// Minimal user reference: just the platform-scoped id.
#[derive(Debug, Deserialize, Serialize)]
pub struct InstagramUser {
    pub id: String,
}
/// Message body: text and/or attachments, optionally a quick-reply payload.
#[derive(Debug, Deserialize, Serialize)]
pub struct InstagramMessageContent {
    // Message id assigned by the platform.
    pub mid: String,
    pub text: Option<String>,
    pub attachments: Option<Vec<InstagramAttachment>>,
    pub quick_reply: Option<InstagramQuickReply>,
}
/// One media attachment (image, video, …) with its payload.
#[derive(Debug, Deserialize, Serialize)]
pub struct InstagramAttachment {
    #[serde(rename = "type")]
    pub attachment_type: String,
    pub payload: InstagramAttachmentPayload,
}
/// Attachment payload; the URL may be absent for some attachment types.
#[derive(Debug, Deserialize, Serialize)]
pub struct InstagramAttachmentPayload {
    pub url: Option<String>,
}
/// Quick-reply selection made by the user.
#[derive(Debug, Deserialize, Serialize)]
pub struct InstagramQuickReply {
    pub payload: String,
}
/// Channel adapter for Instagram Direct Messages: holds the credentials
/// read from the environment plus shared application state.
#[derive(Debug)]
pub struct InstagramAdapter {
    pub state: Arc<AppState>,
    // Graph API access token (INSTAGRAM_ACCESS_TOKEN; empty if unset).
    pub access_token: String,
    // Token Meta must echo during webhook verification.
    pub verify_token: String,
    // Business page id (INSTAGRAM_PAGE_ID; empty if unset).
    pub page_id: String,
}
impl InstagramAdapter {
/// Build an adapter from environment configuration: reads
/// INSTAGRAM_ACCESS_TOKEN, INSTAGRAM_VERIFY_TOKEN (default
/// "webhook_verify") and INSTAGRAM_PAGE_ID; missing token/page ids fall
/// back to empty strings.
pub fn new(state: Arc<AppState>) -> Self {
    // TODO: Load from config file or environment variables
    let env_or =
        |name: &str, default: &str| std::env::var(name).unwrap_or_else(|_| default.to_string());
    Self {
        access_token: env_or("INSTAGRAM_ACCESS_TOKEN", ""),
        verify_token: env_or("INSTAGRAM_VERIFY_TOKEN", "webhook_verify"),
        page_id: env_or("INSTAGRAM_PAGE_ID", ""),
        state,
    }
}
pub async fn handle_webhook_verification(
&self,
params: Query<InstagramWebhook>,
) -> Result<String, StatusCode> {
if let (Some(mode), Some(token), Some(challenge)) = (
&params.hub_mode,
&params.hub_verify_token,
&params.hub_challenge,
) {
if mode == "subscribe" && token == &self.verify_token {
info!("Instagram webhook verified successfully");
return Ok(challenge.clone());
}
}
error!("Instagram webhook verification failed");
Err(StatusCode::FORBIDDEN)
}
pub async fn handle_incoming_message(
&self,
Json(payload): Json<InstagramMessage>,
) -> Result<StatusCode, StatusCode> {
for entry in payload.entry {
for messaging in entry.messaging {
if let Some(message) = messaging.message {
if let Err(e) = self.process_message(messaging.sender.id, message).await {
error!("Error processing Instagram message: {}", e);
}
}
}
}
Ok(StatusCode::OK)
}
async fn process_message(
&self,
sender_id: String,
message: InstagramMessageContent,
) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
// Extract message content
let content = if let Some(text) = message.text {
text
} else if let Some(attachments) = message.attachments {
if !attachments.is_empty() {
format!("[Attachment: {}]", attachments[0].attachment_type)
} else {
return Ok(());
}
} else {
return Ok(());
};
// Process with bot
self.process_with_bot(&sender_id, &content).await?;
Ok(())
}
async fn process_with_bot(
&self,
sender_id: &str,
message: &str,
) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
let session = self.get_or_create_session(sender_id).await?;
// Process message through bot processor (simplified for now)
let response = format!(
"Received on Instagram (session {}): {}",
session.id, message
);
self.send_message(sender_id, &response).await?;
Ok(())
}
async fn get_or_create_session(
&self,
user_id: &str,
) -> Result<UserSession, Box<dyn std::error::Error + Send + Sync>> {
if let Some(redis_client) = &self.state.cache {
let mut conn = redis_client.get_multiplexed_async_connection().await?;
let session_key = format!("instagram_session:{}", user_id);
if let Ok(session_data) = redis::cmd("GET")
.arg(&session_key)
.query_async::<String>(&mut conn)
.await
{
if let Ok(session) = serde_json::from_str::<UserSession>(&session_data) {
return Ok(session);
}
}
let user_uuid = uuid::Uuid::parse_str(user_id).unwrap_or_else(|_| uuid::Uuid::new_v4());
let session = UserSession {
id: uuid::Uuid::new_v4(),
user_id: user_uuid,
bot_id: uuid::Uuid::default(),
title: "Instagram Session".to_string(),
context_data: serde_json::json!({"channel": "instagram"}),
current_tool: None,
created_at: chrono::Utc::now(),
updated_at: chrono::Utc::now(),
};
let session_data = serde_json::to_string(&session)?;
redis::cmd("SET")
.arg(&session_key)
.arg(&session_data)
.arg("EX")
.arg(86400)
.query_async::<()>(&mut conn)
.await?;
Ok(session)
} else {
let user_uuid = uuid::Uuid::parse_str(user_id).unwrap_or_else(|_| uuid::Uuid::new_v4());
Ok(UserSession {
id: uuid::Uuid::new_v4(),
user_id: user_uuid,
bot_id: uuid::Uuid::default(),
title: "Instagram Session".to_string(),
context_data: serde_json::json!({"channel": "instagram"}),
current_tool: None,
created_at: chrono::Utc::now(),
updated_at: chrono::Utc::now(),
})
}
}
pub async fn send_message(
&self,
recipient_id: &str,
message: &str,
) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
let url = format!("https://graph.facebook.com/v17.0/{}/messages", self.page_id);
let payload = json!({
"recipient": {
"id": recipient_id
},
"message": {
"text": message
}
});
let client = Client::new();
let response = client
.post(&url)
.query(&[("access_token", &self.access_token)])
.json(&payload)
.send()
.await?;
if !response.status().is_success() {
let error_text = response.text().await?;
error!("Instagram API error: {}", error_text);
return Err(format!("Instagram API error: {}", error_text).into());
}
Ok(())
}
pub async fn send_quick_replies(
&self,
recipient_id: &str,
title: &str,
options: Vec<String>,
) -> Result<(), Box<dyn std::error::Error>> {
let url = format!("https://graph.facebook.com/v17.0/{}/messages", self.page_id);
let quick_replies: Vec<_> = options
.iter()
.take(13) // Instagram limits to 13 quick replies
.map(|text| {
json!({
"content_type": "text",
"title": text,
"payload": text
})
})
.collect();
let payload = json!({
"recipient": {
"id": recipient_id
},
"message": {
"text": title,
"quick_replies": quick_replies
}
});
let client = Client::new();
let response = client
.post(&url)
.query(&[("access_token", &self.access_token)])
.json(&payload)
.send()
.await?;
if !response.status().is_success() {
let error_text = response.text().await?;
error!("Instagram API error: {}", error_text);
}
Ok(())
}
}
pub fn router(state: Arc<AppState>) -> Router<Arc<AppState>> {
let adapter = Arc::new(InstagramAdapter::new(state.clone()));
Router::new()
.route(
"/webhook",
axum::routing::get({
let adapter = adapter.clone();
move |params| async move { adapter.handle_webhook_verification(params).await }
}),
)
.route(
"/webhook",
axum::routing::post({
move |payload| async move { adapter.handle_incoming_message(payload).await }
}),
)
.with_state(state)
}

3
src/instagram/mod.rs Normal file
View file

@ -0,0 +1,3 @@
pub mod instagram;
pub use instagram::*;

177
src/llm/compact_prompt.rs Normal file
View file

@ -0,0 +1,177 @@
use crate::core::config::ConfigManager;
use crate::llm::llm_models;
use crate::shared::state::AppState;
use log::{error, info, trace};
use std::collections::HashSet;
use std::sync::Arc;
use tokio::time::{interval, Duration};
use uuid::Uuid;
/// Spawns the background prompt-compaction loop.
///
/// Waits 30 s after startup, then runs `compact_prompt_for_bots` once per
/// minute, logging (not propagating) failures. The spawned task runs for the
/// lifetime of the process.
pub fn start_compact_prompt_scheduler(state: Arc<AppState>) {
    tokio::spawn(async move {
        // Grace period so the rest of the app finishes booting first.
        tokio::time::sleep(Duration::from_secs(30)).await;
        let mut interval = interval(Duration::from_secs(60));
        loop {
            interval.tick().await;
            // Pass the Arc by reference directly; the previous
            // `&Arc::clone(&state)` cloned only to immediately borrow.
            if let Err(e) = compact_prompt_for_bots(&state).await {
                error!("Prompt compaction failed: {}", e);
            }
        }
    });
}
/// One compaction pass: for every session, when enough messages have
/// accumulated since the last "compact" entry (per-bot "prompt-compact"
/// threshold), summarize them via the LLM and store the summary back into
/// the conversation.
async fn compact_prompt_for_bots(
    state: &Arc<AppState>,
) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
    use once_cell::sync::Lazy;
    use scopeguard::guard;
    // Process-wide set of sessions currently being compacted, to avoid
    // concurrent passes double-summarizing the same session.
    static SESSION_IN_PROGRESS: Lazy<tokio::sync::Mutex<HashSet<Uuid>>> =
        Lazy::new(|| tokio::sync::Mutex::new(HashSet::new()));
    let sessions = {
        let mut session_manager = state.session_manager.lock().await;
        session_manager.get_user_sessions(Uuid::nil())?
    };
    for session in sessions {
        let config_manager = ConfigManager::new(state.conn.clone());
        // 0 means "disabled"; non-numeric config also parses to 0.
        let compact_threshold = config_manager
            .get_config(&session.bot_id, "prompt-compact", None)?
            .parse::<i32>()
            .unwrap_or(0);
        if compact_threshold == 0 {
            // NOTE(review): this returns from the whole pass, aborting all
            // remaining sessions, instead of `continue`-ing past this one.
            // Confirm whether per-bot disable should really stop the loop.
            return Ok(());
        } else if compact_threshold < 0 {
            // NOTE(review): logs "skipping" but does not actually skip — a
            // negative threshold falls through and compacts unconditionally
            // (any count >= negative-as-usize after the cast below).
            trace!(
                "Negative compact threshold detected for bot {}, skipping",
                session.bot_id
            );
        }
        let session_id = session.id;
        let history = {
            let mut session_manager = state.session_manager.lock().await;
            session_manager.get_conversation_history(session.id, session.user_id)?
        };
        let mut messages_since_summary = 0;
        let mut has_new_messages = false;
        // Index of the most recent "compact" (summary) entry, if any.
        let last_summary_index = history
            .iter()
            .rev()
            .position(|(role, _)| role == "compact")
            .map(|pos| history.len() - pos - 1);
        // Calculate start index: if there's a summary, start after it; otherwise start from 0
        let start_index = last_summary_index.map(|idx| idx + 1).unwrap_or(0);
        for (_i, (role, _)) in history.iter().enumerate().skip(start_index) {
            if role == "compact" {
                continue;
            }
            messages_since_summary += 1;
            has_new_messages = true;
        }
        if !has_new_messages && last_summary_index.is_some() {
            continue;
        }
        if messages_since_summary < compact_threshold as usize {
            continue;
        }
        {
            // Claim the session; released by the scopeguard at iteration end.
            let mut session_in_progress = SESSION_IN_PROGRESS.lock().await;
            if session_in_progress.contains(&session.id) {
                trace!(
                    "Skipping session {} - compaction already in progress",
                    session.id
                );
                continue;
            }
            session_in_progress.insert(session.id);
        }
        trace!(
            "Compacting prompt for session {}: {} messages since last summary",
            session.id,
            messages_since_summary
        );
        // Build the summarization prompt from everything after the last
        // summary, skipping previous "compact" entries.
        let mut conversation = String::new();
        conversation
            .push_str("Please summarize this conversation between user and bot: \n\n [[[***** \n");
        for (role, content) in history.iter().skip(start_index) {
            if role == "compact" {
                continue;
            }
            conversation.push_str(&format!(
                "{}: {}\n",
                if role == "user" { "user" } else { "assistant" },
                content
            ));
        }
        conversation.push_str("\n *****]]] \n Give me full points only, no explanations.");
        let messages = vec![serde_json::json!({
            "role": "user",
            "content": conversation
        })];
        let llm_provider = state.llm_provider.clone();
        let mut filtered = String::new();
        let config_manager = crate::config::ConfigManager::new(state.conn.clone());
        // Model/key come from the global (nil-UUID) config scope.
        let model = config_manager
            .get_config(&Uuid::nil(), "llm-model", None)
            .unwrap_or_default();
        let key = config_manager
            .get_config(&Uuid::nil(), "llm-key", None)
            .unwrap_or_default();
        let summarized = match llm_provider
            .generate("", &serde_json::Value::Array(messages), &model, &key)
            .await
        {
            Ok(summary) => {
                trace!(
                    "Successfully summarized session {} ({} chars)",
                    session.id,
                    summary.len()
                );
                // Use handler to filter <think> content
                let handler = llm_models::get_handler(
                    config_manager
                        .get_config(&session.bot_id, "llm-model", None)
                        .unwrap()
                        .as_str(),
                );
                filtered = handler.process_content(&summary);
                format!("SUMMARY: {}", filtered)
            }
            Err(e) => {
                error!(
                    "Failed to summarize conversation for session {}: {}",
                    session.id, e
                );
                trace!("Using fallback summary for session {}", session.id);
                // NOTE(review): `filtered` is still empty here, so the
                // fallback saves a bare "SUMMARY: " entry — confirm intended.
                format!("SUMMARY: {}", filtered) // Fallback
            }
        };
        info!(
            "Prompt compacted {}: {} messages",
            session.id,
            history.len()
        );
        {
            // NOTE(review): 9 appears to be the role/kind code for compact
            // summaries — TODO confirm against save_message's contract.
            let mut session_manager = state.session_manager.lock().await;
            session_manager.save_message(session.id, session.user_id, 9, &summarized, 1)?;
        }
        // NOTE(review): the cleanup guard is created at the very end of the
        // iteration and dropped immediately; any `?` error above returns
        // without removing the session from SESSION_IN_PROGRESS, leaving it
        // locked out of future passes.
        let _session_cleanup = guard((), |_| {
            tokio::spawn(async move {
                let mut in_progress = SESSION_IN_PROGRESS.lock().await;
                in_progress.remove(&session_id);
            });
        });
    }
    Ok(())
}

View file

@ -0,0 +1,15 @@
#[cfg(test)]
mod tests {
    use super::*;
    use crate::tests::test_util;

    // NOTE(review): both tests are placeholders (assert!(true)); they only
    // verify that test_util::setup() runs without panicking.
    #[test]
    fn test_context_module() {
        test_util::setup();
        assert!(true, "Basic context module test");
    }

    #[test]
    fn test_langcache() {
        test_util::setup();
        assert!(true, "Langcache placeholder test");
    }
}

0
src/llm/context/mod.rs Normal file
View file

View file

@ -0,0 +1,16 @@
use super::ModelHandler;
use regex;
/// Handler for DeepSeek-R1-style output that wraps its reasoning in
/// `<think>…</think>` tags.
#[derive(Debug)]
pub struct DeepseekR3Handler;

impl ModelHandler for DeepseekR3Handler {
    /// Analysis is complete once the closing think tag has streamed in.
    fn is_analysis_complete(&self, buffer: &str) -> bool {
        buffer.contains("</think>")
    }

    /// Strips `<think>…</think>` blocks (dot-matches-newline via `(?s)`).
    ///
    /// The regex is compiled once and cached in a `OnceLock` instead of
    /// being rebuilt on every call, which matters in streaming hot paths.
    fn process_content(&self, content: &str) -> String {
        static THINK_RE: std::sync::OnceLock<regex::Regex> = std::sync::OnceLock::new();
        let re = THINK_RE.get_or_init(|| {
            // The pattern is a literal and therefore always valid.
            regex::Regex::new(r"(?s)<think>.*?</think>").expect("valid think-tag regex")
        });
        re.replace_all(content, "").to_string()
    }

    fn has_analysis_markers(&self, buffer: &str) -> bool {
        buffer.contains("<think>")
    }
}

View file

@ -0,0 +1,19 @@
use super::ModelHandler;
/// Handler for the GPT-OSS 120B output format, which brackets its analysis
/// section with literal `**start**` / `**end**` markers.
///
/// `Default` is derived so `new()` does not trip clippy's
/// `new_without_default`, and both construction paths stay available.
#[derive(Debug, Default)]
pub struct GptOss120bHandler {}

impl GptOss120bHandler {
    /// Creates a new handler (equivalent to `Self::default()`).
    pub fn new() -> Self {
        Self {}
    }
}

impl ModelHandler for GptOss120bHandler {
    /// The analysis section is terminated by a literal `**end**`.
    fn is_analysis_complete(&self, buffer: &str) -> bool {
        buffer.contains("**end**")
    }

    /// Removes the `**start**`/`**end**` markers, keeping all other text.
    fn process_content(&self, content: &str) -> String {
        content.replace("**start**", "").replace("**end**", "")
    }

    fn has_analysis_markers(&self, buffer: &str) -> bool {
        buffer.contains("**start**")
    }
}

View file

@ -0,0 +1,18 @@
use super::ModelHandler;
/// Handler for the GPT-OSS 20B output format, which ends its analysis with
/// the literal token "final".
#[derive(Debug)]
pub struct GptOss20bHandler;

impl ModelHandler for GptOss20bHandler {
    fn is_analysis_complete(&self, buffer: &str) -> bool {
        buffer.ends_with("final")
    }

    /// Returns everything before the first occurrence of "final"; when the
    /// marker is absent, the content passes through unchanged.
    fn process_content(&self, content: &str) -> String {
        content
            .split_once("final")
            .map_or(content, |(before, _)| before)
            .to_string()
    }

    fn has_analysis_markers(&self, buffer: &str) -> bool {
        buffer.contains("analysis<|message|>")
    }
}

View file

@ -0,0 +1,35 @@
#[cfg(test)]
mod tests {
    use super::*;
    use crate::tests::test_util;

    // Placeholder smoke test: only checks that setup() does not panic.
    #[test]
    fn test_llm_models_module() {
        test_util::setup();
        assert!(true, "Basic LLM models module test");
    }

    // Checks that process_content strips a multi-line <think>…</think> block.
    // NOTE(review): the regex replacement leaves the newline that followed
    // "</think>", but `expected` has no leading "\n" — verify this test
    // actually passes as written.
    #[test]
    fn test_deepseek_r3_process_content() {
        test_util::setup();
        let handler = DeepseekR3Handler;
        let input = r#"<think>
Alright, I need to help the user revise their resume entry. Let me read what they provided first.
The original message says: " Auxiliom has been updated last week! New release!" They want it in a few words. Hmm, so maybe instead of saying "has been updated," we can use more concise language because resumes usually don't require too much detail unless there's specific information to include.
I notice that the user wants it for their resume, which often requires bullet points or short sentences without being verbose. So perhaps combining these two thoughts into a single sentence would make sense. Also, using an exclamation mark might help convey enthusiasm about the new release.
Let me put it together: "Auxiliom has been updated last week! New release." That's concise and fits well for a resume. It effectively communicates both that something was updated recently and introduces them as having a new release without adding unnecessary details.
</think>
" Auxiliom has been updated last week! New release.""#;
        let expected = r#"" Auxiliom has been updated last week! New release.""#;
        let result = handler.process_content(input);
        assert_eq!(result, expected);
    }

    // Placeholder tests for the remaining handlers.
    #[test]
    fn test_gpt_oss_20b() {
        test_util::setup();
        assert!(true, "GPT OSS 20B placeholder test");
    }

    #[test]
    fn test_gpt_oss_120b() {
        test_util::setup();
        assert!(true, "GPT OSS 120B placeholder test");
    }
}

20
src/llm/llm_models/mod.rs Normal file
View file

@ -0,0 +1,20 @@
pub mod gpt_oss_20b;
pub mod deepseek_r3;
pub mod gpt_oss_120b;
/// Model-specific handling of streamed LLM output that contains an
/// analysis/reasoning section before the final answer.
pub trait ModelHandler: Send + Sync {
    /// True once the streamed `buffer` contains the model's
    /// end-of-analysis marker.
    fn is_analysis_complete(&self, buffer: &str) -> bool;
    /// Strips analysis markers/sections from `content`, returning the
    /// user-visible text.
    fn process_content(&self, content: &str) -> String;
    /// True when the streamed `buffer` contains the model's
    /// start-of-analysis marker.
    fn has_analysis_markers(&self, buffer: &str) -> bool;
}
/// Selects the output handler matching `model_path`.
///
/// Matching is a case-insensitive substring check; unrecognized models fall
/// back to the GPT-OSS 20B handler.
pub fn get_handler(model_path: &str) -> Box<dyn ModelHandler> {
    let path = model_path.to_lowercase();
    if path.contains("deepseek") {
        Box::new(deepseek_r3::DeepseekR3Handler)
    } else if path.contains("120b") {
        Box::new(gpt_oss_120b::GptOss120bHandler::new())
    } else {
        // "gpt-oss"/"gpt" paths and every unknown model use the 20B handler;
        // the original "gpt" branch and the else branch were identical, so
        // they are collapsed into this single default arm.
        Box::new(gpt_oss_20b::GptOss20bHandler)
    }
}

View file

@ -0,0 +1 @@
Prompts come from: https://github.com/0xeb/TheBigPromptLibrary

File diff suppressed because it is too large Load diff

3
src/msteams/mod.rs Normal file
View file

@ -0,0 +1,3 @@
pub mod teams;
pub use teams::*;

359
src/msteams/teams.rs Normal file
View file

@ -0,0 +1,359 @@
//! Microsoft Teams Channel Integration
//!
//! This module provides webhook handling and message processing for Microsoft Teams.
//! Currently under development for bot integration with Teams channels and direct messages.
//!
//! Key features:
//! - Bot Framework webhook handling
//! - Teams message and conversation support
//! - Adaptive cards for rich responses
//! - Session management per Teams user
//! - Integration with Microsoft Bot Framework
use crate::shared::models::UserSession;
use crate::shared::state::AppState;
use axum::{http::StatusCode, response::Json, Router};
use log::error;
use reqwest::Client;
use serde::{Deserialize, Serialize};
use serde_json::json;
use std::sync::Arc;
/// Incoming Bot Framework activity; only activities whose `type` is
/// "message" are processed by this adapter.
#[derive(Debug, Deserialize, Serialize)]
pub struct TeamsMessage {
    #[serde(rename = "type")]
    pub msg_type: String,
    pub id: Option<String>,
    pub timestamp: Option<String>,
    pub from: TeamsUser,
    pub conversation: TeamsConversation,
    pub recipient: TeamsUser,
    pub text: Option<String>,
    pub attachments: Option<Vec<TeamsAttachment>>,
}

/// A Teams user or bot reference.
#[derive(Debug, Deserialize, Serialize)]
pub struct TeamsUser {
    pub id: String,
    pub name: Option<String>,
}

/// The conversation an activity belongs to.
#[derive(Debug, Deserialize, Serialize)]
pub struct TeamsConversation {
    pub id: String,
    #[serde(rename = "conversationType")]
    pub conversation_type: Option<String>,
}

/// An activity attachment, e.g. an adaptive card; `content` is kept as raw
/// JSON because its schema depends on `content_type`.
#[derive(Debug, Deserialize, Serialize)]
pub struct TeamsAttachment {
    #[serde(rename = "contentType")]
    pub content_type: String,
    pub content: serde_json::Value,
}
/// Microsoft Teams adapter: shared app state plus Bot Framework credentials
/// loaded from the environment in [`TeamsAdapter::new`].
#[derive(Debug)]
pub struct TeamsAdapter {
    pub state: Arc<AppState>,
    // Bot Framework app id (env TEAMS_APP_ID).
    pub app_id: String,
    // Bot Framework client secret (env TEAMS_APP_PASSWORD).
    pub app_password: String,
    // Connector service base URL (env TEAMS_SERVICE_URL).
    pub service_url: String,
    // Azure AD tenant; empty string falls back to "botframework.com".
    pub tenant_id: String,
}
impl TeamsAdapter {
    /// Builds an adapter from environment variables; the service URL falls
    /// back to a Bot Framework traffic-manager endpoint when unset.
    pub fn new(state: Arc<AppState>) -> Self {
        // Load configuration from environment variables
        let app_id = std::env::var("TEAMS_APP_ID").unwrap_or_default();
        let app_password = std::env::var("TEAMS_APP_PASSWORD").unwrap_or_default();
        let service_url = std::env::var("TEAMS_SERVICE_URL")
            .unwrap_or_else(|_| "https://smba.trafficmanager.net/br/".to_string());
        let tenant_id = std::env::var("TEAMS_TENANT_ID").unwrap_or_default();
        Self {
            state,
            app_id,
            app_password,
            service_url,
            tenant_id,
        }
    }

    /// Webhook entry point for Bot Framework activities. Non-"message"
    /// activities are acknowledged with 200 and ignored; processing errors
    /// are logged, never surfaced to the caller.
    pub async fn handle_incoming_message(
        &self,
        Json(payload): Json<TeamsMessage>,
    ) -> Result<StatusCode, StatusCode> {
        if payload.msg_type != "message" {
            return Ok(StatusCode::OK);
        }
        if let Some(text) = payload.text {
            if let Err(e) = self
                .process_message(payload.from, payload.conversation, text)
                .await
            {
                error!("Error processing Teams message: {}", e);
            }
        }
        Ok(StatusCode::ACCEPTED)
    }

    /// Thin wrapper that forwards sender/conversation ids and the text to
    /// the bot pipeline.
    async fn process_message(
        &self,
        from: TeamsUser,
        conversation: TeamsConversation,
        text: String,
    ) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
        // Process with bot
        self.process_with_bot(&from.id, &conversation.id, &text)
            .await?;
        Ok(())
    }

    /// Ensures a session exists for the user and echoes a canned reply.
    /// NOTE(review): placeholder — the real bot pipeline is not wired in yet.
    async fn process_with_bot(
        &self,
        user_id: &str,
        conversation_id: &str,
        message: &str,
    ) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
        let _session = self.get_or_create_session(user_id).await?;
        // Process message through bot processor (simplified for now)
        let response = format!("Received on Teams: {}", message);
        self.send_message(conversation_id, user_id, &response)
            .await?;
        Ok(())
    }

    /// Fetches the cached session for `user_id` from Redis, creating a fresh
    /// one (24 h TTL) on miss. Without a cache backend, a transient,
    /// non-persisted session is returned on every call.
    async fn get_or_create_session(
        &self,
        user_id: &str,
    ) -> Result<UserSession, Box<dyn std::error::Error + Send + Sync>> {
        if let Some(redis_client) = &self.state.cache {
            let mut conn = redis_client.get_multiplexed_async_connection().await?;
            let session_key = format!("teams_session:{}", user_id);
            if let Ok(session_data) = redis::cmd("GET")
                .arg(&session_key)
                .query_async::<String>(&mut conn)
                .await
            {
                if let Ok(session) = serde_json::from_str::<UserSession>(&session_data) {
                    return Ok(session);
                }
            }
            // Teams ids are generally not UUIDs; a random id is minted on
            // parse failure.
            let user_uuid = uuid::Uuid::parse_str(user_id).unwrap_or_else(|_| uuid::Uuid::new_v4());
            let session = UserSession {
                id: uuid::Uuid::new_v4(),
                user_id: user_uuid,
                bot_id: uuid::Uuid::default(),
                title: "Teams Session".to_string(),
                context_data: serde_json::json!({"channel": "teams"}),
                current_tool: None,
                created_at: chrono::Utc::now(),
                updated_at: chrono::Utc::now(),
            };
            let session_data = serde_json::to_string(&session)?;
            // 24-hour expiry keeps stale sessions from accumulating.
            redis::cmd("SET")
                .arg(&session_key)
                .arg(&session_data)
                .arg("EX")
                .arg(86400)
                .query_async::<()>(&mut conn)
                .await?;
            Ok(session)
        } else {
            let user_uuid = uuid::Uuid::parse_str(user_id).unwrap_or_else(|_| uuid::Uuid::new_v4());
            Ok(UserSession {
                id: uuid::Uuid::new_v4(),
                user_id: user_uuid,
                bot_id: uuid::Uuid::default(),
                title: "Teams Session".to_string(),
                context_data: serde_json::json!({"channel": "teams"}),
                current_tool: None,
                created_at: chrono::Utc::now(),
                updated_at: chrono::Utc::now(),
            })
        }
    }

    /// Acquires an OAuth2 client-credentials token for the Bot Framework
    /// Connector API, using the configured tenant or the multi-tenant
    /// "botframework.com" authority when no tenant is set.
    ///
    /// # Errors
    /// Returns an error when the token endpoint call fails or answers with a
    /// non-success status.
    pub async fn get_access_token(
        &self,
    ) -> Result<String, Box<dyn std::error::Error + Send + Sync>> {
        let client = Client::new();
        let token_url = format!(
            "https://login.microsoftonline.com/{}/oauth2/v2.0/token",
            if self.tenant_id.is_empty() {
                "botframework.com"
            } else {
                &self.tenant_id
            }
        );
        let params = [
            ("grant_type", "client_credentials"),
            ("client_id", &self.app_id),
            ("client_secret", &self.app_password),
            ("scope", "https://api.botframework.com/.default"),
        ];
        let response = client.post(&token_url).form(&params).send().await?;
        if !response.status().is_success() {
            let error_text = response.text().await?;
            return Err(format!("Failed to get Teams access token: {}", error_text).into());
        }
        // Only the access_token field of the token response is needed.
        #[derive(Deserialize)]
        struct TokenResponse {
            access_token: String,
        }
        let token_response: TokenResponse = response.json().await?;
        Ok(token_response.access_token)
    }

    /// Posts a plain-text activity into `conversation_id` via the Connector
    /// API; fetches a fresh access token on every call.
    ///
    /// # Errors
    /// Fails when the token fetch, the HTTP call, or the API status check
    /// fails.
    pub async fn send_message(
        &self,
        conversation_id: &str,
        user_id: &str,
        message: &str,
    ) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
        let access_token = self.get_access_token().await?;
        let url = format!(
            "{}/v3/conversations/{}/activities",
            self.service_url.trim_end_matches('/'),
            conversation_id
        );
        let activity = json!({
            "type": "message",
            "text": message,
            "from": {
                "id": self.app_id,
                "name": "Bot"
            },
            "conversation": {
                "id": conversation_id
            },
            "recipient": {
                "id": user_id
            }
        });
        let client = Client::new();
        let response = client
            .post(&url)
            .bearer_auth(&access_token)
            .json(&activity)
            .send()
            .await?;
        if !response.status().is_success() {
            let error_text = response.text().await?;
            error!("Teams API error: {}", error_text);
            return Err(format!("Teams API error: {}", error_text).into());
        }
        Ok(())
    }

    /// Sends an Adaptive Card (schema v1.3) titled `title` with one Submit
    /// action per option. API failures are logged but swallowed.
    pub async fn send_card(
        &self,
        conversation_id: &str,
        user_id: &str,
        title: &str,
        options: Vec<String>,
    ) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
        let access_token = self.get_access_token().await?;
        let url = format!(
            "{}/v3/conversations/{}/activities",
            self.service_url.trim_end_matches('/'),
            conversation_id
        );
        let actions: Vec<_> = options
            .iter()
            .map(|option| {
                json!({
                    "type": "Action.Submit",
                    "title": option,
                    "data": {
                        "action": option
                    }
                })
            })
            .collect();
        let card = json!({
            "type": "AdaptiveCard",
            "version": "1.3",
            "body": [
                {
                    "type": "TextBlock",
                    "text": title,
                    "size": "Medium",
                    "weight": "Bolder"
                }
            ],
            "actions": actions
        });
        let activity = json!({
            "type": "message",
            "from": {
                "id": self.app_id,
                "name": "Bot"
            },
            "conversation": {
                "id": conversation_id
            },
            "recipient": {
                "id": user_id
            },
            "attachments": [
                {
                    "contentType": "application/vnd.microsoft.card.adaptive",
                    "content": card
                }
            ]
        });
        let client = Client::new();
        let response = client
            .post(&url)
            .bearer_auth(&access_token)
            .json(&activity)
            .send()
            .await?;
        if !response.status().is_success() {
            let error_text = response.text().await?;
            error!("Teams API error: {}", error_text);
        }
        Ok(())
    }
}
pub fn router(state: Arc<AppState>) -> Router<Arc<AppState>> {
let adapter = Arc::new(TeamsAdapter::new(state.clone()));
Router::new()
.route(
"/messages",
axum::routing::post({
move |payload| async move { adapter.handle_incoming_message(payload).await }
}),
)
.with_state(state)
}

708
src/tasks/mod.rs Normal file
View file

@ -0,0 +1,708 @@
use axum::{
extract::{Path, Query, State},
http::StatusCode,
response::Json,
routing::{delete, get, post, put},
Router,
};
use chrono::{DateTime, Utc};
use diesel::prelude::*;
use serde::{Deserialize, Serialize};
use std::sync::Arc;
use tokio::sync::RwLock;
use uuid::Uuid;
use crate::shared::utils::DbPool;
// TODO: Replace sqlx queries with Diesel queries
/// Partial update for a task; `None` fields are left unchanged.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TaskUpdate {
    pub title: Option<String>,
    pub description: Option<String>,
    pub status: Option<TaskStatus>,
    pub priority: Option<TaskPriority>,
    pub assignee: Option<String>,
    pub due_date: Option<DateTime<Utc>>,
    pub tags: Option<Vec<String>>,
}

/// A work item with hierarchy (parent/subtasks), dependencies, comments,
/// and scheduling metadata.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Task {
    pub id: Uuid,
    pub title: String,
    pub description: Option<String>,
    pub assignee: Option<String>,
    pub reporter: String,
    pub status: TaskStatus,
    pub priority: TaskPriority,
    pub due_date: Option<DateTime<Utc>>,
    pub estimated_hours: Option<f32>,
    pub actual_hours: Option<f32>,
    pub tags: Vec<String>,
    // Set for subtasks; links back to the parent task.
    pub parent_task_id: Option<Uuid>,
    pub subtasks: Vec<Uuid>,
    pub dependencies: Vec<Uuid>,
    pub attachments: Vec<String>,
    pub comments: Vec<TaskComment>,
    pub created_at: DateTime<Utc>,
    pub updated_at: DateTime<Utc>,
    // Populated when the status transitions to Done.
    pub completed_at: Option<DateTime<Utc>>,
}

/// Task lifecycle states; serialized in lowercase ("todo", "inprogress", …).
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum TaskStatus {
    Todo,
    InProgress,
    Review,
    Done,
    Blocked,
    Cancelled,
}

/// Task priority levels; serialized in lowercase.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum TaskPriority {
    Low,
    Medium,
    High,
    Urgent,
}

/// A comment attached to a task; `updated_at` is set only after edits.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TaskComment {
    pub id: Uuid,
    pub task_id: Uuid,
    pub author: String,
    pub content: String,
    pub created_at: DateTime<Utc>,
    pub updated_at: Option<DateTime<Utc>>,
}
/// Reusable blueprint for creating tasks with preset fields and a checklist.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TaskTemplate {
    pub id: Uuid,
    pub name: String,
    pub description: Option<String>,
    pub default_assignee: Option<String>,
    pub default_priority: TaskPriority,
    pub default_tags: Vec<String>,
    pub checklist: Vec<ChecklistItem>,
}

/// A single checklist entry on a task, with completion audit fields.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ChecklistItem {
    pub id: Uuid,
    pub task_id: Uuid,
    pub description: String,
    pub completed: bool,
    pub completed_by: Option<String>,
    pub completed_at: Option<DateTime<Utc>>,
}

/// A kanban-style board grouping tasks into ordered columns.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TaskBoard {
    pub id: Uuid,
    pub name: String,
    pub description: Option<String>,
    pub columns: Vec<BoardColumn>,
    pub owner: String,
    pub members: Vec<String>,
    pub created_at: DateTime<Utc>,
}

/// A board column mapped to a task status, with an optional WIP limit.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct BoardColumn {
    pub id: Uuid,
    pub name: String,
    // Display order within the board.
    pub position: i32,
    pub status_mapping: TaskStatus,
    pub task_ids: Vec<Uuid>,
    // Optional work-in-progress cap for this column.
    pub wip_limit: Option<i32>,
}
/// Task service backed by the database pool with an in-memory cache.
/// NOTE(review): the DB queries in this module are still TODO (sqlx→Diesel
/// migration), so the cache is currently the only live storage.
pub struct TaskEngine {
    db: Arc<DbPool>,
    cache: Arc<RwLock<Vec<Task>>>,
}
impl TaskEngine {
pub fn new(db: Arc<DbPool>) -> Self {
Self {
db,
cache: Arc::new(RwLock::new(Vec::new())),
}
}
/// Create a new task
pub async fn create_task(&self, task: Task) -> Result<Task, Box<dyn std::error::Error>> {
// TODO: Implement with Diesel
/*
let result = sqlx::query!(
r#"
INSERT INTO tasks
(id, title, description, assignee, reporter, status, priority,
due_date, estimated_hours, tags, parent_task_id, created_at, updated_at)
VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13)
RETURNING *
"#,
task.id,
task.title,
task.description,
task.assignee,
task.reporter,
serde_json::to_value(&task.status)?,
serde_json::to_value(&task.priority)?,
task.due_date,
task.estimated_hours,
&task.tags[..],
task.parent_task_id,
task.created_at,
task.updated_at
)
.fetch_one(self.db.as_ref())
.await?;
let created_task: Task = serde_json::from_value(serde_json::to_value(result)?)?;
*/
let created_task = task.clone();
// Update cache
let mut cache = self.cache.write().await;
cache.push(created_task.clone());
Ok(created_task)
}
/// Update an existing task
pub async fn update_task(
&self,
id: Uuid,
updates: TaskUpdate,
) -> Result<Task, Box<dyn std::error::Error>> {
let updated_at = Utc::now();
// Check if status is changing to Done
let completing = updates.status
.as_ref()
.map(|s| matches!(s, TaskStatus::Done))
.unwrap_or(false);
let completed_at = if completing {
Some(Utc::now())
} else {
None
};
// TODO: Implement with Diesel
/*
let result = sqlx::query!(
r#"
UPDATE tasks
SET title = COALESCE($2, title),
description = COALESCE($3, description),
assignee = COALESCE($4, assignee),
status = COALESCE($5, status),
priority = COALESCE($6, priority),
due_date = COALESCE($7, due_date),
updated_at = $8,
completed_at = COALESCE($9, completed_at)
WHERE id = $1
RETURNING *
"#,
id,
updates.get("title").and_then(|v| v.as_str()),
updates.get("description").and_then(|v| v.as_str()),
updates.get("assignee").and_then(|v| v.as_str()),
updates.get("status").and_then(|v| serde_json::to_value(v).ok()),
updates.get("priority").and_then(|v| serde_json::to_value(v).ok()),
updates
.get("due_date")
.and_then(|v| DateTime::parse_from_rfc3339(v.as_str()?).ok())
.map(|dt| dt.with_timezone(&Utc)),
updated_at,
completed_at
)
.fetch_one(self.db.as_ref())
.await?;
let updated_task: Task = serde_json::from_value(serde_json::to_value(result)?)?;
*/
// Create a dummy updated task for now
let updated_task = Task {
id,
title: updates.title.unwrap_or_else(|| "Updated Task".to_string()),
description: updates.description,
assignee: updates.assignee,
reporter: "system".to_string(),
status: updates.status.unwrap_or(TaskStatus::Todo),
priority: updates.priority.unwrap_or(TaskPriority::Medium),
due_date: updates.due_date,
estimated_hours: None,
actual_hours: None,
tags: updates.tags.unwrap_or_default(),
parent_task_id: None,
subtasks: Vec::new(),
dependencies: Vec::new(),
attachments: Vec::new(),
comments: Vec::new(),
created_at: Utc::now(),
updated_at: Utc::now(),
completed_at,
};
self.refresh_cache().await?;
Ok(updated_task)
}
/// Delete a task
pub async fn delete_task(&self, id: Uuid) -> Result<bool, Box<dyn std::error::Error>> {
// First, check for dependencies
let dependencies = self.get_task_dependencies(id).await?;
if !dependencies.is_empty() {
return Err("Cannot delete task with dependencies".into());
}
// TODO: Implement with Diesel
/*
let result = sqlx::query!("DELETE FROM tasks WHERE id = $1", id)
.execute(self.db.as_ref())
.await?;
*/
self.refresh_cache().await?;
Ok(false)
}
/// Get tasks for a specific user
pub async fn get_user_tasks(
&self,
_user_id: &str,
) -> Result<Vec<Task>, Box<dyn std::error::Error>> {
// TODO: Implement with Diesel
/*
let results = sqlx::query!(
r#"
SELECT * FROM tasks
WHERE assignee = $1 OR reporter = $1
ORDER BY priority DESC, due_date ASC
"#,
user_id
)
.fetch_all(self.db.as_ref())
.await?;
Ok(results
.into_iter()
.map(|r| serde_json::from_value(serde_json::to_value(r).unwrap()).unwrap())
.collect())
*/
Ok(vec![])
}
/// Get tasks by status
pub async fn get_tasks_by_status(
&self,
_status: TaskStatus,
) -> Result<Vec<Task>, Box<dyn std::error::Error>> {
// TODO: Implement with Diesel
/*
let results = sqlx::query!(
r#"
SELECT * FROM tasks
WHERE status = $1
ORDER BY priority DESC, created_at ASC
"#,
serde_json::to_value(&status)?
)
.fetch_all(self.db.as_ref())
.await?;
Ok(results
.into_iter()
.map(|r| serde_json::from_value(serde_json::to_value(r).unwrap()).unwrap())
.collect())
*/
Ok(vec![])
}
/// Get overdue tasks
pub async fn get_overdue_tasks(&self) -> Result<Vec<Task>, Box<dyn std::error::Error>> {
// TODO: Implement with Diesel
/*
let now = Utc::now();
let results = sqlx::query!(
r#"
SELECT * FROM tasks
WHERE due_date < $1 AND status != 'done' AND status != 'cancelled'
ORDER BY due_date ASC
"#,
now
)
.fetch_all(self.db.as_ref())
.await?;
Ok(results
.into_iter()
.map(|r| serde_json::from_value(serde_json::to_value(r).unwrap()).unwrap())
.collect())
*/
Ok(vec![])
}
/// Add a comment to a task
pub async fn add_comment(
&self,
task_id: Uuid,
author: &str,
content: &str,
) -> Result<TaskComment, Box<dyn std::error::Error>> {
let comment = TaskComment {
id: Uuid::new_v4(),
task_id,
author: author.to_string(),
content: content.to_string(),
created_at: Utc::now(),
updated_at: None,
};
// TODO: Implement with Diesel
/*
sqlx::query!(
r#"
INSERT INTO task_comments (id, task_id, author, content, created_at)
VALUES ($1, $2, $3, $4, $5)
"#,
comment.id,
comment.task_id,
comment.author,
comment.content,
comment.created_at
)
.execute(self.db.as_ref())
.await?;
*/
Ok(comment)
}
/// Create a subtask
pub async fn create_subtask(
&self,
parent_id: Uuid,
subtask: Task,
) -> Result<Task, Box<dyn std::error::Error>> {
let mut subtask = subtask;
subtask.parent_task_id = Some(parent_id);
let created = self.create_task(subtask).await?;
// Update parent's subtasks list
// TODO: Implement with Diesel
/*
sqlx::query!(
r#"
UPDATE tasks
SET subtasks = array_append(subtasks, $1)
WHERE id = $2
"#,
created.id,
parent_id
)
.execute(self.db.as_ref())
.await?;
*/
Ok(created)
}
/// Get task dependencies
pub async fn get_task_dependencies(
&self,
task_id: Uuid,
) -> Result<Vec<Task>, Box<dyn std::error::Error>> {
let task = self.get_task(task_id).await?;
let mut dependencies = Vec::new();
for dep_id in task.dependencies {
if let Ok(dep_task) = self.get_task(dep_id).await {
dependencies.push(dep_task);
}
}
Ok(dependencies)
}
/// Get a single task by ID
pub async fn get_task(&self, _id: Uuid) -> Result<Task, Box<dyn std::error::Error>> {
// TODO: Implement with Diesel
/*
let result = sqlx::query!("SELECT * FROM tasks WHERE id = $1", id)
.fetch_one(self.db.as_ref())
.await?;
Ok(serde_json::from_value(serde_json::to_value(result)?)?)
*/
Err("Not implemented".into())
}
/// Calculate task progress as a percentage in the range 0.0–100.0.
///
/// Leaf tasks (no subtasks) derive progress from their status; tasks
/// with subtasks use the fraction of subtasks in `Done` state.
pub async fn calculate_progress(&self, task_id: Uuid) -> Result<f32, Box<dyn std::error::Error>> {
    let task = self.get_task(task_id).await?;
    if task.subtasks.is_empty() {
        // No subtasks: progress is derived from the task's own status.
        return Ok(match task.status {
            TaskStatus::Todo => 0.0,
            TaskStatus::InProgress => 50.0,
            TaskStatus::Review => 75.0,
            TaskStatus::Done => 100.0,
            TaskStatus::Blocked => {
                // Hours-ratio estimate. The previous code divided by
                // `estimated_hours.unwrap_or(1.0)` unguarded, so an
                // explicit estimate of 0.0 produced inf/NaN, and any
                // overrun produced values above 100%. Guard and clamp.
                let estimated = task.estimated_hours.unwrap_or(1.0);
                if estimated <= 0.0 {
                    0.0
                } else {
                    (task.actual_hours.unwrap_or(0.0) / estimated * 100.0).clamp(0.0, 100.0)
                }
            }
            TaskStatus::Cancelled => 0.0,
        });
    }
    // Has subtasks: completion ratio of subtasks in Done state.
    let total = task.subtasks.len() as f32;
    let mut completed = 0.0;
    for subtask_id in task.subtasks {
        if let Ok(subtask) = self.get_task(subtask_id).await {
            if matches!(subtask.status, TaskStatus::Done) {
                completed += 1.0;
            }
        }
    }
    Ok((completed / total) * 100.0)
}
/// Create a task from template.
///
/// Instantiates a task (plus its checklist items) from the template's
/// defaults, optionally overriding the assignee. Until the Diesel port
/// lands, a hard-coded default template is used and checklist rows are
/// built but not persisted.
pub async fn create_from_template(
    &self,
    _template_id: Uuid,
    assignee: Option<String>,
) -> Result<Task, Box<dyn std::error::Error>> {
    // TODO: Implement with Diesel
    /*
    let template = sqlx::query!(
        "SELECT * FROM task_templates WHERE id = $1",
        template_id
    )
    .fetch_one(self.db.as_ref())
    .await?;
    let template: TaskTemplate = serde_json::from_value(serde_json::to_value(template)?)?;
    */
    // Placeholder template until the DB lookup above is ported.
    let template = TaskTemplate {
        id: Uuid::new_v4(),
        name: "Default Template".to_string(),
        description: Some("Default template".to_string()),
        default_assignee: None,
        default_priority: TaskPriority::Medium,
        default_tags: vec![],
        checklist: vec![],
    };
    let task = Task {
        id: Uuid::new_v4(),
        title: template.name,
        description: template.description,
        assignee,
        reporter: "system".to_string(),
        status: TaskStatus::Todo,
        priority: template.default_priority,
        due_date: None,
        estimated_hours: None,
        actual_hours: None,
        tags: template.default_tags,
        parent_task_id: None,
        subtasks: Vec::new(),
        dependencies: Vec::new(),
        attachments: Vec::new(),
        comments: Vec::new(),
        created_at: Utc::now(),
        updated_at: Utc::now(),
        completed_at: None,
    };
    let created = self.create_task(task).await?;
    // Create checklist items
    for item in template.checklist {
        let _checklist_item = ChecklistItem {
            id: Uuid::new_v4(),
            task_id: created.id,
            description: item.description,
            completed: false,
            completed_by: None,
            completed_at: None,
        };
        // TODO: Implement with Diesel
        /*
        sqlx::query!(
            r#"
            INSERT INTO task_checklists (id, task_id, description, completed)
            VALUES ($1, $2, $3, $4)
            "#,
            checklist_item.id,
            checklist_item.task_id,
            checklist_item.description,
            checklist_item.completed
        )
        .execute(self.db.as_ref())
        .await?;
        */
    }
    Ok(created)
}
/// Send a notification about a new task assignment.
///
/// Placeholder: until the notification system is integrated this only
/// writes an info log entry.
async fn notify_assignee(
    &self,
    assignee: &str,
    task: &Task,
) -> Result<(), Box<dyn std::error::Error>> {
    // This would integrate with your notification system; for now, just log it.
    log::info!(
        "Notifying {} about new task assignment: {}",
        assignee,
        task.title
    );
    Ok(())
}
/// Refresh the in-memory task cache from the database.
///
/// The Diesel query is not ported yet, so the cache is currently reset
/// to an empty list (original sqlx query kept below for reference).
async fn refresh_cache(&self) -> Result<(), Box<dyn std::error::Error>> {
    // TODO: Implement with Diesel
    /*
    let results = sqlx::query!("SELECT * FROM tasks ORDER BY created_at DESC")
        .fetch_all(self.db.as_ref())
        .await?;
    let tasks: Vec<Task> = results
        .into_iter()
        .map(|r| serde_json::from_value(serde_json::to_value(r).unwrap()).unwrap())
        .collect();
    */
    let fresh: Vec<Task> = Vec::new();
    *self.cache.write().await = fresh;
    Ok(())
}
/// Get task statistics for reporting.
///
/// When `user_id` is given, counts are scoped to tasks where the user is
/// assignee or reporter; otherwise they are global. Returns zeroed
/// counters until the Diesel port of the query below lands.
pub async fn get_statistics(
    &self,
    user_id: Option<&str>,
) -> Result<serde_json::Value, Box<dyn std::error::Error>> {
    // SECURITY: the original code interpolated `user_id` straight into the
    // SQL string, which is injectable. The filter is kept for reference
    // (underscore-prefixed to silence the unused-variable warning), but
    // when porting to Diesel the value MUST be bound as a query parameter,
    // never formatted into the statement text.
    let _base_query = if let Some(uid) = user_id {
        format!("WHERE assignee = '{}' OR reporter = '{}'", uid, uid)
    } else {
        String::new()
    };
    // TODO: Implement with Diesel (with bound parameters)
    /*
    let stats = sqlx::query(&format!(
        r#"
        SELECT
        COUNT(*) FILTER (WHERE status = 'todo') as todo_count,
        COUNT(*) FILTER (WHERE status = 'in_progress') as in_progress_count,
        COUNT(*) FILTER (WHERE status = 'done') as done_count,
        COUNT(*) FILTER (WHERE due_date < NOW() AND status != 'done') as overdue_count,
        AVG(actual_hours / NULLIF(estimated_hours, 0)) as avg_completion_ratio
        FROM tasks
        {}
        "#,
        base_query
    ))
    .fetch_one(self.db.as_ref())
    .await?;
    */
    // Return empty stats for now
    Ok(serde_json::json!({
        "todo_count": 0,
        "in_progress_count": 0,
        "done_count": 0,
        "overdue_count": 0,
        "avg_completion_ratio": null
    }))
}
}
/// HTTP API handlers
///
/// Thin axum handlers in front of the task engine. All of them are
/// currently stubs: the generic state `S` is accepted but not used, and
/// responses are placeholders until the engine is wired in.
pub mod handlers {
    use super::*;
    use axum::extract::{State as AxumState, Query as AxumQuery, Path as AxumPath};
    use axum::response::{Json as AxumJson, IntoResponse};
    use axum::http::StatusCode;
    /// POST /api/tasks — currently echoes the submitted task back unchanged.
    pub async fn create_task_handler<S>(
        AxumState(_engine): AxumState<S>,
        AxumJson(task): AxumJson<Task>,
    ) -> impl IntoResponse {
        // TODO: Implement with actual engine
        let created = task;
        (StatusCode::OK, AxumJson(serde_json::json!(created)))
    }
    /// GET /api/tasks — currently returns an empty list.
    pub async fn get_tasks_handler<S>(
        AxumState(_engine): AxumState<S>,
        AxumQuery(_query): AxumQuery<serde_json::Value>,
    ) -> impl IntoResponse {
        // TODO: Implement with actual engine
        let tasks: Vec<Task> = vec![];
        (StatusCode::OK, AxumJson(serde_json::json!(tasks)))
    }
    /// PUT /api/tasks/:id — currently acknowledges without persisting.
    pub async fn update_task_handler<S>(
        AxumState(_engine): AxumState<S>,
        AxumPath(_id): AxumPath<Uuid>,
        AxumJson(_updates): AxumJson<TaskUpdate>,
    ) -> impl IntoResponse {
        // TODO: Implement with actual engine
        let updated = serde_json::json!({"message": "Task updated"});
        (StatusCode::OK, AxumJson(updated))
    }
    /// GET /api/tasks/statistics — currently returns zeroed counters.
    pub async fn get_statistics_handler<S>(
        AxumState(_engine): AxumState<S>,
        AxumQuery(_query): AxumQuery<serde_json::Value>,
    ) -> impl IntoResponse {
        // TODO: Implement with actual engine
        let stats = serde_json::json!({
            "todo_count": 0,
            "in_progress_count": 0,
            "done_count": 0,
            "overdue_count": 0
        });
        (StatusCode::OK, AxumJson(stats))
    }
}
/// Configure task engine routes.
///
/// Mounts the CRUD and statistics endpoints under `/api/tasks` on the
/// given router. GET and POST for the collection path are merged into a
/// single method router.
pub fn configure<S>(router: Router<S>) -> Router<S>
where
    S: Clone + Send + Sync + 'static,
{
    use axum::routing::{get, post, put};
    router
        .route(
            "/api/tasks",
            get(handlers::get_tasks_handler::<S>).post(handlers::create_task_handler::<S>),
        )
        .route("/api/tasks/:id", put(handlers::update_task_handler::<S>))
        .route(
            "/api/tasks/statistics",
            get(handlers::get_statistics_handler::<S>),
        )
}

3
src/vector-db/mod.rs Normal file
View file

@ -0,0 +1,3 @@
// Vector DB feature module: background indexer and its public types.
pub mod vectordb_indexer;
pub use vectordb_indexer::{IndexingStats, IndexingStatus, VectorDBIndexer};

View file

@ -0,0 +1,555 @@
use anyhow::Result;
use chrono::{DateTime, Utc};
use log::{error, info, warn};
use std::collections::HashMap;
use std::path::PathBuf;
use std::sync::Arc;
use tokio::sync::RwLock;
use tokio::time::{sleep, Duration};
use uuid::Uuid;
// UserWorkspace struct for managing user workspace paths
#[derive(Debug, Clone)]
struct UserWorkspace {
    root: PathBuf,
    bot_id: Uuid,
    user_id: Uuid,
}
impl UserWorkspace {
    fn new(root: PathBuf, bot_id: &Uuid, user_id: &Uuid) -> Self {
        Self {
            root,
            bot_id: *bot_id,
            user_id: *user_id,
        }
    }
    /// Root of this user's workspace: `<root>/<bot_id>/<user_id>`.
    fn get_path(&self) -> PathBuf {
        self.root.join(self.bot_id.to_string()).join(self.user_id.to_string())
    }
    /// Storage path for the user's email vector DB.
    ///
    /// NOTE(review): the indexer calls `workspace.email_vectordb()` /
    /// `workspace.drive_vectordb()` but these accessors were missing from
    /// the struct (compile error). The directory names used here are an
    /// assumption — confirm against the intended on-disk layout.
    fn email_vectordb(&self) -> PathBuf {
        self.get_path().join("email_vectordb")
    }
    /// Storage path for the user's drive vector DB.
    fn drive_vectordb(&self) -> PathBuf {
        self.get_path().join("drive_vectordb")
    }
}
use crate::shared::utils::DbPool;
// VectorDB types are defined locally in this module
#[cfg(feature = "vectordb")]
use qdrant_client::prelude::*;
/// Indexing job status
#[derive(Debug, Clone, PartialEq)]
pub enum IndexingStatus {
    // Not currently indexing; eligible for the next cycle.
    Idle,
    // A cycle is actively indexing this user (re-entrancy guard).
    Running,
    // Excluded from cycles until resumed.
    Paused,
    // Indexing aborted; payload carries the error description.
    Failed(String),
}
/// Indexing statistics
///
/// Per-user counters, aggregated across users by `get_overall_stats`.
#[derive(Debug, Clone)]
pub struct IndexingStats {
    pub emails_indexed: u64,
    pub files_indexed: u64,
    pub emails_pending: u64,
    pub files_pending: u64,
    // Timestamp of the most recent completed indexing pass, if any.
    pub last_run: Option<DateTime<Utc>>,
    pub errors: u64,
}
/// User indexing job
///
/// NOTE(review): `email_db`/`drive_db` are feature-gated here, but
/// `index_user_data` constructs and uses them unconditionally — with
/// either feature disabled the struct and its users will not compile
/// together. Confirm the intended feature matrix.
#[derive(Debug)]
struct UserIndexingJob {
    user_id: Uuid,
    bot_id: Uuid,
    workspace: UserWorkspace,
    #[cfg(all(feature = "vectordb", feature = "email"))]
    email_db: Option<UserEmailVectorDB>,
    #[cfg(feature = "vectordb")]
    drive_db: Option<UserDriveVectorDB>,
    stats: IndexingStats,
    status: IndexingStatus,
}
/// Background vector DB indexer for all users
///
/// Periodically walks every active user session and pushes their emails
/// and drive files into per-user Qdrant collections. All shared state is
/// behind `Arc<RwLock<_>>` because the indexing loop runs on a spawned task.
pub struct VectorDBIndexer {
    db_pool: DbPool,
    // Root directory under which per-user workspaces live.
    work_root: PathBuf,
    qdrant_url: String,
    embedding_generator: Arc<EmailEmbeddingGenerator>,
    // One indexing job per user, keyed by user id.
    jobs: Arc<RwLock<HashMap<Uuid, UserIndexingJob>>>,
    // Flag checked by the loop; cleared by `stop()`.
    running: Arc<RwLock<bool>>,
    interval_seconds: u64,
    batch_size: usize,
}
impl VectorDBIndexer {
/// Create a new vector DB indexer.
///
/// `llm_endpoint` feeds the embedding generator. Defaults: one indexing
/// cycle every 300 seconds, processing 10 items per batch.
pub fn new(
    db_pool: DbPool,
    work_root: PathBuf,
    qdrant_url: String,
    llm_endpoint: String,
) -> Self {
    let embedding_generator = Arc::new(EmailEmbeddingGenerator::new(llm_endpoint));
    Self {
        db_pool,
        work_root,
        qdrant_url,
        embedding_generator,
        jobs: Arc::new(RwLock::new(HashMap::new())),
        running: Arc::new(RwLock::new(false)),
        interval_seconds: 300, // Run every 5 minutes
        batch_size: 10,        // Index 10 items at a time
    }
}
/// Start the background indexing service.
///
/// Idempotent: if the service is already running this logs a warning and
/// returns without spawning a second loop.
pub async fn start(self: Arc<Self>) -> Result<()> {
    {
        // Scope the write guard so it is released before spawning.
        let mut running = self.running.write().await;
        if *running {
            warn!("Vector DB indexer already running");
            return Ok(());
        }
        *running = true;
    }
    info!("🚀 Starting Vector DB Indexer background service");
    let indexer = Arc::clone(&self);
    tokio::spawn(async move {
        indexer.run_indexing_loop().await;
    });
    Ok(())
}
/// Stop the indexing service; the loop exits after its current cycle.
pub async fn stop(&self) {
    *self.running.write().await = false;
    info!("🛑 Stopping Vector DB Indexer");
}
/// Main indexing loop: one full pass over all active users per interval,
/// running until `stop()` clears the `running` flag.
async fn run_indexing_loop(self: Arc<Self>) {
    while *self.running.read().await {
        info!("🔄 Running vector DB indexing cycle...");
        // Get all active users; per-user failures are logged, not fatal.
        match self.get_active_users().await {
            Ok(users) => {
                info!("Found {} active users to index", users.len());
                for (user_id, bot_id) in users {
                    if let Err(e) = self.index_user_data(user_id, bot_id).await {
                        error!("Failed to index user {}: {}", user_id, e);
                    }
                }
            }
            Err(e) => {
                error!("Failed to get active users: {}", e);
            }
        }
        info!("✅ Indexing cycle complete");
        // Sleep until next cycle
        sleep(Duration::from_secs(self.interval_seconds)).await;
    }
    info!("Vector DB Indexer stopped");
}
/// Get all active users from database
///
/// Returns distinct `(user_id, bot_id)` pairs from `user_sessions`.
/// Diesel is synchronous, so the query runs on a blocking thread to
/// avoid stalling the tokio runtime.
async fn get_active_users(&self) -> Result<Vec<(Uuid, Uuid)>> {
    let conn = self.db_pool.clone();
    tokio::task::spawn_blocking(move || {
        use crate::shared::models::schema::user_sessions::dsl::*;
        use diesel::prelude::*;
        let mut db_conn = conn.get()?;
        // Get unique user_id and bot_id pairs from active sessions
        let results: Vec<(Uuid, Uuid)> = user_sessions
            .select((user_id, bot_id))
            .distinct()
            .load(&mut db_conn)?;
        Ok::<_, anyhow::Error>(results)
    })
    .await?
}
/// Index data for a specific user
///
/// Ensures a job entry (and its vector DBs) exists for the user, then
/// runs the email and file indexing passes. Failures of individual
/// passes are logged but do not abort the cycle.
async fn index_user_data(&self, user_id: Uuid, bot_id: Uuid) -> Result<()> {
    info!("Indexing user: {} (bot: {})", user_id, bot_id);
    // Get or create job for this user
    let mut jobs = self.jobs.write().await;
    let job = jobs.entry(user_id).or_insert_with(|| {
        let workspace = UserWorkspace::new(self.work_root.clone(), &bot_id, &user_id);
        UserIndexingJob {
            user_id,
            bot_id,
            workspace,
            email_db: None,
            drive_db: None,
            stats: IndexingStats {
                emails_indexed: 0,
                files_indexed: 0,
                emails_pending: 0,
                files_pending: 0,
                last_run: None,
                errors: 0,
            },
            status: IndexingStatus::Idle,
        }
    });
    // Re-entrancy guard: skip if another cycle is already indexing this user.
    if job.status == IndexingStatus::Running {
        warn!("Job already running for user {}", user_id);
        return Ok(());
    }
    job.status = IndexingStatus::Running;
    // Initialize vector DBs if needed (lazily, on first use or after a
    // previous initialization failure left them unset).
    if job.email_db.is_none() {
        let mut email_db =
            UserEmailVectorDB::new(user_id, bot_id, job.workspace.email_vectordb());
        if let Err(e) = email_db.initialize(&self.qdrant_url).await {
            warn!(
                "Failed to initialize email vector DB for user {}: {}",
                user_id, e
            );
        } else {
            job.email_db = Some(email_db);
        }
    }
    if job.drive_db.is_none() {
        let mut drive_db =
            UserDriveVectorDB::new(user_id, bot_id, job.workspace.drive_vectordb());
        if let Err(e) = drive_db.initialize(&self.qdrant_url).await {
            warn!(
                "Failed to initialize drive vector DB for user {}: {}",
                user_id, e
            );
        } else {
            job.drive_db = Some(drive_db);
        }
    }
    // Release the write lock before the long-running indexing passes so
    // stats queries and pause/resume calls are not blocked meanwhile.
    drop(jobs);
    // Index emails
    if let Err(e) = self.index_user_emails(user_id).await {
        error!("Failed to index emails for user {}: {}", user_id, e);
    }
    // Index files
    if let Err(e) = self.index_user_files(user_id).await {
        error!("Failed to index files for user {}: {}", user_id, e);
    }
    // Update job status
    let mut jobs = self.jobs.write().await;
    if let Some(job) = jobs.get_mut(&user_id) {
        job.status = IndexingStatus::Idle;
        job.stats.last_run = Some(Utc::now());
    }
    Ok(())
}
/// Index user's emails
///
/// Iterates the user's active email accounts, fetches not-yet-indexed
/// messages, generates embeddings and writes them into the per-user
/// email vector DB. Per-item failures are logged and skipped.
///
/// NOTE(review): the `jobs` read lock is held across all the awaits in
/// this pass, so writers (status/stats updates, pause/resume) block
/// until the whole email pass completes — confirm this is acceptable.
async fn index_user_emails(&self, user_id: Uuid) -> Result<()> {
    let jobs = self.jobs.read().await;
    let job = jobs
        .get(&user_id)
        .ok_or_else(|| anyhow::anyhow!("Job not found"))?;
    let email_db = match &job.email_db {
        Some(db) => db,
        None => {
            warn!("Email vector DB not initialized for user {}", user_id);
            return Ok(());
        }
    };
    // Get user's email accounts
    let accounts = self.get_user_email_accounts(user_id).await?;
    info!(
        "Found {} email accounts for user {}",
        accounts.len(),
        user_id
    );
    for account_id in accounts {
        // Get recent unindexed emails (last 100)
        match self.get_unindexed_emails(user_id, &account_id).await {
            Ok(emails) => {
                if emails.is_empty() {
                    continue;
                }
                info!(
                    "Indexing {} emails for account {}",
                    emails.len(),
                    account_id
                );
                // Process in batches
                for chunk in emails.chunks(self.batch_size) {
                    for email in chunk {
                        match self.embedding_generator.generate_embedding(&email).await {
                            Ok(embedding) => {
                                if let Err(e) = email_db.index_email(&email, embedding).await {
                                    error!("Failed to index email {}: {}", email.id, e);
                                } else {
                                    info!("✅ Indexed email: {}", email.subject);
                                }
                            }
                            Err(e) => {
                                error!(
                                    "Failed to generate embedding for email {}: {}",
                                    email.id, e
                                );
                            }
                        }
                    }
                    // Small delay between batches to avoid hammering the
                    // embedding endpoint and Qdrant.
                    sleep(Duration::from_millis(100)).await;
                }
            }
            Err(e) => {
                error!(
                    "Failed to get unindexed emails for account {}: {}",
                    account_id, e
                );
            }
        }
    }
    Ok(())
}
/// Index user's files
///
/// Fetches not-yet-indexed drive files, filters out those that should
/// not be indexed (by MIME type/size via `FileContentExtractor`), embeds
/// a "File: name / Type / content" text rendering and writes it into the
/// per-user drive vector DB. Per-item failures are logged and skipped.
///
/// NOTE(review): like `index_user_emails`, the `jobs` read lock is held
/// across all awaits in this pass, blocking writers for its duration.
async fn index_user_files(&self, user_id: Uuid) -> Result<()> {
    let jobs = self.jobs.read().await;
    let job = jobs
        .get(&user_id)
        .ok_or_else(|| anyhow::anyhow!("Job not found"))?;
    let drive_db = match &job.drive_db {
        Some(db) => db,
        None => {
            warn!("Drive vector DB not initialized for user {}", user_id);
            return Ok(());
        }
    };
    // Get user's files from drive
    match self.get_unindexed_files(user_id).await {
        Ok(files) => {
            if files.is_empty() {
                return Ok(());
            }
            info!("Indexing {} files for user {}", files.len(), user_id);
            // Process in batches
            for chunk in files.chunks(self.batch_size) {
                for file in chunk {
                    // Check if file should be indexed
                    let mime_type = file.mime_type.as_ref().map(|s| s.as_str()).unwrap_or("");
                    if !FileContentExtractor::should_index(mime_type, file.file_size) {
                        continue;
                    }
                    // Generate embedding for file content
                    let text = format!(
                        "File: {}\nType: {}\n\n{}",
                        file.file_name, file.file_type, file.content_text
                    );
                    match self
                        .embedding_generator
                        .generate_text_embedding(&text)
                        .await
                    {
                        Ok(embedding) => {
                            if let Err(e) = drive_db.index_file(&file, embedding).await {
                                error!("Failed to index file {}: {}", file.id, e);
                            } else {
                                info!("✅ Indexed file: {}", file.file_name);
                            }
                        }
                        Err(e) => {
                            error!("Failed to generate embedding for file {}: {}", file.id, e);
                        }
                    }
                }
                // Small delay between batches
                sleep(Duration::from_millis(100)).await;
            }
        }
        Err(e) => {
            error!("Failed to get unindexed files for user {}: {}", user_id, e);
        }
    }
    Ok(())
}
/// Get user's email accounts
async fn get_user_email_accounts(&self, user_id: Uuid) -> Result<Vec<String>> {
let conn = self.db_pool.clone();
tokio::task::spawn_blocking(move || {
use diesel::prelude::*;
let mut db_conn = conn.get()?;
let results: Vec<String> = diesel::sql_query(
"SELECT id::text FROM user_email_accounts WHERE user_id = $1 AND is_active = true",
)
.bind::<diesel::sql_types::Uuid, _>(user_id)
.load(&mut db_conn)?
.into_iter()
.filter_map(|row: diesel::QueryableByName<diesel::pg::Pg>| {
use diesel::sql_types::Text;
let id: Result<String, _> = <String as diesel::deserialize::FromSql<
Text,
diesel::pg::Pg,
>>::from_sql(row.get("id").ok()?);
id.ok()
})
.collect();
Ok::<_, anyhow::Error>(results)
})
.await?
}
/// Get unindexed emails (placeholder - needs actual implementation)
///
/// Intended flow: connect to the account over IMAP, fetch the most
/// recent ~100 messages, diff them against the vector DB, and return
/// only the ones not yet indexed. Currently always reports no work.
async fn get_unindexed_emails(
    &self,
    _user_id: Uuid,
    _account_id: &str,
) -> Result<Vec<EmailDocument>> {
    Ok(vec![])
}
/// Get unindexed files (placeholder - needs actual implementation)
///
/// Intended flow: list the user's files from MinIO/S3, diff against the
/// vector DB, extract text content, and return the pending ones.
/// Currently always reports no work.
async fn get_unindexed_files(&self, _user_id: Uuid) -> Result<Vec<FileDocument>> {
    Ok(vec![])
}
/// Get indexing statistics for a single user, if a job exists for them.
pub async fn get_user_stats(&self, user_id: Uuid) -> Option<IndexingStats> {
    self.jobs
        .read()
        .await
        .get(&user_id)
        .map(|job| job.stats.clone())
}
/// Aggregate indexing statistics across all user jobs.
///
/// Counters are summed; `last_run` is the most recent run time seen
/// over all jobs (or `None` if no job has run yet).
pub async fn get_overall_stats(&self) -> IndexingStats {
    let jobs = self.jobs.read().await;
    jobs.values().fold(
        IndexingStats {
            emails_indexed: 0,
            files_indexed: 0,
            emails_pending: 0,
            files_pending: 0,
            last_run: None,
            errors: 0,
        },
        |mut acc, job| {
            acc.emails_indexed += job.stats.emails_indexed;
            acc.files_indexed += job.stats.files_indexed;
            acc.emails_pending += job.stats.emails_pending;
            acc.files_pending += job.stats.files_pending;
            acc.errors += job.stats.errors;
            acc.last_run = match (acc.last_run, job.stats.last_run) {
                (Some(a), Some(b)) => Some(a.max(b)),
                (a, b) => a.or(b),
            };
            acc
        },
    )
}
/// Pause indexing for a specific user (no-op if they have no job yet).
pub async fn pause_user_indexing(&self, user_id: Uuid) -> Result<()> {
    if let Some(job) = self.jobs.write().await.get_mut(&user_id) {
        job.status = IndexingStatus::Paused;
        info!("⏸️ Paused indexing for user {}", user_id);
    }
    Ok(())
}
/// Resume indexing for a specific user (no-op if they have no job yet).
pub async fn resume_user_indexing(&self, user_id: Uuid) -> Result<()> {
    if let Some(job) = self.jobs.write().await.get_mut(&user_id) {
        job.status = IndexingStatus::Idle;
        info!("▶️ Resumed indexing for user {}", user_id);
    }
    Ok(())
}
/// Trigger immediate indexing for a user
///
/// Bypasses the interval timer and runs a full `index_user_data` pass
/// right away (respecting the per-user re-entrancy guard).
pub async fn trigger_user_indexing(&self, user_id: Uuid, bot_id: Uuid) -> Result<()> {
    info!("🔄 Triggering immediate indexing for user {}", user_id);
    self.index_user_data(user_id, bot_id).await
}
}
#[cfg(test)]
mod tests {
    use super::*;
    /// Sanity check that `IndexingStats` stores what it is given.
    #[test]
    fn test_indexing_stats_creation() {
        let now = Some(Utc::now());
        let stats = IndexingStats {
            emails_indexed: 10,
            files_indexed: 5,
            emails_pending: 2,
            files_pending: 3,
            last_run: now,
            errors: 0,
        };
        assert_eq!(stats.emails_indexed, 10);
        assert_eq!(stats.files_indexed, 5);
    }
}

6
src/weba/mod.rs Normal file
View file

@ -0,0 +1,6 @@
// WEBA module - Web Application features
// This module is a placeholder for future web application functionality
/// Initialize the WEBA feature. Currently a no-op placeholder.
pub fn init() {
    // Placeholder for weba initialization
}

3
src/whatsapp/mod.rs Normal file
View file

@ -0,0 +1,3 @@
// WhatsApp channel module: re-export the adapter implementation.
pub mod whatsapp;
pub use whatsapp::*;

444
src/whatsapp/whatsapp.rs Normal file
View file

@ -0,0 +1,444 @@
//! WhatsApp Business Channel Integration
//!
//! This module provides webhook handling and message processing for WhatsApp Business API.
//! Currently under development for bot integration with WhatsApp Business accounts.
//!
//! Key features:
//! - Webhook verification and message handling
//! - WhatsApp text, media, and location messages
//! - Session management per WhatsApp user
//! - Media attachments support
//! - Integration with Meta's WhatsApp Business API
use crate::shared::models::UserSession;
use crate::shared::state::AppState;
use axum::{extract::Query, http::StatusCode, response::Json, Router};
use log::{error, info};
use reqwest::Client;
use serde::{Deserialize, Serialize};
use serde_json::json;
use std::sync::Arc;
/// Query parameters of Meta's webhook verification handshake (GET).
#[derive(Debug, Clone, Deserialize, Serialize)]
pub struct WhatsAppWebhook {
    #[serde(rename = "hub.mode")]
    pub hub_mode: Option<String>,
    #[serde(rename = "hub.verify_token")]
    pub hub_verify_token: Option<String>,
    #[serde(rename = "hub.challenge")]
    pub hub_challenge: Option<String>,
}
/// Top-level webhook POST payload from the WhatsApp Business API.
#[derive(Debug, Clone, Deserialize, Serialize)]
pub struct WhatsAppMessage {
    pub entry: Vec<WhatsAppEntry>,
}
/// One entry in a webhook payload; groups a set of changes.
#[derive(Debug, Clone, Deserialize, Serialize)]
pub struct WhatsAppEntry {
    pub id: String,
    pub changes: Vec<WhatsAppChange>,
}
/// A single change notification; `field == "messages"` carries messages.
#[derive(Debug, Clone, Deserialize, Serialize)]
pub struct WhatsAppChange {
    pub value: WhatsAppValue,
    pub field: String,
}
/// Change payload: business metadata plus optional contacts/messages.
#[derive(Debug, Clone, Deserialize, Serialize)]
pub struct WhatsAppValue {
    pub messaging_product: String,
    pub metadata: WhatsAppMetadata,
    pub contacts: Option<Vec<WhatsAppContact>>,
    pub messages: Option<Vec<WhatsAppIncomingMessage>>,
}
/// Identifies the receiving business phone number.
#[derive(Debug, Clone, Deserialize, Serialize)]
pub struct WhatsAppMetadata {
    pub display_phone_number: String,
    pub phone_number_id: String,
}
/// Sender contact info attached to incoming messages.
#[derive(Debug, Clone, Deserialize, Serialize)]
pub struct WhatsAppContact {
    pub profile: WhatsAppProfile,
    pub wa_id: String,
}
#[derive(Debug, Clone, Deserialize, Serialize)]
pub struct WhatsAppProfile {
    pub name: String,
}
/// One incoming message; exactly one of the typed payload fields is
/// expected to be set, selected by `msg_type`.
#[derive(Debug, Clone, Deserialize, Serialize)]
pub struct WhatsAppIncomingMessage {
    pub from: String,
    pub id: String,
    pub timestamp: String,
    #[serde(rename = "type")]
    pub msg_type: String,
    pub text: Option<WhatsAppText>,
    pub image: Option<WhatsAppMedia>,
    pub document: Option<WhatsAppMedia>,
    pub audio: Option<WhatsAppMedia>,
    pub video: Option<WhatsAppMedia>,
    pub location: Option<WhatsAppLocation>,
}
#[derive(Debug, Clone, Deserialize, Serialize)]
pub struct WhatsAppText {
    pub body: String,
}
/// Media reference; the payload carries an id to fetch, not the bytes.
#[derive(Debug, Clone, Deserialize, Serialize)]
pub struct WhatsAppMedia {
    pub id: String,
    pub mime_type: Option<String>,
    pub sha256: Option<String>,
    pub caption: Option<String>,
}
#[derive(Debug, Clone, Deserialize, Serialize)]
pub struct WhatsAppLocation {
    pub latitude: f64,
    pub longitude: f64,
    pub name: Option<String>,
    pub address: Option<String>,
}
/// Adapter bridging WhatsApp Business webhooks to the bot pipeline.
/// Credentials come from environment variables (see `new`).
#[derive(Debug)]
pub struct WhatsAppAdapter {
    pub state: Arc<AppState>,
    pub access_token: String,
    pub phone_number_id: String,
    pub verify_token: String,
}
impl WhatsAppAdapter {
/// Build an adapter from environment configuration.
///
/// Reads `WHATSAPP_ACCESS_TOKEN`, `WHATSAPP_PHONE_ID` and
/// `WHATSAPP_VERIFY_TOKEN`; the first two default to empty strings, the
/// verify token defaults to "webhook_verify".
pub fn new(state: Arc<AppState>) -> Self {
    Self {
        access_token: std::env::var("WHATSAPP_ACCESS_TOKEN").unwrap_or_default(),
        phone_number_id: std::env::var("WHATSAPP_PHONE_ID").unwrap_or_default(),
        verify_token: std::env::var("WHATSAPP_VERIFY_TOKEN")
            .unwrap_or_else(|_| "webhook_verify".to_string()),
        state,
    }
}
/// Answer Meta's webhook verification challenge.
///
/// Returns the echo challenge when the mode is "subscribe" and the
/// verify token matches our configured one; anything else is a 403.
pub async fn handle_webhook_verification(
    &self,
    params: Query<WhatsAppWebhook>,
) -> Result<String, StatusCode> {
    match (
        &params.hub_mode,
        &params.hub_verify_token,
        &params.hub_challenge,
    ) {
        (Some(mode), Some(token), Some(challenge))
            if mode == "subscribe" && token == &self.verify_token =>
        {
            info!("WhatsApp webhook verified successfully");
            Ok(challenge.clone())
        }
        _ => {
            error!("WhatsApp webhook verification failed");
            Err(StatusCode::FORBIDDEN)
        }
    }
}
/// Entry point for webhook POSTs.
///
/// Flattens every entry/change with `field == "messages"` into a stream
/// of messages and processes each; per-message failures are logged but
/// never propagated, so Meta always receives 200 OK.
pub async fn handle_incoming_message(
    &self,
    Json(payload): Json<WhatsAppMessage>,
) -> Result<StatusCode, StatusCode> {
    let messages = payload
        .entry
        .into_iter()
        .flat_map(|entry| entry.changes)
        .filter(|change| change.field == "messages")
        .filter_map(|change| change.value.messages)
        .flatten();
    for message in messages {
        if let Err(e) = self.process_message(message).await {
            error!("Error processing WhatsApp message: {}", e);
        }
    }
    Ok(StatusCode::OK)
}
/// Normalize one incoming message and hand it to the bot pipeline.
///
/// The message is first acknowledged (read receipt), then mapped to a
/// plain-text representation by type; messages with no extractable
/// content (unknown types, empty payloads) are silently dropped.
async fn process_message(
    &self,
    message: WhatsAppIncomingMessage,
) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
    let user_phone = message.from.clone();
    let message_id = message.id.clone();
    // Mark message as read
    self.mark_as_read(&message_id).await?;
    // Extract message content based on type
    let content = match message.msg_type.as_str() {
        "text" => message.text.map(|t| t.body).unwrap_or_default(),
        "image" => {
            if let Some(image) = message.image {
                format!("[Image: {}]", image.caption.unwrap_or_default())
            } else {
                String::new()
            }
        }
        "audio" => "[Audio message]".to_string(),
        "video" => "[Video message]".to_string(),
        "document" => "[Document]".to_string(),
        "location" => {
            if let Some(loc) = message.location {
                format!("[Location: {}, {}]", loc.latitude, loc.longitude)
            } else {
                String::new()
            }
        }
        // Unknown types (stickers, reactions, ...) yield no content.
        _ => String::new(),
    };
    if content.is_empty() {
        return Ok(());
    }
    // Process with bot
    self.process_with_bot(&user_phone, &content).await?;
    Ok(())
}
/// Run a normalized message through the bot and reply to the sender.
///
/// NOTE(review): the bot processor is not wired in yet — the current
/// implementation only echoes the message back with the session id.
async fn process_with_bot(
    &self,
    from_number: &str,
    message: &str,
) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
    // Create or get user session
    let session = self.get_or_create_session(from_number).await?;
    // Process message through bot processor (simplified for now)
    // In real implementation, this would call the bot processor
    // Send response back to WhatsApp
    let response = format!("Received (session {}): {}", session.id, message);
    self.send_message(from_number, &response).await?;
    Ok(())
}
/// Fetch the cached session for a phone number, or create a new one.
///
/// With Redis available, sessions are keyed by `whatsapp_session:<phone>`
/// and expire after 24 h; without Redis an ephemeral session is created
/// per call.
///
/// NOTE(review): `Uuid::parse_str(phone_number)` will essentially never
/// succeed for a real phone number, so `user_id` is a fresh random UUID
/// on every session creation — confirm whether a stable phone-derived id
/// was intended.
async fn get_or_create_session(
    &self,
    phone_number: &str,
) -> Result<UserSession, Box<dyn std::error::Error + Send + Sync>> {
    // Check Redis for existing session
    if let Some(redis_client) = &self.state.cache {
        let mut conn = redis_client.get_multiplexed_async_connection().await?;
        let session_key = format!("whatsapp_session:{}", phone_number);
        if let Ok(session_data) = redis::cmd("GET")
            .arg(&session_key)
            .query_async::<String>(&mut conn)
            .await
        {
            // Corrupt/incompatible cached JSON falls through to re-create.
            if let Ok(session) = serde_json::from_str::<UserSession>(&session_data) {
                return Ok(session);
            }
        }
        // Create new session
        let user_uuid =
            uuid::Uuid::parse_str(phone_number).unwrap_or_else(|_| uuid::Uuid::new_v4());
        let session = UserSession {
            id: uuid::Uuid::new_v4(),
            user_id: user_uuid,
            bot_id: uuid::Uuid::default(), // Default bot
            title: "WhatsApp Session".to_string(),
            context_data: serde_json::json!({"channel": "whatsapp"}),
            current_tool: None,
            created_at: chrono::Utc::now(),
            updated_at: chrono::Utc::now(),
        };
        // Store in Redis
        let session_data = serde_json::to_string(&session)?;
        redis::cmd("SET")
            .arg(&session_key)
            .arg(&session_data)
            .arg("EX")
            .arg(86400) // 24 hours
            .query_async::<()>(&mut conn)
            .await?;
        Ok(session)
    } else {
        // Create ephemeral session (no cache configured)
        let user_uuid =
            uuid::Uuid::parse_str(phone_number).unwrap_or_else(|_| uuid::Uuid::new_v4());
        Ok(UserSession {
            id: uuid::Uuid::new_v4(),
            user_id: user_uuid,
            bot_id: uuid::Uuid::default(),
            title: "WhatsApp Session".to_string(),
            context_data: serde_json::json!({"channel": "whatsapp"}),
            current_tool: None,
            created_at: chrono::Utc::now(),
            updated_at: chrono::Utc::now(),
        })
    }
}
/// Send a plain-text message via the WhatsApp Graph API.
///
/// Fails on transport errors and on non-2xx API responses (the response
/// body is logged and returned in the error).
pub async fn send_message(
    &self,
    to_number: &str,
    message: &str,
) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
    let url = format!(
        "https://graph.facebook.com/v17.0/{}/messages",
        self.phone_number_id
    );
    let payload = json!({
        "messaging_product": "whatsapp",
        "to": to_number,
        "type": "text",
        "text": {
            "body": message
        }
    });
    let client = Client::new();
    let response = client
        .post(&url)
        .bearer_auth(&self.access_token)
        .json(&payload)
        .send()
        .await?;
    if !response.status().is_success() {
        let error_text = response.text().await?;
        error!("WhatsApp API error: {}", error_text);
        return Err(format!("WhatsApp API error: {}", error_text).into());
    }
    Ok(())
}
/// Send an interactive button message (max 3 buttons, per API limits).
///
/// Button ids are generated as `button_0..button_2`; extra buttons are
/// silently truncated.
///
/// NOTE(review): unlike `send_message`, a non-2xx API response here is
/// only logged and the method still returns `Ok(())`; the error type is
/// also `Box<dyn Error>` without `Send + Sync`, diverging from the other
/// senders — confirm whether both are intentional.
pub async fn send_interactive_buttons(
    &self,
    to_number: &str,
    header: &str,
    buttons: Vec<String>,
) -> Result<(), Box<dyn std::error::Error>> {
    let url = format!(
        "https://graph.facebook.com/v17.0/{}/messages",
        self.phone_number_id
    );
    let button_list: Vec<_> = buttons
        .iter()
        .take(3) // WhatsApp limits to 3 buttons
        .enumerate()
        .map(|(i, text)| {
            json!({
                "type": "reply",
                "reply": {
                    "id": format!("button_{}", i),
                    "title": text
                }
            })
        })
        .collect();
    let payload = json!({
        "messaging_product": "whatsapp",
        "to": to_number,
        "type": "interactive",
        "interactive": {
            "type": "button",
            "header": {
                "type": "text",
                "text": header
            },
            "body": {
                "text": "Escolha uma opção:"
            },
            "action": {
                "buttons": button_list
            }
        }
    });
    let client = Client::new();
    let response = client
        .post(&url)
        .bearer_auth(&self.access_token)
        .json(&payload)
        .send()
        .await?;
    if !response.status().is_success() {
        let error_text = response.text().await?;
        error!("WhatsApp API error: {}", error_text);
    }
    Ok(())
}
/// Send a read receipt for a message via the Graph API.
///
/// Transport errors propagate via `?`, but the HTTP status of the
/// response is deliberately not inspected — a rejected read receipt is
/// best-effort and must not abort message processing.
async fn mark_as_read(
    &self,
    message_id: &str,
) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
    let url = format!(
        "https://graph.facebook.com/v17.0/{}/messages",
        self.phone_number_id
    );
    let payload = json!({
        "messaging_product": "whatsapp",
        "status": "read",
        "message_id": message_id
    });
    let client = Client::new();
    client
        .post(&url)
        .bearer_auth(&self.access_token)
        .json(&payload)
        .send()
        .await?;
    Ok(())
}
/// Borrow the configured WhatsApp access token.
///
/// NOTE(review): `async` and the `get_` prefix are both unnecessary for
/// a plain field borrow, but the signature is kept for existing callers.
pub async fn get_access_token(&self) -> &str {
    &self.access_token
}
}
/// Build the WhatsApp webhook router.
///
/// GET /webhook serves Meta's verification handshake; POST /webhook
/// receives message payloads. Both closures share one adapter instance
/// (axum merges the two method routes registered on the same path).
pub fn router(state: Arc<AppState>) -> Router<Arc<AppState>> {
    let adapter = Arc::new(WhatsAppAdapter::new(state.clone()));
    Router::new()
        .route(
            "/webhook",
            axum::routing::get({
                let adapter = adapter.clone();
                move |params| async move { adapter.handle_webhook_verification(params).await }
            }),
        )
        .route(
            "/webhook",
            axum::routing::post({
                move |payload| async move { adapter.handle_incoming_message(payload).await }
            }),
        )
        .with_state(state)
}

View file

@ -0,0 +1,523 @@
/**
* Feature Manager for General Bots Desktop
* Manages dynamic feature toggling with Alpine.js
* Syncs with backend feature flags and persists user preferences
*/
const FeatureManager = (function () {
"use strict";
// Feature definitions matching Cargo.toml features
const FEATURES = {
// UI Features
"web-server": {
name: "Web Server",
category: "ui",
description: "Web interface and static file serving",
icon: "🌐",
required: true,
dependencies: [],
},
desktop: {
name: "Desktop UI",
category: "ui",
description: "Native desktop application with Tauri",
icon: "🖥️",
required: false,
dependencies: ["web-server"],
},
// Core Integrations
vectordb: {
name: "Vector Database",
category: "core",
description: "Semantic search and AI-powered indexing",
icon: "🔍",
required: false,
dependencies: [],
},
llm: {
name: "LLM/AI",
category: "core",
description: "Large Language Model integration",
icon: "🤖",
required: false,
dependencies: [],
},
nvidia: {
name: "NVIDIA GPU",
category: "core",
description: "GPU acceleration for AI workloads",
icon: "⚡",
required: false,
dependencies: ["llm"],
},
// Communication Channels
email: {
name: "Email",
category: "communication",
description: "IMAP/SMTP email integration",
icon: "📧",
required: false,
dependencies: [],
},
whatsapp: {
name: "WhatsApp",
category: "communication",
description: "WhatsApp messaging integration",
icon: "💬",
required: false,
dependencies: [],
},
instagram: {
name: "Instagram",
category: "communication",
description: "Instagram DM integration",
icon: "📸",
required: false,
dependencies: [],
},
msteams: {
name: "Microsoft Teams",
category: "communication",
description: "Teams messaging integration",
icon: "👥",
required: false,
dependencies: [],
},
// Productivity Features
chat: {
name: "Chat",
category: "productivity",
description: "Core chat messaging interface",
icon: "💬",
required: true,
dependencies: [],
},
drive: {
name: "Drive",
category: "productivity",
description: "File storage and management",
icon: "📁",
required: false,
dependencies: [],
},
tasks: {
name: "Tasks",
category: "productivity",
description: "Task management system",
icon: "✓",
required: false,
dependencies: [],
},
calendar: {
name: "Calendar",
category: "productivity",
description: "Calendar and scheduling",
icon: "📅",
required: false,
dependencies: [],
},
meet: {
name: "Meet",
category: "productivity",
description: "Video conferencing with LiveKit",
icon: "📹",
required: false,
dependencies: [],
},
mail: {
name: "Mail",
category: "productivity",
description: "Email client interface",
icon: "✉️",
required: false,
dependencies: ["email"],
},
// Enterprise Features
compliance: {
name: "Compliance",
category: "enterprise",
description: "Audit logging and compliance tracking",
icon: "📋",
required: false,
dependencies: [],
},
attendance: {
name: "Attendance",
category: "enterprise",
description: "Employee attendance tracking",
icon: "👤",
required: false,
dependencies: [],
},
directory: {
name: "Directory",
category: "enterprise",
description: "LDAP/Active Directory integration",
icon: "📖",
required: false,
dependencies: [],
},
weba: {
name: "Web Automation",
category: "enterprise",
description: "Browser automation capabilities",
icon: "🔧",
required: false,
dependencies: [],
},
};
// Category display names
// Maps each feature's `category` key (as used in the FEATURES entries)
// to a human-readable label and icon for grouping in the settings UI.
const CATEGORIES = {
    ui: { name: "User Interface", icon: "🖥️" },
    core: { name: "Core Integrations", icon: "⚙️" },
    communication: { name: "Communication Channels", icon: "💬" },
    productivity: { name: "Productivity Apps", icon: "📊" },
    enterprise: { name: "Enterprise Features", icon: "🏢" },
};
// State management (module-private, closed over by the functions below)
let enabledFeatures = new Set(); // feature IDs the user turned on; persisted via localStorage
let availableFeatures = new Set(); // feature IDs the server build supports (from /api/features/available)
let subscribers = []; // change-listener callbacks registered via subscribe()
/**
 * Initialize the feature manager: restore the user's enabled set from
 * localStorage, query the backend for compiled-in features, then
 * broadcast the initial state to subscribers.
 */
async function init() {
    console.log("🚀 Initializing Feature Manager...");
    // Restore the user's persisted selection first.
    loadFromStorage();
    // Then learn which features this server build actually supports.
    await fetchServerFeatures();
    // Push the initial state out to any registered listeners.
    notifySubscribers();
    console.log("✓ Feature Manager initialized");
    console.log(` Enabled: ${Array.from(enabledFeatures).join(", ")}`);
}
/**
 * Restore the enabled-feature set from localStorage.
 * Falls back to the default set ("web-server" + "chat") when nothing
 * was stored or the stored value cannot be parsed.
 */
function loadFromStorage() {
    const defaults = ["web-server", "chat"];
    try {
        const raw = localStorage.getItem("enabledFeatures");
        enabledFeatures = raw
            ? new Set(JSON.parse(raw))
            : new Set(defaults);
    } catch (e) {
        // Corrupt JSON (or storage access denied) — start from defaults.
        console.error("Failed to load features from storage:", e);
        enabledFeatures = new Set(defaults);
    }
}
/**
 * Persist the current enabled-feature set to localStorage as a JSON
 * array. Failures (quota, private browsing) are logged, not thrown.
 */
function saveToStorage() {
    try {
        localStorage.setItem(
            "enabledFeatures",
            JSON.stringify(Array.from(enabledFeatures))
        );
    } catch (e) {
        console.error("Failed to save features to storage:", e);
    }
}
/**
 * Query the backend for the set of compiled-in (available) features.
 * When the endpoint is missing or the server is unreachable, every
 * feature known to the client is assumed available.
 */
async function fetchServerFeatures() {
    const assumeAll = () => {
        availableFeatures = new Set(Object.keys(FEATURES));
    };
    try {
        const response = await fetch("/api/features/available");
        if (!response.ok) {
            // Fallback: assume all features available
            assumeAll();
            console.warn("⚠ Could not fetch server features, using all");
            return;
        }
        const data = await response.json();
        availableFeatures = new Set(data.features || []);
        console.log(
            "✓ Server features loaded:",
            Array.from(availableFeatures).join(", ")
        );
    } catch (e) {
        console.warn("⚠ Could not connect to server:", e.message);
        // Fallback: assume all features available
        assumeAll();
    }
}
/**
 * Check if a feature is enabled
 * @param {string} featureId - key into FEATURES
 * @returns {boolean} true when the user has this feature turned on
 */
function isEnabled(featureId) {
    return enabledFeatures.has(featureId);
}
/**
 * Check if a feature is available (compiled in)
 * @param {string} featureId - key into FEATURES
 * @returns {boolean} true when the server build supports this feature
 */
function isAvailable(featureId) {
    return availableFeatures.has(featureId);
}
/**
 * Enable a feature, enabling any declared dependencies first.
 *
 * Fixes over the previous version:
 * - A dependency that fails to enable (unknown or not compiled in) now
 *   aborts the whole operation, so a feature is never enabled with
 *   unmet dependencies; previously the failure was ignored.
 * - A visited-set guards against circular dependency chains, which
 *   previously recursed without bound.
 *
 * @param {string} featureId - key into FEATURES
 * @param {Set<string>} [_visiting] - internal cycle guard; callers omit it
 * @returns {Promise<boolean>} true when the feature ends up enabled
 */
async function enable(featureId, _visiting = new Set()) {
    const feature = FEATURES[featureId];
    if (!feature) {
        console.error(`Unknown feature: ${featureId}`);
        return false;
    }
    if (!isAvailable(featureId)) {
        console.error(
            `Feature not available (not compiled): ${featureId}`
        );
        return false;
    }
    // Cycle guard: a feature already on the current dependency chain is
    // treated as satisfied instead of recursing into it again.
    if (_visiting.has(featureId)) {
        return true;
    }
    _visiting.add(featureId);
    // Enable dependencies first; abort if any cannot be enabled.
    for (const dep of feature.dependencies) {
        if (!isEnabled(dep)) {
            console.log(
                `Enabling dependency: ${dep} for ${featureId}`
            );
            if (!(await enable(dep, _visiting))) {
                console.error(
                    `Cannot enable ${featureId}: dependency ${dep} failed`
                );
                return false;
            }
        }
    }
    // Enable the feature and persist the new set.
    enabledFeatures.add(featureId);
    saveToStorage();
    // Notify server (best-effort) and local subscribers.
    await notifyServer(featureId, true);
    notifySubscribers();
    console.log(`✓ Feature enabled: ${featureId}`);
    return true;
}
/**
 * Disable a feature, cascading to any enabled features that depend on
 * it. Required features cannot be disabled.
 *
 * Fix over the previous version: a visited-set guards against circular
 * dependency chains (e.g. two enabled features depending on each
 * other), which previously caused unbounded mutual recursion because
 * the feature is only removed from enabledFeatures after the cascade.
 *
 * @param {string} featureId - key into FEATURES
 * @param {Set<string>} [_visiting] - internal cycle guard; callers omit it
 * @returns {Promise<boolean>} true when the feature was disabled
 */
async function disable(featureId, _visiting = new Set()) {
    const feature = FEATURES[featureId];
    if (!feature) {
        console.error(`Unknown feature: ${featureId}`);
        return false;
    }
    if (feature.required) {
        console.error(`Cannot disable required feature: ${featureId}`);
        return false;
    }
    // Cycle guard: a feature already on the current cascade chain is
    // being handled higher up the stack — report success.
    if (_visiting.has(featureId)) {
        return true;
    }
    _visiting.add(featureId);
    // Cascade: disable every enabled feature that depends on this one.
    for (const [id, f] of Object.entries(FEATURES)) {
        if (
            isEnabled(id) &&
            f.dependencies.includes(featureId)
        ) {
            console.log(
                `Disabling dependent feature: ${id}`
            );
            await disable(id, _visiting);
        }
    }
    // Disable the feature and persist the new set.
    enabledFeatures.delete(featureId);
    saveToStorage();
    // Notify server (best-effort) and local subscribers.
    await notifyServer(featureId, false);
    notifySubscribers();
    console.log(`✓ Feature disabled: ${featureId}`);
    return true;
}
/**
 * Toggle a feature on/off.
 * @param {string} featureId - key into FEATURES
 * @returns {Promise<boolean>} result of the underlying enable/disable
 */
async function toggle(featureId) {
    return isEnabled(featureId)
        ? disable(featureId)
        : enable(featureId);
}
/**
 * Tell the server that a feature was toggled (best-effort; errors are
 * logged and swallowed so a dead backend never breaks the UI).
 * @param {string} featureId - key into FEATURES
 * @param {boolean} enabled - new state of the feature
 */
async function notifyServer(featureId, enabled) {
    try {
        const payload = JSON.stringify({
            feature: featureId,
            enabled: enabled,
        });
        await fetch("/api/features/toggle", {
            method: "POST",
            headers: { "Content-Type": "application/json" },
            body: payload,
        });
    } catch (e) {
        console.warn("Could not notify server:", e.message);
    }
}
/**
 * Register a callback to be invoked on every feature change.
 * @param {Function} callback - receives { enabled, available } arrays
 * @returns {Function} an unsubscribe handle that removes the callback
 */
function subscribe(callback) {
    subscribers.push(callback);
    return function unsubscribe() {
        subscribers = subscribers.filter((cb) => cb !== callback);
    };
}
/**
 * Broadcast the current feature state to every registered subscriber.
 * Each callback receives a fresh { enabled, available } snapshot.
 */
function notifySubscribers() {
    const snapshot = {
        enabled: Array.from(enabledFeatures),
        available: Array.from(availableFeatures),
    };
    for (const callback of subscribers) {
        callback(snapshot);
    }
}
/**
 * Look up a feature's metadata.
 * @param {string} featureId - key into FEATURES
 * @returns {Object|null} the feature entry, or null when unknown
 */
function getFeature(featureId) {
    const feature = FEATURES[featureId];
    return feature ? feature : null;
}
/**
 * Group all features by their category.
 * @returns {Object} category key -> array of feature entries, each
 *   augmented with its id and current enabled/available flags
 */
function getFeaturesByCategory() {
    const byCategory = {};
    Object.entries(FEATURES).forEach(([id, feature]) => {
        if (byCategory[feature.category] === undefined) {
            byCategory[feature.category] = [];
        }
        byCategory[feature.category].push({
            id,
            ...feature,
            enabled: isEnabled(id),
            available: isAvailable(id),
        });
    });
    return byCategory;
}
/**
 * Get category info
 * @returns {Object} the CATEGORIES map (shared reference, not a copy)
 */
function getCategories() {
    return CATEGORIES;
}
/**
 * List the currently enabled feature IDs.
 * @returns {string[]} a fresh array (safe for the caller to mutate)
 */
function getEnabled() {
    return [...enabledFeatures];
}
/**
 * List the feature IDs this server build supports.
 * @returns {string[]} a fresh array (safe for the caller to mutate)
 */
function getAvailable() {
    return [...availableFeatures];
}
/**
 * Sync DOM visibility with the enabled-feature set: hide app-menu
 * items for disabled features and mark content sections with the
 * "feature-disabled" class.
 */
function updateUI() {
    // Hide/show app menu items based on features
    for (const item of document.querySelectorAll(".app-item")) {
        // Section names are assumed to match feature IDs.
        const featureId = item.dataset.section;
        if (!FEATURES[featureId]) {
            continue; // not feature-gated; leave untouched
        }
        if (isEnabled(featureId)) {
            item.style.display = "";
            item.removeAttribute("disabled");
        } else {
            item.style.display = "none";
        }
    }
    // Update main content sections
    const mainContent = document.getElementById("main-content");
    if (!mainContent) {
        return;
    }
    for (const section of mainContent.querySelectorAll("[data-feature]")) {
        // classList.toggle with a force flag == add when true, remove when false.
        section.classList.toggle(
            "feature-disabled",
            !isEnabled(section.dataset.feature)
        );
    }
}
// Auto-update UI when features change (updateUI ignores the payload).
subscribe(updateUI);
// Public API
// The object returned from the module IIFE — anything not listed here
// (state sets, storage helpers, server calls) stays private to the closure.
return {
    init,
    isEnabled,
    isAvailable,
    enable,
    disable,
    toggle,
    subscribe,
    getFeature,
    getFeaturesByCategory,
    getCategories,
    getEnabled,
    getAvailable,
    updateUI,
};
})();
// Initialize on DOM ready; if the document already finished parsing
// (script loaded late/async), initialize immediately instead.
if (document.readyState !== "loading") {
    FeatureManager.init();
} else {
    document.addEventListener("DOMContentLoaded", () => {
        FeatureManager.init();
    });
}
// Make available globally
// Expose the singleton on window so inline handlers and non-module
// scripts can reach it.
window.FeatureManager = FeatureManager;