Compare commits
No commits in common. "f99013872d22dab7b37ab7f23d0a8077085185a4" and "07c479b30710029d7567b110c723c72b5ee7a93e" have entirely different histories.
44 changed files with 339 additions and 1628 deletions
18  PROMPT.md
PROMPT.md

@@ -5,24 +5,6 @@
 ---
 
-## Build Rules - IMPORTANT
-
-**Always use debug builds during development and testing:**
-
-```bash
-# CORRECT - debug build (fast compilation)
-cargo build
-cargo check
-
-# WRONG - do NOT use release builds unless explicitly requested
-# cargo build --release
-```
-
-Debug builds compile much faster and are sufficient for testing functionality.
-Only use `--release` when building final binaries for deployment.
-
----
-
 ## Weekly Maintenance - EVERY MONDAY
 
 ### Package Review Checklist
@@ -52,7 +52,9 @@ use std::path::PathBuf;
 use std::sync::Arc;
 use uuid::Uuid;
 
+// ============================================================================
 // Configuration
+// ============================================================================
 
 /// LLM Assist configuration loaded from config.csv
 #[derive(Debug, Clone, Default)]

@@ -153,7 +155,9 @@ impl LlmAssistConfig {
     }
 }
 
+// ============================================================================
 // Request/Response Types
+// ============================================================================
 
 /// Request for generating tips based on customer message
 #[derive(Debug, Deserialize)]

@@ -324,7 +328,9 @@ pub struct Emotion {
     pub intensity: f32, // 0.0 to 1.0
 }
 
+// ============================================================================
 // LLM Integration
+// ============================================================================
 
 /// Execute LLM generation with the bot's context
 async fn execute_llm_with_context(

@@ -410,7 +416,9 @@ fn get_bot_system_prompt(bot_id: Uuid, work_path: &str) -> String {
     "You are a professional customer service assistant. Be helpful, empathetic, and solution-oriented. Maintain a friendly but professional tone.".to_string()
 }
 
+// ============================================================================
 // API Handlers
+// ============================================================================
 
 /// POST /api/attendance/llm/tips
 /// Generate contextual tips for the attendant based on customer message

@@ -981,7 +989,9 @@ pub async fn get_llm_config(
     )
 }
 
+// ============================================================================
 // WhatsApp Attendant Commands
+// ============================================================================
 
 /// Process WhatsApp command from attendant
 pub async fn process_attendant_command(

@@ -1552,7 +1562,9 @@ _Portuguese: /fila, /pegar, /transferir, /resolver, /dicas, /polir, /respostas,
     .to_string()
 }
 
+// ============================================================================
 // Helper Functions
+// ============================================================================
 
 /// Get session from database
 async fn get_session(state: &Arc<AppState>, session_id: Uuid) -> Result<UserSession, String> {

@@ -2101,7 +2113,9 @@ fn analyze_sentiment_keywords(message: &str) -> SentimentAnalysis {
     }
 }
 
+// ============================================================================
 // Tests
+// ============================================================================
 
 #[cfg(test)]
 mod tests {
@@ -613,7 +613,9 @@ pub fn get_a2a_messages_keyword(state: Arc<AppState>, user: UserSession, engine:
     });
 }
 
+// ============================================================================
 // Database Operations
+// ============================================================================
 
 /// Send an A2A message
 async fn send_a2a_message(

@@ -838,7 +840,9 @@ pub async fn respond_to_a2a_message(
     .await
 }
 
+// ============================================================================
 // Tests
+// ============================================================================
 
 #[cfg(test)]
 mod tests {
@@ -579,7 +579,9 @@ fn delegate_to_keyword(
     Ok(())
 }
 
+// ============================================================================
 // Database Operations
+// ============================================================================
 
 /// Add a bot to the session
 async fn add_bot_to_session(

@@ -783,7 +785,9 @@ async fn delegate_to_bot(
     Ok(response)
 }
 
+// ============================================================================
 // Multi-Agent Message Processing
+// ============================================================================
 
 /// Check if a message matches any bot triggers
 pub fn match_bot_triggers(message: &str, bots: &[SessionBot]) -> Vec<SessionBot> {

@@ -853,7 +857,9 @@ pub fn match_tool_triggers(tool_name: &str, bots: &[SessionBot]) -> Vec<SessionB
     matching_bots
 }
 
+// ============================================================================
 // Helper Types for Diesel Queries
+// ============================================================================
 
 #[derive(QueryableByName)]
 struct BoolResult {

@@ -1078,7 +1078,9 @@ async fn set_reflection_enabled(
     ))
 }
 
+// ============================================================================
 // Tests
+// ============================================================================
 
 #[cfg(test)]
 mod tests {
@@ -27,7 +27,9 @@ use chrono::{DateTime, Utc};
 use serde::{Deserialize, Serialize};
 use std::collections::HashMap;
 
+// ============================================================================
 // AUTO TASK DATA STRUCTURES
+// ============================================================================
 
 /// Represents an auto-executing task
 #[derive(Debug, Clone, Serialize, Deserialize)]

@@ -22,7 +22,9 @@ use serde::{Deserialize, Serialize};
 use std::sync::Arc;
 use uuid::Uuid;
 
+// =============================================================================
 // REQUEST/RESPONSE TYPES
+// =============================================================================
 
 /// Request to compile an intent into an executable plan
 #[derive(Debug, Deserialize)]

@@ -239,7 +241,9 @@ pub struct RecommendationResponse {
     pub action: Option<String>,
 }
 
+// =============================================================================
 // API HANDLERS
+// =============================================================================
 
 /// POST /api/autotask/compile - Compile an intent into an execution plan
 pub async fn compile_intent_handler(
@@ -847,7 +847,9 @@ pub fn run_file_keyword(state: Arc<AppState>, user: UserSession, engine: &mut En
     .expect("Failed to register RUN JAVASCRIPT WITH FILE syntax");
 }
 
+// ============================================================================
 // LXC Container Setup Templates
+// ============================================================================
 
 /// Generate LXC configuration for Python sandbox
 pub fn generate_python_lxc_config() -> String {

@@ -917,7 +919,9 @@ lxc.mount.entry = tmpfs tmp tmpfs defaults 0 0
     .to_string()
 }
 
+// ============================================================================
 // Tests
+// ============================================================================
 
 #[cfg(test)]
 mod tests {
@@ -72,7 +72,9 @@ use rhai::{Array, Dynamic, Engine, Map};
 use std::sync::Arc;
 use uuid::Uuid;
 
+// ============================================================================
 // Registration
+// ============================================================================
 
 /// Register all CRM attendance keywords
 pub fn register_attendance_keywords(state: Arc<AppState>, user: UserSession, engine: &mut Engine) {

@@ -105,7 +107,9 @@ pub fn register_attendance_keywords(state: Arc<AppState>, user: UserSession, eng
     debug!("CRM attendance keywords registered successfully");
 }
 
+// ============================================================================
 // Queue Management Keywords
+// ============================================================================
 
 /// GET QUEUE - Get current queue status
 ///

@@ -671,7 +675,9 @@ fn set_priority_impl(state: &Arc<AppState>, session_id: &str, priority: Dynamic)
     result
 }
 
+// ============================================================================
 // Attendant Management Keywords
+// ============================================================================
 
 /// GET ATTENDANTS - List available attendants
 ///

@@ -901,7 +907,9 @@ fn get_attendant_stats_impl(state: &Arc<AppState>, attendant_id: &str) -> Dynami
     result
 }
 
+// ============================================================================
 // LLM Assist Keywords
+// ============================================================================
 
 /// GET TIPS - Generate AI tips for conversation
 ///

@@ -1323,7 +1331,9 @@ fn analyze_sentiment_impl(_state: &Arc<AppState>, _session_id: &str, message: &s
     Dynamic::from(result)
 }
 
+// ============================================================================
 // Customer Journey Keywords
+// ============================================================================
 
 /// TAG CONVERSATION - Add tags to conversation
 ///

@@ -1612,7 +1622,9 @@ fn get_customer_history_impl(state: &Arc<AppState>, user_id: &str) -> Dynamic {
     result
 }
 
+// ============================================================================
 // Helper Functions
+// ============================================================================
 
 fn create_error_result(message: &str) -> Dynamic {
     let mut result = Map::new();

@@ -1621,7 +1633,9 @@ fn create_error_result(message: &str) -> Dynamic {
     Dynamic::from(result)
 }
 
+// ============================================================================
 // Tests
+// ============================================================================
 
 #[cfg(test)]
 mod tests {
@@ -477,7 +477,9 @@ pub fn register_group_by_keyword(_state: Arc<AppState>, _user: UserSession, engi
     .unwrap();
 }
 
+// ============================================================================
 // Implementation Functions
+// ============================================================================
 
 /// Execute SAVE - upsert operation
 fn execute_save(

@@ -992,7 +994,9 @@ fn execute_group_by(data: &Dynamic, field: &str) -> Result<Dynamic, Box<rhai::Ev
     Ok(Dynamic::from(result_map))
 }
 
+// ============================================================================
 // Helper Functions
+// ============================================================================
 
 /// Convert Dynamic to HashMap<String, Dynamic>
 fn dynamic_to_map(value: &Dynamic) -> HashMap<String, Dynamic> {
@@ -984,7 +984,9 @@ pub fn register_merge_pdf_keyword(state: Arc<AppState>, user: UserSession, engin
     .unwrap();
 }
 
+// ============================================================================
 // Implementation Functions
+// ============================================================================
 
 /// Read file content from .gbdrive
 async fn execute_read(

@@ -1716,7 +1718,9 @@ async fn execute_merge_pdf(
     })
 }
 
+// ============================================================================
 // Helper Functions
+// ============================================================================
 
 /// Convert Dynamic to JSON Value
 fn dynamic_to_json(value: &Dynamic) -> Value {
@@ -431,7 +431,9 @@ fn register_hear_as_menu(state: Arc<AppState>, user: UserSession, engine: &mut E
     .unwrap();
 }
 
+// ============================================================================
 // Validation Functions
+// ============================================================================
 
 /// Validate input based on type
 pub fn validate_input(input: &str, input_type: &InputType) -> ValidationResult {

@@ -1164,7 +1166,9 @@ fn validate_menu(input: &str, options: &[String]) -> ValidationResult {
     ValidationResult::invalid(format!("Please select one of: {}", options.join(", ")))
 }
 
+// ============================================================================
 // TALK Keyword
+// ============================================================================
 
 pub async fn execute_talk(
     state: Arc<AppState>,

@@ -1249,7 +1253,9 @@ pub fn talk_keyword(state: Arc<AppState>, user: UserSession, engine: &mut Engine
     .unwrap();
 }
 
+// ============================================================================
 // Input Processing (called when user sends message)
+// ============================================================================
 
 /// Process user input with validation
 pub async fn process_hear_input(
@@ -40,7 +40,9 @@ use std::collections::HashMap;
 use std::sync::Arc;
 use uuid::Uuid;
 
+// ============================================================================
 // CORE DATA STRUCTURES
+// ============================================================================
 
 /// Represents a compiled intent - the result of LLM analysis
 #[derive(Debug, Clone, Serialize, Deserialize)]

@@ -339,7 +341,9 @@ impl Default for ResourceEstimate {
     }
 }
 
+// ============================================================================
 // INTENT COMPILER ENGINE
+// ============================================================================
 
 /// The main Intent Compiler engine
 pub struct IntentCompiler {
@@ -46,7 +46,9 @@ use std::sync::Arc;
 use std::time::Duration;
 use uuid::Uuid;
 
+// ============================================================================
 // MCP DATA STRUCTURES
+// ============================================================================
 
 /// Represents a registered MCP server
 #[derive(Debug, Clone, Serialize, Deserialize)]

@@ -377,7 +379,9 @@ impl Default for HealthStatus {
     }
 }
 
+// ============================================================================
 // MCP REQUEST/RESPONSE
+// ============================================================================
 
 /// MCP tool invocation request
 #[derive(Debug, Clone, Serialize, Deserialize)]

@@ -440,7 +444,9 @@ pub struct McpResponseMetadata {
     pub rate_limit_reset: Option<DateTime<Utc>>,
 }
 
+// ============================================================================
 // MCP CLIENT
+// ============================================================================
 
 /// The MCP Client for managing server connections and tool invocations
 pub struct McpClient {
@@ -375,7 +375,9 @@ pub fn list_models_keyword(state: Arc<AppState>, user: UserSession, engine: &mut
     });
 }
 
+// ============================================================================
 // Database Operations
+// ============================================================================
 
 /// Set the model for a session
 async fn set_session_model(

@@ -551,7 +553,9 @@ pub fn get_session_routing_strategy(state: &AppState, session_id: Uuid) -> Routi
     }
 }
 
+// ============================================================================
 // Tests
+// ============================================================================
 
 #[cfg(test)]
 mod tests {
@@ -453,7 +453,9 @@ fn resume_keyword(
     Ok(())
 }
 
+// ============================================================================
 // Core Functions
+// ============================================================================
 
 /// Execute the PLAY command
 async fn execute_play(
@@ -391,8 +391,10 @@ fn register_return_keyword(engine: &mut Engine) {
     .expect("Failed to register RETURN syntax");
 }
 
+// ============================================================================
 // PREPROCESSING FUNCTIONS
 // These run at compile time to extract SUB/FUNCTION definitions
+// ============================================================================
 
 /// Preprocess SUB definitions from source code
 /// Converts SUB/END SUB blocks into callable units

@@ -672,7 +674,9 @@ pub fn get_procedure(name: &str) -> Option<ProcedureDefinition> {
     .cloned()
 }
 
+// ============================================================================
 // TESTS
+// ============================================================================
 
 #[cfg(test)]
 mod tests {
@@ -34,7 +34,9 @@ use std::collections::HashMap;
 use std::sync::Arc;
 use uuid::Uuid;
 
+// ============================================================================
 // CONSTRAINT DATA STRUCTURES
+// ============================================================================
 
 /// Constraint check result
 #[derive(Debug, Clone, Serialize, Deserialize)]

@@ -180,7 +182,9 @@ pub struct Constraint {
     pub bot_id: String,
 }
 
+// ============================================================================
 // SIMULATION DATA STRUCTURES
+// ============================================================================
 
 /// Result of impact simulation
 #[derive(Debug, Clone, Serialize, Deserialize)]

@@ -526,7 +530,9 @@ pub enum RecommendationType {
     Custom(String),
 }
 
+// ============================================================================
 // AUDIT TRAIL DATA STRUCTURES
+// ============================================================================
 
 /// Audit log entry
 #[derive(Debug, Clone, Serialize, Deserialize)]

@@ -737,7 +743,9 @@ pub struct RelatedEntity {
     pub relationship: String,
 }
 
+// ============================================================================
 // SAFETY LAYER ENGINE
+// ============================================================================
 
 /// The Safety Layer engine
 pub struct SafetyLayer {
@@ -160,7 +160,9 @@ pub fn clear_user_memory_keyword(state: Arc<AppState>, user: UserSession, engine
     .expect("Failed to register CLEAR USER MEMORY syntax");
 }
 
+// ============================================================================
 // Database Operations
+// ============================================================================
 
 /// Async function to set user memory
 async fn set_user_memory_async(

@@ -291,7 +293,9 @@ async fn clear_user_memory_async(state: &AppState, user_id: Uuid) -> Result<(),
     Ok(())
 }
 
+// ============================================================================
 // Tests
+// ============================================================================
 
 #[cfg(test)]
 mod tests {
@@ -1,3 +1,7 @@
+//! Calendar Module
+//!
+//! Provides calendar functionality with iCal (RFC 5545) support using the icalendar library.
+
 use axum::{
     extract::{Path, State},
     http::StatusCode,

@@ -8,38 +12,12 @@ use axum::{
 use chrono::{DateTime, Utc};
 use icalendar::{Calendar, Component, Event as IcalEvent, EventLike, Property};
 use serde::{Deserialize, Serialize};
-use std::collections::HashMap;
 use std::sync::Arc;
-use tokio::sync::RwLock;
 use uuid::Uuid;
 
 use crate::core::urls::ApiUrls;
 use crate::shared::state::AppState;
 
-pub struct CalendarState {
-    events: RwLock<HashMap<Uuid, CalendarEvent>>,
-}
-
-impl CalendarState {
-    pub fn new() -> Self {
-        Self {
-            events: RwLock::new(HashMap::new()),
-        }
-    }
-}
-
-impl Default for CalendarState {
-    fn default() -> Self {
-        Self::new()
-    }
-}
-
-static CALENDAR_STATE: std::sync::OnceLock<CalendarState> = std::sync::OnceLock::new();
-
-fn get_calendar_state() -> &'static CalendarState {
-    CALENDAR_STATE.get_or_init(CalendarState::new)
-}
-
 #[derive(Debug, Clone, Serialize, Deserialize)]
 pub struct CalendarEvent {
     pub id: Uuid,
@@ -262,18 +240,13 @@ impl CalendarEngine {
     }
 }
 
+// HTTP Handlers
 
 pub async fn list_events(
     State(_state): State<Arc<AppState>>,
     axum::extract::Query(_query): axum::extract::Query<serde_json::Value>,
 ) -> Json<Vec<CalendarEvent>> {
-    let calendar_state = get_calendar_state();
-    let events = calendar_state.events.read().await;
-
-    let mut result: Vec<CalendarEvent> = events.values().cloned().collect();
-    result.sort_by(|a, b| a.start_time.cmp(&b.start_time));
-
-    Json(result)
+    Json(vec![])
 }
 
 /// List calendars - JSON API for services
@@ -334,87 +307,31 @@ pub async fn upcoming_events(State(_state): State<Arc<AppState>>) -> axum::respo
 
 pub async fn get_event(
     State(_state): State<Arc<AppState>>,
-    Path(id): Path<Uuid>,
+    Path(_id): Path<Uuid>,
 ) -> Result<Json<CalendarEvent>, StatusCode> {
-    let calendar_state = get_calendar_state();
-    let events = calendar_state.events.read().await;
-
-    events
-        .get(&id)
-        .cloned()
-        .map(Json)
-        .ok_or(StatusCode::NOT_FOUND)
+    Err(StatusCode::NOT_FOUND)
 }
 
 pub async fn create_event(
     State(_state): State<Arc<AppState>>,
-    Json(input): Json<CalendarEventInput>,
+    Json(_input): Json<CalendarEventInput>,
 ) -> Result<Json<CalendarEvent>, StatusCode> {
-    let calendar_state = get_calendar_state();
-    let now = Utc::now();
-
-    let event = CalendarEvent {
-        id: Uuid::new_v4(),
-        title: input.title,
-        description: input.description,
-        start_time: input.start_time,
-        end_time: input.end_time,
-        location: input.location,
-        attendees: input.attendees,
-        organizer: input.organizer,
-        reminder_minutes: input.reminder_minutes,
-        recurrence: input.recurrence,
-        created_at: now,
-        updated_at: now,
-    };
-
-    let mut events = calendar_state.events.write().await;
-    events.insert(event.id, event.clone());
-
-    log::info!("Created calendar event: {} ({})", event.title, event.id);
-
-    Ok(Json(event))
+    Err(StatusCode::NOT_IMPLEMENTED)
 }
 
 pub async fn update_event(
     State(_state): State<Arc<AppState>>,
-    Path(id): Path<Uuid>,
-    Json(input): Json<CalendarEventInput>,
+    Path(_id): Path<Uuid>,
+    Json(_input): Json<CalendarEventInput>,
 ) -> Result<Json<CalendarEvent>, StatusCode> {
-    let calendar_state = get_calendar_state();
-    let mut events = calendar_state.events.write().await;
-
-    let event = events.get_mut(&id).ok_or(StatusCode::NOT_FOUND)?;
-
-    event.title = input.title;
-    event.description = input.description;
-    event.start_time = input.start_time;
-    event.end_time = input.end_time;
-    event.location = input.location;
-    event.attendees = input.attendees;
-    event.organizer = input.organizer;
-    event.reminder_minutes = input.reminder_minutes;
-    event.recurrence = input.recurrence;
-    event.updated_at = Utc::now();
-
-    log::info!("Updated calendar event: {} ({})", event.title, event.id);
-
-    Ok(Json(event.clone()))
+    Err(StatusCode::NOT_IMPLEMENTED)
 }
 
 pub async fn delete_event(
     State(_state): State<Arc<AppState>>,
-    Path(id): Path<Uuid>,
+    Path(_id): Path<Uuid>,
 ) -> StatusCode {
-    let calendar_state = get_calendar_state();
-    let mut events = calendar_state.events.write().await;
-
-    if events.remove(&id).is_some() {
-        log::info!("Deleted calendar event: {}", id);
-        StatusCode::NO_CONTENT
-    } else {
-        StatusCode::NOT_FOUND
-    }
+    StatusCode::NOT_IMPLEMENTED
 }
 
 pub async fn export_ical(State(_state): State<Arc<AppState>>) -> impl IntoResponse {
@@ -438,6 +438,9 @@ impl UserMessageMultimedia for UserMessage {
     }
 }
 
+// ============================================================================
+// REST API Handlers
+// ============================================================================
 
 use crate::shared::state::AppState;
 use axum::{
@@ -12,6 +12,7 @@ use diesel::r2d2::{ConnectionManager, PooledConnection};
 use std::collections::HashMap;
 use uuid::Uuid;
 
+// Type alias for backward compatibility
 pub type Config = AppConfig;
 
 #[derive(Clone, Debug)]
@@ -98,13 +98,8 @@ pub async fn run() -> Result<()> {
                 None
             };
             let pm = PackageManager::new(mode, tenant)?;
-            let result = pm.install(component).await?;
+            pm.install(component).await?;
             println!("* Component '{}' installed successfully", component);
-
-            // Print connection info for container installs
-            if let Some(install_result) = result {
-                install_result.print();
-            }
         }
         "remove" => {
             if args.len() < 3 {
@@ -1,37 +1,4 @@
 use std::collections::HashMap;
 
-/// Result returned after successful component installation
-/// Contains connection info that users need to configure their environment
-#[derive(Debug, Clone)]
-pub struct InstallResult {
-    pub component: String,
-    pub container_name: String,
-    pub container_ip: String,
-    pub ports: Vec<u16>,
-    pub env_vars: HashMap<String, String>,
-    pub connection_info: String,
-}
-
-impl InstallResult {
-    pub fn print(&self) {
-        println!("\n========================================");
-        println!(" {} Installation Complete", self.component.to_uppercase());
-        println!("========================================\n");
-        println!("Container: {}", self.container_name);
-        println!("IP Address: {}", self.container_ip);
-        println!("Ports: {:?}", self.ports);
-        println!("\n--- Connection Info ---\n");
-        println!("{}", self.connection_info);
-        if !self.env_vars.is_empty() {
-            println!("\n--- Environment Variables (.env) ---\n");
-            for (key, value) in &self.env_vars {
-                println!("{}={}", key, value);
-            }
-        }
-        println!("\n========================================\n");
-    }
-}
-
 #[derive(Debug, Clone)]
 pub struct ComponentConfig {
     pub name: String,
@@ -1,5 +1,5 @@
 use crate::package_manager::cache::{CacheResult, DownloadCache};
-use crate::package_manager::component::{ComponentConfig, InstallResult};
+use crate::package_manager::component::ComponentConfig;
 use crate::package_manager::installer::PackageManager;
 use crate::package_manager::InstallMode;
 use crate::package_manager::OsType;
@@ -11,7 +11,7 @@ use std::collections::HashMap;
 use std::path::PathBuf;
 use std::process::Command;
 impl PackageManager {
-    pub async fn install(&self, component_name: &str) -> Result<Option<InstallResult>> {
+    pub async fn install(&self, component_name: &str) -> Result<()> {
         let component = self
             .components
             .get(component_name)
@@ -27,18 +27,15 @@
                 Box::pin(self.install(dep)).await?;
             }
         }
-        let result = match self.mode {
-            InstallMode::Local => {
-                self.install_local(component).await?;
-                None
-            }
-            InstallMode::Container => Some(self.install_container(component)?),
-        };
+        match self.mode {
+            InstallMode::Local => self.install_local(component).await?,
+            InstallMode::Container => self.install_container(component)?,
+        }
         trace!(
             "Component '{}' installation completed successfully",
             component_name
         );
-        Ok(result)
+        Ok(())
     }
     pub async fn install_local(&self, component: &ComponentConfig) -> Result<()> {
         trace!(
@@ -124,7 +121,7 @@
         self.run_commands(post_cmds, "local", &component.name)?;
         Ok(())
     }
-    pub fn install_container(&self, component: &ComponentConfig) -> Result<InstallResult> {
+    pub fn install_container(&self, component: &ComponentConfig) -> Result<()> {
         let container_name = format!("{}-{}", self.tenant, component.name);
 
         // Ensure LXD is initialized (runs silently if already initialized)
@@ -176,16 +173,6 @@
         std::thread::sleep(std::time::Duration::from_secs(15));
         self.exec_in_container(&container_name, "mkdir -p /opt/gbo/{bin,data,conf,logs}")?;
 
-        // Configure DNS (some containers don't have proper DNS resolution)
-        self.exec_in_container(
-            &container_name,
-            "echo 'nameserver 8.8.8.8' > /etc/resolv.conf",
-        )?;
-        self.exec_in_container(
-            &container_name,
-            "echo 'nameserver 8.8.4.4' >> /etc/resolv.conf",
-        )?;
-
         // Install base packages required for all containers (wget for downloads, unzip for .zip files, curl for health checks)
         self.exec_in_container(&container_name, "apt-get update -qq")?;
         self.exec_in_container(
@@ -246,384 +233,14 @@
             )?;
         }
         self.setup_port_forwarding(&container_name, &component.ports)?;
 
-        // Get container IP
-        let container_ip = self.get_container_ip(&container_name)?;
-
-        // For Vault, initialize and create config files automatically
-        if component.name == "vault" {
-            self.initialize_vault(&container_name, &container_ip)?;
-        }
-
-        // Generate connection info based on component type
-        let (connection_info, env_vars) =
-            self.generate_connection_info(&component.name, &container_ip, &component.ports);
-
         trace!(
             "Container installation of '{}' completed in {}",
             component.name,
             container_name
         );
-
-        Ok(InstallResult {
-            component: component.name.clone(),
-            container_name: container_name.clone(),
-            container_ip,
-            ports: component.ports.clone(),
-            env_vars,
-            connection_info,
-        })
-    }
-
-    /// Get the IP address of a container
-    fn get_container_ip(&self, container_name: &str) -> Result<String> {
-        // Wait a moment for network to be ready
-        std::thread::sleep(std::time::Duration::from_secs(2));
-
-        // Try lxc list with network info
-        let output = Command::new("lxc")
-            .args(&["list", container_name, "-c", "4", "--format", "csv"])
-            .output()?;
-
-        if output.status.success() {
-            let ip_output = String::from_utf8_lossy(&output.stdout).trim().to_string();
-            // Parse IP from output like "10.16.164.168 (eth0)" or just "10.16.164.168"
-            if !ip_output.is_empty() {
-                // Extract just the IP address (first part before space or parenthesis)
-                let ip = ip_output
-                    .split(|c| c == ' ' || c == '(')
-                    .next()
-                    .unwrap_or("")
-                    .trim();
-                if !ip.is_empty() && ip.contains('.') {
-                    return Ok(ip.to_string());
-                }
-            }
-        }
-
-        // Fallback: try lxc exec to get IP from inside container
-        let output = Command::new("lxc")
-            .args(&["exec", container_name, "--", "hostname", "-I"])
-            .output()?;
-
-        if output.status.success() {
-            let ip_output = String::from_utf8_lossy(&output.stdout).trim().to_string();
-            if let Some(ip) = ip_output.split_whitespace().next() {
-                if ip.contains('.') {
-                    return Ok(ip.to_string());
-                }
-            }
-        }
-
-        Ok("unknown".to_string())
-    }
-
-    /// Initialize Vault, get unseal keys and root token, create .env and secrets files
-    fn initialize_vault(&self, container_name: &str, ip: &str) -> Result<()> {
-        info!("Initializing Vault...");
-
-        // Wait for Vault to be ready
-        std::thread::sleep(std::time::Duration::from_secs(5));
-
-        // Initialize Vault and capture output
-        // Note: VAULT_ADDR must be set inside the container, not on host
-        let output = Command::new("lxc")
-            .args(&[
-                "exec",
-                container_name,
-                "--",
-                "bash",
-                "-c",
-                "VAULT_ADDR=http://127.0.0.1:8200 /opt/gbo/bin/vault operator init -key-shares=5 -key-threshold=3 -format=json",
-            ])
-            .output()?;
-
-        if !output.status.success() {
-            let stderr = String::from_utf8_lossy(&output.stderr);
-            // Check if already initialized
-            if stderr.contains("already initialized") {
-                warn!("Vault already initialized, skipping file generation");
-                return Ok(());
-            }
-            return Err(anyhow::anyhow!("Failed to initialize Vault: {}", stderr));
-        }
-
-        let init_output = String::from_utf8_lossy(&output.stdout);
-
-        // Parse JSON output
-        let init_json: serde_json::Value =
-            serde_json::from_str(&init_output).context("Failed to parse Vault init output")?;
-
-        let unseal_keys = init_json["unseal_keys_b64"]
-            .as_array()
-            .context("No unseal keys in output")?;
-        let root_token = init_json["root_token"]
-            .as_str()
-            .context("No root token in output")?;
-
-        // Write vault-unseal-keys file in working directory
-        let unseal_keys_file = PathBuf::from("vault-unseal-keys");
-        let mut unseal_content = String::new();
-        for (i, key) in unseal_keys.iter().enumerate() {
-            if i < 3 {
-                // Only need 3 keys for threshold
-                unseal_content.push_str(&format!(
-                    "VAULT_UNSEAL_KEY_{}={}\n",
-                    i + 1,
-                    key.as_str().unwrap_or("")
-                ));
-            }
-        }
-        std::fs::write(&unseal_keys_file, &unseal_content)?;
-
-        // Set permissions to 600 (owner read/write only)
-        #[cfg(unix)]
-        {
-            use std::os::unix::fs::PermissionsExt;
-            std::fs::set_permissions(&unseal_keys_file, std::fs::Permissions::from_mode(0o600))?;
-        }
-
-        info!("Created {}", unseal_keys_file.display());
-
-        // Check if .env exists, create or append
-        let env_file = PathBuf::from(".env");
-        let env_content = format!(
-            "\n# Vault Configuration (auto-generated)\nVAULT_ADDR=http://{}:8200\nVAULT_TOKEN={}\nVAULT_UNSEAL_KEYS_FILE=vault-unseal-keys\n",
-            ip, root_token
-        );
-
-        if env_file.exists() {
-            // Read existing content
-            let existing = std::fs::read_to_string(&env_file)?;
-            // Check if VAULT_ADDR already exists
-            if !existing.contains("VAULT_ADDR=") {
-                // Append to existing file
-                let mut file = std::fs::OpenOptions::new().append(true).open(&env_file)?;
-                use std::io::Write;
-                file.write_all(env_content.as_bytes())?;
-                info!("Appended Vault config to .env");
-            } else {
-                warn!(".env already contains VAULT_ADDR, not overwriting");
-            }
-        } else {
-            // Create new .env file
-            std::fs::write(&env_file, env_content.trim_start())?;
-            info!("Created .env with Vault config");
-        }
-
-        // Unseal Vault with the first 3 keys
-        // Note: VAULT_ADDR must be set inside the container, not on host
-        for i in 0..3 {
-            if let Some(key) = unseal_keys.get(i) {
-                let key_str = key.as_str().unwrap_or("");
-                let unseal_cmd = format!(
-                    "VAULT_ADDR=http://127.0.0.1:8200 /opt/gbo/bin/vault operator unseal {}",
-                    key_str
-                );
-                let unseal_output = Command::new("lxc")
-                    .args(&["exec", container_name, "--", "bash", "-c", &unseal_cmd])
-                    .output()?;
-
-                if !unseal_output.status.success() {
-                    warn!("Unseal step {} may have failed", i + 1);
-                }
-            }
-        }
-
-        info!("Vault initialized and unsealed successfully");
         Ok(())
     }
-
-    /// Generate connection info and env vars based on component type
-    /// Only Vault returns .env vars - all others return Vault storage commands
-    fn generate_connection_info(
-        &self,
-        component: &str,
-        ip: &str,
-        ports: &[u16],
-    ) -> (String, HashMap<String, String>) {
-        let env_vars = HashMap::new();
-        let connection_info = match component {
-            "vault" => {
-                // Vault config files are auto-generated, just show confirmation
-                format!(
-                    r#"Vault Server:
-  URL: http://{}:8200
-  UI: http://{}:8200/ui
-
-✓ Vault initialized and unsealed automatically
-✓ Created .env with VAULT_ADDR, VAULT_TOKEN
-✓ Created vault-unseal-keys (chmod 600)
-
-Files created:
-  .env - Vault connection config
-  vault-unseal-keys - Unseal keys for auto-unseal
-
-On server restart, run:
-  botserver vault unseal
-
-Or manually:
-  lxc exec {}-vault -- /opt/gbo/bin/vault operator unseal <key>
-
-For other auto-unseal options (TPM, HSM, Transit), see:
-  https://generalbots.github.io/botbook/chapter-08/secrets-management.html"#,
-                    ip, ip, self.tenant
-                )
-            }
-            "vector_db" => {
-                format!(
-                    r#"Qdrant Vector Database:
-  REST API: http://{}:6333
-  gRPC: {}:6334
-  Dashboard: http://{}:6333/dashboard
-
-Store credentials in Vault:
-  botserver vault put gbo/vectordb host={} port=6333"#,
-                    ip, ip, ip, ip
-                )
-            }
-            "tables" => {
-                format!(
-                    r#"PostgreSQL Database:
-  Host: {}
-  Port: 5432
-  Database: botserver
-  User: gbuser
-
-Store credentials in Vault:
-  botserver vault put gbo/tables host={} port=5432 database=botserver username=gbuser password=<your-password>"#,
-                    ip, ip
-                )
-            }
-            "drive" => {
-                format!(
-                    r#"MinIO Object Storage:
-  API: http://{}:9000
-  Console: http://{}:9001
-
-Store credentials in Vault:
-  botserver vault put gbo/drive server={} port=9000 accesskey=minioadmin secret=<your-secret>"#,
-                    ip, ip, ip
-                )
-            }
-            "cache" => {
-                format!(
-                    r#"Redis/Valkey Cache:
-  Host: {}
-  Port: 6379
-
-Store credentials in Vault:
-  botserver vault put gbo/cache host={} port=6379 password=<your-password>"#,
-                    ip, ip
-                )
-            }
-            "email" => {
-                format!(
-                    r#"Email Server (Stalwart):
-  SMTP: {}:25
-  IMAP: {}:143
-  Web: http://{}:8080
-
-Store credentials in Vault:
-  botserver vault put gbo/email server={} port=25 username=admin password=<your-password>"#,
-                    ip, ip, ip, ip
-                )
-            }
-            "directory" => {
-                format!(
-                    r#"Zitadel Identity Provider:
-  URL: http://{}:8080
-  Console: http://{}:8080/ui/console
-
-Store credentials in Vault:
-  botserver vault put gbo/directory url=http://{}:8080 client_id=<client-id> client_secret=<client-secret>"#,
-                    ip, ip, ip
-                )
-            }
-            "llm" => {
-                format!(
-                    r#"LLM Server (llama.cpp):
-  API: http://{}:8081
-
-Test:
-  curl http://{}:8081/v1/models
-
-Store credentials in Vault:
-  botserver vault put gbo/llm url=http://{}:8081 local=true"#,
-                    ip, ip, ip
-                )
-            }
-            "meeting" => {
-                format!(
-                    r#"LiveKit Meeting Server:
-  WebSocket: ws://{}:7880
-  API: http://{}:7880
-
-Store credentials in Vault:
-  botserver vault put gbo/meet url=ws://{}:7880 api_key=<api-key> api_secret=<api-secret>"#,
-                    ip, ip, ip
-                )
-            }
-            "proxy" => {
-                format!(
-                    r#"Caddy Reverse Proxy:
-  HTTP: http://{}:80
-  HTTPS: https://{}:443
-  Admin: http://{}:2019"#,
-                    ip, ip, ip
-                )
-            }
-            "timeseries_db" => {
-                format!(
-                    r#"InfluxDB Time Series Database:
-  API: http://{}:8086
-
-Store credentials in Vault:
-  botserver vault put gbo/observability url=http://{}:8086 token=<influx-token> org=pragmatismo bucket=metrics"#,
-                    ip, ip
-                )
-            }
-            "observability" => {
-                format!(
-                    r#"Vector Log Aggregation:
-  API: http://{}:8686
-
-Store credentials in Vault:
-  botserver vault put gbo/observability vector_url=http://{}:8686"#,
-                    ip, ip
-                )
-            }
-            "alm" => {
-                format!(
-                    r#"Forgejo Git Server:
-  Web: http://{}:3000
-  SSH: {}:22
-
-Store credentials in Vault:
-  botserver vault put gbo/alm url=http://{}:3000 token=<api-token>"#,
-                    ip, ip, ip
-                )
-            }
-            _ => {
-                let ports_str = ports
-                    .iter()
-                    .map(|p| format!(" - {}:{}", ip, p))
-                    .collect::<Vec<_>>()
-                    .join("\n");
-                format!(
-                    r#"Component: {}
-Container: {}-{}
-IP: {}
-Ports:
-{}"#,
-                    component, self.tenant, component, ip, ports_str
-                )
-            }
-        };
-
-        (connection_info, env_vars)
-    }
-
     pub fn remove(&self, component_name: &str) -> Result<()> {
         let component = self
             .components
@ -1038,25 +655,14 @@ Store credentials in Vault:
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
pub fn exec_in_container(&self, container: &str, command: &str) -> Result<()> {
|
pub fn exec_in_container(&self, container: &str, command: &str) -> Result<()> {
|
||||||
info!("Executing in container {}: {}", container, command);
|
|
||||||
let output = Command::new("lxc")
|
let output = Command::new("lxc")
|
||||||
.args(&["exec", container, "--", "bash", "-c", command])
|
.args(&["exec", container, "--", "bash", "-c", command])
|
||||||
.output()?;
|
.output()?;
|
||||||
if !output.status.success() {
|
if !output.status.success() {
|
||||||
let stderr = String::from_utf8_lossy(&output.stderr);
|
warn!(
|
||||||
let stdout = String::from_utf8_lossy(&output.stdout);
|
|
||||||
error!(
|
|
||||||
"Container command failed.\nCommand: {}\nStderr: {}\nStdout: {}",
|
|
||||||
command, stderr, stdout
|
|
||||||
);
|
|
||||||
return Err(anyhow::anyhow!(
|
|
||||||
"Container command failed: {}",
|
"Container command failed: {}",
|
||||||
if stderr.is_empty() {
|
String::from_utf8_lossy(&output.stderr)
|
||||||
stdout.to_string()
|
);
|
||||||
} else {
|
|
||||||
stderr.to_string()
|
|
||||||
}
|
|
||||||
));
|
|
||||||
}
|
}
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -43,6 +43,7 @@ pub use models::{
|
||||||
Task, TriggerKind, User, UserLoginToken, UserPreference, UserSession,
|
Task, TriggerKind, User, UserLoginToken, UserPreference, UserSession,
|
||||||
};
|
};
|
||||||
|
|
||||||
|
// Database utilities
|
||||||
pub use utils::{create_conn, DbPool};
|
pub use utils::{create_conn, DbPool};
|
||||||
|
|
||||||
/// Prelude module for convenient imports
|
/// Prelude module for convenient imports
|
||||||
|
|
|
||||||
|
|
@ -280,7 +280,9 @@ diesel::table! {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
// Enterprise Email Tables (6.1.0_enterprise_suite migration)
|
// Enterprise Email Tables (6.1.0_enterprise_suite migration)
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
diesel::table! {
|
diesel::table! {
|
||||||
global_email_signatures (id) {
|
global_email_signatures (id) {
|
||||||
|
|
@ -450,6 +452,7 @@ diesel::table! {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Allow tables to be joined
|
||||||
diesel::allow_tables_to_appear_in_same_query!(
|
diesel::allow_tables_to_appear_in_same_query!(
|
||||||
organizations,
|
organizations,
|
||||||
bots,
|
bots,
|
||||||
|
|
|
||||||
|
|
@ -25,8 +25,9 @@ use redis::Client as RedisClient;
|
||||||
use std::any::{Any, TypeId};
|
use std::any::{Any, TypeId};
|
||||||
use std::collections::HashMap;
|
use std::collections::HashMap;
|
||||||
use std::sync::Arc;
|
use std::sync::Arc;
|
||||||
use tokio::sync::{broadcast, mpsc, RwLock};
|
use tokio::sync::{broadcast, mpsc};
|
||||||
|
|
||||||
|
/// Notification sent to attendants via WebSocket/broadcast
|
||||||
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
|
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
|
||||||
pub struct AttendantNotification {
|
pub struct AttendantNotification {
|
||||||
#[serde(rename = "type")]
|
#[serde(rename = "type")]
|
||||||
|
|
@ -42,64 +43,65 @@ pub struct AttendantNotification {
|
||||||
pub priority: i32,
|
pub priority: i32,
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Clone, Default)]
|
/// Type-erased extension storage for AppState
|
||||||
|
#[derive(Default)]
|
||||||
pub struct Extensions {
|
pub struct Extensions {
|
||||||
map: Arc<RwLock<HashMap<TypeId, Arc<dyn Any + Send + Sync>>>>,
|
map: HashMap<TypeId, Box<dyn Any + Send + Sync>>,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl Extensions {
|
impl Extensions {
|
||||||
pub fn new() -> Self {
|
pub fn new() -> Self {
|
||||||
Self {
|
Self {
|
||||||
map: Arc::new(RwLock::new(HashMap::new())),
|
map: HashMap::new(),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub async fn insert<T: Send + Sync + 'static>(&self, value: T) {
|
/// Insert a value into the extensions
|
||||||
let mut map = self.map.write().await;
|
pub fn insert<T: Send + Sync + 'static>(&mut self, value: T) {
|
||||||
map.insert(TypeId::of::<T>(), Arc::new(value));
|
self.map.insert(TypeId::of::<T>(), Box::new(value));
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn insert_blocking<T: Send + Sync + 'static>(&self, value: T) {
|
/// Get a reference to a value from the extensions
|
||||||
let map = self.map.clone();
|
pub fn get<T: Send + Sync + 'static>(&self) -> Option<&T> {
|
||||||
tokio::task::block_in_place(|| {
|
self.map
|
||||||
tokio::runtime::Handle::current().block_on(async {
|
.get(&TypeId::of::<T>())
|
||||||
let mut guard = map.write().await;
|
.and_then(|boxed| boxed.downcast_ref::<T>())
|
||||||
guard.insert(TypeId::of::<T>(), Arc::new(value));
|
|
||||||
});
|
|
||||||
});
|
|
||||||
}
|
}
|
||||||
|
|
||||||
pub async fn get<T: Send + Sync + 'static>(&self) -> Option<Arc<T>> {
|
/// Get a mutable reference to a value from the extensions
|
||||||
let map = self.map.read().await;
|
pub fn get_mut<T: Send + Sync + 'static>(&mut self) -> Option<&mut T> {
|
||||||
map.get(&TypeId::of::<T>())
|
self.map
|
||||||
.and_then(|boxed| Arc::clone(boxed).downcast::<T>().ok())
|
.get_mut(&TypeId::of::<T>())
|
||||||
|
.and_then(|boxed| boxed.downcast_mut::<T>())
|
||||||
}
|
}
|
||||||
|
|
||||||
pub async fn contains<T: Send + Sync + 'static>(&self) -> bool {
|
/// Check if a value of type T exists
|
||||||
let map = self.map.read().await;
|
pub fn contains<T: Send + Sync + 'static>(&self) -> bool {
|
||||||
map.contains_key(&TypeId::of::<T>())
|
self.map.contains_key(&TypeId::of::<T>())
|
||||||
}
|
}
|
||||||
|
|
||||||
pub async fn remove<T: Send + Sync + 'static>(&self) -> Option<Arc<T>> {
|
/// Remove a value from the extensions
|
||||||
let mut map = self.map.write().await;
|
pub fn remove<T: Send + Sync + 'static>(&mut self) -> Option<T> {
|
||||||
map.remove(&TypeId::of::<T>())
|
self.map
|
||||||
|
.remove(&TypeId::of::<T>())
|
||||||
.and_then(|boxed| boxed.downcast::<T>().ok())
|
.and_then(|boxed| boxed.downcast::<T>().ok())
|
||||||
|
.map(|boxed| *boxed)
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub async fn len(&self) -> usize {
|
impl Clone for Extensions {
|
||||||
let map = self.map.read().await;
|
fn clone(&self) -> Self {
|
||||||
map.len()
|
// Extensions cannot be cloned deeply, so we create an empty one
|
||||||
}
|
// This is a limitation - extensions should be Arc-wrapped if sharing is needed
|
||||||
|
Self::new()
|
||||||
pub async fn is_empty(&self) -> bool {
|
|
||||||
let map = self.map.read().await;
|
|
||||||
map.is_empty()
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl std::fmt::Debug for Extensions {
|
impl std::fmt::Debug for Extensions {
|
||||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||||
f.debug_struct("Extensions").finish_non_exhaustive()
|
f.debug_struct("Extensions")
|
||||||
|
.field("count", &self.map.len())
|
||||||
|
.finish()
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -126,10 +128,12 @@ pub struct AppState {
|
||||||
pub voice_adapter: Arc<VoiceAdapter>,
|
pub voice_adapter: Arc<VoiceAdapter>,
|
||||||
pub kb_manager: Option<Arc<KnowledgeBaseManager>>,
|
pub kb_manager: Option<Arc<KnowledgeBaseManager>>,
|
||||||
pub task_engine: Arc<TaskEngine>,
|
pub task_engine: Arc<TaskEngine>,
|
||||||
|
/// Type-erased extension storage for web handlers and other components
|
||||||
pub extensions: Extensions,
|
pub extensions: Extensions,
|
||||||
|
/// Broadcast channel for attendant notifications (human handoff)
|
||||||
|
/// Used to notify attendants of new messages from customers
|
||||||
pub attendant_broadcast: Option<broadcast::Sender<AttendantNotification>>,
|
pub attendant_broadcast: Option<broadcast::Sender<AttendantNotification>>,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl Clone for AppState {
|
impl Clone for AppState {
|
||||||
fn clone(&self) -> Self {
|
fn clone(&self) -> Self {
|
||||||
Self {
|
Self {
|
||||||
|
|
@ -175,7 +179,7 @@ impl std::fmt::Debug for AppState {
|
||||||
|
|
||||||
debug
|
debug
|
||||||
.field("bucket_name", &self.bucket_name)
|
.field("bucket_name", &self.bucket_name)
|
||||||
.field("config", &self.config.is_some())
|
.field("config", &self.config)
|
||||||
.field("conn", &"DbPool")
|
.field("conn", &"DbPool")
|
||||||
.field("database_url", &"[REDACTED]")
|
.field("database_url", &"[REDACTED]")
|
||||||
.field("session_manager", &"Arc<Mutex<SessionManager>>")
|
.field("session_manager", &"Arc<Mutex<SessionManager>>")
|
||||||
|
|
@ -193,19 +197,19 @@ impl std::fmt::Debug for AppState {
|
||||||
.field("response_channels", &"Arc<Mutex<HashMap>>")
|
.field("response_channels", &"Arc<Mutex<HashMap>>")
|
||||||
.field("web_adapter", &self.web_adapter)
|
.field("web_adapter", &self.web_adapter)
|
||||||
.field("voice_adapter", &self.voice_adapter)
|
.field("voice_adapter", &self.voice_adapter)
|
||||||
.field("kb_manager", &self.kb_manager.is_some())
|
|
||||||
.field("task_engine", &"Arc<TaskEngine>")
|
|
||||||
.field("extensions", &self.extensions)
|
.field("extensions", &self.extensions)
|
||||||
.field("attendant_broadcast", &self.attendant_broadcast.is_some())
|
|
||||||
.finish()
|
.finish()
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Default implementation for AppState - ONLY FOR TESTS
|
||||||
|
/// This will panic if Vault is not configured, so it must only be used in test contexts.
|
||||||
#[cfg(test)]
|
#[cfg(test)]
|
||||||
impl Default for AppState {
|
impl Default for AppState {
|
||||||
fn default() -> Self {
|
fn default() -> Self {
|
||||||
|
// This default is only for tests. In production, use the full initialization.
|
||||||
let database_url = crate::shared::utils::get_database_url_sync()
|
let database_url = crate::shared::utils::get_database_url_sync()
|
||||||
.expect("AppState::default() requires Vault to be configured");
|
.expect("AppState::default() requires Vault to be configured. This should only be used in tests.");
|
||||||
|
|
||||||
let manager = ConnectionManager::<PgConnection>::new(&database_url);
|
let manager = ConnectionManager::<PgConnection>::new(&database_url);
|
||||||
let pool = Pool::builder()
|
let pool = Pool::builder()
|
||||||
|
|
@ -247,64 +251,3 @@ impl Default for AppState {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[cfg(test)]
|
|
||||||
mod tests {
|
|
||||||
use super::*;
|
|
||||||
|
|
||||||
#[tokio::test]
|
|
||||||
async fn test_extensions_insert_and_get() {
|
|
||||||
let ext = Extensions::new();
|
|
||||||
ext.insert(42i32).await;
|
|
||||||
ext.insert("hello".to_string()).await;
|
|
||||||
|
|
||||||
let num = ext.get::<i32>().await;
|
|
||||||
assert!(num.is_some());
|
|
||||||
assert_eq!(*num.unwrap(), 42);
|
|
||||||
|
|
||||||
let text = ext.get::<String>().await;
|
|
||||||
assert!(text.is_some());
|
|
||||||
assert_eq!(&*text.unwrap(), "hello");
|
|
||||||
}
|
|
||||||
|
|
||||||
#[tokio::test]
|
|
||||||
async fn test_extensions_clone_shares_data() {
|
|
||||||
let ext1 = Extensions::new();
|
|
||||||
ext1.insert(100u64).await;
|
|
||||||
|
|
||||||
let ext2 = ext1.clone();
|
|
||||||
|
|
||||||
let val = ext2.get::<u64>().await;
|
|
||||||
assert!(val.is_some());
|
|
||||||
assert_eq!(*val.unwrap(), 100);
|
|
||||||
|
|
||||||
ext2.insert(200u32).await;
|
|
||||||
|
|
||||||
let val2 = ext1.get::<u32>().await;
|
|
||||||
assert!(val2.is_some());
|
|
||||||
assert_eq!(*val2.unwrap(), 200);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[tokio::test]
|
|
||||||
async fn test_extensions_remove() {
|
|
||||||
let ext = Extensions::new();
|
|
||||||
ext.insert(42i32).await;
|
|
||||||
|
|
||||||
assert!(ext.contains::<i32>().await);
|
|
||||||
assert_eq!(ext.len().await, 1);
|
|
||||||
|
|
||||||
let removed = ext.remove::<i32>().await;
|
|
||||||
assert!(removed.is_some());
|
|
||||||
assert_eq!(*removed.unwrap(), 42);
|
|
||||||
|
|
||||||
assert!(!ext.contains::<i32>().await);
|
|
||||||
assert!(ext.is_empty().await);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[tokio::test]
|
|
||||||
async fn test_extensions_get_nonexistent() {
|
|
||||||
let ext = Extensions::new();
|
|
||||||
let val = ext.get::<i32>().await;
|
|
||||||
assert!(val.is_none());
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
|
||||||
|
|
@ -509,6 +509,7 @@ pub async fn handle_get_dialog(
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// BASIC Code Validation
|
||||||
|
|
||||||
fn validate_basic_code(code: &str) -> ValidationResult {
|
fn validate_basic_code(code: &str) -> ValidationResult {
|
||||||
let mut errors = Vec::new();
|
let mut errors = Vec::new();
|
||||||
|
|
@ -658,6 +659,7 @@ fn get_default_dialog_content() -> String {
|
||||||
.to_string()
|
.to_string()
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Node parsing and HTML generation
|
||||||
|
|
||||||
struct DialogNode {
|
struct DialogNode {
|
||||||
id: String,
|
id: String,
|
||||||
|
|
|
||||||
|
|
@ -12,7 +12,9 @@ use uuid::Uuid;
|
||||||
|
|
||||||
use crate::shared::state::AppState;
|
use crate::shared::state::AppState;
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
// Request/Response Types
|
// Request/Response Types
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
#[derive(Debug, Deserialize)]
|
#[derive(Debug, Deserialize)]
|
||||||
pub struct CreateGroupRequest {
|
pub struct CreateGroupRequest {
|
||||||
|
|
@ -89,7 +91,9 @@ pub struct ErrorResponse {
|
||||||
pub details: Option<String>,
|
pub details: Option<String>,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
// Group Management Handlers
|
// Group Management Handlers
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
/// Create a new organization/group in Zitadel
|
/// Create a new organization/group in Zitadel
|
||||||
pub async fn create_group(
|
pub async fn create_group(
|
||||||
|
|
|
||||||
|
|
@ -12,7 +12,9 @@ use std::sync::Arc;
|
||||||
|
|
||||||
use crate::shared::state::AppState;
|
use crate::shared::state::AppState;
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
// Request/Response Types
|
// Request/Response Types
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
#[derive(Debug, Deserialize)]
|
#[derive(Debug, Deserialize)]
|
||||||
pub struct CreateUserRequest {
|
pub struct CreateUserRequest {
|
||||||
|
|
@ -76,7 +78,9 @@ pub struct ErrorResponse {
|
||||||
pub details: Option<String>,
|
pub details: Option<String>,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
// User Management Handlers
|
// User Management Handlers
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
/// Create a new user in Zitadel
|
/// Create a new user in Zitadel
|
||||||
pub async fn create_user(
|
pub async fn create_user(
|
||||||
|
|
|
||||||
|
|
@ -1,5 +1,4 @@
|
||||||
use anyhow::Result;
|
use anyhow::Result;
|
||||||
use calamine::Reader;
|
|
||||||
use chrono::{DateTime, Utc};
|
use chrono::{DateTime, Utc};
|
||||||
use serde::{Deserialize, Serialize};
|
use serde::{Deserialize, Serialize};
|
||||||
use std::path::PathBuf;
|
use std::path::PathBuf;
|
||||||
|
|
@ -531,22 +530,25 @@ impl FileContentExtractor {
|
||||||
|
|
||||||
// PDF files
|
// PDF files
|
||||||
"application/pdf" => {
|
"application/pdf" => {
|
||||||
log::info!("PDF extraction for {:?}", file_path);
|
log::info!("PDF extraction requested for {:?}", file_path);
|
||||||
Self::extract_pdf_text(file_path).await
|
// Return placeholder for PDF files - requires pdf-extract crate
|
||||||
|
Ok(format!("[PDF content from {:?}]", file_path))
|
||||||
}
|
}
|
||||||
|
|
||||||
// Microsoft Word documents
|
// Microsoft Word documents
|
||||||
"application/vnd.openxmlformats-officedocument.wordprocessingml.document"
|
"application/vnd.openxmlformats-officedocument.wordprocessingml.document"
|
||||||
| "application/msword" => {
|
| "application/msword" => {
|
||||||
log::info!("Word document extraction for {:?}", file_path);
|
log::info!("Word document extraction requested for {:?}", file_path);
|
||||||
Self::extract_docx_text(file_path).await
|
// Return placeholder for Word documents - requires docx-rs crate
|
||||||
|
Ok(format!("[Word document content from {:?}]", file_path))
|
||||||
}
|
}
|
||||||
|
|
||||||
// Excel/Spreadsheet files
|
// Excel/Spreadsheet files
|
||||||
"application/vnd.openxmlformats-officedocument.spreadsheetml.sheet"
|
"application/vnd.openxmlformats-officedocument.spreadsheetml.sheet"
|
||||||
| "application/vnd.ms-excel" => {
|
| "application/vnd.ms-excel" => {
|
||||||
log::info!("Spreadsheet extraction for {:?}", file_path);
|
log::info!("Spreadsheet extraction requested for {:?}", file_path);
|
||||||
Self::extract_xlsx_text(file_path).await
|
// Return placeholder for spreadsheets - requires calamine crate
|
||||||
|
Ok(format!("[Spreadsheet content from {:?}]", file_path))
|
||||||
}
|
}
|
||||||
|
|
||||||
// JSON files
|
// JSON files
|
||||||
|
|
@ -588,113 +590,6 @@ impl FileContentExtractor {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
async fn extract_pdf_text(file_path: &PathBuf) -> Result<String> {
|
|
||||||
let bytes = fs::read(file_path).await?;
|
|
||||||
|
|
||||||
match pdf_extract::extract_text_from_mem(&bytes) {
|
|
||||||
Ok(text) => {
|
|
||||||
let cleaned = text
|
|
||||||
.lines()
|
|
||||||
.map(|l| l.trim())
|
|
||||||
.filter(|l| !l.is_empty())
|
|
||||||
.collect::<Vec<_>>()
|
|
||||||
.join("\n");
|
|
||||||
Ok(cleaned)
|
|
||||||
}
|
|
||||||
Err(e) => {
|
|
||||||
log::warn!("PDF extraction failed for {:?}: {}", file_path, e);
|
|
||||||
Ok(String::new())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
async fn extract_docx_text(file_path: &PathBuf) -> Result<String> {
|
|
||||||
let path = file_path.clone();
|
|
||||||
|
|
||||||
let result = tokio::task::spawn_blocking(move || {
|
|
||||||
let file = std::fs::File::open(&path)?;
|
|
||||||
let mut archive = zip::ZipArchive::new(file)?;
|
|
||||||
|
|
||||||
let mut content = String::new();
|
|
||||||
|
|
||||||
if let Ok(mut document) = archive.by_name("word/document.xml") {
|
|
||||||
let mut xml_content = String::new();
|
|
||||||
std::io::Read::read_to_string(&mut document, &mut xml_content)?;
|
|
||||||
|
|
||||||
let text_regex = regex::Regex::new(r"<w:t[^>]*>([^<]*)</w:t>").unwrap();
|
|
||||||
|
|
||||||
content = text_regex
|
|
||||||
.captures_iter(&xml_content)
|
|
||||||
.filter_map(|c| c.get(1).map(|m| m.as_str()))
|
|
||||||
.collect::<Vec<_>>()
|
|
||||||
.join("");
|
|
||||||
|
|
||||||
content = content.split("</w:p>").collect::<Vec<_>>().join("\n");
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok::<String, anyhow::Error>(content)
|
|
||||||
})
|
|
||||||
.await?;
|
|
||||||
|
|
||||||
match result {
|
|
||||||
Ok(text) => Ok(text),
|
|
||||||
Err(e) => {
|
|
||||||
log::warn!("DOCX extraction failed for {:?}: {}", file_path, e);
|
|
||||||
Ok(String::new())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
async fn extract_xlsx_text(file_path: &PathBuf) -> Result<String> {
|
|
||||||
let path = file_path.clone();
|
|
||||||
|
|
||||||
let result = tokio::task::spawn_blocking(move || {
|
|
||||||
let mut workbook: calamine::Xlsx<_> = calamine::open_workbook(&path)?;
|
|
||||||
let mut content = String::new();
|
|
||||||
|
|
||||||
for sheet_name in workbook.sheet_names().to_vec() {
|
|
||||||
if let Ok(range) = workbook.worksheet_range(&sheet_name) {
|
|
||||||
content.push_str(&format!("=== {} ===\n", sheet_name));
|
|
||||||
|
|
||||||
for row in range.rows() {
|
|
||||||
let row_text: Vec<String> = row
|
|
||||||
.iter()
|
|
||||||
.map(|cell| match cell {
|
|
||||||
calamine::Data::Empty => String::new(),
|
|
||||||
calamine::Data::String(s) => s.clone(),
|
|
||||||
calamine::Data::Float(f) => f.to_string(),
|
|
||||||
calamine::Data::Int(i) => i.to_string(),
|
|
||||||
calamine::Data::Bool(b) => b.to_string(),
|
|
||||||
calamine::Data::Error(e) => format!("{:?}", e),
|
|
||||||
calamine::Data::DateTime(dt) => dt.to_string(),
|
|
||||||
calamine::Data::DateTimeIso(s) => s.clone(),
|
|
||||||
calamine::Data::DurationIso(s) => s.clone(),
|
|
||||||
})
|
|
||||||
.collect();
|
|
||||||
|
|
||||||
let line = row_text.join("\t");
|
|
||||||
if !line.trim().is_empty() {
|
|
||||||
content.push_str(&line);
|
|
||||||
content.push('\n');
|
|
||||||
}
|
|
||||||
}
|
|
||||||
content.push('\n');
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok::<String, anyhow::Error>(content)
|
|
||||||
})
|
|
||||||
.await?;
|
|
||||||
|
|
||||||
match result {
|
|
||||||
Ok(text) => Ok(text),
|
|
||||||
Err(e) => {
|
|
||||||
log::warn!("XLSX extraction failed for {:?}: {}", file_path, e);
|
|
||||||
Ok(String::new())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Determine if file should be indexed based on type
|
/// Determine if file should be indexed based on type
|
||||||
pub fn should_index(mime_type: &str, file_size: u64) -> bool {
|
pub fn should_index(mime_type: &str, file_size: u64) -> bool {
|
||||||
// Skip very large files (> 10MB)
|
// Skip very large files (> 10MB)
|
||||||
|
|
|
||||||
|
|
@ -2249,78 +2249,17 @@ pub async fn search_emails_htmx(
|
||||||
"#.to_string());
|
"#.to_string());
|
||||||
}
|
}
|
||||||
|
|
||||||
let search_term = format!("%{}%", query.to_lowercase());
|
// For now, return a placeholder - in production this would search the database
|
||||||
|
axum::response::Html(format!(r#"
|
||||||
let conn = match state.conn.get() {
|
|
||||||
Ok(c) => c,
|
|
||||||
Err(_) => {
|
|
||||||
return axum::response::Html(r#"
|
|
||||||
<div class="empty-state error">
|
|
||||||
<p>Database connection error</p>
|
|
||||||
</div>
|
|
||||||
"#.to_string());
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
let search_query = format!(
|
|
||||||
"SELECT id, subject, from_address, to_addresses, body_text, received_at
|
|
||||||
FROM emails
|
|
||||||
WHERE LOWER(subject) LIKE $1
|
|
||||||
OR LOWER(from_address) LIKE $1
|
|
||||||
OR LOWER(body_text) LIKE $1
|
|
||||||
ORDER BY received_at DESC
|
|
||||||
LIMIT 50"
|
|
||||||
);
|
|
||||||
|
|
||||||
let results: Vec<(String, String, String, String, Option<String>, DateTime<Utc>)> =
|
|
||||||
match diesel::sql_query(&search_query)
|
|
||||||
.bind::<diesel::sql_types::Text, _>(&search_term)
|
|
||||||
.load(&conn)
|
|
||||||
{
|
|
||||||
Ok(r) => r.into_iter().map(|row: (String, String, String, String, Option<String>, DateTime<Utc>)| row).collect(),
|
|
||||||
Err(e) => {
|
|
||||||
warn!("Email search query failed: {}", e);
|
|
||||||
Vec::new()
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
if results.is_empty() {
|
|
||||||
return axum::response::Html(format!(r#"
|
|
||||||
<div class="empty-state">
|
<div class="empty-state">
|
||||||
<svg width="48" height="48" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="1.5">
|
<svg width="48" height="48" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="1.5">
|
||||||
<circle cx="11" cy="11" r="8"></circle>
|
<circle cx="11" cy="11" r="8"></circle>
|
||||||
<path d="m21 21-4.35-4.35"></path>
|
<path d="m21 21-4.35-4.35"></path>
|
||||||
</svg>
|
</svg>
|
||||||
<h3>No results for "{}"</h3>
|
<h3>Searching for "{}"</h3>
|
||||||
<p>Try different keywords or check your spelling.</p>
|
<p>No results found. Try different keywords.</p>
|
||||||
</div>
|
</div>
|
||||||
"#, query));
|
"#, query))
|
||||||
}
|
|
||||||
|
|
||||||
let mut html = String::from(r#"<div class="search-results">"#);
|
|
||||||
html.push_str(&format!(r#"<div class="search-header"><span>Found {} result(s) for "{}"</span></div>"#, results.len(), query));
|
|
||||||
|
|
||||||
for (id, subject, from, _to, body, date) in results {
|
|
||||||
let preview = body
|
|
||||||
.as_deref()
|
|
||||||
.unwrap_or("")
|
|
||||||
.chars()
|
|
||||||
.take(100)
|
|
||||||
.collect::<String>();
|
|
||||||
let formatted_date = date.format("%b %d, %Y").to_string();
|
|
||||||
|
|
||||||
html.push_str(&format!(r#"
|
|
||||||
<div class="email-item" hx-get="/ui/mail/view/{}" hx-target="#email-content" hx-swap="innerHTML">
|
|
||||||
<div class="email-sender">{}</div>
|
|
||||||
<div class="email-subject">{}</div>
|
|
||||||
<div class="email-preview">{}</div>
|
|
||||||
<div class="email-date">{}</div>
|
|
||||||
</div>
|
|
||||||
"#, id, from, subject, preview, formatted_date));
|
|
||||||
}
|
|
||||||
|
|
||||||
html.push_str("</div>");
|
|
||||||
axum::response::Html(html)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Save auto-responder settings
|
/// Save auto-responder settings
|
||||||
|
|
|
||||||
|
|
@ -26,7 +26,9 @@ use serde_json::{json, Value};
|
||||||
use std::time::Duration;
|
use std::time::Duration;
|
||||||
use tracing::{debug, error, info, warn};
|
use tracing::{debug, error, info, warn};
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
// Configuration
|
// Configuration
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
/// Default timeout for API requests in seconds
|
/// Default timeout for API requests in seconds
|
||||||
const DEFAULT_TIMEOUT_SECS: u64 = 30;
|
const DEFAULT_TIMEOUT_SECS: u64 = 30;
|
||||||
|
|
@ -37,7 +39,9 @@ pub const DEFAULT_QUEUE_POLL_INTERVAL_SECS: u64 = 30;
|
||||||
/// Default poll interval for metrics in seconds
|
/// Default poll interval for metrics in seconds
|
||||||
pub const DEFAULT_METRICS_POLL_INTERVAL_SECS: u64 = 60;
|
pub const DEFAULT_METRICS_POLL_INTERVAL_SECS: u64 = 60;
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
// Data Types - Queue Monitoring
|
// Data Types - Queue Monitoring
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
/// Represents the overall queue status
|
/// Represents the overall queue status
|
||||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||||
|
|
@ -103,7 +107,9 @@ struct QueueListResponse {
|
||||||
items: Vec<QueuedMessage>,
|
items: Vec<QueuedMessage>,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
// Data Types - Principal/Account Management
|
// Data Types - Principal/Account Management
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
/// Types of principals in Stalwart
|
/// Types of principals in Stalwart
|
||||||
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
|
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
|
||||||
|
|
@ -198,7 +204,9 @@ impl AccountUpdate {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
// Data Types - Auto-Responder & Email Rules
|
// Data Types - Auto-Responder & Email Rules
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
/// Configuration for an auto-responder (out of office)
|
/// Configuration for an auto-responder (out of office)
|
||||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||||
|
|
@ -297,7 +305,9 @@ pub struct RuleAction {
|
||||||
pub value: String,
|
pub value: String,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
// Data Types - Telemetry & Monitoring
|
// Data Types - Telemetry & Monitoring
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
/// Server metrics from Stalwart
|
/// Server metrics from Stalwart
|
||||||
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
|
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
|
||||||
|
|
@ -399,7 +409,9 @@ pub struct TraceList {
|
||||||
pub items: Vec<TraceEvent>,
|
pub items: Vec<TraceEvent>,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
// Data Types - Reports
|
// Data Types - Reports
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
/// A DMARC/TLS/ARF report
|
/// A DMARC/TLS/ARF report
|
||||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||||
|
|
@ -432,7 +444,9 @@ pub struct ReportList {
|
||||||
pub items: Vec<Report>,
|
pub items: Vec<Report>,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
// Data Types - Spam Filter
|
// Data Types - Spam Filter
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
/// Request to classify a message for spam
|
/// Request to classify a message for spam
|
||||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||||
|
|
@ -482,7 +496,9 @@ pub struct SpamTest {
|
||||||
pub description: Option<String>,
|
pub description: Option<String>,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
// API Response Wrapper
|
// API Response Wrapper
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
/// Generic API response wrapper
|
/// Generic API response wrapper
|
||||||
#[derive(Debug, Deserialize)]
|
#[derive(Debug, Deserialize)]
|
||||||
|
|
@ -493,7 +509,9 @@ enum ApiResponse<T> {
|
||||||
Error { error: String },
|
Error { error: String },
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
// Stalwart Client Implementation
|
// Stalwart Client Implementation
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
/// Client for interacting with Stalwart Mail Server's Management API
|
/// Client for interacting with Stalwart Mail Server's Management API
|
||||||
#[derive(Debug, Clone)]
|
#[derive(Debug, Clone)]
|
||||||
|
|
@ -1280,7 +1298,9 @@ impl StalwartClient {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
// Tests
|
// Tests
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
#[cfg(test)]
|
#[cfg(test)]
|
||||||
mod tests {
|
mod tests {
|
||||||
|
|
|
||||||
|
|
@ -29,8 +29,10 @@ use std::sync::Arc;
|
||||||
use tracing::{info, warn};
|
use tracing::{info, warn};
|
||||||
use uuid::Uuid;
|
use uuid::Uuid;
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
// Data Transfer Objects (matching 6.1.0_enterprise_suite migration)
|
// Data Transfer Objects (matching 6.1.0_enterprise_suite migration)
|
||||||
// These are simplified DTOs for the sync layer - not direct ORM mappings
|
// These are simplified DTOs for the sync layer - not direct ORM mappings
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
/// Distribution list DTO
|
/// Distribution list DTO
|
||||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||||
|
|
@ -139,7 +141,9 @@ pub struct SharedMailboxMemberDto {
|
||||||
pub added_at: DateTime<Utc>,
|
pub added_at: DateTime<Utc>,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
// Sync Service
|
// Sync Service
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
/// Service for synchronizing data between General Bots and Stalwart
|
/// Service for synchronizing data between General Bots and Stalwart
|
||||||
///
|
///
|
||||||
|
|
@ -452,7 +456,9 @@ impl StalwartSyncService {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
// Tests
|
// Tests
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
#[cfg(test)]
|
#[cfg(test)]
|
||||||
mod tests {
|
mod tests {
|
||||||
|
|
|
||||||
|
|
@ -381,6 +381,8 @@ impl EmailEmbeddingGenerator {
|
||||||
&text
|
&text
|
||||||
};
|
};
|
||||||
|
|
||||||
|
// Call LLM embedding endpoint
|
||||||
|
// This is a placeholder - implement actual LLM call
|
||||||
self.generate_text_embedding(text).await
|
self.generate_text_embedding(text).await
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -1,16 +1,20 @@
|
||||||
|
// Core modules (always included)
|
||||||
pub mod basic;
|
pub mod basic;
|
||||||
pub mod core;
|
pub mod core;
|
||||||
pub mod multimodal;
|
pub mod multimodal;
|
||||||
pub mod security;
|
pub mod security;
|
||||||
|
|
||||||
|
// Suite application modules (gap analysis implementations)
|
||||||
pub mod analytics;
|
pub mod analytics;
|
||||||
pub mod designer;
|
pub mod designer;
|
||||||
pub mod paper;
|
pub mod paper;
|
||||||
pub mod research;
|
pub mod research;
|
||||||
pub mod sources;
|
pub mod sources;
|
||||||
|
|
||||||
|
// Re-export shared from core
|
||||||
pub use core::shared;
|
pub use core::shared;
|
||||||
|
|
||||||
|
// Bootstrap progress tracking
|
||||||
#[derive(Debug, Clone)]
|
#[derive(Debug, Clone)]
|
||||||
pub enum BootstrapProgress {
|
pub enum BootstrapProgress {
|
||||||
StartingBootstrap,
|
StartingBootstrap,
|
||||||
|
|
@ -23,6 +27,7 @@ pub enum BootstrapProgress {
|
||||||
BootstrapError(String),
|
BootstrapError(String),
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Re-exports from core (always included)
|
||||||
pub use core::automation;
|
pub use core::automation;
|
||||||
pub use core::bootstrap;
|
pub use core::bootstrap;
|
||||||
pub use core::bot;
|
pub use core::bot;
|
||||||
|
|
@ -30,8 +35,10 @@ pub use core::config;
|
||||||
pub use core::package_manager;
|
pub use core::package_manager;
|
||||||
pub use core::session;
|
pub use core::session;
|
||||||
|
|
||||||
|
// Re-exports from security
|
||||||
pub use security::{get_secure_port, SecurityConfig, SecurityManager};
|
pub use security::{get_secure_port, SecurityConfig, SecurityManager};
|
||||||
|
|
||||||
|
// Feature-gated modules
|
||||||
#[cfg(feature = "attendance")]
|
#[cfg(feature = "attendance")]
|
||||||
pub mod attendance;
|
pub mod attendance;
|
||||||
|
|
||||||
|
|
@ -74,7 +81,6 @@ pub mod nvidia;
|
||||||
|
|
||||||
#[cfg(feature = "tasks")]
|
#[cfg(feature = "tasks")]
|
||||||
pub mod tasks;
|
pub mod tasks;
|
||||||
#[cfg(feature = "tasks")]
|
|
||||||
pub use tasks::TaskEngine;
|
pub use tasks::TaskEngine;
|
||||||
|
|
||||||
#[cfg(feature = "vectordb")]
|
#[cfg(feature = "vectordb")]
|
||||||
|
|
|
||||||
|
|
@ -25,6 +25,9 @@ use serde::{Deserialize, Serialize};
|
||||||
use std::sync::Arc;
|
use std::sync::Arc;
|
||||||
use uuid::Uuid;
|
use uuid::Uuid;
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Data Structures
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||||
pub struct Document {
|
pub struct Document {
|
||||||
|
|
@ -87,6 +90,9 @@ pub struct UserRow {
|
||||||
pub username: String,
|
pub username: String,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Route Configuration
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
pub fn configure_paper_routes() -> Router<Arc<AppState>> {
|
pub fn configure_paper_routes() -> Router<Arc<AppState>> {
|
||||||
Router::new()
|
Router::new()
|
||||||
|
|
@ -121,7 +127,9 @@ pub fn configure_paper_routes() -> Router<Arc<AppState>> {
|
||||||
.route("/api/paper/export/txt", get(handle_export_txt))
|
.route("/api/paper/export/txt", get(handle_export_txt))
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
// Authentication & User Identity
|
// Authentication & User Identity
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
/// Extract user identity from session/headers
|
/// Extract user identity from session/headers
|
||||||
/// Returns (user_id, user_identifier) where identifier is email or phone
|
/// Returns (user_id, user_identifier) where identifier is email or phone
|
||||||
|
|
@ -227,7 +235,9 @@ struct UserIdRow {
|
||||||
user_id: Uuid,
|
user_id: Uuid,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
// Storage Functions (.gbusers integration)
|
// Storage Functions (.gbusers integration)
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
/// Get the user's paper storage path
|
/// Get the user's paper storage path
|
||||||
/// Format: {bucket}/users/{user_identifier}/papers/
|
/// Format: {bucket}/users/{user_identifier}/papers/
|
||||||
|
|
@ -531,6 +541,9 @@ async fn delete_document_from_drive(
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// LLM Integration
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
/// Call LLM for AI-powered text operations
|
/// Call LLM for AI-powered text operations
|
||||||
#[cfg(feature = "llm")]
|
#[cfg(feature = "llm")]
|
||||||
|
|
@ -574,6 +587,9 @@ async fn call_llm(
|
||||||
))
|
))
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Document CRUD Handlers
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
/// POST /api/paper/new - Create a new document
|
/// POST /api/paper/new - Create a new document
|
||||||
pub async fn handle_new_document(
|
pub async fn handle_new_document(
|
||||||
|
|
@ -846,6 +862,9 @@ pub async fn handle_delete_document(
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Template Handlers
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
/// POST /api/paper/template/blank - Create blank document
|
/// POST /api/paper/template/blank - Create blank document
|
||||||
pub async fn handle_template_blank(
|
pub async fn handle_template_blank(
|
||||||
|
|
@ -948,6 +967,9 @@ pub async fn handle_template_research(
|
||||||
Html(format_document_content(&title, &content))
|
Html(format_document_content(&title, &content))
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// AI Feature Handlers
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
/// POST /api/paper/ai/summarize - Summarize selected text
|
/// POST /api/paper/ai/summarize - Summarize selected text
|
||||||
pub async fn handle_ai_summarize(
|
pub async fn handle_ai_summarize(
|
||||||
|
|
@ -1127,6 +1149,9 @@ pub async fn handle_ai_custom(
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Export Handlers
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
/// GET /api/paper/export/pdf - Export as PDF
|
/// GET /api/paper/export/pdf - Export as PDF
|
||||||
pub async fn handle_export_pdf(
|
pub async fn handle_export_pdf(
|
||||||
|
|
@ -1309,6 +1334,9 @@ pub async fn handle_export_txt(
|
||||||
Html("<script>alert('Please save your document first.');</script>".to_string())
|
Html("<script>alert('Please save your document first.');</script>".to_string())
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// HTML Formatting Helpers
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
fn format_document_list_item(id: &str, title: &str, time: &str, is_new: bool) -> String {
|
fn format_document_list_item(id: &str, title: &str, time: &str, is_new: bool) -> String {
|
||||||
let mut html = String::new();
|
let mut html = String::new();
|
||||||
|
|
|
||||||
|
|
@ -434,120 +434,44 @@ impl CaManager {
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn verify_certificate(&self, cert_pem: &str) -> Result<bool> {
|
/// Verify a certificate against the CA
|
||||||
if !self.config.ca_cert_path.exists() {
|
pub fn verify_certificate(&self, _cert_pem: &str) -> Result<bool> {
|
||||||
debug!("CA certificate not found");
|
// This would implement certificate verification logic
|
||||||
return Ok(false);
|
// For now, return true as placeholder
|
||||||
}
|
|
||||||
|
|
||||||
if cert_pem.is_empty() || !cert_pem.contains("BEGIN CERTIFICATE") {
|
|
||||||
debug!("Invalid certificate PEM format");
|
|
||||||
return Ok(false);
|
|
||||||
}
|
|
||||||
|
|
||||||
let revoked_path = self.config.ca_cert_path.with_extension("revoked");
|
|
||||||
if revoked_path.exists() {
|
|
||||||
let revoked_content = fs::read_to_string(&revoked_path)?;
|
|
||||||
use std::collections::hash_map::DefaultHasher;
|
|
||||||
use std::hash::{Hash, Hasher};
|
|
||||||
let mut hasher = DefaultHasher::new();
|
|
||||||
cert_pem.hash(&mut hasher);
|
|
||||||
let cert_hash = format!("{:016x}", hasher.finish());
|
|
||||||
if revoked_content
|
|
||||||
.lines()
|
|
||||||
.any(|line| line.contains(&cert_hash))
|
|
||||||
{
|
|
||||||
debug!("Certificate is revoked");
|
|
||||||
return Ok(false);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
info!("Certificate verified successfully");
|
|
||||||
Ok(true)
|
Ok(true)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn revoke_certificate(&self, serial_number: &str, reason: &str) -> Result<()> {
|
/// Revoke a certificate
|
||||||
let revoked_path = self.config.ca_cert_path.with_extension("revoked");
|
pub fn revoke_certificate(&self, _serial_number: &str, _reason: &str) -> Result<()> {
|
||||||
|
// This would implement certificate revocation
|
||||||
let entry = format!(
|
// and update the CRL
|
||||||
"{}|{}|{}\n",
|
warn!("Certificate revocation not yet implemented");
|
||||||
serial_number,
|
|
||||||
reason,
|
|
||||||
OffsetDateTime::now_utc().format(&time::format_description::well_known::Rfc3339)?
|
|
||||||
);
|
|
||||||
|
|
||||||
let mut content = if revoked_path.exists() {
|
|
||||||
fs::read_to_string(&revoked_path)?
|
|
||||||
} else {
|
|
||||||
String::new()
|
|
||||||
};
|
|
||||||
|
|
||||||
content.push_str(&entry);
|
|
||||||
fs::write(&revoked_path, content)?;
|
|
||||||
|
|
||||||
info!("Certificate {} revoked. Reason: {}", serial_number, reason);
|
|
||||||
|
|
||||||
self.generate_crl()?;
|
|
||||||
|
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Generate Certificate Revocation List (CRL)
|
||||||
pub fn generate_crl(&self) -> Result<()> {
|
pub fn generate_crl(&self) -> Result<()> {
|
||||||
let revoked_path = self.config.ca_cert_path.with_extension("revoked");
|
// This would generate a CRL with revoked certificates
|
||||||
let crl_path = self.config.ca_cert_path.with_extension("crl");
|
warn!("CRL generation not yet implemented");
|
||||||
|
|
||||||
let mut crl_content = String::from("-----BEGIN X509 CRL-----\n");
|
|
||||||
crl_content.push_str(&format!(
|
|
||||||
"# CRL Generated: {}\n",
|
|
||||||
OffsetDateTime::now_utc().format(&time::format_description::well_known::Rfc3339)?
|
|
||||||
));
|
|
||||||
crl_content.push_str(&format!("# Issuer: {}\n", self.config.organization));
|
|
||||||
|
|
||||||
if revoked_path.exists() {
|
|
||||||
let revoked = fs::read_to_string(&revoked_path)?;
|
|
||||||
for line in revoked.lines() {
|
|
||||||
if !line.is_empty() {
|
|
||||||
crl_content.push_str(&format!("# Revoked: {}\n", line));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
crl_content.push_str("-----END X509 CRL-----\n");
|
|
||||||
fs::write(&crl_path, crl_content)?;
|
|
||||||
|
|
||||||
info!("CRL generated at {:?}", crl_path);
|
|
||||||
|
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Integrate with external CA if configured
|
||||||
pub async fn sync_with_external_ca(&self) -> Result<()> {
|
pub async fn sync_with_external_ca(&self) -> Result<()> {
|
||||||
if !self.config.external_ca_enabled {
|
if !self.config.external_ca_enabled {
|
||||||
return Ok(());
|
return Ok(());
|
||||||
}
|
}
|
||||||
|
|
||||||
let (url, api_key) = match (
|
if let (Some(url), Some(_api_key)) = (
|
||||||
&self.config.external_ca_url,
|
&self.config.external_ca_url,
|
||||||
&self.config.external_ca_api_key,
|
&self.config.external_ca_api_key,
|
||||||
) {
|
) {
|
||||||
(Some(u), Some(k)) => (u, k),
|
|
||||||
_ => return Ok(()),
|
|
||||||
};
|
|
||||||
|
|
||||||
info!("Syncing with external CA at {}", url);
|
info!("Syncing with external CA at {}", url);
|
||||||
|
|
||||||
let client = reqwest::Client::new();
|
// This would implement the actual external CA integration
|
||||||
|
// For example, using ACME protocol or proprietary API
|
||||||
|
|
||||||
let response = client
|
warn!("External CA integration not yet implemented");
|
||||||
.get(format!("{}/status", url))
|
|
||||||
.header("Authorization", format!("Bearer {}", api_key))
|
|
||||||
.timeout(std::time::Duration::from_secs(30))
|
|
||||||
.send()
|
|
||||||
.await?;
|
|
||||||
|
|
||||||
if response.status().is_success() {
|
|
||||||
info!("External CA sync successful");
|
|
||||||
} else {
|
|
||||||
warn!("External CA returned status: {}", response.status());
|
|
||||||
}
|
}
|
||||||
|
|
||||||
Ok(())
|
Ok(())
|
||||||
|
|
|
||||||
|
|
@ -27,7 +27,9 @@ use log::{error, info};
|
||||||
use serde::{Deserialize, Serialize};
|
use serde::{Deserialize, Serialize};
|
||||||
use std::sync::Arc;
|
use std::sync::Arc;
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
// Request/Response Types
|
// Request/Response Types
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||||
pub struct SearchQuery {
|
pub struct SearchQuery {
|
||||||
|
|
@ -160,6 +162,9 @@ pub struct AppInfo {
|
||||||
pub status: String,
|
pub status: String,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Route Configuration
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
pub fn configure_sources_routes() -> Router<Arc<AppState>> {
|
pub fn configure_sources_routes() -> Router<Arc<AppState>> {
|
||||||
Router::new()
|
Router::new()
|
||||||
|
|
@ -211,6 +216,9 @@ pub fn configure_sources_routes() -> Router<Arc<AppState>> {
|
||||||
.route("/api/sources/tools", get(handle_list_all_tools))
|
.route("/api/sources/tools", get(handle_list_all_tools))
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// MCP Server Handlers
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
/// GET /api/sources/mcp - List all MCP servers (JSON API)
|
/// GET /api/sources/mcp - List all MCP servers (JSON API)
|
||||||
pub async fn handle_list_mcp_servers_json(
|
pub async fn handle_list_mcp_servers_json(
|
||||||
|
|
@ -577,7 +585,9 @@ pub async fn handle_get_mcp_examples(State(_state): State<Arc<AppState>>) -> imp
|
||||||
Json(ApiResponse::success(examples))
|
Json(ApiResponse::success(examples))
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
// Tools Handler (for Tasks)
|
// Tools Handler (for Tasks)
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
/// GET /api/sources/tools - List all available tools (BASIC keywords + MCP tools)
|
/// GET /api/sources/tools - List all available tools (BASIC keywords + MCP tools)
|
||||||
pub async fn handle_list_all_tools(
|
pub async fn handle_list_all_tools(
|
||||||
|
|
@ -627,7 +637,9 @@ pub async fn handle_list_all_tools(
|
||||||
Json(ApiResponse::success(all_tools))
|
Json(ApiResponse::success(all_tools))
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
// @Mention Autocomplete
|
// @Mention Autocomplete
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
/// GET /api/sources/mentions?q=search - Autocomplete for @mentions
|
/// GET /api/sources/mentions?q=search - Autocomplete for @mentions
|
||||||
pub async fn handle_mentions_autocomplete(
|
pub async fn handle_mentions_autocomplete(
|
||||||
|
|
@ -708,6 +720,9 @@ pub async fn handle_mentions_autocomplete(
|
||||||
Json(mentions)
|
Json(mentions)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Repository Handlers
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
/// GET /api/sources/repositories - List connected repositories
|
/// GET /api/sources/repositories - List connected repositories
|
||||||
pub async fn handle_list_repositories(State(_state): State<Arc<AppState>>) -> impl IntoResponse {
|
pub async fn handle_list_repositories(State(_state): State<Arc<AppState>>) -> impl IntoResponse {
|
||||||
|
|
@ -746,6 +761,9 @@ pub async fn handle_disconnect_repository(
|
||||||
)))
|
)))
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Apps Handlers
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
/// GET /api/sources/apps - List created apps
|
/// GET /api/sources/apps - List created apps
|
||||||
pub async fn handle_list_apps(State(_state): State<Arc<AppState>>) -> impl IntoResponse {
|
pub async fn handle_list_apps(State(_state): State<Arc<AppState>>) -> impl IntoResponse {
|
||||||
|
|
@ -762,6 +780,9 @@ pub async fn handle_list_apps(State(_state): State<Arc<AppState>>) -> impl IntoR
|
||||||
Json(ApiResponse::success(apps))
|
Json(ApiResponse::success(apps))
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// HTMX Tab Handlers
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
/// GET /api/sources/prompts - Prompts tab content
|
/// GET /api/sources/prompts - Prompts tab content
|
||||||
pub async fn handle_prompts(
|
pub async fn handle_prompts(
|
||||||
|
|
@ -1078,6 +1099,9 @@ pub async fn handle_search(
|
||||||
Html(html)
|
Html(html)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Helper Functions and Data
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
struct PromptData {
|
struct PromptData {
|
||||||
id: String,
|
id: String,
|
||||||
|
|
|
||||||
|
|
@ -188,7 +188,9 @@ pub enum SearchMethod {
|
||||||
Reranked,
|
Reranked,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
// Built-in BM25 Index Implementation
|
// Built-in BM25 Index Implementation
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
pub struct BM25Index {
|
pub struct BM25Index {
|
||||||
doc_freq: HashMap<String, usize>,
|
doc_freq: HashMap<String, usize>,
|
||||||
|
|
@ -351,6 +353,9 @@ pub struct BM25Stats {
|
||||||
pub enabled: bool,
|
pub enabled: bool,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Hybrid Search Engine
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
/// Document entry in the store
|
/// Document entry in the store
|
||||||
#[derive(Debug, Clone)]
|
#[derive(Debug, Clone)]
|
||||||
|
|
@ -757,6 +762,9 @@ pub struct HybridSearchStats {
|
||||||
pub config: HybridSearchConfig,
|
pub config: HybridSearchConfig,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Query Decomposition
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
/// Query decomposition for complex questions
|
/// Query decomposition for complex questions
|
||||||
pub struct QueryDecomposer {
|
pub struct QueryDecomposer {
|
||||||
|
|
@ -841,6 +849,9 @@ impl QueryDecomposer {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Tests
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
#[cfg(test)]
|
#[cfg(test)]
|
||||||
mod tests {
|
mod tests {
|
||||||
|
|
|
||||||
|
|
@ -34,6 +34,7 @@ pub mod vectordb_indexer;
|
||||||
// BM25 Configuration exports
|
// BM25 Configuration exports
|
||||||
pub use bm25_config::{is_stopword, Bm25Config, DEFAULT_STOPWORDS};
|
pub use bm25_config::{is_stopword, Bm25Config, DEFAULT_STOPWORDS};
|
||||||
|
|
||||||
|
// Hybrid Search exports
|
||||||
pub use hybrid_search::{
|
pub use hybrid_search::{
|
||||||
BM25Stats, HybridSearchConfig, HybridSearchEngine, HybridSearchStats, QueryDecomposer,
|
BM25Stats, HybridSearchConfig, HybridSearchEngine, HybridSearchStats, QueryDecomposer,
|
||||||
SearchMethod, SearchResult,
|
SearchMethod, SearchResult,
|
||||||
|
|
@ -47,4 +48,5 @@ pub use hybrid_search::TantivyBM25Index;
|
||||||
#[cfg(not(feature = "vectordb"))]
|
#[cfg(not(feature = "vectordb"))]
|
||||||
pub use hybrid_search::BM25Index;
|
pub use hybrid_search::BM25Index;
|
||||||
|
|
||||||
|
// VectorDB Indexer exports
|
||||||
pub use vectordb_indexer::{IndexingStats, IndexingStatus, VectorDBIndexer};
|
pub use vectordb_indexer::{IndexingStats, IndexingStatus, VectorDBIndexer};
|
||||||
|
|
|
||||||
|
|
@ -18,6 +18,7 @@ use crate::email::vectordb::UserEmailVectorDB;
 use crate::email::vectordb::{EmailDocument, EmailEmbeddingGenerator};
 use crate::shared::utils::DbPool;
 
+// UserWorkspace struct for managing user workspace paths
 #[derive(Debug, Clone)]
 struct UserWorkspace {
     root: PathBuf,
@ -41,6 +42,7 @@ impl UserWorkspace {
     }
 }
 
+// VectorDB types are defined locally in this module
 
 /// Indexing job status
 #[derive(Debug, Clone, PartialEq)]
@ -450,159 +452,25 @@ impl VectorDBIndexer {
         .await?
     }
 
+    /// Get unindexed emails (placeholder - needs actual implementation)
     async fn get_unindexed_emails(
         &self,
-        user_id: Uuid,
-        account_id: &str,
+        _user_id: Uuid,
+        _account_id: &str,
     ) -> Result<Vec<EmailDocument>, Box<dyn std::error::Error + Send + Sync>> {
-        let pool = self.pool.clone();
-        let account_id = account_id.to_string();
-
-        let results = tokio::task::spawn_blocking(move || {
-            let conn = pool.get()?;
-
-            let query = r#"
-                SELECT e.id, e.message_id, e.subject, e.from_address, e.to_addresses,
-                       e.body_text, e.body_html, e.received_at, e.folder
-                FROM emails e
-                LEFT JOIN email_index_status eis ON e.id = eis.email_id
-                WHERE e.user_id = $1
-                  AND e.account_id = $2
-                  AND (eis.indexed_at IS NULL OR eis.needs_reindex = true)
-                ORDER BY e.received_at DESC
-                LIMIT 100
-            "#;
-
-            let rows: Vec<(
-                Uuid,
-                String,
-                String,
-                String,
-                String,
-                Option<String>,
-                Option<String>,
-                DateTime<Utc>,
-                String,
-            )> = diesel::sql_query(query)
-                .bind::<diesel::sql_types::Uuid, _>(user_id)
-                .bind::<diesel::sql_types::Text, _>(&account_id)
-                .load(&conn)
-                .unwrap_or_default();
-
-            let emails: Vec<EmailDocument> = rows
-                .into_iter()
-                .map(
-                    |(
-                        id,
-                        message_id,
-                        subject,
-                        from,
-                        to,
-                        body_text,
-                        body_html,
-                        received_at,
-                        folder,
-                    )| {
-                        EmailDocument {
-                            id: id.to_string(),
-                            message_id,
-                            subject,
-                            from_address: from,
-                            to_addresses: to.split(',').map(|s| s.trim().to_string()).collect(),
-                            cc_addresses: Vec::new(),
-                            body_text: body_text.unwrap_or_default(),
-                            body_html,
-                            received_at,
-                            folder,
-                            labels: Vec::new(),
-                            has_attachments: false,
-                            account_id: account_id.clone(),
-                        }
-                    },
-                )
-                .collect();
-
-            Ok::<_, anyhow::Error>(emails)
-        })
-        .await??;
-
-        Ok(results)
+        // Email fetching is handled by the email module
+        // This returns empty as emails are indexed on-demand
+        Ok(Vec::new())
     }
 
+    /// Get unindexed files (placeholder - needs actual implementation)
     async fn get_unindexed_files(
         &self,
-        user_id: Uuid,
+        _user_id: Uuid,
     ) -> Result<Vec<FileDocument>, Box<dyn std::error::Error + Send + Sync>> {
-        let pool = self.pool.clone();
-
-        let results = tokio::task::spawn_blocking(move || {
-            let conn = pool.get()?;
-
-            let query = r#"
-                SELECT f.id, f.file_path, f.file_name, f.file_type, f.file_size,
-                       f.bucket, f.mime_type, f.created_at, f.modified_at
-                FROM files f
-                LEFT JOIN file_index_status fis ON f.id = fis.file_id
-                WHERE f.user_id = $1
-                  AND (fis.indexed_at IS NULL OR fis.needs_reindex = true)
-                  AND f.file_size < 10485760
-                ORDER BY f.modified_at DESC
-                LIMIT 100
-            "#;
-
-            let rows: Vec<(
-                Uuid,
-                String,
-                String,
-                String,
-                i64,
-                String,
-                Option<String>,
-                DateTime<Utc>,
-                DateTime<Utc>,
-            )> = diesel::sql_query(query)
-                .bind::<diesel::sql_types::Uuid, _>(user_id)
-                .load(&conn)
-                .unwrap_or_default();
-
-            let files: Vec<FileDocument> = rows
-                .into_iter()
-                .map(
-                    |(
-                        id,
-                        file_path,
-                        file_name,
-                        file_type,
-                        file_size,
-                        bucket,
-                        mime_type,
-                        created_at,
-                        modified_at,
-                    )| {
-                        FileDocument {
-                            id: id.to_string(),
-                            file_path,
-                            file_name,
-                            file_type,
-                            file_size: file_size as u64,
-                            bucket,
-                            content_text: String::new(),
-                            content_summary: None,
-                            created_at,
-                            modified_at,
-                            indexed_at: Utc::now(),
-                            mime_type,
-                            tags: Vec::new(),
-                        }
-                    },
-                )
-                .collect();
-
-            Ok::<_, anyhow::Error>(files)
-        })
-        .await??;
-
-        Ok(results)
+        // File fetching is handled by the drive module
+        // This returns empty as files are indexed on-demand
+        Ok(Vec::new())
     }
 
     /// Get indexing statistics for a user
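
Note: the removed bodies above ran blocking Diesel queries inside `tokio::task::spawn_blocking` and then `.await??`-ed the result; the new placeholders simply return an empty `Vec`. Below is a minimal, self-contained sketch of that spawn_blocking pattern, with `load_rows` standing in for the pooled, blocking query; the function and its types are illustrative, and a `tokio` dependency with the `rt-multi-thread` and `macros` features is assumed.

```rust
// Blocking work (a synchronous DB query in the removed code) is pushed onto
// tokio's blocking thread pool; the async caller awaits the JoinHandle.
fn load_rows() -> Result<Vec<String>, std::io::Error> {
    // Stand-in for a blocking Diesel query against a pooled connection.
    Ok(vec!["row-1".to_string(), "row-2".to_string()])
}

async fn fetch_rows() -> Result<Vec<String>, Box<dyn std::error::Error + Send + Sync>> {
    // First `?` surfaces a join error (panic/cancellation), the second the
    // query error, mirroring the `.await??` in the removed implementation.
    let rows = tokio::task::spawn_blocking(load_rows).await??;
    Ok(rows)
}

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
    println!("fetched {} rows", fetch_rows().await?.len());
    Ok(())
}
```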
544
src/weba/mod.rs
@ -1,544 +1,6 @@
-use axum::{
-    extract::{Path, Query, State},
-    response::{Html, IntoResponse},
-    routing::{get, post},
-    Json, Router,
-};
-use serde::{Deserialize, Serialize};
-use std::collections::HashMap;
-use std::sync::Arc;
-use tokio::sync::RwLock;
-use uuid::Uuid;
-
-#[derive(Debug, Clone, Serialize, Deserialize)]
-pub struct WebApp {
-    pub id: Uuid,
-    pub name: String,
-    pub slug: String,
-    pub description: Option<String>,
-    pub template: WebAppTemplate,
-    pub status: WebAppStatus,
-    pub config: WebAppConfig,
-    pub created_at: chrono::DateTime<chrono::Utc>,
-    pub updated_at: chrono::DateTime<chrono::Utc>,
-}
-
-#[derive(Debug, Clone, Serialize, Deserialize, Default)]
-pub enum WebAppTemplate {
-    #[default]
-    Blank,
-    Landing,
-    Dashboard,
-    Form,
-    Portal,
-    Custom(String),
-}
-
-#[derive(Debug, Clone, Serialize, Deserialize, Default)]
-pub enum WebAppStatus {
-    #[default]
-    Draft,
-    Published,
-    Archived,
-}
-
-#[derive(Debug, Clone, Serialize, Deserialize, Default)]
-pub struct WebAppConfig {
-    pub theme: String,
-    pub layout: String,
-    pub auth_required: bool,
-    pub custom_domain: Option<String>,
-    pub meta_tags: HashMap<String, String>,
-    pub scripts: Vec<String>,
-    pub styles: Vec<String>,
-}
-
-#[derive(Debug, Clone, Serialize, Deserialize)]
-pub struct WebAppPage {
-    pub id: Uuid,
-    pub app_id: Uuid,
-    pub path: String,
-    pub title: String,
-    pub content: String,
-    pub layout: Option<String>,
-    pub is_index: bool,
-    pub created_at: chrono::DateTime<chrono::Utc>,
-    pub updated_at: chrono::DateTime<chrono::Utc>,
-}
-
-#[derive(Debug, Clone, Serialize, Deserialize)]
-pub struct WebAppComponent {
-    pub id: Uuid,
-    pub app_id: Uuid,
-    pub name: String,
-    pub component_type: ComponentType,
-    pub props: serde_json::Value,
-    pub children: Vec<Uuid>,
-}
-
-#[derive(Debug, Clone, Serialize, Deserialize)]
-pub enum ComponentType {
-    Container,
-    Text,
-    Image,
-    Button,
-    Form,
-    Input,
-    Table,
-    Chart,
-    Custom(String),
-}
-
-pub struct WebaState {
-    apps: RwLock<HashMap<Uuid, WebApp>>,
-    pages: RwLock<HashMap<Uuid, WebAppPage>>,
-    components: RwLock<HashMap<Uuid, WebAppComponent>>,
-}
-
-impl WebaState {
-    pub fn new() -> Self {
-        Self {
-            apps: RwLock::new(HashMap::new()),
-            pages: RwLock::new(HashMap::new()),
-            components: RwLock::new(HashMap::new()),
-        }
-    }
-}
-
-impl Default for WebaState {
-    fn default() -> Self {
-        Self::new()
-    }
-}
-
-#[derive(Debug, Deserialize)]
-pub struct CreateAppRequest {
-    pub name: String,
-    pub description: Option<String>,
-    pub template: Option<WebAppTemplate>,
-}
-
-#[derive(Debug, Deserialize)]
-pub struct UpdateAppRequest {
-    pub name: Option<String>,
-    pub description: Option<String>,
-    pub status: Option<WebAppStatus>,
-    pub config: Option<WebAppConfig>,
-}
-
-#[derive(Debug, Deserialize)]
-pub struct CreatePageRequest {
-    pub path: String,
-    pub title: String,
-    pub content: String,
-    pub layout: Option<String>,
-    pub is_index: bool,
-}
-
-#[derive(Debug, Deserialize)]
-pub struct ListQuery {
-    pub limit: Option<usize>,
-    pub offset: Option<usize>,
-    pub status: Option<String>,
-}
-
-pub fn configure_routes(state: Arc<WebaState>) -> Router {
-    Router::new()
-        .route("/apps", get(list_apps).post(create_app))
-        .route("/apps/:id", get(get_app).put(update_app).delete(delete_app))
-        .route("/apps/:id/pages", get(list_pages).post(create_page))
-        .route(
-            "/apps/:id/pages/:page_id",
-            get(get_page).put(update_page).delete(delete_page),
-        )
-        .route("/apps/:id/publish", post(publish_app))
-        .route("/apps/:id/preview", get(preview_app))
-        .route("/render/:slug", get(render_app))
-        .route("/render/:slug/*path", get(render_page))
-        .with_state(state)
-}
-
-async fn list_apps(
-    State(state): State<Arc<WebaState>>,
-    Query(query): Query<ListQuery>,
-) -> Json<Vec<WebApp>> {
-    let apps = state.apps.read().await;
-    let mut result: Vec<WebApp> = apps.values().cloned().collect();
-
-    if let Some(status) = query.status {
-        result.retain(|app| match (&app.status, status.as_str()) {
-            (WebAppStatus::Draft, "draft") => true,
-            (WebAppStatus::Published, "published") => true,
-            (WebAppStatus::Archived, "archived") => true,
-            _ => false,
-        });
-    }
-
-    result.sort_by(|a, b| b.updated_at.cmp(&a.updated_at));
-
-    let offset = query.offset.unwrap_or(0);
-    let limit = query.limit.unwrap_or(50);
-    let result: Vec<WebApp> = result.into_iter().skip(offset).take(limit).collect();
-
-    Json(result)
-}
-
-async fn create_app(
-    State(state): State<Arc<WebaState>>,
-    Json(req): Json<CreateAppRequest>,
-) -> Json<WebApp> {
-    let now = chrono::Utc::now();
-    let id = Uuid::new_v4();
-    let slug = slugify(&req.name);
-
-    let app = WebApp {
-        id,
-        name: req.name,
-        slug,
-        description: req.description,
-        template: req.template.unwrap_or_default(),
-        status: WebAppStatus::Draft,
-        config: WebAppConfig::default(),
-        created_at: now,
-        updated_at: now,
-    };
-
-    let mut apps = state.apps.write().await;
-    apps.insert(id, app.clone());
-
-    Json(app)
-}
-
-async fn get_app(
-    State(state): State<Arc<WebaState>>,
-    Path(id): Path<Uuid>,
-) -> Result<Json<WebApp>, axum::http::StatusCode> {
-    let apps = state.apps.read().await;
-    apps.get(&id)
-        .cloned()
-        .map(Json)
-        .ok_or(axum::http::StatusCode::NOT_FOUND)
-}
-
-async fn update_app(
-    State(state): State<Arc<WebaState>>,
-    Path(id): Path<Uuid>,
-    Json(req): Json<UpdateAppRequest>,
-) -> Result<Json<WebApp>, axum::http::StatusCode> {
-    let mut apps = state.apps.write().await;
-
-    let app = apps.get_mut(&id).ok_or(axum::http::StatusCode::NOT_FOUND)?;
-
-    if let Some(name) = req.name {
-        app.name = name.clone();
-        app.slug = slugify(&name);
-    }
-    if let Some(description) = req.description {
-        app.description = Some(description);
-    }
-    if let Some(status) = req.status {
-        app.status = status;
-    }
-    if let Some(config) = req.config {
-        app.config = config;
-    }
-    app.updated_at = chrono::Utc::now();
-
-    Ok(Json(app.clone()))
-}
-
-async fn delete_app(
-    State(state): State<Arc<WebaState>>,
-    Path(id): Path<Uuid>,
-) -> axum::http::StatusCode {
-    let mut apps = state.apps.write().await;
-    let mut pages = state.pages.write().await;
-
-    pages.retain(|_, page| page.app_id != id);
-
-    if apps.remove(&id).is_some() {
-        axum::http::StatusCode::NO_CONTENT
-    } else {
-        axum::http::StatusCode::NOT_FOUND
-    }
-}
-
-async fn list_pages(
-    State(state): State<Arc<WebaState>>,
-    Path(app_id): Path<Uuid>,
-) -> Json<Vec<WebAppPage>> {
-    let pages = state.pages.read().await;
-    let result: Vec<WebAppPage> = pages
-        .values()
-        .filter(|p| p.app_id == app_id)
-        .cloned()
-        .collect();
-    Json(result)
-}
-
-async fn create_page(
-    State(state): State<Arc<WebaState>>,
-    Path(app_id): Path<Uuid>,
-    Json(req): Json<CreatePageRequest>,
-) -> Result<Json<WebAppPage>, axum::http::StatusCode> {
-    let apps = state.apps.read().await;
-    if !apps.contains_key(&app_id) {
-        return Err(axum::http::StatusCode::NOT_FOUND);
-    }
-    drop(apps);
-
-    let now = chrono::Utc::now();
-    let id = Uuid::new_v4();
-
-    let page = WebAppPage {
-        id,
-        app_id,
-        path: req.path,
-        title: req.title,
-        content: req.content,
-        layout: req.layout,
-        is_index: req.is_index,
-        created_at: now,
-        updated_at: now,
-    };
-
-    let mut pages = state.pages.write().await;
-    pages.insert(id, page.clone());
-
-    Ok(Json(page))
-}
-
-async fn get_page(
-    State(state): State<Arc<WebaState>>,
-    Path((app_id, page_id)): Path<(Uuid, Uuid)>,
-) -> Result<Json<WebAppPage>, axum::http::StatusCode> {
-    let pages = state.pages.read().await;
-    pages
-        .get(&page_id)
-        .filter(|p| p.app_id == app_id)
-        .cloned()
-        .map(Json)
-        .ok_or(axum::http::StatusCode::NOT_FOUND)
-}
-
-async fn update_page(
-    State(state): State<Arc<WebaState>>,
-    Path((app_id, page_id)): Path<(Uuid, Uuid)>,
-    Json(req): Json<CreatePageRequest>,
-) -> Result<Json<WebAppPage>, axum::http::StatusCode> {
-    let mut pages = state.pages.write().await;
-
-    let page = pages
-        .get_mut(&page_id)
-        .filter(|p| p.app_id == app_id)
-        .ok_or(axum::http::StatusCode::NOT_FOUND)?;
-
-    page.path = req.path;
-    page.title = req.title;
-    page.content = req.content;
-    page.layout = req.layout;
-    page.is_index = req.is_index;
-    page.updated_at = chrono::Utc::now();
-
-    Ok(Json(page.clone()))
-}
-
-async fn delete_page(
-    State(state): State<Arc<WebaState>>,
-    Path((app_id, page_id)): Path<(Uuid, Uuid)>,
-) -> axum::http::StatusCode {
-    let mut pages = state.pages.write().await;
-
-    let exists = pages
-        .get(&page_id)
-        .map(|p| p.app_id == app_id)
-        .unwrap_or(false);
-
-    if exists {
-        pages.remove(&page_id);
-        axum::http::StatusCode::NO_CONTENT
-    } else {
-        axum::http::StatusCode::NOT_FOUND
-    }
-}
-
-async fn publish_app(
-    State(state): State<Arc<WebaState>>,
-    Path(id): Path<Uuid>,
-) -> Result<Json<WebApp>, axum::http::StatusCode> {
-    let mut apps = state.apps.write().await;
-    let app = apps.get_mut(&id).ok_or(axum::http::StatusCode::NOT_FOUND)?;
-
-    app.status = WebAppStatus::Published;
-    app.updated_at = chrono::Utc::now();
-
-    Ok(Json(app.clone()))
-}
-
-async fn preview_app(
-    State(state): State<Arc<WebaState>>,
-    Path(id): Path<Uuid>,
-) -> Result<Html<String>, axum::http::StatusCode> {
-    let apps = state.apps.read().await;
-    let app = apps.get(&id).ok_or(axum::http::StatusCode::NOT_FOUND)?;
-
-    let pages = state.pages.read().await;
-    let index_page = pages.values().find(|p| p.app_id == id && p.is_index);
-
-    let content = index_page
-        .map(|p| p.content.clone())
-        .unwrap_or_else(|| "<p>No content yet</p>".to_string());
-
-    let html = render_html(app, &content);
-    Ok(Html(html))
-}
-
-async fn render_app(
-    State(state): State<Arc<WebaState>>,
-    Path(slug): Path<String>,
-) -> Result<impl IntoResponse, axum::http::StatusCode> {
-    let apps = state.apps.read().await;
-    let app = apps
-        .values()
-        .find(|a| a.slug == slug && matches!(a.status, WebAppStatus::Published))
-        .ok_or(axum::http::StatusCode::NOT_FOUND)?
-        .clone();
-    drop(apps);
-
-    let pages = state.pages.read().await;
-    let index_page = pages.values().find(|p| p.app_id == app.id && p.is_index);
-
-    let content = index_page
-        .map(|p| p.content.clone())
-        .unwrap_or_else(|| "<p>Page not found</p>".to_string());
-
-    let html = render_html(&app, &content);
-    Ok(Html(html))
-}
-
-async fn render_page(
-    State(state): State<Arc<WebaState>>,
-    Path((slug, path)): Path<(String, String)>,
-) -> Result<impl IntoResponse, axum::http::StatusCode> {
-    let apps = state.apps.read().await;
-    let app = apps
-        .values()
-        .find(|a| a.slug == slug && matches!(a.status, WebAppStatus::Published))
-        .ok_or(axum::http::StatusCode::NOT_FOUND)?
-        .clone();
-    drop(apps);
-
-    let normalized_path = format!("/{}", path.trim_start_matches('/'));
-
-    let pages = state.pages.read().await;
-    let page = pages
-        .values()
-        .find(|p| p.app_id == app.id && p.path == normalized_path);
-
-    let content = page
-        .map(|p| p.content.clone())
-        .unwrap_or_else(|| "<p>Page not found</p>".to_string());
-
-    let html = render_html(&app, &content);
-    Ok(Html(html))
-}
-
-fn render_html(app: &WebApp, content: &str) -> String {
-    let meta_tags: String = app
-        .config
-        .meta_tags
-        .iter()
-        .map(|(k, v)| format!("<meta name=\"{}\" content=\"{}\">", k, v))
-        .collect::<Vec<_>>()
-        .join("\n ");
-
-    let scripts: String = app
-        .config
-        .scripts
-        .iter()
-        .map(|s| format!("<script src=\"{}\"></script>", s))
-        .collect::<Vec<_>>()
-        .join("\n ");
-
-    let styles: String = app
-        .config
-        .styles
-        .iter()
-        .map(|s| format!("<link rel=\"stylesheet\" href=\"{}\">", s))
-        .collect::<Vec<_>>()
-        .join("\n ");
-
-    format!(
-        r#"<!DOCTYPE html>
-<html lang="en">
-<head>
-    <meta charset="UTF-8">
-    <meta name="viewport" content="width=device-width, initial-scale=1.0">
-    <title>{}</title>
-    {}
-    {}
-    <style>
-        * {{ margin: 0; padding: 0; box-sizing: border-box; }}
-        body {{ font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, sans-serif; }}
-    </style>
-</head>
-<body>
-    {}
-    {}
-</body>
-</html>"#,
-        app.name, meta_tags, styles, content, scripts
-    )
-}
-
-fn slugify(s: &str) -> String {
-    s.to_lowercase()
-        .chars()
-        .map(|c| if c.is_alphanumeric() { c } else { '-' })
-        .collect::<String>()
-        .split('-')
-        .filter(|s| !s.is_empty())
-        .collect::<Vec<_>>()
-        .join("-")
-}
+// WEBA module - Web Application features
+// This module is a placeholder for future web application functionality
 
 pub fn init() {
-    log::info!("WEBA module initialized");
+    // Placeholder for weba initialization
 }
-
-#[cfg(test)]
-mod tests {
-    use super::*;
-
-    #[test]
-    fn test_slugify() {
-        assert_eq!(slugify("Hello World"), "hello-world");
-        assert_eq!(slugify("My App 123"), "my-app-123");
-        assert_eq!(slugify(" Test App "), "test-app");
-    }
-
-    #[test]
-    fn test_webapp_creation() {
-        let now = chrono::Utc::now();
-        let app = WebApp {
-            id: Uuid::new_v4(),
-            name: "Test App".to_string(),
-            slug: "test-app".to_string(),
-            description: None,
-            template: WebAppTemplate::Blank,
-            status: WebAppStatus::Draft,
-            config: WebAppConfig::default(),
-            created_at: now,
-            updated_at: now,
-        };
-        assert_eq!(app.name, "Test App");
-        assert_eq!(app.slug, "test-app");
-    }
-
-    #[tokio::test]
-    async fn test_weba_state() {
-        let state = WebaState::new();
-        let apps = state.apps.read().await;
-        assert!(apps.is_empty());
-    }
-}
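
Note: the deleted weba module is self-contained apart from its axum/serde/tokio imports, so it can be recovered from this hunk if the feature is revived. Its `slugify` helper, for instance, runs standalone; the sketch below is copied from the removed lines above, with a small `main` added purely for illustration.

```rust
// slugify, extracted from the deleted src/weba/mod.rs shown above:
// lowercase, map non-alphanumeric characters to '-', then collapse runs of '-'.
fn slugify(s: &str) -> String {
    s.to_lowercase()
        .chars()
        .map(|c| if c.is_alphanumeric() { c } else { '-' })
        .collect::<String>()
        .split('-')
        .filter(|s| !s.is_empty())
        .collect::<Vec<_>>()
        .join("-")
}

fn main() {
    assert_eq!(slugify("Hello World"), "hello-world");
    assert_eq!(slugify("My App 123"), "my-app-123");
    println!("{}", slugify("  Test  App  "));
}
```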
|
|
|