feat(autotask): Implement AutoTask system with intent classification and app generation
- Add IntentClassifier with 7 intent types (APP_CREATE, TODO, MONITOR, ACTION, SCHEDULE, GOAL, TOOL)
- Add AppGenerator with LLM-powered app structure analysis
- Add DesignerAI for modifying apps through conversation
- Add app_server for serving generated apps with clean URLs
- Add db_api for CRUD operations on bot database tables
- Add ask_later keyword for pending info collection
- Add migration 6.1.1 with tables: pending_info, auto_tasks, execution_plans, task_approvals, task_decisions, safety_audit_log, generated_apps, intent_classifications, designer_changes, designer_pending_changes
- Write apps to S3 drive and sync to SITE_ROOT for serving
- Clean URL structure: /apps/{app_name}/
- Integrate with DriveMonitor for file sync
Based on Chapter 17 - Autonomous Tasks specification
This commit is contained in:
parent
5da86bbef2
commit
14b7cf70af
107 changed files with 6607 additions and 1024 deletions
67
migrations/6.1.1_autotask_system/down.sql
Normal file
67
migrations/6.1.1_autotask_system/down.sql
Normal file
|
|
@ -0,0 +1,67 @@
|
|||
-- Rollback Migration: 6.1.1 AutoTask System
-- Description: Drop tables for the AutoTask system
--
-- Tables are dropped in reverse creation order so that foreign-key
-- references (execution_plans/task_approvals/etc. -> auto_tasks) are
-- removed before the tables they point at. PostgreSQL drops a table's
-- indexes automatically; the explicit DROP INDEX statements are kept
-- only for clarity and symmetry with up.sql.

-- designer_pending_changes
DROP INDEX IF EXISTS idx_designer_pending_changes_expires_at;
DROP INDEX IF EXISTS idx_designer_pending_changes_bot_id;
DROP TABLE IF EXISTS designer_pending_changes;

-- designer_changes
DROP INDEX IF EXISTS idx_designer_changes_created_at;
DROP INDEX IF EXISTS idx_designer_changes_bot_id;
DROP TABLE IF EXISTS designer_changes;

-- intent_classifications
DROP INDEX IF EXISTS idx_intent_classifications_created_at;
DROP INDEX IF EXISTS idx_intent_classifications_intent_type;
DROP INDEX IF EXISTS idx_intent_classifications_bot_id;
DROP TABLE IF EXISTS intent_classifications;

-- generated_apps
DROP INDEX IF EXISTS idx_generated_apps_is_active;
DROP INDEX IF EXISTS idx_generated_apps_name;
DROP INDEX IF EXISTS idx_generated_apps_bot_id;
DROP TABLE IF EXISTS generated_apps;

-- safety_audit_log
DROP INDEX IF EXISTS idx_safety_audit_log_created_at;
DROP INDEX IF EXISTS idx_safety_audit_log_outcome;
DROP INDEX IF EXISTS idx_safety_audit_log_task_id;
DROP INDEX IF EXISTS idx_safety_audit_log_bot_id;
DROP TABLE IF EXISTS safety_audit_log;

-- task_decisions
DROP INDEX IF EXISTS idx_task_decisions_status;
DROP INDEX IF EXISTS idx_task_decisions_task_id;
DROP INDEX IF EXISTS idx_task_decisions_bot_id;
DROP TABLE IF EXISTS task_decisions;

-- task_approvals
DROP INDEX IF EXISTS idx_task_approvals_expires_at;
DROP INDEX IF EXISTS idx_task_approvals_status;
DROP INDEX IF EXISTS idx_task_approvals_task_id;
DROP INDEX IF EXISTS idx_task_approvals_bot_id;
DROP TABLE IF EXISTS task_approvals;

-- execution_plans
DROP INDEX IF EXISTS idx_execution_plans_intent_type;
DROP INDEX IF EXISTS idx_execution_plans_status;
DROP INDEX IF EXISTS idx_execution_plans_task_id;
DROP INDEX IF EXISTS idx_execution_plans_bot_id;
DROP TABLE IF EXISTS execution_plans;

-- auto_tasks
DROP INDEX IF EXISTS idx_auto_tasks_created_at;
DROP INDEX IF EXISTS idx_auto_tasks_priority;
DROP INDEX IF EXISTS idx_auto_tasks_status;
DROP INDEX IF EXISTS idx_auto_tasks_session_id;
DROP INDEX IF EXISTS idx_auto_tasks_bot_id;
DROP TABLE IF EXISTS auto_tasks;

-- pending_info
DROP INDEX IF EXISTS idx_pending_info_is_filled;
DROP INDEX IF EXISTS idx_pending_info_config_key;
DROP INDEX IF EXISTS idx_pending_info_bot_id;
DROP TABLE IF EXISTS pending_info;
268
migrations/6.1.1_autotask_system/up.sql
Normal file
268
migrations/6.1.1_autotask_system/up.sql
Normal file
|
|
@ -0,0 +1,268 @@
|
|||
-- Migration: 6.1.1 AutoTask System
-- Description: Tables for the AutoTask system - autonomous task execution with LLM intent compilation
-- NOTE: TABLES AND INDEXES ONLY - No views, triggers, or functions per project standards

-- ============================================================================
-- PENDING INFO TABLE
-- ============================================================================
-- Configuration values the system still has to collect from the user.
-- The ASK LATER keyword records a row here instead of blocking on input;
-- when the user later supplies the value, is_filled / filled_at /
-- filled_value are updated.

CREATE TABLE IF NOT EXISTS pending_info (
    id           UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    bot_id       UUID NOT NULL REFERENCES bots(id) ON DELETE CASCADE,
    field_name   VARCHAR(100) NOT NULL,
    field_label  VARCHAR(255) NOT NULL,
    field_type   VARCHAR(50) NOT NULL DEFAULT 'text',
    reason       TEXT,
    config_key   VARCHAR(255) NOT NULL,
    is_filled    BOOLEAN DEFAULT false,
    filled_at    TIMESTAMPTZ,
    filled_value TEXT,
    created_at   TIMESTAMPTZ DEFAULT NOW(),
    updated_at   TIMESTAMPTZ DEFAULT NOW()
);

CREATE INDEX IF NOT EXISTS idx_pending_info_bot_id ON pending_info(bot_id);
CREATE INDEX IF NOT EXISTS idx_pending_info_config_key ON pending_info(config_key);
CREATE INDEX IF NOT EXISTS idx_pending_info_is_filled ON pending_info(is_filled);
||||
|
||||
-- ============================================================================
-- AUTO TASKS TABLE
-- ============================================================================
-- Autonomous tasks the system can execute. Tracks lifecycle state,
-- execution mode, progress (current_step/total_steps/progress) and
-- per-step results. plan_id intentionally has no FK: execution_plans
-- also references auto_tasks, so one side of the pair stays loose.

CREATE TABLE IF NOT EXISTS auto_tasks (
    id             UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    bot_id         UUID NOT NULL REFERENCES bots(id) ON DELETE CASCADE,
    session_id     UUID REFERENCES user_sessions(id) ON DELETE SET NULL,
    title          VARCHAR(500) NOT NULL,
    intent         TEXT NOT NULL,
    status         VARCHAR(50) NOT NULL DEFAULT 'pending',
    execution_mode VARCHAR(50) NOT NULL DEFAULT 'supervised',
    priority       VARCHAR(20) NOT NULL DEFAULT 'normal',
    plan_id        UUID,
    basic_program  TEXT,
    current_step   INTEGER DEFAULT 0,
    total_steps    INTEGER DEFAULT 0,
    progress       FLOAT DEFAULT 0.0,
    step_results   JSONB DEFAULT '[]'::jsonb,
    error          TEXT,
    started_at     TIMESTAMPTZ,
    completed_at   TIMESTAMPTZ,
    created_at     TIMESTAMPTZ DEFAULT NOW(),
    updated_at     TIMESTAMPTZ DEFAULT NOW(),
    CONSTRAINT check_status CHECK (status IN ('pending', 'ready', 'running', 'paused', 'waiting_approval', 'completed', 'failed', 'cancelled')),
    CONSTRAINT check_execution_mode CHECK (execution_mode IN ('autonomous', 'supervised', 'manual')),
    CONSTRAINT check_priority CHECK (priority IN ('low', 'normal', 'high', 'urgent'))
);

CREATE INDEX IF NOT EXISTS idx_auto_tasks_bot_id ON auto_tasks(bot_id);
CREATE INDEX IF NOT EXISTS idx_auto_tasks_session_id ON auto_tasks(session_id);
CREATE INDEX IF NOT EXISTS idx_auto_tasks_status ON auto_tasks(status);
CREATE INDEX IF NOT EXISTS idx_auto_tasks_priority ON auto_tasks(priority);
CREATE INDEX IF NOT EXISTS idx_auto_tasks_created_at ON auto_tasks(created_at);
|
||||
|
||||
-- ============================================================================
-- EXECUTION PLANS TABLE
-- ============================================================================
-- Plans compiled from intent analysis: the classified intent, the ordered
-- steps (JSONB), optional generated BASIC program, and approval/execution
-- timestamps.

CREATE TABLE IF NOT EXISTS execution_plans (
    id                UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    bot_id            UUID NOT NULL REFERENCES bots(id) ON DELETE CASCADE,
    task_id           UUID REFERENCES auto_tasks(id) ON DELETE CASCADE,
    intent            TEXT NOT NULL,
    intent_type       VARCHAR(100),
    confidence        FLOAT DEFAULT 0.0,
    status            VARCHAR(50) NOT NULL DEFAULT 'pending',
    steps             JSONB NOT NULL DEFAULT '[]'::jsonb,
    context           JSONB DEFAULT '{}'::jsonb,
    basic_program     TEXT,
    simulation_result JSONB,
    approved_at       TIMESTAMPTZ,
    approved_by       UUID,
    executed_at       TIMESTAMPTZ,
    created_at        TIMESTAMPTZ DEFAULT NOW(),
    updated_at        TIMESTAMPTZ DEFAULT NOW(),
    CONSTRAINT check_plan_status CHECK (status IN ('pending', 'approved', 'rejected', 'executing', 'completed', 'failed'))
);

CREATE INDEX IF NOT EXISTS idx_execution_plans_bot_id ON execution_plans(bot_id);
CREATE INDEX IF NOT EXISTS idx_execution_plans_task_id ON execution_plans(task_id);
CREATE INDEX IF NOT EXISTS idx_execution_plans_status ON execution_plans(status);
CREATE INDEX IF NOT EXISTS idx_execution_plans_intent_type ON execution_plans(intent_type);
|
||||
|
||||
-- ============================================================================
-- TASK APPROVALS TABLE
-- ============================================================================
-- Approval requests raised for supervised tasks, plus the human decision.
-- expires_at supports time-boxed approvals (status can become 'expired').

CREATE TABLE IF NOT EXISTS task_approvals (
    id                 UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    bot_id             UUID NOT NULL REFERENCES bots(id) ON DELETE CASCADE,
    task_id            UUID NOT NULL REFERENCES auto_tasks(id) ON DELETE CASCADE,
    plan_id            UUID REFERENCES execution_plans(id) ON DELETE CASCADE,
    step_index         INTEGER,
    action_type        VARCHAR(100) NOT NULL,
    action_description TEXT NOT NULL,
    risk_level         VARCHAR(20) DEFAULT 'low',
    status             VARCHAR(50) NOT NULL DEFAULT 'pending',
    decision           VARCHAR(20),
    decision_reason    TEXT,
    decided_by         UUID,
    decided_at         TIMESTAMPTZ,
    expires_at         TIMESTAMPTZ,
    created_at         TIMESTAMPTZ DEFAULT NOW(),
    CONSTRAINT check_risk_level CHECK (risk_level IN ('low', 'medium', 'high', 'critical')),
    CONSTRAINT check_approval_status CHECK (status IN ('pending', 'approved', 'rejected', 'expired', 'skipped')),
    CONSTRAINT check_decision CHECK (decision IS NULL OR decision IN ('approve', 'reject', 'skip'))
);

CREATE INDEX IF NOT EXISTS idx_task_approvals_bot_id ON task_approvals(bot_id);
CREATE INDEX IF NOT EXISTS idx_task_approvals_task_id ON task_approvals(task_id);
CREATE INDEX IF NOT EXISTS idx_task_approvals_status ON task_approvals(status);
CREATE INDEX IF NOT EXISTS idx_task_approvals_expires_at ON task_approvals(expires_at);
|
||||
|
||||
-- ============================================================================
-- TASK DECISIONS TABLE
-- ============================================================================
-- Questions posed to the user mid-execution, with the candidate options
-- (JSONB array) and the eventual answer. timeout_seconds bounds how long
-- a decision may stay 'pending' before it can be marked 'timeout'.

CREATE TABLE IF NOT EXISTS task_decisions (
    id              UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    bot_id          UUID NOT NULL REFERENCES bots(id) ON DELETE CASCADE,
    task_id         UUID NOT NULL REFERENCES auto_tasks(id) ON DELETE CASCADE,
    question        TEXT NOT NULL,
    options         JSONB NOT NULL DEFAULT '[]'::jsonb,
    context         JSONB DEFAULT '{}'::jsonb,
    status          VARCHAR(50) NOT NULL DEFAULT 'pending',
    selected_option VARCHAR(255),
    decision_reason TEXT,
    decided_by      UUID,
    decided_at      TIMESTAMPTZ,
    timeout_seconds INTEGER DEFAULT 3600,
    created_at      TIMESTAMPTZ DEFAULT NOW(),
    CONSTRAINT check_decision_status CHECK (status IN ('pending', 'answered', 'timeout', 'cancelled'))
);

CREATE INDEX IF NOT EXISTS idx_task_decisions_bot_id ON task_decisions(bot_id);
CREATE INDEX IF NOT EXISTS idx_task_decisions_task_id ON task_decisions(task_id);
CREATE INDEX IF NOT EXISTS idx_task_decisions_status ON task_decisions(status);
|
||||
|
||||
-- ============================================================================
-- SAFETY AUDIT LOG TABLE
-- ============================================================================
-- Append-only audit trail of safety checks and constraint validations.
-- task_id/plan_id use ON DELETE SET NULL so audit rows survive deletion
-- of the task or plan they describe.

CREATE TABLE IF NOT EXISTS safety_audit_log (
    id                UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    bot_id            UUID NOT NULL REFERENCES bots(id) ON DELETE CASCADE,
    task_id           UUID REFERENCES auto_tasks(id) ON DELETE SET NULL,
    plan_id           UUID REFERENCES execution_plans(id) ON DELETE SET NULL,
    action_type       VARCHAR(100) NOT NULL,
    action_details    JSONB NOT NULL DEFAULT '{}'::jsonb,
    constraint_checks JSONB DEFAULT '[]'::jsonb,
    simulation_result JSONB,
    risk_assessment   JSONB,
    outcome           VARCHAR(50) NOT NULL,
    error_message     TEXT,
    created_at        TIMESTAMPTZ DEFAULT NOW(),
    CONSTRAINT check_outcome CHECK (outcome IN ('allowed', 'blocked', 'warning', 'error'))
);

CREATE INDEX IF NOT EXISTS idx_safety_audit_log_bot_id ON safety_audit_log(bot_id);
CREATE INDEX IF NOT EXISTS idx_safety_audit_log_task_id ON safety_audit_log(task_id);
CREATE INDEX IF NOT EXISTS idx_safety_audit_log_outcome ON safety_audit_log(outcome);
CREATE INDEX IF NOT EXISTS idx_safety_audit_log_created_at ON safety_audit_log(created_at);
|
||||
|
||||
-- ============================================================================
-- GENERATED APPS TABLE
-- ============================================================================
-- Metadata for apps produced by the AppGenerator: pages, created DB tables,
-- tools and schedulers (all JSONB arrays) plus the on-disk app_path.
-- App names are unique per bot (unique_bot_app_name).

CREATE TABLE IF NOT EXISTS generated_apps (
    id             UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    bot_id         UUID NOT NULL REFERENCES bots(id) ON DELETE CASCADE,
    name           VARCHAR(255) NOT NULL,
    description    TEXT,
    domain         VARCHAR(100),
    intent_source  TEXT,
    pages          JSONB DEFAULT '[]'::jsonb,
    tables_created JSONB DEFAULT '[]'::jsonb,
    tools          JSONB DEFAULT '[]'::jsonb,
    schedulers     JSONB DEFAULT '[]'::jsonb,
    app_path       VARCHAR(500),
    is_active      BOOLEAN DEFAULT true,
    created_at     TIMESTAMPTZ DEFAULT NOW(),
    updated_at     TIMESTAMPTZ DEFAULT NOW(),
    CONSTRAINT unique_bot_app_name UNIQUE (bot_id, name)
);

CREATE INDEX IF NOT EXISTS idx_generated_apps_bot_id ON generated_apps(bot_id);
CREATE INDEX IF NOT EXISTS idx_generated_apps_name ON generated_apps(name);
CREATE INDEX IF NOT EXISTS idx_generated_apps_is_active ON generated_apps(is_active);
|
||||
|
||||
-- ============================================================================
-- INTENT CLASSIFICATIONS TABLE
-- ============================================================================
-- Record of every classified intent, kept for analytics and learning.
-- was_correct / corrected_type / feedback capture human corrections.
-- Note: besides the 7 primary intent types, UNKNOWN is allowed as a
-- fallback classification.

CREATE TABLE IF NOT EXISTS intent_classifications (
    id             UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    bot_id         UUID NOT NULL REFERENCES bots(id) ON DELETE CASCADE,
    session_id     UUID REFERENCES user_sessions(id) ON DELETE SET NULL,
    original_text  TEXT NOT NULL,
    intent_type    VARCHAR(50) NOT NULL,
    confidence     FLOAT NOT NULL DEFAULT 0.0,
    entities       JSONB DEFAULT '{}'::jsonb,
    suggested_name VARCHAR(255),
    was_correct    BOOLEAN,
    corrected_type VARCHAR(50),
    feedback       TEXT,
    created_at     TIMESTAMPTZ DEFAULT NOW(),
    CONSTRAINT check_intent_type CHECK (intent_type IN ('APP_CREATE', 'TODO', 'MONITOR', 'ACTION', 'SCHEDULE', 'GOAL', 'TOOL', 'UNKNOWN'))
);

CREATE INDEX IF NOT EXISTS idx_intent_classifications_bot_id ON intent_classifications(bot_id);
CREATE INDEX IF NOT EXISTS idx_intent_classifications_intent_type ON intent_classifications(intent_type);
CREATE INDEX IF NOT EXISTS idx_intent_classifications_created_at ON intent_classifications(created_at);
|
||||
|
||||
-- ============================================================================
-- DESIGNER CHANGES TABLE
-- ============================================================================
-- Applied Designer AI edits, storing both the original and new file
-- contents so a change can be undone by restoring original_content.

CREATE TABLE IF NOT EXISTS designer_changes (
    id               UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    bot_id           UUID NOT NULL REFERENCES bots(id) ON DELETE CASCADE,
    session_id       UUID REFERENCES user_sessions(id) ON DELETE SET NULL,
    change_type      VARCHAR(50) NOT NULL,
    description      TEXT NOT NULL,
    file_path        VARCHAR(500) NOT NULL,
    original_content TEXT NOT NULL,
    new_content      TEXT NOT NULL,
    created_at       TIMESTAMPTZ DEFAULT NOW(),
    CONSTRAINT check_designer_change_type CHECK (change_type IN ('STYLE', 'HTML', 'DATABASE', 'TOOL', 'SCHEDULER', 'MULTIPLE', 'UNKNOWN'))
);

CREATE INDEX IF NOT EXISTS idx_designer_changes_bot_id ON designer_changes(bot_id);
CREATE INDEX IF NOT EXISTS idx_designer_changes_created_at ON designer_changes(created_at);
|
||||
|
||||
-- ============================================================================
-- DESIGNER PENDING CHANGES TABLE
-- ============================================================================
-- Designer AI analyses awaiting user confirmation. Rows carry a mandatory
-- expires_at so stale, unconfirmed proposals can be cleaned up.

CREATE TABLE IF NOT EXISTS designer_pending_changes (
    id            UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    bot_id        UUID NOT NULL REFERENCES bots(id) ON DELETE CASCADE,
    session_id    UUID REFERENCES user_sessions(id) ON DELETE SET NULL,
    analysis_json TEXT NOT NULL,
    instruction   TEXT NOT NULL,
    expires_at    TIMESTAMPTZ NOT NULL,
    created_at    TIMESTAMPTZ DEFAULT NOW()
);

CREATE INDEX IF NOT EXISTS idx_designer_pending_changes_bot_id ON designer_pending_changes(bot_id);
CREATE INDEX IF NOT EXISTS idx_designer_pending_changes_expires_at ON designer_pending_changes(expires_at);
|
||||
|
|
@ -262,14 +262,17 @@ impl AttendanceDriveService {
|
|||
}
|
||||
|
||||
log::info!(
|
||||
"Syncing attendance records from {:?} to s3://{}/{}",
|
||||
local_path,
|
||||
"Syncing attendance records from {} to s3://{}/{}",
|
||||
local_path.display(),
|
||||
self.config.bucket_name,
|
||||
self.config.prefix
|
||||
);
|
||||
|
||||
if !local_path.exists() {
|
||||
return Err(anyhow!("Local path does not exist: {:?}", local_path));
|
||||
return Err(anyhow!(
|
||||
"Local path does not exist: {}",
|
||||
local_path.display()
|
||||
));
|
||||
}
|
||||
|
||||
let mut uploaded = 0;
|
||||
|
|
@ -293,7 +296,7 @@ impl AttendanceDriveService {
|
|||
let file_name = match path.file_name().and_then(|n| n.to_str()) {
|
||||
Some(name) => name.to_string(),
|
||||
None => {
|
||||
log::warn!("Skipping file with invalid name: {:?}", path);
|
||||
log::warn!("Skipping file with invalid name: {}", path.display());
|
||||
skipped += 1;
|
||||
continue;
|
||||
}
|
||||
|
|
@ -317,7 +320,7 @@ impl AttendanceDriveService {
|
|||
}
|
||||
},
|
||||
Err(e) => {
|
||||
log::error!("Failed to read file {:?}: {}", path, e);
|
||||
log::error!("Failed to read file {}: {}", path.display(), e);
|
||||
failed += 1;
|
||||
}
|
||||
}
|
||||
|
|
@ -356,7 +359,7 @@ impl AttendanceDriveService {
|
|||
last_modified: result
|
||||
.last_modified
|
||||
.and_then(|t| t.to_millis().ok())
|
||||
.map(|ms| chrono::Utc.timestamp_millis_opt(ms as i64).unwrap()),
|
||||
.map(|ms| chrono::Utc.timestamp_millis_opt(ms).unwrap()),
|
||||
content_type: result.content_type,
|
||||
etag: result.e_tag,
|
||||
})
|
||||
|
|
|
|||
|
|
@ -188,7 +188,7 @@ impl AttendanceService {
|
|||
AttendanceCommand::Resume => self.handle_resume(user_id, &parsed).await,
|
||||
AttendanceCommand::Status => self.handle_status(user_id).await,
|
||||
AttendanceCommand::Report => self.handle_report(user_id, &parsed).await,
|
||||
AttendanceCommand::Override => self.handle_override(user_id, &parsed),
|
||||
AttendanceCommand::Override => Self::handle_override(user_id, &parsed),
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -431,7 +431,7 @@ impl AttendanceService {
|
|||
Ok(AttendanceResponse::Report { data: report })
|
||||
}
|
||||
|
||||
fn handle_override(&self, user_id: &str, parsed: &ParsedCommand) -> Result<AttendanceResponse> {
|
||||
fn handle_override(user_id: &str, parsed: &ParsedCommand) -> Result<AttendanceResponse> {
|
||||
if parsed.args.len() < 2 {
|
||||
return Ok(AttendanceResponse::Error {
|
||||
message: "Override requires target user and action".to_string(),
|
||||
|
|
|
|||
|
|
@ -106,18 +106,15 @@ pub async fn attendant_respond(
|
|||
request.attendant_id, request.session_id
|
||||
);
|
||||
|
||||
let session_id = match Uuid::parse_str(&request.session_id) {
|
||||
Ok(id) => id,
|
||||
Err(_) => {
|
||||
return (
|
||||
StatusCode::BAD_REQUEST,
|
||||
Json(AttendantRespondResponse {
|
||||
success: false,
|
||||
message: "Invalid session ID".to_string(),
|
||||
error: Some("Could not parse session ID as UUID".to_string()),
|
||||
}),
|
||||
)
|
||||
}
|
||||
let Ok(session_id) = Uuid::parse_str(&request.session_id) else {
|
||||
return (
|
||||
StatusCode::BAD_REQUEST,
|
||||
Json(AttendantRespondResponse {
|
||||
success: false,
|
||||
message: "Invalid session ID".to_string(),
|
||||
error: Some("Could not parse session ID as UUID".to_string()),
|
||||
}),
|
||||
);
|
||||
};
|
||||
|
||||
let conn = state.conn.clone();
|
||||
|
|
@ -133,18 +130,15 @@ pub async fn attendant_respond(
|
|||
.ok()
|
||||
.flatten();
|
||||
|
||||
let session = match session_result {
|
||||
Some(s) => s,
|
||||
None => {
|
||||
return (
|
||||
StatusCode::NOT_FOUND,
|
||||
Json(AttendantRespondResponse {
|
||||
success: false,
|
||||
message: "Session not found".to_string(),
|
||||
error: Some("No session with that ID exists".to_string()),
|
||||
}),
|
||||
)
|
||||
}
|
||||
let Some(session) = session_result else {
|
||||
return (
|
||||
StatusCode::NOT_FOUND,
|
||||
Json(AttendantRespondResponse {
|
||||
success: false,
|
||||
message: "Session not found".to_string(),
|
||||
error: Some("No session with that ID exists".to_string()),
|
||||
}),
|
||||
);
|
||||
};
|
||||
|
||||
let channel = session
|
||||
|
|
@ -216,7 +210,7 @@ pub async fn attendant_respond(
|
|||
),
|
||||
}
|
||||
}
|
||||
"web" | _ => {
|
||||
_ => {
|
||||
let sent = if let Some(tx) = state
|
||||
.response_channels
|
||||
.lock()
|
||||
|
|
@ -305,6 +299,7 @@ async fn save_message_to_history(
|
|||
Ok(())
|
||||
}
|
||||
|
||||
#[allow(clippy::unused_async)]
|
||||
async fn broadcast_attendant_action(
|
||||
state: &Arc<AppState>,
|
||||
session: &UserSession,
|
||||
|
|
@ -382,11 +377,7 @@ async fn handle_attendant_websocket(socket: WebSocket, state: Arc<AppState>, att
|
|||
});
|
||||
|
||||
if let Ok(welcome_str) = serde_json::to_string(&welcome) {
|
||||
if sender
|
||||
.send(Message::Text(welcome_str.into()))
|
||||
.await
|
||||
.is_err()
|
||||
{
|
||||
if sender.send(Message::Text(welcome_str)).await.is_err() {
|
||||
error!("Failed to send welcome message to attendant");
|
||||
return;
|
||||
}
|
||||
|
|
@ -413,7 +404,7 @@ async fn handle_attendant_websocket(socket: WebSocket, state: Arc<AppState>, att
|
|||
"Sending notification to attendant {}: {}",
|
||||
attendant_id_clone, notification.notification_type
|
||||
);
|
||||
if sender.send(Message::Text(json_str.into())).await.is_err() {
|
||||
if sender.send(Message::Text(json_str)).await.is_err() {
|
||||
error!("Failed to send notification to attendant WebSocket");
|
||||
break;
|
||||
}
|
||||
|
|
|
|||
|
|
@ -228,12 +228,12 @@ impl BasicCompiler {
|
|||
}
|
||||
fn normalize_type(basic_type: &str) -> String {
|
||||
match basic_type.to_lowercase().as_str() {
|
||||
"string" | "text" | "date" | "datetime" => "string".to_string(),
|
||||
"integer" | "int" | "number" => "integer".to_string(),
|
||||
"float" | "double" | "decimal" => "number".to_string(),
|
||||
"boolean" | "bool" => "boolean".to_string(),
|
||||
"array" | "list" => "array".to_string(),
|
||||
"object" | "map" => "object".to_string(),
|
||||
// "string", "text", "date", "datetime", and any other type default to string
|
||||
_ => "string".to_string(),
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -40,17 +40,14 @@ pub fn add_member_keyword(state: Arc<AppState>, user: UserSession, engine: &mut
|
|||
.enable_all()
|
||||
.build();
|
||||
|
||||
let send_err = if let Ok(rt) = rt {
|
||||
let result = rt.block_on(async move {
|
||||
execute_add_member(
|
||||
&state_for_task,
|
||||
&user_for_task,
|
||||
&group_id,
|
||||
&user_email,
|
||||
&role,
|
||||
)
|
||||
.await
|
||||
});
|
||||
let send_err = if let Ok(_rt) = rt {
|
||||
let result = execute_add_member(
|
||||
&state_for_task,
|
||||
&user_for_task,
|
||||
&group_id,
|
||||
&user_email,
|
||||
&role,
|
||||
);
|
||||
tx.send(result).err()
|
||||
} else {
|
||||
tx.send(Err("Failed to build tokio runtime".to_string()))
|
||||
|
|
@ -124,17 +121,14 @@ pub fn add_member_keyword(state: Arc<AppState>, user: UserSession, engine: &mut
|
|||
.enable_all()
|
||||
.build();
|
||||
|
||||
let send_err = if let Ok(rt) = rt {
|
||||
let result = rt.block_on(async move {
|
||||
execute_create_team(
|
||||
&state_for_task,
|
||||
&user_for_task,
|
||||
&name,
|
||||
members,
|
||||
&workspace_template,
|
||||
)
|
||||
.await
|
||||
});
|
||||
let send_err = if let Ok(_rt) = rt {
|
||||
let result = execute_create_team(
|
||||
&state_for_task,
|
||||
&user_for_task,
|
||||
&name,
|
||||
members,
|
||||
&workspace_template,
|
||||
);
|
||||
tx.send(result).err()
|
||||
} else {
|
||||
tx.send(Err("Failed to build tokio runtime".to_string()))
|
||||
|
|
@ -162,7 +156,7 @@ pub fn add_member_keyword(state: Arc<AppState>, user: UserSession, engine: &mut
|
|||
.unwrap();
|
||||
}
|
||||
|
||||
async fn execute_add_member(
|
||||
pub fn execute_add_member(
|
||||
state: &AppState,
|
||||
user: &UserSession,
|
||||
group_id: &str,
|
||||
|
|
@ -200,9 +194,9 @@ async fn execute_add_member(
|
|||
format!("Failed to add member: {}", e)
|
||||
})?;
|
||||
|
||||
send_member_invitation(state, group_id, user_email, &valid_role).await?;
|
||||
send_member_invitation(state, group_id, user_email, &valid_role)?;
|
||||
|
||||
log_group_activity(state, group_id, "member_added", user_email).await?;
|
||||
log_group_activity(state, group_id, "member_added", user_email)?;
|
||||
|
||||
trace!(
|
||||
"Added {} to group {} as {} with permissions {:?}",
|
||||
|
|
@ -215,7 +209,7 @@ async fn execute_add_member(
|
|||
Ok(member_id)
|
||||
}
|
||||
|
||||
async fn execute_create_team(
|
||||
fn execute_create_team(
|
||||
state: &AppState,
|
||||
user: &UserSession,
|
||||
name: &str,
|
||||
|
|
@ -254,7 +248,7 @@ async fn execute_create_team(
|
|||
format!("Failed to create team: {}", e)
|
||||
})?;
|
||||
|
||||
execute_add_member(state, user, &team_id, &user.user_id.to_string(), "admin").await?;
|
||||
execute_add_member(state, user, &team_id, &user.user_id.to_string(), "admin")?;
|
||||
|
||||
for member_email in &members {
|
||||
let role = if member_email == &user.user_id.to_string() {
|
||||
|
|
@ -262,12 +256,12 @@ async fn execute_create_team(
|
|||
} else {
|
||||
"member"
|
||||
};
|
||||
execute_add_member(state, user, &team_id, member_email, role).await?;
|
||||
execute_add_member(state, user, &team_id, member_email, role)?;
|
||||
}
|
||||
|
||||
create_workspace_structure(state, &team_id, name, workspace_template).await?;
|
||||
create_workspace_structure(state, &team_id, name, workspace_template)?;
|
||||
|
||||
create_team_channel(state, &team_id, name).await?;
|
||||
create_team_channel(state, &team_id, name)?;
|
||||
|
||||
trace!(
|
||||
"Created team '{}' with {} members (ID: {})",
|
||||
|
|
@ -283,7 +277,6 @@ fn validate_role(role: &str) -> String {
|
|||
match role.to_lowercase().as_str() {
|
||||
"admin" | "administrator" => "admin",
|
||||
"contributor" | "editor" => "contributor",
|
||||
"member" | "user" => "member",
|
||||
"viewer" | "read" | "readonly" => "viewer",
|
||||
"owner" => "owner",
|
||||
_ => "member",
|
||||
|
|
@ -317,7 +310,7 @@ fn get_permissions_for_role(role: &str) -> serde_json::Value {
|
|||
"manage_settings": false,
|
||||
"export_data": false
|
||||
}),
|
||||
"viewer" | _ => json!({
|
||||
_ => json!({
|
||||
"read": true,
|
||||
"write": false,
|
||||
"delete": false,
|
||||
|
|
@ -328,7 +321,7 @@ fn get_permissions_for_role(role: &str) -> serde_json::Value {
|
|||
}
|
||||
}
|
||||
|
||||
async fn send_member_invitation(
|
||||
fn send_member_invitation(
|
||||
_state: &AppState,
|
||||
group_id: &str,
|
||||
user_email: &str,
|
||||
|
|
@ -343,7 +336,7 @@ async fn send_member_invitation(
|
|||
Ok(())
|
||||
}
|
||||
|
||||
async fn log_group_activity(
|
||||
fn log_group_activity(
|
||||
state: &AppState,
|
||||
group_id: &str,
|
||||
action: &str,
|
||||
|
|
@ -373,7 +366,7 @@ async fn log_group_activity(
|
|||
Ok(())
|
||||
}
|
||||
|
||||
async fn create_workspace_structure(
|
||||
fn create_workspace_structure(
|
||||
state: &AppState,
|
||||
team_id: &str,
|
||||
team_name: &str,
|
||||
|
|
@ -428,11 +421,7 @@ async fn create_workspace_structure(
|
|||
Ok(())
|
||||
}
|
||||
|
||||
async fn create_team_channel(
|
||||
state: &AppState,
|
||||
team_id: &str,
|
||||
team_name: &str,
|
||||
) -> Result<(), String> {
|
||||
fn create_team_channel(state: &AppState, team_id: &str, team_name: &str) -> Result<(), String> {
|
||||
let mut conn = state.conn.get().map_err(|e| format!("DB error: {}", e))?;
|
||||
|
||||
let channel_id = Uuid::new_v4().to_string();
|
||||
|
|
|
|||
|
|
@ -38,6 +38,7 @@ impl From<&str> for ReflectionType {
|
|||
}
|
||||
|
||||
impl ReflectionType {
|
||||
#[allow(clippy::literal_string_with_formatting_args)]
|
||||
pub fn prompt_template(&self) -> String {
|
||||
match self {
|
||||
Self::ConversationQuality => r#"Analyze the following conversation and evaluate:
|
||||
|
|
@ -456,7 +457,7 @@ impl ReflectionEngine {
|
|||
return Err("Reflection is not enabled for this bot".to_string());
|
||||
}
|
||||
|
||||
let history = self.get_recent_history(session_id, 20).await?;
|
||||
let history = self.get_recent_history(session_id, 20)?;
|
||||
|
||||
if history.is_empty() {
|
||||
return Err("No conversation history to analyze".to_string());
|
||||
|
|
@ -476,10 +477,10 @@ impl ReflectionEngine {
|
|||
messages_count,
|
||||
);
|
||||
|
||||
self.store_reflection(&result).await?;
|
||||
self.store_reflection(&result)?;
|
||||
|
||||
if self.config.auto_apply && result.needs_improvement(self.config.improvement_threshold) {
|
||||
self.apply_improvements(&result).await?;
|
||||
self.apply_improvements(&result)?;
|
||||
}
|
||||
|
||||
info!(
|
||||
|
|
@ -490,7 +491,7 @@ impl ReflectionEngine {
|
|||
Ok(result)
|
||||
}
|
||||
|
||||
async fn get_recent_history(
|
||||
fn get_recent_history(
|
||||
&self,
|
||||
session_id: Uuid,
|
||||
limit: usize,
|
||||
|
|
@ -533,6 +534,7 @@ impl ReflectionEngine {
|
|||
Ok(history)
|
||||
}
|
||||
|
||||
#[allow(clippy::literal_string_with_formatting_args)]
|
||||
fn build_reflection_prompt(
|
||||
&self,
|
||||
reflection_type: &ReflectionType,
|
||||
|
|
@ -565,14 +567,8 @@ impl ReflectionEngine {
|
|||
Ok(prompt)
|
||||
}
|
||||
|
||||
fn call_llm_for_reflection_sync(&self, prompt: &str) -> Result<String, String> {
|
||||
// Note: This is a synchronous wrapper - actual async call happens in reflect()
|
||||
let _ = prompt;
|
||||
Err("Use async reflect() method instead".to_string())
|
||||
}
|
||||
|
||||
async fn call_llm_for_reflection(&self, prompt: &str) -> Result<String, String> {
|
||||
let (llm_url, llm_model, llm_key) = self.get_llm_config().await?;
|
||||
let (llm_url, llm_model, llm_key) = self.get_llm_config()?;
|
||||
|
||||
let client = reqwest::Client::new();
|
||||
|
||||
|
|
@ -620,7 +616,7 @@ impl ReflectionEngine {
|
|||
Ok(content)
|
||||
}
|
||||
|
||||
async fn get_llm_config(&self) -> Result<(String, String, String), String> {
|
||||
fn get_llm_config(&self) -> Result<(String, String, String), String> {
|
||||
let mut conn = self
|
||||
.state
|
||||
.conn
|
||||
|
|
@ -659,7 +655,7 @@ impl ReflectionEngine {
|
|||
Ok((llm_url, llm_model, llm_key))
|
||||
}
|
||||
|
||||
async fn store_reflection(&self, result: &ReflectionResult) -> Result<(), String> {
|
||||
fn store_reflection(&self, result: &ReflectionResult) -> Result<(), String> {
|
||||
let mut conn = self
|
||||
.state
|
||||
.conn
|
||||
|
|
@ -697,7 +693,7 @@ impl ReflectionEngine {
|
|||
Ok(())
|
||||
}
|
||||
|
||||
async fn apply_improvements(&self, result: &ReflectionResult) -> Result<(), String> {
|
||||
fn apply_improvements(&self, result: &ReflectionResult) -> Result<(), String> {
|
||||
let mut conn = self
|
||||
.state
|
||||
.conn
|
||||
|
|
@ -732,7 +728,7 @@ impl ReflectionEngine {
|
|||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn get_insights(&self, limit: usize) -> Result<Vec<ReflectionResult>, String> {
|
||||
pub fn get_insights(&self, limit: usize) -> Result<Vec<ReflectionResult>, String> {
|
||||
let mut conn = self
|
||||
.state
|
||||
.conn
|
||||
|
|
@ -802,7 +798,7 @@ impl ReflectionEngine {
|
|||
Ok(results)
|
||||
}
|
||||
|
||||
pub async fn should_reflect(&self, session_id: Uuid) -> bool {
|
||||
pub fn should_reflect(&self, session_id: Uuid) -> bool {
|
||||
if !self.config.enabled {
|
||||
return false;
|
||||
}
|
||||
|
|
@ -872,10 +868,8 @@ pub fn set_bot_reflection_keyword(state: Arc<AppState>, user: UserSession, engin
|
|||
let (tx, rx) = std::sync::mpsc::channel();
|
||||
|
||||
std::thread::spawn(move || {
|
||||
let rt = tokio::runtime::Runtime::new().expect("Failed to create runtime");
|
||||
let result = rt.block_on(async {
|
||||
set_reflection_enabled(&state_for_task, bot_id, enabled).await
|
||||
});
|
||||
let _rt = tokio::runtime::Runtime::new().expect("Failed to create runtime");
|
||||
let result = set_reflection_enabled(&state_for_task, bot_id, enabled);
|
||||
let _ = tx.send(result);
|
||||
});
|
||||
|
||||
|
|
@ -958,11 +952,11 @@ pub fn get_reflection_insights_keyword(
|
|||
let state = Arc::clone(&state_clone);
|
||||
let bot_id = user_clone.bot_id;
|
||||
|
||||
let rt = tokio::runtime::Runtime::new().expect("Failed to create runtime");
|
||||
let result = rt.block_on(async {
|
||||
let _rt = tokio::runtime::Runtime::new().expect("Failed to create runtime");
|
||||
let result = {
|
||||
let engine = ReflectionEngine::new(state, bot_id);
|
||||
engine.get_insights(10).await
|
||||
});
|
||||
engine.get_insights(10)
|
||||
};
|
||||
|
||||
match result {
|
||||
Ok(insights) => insights
|
||||
|
|
@ -974,11 +968,7 @@ pub fn get_reflection_insights_keyword(
|
|||
});
|
||||
}
|
||||
|
||||
async fn set_reflection_enabled(
|
||||
state: &AppState,
|
||||
bot_id: Uuid,
|
||||
enabled: bool,
|
||||
) -> Result<String, String> {
|
||||
fn set_reflection_enabled(state: &AppState, bot_id: Uuid, enabled: bool) -> Result<String, String> {
|
||||
let mut conn = state
|
||||
.conn
|
||||
.get()
|
||||
|
|
|
|||
1224
src/basic/keywords/app_generator.rs
Normal file
1224
src/basic/keywords/app_generator.rs
Normal file
File diff suppressed because it is too large
Load diff
230
src/basic/keywords/app_server.rs
Normal file
230
src/basic/keywords/app_server.rs
Normal file
|
|
@ -0,0 +1,230 @@
|
|||
//! App Server Module
|
||||
//!
|
||||
//! Serves generated HTMX applications with clean URLs.
|
||||
//! Apps are synced from drive to SITE_ROOT/{app_name}/ for serving.
|
||||
//!
|
||||
//! URL structure:
|
||||
//! - `/apps/{app_name}/` -> {site_path}/{app_name}/index.html
|
||||
//! - `/apps/{app_name}/patients.html` -> {site_path}/{app_name}/patients.html
|
||||
//! - `/apps/{app_name}/styles.css` -> {site_path}/{app_name}/styles.css
|
||||
//!
|
||||
//! Flow:
|
||||
//! 1. AppGenerator writes to S3 drive: {bucket}/.gbdrive/apps/{app_name}/
|
||||
//! 2. sync_app_to_site_root() copies to: {site_path}/{app_name}/
|
||||
//! 3. This module serves from: {site_path}/{app_name}/
|
||||
|
||||
use crate::shared::state::AppState;
|
||||
use axum::{
|
||||
body::Body,
|
||||
extract::{Path, State},
|
||||
http::{header, StatusCode},
|
||||
response::{IntoResponse, Response},
|
||||
routing::get,
|
||||
Router,
|
||||
};
|
||||
use log::{error, trace, warn};
|
||||
use std::sync::Arc;
|
||||
|
||||
/// Configure routes for serving generated apps
///
/// Mounted routes:
/// - `GET /apps`                      -> JSON list of available apps
/// - `GET /apps/:app_name` and `/`    -> the app's `index.html`
/// - `GET /apps/:app_name/*file_path` -> any file within the app directory
pub fn configure_app_server_routes() -> Router<Arc<AppState>> {
    Router::new()
        // Serve app files: /apps/{app_name}/* (clean URLs)
        .route("/apps/:app_name", get(serve_app_index))
        .route("/apps/:app_name/", get(serve_app_index))
        .route("/apps/:app_name/*file_path", get(serve_app_file))
        // List all available apps
        .route("/apps", get(list_all_apps))
}
|
||||
|
||||
/// Path parameters for app serving
#[derive(Debug, serde::Deserialize)]
pub struct AppPath {
    // Name of the app directory under SITE_ROOT (e.g. "clinic")
    pub app_name: String,
}
|
||||
|
||||
/// Path parameters when a specific file inside an app is requested
#[derive(Debug, serde::Deserialize)]
pub struct AppFilePath {
    // App directory name under SITE_ROOT
    pub app_name: String,
    // Remainder of the URL after the app name (wildcard segment)
    pub file_path: String,
}
|
||||
|
||||
/// Serve the index.html for an app
///
/// Handles `/apps/{app_name}` and `/apps/{app_name}/` by delegating to
/// `serve_app_file_internal` with a fixed file name of `index.html`.
pub async fn serve_app_index(
    State(state): State<Arc<AppState>>,
    Path(params): Path<AppPath>,
) -> impl IntoResponse {
    serve_app_file_internal(&state, &params.app_name, "index.html").await
}
|
||||
|
||||
/// Serve any file from an app directory
///
/// Handles the wildcard route `/apps/{app_name}/*file_path`; the wildcard
/// segment is passed straight through to `serve_app_file_internal`, which
/// sanitises it before touching the filesystem.
pub async fn serve_app_file(
    State(state): State<Arc<AppState>>,
    Path(params): Path<AppFilePath>,
) -> impl IntoResponse {
    serve_app_file_internal(&state, &params.app_name, &params.file_path).await
}
|
||||
|
||||
/// Internal function to serve files from app directory
///
/// Resolves `{site_path}/{app_name}/{file_path}` after sanitising both
/// user-supplied components, then returns the file contents with a MIME
/// type derived from the extension and a 1-hour public cache header.
///
/// Error mapping: empty/invalid components -> 400, missing file -> 404,
/// read or response-build failure -> 500.
///
/// NOTE(review): uses blocking `std::fs` inside an async handler — fine for
/// small generated apps, but consider `spawn_blocking` or async fs if apps
/// can contain large files. Confirm against the server's runtime model.
async fn serve_app_file_internal(state: &AppState, app_name: &str, file_path: &str) -> Response {
    // Sanitize paths to prevent directory traversal
    let sanitized_app_name = sanitize_path_component(app_name);
    let sanitized_file_path = sanitize_path_component(file_path);

    if sanitized_app_name.is_empty() || sanitized_file_path.is_empty() {
        return (StatusCode::BAD_REQUEST, "Invalid path").into_response();
    }

    // Construct full file path from SITE_ROOT
    // Apps are synced to: {site_path}/{app_name}/
    let site_path = state
        .config
        .as_ref()
        .map(|c| c.site_path.clone())
        .unwrap_or_else(|| "./botserver-stack/sites".to_string());

    let full_path = format!(
        "{}/{}/{}",
        site_path, sanitized_app_name, sanitized_file_path
    );

    trace!("Serving app file: {}", full_path);

    // Check if file exists
    let path = std::path::Path::new(&full_path);
    if !path.exists() {
        warn!("App file not found: {}", full_path);
        return (StatusCode::NOT_FOUND, "File not found").into_response();
    }

    // Determine content type
    let content_type = get_content_type(&sanitized_file_path);

    // Read and serve the file
    match std::fs::read(&full_path) {
        Ok(contents) => Response::builder()
            .status(StatusCode::OK)
            .header(header::CONTENT_TYPE, content_type)
            .header(header::CACHE_CONTROL, "public, max-age=3600")
            .body(Body::from(contents))
            .unwrap_or_else(|_| {
                (
                    StatusCode::INTERNAL_SERVER_ERROR,
                    "Failed to build response",
                )
                    .into_response()
            }),
        Err(e) => {
            error!("Failed to read file {}: {}", full_path, e);
            (StatusCode::INTERNAL_SERVER_ERROR, "Failed to read file").into_response()
        }
    }
}
|
||||
|
||||
/// List all available apps from SITE_ROOT
///
/// An "app" is any non-hidden directory directly under the site path that
/// contains an `index.html`. Dot-prefixed and `.gbai`-suffixed directories
/// are treated as system folders and skipped. Responds with
/// `{ "apps": [{name, url, has_index}], "count": N }`. A missing or
/// unreadable site directory yields an empty list, not an error.
pub async fn list_all_apps(State(state): State<Arc<AppState>>) -> impl IntoResponse {
    let site_path = state
        .config
        .as_ref()
        .map(|c| c.site_path.clone())
        .unwrap_or_else(|| "./botserver-stack/sites".to_string());

    let mut apps = Vec::new();

    // List all directories in site_path that have an index.html (are apps)
    if let Ok(entries) = std::fs::read_dir(&site_path) {
        for entry in entries.flatten() {
            if entry.file_type().map(|t| t.is_dir()).unwrap_or(false) {
                if let Some(name) = entry.file_name().to_str() {
                    // Skip .gbai directories and other system folders
                    if name.starts_with('.') || name.ends_with(".gbai") {
                        continue;
                    }

                    let app_path = entry.path();
                    let has_index = app_path.join("index.html").exists();

                    if has_index {
                        apps.push(serde_json::json!({
                            "name": name,
                            "url": format!("/apps/{}", name),
                            "has_index": true
                        }));
                    }
                }
            }
        }
    }

    (
        StatusCode::OK,
        axum::Json(serde_json::json!({
            "apps": apps,
            "count": apps.len()
        })),
    )
        .into_response()
}
|
||||
|
||||
/// Sanitize path component to prevent directory traversal
///
/// Keeps only alphanumerics, `-`, `_`, `.` and `/`; removes `..` sequences
/// and collapses duplicate slashes until a fixpoint; strips leading and
/// trailing slashes. Returns an empty string for fully-hostile input, which
/// callers treat as a 400.
///
/// Bug fixed: the previous version removed `..` BEFORE filtering characters,
/// so an input like `". ./secret"` (a disallowed character between the dots)
/// survived the `..` check and became `"../secret"` after filtering — a
/// traversal bypass. Removal now also loops to a fixpoint because a single
/// non-overlapping `replace("..", "")` pass can reassemble `..` from inputs
/// like `"...."` combined with surrounding text.
fn sanitize_path_component(component: &str) -> String {
    // 1. Drop every character outside the allow-list FIRST, so later
    //    ".." removal sees the final character sequence.
    let mut cleaned: String = component
        .chars()
        .filter(|c| c.is_alphanumeric() || matches!(c, '-' | '_' | '.' | '/'))
        .collect();

    // 2. Strip ".." and "//" repeatedly until nothing changes.
    loop {
        let next = cleaned.replace("..", "").replace("//", "/");
        if next == cleaned {
            break;
        }
        cleaned = next;
    }

    // 3. Trim slashes so the result joins cleanly under the site root.
    cleaned
        .trim_start_matches('/')
        .trim_end_matches('/')
        .to_string()
}
|
||||
|
||||
/// Map a file name to the MIME type used in the Content-Type header.
///
/// The lookup key is the text after the final `.` (case-insensitive), or
/// the whole name when there is no dot; anything unrecognised falls back
/// to `application/octet-stream`.
fn get_content_type(file_path: &str) -> &'static str {
    // Extension = everything after the last '.', or the full name if none.
    let ext = file_path
        .rsplit_once('.')
        .map_or(file_path, |(_, e)| e)
        .to_lowercase();

    // Flat lookup table; a linear scan over ~20 entries is plenty fast.
    const MIME_TYPES: &[(&str, &str)] = &[
        ("html", "text/html; charset=utf-8"),
        ("htm", "text/html; charset=utf-8"),
        ("css", "text/css; charset=utf-8"),
        ("js", "application/javascript; charset=utf-8"),
        ("json", "application/json; charset=utf-8"),
        ("png", "image/png"),
        ("jpg", "image/jpeg"),
        ("jpeg", "image/jpeg"),
        ("gif", "image/gif"),
        ("svg", "image/svg+xml"),
        ("ico", "image/x-icon"),
        ("woff", "font/woff"),
        ("woff2", "font/woff2"),
        ("ttf", "font/ttf"),
        ("eot", "application/vnd.ms-fontobject"),
        ("txt", "text/plain; charset=utf-8"),
        ("xml", "application/xml; charset=utf-8"),
        ("pdf", "application/pdf"),
    ];

    MIME_TYPES
        .iter()
        .find(|(e, _)| *e == ext)
        .map_or("application/octet-stream", |(_, mime)| mime)
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;

    // Sanitisation must strip traversal sequences while leaving benign
    // names (letters, digits, '-', '_', '.', '/') untouched.
    #[test]
    fn test_sanitize_path_component() {
        assert_eq!(sanitize_path_component("clinic"), "clinic");
        assert_eq!(sanitize_path_component("../etc/passwd"), "etc/passwd");
        assert_eq!(sanitize_path_component("app/../secret"), "app/secret");
        assert_eq!(sanitize_path_component("/leading/slash"), "leading/slash");
        assert_eq!(sanitize_path_component("file.html"), "file.html");
        assert_eq!(sanitize_path_component("my-app_v2"), "my-app_v2");
    }

    // MIME lookup is extension-based with an octet-stream fallback.
    #[test]
    fn test_get_content_type() {
        assert_eq!(get_content_type("index.html"), "text/html; charset=utf-8");
        assert_eq!(get_content_type("styles.css"), "text/css; charset=utf-8");
        assert_eq!(
            get_content_type("app.js"),
            "application/javascript; charset=utf-8"
        );
        assert_eq!(get_content_type("image.png"), "image/png");
        assert_eq!(get_content_type("unknown.xyz"), "application/octet-stream");
    }
}
|
||||
|
|
@ -279,7 +279,7 @@ mod tests {
|
|||
|
||||
#[test]
|
||||
fn test_join() {
|
||||
let arr = vec!["a", "b", "c"];
|
||||
let arr = ["a", "b", "c"];
|
||||
let result = arr.join("-");
|
||||
assert_eq!(result, "a-b-c");
|
||||
}
|
||||
|
|
@ -287,8 +287,8 @@ mod tests {
|
|||
#[test]
|
||||
fn test_split() {
|
||||
let s = "a,b,c";
|
||||
let parts: Vec<&str> = s.split(',').collect();
|
||||
assert_eq!(parts.len(), 3);
|
||||
let parts_count = s.split(',').count();
|
||||
assert_eq!(parts_count, 3);
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
|
|
|||
|
|
@ -55,6 +55,22 @@ fn compare_dynamic(a: &Dynamic, b: &Dynamic) -> std::cmp::Ordering {
|
|||
a.to_string().cmp(&b.to_string())
|
||||
}
|
||||
|
||||
fn to_f64(value: &Dynamic) -> Option<f64> {
|
||||
if value.is_int() {
|
||||
value.as_int().ok().map(|i| i as f64)
|
||||
} else if value.is_float() {
|
||||
value.as_float().ok()
|
||||
} else if value.is_string() {
|
||||
value
|
||||
.clone()
|
||||
.into_string()
|
||||
.ok()
|
||||
.and_then(|s| s.parse().ok())
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
|
@ -113,19 +129,3 @@ mod tests {
|
|||
assert_eq!(compare_dynamic(&a, &b), std::cmp::Ordering::Less);
|
||||
}
|
||||
}
|
||||
|
||||
fn to_f64(value: &Dynamic) -> Option<f64> {
|
||||
if value.is_int() {
|
||||
value.as_int().ok().map(|i| i as f64)
|
||||
} else if value.is_float() {
|
||||
value.as_float().ok()
|
||||
} else if value.is_string() {
|
||||
value
|
||||
.clone()
|
||||
.into_string()
|
||||
.ok()
|
||||
.and_then(|s| s.parse().ok())
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -35,18 +35,19 @@ fn unique_array(arr: Array) -> Array {
|
|||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use rhai::{Array, Dynamic};
|
||||
use rhai::Dynamic;
|
||||
|
||||
#[test]
|
||||
fn test_unique_integers() {
|
||||
let mut arr = Array::new();
|
||||
arr.push(Dynamic::from(1_i64));
|
||||
arr.push(Dynamic::from(2_i64));
|
||||
arr.push(Dynamic::from(2_i64));
|
||||
arr.push(Dynamic::from(3_i64));
|
||||
arr.push(Dynamic::from(3_i64));
|
||||
arr.push(Dynamic::from(3_i64));
|
||||
arr.push(Dynamic::from(4_i64));
|
||||
let arr: Array = vec![
|
||||
Dynamic::from(1_i64),
|
||||
Dynamic::from(2_i64),
|
||||
Dynamic::from(2_i64),
|
||||
Dynamic::from(3_i64),
|
||||
Dynamic::from(3_i64),
|
||||
Dynamic::from(3_i64),
|
||||
Dynamic::from(4_i64),
|
||||
];
|
||||
|
||||
let result = unique_array(arr);
|
||||
assert_eq!(result.len(), 4);
|
||||
|
|
@ -54,11 +55,12 @@ mod tests {
|
|||
|
||||
#[test]
|
||||
fn test_unique_strings() {
|
||||
let mut arr = Array::new();
|
||||
arr.push(Dynamic::from("Alice"));
|
||||
arr.push(Dynamic::from("Bob"));
|
||||
arr.push(Dynamic::from("Alice"));
|
||||
arr.push(Dynamic::from("Charlie"));
|
||||
let arr: Array = vec![
|
||||
Dynamic::from("Alice"),
|
||||
Dynamic::from("Bob"),
|
||||
Dynamic::from("Alice"),
|
||||
Dynamic::from("Charlie"),
|
||||
];
|
||||
|
||||
let result = unique_array(arr);
|
||||
assert_eq!(result.len(), 3);
|
||||
|
|
@ -66,12 +68,13 @@ mod tests {
|
|||
|
||||
#[test]
|
||||
fn test_unique_preserves_order() {
|
||||
let mut arr = Array::new();
|
||||
arr.push(Dynamic::from("C"));
|
||||
arr.push(Dynamic::from("A"));
|
||||
arr.push(Dynamic::from("B"));
|
||||
arr.push(Dynamic::from("A"));
|
||||
arr.push(Dynamic::from("C"));
|
||||
let arr: Array = vec![
|
||||
Dynamic::from("C"),
|
||||
Dynamic::from("A"),
|
||||
Dynamic::from("B"),
|
||||
Dynamic::from("A"),
|
||||
Dynamic::from("C"),
|
||||
];
|
||||
|
||||
let result = unique_array(arr);
|
||||
assert_eq!(result.len(), 3);
|
||||
|
|
@ -89,8 +92,7 @@ mod tests {
|
|||
|
||||
#[test]
|
||||
fn test_unique_single_element() {
|
||||
let mut arr = Array::new();
|
||||
arr.push(Dynamic::from(42_i64));
|
||||
let arr: Array = vec![Dynamic::from(42_i64)];
|
||||
|
||||
let result = unique_array(arr);
|
||||
assert_eq!(result.len(), 1);
|
||||
|
|
@ -98,10 +100,11 @@ mod tests {
|
|||
|
||||
#[test]
|
||||
fn test_unique_all_same() {
|
||||
let mut arr = Array::new();
|
||||
arr.push(Dynamic::from(1_i64));
|
||||
arr.push(Dynamic::from(1_i64));
|
||||
arr.push(Dynamic::from(1_i64));
|
||||
let arr: Array = vec![
|
||||
Dynamic::from(1_i64),
|
||||
Dynamic::from(1_i64),
|
||||
Dynamic::from(1_i64),
|
||||
];
|
||||
|
||||
let result = unique_array(arr);
|
||||
assert_eq!(result.len(), 1);
|
||||
|
|
@ -109,12 +112,13 @@ mod tests {
|
|||
|
||||
#[test]
|
||||
fn test_unique_mixed_types() {
|
||||
let mut arr = Array::new();
|
||||
arr.push(Dynamic::from(1_i64));
|
||||
arr.push(Dynamic::from("1"));
|
||||
arr.push(Dynamic::from(1_i64));
|
||||
let arr: Array = vec![
|
||||
Dynamic::from(1_i64),
|
||||
Dynamic::from("1"),
|
||||
Dynamic::from(1_i64),
|
||||
];
|
||||
|
||||
let result = unique_array(arr);
|
||||
assert!(result.len() >= 1 && result.len() <= 2);
|
||||
assert!(!result.is_empty() && result.len() <= 2);
|
||||
}
|
||||
}
|
||||
|
|
|
|||
269
src/basic/keywords/ask_later.rs
Normal file
269
src/basic/keywords/ask_later.rs
Normal file
|
|
@ -0,0 +1,269 @@
|
|||
use crate::core::shared::models::UserSession;
|
||||
use crate::core::shared::state::AppState;
|
||||
use diesel::prelude::*;
|
||||
use diesel::sql_query;
|
||||
use diesel::sql_types::Text;
|
||||
use log::{info, trace};
|
||||
use rhai::{Dynamic, Engine};
|
||||
use std::sync::Arc;
|
||||
use uuid::Uuid;
|
||||
|
||||
/// Register the ASK LATER family of script functions on the Rhai engine.
///
/// Functions registered:
/// - `ask_later(label, config_key, reason)` -> pending-info id string, or unit on failure
/// - `ask_later_with_type(label, config_key, reason, field_type)` -> id string, or unit
/// - `fill_pending_info(config_key, value)` -> bool success flag
/// - `get_pending_info_count()` -> i64 count (0 on error)
/// - `list_pending_info()` -> array of maps {id, label, config_key, reason, field_type}
///
/// Each closure captures its own clones of `state`/`user` because Rhai
/// callbacks must be `'static`; errors are logged and converted into
/// script-friendly fallbacks rather than propagated.
pub fn ask_later_keyword(state: Arc<AppState>, user: UserSession, engine: &mut Engine) {
    let state_clone = state.clone();
    let user_clone = user.clone();

    engine.register_fn(
        "ask_later",
        move |label: &str, config_key: &str, reason: &str| -> Dynamic {
            let state = state_clone.clone();
            let user = user_clone.clone();

            // Field type defaults to "text" (applied inside save_pending_info).
            let result = save_pending_info(&state, &user, label, config_key, reason, None);

            match result {
                Ok(id) => {
                    info!(
                        "Pending info saved: {} -> {} (id: {})",
                        label, config_key, id
                    );
                    Dynamic::from(id.to_string())
                }
                Err(e) => {
                    log::error!("Failed to save pending info: {}", e);
                    Dynamic::UNIT
                }
            }
        },
    );

    let state_clone2 = state.clone();
    let user_clone2 = user.clone();

    engine.register_fn(
        "ask_later_with_type",
        move |label: &str, config_key: &str, reason: &str, field_type: &str| -> Dynamic {
            let state = state_clone2.clone();
            let user = user_clone2.clone();

            // Same as ask_later but with an explicit field type hint.
            let result =
                save_pending_info(&state, &user, label, config_key, reason, Some(field_type));

            match result {
                Ok(id) => {
                    info!(
                        "Pending info saved with type {}: {} -> {} (id: {})",
                        field_type, label, config_key, id
                    );
                    Dynamic::from(id.to_string())
                }
                Err(e) => {
                    log::error!("Failed to save pending info: {}", e);
                    Dynamic::UNIT
                }
            }
        },
    );

    let state_clone3 = state.clone();
    let user_clone3 = user.clone();

    engine.register_fn(
        "fill_pending_info",
        move |config_key: &str, value: &str| -> bool {
            let state = state_clone3.clone();
            let user = user_clone3.clone();

            // Marks matching pending rows filled and stores the value in config.
            match fill_pending_info(&state, &user, config_key, value) {
                Ok(_) => {
                    info!("Pending info filled: {} = {}", config_key, value);
                    true
                }
                Err(e) => {
                    log::error!("Failed to fill pending info: {}", e);
                    false
                }
            }
        },
    );

    let state_clone4 = state.clone();
    let user_clone4 = user.clone();

    engine.register_fn("get_pending_info_count", move || -> i64 {
        let state = state_clone4.clone();
        let user = user_clone4.clone();

        match get_pending_info_count(&state, &user) {
            Ok(count) => count,
            Err(e) => {
                log::error!("Failed to get pending info count: {}", e);
                0
            }
        }
    });

    let state_clone5 = state.clone();
    let user_clone5 = user.clone();

    engine.register_fn("list_pending_info", move || -> Dynamic {
        let state = state_clone5.clone();
        let user = user_clone5.clone();

        match list_pending_info(&state, &user) {
            Ok(items) => {
                // Convert each row into a Rhai map for script consumption.
                let array: Vec<Dynamic> = items
                    .into_iter()
                    .map(|item| {
                        let mut map = rhai::Map::new();
                        map.insert("id".into(), Dynamic::from(item.id));
                        map.insert("label".into(), Dynamic::from(item.field_label));
                        map.insert("config_key".into(), Dynamic::from(item.config_key));
                        map.insert(
                            "reason".into(),
                            Dynamic::from(item.reason.unwrap_or_default()),
                        );
                        map.insert("field_type".into(), Dynamic::from(item.field_type));
                        Dynamic::from(map)
                    })
                    .collect();
                Dynamic::from(array)
            }
            Err(e) => {
                log::error!("Failed to list pending info: {}", e);
                // Scripts always get an array, even on failure.
                Dynamic::from(Vec::<Dynamic>::new())
            }
        }
    });

    trace!("ASK LATER keyword registered");
}
|
||||
|
||||
/// Insert a `pending_info` row recording information the bot still needs.
///
/// `field_type` defaults to `"text"` when `None`. Returns the generated
/// row id on success.
///
/// NOTE(review): `config_key` is bound for BOTH the `field_name` ($3) and
/// `config_key` ($7) columns — confirm `field_name` is intentionally the
/// same value and not meant to carry a distinct identifier.
fn save_pending_info(
    state: &AppState,
    user: &UserSession,
    label: &str,
    config_key: &str,
    reason: &str,
    field_type: Option<&str>,
) -> Result<Uuid, Box<dyn std::error::Error + Send + Sync>> {
    let bot_id = user.bot_id;
    let field_type_str = field_type.unwrap_or("text");
    let id = Uuid::new_v4();

    let mut conn = state.conn.get()?;

    sql_query(
        "INSERT INTO pending_info (id, bot_id, field_name, field_label, field_type, reason, config_key)
         VALUES ($1, $2, $3, $4, $5, $6, $7)",
    )
    .bind::<diesel::sql_types::Uuid, _>(id)
    .bind::<diesel::sql_types::Uuid, _>(bot_id)
    .bind::<Text, _>(config_key)
    .bind::<Text, _>(label)
    .bind::<Text, _>(field_type_str)
    .bind::<Text, _>(reason)
    .bind::<Text, _>(config_key)
    .execute(&mut conn)?;

    Ok(id)
}
|
||||
|
||||
/// Mark every unfilled `pending_info` row for (`bot_id`, `config_key`) as
/// filled and persist the supplied value into the bot's config store.
fn fill_pending_info(
    state: &AppState,
    user: &UserSession,
    config_key: &str,
    value: &str,
) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
    let bot_id = user.bot_id;

    let mut conn = state.conn.get()?;

    // Stamp pending rows as answered; NOW() keeps the timestamp DB-side.
    sql_query(
        "UPDATE pending_info SET filled_at = NOW() WHERE bot_id = $1 AND config_key = $2 AND filled_at IS NULL",
    )
    .bind::<diesel::sql_types::Uuid, _>(bot_id)
    .bind::<Text, _>(config_key)
    .execute(&mut conn)?;

    // Store the actual value through the config manager so ordinary config
    // reads see it immediately.
    let config_manager = crate::core::config::ConfigManager::new(state.conn.clone());
    config_manager.set_config(&bot_id, config_key, value)?;

    Ok(())
}
|
||||
|
||||
/// Count unfilled `pending_info` rows for the session's bot.
fn get_pending_info_count(
    state: &AppState,
    user: &UserSession,
) -> Result<i64, Box<dyn std::error::Error + Send + Sync>> {
    let bot_id = user.bot_id;

    let mut conn = state.conn.get()?;

    let result: CountResult = sql_query(
        "SELECT COUNT(*) as count FROM pending_info WHERE bot_id = $1 AND filled_at IS NULL",
    )
    .bind::<diesel::sql_types::Uuid, _>(bot_id)
    .get_result(&mut conn)?;

    Ok(result.count)
}
|
||||
|
||||
/// A single outstanding piece of information, as exposed to Rhai scripts.
#[derive(Debug, Clone)]
pub struct PendingInfoItem {
    // Row id as a string (UUID rendered for script consumption)
    pub id: String,
    // Human-readable label shown when prompting the user
    pub field_label: String,
    // Config key the answer will be stored under
    pub config_key: String,
    // Optional explanation of why the info is needed
    pub reason: Option<String>,
    // Input type hint (defaults to "text" at insert time)
    pub field_type: String,
}
|
||||
|
||||
/// Fetch all unfilled `pending_info` rows for the session's bot, oldest
/// first, mapped into script-friendly `PendingInfoItem` values.
fn list_pending_info(
    state: &AppState,
    user: &UserSession,
) -> Result<Vec<PendingInfoItem>, Box<dyn std::error::Error + Send + Sync>> {
    let bot_id = user.bot_id;

    let mut conn = state.conn.get()?;

    let results: Vec<PendingInfoRow> = sql_query(
        "SELECT id, field_label, config_key, reason, field_type
         FROM pending_info
         WHERE bot_id = $1 AND filled_at IS NULL
         ORDER BY created_at ASC",
    )
    .bind::<diesel::sql_types::Uuid, _>(bot_id)
    .get_results(&mut conn)?;

    // Stringify the UUID here so scripts never handle raw Uuid values.
    let items = results
        .into_iter()
        .map(|row| PendingInfoItem {
            id: row.id.to_string(),
            field_label: row.field_label,
            config_key: row.config_key,
            reason: row.reason,
            field_type: row.field_type,
        })
        .collect();

    Ok(items)
}
|
||||
|
||||
/// Row shape for the `COUNT(*)` raw-SQL query (aliased as `count`).
#[derive(QueryableByName)]
struct CountResult {
    #[diesel(sql_type = diesel::sql_types::BigInt)]
    count: i64,
}
|
||||
|
||||
/// Raw row shape for the `pending_info` SELECT (column order must match
/// the query's projection for `QueryableByName`).
#[derive(QueryableByName)]
struct PendingInfoRow {
    #[diesel(sql_type = diesel::sql_types::Uuid)]
    id: Uuid,
    #[diesel(sql_type = Text)]
    field_label: String,
    #[diesel(sql_type = Text)]
    config_key: String,
    #[diesel(sql_type = diesel::sql_types::Nullable<Text>)]
    reason: Option<String>,
    #[diesel(sql_type = Text)]
    field_type: String,
}
|
||||
|
|
@ -1,6 +1,7 @@
|
|||
use crate::basic::keywords::auto_task::{
|
||||
AutoTask, AutoTaskStatus, ExecutionMode, PendingApproval, PendingDecision, TaskPriority,
|
||||
};
|
||||
use crate::basic::keywords::intent_classifier::IntentClassifier;
|
||||
use crate::basic::keywords::intent_compiler::IntentCompiler;
|
||||
use crate::basic::keywords::safety_layer::{SafetyLayer, SimulationResult};
|
||||
use crate::shared::state::AppState;
|
||||
|
|
@ -23,6 +24,43 @@ pub struct CompileIntentRequest {
|
|||
pub priority: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Deserialize)]
|
||||
pub struct ClassifyIntentRequest {
|
||||
pub intent: String,
|
||||
pub auto_process: Option<bool>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize)]
|
||||
pub struct ClassifyIntentResponse {
|
||||
pub success: bool,
|
||||
pub intent_type: String,
|
||||
pub confidence: f64,
|
||||
pub suggested_name: Option<String>,
|
||||
pub requires_clarification: bool,
|
||||
pub clarification_question: Option<String>,
|
||||
pub result: Option<IntentResultResponse>,
|
||||
pub error: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize)]
|
||||
pub struct IntentResultResponse {
|
||||
pub success: bool,
|
||||
pub message: String,
|
||||
pub app_url: Option<String>,
|
||||
pub task_id: Option<String>,
|
||||
pub schedule_id: Option<String>,
|
||||
pub tool_triggers: Vec<String>,
|
||||
pub created_resources: Vec<CreatedResourceResponse>,
|
||||
pub next_steps: Vec<String>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize)]
|
||||
pub struct CreatedResourceResponse {
|
||||
pub resource_type: String,
|
||||
pub name: String,
|
||||
pub path: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize)]
|
||||
pub struct CompileIntentResponse {
|
||||
pub success: bool,
|
||||
|
|
@ -221,6 +259,128 @@ pub struct RecommendationResponse {
|
|||
pub action: Option<String>,
|
||||
}
|
||||
|
||||
/// Classify and optionally process an intent
|
||||
/// POST /api/autotask/classify
|
||||
pub async fn classify_intent_handler(
|
||||
State(state): State<Arc<AppState>>,
|
||||
Json(request): Json<ClassifyIntentRequest>,
|
||||
) -> impl IntoResponse {
|
||||
info!(
|
||||
"Classifying intent: {}",
|
||||
&request.intent[..request.intent.len().min(100)]
|
||||
);
|
||||
|
||||
let session = match get_current_session(&state) {
|
||||
Ok(s) => s,
|
||||
Err(e) => {
|
||||
return (
|
||||
StatusCode::UNAUTHORIZED,
|
||||
Json(ClassifyIntentResponse {
|
||||
success: false,
|
||||
intent_type: "UNKNOWN".to_string(),
|
||||
confidence: 0.0,
|
||||
suggested_name: None,
|
||||
requires_clarification: false,
|
||||
clarification_question: None,
|
||||
result: None,
|
||||
error: Some(format!("Authentication error: {}", e)),
|
||||
}),
|
||||
);
|
||||
}
|
||||
};
|
||||
|
||||
let classifier = IntentClassifier::new(Arc::clone(&state));
|
||||
let auto_process = request.auto_process.unwrap_or(true);
|
||||
|
||||
if auto_process {
|
||||
// Classify and process in one step
|
||||
match classifier
|
||||
.classify_and_process(&request.intent, &session)
|
||||
.await
|
||||
{
|
||||
Ok(result) => {
|
||||
let response = ClassifyIntentResponse {
|
||||
success: result.success,
|
||||
intent_type: result.intent_type.to_string(),
|
||||
confidence: 0.0, // Would come from classification
|
||||
suggested_name: None,
|
||||
requires_clarification: false,
|
||||
clarification_question: None,
|
||||
result: Some(IntentResultResponse {
|
||||
success: result.success,
|
||||
message: result.message,
|
||||
app_url: result.app_url,
|
||||
task_id: result.task_id,
|
||||
schedule_id: result.schedule_id,
|
||||
tool_triggers: result.tool_triggers,
|
||||
created_resources: result
|
||||
.created_resources
|
||||
.into_iter()
|
||||
.map(|r| CreatedResourceResponse {
|
||||
resource_type: r.resource_type,
|
||||
name: r.name,
|
||||
path: r.path,
|
||||
})
|
||||
.collect(),
|
||||
next_steps: result.next_steps,
|
||||
}),
|
||||
error: result.error,
|
||||
};
|
||||
(StatusCode::OK, Json(response))
|
||||
}
|
||||
Err(e) => {
|
||||
error!("Failed to classify/process intent: {}", e);
|
||||
(
|
||||
StatusCode::INTERNAL_SERVER_ERROR,
|
||||
Json(ClassifyIntentResponse {
|
||||
success: false,
|
||||
intent_type: "UNKNOWN".to_string(),
|
||||
confidence: 0.0,
|
||||
suggested_name: None,
|
||||
requires_clarification: false,
|
||||
clarification_question: None,
|
||||
result: None,
|
||||
error: Some(e.to_string()),
|
||||
}),
|
||||
)
|
||||
}
|
||||
}
|
||||
} else {
|
||||
// Just classify, don't process
|
||||
match classifier.classify(&request.intent, &session).await {
|
||||
Ok(classification) => {
|
||||
let response = ClassifyIntentResponse {
|
||||
success: true,
|
||||
intent_type: classification.intent_type.to_string(),
|
||||
confidence: classification.confidence,
|
||||
suggested_name: classification.suggested_name,
|
||||
requires_clarification: classification.requires_clarification,
|
||||
clarification_question: classification.clarification_question,
|
||||
result: None,
|
||||
error: None,
|
||||
};
|
||||
(StatusCode::OK, Json(response))
|
||||
}
|
||||
Err(e) => {
|
||||
error!("Failed to classify intent: {}", e);
|
||||
(
|
||||
StatusCode::INTERNAL_SERVER_ERROR,
|
||||
Json(ClassifyIntentResponse {
|
||||
success: false,
|
||||
intent_type: "UNKNOWN".to_string(),
|
||||
confidence: 0.0,
|
||||
suggested_name: None,
|
||||
requires_clarification: false,
|
||||
clarification_question: None,
|
||||
result: None,
|
||||
error: Some(e.to_string()),
|
||||
}),
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn compile_intent_handler(
|
||||
State(state): State<Arc<AppState>>,
|
||||
Json(request): Json<CompileIntentRequest>,
|
||||
|
|
@ -230,7 +390,7 @@ pub async fn compile_intent_handler(
|
|||
&request.intent[..request.intent.len().min(100)]
|
||||
);
|
||||
|
||||
let session = match get_current_session(&state).await {
|
||||
let session = match get_current_session(&state) {
|
||||
Ok(s) => s,
|
||||
Err(e) => {
|
||||
return (
|
||||
|
|
@ -374,7 +534,7 @@ pub async fn execute_plan_handler(
|
|||
) -> impl IntoResponse {
|
||||
info!("Executing plan: {}", request.plan_id);
|
||||
|
||||
let session = match get_current_session(&state).await {
|
||||
let session = match get_current_session(&state) {
|
||||
Ok(s) => s,
|
||||
Err(e) => {
|
||||
return (
|
||||
|
|
@ -405,10 +565,8 @@ pub async fn execute_plan_handler(
|
|||
_ => TaskPriority::Medium,
|
||||
};
|
||||
|
||||
match create_auto_task_from_plan(&state, &session, &request.plan_id, execution_mode, priority)
|
||||
.await
|
||||
{
|
||||
Ok(task) => match start_task_execution(&state, &task.id).await {
|
||||
match create_auto_task_from_plan(&state, &session, &request.plan_id, execution_mode, priority) {
|
||||
Ok(task) => match start_task_execution(&state, &task.id) {
|
||||
Ok(_) => (
|
||||
StatusCode::OK,
|
||||
Json(ExecutePlanResponse {
|
||||
|
|
@ -451,7 +609,7 @@ pub async fn list_tasks_handler(
|
|||
let limit = query.limit.unwrap_or(50);
|
||||
let offset = query.offset.unwrap_or(0);
|
||||
|
||||
match list_auto_tasks(&state, filter, limit, offset).await {
|
||||
match list_auto_tasks(&state, filter, limit, offset) {
|
||||
Ok(tasks) => {
|
||||
let html = render_task_list_html(&tasks);
|
||||
(StatusCode::OK, axum::response::Html(html))
|
||||
|
|
@ -474,7 +632,7 @@ pub async fn list_tasks_handler(
|
|||
}
|
||||
|
||||
pub async fn get_stats_handler(State(state): State<Arc<AppState>>) -> impl IntoResponse {
|
||||
match get_auto_task_stats(&state).await {
|
||||
match get_auto_task_stats(&state) {
|
||||
Ok(stats) => (StatusCode::OK, Json(stats)),
|
||||
Err(e) => {
|
||||
error!("Failed to get stats: {}", e);
|
||||
|
|
@ -498,7 +656,7 @@ pub async fn pause_task_handler(
|
|||
State(state): State<Arc<AppState>>,
|
||||
Path(task_id): Path<String>,
|
||||
) -> impl IntoResponse {
|
||||
match update_task_status(&state, &task_id, AutoTaskStatus::Paused).await {
|
||||
match update_task_status(&state, &task_id, AutoTaskStatus::Paused) {
|
||||
Ok(_) => (
|
||||
StatusCode::OK,
|
||||
Json(TaskActionResponse {
|
||||
|
|
@ -522,9 +680,9 @@ pub async fn resume_task_handler(
|
|||
State(state): State<Arc<AppState>>,
|
||||
Path(task_id): Path<String>,
|
||||
) -> impl IntoResponse {
|
||||
match update_task_status(&state, &task_id, AutoTaskStatus::Running).await {
|
||||
match update_task_status(&state, &task_id, AutoTaskStatus::Running) {
|
||||
Ok(_) => {
|
||||
let _ = start_task_execution(&state, &task_id).await;
|
||||
let _ = start_task_execution(&state, &task_id);
|
||||
(
|
||||
StatusCode::OK,
|
||||
Json(TaskActionResponse {
|
||||
|
|
@ -549,7 +707,7 @@ pub async fn cancel_task_handler(
|
|||
State(state): State<Arc<AppState>>,
|
||||
Path(task_id): Path<String>,
|
||||
) -> impl IntoResponse {
|
||||
match update_task_status(&state, &task_id, AutoTaskStatus::Cancelled).await {
|
||||
match update_task_status(&state, &task_id, AutoTaskStatus::Cancelled) {
|
||||
Ok(_) => (
|
||||
StatusCode::OK,
|
||||
Json(TaskActionResponse {
|
||||
|
|
@ -573,7 +731,7 @@ pub async fn simulate_task_handler(
|
|||
State(state): State<Arc<AppState>>,
|
||||
Path(task_id): Path<String>,
|
||||
) -> impl IntoResponse {
|
||||
let session = match get_current_session(&state).await {
|
||||
let session = match get_current_session(&state) {
|
||||
Ok(s) => s,
|
||||
Err(e) => {
|
||||
return (
|
||||
|
|
@ -752,7 +910,7 @@ pub async fn get_decisions_handler(
|
|||
State(state): State<Arc<AppState>>,
|
||||
Path(task_id): Path<String>,
|
||||
) -> impl IntoResponse {
|
||||
match get_pending_decisions(&state, &task_id).await {
|
||||
match get_pending_decisions(&state, &task_id) {
|
||||
Ok(decisions) => (StatusCode::OK, Json(decisions)),
|
||||
Err(e) => {
|
||||
error!("Failed to get decisions: {}", e);
|
||||
|
|
@ -769,7 +927,7 @@ pub async fn submit_decision_handler(
|
|||
Path(task_id): Path<String>,
|
||||
Json(request): Json<DecisionRequest>,
|
||||
) -> impl IntoResponse {
|
||||
match submit_decision(&state, &task_id, &request).await {
|
||||
match submit_decision(&state, &task_id, &request) {
|
||||
Ok(_) => (
|
||||
StatusCode::OK,
|
||||
Json(TaskActionResponse {
|
||||
|
|
@ -793,7 +951,7 @@ pub async fn get_approvals_handler(
|
|||
State(state): State<Arc<AppState>>,
|
||||
Path(task_id): Path<String>,
|
||||
) -> impl IntoResponse {
|
||||
match get_pending_approvals(&state, &task_id).await {
|
||||
match get_pending_approvals(&state, &task_id) {
|
||||
Ok(approvals) => (StatusCode::OK, Json(approvals)),
|
||||
Err(e) => {
|
||||
error!("Failed to get approvals: {}", e);
|
||||
|
|
@ -810,7 +968,7 @@ pub async fn submit_approval_handler(
|
|||
Path(task_id): Path<String>,
|
||||
Json(request): Json<ApprovalRequest>,
|
||||
) -> impl IntoResponse {
|
||||
match submit_approval(&state, &task_id, &request).await {
|
||||
match submit_approval(&state, &task_id, &request) {
|
||||
Ok(_) => (
|
||||
StatusCode::OK,
|
||||
Json(TaskActionResponse {
|
||||
|
|
@ -834,7 +992,7 @@ pub async fn simulate_plan_handler(
|
|||
State(state): State<Arc<AppState>>,
|
||||
Path(plan_id): Path<String>,
|
||||
) -> impl IntoResponse {
|
||||
let session = match get_current_session(&state).await {
|
||||
let session = match get_current_session(&state) {
|
||||
Ok(s) => s,
|
||||
Err(e) => {
|
||||
return (
|
||||
|
|
@ -1009,7 +1167,7 @@ pub async fn simulate_plan_handler(
|
|||
}
|
||||
}
|
||||
|
||||
async fn get_current_session(
|
||||
fn get_current_session(
|
||||
state: &Arc<AppState>,
|
||||
) -> Result<crate::shared::models::UserSession, Box<dyn std::error::Error + Send + Sync>> {
|
||||
use crate::shared::models::user_sessions::dsl::*;
|
||||
|
|
@ -1030,7 +1188,7 @@ async fn get_current_session(
|
|||
Ok(session)
|
||||
}
|
||||
|
||||
async fn create_auto_task_from_plan(
|
||||
fn create_auto_task_from_plan(
|
||||
_state: &Arc<AppState>,
|
||||
session: &crate::shared::models::UserSession,
|
||||
plan_id: &str,
|
||||
|
|
@ -1077,7 +1235,7 @@ async fn create_auto_task_from_plan(
|
|||
Ok(task)
|
||||
}
|
||||
|
||||
async fn start_task_execution(
|
||||
fn start_task_execution(
|
||||
_state: &Arc<AppState>,
|
||||
task_id: &str,
|
||||
) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
|
||||
|
|
@ -1085,7 +1243,7 @@ async fn start_task_execution(
|
|||
Ok(())
|
||||
}
|
||||
|
||||
async fn list_auto_tasks(
|
||||
fn list_auto_tasks(
|
||||
_state: &Arc<AppState>,
|
||||
_filter: &str,
|
||||
_limit: i32,
|
||||
|
|
@ -1094,7 +1252,7 @@ async fn list_auto_tasks(
|
|||
Ok(Vec::new())
|
||||
}
|
||||
|
||||
async fn get_auto_task_stats(
|
||||
fn get_auto_task_stats(
|
||||
_state: &Arc<AppState>,
|
||||
) -> Result<AutoTaskStatsResponse, Box<dyn std::error::Error + Send + Sync>> {
|
||||
Ok(AutoTaskStatsResponse {
|
||||
|
|
@ -1108,7 +1266,7 @@ async fn get_auto_task_stats(
|
|||
})
|
||||
}
|
||||
|
||||
async fn update_task_status(
|
||||
fn update_task_status(
|
||||
_state: &Arc<AppState>,
|
||||
task_id: &str,
|
||||
status: AutoTaskStatus,
|
||||
|
|
@ -1140,7 +1298,7 @@ fn simulate_plan_execution(
|
|||
safety_layer.simulate_execution(plan_id, session)
|
||||
}
|
||||
|
||||
async fn get_pending_decisions(
|
||||
fn get_pending_decisions(
|
||||
_state: &Arc<AppState>,
|
||||
task_id: &str,
|
||||
) -> Result<Vec<PendingDecision>, Box<dyn std::error::Error + Send + Sync>> {
|
||||
|
|
@ -1148,7 +1306,7 @@ async fn get_pending_decisions(
|
|||
Ok(Vec::new())
|
||||
}
|
||||
|
||||
async fn submit_decision(
|
||||
fn submit_decision(
|
||||
_state: &Arc<AppState>,
|
||||
task_id: &str,
|
||||
request: &DecisionRequest,
|
||||
|
|
@ -1160,7 +1318,7 @@ async fn submit_decision(
|
|||
Ok(())
|
||||
}
|
||||
|
||||
async fn get_pending_approvals(
|
||||
fn get_pending_approvals(
|
||||
_state: &Arc<AppState>,
|
||||
task_id: &str,
|
||||
) -> Result<Vec<PendingApproval>, Box<dyn std::error::Error + Send + Sync>> {
|
||||
|
|
@ -1168,7 +1326,7 @@ async fn get_pending_approvals(
|
|||
Ok(Vec::new())
|
||||
}
|
||||
|
||||
async fn submit_approval(
|
||||
fn submit_approval(
|
||||
_state: &Arc<AppState>,
|
||||
task_id: &str,
|
||||
request: &ApprovalRequest,
|
||||
|
|
|
|||
|
|
@ -26,10 +26,8 @@ pub fn clear_tools_keyword(state: Arc<AppState>, user: UserSession, engine: &mut
|
|||
.enable_all()
|
||||
.build();
|
||||
|
||||
let send_err = if let Ok(rt) = rt {
|
||||
let result = rt.block_on(async move {
|
||||
clear_all_tools_from_session(&state_for_task, &user_for_task).await
|
||||
});
|
||||
let send_err = if let Ok(_rt) = rt {
|
||||
let result = clear_all_tools_from_session(&state_for_task, &user_for_task);
|
||||
tx.send(result).err()
|
||||
} else {
|
||||
tx.send(Err("Failed to build tokio runtime".to_string()))
|
||||
|
|
@ -62,7 +60,7 @@ pub fn clear_tools_keyword(state: Arc<AppState>, user: UserSession, engine: &mut
|
|||
.unwrap();
|
||||
}
|
||||
|
||||
async fn clear_all_tools_from_session(state: &AppState, user: &UserSession) -> Result<String, String> {
|
||||
fn clear_all_tools_from_session(state: &AppState, user: &UserSession) -> Result<String, String> {
|
||||
let mut conn = state.conn.get().map_err(|e| {
|
||||
error!("Failed to acquire database lock: {}", e);
|
||||
format!("Database connection error: {}", e)
|
||||
|
|
|
|||
|
|
@ -48,14 +48,10 @@ pub enum CodeLanguage {
|
|||
|
||||
impl From<&str> for CodeLanguage {
|
||||
fn from(s: &str) -> Self {
|
||||
match s.to_lowercase().as_str() {
|
||||
"python" | "py" | "javascript" | "js" | "node" | "bash" | "sh" | "shell" => {}
|
||||
_ => {}
|
||||
}
|
||||
match s.to_lowercase().as_str() {
|
||||
"python" | "py" => Self::Python,
|
||||
"javascript" | "js" | "node" => Self::JavaScript,
|
||||
"bash" | "sh" | "shell" | _ => Self::Bash,
|
||||
_ => Self::Bash,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -40,13 +40,13 @@ async fn execute_create_draft(
|
|||
.await
|
||||
.unwrap_or_default();
|
||||
|
||||
let email_body = if !previous_email.is_empty() {
|
||||
let email_body = if previous_email.is_empty() {
|
||||
reply_text.to_string()
|
||||
} else {
|
||||
let email_separator = "<br><hr><br>";
|
||||
let formatted_reply = reply_text.replace("FIX", "Fixed");
|
||||
let formatted_old = previous_email.replace('\n', "<br>");
|
||||
format!("{formatted_reply}{email_separator}{formatted_old}")
|
||||
} else {
|
||||
reply_text.to_string()
|
||||
};
|
||||
|
||||
let draft_request = SaveDraftRequest {
|
||||
|
|
@ -61,7 +61,6 @@ async fn execute_create_draft(
|
|||
save_email_draft(&config.email, &draft_request)
|
||||
.await
|
||||
.map(|()| "Draft saved successfully".to_string())
|
||||
.map_err(|e| e.to_string())
|
||||
}
|
||||
|
||||
#[cfg(not(feature = "email"))]
|
||||
|
|
|
|||
|
|
@ -82,7 +82,7 @@ async fn create_site(
|
|||
store_to_drive(s3.as_ref(), &bucket, &bot_id, &drive_path, &generated_html).await?;
|
||||
|
||||
let serve_path = base_path.join(&alias_str);
|
||||
sync_to_serve_path(&serve_path, &generated_html, &template_path).await?;
|
||||
sync_to_serve_path(&serve_path, &generated_html, &template_path)?;
|
||||
|
||||
info!(
|
||||
"CREATE SITE: {} completed, available at /apps/{}",
|
||||
|
|
@ -300,7 +300,7 @@ async fn store_to_drive(
|
|||
Ok(())
|
||||
}
|
||||
|
||||
async fn sync_to_serve_path(
|
||||
fn sync_to_serve_path(
|
||||
serve_path: &std::path::Path,
|
||||
html_content: &str,
|
||||
template_path: &std::path::Path,
|
||||
|
|
|
|||
|
|
@ -58,18 +58,15 @@ pub fn create_task_keyword(state: Arc<AppState>, user: UserSession, engine: &mut
|
|||
.enable_all()
|
||||
.build();
|
||||
|
||||
let send_err = if let Ok(rt) = rt {
|
||||
let result = rt.block_on(async move {
|
||||
execute_create_task(
|
||||
&state_for_task,
|
||||
&user_for_task,
|
||||
&title,
|
||||
&assignee,
|
||||
&due_date,
|
||||
project_id.as_deref(),
|
||||
)
|
||||
.await
|
||||
});
|
||||
let send_err = if let Ok(_rt) = rt {
|
||||
let result = execute_create_task(
|
||||
&state_for_task,
|
||||
&user_for_task,
|
||||
&title,
|
||||
&assignee,
|
||||
&due_date,
|
||||
project_id.as_deref(),
|
||||
);
|
||||
tx.send(result).err()
|
||||
} else {
|
||||
tx.send(Err("Failed to build tokio runtime".to_string()))
|
||||
|
|
@ -146,17 +143,14 @@ pub fn create_task_keyword(state: Arc<AppState>, user: UserSession, engine: &mut
|
|||
.enable_all()
|
||||
.build();
|
||||
|
||||
let send_err = if let Ok(rt) = rt {
|
||||
let result = rt.block_on(async move {
|
||||
smart_assign_task(
|
||||
&state_for_task,
|
||||
&user_for_task,
|
||||
&task_id,
|
||||
team,
|
||||
load_balance,
|
||||
)
|
||||
.await
|
||||
});
|
||||
let send_err = if let Ok(_rt) = rt {
|
||||
let result = smart_assign_task(
|
||||
&state_for_task,
|
||||
&user_for_task,
|
||||
&task_id,
|
||||
team,
|
||||
load_balance,
|
||||
);
|
||||
tx.send(result).err()
|
||||
} else {
|
||||
tx.send(Err("Failed to build tokio runtime".to_string()))
|
||||
|
|
@ -184,7 +178,7 @@ pub fn create_task_keyword(state: Arc<AppState>, user: UserSession, engine: &mut
|
|||
.unwrap();
|
||||
}
|
||||
|
||||
async fn execute_create_task(
|
||||
fn execute_create_task(
|
||||
state: &AppState,
|
||||
user: &UserSession,
|
||||
title: &str,
|
||||
|
|
@ -197,7 +191,7 @@ async fn execute_create_task(
|
|||
let due_datetime = parse_due_date(due_date)?;
|
||||
|
||||
let actual_assignee = if assignee == "auto" {
|
||||
auto_assign_task(state, project_id).await?
|
||||
auto_assign_task(state, project_id)?
|
||||
} else {
|
||||
assignee.to_string()
|
||||
};
|
||||
|
|
@ -228,7 +222,7 @@ async fn execute_create_task(
|
|||
format!("Failed to create task: {}", e)
|
||||
})?;
|
||||
|
||||
send_task_notification(state, &task_id, title, &actual_assignee, due_datetime).await?;
|
||||
send_task_notification(state, &task_id, title, &actual_assignee, due_datetime)?;
|
||||
|
||||
trace!(
|
||||
"Created task '{}' assigned to {} (ID: {})",
|
||||
|
|
@ -240,7 +234,7 @@ async fn execute_create_task(
|
|||
Ok(task_id)
|
||||
}
|
||||
|
||||
async fn smart_assign_task(
|
||||
fn smart_assign_task(
|
||||
state: &AppState,
|
||||
_user: &UserSession,
|
||||
task_id: &str,
|
||||
|
|
@ -300,7 +294,7 @@ async fn smart_assign_task(
|
|||
Ok(best_assignee)
|
||||
}
|
||||
|
||||
async fn auto_assign_task(state: &AppState, project_id: Option<&str>) -> Result<String, String> {
|
||||
fn auto_assign_task(state: &AppState, project_id: Option<&str>) -> Result<String, String> {
|
||||
let mut conn = state.conn.get().map_err(|e| format!("DB error: {}", e))?;
|
||||
|
||||
let team_query_str = if let Some(proj_id) = project_id {
|
||||
|
|
@ -402,7 +396,7 @@ fn determine_priority(due_date: Option<DateTime<Utc>>) -> String {
|
|||
}
|
||||
}
|
||||
|
||||
async fn send_task_notification(
|
||||
fn send_task_notification(
|
||||
_state: &AppState,
|
||||
task_id: &str,
|
||||
title: &str,
|
||||
|
|
|
|||
|
|
@ -1458,12 +1458,10 @@ mod tests {
|
|||
.iter()
|
||||
.filter(|w| msg_lower.contains(*w))
|
||||
.count();
|
||||
if positive_count > negative_count {
|
||||
"positive"
|
||||
} else if negative_count > positive_count {
|
||||
"negative"
|
||||
} else {
|
||||
"neutral"
|
||||
match positive_count.cmp(&negative_count) {
|
||||
std::cmp::Ordering::Greater => "positive",
|
||||
std::cmp::Ordering::Less => "negative",
|
||||
std::cmp::Ordering::Equal => "neutral",
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
|||
592
src/basic/keywords/db_api.rs
Normal file
592
src/basic/keywords/db_api.rs
Normal file
|
|
@ -0,0 +1,592 @@
|
|||
use crate::core::shared::state::AppState;
|
||||
use axum::{
|
||||
extract::{Path, Query, State},
|
||||
http::StatusCode,
|
||||
response::IntoResponse,
|
||||
routing::{delete, get, post, put},
|
||||
Json, Router,
|
||||
};
|
||||
use diesel::prelude::*;
|
||||
use diesel::sql_query;
|
||||
use log::{error, info};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use serde_json::{json, Value};
|
||||
use std::sync::Arc;
|
||||
use uuid::Uuid;
|
||||
|
||||
/// Query-string parameters accepted by the list endpoint
/// (`GET /api/db/{table}`). All fields are optional; the handler fills
/// in defaults: limit 20 (capped at 100), offset 0, ordered by `id` ASC.
#[derive(Debug, Deserialize)]
pub struct QueryParams {
    // Maximum number of rows to return; the handler clamps it to 100.
    pub limit: Option<i32>,
    // Number of rows to skip before the first returned row.
    pub offset: Option<i32>,
    // Column to sort by; sanitized to alphanumerics/underscore, defaults to "id".
    pub order_by: Option<String>,
    // Sort direction; anything other than "DESC" (case-insensitive) is treated as "ASC".
    pub order_dir: Option<String>,
}
|
||||
|
||||
/// Paged result envelope returned by the list endpoint.
#[derive(Debug, Serialize)]
pub struct ListResponse {
    // One JSON object per row, produced in SQL via `row_to_json(t.*)`.
    pub data: Vec<Value>,
    // Total row count for the whole table (ignores limit/offset).
    pub total: i64,
    // The limit that was actually applied after clamping.
    pub limit: i32,
    // The offset that was actually applied.
    pub offset: i32,
}
|
||||
|
||||
/// Single-record envelope used by the get/create/update handlers.
/// On success `data` holds the row as JSON; on failure `message`
/// carries a human-readable error and `data` is `None`.
#[derive(Debug, Serialize)]
pub struct RecordResponse {
    pub success: bool,
    pub data: Option<Value>,
    pub message: Option<String>,
}
|
||||
|
||||
/// Envelope returned by the delete handler. `deleted` is the number of
/// rows removed (0 or 1 for a by-id delete); `success` is true only
/// when at least one row was deleted.
#[derive(Debug, Serialize)]
pub struct DeleteResponse {
    pub success: bool,
    pub deleted: i64,
    pub message: Option<String>,
}
|
||||
|
||||
/// Build the generic CRUD router mounted under `/api/db`.
///
/// Routes:
/// - GET    /api/db/{table}          — list records (paged, sortable)
/// - POST   /api/db/{table}          — create a record (server-generated UUID id)
/// - GET    /api/db/{table}/{id}     — fetch one record by UUID
/// - PUT    /api/db/{table}/{id}     — update fields of one record
/// - DELETE /api/db/{table}/{id}     — delete one record
/// - GET    /api/db/{table}/count    — row count
/// - POST   /api/db/{table}/search   — substring search over common text columns
///
/// NOTE(review): `/count` and `/search` share a position with the `/{id}`
/// capture — presumably the router gives static segments priority, but
/// confirm against the axum/matchit version in use.
pub fn configure_db_routes() -> Router<Arc<AppState>> {
    Router::new()
        .route("/api/db/{table}", get(list_records_handler))
        .route("/api/db/{table}", post(create_record_handler))
        .route("/api/db/{table}/{id}", get(get_record_handler))
        .route("/api/db/{table}/{id}", put(update_record_handler))
        .route("/api/db/{table}/{id}", delete(delete_record_handler))
        .route("/api/db/{table}/count", get(count_records_handler))
        .route("/api/db/{table}/search", post(search_records_handler))
}
|
||||
|
||||
/// Strip every character that is not alphanumeric or an underscore from a
/// caller-supplied SQL identifier (table or column name).
///
/// This is the module's only defense against identifier injection: all
/// handlers interpolate the sanitized name directly into raw SQL, so any
/// quote, space, or punctuation must be removed here. Unicode letters and
/// digits are kept (`char::is_alphanumeric` is Unicode-aware).
fn sanitize_identifier(name: &str) -> String {
    let mut clean = String::with_capacity(name.len());
    for ch in name.chars() {
        if ch.is_alphanumeric() || ch == '_' {
            clean.push(ch);
        }
    }
    clean
}
|
||||
|
||||
/// GET /api/db/{table} — list rows of `table` as JSON, paged and sorted.
///
/// Query params: `limit` (default 20, max 100), `offset` (default 0),
/// `order_by` (default "id"), `order_dir` ("DESC" or anything-else=ASC).
/// Returns a `ListResponse` on success, or `{"error": …}` with 500 on
/// any pool or query failure.
///
/// Identifiers (`table`, `order_by`) are sanitized to alphanumerics and
/// underscore before being interpolated into the SQL text; limit/offset
/// are integers so interpolation is safe. `row_to_json(t.*)` requires
/// PostgreSQL.
pub async fn list_records_handler(
    State(state): State<Arc<AppState>>,
    Path(table): Path<String>,
    Query(params): Query<QueryParams>,
) -> impl IntoResponse {
    let table_name = sanitize_identifier(&table);
    // Clamp the page size so a client cannot request unbounded result sets.
    let limit = params.limit.unwrap_or(20).min(100);
    let offset = params.offset.unwrap_or(0);
    // NOTE(review): assumes every exposed table has an `id` column to sort
    // by when `order_by` is omitted — confirm against the migration schema.
    let order_by = params
        .order_by
        .map(|o| sanitize_identifier(&o))
        .unwrap_or_else(|| "id".to_string())
    let order_dir = params
        .order_dir
        .map(|d| {
            if d.to_uppercase() == "DESC" {
                "DESC"
            } else {
                "ASC"
            }
        })
        .unwrap_or("ASC");

    let mut conn = match state.conn.get() {
        Ok(c) => c,
        Err(e) => {
            return (
                StatusCode::INTERNAL_SERVER_ERROR,
                Json(json!({ "error": format!("Database connection error: {e}") })),
            )
                .into_response()
        }
    };

    // Each row is serialized server-side to a single JSONB column so the
    // handler does not need to know the table's schema.
    let query = format!(
        "SELECT row_to_json(t.*) as data FROM {} t ORDER BY {} {} LIMIT {} OFFSET {}",
        table_name, order_by, order_dir, limit, offset
    );

    let count_query = format!("SELECT COUNT(*) as count FROM {}", table_name);

    let rows: Result<Vec<JsonRow>, _> = sql_query(&query).get_results(&mut conn);
    let total: Result<CountResult, _> = sql_query(&count_query).get_result(&mut conn);

    match (rows, total) {
        (Ok(data), Ok(count_result)) => {
            let response = ListResponse {
                data: data.into_iter().map(|r| r.data).collect(),
                total: count_result.count,
                limit,
                offset,
            };
            (StatusCode::OK, Json(response)).into_response()
        }
        // Either query failing collapses to a single 500 response.
        (Err(e), _) | (_, Err(e)) => {
            error!("Failed to list records from {table_name}: {e}");
            (
                StatusCode::INTERNAL_SERVER_ERROR,
                Json(json!({ "error": e.to_string() })),
            )
                .into_response()
        }
    }
}
|
||||
|
||||
/// GET /api/db/{table}/{id} — fetch a single row by UUID primary key.
///
/// Responds 400 when `id` is not a valid UUID, 404 when no row matches,
/// 500 on pool or query errors; otherwise 200 with the row serialized via
/// `row_to_json`. The UUID is bound as a real query parameter ($1), so
/// only the sanitized table name is interpolated into the SQL text.
pub async fn get_record_handler(
    State(state): State<Arc<AppState>>,
    Path((table, id)): Path<(String, String)>,
) -> impl IntoResponse {
    let table_name = sanitize_identifier(&table);

    // Reject malformed ids before touching the database.
    let record_id = match Uuid::parse_str(&id) {
        Ok(uuid) => uuid,
        Err(_) => {
            return (
                StatusCode::BAD_REQUEST,
                Json(RecordResponse {
                    success: false,
                    data: None,
                    message: Some("Invalid UUID format".to_string()),
                }),
            )
                .into_response()
        }
    };

    let mut conn = match state.conn.get() {
        Ok(c) => c,
        Err(e) => {
            return (
                StatusCode::INTERNAL_SERVER_ERROR,
                Json(RecordResponse {
                    success: false,
                    data: None,
                    message: Some(format!("Database connection error: {e}")),
                }),
            )
                .into_response()
        }
    };

    let query = format!(
        "SELECT row_to_json(t.*) as data FROM {} t WHERE id = $1",
        table_name
    );

    // .optional() turns diesel's NotFound into Ok(None) so a missing row
    // maps to 404 instead of an error.
    let row: Result<Option<JsonRow>, _> = sql_query(&query)
        .bind::<diesel::sql_types::Uuid, _>(record_id)
        .get_result(&mut conn)
        .optional();

    match row {
        Ok(Some(r)) => (
            StatusCode::OK,
            Json(RecordResponse {
                success: true,
                data: Some(r.data),
                message: None,
            }),
        )
            .into_response(),
        Ok(None) => (
            StatusCode::NOT_FOUND,
            Json(RecordResponse {
                success: false,
                data: None,
                message: Some("Record not found".to_string()),
            }),
        )
            .into_response(),
        Err(e) => {
            error!("Failed to get record from {table_name}: {e}");
            (
                StatusCode::INTERNAL_SERVER_ERROR,
                Json(RecordResponse {
                    success: false,
                    data: None,
                    message: Some(e.to_string()),
                }),
            )
                .into_response()
        }
    }
}
|
||||
|
||||
/// POST /api/db/{table} — insert a new row built from a flat JSON object.
///
/// A fresh UUID is always generated server-side for the `id` column; any
/// client-supplied `id` key is ignored. Keys are sanitized to valid
/// identifiers and values are escaped by `value_to_sql` before being
/// interpolated into the INSERT. Responds 201 with the created row,
/// 400 for a non-object payload, 500 on pool/query errors.
///
/// NOTE(review): assumes every exposed table has a UUID `id` column —
/// confirm against the migration schema.
pub async fn create_record_handler(
    State(state): State<Arc<AppState>>,
    Path(table): Path<String>,
    Json(payload): Json<Value>,
) -> impl IntoResponse {
    let table_name = sanitize_identifier(&table);

    let obj = match payload.as_object() {
        Some(o) => o,
        None => {
            return (
                StatusCode::BAD_REQUEST,
                Json(RecordResponse {
                    success: false,
                    data: None,
                    message: Some("Payload must be a JSON object".to_string()),
                }),
            )
                .into_response()
        }
    };

    // Seed the column/value lists with the server-generated primary key.
    let mut columns: Vec<String> = vec!["id".to_string()];
    let mut values: Vec<String> = vec![format!("'{}'", Uuid::new_v4())];

    for (key, value) in obj {
        let col = sanitize_identifier(key);
        // Skip keys that sanitize to nothing and never let the client set `id`.
        if col.is_empty() || col == "id" {
            continue;
        }
        columns.push(col);
        values.push(value_to_sql(value));
    }

    let mut conn = match state.conn.get() {
        Ok(c) => c,
        Err(e) => {
            return (
                StatusCode::INTERNAL_SERVER_ERROR,
                Json(RecordResponse {
                    success: false,
                    data: None,
                    message: Some(format!("Database connection error: {e}")),
                }),
            )
                .into_response()
        }
    };

    // RETURNING row_to_json(...) echoes the inserted row back without
    // needing a second SELECT.
    let query = format!(
        "INSERT INTO {} ({}) VALUES ({}) RETURNING row_to_json({}.*)::jsonb as data",
        table_name,
        columns.join(", "),
        values.join(", "),
        table_name
    );

    let row: Result<JsonRow, _> = sql_query(&query).get_result(&mut conn);

    match row {
        Ok(r) => {
            info!("Created record in {table_name}");
            (
                StatusCode::CREATED,
                Json(RecordResponse {
                    success: true,
                    data: Some(r.data),
                    message: None,
                }),
            )
                .into_response()
        }
        Err(e) => {
            error!("Failed to create record in {table_name}: {e}");
            (
                StatusCode::INTERNAL_SERVER_ERROR,
                Json(RecordResponse {
                    success: false,
                    data: None,
                    message: Some(e.to_string()),
                }),
            )
                .into_response()
        }
    }
}
|
||||
|
||||
/// PUT /api/db/{table}/{id} — update selected fields of one row.
///
/// Builds `SET col = value` clauses from the JSON object payload (keys
/// sanitized, values escaped via `value_to_sql`; `id` is never updatable).
/// Responds 400 for a bad UUID, non-object payload, or an empty update;
/// 200 with the updated row on success; 500 on pool/query errors.
///
/// NOTE(review): `updated_at = NOW()` is appended unconditionally, so
/// this fails on any table without an `updated_at` column — confirm all
/// exposed tables have one.
/// NOTE(review): a nonexistent id surfaces as a diesel NotFound error and
/// is reported as 500, not 404 — consider `.optional()` here like the GET
/// handler does.
pub async fn update_record_handler(
    State(state): State<Arc<AppState>>,
    Path((table, id)): Path<(String, String)>,
    Json(payload): Json<Value>,
) -> impl IntoResponse {
    let table_name = sanitize_identifier(&table);

    let record_id = match Uuid::parse_str(&id) {
        Ok(uuid) => uuid,
        Err(_) => {
            return (
                StatusCode::BAD_REQUEST,
                Json(RecordResponse {
                    success: false,
                    data: None,
                    message: Some("Invalid UUID format".to_string()),
                }),
            )
                .into_response()
        }
    };

    let obj = match payload.as_object() {
        Some(o) => o,
        None => {
            return (
                StatusCode::BAD_REQUEST,
                Json(RecordResponse {
                    success: false,
                    data: None,
                    message: Some("Payload must be a JSON object".to_string()),
                }),
            )
                .into_response()
        }
    };

    let mut set_clauses: Vec<String> = Vec::new();

    for (key, value) in obj {
        let col = sanitize_identifier(key);
        // Drop unusable keys and never allow the primary key to change.
        if col.is_empty() || col == "id" {
            continue;
        }
        set_clauses.push(format!("{} = {}", col, value_to_sql(value)));
    }

    if set_clauses.is_empty() {
        return (
            StatusCode::BAD_REQUEST,
            Json(RecordResponse {
                success: false,
                data: None,
                message: Some("No valid fields to update".to_string()),
            }),
        )
            .into_response();
    }

    set_clauses.push("updated_at = NOW()".to_string());

    let mut conn = match state.conn.get() {
        Ok(c) => c,
        Err(e) => {
            return (
                StatusCode::INTERNAL_SERVER_ERROR,
                Json(RecordResponse {
                    success: false,
                    data: None,
                    message: Some(format!("Database connection error: {e}")),
                }),
            )
                .into_response()
        }
    };

    // record_id is a parsed Uuid, so interpolating its Display form is safe.
    let query = format!(
        "UPDATE {} SET {} WHERE id = '{}' RETURNING row_to_json({}.*)::jsonb as data",
        table_name,
        set_clauses.join(", "),
        record_id,
        table_name
    );

    let row: Result<JsonRow, _> = sql_query(&query).get_result(&mut conn);

    match row {
        Ok(r) => {
            info!("Updated record in {table_name}: {record_id}");
            (
                StatusCode::OK,
                Json(RecordResponse {
                    success: true,
                    data: Some(r.data),
                    message: None,
                }),
            )
                .into_response()
        }
        Err(e) => {
            error!("Failed to update record in {table_name}: {e}");
            (
                StatusCode::INTERNAL_SERVER_ERROR,
                Json(RecordResponse {
                    success: false,
                    data: None,
                    message: Some(e.to_string()),
                }),
            )
                .into_response()
        }
    }
}
|
||||
|
||||
/// DELETE /api/db/{table}/{id} — delete one row by UUID primary key.
///
/// Responds 400 for an invalid UUID, 500 on pool/query errors, and 200
/// otherwise — a missing row is still 200 with `success: false`,
/// `deleted: 0`, and a "Record not found" message. The UUID is bound as
/// $1, so only the sanitized table name is interpolated into the SQL.
pub async fn delete_record_handler(
    State(state): State<Arc<AppState>>,
    Path((table, id)): Path<(String, String)>,
) -> impl IntoResponse {
    let table_name = sanitize_identifier(&table);

    let record_id = match Uuid::parse_str(&id) {
        Ok(uuid) => uuid,
        Err(_) => {
            return (
                StatusCode::BAD_REQUEST,
                Json(DeleteResponse {
                    success: false,
                    deleted: 0,
                    message: Some("Invalid UUID format".to_string()),
                }),
            )
                .into_response()
        }
    };

    let mut conn = match state.conn.get() {
        Ok(c) => c,
        Err(e) => {
            return (
                StatusCode::INTERNAL_SERVER_ERROR,
                Json(DeleteResponse {
                    success: false,
                    deleted: 0,
                    message: Some(format!("Database connection error: {e}")),
                }),
            )
                .into_response()
        }
    };

    let query = format!("DELETE FROM {} WHERE id = $1", table_name);

    // execute() returns the number of affected rows (0 or 1 here).
    let deleted: Result<usize, _> = sql_query(&query)
        .bind::<diesel::sql_types::Uuid, _>(record_id)
        .execute(&mut conn);

    match deleted {
        Ok(count) => {
            info!("Deleted {count} record(s) from {table_name}");
            (
                StatusCode::OK,
                Json(DeleteResponse {
                    success: count > 0,
                    deleted: count as i64,
                    message: if count == 0 {
                        Some("Record not found".to_string())
                    } else {
                        None
                    },
                }),
            )
                .into_response()
        }
        Err(e) => {
            error!("Failed to delete record from {table_name}: {e}");
            (
                StatusCode::INTERNAL_SERVER_ERROR,
                Json(DeleteResponse {
                    success: false,
                    deleted: 0,
                    message: Some(e.to_string()),
                }),
            )
                .into_response()
        }
    }
}
|
||||
|
||||
pub async fn count_records_handler(
|
||||
State(state): State<Arc<AppState>>,
|
||||
Path(table): Path<String>,
|
||||
) -> impl IntoResponse {
|
||||
let table_name = sanitize_identifier(&table);
|
||||
|
||||
let mut conn = match state.conn.get() {
|
||||
Ok(c) => c,
|
||||
Err(e) => {
|
||||
return (
|
||||
StatusCode::INTERNAL_SERVER_ERROR,
|
||||
Json(json!({ "error": format!("Database connection error: {e}") })),
|
||||
)
|
||||
.into_response()
|
||||
}
|
||||
};
|
||||
|
||||
let query = format!("SELECT COUNT(*) as count FROM {}", table_name);
|
||||
let result: Result<CountResult, _> = sql_query(&query).get_result(&mut conn);
|
||||
|
||||
match result {
|
||||
Ok(r) => (StatusCode::OK, Json(json!({ "count": r.count }))).into_response(),
|
||||
Err(e) => {
|
||||
error!("Failed to count records in {table_name}: {e}");
|
||||
(
|
||||
StatusCode::INTERNAL_SERVER_ERROR,
|
||||
Json(json!({ "error": e.to_string() })),
|
||||
)
|
||||
.into_response()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Body of POST /api/db/{table}/search.
#[derive(Debug, Deserialize)]
pub struct SearchRequest {
    // Substring to match (case-insensitive ILIKE).
    pub query: String,
    // NOTE(review): currently unused — search_records_handler always
    // searches title/name/description regardless of this field.
    pub fields: Option<Vec<String>>,
    // Maximum number of rows to return (default 20, capped at 100).
    pub limit: Option<i32>,
}
|
||||
|
||||
/// POST /api/db/{table}/search — case-insensitive substring search.
///
/// Concatenates the `title`, `name`, and `description` columns (each
/// tolerated as missing via COALESCE on the ::text casts) and matches the
/// query with ILIKE. Returns `{"data": [...]}` on success, 500 on errors.
///
/// NOTE(review): only single quotes are escaped in the search term —
/// ILIKE wildcards (`%`, `_`) and backslashes pass through unescaped, so
/// user input can act as a pattern. Confirm whether that is intended.
/// NOTE(review): assumes the searched columns exist (or at least that the
/// COALESCE casts don't error) on every exposed table — verify against
/// the schema.
pub async fn search_records_handler(
    State(state): State<Arc<AppState>>,
    Path(table): Path<String>,
    Json(payload): Json<SearchRequest>,
) -> impl IntoResponse {
    let table_name = sanitize_identifier(&table);
    let limit = payload.limit.unwrap_or(20).min(100);
    // Double up single quotes so the term can sit inside an SQL string literal.
    let search_term = payload.query.replace('\'', "''");

    let mut conn = match state.conn.get() {
        Ok(c) => c,
        Err(e) => {
            return (
                StatusCode::INTERNAL_SERVER_ERROR,
                Json(json!({ "error": format!("Database connection error: {e}") })),
            )
                .into_response()
        }
    };

    let query = format!(
        "SELECT row_to_json(t.*) as data FROM {} t WHERE
         COALESCE(t.title::text, '') || ' ' || COALESCE(t.name::text, '') || ' ' || COALESCE(t.description::text, '')
         ILIKE '%{}%' LIMIT {}",
        table_name, search_term, limit
    );

    let rows: Result<Vec<JsonRow>, _> = sql_query(&query).get_results(&mut conn);

    match rows {
        Ok(data) => (
            StatusCode::OK,
            Json(json!({ "data": data.into_iter().map(|r| r.data).collect::<Vec<_>>() })),
        )
            .into_response(),
        Err(e) => {
            error!("Failed to search in {table_name}: {e}");
            (
                StatusCode::INTERNAL_SERVER_ERROR,
                Json(json!({ "error": e.to_string() })),
            )
                .into_response()
        }
    }
}
|
||||
|
||||
fn value_to_sql(value: &Value) -> String {
|
||||
match value {
|
||||
Value::Null => "NULL".to_string(),
|
||||
Value::Bool(b) => b.to_string(),
|
||||
Value::Number(n) => n.to_string(),
|
||||
Value::String(s) => format!("'{}'", s.replace('\'', "''")),
|
||||
Value::Array(_) | Value::Object(_) => {
|
||||
format!("'{}'", value.to_string().replace('\'', "''"))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Row wrapper for raw `sql_query` results that select a single
/// `row_to_json(...) as data` column; diesel maps the JSONB column
/// straight into a `serde_json::Value`.
#[derive(QueryableByName)]
struct JsonRow {
    #[diesel(sql_type = diesel::sql_types::Jsonb)]
    data: Value,
}
|
||||
|
||||
/// Row wrapper for `SELECT COUNT(*) as count` raw queries.
#[derive(QueryableByName)]
struct CountResult {
    #[diesel(sql_type = diesel::sql_types::BigInt)]
    count: i64,
}
|
||||
1194
src/basic/keywords/designer_ai.rs
Normal file
1194
src/basic/keywords/designer_ai.rs
Normal file
File diff suppressed because it is too large
Load diff
|
|
@ -197,6 +197,6 @@ mod tests {
|
|||
map.insert("message".into(), Dynamic::from("test error"));
|
||||
|
||||
assert!(map.contains_key("error"));
|
||||
assert_eq!(map.get("error").unwrap().as_bool().unwrap_or(false), true);
|
||||
assert!(map.get("error").unwrap().as_bool().unwrap_or(false));
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -291,10 +291,8 @@ mod tests {
|
|||
clear_last_error();
|
||||
|
||||
let result = try_execute(|| {
|
||||
Err::<String, _>(
|
||||
Box::new(std::io::Error::new(std::io::ErrorKind::Other, "test error"))
|
||||
as Box<dyn std::error::Error + Send + Sync>,
|
||||
)
|
||||
Err::<String, _>(Box::new(std::io::Error::other("test error"))
|
||||
as Box<dyn std::error::Error + Send + Sync>)
|
||||
});
|
||||
assert!(result.is_err());
|
||||
}
|
||||
|
|
|
|||
|
|
@ -64,10 +64,8 @@ pub fn register_import_keyword(state: Arc<AppState>, user: UserSession, engine:
|
|||
.enable_all()
|
||||
.build();
|
||||
|
||||
let send_err = if let Ok(rt) = rt {
|
||||
let result = rt.block_on(async move {
|
||||
execute_import_json(&state_for_task, &user_for_task, &file_path).await
|
||||
});
|
||||
let send_err = if let Ok(_rt) = rt {
|
||||
let result = execute_import_json(&state_for_task, &user_for_task, &file_path);
|
||||
tx.send(result).err()
|
||||
} else {
|
||||
tx.send(Err("Failed to build tokio runtime".into())).err()
|
||||
|
|
@ -125,16 +123,13 @@ pub fn register_export_keyword(state: Arc<AppState>, user: UserSession, engine:
|
|||
.enable_all()
|
||||
.build();
|
||||
|
||||
let send_err = if let Ok(rt) = rt {
|
||||
let result = rt.block_on(async move {
|
||||
execute_export_json(
|
||||
&state_for_task,
|
||||
&user_for_task,
|
||||
&file_path,
|
||||
data_json,
|
||||
)
|
||||
.await
|
||||
});
|
||||
let send_err = if let Ok(_rt) = rt {
|
||||
let result = execute_export_json(
|
||||
&state_for_task,
|
||||
&user_for_task,
|
||||
&file_path,
|
||||
data_json,
|
||||
);
|
||||
tx.send(result).err()
|
||||
} else {
|
||||
tx.send(Err("Failed to build tokio runtime".into())).err()
|
||||
|
|
@ -167,25 +162,25 @@ pub fn register_export_keyword(state: Arc<AppState>, user: UserSession, engine:
|
|||
.unwrap();
|
||||
}
|
||||
|
||||
async fn execute_import_json(
|
||||
fn execute_import_json(
|
||||
state: &AppState,
|
||||
user: &UserSession,
|
||||
file_path: &str,
|
||||
) -> Result<Value, String> {
|
||||
match execute_import(state, user, file_path).await {
|
||||
match execute_import(state, user, file_path) {
|
||||
Ok(dynamic) => Ok(dynamic_to_json(&dynamic)),
|
||||
Err(e) => Err(e.to_string()),
|
||||
}
|
||||
}
|
||||
|
||||
async fn execute_export_json(
|
||||
fn execute_export_json(
|
||||
state: &AppState,
|
||||
user: &UserSession,
|
||||
file_path: &str,
|
||||
data_json: Value,
|
||||
) -> Result<String, String> {
|
||||
let data = json_to_dynamic(&data_json);
|
||||
match execute_export(state, user, file_path, data).await {
|
||||
match execute_export(state, user, file_path, data) {
|
||||
Ok(result) => Ok(result),
|
||||
Err(e) => Err(e.to_string()),
|
||||
}
|
||||
|
|
@ -195,7 +190,7 @@ fn dynamic_to_json_value(data: &Dynamic) -> Value {
|
|||
dynamic_to_json(data)
|
||||
}
|
||||
|
||||
async fn execute_import(
|
||||
fn execute_import(
|
||||
state: &AppState,
|
||||
user: &UserSession,
|
||||
file_path: &str,
|
||||
|
|
@ -216,7 +211,7 @@ async fn execute_import(
|
|||
}
|
||||
}
|
||||
|
||||
async fn execute_export(
|
||||
fn execute_export(
|
||||
state: &AppState,
|
||||
user: &UserSession,
|
||||
file_path: &str,
|
||||
|
|
|
|||
1117
src/basic/keywords/intent_classifier.rs
Normal file
1117
src/basic/keywords/intent_classifier.rs
Normal file
File diff suppressed because it is too large
Load diff
|
|
@ -275,7 +275,7 @@ impl Default for ResourceEstimate {
|
|||
}
|
||||
|
||||
pub struct IntentCompiler {
|
||||
_state: Arc<AppState>,
|
||||
state: Arc<AppState>,
|
||||
config: IntentCompilerConfig,
|
||||
}
|
||||
|
||||
|
|
@ -321,16 +321,13 @@ impl std::fmt::Debug for IntentCompiler {
|
|||
impl IntentCompiler {
|
||||
pub fn new(state: Arc<AppState>) -> Self {
|
||||
Self {
|
||||
_state: state,
|
||||
state,
|
||||
config: IntentCompilerConfig::default(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn with_config(state: Arc<AppState>, config: IntentCompilerConfig) -> Self {
|
||||
Self {
|
||||
_state: state,
|
||||
config,
|
||||
}
|
||||
Self { state, config }
|
||||
}
|
||||
|
||||
pub async fn compile(
|
||||
|
|
@ -533,14 +530,14 @@ Respond ONLY with valid JSON."#,
|
|||
Some("HIGH") => StepPriority::High,
|
||||
Some("LOW") => StepPriority::Low,
|
||||
Some("OPTIONAL") => StepPriority::Optional,
|
||||
Some("MEDIUM") | None | _ => StepPriority::Medium,
|
||||
Some("MEDIUM" | _) | None => StepPriority::Medium,
|
||||
},
|
||||
risk_level: match s.risk_level.as_deref() {
|
||||
Some("NONE") => RiskLevel::None,
|
||||
Some("MEDIUM") => RiskLevel::Medium,
|
||||
Some("HIGH") => RiskLevel::High,
|
||||
Some("CRITICAL") => RiskLevel::Critical,
|
||||
Some("LOW") | None | _ => RiskLevel::Low,
|
||||
Some("LOW" | _) | None => RiskLevel::Low,
|
||||
},
|
||||
estimated_minutes: s.estimated_minutes.unwrap_or(5),
|
||||
requires_approval: s.requires_approval.unwrap_or(false),
|
||||
|
|
@ -801,19 +798,36 @@ Respond ONLY with valid JSON."#,
|
|||
) -> Result<String, Box<dyn std::error::Error + Send + Sync>> {
|
||||
trace!("Calling LLM with prompt length: {}", prompt.len());
|
||||
|
||||
let response = serde_json::json!({
|
||||
"action": "create",
|
||||
"target": "system",
|
||||
"domain": null,
|
||||
"client": null,
|
||||
"features": [],
|
||||
"constraints": [],
|
||||
"technologies": [],
|
||||
"data_sources": [],
|
||||
"integrations": []
|
||||
});
|
||||
#[cfg(feature = "llm")]
|
||||
{
|
||||
let config = serde_json::json!({
|
||||
"temperature": self.config.temperature,
|
||||
"max_tokens": self.config.max_tokens
|
||||
});
|
||||
let response = self
|
||||
.state
|
||||
.llm_provider
|
||||
.generate(prompt, &config, &self.config.model, "")
|
||||
.await?;
|
||||
return Ok(response);
|
||||
}
|
||||
|
||||
Ok(response.to_string())
|
||||
#[cfg(not(feature = "llm"))]
|
||||
{
|
||||
warn!("LLM feature not enabled, returning fallback response");
|
||||
let response = serde_json::json!({
|
||||
"action": "create",
|
||||
"target": "system",
|
||||
"domain": null,
|
||||
"client": null,
|
||||
"features": [],
|
||||
"constraints": [],
|
||||
"technologies": [],
|
||||
"data_sources": [],
|
||||
"integrations": []
|
||||
});
|
||||
Ok(response.to_string())
|
||||
}
|
||||
}
|
||||
|
||||
fn assess_risks(plan: &ExecutionPlan) -> RiskAssessment {
|
||||
|
|
|
|||
|
|
@ -18,18 +18,18 @@ mod tests {
|
|||
#[test]
|
||||
fn test_abs_positive() {
|
||||
assert_eq!(42_i64.abs(), 42);
|
||||
assert_eq!(3.14_f64.abs(), 3.14);
|
||||
assert!((3.5_f64.abs() - 3.5).abs() < f64::EPSILON);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_abs_negative() {
|
||||
assert_eq!((-42_i64).abs(), 42);
|
||||
assert_eq!((-3.14_f64).abs(), 3.14);
|
||||
assert!(((-3.5_f64).abs() - 3.5).abs() < f64::EPSILON);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_abs_zero() {
|
||||
assert_eq!(0_i64.abs(), 0);
|
||||
assert_eq!(0.0_f64.abs(), 0.0);
|
||||
assert!(0.0_f64.abs().abs() < f64::EPSILON);
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -110,7 +110,7 @@ mod tests {
|
|||
.iter()
|
||||
.filter_map(|v| v.as_int().ok().map(|i| i as f64))
|
||||
.sum();
|
||||
assert_eq!(sum, 60.0);
|
||||
assert!((sum - 60.0).abs() < f64::EPSILON);
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
|
@ -121,7 +121,7 @@ mod tests {
|
|||
Dynamic::from(3.0_f64),
|
||||
];
|
||||
let sum: f64 = arr.iter().filter_map(|v| v.as_float().ok()).sum();
|
||||
assert_eq!(sum, 7.0);
|
||||
assert!((sum - 7.0).abs() < f64::EPSILON);
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
|
@ -129,7 +129,7 @@ mod tests {
|
|||
let arr: Vec<f64> = vec![10.0, 20.0, 30.0];
|
||||
let sum: f64 = arr.iter().sum();
|
||||
let avg = sum / arr.len() as f64;
|
||||
assert_eq!(avg, 20.0);
|
||||
assert!((avg - 20.0).abs() < f64::EPSILON);
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
|
@ -137,14 +137,14 @@ mod tests {
|
|||
let arr: Vec<f64> = vec![42.0];
|
||||
let sum: f64 = arr.iter().sum();
|
||||
let avg = sum / arr.len() as f64;
|
||||
assert_eq!(avg, 42.0);
|
||||
assert!((avg - 42.0).abs() < f64::EPSILON);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_empty_array_sum() {
|
||||
let arr: Vec<f64> = vec![];
|
||||
let result: f64 = arr.iter().sum();
|
||||
assert_eq!(result, 0.0);
|
||||
assert!(result.abs() < f64::EPSILON);
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
|
@ -155,7 +155,7 @@ mod tests {
|
|||
} else {
|
||||
arr.iter().sum::<f64>() / arr.len() as f64
|
||||
};
|
||||
assert_eq!(result, 0.0);
|
||||
assert!(result.abs() < f64::EPSILON);
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
|
@ -173,6 +173,6 @@ mod tests {
|
|||
.or_else(|| v.as_int().ok().map(|i| i as f64))
|
||||
})
|
||||
.sum();
|
||||
assert_eq!(sum, 60.5);
|
||||
assert!((sum - 60.5).abs() < f64::EPSILON);
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -30,11 +30,11 @@ mod tests {
|
|||
|
||||
#[test]
|
||||
fn test_round_decimals() {
|
||||
let n = 2.71828_f64;
|
||||
let n = 2.56789_f64;
|
||||
let decimals = 2;
|
||||
let factor = 10_f64.powi(decimals);
|
||||
let result = (n * factor).round() / factor;
|
||||
assert!((result - 2.72).abs() < 0.001);
|
||||
assert!((result - 2.57).abs() < 0.001);
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
|
|
|||
|
|
@ -83,14 +83,15 @@ mod tests {
|
|||
|
||||
#[test]
|
||||
fn test_exp() {
|
||||
assert!((0.0_f64.exp() - 1.0).abs() < 0.0001);
|
||||
assert!(0.0_f64.exp_m1().abs() < 0.0001);
|
||||
assert!((1.0_f64.exp() - std::f64::consts::E).abs() < 0.0001);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_pi() {
|
||||
assert!(std::f64::consts::PI > 3.14);
|
||||
assert!(std::f64::consts::PI < 3.15);
|
||||
let pi = std::f64::consts::PI;
|
||||
assert!(pi > 3.0);
|
||||
assert!(pi < 4.0);
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
|
|
|||
|
|
@ -263,7 +263,7 @@ fn send_template_message(
|
|||
"whatsapp" | "sms" => {
|
||||
recipient.starts_with('+') || recipient.chars().all(|c| c.is_numeric())
|
||||
}
|
||||
"telegram" | "push" => !recipient.is_empty(),
|
||||
// "telegram", "push", and all other channels just require non-empty recipient
|
||||
_ => !recipient.is_empty(),
|
||||
};
|
||||
|
||||
|
|
@ -473,9 +473,10 @@ mod tests {
|
|||
|
||||
#[test]
|
||||
fn test_send_template_batch() {
|
||||
let mut recipients = Array::new();
|
||||
recipients.push(Dynamic::from("user1@example.com"));
|
||||
recipients.push(Dynamic::from("user2@example.com"));
|
||||
let recipients: Array = vec![
|
||||
Dynamic::from("user1@example.com"),
|
||||
Dynamic::from("user2@example.com"),
|
||||
];
|
||||
|
||||
let result = send_template_batch("welcome", &recipients, "email", None);
|
||||
assert_eq!(result.get("total").unwrap().as_int().unwrap(), 2);
|
||||
|
|
|
|||
|
|
@ -5,7 +5,10 @@ pub mod add_suggestion;
|
|||
pub mod agent_reflection;
|
||||
pub mod ai_tools;
|
||||
pub mod api_tool_generator;
|
||||
pub mod app_generator;
|
||||
pub mod app_server;
|
||||
pub mod arrays;
|
||||
pub mod ask_later;
|
||||
pub mod auto_task;
|
||||
pub mod autotask_api;
|
||||
pub mod book;
|
||||
|
|
@ -20,6 +23,8 @@ pub mod create_task;
|
|||
pub mod crm;
|
||||
pub mod data_operations;
|
||||
pub mod datetime;
|
||||
pub mod db_api;
|
||||
pub mod designer_ai;
|
||||
pub mod episodic_memory;
|
||||
pub mod errors;
|
||||
pub mod file_operations;
|
||||
|
|
@ -32,6 +37,7 @@ pub mod hear_talk;
|
|||
pub mod http_operations;
|
||||
pub mod human_approval;
|
||||
pub mod import_export;
|
||||
pub mod intent_classifier;
|
||||
pub mod intent_compiler;
|
||||
pub mod kb_statistics;
|
||||
pub mod knowledge_graph;
|
||||
|
|
@ -81,38 +87,39 @@ pub mod weather;
|
|||
pub mod web_data;
|
||||
pub mod webhook;
|
||||
|
||||
|
||||
pub use app_generator::{
|
||||
AppGenerator, GeneratedApp, GeneratedPage, GeneratedScript, PageType, SyncResult,
|
||||
};
|
||||
pub use app_server::configure_app_server_routes;
|
||||
pub use auto_task::{AutoTask, AutoTaskStatus, ExecutionMode, TaskPriority};
|
||||
pub use db_api::configure_db_routes;
|
||||
pub use designer_ai::{DesignerAI, DesignerContext, ModificationResult, ModificationType};
|
||||
pub use intent_classifier::{ClassifiedIntent, IntentClassifier, IntentResult, IntentType};
|
||||
pub use intent_compiler::{CompiledIntent, ExecutionPlan, IntentCompiler, PlanStep};
|
||||
pub use mcp_client::{McpClient, McpRequest, McpResponse, McpServer, McpTool};
|
||||
pub use mcp_directory::{McpDirectoryScanResult, McpDirectoryScanner, McpServerConfig};
|
||||
pub use safety_layer::{AuditEntry, ConstraintCheckResult, SafetyLayer, SimulationResult};
|
||||
|
||||
|
||||
pub use autotask_api::{
|
||||
cancel_task_handler, compile_intent_handler, execute_plan_handler, get_approvals_handler,
|
||||
get_decisions_handler, get_stats_handler, list_tasks_handler, pause_task_handler,
|
||||
resume_task_handler, simulate_plan_handler, simulate_task_handler, submit_approval_handler,
|
||||
submit_decision_handler,
|
||||
cancel_task_handler, classify_intent_handler, compile_intent_handler, execute_plan_handler,
|
||||
get_approvals_handler, get_decisions_handler, get_stats_handler, list_tasks_handler,
|
||||
pause_task_handler, resume_task_handler, simulate_plan_handler, simulate_task_handler,
|
||||
submit_approval_handler, submit_decision_handler,
|
||||
};
|
||||
|
||||
|
||||
pub fn configure_autotask_routes() -> axum::Router<std::sync::Arc<crate::shared::state::AppState>> {
|
||||
use axum::routing::{get, post};
|
||||
|
||||
axum::Router::new()
|
||||
|
||||
.route("/api/autotask/classify", post(classify_intent_handler))
|
||||
.route("/api/autotask/compile", post(compile_intent_handler))
|
||||
|
||||
.route("/api/autotask/execute", post(execute_plan_handler))
|
||||
.route(
|
||||
"/api/autotask/simulate/:plan_id",
|
||||
post(simulate_plan_handler),
|
||||
)
|
||||
|
||||
.route("/api/autotask/list", get(list_tasks_handler))
|
||||
.route("/api/autotask/stats", get(get_stats_handler))
|
||||
|
||||
.route("/api/autotask/:task_id/pause", post(pause_task_handler))
|
||||
.route("/api/autotask/:task_id/resume", post(resume_task_handler))
|
||||
.route("/api/autotask/:task_id/cancel", post(cancel_task_handler))
|
||||
|
|
@ -120,7 +127,6 @@ pub fn configure_autotask_routes() -> axum::Router<std::sync::Arc<crate::shared:
|
|||
"/api/autotask/:task_id/simulate",
|
||||
post(simulate_task_handler),
|
||||
)
|
||||
|
||||
.route(
|
||||
"/api/autotask/:task_id/decisions",
|
||||
get(get_decisions_handler),
|
||||
|
|
@ -129,7 +135,6 @@ pub fn configure_autotask_routes() -> axum::Router<std::sync::Arc<crate::shared:
|
|||
"/api/autotask/:task_id/decide",
|
||||
post(submit_decision_handler),
|
||||
)
|
||||
|
||||
.route(
|
||||
"/api/autotask/:task_id/approvals",
|
||||
get(get_approvals_handler),
|
||||
|
|
@ -140,31 +145,25 @@ pub fn configure_autotask_routes() -> axum::Router<std::sync::Arc<crate::shared:
|
|||
)
|
||||
}
|
||||
|
||||
|
||||
pub fn get_all_keywords() -> Vec<String> {
|
||||
vec![
|
||||
|
||||
"ADD BOT".to_string(),
|
||||
"BOT REFLECTION".to_string(),
|
||||
"BROADCAST TO BOTS".to_string(),
|
||||
"DELEGATE TO BOT".to_string(),
|
||||
"TRANSFER CONVERSATION".to_string(),
|
||||
|
||||
"ADD MEMBER".to_string(),
|
||||
"CREATE DRAFT".to_string(),
|
||||
"SEND MAIL".to_string(),
|
||||
"SEND TEMPLATE".to_string(),
|
||||
"SMS".to_string(),
|
||||
|
||||
"ADD SUGGESTION".to_string(),
|
||||
"CLEAR SUGGESTIONS".to_string(),
|
||||
|
||||
"ADD TOOL".to_string(),
|
||||
"CLEAR TOOLS".to_string(),
|
||||
"CREATE SITE".to_string(),
|
||||
"CREATE TASK".to_string(),
|
||||
"USE TOOL".to_string(),
|
||||
|
||||
"AGGREGATE".to_string(),
|
||||
"DELETE".to_string(),
|
||||
"FILL".to_string(),
|
||||
|
|
@ -181,7 +180,6 @@ pub fn get_all_keywords() -> Vec<String> {
|
|||
"SAVE".to_string(),
|
||||
"SAVE FROM UNSTRUCTURED".to_string(),
|
||||
"UPDATE".to_string(),
|
||||
|
||||
"COMPRESS".to_string(),
|
||||
"COPY".to_string(),
|
||||
"DELETE FILE".to_string(),
|
||||
|
|
@ -194,7 +192,6 @@ pub fn get_all_keywords() -> Vec<String> {
|
|||
"READ".to_string(),
|
||||
"UPLOAD".to_string(),
|
||||
"WRITE".to_string(),
|
||||
|
||||
"CLEAR HEADERS".to_string(),
|
||||
"DELETE HTTP".to_string(),
|
||||
"GET".to_string(),
|
||||
|
|
@ -204,17 +201,14 @@ pub fn get_all_keywords() -> Vec<String> {
|
|||
"PUT".to_string(),
|
||||
"SET HEADER".to_string(),
|
||||
"SOAP".to_string(),
|
||||
|
||||
"EXIT FOR".to_string(),
|
||||
"FOR EACH".to_string(),
|
||||
"IF".to_string(),
|
||||
"SWITCH".to_string(),
|
||||
"WAIT".to_string(),
|
||||
"WHILE".to_string(),
|
||||
|
||||
"GET".to_string(),
|
||||
"SET".to_string(),
|
||||
|
||||
"GET BOT MEMORY".to_string(),
|
||||
"GET USER MEMORY".to_string(),
|
||||
"REMEMBER".to_string(),
|
||||
|
|
@ -223,61 +217,47 @@ pub fn get_all_keywords() -> Vec<String> {
|
|||
"SET USER FACT".to_string(),
|
||||
"SET USER MEMORY".to_string(),
|
||||
"USER FACTS".to_string(),
|
||||
|
||||
"CLEAR KB".to_string(),
|
||||
"USE KB".to_string(),
|
||||
"USE ACCOUNT".to_string(),
|
||||
"USE WEBSITE".to_string(),
|
||||
|
||||
"LLM".to_string(),
|
||||
"SET CONTEXT".to_string(),
|
||||
"USE MODEL".to_string(),
|
||||
|
||||
"RUN BASH".to_string(),
|
||||
"RUN JAVASCRIPT".to_string(),
|
||||
"RUN PYTHON".to_string(),
|
||||
|
||||
"HEAR".to_string(),
|
||||
"TALK".to_string(),
|
||||
|
||||
"ON".to_string(),
|
||||
"ON EMAIL".to_string(),
|
||||
"ON CHANGE".to_string(),
|
||||
"SET SCHEDULE".to_string(),
|
||||
"WEBHOOK".to_string(),
|
||||
|
||||
"SET USER".to_string(),
|
||||
|
||||
"BOOK".to_string(),
|
||||
"WEATHER".to_string(),
|
||||
|
||||
"PRINT".to_string(),
|
||||
|
||||
"FORMAT".to_string(),
|
||||
"INSTR".to_string(),
|
||||
"IS NUMERIC".to_string(),
|
||||
|
||||
"REQUIRE APPROVAL".to_string(),
|
||||
"SIMULATE IMPACT".to_string(),
|
||||
"CHECK CONSTRAINTS".to_string(),
|
||||
"AUDIT LOG".to_string(),
|
||||
|
||||
"PLAN START".to_string(),
|
||||
"PLAN END".to_string(),
|
||||
"STEP".to_string(),
|
||||
"AUTO TASK".to_string(),
|
||||
|
||||
"USE MCP".to_string(),
|
||||
"MCP LIST TOOLS".to_string(),
|
||||
"MCP INVOKE".to_string(),
|
||||
|
||||
"OPTION A OR B".to_string(),
|
||||
"DECIDE".to_string(),
|
||||
"ESCALATE".to_string(),
|
||||
]
|
||||
}
|
||||
|
||||
|
||||
pub fn get_keyword_categories() -> std::collections::HashMap<String, Vec<String>> {
|
||||
let mut categories = std::collections::HashMap::new();
|
||||
|
||||
|
|
|
|||
|
|
@ -231,10 +231,8 @@ pub fn use_model_keyword(state: Arc<AppState>, user: UserSession, engine: &mut E
|
|||
let (tx, rx) = std::sync::mpsc::channel();
|
||||
|
||||
std::thread::spawn(move || {
|
||||
let rt = tokio::runtime::Runtime::new().expect("Failed to create runtime");
|
||||
let result = rt.block_on(async {
|
||||
set_session_model(&state_for_task, session_id, &model_name_clone).await
|
||||
});
|
||||
let _rt = tokio::runtime::Runtime::new().expect("Failed to create runtime");
|
||||
let result = set_session_model(&state_for_task, session_id, &model_name_clone);
|
||||
let _ = tx.send(result);
|
||||
});
|
||||
|
||||
|
|
@ -288,11 +286,8 @@ pub fn set_model_routing_keyword(state: Arc<AppState>, user: UserSession, engine
|
|||
let (tx, rx) = std::sync::mpsc::channel();
|
||||
|
||||
std::thread::spawn(move || {
|
||||
let rt = tokio::runtime::Runtime::new().expect("Failed to create runtime");
|
||||
let result = rt.block_on(async {
|
||||
set_session_routing_strategy(&state_for_task, session_id, strategy_clone)
|
||||
.await
|
||||
});
|
||||
let result =
|
||||
set_session_routing_strategy(&state_for_task, session_id, strategy_clone);
|
||||
let _ = tx.send(result);
|
||||
});
|
||||
|
||||
|
|
@ -347,7 +342,7 @@ pub fn list_models_keyword(state: Arc<AppState>, user: UserSession, engine: &mut
|
|||
});
|
||||
}
|
||||
|
||||
async fn set_session_model(
|
||||
fn set_session_model(
|
||||
state: &AppState,
|
||||
session_id: Uuid,
|
||||
model_name: &str,
|
||||
|
|
@ -377,7 +372,7 @@ async fn set_session_model(
|
|||
Ok(format!("Now using model: {}", model_name))
|
||||
}
|
||||
|
||||
async fn set_session_routing_strategy(
|
||||
fn set_session_routing_strategy(
|
||||
state: &AppState,
|
||||
session_id: Uuid,
|
||||
strategy: RoutingStrategy,
|
||||
|
|
|
|||
|
|
@ -375,7 +375,7 @@ pub fn execute_on_change(
|
|||
}))
|
||||
}
|
||||
|
||||
pub async fn check_folder_monitors(
|
||||
pub fn check_folder_monitors(
|
||||
state: &AppState,
|
||||
bot_id: Uuid,
|
||||
) -> Result<Vec<(FolderChangeEvent, String)>, String> {
|
||||
|
|
@ -450,8 +450,7 @@ pub async fn check_folder_monitors(
|
|||
monitor.last_change_token.as_deref(),
|
||||
monitor.watch_subfolders,
|
||||
&event_types,
|
||||
)
|
||||
.await?;
|
||||
)?;
|
||||
|
||||
for event in new_events {
|
||||
events.push((event, monitor.script_path.clone()));
|
||||
|
|
@ -461,7 +460,7 @@ pub async fn check_folder_monitors(
|
|||
Ok(events)
|
||||
}
|
||||
|
||||
async fn fetch_folder_changes(
|
||||
fn fetch_folder_changes(
|
||||
_state: &AppState,
|
||||
monitor_id: Uuid,
|
||||
provider: FolderProvider,
|
||||
|
|
@ -480,7 +479,7 @@ async fn fetch_folder_changes(
|
|||
Ok(Vec::new())
|
||||
}
|
||||
|
||||
pub async fn process_folder_event(
|
||||
pub fn process_folder_event(
|
||||
state: &AppState,
|
||||
event: &FolderChangeEvent,
|
||||
script_path: &str,
|
||||
|
|
|
|||
|
|
@ -270,7 +270,7 @@ pub fn execute_on_email(
|
|||
}))
|
||||
}
|
||||
|
||||
pub async fn check_email_monitors(
|
||||
pub fn check_email_monitors(
|
||||
state: &AppState,
|
||||
bot_id: Uuid,
|
||||
) -> Result<Vec<(EmailReceivedEvent, String)>, String> {
|
||||
|
|
@ -321,8 +321,7 @@ pub async fn check_email_monitors(
|
|||
monitor.last_uid.unwrap_or(0),
|
||||
monitor.filter_from.as_deref(),
|
||||
monitor.filter_subject.as_deref(),
|
||||
)
|
||||
.await?;
|
||||
)?;
|
||||
|
||||
for event in new_events {
|
||||
events.push((event, monitor.script_path.clone()));
|
||||
|
|
@ -332,7 +331,7 @@ pub async fn check_email_monitors(
|
|||
Ok(events)
|
||||
}
|
||||
|
||||
async fn fetch_new_emails(
|
||||
fn fetch_new_emails(
|
||||
_state: &AppState,
|
||||
monitor_id: Uuid,
|
||||
_email_address: &str,
|
||||
|
|
@ -344,7 +343,7 @@ async fn fetch_new_emails(
|
|||
Ok(Vec::new())
|
||||
}
|
||||
|
||||
pub async fn process_email_event(
|
||||
pub fn process_email_event(
|
||||
state: &AppState,
|
||||
event: &EmailReceivedEvent,
|
||||
script_path: &str,
|
||||
|
|
|
|||
|
|
@ -647,9 +647,9 @@ impl SafetyLayer {
|
|||
threshold: row.threshold.and_then(|t| serde_json::from_str(&t).ok()),
|
||||
severity: match row.severity.as_str() {
|
||||
"info" => ConstraintSeverity::Info,
|
||||
"warning" => ConstraintSeverity::Warning,
|
||||
"error" => ConstraintSeverity::Error,
|
||||
"critical" => ConstraintSeverity::Critical,
|
||||
// "warning" and any other value default to Warning
|
||||
_ => ConstraintSeverity::Warning,
|
||||
},
|
||||
enabled: row.enabled,
|
||||
|
|
|
|||
|
|
@ -91,14 +91,14 @@ pub async fn execute_save_from_unstructured(
|
|||
table_name: &str,
|
||||
text: &str,
|
||||
) -> Result<String, String> {
|
||||
let schema = get_table_schema(state, table_name).await?;
|
||||
let schema = get_table_schema(state, table_name)?;
|
||||
|
||||
let extraction_prompt = build_extraction_prompt(table_name, &schema, text);
|
||||
let extracted_json = call_llm_for_extraction(state, &extraction_prompt).await?;
|
||||
|
||||
let cleaned_data = validate_and_clean_data(&extracted_json, &schema)?;
|
||||
|
||||
let record_id = save_to_table(state, user, table_name, cleaned_data).await?;
|
||||
let record_id = save_to_table(state, user, table_name, cleaned_data)?;
|
||||
|
||||
trace!(
|
||||
"Saved unstructured data to table '{}': {}",
|
||||
|
|
@ -109,7 +109,7 @@ pub async fn execute_save_from_unstructured(
|
|||
Ok(record_id)
|
||||
}
|
||||
|
||||
async fn get_table_schema(state: &AppState, table_name: &str) -> Result<Value, String> {
|
||||
fn get_table_schema(state: &AppState, table_name: &str) -> Result<Value, String> {
|
||||
let mut conn = state.conn.get().map_err(|e| format!("DB error: {}", e))?;
|
||||
|
||||
let query = diesel::sql_query(
|
||||
|
|
@ -357,7 +357,6 @@ fn clean_value_for_type(value: &Value, data_type: &str) -> Value {
|
|||
json!(null)
|
||||
}
|
||||
}
|
||||
"jsonb" | "json" => value.clone(),
|
||||
"uuid" => {
|
||||
if let Some(s) = value.as_str() {
|
||||
if Uuid::parse_str(s).is_ok() {
|
||||
|
|
@ -369,11 +368,12 @@ fn clean_value_for_type(value: &Value, data_type: &str) -> Value {
|
|||
json!(Uuid::new_v4().to_string())
|
||||
}
|
||||
}
|
||||
// jsonb, json, and any other types just clone the value
|
||||
_ => value.clone(),
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn save_to_table(
|
||||
pub fn save_to_table(
|
||||
state: &AppState,
|
||||
user: &UserSession,
|
||||
table_name: &str,
|
||||
|
|
@ -385,23 +385,8 @@ pub async fn save_to_table(
|
|||
let user_id = user.user_id.to_string();
|
||||
let created_at = Utc::now();
|
||||
|
||||
let mut fields = vec!["id", "created_at"];
|
||||
let mut placeholders = vec!["$1".to_string(), "$2".to_string()];
|
||||
let mut _bind_index = 3;
|
||||
|
||||
let data_obj = data.as_object().ok_or("Invalid data format")?;
|
||||
|
||||
for (field, _) in data_obj {
|
||||
fields.push(field);
|
||||
placeholders.push(format!("${}", _bind_index));
|
||||
_bind_index += 1;
|
||||
}
|
||||
|
||||
if !data_obj.contains_key("user_id") {
|
||||
fields.push("user_id");
|
||||
placeholders.push(format!("${}", _bind_index));
|
||||
}
|
||||
|
||||
let mut values_map = serde_json::Map::new();
|
||||
values_map.insert("id".to_string(), json!(record_id));
|
||||
values_map.insert("created_at".to_string(), json!(created_at));
|
||||
|
|
@ -449,7 +434,7 @@ mod tests {
|
|||
fn test_clean_value_for_type() {
|
||||
assert_eq!(clean_value_for_type(&json!("test"), "text"), json!("test"));
|
||||
assert_eq!(clean_value_for_type(&json!("42"), "integer"), json!(42));
|
||||
assert_eq!(clean_value_for_type(&json!("3.14"), "numeric"), json!(3.14));
|
||||
assert_eq!(clean_value_for_type(&json!("3.5"), "numeric"), json!(3.5));
|
||||
assert_eq!(clean_value_for_type(&json!("true"), "boolean"), json!(true));
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -286,16 +286,19 @@ async fn execute_send_mail(
|
|||
|
||||
let email_service = EmailService::new(Arc::new(state.clone()));
|
||||
|
||||
if let Ok(_) = email_service.send_email(
|
||||
&to,
|
||||
&subject,
|
||||
&body,
|
||||
if attachments.is_empty() {
|
||||
None
|
||||
} else {
|
||||
Some(attachments.clone())
|
||||
},
|
||||
) {
|
||||
if email_service
|
||||
.send_email(
|
||||
&to,
|
||||
&subject,
|
||||
&body,
|
||||
if attachments.is_empty() {
|
||||
None
|
||||
} else {
|
||||
Some(attachments.clone())
|
||||
},
|
||||
)
|
||||
.is_ok()
|
||||
{
|
||||
trace!("Email sent successfully: {}", message_id);
|
||||
return Ok(format!("Email sent: {}", message_id));
|
||||
}
|
||||
|
|
|
|||
|
|
@ -242,7 +242,7 @@ pub fn get_twitter_metrics_keyword(state: Arc<AppState>, user: UserSession, engi
|
|||
debug!("Registered GET TWITTER METRICS keyword");
|
||||
}
|
||||
|
||||
async fn get_platform_credentials(
|
||||
fn get_platform_credentials(
|
||||
state: &AppState,
|
||||
bot_id: Uuid,
|
||||
platform: &str,
|
||||
|
|
@ -275,7 +275,7 @@ async fn fetch_instagram_metrics(
|
|||
user: &UserSession,
|
||||
post_id: &str,
|
||||
) -> Result<PostEngagement, String> {
|
||||
let credentials = get_platform_credentials(state, user.bot_id, "instagram").await?;
|
||||
let credentials = get_platform_credentials(state, user.bot_id, "instagram")?;
|
||||
|
||||
let access_token = credentials
|
||||
.get("access_token")
|
||||
|
|
@ -317,7 +317,7 @@ async fn fetch_facebook_metrics(
|
|||
user: &UserSession,
|
||||
post_id: &str,
|
||||
) -> Result<PostEngagement, String> {
|
||||
let credentials = get_platform_credentials(state, user.bot_id, "facebook").await?;
|
||||
let credentials = get_platform_credentials(state, user.bot_id, "facebook")?;
|
||||
|
||||
let access_token = credentials
|
||||
.get("access_token")
|
||||
|
|
@ -370,7 +370,7 @@ async fn fetch_linkedin_metrics(
|
|||
user: &UserSession,
|
||||
post_id: &str,
|
||||
) -> Result<PostEngagement, String> {
|
||||
let credentials = get_platform_credentials(state, user.bot_id, "linkedin").await?;
|
||||
let credentials = get_platform_credentials(state, user.bot_id, "linkedin")?;
|
||||
|
||||
let access_token = credentials
|
||||
.get("access_token")
|
||||
|
|
@ -413,7 +413,7 @@ async fn fetch_twitter_metrics(
|
|||
user: &UserSession,
|
||||
post_id: &str,
|
||||
) -> Result<PostEngagement, String> {
|
||||
let credentials = get_platform_credentials(state, user.bot_id, "twitter").await?;
|
||||
let credentials = get_platform_credentials(state, user.bot_id, "twitter")?;
|
||||
|
||||
let bearer_token = credentials
|
||||
.get("bearer_token")
|
||||
|
|
|
|||
|
|
@ -40,17 +40,14 @@ pub fn post_to_keyword(state: Arc<AppState>, user: UserSession, engine: &mut Eng
|
|||
.enable_all()
|
||||
.build();
|
||||
|
||||
if let Ok(rt) = rt {
|
||||
let result = rt.block_on(async move {
|
||||
execute_post_to(
|
||||
&state_for_task,
|
||||
&user_for_task,
|
||||
&platform_owned,
|
||||
&media_owned,
|
||||
&caption_owned,
|
||||
)
|
||||
.await
|
||||
});
|
||||
if let Ok(_rt) = rt {
|
||||
let result = execute_post_to(
|
||||
&state_for_task,
|
||||
&user_for_task,
|
||||
&platform_owned,
|
||||
&media_owned,
|
||||
&caption_owned,
|
||||
);
|
||||
let _ = tx.send(result);
|
||||
}
|
||||
});
|
||||
|
|
@ -104,17 +101,14 @@ fn register_platform_shortcuts(state: Arc<AppState>, user: UserSession, engine:
|
|||
.enable_all()
|
||||
.build();
|
||||
|
||||
if let Ok(rt) = rt {
|
||||
let result = rt.block_on(async move {
|
||||
execute_post_to(
|
||||
&state_for_task,
|
||||
&user_for_task,
|
||||
&platform_owned,
|
||||
&media_owned,
|
||||
&caption_owned,
|
||||
)
|
||||
.await
|
||||
});
|
||||
if let Ok(_rt) = rt {
|
||||
let result = execute_post_to(
|
||||
&state_for_task,
|
||||
&user_for_task,
|
||||
&platform_owned,
|
||||
&media_owned,
|
||||
&caption_owned,
|
||||
);
|
||||
let _ = tx.send(result);
|
||||
}
|
||||
});
|
||||
|
|
@ -136,7 +130,7 @@ fn register_platform_shortcuts(state: Arc<AppState>, user: UserSession, engine:
|
|||
}
|
||||
}
|
||||
|
||||
async fn execute_post_to(
|
||||
fn execute_post_to(
|
||||
state: &AppState,
|
||||
user: &UserSession,
|
||||
platform_input: &str,
|
||||
|
|
@ -147,14 +141,14 @@ async fn execute_post_to(
|
|||
let mut post_ids = Vec::new();
|
||||
|
||||
for platform in platforms {
|
||||
let post_id = save_social_post(state, user, platform, media, caption).await?;
|
||||
let post_id = save_social_post(state, user, platform, media, caption)?;
|
||||
post_ids.push(post_id);
|
||||
}
|
||||
|
||||
Ok(post_ids.join(","))
|
||||
}
|
||||
|
||||
async fn save_social_post(
|
||||
fn save_social_post(
|
||||
state: &AppState,
|
||||
user: &UserSession,
|
||||
platform: &str,
|
||||
|
|
|
|||
|
|
@ -52,18 +52,15 @@ pub fn post_to_at_keyword(state: Arc<AppState>, user: UserSession, engine: &mut
|
|||
.enable_all()
|
||||
.build();
|
||||
|
||||
if let Ok(rt) = rt {
|
||||
let result = rt.block_on(async move {
|
||||
execute_scheduled_post(
|
||||
&state_for_task,
|
||||
&user_for_task,
|
||||
&platform_owned,
|
||||
&media_owned,
|
||||
&caption_owned,
|
||||
scheduled_at,
|
||||
)
|
||||
.await
|
||||
});
|
||||
if let Ok(_rt) = rt {
|
||||
let result = execute_scheduled_post(
|
||||
&state_for_task,
|
||||
&user_for_task,
|
||||
&platform_owned,
|
||||
&media_owned,
|
||||
&caption_owned,
|
||||
scheduled_at,
|
||||
);
|
||||
let _ = tx.send(result);
|
||||
}
|
||||
});
|
||||
|
|
@ -112,7 +109,7 @@ fn parse_schedule_time(time_str: &str) -> Result<DateTime<Utc>, Box<rhai::EvalAl
|
|||
)))
|
||||
}
|
||||
|
||||
async fn execute_scheduled_post(
|
||||
fn execute_scheduled_post(
|
||||
state: &AppState,
|
||||
user: &UserSession,
|
||||
platform: &str,
|
||||
|
|
@ -124,14 +121,14 @@ async fn execute_scheduled_post(
|
|||
let mut post_ids = Vec::new();
|
||||
|
||||
for p in platforms {
|
||||
let post_id = save_scheduled_post(state, user, p, media, caption, scheduled_at).await?;
|
||||
let post_id = save_scheduled_post(state, user, p, media, caption, scheduled_at)?;
|
||||
post_ids.push(post_id);
|
||||
}
|
||||
|
||||
Ok(post_ids.join(","))
|
||||
}
|
||||
|
||||
async fn save_scheduled_post(
|
||||
fn save_scheduled_post(
|
||||
state: &AppState,
|
||||
user: &UserSession,
|
||||
platform: &str,
|
||||
|
|
|
|||
|
|
@ -180,9 +180,9 @@ mod tests {
|
|||
|
||||
#[test]
|
||||
fn test_switch_match_floats() {
|
||||
let a = Dynamic::from(3.14_f64);
|
||||
let b = Dynamic::from(3.14_f64);
|
||||
let c = Dynamic::from(2.71_f64);
|
||||
let a = Dynamic::from(3.5_f64);
|
||||
let b = Dynamic::from(3.5_f64);
|
||||
let c = Dynamic::from(2.5_f64);
|
||||
|
||||
assert!(switch_match_impl(&a, &b));
|
||||
assert!(!switch_match_impl(&a, &c));
|
||||
|
|
|
|||
|
|
@ -288,7 +288,8 @@ pub fn generate_create_table_sql(table: &TableDefinition, driver: &str) -> Strin
|
|||
}
|
||||
|
||||
if let Some(ref default) = field.default_value {
|
||||
col_def.push_str(&format!(" DEFAULT {}", default));
|
||||
use std::fmt::Write;
|
||||
let _ = write!(&mut col_def, " DEFAULT {}", default);
|
||||
}
|
||||
|
||||
column_defs.push(col_def);
|
||||
|
|
@ -381,7 +382,6 @@ pub fn load_connection_config(
|
|||
pub fn build_connection_string(conn: &ExternalConnection) -> String {
|
||||
let port = conn.port.unwrap_or(match conn.driver.as_str() {
|
||||
"mysql" | "mariadb" => 3306,
|
||||
"postgres" | "postgresql" => 5432,
|
||||
"mssql" | "sqlserver" => 1433,
|
||||
_ => 5432,
|
||||
});
|
||||
|
|
@ -399,7 +399,7 @@ pub fn build_connection_string(conn: &ExternalConnection) -> String {
|
|||
conn.username, conn.password, conn.server, port, conn.database
|
||||
)
|
||||
}
|
||||
"postgres" | "postgresql" | _ => {
|
||||
_ => {
|
||||
format!(
|
||||
"postgres://{}:{}@{}:{}/{}",
|
||||
conn.username, conn.password, conn.server, port, conn.database
|
||||
|
|
|
|||
|
|
@ -255,8 +255,7 @@ fn priority_to_int(priority: Option<&str>) -> i32 {
|
|||
Some("urgent") => 3,
|
||||
Some("high") => 2,
|
||||
Some("low") => 0,
|
||||
Some("normal") | None => 1,
|
||||
Some(_) => 1,
|
||||
_ => 1,
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -246,7 +246,7 @@ async fn send_message_to_recipient(
|
|||
send_web_message(state.clone(), &recipient_id, message).await?;
|
||||
}
|
||||
"email" => {
|
||||
send_email(state.clone(), &recipient_id, message).await?;
|
||||
send_email(state.clone(), &recipient_id, message)?;
|
||||
}
|
||||
_ => {
|
||||
error!("Unknown channel: {}", channel);
|
||||
|
|
@ -296,7 +296,7 @@ async fn send_file_with_caption_to_recipient(
|
|||
send_web_file(state, &recipient_id, file_data, caption).await?;
|
||||
}
|
||||
"email" => {
|
||||
send_email_attachment(state, &recipient_id, file_data, caption).await?;
|
||||
send_email_attachment(state, &recipient_id, file_data, caption)?;
|
||||
}
|
||||
_ => {
|
||||
return Err(format!("Unsupported channel for file sending: {}", channel).into());
|
||||
|
|
@ -608,7 +608,7 @@ async fn send_web_file(
|
|||
send_web_message(state, session_id, &message).await
|
||||
}
|
||||
|
||||
async fn send_email(
|
||||
fn send_email(
|
||||
state: Arc<AppState>,
|
||||
email: &str,
|
||||
message: &str,
|
||||
|
|
@ -630,7 +630,7 @@ async fn send_email(
|
|||
}
|
||||
}
|
||||
|
||||
async fn send_email_attachment(
|
||||
fn send_email_attachment(
|
||||
state: Arc<AppState>,
|
||||
email: &str,
|
||||
file_data: Vec<u8>,
|
||||
|
|
|
|||
|
|
@ -39,15 +39,12 @@ pub fn use_tool_keyword(state: Arc<AppState>, user: UserSession, engine: &mut En
|
|||
.worker_threads(2)
|
||||
.enable_all()
|
||||
.build();
|
||||
let send_err = if let Ok(rt) = rt {
|
||||
let result = rt.block_on(async move {
|
||||
associate_tool_with_session(
|
||||
&state_for_task,
|
||||
&user_for_task,
|
||||
&tool_name_for_task,
|
||||
)
|
||||
.await
|
||||
});
|
||||
let send_err = if let Ok(_rt) = rt {
|
||||
let result = associate_tool_with_session(
|
||||
&state_for_task,
|
||||
&user_for_task,
|
||||
&tool_name_for_task,
|
||||
);
|
||||
tx.send(result).err()
|
||||
} else {
|
||||
tx.send(Err("Failed to build tokio runtime".to_string()))
|
||||
|
|
@ -77,7 +74,7 @@ pub fn use_tool_keyword(state: Arc<AppState>, user: UserSession, engine: &mut En
|
|||
})
|
||||
.unwrap();
|
||||
}
|
||||
async fn associate_tool_with_session(
|
||||
fn associate_tool_with_session(
|
||||
state: &AppState,
|
||||
user: &UserSession,
|
||||
tool_name: &str,
|
||||
|
|
|
|||
|
|
@ -43,15 +43,12 @@ pub fn use_website_keyword(state: Arc<AppState>, user: UserSession, engine: &mut
|
|||
.enable_all()
|
||||
.build();
|
||||
|
||||
let send_err = if let Ok(rt) = rt {
|
||||
let result = rt.block_on(async move {
|
||||
associate_website_with_session(
|
||||
&state_for_task,
|
||||
&user_for_task,
|
||||
&url_for_task,
|
||||
)
|
||||
.await
|
||||
});
|
||||
let send_err = if let Ok(_rt) = rt {
|
||||
let result = associate_website_with_session(
|
||||
&state_for_task,
|
||||
&user_for_task,
|
||||
&url_for_task,
|
||||
);
|
||||
tx.send(result).err()
|
||||
} else {
|
||||
tx.send(Err("Failed to build tokio runtime".to_string()))
|
||||
|
|
@ -85,7 +82,7 @@ pub fn use_website_keyword(state: Arc<AppState>, user: UserSession, engine: &mut
|
|||
.unwrap();
|
||||
}
|
||||
|
||||
async fn associate_website_with_session(
|
||||
fn associate_website_with_session(
|
||||
state: &AppState,
|
||||
user: &UserSession,
|
||||
url: &str,
|
||||
|
|
|
|||
|
|
@ -73,8 +73,7 @@ mod tests {
|
|||
|
||||
#[test]
|
||||
fn test_non_empty_array() {
|
||||
let mut arr = Array::new();
|
||||
arr.push(Dynamic::from(1));
|
||||
let arr: Array = vec![Dynamic::from(1)];
|
||||
let value = Dynamic::from(arr);
|
||||
assert!(!check_empty(&value));
|
||||
}
|
||||
|
|
|
|||
|
|
@ -109,16 +109,16 @@ mod tests {
|
|||
assert_eq!(get_type_name(&Dynamic::UNIT), "null");
|
||||
assert_eq!(get_type_name(&Dynamic::from(true)), "boolean");
|
||||
assert_eq!(get_type_name(&Dynamic::from(42_i64)), "integer");
|
||||
assert_eq!(get_type_name(&Dynamic::from(3.14_f64)), "float");
|
||||
assert_eq!(get_type_name(&Dynamic::from(3.5_f64)), "float");
|
||||
assert_eq!(get_type_name(&Dynamic::from("hello")), "string");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_is_numeric() {
|
||||
assert!(is_numeric(&Dynamic::from(42_i64)));
|
||||
assert!(is_numeric(&Dynamic::from(3.14_f64)));
|
||||
assert!(is_numeric(&Dynamic::from(3.5_f64)));
|
||||
assert!(is_numeric(&Dynamic::from("123")));
|
||||
assert!(is_numeric(&Dynamic::from("3.14")));
|
||||
assert!(is_numeric(&Dynamic::from("3.5")));
|
||||
assert!(!is_numeric(&Dynamic::from("hello")));
|
||||
assert!(!is_numeric(&Dynamic::from(true)));
|
||||
}
|
||||
|
|
@ -138,13 +138,13 @@ mod tests {
|
|||
#[test]
|
||||
fn test_is_numeric_negative() {
|
||||
assert!(is_numeric(&Dynamic::from(-42_i64)));
|
||||
assert!(is_numeric(&Dynamic::from(-3.14_f64)));
|
||||
assert!(is_numeric(&Dynamic::from(-3.5_f64)));
|
||||
assert!(is_numeric(&Dynamic::from("-123")));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_is_numeric_whitespace() {
|
||||
assert!(is_numeric(&Dynamic::from(" 123 ")));
|
||||
assert!(is_numeric(&Dynamic::from(" 3.14 ")));
|
||||
assert!(is_numeric(&Dynamic::from(" 3.5 ")));
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -154,7 +154,7 @@ pub fn remove_webhook_registration(
|
|||
Ok(result)
|
||||
}
|
||||
|
||||
/// Type alias for webhook results: (target, param, is_active)
|
||||
/// Type alias for webhook results: (target, param, `is_active`).
|
||||
pub type WebhookResult = Vec<(String, String, bool)>;
|
||||
|
||||
pub fn get_bot_webhooks(
|
||||
|
|
|
|||
106
src/basic/mod.rs
106
src/basic/mod.rs
|
|
@ -430,10 +430,11 @@ impl ScriptService {
|
|||
|| trimmed.starts_with("ELSE")
|
||||
|| trimmed.starts_with("END IF");
|
||||
result.push_str(trimmed);
|
||||
if is_basic_command || !for_stack.is_empty() || is_control_flow {
|
||||
result.push(';');
|
||||
} else if !trimmed.ends_with(';') && !trimmed.ends_with('{') && !trimmed.ends_with('}')
|
||||
{
|
||||
let needs_semicolon = is_basic_command
|
||||
|| !for_stack.is_empty()
|
||||
|| is_control_flow
|
||||
|| (!trimmed.ends_with(';') && !trimmed.ends_with('{') && !trimmed.ends_with('}'));
|
||||
if needs_semicolon {
|
||||
result.push(';');
|
||||
}
|
||||
result.push('\n');
|
||||
|
|
@ -764,7 +765,6 @@ impl ScriptService {
|
|||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use std::collections::HashMap;
|
||||
use std::time::Duration;
|
||||
|
||||
|
|
@ -861,7 +861,13 @@ TALK "Total: $" + STR$(total)
|
|||
pub use_mocks: bool,
|
||||
pub env_vars: HashMap<String, String>,
|
||||
pub capture_logs: bool,
|
||||
pub log_level: LogLevel,
|
||||
log_level: LogLevel,
|
||||
}
|
||||
|
||||
impl BotRunnerConfig {
|
||||
pub const fn log_level(&self) -> LogLevel {
|
||||
self.log_level
|
||||
}
|
||||
}
|
||||
|
||||
impl Default for BotRunnerConfig {
|
||||
|
|
@ -915,6 +921,18 @@ TALK "Total: $" + STR$(total)
|
|||
0.0
|
||||
}
|
||||
}
|
||||
|
||||
pub const fn min_latency(&self) -> u64 {
|
||||
self.min_latency_ms
|
||||
}
|
||||
|
||||
pub const fn max_latency(&self) -> u64 {
|
||||
self.max_latency_ms
|
||||
}
|
||||
|
||||
pub const fn latency_range(&self) -> u64 {
|
||||
self.max_latency_ms.saturating_sub(self.min_latency_ms)
|
||||
}
|
||||
}
|
||||
|
||||
// Tests
|
||||
|
|
@ -973,27 +991,31 @@ TALK "Total: $" + STR$(total)
|
|||
|
||||
#[test]
|
||||
fn test_runner_metrics_avg_latency() {
|
||||
let mut metrics = RunnerMetrics::default();
|
||||
metrics.total_requests = 10;
|
||||
metrics.total_latency_ms = 1000;
|
||||
let metrics = RunnerMetrics {
|
||||
total_requests: 10,
|
||||
total_latency_ms: 1000,
|
||||
..RunnerMetrics::default()
|
||||
};
|
||||
|
||||
assert_eq!(metrics.avg_latency_ms(), 100);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_runner_metrics_success_rate() {
|
||||
let mut metrics = RunnerMetrics::default();
|
||||
metrics.total_requests = 100;
|
||||
metrics.successful_requests = 95;
|
||||
let metrics = RunnerMetrics {
|
||||
total_requests: 100,
|
||||
successful_requests: 95,
|
||||
..RunnerMetrics::default()
|
||||
};
|
||||
|
||||
assert_eq!(metrics.success_rate(), 95.0);
|
||||
assert!((metrics.success_rate() - 95.0).abs() < f64::EPSILON);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_runner_metrics_zero_requests() {
|
||||
let metrics = RunnerMetrics::default();
|
||||
assert_eq!(metrics.avg_latency_ms(), 0);
|
||||
assert_eq!(metrics.success_rate(), 0.0);
|
||||
assert!(metrics.success_rate().abs() < f64::EPSILON);
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
|
@ -1004,13 +1026,14 @@ TALK "Total: $" + STR$(total)
|
|||
|
||||
#[test]
|
||||
fn test_runner_config_env_vars() {
|
||||
let mut config = BotRunnerConfig::default();
|
||||
config
|
||||
.env_vars
|
||||
.insert("API_KEY".to_string(), "test123".to_string());
|
||||
config
|
||||
.env_vars
|
||||
.insert("DEBUG".to_string(), "true".to_string());
|
||||
let mut env_vars = HashMap::new();
|
||||
env_vars.insert("API_KEY".to_string(), "test123".to_string());
|
||||
env_vars.insert("DEBUG".to_string(), "true".to_string());
|
||||
|
||||
let config = BotRunnerConfig {
|
||||
env_vars,
|
||||
..BotRunnerConfig::default()
|
||||
};
|
||||
|
||||
assert_eq!(config.env_vars.get("API_KEY"), Some(&"test123".to_string()));
|
||||
assert_eq!(config.env_vars.get("DEBUG"), Some(&"true".to_string()));
|
||||
|
|
@ -1018,40 +1041,55 @@ TALK "Total: $" + STR$(total)
|
|||
|
||||
#[test]
|
||||
fn test_runner_config_timeout() {
|
||||
let mut config = BotRunnerConfig::default();
|
||||
config.timeout = Duration::from_secs(60);
|
||||
let config = BotRunnerConfig {
|
||||
timeout: Duration::from_secs(60),
|
||||
..BotRunnerConfig::default()
|
||||
};
|
||||
|
||||
assert_eq!(config.timeout, Duration::from_secs(60));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_metrics_tracking() {
|
||||
let mut metrics = RunnerMetrics::default();
|
||||
metrics.total_requests = 50;
|
||||
metrics.successful_requests = 45;
|
||||
metrics.failed_requests = 5;
|
||||
metrics.total_latency_ms = 5000;
|
||||
metrics.min_latency_ms = 10;
|
||||
metrics.max_latency_ms = 500;
|
||||
let metrics = RunnerMetrics {
|
||||
total_requests: 50,
|
||||
successful_requests: 45,
|
||||
failed_requests: 5,
|
||||
total_latency_ms: 5000,
|
||||
min_latency_ms: 10,
|
||||
max_latency_ms: 500,
|
||||
..RunnerMetrics::default()
|
||||
};
|
||||
|
||||
assert_eq!(metrics.avg_latency_ms(), 100);
|
||||
assert_eq!(metrics.success_rate(), 90.0);
|
||||
assert!((metrics.success_rate() - 90.0).abs() < f64::EPSILON);
|
||||
assert_eq!(
|
||||
metrics.total_requests,
|
||||
metrics.successful_requests + metrics.failed_requests
|
||||
);
|
||||
assert_eq!(metrics.min_latency(), 10);
|
||||
assert_eq!(metrics.max_latency(), 500);
|
||||
assert_eq!(metrics.latency_range(), 490);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_script_execution_tracking() {
|
||||
let mut metrics = RunnerMetrics::default();
|
||||
metrics.script_executions = 25;
|
||||
metrics.transfer_to_human_count = 3;
|
||||
let metrics = RunnerMetrics {
|
||||
script_executions: 25,
|
||||
transfer_to_human_count: 3,
|
||||
..RunnerMetrics::default()
|
||||
};
|
||||
|
||||
assert_eq!(metrics.script_executions, 25);
|
||||
assert_eq!(metrics.transfer_to_human_count, 3);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_log_level_accessor() {
|
||||
let config = BotRunnerConfig::default();
|
||||
assert_eq!(config.log_level(), LogLevel::Info);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_log_levels() {
|
||||
assert!(matches!(LogLevel::Trace, LogLevel::Trace));
|
||||
|
|
|
|||
|
|
@ -92,10 +92,10 @@ impl CalendarEvent {
|
|||
event.location(loc);
|
||||
}
|
||||
|
||||
event.add_property("ORGANIZER", &format!("mailto:{}", self.organizer));
|
||||
event.add_property("ORGANIZER", format!("mailto:{}", self.organizer));
|
||||
|
||||
for attendee in &self.attendees {
|
||||
event.add_property("ATTENDEE", &format!("mailto:{}", attendee));
|
||||
event.add_property("ATTENDEE", format!("mailto:{}", attendee));
|
||||
}
|
||||
|
||||
if let Some(ref rrule) = self.recurrence {
|
||||
|
|
@ -103,7 +103,7 @@ impl CalendarEvent {
|
|||
}
|
||||
|
||||
if let Some(minutes) = self.reminder_minutes {
|
||||
event.add_property("VALARM", &format!("-PT{}M", minutes));
|
||||
event.add_property("VALARM", format!("-PT{}M", minutes));
|
||||
}
|
||||
|
||||
event.done()
|
||||
|
|
@ -500,7 +500,7 @@ pub async fn start_reminder_job(engine: Arc<CalendarEngine>) {
|
|||
for event in &engine.events {
|
||||
if let Some(reminder_minutes) = event.reminder_minutes {
|
||||
let reminder_time =
|
||||
event.start_time - chrono::Duration::minutes(reminder_minutes as i64);
|
||||
event.start_time - chrono::Duration::minutes(i64::from(reminder_minutes));
|
||||
|
||||
if now >= reminder_time && now < reminder_time + chrono::Duration::minutes(1) {
|
||||
info!(
|
||||
|
|
|
|||
|
|
@ -1,15 +1,9 @@
|
|||
|
||||
|
||||
|
||||
|
||||
|
||||
use anyhow::{anyhow, Result};
|
||||
use chrono::{DateTime, Duration, Utc};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::collections::HashMap;
|
||||
use uuid::Uuid;
|
||||
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
|
||||
pub enum AccessLevel {
|
||||
Read,
|
||||
|
|
@ -18,7 +12,6 @@ pub enum AccessLevel {
|
|||
Owner,
|
||||
}
|
||||
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
|
||||
pub enum ResourceType {
|
||||
File,
|
||||
|
|
@ -28,7 +21,6 @@ pub enum ResourceType {
|
|||
Application,
|
||||
}
|
||||
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct AccessPermission {
|
||||
pub id: Uuid,
|
||||
|
|
@ -43,7 +35,6 @@ pub struct AccessPermission {
|
|||
pub is_active: bool,
|
||||
}
|
||||
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct AccessReviewRequest {
|
||||
pub id: Uuid,
|
||||
|
|
@ -56,7 +47,6 @@ pub struct AccessReviewRequest {
|
|||
pub comments: Option<String>,
|
||||
}
|
||||
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
|
||||
pub enum ReviewStatus {
|
||||
Pending,
|
||||
|
|
@ -66,7 +56,6 @@ pub enum ReviewStatus {
|
|||
Expired,
|
||||
}
|
||||
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct AccessReviewResult {
|
||||
pub review_id: Uuid,
|
||||
|
|
@ -78,7 +67,6 @@ pub struct AccessReviewResult {
|
|||
pub comments: String,
|
||||
}
|
||||
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct AccessViolation {
|
||||
pub id: Uuid,
|
||||
|
|
@ -90,7 +78,6 @@ pub struct AccessViolation {
|
|||
pub severity: ViolationSeverity,
|
||||
}
|
||||
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
|
||||
pub enum ViolationSeverity {
|
||||
Low,
|
||||
|
|
@ -99,7 +86,6 @@ pub enum ViolationSeverity {
|
|||
Critical,
|
||||
}
|
||||
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct AccessReviewService {
|
||||
permissions: HashMap<Uuid, Vec<AccessPermission>>,
|
||||
|
|
@ -108,7 +94,6 @@ pub struct AccessReviewService {
|
|||
}
|
||||
|
||||
impl AccessReviewService {
|
||||
|
||||
pub fn new() -> Self {
|
||||
Self {
|
||||
permissions: HashMap::new(),
|
||||
|
|
@ -117,7 +102,6 @@ impl AccessReviewService {
|
|||
}
|
||||
}
|
||||
|
||||
|
||||
pub fn grant_permission(
|
||||
&mut self,
|
||||
user_id: Uuid,
|
||||
|
|
@ -143,7 +127,7 @@ impl AccessReviewService {
|
|||
|
||||
self.permissions
|
||||
.entry(user_id)
|
||||
.or_insert_with(Vec::new)
|
||||
.or_default()
|
||||
.push(permission.clone());
|
||||
|
||||
log::info!(
|
||||
|
|
@ -156,7 +140,6 @@ impl AccessReviewService {
|
|||
Ok(permission)
|
||||
}
|
||||
|
||||
|
||||
pub fn revoke_permission(&mut self, permission_id: Uuid, revoked_by: Uuid) -> Result<()> {
|
||||
for permissions in self.permissions.values_mut() {
|
||||
if let Some(perm) = permissions.iter_mut().find(|p| p.id == permission_id) {
|
||||
|
|
@ -173,7 +156,6 @@ impl AccessReviewService {
|
|||
Err(anyhow!("Permission not found"))
|
||||
}
|
||||
|
||||
|
||||
pub fn check_access(
|
||||
&mut self,
|
||||
user_id: Uuid,
|
||||
|
|
@ -185,22 +167,19 @@ impl AccessReviewService {
|
|||
if let Some(permissions) = user_permissions {
|
||||
for perm in permissions {
|
||||
if perm.resource_id == resource_id && perm.is_active {
|
||||
|
||||
if let Some(expires) = perm.expires_at {
|
||||
if expires < Utc::now() {
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
if self.has_sufficient_access(&perm.access_level, &required_level) {
|
||||
if Self::has_sufficient_access(&perm.access_level, &required_level) {
|
||||
return Ok(true);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
let violation = AccessViolation {
|
||||
id: Uuid::new_v4(),
|
||||
user_id,
|
||||
|
|
@ -216,8 +195,7 @@ impl AccessReviewService {
|
|||
Ok(false)
|
||||
}
|
||||
|
||||
|
||||
fn has_sufficient_access(&self, user_level: &AccessLevel, required: &AccessLevel) -> bool {
|
||||
fn has_sufficient_access(user_level: &AccessLevel, required: &AccessLevel) -> bool {
|
||||
match required {
|
||||
AccessLevel::Read => true,
|
||||
AccessLevel::Write => matches!(
|
||||
|
|
@ -229,7 +207,6 @@ impl AccessReviewService {
|
|||
}
|
||||
}
|
||||
|
||||
|
||||
pub fn create_review_request(
|
||||
&mut self,
|
||||
user_id: Uuid,
|
||||
|
|
@ -261,7 +238,6 @@ impl AccessReviewService {
|
|||
Ok(review)
|
||||
}
|
||||
|
||||
|
||||
pub fn process_review(
|
||||
&mut self,
|
||||
review_id: Uuid,
|
||||
|
|
@ -270,7 +246,6 @@ impl AccessReviewService {
|
|||
modified: Vec<(Uuid, AccessLevel)>,
|
||||
comments: String,
|
||||
) -> Result<AccessReviewResult> {
|
||||
|
||||
let (reviewer_id, user_id) = {
|
||||
let review = self
|
||||
.reviews
|
||||
|
|
@ -283,12 +258,10 @@ impl AccessReviewService {
|
|||
(review.reviewer_id, review.user_id)
|
||||
};
|
||||
|
||||
|
||||
for perm_id in &revoked {
|
||||
self.revoke_permission(*perm_id, reviewer_id)?;
|
||||
}
|
||||
|
||||
|
||||
for (perm_id, new_level) in &modified {
|
||||
if let Some(permissions) = self.permissions.get_mut(&user_id) {
|
||||
if let Some(perm) = permissions.iter_mut().find(|p| p.id == *perm_id) {
|
||||
|
|
@ -297,7 +270,6 @@ impl AccessReviewService {
|
|||
}
|
||||
}
|
||||
|
||||
|
||||
if let Some(review) = self.reviews.get_mut(&review_id) {
|
||||
review.status = ReviewStatus::Approved;
|
||||
review.comments = Some(comments.clone());
|
||||
|
|
@ -318,7 +290,6 @@ impl AccessReviewService {
|
|||
Ok(result)
|
||||
}
|
||||
|
||||
|
||||
pub fn get_expired_permissions(&self) -> Vec<AccessPermission> {
|
||||
let now = Utc::now();
|
||||
let mut expired = Vec::new();
|
||||
|
|
@ -336,7 +307,6 @@ impl AccessReviewService {
|
|||
expired
|
||||
}
|
||||
|
||||
|
||||
pub fn get_user_permissions(&self, user_id: Uuid) -> Vec<AccessPermission> {
|
||||
self.permissions
|
||||
.get(&user_id)
|
||||
|
|
@ -347,19 +317,17 @@ impl AccessReviewService {
|
|||
.collect()
|
||||
}
|
||||
|
||||
|
||||
pub fn get_pending_reviews(&self, reviewer_id: Option<Uuid>) -> Vec<AccessReviewRequest> {
|
||||
self.reviews
|
||||
.values()
|
||||
.filter(|r| {
|
||||
r.status == ReviewStatus::Pending
|
||||
&& reviewer_id.map_or(true, |id| r.reviewer_id == id)
|
||||
&& reviewer_id.is_none_or(|id| r.reviewer_id == id)
|
||||
})
|
||||
.cloned()
|
||||
.collect()
|
||||
}
|
||||
|
||||
|
||||
pub fn get_violations(
|
||||
&self,
|
||||
user_id: Option<Uuid>,
|
||||
|
|
@ -369,15 +337,14 @@ impl AccessReviewService {
|
|||
self.violations
|
||||
.iter()
|
||||
.filter(|v| {
|
||||
user_id.map_or(true, |id| v.user_id == id)
|
||||
&& severity.as_ref().map_or(true, |s| &v.severity == s)
|
||||
&& since.map_or(true, |d| v.occurred_at >= d)
|
||||
user_id.is_none_or(|id| v.user_id == id)
|
||||
&& severity.as_ref().is_none_or(|s| &v.severity == s)
|
||||
&& since.is_none_or(|d| v.occurred_at >= d)
|
||||
})
|
||||
.cloned()
|
||||
.collect()
|
||||
}
|
||||
|
||||
|
||||
pub fn generate_compliance_report(&self) -> AccessComplianceReport {
|
||||
let total_permissions = self.permissions.values().map(|p| p.len()).sum::<usize>();
|
||||
|
||||
|
|
@ -420,15 +387,12 @@ impl AccessReviewService {
|
|||
}
|
||||
}
|
||||
|
||||
|
||||
fn calculate_compliance_score(&self) -> f64 {
|
||||
let mut score = 100.0;
|
||||
|
||||
|
||||
let expired = self.get_expired_permissions().len();
|
||||
score -= expired as f64 * 2.0;
|
||||
|
||||
|
||||
let overdue_reviews = self
|
||||
.reviews
|
||||
.values()
|
||||
|
|
@ -436,7 +400,6 @@ impl AccessReviewService {
|
|||
.count();
|
||||
score -= overdue_reviews as f64 * 5.0;
|
||||
|
||||
|
||||
for violation in &self.violations {
|
||||
match violation.severity {
|
||||
ViolationSeverity::Low => score -= 1.0,
|
||||
|
|
@ -446,11 +409,10 @@ impl AccessReviewService {
|
|||
}
|
||||
}
|
||||
|
||||
score.max(0.0).min(100.0)
|
||||
score.clamp(0.0, 100.0)
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct AccessComplianceReport {
|
||||
pub generated_at: DateTime<Utc>,
|
||||
|
|
|
|||
|
|
@ -103,7 +103,7 @@ impl ScanStats {
|
|||
self.total += 1;
|
||||
}
|
||||
|
||||
pub fn merge(&mut self, other: &ScanStats) {
|
||||
pub fn merge(&mut self, other: &Self) {
|
||||
self.critical += other.critical;
|
||||
self.high += other.high;
|
||||
self.medium += other.medium;
|
||||
|
|
|
|||
|
|
@ -222,33 +222,30 @@ impl ComplianceMonitor {
|
|||
}
|
||||
}
|
||||
|
||||
pub async fn run_checks(
|
||||
&self,
|
||||
) -> Result<Vec<ComplianceCheckResult>, Box<dyn std::error::Error>> {
|
||||
pub fn run_checks(&self) -> Result<Vec<ComplianceCheckResult>, Box<dyn std::error::Error>> {
|
||||
let mut results = Vec::new();
|
||||
|
||||
for framework in &self.enabled_frameworks {
|
||||
let framework_results = self.check_framework(framework).await?;
|
||||
let framework_results = Self::check_framework(framework)?;
|
||||
results.extend(framework_results);
|
||||
}
|
||||
|
||||
Ok(results)
|
||||
}
|
||||
|
||||
async fn check_framework(
|
||||
&self,
|
||||
fn check_framework(
|
||||
framework: &ComplianceFramework,
|
||||
) -> Result<Vec<ComplianceCheckResult>, Box<dyn std::error::Error>> {
|
||||
match framework {
|
||||
ComplianceFramework::GDPR => self.check_gdpr(),
|
||||
ComplianceFramework::SOC2 => self.check_soc2(),
|
||||
ComplianceFramework::ISO27001 => self.check_iso27001(),
|
||||
ComplianceFramework::HIPAA => self.check_hipaa(),
|
||||
ComplianceFramework::PCIDSS => self.check_pci_dss(),
|
||||
ComplianceFramework::GDPR => Self::check_gdpr(),
|
||||
ComplianceFramework::SOC2 => Self::check_soc2(),
|
||||
ComplianceFramework::ISO27001 => Self::check_iso27001(),
|
||||
ComplianceFramework::HIPAA => Self::check_hipaa(),
|
||||
ComplianceFramework::PCIDSS => Self::check_pci_dss(),
|
||||
}
|
||||
}
|
||||
|
||||
fn check_gdpr(&self) -> Result<Vec<ComplianceCheckResult>, Box<dyn std::error::Error>> {
|
||||
fn check_gdpr() -> Result<Vec<ComplianceCheckResult>, Box<dyn std::error::Error>> {
|
||||
Ok(vec![
|
||||
ComplianceCheckResult {
|
||||
framework: ComplianceFramework::GDPR,
|
||||
|
|
@ -283,7 +280,7 @@ impl ComplianceMonitor {
|
|||
])
|
||||
}
|
||||
|
||||
fn check_soc2(&self) -> Result<Vec<ComplianceCheckResult>, Box<dyn std::error::Error>> {
|
||||
fn check_soc2() -> Result<Vec<ComplianceCheckResult>, Box<dyn std::error::Error>> {
|
||||
Ok(vec![ComplianceCheckResult {
|
||||
framework: ComplianceFramework::SOC2,
|
||||
control_id: "cc6.1".to_string(),
|
||||
|
|
@ -296,7 +293,7 @@ impl ComplianceMonitor {
|
|||
}])
|
||||
}
|
||||
|
||||
fn check_iso27001(&self) -> Result<Vec<ComplianceCheckResult>, Box<dyn std::error::Error>> {
|
||||
fn check_iso27001() -> Result<Vec<ComplianceCheckResult>, Box<dyn std::error::Error>> {
|
||||
Ok(vec![ComplianceCheckResult {
|
||||
framework: ComplianceFramework::ISO27001,
|
||||
control_id: "a.8.1".to_string(),
|
||||
|
|
@ -309,11 +306,11 @@ impl ComplianceMonitor {
|
|||
}])
|
||||
}
|
||||
|
||||
fn check_hipaa(&self) -> Result<Vec<ComplianceCheckResult>, Box<dyn std::error::Error>> {
|
||||
fn check_hipaa() -> Result<Vec<ComplianceCheckResult>, Box<dyn std::error::Error>> {
|
||||
Ok(vec![])
|
||||
}
|
||||
|
||||
fn check_pci_dss(&self) -> Result<Vec<ComplianceCheckResult>, Box<dyn std::error::Error>> {
|
||||
fn check_pci_dss() -> Result<Vec<ComplianceCheckResult>, Box<dyn std::error::Error>> {
|
||||
Ok(vec![])
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -1,15 +1,9 @@
|
|||
|
||||
|
||||
|
||||
|
||||
|
||||
use anyhow::{anyhow, Result};
|
||||
use chrono::{DateTime, Duration, Utc};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::collections::HashMap;
|
||||
use uuid::Uuid;
|
||||
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
|
||||
pub enum PolicyType {
|
||||
AccessControl,
|
||||
|
|
@ -23,7 +17,6 @@ pub enum PolicyType {
|
|||
ComplianceStandard,
|
||||
}
|
||||
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
|
||||
pub enum PolicyStatus {
|
||||
Active,
|
||||
|
|
@ -32,7 +25,6 @@ pub enum PolicyStatus {
|
|||
Archived,
|
||||
}
|
||||
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize)]
|
||||
pub enum PolicySeverity {
|
||||
Low,
|
||||
|
|
@ -41,7 +33,6 @@ pub enum PolicySeverity {
|
|||
Critical,
|
||||
}
|
||||
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct SecurityPolicy {
|
||||
pub id: Uuid,
|
||||
|
|
@ -58,7 +49,6 @@ pub struct SecurityPolicy {
|
|||
pub tags: Vec<String>,
|
||||
}
|
||||
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct PolicyRule {
|
||||
pub id: Uuid,
|
||||
|
|
@ -68,7 +58,6 @@ pub struct PolicyRule {
|
|||
pub parameters: HashMap<String, String>,
|
||||
}
|
||||
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub enum PolicyAction {
|
||||
Allow,
|
||||
|
|
@ -78,7 +67,6 @@ pub enum PolicyAction {
|
|||
Log,
|
||||
}
|
||||
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct PolicyViolation {
|
||||
pub id: Uuid,
|
||||
|
|
@ -93,7 +81,6 @@ pub struct PolicyViolation {
|
|||
pub resolved: bool,
|
||||
}
|
||||
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct PolicyCheckResult {
|
||||
pub policy_id: Uuid,
|
||||
|
|
@ -103,7 +90,6 @@ pub struct PolicyCheckResult {
|
|||
pub timestamp: DateTime<Utc>,
|
||||
}
|
||||
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct PolicyChecker {
|
||||
policies: HashMap<Uuid, SecurityPolicy>,
|
||||
|
|
@ -112,7 +98,6 @@ pub struct PolicyChecker {
|
|||
}
|
||||
|
||||
impl PolicyChecker {
|
||||
|
||||
pub fn new() -> Self {
|
||||
let mut checker = Self {
|
||||
policies: HashMap::new(),
|
||||
|
|
@ -120,14 +105,11 @@ impl PolicyChecker {
|
|||
check_history: Vec::new(),
|
||||
};
|
||||
|
||||
|
||||
checker.initialize_default_policies();
|
||||
checker
|
||||
}
|
||||
|
||||
|
||||
fn initialize_default_policies(&mut self) {
|
||||
|
||||
let password_policy = SecurityPolicy {
|
||||
id: Uuid::new_v4(),
|
||||
name: "Password Strength Policy".to_string(),
|
||||
|
|
@ -161,7 +143,6 @@ impl PolicyChecker {
|
|||
|
||||
self.policies.insert(password_policy.id, password_policy);
|
||||
|
||||
|
||||
let session_policy = SecurityPolicy {
|
||||
id: Uuid::new_v4(),
|
||||
name: "Session Timeout Policy".to_string(),
|
||||
|
|
@ -174,9 +155,7 @@ impl PolicyChecker {
|
|||
name: "Maximum Session Duration".to_string(),
|
||||
condition: "session.duration <= 8_hours".to_string(),
|
||||
action: PolicyAction::Enforce,
|
||||
parameters: HashMap::from([
|
||||
("max_duration".to_string(), "28800".to_string()),
|
||||
]),
|
||||
parameters: HashMap::from([("max_duration".to_string(), "28800".to_string())]),
|
||||
}],
|
||||
created_at: Utc::now(),
|
||||
updated_at: Utc::now(),
|
||||
|
|
@ -188,7 +167,6 @@ impl PolicyChecker {
|
|||
self.policies.insert(session_policy.id, session_policy);
|
||||
}
|
||||
|
||||
|
||||
pub fn add_policy(&mut self, policy: SecurityPolicy) -> Result<()> {
|
||||
if self.policies.contains_key(&policy.id) {
|
||||
return Err(anyhow!("Policy already exists"));
|
||||
|
|
@ -199,7 +177,6 @@ impl PolicyChecker {
|
|||
Ok(())
|
||||
}
|
||||
|
||||
|
||||
pub fn update_policy(&mut self, policy_id: Uuid, updates: SecurityPolicy) -> Result<()> {
|
||||
if let Some(existing) = self.policies.get_mut(&policy_id) {
|
||||
*existing = updates;
|
||||
|
|
@ -211,7 +188,6 @@ impl PolicyChecker {
|
|||
}
|
||||
}
|
||||
|
||||
|
||||
pub fn check_password_policy(&mut self, password: &str) -> PolicyCheckResult {
|
||||
let policy = self
|
||||
.policies
|
||||
|
|
@ -225,7 +201,6 @@ impl PolicyChecker {
|
|||
let mut violations = Vec::new();
|
||||
let mut warnings = Vec::new();
|
||||
|
||||
|
||||
if password.len() < 12 {
|
||||
violations.push(PolicyViolation {
|
||||
id: Uuid::new_v4(),
|
||||
|
|
@ -244,7 +219,6 @@ impl PolicyChecker {
|
|||
});
|
||||
}
|
||||
|
||||
|
||||
let has_uppercase = password.chars().any(|c| c.is_uppercase());
|
||||
let has_lowercase = password.chars().any(|c| c.is_lowercase());
|
||||
let has_digit = password.chars().any(|c| c.is_numeric());
|
||||
|
|
@ -265,7 +239,6 @@ impl PolicyChecker {
|
|||
});
|
||||
}
|
||||
|
||||
|
||||
if password.to_lowercase().contains("password") {
|
||||
warnings.push("Password contains the word 'password'".to_string());
|
||||
}
|
||||
|
|
@ -293,7 +266,6 @@ impl PolicyChecker {
|
|||
}
|
||||
}
|
||||
|
||||
|
||||
pub fn check_session_policy(&mut self, session_duration_seconds: u64) -> PolicyCheckResult {
|
||||
let policy = self
|
||||
.policies
|
||||
|
|
@ -307,7 +279,6 @@ impl PolicyChecker {
|
|||
let mut violations = Vec::new();
|
||||
|
||||
if session_duration_seconds > 28800 {
|
||||
|
||||
violations.push(PolicyViolation {
|
||||
id: Uuid::new_v4(),
|
||||
policy_id: policy.id,
|
||||
|
|
@ -348,9 +319,7 @@ impl PolicyChecker {
|
|||
}
|
||||
}
|
||||
|
||||
|
||||
pub fn check_all_policies(&mut self, context: &PolicyContext) -> Vec<PolicyCheckResult> {
|
||||
|
||||
let active_policy_ids: Vec<Uuid> = self
|
||||
.policies
|
||||
.iter()
|
||||
|
|
@ -370,7 +339,6 @@ impl PolicyChecker {
|
|||
results
|
||||
}
|
||||
|
||||
|
||||
pub fn check_policy(
|
||||
&mut self,
|
||||
policy_id: Uuid,
|
||||
|
|
@ -386,7 +354,7 @@ impl PolicyChecker {
|
|||
let warnings = Vec::new();
|
||||
|
||||
for rule in &policy.rules {
|
||||
if !self.evaluate_rule(rule, context) {
|
||||
if !Self::evaluate_rule(rule, context) {
|
||||
violations.push(PolicyViolation {
|
||||
id: Uuid::new_v4(),
|
||||
policy_id: policy.id,
|
||||
|
|
@ -416,18 +384,10 @@ impl PolicyChecker {
|
|||
Ok(result)
|
||||
}
|
||||
|
||||
|
||||
fn evaluate_rule(&self, rule: &PolicyRule, _context: &PolicyContext) -> bool {
|
||||
|
||||
|
||||
match rule.action {
|
||||
PolicyAction::Allow => true,
|
||||
PolicyAction::Deny => false,
|
||||
_ => true,
|
||||
}
|
||||
fn evaluate_rule(rule: &PolicyRule, _context: &PolicyContext) -> bool {
|
||||
!matches!(rule.action, PolicyAction::Deny)
|
||||
}
|
||||
|
||||
|
||||
pub fn get_violations(&self, unresolved_only: bool) -> Vec<PolicyViolation> {
|
||||
if unresolved_only {
|
||||
self.violations
|
||||
|
|
@ -440,7 +400,6 @@ impl PolicyChecker {
|
|||
}
|
||||
}
|
||||
|
||||
|
||||
pub fn resolve_violation(&mut self, violation_id: Uuid) -> Result<()> {
|
||||
if let Some(violation) = self.violations.iter_mut().find(|v| v.id == violation_id) {
|
||||
violation.resolved = true;
|
||||
|
|
@ -451,7 +410,6 @@ impl PolicyChecker {
|
|||
}
|
||||
}
|
||||
|
||||
|
||||
pub fn get_compliance_report(&self) -> PolicyComplianceReport {
|
||||
let total_policies = self.policies.len();
|
||||
let active_policies = self
|
||||
|
|
@ -473,11 +431,11 @@ impl PolicyChecker {
|
|||
.filter(|c| c.timestamp > Utc::now() - Duration::days(7))
|
||||
.count();
|
||||
|
||||
let compliance_rate = if !self.check_history.is_empty() {
|
||||
let compliance_rate = if self.check_history.is_empty() {
|
||||
100.0
|
||||
} else {
|
||||
let passed = self.check_history.iter().filter(|c| c.passed).count();
|
||||
(passed as f64 / self.check_history.len() as f64) * 100.0
|
||||
} else {
|
||||
100.0
|
||||
};
|
||||
|
||||
PolicyComplianceReport {
|
||||
|
|
@ -493,7 +451,6 @@ impl PolicyChecker {
|
|||
}
|
||||
}
|
||||
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct PolicyContext {
|
||||
pub user_id: Option<Uuid>,
|
||||
|
|
@ -502,7 +459,6 @@ pub struct PolicyContext {
|
|||
pub parameters: HashMap<String, String>,
|
||||
}
|
||||
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct PolicyComplianceReport {
|
||||
pub generated_at: DateTime<Utc>,
|
||||
|
|
|
|||
|
|
@ -1,15 +1,9 @@
|
|||
|
||||
|
||||
|
||||
|
||||
|
||||
use anyhow::{anyhow, Result};
|
||||
use chrono::{DateTime, Duration, Utc};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::collections::HashMap;
|
||||
use uuid::Uuid;
|
||||
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
|
||||
pub enum TrainingType {
|
||||
SecurityAwareness,
|
||||
|
|
@ -22,7 +16,6 @@ pub enum TrainingType {
|
|||
EmergencyProcedures,
|
||||
}
|
||||
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)]
|
||||
pub enum TrainingStatus {
|
||||
NotStarted,
|
||||
|
|
@ -33,7 +26,6 @@ pub enum TrainingStatus {
|
|||
Exempted,
|
||||
}
|
||||
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize)]
|
||||
pub enum TrainingPriority {
|
||||
Low,
|
||||
|
|
@ -42,7 +34,6 @@ pub enum TrainingPriority {
|
|||
Critical,
|
||||
}
|
||||
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct TrainingCourse {
|
||||
pub id: Uuid,
|
||||
|
|
@ -59,7 +50,6 @@ pub struct TrainingCourse {
|
|||
pub max_attempts: u32,
|
||||
}
|
||||
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct TrainingAssignment {
|
||||
pub id: Uuid,
|
||||
|
|
@ -75,7 +65,6 @@ pub struct TrainingAssignment {
|
|||
pub notes: Option<String>,
|
||||
}
|
||||
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct TrainingAttempt {
|
||||
pub id: Uuid,
|
||||
|
|
@ -87,7 +76,6 @@ pub struct TrainingAttempt {
|
|||
pub time_spent_minutes: Option<u32>,
|
||||
}
|
||||
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct TrainingCertificate {
|
||||
pub id: Uuid,
|
||||
|
|
@ -99,7 +87,6 @@ pub struct TrainingCertificate {
|
|||
pub verification_code: String,
|
||||
}
|
||||
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct ComplianceStatus {
|
||||
pub user_id: Uuid,
|
||||
|
|
@ -111,7 +98,6 @@ pub struct ComplianceStatus {
|
|||
pub compliance_percentage: f64,
|
||||
}
|
||||
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct TrainingTracker {
|
||||
courses: HashMap<Uuid, TrainingCourse>,
|
||||
|
|
@ -121,7 +107,6 @@ pub struct TrainingTracker {
|
|||
}
|
||||
|
||||
impl TrainingTracker {
|
||||
|
||||
pub fn new() -> Self {
|
||||
let mut tracker = Self {
|
||||
courses: HashMap::new(),
|
||||
|
|
@ -130,12 +115,10 @@ impl TrainingTracker {
|
|||
user_roles: HashMap::new(),
|
||||
};
|
||||
|
||||
|
||||
tracker.initialize_default_courses();
|
||||
tracker
|
||||
}
|
||||
|
||||
|
||||
fn initialize_default_courses(&mut self) {
|
||||
let security_awareness = TrainingCourse {
|
||||
id: Uuid::new_v4(),
|
||||
|
|
@ -173,7 +156,6 @@ impl TrainingTracker {
|
|||
self.courses.insert(data_protection.id, data_protection);
|
||||
}
|
||||
|
||||
|
||||
pub fn create_course(&mut self, course: TrainingCourse) -> Result<()> {
|
||||
if self.courses.contains_key(&course.id) {
|
||||
return Err(anyhow!("Course already exists"));
|
||||
|
|
@ -184,7 +166,6 @@ impl TrainingTracker {
|
|||
Ok(())
|
||||
}
|
||||
|
||||
|
||||
pub fn assign_training(
|
||||
&mut self,
|
||||
user_id: Uuid,
|
||||
|
|
@ -219,7 +200,6 @@ impl TrainingTracker {
|
|||
Ok(assignment)
|
||||
}
|
||||
|
||||
|
||||
pub fn start_training(&mut self, assignment_id: Uuid) -> Result<TrainingAttempt> {
|
||||
let assignment = self
|
||||
.assignments
|
||||
|
|
@ -251,14 +231,12 @@ impl TrainingTracker {
|
|||
Ok(attempt)
|
||||
}
|
||||
|
||||
|
||||
pub fn complete_training(
|
||||
&mut self,
|
||||
assignment_id: Uuid,
|
||||
attempt_id: Uuid,
|
||||
score: u32,
|
||||
) -> Result<bool> {
|
||||
|
||||
let (course_id, passing_score, validity_days, max_attempts, course_title) = {
|
||||
let assignment = self
|
||||
.assignments
|
||||
|
|
@ -268,7 +246,13 @@ impl TrainingTracker {
|
|||
.courses
|
||||
.get(&assignment.course_id)
|
||||
.ok_or_else(|| anyhow!("Course not found"))?;
|
||||
(course.id, course.passing_score, course.validity_days, course.max_attempts, course.title.clone())
|
||||
(
|
||||
course.id,
|
||||
course.passing_score,
|
||||
course.validity_days,
|
||||
course.max_attempts,
|
||||
course.title.clone(),
|
||||
)
|
||||
};
|
||||
|
||||
let assignment = self
|
||||
|
|
@ -287,7 +271,6 @@ impl TrainingTracker {
|
|||
let time_spent = (end_time - start_time).num_minutes() as u32;
|
||||
let passed = score >= passing_score;
|
||||
|
||||
|
||||
assignment.attempts[attempt_idx].end_time = Some(end_time);
|
||||
assignment.attempts[attempt_idx].score = Some(score);
|
||||
assignment.attempts[attempt_idx].time_spent_minutes = Some(time_spent);
|
||||
|
|
@ -301,7 +284,6 @@ impl TrainingTracker {
|
|||
assignment.completion_date = Some(end_time);
|
||||
assignment.expiry_date = Some(end_time + Duration::days(validity_days));
|
||||
|
||||
|
||||
let certificate = TrainingCertificate {
|
||||
id: Uuid::new_v4(),
|
||||
user_id,
|
||||
|
|
@ -330,7 +312,6 @@ impl TrainingTracker {
|
|||
Ok(passed)
|
||||
}
|
||||
|
||||
|
||||
pub fn get_compliance_status(&self, user_id: Uuid) -> ComplianceStatus {
|
||||
let user_roles = self
|
||||
.user_roles
|
||||
|
|
@ -351,7 +332,6 @@ impl TrainingTracker {
|
|||
{
|
||||
required_trainings.push(course.id);
|
||||
|
||||
|
||||
let assignment = self
|
||||
.assignments
|
||||
.values()
|
||||
|
|
@ -400,7 +380,6 @@ impl TrainingTracker {
|
|||
}
|
||||
}
|
||||
|
||||
|
||||
pub fn get_training_report(&self) -> TrainingReport {
|
||||
let total_courses = self.courses.len();
|
||||
let total_assignments = self.assignments.len();
|
||||
|
|
@ -441,7 +420,6 @@ impl TrainingTracker {
|
|||
}
|
||||
}
|
||||
|
||||
|
||||
fn calculate_average_score(&self) -> f64 {
|
||||
let mut total_score = 0;
|
||||
let mut count = 0;
|
||||
|
|
@ -458,16 +436,14 @@ impl TrainingTracker {
|
|||
if count == 0 {
|
||||
0.0
|
||||
} else {
|
||||
total_score as f64 / count as f64
|
||||
f64::from(total_score) / f64::from(count)
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
pub fn set_user_roles(&mut self, user_id: Uuid, roles: Vec<String>) {
|
||||
self.user_roles.insert(user_id, roles);
|
||||
}
|
||||
|
||||
|
||||
pub fn get_overdue_trainings(&self) -> Vec<TrainingAssignment> {
|
||||
self.assignments
|
||||
.values()
|
||||
|
|
@ -476,7 +452,6 @@ impl TrainingTracker {
|
|||
.collect()
|
||||
}
|
||||
|
||||
|
||||
pub fn get_expiring_certificates(&self, days_ahead: i64) -> Vec<TrainingCertificate> {
|
||||
let cutoff = Utc::now() + Duration::days(days_ahead);
|
||||
self.certificates
|
||||
|
|
@ -487,7 +462,6 @@ impl TrainingTracker {
|
|||
}
|
||||
}
|
||||
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct TrainingReport {
|
||||
pub generated_at: DateTime<Utc>,
|
||||
|
|
|
|||
|
|
@ -142,7 +142,7 @@ impl VoiceAdapter {
|
|||
pub async fn add_connection(&self, session_id: String, tx: mpsc::Sender<BotResponse>) {
|
||||
self.connections.lock().await.insert(session_id, tx);
|
||||
}
|
||||
pub async fn send_voice_response(
|
||||
pub fn send_voice_response(
|
||||
&self,
|
||||
session_id: &str,
|
||||
text: &str,
|
||||
|
|
@ -159,6 +159,5 @@ impl ChannelAdapter for VoiceAdapter {
|
|||
) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
|
||||
info!("Sending voice response to: {}", response.user_id);
|
||||
self.send_voice_response(&response.session_id, &response.content)
|
||||
.await
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -45,7 +45,7 @@ impl KbContextManager {
|
|||
}
|
||||
}
|
||||
|
||||
pub async fn get_active_kbs(&self, session_id: Uuid) -> Result<Vec<SessionKbAssociation>> {
|
||||
pub fn get_active_kbs(&self, session_id: Uuid) -> Result<Vec<SessionKbAssociation>> {
|
||||
let mut conn = self.db_pool.get()?;
|
||||
|
||||
let query = diesel::sql_query(
|
||||
|
|
@ -88,7 +88,7 @@ impl KbContextManager {
|
|||
max_results_per_kb: usize,
|
||||
max_total_tokens: usize,
|
||||
) -> Result<Vec<KbContext>> {
|
||||
let active_kbs = self.get_active_kbs(session_id).await?;
|
||||
let active_kbs = self.get_active_kbs(session_id)?;
|
||||
|
||||
if active_kbs.is_empty() {
|
||||
debug!("No active KBs for session {}", session_id);
|
||||
|
|
@ -226,7 +226,7 @@ impl KbContextManager {
|
|||
context_parts.join("\n")
|
||||
}
|
||||
|
||||
pub async fn get_active_tools(&self, session_id: Uuid) -> Result<Vec<String>> {
|
||||
pub fn get_active_tools(&self, session_id: Uuid) -> Result<Vec<String>> {
|
||||
let mut conn = self.db_pool.get()?;
|
||||
|
||||
let query = diesel::sql_query(
|
||||
|
|
|
|||
|
|
@ -65,7 +65,7 @@ impl BotOrchestrator {
|
|||
}
|
||||
}
|
||||
|
||||
pub async fn mount_all_bots(&self) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
|
||||
pub fn mount_all_bots(&self) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
|
||||
info!("mount_all_bots called");
|
||||
Ok(())
|
||||
}
|
||||
|
|
@ -325,6 +325,7 @@ impl BotOrchestrator {
|
|||
}
|
||||
}
|
||||
|
||||
#[allow(clippy::unused_async)]
|
||||
pub async fn websocket_handler(
|
||||
ws: WebSocketUpgrade,
|
||||
State(state): State<Arc<AppState>>,
|
||||
|
|
@ -446,7 +447,7 @@ async fn handle_websocket(
|
|||
info!("WebSocket disconnected for session: {}", session_id);
|
||||
}
|
||||
|
||||
pub async fn create_bot_handler(
|
||||
pub fn create_bot_handler(
|
||||
Extension(state): Extension<Arc<AppState>>,
|
||||
Json(payload): Json<HashMap<String, String>>,
|
||||
) -> impl IntoResponse {
|
||||
|
|
@ -456,7 +457,7 @@ pub async fn create_bot_handler(
|
|||
.unwrap_or_else(|| "default".to_string());
|
||||
|
||||
let orchestrator = BotOrchestrator::new(state);
|
||||
if let Err(e) = orchestrator.mount_all_bots().await {
|
||||
if let Err(e) = orchestrator.mount_all_bots() {
|
||||
error!("Failed to mount bots: {}", e);
|
||||
}
|
||||
|
||||
|
|
@ -466,14 +467,14 @@ pub async fn create_bot_handler(
|
|||
)
|
||||
}
|
||||
|
||||
pub async fn mount_bot_handler(
|
||||
pub fn mount_bot_handler(
|
||||
Extension(state): Extension<Arc<AppState>>,
|
||||
Json(payload): Json<HashMap<String, String>>,
|
||||
) -> impl IntoResponse {
|
||||
let bot_guid = payload.get("bot_guid").cloned().unwrap_or_default();
|
||||
|
||||
let orchestrator = BotOrchestrator::new(state);
|
||||
if let Err(e) = orchestrator.mount_all_bots().await {
|
||||
if let Err(e) = orchestrator.mount_all_bots() {
|
||||
error!("Failed to mount bot: {}", e);
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -382,7 +382,7 @@ impl ConfigManager {
|
|||
Ok(value)
|
||||
}
|
||||
|
||||
pub async fn get_bot_config_value(
|
||||
pub fn get_bot_config_value(
|
||||
&self,
|
||||
target_bot_id: &uuid::Uuid,
|
||||
key: &str,
|
||||
|
|
@ -430,4 +430,28 @@ impl ConfigManager {
|
|||
}
|
||||
Ok(updated)
|
||||
}
|
||||
|
||||
/// Set a single configuration value for a bot (upsert)
|
||||
pub fn set_config(
|
||||
&self,
|
||||
target_bot_id: &uuid::Uuid,
|
||||
key: &str,
|
||||
value: &str,
|
||||
) -> Result<(), diesel::result::Error> {
|
||||
let mut conn = self.get_conn()?;
|
||||
let new_id: uuid::Uuid = uuid::Uuid::new_v4();
|
||||
|
||||
diesel::sql_query(
|
||||
"INSERT INTO bot_configuration (id, bot_id, config_key, config_value, config_type) \
|
||||
VALUES ($1, $2, $3, $4, 'string') \
|
||||
ON CONFLICT (bot_id, config_key) DO UPDATE SET config_value = EXCLUDED.config_value, updated_at = NOW()"
|
||||
)
|
||||
.bind::<diesel::sql_types::Uuid, _>(new_id)
|
||||
.bind::<diesel::sql_types::Uuid, _>(target_bot_id)
|
||||
.bind::<diesel::sql_types::Text, _>(key)
|
||||
.bind::<diesel::sql_types::Text, _>(value)
|
||||
.execute(&mut conn)?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -74,17 +74,17 @@ impl UserProvisioningService {
|
|||
self.base_url
|
||||
);
|
||||
|
||||
let user_id = self.create_database_user(account).await?;
|
||||
let user_id = self.create_database_user(account)?;
|
||||
|
||||
for bot_access in &account.bots {
|
||||
self.create_s3_home(account, bot_access).await?;
|
||||
}
|
||||
|
||||
if let Err(e) = self.setup_email_account(account).await {
|
||||
if let Err(e) = self.setup_email_account(account) {
|
||||
log::warn!("Email account creation failed: {}", e);
|
||||
}
|
||||
|
||||
self.setup_oauth_config(&user_id, account).await?;
|
||||
self.setup_oauth_config(&user_id, account)?;
|
||||
|
||||
let profile_url = self.build_profile_url(&account.username);
|
||||
log::info!(
|
||||
|
|
@ -95,7 +95,7 @@ impl UserProvisioningService {
|
|||
Ok(())
|
||||
}
|
||||
|
||||
async fn create_database_user(&self, account: &UserAccount) -> Result<String> {
|
||||
fn create_database_user(&self, account: &UserAccount) -> Result<String> {
|
||||
use crate::shared::models::schema::users;
|
||||
use argon2::{
|
||||
password_hash::{rand_core::OsRng, SaltString},
|
||||
|
|
@ -179,7 +179,7 @@ impl UserProvisioningService {
|
|||
Ok(())
|
||||
}
|
||||
|
||||
async fn setup_email_account(&self, account: &UserAccount) -> Result<()> {
|
||||
fn setup_email_account(&self, account: &UserAccount) -> Result<()> {
|
||||
use crate::shared::models::schema::user_email_accounts;
|
||||
use diesel::prelude::*;
|
||||
|
||||
|
|
@ -207,7 +207,7 @@ impl UserProvisioningService {
|
|||
Ok(())
|
||||
}
|
||||
|
||||
async fn setup_oauth_config(&self, _user_id: &str, account: &UserAccount) -> Result<()> {
|
||||
fn setup_oauth_config(&self, _user_id: &str, account: &UserAccount) -> Result<()> {
|
||||
use crate::shared::models::schema::bot_configuration;
|
||||
use diesel::prelude::*;
|
||||
|
||||
|
|
@ -247,14 +247,14 @@ impl UserProvisioningService {
|
|||
log::info!("Deprovisioning user: {}", username);
|
||||
|
||||
self.remove_s3_data(username).await?;
|
||||
self.remove_email_config(username).await?;
|
||||
self.remove_user_from_db(username).await?;
|
||||
self.remove_email_config(username)?;
|
||||
self.remove_user_from_db(username)?;
|
||||
|
||||
log::info!("User {} deprovisioned successfully", username);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn remove_user_from_db(&self, username: &str) -> Result<()> {
|
||||
fn remove_user_from_db(&self, username: &str) -> Result<()> {
|
||||
use crate::shared::models::schema::users;
|
||||
use diesel::prelude::*;
|
||||
|
||||
|
|
@ -305,7 +305,7 @@ impl UserProvisioningService {
|
|||
Ok(())
|
||||
}
|
||||
|
||||
async fn remove_email_config(&self, username: &str) -> Result<()> {
|
||||
fn remove_email_config(&self, username: &str) -> Result<()> {
|
||||
use crate::shared::models::schema::user_email_accounts;
|
||||
use diesel::prelude::*;
|
||||
|
||||
|
|
|
|||
|
|
@ -195,12 +195,13 @@ impl DocumentProcessor {
|
|||
"pdftotext failed for {}, trying library extraction",
|
||||
file_path.display()
|
||||
);
|
||||
self.extract_pdf_with_library(file_path).await
|
||||
self.extract_pdf_with_library(file_path)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async fn extract_pdf_with_library(&self, file_path: &Path) -> Result<String> {
|
||||
fn extract_pdf_with_library(&self, file_path: &Path) -> Result<String> {
|
||||
let _ = self; // Suppress unused self warning
|
||||
use pdf_extract::extract_text;
|
||||
|
||||
match extract_text(file_path) {
|
||||
|
|
|
|||
|
|
@ -164,7 +164,7 @@ impl KbEmbeddingGenerator {
|
|||
Ok(embeddings) => Ok(embeddings),
|
||||
Err(e) => {
|
||||
warn!("Local embedding service failed: {}, trying OpenAI API", e);
|
||||
self.generate_openai_embeddings(&texts).await
|
||||
self.generate_openai_embeddings(&texts)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -211,7 +211,8 @@ impl KbEmbeddingGenerator {
|
|||
Ok(embeddings)
|
||||
}
|
||||
|
||||
async fn generate_openai_embeddings(&self, _texts: &[String]) -> Result<Vec<Embedding>> {
|
||||
fn generate_openai_embeddings(&self, _texts: &[String]) -> Result<Vec<Embedding>> {
|
||||
let _ = self; // Suppress unused self warning
|
||||
Err(anyhow::anyhow!(
|
||||
"OpenAI embeddings not configured - use local embedding service"
|
||||
))
|
||||
|
|
|
|||
|
|
@ -131,8 +131,7 @@ impl KbIndexer {
|
|||
indexed_documents += 1;
|
||||
}
|
||||
|
||||
self.update_collection_metadata(&collection_name, bot_name, kb_name, total_chunks)
|
||||
.await?;
|
||||
self.update_collection_metadata(&collection_name, bot_name, kb_name, total_chunks)?;
|
||||
|
||||
Ok(IndexingResult {
|
||||
collection_name,
|
||||
|
|
@ -303,13 +302,14 @@ impl KbIndexer {
|
|||
Ok(())
|
||||
}
|
||||
|
||||
async fn update_collection_metadata(
|
||||
fn update_collection_metadata(
|
||||
&self,
|
||||
collection_name: &str,
|
||||
bot_name: &str,
|
||||
kb_name: &str,
|
||||
document_count: usize,
|
||||
) -> Result<()> {
|
||||
let _ = self;
|
||||
info!(
|
||||
"Updated collection {} metadata: bot={}, kb={}, docs={}",
|
||||
collection_name, bot_name, kb_name, document_count
|
||||
|
|
|
|||
|
|
@ -45,7 +45,7 @@ impl WebsiteCrawlerService {
|
|||
|
||||
*service.running.write().await = true;
|
||||
|
||||
if let Err(e) = service.check_and_crawl_websites().await {
|
||||
if let Err(e) = service.check_and_crawl_websites() {
|
||||
error!("Error in website crawler service: {}", e);
|
||||
}
|
||||
|
||||
|
|
@ -54,7 +54,7 @@ impl WebsiteCrawlerService {
|
|||
})
|
||||
}
|
||||
|
||||
async fn check_and_crawl_websites(&self) -> Result<(), Box<dyn std::error::Error>> {
|
||||
fn check_and_crawl_websites(&self) -> Result<(), Box<dyn std::error::Error>> {
|
||||
info!("Checking for websites that need recrawling");
|
||||
|
||||
let mut conn = self.db_pool.get()?;
|
||||
|
|
@ -100,14 +100,12 @@ impl WebsiteCrawlerService {
|
|||
|
||||
let website_max_depth = config_manager
|
||||
.get_bot_config_value(&website.bot_id, "website-max-depth")
|
||||
.await
|
||||
.ok()
|
||||
.and_then(|v| v.parse::<usize>().ok())
|
||||
.unwrap_or(website.max_depth as usize);
|
||||
|
||||
let website_max_pages = config_manager
|
||||
.get_bot_config_value(&website.bot_id, "website-max-pages")
|
||||
.await
|
||||
.ok()
|
||||
.and_then(|v| v.parse::<usize>().ok())
|
||||
.unwrap_or(website.max_pages as usize);
|
||||
|
|
@ -246,9 +244,9 @@ pub async fn ensure_crawler_service_running(
|
|||
Arc::clone(kb_manager),
|
||||
));
|
||||
|
||||
let _ = service.start().await;
|
||||
let _ = service.start();
|
||||
|
||||
info!("Website crawler service started");
|
||||
info!("Website crawler service initialized");
|
||||
|
||||
Ok(())
|
||||
} else {
|
||||
|
|
|
|||
|
|
@ -27,7 +27,7 @@ impl OAuthProvider {
|
|||
]
|
||||
}
|
||||
|
||||
pub fn from_str(s: &str) -> Option<Self> {
|
||||
pub fn parse(s: &str) -> Option<Self> {
|
||||
match s.to_lowercase().as_str() {
|
||||
"google" => Some(Self::Google),
|
||||
"discord" => Some(Self::Discord),
|
||||
|
|
|
|||
|
|
@ -77,7 +77,7 @@ async fn start_oauth(
|
|||
Path(provider_name): Path<String>,
|
||||
Query(params): Query<OAuthStartParams>,
|
||||
) -> Response {
|
||||
let Some(provider) = OAuthProvider::from_str(&provider_name) else {
|
||||
let Some(provider) = OAuthProvider::parse(&provider_name) else {
|
||||
return (
|
||||
StatusCode::BAD_REQUEST,
|
||||
Html(format!(
|
||||
|
|
@ -247,7 +247,7 @@ async fn oauth_callback(
|
|||
.into_response();
|
||||
}
|
||||
|
||||
let Some(provider) = OAuthProvider::from_str(&provider_name) else {
|
||||
let Some(provider) = OAuthProvider::parse(&provider_name) else {
|
||||
return (
|
||||
StatusCode::BAD_REQUEST,
|
||||
Html("Invalid provider".to_string()),
|
||||
|
|
|
|||
|
|
@ -691,9 +691,10 @@ async fn rotate_secret(component: &str) -> Result<()> {
|
|||
println!("⚠️ WARNING: You must update PostgreSQL with the new password!");
|
||||
println!();
|
||||
println!("Run this SQL command:");
|
||||
let default_username = "postgres".to_string();
|
||||
println!(
|
||||
" ALTER USER {} WITH PASSWORD '{}';",
|
||||
secrets.get("username").unwrap_or(&"postgres".to_string()),
|
||||
secrets.get("username").unwrap_or(&default_username),
|
||||
new_password
|
||||
);
|
||||
println!();
|
||||
|
|
|
|||
|
|
@ -378,7 +378,7 @@ impl PackageManager {
|
|||
"https://huggingface.co/CompendiumLabs/bge-small-en-v1.5-gguf/resolve/main/bge-small-en-v1.5-f32.gguf".to_string(),
|
||||
],
|
||||
exec_cmd: "nohup {{BIN_PATH}}/llama-server --port 8081 --ssl-key-file {{CONF_PATH}}/system/certificates/llm/server.key --ssl-cert-file {{CONF_PATH}}/system/certificates/llm/server.crt -m {{DATA_PATH}}/DeepSeek-R1-Distill-Qwen-1.5B-Q3_K_M.gguf > {{LOGS_PATH}}/llm.log 2>&1 & nohup {{BIN_PATH}}/llama-server --port 8082 --ssl-key-file {{CONF_PATH}}/system/certificates/embedding/server.key --ssl-cert-file {{CONF_PATH}}/system/certificates/embedding/server.crt -m {{DATA_PATH}}/bge-small-en-v1.5-f32.gguf --embedding > {{LOGS_PATH}}/embedding.log 2>&1 &".to_string(),
|
||||
check_cmd: "curl -f -k https://localhost:8081/health >/dev/null 2>&1 && curl -f -k https://localhost:8082/health >/dev/null 2>&1".to_string(),
|
||||
check_cmd: "curl -f -k --connect-timeout 2 -m 5 https://localhost:8081/health >/dev/null 2>&1 && curl -f -k --connect-timeout 2 -m 5 https://localhost:8082/health >/dev/null 2>&1".to_string(),
|
||||
},
|
||||
);
|
||||
}
|
||||
|
|
@ -411,7 +411,7 @@ impl PackageManager {
|
|||
]),
|
||||
data_download_list: Vec::new(),
|
||||
exec_cmd: "{{BIN_PATH}}/stalwart-mail --config {{CONF_PATH}}/email/config.toml".to_string(),
|
||||
check_cmd: "curl -f -k https://localhost:8025/health >/dev/null 2>&1".to_string(),
|
||||
check_cmd: "curl -f -k --connect-timeout 2 -m 5 https://localhost:8025/health >/dev/null 2>&1".to_string(),
|
||||
},
|
||||
);
|
||||
}
|
||||
|
|
@ -441,7 +441,7 @@ impl PackageManager {
|
|||
env_vars: HashMap::from([("XDG_DATA_HOME".to_string(), "{{DATA_PATH}}".to_string())]),
|
||||
data_download_list: Vec::new(),
|
||||
exec_cmd: "{{BIN_PATH}}/caddy run --config {{CONF_PATH}}/Caddyfile".to_string(),
|
||||
check_cmd: "curl -f http://localhost >/dev/null 2>&1".to_string(),
|
||||
check_cmd: "curl -f --connect-timeout 2 -m 5 http://localhost >/dev/null 2>&1".to_string(),
|
||||
},
|
||||
);
|
||||
}
|
||||
|
|
@ -487,7 +487,7 @@ impl PackageManager {
|
|||
]),
|
||||
data_download_list: Vec::new(),
|
||||
exec_cmd: "ZITADEL_MASTERKEY=$(VAULT_ADDR=http://localhost:8200 vault kv get -field=masterkey secret/gbo/directory 2>/dev/null || echo 'MasterkeyNeedsToHave32Characters') nohup {{BIN_PATH}}/zitadel start --config {{CONF_PATH}}/directory/zitadel.yaml --masterkeyFromEnv --tlsMode disabled > {{LOGS_PATH}}/zitadel.log 2>&1 &".to_string(),
|
||||
check_cmd: "curl -f http://localhost:8300/healthz >/dev/null 2>&1".to_string(),
|
||||
check_cmd: "curl -f --connect-timeout 2 -m 5 http://localhost:8300/healthz >/dev/null 2>&1".to_string(),
|
||||
},
|
||||
);
|
||||
}
|
||||
|
|
@ -518,7 +518,7 @@ impl PackageManager {
|
|||
]),
|
||||
data_download_list: Vec::new(),
|
||||
exec_cmd: "{{BIN_PATH}}/forgejo web --work-path {{DATA_PATH}} --port 3000 --cert {{CONF_PATH}}/system/certificates/alm/server.crt --key {{CONF_PATH}}/system/certificates/alm/server.key".to_string(),
|
||||
check_cmd: "curl -f -k https://localhost:3000 >/dev/null 2>&1".to_string(),
|
||||
check_cmd: "curl -f -k --connect-timeout 2 -m 5 https://localhost:3000 >/dev/null 2>&1".to_string(),
|
||||
},
|
||||
);
|
||||
}
|
||||
|
|
@ -622,7 +622,7 @@ impl PackageManager {
|
|||
env_vars: HashMap::new(),
|
||||
data_download_list: Vec::new(),
|
||||
exec_cmd: "php -S 0.0.0.0:8080 -t {{DATA_PATH}}/roundcubemail".to_string(),
|
||||
check_cmd: "curl -f -k https://localhost:8300 >/dev/null 2>&1".to_string(),
|
||||
check_cmd: "curl -f -k --connect-timeout 2 -m 5 https://localhost:8300 >/dev/null 2>&1".to_string(),
|
||||
},
|
||||
);
|
||||
}
|
||||
|
|
@ -651,7 +651,7 @@ impl PackageManager {
|
|||
env_vars: HashMap::new(),
|
||||
data_download_list: Vec::new(),
|
||||
exec_cmd: "{{BIN_PATH}}/livekit-server --config {{CONF_PATH}}/meet/config.yaml --key-file {{CONF_PATH}}/system/certificates/meet/server.key --cert-file {{CONF_PATH}}/system/certificates/meet/server.crt".to_string(),
|
||||
check_cmd: "curl -f -k https://localhost:7880 >/dev/null 2>&1".to_string(),
|
||||
check_cmd: "curl -f -k --connect-timeout 2 -m 5 https://localhost:7880 >/dev/null 2>&1".to_string(),
|
||||
},
|
||||
);
|
||||
}
|
||||
|
|
@ -678,7 +678,7 @@ impl PackageManager {
|
|||
env_vars: HashMap::new(),
|
||||
data_download_list: Vec::new(),
|
||||
exec_cmd: "{{BIN_PATH}}/nocodb".to_string(),
|
||||
check_cmd: "curl -f -k https://localhost:5757 >/dev/null 2>&1".to_string(),
|
||||
check_cmd: "curl -f -k --connect-timeout 2 -m 5 https://localhost:5757 >/dev/null 2>&1".to_string(),
|
||||
},
|
||||
);
|
||||
}
|
||||
|
|
@ -705,7 +705,7 @@ impl PackageManager {
|
|||
env_vars: HashMap::new(),
|
||||
data_download_list: Vec::new(),
|
||||
exec_cmd: "coolwsd --config-file={{CONF_PATH}}/coolwsd.xml".to_string(),
|
||||
check_cmd: "curl -f -k https://localhost:9980 >/dev/null 2>&1".to_string(),
|
||||
check_cmd: "curl -f -k --connect-timeout 2 -m 5 https://localhost:9980 >/dev/null 2>&1".to_string(),
|
||||
},
|
||||
);
|
||||
}
|
||||
|
|
@ -815,7 +815,7 @@ impl PackageManager {
|
|||
env_vars: HashMap::new(),
|
||||
data_download_list: Vec::new(),
|
||||
exec_cmd: "{{BIN_PATH}}/qdrant --storage-path {{DATA_PATH}} --enable-tls --cert {{CONF_PATH}}/system/certificates/qdrant/server.crt --key {{CONF_PATH}}/system/certificates/qdrant/server.key".to_string(),
|
||||
check_cmd: "curl -f -k https://localhost:6334/metrics >/dev/null 2>&1".to_string(),
|
||||
check_cmd: "curl -f -k --connect-timeout 2 -m 5 https://localhost:6334/metrics >/dev/null 2>&1".to_string(),
|
||||
},
|
||||
);
|
||||
}
|
||||
|
|
@ -857,7 +857,7 @@ impl PackageManager {
|
|||
},
|
||||
data_download_list: Vec::new(),
|
||||
exec_cmd: "{{BIN_PATH}}/influxd --bolt-path={{DATA_PATH}}/influxdb/influxd.bolt --engine-path={{DATA_PATH}}/influxdb/engine --http-bind-address=:8086".to_string(),
|
||||
check_cmd: "curl -f http://localhost:8086/health >/dev/null 2>&1".to_string(),
|
||||
check_cmd: "curl -f --connect-timeout 2 -m 5 http://localhost:8086/health >/dev/null 2>&1".to_string(),
|
||||
},
|
||||
);
|
||||
}
|
||||
|
|
@ -935,7 +935,7 @@ EOF"#.to_string(),
|
|||
data_download_list: Vec::new(),
|
||||
exec_cmd: "nohup {{BIN_PATH}}/vault server -config={{CONF_PATH}}/vault/config.hcl > {{LOGS_PATH}}/vault.log 2>&1 &"
|
||||
.to_string(),
|
||||
check_cmd: "curl -f -s 'http://localhost:8200/v1/sys/health?standbyok=true&uninitcode=200&sealedcode=200' >/dev/null 2>&1"
|
||||
check_cmd: "curl -f -s --connect-timeout 2 -m 5 'http://localhost:8200/v1/sys/health?standbyok=true&uninitcode=200&sealedcode=200' >/dev/null 2>&1"
|
||||
.to_string(),
|
||||
},
|
||||
);
|
||||
|
|
@ -976,7 +976,7 @@ EOF"#.to_string(),
|
|||
|
||||
|
||||
exec_cmd: "{{BIN_PATH}}/vector --config {{CONF_PATH}}/monitoring/vector.toml".to_string(),
|
||||
check_cmd: "curl -f http://localhost:8686/health >/dev/null 2>&1".to_string(),
|
||||
check_cmd: "curl -f --connect-timeout 2 -m 5 http://localhost:8686/health >/dev/null 2>&1".to_string(),
|
||||
},
|
||||
);
|
||||
}
|
||||
|
|
|
|||
|
|
@ -20,7 +20,7 @@ impl DirectorySetup {
|
|||
self.admin_token = Some(token);
|
||||
}
|
||||
|
||||
pub async fn ensure_admin_token(&mut self) -> Result<()> {
|
||||
pub fn ensure_admin_token(&mut self) -> Result<()> {
|
||||
if self.admin_token.is_none() {
|
||||
return Err(anyhow::anyhow!("Admin token must be configured"));
|
||||
}
|
||||
|
|
@ -119,7 +119,7 @@ impl DirectorySetup {
|
|||
|
||||
self.wait_for_ready(30).await?;
|
||||
|
||||
self.ensure_admin_token().await?;
|
||||
self.ensure_admin_token()?;
|
||||
|
||||
let org = self.create_default_organization().await?;
|
||||
log::info!(" Created default organization: {}", org.name);
|
||||
|
|
@ -158,7 +158,7 @@ impl DirectorySetup {
|
|||
}
|
||||
|
||||
pub async fn create_organization(&mut self, name: &str, description: &str) -> Result<String> {
|
||||
self.ensure_admin_token().await?;
|
||||
self.ensure_admin_token()?;
|
||||
|
||||
let response = self
|
||||
.client
|
||||
|
|
@ -217,7 +217,7 @@ impl DirectorySetup {
|
|||
last_name: &str,
|
||||
is_admin: bool,
|
||||
) -> Result<DefaultUser> {
|
||||
self.ensure_admin_token().await?;
|
||||
self.ensure_admin_token()?;
|
||||
|
||||
let response = self
|
||||
.client
|
||||
|
|
@ -388,7 +388,7 @@ impl DirectorySetup {
|
|||
client_id: String,
|
||||
client_secret: String,
|
||||
) -> Result<DirectoryConfig> {
|
||||
self.ensure_admin_token().await?;
|
||||
self.ensure_admin_token()?;
|
||||
|
||||
let config = DirectoryConfig {
|
||||
base_url: self.base_url.clone(),
|
||||
|
|
|
|||
|
|
@ -96,11 +96,11 @@ impl EmailSetup {
|
|||
|
||||
self.wait_for_ready(30).await?;
|
||||
|
||||
self.create_default_domain().await?;
|
||||
self.create_default_domain()?;
|
||||
log::info!(" Created default email domain: localhost");
|
||||
|
||||
let directory_integration = if let Some(dir_config_path) = directory_config_path {
|
||||
match self.setup_directory_integration(&dir_config_path).await {
|
||||
match self.setup_directory_integration(&dir_config_path) {
|
||||
Ok(_) => {
|
||||
log::info!(" Integrated with Directory for authentication");
|
||||
true
|
||||
|
|
@ -139,7 +139,8 @@ impl EmailSetup {
|
|||
Ok(config)
|
||||
}
|
||||
|
||||
async fn create_default_domain(&self) -> Result<()> {
|
||||
fn create_default_domain(&self) -> Result<()> {
|
||||
let _ = self;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
|
|
@ -195,8 +196,9 @@ impl EmailSetup {
|
|||
Ok(())
|
||||
}
|
||||
|
||||
async fn setup_directory_integration(&self, directory_config_path: &PathBuf) -> Result<()> {
|
||||
let content = fs::read_to_string(directory_config_path).await?;
|
||||
fn setup_directory_integration(&self, directory_config_path: &PathBuf) -> Result<()> {
|
||||
let _ = self;
|
||||
let content = std::fs::read_to_string(directory_config_path)?;
|
||||
let dir_config: serde_json::Value = serde_json::from_str(&content)?;
|
||||
|
||||
let issuer_url = dir_config["base_url"]
|
||||
|
|
@ -225,12 +227,8 @@ impl EmailSetup {
|
|||
self.load_existing_config().await
|
||||
}
|
||||
|
||||
pub async fn create_user_mailbox(
|
||||
&self,
|
||||
_username: &str,
|
||||
_password: &str,
|
||||
email: &str,
|
||||
) -> Result<()> {
|
||||
pub fn create_user_mailbox(&self, _username: &str, _password: &str, email: &str) -> Result<()> {
|
||||
let _ = self;
|
||||
log::info!("Creating mailbox for user: {}", email);
|
||||
|
||||
Ok(())
|
||||
|
|
@ -248,7 +246,7 @@ impl EmailSetup {
|
|||
let username = default_user["username"].as_str().unwrap_or("");
|
||||
|
||||
if !email.is_empty() {
|
||||
self.create_user_mailbox(username, password, email).await?;
|
||||
self.create_user_mailbox(username, password, email)?;
|
||||
log::info!(" Created mailbox for: {}", email);
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -72,7 +72,7 @@ impl std::fmt::Debug for KeyedRateLimiter {
|
|||
"limiters",
|
||||
&format!("<{} entries>", self.limiters.blocking_read().len()),
|
||||
)
|
||||
.finish()
|
||||
.finish_non_exhaustive()
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -65,20 +65,20 @@ impl SessionManager {
|
|||
"SessionManager.provide_input called for session {}",
|
||||
session_id
|
||||
);
|
||||
if let Some(sess) = self.sessions.get_mut(&session_id) {
|
||||
let sess = if let Some(existing) = self.sessions.get(&session_id) {
|
||||
let mut sess = existing.clone();
|
||||
sess.data = input;
|
||||
self.waiting_for_input.remove(&session_id);
|
||||
Ok(Some("user_input".to_string()))
|
||||
sess
|
||||
} else {
|
||||
let sess = SessionData {
|
||||
SessionData {
|
||||
id: session_id,
|
||||
user_id: None,
|
||||
data: input,
|
||||
};
|
||||
self.sessions.insert(session_id, sess);
|
||||
self.waiting_for_input.remove(&session_id);
|
||||
Ok(Some("user_input".to_string()))
|
||||
}
|
||||
}
|
||||
};
|
||||
self.sessions.insert(session_id, sess);
|
||||
self.waiting_for_input.remove(&session_id);
|
||||
Ok(Some("user_input".to_string()))
|
||||
}
|
||||
|
||||
pub fn mark_waiting(&mut self, session_id: Uuid) {
|
||||
|
|
@ -569,9 +569,9 @@ mod tests {
|
|||
WhatsApp,
|
||||
Teams,
|
||||
Web,
|
||||
SMS,
|
||||
Sms,
|
||||
Email,
|
||||
API,
|
||||
Api,
|
||||
}
|
||||
|
||||
impl Default for Channel {
|
||||
|
|
@ -811,12 +811,32 @@ mod tests {
|
|||
}
|
||||
}
|
||||
|
||||
impl ConversationState {
|
||||
pub const fn is_terminal(self) -> bool {
|
||||
matches!(self, Self::Ended | Self::Error | Self::Transferred)
|
||||
}
|
||||
|
||||
pub const fn is_waiting(self) -> bool {
|
||||
matches!(self, Self::WaitingForUser | Self::WaitingForBot)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct ConversationConfig {
|
||||
pub response_timeout: Duration,
|
||||
pub record: bool,
|
||||
pub use_mock_llm: bool,
|
||||
pub variables: HashMap<String, String>,
|
||||
variables: HashMap<String, String>,
|
||||
}
|
||||
|
||||
impl ConversationConfig {
|
||||
pub fn get_variable(&self, key: &str) -> Option<&String> {
|
||||
self.variables.get(key)
|
||||
}
|
||||
|
||||
pub fn set_variable(&mut self, key: String, value: String) {
|
||||
self.variables.insert(key, value);
|
||||
}
|
||||
}
|
||||
|
||||
impl Default for ConversationConfig {
|
||||
|
|
@ -1040,12 +1060,54 @@ mod tests {
|
|||
assert!(config.use_mock_llm);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_conversation_config_variables() {
|
||||
let mut config = ConversationConfig::default();
|
||||
config.set_variable("key1".to_string(), "value1".to_string());
|
||||
assert_eq!(config.get_variable("key1"), Some(&"value1".to_string()));
|
||||
assert_eq!(config.get_variable("nonexistent"), None);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_conversation_state_default() {
|
||||
let state = ConversationState::default();
|
||||
assert_eq!(state, ConversationState::Initial);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_conversation_state_is_terminal() {
|
||||
assert!(!ConversationState::Initial.is_terminal());
|
||||
assert!(!ConversationState::WaitingForUser.is_terminal());
|
||||
assert!(!ConversationState::WaitingForBot.is_terminal());
|
||||
assert!(ConversationState::Transferred.is_terminal());
|
||||
assert!(ConversationState::Ended.is_terminal());
|
||||
assert!(ConversationState::Error.is_terminal());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_conversation_state_is_waiting() {
|
||||
assert!(!ConversationState::Initial.is_waiting());
|
||||
assert!(ConversationState::WaitingForUser.is_waiting());
|
||||
assert!(ConversationState::WaitingForBot.is_waiting());
|
||||
assert!(!ConversationState::Transferred.is_waiting());
|
||||
assert!(!ConversationState::Ended.is_waiting());
|
||||
assert!(!ConversationState::Error.is_waiting());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_channel_sms_and_api() {
|
||||
let sms_customer = Customer {
|
||||
channel: Channel::Sms,
|
||||
..Default::default()
|
||||
};
|
||||
let api_customer = Customer {
|
||||
channel: Channel::Api,
|
||||
..Default::default()
|
||||
};
|
||||
assert_eq!(sms_customer.channel, Channel::Sms);
|
||||
assert_eq!(api_customer.channel, Channel::Api);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_session_state_transitions() {
|
||||
let mut session = Session::default();
|
||||
|
|
|
|||
|
|
@ -367,7 +367,7 @@ pub async fn get_metric(
|
|||
let window = Duration::minutes(query.window_minutes.unwrap_or(1));
|
||||
Some(collector.get_rate(&query.name, window).await)
|
||||
}
|
||||
Some("sum") | Some(_) | None => collector.get_aggregate(&query.name).await,
|
||||
Some("sum" | _) | None => collector.get_aggregate(&query.name).await,
|
||||
};
|
||||
|
||||
Json(match result {
|
||||
|
|
|
|||
|
|
@ -233,15 +233,7 @@ pub fn create_mock_auth_service() -> AuthService {
|
|||
service_account_key: None,
|
||||
};
|
||||
|
||||
let rt = tokio::runtime::Handle::try_current()
|
||||
.map(|h| h.block_on(AuthService::new(config.clone())))
|
||||
.unwrap_or_else(|_| {
|
||||
tokio::runtime::Runtime::new()
|
||||
.expect("Failed to create runtime")
|
||||
.block_on(AuthService::new(config))
|
||||
});
|
||||
|
||||
rt.expect("Failed to create mock AuthService")
|
||||
AuthService::new(config).expect("Failed to create mock AuthService")
|
||||
}
|
||||
|
||||
pub fn create_test_db_pool() -> Result<DbPool, Box<dyn std::error::Error + Send + Sync>> {
|
||||
|
|
|
|||
|
|
@ -167,6 +167,7 @@ pub async fn download_file(url: &str, output_path: &str) -> Result<(), anyhow::E
|
|||
if response.status().is_success() {
|
||||
let total_size = response.content_length().unwrap_or(0);
|
||||
let pb = ProgressBar::new(total_size);
|
||||
#[allow(clippy::literal_string_with_formatting_args)]
|
||||
pb.set_style(ProgressStyle::default_bar()
|
||||
.template("{msg}\n{spinner:.green} [{elapsed_precise}] [{bar:40.cyan/blue}] {bytes}/{total_bytes} ({eta})")
|
||||
.expect("Invalid progress bar template")
|
||||
|
|
|
|||
|
|
@ -23,7 +23,7 @@ pub struct ZitadelClient {
|
|||
}
|
||||
|
||||
impl ZitadelClient {
|
||||
pub async fn new(config: ZitadelConfig) -> Result<Self> {
|
||||
pub fn new(config: ZitadelConfig) -> Result<Self> {
|
||||
let http_client = reqwest::Client::builder()
|
||||
.timeout(std::time::Duration::from_secs(30))
|
||||
.build()
|
||||
|
|
|
|||
|
|
@ -29,8 +29,8 @@ impl std::fmt::Debug for AuthService {
|
|||
}
|
||||
|
||||
impl AuthService {
|
||||
pub async fn new(config: ZitadelConfig) -> anyhow::Result<Self> {
|
||||
let client = ZitadelClient::new(config).await?;
|
||||
pub fn new(config: ZitadelConfig) -> anyhow::Result<Self> {
|
||||
let client = ZitadelClient::new(config)?;
|
||||
Ok(Self {
|
||||
client: Arc::new(client),
|
||||
})
|
||||
|
|
|
|||
|
|
@ -524,7 +524,8 @@ pub async fn import_document(
|
|||
})?;
|
||||
serde_json::to_string_pretty(&parsed).unwrap_or(content)
|
||||
}
|
||||
"xml" | "csv" | _ => content,
|
||||
// "xml", "csv", and any other format pass through unchanged
|
||||
_ => content,
|
||||
};
|
||||
|
||||
s3_client
|
||||
|
|
|
|||
|
|
@ -211,11 +211,16 @@ impl DriveMonitor {
|
|||
for obj in list_objects.contents.unwrap_or_default() {
|
||||
let path = obj.key().unwrap_or_default().to_string();
|
||||
let path_parts: Vec<&str> = path.split('/').collect();
|
||||
if path_parts.len() < 2 || !path_parts[0].ends_with(".gbot") {
|
||||
if path_parts.len() < 2
|
||||
|| !std::path::Path::new(path_parts[0])
|
||||
.extension()
|
||||
.is_some_and(|ext| ext.eq_ignore_ascii_case("gbot"))
|
||||
{
|
||||
continue;
|
||||
}
|
||||
let path_lower = path.to_ascii_lowercase();
|
||||
if !path_lower.ends_with("config.csv") {
|
||||
if !path.eq_ignore_ascii_case("config.csv")
|
||||
&& !path.to_ascii_lowercase().ends_with("/config.csv")
|
||||
{
|
||||
continue;
|
||||
}
|
||||
match client
|
||||
|
|
|
|||
|
|
@ -535,15 +535,12 @@ fn get_file_icon(path: &str) -> String {
|
|||
.map(|e| e.to_lowercase());
|
||||
|
||||
match ext.as_deref() {
|
||||
Some("bas") => "".to_string(),
|
||||
Some("ast") => "".to_string(),
|
||||
Some("csv") => "".to_string(),
|
||||
Some("gbkb") => "".to_string(),
|
||||
Some("bas" | "ast" | "csv" | "gbkb") => "".to_string(),
|
||||
Some("json") => "🔖".to_string(),
|
||||
Some("txt" | "md") => "📃".to_string(),
|
||||
Some("pdf") => "📕".to_string(),
|
||||
Some("zip" | "tar" | "gz") => "📦".to_string(),
|
||||
Some("jpg" | "png" | "gif") | _ => "📄".to_string(),
|
||||
_ => "📄".to_string(),
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -1068,6 +1065,10 @@ mod tests {
|
|||
format!("http://127.0.0.1:{}", self.console_port)
|
||||
}
|
||||
|
||||
fn data_path(&self) -> &std::path::Path {
|
||||
&self.data_dir
|
||||
}
|
||||
|
||||
fn credentials(&self) -> (String, String) {
|
||||
(self.access_key.clone(), self.secret_key.clone())
|
||||
}
|
||||
|
|
@ -1093,6 +1094,7 @@ mod tests {
|
|||
|
||||
assert_eq!(config.endpoint(), "http://127.0.0.1:9000");
|
||||
assert_eq!(config.console_url(), "http://127.0.0.1:10000");
|
||||
assert_eq!(config.data_path(), std::path::Path::new("/tmp/test"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
|
@ -1291,7 +1293,7 @@ mod tests {
|
|||
percentage_used: 50.0,
|
||||
};
|
||||
|
||||
assert_eq!(response.percentage_used, 50.0);
|
||||
assert!((response.percentage_used - 50.0).abs() < f64::EPSILON);
|
||||
assert_eq!(
|
||||
response.total_bytes,
|
||||
response.used_bytes + response.available_bytes
|
||||
|
|
@ -1431,6 +1433,6 @@ mod tests {
|
|||
};
|
||||
|
||||
assert_eq!(request.bucket, "my-bucket");
|
||||
assert!(request.path.ends_with(".zip"));
|
||||
assert!(request.path.to_lowercase().ends_with(".zip"));
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -140,7 +140,7 @@ impl UserDriveVectorDB {
|
|||
|
||||
let payload: qdrant_client::Payload = serde_json::to_value(file)?
|
||||
.as_object()
|
||||
.map(|m| m.clone())
|
||||
.cloned()
|
||||
.unwrap_or_default()
|
||||
.into_iter()
|
||||
.map(|(k, v)| (k, qdrant_client::qdrant::Value::from(v.to_string())))
|
||||
|
|
@ -674,7 +674,8 @@ impl FileContentExtractor {
|
|||
|
||||
for sheet_name in workbook.sheet_names() {
|
||||
if let Ok(range) = workbook.worksheet_range(&sheet_name) {
|
||||
content.push_str(&format!("=== {} ===\n", sheet_name));
|
||||
use std::fmt::Write;
|
||||
let _ = writeln!(&mut content, "=== {} ===", sheet_name);
|
||||
|
||||
for row in range.rows() {
|
||||
let row_text: Vec<String> = row
|
||||
|
|
|
|||
|
|
@ -1586,7 +1586,7 @@ fn fetch_emails_from_folder(
|
|||
"sent" => "Sent",
|
||||
"drafts" => "Drafts",
|
||||
"trash" => "Trash",
|
||||
"inbox" | _ => "INBOX",
|
||||
_ => "INBOX",
|
||||
};
|
||||
|
||||
session
|
||||
|
|
|
|||
|
|
@ -3,6 +3,7 @@ use chrono::{DateTime, NaiveDate, Utc};
|
|||
use reqwest::{Client, Method};
|
||||
use serde::{de::DeserializeOwned, Deserialize, Serialize};
|
||||
use serde_json::{json, Value};
|
||||
use std::fmt::Write;
|
||||
use std::time::Duration;
|
||||
use tracing::{debug, error, info, warn};
|
||||
|
||||
|
|
@ -573,7 +574,7 @@ impl StalwartClient {
|
|||
) -> Result<QueueListResponse> {
|
||||
let mut path = format!("/api/queue/messages?limit={}&offset={}", limit, offset);
|
||||
if let Some(status) = status_filter {
|
||||
path.push_str(&format!("&filter=status:{}", status));
|
||||
let _ = write!(path, "&filter=status:{}", status);
|
||||
}
|
||||
self.request(Method::GET, &path, None).await
|
||||
}
|
||||
|
|
@ -804,17 +805,19 @@ impl StalwartClient {
|
|||
script.push_str("# Date-based activation\n");
|
||||
|
||||
if let Some(start) = &config.start_date {
|
||||
script.push_str(&format!(
|
||||
"if currentdate :value \"lt\" \"date\" \"{}\" {{ stop; }}\n",
|
||||
let _ = writeln!(
|
||||
script,
|
||||
"if currentdate :value \"lt\" \"date\" \"{}\" {{ stop; }}",
|
||||
start.format("%Y-%m-%d")
|
||||
));
|
||||
);
|
||||
}
|
||||
|
||||
if let Some(end) = &config.end_date {
|
||||
script.push_str(&format!(
|
||||
"if currentdate :value \"gt\" \"date\" \"{}\" {{ stop; }}\n",
|
||||
let _ = writeln!(
|
||||
script,
|
||||
"if currentdate :value \"gt\" \"date\" \"{}\" {{ stop; }}",
|
||||
end.format("%Y-%m-%d")
|
||||
));
|
||||
);
|
||||
}
|
||||
|
||||
script.push('\n');
|
||||
|
|
@ -823,10 +826,11 @@ impl StalwartClient {
|
|||
let subject = config.subject.replace('"', "\\\"").replace('\n', " ");
|
||||
let body = config.body_plain.replace('"', "\\\"").replace('\n', "\\n");
|
||||
|
||||
script.push_str(&format!(
|
||||
"vacation :days {} :subject \"{}\" \"{}\";\n",
|
||||
let _ = writeln!(
|
||||
script,
|
||||
"vacation :days {} :subject \"{}\" \"{}\";",
|
||||
config.vacation_days, subject, body
|
||||
));
|
||||
);
|
||||
|
||||
script
|
||||
}
|
||||
|
|
@ -868,7 +872,7 @@ impl StalwartClient {
|
|||
"require [\"fileinto\", \"reject\", \"vacation\", \"imap4flags\", \"copy\"];\n\n",
|
||||
);
|
||||
|
||||
script.push_str(&format!("# Rule: {}\n", rule.name));
|
||||
let _ = writeln!(script, "# Rule: {}", rule.name);
|
||||
|
||||
if !rule.enabled {
|
||||
script.push_str("# DISABLED\n");
|
||||
|
|
@ -886,16 +890,16 @@ impl StalwartClient {
|
|||
if conditions.is_empty() {
|
||||
script.push_str("# Always applies\n");
|
||||
} else {
|
||||
script.push_str(&format!("if allof ({}) {{\n", conditions.join(", ")));
|
||||
let _ = writeln!(script, "if allof ({}) {{", conditions.join(", "));
|
||||
}
|
||||
|
||||
for action in &rule.actions {
|
||||
let action_str = self.generate_action_sieve(action);
|
||||
if !action_str.is_empty() {
|
||||
if conditions.is_empty() {
|
||||
script.push_str(&format!("{}\n", action_str));
|
||||
let _ = writeln!(script, "{}", action_str);
|
||||
} else {
|
||||
script.push_str(&format!(" {}\n", action_str));
|
||||
let _ = writeln!(script, " {}", action_str);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -58,10 +58,10 @@ impl UserEmailVectorDB {
|
|||
pub fn new(user_id: Uuid, bot_id: Uuid, db_path: PathBuf) -> Self {
|
||||
let collection_name = format!("emails_{}_{}", bot_id, user_id);
|
||||
log::trace!(
|
||||
"Creating UserEmailVectorDB for user={} bot={} path={:?}",
|
||||
"Creating UserEmailVectorDB for user={} bot={} path={}",
|
||||
user_id,
|
||||
bot_id,
|
||||
db_path
|
||||
db_path.display()
|
||||
);
|
||||
|
||||
Self {
|
||||
|
|
@ -77,10 +77,10 @@ impl UserEmailVectorDB {
|
|||
#[cfg(feature = "vectordb")]
|
||||
pub async fn initialize(&mut self, qdrant_url: &str) -> Result<()> {
|
||||
log::info!(
|
||||
"Initializing email vector DB for user={} bot={} at {:?}",
|
||||
"Initializing email vector DB for user={} bot={} at {}",
|
||||
self.user_id,
|
||||
self.bot_id,
|
||||
self.db_path
|
||||
self.db_path.display()
|
||||
);
|
||||
let client = Qdrant::from_url(qdrant_url).build()?;
|
||||
|
||||
|
|
@ -124,7 +124,7 @@ impl UserEmailVectorDB {
|
|||
|
||||
let payload: qdrant_client::Payload = serde_json::to_value(email)?
|
||||
.as_object()
|
||||
.map(|m| m.clone())
|
||||
.cloned()
|
||||
.unwrap_or_default()
|
||||
.into_iter()
|
||||
.map(|(k, v)| (k, qdrant_client::qdrant::Value::from(v.to_string())))
|
||||
|
|
@ -410,7 +410,7 @@ impl EmailEmbeddingGenerator {
|
|||
Ok(embedding) => Ok(embedding),
|
||||
Err(e) => {
|
||||
log::warn!("Local embedding failed: {e}, falling back to hash embedding");
|
||||
self.generate_hash_embedding(text)
|
||||
Self::generate_hash_embedding(text)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -491,7 +491,7 @@ impl EmailEmbeddingGenerator {
|
|||
Ok(embedding)
|
||||
}
|
||||
|
||||
fn generate_hash_embedding(&self, text: &str) -> Result<Vec<f32>> {
|
||||
fn generate_hash_embedding(text: &str) -> Result<Vec<f32>> {
|
||||
use std::collections::hash_map::DefaultHasher;
|
||||
use std::hash::{Hash, Hasher};
|
||||
|
||||
|
|
|
|||
|
|
@ -40,7 +40,6 @@ async fn verify_webhook(Query(query): Query<WebhookVerifyQuery>) -> impl IntoRes
|
|||
) {
|
||||
(Some(mode), Some(token), Some(challenge)) => adapter
|
||||
.handle_webhook_verification(mode, token, &challenge)
|
||||
.await
|
||||
.map_or_else(
|
||||
|| (StatusCode::FORBIDDEN, "Verification failed".to_string()),
|
||||
|response| (StatusCode::OK, response),
|
||||
|
|
|
|||
|
|
@ -235,7 +235,8 @@ mod tests {
|
|||
|
||||
#[test]
|
||||
fn test_library_loads() {
|
||||
assert!(true);
|
||||
let version = env!("CARGO_PKG_VERSION");
|
||||
assert!(!version.is_empty());
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
|
|
|||
|
|
@ -180,7 +180,7 @@ impl CachedLLMProvider {
|
|||
}
|
||||
}
|
||||
|
||||
async fn get_bot_cache_config(&self, bot_id: &str) -> CacheConfig {
|
||||
fn get_bot_cache_config(&self, bot_id: &str) -> CacheConfig {
|
||||
if let Some(ref db_pool) = self.db_pool {
|
||||
let bot_uuid = match Uuid::parse_str(bot_id) {
|
||||
Ok(uuid) => uuid,
|
||||
|
|
@ -512,7 +512,7 @@ impl LLMProvider for CachedLLMProvider {
|
|||
return self.provider.generate(prompt, messages, model, key).await;
|
||||
}
|
||||
|
||||
let bot_cache_config = self.get_bot_cache_config(bot_id).await;
|
||||
let bot_cache_config = self.get_bot_cache_config(bot_id);
|
||||
|
||||
if let Some(cached) = self.get_cached_response(prompt, messages, model).await {
|
||||
info!("Cache hit (exact match) for bot {}", bot_id);
|
||||
|
|
|
|||
|
|
@ -106,12 +106,18 @@ pub async fn ensure_llama_servers_running(
|
|||
let mut tasks = vec![];
|
||||
if !llm_running && !llm_model.is_empty() {
|
||||
info!("Starting LLM server...");
|
||||
tasks.push(tokio::spawn(start_llm_server(
|
||||
Arc::clone(&app_state),
|
||||
llm_server_path.clone(),
|
||||
llm_model.clone(),
|
||||
llm_url.clone(),
|
||||
)));
|
||||
let app_state_clone = Arc::clone(&app_state);
|
||||
let llm_server_path_clone = llm_server_path.clone();
|
||||
let llm_model_clone = llm_model.clone();
|
||||
let llm_url_clone = llm_url.clone();
|
||||
tasks.push(tokio::spawn(async move {
|
||||
start_llm_server(
|
||||
app_state_clone,
|
||||
llm_server_path_clone,
|
||||
llm_model_clone,
|
||||
llm_url_clone,
|
||||
)
|
||||
}));
|
||||
} else if llm_model.is_empty() {
|
||||
info!("LLM_MODEL not set, skipping LLM server");
|
||||
}
|
||||
|
|
@ -222,7 +228,7 @@ pub async fn is_server_running(url: &str) -> bool {
|
|||
},
|
||||
}
|
||||
}
|
||||
pub async fn start_llm_server(
|
||||
pub fn start_llm_server(
|
||||
app_state: Arc<AppState>,
|
||||
llama_cpp_path: String,
|
||||
model_path: String,
|
||||
|
|
|
|||
|
|
@ -230,9 +230,12 @@ mod tests {
|
|||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
struct Usage {
|
||||
prompt_tokens: i32,
|
||||
completion_tokens: i32,
|
||||
total_tokens: i32,
|
||||
#[serde(rename = "prompt_tokens")]
|
||||
prompt: i32,
|
||||
#[serde(rename = "completion_tokens")]
|
||||
completion: i32,
|
||||
#[serde(rename = "total_tokens")]
|
||||
total: i32,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
|
|
@ -281,9 +284,9 @@ mod tests {
|
|||
finish_reason: "stop".to_string(),
|
||||
}],
|
||||
usage: Usage {
|
||||
prompt_tokens: 10,
|
||||
completion_tokens: 5,
|
||||
total_tokens: 15,
|
||||
prompt: 10,
|
||||
completion: 5,
|
||||
total: 15,
|
||||
},
|
||||
};
|
||||
|
||||
|
|
@ -401,14 +404,11 @@ mod tests {
|
|||
#[test]
|
||||
fn test_usage_calculation() {
|
||||
let usage = Usage {
|
||||
prompt_tokens: 100,
|
||||
completion_tokens: 50,
|
||||
total_tokens: 150,
|
||||
prompt: 100,
|
||||
completion: 50,
|
||||
total: 150,
|
||||
};
|
||||
assert_eq!(
|
||||
usage.prompt_tokens + usage.completion_tokens,
|
||||
usage.total_tokens
|
||||
);
|
||||
assert_eq!(usage.prompt + usage.completion, usage.total);
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
|
|
|||
19
src/main.rs
19
src/main.rs
|
|
@ -210,6 +210,8 @@ async fn run_axum_server(
|
|||
api_router = api_router.merge(botserver::research::configure_research_routes());
|
||||
api_router = api_router.merge(botserver::sources::configure_sources_routes());
|
||||
api_router = api_router.merge(botserver::designer::configure_designer_routes());
|
||||
api_router = api_router.merge(botserver::basic::keywords::configure_db_routes());
|
||||
api_router = api_router.merge(botserver::basic::keywords::configure_app_server_routes());
|
||||
|
||||
#[cfg(feature = "whatsapp")]
|
||||
{
|
||||
|
|
@ -233,7 +235,8 @@ async fn run_axum_server(
|
|||
|
||||
let app = Router::new()
|
||||
.merge(api_router.with_state(app_state.clone()))
|
||||
.nest_service("/apps", ServeDir::new(&site_path))
|
||||
// Static files fallback for legacy /apps/* paths
|
||||
.nest_service("/static", ServeDir::new(&site_path))
|
||||
.layer(Extension(app_state.clone()))
|
||||
.layer(cors)
|
||||
.layer(TraceLayer::new_for_http());
|
||||
|
|
@ -624,9 +627,7 @@ async fn main() -> std::io::Result<()> {
|
|||
};
|
||||
#[cfg(feature = "directory")]
|
||||
let auth_service = Arc::new(tokio::sync::Mutex::new(
|
||||
botserver::directory::AuthService::new(zitadel_config)
|
||||
.await
|
||||
.unwrap(),
|
||||
botserver::directory::AuthService::new(zitadel_config).unwrap(),
|
||||
));
|
||||
let config_manager = ConfigManager::new(pool.clone());
|
||||
|
||||
|
|
@ -739,7 +740,7 @@ async fn main() -> std::io::Result<()> {
|
|||
log::warn!("Failed to start website crawler service: {}", e);
|
||||
}
|
||||
|
||||
state_tx.send(app_state.clone()).await.ok();
|
||||
let _ = state_tx.try_send(app_state.clone());
|
||||
progress_tx.send(BootstrapProgress::BootstrapComplete).ok();
|
||||
|
||||
info!(
|
||||
|
|
@ -756,11 +757,9 @@ async fn main() -> std::io::Result<()> {
|
|||
info!("Automation service initialized with episodic memory scheduler");
|
||||
|
||||
let bot_orchestrator = BotOrchestrator::new(app_state.clone());
|
||||
tokio::spawn(async move {
|
||||
if let Err(e) = bot_orchestrator.mount_all_bots().await {
|
||||
error!("Failed to mount bots: {}", e);
|
||||
}
|
||||
});
|
||||
if let Err(e) = bot_orchestrator.mount_all_bots() {
|
||||
error!("Failed to mount bots: {}", e);
|
||||
}
|
||||
|
||||
let automation_state = app_state.clone();
|
||||
tokio::spawn(async move {
|
||||
|
|
|
|||
|
|
@ -17,16 +17,12 @@ pub mod conversations;
|
|||
pub mod service;
|
||||
use service::{DefaultTranscriptionService, MeetingService};
|
||||
|
||||
|
||||
|
||||
|
||||
pub fn configure() -> Router<Arc<AppState>> {
|
||||
Router::new()
|
||||
.route(ApiUrls::VOICE_START, post(voice_start))
|
||||
.route(ApiUrls::VOICE_STOP, post(voice_stop))
|
||||
.route(ApiUrls::MEET_CREATE, post(create_meeting))
|
||||
.route(ApiUrls::MEET_ROOMS, get(list_rooms))
|
||||
|
||||
.route("/api/meet/rooms", get(list_rooms_ui))
|
||||
.route("/api/meet/recent", get(recent_meetings))
|
||||
.route("/api/meet/participants", get(all_participants))
|
||||
|
|
@ -46,7 +42,6 @@ pub fn configure() -> Router<Arc<AppState>> {
|
|||
.route(ApiUrls::MEET_TOKEN, post(get_meeting_token))
|
||||
.route(ApiUrls::MEET_INVITE, post(send_meeting_invites))
|
||||
.route(ApiUrls::WS_MEET, get(meeting_websocket))
|
||||
|
||||
.route(
|
||||
"/conversations/create",
|
||||
post(conversations::create_conversation),
|
||||
|
|
@ -137,8 +132,6 @@ pub fn configure() -> Router<Arc<AppState>> {
|
|||
)
|
||||
}
|
||||
|
||||
|
||||
|
||||
#[derive(Debug, Deserialize)]
|
||||
pub struct CreateMeetingRequest {
|
||||
pub name: String,
|
||||
|
|
@ -164,8 +157,6 @@ pub struct SendInvitesRequest {
|
|||
pub emails: Vec<String>,
|
||||
}
|
||||
|
||||
|
||||
|
||||
pub async fn voice_start(
|
||||
State(data): State<Arc<AppState>>,
|
||||
Json(info): Json<Value>,
|
||||
|
|
@ -245,7 +236,6 @@ pub async fn voice_stop(
|
|||
}
|
||||
}
|
||||
|
||||
|
||||
pub async fn create_meeting(
|
||||
State(state): State<Arc<AppState>>,
|
||||
Json(payload): Json<CreateMeetingRequest>,
|
||||
|
|
@ -271,7 +261,6 @@ pub async fn create_meeting(
|
|||
}
|
||||
}
|
||||
|
||||
|
||||
pub async fn list_rooms(State(state): State<Arc<AppState>>) -> impl IntoResponse {
|
||||
let transcription_service = Arc::new(DefaultTranscriptionService);
|
||||
let meeting_service = MeetingService::new(state.clone(), transcription_service);
|
||||
|
|
@ -282,7 +271,6 @@ pub async fn list_rooms(State(state): State<Arc<AppState>>) -> impl IntoResponse
|
|||
(StatusCode::OK, Json(serde_json::json!(room_list)))
|
||||
}
|
||||
|
||||
|
||||
pub async fn get_room(
|
||||
State(state): State<Arc<AppState>>,
|
||||
Path(room_id): Path<String>,
|
||||
|
|
@ -300,7 +288,6 @@ pub async fn get_room(
|
|||
}
|
||||
}
|
||||
|
||||
|
||||
pub async fn join_room(
|
||||
State(state): State<Arc<AppState>>,
|
||||
Path(room_id): Path<String>,
|
||||
|
|
@ -327,7 +314,6 @@ pub async fn join_room(
|
|||
}
|
||||
}
|
||||
|
||||
|
||||
pub async fn start_transcription(
|
||||
State(state): State<Arc<AppState>>,
|
||||
Path(room_id): Path<String>,
|
||||
|
|
@ -353,12 +339,10 @@ pub async fn start_transcription(
|
|||
}
|
||||
}
|
||||
|
||||
|
||||
pub async fn get_meeting_token(
|
||||
State(_state): State<Arc<AppState>>,
|
||||
Json(payload): Json<GetTokenRequest>,
|
||||
) -> impl IntoResponse {
|
||||
|
||||
let token = format!(
|
||||
"meet_token_{}_{}_{}",
|
||||
payload.room_id,
|
||||
|
|
@ -376,7 +360,6 @@ pub async fn get_meeting_token(
|
|||
)
|
||||
}
|
||||
|
||||
|
||||
pub async fn send_meeting_invites(
|
||||
State(_state): State<Arc<AppState>>,
|
||||
Json(payload): Json<SendInvitesRequest>,
|
||||
|
|
@ -392,7 +375,6 @@ pub async fn send_meeting_invites(
|
|||
)
|
||||
}
|
||||
|
||||
|
||||
pub async fn meeting_websocket(
|
||||
ws: axum::extract::ws::WebSocketUpgrade,
|
||||
State(state): State<Arc<AppState>>,
|
||||
|
|
@ -400,15 +382,11 @@ pub async fn meeting_websocket(
|
|||
ws.on_upgrade(|socket| handle_meeting_socket(socket, state))
|
||||
}
|
||||
|
||||
#[allow(clippy::unused_async)]
|
||||
async fn handle_meeting_socket(_socket: axum::extract::ws::WebSocket, _state: Arc<AppState>) {
|
||||
info!("Meeting WebSocket connection established");
|
||||
|
||||
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
pub async fn list_rooms_ui(State(_state): State<Arc<AppState>>) -> Json<serde_json::Value> {
|
||||
Json(serde_json::json!({
|
||||
"rooms": [],
|
||||
|
|
@ -416,7 +394,6 @@ pub async fn list_rooms_ui(State(_state): State<Arc<AppState>>) -> Json<serde_js
|
|||
}))
|
||||
}
|
||||
|
||||
|
||||
pub async fn recent_meetings(State(_state): State<Arc<AppState>>) -> Json<serde_json::Value> {
|
||||
Json(serde_json::json!({
|
||||
"meetings": [],
|
||||
|
|
@ -424,7 +401,6 @@ pub async fn recent_meetings(State(_state): State<Arc<AppState>>) -> Json<serde_
|
|||
}))
|
||||
}
|
||||
|
||||
|
||||
pub async fn all_participants(State(_state): State<Arc<AppState>>) -> Json<serde_json::Value> {
|
||||
Json(serde_json::json!({
|
||||
"participants": [],
|
||||
|
|
@ -432,7 +408,6 @@ pub async fn all_participants(State(_state): State<Arc<AppState>>) -> Json<serde
|
|||
}))
|
||||
}
|
||||
|
||||
|
||||
pub async fn scheduled_meetings(State(_state): State<Arc<AppState>>) -> Json<serde_json::Value> {
|
||||
Json(serde_json::json!({
|
||||
"meetings": [],
|
||||
|
|
|
|||
|
|
@ -12,7 +12,6 @@ use std::sync::Arc;
|
|||
use tokio::sync::{mpsc, RwLock};
|
||||
use uuid::Uuid;
|
||||
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct Participant {
|
||||
pub id: String,
|
||||
|
|
@ -35,7 +34,6 @@ pub enum ParticipantRole {
|
|||
Bot,
|
||||
}
|
||||
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct MeetingRoom {
|
||||
pub id: String,
|
||||
|
|
@ -77,11 +75,9 @@ impl Default for MeetingSettings {
|
|||
}
|
||||
}
|
||||
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
#[serde(tag = "type", rename_all = "snake_case")]
|
||||
pub enum MeetingMessage {
|
||||
|
||||
JoinMeeting {
|
||||
room_id: String,
|
||||
participant_name: String,
|
||||
|
|
@ -188,7 +184,6 @@ pub enum RecordingAction {
|
|||
Resume,
|
||||
}
|
||||
|
||||
|
||||
pub struct MeetingService {
|
||||
pub state: Arc<AppState>,
|
||||
pub rooms: Arc<RwLock<HashMap<String, MeetingRoom>>>,
|
||||
|
|
@ -220,7 +215,6 @@ impl MeetingService {
|
|||
}
|
||||
}
|
||||
|
||||
|
||||
pub async fn create_room(
|
||||
&self,
|
||||
name: String,
|
||||
|
|
@ -236,14 +230,13 @@ impl MeetingService {
|
|||
created_at: chrono::Utc::now(),
|
||||
participants: Vec::new(),
|
||||
is_recording: false,
|
||||
is_transcribing: settings.as_ref().map_or(true, |s| s.enable_transcription),
|
||||
is_transcribing: settings.as_ref().is_none_or(|s| s.enable_transcription),
|
||||
max_participants: 100,
|
||||
settings: settings.unwrap_or_default(),
|
||||
};
|
||||
|
||||
self.rooms.write().await.insert(room_id, room.clone());
|
||||
|
||||
|
||||
if room.settings.bot_enabled {
|
||||
self.add_bot_to_room(&room.id).await?;
|
||||
}
|
||||
|
|
@ -252,7 +245,6 @@ impl MeetingService {
|
|||
Ok(room)
|
||||
}
|
||||
|
||||
|
||||
pub async fn join_room(
|
||||
&self,
|
||||
room_id: &str,
|
||||
|
|
@ -278,7 +270,6 @@ impl MeetingService {
|
|||
|
||||
room.participants.push(participant.clone());
|
||||
|
||||
|
||||
if room.is_transcribing && room.participants.iter().filter(|p| !p.is_bot).count() == 1 {
|
||||
self.start_transcription(room_id).await?;
|
||||
}
|
||||
|
|
@ -291,7 +282,6 @@ impl MeetingService {
|
|||
Ok(participant)
|
||||
}
|
||||
|
||||
|
||||
async fn add_bot_to_room(&self, room_id: &str) -> Result<()> {
|
||||
let bot_participant = Participant {
|
||||
id: format!("bot-{}", Uuid::new_v4()),
|
||||
|
|
@ -314,7 +304,6 @@ impl MeetingService {
|
|||
Ok(())
|
||||
}
|
||||
|
||||
|
||||
pub async fn start_transcription(&self, room_id: &str) -> Result<()> {
|
||||
info!("Starting transcription for room: {}", room_id);
|
||||
|
||||
|
|
@ -330,29 +319,25 @@ impl MeetingService {
|
|||
Ok(())
|
||||
}
|
||||
|
||||
|
||||
pub async fn handle_websocket(&self, socket: WebSocket, room_id: String) {
|
||||
let (mut sender, mut receiver) = socket.split();
|
||||
let (tx, mut rx) = mpsc::channel::<MeetingMessage>(100);
|
||||
|
||||
|
||||
self.connections
|
||||
.write()
|
||||
.await
|
||||
.insert(room_id.clone(), tx.clone());
|
||||
|
||||
|
||||
tokio::spawn(async move {
|
||||
while let Some(msg) = rx.recv().await {
|
||||
if let Ok(json) = serde_json::to_string(&msg) {
|
||||
if sender.send(Message::Text(json.into())).await.is_err() {
|
||||
if sender.send(Message::Text(json)).await.is_err() {
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
|
||||
while let Some(msg) = receiver.next().await {
|
||||
if let Ok(Message::Text(text)) = msg {
|
||||
if let Ok(meeting_msg) = serde_json::from_str::<MeetingMessage>(&text) {
|
||||
|
|
@ -361,11 +346,9 @@ impl MeetingService {
|
|||
}
|
||||
}
|
||||
|
||||
|
||||
self.connections.write().await.remove(&room_id);
|
||||
}
|
||||
|
||||
|
||||
async fn handle_meeting_message(&self, message: MeetingMessage, room_id: &str) {
|
||||
match message {
|
||||
MeetingMessage::Transcription {
|
||||
|
|
@ -377,7 +360,6 @@ impl MeetingService {
|
|||
if is_final {
|
||||
info!("Transcription from {}: {}", participant_id, text);
|
||||
|
||||
|
||||
if let Some(room) = self.rooms.read().await.get(room_id) {
|
||||
if room.settings.bot_enabled {
|
||||
self.process_bot_command(&text, room_id, &participant_id)
|
||||
|
|
@ -397,7 +379,6 @@ impl MeetingService {
|
|||
.await;
|
||||
}
|
||||
MeetingMessage::ChatMessage { .. } => {
|
||||
|
||||
self.broadcast_to_room(room_id, message.clone()).await;
|
||||
}
|
||||
_ => {
|
||||
|
|
@ -406,11 +387,8 @@ impl MeetingService {
|
|||
}
|
||||
}
|
||||
|
||||
|
||||
async fn process_bot_command(&self, text: &str, room_id: &str, participant_id: &str) {
|
||||
|
||||
if text.to_lowercase().contains("hey bot") || text.to_lowercase().contains("assistant") {
|
||||
|
||||
let user_message = UserMessage {
|
||||
bot_id: "meeting-assistant".to_string(),
|
||||
user_id: participant_id.to_string(),
|
||||
|
|
@ -423,8 +401,7 @@ impl MeetingService {
|
|||
context_name: None,
|
||||
};
|
||||
|
||||
|
||||
if let Ok(response) = self.process_with_bot(user_message).await {
|
||||
if let Ok(response) = Self::process_with_bot(user_message) {
|
||||
let bot_msg = MeetingMessage::ChatMessage {
|
||||
room_id: room_id.to_string(),
|
||||
content: response.content,
|
||||
|
|
@ -437,7 +414,6 @@ impl MeetingService {
|
|||
}
|
||||
}
|
||||
|
||||
|
||||
async fn handle_bot_request(
|
||||
&self,
|
||||
command: &str,
|
||||
|
|
@ -472,10 +448,7 @@ impl MeetingService {
|
|||
}
|
||||
}
|
||||
|
||||
|
||||
async fn process_with_bot(&self, message: UserMessage) -> Result<BotResponse> {
|
||||
|
||||
|
||||
fn process_with_bot(message: UserMessage) -> Result<BotResponse> {
|
||||
Ok(BotResponse {
|
||||
bot_id: message.bot_id,
|
||||
user_id: message.user_id,
|
||||
|
|
@ -492,7 +465,6 @@ impl MeetingService {
|
|||
})
|
||||
}
|
||||
|
||||
|
||||
async fn broadcast_to_room(&self, room_id: &str, message: MeetingMessage) {
|
||||
let connections = self.connections.read().await;
|
||||
if let Some(tx) = connections.get(room_id) {
|
||||
|
|
@ -500,18 +472,15 @@ impl MeetingService {
|
|||
}
|
||||
}
|
||||
|
||||
|
||||
pub async fn get_room(&self, room_id: &str) -> Option<MeetingRoom> {
|
||||
self.rooms.read().await.get(room_id).cloned()
|
||||
}
|
||||
|
||||
|
||||
pub async fn list_rooms(&self) -> Vec<MeetingRoom> {
|
||||
self.rooms.read().await.values().cloned().collect()
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
#[async_trait]
|
||||
pub trait TranscriptionService: Send + Sync {
|
||||
async fn start_transcription(&self, room_id: &str) -> Result<()>;
|
||||
|
|
@ -519,7 +488,6 @@ pub trait TranscriptionService: Send + Sync {
|
|||
async fn process_audio(&self, audio_data: Vec<u8>, room_id: &str) -> Result<String>;
|
||||
}
|
||||
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct DefaultTranscriptionService;
|
||||
|
||||
|
|
@ -536,7 +504,6 @@ impl TranscriptionService for DefaultTranscriptionService {
|
|||
}
|
||||
|
||||
async fn process_audio(&self, _audio_data: Vec<u8>, room_id: &str) -> Result<String> {
|
||||
|
||||
Ok(format!("Transcribed text for room {}", room_id))
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -144,7 +144,7 @@ mod tests {
|
|||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct Activity {
|
||||
#[serde(rename = "type")]
|
||||
pub activity_type: String,
|
||||
pub kind: String,
|
||||
pub id: String,
|
||||
pub timestamp: String,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
|
|
@ -181,7 +181,7 @@ mod tests {
|
|||
impl Default for Activity {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
activity_type: "message".to_string(),
|
||||
kind: "message".to_string(),
|
||||
id: uuid::Uuid::new_v4().to_string(),
|
||||
timestamp: chrono::Utc::now().to_rfc3339(),
|
||||
local_timestamp: None,
|
||||
|
|
@ -245,7 +245,7 @@ mod tests {
|
|||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct Entity {
|
||||
#[serde(rename = "type")]
|
||||
pub entity_type: String,
|
||||
pub kind: String,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub mentioned: Option<ChannelAccount>,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
|
|
@ -302,7 +302,7 @@ mod tests {
|
|||
#[test]
|
||||
fn test_activity_default() {
|
||||
let activity = Activity::default();
|
||||
assert_eq!(activity.activity_type, "message");
|
||||
assert_eq!(activity.kind, "message");
|
||||
assert_eq!(activity.channel_id, "msteams");
|
||||
assert!(!activity.id.is_empty());
|
||||
}
|
||||
|
|
@ -310,7 +310,7 @@ mod tests {
|
|||
#[test]
|
||||
fn test_activity_serialization() {
|
||||
let activity = Activity {
|
||||
activity_type: "message".to_string(),
|
||||
kind: "message".to_string(),
|
||||
id: "test-id".to_string(),
|
||||
timestamp: "2024-01-01T00:00:00Z".to_string(),
|
||||
local_timestamp: None,
|
||||
|
|
@ -381,7 +381,7 @@ mod tests {
|
|||
#[test]
|
||||
fn test_entity_mention() {
|
||||
let entity = Entity {
|
||||
entity_type: "mention".to_string(),
|
||||
kind: "mention".to_string(),
|
||||
mentioned: Some(ChannelAccount {
|
||||
id: "bot-id".to_string(),
|
||||
name: Some("Bot".to_string()),
|
||||
|
|
|
|||
|
|
@ -349,7 +349,7 @@ impl AntivirusManager {
|
|||
ScanType::Rootkit => "/".to_string(),
|
||||
};
|
||||
|
||||
let result = Self::run_clamav_scan(&scan_path, &config).await;
|
||||
let result = Self::run_clamav_scan(&scan_path, &config);
|
||||
|
||||
let mut scans_guard = scans.write().await;
|
||||
if let Some(scan) = scans_guard.get_mut(&scan_id) {
|
||||
|
|
@ -374,7 +374,7 @@ impl AntivirusManager {
|
|||
}
|
||||
}
|
||||
|
||||
async fn run_clamav_scan(path: &str, config: &AntivirusConfig) -> Result<(u64, Vec<Threat>)> {
|
||||
fn run_clamav_scan(path: &str, config: &AntivirusConfig) -> Result<(u64, Vec<Threat>)> {
|
||||
let clamscan = config
|
||||
.clamav_path
|
||||
.clone()
|
||||
|
|
@ -492,7 +492,7 @@ impl AntivirusManager {
|
|||
|
||||
pub async fn quarantine_file(&self, file_path: &Path) -> Result<()> {
|
||||
if !file_path.exists() {
|
||||
return Err(anyhow::anyhow!("File not found: {:?}", file_path));
|
||||
return Err(anyhow::anyhow!("File not found: {}", file_path.display()));
|
||||
}
|
||||
|
||||
let file_name = file_path
|
||||
|
|
|
|||
|
|
@ -539,12 +539,14 @@ mod tests {
|
|||
#[test]
|
||||
fn test_ca_manager_creation() {
|
||||
let temp_dir = TempDir::new().unwrap();
|
||||
let mut config = CaConfig::default();
|
||||
config.ca_cert_path = temp_dir.path().join("ca.crt");
|
||||
config.ca_key_path = temp_dir.path().join("ca.key");
|
||||
config.intermediate_cert_path = Some(temp_dir.path().join("intermediate.crt"));
|
||||
config.intermediate_key_path = Some(temp_dir.path().join("intermediate.key"));
|
||||
config.crl_path = Some(temp_dir.path().join("crl.pem"));
|
||||
let config = CaConfig {
|
||||
ca_cert_path: temp_dir.path().join("ca.crt"),
|
||||
ca_key_path: temp_dir.path().join("ca.key"),
|
||||
intermediate_cert_path: Some(temp_dir.path().join("intermediate.crt")),
|
||||
intermediate_key_path: Some(temp_dir.path().join("intermediate.key")),
|
||||
crl_path: Some(temp_dir.path().join("crl.pem")),
|
||||
..CaConfig::default()
|
||||
};
|
||||
|
||||
let manager = CaManager::new(config);
|
||||
assert!(manager.is_ok());
|
||||
|
|
@ -582,10 +584,12 @@ mod tests {
|
|||
|
||||
#[test]
|
||||
fn test_ca_config_external_ca() {
|
||||
let mut config = CaConfig::default();
|
||||
config.external_ca_enabled = true;
|
||||
config.external_ca_url = Some("https://ca.example.com".to_string());
|
||||
config.external_ca_api_key = Some("secret-key".to_string());
|
||||
let config = CaConfig {
|
||||
external_ca_enabled: true,
|
||||
external_ca_url: Some("https://ca.example.com".to_string()),
|
||||
external_ca_api_key: Some("secret-key".to_string()),
|
||||
..CaConfig::default()
|
||||
};
|
||||
|
||||
assert!(config.external_ca_enabled);
|
||||
assert_eq!(
|
||||
|
|
|
|||
|
|
@ -124,15 +124,15 @@ impl TlsIntegration {
|
|||
pub fn load_ca_cert(&mut self, ca_path: &Path) -> Result<()> {
|
||||
if ca_path.exists() {
|
||||
let ca_cert_pem = fs::read(ca_path)
|
||||
.with_context(|| format!("Failed to read CA certificate from {:?}", ca_path))?;
|
||||
.with_context(|| format!("Failed to read CA certificate from {}", ca_path.display()))?;
|
||||
|
||||
let ca_cert =
|
||||
Certificate::from_pem(&ca_cert_pem).context("Failed to parse CA certificate")?;
|
||||
|
||||
self.ca_cert = Some(ca_cert);
|
||||
info!("Loaded CA certificate from {:?}", ca_path);
|
||||
info!("Loaded CA certificate from {}", ca_path.display());
|
||||
} else {
|
||||
warn!("CA certificate not found at {:?}", ca_path);
|
||||
warn!("CA certificate not found at {}", ca_path.display());
|
||||
}
|
||||
|
||||
Ok(())
|
||||
|
|
@ -146,10 +146,10 @@ impl TlsIntegration {
|
|||
) -> Result<()> {
|
||||
if cert_path.exists() && key_path.exists() {
|
||||
let cert = fs::read(cert_path)
|
||||
.with_context(|| format!("Failed to read client cert from {:?}", cert_path))?;
|
||||
.with_context(|| format!("Failed to read client cert from {}", cert_path.display()))?;
|
||||
|
||||
let key = fs::read(key_path)
|
||||
.with_context(|| format!("Failed to read client key from {:?}", key_path))?;
|
||||
.with_context(|| format!("Failed to read client key from {}", key_path.display()))?;
|
||||
|
||||
let identity = Identity::from_pem(&[&cert[..], &key[..]].concat())
|
||||
.context("Failed to create client identity")?;
|
||||
|
|
|
|||
Some files were not shown because too many files have changed in this diff Show more
Loading…
Add table
Reference in a new issue