Add billion-scale database redesign with enums and sharding
Database Schema v7.0.0:
- Create new 'gb' schema with PostgreSQL ENUMs instead of VARCHAR for all domain values
- Add sharding infrastructure (shard_config, tenant_shard_map tables)
- Implement partitioned tables for sessions, messages, and analytics (monthly partitions)
- Add Snowflake-like ID generation for distributed systems
- Design for billion-user scale with proper indexing strategies

Rust Enums:
- Add comprehensive enum types in core/shared/enums.rs
- Implement ToSql/FromSql for Diesel ORM integration
- Include: ChannelType, MessageRole, MessageType, LlmProvider, ContextProvider
- Include: TaskStatus, TaskPriority, ExecutionMode, RiskLevel, ApprovalStatus, IntentType
- All enums stored as SMALLINT for efficiency

Other fixes:
- Fix hardcoded gpt-4 model in auto_task modules to use bot config
- Add vector_db to required bootstrap components
- Add Qdrant health check before KB indexing
- Change verbose START messages to trace level
- Fix episodic memory role handling in Claude client
- Disable auth for /api routes during development

This is a DESTRUCTIVE migration - only for fresh installations.
This commit is contained in:
parent
7bb7f8883c
commit
1f150228af
13 changed files with 2160 additions and 41 deletions
9
migrations/7.0.0_billion_scale_redesign/down.sql
Normal file
9
migrations/7.0.0_billion_scale_redesign/down.sql
Normal file
|
|
@ -0,0 +1,9 @@
|
|||
-- Migration: 7.0.0 Billion Scale Redesign - ROLLBACK
-- Description: Drops the gb schema and all its objects
-- WARNING: This is a DESTRUCTIVE operation - all data will be lost

-- Drop the entire schema (CASCADE drops all objects within)
-- IF EXISTS makes the rollback idempotent: safe to re-run when the
-- schema was never created or was already dropped.
DROP SCHEMA IF EXISTS gb CASCADE;

-- Note: This migration completely removes the v7 schema.
-- To restore previous schema, run migrations 6.x.x in order.
|
||||
1124
migrations/7.0.0_billion_scale_redesign/up.sql
Normal file
1124
migrations/7.0.0_billion_scale_redesign/up.sql
Normal file
File diff suppressed because it is too large
Load diff
|
|
@ -2,6 +2,7 @@ use crate::auto_task::app_logs::{log_generator_error, log_generator_info};
|
|||
use crate::basic::keywords::table_definition::{
|
||||
generate_create_table_sql, FieldDefinition, TableDefinition,
|
||||
};
|
||||
use crate::core::config::ConfigManager;
|
||||
use crate::core::shared::get_content_type;
|
||||
use crate::core::shared::models::UserSession;
|
||||
use crate::core::shared::state::AppState;
|
||||
|
|
@ -167,7 +168,7 @@ impl AppGenerator {
|
|||
),
|
||||
);
|
||||
|
||||
let llm_app = match self.generate_complete_app_with_llm(intent).await {
|
||||
let llm_app = match self.generate_complete_app_with_llm(intent, session.bot_id).await {
|
||||
Ok(app) => {
|
||||
log_generator_info(
|
||||
&app.name,
|
||||
|
|
@ -425,6 +426,7 @@ guid, string, text, integer, decimal, boolean, date, datetime, json
|
|||
async fn generate_complete_app_with_llm(
|
||||
&self,
|
||||
intent: &str,
|
||||
bot_id: Uuid,
|
||||
) -> Result<LlmGeneratedApp, Box<dyn std::error::Error + Send + Sync>> {
|
||||
let platform = Self::get_platform_prompt();
|
||||
|
||||
|
|
@ -478,7 +480,7 @@ IMPORTANT:
|
|||
Respond with valid JSON only."#
|
||||
);
|
||||
|
||||
let response = self.call_llm(&prompt).await?;
|
||||
let response = self.call_llm(&prompt, bot_id).await?;
|
||||
Self::parse_llm_app_response(&response)
|
||||
}
|
||||
|
||||
|
|
@ -551,10 +553,28 @@ Respond with valid JSON only."#
|
|||
async fn call_llm(
|
||||
&self,
|
||||
prompt: &str,
|
||||
bot_id: Uuid,
|
||||
) -> Result<String, Box<dyn std::error::Error + Send + Sync>> {
|
||||
#[cfg(feature = "llm")]
|
||||
{
|
||||
let config = serde_json::json!({
|
||||
// Get model and key from bot configuration
|
||||
let config_manager = ConfigManager::new(self.state.conn.clone());
|
||||
let model = config_manager
|
||||
.get_config(&bot_id, "llm-model", None)
|
||||
.unwrap_or_else(|_| {
|
||||
config_manager
|
||||
.get_config(&Uuid::nil(), "llm-model", None)
|
||||
.unwrap_or_else(|_| "gpt-4".to_string())
|
||||
});
|
||||
let key = config_manager
|
||||
.get_config(&bot_id, "llm-key", None)
|
||||
.unwrap_or_else(|_| {
|
||||
config_manager
|
||||
.get_config(&Uuid::nil(), "llm-key", None)
|
||||
.unwrap_or_default()
|
||||
});
|
||||
|
||||
let llm_config = serde_json::json!({
|
||||
"temperature": 0.7,
|
||||
"max_tokens": 16000
|
||||
});
|
||||
|
|
@ -562,7 +582,7 @@ Respond with valid JSON only."#
|
|||
match self
|
||||
.state
|
||||
.llm_provider
|
||||
.generate(prompt, &config, "gpt-4", "")
|
||||
.generate(prompt, &llm_config, &model, &key)
|
||||
.await
|
||||
{
|
||||
Ok(response) => return Ok(response),
|
||||
|
|
|
|||
|
|
@ -1,3 +1,4 @@
|
|||
use crate::core::config::ConfigManager;
|
||||
use crate::shared::models::UserSession;
|
||||
use crate::shared::state::AppState;
|
||||
use chrono::{DateTime, Utc};
|
||||
|
|
@ -142,7 +143,7 @@ impl DesignerAI {
|
|||
|
||||
// Analyze what the user wants to modify
|
||||
let analysis = self
|
||||
.analyze_modification(&request.instruction, &request.context)
|
||||
.analyze_modification(&request.instruction, &request.context, session.bot_id)
|
||||
.await?;
|
||||
|
||||
trace!("Modification analysis: {:?}", analysis.modification_type);
|
||||
|
|
@ -292,6 +293,7 @@ impl DesignerAI {
|
|||
&self,
|
||||
instruction: &str,
|
||||
context: &DesignerContext,
|
||||
bot_id: Uuid,
|
||||
) -> Result<AnalyzedModification, Box<dyn std::error::Error + Send + Sync>> {
|
||||
let context_json = serde_json::to_string(context)?;
|
||||
|
||||
|
|
@ -334,7 +336,7 @@ Guidelines:
|
|||
Respond ONLY with valid JSON."#
|
||||
);
|
||||
|
||||
let response = self.call_llm(&prompt).await?;
|
||||
let response = self.call_llm(&prompt, bot_id).await?;
|
||||
Self::parse_analysis_response(&response, instruction)
|
||||
}
|
||||
|
||||
|
|
@ -1037,19 +1039,37 @@ Respond ONLY with valid JSON."#
|
|||
async fn call_llm(
|
||||
&self,
|
||||
prompt: &str,
|
||||
bot_id: Uuid,
|
||||
) -> Result<String, Box<dyn std::error::Error + Send + Sync>> {
|
||||
trace!("Designer calling LLM");
|
||||
|
||||
#[cfg(feature = "llm")]
|
||||
{
|
||||
let config = serde_json::json!({
|
||||
// Get model and key from bot configuration
|
||||
let config_manager = ConfigManager::new(self.state.conn.clone());
|
||||
let model = config_manager
|
||||
.get_config(&bot_id, "llm-model", None)
|
||||
.unwrap_or_else(|_| {
|
||||
config_manager
|
||||
.get_config(&Uuid::nil(), "llm-model", None)
|
||||
.unwrap_or_else(|_| "gpt-4".to_string())
|
||||
});
|
||||
let key = config_manager
|
||||
.get_config(&bot_id, "llm-key", None)
|
||||
.unwrap_or_else(|_| {
|
||||
config_manager
|
||||
.get_config(&Uuid::nil(), "llm-key", None)
|
||||
.unwrap_or_default()
|
||||
});
|
||||
|
||||
let llm_config = serde_json::json!({
|
||||
"temperature": 0.3,
|
||||
"max_tokens": 2000
|
||||
});
|
||||
let response = self
|
||||
.state
|
||||
.llm_provider
|
||||
.generate(prompt, &config, "gpt-4", "")
|
||||
.generate(prompt, &llm_config, &model, &key)
|
||||
.await?;
|
||||
return Ok(response);
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,5 +1,6 @@
|
|||
use crate::auto_task::app_generator::AppGenerator;
|
||||
use crate::auto_task::intent_compiler::IntentCompiler;
|
||||
use crate::core::config::ConfigManager;
|
||||
use crate::shared::models::UserSession;
|
||||
use crate::shared::state::AppState;
|
||||
use chrono::{DateTime, Utc};
|
||||
|
|
@ -155,7 +156,7 @@ impl IntentClassifier {
|
|||
);
|
||||
|
||||
// Use LLM to classify the intent
|
||||
let classification = self.classify_with_llm(intent).await?;
|
||||
let classification = self.classify_with_llm(intent, session.bot_id).await?;
|
||||
|
||||
// Store classification for analytics
|
||||
self.store_classification(&classification, session)?;
|
||||
|
|
@ -222,6 +223,7 @@ impl IntentClassifier {
|
|||
async fn classify_with_llm(
|
||||
&self,
|
||||
intent: &str,
|
||||
bot_id: Uuid,
|
||||
) -> Result<ClassifiedIntent, Box<dyn std::error::Error + Send + Sync>> {
|
||||
let prompt = format!(
|
||||
r#"Classify this user request into one of these intent types:
|
||||
|
|
@ -273,7 +275,7 @@ Respond with JSON only:
|
|||
}}"#
|
||||
);
|
||||
|
||||
let response = self.call_llm(&prompt).await?;
|
||||
let response = self.call_llm(&prompt, bot_id).await?;
|
||||
Self::parse_classification_response(&response, intent)
|
||||
}
|
||||
|
||||
|
|
@ -952,19 +954,37 @@ END TRIGGER
|
|||
async fn call_llm(
|
||||
&self,
|
||||
prompt: &str,
|
||||
bot_id: Uuid,
|
||||
) -> Result<String, Box<dyn std::error::Error + Send + Sync>> {
|
||||
trace!("Calling LLM for intent classification");
|
||||
|
||||
#[cfg(feature = "llm")]
|
||||
{
|
||||
let config = serde_json::json!({
|
||||
// Get model and key from bot configuration
|
||||
let config_manager = ConfigManager::new(self.state.conn.clone());
|
||||
let model = config_manager
|
||||
.get_config(&bot_id, "llm-model", None)
|
||||
.unwrap_or_else(|_| {
|
||||
config_manager
|
||||
.get_config(&Uuid::nil(), "llm-model", None)
|
||||
.unwrap_or_else(|_| "gpt-4".to_string())
|
||||
});
|
||||
let key = config_manager
|
||||
.get_config(&bot_id, "llm-key", None)
|
||||
.unwrap_or_else(|_| {
|
||||
config_manager
|
||||
.get_config(&Uuid::nil(), "llm-key", None)
|
||||
.unwrap_or_default()
|
||||
});
|
||||
|
||||
let llm_config = serde_json::json!({
|
||||
"temperature": 0.3,
|
||||
"max_tokens": 1000
|
||||
});
|
||||
let response = self
|
||||
.state
|
||||
.llm_provider
|
||||
.generate(prompt, &config, "gpt-4", "")
|
||||
.generate(prompt, &llm_config, &model, &key)
|
||||
.await?;
|
||||
return Ok(response);
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,3 +1,4 @@
|
|||
use crate::core::config::ConfigManager;
|
||||
use crate::shared::models::UserSession;
|
||||
use crate::shared::state::AppState;
|
||||
use chrono::{DateTime, Utc};
|
||||
|
|
@ -341,10 +342,10 @@ impl IntentCompiler {
|
|||
&intent[..intent.len().min(100)]
|
||||
);
|
||||
|
||||
let entities = self.extract_entities(intent).await?;
|
||||
let entities = self.extract_entities(intent, session.bot_id).await?;
|
||||
trace!("Extracted entities: {entities:?}");
|
||||
|
||||
let plan = self.generate_plan(intent, &entities).await?;
|
||||
let plan = self.generate_plan(intent, &entities, session.bot_id).await?;
|
||||
trace!("Generated plan with {} steps", plan.steps.len());
|
||||
|
||||
let basic_program = Self::generate_basic_program(&plan, &entities);
|
||||
|
|
@ -382,6 +383,7 @@ impl IntentCompiler {
|
|||
async fn extract_entities(
|
||||
&self,
|
||||
intent: &str,
|
||||
bot_id: Uuid,
|
||||
) -> Result<IntentEntities, Box<dyn std::error::Error + Send + Sync>> {
|
||||
let prompt = format!(
|
||||
r#"Analyze this user request and extract structured information.
|
||||
|
|
@ -406,7 +408,7 @@ Extract the following as JSON:
|
|||
Respond ONLY with valid JSON, no explanation."#
|
||||
);
|
||||
|
||||
let response = self.call_llm(&prompt).await?;
|
||||
let response = self.call_llm(&prompt, bot_id).await?;
|
||||
let entities: IntentEntities = serde_json::from_str(&response).unwrap_or_else(|e| {
|
||||
warn!("Failed to parse entity extraction response: {e}");
|
||||
IntentEntities {
|
||||
|
|
@ -423,6 +425,7 @@ Respond ONLY with valid JSON, no explanation."#
|
|||
&self,
|
||||
intent: &str,
|
||||
entities: &IntentEntities,
|
||||
bot_id: Uuid,
|
||||
) -> Result<ExecutionPlan, Box<dyn std::error::Error + Send + Sync>> {
|
||||
let keywords_list = self.config.available_keywords.join(", ");
|
||||
let mcp_servers_list = self.config.available_mcp_servers.join(", ");
|
||||
|
|
@ -483,7 +486,7 @@ Respond ONLY with valid JSON."#,
|
|||
self.config.max_plan_steps
|
||||
);
|
||||
|
||||
let response = self.call_llm(&prompt).await?;
|
||||
let response = self.call_llm(&prompt, bot_id).await?;
|
||||
|
||||
#[derive(Deserialize)]
|
||||
struct PlanResponse {
|
||||
|
|
@ -680,19 +683,37 @@ Respond ONLY with valid JSON."#,
|
|||
async fn call_llm(
|
||||
&self,
|
||||
prompt: &str,
|
||||
bot_id: Uuid,
|
||||
) -> Result<String, Box<dyn std::error::Error + Send + Sync>> {
|
||||
trace!("Calling LLM with prompt length: {}", prompt.len());
|
||||
|
||||
#[cfg(feature = "llm")]
|
||||
{
|
||||
let config = serde_json::json!({
|
||||
// Get model and key from bot configuration
|
||||
let config_manager = ConfigManager::new(self.state.conn.clone());
|
||||
let model = config_manager
|
||||
.get_config(&bot_id, "llm-model", None)
|
||||
.unwrap_or_else(|_| {
|
||||
config_manager
|
||||
.get_config(&Uuid::nil(), "llm-model", None)
|
||||
.unwrap_or_else(|_| self.config.model.clone())
|
||||
});
|
||||
let key = config_manager
|
||||
.get_config(&bot_id, "llm-key", None)
|
||||
.unwrap_or_else(|_| {
|
||||
config_manager
|
||||
.get_config(&Uuid::nil(), "llm-key", None)
|
||||
.unwrap_or_default()
|
||||
});
|
||||
|
||||
let llm_config = serde_json::json!({
|
||||
"temperature": self.config.temperature,
|
||||
"max_tokens": self.config.max_tokens
|
||||
});
|
||||
let response = self
|
||||
.state
|
||||
.llm_provider
|
||||
.generate(prompt, &config, &self.config.model, "")
|
||||
.generate(prompt, &llm_config, &model, &key)
|
||||
.await?;
|
||||
return Ok(response);
|
||||
}
|
||||
|
|
|
|||
|
|
@ -735,7 +735,7 @@ impl BootstrapManager {
|
|||
|
||||
let pm = PackageManager::new(self.install_mode.clone(), self.tenant.clone())?;
|
||||
|
||||
let required_components = vec!["vault", "tables", "directory", "drive", "cache", "llm"];
|
||||
let required_components = vec!["vault", "tables", "directory", "drive", "cache", "llm", "vector_db"];
|
||||
|
||||
let vault_needs_setup = !self.stack_dir("conf/vault/init.json").exists();
|
||||
|
||||
|
|
|
|||
|
|
@ -93,6 +93,16 @@ impl KbIndexer {
|
|||
}
|
||||
}
|
||||
|
||||
/// Check if Qdrant vector database is available
|
||||
pub async fn check_qdrant_health(&self) -> Result<bool> {
|
||||
let health_url = format!("{}/healthz", self.qdrant_config.url);
|
||||
|
||||
match self.http_client.get(&health_url).send().await {
|
||||
Ok(response) => Ok(response.status().is_success()),
|
||||
Err(_) => Ok(false),
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn index_kb_folder(
|
||||
&self,
|
||||
bot_name: &str,
|
||||
|
|
@ -101,6 +111,19 @@ impl KbIndexer {
|
|||
) -> Result<IndexingResult> {
|
||||
info!("Indexing KB folder: {} for bot {}", kb_name, bot_name);
|
||||
|
||||
// Check if Qdrant is available before proceeding
|
||||
if !self.check_qdrant_health().await.unwrap_or(false) {
|
||||
warn!(
|
||||
"Qdrant vector database is not available at {}. KB indexing skipped. \
|
||||
Install and start vector_db component to enable KB indexing.",
|
||||
self.qdrant_config.url
|
||||
);
|
||||
return Err(anyhow::anyhow!(
|
||||
"Qdrant vector database not available at {}. Start the vector_db service to enable KB indexing.",
|
||||
self.qdrant_config.url
|
||||
));
|
||||
}
|
||||
|
||||
let collection_name = format!("{}_{}", bot_name, kb_name);
|
||||
|
||||
self.ensure_collection_exists(&collection_name).await?;
|
||||
|
|
|
|||
|
|
@ -1085,8 +1085,8 @@ EOF"#.to_string(),
|
|||
}
|
||||
}
|
||||
|
||||
info!(
|
||||
"[START] About to spawn shell command for {}: {}",
|
||||
trace!(
|
||||
"About to spawn shell command for {}: {}",
|
||||
component.name, rendered_cmd
|
||||
);
|
||||
trace!("[START] Working dir: {}", bin_path.display());
|
||||
|
|
@ -1097,15 +1097,15 @@ EOF"#.to_string(),
|
|||
.envs(&evaluated_envs)
|
||||
.spawn();
|
||||
|
||||
info!(
|
||||
"[START] Spawn result for {}: {:?}",
|
||||
trace!(
|
||||
"Spawn result for {}: {:?}",
|
||||
component.name,
|
||||
child.is_ok()
|
||||
);
|
||||
std::thread::sleep(std::time::Duration::from_secs(2));
|
||||
|
||||
info!(
|
||||
"[START] Checking if {} process exists after 2s sleep...",
|
||||
trace!(
|
||||
"Checking if {} process exists after 2s sleep...",
|
||||
component.name
|
||||
);
|
||||
let check_proc = std::process::Command::new("pgrep")
|
||||
|
|
@ -1113,8 +1113,8 @@ EOF"#.to_string(),
|
|||
.output();
|
||||
if let Ok(output) = check_proc {
|
||||
let pids = String::from_utf8_lossy(&output.stdout);
|
||||
info!(
|
||||
"[START] pgrep '{}' result: '{}'",
|
||||
trace!(
|
||||
"pgrep '{}' result: '{}'",
|
||||
component.name,
|
||||
pids.trim()
|
||||
);
|
||||
|
|
@ -1122,11 +1122,11 @@ EOF"#.to_string(),
|
|||
|
||||
match child {
|
||||
Ok(c) => {
|
||||
trace!("[START] Component {} started successfully", component.name);
|
||||
trace!("Component {} started successfully", component.name);
|
||||
Ok(c)
|
||||
}
|
||||
Err(e) => {
|
||||
error!("[START] Spawn failed for {}: {}", component.name, e);
|
||||
error!("Spawn failed for {}: {}", component.name, e);
|
||||
let err_msg = e.to_string();
|
||||
if err_msg.contains("already running")
|
||||
|| err_msg.contains("be running")
|
||||
|
|
|
|||
816
src/core/shared/enums.rs
Normal file
816
src/core/shared/enums.rs
Normal file
|
|
@ -0,0 +1,816 @@
|
|||
//! Database Enum Types for Billion-Scale Schema
|
||||
//!
|
||||
//! This module defines Rust enums that map directly to PostgreSQL enum types.
|
||||
//! Using enums instead of TEXT columns provides:
|
||||
//! - Type safety at compile time
|
||||
//! - Efficient storage (stored as integers internally)
|
||||
//! - Fast comparisons and indexing
|
||||
//! - Automatic validation
|
||||
//!
|
||||
//! All enums derive necessary traits for Diesel ORM integration.
|
||||
|
||||
use diesel::deserialize::{self, FromSql};
|
||||
use diesel::pg::{Pg, PgValue};
|
||||
use diesel::serialize::{self, Output, ToSql};
|
||||
use diesel::sql_types::SmallInt;
|
||||
use diesel::{AsExpression, FromSqlRow};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::io::Write;
|
||||
|
||||
// ============================================================================
|
||||
// CHANNEL TYPES
|
||||
// ============================================================================
|
||||
|
||||
/// Communication channel types for bot interactions
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize, AsExpression, FromSqlRow)]
|
||||
#[diesel(sql_type = SmallInt)]
|
||||
#[serde(rename_all = "snake_case")]
|
||||
#[repr(i16)]
|
||||
pub enum ChannelType {
|
||||
Web = 0,
|
||||
WhatsApp = 1,
|
||||
Telegram = 2,
|
||||
MsTeams = 3,
|
||||
Slack = 4,
|
||||
Email = 5,
|
||||
Sms = 6,
|
||||
Voice = 7,
|
||||
Instagram = 8,
|
||||
Api = 9,
|
||||
}
|
||||
|
||||
impl Default for ChannelType {
|
||||
fn default() -> Self {
|
||||
Self::Web
|
||||
}
|
||||
}
|
||||
|
||||
impl ToSql<SmallInt, Pg> for ChannelType {
|
||||
fn to_sql<'b>(&'b self, out: &mut Output<'b, '_, Pg>) -> serialize::Result {
|
||||
let v = *self as i16;
|
||||
out.write_all(&v.to_be_bytes())?;
|
||||
Ok(serialize::IsNull::No)
|
||||
}
|
||||
}
|
||||
|
||||
impl FromSql<SmallInt, Pg> for ChannelType {
|
||||
fn from_sql(bytes: PgValue<'_>) -> deserialize::Result<Self> {
|
||||
let value = i16::from_sql(bytes)?;
|
||||
match value {
|
||||
0 => Ok(Self::Web),
|
||||
1 => Ok(Self::WhatsApp),
|
||||
2 => Ok(Self::Telegram),
|
||||
3 => Ok(Self::MsTeams),
|
||||
4 => Ok(Self::Slack),
|
||||
5 => Ok(Self::Email),
|
||||
6 => Ok(Self::Sms),
|
||||
7 => Ok(Self::Voice),
|
||||
8 => Ok(Self::Instagram),
|
||||
9 => Ok(Self::Api),
|
||||
_ => Err(format!("Unknown ChannelType: {}", value).into()),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl std::fmt::Display for ChannelType {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
match self {
|
||||
Self::Web => write!(f, "web"),
|
||||
Self::WhatsApp => write!(f, "whatsapp"),
|
||||
Self::Telegram => write!(f, "telegram"),
|
||||
Self::MsTeams => write!(f, "msteams"),
|
||||
Self::Slack => write!(f, "slack"),
|
||||
Self::Email => write!(f, "email"),
|
||||
Self::Sms => write!(f, "sms"),
|
||||
Self::Voice => write!(f, "voice"),
|
||||
Self::Instagram => write!(f, "instagram"),
|
||||
Self::Api => write!(f, "api"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl std::str::FromStr for ChannelType {
|
||||
type Err = String;
|
||||
fn from_str(s: &str) -> Result<Self, Self::Err> {
|
||||
match s.to_lowercase().as_str() {
|
||||
"web" => Ok(Self::Web),
|
||||
"whatsapp" => Ok(Self::WhatsApp),
|
||||
"telegram" => Ok(Self::Telegram),
|
||||
"msteams" | "ms_teams" | "teams" => Ok(Self::MsTeams),
|
||||
"slack" => Ok(Self::Slack),
|
||||
"email" => Ok(Self::Email),
|
||||
"sms" => Ok(Self::Sms),
|
||||
"voice" => Ok(Self::Voice),
|
||||
"instagram" => Ok(Self::Instagram),
|
||||
"api" => Ok(Self::Api),
|
||||
_ => Err(format!("Unknown channel type: {}", s)),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// ============================================================================
|
||||
// MESSAGE ROLE
|
||||
// ============================================================================
|
||||
|
||||
/// Role of a message in a conversation
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize, AsExpression, FromSqlRow)]
|
||||
#[diesel(sql_type = SmallInt)]
|
||||
#[serde(rename_all = "snake_case")]
|
||||
#[repr(i16)]
|
||||
pub enum MessageRole {
|
||||
User = 1,
|
||||
Assistant = 2,
|
||||
System = 3,
|
||||
Tool = 4,
|
||||
Episodic = 9,
|
||||
Compact = 10,
|
||||
}
|
||||
|
||||
impl Default for MessageRole {
|
||||
fn default() -> Self {
|
||||
Self::User
|
||||
}
|
||||
}
|
||||
|
||||
impl ToSql<SmallInt, Pg> for MessageRole {
|
||||
fn to_sql<'b>(&'b self, out: &mut Output<'b, '_, Pg>) -> serialize::Result {
|
||||
let v = *self as i16;
|
||||
out.write_all(&v.to_be_bytes())?;
|
||||
Ok(serialize::IsNull::No)
|
||||
}
|
||||
}
|
||||
|
||||
impl FromSql<SmallInt, Pg> for MessageRole {
|
||||
fn from_sql(bytes: PgValue<'_>) -> deserialize::Result<Self> {
|
||||
let value = i16::from_sql(bytes)?;
|
||||
match value {
|
||||
1 => Ok(Self::User),
|
||||
2 => Ok(Self::Assistant),
|
||||
3 => Ok(Self::System),
|
||||
4 => Ok(Self::Tool),
|
||||
9 => Ok(Self::Episodic),
|
||||
10 => Ok(Self::Compact),
|
||||
_ => Err(format!("Unknown MessageRole: {}", value).into()),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl std::fmt::Display for MessageRole {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
match self {
|
||||
Self::User => write!(f, "user"),
|
||||
Self::Assistant => write!(f, "assistant"),
|
||||
Self::System => write!(f, "system"),
|
||||
Self::Tool => write!(f, "tool"),
|
||||
Self::Episodic => write!(f, "episodic"),
|
||||
Self::Compact => write!(f, "compact"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl std::str::FromStr for MessageRole {
|
||||
type Err = String;
|
||||
fn from_str(s: &str) -> Result<Self, Self::Err> {
|
||||
match s.to_lowercase().as_str() {
|
||||
"user" => Ok(Self::User),
|
||||
"assistant" => Ok(Self::Assistant),
|
||||
"system" => Ok(Self::System),
|
||||
"tool" => Ok(Self::Tool),
|
||||
"episodic" => Ok(Self::Episodic),
|
||||
"compact" => Ok(Self::Compact),
|
||||
_ => Err(format!("Unknown message role: {}", s)),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// ============================================================================
|
||||
// MESSAGE TYPE
|
||||
// ============================================================================
|
||||
|
||||
/// Type of message content
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize, AsExpression, FromSqlRow)]
|
||||
#[diesel(sql_type = SmallInt)]
|
||||
#[serde(rename_all = "snake_case")]
|
||||
#[repr(i16)]
|
||||
pub enum MessageType {
|
||||
Text = 0,
|
||||
Image = 1,
|
||||
Audio = 2,
|
||||
Video = 3,
|
||||
Document = 4,
|
||||
Location = 5,
|
||||
Contact = 6,
|
||||
Sticker = 7,
|
||||
Reaction = 8,
|
||||
}
|
||||
|
||||
impl Default for MessageType {
|
||||
fn default() -> Self {
|
||||
Self::Text
|
||||
}
|
||||
}
|
||||
|
||||
impl ToSql<SmallInt, Pg> for MessageType {
|
||||
fn to_sql<'b>(&'b self, out: &mut Output<'b, '_, Pg>) -> serialize::Result {
|
||||
let v = *self as i16;
|
||||
out.write_all(&v.to_be_bytes())?;
|
||||
Ok(serialize::IsNull::No)
|
||||
}
|
||||
}
|
||||
|
||||
impl FromSql<SmallInt, Pg> for MessageType {
|
||||
fn from_sql(bytes: PgValue<'_>) -> deserialize::Result<Self> {
|
||||
let value = i16::from_sql(bytes)?;
|
||||
match value {
|
||||
0 => Ok(Self::Text),
|
||||
1 => Ok(Self::Image),
|
||||
2 => Ok(Self::Audio),
|
||||
3 => Ok(Self::Video),
|
||||
4 => Ok(Self::Document),
|
||||
5 => Ok(Self::Location),
|
||||
6 => Ok(Self::Contact),
|
||||
7 => Ok(Self::Sticker),
|
||||
8 => Ok(Self::Reaction),
|
||||
_ => Err(format!("Unknown MessageType: {}", value).into()),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl std::fmt::Display for MessageType {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
match self {
|
||||
Self::Text => write!(f, "text"),
|
||||
Self::Image => write!(f, "image"),
|
||||
Self::Audio => write!(f, "audio"),
|
||||
Self::Video => write!(f, "video"),
|
||||
Self::Document => write!(f, "document"),
|
||||
Self::Location => write!(f, "location"),
|
||||
Self::Contact => write!(f, "contact"),
|
||||
Self::Sticker => write!(f, "sticker"),
|
||||
Self::Reaction => write!(f, "reaction"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// ============================================================================
|
||||
// LLM PROVIDER
|
||||
// ============================================================================
|
||||
|
||||
/// Supported LLM providers
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize, AsExpression, FromSqlRow)]
|
||||
#[diesel(sql_type = SmallInt)]
|
||||
#[serde(rename_all = "snake_case")]
|
||||
#[repr(i16)]
|
||||
pub enum LlmProvider {
|
||||
OpenAi = 0,
|
||||
Anthropic = 1,
|
||||
AzureOpenAi = 2,
|
||||
AzureClaude = 3,
|
||||
Google = 4,
|
||||
Local = 5,
|
||||
Ollama = 6,
|
||||
Groq = 7,
|
||||
Mistral = 8,
|
||||
Cohere = 9,
|
||||
}
|
||||
|
||||
impl Default for LlmProvider {
|
||||
fn default() -> Self {
|
||||
Self::OpenAi
|
||||
}
|
||||
}
|
||||
|
||||
impl ToSql<SmallInt, Pg> for LlmProvider {
|
||||
fn to_sql<'b>(&'b self, out: &mut Output<'b, '_, Pg>) -> serialize::Result {
|
||||
let v = *self as i16;
|
||||
out.write_all(&v.to_be_bytes())?;
|
||||
Ok(serialize::IsNull::No)
|
||||
}
|
||||
}
|
||||
|
||||
impl FromSql<SmallInt, Pg> for LlmProvider {
|
||||
fn from_sql(bytes: PgValue<'_>) -> deserialize::Result<Self> {
|
||||
let value = i16::from_sql(bytes)?;
|
||||
match value {
|
||||
0 => Ok(Self::OpenAi),
|
||||
1 => Ok(Self::Anthropic),
|
||||
2 => Ok(Self::AzureOpenAi),
|
||||
3 => Ok(Self::AzureClaude),
|
||||
4 => Ok(Self::Google),
|
||||
5 => Ok(Self::Local),
|
||||
6 => Ok(Self::Ollama),
|
||||
7 => Ok(Self::Groq),
|
||||
8 => Ok(Self::Mistral),
|
||||
9 => Ok(Self::Cohere),
|
||||
_ => Err(format!("Unknown LlmProvider: {}", value).into()),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl std::fmt::Display for LlmProvider {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
match self {
|
||||
Self::OpenAi => write!(f, "openai"),
|
||||
Self::Anthropic => write!(f, "anthropic"),
|
||||
Self::AzureOpenAi => write!(f, "azure_openai"),
|
||||
Self::AzureClaude => write!(f, "azure_claude"),
|
||||
Self::Google => write!(f, "google"),
|
||||
Self::Local => write!(f, "local"),
|
||||
Self::Ollama => write!(f, "ollama"),
|
||||
Self::Groq => write!(f, "groq"),
|
||||
Self::Mistral => write!(f, "mistral"),
|
||||
Self::Cohere => write!(f, "cohere"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// ============================================================================
|
||||
// CONTEXT PROVIDER (Vector DB)
|
||||
// ============================================================================
|
||||
|
||||
/// Supported vector database providers
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize, AsExpression, FromSqlRow)]
|
||||
#[diesel(sql_type = SmallInt)]
|
||||
#[serde(rename_all = "snake_case")]
|
||||
#[repr(i16)]
|
||||
pub enum ContextProvider {
|
||||
None = 0,
|
||||
Qdrant = 1,
|
||||
Pinecone = 2,
|
||||
Weaviate = 3,
|
||||
Milvus = 4,
|
||||
PgVector = 5,
|
||||
Elasticsearch = 6,
|
||||
}
|
||||
|
||||
impl Default for ContextProvider {
|
||||
fn default() -> Self {
|
||||
Self::Qdrant
|
||||
}
|
||||
}
|
||||
|
||||
impl ToSql<SmallInt, Pg> for ContextProvider {
|
||||
fn to_sql<'b>(&'b self, out: &mut Output<'b, '_, Pg>) -> serialize::Result {
|
||||
let v = *self as i16;
|
||||
out.write_all(&v.to_be_bytes())?;
|
||||
Ok(serialize::IsNull::No)
|
||||
}
|
||||
}
|
||||
|
||||
impl FromSql<SmallInt, Pg> for ContextProvider {
|
||||
fn from_sql(bytes: PgValue<'_>) -> deserialize::Result<Self> {
|
||||
let value = i16::from_sql(bytes)?;
|
||||
match value {
|
||||
0 => Ok(Self::None),
|
||||
1 => Ok(Self::Qdrant),
|
||||
2 => Ok(Self::Pinecone),
|
||||
3 => Ok(Self::Weaviate),
|
||||
4 => Ok(Self::Milvus),
|
||||
5 => Ok(Self::PgVector),
|
||||
6 => Ok(Self::Elasticsearch),
|
||||
_ => Err(format!("Unknown ContextProvider: {}", value).into()),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// ============================================================================
|
||||
// TASK STATUS
|
||||
// ============================================================================
|
||||
|
||||
/// Status of a task (both regular tasks and auto-tasks)
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize, AsExpression, FromSqlRow)]
|
||||
#[diesel(sql_type = SmallInt)]
|
||||
#[serde(rename_all = "snake_case")]
|
||||
#[repr(i16)]
|
||||
pub enum TaskStatus {
|
||||
Pending = 0,
|
||||
Ready = 1,
|
||||
Running = 2,
|
||||
Paused = 3,
|
||||
WaitingApproval = 4,
|
||||
Completed = 5,
|
||||
Failed = 6,
|
||||
Cancelled = 7,
|
||||
}
|
||||
|
||||
impl Default for TaskStatus {
|
||||
fn default() -> Self {
|
||||
Self::Pending
|
||||
}
|
||||
}
|
||||
|
||||
impl ToSql<SmallInt, Pg> for TaskStatus {
|
||||
fn to_sql<'b>(&'b self, out: &mut Output<'b, '_, Pg>) -> serialize::Result {
|
||||
let v = *self as i16;
|
||||
out.write_all(&v.to_be_bytes())?;
|
||||
Ok(serialize::IsNull::No)
|
||||
}
|
||||
}
|
||||
|
||||
impl FromSql<SmallInt, Pg> for TaskStatus {
|
||||
fn from_sql(bytes: PgValue<'_>) -> deserialize::Result<Self> {
|
||||
let value = i16::from_sql(bytes)?;
|
||||
match value {
|
||||
0 => Ok(Self::Pending),
|
||||
1 => Ok(Self::Ready),
|
||||
2 => Ok(Self::Running),
|
||||
3 => Ok(Self::Paused),
|
||||
4 => Ok(Self::WaitingApproval),
|
||||
5 => Ok(Self::Completed),
|
||||
6 => Ok(Self::Failed),
|
||||
7 => Ok(Self::Cancelled),
|
||||
_ => Err(format!("Unknown TaskStatus: {}", value).into()),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl std::fmt::Display for TaskStatus {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
match self {
|
||||
Self::Pending => write!(f, "pending"),
|
||||
Self::Ready => write!(f, "ready"),
|
||||
Self::Running => write!(f, "running"),
|
||||
Self::Paused => write!(f, "paused"),
|
||||
Self::WaitingApproval => write!(f, "waiting_approval"),
|
||||
Self::Completed => write!(f, "completed"),
|
||||
Self::Failed => write!(f, "failed"),
|
||||
Self::Cancelled => write!(f, "cancelled"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl std::str::FromStr for TaskStatus {
|
||||
type Err = String;
|
||||
fn from_str(s: &str) -> Result<Self, Self::Err> {
|
||||
match s.to_lowercase().as_str() {
|
||||
"pending" => Ok(Self::Pending),
|
||||
"ready" => Ok(Self::Ready),
|
||||
"running" => Ok(Self::Running),
|
||||
"paused" => Ok(Self::Paused),
|
||||
"waiting_approval" | "waitingapproval" => Ok(Self::WaitingApproval),
|
||||
"completed" | "done" => Ok(Self::Completed),
|
||||
"failed" | "error" => Ok(Self::Failed),
|
||||
"cancelled" | "canceled" => Ok(Self::Cancelled),
|
||||
_ => Err(format!("Unknown task status: {}", s)),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// ============================================================================
|
||||
// TASK PRIORITY
|
||||
// ============================================================================
|
||||
|
||||
/// Priority level for tasks
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize, AsExpression, FromSqlRow)]
|
||||
#[diesel(sql_type = SmallInt)]
|
||||
#[serde(rename_all = "snake_case")]
|
||||
#[repr(i16)]
|
||||
pub enum TaskPriority {
|
||||
Low = 0,
|
||||
Normal = 1,
|
||||
High = 2,
|
||||
Urgent = 3,
|
||||
Critical = 4,
|
||||
}
|
||||
|
||||
impl Default for TaskPriority {
|
||||
fn default() -> Self {
|
||||
Self::Normal
|
||||
}
|
||||
}
|
||||
|
||||
impl ToSql<SmallInt, Pg> for TaskPriority {
|
||||
fn to_sql<'b>(&'b self, out: &mut Output<'b, '_, Pg>) -> serialize::Result {
|
||||
let v = *self as i16;
|
||||
out.write_all(&v.to_be_bytes())?;
|
||||
Ok(serialize::IsNull::No)
|
||||
}
|
||||
}
|
||||
|
||||
impl FromSql<SmallInt, Pg> for TaskPriority {
|
||||
fn from_sql(bytes: PgValue<'_>) -> deserialize::Result<Self> {
|
||||
let value = i16::from_sql(bytes)?;
|
||||
match value {
|
||||
0 => Ok(Self::Low),
|
||||
1 => Ok(Self::Normal),
|
||||
2 => Ok(Self::High),
|
||||
3 => Ok(Self::Urgent),
|
||||
4 => Ok(Self::Critical),
|
||||
_ => Err(format!("Unknown TaskPriority: {}", value).into()),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl std::fmt::Display for TaskPriority {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
match self {
|
||||
Self::Low => write!(f, "low"),
|
||||
Self::Normal => write!(f, "normal"),
|
||||
Self::High => write!(f, "high"),
|
||||
Self::Urgent => write!(f, "urgent"),
|
||||
Self::Critical => write!(f, "critical"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl std::str::FromStr for TaskPriority {
|
||||
type Err = String;
|
||||
fn from_str(s: &str) -> Result<Self, Self::Err> {
|
||||
match s.to_lowercase().as_str() {
|
||||
"low" => Ok(Self::Low),
|
||||
"normal" | "medium" => Ok(Self::Normal),
|
||||
"high" => Ok(Self::High),
|
||||
"urgent" => Ok(Self::Urgent),
|
||||
"critical" => Ok(Self::Critical),
|
||||
_ => Err(format!("Unknown task priority: {}", s)),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// ============================================================================
|
||||
// EXECUTION MODE
|
||||
// ============================================================================
|
||||
|
||||
/// Execution mode for autonomous tasks
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize, AsExpression, FromSqlRow)]
|
||||
#[diesel(sql_type = SmallInt)]
|
||||
#[serde(rename_all = "snake_case")]
|
||||
#[repr(i16)]
|
||||
pub enum ExecutionMode {
|
||||
Manual = 0,
|
||||
Supervised = 1,
|
||||
Autonomous = 2,
|
||||
}
|
||||
|
||||
impl Default for ExecutionMode {
|
||||
fn default() -> Self {
|
||||
Self::Supervised
|
||||
}
|
||||
}
|
||||
|
||||
impl ToSql<SmallInt, Pg> for ExecutionMode {
|
||||
fn to_sql<'b>(&'b self, out: &mut Output<'b, '_, Pg>) -> serialize::Result {
|
||||
let v = *self as i16;
|
||||
out.write_all(&v.to_be_bytes())?;
|
||||
Ok(serialize::IsNull::No)
|
||||
}
|
||||
}
|
||||
|
||||
impl FromSql<SmallInt, Pg> for ExecutionMode {
|
||||
fn from_sql(bytes: PgValue<'_>) -> deserialize::Result<Self> {
|
||||
let value = i16::from_sql(bytes)?;
|
||||
match value {
|
||||
0 => Ok(Self::Manual),
|
||||
1 => Ok(Self::Supervised),
|
||||
2 => Ok(Self::Autonomous),
|
||||
_ => Err(format!("Unknown ExecutionMode: {}", value).into()),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl std::fmt::Display for ExecutionMode {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
match self {
|
||||
Self::Manual => write!(f, "manual"),
|
||||
Self::Supervised => write!(f, "supervised"),
|
||||
Self::Autonomous => write!(f, "autonomous"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// ============================================================================
|
||||
// RISK LEVEL
|
||||
// ============================================================================
|
||||
|
||||
/// Risk assessment level for actions
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize, AsExpression, FromSqlRow)]
|
||||
#[diesel(sql_type = SmallInt)]
|
||||
#[serde(rename_all = "snake_case")]
|
||||
#[repr(i16)]
|
||||
pub enum RiskLevel {
|
||||
None = 0,
|
||||
Low = 1,
|
||||
Medium = 2,
|
||||
High = 3,
|
||||
Critical = 4,
|
||||
}
|
||||
|
||||
impl Default for RiskLevel {
|
||||
fn default() -> Self {
|
||||
Self::Low
|
||||
}
|
||||
}
|
||||
|
||||
impl ToSql<SmallInt, Pg> for RiskLevel {
|
||||
fn to_sql<'b>(&'b self, out: &mut Output<'b, '_, Pg>) -> serialize::Result {
|
||||
let v = *self as i16;
|
||||
out.write_all(&v.to_be_bytes())?;
|
||||
Ok(serialize::IsNull::No)
|
||||
}
|
||||
}
|
||||
|
||||
impl FromSql<SmallInt, Pg> for RiskLevel {
|
||||
fn from_sql(bytes: PgValue<'_>) -> deserialize::Result<Self> {
|
||||
let value = i16::from_sql(bytes)?;
|
||||
match value {
|
||||
0 => Ok(Self::None),
|
||||
1 => Ok(Self::Low),
|
||||
2 => Ok(Self::Medium),
|
||||
3 => Ok(Self::High),
|
||||
4 => Ok(Self::Critical),
|
||||
_ => Err(format!("Unknown RiskLevel: {}", value).into()),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl std::fmt::Display for RiskLevel {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
match self {
|
||||
Self::None => write!(f, "none"),
|
||||
Self::Low => write!(f, "low"),
|
||||
Self::Medium => write!(f, "medium"),
|
||||
Self::High => write!(f, "high"),
|
||||
Self::Critical => write!(f, "critical"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// ============================================================================
|
||||
// APPROVAL STATUS
|
||||
// ============================================================================
|
||||
|
||||
/// Status of an approval request
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize, AsExpression, FromSqlRow)]
|
||||
#[diesel(sql_type = SmallInt)]
|
||||
#[serde(rename_all = "snake_case")]
|
||||
#[repr(i16)]
|
||||
pub enum ApprovalStatus {
|
||||
Pending = 0,
|
||||
Approved = 1,
|
||||
Rejected = 2,
|
||||
Expired = 3,
|
||||
Skipped = 4,
|
||||
}
|
||||
|
||||
impl Default for ApprovalStatus {
|
||||
fn default() -> Self {
|
||||
Self::Pending
|
||||
}
|
||||
}
|
||||
|
||||
impl ToSql<SmallInt, Pg> for ApprovalStatus {
|
||||
fn to_sql<'b>(&'b self, out: &mut Output<'b, '_, Pg>) -> serialize::Result {
|
||||
let v = *self as i16;
|
||||
out.write_all(&v.to_be_bytes())?;
|
||||
Ok(serialize::IsNull::No)
|
||||
}
|
||||
}
|
||||
|
||||
impl FromSql<SmallInt, Pg> for ApprovalStatus {
|
||||
fn from_sql(bytes: PgValue<'_>) -> deserialize::Result<Self> {
|
||||
let value = i16::from_sql(bytes)?;
|
||||
match value {
|
||||
0 => Ok(Self::Pending),
|
||||
1 => Ok(Self::Approved),
|
||||
2 => Ok(Self::Rejected),
|
||||
3 => Ok(Self::Expired),
|
||||
4 => Ok(Self::Skipped),
|
||||
_ => Err(format!("Unknown ApprovalStatus: {}", value).into()),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl std::fmt::Display for ApprovalStatus {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
match self {
|
||||
Self::Pending => write!(f, "pending"),
|
||||
Self::Approved => write!(f, "approved"),
|
||||
Self::Rejected => write!(f, "rejected"),
|
||||
Self::Expired => write!(f, "expired"),
|
||||
Self::Skipped => write!(f, "skipped"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// ============================================================================
|
||||
// APPROVAL DECISION
|
||||
// ============================================================================
|
||||
|
||||
/// Decision made on an approval request
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize, AsExpression, FromSqlRow)]
|
||||
#[diesel(sql_type = SmallInt)]
|
||||
#[serde(rename_all = "snake_case")]
|
||||
#[repr(i16)]
|
||||
pub enum ApprovalDecision {
|
||||
Approve = 0,
|
||||
Reject = 1,
|
||||
Skip = 2,
|
||||
}
|
||||
|
||||
impl ToSql<SmallInt, Pg> for ApprovalDecision {
|
||||
fn to_sql<'b>(&'b self, out: &mut Output<'b, '_, Pg>) -> serialize::Result {
|
||||
let v = *self as i16;
|
||||
out.write_all(&v.to_be_bytes())?;
|
||||
Ok(serialize::IsNull::No)
|
||||
}
|
||||
}
|
||||
|
||||
impl FromSql<SmallInt, Pg> for ApprovalDecision {
|
||||
fn from_sql(bytes: PgValue<'_>) -> deserialize::Result<Self> {
|
||||
let value = i16::from_sql(bytes)?;
|
||||
match value {
|
||||
0 => Ok(Self::Approve),
|
||||
1 => Ok(Self::Reject),
|
||||
2 => Ok(Self::Skip),
|
||||
_ => Err(format!("Unknown ApprovalDecision: {}", value).into()),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl std::fmt::Display for ApprovalDecision {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
match self {
|
||||
Self::Approve => write!(f, "approve"),
|
||||
Self::Reject => write!(f, "reject"),
|
||||
Self::Skip => write!(f, "skip"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// ============================================================================
|
||||
// INTENT TYPE
|
||||
// ============================================================================
|
||||
|
||||
/// Classified intent type from user requests
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize, AsExpression, FromSqlRow)]
|
||||
#[diesel(sql_type = SmallInt)]
|
||||
#[serde(rename_all = "SCREAMING_SNAKE_CASE")]
|
||||
#[repr(i16)]
|
||||
pub enum IntentType {
|
||||
Unknown = 0,
|
||||
AppCreate = 1,
|
||||
Todo = 2,
|
||||
Monitor = 3,
|
||||
Action = 4,
|
||||
Schedule = 5,
|
||||
Goal = 6,
|
||||
Tool = 7,
|
||||
Query = 8,
|
||||
}
|
||||
|
||||
impl Default for IntentType {
|
||||
fn default() -> Self {
|
||||
Self::Unknown
|
||||
}
|
||||
}
|
||||
|
||||
impl ToSql<SmallInt, Pg> for IntentType {
|
||||
fn to_sql<'b>(&'b self, out: &mut Output<'b, '_, Pg>) -> serialize::Result {
|
||||
let v = *self as i16;
|
||||
out.write_all(&v.to_be_bytes())?;
|
||||
Ok(serialize::IsNull::No)
|
||||
}
|
||||
}
|
||||
|
||||
impl FromSql<SmallInt, Pg> for IntentType {
|
||||
fn from_sql(bytes: PgValue<'_>) -> deserialize::Result<Self> {
|
||||
let value = i16::from_sql(bytes)?;
|
||||
match value {
|
||||
0 => Ok(Self::Unknown),
|
||||
1 => Ok(Self::AppCreate),
|
||||
2 => Ok(Self::Todo),
|
||||
3 => Ok(Self::Monitor),
|
||||
4 => Ok(Self::Action),
|
||||
5 => Ok(Self::Schedule),
|
||||
6 => Ok(Self::Goal),
|
||||
7 => Ok(Self::Tool),
|
||||
8 => Ok(Self::Query),
|
||||
_ => Err(format!("Unknown IntentType: {}", value).into()),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl std::fmt::Display for IntentType {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
match self {
|
||||
Self::Unknown => write!(f, "UNKNOWN"),
|
||||
Self::AppCreate => write!(f, "APP_CREATE"),
|
||||
Self::Todo => write!(f, "TODO"),
|
||||
Self::Monitor => write!(f, "MONITOR"),
|
||||
Self::Action => write!(f, "ACTION"),
|
||||
Self::Schedule => write!(f, "SCHEDULE"),
|
||||
Self::Goal => write!(f, "GOAL"),
|
||||
Self::Tool => write!(f, "TOOL"),
|
||||
Self::Query => write!(f, "QUERY"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl std::str::FromStr for IntentType {
|
||||
type Err = String;
|
||||
fn from_str(s: &str) -> Result<Self, Self::Err> {
|
||||
match s.to_uppercase().as_str() {
|
||||
"UNKNOWN" => Ok(Self::Unknown),
|
||||
"APP_CREATE" | "APPCREATE" | "APP" | "APPLICATION" | "CREATE_APP" => Ok(Self::AppCreate),
|
||||
"TODO" | "TASK" | "REMINDER" => Ok(Self::Todo),
|
||||
"MONITOR" | "WATCH" | "ALERT" | "ON_CHANGE" => Ok(Self::Monitor),
|
||||
|
|
@ -5,6 +5,7 @@
|
|||
|
||||
pub mod admin;
|
||||
pub mod analytics;
|
||||
pub mod enums;
|
||||
pub mod models;
|
||||
pub mod schema;
|
||||
pub mod state;
|
||||
|
|
@ -13,6 +14,7 @@ pub mod test_utils;
|
|||
pub mod utils;
|
||||
|
||||
|
||||
pub use enums::*;
|
||||
pub use schema::*;
|
||||
|
||||
|
||||
|
|
|
|||
|
|
@ -119,6 +119,21 @@ impl ClaudeClient {
|
|||
headers
|
||||
}
|
||||
|
||||
/// Normalize role names for Claude API compatibility.
|
||||
/// Claude only accepts "user" or "assistant" roles in messages.
|
||||
/// - "episodic" and "compact" roles (conversation summaries) are converted to "user" with a context prefix
|
||||
/// - "system" roles should be handled separately (not in messages array)
|
||||
/// - Unknown roles default to "user"
|
||||
fn normalize_role(role: &str) -> Option<(String, bool)> {
|
||||
match role {
|
||||
"user" => Some(("user".to_string(), false)),
|
||||
"assistant" => Some(("assistant".to_string(), false)),
|
||||
"system" => None, // System messages handled separately
|
||||
"episodic" | "compact" => Some(("user".to_string(), true)), // Mark as context
|
||||
_ => Some(("user".to_string(), false)),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn build_messages(
|
||||
system_prompt: &str,
|
||||
context_data: &str,
|
||||
|
|
@ -133,6 +148,13 @@ impl ClaudeClient {
|
|||
system_parts.push(context_data.to_string());
|
||||
}
|
||||
|
||||
// Extract episodic memory content and add to system prompt
|
||||
for (role, content) in history {
|
||||
if role == "episodic" || role == "compact" {
|
||||
system_parts.push(format!("[Previous conversation summary]: {}", content));
|
||||
}
|
||||
}
|
||||
|
||||
let system = if system_parts.is_empty() {
|
||||
None
|
||||
} else {
|
||||
|
|
@ -141,10 +163,17 @@ impl ClaudeClient {
|
|||
|
||||
let messages: Vec<ClaudeMessage> = history
|
||||
.iter()
|
||||
.map(|(role, content)| ClaudeMessage {
|
||||
role: role.clone(),
|
||||
.filter_map(|(role, content)| {
|
||||
match Self::normalize_role(role) {
|
||||
Some((normalized_role, is_context)) if !is_context => {
|
||||
Some(ClaudeMessage {
|
||||
role: normalized_role,
|
||||
content: content.clone(),
|
||||
})
|
||||
}
|
||||
_ => None, // Skip system, episodic, compact (already in system prompt)
|
||||
}
|
||||
})
|
||||
.collect();
|
||||
|
||||
(system, messages)
|
||||
|
|
@ -180,7 +209,7 @@ impl LLMProvider for ClaudeClient {
|
|||
};
|
||||
|
||||
let empty_vec = vec![];
|
||||
let claude_messages: Vec<ClaudeMessage> = if messages.is_array() {
|
||||
let mut claude_messages: Vec<ClaudeMessage> = if messages.is_array() {
|
||||
let arr = messages.as_array().unwrap_or(&empty_vec);
|
||||
if arr.is_empty() {
|
||||
vec![ClaudeMessage {
|
||||
|
|
@ -192,11 +221,16 @@ impl LLMProvider for ClaudeClient {
|
|||
.filter_map(|m| {
|
||||
let role = m["role"].as_str().unwrap_or("user");
|
||||
let content = m["content"].as_str().unwrap_or("");
|
||||
if role == "system" {
|
||||
// Skip system messages (handled separately), episodic/compact (context), and empty content
|
||||
if role == "system" || role == "episodic" || role == "compact" || content.is_empty() {
|
||||
None
|
||||
} else {
|
||||
let normalized_role = match role {
|
||||
"user" | "assistant" => role.to_string(),
|
||||
_ => "user".to_string(),
|
||||
};
|
||||
Some(ClaudeMessage {
|
||||
role: role.to_string(),
|
||||
role: normalized_role,
|
||||
content: content.to_string(),
|
||||
})
|
||||
}
|
||||
|
|
@ -210,6 +244,14 @@ impl LLMProvider for ClaudeClient {
|
|||
}]
|
||||
};
|
||||
|
||||
// Ensure at least one user message exists
|
||||
if claude_messages.is_empty() && !prompt.is_empty() {
|
||||
claude_messages.push(ClaudeMessage {
|
||||
role: "user".to_string(),
|
||||
content: prompt.to_string(),
|
||||
});
|
||||
}
|
||||
|
||||
let system_prompt: Option<String> = if messages.is_array() {
|
||||
messages
|
||||
.as_array()
|
||||
|
|
@ -226,6 +268,11 @@ impl LLMProvider for ClaudeClient {
|
|||
|
||||
let system = system_prompt.filter(|s| !s.is_empty());
|
||||
|
||||
// Validate we have at least one message with content
|
||||
if claude_messages.is_empty() {
|
||||
return Err("Cannot send request to Claude: no messages with content".into());
|
||||
}
|
||||
|
||||
let request = ClaudeRequest {
|
||||
model: model_name.to_string(),
|
||||
max_tokens: 4096,
|
||||
|
|
@ -279,7 +326,7 @@ impl LLMProvider for ClaudeClient {
|
|||
};
|
||||
|
||||
let empty_vec = vec![];
|
||||
let claude_messages: Vec<ClaudeMessage> = if messages.is_array() {
|
||||
let mut claude_messages: Vec<ClaudeMessage> = if messages.is_array() {
|
||||
let arr = messages.as_array().unwrap_or(&empty_vec);
|
||||
if arr.is_empty() {
|
||||
vec![ClaudeMessage {
|
||||
|
|
@ -291,11 +338,16 @@ impl LLMProvider for ClaudeClient {
|
|||
.filter_map(|m| {
|
||||
let role = m["role"].as_str().unwrap_or("user");
|
||||
let content = m["content"].as_str().unwrap_or("");
|
||||
if role == "system" {
|
||||
// Skip system messages (handled separately), episodic/compact (context), and empty content
|
||||
if role == "system" || role == "episodic" || role == "compact" || content.is_empty() {
|
||||
None
|
||||
} else {
|
||||
let normalized_role = match role {
|
||||
"user" | "assistant" => role.to_string(),
|
||||
_ => "user".to_string(),
|
||||
};
|
||||
Some(ClaudeMessage {
|
||||
role: role.to_string(),
|
||||
role: normalized_role,
|
||||
content: content.to_string(),
|
||||
})
|
||||
}
|
||||
|
|
@ -309,6 +361,14 @@ impl LLMProvider for ClaudeClient {
|
|||
}]
|
||||
};
|
||||
|
||||
// Ensure at least one user message exists
|
||||
if claude_messages.is_empty() && !prompt.is_empty() {
|
||||
claude_messages.push(ClaudeMessage {
|
||||
role: "user".to_string(),
|
||||
content: prompt.to_string(),
|
||||
});
|
||||
}
|
||||
|
||||
let system_prompt: Option<String> = if messages.is_array() {
|
||||
messages
|
||||
.as_array()
|
||||
|
|
@ -325,6 +385,11 @@ impl LLMProvider for ClaudeClient {
|
|||
|
||||
let system = system_prompt.filter(|s| !s.is_empty());
|
||||
|
||||
// Validate we have at least one message with content
|
||||
if claude_messages.is_empty() {
|
||||
return Err("Cannot send streaming request to Claude: no messages with content".into());
|
||||
}
|
||||
|
||||
let request = ClaudeRequest {
|
||||
model: model_name.to_string(),
|
||||
max_tokens: 4096,
|
||||
|
|
|
|||
|
|
@ -178,14 +178,13 @@ async fn run_axum_server(
|
|||
let cors = create_cors_layer();
|
||||
|
||||
// Create auth config for protected routes
|
||||
// TODO: Re-enable auth for production - currently disabled for development
|
||||
let auth_config = Arc::new(AuthConfig::default()
|
||||
.add_anonymous_path("/health")
|
||||
.add_anonymous_path("/healthz")
|
||||
.add_anonymous_path("/api/health")
|
||||
.add_anonymous_path("/api/v1/health")
|
||||
.add_anonymous_path("/api") // Disable auth for all API routes during development
|
||||
.add_anonymous_path("/ws")
|
||||
.add_anonymous_path("/auth")
|
||||
.add_anonymous_path("/api/auth")
|
||||
.add_public_path("/static")
|
||||
.add_public_path("/favicon.ico"));
|
||||
|
||||
|
|
|
|||
Loading…
Add table
Reference in a new issue