Update: refactor migrations, update source files, and add new features

This commit is contained in:
Rodrigo Rodriguez (Pragmatismo) 2026-02-04 13:29:29 -03:00
parent f311a438cd
commit 355215c2a2
137 changed files with 4142 additions and 1355 deletions

View file

@ -10,7 +10,7 @@ features = ["database", "i18n"]
[features]
# ===== DEFAULT =====
default = ["chat", "automation", "drive", "tasks", "cache", "directory", "llm"]
default = ["chat", "automation", "drive", "tasks", "cache", "directory", "llm", "crawler"]
# ===== CORE INFRASTRUCTURE (Can be used standalone) =====
scripting = ["dep:rhai"]
@ -18,6 +18,7 @@ automation = ["scripting", "dep:cron"]
drive = ["dep:aws-config", "dep:aws-sdk-s3", "dep:aws-smithy-async", "dep:pdf-extract"]
cache = ["dep:redis"]
directory = []
crawler = ["drive", "cache"]
# ===== APPS (Each includes what it needs from core) =====
# Communication
@ -48,7 +49,7 @@ player = ["automation", "drive", "cache"]
canvas = ["automation", "drive", "cache"]
# Learning
learn = ["automation", "drive", "cache"]
learn = ["automation", "drive", "cache", "crawler"]
research = ["automation", "drive", "cache", "llm", "vectordb"]
sources = ["automation", "drive", "cache"]

View file

@ -324,7 +324,7 @@ When a file grows beyond this limit:
## 🗄️ Database Standards
- **TABLES AND INDEXES ONLY** (no views, triggers, functions)
- **TABLES AND INDEXES ONLY** (no stored procedures, no views, no triggers, no functions — nothing else)
- **JSON columns:** use TEXT with `_json` suffix
- **ORM:** Use diesel - no sqlx
- **Migrations:** Located in `botserver/migrations/`

3
build.rs Normal file
View file

@ -0,0 +1,3 @@
/// Build-script entry point.
///
/// Emits a `cargo:rerun-if-changed` directive so Cargo re-runs this build
/// script (and rebuilds the crate) whenever files under the bundled suite
/// UI directory change.
fn main() {
    // Path is relative to this crate's manifest directory.
    const UI_DIR: &str = "../botui/ui/suite/";
    println!("cargo:rerun-if-changed={UI_DIR}");
}

View file

@ -1,7 +1,7 @@
{
"base_url": "http://localhost:8300",
"default_org": {
"id": "354799954578898958",
"id": "357870945618100238",
"name": "default",
"domain": "default.localhost"
},
@ -13,8 +13,8 @@
"first_name": "Admin",
"last_name": "User"
},
"admin_token": "6nMpG1E-H-hqlSjrbFB5n2yx8BoEpnl3a3-F3HJoc5bvria3nwiz8vURPndzS4lQWKwaz_8",
"admin_token": "RflPqOgYM-BtinaBTyCaY8hX-_koTwC65gCg1Kpf7Sfhlc0ZOLZvIr-XsOYXmckPLBAWzjU",
"project_id": "",
"client_id": "354799955384270862",
"client_secret": "z6WFb1qshdCQ1y4Gw5EpOHzARgHicz6XkrazZwJdDcxMJrc6iRdHlhf5rf5LTzgi"
"client_id": "357870946289254414",
"client_secret": "q20LOjW5Vdjzp57Cw8EuFt7sILEd8VeSeGPvrhB63880GLgaJZpcWeRgUwdGET2x"
}

18
examples/test_cron.rs Normal file
View file

@ -0,0 +1,18 @@
use cron::Schedule;
use std::str::FromStr;
/// Manual smoke test: feeds a handful of cron expressions to the `cron`
/// crate's parser and prints whether each one is accepted.
///
/// Useful for checking which field counts/forms `Schedule::from_str`
/// actually supports.
fn main() {
    let candidates = ["59 * * * *", "0 * * * *", "0 11 * * *"];
    for expr in candidates {
        println!("\nTesting: {}", expr);
        if let Err(e) = Schedule::from_str(expr) {
            println!(" ✗ Error: {}", e);
        } else {
            println!(" ✓ OK");
        }
    }
}

View file

@ -2748,7 +2748,7 @@ CREATE INDEX IF NOT EXISTS idx_designer_pending_changes_expires_at ON designer_p
-- Add role-based access control columns to dynamic table definitions and fields
--
-- Syntax in .gbdialog TABLE definitions:
-- TABLE Contatos ON maria READ BY "admin;manager"
-- TABLE Contatos READ BY "admin;manager"
-- Id number key
-- Nome string(150)
-- NumeroDocumento string(25) READ BY "admin"

View file

@ -22,7 +22,7 @@ CREATE INDEX idx_billing_usage_alerts_org_id ON billing_usage_alerts(org_id);
CREATE INDEX idx_billing_usage_alerts_bot_id ON billing_usage_alerts(bot_id);
CREATE INDEX idx_billing_usage_alerts_severity ON billing_usage_alerts(severity);
CREATE INDEX idx_billing_usage_alerts_created_at ON billing_usage_alerts(created_at);
CREATE INDEX idx_billing_usage_alerts_acknowledged ON billing_usage_alerts(acknowledged_at) WHERE acknowledged_at IS NULL;
CREATE INDEX idx_billing_usage_alerts_acknowledged ON billing_usage_alerts(acknowledged_at);
-- Billing Alert History table
CREATE TABLE IF NOT EXISTS billing_alert_history (
@ -87,9 +87,9 @@ CREATE TABLE IF NOT EXISTS billing_grace_periods (
end_reason VARCHAR(50),
created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
UNIQUE(org_id, metric, is_active) WHERE is_active = TRUE
UNIQUE(org_id, metric, is_active)
);
CREATE INDEX idx_billing_grace_periods_org_id ON billing_grace_periods(org_id);
CREATE INDEX idx_billing_grace_periods_active ON billing_grace_periods(is_active) WHERE is_active = TRUE;
CREATE INDEX idx_billing_grace_periods_expires ON billing_grace_periods(expires_at) WHERE is_active = TRUE;
CREATE INDEX idx_billing_grace_periods_active ON billing_grace_periods(is_active);
CREATE INDEX idx_billing_grace_periods_expires ON billing_grace_periods(expires_at);

View file

@ -3,7 +3,7 @@
CREATE TABLE IF NOT EXISTS organization_invitations (
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
org_id UUID NOT NULL REFERENCES organizations(id) ON DELETE CASCADE,
org_id UUID NOT NULL REFERENCES organizations(org_id) ON DELETE CASCADE,
email VARCHAR(255) NOT NULL,
role VARCHAR(50) NOT NULL DEFAULT 'member',
status VARCHAR(20) NOT NULL DEFAULT 'pending',

View file

@ -189,42 +189,42 @@ CREATE INDEX idx_crm_notes_opportunity ON crm_notes(opportunity_id);
CREATE INDEX idx_crm_notes_account ON crm_notes(account_id);
INSERT INTO crm_pipeline_stages (org_id, bot_id, name, stage_order, probability, is_won, is_lost, color)
SELECT org_id, b.id, 'New', 1, 10, FALSE, FALSE, '#94a3b8'
SELECT o.org_id, b.id, 'New', 1, 10, FALSE, FALSE, '#94a3b8'
FROM organizations o
CROSS JOIN bots b
LIMIT 1
ON CONFLICT DO NOTHING;
INSERT INTO crm_pipeline_stages (org_id, bot_id, name, stage_order, probability, is_won, is_lost, color)
SELECT org_id, b.id, 'Qualified', 2, 25, FALSE, FALSE, '#3b82f6'
SELECT o.org_id, b.id, 'Qualified', 2, 25, FALSE, FALSE, '#3b82f6'
FROM organizations o
CROSS JOIN bots b
LIMIT 1
ON CONFLICT DO NOTHING;
INSERT INTO crm_pipeline_stages (org_id, bot_id, name, stage_order, probability, is_won, is_lost, color)
SELECT org_id, b.id, 'Proposal', 3, 50, FALSE, FALSE, '#8b5cf6'
SELECT o.org_id, b.id, 'Proposal', 3, 50, FALSE, FALSE, '#8b5cf6'
FROM organizations o
CROSS JOIN bots b
LIMIT 1
ON CONFLICT DO NOTHING;
INSERT INTO crm_pipeline_stages (org_id, bot_id, name, stage_order, probability, is_won, is_lost, color)
SELECT org_id, b.id, 'Negotiation', 4, 75, FALSE, FALSE, '#f59e0b'
SELECT o.org_id, b.id, 'Negotiation', 4, 75, FALSE, FALSE, '#f59e0b'
FROM organizations o
CROSS JOIN bots b
LIMIT 1
ON CONFLICT DO NOTHING;
INSERT INTO crm_pipeline_stages (org_id, bot_id, name, stage_order, probability, is_won, is_lost, color)
SELECT org_id, b.id, 'Won', 5, 100, TRUE, FALSE, '#22c55e'
SELECT o.org_id, b.id, 'Won', 5, 100, TRUE, FALSE, '#22c55e'
FROM organizations o
CROSS JOIN bots b
LIMIT 1
ON CONFLICT DO NOTHING;
INSERT INTO crm_pipeline_stages (org_id, bot_id, name, stage_order, probability, is_won, is_lost, color)
SELECT org_id, b.id, 'Lost', 6, 0, FALSE, TRUE, '#ef4444'
SELECT o.org_id, b.id, 'Lost', 6, 0, FALSE, TRUE, '#ef4444'
FROM organizations o
CROSS JOIN bots b
LIMIT 1

View file

@ -37,16 +37,16 @@ CREATE TABLE IF NOT EXISTS calendar_resource_bookings (
CREATE INDEX IF NOT EXISTS idx_resource_bookings_resource ON calendar_resource_bookings(resource_id, start_time, end_time);
CREATE INDEX IF NOT EXISTS idx_resource_bookings_user ON calendar_resource_bookings(booked_by);
-- Calendar sharing
CREATE TABLE IF NOT EXISTS calendar_shares (
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
owner_id UUID NOT NULL REFERENCES users(id) ON DELETE CASCADE,
shared_with_user UUID REFERENCES users(id) ON DELETE CASCADE,
shared_with_email VARCHAR(255),
permission_level VARCHAR(20) DEFAULT 'view',
created_at TIMESTAMPTZ DEFAULT NOW(),
CONSTRAINT check_cal_permission CHECK (permission_level IN ('free_busy', 'view', 'edit', 'admin'))
);
-- Calendar sharing (skip - already exists from 6.0.13-01-calendar)
-- CREATE TABLE IF NOT EXISTS calendar_shares (
-- id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
-- owner_id UUID NOT NULL REFERENCES users(id) ON DELETE CASCADE,
-- shared_with_user UUID REFERENCES users(id) ON DELETE CASCADE,
-- shared_with_email VARCHAR(255),
-- permission_level VARCHAR(20) DEFAULT 'view',
-- created_at TIMESTAMPTZ DEFAULT NOW(),
-- CONSTRAINT check_cal_permission CHECK (permission_level IN ('free_busy', 'view', 'edit', 'admin'))
-- );
CREATE INDEX IF NOT EXISTS idx_calendar_shares_owner ON calendar_shares(owner_id);
CREATE INDEX IF NOT EXISTS idx_calendar_shares_shared ON calendar_shares(shared_with_user);
-- CREATE INDEX IF NOT EXISTS idx_calendar_shares_owner ON calendar_shares(owner_id);
-- CREATE INDEX IF NOT EXISTS idx_calendar_shares_shared ON calendar_shares(shared_with_user);

View file

@ -4,4 +4,5 @@ DROP TABLE IF EXISTS meeting_waiting_room;
DROP TABLE IF EXISTS meeting_questions;
DROP TABLE IF EXISTS meeting_polls;
DROP TABLE IF EXISTS meeting_breakout_rooms;
DROP TABLE IF EXISTS meeting_recordings;
-- Note: meeting_recordings table is from 6.0.23 migration, don't drop it
DROP TABLE IF EXISTS meetings;

View file

@ -1,32 +1,29 @@
-- Legacy Meet Tables extracted from consolidated
-- Meeting recordings
CREATE TABLE IF NOT EXISTS meeting_recordings (
-- Core meetings table (if not exists from scheduled_meetings)
CREATE TABLE IF NOT EXISTS meetings (
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
meeting_id UUID NOT NULL,
bot_id UUID NOT NULL REFERENCES bots(id) ON DELETE CASCADE,
recorded_by UUID NOT NULL REFERENCES users(id) ON DELETE CASCADE,
file_path TEXT NOT NULL,
file_size BIGINT NOT NULL DEFAULT 0,
duration_seconds INTEGER,
format VARCHAR(20) DEFAULT 'mp4',
thumbnail_path TEXT,
transcription_path TEXT,
transcription_status VARCHAR(20) DEFAULT 'pending',
is_shared BOOLEAN DEFAULT false,
shared_with_json TEXT DEFAULT '[]',
retention_until TIMESTAMPTZ,
scheduled_meeting_id UUID REFERENCES scheduled_meetings(id) ON DELETE SET NULL,
room_id UUID,
title VARCHAR(255) NOT NULL,
status VARCHAR(20) DEFAULT 'active',
started_at TIMESTAMPTZ DEFAULT NOW(),
ended_at TIMESTAMPTZ,
created_at TIMESTAMPTZ DEFAULT NOW(),
CONSTRAINT check_transcription_status CHECK (transcription_status IN ('pending', 'processing', 'completed', 'failed'))
CONSTRAINT check_meeting_status CHECK (status IN ('active', 'ended', 'cancelled'))
);
CREATE INDEX IF NOT EXISTS idx_meeting_recordings_meeting ON meeting_recordings(meeting_id);
CREATE INDEX IF NOT EXISTS idx_meeting_recordings_bot ON meeting_recordings(bot_id);
CREATE INDEX IF NOT EXISTS idx_meetings_scheduled ON meetings(scheduled_meeting_id);
CREATE INDEX IF NOT EXISTS idx_meetings_status ON meetings(status);
-- Meeting recordings (legacy table already exists, skip creation)
-- Note: meeting_recordings table already exists from 6.0.23 migration with different schema
-- This migration creates additional meeting-related tables that reference the new meetings table
-- Breakout rooms
CREATE TABLE IF NOT EXISTS meeting_breakout_rooms (
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
meeting_id UUID NOT NULL,
meeting_id UUID NOT NULL REFERENCES meetings(id) ON DELETE CASCADE,
name VARCHAR(100) NOT NULL,
room_number INTEGER NOT NULL,
participants_json TEXT DEFAULT '[]',
@ -41,7 +38,7 @@ CREATE INDEX IF NOT EXISTS idx_breakout_rooms_meeting ON meeting_breakout_rooms(
-- Meeting polls
CREATE TABLE IF NOT EXISTS meeting_polls (
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
meeting_id UUID NOT NULL,
meeting_id UUID NOT NULL REFERENCES meetings(id) ON DELETE CASCADE,
created_by UUID NOT NULL REFERENCES users(id) ON DELETE CASCADE,
question TEXT NOT NULL,
poll_type VARCHAR(20) DEFAULT 'single',
@ -60,7 +57,7 @@ CREATE INDEX IF NOT EXISTS idx_meeting_polls_meeting ON meeting_polls(meeting_id
-- Meeting Q&A
CREATE TABLE IF NOT EXISTS meeting_questions (
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
meeting_id UUID NOT NULL,
meeting_id UUID NOT NULL REFERENCES meetings(id) ON DELETE CASCADE,
asked_by UUID REFERENCES users(id) ON DELETE SET NULL,
question TEXT NOT NULL,
is_anonymous BOOLEAN DEFAULT false,
@ -78,7 +75,7 @@ CREATE INDEX IF NOT EXISTS idx_meeting_questions_unanswered ON meeting_questions
-- Meeting waiting room
CREATE TABLE IF NOT EXISTS meeting_waiting_room (
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
meeting_id UUID NOT NULL,
meeting_id UUID NOT NULL REFERENCES meetings(id) ON DELETE CASCADE,
user_id UUID REFERENCES users(id) ON DELETE CASCADE,
guest_name VARCHAR(255),
guest_email VARCHAR(255),
@ -96,7 +93,7 @@ CREATE INDEX IF NOT EXISTS idx_waiting_room_status ON meeting_waiting_room(meeti
-- Meeting live captions
CREATE TABLE IF NOT EXISTS meeting_captions (
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
meeting_id UUID NOT NULL,
meeting_id UUID NOT NULL REFERENCES meetings(id) ON DELETE CASCADE,
speaker_id UUID REFERENCES users(id) ON DELETE SET NULL,
speaker_name VARCHAR(255),
caption_text TEXT NOT NULL,

View file

@ -1,3 +1,6 @@
-- Drop existing workflow_executions table if it exists (from older schema)
DROP TABLE IF EXISTS workflow_executions CASCADE;
-- Workflow state persistence (survives server restart)
CREATE TABLE workflow_executions (
id UUID PRIMARY KEY,

View file

@ -0,0 +1,3 @@
-- Down migration: remove the refresh_policy column from website_crawls table.
-- Reverses the matching up migration that added and backfilled this column.
ALTER TABLE website_crawls
DROP COLUMN IF EXISTS refresh_policy;

View file

@ -0,0 +1,13 @@
-- Add refresh_policy column to website_crawls table.
-- This column stores the user-configured refresh interval (e.g., "1d", "1w", "1m", "1y").
-- Column is idempotently added (IF NOT EXISTS) so re-running the migration is safe.
ALTER TABLE website_crawls
ADD COLUMN IF NOT EXISTS refresh_policy VARCHAR(20);
-- Backfill: update existing records to have a default refresh policy (1 month)
-- so no pre-existing row is left with a NULL policy after the migration.
UPDATE website_crawls
SET refresh_policy = '1m'
WHERE refresh_policy IS NULL;
-- Add a column comment for documentation (visible via \d+ in psql).
COMMENT ON COLUMN website_crawls.refresh_policy IS 'User-configured refresh interval (e.g., "1d", "1w", "1m", "1y") - shortest interval is used when duplicates exist';

View file

@ -0,0 +1,2 @@
-- Down migration: revert "add database_name to bots".
-- Drop the lookup index first, then the column itself (both idempotent).
DROP INDEX IF EXISTS idx_bots_database_name;
ALTER TABLE bots DROP COLUMN IF EXISTS database_name;

View file

@ -0,0 +1,2 @@
-- Up migration: add an optional per-bot database name to the bots table
-- (nullable; existing rows keep NULL) and index it for lookups by name.
ALTER TABLE bots ADD COLUMN IF NOT EXISTS database_name VARCHAR(255);
CREATE INDEX IF NOT EXISTS idx_bots_database_name ON bots(database_name);

File diff suppressed because it is too large Load diff

View file

@ -121,19 +121,18 @@ impl AppLogStore {
source: LogSource,
message: &str,
details: Option<String>,
bot_id: Option<Uuid>,
user_id: Option<Uuid>,
ids: (Option<Uuid>, Option<Uuid>), // (bot_id, user_id)
) {
let entry = AppLogEntry {
id: Uuid::new_v4().to_string(),
timestamp: Utc::now(),
level,
source,
app_name: app_name.to_string(),
bot_id,
user_id,
message: message.to_string(),
details,
bot_id: ids.0,
user_id: ids.1,
app_name: app_name.to_string(),
file_path: None,
line_number: None,
stack_trace: None,
@ -157,9 +156,7 @@ impl AppLogStore {
source: LogSource,
message: &str,
error: &str,
file_path: Option<&str>,
line_number: Option<u32>,
stack_trace: Option<&str>,
location: (Option<&str>, Option<u32>, Option<&str>), // (file_path, line_number, stack_trace)
) {
let entry = AppLogEntry {
id: Uuid::new_v4().to_string(),
@ -171,9 +168,9 @@ impl AppLogStore {
user_id: None,
message: message.to_string(),
details: Some(error.to_string()),
file_path: file_path.map(String::from),
line_number,
stack_trace: stack_trace.map(String::from),
file_path: location.0.map(String::from),
line_number: location.1,
stack_trace: location.2.map(String::from),
};
self.add_entry(entry);
@ -184,8 +181,8 @@ impl AppLogStore {
source,
message,
error,
file_path.unwrap_or("unknown"),
line_number.unwrap_or(0)
location.0.unwrap_or("unknown"),
location.1.unwrap_or(0)
);
}
@ -454,8 +451,7 @@ pub fn log_generator_info(app_name: &str, message: &str) {
LogSource::Generator,
message,
None,
None,
None,
(None, None),
);
}
@ -465,9 +461,7 @@ pub fn log_generator_error(app_name: &str, message: &str, error: &str) {
LogSource::Generator,
message,
error,
None,
None,
None,
(None, None, None),
);
}
@ -482,9 +476,7 @@ pub fn log_validation_error(
LogSource::Validation,
message,
"Validation failed",
file_path,
line_number,
None,
(file_path, line_number, None),
);
}
@ -494,9 +486,7 @@ pub fn log_runtime_error(app_name: &str, message: &str, error: &str, stack_trace
LogSource::Runtime,
message,
error,
None,
None,
stack_trace,
(None, None, stack_trace),
);
}

View file

@ -1075,7 +1075,7 @@ Respond ONLY with valid JSON."#
.llm_provider
.generate(prompt, &llm_config, &model, &key)
.await?;
return Ok(response);
Ok(response)
}
#[cfg(not(feature = "llm"))]

View file

@ -1129,7 +1129,7 @@ END TRIGGER
.llm_provider
.generate(prompt, &llm_config, &model, &key)
.await?;
return Ok(response);
Ok(response)
}
#[cfg(not(feature = "llm"))]

View file

@ -708,7 +708,7 @@ Respond ONLY with valid JSON."#,
.llm_provider
.generate(prompt, &llm_config, &model, &key)
.await?;
return Ok(response);
Ok(response)
}
#[cfg(not(feature = "llm"))]

View file

@ -935,25 +935,28 @@ pub struct MonitorDefinition {
pub target: String,
}
pub struct ManifestData {
pub tables: Vec<TableDefinition>,
pub files: Vec<FileDefinition>,
pub pages: Vec<PageDefinition>,
pub tools: Vec<ToolDefinition>,
pub schedulers: Vec<SchedulerDefinition>,
pub monitors: Vec<MonitorDefinition>,
}
pub fn create_manifest_from_llm_response(
app_name: &str,
description: &str,
tables: Vec<TableDefinition>,
files: Vec<FileDefinition>,
pages: Vec<PageDefinition>,
tools: Vec<ToolDefinition>,
schedulers: Vec<SchedulerDefinition>,
monitors: Vec<MonitorDefinition>,
data: ManifestData,
) -> TaskManifest {
let estimated_time = estimate_generation_time(&tables, &files, &tools, &schedulers);
let estimated_time = estimate_generation_time(&data.tables, &data.files, &data.tools, &data.schedulers);
ManifestBuilder::new(app_name, description)
.with_tables(tables)
.with_files(files)
.with_pages(pages)
.with_tools(tools)
.with_schedulers(schedulers)
.with_monitors(monitors)
.with_tables(data.tables)
.with_files(data.files)
.with_pages(data.pages)
.with_tools(data.tools)
.with_schedulers(data.schedulers)
.with_estimated_time(estimated_time)
.build()
}

View file

@ -8,6 +8,7 @@ use diesel::ExpressionMethods;
use diesel::QueryDsl;
use diesel::RunQueryDsl;
use log::{trace, warn};
use regex::Regex;
pub mod goto_transform;
use serde::{Deserialize, Serialize};
@ -406,27 +407,33 @@ impl BasicCompiler {
continue;
}
if trimmed.starts_with("USE WEBSITE") {
let parts: Vec<&str> = normalized.split('"').collect();
if parts.len() >= 2 {
let url = parts[1];
if trimmed.to_uppercase().starts_with("USE WEBSITE") {
let re = Regex::new(r#"(?i)USE\s+WEBSITE\s+"([^"]+)"(?:\s+REFRESH\s+"([^"]+)")?"#).unwrap();
if let Some(caps) = re.captures(&normalized) {
if let Some(url_match) = caps.get(1) {
let url = url_match.as_str();
let refresh = caps.get(2).map(|m| m.as_str()).unwrap_or("1m");
let mut conn = self
.state
.conn
.get()
.map_err(|e| format!("Failed to get database connection: {}", e))?;
if let Err(e) =
crate::basic::keywords::use_website::execute_use_website_preprocessing(
&mut conn, url, bot_id,
crate::basic::keywords::use_website::execute_use_website_preprocessing_with_refresh(
&mut conn, url, bot_id, refresh,
)
{
log::error!("Failed to register USE_WEBSITE during preprocessing: {}", e);
} else {
log::info!(
"Registered website {} for crawling during preprocessing",
url
"Registered website {} for crawling during preprocessing (refresh: {})",
url, refresh
);
}
result.push_str(&format!("USE_WEBSITE(\"{}\", \"{}\");\n", url, refresh));
continue;
}
} else {
log::warn!("Malformed USE_WEBSITE line ignored: {}", normalized);
}

View file

@ -1,7 +1,6 @@
use crate::shared::models::UserSession;
use crate::shared::state::AppState;
use diesel::prelude::*;
use diesel::sql_query;
use log::{info, trace};
use rhai::{Dynamic, Engine};
use serde::{Deserialize, Serialize};
@ -593,26 +592,10 @@ fn add_bot_to_session(
.map(|r| r.id)
.map_err(|e| format!("Failed to get bot ID: {e}"))?
} else {
let new_bot_id = Uuid::new_v4();
let db_name = format!("bot_{}", bot_name.replace(['-', ' '], "_").to_lowercase());
diesel::sql_query(
"INSERT INTO bots (id, name, description, is_active, database_name, created_at)
VALUES ($1, $2, $3, true, $4, NOW())
ON CONFLICT (name) DO UPDATE SET is_active = true, database_name = COALESCE(bots.database_name, $4)
RETURNING id",
)
.bind::<diesel::sql_types::Text, _>(new_bot_id.to_string())
.bind::<diesel::sql_types::Text, _>(bot_name)
.bind::<diesel::sql_types::Text, _>(format!("Bot agent: {bot_name}"))
.bind::<diesel::sql_types::Text, _>(&db_name)
.execute(&mut *conn)
.map_err(|e| format!("Failed to create bot: {e}"))?;
if let Err(e) = create_bot_database(&mut conn, &db_name) {
log::warn!("Failed to create database for bot {bot_name}: {e}");
}
new_bot_id.to_string()
return Err(format!(
"Bot '{}' does not exist in database. Please create it first using the import process.",
bot_name
));
};
let trigger_json =
@ -852,48 +835,3 @@ struct BotConfigRow {
#[diesel(sql_type = diesel::sql_types::Nullable<diesel::sql_types::Text>)]
model_config: Option<String>,
}
fn create_bot_database(conn: &mut PgConnection, db_name: &str) -> Result<(), String> {
let safe_db_name: String = db_name
.chars()
.filter(|c| c.is_alphanumeric() || *c == '_')
.collect();
if safe_db_name.is_empty() || safe_db_name.len() > 63 {
return Err("Invalid database name".into());
}
#[derive(QueryableByName)]
struct DbExists {
#[diesel(sql_type = diesel::sql_types::Bool)]
exists: bool,
}
let check_query = format!(
"SELECT EXISTS (SELECT 1 FROM pg_database WHERE datname = '{}') as exists",
safe_db_name
);
let exists = sql_query(&check_query)
.get_result::<DbExists>(conn)
.map(|r| r.exists)
.unwrap_or(false);
if exists {
info!("Database {} already exists", safe_db_name);
return Ok(());
}
let create_query = format!("CREATE DATABASE {}", safe_db_name);
if let Err(e) = sql_query(&create_query).execute(conn) {
let err_str = e.to_string();
if err_str.contains("already exists") {
info!("Database {} already exists", safe_db_name);
return Ok(());
}
return Err(format!("Failed to create database: {}", e));
}
info!("Created database: {}", safe_db_name);
Ok(())
}

View file

@ -47,7 +47,7 @@ pub fn create_site_keyword(state: &AppState, user: UserSession, engine: &mut Eng
}
};
let s3 = state_clone.s3_client.clone().map(std::sync::Arc::new);
let s3 = state_clone.drive.clone().map(std::sync::Arc::new);
let bucket = state_clone.bucket_name.clone();
let bot_id = user_clone.bot_id.to_string();

View file

@ -53,18 +53,15 @@ pub struct ActionItem {
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
#[serde(rename_all = "lowercase")]
#[derive(Default)]
pub enum Priority {
Low,
#[default]
Medium,
High,
Critical,
}
impl Default for Priority {
fn default() -> Self {
Self::Medium
}
}
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Sentiment {
@ -77,19 +74,16 @@ pub struct Sentiment {
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
#[serde(rename_all = "lowercase")]
#[derive(Default)]
pub enum SentimentLabel {
VeryNegative,
Negative,
#[default]
Neutral,
Positive,
VeryPositive,
}
impl Default for SentimentLabel {
fn default() -> Self {
Self::Neutral
}
}
impl Default for Sentiment {
fn default() -> Self {
@ -103,19 +97,16 @@ impl Default for Sentiment {
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
#[serde(rename_all = "lowercase")]
#[derive(Default)]
pub enum ResolutionStatus {
Resolved,
Unresolved,
Escalated,
Pending,
#[default]
Unknown,
}
impl Default for ResolutionStatus {
fn default() -> Self {
Self::Unknown
}
}
#[derive(Debug, Clone)]
pub struct EpisodicMemoryConfig {

View file

@ -71,15 +71,20 @@ pub fn register_on_error_keywords(_state: Arc<AppState>, _user: UserSession, eng
engine
.register_custom_syntax(
["ON", "ERROR", "GOTO", "0"],
["ON", "ERROR", "GOTO", "$ident$"],
false,
move |_context, _inputs| {
move |context, inputs| {
let label = context.eval_expression_tree(&inputs[0])?.to_string();
if label == "0" {
trace!("ON ERROR GOTO 0 - Error handling disabled");
set_error_resume_next(false);
} else {
trace!("ON ERROR GOTO {} - Error handler set", label);
}
Ok(Dynamic::UNIT)
},
)
.expect("Failed to register ON ERROR GOTO 0");
.expect("Failed to register ON ERROR GOTO");
engine
.register_custom_syntax(["CLEAR", "ERROR"], false, move |_context, _inputs| {

View file

@ -146,7 +146,7 @@ async fn publish_event(
if let Some(redis_client) = &state.cache {
if let Ok(mut redis_conn) = redis_client.get_multiplexed_async_connection().await {
let channel = format!("events:{event_name}");
let _: Result<(), _> = redis_conn.publish(&channel, &new_event.id.to_string()).await;
let _: Result<(), _> = redis_conn.publish(&channel, new_event.id.to_string()).await;
}
}

View file

@ -8,6 +8,9 @@ use serde::{Deserialize, Serialize};
use std::sync::Arc;
use uuid::Uuid;
// Import the send_message_to_recipient function from universal_messaging
use super::universal_messaging::send_message_to_recipient;
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
pub enum InputType {
Any,
@ -1082,7 +1085,7 @@ pub async fn execute_talk(
session_id: user_session.id.to_string(),
channel: "web".to_string(),
content: message,
message_type: MessageType::USER,
message_type: MessageType::BOT_RESPONSE,
stream_token: None,
is_complete: true,
suggestions,
@ -1111,8 +1114,42 @@ pub async fn execute_talk(
pub fn talk_keyword(state: Arc<AppState>, user: UserSession, engine: &mut Engine) {
let state_clone = Arc::clone(&state);
let user_clone = user;
let user_clone = user.clone();
// Register TALK TO "recipient", "message" syntax FIRST (more specific pattern)
let state_clone2 = Arc::clone(&state);
let user_clone2 = user.clone();
engine
.register_custom_syntax(
["TALK", "TO", "$expr$", ",", "$expr$"],
true,
move |context, inputs| {
let recipient = context.eval_expression_tree(&inputs[0])?.to_string();
let message = context.eval_expression_tree(&inputs[1])?.to_string();
trace!("TALK TO: Sending message to {}", recipient);
let state_for_send = Arc::clone(&state_clone2);
let user_for_send = user_clone2.clone();
tokio::spawn(async move {
if let Err(e) = send_message_to_recipient(
state_for_send,
&user_for_send,
&recipient,
&message,
).await {
error!("Failed to send TALK TO message: {}", e);
}
});
Ok(Dynamic::UNIT)
},
)
.expect("valid syntax registration");
// Register simple TALK "message" syntax SECOND (fallback pattern)
engine
.register_custom_syntax(["TALK", "$expr$"], true, move |context, inputs| {
let message = context.eval_expression_tree(&inputs[0])?.to_string();

View file

@ -30,18 +30,15 @@ pub struct KgEntity {
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
#[serde(rename_all = "lowercase")]
#[derive(Default)]
pub enum EntitySource {
#[default]
Manual,
Extracted,
Imported,
Inferred,
}
impl Default for EntitySource {
fn default() -> Self {
Self::Manual
}
}
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct KgRelationship {

View file

@ -19,18 +19,15 @@ pub struct ModelConfig {
}
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
#[derive(Default)]
pub enum RoutingStrategy {
#[default]
Manual,
Auto,
LoadBalanced,
Fallback,
}
impl Default for RoutingStrategy {
fn default() -> Self {
Self::Manual
}
}
#[derive(Debug, Clone)]
pub struct ModelRouter {

View file

@ -8,7 +8,7 @@ pub fn parse_natural_schedule(input: &str) -> Result<String, String> {
let input = input.trim().to_lowercase();
let parts: Vec<&str> = input.split_whitespace().collect();
if parts.len() == 5 && is_cron_expression(&parts) {
if (parts.len() == 5 || parts.len() == 6) && is_cron_expression(&parts) {
return Ok(input);
}
@ -16,9 +16,14 @@ pub fn parse_natural_schedule(input: &str) -> Result<String, String> {
}
fn is_cron_expression(parts: &[&str]) -> bool {
if parts.len() != 5 && parts.len() != 6 {
return false;
}
parts.iter().all(|part| {
part.chars()
.all(|c| c.is_ascii_digit() || c == '*' || c == '/' || c == '-' || c == ',')
part.chars().all(|c| {
c.is_ascii_digit() || c == '*' || c == '/' || c == '-' || c == ',' || c.is_ascii_alphabetic()
})
})
}

View file

@ -10,12 +10,14 @@ use serde_json::json;
use std::sync::Arc;
pub fn register_universal_messaging(state: Arc<AppState>, user: UserSession, engine: &mut Engine) {
register_talk_to(state.clone(), user.clone(), engine);
register_send_file_to(state.clone(), user.clone(), engine);
register_send_to(state.clone(), user.clone(), engine);
register_broadcast(state, user, engine);
}
// DEPRECATED: TALK TO functionality moved to hear_talk.rs talk_keyword function
// to avoid syntax conflicts between TALK and TALK TO
/*
fn register_talk_to(state: Arc<AppState>, user: UserSession, engine: &mut Engine) {
let state_clone = Arc::clone(&state);
@ -50,6 +52,7 @@ fn register_talk_to(state: Arc<AppState>, user: UserSession, engine: &mut Engine
)
.expect("valid syntax registration");
}
*/
fn register_send_file_to(state: Arc<AppState>, user: UserSession, engine: &mut Engine) {
let state_clone = Arc::clone(&state);
@ -179,7 +182,7 @@ fn register_broadcast(state: Arc<AppState>, user: UserSession, engine: &mut Engi
.expect("valid syntax registration");
}
async fn send_message_to_recipient(
pub async fn send_message_to_recipient(
state: Arc<AppState>,
user: &UserSession,
recipient: &str,
@ -461,7 +464,7 @@ async fn send_instagram_file(
let file_key = format!("temp/instagram/{}_{}.bin", user.id, uuid::Uuid::new_v4());
if let Some(s3) = &state.s3_client {
if let Some(s3) = &state.drive {
s3.put_object()
.bucket("uploads")
.key(&file_key)
@ -483,7 +486,7 @@ async fn send_instagram_file(
tokio::spawn(async move {
tokio::time::sleep(tokio::time::Duration::from_secs(3600)).await;
if let Some(s3) = &state.s3_client {
if let Some(s3) = &state.drive {
let _ = s3
.delete_object()
.bucket("uploads")

Some files were not shown because too many files have changed in this diff Show more