Compare commits
No commits in common. "30345c66e2738ebe73d896841e54f655999e3630" and "d26f0652e5c5f145dba86ad13915f192d03bb55a" have entirely different histories.
30345c66e2 ... d26f0652e5
138 changed files with 1446 additions and 4242 deletions
@@ -1,18 +1,10 @@
-name: BotServer CI
+name: GBCI

 on:
   push:
     branches: ["main"]
-    paths:
-      - "botserver/**"
-      - "botlib/**"
-      - ".forgejo/workflows/botserver.yaml"
   pull_request:
     branches: ["main"]
-    paths:
-      - "botserver/**"
-      - "botlib/**"
-      - ".forgejo/workflows/botserver.yaml"

 env:
   CARGO_BUILD_JOBS: 8
@@ -37,17 +29,13 @@ jobs:
           git clone --depth 1 --branch main https://alm.pragmatismo.com.br/GeneralBots/gb.git workspace
           cd workspace
           git submodule update --init --depth 1 botlib
+          git submodule update --init --depth 1 botui

-          # Remove all members except botserver and botlib from workspace
+          # Remove missing members from workspace to prevent Cargo errors
           sed -i '/"botapp",/d' Cargo.toml
           sed -i '/"botdevice",/d' Cargo.toml
           sed -i '/"bottest",/d' Cargo.toml
-          sed -i '/"botui",/d' Cargo.toml
-          sed -i '/"botbook",/d' Cargo.toml
-          sed -i '/"botmodels",/d' Cargo.toml
-          sed -i '/"botplugin",/d' Cargo.toml
-          sed -i '/"bottemplates",/d' Cargo.toml

           cd ..
           rm -rf workspace/botserver
           mv botserver workspace/botserver
@@ -62,9 +50,8 @@ jobs:
             ~/.cargo/git
             ~/.cache/sccache
             workspace/target
-          key: ${{ runner.os }}-cargo-v2-debug-server-${{ hashFiles('**/Cargo.lock') }}
+          key: ${{ runner.os }}-cargo-v2-debug-${{ hashFiles('**/Cargo.lock') }}
           restore-keys: |
-            ${{ runner.os }}-cargo-v2-debug-server-
             ${{ runner.os }}-cargo-v2-debug-

       - name: Install system dependencies
@@ -90,11 +77,13 @@ jobs:
       - name: Setup environment
         run: sudo cp /opt/gbo/bin/system/.env . 2>/dev/null || true

-      - name: Build BotServer
+      - name: Build debug
         working-directory: workspace
         run: |
           cargo build -p botserver -j 8 2>&1 | tee /tmp/build.log
+          cargo build -p botui --features embed-ui -j 8 2>&1 | tee -a /tmp/build.log
           ls -lh target/debug/botserver
+          ls -lh target/debug/botui
           sccache --show-stats || true

       - name: Save build log
@@ -107,9 +96,12 @@ jobs:
         working-directory: workspace
         run: |
           lxc exec bot:pragmatismo-system -- systemctl stop system || true
+          lxc exec bot:pragmatismo-system -- systemctl stop ui || true

           sudo cp target/debug/botserver /opt/gbo/bin/system/
           sudo chmod +x /opt/gbo/bin/system/botserver
+          sudo cp target/debug/botui /opt/gbo/bin/system/
+          sudo chmod +x /opt/gbo/bin/system/botui

           lxc exec bot:pragmatismo-system -- systemctl start system || true
+          lxc exec bot:pragmatismo-system -- systemctl start ui || true
@@ -10,7 +10,7 @@ features = ["database", "i18n"]

 [features]
 # ===== DEFAULT =====
-default = ["chat", "automation", "drive", "tasks", "cache", "directory", "llm", "crawler"]
+default = ["chat", "automation", "drive", "tasks", "cache", "directory", "llm"]

 # ===== CORE INFRASTRUCTURE (Can be used standalone) =====
 scripting = ["dep:rhai"]
@@ -18,7 +18,6 @@ automation = ["scripting", "dep:cron"]
 drive = ["dep:aws-config", "dep:aws-sdk-s3", "dep:aws-smithy-async", "dep:pdf-extract"]
 cache = ["dep:redis"]
 directory = []
-crawler = ["drive", "cache"]

 # ===== APPS (Each includes what it needs from core) =====
 # Communication
@@ -49,7 +48,7 @@ player = ["automation", "drive", "cache"]
 canvas = ["automation", "drive", "cache"]

 # Learning
-learn = ["automation", "drive", "cache", "crawler"]
+learn = ["automation", "drive", "cache"]
 research = ["automation", "drive", "cache", "llm", "vectordb"]
 sources = ["automation", "drive", "cache"]
@@ -324,7 +324,7 @@ When a file grows beyond this limit:

 ## 🗄️ Database Standards

-- **TABLES AND INDEXES ONLY** (no stored procedures, nothing, no views, no triggers, no functions)
+- **TABLES AND INDEXES ONLY** (no views, triggers, functions)
 - **JSON columns:** use TEXT with `_json` suffix
 - **ORM:** Use diesel - no sqlx
 - **Migrations:** Located in `botserver/migrations/`
build.rs (3 lines changed)
@@ -1,3 +0,0 @@
-fn main() {
-    println!("cargo:rerun-if-changed=../botui/ui/suite/");
-}
@@ -1,7 +1,7 @@
 {
   "base_url": "http://localhost:8300",
   "default_org": {
-    "id": "357870945618100238",
+    "id": "354799954578898958",
     "name": "default",
     "domain": "default.localhost"
   },
@@ -13,8 +13,8 @@
     "first_name": "Admin",
     "last_name": "User"
   },
-  "admin_token": "RflPqOgYM-BtinaBTyCaY8hX-_koTwC65gCg1Kpf7Sfhlc0ZOLZvIr-XsOYXmckPLBAWzjU",
+  "admin_token": "6nMpG1E-H-hqlSjrbFB5n2yx8BoEpnl3a3-F3HJoc5bvria3nwiz8vURPndzS4lQWKwaz_8",
   "project_id": "",
-  "client_id": "357870946289254414",
-  "client_secret": "q20LOjW5Vdjzp57Cw8EuFt7sILEd8VeSeGPvrhB63880GLgaJZpcWeRgUwdGET2x"
+  "client_id": "354799955384270862",
+  "client_secret": "z6WFb1qshdCQ1y4Gw5EpOHzARgHicz6XkrazZwJdDcxMJrc6iRdHlhf5rf5LTzgi"
 }
@@ -1,3 +0,0 @@
--- Remove the refresh_policy column from website_crawls table
-ALTER TABLE website_crawls
-DROP COLUMN IF EXISTS refresh_policy;
@@ -1,13 +0,0 @@
--- Add refresh_policy column to website_crawls table
--- This column stores the user-configured refresh interval (e.g., "1d", "1w", "1m", "1y")
-
-ALTER TABLE website_crawls
-ADD COLUMN IF NOT EXISTS refresh_policy VARCHAR(20);
-
--- Update existing records to have a default refresh policy (1 month)
-UPDATE website_crawls
-SET refresh_policy = '1m'
-WHERE refresh_policy IS NULL;
-
--- Add comment for documentation
-COMMENT ON COLUMN website_crawls.refresh_policy IS 'User-configured refresh interval (e.g., "1d", "1w", "1m", "1y") - shortest interval is used when duplicates exist';
@@ -1,2 +0,0 @@
-DROP INDEX IF EXISTS idx_bots_database_name;
-ALTER TABLE bots DROP COLUMN IF EXISTS database_name;
@@ -1,2 +0,0 @@
-ALTER TABLE bots ADD COLUMN IF NOT EXISTS database_name VARCHAR(255);
-CREATE INDEX IF NOT EXISTS idx_bots_database_name ON bots(database_name);
@@ -1,6 +1,3 @@
--- Drop existing workflow_executions table if it exists (from older schema)
-DROP TABLE IF EXISTS workflow_executions CASCADE;
-
 -- Workflow state persistence (survives server restart)
 CREATE TABLE workflow_executions (
     id UUID PRIMARY KEY,
@@ -22,7 +22,7 @@ CREATE INDEX idx_billing_usage_alerts_org_id ON billing_usage_alerts(org_id);
 CREATE INDEX idx_billing_usage_alerts_bot_id ON billing_usage_alerts(bot_id);
 CREATE INDEX idx_billing_usage_alerts_severity ON billing_usage_alerts(severity);
 CREATE INDEX idx_billing_usage_alerts_created_at ON billing_usage_alerts(created_at);
-CREATE INDEX idx_billing_usage_alerts_acknowledged ON billing_usage_alerts(acknowledged_at);
+CREATE INDEX idx_billing_usage_alerts_acknowledged ON billing_usage_alerts(acknowledged_at) WHERE acknowledged_at IS NULL;

 -- Billing Alert History table
 CREATE TABLE IF NOT EXISTS billing_alert_history (
@@ -87,9 +87,9 @@ CREATE TABLE IF NOT EXISTS billing_grace_periods (
     end_reason VARCHAR(50),
     created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
     updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
-    UNIQUE(org_id, metric, is_active)
+    UNIQUE(org_id, metric, is_active) WHERE is_active = TRUE
 );

 CREATE INDEX idx_billing_grace_periods_org_id ON billing_grace_periods(org_id);
-CREATE INDEX idx_billing_grace_periods_active ON billing_grace_periods(is_active);
-CREATE INDEX idx_billing_grace_periods_expires ON billing_grace_periods(expires_at);
+CREATE INDEX idx_billing_grace_periods_active ON billing_grace_periods(is_active) WHERE is_active = TRUE;
+CREATE INDEX idx_billing_grace_periods_expires ON billing_grace_periods(expires_at) WHERE is_active = TRUE;
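Review note on the hunk above: PostgreSQL does not accept a `WHERE` clause on a `UNIQUE` constraint inside `CREATE TABLE`, so the added `UNIQUE(org_id, metric, is_active) WHERE is_active = TRUE` line would fail to parse (the `WHERE` forms on the two `CREATE INDEX` statements are fine — those are ordinary partial indexes). The standard way to express "at most one active grace period per org and metric" is a separate partial unique index; a minimal sketch using the names from this hunk (the index name is illustrative):

```sql
-- Partial unique index: uniqueness is enforced only among rows matching
-- the predicate, so any number of inactive rows can coexist. is_active
-- need not be a key column because the predicate already fixes it.
CREATE UNIQUE INDEX idx_billing_grace_periods_one_active
    ON billing_grace_periods (org_id, metric)
    WHERE is_active = TRUE;
```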
@@ -37,16 +37,16 @@ CREATE TABLE IF NOT EXISTS calendar_resource_bookings (
 CREATE INDEX IF NOT EXISTS idx_resource_bookings_resource ON calendar_resource_bookings(resource_id, start_time, end_time);
 CREATE INDEX IF NOT EXISTS idx_resource_bookings_user ON calendar_resource_bookings(booked_by);

--- Calendar sharing (skip - already exists from 6.0.13-01-calendar)
--- CREATE TABLE IF NOT EXISTS calendar_shares (
---     id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
---     owner_id UUID NOT NULL REFERENCES users(id) ON DELETE CASCADE,
---     shared_with_user UUID REFERENCES users(id) ON DELETE CASCADE,
---     shared_with_email VARCHAR(255),
---     permission_level VARCHAR(20) DEFAULT 'view',
---     created_at TIMESTAMPTZ DEFAULT NOW(),
---     CONSTRAINT check_cal_permission CHECK (permission_level IN ('free_busy', 'view', 'edit', 'admin'))
--- );
+-- Calendar sharing
+CREATE TABLE IF NOT EXISTS calendar_shares (
+    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
+    owner_id UUID NOT NULL REFERENCES users(id) ON DELETE CASCADE,
+    shared_with_user UUID REFERENCES users(id) ON DELETE CASCADE,
+    shared_with_email VARCHAR(255),
+    permission_level VARCHAR(20) DEFAULT 'view',
+    created_at TIMESTAMPTZ DEFAULT NOW(),
+    CONSTRAINT check_cal_permission CHECK (permission_level IN ('free_busy', 'view', 'edit', 'admin'))
+);

--- CREATE INDEX IF NOT EXISTS idx_calendar_shares_owner ON calendar_shares(owner_id);
--- CREATE INDEX IF NOT EXISTS idx_calendar_shares_shared ON calendar_shares(shared_with_user);
+CREATE INDEX IF NOT EXISTS idx_calendar_shares_owner ON calendar_shares(owner_id);
+CREATE INDEX IF NOT EXISTS idx_calendar_shares_shared ON calendar_shares(shared_with_user);
@@ -2748,7 +2748,7 @@ CREATE INDEX IF NOT EXISTS idx_designer_pending_changes_expires_at ON designer_p
 -- Add role-based access control columns to dynamic table definitions and fields
 --
 -- Syntax in .gbdialog TABLE definitions:
--- TABLE Contatos READ BY "admin;manager"
+-- TABLE Contatos ON maria READ BY "admin;manager"
 --    Id number key
 --    Nome string(150)
 --    NumeroDocumento string(25) READ BY "admin"
@@ -3,7 +3,7 @@

 CREATE TABLE IF NOT EXISTS organization_invitations (
     id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
-    org_id UUID NOT NULL REFERENCES organizations(org_id) ON DELETE CASCADE,
+    org_id UUID NOT NULL REFERENCES organizations(id) ON DELETE CASCADE,
     email VARCHAR(255) NOT NULL,
     role VARCHAR(50) NOT NULL DEFAULT 'member',
     status VARCHAR(20) NOT NULL DEFAULT 'pending',
@@ -4,5 +4,4 @@ DROP TABLE IF EXISTS meeting_waiting_room;
 DROP TABLE IF EXISTS meeting_questions;
 DROP TABLE IF EXISTS meeting_polls;
 DROP TABLE IF EXISTS meeting_breakout_rooms;
--- Note: meeting_recordings table is from 6.0.23 migration, don't drop it
-DROP TABLE IF EXISTS meetings;
+DROP TABLE IF EXISTS meeting_recordings;
@@ -1,29 +1,32 @@
 -- Legacy Meet Tables extracted from consolidated

--- Core meetings table (if not exists from scheduled_meetings)
-CREATE TABLE IF NOT EXISTS meetings (
+-- Meeting recordings
+CREATE TABLE IF NOT EXISTS meeting_recordings (
     id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
-    scheduled_meeting_id UUID REFERENCES scheduled_meetings(id) ON DELETE SET NULL,
-    room_id UUID,
-    title VARCHAR(255) NOT NULL,
-    status VARCHAR(20) DEFAULT 'active',
-    started_at TIMESTAMPTZ DEFAULT NOW(),
-    ended_at TIMESTAMPTZ,
+    meeting_id UUID NOT NULL,
+    bot_id UUID NOT NULL REFERENCES bots(id) ON DELETE CASCADE,
+    recorded_by UUID NOT NULL REFERENCES users(id) ON DELETE CASCADE,
+    file_path TEXT NOT NULL,
+    file_size BIGINT NOT NULL DEFAULT 0,
+    duration_seconds INTEGER,
+    format VARCHAR(20) DEFAULT 'mp4',
+    thumbnail_path TEXT,
+    transcription_path TEXT,
+    transcription_status VARCHAR(20) DEFAULT 'pending',
+    is_shared BOOLEAN DEFAULT false,
+    shared_with_json TEXT DEFAULT '[]',
+    retention_until TIMESTAMPTZ,
     created_at TIMESTAMPTZ DEFAULT NOW(),
-    CONSTRAINT check_meeting_status CHECK (status IN ('active', 'ended', 'cancelled'))
+    CONSTRAINT check_transcription_status CHECK (transcription_status IN ('pending', 'processing', 'completed', 'failed'))
 );

-CREATE INDEX IF NOT EXISTS idx_meetings_scheduled ON meetings(scheduled_meeting_id);
-CREATE INDEX IF NOT EXISTS idx_meetings_status ON meetings(status);
+CREATE INDEX IF NOT EXISTS idx_meeting_recordings_meeting ON meeting_recordings(meeting_id);
+CREATE INDEX IF NOT EXISTS idx_meeting_recordings_bot ON meeting_recordings(bot_id);

--- Meeting recordings (legacy table already exists, skip creation)
--- Note: meeting_recordings table already exists from 6.0.23 migration with different schema
--- This migration creates additional meeting-related tables that reference the new meetings table

 -- Breakout rooms
 CREATE TABLE IF NOT EXISTS meeting_breakout_rooms (
     id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
-    meeting_id UUID NOT NULL REFERENCES meetings(id) ON DELETE CASCADE,
+    meeting_id UUID NOT NULL,
     name VARCHAR(100) NOT NULL,
     room_number INTEGER NOT NULL,
     participants_json TEXT DEFAULT '[]',
@@ -38,7 +41,7 @@ CREATE INDEX IF NOT EXISTS idx_breakout_rooms_meeting ON meeting_breakout_rooms(
 -- Meeting polls
 CREATE TABLE IF NOT EXISTS meeting_polls (
     id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
-    meeting_id UUID NOT NULL REFERENCES meetings(id) ON DELETE CASCADE,
+    meeting_id UUID NOT NULL,
     created_by UUID NOT NULL REFERENCES users(id) ON DELETE CASCADE,
     question TEXT NOT NULL,
     poll_type VARCHAR(20) DEFAULT 'single',
@@ -57,7 +60,7 @@ CREATE INDEX IF NOT EXISTS idx_meeting_polls_meeting ON meeting_polls(meeting_id
 -- Meeting Q&A
 CREATE TABLE IF NOT EXISTS meeting_questions (
     id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
-    meeting_id UUID NOT NULL REFERENCES meetings(id) ON DELETE CASCADE,
+    meeting_id UUID NOT NULL,
     asked_by UUID REFERENCES users(id) ON DELETE SET NULL,
     question TEXT NOT NULL,
     is_anonymous BOOLEAN DEFAULT false,
@@ -75,7 +78,7 @@ CREATE INDEX IF NOT EXISTS idx_meeting_questions_unanswered ON meeting_questions
 -- Meeting waiting room
 CREATE TABLE IF NOT EXISTS meeting_waiting_room (
     id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
-    meeting_id UUID NOT NULL REFERENCES meetings(id) ON DELETE CASCADE,
+    meeting_id UUID NOT NULL,
     user_id UUID REFERENCES users(id) ON DELETE CASCADE,
     guest_name VARCHAR(255),
     guest_email VARCHAR(255),
@@ -93,7 +96,7 @@ CREATE INDEX IF NOT EXISTS idx_waiting_room_status ON meeting_waiting_room(meeti
 -- Meeting live captions
 CREATE TABLE IF NOT EXISTS meeting_captions (
     id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
-    meeting_id UUID NOT NULL REFERENCES meetings(id) ON DELETE CASCADE,
+    meeting_id UUID NOT NULL,
     speaker_id UUID REFERENCES users(id) ON DELETE SET NULL,
     speaker_name VARCHAR(255),
     caption_text TEXT NOT NULL,
@@ -189,42 +189,42 @@ CREATE INDEX idx_crm_notes_opportunity ON crm_notes(opportunity_id);
 CREATE INDEX idx_crm_notes_account ON crm_notes(account_id);

 INSERT INTO crm_pipeline_stages (org_id, bot_id, name, stage_order, probability, is_won, is_lost, color)
-SELECT o.org_id, b.id, 'New', 1, 10, FALSE, FALSE, '#94a3b8'
+SELECT org_id, b.id, 'New', 1, 10, FALSE, FALSE, '#94a3b8'
 FROM organizations o
 CROSS JOIN bots b
 LIMIT 1
 ON CONFLICT DO NOTHING;

 INSERT INTO crm_pipeline_stages (org_id, bot_id, name, stage_order, probability, is_won, is_lost, color)
-SELECT o.org_id, b.id, 'Qualified', 2, 25, FALSE, FALSE, '#3b82f6'
+SELECT org_id, b.id, 'Qualified', 2, 25, FALSE, FALSE, '#3b82f6'
 FROM organizations o
 CROSS JOIN bots b
 LIMIT 1
 ON CONFLICT DO NOTHING;

 INSERT INTO crm_pipeline_stages (org_id, bot_id, name, stage_order, probability, is_won, is_lost, color)
-SELECT o.org_id, b.id, 'Proposal', 3, 50, FALSE, FALSE, '#8b5cf6'
+SELECT org_id, b.id, 'Proposal', 3, 50, FALSE, FALSE, '#8b5cf6'
 FROM organizations o
 CROSS JOIN bots b
 LIMIT 1
 ON CONFLICT DO NOTHING;

 INSERT INTO crm_pipeline_stages (org_id, bot_id, name, stage_order, probability, is_won, is_lost, color)
-SELECT o.org_id, b.id, 'Negotiation', 4, 75, FALSE, FALSE, '#f59e0b'
+SELECT org_id, b.id, 'Negotiation', 4, 75, FALSE, FALSE, '#f59e0b'
 FROM organizations o
 CROSS JOIN bots b
 LIMIT 1
 ON CONFLICT DO NOTHING;

 INSERT INTO crm_pipeline_stages (org_id, bot_id, name, stage_order, probability, is_won, is_lost, color)
-SELECT o.org_id, b.id, 'Won', 5, 100, TRUE, FALSE, '#22c55e'
+SELECT org_id, b.id, 'Won', 5, 100, TRUE, FALSE, '#22c55e'
 FROM organizations o
 CROSS JOIN bots b
 LIMIT 1
 ON CONFLICT DO NOTHING;

 INSERT INTO crm_pipeline_stages (org_id, bot_id, name, stage_order, probability, is_won, is_lost, color)
-SELECT o.org_id, b.id, 'Lost', 6, 0, FALSE, TRUE, '#ef4444'
+SELECT org_id, b.id, 'Lost', 6, 0, FALSE, TRUE, '#ef4444'
 FROM organizations o
 CROSS JOIN bots b
 LIMIT 1
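A note on these seeds: `ON CONFLICT DO NOTHING` with no conflict target only suppresses rows that would violate some unique constraint. Unless `crm_pipeline_stages` carries a unique constraint covering the seeded columns (not visible in this diff), re-running the migration would insert duplicate stages. A sketch of the kind of arbiter that would make the seeds idempotent — the index name and column choice here are assumptions, not taken from the diff:

```sql
-- With an arbiter like this in place, a second run of the seed INSERTs
-- hits the unique index and ON CONFLICT DO NOTHING skips the row
-- instead of duplicating it.
CREATE UNIQUE INDEX IF NOT EXISTS idx_crm_pipeline_stages_unique_name
    ON crm_pipeline_stages (org_id, bot_id, name);
```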
File diff suppressed because it is too large
@@ -121,18 +121,19 @@ impl AppLogStore {
         source: LogSource,
         message: &str,
         details: Option<String>,
-        ids: (Option<Uuid>, Option<Uuid>), // (bot_id, user_id)
+        bot_id: Option<Uuid>,
+        user_id: Option<Uuid>,
     ) {
         let entry = AppLogEntry {
             id: Uuid::new_v4().to_string(),
             timestamp: Utc::now(),
             level,
             source,
+            app_name: app_name.to_string(),
+            bot_id,
+            user_id,
             message: message.to_string(),
             details,
-            bot_id: ids.0,
-            user_id: ids.1,
-            app_name: app_name.to_string(),
             file_path: None,
             line_number: None,
             stack_trace: None,
@@ -156,7 +157,9 @@ impl AppLogStore {
         source: LogSource,
         message: &str,
         error: &str,
-        location: (Option<&str>, Option<u32>, Option<&str>), // (file_path, line_number, stack_trace)
+        file_path: Option<&str>,
+        line_number: Option<u32>,
+        stack_trace: Option<&str>,
     ) {
         let entry = AppLogEntry {
             id: Uuid::new_v4().to_string(),
@@ -168,9 +171,9 @@ impl AppLogStore {
             user_id: None,
             message: message.to_string(),
             details: Some(error.to_string()),
-            file_path: location.0.map(String::from),
-            line_number: location.1,
-            stack_trace: location.2.map(String::from),
+            file_path: file_path.map(String::from),
+            line_number,
+            stack_trace: stack_trace.map(String::from),
         };

         self.add_entry(entry);
@@ -181,8 +184,8 @@ impl AppLogStore {
             source,
             message,
             error,
-            location.0.unwrap_or("unknown"),
-            location.1.unwrap_or(0)
+            file_path.unwrap_or("unknown"),
+            line_number.unwrap_or(0)
         );
     }
@@ -451,7 +454,8 @@ pub fn log_generator_info(app_name: &str, message: &str) {
         LogSource::Generator,
         message,
         None,
-        (None, None),
+        None,
+        None,
     );
 }
@@ -461,7 +465,9 @@ pub fn log_generator_error(app_name: &str, message: &str, error: &str) {
         LogSource::Generator,
         message,
         error,
-        (None, None, None),
+        None,
+        None,
+        None,
     );
 }
@@ -476,7 +482,9 @@ pub fn log_validation_error(
         LogSource::Validation,
         message,
         "Validation failed",
-        (file_path, line_number, None),
+        file_path,
+        line_number,
+        None,
     );
 }
@@ -486,7 +494,9 @@ pub fn log_runtime_error(app_name: &str, message: &str, error: &str, stack_trace
         LogSource::Runtime,
         message,
         error,
-        (None, None, stack_trace),
+        None,
+        None,
+        stack_trace,
     );
 }
@@ -1075,7 +1075,7 @@ Respond ONLY with valid JSON."#
             .llm_provider
             .generate(prompt, &llm_config, &model, &key)
             .await?;
-        Ok(response)
+        return Ok(response);
     }

     #[cfg(not(feature = "llm"))]
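This `Ok(response)` → `return Ok(response);` change (repeated in the next two hunks) matches the usual shape of feature-gated function bodies: when a `#[cfg(not(feature = "llm"))]` fallback follows in the same function, the gated block is a statement rather than the tail expression, so it has to leave the function with an explicit `return`. A minimal self-contained sketch of the pattern, with hypothetical names:

```rust
// Hypothetical sketch of a function body split across two mutually
// exclusive cfg blocks. Each block is a *statement* inside the body,
// so neither can act as the function's tail expression; both exit
// with an explicit `return`.
fn generate_reply() -> Result<String, String> {
    #[cfg(feature = "llm")]
    {
        let response = String::from("llm output");
        return Ok(response);
    }

    #[cfg(not(feature = "llm"))]
    {
        return Err(String::from("llm feature disabled"));
    }
}
```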
@@ -1129,7 +1129,7 @@ END TRIGGER
             .llm_provider
             .generate(prompt, &llm_config, &model, &key)
             .await?;
-        Ok(response)
+        return Ok(response);
     }

     #[cfg(not(feature = "llm"))]
@@ -708,7 +708,7 @@ Respond ONLY with valid JSON."#,
             .llm_provider
             .generate(prompt, &llm_config, &model, &key)
             .await?;
-        Ok(response)
+        return Ok(response);
     }

     #[cfg(not(feature = "llm"))]
@@ -935,28 +935,25 @@ pub struct MonitorDefinition {
     pub target: String,
 }

-pub struct ManifestData {
-    pub tables: Vec<TableDefinition>,
-    pub files: Vec<FileDefinition>,
-    pub pages: Vec<PageDefinition>,
-    pub tools: Vec<ToolDefinition>,
-    pub schedulers: Vec<SchedulerDefinition>,
-    pub monitors: Vec<MonitorDefinition>,
-}
-
 pub fn create_manifest_from_llm_response(
     app_name: &str,
     description: &str,
-    data: ManifestData,
+    tables: Vec<TableDefinition>,
+    files: Vec<FileDefinition>,
+    pages: Vec<PageDefinition>,
+    tools: Vec<ToolDefinition>,
+    schedulers: Vec<SchedulerDefinition>,
+    monitors: Vec<MonitorDefinition>,
 ) -> TaskManifest {
-    let estimated_time = estimate_generation_time(&data.tables, &data.files, &data.tools, &data.schedulers);
+    let estimated_time = estimate_generation_time(&tables, &files, &tools, &schedulers);

     ManifestBuilder::new(app_name, description)
-        .with_tables(data.tables)
-        .with_files(data.files)
-        .with_pages(data.pages)
-        .with_tools(data.tools)
-        .with_schedulers(data.schedulers)
+        .with_tables(tables)
+        .with_files(files)
+        .with_pages(pages)
+        .with_tools(tools)
+        .with_schedulers(schedulers)
+        .with_monitors(monitors)
         .with_estimated_time(estimated_time)
         .build()
 }
|
||||||
use diesel::QueryDsl;
|
use diesel::QueryDsl;
|
||||||
use diesel::RunQueryDsl;
|
use diesel::RunQueryDsl;
|
||||||
use log::{trace, warn};
|
use log::{trace, warn};
|
||||||
use regex::Regex;
|
|
||||||
|
|
||||||
pub mod goto_transform;
|
pub mod goto_transform;
|
||||||
use serde::{Deserialize, Serialize};
|
use serde::{Deserialize, Serialize};
|
||||||
|
|
@@ -407,33 +406,27 @@ impl BasicCompiler {
                 continue;
             }

-            if trimmed.to_uppercase().starts_with("USE WEBSITE") {
-                let re = Regex::new(r#"(?i)USE\s+WEBSITE\s+"([^"]+)"(?:\s+REFRESH\s+"([^"]+)")?"#).unwrap();
-                if let Some(caps) = re.captures(&normalized) {
-                    if let Some(url_match) = caps.get(1) {
-                        let url = url_match.as_str();
-                        let refresh = caps.get(2).map(|m| m.as_str()).unwrap_or("1m");
+            if trimmed.starts_with("USE WEBSITE") {
+                let parts: Vec<&str> = normalized.split('"').collect();
+                if parts.len() >= 2 {
+                    let url = parts[1];
                     let mut conn = self
                         .state
                         .conn
                         .get()
                         .map_err(|e| format!("Failed to get database connection: {}", e))?;
                     if let Err(e) =
-                        crate::basic::keywords::use_website::execute_use_website_preprocessing_with_refresh(
-                            &mut conn, url, bot_id, refresh,
+                        crate::basic::keywords::use_website::execute_use_website_preprocessing(
+                            &mut conn, url, bot_id,
                         )
                     {
                         log::error!("Failed to register USE_WEBSITE during preprocessing: {}", e);
                     } else {
                         log::info!(
-                            "Registered website {} for crawling during preprocessing (refresh: {})",
-                            url, refresh
+                            "Registered website {} for crawling during preprocessing",
+                            url
                         );
                     }

-                        result.push_str(&format!("USE_WEBSITE(\"{}\", \"{}\");\n", url, refresh));
-                        continue;
-                    }
                 } else {
                     log::warn!("Malformed USE_WEBSITE line ignored: {}", normalized);
                 }
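For comparison of the two parsing strategies above: the removed regex matched the statement case-insensitively and captured an optional `REFRESH` interval (defaulting to `"1m"`), while the `split('"')` replacement keeps only the URL and is case-sensitive about the keyword. A self-contained sketch of the regex approach, with the pattern copied from the removed line (requires the `regex` crate):

```rust
use regex::Regex;

/// Parses `USE WEBSITE "<url>" [REFRESH "<interval>"]`, returning the URL
/// and the refresh interval (defaulting to "1m" when REFRESH is absent).
fn parse_use_website(line: &str) -> Option<(String, String)> {
    let re = Regex::new(r#"(?i)USE\s+WEBSITE\s+"([^"]+)"(?:\s+REFRESH\s+"([^"]+)")?"#).ok()?;
    let caps = re.captures(line)?;
    let url = caps.get(1)?.as_str().to_string();
    let refresh = caps.get(2).map(|m| m.as_str()).unwrap_or("1m").to_string();
    Some((url, refresh))
}

fn main() {
    assert_eq!(
        parse_use_website(r#"USE WEBSITE "https://example.com" REFRESH "1w""#),
        Some(("https://example.com".to_string(), "1w".to_string()))
    );
    // Case-insensitive keyword, and REFRESH defaults to "1m" when omitted.
    assert_eq!(
        parse_use_website(r#"use website "https://example.com""#),
        Some(("https://example.com".to_string(), "1m".to_string()))
    );
}
```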
@@ -1,6 +1,7 @@
 use crate::shared::models::UserSession;
 use crate::shared::state::AppState;
 use diesel::prelude::*;
+use diesel::sql_query;
 use log::{info, trace};
 use rhai::{Dynamic, Engine};
 use serde::{Deserialize, Serialize};
@@ -592,10 +593,26 @@ fn add_bot_to_session(
             .map(|r| r.id)
             .map_err(|e| format!("Failed to get bot ID: {e}"))?
     } else {
-        return Err(format!(
-            "Bot '{}' does not exist in database. Please create it first using the import process.",
-            bot_name
-        ));
+        let new_bot_id = Uuid::new_v4();
+        let db_name = format!("bot_{}", bot_name.replace(['-', ' '], "_").to_lowercase());
+        diesel::sql_query(
+            "INSERT INTO bots (id, name, description, is_active, database_name, created_at)
+             VALUES ($1, $2, $3, true, $4, NOW())
+             ON CONFLICT (name) DO UPDATE SET is_active = true, database_name = COALESCE(bots.database_name, $4)
+             RETURNING id",
+        )
+        .bind::<diesel::sql_types::Text, _>(new_bot_id.to_string())
+        .bind::<diesel::sql_types::Text, _>(bot_name)
+        .bind::<diesel::sql_types::Text, _>(format!("Bot agent: {bot_name}"))
+        .bind::<diesel::sql_types::Text, _>(&db_name)
+        .execute(&mut *conn)
+        .map_err(|e| format!("Failed to create bot: {e}"))?;
+
+        if let Err(e) = create_bot_database(&mut conn, &db_name) {
+            log::warn!("Failed to create database for bot {bot_name}: {e}");
+        }
+
+        new_bot_id.to_string()
     };

     let trigger_json =
@@ -835,3 +852,48 @@ struct BotConfigRow {
     #[diesel(sql_type = diesel::sql_types::Nullable<diesel::sql_types::Text>)]
     model_config: Option<String>,
 }
+
+fn create_bot_database(conn: &mut PgConnection, db_name: &str) -> Result<(), String> {
+    let safe_db_name: String = db_name
+        .chars()
+        .filter(|c| c.is_alphanumeric() || *c == '_')
+        .collect();
+
+    if safe_db_name.is_empty() || safe_db_name.len() > 63 {
+        return Err("Invalid database name".into());
+    }
+
+    #[derive(QueryableByName)]
+    struct DbExists {
+        #[diesel(sql_type = diesel::sql_types::Bool)]
+        exists: bool,
+    }
+
+    let check_query = format!(
+        "SELECT EXISTS (SELECT 1 FROM pg_database WHERE datname = '{}') as exists",
+        safe_db_name
+    );
+
+    let exists = sql_query(&check_query)
+        .get_result::<DbExists>(conn)
+        .map(|r| r.exists)
+        .unwrap_or(false);
+
+    if exists {
+        info!("Database {} already exists", safe_db_name);
+        return Ok(());
+    }
+
+    let create_query = format!("CREATE DATABASE {}", safe_db_name);
+    if let Err(e) = sql_query(&create_query).execute(conn) {
+        let err_str = e.to_string();
+        if err_str.contains("already exists") {
+            info!("Database {} already exists", safe_db_name);
+            return Ok(());
+        }
+        return Err(format!("Failed to create database: {}", e));
+    }
+
+    info!("Created database: {}", safe_db_name);
+    Ok(())
+}
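A small observation on `create_bot_database`: the character filter makes the interpolation safe, but only the `CREATE DATABASE` statement actually requires splicing the name into the SQL text (identifiers cannot be bound as parameters). The `pg_database` existence check compares a value, so it could use a bind parameter instead of `format!`. A sketch of that variant, reusing the same `DbExists` helper shape as the hunk above:

```rust
use diesel::sql_query;
use diesel::sql_types::Text;
use diesel::{PgConnection, QueryableByName, RunQueryDsl};

#[derive(QueryableByName)]
struct DbExists {
    #[diesel(sql_type = diesel::sql_types::Bool)]
    exists: bool,
}

/// Checks pg_database with a bind parameter: `datname` is compared as a
/// value here, so no string interpolation is needed for this query.
fn database_exists(conn: &mut PgConnection, name: &str) -> bool {
    sql_query("SELECT EXISTS (SELECT 1 FROM pg_database WHERE datname = $1) AS exists")
        .bind::<Text, _>(name)
        .get_result::<DbExists>(conn)
        .map(|r| r.exists)
        .unwrap_or(false)
}
```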
@@ -47,7 +47,7 @@ pub fn create_site_keyword(state: &AppState, user: UserSession, engine: &mut Eng
             }
         };

-        let s3 = state_clone.drive.clone().map(std::sync::Arc::new);
+        let s3 = state_clone.s3_client.clone().map(std::sync::Arc::new);
         let bucket = state_clone.bucket_name.clone();
         let bot_id = user_clone.bot_id.to_string();
@@ -53,15 +53,18 @@ pub struct ActionItem {

 #[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
 #[serde(rename_all = "lowercase")]
-#[derive(Default)]
 pub enum Priority {
     Low,
-    #[default]
     Medium,
     High,
     Critical,
 }

+impl Default for Priority {
+    fn default() -> Self {
+        Self::Medium
+    }
+}
+
 #[derive(Debug, Clone, Serialize, Deserialize)]
 pub struct Sentiment {
@@ -74,16 +77,19 @@ pub struct Sentiment {

 #[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
 #[serde(rename_all = "lowercase")]
-#[derive(Default)]
 pub enum SentimentLabel {
     VeryNegative,
     Negative,
-    #[default]
     Neutral,
     Positive,
     VeryPositive,
 }

+impl Default for SentimentLabel {
+    fn default() -> Self {
+        Self::Neutral
+    }
+}
+
 impl Default for Sentiment {
     fn default() -> Self {
@@ -97,16 +103,19 @@ impl Default for Sentiment {

 #[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
 #[serde(rename_all = "lowercase")]
-#[derive(Default)]
 pub enum ResolutionStatus {
     Resolved,
     Unresolved,
     Escalated,
     Pending,
-    #[default]
     Unknown,
 }

+impl Default for ResolutionStatus {
+    fn default() -> Self {
+        Self::Unknown
+    }
+}
+
 #[derive(Debug, Clone)]
 pub struct EpisodicMemoryConfig {
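These three enum hunks (and the later ones for `EntitySource` and `RoutingStrategy`) are behaviorally identical rewrites: `#[derive(Default)]` on an enum with a `#[default]` variant marker has been stable since Rust 1.62 and expands to exactly the manual `impl Default` the right side spells out. A minimal sketch of the two spellings side by side:

```rust
// Derived form (left side of the hunks): needs Rust 1.62+.
#[derive(Debug, PartialEq, Default)]
enum Priority {
    Low,
    #[default]
    Medium,
    High,
}

// Hand-written form (right side): equivalent, works on older toolchains.
#[derive(Debug, PartialEq)]
enum Verbosity {
    Quiet,
    Normal,
    Loud,
}

impl Default for Verbosity {
    fn default() -> Self {
        Self::Normal
    }
}

fn main() {
    assert_eq!(Priority::default(), Priority::Medium);
    assert_eq!(Verbosity::default(), Verbosity::Normal);
}
```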
@@ -71,20 +71,15 @@ pub fn register_on_error_keywords(_state: Arc<AppState>, _user: UserSession, eng

     engine
         .register_custom_syntax(
-            ["ON", "ERROR", "GOTO", "$ident$"],
+            ["ON", "ERROR", "GOTO", "0"],
             false,
-            move |context, inputs| {
-                let label = context.eval_expression_tree(&inputs[0])?.to_string();
-                if label == "0" {
-                    trace!("ON ERROR GOTO 0 - Error handling disabled");
-                    set_error_resume_next(false);
-                } else {
-                    trace!("ON ERROR GOTO {} - Error handler set", label);
-                }
+            move |_context, _inputs| {
+                trace!("ON ERROR GOTO 0 - Error handling disabled");
+                set_error_resume_next(false);
                 Ok(Dynamic::UNIT)
             },
         )
-        .expect("Failed to register ON ERROR GOTO");
+        .expect("Failed to register ON ERROR GOTO 0");

     engine
         .register_custom_syntax(["CLEAR", "ERROR"], false, move |_context, _inputs| {
@@ -146,7 +146,7 @@ async fn publish_event(
     if let Some(redis_client) = &state.cache {
         if let Ok(mut redis_conn) = redis_client.get_multiplexed_async_connection().await {
             let channel = format!("events:{event_name}");
-            let _: Result<(), _> = redis_conn.publish(&channel, new_event.id.to_string()).await;
+            let _: Result<(), _> = redis_conn.publish(&channel, &new_event.id.to_string()).await;
         }
     }
@@ -8,9 +8,6 @@ use serde::{Deserialize, Serialize};
 use std::sync::Arc;
 use uuid::Uuid;

-// Import the send_message_to_recipient function from universal_messaging
-use super::universal_messaging::send_message_to_recipient;
-
 #[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
 pub enum InputType {
     Any,
@@ -1085,7 +1082,7 @@ pub async fn execute_talk(
         session_id: user_session.id.to_string(),
         channel: "web".to_string(),
         content: message,
-        message_type: MessageType::BOT_RESPONSE,
+        message_type: MessageType::USER,
         stream_token: None,
         is_complete: true,
         suggestions,
@@ -1114,42 +1111,8 @@ pub async fn execute_talk(

 pub fn talk_keyword(state: Arc<AppState>, user: UserSession, engine: &mut Engine) {
     let state_clone = Arc::clone(&state);
-    let user_clone = user.clone();
+    let user_clone = user;

-    // Register TALK TO "recipient", "message" syntax FIRST (more specific pattern)
-    let state_clone2 = Arc::clone(&state);
-    let user_clone2 = user.clone();
-
-    engine
-        .register_custom_syntax(
-            ["TALK", "TO", "$expr$", ",", "$expr$"],
-            true,
-            move |context, inputs| {
-                let recipient = context.eval_expression_tree(&inputs[0])?.to_string();
-                let message = context.eval_expression_tree(&inputs[1])?.to_string();
-
-                trace!("TALK TO: Sending message to {}", recipient);
-
-                let state_for_send = Arc::clone(&state_clone2);
-                let user_for_send = user_clone2.clone();
-
-                tokio::spawn(async move {
-                    if let Err(e) = send_message_to_recipient(
-                        state_for_send,
-                        &user_for_send,
-                        &recipient,
-                        &message,
-                    ).await {
-                        error!("Failed to send TALK TO message: {}", e);
-                    }
-                });
-
-                Ok(Dynamic::UNIT)
-            },
-        )
-        .expect("valid syntax registration");
-
-    // Register simple TALK "message" syntax SECOND (fallback pattern)
     engine
         .register_custom_syntax(["TALK", "$expr$"], true, move |context, inputs| {
             let message = context.eval_expression_tree(&inputs[0])?.to_string();
@@ -30,15 +30,18 @@ pub struct KgEntity {

 #[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
 #[serde(rename_all = "lowercase")]
-#[derive(Default)]
 pub enum EntitySource {
-    #[default]
     Manual,
     Extracted,
     Imported,
     Inferred,
 }

+impl Default for EntitySource {
+    fn default() -> Self {
+        Self::Manual
+    }
+}
+
 #[derive(Debug, Clone, Serialize, Deserialize)]
 pub struct KgRelationship {
@@ -19,15 +19,18 @@ pub struct ModelConfig {
 }

 #[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
-#[derive(Default)]
 pub enum RoutingStrategy {
-    #[default]
     Manual,
     Auto,
     LoadBalanced,
     Fallback,
 }

+impl Default for RoutingStrategy {
+    fn default() -> Self {
+        Self::Manual
+    }
+}
+
 #[derive(Debug, Clone)]
 pub struct ModelRouter {
@@ -8,7 +8,7 @@ pub fn parse_natural_schedule(input: &str) -> Result<String, String> {
     let input = input.trim().to_lowercase();

     let parts: Vec<&str> = input.split_whitespace().collect();
-    if (parts.len() == 5 || parts.len() == 6) && is_cron_expression(&parts) {
+    if parts.len() == 5 && is_cron_expression(&parts) {
         return Ok(input);
     }
@@ -16,14 +16,9 @@ pub fn parse_natural_schedule(input: &str) -> Result<String, String> {
 }

 fn is_cron_expression(parts: &[&str]) -> bool {
-    if parts.len() != 5 && parts.len() != 6 {
-        return false;
-    }
-
     parts.iter().all(|part| {
-        part.chars().all(|c| {
-            c.is_ascii_digit() || c == '*' || c == '/' || c == '-' || c == ',' || c.is_ascii_alphabetic()
-        })
+        part.chars()
+            .all(|c| c.is_ascii_digit() || c == '*' || c == '/' || c == '-' || c == ',')
     })
 }
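Worth flagging: this pair of hunks narrows what counts as a cron expression. The left side accepted 5- or 6-field (seconds-resolution) expressions and alphabetic field values such as `MON` or `JAN`; the right side accepts only 5-field, digit-and-symbol expressions, so inputs like `0 9 * * MON-FRI` now fall through to natural-language parsing. A self-contained sketch of the removed, more permissive check (logic reconstructed from the deleted lines):

```rust
// Permissive variant (left side): allows 5 or 6 fields and alphabetic
// names like "MON" or "JAN" alongside digits and the * / - , symbols.
fn is_cron_expression(parts: &[&str]) -> bool {
    if parts.len() != 5 && parts.len() != 6 {
        return false;
    }
    parts.iter().all(|part| {
        part.chars().all(|c| {
            c.is_ascii_digit()
                || c == '*'
                || c == '/'
                || c == '-'
                || c == ','
                || c.is_ascii_alphabetic()
        })
    })
}

fn main() {
    assert!(is_cron_expression(&["0", "9", "*", "*", "MON-FRI"])); // day names accepted
    assert!(is_cron_expression(&["0", "0", "9", "*", "*", "1"]));  // 6 fields (seconds)
    assert!(!is_cron_expression(&["once", "a", "day"]));           // wrong field count
}
```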
@@ -10,14 +10,12 @@ use serde_json::json;
 use std::sync::Arc;

 pub fn register_universal_messaging(state: Arc<AppState>, user: UserSession, engine: &mut Engine) {
+    register_talk_to(state.clone(), user.clone(), engine);
     register_send_file_to(state.clone(), user.clone(), engine);
     register_send_to(state.clone(), user.clone(), engine);
     register_broadcast(state, user, engine);
 }

-// DEPRECATED: TALK TO functionality moved to hear_talk.rs talk_keyword function
-// to avoid syntax conflicts between TALK and TALK TO
-/*
 fn register_talk_to(state: Arc<AppState>, user: UserSession, engine: &mut Engine) {
     let state_clone = Arc::clone(&state);
@@ -52,7 +50,6 @@ fn register_talk_to(state: Arc<AppState>, user: UserSession, engine: &mut Engine
         )
         .expect("valid syntax registration");
 }
-*/

 fn register_send_file_to(state: Arc<AppState>, user: UserSession, engine: &mut Engine) {
     let state_clone = Arc::clone(&state);
@@ -182,7 +179,7 @@ fn register_broadcast(state: Arc<AppState>, user: UserSession, engine: &mut Engi
         .expect("valid syntax registration");
 }

-pub async fn send_message_to_recipient(
+async fn send_message_to_recipient(
     state: Arc<AppState>,
     user: &UserSession,
     recipient: &str,
@@ -464,7 +461,7 @@ async fn send_instagram_file(

     let file_key = format!("temp/instagram/{}_{}.bin", user.id, uuid::Uuid::new_v4());

-    if let Some(s3) = &state.drive {
+    if let Some(s3) = &state.s3_client {
         s3.put_object()
             .bucket("uploads")
             .key(&file_key)
@@ -486,7 +483,7 @@ async fn send_instagram_file(

     tokio::spawn(async move {
         tokio::time::sleep(tokio::time::Duration::from_secs(3600)).await;
-        if let Some(s3) = &state.drive {
+        if let Some(s3) = &state.s3_client {
             let _ = s3
                 .delete_object()
                 .bucket("uploads")
Some files were not shown because too many files have changed in this diff.