Fix config.csv loading on startup
- Disable TLS on Vault for local development (HTTP instead of HTTPS)
- Fix bot_configuration id column type mismatch (TEXT -> UUID)
- Add migration 6.1.1 to convert config table id columns to UUID
- Fix sync_config_csv_to_db to use UUID binding for id column
- Make start_all async with proper Vault startup sequence
- Sync default.gbai config.csv to existing 'Default Bot' from migrations
- Add diagnostic logging for config loading
- Change default LLM/embedding URLs from https to http for local dev
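The UUID binding fix matters because config rows are keyed on (bot_id, config_key); a minimal sketch of the upsert that sync_config_csv_to_db effectively issues (column names, the key, and the value are illustrative assumptions, not the verbatim statement):

-- Hypothetical sketch of a config.csv row landing in bot_configuration.
-- id is now generated/bound as a UUID rather than TEXT; the ON CONFLICT
-- target is the UNIQUE(bot_id, config_key) constraint from migration 6.0.4.
INSERT INTO bot_configuration (id, bot_id, config_key, config_value)
VALUES (gen_random_uuid(),
        (SELECT id FROM bots WHERE name = 'Default Bot'),  -- existing bot from migrations
        'llm-url',                 -- key/value made up for illustration
        'http://localhost:8000')
ON CONFLICT (bot_id, config_key)
DO UPDATE SET config_value = EXCLUDED.config_value;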
parent 59f3bb8c7e, commit f7ccc95e60
27 changed files with 2206 additions and 2004 deletions
config/directory_config.json (Normal file, +20)

@@ -0,0 +1,20 @@
{
  "base_url": "http://localhost:8080",
  "default_org": {
    "id": "350084341642035214",
    "name": "default",
    "domain": "default.localhost"
  },
  "default_user": {
    "id": "admin",
    "username": "admin",
    "email": "admin@localhost",
    "password": "",
    "first_name": "Admin",
    "last_name": "User"
  },
  "admin_token": "6ToEETpAOVIPWXcuF9IclFdb4uGR0pDZvsA02rTVTUkhthzbH3MYjkJQB7OnNMHAQIFlreU",
  "project_id": "",
  "client_id": "350084343638589454",
  "client_secret": "7rAHHUIiv04O3itDpnHbetUpH3JzG4TLP6zuL07x6TaPiUzTq1Ut3II1le8plTeG"
}
@@ -1,15 +0,0 @@
-- Rollback Multi-Agent Bots Migration

-- Drop triggers first
DROP TRIGGER IF EXISTS update_bots_updated_at ON bots;
DROP FUNCTION IF EXISTS update_updated_at_column();

-- Drop tables in reverse order of creation (respecting foreign key dependencies)
DROP TABLE IF EXISTS play_content;
DROP TABLE IF EXISTS hear_wait_states;
DROP TABLE IF EXISTS attachments;
DROP TABLE IF EXISTS conversation_branches;
DROP TABLE IF EXISTS bot_messages;
DROP TABLE IF EXISTS session_bots;
DROP TABLE IF EXISTS bot_triggers;
DROP TABLE IF EXISTS bots;
@@ -1,226 +0,0 @@
-- Multi-Agent Bots Migration
-- Enables multiple bots to participate in conversations based on triggers

-- ============================================================================
-- BOTS TABLE - Bot definitions
-- ============================================================================

CREATE TABLE IF NOT EXISTS bots (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    name VARCHAR(255) NOT NULL UNIQUE,
    description TEXT,
    system_prompt TEXT,
    model_config JSONB DEFAULT '{}',
    tools JSONB DEFAULT '[]',
    is_active BOOLEAN DEFAULT true,
    created_at TIMESTAMPTZ DEFAULT NOW(),
    updated_at TIMESTAMPTZ DEFAULT NOW()
);

CREATE INDEX idx_bots_name ON bots(name);
CREATE INDEX idx_bots_active ON bots(is_active) WHERE is_active = true;

-- ============================================================================
-- BOT_TRIGGERS TABLE - Trigger configurations for bots
-- ============================================================================

CREATE TABLE IF NOT EXISTS bot_triggers (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    bot_id UUID NOT NULL REFERENCES bots(id) ON DELETE CASCADE,
    trigger_type VARCHAR(50) NOT NULL, -- 'keyword', 'tool', 'schedule', 'event', 'always'
    trigger_config JSONB NOT NULL DEFAULT '{}',
    priority INT DEFAULT 0,
    is_active BOOLEAN DEFAULT true,
    created_at TIMESTAMPTZ DEFAULT NOW(),

    CONSTRAINT valid_trigger_type CHECK (
        trigger_type IN ('keyword', 'tool', 'schedule', 'event', 'always')
    )
);

CREATE INDEX idx_bot_triggers_bot_id ON bot_triggers(bot_id);
CREATE INDEX idx_bot_triggers_type ON bot_triggers(trigger_type);

-- ============================================================================
-- SESSION_BOTS TABLE - Bots active in a session
-- ============================================================================

CREATE TABLE IF NOT EXISTS session_bots (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    session_id UUID NOT NULL,
    bot_id UUID NOT NULL REFERENCES bots(id) ON DELETE CASCADE,
    bot_name VARCHAR(255) NOT NULL,
    trigger_config JSONB NOT NULL DEFAULT '{}',
    priority INT DEFAULT 0,
    is_active BOOLEAN DEFAULT true,
    joined_at TIMESTAMPTZ DEFAULT NOW(),
    left_at TIMESTAMPTZ,

    CONSTRAINT unique_session_bot UNIQUE (session_id, bot_name)
);

CREATE INDEX idx_session_bots_session ON session_bots(session_id);
CREATE INDEX idx_session_bots_active ON session_bots(session_id, is_active) WHERE is_active = true;

-- ============================================================================
-- BOT_MESSAGES TABLE - Messages from bots in conversations
-- ============================================================================

CREATE TABLE IF NOT EXISTS bot_messages (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    session_id UUID NOT NULL,
    bot_id UUID REFERENCES bots(id) ON DELETE SET NULL,
    bot_name VARCHAR(255) NOT NULL,
    user_message_id UUID, -- Reference to the user message this responds to
    content TEXT NOT NULL,
    role VARCHAR(50) DEFAULT 'assistant',
    metadata JSONB DEFAULT '{}',
    created_at TIMESTAMPTZ DEFAULT NOW()
);

CREATE INDEX idx_bot_messages_session ON bot_messages(session_id);
CREATE INDEX idx_bot_messages_bot ON bot_messages(bot_id);
CREATE INDEX idx_bot_messages_created ON bot_messages(created_at);

-- ============================================================================
-- CONVERSATION_BRANCHES TABLE - Branch conversations from a point
-- ============================================================================

CREATE TABLE IF NOT EXISTS conversation_branches (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    parent_session_id UUID NOT NULL,
    branch_session_id UUID NOT NULL UNIQUE,
    branch_from_message_id UUID NOT NULL,
    branch_name VARCHAR(255),
    created_by UUID,
    created_at TIMESTAMPTZ DEFAULT NOW()
);

CREATE INDEX idx_branches_parent ON conversation_branches(parent_session_id);
CREATE INDEX idx_branches_session ON conversation_branches(branch_session_id);

-- ============================================================================
-- ATTACHMENTS TABLE - Files attached to messages
-- ============================================================================

CREATE TABLE IF NOT EXISTS attachments (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    message_id UUID,
    session_id UUID NOT NULL,
    user_id UUID NOT NULL,
    file_type VARCHAR(50) NOT NULL, -- 'image', 'document', 'audio', 'video', 'code', 'archive', 'other'
    file_name VARCHAR(500) NOT NULL,
    file_size BIGINT NOT NULL,
    mime_type VARCHAR(255),
    storage_path TEXT NOT NULL,
    thumbnail_path TEXT,
    metadata JSONB DEFAULT '{}',
    created_at TIMESTAMPTZ DEFAULT NOW(),

    CONSTRAINT valid_file_type CHECK (
        file_type IN ('image', 'document', 'audio', 'video', 'code', 'archive', 'other')
    )
);

CREATE INDEX idx_attachments_session ON attachments(session_id);
CREATE INDEX idx_attachments_user ON attachments(user_id);
CREATE INDEX idx_attachments_message ON attachments(message_id);
CREATE INDEX idx_attachments_type ON attachments(file_type);

-- ============================================================================
-- HEAR_WAIT_STATE TABLE - Track HEAR keyword wait states
-- ============================================================================

CREATE TABLE IF NOT EXISTS hear_wait_states (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    session_id UUID NOT NULL,
    variable_name VARCHAR(255) NOT NULL,
    input_type VARCHAR(50) NOT NULL DEFAULT 'any',
    options JSONB, -- For menu type
    retry_count INT DEFAULT 0,
    max_retries INT DEFAULT 3,
    is_waiting BOOLEAN DEFAULT true,
    created_at TIMESTAMPTZ DEFAULT NOW(),
    expires_at TIMESTAMPTZ DEFAULT NOW() + INTERVAL '1 hour',
    completed_at TIMESTAMPTZ,

    CONSTRAINT unique_hear_wait UNIQUE (session_id, variable_name)
);

CREATE INDEX idx_hear_wait_session ON hear_wait_states(session_id);
CREATE INDEX idx_hear_wait_active ON hear_wait_states(session_id, is_waiting) WHERE is_waiting = true;

-- ============================================================================
-- PLAY_CONTENT TABLE - Track content projector state
-- ============================================================================

CREATE TABLE IF NOT EXISTS play_content (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    session_id UUID NOT NULL,
    content_type VARCHAR(50) NOT NULL,
    source_url TEXT NOT NULL,
    title VARCHAR(500),
    options JSONB DEFAULT '{}',
    is_playing BOOLEAN DEFAULT true,
    started_at TIMESTAMPTZ DEFAULT NOW(),
    stopped_at TIMESTAMPTZ,

    CONSTRAINT valid_content_type CHECK (
        content_type IN ('video', 'audio', 'image', 'presentation', 'document',
                         'code', 'spreadsheet', 'pdf', 'markdown', 'html', 'iframe', 'unknown')
    )
);

CREATE INDEX idx_play_content_session ON play_content(session_id);
CREATE INDEX idx_play_content_active ON play_content(session_id, is_playing) WHERE is_playing = true;

-- ============================================================================
-- DEFAULT BOTS - Insert some default specialized bots
-- ============================================================================

INSERT INTO bots (id, name, description, system_prompt, is_active) VALUES
(gen_random_uuid(), 'fraud-detector',
 'Specialized bot for detecting and handling fraud-related inquiries',
 'You are a fraud detection specialist. Help users identify suspicious activities,
  report unauthorized transactions, and guide them through security procedures.
  Always prioritize user security and recommend immediate action for urgent cases.',
 true),

(gen_random_uuid(), 'investment-advisor',
 'Specialized bot for investment and financial planning advice',
 'You are an investment advisor. Help users understand investment options,
  analyze portfolio performance, and make informed financial decisions.
  Always remind users that past performance does not guarantee future results.',
 true),

(gen_random_uuid(), 'loan-specialist',
 'Specialized bot for loan and financing inquiries',
 'You are a loan specialist. Help users understand loan options,
  simulate payments, and guide them through the application process.
  Always disclose interest rates and total costs clearly.',
 true),

(gen_random_uuid(), 'card-services',
 'Specialized bot for credit and debit card services',
 'You are a card services specialist. Help users manage their cards,
  understand benefits, handle disputes, and manage limits.
  For security, never ask for full card numbers in chat.',
 true)
ON CONFLICT (name) DO NOTHING;

-- ============================================================================
-- TRIGGERS - Update timestamps automatically
-- ============================================================================

CREATE OR REPLACE FUNCTION update_updated_at_column()
RETURNS TRIGGER AS $$
BEGIN
    NEW.updated_at = NOW();
    RETURN NEW;
END;
$$ language 'plpgsql';

CREATE TRIGGER update_bots_updated_at
    BEFORE UPDATE ON bots
    FOR EACH ROW
    EXECUTE FUNCTION update_updated_at_column();
@@ -9,8 +9,8 @@ ALTER TABLE public.system_automations ADD COLUMN IF NOT EXISTS name VARCHAR(255)
 -- Create index on name column for faster lookups
 CREATE INDEX IF NOT EXISTS idx_system_automations_name ON public.system_automations(name);
 
-ALTER TABLE bot_configuration
-ADD CONSTRAINT bot_configuration_config_key_unique UNIQUE (config_key);
+-- Note: bot_configuration already has UNIQUE(bot_id, config_key) from migration 6.0.4
+-- Do NOT add a global unique constraint on config_key alone as that breaks multi-bot configs
 
 -- Migration 6.0.9: Add bot_id column to system_automations
 -- Description: Introduces a bot_id column to associate automations with a specific bot.
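The added note is the crux of this hunk: a global unique on config_key would force every bot into one key namespace. A small illustration of what the composite constraint permits (ids resolved by name, values made up):

-- Two bots may hold the same config_key under UNIQUE(bot_id, config_key);
-- a global UNIQUE(config_key) would reject the second insert.
INSERT INTO bot_configuration (id, bot_id, config_key, config_value)
VALUES (gen_random_uuid(), (SELECT id FROM bots WHERE name = 'fraud-detector'), 'answer-mode', 'strict');
INSERT INTO bot_configuration (id, bot_id, config_key, config_value)
VALUES (gen_random_uuid(), (SELECT id FROM bots WHERE name = 'loan-specialist'), 'answer-mode', 'relaxed');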
@@ -62,7 +62,7 @@ CREATE TABLE IF NOT EXISTS session_website_associations (
     -- Foreign key to sessions table
     CONSTRAINT fk_session_website_session
         FOREIGN KEY (session_id)
-        REFERENCES sessions(id)
+        REFERENCES user_sessions(id)
         ON DELETE CASCADE,
 
     -- Foreign key to bots table
@@ -49,3 +49,25 @@ DROP TABLE IF EXISTS email_templates;
 DROP TABLE IF EXISTS scheduled_emails;
 DROP TABLE IF EXISTS email_signatures;
 DROP TABLE IF EXISTS global_email_signatures;
+-- Drop triggers and functions
+DROP TRIGGER IF EXISTS external_connections_updated_at_trigger ON external_connections;
+DROP FUNCTION IF EXISTS update_external_connections_updated_at();
+
+DROP TRIGGER IF EXISTS dynamic_table_definitions_updated_at_trigger ON dynamic_table_definitions;
+DROP FUNCTION IF EXISTS update_dynamic_table_definitions_updated_at();
+
+-- Drop indexes
+DROP INDEX IF EXISTS idx_external_connections_name;
+DROP INDEX IF EXISTS idx_external_connections_bot_id;
+
+DROP INDEX IF EXISTS idx_dynamic_table_fields_name;
+DROP INDEX IF EXISTS idx_dynamic_table_fields_table_id;
+
+DROP INDEX IF EXISTS idx_dynamic_table_definitions_connection;
+DROP INDEX IF EXISTS idx_dynamic_table_definitions_name;
+DROP INDEX IF EXISTS idx_dynamic_table_definitions_bot_id;
+
+-- Drop tables (order matters due to foreign keys)
+DROP TABLE IF EXISTS external_connections;
+DROP TABLE IF EXISTS dynamic_table_fields;
+DROP TABLE IF EXISTS dynamic_table_definitions;
(File diff suppressed because it is too large)
@@ -1,22 +0,0 @@
-- Drop triggers and functions
DROP TRIGGER IF EXISTS external_connections_updated_at_trigger ON external_connections;
DROP FUNCTION IF EXISTS update_external_connections_updated_at();

DROP TRIGGER IF EXISTS dynamic_table_definitions_updated_at_trigger ON dynamic_table_definitions;
DROP FUNCTION IF EXISTS update_dynamic_table_definitions_updated_at();

-- Drop indexes
DROP INDEX IF EXISTS idx_external_connections_name;
DROP INDEX IF EXISTS idx_external_connections_bot_id;

DROP INDEX IF EXISTS idx_dynamic_table_fields_name;
DROP INDEX IF EXISTS idx_dynamic_table_fields_table_id;

DROP INDEX IF EXISTS idx_dynamic_table_definitions_connection;
DROP INDEX IF EXISTS idx_dynamic_table_definitions_name;
DROP INDEX IF EXISTS idx_dynamic_table_definitions_bot_id;

-- Drop tables (order matters due to foreign keys)
DROP TABLE IF EXISTS external_connections;
DROP TABLE IF EXISTS dynamic_table_fields;
DROP TABLE IF EXISTS dynamic_table_definitions;
@@ -1,120 +0,0 @@
-- Migration for TABLE keyword support
-- Stores dynamic table definitions created via BASIC TABLE...END TABLE syntax

-- Table to store dynamic table definitions (metadata)
CREATE TABLE IF NOT EXISTS dynamic_table_definitions (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    bot_id UUID NOT NULL,
    table_name VARCHAR(255) NOT NULL,
    connection_name VARCHAR(255) NOT NULL DEFAULT 'default',
    created_at TIMESTAMPTZ DEFAULT NOW(),
    updated_at TIMESTAMPTZ DEFAULT NOW(),
    is_active BOOLEAN DEFAULT true,

    -- Ensure unique table name per bot and connection
    CONSTRAINT unique_bot_table_connection UNIQUE (bot_id, table_name, connection_name),

    -- Foreign key to bots table
    CONSTRAINT fk_dynamic_table_bot
        FOREIGN KEY (bot_id)
        REFERENCES bots(id)
        ON DELETE CASCADE
);

-- Table to store field definitions for dynamic tables
CREATE TABLE IF NOT EXISTS dynamic_table_fields (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    table_definition_id UUID NOT NULL,
    field_name VARCHAR(255) NOT NULL,
    field_type VARCHAR(100) NOT NULL,
    field_length INTEGER,
    field_precision INTEGER,
    is_key BOOLEAN DEFAULT false,
    is_nullable BOOLEAN DEFAULT true,
    default_value TEXT,
    reference_table VARCHAR(255),
    field_order INTEGER NOT NULL DEFAULT 0,
    created_at TIMESTAMPTZ DEFAULT NOW(),

    -- Ensure unique field name per table definition
    CONSTRAINT unique_table_field UNIQUE (table_definition_id, field_name),

    -- Foreign key to table definitions
    CONSTRAINT fk_field_table_definition
        FOREIGN KEY (table_definition_id)
        REFERENCES dynamic_table_definitions(id)
        ON DELETE CASCADE
);

-- Table to store external database connections (from config.csv conn-* entries)
CREATE TABLE IF NOT EXISTS external_connections (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    bot_id UUID NOT NULL,
    connection_name VARCHAR(255) NOT NULL,
    driver VARCHAR(100) NOT NULL,
    server VARCHAR(255) NOT NULL,
    port INTEGER,
    database_name VARCHAR(255),
    username VARCHAR(255),
    password_encrypted TEXT,
    additional_params JSONB DEFAULT '{}'::jsonb,
    is_active BOOLEAN DEFAULT true,
    created_at TIMESTAMPTZ DEFAULT NOW(),
    updated_at TIMESTAMPTZ DEFAULT NOW(),
    last_connected_at TIMESTAMPTZ,

    -- Ensure unique connection name per bot
    CONSTRAINT unique_bot_connection UNIQUE (bot_id, connection_name),

    -- Foreign key to bots table
    CONSTRAINT fk_external_connection_bot
        FOREIGN KEY (bot_id)
        REFERENCES bots(id)
        ON DELETE CASCADE
);

-- Create indexes for efficient queries
CREATE INDEX IF NOT EXISTS idx_dynamic_table_definitions_bot_id
    ON dynamic_table_definitions(bot_id);
CREATE INDEX IF NOT EXISTS idx_dynamic_table_definitions_name
    ON dynamic_table_definitions(table_name);
CREATE INDEX IF NOT EXISTS idx_dynamic_table_definitions_connection
    ON dynamic_table_definitions(connection_name);

CREATE INDEX IF NOT EXISTS idx_dynamic_table_fields_table_id
    ON dynamic_table_fields(table_definition_id);
CREATE INDEX IF NOT EXISTS idx_dynamic_table_fields_name
    ON dynamic_table_fields(field_name);

CREATE INDEX IF NOT EXISTS idx_external_connections_bot_id
    ON external_connections(bot_id);
CREATE INDEX IF NOT EXISTS idx_external_connections_name
    ON external_connections(connection_name);

-- Create trigger to update updated_at timestamp for dynamic_table_definitions
CREATE OR REPLACE FUNCTION update_dynamic_table_definitions_updated_at()
RETURNS TRIGGER AS $$
BEGIN
    NEW.updated_at = NOW();
    RETURN NEW;
END;
$$ LANGUAGE plpgsql;

CREATE TRIGGER dynamic_table_definitions_updated_at_trigger
    BEFORE UPDATE ON dynamic_table_definitions
    FOR EACH ROW
    EXECUTE FUNCTION update_dynamic_table_definitions_updated_at();

-- Create trigger to update updated_at timestamp for external_connections
CREATE OR REPLACE FUNCTION update_external_connections_updated_at()
RETURNS TRIGGER AS $$
BEGIN
    NEW.updated_at = NOW();
    RETURN NEW;
END;
$$ LANGUAGE plpgsql;

CREATE TRIGGER external_connections_updated_at_trigger
    BEFORE UPDATE ON external_connections
    FOR EACH ROW
    EXECUTE FUNCTION update_external_connections_updated_at();
migrations/6.1.1_fix_config_id_types/down.sql (Normal file, +98)

@@ -0,0 +1,98 @@
-- Rollback Migration 6.1.1: Revert UUID columns back to TEXT
-- This reverts the id columns from UUID back to TEXT

-- For bot_configuration
DO $$
BEGIN
    IF EXISTS (SELECT 1 FROM information_schema.columns
               WHERE table_name = 'bot_configuration'
               AND column_name = 'id'
               AND data_type = 'uuid') THEN
        ALTER TABLE bot_configuration
        ALTER COLUMN id TYPE TEXT USING id::text;
    END IF;
END $$;

-- For server_configuration
DO $$
BEGIN
    IF EXISTS (SELECT 1 FROM information_schema.columns
               WHERE table_name = 'server_configuration'
               AND column_name = 'id'
               AND data_type = 'uuid') THEN
        ALTER TABLE server_configuration
        ALTER COLUMN id TYPE TEXT USING id::text;
    END IF;
END $$;

-- For tenant_configuration
DO $$
BEGIN
    IF EXISTS (SELECT 1 FROM information_schema.columns
               WHERE table_name = 'tenant_configuration'
               AND column_name = 'id'
               AND data_type = 'uuid') THEN
        ALTER TABLE tenant_configuration
        ALTER COLUMN id TYPE TEXT USING id::text;
    END IF;
END $$;

-- For model_configurations
DO $$
BEGIN
    IF EXISTS (SELECT 1 FROM information_schema.columns
               WHERE table_name = 'model_configurations'
               AND column_name = 'id'
               AND data_type = 'uuid') THEN
        ALTER TABLE model_configurations
        ALTER COLUMN id TYPE TEXT USING id::text;
    END IF;
END $$;

-- For connection_configurations
DO $$
BEGIN
    IF EXISTS (SELECT 1 FROM information_schema.columns
               WHERE table_name = 'connection_configurations'
               AND column_name = 'id'
               AND data_type = 'uuid') THEN
        ALTER TABLE connection_configurations
        ALTER COLUMN id TYPE TEXT USING id::text;
    END IF;
END $$;

-- For component_installations
DO $$
BEGIN
    IF EXISTS (SELECT 1 FROM information_schema.columns
               WHERE table_name = 'component_installations'
               AND column_name = 'id'
               AND data_type = 'uuid') THEN
        ALTER TABLE component_installations
        ALTER COLUMN id TYPE TEXT USING id::text;
    END IF;
END $$;

-- For component_logs
DO $$
BEGIN
    IF EXISTS (SELECT 1 FROM information_schema.columns
               WHERE table_name = 'component_logs'
               AND column_name = 'id'
               AND data_type = 'uuid') THEN
        ALTER TABLE component_logs
        ALTER COLUMN id TYPE TEXT USING id::text;
    END IF;
END $$;

-- For gbot_config_sync
DO $$
BEGIN
    IF EXISTS (SELECT 1 FROM information_schema.columns
               WHERE table_name = 'gbot_config_sync'
               AND column_name = 'id'
               AND data_type = 'uuid') THEN
        ALTER TABLE gbot_config_sync
        ALTER COLUMN id TYPE TEXT USING id::text;
    END IF;
END $$;
migrations/6.1.1_fix_config_id_types/up.sql (Normal file, +99)

@@ -0,0 +1,99 @@
-- Migration 6.1.1: Fix bot_configuration id column type
-- The Diesel schema expects UUID but migration 6.0.4 created it as TEXT
-- This migration converts the id column from TEXT to UUID

-- For bot_configuration (main table that needs fixing)
DO $$
BEGIN
    IF EXISTS (SELECT 1 FROM information_schema.columns
               WHERE table_name = 'bot_configuration'
               AND column_name = 'id'
               AND data_type = 'text') THEN
        ALTER TABLE bot_configuration
        ALTER COLUMN id TYPE UUID USING id::uuid;
    END IF;
END $$;

-- Also fix server_configuration which has the same issue
DO $$
BEGIN
    IF EXISTS (SELECT 1 FROM information_schema.columns
               WHERE table_name = 'server_configuration'
               AND column_name = 'id'
               AND data_type = 'text') THEN
        ALTER TABLE server_configuration
        ALTER COLUMN id TYPE UUID USING id::uuid;
    END IF;
END $$;

-- Also fix tenant_configuration which has the same issue
DO $$
BEGIN
    IF EXISTS (SELECT 1 FROM information_schema.columns
               WHERE table_name = 'tenant_configuration'
               AND column_name = 'id'
               AND data_type = 'text') THEN
        ALTER TABLE tenant_configuration
        ALTER COLUMN id TYPE UUID USING id::uuid;
    END IF;
END $$;

-- Fix model_configurations
DO $$
BEGIN
    IF EXISTS (SELECT 1 FROM information_schema.columns
               WHERE table_name = 'model_configurations'
               AND column_name = 'id'
               AND data_type = 'text') THEN
        ALTER TABLE model_configurations
        ALTER COLUMN id TYPE UUID USING id::uuid;
    END IF;
END $$;

-- Fix connection_configurations
DO $$
BEGIN
    IF EXISTS (SELECT 1 FROM information_schema.columns
               WHERE table_name = 'connection_configurations'
               AND column_name = 'id'
               AND data_type = 'text') THEN
        ALTER TABLE connection_configurations
        ALTER COLUMN id TYPE UUID USING id::uuid;
    END IF;
END $$;

-- Fix component_installations
DO $$
BEGIN
    IF EXISTS (SELECT 1 FROM information_schema.columns
               WHERE table_name = 'component_installations'
               AND column_name = 'id'
               AND data_type = 'text') THEN
        ALTER TABLE component_installations
        ALTER COLUMN id TYPE UUID USING id::uuid;
    END IF;
END $$;

-- Fix component_logs
DO $$
BEGIN
    IF EXISTS (SELECT 1 FROM information_schema.columns
               WHERE table_name = 'component_logs'
               AND column_name = 'id'
               AND data_type = 'text') THEN
        ALTER TABLE component_logs
        ALTER COLUMN id TYPE UUID USING id::uuid;
    END IF;
END $$;

-- Fix gbot_config_sync
DO $$
BEGIN
    IF EXISTS (SELECT 1 FROM information_schema.columns
               WHERE table_name = 'gbot_config_sync'
               AND column_name = 'id'
               AND data_type = 'text') THEN
        ALTER TABLE gbot_config_sync
        ALTER COLUMN id TYPE UUID USING id::uuid;
    END IF;
END $$;
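Whether up.sql has run (or down.sql has reverted it) can be confirmed with the same information_schema lookup the migration itself uses; a quick sketch:

-- Lists the current id column type for each table the migration touches.
SELECT table_name, data_type
FROM information_schema.columns
WHERE column_name = 'id'
  AND table_name IN ('bot_configuration', 'server_configuration',
                     'tenant_configuration', 'model_configurations',
                     'connection_configurations', 'component_installations',
                     'component_logs', 'gbot_config_sync')
ORDER BY table_name;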
@@ -1,64 +0,0 @@
-- Migration: 6.1.1 Multi-Agent Memory Support (DOWN)
-- Description: Rollback for user memory, session preferences, and A2A protocol messaging

-- Drop triggers first
DROP TRIGGER IF EXISTS update_user_memories_updated_at ON user_memories;
DROP TRIGGER IF EXISTS update_bot_memory_extended_updated_at ON bot_memory_extended;
DROP TRIGGER IF EXISTS update_kg_entities_updated_at ON kg_entities;

-- Drop functions
DROP FUNCTION IF EXISTS update_updated_at_column();
DROP FUNCTION IF EXISTS cleanup_expired_bot_memory();
DROP FUNCTION IF EXISTS cleanup_expired_a2a_messages();

-- Drop indexes (will be dropped with tables, but explicit for clarity)
DROP INDEX IF EXISTS idx_session_bots_active;
DROP INDEX IF EXISTS idx_session_bots_session;
DROP INDEX IF EXISTS idx_gen_api_tools_bot;
DROP INDEX IF EXISTS idx_conv_costs_time;
DROP INDEX IF EXISTS idx_conv_costs_bot;
DROP INDEX IF EXISTS idx_conv_costs_user;
DROP INDEX IF EXISTS idx_conv_costs_session;
DROP INDEX IF EXISTS idx_episodic_time;
DROP INDEX IF EXISTS idx_episodic_session;
DROP INDEX IF EXISTS idx_episodic_user;
DROP INDEX IF EXISTS idx_episodic_bot;
DROP INDEX IF EXISTS idx_kg_rel_type;
DROP INDEX IF EXISTS idx_kg_rel_to;
DROP INDEX IF EXISTS idx_kg_rel_from;
DROP INDEX IF EXISTS idx_kg_rel_bot;
DROP INDEX IF EXISTS idx_kg_entities_name;
DROP INDEX IF EXISTS idx_kg_entities_type;
DROP INDEX IF EXISTS idx_kg_entities_bot;
DROP INDEX IF EXISTS idx_bot_memory_ext_expires;
DROP INDEX IF EXISTS idx_bot_memory_ext_type;
DROP INDEX IF EXISTS idx_bot_memory_ext_session;
DROP INDEX IF EXISTS idx_bot_memory_ext_bot;
DROP INDEX IF EXISTS idx_a2a_messages_timestamp;
DROP INDEX IF EXISTS idx_a2a_messages_pending;
DROP INDEX IF EXISTS idx_a2a_messages_correlation;
DROP INDEX IF EXISTS idx_a2a_messages_to_agent;
DROP INDEX IF EXISTS idx_a2a_messages_session;
DROP INDEX IF EXISTS idx_session_preferences_session;
DROP INDEX IF EXISTS idx_user_memories_type;
DROP INDEX IF EXISTS idx_user_memories_user_id;
DROP INDEX IF EXISTS idx_bot_reflections_bot;
DROP INDEX IF EXISTS idx_bot_reflections_session;
DROP INDEX IF EXISTS idx_bot_reflections_time;
DROP INDEX IF EXISTS idx_conv_messages_session;
DROP INDEX IF EXISTS idx_conv_messages_time;
DROP INDEX IF EXISTS idx_conv_messages_bot;

-- Drop tables (order matters due to foreign keys)
DROP TABLE IF EXISTS conversation_messages;
DROP TABLE IF EXISTS bot_reflections;
DROP TABLE IF EXISTS session_bots;
DROP TABLE IF EXISTS generated_api_tools;
DROP TABLE IF EXISTS conversation_costs;
DROP TABLE IF EXISTS episodic_memories;
DROP TABLE IF EXISTS kg_relationships;
DROP TABLE IF EXISTS kg_entities;
DROP TABLE IF EXISTS bot_memory_extended;
DROP TABLE IF EXISTS a2a_messages;
DROP TABLE IF EXISTS session_preferences;
DROP TABLE IF EXISTS user_memories;
@@ -1,315 +0,0 @@
-- Migration: 6.1.1 Multi-Agent Memory Support
-- Description: Adds tables for user memory, session preferences, and A2A protocol messaging

-- ============================================================================
-- User Memories Table
-- Cross-session memory that persists for users across all sessions and bots
-- ============================================================================
CREATE TABLE IF NOT EXISTS user_memories (
    id UUID PRIMARY KEY,
    user_id UUID NOT NULL,
    key VARCHAR(255) NOT NULL,
    value TEXT NOT NULL,
    memory_type VARCHAR(50) NOT NULL DEFAULT 'preference',
    created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
    updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
    CONSTRAINT user_memories_unique_key UNIQUE (user_id, key)
);

CREATE INDEX IF NOT EXISTS idx_user_memories_user_id ON user_memories(user_id);
CREATE INDEX IF NOT EXISTS idx_user_memories_type ON user_memories(user_id, memory_type);

-- ============================================================================
-- Session Preferences Table
-- Stores per-session configuration like current model, routing strategy, etc.
-- ============================================================================
CREATE TABLE IF NOT EXISTS session_preferences (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    session_id UUID NOT NULL,
    preference_key VARCHAR(255) NOT NULL,
    preference_value TEXT NOT NULL,
    updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
    CONSTRAINT session_preferences_unique UNIQUE (session_id, preference_key)
);

CREATE INDEX IF NOT EXISTS idx_session_preferences_session ON session_preferences(session_id);

-- ============================================================================
-- A2A Messages Table
-- Agent-to-Agent protocol messages for multi-agent orchestration
-- Based on https://a2a-protocol.org/latest/
-- ============================================================================
CREATE TABLE IF NOT EXISTS a2a_messages (
    id UUID PRIMARY KEY,
    session_id UUID NOT NULL,
    from_agent VARCHAR(255) NOT NULL,
    to_agent VARCHAR(255), -- NULL for broadcast messages
    message_type VARCHAR(50) NOT NULL,
    payload TEXT NOT NULL,
    correlation_id UUID NOT NULL,
    timestamp TIMESTAMPTZ NOT NULL DEFAULT NOW(),
    metadata TEXT DEFAULT '{}',
    ttl_seconds INTEGER NOT NULL DEFAULT 30,
    hop_count INTEGER NOT NULL DEFAULT 0,
    processed BOOLEAN NOT NULL DEFAULT FALSE,
    processed_at TIMESTAMPTZ,
    error_message TEXT
);

CREATE INDEX IF NOT EXISTS idx_a2a_messages_session ON a2a_messages(session_id);
CREATE INDEX IF NOT EXISTS idx_a2a_messages_to_agent ON a2a_messages(session_id, to_agent);
CREATE INDEX IF NOT EXISTS idx_a2a_messages_correlation ON a2a_messages(correlation_id);
CREATE INDEX IF NOT EXISTS idx_a2a_messages_pending ON a2a_messages(session_id, to_agent, processed) WHERE processed = FALSE;
CREATE INDEX IF NOT EXISTS idx_a2a_messages_timestamp ON a2a_messages(timestamp);

-- ============================================================================
-- Extended Bot Memory Table
-- Enhanced memory with TTL and different memory types
-- ============================================================================
CREATE TABLE IF NOT EXISTS bot_memory_extended (
    id UUID PRIMARY KEY,
    bot_id UUID NOT NULL,
    session_id UUID, -- NULL for long-term memory
    memory_type VARCHAR(20) NOT NULL CHECK (memory_type IN ('short', 'long', 'episodic')),
    key VARCHAR(255) NOT NULL,
    value TEXT NOT NULL,
    ttl_seconds INTEGER,
    created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
    updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
    expires_at TIMESTAMPTZ,
    CONSTRAINT bot_memory_extended_unique UNIQUE (bot_id, session_id, key)
);

CREATE INDEX IF NOT EXISTS idx_bot_memory_ext_bot ON bot_memory_extended(bot_id);
CREATE INDEX IF NOT EXISTS idx_bot_memory_ext_session ON bot_memory_extended(bot_id, session_id);
CREATE INDEX IF NOT EXISTS idx_bot_memory_ext_type ON bot_memory_extended(bot_id, memory_type);
CREATE INDEX IF NOT EXISTS idx_bot_memory_ext_expires ON bot_memory_extended(expires_at) WHERE expires_at IS NOT NULL;

-- ============================================================================
-- Knowledge Graph Entities Table
-- For graph-based memory and entity relationships
-- ============================================================================
CREATE TABLE IF NOT EXISTS kg_entities (
    id UUID PRIMARY KEY,
    bot_id UUID NOT NULL,
    entity_type VARCHAR(100) NOT NULL,
    entity_name VARCHAR(500) NOT NULL,
    properties JSONB DEFAULT '{}',
    embedding_vector BYTEA, -- For vector similarity search
    created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
    updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
    CONSTRAINT kg_entities_unique UNIQUE (bot_id, entity_type, entity_name)
);

CREATE INDEX IF NOT EXISTS idx_kg_entities_bot ON kg_entities(bot_id);
CREATE INDEX IF NOT EXISTS idx_kg_entities_type ON kg_entities(bot_id, entity_type);
CREATE INDEX IF NOT EXISTS idx_kg_entities_name ON kg_entities(entity_name);

-- ============================================================================
-- Knowledge Graph Relationships Table
-- For storing relationships between entities
-- ============================================================================
CREATE TABLE IF NOT EXISTS kg_relationships (
    id UUID PRIMARY KEY,
    bot_id UUID NOT NULL,
    from_entity_id UUID NOT NULL REFERENCES kg_entities(id) ON DELETE CASCADE,
    to_entity_id UUID NOT NULL REFERENCES kg_entities(id) ON DELETE CASCADE,
    relationship_type VARCHAR(100) NOT NULL,
    properties JSONB DEFAULT '{}',
    weight FLOAT DEFAULT 1.0,
    created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
    CONSTRAINT kg_relationships_unique UNIQUE (from_entity_id, to_entity_id, relationship_type)
);

CREATE INDEX IF NOT EXISTS idx_kg_rel_bot ON kg_relationships(bot_id);
CREATE INDEX IF NOT EXISTS idx_kg_rel_from ON kg_relationships(from_entity_id);
CREATE INDEX IF NOT EXISTS idx_kg_rel_to ON kg_relationships(to_entity_id);
CREATE INDEX IF NOT EXISTS idx_kg_rel_type ON kg_relationships(bot_id, relationship_type);

-- ============================================================================
-- Episodic Memory Table
-- For storing conversation summaries and episodes
-- ============================================================================
CREATE TABLE IF NOT EXISTS episodic_memories (
    id UUID PRIMARY KEY,
    bot_id UUID NOT NULL,
    user_id UUID NOT NULL,
    session_id UUID,
    summary TEXT NOT NULL,
    key_topics JSONB DEFAULT '[]',
    decisions JSONB DEFAULT '[]',
    action_items JSONB DEFAULT '[]',
    message_count INTEGER NOT NULL DEFAULT 0,
    start_timestamp TIMESTAMPTZ NOT NULL,
    end_timestamp TIMESTAMPTZ NOT NULL,
    created_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
);

CREATE INDEX IF NOT EXISTS idx_episodic_bot ON episodic_memories(bot_id);
CREATE INDEX IF NOT EXISTS idx_episodic_user ON episodic_memories(user_id);
CREATE INDEX IF NOT EXISTS idx_episodic_session ON episodic_memories(session_id);
CREATE INDEX IF NOT EXISTS idx_episodic_time ON episodic_memories(bot_id, user_id, created_at);

-- ============================================================================
-- Conversation Cost Tracking Table
-- For monitoring LLM usage and costs
-- ============================================================================
CREATE TABLE IF NOT EXISTS conversation_costs (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    session_id UUID NOT NULL,
    user_id UUID NOT NULL,
    bot_id UUID NOT NULL,
    model_used VARCHAR(100),
    input_tokens INTEGER NOT NULL DEFAULT 0,
    output_tokens INTEGER NOT NULL DEFAULT 0,
    cost_usd DECIMAL(10, 6) NOT NULL DEFAULT 0,
    timestamp TIMESTAMPTZ NOT NULL DEFAULT NOW()
);

CREATE INDEX IF NOT EXISTS idx_conv_costs_session ON conversation_costs(session_id);
CREATE INDEX IF NOT EXISTS idx_conv_costs_user ON conversation_costs(user_id);
CREATE INDEX IF NOT EXISTS idx_conv_costs_bot ON conversation_costs(bot_id);
CREATE INDEX IF NOT EXISTS idx_conv_costs_time ON conversation_costs(timestamp);

-- ============================================================================
-- Generated API Tools Table
-- For tracking tools generated from OpenAPI specs
-- ============================================================================
CREATE TABLE IF NOT EXISTS generated_api_tools (
    id UUID PRIMARY KEY,
    bot_id UUID NOT NULL,
    api_name VARCHAR(255) NOT NULL,
    spec_url TEXT NOT NULL,
    spec_hash VARCHAR(64) NOT NULL,
    tool_count INTEGER NOT NULL DEFAULT 0,
    last_synced_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
    created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
    CONSTRAINT generated_api_tools_unique UNIQUE (bot_id, api_name)
);

CREATE INDEX IF NOT EXISTS idx_gen_api_tools_bot ON generated_api_tools(bot_id);

-- ============================================================================
-- Session Bots Junction Table (if not exists)
-- For multi-agent sessions
-- ============================================================================
CREATE TABLE IF NOT EXISTS session_bots (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    session_id UUID NOT NULL,
    bot_id UUID NOT NULL,
    bot_name VARCHAR(255) NOT NULL,
    trigger_config JSONB DEFAULT '{}',
    priority INTEGER NOT NULL DEFAULT 0,
    is_active BOOLEAN NOT NULL DEFAULT TRUE,
    added_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
    CONSTRAINT session_bots_unique UNIQUE (session_id, bot_name)
);

CREATE INDEX IF NOT EXISTS idx_session_bots_session ON session_bots(session_id);
CREATE INDEX IF NOT EXISTS idx_session_bots_active ON session_bots(session_id, is_active);

-- ============================================================================
-- Cleanup function for expired A2A messages
-- ============================================================================
CREATE OR REPLACE FUNCTION cleanup_expired_a2a_messages()
RETURNS INTEGER AS $$
DECLARE
    deleted_count INTEGER;
BEGIN
    DELETE FROM a2a_messages
    WHERE ttl_seconds > 0
      AND timestamp + (ttl_seconds || ' seconds')::INTERVAL < NOW();

    GET DIAGNOSTICS deleted_count = ROW_COUNT;
    RETURN deleted_count;
END;
$$ LANGUAGE plpgsql;

-- ============================================================================
-- Cleanup function for expired bot memory
-- ============================================================================
CREATE OR REPLACE FUNCTION cleanup_expired_bot_memory()
RETURNS INTEGER AS $$
DECLARE
    deleted_count INTEGER;
BEGIN
    DELETE FROM bot_memory_extended
    WHERE expires_at IS NOT NULL AND expires_at < NOW();

    GET DIAGNOSTICS deleted_count = ROW_COUNT;
    RETURN deleted_count;
END;
$$ LANGUAGE plpgsql;

-- ============================================================================
-- Trigger to update updated_at timestamp
-- ============================================================================
CREATE OR REPLACE FUNCTION update_updated_at_column()
RETURNS TRIGGER AS $$
BEGIN
    NEW.updated_at = NOW();
    RETURN NEW;
END;
$$ LANGUAGE plpgsql;

-- Apply trigger to tables with updated_at
DROP TRIGGER IF EXISTS update_user_memories_updated_at ON user_memories;
CREATE TRIGGER update_user_memories_updated_at
    BEFORE UPDATE ON user_memories
    FOR EACH ROW
    EXECUTE FUNCTION update_updated_at_column();

DROP TRIGGER IF EXISTS update_bot_memory_extended_updated_at ON bot_memory_extended;
CREATE TRIGGER update_bot_memory_extended_updated_at
    BEFORE UPDATE ON bot_memory_extended
    FOR EACH ROW
    EXECUTE FUNCTION update_updated_at_column();

DROP TRIGGER IF EXISTS update_kg_entities_updated_at ON kg_entities;
CREATE TRIGGER update_kg_entities_updated_at
    BEFORE UPDATE ON kg_entities
    FOR EACH ROW
    EXECUTE FUNCTION update_updated_at_column();

-- ============================================================================
-- Bot Reflections Table
-- For storing agent self-reflection analysis results
-- ============================================================================
CREATE TABLE IF NOT EXISTS bot_reflections (
    id UUID PRIMARY KEY,
    bot_id UUID NOT NULL,
    session_id UUID NOT NULL,
    reflection_type TEXT NOT NULL,
    score FLOAT NOT NULL DEFAULT 0.0,
    insights TEXT NOT NULL DEFAULT '[]',
    improvements TEXT NOT NULL DEFAULT '[]',
    positive_patterns TEXT NOT NULL DEFAULT '[]',
    concerns TEXT NOT NULL DEFAULT '[]',
    raw_response TEXT NOT NULL DEFAULT '',
    messages_analyzed INTEGER NOT NULL DEFAULT 0,
    created_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
);

CREATE INDEX IF NOT EXISTS idx_bot_reflections_bot ON bot_reflections(bot_id);
CREATE INDEX IF NOT EXISTS idx_bot_reflections_session ON bot_reflections(session_id);
CREATE INDEX IF NOT EXISTS idx_bot_reflections_time ON bot_reflections(bot_id, created_at);

-- ============================================================================
-- Conversation Messages Table
-- For storing conversation history (if not already exists)
-- ============================================================================
CREATE TABLE IF NOT EXISTS conversation_messages (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    session_id UUID NOT NULL,
    bot_id UUID NOT NULL,
    user_id UUID,
    role VARCHAR(50) NOT NULL,
    content TEXT NOT NULL,
    metadata JSONB DEFAULT '{}',
    token_count INTEGER,
    created_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
);

CREATE INDEX IF NOT EXISTS idx_conv_messages_session ON conversation_messages(session_id);
CREATE INDEX IF NOT EXISTS idx_conv_messages_time ON conversation_messages(session_id, created_at);
CREATE INDEX IF NOT EXISTS idx_conv_messages_bot ON conversation_messages(bot_id);
@@ -1,124 +0,0 @@
-- Migration Rollback: 6.1.2_phase3_phase4
-- Description: Rollback Phase 3 and Phase 4 multi-agent features
-- WARNING: This will delete all data in the affected tables!

-- ============================================
-- DROP VIEWS
-- ============================================

DROP VIEW IF EXISTS v_llm_usage_24h;
DROP VIEW IF EXISTS v_approval_summary;
DROP VIEW IF EXISTS v_kg_stats;
DROP VIEW IF EXISTS v_recent_episodes;

-- ============================================
-- DROP FUNCTIONS
-- ============================================

DROP FUNCTION IF EXISTS cleanup_old_observability_data(INTEGER);
DROP FUNCTION IF EXISTS reset_monthly_budgets();
DROP FUNCTION IF EXISTS reset_daily_budgets();
DROP FUNCTION IF EXISTS aggregate_llm_metrics_hourly();

-- ============================================
-- DROP TRIGGERS
-- ============================================

DROP TRIGGER IF EXISTS update_llm_budget_updated_at ON llm_budget;
DROP TRIGGER IF EXISTS update_workflow_definitions_updated_at ON workflow_definitions;
DROP TRIGGER IF EXISTS update_kg_entities_updated_at ON kg_entities;

-- Note: We don't drop the update_updated_at_column() function as it may be used by other tables

-- ============================================
-- DROP WORKFLOW TABLES
-- ============================================

DROP TABLE IF EXISTS workflow_step_executions CASCADE;
DROP TABLE IF EXISTS workflow_executions CASCADE;
DROP TABLE IF EXISTS workflow_definitions CASCADE;

-- ============================================
-- DROP LLM OBSERVABILITY TABLES
-- ============================================

DROP TABLE IF EXISTS llm_traces CASCADE;
DROP TABLE IF EXISTS llm_budget CASCADE;
DROP TABLE IF EXISTS llm_metrics_hourly CASCADE;
DROP TABLE IF EXISTS llm_metrics CASCADE;

-- ============================================
-- DROP APPROVAL TABLES
-- ============================================

DROP TABLE IF EXISTS approval_tokens CASCADE;
DROP TABLE IF EXISTS approval_audit_log CASCADE;
DROP TABLE IF EXISTS approval_chains CASCADE;
DROP TABLE IF EXISTS approval_requests CASCADE;

-- ============================================
-- DROP KNOWLEDGE GRAPH TABLES
-- ============================================

DROP TABLE IF EXISTS kg_relationships CASCADE;
DROP TABLE IF EXISTS kg_entities CASCADE;

-- ============================================
-- DROP EPISODIC MEMORY TABLES
-- ============================================

DROP TABLE IF EXISTS conversation_episodes CASCADE;

-- ============================================
-- DROP INDEXES (if any remain)
-- ============================================

-- Episodic memory indexes
DROP INDEX IF EXISTS idx_episodes_user_id;
DROP INDEX IF EXISTS idx_episodes_bot_id;
DROP INDEX IF EXISTS idx_episodes_session_id;
DROP INDEX IF EXISTS idx_episodes_created_at;
DROP INDEX IF EXISTS idx_episodes_key_topics;
DROP INDEX IF EXISTS idx_episodes_resolution;
DROP INDEX IF EXISTS idx_episodes_summary_fts;

-- Knowledge graph indexes
DROP INDEX IF EXISTS idx_kg_entities_bot_id;
DROP INDEX IF EXISTS idx_kg_entities_type;
DROP INDEX IF EXISTS idx_kg_entities_name;
DROP INDEX IF EXISTS idx_kg_entities_name_lower;
DROP INDEX IF EXISTS idx_kg_entities_aliases;
DROP INDEX IF EXISTS idx_kg_entities_name_fts;
DROP INDEX IF EXISTS idx_kg_relationships_bot_id;
DROP INDEX IF EXISTS idx_kg_relationships_from;
DROP INDEX IF EXISTS idx_kg_relationships_to;
DROP INDEX IF EXISTS idx_kg_relationships_type;

-- Approval indexes
DROP INDEX IF EXISTS idx_approval_requests_bot_id;
DROP INDEX IF EXISTS idx_approval_requests_session_id;
DROP INDEX IF EXISTS idx_approval_requests_status;
DROP INDEX IF EXISTS idx_approval_requests_expires_at;
DROP INDEX IF EXISTS idx_approval_requests_pending;
DROP INDEX IF EXISTS idx_approval_audit_request_id;
DROP INDEX IF EXISTS idx_approval_audit_timestamp;
DROP INDEX IF EXISTS idx_approval_tokens_token;
DROP INDEX IF EXISTS idx_approval_tokens_request_id;

-- Observability indexes
DROP INDEX IF EXISTS idx_llm_metrics_bot_id;
DROP INDEX IF EXISTS idx_llm_metrics_session_id;
DROP INDEX IF EXISTS idx_llm_metrics_timestamp;
DROP INDEX IF EXISTS idx_llm_metrics_model;
DROP INDEX IF EXISTS idx_llm_metrics_hourly_bot_id;
DROP INDEX IF EXISTS idx_llm_metrics_hourly_hour;
DROP INDEX IF EXISTS idx_llm_traces_trace_id;
DROP INDEX IF EXISTS idx_llm_traces_start_time;
DROP INDEX IF EXISTS idx_llm_traces_component;

-- Workflow indexes
DROP INDEX IF EXISTS idx_workflow_definitions_bot_id;
DROP INDEX IF EXISTS idx_workflow_executions_workflow_id;
DROP INDEX IF EXISTS idx_workflow_executions_bot_id;
DROP INDEX IF EXISTS idx_workflow_executions_status;
DROP INDEX IF EXISTS idx_workflow_step_executions_execution_id;
@ -1,538 +0,0 @@
-- Migration: 6.1.2_phase3_phase4
-- Description: Phase 3 and Phase 4 multi-agent features
-- Features:
--   - Episodic memory (conversation summaries)
--   - Knowledge graphs (entity relationships)
--   - Human-in-the-loop approvals
--   - LLM observability and cost tracking

-- ============================================
-- EPISODIC MEMORY TABLES
-- ============================================

-- Conversation episodes (summaries)
CREATE TABLE IF NOT EXISTS conversation_episodes (
    id UUID PRIMARY KEY,
    user_id UUID NOT NULL,
    bot_id UUID NOT NULL,
    session_id UUID NOT NULL,
    summary TEXT NOT NULL,
    key_topics JSONB NOT NULL DEFAULT '[]',
    decisions JSONB NOT NULL DEFAULT '[]',
    action_items JSONB NOT NULL DEFAULT '[]',
    sentiment JSONB NOT NULL DEFAULT '{"score": 0, "label": "neutral", "confidence": 0.5}',
    resolution VARCHAR(50) NOT NULL DEFAULT 'unknown',
    message_count INTEGER NOT NULL DEFAULT 0,
    message_ids JSONB NOT NULL DEFAULT '[]',
    conversation_start TIMESTAMP WITH TIME ZONE NOT NULL,
    conversation_end TIMESTAMP WITH TIME ZONE NOT NULL,
    metadata JSONB NOT NULL DEFAULT '{}',
    created_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW()
);

-- Indexes for episodic memory
CREATE INDEX IF NOT EXISTS idx_episodes_user_id ON conversation_episodes(user_id);
CREATE INDEX IF NOT EXISTS idx_episodes_bot_id ON conversation_episodes(bot_id);
CREATE INDEX IF NOT EXISTS idx_episodes_session_id ON conversation_episodes(session_id);
CREATE INDEX IF NOT EXISTS idx_episodes_created_at ON conversation_episodes(created_at DESC);
CREATE INDEX IF NOT EXISTS idx_episodes_key_topics ON conversation_episodes USING GIN(key_topics);
CREATE INDEX IF NOT EXISTS idx_episodes_resolution ON conversation_episodes(resolution);

-- Full-text search on summaries
CREATE INDEX IF NOT EXISTS idx_episodes_summary_fts ON conversation_episodes
    USING GIN(to_tsvector('english', summary));

-- ============================================
-- KNOWLEDGE GRAPH TABLES
-- ============================================

-- Knowledge graph entities
CREATE TABLE IF NOT EXISTS kg_entities (
    id UUID PRIMARY KEY,
    bot_id UUID NOT NULL,
    entity_type VARCHAR(100) NOT NULL,
    entity_name VARCHAR(500) NOT NULL,
    aliases JSONB NOT NULL DEFAULT '[]',
    properties JSONB NOT NULL DEFAULT '{}',
    confidence DOUBLE PRECISION NOT NULL DEFAULT 1.0,
    source VARCHAR(50) NOT NULL DEFAULT 'manual',
    created_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW(),
    updated_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW(),

    UNIQUE(bot_id, entity_type, entity_name)
);

-- Knowledge graph relationships
CREATE TABLE IF NOT EXISTS kg_relationships (
    id UUID PRIMARY KEY,
    bot_id UUID NOT NULL,
    from_entity_id UUID NOT NULL REFERENCES kg_entities(id) ON DELETE CASCADE,
    to_entity_id UUID NOT NULL REFERENCES kg_entities(id) ON DELETE CASCADE,
    relationship_type VARCHAR(100) NOT NULL,
    properties JSONB NOT NULL DEFAULT '{}',
    confidence DOUBLE PRECISION NOT NULL DEFAULT 1.0,
    bidirectional BOOLEAN NOT NULL DEFAULT false,
    source VARCHAR(50) NOT NULL DEFAULT 'manual',
    created_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW(),

    UNIQUE(bot_id, from_entity_id, to_entity_id, relationship_type)
);

-- Indexes for knowledge graph
CREATE INDEX IF NOT EXISTS idx_kg_entities_bot_id ON kg_entities(bot_id);
CREATE INDEX IF NOT EXISTS idx_kg_entities_type ON kg_entities(entity_type);
CREATE INDEX IF NOT EXISTS idx_kg_entities_name ON kg_entities(entity_name);
CREATE INDEX IF NOT EXISTS idx_kg_entities_name_lower ON kg_entities(LOWER(entity_name));
CREATE INDEX IF NOT EXISTS idx_kg_entities_aliases ON kg_entities USING GIN(aliases);

CREATE INDEX IF NOT EXISTS idx_kg_relationships_bot_id ON kg_relationships(bot_id);
CREATE INDEX IF NOT EXISTS idx_kg_relationships_from ON kg_relationships(from_entity_id);
CREATE INDEX IF NOT EXISTS idx_kg_relationships_to ON kg_relationships(to_entity_id);
CREATE INDEX IF NOT EXISTS idx_kg_relationships_type ON kg_relationships(relationship_type);

-- Full-text search on entity names
CREATE INDEX IF NOT EXISTS idx_kg_entities_name_fts ON kg_entities
    USING GIN(to_tsvector('english', entity_name));

-- ============================================
-- HUMAN-IN-THE-LOOP APPROVAL TABLES
-- ============================================

-- Approval requests
CREATE TABLE IF NOT EXISTS approval_requests (
    id UUID PRIMARY KEY,
    bot_id UUID NOT NULL,
    session_id UUID NOT NULL,
    initiated_by UUID NOT NULL,
    approval_type VARCHAR(100) NOT NULL,
    status VARCHAR(50) NOT NULL DEFAULT 'pending',
    channel VARCHAR(50) NOT NULL,
    recipient VARCHAR(500) NOT NULL,
    context JSONB NOT NULL DEFAULT '{}',
    message TEXT NOT NULL,
    timeout_seconds INTEGER NOT NULL DEFAULT 3600,
    default_action VARCHAR(50),
    current_level INTEGER NOT NULL DEFAULT 1,
    total_levels INTEGER NOT NULL DEFAULT 1,
    created_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW(),
    expires_at TIMESTAMP WITH TIME ZONE NOT NULL,
    reminders_sent JSONB NOT NULL DEFAULT '[]',
    decision VARCHAR(50),
    decided_by VARCHAR(500),
    decided_at TIMESTAMP WITH TIME ZONE,
    comments TEXT
);

-- Approval chains
CREATE TABLE IF NOT EXISTS approval_chains (
    id UUID PRIMARY KEY,
    name VARCHAR(200) NOT NULL,
    bot_id UUID NOT NULL,
    levels JSONB NOT NULL DEFAULT '[]',
    stop_on_reject BOOLEAN NOT NULL DEFAULT true,
    require_all BOOLEAN NOT NULL DEFAULT false,
    description TEXT,
    created_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW(),

    UNIQUE(bot_id, name)
);

-- Approval audit log
CREATE TABLE IF NOT EXISTS approval_audit_log (
    id UUID PRIMARY KEY,
    request_id UUID NOT NULL REFERENCES approval_requests(id) ON DELETE CASCADE,
    action VARCHAR(50) NOT NULL,
    actor VARCHAR(500) NOT NULL,
    details JSONB NOT NULL DEFAULT '{}',
    timestamp TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW(),
    ip_address VARCHAR(50),
    user_agent TEXT
);

-- Approval tokens (for secure links)
CREATE TABLE IF NOT EXISTS approval_tokens (
    id UUID PRIMARY KEY,
    request_id UUID NOT NULL REFERENCES approval_requests(id) ON DELETE CASCADE,
    token VARCHAR(100) NOT NULL UNIQUE,
    action VARCHAR(50) NOT NULL,
    used BOOLEAN NOT NULL DEFAULT false,
    used_at TIMESTAMP WITH TIME ZONE,
    expires_at TIMESTAMP WITH TIME ZONE NOT NULL,
    created_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW()
);

-- Indexes for approval tables
CREATE INDEX IF NOT EXISTS idx_approval_requests_bot_id ON approval_requests(bot_id);
CREATE INDEX IF NOT EXISTS idx_approval_requests_session_id ON approval_requests(session_id);
CREATE INDEX IF NOT EXISTS idx_approval_requests_status ON approval_requests(status);
CREATE INDEX IF NOT EXISTS idx_approval_requests_expires_at ON approval_requests(expires_at);
CREATE INDEX IF NOT EXISTS idx_approval_requests_pending ON approval_requests(status, expires_at)
    WHERE status = 'pending';

CREATE INDEX IF NOT EXISTS idx_approval_audit_request_id ON approval_audit_log(request_id);
CREATE INDEX IF NOT EXISTS idx_approval_audit_timestamp ON approval_audit_log(timestamp DESC);

CREATE INDEX IF NOT EXISTS idx_approval_tokens_token ON approval_tokens(token);
CREATE INDEX IF NOT EXISTS idx_approval_tokens_request_id ON approval_tokens(request_id);

-- ============================================
-- LLM OBSERVABILITY TABLES
-- ============================================

-- LLM request metrics
CREATE TABLE IF NOT EXISTS llm_metrics (
    id UUID PRIMARY KEY,
    request_id UUID NOT NULL,
    session_id UUID NOT NULL,
    bot_id UUID NOT NULL,
    model VARCHAR(200) NOT NULL,
    request_type VARCHAR(50) NOT NULL,
    input_tokens BIGINT NOT NULL DEFAULT 0,
    output_tokens BIGINT NOT NULL DEFAULT 0,
    total_tokens BIGINT NOT NULL DEFAULT 0,
    latency_ms BIGINT NOT NULL DEFAULT 0,
    ttft_ms BIGINT,
    cached BOOLEAN NOT NULL DEFAULT false,
    success BOOLEAN NOT NULL DEFAULT true,
    error TEXT,
    estimated_cost DOUBLE PRECISION NOT NULL DEFAULT 0,
    timestamp TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW(),
    metadata JSONB NOT NULL DEFAULT '{}'
);

-- Aggregated metrics (hourly rollup)
CREATE TABLE IF NOT EXISTS llm_metrics_hourly (
    id UUID PRIMARY KEY,
    bot_id UUID NOT NULL,
    hour TIMESTAMP WITH TIME ZONE NOT NULL,
    total_requests BIGINT NOT NULL DEFAULT 0,
    successful_requests BIGINT NOT NULL DEFAULT 0,
    failed_requests BIGINT NOT NULL DEFAULT 0,
    cache_hits BIGINT NOT NULL DEFAULT 0,
    cache_misses BIGINT NOT NULL DEFAULT 0,
    total_input_tokens BIGINT NOT NULL DEFAULT 0,
    total_output_tokens BIGINT NOT NULL DEFAULT 0,
    total_tokens BIGINT NOT NULL DEFAULT 0,
    total_cost DOUBLE PRECISION NOT NULL DEFAULT 0,
    avg_latency_ms DOUBLE PRECISION NOT NULL DEFAULT 0,
    p50_latency_ms DOUBLE PRECISION NOT NULL DEFAULT 0,
    p95_latency_ms DOUBLE PRECISION NOT NULL DEFAULT 0,
    p99_latency_ms DOUBLE PRECISION NOT NULL DEFAULT 0,
    max_latency_ms BIGINT NOT NULL DEFAULT 0,
    min_latency_ms BIGINT NOT NULL DEFAULT 0,
    requests_by_model JSONB NOT NULL DEFAULT '{}',
    tokens_by_model JSONB NOT NULL DEFAULT '{}',
    cost_by_model JSONB NOT NULL DEFAULT '{}',
    created_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW(),

    UNIQUE(bot_id, hour)
);

-- Budget tracking
CREATE TABLE IF NOT EXISTS llm_budget (
    id UUID PRIMARY KEY,
    bot_id UUID NOT NULL UNIQUE,
    daily_limit DOUBLE PRECISION NOT NULL DEFAULT 100,
    monthly_limit DOUBLE PRECISION NOT NULL DEFAULT 2000,
    alert_threshold DOUBLE PRECISION NOT NULL DEFAULT 0.8,
    daily_spend DOUBLE PRECISION NOT NULL DEFAULT 0,
    monthly_spend DOUBLE PRECISION NOT NULL DEFAULT 0,
    daily_reset_date DATE NOT NULL DEFAULT CURRENT_DATE,
    monthly_reset_date DATE NOT NULL DEFAULT DATE_TRUNC('month', CURRENT_DATE)::DATE,
    daily_alert_sent BOOLEAN NOT NULL DEFAULT false,
    monthly_alert_sent BOOLEAN NOT NULL DEFAULT false,
    updated_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW()
);

-- Trace events
CREATE TABLE IF NOT EXISTS llm_traces (
    id UUID PRIMARY KEY,
    parent_id UUID,
    trace_id UUID NOT NULL,
    name VARCHAR(200) NOT NULL,
    component VARCHAR(100) NOT NULL,
    event_type VARCHAR(50) NOT NULL,
    duration_ms BIGINT,
    start_time TIMESTAMP WITH TIME ZONE NOT NULL,
    end_time TIMESTAMP WITH TIME ZONE,
    attributes JSONB NOT NULL DEFAULT '{}',
    status VARCHAR(50) NOT NULL DEFAULT 'in_progress',
    error TEXT,
    created_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW()
);

-- Indexes for observability tables
CREATE INDEX IF NOT EXISTS idx_llm_metrics_bot_id ON llm_metrics(bot_id);
CREATE INDEX IF NOT EXISTS idx_llm_metrics_session_id ON llm_metrics(session_id);
CREATE INDEX IF NOT EXISTS idx_llm_metrics_timestamp ON llm_metrics(timestamp DESC);
CREATE INDEX IF NOT EXISTS idx_llm_metrics_model ON llm_metrics(model);

CREATE INDEX IF NOT EXISTS idx_llm_metrics_hourly_bot_id ON llm_metrics_hourly(bot_id);
CREATE INDEX IF NOT EXISTS idx_llm_metrics_hourly_hour ON llm_metrics_hourly(hour DESC);

CREATE INDEX IF NOT EXISTS idx_llm_traces_trace_id ON llm_traces(trace_id);
CREATE INDEX IF NOT EXISTS idx_llm_traces_start_time ON llm_traces(start_time DESC);
CREATE INDEX IF NOT EXISTS idx_llm_traces_component ON llm_traces(component);

-- ============================================
-- WORKFLOW TABLES
-- ============================================

-- Workflow definitions
CREATE TABLE IF NOT EXISTS workflow_definitions (
    id UUID PRIMARY KEY,
    bot_id UUID NOT NULL,
    name VARCHAR(200) NOT NULL,
    description TEXT,
    steps JSONB NOT NULL DEFAULT '[]',
    triggers JSONB NOT NULL DEFAULT '[]',
    error_handling JSONB NOT NULL DEFAULT '{}',
    enabled BOOLEAN NOT NULL DEFAULT true,
    created_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW(),
    updated_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW(),

    UNIQUE(bot_id, name)
);

-- Workflow executions
CREATE TABLE IF NOT EXISTS workflow_executions (
    id UUID PRIMARY KEY,
    workflow_id UUID NOT NULL REFERENCES workflow_definitions(id) ON DELETE CASCADE,
    bot_id UUID NOT NULL,
    session_id UUID,
    initiated_by UUID,
    status VARCHAR(50) NOT NULL DEFAULT 'pending',
    current_step INTEGER NOT NULL DEFAULT 0,
    input_data JSONB NOT NULL DEFAULT '{}',
    output_data JSONB NOT NULL DEFAULT '{}',
    step_results JSONB NOT NULL DEFAULT '[]',
    error TEXT,
    started_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW(),
    completed_at TIMESTAMP WITH TIME ZONE,
    metadata JSONB NOT NULL DEFAULT '{}'
);

-- Workflow step executions
CREATE TABLE IF NOT EXISTS workflow_step_executions (
    id UUID PRIMARY KEY,
    execution_id UUID NOT NULL REFERENCES workflow_executions(id) ON DELETE CASCADE,
    step_name VARCHAR(200) NOT NULL,
    step_index INTEGER NOT NULL,
    status VARCHAR(50) NOT NULL DEFAULT 'pending',
    input_data JSONB NOT NULL DEFAULT '{}',
    output_data JSONB NOT NULL DEFAULT '{}',
    error TEXT,
    started_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW(),
    completed_at TIMESTAMP WITH TIME ZONE,
    duration_ms BIGINT
);

-- Indexes for workflow tables
CREATE INDEX IF NOT EXISTS idx_workflow_definitions_bot_id ON workflow_definitions(bot_id);
CREATE INDEX IF NOT EXISTS idx_workflow_executions_workflow_id ON workflow_executions(workflow_id);
CREATE INDEX IF NOT EXISTS idx_workflow_executions_bot_id ON workflow_executions(bot_id);
CREATE INDEX IF NOT EXISTS idx_workflow_executions_status ON workflow_executions(status);
CREATE INDEX IF NOT EXISTS idx_workflow_step_executions_execution_id ON workflow_step_executions(execution_id);

-- ============================================
-- FUNCTIONS AND TRIGGERS
-- ============================================

-- Function to update updated_at timestamp
CREATE OR REPLACE FUNCTION update_updated_at_column()
RETURNS TRIGGER AS $$
BEGIN
    NEW.updated_at = NOW();
    RETURN NEW;
END;
$$ language 'plpgsql';

-- Triggers for updated_at
DROP TRIGGER IF EXISTS update_kg_entities_updated_at ON kg_entities;
CREATE TRIGGER update_kg_entities_updated_at
    BEFORE UPDATE ON kg_entities
    FOR EACH ROW EXECUTE FUNCTION update_updated_at_column();

DROP TRIGGER IF EXISTS update_workflow_definitions_updated_at ON workflow_definitions;
CREATE TRIGGER update_workflow_definitions_updated_at
    BEFORE UPDATE ON workflow_definitions
    FOR EACH ROW EXECUTE FUNCTION update_updated_at_column();

DROP TRIGGER IF EXISTS update_llm_budget_updated_at ON llm_budget;
CREATE TRIGGER update_llm_budget_updated_at
    BEFORE UPDATE ON llm_budget
    FOR EACH ROW EXECUTE FUNCTION update_updated_at_column();

-- Function to aggregate hourly metrics
CREATE OR REPLACE FUNCTION aggregate_llm_metrics_hourly()
RETURNS void AS $$
DECLARE
    last_hour TIMESTAMP WITH TIME ZONE;
BEGIN
    last_hour := DATE_TRUNC('hour', NOW() - INTERVAL '1 hour');

    INSERT INTO llm_metrics_hourly (
        id, bot_id, hour, total_requests, successful_requests, failed_requests,
        cache_hits, cache_misses, total_input_tokens, total_output_tokens,
        total_tokens, total_cost, avg_latency_ms, p50_latency_ms, p95_latency_ms,
        p99_latency_ms, max_latency_ms, min_latency_ms, requests_by_model,
        tokens_by_model, cost_by_model
    )
    SELECT
        gen_random_uuid(),
        bot_id,
        last_hour,
        COUNT(*),
        COUNT(*) FILTER (WHERE success = true),
        COUNT(*) FILTER (WHERE success = false),
        COUNT(*) FILTER (WHERE cached = true),
        COUNT(*) FILTER (WHERE cached = false),
        SUM(input_tokens),
        SUM(output_tokens),
        SUM(total_tokens),
        SUM(estimated_cost),
        AVG(latency_ms),
        PERCENTILE_CONT(0.5) WITHIN GROUP (ORDER BY latency_ms),
        PERCENTILE_CONT(0.95) WITHIN GROUP (ORDER BY latency_ms),
        PERCENTILE_CONT(0.99) WITHIN GROUP (ORDER BY latency_ms),
        MAX(latency_ms),
        MIN(latency_ms),
        jsonb_object_agg(model, model_count) FILTER (WHERE model IS NOT NULL),
        jsonb_object_agg(model, model_tokens) FILTER (WHERE model IS NOT NULL),
        jsonb_object_agg(model, model_cost) FILTER (WHERE model IS NOT NULL)
    FROM (
        SELECT
            bot_id, model, success, cached, input_tokens, output_tokens,
            total_tokens, estimated_cost, latency_ms,
            COUNT(*) OVER (PARTITION BY bot_id, model) as model_count,
            SUM(total_tokens) OVER (PARTITION BY bot_id, model) as model_tokens,
            SUM(estimated_cost) OVER (PARTITION BY bot_id, model) as model_cost
        FROM llm_metrics
        WHERE timestamp >= last_hour
          AND timestamp < last_hour + INTERVAL '1 hour'
    ) sub
    GROUP BY bot_id
    ON CONFLICT (bot_id, hour) DO UPDATE SET
        total_requests = EXCLUDED.total_requests,
        successful_requests = EXCLUDED.successful_requests,
        failed_requests = EXCLUDED.failed_requests,
        cache_hits = EXCLUDED.cache_hits,
        cache_misses = EXCLUDED.cache_misses,
        total_input_tokens = EXCLUDED.total_input_tokens,
        total_output_tokens = EXCLUDED.total_output_tokens,
        total_tokens = EXCLUDED.total_tokens,
        total_cost = EXCLUDED.total_cost,
        avg_latency_ms = EXCLUDED.avg_latency_ms,
        p50_latency_ms = EXCLUDED.p50_latency_ms,
        p95_latency_ms = EXCLUDED.p95_latency_ms,
        p99_latency_ms = EXCLUDED.p99_latency_ms,
        max_latency_ms = EXCLUDED.max_latency_ms,
        min_latency_ms = EXCLUDED.min_latency_ms,
        requests_by_model = EXCLUDED.requests_by_model,
        tokens_by_model = EXCLUDED.tokens_by_model,
        cost_by_model = EXCLUDED.cost_by_model;
END;
$$ LANGUAGE plpgsql;

-- Function to reset daily budget
CREATE OR REPLACE FUNCTION reset_daily_budgets()
RETURNS void AS $$
BEGIN
    UPDATE llm_budget
    SET daily_spend = 0,
        daily_reset_date = CURRENT_DATE,
        daily_alert_sent = false
    WHERE daily_reset_date < CURRENT_DATE;
END;
$$ LANGUAGE plpgsql;

-- Function to reset monthly budget
CREATE OR REPLACE FUNCTION reset_monthly_budgets()
RETURNS void AS $$
BEGIN
    UPDATE llm_budget
    SET monthly_spend = 0,
        monthly_reset_date = DATE_TRUNC('month', CURRENT_DATE)::DATE,
        monthly_alert_sent = false
    WHERE monthly_reset_date < DATE_TRUNC('month', CURRENT_DATE)::DATE;
END;
$$ LANGUAGE plpgsql;

-- ============================================
-- VIEWS
-- ============================================

-- View for recent episode summaries with user info
CREATE OR REPLACE VIEW v_recent_episodes AS
SELECT
    e.id,
    e.user_id,
    e.bot_id,
    e.session_id,
    e.summary,
    e.key_topics,
    e.sentiment,
    e.resolution,
    e.message_count,
    e.created_at,
    e.conversation_start,
    e.conversation_end
FROM conversation_episodes e
ORDER BY e.created_at DESC;

-- View for knowledge graph statistics
CREATE OR REPLACE VIEW v_kg_stats AS
SELECT
    bot_id,
    COUNT(DISTINCT id) as total_entities,
    COUNT(DISTINCT entity_type) as entity_types,
    (SELECT COUNT(*) FROM kg_relationships r WHERE r.bot_id = e.bot_id) as total_relationships
FROM kg_entities e
GROUP BY bot_id;

-- View for approval status summary
CREATE OR REPLACE VIEW v_approval_summary AS
SELECT
    bot_id,
    status,
    COUNT(*) as count,
    AVG(EXTRACT(EPOCH FROM (COALESCE(decided_at, NOW()) - created_at))) as avg_resolution_seconds
FROM approval_requests
GROUP BY bot_id, status;

-- View for LLM usage summary (last 24 hours)
CREATE OR REPLACE VIEW v_llm_usage_24h AS
SELECT
    bot_id,
    model,
    COUNT(*) as request_count,
    SUM(total_tokens) as total_tokens,
    SUM(estimated_cost) as total_cost,
    AVG(latency_ms) as avg_latency_ms,
    SUM(CASE WHEN cached THEN 1 ELSE 0 END)::FLOAT / COUNT(*) as cache_hit_rate,
    SUM(CASE WHEN success THEN 0 ELSE 1 END)::FLOAT / COUNT(*) as error_rate
FROM llm_metrics
WHERE timestamp > NOW() - INTERVAL '24 hours'
GROUP BY bot_id, model;

-- ============================================
-- CLEANUP POLICIES (retention)
-- ============================================

-- Create a cleanup function for old data
CREATE OR REPLACE FUNCTION cleanup_old_observability_data(retention_days INTEGER DEFAULT 30)
RETURNS void AS $$
BEGIN
    -- Delete old LLM metrics (keep hourly aggregates longer)
    DELETE FROM llm_metrics WHERE timestamp < NOW() - (retention_days || ' days')::INTERVAL;

    -- Delete old traces
    DELETE FROM llm_traces WHERE start_time < NOW() - (retention_days || ' days')::INTERVAL;

    -- Delete old approval audit logs
    DELETE FROM approval_audit_log WHERE timestamp < NOW() - (retention_days * 3 || ' days')::INTERVAL;

    -- Delete expired approval tokens
    DELETE FROM approval_tokens WHERE expires_at < NOW() - INTERVAL '1 day';
END;
$$ LANGUAGE plpgsql;
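Note that aggregate_llm_metrics_hourly, the two budget resets, and cleanup_old_observability_data are plain void functions with no scheduler attached in this migration. A minimal sketch of how the server side might drive them follows; the function name, the diesel wiring, and the cadence are assumptions, not part of this commit.

use diesel::prelude::*;
use diesel::sql_query;

/// Run the periodic maintenance functions defined by migration 6.1.2.
/// Assumes an established PgConnection; scheduling (a tokio interval,
/// pg_cron, or external cron) is left to the caller.
fn run_metrics_maintenance(conn: &mut PgConnection) -> QueryResult<()> {
    // Roll the previous hour of llm_metrics into llm_metrics_hourly.
    sql_query("SELECT aggregate_llm_metrics_hourly()").execute(conn)?;
    // Reset spend counters whose reset dates have passed.
    sql_query("SELECT reset_daily_budgets()").execute(conn)?;
    sql_query("SELECT reset_monthly_budgets()").execute(conn)?;
    // Apply the migration's 30-day default retention policy.
    sql_query("SELECT cleanup_old_observability_data(30)").execute(conn)?;
    Ok(())
}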
@ -1,26 +0,0 @@
-- Rollback Suite Applications Migration
-- Removes tables for: Paper (Documents), Designer (Dialogs), and analytics support

-- Drop indexes first
DROP INDEX IF EXISTS idx_research_history_created;
DROP INDEX IF EXISTS idx_research_history_user;
DROP INDEX IF EXISTS idx_analytics_daily_bot;
DROP INDEX IF EXISTS idx_analytics_daily_date;
DROP INDEX IF EXISTS idx_analytics_events_created;
DROP INDEX IF EXISTS idx_analytics_events_session;
DROP INDEX IF EXISTS idx_analytics_events_user;
DROP INDEX IF EXISTS idx_analytics_events_type;
DROP INDEX IF EXISTS idx_source_templates_category;
DROP INDEX IF EXISTS idx_designer_dialogs_updated;
DROP INDEX IF EXISTS idx_designer_dialogs_active;
DROP INDEX IF EXISTS idx_designer_dialogs_bot;
DROP INDEX IF EXISTS idx_paper_documents_updated;
DROP INDEX IF EXISTS idx_paper_documents_owner;

-- Drop tables
DROP TABLE IF EXISTS research_search_history;
DROP TABLE IF EXISTS analytics_daily_aggregates;
DROP TABLE IF EXISTS analytics_events;
DROP TABLE IF EXISTS source_templates;
DROP TABLE IF EXISTS designer_dialogs;
DROP TABLE IF EXISTS paper_documents;
@ -1,87 +0,0 @@
-- Suite Applications Migration
-- Adds tables for: Paper (Documents), Designer (Dialogs), and additional analytics support

-- Paper Documents table
CREATE TABLE IF NOT EXISTS paper_documents (
    id TEXT PRIMARY KEY,
    title TEXT NOT NULL DEFAULT 'Untitled Document',
    content TEXT NOT NULL DEFAULT '',
    owner_id TEXT NOT NULL,
    created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
    updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
);

CREATE INDEX IF NOT EXISTS idx_paper_documents_owner ON paper_documents(owner_id);
CREATE INDEX IF NOT EXISTS idx_paper_documents_updated ON paper_documents(updated_at DESC);

-- Designer Dialogs table
CREATE TABLE IF NOT EXISTS designer_dialogs (
    id TEXT PRIMARY KEY,
    name TEXT NOT NULL,
    description TEXT NOT NULL DEFAULT '',
    bot_id TEXT NOT NULL,
    content TEXT NOT NULL DEFAULT '',
    is_active BOOLEAN NOT NULL DEFAULT false,
    created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
    updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
);

CREATE INDEX IF NOT EXISTS idx_designer_dialogs_bot ON designer_dialogs(bot_id);
CREATE INDEX IF NOT EXISTS idx_designer_dialogs_active ON designer_dialogs(is_active);
CREATE INDEX IF NOT EXISTS idx_designer_dialogs_updated ON designer_dialogs(updated_at DESC);

-- Source Templates table (for template metadata caching)
CREATE TABLE IF NOT EXISTS source_templates (
    id TEXT PRIMARY KEY,
    name TEXT NOT NULL,
    description TEXT NOT NULL DEFAULT '',
    category TEXT NOT NULL DEFAULT 'General',
    preview_url TEXT,
    file_path TEXT NOT NULL,
    created_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
);

CREATE INDEX IF NOT EXISTS idx_source_templates_category ON source_templates(category);

-- Analytics Events table (for additional event tracking)
CREATE TABLE IF NOT EXISTS analytics_events (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    event_type TEXT NOT NULL,
    user_id UUID,
    session_id UUID,
    bot_id UUID,
    metadata JSONB DEFAULT '{}',
    created_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
);

CREATE INDEX IF NOT EXISTS idx_analytics_events_type ON analytics_events(event_type);
CREATE INDEX IF NOT EXISTS idx_analytics_events_user ON analytics_events(user_id);
CREATE INDEX IF NOT EXISTS idx_analytics_events_session ON analytics_events(session_id);
CREATE INDEX IF NOT EXISTS idx_analytics_events_created ON analytics_events(created_at DESC);

-- Analytics Daily Aggregates (for faster dashboard queries)
CREATE TABLE IF NOT EXISTS analytics_daily_aggregates (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    date DATE NOT NULL,
    bot_id UUID,
    metric_name TEXT NOT NULL,
    metric_value BIGINT NOT NULL DEFAULT 0,
    created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
    UNIQUE(date, bot_id, metric_name)
);

CREATE INDEX IF NOT EXISTS idx_analytics_daily_date ON analytics_daily_aggregates(date DESC);
CREATE INDEX IF NOT EXISTS idx_analytics_daily_bot ON analytics_daily_aggregates(bot_id);

-- Research Search History (for recent searches feature)
CREATE TABLE IF NOT EXISTS research_search_history (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    user_id UUID NOT NULL,
    query TEXT NOT NULL,
    collection_id TEXT,
    results_count INTEGER NOT NULL DEFAULT 0,
    created_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
);

CREATE INDEX IF NOT EXISTS idx_research_history_user ON research_search_history(user_id);
CREATE INDEX IF NOT EXISTS idx_research_history_created ON research_search_history(created_at DESC);
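The UNIQUE(date, bot_id, metric_name) constraint is what makes the daily aggregates cheap to maintain: writers can upsert instead of read-modify-write. A hedged sketch of that upsert from the Rust side follows; bump_daily_metric is hypothetical, and it assumes diesel with the chrono and uuid features enabled.

use diesel::prelude::*;
use diesel::sql_query;
use diesel::sql_types::{BigInt, Date, Text, Uuid as SqlUuid};
use uuid::Uuid;

/// Bump one daily counter, relying on UNIQUE(date, bot_id, metric_name)
/// so re-running an aggregation adds into the same row. (Rows with a
/// NULL bot_id would not conflict and accumulate separately.)
fn bump_daily_metric(
    conn: &mut PgConnection,
    day: chrono::NaiveDate,
    bot_id: Uuid,
    metric: &str,
    delta: i64,
) -> QueryResult<usize> {
    sql_query(
        "INSERT INTO analytics_daily_aggregates (date, bot_id, metric_name, metric_value) \
         VALUES ($1, $2, $3, $4) \
         ON CONFLICT (date, bot_id, metric_name) \
         DO UPDATE SET metric_value = analytics_daily_aggregates.metric_value + EXCLUDED.metric_value",
    )
    .bind::<Date, _>(day)
    .bind::<SqlUuid, _>(bot_id)
    .bind::<Text, _>(metric)
    .bind::<BigInt, _>(delta)
    .execute(conn)
}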
@ -1,19 +0,0 @@
-- Down migration: Remove email tracking table and related objects

-- Drop trigger first
DROP TRIGGER IF EXISTS trigger_update_sent_email_tracking_updated_at ON sent_email_tracking;

-- Drop function
DROP FUNCTION IF EXISTS update_sent_email_tracking_updated_at();

-- Drop indexes
DROP INDEX IF EXISTS idx_sent_email_tracking_tracking_id;
DROP INDEX IF EXISTS idx_sent_email_tracking_bot_id;
DROP INDEX IF EXISTS idx_sent_email_tracking_account_id;
DROP INDEX IF EXISTS idx_sent_email_tracking_to_email;
DROP INDEX IF EXISTS idx_sent_email_tracking_sent_at;
DROP INDEX IF EXISTS idx_sent_email_tracking_is_read;
DROP INDEX IF EXISTS idx_sent_email_tracking_read_status;

-- Drop table
DROP TABLE IF EXISTS sent_email_tracking;
@ -1,56 +0,0 @@
-- Email Read Tracking Table
-- Stores sent email tracking data for read receipt functionality
-- Enabled via config.csv: email-read-pixel,true

CREATE TABLE IF NOT EXISTS sent_email_tracking (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    tracking_id UUID NOT NULL UNIQUE,
    bot_id UUID NOT NULL,
    account_id UUID NOT NULL,
    from_email VARCHAR(255) NOT NULL,
    to_email VARCHAR(255) NOT NULL,
    cc TEXT,
    bcc TEXT,
    subject TEXT NOT NULL,
    sent_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
    is_read BOOLEAN NOT NULL DEFAULT FALSE,
    read_at TIMESTAMPTZ,
    read_count INTEGER NOT NULL DEFAULT 0,
    first_read_ip VARCHAR(45),
    last_read_ip VARCHAR(45),
    user_agent TEXT,
    created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
    updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
);

-- Indexes for efficient queries
CREATE INDEX IF NOT EXISTS idx_sent_email_tracking_tracking_id ON sent_email_tracking(tracking_id);
CREATE INDEX IF NOT EXISTS idx_sent_email_tracking_bot_id ON sent_email_tracking(bot_id);
CREATE INDEX IF NOT EXISTS idx_sent_email_tracking_account_id ON sent_email_tracking(account_id);
CREATE INDEX IF NOT EXISTS idx_sent_email_tracking_to_email ON sent_email_tracking(to_email);
CREATE INDEX IF NOT EXISTS idx_sent_email_tracking_sent_at ON sent_email_tracking(sent_at DESC);
CREATE INDEX IF NOT EXISTS idx_sent_email_tracking_is_read ON sent_email_tracking(is_read);
CREATE INDEX IF NOT EXISTS idx_sent_email_tracking_read_status ON sent_email_tracking(bot_id, is_read, sent_at DESC);

-- Trigger to auto-update updated_at
CREATE OR REPLACE FUNCTION update_sent_email_tracking_updated_at()
RETURNS TRIGGER AS $$
BEGIN
    NEW.updated_at = NOW();
    RETURN NEW;
END;
$$ LANGUAGE plpgsql;

DROP TRIGGER IF EXISTS trigger_update_sent_email_tracking_updated_at ON sent_email_tracking;
CREATE TRIGGER trigger_update_sent_email_tracking_updated_at
    BEFORE UPDATE ON sent_email_tracking
    FOR EACH ROW
    EXECUTE FUNCTION update_sent_email_tracking_updated_at();

-- Add comment for documentation
COMMENT ON TABLE sent_email_tracking IS 'Tracks sent emails for read receipt functionality via tracking pixel';
COMMENT ON COLUMN sent_email_tracking.tracking_id IS 'Unique ID embedded in tracking pixel URL';
COMMENT ON COLUMN sent_email_tracking.is_read IS 'Whether the email has been opened (pixel loaded)';
COMMENT ON COLUMN sent_email_tracking.read_count IS 'Number of times the email was opened';
COMMENT ON COLUMN sent_email_tracking.first_read_ip IS 'IP address of first email open';
COMMENT ON COLUMN sent_email_tracking.last_read_ip IS 'IP address of most recent email open';
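Given this schema, a pixel hit needs only one UPDATE keyed on tracking_id. A sketch of that statement from Rust follows; record_email_open and its handler wiring are assumptions, while the column semantics come from the COMMENTs above.

use diesel::prelude::*;
use diesel::sql_query;
use diesel::sql_types::{Text, Uuid as SqlUuid};
use uuid::Uuid;

/// Mark a tracked email as opened when its pixel URL is fetched.
/// first_read_ip/read_at are only set on the first open; read_count
/// and last_read_ip are updated on every open.
fn record_email_open(
    conn: &mut PgConnection,
    tracking_id: Uuid,
    ip: &str,
) -> QueryResult<usize> {
    sql_query(
        "UPDATE sent_email_tracking \
         SET is_read = TRUE, \
             read_at = COALESCE(read_at, NOW()), \
             read_count = read_count + 1, \
             first_read_ip = COALESCE(first_read_ip, $2), \
             last_read_ip = $2 \
         WHERE tracking_id = $1",
    )
    .bind::<SqlUuid, _>(tracking_id)
    .bind::<Text, _>(ip)
    .execute(conn)
}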
File diff suppressed because it is too large
@ -257,55 +257,35 @@ impl EmailConfig {
 impl AppConfig {
     pub fn from_database(pool: &DbPool) -> Result<Self, diesel::result::Error> {
         use crate::shared::models::schema::bot_configuration::dsl::*;
+        use diesel::prelude::*;

         let mut conn = pool.get().map_err(|e| {
             diesel::result::Error::DatabaseError(
                 diesel::result::DatabaseErrorKind::UnableToSendCommand,
                 Box::new(e.to_string()),
             )
         })?;
-        let config_map: HashMap<String, (Uuid, Uuid, String, String, String, bool)> =
-            bot_configuration
-                .select((
-                    id,
-                    bot_id,
-                    config_key,
-                    config_value,
-                    config_type,
-                    is_encrypted,
-                ))
-                .load::<(Uuid, Uuid, String, String, String, bool)>(&mut conn)
-                .unwrap_or_default()
-                .into_iter()
-                .map(|(_, _, key, value, _, _)| {
-                    (
-                        key.clone(),
-                        (Uuid::nil(), Uuid::nil(), key, value, String::new(), false),
-                    )
-                })
-                .collect();
-        let mut get_str = |key: &str, default: &str| -> String {
-            bot_configuration
-                .filter(config_key.eq(key))
-                .select(config_value)
-                .first::<String>(&mut conn)
-                .unwrap_or_else(|_| default.to_string())
-        };
-        let _get_u32 = |key: &str, default: u32| -> u32 {
-            config_map
-                .get(key)
-                .and_then(|v| v.3.parse().ok())
-                .unwrap_or(default)
-        };
+        // Load all config values into a HashMap for efficient lookup
+        let config_map: HashMap<String, String> = bot_configuration
+            .select((config_key, config_value))
+            .load::<(String, String)>(&mut conn)
+            .unwrap_or_default()
+            .into_iter()
+            .collect();

+        // Helper functions that use the pre-loaded config_map
+        let get_str = |key: &str, default: &str| -> String {
+            config_map
+                .get(key)
+                .cloned()
+                .unwrap_or_else(|| default.to_string())
+        };

         let get_u16 = |key: &str, default: u16| -> u16 {
             config_map
                 .get(key)
-                .and_then(|v| v.3.parse().ok())
-                .unwrap_or(default)
-        };
-        let _get_bool = |key: &str, default: bool| -> bool {
-            config_map
-                .get(key)
-                .map(|v| v.3.to_lowercase() == "true")
+                .and_then(|v| v.parse().ok())
                 .unwrap_or(default)
         };
         let drive = DriveConfig {

@ -326,9 +306,9 @@ impl AppConfig {
             drive,
             email,
             server: ServerConfig {
-                host: get_str("SERVER_HOST", "127.0.0.1"),
-                port: get_u16("SERVER_PORT", 8080),
-                base_url: get_str("SERVER_BASE_URL", "http://localhost:8080"),
+                host: get_str("server_host", "0.0.0.0"),
+                port: get_u16("server_port", 8080),
+                base_url: get_str("server_base_url", "http://localhost:8080"),
             },
             site_path: {
                 ConfigManager::new(pool.clone())
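The hunk above replaces one-SQL-query-per-key lookups with closures over a map loaded once. A minimal standalone sketch of that pattern, with illustrative names only:

use std::collections::HashMap;

fn main() {
    // Pre-load once, then answer every lookup from memory instead of
    // issuing one SQL query per key (the cost the old get_str paid).
    let config_map: HashMap<String, String> =
        [("server_port".to_string(), "8080".to_string())].into();

    let get_u16 = |key: &str, default: u16| -> u16 {
        config_map.get(key).and_then(|v| v.parse().ok()).unwrap_or(default)
    };

    assert_eq!(get_u16("server_port", 9090), 8080);
    assert_eq!(get_u16("missing_key", 9090), 9090);
}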
@ -434,8 +434,10 @@ impl PackageManager {
         } else {
             PathBuf::from("/opt/gbo/data")
         };
+        // CONF_PATH should be the base conf directory, not component-specific
+        // Commands that need component subdirs include them explicitly (e.g., {{CONF_PATH}}/directory/zitadel.yaml)
         let conf_path = if target == "local" {
-            self.base_path.join("conf").join(component)
+            self.base_path.join("conf")
         } else {
             PathBuf::from("/opt/gbo/conf")
         };

@ -444,12 +446,28 @@ impl PackageManager {
         } else {
             PathBuf::from("/opt/gbo/logs")
         };

+        // Get DB password from Vault for commands that need it (e.g., PostgreSQL initdb)
+        let db_password = match get_database_url_sync() {
+            Ok(url) => {
+                let (_, password, _, _, _) = parse_database_url(&url);
+                password
+            }
+            Err(_) => {
+                // Vault not available yet - this is OK during early bootstrap
+                // Commands that don't need DB_PASSWORD will still work
+                trace!("Vault not available for DB_PASSWORD, using empty string");
+                String::new()
+            }
+        };

         for cmd in commands {
             let rendered_cmd = cmd
                 .replace("{{BIN_PATH}}", &bin_path.to_string_lossy())
                 .replace("{{DATA_PATH}}", &data_path.to_string_lossy())
                 .replace("{{CONF_PATH}}", &conf_path.to_string_lossy())
-                .replace("{{LOGS_PATH}}", &logs_path.to_string_lossy());
+                .replace("{{LOGS_PATH}}", &logs_path.to_string_lossy())
+                .replace("{{DB_PASSWORD}}", &db_password);
             if target == "local" {
                 trace!("Executing command: {}", rendered_cmd);
                 let child = Command::new("bash")
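The new {{DB_PASSWORD}} token joins the existing {{BIN_PATH}}/{{DATA_PATH}}/{{CONF_PATH}}/{{LOGS_PATH}} set and is rendered the same way. A minimal sketch of that literal token substitution; render and its argument shape are illustrative, not the crate's API:

fn render(cmd: &str, vars: &[(&str, &str)]) -> String {
    // Same simple scheme the diff relies on: each {{KEY}} occurrence is
    // replaced literally, with no escaping or nested expansion.
    vars.iter().fold(cmd.to_string(), |acc, (key, value)| {
        acc.replace(&format!("{{{{{}}}}}", key), value)
    })
}

fn main() {
    let cmd = render(
        "PGPASSWORD='{{DB_PASSWORD}}' {{BIN_PATH}}/psql -c 'SELECT 1'",
        &[("DB_PASSWORD", "s3cret"), ("BIN_PATH", "/opt/gbo/bin/tables")],
    );
    assert_eq!(cmd, "PGPASSWORD='s3cret' /opt/gbo/bin/tables/psql -c 'SELECT 1'");
}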
@ -107,7 +107,7 @@ impl PackageManager {
                 ("MINIO_ROOT_PASSWORD".to_string(), "$DRIVE_SECRET".to_string()),
             ]),
             data_download_list: Vec::new(),
-            exec_cmd: "nohup {{BIN_PATH}}/minio server {{DATA_PATH}} --address :9000 --console-address :9001 --certs-dir {{CONF_PATH}}/system/certificates/minio > {{LOGS_PATH}}/minio.log 2>&1 &".to_string(),
+            exec_cmd: "nohup {{BIN_PATH}}/minio server {{DATA_PATH}} --address :9000 --console-address :9001 --certs-dir {{CONF_PATH}}/system/certificates/drive > {{LOGS_PATH}}/minio.log 2>&1 &".to_string(),
             check_cmd: "ps -ef | grep minio | grep -v grep | grep {{BIN_PATH}}".to_string(),
         },
     );
@ -124,20 +124,20 @@ impl PackageManager {
             macos_packages: vec![],
             windows_packages: vec![],
             download_url: Some(
-                "https://github.com/theseus-rs/postgresql-binaries/releases/download/18.0.0/postgresql-18.0.0-x86_64-unknown-linux-gnu.tar.gz".to_string(),
+                "https://github.com/theseus-rs/postgresql-binaries/releases/download/17.2.0/postgresql-17.2.0-x86_64-unknown-linux-gnu.tar.gz".to_string(),
             ),
             binary_name: Some("postgres".to_string()),
             pre_install_cmds_linux: vec![],
             post_install_cmds_linux: vec![
                 "chmod +x ./bin/*".to_string(),
-                format!("if [ ! -d \"{{{{DATA_PATH}}}}/pgdata\" ]; then PG_PASSWORD={{DB_PASSWORD}} ./bin/initdb -D {{{{DATA_PATH}}}}/pgdata -U gbuser --pwfile=<(echo $PG_PASSWORD); fi"),
+                "if [ ! -d \"{{DATA_PATH}}/pgdata\" ]; then PG_PASSWORD='{{DB_PASSWORD}}' ./bin/initdb -D {{DATA_PATH}}/pgdata -U gbuser --pwfile=<(echo \"$PG_PASSWORD\"); fi".to_string(),
                 "echo \"data_directory = '{{DATA_PATH}}/pgdata'\" > {{CONF_PATH}}/postgresql.conf".to_string(),
                 "echo \"ident_file = '{{CONF_PATH}}/pg_ident.conf'\" >> {{CONF_PATH}}/postgresql.conf".to_string(),
                 "echo \"port = 5432\" >> {{CONF_PATH}}/postgresql.conf".to_string(),
                 "echo \"listen_addresses = '*'\" >> {{CONF_PATH}}/postgresql.conf".to_string(),
                 "echo \"ssl = on\" >> {{CONF_PATH}}/postgresql.conf".to_string(),
-                "echo \"ssl_cert_file = '{{CONF_PATH}}/system/certificates/postgres/server.crt'\" >> {{CONF_PATH}}/postgresql.conf".to_string(),
-                "echo \"ssl_key_file = '{{CONF_PATH}}/system/certificates/postgres/server.key'\" >> {{CONF_PATH}}/postgresql.conf".to_string(),
+                "echo \"ssl_cert_file = '{{CONF_PATH}}/system/certificates/tables/server.crt'\" >> {{CONF_PATH}}/postgresql.conf".to_string(),
+                "echo \"ssl_key_file = '{{CONF_PATH}}/system/certificates/tables/server.key'\" >> {{CONF_PATH}}/postgresql.conf".to_string(),
                 "echo \"ssl_ca_file = '{{CONF_PATH}}/system/certificates/ca/ca.crt'\" >> {{CONF_PATH}}/postgresql.conf".to_string(),
                 "echo \"log_directory = '{{LOGS_PATH}}'\" >> {{CONF_PATH}}/postgresql.conf".to_string(),
                 "echo \"logging_collector = on\" >> {{CONF_PATH}}/postgresql.conf".to_string(),

@ -147,7 +147,7 @@ impl PackageManager {
                 "sleep 5".to_string(),
                 "for i in $(seq 1 30); do ./bin/pg_isready -h localhost -p 5432 -U gbuser >/dev/null 2>&1 && echo 'PostgreSQL is ready' && break || echo \"Waiting for PostgreSQL... attempt $i/30\" >&2; sleep 2; done".to_string(),
                 "./bin/pg_isready -h localhost -p 5432 -U gbuser || { echo 'ERROR: PostgreSQL failed to start properly' >&2; cat {{LOGS_PATH}}/postgres.log >&2; exit 1; }".to_string(),
-                format!("PGPASSWORD={{DB_PASSWORD}} ./bin/psql -h localhost -p 5432 -U gbuser -d postgres -c \"CREATE DATABASE botserver WITH OWNER gbuser\" 2>&1 | grep -v 'already exists' || true"),
+                "PGPASSWORD='{{DB_PASSWORD}}' ./bin/psql -h localhost -p 5432 -U gbuser -d postgres -c \"CREATE DATABASE botserver WITH OWNER gbuser\" 2>&1 | grep -v 'already exists' || true".to_string(),
             ],
             pre_install_cmds_macos: vec![],
             post_install_cmds_macos: vec![
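The reworked initdb and psql commands wrap the substituted password in single quotes, which protects spaces and $ but not an embedded single quote. A small sketch of the stricter escaping a generated secret would otherwise need; shell_single_quote is hypothetical, not in this commit:

/// Quote a value for safe interpolation inside a bash command.
/// Inside single quotes, the only unsafe character is ' itself,
/// which is escaped by closing, emitting \', and reopening.
fn shell_single_quote(value: &str) -> String {
    format!("'{}'", value.replace('\'', r"'\''"))
}

fn main() {
    assert_eq!(shell_single_quote("s3cret"), "'s3cret'");
    assert_eq!(shell_single_quote("a'b"), r"'a'\''b'");
}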
@ -165,6 +165,8 @@ impl PackageManager {
     }

     fn register_cache(&mut self) {
+        // Using Valkey - the Redis-compatible fork with pre-built binaries
+        // Valkey is maintained by the Linux Foundation and provides direct binary downloads
         self.components.insert(
             "cache".to_string(),
             ComponentConfig {

@ -175,19 +177,23 @@ impl PackageManager {
                 macos_packages: vec![],
                 windows_packages: vec![],
                 download_url: Some(
-                    "https://download.redis.io/redis-stable.tar.gz".to_string(),
+                    "https://github.com/valkey-io/valkey/releases/download/9.0.0/valkey-9.0.0-linux-x86_64.tar.gz".to_string(),
                 ),
-                binary_name: Some("redis-server".to_string()),
+                binary_name: Some("valkey-server".to_string()),
                 pre_install_cmds_linux: vec![],
-                post_install_cmds_linux: vec![],
+                post_install_cmds_linux: vec![
+                    // Create symlink for redis-server compatibility
+                    "ln -sf {{BIN_PATH}}/valkey-server {{BIN_PATH}}/redis-server 2>/dev/null || true".to_string(),
+                    "ln -sf {{BIN_PATH}}/valkey-cli {{BIN_PATH}}/redis-cli 2>/dev/null || true".to_string(),
+                ],
                 pre_install_cmds_macos: vec![],
                 post_install_cmds_macos: vec![],
                 pre_install_cmds_windows: vec![],
                 post_install_cmds_windows: vec![],
                 env_vars: HashMap::new(),
                 data_download_list: Vec::new(),
-                exec_cmd: "{{BIN_PATH}}/redis-server --port 0 --tls-port 6379 --tls-cert-file {{CONF_PATH}}/system/certificates/redis/server.crt --tls-key-file {{CONF_PATH}}/system/certificates/redis/server.key --tls-ca-cert-file {{CONF_PATH}}/system/certificates/ca/ca.crt".to_string(),
-                check_cmd: "ps -ef | grep redis-server | grep -v grep | grep {{BIN_PATH}}".to_string(),
+                exec_cmd: "nohup {{BIN_PATH}}/valkey-server --port 6379 --dir {{DATA_PATH}} --logfile {{LOGS_PATH}}/valkey.log --daemonize yes > {{LOGS_PATH}}/valkey-startup.log 2>&1".to_string(),
+                check_cmd: "{{BIN_PATH}}/valkey-cli ping 2>/dev/null | grep -q PONG".to_string(),
             },
         );
     }
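The new check_cmd tests liveness with a PING rather than grepping the process table, so it only passes once the server actually answers. The same probe host-side, sketched in Rust with an illustrative function name:

use std::process::Command;

fn cache_is_ready(bin_path: &str) -> bool {
    // Mirrors the new check_cmd: a PING answered with PONG proves the
    // server accepts connections, unlike the old ps-grep check which
    // only proved a process existed.
    Command::new(format!("{bin_path}/valkey-cli"))
        .arg("ping")
        .output()
        .map(|out| String::from_utf8_lossy(&out.stdout).contains("PONG"))
        .unwrap_or(false)
}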
@ -308,11 +314,14 @@ impl PackageManager {
             binary_name: Some("zitadel".to_string()),
             pre_install_cmds_linux: vec![
                 "mkdir -p {{CONF_PATH}}/directory".to_string(),
+                "mkdir -p {{LOGS_PATH}}".to_string(),
             ],
             post_install_cmds_linux: vec![
-                // Initialize Zitadel with first instance setup to generate admin PAT
-                "{{BIN_PATH}}/zitadel init --config {{CONF_PATH}}/directory/zitadel.yaml".to_string(),
-                "{{BIN_PATH}}/zitadel setup --config {{CONF_PATH}}/directory/zitadel.yaml --init-projections --masterkeyFromEnv --steps {{CONF_PATH}}/directory/steps.yaml".to_string(),
+                // Use start-from-init which does init + setup + start in one command
+                // This properly creates the first instance with PAT
+                "ZITADEL_MASTERKEY=MasterkeyNeedsToHave32Characters nohup {{BIN_PATH}}/zitadel start-from-init --config {{CONF_PATH}}/directory/zitadel.yaml --masterkeyFromEnv --tlsMode disabled --steps {{CONF_PATH}}/directory/steps.yaml > {{LOGS_PATH}}/zitadel.log 2>&1 &".to_string(),
+                // Wait for Zitadel to be fully ready (up to 90 seconds for first instance setup)
+                "for i in $(seq 1 90); do curl -sf http://localhost:8080/debug/ready && break || sleep 1; done".to_string(),
             ],
             pre_install_cmds_macos: vec![
                 "mkdir -p {{CONF_PATH}}/directory".to_string(),
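The readiness loop above polls /debug/ready once per second for up to 90 seconds. An equivalent host-side sketch in Rust, shelling out to curl exactly as the install command does; wait_for_ready is illustrative:

use std::{process::Command, thread, time::Duration};

fn wait_for_ready(url: &str, attempts: u32) -> bool {
    // curl -sf exits non-zero on connection failure or HTTP >= 400,
    // so a successful status means the endpoint answered 2xx.
    for _ in 0..attempts {
        let ok = Command::new("curl")
            .args(["-sf", url])
            .status()
            .map(|s| s.success())
            .unwrap_or(false);
        if ok {
            return true;
        }
        thread::sleep(Duration::from_secs(1));
    }
    false
}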
@ -718,31 +727,17 @@ impl PackageManager {
                 macos_packages: vec![],
                 windows_packages: vec![],
                 download_url: Some(
-                    "https://releases.hashicorp.com/vault/1.15.4/vault_1.15.4_linux_amd64.zip".to_string(),
+                    "https://releases.hashicorp.com/vault/1.15.4/vault_1.15.4_linux_amd64.zip"
+                        .to_string(),
                 ),
                 binary_name: Some("vault".to_string()),
                 pre_install_cmds_linux: vec![
                     "mkdir -p {{DATA_PATH}}/vault".to_string(),
                     "mkdir -p {{CONF_PATH}}/vault".to_string(),
                 ],
-                post_install_cmds_linux: vec![
-                    // Initialize Vault and store root token
-                    "{{BIN_PATH}}/vault operator init -key-shares=1 -key-threshold=1 -format=json > {{CONF_PATH}}/vault/init.json".to_string(),
-                    // Extract and store unseal key and root token
-                    "VAULT_UNSEAL_KEY=$(cat {{CONF_PATH}}/vault/init.json | grep -o '\"unseal_keys_b64\":\\[\"[^\"]*\"' | cut -d'\"' -f4)".to_string(),
-                    "VAULT_ROOT_TOKEN=$(cat {{CONF_PATH}}/vault/init.json | grep -o '\"root_token\":\"[^\"]*\"' | cut -d'\"' -f4)".to_string(),
-                    // Unseal vault
-                    "{{BIN_PATH}}/vault operator unseal $VAULT_UNSEAL_KEY".to_string(),
-                    // Enable KV secrets engine
-                    "VAULT_TOKEN=$VAULT_ROOT_TOKEN {{BIN_PATH}}/vault secrets enable -path=gbo kv-v2".to_string(),
-                    // Store initial secrets paths
-                    "VAULT_TOKEN=$VAULT_ROOT_TOKEN {{BIN_PATH}}/vault kv put gbo/drive accesskey={{GENERATED_PASSWORD}} secret={{GENERATED_PASSWORD}}".to_string(),
-                    "VAULT_TOKEN=$VAULT_ROOT_TOKEN {{BIN_PATH}}/vault kv put gbo/tables username=gbuser password={{GENERATED_PASSWORD}}".to_string(),
-                    "VAULT_TOKEN=$VAULT_ROOT_TOKEN {{BIN_PATH}}/vault kv put gbo/cache password={{GENERATED_PASSWORD}}".to_string(),
-                    "VAULT_TOKEN=$VAULT_ROOT_TOKEN {{BIN_PATH}}/vault kv put gbo/directory client_id= client_secret=".to_string(),
-                    "echo 'Vault initialized. Add VAULT_ADDR=https://localhost:8200 and VAULT_TOKEN to .env'".to_string(),
-                    "chmod 600 {{CONF_PATH}}/vault/init.json".to_string(),
-                ],
+                // Note: Vault initialization is handled in bootstrap::setup_vault()
+                // because it requires the Vault server to be running first
+                post_install_cmds_linux: vec![],
                 pre_install_cmds_macos: vec![
                     "mkdir -p {{DATA_PATH}}/vault".to_string(),
                     "mkdir -p {{CONF_PATH}}/vault".to_string(),

@ -752,13 +747,18 @@ impl PackageManager {
                 post_install_cmds_windows: vec![],
                 env_vars: {
                     let mut env = HashMap::new();
-                    env.insert("VAULT_ADDR".to_string(), "https://localhost:8200".to_string());
+                    env.insert(
+                        "VAULT_ADDR".to_string(),
+                        "https://localhost:8200".to_string(),
+                    );
                     env.insert("VAULT_SKIP_VERIFY".to_string(), "true".to_string());
                     env
                 },
                 data_download_list: Vec::new(),
-                exec_cmd: "{{BIN_PATH}}/vault server -config={{CONF_PATH}}/vault/config.hcl".to_string(),
-                check_cmd: "curl -f -k https://localhost:8200/v1/sys/health >/dev/null 2>&1".to_string(),
+                exec_cmd: "{{BIN_PATH}}/vault server -config={{CONF_PATH}}/vault/config.hcl"
+                    .to_string(),
+                check_cmd: "curl -f -k https://localhost:8200/v1/sys/health >/dev/null 2>&1"
+                    .to_string(),
             },
         );
     }
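Vault's /v1/sys/health endpoint answers with a non-2xx status while uninitialized or sealed, so the curl -f in check_cmd only succeeds once Vault is actually usable. A host-side mirror of that probe; vault_is_up is illustrative, and -k matches the self-signed local certificate setup:

use std::process::Command;

fn vault_is_up() -> bool {
    // -f makes curl fail on HTTP >= 400, so sealed (503) and
    // uninitialized (501) states correctly report "not up".
    Command::new("curl")
        .args(["-f", "-k", "-s", "-o", "/dev/null",
               "https://localhost:8200/v1/sys/health"])
        .status()
        .map(|s| s.success())
        .unwrap_or(false)
}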
@@ -98,10 +98,12 @@ impl SecretsManager {
         // mTLS certificate paths - default to botserver-stack paths
         let ca_cert = env::var("VAULT_CACERT")
             .unwrap_or_else(|_| "./botserver-stack/conf/system/certificates/ca/ca.crt".to_string());
-        let client_cert = env::var("VAULT_CLIENT_CERT")
-            .unwrap_or_else(|_| "./botserver-stack/conf/system/certificates/botserver/client.crt".to_string());
-        let client_key = env::var("VAULT_CLIENT_KEY")
-            .unwrap_or_else(|_| "./botserver-stack/conf/system/certificates/botserver/client.key".to_string());
+        let client_cert = env::var("VAULT_CLIENT_CERT").unwrap_or_else(|_| {
+            "./botserver-stack/conf/system/certificates/botserver/client.crt".to_string()
+        });
+        let client_key = env::var("VAULT_CLIENT_KEY").unwrap_or_else(|_| {
+            "./botserver-stack/conf/system/certificates/botserver/client.key".to_string()
+        });
 
         let enabled = !token.is_empty() && !addr.is_empty();
 
@@ -121,9 +123,7 @@ impl SecretsManager {
         let key_path = PathBuf::from(&client_key);
 
         let mut settings_builder = VaultClientSettingsBuilder::default();
-        settings_builder
-            .address(&addr)
-            .token(&token);
+        settings_builder.address(&addr).token(&token);
 
         // Configure TLS verification
         if skip_verify {
@@ -131,7 +131,6 @@ impl SecretsManager {
             settings_builder.verify(false);
         } else {
             settings_builder.verify(true);
 
             // Add CA certificate if it exists
             if ca_path.exists() {
                 info!("Using CA certificate for Vault: {}", ca_cert);
@@ -140,17 +139,8 @@ impl SecretsManager {
             }
 
             // Configure mTLS client certificates if they exist
-            if cert_path.exists() && key_path.exists() {
+            if cert_path.exists() && key_path.exists() && !skip_verify {
                 info!("Using mTLS client certificate for Vault: {}", client_cert);
-                // Note: vaultrs uses the identity parameter for client certificates
-                // The identity is a PKCS12/PFX file or can be set via environment
-                // For now, we set environment variables that the underlying reqwest client will use
-                env::set_var("SSL_CERT_FILE", &ca_cert);
-                // Client certificate authentication is handled by reqwest through env vars
-                // or by building a custom client - vaultrs doesn't directly support client certs
-                // We'll document this limitation and use token auth with TLS verification
-            } else if !skip_verify {
-                info!("mTLS client certificates not found at {} - using token auth with TLS", client_cert);
             }
 
         let settings = settings_builder.build()?;
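For context, the builder calls in these hunks come from the vaultrs crate, which the surrounding code appears to use. A minimal standalone sketch of how they fit together, assuming placeholder address/token values and a simplified error type; this is a sketch of the pattern, not the project's full SecretsManager:

use vaultrs::client::{VaultClient, VaultClientSettingsBuilder};

fn build_vault_client(
    addr: &str,
    token: &str,
    skip_verify: bool,
) -> Result<VaultClient, Box<dyn std::error::Error>> {
    let mut settings_builder = VaultClientSettingsBuilder::default();
    settings_builder.address(addr).token(token);
    // verify(false) disables TLS certificate verification - acceptable only
    // for local development against a dev-mode Vault.
    settings_builder.verify(!skip_verify);
    let settings = settings_builder.build()?;
    Ok(VaultClient::new(settings)?)
}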
@@ -140,11 +140,16 @@ pub fn to_array(value: Dynamic) -> Array {
 /// Download a file from a URL with progress bar (when progress-bars feature is enabled)
 #[cfg(feature = "progress-bars")]
 pub async fn download_file(url: &str, output_path: &str) -> Result<(), anyhow::Error> {
+    use std::time::Duration;
     let url = url.to_string();
     let output_path = output_path.to_string();
     let download_handle = tokio::spawn(async move {
         let client = Client::builder()
             .user_agent("Mozilla/5.0 (compatible; BotServer/1.0)")
+            .connect_timeout(Duration::from_secs(30))
+            .read_timeout(Duration::from_secs(300))
+            .pool_idle_timeout(Duration::from_secs(90))
+            .tcp_keepalive(Duration::from_secs(60))
             .build()?;
         let response = client.get(&url).send().await?;
         if response.status().is_success() {
@@ -176,11 +181,16 @@ pub async fn download_file(url: &str, output_path: &str) -> Result<(), anyhow::Error> {
 /// Download a file from a URL (without progress bar when progress-bars feature is disabled)
 #[cfg(not(feature = "progress-bars"))]
 pub async fn download_file(url: &str, output_path: &str) -> Result<(), anyhow::Error> {
+    use std::time::Duration;
     let url = url.to_string();
     let output_path = output_path.to_string();
     let download_handle = tokio::spawn(async move {
         let client = Client::builder()
             .user_agent("Mozilla/5.0 (compatible; BotServer/1.0)")
+            .connect_timeout(Duration::from_secs(30))
+            .read_timeout(Duration::from_secs(300))
+            .pool_idle_timeout(Duration::from_secs(90))
+            .tcp_keepalive(Duration::from_secs(60))
             .build()?;
         let response = client.get(&url).send().await?;
         if response.status().is_success() {
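The four new builder calls give the download client a full timeout profile. A minimal sketch of the same configuration in isolation, assuming reqwest 0.12+ (where read_timeout is available); the URL handling is simplified relative to the real download_file:

use reqwest::Client;
use std::time::Duration;

async fn fetch_bytes(url: &str) -> Result<Vec<u8>, reqwest::Error> {
    let client = Client::builder()
        .connect_timeout(Duration::from_secs(30)) // cap TCP/TLS handshake time
        .read_timeout(Duration::from_secs(300)) // cap the gap between reads, not total time
        .pool_idle_timeout(Duration::from_secs(90)) // drop idle pooled connections
        .tcp_keepalive(Duration::from_secs(60)) // probe long-lived sockets
        .build()?;
    let body = client.get(url).send().await?.bytes().await?;
    Ok(body.to_vec())
}

Note that read_timeout bounds stalls between reads rather than total transfer time, which suits large downloads better than a blanket .timeout() would.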
93
src/main.rs
@@ -255,7 +255,10 @@ async fn main() -> std::io::Result<()> {
     // Initialize SecretsManager early - this connects to Vault if configured
     // Only VAULT_ADDR, VAULT_TOKEN, and VAULT_SKIP_VERIFY should be in .env
     if let Err(e) = crate::shared::utils::init_secrets_manager().await {
-        warn!("Failed to initialize SecretsManager: {}. Falling back to env vars.", e);
+        warn!(
+            "Failed to initialize SecretsManager: {}. Falling back to env vars.",
+            e
+        );
     } else {
         info!("SecretsManager initialized - fetching secrets from Vault");
     }
@@ -416,11 +419,20 @@ async fn main() -> std::io::Result<()> {
     trace!("Creating BootstrapManager...");
     let mut bootstrap = BootstrapManager::new(install_mode.clone(), tenant.clone()).await;
 
-    // Check if services are already configured in Directory
-    let services_configured =
-        std::path::Path::new("./botserver-stack/conf/directory/zitadel.yaml").exists();
+    // Check if bootstrap has completed by looking for:
+    // 1. .env with VAULT_TOKEN
+    // 2. Vault init.json exists (actual credentials)
+    // Both must exist for bootstrap to be considered complete
+    let env_path = std::path::Path::new("./.env");
+    let vault_init_path = std::path::Path::new("./botserver-stack/conf/vault/init.json");
+    let bootstrap_completed = env_path.exists() && vault_init_path.exists() && {
+        // Check if .env contains VAULT_TOKEN (not just exists)
+        std::fs::read_to_string(env_path)
+            .map(|content| content.contains("VAULT_TOKEN="))
+            .unwrap_or(false)
+    };
 
-    let cfg = if services_configured {
+    let cfg = if bootstrap_completed {
         trace!("Services already configured, ensuring all are running...");
         info!("Ensuring database and drive services are running...");
         progress_tx_clone
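The completion check is small enough to extract and unit-test. A sketch of the same logic as a standalone predicate (the helper name is hypothetical; the paths and token check mirror the hunk above):

use std::path::Path;

/// Bootstrap counts as complete only when both artifacts exist and the
/// .env actually contains a VAULT_TOKEN entry - a leftover empty .env
/// from a failed run must not count.
fn is_bootstrap_completed(env_path: &Path, vault_init_path: &Path) -> bool {
    env_path.exists()
        && vault_init_path.exists()
        && std::fs::read_to_string(env_path)
            .map(|content| content.contains("VAULT_TOKEN="))
            .unwrap_or(false)
}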
@@ -437,6 +449,7 @@ async fn main() -> std::io::Result<()> {
 
         bootstrap
             .start_all()
+            .await
             .map_err(|e| std::io::Error::new(std::io::ErrorKind::Other, e))?;
         trace!("bootstrap.start_all() completed");
 
@@ -471,6 +484,7 @@ async fn main() -> std::io::Result<()> {
             .ok();
         bootstrap
             .start_all()
+            .await
             .map_err(|e| std::io::Error::new(std::io::ErrorKind::Other, e))?;
 
         match create_conn() {
@@ -480,21 +494,34 @@ async fn main() -> std::io::Result<()> {
         }
     };
 
-        trace!("Config loaded, uploading templates...");
+        trace!("Config loaded, syncing templates to database...");
         progress_tx_clone
             .send(BootstrapProgress::UploadingTemplates)
             .ok();
 
-        if let Err(e) = bootstrap.upload_templates_to_drive(&cfg).await {
-            trace!("Template upload error: {}", e);
-            progress_tx_clone
-                .send(BootstrapProgress::BootstrapError(format!(
-                    "Failed to upload templates: {}",
-                    e
-                )))
-                .ok();
+        // First sync config.csv to database (fast, no S3 needed)
+        if let Err(e) = bootstrap.sync_templates_to_database() {
+            warn!("Failed to sync templates to database: {}", e);
         } else {
-            trace!("Templates uploaded successfully");
+            trace!("Templates synced to database");
+        }
+
+        // Then upload to drive with timeout to prevent blocking on MinIO issues
+        match tokio::time::timeout(
+            std::time::Duration::from_secs(30),
+            bootstrap.upload_templates_to_drive(&cfg),
+        )
+        .await
+        {
+            Ok(Ok(_)) => {
+                trace!("Templates uploaded to drive successfully");
+            }
+            Ok(Err(e)) => {
+                warn!("Template drive upload error (non-blocking): {}", e);
+            }
+            Err(_) => {
+                warn!("Template drive upload timed out after 30s, continuing startup...");
+            }
         }
 
         Ok::<AppConfig, std::io::Error>(cfg)
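The three match arms follow from tokio::time::timeout's signature: it wraps the future's own Result in an outer Result whose Err means the deadline elapsed before the future resolved. A standalone sketch of the pattern (the async block is a placeholder standing in for the real upload_templates_to_drive call):

use std::time::Duration;

// Requires tokio with the "macros" and "time" features enabled.
#[tokio::main]
async fn main() {
    // Placeholder future standing in for the real drive upload.
    let upload = async { Ok::<(), String>(()) };
    match tokio::time::timeout(Duration::from_secs(30), upload).await {
        Ok(Ok(())) => println!("upload finished in time"),
        Ok(Err(e)) => println!("upload failed before the deadline: {e}"),
        Err(_elapsed) => println!("upload timed out; startup continues"),
    }
}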
@@ -505,10 +532,6 @@ async fn main() -> std::io::Result<()> {
     trace!("Reloading dotenv...");
     dotenv().ok();
 
-    trace!("Loading refreshed config from env...");
-    let refreshed_cfg = AppConfig::from_env().expect("Failed to load config from env");
-    let config = std::sync::Arc::new(refreshed_cfg.clone());
-
     trace!("Creating database pool again...");
     progress_tx.send(BootstrapProgress::ConnectingDatabase).ok();
 
@@ -541,6 +564,21 @@ async fn main() -> std::io::Result<()> {
         }
     };
 
+    // Load config from database (which now has values from config.csv)
+    info!("Loading config from database after template sync...");
+    let refreshed_cfg = AppConfig::from_database(&pool).unwrap_or_else(|e| {
+        warn!(
+            "Failed to load config from database: {}, falling back to env",
+            e
+        );
+        AppConfig::from_env().expect("Failed to load config from env")
+    });
+    let config = std::sync::Arc::new(refreshed_cfg.clone());
+    info!(
+        "Server configured to listen on {}:{}",
+        config.server.host, config.server.port
+    );
 
     let cache_url = "rediss://localhost:6379".to_string();
     let redis_client = match redis::Client::open(cache_url.as_str()) {
         Ok(client) => Some(Arc::new(client)),
@@ -583,11 +621,16 @@ async fn main() -> std::io::Result<()> {
     let config_manager = ConfigManager::new(pool.clone());
 
     let mut bot_conn = pool.get().expect("Failed to get database connection");
-    let (default_bot_id, _default_bot_name) = crate::bot::get_default_bot(&mut bot_conn);
+    let (default_bot_id, default_bot_name) = crate::bot::get_default_bot(&mut bot_conn);
+    info!(
+        "Using default bot: {} (id: {})",
+        default_bot_name, default_bot_id
+    );
 
     let llm_url = config_manager
-        .get_config(&default_bot_id, "llm-url", Some("https://localhost:8081"))
-        .unwrap_or_else(|_| "https://localhost:8081".to_string());
+        .get_config(&default_bot_id, "llm-url", Some("http://localhost:8081"))
+        .unwrap_or_else(|_| "http://localhost:8081".to_string());
+    info!("LLM URL: {}", llm_url);
 
     // Create base LLM provider
     let base_llm_provider = Arc::new(botserver::llm::OpenAIClient::new(
@@ -602,12 +645,14 @@ async fn main() -> std::io::Result<()> {
         .get_config(
             &default_bot_id,
             "embedding-url",
-            Some("https://localhost:8082"),
+            Some("http://localhost:8082"),
         )
-        .unwrap_or_else(|_| "https://localhost:8082".to_string());
+        .unwrap_or_else(|_| "http://localhost:8082".to_string());
     let embedding_model = config_manager
         .get_config(&default_bot_id, "embedding-model", Some("all-MiniLM-L6-v2"))
         .unwrap_or_else(|_| "all-MiniLM-L6-v2".to_string());
+    info!("Embedding URL: {}", embedding_url);
+    info!("Embedding Model: {}", embedding_model);
 
     let embedding_service = Some(Arc::new(botserver::llm::cache::LocalEmbeddingService::new(
         embedding_url,
|
@ -4,7 +4,7 @@ name,value
|
||||||
# SERVER CONFIGURATION
|
# SERVER CONFIGURATION
|
||||||
# ============================================================================
|
# ============================================================================
|
||||||
server_host,0.0.0.0
|
server_host,0.0.0.0
|
||||||
server_port,8080
|
server_port,8088
|
||||||
sites_root,/tmp
|
sites_root,/tmp
|
||||||
,
|
,
|
||||||
# ============================================================================
|
# ============================================================================
|
||||||
|
|
|
||||||
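For reference, config.csv is a two-column name,value CSV in which `#` lines are section banners and a bare `,` is a spacer row. A hypothetical sketch of parsing it into key/value pairs; this mirrors the visible file format only, not the project's actual sync_config_csv_to_db implementation:

use std::collections::HashMap;

fn parse_config_csv(contents: &str) -> HashMap<String, String> {
    contents
        .lines()
        .skip(1) // header row: name,value
        .filter(|line| !line.trim().is_empty() && !line.starts_with('#'))
        .filter_map(|line| {
            let (name, value) = line.split_once(',')?;
            // Skip spacer rows like "," where the name is empty.
            (!name.is_empty()).then(|| (name.to_string(), value.to_string()))
        })
        .collect()
}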