App generator LLM-only, app logs, knowledge base, web search, designer magic

This commit is contained in:
Rodrigo Rodriguez (Pragmatismo) 2025-12-28 11:50:50 -03:00
parent 9dee342817
commit 96cf7b57f8
38 changed files with 4728 additions and 1433 deletions

View file

@ -1,7 +1,7 @@
{
"base_url": "http://localhost:8300",
"default_org": {
"id": "351468402772017166",
"id": "353032199743733774",
"name": "default",
"domain": "default.localhost"
},
@ -13,8 +13,8 @@
"first_name": "Admin",
"last_name": "User"
},
"admin_token": "2YJqHuenWddFpMw4vqw6vEHtgSF5jbvSG4NxTANnV9KJJMnaDSuvbUNSGsS06-QLFZnpFbw",
"admin_token": "1X7ImWy1yPmGYYumPJ0RfVaLuuLHKstH8BItaTGlp-6jTFPeM0uFo8sjdfxtk-jxjLivcVM",
"project_id": "",
"client_id": "351468407201267726",
"client_secret": "vLxjxWiPv8fVvown7zBOqKdb7RPntqVW8fNfphaiMWtkXFI8fXQX8WoyBE5KmhJA"
"client_id": "353032201220194318",
"client_secret": "mrGZZk7Aqx1QbOHIwadgZHZkKHuPqZtOGDtdHTe4eZxEK86TDKfTiMlW2NxSEIHl"
}

View file

@ -0,0 +1,12 @@
-- Rollback: Remove role-based access control columns from dynamic tables
-- Migration: 6.1.2_table_role_access
-- IF EXISTS keeps this rollback idempotent if it is run more than once
-- or after a partially-applied forward migration.
-- Remove columns from dynamic_table_definitions
ALTER TABLE dynamic_table_definitions
DROP COLUMN IF EXISTS read_roles,
DROP COLUMN IF EXISTS write_roles;
-- Remove columns from dynamic_table_fields
ALTER TABLE dynamic_table_fields
DROP COLUMN IF EXISTS read_roles,
DROP COLUMN IF EXISTS write_roles;

View file

@ -0,0 +1,28 @@
-- Migration: 6.1.2_table_role_access
-- Add role-based access control columns to dynamic table definitions and fields
--
-- Syntax in .gbdialog TABLE definitions:
--   TABLE Contatos ON maria READ BY "admin;manager"
--     Id number key
--     Nome string(150)
--     NumeroDocumento string(25) READ BY "admin"
--     Celular string(20) WRITE BY "admin;manager"
--
-- Empty roles = everyone has access (default behavior)
-- Roles are semicolon-separated and match Zitadel directory roles
-- NOTE(review): columns default to NULL; the docs above say "empty = everyone".
-- Confirm application code treats both NULL and '' as unrestricted.
-- Add role columns to dynamic_table_definitions
ALTER TABLE dynamic_table_definitions
ADD COLUMN IF NOT EXISTS read_roles TEXT DEFAULT NULL,
ADD COLUMN IF NOT EXISTS write_roles TEXT DEFAULT NULL;
-- Add role columns to dynamic_table_fields
ALTER TABLE dynamic_table_fields
ADD COLUMN IF NOT EXISTS read_roles TEXT DEFAULT NULL,
ADD COLUMN IF NOT EXISTS write_roles TEXT DEFAULT NULL;
-- Add comments for documentation
COMMENT ON COLUMN dynamic_table_definitions.read_roles IS 'Semicolon-separated roles that can read from this table (empty = everyone)';
COMMENT ON COLUMN dynamic_table_definitions.write_roles IS 'Semicolon-separated roles that can write to this table (empty = everyone)';
COMMENT ON COLUMN dynamic_table_fields.read_roles IS 'Semicolon-separated roles that can read this field (empty = everyone)';
COMMENT ON COLUMN dynamic_table_fields.write_roles IS 'Semicolon-separated roles that can write this field (empty = everyone)';

View file

@ -0,0 +1,25 @@
-- Rollback Migration: Knowledge Base Sources
-- Drop triggers first
DROP TRIGGER IF EXISTS update_knowledge_sources_updated_at ON knowledge_sources;
-- Drop indexes
-- (Defensive: DROP TABLE below would remove them implicitly, but explicit
-- drops keep the rollback safe to re-run after a partial failure.)
DROP INDEX IF EXISTS idx_knowledge_sources_bot_id;
DROP INDEX IF EXISTS idx_knowledge_sources_status;
DROP INDEX IF EXISTS idx_knowledge_sources_collection;
DROP INDEX IF EXISTS idx_knowledge_sources_content_hash;
DROP INDEX IF EXISTS idx_knowledge_sources_created_at;
DROP INDEX IF EXISTS idx_knowledge_chunks_source_id;
DROP INDEX IF EXISTS idx_knowledge_chunks_chunk_index;
DROP INDEX IF EXISTS idx_knowledge_chunks_content_fts;
DROP INDEX IF EXISTS idx_knowledge_chunks_embedding;
DROP INDEX IF EXISTS idx_research_search_history_bot_id;
DROP INDEX IF EXISTS idx_research_search_history_user_id;
DROP INDEX IF EXISTS idx_research_search_history_created_at;
-- Drop tables (order matters due to foreign key constraints)
DROP TABLE IF EXISTS research_search_history;
DROP TABLE IF EXISTS knowledge_chunks;
DROP TABLE IF EXISTS knowledge_sources;

View file

@ -0,0 +1,95 @@
-- Migration: Knowledge Base Sources
-- Description: Tables for document ingestion, chunking, and RAG support
-- Table for knowledge sources (uploaded documents)
CREATE TABLE IF NOT EXISTS knowledge_sources (
    id TEXT PRIMARY KEY,
    bot_id UUID,
    name TEXT NOT NULL,
    source_type TEXT NOT NULL DEFAULT 'txt',
    file_path TEXT,
    url TEXT,
    content_hash TEXT NOT NULL,
    chunk_count INTEGER NOT NULL DEFAULT 0,
    status TEXT NOT NULL DEFAULT 'pending',
    collection TEXT NOT NULL DEFAULT 'default',
    error_message TEXT,
    metadata JSONB DEFAULT '{}'::jsonb,
    created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
    updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
    indexed_at TIMESTAMPTZ
);
-- Indexes for knowledge_sources
CREATE INDEX IF NOT EXISTS idx_knowledge_sources_bot_id ON knowledge_sources(bot_id);
CREATE INDEX IF NOT EXISTS idx_knowledge_sources_status ON knowledge_sources(status);
CREATE INDEX IF NOT EXISTS idx_knowledge_sources_collection ON knowledge_sources(collection);
CREATE INDEX IF NOT EXISTS idx_knowledge_sources_content_hash ON knowledge_sources(content_hash);
CREATE INDEX IF NOT EXISTS idx_knowledge_sources_created_at ON knowledge_sources(created_at);
-- Table for document chunks
-- NOTE(review): the vector(1536) column type requires the pgvector extension
-- to be installed. Unlike the guarded index creation below, this CREATE TABLE
-- will fail outright without it — confirm pgvector is a hard prerequisite, or
-- guard the column the same way the index is guarded.
CREATE TABLE IF NOT EXISTS knowledge_chunks (
    id TEXT PRIMARY KEY,
    source_id TEXT NOT NULL REFERENCES knowledge_sources(id) ON DELETE CASCADE,
    chunk_index INTEGER NOT NULL,
    content TEXT NOT NULL,
    token_count INTEGER NOT NULL DEFAULT 0,
    embedding vector(1536),
    metadata JSONB DEFAULT '{}'::jsonb,
    created_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
);
-- Indexes for knowledge_chunks
CREATE INDEX IF NOT EXISTS idx_knowledge_chunks_source_id ON knowledge_chunks(source_id);
CREATE INDEX IF NOT EXISTS idx_knowledge_chunks_chunk_index ON knowledge_chunks(chunk_index);
-- Full-text search index on content
CREATE INDEX IF NOT EXISTS idx_knowledge_chunks_content_fts
ON knowledge_chunks USING gin(to_tsvector('english', content));
-- Vector similarity index (if pgvector extension is available)
-- Note: This will only work if pgvector extension is installed
DO $$
BEGIN
    IF EXISTS (SELECT 1 FROM pg_extension WHERE extname = 'vector') THEN
        EXECUTE 'CREATE INDEX IF NOT EXISTS idx_knowledge_chunks_embedding
            ON knowledge_chunks USING ivfflat (embedding vector_cosine_ops) WITH (lists = 100)';
    END IF;
EXCEPTION WHEN OTHERS THEN
    RAISE NOTICE 'Could not create vector index: %', SQLERRM;
END $$;
-- Table for search history
CREATE TABLE IF NOT EXISTS research_search_history (
    id TEXT PRIMARY KEY,
    bot_id UUID,
    user_id UUID,
    query TEXT NOT NULL,
    search_type TEXT NOT NULL DEFAULT 'web',
    results_count INTEGER NOT NULL DEFAULT 0,
    metadata JSONB DEFAULT '{}'::jsonb,
    created_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
);
-- Indexes for search history
CREATE INDEX IF NOT EXISTS idx_research_search_history_bot_id ON research_search_history(bot_id);
CREATE INDEX IF NOT EXISTS idx_research_search_history_user_id ON research_search_history(user_id);
CREATE INDEX IF NOT EXISTS idx_research_search_history_created_at ON research_search_history(created_at);
-- Trigger for updated_at on knowledge_sources
-- NOTE(review): assumes update_updated_at_column() was created by an earlier
-- migration — confirm it exists before this migration runs.
DROP TRIGGER IF EXISTS update_knowledge_sources_updated_at ON knowledge_sources;
CREATE TRIGGER update_knowledge_sources_updated_at
BEFORE UPDATE ON knowledge_sources
FOR EACH ROW
EXECUTE FUNCTION update_updated_at_column();
-- Comments for documentation
COMMENT ON TABLE knowledge_sources IS 'Uploaded documents for knowledge base ingestion';
COMMENT ON TABLE knowledge_chunks IS 'Text chunks extracted from knowledge sources for RAG';
COMMENT ON TABLE research_search_history IS 'History of web and knowledge base searches';
COMMENT ON COLUMN knowledge_sources.source_type IS 'Document type: pdf, docx, txt, markdown, html, csv, xlsx, url';
COMMENT ON COLUMN knowledge_sources.status IS 'Processing status: pending, processing, indexed, failed, reindexing';
COMMENT ON COLUMN knowledge_sources.collection IS 'Collection/namespace for organizing sources';
COMMENT ON COLUMN knowledge_chunks.embedding IS 'Vector embedding for semantic search (1536 dimensions for OpenAI)';
COMMENT ON COLUMN knowledge_chunks.token_count IS 'Estimated token count for the chunk';

View file

@ -1,3 +1,4 @@
use crate::core::urls::ApiUrls;
use crate::llm::observability::{ObservabilityConfig, ObservabilityManager, QuickStats};
use crate::shared::state::AppState;
use axum::{
@ -89,39 +90,42 @@ impl Default for AnalyticsService {
pub fn configure_analytics_routes() -> Router<Arc<AppState>> {
Router::new()
.route("/api/analytics/messages/count", get(handle_message_count))
.route(ApiUrls::ANALYTICS_MESSAGES_COUNT, get(handle_message_count))
.route(
"/api/analytics/sessions/active",
ApiUrls::ANALYTICS_SESSIONS_ACTIVE,
get(handle_active_sessions),
)
.route("/api/analytics/response/avg", get(handle_avg_response_time))
.route("/api/analytics/llm/tokens", get(handle_llm_tokens))
.route("/api/analytics/storage/usage", get(handle_storage_usage))
.route("/api/analytics/errors/count", get(handle_errors_count))
.route(
"/api/analytics/timeseries/messages",
ApiUrls::ANALYTICS_RESPONSE_AVG,
get(handle_avg_response_time),
)
.route(ApiUrls::ANALYTICS_LLM_TOKENS, get(handle_llm_tokens))
.route(ApiUrls::ANALYTICS_STORAGE_USAGE, get(handle_storage_usage))
.route(ApiUrls::ANALYTICS_ERRORS_COUNT, get(handle_errors_count))
.route(
ApiUrls::ANALYTICS_TIMESERIES_MESSAGES,
get(handle_timeseries_messages),
)
.route(
"/api/analytics/timeseries/response_time",
ApiUrls::ANALYTICS_TIMESERIES_RESPONSE,
get(handle_timeseries_response),
)
.route(
"/api/analytics/channels/distribution",
ApiUrls::ANALYTICS_CHANNELS_DISTRIBUTION,
get(handle_channels_distribution),
)
.route(
"/api/analytics/bots/performance",
ApiUrls::ANALYTICS_BOTS_PERFORMANCE,
get(handle_bots_performance),
)
.route(
"/api/analytics/activity/recent",
ApiUrls::ANALYTICS_ACTIVITY_RECENT,
get(handle_recent_activity),
)
.route("/api/analytics/queries/top", get(handle_top_queries))
.route("/api/analytics/chat", post(handle_analytics_chat))
.route("/api/analytics/llm/stats", get(handle_llm_stats))
.route("/api/analytics/budget/status", get(handle_budget_status))
.route(ApiUrls::ANALYTICS_QUERIES_TOP, get(handle_top_queries))
.route(ApiUrls::ANALYTICS_CHAT, post(handle_analytics_chat))
.route(ApiUrls::ANALYTICS_LLM_STATS, get(handle_llm_stats))
.route(ApiUrls::ANALYTICS_BUDGET_STATUS, get(handle_budget_status))
}
pub async fn handle_message_count(State(state): State<Arc<AppState>>) -> impl IntoResponse {

View file

@ -21,6 +21,7 @@ pub use queue::{
use crate::core::bot::channels::whatsapp::WhatsAppAdapter;
use crate::core::bot::channels::ChannelAdapter;
use crate::core::urls::ApiUrls;
use crate::shared::models::{BotResponse, UserSession};
use crate::shared::state::{AppState, AttendantNotification};
use axum::{
@ -45,39 +46,42 @@ use uuid::Uuid;
pub fn configure_attendance_routes() -> Router<Arc<AppState>> {
Router::new()
.route("/api/attendance/queue", get(queue::list_queue))
.route("/api/attendance/attendants", get(queue::list_attendants))
.route("/api/attendance/assign", post(queue::assign_conversation))
.route(ApiUrls::ATTENDANCE_QUEUE, get(queue::list_queue))
.route(ApiUrls::ATTENDANCE_ATTENDANTS, get(queue::list_attendants))
.route(ApiUrls::ATTENDANCE_ASSIGN, post(queue::assign_conversation))
.route(
"/api/attendance/transfer",
ApiUrls::ATTENDANCE_TRANSFER,
post(queue::transfer_conversation),
)
.route(
"/api/attendance/resolve/{session_id}",
&ApiUrls::ATTENDANCE_RESOLVE.replace(":session_id", "{session_id}"),
post(queue::resolve_conversation),
)
.route("/api/attendance/insights", get(queue::get_insights))
.route("/api/attendance/respond", post(attendant_respond))
.route("/ws/attendant", get(attendant_websocket_handler))
.route("/api/attendance/llm/tips", post(llm_assist::generate_tips))
.route(ApiUrls::ATTENDANCE_INSIGHTS, get(queue::get_insights))
.route(ApiUrls::ATTENDANCE_RESPOND, post(attendant_respond))
.route(ApiUrls::WS_ATTENDANT, get(attendant_websocket_handler))
.route(
"/api/attendance/llm/polish",
ApiUrls::ATTENDANCE_LLM_TIPS,
post(llm_assist::generate_tips),
)
.route(
ApiUrls::ATTENDANCE_LLM_POLISH,
post(llm_assist::polish_message),
)
.route(
"/api/attendance/llm/smart-replies",
ApiUrls::ATTENDANCE_LLM_SMART_REPLIES,
post(llm_assist::generate_smart_replies),
)
.route(
"/api/attendance/llm/summary/{session_id}",
&ApiUrls::ATTENDANCE_LLM_SUMMARY.replace(":session_id", "{session_id}"),
get(llm_assist::generate_summary),
)
.route(
"/api/attendance/llm/sentiment",
ApiUrls::ATTENDANCE_LLM_SENTIMENT,
post(llm_assist::analyze_sentiment),
)
.route(
"/api/attendance/llm/config/{bot_id}",
&ApiUrls::ATTENDANCE_LLM_CONFIG.replace(":bot_id", "{bot_id}"),
get(llm_assist::get_llm_config),
)
}

View file

@ -472,9 +472,9 @@ When generating an app, create these files:
└── app.js Optional custom JavaScript
```
### Required HTML Head
### Required HTML Head (WITH SEO)
Every HTML page MUST include:
Every HTML page MUST include proper SEO meta tags:
```html
<!DOCTYPE html>
@ -482,13 +482,26 @@ Every HTML page MUST include:
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>Page Title</title>
<meta name="description" content="{app_name} - {brief description of this page}">
<meta name="robots" content="noindex, nofollow">
<meta name="theme-color" content="#1e1e2e">
<meta property="og:title" content="{Page Title} - {App Name}">
<meta property="og:description" content="{Brief description}">
<meta property="og:type" content="website">
<link rel="icon" href="/assets/icons/gb-logo.svg" type="image/svg+xml">
<title>{Page Title} - {App Name}</title>
<link rel="stylesheet" href="styles.css">
<script src="/js/vendor/htmx.min.js"></script>
<script src="designer.js" defer></script>
</head>
```
**SEO is required even for authenticated apps because:**
- Shared links preview correctly in chat/email
- Browser tabs show meaningful titles
- Bookmarks are descriptive
- Accessibility tools work better
---
## RESPONSE FORMAT
@ -590,4 +603,110 @@ Response:
5. **Use the APIs** - Connect to /api/db/ for data operations
6. **Be complete** - Generate all necessary pages, not just stubs
7. **Match the request** - If user wants pink, make it pink
8. **Tables are optional** - Simple tools don't need database tables
8. **Tables are optional** - Simple tools don't need database tables
9. **SEO required** - All pages MUST have proper meta tags (description, og:title, etc.)
10. **No comments in generated code** - Code must be self-documenting, no HTML/JS/CSS comments
---
## DESIGNER MAGIC BUTTON
The Designer has a "Magic" button that sends the current HTMX code to the LLM with an improvement prompt. It works like a user asking "improve this code" automatically.
**What Magic Button does:**
1. Captures current page HTML/CSS/JS
2. Sends to LLM with prompt: "Analyze and improve this HTMX code. Suggest better structure, accessibility, performance, and UX improvements."
3. LLM responds with refactored code or suggestions
4. User can apply suggestions or dismiss
**Example Magic prompt sent to LLM:**
```
You are reviewing this HTMX application code. Suggest improvements for:
- Better HTMX patterns (reduce JS, use hx-* attributes)
- Accessibility (ARIA labels, keyboard navigation)
- Performance (lazy loading, efficient selectors)
- UX (loading states, error handling, feedback)
- Code organization (semantic HTML, clean CSS)
Current code:
{current_page_html}
Respond with improved code and brief explanation.
```
---
## CUSTOM DOMAIN SUPPORT
Custom domains are configured in the bot's `config.csv` file:
```csv
appname-domain,www.customerdomain.com
```
**Configuration in config.csv:**
```csv
# Bot configuration
bot-name,My Company Bot
appname-domain,app.mycompany.com
```
**How it works:**
1. Bot reads `appname-domain` from config.csv
2. Server routes requests from custom domain to the app
3. SSL auto-provisioned via Let's Encrypt
**DNS Requirements:**
- CNAME record: `app.mycompany.com` → `{bot-id}.generalbots.app`
- Or A record pointing to server IP
---
## ZERO COMMENTS POLICY
**DO NOT generate any comments in code.**
```html
<!-- ❌ WRONG - no HTML comments -->
<div class="container">
<!-- User info section -->
<div class="user-info">...</div>
</div>
<!-- ✅ CORRECT - self-documenting structure -->
<div class="container">
<div class="user-info">...</div>
</div>
```
```css
/* ❌ WRONG - no CSS comments */
.button {
/* Primary action style */
background: blue;
}
/* ✅ CORRECT - clear naming */
.button-primary {
background: blue;
}
```
```javascript
// ❌ WRONG - no JS comments
function save() {
// Save to database
htmx.ajax('POST', '/api/db/items', {...});
}
// ✅ CORRECT - descriptive function name
function saveItemToDatabase() {
htmx.ajax('POST', '/api/db/items', {...});
}
```
**Why no comments:**
- Comments become stale when code changes
- Good naming is better than comments
- LLMs can infer intent from well-structured code
- Reduces generated file size

File diff suppressed because it is too large Load diff

613
src/auto_task/app_logs.rs Normal file
View file

@ -0,0 +1,613 @@
use chrono::{DateTime, Duration, Utc};
use log::{debug, error, info, warn};
use once_cell::sync::Lazy;
use serde::{Deserialize, Serialize};
use std::collections::{HashMap, VecDeque};
use std::fmt::Write;
use std::sync::{Arc, RwLock};
use uuid::Uuid;
/// Max entries kept per app buffer (and in the global buffer).
const MAX_LOGS_PER_APP: usize = 500;
/// Max recent error/warn entries included in Designer prompt context.
const MAX_LOGS_FOR_DESIGNER: usize = 50;
/// Entries older than this many days are pruned by the cleanup scheduler.
const LOG_RETENTION_DAYS: i64 = 1;
/// One structured log record captured for a generated app.
///
/// Entries originate either server-side (generator, validation, runtime)
/// or in the browser via the client logger script.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AppLogEntry {
    /// UUID v4 in string form, assigned when the entry is created.
    pub id: String,
    /// Capture time (UTC).
    pub timestamp: DateTime<Utc>,
    pub level: LogLevel,
    pub source: LogSource,
    /// Name of the generated app this entry belongs to.
    pub app_name: String,
    pub bot_id: Option<Uuid>,
    pub user_id: Option<Uuid>,
    /// Human-readable message.
    pub message: String,
    /// Optional extra detail (e.g. error text or a JSON payload).
    pub details: Option<String>,
    /// Source file that produced the entry, when known.
    pub file_path: Option<String>,
    pub line_number: Option<u32>,
    pub stack_trace: Option<String>,
}
/// Severity of a log entry; serialized in lowercase (e.g. "warn").
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum LogLevel {
    Debug,
    Info,
    Warn,
    Error,
    /// Most severe; routed to the same `error!` output as `Error`.
    Critical,
}
impl std::fmt::Display for LogLevel {
    /// Writes the level's lowercase wire name (matches the serde form).
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let name = match self {
            Self::Debug => "debug",
            Self::Info => "info",
            Self::Warn => "warn",
            Self::Error => "error",
            Self::Critical => "critical",
        };
        f.write_str(name)
    }
}
/// Origin of a log entry; serialized in lowercase.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum LogSource {
    Server,
    /// Sent from the browser via the client logger script.
    Client,
    /// App generation pipeline.
    Generator,
    /// Designer AI assistant.
    Designer,
    Validation,
    Runtime,
}
impl std::fmt::Display for LogSource {
    /// Writes the source's lowercase wire name (matches the serde form).
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let name = match self {
            Self::Server => "server",
            Self::Client => "client",
            Self::Generator => "generator",
            Self::Designer => "designer",
            Self::Validation => "validation",
            Self::Runtime => "runtime",
        };
        f.write_str(name)
    }
}
/// Payload posted by the browser-side logger
/// (see `generate_client_logger_js`).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ClientLogRequest {
    pub app_name: String,
    /// Level name; "warning" is accepted as an alias for "warn",
    /// anything unrecognized falls back to info (see `log_client`).
    pub level: String,
    pub message: String,
    pub details: Option<String>,
    pub file_path: Option<String>,
    pub line_number: Option<u32>,
    pub stack_trace: Option<String>,
    /// Browser user agent; currently not copied onto the stored entry.
    pub user_agent: Option<String>,
}
/// Filters for querying stored logs; every field is optional.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct LogQueryParams {
    /// Restrict to one app's buffer; otherwise the global buffer is searched.
    pub app_name: Option<String>,
    /// Level name filter; unrecognized values are ignored.
    pub level: Option<String>,
    /// Source name filter; unrecognized values are ignored.
    pub source: Option<String>,
    /// Max entries to return (default 100, hard cap 500).
    pub limit: Option<usize>,
    /// Only entries at or after this time (default: retention window).
    pub since: Option<DateTime<Utc>>,
}
/// Aggregate counts over the per-app log buffers.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct LogStats {
    pub total_logs: usize,
    /// Error + critical entries.
    pub errors: usize,
    /// Warn entries.
    pub warnings: usize,
    /// Entry count per app name.
    pub by_app: HashMap<String, usize>,
}
/// In-memory, thread-safe store of recent app log entries.
///
/// Keeps a bounded ring buffer per app plus one cross-app global buffer;
/// both are capped at `MAX_LOGS_PER_APP` entries.
pub struct AppLogStore {
    // Per-app buffers keyed by app name.
    logs: RwLock<HashMap<String, VecDeque<AppLogEntry>>>,
    // Most recent entries across all apps, in arrival order.
    global_logs: RwLock<VecDeque<AppLogEntry>>,
}
impl AppLogStore {
    /// Creates an empty store with a preallocated global buffer.
    pub fn new() -> Self {
        Self {
            logs: RwLock::new(HashMap::new()),
            global_logs: RwLock::new(VecDeque::with_capacity(MAX_LOGS_PER_APP)),
        }
    }

    /// Parses a case-insensitive level name; `None` when unrecognized.
    ///
    /// Accepts "warning" as an alias for "warn" so that query filters and
    /// client payload parsing agree (previously only `log_client` accepted
    /// the alias, making `get_logs` filters silently ignore "warning").
    fn parse_level(s: &str) -> Option<LogLevel> {
        match s.to_lowercase().as_str() {
            "debug" => Some(LogLevel::Debug),
            "info" => Some(LogLevel::Info),
            "warn" | "warning" => Some(LogLevel::Warn),
            "error" => Some(LogLevel::Error),
            "critical" => Some(LogLevel::Critical),
            _ => None,
        }
    }

    /// Parses a case-insensitive source name; `None` when unrecognized.
    fn parse_source(s: &str) -> Option<LogSource> {
        match s.to_lowercase().as_str() {
            "server" => Some(LogSource::Server),
            "client" => Some(LogSource::Client),
            "generator" => Some(LogSource::Generator),
            "designer" => Some(LogSource::Designer),
            "validation" => Some(LogSource::Validation),
            "runtime" => Some(LogSource::Runtime),
            _ => None,
        }
    }

    /// Records a server-side entry and mirrors it to the `log` crate.
    #[allow(clippy::too_many_arguments)]
    pub fn log(
        &self,
        app_name: &str,
        level: LogLevel,
        source: LogSource,
        message: &str,
        details: Option<String>,
        bot_id: Option<Uuid>,
        user_id: Option<Uuid>,
    ) {
        let entry = AppLogEntry {
            id: Uuid::new_v4().to_string(),
            timestamp: Utc::now(),
            level,
            source,
            app_name: app_name.to_string(),
            bot_id,
            user_id,
            message: message.to_string(),
            details,
            file_path: None,
            line_number: None,
            stack_trace: None,
        };
        self.add_entry(entry);
        // Mirror to the process logger at the matching severity.
        match level {
            LogLevel::Debug => debug!("[{}] {}: {}", app_name, source, message),
            LogLevel::Info => info!("[{}] {}: {}", app_name, source, message),
            LogLevel::Warn => warn!("[{}] {}: {}", app_name, source, message),
            LogLevel::Error | LogLevel::Critical => {
                error!("[{}] {}: {}", app_name, source, message);
            }
        }
    }

    /// Records an error-level entry with optional location and stack trace,
    /// and mirrors it to the `log` crate.
    #[allow(clippy::too_many_arguments)]
    pub fn log_error(
        &self,
        app_name: &str,
        source: LogSource,
        message: &str,
        error: &str,
        file_path: Option<&str>,
        line_number: Option<u32>,
        stack_trace: Option<&str>,
    ) {
        let entry = AppLogEntry {
            id: Uuid::new_v4().to_string(),
            timestamp: Utc::now(),
            level: LogLevel::Error,
            source,
            app_name: app_name.to_string(),
            bot_id: None,
            user_id: None,
            message: message.to_string(),
            details: Some(error.to_string()),
            file_path: file_path.map(String::from),
            line_number,
            stack_trace: stack_trace.map(String::from),
        };
        self.add_entry(entry);
        error!(
            "[{}] {}: {} - {} ({}:{})",
            app_name,
            source,
            message,
            error,
            file_path.unwrap_or("unknown"),
            line_number.unwrap_or(0)
        );
    }

    /// Stores an entry reported by the browser-side logger.
    ///
    /// Unrecognized level names fall back to `Info`.
    pub fn log_client(
        &self,
        request: ClientLogRequest,
        bot_id: Option<Uuid>,
        user_id: Option<Uuid>,
    ) {
        let level = Self::parse_level(&request.level).unwrap_or(LogLevel::Info);
        let entry = AppLogEntry {
            id: Uuid::new_v4().to_string(),
            timestamp: Utc::now(),
            level,
            source: LogSource::Client,
            app_name: request.app_name,
            bot_id,
            user_id,
            message: request.message,
            details: request.details,
            file_path: request.file_path,
            line_number: request.line_number,
            stack_trace: request.stack_trace,
        };
        self.add_entry(entry);
    }

    /// Appends an entry to its app's buffer and to the global buffer,
    /// evicting the oldest entry when a buffer reaches its cap.
    fn add_entry(&self, entry: AppLogEntry) {
        if let Ok(mut logs) = self.logs.write() {
            let app_logs = logs
                .entry(entry.app_name.clone())
                .or_insert_with(|| VecDeque::with_capacity(MAX_LOGS_PER_APP));
            if app_logs.len() >= MAX_LOGS_PER_APP {
                app_logs.pop_front();
            }
            app_logs.push_back(entry.clone());
        }
        if let Ok(mut global) = self.global_logs.write() {
            if global.len() >= MAX_LOGS_PER_APP {
                global.pop_front();
            }
            global.push_back(entry);
        }
    }

    /// Returns matching entries, newest first, capped at 500.
    ///
    /// When `app_name` is set, only that app's buffer is searched;
    /// otherwise the cross-app global buffer is used. A poisoned lock
    /// yields an empty result rather than a panic.
    pub fn get_logs(&self, params: &LogQueryParams) -> Vec<AppLogEntry> {
        let limit = params.limit.unwrap_or(100).min(500);
        // Default time window: the retention period.
        let cutoff = params
            .since
            .unwrap_or_else(|| Utc::now() - Duration::days(LOG_RETENTION_DAYS));
        let level_filter: Option<LogLevel> =
            params.level.as_deref().and_then(Self::parse_level);
        let source_filter: Option<LogSource> =
            params.source.as_deref().and_then(Self::parse_source);
        if let Some(ref app_name) = params.app_name {
            if let Ok(logs) = self.logs.read() {
                if let Some(app_logs) = logs.get(app_name) {
                    return app_logs
                        .iter()
                        .rev()
                        .filter(|e| e.timestamp >= cutoff)
                        .filter(|e| level_filter.is_none_or(|l| e.level == l))
                        .filter(|e| source_filter.is_none_or(|s| e.source == s))
                        .take(limit)
                        .cloned()
                        .collect();
                }
            }
            return Vec::new();
        }
        if let Ok(global) = self.global_logs.read() {
            return global
                .iter()
                .rev()
                .filter(|e| e.timestamp >= cutoff)
                .filter(|e| level_filter.is_none_or(|l| e.level == l))
                .filter(|e| source_filter.is_none_or(|s| e.source == s))
                .take(limit)
                .cloned()
                .collect();
        }
        Vec::new()
    }

    /// Returns warn-or-worse entries from the last hour for `app_name`,
    /// newest first, capped at `MAX_LOGS_FOR_DESIGNER`.
    pub fn get_errors_for_designer(&self, app_name: &str) -> Vec<AppLogEntry> {
        if let Ok(logs) = self.logs.read() {
            if let Some(app_logs) = logs.get(app_name) {
                let cutoff = Utc::now() - Duration::hours(1);
                return app_logs
                    .iter()
                    .rev()
                    .filter(|e| e.timestamp >= cutoff)
                    .filter(|e| {
                        matches!(
                            e.level,
                            LogLevel::Error | LogLevel::Critical | LogLevel::Warn
                        )
                    })
                    .take(MAX_LOGS_FOR_DESIGNER)
                    .cloned()
                    .collect();
            }
        }
        Vec::new()
    }

    /// Formats recent errors/warnings as a prompt section for the Designer
    /// LLM; `None` when there are no recent issues.
    pub fn format_errors_for_prompt(&self, app_name: &str) -> Option<String> {
        let errors = self.get_errors_for_designer(app_name);
        if errors.is_empty() {
            return None;
        }
        let mut output = String::new();
        output.push_str("\n\n=== RECENT ERRORS AND WARNINGS ===\n");
        output.push_str("The following issues were detected. Please fix them:\n\n");
        for (idx, entry) in errors.iter().enumerate() {
            // writeln! into a String cannot fail; ignore the Result.
            let _ = writeln!(
                output,
                "{}. [{}] [{}] {}",
                idx + 1,
                entry.level,
                entry.source,
                entry.message
            );
            if let Some(ref details) = entry.details {
                let _ = writeln!(output, "   Details: {details}");
            }
            if let Some(ref file) = entry.file_path {
                let _ = writeln!(
                    output,
                    "   Location: {}:{}",
                    file,
                    entry.line_number.unwrap_or(0)
                );
            }
            if let Some(ref stack) = entry.stack_trace {
                // Keep only the top of the stack to bound prompt size.
                let short_stack: String = stack.lines().take(3).collect::<Vec<_>>().join("\n   ");
                let _ = writeln!(output, "   Stack: {short_stack}");
            }
            output.push('\n');
        }
        output.push_str("=== END OF ERRORS ===\n");
        Some(output)
    }

    /// Computes aggregate counts over the per-app buffers
    /// (the global buffer is not counted to avoid double-counting).
    pub fn get_stats(&self) -> LogStats {
        let mut stats = LogStats {
            total_logs: 0,
            errors: 0,
            warnings: 0,
            by_app: HashMap::new(),
        };
        if let Ok(logs) = self.logs.read() {
            for (app_name, app_logs) in logs.iter() {
                let count = app_logs.len();
                stats.total_logs += count;
                stats.by_app.insert(app_name.clone(), count);
                for entry in app_logs {
                    match entry.level {
                        LogLevel::Error | LogLevel::Critical => stats.errors += 1,
                        LogLevel::Warn => stats.warnings += 1,
                        _ => {}
                    }
                }
            }
        }
        stats
    }

    /// Removes entries older than the retention window from every buffer
    /// and drops now-empty per-app buffers.
    pub fn cleanup_old_logs(&self) {
        let cutoff = Utc::now() - Duration::days(LOG_RETENTION_DAYS);
        if let Ok(mut logs) = self.logs.write() {
            for app_logs in logs.values_mut() {
                // Entries are in arrival order, so stop at the first recent one.
                while let Some(front) = app_logs.front() {
                    if front.timestamp < cutoff {
                        app_logs.pop_front();
                    } else {
                        break;
                    }
                }
            }
            logs.retain(|_, v| !v.is_empty());
        }
        if let Ok(mut global) = self.global_logs.write() {
            while let Some(front) = global.front() {
                if front.timestamp < cutoff {
                    global.pop_front();
                } else {
                    break;
                }
            }
        }
        info!("Log cleanup completed");
    }

    /// Drops all entries for one app (global buffer is left untouched).
    pub fn clear_app_logs(&self, app_name: &str) {
        if let Ok(mut logs) = self.logs.write() {
            logs.remove(app_name);
        }
        info!("Cleared logs for app: {}", app_name);
    }
}
impl Default for AppLogStore {
    /// Equivalent to [`AppLogStore::new`].
    fn default() -> Self {
        Self::new()
    }
}
/// Process-wide log store shared by all apps; initialized on first use.
pub static APP_LOGS: Lazy<Arc<AppLogStore>> = Lazy::new(|| Arc::new(AppLogStore::new()));
/// Records an informational generator message for `app_name`
/// in the global store.
pub fn log_generator_info(app_name: &str, message: &str) {
    let store = &*APP_LOGS;
    store.log(app_name, LogLevel::Info, LogSource::Generator, message, None, None, None);
}
pub fn log_generator_error(app_name: &str, message: &str, error: &str) {
APP_LOGS.log_error(
app_name,
LogSource::Generator,
message,
error,
None,
None,
None,
);
}
/// Records a validation failure for `app_name`, optionally pointing at
/// the offending file and line.
pub fn log_validation_error(
    app_name: &str,
    message: &str,
    file_path: Option<&str>,
    line_number: Option<u32>,
) {
    let store = &*APP_LOGS;
    store.log_error(
        app_name,
        LogSource::Validation,
        message,
        "Validation failed",
        file_path,
        line_number,
        None,
    );
}
pub fn log_runtime_error(app_name: &str, message: &str, error: &str, stack_trace: Option<&str>) {
APP_LOGS.log_error(
app_name,
LogSource::Runtime,
message,
error,
None,
None,
stack_trace,
);
}
/// Formats recent errors/warnings for `app_name` as a prompt section for
/// the Designer LLM; `None` when there are none.
pub fn get_designer_error_context(app_name: &str) -> Option<String> {
    APP_LOGS.format_errors_for_prompt(app_name)
}
/// Spawns a detached background thread that prunes expired log entries
/// once per hour.
///
/// NOTE(review): this is a blocking `std::thread` loop; if the host
/// application runs an async runtime, a task-based interval may be
/// preferable — confirm.
pub fn start_log_cleanup_scheduler() {
    std::thread::spawn(|| {
        let interval = std::time::Duration::from_secs(3600);
        loop {
            std::thread::sleep(interval);
            APP_LOGS.cleanup_old_logs();
        }
    });
    info!("Log cleanup scheduler started (runs hourly)");
}
/// Returns the browser-side logger script injected into generated apps.
///
/// The script buffers console/window/HTMX errors and flushes them to
/// `/api/app-logs/client` every 5 s, or immediately once 50 entries
/// accumulate, and exposes a `window.AppLogger` API.
///
/// Fix: the flush failure handler previously called the *overridden*
/// `console.warn`, which re-enqueued a new warn entry on every failed
/// flush — a self-feeding loop while the endpoint is unreachable. The
/// native `console.warn` is now captured before the override and used
/// in the catch handler instead.
pub fn generate_client_logger_js() -> &'static str {
    r"
(function() {
    const APP_NAME = document.body.dataset.appName || window.location.pathname.split('/')[1] || 'unknown';
    const LOG_ENDPOINT = '/api/app-logs/client';
    const LOG_BUFFER = [];
    const FLUSH_INTERVAL = 5000;
    const MAX_BUFFER_SIZE = 50;
    const NATIVE_WARN = console.warn.bind(console);
    function sendLogs() {
        if (LOG_BUFFER.length === 0) return;
        const logs = LOG_BUFFER.splice(0, LOG_BUFFER.length);
        fetch(LOG_ENDPOINT, {
            method: 'POST',
            headers: { 'Content-Type': 'application/json' },
            body: JSON.stringify({ logs: logs })
        }).catch(function(e) {
            NATIVE_WARN('Failed to send logs:', e);
        });
    }
    function addLog(level, message, details) {
        const entry = {
            app_name: APP_NAME,
            level: level,
            message: message,
            details: details || null,
            file_path: null,
            line_number: null,
            stack_trace: null,
            user_agent: navigator.userAgent
        };
        LOG_BUFFER.push(entry);
        if (LOG_BUFFER.length >= MAX_BUFFER_SIZE) {
            sendLogs();
        }
    }
    window.onerror = function(message, source, lineno, colno, error) {
        addLog('error', message, JSON.stringify({
            source: source,
            line: lineno,
            column: colno,
            stack: error ? error.stack : null
        }));
        return false;
    };
    window.onunhandledrejection = function(event) {
        addLog('error', 'Unhandled Promise Rejection: ' + event.reason,
            event.reason && event.reason.stack ? event.reason.stack : null);
    };
    const originalConsoleError = console.error;
    console.error = function() {
        addLog('error', Array.from(arguments).join(' '));
        originalConsoleError.apply(console, arguments);
    };
    const originalConsoleWarn = console.warn;
    console.warn = function() {
        addLog('warn', Array.from(arguments).join(' '));
        originalConsoleWarn.apply(console, arguments);
    };
    document.body.addEventListener('htmx:responseError', function(evt) {
        addLog('error', 'HTMX Request Failed', JSON.stringify({
            url: evt.detail.xhr.responseURL,
            status: evt.detail.xhr.status,
            response: evt.detail.xhr.responseText.substring(0, 500)
        }));
    });
    document.body.addEventListener('htmx:sendError', function(evt) {
        addLog('error', 'HTMX Send Error', JSON.stringify({
            url: evt.detail.requestConfig.path
        }));
    });
    setInterval(sendLogs, FLUSH_INTERVAL);
    window.addEventListener('beforeunload', sendLogs);
    window.AppLogger = {
        debug: function(msg, details) { addLog('debug', msg, details); },
        info: function(msg, details) { addLog('info', msg, details); },
        warn: function(msg, details) { addLog('warn', msg, details); },
        error: function(msg, details) { addLog('error', msg, details); },
        flush: sendLogs
    };
    console.log('[AppLogger] Initialized for app:', APP_NAME);
})();
"
}

View file

@ -57,13 +57,11 @@ pub struct IntentResultResponse {
pub next_steps: Vec<String>,
}
/// Request for one-click create and execute
#[derive(Debug, Deserialize)]
pub struct CreateAndExecuteRequest {
pub intent: String,
}
/// Response for create and execute - simple status updates
#[derive(Debug, Serialize)]
pub struct CreateAndExecuteResponse {
pub success: bool,
@ -289,8 +287,6 @@ pub struct RecommendationResponse {
pub action: Option<String>,
}
/// Create and execute in one call - no dialogs, just do it
/// POST /api/autotask/create
pub async fn create_and_execute_handler(
State(state): State<Arc<AppState>>,
Json(request): Json<CreateAndExecuteRequest>,
@ -388,8 +384,6 @@ pub async fn create_and_execute_handler(
}
}
/// Classify and optionally process an intent
/// POST /api/autotask/classify
pub async fn classify_intent_handler(
State(state): State<Arc<AppState>>,
Json(request): Json<ClassifyIntentRequest>,
@ -1555,7 +1549,6 @@ fn update_task_status(
Ok(())
}
/// Create task record in database
fn create_task_record(
state: &Arc<AppState>,
task_id: Uuid,
@ -1584,7 +1577,6 @@ fn create_task_record(
Ok(())
}
/// Update task status in database
fn update_task_status_db(
state: &Arc<AppState>,
task_id: Uuid,
@ -1620,7 +1612,6 @@ fn update_task_status_db(
Ok(())
}
/// Get pending items (ASK LATER) for a bot
fn get_pending_items_for_bot(state: &Arc<AppState>, bot_id: Uuid) -> Vec<PendingItemResponse> {
let mut conn = match state.conn.get() {
Ok(c) => c,
@ -1753,8 +1744,6 @@ fn html_escape(s: &str) -> String {
// MISSING ENDPOINTS - Required by botui/autotask.js
// =============================================================================
/// Execute a specific task by ID
/// POST /api/autotask/:task_id/execute
pub async fn execute_task_handler(
State(state): State<Arc<AppState>>,
Path(task_id): Path<String>,
@ -1785,8 +1774,6 @@ pub async fn execute_task_handler(
}
}
/// Get execution logs for a task
/// GET /api/autotask/:task_id/logs
pub async fn get_task_logs_handler(
State(state): State<Arc<AppState>>,
Path(task_id): Path<String>,
@ -1805,8 +1792,6 @@ pub async fn get_task_logs_handler(
.into_response()
}
/// Apply a recommendation from simulation results
/// POST /api/autotask/recommendations/:rec_id/apply
pub async fn apply_recommendation_handler(
State(state): State<Arc<AppState>>,
Path(rec_id): Path<String>,

View file

@ -1,21 +1,3 @@
//! Designer AI Assistant
//!
//! An AI-powered assistant that modifies applications through natural conversation.
//! Based on Chapter 17 - Designer documentation.
//!
//! Designer understands context:
//! - Current app being viewed
//! - Current page/file active
//! - Available tables and their schemas
//! - Existing tools and schedulers
//!
//! Designer can modify:
//! - Styles (colors, layout, fonts)
//! - HTML pages (forms, lists, buttons)
//! - Database (add fields, create tables)
//! - Tools (voice/chat commands)
//! - Schedulers (automated tasks)
use crate::shared::models::UserSession;
use crate::shared::state::AppState;
use chrono::{DateTime, Utc};
@ -28,23 +10,15 @@ use serde::{Deserialize, Serialize};
use std::sync::Arc;
use uuid::Uuid;
/// Types of modifications Designer can make
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "SCREAMING_SNAKE_CASE")]
pub enum ModificationType {
/// Modify CSS styles
Style,
/// Modify HTML structure
Html,
/// Add/modify database fields or tables
Database,
/// Create/modify voice/chat commands
Tool,
/// Create/modify scheduled automations
Scheduler,
/// Multiple modifications
Multiple,
/// Unknown modification type
Unknown,
}
@ -62,28 +36,18 @@ impl std::fmt::Display for ModificationType {
}
}
/// Context about what the user is currently viewing/editing
#[derive(Debug, Clone, Default, Serialize, Deserialize)]
pub struct DesignerContext {
/// Current app name
pub current_app: Option<String>,
/// Current page/file being viewed
pub current_page: Option<String>,
/// Current element selected (if any)
pub current_element: Option<String>,
/// Available tables in this bot's database
pub available_tables: Vec<TableInfo>,
/// Available tools
pub available_tools: Vec<String>,
/// Available schedulers
pub available_schedulers: Vec<String>,
/// Recent changes for undo support
pub recent_changes: Vec<ChangeRecord>,
/// Conversation history for context
pub conversation_history: Vec<ConversationTurn>,
}
/// Summary info about a table
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TableInfo {
pub name: String,
@ -91,7 +55,6 @@ pub struct TableInfo {
pub record_count: Option<i64>,
}
/// Record of a change for undo support
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ChangeRecord {
pub id: String,
@ -104,7 +67,6 @@ pub struct ChangeRecord {
pub can_undo: bool,
}
/// A turn in the conversation
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ConversationTurn {
pub role: String, // "user" or "assistant"
@ -112,14 +74,12 @@ pub struct ConversationTurn {
pub timestamp: DateTime<Utc>,
}
/// Request to modify something
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ModificationRequest {
pub instruction: String,
pub context: DesignerContext,
}
/// Result of a modification
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ModificationResult {
pub success: bool,
@ -134,7 +94,6 @@ pub struct ModificationResult {
pub error: Option<String>,
}
/// A single file change
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct FileChange {
pub file_path: String,
@ -144,7 +103,6 @@ pub struct FileChange {
pub line_number: Option<i32>,
}
/// Analyzed modification from LLM
#[derive(Debug, Clone, Serialize, Deserialize)]
struct AnalyzedModification {
modification_type: ModificationType,
@ -163,7 +121,6 @@ struct CodeChange {
context: Option<String>,
}
/// The Designer AI Assistant
pub struct DesignerAI {
state: Arc<AppState>,
}
@ -173,7 +130,6 @@ impl DesignerAI {
Self { state }
}
/// Process a modification request
pub async fn process_request(
&self,
request: &ModificationRequest,
@ -221,7 +177,6 @@ impl DesignerAI {
self.apply_modification(&analysis, session).await
}
/// Apply a confirmed modification
pub async fn apply_confirmed_modification(
&self,
change_id: &str,
@ -247,7 +202,6 @@ impl DesignerAI {
}
}
/// Undo a previous change
pub async fn undo_change(
&self,
change_id: &str,
@ -311,7 +265,6 @@ impl DesignerAI {
}
}
/// Get current context for the designer
pub async fn get_context(
&self,
session: &UserSession,
@ -335,11 +288,6 @@ impl DesignerAI {
})
}
// =========================================================================
// ANALYSIS
// =========================================================================
/// Analyze what modification the user wants
async fn analyze_modification(
&self,
instruction: &str,
@ -390,7 +338,6 @@ Respond ONLY with valid JSON."#
self.parse_analysis_response(&response, instruction)
}
/// Parse LLM analysis response
fn parse_analysis_response(
&self,
response: &str,
@ -452,7 +399,6 @@ Respond ONLY with valid JSON."#
}
}
/// Fallback heuristic analysis
fn analyze_modification_heuristic(
&self,
instruction: &str,
@ -515,11 +461,6 @@ Respond ONLY with valid JSON."#
})
}
// =========================================================================
// MODIFICATION APPLICATION
// =========================================================================
/// Apply analyzed modification
async fn apply_modification(
&self,
analysis: &AnalyzedModification,
@ -611,7 +552,6 @@ Respond ONLY with valid JSON."#
})
}
/// Apply CSS style changes
async fn apply_style_changes(
&self,
original: &str,
@ -649,7 +589,6 @@ Respond ONLY with valid JSON."#
Ok(content)
}
/// Apply HTML changes
async fn apply_html_changes(
&self,
original: &str,
@ -688,7 +627,6 @@ Respond ONLY with valid JSON."#
Ok(content)
}
/// Apply database schema changes
async fn apply_database_changes(
&self,
original: &str,
@ -728,7 +666,6 @@ Respond ONLY with valid JSON."#
Ok(content)
}
/// Generate a tool file
async fn generate_tool_file(
&self,
changes: &[CodeChange],
@ -750,7 +687,6 @@ Respond ONLY with valid JSON."#
Ok(content)
}
/// Generate a scheduler file
async fn generate_scheduler_file(
&self,
changes: &[CodeChange],
@ -772,7 +708,6 @@ Respond ONLY with valid JSON."#
Ok(content)
}
/// Handle multiple changes
async fn apply_multiple_changes(
&self,
_analysis: &AnalyzedModification,
@ -783,7 +718,6 @@ Respond ONLY with valid JSON."#
Ok("Multiple changes applied".to_string())
}
/// Generate preview of changes
fn generate_preview(&self, analysis: &AnalyzedModification) -> String {
let mut preview = String::new();
preview.push_str(&format!("File: {}\n\nChanges:\n", analysis.target_file));
@ -806,11 +740,6 @@ Respond ONLY with valid JSON."#
preview
}
// =========================================================================
// CONTEXT HELPERS
// =========================================================================
/// Get available tables for the bot
fn get_available_tables(
&self,
_session: &UserSession,
@ -843,7 +772,6 @@ Respond ONLY with valid JSON."#
.collect())
}
/// Get available tools
fn get_available_tools(
&self,
session: &UserSession,
@ -865,7 +793,6 @@ Respond ONLY with valid JSON."#
Ok(tools)
}
/// Get available schedulers
fn get_available_schedulers(
&self,
session: &UserSession,
@ -887,7 +814,6 @@ Respond ONLY with valid JSON."#
Ok(schedulers)
}
/// Get recent changes for undo
fn get_recent_changes(
&self,
session: &UserSession,
@ -947,11 +873,6 @@ Respond ONLY with valid JSON."#
.collect())
}
// =========================================================================
// FILE OPERATIONS
// =========================================================================
/// Get site path from config
fn get_site_path(&self) -> String {
self.state
.config
@ -960,7 +881,6 @@ Respond ONLY with valid JSON."#
.unwrap_or_else(|| "./botserver-stack/sites".to_string())
}
/// Read a file from the bot's directory
fn read_file(
&self,
bot_id: Uuid,
@ -978,7 +898,6 @@ Respond ONLY with valid JSON."#
}
}
/// Write a file to the bot's directory
fn write_file(
&self,
bot_id: Uuid,
@ -1001,7 +920,6 @@ Respond ONLY with valid JSON."#
Ok(())
}
/// Sync schema changes to database
fn sync_schema_changes(
&self,
_session: &UserSession,
@ -1012,11 +930,6 @@ Respond ONLY with valid JSON."#
Ok(())
}
// =========================================================================
// CHANGE RECORD MANAGEMENT
// =========================================================================
/// Store a change record for undo
fn store_change_record(
&self,
record: &ChangeRecord,
@ -1043,7 +956,6 @@ Respond ONLY with valid JSON."#
Ok(())
}
/// Get a change record by ID
fn get_change_record(
&self,
change_id: &str,
@ -1098,7 +1010,6 @@ Respond ONLY with valid JSON."#
}))
}
/// Remove a change record (after undo)
fn remove_change_record(
&self,
change_id: &str,
@ -1114,7 +1025,6 @@ Respond ONLY with valid JSON."#
Ok(())
}
/// Get pending change (for confirmation flow)
fn get_pending_change(
&self,
change_id: &str,
@ -1146,11 +1056,6 @@ Respond ONLY with valid JSON."#
}
}
// =========================================================================
// LLM INTEGRATION
// =========================================================================
/// Call LLM for analysis
async fn call_llm(
&self,
prompt: &str,

View file

@ -1,17 +1,3 @@
//! Intent Classifier for AutoTask System
//!
//! Classifies user intents and routes them to appropriate handlers.
//! Based on Chapter 17 - Autonomous Tasks documentation.
//!
//! Intent Types:
//! - APP_CREATE: "create app for clinic" → HTMX pages, tools, schedulers
//! - TODO: "call John tomorrow" → Task saved to tasks table
//! - MONITOR: "alert when IBM changes" → ON CHANGE event handler
//! - ACTION: "email all customers" → Executes immediately
//! - SCHEDULE: "daily 9am summary" → SET SCHEDULE automation
//! - GOAL: "increase sales 20%" → Autonomous LLM loop with metrics
//! - TOOL: "when I say X, do Y" → Voice/chat command
use crate::auto_task::app_generator::AppGenerator;
use crate::auto_task::intent_compiler::IntentCompiler;
use crate::shared::models::UserSession;
@ -25,25 +11,16 @@ use serde::{Deserialize, Serialize};
use std::sync::Arc;
use uuid::Uuid;
/// The seven intent types supported by the AutoTask system
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "SCREAMING_SNAKE_CASE")]
pub enum IntentType {
/// Create a full application with HTMX pages, tables, tools, schedulers
AppCreate,
/// Simple task/reminder saved to tasks table
Todo,
/// Monitor for changes with ON CHANGE event handler
Monitor,
/// Execute an action immediately
Action,
/// Create a scheduled automation with SET SCHEDULE
Schedule,
/// Long-running goal with autonomous LLM loop
Goal,
/// Create a voice/chat command trigger
Tool,
/// Unknown or ambiguous intent requiring clarification
Unknown,
}
@ -77,7 +54,6 @@ impl From<&str> for IntentType {
}
}
/// Result of intent classification
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ClassifiedIntent {
pub id: String,
@ -92,32 +68,20 @@ pub struct ClassifiedIntent {
pub classified_at: DateTime<Utc>,
}
/// Extracted entities from the intent
#[derive(Debug, Clone, Default, Serialize, Deserialize)]
pub struct ClassifiedEntities {
/// Main subject (e.g., "clinic", "customers", "IBM stock")
pub subject: Option<String>,
/// Target action verb
pub action: Option<String>,
/// Domain/industry context
pub domain: Option<String>,
/// Time-related information
pub time_spec: Option<TimeSpec>,
/// Condition for triggers
pub condition: Option<String>,
/// Recipient for notifications
pub recipient: Option<String>,
/// List of features requested
pub features: Vec<String>,
/// Tables/entities mentioned
pub tables: Vec<String>,
/// Specific trigger phrases for TOOL type
pub trigger_phrases: Vec<String>,
/// Metric/goal value for GOAL type
pub target_value: Option<String>,
}
/// Time specification for scheduled tasks
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TimeSpec {
pub schedule_type: ScheduleType,
@ -137,7 +101,6 @@ pub enum ScheduleType {
Cron,
}
/// Alternative classification with lower confidence
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AlternativeClassification {
pub intent_type: IntentType,
@ -145,7 +108,6 @@ pub struct AlternativeClassification {
pub reason: String,
}
/// Result of processing an intent through the appropriate handler
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct IntentResult {
pub success: bool,
@ -167,7 +129,6 @@ pub struct CreatedResource {
pub path: Option<String>,
}
/// Main intent classifier and router
pub struct IntentClassifier {
state: Arc<AppState>,
intent_compiler: IntentCompiler,
@ -528,20 +489,18 @@ Respond with JSON only:
});
}
// Track created pages
for page in &app.pages {
resources.push(CreatedResource {
resource_type: "page".to_string(),
name: page.title.clone(),
name: page.filename.clone(),
path: Some(page.filename.clone()),
});
}
// Track created tools
for tool in &app.tools {
resources.push(CreatedResource {
resource_type: "tool".to_string(),
name: tool.name.clone(),
name: tool.filename.clone(),
path: Some(tool.filename.clone()),
});
}

View file

@ -1,4 +1,5 @@
pub mod app_generator;
pub mod app_logs;
pub mod ask_later;
pub mod auto_task;
pub mod autotask_api;
@ -8,9 +9,14 @@ pub mod intent_compiler;
pub mod safety_layer;
pub use app_generator::{
AppGenerator, AppStructure, GeneratedApp, GeneratedPage, GeneratedScript, PageType, ScriptType,
AppGenerator, AppStructure, FileType, GeneratedApp, GeneratedFile, GeneratedPage, PageType,
SyncResult,
};
pub use app_logs::{
generate_client_logger_js, get_designer_error_context, log_generator_error, log_generator_info,
log_runtime_error, log_validation_error, start_log_cleanup_scheduler, AppLogEntry, AppLogStore,
ClientLogRequest, LogLevel, LogQueryParams, LogSource, LogStats, APP_LOGS,
};
pub use ask_later::{ask_later_keyword, PendingInfoItem};
pub use auto_task::{AutoTask, AutoTaskStatus, ExecutionMode, TaskPriority};
pub use autotask_api::{
@ -26,52 +32,116 @@ pub use intent_classifier::{ClassifiedIntent, IntentClassifier, IntentType};
pub use intent_compiler::{CompiledIntent, IntentCompiler};
pub use safety_layer::{AuditEntry, ConstraintCheckResult, SafetyLayer, SimulationResult};
use crate::core::urls::ApiUrls;
pub fn configure_autotask_routes() -> axum::Router<std::sync::Arc<crate::shared::state::AppState>> {
use axum::routing::{get, post};
axum::Router::new()
.route("/api/autotask/create", post(create_and_execute_handler))
.route("/api/autotask/classify", post(classify_intent_handler))
.route("/api/autotask/compile", post(compile_intent_handler))
.route("/api/autotask/execute", post(execute_plan_handler))
.route(ApiUrls::AUTOTASK_CREATE, post(create_and_execute_handler))
.route(ApiUrls::AUTOTASK_CLASSIFY, post(classify_intent_handler))
.route(ApiUrls::AUTOTASK_COMPILE, post(compile_intent_handler))
.route(ApiUrls::AUTOTASK_EXECUTE, post(execute_plan_handler))
.route(
"/api/autotask/simulate/:plan_id",
&ApiUrls::AUTOTASK_SIMULATE.replace(":plan_id", "{plan_id}"),
post(simulate_plan_handler),
)
.route("/api/autotask/list", get(list_tasks_handler))
.route("/api/autotask/stats", get(get_stats_handler))
.route("/api/autotask/:task_id/pause", post(pause_task_handler))
.route("/api/autotask/:task_id/resume", post(resume_task_handler))
.route("/api/autotask/:task_id/cancel", post(cancel_task_handler))
.route(ApiUrls::AUTOTASK_LIST, get(list_tasks_handler))
.route(ApiUrls::AUTOTASK_STATS, get(get_stats_handler))
.route(
"/api/autotask/:task_id/simulate",
&ApiUrls::AUTOTASK_PAUSE.replace(":task_id", "{task_id}"),
post(pause_task_handler),
)
.route(
&ApiUrls::AUTOTASK_RESUME.replace(":task_id", "{task_id}"),
post(resume_task_handler),
)
.route(
&ApiUrls::AUTOTASK_CANCEL.replace(":task_id", "{task_id}"),
post(cancel_task_handler),
)
.route(
&ApiUrls::AUTOTASK_TASK_SIMULATE.replace(":task_id", "{task_id}"),
post(simulate_task_handler),
)
.route(
"/api/autotask/:task_id/decisions",
&ApiUrls::AUTOTASK_DECISIONS.replace(":task_id", "{task_id}"),
get(get_decisions_handler),
)
.route(
"/api/autotask/:task_id/decide",
&ApiUrls::AUTOTASK_DECIDE.replace(":task_id", "{task_id}"),
post(submit_decision_handler),
)
.route(
"/api/autotask/:task_id/approvals",
&ApiUrls::AUTOTASK_APPROVALS.replace(":task_id", "{task_id}"),
get(get_approvals_handler),
)
.route(
"/api/autotask/:task_id/approve",
&ApiUrls::AUTOTASK_APPROVE.replace(":task_id", "{task_id}"),
post(submit_approval_handler),
)
.route("/api/autotask/:task_id/execute", post(execute_task_handler))
.route("/api/autotask/:task_id/logs", get(get_task_logs_handler))
.route(
"/api/autotask/recommendations/:rec_id/apply",
&ApiUrls::AUTOTASK_TASK_EXECUTE.replace(":task_id", "{task_id}"),
post(execute_task_handler),
)
.route(
&ApiUrls::AUTOTASK_LOGS.replace(":task_id", "{task_id}"),
get(get_task_logs_handler),
)
.route(
&ApiUrls::AUTOTASK_RECOMMENDATIONS_APPLY.replace(":rec_id", "{rec_id}"),
post(apply_recommendation_handler),
)
.route("/api/autotask/pending", get(get_pending_items_handler))
.route(ApiUrls::AUTOTASK_PENDING, get(get_pending_items_handler))
.route(
"/api/autotask/pending/:item_id",
&ApiUrls::AUTOTASK_PENDING_ITEM.replace(":item_id", "{item_id}"),
post(submit_pending_item_handler),
)
.route("/api/app-logs/client", post(handle_client_logs))
.route("/api/app-logs/list", get(handle_list_logs))
.route("/api/app-logs/stats", get(handle_log_stats))
.route("/api/app-logs/clear/{app_name}", post(handle_clear_logs))
.route("/api/app-logs/logger.js", get(handle_logger_js))
}
/// POST handler for `/api/app-logs/client`: records each forwarded browser
/// log entry in the global app log store and acknowledges with
/// `{"success": true}`.
async fn handle_client_logs(
    axum::Json(payload): axum::Json<ClientLogsPayload>,
) -> impl axum::response::IntoResponse {
    payload
        .logs
        .into_iter()
        .for_each(|entry| APP_LOGS.log_client(entry, None, None));
    axum::Json(serde_json::json!({"success": true}))
}
/// Request body for the client-log ingestion endpoint: a batch of log
/// entries forwarded from the browser-side logger (see `handle_client_logs`).
#[derive(serde::Deserialize)]
struct ClientLogsPayload {
    // Each entry is recorded individually via `APP_LOGS.log_client`.
    logs: Vec<ClientLogRequest>,
}
/// GET handler that returns the stored app log entries matching the
/// query-string filters.
async fn handle_list_logs(
    axum::extract::Query(params): axum::extract::Query<LogQueryParams>,
) -> impl axum::response::IntoResponse {
    axum::Json(APP_LOGS.get_logs(&params))
}
/// GET handler that reports aggregate statistics over the stored app logs.
async fn handle_log_stats() -> impl axum::response::IntoResponse {
    axum::Json(APP_LOGS.get_stats())
}
/// POST handler that drops all stored log entries for the named app and
/// confirms the operation in the JSON response.
async fn handle_clear_logs(
    axum::extract::Path(app_name): axum::extract::Path<String>,
) -> impl axum::response::IntoResponse {
    APP_LOGS.clear_app_logs(&app_name);
    let body =
        serde_json::json!({"success": true, "message": format!("Logs cleared for {}", app_name)});
    axum::Json(body)
}
/// GET handler that serves the generated client-side logger script with a
/// JavaScript content type.
async fn handle_logger_js() -> impl axum::response::IntoResponse {
    let headers = [(axum::http::header::CONTENT_TYPE, "application/javascript")];
    let script = generate_client_logger_js();
    (headers, script)
}

View file

@ -1,18 +1,3 @@
//! App Server Module
//!
//! Serves generated HTMX applications with clean URLs.
//! Apps are synced from drive to SITE_ROOT/{app_name}/ for serving.
//!
//! URL structure:
//! - `/apps/{app_name}/` -> {site_path}/{app_name}/index.html
//! - `/apps/{app_name}/patients.html` -> {site_path}/{app_name}/patients.html
//! - `/apps/{app_name}/styles.css` -> {site_path}/{app_name}/styles.css
//!
//! Flow:
//! 1. AppGenerator writes to S3 drive: {bucket}/.gbdrive/apps/{app_name}/
//! 2. sync_app_to_site_root() copies to: {site_path}/{app_name}/
//! 3. This module serves from: {site_path}/{app_name}/
use crate::shared::state::AppState;
use axum::{
body::Body,
@ -25,7 +10,6 @@ use axum::{
use log::{error, trace, warn};
use std::sync::Arc;
/// Configure routes for serving generated apps
pub fn configure_app_server_routes() -> Router<Arc<AppState>> {
Router::new()
// Serve app files: /apps/{app_name}/* (clean URLs)
@ -36,7 +20,6 @@ pub fn configure_app_server_routes() -> Router<Arc<AppState>> {
.route("/apps", get(list_all_apps))
}
/// Path parameters for app serving
#[derive(Debug, serde::Deserialize)]
pub struct AppPath {
pub app_name: String,
@ -48,7 +31,6 @@ pub struct AppFilePath {
pub file_path: String,
}
/// Serve the index.html for an app
pub async fn serve_app_index(
State(state): State<Arc<AppState>>,
Path(params): Path<AppPath>,
@ -56,7 +38,6 @@ pub async fn serve_app_index(
serve_app_file_internal(&state, &params.app_name, "index.html").await
}
/// Serve any file from an app directory
pub async fn serve_app_file(
State(state): State<Arc<AppState>>,
Path(params): Path<AppFilePath>,
@ -64,7 +45,6 @@ pub async fn serve_app_file(
serve_app_file_internal(&state, &params.app_name, &params.file_path).await
}
/// Internal function to serve files from app directory
async fn serve_app_file_internal(state: &AppState, app_name: &str, file_path: &str) -> Response {
// Sanitize paths to prevent directory traversal
let sanitized_app_name = sanitize_path_component(app_name);
@ -120,7 +100,6 @@ async fn serve_app_file_internal(state: &AppState, app_name: &str, file_path: &s
}
}
/// List all available apps from SITE_ROOT
pub async fn list_all_apps(State(state): State<Arc<AppState>>) -> impl IntoResponse {
let site_path = state
.config
@ -165,7 +144,6 @@ pub async fn list_all_apps(State(state): State<Arc<AppState>>) -> impl IntoRespo
.into_response()
}
/// Sanitize path component to prevent directory traversal
fn sanitize_path_component(component: &str) -> String {
component
.replace("..", "")
@ -177,7 +155,6 @@ fn sanitize_path_component(component: &str) -> String {
.collect()
}
/// Get content type based on file extension
fn get_content_type(file_path: &str) -> &'static str {
let ext = file_path.rsplit('.').next().unwrap_or("").to_lowercase();

View file

@ -1,10 +1,11 @@
use super::table_access::{check_table_access, AccessType, UserRoles};
use crate::shared::models::UserSession;
use crate::shared::state::AppState;
use crate::shared::utils::{json_value_to_dynamic, to_array};
use diesel::prelude::*;
use diesel::sql_query;
use diesel::sql_types::Text;
use log::{error, trace};
use log::{error, trace, warn};
use rhai::{Array, Dynamic, Engine, Map};
use serde_json::{json, Value};
use std::collections::HashMap;
@ -26,8 +27,9 @@ pub fn register_data_operations(state: Arc<AppState>, user: UserSession, engine:
register_group_by_keyword(state, user, engine);
}
pub fn register_save_keyword(state: Arc<AppState>, _user: UserSession, engine: &mut Engine) {
pub fn register_save_keyword(state: Arc<AppState>, user: UserSession, engine: &mut Engine) {
let state_clone = Arc::clone(&state);
let user_roles = UserRoles::from_user_session(&user);
engine
.register_custom_syntax(
@ -45,6 +47,14 @@ pub fn register_save_keyword(state: Arc<AppState>, _user: UserSession, engine: &
.get()
.map_err(|e| format!("DB error: {}", e))?;
// Check write access
if let Err(e) =
check_table_access(&mut conn, &table, &user_roles, AccessType::Write)
{
warn!("SAVE access denied: {}", e);
return Err(e.into());
}
let result = execute_save(&mut conn, &table, &id, &data)
.map_err(|e| format!("SAVE error: {}", e))?;
@ -54,8 +64,9 @@ pub fn register_save_keyword(state: Arc<AppState>, _user: UserSession, engine: &
.unwrap();
}
pub fn register_insert_keyword(state: Arc<AppState>, _user: UserSession, engine: &mut Engine) {
pub fn register_insert_keyword(state: Arc<AppState>, user: UserSession, engine: &mut Engine) {
let state_clone = Arc::clone(&state);
let user_roles = UserRoles::from_user_session(&user);
engine
.register_custom_syntax(
@ -72,6 +83,14 @@ pub fn register_insert_keyword(state: Arc<AppState>, _user: UserSession, engine:
.get()
.map_err(|e| format!("DB error: {}", e))?;
// Check write access
if let Err(e) =
check_table_access(&mut conn, &table, &user_roles, AccessType::Write)
{
warn!("INSERT access denied: {}", e);
return Err(e.into());
}
let result = execute_insert(&mut conn, &table, &data)
.map_err(|e| format!("INSERT error: {}", e))?;
@ -81,8 +100,9 @@ pub fn register_insert_keyword(state: Arc<AppState>, _user: UserSession, engine:
.unwrap();
}
pub fn register_update_keyword(state: Arc<AppState>, _user: UserSession, engine: &mut Engine) {
pub fn register_update_keyword(state: Arc<AppState>, user: UserSession, engine: &mut Engine) {
let state_clone = Arc::clone(&state);
let user_roles = UserRoles::from_user_session(&user);
engine
.register_custom_syntax(
@ -100,6 +120,14 @@ pub fn register_update_keyword(state: Arc<AppState>, _user: UserSession, engine:
.get()
.map_err(|e| format!("DB error: {}", e))?;
// Check write access
if let Err(e) =
check_table_access(&mut conn, &table, &user_roles, AccessType::Write)
{
warn!("UPDATE access denied: {}", e);
return Err(e.into());
}
let result = execute_update(&mut conn, &table, &filter, &data)
.map_err(|e| format!("UPDATE error: {}", e))?;
@ -109,8 +137,9 @@ pub fn register_update_keyword(state: Arc<AppState>, _user: UserSession, engine:
.unwrap();
}
pub fn register_delete_keyword(state: Arc<AppState>, _user: UserSession, engine: &mut Engine) {
pub fn register_delete_keyword(state: Arc<AppState>, user: UserSession, engine: &mut Engine) {
let state_clone = Arc::clone(&state);
let user_roles = UserRoles::from_user_session(&user);
engine
.register_custom_syntax(
@ -170,6 +199,14 @@ pub fn register_delete_keyword(state: Arc<AppState>, _user: UserSession, engine:
.get()
.map_err(|e| format!("DB error: {}", e))?;
// Check write access (delete requires write permission)
if let Err(e) =
check_table_access(&mut conn, &first_arg, &user_roles, AccessType::Write)
{
warn!("DELETE access denied: {}", e);
return Err(e.into());
}
let result = execute_delete(&mut conn, &first_arg, &second_arg)
.map_err(|e| format!("DELETE error: {}", e))?;

View file

@ -1,19 +1,47 @@
use super::table_access::{
check_field_write_access, check_table_access, filter_fields_by_role, AccessType, UserRoles,
};
use crate::core::shared::state::AppState;
use crate::core::urls::ApiUrls;
use axum::{
extract::{Path, Query, State},
http::StatusCode,
http::{HeaderMap, StatusCode},
response::IntoResponse,
routing::{delete, get, post, put},
Json, Router,
};
use diesel::prelude::*;
use diesel::sql_query;
use log::{error, info};
use log::{error, info, warn};
use serde::{Deserialize, Serialize};
use serde_json::{json, Value};
use std::sync::Arc;
use uuid::Uuid;
/// Extracts a `UserRoles` value from the request headers.
///
/// `X-User-Roles` is a semicolon-separated role list (entries are trimmed,
/// empties dropped); `X-User-Id` is parsed as a UUID. A missing or malformed
/// header falls back to an empty role list / no user id.
fn user_roles_from_headers(headers: &HeaderMap) -> UserRoles {
    let role_list = headers
        .get("X-User-Roles")
        .and_then(|value| value.to_str().ok())
        .map_or_else(Default::default, |raw| {
            raw.split(';')
                .filter_map(|piece| {
                    let trimmed = piece.trim();
                    (!trimmed.is_empty()).then(|| trimmed.to_string())
                })
                .collect()
        });

    let parsed_id = headers
        .get("X-User-Id")
        .and_then(|value| value.to_str().ok())
        .and_then(|text| Uuid::parse_str(text).ok());

    match parsed_id {
        Some(uid) => UserRoles::with_user_id(role_list, uid),
        None => UserRoles::new(role_list),
    }
}
#[derive(Debug, Deserialize)]
pub struct QueryParams {
pub limit: Option<i32>,
@ -46,13 +74,40 @@ pub struct DeleteResponse {
/// Builds the dynamic-table CRUD router.
///
/// Paths come from the centralized `ApiUrls` constants with `:param`
/// placeholders rewritten to axum's `{param}` capture syntax.
///
/// NOTE(review): the visible block interleaved the pre-change literal-path
/// registrations (`"/api/db/{table}"`, …) with the new `ApiUrls`-based ones
/// (diff rendering); registering the same path+method twice panics when the
/// router is built, so only the `ApiUrls`-based set is kept here.
pub fn configure_db_routes() -> Router<Arc<AppState>> {
    Router::new()
        .route(
            &ApiUrls::DB_TABLE.replace(":table", "{table}"),
            get(list_records_handler),
        )
        .route(
            &ApiUrls::DB_TABLE.replace(":table", "{table}"),
            post(create_record_handler),
        )
        .route(
            &ApiUrls::DB_TABLE_RECORD
                .replace(":table", "{table}")
                .replace(":id", "{id}"),
            get(get_record_handler),
        )
        .route(
            &ApiUrls::DB_TABLE_RECORD
                .replace(":table", "{table}")
                .replace(":id", "{id}"),
            put(update_record_handler),
        )
        .route(
            &ApiUrls::DB_TABLE_RECORD
                .replace(":table", "{table}")
                .replace(":id", "{id}"),
            delete(delete_record_handler),
        )
        .route(
            &ApiUrls::DB_TABLE_COUNT.replace(":table", "{table}"),
            get(count_records_handler),
        )
        .route(
            &ApiUrls::DB_TABLE_SEARCH.replace(":table", "{table}"),
            post(search_records_handler),
        )
}
fn sanitize_identifier(name: &str) -> String {
@ -63,10 +118,12 @@ fn sanitize_identifier(name: &str) -> String {
pub async fn list_records_handler(
State(state): State<Arc<AppState>>,
headers: HeaderMap,
Path(table): Path<String>,
Query(params): Query<QueryParams>,
) -> impl IntoResponse {
let table_name = sanitize_identifier(&table);
let user_roles = user_roles_from_headers(&headers);
let limit = params.limit.unwrap_or(20).min(100);
let offset = params.offset.unwrap_or(0);
let order_by = params
@ -95,6 +152,19 @@ pub async fn list_records_handler(
}
};
// Check table-level read access
let access_info =
match check_table_access(&mut conn, &table_name, &user_roles, AccessType::Read) {
Ok(info) => info,
Err(e) => {
warn!(
"Access denied to table {} for user {:?}",
table_name, user_roles.user_id
);
return (StatusCode::FORBIDDEN, Json(json!({ "error": e }))).into_response();
}
};
let query = format!(
"SELECT row_to_json(t.*) as data FROM {} t ORDER BY {} {} LIMIT {} OFFSET {}",
table_name, order_by, order_dir, limit, offset
@ -107,8 +177,14 @@ pub async fn list_records_handler(
match (rows, total) {
(Ok(data), Ok(count_result)) => {
// Filter fields based on user roles
let filtered_data: Vec<Value> = data
.into_iter()
.map(|r| filter_fields_by_role(r.data, &user_roles, &access_info))
.collect();
let response = ListResponse {
data: data.into_iter().map(|r| r.data).collect(),
data: filtered_data,
total: count_result.count,
limit,
offset,
@ -128,9 +204,11 @@ pub async fn list_records_handler(
pub async fn get_record_handler(
State(state): State<Arc<AppState>>,
headers: HeaderMap,
Path((table, id)): Path<(String, String)>,
) -> impl IntoResponse {
let table_name = sanitize_identifier(&table);
let user_roles = user_roles_from_headers(&headers);
let record_id = match Uuid::parse_str(&id) {
Ok(uuid) => uuid,
@ -162,6 +240,23 @@ pub async fn get_record_handler(
}
};
// Check table-level read access
let access_info =
match check_table_access(&mut conn, &table_name, &user_roles, AccessType::Read) {
Ok(info) => info,
Err(e) => {
return (
StatusCode::FORBIDDEN,
Json(RecordResponse {
success: false,
data: None,
message: Some(e),
}),
)
.into_response();
}
};
let query = format!(
"SELECT row_to_json(t.*) as data FROM {} t WHERE id = $1",
table_name
@ -173,15 +268,19 @@ pub async fn get_record_handler(
.optional();
match row {
Ok(Some(r)) => (
StatusCode::OK,
Json(RecordResponse {
success: true,
data: Some(r.data),
message: None,
}),
)
.into_response(),
Ok(Some(r)) => {
// Filter fields based on user roles
let filtered_data = filter_fields_by_role(r.data, &user_roles, &access_info);
(
StatusCode::OK,
Json(RecordResponse {
success: true,
data: Some(filtered_data),
message: None,
}),
)
.into_response()
}
Ok(None) => (
StatusCode::NOT_FOUND,
Json(RecordResponse {
@ -208,10 +307,12 @@ pub async fn get_record_handler(
pub async fn create_record_handler(
State(state): State<Arc<AppState>>,
headers: HeaderMap,
Path(table): Path<String>,
Json(payload): Json<Value>,
) -> impl IntoResponse {
let table_name = sanitize_identifier(&table);
let user_roles = user_roles_from_headers(&headers);
let obj = match payload.as_object() {
Some(o) => o,
@ -255,6 +356,41 @@ pub async fn create_record_handler(
}
};
// Check table-level write access
let access_info =
match check_table_access(&mut conn, &table_name, &user_roles, AccessType::Write) {
Ok(info) => info,
Err(e) => {
return (
StatusCode::FORBIDDEN,
Json(RecordResponse {
success: false,
data: None,
message: Some(e),
}),
)
.into_response();
}
};
// Check field-level write access for fields being inserted
let field_names: Vec<String> = obj
.keys()
.map(|k| sanitize_identifier(k))
.filter(|k| !k.is_empty() && k != "id")
.collect();
if let Err(e) = check_field_write_access(&field_names, &user_roles, &access_info) {
return (
StatusCode::FORBIDDEN,
Json(RecordResponse {
success: false,
data: None,
message: Some(e),
}),
)
.into_response();
}
let query = format!(
"INSERT INTO {} ({}) VALUES ({}) RETURNING row_to_json({}.*)::jsonb as data",
table_name,
@ -295,10 +431,12 @@ pub async fn create_record_handler(
pub async fn update_record_handler(
State(state): State<Arc<AppState>>,
headers: HeaderMap,
Path((table, id)): Path<(String, String)>,
Json(payload): Json<Value>,
) -> impl IntoResponse {
let table_name = sanitize_identifier(&table);
let user_roles = user_roles_from_headers(&headers);
let record_id = match Uuid::parse_str(&id) {
Ok(uuid) => uuid,
@ -369,6 +507,41 @@ pub async fn update_record_handler(
}
};
// Check table-level write access
let access_info =
match check_table_access(&mut conn, &table_name, &user_roles, AccessType::Write) {
Ok(info) => info,
Err(e) => {
return (
StatusCode::FORBIDDEN,
Json(RecordResponse {
success: false,
data: None,
message: Some(e),
}),
)
.into_response();
}
};
// Check field-level write access for fields being updated
let field_names: Vec<String> = obj
.keys()
.map(|k| sanitize_identifier(k))
.filter(|k| !k.is_empty() && k != "id")
.collect();
if let Err(e) = check_field_write_access(&field_names, &user_roles, &access_info) {
return (
StatusCode::FORBIDDEN,
Json(RecordResponse {
success: false,
data: None,
message: Some(e),
}),
)
.into_response();
}
let query = format!(
"UPDATE {} SET {} WHERE id = '{}' RETURNING row_to_json({}.*)::jsonb as data",
table_name,
@ -409,9 +582,11 @@ pub async fn update_record_handler(
pub async fn delete_record_handler(
State(state): State<Arc<AppState>>,
headers: HeaderMap,
Path((table, id)): Path<(String, String)>,
) -> impl IntoResponse {
let table_name = sanitize_identifier(&table);
let user_roles = user_roles_from_headers(&headers);
let record_id = match Uuid::parse_str(&id) {
Ok(uuid) => uuid,
@ -443,6 +618,19 @@ pub async fn delete_record_handler(
}
};
// Check table-level write access (delete requires write)
if let Err(e) = check_table_access(&mut conn, &table_name, &user_roles, AccessType::Write) {
return (
StatusCode::FORBIDDEN,
Json(DeleteResponse {
success: false,
deleted: 0,
message: Some(e),
}),
)
.into_response();
}
let query = format!("DELETE FROM {} WHERE id = $1", table_name);
let deleted: Result<usize, _> = sql_query(&query)
@ -483,9 +671,11 @@ pub async fn delete_record_handler(
pub async fn count_records_handler(
State(state): State<Arc<AppState>>,
headers: HeaderMap,
Path(table): Path<String>,
) -> impl IntoResponse {
let table_name = sanitize_identifier(&table);
let user_roles = user_roles_from_headers(&headers);
let mut conn = match state.conn.get() {
Ok(c) => c,
@ -498,6 +688,11 @@ pub async fn count_records_handler(
}
};
// Check table-level read access (count requires read permission)
if let Err(e) = check_table_access(&mut conn, &table_name, &user_roles, AccessType::Read) {
return (StatusCode::FORBIDDEN, Json(json!({ "error": e }))).into_response();
}
let query = format!("SELECT COUNT(*) as count FROM {}", table_name);
let result: Result<CountResult, _> = sql_query(&query).get_result(&mut conn);
@ -523,10 +718,12 @@ pub struct SearchRequest {
pub async fn search_records_handler(
State(state): State<Arc<AppState>>,
headers: HeaderMap,
Path(table): Path<String>,
Json(payload): Json<SearchRequest>,
) -> impl IntoResponse {
let table_name = sanitize_identifier(&table);
let user_roles = user_roles_from_headers(&headers);
let limit = payload.limit.unwrap_or(20).min(100);
let search_term = payload.query.replace('\'', "''");
@ -541,6 +738,15 @@ pub async fn search_records_handler(
}
};
// Check table-level read access
let access_info =
match check_table_access(&mut conn, &table_name, &user_roles, AccessType::Read) {
Ok(info) => info,
Err(e) => {
return (StatusCode::FORBIDDEN, Json(json!({ "error": e }))).into_response();
}
};
let query = format!(
"SELECT row_to_json(t.*) as data FROM {} t WHERE
COALESCE(t.title::text, '') || ' ' || COALESCE(t.name::text, '') || ' ' || COALESCE(t.description::text, '')
@ -551,11 +757,14 @@ pub async fn search_records_handler(
let rows: Result<Vec<JsonRow>, _> = sql_query(&query).get_results(&mut conn);
match rows {
Ok(data) => (
StatusCode::OK,
Json(json!({ "data": data.into_iter().map(|r| r.data).collect::<Vec<_>>() })),
)
.into_response(),
Ok(data) => {
// Filter fields based on user roles
let filtered_data: Vec<Value> = data
.into_iter()
.map(|r| filter_fields_by_role(r.data, &user_roles, &access_info))
.collect();
(StatusCode::OK, Json(json!({ "data": filtered_data }))).into_response()
}
Err(e) => {
error!("Failed to search in {table_name}: {e}");
(

View file

@ -1,16 +1,18 @@
use super::table_access::{check_table_access, filter_fields_by_role, AccessType, UserRoles};
use crate::shared::models::UserSession;
use crate::shared::state::AppState;
use crate::shared::utils;
use crate::shared::utils::to_array;
use diesel::pg::PgConnection;
use diesel::prelude::*;
use log::error;
use log::trace;
use log::{error, trace, warn};
use rhai::Dynamic;
use rhai::Engine;
use serde_json::{json, Value};
pub fn find_keyword(state: &AppState, _user: UserSession, engine: &mut Engine) {
pub fn find_keyword(state: &AppState, user: UserSession, engine: &mut Engine) {
let connection = state.conn.clone();
let user_roles = UserRoles::from_user_session(&user);
engine
.register_custom_syntax(["FIND", "$expr$", ",", "$expr$"], false, {
move |context, inputs| {
@ -19,13 +21,32 @@ pub fn find_keyword(state: &AppState, _user: UserSession, engine: &mut Engine) {
let mut binding = connection.get().map_err(|e| format!("DB error: {}", e))?;
let binding2 = table_name.to_string();
let binding3 = filter.to_string();
// Check read access before executing query
let access_info = match check_table_access(
&mut binding,
&binding2,
&user_roles,
AccessType::Read,
) {
Ok(info) => info,
Err(e) => {
warn!("FIND access denied: {}", e);
return Err(e.into());
}
};
let result = tokio::task::block_in_place(|| {
tokio::runtime::Handle::current()
.block_on(async { execute_find(&mut binding, &binding2, &binding3) })
})
.map_err(|e| format!("DB error: {}", e))?;
if let Some(results) = result.get("results") {
let array = to_array(utils::json_value_to_dynamic(results));
// Filter fields based on user roles
let filtered =
filter_fields_by_role(results.clone(), &user_roles, &access_info);
let array = to_array(utils::json_value_to_dynamic(&filtered));
Ok(Dynamic::from(array))
} else {
Err("No results".into())

View file

@ -65,6 +65,7 @@ pub mod social;
pub mod social_media;
pub mod string_functions;
pub mod switch_case;
pub mod table_access;
pub mod table_definition;
pub mod transfer_to_human;
pub mod universal_messaging;
@ -83,6 +84,10 @@ pub use app_server::configure_app_server_routes;
pub use db_api::configure_db_routes;
pub use mcp_client::{McpClient, McpRequest, McpResponse, McpServer, McpTool};
pub use mcp_directory::{McpDirectoryScanResult, McpDirectoryScanner, McpServerConfig};
pub use table_access::{
check_field_write_access, check_table_access, filter_fields_by_role, load_table_access_info,
AccessType, TableAccessInfo, UserRoles,
};
pub fn get_all_keywords() -> Vec<String> {
vec![

View file

@ -1,5 +1,6 @@
use crate::shared::models::UserSession;
use crate::shared::state::AppState;
use botlib::MAX_LOOP_ITERATIONS;
use log::trace;
use rhai::{Dynamic, Engine};
use std::collections::HashMap;
@ -32,7 +33,7 @@ fn register_while_wend(engine: &mut Engine) {
let condition_expr = &inputs[0];
let block = &inputs[1];
let max_iterations = 100_000;
let max_iterations = MAX_LOOP_ITERATIONS;
let mut iterations = 0;
loop {
@ -70,8 +71,7 @@ fn register_while_wend(engine: &mut Engine) {
iterations += 1;
if iterations >= max_iterations {
return Err(format!(
"WHILE loop exceeded maximum iterations ({}). Possible infinite loop.",
max_iterations
"WHILE loop exceeded maximum iterations ({max_iterations}). Possible infinite loop."
)
.into());
}
@ -98,7 +98,7 @@ fn register_do_loop(engine: &mut Engine) {
let condition_expr = &inputs[0];
let block = &inputs[1];
let max_iterations = 100_000;
let max_iterations = MAX_LOOP_ITERATIONS;
let mut iterations = 0;
loop {
@ -134,7 +134,7 @@ fn register_do_loop(engine: &mut Engine) {
let condition_expr = &inputs[0];
let block = &inputs[1];
let max_iterations = 100_000;
let max_iterations = MAX_LOOP_ITERATIONS;
let mut iterations = 0;
loop {
@ -170,7 +170,7 @@ fn register_do_loop(engine: &mut Engine) {
let block = &inputs[0];
let condition_expr = &inputs[1];
let max_iterations = 100_000;
let max_iterations = MAX_LOOP_ITERATIONS;
let mut iterations = 0;
loop {
@ -206,7 +206,7 @@ fn register_do_loop(engine: &mut Engine) {
let block = &inputs[0];
let condition_expr = &inputs[1];
let max_iterations = 100_000;
let max_iterations = MAX_LOOP_ITERATIONS;
let mut iterations = 0;
loop {

View file

@ -1,8 +1,9 @@
use super::table_access::{check_table_access, AccessType, UserRoles};
use crate::shared::models::UserSession;
use crate::shared::state::AppState;
use chrono::Utc;
use diesel::prelude::*;
use log::{error, trace};
use log::{error, trace, warn};
use rhai::{Dynamic, Engine};
use serde_json::{json, Value};
use std::sync::Arc;
@ -91,6 +92,16 @@ pub async fn execute_save_from_unstructured(
table_name: &str,
text: &str,
) -> Result<String, String> {
// Check write access before proceeding
let user_roles = UserRoles::from_user_session(user);
{
let mut conn = state.conn.get().map_err(|e| format!("DB error: {}", e))?;
if let Err(e) = check_table_access(&mut conn, table_name, &user_roles, AccessType::Write) {
warn!("SAVE FROM UNSTRUCTURED access denied: {}", e);
return Err(e);
}
}
let schema = get_table_schema(state, table_name)?;
let extraction_prompt = build_extraction_prompt(table_name, &schema, text);

View file

@ -0,0 +1,535 @@
/*****************************************************************************\
| ® |
| |
| |
| |
| |
| |
| General Bots Copyright (c) pragmatismo.com.br. All rights reserved. |
| Licensed under the AGPL-3.0. |
| |
| According to our dual licensing model, this program can be used either |
| under the terms of the GNU Affero General Public License, version 3, |
| or under a proprietary license. |
| |
| The texts of the GNU Affero General Public License with an additional |
| permission and of our proprietary license can be found at and |
| in the LICENSE file you have received along with this program. |
| |
| This program is distributed in the hope that it will be useful, |
| but WITHOUT ANY WARRANTY, without even the implied warranty of |
| MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
| GNU Affero General Public License for more details. |
| |
| "General Bots" is a registered trademark of pragmatismo.com.br. |
| The licensing of the program under the AGPLv3 does not imply a |
| trademark license. Therefore any rights, title and interest in |
| our trademarks remain entirely with us. |
| |
\*****************************************************************************/
use crate::shared::models::UserSession;
use diesel::prelude::*;
use diesel::sql_query;
use diesel::sql_types::Text;
use log::{trace, warn};
use serde::{Deserialize, Serialize};
use serde_json::Value;
use std::collections::HashMap;
/// The set of roles attached to the current user, used by the
/// table- and field-level access checks in this module.
#[derive(Debug, Clone, Default, Serialize, Deserialize)]
pub struct UserRoles {
    // Role names, stored lowercased so comparisons are case-insensitive.
    // Empty = anonymous / no roles.
    pub roles: Vec<String>,
    // Session user id when known; used in this module only for log output.
    pub user_id: Option<uuid::Uuid>,
}
impl UserRoles {
    /// Builds a role set from explicit role names; names are lowercased so
    /// all later comparisons are case-insensitive.
    pub fn new(roles: Vec<String>) -> Self {
        Self {
            roles: roles.into_iter().map(|r| r.to_lowercase()).collect(),
            user_id: None,
        }
    }

    /// Like [`UserRoles::new`] but also records the user's id (shown in
    /// access-denied log messages).
    pub fn with_user_id(roles: Vec<String>, user_id: uuid::Uuid) -> Self {
        Self {
            roles: roles.into_iter().map(|r| r.to_lowercase()).collect(),
            user_id: Some(user_id),
        }
    }

    /// A user with no roles at all; only passes checks whose required
    /// role list is empty (i.e. unrestricted tables/fields).
    pub fn anonymous() -> Self {
        Self::default()
    }

    /// Extracts roles from a session's context data.
    ///
    /// Roles may appear under any of the keys `roles`, `user_roles` or
    /// `zitadel_roles`, either as a JSON array of strings or as a single
    /// semicolon-separated string. The first key that yields at least one
    /// non-empty role wins. An `is_admin: true` flag additionally grants
    /// the `admin` role.
    pub fn from_user_session(session: &UserSession) -> Self {
        let mut roles: Vec<String> = Vec::new();

        for key in ["roles", "user_roles", "zitadel_roles"] {
            match session.context_data.get(key) {
                // Array of strings; non-string items are ignored.
                Some(Value::Array(arr)) => {
                    roles = arr
                        .iter()
                        .filter_map(|item| match item {
                            Value::String(s) => Some(s.trim().to_lowercase()),
                            _ => None,
                        })
                        // Drop blank entries so an array of empty strings
                        // does not stop the key search (keeps this branch
                        // consistent with the string form below).
                        .filter(|r| !r.is_empty())
                        .collect();
                }
                // Semicolon-separated string
                Some(Value::String(s)) => {
                    roles = s
                        .split(';')
                        .map(|r| r.trim().to_lowercase())
                        .filter(|r| !r.is_empty())
                        .collect();
                }
                _ => {}
            }
            if !roles.is_empty() {
                break;
            }
        }

        // Also check if user is marked as admin in context
        if let Some(Value::Bool(true)) = session.context_data.get("is_admin") {
            if !roles.contains(&"admin".to_string()) {
                roles.push("admin".to_string());
            }
        }

        Self {
            roles,
            user_id: Some(session.user_id),
        }
    }

    /// Returns `true` when the user may proceed: either no roles are
    /// required, or the user holds at least one of `required_roles`
    /// (compared case-insensitively).
    pub fn has_access(&self, required_roles: &[String]) -> bool {
        if required_roles.is_empty() {
            return true; // No roles specified = everyone has access
        }
        // Check if user has any of the required roles
        self.roles.iter().any(|user_role| {
            required_roles
                .iter()
                .any(|req| req.to_lowercase() == *user_role)
        })
    }

    /// Case-insensitive membership test for a single role.
    pub fn has_role(&self, role: &str) -> bool {
        self.roles.iter().any(|r| r == &role.to_lowercase())
    }

    /// Convenience check for the `admin` role.
    pub fn is_admin(&self) -> bool {
        self.has_role("admin")
    }
}
/// The kind of operation being authorized. Callers in this codebase treat
/// deletes as `Write` as well (delete requires write permission).
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum AccessType {
    Read,
    Write,
}
/// Role restrictions for one dynamic table, loaded from
/// `dynamic_table_definitions` / `dynamic_table_fields`.
/// Empty role lists and absent field entries mean "unrestricted".
#[derive(Debug, Clone, Default, Serialize, Deserialize)]
pub struct TableAccessInfo {
    pub table_name: String,
    // Table-level roles required to read / write; empty = everyone.
    pub read_roles: Vec<String>,
    pub write_roles: Vec<String>,
    // Per-field restrictions; a field with no entry is unrestricted.
    pub field_read_roles: HashMap<String, Vec<String>>,
    pub field_write_roles: HashMap<String, Vec<String>>,
}
impl TableAccessInfo {
pub fn can_read(&self, user_roles: &UserRoles) -> bool {
user_roles.has_access(&self.read_roles)
}
pub fn can_write(&self, user_roles: &UserRoles) -> bool {
user_roles.has_access(&self.write_roles)
}
pub fn can_read_field(&self, field_name: &str, user_roles: &UserRoles) -> bool {
if let Some(field_roles) = self.field_read_roles.get(field_name) {
user_roles.has_access(field_roles)
} else {
true // No field-level restriction
}
}
pub fn can_write_field(&self, field_name: &str, user_roles: &UserRoles) -> bool {
if let Some(field_roles) = self.field_write_roles.get(field_name) {
user_roles.has_access(field_roles)
} else {
true // No field-level restriction
}
}
pub fn get_restricted_read_fields(&self, user_roles: &UserRoles) -> Vec<String> {
self.field_read_roles
.iter()
.filter(|(_, roles)| !user_roles.has_access(roles))
.map(|(field, _)| field.clone())
.collect()
}
pub fn get_restricted_write_fields(&self, user_roles: &UserRoles) -> Vec<String> {
self.field_write_roles
.iter()
.filter(|(_, roles)| !user_roles.has_access(roles))
.map(|(field, _)| field.clone())
.collect()
}
}
// Row shape for the table-level permissions query in
// `load_table_access_info` (diesel raw-SQL result).
#[derive(QueryableByName, Debug)]
struct TableDefRow {
    #[diesel(sql_type = Text)]
    table_name: String,
    // Semicolon-separated role list; NULL = unrestricted.
    #[diesel(sql_type = diesel::sql_types::Nullable<Text>)]
    read_roles: Option<String>,
    #[diesel(sql_type = diesel::sql_types::Nullable<Text>)]
    write_roles: Option<String>,
}
// Row shape for the field-level permissions query in
// `load_table_access_info` (diesel raw-SQL result).
#[derive(QueryableByName, Debug)]
struct FieldDefRow {
    #[diesel(sql_type = Text)]
    field_name: String,
    // Semicolon-separated role list; NULL = unrestricted.
    #[diesel(sql_type = diesel::sql_types::Nullable<Text>)]
    read_roles: Option<String>,
    #[diesel(sql_type = diesel::sql_types::Nullable<Text>)]
    write_roles: Option<String>,
}
pub fn load_table_access_info(
conn: &mut diesel::PgConnection,
table_name: &str,
) -> Option<TableAccessInfo> {
// Query table-level permissions
let table_result: Result<TableDefRow, _> = sql_query(
"SELECT table_name, read_roles, write_roles
FROM dynamic_table_definitions
WHERE table_name = $1
LIMIT 1",
)
.bind::<Text, _>(table_name)
.get_result(conn);
let table_def = match table_result {
Ok(row) => row,
Err(_) => {
trace!(
"No table definition found for '{}', allowing open access",
table_name
);
return None; // No definition = open access
}
};
let mut info = TableAccessInfo {
table_name: table_def.table_name,
read_roles: parse_roles_string(&table_def.read_roles),
write_roles: parse_roles_string(&table_def.write_roles),
field_read_roles: HashMap::new(),
field_write_roles: HashMap::new(),
};
// Query field-level permissions
let fields_result: Result<Vec<FieldDefRow>, _> = sql_query(
"SELECT f.field_name, f.read_roles, f.write_roles
FROM dynamic_table_fields f
JOIN dynamic_table_definitions t ON f.table_definition_id = t.id
WHERE t.table_name = $1",
)
.bind::<Text, _>(table_name)
.get_results(conn);
if let Ok(fields) = fields_result {
for field in fields {
let field_read = parse_roles_string(&field.read_roles);
let field_write = parse_roles_string(&field.write_roles);
if !field_read.is_empty() {
info.field_read_roles
.insert(field.field_name.clone(), field_read);
}
if !field_write.is_empty() {
info.field_write_roles.insert(field.field_name, field_write);
}
}
}
trace!(
"Loaded access info for table '{}': read_roles={:?}, write_roles={:?}, field_restrictions={}",
info.table_name,
info.read_roles,
info.write_roles,
info.field_read_roles.len() + info.field_write_roles.len()
);
Some(info)
}
/// Splits a semicolon-separated role string into individual role names.
///
/// Whitespace around each role is trimmed and empty entries are dropped;
/// `None` (or an all-blank string) yields an empty vector, which elsewhere
/// in this module means "no restriction".
fn parse_roles_string(roles: &Option<String>) -> Vec<String> {
    match roles {
        Some(raw) => raw
            .split(';')
            .filter_map(|piece| {
                let role = piece.trim();
                if role.is_empty() {
                    None
                } else {
                    Some(role.to_string())
                }
            })
            .collect(),
        None => Vec::new(),
    }
}
pub fn check_table_access(
conn: &mut diesel::PgConnection,
table_name: &str,
user_roles: &UserRoles,
access_type: AccessType,
) -> Result<Option<TableAccessInfo>, String> {
let access_info = load_table_access_info(conn, table_name);
if let Some(ref info) = access_info {
let has_access = match access_type {
AccessType::Read => info.can_read(user_roles),
AccessType::Write => info.can_write(user_roles),
};
if !has_access {
let action = match access_type {
AccessType::Read => "read from",
AccessType::Write => "write to",
};
warn!(
"Access denied: user {:?} cannot {} table '{}'",
user_roles.user_id, action, table_name
);
return Err(format!(
"Access denied: insufficient permissions to {} table '{}'",
action, table_name
));
}
}
Ok(access_info)
}
/// Ensures every field in `fields` is writable by the user.
///
/// With no access info (`None`) all writes are permitted. Otherwise the
/// denial message lists every blocked field at once so the caller can fix
/// the whole payload in one pass.
pub fn check_field_write_access(
    fields: &[String],
    user_roles: &UserRoles,
    access_info: &Option<TableAccessInfo>,
) -> Result<(), String> {
    let Some(info) = access_info else {
        return Ok(()); // No access info = allow all
    };

    let denied_fields: Vec<String> = fields
        .iter()
        .filter(|field| !info.can_write_field(field, user_roles))
        .cloned()
        .collect();

    if denied_fields.is_empty() {
        Ok(())
    } else {
        Err(format!(
            "Access denied: insufficient permissions to write field(s): {}",
            denied_fields.join(", ")
        ))
    }
}
/// Strips fields the user may not read from a JSON record.
///
/// Objects lose their restricted keys, arrays are filtered element-wise,
/// and all other JSON values pass through untouched. With no access info
/// (`None`) the value is returned as-is.
pub fn filter_fields_by_role(
    data: Value,
    user_roles: &UserRoles,
    access_info: &Option<TableAccessInfo>,
) -> Value {
    let Some(info) = access_info else {
        return data; // No access info = return all fields
    };

    match data {
        Value::Object(mut map) => {
            for field in info.get_restricted_read_fields(user_roles) {
                trace!("Filtering out field '{}' due to role restriction", field);
                map.remove(&field);
            }
            Value::Object(map)
        }
        Value::Array(items) => {
            let filtered = items
                .into_iter()
                .map(|item| filter_fields_by_role(item, user_roles, access_info))
                .collect();
            Value::Array(filtered)
        }
        other => other,
    }
}
/// Removes fields the user may not write from a JSON payload.
///
/// Returns the cleaned payload together with the names of the fields that
/// were removed, so callers can report what was dropped. Arrays are now
/// filtered element-wise (mirroring [`filter_fields_by_role`]); previously
/// an array payload passed through completely unfiltered. Non-object,
/// non-array values are returned untouched. With no access info (`None`)
/// everything is allowed.
pub fn filter_write_fields(
    data: Value,
    user_roles: &UserRoles,
    access_info: &Option<TableAccessInfo>,
) -> (Value, Vec<String>) {
    let Some(info) = access_info else {
        return (data, Vec::new()); // No access info = allow all
    };

    match data {
        Value::Object(mut map) => {
            let restricted = info.get_restricted_write_fields(user_roles);
            let mut removed = Vec::new();
            for field in &restricted {
                if map.contains_key(field) {
                    trace!(
                        "Removing field '{}' from write data due to role restriction",
                        field
                    );
                    map.remove(field);
                    removed.push(field.clone());
                }
            }
            (Value::Object(map), removed)
        }
        Value::Array(items) => {
            // Filter each element and merge the removed-field names,
            // de-duplicated so the report stays readable.
            let mut removed = Vec::new();
            let filtered: Vec<Value> = items
                .into_iter()
                .map(|item| {
                    let (value, mut dropped) =
                        filter_write_fields(item, user_roles, access_info);
                    removed.append(&mut dropped);
                    value
                })
                .collect();
            removed.sort();
            removed.dedup();
            (Value::Array(filtered), removed)
        }
        other => (other, Vec::new()),
    }
}
// Unit tests for role parsing and the access-check primitives.
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_user_roles_has_access() {
        let roles = UserRoles::new(vec!["admin".to_string(), "manager".to_string()]);
        // Empty roles = everyone allowed
        assert!(roles.has_access(&[]));
        // User has admin role
        assert!(roles.has_access(&["admin".to_string()]));
        // User has manager role
        assert!(roles.has_access(&["manager".to_string(), "superuser".to_string()]));
        // User doesn't have superuser role only
        assert!(!roles.has_access(&["superuser".to_string()]));
    }

    #[test]
    fn test_user_roles_case_insensitive() {
        // Stored roles are lowercased at construction and required roles
        // are lowercased at comparison time, so any casing matches.
        let roles = UserRoles::new(vec!["Admin".to_string()]);
        assert!(roles.has_access(&["admin".to_string()]));
        assert!(roles.has_access(&["ADMIN".to_string()]));
        assert!(roles.has_access(&["Admin".to_string()]));
    }

    #[test]
    fn test_anonymous_user() {
        let roles = UserRoles::anonymous();
        // Anonymous can access if no roles required
        assert!(roles.has_access(&[]));
        // Anonymous cannot access if roles required
        assert!(!roles.has_access(&["admin".to_string()]));
    }

    #[test]
    fn test_table_access_info_field_restrictions() {
        let mut info = TableAccessInfo {
            table_name: "contacts".to_string(),
            read_roles: vec![],
            write_roles: vec![],
            field_read_roles: HashMap::new(),
            field_write_roles: HashMap::new(),
        };
        info.field_read_roles
            .insert("ssn".to_string(), vec!["admin".to_string()]);
        info.field_write_roles
            .insert("salary".to_string(), vec!["hr".to_string()]);

        let admin = UserRoles::new(vec!["admin".to_string()]);
        let hr = UserRoles::new(vec!["hr".to_string()]);
        let user = UserRoles::new(vec!["user".to_string()]);

        // Admin can read SSN
        assert!(info.can_read_field("ssn", &admin));
        // Regular user cannot read SSN
        assert!(!info.can_read_field("ssn", &user));
        // HR can write salary
        assert!(info.can_write_field("salary", &hr));
        // Admin cannot write salary (different role)
        assert!(!info.can_write_field("salary", &admin));
        // Everyone can read/write unrestricted fields
        assert!(info.can_read_field("name", &user));
        assert!(info.can_write_field("name", &user));
    }

    #[test]
    fn test_filter_fields_by_role() {
        let mut info = TableAccessInfo::default();
        info.field_read_roles
            .insert("secret".to_string(), vec!["admin".to_string()]);

        let data = serde_json::json!({
            "id": 1,
            "name": "John",
            "secret": "classified"
        });

        // Non-admin: restricted key is removed, others survive.
        let user = UserRoles::new(vec!["user".to_string()]);
        let filtered = filter_fields_by_role(data.clone(), &user, &Some(info.clone()));
        assert!(filtered.get("id").is_some());
        assert!(filtered.get("name").is_some());
        assert!(filtered.get("secret").is_none());

        // Admin can see everything
        let admin = UserRoles::new(vec!["admin".to_string()]);
        let not_filtered = filter_fields_by_role(data, &admin, &Some(info));
        assert!(not_filtered.get("secret").is_some());
    }

    #[test]
    fn test_parse_roles_string() {
        assert_eq!(parse_roles_string(&None), Vec::<String>::new());
        assert_eq!(
            parse_roles_string(&Some("".to_string())),
            Vec::<String>::new()
        );
        assert_eq!(
            parse_roles_string(&Some("admin".to_string())),
            vec!["admin"]
        );
        assert_eq!(
            parse_roles_string(&Some("admin;manager".to_string())),
            vec!["admin", "manager"]
        );
        assert_eq!(
            parse_roles_string(&Some(" admin ; manager ; hr ".to_string())),
            vec!["admin", "manager", "hr"]
        );
    }
}

View file

@ -39,24 +39,41 @@ use std::error::Error;
use std::sync::Arc;
use uuid::Uuid;
#[derive(Debug, Clone, Serialize, Deserialize)]
/// One column parsed from a .gbdialog TABLE block.
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct FieldDefinition {
    pub name: String,
    // Type keyword from the definition line (e.g. "string", "number").
    pub field_type: String,
    // Declared length, e.g. string(150) — TODO confirm exact syntax set.
    #[serde(default)]
    pub length: Option<i32>,
    #[serde(default)]
    pub precision: Option<i32>,
    // True when the field carries the `key` attribute.
    #[serde(default)]
    pub is_key: bool,
    #[serde(default)]
    pub is_nullable: bool,
    #[serde(default)]
    pub default_value: Option<String>,
    // Target table for reference-style fields, when declared.
    #[serde(default)]
    pub reference_table: Option<String>,
    // Position of the field within its TABLE block.
    #[serde(default)]
    pub field_order: i32,
    // Roles from field-level `READ BY` / `WRITE BY`; empty = unrestricted.
    #[serde(default)]
    pub read_roles: Vec<String>,
    #[serde(default)]
    pub write_roles: Vec<String>,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
/// A parsed .gbdialog TABLE definition: target connection, columns, and
/// table-level role restrictions.
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct TableDefinition {
    pub name: String,
    // Named database connection from `ON <name>`; "default" when absent.
    #[serde(default)]
    pub connection_name: String,
    #[serde(default)]
    pub fields: Vec<FieldDefinition>,
    // Roles from table-level `READ BY` / `WRITE BY`; empty = unrestricted.
    #[serde(default)]
    pub read_roles: Vec<String>,
    #[serde(default)]
    pub write_roles: Vec<String>,
}
#[derive(Debug, Clone)]
@ -97,6 +114,9 @@ fn parse_single_table(
) -> Result<TableDefinition, Box<dyn Error + Send + Sync>> {
let header_line = lines[*index].trim();
// Parse table-level READ BY and WRITE BY
let (read_roles, write_roles) = parse_role_attributes(header_line);
let parts: Vec<&str> = header_line.split_whitespace().collect();
if parts.len() < 2 {
@ -110,13 +130,27 @@ fn parse_single_table(
let table_name = parts[1].to_string();
let connection_name = if parts.len() >= 4 && parts[2].eq_ignore_ascii_case("ON") {
parts[3].to_string()
} else {
"default".to_string()
};
// Find connection name (ON keyword)
let mut connection_name = "default".to_string();
for i in 2..parts.len() {
if parts[i].eq_ignore_ascii_case("ON") && i + 1 < parts.len() {
// Check that the next part is not READ or WRITE
if !parts[i + 1].eq_ignore_ascii_case("READ")
&& !parts[i + 1].eq_ignore_ascii_case("WRITE")
{
connection_name = parts[i + 1].to_string();
}
break;
}
}
trace!("Parsing TABLE {} ON {}", table_name, connection_name);
trace!(
"Parsing TABLE {} ON {} (read_roles: {:?}, write_roles: {:?})",
table_name,
connection_name,
read_roles,
write_roles
);
*index += 1;
let mut fields = Vec::new();
@ -153,13 +187,60 @@ fn parse_single_table(
name: table_name,
connection_name,
fields,
read_roles,
write_roles,
})
}
/// Extracts role restrictions from a TABLE or field definition line.
///
/// Recognizes the case-insensitive attributes `READ BY "a;b"` and
/// `WRITE BY "a;b"`; each quoted value is split on `;`, trimmed, and
/// empty entries are dropped. Returns `(read_roles, write_roles)`;
/// either vector is empty when its attribute is absent or malformed
/// (e.g. a missing closing quote).
fn parse_role_attributes(line: &str) -> (Vec<String>, Vec<String>) {
    // Search in the ASCII-uppercased copy: unlike `to_uppercase`, it is
    // guaranteed to preserve byte length, so indices found here are valid
    // offsets into the original `line` even when it contains non-ASCII
    // characters (the old `to_uppercase` version could slice at the wrong
    // offset or panic mid-character).
    let upper = line.to_ascii_uppercase();

    let roles_after = |marker: &str| -> Vec<String> {
        let Some(idx) = upper.find(marker) else {
            return Vec::new();
        };
        // Take the first double-quoted span after the marker.
        let rest = line[idx + marker.len()..].trim();
        let Some(start) = rest.find('"') else {
            return Vec::new();
        };
        let after_quote = &rest[start + 1..];
        let Some(end) = after_quote.find('"') else {
            return Vec::new(); // Unterminated quote = no roles.
        };
        after_quote[..end]
            .split(';')
            .map(|s| s.trim().to_string())
            .filter(|s| !s.is_empty())
            .collect()
    };

    (roles_after("READ BY"), roles_after("WRITE BY"))
}
/// Returns the contents of the first double-quoted substring in `s`,
/// or `None` when no matched pair of quotes exists.
fn extract_quoted_string(s: &str) -> Option<String> {
    let mut parts = s.trim().splitn(3, '"');
    parts.next()?; // text before the first quote (possibly empty)
    let inner = parts.next()?; // between the first and second quote
    // A third segment only exists when a closing quote was present.
    if parts.next().is_some() {
        Some(inner.to_string())
    } else {
        None
    }
}
fn parse_field_definition(
line: &str,
order: i32,
) -> Result<FieldDefinition, Box<dyn Error + Send + Sync>> {
// Parse field-level READ BY and WRITE BY
let (read_roles, write_roles) = parse_role_attributes(line);
let parts: Vec<&str> = line.split_whitespace().collect();
if parts.is_empty() {
@ -205,6 +286,8 @@ fn parse_field_definition(
reference_table = Some(parts[i + 1].to_string());
}
}
// Skip READ, BY, WRITE as they're handled separately
"read" | "by" | "write" => {}
_ => {}
}
}
@ -219,6 +302,8 @@ fn parse_field_definition(
default_value: None,
reference_table,
field_order: order,
read_roles,
write_roles,
})
}
@ -413,16 +498,30 @@ pub fn store_table_definition(
bot_id: Uuid,
table: &TableDefinition,
) -> Result<Uuid, Box<dyn Error + Send + Sync>> {
// Convert role vectors to semicolon-separated strings for storage
let read_roles_str: Option<String> = if table.read_roles.is_empty() {
None
} else {
Some(table.read_roles.join(";"))
};
let write_roles_str: Option<String> = if table.write_roles.is_empty() {
None
} else {
Some(table.write_roles.join(";"))
};
let table_id: Uuid = diesel::sql_query(
"INSERT INTO dynamic_table_definitions (bot_id, table_name, connection_name)
VALUES ($1, $2, $3)
"INSERT INTO dynamic_table_definitions (bot_id, table_name, connection_name, read_roles, write_roles)
VALUES ($1, $2, $3, $4, $5)
ON CONFLICT (bot_id, table_name, connection_name)
DO UPDATE SET updated_at = NOW()
DO UPDATE SET updated_at = NOW(), read_roles = $4, write_roles = $5
RETURNING id",
)
.bind::<diesel::sql_types::Uuid, _>(bot_id)
.bind::<Text, _>(&table.name)
.bind::<Text, _>(&table.connection_name)
.bind::<diesel::sql_types::Nullable<Text>, _>(&read_roles_str)
.bind::<diesel::sql_types::Nullable<Text>, _>(&write_roles_str)
.get_result::<IdResult>(conn)?
.id;
@ -431,11 +530,23 @@ pub fn store_table_definition(
.execute(conn)?;
for field in &table.fields {
// Convert field role vectors to semicolon-separated strings
let field_read_roles: Option<String> = if field.read_roles.is_empty() {
None
} else {
Some(field.read_roles.join(";"))
};
let field_write_roles: Option<String> = if field.write_roles.is_empty() {
None
} else {
Some(field.write_roles.join(";"))
};
diesel::sql_query(
"INSERT INTO dynamic_table_fields
(table_definition_id, field_name, field_type, field_length, field_precision,
is_key, is_nullable, default_value, reference_table, field_order)
VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)",
is_key, is_nullable, default_value, reference_table, field_order, read_roles, write_roles)
VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12)",
)
.bind::<diesel::sql_types::Uuid, _>(table_id)
.bind::<Text, _>(&field.name)
@ -447,6 +558,8 @@ pub fn store_table_definition(
.bind::<diesel::sql_types::Nullable<Text>, _>(&field.default_value)
.bind::<diesel::sql_types::Nullable<Text>, _>(&field.reference_table)
.bind::<diesel::sql_types::Integer, _>(field.field_order)
.bind::<diesel::sql_types::Nullable<Text>, _>(&field_read_roles)
.bind::<diesel::sql_types::Nullable<Text>, _>(&field_write_roles)
.execute(conn)?;
}

View file

@ -154,7 +154,6 @@ pub fn remove_webhook_registration(
Ok(result)
}
/// Type alias for webhook results: (target, param, `is_active`).
pub type WebhookResult = Vec<(String, String, bool)>;
pub fn get_bot_webhooks(

View file

@ -523,10 +523,10 @@ pub fn configure_calendar_routes() -> Router<Arc<AppState>> {
&ApiUrls::CALENDAR_EVENT_BY_ID.replace(":id", "{id}"),
get(get_event).put(update_event).delete(delete_event),
)
.route("/api/calendar/export.ics", get(export_ical))
.route("/api/calendar/import", post(import_ical))
.route("/api/calendar/calendars", get(list_calendars_api))
.route("/api/calendar/events/upcoming", get(upcoming_events_api))
.route(ApiUrls::CALENDAR_EXPORT, get(export_ical))
.route(ApiUrls::CALENDAR_IMPORT, post(import_ical))
.route(ApiUrls::CALENDAR_CALENDARS, get(list_calendars_api))
.route(ApiUrls::CALENDAR_UPCOMING, get(upcoming_events_api))
.route("/ui/calendar/list", get(list_calendars))
.route("/ui/calendar/upcoming", get(upcoming_events))
.route("/ui/calendar/event/new", get(new_event_form))

View file

@ -317,6 +317,28 @@ impl BootstrapManager {
match pm.start(component.name) {
Ok(_child) => {
info!("Started component: {}", component.name);
if component.name == "drive" {
for i in 0..15 {
let drive_ready = Command::new("sh")
.arg("-c")
.arg("curl -f -s 'http://127.0.0.1:9000/minio/health/live' >/dev/null 2>&1")
.stdout(std::process::Stdio::null())
.stderr(std::process::Stdio::null())
.status()
.map(|s| s.success())
.unwrap_or(false);
if drive_ready {
info!("MinIO drive is ready and responding");
break;
}
if i < 14 {
tokio::time::sleep(tokio::time::Duration::from_secs(1)).await;
} else {
warn!("MinIO drive health check timed out after 15s");
}
}
}
}
Err(e) => {
debug!(
@ -574,14 +596,12 @@ impl BootstrapManager {
|| stderr_str.contains("connection refused")
{
connection_refused = true;
continue;
}
connection_refused = false;
if let Ok(status) = serde_json::from_str::<serde_json::Value>(&status_str) {
parsed_status = Some(status);
break;
} else {
connection_refused = false;
if let Ok(status) = serde_json::from_str::<serde_json::Value>(&status_str) {
parsed_status = Some(status);
break;
}
}
}
@ -1837,7 +1857,6 @@ VAULT_CACHE_TTL=300
}
Err(e) => {
warn!("S3/MinIO not available, skipping bucket {}: {}", bucket, e);
continue;
}
}
}

View file

@ -649,7 +649,6 @@ pub fn create_interactive_buttons(text: &str, buttons: Vec<(&str, &str)>) -> ser
})
}
/// Type alias for interactive list sections: (title, rows) where rows are (id, title, description)
pub type InteractiveListSections = Vec<(String, Vec<(String, String, Option<String>)>)>;
pub fn create_interactive_list(

View file

@ -678,7 +678,9 @@ impl PackageManager {
env_vars: HashMap::new(),
data_download_list: Vec::new(),
exec_cmd: "{{BIN_PATH}}/nocodb".to_string(),
check_cmd: "curl -f -k --connect-timeout 2 -m 5 https://localhost:5757 >/dev/null 2>&1".to_string(),
check_cmd:
"curl -f -k --connect-timeout 2 -m 5 https://localhost:5757 >/dev/null 2>&1"
.to_string(),
},
);
}
@ -705,7 +707,9 @@ impl PackageManager {
env_vars: HashMap::new(),
data_download_list: Vec::new(),
exec_cmd: "coolwsd --config-file={{CONF_PATH}}/coolwsd.xml".to_string(),
check_cmd: "curl -f -k --connect-timeout 2 -m 5 https://localhost:9980 >/dev/null 2>&1".to_string(),
check_cmd:
"curl -f -k --connect-timeout 2 -m 5 https://localhost:9980 >/dev/null 2>&1"
.to_string(),
},
);
}
@ -883,7 +887,7 @@ impl PackageManager {
"mkdir -p {{LOGS_PATH}}".to_string(),
r#"cat > {{CONF_PATH}}/vault/config.hcl << 'EOF'
storage "file" {
path = "/opt/gbo/data/vault"
path = "{{DATA_PATH}}/vault"
}
listener "tcp" {

View file

@ -147,12 +147,20 @@ impl DirectorySetup {
log::info!(" Saved Directory configuration");
log::info!(" Directory initialization complete!");
log::info!(
" Default user: {} / {}",
config.default_user.email,
config.default_user.password
);
log::info!(" Login at: {}", self.base_url);
log::info!("");
log::info!("╔══════════════════════════════════════════════════════════════╗");
log::info!("║ DEFAULT CREDENTIALS ║");
log::info!("╠══════════════════════════════════════════════════════════════╣");
log::info!("║ Email: {:<50}║", config.default_user.email);
log::info!("║ Password: {:<50}║", config.default_user.password);
log::info!("╠══════════════════════════════════════════════════════════════╣");
log::info!("║ Login at: {:<50}║", self.base_url);
log::info!("╚══════════════════════════════════════════════════════════════╝");
log::info!("");
log::info!(">>> COPY THESE CREDENTIALS NOW - Press ENTER to continue <<<");
let mut input = String::new();
let _ = std::io::stdin().read_line(&mut input);
Ok(config)
}

View file

@ -1,14 +1,7 @@
#[derive(Debug)]
pub struct ApiUrls;
impl ApiUrls {
pub const USERS: &'static str = "/api/users";
pub const USER_BY_ID: &'static str = "/api/users/:id";
pub const USER_LOGIN: &'static str = "/api/users/login";
@ -20,7 +13,6 @@ impl ApiUrls {
pub const USER_PROVISION: &'static str = "/api/users/provision";
pub const USER_DEPROVISION: &'static str = "/api/users/:id/deprovision";
pub const GROUPS: &'static str = "/api/groups";
pub const GROUP_BY_ID: &'static str = "/api/groups/:id";
pub const GROUP_MEMBERS: &'static str = "/api/groups/:id/members";
@ -28,7 +20,6 @@ impl ApiUrls {
pub const GROUP_REMOVE_MEMBER: &'static str = "/api/groups/:id/members/:user_id";
pub const GROUP_PERMISSIONS: &'static str = "/api/groups/:id/permissions";
pub const AUTH: &'static str = "/api/auth";
pub const AUTH_TOKEN: &'static str = "/api/auth/token";
pub const AUTH_REFRESH: &'static str = "/api/auth/refresh";
@ -36,14 +27,12 @@ impl ApiUrls {
pub const AUTH_OAUTH: &'static str = "/api/auth/oauth";
pub const AUTH_OAUTH_CALLBACK: &'static str = "/api/auth/oauth/callback";
pub const SESSIONS: &'static str = "/api/sessions";
pub const SESSION_BY_ID: &'static str = "/api/sessions/:id";
pub const SESSION_HISTORY: &'static str = "/api/sessions/:id/history";
pub const SESSION_START: &'static str = "/api/sessions/:id/start";
pub const SESSION_END: &'static str = "/api/sessions/:id/end";
pub const BOTS: &'static str = "/api/bots";
pub const BOT_BY_ID: &'static str = "/api/bots/:id";
pub const BOT_CONFIG: &'static str = "/api/bots/:id/config";
@ -51,7 +40,6 @@ impl ApiUrls {
pub const BOT_LOGS: &'static str = "/api/bots/:id/logs";
pub const BOT_METRICS: &'static str = "/api/bots/:id/metrics";
pub const DRIVE_LIST: &'static str = "/api/drive/list";
pub const DRIVE_UPLOAD: &'static str = "/api/drive/upload";
pub const DRIVE_DOWNLOAD: &'static str = "/api/drive/download/:path";
@ -60,7 +48,7 @@ impl ApiUrls {
pub const DRIVE_MOVE: &'static str = "/api/drive/move";
pub const DRIVE_COPY: &'static str = "/api/drive/copy";
pub const DRIVE_SHARE: &'static str = "/api/drive/share";
pub const DRIVE_FILE: &'static str = "/api/drive/file/:path";
pub const EMAIL_ACCOUNTS: &'static str = "/api/email/accounts";
pub const EMAIL_ACCOUNT_BY_ID: &'static str = "/api/email/accounts/:id";
@ -72,13 +60,15 @@ impl ApiUrls {
pub const EMAIL_GET: &'static str = "/api/email/get/:campaign_id";
pub const EMAIL_CLICK: &'static str = "/api/email/click/:campaign_id/:email";
pub const CALENDAR_EVENTS: &'static str = "/api/calendar/events";
pub const CALENDAR_EVENT_BY_ID: &'static str = "/api/calendar/events/:id";
pub const CALENDAR_REMINDERS: &'static str = "/api/calendar/reminders";
pub const CALENDAR_SHARE: &'static str = "/api/calendar/share";
pub const CALENDAR_SYNC: &'static str = "/api/calendar/sync";
pub const CALENDAR_EXPORT: &'static str = "/api/calendar/export.ics";
pub const CALENDAR_IMPORT: &'static str = "/api/calendar/import";
pub const CALENDAR_CALENDARS: &'static str = "/api/calendar/calendars";
pub const CALENDAR_UPCOMING: &'static str = "/api/calendar/events/upcoming";
pub const TASKS: &'static str = "/api/tasks";
pub const TASK_BY_ID: &'static str = "/api/tasks/:id";
@ -87,7 +77,6 @@ impl ApiUrls {
pub const TASK_PRIORITY: &'static str = "/api/tasks/:id/priority";
pub const TASK_COMMENTS: &'static str = "/api/tasks/:id/comments";
pub const MEET_CREATE: &'static str = "/api/meet/create";
pub const MEET_ROOMS: &'static str = "/api/meet/rooms";
pub const MEET_ROOM_BY_ID: &'static str = "/api/meet/rooms/:id";
@ -96,24 +85,40 @@ impl ApiUrls {
pub const MEET_TOKEN: &'static str = "/api/meet/token";
pub const MEET_INVITE: &'static str = "/api/meet/invite";
pub const MEET_TRANSCRIPTION: &'static str = "/api/meet/rooms/:id/transcription";
pub const MEET_PARTICIPANTS: &'static str = "/api/meet/participants";
pub const MEET_RECENT: &'static str = "/api/meet/recent";
pub const MEET_SCHEDULED: &'static str = "/api/meet/scheduled";
pub const VOICE_START: &'static str = "/api/voice/start";
pub const VOICE_STOP: &'static str = "/api/voice/stop";
pub const VOICE_STATUS: &'static str = "/api/voice/status";
pub const DNS_REGISTER: &'static str = "/api/dns/register";
pub const DNS_REMOVE: &'static str = "/api/dns/remove";
pub const DNS_LIST: &'static str = "/api/dns/list";
pub const DNS_UPDATE: &'static str = "/api/dns/update";
pub const ANALYTICS_DASHBOARD: &'static str = "/api/analytics/dashboard";
pub const ANALYTICS_METRIC: &'static str = "/api/analytics/metric";
pub const ANALYTICS_MESSAGES_COUNT: &'static str = "/api/analytics/messages/count";
pub const ANALYTICS_SESSIONS_ACTIVE: &'static str = "/api/analytics/sessions/active";
pub const ANALYTICS_RESPONSE_AVG: &'static str = "/api/analytics/response/avg";
pub const ANALYTICS_LLM_TOKENS: &'static str = "/api/analytics/llm/tokens";
pub const ANALYTICS_STORAGE_USAGE: &'static str = "/api/analytics/storage/usage";
pub const ANALYTICS_ERRORS_COUNT: &'static str = "/api/analytics/errors/count";
pub const ANALYTICS_TIMESERIES_MESSAGES: &'static str = "/api/analytics/timeseries/messages";
pub const ANALYTICS_TIMESERIES_RESPONSE: &'static str =
"/api/analytics/timeseries/response_time";
pub const ANALYTICS_CHANNELS_DISTRIBUTION: &'static str =
"/api/analytics/channels/distribution";
pub const ANALYTICS_BOTS_PERFORMANCE: &'static str = "/api/analytics/bots/performance";
pub const ANALYTICS_ACTIVITY_RECENT: &'static str = "/api/analytics/activity/recent";
pub const ANALYTICS_QUERIES_TOP: &'static str = "/api/analytics/queries/top";
pub const ANALYTICS_CHAT: &'static str = "/api/analytics/chat";
pub const ANALYTICS_LLM_STATS: &'static str = "/api/analytics/llm/stats";
pub const ANALYTICS_BUDGET_STATUS: &'static str = "/api/analytics/budget/status";
pub const METRICS: &'static str = "/api/metrics";
pub const ADMIN_STATS: &'static str = "/api/admin/stats";
pub const ADMIN_USERS: &'static str = "/api/admin/users";
pub const ADMIN_SYSTEM: &'static str = "/api/admin/system";
@ -122,12 +127,10 @@ impl ApiUrls {
pub const ADMIN_SERVICES: &'static str = "/api/admin/services";
pub const ADMIN_AUDIT: &'static str = "/api/admin/audit";
pub const HEALTH: &'static str = "/api/health";
pub const STATUS: &'static str = "/api/status";
pub const SERVICES_STATUS: &'static str = "/api/services/status";
pub const KB_SEARCH: &'static str = "/api/kb/search";
pub const KB_UPLOAD: &'static str = "/api/kb/upload";
pub const KB_DOCUMENTS: &'static str = "/api/kb/documents";
@ -135,20 +138,152 @@ impl ApiUrls {
pub const KB_INDEX: &'static str = "/api/kb/index";
pub const KB_EMBEDDINGS: &'static str = "/api/kb/embeddings";
pub const LLM_CHAT: &'static str = "/api/llm/chat";
pub const LLM_COMPLETIONS: &'static str = "/api/llm/completions";
pub const LLM_EMBEDDINGS: &'static str = "/api/llm/embeddings";
pub const LLM_MODELS: &'static str = "/api/llm/models";
pub const LLM_GENERATE: &'static str = "/api/llm/generate";
pub const LLM_IMAGE: &'static str = "/api/llm/image";
pub const ATTENDANCE_QUEUE: &'static str = "/api/attendance/queue";
pub const ATTENDANCE_ATTENDANTS: &'static str = "/api/attendance/attendants";
pub const ATTENDANCE_ASSIGN: &'static str = "/api/attendance/assign";
pub const ATTENDANCE_TRANSFER: &'static str = "/api/attendance/transfer";
pub const ATTENDANCE_RESOLVE: &'static str = "/api/attendance/resolve/:session_id";
pub const ATTENDANCE_INSIGHTS: &'static str = "/api/attendance/insights";
pub const ATTENDANCE_RESPOND: &'static str = "/api/attendance/respond";
pub const ATTENDANCE_LLM_TIPS: &'static str = "/api/attendance/llm/tips";
pub const ATTENDANCE_LLM_POLISH: &'static str = "/api/attendance/llm/polish";
pub const ATTENDANCE_LLM_SMART_REPLIES: &'static str = "/api/attendance/llm/smart-replies";
pub const ATTENDANCE_LLM_SUMMARY: &'static str = "/api/attendance/llm/summary/:session_id";
pub const ATTENDANCE_LLM_SENTIMENT: &'static str = "/api/attendance/llm/sentiment";
pub const ATTENDANCE_LLM_CONFIG: &'static str = "/api/attendance/llm/config/:bot_id";
pub const AUTOTASK_CREATE: &'static str = "/api/autotask/create";
pub const AUTOTASK_CLASSIFY: &'static str = "/api/autotask/classify";
pub const AUTOTASK_COMPILE: &'static str = "/api/autotask/compile";
pub const AUTOTASK_EXECUTE: &'static str = "/api/autotask/execute";
pub const AUTOTASK_SIMULATE: &'static str = "/api/autotask/simulate/:plan_id";
pub const AUTOTASK_LIST: &'static str = "/api/autotask/list";
pub const AUTOTASK_STATS: &'static str = "/api/autotask/stats";
pub const AUTOTASK_PAUSE: &'static str = "/api/autotask/:task_id/pause";
pub const AUTOTASK_RESUME: &'static str = "/api/autotask/:task_id/resume";
pub const AUTOTASK_CANCEL: &'static str = "/api/autotask/:task_id/cancel";
pub const AUTOTASK_TASK_SIMULATE: &'static str = "/api/autotask/:task_id/simulate";
pub const AUTOTASK_DECISIONS: &'static str = "/api/autotask/:task_id/decisions";
pub const AUTOTASK_DECIDE: &'static str = "/api/autotask/:task_id/decide";
pub const AUTOTASK_APPROVALS: &'static str = "/api/autotask/:task_id/approvals";
pub const AUTOTASK_APPROVE: &'static str = "/api/autotask/:task_id/approve";
pub const AUTOTASK_TASK_EXECUTE: &'static str = "/api/autotask/:task_id/execute";
pub const AUTOTASK_LOGS: &'static str = "/api/autotask/:task_id/logs";
pub const AUTOTASK_RECOMMENDATIONS_APPLY: &'static str =
"/api/autotask/recommendations/:rec_id/apply";
pub const AUTOTASK_PENDING: &'static str = "/api/autotask/pending";
pub const AUTOTASK_PENDING_ITEM: &'static str = "/api/autotask/pending/:item_id";
pub const DB_TABLE: &'static str = "/api/db/:table";
pub const DB_TABLE_RECORD: &'static str = "/api/db/:table/:id";
pub const DB_TABLE_COUNT: &'static str = "/api/db/:table/count";
pub const DB_TABLE_SEARCH: &'static str = "/api/db/:table/search";
pub const DESIGNER_FILES: &'static str = "/api/v1/designer/files";
pub const DESIGNER_LOAD: &'static str = "/api/v1/designer/load";
pub const DESIGNER_SAVE: &'static str = "/api/v1/designer/save";
pub const DESIGNER_VALIDATE: &'static str = "/api/v1/designer/validate";
pub const DESIGNER_EXPORT: &'static str = "/api/v1/designer/export";
pub const DESIGNER_MODIFY: &'static str = "/api/designer/modify";
pub const MAIL_SEND: &'static str = "/api/mail/send";
pub const WHATSAPP_SEND: &'static str = "/api/whatsapp/send";
pub const FILES_BY_ID: &'static str = "/api/files/:id";
pub const MESSAGES: &'static str = "/api/messages";
pub const DESIGNER_DIALOGS: &'static str = "/api/designer/dialogs";
pub const DESIGNER_DIALOG_BY_ID: &'static str = "/api/designer/dialogs/:id";
pub const EMAIL_TRACKING_LIST: &'static str = "/api/email/tracking/list";
pub const EMAIL_TRACKING_STATS: &'static str = "/api/email/tracking/stats";
pub const INSTAGRAM_WEBHOOK: &'static str = "/api/instagram/webhook";
pub const INSTAGRAM_SEND: &'static str = "/api/instagram/send";
pub const MONITORING_DASHBOARD: &'static str = "/api/monitoring/dashboard";
pub const MONITORING_SERVICES: &'static str = "/api/monitoring/services";
pub const MONITORING_RESOURCES: &'static str = "/api/monitoring/resources";
pub const MONITORING_LOGS: &'static str = "/api/monitoring/logs";
pub const MONITORING_LLM: &'static str = "/api/monitoring/llm";
pub const MONITORING_HEALTH: &'static str = "/api/monitoring/health";
pub const MSTEAMS_MESSAGES: &'static str = "/api/msteams/messages";
pub const MSTEAMS_SEND: &'static str = "/api/msteams/send";
pub const PAPER_NEW: &'static str = "/api/paper/new";
pub const PAPER_LIST: &'static str = "/api/paper/list";
pub const PAPER_SEARCH: &'static str = "/api/paper/search";
pub const PAPER_SAVE: &'static str = "/api/paper/save";
pub const PAPER_AUTOSAVE: &'static str = "/api/paper/autosave";
pub const PAPER_BY_ID: &'static str = "/api/paper/:id";
pub const PAPER_DELETE: &'static str = "/api/paper/:id/delete";
pub const PAPER_TEMPLATE_BLANK: &'static str = "/api/paper/template/blank";
pub const PAPER_TEMPLATE_MEETING: &'static str = "/api/paper/template/meeting";
pub const PAPER_TEMPLATE_TODO: &'static str = "/api/paper/template/todo";
pub const PAPER_TEMPLATE_RESEARCH: &'static str = "/api/paper/template/research";
pub const PAPER_AI_SUMMARIZE: &'static str = "/api/paper/ai/summarize";
pub const PAPER_AI_EXPAND: &'static str = "/api/paper/ai/expand";
pub const PAPER_AI_IMPROVE: &'static str = "/api/paper/ai/improve";
pub const PAPER_AI_SIMPLIFY: &'static str = "/api/paper/ai/simplify";
pub const PAPER_AI_TRANSLATE: &'static str = "/api/paper/ai/translate";
pub const PAPER_AI_CUSTOM: &'static str = "/api/paper/ai/custom";
pub const PAPER_EXPORT_PDF: &'static str = "/api/paper/export/pdf";
pub const PAPER_EXPORT_DOCX: &'static str = "/api/paper/export/docx";
pub const PAPER_EXPORT_MD: &'static str = "/api/paper/export/md";
pub const PAPER_EXPORT_HTML: &'static str = "/api/paper/export/html";
pub const PAPER_EXPORT_TXT: &'static str = "/api/paper/export/txt";
pub const RESEARCH_COLLECTIONS: &'static str = "/api/research/collections";
pub const RESEARCH_COLLECTIONS_NEW: &'static str = "/api/research/collections/new";
pub const RESEARCH_COLLECTION_BY_ID: &'static str = "/api/research/collections/:id";
pub const RESEARCH_SEARCH: &'static str = "/api/research/search";
pub const RESEARCH_RECENT: &'static str = "/api/research/recent";
pub const RESEARCH_TRENDING: &'static str = "/api/research/trending";
pub const RESEARCH_PROMPTS: &'static str = "/api/research/prompts";
pub const SOURCES_PROMPTS: &'static str = "/api/sources/prompts";
pub const SOURCES_TEMPLATES: &'static str = "/api/sources/templates";
pub const SOURCES_NEWS: &'static str = "/api/sources/news";
pub const SOURCES_MCP_SERVERS: &'static str = "/api/sources/mcp-servers";
pub const SOURCES_LLM_TOOLS: &'static str = "/api/sources/llm-tools";
pub const SOURCES_MODELS: &'static str = "/api/sources/models";
pub const SOURCES_SEARCH: &'static str = "/api/sources/search";
pub const SOURCES_REPOSITORIES: &'static str = "/api/sources/repositories";
pub const SOURCES_REPOSITORIES_CONNECT: &'static str = "/api/sources/repositories/connect";
pub const SOURCES_REPOSITORIES_DISCONNECT: &'static str =
"/api/sources/repositories/disconnect";
pub const SOURCES_APPS: &'static str = "/api/sources/apps";
pub const SOURCES_MCP: &'static str = "/api/sources/mcp";
pub const SOURCES_MCP_BY_NAME: &'static str = "/api/sources/mcp/:name";
pub const SOURCES_MCP_ENABLE: &'static str = "/api/sources/mcp/:name/enable";
pub const SOURCES_MCP_DISABLE: &'static str = "/api/sources/mcp/:name/disable";
pub const SOURCES_MCP_TOOLS: &'static str = "/api/sources/mcp/:name/tools";
pub const SOURCES_MCP_TEST: &'static str = "/api/sources/mcp/:name/test";
pub const SOURCES_MCP_SCAN: &'static str = "/api/sources/mcp/scan";
pub const SOURCES_MCP_EXAMPLES: &'static str = "/api/sources/mcp/examples";
pub const SOURCES_MENTIONS: &'static str = "/api/sources/mentions";
pub const SOURCES_TOOLS: &'static str = "/api/sources/tools";
pub const TASKS_STATS: &'static str = "/api/tasks/stats";
pub const TASKS_STATS_JSON: &'static str = "/api/tasks/stats/json";
pub const TASKS_COMPLETED: &'static str = "/api/tasks/completed";
pub const WS: &'static str = "/ws";
pub const WS_MEET: &'static str = "/ws/meet";
pub const WS_CHAT: &'static str = "/ws/chat";
pub const WS_NOTIFICATIONS: &'static str = "/ws/notifications";
pub const WS_ATTENDANT: &'static str = "/ws/attendant";
}
#[derive(Debug)]
pub struct InternalUrls;
@ -163,20 +298,20 @@ impl InternalUrls {
pub const QDRANT: &'static str = "http://localhost:6334";
pub const FORGEJO: &'static str = "http://localhost:3000";
pub const LIVEKIT: &'static str = "http://localhost:7880";
pub const BOTMODELS_VISION_QRCODE: &'static str = "/api/v1/vision/qrcode";
pub const BOTMODELS_SPEECH_TO_TEXT: &'static str = "/api/v1/speech/to-text";
pub const BOTMODELS_VISION_DESCRIBE_VIDEO: &'static str = "/api/v1/vision/describe-video";
}
impl ApiUrls {
pub fn with_params(url: &str, params: &[(&str, &str)]) -> String {
let mut result = url.to_string();
for (key, value) in params {
result = result.replace(&format!(":{}", key), value);
result = result.replace(&format!(":{key}"), value);
}
result
}
pub fn with_query(url: &str, params: &[(&str, &str)]) -> String {
if params.is_empty() {
return url.to_string();
@ -184,10 +319,10 @@ impl ApiUrls {
let query = params
.iter()
.map(|(k, v)| format!("{}={}", k, urlencoding::encode(v)))
.map(|(k, v)| format!("{k}={}", urlencoding::encode(v)))
.collect::<Vec<_>>()
.join("&");
format!("{}?{}", url, query)
format!("{url}?{query}")
}
}

View file

@ -1,3 +1,5 @@
use crate::auto_task::get_designer_error_context;
use crate::core::urls::ApiUrls;
use crate::shared::state::AppState;
use axum::{
extract::{Query, State},
@ -65,19 +67,244 @@ pub struct ValidationWarning {
pub node_id: Option<String>,
}
/// Request body for the designer "magic" suggestion endpoint: a snapshot
/// of the visual designer canvas used to generate flow-level suggestions.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct MagicRequest {
    /// Nodes currently placed on the designer canvas.
    pub nodes: Vec<MagicNode>,
    /// Number of connections drawn between nodes on the canvas.
    pub connections: i32,
    /// Dialog file the canvas belongs to (not read by the visible
    /// suggestion heuristics — presumably used for context/logging).
    pub filename: String,
}
/// Request body for the editor "magic" endpoint: raw source code to review.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct EditorMagicRequest {
    /// The code snippet to analyze; an empty/whitespace-only value is rejected.
    pub code: String,
}
/// Reply from the editor "magic" endpoint. Typically either `improved_code`
/// (with an `explanation`) or `suggestions` is populated; all fields can be
/// `None` when no improvement is warranted.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct EditorMagicResponse {
    /// Rewritten code, when the reviewer produced one.
    pub improved_code: Option<String>,
    /// Human-readable summary of what changed (or why nothing did).
    pub explanation: Option<String>,
    /// Heuristic improvement hints when no rewrite is returned.
    pub suggestions: Option<Vec<MagicSuggestion>>,
}
/// A single node from the visual designer canvas.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct MagicNode {
    /// Node kind (e.g. "TALK", "HEAR", "IF", "SWITCH"); serialized as "type".
    #[serde(rename = "type")]
    pub node_type: String,
    /// Free-form node configuration; schema varies by node kind.
    pub fields: serde_json::Value,
}
/// One improvement suggestion returned to the designer/editor UI.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct MagicSuggestion {
    /// Category tag ("ux", "perf", "a11y", "feature"); serialized as "type".
    #[serde(rename = "type")]
    pub suggestion_type: String,
    /// Short headline shown to the user.
    pub title: String,
    /// Longer explanation of the suggested change.
    pub description: String,
}
pub fn configure_designer_routes() -> Router<Arc<AppState>> {
Router::new()
.route("/api/v1/designer/files", get(handle_list_files))
.route("/api/v1/designer/load", get(handle_load_file))
.route("/api/v1/designer/save", post(handle_save))
.route("/api/v1/designer/validate", post(handle_validate))
.route("/api/v1/designer/export", get(handle_export))
.route(ApiUrls::DESIGNER_FILES, get(handle_list_files))
.route(ApiUrls::DESIGNER_LOAD, get(handle_load_file))
.route(ApiUrls::DESIGNER_SAVE, post(handle_save))
.route(ApiUrls::DESIGNER_VALIDATE, post(handle_validate))
.route(ApiUrls::DESIGNER_EXPORT, get(handle_export))
.route(
"/api/designer/dialogs",
get(handle_list_dialogs).post(handle_create_dialog),
)
.route("/api/designer/dialogs/{id}", get(handle_get_dialog))
.route("/api/designer/modify", post(handle_designer_modify))
.route(ApiUrls::DESIGNER_MODIFY, post(handle_designer_modify))
.route("/api/v1/designer/magic", post(handle_magic_suggestions))
.route("/api/v1/editor/magic", post(handle_editor_magic))
}
pub async fn handle_editor_magic(
State(state): State<Arc<AppState>>,
Json(request): Json<EditorMagicRequest>,
) -> impl IntoResponse {
let code = request.code;
if code.trim().is_empty() {
return Json(EditorMagicResponse {
improved_code: None,
explanation: Some("No code provided".to_string()),
suggestions: None,
});
}
let prompt = format!(
r#"You are reviewing this HTMX application code. Analyze and improve it.
Focus on:
- Better HTMX patterns (reduce JS, use hx-* attributes properly)
- Accessibility (ARIA labels, keyboard navigation, semantic HTML)
- Performance (lazy loading, efficient selectors)
- UX (loading states, error handling, user feedback)
- Code organization (clean structure, no comments needed)
Current code:
```
{code}
```
Respond with JSON only:
{{
"improved_code": "the improved code here",
"explanation": "brief explanation of changes made"
}}
If the code is already good, respond with:
{{
"improved_code": null,
"explanation": "Code looks good, no improvements needed"
}}"#
);
#[cfg(feature = "llm")]
{
let config = serde_json::json!({
"temperature": 0.3,
"max_tokens": 4000
});
match state
.llm_provider
.generate(&prompt, &config, "gpt-4", "")
.await
{
Ok(response) => {
if let Ok(result) = serde_json::from_str::<EditorMagicResponse>(&response) {
return Json(result);
}
return Json(EditorMagicResponse {
improved_code: Some(response),
explanation: Some("AI suggestions".to_string()),
suggestions: None,
});
}
Err(e) => {
log::warn!("LLM call failed: {e}");
}
}
}
let _ = state;
let mut suggestions = Vec::new();
if !code.contains("hx-") {
suggestions.push(MagicSuggestion {
suggestion_type: "ux".to_string(),
title: "Use HTMX attributes".to_string(),
description: "Consider using hx-get, hx-post instead of JavaScript fetch calls."
.to_string(),
});
}
if !code.contains("hx-indicator") {
suggestions.push(MagicSuggestion {
suggestion_type: "ux".to_string(),
title: "Add loading indicators".to_string(),
description: "Use hx-indicator to show loading state during requests.".to_string(),
});
}
if !code.contains("aria-") && !code.contains("role=") {
suggestions.push(MagicSuggestion {
suggestion_type: "a11y".to_string(),
title: "Improve accessibility".to_string(),
description: "Add ARIA labels and roles for screen reader support.".to_string(),
});
}
if code.contains("onclick=") || code.contains("addEventListener") {
suggestions.push(MagicSuggestion {
suggestion_type: "perf".to_string(),
title: "Replace JS with HTMX".to_string(),
description: "HTMX can handle most interactions without custom JavaScript.".to_string(),
});
}
Json(EditorMagicResponse {
improved_code: None,
explanation: None,
suggestions: if suggestions.is_empty() {
None
} else {
Some(suggestions)
},
})
}
/// Produces design suggestions for a dialog flow built in the visual designer.
///
/// Inspects the submitted canvas snapshot (nodes + connection count) and
/// returns a JSON array of `MagicSuggestion` values. One accessibility tip
/// is always appended regardless of canvas contents.
pub async fn handle_magic_suggestions(
    State(state): State<Arc<AppState>>,
    Json(request): Json<MagicRequest>,
) -> impl IntoResponse {
    let nodes = &request.nodes;

    // Tally node kinds in a single pass over the canvas.
    let mut hear_count = 0usize;
    let mut talk_count = 0usize;
    let mut branch_count = 0usize;
    for node in nodes {
        match node.node_type.as_str() {
            "HEAR" => hear_count += 1,
            "TALK" => talk_count += 1,
            "IF" | "SWITCH" => branch_count += 1,
            _ => {}
        }
    }

    let mut suggestions = Vec::new();
    let mut add = |kind: &str, title: &str, body: &str| {
        suggestions.push(MagicSuggestion {
            suggestion_type: kind.to_string(),
            title: title.to_string(),
            description: body.to_string(),
        });
    };

    // Dialog talks but never listens.
    if hear_count == 0 && talk_count > 0 {
        add(
            "ux",
            "Add User Input",
            "Your dialog has no HEAR nodes. Consider adding user input to make it interactive.",
        );
    }
    // Wall-of-text dialogs.
    if talk_count > 5 {
        add(
            "ux",
            "Break Up Long Responses",
            "You have many TALK nodes. Consider grouping related messages or using a menu.",
        );
    }
    // Linear flow with no branching.
    if branch_count == 0 && nodes.len() > 3 {
        add(
            "feature",
            "Add Decision Logic",
            "Add IF or SWITCH nodes to handle different user responses dynamically.",
        );
    }
    // Fewer edges than a connected graph of this size requires.
    if nodes.len() > 1 && request.connections < (nodes.len() as i32 - 1) {
        add(
            "perf",
            "Check Connections",
            "Some nodes may not be connected. Ensure all nodes flow properly.",
        );
    }
    // Empty canvas: nudge toward a first node.
    if nodes.is_empty() {
        add(
            "feature",
            "Start with TALK",
            "Begin your dialog with a TALK node to greet the user.",
        );
    }
    // Always-on accessibility reminder.
    add(
        "a11y",
        "Use Clear Language",
        "Keep messages short and clear. Avoid jargon for better accessibility.",
    );

    // State is currently unused by the heuristics; keep the handler signature.
    let _ = state;
    Json(suggestions)
}
pub async fn handle_list_files(State(state): State<Arc<AppState>>) -> impl IntoResponse {
@ -881,12 +1108,15 @@ fn build_designer_prompt(request: &DesignerModifyRequest) -> String {
})
.unwrap_or_default();
let error_context = get_designer_error_context(&request.app_name).unwrap_or_default();
format!(
r#"You are a Designer AI assistant helping modify an HTMX-based application.
App Name: {}
Current Page: {}
{}
{}
User Request: "{}"
Analyze the request and respond with JSON describing the changes needed:
@ -915,6 +1145,7 @@ Respond with valid JSON only."#,
request.app_name,
request.current_page.as_deref().unwrap_or("index.html"),
context_info,
error_context,
request.message
)
}
@ -980,7 +1211,7 @@ async fn parse_and_apply_changes(
) -> Result<(Vec<DesignerChange>, String, Vec<String>), Box<dyn std::error::Error + Send + Sync>> {
#[derive(Deserialize)]
struct LlmChangeResponse {
understanding: Option<String>,
_understanding: Option<String>,
changes: Option<Vec<LlmChange>>,
message: Option<String>,
suggestions: Option<Vec<String>>,
@ -996,7 +1227,7 @@ async fn parse_and_apply_changes(
}
let parsed: LlmChangeResponse = serde_json::from_str(llm_response).unwrap_or(LlmChangeResponse {
understanding: Some("Could not parse LLM response".to_string()),
_understanding: Some("Could not parse LLM response".to_string()),
changes: None,
message: Some("I understood your request but encountered an issue processing it. Could you try rephrasing?".to_string()),
suggestions: Some(vec!["Try being more specific".to_string()]),

View file

@ -357,10 +357,6 @@ fn decrypt_password(encrypted: &str) -> Result<String, String> {
})
}
/// Add a new email account.
///
/// # Errors
/// Returns an error if authentication fails or database operations fail.
pub async fn add_email_account(
State(state): State<Arc<AppState>>,
Json(request): Json<EmailAccountRequest>,
@ -495,10 +491,6 @@ pub async fn list_email_accounts_htmx(State(state): State<Arc<AppState>>) -> imp
axum::response::Html(html)
}
/// List all email accounts for the current user.
///
/// # Errors
/// Returns an error if authentication fails or database operations fail.
pub async fn list_email_accounts(
State(state): State<Arc<AppState>>,
) -> Result<Json<ApiResponse<Vec<EmailAccountResponse>>>, EmailError> {
@ -590,10 +582,6 @@ pub async fn list_email_accounts(
}))
}
/// Delete an email account.
///
/// # Errors
/// Returns an error if the account ID is invalid or database operations fail.
pub async fn delete_email_account(
State(state): State<Arc<AppState>>,
Path(account_id): Path<String>,
@ -625,10 +613,6 @@ pub async fn delete_email_account(
}))
}
/// List emails from a specific account and folder.
///
/// # Errors
/// Returns an error if the account ID is invalid, IMAP connection fails, or emails cannot be fetched.
pub async fn list_emails(
State(state): State<Arc<AppState>>,
Json(request): Json<ListEmailsRequest>,
@ -768,10 +752,6 @@ pub async fn list_emails(
}))
}
/// Send an email from a specific account.
///
/// # Errors
/// Returns an error if the account ID is invalid, SMTP connection fails, or email cannot be sent.
pub async fn send_email(
State(state): State<Arc<AppState>>,
Json(request): Json<SendEmailRequest>,
@ -896,10 +876,6 @@ pub async fn send_email(
}))
}
/// Save an email draft.
///
/// # Errors
/// Returns an error if the account ID is invalid, authentication fails, or database operations fail.
pub async fn save_draft(
State(state): State<Arc<AppState>>,
Json(request): Json<SaveDraftRequest>,
@ -944,10 +920,6 @@ pub async fn save_draft(
}))
}
/// List all folders for an email account.
///
/// # Errors
/// Returns an error if the account ID is invalid, IMAP connection fails, or folders cannot be listed.
pub async fn list_folders(
State(state): State<Arc<AppState>>,
Path(account_id): Path<String>,
@ -1011,10 +983,6 @@ pub async fn list_folders(
}))
}
/// Get the latest email from a specific sender.
///
/// # Errors
/// Returns an error if the operation fails.
pub fn get_latest_email_from(
State(_state): State<Arc<AppState>>,
Json(_request): Json<serde_json::Value>,

View file

@ -23,10 +23,9 @@ pub fn configure() -> Router<Arc<AppState>> {
.route(ApiUrls::VOICE_STOP, post(voice_stop))
.route(ApiUrls::MEET_CREATE, post(create_meeting))
.route(ApiUrls::MEET_ROOMS, get(list_rooms))
.route("/api/meet/rooms", get(list_rooms_ui))
.route("/api/meet/recent", get(recent_meetings))
.route("/api/meet/participants", get(all_participants))
.route("/api/meet/scheduled", get(scheduled_meetings))
.route(ApiUrls::MEET_PARTICIPANTS, get(all_participants))
.route(ApiUrls::MEET_RECENT, get(recent_meetings))
.route(ApiUrls::MEET_SCHEDULED, get(scheduled_meetings))
.route(
&ApiUrls::MEET_ROOM_BY_ID.replace(":id", "{room_id}"),
get(get_room),
@ -182,8 +181,7 @@ pub async fn voice_start(
{
Ok(token) => {
info!(
"Voice session started successfully for session {}",
session_id
"Voice session started successfully for session {session_id}"
);
(
StatusCode::OK,
@ -192,8 +190,7 @@ pub async fn voice_start(
}
Err(e) => {
error!(
"Failed to start voice session for session {}: {}",
session_id, e
"Failed to start voice session for session {session_id}: {e}"
);
(
StatusCode::INTERNAL_SERVER_ERROR,
@ -215,8 +212,7 @@ pub async fn voice_stop(
match data.voice_adapter.stop_voice_session(session_id).await {
Ok(()) => {
info!(
"Voice session stopped successfully for session {}",
session_id
"Voice session stopped successfully for session {session_id}"
);
(
StatusCode::OK,
@ -225,8 +221,7 @@ pub async fn voice_stop(
}
Err(e) => {
error!(
"Failed to stop voice session for session {}: {}",
session_id, e
"Failed to stop voice session for session {session_id}: {e}"
);
(
StatusCode::INTERNAL_SERVER_ERROR,
@ -252,7 +247,7 @@ pub async fn create_meeting(
(StatusCode::OK, Json(serde_json::json!(room)))
}
Err(e) => {
error!("Failed to create meeting room: {}", e);
error!("Failed to create meeting room: {e}");
(
StatusCode::INTERNAL_SERVER_ERROR,
Json(serde_json::json!({"error": e.to_string()})),
@ -301,11 +296,11 @@ pub async fn join_room(
.await
{
Ok(participant) => {
info!("Participant {} joined room {}", participant.id, room_id);
info!("Participant {} joined room {room_id}", participant.id);
(StatusCode::OK, Json(serde_json::json!(participant)))
}
Err(e) => {
error!("Failed to join room {}: {}", room_id, e);
error!("Failed to join room {room_id}: {e}");
(
StatusCode::INTERNAL_SERVER_ERROR,
Json(serde_json::json!({"error": e.to_string()})),
@ -322,15 +317,15 @@ pub async fn start_transcription(
let meeting_service = MeetingService::new(state.clone(), transcription_service);
match meeting_service.start_transcription(&room_id).await {
Ok(_) => {
info!("Started transcription for room {}", room_id);
Ok(()) => {
info!("Started transcription for room {room_id}");
(
StatusCode::OK,
Json(serde_json::json!({"status": "transcription_started"})),
)
}
Err(e) => {
error!("Failed to start transcription for room {}: {}", room_id, e);
error!("Failed to start transcription for room {room_id}: {e}");
(
StatusCode::INTERNAL_SERVER_ERROR,
Json(serde_json::json!({"error": e.to_string()})),
@ -387,10 +382,10 @@ async fn handle_meeting_socket(_socket: axum::extract::ws::WebSocket, _state: Ar
info!("Meeting WebSocket connection established");
}
pub async fn list_rooms_ui(State(_state): State<Arc<AppState>>) -> Json<serde_json::Value> {
pub async fn all_participants(State(_state): State<Arc<AppState>>) -> Json<serde_json::Value> {
Json(serde_json::json!({
"rooms": [],
"message": "No active meeting rooms"
"participants": [],
"message": "No participants"
}))
}
@ -401,13 +396,6 @@ pub async fn recent_meetings(State(_state): State<Arc<AppState>>) -> Json<serde_
}))
}
pub async fn all_participants(State(_state): State<Arc<AppState>>) -> Json<serde_json::Value> {
Json(serde_json::json!({
"participants": [],
"message": "No participants"
}))
}
pub async fn scheduled_meetings(State(_state): State<Arc<AppState>>) -> Json<serde_json::Value> {
Json(serde_json::json!({
"meetings": [],

View file

@ -8,11 +8,6 @@ pub struct SystemMetrics {
pub cpu_usage: f32,
}
/// Gets current system metrics including CPU and GPU usage.
///
/// # Errors
///
/// Returns an error if GPU utilization query fails when an NVIDIA GPU is present.
pub fn get_system_metrics() -> Result<SystemMetrics> {
let mut sys = System::new();
sys.refresh_cpu_usage();
@ -28,7 +23,6 @@ pub fn get_system_metrics() -> Result<SystemMetrics> {
})
}
/// Checks if an NVIDIA GPU is present in the system.
#[must_use]
pub fn has_nvidia_gpu() -> bool {
match std::process::Command::new("nvidia-smi")
@ -41,14 +35,6 @@ pub fn has_nvidia_gpu() -> bool {
}
}
/// Gets GPU utilization metrics from nvidia-smi.
///
/// # Errors
///
/// Returns an error if:
/// - The nvidia-smi command fails to execute
/// - The command returns a non-success status
/// - The output cannot be parsed as UTF-8
pub fn get_gpu_utilization() -> Result<HashMap<String, f32>> {
let output = std::process::Command::new("nvidia-smi")
.arg("--query-gpu=utilization.gpu,utilization.memory")

View file

@ -1,3 +1,5 @@
pub mod web_search;
use crate::shared::state::AppState;
use axum::{
extract::{Path, State},
@ -57,6 +59,7 @@ pub struct CollectionRow {
pub fn configure_research_routes() -> Router<Arc<AppState>> {
Router::new()
.merge(web_search::configure_web_search_routes())
.route("/api/research/collections", get(handle_list_collections))
.route(
"/api/research/collections/new",

638
src/research/web_search.rs Normal file
View file

@ -0,0 +1,638 @@
use crate::shared::state::AppState;
use axum::{
extract::{Query, State},
response::{Html, IntoResponse},
routing::{get, post},
Json, Router,
};
use chrono::{DateTime, Utc};
use log::{debug, error, info};
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use std::fmt::Write;
use std::sync::Arc;
use std::time::Duration;
/// Body of `POST /api/research/web/search`.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct WebSearchRequest {
    /// The search query text.
    pub query: String,
    /// Maximum results to return; the handler clamps this to 25 (default 10).
    pub max_results: Option<usize>,
    /// DuckDuckGo region code (e.g. "wt-wt" for worldwide).
    pub region: Option<String>,
    /// Safe-search toggle. NOTE(review): not read by any handler in this file — confirm intent.
    pub safe_search: Option<bool>,
    /// Time-range filter. NOTE(review): not read by any handler in this file — confirm intent.
    pub time_range: Option<String>,
}
/// A single parsed search hit.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct WebSearchResult {
    /// Result title (HTML-entity-decoded).
    pub title: String,
    /// Destination URL (DuckDuckGo redirect links are unwrapped).
    pub url: String,
    /// Result snippet with HTML tags stripped.
    pub snippet: String,
    /// Domain the result came from.
    pub source: String,
    /// Favicon URL (built via Google's favicon service).
    pub favicon: Option<String>,
    /// Publication date; currently never populated by the parsers.
    pub published_date: Option<String>,
}
/// Response envelope for a web search.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct WebSearchResponse {
    pub results: Vec<WebSearchResult>,
    /// Echo of the requested query.
    pub query: String,
    pub total_results: usize,
    /// Wall-clock search duration in milliseconds.
    pub search_time_ms: u64,
    /// Search backend used ("duckduckgo", or "none" for empty queries).
    pub source: String,
}
/// Body of `POST /api/research/web/summarize`.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SummarizeRequest {
    /// Original query. NOTE(review): not read by the summarize handler — confirm intent.
    pub query: String,
    /// Results whose snippets are concatenated into the summary.
    pub results: Vec<WebSearchResult>,
    /// Maximum summary length in characters (default 500).
    pub max_length: Option<usize>,
}
/// Response of the summarize endpoint.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SummarizeResponse {
    pub summary: String,
    pub citations: Vec<Citation>,
    /// Heuristic confidence in [0.0, 1.0], based on result count.
    pub confidence: f32,
}
/// A numbered source reference attached to summaries and research answers.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Citation {
    /// 1-based citation index.
    pub index: usize,
    pub title: String,
    pub url: String,
    /// Heuristic relevance in [0.5, 1.0]; decays with result position.
    pub relevance: f32,
}
/// Body of `POST /api/research/web/deep`.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct DeepResearchRequest {
    pub query: String,
    /// Number of search passes; the handler clamps this to 3 (default 2).
    pub depth: Option<usize>,
    /// Source cap; the handler clamps this to 20 (default 10).
    pub max_sources: Option<usize>,
    /// NOTE(review): not read by the deep-research handler — confirm intent.
    pub follow_links: Option<bool>,
}
/// Response of the deep-research endpoint.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct DeepResearchResponse {
    /// Synthesized answer assembled from top snippets.
    pub answer: String,
    /// All deduplicated sources consulted.
    pub sources: Vec<WebSearchResult>,
    pub citations: Vec<Citation>,
    /// Suggested follow-up queries.
    pub related_queries: Vec<String>,
    /// Heuristic confidence in [0.0, 1.0], based on citation count.
    pub confidence: f32,
    /// Wall-clock research duration in milliseconds.
    pub research_time_ms: u64,
}
/// One entry in the (currently stubbed) search history listing.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SearchHistoryEntry {
    pub id: String,
    pub query: String,
    pub results_count: usize,
    pub timestamp: DateTime<Utc>,
}
/// Pagination query parameters for `GET /api/research/web/history`.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SearchHistoryQuery {
    /// 1-based page number (default 1).
    pub page: Option<i32>,
    /// Page size; the handler clamps this to 100 (default 20).
    pub per_page: Option<i32>,
}
/// Registers all web-search endpoints under `/api/research/web/*`.
///
/// Merged into the research router by `configure_research_routes`.
pub fn configure_web_search_routes() -> Router<Arc<AppState>> {
    Router::new()
        .route("/api/research/web/search", post(handle_web_search))
        .route("/api/research/web/summarize", post(handle_summarize))
        .route("/api/research/web/deep", post(handle_deep_research))
        .route("/api/research/web/history", get(handle_search_history))
        .route("/api/research/web/instant", get(handle_instant_answer))
}
pub async fn handle_web_search(
State(_state): State<Arc<AppState>>,
Json(payload): Json<WebSearchRequest>,
) -> impl IntoResponse {
let start_time = std::time::Instant::now();
if payload.query.trim().is_empty() {
return Json(WebSearchResponse {
results: Vec::new(),
query: payload.query,
total_results: 0,
search_time_ms: 0,
source: "none".to_string(),
});
}
let max_results = payload.max_results.unwrap_or(10).min(25);
let region = payload.region.as_deref().unwrap_or("wt-wt");
let results = match search_duckduckgo(&payload.query, max_results, region).await {
Ok(r) => r,
Err(e) => {
error!("DuckDuckGo search failed: {}", e);
Vec::new()
}
};
let search_time_ms = start_time.elapsed().as_millis() as u64;
Json(WebSearchResponse {
total_results: results.len(),
results,
query: payload.query,
search_time_ms,
source: "duckduckgo".to_string(),
})
}
/// `POST /api/research/web/summarize` — concatenates result snippets into a
/// summary, truncated to `max_length` characters (default 500) at the last
/// sentence boundary when possible, with one citation per input result.
///
/// Confidence scales linearly with result count, capped at 1.0.
pub async fn handle_summarize(
    State(_state): State<Arc<AppState>>,
    Json(payload): Json<SummarizeRequest>,
) -> impl IntoResponse {
    if payload.results.is_empty() {
        return Json(SummarizeResponse {
            summary: "No results to summarize.".to_string(),
            citations: Vec::new(),
            confidence: 0.0,
        });
    }
    let mut combined_text = String::new();
    let mut citations = Vec::new();
    for (idx, result) in payload.results.iter().enumerate() {
        let _ = writeln!(combined_text, "[{}] {}", idx + 1, result.snippet);
        citations.push(Citation {
            index: idx + 1,
            title: result.title.clone(),
            url: result.url.clone(),
            // Earlier results are weighted as more relevant; floor at 0.5.
            relevance: 1.0 - (idx as f32 * 0.1).min(0.5),
        });
    }
    let max_len = payload.max_length.unwrap_or(500);
    // Compare CHARACTER count, not byte length: the truncation below is
    // char-based, so a byte comparison would over-trigger (and behave
    // inconsistently) on multi-byte text.
    let summary = if combined_text.chars().count() > max_len {
        let mut truncated = combined_text.chars().take(max_len).collect::<String>();
        // Prefer ending on a complete sentence inside the window.
        if let Some(last_period) = truncated.rfind(". ") {
            truncated.truncate(last_period + 1);
        }
        truncated
    } else {
        combined_text
    };
    let confidence = (payload.results.len() as f32 / 10.0).min(1.0);
    Json(SummarizeResponse {
        summary,
        citations,
        confidence,
    })
}
/// `POST /api/research/web/deep` — multi-pass research: one initial search,
/// then (for depth > 1) extra searches over generated related queries, with
/// URL-level deduplication. The answer is synthesized from the first five
/// snippets; every source becomes a citation.
pub async fn handle_deep_research(
    State(_state): State<Arc<AppState>>,
    Json(payload): Json<DeepResearchRequest>,
) -> impl IntoResponse {
    let start_time = std::time::Instant::now();
    if payload.query.trim().is_empty() {
        return Json(DeepResearchResponse {
            answer: "Please provide a research query.".to_string(),
            sources: Vec::new(),
            citations: Vec::new(),
            related_queries: Vec::new(),
            confidence: 0.0,
            research_time_ms: 0,
        });
    }
    // Clamp caller-supplied limits to bound request fan-out.
    let depth = payload.depth.unwrap_or(2).min(3);
    let max_sources = payload.max_sources.unwrap_or(10).min(20);
    let mut all_results: Vec<WebSearchResult> = Vec::new();
    let mut seen_urls: std::collections::HashSet<String> = std::collections::HashSet::new();
    let initial_results = search_duckduckgo(&payload.query, max_sources, "wt-wt")
        .await
        .unwrap_or_default();
    for result in initial_results {
        // `insert` returns false for duplicates — one lookup instead of
        // a contains + insert pair.
        if seen_urls.insert(result.url.clone()) {
            all_results.push(result);
        }
    }
    if depth > 1 {
        let related_queries = generate_related_queries(&payload.query);
        for rq in related_queries.iter().take(depth - 1) {
            if let Ok(more_results) = search_duckduckgo(rq, 5, "wt-wt").await {
                for result in more_results {
                    if all_results.len() < max_sources && seen_urls.insert(result.url.clone()) {
                        all_results.push(result);
                    }
                }
            }
        }
    }
    let mut citations = Vec::new();
    let mut answer_parts: Vec<String> = Vec::new();
    for (idx, result) in all_results.iter().enumerate() {
        // Only the first five snippets feed the synthesized answer.
        if idx < 5 {
            // Plain clone — the previous `format!("{}", ...)` was a
            // needless allocation through the Display machinery.
            answer_parts.push(result.snippet.clone());
        }
        citations.push(Citation {
            index: idx + 1,
            title: result.title.clone(),
            url: result.url.clone(),
            relevance: 1.0 - (idx as f32 * 0.05).min(0.5),
        });
    }
    let answer = if answer_parts.is_empty() {
        format!("No results found for: {}", payload.query)
    } else {
        format!(
            "Based on {} sources about \"{}\":\n\n{}",
            all_results.len(),
            payload.query,
            answer_parts.join("\n\n")
        )
    };
    let related = generate_related_queries(&payload.query);
    let research_time_ms = start_time.elapsed().as_millis() as u64;
    let confidence = (citations.len() as f32 / 10.0).min(1.0);
    Json(DeepResearchResponse {
        answer,
        sources: all_results,
        citations,
        related_queries: related,
        confidence,
        research_time_ms,
    })
}
pub async fn handle_search_history(
State(_state): State<Arc<AppState>>,
Query(params): Query<SearchHistoryQuery>,
) -> impl IntoResponse {
let _page = params.page.unwrap_or(1).max(1);
let _per_page = params.per_page.unwrap_or(20).min(100);
let history: Vec<SearchHistoryEntry> = vec![
SearchHistoryEntry {
id: "1".to_string(),
query: "Example search 1".to_string(),
results_count: 10,
timestamp: Utc::now(),
},
SearchHistoryEntry {
id: "2".to_string(),
query: "Example search 2".to_string(),
results_count: 8,
timestamp: Utc::now(),
},
];
let mut html = String::new();
html.push_str("<div class=\"search-history\">");
if history.is_empty() {
html.push_str("<div class=\"empty-state\">");
html.push_str("<p>No search history yet</p>");
html.push_str("</div>");
} else {
for entry in &history {
html.push_str("<div class=\"history-item\" data-id=\"");
html.push_str(&html_escape(&entry.id));
html.push_str("\">");
html.push_str("<span class=\"history-query\">");
html.push_str(&html_escape(&entry.query));
html.push_str("</span>");
html.push_str("<span class=\"history-count\">");
html.push_str(&entry.results_count.to_string());
html.push_str(" results</span>");
html.push_str("</div>");
}
}
html.push_str("</div>");
Html(html)
}
/// `GET /api/research/web/instant?q=...` — proxies DuckDuckGo's instant
/// answer API; responds with `{"answer": null, "type": "none"}` when the
/// query is missing or no instant answer exists.
pub async fn handle_instant_answer(
    State(_state): State<Arc<AppState>>,
    Query(params): Query<HashMap<String, String>>,
) -> impl IntoResponse {
    let query = params.get("q").cloned().unwrap_or_default();
    if query.is_empty() {
        return Json(serde_json::json!({
            "answer": null,
            "type": "none"
        }));
    }
    match get_instant_answer(&query).await {
        Some((text, answer_type)) => Json(serde_json::json!({
            "answer": text,
            "type": answer_type,
            "source": "duckduckgo"
        })),
        None => Json(serde_json::json!({
            "answer": null,
            "type": "none"
        })),
    }
}
/// Fetches a DuckDuckGo HTML results page for `query` and parses it into
/// structured results.
///
/// # Errors
///
/// Returns an error if the HTTP client cannot be built, the request fails,
/// or the response body cannot be read as text.
async fn search_duckduckgo(
    query: &str,
    max_results: usize,
    region: &str,
) -> Result<Vec<WebSearchResult>, Box<dyn std::error::Error + Send + Sync>> {
    let client = reqwest::Client::builder()
        .timeout(Duration::from_secs(10))
        // A browser-like UA is required; the HTML endpoint blocks default
        // client agents.
        .user_agent("Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36")
        .build()?;
    let encoded_query = urlencoding::encode(query);
    // Inlined format args for consistency with the rest of the codebase.
    let url = format!("https://html.duckduckgo.com/html/?q={encoded_query}&kl={region}");
    debug!("Searching DuckDuckGo: {query}");
    let response = client.get(&url).send().await?;
    let html = response.text().await?;
    let results = parse_duckduckgo_html(&html, max_results);
    info!(
        "DuckDuckGo search for '{query}' returned {} results",
        results.len()
    );
    Ok(results)
}
/// Parses a DuckDuckGo HTML results page with a line-oriented scan
/// (no HTML-parser dependency).
///
/// State machine: a `result__a` line captures the pending title and URL;
/// a subsequent `result__snippet` line captures the snippet and flushes
/// the pending result. Falls back to `parse_duckduckgo_fallback` when the
/// scan yields nothing (e.g. markup collapsed onto fewer lines).
fn parse_duckduckgo_html(html: &str, max_results: usize) -> Vec<WebSearchResult> {
    let mut results = Vec::new();
    // Accumulators for the result currently being assembled.
    let mut current_title = String::new();
    let mut current_url = String::new();
    let mut current_snippet = String::new();
    for line in html.lines() {
        let line = line.trim();
        // Anchor line: carries both the href and the title text.
        if line.contains("class=\"result__a\"") {
            if let Some(href_start) = line.find("href=\"") {
                let start = href_start + 6; // skip past `href="`
                if let Some(href_end) = line[start..].find('"') {
                    let raw_url = &line[start..start + href_end];
                    // Unwrap DuckDuckGo's /l/?uddg= redirect wrapper.
                    current_url = decode_ddg_url(raw_url);
                }
            }
            // Title is the text between the first `>` and the next `<`.
            if let Some(title_start) = line.find('>') {
                let after_tag = &line[title_start + 1..];
                if let Some(title_end) = after_tag.find('<') {
                    current_title = html_decode(&after_tag[..title_end]);
                }
            }
        }
        // Snippet line: completes and flushes the pending result.
        if line.contains("class=\"result__snippet\"") {
            if let Some(snippet_start) = line.find('>') {
                let after_tag = &line[snippet_start + 1..];
                let snippet_text = strip_html_inline(after_tag);
                current_snippet = html_decode(&snippet_text);
            }
            // Only emit when a title/URL pair was seen earlier; a snippet
            // without them is discarded.
            if !current_title.is_empty() && !current_url.is_empty() {
                let domain = extract_domain(&current_url);
                results.push(WebSearchResult {
                    title: current_title.clone(),
                    url: current_url.clone(),
                    snippet: current_snippet.clone(),
                    source: domain.clone(),
                    favicon: Some(format!(
                        "https://www.google.com/s2/favicons?domain={}",
                        domain
                    )),
                    published_date: None,
                });
                current_title.clear();
                current_url.clear();
                current_snippet.clear();
                if results.len() >= max_results {
                    break;
                }
            }
        }
    }
    // Line-based scan found nothing — try the block-splitting fallback.
    if results.is_empty() {
        results = parse_duckduckgo_fallback(html, max_results);
    }
    results
}
/// Fallback parser: splits the page on `class="result ` block boundaries
/// instead of scanning line-by-line, then extracts URL, title, and snippet
/// from each block. Used when `parse_duckduckgo_html` finds no results.
fn parse_duckduckgo_fallback(html: &str, max_results: usize) -> Vec<WebSearchResult> {
    let mut results = Vec::new();
    // First split element is the pre-results page header — skipped below.
    let parts: Vec<&str> = html.split("class=\"result ").collect();
    for part in parts.iter().skip(1).take(max_results) {
        let mut title = String::new();
        let mut url = String::new();
        let mut snippet = String::new();
        // Anchor section: href and title text.
        if let Some(a_start) = part.find("class=\"result__a\"") {
            let section = &part[a_start..];
            if let Some(href_pos) = section.find("href=\"") {
                let start = href_pos + 6; // skip past `href="`
                if let Some(end) = section[start..].find('"') {
                    // Unwrap DuckDuckGo's /l/?uddg= redirect wrapper.
                    url = decode_ddg_url(&section[start..start + end]);
                }
            }
            if let Some(text_start) = section.find('>') {
                let after = &section[text_start + 1..];
                if let Some(text_end) = after.find('<') {
                    title = html_decode(&after[..text_end]);
                }
            }
        }
        // Snippet section: strip tags, then trim any trailing markup tail.
        if let Some(snippet_start) = part.find("class=\"result__snippet\"") {
            let section = &part[snippet_start..];
            if let Some(text_start) = section.find('>') {
                let after = &section[text_start + 1..];
                let text = strip_html_inline(after);
                snippet = html_decode(&text);
                if let Some(end) = snippet.find("</") {
                    snippet.truncate(end);
                }
            }
        }
        // A block without both title and URL is not a usable result.
        if !title.is_empty() && !url.is_empty() {
            let domain = extract_domain(&url);
            results.push(WebSearchResult {
                title,
                url: url.clone(),
                snippet,
                source: domain.clone(),
                favicon: Some(format!(
                    "https://www.google.com/s2/favicons?domain={}",
                    domain
                )),
                published_date: None,
            });
        }
    }
    results
}
fn decode_ddg_url(raw_url: &str) -> String {
if raw_url.starts_with("//duckduckgo.com/l/?uddg=") {
let encoded_part = raw_url.trim_start_matches("//duckduckgo.com/l/?uddg=");
if let Some(amp_pos) = encoded_part.find('&') {
let url_part = &encoded_part[..amp_pos];
return urlencoding::decode(url_part)
.map(|s| s.to_string())
.unwrap_or_else(|_| raw_url.to_string());
}
return urlencoding::decode(encoded_part)
.map(|s| s.to_string())
.unwrap_or_else(|_| raw_url.to_string());
}
if raw_url.starts_with("http") {
return raw_url.to_string();
}
format!("https:{}", raw_url)
}
/// Extracts the host portion of a URL: strips a leading `https://` or
/// `http://` scheme and returns everything before the first `/`.
fn extract_domain(url: &str) -> String {
    let host_and_path = url
        .trim_start_matches("https://")
        .trim_start_matches("http://");
    match host_and_path.find('/') {
        Some(slash) => host_and_path[..slash].to_string(),
        None => host_and_path.to_string(),
    }
}
/// Drops everything between `<` and `>` pairs, keeping only text outside
/// tags, and trims surrounding whitespace from the result.
///
/// A stray `>` outside any tag is dropped (it resets the tag state rather
/// than being emitted).
fn strip_html_inline(s: &str) -> String {
    let mut text = String::with_capacity(s.len());
    let mut inside_tag = false;
    for ch in s.chars() {
        if ch == '<' {
            inside_tag = true;
        } else if ch == '>' {
            inside_tag = false;
        } else if !inside_tag {
            text.push(ch);
        }
    }
    text.trim().to_string()
}
/// Decodes the small set of HTML entities DuckDuckGo emits in titles and
/// snippets.
///
/// `&amp;` is decoded LAST: decoding it first would turn a literal
/// `&amp;lt;` into `&lt;` and then into `<` — a double decode.
fn html_decode(s: &str) -> String {
    s.replace("&lt;", "<")
        .replace("&gt;", ">")
        .replace("&quot;", "\"")
        .replace("&#39;", "'")
        .replace("&nbsp;", " ")
        .replace("&#x27;", "'")
        .replace("&#x2F;", "/")
        .replace("&amp;", "&")
}
/// Escapes the five HTML-significant characters (`& < > " '`) so `s` can
/// be safely interpolated into markup. Single pass over the input.
fn html_escape(s: &str) -> String {
    let mut out = String::with_capacity(s.len());
    for ch in s.chars() {
        match ch {
            '&' => out.push_str("&amp;"),
            '<' => out.push_str("&lt;"),
            '>' => out.push_str("&gt;"),
            '"' => out.push_str("&quot;"),
            '\'' => out.push_str("&#39;"),
            _ => out.push(ch),
        }
    }
    out
}
/// Builds up to five follow-up query suggestions for `query`.
///
/// The previous implementation pushed the shortened two-word variant as a
/// sixth element and then applied `take(5)`, so it could never appear in
/// the output. It is now pushed first, so the five-item cap trims a
/// template instead of the shortened query.
fn generate_related_queries(query: &str) -> Vec<String> {
    let base_words: Vec<&str> = query.split_whitespace().collect();
    let mut related = Vec::with_capacity(6);
    if base_words.len() > 2 {
        related.push(base_words[..2].join(" "));
    }
    related.push(format!("what is {query}"));
    related.push(format!("{query} explained"));
    related.push(format!("{query} examples"));
    related.push(format!("how does {query} work"));
    related.push(format!("{query} vs alternatives"));
    related.truncate(5);
    related
}
/// Queries DuckDuckGo's instant-answer JSON API and returns the first
/// non-empty field among `AbstractText`, `Answer`, `Definition`, paired
/// with an answer-type label. Returns `None` on any network/parse failure
/// or when no field is populated.
async fn get_instant_answer(query: &str) -> Option<(String, String)> {
    let client = reqwest::Client::builder()
        .timeout(Duration::from_secs(5))
        .build()
        .ok()?;
    let url = format!(
        "https://api.duckduckgo.com/?q={}&format=json&no_html=1&skip_disambig=1",
        urlencoding::encode(query)
    );
    let json: serde_json::Value = client.get(&url).send().await.ok()?.json().await.ok()?;
    // Shared accessor: a string field counts only when non-empty.
    let field = |key: &str| {
        json.get(key)
            .and_then(|v| v.as_str())
            .filter(|text| !text.is_empty())
    };
    if let Some(abstract_text) = field("AbstractText") {
        // Abstract answers carry DuckDuckGo's own "Type" code ("A" default).
        let answer_type = json.get("Type").and_then(|v| v.as_str()).unwrap_or("A");
        return Some((abstract_text.to_string(), answer_type.to_string()));
    }
    if let Some(answer) = field("Answer") {
        return Some((answer.to_string(), "answer".to_string()));
    }
    field("Definition").map(|definition| (definition.to_string(), "definition".to_string()))
}

File diff suppressed because it is too large Load diff

View file

@ -1,3 +1,4 @@
pub mod knowledge_base;
pub mod mcp;
use crate::basic::keywords::mcp_directory::{generate_example_configs, McpCsvLoader, McpCsvRow};
@ -148,6 +149,7 @@ pub struct AppInfo {
pub fn configure_sources_routes() -> Router<Arc<AppState>> {
Router::new()
.merge(knowledge_base::configure_knowledge_base_routes())
.route("/api/sources/prompts", get(handle_prompts))
.route("/api/sources/templates", get(handle_templates))
.route("/api/sources/news", get(handle_news))