feat: add cron dependency and theme broadcast functionality

Add the `cron` crate (v0.15.0) to Cargo.toml and Cargo.lock, and rewrite the scheduler loop in `src/automation/mod.rs` to parse schedules with `cron::Schedule` instead of the hand-rolled cron matcher.
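A minimal sketch of the new scheduling check follows: parse the stored expression and fire when the next occurrence is less than a minute away, mirroring `check_scheduled_tasks` in the diff below. The function name and sample expression are illustrative only; note that the `cron` crate uses a seconds-first expression format.

```rust
use chrono::Utc;
use cron::Schedule;
use std::str::FromStr;

/// Returns true when the cron expression's next firing time is under a minute away.
fn due_within_a_minute(expr: &str) -> bool {
    // `cron::Schedule` implements FromStr; invalid expressions simply never fire here.
    let schedule = match Schedule::from_str(expr) {
        Ok(s) => s,
        Err(_) => return false,
    };
    // `upcoming` yields future firing times in order; compare the first one to now.
    match schedule.upcoming(Utc).next() {
        Some(next) => (next - Utc::now()).num_minutes() < 1,
        None => false,
    }
}

fn main() {
    // "0 */5 * * * *" = second 0 of every 5th minute (seconds-first format).
    println!("due: {}", due_within_a_minute("0 */5 * * * *"));
}
```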
Introduce a new `broadcast_theme_change` helper in the drive monitor that parses `theme-*` entries from CSV config data and pushes JSON theme update events to all active response channels.
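In simplified form, the helper maps `theme-*` CSV rows into a `change_theme` JSON event; the sketch below covers only that mapping (the function name is illustrative, and the fan-out over `response_channels` into `BotResponse` messages is omitted).

```rust
use serde_json::json;

/// Builds the `change_theme` event payload from .gbot config CSV content.
fn theme_event_from_csv(csv_content: &str) -> serde_json::Value {
    let mut event = json!({ "event": "change_theme", "data": {} });
    for line in csv_content.lines() {
        let parts: Vec<&str> = line.split(',').collect();
        if parts.len() < 2 {
            continue;
        }
        let (key, value) = (parts[0].trim(), parts[1].trim());
        // Only theme-* keys are mapped into the payload; everything else is ignored.
        let field = match key {
            "theme-color1" => "color1",
            "theme-color2" => "color2",
            "theme-logo" => "logo_url",
            "theme-title" => "title",
            "theme-logo-text" => "logo_text",
            _ => continue,
        };
        event["data"][field] = json!(value);
    }
    event
}

fn main() {
    let csv = "name,value\ntheme-color1,#0d2b55\ntheme-title, Custom";
    println!("{}", theme_event_from_csv(csv));
}
```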
Clean up unused imports across the automation, compiler, config, and drive monitor modules, and add a `ConfigManager` import to the automation module for configuration lookups (e.g. the `prompt-compact` threshold).
Update `add-req.sh` to adjust the list of processed directories (comment out `auth`, enable `basic`, `config`, `context`, and `drive_monitor`).
These changes lay the groundwork for scheduled tasks and dynamic theme updates across the application.
Rodrigo Rodriguez (Pragmatismo), 2025-11-05 21:10:03 -03:00
commit b614d2650a (parent 04c2d0d0ce)
9 changed files with 423 additions and 690 deletions

Cargo.lock

@ -1106,6 +1106,7 @@ dependencies = [
"base64 0.22.1",
"bytes",
"chrono",
"cron",
"csv",
"diesel",
"dotenvy",
@ -1571,6 +1572,17 @@ dependencies = [
"cfg-if",
]
[[package]]
name = "cron"
version = "0.15.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5877d3fbf742507b66bc2a1945106bd30dd8504019d596901ddd012a4dd01740"
dependencies = [
"chrono",
"once_cell",
"winnow",
]
[[package]]
name = "crossbeam-utils"
version = "0.8.21"
@ -6167,6 +6179,15 @@ version = "0.53.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d6bbff5f0aada427a1e5a6da5f1f98158182f26556f345ac9e04d36d0ebed650"
[[package]]
name = "winnow"
version = "0.6.26"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1e90edd2ac1aa278a5c4599b1d89cf03074b610800f866d4026dc199d7929a28"
dependencies = [
"memchr",
]
[[package]]
name = "wit-bindgen"
version = "0.46.0"

Cargo.toml

@ -57,6 +57,7 @@ aws-sdk-s3 = { version = "1.109.0", features = ["behavior-version-latest"] }
base64 = "0.22"
bytes = "1.8"
chrono = { version = "0.4", features = ["serde"] }
cron = "0.15.0"
csv = "1.3"
diesel = { version = "2.1", features = ["postgres", "uuid", "chrono", "serde_json"] }
dotenvy = "0.15"

add-req.sh

@ -21,15 +21,15 @@ for file in "${prompts[@]}"; do
done
dirs=(
"auth"
# "auth"
# "automation"
# "basic"
"basic"
# "bootstrap"
"bot"
# "channels"
# "config"
# "context"
# "drive_monitor"
"config"
"context"
"drive_monitor"
# "email"
# "file"
# "kb"

src/automation/mod.rs

@ -1,449 +1,163 @@
use crate::shared::models::schema::bots::dsl::*;
use diesel::prelude::*;
use crate::basic::ScriptService;
use crate::config::ConfigManager;
use crate::shared::models::{Automation, TriggerKind};
use crate::shared::state::AppState;
use chrono::{DateTime, Datelike, Timelike, Utc};
use log::{debug, error, info, trace, warn};
use std::path::Path;
use chrono::Utc;
use cron::Schedule;
use diesel::prelude::*;
use log::{error, info};
use std::str::FromStr;
use std::sync::Arc;
use tokio::time::Duration;
use uuid::Uuid;
use tokio::time::{interval, Duration};
pub struct AutomationService {
state: Arc<AppState>
state: Arc<AppState>,
}
impl AutomationService {
pub fn new(state: Arc<AppState>) -> Self {
Self {
state
}
Self { state }
}
pub fn spawn(self) -> tokio::task::JoinHandle<()> {
trace!("Spawning AutomationService background task");
let service = Arc::new(self);
tokio::task::spawn_local({
let service = service.clone();
async move {
pub async fn spawn(self) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
info!("Automation service started");
let mut ticker = interval(Duration::from_secs(60));
let mut interval = tokio::time::interval(Duration::from_secs(15));
let mut last_check = Utc::now();
loop {
interval.tick().await;
trace!("Automation cycle tick started; last_check={}", last_check);
if let Err(e) = service.run_cycle(&mut last_check).await {
error!("Automation cycle error: {}", e);
}
trace!("Automation cycle tick completed");
ticker.tick().await;
if let Err(e) = self.check_scheduled_tasks().await {
error!("Error checking scheduled tasks: {}", e);
}
}
})
}
async fn run_cycle(
&self,
last_check: &mut DateTime<Utc>,
) -> Result<(), Box<dyn std::error::Error>> {
trace!("Running automation cycle; last_check={}", last_check);
let automations = self.load_active_automations().await?;
trace!("Loaded {} active automations", automations.len());
self.check_table_changes(&automations, *last_check).await;
self.process_schedules(&automations).await;
*last_check = Utc::now();
trace!("Automation cycle finished; new last_check={}", last_check);
Ok(())
}
async fn check_scheduled_tasks(&self) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
use crate::shared::models::system_automations::dsl::{system_automations, is_active, kind, id, last_triggered as lt_column};
async fn load_active_automations(&self) -> Result<Vec<Automation>, diesel::result::Error> {
trace!("Loading active automations from database");
use crate::shared::models::system_automations::dsl::*;
let result = {
let mut conn = self.state.conn.lock().unwrap();
system_automations
let mut conn = self.state.conn.lock().map_err(|e| format!("Failed to acquire lock: {}", e))?;
let automations: Vec<Automation> = system_automations
.filter(is_active.eq(true))
.load::<Automation>(&mut *conn)
}; // conn is dropped here
trace!("Database query for active automations completed");
result.map_err(Into::into)
}
.filter(kind.eq(TriggerKind::Scheduled as i32))
.load::<Automation>(&mut *conn)?;
async fn check_table_changes(&self, automations: &[Automation], since: DateTime<Utc>) {
trace!("Checking table changes since={}", since);
for automation in automations {
trace!(
"Checking automation id={} kind={} target={:?}",
automation.id,
automation.kind,
automation.target
);
let trigger_kind = match TriggerKind::from_i32(automation.kind) {
Some(k) => k,
None => {
trace!("Skipping automation {}: invalid TriggerKind", automation.id);
continue;
}
};
if !matches!(
trigger_kind,
TriggerKind::TableUpdate | TriggerKind::TableInsert | TriggerKind::TableDelete
) {
trace!(
"Skipping automation {}: trigger_kind {:?} not table-related",
automation.id,
trigger_kind
);
continue;
}
let table = match &automation.target {
Some(t) => t,
None => {
trace!("Skipping automation {}: no table target", automation.id);
continue;
}
};
let column = match trigger_kind {
TriggerKind::TableInsert => "created_at",
_ => "updated_at",
};
trace!(
"Building query for table='{}' column='{}' trigger_kind={:?}",
table,
column,
trigger_kind
);
let query = format!(
"SELECT COUNT(*) as count FROM {} WHERE {} > $1",
table, column
);
#[derive(diesel::QueryableByName)]
struct CountResult {
#[diesel(sql_type = diesel::sql_types::BigInt)]
count: i64,
}
let count_result = {
let mut conn_guard = self.state.conn.lock().unwrap();
let conn = &mut *conn_guard;
diesel::sql_query(&query)
.bind::<diesel::sql_types::Timestamp, _>(since.naive_utc())
.get_result::<CountResult>(conn)
}; // conn_guard is dropped here
match count_result {
Ok(result) if result.count > 0 => {
trace!(
"Detected {} change(s) in table='{}'; triggering automation {}",
result.count,
table,
automation.id
);
if let Err(e) = self.execute_action(automation).await {
error!("Error executing automation {}: {}", automation.id, e);
}
self.update_last_triggered(automation.id).await;
}
Ok(result) => {
trace!(
"No changes detected for automation {} (count={})",
automation.id,
result.count
);
}
Err(e) => {
error!("Error checking changes for table '{}': {}", table, e);
}
}
}
}
async fn process_schedules(&self, automations: &[Automation]) {
if let Some(schedule_str) = &automation.schedule {
if let Ok(parsed_schedule) = Schedule::from_str(schedule_str) {
let now = Utc::now();
trace!(
"Processing scheduled automations at UTC={}",
now.format("%Y-%m-%d %H:%M:%S")
);
for automation in automations {
if let Some(TriggerKind::Scheduled) = TriggerKind::from_i32(automation.kind) {
trace!(
"Evaluating schedule pattern={:?} for automation {}",
automation.schedule,
automation.id
);
if let Some(pattern) = &automation.schedule {
if Self::should_run_cron(pattern, now.timestamp()) {
debug!(
"Pattern matched; executing automation {} param='{}'",
automation.id,
automation.param
);
if let Err(e) = self.execute_action(automation).await {
error!("Error executing automation {}: {}", automation.id, e);
let next_run = parsed_schedule.upcoming(Utc).next();
if let Some(next_time) = next_run {
let time_until_next = next_time - now;
if time_until_next.num_minutes() < 1 {
if let Some(last_triggered) = automation.last_triggered {
if (now - last_triggered).num_minutes() < 1 {
continue;
}
self.update_last_triggered(automation.id).await;
} else {
trace!("Pattern did not match for automation {}", automation.id);
}
self.execute_automation(&automation).await?;
diesel::update(system_automations.filter(id.eq(automation.id)))
.set(lt_column.eq(Some(now)))
.execute(&mut *conn)?;
}
}
}
}
}
async fn update_last_triggered(&self, automation_id: Uuid) {
trace!(
"Updating last_triggered for automation_id={}",
automation_id
);
use crate::shared::models::system_automations::dsl::*;
let now = Utc::now();
let result = {
let mut conn = self.state.conn.lock().unwrap();
diesel::update(system_automations.filter(id.eq(automation_id)))
.set(last_triggered.eq(now.naive_utc()))
.execute(&mut *conn)
}; // conn is dropped here
if let Err(e) = result {
error!(
"Failed to update last_triggered for automation {}: {}",
automation_id, e
);
} else {
trace!("Successfully updated last_triggered for {}", automation_id);
}
}
fn should_run_cron(pattern: &str, timestamp: i64) -> bool {
trace!(
"Evaluating cron pattern='{}' at timestamp={}",
pattern,
timestamp
);
let parts: Vec<&str> = pattern.split_whitespace().collect();
if parts.len() != 5 {
trace!("Invalid cron pattern '{}'", pattern);
return false;
}
let dt = match DateTime::<Utc>::from_timestamp(timestamp, 0) {
Some(dt) => dt,
None => {
trace!("Invalid timestamp={}", timestamp);
return false;
}
};
let minute = dt.minute() as i32;
let hour = dt.hour() as i32;
let day = dt.day() as i32;
let month = dt.month() as i32;
let weekday = dt.weekday().num_days_from_monday() as i32;
// More strict matching with additional logging
let minute_match = Self::cron_part_matches(parts[0], minute);
let hour_match = Self::cron_part_matches(parts[1], hour);
let day_match = Self::cron_part_matches(parts[2], day);
let month_match = Self::cron_part_matches(parts[3], month);
let weekday_match = Self::cron_part_matches(parts[4], weekday);
let match_result = minute_match && hour_match && day_match && month_match && weekday_match;
trace!(
"Cron pattern='{}' result={} at {} (minute={}, hour={}, day={}, month={}, weekday={})",
pattern,
match_result,
dt,
minute_match,
hour_match,
day_match,
month_match,
weekday_match
);
match_result
}
fn cron_part_matches(part: &str, value: i32) -> bool {
trace!("Checking cron part '{}' against value={}", part, value);
if part == "*" {
return true;
}
if part.contains('/') {
let parts: Vec<&str> = part.split('/').collect();
if parts.len() != 2 {
return false;
}
let step: i32 = parts[1].parse().unwrap_or(1);
if parts[0] == "*" {
return value % step == 0;
}
}
part.parse::<i32>().map_or(false, |num| num == value)
}
async fn execute_action(&self, automation: &Automation) -> Result<(), Box<dyn std::error::Error>> {
let bot_id = automation.bot_id;
let param = &automation.param;
trace!("Starting execute_action for bot_id={} param='{}'", bot_id, param);
let redis_key = format!("job:running:{}:{}", bot_id, param);
trace!("Redis key for job tracking: {}", redis_key);
if let Some(redis_client) = &self.state.cache {
match redis_client.get_multiplexed_async_connection().await {
Ok(mut conn) => {
trace!("Connected to Redis; checking if job '{}' is running", param);
// Use SET with NX (only set if not exists) and EX (expire) for atomic operation
let set_result: Result<String, redis::RedisError> = redis::cmd("SET")
.arg(&redis_key)
.arg("1")
.arg("NX")
.arg("EX")
.arg(300)
.query_async(&mut conn)
.await;
match set_result {
Ok(res) if res == "OK" => {
trace!("Acquired lock for job '{}'", param);
}
Ok(_) => {
warn!(
"Job '{}' is already running for bot '{}'; skipping execution",
param, bot_id
);
return Ok(());
}
Err(e) => {
warn!("Redis error checking job status for '{}': {}", param, e);
return Ok(()); // Skip execution if we can't verify lock status
}
}
}
Err(e) => {
warn!("Failed to connect to Redis for job tracking: {}", e);
return Ok(()); // Skip execution if we can't connect to Redis
}
}
} else {
warn!("Redis client not available for job tracking");
return Ok(()); // Skip execution if Redis isn't configured
}
let bot_name: String = {
let mut db_conn = self.state.conn.lock().unwrap();
bots.filter(id.eq(&bot_id))
.select(name)
.first(&mut *db_conn)
.map_err(|e| {
error!("Failed to query bot name for {}: {}", bot_id, e);
e
})?
};
let script_name = param.strip_suffix(".bas").unwrap_or(param);
let path_str = format!("./work/{}.gbai/{}.gbdialog/{}.ast",
bot_name,
bot_name,
script_name
);
let full_path = Path::new(&path_str);
trace!("Resolved full path: {}", full_path.display());
let script_content = match tokio::fs::read_to_string(&full_path).await {
Ok(content) => {
trace!("Script '{}' read successfully", param);
content
}
Err(e) => {
error!(
"Failed to read script '{}' at {}: {}",
param,
full_path.display(),
e
);
self.cleanup_job_flag(&bot_id, param).await;
return Ok(());
}
};
let user_session = crate::shared::models::UserSession {
id: Uuid::new_v4(),
user_id: Uuid::new_v4(),
bot_id,
title: "Automation".to_string(),
current_tool: None,
context_data: serde_json::Value::Null,
created_at: Utc::now(),
updated_at: Utc::now(),
};
trace!(
"Created temporary UserSession id={} for bot_id={}",
user_session.id,
bot_id
);
let result = {
let script_service = ScriptService::new(Arc::clone(&self.state), user_session);
let ast = match script_service.compile(&script_content) {
Ok(ast) => {
trace!("Compilation successful for script '{}'", param);
ast
}
Err(e) => {
error!("Error compiling script '{}': {}", param, e);
self.cleanup_job_flag(&bot_id, param).await;
return Ok(());
}
};
trace!("Running compiled script '{}'", param);
script_service.run(&ast)
}; // script_service and ast are dropped here
match result {
Ok(_) => {
info!("Script '{}' executed successfully", param);
}
Err(e) => {
error!("Error executing script '{}': {}", param, e);
}
}
trace!("Cleaning up Redis flag for job '{}'", param);
self.cleanup_job_flag(&bot_id, param).await;
trace!("Finished execute_action for '{}'", param);
Ok(())
}
async fn cleanup_job_flag(&self, bot_id: &Uuid, param: &str) {
trace!(
"Cleaning up Redis flag for bot_id={} param='{}'",
bot_id,
param
);
let redis_key = format!("job:running:{}:{}", bot_id, param);
async fn execute_automation(&self, automation: &Automation) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
info!("Executing automation: {}", automation.param);
if let Some(redis_client) = &self.state.cache {
match redis_client.get_multiplexed_async_connection().await {
Ok(mut conn) => {
let _: Result<(), redis::RedisError> = redis::cmd("DEL")
.arg(&redis_key)
.query_async(&mut conn)
.await;
trace!("Removed Redis key '{}'", redis_key);
let config_manager = ConfigManager::new(Arc::clone(&self.state.conn));
let bot_name: String = {
use crate::shared::models::schema::bots::dsl::*;
let mut conn = self.state.conn.lock().map_err(|e| format!("Lock failed: {}", e))?;
bots.filter(id.eq(automation.bot_id))
.select(name)
.first(&mut *conn)?
};
let script_path = format!("./work/{}.gbai/{}.gbdialog/{}.ast", bot_name, bot_name, automation.param);
let script_content = match tokio::fs::read_to_string(&script_path).await {
Ok(content) => content,
Err(e) => {
error!("Failed to read script {}: {}", script_path, e);
return Ok(());
}
};
let session = {
let mut sm = self.state.session_manager.lock().await;
let admin_user = uuid::Uuid::nil();
sm.get_or_create_user_session(admin_user, automation.bot_id, "Automation")?.ok_or("Failed to create session")?
};
let script_service = ScriptService::new(Arc::clone(&self.state), session);
match script_service.compile(&script_content) {
Ok(ast) => {
if let Err(e) = script_service.run(&ast) {
error!("Script execution failed: {}", e);
}
}
Err(e) => {
warn!("Failed to connect to Redis for cleanup: {}", e);
}
}
}
error!("Script compilation failed: {}", e);
}
}
Ok(())
}
async fn execute_compact_prompt(&self, automation: &Automation) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
info!("Executing prompt compaction for bot: {}", automation.bot_id);
let config_manager = ConfigManager::new(Arc::clone(&self.state.conn));
let compact_threshold = config_manager
.get_config(&automation.bot_id, "prompt-compact", None)?
.parse::<usize>()
.unwrap_or(0);
if compact_threshold == 0 {
return Ok(());
}
let mut session_manager = self.state.session_manager.lock().await;
let sessions = session_manager.get_user_sessions(uuid::Uuid::nil())?;
for session in sessions {
if session.bot_id != automation.bot_id {
continue;
}
let history = session_manager.get_conversation_history(session.id, session.user_id)?;
if history.len() > compact_threshold {
info!("Compacting prompt for session {}: {} messages", session.id, history.len());
let mut compacted = String::new();
for (role, content) in &history[..history.len() - compact_threshold] {
compacted.push_str(&format!("{}: {}\n", role, content));
}
let summarized = format!("SUMMARY: {}", compacted);
session_manager.save_message(
session.id,
session.user_id,
3,
&summarized,
1
)?;
}
}
Ok(())
}
}


@ -1,6 +1,6 @@
use crate::shared::state::AppState;
use crate::basic::keywords::set_schedule::execute_set_schedule;
use log::{debug, info, warn};
use log::{info, warn};
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use diesel::QueryDsl;
@ -13,8 +13,6 @@ use std::fs;
use std::path::Path;
use std::sync::Arc;
/// Represents a PARAM declaration in BASIC
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ParamDeclaration {
pub name: String,
@ -24,7 +22,6 @@ pub struct ParamDeclaration {
pub required: bool,
}
/// Represents a BASIC tool definition
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ToolDefinition {
pub name: String,
@ -33,7 +30,6 @@ pub struct ToolDefinition {
pub source_file: String,
}
/// MCP tool format (Model Context Protocol)
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct MCPTool {
pub name: String,
@ -58,7 +54,6 @@ pub struct MCPProperty {
pub example: Option<String>,
}
/// OpenAI tool format
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct OpenAITool {
#[serde(rename = "type")]
@ -90,11 +85,10 @@ pub struct OpenAIProperty {
pub example: Option<String>,
}
/// BASIC Compiler
pub struct BasicCompiler {
state: Arc<AppState>,
bot_id: uuid::Uuid,
previous_schedules: HashSet<String>, // Tracks script names with SET_SCHEDULE
previous_schedules: HashSet<String>,
}
impl BasicCompiler {
@ -106,39 +100,27 @@ impl BasicCompiler {
}
}
/// Compile a BASIC file to AST and generate tool definitions
pub fn compile_file(
&mut self,
source_path: &str,
output_dir: &str,
) -> Result<CompilationResult, Box<dyn Error + Send + Sync>> {
info!("Compiling BASIC file: {}", source_path);
// Read source file
let source_content = fs::read_to_string(source_path)
.map_err(|e| format!("Failed to read source file: {}", e))?;
// Parse tool definition from source
let tool_def = self.parse_tool_definition(&source_content, source_path)?;
// Extract base name without extension
let file_name = Path::new(source_path)
.file_stem()
.and_then(|s| s.to_str())
.ok_or("Invalid file name")?;
// Generate AST path
let ast_path = format!("{}/{}.ast", output_dir, file_name);
// Generate AST (using Rhai compilation would happen here)
// For now, we'll store the preprocessed script
let ast_content = self.preprocess_basic(&source_content, source_path, self.bot_id)?;
fs::write(&ast_path, &ast_content)
.map_err(|e| format!("Failed to write AST file: {}", e))?;
info!("AST generated: {}", ast_path);
// Generate tool definitions if PARAM and DESCRIPTION found
let (mcp_json, tool_json) = if !tool_def.parameters.is_empty() {
let mcp = self.generate_mcp_tool(&tool_def)?;
let openai = self.generate_openai_tool(&tool_def)?;
@ -146,21 +128,16 @@ impl BasicCompiler {
let mcp_path = format!("{}/{}.mcp.json", output_dir, file_name);
let tool_path = format!("{}/{}.tool.json", output_dir, file_name);
// Write MCP JSON
let mcp_json_str = serde_json::to_string_pretty(&mcp)?;
fs::write(&mcp_path, mcp_json_str)
.map_err(|e| format!("Failed to write MCP JSON: {}", e))?;
// Write OpenAI tool JSON
let tool_json_str = serde_json::to_string_pretty(&openai)?;
fs::write(&tool_path, tool_json_str)
.map_err(|e| format!("Failed to write tool JSON: {}", e))?;
info!("Tool definitions generated: {} and {}", mcp_path, tool_path);
(Some(mcp), Some(openai))
} else {
debug!("No tool parameters found in {}", source_path);
(None, None)
};
@ -170,7 +147,6 @@ impl BasicCompiler {
})
}
/// Parse tool definition from BASIC source
pub fn parse_tool_definition(
&self,
source: &str,
@ -178,21 +154,18 @@ impl BasicCompiler {
) -> Result<ToolDefinition, Box<dyn Error + Send + Sync>> {
let mut params = Vec::new();
let mut description = String::new();
let lines: Vec<&str> = source.lines().collect();
let mut i = 0;
while i < lines.len() {
let line = lines[i].trim();
// Parse PARAM declarations
if line.starts_with("PARAM ") {
if let Some(param) = self.parse_param_line(line)? {
params.push(param);
}
}
// Parse DESCRIPTION
if line.starts_with("DESCRIPTION ") {
let desc_start = line.find('"').unwrap_or(0);
let desc_end = line.rfind('"').unwrap_or(line.len());
@ -218,8 +191,6 @@ impl BasicCompiler {
})
}
/// Parse a PARAM line
/// Format: PARAM name AS type LIKE "example" DESCRIPTION "description"
fn parse_param_line(
&self,
line: &str,
@ -229,7 +200,6 @@ impl BasicCompiler {
return Ok(None);
}
// Extract parts
let parts: Vec<&str> = line.split_whitespace().collect();
if parts.len() < 4 {
warn!("Invalid PARAM line: {}", line);
@ -238,7 +208,6 @@ impl BasicCompiler {
let name = parts[1].to_string();
// Find AS keyword
let as_index = parts.iter().position(|&p| p == "AS");
let param_type = if let Some(idx) = as_index {
if idx + 1 < parts.len() {
@ -250,7 +219,6 @@ impl BasicCompiler {
"string".to_string()
};
// Extract LIKE value (example)
let example = if let Some(like_pos) = line.find("LIKE") {
let rest = &line[like_pos + 4..].trim();
if let Some(start) = rest.find('"') {
@ -266,7 +234,6 @@ impl BasicCompiler {
None
};
// Extract DESCRIPTION
let description = if let Some(desc_pos) = line.find("DESCRIPTION") {
let rest = &line[desc_pos + 11..].trim();
if let Some(start) = rest.find('"') {
@ -287,11 +254,10 @@ impl BasicCompiler {
param_type: self.normalize_type(&param_type),
example,
description,
required: true, // Default to required
required: true,
}))
}
/// Normalize BASIC types to JSON schema types
fn normalize_type(&self, basic_type: &str) -> String {
match basic_type.to_lowercase().as_str() {
"string" | "text" => "string".to_string(),
@ -305,7 +271,6 @@ impl BasicCompiler {
}
}
/// Generate MCP tool format
fn generate_mcp_tool(
&self,
tool_def: &ToolDefinition,
@ -322,7 +287,6 @@ impl BasicCompiler {
example: param.example.clone(),
},
);
if param.required {
required.push(param.name.clone());
}
@ -339,7 +303,6 @@ impl BasicCompiler {
})
}
/// Generate OpenAI tool format
fn generate_openai_tool(
&self,
tool_def: &ToolDefinition,
@ -356,7 +319,6 @@ impl BasicCompiler {
example: param.example.clone(),
},
);
if param.required {
required.push(param.name.clone());
}
@ -376,21 +338,21 @@ impl BasicCompiler {
})
}
/// Preprocess BASIC script (basic transformations)
fn preprocess_basic(&mut self, source: &str, source_path: &str, bot_id: uuid::Uuid) -> Result<String, Box<dyn Error + Send + Sync>> {
let bot_uuid = bot_id;
let mut result = String::new();
let mut has_schedule = false;
let script_name = Path::new(source_path)
.file_stem()
.and_then(|s| s.to_str())
.unwrap_or("unknown")
.to_string();
// Remove any existing schedule for this script before processing
{
let mut conn = self.state.conn.lock().unwrap();
use crate::shared::models::system_automations::dsl::*;
diesel::delete(system_automations
.filter(bot_id.eq(bot_uuid))
.filter(kind.eq(TriggerKind::Scheduled as i32))
@ -407,9 +369,28 @@ impl BasicCompiler {
continue;
}
if trimmed.starts_with("SET_SCHEDULE") {
let normalized = trimmed
.replace("SET SCHEDULE", "SET_SCHEDULE")
.replace("ADD TOOL", "ADD_TOOL")
.replace("CLEAR TOOLS", "CLEAR_TOOLS")
.replace("LIST TOOLS", "LIST_TOOLS")
.replace("CREATE SITE", "CREATE_SITE")
.replace("FOR EACH", "FOR_EACH")
.replace("EXIT FOR", "EXIT_FOR")
.replace("SET USER", "SET_USER")
.replace("SET CONTEXT", "SET_CONTEXT")
.replace("CLEAR SUGGESTIONS", "CLEAR_SUGGESTIONS")
.replace("ADD SUGGESTION", "ADD_SUGGESTION")
.replace("SET KB", "SET_KB")
.replace("ADD KB", "ADD_KB")
.replace("ADD WEBSITE", "ADD_WEBSITE")
.replace("GET BOT MEMORY", "GET_BOT_MEMORY")
.replace("SET BOT MEMORY", "SET_BOT_MEMORY")
.replace("CREATE DRAFT", "CREATE_DRAFT");
if normalized.starts_with("SET_SCHEDULE") {
has_schedule = true;
let parts: Vec<&str> = trimmed.split('"').collect();
let parts: Vec<&str> = normalized.split('"').collect();
if parts.len() >= 3 {
let cron = parts[1];
let mut conn = self.state.conn.lock().unwrap();
@ -417,22 +398,23 @@ impl BasicCompiler {
log::error!("Failed to schedule SET_SCHEDULE during preprocessing: {}", e);
}
} else {
log::warn!("Malformed SET_SCHEDULE line ignored: {}", trimmed);
log::warn!("Malformed SET_SCHEDULE line ignored: {}", normalized);
}
continue;
}
if trimmed.starts_with("PARAM ") || trimmed.starts_with("DESCRIPTION ") {
if normalized.starts_with("PARAM ") || normalized.starts_with("DESCRIPTION ") {
continue;
}
result.push_str(trimmed);
result.push_str(&normalized);
result.push('\n');
}
if self.previous_schedules.contains(&script_name) && !has_schedule {
let mut conn = self.state.conn.lock().unwrap();
use crate::shared::models::system_automations::dsl::*;
diesel::delete(system_automations
.filter(bot_id.eq(bot_uuid))
.filter(kind.eq(TriggerKind::Scheduled as i32))
@ -453,7 +435,6 @@ impl BasicCompiler {
}
}
/// Result of compilation
#[derive(Debug)]
pub struct CompilationResult {
pub mcp_tool: Option<MCPTool>,


@ -1,15 +1,13 @@
use diesel::prelude::*;
use diesel::pg::PgConnection;
use uuid::Uuid;
use log::{info, trace};
// removed unused serde import
use log::info;
use std::collections::HashMap;
use std::fs::OpenOptions;
use std::io::Write;
use std::sync::{Arc, Mutex};
use crate::shared::utils::establish_pg_connection;
#[derive(Clone)]
pub struct AppConfig {
pub drive: DriveConfig,
@ -41,8 +39,6 @@ pub struct ServerConfig {
pub port: u16,
}
impl AppConfig {
pub fn database_url(&self) -> String {
format!(
@ -54,13 +50,10 @@ impl AppConfig {
self.database.database
)
}
}
impl AppConfig {
pub fn from_database(conn: &mut PgConnection) -> Result<Self, diesel::result::Error> {
info!("Loading configuration from database");
use crate::shared::models::schema::bot_configuration::dsl::*;
use diesel::prelude::*;
@ -101,23 +94,29 @@ impl AppConfig {
.unwrap_or(default)
};
let database = DatabaseConfig {
username: std::env::var("TABLES_USERNAME")
.unwrap_or_else(|_| get_str("TABLES_USERNAME", "gbuser")),
password: std::env::var("TABLES_PASSWORD")
.unwrap_or_else(|_| get_str("TABLES_PASSWORD", "")),
server: std::env::var("TABLES_SERVER")
.unwrap_or_else(|_| get_str("TABLES_SERVER", "localhost")),
port: std::env::var("TABLES_PORT")
username: match std::env::var("TABLES_USERNAME") {
Ok(v) => v,
Err(_) => get_str("TABLES_USERNAME", "gbuser"),
},
password: match std::env::var("TABLES_PASSWORD") {
Ok(v) => v,
Err(_) => get_str("TABLES_PASSWORD", ""),
},
server: match std::env::var("TABLES_SERVER") {
Ok(v) => v,
Err(_) => get_str("TABLES_SERVER", "localhost"),
},
port: std::env::var("TABLES_PORT")
.ok()
.and_then(|p| p.parse().ok())
.unwrap_or_else(|| get_u32("TABLES_PORT", 5432)),
database: std::env::var("TABLES_DATABASE")
.unwrap_or_else(|_| get_str("TABLES_DATABASE", "botserver")),
database: match std::env::var("TABLES_DATABASE") {
Ok(v) => v,
Err(_) => get_str("TABLES_DATABASE", "botserver"),
},
};
let drive = DriveConfig {
server: {
let server = get_str("DRIVE_SERVER", "http://localhost:9000");
@ -145,14 +144,12 @@ impl AppConfig {
.get_config(&Uuid::nil(), "SITES_ROOT", Some("./botserver-stack/sites"))?.to_string()
},
})
}
}
pub fn from_env() -> Result<Self, anyhow::Error> {
info!("Loading configuration from environment variables");
let database_url = std::env::var("DATABASE_URL")
.unwrap_or_else(|_| "postgres://gbuser:@localhost:5432/botserver".to_string());
let (db_username, db_password, db_server, db_port, db_name) =
parse_database_url(&database_url);
@ -164,7 +161,6 @@ impl AppConfig {
database: db_name,
};
let minio = DriveConfig {
server: std::env::var("DRIVE_SERVER")
.unwrap_or_else(|_| "http://localhost:9000".to_string()),
@ -174,13 +170,13 @@ impl AppConfig {
use_ssl: std::env::var("DRIVE_USE_SSL")
.unwrap_or_else(|_| "false".to_string())
.parse()
.unwrap_or(false) };
.unwrap_or(false)
};
Ok(AppConfig {
drive: minio,
server: ServerConfig {
host: std::env::var("SERVER_HOST").unwrap_or_else(|_| "127.0.1".to_string()),
host: std::env::var("SERVER_HOST").unwrap_or_else(|_| "127.0.0.1".to_string()),
port: std::env::var("SERVER_PORT")
.ok()
.and_then(|p| p.parse().ok())
@ -194,7 +190,6 @@ impl AppConfig {
},
})
}
}
pub fn write_drive_config_to_env(drive: &DriveConfig) -> std::io::Result<()> {
@ -203,7 +198,7 @@ pub fn write_drive_config_to_env(drive: &DriveConfig) -> std::io::Result<()> {
.create(true)
.open(".env")?;
writeln!(file,"")?;
writeln!(file, "")?;
writeln!(file, "DRIVE_SERVER={}", drive.server)?;
writeln!(file, "DRIVE_ACCESSKEY={}", drive.access_key)?;
writeln!(file, "DRIVE_SECRET={}", drive.secret_key)?;
@ -229,6 +224,7 @@ fn parse_database_url(url: &str) -> (String, String, String, u32, String) {
.get(1)
.and_then(|p| p.parse().ok())
.unwrap_or(5432);
let database = host_db[1].to_string();
return (username, password, server, port, database);
@ -262,11 +258,9 @@ impl ConfigManager {
) -> Result<String, diesel::result::Error> {
use crate::shared::models::schema::bot_configuration::dsl::*;
let mut conn = self.conn.lock().unwrap();
let fallback_str = fallback.unwrap_or("");
// Try config for provided bot_id
let result = bot_configuration
.filter(bot_id.eq(code_bot_id))
.filter(config_key.eq(key))
@ -276,8 +270,8 @@ impl ConfigManager {
let value = match result {
Ok(v) => v,
Err(_) => {
// Fallback to default bot
let (default_bot_id, _default_bot_name) = crate::bot::get_default_bot(&mut *conn);
bot_configuration
.filter(bot_id.eq(default_bot_id))
.filter(config_key.eq(key))
@ -306,6 +300,7 @@ impl ConfigManager {
.map_err(|e| format!("Failed to acquire lock: {}", e))?;
let mut updated = 0;
for line in content.lines().skip(1) {
let parts: Vec<&str> = line.split(',').collect();
if parts.len() >= 2 {
@ -313,6 +308,7 @@ impl ConfigManager {
let value = parts[1].trim();
let new_id: uuid::Uuid = uuid::Uuid::new_v4();
diesel::sql_query("INSERT INTO bot_configuration (id, bot_id, config_key, config_value, config_type) VALUES ($1, $2, $3, $4, 'string') ON CONFLICT (bot_id, config_key) DO UPDATE SET config_value = EXCLUDED.config_value, updated_at = NOW()")
.bind::<diesel::sql_types::Uuid, _>(new_id)
.bind::<diesel::sql_types::Uuid, _>(bot_id)
@ -325,9 +321,8 @@ impl ConfigManager {
}
}
trace!(
"Synced {} config values for bot {}",
updated, bot_id);
info!("Synced {} config values for bot {}", updated, bot_id);
Ok(updated)
}
}


@ -4,8 +4,7 @@ use crate::basic::compiler::BasicCompiler;
use crate::config::ConfigManager;
use crate::shared::state::AppState;
use aws_sdk_s3::Client;
use log::trace;
use log::{debug, error, info};
use log::{info, warn};
use std::collections::HashMap;
use std::error::Error;
use std::sync::Arc;
@ -35,12 +34,8 @@ impl DriveMonitor {
pub fn spawn(self: Arc<Self>) -> tokio::task::JoinHandle<()> {
tokio::spawn(async move {
info!(
"Drive Monitor service started for bucket: {}",
self.bucket_name
);
info!("Drive Monitor service started for bucket: {}", self.bucket_name);
// Check if llama servers are ready before first scan
let config_manager = ConfigManager::new(Arc::clone(&self.state.conn));
let default_bot_id = {
let mut conn = self.state.conn.lock().unwrap();
@ -50,27 +45,12 @@ impl DriveMonitor {
.unwrap_or_else(|_| uuid::Uuid::nil())
};
let _llm_url = match config_manager.get_config(&default_bot_id, "llm-url", None) {
Ok(url) => url,
Err(e) => {
error!("Failed to get llm-url config: {}", e);
return;
}
};
let _embedding_url = match config_manager.get_config(&default_bot_id, "embedding-url", None) {
Ok(url) => url,
Err(e) => {
error!("Failed to get embedding-url config: {}", e);
return;
}
};
let mut tick = interval(Duration::from_secs(30));
loop {
tick.tick().await;
if let Err(e) = self.check_for_changes().await {
error!("Error checking for drive changes: {}", e);
log::error!("Error checking for drive changes: {}", e);
}
}
})
@ -79,9 +59,7 @@ impl DriveMonitor {
async fn check_for_changes(&self) -> Result<(), Box<dyn Error + Send + Sync>> {
let client = match &self.state.drive {
Some(client) => client,
None => {
return Ok(());
}
None => return Ok(()),
};
self.check_gbdialog_changes(client).await?;
@ -90,15 +68,11 @@ impl DriveMonitor {
Ok(())
}
async fn check_gbdialog_changes(
&self,
client: &Client,
) -> Result<(), Box<dyn Error + Send + Sync>> {
async fn check_gbdialog_changes(&self, client: &Client) -> Result<(), Box<dyn Error + Send + Sync>> {
let prefix = ".gbdialog/";
let mut current_files = HashMap::new();
let mut continuation_token = None;
loop {
let list_objects = client
.list_objects_v2()
@ -106,14 +80,15 @@ impl DriveMonitor {
.set_continuation_token(continuation_token)
.send()
.await?;
trace!("List objects result: {:?}", list_objects);
for obj in list_objects.contents.unwrap_or_default() {
let path = obj.key().unwrap_or_default().to_string();
let path_parts: Vec<&str> = path.split('/').collect();
if path_parts.len() < 2 || !path_parts[0].ends_with(".gbdialog") {
continue;
}
if path.ends_with('/') || !path.ends_with(".bas") {
continue;
}
@ -121,6 +96,7 @@ impl DriveMonitor {
let file_state = FileState {
etag: obj.e_tag().unwrap_or_default().to_string(),
};
current_files.insert(path, file_state);
}
@ -131,16 +107,17 @@ impl DriveMonitor {
}
let mut file_states = self.file_states.write().await;
for (path, current_state) in current_files.iter() {
if let Some(previous_state) = file_states.get(path) {
if current_state.etag != previous_state.etag {
if let Err(e) = self.compile_tool(client, path).await {
error!("Failed to compile tool {}: {}", path, e);
log::error!("Failed to compile tool {}: {}", path, e);
}
}
} else {
if let Err(e) = self.compile_tool(client, path).await {
error!("Failed to compile tool {}: {}", path, e);
log::error!("Failed to compile tool {}: {}", path, e);
}
}
}
@ -166,7 +143,6 @@ impl DriveMonitor {
async fn check_gbot(&self, client: &Client) -> Result<(), Box<dyn Error + Send + Sync>> {
let config_manager = ConfigManager::new(Arc::clone(&self.state.conn));
let mut continuation_token = None;
loop {
@ -189,38 +165,13 @@ impl DriveMonitor {
continue;
}
trace!("Checking config file at path: {}", path);
match client
.head_object()
.bucket(&self.bucket_name)
.key(&path)
.send()
.await
{
Ok(head_res) => {
trace!(
"HeadObject successful for {}, metadata: {:?}",
path, head_res
);
let response = client
.get_object()
.bucket(&self.bucket_name)
.key(&path)
.send()
.await?;
trace!(
"GetObject successful for {}, content length: {}",
path,
response.content_length().unwrap_or(0)
);
match client.head_object().bucket(&self.bucket_name).key(&path).send().await {
Ok(_head_res) => {
let response = client.get_object().bucket(&self.bucket_name).key(&path).send().await?;
let bytes = response.body.collect().await?.into_bytes();
trace!("Collected {} bytes for {}", bytes.len(), path);
let csv_content = String::from_utf8(bytes.to_vec())
.map_err(|e| format!("UTF-8 error in {}: {}", path, e))?;
trace!("Found {}: {} bytes", path, csv_content.len());
// Restart LLaMA servers only if llm- properties changed
let llm_lines: Vec<_> = csv_content
.lines()
.filter(|line| line.trim_start().starts_with("llm-"))
@ -235,42 +186,38 @@ impl DriveMonitor {
if parts.len() >= 2 {
let key = parts[0].trim();
let new_value = parts[1].trim();
match config_manager.get_config(&self.bot_id, key, None) {
Ok(old_value) => {
if old_value != new_value {
info!(
"Detected change in {} (old: {}, new: {})",
key, old_value, new_value
);
info!("Detected change in {} (old: {}, new: {})", key, old_value, new_value);
restart_needed = true;
}
}
Err(_) => {
info!("New llm- property detected: {}", key);
restart_needed = true;
}
}
}
}
_ = config_manager.sync_gbot_config(&self.bot_id, &csv_content);
let _ = config_manager.sync_gbot_config(&self.bot_id, &csv_content);
if restart_needed {
trace!("Detected llm- configuration change, restarting LLaMA servers...");
if let Err(e) = ensure_llama_servers_running(&self.state).await {
error!("Failed to restart LLaMA servers after llm- config change: {}", e);
log::error!("Failed to restart LLaMA servers after llm- config change: {}", e);
}
}
} else {
trace!("No llm- property changes detected; skipping LLaMA server restart.");
let _ = config_manager.sync_gbot_config(&self.bot_id, &csv_content);
}
}
else
{
_ = config_manager.sync_gbot_config(&self.bot_id, &csv_content);
if csv_content.lines().any(|line| line.starts_with("theme-")) {
self.broadcast_theme_change(&csv_content).await?;
}
}
Err(e) => {
error!("Config file {} not found or inaccessible: {}", path, e);
log::error!("Config file {} not found or inaccessible: {}", path, e);
}
}
}
@ -284,36 +231,64 @@ impl DriveMonitor {
Ok(())
}
async fn compile_tool(
&self,
client: &Client,
file_path: &str,
) -> Result<(), Box<dyn Error + Send + Sync>> {
debug!(
"Fetching object from S3: bucket={}, key={}",
&self.bucket_name, file_path
);
let response = match client
.get_object()
.bucket(&self.bucket_name)
.key(file_path)
.send()
.await
{
async fn broadcast_theme_change(&self, csv_content: &str) -> Result<(), Box<dyn Error + Send + Sync>> {
let mut theme_data = serde_json::json!({
"event": "change_theme",
"data": {}
});
for line in csv_content.lines() {
let parts: Vec<&str> = line.split(',').collect();
if parts.len() >= 2 {
let key = parts[0].trim();
let value = parts[1].trim();
match key {
"theme-color1" => theme_data["data"]["color1"] = serde_json::Value::String(value.to_string()),
"theme-color2" => theme_data["data"]["color2"] = serde_json::Value::String(value.to_string()),
"theme-logo" => theme_data["data"]["logo_url"] = serde_json::Value::String(value.to_string()),
"theme-title" => theme_data["data"]["title"] = serde_json::Value::String(value.to_string()),
"theme-logo-text" => theme_data["data"]["logo_text"] = serde_json::Value::String(value.to_string()),
_ => {}
}
}
}
let response_channels = self.state.response_channels.lock().await;
for (session_id, tx) in response_channels.iter() {
let theme_response = crate::shared::models::BotResponse {
bot_id: self.bot_id.to_string(),
user_id: "system".to_string(),
session_id: session_id.clone(),
channel: "web".to_string(),
content: serde_json::to_string(&theme_data)?,
message_type: 2,
stream_token: None,
is_complete: true,
suggestions: Vec::new(),
context_name: None,
context_length: 0,
context_max_length: 0,
};
let _ = tx.try_send(theme_response);
}
Ok(())
}
async fn compile_tool(&self, client: &Client, file_path: &str) -> Result<(), Box<dyn Error + Send + Sync>> {
info!("Fetching object from S3: bucket={}, key={}", &self.bucket_name, file_path);
let response = match client.get_object().bucket(&self.bucket_name).key(file_path).send().await {
Ok(res) => {
debug!(
"Successfully fetched object from S3: bucket={}, key={}, size={}",
&self.bucket_name,
file_path,
res.content_length().unwrap_or(0)
);
info!("Successfully fetched object from S3: bucket={}, key={}, size={}",
&self.bucket_name, file_path, res.content_length().unwrap_or(0));
res
}
Err(e) => {
error!(
"Failed to fetch object from S3: bucket={}, key={}, error={:?}",
&self.bucket_name, file_path, e
);
log::error!("Failed to fetch object from S3: bucket={}, key={}, error={:?}",
&self.bucket_name, file_path, e);
return Err(e.into());
}
};
@ -329,11 +304,9 @@ impl DriveMonitor {
.unwrap_or(file_path)
.to_string();
let bot_name = self
.bucket_name
.strip_suffix(".gbai")
.unwrap_or(&self.bucket_name);
let bot_name = self.bucket_name.strip_suffix(".gbai").unwrap_or(&self.bucket_name);
let work_dir = format!("./work/{}.gbai/{}.gbdialog", bot_name, bot_name);
std::fs::create_dir_all(&work_dir)?;
let local_source_path = format!("{}/{}.bas", work_dir, tool_name);
@ -343,18 +316,10 @@ impl DriveMonitor {
let result = compiler.compile_file(&local_source_path, &work_dir)?;
if let Some(mcp_tool) = result.mcp_tool {
info!(
"MCP tool definition generated with {} parameters",
mcp_tool.input_schema.properties.len()
);
}
if result.openai_tool.is_some() {
debug!("OpenAI tool definition generated");
info!("MCP tool definition generated with {} parameters",
mcp_tool.input_schema.properties.len());
}
Ok(())
}
}


@ -1,5 +1,6 @@
name,value
prompt-history, 2
theme-color1,green
theme-color2,yellow
custom-logo-url,https://example.com/logo.png
theme-color1,#0d2b55
theme-color2,#fff9c2
theme-logo,https://example.com/logo.png
theme-title, Custom



@ -17,14 +17,18 @@
--glass:rgba(0,0,0,0.02);
--shadow:rgba(0,0,0,0.05);
--logo-url:url('https://pragmatismo.com.br/icons/general-bots.svg');
--gradient-1:linear-gradient(135deg,rgba(0,102,255,0.05) 0%,rgba(0,102,255,0.0) 100%);
--gradient-2:linear-gradient(45deg,rgba(0,0,0,0.02) 0%,rgba(0,0,0,0.0) 100%);
}
[data-theme="dark"]{
--bg:#727171;
--bg:#1a1a1a;
--fg:#ffffff;
--border:#a3a0a0;
--border:#333333;
--accent:#ffffff;
--glass:rgba(255,255,255,0.02);
--shadow:rgba(0,0,0,0.3);
--glass:rgba(255,255,255,0.05);
--shadow:rgba(0,0,0,0.5);
--gradient-1:linear-gradient(135deg,rgba(255,255,255,0.08) 0%,rgba(255,255,255,0.0) 100%);
--gradient-2:linear-gradient(45deg,rgba(255,255,255,0.03) 0%,rgba(255,255,255,0.0) 100%);
}
*{margin:0;padding:0;box-sizing:border-box}
body{
@ -36,6 +40,15 @@ transition:background 0.3s, color 0.3s;
display:flex;
flex-direction:column;
height:100vh;
position:relative;
}
body::before{
content:'';
position:fixed;
inset:0;
background:var(--gradient-1);
pointer-events:none;
z-index:0;
}
.float-menu{
position:fixed;
@ -55,11 +68,12 @@ border-radius:50%;
cursor:pointer;
transition:all 0.3s;
border:1px solid var(--border);
backdrop-filter:blur(10px);
}
[data-theme="dark"] .float-logo{
}
.float-logo:hover{
transform:scale(1.1);
transform:scale(1.1) rotate(5deg);
}
.menu-button{
width:40px;
@ -74,9 +88,10 @@ background:var(--bg);
border:1px solid var(--border);
font-size:16px;
color:var(--fg);
backdrop-filter:blur(10px);
}
.menu-button:hover{
transform:scale(1.1);
transform:scale(1.1) rotate(-5deg);
background:var(--fg);
color:var(--bg);
}
@ -92,6 +107,8 @@ transition:left 0.4s cubic-bezier(0.4,0,0.2,1);
z-index:999;
overflow-y:auto;
padding:20px;
backdrop-filter:blur(20px);
box-shadow:4px 0 20px var(--shadow);
}
.sidebar.open{
left:0;
@ -132,7 +149,7 @@ text-align:left;
.sidebar-button:hover{
background:var(--fg);
color:var(--bg);
transform:translateX(4px);
transform:translateX(4px) scale(1.02);
}
.history-section{
margin-top:20px;
@ -156,7 +173,7 @@ border:1px solid transparent;
.history-item:hover{
background:var(--fg);
color:var(--bg);
transform:translateX(4px);
transform:translateX(4px) scale(1.02);
}
#messages{
flex:1;
@ -165,6 +182,8 @@ padding:20px 20px 140px;
max-width:680px;
margin:0 auto;
width:100%;
position:relative;
z-index:1;
}
.message-container{
margin-bottom:24px;
@ -180,10 +199,21 @@ margin-bottom:8px;
background:var(--fg);
color:var(--bg);
border-radius:18px;
padding:10px 16px;
padding:12px 18px;
max-width:80%;
font-size:14px;
line-height:1.5;
box-shadow:0 2px 8px var(--shadow);
position:relative;
overflow:hidden;
}
.user-message-content::before{
content:'';
position:absolute;
inset:0;
background:var(--gradient-2);
opacity:0.3;
pointer-events:none;
}
.assistant-message{
display:flex;
@ -205,6 +235,21 @@ filter:var(--logo-filter, none);
flex:1;
font-size:14px;
line-height:1.7;
background:var(--glass);
border-radius:18px;
padding:12px 18px;
border:1px solid var(--border);
box-shadow:0 2px 8px var(--shadow);
position:relative;
overflow:hidden;
}
.assistant-message-content::before{
content:'';
position:absolute;
inset:0;
background:var(--gradient-1);
opacity:0.5;
pointer-events:none;
}
.thinking-indicator{
display:flex;
@ -240,6 +285,7 @@ border-top:1px solid var(--border);
padding:12px;
z-index:100;
transition:all 0.3s;
backdrop-filter:blur(20px);
}
.suggestions-container{
display:flex;
@ -251,7 +297,7 @@ max-width:680px;
margin:0 auto 8px;
}
.suggestion-button{
padding:4px 10px;
padding:6px 12px;
border-radius:12px;
cursor:pointer;
font-size:11px;
@ -264,7 +310,7 @@ color:var(--fg);
.suggestion-button:hover{
background:var(--fg);
color:var(--bg);
transform:scale(1.02);
transform:scale(1.05);
}
.input-container{
display:flex;
@ -284,9 +330,11 @@ transition:all 0.3s;
background:var(--glass);
border:1px solid var(--border);
color:var(--fg);
backdrop-filter:blur(10px);
}
#messageInput:focus{
border-color:var(--accent);
box-shadow:0 0 0 3px rgba(0,102,255,0.1);
}
#messageInput::placeholder{
opacity:0.3;
@ -307,7 +355,7 @@ font-size:16px;
flex-shrink:0;
}
#sendBtn:hover,#voiceBtn:hover{
transform:scale(1.08);
transform:scale(1.08) rotate(5deg);
}
#sendBtn:active,#voiceBtn:active{
transform:scale(0.95);
@ -316,8 +364,8 @@ transform:scale(0.95);
animation:pulse 1.5s infinite;
}
@keyframes pulse{
0%,100%{opacity:1}
50%{opacity:0.6}
0%,100%{opacity:1;transform:scale(1)}
50%{opacity:0.6;transform:scale(1.1)}
}
.flash-overlay{
position:fixed;
@ -349,7 +397,7 @@ z-index:90;
display:flex;
}
.scroll-to-bottom:hover{
transform:scale(1.1);
transform:scale(1.1) rotate(180deg);
}
.warning-message{
border-radius:12px;
@ -390,6 +438,7 @@ z-index:90;
background:var(--bg);
border:1px solid var(--border);
display:none;
backdrop-filter:blur(10px);
}
.context-indicator.visible{
display:block;
@ -560,6 +609,9 @@ width:100px;
<div class="sidebar-title" id="sidebarTitle">General Bots</div>
</div>
<button class="sidebar-button" id="voiceToggle" onclick="toggleVoiceMode()">🎤 Voice Mode</button>
<div class="history-section">
<div class="history-title">History</div>
<div id="history"></div>
</div>
</div>
<main id="messages"></main>
@ -582,6 +634,8 @@ let ws=null,currentSessionId=null,currentUserId=null,currentBotId="default_bot",
const maxReconnectAttempts=5,messagesDiv=document.getElementById("messages"),input=document.getElementById("messageInput"),sendBtn=document.getElementById("sendBtn"),voiceBtn=document.getElementById("voiceBtn"),connectionStatus=document.getElementById("connectionStatus"),flashOverlay=document.getElementById("flashOverlay"),suggestionsContainer=document.getElementById("suggestions"),floatLogo=document.getElementById("floatLogo"),sidebar=document.getElementById("sidebar"),themeBtn=document.getElementById("themeBtn"),scrollToBottomBtn=document.getElementById("scrollToBottom"),contextIndicator=document.getElementById("contextIndicator"),contextPercentage=document.getElementById("contextPercentage"),contextProgressBar=document.getElementById("contextProgressBar"),sidebarTitle=document.getElementById("sidebarTitle");
marked.setOptions({breaks:true,gfm:true});
floatLogo.addEventListener('click',toggleSidebar);
function toggleSidebar(){
sidebar.classList.toggle('open');
}
@ -1137,6 +1191,7 @@ async function connectToVoiceRoom(t){
try{
const r=new LiveKitClient.Room(),p=window.location.protocol==="https:"?"wss:":"ws:",u=`${p}//${window.location.host}/voice`;
await r.connect(u,t);
voiceRoom=r;
r.on("dataReceived",d=>{
const dc=new TextDecoder(),m=dc.decode(d);