feat: update logging config and enable automation modules

- Update RUST_LOG configuration in launch.json to include trace level and additional module filters
- Uncomment and enable multiple directories in add-req.sh script
- Add execute_compact_prompt function to automation module
- Extend BasicCompiler comment detection to handle single quotes
- Modify BotOrchestrator system message prefix from "SYSTEM" to "SYS"
- Add placeholder for compact prompt automation in BotOrchestrator initialization
- Render streaming progress (GPU, CPU, token usage) as 20-slot bars instead of plain percentages
- Log total token count after stream completion and spawn a compact-prompt loop when the prompt-compact config is positive
- Add prompt-compact entry and update theme defaults in the bot config CSV

Changes improve debugging capabilities and enable previously commented-out automation features while maintaining existing functionality.
Author: Rodrigo Rodriguez (Pragmatismo)
Date:   2025-11-05 22:48:55 -03:00
Parent: b614d2650a
Commit: f2317d16c9

6 changed files with 98 additions and 38 deletions

.vscode/launch.json

@@ -14,7 +14,7 @@
       },
       "args": [],
       "env": {
-        "RUST_LOG": "debug,reqwest=off,aws_runtime=off,aws_smithy_http_client=off,rustls=off,actix_server=off,hyper_util=off,aws_smithy_runtime=off,aws_smithy_runtime_api=off,tracing=off,aws_sdk_s3=off"
+        "RUST_LOG": "trace,actix_web=off,aws_sigv4=off,aws_smithy_checksums=off,actix_http=off,mio=off,reqwest=off,aws_runtime=off,aws_smithy_http_client=off,rustls=off,actix_server=off,hyper_util=off,aws_smithy_runtime=off,aws_smithy_runtime_api=off,tracing=off,aws_sdk_s3=off"
       },
       "cwd": "${workspaceFolder}"

add-req.sh

@@ -21,33 +21,32 @@ for file in "${prompts[@]}"; do
 done
 
 dirs=(
-  # "auth"
-  # "automation"
+  "auth"
+  "automation"
   "basic"
-  # "bootstrap"
+  "bootstrap"
   "bot"
-  # "channels"
+  "channels"
   "config"
   "context"
   "drive_monitor"
-  # "email"
-  # "file"
+  "email"
+  "file"
   # "kb"
-  # "llm"
-  # "llm_models"
-  # "org"
-  # "package"
-  # "package_manager"
-  # "riot_compiler"
+  "llm"
+  "llm_models"
+  "org"
+  "package"
+  "package_manager"
+  "riot_compiler"
   "session"
   "shared"
-  # "tests"
-  # "tools"
-  # "ui"
-  # "web_server"
-  # "web_automation"
-  # "whatsapp"
+  "tests"
+  "tools"
+  "ui"
+  "web_server"
+  "web_automation"
 )
 
 filter_rust_file() {
   sed -E '/^\s*\/\//d' "$1" | \

automation module

@@ -161,3 +161,27 @@ match script_service.compile(&script_content) {
         Ok(())
     }
 }
+
+pub async fn execute_compact_prompt() -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
+    use crate::shared::models::system_automations::dsl::{system_automations, is_active};
+    use diesel::prelude::*;
+    use log::info;
+    use std::sync::Arc;
+
+    let state = Arc::new(crate::shared::state::AppState::default());
+    let service = AutomationService::new(Arc::clone(&state));
+    let mut conn = state.conn.lock().map_err(|e| format!("Failed to acquire lock: {}", e))?;
+
+    let automations: Vec<crate::shared::models::Automation> = system_automations
+        .filter(is_active.eq(true))
+        .load::<crate::shared::models::Automation>(&mut *conn)?;
+
+    for automation in automations {
+        if let Err(e) = service.execute_compact_prompt(&automation).await {
+            error!("Failed to compact prompt for bot {}: {}", automation.bot_id, e);
+        }
+    }
+
+    info!("Prompt compaction cycle completed");
+    Ok(())
+}
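
Note: as written, execute_compact_prompt holds the std::sync::Mutex guard on state.conn across the .await inside the loop, which makes the returned future non-Send; that is likely the "Send issues" the BotOrchestrator comment below refers to. A hypothetical Send-friendly variant, assuming the same crate-local types and Send bounds hold (a sketch, not part of the commit):

    // Hypothetical sketch: release the connection guard before awaiting so the
    // future stays Send and can be driven by tokio::spawn on a fixed interval.
    pub async fn compaction_loop(state: std::sync::Arc<crate::shared::state::AppState>) {
        use crate::shared::models::system_automations::dsl::{is_active, system_automations};
        use diesel::prelude::*;

        let service = AutomationService::new(std::sync::Arc::clone(&state));
        let mut ticker = tokio::time::interval(std::time::Duration::from_secs(60));
        loop {
            ticker.tick().await;
            // Load rows inside a block so the MutexGuard is dropped before any .await.
            let automations: Vec<crate::shared::models::Automation> = {
                let mut conn = match state.conn.lock() {
                    Ok(c) => c,
                    Err(e) => { log::error!("connection lock poisoned: {}", e); continue; }
                };
                match system_automations.filter(is_active.eq(true)).load(&mut *conn) {
                    Ok(rows) => rows,
                    Err(e) => { log::error!("automation query failed: {}", e); continue; }
                }
            };
            for automation in &automations {
                if let Err(e) = service.execute_compact_prompt(automation).await {
                    log::error!("compact prompt failed for bot {}: {}", automation.bot_id, e);
                }
            }
        }
    }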

BasicCompiler

@@ -365,7 +365,7 @@ impl BasicCompiler {
         for line in source.lines() {
             let trimmed = line.trim();
-            if trimmed.is_empty() || trimmed.starts_with("//") || trimmed.starts_with("REM") {
+            if trimmed.is_empty() || trimmed.starts_with("'") || trimmed.starts_with("//") || trimmed.starts_with("REM") {
                 continue;
             }
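
Note: the added check treats a leading single quote as a comment marker, matching classic BASIC syntax alongside // and REM. A self-contained sketch of the same predicate (hypothetical helper, not the project's API):

    /// Returns true for lines the compiler should skip: blank lines and
    /// comments introduced by ', //, or REM.
    fn is_skippable(line: &str) -> bool {
        let trimmed = line.trim();
        trimmed.is_empty()
            || trimmed.starts_with("'")   // classic BASIC single-quote comment
            || trimmed.starts_with("//")
            || trimmed.starts_with("REM")
    }

    fn main() {
        assert!(is_skippable("  ' legacy single-quote comment"));
        assert!(is_skippable("REM another comment"));
        assert!(is_skippable("// C-style comment"));
        assert!(!is_skippable("PRINT \"hello\""));
    }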

BotOrchestrator

@@ -47,10 +47,15 @@ pub struct BotOrchestrator {
 impl BotOrchestrator {
     pub fn new(state: Arc<AppState>) -> Self {
-        Self {
+        let orchestrator = Self {
             state,
             mounted_bots: Arc::new(AsyncMutex::new(HashMap::new())),
-        }
+        };
+
+        // Spawn internal automation to run compact prompt every minute if enabled
+        // Compact automation disabled to avoid Send issues in background task
+        orchestrator
     }
 
     pub async fn mount_all_bots(&self) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
@@ -412,7 +417,7 @@ impl BotOrchestrator {
         let mut prompt = String::new();
 
         if !system_prompt.is_empty() {
-            prompt.push_str(&format!("SYSTEM: *** {} *** \n", system_prompt));
+            prompt.push_str(&format!("SYS: *** {} *** \n", system_prompt));
         }
 
         if !context_data.is_empty() {
             prompt.push_str(&format!("CONTEXT: *** {} *** \n", context_data));
@@ -549,13 +554,20 @@ impl BotOrchestrator {
             if last_progress_update.elapsed() >= progress_interval {
                 let current_tokens = initial_tokens + crate::shared::utils::estimate_token_count(&full_response);
                 if let Ok(metrics) = get_system_metrics(current_tokens, max_context_size) {
-                    eprintln!(
-                        "\nNVIDIA: {:.1}% | CPU: {:.1}% | Tokens: {}/{}",
-                        metrics.gpu_usage.unwrap_or(0.0),
-                        metrics.cpu_usage,
-                        current_tokens,
-                        max_context_size
-                    );
+                    let gpu_bar = "█".repeat((metrics.gpu_usage.unwrap_or(0.0) / 5.0).round() as usize);
+                    let cpu_bar = "█".repeat((metrics.cpu_usage / 5.0).round() as usize);
+                    let token_ratio = current_tokens as f64 / max_context_size.max(1) as f64;
+                    let token_bar = "█".repeat((token_ratio * 20.0).round() as usize);
+                    eprintln!(
+                        "\nGPU [{:<20}] {:.1}% | CPU [{:<20}] {:.1}% | TOKENS [{:<20}] {}/{}",
+                        gpu_bar,
+                        metrics.gpu_usage.unwrap_or(0.0),
+                        cpu_bar,
+                        metrics.cpu_usage,
+                        token_bar,
+                        current_tokens,
+                        max_context_size
+                    );
                 }
                 last_progress_update = Instant::now();
             }
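
Note: each bar is scaled to a 20-character field (a percentage divided by 5, a 0..1 ratio multiplied by 20) and padded by the {:<20} format specifier. The bar glyph in the hunk above did not survive copying; "█" is assumed. A standalone sketch of the same arithmetic:

    // Standalone sketch of the progress-bar math used above (glyph assumed).
    // clamp() is added here so the field never overflows; the committed code
    // relies on its inputs staying in range.
    fn bar_from_percent(percent: f64) -> String {
        "█".repeat((percent / 5.0).round().clamp(0.0, 20.0) as usize)
    }

    fn bar_from_ratio(used: usize, max: usize) -> String {
        let ratio = used as f64 / max.max(1) as f64;
        "█".repeat((ratio * 20.0).round().clamp(0.0, 20.0) as usize)
    }

    fn main() {
        // 37.5% CPU -> 7.5 slots, rounded to 8 of 20.
        eprintln!("CPU    [{:<20}] {:.1}%", bar_from_percent(37.5), 37.5);
        // 1500 of 4096 tokens -> 0.366 * 20, rounded to 7 slots.
        eprintln!("TOKENS [{:<20}] {}/{}", bar_from_ratio(1500, 4096), 1500, 4096);
    }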
@@ -582,10 +594,34 @@ impl BotOrchestrator {
             }
         }
 
         trace!(
             "Stream processing completed, {} chunks processed",
             chunk_count
         );
+
+        // Sum tokens from all p.push context builds before submission
+        let total_tokens = crate::shared::utils::estimate_token_count(&prompt)
+            + crate::shared::utils::estimate_token_count(&context_data)
+            + crate::shared::utils::estimate_token_count(&full_response);
+        info!("Total tokens (context + prompt + response): {}", total_tokens);
+
+        // Trigger compact prompt if enabled
+        let config_manager = ConfigManager::new(Arc::clone(&self.state.conn));
+        let compact_enabled = config_manager
+            .get_config(&Uuid::parse_str(&message.bot_id).unwrap_or_default(), "prompt-compact", None)
+            .unwrap_or_default()
+            .parse::<i32>()
+            .unwrap_or(0);
+        if compact_enabled > 0 {
+            tokio::task::spawn_blocking(move || {
+                loop {
+                    if let Err(e) = tokio::runtime::Handle::current().block_on(crate::automation::execute_compact_prompt()) {
+                        error!("Failed to execute compact prompt: {}", e);
+                    }
+                    std::thread::sleep(Duration::from_secs(60));
+                }
+            });
+        }
 
         // Save final message with short lock scope
         {
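
Note: tokio::task::spawn_blocking runs the closure on Tokio's blocking thread pool, where Handle::current().block_on is permitted, but the infinite loop pins that pool thread, and each completed stream that sees prompt-compact > 0 appears to start another loop. A hypothetical once-guard (a sketch, not part of the commit) that would keep at most one loop alive:

    use std::sync::atomic::{AtomicBool, Ordering};

    // Hypothetical guard: spawn the compaction loop at most once per process,
    // regardless of how many responses find prompt-compact enabled.
    static COMPACTION_LOOP_STARTED: AtomicBool = AtomicBool::new(false);

    fn spawn_compaction_loop_once() {
        // swap returns the previous value; true means a loop is already running.
        if COMPACTION_LOOP_STARTED.swap(true, Ordering::SeqCst) {
            return;
        }
        tokio::task::spawn_blocking(|| loop {
            if let Err(e) = tokio::runtime::Handle::current()
                .block_on(crate::automation::execute_compact_prompt())
            {
                log::error!("Failed to execute compact prompt: {}", e);
            }
            std::thread::sleep(std::time::Duration::from_secs(60));
        });
    }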

bot default config (CSV)

@@ -1,6 +1,7 @@
 name,value
 prompt-history, 2
-theme-color1,#0d2b55
-theme-color2,#fff9c2
-theme-logo,https://example.com/logo.png
+prompt-compact, 4
+theme-color1, #0d2b55
+theme-color2, #fff9c2
+theme-logo, https://pragmatismo.com.br/icons/general-bots.svg
 theme-title, Custom
