feat(bot): refactor progress and warning output to use UI module
Extract progress bar rendering and warning message display from BotOrchestrator into a dedicated BotUI module. This separates UI concerns from core bot logic: the new module draws GPU, CPU, and token-usage gauges from system metrics and renders warning messages, giving the orchestrator a single, cleaner interface for terminal output.
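A minimal sketch of the call pattern the orchestrator now uses, based on the BotUI API added in src/bot/ui.rs below; show_status is a hypothetical wrapper and the warning text is only an illustrative placeholder:

    use crate::bot::ui::BotUI;

    // Hypothetical wrapper: replaces the old inline eprintln!/print! progress output.
    fn show_status(current_tokens: usize, max_context_size: usize) -> std::io::Result<()> {
        let mut ui = BotUI::new()?;                            // ratatui terminal over stdout
        ui.render_progress(current_tokens, max_context_size)?; // GPU / CPU / token gauges
        ui.render_warning("context window is nearly full")?;   // red-bordered warning panel
        Ok(())
    }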
parent 7fa6ea9f6a
commit 0539af9f6b
4 changed files with 123 additions and 57 deletions
@@ -1,7 +1,10 @@
mod ui;

use crate::config::ConfigManager;
use crate::drive_monitor::DriveMonitor;
use crate::llm_models;
use crate::nvidia::get_system_metrics;
use crate::bot::ui::BotUI;
use crate::shared::models::{BotResponse, Suggestion, UserMessage, UserSession};
use crate::shared::state::AppState;
use actix_web::{web, HttpRequest, HttpResponse, Result};
@@ -508,13 +511,6 @@ impl BotOrchestrator {

        // Show initial progress
        if let Ok(metrics) = get_system_metrics(initial_tokens, max_context_size) {
            eprintln!(
                "\nNVIDIA: {:.1}% | CPU: {:.1}% | Tokens: {}/{}",
                metrics.gpu_usage.unwrap_or(0.0),
                metrics.cpu_usage,
                initial_tokens,
                max_context_size
            );
        }
        let model = config_manager
            .get_config(
@@ -572,18 +568,8 @@ impl BotOrchestrator {
                let cpu_bar = "█".repeat((metrics.cpu_usage / 5.0).round() as usize);
                let token_ratio = current_tokens as f64 / max_context_size.max(1) as f64;
                let token_bar = "█".repeat((token_ratio * 20.0).round() as usize);
                use std::io::{self, Write};
                print!(
                    "\rGPU [{:<20}] {:.1}% | CPU [{:<20}] {:.1}% | TOKENS [{:<20}] {}/{}",
                    gpu_bar,
                    metrics.gpu_usage.unwrap_or(0.0),
                    cpu_bar,
                    metrics.cpu_usage,
                    token_bar,
                    current_tokens,
                    max_context_size
                );
                io::stdout().flush().unwrap();
                let mut ui = BotUI::new().unwrap();
                ui.render_progress(current_tokens, max_context_size).unwrap();
            }
            last_progress_update = Instant::now();
        }
@@ -795,45 +781,10 @@ impl BotOrchestrator {
            session_id, channel, message
        );

        if channel == "web" {
            self.send_event(
                "system",
                "system",
                session_id,
                channel,
                "warn",
                serde_json::json!({
                    "message": message,
                    "timestamp": Utc::now().to_rfc3339()
                }),
            )
            .await
        } else {
            if let Some(adapter) = self.state.channels.lock().unwrap().get(channel) {
                let warn_response = BotResponse {
                    bot_id: "system".to_string(),
                    user_id: "system".to_string(),
                    session_id: session_id.to_string(),
                    channel: channel.to_string(),
                    content: format!("⚠️ WARNING: {}", message),
                    message_type: 1,
                    stream_token: None,
                    is_complete: true,
                    suggestions: Vec::new(),
                    context_name: None,
                    context_length: 0,
                    context_max_length: 0,
                };
                adapter.send_message(warn_response).await
            } else {
                warn!(
                    "No channel adapter found for warning on channel: {}",
                    channel
                );
                let mut ui = BotUI::new().unwrap();
                ui.render_warning(message).unwrap();
                Ok(())
            }
        }
    }

    pub async fn trigger_auto_welcome(
        &self,
src/bot/ui.rs (new file, 78 additions)
@@ -0,0 +1,78 @@
use ratatui::{
    backend::CrosstermBackend,
    layout::{Constraint, Direction, Layout},
    style::{Color, Modifier, Style},
    widgets::{Block, Borders, Gauge, Paragraph},
    Terminal,
};
use std::io::{self, Stdout};
use crate::nvidia::get_system_metrics;

pub struct BotUI {
    terminal: Terminal<CrosstermBackend<Stdout>>,
}

impl BotUI {
    pub fn new() -> io::Result<Self> {
        let stdout = io::stdout();
        let backend = CrosstermBackend::new(stdout);
        let terminal = Terminal::new(backend)?;
        Ok(Self { terminal })
    }

    pub fn render_progress(&mut self, current_tokens: usize, max_context_size: usize) -> io::Result<()> {
        let metrics = get_system_metrics(current_tokens, max_context_size).unwrap_or_default();
        let gpu_usage = metrics.gpu_usage.unwrap_or(0.0);
        let cpu_usage = metrics.cpu_usage;
        let token_ratio = current_tokens as f64 / max_context_size.max(1) as f64;

        self.terminal.draw(|f| {
            let chunks = Layout::default()
                .direction(Direction::Vertical)
                .constraints([
                    Constraint::Length(3),
                    Constraint::Length(3),
                    Constraint::Length(3),
                    Constraint::Min(0),
                ])
                .split(f.area());

            let gpu_gauge = Gauge::default()
                .block(Block::default().title("GPU Usage").borders(Borders::ALL))
                .gauge_style(Style::default().fg(Color::Green).add_modifier(Modifier::BOLD))
                .ratio(gpu_usage as f64 / 100.0)
                .label(format!("{:.1}%", gpu_usage));

            let cpu_gauge = Gauge::default()
                .block(Block::default().title("CPU Usage").borders(Borders::ALL))
                .gauge_style(Style::default().fg(Color::Yellow).add_modifier(Modifier::BOLD))
                .ratio(cpu_usage as f64 / 100.0)
                .label(format!("{:.1}%", cpu_usage));

            let token_gauge = Gauge::default()
                .block(Block::default().title("Token Progress").borders(Borders::ALL))
                .gauge_style(Style::default().fg(Color::Cyan).add_modifier(Modifier::BOLD))
                .ratio(token_ratio)
                .label(format!("{}/{}", current_tokens, max_context_size));

            f.render_widget(gpu_gauge, chunks[0]);
            f.render_widget(cpu_gauge, chunks[1]);
            f.render_widget(token_gauge, chunks[2]);
        })?;
        Ok(())
    }

    pub fn render_warning(&mut self, message: &str) -> io::Result<()> {
        self.terminal.draw(|f| {
            let block = Block::default()
                .title("⚠️ NVIDIA Warning")
                .borders(Borders::ALL)
                .border_style(Style::default().fg(Color::Red));
            let paragraph = Paragraph::new(message)
                .style(Style::default().fg(Color::Red).add_modifier(Modifier::BOLD))
                .block(block);
            f.render_widget(paragraph, f.area());
        })?;
        Ok(())
    }
}
@@ -4,6 +4,7 @@ use std::collections::HashMap;
use sysinfo::{System};

/// System monitoring data
#[derive(Default)]
pub struct SystemMetrics {
    pub gpu_usage: Option<f32>,
    pub cpu_usage: f32,
@@ -1,6 +1,7 @@
use std::sync::Arc;
use crate::shared::state::AppState;
use crate::shared::models::schema::bots::dsl::*;
use crate::nvidia;
use diesel::prelude::*;

pub struct StatusPanel {
@@ -53,6 +54,30 @@ impl StatusPanel {
        let llm_status = "🟢 ONLINE";
        lines.push(format!(" LLM: {}", llm_status));

        // Get system metrics
        let system_metrics = match nvidia::get_system_metrics(0, 0) {
            Ok(metrics) => metrics,
            Err(_) => nvidia::SystemMetrics::default(),
        };

        // Add system metrics with progress bars
        lines.push("".to_string());
        lines.push("───────────────────────────────────────".to_string());
        lines.push(" SYSTEM METRICS".to_string());
        lines.push("───────────────────────────────────────".to_string());

        // CPU usage with progress bar
        let cpu_bar = Self::create_progress_bar(system_metrics.cpu_usage, 20);
        lines.push(format!(" CPU: {:5.1}% {}", system_metrics.cpu_usage, cpu_bar));

        // GPU usage with progress bar (if available)
        if let Some(gpu_usage) = system_metrics.gpu_usage {
            let gpu_bar = Self::create_progress_bar(gpu_usage, 20);
            lines.push(format!(" GPU: {:5.1}% {}", gpu_usage, gpu_bar));
        } else {
            lines.push(" GPU: Not available".to_string());
        }

        lines.push("".to_string());
        lines.push("───────────────────────────────────────".to_string());
        lines.push(" ACTIVE BOTS".to_string());
@@ -102,4 +127,15 @@ impl StatusPanel {
    pub fn render(&self) -> String {
        self.cached_content.clone()
    }

    /// Creates a visual progress bar for percentage values
    fn create_progress_bar(percentage: f32, width: usize) -> String {
        let filled = (percentage / 100.0 * width as f32).round() as usize;
        let empty = width.saturating_sub(filled);

        let filled_chars = "█".repeat(filled);
        let empty_chars = "░".repeat(empty);

        format!("[{}{}]", filled_chars, empty_chars)
    }
}