Fix clippy warnings: raw strings, unused imports, format args, must_use attributes

This commit is contained in:
Rodrigo Rodriguez (Pragmatismo) 2025-12-24 09:39:23 -03:00
parent 7cbfe43319
commit 3a8510d191
23 changed files with 126 additions and 324 deletions

View file

@ -1,6 +1,3 @@
use anyhow::{anyhow, Result};
use chrono::{DateTime, Duration, Local, Utc};
use serde::{Deserialize, Serialize};
@ -8,7 +5,6 @@ use std::collections::HashMap;
use std::sync::Arc;
use tokio::sync::RwLock;
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub enum AttendanceCommand {
CheckIn,
@ -20,7 +16,6 @@ pub enum AttendanceCommand {
Override,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct KeywordConfig {
pub enabled: bool,
@ -58,7 +53,6 @@ impl Default for KeywordConfig {
}
}
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ParsedCommand {
pub command: AttendanceCommand,
@ -67,21 +61,18 @@ pub struct ParsedCommand {
pub raw_input: String,
}
#[derive(Debug, Clone)]
pub struct KeywordParser {
config: Arc<RwLock<KeywordConfig>>,
}
impl KeywordParser {
/// Builds a parser around the given keyword configuration,
/// wrapping it for shared, concurrent read/write access.
pub fn new(config: KeywordConfig) -> Self {
    let shared = Arc::new(RwLock::new(config));
    Self { config: shared }
}
pub async fn parse(&self, input: &str) -> Option<ParsedCommand> {
let config = self.config.read().await;
@ -95,7 +86,6 @@ impl KeywordParser {
input.trim().to_lowercase()
};
let command_text = if let Some(prefix) = &config.prefix {
if !processed_input.starts_with(prefix) {
return None;
@ -105,7 +95,6 @@ impl KeywordParser {
&processed_input
};
let parts: Vec<&str> = command_text.split_whitespace().collect();
if parts.is_empty() {
return None;
@ -114,14 +103,12 @@ impl KeywordParser {
let command_word = parts[0];
let args: Vec<String> = parts[1..].iter().map(|s| s.to_string()).collect();
let resolved_command = if let Some(alias) = config.aliases.get(command_word) {
alias.as_str()
} else {
command_word
};
let command = config.keywords.get(resolved_command)?;
Some(ParsedCommand {
@ -132,43 +119,36 @@ impl KeywordParser {
})
}
/// Replaces the entire keyword configuration under a write lock.
pub async fn update_config(&self, config: KeywordConfig) {
    *self.config.write().await = config;
}
/// Registers (or overwrites) a keyword-to-command mapping.
pub async fn add_keyword(&self, keyword: String, command: AttendanceCommand) {
    self.config
        .write()
        .await
        .keywords
        .insert(keyword, command);
}
/// Registers (or overwrites) an alias that resolves to another keyword.
pub async fn add_alias(&self, alias: String, target: String) {
    self.config.write().await.aliases.insert(alias, target);
}
/// Removes a keyword mapping; returns `true` if it existed.
pub async fn remove_keyword(&self, keyword: &str) -> bool {
    let mut guard = self.config.write().await;
    guard.keywords.remove(keyword).is_some()
}
/// Removes an alias; returns `true` if it existed.
pub async fn remove_alias(&self, alias: &str) -> bool {
    let mut guard = self.config.write().await;
    guard.aliases.remove(alias).is_some()
}
/// Returns a snapshot (clone) of the current keyword configuration.
pub async fn get_config(&self) -> KeywordConfig {
    let guard = self.config.read().await;
    guard.clone()
}
}
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AttendanceRecord {
pub id: String,
@ -179,7 +159,6 @@ pub struct AttendanceRecord {
pub notes: Option<String>,
}
#[derive(Debug, Clone)]
pub struct AttendanceService {
parser: Arc<KeywordParser>,
@ -187,7 +166,6 @@ pub struct AttendanceService {
}
impl AttendanceService {
pub fn new(parser: KeywordParser) -> Self {
Self {
parser: Arc::new(parser),
@ -195,12 +173,7 @@ impl AttendanceService {
}
}
pub async fn process_input(
&self,
user_id: &str,
input: &str,
) -> Result<AttendanceResponse> {
pub async fn process_input(&self, user_id: &str, input: &str) -> Result<AttendanceResponse> {
let parsed = self
.parser
.parse(input)
@ -218,7 +191,6 @@ impl AttendanceService {
}
}
async fn handle_check_in(
&self,
user_id: &str,
@ -226,7 +198,6 @@ impl AttendanceService {
) -> Result<AttendanceResponse> {
let mut records = self.records.write().await;
if let Some(last_record) = records.iter().rev().find(|r| r.user_id == user_id) {
if matches!(last_record.command, AttendanceCommand::CheckIn) {
return Ok(AttendanceResponse::Error {
@ -257,7 +228,6 @@ impl AttendanceService {
})
}
async fn handle_check_out(
&self,
user_id: &str,
@ -265,7 +235,6 @@ impl AttendanceService {
) -> Result<AttendanceResponse> {
let mut records = self.records.write().await;
let check_in_time = records
.iter()
.rev()
@ -303,7 +272,6 @@ impl AttendanceService {
})
}
async fn handle_break(
&self,
user_id: &str,
@ -311,7 +279,6 @@ impl AttendanceService {
) -> Result<AttendanceResponse> {
let mut records = self.records.write().await;
let is_checked_in = records
.iter()
.rev()
@ -343,7 +310,6 @@ impl AttendanceService {
})
}
async fn handle_resume(
&self,
user_id: &str,
@ -351,7 +317,6 @@ impl AttendanceService {
) -> Result<AttendanceResponse> {
let mut records = self.records.write().await;
let break_time = records
.iter()
.rev()
@ -384,14 +349,10 @@ impl AttendanceService {
})
}
async fn handle_status(&self, user_id: &str) -> Result<AttendanceResponse> {
let records = self.records.read().await;
let user_records: Vec<_> = records
.iter()
.filter(|r| r.user_id == user_id)
.collect();
let user_records: Vec<_> = records.iter().filter(|r| r.user_id == user_id).collect();
if user_records.is_empty() {
return Ok(AttendanceResponse::Status {
@ -421,18 +382,14 @@ impl AttendanceService {
})
}
async fn handle_report(
&self,
user_id: &str,
parsed: &ParsedCommand,
_parsed: &ParsedCommand,
) -> Result<AttendanceResponse> {
let records = self.records.read().await;
let user_records: Vec<_> = records
.iter()
.filter(|r| r.user_id == user_id)
.collect();
let user_records: Vec<_> = records.iter().filter(|r| r.user_id == user_id).collect();
if user_records.is_empty() {
return Ok(AttendanceResponse::Report {
@ -464,7 +421,6 @@ impl AttendanceService {
Ok(AttendanceResponse::Report { data: report })
}
async fn handle_override(
&self,
user_id: &str,
@ -479,7 +435,6 @@ impl AttendanceService {
let target_user = &parsed.args[0];
let action = &parsed.args[1];
log::warn!(
"Override command by {} for user {}: {}",
user_id,
@ -493,7 +448,6 @@ impl AttendanceService {
})
}
pub async fn get_user_records(&self, user_id: &str) -> Vec<AttendanceRecord> {
let records = self.records.read().await;
records
@ -503,13 +457,11 @@ impl AttendanceService {
.collect()
}
/// Drops every stored attendance record (all users).
pub async fn clear_records(&self) {
    self.records.write().await.clear();
}
pub async fn get_today_work_time(&self, user_id: &str) -> Duration {
let records = self.records.read().await;
let today = Local::now().date_naive();
@ -536,7 +488,6 @@ impl AttendanceService {
}
}
if let Some(checkin) = last_checkin {
total_duration = total_duration + (Utc::now() - checkin);
}
@ -545,7 +496,6 @@ impl AttendanceService {
}
}
#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum AttendanceResponse {
Success {

View file

@ -52,7 +52,6 @@ impl LlmAssistConfig {
if let Ok(content) = std::fs::read_to_string(&path) {
for line in content.lines() {
let line_lower = line.to_lowercase();
let parts: Vec<&str> = line.split(',').map(|s| s.trim()).collect();
if parts.len() < 2 {
@ -1420,7 +1419,7 @@ async fn handle_summary_command(
}
fn get_help_text() -> String {
r#"*Attendant Commands*
r"*Attendant Commands*
*Queue Management:*
`/queue` - View waiting conversations
@ -1438,7 +1437,7 @@ fn get_help_text() -> String {
*Other:*
`/help` - Show this help
_Portuguese: /fila, /pegar, /transferir, /resolver, /dicas, /polir, /respostas, /resumo, /ajuda_"#
_Portuguese: /fila, /pegar, /transferir, /resolver, /dicas, /polir, /respostas, /resumo, /ajuda_"
.to_string()
}

View file

@ -450,7 +450,7 @@ async fn handle_attendant_websocket(socket: WebSocket, state: Arc<AppState>, att
.await;
}
}
Message::Ping(data) => {
Message::Ping(_data) => {
debug!("Received ping from attendant {}", attendant_id_for_recv);
}
Message::Close(_) => {

View file

@ -7,20 +7,13 @@ use log::debug;
use rhai::Engine;
use std::sync::Arc;
pub fn register_crm_keywords(state: Arc<AppState>, user: UserSession, engine: &mut Engine) {
score_lead::score_lead_keyword(state.clone(), user.clone(), engine);
score_lead::get_lead_score_keyword(state.clone(), user.clone(), engine);
score_lead::qualify_lead_keyword(state.clone(), user.clone(), engine);
score_lead::update_lead_score_keyword(state.clone(), user.clone(), engine);
score_lead::ai_score_lead_keyword(state.clone(), user.clone(), engine);
attendance::register_attendance_keywords(state.clone(), user.clone(), engine);
debug!("Registered all CRM keywords (lead scoring + attendance + LLM assist)");

View file

@ -9,7 +9,7 @@ use log::debug;
use rhai::Engine;
use std::sync::Arc;
pub fn register_datetime_functions(state: Arc<AppState>, user: UserSession, engine: &mut Engine) {
pub fn register_datetime_functions(state: &Arc<AppState>, user: UserSession, engine: &mut Engine) {
now::now_keyword(&state, user.clone(), engine);
now::today_keyword(&state, user.clone(), engine);
now::time_keyword(&state, user.clone(), engine);
@ -28,4 +28,3 @@ pub fn register_datetime_functions(state: Arc<AppState>, user: UserSession, engi
debug!("Registered all datetime functions");
}

View file

@ -590,7 +590,7 @@ pub fn register_approval_keywords(engine: &mut Engine) {
info!("Approval keywords registered");
}
pub const APPROVAL_SCHEMA: &str = r#"
pub const APPROVAL_SCHEMA: &str = r"
-- Approval requests
CREATE TABLE IF NOT EXISTS approval_requests (
id UUID PRIMARY KEY,
@ -667,7 +667,7 @@ CREATE INDEX IF NOT EXISTS idx_approval_audit_timestamp ON approval_audit_log(ti
CREATE INDEX IF NOT EXISTS idx_approval_tokens_token ON approval_tokens(token);
CREATE INDEX IF NOT EXISTS idx_approval_tokens_request_id ON approval_tokens(request_id);
"#;
";
pub mod sql {
pub const INSERT_REQUEST: &str = r"

View file

@ -254,7 +254,7 @@ Respond with valid JSON only:
pub fn generate_query_prompt(&self, query: &str, context: &str) -> String {
format!(
r#"Answer this question using the knowledge graph context.
r"Answer this question using the knowledge graph context.
QUESTION: {query}
@ -263,7 +263,7 @@ KNOWLEDGE GRAPH CONTEXT:
Provide a natural language answer based on the entities and relationships.
If the information is not available, say so clearly.
"#
"
)
}
@ -494,7 +494,7 @@ pub fn register_knowledge_graph_keywords(engine: &mut Engine) {
info!("Knowledge graph keywords registered");
}
pub const KNOWLEDGE_GRAPH_SCHEMA: &str = r#"
pub const KNOWLEDGE_GRAPH_SCHEMA: &str = r"
-- Knowledge graph entities
CREATE TABLE IF NOT EXISTS kg_entities (
id UUID PRIMARY KEY,
@ -542,7 +542,7 @@ CREATE INDEX IF NOT EXISTS idx_kg_relationships_type ON kg_relationships(relatio
-- Full-text search on entity names
CREATE INDEX IF NOT EXISTS idx_kg_entities_name_fts ON kg_entities
USING GIN(to_tsvector('english', entity_name));
"#;
";
pub mod sql {
pub const INSERT_ENTITY: &str = r"

View file

@ -11,7 +11,7 @@ use log::debug;
use rhai::Engine;
use std::sync::Arc;
pub fn register_math_functions(state: Arc<AppState>, user: UserSession, engine: &mut Engine) {
pub fn register_math_functions(state: &Arc<AppState>, user: UserSession, engine: &mut Engine) {
abs::abs_keyword(&state, user.clone(), engine);
round::round_keyword(&state, user.clone(), engine);
basic_math::int_keyword(&state, user.clone(), engine);

View file

@ -6,7 +6,7 @@ use rhai::Engine;
pub fn print_keyword(_state: &AppState, _user: UserSession, engine: &mut Engine) {
engine
.register_custom_syntax(&["PRINT", "$expr$"], true, |context, inputs| {
.register_custom_syntax(["PRINT", "$expr$"], true, |context, inputs| {
let value = context.eval_expression_tree(&inputs[0])?;
trace!("PRINT: {value}");
Ok(Dynamic::UNIT)

View file

@ -4,73 +4,43 @@ use log::debug;
use rhai::{Dynamic, Engine, Map};
use std::sync::Arc;
/// Registers the ISEMPTY builtin with the script engine under its
/// three accepted spellings; each delegates to `check_empty`.
pub fn isempty_keyword(_state: &Arc<AppState>, _user: UserSession, engine: &mut Engine) {
    // Same predicate, exposed under every casing the scripts use.
    for name in ["ISEMPTY", "isempty", "IsEmpty"] {
        engine.register_fn(name, |value: Dynamic| -> bool { check_empty(&value) });
    }
    debug!("Registered ISEMPTY keyword");
}
fn check_empty(value: &Dynamic) -> bool {
if value.is_unit() {
return true;
}
if value.is_string() {
if let Some(s) = value.clone().into_string().ok() {
if let Ok(s) = value.clone().into_string() {
return s.is_empty();
}
}
if value.is_array() {
if let Ok(arr) = value.clone().into_array() {
return arr.is_empty();
}
}
if value.is_map() {
if let Some(map) = value.clone().try_cast::<Map>() {
return map.is_empty();
}
}
if value.is_bool() {
return false;
}
if value.is_int() || value.is_float() {
return false;
}

View file

@ -9,7 +9,11 @@ use log::debug;
use rhai::Engine;
use std::sync::Arc;
pub fn register_validation_functions(state: Arc<AppState>, user: UserSession, engine: &mut Engine) {
pub fn register_validation_functions(
state: &Arc<AppState>,
user: UserSession,
engine: &mut Engine,
) {
str_val::val_keyword(&state, user.clone(), engine);
str_val::str_keyword(&state, user.clone(), engine);
str_val::cint_keyword(&state, user.clone(), engine);

View file

@ -1095,7 +1095,7 @@ DefaultInstance:
fs::create_dir_all(caddy_config.parent().unwrap())?;
let config = format!(
r#"{{
r"{{
admin off
auto_https disable_redirects
}}
@ -1129,7 +1129,7 @@ meet.botserver.local {{
tls /botserver-stack/conf/system/certificates/caddy/server.crt /botserver-stack/conf/system/certificates/caddy/server.key
reverse_proxy {}
}}
"#,
",
crate::core::urls::InternalUrls::DIRECTORY_BASE.replace("https://", ""),
crate::core::urls::InternalUrls::DIRECTORY_BASE.replace("https://", ""),
crate::core::urls::InternalUrls::LLM.replace("https://", ""),
@ -1148,7 +1148,7 @@ meet.botserver.local {{
let zone_file = self.stack_dir("conf/dns/botserver.local.zone");
let corefile = r#"botserver.local:53 {
let corefile = r"botserver.local:53 {
file /botserver-stack/conf/dns/botserver.local.zone
reload 10s
log
@ -1159,11 +1159,11 @@ meet.botserver.local {{
cache 30
log
}
"#;
";
fs::write(dns_config, corefile)?;
let zone = r#"$ORIGIN botserver.local.
let zone = r"$ORIGIN botserver.local.
$TTL 60
@ IN SOA ns1.botserver.local. admin.botserver.local. (
2024010101 ; Serial
@ -1192,7 +1192,7 @@ email IN A 127.0.0.1
meet IN A 127.0.0.1
; Dynamic entries will be added below
"#;
";
fs::write(zone_file, zone)?;
info!("CoreDNS configured for dynamic DNS");
@ -1553,7 +1553,7 @@ meet IN A 127.0.0.1
info!("Writing .env file with Vault configuration...");
let env_content = format!(
r#"# BotServer Environment Configuration
r"# BotServer Environment Configuration
# Generated by bootstrap - DO NOT ADD OTHER SECRETS HERE
# All secrets are stored in Vault at the paths below:
# - gbo/tables - PostgreSQL credentials
@ -1573,7 +1573,7 @@ VAULT_TOKEN={}
# Cache TTL for secrets (seconds)
VAULT_CACHE_TTL=300
"#,
",
vault_addr, root_token
);
fs::write(&env_file_path, &env_content)?;

View file

@ -1,7 +1,5 @@
use std::collections::HashMap;
#[derive(Debug, Clone)]
pub struct InstallResult {
pub component: String,
@ -14,8 +12,9 @@ pub struct InstallResult {
impl InstallResult {
pub fn print(&self) {
let component_upper = self.component.to_uppercase();
println!("\n========================================");
println!(" {} Installation Complete", self.component.to_uppercase());
println!(" {component_upper} Installation Complete");
println!("========================================\n");
println!("Container: {}", self.container_name);
println!("IP Address: {}", self.container_ip);
@ -25,7 +24,7 @@ impl InstallResult {
if !self.env_vars.is_empty() {
println!("\n--- Environment Variables (.env) ---\n");
for (key, value) in &self.env_vars {
println!("{}={}", key, value);
println!("{key}={value}");
}
}
println!("\n========================================\n");

View file

@ -1,5 +1,7 @@
use crate::package_manager::OsType;
pub fn detect_os() -> OsType {
#[must_use]
pub const fn detect_os() -> OsType {
if cfg!(target_os = "linux") {
OsType::Linux
} else if cfg!(target_os = "macos") {

View file

@ -48,6 +48,7 @@ pub struct Extensions {
}
impl Extensions {
#[must_use]
pub fn new() -> Self {
Self {
map: Arc::new(RwLock::new(HashMap::new())),

View file

@ -14,7 +14,6 @@ use qdrant_client::{
Qdrant,
};
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct FileDocument {
pub id: String,
@ -32,7 +31,6 @@ pub struct FileDocument {
pub tags: Vec<String>,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct FileSearchQuery {
pub query_text: String,
@ -44,7 +42,6 @@ pub struct FileSearchQuery {
pub limit: usize,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct FileSearchResult {
pub file: FileDocument,
@ -53,7 +50,6 @@ pub struct FileSearchResult {
pub highlights: Vec<String>,
}
pub struct UserDriveVectorDB {
user_id: Uuid,
bot_id: Uuid,
@ -64,7 +60,6 @@ pub struct UserDriveVectorDB {
}
impl UserDriveVectorDB {
pub fn new(user_id: Uuid, bot_id: Uuid, db_path: PathBuf) -> Self {
let collection_name = format!("drive_{}_{}", bot_id, user_id);
@ -90,12 +85,10 @@ impl UserDriveVectorDB {
&self.collection_name
}
#[cfg(feature = "vectordb")]
pub async fn initialize(&mut self, qdrant_url: &str) -> Result<()> {
let client = Qdrant::from_url(qdrant_url).build()?;
let collections = client.list_collections().await?;
let exists = collections
.collections
@ -103,7 +96,6 @@ impl UserDriveVectorDB {
.any(|c| c.name == self.collection_name);
if !exists {
client
.create_collection(
qdrant_client::qdrant::CreateCollectionBuilder::new(&self.collection_name)
@ -129,7 +121,6 @@ impl UserDriveVectorDB {
Ok(())
}
#[cfg(feature = "vectordb")]
pub async fn index_file(&self, file: &FileDocument, embedding: Vec<f32>) -> Result<()> {
let client = self
@ -149,9 +140,10 @@ impl UserDriveVectorDB {
let point = PointStruct::new(file.id.clone(), embedding, payload);
client
.upsert_points(
qdrant_client::qdrant::UpsertPointsBuilder::new(&self.collection_name, vec![point]),
)
.upsert_points(qdrant_client::qdrant::UpsertPointsBuilder::new(
&self.collection_name,
vec![point],
))
.await?;
log::debug!("Indexed file: {} - {}", file.id, file.file_name);
@ -160,14 +152,12 @@ impl UserDriveVectorDB {
#[cfg(not(feature = "vectordb"))]
/// Fallback indexer when the vector DB feature is disabled: persists the
/// document as pretty-printed JSON under `db_path/<id>.json`. The embedding
/// is ignored in this mode.
pub async fn index_file(&self, file: &FileDocument, _embedding: Vec<f32>) -> Result<()> {
    let target = self.db_path.join(format!("{}.json", file.id));
    let serialized = serde_json::to_string_pretty(file)?;
    fs::write(target, serialized).await?;
    Ok(())
}
pub async fn index_files_batch(&self, files: &[(FileDocument, Vec<f32>)]) -> Result<()> {
#[cfg(feature = "vectordb")]
{
@ -184,7 +174,9 @@ impl UserDriveVectorDB {
let payload: qdrant_client::Payload = m
.clone()
.into_iter()
.map(|(k, v)| (k, qdrant_client::qdrant::Value::from(v.to_string())))
.map(|(k, v)| {
(k, qdrant_client::qdrant::Value::from(v.to_string()))
})
.collect::<std::collections::HashMap<_, _>>()
.into();
PointStruct::new(file.id.clone(), embedding.clone(), payload)
@ -195,9 +187,10 @@ impl UserDriveVectorDB {
if !points.is_empty() {
client
.upsert_points(
qdrant_client::qdrant::UpsertPointsBuilder::new(&self.collection_name, points),
)
.upsert_points(qdrant_client::qdrant::UpsertPointsBuilder::new(
&self.collection_name,
points,
))
.await?;
}
}
@ -212,7 +205,6 @@ impl UserDriveVectorDB {
Ok(())
}
#[cfg(feature = "vectordb")]
pub async fn search(
&self,
@ -224,39 +216,39 @@ impl UserDriveVectorDB {
.as_ref()
.ok_or_else(|| anyhow::anyhow!("Vector DB not initialized"))?;
let filter =
if query.bucket.is_some() || query.file_type.is_some() || !query.tags.is_empty() {
let mut conditions = vec![];
let filter = if query.bucket.is_some() || query.file_type.is_some() || !query.tags.is_empty() {
let mut conditions = vec![];
if let Some(bucket) = &query.bucket {
conditions.push(qdrant_client::qdrant::Condition::matches(
"bucket",
bucket.clone(),
));
}
if let Some(bucket) = &query.bucket {
conditions.push(qdrant_client::qdrant::Condition::matches(
"bucket",
bucket.clone(),
));
}
if let Some(file_type) = &query.file_type {
conditions.push(qdrant_client::qdrant::Condition::matches(
"file_type",
file_type.clone(),
));
}
if let Some(file_type) = &query.file_type {
conditions.push(qdrant_client::qdrant::Condition::matches(
"file_type",
file_type.clone(),
));
}
for tag in &query.tags {
conditions.push(qdrant_client::qdrant::Condition::matches(
"tags",
tag.clone(),
));
}
for tag in &query.tags {
conditions.push(qdrant_client::qdrant::Condition::matches(
"tags",
tag.clone(),
));
}
if !conditions.is_empty() {
Some(qdrant_client::qdrant::Filter::must(conditions))
if !conditions.is_empty() {
Some(qdrant_client::qdrant::Filter::must(conditions))
} else {
None
}
} else {
None
}
} else {
None
};
};
let mut search_builder = qdrant_client::qdrant::SearchPointsBuilder::new(
&self.collection_name,
@ -273,12 +265,11 @@ impl UserDriveVectorDB {
let mut results = Vec::new();
for point in search_result.result {
let payload = &point.payload;
if !payload.is_empty() {
let get_str = |key: &str| -> String {
payload.get(key)
payload
.get(key)
.and_then(|v| v.as_str())
.map(|s| s.to_string())
.unwrap_or_default()
@ -289,24 +280,26 @@ impl UserDriveVectorDB {
file_path: get_str("file_path"),
file_name: get_str("file_name"),
file_type: get_str("file_type"),
file_size: payload.get("file_size")
file_size: payload
.get("file_size")
.and_then(|v| v.as_integer())
.unwrap_or(0) as u64,
bucket: get_str("bucket"),
content_text: get_str("content_text"),
content_summary: payload.get("content_summary")
content_summary: payload
.get("content_summary")
.and_then(|v| v.as_str())
.map(|s| s.to_string()),
created_at: chrono::Utc::now(),
modified_at: chrono::Utc::now(),
indexed_at: chrono::Utc::now(),
mime_type: payload.get("mime_type")
mime_type: payload
.get("mime_type")
.and_then(|v| v.as_str())
.map(|s| s.to_string()),
tags: vec![],
};
let snippet = self.create_snippet(&file.content_text, &query.query_text, 200);
let highlights = self.extract_highlights(&file.content_text, &query.query_text, 3);
@ -328,7 +321,6 @@ impl UserDriveVectorDB {
query: &FileSearchQuery,
_query_embedding: Vec<f32>,
) -> Result<Vec<FileSearchResult>> {
let mut results = Vec::new();
let mut entries = fs::read_dir(&self.db_path).await?;
@ -336,7 +328,6 @@ impl UserDriveVectorDB {
if entry.path().extension().and_then(|s| s.to_str()) == Some("json") {
let content = fs::read_to_string(entry.path()).await?;
if let Ok(file) = serde_json::from_str::<FileDocument>(&content) {
if let Some(bucket) = &query.bucket {
if &file.bucket != bucket {
continue;
@ -349,7 +340,6 @@ impl UserDriveVectorDB {
}
}
let query_lower = query.query_text.to_lowercase();
if file.file_name.to_lowercase().contains(&query_lower)
|| file.content_text.to_lowercase().contains(&query_lower)
@ -381,7 +371,6 @@ impl UserDriveVectorDB {
Ok(results)
}
fn create_snippet(&self, content: &str, query: &str, max_length: usize) -> String {
let content_lower = content.to_lowercase();
let query_lower = query.to_lowercase();
@ -407,7 +396,6 @@ impl UserDriveVectorDB {
}
}
fn extract_highlights(&self, content: &str, query: &str, max_highlights: usize) -> Vec<String> {
let content_lower = content.to_lowercase();
let query_lower = query.to_lowercase();
@ -431,7 +419,6 @@ impl UserDriveVectorDB {
highlights
}
#[cfg(feature = "vectordb")]
pub async fn delete_file(&self, file_id: &str) -> Result<()> {
let client = self
@ -441,8 +428,9 @@ impl UserDriveVectorDB {
client
.delete_points(
qdrant_client::qdrant::DeletePointsBuilder::new(&self.collection_name)
.points(vec![qdrant_client::qdrant::PointId::from(file_id.to_string())]),
qdrant_client::qdrant::DeletePointsBuilder::new(&self.collection_name).points(
vec![qdrant_client::qdrant::PointId::from(file_id.to_string())],
),
)
.await?;
@ -459,7 +447,6 @@ impl UserDriveVectorDB {
Ok(())
}
#[cfg(feature = "vectordb")]
pub async fn get_count(&self) -> Result<u64> {
let client = self
@ -486,9 +473,7 @@ impl UserDriveVectorDB {
Ok(count)
}
pub async fn update_file_metadata(&self, file_id: &str, tags: Vec<String>) -> Result<()> {
pub async fn update_file_metadata(&self, _file_id: &str, _tags: Vec<String>) -> Result<()> {
#[cfg(not(feature = "vectordb"))]
{
let file_path = self.db_path.join(format!("{}.json", file_id));
@ -503,14 +488,12 @@ impl UserDriveVectorDB {
#[cfg(feature = "vectordb")]
{
log::warn!("Metadata update not yet implemented for Qdrant backend");
}
Ok(())
}
#[cfg(feature = "vectordb")]
pub async fn clear(&self) -> Result<()> {
let client = self
@ -518,10 +501,7 @@ impl UserDriveVectorDB {
.as_ref()
.ok_or_else(|| anyhow::anyhow!("Vector DB not initialized"))?;
client
.delete_collection(&self.collection_name)
.await?;
client.delete_collection(&self.collection_name).await?;
client
.create_collection(
@ -548,47 +528,39 @@ impl UserDriveVectorDB {
}
}
#[derive(Debug)]
pub struct FileContentExtractor;
impl FileContentExtractor {
pub async fn extract_text(file_path: &PathBuf, mime_type: &str) -> Result<String> {
match mime_type {
"text/plain" | "text/markdown" | "text/csv" => {
let content = fs::read_to_string(file_path).await?;
Ok(content)
}
t if t.starts_with("text/") => {
let content = fs::read_to_string(file_path).await?;
Ok(content)
}
"application/pdf" => {
log::info!("PDF extraction for {:?}", file_path);
Self::extract_pdf_text(file_path).await
}
"application/vnd.openxmlformats-officedocument.wordprocessingml.document"
| "application/msword" => {
log::info!("Word document extraction for {:?}", file_path);
Self::extract_docx_text(file_path).await
}
"application/vnd.openxmlformats-officedocument.spreadsheetml.sheet"
| "application/vnd.ms-excel" => {
log::info!("Spreadsheet extraction for {:?}", file_path);
Self::extract_xlsx_text(file_path).await
}
"application/json" => {
let content = fs::read_to_string(file_path).await?;
@ -598,7 +570,6 @@ impl FileContentExtractor {
}
}
"text/xml" | "application/xml" | "text/html" => {
let content = fs::read_to_string(file_path).await?;
@ -607,7 +578,6 @@ impl FileContentExtractor {
Ok(text.trim().to_string())
}
"text/rtf" | "application/rtf" => {
let content = fs::read_to_string(file_path).await?;
@ -734,14 +704,11 @@ impl FileContentExtractor {
}
}
pub fn should_index(mime_type: &str, file_size: u64) -> bool {
if file_size > 10 * 1024 * 1024 {
return false;
}
matches!(
mime_type,
"text/plain"

View file

@ -1,6 +1,6 @@
use anyhow::{anyhow, Context, Result};
use chrono::{DateTime, NaiveDate, Utc};
use reqwest::{Client, Method, StatusCode};
use reqwest::{Client, Method};
use serde::{de::DeserializeOwned, Deserialize, Serialize};
use serde_json::{json, Value};
use std::time::Duration;

View file

@ -38,16 +38,13 @@ async fn verify_webhook(Query(query): Query<WebhookVerifyQuery>) -> impl IntoRes
query.verify_token.as_deref(),
query.challenge,
) {
(Some(mode), Some(token), Some(challenge)) => {
if let Some(response) = adapter
.handle_webhook_verification(mode, token, &challenge)
.await
{
(StatusCode::OK, response)
} else {
(StatusCode::FORBIDDEN, "Verification failed".to_string())
}
}
(Some(mode), Some(token), Some(challenge)) => adapter
.handle_webhook_verification(mode, token, &challenge)
.await
.map_or_else(
|| (StatusCode::FORBIDDEN, "Verification failed".to_string()),
|response| (StatusCode::OK, response),
),
_ => (StatusCode::BAD_REQUEST, "Missing parameters".to_string()),
}
}

View file

@ -1,4 +1,3 @@
use super::ModelHandler;
#[derive(Debug)]
pub struct GptOss20bHandler;
@ -7,11 +6,9 @@ impl ModelHandler for GptOss20bHandler {
buffer.ends_with("final")
}
fn process_content(&self, content: &str) -> String {
if let Some(pos) = content.find("final") {
content[..pos].to_string()
} else {
content.to_string()
}
content
.find("final")
.map_or_else(|| content.to_string(), |pos| content[..pos].to_string())
}
fn has_analysis_markers(&self, buffer: &str) -> bool {
buffer.contains("analysis<|message|>")

View file

@ -1,4 +1,3 @@
pub mod deepseek_r3;
pub mod gpt_oss_120b;
pub mod gpt_oss_20b;
@ -8,14 +7,14 @@ pub trait ModelHandler: Send + Sync {
fn process_content(&self, content: &str) -> String;
fn has_analysis_markers(&self, buffer: &str) -> bool;
}
#[must_use]
pub fn get_handler(model_path: &str) -> Box<dyn ModelHandler> {
let path = model_path.to_lowercase();
if path.contains("deepseek") {
Box::new(deepseek_r3::DeepseekR3Handler)
} else if path.contains("120b") {
Box::new(gpt_oss_120b::GptOss120bHandler::new())
} else if path.contains("gpt-oss") || path.contains("gpt") {
Box::new(gpt_oss_20b::GptOss20bHandler)
} else {
Box::new(gpt_oss_20b::GptOss20bHandler)
}

View file

@ -3,7 +3,6 @@ pub use crate::core::bot::channels::teams::TeamsAdapter;
use crate::core::bot::channels::ChannelAdapter;
use crate::shared::state::AppState;
use axum::{extract::State, http::StatusCode, response::IntoResponse, routing::post, Json, Router};
use diesel::prelude::*;
use serde::Deserialize;
use std::sync::Arc;
use uuid::Uuid;

View file

@ -7,6 +7,12 @@ pub struct SystemMetrics {
pub gpu_usage: Option<f32>,
pub cpu_usage: f32,
}
/// Gets current system metrics including CPU and GPU usage.
///
/// # Errors
///
/// Returns an error if GPU utilization query fails when an NVIDIA GPU is present.
pub fn get_system_metrics() -> Result<SystemMetrics> {
let mut sys = System::new();
sys.refresh_cpu_usage();
@ -21,6 +27,9 @@ pub fn get_system_metrics() -> Result<SystemMetrics> {
cpu_usage,
})
}
/// Checks if an NVIDIA GPU is present in the system.
#[must_use]
pub fn has_nvidia_gpu() -> bool {
match std::process::Command::new("nvidia-smi")
.arg("--query-gpu=utilization.gpu")
@ -31,6 +40,15 @@ pub fn has_nvidia_gpu() -> bool {
Err(_) => false,
}
}
/// Gets GPU utilization metrics from nvidia-smi.
///
/// # Errors
///
/// Returns an error if:
/// - The nvidia-smi command fails to execute
/// - The command returns a non-success status
/// - The output cannot be parsed as UTF-8
pub fn get_gpu_utilization() -> Result<HashMap<String, f32>> {
let output = std::process::Command::new("nvidia-smi")
.arg("--query-gpu=utilization.gpu,utilization.memory")

View file

@ -1,22 +1,3 @@
use crate::bot::BotOrchestrator;
use crate::core::bot::channels::whatsapp::WhatsAppAdapter;
use crate::core::bot::channels::ChannelAdapter;
@ -33,16 +14,13 @@ use botlib::MessageType;
use chrono::Utc;
use diesel::prelude::*;
use log::{debug, error, info, warn};
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use serde::Deserialize;
use std::sync::Arc;
use tokio::sync::broadcast;
use uuid::Uuid;
pub type AttendantBroadcast = broadcast::Sender<AttendantNotification>;
#[derive(Debug, Deserialize)]
pub struct WebhookVerifyQuery {
#[serde(rename = "hub.mode")]
@ -53,7 +31,6 @@ pub struct WebhookVerifyQuery {
pub challenge: Option<String>,
}
#[derive(Debug, Deserialize)]
pub struct WhatsAppWebhook {
pub object: String,
@ -193,7 +170,6 @@ pub struct WhatsAppStatus {
pub recipient_id: String,
}
pub fn configure() -> Router<Arc<AppState>> {
Router::new()
.route("/webhook/whatsapp", get(verify_webhook))
@ -202,7 +178,6 @@ pub fn configure() -> Router<Arc<AppState>> {
.route("/api/attendance/respond", post(attendant_respond))
}
pub async fn verify_webhook(
State(state): State<Arc<AppState>>,
Query(params): Query<WebhookVerifyQuery>,
@ -218,7 +193,6 @@ pub async fn verify_webhook(
return (StatusCode::FORBIDDEN, "Invalid mode".to_string());
}
let expected_token = get_verify_token(&state).await;
if token == expected_token {
@ -230,7 +204,6 @@ pub async fn verify_webhook(
}
}
pub async fn handle_webhook(
State(state): State<Arc<AppState>>,
Json(payload): Json<WhatsAppWebhook>,
@ -244,12 +217,10 @@ pub async fn handle_webhook(
for entry in payload.entry {
for change in entry.changes {
if change.field == "messages" {
let contact = change.value.contacts.first();
let contact_name = contact.map(|c| c.profile.name.clone());
let contact_phone = contact.map(|c| c.wa_id.clone());
for message in change.value.messages {
if let Err(e) = process_incoming_message(
state.clone(),
@ -263,7 +234,6 @@ pub async fn handle_webhook(
}
}
for status in change.value.statuses {
debug!(
"Message {} status: {} for {}",
@ -277,7 +247,6 @@ pub async fn handle_webhook(
StatusCode::OK
}
async fn process_incoming_message(
state: Arc<AppState>,
message: &WhatsAppMessage,
@ -294,17 +263,14 @@ async fn process_incoming_message(
name, phone, message.message_type
);
let content = extract_message_content(message);
if content.is_empty() {
debug!("Empty message content, skipping");
return Ok(());
}
if content.starts_with('/') {
if let Some(response) = process_attendant_command(&state, &phone, &content).await {
let adapter = WhatsAppAdapter::new(state.conn.clone(), Uuid::nil());
let bot_response = BotResponse {
bot_id: Uuid::nil().to_string(),
@ -327,41 +293,32 @@ async fn process_incoming_message(
}
}
let (session, is_new) = find_or_create_session(&state, &phone, &name).await?;
let needs_human = check_needs_human(&session);
if needs_human {
route_to_attendant(&state, &session, &content, &name, &phone).await?;
} else {
route_to_bot(&state, &session, &content, is_new).await?;
}
Ok(())
}
async fn process_attendant_command(
state: &Arc<AppState>,
phone: &str,
content: &str,
) -> Option<String> {
let is_attendant = check_is_attendant(state, phone).await;
if !is_attendant {
return None;
}
let current_session = get_attendant_active_session(state, phone).await;
#[cfg(feature = "attendance")]
{
match crate::attendance::llm_assist::process_attendant_command(
@ -387,17 +344,13 @@ async fn process_attendant_command(
}
}
async fn check_is_attendant(state: &Arc<AppState>, phone: &str) -> bool {
let conn = state.conn.clone();
let _conn = state.conn.clone();
let phone_clone = phone.to_string();
tokio::task::spawn_blocking(move || {
let work_path = std::env::var("WORK_PATH").unwrap_or_else(|_| "./work".to_string());
if let Ok(entries) = std::fs::read_dir(&work_path) {
for entry in entries.flatten() {
let path = entry.path();
@ -406,7 +359,6 @@ async fn check_is_attendant(state: &Arc<AppState>, phone: &str) -> bool {
if attendant_path.exists() {
if let Ok(content) = std::fs::read_to_string(&attendant_path) {
for line in content.lines().skip(1) {
if line.to_lowercase().contains(&phone_clone.to_lowercase()) {
return true;
}
@ -422,7 +374,6 @@ async fn check_is_attendant(state: &Arc<AppState>, phone: &str) -> bool {
.unwrap_or(false)
}
async fn get_attendant_active_session(state: &Arc<AppState>, phone: &str) -> Option<Uuid> {
let conn = state.conn.clone();
let phone_clone = phone.to_string();
@ -432,7 +383,6 @@ async fn get_attendant_active_session(state: &Arc<AppState>, phone: &str) -> Opt
use crate::shared::models::schema::user_sessions;
let session: Option<UserSession> = user_sessions::table
.filter(
user_sessions::context_data
@ -455,7 +405,6 @@ async fn get_attendant_active_session(state: &Arc<AppState>, phone: &str) -> Opt
.flatten()
}
fn extract_message_content(message: &WhatsAppMessage) -> String {
match message.message_type.as_str() {
"text" => message
@ -506,7 +455,6 @@ fn extract_message_content(message: &WhatsAppMessage) -> String {
}
}
async fn find_or_create_session(
state: &Arc<AppState>,
phone: &str,
@ -521,7 +469,6 @@ async fn find_or_create_session(
use crate::shared::models::schema::{bots, user_sessions, users};
let existing_user: Option<(Uuid, String)> = users::table
.filter(users::email.eq(&phone_clone))
.select((users::id, users::username))
@ -532,7 +479,6 @@ async fn find_or_create_session(
let (user_id, _username) = if let Some((id, uname)) = existing_user {
(id, uname)
} else {
let new_user_id = Uuid::new_v4();
diesel::insert_into(users::table)
.values((
@ -547,14 +493,12 @@ async fn find_or_create_session(
(new_user_id, name_clone.clone())
};
let bot_id: Uuid = bots::table
.filter(bots::is_active.eq(true))
.select(bots::id)
.first(&mut db_conn)
.map_err(|e| format!("No active bot found: {}", e))?;
let existing_session: Option<UserSession> = user_sessions::table
.filter(user_sessions::user_id.eq(user_id))
.filter(user_sessions::bot_id.eq(bot_id))
@ -564,14 +508,12 @@ async fn find_or_create_session(
.map_err(|e| format!("Session query error: {}", e))?;
if let Some(session) = existing_session {
let age = Utc::now() - session.updated_at;
if age.num_hours() < 24 {
return Ok::<(UserSession, bool), String>((session, false));
}
}
let new_session_id = Uuid::new_v4();
let context_data = serde_json::json!({
"channel": "whatsapp",
@ -604,7 +546,6 @@ async fn find_or_create_session(
Ok(result)
}
fn check_needs_human(session: &UserSession) -> bool {
if let Some(needs_human) = session.context_data.get("needs_human") {
return needs_human.as_bool().unwrap_or(false);
@ -612,12 +553,11 @@ fn check_needs_human(session: &UserSession) -> bool {
false
}
async fn route_to_bot(
state: &Arc<AppState>,
session: &UserSession,
content: &str,
is_new: bool,
_is_new: bool,
) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
info!("Routing WhatsApp message to bot for session {}", session.id);
@ -633,16 +573,12 @@ async fn route_to_bot(
context_name: None,
};
let adapter = WhatsAppAdapter::new(state.conn.clone(), session.bot_id);
let orchestrator = BotOrchestrator::new(state.clone());
let (tx, mut rx) = tokio::sync::mpsc::channel::<BotResponse>(100);
let phone = session
.context_data
.get("phone")
@ -656,7 +592,6 @@ async fn route_to_bot(
tokio::spawn(async move {
while let Some(response) = rx.recv().await {
if !response.content.is_empty() {
let mut wa_response = response.clone();
wa_response.user_id = phone.clone();
wa_response.channel = "whatsapp".to_string();
@ -668,11 +603,9 @@ async fn route_to_bot(
}
});
if let Err(e) = orchestrator.stream_response(user_message, tx).await {
error!("Bot processing error: {}", e);
let error_response = BotResponse {
bot_id: session.bot_id.to_string(),
session_id: session.id.to_string(),
@ -697,7 +630,6 @@ async fn route_to_bot(
Ok(())
}
async fn route_to_attendant(
state: &Arc<AppState>,
session: &UserSession,
@ -710,7 +642,6 @@ async fn route_to_attendant(
session.id
);
let assigned_to = session
.context_data
.get("assigned_to")
@ -723,10 +654,8 @@ async fn route_to_attendant(
.and_then(|v| v.as_i64())
.unwrap_or(1) as i32;
save_message_to_history(state, session, content, "customer").await?;
let notification = AttendantNotification {
notification_type: "new_message".to_string(),
session_id: session.id.to_string(),
@ -740,7 +669,6 @@ async fn route_to_attendant(
priority,
};
if let Some(broadcast_tx) = state.attendant_broadcast.as_ref() {
if let Err(e) = broadcast_tx.send(notification.clone()) {
debug!("No attendants listening: {}", e);
@ -749,13 +677,11 @@ async fn route_to_attendant(
}
}
update_queue_item(state, session, content).await?;
Ok(())
}
async fn save_message_to_history(
state: &Arc<AppState>,
session: &UserSession,
@ -795,7 +721,6 @@ async fn save_message_to_history(
Ok(())
}
async fn update_queue_item(
state: &Arc<AppState>,
session: &UserSession,
@ -810,7 +735,6 @@ async fn update_queue_item(
use crate::shared::models::schema::user_sessions;
let current: UserSession = user_sessions::table
.find(session_id)
.first(&mut db_conn)
@ -836,7 +760,6 @@ async fn update_queue_item(
Ok(())
}
#[derive(Debug, Deserialize)]
pub struct SendMessageRequest {
pub to: String,
@ -845,14 +768,12 @@ pub struct SendMessageRequest {
pub template: Option<String>,
}
pub async fn send_message(
State(state): State<Arc<AppState>>,
Json(request): Json<SendMessageRequest>,
) -> impl IntoResponse {
info!("Sending WhatsApp message to {}", request.to);
let bot_id = get_default_bot_id(&state).await;
let adapter = WhatsAppAdapter::new(state.conn.clone(), bot_id);
@ -889,7 +810,6 @@ pub async fn send_message(
}
}
#[derive(Debug, Deserialize)]
pub struct AttendantRespondRequest {
pub session_id: String,
@ -897,7 +817,6 @@ pub struct AttendantRespondRequest {
pub attendant_id: String,
}
pub async fn attendant_respond(
State(state): State<Arc<AppState>>,
Json(request): Json<AttendantRespondRequest>,
@ -920,7 +839,6 @@ pub async fn attendant_respond(
}
};
let conn = state.conn.clone();
let session_result = tokio::task::spawn_blocking(move || {
let mut db_conn = conn.get().ok()?;
@ -947,14 +865,12 @@ pub async fn attendant_respond(
}
};
let channel = session
.context_data
.get("channel")
.and_then(|v| v.as_str())
.unwrap_or("web");
let recipient = session
.context_data
.get("phone")
@ -971,12 +887,10 @@ pub async fn attendant_respond(
);
}
if let Err(e) = save_message_to_history(&state, &session, &request.message, "attendant").await {
error!("Failed to save attendant message: {}", e);
}
match channel {
"whatsapp" => {
let adapter = WhatsAppAdapter::new(state.conn.clone(), session.bot_id);
@ -1013,7 +927,6 @@ pub async fn attendant_respond(
}
}
_ => {
if let Some(broadcast_tx) = state.attendant_broadcast.as_ref() {
let notification = AttendantNotification {
notification_type: "attendant_response".to_string(),
@ -1042,23 +955,18 @@ pub async fn attendant_respond(
}
}
async fn get_verify_token(_state: &Arc<AppState>) -> String {
use crate::core::secrets::SecretsManager;
match SecretsManager::from_env() {
Ok(secrets) => {
match secrets.get_value("gbo/whatsapp", "verify_token").await {
Ok(token) => token,
Err(_) => "webhook_verify".to_string(),
}
}
Ok(secrets) => match secrets.get_value("gbo/whatsapp", "verify_token").await {
Ok(token) => token,
Err(_) => "webhook_verify".to_string(),
},
Err(_) => "webhook_verify".to_string(),
}
}
async fn get_default_bot_id(state: &Arc<AppState>) -> Uuid {
let conn = state.conn.clone();