feat: Separate switchers from suggestions - server-side prompt resolution
Some checks failed
Botlib CI / build (push) Successful in 22s
BotServer CI / build (push) Failing after 0s
Bottest CI / build (push) Successful in 31s
BotUI CI / build (push) Successful in 52s

- Add Switcher struct to botlib (id, label, prompt, color, icon)
- Separate Redis key switchers:{bot_id}:{session_id} from suggestions
- Server-side SWITCHER_PROMPT_MAP resolves prompts (not frontend)
- Frontend sends active_switchers array in USER WS message
- Remove SYSTEM message dispatch (type 7 deprecated)
- Remove hardcoded promptMap from chat.html
- Render switchers from BotResponse.switchers (backend-driven)
- Support custom switchers via ADD_SWITCHER keyword
- Filter switcher-type items from get_suggestions()
- Add switchers field to all BotResponse construction sites
This commit is contained in:
Rodrigo Rodriguez (Pragmatismo) 2026-04-23 21:07:53 +00:00
parent 7a5cbf9e8f
commit cf00a51245
15 changed files with 605 additions and 447 deletions

View file

@ -31,7 +31,7 @@ pub use limits::{
RATE_LIMIT_BURST_MULTIPLIER, RATE_LIMIT_WINDOW_SECONDS,
};
pub use message_types::MessageType;
pub use models::{ApiResponse, BotResponse, Session, Suggestion, UserMessage};
pub use models::{ApiResponse, BotResponse, Session, Suggestion, Switcher, UserMessage};
pub use resilience::{ResilienceError, RetryConfig};
pub use version::{
get_botserver_version, init_version_registry, register_component, version_string,

View file

@ -152,6 +152,8 @@ pub struct UserMessage {
pub timestamp: DateTime<Utc>,
#[serde(skip_serializing_if = "Option::is_none")]
pub context_name: Option<String>,
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub active_switchers: Vec<String>,
}
impl UserMessage {
@ -173,6 +175,7 @@ impl UserMessage {
media_url: None,
timestamp: Utc::now(),
context_name: None,
active_switchers: Vec::new(),
}
}
@ -241,6 +244,49 @@ impl<S: Into<String>> From<S> for Suggestion {
}
}
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
pub struct Switcher {
    pub id: String,
    pub label: String,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub prompt: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub color: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub icon: Option<String>,
}
impl Switcher {
    /// Creates a switcher with the given id and label; the optional
    /// presentation fields (`prompt`, `color`, `icon`) start as `None`
    /// and can be attached with the `with_*` builder methods.
    #[must_use]
    pub fn new(id: impl Into<String>, label: impl Into<String>) -> Self {
        Switcher {
            id: id.into(),
            label: label.into(),
            prompt: None,
            color: None,
            icon: None,
        }
    }
    /// Returns this switcher with its server-side prompt set.
    #[must_use]
    pub fn with_prompt(self, prompt: impl Into<String>) -> Self {
        Self {
            prompt: Some(prompt.into()),
            ..self
        }
    }
    /// Returns this switcher with its display color set.
    #[must_use]
    pub fn with_color(self, color: impl Into<String>) -> Self {
        Self {
            color: Some(color.into()),
            ..self
        }
    }
    /// Returns this switcher with its display icon set.
    #[must_use]
    pub fn with_icon(self, icon: impl Into<String>) -> Self {
        Self {
            icon: Some(icon.into()),
            ..self
        }
    }
}
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct BotResponse {
pub bot_id: String,
@ -254,6 +300,8 @@ pub struct BotResponse {
pub is_complete: bool,
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub suggestions: Vec<Suggestion>,
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub switchers: Vec<Switcher>,
#[serde(skip_serializing_if = "Option::is_none")]
pub context_name: Option<String>,
#[serde(default)]
@ -281,6 +329,7 @@ impl BotResponse {
stream_token: None,
is_complete: true,
suggestions: Vec::new(),
switchers: Vec::new(),
context_name: None,
context_length: 0,
context_max_length: 0,
@ -305,6 +354,7 @@ impl BotResponse {
stream_token: Some(stream_token.into()),
is_complete: false,
suggestions: Vec::new(),
switchers: Vec::new(),
context_name: None,
context_length: 0,
context_max_length: 0,
@ -373,9 +423,10 @@ impl Default for BotResponse {
stream_token: None,
is_complete: true,
suggestions: Vec::new(),
context_name: None,
context_length: 0,
context_max_length: 0,
switchers: Vec::new(),
context_name: None,
context_length: 0,
context_max_length: 0,
}
}
}

View file

@ -207,15 +207,16 @@ pub async fn attendant_respond(
message_type: botlib::MessageType::BOT_RESPONSE,
stream_token: None,
is_complete: true,
suggestions: vec![],
context_name: None,
context_length: 0,
context_max_length: 0,
};
suggestions: vec![],
switchers: Vec::new(),
context_name: None,
context_length: 0,
context_max_length: 0,
};
match adapter.send_message(response).await {
Ok(_) => {
broadcast_attendant_action(&state, &session, &request, "attendant_response")
match adapter.send_message(response).await {
Ok(_) => {
broadcast_attendant_action(&state, &session, &request, "attendant_response")
.await;
(
@ -253,12 +254,13 @@ pub async fn attendant_respond(
message_type: botlib::MessageType::BOT_RESPONSE,
stream_token: None,
is_complete: true,
suggestions: vec![],
context_name: None,
context_length: 0,
context_max_length: 0,
};
tx.send(response).await.is_ok()
suggestions: vec![],
switchers: Vec::new(),
context_name: None,
context_length: 0,
context_max_length: 0,
};
tx.send(response).await.is_ok()
} else {
false
};
@ -578,12 +580,13 @@ async fn handle_attendant_message(
message_type: botlib::MessageType::BOT_RESPONSE,
stream_token: None,
is_complete: true,
suggestions: vec![],
context_name: None,
context_length: 0,
context_max_length: 0,
};
let _ = adapter.send_message(response).await;
suggestions: vec![],
switchers: Vec::new(),
context_name: None,
context_length: 0,
context_max_length: 0,
};
let _ = adapter.send_message(response).await;
}
}

View file

@ -334,23 +334,27 @@ pub fn get_suggestions(
redis::cmd("SMEMBERS").arg(&redis_key).query(&mut conn);
match result {
Ok(items) => {
for item in items {
if let Ok(json) = serde_json::from_str::<serde_json::Value>(&item) {
let suggestion = crate::core::shared::models::Suggestion {
text: json["text"].as_str().unwrap_or("").to_string(),
context: json["context"].as_str().map(|s| s.to_string()),
action: json
.get("action")
.and_then(|v| serde_json::to_string(v).ok()),
icon: json
.get("icon")
.and_then(|v| v.as_str())
.map(|s| s.to_string()),
};
suggestions.push(suggestion);
Ok(items) => {
for item in items {
if let Ok(json) = serde_json::from_str::<serde_json::Value>(&item) {
let item_type = json["type"].as_str().unwrap_or("");
if item_type == "switcher" || item_type == "switch_context" {
continue;
}
let suggestion = crate::core::shared::models::Suggestion {
text: json["text"].as_str().unwrap_or("").to_string(),
context: json["context"].as_str().map(|s| s.to_string()),
action: json
.get("action")
.and_then(|v| serde_json::to_string(v).ok()),
icon: json
.get("icon")
.and_then(|v| v.as_str())
.map(|s| s.to_string()),
};
suggestions.push(suggestion);
}
}
info!(
"Retrieved {} suggestions for session {}",
suggestions.len(),

View file

@ -42,6 +42,7 @@ pub async fn execute_talk(
stream_token: None,
is_complete: true,
suggestions: Vec::new(),
switchers: Vec::new(),
context_name: None,
context_length: 0,
context_max_length: 0,

View file

@ -582,15 +582,16 @@ async fn send_play_to_client(
message_type: crate::core::shared::message_types::MessageType::BOT_RESPONSE,
stream_token: None,
is_complete: true,
suggestions: Vec::new(),
context_name: None,
context_length: 0,
context_max_length: 0,
};
suggestions: Vec::new(),
switchers: Vec::new(),
context_name: None,
context_length: 0,
context_max_length: 0,
};
state
.web_adapter
.send_message_to_session(&session_id.to_string(), bot_response)
state
.web_adapter
.send_message_to_session(&session_id.to_string(), bot_response)
.await
.map_err(|e| format!("Failed to send to client: {e}"))?;
@ -624,6 +625,7 @@ async fn send_player_command(
stream_token: None,
is_complete: true,
suggestions: Vec::new(),
switchers: Vec::new(),
context_name: None,
context_length: 0,
context_max_length: 0,

View file

@ -1,11 +1,42 @@
use crate::core::shared::models::UserSession;
use crate::core::shared::models::Switcher;
use crate::core::shared::state::AppState;
use log::{error, trace};
use log::{error, info, trace};
use rhai::{Dynamic, Engine};
use serde_json::json;
use std::sync::Arc;
use std::time::Duration;
/// Ids of the built-in presentation-format switchers that have a
/// server-side prompt entry in `get_switcher_prompt_map`.
const STANDARD_SWITCHER_IDS: &[&str] = &[
    "tables", "infographic", "cards", "list", "comparison", "timeline", "markdown", "chart",
];
/// Static map from a standard switcher id to the formatting prompt
/// (in Portuguese) that is appended to the system prompt server-side
/// when that switcher is active.
///
/// NOTE(review): the ids here must stay in sync with
/// `STANDARD_SWITCHER_IDS`; consider deriving one from the other to
/// remove the duplication.
fn get_switcher_prompt_map() -> &'static [(&'static str, &'static str)] {
    &[
        ("tables", "REGRAS DE FORMATO: SEMPRE retorne suas respostas em formato de tabela HTML usando <table>, <thead>, <tbody>, <tr>, <th>, <td>. Cada dado deve ser uma célula. Use cabeçalhos claros na primeira linha. Se houver dados numéricos, alinhe à direita. Se houver texto, alinhe à esquerda. Use cores sutis em linhas alternadas (nth-child). NÃO use markdown tables, use HTML puro."),
        ("infographic", "REGRAS DE FORMATO: Crie representações visuais HTML usando SVG, progress bars, stat cards, e elementos gráficos. Use elementos como: <svg> para gráficos, <div style=\"width:X%;background:color\"> para barras de progresso, ícones emoji, badges coloridos. Organize informações visualmente com grids, flexbox, e espaçamento. Inclua legendas e rótulos visuais claros."),
        ("cards", "REGRAS DE FORMATO: Retorne informações em formato de cards HTML. Cada card deve ter: <div class=\"card\" style=\"border:1px solid #ddd;border-radius:8px;padding:16px;margin:8px;box-shadow:0 2px 4px rgba(0,0,0,0.1)\">. Dentro do card use: título em <h3> ou <strong>, subtítulo em <p> style=\"color:#666\", ícone emoji ou ícone SVG no topo, badges de status. Organize cards em grid usando display:grid ou flex-wrap."),
        ("list", "REGRAS DE FORMATO: Use apenas listas HTML: <ul> para bullets e <ol> para números numerados. Cada item em <li>. Use sublistas aninhadas quando apropriado. NÃO use parágrafos de texto, converta tudo em itens de lista. Adicione ícones emoji no início de cada <li> quando possível. Use classes CSS para estilização: .list-item, .sub-list."),
        ("comparison", "REGRAS DE FORMATO: Crie comparações lado a lado em HTML. Use grid de 2 colunas: <div style=\"display:grid;grid-template-columns:1fr 1fr;gap:20px\">. Cada lado em uma <div class=\"comparison-side\"> com borda colorida distinta. Use headers claros para cada lado. Adicione seção de \"Diferenças Chave\" com bullet points. Use cores contrastantes para cada lado (ex: azul vs laranja). Inclua tabela de comparação resumida no final."),
        ("timeline", "REGRAS DE FORMATO: Organize eventos cronologicamente em formato de timeline HTML. Use <div class=\"timeline\"> com border-left vertical. Cada evento em <div class=\"timeline-item\"> com: data em <span class=\"timeline-date\" style=\"font-weight:bold;color:#666\">, título em <h3>, descrição em <p>. Adicione círculo indicador na timeline line. Ordene do mais antigo para o mais recente. Use espaçamento claro entre eventos."),
        ("markdown", "REGRAS DE FORMATO: Use exclusivamente formato Markdown padrão. Sintaxe permitida: **negrito**, *itálico*, `inline code`, ```bloco de código```, # cabeçalhos, - bullets, 1. números, [links](url), ![alt](url), | tabela | markdown |. NÃO use HTML tags exceto para blocos de código. Siga estritamente a sintaxe CommonMark."),
        ("chart", "REGRAS DE FORMATO: Crie gráficos e diagramas em HTML SVG. Use elementos SVG: <svg width=\"X\" height=\"Y\">, <line> para gráficos de linha, <rect> para gráficos de barra, <circle> para gráficos de pizza, <path> para gráficos de área. Inclua eixos com labels, grid lines, legendas. Use cores distintas para cada série de dados (ex: vermelho, azul, verde). Adicione tooltips com valores ao hover."),
    ]
}
/// Resolves the server-side formatting prompt for a standard switcher id.
///
/// Returns `None` when `switcher_id` is not a standard switcher (e.g. a
/// `custom:` id), letting callers fall back to a stored custom prompt.
pub fn resolve_switcher_prompt(switcher_id: &str) -> Option<String> {
    // Idiomatic lookup instead of a manual scan-and-return loop.
    get_switcher_prompt_map()
        .iter()
        .find(|(id, _)| *id == switcher_id)
        .map(|(_, prompt)| (*prompt).to_string())
}
/// True if `id` names one of the built-in switchers (as opposed to a
/// user-defined `custom:<hash>` switcher).
fn is_standard_switcher(id: &str) -> bool {
    STANDARD_SWITCHER_IDS.contains(&id)
}
fn get_redis_connection(cache_client: &Arc<redis::Client>) -> Option<redis::Connection> {
let timeout = Duration::from_millis(50);
cache_client.get_connection_with_timeout(timeout).ok()
@ -21,7 +52,7 @@ pub fn clear_switchers_keyword(
engine
.register_custom_syntax(["CLEAR", "SWITCHERS"], true, move |_context, _inputs| {
if let Some(cache_client) = &cache {
let redis_key = format!("suggestions:{}:{}", user_session.bot_id, user_session.id);
let redis_key = format!("switchers:{}:{}", user_session.bot_id, user_session.id);
let mut conn = match get_redis_connection(cache_client) {
Some(conn) => conn,
None => {
@ -59,20 +90,18 @@ pub fn add_switcher_keyword(
) {
let cache = state.cache.clone();
// ADD_SWITCHER "switcher_name" as "button text"
// Note: compiler converts AS -> as (lowercase keywords), so we use lowercase here
engine
.register_custom_syntax(
["ADD_SWITCHER", "$expr$", "as", "$expr$"],
true,
move |context, inputs| {
let switcher_name = context.eval_expression_tree(&inputs[0])?.to_string();
let first_param = context.eval_expression_tree(&inputs[0])?.to_string();
let button_text = context.eval_expression_tree(&inputs[1])?.to_string();
add_switcher(
cache.as_ref(),
&user_session,
&switcher_name,
&first_param,
&button_text,
)?;
@ -85,26 +114,32 @@ pub fn add_switcher_keyword(
fn add_switcher(
cache: Option<&Arc<redis::Client>>,
user_session: &UserSession,
switcher_name: &str,
first_param: &str,
button_text: &str,
) -> Result<(), Box<rhai::EvalAltResult>> {
let (switcher_id, switcher_prompt) = if is_standard_switcher(first_param) {
(first_param.to_string(), resolve_switcher_prompt(first_param))
} else {
let custom_id = format!("custom:{}", simple_hash(first_param));
(custom_id, Some(first_param.to_string()))
};
trace!(
"ADD_SWITCHER called: switcher={}, button={}",
switcher_name,
button_text
"ADD_SWITCHER: id={}, label={}, is_standard={}",
switcher_id,
button_text,
is_standard_switcher(first_param)
);
if let Some(cache_client) = cache {
let redis_key = format!("suggestions:{}:{}", user_session.bot_id, user_session.id);
let redis_key = format!("switchers:{}:{}", user_session.bot_id, user_session.id);
let suggestion = json!({
"type": "switcher",
"switcher": switcher_name,
"text": button_text,
"action": {
"type": "switch_context",
"switcher": switcher_name
}
let switcher_data = json!({
"id": switcher_id,
"label": button_text,
"prompt": switcher_prompt,
"is_standard": is_standard_switcher(first_param),
"original_param": first_param
});
let mut conn = match get_redis_connection(cache_client) {
@ -117,39 +152,137 @@ fn add_switcher(
let _: Result<i64, redis::RedisError> = redis::cmd("SADD")
.arg(&redis_key)
.arg(suggestion.to_string())
.arg(switcher_data.to_string())
.query(&mut conn);
trace!(
"Added switcher suggestion '{}' to session {}",
switcher_name,
"Added switcher '{}' ({}) to session {}",
switcher_id,
if is_standard_switcher(first_param) { "standard" } else { "custom" },
user_session.id
);
} else {
trace!("No cache configured, switcher suggestion not added");
trace!("No cache configured, switcher not added");
}
Ok(())
}
/// Deterministic base-31 polynomial hash of `s` over its UTF-8 bytes,
/// with wrapping arithmetic. Used to derive a stable id for custom
/// switchers; not cryptographic.
fn simple_hash(s: &str) -> u64 {
    s.bytes()
        .fold(0u64, |acc, b| acc.wrapping_mul(31).wrapping_add(u64::from(b)))
}
/// Loads the switchers stored for this session from the Redis set
/// `switchers:{bot_id}:{session_id}`.
///
/// Returns an empty vector when no cache is configured, a connection
/// cannot be obtained within the short timeout, or the Redis call fails.
/// Entries that are not valid JSON are skipped silently.
pub fn get_switchers(
    cache: Option<&Arc<redis::Client>>,
    bot_id: &str,
    session_id: &str,
) -> Vec<Switcher> {
    let mut switchers = Vec::new();
    if let Some(cache_client) = cache {
        let redis_key = format!("switchers:{}:{}", bot_id, session_id);
        let mut conn = match get_redis_connection(cache_client) {
            Some(conn) => conn,
            None => {
                trace!("Cache not ready, returning empty switchers");
                return switchers;
            }
        };
        let result: Result<Vec<String>, redis::RedisError> =
            redis::cmd("SMEMBERS").arg(&redis_key).query(&mut conn);
        match result {
            Ok(items) => {
                for item in items {
                    if let Ok(json) = serde_json::from_str::<serde_json::Value>(&item) {
                        let mut switcher = Switcher::new(
                            json["id"].as_str().unwrap_or(""),
                            json["label"].as_str().unwrap_or(""),
                        );
                        // Only attach a prompt when a non-empty one is stored.
                        // Previously a missing/null "prompt" became Some(""),
                        // which defeated `skip_serializing_if = "Option::is_none"`
                        // on the wire and prevented the fallback to
                        // resolve_switcher_prompt() in resolve_active_switchers().
                        if let Some(p) = json["prompt"].as_str().filter(|p| !p.is_empty()) {
                            switcher = switcher.with_prompt(p);
                        }
                        switchers.push(switcher);
                    }
                }
                info!(
                    "Retrieved {} switchers for session {}",
                    switchers.len(),
                    session_id
                );
            }
            Err(e) => error!("Failed to get switchers from Redis: {}", e),
        }
    }
    switchers
}
pub fn resolve_active_switchers(
cache: Option<&Arc<redis::Client>>,
bot_id: &str,
session_id: &str,
active_ids: &[String],
) -> String {
if active_ids.is_empty() {
return String::new();
}
let stored_switchers = get_switchers(cache, bot_id, session_id);
let mut prompts: Vec<String> = Vec::new();
for id in active_ids {
let prompt = stored_switchers
.iter()
.find(|s| s.id == *id)
.and_then(|s| s.prompt.clone())
.or_else(|| resolve_switcher_prompt(id));
if let Some(p) = prompt {
if !p.is_empty() {
prompts.push(p);
}
}
}
prompts.join("\n\n")
}
#[cfg(test)]
mod tests {
use serde_json::json;
use super::*;
#[test]
fn test_switcher_json() {
let suggestion = json!({
"type": "switcher",
"switcher": "mode_switcher",
"text": "Switch Mode",
"action": {
"type": "switch_context",
"switcher": "mode_switcher"
}
});
fn test_is_standard_switcher() {
assert!(is_standard_switcher("tables"));
assert!(is_standard_switcher("chart"));
assert!(!is_standard_switcher("my_custom"));
}
assert_eq!(suggestion["type"], "switcher");
assert_eq!(suggestion["action"]["type"], "switch_context");
assert_eq!(suggestion["switcher"], "mode_switcher");
#[test]
fn test_resolve_standard_prompt() {
let prompt = resolve_switcher_prompt("tables");
assert!(prompt.is_some());
assert!(prompt.unwrap().contains("tabela HTML"));
}
#[test]
fn test_resolve_unknown_returns_none() {
let prompt = resolve_switcher_prompt("nonexistent");
assert!(prompt.is_none());
}
#[test]
fn test_custom_switcher_id() {
let id = if is_standard_switcher("use quadrados") {
"use quadrados".to_string()
} else {
format!("custom:{}", simple_hash("use quadrados"))
};
assert!(id.starts_with("custom:"));
}
}

View file

@ -249,43 +249,46 @@ pub async fn send_message_to_recipient(
message_type: MessageType::EXTERNAL,
stream_token: None,
is_complete: true,
suggestions: vec![],
context_name: None,
context_length: 0,
context_max_length: 0,
};
adapter.send_message(response).await?;
}
"instagram" => {
let adapter = InstagramAdapter::new();
let response = crate::core::shared::models::BotResponse {
bot_id: "default".to_string(),
session_id: user.id.to_string(),
user_id: recipient_id.clone(),
channel: "instagram".to_string(),
content: message.to_string(),
message_type: MessageType::EXTERNAL,
stream_token: None,
is_complete: true,
suggestions: vec![],
context_name: None,
context_length: 0,
context_max_length: 0,
};
adapter.send_message(response).await?;
}
"teams" => {
let adapter = TeamsAdapter::new(state.conn.clone(), user.bot_id);
let response = crate::core::shared::models::BotResponse {
bot_id: "default".to_string(),
session_id: user.id.to_string(),
user_id: recipient_id.clone(),
channel: "teams".to_string(),
content: message.to_string(),
message_type: MessageType::EXTERNAL,
stream_token: None,
is_complete: true,
suggestions: vec![],
suggestions: vec![],
switchers: Vec::new(),
context_name: None,
context_length: 0,
context_max_length: 0,
};
adapter.send_message(response).await?;
}
"instagram" => {
let adapter = InstagramAdapter::new();
let response = crate::core::shared::models::BotResponse {
bot_id: "default".to_string(),
session_id: user.id.to_string(),
user_id: recipient_id.clone(),
channel: "instagram".to_string(),
content: message.to_string(),
message_type: MessageType::EXTERNAL,
stream_token: None,
is_complete: true,
suggestions: vec![],
switchers: Vec::new(),
context_name: None,
context_length: 0,
context_max_length: 0,
};
adapter.send_message(response).await?;
}
"teams" => {
let adapter = TeamsAdapter::new(state.conn.clone(), user.bot_id);
let response = crate::core::shared::models::BotResponse {
bot_id: "default".to_string(),
session_id: user.id.to_string(),
user_id: recipient_id.clone(),
channel: "teams".to_string(),
content: message.to_string(),
message_type: MessageType::EXTERNAL,
stream_token: None,
is_complete: true,
suggestions: vec![],
switchers: Vec::new(),
context_name: None,
context_length: 0,
context_max_length: 0,
@ -608,6 +611,7 @@ async fn send_web_message(
stream_token: None,
is_complete: true,
suggestions: Vec::new(),
switchers: Vec::new(),
context_name: None,
context_length: 0,
context_max_length: 0,

View file

@ -19,9 +19,13 @@ use crate::llm::OpenAIClient;
use crate::nvidia::get_system_metrics;
use crate::core::shared::message_types::MessageType;
use crate::core::shared::models::{BotResponse, UserMessage, UserSession};
#[cfg(not(feature = "chat"))]
use crate::core::shared::models::Switcher;
use crate::core::shared::state::AppState;
#[cfg(feature = "chat")]
use crate::basic::keywords::add_suggestion::get_suggestions;
#[cfg(feature = "chat")]
use crate::basic::keywords::switcher::{get_switchers, resolve_active_switchers};
use html2md::parse_html;
use axum::extract::ws::{Message, WebSocket};
@ -408,45 +412,44 @@ impl BotOrchestrator {
format!("Erro ao executar '{}': {}", tool_name, tool_result.error.unwrap_or_default())
};
// Direct tool execution — return result immediately, no LLM call
let mut suggestions = vec![];
if let Some(cache) = &self.state.cache {
#[cfg(feature = "chat")]
{
// Try to restore existing suggestions so they don't disappear in the UI
suggestions = get_suggestions(Some(cache), &message.bot_id, &message.session_id);
}
}
// Direct tool execution — return result immediately, no LLM call
let mut suggestions = vec![];
let mut switchers = vec![];
if let Some(cache) = &self.state.cache {
#[cfg(feature = "chat")]
{
// Try to restore existing suggestions so they don't disappear in the UI
suggestions = get_suggestions(Some(cache), &message.bot_id, &message.session_id);
switchers = get_switchers(Some(cache), &message.bot_id, &message.session_id);
}
}
let final_response = BotResponse {
bot_id: message.bot_id.clone(),
user_id: message.user_id.clone(),
session_id: message.session_id.clone(),
channel: message.channel.clone(),
content: response_content,
message_type: MessageType::BOT_RESPONSE,
stream_token: None,
is_complete: true,
suggestions,
context_name: None,
context_length: 0,
context_max_length: 0,
};
let final_response = BotResponse {
bot_id: message.bot_id.clone(),
user_id: message.user_id.clone(),
session_id: message.session_id.clone(),
channel: message.channel.clone(),
content: response_content,
message_type: MessageType::BOT_RESPONSE,
stream_token: None,
is_complete: true,
suggestions,
switchers,
context_name: None,
context_length: 0,
context_max_length: 0,
};
let _ = response_tx.send(final_response).await;
return Ok(());
}
}
// Handle SYSTEM messages (type 7) - inject into history as system role
if message.message_type == MessageType::SYSTEM {
if !message_content.is_empty() {
info!("SYSTEM message injection for session {}", session_id);
let mut sm = self.state.session_manager.blocking_lock();
sm.save_message(session_id, user_id, 3, &message_content, 1)?; // role 3 = System
}
return Ok(());
}
// Handle SYSTEM messages (type 7) - no longer saved to DB, just acknowledge
if message.message_type == MessageType::SYSTEM {
trace!("SYSTEM message received for session {} (deprecated - switchers now via active_switchers field)", session_id);
return Ok(());
}
// Legacy: Handle direct tool invocation via __TOOL__: prefix
if message_content.starts_with("__TOOL__:") {
@ -477,11 +480,13 @@ impl BotOrchestrator {
message_type: MessageType::BOT_RESPONSE,
stream_token: None,
is_complete: true,
suggestions: vec![],
context_name: None,
context_length: 0,
context_max_length: 0,
};
suggestions: vec![],
switchers: Vec::new(),
context_name: None,
context_length: 0,
context_max_length: 0,
};
if let Err(e) = response_tx.send(final_response).await {
error!("Failed to send tool response: {}", e);
@ -603,10 +608,26 @@ impl BotOrchestrator {
Ok((session, context_data, history, model, key, system_prompt, bot_llm_url, explicit_llm_provider, bot_endpoint_path))
},
)
.await??
};
.await??
};
let mut messages = OpenAIClient::build_messages(&system_prompt, &context_data, &history);
let system_prompt = if !message.active_switchers.is_empty() {
let switcher_prompts = resolve_active_switchers(
self.state.cache.as_ref(),
&session.bot_id.to_string(),
&session.id.to_string(),
&message.active_switchers,
);
if switcher_prompts.is_empty() {
system_prompt
} else {
format!("{system_prompt}\n\n{switcher_prompts}")
}
} else {
system_prompt
};
let mut messages = OpenAIClient::build_messages(&system_prompt, &context_data, &history);
trace!("Built messages array with {} items, first message role: {:?}",
messages.as_array().map(|a| a.len()).unwrap_or(0),
@ -731,22 +752,28 @@ impl BotOrchestrator {
let bot_id_str = message.bot_id.clone();
let session_id_str = message.session_id.clone();
#[cfg(feature = "chat")]
let suggestions = get_suggestions(self.state.cache.as_ref(), &bot_id_str, &session_id_str);
#[cfg(not(feature = "chat"))]
let suggestions: Vec<crate::core::shared::models::Suggestion> = Vec::new();
#[cfg(feature = "chat")]
let suggestions = get_suggestions(self.state.cache.as_ref(), &bot_id_str, &session_id_str);
#[cfg(not(feature = "chat"))]
let suggestions: Vec<crate::core::shared::models::Suggestion> = Vec::new();
let final_response = BotResponse {
bot_id: message.bot_id,
user_id: message.user_id,
session_id: message.session_id,
channel: message.channel,
content: String::new(),
message_type: MessageType::BOT_RESPONSE,
stream_token: None,
is_complete: true,
suggestions,
context_name: None,
#[cfg(feature = "chat")]
let switchers = get_switchers(self.state.cache.as_ref(), &bot_id_str, &session_id_str);
#[cfg(not(feature = "chat"))]
let switchers: Vec<Switcher> = Vec::new();
let final_response = BotResponse {
bot_id: message.bot_id,
user_id: message.user_id,
session_id: message.session_id,
channel: message.channel,
content: String::new(),
message_type: MessageType::BOT_RESPONSE,
stream_token: None,
is_complete: true,
suggestions,
switchers,
context_name: None,
context_length: 0,
context_max_length: 0,
};
@ -982,8 +1009,9 @@ while let Some(chunk) = stream_rx.recv().await {
message_type: MessageType::BOT_RESPONSE,
stream_token: None,
is_complete: false,
suggestions: Vec::new(),
context_name: None,
suggestions: Vec::new(),
switchers: Vec::new(),
context_name: None,
context_length: 0,
context_max_length: 0,
};
@ -1046,6 +1074,7 @@ while let Some(chunk) = stream_rx.recv().await {
stream_token: None,
is_complete: false,
suggestions: Vec::new(),
switchers: Vec::new(),
context_name: None,
context_length: 0,
context_max_length: 0,
@ -1078,6 +1107,7 @@ while let Some(chunk) = stream_rx.recv().await {
stream_token: None,
is_complete: false,
suggestions: Vec::new(),
switchers: Vec::new(),
context_name: None,
context_length: 0,
context_max_length: 0,
@ -1114,6 +1144,7 @@ while let Some(chunk) = stream_rx.recv().await {
stream_token: None,
is_complete: false,
suggestions: Vec::new(),
switchers: Vec::new(),
context_name: None,
context_length: 0,
context_max_length: 0,
@ -1165,6 +1196,7 @@ while let Some(chunk) = stream_rx.recv().await {
stream_token: None,
is_complete: false,
suggestions: Vec::new(),
switchers: Vec::new(),
context_name: None,
context_length: 0,
context_max_length: 0,
@ -1205,6 +1237,7 @@ while let Some(chunk) = stream_rx.recv().await {
stream_token: None,
is_complete: false,
suggestions: Vec::new(),
switchers: Vec::new(),
context_name: None,
context_length: 0,
context_max_length: 0,
@ -1264,6 +1297,7 @@ while let Some(chunk) = stream_rx.recv().await {
stream_token: None,
is_complete: false,
suggestions: Vec::new(),
switchers: Vec::new(),
context_name: None,
context_length: 0,
context_max_length: 0,
@ -1348,6 +1382,11 @@ while let Some(chunk) = stream_rx.recv().await {
#[cfg(not(feature = "chat"))]
let suggestions: Vec<crate::core::shared::models::Suggestion> = Vec::new();
#[cfg(feature = "chat")]
let switchers = get_switchers(self.state.cache.as_ref(), &bot_id_str, &session_id_str);
#[cfg(not(feature = "chat"))]
let switchers: Vec<Switcher> = Vec::new();
// Flush any remaining HTML buffer before sending final response
if !html_buffer.is_empty() {
trace!("Flushing remaining {} chars in HTML buffer", html_buffer.len());
@ -1359,13 +1398,14 @@ while let Some(chunk) = stream_rx.recv().await {
content: html_buffer.clone(),
message_type: MessageType::BOT_RESPONSE,
stream_token: None,
is_complete: false,
suggestions: Vec::new(),
context_name: None,
context_length: 0,
context_max_length: 0,
};
let _ = response_tx.send(final_chunk).await;
is_complete: false,
suggestions: Vec::new(),
switchers: Vec::new(),
context_name: None,
context_length: 0,
context_max_length: 0,
};
let _ = response_tx.send(final_chunk).await;
html_buffer.clear();
}
@ -1383,11 +1423,12 @@ while let Some(chunk) = stream_rx.recv().await {
message_type: MessageType::BOT_RESPONSE,
stream_token: None,
is_complete: true,
suggestions,
context_name: None,
context_length: 0,
context_max_length: 0,
};
suggestions,
switchers,
context_name: None,
context_length: 0,
context_max_length: 0,
};
response_tx.send(final_response).await?;
Ok(())
@ -1411,6 +1452,7 @@ while let Some(chunk) = stream_rx.recv().await {
stream_token: None,
is_complete: true,
suggestions: Vec::new(),
switchers: Vec::new(),
context_name: None,
context_length: 0,
context_max_length: 0,
@ -1711,10 +1753,11 @@ async fn handle_websocket(
}
}
// Fetch suggestions from Redis and send to frontend
let user_id_str = user_id.to_string();
let suggestions = get_suggestions(state_for_redis.cache.as_ref(), &bot_id_str, &session_id_str);
if !suggestions.is_empty() {
// Fetch suggestions and switchers from Redis and send to frontend
let user_id_str = user_id.to_string();
let suggestions = get_suggestions(state_for_redis.cache.as_ref(), &bot_id_str, &session_id_str);
let switchers = get_switchers(state_for_redis.cache.as_ref(), &bot_id_str, &session_id_str);
if !suggestions.is_empty() || !switchers.is_empty() {
info!("Sending {} suggestions to frontend for session {}", suggestions.len(), session_id);
let response = BotResponse {
bot_id: bot_id_str.clone(),
@ -1724,9 +1767,10 @@ async fn handle_websocket(
content: String::new(),
message_type: MessageType::BOT_RESPONSE,
stream_token: None,
is_complete: true,
suggestions,
context_name: None,
is_complete: true,
suggestions,
switchers,
context_name: None,
context_length: 0,
context_max_length: 0,
};

View file

@ -185,6 +185,7 @@ impl MultimediaHandler for DefaultMultimediaHandler {
stream_token: None,
is_complete: true,
suggestions: Vec::new(),
switchers: Vec::new(),
context_name: None,
context_length: 0,
context_max_length: 0,
@ -213,6 +214,7 @@ impl MultimediaHandler for DefaultMultimediaHandler {
stream_token: None,
is_complete: true,
suggestions: Vec::new(),
switchers: Vec::new(),
context_name: None,
context_length: 0,
context_max_length: 0,
@ -246,6 +248,7 @@ impl MultimediaHandler for DefaultMultimediaHandler {
stream_token: None,
is_complete: true,
suggestions: Vec::new(),
switchers: Vec::new(),
context_name: None,
context_length: 0,
context_max_length: 0,
@ -279,6 +282,7 @@ impl MultimediaHandler for DefaultMultimediaHandler {
stream_token: None,
is_complete: true,
suggestions: Vec::new(),
switchers: Vec::new(),
context_name: None,
context_length: 0,
context_max_length: 0,
@ -308,6 +312,7 @@ impl MultimediaHandler for DefaultMultimediaHandler {
stream_token: None,
is_complete: true,
suggestions: Vec::new(),
switchers: Vec::new(),
context_name: None,
context_length: 0,
context_max_length: 0,
@ -325,6 +330,7 @@ impl MultimediaHandler for DefaultMultimediaHandler {
stream_token: None,
is_complete: true,
suggestions: Vec::new(),
switchers: Vec::new(),
context_name: None,
context_length: 0,
context_max_length: 0,

View file

@ -46,7 +46,7 @@ pub use super::schema::{
pub use super::schema::kb::{kb_collections, kb_group_associations};
pub use botlib::message_types::MessageType;
pub use botlib::models::{ApiResponse, Attachment, BotResponse, Session, Suggestion, UserMessage};
pub use botlib::models::{ApiResponse, Attachment, BotResponse, Session, Suggestion, Switcher, UserMessage};
// Manually export OrganizationInvitation as it is defined in core but table is organization_invitations
pub use self::core::OrganizationInvitation;

View file

@ -458,12 +458,13 @@ impl MeetingService {
message_type: MessageType::BOT_RESPONSE,
stream_token: None,
is_complete: true,
suggestions: Vec::new(),
context_name: None,
context_length: 0,
context_max_length: 0,
})
}
suggestions: Vec::new(),
switchers: Vec::new(),
context_name: None,
context_length: 0,
context_max_length: 0,
})
}
async fn broadcast_to_room(&self, room_id: &str, message: MeetingMessage) {
let connections = self.connections.read().await;

View file

@ -100,11 +100,12 @@ async fn send_message(
message_type: botlib::MessageType::BOT_RESPONSE,
stream_token: None,
is_complete: true,
suggestions: vec![],
context_name: None,
context_length: 0,
context_max_length: 0,
};
suggestions: vec![],
switchers: Vec::new(),
context_name: None,
context_length: 0,
context_max_length: 0,
};
match adapter.send_message(response).await {
Ok(_) => (StatusCode::OK, Json(serde_json::json!({"success": true}))),

View file

@ -579,14 +579,15 @@ async fn process_incoming_message(
message_type: MessageType::BOT_RESPONSE,
stream_token: None,
is_complete: true,
suggestions: vec![],
context_name: None,
context_length: 0,
context_max_length: 0,
};
if let Err(e) = adapter.send_message(bot_response).await {
error!("Failed to send routing confirmation: {}", e);
}
suggestions: vec![],
switchers: Vec::new(),
context_name: None,
context_length: 0,
context_max_length: 0,
};
if let Err(e) = adapter.send_message(bot_response).await {
error!("Failed to send routing confirmation: {}", e);
}
// Execute start.bas immediately by calling route_to_bot
info!("Executing start.bas for bot '{}' via route_to_bot", routed_bot_id);
@ -634,14 +635,15 @@ async fn process_incoming_message(
message_type: MessageType::BOT_RESPONSE,
stream_token: None,
is_complete: true,
suggestions: vec![],
context_name: None,
context_length: 0,
context_max_length: 0,
};
if let Err(e) = adapter.send_message(bot_response).await {
error!("Failed to send clear confirmation: {}", e);
}
suggestions: vec![],
switchers: Vec::new(),
context_name: None,
context_length: 0,
context_max_length: 0,
};
if let Err(e) = adapter.send_message(bot_response).await {
error!("Failed to send clear confirmation: {}", e);
}
info!("Cleared conversation history for WhatsApp user {}", phone);
}
Err(e) => {
@ -663,14 +665,15 @@ async fn process_incoming_message(
message_type: MessageType::BOT_RESPONSE,
stream_token: None,
is_complete: true,
suggestions: vec![],
context_name: None,
context_length: 0,
context_max_length: 0,
};
if let Err(e) = adapter.send_message(bot_response).await {
error!("Failed to send attendant command response: {}", e);
}
suggestions: vec![],
switchers: Vec::new(),
context_name: None,
context_length: 0,
context_max_length: 0,
};
if let Err(e) = adapter.send_message(bot_response).await {
error!("Failed to send attendant command response: {}", e);
}
return Ok(());
}
}
@ -1147,13 +1150,14 @@ async fn route_to_bot(
message_type: crate::core::shared::models::MessageType::BOT_RESPONSE,
stream_token: None,
is_complete: is_final,
suggestions: vec![],
context_name: None,
context_length: 0,
context_max_length: 0,
};
suggestions: vec![],
switchers: Vec::new(),
context_name: None,
context_length: 0,
context_max_length: 0,
};
if let Err(e) = adapter.send_message(wa_response).await {
if let Err(e) = adapter.send_message(wa_response).await {
log::error!("Failed to send WhatsApp response part: {}", e);
}
// Rate limiting is handled by WhatsAppAdapter::send_whatsapp_message
@ -1319,13 +1323,14 @@ async fn route_to_bot(
message_type: MessageType::BOT_RESPONSE,
stream_token: None,
is_complete: true,
suggestions: vec![],
context_name: None,
context_length: 0,
context_max_length: 0,
};
suggestions: vec![],
switchers: Vec::new(),
context_name: None,
context_length: 0,
context_max_length: 0,
};
if let Err(e) = adapter.send_message(error_response).await {
if let Err(e) = adapter.send_message(error_response).await {
error!("Failed to send error response: {}", e);
}
}
@ -1490,6 +1495,7 @@ pub async fn send_message(
stream_token: None,
is_complete: true,
suggestions: vec![],
switchers: Vec::new(),
context_name: None,
context_length: 0,
context_max_length: 0,
@ -1600,13 +1606,14 @@ pub async fn attendant_respond(
message_type: MessageType::BOT_RESPONSE,
stream_token: None,
is_complete: true,
suggestions: vec![],
context_name: None,
context_length: 0,
context_max_length: 0,
};
suggestions: vec![],
switchers: Vec::new(),
context_name: None,
context_length: 0,
context_max_length: 0,
};
match adapter.send_message(response).await {
match adapter.send_message(response).await {
Ok(_) => (
StatusCode::OK,
Json(serde_json::json!({

View file

@ -18,7 +18,7 @@
<footer>
<div class="chat-footer-content">
<div class="suggestions-container" id="suggestions"></div>
<div class="switchers-container" id="switchers">
<div class="switchers-container" id="switchers" style="display:none">
<div class="switchers-label">Formato:</div>
<div class="switchers-chips" id="switchersChips">
<!-- Switcher chips will be rendered here -->
@ -119,7 +119,7 @@
CONTINUE: 3,
SUGGESTION: 4,
CONTEXT_CHANGE: 5,
SYSTEM: 7, // For switcher/modifier injection - doesn't appear in chat history
TOOL_EXEC: 6,
};
var EntityTypes = {
@ -853,13 +853,21 @@ function processMessage(data) {
isStreaming = false;
// Render suggestions when message is complete
if (
data.suggestions &&
Array.isArray(data.suggestions) &&
data.suggestions.length > 0
) {
renderSuggestions(data.suggestions);
}
if (
data.suggestions &&
Array.isArray(data.suggestions) &&
data.suggestions.length > 0
) {
renderSuggestions(data.suggestions);
}
if (
data.switchers &&
Array.isArray(data.switchers) &&
data.switchers.length > 0
) {
renderBotSwitchers(data.switchers);
}
} else {
// Streaming chunk received
if (!isStreaming) {
@ -972,44 +980,12 @@ function hideThinkingIndicator() {
var chip = document.createElement("button");
chip.className = "suggestion-chip";
chip.textContent = suggestion.text || "Suggestion";
chip.className = isSwitcher ? "switcher-chip" : "suggestion-chip";
var switcherName = action ? (action.switcher || action.context) : suggestion.text;
if (isSwitcher) {
var hash = 0;
for (var i = 0; i < switcherName.length; i++) {
hash = switcherName.charCodeAt(i) + ((hash << 5) - hash);
}
var h = Math.abs(hash) % 360;
chip.style.setProperty("--switcher-color", "hsl(" + h + ", 70%, 50%)");
if (activeSwitchers.has(switcherName)) {
chip.classList.add("active");
}
}
chip.textContent = suggestion.text || "Suggestion";
chip.onclick = (function (sugg, act, name, isSw) {
return function () {
console.log("Suggestion clicked:", sugg, "as switcher:", isSw);
if (isSw) {
// Toggle switcher logic
if (activeSwitchers.has(name)) {
activeSwitchers.delete(name);
chip.classList.remove("active");
} else {
activeSwitchers.add(name);
chip.classList.add("active");
}
console.log("Switcher " + name + " is now " + (activeSwitchers.has(name) ? "ON" : "OFF"));
renderSwitchers(); // Re-render switcher chips to update their state
return; // STAY QUIET
}
if (act) {
if (act.type === "invoke_tool") {
chip.onclick = (function (sugg, act) {
return function () {
if (act) {
if (act.type === "invoke_tool") {
// Direct tool execution via WebSocket (Type 6)
if (ws && ws.readyState === WebSocket.OPEN) {
ws.send(JSON.stringify({
@ -1034,7 +1010,7 @@ function hideThinkingIndicator() {
// Default fallback: send suggestion text
window.sendMessage(sugg.text);
};
})(suggestion, action, switcherName, isSwitcher);
})(suggestion, action);
suggestionsEl.appendChild(chip);
});
@ -1068,77 +1044,30 @@ function sendMessage(messageContent) {
// Add user message to chat BEFORE sending to server
// This ensures user message appears immediately and streaming responses go to new div
addMessage("user", content);
addMessage("user", content);
// Build switcher prompts as SYSTEM message (doesn't appear in chat history)
var activeKeys = Array.from(activeSwitchers);
var switcherPrompt = null;
var activeSwitcherIds = Array.from(activeSwitchers);
if (activeKeys.length > 0) {
var activePrompts = [];
var promptMap = {
'tables': 'REGRAS DE FORMATO: SEMPRE retorne suas respostas em formato de tabela HTML usando <table>, <thead>, <tbody>, <tr>, <th>, <td>. Cada dado deve ser uma célula. Use cabeçalhos claros na primeira linha. Se houver dados numéricos, alinhe à direita. Se houver texto, alinhe à esquerda. Use cores sutis em linhas alternadas (nth-child). NÃO use markdown tables, use HTML puro.',
'infographic': 'REGRAS DE FORMATO: Crie representações visuais HTML usando SVG, progress bars, stat cards, e elementos gráficos. Use elementos como: <svg> para gráficos, <div style="width:X%;background:color"> para barras de progresso, ícones emoji, badges coloridos. Organize informações visualmente com grids, flexbox, e espaçamento. Inclua legendas e rótulos visuais claros.',
'cards': 'REGRAS DE FORMATO: Retorne informações em formato de cards HTML. Cada card deve ter: <div class="card" style="border:1px solid #ddd;border-radius:8px;padding:16px;margin:8px;box-shadow:0 2px 4px rgba(0,0,0,0.1)">. Dentro do card use: título em <h3> ou <strong>, subtítulo em <p> style="color:#666", ícone emoji ou ícone SVG no topo, badges de status. Organize cards em grid usando display:grid ou flex-wrap.',
'list': 'REGRAS DE FORMATO: Use apenas listas HTML: <ul> para bullets e <ol> para números numerados. Cada item em <li>. Use sublistas aninhadas quando apropriado. NÃO use parágrafos de texto, converta tudo em itens de lista. Adicione ícones emoji no início de cada <li> quando possível. Use classes CSS para estilização: .list-item, .sub-list.',
'comparison': 'REGRAS DE FORMATO: Crie comparações lado a lado em HTML. Use grid de 2 colunas: <div style="display:grid;grid-template-columns:1fr 1fr;gap:20px">. Cada lado em uma <div class="comparison-side"> com borda colorida distinta. Use headers claros para cada lado. Adicione seção de "Diferenças Chave" com bullet points. Use cores contrastantes para cada lado (ex: azul vs laranja). Inclua tabela de comparação resumida no final.',
'timeline': 'REGRAS DE FORMATO: Organize eventos cronologicamente em formato de timeline HTML. Use <div class="timeline"> com border-left vertical. Cada evento em <div class="timeline-item"> com: data em <span class="timeline-date" style="font-weight:bold;color:#666">, título em <h3>, descrição em <p>. Adicione círculo indicador na timeline line. Ordene do mais antigo para o mais recente. Use espaçamento claro entre eventos.',
'markdown': 'REGRAS DE FORMATO: Use exclusivamente formato Markdown padrão. Sintaxe permitida: **negrito**, *itálico*, `inline code`, ```bloco de código```, # cabeçalhos, - bullets, 1. números, [links](url), ![alt](url), | tabela | markdown |. NÃO use HTML tags exceto para blocos de código. Siga estritamente a sintaxe CommonMark.',
'chart': 'REGRAS DE FORMATO: Crie gráficos e diagramas em HTML SVG. Use elementos SVG: <svg width="X" height="Y">, <line> para gráficos de linha, <rect> para gráficos de barra, <circle> para gráficos de pizza, <path> para gráficos de área. Inclua eixos com labels, grid lines, legendas. Use cores distintas para cada série de dados (ex: vermelho, azul, verde). Adicione tooltips com valores ao hover. Se o usuário pedir gráfico de pizza com "pizza vermelha", use fill="#FF0000" no SVG.'
};
if (!messageContent) {
hideMentionDropdown();
input.value = "";
input.focus();
}
activeKeys.forEach(function(id) {
if (promptMap[id]) {
activePrompts.push(promptMap[id]);
} else {
// Se o switcher não estiver no mapa padrão, o ID (texto do switcher) é o próprio prompt
activePrompts.push(id);
}
});
if (activePrompts.length > 0) {
switcherPrompt = activePrompts.join('\n\n');
}
}
// Clear input if called from input field
if (!messageContent) {
hideMentionDropdown();
input.value = "";
input.focus();
}
// User message was already added above before sending to server
if (ws && ws.readyState === WebSocket.OPEN) {
// Send switcher prompts as SYSTEM message first (if any)
if (switcherPrompt) {
ws.send(
JSON.stringify({
bot_id: currentBotId,
user_id: currentUserId,
session_id: currentSessionId,
channel: "web",
content: switcherPrompt,
message_type: MessageType.SYSTEM, // SYSTEM message type - doesn't appear in chat
timestamp: new Date().toISOString(),
}),
);
}
// Then send user message
ws.send(
JSON.stringify({
bot_id: currentBotId,
user_id: currentUserId,
session_id: currentSessionId,
channel: "web",
content: content, // Clean user content - no switcher prompts
message_type: MessageType.USER,
timestamp: new Date().toISOString(),
}),
);
} else {
if (ws && ws.readyState === WebSocket.OPEN) {
ws.send(
JSON.stringify({
bot_id: currentBotId,
user_id: currentUserId,
session_id: currentSessionId,
channel: "web",
content: content,
message_type: MessageType.USER,
active_switchers: activeSwitcherIds,
timestamp: new Date().toISOString(),
}),
);
} else {
notify("Not connected to server. Message not sent.", "warning");
}
}
@ -1579,91 +1508,63 @@ function sendMessage(messageContent) {
// Switcher Logic - Response Format Modifiers
// activeSwitchers is already defined above as a Set
var switcherDefinitions = [
{
id: 'tables',
label: 'Tabelas',
icon: '📊',
color: '#1E8E3E' // Google Green 700 - cleaner, more professional
},
{
id: 'infographic',
label: 'Infográfico',
icon: '📈',
color: '#1967D2' // Google Blue 700
},
{
id: 'cards',
label: 'Cards',
icon: '🃏',
color: '#E37400' // Google Orange 700
},
{
id: 'list',
label: 'Lista',
icon: '📋',
color: '#9334E6' // Google Purple 700
},
{
id: 'comparison',
label: 'Comparação',
icon: '⚖️',
color: '#C5221F' // Google Red 700
},
{
id: 'timeline',
label: 'Timeline',
icon: '📅',
color: '#0B57D0' // Google Blue 600
},
{
id: 'markdown',
label: 'Markdown',
icon: '📝',
color: '#444746' // Google Grey 700 - neutral, professional
},
{
id: 'chart',
label: 'Gráfico',
icon: '📉',
color: '#B3261E' // Google Red 600
}
];
var switcherDefinitions = [];
function renderSwitchers() {
var container = document.getElementById("switcherChips");
if (!container) return;
// Merge switcher definitions received from the backend (BotResponse.switchers)
// into the client-side switcherDefinitions cache, then re-render the chip row.
// Deduplicates by id so repeated bot responses do not create duplicate chips.
// Each entry is expected to carry { id, label, icon, color } — TODO confirm
// against the server-side Switcher struct; missing fields fall back to
// defaults below.
function renderBotSwitchers(switchers) {
// Nothing to merge — leave the container in its default hidden state.
if (!switchers || switchers.length === 0) return;
// Index the ids we already know so the forEach below can skip them.
var existingIds = {};
switcherDefinitions.forEach(function(sw) { existingIds[sw.id] = true; });
switchers.forEach(function(sw) {
if (!existingIds[sw.id]) {
switcherDefinitions.push({
id: sw.id,
label: sw.label || sw.id, // fall back to the id when no label was sent
icon: sw.icon || '🔀', // generic icon for custom switchers
color: sw.color || '#666' // neutral chip color fallback
});
existingIds[sw.id] = true;
}
});
renderSwitchers();
// Un-hide the outer container (hidden by default via inline display:none)
// now that at least one switcher definition exists.
var container = document.getElementById("switchers");
if (container && switcherDefinitions.length > 0) {
container.style.display = '';
}
}
container.innerHTML = switcherDefinitions.map(function(sw) {
var isActive = activeSwitchers.has(sw.id);
return (
'<div class="switcher-chip' + (isActive ? ' active' : '') + '" ' +
'data-switch-id="' + sw.id + '" ' +
'style="--switcher-color: ' + sw.color + '; ' +
(isActive ? 'color: ' + sw.color + ' background: ' + sw.color + '; ' : '') +
'">' +
'<span class="switcher-chip-icon">' + sw.icon + '</span>' +
'<span>' + sw.label + '</span>' +
'</div>'
);
}).join('');
function renderSwitchers() {
var container = document.getElementById("switcherChips");
if (!container) return;
// Add click handlers
container.querySelectorAll('.switcher-chip').forEach(function(chip) {
chip.addEventListener('click', function() {
toggleSwitcher(this.getAttribute('data-switch-id'));
});
});
}
container.innerHTML = switcherDefinitions.map(function(sw) {
var isActive = activeSwitchers.has(sw.id);
return (
'<div class="switcher-chip' + (isActive ? ' active' : '') + '" ' +
'data-switch-id="' + sw.id + '" ' +
'style="--switcher-color: ' + sw.color + '; ' +
(isActive ? 'color: ' + sw.color + ' background: ' + sw.color + '; ' : '') +
'">' +
'<span class="switcher-chip-icon">' + sw.icon + '</span>' +
'<span>' + sw.label + '</span>' +
'</div>'
);
}).join('');
// Toggle a switcher id in the activeSwitchers Set and re-render the chips so
// the active/inactive visual state stays in sync. The set of active ids is
// later serialized into the USER WebSocket message (active_switchers field),
// where the server resolves the actual prompts.
function toggleSwitcher(switcherId) {
if (activeSwitchers.has(switcherId)) {
activeSwitchers.delete(switcherId);
} else {
activeSwitchers.add(switcherId);
}
renderSwitchers();
}
container.querySelectorAll('.switcher-chip').forEach(function(chip) {
chip.addEventListener('click', function() {
toggleSwitcher(this.getAttribute('data-switch-id'));
});
});
}
// Toggle a switcher id in the activeSwitchers Set and refresh the chip row.
// NOTE(review): this appears to be a duplicate of toggleSwitcher from the
// other side of the diff hunk — verify only one copy survives in the final
// file.
function toggleSwitcher(switcherId) {
if (activeSwitchers.has(switcherId)) {
activeSwitchers.delete(switcherId);
} else {
activeSwitchers.add(switcherId);
}
renderSwitchers();
}
function setupEventHandlers() {
var form = document.getElementById("chatForm");