- Reorganize services & shared models, add bot/chart

Rename module files, add bot and chart services, move the shared structs under services::shared, and extend the LLM provider implementations.
Rodrigo Rodriguez (Pragmatismo) 2025-10-05 07:18:43 -03:00
parent 694ca28d6f
commit 1a7d6ae0e2
10 changed files with 287 additions and 126 deletions
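The practical effect of the reorganization on callers is the import path of the shared message types. A minimal before/after sketch, based on the channel adapter changes below:

// Before this commit (old path, removed below):
// use crate::shared::{BotResponse, UserMessage};

// After this commit (new path under the services tree):
use crate::services::shared::{BotResponse, UserMessage};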

@@ -1,17 +1,16 @@
pub mod auth;
pub mod automation;
pub mod bot;
pub mod channels;
pub mod chart;
pub mod config;
pub mod context;
pub mod email;
pub mod file;
pub mod keywords;
pub mod llm;
pub mod llm_generic;
pub mod llm_local;
pub mod orchestrator;
pub mod session;
pub mod shared;
pub mod state;
pub mod tools;
pub mod web_automation;
pub mod whatsapp;

@@ -1,34 +1,22 @@
use actix_cors::Cors;
use actix_web::middleware::Logger;
use actix_web::{web, App, HttpRequest, HttpResponse, HttpServer, Result};
use actix_ws::Message;
use actix_web::{web, HttpRequest, HttpResponse, Result};
use chrono::Utc;
use langchain_rust::{
chain::{Chain, LLMChain, LLMChainBuilder},
chain::{Chain, LLMChain},
embedding::openai::openai_embedder::OpenAiEmbedder,
llm::openai::OpenAI,
memory::SimpleMemory,
prompt_args,
schemas::{Document, Message},
tools::{postgres::PostgreSQLEngine, SQLDatabaseBuilder},
vectorstore::qdrant::{Qdrant as LangChainQdrant, StoreBuilder},
vectorstore::qdrant::Qdrant as LangChainQdrant,
vectorstore::{VecStoreOptions, VectorStore},
};
use log::info;
use qdrant_client::qdrant::Qdrant as QdrantClient;
use serde::{Deserialize, Serialize};
use sqlx::{postgres::PgPoolOptions, PgPool};
use std::collections::HashMap;
use std::fs;
use std::sync::Arc;
use tokio::sync::{mpsc, Mutex};
use uuid::Uuid;
mod shared;
use shared::*;
mod services;
use services::auth::AuthService;
use services::channels::{ChannelAdapter, VoiceAdapter, WebChannelAdapter};
use services::chart::ChartGenerator;

@@ -1,12 +1,12 @@
use async_trait::async_trait;
use livekit::{AccessToken, Room, RoomOptions, DataPacketKind};
use chrono::Utc;
use livekit::{DataPacketKind, Room, RoomOptions};
use log::info;
use tokio::sync::{mpsc, Mutex};
use std::collections::HashMap;
use std::sync::Arc;
use chrono::Utc;
use tokio::sync::{mpsc, Mutex};
use crate::shared::{UserMessage, BotResponse};
use crate::services::shared::{BotResponse, UserMessage};
#[async_trait]
pub trait ChannelAdapter: Send + Sync {
@@ -63,7 +63,11 @@ impl VoiceAdapter {
}
}
pub async fn start_voice_session(&self, session_id: &str, user_id: &str) -> Result<String, Box<dyn std::error::Error>> {
pub async fn start_voice_session(
&self,
session_id: &str,
user_id: &str,
) -> Result<String, Box<dyn std::error::Error>> {
let token = AccessToken::with_api_key(&self.api_key, &self.api_secret)
.with_identity(user_id)
.with_name(user_id)
@@ -77,7 +81,10 @@ impl VoiceAdapter {
};
let (room, mut events) = Room::connect(&self.livekit_url, &token, room_options).await?;
self.rooms.lock().await.insert(session_id.to_string(), room.clone());
self.rooms
.lock()
.await
.insert(session_id.to_string(), room.clone());
let rooms_clone = self.rooms.clone();
let connections_clone = self.connections.clone();
@@ -87,10 +94,15 @@ impl VoiceAdapter {
while let Some(event) = events.recv().await {
match event {
livekit::prelude::RoomEvent::DataReceived(data_packet) => {
if let Ok(message) = serde_json::from_slice::<UserMessage>(&data_packet.data) {
if let Ok(message) =
serde_json::from_slice::<UserMessage>(&data_packet.data)
{
info!("Received voice message: {}", message.content);
if let Some(tx) = connections_clone.lock().await.get(&message.session_id) {
let _ = tx.send(BotResponse {
if let Some(tx) =
connections_clone.lock().await.get(&message.session_id)
{
let _ = tx
.send(BotResponse {
bot_id: message.bot_id,
user_id: message.user_id,
session_id: message.session_id,
@@ -99,11 +111,16 @@ impl VoiceAdapter {
message_type: "voice".to_string(),
stream_token: None,
is_complete: true,
}).await;
})
.await;
}
}
}
livekit::prelude::RoomEvent::TrackSubscribed(track, publication, participant) => {
livekit::prelude::RoomEvent::TrackSubscribed(
track,
publication,
participant,
) => {
info!("Voice track subscribed from {}", participant.identity());
}
_ => {}
@@ -115,7 +132,10 @@ impl VoiceAdapter {
Ok(token)
}
pub async fn stop_voice_session(&self, session_id: &str) -> Result<(), Box<dyn std::error::Error>> {
pub async fn stop_voice_session(
&self,
session_id: &str,
) -> Result<(), Box<dyn std::error::Error>> {
if let Some(room) = self.rooms.lock().await.remove(session_id) {
room.disconnect();
}
@@ -126,7 +146,11 @@ impl VoiceAdapter {
self.connections.lock().await.insert(session_id, tx);
}
pub async fn send_voice_response(&self, session_id: &str, text: &str) -> Result<(), Box<dyn std::error::Error>> {
pub async fn send_voice_response(
&self,
session_id: &str,
text: &str,
) -> Result<(), Box<dyn std::error::Error>> {
if let Some(room) = self.rooms.lock().await.get(session_id) {
let voice_response = serde_json::json!({
"type": "voice_response",
@@ -148,6 +172,7 @@ impl VoiceAdapter {
impl ChannelAdapter for VoiceAdapter {
async fn send_message(&self, response: BotResponse) -> Result<(), Box<dyn std::error::Error>> {
info!("Sending voice response to: {}", response.user_id);
self.send_voice_response(&response.session_id, &response.content).await
self.send_voice_response(&response.session_id, &response.content)
.await
}
}
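For reference, the ChannelAdapter trait that VoiceAdapter implements above can back other channels too. A minimal, hypothetical logging adapter, assuming send_message is the trait's only required method (matching the VoiceAdapter impl):

use async_trait::async_trait;
use log::info;

use crate::services::channels::ChannelAdapter;
use crate::services::shared::BotResponse;

// Illustrative only; not part of this commit.
pub struct LoggingAdapter;

#[async_trait]
impl ChannelAdapter for LoggingAdapter {
    async fn send_message(&self, response: BotResponse) -> Result<(), Box<dyn std::error::Error>> {
        // Log the outgoing message instead of delivering it to a real channel.
        info!(
            "[{}] -> {}: {}",
            response.channel, response.user_id, response.content
        );
        Ok(())
    }
}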

@@ -6,10 +6,10 @@ use langchain_rust::{
schemas::Message,
};
use serde_json::Value;
use std::sync::Arc;
use tokio::sync::mpsc;
pub mod llm_generic;
pub mod llm_local;
pub mod llm_provider;
use crate::services::tools::ToolManager;
#[async_trait]
pub trait LLMProvider: Send + Sync {
@@ -26,49 +26,40 @@ pub trait LLMProvider: Send + Sync {
tx: mpsc::Sender<String>,
) -> Result<(), Box<dyn std::error::Error + Send + Sync>>;
// Add tool calling capability
// Add tool calling capability using LangChain tools
async fn generate_with_tools(
&self,
prompt: &str,
config: &serde_json::Value,
config: &Value,
available_tools: &[String],
tool_manager: Arc<tools::ToolManager>,
tool_manager: Arc<ToolManager>,
session_id: &str,
user_id: &str,
) -> Result<String, Box<dyn std::error::Error>>;
) -> Result<String, Box<dyn std::error::Error + Send + Sync>>;
}
pub struct OpenAIClient<C>
where
C: langchain_rust::llm::Config,
{
client: OpenAI<C>,
pub struct OpenAIClient {
client: OpenAI,
}
impl<C> OpenAIClient<C>
where
C: langchain_rust::llm::Config,
{
pub fn new(config: C) -> Self {
let client = OpenAI::new(config);
impl OpenAIClient {
pub fn new(client: OpenAI) -> Self {
Self { client }
}
}
#[async_trait]
impl<C> LLMProvider for OpenAIClient<C>
where
C: langchain_rust::llm::Config + Send + Sync,
{
impl LLMProvider for OpenAIClient {
async fn generate(
&self,
prompt: &str,
_config: &Value,
) -> Result<String, Box<dyn std::error::Error + Send + Sync>> {
// Call the underlying OpenAI client with the raw prompt string.
let messages = vec![Message::new_human_message(prompt.to_string())];
let result = self
.client
.invoke(prompt)
.invoke(&messages)
.await
.map_err(|e| Box::new(e) as Box<dyn std::error::Error + Send + Sync>)?;
@@ -81,7 +72,6 @@ where
_config: &Value,
mut tx: mpsc::Sender<String>,
) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
// Build a message vector for the OpenAI streaming API
let messages = vec![Message::new_human_message(prompt.to_string())];
let mut stream = self
@@ -93,7 +83,6 @@ where
while let Some(result) = stream.next().await {
match result {
Ok(chunk) => {
// The `content` field is accessed directly (no method).
let content = chunk.content;
if !content.is_empty() {
let _ = tx.send(content.to_string()).await;
@@ -107,6 +96,35 @@ where
Ok(())
}
async fn generate_with_tools(
&self,
prompt: &str,
_config: &Value,
available_tools: &[String],
_tool_manager: Arc<ToolManager>,
_session_id: &str,
_user_id: &str,
) -> Result<String, Box<dyn std::error::Error + Send + Sync>> {
// Enhanced prompt with tool information
let tools_info = if available_tools.is_empty() {
String::new()
} else {
format!("\n\nAvailable tools: {}. You can suggest using these tools if they would help answer the user's question.", available_tools.join(", "))
};
let enhanced_prompt = format!("{}{}", prompt, tools_info);
let messages = vec![Message::new_human_message(enhanced_prompt)];
let result = self
.client
.invoke(&messages)
.await
.map_err(|e| Box::new(e) as Box<dyn std::error::Error + Send + Sync>)?;
Ok(result)
}
}
pub struct AnthropicClient {
@@ -119,3 +137,134 @@ impl AnthropicClient {
Self { client }
}
}
#[async_trait]
impl LLMProvider for AnthropicClient {
async fn generate(
&self,
prompt: &str,
_config: &Value,
) -> Result<String, Box<dyn std::error::Error + Send + Sync>> {
let messages = vec![Message::new_human_message(prompt.to_string())];
let result = self
.client
.invoke(&messages)
.await
.map_err(|e| Box::new(e) as Box<dyn std::error::Error + Send + Sync>)?;
Ok(result)
}
async fn generate_stream(
&self,
prompt: &str,
_config: &Value,
mut tx: mpsc::Sender<String>,
) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
let messages = vec![Message::new_human_message(prompt.to_string())];
let mut stream = self
.client
.stream(&messages)
.await
.map_err(|e| Box::new(e) as Box<dyn std::error::Error + Send + Sync>)?;
while let Some(result) = stream.next().await {
match result {
Ok(chunk) => {
let content = chunk.content;
if !content.is_empty() {
let _ = tx.send(content.to_string()).await;
}
}
Err(e) => {
eprintln!("Stream error: {}", e);
}
}
}
Ok(())
}
async fn generate_with_tools(
&self,
prompt: &str,
_config: &Value,
available_tools: &[String],
_tool_manager: Arc<ToolManager>,
_session_id: &str,
_user_id: &str,
) -> Result<String, Box<dyn std::error::Error + Send + Sync>> {
let tools_info = if available_tools.is_empty() {
String::new()
} else {
format!("\n\nAvailable tools: {}. You can suggest using these tools if they would help answer the user's question.", available_tools.join(", "))
};
let enhanced_prompt = format!("{}{}", prompt, tools_info);
let messages = vec![Message::new_human_message(enhanced_prompt)];
let result = self
.client
.invoke(&messages)
.await
.map_err(|e| Box::new(e) as Box<dyn std::error::Error + Send + Sync>)?;
Ok(result)
}
}
pub struct MockLLMProvider;
impl MockLLMProvider {
pub fn new() -> Self {
Self
}
}
#[async_trait]
impl LLMProvider for MockLLMProvider {
async fn generate(
&self,
prompt: &str,
_config: &Value,
) -> Result<String, Box<dyn std::error::Error + Send + Sync>> {
Ok(format!("Mock response to: {}", prompt))
}
async fn generate_stream(
&self,
prompt: &str,
_config: &Value,
mut tx: mpsc::Sender<String>,
) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
let response = format!("Mock stream response to: {}", prompt);
for word in response.split_whitespace() {
let _ = tx.send(format!("{} ", word)).await;
tokio::time::sleep(tokio::time::Duration::from_millis(100)).await;
}
Ok(())
}
async fn generate_with_tools(
&self,
prompt: &str,
_config: &Value,
available_tools: &[String],
_tool_manager: Arc<ToolManager>,
_session_id: &str,
_user_id: &str,
) -> Result<String, Box<dyn std::error::Error + Send + Sync>> {
let tools_list = if available_tools.is_empty() {
"no tools available".to_string()
} else {
available_tools.join(", ")
};
Ok(format!(
"Mock response with tools [{}] to: {}",
tools_list, prompt
))
}
}
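A short sketch of exercising the provider trait with the new MockLLMProvider, assuming the trait and mock are reachable via crate::services::llm (exact paths may differ):

use tokio::sync::mpsc;

use crate::services::llm::{LLMProvider, MockLLMProvider};

async fn demo() -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
    let provider = MockLLMProvider::new();
    let config = serde_json::json!({});

    // One-shot generation.
    let reply = provider.generate("hello", &config).await?;
    println!("{}", reply);

    // Streaming: tokens arrive over an mpsc channel. The mock emits only a few
    // words, so draining the channel afterwards is fine; a real provider would
    // normally be driven from a spawned task while the receiver is drained.
    let (tx, mut rx) = mpsc::channel(32);
    provider.generate_stream("hello again", &config, tx).await?;
    while let Some(token) = rx.recv().await {
        print!("{}", token);
    }
    Ok(())
}

Note that OpenAIClient::new now takes a pre-built langchain_rust OpenAI client instead of a config value, and AnthropicClient uses the same constructor pattern.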

@@ -1,3 +1,61 @@
pub mod shared;
pub mod state;
pub mod utils;
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use sqlx::FromRow;
use uuid::Uuid;
#[derive(Debug, Clone, Serialize, Deserialize, FromRow)]
pub struct UserSession {
pub id: Uuid,
pub user_id: Uuid,
pub bot_id: Uuid,
pub title: String,
pub context_data: serde_json::Value,
pub created_at: DateTime<Utc>,
pub updated_at: DateTime<Utc>,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct EmbeddingRequest {
pub text: String,
pub model: Option<String>,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct EmbeddingResponse {
pub embedding: Vec<f32>,
pub model: String,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SearchResult {
pub text: String,
pub similarity: f32,
pub metadata: serde_json::Value,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct UserMessage {
pub bot_id: String,
pub user_id: String,
pub session_id: String,
pub channel: String,
pub content: String,
pub message_type: String,
pub media_url: Option<String>,
pub timestamp: DateTime<Utc>,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct BotResponse {
pub bot_id: String,
pub user_id: String,
pub session_id: String,
pub channel: String,
pub content: String,
pub message_type: String,
pub stream_token: Option<String>,
pub is_complete: bool,
}
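The relocated shared structs are plain serde types; a small sketch of constructing a UserMessage under the services::shared path used elsewhere in this diff (field values are illustrative only):

use chrono::Utc;
use uuid::Uuid;

use crate::services::shared::UserMessage;

fn example_message() -> UserMessage {
    UserMessage {
        bot_id: Uuid::new_v4().to_string(),
        user_id: "user-123".to_string(),
        session_id: Uuid::new_v4().to_string(),
        channel: "web".to_string(),
        content: "hello".to_string(),
        message_type: "text".to_string(),
        media_url: None,
        timestamp: Utc::now(),
    }
}

Because the struct derives Serialize and Deserialize, it round-trips through serde_json (serde_json::to_string / serde_json::from_str) without extra code.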

@@ -1,58 +0,0 @@
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use sqlx::FromRow;
use uuid::Uuid;
#[derive(Debug, Clone, Serialize, Deserialize, FromRow)]
pub struct UserSession {
pub id: Uuid,
pub user_id: Uuid,
pub bot_id: Uuid,
pub title: String,
pub context_data: serde_json::Value,
pub created_at: DateTime<Utc>,
pub updated_at: DateTime<Utc>,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct EmbeddingRequest {
pub text: String,
pub model: Option<String>,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct EmbeddingResponse {
pub embedding: Vec<f32>,
pub model: String,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SearchResult {
pub text: String,
pub similarity: f32,
pub metadata: serde_json::Value,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct UserMessage {
pub bot_id: String,
pub user_id: String,
pub session_id: String,
pub channel: String,
pub content: String,
pub message_type: String,
pub media_url: Option<String>,
pub timestamp: DateTime<Utc>,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct BotResponse {
pub bot_id: String,
pub user_id: String,
pub session_id: String,
pub channel: String,
pub content: String,
pub message_type: String,
pub stream_token: Option<String>,
pub is_complete: bool,
}