- LLM on footer.

Rodrigo Rodriguez (Pragmatismo) 2025-07-04 23:20:48 -03:00
parent 986d3b58ea
commit 869f23d066
6 changed files with 1371 additions and 34 deletions

Cargo.lock (generated, 1277 changed lines): diff suppressed because it is too large.

Cargo.toml

@@ -11,6 +11,11 @@ repository = "https://alm.pragmatismo.com.br/generalbots/gbserver"
actix-cors = "0.6"
actix-multipart = "0.6"
actix-web = "4"
actix-ws = "0.3.0"
bytes = "1.1"
futures-util = "0.3"
reqwest = { version = "0.11", features = ["json", "stream"] }
chrono = { version = "0.4", features = ["serde"] }
dotenv = "0.15"
imap = "2.0"
@@ -27,3 +32,9 @@ tokio = { version = "1", features = ["full"] }
tokio-stream = "0.1.17"
tracing = "0.1"
tracing-subscriber = { version = "0.3", features = ["fmt"] }
env_logger = "0.10"
anyhow = "1.0"
futures = "0.3"
langchain-rust = "4.4.3"
async-stream = "0.3"

src/main.rs

@@ -2,12 +2,13 @@ use actix_cors::Cors;
use actix_web::http::header;
use actix_web::{web, App, HttpServer};
use dotenv::dotenv;
use sqlx::PgPool;
use services::config::*;
use services::email::*;
use services::file::*;
use services::state::*;
use services::llm::*;
mod services;
@@ -46,6 +47,8 @@ async fn main() -> std::io::Result<()> {
.service(get_emails)
.service(list_emails)
.service(send_email)
.service(chat_stream)
.service(chat)
})
.bind((config.server.host.clone(), config.server.port))?
.run()

src/services/config.rs

@@ -54,6 +54,7 @@ pub struct AIConfig {
pub endpoint: String,
}
impl AppConfig {
pub fn database_url(&self) -> String {
format!(

src/services/email.rs

@@ -3,8 +3,8 @@ use actix_web::error::ErrorInternalServerError;
use actix_web::http::header::ContentType;
use actix_web::{web, HttpResponse, Result};
use lettre::{transport::smtp::authentication::Credentials, Message, SmtpTransport, Transport};
use serde::{Deserialize, Serialize};
use tokio_stream::StreamExt;
use serde::Serialize;
use mailparse::{parse_mail, MailHeaderMap}; // Added MailHeaderMap import
use imap::types::Seq;

src/services/llm.rs

@@ -1,15 +1,106 @@
use actix_web::{
    post,
    web::{self, Bytes},
    HttpResponse, Responder,
};
use futures::StreamExt;
use langchain_rust::{
    chain::{Chain, LLMChainBuilder},
    fmt_message, fmt_template,
    language_models::llm::LLM,
    llm::{openai::OpenAI, AzureConfig},
    message_formatter,
    prompt::HumanMessagePromptTemplate,
    prompt_args,
    schemas::messages::Message,
    template_fstring,
};
use crate::services::{config::AIConfig, state::AppState};
// Maps our AIConfig onto langchain-rust's Azure OpenAI configuration.
// Both handlers below call this as a free function.
pub fn from_config(config: &AIConfig) -> AzureConfig {
    AzureConfig::default()
        .with_api_key(&config.key)
        .with_api_base(&config.endpoint)
        .with_api_version(&config.version)
        .with_deployment_id(&config.instance)
}
#[derive(serde::Deserialize)]
struct ChatRequest {
    input: String,
}

#[post("/chat")]
pub async fn chat(
web::Json(request): web::Json<ChatRequest>,
state: web::Data<AppState>,
) -> Result<impl Responder, actix_web::Error> {
let azure_config = from_config(&state.config.clone().unwrap().ai);
let open_ai = OpenAI::new(azure_config);
let response = match open_ai.invoke(&request.input).await {
Ok(res) => res,
Err(err) => {
eprintln!("Error invoking API: {}", err);
return Err(actix_web::error::ErrorInternalServerError(
"Failed to invoke OpenAI API",
));
}
};
Ok(HttpResponse::Ok().body(response))
}
#[actix_web::post("/stream")]
pub async fn chat_stream(
web::Json(request): web::Json<ChatRequest>,
state: web::Data<AppState>,
) -> Result<impl Responder, actix_web::Error> {
let azure_config = from_config(&state.config.clone().unwrap().ai);
let open_ai = OpenAI::new(azure_config);
let response = match open_ai.invoke("Why is the sky blue?").await {
Ok(res) => res,
Err(err) => {
eprintln!("Error invoking API: {}", err);
return Err(actix_web::error::ErrorInternalServerError(
"Failed to invoke OpenAI API",
));
}
};
let prompt = message_formatter![
fmt_message!(Message::new_system_message(
"You are world class technical documentation writer."
)),
fmt_template!(HumanMessagePromptTemplate::new(template_fstring!(
"{input}", "input"
)))
];
let chain = LLMChainBuilder::new()
.prompt(prompt)
.llm(open_ai)
.build()
.map_err(actix_web::error::ErrorInternalServerError)?;
let mut stream = chain
.stream(prompt_args! { "input" => request.input })
.await
.map_err(actix_web::error::ErrorInternalServerError)?;
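    // Bridge the langchain-rust token stream into an actix byte stream:
    // each chunk's content is forwarded as raw bytes, and any mid-stream
    // error is surfaced as an internal server error.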
let actix_stream = async_stream::stream! {
while let Some(result) = stream.next().await {
match result {
Ok(value) => yield Ok::<_, actix_web::Error>(Bytes::from(value.content)),
Err(e) => yield Err(actix_web::error::ErrorInternalServerError(e)),
}
}
};
Ok(HttpResponse::Ok()
.content_type("text/event-stream")
.streaming(actix_stream))
}
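To exercise the two new routes end to end, a client sketch using the reqwest dependency already added to Cargo.toml above (localhost:8080 and serde_json availability are assumptions; the JSON body matches ChatRequest):

use futures_util::StreamExt;

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    let client = reqwest::Client::new();

    // POST /chat: the whole completion arrives in one response body.
    let answer = client
        .post("http://localhost:8080/chat")
        .json(&serde_json::json!({ "input": "Why is the sky blue?" }))
        .send()
        .await?
        .text()
        .await?;
    println!("chat: {answer}");

    // POST /stream: a text/event-stream; chunks print as they are generated.
    let mut stream = client
        .post("http://localhost:8080/stream")
        .json(&serde_json::json!({ "input": "Why is the sky blue?" }))
        .send()
        .await?
        .bytes_stream();
    while let Some(chunk) = stream.next().await {
        print!("{}", String::from_utf8_lossy(&chunk?));
    }
    Ok(())
}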