New rust version is now compiling.

parent c0c470e9aa
commit 84f28bc956

20 changed files with 926 additions and 486 deletions

@@ -1,5 +1,6 @@
-use crate::email::fetch_latest_sent_to;
+use crate::email::save_email_draft;
-use crate::email::{fetch_latest_sent_to, SaveDraftRequest};
+use crate::email::SaveDraftRequest;
 use crate::shared::state::AppState;
 use rhai::Dynamic;
 use rhai::Engine;
@@ -1,4 +1,4 @@
-use crate::{state::AppState, web_automation::BrowserPool};
+use crate::{shared::state::AppState, web_automation::BrowserPool};
 use log::info;
 use rhai::{Dynamic, Engine};
 use std::error::Error;

@@ -33,19 +33,27 @@ pub fn get_website_keyword(state: &AppState, engine: &mut Engine) {
 }
 
 pub async fn execute_headless_browser_search(
-    browser_pool: Arc<BrowserPool>, // Adjust path as needed
+    browser_pool: Arc<BrowserPool>,
     search_term: &str,
-) -> Result<String, Box<dyn Error + Send + Sync>> {
+) -> Result<String, Box<dyn std::error::Error + Send + Sync>> {
     info!("Starting headless browser search: '{}' ", search_term);
 
-    let search_term = search_term.to_string();
+    // Clone the search term so it can be moved into the async closure.
+    let term = search_term.to_string();
 
+    // `with_browser` expects a closure that returns a `Future` yielding
+    // `Result<_, Box<dyn Error + Send + Sync>>`. `perform_search` already returns
+    // that exact type, so we can forward the result directly.
     let result = browser_pool
-        .with_browser(|driver| Box::pin(async move { perform_search(driver, &search_term).await }))
+        .with_browser(move |driver| {
+            let term = term.clone();
+            Box::pin(async move { perform_search(driver, &term).await })
+        })
         .await?;
 
     Ok(result)
 }
 
 async fn perform_search(
     driver: WebDriver,
     search_term: &str,

@@ -96,11 +104,12 @@ async fn extract_search_results(
         ".result a[href]", // Generic result links
     ];
 
-    for selector in &selectors {
+    // Iterate over selectors, dereferencing each `&&str` to `&str` for `By::Css`
+    for &selector in &selectors {
         if let Ok(elements) = driver.find_all(By::Css(selector)).await {
             for element in elements {
                 if let Ok(Some(href)) = element.attr("href").await {
-                    // Filter out internal and non-http links
+                    // Filter out internal and non‑http links
                     if href.starts_with("http")
                         && !href.contains("duckduckgo.com")
                         && !href.contains("duck.co")
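
For context, a minimal sketch of how the reworked helper might be driven from application code, assuming the Arc-wrapped BrowserPool constructed later in this commit; the search term is illustrative:

use std::sync::Arc;

// Illustrative only: the pool comes from initialize_browser_pool() below.
async fn demo_search(pool: Arc<BrowserPool>) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
    // Returns the extracted result links as a single String.
    let results = execute_headless_browser_search(pool, "rust async runtime").await?;
    println!("{}", results);
    Ok(())
}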

@@ -1,6 +1,6 @@
 use log::info;
 
-use crate::{shared::state::AppState, utils::call_llm};
+use crate::{shared::state::AppState, shared::utils::call_llm};
 use rhai::{Dynamic, Engine};
 
 pub fn llm_keyword(state: &AppState, engine: &mut Engine) {
@@ -4,7 +4,7 @@ use rhai::Engine;
 use serde_json::{json, Value};
 use sqlx::PgPool;
 
-use crate::shared::models::automation_model::TriggerKind;
+use crate::shared::models::TriggerKind;
 use crate::shared::state::AppState;
 
 pub fn on_keyword(state: &AppState, engine: &mut Engine) {
@@ -4,7 +4,7 @@ use rhai::Engine;
 use serde_json::{json, Value};
 use sqlx::PgPool;
 
-use crate::shared::models::automation_model::TriggerKind;
+use crate::shared::models::TriggerKind;
 use crate::shared::state::AppState;
 
 pub fn set_schedule_keyword(state: &AppState, engine: &mut Engine) {
@@ -1,7 +1,6 @@
 use actix_web::{web, HttpRequest, HttpResponse, Result};
 use actix_ws::Message as WsMessage;
 use chrono::Utc;
 use langchain_rust::schemas::Message;
 use log::info;
-use serde_json;
 use std::collections::HashMap;

@@ -163,7 +162,7 @@ impl BotOrchestrator {
     pub async fn stream_response(
         &self,
         message: UserMessage,
-        mut response_tx: mpsc::Sender<BotResponse>,
+        response_tx: mpsc::Sender<BotResponse>,
     ) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
         info!("Streaming response for user: {}", message.user_id);
 

src/email/mod.rs (566 changed lines)

@@ -1,81 +1,533 @@
-use actix_web::{post, web, HttpResponse, Result};
-use lettre::{
-    message::header::ContentType,
-    transport::smtp::authentication::Credentials,
-    Message,
-    SmtpTransport,
-    Transport
-};
 use crate::{config::EmailConfig, shared::state::AppState};
 use log::info;
-use serde::{Deserialize, Serialize};
-
-#[derive(Debug, Serialize, Deserialize)]
-pub struct EmailRequest {
-    pub to: String,
+use actix_web::error::ErrorInternalServerError;
+use actix_web::http::header::ContentType;
+use actix_web::{web, HttpResponse, Result};
+use lettre::{transport::smtp::authentication::Credentials, Message, SmtpTransport, Transport};
+use serde::Serialize;
+
+use imap::types::Seq;
+use mailparse::{parse_mail, MailHeaderMap}; // Added MailHeaderMap import
+
+#[derive(Debug, Serialize)]
+pub struct EmailResponse {
+    pub id: String,
+    pub name: String,
+    pub email: String,
     pub subject: String,
-    pub body: String,
+    pub text: String,
+    date: String,
+    read: bool,
+    labels: Vec<String>,
 }
 
-#[derive(Clone)]
-pub struct EmailConfig {
-    pub from: String,
-    pub server: String,
-    pub port: u16,
-    pub username: String,
-    pub password: String,
-}
-
-async fn send_email_impl(
-    config: &EmailConfig,
-    to: &str,
-    subject: &str,
-    body: &str,
-) -> Result<(), Box<dyn std::error::Error>> {
+async fn internal_send_email(config: &EmailConfig, to: &str, subject: &str, body: &str) {
     let email = Message::builder()
-        .from(config.from.parse()?)
-        .to(to.parse()?)
+        .from(config.from.parse().unwrap())
+        .to(to.parse().unwrap())
        .subject(subject)
        .header(ContentType::TEXT_PLAIN)
-        .body(body.to_string())?;
+        .body(body.to_string())
+        .unwrap();
 
     let creds = Credentials::new(config.username.clone(), config.password.clone());
 
-    let mailer = SmtpTransport::relay(&config.server)?
+    SmtpTransport::relay(&config.server)
+        .unwrap()
         .port(config.port)
         .credentials(creds)
-        .build();
+        .build()
+        .send(&email)
+        .unwrap();
+}
 
-    match mailer.send(&email) {
-        Ok(_) => {
-            info!("Email sent to {}", to);
-            Ok(())
+#[actix_web::get("/emails/list")]
+pub async fn list_emails(
+    state: web::Data<AppState>,
+) -> Result<web::Json<Vec<EmailResponse>>, actix_web::Error> {
+    let _config = state
+        .config
+        .as_ref()
+        .ok_or_else(|| ErrorInternalServerError("Configuration not available"))?;
+
+    // Establish connection
+    let tls = native_tls::TlsConnector::builder().build().map_err(|e| {
+        ErrorInternalServerError(format!("Failed to create TLS connector: {:?}", e))
+    })?;
+
+    let client = imap::connect(
+        (_config.email.server.as_str(), 993),
+        _config.email.server.as_str(),
+        &tls,
+    )
+    .map_err(|e| ErrorInternalServerError(format!("Failed to connect to IMAP: {:?}", e)))?;
+
+    // Login
+    let mut session = client
+        .login(&_config.email.username, &_config.email.password)
+        .map_err(|e| ErrorInternalServerError(format!("Login failed: {:?}", e)))?;
+
+    // Select INBOX
+    session
+        .select("INBOX")
+        .map_err(|e| ErrorInternalServerError(format!("Failed to select INBOX: {:?}", e)))?;
+
+    // Search for all messages
+    let messages = session
+        .search("ALL")
+        .map_err(|e| ErrorInternalServerError(format!("Failed to search emails: {:?}", e)))?;
+
+    let mut email_list = Vec::new();
+
+    // Get last 20 messages
+    let recent_messages: Vec<_> = messages.iter().cloned().collect(); // Collect items into a Vec
+    let recent_messages: Vec<Seq> = recent_messages.into_iter().rev().take(20).collect(); // Now you can reverse and take the last 20
+    for seq in recent_messages {
+        // Fetch the entire message (headers + body)
+        let fetch_result = session.fetch(seq.to_string(), "RFC822");
+        let messages = fetch_result
+            .map_err(|e| ErrorInternalServerError(format!("Failed to fetch email: {:?}", e)))?;
+
+        for msg in messages.iter() {
+            let body = msg
+                .body()
+                .ok_or_else(|| ErrorInternalServerError("No body found"))?;
+
+            // Parse the complete email message
+            let parsed = parse_mail(body)
+                .map_err(|e| ErrorInternalServerError(format!("Failed to parse email: {:?}", e)))?;
+
+            // Extract headers
+            let headers = parsed.get_headers();
+            let subject = headers.get_first_value("Subject").unwrap_or_default();
+            let from = headers.get_first_value("From").unwrap_or_default();
+            let date = headers.get_first_value("Date").unwrap_or_default();
+
+            // Extract body text (handles both simple and multipart emails)
+            let body_text = if let Some(body_part) = parsed
+                .subparts
+                .iter()
+                .find(|p| p.ctype.mimetype == "text/plain")
+            {
+                body_part.get_body().unwrap_or_default()
+            } else {
+                parsed.get_body().unwrap_or_default()
+            };
+
+            // Create preview
+            let preview = body_text.lines().take(3).collect::<Vec<_>>().join(" ");
+            let preview_truncated = if preview.len() > 150 {
+                format!("{}...", &preview[..150])
+            } else {
+                preview
+            };
+
+            // Parse From field
+            let (from_name, from_email) = parse_from_field(&from);
+
+            email_list.push(EmailResponse {
+                id: seq.to_string(),
+                name: from_name,
+                email: from_email,
+                subject: if subject.is_empty() {
+                    "(No Subject)".to_string()
+                } else {
+                    subject
+                },
+                text: preview_truncated,
+                date: if date.is_empty() {
+                    chrono::Utc::now().format("%Y-%m-%d %H:%M:%S").to_string()
+                } else {
+                    date
+                },
+                read: false,
+                labels: Vec::new(),
+            });
+        }
+    }
+
+    session
+        .logout()
+        .map_err(|e| ErrorInternalServerError(format!("Failed to logout: {:?}", e)))?;
+
+    Ok(web::Json(email_list))
+}
+
+// Helper function to parse From field
+fn parse_from_field(from: &str) -> (String, String) {
+    if let Some(start) = from.find('<') {
+        if let Some(end) = from.find('>') {
+            let email = from[start + 1..end].trim().to_string();
+            let name = from[..start].trim().trim_matches('"').to_string();
+            return (name, email);
+        }
+    }
+    ("Unknown".to_string(), from.to_string())
+}
+
+#[derive(serde::Deserialize)]
+pub struct SaveDraftRequest {
+    pub to: String,
+    pub subject: String,
+    pub cc: Option<String>,
+    pub text: String,
+}
+
+#[derive(serde::Serialize)]
+pub struct SaveDraftResponse {
+    pub success: bool,
+    pub message: String,
+    pub draft_id: Option<String>,
+}
+
+#[derive(serde::Deserialize)]
+pub struct GetLatestEmailRequest {
+    pub from_email: String,
+}
+
+#[derive(serde::Serialize)]
+pub struct LatestEmailResponse {
+    pub success: bool,
+    pub email_text: Option<String>,
+    pub message: String,
+}
+
+#[actix_web::post("/emails/save_draft")]
+pub async fn save_draft(
+    state: web::Data<AppState>,
+    draft_data: web::Json<SaveDraftRequest>,
+) -> Result<web::Json<SaveDraftResponse>, actix_web::Error> {
+    let config = state
+        .config
+        .as_ref()
+        .ok_or_else(|| ErrorInternalServerError("Configuration not available"))?;
+
+    match save_email_draft(&config.email, &draft_data).await {
+        Ok(draft_id) => Ok(web::Json(SaveDraftResponse {
+            success: true,
+            message: "Draft saved successfully".to_string(),
+            draft_id: Some(draft_id),
+        })),
+        Err(e) => Ok(web::Json(SaveDraftResponse {
+            success: false,
+            message: format!("Failed to save draft: {}", e),
+            draft_id: None,
+        })),
+    }
+}
+
+pub async fn save_email_draft(
+    email_config: &EmailConfig,
+    draft_data: &SaveDraftRequest,
+) -> Result<String, Box<dyn std::error::Error>> {
+    // Establish connection
+    let tls = native_tls::TlsConnector::builder().build()?;
+    let client = imap::connect(
+        (email_config.server.as_str(), 993),
+        email_config.server.as_str(),
+        &tls,
+    )?;
+
+    // Login
+    let mut session = client
+        .login(&email_config.username, &email_config.password)
+        .map_err(|e| format!("Login failed: {:?}", e))?;
+
+    // Select or create Drafts folder
+    if session.select("Drafts").is_err() {
+        // Try to create Drafts folder if it doesn't exist
+        session.create("Drafts")?;
+        session.select("Drafts")?;
+    }
+
+    // Create email message
+    let cc_header = draft_data
+        .cc
+        .as_deref()
+        .filter(|cc| !cc.is_empty())
+        .map(|cc| format!("Cc: {}\r\n", cc))
+        .unwrap_or_default();
+    let email_message = format!(
+        "From: {}\r\nTo: {}\r\n{}Subject: {}\r\nDate: {}\r\nContent-Type: text/html; charset=UTF-8\r\n\r\n{}",
+        email_config.username,
+        draft_data.to,
+        cc_header,
+        draft_data.subject,
+        chrono::Utc::now().format("%a, %d %b %Y %H:%M:%S +0000"),
+        draft_data.text
+    );
+
+    // Append to Drafts folder
+    session.append("Drafts", &email_message)?;
+
+    session.logout()?;
+
+    Ok(chrono::Utc::now().timestamp().to_string())
+}
+
+async fn fetch_latest_email_from_sender(
+    email_config: &EmailConfig,
+    from_email: &str,
+) -> Result<String, Box<dyn std::error::Error>> {
+    // Establish connection
+    let tls = native_tls::TlsConnector::builder().build()?;
+    let client = imap::connect(
+        (email_config.server.as_str(), 993),
+        email_config.server.as_str(),
+        &tls,
+    )?;
+
+    // Login
+    let mut session = client
+        .login(&email_config.username, &email_config.password)
+        .map_err(|e| format!("Login failed: {:?}", e))?;
+
+    // Try to select Archive folder first, then fall back to INBOX
+    if session.select("Archive").is_err() {
+        session.select("INBOX")?;
+    }
+
+    // Search for emails from the specified sender
+    let search_query = format!("FROM \"{}\"", from_email);
+    let messages = session.search(&search_query)?;
+
+    if messages.is_empty() {
+        session.logout()?;
+        return Err(format!("No emails found from {}", from_email).into());
+    }
+
+    // Get the latest message (highest sequence number)
+    let latest_seq = messages.iter().max().unwrap();
+
+    // Fetch the entire message
+    let messages = session.fetch(latest_seq.to_string(), "RFC822")?;
+
+    let mut email_text = String::new();
+
+    for msg in messages.iter() {
+        let body = msg.body().ok_or("No body found in email")?;
+
+        // Parse the complete email message
+        let parsed = parse_mail(body)?;
+
+        // Extract headers
+        let headers = parsed.get_headers();
+        let subject = headers.get_first_value("Subject").unwrap_or_default();
+        let from = headers.get_first_value("From").unwrap_or_default();
+        let date = headers.get_first_value("Date").unwrap_or_default();
+        let to = headers.get_first_value("To").unwrap_or_default();
+
+        // Extract body text
+        let body_text = if let Some(body_part) = parsed
+            .subparts
+            .iter()
+            .find(|p| p.ctype.mimetype == "text/plain")
+        {
+            body_part.get_body().unwrap_or_default()
+        } else {
+            parsed.get_body().unwrap_or_default()
+        };
+
+        // Format the email text ready for reply with headers
+        email_text = format!(
+            "--- Original Message ---\nFrom: {}\nTo: {}\nDate: {}\nSubject: {}\n\n{}\n\n--- Reply Above This Line ---\n\n",
+            from, to, date, subject, body_text
+        );
+
+        break; // We only want the first (and should be only) message
+    }
+
+    session.logout()?;
+
+    if email_text.is_empty() {
+        Err("Failed to extract email content".into())
+    } else {
+        Ok(email_text)
+    }
+}
+
+#[actix_web::post("/emails/get_latest_from")]
+pub async fn get_latest_email_from(
+    state: web::Data<AppState>,
+    request: web::Json<GetLatestEmailRequest>,
+) -> Result<web::Json<LatestEmailResponse>, actix_web::Error> {
+    let config = state
+        .config
+        .as_ref()
+        .ok_or_else(|| ErrorInternalServerError("Configuration not available"))?;
+
+    match fetch_latest_email_from_sender(&config.email, &request.from_email).await {
+        Ok(email_text) => Ok(web::Json(LatestEmailResponse {
+            success: true,
+            email_text: Some(email_text),
+            message: "Latest email retrieved successfully".to_string(),
+        })),
         Err(e) => {
-            log::error!("Failed to send email: {}", e);
-            Err(Box::new(e))
+            if e.to_string().contains("No emails found") {
+                Ok(web::Json(LatestEmailResponse {
+                    success: false,
+                    email_text: None,
+                    message: e.to_string(),
+                }))
+            } else {
+                Err(ErrorInternalServerError(e))
+            }
         }
     }
 }
 
-#[post("/email/send")]
+pub async fn fetch_latest_sent_to(
+    email_config: &EmailConfig,
+    to_email: &str,
+) -> Result<String, Box<dyn std::error::Error>> {
+    // Establish connection
+    let tls = native_tls::TlsConnector::builder().build()?;
+    let client = imap::connect(
+        (email_config.server.as_str(), 993),
+        email_config.server.as_str(),
+        &tls,
+    )?;
+
+    // Login
+    let mut session = client
+        .login(&email_config.username, &email_config.password)
+        .map_err(|e| format!("Login failed: {:?}", e))?;
+
+    // Try to select Archive folder first, then fall back to INBOX
+    if session.select("Sent").is_err() {
+        session.select("Sent Items")?;
+    }
+
+    // Search for emails from the specified sender
+    let search_query = format!("TO \"{}\"", to_email);
+    let messages = session.search(&search_query)?;
+
+    if messages.is_empty() {
+        session.logout()?;
+        return Err(format!("No emails found to {}", to_email).into());
+    }
+
+    // Get the latest message (highest sequence number)
+    let latest_seq = messages.iter().max().unwrap();
+
+    // Fetch the entire message
+    let messages = session.fetch(latest_seq.to_string(), "RFC822")?;
+
+    let mut email_text = String::new();
+
+    for msg in messages.iter() {
+        let body = msg.body().ok_or("No body found in email")?;
+
+        // Parse the complete email message
+        let parsed = parse_mail(body)?;
+
+        // Extract headers
+        let headers = parsed.get_headers();
+        let subject = headers.get_first_value("Subject").unwrap_or_default();
+        let from = headers.get_first_value("From").unwrap_or_default();
+        let date = headers.get_first_value("Date").unwrap_or_default();
+        let to = headers.get_first_value("To").unwrap_or_default();
+
+        if !to
+            .trim()
+            .to_lowercase()
+            .contains(&to_email.trim().to_lowercase())
+        {
+            continue;
+        }
+        // Extract body text (handles both simple and multipart emails) - SAME AS LIST_EMAILS
+        let body_text = if let Some(body_part) = parsed
+            .subparts
+            .iter()
+            .find(|p| p.ctype.mimetype == "text/plain")
+        {
+            body_part.get_body().unwrap_or_default()
+        } else {
+            parsed.get_body().unwrap_or_default()
+        };
+
+        // Only format if we have actual content
+        if !body_text.trim().is_empty() && body_text != "No readable content found" {
+            // Format the email text ready for reply with headers
+            email_text = format!(
+                "--- Original Message ---\nFrom: {}\nTo: {}\nDate: {}\nSubject: {}\n\n{}\n\n--- Reply Above This Line ---\n\n",
+                from, to, date, subject, body_text.trim()
+            );
+        } else {
+            // Still provide headers even if body is empty
+            email_text = format!(
+                "--- Original Message ---\nFrom: {}\nTo: {}\nDate: {}\nSubject: {}\n\n[No readable content]\n\n--- Reply Above This Line ---\n\n",
+                from, to, date, subject
+            );
+        }
+
+        break; // We only want the first (and should be only) message
+    }
+
+    session.logout()?;
+
+    // Always return something, even if it's just headers
+    if email_text.is_empty() {
+        Err("Failed to extract email content".into())
+    } else {
+        Ok(email_text)
+    }
+}
 
+#[actix_web::post("/emails/send")]
 pub async fn send_email(
-    config: web::Data<crate::config::AppConfig>,
-    payload: web::Json<EmailRequest>,
-) -> Result<HttpResponse> {
-    let email_request = payload.into_inner();
+    payload: web::Json<(String, String, String)>,
+    state: web::Data<AppState>,
+) -> Result<HttpResponse, actix_web::Error> {
+    let (to, subject, body) = payload.into_inner();
 
-    match send_email_impl(&config.email, &email_request.to, &email_request.subject, &email_request.body).await {
-        Ok(_) => Ok(HttpResponse::Ok().json(serde_json::json!({"status": "sent"}))),
-        Err(e) => Ok(HttpResponse::InternalServerError().json(serde_json::json!({"error": e.to_string()})))
-    }
+    info!("To: {}", to);
+    info!("Subject: {}", subject);
+    info!("Body: {}", body);
+
+    // Send via SMTP
+    internal_send_email(&state.config.clone().unwrap().email, &to, &subject, &body).await;
+
+    Ok(HttpResponse::Ok().finish())
 }
 
-#[post("/email/test")]
-pub async fn test_email(
-    config: web::Data<crate::config::AppConfig>,
-) -> Result<HttpResponse> {
-    match send_email_impl(&config.email, &config.email.from, "Test Email", "This is a test email from BotServer").await {
-        Ok(_) => Ok(HttpResponse::Ok().json(serde_json::json!({"status": "test_sent"}))),
-        Err(e) => Ok(HttpResponse::InternalServerError().json(serde_json::json!({"error": e.to_string()})))
-    }
-}
+#[actix_web::get("/campaigns/{campaign_id}/click/{email}")]
+pub async fn save_click(
+    path: web::Path<(String, String)>,
+    state: web::Data<AppState>,
+) -> HttpResponse {
+    let (campaign_id, email) = path.into_inner();
+    let _ = sqlx::query("INSERT INTO public.clicks (campaign_id, email, updated_at) VALUES ($1, $2, NOW()) ON CONFLICT (campaign_id, email) DO UPDATE SET updated_at = NOW()")
+        .bind(campaign_id)
+        .bind(email)
+        .execute(state.db.as_ref().unwrap())
+        .await;
+
+    let pixel = [
+        0x89, 0x50, 0x4E, 0x47, 0x0D, 0x0A, 0x1A, 0x0A, // PNG header
+        0x00, 0x00, 0x00, 0x0D, 0x49, 0x48, 0x44, 0x52, // IHDR chunk
+        0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x01, // 1x1 dimension
+        0x08, 0x06, 0x00, 0x00, 0x00, 0x1F, 0x15, 0xC4, 0x89, // RGBA
+        0x00, 0x00, 0x00, 0x0A, 0x49, 0x44, 0x41, 0x54, // IDAT chunk
+        0x78, 0x9C, 0x63, 0x00, 0x01, 0x00, 0x00, 0x05, // data
+        0x00, 0x01, 0x0D, 0x0A, 0x2D, 0xB4, // CRC
+        0x00, 0x00, 0x00, 0x00, 0x49, 0x45, 0x4E, 0x44, // IEND chunk
+        0xAE, 0x42, 0x60, 0x82,
+    ]; // EOF
+
+    // At the end of your save_click function:
+    HttpResponse::Ok()
+        .content_type(ContentType::png())
+        .body(pixel.to_vec()) // Using slicing to pass a reference
+}
+
+#[actix_web::get("/campaigns/{campaign_id}/emails")]
+pub async fn get_emails(path: web::Path<String>, state: web::Data<AppState>) -> String {
+    let campaign_id = path.into_inner();
+    let rows = sqlx::query_scalar::<_, String>("SELECT email FROM clicks WHERE campaign_id = $1")
+        .bind(campaign_id)
+        .fetch_all(state.db.as_ref().unwrap())
+        .await
+        .unwrap_or_default();
+    rows.join(",")
+}
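
As a quick reference for the parse_from_field helper added above, its behavior on the two common From: header shapes; the addresses below are made up:

#[test]
fn parse_from_field_examples() {
    // Display-name form: quotes are stripped, address taken from inside <>.
    assert_eq!(
        parse_from_field("\"Ada Lovelace\" <ada@example.com>"),
        ("Ada Lovelace".to_string(), "ada@example.com".to_string())
    );
    // Bare-address form: no '<' present, so the name falls back to "Unknown".
    assert_eq!(
        parse_from_field("bare@example.com"),
        ("Unknown".to_string(), "bare@example.com".to_string())
    );
}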

src/file/mod.rs (170 changed lines)

@@ -1,78 +1,142 @@
+use actix_web::web;
+
 use actix_multipart::Multipart;
-use actix_web::{get, post, web, HttpResponse, Result};
-use futures_util::StreamExt as _;
-use log::info;
+use actix_web::{post, HttpResponse};
+use minio::s3::builders::ObjectContent;
+use minio::s3::types::ToStream;
+use minio::s3::Client;
 use std::io::Write;
-use tokio::fs;
+use tempfile::NamedTempFile;
+use tokio_stream::StreamExt;
+
+use minio::s3::client::{Client as MinioClient, ClientBuilder as MinioClientBuilder};
+use minio::s3::creds::StaticProvider;
+use minio::s3::http::BaseUrl;
+use std::str::FromStr;
+
+use crate::config::AppConfig;
+use crate::shared::state::AppState;
+
+pub async fn init_minio(config: &AppConfig) -> Result<MinioClient, minio::s3::error::Error> {
+    let scheme = if config.minio.use_ssl {
+        "https"
+    } else {
+        "http"
+    };
+    let base_url = format!("{}://{}", scheme, config.minio.server);
+    let base_url = BaseUrl::from_str(&base_url)?;
+    let credentials = StaticProvider::new(&config.minio.access_key, &config.minio.secret_key, None);
+
+    let minio_client = MinioClientBuilder::new(base_url)
+        .provider(Some(credentials))
+        .build()?;
+
+    Ok(minio_client)
+}
 
 #[post("/files/upload/{folder_path}")]
 pub async fn upload_file(
+    folder_path: web::Path<String>,
     mut payload: Multipart,
-    path: web::Path<String>,
-) -> Result<HttpResponse> {
-    let folder_path = path.into_inner();
+    state: web::Data<AppState>,
+) -> Result<HttpResponse, actix_web::Error> {
+    let folder_path = folder_path.into_inner();
 
-    while let Some(item) = payload.next().await {
-        let mut field = item?;
-        let content_disposition = field.content_disposition();
+    // Create a temporary file to store the uploaded file.
+    let mut temp_file = NamedTempFile::new().map_err(|e| {
+        actix_web::error::ErrorInternalServerError(format!("Failed to create temp file: {}", e))
+    })?;
 
-        let file_name = if let Some(name) = content_disposition.get_filename() {
-            name.to_string()
-        } else {
-            continue;
-        };
+    let mut file_name: Option<String> = None;
 
-        let file_path = format!("./uploads/{}/{}", folder_path, file_name);
-
-        if let Some(parent) = std::path::Path::new(&file_path).parent() {
-            fs::create_dir_all(parent).await?;
+    // Iterate over the multipart stream.
+    while let Some(mut field) = payload.try_next().await? {
+        // Extract the filename from the content disposition, if present.
+        if let Some(disposition) = field.content_disposition() {
+            if let Some(name) = disposition.get_filename() {
+                file_name = Some(name.to_string());
+            }
         }
 
-        let mut f = web::block(|| std::fs::File::create(&file_path))
-            .await??;
-
-        while let Some(chunk) = field.next().await {
-            let data = chunk?;
-            f = web::block(move || f.write_all(&data).map(|_| f)).await??;
+        // Write the file content to the temporary file.
+        while let Some(chunk) = field.try_next().await? {
+            temp_file.write_all(&chunk).map_err(|e| {
+                actix_web::error::ErrorInternalServerError(format!(
+                    "Failed to write to temp file: {}",
+                    e
+                ))
+            })?;
         }
     }
 
-    info!("File uploaded to folder: {}", folder_path);
-    Ok(HttpResponse::Ok().json(serde_json::json!({"status": "uploaded"})))
+    // Get the file name or use a default name.
+    let file_name = file_name.unwrap_or_else(|| "unnamed_file".to_string());
+
+    // Construct the object name using the folder path and file name.
+    let object_name = format!("{}/{}", folder_path, file_name);
+
+    // Upload the file to the MinIO bucket.
+    let client: Client = state.minio_client.clone().unwrap();
+    let bucket_name = state.config.as_ref().unwrap().minio.bucket.clone();
+
+    let content = ObjectContent::from(temp_file.path());
+    client
+        .put_object_content(bucket_name, &object_name, content)
+        .send()
+        .await
+        .map_err(|e| {
+            actix_web::error::ErrorInternalServerError(format!(
+                "Failed to upload file to MinIO: {}",
+                e
+            ))
+        })?;
+
+    // Clean up the temporary file.
+    temp_file.close().map_err(|e| {
+        actix_web::error::ErrorInternalServerError(format!("Failed to close temp file: {}", e))
+    })?;
+
+    Ok(HttpResponse::Ok().body(format!(
+        "Uploaded file '{}' to folder '{}'",
+        file_name, folder_path
+    )))
 }
 
 #[post("/files/list/{folder_path}")]
 pub async fn list_file(
-    path: web::Path<String>,
-) -> Result<HttpResponse> {
-    let folder_path = path.into_inner();
-    let dir_path = format!("./uploads/{}", folder_path);
+    folder_path: web::Path<String>,
+    state: web::Data<AppState>,
+) -> Result<HttpResponse, actix_web::Error> {
+    let folder_path = folder_path.into_inner();
 
-    let mut entries = Vec::new();
+    let client: Client = state.minio_client.clone().unwrap();
+    let bucket_name = "file-upload-rust-bucket";
 
-    if let Ok(mut read_dir) = fs::read_dir(&dir_path).await {
-        while let Ok(Some(entry)) = read_dir.next_entry().await {
-            if let Ok(file_name) = entry.file_name().into_string() {
-                entries.push(file_name);
+    // Create the stream using the to_stream() method
+    let mut objects_stream = client
+        .list_objects(bucket_name)
+        .prefix(Some(folder_path))
+        .to_stream()
+        .await;
+
+    let mut file_list = Vec::new();
+
+    // Use StreamExt::next() to iterate through the stream
+    while let Some(items) = objects_stream.next().await {
+        match items {
+            Ok(result) => {
+                for item in result.contents {
+                    file_list.push(item.name);
+                }
+            }
+            Err(e) => {
+                return Err(actix_web::error::ErrorInternalServerError(format!(
+                    "Failed to list files in MinIO: {}",
+                    e
+                )));
             }
         }
     }
 
-    Ok(HttpResponse::Ok().json(entries))
-}
-
-#[get("/files/download/{file_path:.*}")]
-pub async fn download_file(
-    path: web::Path<String>,
-) -> Result<HttpResponse> {
-    let file_path = path.into_inner();
-    let full_path = format!("./uploads/{}", file_path);
-
-    if let Ok(content) = fs::read(&full_path).await {
-        Ok(HttpResponse::Ok()
-            .content_type("application/octet-stream")
-            .body(content))
-    } else {
-        Ok(HttpResponse::NotFound().body("File not found"))
-    }
+    Ok(HttpResponse::Ok().json(file_list))
 }
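
A client-side sketch of exercising the new MinIO-backed upload route, assuming a reqwest dependency with the multipart feature enabled; the host, folder, and file contents are illustrative:

use reqwest::multipart::{Form, Part};

async fn upload_demo() -> Result<(), Box<dyn std::error::Error>> {
    // The field name is arbitrary here; the handler only reads the part's filename.
    let part = Part::bytes(b"hello".to_vec()).file_name("hello.txt");
    let form = Form::new().part("file", part);
    let resp = reqwest::Client::new()
        .post("http://localhost:8080/files/upload/demo-folder")
        .multipart(form)
        .send()
        .await?;
    println!("{}", resp.text().await?);
    Ok(())
}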

src/main.rs (68 changed lines)

@@ -2,7 +2,6 @@ use actix_cors::Cors;
 use actix_web::{web, App, HttpServer};
 use dotenv::dotenv;
 use log::info;
-use std::error::Error as StdError;
 use std::sync::Arc;
 
 mod auth;

@@ -30,12 +29,9 @@ use crate::bot::{
 };
 use crate::channels::{VoiceAdapter, WebChannelAdapter};
 use crate::config::AppConfig;
-use crate::email::{send_email, test_email};
-use crate::file::{download_file, list_file, upload_file};
-use crate::llm_legacy::llm::{
-    chat_completions_local, embeddings_local, generic_chat_completions, health,
-};
-use crate::shared::state::AppState;
+use crate::email::send_email;
+use crate::file::{list_file, upload_file};
+use crate::shared::AppState;
 use crate::whatsapp::WhatsAppAdapter;
 
 #[actix_web::main]

@@ -47,6 +43,7 @@ async fn main() -> std::io::Result<()> {
 
     let config = AppConfig::from_env();
 
+    // Main database pool (required)
     let db_pool = match sqlx::postgres::PgPool::connect(&config.database_url()).await {
         Ok(pool) => {
             info!("Connected to main database");

@@ -61,17 +58,20 @@ async fn main() -> std::io::Result<()> {
         }
     };
 
-    let db_custom_pool = match sqlx::postgres::PgPool::connect(&config.database_custom_url()).await {
-        Ok(pool) => {
-            info!("Connected to custom database");
-            pool
-        }
-        Err(e) => {
-            log::warn!("Failed to connect to custom database: {}", e);
-            None
-        }
-    };
+    // Optional custom database pool
+    let db_custom_pool: Option<sqlx::Pool<sqlx::Postgres>> =
+        match sqlx::postgres::PgPool::connect(&config.database_custom_url()).await {
+            Ok(pool) => {
+                info!("Connected to custom database");
+                Some(pool)
+            }
+            Err(e) => {
+                log::warn!("Failed to connect to custom database: {}", e);
+                None
+            }
+        };
 
+    // Optional Redis client
     let redis_client = match redis::Client::open("redis://127.0.0.1/") {
         Ok(client) => {
             info!("Connected to Redis");

@@ -83,6 +83,7 @@ async fn main() -> std::io::Result<()> {
         }
     };
 
+    // Placeholder for MinIO (not yet implemented)
     let minio_client = None;
 
     let auth_service = auth::AuthService::new(db_pool.clone(), redis_client.clone());

@@ -91,12 +92,8 @@ async fn main() -> std::io::Result<()> {
     let tool_manager = tools::ToolManager::new();
     let llm_provider = Arc::new(llm::MockLLMProvider::new());
 
-    let orchestrator = bot::BotOrchestrator::new(
-        session_manager,
-        tool_manager,
-        llm_provider,
-        auth_service,
-    );
+    let orchestrator =
+        bot::BotOrchestrator::new(session_manager, tool_manager, llm_provider, auth_service);
 
     let web_adapter = Arc::new(WebChannelAdapter::new());
     let voice_adapter = Arc::new(VoiceAdapter::new(

@@ -113,13 +110,13 @@ async fn main() -> std::io::Result<()> {
 
     let tool_api = Arc::new(tools::ToolApi::new());
 
-    let browser_pool = match web_automation::BrowserPool::new(2).await {
-        Ok(pool) => Arc::new(pool),
-        Err(e) => {
-            log::warn!("Failed to create browser pool: {}", e);
-            Arc::new(web_automation::BrowserPool::new(0).await.unwrap())
-        }
-    };
+    // Browser pool – constructed with the required three arguments.
+    // Adjust the strings as needed for your environment.
+    let browser_pool = Arc::new(web_automation::BrowserPool::new(
+        "chrome".to_string(),
+        2,
+        "headless".to_string(),
+    ));
 
     let app_state = AppState {
         minio_client,

@@ -134,7 +131,10 @@ async fn main() -> std::io::Result<()> {
         tool_api,
     };
 
-    info!("Starting server on {}:{}", config.server.host, config.server.port);
+    info!(
+        "Starting server on {}:{}",
+        config.server.host, config.server.port
+    );
 
     HttpServer::new(move || {
         let cors = Cors::default()

@@ -158,14 +158,8 @@ async fn main() -> std::io::Result<()> {
             .service(get_session_history)
             .service(set_mode_handler)
             .service(send_email)
-            .service(test_email)
             .service(upload_file)
             .service(list_file)
-            .service(download_file)
-            .service(health)
-            .service(chat_completions_local)
-            .service(embeddings_local)
-            .service(generic_chat_completions)
     })
     .bind((config.server.host.clone(), config.server.port))?
     .run()
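
The minio_client = None placeholder above could later be wired to the init_minio helper added in src/file/mod.rs; a sketch of that wiring, mirroring the optional-dependency pattern already used for the custom database pool (the module path is an assumption):

// Hypothetical replacement for `let minio_client = None;` in main():
let minio_client = match file::init_minio(&config).await {
    Ok(client) => Some(client),
    Err(e) => {
        log::warn!("Failed to initialize MinIO: {}", e);
        None
    }
};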

@@ -1,27 +0,0 @@
-export BOT_ID=
-./mc alias set minio http://localhost:9000 user pass
-./mc admin user add minio $BOT_ID
-
-cat > $BOT_ID-policy.json <<EOF
-{
-  "Version": "2012-10-17",
-  "Statement": [
-    {
-      "Effect": "Allow",
-      "Action": [
-        "s3:ListBucket",
-        "s3:GetObject",
-        "s3:PutObject",
-        "s3:DeleteObject"
-      ],
-      "Resource": [
-        "arn:aws:s3:::pragmatismo-$BOT_ID.gbai",
-        "arn:aws:s3:::pragmatismo-$BOT_ID.gbai/*"
-      ]
-    }
-  ]
-}
-EOF
-
-./mc admin policy create minio $BOT_ID-policy $BOT_ID-policy.json
-./mc admin policy attach minio $BOT_ID-policy --user $BOT_ID

@@ -1,29 +0,0 @@
-df -h
-printf "%-20s %-10s %-10s %-10s %-6s %s\n" "CONTAINER" "USED" "AVAIL" "TOTAL" "USE%" "MOUNT"
-for container in $(lxc list -c n --format csv); do
-    disk_info=$(lxc exec $container -- df -h / --output=used,avail,size,pcent | tail -n 1)
-    printf "%-20s %s\n" "$container" "$disk_info"
-done
-
-#!/bin/bash
-
-# Directory to analyze
-TARGET_DIR="/opt/gbo/tenants/pragmatismo"
-
-echo "Calculating sizes for directories in $TARGET_DIR..."
-echo ""
-
-# Check if directory exists
-if [ ! -d "$TARGET_DIR" ]; then
-    echo "Error: Directory $TARGET_DIR does not exist"
-    exit 1
-fi
-
-# Get the size of each subdirectory
-echo "Directory Size Report:"
-echo "----------------------"
-du -h --max-depth=1 "$TARGET_DIR" | sort -hr | awk -F'\t' '{printf "%-50s %s\n", $2, $1}'
-
-echo ""
-echo "Total size:"
-du -sh "$TARGET_DIR"

@@ -1,71 +0,0 @@
-#!/bin/bash
-
-# Cleanup script for Ubuntu Server and LXC containers
-# Run with sudo privileges
-
-echo "Starting system cleanup..."
-
-### Host System Cleanup ###
-echo -e "\n[ HOST SYSTEM CLEANUP ]"
-
-# Package manager cache
-echo "Cleaning package cache..."
-apt clean
-apt autoclean
-apt autoremove -y
-
-# Journal logs
-echo "Cleaning journal logs..."
-journalctl --vacuum-time=2d 2>/dev/null
-
-# Temporary files
-echo "Cleaning temporary files..."
-rm -rf /tmp/* /var/tmp/*
-
-# Thumbnail cache
-echo "Cleaning thumbnail cache..."
-rm -rf ~/.cache/thumbnails/* /root/.cache/thumbnails/*
-
-# DNS cache
-echo "Flushing DNS cache..."
-systemd-resolve --flush-caches 2>/dev/null || true
-
-# Old kernels (keep 2 latest)
-echo "Removing old kernels..."
-apt purge -y $(dpkg -l | awk '/^ii linux-image-*/{print $2}' | grep -v $(uname -r) | head -n -2) 2>/dev/null
-
-# Crash reports
-echo "Clearing crash reports..."
-rm -f /var/crash/*
-
-### LXC Containers Cleanup ###
-echo -e "\n[ LXC CONTAINERS CLEANUP ]"
-
-# Check if LXC is installed
-if command -v lxc >/dev/null 2>&1; then
-    for container in $(lxc list -c n --format csv | grep -v "^$"); do
-        echo -e "\nCleaning container: $container"
-
-        # Execute cleanup commands in container
-        lxc exec "$container" -- bash -c "
-            echo 'Cleaning package cache...'
-            apt clean && apt autoclean && apt autoremove -y
-
-            echo 'Cleaning temporary files...'
-            rm -rf /tmp/* /var/tmp/*
-
-            echo 'Cleaning logs...'
-            rm -rf /opt/gbo/logs/*
-
-            echo 'Cleaning journal logs...'
-            journalctl --vacuum-time=1d 2>/dev/null || true
-
-            echo 'Cleaning thumbnail cache...'
-            rm -rf /home/*/.cache/thumbnails/* /root/.cache/thumbnails/*
-        " 2>/dev/null
-    done
-else
-    echo "LXC not installed, skipping container cleanup."
-fi
-
-echo -e "\nCleanup completed!"

@@ -1,6 +0,0 @@
-lxc list --format json | jq -r '.[].name' | while read container; do
-    echo -n "$container: "
-    lxc exec $container -- df -h / --output=used < /dev/null | tail -n1
-done
-
-du -h --max-depth=1 "." 2>/dev/null | sort -rh | head -n 50 | awk '{printf "%-10s %s\n", $1, $2}'

@@ -1,8 +0,0 @@
-az network public-ip list --resource-group "$CLOUD_GROUP" \
-  --query "[].{Name:name, IP:ipAddress, ReverseDNS:dnsSettings.reverseFqdn}" \
-  -o table
-
-az network public-ip update --resource-group "$CLOUD_GROUP"
-  --name "pip-network-adapter-name"
-  --reverse-fqdn "outbound14.domain.com.br"
-

@@ -1,65 +0,0 @@
-sudo apt install -y cloud-guest-utils e2fsprogs
-
-apt install -y make g++ build-essential
-apt install -y openjdk-17-jdk ant
-apt install -y sudo systemd wget zip procps ccache
-apt install -y automake bison flex git gperf graphviz junit4 libtool m4 nasm
-apt install -y libcairo2-dev libjpeg-dev libegl1-mesa-dev libfontconfig1-dev \
-    libgl1-mesa-dev libgif-dev libgtk-3-dev librsvg2-dev libpango1.0-dev
-apt install -y libcap-dev libcap2-bin libkrb5-dev libpcap0.8-dev openssl libssl-dev
-apt install -y libxcb-dev libx11-xcb-dev libxkbcommon-x11-dev libxtst-dev \
-    libxrender-dev libxslt1-dev libxt-dev xsltproc
-apt install -y libcunit1-dev libcppunit-dev libpam0g-dev libcups2-dev libzstd-dev uuid-runtime
-apt install -y python3-dev python3-lxml python3-pip python3-polib
-apt install -y nodejs npm
-apt install -y libpoco-dev libpococrypto80
-apt install -y libreoffice-dev
-
-
-mkdir -p /opt/lo && cd /opt/lo
-wget https://github.com/CollaboraOnline/online/releases/download/for-code-assets/core-co-24.04-assets.tar.gz
-tar xf core-co-24.04-assets.tar.gz && rm core-co-24.04-assets.tar.gz
-
-useradd cool -G sudo
-mkdir -p /opt/cool && chown cool:cool /opt/cool
-cd /opt/cool
-sudo -Hu cool git clone https://github.com/CollaboraOnline/online.git
-cd online && sudo -Hu cool ./autogen.sh
-
-export CPPFLAGS=-I/opt/lo/include
-export LDFLAGS=-L/opt/lo/instdir/program
-./configure --with-lokit-path=/opt/lo --with-lo-path=/opt/lo/instdir --with-poco-includes=/usr/local/include --with-poco-libs=/usr/local/lib
-
-sudo -Hu cool make -j$(nproc)
-
-make install
-mkdir -p /etc/coolwsd /usr/local/var/cache/coolwsd
-
-chown cool:cool /usr/local/var/cache/coolwsd
-admin_pwd=$(openssl rand -hex 6)
-
-cat <<EOT > /lib/systemd/system/coolwsd.service
-[Unit]
-Description=Collabora Online WebSocket Daemon
-After=network.target
-
-[Service]
-ExecStart=/opt/cool/online/coolwsd --o:sys_template_path=/opt/cool/online/systemplate \
-  --o:lo_template_path=/opt/lo/instdir --o:child_root_path=/opt/cool/online/jails \
-  --o:admin_console.username=admin --o:admin_console.password=$DOC_EDITOR_ADMIN_PWD \
-  --o:ssl.enable=false
-User=cool
-
-[Install]
-WantedBy=multi-user.target
-EOT
-
-systemctl daemon-reload
-systemctl enable coolwsd.service
-systemctl start coolwsd.service
-"
-
-echo "Installation complete!"
-echo "Admin password: $admin_pwd"
-echo "Access at: https://localhost:9980"

@@ -1,53 +0,0 @@
-#!/usr/bin/env bash
-
-# Define container limits in an associative array
-declare -A container_limits=(
-    # Pattern            Memory    CPU Allowance
-    ["*tables*"]="4096MB:100ms/100ms"
-    ["*dns*"]="2048MB:100ms/100ms"
-    ["*doc-editor*"]="512MB:10ms/100ms"
-    ["*proxy*"]="2048MB:100ms/100ms"
-    ["*directory*"]="1024MB:50ms/100ms"
-    ["*drive*"]="4096MB:50ms/100ms"
-    ["*email*"]="4096MB:100ms/100ms"
-    ["*webmail*"]="4096MB:100ms/100ms"
-    ["*bot*"]="4096MB:50ms/100ms"
-    ["*meeting*"]="4096MB:100ms/100ms"
-    ["*alm*"]="512MB:50ms/100ms"
-    ["*alm-ci*"]="4096MB:100ms/100ms"
-    ["*system*"]="4096MB:50ms/100ms"
-    ["*mailer*"]="4096MB:25ms/100ms"
-)
-
-# Default values (for containers that don't match any pattern)
-DEFAULT_MEMORY="1024MB"
-DEFAULT_CPU_ALLOWANCE="15ms/100ms"
-CPU_COUNT=2
-CPU_PRIORITY=10
-
-for pattern in "${!container_limits[@]}"; do
-    echo "Configuring $container..."
-
-    memory=$DEFAULT_MEMORY
-    cpu_allowance=$DEFAULT_CPU_ALLOWANCE
-
-    # Configure all containers
-    for container in $(lxc list -c n --format csv); do
-        # Check if container matches any pattern
-        if [[ $container == $pattern ]]; then
-            IFS=':' read -r memory cpu_allowance <<< "${container_limits[$pattern]}"
-
-            # Apply configuration
-            lxc config set "$container" limits.memory "$memory"
-            lxc config set "$container" limits.cpu.allowance "$cpu_allowance"
-            lxc config set "$container" limits.cpu "$CPU_COUNT"
-            lxc config set "$container" limits.cpu.priority "$CPU_PRIORITY"
-
-            echo "Restarting $container..."
-            lxc restart "$container"
-
-            lxc config show "$container" | grep -E "memory|cpu"
-            break
-        fi
-    done
-done

@@ -1,7 +0,0 @@
-lxc config device override $CONTAINER_NAME root
-lxc config device set $CONTAINER_NAME root size 6GB
-
-zpool set autoexpand=on default
-zpool online -e default /var/snap/lxd/common/lxd/disks/default.img
-zpool list
-zfs list

@@ -1,6 +0,0 @@
-
-# Host
-sudo lxc config set core.trust_password "$LXC_TRUST_PASSWORD"
-
-# ALM-CI
-lxc remote add bot 10.16.164.? --accept-certificate --password "$LXC_TRUST_PASSWORD"

@@ -1,10 +0,0 @@
-#!/bin/bash
-
-# Disable shell timeout
-
-sed -i '/TMOUT/d' /etc/profile /etc/bash.bashrc /etc/profile.d/*
-echo 'export TMOUT=0' > /etc/profile.d/notimeout.sh
-chmod +x /etc/profile.d/notimeout.sh
-sed -i '/pam_exec.so/s/quiet/quiet set_timeout=0/' /etc/pam.d/sshd 2>/dev/null
-source /etc/profile
-

@@ -1,46 +1,249 @@
-use thirtyfour::{ChromeCapabilities, ChromiumLikeCapabilities, WebDriver};
-use tokio::sync::{Semaphore, SemaphorePermit};
+// wget https://dl.google.com/linux/direct/google-chrome-stable_current_amd64.deb
+// sudo dpkg -i google-chrome-stable_current_amd64.deb
+use log::info;
+
+use std::env;
+use std::error::Error;
+use std::future::Future;
+use std::path::PathBuf;
+use std::pin::Pin;
+use std::process::Command;
 use std::sync::Arc;
+use thirtyfour::{ChromiumLikeCapabilities, DesiredCapabilities, WebDriver};
+use tokio::fs;
+use tokio::sync::Semaphore;
+
+use crate::shared::utils::{download_file, extract_zip_recursive};
+
+pub struct BrowserSetup {
+    pub brave_path: String,
+    pub chromedriver_path: String,
+}
 
 pub struct BrowserPool {
-    semaphore: Arc<Semaphore>,
     webdriver_url: String,
+    semaphore: Semaphore,
+    brave_path: String,
 }
 
 impl BrowserPool {
-    pub async fn new(max_browsers: usize) -> Result<Self, Box<dyn std::error::Error>> {
-        let webdriver_url = std::env::var("WEBDRIVER_URL")
-            .unwrap_or_else(|_| "http://localhost:9515".to_string());
-
-        Ok(Self {
-            semaphore: Arc::new(Semaphore::new(max_browsers)),
+    pub fn new(webdriver_url: String, max_concurrent: usize, brave_path: String) -> Self {
+        Self {
             webdriver_url,
-        })
-    }
-
-    pub async fn get_browser(&self) -> Result<(WebDriver, SemaphorePermit<'_>), Box<dyn std::error::Error>> {
-        let permit = self.semaphore.acquire().await?;
-
-        let mut caps = ChromeCapabilities::new();
-        caps.add_arg("--headless=new")?;
-        caps.add_arg("--no-sandbox")?;
-        caps.add_arg("--disable-dev-shm-usage")?;
-
-        let driver = WebDriver::new(&self.webdriver_url, caps).await?;
-        Ok((driver, permit))
-    }
-
-    pub async fn with_browser<F, T>(&self, f: F) -> Result<T, Box<dyn std::error::Error>>
-    where
-        F: FnOnce(WebDriver) -> std::pin::Pin<Box<dyn std::future::Future<Output = Result<T, Box<dyn std::error::Error>>> + Send>>,
-    {
-        let (driver, _permit) = self.get_browser().await?;
-        let result = f(driver).await;
-
-        if let Ok(driver) = result.as_ref().map(|_| &driver) {
-            let _ = driver.quit().await;
+            semaphore: Semaphore::new(max_concurrent),
+            brave_path,
         }
     }
 
+    pub async fn with_browser<F, T>(&self, f: F) -> Result<T, Box<dyn Error + Send + Sync>>
+    where
+        F: FnOnce(
+                WebDriver,
+            )
+                -> Pin<Box<dyn Future<Output = Result<T, Box<dyn Error + Send + Sync>>> + Send>>
+            + Send
+            + 'static,
+        T: Send + 'static,
+    {
+        // Acquire a permit to respect the concurrency limit
+        let _permit = self.semaphore.acquire().await?;
+
+        // Build Chrome/Brave capabilities
+        let mut caps = DesiredCapabilities::chrome();
+        caps.set_binary(&self.brave_path)?;
+        // caps.add_arg("--headless=new")?; // Uncomment if headless mode is desired
+        caps.add_arg("--disable-gpu")?;
+        caps.add_arg("--no-sandbox")?;
+
+        // Create a new WebDriver instance
+        let driver = WebDriver::new(&self.webdriver_url, caps).await?;
+
+        // Execute the user‑provided async function with the driver
+        let result = f(driver).await;
+
+        result
+    }
 }
 
+impl BrowserSetup {
+    pub async fn new() -> Result<Self, Box<dyn std::error::Error>> {
+        // Check for Brave installation
+        let brave_path = Self::find_brave().await?;
+
+        // Check for chromedriver
+        let chromedriver_path = Self::setup_chromedriver().await?;
+
+        Ok(Self {
+            brave_path,
+            chromedriver_path,
+        })
+    }
+
+    async fn find_brave() -> Result<String, Box<dyn std::error::Error>> {
+        let mut possible_paths = vec![
+            // Windows - Program Files
+            String::from(r"C:\Program Files\BraveSoftware\Brave-Browser\Application\brave.exe"),
+            // macOS
+            String::from("/Applications/Brave Browser.app/Contents/MacOS/Brave Browser"),
+            // Linux
+            String::from("/usr/bin/brave-browser"),
+            String::from("/usr/bin/brave"),
+        ];
+
+        // Windows - AppData (current user)
+        if let Ok(local_appdata) = env::var("LOCALAPPDATA") {
+            let mut path = PathBuf::from(local_appdata);
+            path.push("BraveSoftware\\Brave-Browser\\Application\\brave.exe");
+            possible_paths.push(path.to_string_lossy().to_string());
+        }
+
+        for path in possible_paths {
+            if fs::metadata(&path).await.is_ok() {
+                return Ok(path);
+            }
+        }
+
+        Err("Brave browser not found. Please install Brave first.".into())
+    }
+
+    async fn setup_chromedriver() -> Result<String, Box<dyn std::error::Error>> {
+        // Create chromedriver directory in executable's parent directory
+        let mut chromedriver_dir = env::current_exe()?.parent().unwrap().to_path_buf();
+        chromedriver_dir.push("chromedriver");
+
+        // Ensure the directory exists
+        if !chromedriver_dir.exists() {
+            fs::create_dir(&chromedriver_dir).await?;
+        }
+
+        // Determine the final chromedriver path
+        let chromedriver_path = if cfg!(target_os = "windows") {
+            chromedriver_dir.join("chromedriver.exe")
+        } else {
+            chromedriver_dir.join("chromedriver")
+        };
+
+        // Check if chromedriver exists
+        if fs::metadata(&chromedriver_path).await.is_err() {
+            let (download_url, platform) = match (cfg!(target_os = "windows"), cfg!(target_arch = "x86_64")) {
+                (true, true) => (
+                    "https://storage.googleapis.com/chrome-for-testing-public/138.0.7204.183/win64/chromedriver-win64.zip",
+                    "win64",
+                ),
+                (true, false) => (
+                    "https://storage.googleapis.com/chrome-for-testing-public/138.0.7204.183/win32/chromedriver-win32.zip",
+                    "win32",
+                ),
+                (false, true) if cfg!(target_os = "macos") && cfg!(target_arch = "aarch64") => (
+                    "https://storage.googleapis.com/chrome-for-testing-public/138.0.7204.183/mac-arm64/chromedriver-mac-arm64.zip",
+                    "mac-arm64",
+                ),
+                (false, true) if cfg!(target_os = "macos") => (
+                    "https://storage.googleapis.com/chrome-for-testing-public/138.0.7204.183/mac-x64/chromedriver-mac-x64.zip",
+                    "mac-x64",
+                ),
+                (false, true) => (
+                    "https://storage.googleapis.com/chrome-for-testing-public/138.0.7204.183/linux64/chromedriver-linux64.zip",
+                    "linux64",
+                ),
+                _ => return Err("Unsupported platform".into()),
+            };
+
+            let mut zip_path = std::env::temp_dir();
+            zip_path.push("chromedriver.zip");
+            info!("Downloading chromedriver for {}...", platform);
+
+            // Download the zip file
+            download_file(download_url, &zip_path.to_str().unwrap()).await?;
+
+            // Extract the zip to a temporary directory first
+            let mut temp_extract_dir = std::env::temp_dir();
+            temp_extract_dir.push("chromedriver_extract");
+            let temp_extract_dir1 = temp_extract_dir.clone();
+
+            // Clean up any previous extraction
+            let _ = fs::remove_dir_all(&temp_extract_dir).await;
+            fs::create_dir(&temp_extract_dir).await?;
+
+            extract_zip_recursive(&zip_path, &temp_extract_dir)?;
+
+            // Chrome for Testing zips contain a platform-specific directory
+            // Find the chromedriver binary in the extracted structure
+            let mut extracted_binary_path = temp_extract_dir;
+            extracted_binary_path.push(format!("chromedriver-{}", platform));
+            extracted_binary_path.push(if cfg!(target_os = "windows") {
+                "chromedriver.exe"
+            } else {
+                "chromedriver"
+            });
+
+            // Try to move the file, fall back to copy if cross-device
+            match fs::rename(&extracted_binary_path, &chromedriver_path).await {
+                Ok(_) => (),
+                Err(e) if e.kind() == std::io::ErrorKind::CrossesDevices => {
+                    // Cross-device move failed, use copy instead
+                    fs::copy(&extracted_binary_path, &chromedriver_path).await?;
+                    // Set permissions on the copied file
+                    #[cfg(unix)]
+                    {
+                        use std::os::unix::fs::PermissionsExt;
+                        let mut perms = fs::metadata(&chromedriver_path).await?.permissions();
+                        perms.set_mode(0o755);
+                        fs::set_permissions(&chromedriver_path, perms).await?;
+                    }
+                }
+                Err(e) => return Err(e.into()),
+            }
+
+            // Clean up
+            let _ = fs::remove_file(&zip_path).await;
+            let _ = fs::remove_dir_all(temp_extract_dir1).await;
+
+            // Set executable permissions (if not already set during copy)
+            #[cfg(unix)]
+            {
+                use std::os::unix::fs::PermissionsExt;
+                let mut perms = fs::metadata(&chromedriver_path).await?.permissions();
+                perms.set_mode(0o755);
+                fs::set_permissions(&chromedriver_path, perms).await?;
+            }
+        }
+
+        Ok(chromedriver_path.to_string_lossy().to_string())
+    }
+}
+
+// Modified BrowserPool initialization
+pub async fn initialize_browser_pool() -> Result<Arc<BrowserPool>, Box<dyn std::error::Error>> {
+    let setup = BrowserSetup::new().await?;
+
+    // Start chromedriver process if not running
+    if !is_process_running("chromedriver").await {
+        Command::new(&setup.chromedriver_path)
+            .arg("--port=9515")
+            .spawn()?;
+
+        // Give chromedriver time to start
+        tokio::time::sleep(tokio::time::Duration::from_secs(2)).await;
+    }
+
+    Ok(Arc::new(BrowserPool::new(
+        "http://localhost:9515".to_string(),
+        5, // Max concurrent browsers
+        setup.brave_path,
+    )))
+}
+
+async fn is_process_running(name: &str) -> bool {
+    if cfg!(target_os = "windows") {
+        Command::new("tasklist")
+            .output()
+            .map(|o| String::from_utf8_lossy(&o.stdout).contains(name))
+            .unwrap_or(false)
+    } else {
+        Command::new("pgrep")
+            .arg(name)
+            .output()
+            .map(|o| o.status.success())
+            .unwrap_or(false)
+    }
+}
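
Taken together, initialize_browser_pool and the new with_browser signature would be driven roughly like this; a minimal sketch assuming a reachable chromedriver and the thirtyfour goto/title APIs. Note that with_browser no longer quits the session itself, so the closure does it explicitly:

async fn page_title(url: &str) -> Result<String, Box<dyn std::error::Error + Send + Sync>> {
    let pool = initialize_browser_pool().await.map_err(|e| e.to_string())?;
    let url = url.to_string();
    pool.with_browser(move |driver| {
        Box::pin(async move {
            driver.goto(&url).await?;
            let title = driver.title().await?;
            // Close the session; the pool does not quit drivers on our behalf.
            driver.quit().await?;
            Ok(title)
        })
    })
    .await
}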