- Migration to OpenDAL.

This commit is contained in:
Rodrigo Rodriguez (Pragmatismo) 2025-10-28 14:20:55 -03:00
parent f29a3c1259
commit 2a2915b543
7 changed files with 34 additions and 62 deletions

View file

@@ -19,8 +19,8 @@ done
dirs=( dirs=(
# "auth" # "auth"
"automation" # "automation"
"basic" #"basic"
# "bot" # "bot"
"bootstrap" "bootstrap"
# "package_manager" # "package_manager"
@@ -37,7 +37,7 @@ dirs=(
"file" "file"
"kb" "kb"
"shared" "shared"
"tests" #"tests"
# "tools" # "tools"
# "web_automation" # "web_automation"
# "whatsapp" # "whatsapp"
@@ -54,6 +54,8 @@ done
echo "$PROJECT_ROOT/src/main.rs" >> "$OUTPUT_FILE" echo "$PROJECT_ROOT/src/main.rs" >> "$OUTPUT_FILE"
cat "$PROJECT_ROOT/src/main.rs" >> "$OUTPUT_FILE" cat "$PROJECT_ROOT/src/main.rs" >> "$OUTPUT_FILE"
echo "$PROJECT_ROOT/src/basic/keywords/get.rs" >> "$OUTPUT_FILE"
cat "$PROJECT_ROOT/src/basic/keywords/get.rs" >> "$OUTPUT_FILE"
echo "" >> "$OUTPUT_FILE" echo "" >> "$OUTPUT_FILE"
echo "Compiling..." echo "Compiling..."

View file

@@ -28,7 +28,6 @@ pub fn get_keyword(state: Arc<AppState>, _user: UserSession, engine: &mut Engine
let state_for_blocking = Arc::clone(&state_clone); let state_for_blocking = Arc::clone(&state_clone);
let url_for_blocking = url_str.clone(); let url_for_blocking = url_str.clone();
// ---- fixed section: spawn on separate thread runtime ----
let (tx, rx) = std::sync::mpsc::channel(); let (tx, rx) = std::sync::mpsc::channel();
std::thread::spawn(move || { std::thread::spawn(move || {
let rt = tokio::runtime::Builder::new_multi_thread() let rt = tokio::runtime::Builder::new_multi_thread()
@@ -76,7 +75,6 @@ pub fn get_keyword(state: Arc<AppState>, _user: UserSession, engine: &mut Engine
.unwrap(); .unwrap();
} }
/// Enhanced security check for path traversal and unsafe paths
fn is_safe_path(path: &str) -> bool { fn is_safe_path(path: &str) -> bool {
if path.starts_with("https://") || path.starts_with("http://") { if path.starts_with("https://") || path.starts_with("http://") {
return true; return true;
@@ -189,38 +187,22 @@ pub async fn get_from_bucket(
bucket bucket
}; };
let response = match tokio::time::timeout(
let get_object_future = s3_operator Duration::from_secs(30),
.read(&bucket_name) s3_operator.read(&format!("{}/{}", bucket_name, file_path))
.key(file_path) ).await {
.send();
let response = match tokio::time::timeout(Duration::from_secs(30), get_object_future).await {
Ok(Ok(response)) => response, Ok(Ok(response)) => response,
Ok(Err(e)) => { Ok(Err(e)) => {
error!("S3 get_object failed: {}", e); error!("S3 read failed: {}", e);
return Err(format!("S3 operation failed: {}", e).into()); return Err(format!("S3 operation failed: {}", e).into());
} }
Err(_) => { Err(_) => {
error!("S3 get_object timed out"); error!("S3 read timed out");
return Err("S3 operation timed out".into()); return Err("S3 operation timed out".into());
} }
}; };
let body_future = response.body.collect(); let bytes = response.to_vec();
let data = match tokio::time::timeout(Duration::from_secs(30), body_future).await {
Ok(Ok(data)) => data,
Ok(Err(e)) => {
error!("Failed to collect S3 response body: {}", e);
return Err(format!("Failed to read S3 response: {}", e).into());
}
Err(_) => {
error!("Timeout collecting S3 response body");
return Err("Timeout reading S3 response body".into());
}
};
let bytes = data.into_bytes().to_vec();
debug!( debug!(
"Retrieved {} bytes from S3 for key: {}", "Retrieved {} bytes from S3 for key: {}",
bytes.len(), bytes.len(),

View file

@@ -1,14 +1,15 @@
use crate::config::AppConfig; use crate::config::AppConfig;
use crate::package_manager::{InstallMode, PackageManager}; use crate::package_manager::{InstallMode, PackageManager};
use actix_web::http::uri::Builder;
use anyhow::Result; use anyhow::Result;
use csv;
use diesel::connection::SimpleConnection; use diesel::connection::SimpleConnection;
use diesel::Connection; use diesel::Connection;
use diesel::RunQueryDsl; use diesel::RunQueryDsl;
use dotenvy::dotenv; use dotenvy::dotenv;
use log::{error, info, trace}; use log::{error, info};
use opendal::services::S3; use opendal::services::S3;
use opendal::{Operator, OperatorBuilder}; use opendal::Operator;
use rand::Rng;
use rand::distr::Alphanumeric; use rand::distr::Alphanumeric;
use sha2::{Digest, Sha256}; use sha2::{Digest, Sha256};
use std::io::{self, Write}; use std::io::{self, Write};
@@ -278,7 +279,6 @@ impl BootstrapManager {
} }
fn generate_secure_password(&self, length: usize) -> String { fn generate_secure_password(&self, length: usize) -> String {
use rand::Rng;
let mut rng = rand::rng(); let mut rng = rand::rng();
std::iter::repeat_with(|| rng.sample(Alphanumeric) as char) std::iter::repeat_with(|| rng.sample(Alphanumeric) as char)
.take(length) .take(length)
@@ -315,18 +315,13 @@ impl BootstrapManager {
let mut conn = diesel::PgConnection::establish(&database_url)?; let mut conn = diesel::PgConnection::establish(&database_url)?;
self.create_bots_from_templates(&mut conn)?; self.create_bots_from_templates(&mut conn)?;
let builder = S3::default(); let client = Operator::new(
builder S3::default()
.root("/") .root("/")
.endpoint(&config.minio.server) .endpoint(&config.minio.server)
.access_key_id(&config.minio.access_key) .access_key_id(&config.minio.access_key)
.secret_access_key(&config.minio.secret_key); .secret_access_key(&config.minio.secret_key)
)?.finish();
// if !config.minio.use_ssl {
// builder.disable_ssl_verification(true);
// }
let client = Operator::new(builder)?.finish();
let templates_dir = Path::new("templates"); let templates_dir = Path::new("templates");
if !templates_dir.exists() { if !templates_dir.exists() {
@@ -391,7 +386,7 @@ impl BootstrapManager {
.bind::<diesel::sql_types::Text, _>(format!("Bot for {} template", bot_name)) .bind::<diesel::sql_types::Text, _>(format!("Bot for {} template", bot_name))
.execute(conn)?; .execute(conn)?;
} else { } else {
trace!("Bot {} already exists", formatted_name); log::trace!("Bot {} already exists", formatted_name);
} }
} }
} }

View file

@@ -72,15 +72,14 @@ impl DriveMonitor {
let mut current_files = HashMap::new(); let mut current_files = HashMap::new();
let mut lister = op.lister_with(prefix).recursive(true).await?; let mut lister = op.lister_with(prefix).recursive(true).await?;
while let Some(entry) = lister.next().await { while let Some(entry) = lister.try_next().await? {
let entry = entry?;
let path = entry.path().to_string(); let path = entry.path().to_string();
if path.ends_with('/') || !path.ends_with(".bas") { if path.ends_with('/') || !path.ends_with(".bas") {
continue; continue;
} }
let meta = entry.metadata().await?; let meta = op.stat(&path).await?;
let file_state = FileState { let file_state = FileState {
path: path.clone(), path: path.clone(),
size: meta.content_length() as i64, size: meta.content_length() as i64,
@@ -133,8 +132,7 @@ impl DriveMonitor {
let mut current_files = HashMap::new(); let mut current_files = HashMap::new();
let mut lister = op.lister_with(prefix).recursive(true).await?; let mut lister = op.lister_with(prefix).recursive(true).await?;
while let Some(entry) = lister.next().await { while let Some(entry) = lister.try_next().await? {
let entry = entry?;
let path = entry.path().to_string(); let path = entry.path().to_string();
if path.ends_with('/') { if path.ends_with('/') {
@@ -146,7 +144,7 @@ impl DriveMonitor {
continue; continue;
} }
let meta = entry.metadata().await?; let meta = op.stat(&path).await?;
let file_state = FileState { let file_state = FileState {
path: path.clone(), path: path.clone(),
size: meta.content_length() as i64, size: meta.content_length() as i64,

View file

@@ -1,7 +1,6 @@
use actix_multipart::Multipart; use actix_multipart::Multipart;
use actix_web::web; use actix_web::web;
use actix_web::{post, HttpResponse}; use actix_web::{post, HttpResponse};
use log::{error, info};
use opendal::Operator; use opendal::Operator;
use std::io::Write; use std::io::Write;
use tempfile::NamedTempFile; use tempfile::NamedTempFile;
@@ -76,14 +75,12 @@ pub async fn init_drive(cfg: &DriveConfig) -> Result<Operator, Box<dyn std::erro
builder.access_key_id(&cfg.access_key); builder.access_key_id(&cfg.access_key);
builder.secret_access_key(&cfg.secret_key); builder.secret_access_key(&cfg.secret_key);
if cfg.server.contains("minio") || cfg.server.contains("localhost") { if cfg.server.contains("minio") || cfg.server.contains("localhost") {
builder.enable_virtual_host_style(); builder.enable_virtual_host_style();
} }
let op = Operator::new(builder)?.finish(); let op = Operator::new(builder)?.finish();
info!("OpenDAL S3 operator initialized for bucket: {}", cfg.bucket);
Ok(op) Ok(op)
} }
@@ -91,7 +88,7 @@ async fn upload_to_s3(
op: &Operator, op: &Operator,
key: &str, key: &str,
file_path: &std::path::Path, file_path: &std::path::Path,
) -> Result<(), opendal::Error> { ) -> Result<(), Box<dyn std::error::Error>> {
let data = std::fs::read(file_path)?; let data = std::fs::read(file_path)?;
op.write(key, data).await?; op.write(key, data).await?;
Ok(()) Ok(())

View file

@@ -85,18 +85,17 @@ impl MinIOHandler {
let mut current_files = HashMap::new(); let mut current_files = HashMap::new();
let mut lister = op.lister_with(prefix).recursive(true).await?; let mut lister = op.lister_with(prefix).recursive(true).await?;
while let Some(entry) = lister.next().await { while let Some(entry) = lister.try_next().await? {
let entry = entry?;
let path = entry.path().to_string(); let path = entry.path().to_string();
if path.ends_with('/') { if path.ends_with('/') {
continue; continue;
} }
let meta = entry.metadata().await?; let meta = op.stat(&path).await?;
let file_state = FileState { let file_state = FileState {
path: path.clone(), path: path.clone(),
size: meta.content_length().parse::<i64>().unwrap_or(0), size: meta.content_length() as i64,
etag: meta.etag().unwrap_or_default().to_string(), etag: meta.etag().unwrap_or_default().to_string(),
last_modified: meta.last_modified().map(|dt| dt.to_rfc3339()), last_modified: meta.last_modified().map(|dt| dt.to_rfc3339()),
}; };

View file

@@ -104,15 +104,14 @@ impl KBManager {
}; };
let mut lister = op.lister_with(&collection.folder_path).recursive(true).await?; let mut lister = op.lister_with(&collection.folder_path).recursive(true).await?;
while let Some(entry) = lister.next().await { while let Some(entry) = lister.try_next().await? {
let entry = entry?;
let path = entry.path().to_string(); let path = entry.path().to_string();
if path.ends_with('/') { if path.ends_with('/') {
continue; continue;
} }
let meta = entry.metadata().await?; let meta = op.stat(&path).await?;
if let Err(e) = self if let Err(e) = self
.process_file( .process_file(
&collection, &collection,