- Migration to OpenDAL.

This commit is contained in:
Rodrigo Rodriguez (Pragmatismo) 2025-10-28 14:53:45 -03:00
parent 2a2915b543
commit dba8ffe782
7 changed files with 40 additions and 42 deletions

View file

@@ -41,60 +41,61 @@ default = [ "vectordb"]
vectordb = ["qdrant-client"] vectordb = ["qdrant-client"]
email = ["imap"] email = ["imap"]
web_automation = ["headless_chrome"] web_automation = ["headless_chrome"]
desktop = []
[dependencies] [dependencies]
actix-cors = "0.7" actix-cors = "0.7"
csv = "1.3"
actix-multipart = "0.7" actix-multipart = "0.7"
imap = { version = "3.0.0-alpha.15", optional = true }
actix-web = "4.9" actix-web = "4.9"
actix-ws = "0.3" actix-ws = "0.3"
aes-gcm = "0.10"
anyhow = "1.0" anyhow = "1.0"
argon2 = "0.5"
async-stream = "0.3" async-stream = "0.3"
async-trait = "0.1" async-trait = "0.1"
aes-gcm = "0.10"
argon2 = "0.5"
base64 = "0.22" base64 = "0.22"
bytes = "1.8" bytes = "1.8"
chrono = { version = "0.4", features = ["serde"] } chrono = { version = "0.4", features = ["serde"] }
csv = "1.3"
diesel = { version = "2.1", features = ["postgres", "uuid", "chrono", "serde_json"] } diesel = { version = "2.1", features = ["postgres", "uuid", "chrono", "serde_json"] }
dotenvy = "0.15" dotenvy = "0.15"
downloader = "0.2" downloader = "0.2"
env_logger = "0.11" env_logger = "0.11"
futures = "0.3" futures = "0.3"
futures-util = "0.3" futures-util = "0.3"
headless_chrome = { version = "1.0.18", optional = true }
imap = { version = "3.0.0-alpha.15", optional = true }
include_dir = "0.7"
indicatif = "0.18.0"
lettre = { version = "0.11", features = ["smtp-transport", "builder", "tokio1", "tokio1-native-tls"] } lettre = { version = "0.11", features = ["smtp-transport", "builder", "tokio1", "tokio1-native-tls"] }
livekit = "0.7" livekit = "0.7"
include_dir = "0.7"
log = "0.4" log = "0.4"
mailparse = "0.15" mailparse = "0.15"
native-tls = "0.2" native-tls = "0.2"
num-format = "0.4" num-format = "0.4"
opendal = { version = "0.54.1", features = ["services-s3"] }
pdf-extract = "0.10.0"
qdrant-client = { version = "1.12", optional = true } qdrant-client = { version = "1.12", optional = true }
rhai = { git = "https://github.com/therealprof/rhai.git", branch = "features/use-web-time" } rand = "0.9.2"
redis = { version = "0.27", features = ["tokio-comp"] } redis = { version = "0.27", features = ["tokio-comp"] }
regex = "1.11" regex = "1.11"
reqwest = { version = "0.12", features = ["json", "stream"] } reqwest = { version = "0.12", features = ["json", "stream"] }
rhai = { git = "https://github.com/therealprof/rhai.git", branch = "features/use-web-time" }
scraper = "0.20"
serde = { version = "1.0", features = ["derive"] } serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0" serde_json = "1.0"
sha2 = "0.10.9"
smartstring = "1.0" smartstring = "1.0"
tempfile = "3" tempfile = "3"
time = "0.3.44"
tokio = { version = "1.41", features = ["full"] } tokio = { version = "1.41", features = ["full"] }
tokio-stream = "0.1" tokio-stream = "0.1"
tracing = "0.1" tracing = "0.1"
tracing-subscriber = { version = "0.3", features = ["fmt"] } tracing-subscriber = { version = "0.3", features = ["fmt"] }
ureq = "3.1.2"
urlencoding = "2.1" urlencoding = "2.1"
uuid = { version = "1.11", features = ["serde", "v4"] } uuid = { version = "1.11", features = ["serde", "v4"] }
zip = "2.2" zip = "2.2"
time = "0.3.44"
opendal = { version = "0.54.1", features = ["services-s3"] }
headless_chrome = { version = "1.0.18", optional = true }
rand = "0.9.2"
pdf-extract = "0.10.0"
scraper = "0.20"
sha2 = "0.10.9"
ureq = "3.1.2"
indicatif = "0.18.0"
[profile.release] [profile.release]

2
migrations/6.0.8.sql Normal file
View file

@@ -0,0 +1,2 @@
ALTER TABLE bot_configuration
ADD CONSTRAINT bot_configuration_config_key_unique UNIQUE (config_key);

View file

@@ -1,6 +1,5 @@
use crate::config::AppConfig; use crate::config::AppConfig;
use crate::package_manager::{InstallMode, PackageManager}; use crate::package_manager::{InstallMode, PackageManager};
use actix_web::http::uri::Builder;
use anyhow::Result; use anyhow::Result;
use diesel::connection::SimpleConnection; use diesel::connection::SimpleConnection;
use diesel::Connection; use diesel::Connection;
@@ -299,8 +298,8 @@ impl BootstrapManager {
for (k, v) in vec![(component.to_string(), "true".to_string())] { for (k, v) in vec![(component.to_string(), "true".to_string())] {
diesel::sql_query( diesel::sql_query(
"INSERT INTO bot_config (key, value) VALUES ($1, $2) \ "INSERT INTO bot_configuration (config_key, config_value) VALUES ($1, $2) \
ON CONFLICT (key) DO UPDATE SET value = EXCLUDED.value", ON CONFLICT (config_key) DO UPDATE SET config_value = EXCLUDED.config_value",
) )
.bind::<diesel::sql_types::Text, _>(&k) .bind::<diesel::sql_types::Text, _>(&k)
.bind::<diesel::sql_types::Text, _>(&v) .bind::<diesel::sql_types::Text, _>(&v)

View file

@@ -72,7 +72,7 @@ impl DriveMonitor {
let mut current_files = HashMap::new(); let mut current_files = HashMap::new();
let mut lister = op.lister_with(prefix).recursive(true).await?; let mut lister = op.lister_with(prefix).recursive(true).await?;
while let Some(entry) = lister.try_next().await? { while let Some(entry) = futures::TryStreamExt::try_next(&mut lister).await? {
let path = entry.path().to_string(); let path = entry.path().to_string();
if path.ends_with('/') || !path.ends_with(".bas") { if path.ends_with('/') || !path.ends_with(".bas") {
@@ -132,7 +132,7 @@ impl DriveMonitor {
let mut current_files = HashMap::new(); let mut current_files = HashMap::new();
let mut lister = op.lister_with(prefix).recursive(true).await?; let mut lister = op.lister_with(prefix).recursive(true).await?;
while let Some(entry) = lister.try_next().await? { while let Some(entry) = futures::TryStreamExt::try_next(&mut lister).await? {
let path = entry.path().to_string(); let path = entry.path().to_string();
if path.ends_with('/') { if path.ends_with('/') {

View file

@@ -1,3 +1,5 @@
use crate::config::DriveConfig;
use crate::shared::state::AppState;
use actix_multipart::Multipart; use actix_multipart::Multipart;
use actix_web::web; use actix_web::web;
use actix_web::{post, HttpResponse}; use actix_web::{post, HttpResponse};
@@ -5,8 +7,6 @@ use opendal::Operator;
use std::io::Write; use std::io::Write;
use tempfile::NamedTempFile; use tempfile::NamedTempFile;
use tokio_stream::StreamExt as TokioStreamExt; use tokio_stream::StreamExt as TokioStreamExt;
use crate::config::DriveConfig;
use crate::shared::state::AppState;
#[post("/files/upload/{folder_path}")] #[post("/files/upload/{folder_path}")]
pub async fn upload_file( pub async fn upload_file(
@@ -39,13 +39,13 @@ pub async fn upload_file(
let file_name = file_name.unwrap_or_else(|| "unnamed_file".to_string()); let file_name = file_name.unwrap_or_else(|| "unnamed_file".to_string());
let temp_file_path = temp_file.into_temp_path(); let temp_file_path = temp_file.into_temp_path();
let op = state.get_ref().s3_operator.as_ref().ok_or_else(|| { let op = state.get_ref().s3_operator.as_ref().ok_or_else(|| {
actix_web::error::ErrorInternalServerError("S3 operator is not initialized") actix_web::error::ErrorInternalServerError("S3 operator is not initialized")
})?; })?;
let s3_key = format!("{}/{}", folder_path, file_name); let s3_key = format!("{}/{}", folder_path, file_name);
match upload_to_s3(op, &s3_key, &temp_file_path).await { match upload_to_s3(op, &s3_key, &temp_file_path).await {
Ok(_) => { Ok(_) => {
let _ = std::fs::remove_file(&temp_file_path); let _ = std::fs::remove_file(&temp_file_path);
@@ -64,24 +64,19 @@ pub async fn upload_file(
} }
} }
pub async fn init_drive(cfg: &DriveConfig) -> Result<Operator, Box<dyn std::error::Error>> { pub async fn init_drive(config: &DriveConfig) -> Result<Operator, Box<dyn std::error::Error>> {
use opendal::services::S3; use opendal::services::S3;
use opendal::Operator; use opendal::Operator;
let client = Operator::new(
let mut builder = S3::default(); S3::default()
.root("/")
builder.root("/"); .endpoint(&config.server)
builder.endpoint(&cfg.server); .access_key_id(&config.access_key)
builder.access_key_id(&cfg.access_key); .secret_access_key(&config.secret_key),
builder.secret_access_key(&cfg.secret_key); )?
.finish();
if cfg.server.contains("minio") || cfg.server.contains("localhost") {
builder.enable_virtual_host_style();
}
let op = Operator::new(builder)?.finish(); Ok(client)
Ok(op)
} }
async fn upload_to_s3( async fn upload_to_s3(

View file

@@ -1,6 +1,7 @@
use crate::shared::state::AppState; use crate::shared::state::AppState;
use log::{debug, error, info}; use log::error;
use opendal::Operator; use opendal::Operator;
use tokio_stream::StreamExt;
use std::collections::HashMap; use std::collections::HashMap;
use std::error::Error; use std::error::Error;
use std::sync::Arc; use std::sync::Arc;

View file

@@ -1,7 +1,7 @@
use crate::shared::models::KBCollection; use crate::shared::models::KBCollection;
use crate::shared::state::AppState; use crate::shared::state::AppState;
use log::{debug, error, info, warn}; use log::{ error, info, warn};
use opendal::Operator; use tokio_stream::StreamExt;
use std::collections::HashMap; use std::collections::HashMap;
use std::error::Error; use std::error::Error;
use std::sync::Arc; use std::sync::Arc;