- Migration to OpenDAL.

This commit is contained in:
Rodrigo Rodriguez (Pragmatismo) 2025-10-28 14:53:45 -03:00
parent 2a2915b543
commit dba8ffe782
7 changed files with 40 additions and 42 deletions

View file

@ -41,60 +41,61 @@ default = [ "vectordb"]
vectordb = ["qdrant-client"]
email = ["imap"]
web_automation = ["headless_chrome"]
desktop = []
[dependencies]
actix-cors = "0.7"
csv = "1.3"
actix-multipart = "0.7"
imap = { version = "3.0.0-alpha.15", optional = true }
actix-web = "4.9"
actix-ws = "0.3"
aes-gcm = "0.10"
anyhow = "1.0"
argon2 = "0.5"
async-stream = "0.3"
async-trait = "0.1"
aes-gcm = "0.10"
argon2 = "0.5"
base64 = "0.22"
bytes = "1.8"
chrono = { version = "0.4", features = ["serde"] }
csv = "1.3"
diesel = { version = "2.1", features = ["postgres", "uuid", "chrono", "serde_json"] }
dotenvy = "0.15"
downloader = "0.2"
env_logger = "0.11"
futures = "0.3"
futures-util = "0.3"
headless_chrome = { version = "1.0.18", optional = true }
imap = { version = "3.0.0-alpha.15", optional = true }
include_dir = "0.7"
indicatif = "0.18.0"
lettre = { version = "0.11", features = ["smtp-transport", "builder", "tokio1", "tokio1-native-tls"] }
livekit = "0.7"
include_dir = "0.7"
log = "0.4"
mailparse = "0.15"
native-tls = "0.2"
num-format = "0.4"
opendal = { version = "0.54.1", features = ["services-s3"] }
pdf-extract = "0.10.0"
qdrant-client = { version = "1.12", optional = true }
rhai = { git = "https://github.com/therealprof/rhai.git", branch = "features/use-web-time" }
rand = "0.9.2"
redis = { version = "0.27", features = ["tokio-comp"] }
regex = "1.11"
reqwest = { version = "0.12", features = ["json", "stream"] }
rhai = { git = "https://github.com/therealprof/rhai.git", branch = "features/use-web-time" }
scraper = "0.20"
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
sha2 = "0.10.9"
smartstring = "1.0"
tempfile = "3"
time = "0.3.44"
tokio = { version = "1.41", features = ["full"] }
tokio-stream = "0.1"
tracing = "0.1"
tracing-subscriber = { version = "0.3", features = ["fmt"] }
ureq = "3.1.2"
urlencoding = "2.1"
uuid = { version = "1.11", features = ["serde", "v4"] }
zip = "2.2"
time = "0.3.44"
opendal = { version = "0.54.1", features = ["services-s3"] }
headless_chrome = { version = "1.0.18", optional = true }
rand = "0.9.2"
pdf-extract = "0.10.0"
scraper = "0.20"
sha2 = "0.10.9"
ureq = "3.1.2"
indicatif = "0.18.0"
[profile.release]

2
migrations/6.0.8.sql Normal file
View file

@ -0,0 +1,2 @@
ALTER TABLE bot_configuration
ADD CONSTRAINT bot_configuration_config_key_unique UNIQUE (config_key);

View file

@ -1,6 +1,5 @@
use crate::config::AppConfig;
use crate::package_manager::{InstallMode, PackageManager};
use actix_web::http::uri::Builder;
use anyhow::Result;
use diesel::connection::SimpleConnection;
use diesel::Connection;
@ -299,8 +298,8 @@ impl BootstrapManager {
for (k, v) in vec![(component.to_string(), "true".to_string())] {
diesel::sql_query(
"INSERT INTO bot_config (key, value) VALUES ($1, $2) \
ON CONFLICT (key) DO UPDATE SET value = EXCLUDED.value",
"INSERT INTO bot_configuration (config_key, config_value) VALUES ($1, $2) \
ON CONFLICT (config_key) DO UPDATE SET config_value = EXCLUDED.config_value",
)
.bind::<diesel::sql_types::Text, _>(&k)
.bind::<diesel::sql_types::Text, _>(&v)

View file

@ -72,7 +72,7 @@ impl DriveMonitor {
let mut current_files = HashMap::new();
let mut lister = op.lister_with(prefix).recursive(true).await?;
while let Some(entry) = lister.try_next().await? {
while let Some(entry) = futures::TryStreamExt::try_next(&mut lister).await? {
let path = entry.path().to_string();
if path.ends_with('/') || !path.ends_with(".bas") {
@ -132,7 +132,7 @@ impl DriveMonitor {
let mut current_files = HashMap::new();
let mut lister = op.lister_with(prefix).recursive(true).await?;
while let Some(entry) = lister.try_next().await? {
while let Some(entry) = futures::TryStreamExt::try_next(&mut lister).await? {
let path = entry.path().to_string();
if path.ends_with('/') {

View file

@ -1,3 +1,5 @@
use crate::config::DriveConfig;
use crate::shared::state::AppState;
use actix_multipart::Multipart;
use actix_web::web;
use actix_web::{post, HttpResponse};
@ -5,8 +7,6 @@ use opendal::Operator;
use std::io::Write;
use tempfile::NamedTempFile;
use tokio_stream::StreamExt as TokioStreamExt;
use crate::config::DriveConfig;
use crate::shared::state::AppState;
#[post("/files/upload/{folder_path}")]
pub async fn upload_file(
@ -64,24 +64,19 @@ pub async fn upload_file(
}
}
pub async fn init_drive(cfg: &DriveConfig) -> Result<Operator, Box<dyn std::error::Error>> {
pub async fn init_drive(config: &DriveConfig) -> Result<Operator, Box<dyn std::error::Error>> {
use opendal::services::S3;
use opendal::Operator;
let client = Operator::new(
S3::default()
.root("/")
.endpoint(&config.server)
.access_key_id(&config.access_key)
.secret_access_key(&config.secret_key),
)?
.finish();
let mut builder = S3::default();
builder.root("/");
builder.endpoint(&cfg.server);
builder.access_key_id(&cfg.access_key);
builder.secret_access_key(&cfg.secret_key);
if cfg.server.contains("minio") || cfg.server.contains("localhost") {
builder.enable_virtual_host_style();
}
let op = Operator::new(builder)?.finish();
Ok(op)
Ok(client)
}
async fn upload_to_s3(

View file

@ -1,6 +1,7 @@
use crate::shared::state::AppState;
use log::{debug, error, info};
use log::error;
use opendal::Operator;
use tokio_stream::StreamExt;
use std::collections::HashMap;
use std::error::Error;
use std::sync::Arc;

View file

@ -1,7 +1,7 @@
use crate::shared::models::KBCollection;
use crate::shared::state::AppState;
use log::{debug, error, info, warn};
use opendal::Operator;
use log::{ error, info, warn};
use tokio_stream::StreamExt;
use std::collections::HashMap;
use std::error::Error;
use std::sync::Arc;