diff --git a/Cargo.lock b/Cargo.lock index b2e0d21..74b96fa 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2522,6 +2522,18 @@ version = "0.2.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "28dea519a9695b9977216879a3ebfddf92f1c08c05d984f8996aecd6ecdc811d" +[[package]] +name = "filetime" +version = "0.2.25" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "35c0522e981e68cbfa8c3f978441a5f34b30b96e146b33cd3359176b50fe8586" +dependencies = [ + "cfg-if", + "libc", + "libredox", + "windows-sys 0.59.0", +] + [[package]] name = "fixedbitset" version = "0.2.0" @@ -2932,9 +2944,13 @@ version = "0.1.0" dependencies = [ "ctrlc", "dotenv", + "flate2", + "reqwest 0.11.27", "serde", "serde_json", + "tar", "tokio", + "zip", ] [[package]] @@ -3039,6 +3055,7 @@ dependencies = [ "gb-monitoring", "hyper 1.6.0", "hyper-util", + "log", "redis", "rstest", "serde", @@ -3052,6 +3069,7 @@ dependencies = [ "tracing", "tracing-subscriber", "uuid", + "vector", ] [[package]] @@ -4437,6 +4455,17 @@ version = "0.2.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8355be11b20d696c8f18f6cc018c4e372165b1fa8126cef092399c9951984ffa" +[[package]] +name = "libredox" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c0ff37bd590ca25063e35af745c343cb7a0271906fb7b37e4813e8f79f00268d" +dependencies = [ + "bitflags 2.9.0", + "libc", + "redox_syscall 0.5.10", +] + [[package]] name = "libsqlite3-sys" version = "0.27.0" @@ -6345,6 +6374,12 @@ dependencies = [ "rand_core 0.5.1", ] +[[package]] +name = "random" +version = "0.14.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "61c7093a82705b48f10d4b165c6482dbe9f58cd63a870d85537e857b2badbd88" + [[package]] name = "rawpointer" version = "0.2.1" @@ -7805,6 +7840,17 @@ dependencies = [ "version-compare", ] +[[package]] +name = "tar" +version = "0.4.44" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1d863878d212c87a19c1a610eb53bb01fe12951c0501cf5a0d65f724914a667a" +dependencies = [ + "filetime", + "libc", + "xattr", +] + [[package]] name = "target-lexicon" version = "0.12.16" @@ -8818,6 +8864,15 @@ version = "0.2.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426" +[[package]] +name = "vector" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "280be990562443e0cb0ddc1ef2893fc2f4ff8ce0a2805a2dd42826f13e8eafd0" +dependencies = [ + "random", +] + [[package]] name = "version-compare" version = "0.2.0" @@ -9651,6 +9706,16 @@ dependencies = [ "time", ] +[[package]] +name = "xattr" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0d65cbf2f12c15564212d48f4e3dfb87923d25d611f2aed18f4cb23f0413d89e" +dependencies = [ + "libc", + "rustix 1.0.1", +] + [[package]] name = "xml-rs" version = "0.8.25" diff --git a/Cargo.toml b/Cargo.toml index cb90b99..a1bb492 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -84,7 +84,7 @@ jsonwebtoken = "9.2" argon2 = "0.5" ring = "0.17" -reqwest = { version = "0.11", features = ["json", "stream"] } +reqwest = { version = "0.11", features = ["json", "stream", "blocking"] } # Cloud services aws-sdk-core = "1.1" @@ -135,6 +135,9 @@ whatlang = "0.16" pdf = "0.8" docx = "1.1" zip = "0.6" +tar = "0.4" +flate2 = "1.0" [workspace.metadata] msrv = "1.70.0" + diff --git a/gb-core/src/models.rs 
b/gb-core/src/models.rs index 7ce43dc..a187a26 100644 --- a/gb-core/src/models.rs +++ b/gb-core/src/models.rs @@ -242,9 +242,6 @@ pub struct AppState { pub minio_client: Option, pub config: Option, pub db_pool: Option, - pub redis_pool: Option, - pub kafka_producer: Option, - //pub zitadel_client: Option, } // File models diff --git a/gb-file/src/handlers.rs b/gb-file/src/handlers.rs index 98c2a01..8eab011 100644 --- a/gb-file/src/handlers.rs +++ b/gb-file/src/handlers.rs @@ -10,7 +10,6 @@ use tempfile::NamedTempFile; use minio::s3::types::ToStream; use tokio_stream::StreamExt; - #[post("/files/upload/{folder_path}")] pub async fn upload_file( folder_path: web::Path, @@ -92,6 +91,8 @@ pub async fn delete_file( Some("File deleted successfully".to_string()), )) } + + #[post("/files/list/{folder_path}")] pub async fn list_file( folder_path: web::Path, diff --git a/gb-infra/Cargo.toml b/gb-infra/Cargo.toml index aa75cb1..6bb8ff9 100644 --- a/gb-infra/Cargo.toml +++ b/gb-infra/Cargo.toml @@ -11,3 +11,8 @@ ctrlc = { workspace = true } tokio = { workspace = true } serde = { workspace = true } serde_json = { workspace = true } +reqwest = { workspace = true } +flate2 = { workspace = true } +tar = { workspace = true } +zip = { workspace = true } + diff --git a/gb-infra/src/lib.rs b/gb-infra/src/lib.rs index f987a8a..de2a549 100644 --- a/gb-infra/src/lib.rs +++ b/gb-infra/src/lib.rs @@ -1,5 +1,5 @@ pub mod manager; -pub mod utils; +pub mod setup; pub mod services { pub mod minio; pub mod nginx; diff --git a/gb-infra/src/setup.rs b/gb-infra/src/setup.rs new file mode 100644 index 0000000..fa613de --- /dev/null +++ b/gb-infra/src/setup.rs @@ -0,0 +1,273 @@ +use std::{ + fs::{self, File}, + io::{self, Write}, + path::{Path, PathBuf}, + process::Command, + env, +}; + +use reqwest::blocking::Client; +use flate2::read::GzDecoder; +use tar::Archive; +use zip::ZipArchive; + +const INSTALL_DIR: &str = "/opt/gbo"; +const TEMP_DIR: &str = "/tmp/gbotemp"; + +#[derive(Debug)] +struct Component { + name: &'static str, + bin_dir: &'static str, + download_url: Option<&'static str>, + archive_type: ArchiveType, + binaries: Vec<&'static str>, + config_files: Vec, +} + +#[derive(Debug)] +struct ConfigFile { + src_url: Option<&'static str>, + src_path: Option<&'static str>, + dest_name: &'static str, +} + +#[derive(Debug)] +enum ArchiveType { + TarGz, + Zip, + Binary, +} + +pub fn doIt() -> io::Result<()> { + // Define all components + let components = [ + // Directory (Zitadel) + Component { + name: "zitadel", + bin_dir: "directory", + download_url: Some("https://github.com/zitadel/zitadel/releases/latest/download/zitadel_Linux_x86_64.tar.gz"), + archive_type: ArchiveType::TarGz, + binaries: vec!["zitadel"], + config_files: vec![ConfigFile { + src_url: None, + src_path: Some("src/config/directory/zitadel.yaml"), + dest_name: "zitadel.yaml", + }], + }, + // Mail (Stalwart) + Component { + name: "stalwart-mail", + bin_dir: "mail", + download_url: Some("https://github.com/stalwartlabs/mail-server/releases/latest/download/stalwart-linux-x86_64.tar.gz"), + archive_type: ArchiveType::TarGz, + binaries: vec!["stalwart-mail"], + config_files: vec![ConfigFile { + src_url: Some("https://raw.githubusercontent.com/stalwartlabs/mail-server/main/resources/config/config.toml"), + src_path: None, + dest_name: "config.toml", + }], + }, + // Tabular (PostgreSQL) + Component { + name: "postgresql", + bin_dir: "tabular", + download_url: Some("https://get.enterprisedb.com/postgresql/postgresql-14.10-1-linux-x64-binaries.tar.gz"), + 
archive_type: ArchiveType::TarGz, + binaries: vec!["postgres", "pg_ctl", "psql", "pg_dump", "pg_restore"], + config_files: vec![], + }, + // Object (MinIO) + Component { + name: "minio", + bin_dir: "object", + download_url: Some("https://dl.min.io/server/minio/release/linux-amd64/minio"), + archive_type: ArchiveType::Binary, + binaries: vec!["minio"], + config_files: vec![], + }, + // Webserver (Caddy) + Component { + name: "caddy", + bin_dir: "webserver", + download_url: Some("https://github.com/caddyserver/caddy/releases/latest/download/caddy_linux_amd64.tar.gz"), + archive_type: ArchiveType::TarGz, + binaries: vec!["caddy"], + config_files: vec![ConfigFile { + src_url: None, + src_path: Some("src/config/webserver/Caddyfile"), + dest_name: "Caddyfile", + }], + }, + ]; + + // Create directories + create_directories()?; + + // Install dependencies + install_dependencies()?; + + // Create HTTP client + let client = Client::new(); + + // Process all components + for component in components.iter() { + install_component(&component, &client)?; + } + + // Clean up temp directory + fs::remove_dir_all(TEMP_DIR)?; + + println!("All binaries downloaded to {}", INSTALL_DIR); + println!("Use the start-stop script to manually control all components"); + + Ok(()) +} + +fn create_directories() -> io::Result<()> { + println!("Creating directories..."); + + // Main directories + fs::create_dir_all(INSTALL_DIR)?; + Command::new("chmod").args(["777", INSTALL_DIR]).status()?; + fs::create_dir_all(TEMP_DIR)?; + + // Component directories + let dirs = [ + "bin/bot", "bin/mail", "bin/tabular", "bin/object", + "bin/directory", "bin/alm", "bin/webserver", "bin/meeting", + "config/bot", "config/mail", "config/tabular", "config/object", + "config/directory", "config/alm", "config/webserver", "config/meeting", + "data/bot", "data/mail", "data/tabular", "data/object", + "data/directory", "data/alm", "data/webserver", "data/meeting", + "logs", "certs" + ]; + + for dir in dirs { + fs::create_dir_all(format!("{}/{}", INSTALL_DIR, dir))?; + } + + Ok(()) +} + +fn install_dependencies() -> io::Result<()> { + println!("Installing system dependencies..."); + Command::new("apt-get").args(["update"]).status()?; + Command::new("apt-get").args(["install", "-y", + "apt-transport-https", "ca-certificates", "curl", + "software-properties-common", "gnupg", "wget", + "unzip", "tar", "postgresql-client", "redis-tools" + ]).status()?; + Ok(()) +} + +fn install_component(component: &Component, client: &Client) -> io::Result<()> { + println!("Installing {}...", component.name); + + if let Some(url) = component.download_url { + let temp_path = format!("{}/{}", TEMP_DIR, component.name); + let target_dir = format!("{}/bin/{}", INSTALL_DIR, component.bin_dir); + + // Download the file + download_file(client, url, &temp_path)?; + + match component.archive_type { + ArchiveType::TarGz => { + // Extract tar.gz archive + let tar_gz = File::open(&temp_path)?; + let tar = GzDecoder::new(tar_gz); + let mut archive = Archive::new(tar); + archive.unpack(TEMP_DIR)?; + + // Move binaries to target directory + for binary in &component.binaries { + let src = format!("{}/{}", TEMP_DIR, binary); + let dest = format!("{}/{}", target_dir, binary); + + if Path::new(&src).exists() { + fs::rename(&src, &dest)?; + set_executable(&dest)?; + } else { + // For PostgreSQL which has binaries in pgsql/bin/ + let pg_src = format!("{}/pgsql/bin/{}", TEMP_DIR, binary); + if Path::new(&pg_src).exists() { + fs::rename(&pg_src, &dest)?; + set_executable(&dest)?; + } + } + } 
+ }, + ArchiveType::Zip => { + // Extract zip archive + let file = File::open(&temp_path)?; + let mut archive = ZipArchive::new(file)?; + archive.extract(TEMP_DIR)?; + + // Move binaries to target directory + for binary in &component.binaries { + let src = format!("{}/{}", TEMP_DIR, binary); + let dest = format!("{}/{}", target_dir, binary); + + if Path::new(&src).exists() { + fs::rename(&src, &dest)?; + set_executable(&dest)?; + } + } + }, + ArchiveType::Binary => { + // Single binary - just move to target location + let dest = format!("{}/{}", target_dir, component.name); + fs::rename(&temp_path, &dest)?; + set_executable(&dest)?; + }, + } + + // Clean up downloaded file + fs::remove_file(temp_path)?; + } + + // Handle config files + for config in &component.config_files { + let config_dir = format!("{}/config/{}", INSTALL_DIR, component.bin_dir); + let dest_path = format!("{}/{}", config_dir, config.dest_name); + + if let Some(url) = config.src_url { + // Download config from URL + download_file(client, url, &dest_path)?; + } else if let Some(src_path) = config.src_path { + // Copy config from local source (placeholder) + println!("Would copy config from {} to {}", src_path, dest_path); + // fs::copy(src_path, dest_path)?; + } + } + + println!("{} installed successfully!", component.name); + Ok(()) +} + +fn download_file(client: &Client, url: &str, dest_path: &str) -> io::Result<()> { + println!("Downloading {} to {}", url, dest_path); + + let mut response = client.get(url) + .send() + .map_err(|e| io::Error::new(io::ErrorKind::Other, e))?; + + if !response.status().is_success() { + return Err(io::Error::new( + io::ErrorKind::Other, + format!("Failed to download file: HTTP {}", response.status()) + )); + } + + let mut dest_file = File::create(dest_path)?; + response.copy_to(&mut dest_file) + .map_err(|e| io::Error::new(io::ErrorKind::Other, e))?; + + Ok(()) +} + +fn set_executable(path: &str) -> io::Result<()> { + Command::new("chmod") + .args(["+x", path]) + .status()?; + Ok(()) +} \ No newline at end of file diff --git a/gb-infra/src/utils.rs b/gb-infra/src/utils.rs deleted file mode 100644 index e69de29..0000000 diff --git a/gb-llm/prompts/send-proposal.txt b/gb-llm/prompts/send-proposal.txt new file mode 100644 index 0000000..994bf0f --- /dev/null +++ b/gb-llm/prompts/send-proposal.txt @@ -0,0 +1,11 @@ +PARAM to AS STRING +PARAM template AS STRING + +company = + +doc = FILL template + +subject= REWRITE "Based on this ${history}, generate a subject for a proposal email" +contents = REWRITE "Based on this ${history}, and ${subject}, generate the e-mail body for ${to}, signed by ${user}. 
+
+SEND MAIL to, subject, contents
\ No newline at end of file
diff --git a/gb-server/Cargo.toml b/gb-server/Cargo.toml
index 899bea9..997afcb 100644
--- a/gb-server/Cargo.toml
+++ b/gb-server/Cargo.toml
@@ -29,7 +29,9 @@ tower = { workspace = true }
tower-http = { workspace = true, features = ["cors", "trace"] }
actix-web = { workspace = true }
dotenv = { workspace = true }
+log = { workspace = true }
+vector = "0.4.1"

[dev-dependencies]
rstest = { workspace = true }
-tokio-test = { workspace = true }
\ No newline at end of file
+tokio-test = { workspace = true }
diff --git a/gb-server/src/main.rs b/gb-server/src/main.rs
index e31ebff..0a48cdc 100644
--- a/gb-server/src/main.rs
+++ b/gb-server/src/main.rs
@@ -1,9 +1,14 @@
+use gb_utils::VectorLogger; // assumes gb-server depends on gb-utils and VectorLogger is public there
+use log::{info, LevelFilter};
+
use actix_web::{middleware, web, App, HttpServer};
use gb_core::models;
use tracing_subscriber::fmt::format::FmtSpan;
use dotenv::dotenv;
use gb_core::config::AppConfig;
-use gb_core::db::{init_kafka, init_minio, init_postgres, init_redis};
+use gb_core::db::{init_minio, init_postgres};
use gb_file::handlers::upload_file;

#[actix_web::main]
@@ -15,36 +20,49 @@ async fn main() -> std::io::Result<()> {
        .with_span_events(FmtSpan::CLOSE)
        .init();

+    // Connect to the local Vector agent and install it as the global logger.
+    // log::set_logger requires a &'static logger, so a single instance is leaked at startup.
+    let vector_logger = VectorLogger::new("127.0.0.1", 9000).await?;
+    let logger: &'static VectorLogger = Box::leak(Box::new(vector_logger));
+    log::set_logger(logger).expect("logger can only be set once");
+    log::set_max_level(LevelFilter::Info);
+
+    info!("Hello from Rust!");
+
+    // Load configuration
    let config = AppConfig::from_env();
+
+    // TODO: /gbo/bin/storage$ ./minio server ../../data/storage/
+
+    // Initialize databases and services
    let db_pool = init_postgres(&config).await.expect("Failed to connect to PostgreSQL");
-    let redis_pool = init_redis(&config).await.expect("Failed to connect to Redis");
-    let kafka_producer = init_kafka(&config).await.expect("Failed to initialize Kafka");
-    // let zitadel_client = init_zitadel(&config).await.expect("Failed to initialize Zitadel");
    let minio_client = init_minio(&config).await.expect("Failed to initialize Minio");

    let app_state = web::Data::new(models::AppState {
        config: Some(config.clone()),
        db_pool: Some(db_pool),
-        redis_pool: Some(redis_pool),
-        kafka_producer: Some(kafka_producer),
        minio_client: Some(minio_client),
    });

    // Start HTTP server
    HttpServer::new(move || {
-        // let cors = Cors::default()
-        //     .allow_any_origin()
-        //     .allow_any_method()
-        //     .allow_any_header()
-        //     .max_age(3600);
-
        App::new()
            .wrap(middleware::Logger::default())
            .wrap(middleware::Compress::default())
-// .wrap(cors)
            .app_data(app_state.clone())
            .service(upload_file)
    })
diff --git a/gb-server/src/router.rs b/gb-server/src/router.rs
deleted file mode 100644
index 5851f82..0000000
--- a/gb-server/src/router.rs
+++ /dev/null
@@ -1,22 +0,0 @@
-use axum::{
-    extract::{ws::WebSocket, Path, State, WebSocketUpgrade},
-    response::IntoResponse,
-    routing::{get, post},
-    Json, Router,
-};
-use futures_util::StreamExt;
-use gb_core::{models::*, Error, Result};
-use gb_messaging::{models::MessageEnvelope, MessageProcessor};
-use std::{collections::HashMap,
sync::Arc}; -use tokio::sync::Mutex; -use tracing::{error, instrument}; -use uuid::Uuid; - -pub fn create_router(message_processor: AppState) -> Router { - let state = Arc::new(ApiState { - message_processor: Mutex::new(message_processor), - }); - Router::new() - .route("/monitoring/metrics", get(get_monitoring_metrics)) - .with_state(state) -} diff --git a/gb-testing/tests/integration_file_list_test.rs b/gb-testing/tests/integration_file_list_test.rs index c389bf8..8911b94 100644 --- a/gb-testing/tests/integration_file_list_test.rs +++ b/gb-testing/tests/integration_file_list_test.rs @@ -61,9 +61,7 @@ async fn test_successful_file_listing() -> Result<(), Box let app_state = web::Data::new(AppState { minio_client: Some(minio_client.clone()), config: None, - db_pool: None, - kafka_producer: None, - redis_pool: None, + db_pool: None }); let app = test::init_service(App::new().app_data(app_state.clone()).service(list_file)).await; diff --git a/gb-testing/tests/integration_file_upload_test.rs b/gb-testing/tests/integration_file_upload_test.rs index 75ff4da..868b739 100644 --- a/gb-testing/tests/integration_file_upload_test.rs +++ b/gb-testing/tests/integration_file_upload_test.rs @@ -40,9 +40,7 @@ async fn test_successful_file_upload() -> Result<()> { let app_state = web::Data::new(AppState { minio_client: Some(minio_client.clone()), config: None, - db_pool: None, - kafka_producer: None, - redis_pool: None, + db_pool: None }); let app = @@ -90,30 +88,6 @@ async fn test_successful_file_upload() -> Result<()> { assert!(object_exists, "Uploaded file should exist in MinIO"); - // Verify file content using object-based API - // let get_object_args = GetObjectArgs::new(bucket_name, object_name)?; - // let get_object_result = minio_client.get_object(bucket_name, object_name); - - // let mut object_content = Vec::new(); - // get_object_result.read_to_end(&mut object_content)?; - - // assert_eq!( - // String::from_utf8_lossy(&object_content), - // String::from_utf8_lossy(&file_content), - // "File content should match" - // ); - // // Cleanup test bucket - // let bucket_name = "file-upload-rust-bucket"; - - // // List all objects and delete them using object-based API - // let list_objects_args = GetObjectArgs:new(bucket_name)?; - // let objects = minio_client.list_objects(&list_objects_args).await?; - - // for obj in objects.contents { - // // Using object-based API for remove_object - // let remove_object_args = RemoveObjectArgs::new(bucket_name, &obj.key)?; - // minio_client.remove_object(&remove_object_args).await?; - // } Ok(()) } diff --git a/gb-utils/src/lib.rs b/gb-utils/src/lib.rs index e69de29..5b989ad 100644 --- a/gb-utils/src/lib.rs +++ b/gb-utils/src/lib.rs @@ -0,0 +1,45 @@ +use log::{info, Level, LevelFilter, Metadata, Record}; +use std::io::Write; +use tokio::io::{AsyncWriteExt}; +use tokio::net::TcpStream; + +// A simple logger implementation that sends logs to Vector +struct VectorLogger { + stream: TcpStream, +} + +impl VectorLogger { + async fn new(host: &str, port: u16) -> Result { + let stream = TcpStream::connect(format!("{}:{}", host, port)).await?; + Ok(VectorLogger { stream }) + } +} + +impl log::Log for VectorLogger { + fn enabled(&self, _metadata: &Metadata) -> bool { + true + } + + fn log(&self, record: &Record) { + let _ = self.log_async(record).await; + } + + fn flush(&self) {} +} + +impl VectorLogger { + async fn log_async(&self, record: &Record) -> Result<(), std::io::Error> { + let log_event = format!( + "{{\"level\":\"{}\", \"message\":\"{}\", \"module\":\"{}\", 
\"file\":\"{}\", \"line\":{}}}\n", + record.level(), + record.args(), + record.location().module_path(), + record.location().file(), + record.location().line() + ); + + self.stream.write_all(log_event.as_bytes()).await?; + Ok(()) + } +} + diff --git a/install.sh b/install.sh index 890cbd8..a9a355b 100755 --- a/install.sh +++ b/install.sh @@ -3,8 +3,34 @@ set -e # Create directories echo "Creating directories..." -INSTALL_DIR="$HOME/server_binaries" +INSTALL_DIR="/opt/gbo" + + mkdir -p "$INSTALL_DIR" +sudo chmod -R 777 "$INSTALL_DIR" + +mkdir -p "$INSTALL_DIR/bin" +mkdir -p "$INSTALL_DIR/bin/bot" +mkdir -p "$INSTALL_DIR/bin/mail" +mkdir -p "$INSTALL_DIR/bin/tabular" +mkdir -p "$INSTALL_DIR/bin/object" +mkdir -p "$INSTALL_DIR/bin/directory" +mkdir -p "$INSTALL_DIR/bin/alm" +mkdir -p "$INSTALL_DIR/bin/webserver" +mkdir -p "$INSTALL_DIR/bin/meeting" + +mkdir -p "$INSTALL_DIR/config" +mkdir -p "$INSTALL_DIR/data" +mkdir -p "$INSTALL_DIR/data/tabular" +mkdir -p "$INSTALL_DIR/data/object" +mkdir -p "$INSTALL_DIR/data/mail" +mkdir -p "$INSTALL_DIR/data/directory" +mkdir -p "$INSTALL_DIR/data/alm" +mkdir -p "$INSTALL_DIR/data/webserver" +mkdir -p "$INSTALL_DIR/data/meeting" + +mkdir -p "$INSTALL_DIR/logs" +mkdir -p "$INSTALL_DIR/certs" mkdir -p "$INSTALL_DIR/config" mkdir -p "$INSTALL_DIR/data" @@ -25,67 +51,58 @@ sudo apt-get install -y \ echo "System dependencies installed" # Download PostgreSQL binary (using the official package) -# echo "Downloading PostgreSQL..." -# if [ ! -d "$INSTALL_DIR/postgresql" ]; then -# mkdir -p "$INSTALL_DIR/postgresql" -# wget -O "$INSTALL_DIR/postgresql/postgresql.tar.gz" "https://get.enterprisedb.com/postgresql/postgresql-14.10-1-linux-x64-binaries.tar.gz" -# tar -xzf "$INSTALL_DIR/postgresql/postgresql.tar.gz" -C "$INSTALL_DIR/postgresql" --strip-components=1 -# rm "$INSTALL_DIR/postgresql/postgresql.tar.gz" -# mkdir -p "$INSTALL_DIR/data/postgresql" -# fi - -# Download Redis binary -echo "Downloading Redis..." -if [ ! -d "$INSTALL_DIR/redis" ]; then - mkdir -p "$INSTALL_DIR/redis" - wget -O "$INSTALL_DIR/redis/redis.tar.gz" "https://download.redis.io/releases/redis-7.2.4.tar.gz" - tar -xzf "$INSTALL_DIR/redis/redis.tar.gz" -C "$INSTALL_DIR/redis" --strip-components=1 - rm "$INSTALL_DIR/redis/redis.tar.gz" - mkdir -p "$INSTALL_DIR/data/redis" +echo "Downloading PostgreSQL..." +if [ ! -d "$INSTALL_DIR/postgresql" ]; then + mkdir -p "$INSTALL_DIR/postgresql" + wget -O "$INSTALL_DIR/postgresql/postgresql.tar.gz" "https://get.enterprisedb.com/postgresql/postgresql-14.10-1-linux-x64-binaries.tar.gz" + tar -xzf "$INSTALL_DIR/postgresql/postgresql.tar.gz" -C "$INSTALL_DIR/postgresql" --strip-components=1 + rm "$INSTALL_DIR/postgresql/postgresql.tar.gz" + mkdir -p "$INSTALL_DIR/data/postgresql" fi -# Download Zitadel binary -# echo "Downloading Zitadel..." -# if [ ! 
-d "$INSTALL_DIR/zitadel" ]; then -# mkdir -p "$INSTALL_DIR/zitadel" -# # Get latest release URL -# ZITADEL_LATEST=$(curl -s https://api.github.com/repos/zitadel/zitadel/releases/latest | grep "browser_download_url.*linux_amd64.tar.gz" | cut -d '"' -f 4) -# wget -O "$INSTALL_DIR/zitadel/zitadel.tar.gz" "$ZITADEL_LATEST" -# tar -xzf "$INSTALL_DIR/zitadel/zitadel.tar.gz" -C "$INSTALL_DIR/zitadel" -# rm "$INSTALL_DIR/zitadel/zitadel.tar.gz" -# mkdir -p "$INSTALL_DIR/data/zitadel" - -# # Create default Zitadel config -# cat > "$INSTALL_DIR/config/zitadel.yaml" < "$INSTALL_DIR/config/zitadel.yaml" < Result; - async fn wait(&self, pid: Uuid, seconds: i32) -> Result<()>; - async fn save_file(&self, pid: Uuid, data: Vec) -> Result; - async fn execute_sql(&self, pid: Uuid, sql: String) -> Result; -} -``` - -B. Room APIs: -```rust -pub trait RoomAPI { - async fn create_room(&self, config: RoomConfig) -> Result; - async fn join_room(&self, room_id: Uuid, user_id: Uuid) -> Result; - async fn publish_track(&self, track: TrackInfo) -> Result; - async fn subscribe_track(&self, track_id: Uuid) -> Result; -} -``` - -C. Message APIs: -```rust -pub trait MessageAPI { - async fn send_message(&self, message: Message) -> Result; - async fn get_messages(&self, filter: MessageFilter) -> Result>; - async fn update_status(&self, message_id: Uuid, status: Status) -> Result<()>; -} -``` - 7. Monitoring & Operations: A. Metrics: @@ -189,7 +521,7 @@ B. Scaling Operations: C. Security: - Authentication -- Authorization +- Authorization (zitadel API) - Rate limiting - Data encryption - Audit logging @@ -555,4 +887,5 @@ migrate them to rust compatible, - reate environment configuration for everything and ingress to have several server nodes if eeed automatically - I NEED FULL CODE SOLUTION IN PROFESSIONAL TESTABLE RUST CODE: if you need split answer in several parts, but provide ENTIRE CODE. Complete working balenced aserver. IMPORTANTE: Generate the project in a .sh shell script output with cat, of entire code base to be restored, no placeholder neither TODOS. - VERY IMPORNTANT: DO NOT put things like // Add other system routes... you should WRITE ACUTAL CODE -- Need tests for every line of code written. \ No newline at end of file +- Need tests for every line of code written. +- single project organized in folders. \ No newline at end of file