refactor(gb-infra): Remove unused utils module and update dependencies; add Vector logger implementation

Rodrigo Rodriguez (Pragmatismo) 2025-04-08 18:39:41 -03:00
parent 0473753001
commit a86a965cd0
17 changed files with 898 additions and 178 deletions

Cargo.lock
@@ -2522,6 +2522,18 @@ version = "0.2.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "28dea519a9695b9977216879a3ebfddf92f1c08c05d984f8996aecd6ecdc811d"
[[package]]
name = "filetime"
version = "0.2.25"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "35c0522e981e68cbfa8c3f978441a5f34b30b96e146b33cd3359176b50fe8586"
dependencies = [
"cfg-if",
"libc",
"libredox",
"windows-sys 0.59.0",
]
[[package]]
name = "fixedbitset"
version = "0.2.0"
@@ -2932,9 +2944,13 @@ version = "0.1.0"
dependencies = [
"ctrlc",
"dotenv",
"flate2",
"reqwest 0.11.27",
"serde",
"serde_json",
"tar",
"tokio",
"zip",
]
[[package]]
@@ -3039,6 +3055,7 @@ dependencies = [
"gb-monitoring",
"hyper 1.6.0",
"hyper-util",
"log",
"redis",
"rstest",
"serde",
@@ -3052,6 +3069,7 @@ dependencies = [
"tracing",
"tracing-subscriber",
"uuid",
"vector",
]
[[package]]
@@ -4437,6 +4455,17 @@ version = "0.2.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8355be11b20d696c8f18f6cc018c4e372165b1fa8126cef092399c9951984ffa"
[[package]]
name = "libredox"
version = "0.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c0ff37bd590ca25063e35af745c343cb7a0271906fb7b37e4813e8f79f00268d"
dependencies = [
"bitflags 2.9.0",
"libc",
"redox_syscall 0.5.10",
]
[[package]]
name = "libsqlite3-sys"
version = "0.27.0"
@@ -6345,6 +6374,12 @@ dependencies = [
"rand_core 0.5.1",
]
[[package]]
name = "random"
version = "0.14.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "61c7093a82705b48f10d4b165c6482dbe9f58cd63a870d85537e857b2badbd88"
[[package]]
name = "rawpointer"
version = "0.2.1"
@@ -7805,6 +7840,17 @@ dependencies = [
"version-compare",
]
[[package]]
name = "tar"
version = "0.4.44"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1d863878d212c87a19c1a610eb53bb01fe12951c0501cf5a0d65f724914a667a"
dependencies = [
"filetime",
"libc",
"xattr",
]
[[package]]
name = "target-lexicon"
version = "0.12.16"
@@ -8818,6 +8864,15 @@ version = "0.2.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426"
[[package]]
name = "vector"
version = "0.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "280be990562443e0cb0ddc1ef2893fc2f4ff8ce0a2805a2dd42826f13e8eafd0"
dependencies = [
"random",
]
[[package]]
name = "version-compare"
version = "0.2.0"
@@ -9651,6 +9706,16 @@ dependencies = [
"time",
]
[[package]]
name = "xattr"
version = "1.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0d65cbf2f12c15564212d48f4e3dfb87923d25d611f2aed18f4cb23f0413d89e"
dependencies = [
"libc",
"rustix 1.0.1",
]
[[package]]
name = "xml-rs"
version = "0.8.25"

@@ -84,7 +84,7 @@ jsonwebtoken = "9.2"
argon2 = "0.5"
ring = "0.17"
reqwest = { version = "0.11", features = ["json", "stream"] }
reqwest = { version = "0.11", features = ["json", "stream", "blocking"] }
# Cloud services
aws-sdk-core = "1.1"
@@ -135,6 +135,9 @@ whatlang = "0.16"
pdf = "0.8"
docx = "1.1"
zip = "0.6"
tar = "0.4"
flate2 = "1.0"
[workspace.metadata]
msrv = "1.70.0"

@@ -242,9 +242,6 @@ pub struct AppState {
pub minio_client: Option<MinioClient>,
pub config: Option<AppConfig>,
pub db_pool: Option<PgPool>,
pub redis_pool: Option<RedisConnectionManager>,
pub kafka_producer: Option<FutureProducer>,
//pub zitadel_client: Option<AuthServiceClient><tonic::transport::Channel>,
}
// File models

@@ -10,7 +10,6 @@ use tempfile::NamedTempFile;
use minio::s3::types::ToStream;
use tokio_stream::StreamExt;
#[post("/files/upload/{folder_path}")]
pub async fn upload_file(
folder_path: web::Path<String>,
@@ -92,6 +91,8 @@ pub async fn delete_file(
Some("File deleted successfully".to_string()),
))
}
#[post("/files/list/{folder_path}")]
pub async fn list_file(
folder_path: web::Path<String>,

@@ -11,3 +11,8 @@ ctrlc = { workspace = true }
tokio = { workspace = true }
serde = { workspace = true }
serde_json = { workspace = true }
reqwest = { workspace = true }
flate2 = { workspace = true }
tar = { workspace = true }
zip = { workspace = true }

@@ -1,5 +1,5 @@
pub mod manager;
pub mod utils;
pub mod setup;
pub mod services {
pub mod minio;
pub mod nginx;

gb-infra/src/setup.rs (new file)
@@ -0,0 +1,273 @@
use std::{
    fs::{self, File},
    io,
    path::Path,
    process::Command,
};
use reqwest::blocking::Client;
use flate2::read::GzDecoder;
use tar::Archive;
use zip::ZipArchive;
const INSTALL_DIR: &str = "/opt/gbo";
const TEMP_DIR: &str = "/tmp/gbotemp";
#[derive(Debug)]
struct Component {
name: &'static str,
bin_dir: &'static str,
download_url: Option<&'static str>,
archive_type: ArchiveType,
binaries: Vec<&'static str>,
config_files: Vec<ConfigFile>,
}
#[derive(Debug)]
struct ConfigFile {
src_url: Option<&'static str>,
src_path: Option<&'static str>,
dest_name: &'static str,
}
#[derive(Debug)]
enum ArchiveType {
TarGz,
Zip,
Binary,
}
pub fn doIt() -> io::Result<()> {
// Define all components
let components = [
// Directory (Zitadel)
Component {
name: "zitadel",
bin_dir: "directory",
download_url: Some("https://github.com/zitadel/zitadel/releases/latest/download/zitadel_Linux_x86_64.tar.gz"),
archive_type: ArchiveType::TarGz,
binaries: vec!["zitadel"],
config_files: vec![ConfigFile {
src_url: None,
src_path: Some("src/config/directory/zitadel.yaml"),
dest_name: "zitadel.yaml",
}],
},
// Mail (Stalwart)
Component {
name: "stalwart-mail",
bin_dir: "mail",
download_url: Some("https://github.com/stalwartlabs/mail-server/releases/latest/download/stalwart-linux-x86_64.tar.gz"),
archive_type: ArchiveType::TarGz,
binaries: vec!["stalwart-mail"],
config_files: vec![ConfigFile {
src_url: Some("https://raw.githubusercontent.com/stalwartlabs/mail-server/main/resources/config/config.toml"),
src_path: None,
dest_name: "config.toml",
}],
},
// Tabular (PostgreSQL)
Component {
name: "postgresql",
bin_dir: "tabular",
download_url: Some("https://get.enterprisedb.com/postgresql/postgresql-14.10-1-linux-x64-binaries.tar.gz"),
archive_type: ArchiveType::TarGz,
binaries: vec!["postgres", "pg_ctl", "psql", "pg_dump", "pg_restore"],
config_files: vec![],
},
// Object (MinIO)
Component {
name: "minio",
bin_dir: "object",
download_url: Some("https://dl.min.io/server/minio/release/linux-amd64/minio"),
archive_type: ArchiveType::Binary,
binaries: vec!["minio"],
config_files: vec![],
},
// Webserver (Caddy)
Component {
name: "caddy",
bin_dir: "webserver",
download_url: Some("https://github.com/caddyserver/caddy/releases/latest/download/caddy_linux_amd64.tar.gz"),
archive_type: ArchiveType::TarGz,
binaries: vec!["caddy"],
config_files: vec![ConfigFile {
src_url: None,
src_path: Some("src/config/webserver/Caddyfile"),
dest_name: "Caddyfile",
}],
},
];
// Create directories
create_directories()?;
// Install dependencies
install_dependencies()?;
// Create HTTP client
let client = Client::new();
// Process all components
for component in components.iter() {
install_component(&component, &client)?;
}
// Clean up temp directory
fs::remove_dir_all(TEMP_DIR)?;
println!("All binaries downloaded to {}", INSTALL_DIR);
println!("Use the start-stop script to manually control all components");
Ok(())
}
fn create_directories() -> io::Result<()> {
println!("Creating directories...");
// Main directories
fs::create_dir_all(INSTALL_DIR)?;
Command::new("chmod").args(["777", INSTALL_DIR]).status()?;
fs::create_dir_all(TEMP_DIR)?;
// Component directories
let dirs = [
"bin/bot", "bin/mail", "bin/tabular", "bin/object",
"bin/directory", "bin/alm", "bin/webserver", "bin/meeting",
"config/bot", "config/mail", "config/tabular", "config/object",
"config/directory", "config/alm", "config/webserver", "config/meeting",
"data/bot", "data/mail", "data/tabular", "data/object",
"data/directory", "data/alm", "data/webserver", "data/meeting",
"logs", "certs"
];
for dir in dirs {
fs::create_dir_all(format!("{}/{}", INSTALL_DIR, dir))?;
}
Ok(())
}
fn install_dependencies() -> io::Result<()> {
println!("Installing system dependencies...");
Command::new("apt-get").args(["update"]).status()?;
Command::new("apt-get").args(["install", "-y",
"apt-transport-https", "ca-certificates", "curl",
"software-properties-common", "gnupg", "wget",
"unzip", "tar", "postgresql-client", "redis-tools"
]).status()?;
Ok(())
}
fn install_component(component: &Component, client: &Client) -> io::Result<()> {
println!("Installing {}...", component.name);
if let Some(url) = component.download_url {
let temp_path = format!("{}/{}", TEMP_DIR, component.name);
let target_dir = format!("{}/bin/{}", INSTALL_DIR, component.bin_dir);
// Download the file
download_file(client, url, &temp_path)?;
match component.archive_type {
ArchiveType::TarGz => {
// Extract tar.gz archive
let tar_gz = File::open(&temp_path)?;
let tar = GzDecoder::new(tar_gz);
let mut archive = Archive::new(tar);
archive.unpack(TEMP_DIR)?;
// Move binaries to target directory
for binary in &component.binaries {
let src = format!("{}/{}", TEMP_DIR, binary);
let dest = format!("{}/{}", target_dir, binary);
if Path::new(&src).exists() {
fs::rename(&src, &dest)?;
set_executable(&dest)?;
} else {
// For PostgreSQL which has binaries in pgsql/bin/
let pg_src = format!("{}/pgsql/bin/{}", TEMP_DIR, binary);
if Path::new(&pg_src).exists() {
fs::rename(&pg_src, &dest)?;
set_executable(&dest)?;
}
}
}
},
ArchiveType::Zip => {
// Extract zip archive
let file = File::open(&temp_path)?;
let mut archive = ZipArchive::new(file)?;
archive.extract(TEMP_DIR)?;
// Move binaries to target directory
for binary in &component.binaries {
let src = format!("{}/{}", TEMP_DIR, binary);
let dest = format!("{}/{}", target_dir, binary);
if Path::new(&src).exists() {
fs::rename(&src, &dest)?;
set_executable(&dest)?;
}
}
},
ArchiveType::Binary => {
// Single binary - just move to target location
let dest = format!("{}/{}", target_dir, component.name);
fs::rename(&temp_path, &dest)?;
set_executable(&dest)?;
},
}
// Clean up downloaded file
fs::remove_file(temp_path)?;
}
// Handle config files
for config in &component.config_files {
let config_dir = format!("{}/config/{}", INSTALL_DIR, component.bin_dir);
let dest_path = format!("{}/{}", config_dir, config.dest_name);
if let Some(url) = config.src_url {
// Download config from URL
download_file(client, url, &dest_path)?;
} else if let Some(src_path) = config.src_path {
// Copy config from local source (placeholder)
println!("Would copy config from {} to {}", src_path, dest_path);
// fs::copy(src_path, dest_path)?;
}
}
println!("{} installed successfully!", component.name);
Ok(())
}
fn download_file(client: &Client, url: &str, dest_path: &str) -> io::Result<()> {
println!("Downloading {} to {}", url, dest_path);
let mut response = client.get(url)
.send()
.map_err(|e| io::Error::new(io::ErrorKind::Other, e))?;
if !response.status().is_success() {
return Err(io::Error::new(
io::ErrorKind::Other,
format!("Failed to download file: HTTP {}", response.status())
));
}
let mut dest_file = File::create(dest_path)?;
response.copy_to(&mut dest_file)
.map_err(|e| io::Error::new(io::ErrorKind::Other, e))?;
Ok(())
}
fn set_executable(path: &str) -> io::Result<()> {
Command::new("chmod")
.args(["+x", path])
.status()?;
Ok(())
}
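For context, a minimal sketch of how this installer might be wired into a binary entry point; the `main` function below is an assumption for illustration, not part of this commit:

```rust
// Hypothetical entry point (sketch). Assumes the gb-infra crate re-exports
// `setup` from its root, as the updated lib.rs above declares.
fn main() -> std::io::Result<()> {
    // Creates the /opt/gbo directory tree, apt-installs dependencies, then
    // downloads and extracts each component; requires root privileges.
    gb_infra::setup::doIt()
}
```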

@@ -0,0 +1,11 @@
PARAM to AS STRING
PARAM template AS STRING
company =
doc = FILL template
subject = REWRITE "Based on this ${history}, generate a subject for a proposal email"
contents = REWRITE "Based on this ${history}, and ${subject}, generate the e-mail body for ${to}, signed by ${user}."
SEND MAIL to, subject, contents

@@ -29,7 +29,9 @@ tower = { workspace = true }
tower-http = { workspace = true, features = ["cors", "trace"] }
actix-web = { workspace = true }
dotenv = { workspace = true }
log = { workspace = true }
vector = "0.4.1"
[dev-dependencies]
rstest = { workspace = true }
tokio-test = { workspace = true }

@@ -1,9 +1,14 @@
use log::{info, Level, LevelFilter, Metadata, Record};
use std::io::Write;
use tokio::io::{AsyncWriteExt};
use tokio::net::TcpStream;
use actix_web::{middleware, web, App, HttpServer};
use gb_core::models;
use tracing_subscriber::fmt::format::FmtSpan;
use dotenv::dotenv;
use gb_core::config::AppConfig;
use gb_core::db::{init_kafka, init_minio, init_postgres, init_redis};
use gb_core::db::{init_minio, init_postgres};
use gb_file::handlers::upload_file;
#[actix_web::main]
@@ -15,36 +20,49 @@ async fn main() -> std::io::Result<()> {
.with_span_events(FmtSpan::CLOSE)
.init();
// Configure the logger
log::set_logger(&VectorLogger { stream: TcpStream::connect("127.0.0.1:9000").await? })
.map_err(|_| "Couldn't set logger")?;
log::set_max_level(LevelFilter::Info);
// Get the Vector agent's address and port
let vector_host = "127.0.0.1";
let vector_port = 9000;
// Start a Vector logger and install it. log::set_logger requires a
// &'static logger, so it is boxed and leaked once at startup.
let vector_logger = VectorLogger::new(vector_host, vector_port)?;
log::set_logger(Box::leak(Box::new(vector_logger)))
    .map_err(|e| std::io::Error::new(std::io::ErrorKind::Other, e))?;
log::set_max_level(LevelFilter::Info);
// Log some messages
info!("Hello from Rust!");
// Load configuration
let config = AppConfig::from_env();
// TODO: /gbo/bin/storage$ ./minio server ../../data/storage/
// Initialize databases and services
let db_pool = init_postgres(&config).await.expect("Failed to connect to PostgreSQL");
let redis_pool = init_redis(&config).await.expect("Failed to connect to Redis");
let kafka_producer = init_kafka(&config).await.expect("Failed to initialize Kafka");
// let zitadel_client = init_zitadel(&config).await.expect("Failed to initialize Zitadel");
let minio_client = init_minio(&config).await.expect("Failed to initialize Minio");
let app_state = web::Data::new(models::AppState {
config: Some(config.clone()),
db_pool: Some(db_pool),
redis_pool: Some(redis_pool),
kafka_producer: Some(kafka_producer),
minio_client: Some(minio_client),
});
// Start HTTP server
HttpServer::new(move || {
// let cors = Cors::default()
// .allow_any_origin()
// .allow_any_method()
// .allow_any_header()
// .max_age(3600);
App::new()
.wrap(middleware::Logger::default())
.wrap(middleware::Compress::default())
// .wrap(cors)
.app_data(app_state.clone())
.service(upload_file)
})

@@ -1,22 +0,0 @@
use axum::{
extract::{ws::WebSocket, Path, State, WebSocketUpgrade},
response::IntoResponse,
routing::{get, post},
Json, Router,
};
use futures_util::StreamExt;
use gb_core::{models::*, Error, Result};
use gb_messaging::{models::MessageEnvelope, MessageProcessor};
use std::{collections::HashMap, sync::Arc};
use tokio::sync::Mutex;
use tracing::{error, instrument};
use uuid::Uuid;
pub fn create_router(message_processor: AppState) -> Router {
let state = Arc::new(ApiState {
message_processor: Mutex::new(message_processor),
});
Router::new()
.route("/monitoring/metrics", get(get_monitoring_metrics))
.with_state(state)
}

@@ -61,9 +61,7 @@ async fn test_successful_file_listing() -> Result<(), Box<dyn std::error::Error>> {
let app_state = web::Data::new(AppState {
minio_client: Some(minio_client.clone()),
config: None,
db_pool: None,
kafka_producer: None,
redis_pool: None,
db_pool: None
});
let app = test::init_service(App::new().app_data(app_state.clone()).service(list_file)).await;

@@ -40,9 +40,7 @@ async fn test_successful_file_upload() -> Result<()> {
let app_state = web::Data::new(AppState {
minio_client: Some(minio_client.clone()),
config: None,
db_pool: None,
kafka_producer: None,
redis_pool: None,
db_pool: None
});
let app =
@@ -90,30 +88,6 @@
assert!(object_exists, "Uploaded file should exist in MinIO");
// Verify file content using object-based API
// let get_object_args = GetObjectArgs::new(bucket_name, object_name)?;
// let get_object_result = minio_client.get_object(bucket_name, object_name);
// let mut object_content = Vec::new();
// get_object_result.read_to_end(&mut object_content)?;
// assert_eq!(
// String::from_utf8_lossy(&object_content),
// String::from_utf8_lossy(&file_content),
// "File content should match"
// );
// // Cleanup test bucket
// let bucket_name = "file-upload-rust-bucket";
// // List all objects and delete them using object-based API
// let list_objects_args = GetObjectArgs:new(bucket_name)?;
// let objects = minio_client.list_objects(&list_objects_args).await?;
// for obj in objects.contents {
// // Using object-based API for remove_object
// let remove_object_args = RemoveObjectArgs::new(bucket_name, &obj.key)?;
// minio_client.remove_object(&remove_object_args).await?;
// }
Ok(())
}

@@ -0,0 +1,45 @@
use log::{Metadata, Record};
use std::io::Write;
use std::net::TcpStream;
use std::sync::Mutex;
// A simple logger implementation that sends newline-delimited JSON log
// events to a Vector TCP socket source.
pub struct VectorLogger {
    stream: Mutex<TcpStream>,
}
impl VectorLogger {
    // log::Log is a synchronous trait, so a blocking std::net::TcpStream
    // guarded by a Mutex is used here instead of a tokio stream.
    pub fn new(host: &str, port: u16) -> Result<Self, std::io::Error> {
        let stream = TcpStream::connect((host, port))?;
        Ok(VectorLogger {
            stream: Mutex::new(stream),
        })
    }
}
impl log::Log for VectorLogger {
    fn enabled(&self, _metadata: &Metadata) -> bool {
        true
    }
    fn log(&self, record: &Record) {
        let log_event = format!(
            "{{\"level\":\"{}\", \"message\":\"{}\", \"module\":\"{}\", \"file\":\"{}\", \"line\":{}}}\n",
            record.level(),
            record.args(),
            record.module_path().unwrap_or("unknown"),
            record.file().unwrap_or("unknown"),
            record.line().unwrap_or(0)
        );
        // Best-effort delivery: a logger must never panic or propagate errors.
        if let Ok(mut stream) = self.stream.lock() {
            let _ = stream.write_all(log_event.as_bytes());
        }
    }
    fn flush(&self) {
        if let Ok(mut stream) = self.stream.lock() {
            let _ = stream.flush();
        }
    }
}
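On the receiving end, Vector is expected to expose a TCP listener (e.g. its `socket` source in `tcp` mode) on the port used above. A minimal smoke-test sketch for the logger, using an in-process listener instead of a real Vector agent; the test name and structure are illustrative, not part of this commit:

```rust
// Sketch: accept one connection on a local TCP listener and assert that a
// logged event arrives as a JSON line. Assumes VectorLogger::new from above.
#[test]
fn vector_logger_writes_json_line() -> std::io::Result<()> {
    use log::Log;
    use std::io::{BufRead, BufReader};
    use std::net::TcpListener;

    let listener = TcpListener::bind("127.0.0.1:0")?; // ephemeral port
    let port = listener.local_addr()?.port();

    let logger = VectorLogger::new("127.0.0.1", port)?;
    let (conn, _) = listener.accept()?;

    logger.log(
        &log::Record::builder()
            .args(format_args!("hello"))
            .level(log::Level::Info)
            .build(),
    );

    let mut line = String::new();
    BufReader::new(conn).read_line(&mut line)?;
    assert!(line.contains("\"message\":\"hello\""));
    Ok(())
}
```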

@@ -3,8 +3,34 @@ set -e
# Create directories
echo "Creating directories..."
INSTALL_DIR="$HOME/server_binaries"
INSTALL_DIR="/opt/gbo"
mkdir -p "$INSTALL_DIR"
sudo chmod -R 777 "$INSTALL_DIR"
mkdir -p "$INSTALL_DIR/bin"
mkdir -p "$INSTALL_DIR/bin/bot"
mkdir -p "$INSTALL_DIR/bin/mail"
mkdir -p "$INSTALL_DIR/bin/tabular"
mkdir -p "$INSTALL_DIR/bin/object"
mkdir -p "$INSTALL_DIR/bin/directory"
mkdir -p "$INSTALL_DIR/bin/alm"
mkdir -p "$INSTALL_DIR/bin/webserver"
mkdir -p "$INSTALL_DIR/bin/meeting"
mkdir -p "$INSTALL_DIR/config"
mkdir -p "$INSTALL_DIR/data"
mkdir -p "$INSTALL_DIR/data/tabular"
mkdir -p "$INSTALL_DIR/data/object"
mkdir -p "$INSTALL_DIR/data/mail"
mkdir -p "$INSTALL_DIR/data/directory"
mkdir -p "$INSTALL_DIR/data/alm"
mkdir -p "$INSTALL_DIR/data/webserver"
mkdir -p "$INSTALL_DIR/data/meeting"
mkdir -p "$INSTALL_DIR/logs"
mkdir -p "$INSTALL_DIR/certs"
mkdir -p "$INSTALL_DIR/config"
mkdir -p "$INSTALL_DIR/data"
@@ -25,67 +51,58 @@ sudo apt-get install -y \
echo "System dependencies installed"
# Download PostgreSQL binary (using the official package)
# echo "Downloading PostgreSQL..."
# if [ ! -d "$INSTALL_DIR/postgresql" ]; then
# mkdir -p "$INSTALL_DIR/postgresql"
# wget -O "$INSTALL_DIR/postgresql/postgresql.tar.gz" "https://get.enterprisedb.com/postgresql/postgresql-14.10-1-linux-x64-binaries.tar.gz"
# tar -xzf "$INSTALL_DIR/postgresql/postgresql.tar.gz" -C "$INSTALL_DIR/postgresql" --strip-components=1
# rm "$INSTALL_DIR/postgresql/postgresql.tar.gz"
# mkdir -p "$INSTALL_DIR/data/postgresql"
# fi
# Download Redis binary
echo "Downloading Redis..."
if [ ! -d "$INSTALL_DIR/redis" ]; then
mkdir -p "$INSTALL_DIR/redis"
wget -O "$INSTALL_DIR/redis/redis.tar.gz" "https://download.redis.io/releases/redis-7.2.4.tar.gz"
tar -xzf "$INSTALL_DIR/redis/redis.tar.gz" -C "$INSTALL_DIR/redis" --strip-components=1
rm "$INSTALL_DIR/redis/redis.tar.gz"
mkdir -p "$INSTALL_DIR/data/redis"
echo "Downloading PostgreSQL..."
if [ ! -d "$INSTALL_DIR/postgresql" ]; then
mkdir -p "$INSTALL_DIR/postgresql"
wget -O "$INSTALL_DIR/postgresql/postgresql.tar.gz" "https://get.enterprisedb.com/postgresql/postgresql-14.10-1-linux-x64-binaries.tar.gz"
tar -xzf "$INSTALL_DIR/postgresql/postgresql.tar.gz" -C "$INSTALL_DIR/postgresql" --strip-components=1
rm "$INSTALL_DIR/postgresql/postgresql.tar.gz"
mkdir -p "$INSTALL_DIR/data/postgresql"
fi
# Download Zitadel binary
# echo "Downloading Zitadel..."
# if [ ! -d "$INSTALL_DIR/zitadel" ]; then
# mkdir -p "$INSTALL_DIR/zitadel"
# # Get latest release URL
# ZITADEL_LATEST=$(curl -s https://api.github.com/repos/zitadel/zitadel/releases/latest | grep "browser_download_url.*linux_amd64.tar.gz" | cut -d '"' -f 4)
# wget -O "$INSTALL_DIR/zitadel/zitadel.tar.gz" "$ZITADEL_LATEST"
# tar -xzf "$INSTALL_DIR/zitadel/zitadel.tar.gz" -C "$INSTALL_DIR/zitadel"
# rm "$INSTALL_DIR/zitadel/zitadel.tar.gz"
# mkdir -p "$INSTALL_DIR/data/zitadel"
# # Create default Zitadel config
# cat > "$INSTALL_DIR/config/zitadel.yaml" <<EOF
# Log:
# Level: info
# Database:
# postgres:
# Host: localhost
# Port: 5432
# Database: zitadel
# User: postgres
# Password: postgres
# SSL:
# Mode: disable
# EOF
# fi
# Download Stalwart Mail binary
# echo "Downloading Stalwart Mail..."
# if [ ! -d "$INSTALL_DIR/stalwart" ]; then
# mkdir -p "$INSTALL_DIR/stalwart"
# # Get latest release URL
# STALWART_LATEST=$(curl -s https://api.github.com/repos/stalwartlabs/mail-server/releases/latest | grep "browser_download_url.*linux-x86_64.tar.gz" | cut -d '"' -f 4)
# wget -O "$INSTALL_DIR/stalwart/stalwart.tar.gz" "$STALWART_LATEST"
# tar -xzf "$INSTALL_DIR/stalwart/stalwart.tar.gz" -C "$INSTALL_DIR/stalwart"
# rm "$INSTALL_DIR/stalwart/stalwart.tar.gz"
# mkdir -p "$INSTALL_DIR/data/stalwart"
# Download Zitadel binary
echo "Downloading Zitadel..."
if [ ! -d "$INSTALL_DIR/zitadel" ]; then
mkdir -p "$INSTALL_DIR/zitadel"
# Get latest release URL
ZITADEL_LATEST=$(curl -s https://api.github.com/repos/zitadel/zitadel/releases/latest | grep "browser_download_url.*linux_amd64.tar.gz" | cut -d '"' -f 4)
wget -O "$INSTALL_DIR/zitadel/zitadel.tar.gz" "$ZITADEL_LATEST"
tar -xzf "$INSTALL_DIR/zitadel/zitadel.tar.gz" -C "$INSTALL_DIR/zitadel"
rm "$INSTALL_DIR/zitadel/zitadel.tar.gz"
mkdir -p "$INSTALL_DIR/data/zitadel"
# # Download config files
# mkdir -p "$INSTALL_DIR/config/stalwart"
# wget -O "$INSTALL_DIR/config/stalwart/config.toml" "https://raw.githubusercontent.com/stalwartlabs/mail-server/main/resources/config/config.toml"
# fi
# Create default Zitadel config
cat > "$INSTALL_DIR/config/zitadel.yaml" <<EOF
Log:
Level: info
Database:
postgres:
Host: localhost
Port: 5432
Database: zitadel
User: postgres
Password: postgres
SSL:
Mode: disable
EOF
fi
# Download Stalwart Mail binary
echo "Downloading Stalwart Mail..."
if [ ! -d "$INSTALL_DIR/stalwart" ]; then
mkdir -p "$INSTALL_DIR/stalwart"
# Get latest release URL
STALWART_LATEST=$(curl -s https://api.github.com/repos/stalwartlabs/mail-server/releases/latest | grep "browser_download_url.*linux-x86_64.tar.gz" | cut -d '"' -f 4)
wget -O "$INSTALL_DIR/stalwart/stalwart.tar.gz" "$STALWART_LATEST"
tar -xzf "$INSTALL_DIR/stalwart/stalwart.tar.gz" -C "$INSTALL_DIR/stalwart"
rm "$INSTALL_DIR/stalwart/stalwart.tar.gz"
mkdir -p "$INSTALL_DIR/data/stalwart"
# Download config files
mkdir -p "$INSTALL_DIR/config/stalwart"
wget -O "$INSTALL_DIR/config/stalwart/config.toml" "https://raw.githubusercontent.com/stalwartlabs/mail-server/main/resources/config/config.toml"
fi
# Download MinIO binary
echo "Downloading MinIO..."

prompt.md
@@ -2,6 +2,383 @@ You are a distributed systems architect for a billion-scale real-time communication platform
1. Core Domains and Models:
CREATE TABLE "GBOnlineSubscription" (
"Id" serial4 NOT NULL,
"instanceId" int4 NULL,
"externalSubscriptionId" varchar(255) NULL,
"saasSubscriptionStatus" varchar(255) NULL,
"isFreeTrial" bool NULL,
"planId" varchar(255) NULL,
quantity int4 NULL,
"lastCCFourDigits" int4 NULL,
status varchar(255) NULL,
CONSTRAINT "GBOnlineSubscription_pkey" PRIMARY KEY ("Id")
);
CREATE TABLE "GuaribasAdmin" (
id serial4 NOT NULL,
"instanceId" int4 NULL,
"key" varchar(255) NULL,
value varchar(4000) NULL,
"createdAt" timestamptz NULL,
"updatedAt" timestamptz NULL,
CONSTRAINT "GuaribasAdmin_pkey" PRIMARY KEY (id)
);
CREATE TABLE "GuaribasChannel" (
"channelId" serial4 NOT NULL,
title varchar(255) NULL,
"createdAt" timestamptz NULL,
"updatedAt" timestamptz NULL,
CONSTRAINT "GuaribasChannel_pkey" PRIMARY KEY ("channelId")
);
-- public."GuaribasInstance" definition
-- Drop table
-- DROP TABLE "GuaribasInstance";
CREATE TABLE "GuaribasInstance" (
"instanceId" serial4 NOT NULL,
"botEndpoint" varchar(255) NULL,
"whoAmIVideo" varchar(255) NULL,
"botId" varchar(255) NULL,
title varchar(255) NULL,
"activationCode" varchar(16) NULL,
description varchar(255) NULL,
state varchar(16) NULL,
"botKey" varchar(64) NULL,
"enabledAdmin" varchar(255) NULL,
"engineName" varchar(255) NULL,
"marketplaceId" varchar(255) NULL,
"textAnalyticsKey" varchar(255) NULL,
"textAnalyticsEndpoint" varchar(255) NULL,
"translatorKey" varchar(64) NULL,
"translatorEndpoint" varchar(128) NULL,
"marketplacePassword" varchar(255) NULL,
"webchatKey" varchar(255) NULL,
"authenticatorTenant" varchar(255) NULL,
"authenticatorAuthorityHostUrl" varchar(255) NULL,
"cloudSubscriptionId" varchar(255) NULL,
"cloudUsername" varchar(255) NULL,
"cloudPassword" varchar(255) NULL,
"cloudLocation" varchar(255) NULL,
"googleBotKey" varchar(255) NULL,
"googleChatApiKey" varchar(255) NULL,
"googleChatSubscriptionName" varchar(255) NULL,
"googleClientEmail" varchar(255) NULL,
"googlePrivateKey" varchar(4000) NULL,
"googleProjectId" varchar(255) NULL,
"facebookWorkplaceVerifyToken" varchar(255) NULL,
"facebookWorkplaceAppSecret" varchar(255) NULL,
"facebookWorkplaceAccessToken" varchar(512) NULL,
"whatsappBotKey" varchar(255) NULL,
"whatsappServiceKey" varchar(255) NULL,
"whatsappServiceNumber" varchar(255) NULL,
"whatsappServiceUrl" varchar(255) NULL,
"smsKey" varchar(255) NULL,
"smsSecret" varchar(255) NULL,
"smsServiceNumber" varchar(255) NULL,
"speechKey" varchar(255) NULL,
"speechEndpoint" varchar(255) NULL,
"spellcheckerKey" varchar(255) NULL,
"spellcheckerEndpoint" varchar(255) NULL,
theme varchar(255) NULL,
ui varchar(255) NULL,
kb varchar(255) NULL,
"nlpAppId" varchar(255) NULL,
"nlpKey" varchar(255) NULL,
"nlpEndpoint" varchar(512) NULL,
"nlpAuthoringKey" varchar(255) NULL,
"deploymentPaths" varchar(255) NULL,
"searchHost" varchar(255) NULL,
"searchKey" varchar(255) NULL,
"searchIndex" varchar(255) NULL,
"searchIndexer" varchar(255) NULL,
"storageUsername" varchar(255) NULL,
"storagePassword" varchar(255) NULL,
"storageName" varchar(255) NULL,
"storageServer" varchar(255) NULL,
"storageDialect" varchar(255) NULL,
"storagePath" varchar(255) NULL,
"adminPass" varchar(255) NULL,
"searchScore" float8 NULL,
"nlpScore" float8 NULL,
"createdAt" timestamptz NULL,
"updatedAt" timestamptz NULL,
params varchar(4000) NULL,
CONSTRAINT "GuaribasInstance_pkey" PRIMARY KEY ("instanceId")
);
-- public."GuaribasApplications" definition
-- Drop table
-- DROP TABLE "GuaribasApplications";
CREATE TABLE "GuaribasApplications" (
id serial4 NOT NULL,
"name" varchar(255) NULL,
"instanceId" int4 NULL,
"createdAt" timestamptz NULL,
"updatedAt" timestamptz NULL,
CONSTRAINT "GuaribasApplications_pkey" PRIMARY KEY (id),
CONSTRAINT "GuaribasApplications_instanceId_fkey" FOREIGN KEY ("instanceId") REFERENCES "GuaribasInstance"("instanceId") ON UPDATE CASCADE
);
-- public."GuaribasGroup" definition
-- Drop table
-- DROP TABLE "GuaribasGroup";
CREATE TABLE "GuaribasGroup" (
"groupId" serial4 NOT NULL,
"displayName" varchar(512) NULL,
"instanceId" int4 NULL,
CONSTRAINT "GuaribasGroup_pkey" PRIMARY KEY ("groupId"),
CONSTRAINT "GuaribasGroup_instanceId_fkey" FOREIGN KEY ("instanceId") REFERENCES "GuaribasInstance"("instanceId") ON UPDATE CASCADE
);
-- public."GuaribasLog" definition
-- Drop table
-- DROP TABLE "GuaribasLog";
CREATE TABLE "GuaribasLog" (
"logId" serial4 NOT NULL,
message varchar(1024) NULL,
kind varchar(1) NULL,
"instanceId" int4 NULL,
"createdAt" timestamptz NULL,
"updatedAt" timestamptz NULL,
CONSTRAINT "GuaribasLog_pkey" PRIMARY KEY ("logId"),
CONSTRAINT "GuaribasLog_instanceId_fkey" FOREIGN KEY ("instanceId") REFERENCES "GuaribasInstance"("instanceId") ON UPDATE CASCADE
);
-- public."GuaribasPackage" definition
-- Drop table
-- DROP TABLE "GuaribasPackage";
CREATE TABLE "GuaribasPackage" (
"packageId" serial4 NOT NULL,
"packageName" varchar(255) NULL,
"instanceId" int4 NULL,
"createdAt" timestamptz NULL,
"updatedAt" timestamptz NULL,
custom varchar(512) NULL,
CONSTRAINT "GuaribasPackage_pkey" PRIMARY KEY ("packageId"),
CONSTRAINT "GuaribasPackage_instanceId_fkey" FOREIGN KEY ("instanceId") REFERENCES "GuaribasInstance"("instanceId") ON UPDATE CASCADE
);
-- public."GuaribasQuestionAlternate" definition
-- Drop table
-- DROP TABLE "GuaribasQuestionAlternate";
CREATE TABLE "GuaribasQuestionAlternate" (
"quickAnswerId" serial4 NOT NULL,
"questionTyped" varchar(255) NULL,
"questionText" varchar(255) NULL,
"instanceId" int4 NULL,
CONSTRAINT "GuaribasQuestionAlternate_pkey" PRIMARY KEY ("quickAnswerId"),
CONSTRAINT "GuaribasQuestionAlternate_instanceId_fkey" FOREIGN KEY ("instanceId") REFERENCES "GuaribasInstance"("instanceId") ON UPDATE CASCADE
);
-- public."GuaribasSchedule" definition
-- Drop table
-- DROP TABLE "GuaribasSchedule";
CREATE TABLE "GuaribasSchedule" (
id serial4 NOT NULL,
"name" varchar(255) NULL,
schedule varchar(255) NULL,
"instanceId" int4 NULL,
"createdAt" timestamptz NULL,
"updatedAt" timestamptz NULL,
CONSTRAINT "GuaribasSchedule_pkey" PRIMARY KEY (id),
CONSTRAINT "GuaribasSchedule_instanceId_fkey" FOREIGN KEY ("instanceId") REFERENCES "GuaribasInstance"("instanceId") ON UPDATE CASCADE
);
-- public."GuaribasUser" definition
-- Drop table
-- DROP TABLE "GuaribasUser";
CREATE TABLE "GuaribasUser" (
"userId" serial4 NOT NULL,
"displayName" varchar(255) NULL,
"userSystemId" varchar(255) NULL,
"userName" varchar(255) NULL,
"defaultChannel" varchar(255) NULL,
email varchar(255) NULL,
locale varchar(5) NULL,
"instanceId" int4 NULL,
"agentSystemId" int4 NULL,
"agentContacted" timestamptz NULL,
"agentMode" varchar(16) NULL,
"conversationReference" text NULL,
"conversationId" int4 NULL,
"hearOnDialog" varchar(64) NULL,
params varchar(4000) NULL,
CONSTRAINT "GuaribasUser_pkey" PRIMARY KEY ("userId"),
CONSTRAINT "GuaribasUser_instanceId_fkey" FOREIGN KEY ("instanceId") REFERENCES "GuaribasInstance"("instanceId") ON UPDATE CASCADE
);
-- public."GuaribasUserGroup" definition
-- Drop table
-- DROP TABLE "GuaribasUserGroup";
CREATE TABLE "GuaribasUserGroup" (
id serial4 NOT NULL,
"userId" int4 NULL,
"groupId" int4 NULL,
"instanceId" int4 NULL,
CONSTRAINT "GuaribasUserGroup_pkey" PRIMARY KEY (id),
CONSTRAINT "GuaribasUserGroup_groupId_fkey" FOREIGN KEY ("groupId") REFERENCES "GuaribasGroup"("groupId") ON UPDATE CASCADE,
CONSTRAINT "GuaribasUserGroup_instanceId_fkey" FOREIGN KEY ("instanceId") REFERENCES "GuaribasInstance"("instanceId") ON UPDATE CASCADE,
CONSTRAINT "GuaribasUserGroup_userId_fkey" FOREIGN KEY ("userId") REFERENCES "GuaribasUser"("userId") ON UPDATE CASCADE
);
-- public."GuaribasAnswer" definition
-- Drop table
-- DROP TABLE "GuaribasAnswer";
CREATE TABLE "GuaribasAnswer" (
"answerId" serial4 NOT NULL,
media varchar(512) NULL,
format varchar(12) NULL,
"content" text NULL,
"createdAt" timestamptz NULL,
"updatedAt" timestamptz NULL,
"nextId" int4 NULL,
"prevId" int4 NULL,
"instanceId" int4 NULL,
"packageId" int4 NULL,
CONSTRAINT "GuaribasAnswer_pkey" PRIMARY KEY ("answerId"),
CONSTRAINT "GuaribasAnswer_packageId_fkey" FOREIGN KEY ("packageId") REFERENCES "GuaribasPackage"("packageId") ON UPDATE CASCADE
);
-- public."GuaribasQuestion" definition
-- Drop table
-- DROP TABLE "GuaribasQuestion";
CREATE TABLE "GuaribasQuestion" (
"questionId" serial4 NOT NULL,
subject1 varchar(64) NULL,
subject2 varchar(64) NULL,
subject3 varchar(64) NULL,
subject4 varchar(64) NULL,
keywords varchar(1024) NULL,
"skipIndex" bool NULL,
"from" varchar(512) NULL,
"to" varchar(512) NULL,
"content" text NULL,
"createdAt" timestamptz NULL,
"updatedAt" timestamptz NULL,
"answerId" int4 NULL,
"instanceId" int4 NULL,
"packageId" int4 NULL,
CONSTRAINT "GuaribasQuestion_pkey" PRIMARY KEY ("questionId"),
CONSTRAINT "GuaribasQuestion_answerId_fkey" FOREIGN KEY ("answerId") REFERENCES "GuaribasAnswer"("answerId") ON DELETE CASCADE ON UPDATE CASCADE,
CONSTRAINT "GuaribasQuestion_instanceId_fkey" FOREIGN KEY ("instanceId") REFERENCES "GuaribasInstance"("instanceId") ON UPDATE CASCADE,
CONSTRAINT "GuaribasQuestion_packageId_fkey" FOREIGN KEY ("packageId") REFERENCES "GuaribasPackage"("packageId") ON UPDATE CASCADE
);
-- public."GuaribasSubject" definition
-- Drop table
-- DROP TABLE "GuaribasSubject";
CREATE TABLE "GuaribasSubject" (
"subjectId" serial4 NOT NULL,
"internalId" varchar(255) NULL,
description varchar(512) NULL,
"from" varchar(255) NULL,
"to" varchar(255) NULL,
"parentSubjectId" int4 NULL,
"instanceId" int4 NULL,
"responsibleUserId" int4 NULL,
"packageId" int4 NULL,
CONSTRAINT "GuaribasSubject_pkey" PRIMARY KEY ("subjectId"),
CONSTRAINT "GuaribasSubject_instanceId_fkey" FOREIGN KEY ("instanceId") REFERENCES "GuaribasInstance"("instanceId") ON UPDATE CASCADE,
CONSTRAINT "GuaribasSubject_packageId_fkey" FOREIGN KEY ("packageId") REFERENCES "GuaribasPackage"("packageId") ON UPDATE CASCADE,
CONSTRAINT "GuaribasSubject_parentSubjectId_fkey" FOREIGN KEY ("parentSubjectId") REFERENCES "GuaribasSubject"("subjectId") ON UPDATE CASCADE,
CONSTRAINT "GuaribasSubject_responsibleUserId_fkey" FOREIGN KEY ("responsibleUserId") REFERENCES "GuaribasUser"("userId") ON UPDATE CASCADE
);
-- public."GuaribasConversation" definition
-- Drop table
-- DROP TABLE "GuaribasConversation";
CREATE TABLE "GuaribasConversation" (
"conversationId" serial4 NOT NULL,
"instanceId" int4 NULL,
"startSubjectId" int4 NULL,
"channelId" int4 NULL,
"rateDate" timestamptz NULL,
rate float8 NULL,
feedback varchar(512) NULL,
"createdAt" timestamptz NULL,
"updatedAt" timestamptz NULL,
"text" varchar(255) NULL,
"startedByUserId" int4 NULL,
CONSTRAINT "GuaribasConversation_pkey" PRIMARY KEY ("conversationId"),
CONSTRAINT "GuaribasConversation_startSubjectId_fkey" FOREIGN KEY ("startSubjectId") REFERENCES "GuaribasSubject"("subjectId") ON UPDATE CASCADE,
CONSTRAINT "GuaribasConversation_startedByUserId_fkey" FOREIGN KEY ("startedByUserId") REFERENCES "GuaribasUser"("userId") ON UPDATE CASCADE
);
-- public."GuaribasConversationMessage" definition
-- Drop table
-- DROP TABLE "GuaribasConversationMessage";
CREATE TABLE "GuaribasConversationMessage" (
"conversationMessageId" serial4 NOT NULL,
"subjectId" int4 NULL,
"content" text NULL,
"createdAt" timestamptz NULL,
"updatedAt" timestamptz NULL,
"conversationId" int4 NULL,
"instanceId" int4 NULL,
"userId" int4 NULL,
CONSTRAINT "GuaribasConversationMessage_pkey" PRIMARY KEY ("conversationMessageId"),
CONSTRAINT "GuaribasConversationMessage_conversationId_fkey" FOREIGN KEY ("conversationId") REFERENCES "GuaribasConversation"("conversationId") ON UPDATE CASCADE,
CONSTRAINT "GuaribasConversationMessage_userId_fkey" FOREIGN KEY ("userId") REFERENCES "GuaribasUser"("userId") ON UPDATE CASCADE
);
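Given this schema, a typical lookup in the Rust layer would resolve a bot's instance before loading its configuration. A hedged sketch using sqlx; the pool type and function name are assumptions, and the project may use a different database layer:

```rust
// Sketch: resolve "instanceId" for a bot, quoting the camelCase column
// names exactly as declared in the schema above.
use sqlx::PgPool;

pub async fn find_instance_id(pool: &PgPool, bot_id: &str) -> sqlx::Result<Option<i32>> {
    let row: Option<(i32,)> = sqlx::query_as(
        r#"SELECT "instanceId" FROM "GuaribasInstance" WHERE "botId" = $1"#,
    )
    .bind(bot_id)
    .fetch_optional(pool)
    .await?;
    Ok(row.map(|(id,)| id))
}
```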
A. Customer Hierarchy:
- Customer (top-level organization)
@@ -118,58 +495,13 @@ A. Storage Layer:
- Real-time data
- Cache layer
- Fast lookups
- Redis (caching)
- Session data
- Rate limiting
- Temporary storage
B. Message Queue:
- Kafka clusters
- Sharded topics
- Message routing
- Event streaming
- Redis Pub/Sub
- Real-time updates
- Presence information
- Status changes
C. Media Handling:
- WebRTC media servers
- SFU WebRTC media servers
- Track multiplexing
- Media processing
- Recording storage
6. API Structure:
A. System APIs:
```rust
pub trait SystemAPI {
async fn call_vm(&self, pid: Uuid, text: String) -> Result<String>;
async fn wait(&self, pid: Uuid, seconds: i32) -> Result<()>;
async fn save_file(&self, pid: Uuid, data: Vec<u8>) -> Result<FileInfo>;
async fn execute_sql(&self, pid: Uuid, sql: String) -> Result<QueryResult>;
}
```
B. Room APIs:
```rust
pub trait RoomAPI {
async fn create_room(&self, config: RoomConfig) -> Result<Room>;
async fn join_room(&self, room_id: Uuid, user_id: Uuid) -> Result<Connection>;
async fn publish_track(&self, track: TrackInfo) -> Result<Track>;
async fn subscribe_track(&self, track_id: Uuid) -> Result<Subscription>;
}
```
C. Message APIs:
```rust
pub trait MessageAPI {
async fn send_message(&self, message: Message) -> Result<MessageId>;
async fn get_messages(&self, filter: MessageFilter) -> Result<Vec<Message>>;
async fn update_status(&self, message_id: Uuid, status: Status) -> Result<()>;
}
```
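The traits above only fix the call surface; a toy in-memory stand-in shows the shape an implementation of MessageAPI could take. All types here are simplified placeholders, not the real gb-core models:

```rust
// Sketch: in-memory message store mirroring MessageAPI's three operations.
use std::sync::Mutex;
use uuid::Uuid;

#[derive(Clone, PartialEq)]
pub enum Status { Sent, Delivered, Read }

#[derive(Clone)]
pub struct Message { pub id: Uuid, pub body: String, pub status: Status }

pub struct MessageFilter { pub status: Option<Status> }

pub struct InMemoryMessages { store: Mutex<Vec<Message>> }

impl InMemoryMessages {
    pub async fn send_message(&self, mut message: Message) -> Result<Uuid, String> {
        message.id = Uuid::new_v4();
        let id = message.id;
        self.store.lock().map_err(|e| e.to_string())?.push(message);
        Ok(id)
    }

    pub async fn get_messages(&self, filter: MessageFilter) -> Result<Vec<Message>, String> {
        let store = self.store.lock().map_err(|e| e.to_string())?;
        Ok(store
            .iter()
            .filter(|m| filter.status.as_ref().map_or(true, |s| *s == m.status))
            .cloned()
            .collect())
    }

    pub async fn update_status(&self, message_id: Uuid, status: Status) -> Result<(), String> {
        let mut store = self.store.lock().map_err(|e| e.to_string())?;
        store
            .iter_mut()
            .find(|m| m.id == message_id)
            .map(|m| m.status = status)
            .ok_or_else(|| "message not found".to_string())
    }
}
```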
7. Monitoring & Operations:
A. Metrics:
@@ -189,7 +521,7 @@ B. Scaling Operations:
C. Security:
- Authentication
- Authorization
- Authorization (zitadel API)
- Rate limiting
- Data encryption
- Audit logging
@@ -555,4 +887,5 @@ migrate them to rust compatible,
- Create environment configuration for everything, and ingress so there can be several server nodes if needed, automatically.
- I NEED A FULL CODE SOLUTION IN PROFESSIONAL, TESTABLE RUST CODE: if you need to, split the answer into several parts, but provide the ENTIRE CODE. A complete, working, balanced server. IMPORTANT: Generate the project as a .sh shell script output using cat, of the entire code base to be restored, with no placeholders and no TODOs.
- VERY IMPORTANT: DO NOT put things like // Add other system routes... you should WRITE ACTUAL CODE.
- Need tests for every line of code written.
- Single project organized in folders.