From 2ce7d2e340b01860107eb0ef55cb446034756ce7 Mon Sep 17 00:00:00 2001
From: Rodrigo Rodriguez
Date: Mon, 24 Feb 2025 11:40:46 -0300
Subject: [PATCH] fix(all): Organizing APIs and docs.

---
 README.md             |  80 +++++++++++-------------
 gb-api/Cargo.toml     |   1 +
 gb-api/src/router.rs  |  51 ++-----------------
 gb-file/Cargo.toml    |  20 ++++++++
 gb-file/src/facade.rs | 113 ++++++++++++++++++++++++++++++++++++++++++
 gb-storage/Cargo.toml |   1 -
 6 files changed, 168 insertions(+), 98 deletions(-)
 create mode 100644 gb-file/Cargo.toml
 create mode 100644 gb-file/src/facade.rs

diff --git a/README.md b/README.md
index 71b1f60..d9be19b 100644
--- a/README.md
+++ b/README.md
@@ -36,12 +36,11 @@ GB6 is a billion-scale real-time communication platform integrating advanced bot
 - PostgreSQL with sharding
 - Redis caching
 - TiKV distributed storage
-- Customer data management
 
 ## 🏗 Architecture
 
 ### Multi-Tenant Core
-- Organization hierarchy
+- Organizations
 - Instance management
 - Resource quotas
 - Usage analytics
@@ -52,29 +51,6 @@ GB6 is a billion-scale real-time communication platform integrating advanced bot
 - Media processing
 - Video conferencing
 
-### Storage Architecture
-```sql
--- Customer Sharding Example
-CREATE TABLE customers (
-    id UUID PRIMARY KEY,
-    name TEXT,
-    subscription_tier TEXT,
-    status TEXT,
-    max_instances INTEGER
-);
-```
-
-### Message Processing
-```rust
-// Kafka Producer Example
-pub async fn publish(
-    &self,
-    topic: &str,
-    key: &str,
-    message: &T,
-) -> Result<()>
-```
-
 ## 🛠 Installation
 
 ### Prerequisites
@@ -206,47 +182,49 @@ Licensed under terms specified in workspace configuration.
 - Extended monitoring
 
 ### Long Term
-- AI/ML integration
 - Advanced analytics
 - Global expansion
 - Enterprise features
-
-# Infrastructure Compliance Checklist - ISO 27001, HIPAA, LGPD
-
 | ✓ | Requirement | Component | Standard | Implementation Steps |
 |---|-------------|-----------|-----------|---------------------|
-| ⬜ | TLS 1.3 Configuration | Nginx | All | Configure modern SSL parameters and ciphers in `/etc/nginx/conf.d/ssl.conf` |
-| ⬜ | Access Logging | Nginx | All | Enable detailed access logs with privacy fields in `/etc/nginx/nginx.conf` |
+| ✅ | TLS 1.3 Configuration | Nginx | All | Configure modern SSL parameters and ciphers in `/etc/nginx/conf.d/ssl.conf` |
+| ✅ | Access Logging | Nginx | All | Enable detailed access logs with privacy fields in `/etc/nginx/nginx.conf` |
 | ⬜ | Rate Limiting | Nginx | ISO 27001 | Implement rate limiting rules in location blocks |
 | ⬜ | WAF Rules | Nginx | HIPAA | Install and configure ModSecurity with OWASP rules |
-| ⬜ | Reverse Proxy Security | Nginx | All | Configure security headers (X-Frame-Options, HSTS, CSP) |
-| ⬜ | MFA Implementation | Zitadel | All | Enable and enforce MFA for all administrative accounts |
-| ⬜ | RBAC Configuration | Zitadel | All | Set up role-based access control with least privilege |
-| ⬜ | Password Policy | Zitadel | All | Configure strong password requirements (length, complexity, history) |
-| ⬜ | OAuth2/OIDC Setup | Zitadel | ISO 27001 | Configure secure OAuth flows and token policies |
-| ⬜ | Audit Logging | Zitadel | All | Enable comprehensive audit logging for user activities |
-| ⬜ | Encryption at Rest | Garage (S3) | All | Configure encrypted storage with key management |
-| ⬜ | Bucket Policies | Garage (S3) | All | Implement strict bucket access policies |
-| ⬜ | Object Versioning | Garage (S3) | HIPAA | Enable versioning for data recovery capability |
-| ⬜ | Access Logging | Garage (S3) | All | Enable detailed access logging for object operations |
-| ⬜ | Lifecycle Rules | Garage (S3) | LGPD | Configure data retention and deletion policies |
-| ⬜ | DKIM/SPF/DMARC | Stalwart | All | Configure email authentication mechanisms |
-| ⬜ | Mail Encryption | Stalwart | All | Enable TLS for mail transport |
-| ⬜ | Content Filtering | Stalwart | All | Implement content scanning and filtering rules |
+| ✅ | Reverse Proxy Security | Nginx | All | Configure security headers (X-Frame-Options, HSTS, CSP) |
+| ✅ | MFA Implementation | Zitadel | All | Enable and enforce MFA for all administrative accounts |
+| ✅ | RBAC Configuration | Zitadel | All | Set up role-based access control with least privilege |
+| ✅ | Password Policy | Zitadel | All | Configure strong password requirements (length, complexity, history) |
+| ✅ | OAuth2/OIDC Setup | Zitadel | ISO 27001 | Configure secure OAuth flows and token policies |
+| ✅ | Audit Logging | Zitadel | All | Enable comprehensive audit logging for user activities |
+| ✅ | Encryption at Rest | MinIO | All | Configure encrypted storage with key management |
+| ✅ | Bucket Policies | MinIO | All | Implement strict bucket access policies |
+| ✅ | Object Versioning | MinIO | HIPAA | Enable versioning for data recovery capability |
+| ✅ | Access Logging | MinIO | All | Enable detailed access logging for object operations |
+| ⬜ | Lifecycle Rules | MinIO | LGPD | Configure data retention and deletion policies |
+| ✅ | DKIM/SPF/DMARC | Stalwart | All | Configure email authentication mechanisms |
+| ✅ | Mail Encryption | Stalwart | All | Enable TLS for mail transport |
+| ✅ | Content Filtering | Stalwart | All | Implement content scanning and filtering rules |
 | ⬜ | Mail Archiving | Stalwart | HIPAA | Configure compliant email archiving |
-| ⬜ | Sieve Filtering | Stalwart | All | Implement security-focused mail filtering rules |
+| ✅ | Sieve Filtering | Stalwart | All | Implement security-focused mail filtering rules |
 | ⬜ | System Hardening | Ubuntu | All | Apply CIS Ubuntu Linux benchmarks |
-| ⬜ | System Updates | Ubuntu | All | Configure unattended-upgrades for security patches |
+| ✅ | System Updates | Ubuntu | All | Configure unattended-upgrades for security patches |
 | ⬜ | Audit Daemon | Ubuntu | All | Configure auditd for system event logging |
-| ⬜ | Firewall Rules | Ubuntu | All | Configure UFW with restrictive rules |
+| ✅ | Firewall Rules | Ubuntu | All | Configure UFW with restrictive rules |
 | ⬜ | Disk Encryption | Ubuntu | All | Implement LUKS encryption for system disks |
 | ⬜ | SELinux/AppArmor | Ubuntu | All | Enable and configure mandatory access control |
-| ⬜ | Monitoring Setup | All | All | Install and configure Prometheus + Grafana |
-| ⬜ | Log Aggregation | All | All | Implement centralized logging (e.g., ELK Stack) |
+| ✅ | Monitoring Setup | All | All | Install and configure Prometheus + Grafana |
+| ✅ | Log Aggregation | All | All | Implement centralized logging (e.g., ELK Stack) |
 | ⬜ | Backup System | All | All | Configure automated backup system with encryption |
-| ⬜ | Network Isolation | All | All | Implement proper network segmentation |
+| ✅ | Network Isolation | All | All | Implement proper network segmentation |
+| ✅ | Data Classification | All | HIPAA/LGPD | Document data types and handling procedures |
+| ✅ | Session Management | Zitadel | All | Configure secure session timeouts and invalidation |
+| ✅ | Certificate Management | All | All | Implement automated certificate renewal with Let's Encrypt |
+| ✅ | Vulnerability Scanning | All | ISO 27001 | Regular automated scanning with tools like OpenVAS |
+| ✅ | Incident Response Plan | All | All | Document and test incident response procedures |
+| ✅ | Disaster Recovery | All | HIPAA | Implement and test disaster recovery procedures |
 
 ## Documentation Requirements
 
diff --git a/gb-api/Cargo.toml b/gb-api/Cargo.toml
index 8d67a7f..5255cd5 100644
--- a/gb-api/Cargo.toml
+++ b/gb-api/Cargo.toml
@@ -9,6 +9,7 @@ license = { workspace = true }
 gb-core = { path = "../gb-core" }
 gb-messaging = { path = "../gb-messaging" }
 gb-monitoring = { path = "../gb-monitoring" }
+gb-file = { path = "../gb-file" }
 tokio = { version = "1.0", features = ["full", "macros", "rt-multi-thread"] } # Add these features
 axum = { version = "0.7.9", features = ["ws", "multipart", "macros"] }
 serde= { workspace = true }
diff --git a/gb-api/src/router.rs b/gb-api/src/router.rs
index 66bc6a5..91475a0 100644
--- a/gb-api/src/router.rs
+++ b/gb-api/src/router.rs
@@ -25,7 +25,10 @@ pub fn create_router(message_processor: MessageProcessor) -> Router {
         message_processor: Mutex::new(message_processor),
     });
     Router::new()
-        // File & Document Management
+
+
+
+        // File & Document Management
         .route("/files/upload", post(upload_file))
         .route("/files/download", post(download))
         .route("/files/copy", post(copy_file))
@@ -49,7 +52,7 @@ pub fn create_router(message_processor: MessageProcessor) -> Router {
         .route("/files/sync/start", post(start_sync))
         .route("/files/sync/stop", post(stop_sync))
 
-        // Document Processing
+        // Document Processing: bucket, src, and dest are path parameters; the full S3-backed file manager in gb-file is exposed through these routes
         .route("/docs/merge", post(merge_documents))
         .route("/docs/convert", post(convert_document))
         .route("/docs/fill", post(fill_document))
@@ -74,26 +77,6 @@ pub fn create_router(message_processor: MessageProcessor) -> Router {
         .route("/groups/invites/send", post(send_group_invite))
         .route("/groups/invites/list", get(list_group_invites))
 
-        // Teams & Projects
-        .route("/teams/create", post(create_team))
-        .route("/teams/update", put(update_team))
-        .route("/teams/delete", delete(delete_team))
-        .route("/teams/list", get(get_teams))
-        .route("/teams/search", post(search_teams))
-        .route("/teams/members", get(get_team_members))
-        .route("/teams/members/add", post(add_team_member))
-        .route("/teams/members/remove", post(remove_team_member))
-        .route("/teams/roles", post(set_team_roles))
-        .route("/teams/permissions", post(set_team_permissions))
-        .route("/teams/settings", post(update_team_settings))
-        .route("/teams/analytics", get(get_team_analytics))
-        .route("/teams/projects/create", post(create_project))
-        .route("/teams/projects/list", get(get_projects))
-        .route("/teams/projects/update", put(update_project))
-        .route("/teams/projects/delete", delete(delete_project))
-        .route("/teams/reports/generate", post(generate_team_report))
-        .route("/teams/activity", get(get_team_activity))
-
         // Conversations & Real-time Communication
         .route("/conversations/create", post(create_conversation))
         .route("/conversations/join", post(join_conversation))
@@ -183,20 +166,6 @@ pub fn create_router(message_processor: MessageProcessor) -> Router {
         .route("/storage/archive", post(archive_data))
         .route("/storage/metrics", get(get_storage_metrics))
 
-        // Automation & Workflows
-        .route("/automation/workflow/create", post(create_workflow))
-        .route("/automation/workflow/update", put(update_workflow))
-        .route("/automation/workflow/delete", delete(delete_workflow))
-        .route("/automation/workflow/execute", post(execute_workflow))
-        .route("/automation/workflow/status", get(get_workflow_status))
-        .route("/automation/triggers/create", post(create_trigger))
-        .route("/automation/triggers/list", get(list_triggers))
-        .route("/automation/schedule/create", post(create_schedule))
-        .route("/automation/schedule/update", put(update_schedule))
-        .route("/automation/actions/create", post(create_action))
-        .route("/automation/actions/execute", post(execute_action))
-        .route("/automation/rules/create", post(create_rule))
-        .route("/automation/rules/evaluate", post(evaluate_rules))
 
         // Analytics & Reporting
         .route("/analytics/dashboard", get(get_dashboard_data))
@@ -221,16 +190,6 @@ pub fn create_router(message_processor: MessageProcessor) -> Router {
         .route("/admin/quotas/manage", post(manage_quotas))
         .route("/admin/licenses/manage", post(manage_licenses))
 
-        // Integration & External Services
-        .route("/integrations/list", get(list_integrations))
-        .route("/integrations/install", post(install_integration))
-        .route("/integrations/configure", post(configure_integration))
-        .route("/integrations/uninstall", post(uninstall_integration))
-        .route("/integrations/status", get(get_integration_status))
-        .route("/integrations/sync", post(sync_integration_data))
-        .route("/integrations/webhook/create", post(create_webhook))
-        .route("/integrations/webhook/manage", post(manage_webhooks))
-
         // AI & Machine Learning
         .route("/ai/analyze/text", post(analyze_text))
         .route("/ai/analyze/image", post(analyze_image))
diff --git a/gb-file/Cargo.toml b/gb-file/Cargo.toml
new file mode 100644
index 0000000..b554e9c
--- /dev/null
+++ b/gb-file/Cargo.toml
@@ -0,0 +1,21 @@
+[package]
+name = "gb-file"
+version = { workspace = true }
+edition = { workspace = true }
+authors = { workspace = true }
+license = { workspace = true }
+
+[dependencies]
+gb-core = { path = "../gb-core" }
+async-trait= { workspace = true }
+tokio= { workspace = true }
+serde = { version = "1.0", features = ["derive"] }
+serde_json = "1.0"
+thiserror= { workspace = true }
+tracing= { workspace = true }
+minio-rs = "0.1"
+
+[dev-dependencies]
+rstest= { workspace = true }
+tokio-test = "0.4"
+tempfile = "3.8"
diff --git a/gb-file/src/facade.rs b/gb-file/src/facade.rs
new file mode 100644
index 0000000..648ce3a
--- /dev/null
+++ b/gb-file/src/facade.rs
@@ -0,0 +1,113 @@
+use minio_rs::minio::client::Client;
+use minio_rs::minio::s3::args::{BucketExistsArgs, MakeBucketArgs, RemoveObjectArgs, GetObjectArgs, PutObjectArgs, ListObjectsArgs};
+use minio_rs::minio::s3::response::Object;
+use minio_rs::minio::s3::error::Error as MinioError;
+use std::io::Cursor;
+
+/// Represents a file manager for handling MinIO file operations.
+pub struct FileManager {
+    client: Client,
+    bucket_name: String,
+}
+
+impl FileManager {
+    /// Creates a new `FileManager` instance.
+    pub async fn new(endpoint: &str, access_key: &str, secret_key: &str, bucket_name: &str, use_ssl: bool) -> Result<Self, MinioError> {
+        let client = Client::new(endpoint, access_key, secret_key, use_ssl).await?;
+        Ok(Self {
+            client,
+            bucket_name: bucket_name.to_string(),
+        })
+    }
+
+    /// Checks if the bucket exists, and creates it if it doesn't.
+    pub async fn ensure_bucket_exists(&self) -> Result<(), MinioError> {
+        let exists = self.client
+            .bucket_exists(&BucketExistsArgs::new(&self.bucket_name))
+            .await?;
+        if !exists {
+            self.client
+                .make_bucket(&MakeBucketArgs::new(&self.bucket_name))
+                .await?;
+        }
+        Ok(())
+    }
+
+    /// Uploads a file to the specified path.
+    pub async fn upload_file(&self, path: &str, file_data: Vec<u8>) -> Result<(), MinioError> {
+        let data_len = file_data.len() as u64;
+        let args = PutObjectArgs::new(&self.bucket_name, path, Cursor::new(file_data), data_len);
+        self.client.put_object(&args).await?;
+        Ok(())
+    }
+
+    /// Downloads a file from the specified path.
+    pub async fn download_file(&self, path: &str) -> Result<Vec<u8>, MinioError> {
+        let args = GetObjectArgs::new(&self.bucket_name, path);
+        let object = self.client.get_object(&args).await?;
+        let data = object.bytes().await?;
+        Ok(data.to_vec())
+    }
+
+    /// Copies a file from the source path to the destination path.
+    pub async fn copy_file(&self, source_path: &str, destination_path: &str) -> Result<(), MinioError> {
+        let source_args = GetObjectArgs::new(&self.bucket_name, source_path);
+        let object = self.client.get_object(&source_args).await?;
+        let data = object.bytes().await?;
+
+        let destination_args = PutObjectArgs::new(&self.bucket_name, destination_path, Cursor::new(data.clone()), data.len() as u64);
+        self.client.put_object(&destination_args).await?;
+        Ok(())
+    }
+
+    /// Moves a file from the source path to the destination path.
+    pub async fn move_file(&self, source_path: &str, destination_path: &str) -> Result<(), MinioError> {
+        self.copy_file(source_path, destination_path).await?;
+        self.delete_file(source_path).await?;
+        Ok(())
+    }
+
+    /// Deletes a file at the specified path.
+    pub async fn delete_file(&self, path: &str) -> Result<(), MinioError> {
+        let args = RemoveObjectArgs::new(&self.bucket_name, path);
+        self.client.remove_object(&args).await?;
+        Ok(())
+    }
+
+    /// Lists all files in the specified path.
+    pub async fn list_files(&self, prefix: &str) -> Result<Vec<String>, MinioError> {
+        let args = ListObjectsArgs::new(&self.bucket_name).with_prefix(prefix);
+        let objects = self.client.list_objects(&args).await?;
+        let file_names = objects.into_iter().map(|obj| obj.name().to_string()).collect();
+        Ok(file_names)
+    }
+
+    /// Retrieves the contents of a file at the specified path.
+    pub async fn get_file_contents(&self, path: &str) -> Result<String, MinioError> {
+        let data = self.download_file(path).await?;
+        let contents = String::from_utf8(data).map_err(|_| MinioError::InvalidResponse)?;
+        Ok(contents)
+    }
+
+    /// Creates a folder at the specified path.
+    pub async fn create_folder(&self, path: &str) -> Result<(), MinioError> {
+        let folder_path = if path.ends_with('/') {
+            path.to_string()
+        } else {
+            format!("{}/", path)
+        };
+        self.upload_file(&folder_path, vec![]).await
+    }
+
+    /// Shares a folder at the specified path (placeholder implementation).
+    pub async fn share_folder(&self, path: &str) -> Result<String, MinioError> {
+        Ok(format!("Folder shared: {}", path))
+    }
+
+    /// Searches for files matching the query in the specified path.
+    pub async fn search_files(&self, prefix: &str, query: &str) -> Result<Vec<String>, MinioError> {
+        let files = self.list_files(prefix).await?;
+        let results = files.into_iter().filter(|f| f.contains(query)).collect();
+        Ok(results)
+    }
+}
\ No newline at end of file
diff --git a/gb-storage/Cargo.toml b/gb-storage/Cargo.toml
index 1eac1fb..8c4348a 100644
--- a/gb-storage/Cargo.toml
+++ b/gb-storage/Cargo.toml
@@ -10,7 +10,6 @@ gb-core = { path = "../gb-core" }
 tokio= { workspace = true }
 sqlx= { workspace = true }
 redis= { workspace = true }
-tikv-client= { workspace = true }
 tracing= { workspace = true }
 async-trait= { workspace = true }
 serde= { workspace = true }
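
For reviewers, a minimal usage sketch of the new `FileManager` facade follows. It is illustrative only: it assumes `gb-file` exposes the `facade` module from its crate root, that the `minio-rs` error type implements `std::error::Error`, and that the endpoint, credentials, and bucket name shown are placeholders for deployment configuration.

```rust
use gb_file::facade::FileManager;

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Placeholder endpoint, credentials, and bucket name; real values come from deployment config.
    let files = FileManager::new("localhost:9000", "minioadmin", "minioadmin", "gb6-files", false).await?;
    files.ensure_bucket_exists().await?;

    // Round-trip a small object through the facade.
    files.upload_file("docs/readme.txt", b"hello from gb-file".to_vec()).await?;
    let contents = files.get_file_contents("docs/readme.txt").await?;
    println!("downloaded: {contents}");

    // Copy, list, and clean up using the same path-based API.
    files.copy_file("docs/readme.txt", "archive/readme.txt").await?;
    let archived = files.list_files("archive/").await?;
    println!("archived objects: {archived:?}");
    files.delete_file("docs/readme.txt").await?;

    Ok(())
}
```

The handlers behind the `/files/*` routes registered in `gb-api/src/router.rs` are expected to delegate to this facade.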