refactor(all): Remove unused code and update dependencies in multiple modules
parent cfa14dd222
commit 3291266a42
24 changed files with 1351 additions and 1157 deletions

Cargo.lock (generated, 736 changes): diff suppressed because it is too large.
@@ -57,7 +57,9 @@ sea-orm = { version = "0.12", features = ["sqlx-postgres", "runtime-tokio-native
rdkafka = { version = "0.36", features = ["cmake-build", "ssl"] }
lapin = "2.3"

# Serialization and data formats

# Drive, Serialization and data formats
minio = "0.1.0"
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
protobuf = "3.3"

@@ -71,7 +73,9 @@ opus = "0.3"
image = "0.24"

# Authentication and security
zitadel = { version = "5.5.1", features = ["api-common", "api-auth-v1", "zitadel-auth-v1", "credentials", "interceptors"] }
jsonwebtoken = "9.2"

argon2 = "0.5"
ring = "0.17"
reqwest = { version = "0.11", features = ["json", "stream"] }

@@ -95,6 +99,7 @@ fake = { version = "2.9", features = ["derive"] }
rstest = "0.18"

# Utilities
actix-web = "4.0.1"
uuid = { version = "1.6", features = ["serde", "v4"] }
chrono = { version = "0.4", features = ["serde"] }
thiserror = "1.0"
api.json (new file, 537 lines)

@@ -0,0 +1,537 @@
openapi: 3.0.0
info:
  title: General Bots API
  description: API for managing files, documents, groups, conversations, and more.
  version: 1.0.0
servers:
  - url: https://api.generalbots.com/v1
    description: Production server
paths:
  /files/upload:
    post:
      summary: Upload a file
      operationId: uploadFile
      requestBody:
        required: true
        content:
          multipart/form-data:
            schema:
              type: object
              properties:
                file:
                  type: string
                  format: binary
      responses:
        '200':
          description: File uploaded successfully
          content:
            application/json:
              schema:
                type: object
                properties:
                  fileId:
                    type: string
                  url:
                    type: string

  /files/download:
    post:
      summary: Download a file
      operationId: downloadFile
      requestBody:
        required: true
        content:
          application/json:
            schema:
              type: object
              properties:
                fileId:
                  type: string
      responses:
        '200':
          description: File downloaded successfully
          content:
            application/octet-stream:
              schema:
                type: string
                format: binary

  /files/copy:
    post:
      summary: Copy a file
      operationId: copyFile
      requestBody:
        required: true
        content:
          application/json:
            schema:
              type: object
              properties:
                sourcePath:
                  type: string
                destinationPath:
                  type: string
      responses:
        '200':
          description: File copied successfully
          content:
            application/json:
              schema:
                type: object
                properties:
                  message:
                    type: string

  /files/move:
    post:
      summary: Move a file
      operationId: moveFile
      requestBody:
        required: true
        content:
          application/json:
            schema:
              type: object
              properties:
                sourcePath:
                  type: string
                destinationPath:
                  type: string
      responses:
        '200':
          description: File moved successfully
          content:
            application/json:
              schema:
                type: object
                properties:
                  message:
                    type: string

  /files/delete:
    post:
      summary: Delete a file
      operationId: deleteFile
      requestBody:
        required: true
        content:
          application/json:
            schema:
              type: object
              properties:
                fileId:
                  type: string
      responses:
        '200':
          description: File deleted successfully
          content:
            application/json:
              schema:
                type: object
                properties:
                  message:
                    type: string

  /files/getContents:
    post:
      summary: Get file contents
      operationId: getFileContents
      requestBody:
        required: true
        content:
          application/json:
            schema:
              type: object
              properties:
                fileId:
                  type: string
      responses:
        '200':
          description: File contents retrieved successfully
          content:
            application/json:
              schema:
                type: object
                properties:
                  contents:
                    type: string

  /files/save:
    post:
      summary: Save a file
      operationId: saveFile
      requestBody:
        required: true
        content:
          application/json:
            schema:
              type: object
              properties:
                fileId:
                  type: string
                contents:
                  type: string
      responses:
        '200':
          description: File saved successfully
          content:
            application/json:
              schema:
                type: object
                properties:
                  message:
                    type: string

  /files/createFolder:
    post:
      summary: Create a folder
      operationId: createFolder
      requestBody:
        required: true
        content:
          application/json:
            schema:
              type: object
              properties:
                folderName:
                  type: string
                parentFolderId:
                  type: string
      responses:
        '200':
          description: Folder created successfully
          content:
            application/json:
              schema:
                type: object
                properties:
                  folderId:
                    type: string

  /files/shareFolder:
    post:
      summary: Share a folder
      operationId: shareFolder
      requestBody:
        required: true
        content:
          application/json:
            schema:
              type: object
              properties:
                folderId:
                  type: string
                userIds:
                  type: array
                  items:
                    type: string
      responses:
        '200':
          description: Folder shared successfully
          content:
            application/json:
              schema:
                type: object
                properties:
                  message:
                    type: string

  /files/dirFolder:
    post:
      summary: List folder contents
      operationId: dirFolder
      requestBody:
        required: true
        content:
          application/json:
            schema:
              type: object
              properties:
                folderId:
                  type: string
      responses:
        '200':
          description: Folder contents retrieved successfully
          content:
            application/json:
              schema:
                type: array
                items:
                  type: object
                  properties:
                    name:
                      type: string
                    type:
                      type: string
                    size:
                      type: integer

  /files/list:
    post:
      summary: List files
      operationId: getFiles
      requestBody:
        required: true
        content:
          application/json:
            schema:
              type: object
              properties:
                folderId:
                  type: string
      responses:
        '200':
          description: Files listed successfully
          content:
            application/json:
              schema:
                type: array
                items:
                  type: object
                  properties:
                    name:
                      type: string
                    type:
                      type: string
                    size:
                      type: integer

  /files/search:
    post:
      summary: Search files
      operationId: searchFiles
      requestBody:
        required: true
        content:
          application/json:
            schema:
              type: object
              properties:
                query:
                  type: string
      responses:
        '200':
          description: Files searched successfully
          content:
            application/json:
              schema:
                type: array
                items:
                  type: object
                  properties:
                    name:
                      type: string
                    type:
                      type: string
                    size:
                      type: integer

  /files/recent:
    post:
      summary: Get recent files
      operationId: getRecentFiles
      responses:
        '200':
          description: Recent files retrieved successfully
          content:
            application/json:
              schema:
                type: array
                items:
                  type: object
                  properties:
                    name:
                      type: string
                    type:
                      type: string
                    size:
                      type: integer

  /files/favorite:
    post:
      summary: Toggle favorite status of a file
      operationId: toggleFavorite
      requestBody:
        required: true
        content:
          application/json:
            schema:
              type: object
              properties:
                fileId:
                  type: string
      responses:
        '200':
          description: Favorite status toggled successfully
          content:
            application/json:
              schema:
                type: object
                properties:
                  isFavorite:
                    type: boolean

  /files/versions:
    post:
      summary: Get file versions
      operationId: getFileVersions
      requestBody:
        required: true
        content:
          application/json:
            schema:
              type: object
              properties:
                fileId:
                  type: string
      responses:
        '200':
          description: File versions retrieved successfully
          content:
            application/json:
              schema:
                type: array
                items:
                  type: object
                  properties:
                    versionId:
                      type: string
                    timestamp:
                      type: string
                    size:
                      type: integer

  /files/restore:
    post:
      summary: Restore a file version
      operationId: restoreFileVersion
      requestBody:
        required: true
        content:
          application/json:
            schema:
              type: object
              properties:
                fileId:
                  type: string
                versionId:
                  type: string
      responses:
        '200':
          description: File version restored successfully
          content:
            application/json:
              schema:
                type: object
                properties:
                  message:
                    type: string

  /files/permissions:
    post:
      summary: Set file permissions
      operationId: setFilePermissions
      requestBody:
        required: true
        content:
          application/json:
            schema:
              type: object
              properties:
                fileId:
                  type: string
                permissions:
                  type: object
      responses:
        '200':
          description: File permissions updated successfully
          content:
            application/json:
              schema:
                type: object
                properties:
                  message:
                    type: string

  /files/quota:
    get:
      summary: Get storage quota
      operationId: getStorageQuota
      responses:
        '200':
          description: Storage quota retrieved successfully
          content:
            application/json:
              schema:
                type: object
                properties:
                  used:
                    type: integer
                  total:
                    type: integer

  /files/shared:
    get:
      summary: Get shared files
      operationId: getSharedFiles
      responses:
        '200':
          description: Shared files retrieved successfully
          content:
            application/json:
              schema:
                type: array
                items:
                  type: object
                  properties:
                    name:
                      type: string
                    type:
                      type: string
                    size:
                      type: integer

  /files/sync/status:
    get:
      summary: Get sync status
      operationId: getSyncStatus
      responses:
        '200':
          description: Sync status retrieved successfully
          content:
            application/json:
              schema:
                type: object
                properties:
                  status:
                    type: string

  /files/sync/start:
    post:
      summary: Start sync
      operationId: startSync
      responses:
        '200':
          description: Sync started successfully
          content:
            application/json:
              schema:
                type: object
                properties:
                  message:
                    type: string

  /files/sync/stop:
    post:
      summary: Stop sync
      operationId: stopSync
      responses:
        '200':
          description: Sync stopped successfully
          content:
            application/json:
              schema:
                type: object
                properties:
                  message:
                    type: string
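For orientation, a minimal client-side sketch (not part of this commit) of calling the uploadFile operation defined above; it assumes the reqwest crate with its "multipart" and "json" features plus tokio, and the file name is a placeholder:

// Sketch: POST /files/upload per the spec above; error handling kept minimal.
use reqwest::multipart::{Form, Part};

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    let bytes = std::fs::read("report.pdf")?;
    let form = Form::new().part("file", Part::bytes(bytes).file_name("report.pdf"));

    let response = reqwest::Client::new()
        .post("https://api.generalbots.com/v1/files/upload")
        .multipart(form)
        .send()
        .await?;

    // On success the spec declares a JSON object carrying fileId and url.
    let body: serde_json::Value = response.json().await?;
    println!("fileId = {}", body["fileId"]);
    Ok(())
}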
@@ -1,3 +0,0 @@
use axum::{Json, Extension};
use crate::models::{LoginRequest, LoginResponse};
use std::sync::Arc;

@@ -1,3 +1,2 @@
mod auth_handler;

pub use auth_handler::*;
@@ -1,49 +0,0 @@
#[cfg(test)]
mod tests {
    use gb_auth::services::auth_service::AuthService;
    use gb_auth::models::LoginRequest;
    use sqlx::PgPool;
    use std::sync::Arc;
    use rstest::*;

    #[fixture]
    async fn auth_service() -> AuthService {
        let db_pool = PgPool::connect("postgresql://postgres:postgres@localhost:5432/test_db")
            .await
            .expect("Failed to create database connection");

        AuthService::new(
            Arc::new(db_pool),
            "test_secret".to_string(),
            3600,
        )
    }

    #[rstest]
    #[tokio::test]
    async fn test_login_success() -> Result<(), Box<dyn std::error::Error>> {
        let auth_service = auth_service().await;
        let request = LoginRequest {
            email: "test@example.com".to_string(),
            password: "password123".to_string(),
        };

        let result = auth_service.login(request).await;
        assert!(result.is_ok());
        Ok(())
    }

    #[rstest]
    #[tokio::test]
    async fn test_login_invalid_credentials() -> Result<(), Box<dyn std::error::Error>> {
        let auth_service = auth_service().await;
        let request = LoginRequest {
            email: "wrong@example.com".to_string(),
            password: "wrongpassword".to_string(),
        };

        let result = auth_service.login(request).await;
        assert!(result.is_err());
        Ok(())
    }
}
@@ -18,7 +18,12 @@ axum = { version = "0.7", features = ["json"] }
serde_json = "1.0"
sqlx = { workspace = true }
redis = { workspace = true }

minio = { workspace = true }
zitadel = { workspace = true }
rdkafka = { workspace = true }
tonic = { workspace = true }
actix-web = { workspace = true }
anyhow = { workspace = true }

[dev-dependencies]
mockall = { workspace = true }
@@ -1,11 +1,14 @@
use anyhow::Result;
use minio_rs::client::{Client as MinioClient, ClientBuilder as MinioClientBuilder};
use rdkafka::ClientConfig;
use rdkafka::producer::FutureProducer;
use redis::aio::ConnectionManager as RedisConnectionManager;
use sqlx::postgres::{PgPoolOptions, PgPool};
use zitadel::api::v1::auth::AuthServiceClient;

use zitadel::api::clients::ClientBuilder;
use zitadel::api::zitadel::auth::v1::auth_service_client::AuthServiceClient;
use minio::s3::creds::StaticProvider;
use minio::s3::http::BaseUrl;
use minio::s3::client::{Client as MinioClient, ClientBuilder as MinioClientBuilder};
use std::str::FromStr;
use crate::config::AppConfig;

pub async fn init_postgres(config: &AppConfig) -> Result<PgPool> {

@@ -14,11 +17,6 @@ pub async fn init_postgres(config: &AppConfig) -> Result<PgPool> {
        .connect(&config.database.url)
        .await?;

    // Run migrations
    sqlx::migrate!("./migrations")
        .run(&pool)
        .await?;

    Ok(pool)
}

@@ -39,27 +37,55 @@ pub async fn init_kafka(config: &AppConfig) -> Result<FutureProducer> {
}

pub async fn init_zitadel(config: &AppConfig) -> Result<AuthServiceClient<tonic::transport::Channel>> {
    let channel = tonic::transport::Channel::from_shared(format!("https://{}", config.zitadel.domain))?
        .connect()
        .await?;

    let client = AuthServiceClient::new(channel);
    let mut client = ClientBuilder::new(&config.zitadel.domain)
        .with_access_token(&"test")
        .build_auth_client()
        .await?;

    Ok(client)
}

pub async fn init_minio(config: &AppConfig) -> Result<MinioClient> {
    let client = MinioClientBuilder::new()
        .endpoint(&config.minio.endpoint)
        .access_key(&config.minio.access_key)
        .secret_key(&config.minio.secret_key)
        .ssl(config.minio.use_ssl)

pub async fn init_minio(config: &AppConfig) -> Result<MinioClient, Box<dyn std::error::Error + Send + Sync>> {
    // Construct the base URL
    let base_url = format!("https://{}", config.minio.endpoint);
    let base_url = BaseUrl::from_str(&base_url)?;

    // Create credentials provider
    let credentials = StaticProvider::new(
        &config.minio.access_key,
        &config.minio.secret_key,
        None,
    );

    // Build the MinIO client
    let client = MinioClientBuilder::new(base_url.clone())
        .provider(Some(Box::new(credentials)))
        //.secure(config.minio.use_ssl)
        .build()?;

    // Ensure bucket exists
    if !client.bucket_exists(&config.minio.bucket).await? {
        client.make_bucket(&config.minio.bucket, None).await?;
    }

    Ok(client)
}
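For context, a sketch of how the init_* functions above might be wired together at startup; init_services is a hypothetical helper, not part of the commit, and the boxed error type follows the new init_minio signature (anyhow::Error converts into it):

// Sketch only: assembling the connections created by the functions above.
pub async fn init_services(
    config: &AppConfig,
) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
    let db = init_postgres(config).await?;     // PgPool
    let kafka = init_kafka(config).await?;     // FutureProducer
    let zitadel = init_zitadel(config).await?; // Zitadel auth client
    let minio = init_minio(config).await?;     // MinioClient, bucket ensured
    // Hand the connections to shared application state here.
    let _ = (db, kafka, zitadel, minio);
    Ok(())
}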
@@ -1,41 +1,7 @@
pub mod db;
pub mod errors;
pub mod models;
pub mod traits;
pub mod config;
pub use errors::{Error, ErrorKind, Result};

#[derive(Clone)]
struct AppState {
    db: PgPool,
    redis: RedisClient,
    storage: MinioClient,
    message_processor: MessageProcessor,
    customer: PostgresCustomerRepository,
}

#[cfg(test)]
mod tests {
    use crate::models::{Customer, SubscriptionTier};
    use rstest::*;

    #[fixture]
    fn customer() -> Customer {
        Customer::new(
            "Test Corp".to_string(),
            "test@example.com".to_string(),
            SubscriptionTier::Enterprise,
            10,
        )
    }

    #[rstest]
    fn test_customer_fixture(customer: Customer) {
        assert_eq!(customer.name, "Test Corp");
        assert_eq!(customer.email, "test@example.com");
        assert_eq!(customer.max_instances, 10);
    }
}
@@ -1,17 +1,15 @@
use chrono::{DateTime, Utc};
use minio_rs::client::Client as MinioClient;
use minio::s3::client::Client as MinioClient;
use rdkafka::producer::FutureProducer;
use redis::aio::ConnectionManager as RedisConnectionManager;
use serde::{Deserialize, Serialize};
use sqlx::PgPool;
use uuid::Uuid;
use zitadel::api::v1::auth::AuthServiceClient;
use crate::config::AppConfig;
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use zitadel::api::zitadel::auth::v1::auth_service_client::AuthServiceClient;
use serde_json::Value as JsonValue;
use std::str::FromStr;
use uuid::Uuid;

use crate::config::AppConfig;

#[derive(Debug)]
pub struct CoreError(pub String);

@@ -251,17 +249,6 @@ pub struct AppState {
    pub minio_client: MinioClient,
}

// User models
#[derive(Debug, Serialize, Deserialize)]
pub struct User {
    pub id: Uuid,
    pub external_id: String, // Zitadel user ID
    pub username: String,
    pub email: String,
    pub created_at: DateTime<Utc>,
    pub updated_at: DateTime<Utc>,
}

// File models
#[derive(Debug, Serialize, Deserialize)]
pub struct File {

@@ -301,16 +288,7 @@ pub struct Conversation {
pub struct ConversationMember {
    pub conversation_id: Uuid,
    pub user_id: Uuid,
    pub joined_at: DateTime<Utc>,
}

#[derive(Debug, Serialize, Deserialize)]
pub struct Message {
    pub id: Uuid,
    pub conversation_id: Uuid,
    pub user_id: Uuid,
    pub content: String,
    pub created_at: DateTime<Utc>,
    pub joined_at: DateTime<Utc>,
}

// Calendar models
@@ -9,12 +9,12 @@ license = { workspace = true }
gb-core = { path = "../gb-core" }
async-trait = { workspace = true }
tokio = { workspace = true }
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
serde = { workspace = true, features = ["derive"] }
serde_json = { workspace = true }
thiserror = { workspace = true }
tracing = { workspace = true }
minio = "0.1.0"

minio = { workspace = true }
actix-web = { workspace = true }
[dev-dependencies]
rstest = { workspace = true }
tokio-test = "0.4"
@@ -1,144 +0,0 @@
use minio::s3::client::Client;
use minio::s3::args::{BucketExistsArgs, MakeBucketArgs, RemoveObjectArgs, GetObjectArgs, PutObjectArgs, ListObjectsArgs};
use minio::s3::creds::StaticProvider;
use minio::s3::error::Error as MinioError;
use minio::s3::types::{BaseUrl, Item};
use std::io::Cursor;
use std::path::Path;

pub struct FileManager {
    client: Client,
    bucket_name: String,
}

impl FileManager {
    pub async fn new(endpoint: &str, access_key: &str, secret_key: &str, bucket_name: &str, use_ssl: bool) -> Result<Self, MinioError> {
        // Create BaseUrl from endpoint
        let base_url = BaseUrl::from_string(endpoint)?;
        let static_provider = StaticProvider::new(
            access_key,
            secret_key,
            None,
        );
        let client = Client::new(base_url.clone(), Some(Box::new(static_provider)), None, None).unwrap();

        Ok(Self {
            client,
            bucket_name: bucket_name.to_string(),
        })
    }

    pub async fn ensure_bucket_exists(&self) -> Result<(), MinioError> {
        let exists = self.client
            .bucket_exists(&BucketExistsArgs::new(&self.bucket_name)?)
            .await?;
        if !exists {
            self.client
                .make_bucket(&MakeBucketArgs::new(&self.bucket_name)?)
                .await?;
        }
        Ok(())
    }

    pub async fn upload_file(&self, path: &str, file_data: Vec<u8>) -> Result<(), MinioError> {
        let reader = Cursor::new(&file_data);
        let file_size = file_data.len() as u64;

        let args = PutObjectArgs::new(
            &self.bucket_name,
            path,
            reader,
            Some(file_size),
            None
        )?;

        self.client.put_object(&args).await?;
        Ok(())
    }

    pub async fn download_file(&self, path: &str) -> Result<Vec<u8>, MinioError> {
        let args = GetObjectArgs::new(&self.bucket_name, path)?;
        let object = self.client.get_object(&args).await?;
        let data = object.bytes().await?;
        Ok(data.to_vec())
    }

    pub async fn copy_file(&self, source_path: &str, destination_path: &str) -> Result<(), MinioError> {
        // Download the source file
        let data = self.download_file(source_path).await?;

        // Upload it to the destination
        let reader = Cursor::new(&data);
        let file_size = data.len() as u64;

        let args = PutObjectArgs::new(
            &self.bucket_name,
            destination_path,
            reader,
            Some(file_size),
            None
        )?;

        self.client.put_object(&args).await?;
        Ok(())
    }

    pub async fn move_file(&self, source_path: &str, destination_path: &str) -> Result<(), MinioError> {
        self.copy_file(source_path, destination_path).await?;
        self.delete_file(source_path).await?;
        Ok(())
    }

    pub async fn delete_file(&self, path: &str) -> Result<(), MinioError> {
        let args = RemoveObjectArgs::new(&self.bucket_name, path)?;
        self.client.remove_object(&args).await?;
        Ok(())
    }

    pub async fn list_files(&self, prefix: &str) -> Result<Vec<String>, MinioError> {
        // Create a predicate function that always returns true
        let predicate = |_: Vec<Item>| -> bool { true };

        let args = ListObjectsArgs::new(&self.bucket_name, &predicate)?;
        let objects = self.client.list_objects(&args).await?;

        // Filter objects based on prefix manually
        let file_names: Vec<String> = objects
            .into_iter()
            .filter(|obj| obj.name().starts_with(prefix))
            .map(|obj| obj.name().to_string())
            .collect();

        Ok(file_names)
    }

    pub async fn get_file_contents(&self, path: &str) -> Result<String, MinioError> {
        let data = self.download_file(path).await?;
        let contents = String::from_utf8(data)
            .map_err(|_| MinioError::InvalidResponse(400, "Invalid UTF-8 sequence".to_string()))?;
        Ok(contents)
    }

    pub async fn create_folder(&self, path: &str) -> Result<(), MinioError> {
        let folder_path = if path.ends_with('/') {
            path.to_string()
        } else {
            format!("{}/", path)
        };

        // Create empty file with folder path
        self.upload_file(&folder_path, vec![]).await
    }

    pub async fn share_folder(&self, path: &str) -> Result<String, MinioError> {
        // This is just a placeholder implementation
        Ok(format!("Folder shared: {}", path))
    }

    pub async fn search_files(&self, prefix: &str, query: &str) -> Result<Vec<String>, MinioError> {
        let files = self.list_files(prefix).await?;
        let results = files.into_iter().filter(|f| f.contains(query)).collect();
        Ok(results)
    }
}
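For reference, the deleted FileManager was driven roughly like this (a sketch grounded only in the signatures above; endpoint, credentials, and bucket are placeholders):

// Sketch: exercising the removed FileManager API.
async fn demo() -> Result<(), minio::s3::error::Error> {
    let fm = FileManager::new("https://minio.example.com", "access", "secret", "gb-bucket", true).await?;
    fm.ensure_bucket_exists().await?;
    fm.upload_file("docs/hello.txt", b"hello".to_vec()).await?;
    let names = fm.list_files("docs/").await?;
    println!("{:?}", names);
    fm.delete_file("docs/hello.txt").await?;
    Ok(())
}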
@@ -1,10 +1,7 @@
use gb_core::{Error, Result};
use image::{DynamicImage, ImageOutputFormat, Rgba};
use image::{DynamicImage, Rgba};
use imageproc::drawing::draw_text_mut;
use rusttype::{Font, Scale};
use std::io::Cursor;
use tempfile::NamedTempFile;
use std::io::Write;
use std::fs;

@@ -15,28 +12,7 @@ impl ImageProcessor {
        Self
    }

    pub async fn extract_text(&self, image: &DynamicImage) -> Result<String> {
        // Create a temporary file
        let mut temp_file = NamedTempFile::new()
            .map_err(|e| Error::internal(format!("Failed to create temp file: {}", e)))?;

        // Convert image to PNG and write to temp file
        let mut cursor = Cursor::new(Vec::new());
        image.write_to(&mut cursor, ImageOutputFormat::Png)
            .map_err(|e| Error::internal(format!("Failed to encode image: {}", e)))?;

        temp_file.write_all(&cursor.into_inner())
            .map_err(|e| Error::internal(format!("Failed to write to temp file: {}", e)))?;

        // (`api` is an OCR engine handle; its initialization falls outside this hunk.)
        api.set_image(temp_file.path().to_str().unwrap())
            .map_err(|e| Error::internal(format!("Failed to set image: {}", e)))?
            .recognize()
            .map_err(|e| Error::internal(format!("Failed to recognize text: {}", e)))?
            .get_text()
            .map_err(|e| Error::internal(format!("Failed to get text: {}", e)))
    }

    pub fn resize(&self, image: &DynamicImage, width: u32, height: u32) -> DynamicImage {
        image.resize(width, height, image::imageops::FilterType::Lanczos3)
@@ -27,6 +27,7 @@ hyper = { workspace = true, features = ["server"] }
hyper-util = { workspace = true }
tower = { workspace = true }
tower-http = { workspace = true, features = ["cors", "trace"] }
actix-web = { workspace = true }

[dev-dependencies]
rstest = { workspace = true }
@@ -1,65 +0,0 @@
pub mod router;

pub use router::{create_router, ApiState};

#[cfg(test)]
mod tests {
    use super::*;
    use gb_messaging::MessageProcessor;
    use axum::Router;
    use tower::ServiceExt;
    use uuid::Uuid;

    #[tokio::test]
    async fn test_api_integration() {
        // Initialize message processor
        let processor = MessageProcessor::new();

        // Create router
        let app: Router = create_router(processor);

        // Test health endpoint
        let response = app
            .clone()
            .oneshot(
                axum::http::Request::builder()
                    .uri("/health")
                    .body(axum::body::Body::empty())
                    .unwrap(),
            )
            .await
            .unwrap();

        assert_eq!(response.status(), axum::http::StatusCode::OK);

        // Test message sending
        let message = gb_core::models::Message {
            id: Uuid::new_v4(),
            customer_id: Uuid::new_v4(),
            instance_id: Uuid::new_v4(),
            conversation_id: Uuid::new_v4(),
            sender_id: Uuid::new_v4(),
            kind: "test".to_string(),
            content: "integration test".to_string(),
            metadata: serde_json::Value::Object(serde_json::Map::new()),
            created_at: Some(chrono::Utc::now()),
            shard_key: Some(0),
        };

        let response = app
            .oneshot(
                axum::http::Request::builder()
                    .method("POST")
                    .uri("/messages")
                    .header("content-type", "application/json")
                    .body(axum::body::Body::from(
                        serde_json::to_string(&message).unwrap(),
                    ))
                    .unwrap(),
            )
            .await
            .unwrap();

        assert_eq!(response.status(), axum::http::StatusCode::OK);
    }
}
@@ -1,22 +1,9 @@
use gb_core::{Error, Result};
use tracing::{info, error};
use std::{net::SocketAddr, sync::Arc};
use sqlx::PgPool;
use redis::Client as RedisClient;
use minio::MinioClient;
use gb_api::PostgresCustomerRepository;
use gb_messaging::MessageProcessor;
use axum::Router;
use tower_http::trace::TraceLayer;

use actix_cors::Cors;
use actix_web::{middleware, web, App, HttpServer};
use dotenv::dotenv;
use tracing_subscriber::fmt::format::FmtSpan;

use crate::config::AppConfig;
use crate::db::{init_kafka, init_minio, init_postgres, init_redis, init_zitadel};
use crate::router::*;
use gb_core::config::AppConfig;
use gb_core::db::{init_kafka, init_minio, init_postgres, init_redis, init_zitadel};

#[actix_web::main]
async fn main() -> std::io::Result<()> {

@@ -59,7 +46,7 @@ async fn main() -> std::io::Result<()> {
        .wrap(middleware::Compress::default())
        .wrap(cors)
        .app_data(app_state.clone())
        .configure(filesrouter::files_router_configure)
        .configure(files_router_configure)
})
.bind((config.server.host.clone(), config.server.port))?
.run()
@@ -1,299 +1,22 @@
use axum::{
    routing::{get, post},
    Router,
    extract::{
        ws::WebSocket,
        Path, State, WebSocketUpgrade,
    },
    extract::{ws::WebSocket, Path, State, WebSocketUpgrade},
    response::IntoResponse,
    Json,
    routing::{get, post},
    Json, Router,
};
use gb_core::{Result, Error, models::*};
use gb_messaging::{MessageProcessor, models::MessageEnvelope};
use std::{sync::Arc, collections::HashMap};
use tokio::sync::Mutex;
use tracing::{instrument, error};
use uuid::Uuid;
use futures_util::StreamExt;

use gb_core::{models::*, Error, Result};
use gb_messaging::{models::MessageEnvelope, MessageProcessor};
use std::{collections::HashMap, sync::Arc};
use tokio::sync::Mutex;
use tracing::{error, instrument};
use uuid::Uuid;

pub fn create_router(message_processor: AppState) -> Router {
    let state = Arc::new(ApiState {
        message_processor: Mutex::new(message_processor),
    });
    Router::new()
        // File & Document Management
        .route("/files/upload", post(upload_file))
        .route("/files/download", post(download))
        .route("/files/copy", post(copy_file))
        .route("/files/move", post(move_file))
        .route("/files/delete", post(delete_file))
        .route("/files/getContents", post(get_file_contents))
        .route("/files/save", post(save_file))
        .route("/files/createFolder", post(create_folder))
        .route("/files/shareFolder", post(share_folder))
        .route("/files/dirFolder", post(dir_folder))
        .route("/files/list", post(get_files))
        .route("/files/search", post(search_files))
        .route("/files/recent", post(get_recent_files))
        .route("/files/favorite", post(toggle_favorite))
        .route("/files/versions", post(get_file_versions))
        .route("/files/restore", post(restore_file_version))
        .route("/files/permissions", post(set_file_permissions))
        .route("/files/quota", get(get_storage_quota))
        .route("/files/shared", get(get_shared_files))
        .route("/files/sync/status", get(get_sync_status))
        .route("/files/sync/start", post(start_sync))
        .route("/files/sync/stop", post(stop_sync))

        // Full bucket access is abstracted behind path variables (src, dest);
        // the complete file manager is accessible via actix-web, with the S3 API underneath.

        // Document Processing
        .route("/docs/merge", post(merge_documents))
        .route("/docs/convert", post(convert_document))
        .route("/docs/fill", post(fill_document))
        .route("/docs/export", post(export_document))
        .route("/docs/import", post(import_document))

        // Groups & Organizations
        .route("/groups/create", post(create_group))
        .route("/groups/update", put(update_group))
        .route("/groups/delete", delete(delete_group))
        .route("/groups/list", get(get_groups))
        .route("/groups/search", post(search_groups))
        .route("/groups/members", get(get_group_members))
        .route("/groups/members/add", post(add_group_member))
        .route("/groups/members/remove", post(remove_group_member))
        .route("/groups/permissions", post(set_group_permissions))
        .route("/groups/settings", post(update_group_settings))
        .route("/groups/analytics", get(get_group_analytics))
        .route("/groups/join/request", post(request_group_join))
        .route("/groups/join/approve", post(approve_join_request))
        .route("/groups/join/reject", post(reject_join_request))
        .route("/groups/invites/send", post(send_group_invite))
        .route("/groups/invites/list", get(list_group_invites))

        // Conversations & Real-time Communication
        .route("/conversations/create", post(create_conversation))
        .route("/conversations/join", post(join_conversation))
        .route("/conversations/leave", post(leave_conversation))
        .route("/conversations/members", get(get_conversation_members))
        .route("/conversations/messages", get(get_messages))
        .route("/conversations/messages/send", post(send_message))
        .route("/conversations/messages/edit", put(edit_message))
        .route("/conversations/messages/delete", delete(delete_message))
        .route("/conversations/messages/react", post(react_to_message))
        .route("/conversations/messages/pin", post(pin_message))
        .route("/conversations/messages/search", post(search_messages))
        .route("/conversations/calls/start", post(start_call))
        .route("/conversations/calls/join", post(join_call))
        .route("/conversations/calls/leave", post(leave_call))
        .route("/conversations/calls/mute", post(mute_participant))
        .route("/conversations/calls/unmute", post(unmute_participant))
        .route("/conversations/screen/share", post(share_screen))
        .route("/conversations/screen/stop", post(stop_screen_share))
        .route("/conversations/recording/start", post(start_recording))
        .route("/conversations/recording/stop", post(stop_recording))
        .route("/conversations/whiteboard/create", post(create_whiteboard))
        .route("/conversations/whiteboard/collaborate", post(collaborate_whiteboard))

        // Communication Services
        .route("/comm/email/send", post(send_email))
        .route("/comm/email/template", post(send_template_email))
        .route("/comm/email/schedule", post(schedule_email))
        .route("/comm/email/cancel", post(cancel_scheduled_email))
        .route("/comm/sms/send", post(send_sms))
        .route("/comm/sms/bulk", post(send_bulk_sms))
        .route("/comm/notifications/send", post(send_notification))
        .route("/comm/notifications/preferences", post(set_notification_preferences))
        .route("/comm/broadcast/send", post(send_broadcast))
        .route("/comm/contacts/import", post(import_contacts))
        .route("/comm/contacts/export", post(export_contacts))
        .route("/comm/contacts/sync", post(sync_contacts))
        .route("/comm/contacts/groups", post(manage_contact_groups))

        // User Management & Authentication
        .route("/users/create", post(create_user))
        .route("/users/update", put(update_user))
        .route("/users/delete", delete(delete_user))
        .route("/users/list", get(get_users))
        .route("/users/search", post(search_users))
        .route("/users/profile", get(get_user_profile))
        .route("/users/profile/update", put(update_profile))
        .route("/users/settings", post(update_user_settings))
        .route("/users/permissions", post(set_user_permissions))
        .route("/users/roles", post(manage_user_roles))
        .route("/users/status", post(update_user_status))
        .route("/users/presence", get(get_user_presence))
        .route("/users/activity", get(get_user_activity))
        .route("/users/security/2fa/enable", post(enable_2fa))
        .route("/users/security/2fa/disable", post(disable_2fa))
        .route("/users/security/devices", get(get_registered_devices))
        .route("/users/security/sessions", get(get_active_sessions))
        .route("/users/notifications/settings", post(update_notification_settings))

        // Calendar & Task Management
        .route("/calendar/events/create", post(create_event))
        .route("/calendar/events/update", put(update_event))
        .route("/calendar/events/delete", delete(delete_event))
        .route("/calendar/events/list", get(get_calendar_events))
        .route("/calendar/events/search", post(search_events))
        .route("/calendar/availability/check", post(check_availability))
        .route("/calendar/schedule/meeting", post(schedule_meeting))
        .route("/calendar/reminders/set", post(set_reminder))
        .route("/tasks/create", post(create_task))
        .route("/tasks/update", put(update_task))
        .route("/tasks/delete", delete(delete_task))
        .route("/tasks/list", get(get_tasks))
        .route("/tasks/assign", post(assign_task))
        .route("/tasks/status/update", put(update_task_status))
        .route("/tasks/priority/set", post(set_task_priority))
        .route("/tasks/dependencies/set", post(set_task_dependencies))

        // Storage & Data Management
        .route("/storage/save", post(save_to_storage))
        .route("/storage/batch", post(save_batch_to_storage))
        .route("/storage/json", post(save_json_to_storage))
        .route("/storage/delete", delete(delete_from_storage))
        .route("/storage/quota/check", get(check_storage_quota))
        .route("/storage/cleanup", post(cleanup_storage))
        .route("/storage/backup/create", post(create_backup))
        .route("/storage/backup/restore", post(restore_backup))
        .route("/storage/archive", post(archive_data))
        .route("/storage/metrics", get(get_storage_metrics))

        // Analytics & Reporting
        .route("/analytics/dashboard", get(get_dashboard_data))
        .route("/analytics/reports/generate", post(generate_report))
        .route("/analytics/reports/schedule", post(schedule_report))
        .route("/analytics/metrics/collect", post(collect_metrics))
        .route("/analytics/insights/generate", post(generate_insights))
        .route("/analytics/trends/analyze", post(analyze_trends))
        .route("/analytics/export", post(export_analytics))

        // System & Administration
        .route("/admin/system/status", get(get_system_status))
        .route("/admin/system/metrics", get(get_system_metrics))
        .route("/admin/logs/view", get(view_logs))
        .route("/admin/logs/export", post(export_logs))
        .route("/admin/config/update", post(update_config))
        .route("/admin/maintenance/schedule", post(schedule_maintenance))
        .route("/admin/backup/create", post(create_system_backup))
        .route("/admin/backup/restore", post(restore_system_backup))
        .route("/admin/users/manage", post(manage_system_users))
        .route("/admin/roles/manage", post(manage_system_roles))
        .route("/admin/quotas/manage", post(manage_quotas))
        .route("/admin/licenses/manage", post(manage_licenses))

        // AI & Machine Learning
        .route("/ai/analyze/text", post(analyze_text))
        .route("/ai/analyze/image", post(analyze_image))
        .route("/ai/generate/text", post(generate_text))
        .route("/ai/generate/image", post(generate_image))
        .route("/ai/translate", post(translate_content))
        .route("/ai/summarize", post(summarize_content))
        .route("/ai/recommend", post(get_recommendations))
        .route("/ai/train/model", post(train_custom_model))
        .route("/ai/predict", post(make_prediction))

        // Security & Compliance
        .route("/security/audit/logs", get(get_audit_logs))
        .route("/security/compliance/check", post(check_compliance))
        .route("/security/threats/scan", post(scan_for_threats))
        .route("/security/access/review", post(review_access))
        .route("/security/encryption/manage", post(manage_encryption))
        .route("/security/certificates/manage", post(manage_certificates))

        // Health & Monitoring
        .route("/health", get(health_check))
        .route("/health/detailed", get(detailed_health_check))
        .route("/monitoring/status", get(get_monitoring_status))
        .route("/monitoring/alerts", get(get_active_alerts))
        .route("/monitoring/metrics", get(get_monitoring_metrics))
        .with_state(state)
    Router::new()
        .route("/monitoring/metrics", get(get_monitoring_metrics))
        .with_state(state)
}

async fn handle_ws_connection(
    ws: WebSocket,
    state: Arc<ApiState>,
) -> Result<()> {
    let (_sender, mut receiver) = ws.split();

    while let Some(Ok(msg)) = receiver.next().await {
        if let Ok(text) = msg.to_text() {
            if let Ok(_envelope) = serde_json::from_str::<MessageEnvelope>(text) {
                let mut processor = state.message_processor.lock().await;
                if let Err(e) = processor.process_messages().await {
                    error!("Failed to process message: {}", e);
                }
            }
        }
    }
    Ok(())
}

#[instrument(skip_all)]
async fn websocket_handler(
    State(state): State<Arc<ApiState>>,
    ws: WebSocketUpgrade,
) -> impl IntoResponse {
    ws.on_upgrade(move |socket| async move {
        let _ = handle_ws_connection(socket, state).await;
    })
}

#[instrument(skip_all)]
async fn send_message(
    State(state): State<Arc<ApiState>>,
    Json(message): Json<Message>,
) -> Result<Json<MessageId>> {
    // Clone the message before using it in envelope
    let envelope = MessageEnvelope {
        id: Uuid::new_v4(),
        message: message.clone(), // Clone here
        metadata: HashMap::new(),
    };

    let mut processor = state.message_processor.lock().await;
    processor.add_message(message) // Use original message here
        .await
        .map_err(|e| Error::internal(format!("Failed to add message: {}", e)))?;

    Ok(Json(MessageId(envelope.id)))
}

#[instrument(skip_all)]
async fn get_message(
    State(_state): State<Arc<ApiState>>,
    Path(_id): Path<Uuid>,
) -> Result<Json<Message>> {
    todo!()
}

#[instrument(skip_all)]
async fn create_room(
    State(_state): State<Arc<ApiState>>,
    Json(_config): Json<RoomConfig>,
) -> Result<Json<Room>> {
    todo!()
}

#[instrument(skip_all)]
async fn get_room(
    State(_state): State<Arc<ApiState>>,
    Path(_id): Path<Uuid>,
) -> Result<Json<Room>> {
    todo!()
}

#[instrument(skip_all)]
async fn join_room(
    State(_state): State<Arc<ApiState>>,
    Path(_id): Path<Uuid>,
    Json(_user_id): Json<Uuid>,
) -> Result<Json<Connection>> {
    todo!()
}
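To make the route table above concrete, a sketch of one handler in the same axum style; DirFolderRequest and DirEntry are illustrative names (the request and response shapes follow the OpenAPI spec earlier in this commit, not the diff itself):

// Sketch: a possible handler for POST /files/dirFolder.
use axum::{extract::State, Json};
use serde::{Deserialize, Serialize};
use std::sync::Arc;

#[derive(Deserialize)]
struct DirFolderRequest {
    #[serde(rename = "folderId")]
    folder_id: String,
}

#[derive(Serialize)]
struct DirEntry {
    name: String,
    r#type: String,
    size: u64,
}

async fn dir_folder(
    State(_state): State<Arc<ApiState>>,
    Json(req): Json<DirFolderRequest>,
) -> Json<Vec<DirEntry>> {
    // A real implementation would list objects under req.folder_id in object storage.
    let _ = req.folder_id;
    Json(Vec::new())
}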
@@ -1,36 +0,0 @@
use rand::seq::SliceRandom;
// The following imports were missing from the original file; Client, Api, Pod,
// and DeleteParams come from the kube and k8s_openapi crates.
use kube::{Api, Client, api::DeleteParams};
use k8s_openapi::api::core::v1::Pod;

pub struct ChaosTest {
    client: Client,
    namespace: String,
}

impl ChaosTest {
    pub async fn new(namespace: String) -> anyhow::Result<Self> {
        let client = Client::try_default().await?;
        Ok(Self { client, namespace })
    }

    pub async fn kill_random_pod(&self) -> anyhow::Result<()> {
        let pods: Api<Pod> = Api::namespaced(self.client.clone(), &self.namespace);
        let pod_list = pods.list(&Default::default()).await?;

        if let Some(pod) = pod_list.items.choose(&mut rand::thread_rng()) {
            if let Some(name) = &pod.metadata.name {
                pods.delete(name, &DeleteParams::default()).await?;
            }
        }

        Ok(())
    }

    pub async fn network_partition(&self) -> anyhow::Result<()> {
        // Network partition test implementation
        Ok(())
    }

    pub async fn resource_exhaustion(&self) -> anyhow::Result<()> {
        // Resource exhaustion test implementation
        Ok(())
    }
}
@@ -1,18 +1,9 @@
use gb_core::{Result, Error};
use mime_guess::{from_path, mime};
use std::path::Path;
use tracing::instrument;

pub struct FileTypeDetector;

impl FileTypeDetector {
    #[instrument]
    pub fn detect_mime_type(path: &Path) -> Result<mime::Mime> {
        from_path(path)
            .first_or_octet_stream()
            .to_owned()
            .into()
    }

    #[instrument(skip(data))]
    pub fn detect_from_bytes(data: &[u8]) -> Result<FileType> {

@@ -59,16 +50,8 @@ pub enum FileType {
mod tests {
    use super::*;
    use rstest::*;
    use std::path::PathBuf;

    #[rstest]
    fn test_detect_mime_type() -> Result<()> {
        let path = PathBuf::from("test.pdf");
        let mime = FileTypeDetector::detect_mime_type(&path)?;
        assert_eq!(mime.type_(), "application");
        assert_eq!(mime.subtype(), "pdf");
        Ok(())
    }

    #[rstest]
    fn test_detect_from_bytes() -> Result<()> {
@@ -1,40 +0,0 @@
pub mod detector;
pub mod processor;

pub use detector::{FileTypeDetector, FileType};
pub use processor::{FileProcessor, ProcessedFile, ProcessedContent};

#[cfg(test)]
mod tests {
    use super::*;
    use gb_core::Result;
    use std::path::PathBuf;

    #[tokio::test]
    async fn test_utils_integration() -> Result<()> {
        // Initialize processor
        let processor = FileProcessor::new()?;

        // Test PDF processing
        let pdf_data = b"%PDF-1.4\nTest PDF";
        let pdf_path = PathBuf::from("test.pdf");
        let processed_pdf = processor.process_file(pdf_data, &pdf_path).await?;
        assert_eq!(processed_pdf.content_type(), "text");

        // Test image processing
        let image_data = [0x89, 0x50, 0x4E, 0x47]; // PNG header
        let image_path = PathBuf::from("test.png");
        let processed_image = processor.process_file(&image_data, &image_path).await?;
        assert_eq!(processed_image.content_type(), "image");

        // Test file type detection
        let detected_type = FileTypeDetector::detect_from_bytes(pdf_data)?;
        assert_eq!(detected_type, FileType::Pdf);

        let mime_type = FileTypeDetector::detect_mime_type(&pdf_path)?;
        assert_eq!(mime_type.type_(), "application");
        assert_eq!(mime_type.subtype(), "pdf");

        Ok(())
    }
}
@@ -1,216 +0,0 @@
use gb_core::{Result, Error};
use gb_document::{PdfProcessor, WordProcessor, ExcelProcessor};
use gb_image::{ImageProcessor, ImageConverter};
use super::detector::{FileTypeDetector, FileType};
use std::path::Path;
use tracing::{instrument, error};
use uuid::Uuid;

pub struct FileProcessor {
    image_processor: ImageProcessor,
}

impl FileProcessor {
    pub fn new() -> Result<Self> {
        Ok(Self {
            image_processor: ImageProcessor::new()?,
        })
    }

    #[instrument(skip(self, data))]
    pub async fn process_file(&self, data: &[u8], path: &Path) -> Result<ProcessedFile> {
        let file_type = FileTypeDetector::detect_from_bytes(data)?;
        let mime_type = FileTypeDetector::detect_mime_type(path)?;

        match file_type {
            FileType::Pdf => {
                let text = PdfProcessor::extract_text(data)?;
                Ok(ProcessedFile {
                    id: Uuid::new_v4(),
                    original_name: path.file_name().unwrap().to_string_lossy().to_string(),
                    mime_type,
                    content: ProcessedContent::Text(text),
                })
            }
            FileType::Word => {
                let text = WordProcessor::extract_text(data)?;
                Ok(ProcessedFile {
                    id: Uuid::new_v4(),
                    original_name: path.file_name().unwrap().to_string_lossy().to_string(),
                    mime_type,
                    content: ProcessedContent::Text(text),
                })
            }
            FileType::Excel => {
                let json = ExcelProcessor::extract_as_json(data)?;
                Ok(ProcessedFile {
                    id: Uuid::new_v4(),
                    original_name: path.file_name().unwrap().to_string_lossy().to_string(),
                    mime_type,
                    content: ProcessedContent::Json(json),
                })
            }
            FileType::Png | FileType::Jpeg | FileType::WebP => {
                let image = self.image_processor.load_image(data)?;
                let text = self.image_processor.extract_text(&image)?;
                Ok(ProcessedFile {
                    id: Uuid::new_v4(),
                    original_name: path.file_name().unwrap().to_string_lossy().to_string(),
                    mime_type,
                    content: ProcessedContent::Image {
                        text,
                        width: image.width(),
                        height: image.height(),
                    },
                })
            }
        }
    }

    #[instrument(skip(self, file))]
    pub async fn convert_file(
        &self,
        file: &ProcessedFile,
        target_type: FileType,
    ) -> Result<Vec<u8>> {
        match (&file.content, target_type) {
            (ProcessedContent::Image { .. }, FileType::Png) => {
                let image = self.image_processor.load_image(file.raw_data())?;
                ImageConverter::to_png(&image)
            }
            (ProcessedContent::Image { .. }, FileType::Jpeg) => {
                let image = self.image_processor.load_image(file.raw_data())?;
                ImageConverter::to_jpeg(&image, 80)
            }
// (The stray heredoc lines below are literal content of the deleted file,
// left over from the script that generated it.)
EOL

# Continuing gb-utils/src/processor.rs
cat >> gb-utils/src/processor.rs << 'EOL'
            (ProcessedContent::Image { .. }, FileType::WebP) => {
                let image = self.image_processor.load_image(file.raw_data())?;
                ImageConverter::to_webp(&image, 80)
            }
            (ProcessedContent::Text(text), FileType::Pdf) => {
                let doc = PdfProcessor::create_document(text)?;
                Ok(doc)
            }
            (ProcessedContent::Text(text), FileType::Word) => {
                let doc = WordProcessor::create_document(text)?;
                Ok(doc)
            }
            (ProcessedContent::Json(json), FileType::Excel) => {
                let data: Vec<Vec<String>> = serde_json::from_value(json.clone())?;
                let headers: Vec<&str> = data[0].iter().map(|s| s.as_str()).collect();
                ExcelProcessor::create_excel(&headers, &data[1..])
            }
            _ => Err(Error::internal(format!(
                "Unsupported conversion: {:?} to {:?}",
                file.content_type(),
                target_type
            ))),
        }
    }
}

#[derive(Debug, Clone)]
pub struct ProcessedFile {
    pub id: Uuid,
    pub original_name: String,
    pub mime_type: mime::Mime,
    pub content: ProcessedContent,
}

#[derive(Debug, Clone)]
pub enum ProcessedContent {
    Text(String),
    Json(serde_json::Value),
    Image {
        text: String,
        width: u32,
        height: u32,
    },
}

impl ProcessedFile {
    pub fn content_type(&self) -> &'static str {
        match self.content {
            ProcessedContent::Text(_) => "text",
            ProcessedContent::Json(_) => "json",
            ProcessedContent::Image { .. } => "image",
        }
    }

    pub fn raw_data(&self) -> &[u8] {
        // This is a placeholder - in a real implementation,
        // we would store the raw data alongside the processed content
        &[]
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use rstest::*;

    #[fixture]
    fn processor() -> FileProcessor {
        FileProcessor::new().unwrap()
    }

    #[rstest]
    #[tokio::test]
    async fn test_process_pdf(processor: FileProcessor) -> Result<()> {
        let pdf_data = b"%PDF-1.4\nTest content";
        let path = Path::new("test.pdf");

        let processed = processor.process_file(pdf_data, path).await?;
        assert_eq!(processed.content_type(), "text");

        if let ProcessedContent::Text(text) = &processed.content {
            assert!(text.contains("Test content"));
        } else {
            panic!("Expected text content");
        }

        Ok(())
    }

    #[rstest]
    #[tokio::test]
    async fn test_process_image(processor: FileProcessor) -> Result<()> {
        let image_data = [0x89, 0x50, 0x4E, 0x47]; // PNG header
        let path = Path::new("test.png");

        let processed = processor.process_file(&image_data, path).await?;
        assert_eq!(processed.content_type(), "image");

        if let ProcessedContent::Image { width, height, .. } = processed.content {
            assert!(width > 0);
            assert!(height > 0);
        } else {
            panic!("Expected image content");
        }

        Ok(())
    }

    #[rstest]
    #[tokio::test]
    async fn test_convert_file(processor: FileProcessor) -> Result<()> {
        let text = "Test conversion";
        let processed = ProcessedFile {
            id: Uuid::new_v4(),
            original_name: "test.txt".to_string(),
            mime_type: mime::TEXT_PLAIN,
            content: ProcessedContent::Text(text.to_string()),
        };

        let pdf_data = processor.convert_file(&processed, FileType::Pdf).await?;
        assert!(!pdf_data.is_empty());

        let word_data = processor.convert_file(&processed, FileType::Word).await?;
        assert!(!word_data.is_empty());

        Ok(())
    }
}
@@ -13,7 +13,6 @@ serde = { workspace = true }
serde_json = { workspace = true }
thiserror = { workspace = true }
tracing = { workspace = true }
minio = "0.1.0"

[dev-dependencies]
rstest = { workspace = true }
gb-vm/src/lib.rs (144 lines)

@@ -1,144 +0,0 @@
use minio::s3::client::Client;
use minio::s3::args::{BucketExistsArgs, MakeBucketArgs, RemoveObjectArgs, GetObjectArgs, PutObjectArgs, ListObjectsArgs};
use minio::s3::creds::StaticProvider;
use minio::s3::error::Error as MinioError;
use minio::s3::types::{BaseUrl, Item};
use std::io::Cursor;
use std::path::Path;

pub struct FileManager {
    client: Client,
    bucket_name: String,
}

impl FileManager {
    pub async fn new(endpoint: &str, access_key: &str, secret_key: &str, bucket_name: &str, use_ssl: bool) -> Result<Self, MinioError> {
        // Create BaseUrl from endpoint
        let base_url = BaseUrl::from_string(endpoint)?;
        let static_provider = StaticProvider::new(
            access_key,
            secret_key,
            None,
        );
        let client = Client::new(base_url.clone(), Some(Box::new(static_provider)), None, None).unwrap();

        Ok(Self {
            client,
            bucket_name: bucket_name.to_string(),
        })
    }

    pub async fn ensure_bucket_exists(&self) -> Result<(), MinioError> {
        let exists = self.client
            .bucket_exists(&BucketExistsArgs::new(&self.bucket_name)?)
            .await?;
        if !exists {
            self.client
                .make_bucket(&MakeBucketArgs::new(&self.bucket_name)?)
                .await?;
        }
        Ok(())
    }

    pub async fn upload_file(&self, path: &str, file_data: Vec<u8>) -> Result<(), MinioError> {
        let reader = Cursor::new(&file_data);
        let file_size = file_data.len() as u64;

        let args = PutObjectArgs::new(
            &self.bucket_name,
            path,
            reader,
            Some(file_size),
            None
        )?;

        self.client.put_object(&args).await?;
        Ok(())
    }

    pub async fn download_file(&self, path: &str) -> Result<Vec<u8>, MinioError> {
        let args = GetObjectArgs::new(&self.bucket_name, path)?;
        let object = self.client.get_object(&args).await?;
        let data = object.bytes().await?;
        Ok(data.to_vec())
    }

    pub async fn copy_file(&self, source_path: &str, destination_path: &str) -> Result<(), MinioError> {
        // Download the source file
        let data = self.download_file(source_path).await?;

        // Upload it to the destination
        let reader = Cursor::new(&data);
        let file_size = data.len() as u64;

        let args = PutObjectArgs::new(
            &self.bucket_name,
            destination_path,
            reader,
            Some(file_size),
            None
        )?;

        self.client.put_object(&args).await?;
        Ok(())
    }

    pub async fn move_file(&self, source_path: &str, destination_path: &str) -> Result<(), MinioError> {
        self.copy_file(source_path, destination_path).await?;
        self.delete_file(source_path).await?;
        Ok(())
    }

    pub async fn delete_file(&self, path: &str) -> Result<(), MinioError> {
        let args = RemoveObjectArgs::new(&self.bucket_name, path)?;
        self.client.remove_object(&args).await?;
        Ok(())
    }

    pub async fn list_files(&self, prefix: &str) -> Result<Vec<String>, MinioError> {
        // Create a predicate function that always returns true
        let predicate = |_: Vec<Item>| -> bool { true };

        let args = ListObjectsArgs::new(&self.bucket_name, &predicate)?;
        let objects = self.client.list_objects(&args).await?;

        // Filter objects based on prefix manually
        let file_names: Vec<String> = objects
            .into_iter()
            .filter(|obj| obj.name().starts_with(prefix))
            .map(|obj| obj.name().to_string())
            .collect();

        Ok(file_names)
    }

    pub async fn get_file_contents(&self, path: &str) -> Result<String, MinioError> {
        let data = self.download_file(path).await?;
        let contents = String::from_utf8(data)
            .map_err(|_| MinioError::InvalidResponse(400, "Invalid UTF-8 sequence".to_string()))?;
        Ok(contents)
    }

    pub async fn create_folder(&self, path: &str) -> Result<(), MinioError> {
        let folder_path = if path.ends_with('/') {
            path.to_string()
        } else {
            format!("{}/", path)
        };

        // Create empty file with folder path
        self.upload_file(&folder_path, vec![]).await
    }

    pub async fn share_folder(&self, path: &str) -> Result<String, MinioError> {
        // This is just a placeholder implementation
        Ok(format!("Folder shared: {}", path))
    }

    pub async fn search_files(&self, prefix: &str, query: &str) -> Result<Vec<String>, MinioError> {
        let files = self.list_files(prefix).await?;
        let results = files.into_iter().filter(|f| f.contains(query)).collect();
        Ok(results)
    }
}