Compare commits
No commits in common. "main" and "modular" have entirely different histories.
228 changed files with 15572 additions and 6580 deletions
@@ -2,35 +2,33 @@ name: GBCI

on:
  push:
    branches: ["main"]
    branches: [ "main" ]
  pull_request:
    branches: ["main"]
    branches: [ "main" ]

jobs:
  build:
    runs-on: gbo

    steps:
      - name: Disable SSL verification (temporary)
        run: git config --global http.sslVerify false

      - uses: actions/checkout@v4

      - name: Install Rust
        uses: msrd0/rust-toolchain@v1
        with:
          toolchain: stable

      - name: Run build
        run: cargo build --workspace --release --locked

      - name: Run build
        run: |
          sudo cp /opt/gbo/bin/system/.env .
          cargo build --locked

      - name: Deploy binary and restart
        run: |
          lxc exec bot:pragmatismo-system -- systemctl stop system

          sudo cp ./target/debug/gbserver /opt/gbo/bin/system
          sudo chmod +x /opt/gbo/bin/system/gbserver

          lxc exec bot:pragmatismo-system -- systemctl start system

      - name: Deploy binary
        run: |
          sudo cp ./target/release/gbserver /opt/gbo/bin/system
          sudo chmod +x /opt/gbo/bin/system/gbserver

          sudo systemctl restart gbserver
.gitignore (vendored, 3 changes)
@@ -1,4 +1,3 @@
target
.env
*.env
work
*.env
.lapce/run.toml (new file, 30 lines)
@@ -0,0 +1,30 @@

# The run config is used for both run mode and debug mode

[[configs]]
# the name of this task
name = "task"

# the type of the debugger. If not set, it can't be debugged but can still be run
# type = "lldb"

# the program to run, e.g. "${workspace}\\target\\debug\\check.exe"
program = ""

# the program arguments, e.g. args = ["arg1", "arg2"], optional
# args = []

# current working directory, optional
# cwd = "${workspace}"

# environment variables, optional
# [configs.env]
# VAR1 = "VAL1"
# VAR2 = "VAL2"

# task to run before the run/debug session is started, optional
# [configs.prelaunch]
# program = "cargo"
# args = [
#     "build",
# ]
.vscode/launch.json (vendored, new file, 79 lines)
@@ -0,0 +1,79 @@
{
  "version": "0.2.0",
  "configurations": [
    {
      "type": "lldb",
      "request": "launch",
      "name": "Debug GB API Server",
      "cargo": {
        "args": [
          "build",
          "--bin=gb-server"
        ],
        "filter": {
          "name": "gb-server",
          "kind": "bin"
        }
      },
      "args": [],
      "cwd": "${workspaceFolder}",
      "env": {
        "RUST_LOG": "info",
        "DATABASE_URL": "postgres://gbuser:gbpassword@localhost:5432/generalbots",
        "REDIS_URL": "redis://localhost:6379"
      }
    },
    {
      "type": "lldb",
      "request": "launch",
      "name": "Debug unit tests in executable 'gb-server'",
      "cargo": {
        "args": [
          "test",
          "--no-run",
          "--lib",
          "--package=gb-server"
        ],
        "filter": {
          "name": "gb-server",
          "kind": "bin"
        }
      },
      "args": [
        "--test-threads=1"
      ],
      "cwd": "${workspaceFolder}",
      "env": {
        "RUST_LOG": "info"
      }
    },
    {
      "type": "lldb",
      "request": "launch",
      "name": "Debug Integration Tests",
      "cargo": {
        "args": [
          "test",
          "--no-run",
          "--lib",
          "--package=gb-server"
        ],
        "filter": {
          "name": "integration",
          "kind": "test"
        }
      },
      "args": [],
      "cwd": "${workspaceFolder}",
      "env": {
        "RUST_LOG": "info"
      }
    }
  ],
  "compounds": [
    {
      "name": "API Server + Debug",
      "configurations": [
        "Debug GB API Server"
      ]
    }
  ]
}
.vscode/settings.json (vendored, new file, 7 lines)
@@ -0,0 +1,7 @@
{
  "lldb.executable": "/usr/bin/lldb",
  "lldb.showDisassembly": "never",
  "lldb.dereferencePointers": true,
  "lldb.consoleMode": "commands",
  "rust-testExplorer.cargoTestExtraArgs": ["--", "--nocapture"]
}
.vscode/tasks.json (vendored, new file, 15 lines)
@@ -0,0 +1,15 @@
{
  "version": "2.0.0",
  "tasks": [
    {
      "label": "build",
      "type": "shell",
      "command": "cargo",
      "args": ["build"],
      "group": {
        "kind": "build",
        "isDefault": true
      }
    }
  ]
}
@@ -1,14 +0,0 @@
[
  {
    "label": "Build & Debug native binary",
    "build": {
      "command": "cargo",
      "args": ["build"]
    },
    "program": "$ZED_WORKTREE_ROOT/target/debug/gbserver",

    "sourceLanguages": ["rust"],
    "request": "launch",
    "adapter": "CodeLLDB"
  }
]
Cargo.lock (generated, 7256 changes)
File diff suppressed because it is too large.
Cargo.toml (185 changes)
@@ -1,62 +1,143 @@
[package]
name = "gbserver"

[workspace]
resolver = "2"
members = [
    "gb-core",
    "gb-server",
    "gb-media",
    "gb-messaging",
    "gb-storage",
    "gb-monitoring",
    "gb-auth",
    "gb-testing",
    "gb-migrations",
    "gb-cloud",
    "gb-vm",
    "gb-automation",
    "gb-image",
    "gb-utils",
    "gb-document",
    "gb-file",
    "gb-llm",
    "gb-calendar",
    "gb-infra",
]

[workspace.package]
version = "0.1.0"
edition = "2021"
authors = ["Rodrigo Rodriguez <me@rodrigorodriguez.com>"]
description = "General Bots Server"
license = "AGPL"
repository = "https://alm.pragmatismo.com.br/generalbots/gbserver"
authors = ["General Bots Maintainers"]
license = "MIT"

[features]
default = []
local_llm = []

[dependencies]
actix-cors = "0.6"
actix-multipart = "0.6"
actix-web = "4"
actix-ws = "0.3.0"
thirtyfour = { version = "0.30" }
downloader = "0.2.8"
anyhow = "1.0"
async-stream = "0.3"
bytes = "1.1"
chrono = { version = "0.4", features = ["serde"] }
dotenv = "0.15"
env_logger = "0.10"

[workspace.dependencies]
# Core async runtime and utilities
tokio = { version = "1.34", features = ["full"] }
tokio-tungstenite = { version = "0.24.0", features = ["native-tls"] }
tungstenite = "0.20"
tokio-test = "0.4"
tokio-stream = "0.1.17"
async-trait = "0.1"
futures = "0.3"
futures-util = "0.3"
imap = "2.0"
langchain-rust = "4.4.3"
lettre = { version = "0.10", features = [
    "smtp-transport",
    "builder",
    "tokio1",
    "tokio1-native-tls",
] }
futures-util = "0.3" # Add futures-util here
parking_lot = "0.12"
bytes = "1.0"
log = "0.4"
mailparse = "0.13"
env_logger = "0.10"
ctrlc = "3.2"

# Web framework and servers
axum = { version = "0.7.9", features = ["ws", "multipart"] }
tower = "0.4"
tower-http = { version = "0.5", features = ["cors", "trace", "fs"] }
hyper = { version = "1.1", features = ["full"] }
hyper-util = { version = "0.1" }
tonic = { version = "0.10", features = ["tls", "transport"] }
actix-multipart = "0.4"

# Database and storage
sqlx = { version = "0.7", features = ["runtime-tokio-native-tls", "postgres", "mysql", "sqlite", "uuid", "time", "json"] }
redis = { version = "0.24", features = ["tokio-comp", "connection-manager"] }
tikv-client = "0.3"
sea-orm = { version = "0.12", features = ["sqlx-postgres", "runtime-tokio-native-tls", "macros"] }

# Message queues
rdkafka = { version = "0.36", features = ["cmake-build", "ssl"] }
lapin = "2.3"

# Drive, Serialization and data formats
minio = { git = "https://github.com/minio/minio-rs", branch = "master" }
native-tls = "0.2"
reqwest = { version = "0.11", features = ["json", "stream"] }
rhai = "1.22.2"
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
smartstring = "1.0" # Use the latest version from crates.io
sqlx = { version = "0.7", features = [
    "time",
    "uuid",
    "runtime-tokio-rustls",
    "postgres",
    "chrono",
] }
tempfile = "3"
tokio = { version = "1", features = ["full"] }
tokio-stream = "0.1.17"
protobuf = "3.3"
prost = "0.12"
csv = "1.3"

# WebRTC and media processing
webrtc = "0.9"
gstreamer = "0.21"
opus = "0.3"
image = "0.24"

# Authentication and security
zitadel = { version = "5.5.1", features = ["api-common", "api-auth-v1", "zitadel-auth-v1", "credentials", "interceptors"] }
jsonwebtoken = "9.2"

argon2 = "0.5"
ring = "0.17"
reqwest = { version = "0.11", features = ["json", "stream", "blocking"] }

# Cloud services
aws-sdk-core = "1.1"
azure_core = "0.15"
azure_identity = "0.15"
google-cloud-storage = "0.16"

# Monitoring and metrics
prometheus = "0.13.0"
opentelemetry = { version = "0.20", features = ["rt-tokio"] }
tracing = "0.1"
tracing-subscriber = { version = "0.3", features = ["fmt"] }
scraper = "0.18"
urlencoding = "2.1"
tracing-subscriber = { version = "0.3", features = ["env-filter"] }

# Testing
criterion = "0.5"
mockall = "0.12"
fake = { version = "2.9", features = ["derive"] }
rstest = "0.18"

# Utilities
actix-web = "4.0.1"
uuid = { version = "1.6", features = ["serde", "v4"] }
chrono = { version = "0.4", features = ["serde"] }
thiserror = "1.0"
anyhow = "1.0"
regex = "1.10"
uuid = { version = "1.4", features = ["serde", "v4"] } # v4, v7, etc. as needed
zip = "4.3.0"
url = "2.5"
rand = "0.8"
base64 = "0.21"
semver = "1.0"
walkdir = "2.4"
tempfile = "3.9"
dotenv = "0.15"
lettre = "0.10"
sanitize-filename = "0.3"

# Web assembly
wasm-bindgen = "0.2"
js-sys = "0.3"
web-sys = { version = "0.3", features = ["WebSocket", "WebRtcPeerConnection"] }

# Natural language processing
rust-bert = "0.21"
tokenizers = "0.15"
whatlang = "0.16"

# PDF and document processing
pdf = "0.8"
docx = "1.1"
zip = "0.6"
tar = "0.4"
flate2 = "1.0"

[workspace.metadata]
msrv = "1.70.0"
api.json (new file, 537 lines)
@@ -0,0 +1,537 @@
openapi: 3.0.0
info:
  title: General Bots API
  description: API for managing files, documents, groups, conversations, and more.
  version: 1.0.0
servers:
  - url: https://api.generalbots.com/v1
    description: Production server
paths:
  /files/upload:
    post:
      summary: Upload a file
      operationId: uploadFile
      requestBody:
        required: true
        content:
          multipart/form-data:
            schema:
              type: object
              properties:
                file:
                  type: string
                  format: binary
      responses:
        '200':
          description: File uploaded successfully
          content:
            application/json:
              schema:
                type: object
                properties:
                  fileId:
                    type: string
                  url:
                    type: string

  /files/download:
    post:
      summary: Download a file
      operationId: downloadFile
      requestBody:
        required: true
        content:
          application/json:
            schema:
              type: object
              properties:
                fileId:
                  type: string
      responses:
        '200':
          description: File downloaded successfully
          content:
            application/octet-stream:
              schema:
                type: string
                format: binary

  /files/copy:
    post:
      summary: Copy a file
      operationId: copyFile
      requestBody:
        required: true
        content:
          application/json:
            schema:
              type: object
              properties:
                sourcePath:
                  type: string
                destinationPath:
                  type: string
      responses:
        '200':
          description: File copied successfully
          content:
            application/json:
              schema:
                type: object
                properties:
                  message:
                    type: string

  /files/move:
    post:
      summary: Move a file
      operationId: moveFile
      requestBody:
        required: true
        content:
          application/json:
            schema:
              type: object
              properties:
                sourcePath:
                  type: string
                destinationPath:
                  type: string
      responses:
        '200':
          description: File moved successfully
          content:
            application/json:
              schema:
                type: object
                properties:
                  message:
                    type: string

  /files/delete:
    post:
      summary: Delete a file
      operationId: deleteFile
      requestBody:
        required: true
        content:
          application/json:
            schema:
              type: object
              properties:
                fileId:
                  type: string
      responses:
        '200':
          description: File deleted successfully
          content:
            application/json:
              schema:
                type: object
                properties:
                  message:
                    type: string

  /files/getContents:
    post:
      summary: Get file contents
      operationId: getFileContents
      requestBody:
        required: true
        content:
          application/json:
            schema:
              type: object
              properties:
                fileId:
                  type: string
      responses:
        '200':
          description: File contents retrieved successfully
          content:
            application/json:
              schema:
                type: object
                properties:
                  contents:
                    type: string

  /files/save:
    post:
      summary: Save a file
      operationId: saveFile
      requestBody:
        required: true
        content:
          application/json:
            schema:
              type: object
              properties:
                fileId:
                  type: string
                contents:
                  type: string
      responses:
        '200':
          description: File saved successfully
          content:
            application/json:
              schema:
                type: object
                properties:
                  message:
                    type: string

  /files/createFolder:
    post:
      summary: Create a folder
      operationId: createFolder
      requestBody:
        required: true
        content:
          application/json:
            schema:
              type: object
              properties:
                folderName:
                  type: string
                parentFolderId:
                  type: string
      responses:
        '200':
          description: Folder created successfully
          content:
            application/json:
              schema:
                type: object
                properties:
                  folderId:
                    type: string

  /files/shareFolder:
    post:
      summary: Share a folder
      operationId: shareFolder
      requestBody:
        required: true
        content:
          application/json:
            schema:
              type: object
              properties:
                folderId:
                  type: string
                userIds:
                  type: array
                  items:
                    type: string
      responses:
        '200':
          description: Folder shared successfully
          content:
            application/json:
              schema:
                type: object
                properties:
                  message:
                    type: string

  /files/dirFolder:
    post:
      summary: List folder contents
      operationId: dirFolder
      requestBody:
        required: true
        content:
          application/json:
            schema:
              type: object
              properties:
                folderId:
                  type: string
      responses:
        '200':
          description: Folder contents retrieved successfully
          content:
            application/json:
              schema:
                type: array
                items:
                  type: object
                  properties:
                    name:
                      type: string
                    type:
                      type: string
                    size:
                      type: integer

  /files/list:
    post:
      summary: List files
      operationId: getFiles
      requestBody:
        required: true
        content:
          application/json:
            schema:
              type: object
              properties:
                folderId:
                  type: string
      responses:
        '200':
          description: Files listed successfully
          content:
            application/json:
              schema:
                type: array
                items:
                  type: object
                  properties:
                    name:
                      type: string
                    type:
                      type: string
                    size:
                      type: integer

  /files/search:
    post:
      summary: Search files
      operationId: searchFiles
      requestBody:
        required: true
        content:
          application/json:
            schema:
              type: object
              properties:
                query:
                  type: string
      responses:
        '200':
          description: Files searched successfully
          content:
            application/json:
              schema:
                type: array
                items:
                  type: object
                  properties:
                    name:
                      type: string
                    type:
                      type: string
                    size:
                      type: integer

  /files/recent:
    post:
      summary: Get recent files
      operationId: getRecentFiles
      responses:
        '200':
          description: Recent files retrieved successfully
          content:
            application/json:
              schema:
                type: array
                items:
                  type: object
                  properties:
                    name:
                      type: string
                    type:
                      type: string
                    size:
                      type: integer

  /files/favorite:
    post:
      summary: Toggle favorite status of a file
      operationId: toggleFavorite
      requestBody:
        required: true
        content:
          application/json:
            schema:
              type: object
              properties:
                fileId:
                  type: string
      responses:
        '200':
          description: Favorite status toggled successfully
          content:
            application/json:
              schema:
                type: object
                properties:
                  isFavorite:
                    type: boolean

  /files/versions:
    post:
      summary: Get file versions
      operationId: getFileVersions
      requestBody:
        required: true
        content:
          application/json:
            schema:
              type: object
              properties:
                fileId:
                  type: string
      responses:
        '200':
          description: File versions retrieved successfully
          content:
            application/json:
              schema:
                type: array
                items:
                  type: object
                  properties:
                    versionId:
                      type: string
                    timestamp:
                      type: string
                    size:
                      type: integer

  /files/restore:
    post:
      summary: Restore a file version
      operationId: restoreFileVersion
      requestBody:
        required: true
        content:
          application/json:
            schema:
              type: object
              properties:
                fileId:
                  type: string
                versionId:
                  type: string
      responses:
        '200':
          description: File version restored successfully
          content:
            application/json:
              schema:
                type: object
                properties:
                  message:
                    type: string

  /files/permissions:
    post:
      summary: Set file permissions
      operationId: setFilePermissions
      requestBody:
        required: true
        content:
          application/json:
            schema:
              type: object
              properties:
                fileId:
                  type: string
                permissions:
                  type: object
      responses:
        '200':
          description: File permissions updated successfully
          content:
            application/json:
              schema:
                type: object
                properties:
                  message:
                    type: string

  /files/quota:
    get:
      summary: Get storage quota
      operationId: getStorageQuota
      responses:
        '200':
          description: Storage quota retrieved successfully
          content:
            application/json:
              schema:
                type: object
                properties:
                  used:
                    type: integer
                  total:
                    type: integer

  /files/shared:
    get:
      summary: Get shared files
      operationId: getSharedFiles
      responses:
        '200':
          description: Shared files retrieved successfully
          content:
            application/json:
              schema:
                type: array
                items:
                  type: object
                  properties:
                    name:
                      type: string
                    type:
                      type: string
                    size:
                      type: integer

  /files/sync/status:
    get:
      summary: Get sync status
      operationId: getSyncStatus
      responses:
        '200':
          description: Sync status retrieved successfully
          content:
            application/json:
              schema:
                type: object
                properties:
                  status:
                    type: string

  /files/sync/start:
    post:
      summary: Start sync
      operationId: startSync
      responses:
        '200':
          description: Sync started successfully
          content:
            application/json:
              schema:
                type: object
                properties:
                  message:
                    type: string

  /files/sync/stop:
    post:
      summary: Stop sync
      operationId: stopSync
      responses:
        '200':
          description: Sync stopped successfully
          content:
            application/json:
              schema:
                type: object
                properties:
                  message:
                    type: string
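The operations above are plain JSON-over-POST. A hedged client sketch in Rust against the production URL from the spec (assumes tokio, reqwest with the "json" feature, and serde_json from the workspace dependencies; the folderId value is illustrative):

// Call POST /files/list and print each entry's name and size.
#[tokio::main]
async fn main() -> Result<(), reqwest::Error> {
    let client = reqwest::Client::new();
    let files: Vec<serde_json::Value> = client
        .post("https://api.generalbots.com/v1/files/list")
        .json(&serde_json::json!({ "folderId": "root" })) // illustrative folder id
        .send()
        .await?
        .json()
        .await?;

    for f in &files {
        println!("{} ({} bytes)", f["name"], f["size"]);
    }
    Ok(())
}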
deploy.sh (new executable file, 12 lines)
@@ -0,0 +1,12 @@
#!/bin/bash
set -e

echo "Deploying General Bots platform..."

# Create DB.

cargo run -p gb-migrations --bin migrations

echo "Deployment completed successfully!"
echo "Please wait for all pods to be ready..."
gb-auth/Cargo.toml (new file, 58 lines)
@@ -0,0 +1,58 @@
[package]
name = "gb-auth"
version = { workspace = true }
edition = { workspace = true }
authors = { workspace = true }
license = { workspace = true }

[dependencies]
gb-core = { path = "../gb-core" }

# Authentication & Security
jsonwebtoken = { workspace = true }
argon2 = "0.5"
rand = { version = "0.8", features = ["std"] }
oauth2 = "4.4"
openid = "0.12"
tokio-openssl = "0.6"
ring = "0.17"

# Async Runtime
tokio = { workspace = true }
async-trait = { workspace = true }

# Database
sqlx = { version = "0.7", features = ["runtime-tokio-native-tls", "postgres", "uuid", "chrono", "json"] }
redis = { version = "0.24", features = ["tokio-comp", "json"] }

# Serialization
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"

# Error Handling
thiserror = "1.0"

# Logging & Metrics
tracing = { workspace = true }

# Utils
chrono = { version = "0.4", features = ["serde"] }
uuid = { version = "1.6", features = ["serde", "v4"] }
validator = { version = "0.16", features = ["derive"] }

# Web Framework
axum = { version = "0.7.9" }
axum-extra = { version = "0.7" } # Add headers feature
tower = "0.4"
tower-http = { version = "0.5", features = ["auth", "cors", "trace"] }
headers = "0.3"
tokio-stream = { workspace = true }

[dev-dependencies]
rstest = "0.18"
tokio-test = "0.4"
mockall = "0.12"
axum-extra = { version = "0.7" }
sqlx = { version = "0.7", features = ["runtime-tokio-native-tls", "postgres", "uuid", "chrono", "json"] }
gb-auth/config/auth_config.yaml (new file, 23 lines)
@@ -0,0 +1,23 @@
jwt:
  secret: your_jwt_secret_key_here
  expiration: 3600 # 1 hour in seconds

password:
  min_length: 8
  require_uppercase: true
  require_lowercase: true
  require_numbers: true
  require_special: true

oauth:
  providers:
    google:
      client_id: your_google_client_id
      client_secret: your_google_client_secret
    github:
      client_id: your_github_client_id
      client_secret: your_github_client_secret

redis:
  url: redis://localhost:6379
  session_ttl: 86400 # 24 hours in seconds
gb-auth/src/error.rs (new file, 26 lines)
@@ -0,0 +1,26 @@
use gb_core::Error as CoreError;
use redis::RedisError;
use sqlx::Error as SqlxError;
use thiserror::Error;

#[derive(Debug, Error)]
pub enum AuthError {
    #[error("Invalid token")]
    InvalidToken,
    #[error("Invalid credentials")]
    InvalidCredentials,
    #[error("Database error: {0}")]
    Database(#[from] SqlxError),
    #[error("Redis error: {0}")]
    Redis(#[from] RedisError),
    #[error("Internal error: {0}")]
    Internal(String),
}

impl From<CoreError> for AuthError {
    fn from(err: CoreError) -> Self {
        match err {
            CoreError { .. } => AuthError::Internal(err.to_string()),
        }
    }
}
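With the #[from] conversions in place, `?` promotes sqlx and redis errors into AuthError automatically. A minimal sketch (the helper function and query are hypothetical, not part of the diff):

// Hypothetical helper: sqlx::Error flows into AuthError::Database via #[from].
async fn fetch_user_id(pool: &sqlx::PgPool, email: &str) -> Result<uuid::Uuid, AuthError> {
    let row: (uuid::Uuid,) = sqlx::query_as("SELECT id FROM users WHERE email = $1")
        .bind(email)
        .fetch_one(pool)
        .await?; // sqlx::Error -> AuthError::Database
    Ok(row.0)
}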
gb-auth/src/errors.rs (new file, 24 lines)
@@ -0,0 +1,24 @@
use gb_core::Error as CoreError;
use thiserror::Error;

#[derive(Debug, Error)]
pub enum AuthError {
    #[error("Invalid token")]
    InvalidToken,
    #[error("Invalid credentials")]
    InvalidCredentials,
    #[error("Database error: {0}")]
    Database(#[from] sqlx::Error),
    #[error("Redis error: {0}")]
    Redis(#[from] redis::RedisError),
    #[error("Internal error: {0}")]
    Internal(String),
}

impl From<CoreError> for AuthError {
    fn from(err: CoreError) -> Self {
        match err {
            CoreError { .. } => AuthError::Internal(err.to_string()),
        }
    }
}
gb-auth/src/handlers/auth_handler.rs (new file, empty)

gb-auth/src/handlers/mod.rs (new file, 2 lines)
@@ -0,0 +1,2 @@
mod auth_handler;
gb-auth/src/lib.rs (new file, 7 lines)
@@ -0,0 +1,7 @@
mod error;
pub mod handlers;
pub mod models;
pub mod services; // Make services module public
pub mod middleware;

pub use error::AuthError;
gb-auth/src/middleware/auth_middleware.rs (new file, 33 lines)
@@ -0,0 +1,33 @@
use axum::{
    http::{Request, Response},
    middleware::Next,
    body::Body,
};
use headers::{Authorization, authorization::Bearer};
use jsonwebtoken::{decode, DecodingKey, Validation};
use serde::{Serialize, Deserialize};
use crate::AuthError;

#[derive(Debug, Serialize, Deserialize)]
struct Claims {
    sub: String,
    exp: i64,
}

pub async fn auth_middleware(
    auth: Authorization<Bearer>,
    request: Request<Body>,
    next: Next,
) -> Result<Response<Body>, AuthError> {
    let token = auth.token();
    let key = DecodingKey::from_secret(b"secret");
    let validation = Validation::default();

    match decode::<Claims>(token, &key, &validation) {
        Ok(_claims) => {
            let response = next.run(request).await;
            Ok(response)
        }
        Err(_) => Err(AuthError::InvalidToken),
    }
}
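The middleware decodes HS256 tokens signed with the literal secret b"secret" and the default validation, which requires an exp claim. A hedged sketch of minting a token this decoder would accept (the function and claim values are illustrative; a real deployment would load the secret from configuration):

use jsonwebtoken::{encode, EncodingKey, Header};

// Claims mirror the struct above: sub + exp. The hard-coded b"secret"
// matches the DecodingKey in the middleware.
fn mint_token(user_id: &str) -> Result<String, jsonwebtoken::errors::Error> {
    let claims = serde_json::json!({
        "sub": user_id,
        "exp": chrono::Utc::now().timestamp() + 3600, // expires in 1 hour
    });
    encode(&Header::default(), &claims, &EncodingKey::from_secret(b"secret"))
}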
gb-auth/src/middleware/mod.rs (new file, 3 lines)
@@ -0,0 +1,3 @@
mod auth_middleware;

pub use auth_middleware::*;
gb-auth/src/models/auth.rs (new file, 15 lines)
@@ -0,0 +1,15 @@
use serde::{Deserialize, Serialize};

#[derive(Debug, Deserialize)]
pub struct LoginRequest {
    pub email: String,
    pub password: String,
}

#[derive(Debug, Serialize)]
pub struct LoginResponse {
    pub access_token: String,
    pub refresh_token: String,
    pub token_type: String,
    pub expires_in: i64,
}
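LoginRequest only derives Deserialize and LoginResponse only Serialize, matching their one-way roles. A quick serde round-trip sketch (field values are made up):

// Deserialize an incoming login payload, then serialize a response.
fn demo() -> serde_json::Result<()> {
    let req: LoginRequest =
        serde_json::from_str(r#"{"email":"user@example.com","password":"hunter2"}"#)?;
    println!("login attempt for {}", req.email);

    let resp = LoginResponse {
        access_token: "jwt-goes-here".into(),
        refresh_token: "refresh-goes-here".into(),
        token_type: "Bearer".into(),
        expires_in: 3600,
    };
    println!("{}", serde_json::to_string(&resp)?);
    Ok(())
}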
gb-auth/src/models/mod.rs (new file, 4 lines)
@@ -0,0 +1,4 @@
mod auth;
pub mod user;

pub use auth::{LoginRequest, LoginResponse};
gb-auth/src/models/user.rs (new file, 48 lines)
@@ -0,0 +1,48 @@
use serde::{Serialize, Deserialize};
use uuid::Uuid;
use chrono::{DateTime, Utc};

#[derive(Debug, Serialize, Deserialize)]
pub enum UserRole {
    Admin,
    User,
    Guest,
}

#[derive(Debug, Serialize, Deserialize)]
pub enum UserStatus {
    Active,
    Inactive,
    Suspended,
}

impl From<String> for UserRole {
    fn from(s: String) -> Self {
        match s.to_lowercase().as_str() {
            "admin" => UserRole::Admin,
            "guest" => UserRole::Guest,
            _ => UserRole::User,
        }
    }
}

impl From<String> for UserStatus {
    fn from(s: String) -> Self {
        match s.to_lowercase().as_str() {
            "inactive" => UserStatus::Inactive,
            "suspended" => UserStatus::Suspended,
            _ => UserStatus::Active,
        }
    }
}

#[derive(Debug, Serialize, Deserialize)]
pub struct DbUser {
    pub id: Uuid,
    pub email: String,
    pub password_hash: String,
    pub role: UserRole,
    pub status: UserStatus,
    pub created_at: DateTime<Utc>,
    pub updated_at: DateTime<Utc>,
}
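The From<String> impls are infallible: matching is case-insensitive and unknown strings fall through to the catch-all arms (User and Active), so raw database strings convert without error handling. For instance:

fn demo() {
    let role: UserRole = String::from("ADMIN").into();       // UserRole::Admin
    let status: UserStatus = String::from("retired").into(); // UserStatus::Active (fallback)
    println!("{:?} {:?}", role, status);
}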
gb-auth/src/services/auth_service.rs (new file, empty)
gb-auth/src/services/mod.rs (new file, empty)

gb-auth/src/utils/mod.rs (new file, 1 line)
@@ -0,0 +1 @@
// Auth utilities module

gb-auth/tests/auth_service_tests.rs (new file, empty)
gb-automation/Cargo.toml (new file, 28 lines)
@@ -0,0 +1,28 @@
[package]
name = "gb-automation"
version = { workspace = true }
edition = { workspace = true }
authors = { workspace = true }
license = { workspace = true }

[dependencies]
gb-core = { path = "../gb-core" }
image = { version = "0.24", features = ["webp", "jpeg", "png", "gif"] }
chromiumoxide = { version = "0.5", features = ["tokio-runtime"] }
futures-util = "0.3"
async-trait = { workspace = true }
tokio = { workspace = true }
serde = { workspace = true }
serde_json = { workspace = true }
thiserror = { workspace = true }
tracing = { workspace = true }
uuid = { workspace = true }
regex = "1.10"
fantoccini = "0.19"
headless_chrome = "1.0"
async-recursion = "1.0"

[dev-dependencies]
rstest = { workspace = true }
tokio-test = "0.4"
mock_instant = "0.2"
gb-automation/src/lib.rs (new file, 4 lines)
@@ -0,0 +1,4 @@
mod web;

pub use chromiumoxide::element::Element;
pub use web::WebAutomation;
gb-automation/src/process.rs (new file, 127 lines)
@@ -0,0 +1,127 @@
use std::{
    path::{Path, PathBuf},
    process::{Child, Command, Stdio},
};
use tokio::sync::Mutex;
use tracing::{error, instrument};
use uuid::Uuid;
use gb_core::{Error, Result};

#[derive(Debug)]
struct Process {
    id: Uuid,
    handle: Child,
}

pub struct ProcessAutomation {
    working_dir: PathBuf,
    processes: Mutex<Vec<Process>>,
}

impl ProcessAutomation {
    pub fn new(working_dir: impl AsRef<Path>) -> Self {
        Self {
            working_dir: working_dir.as_ref().to_path_buf(),
            processes: Mutex::new(Vec::new()),
        }
    }

    #[instrument(skip(self, command))]
    pub async fn execute(&self, command: &str, args: &[&str]) -> Result<String> {
        let output = Command::new(command)
            .args(args)
            .current_dir(&self.working_dir)
            .stdout(Stdio::piped())
            .stderr(Stdio::piped())
            .output()
            .map_err(|e| Error::internal(format!("Failed to execute command: {}", e)))?;

        if !output.status.success() {
            let error = String::from_utf8_lossy(&output.stderr);
            return Err(Error::internal(format!("Command failed: {}", error)));
        }

        let stdout = String::from_utf8_lossy(&output.stdout).to_string();
        Ok(stdout)
    }

    pub async fn spawn(&self, command: &str, args: &[&str]) -> Result<Uuid> {
        let child = Command::new(command)
            .args(args)
            .current_dir(&self.working_dir)
            .stdout(Stdio::piped())
            .stderr(Stdio::piped())
            .spawn()
            .map_err(|e| Error::internal(format!("Failed to spawn process: {}", e)))?;

        let id = Uuid::new_v4();
        let mut processes = self.processes.lock().await;
        processes.push(Process { id, handle: child });

        Ok(id)
    }

    pub async fn kill(&self, id: Uuid) -> Result<()> {
        let mut processes = self.processes.lock().await;

        if let Some(index) = processes.iter().position(|p| p.id == id) {
            let mut process = processes.remove(index);
            process.handle.kill()
                .map_err(|e| Error::internal(format!("Failed to kill process: {}", e)))?;
        }
        Ok(())
    }

    pub async fn cleanup(&self) -> Result<()> {
        let mut processes = self.processes.lock().await;

        for process in processes.iter_mut() {
            if let Err(e) = process.handle.kill() {
                error!("Failed to kill process {}: {}", process.id, e);
            }
        }

        processes.clear();
        Ok(())
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use tempfile::tempdir;

    fn automation() -> ProcessAutomation {
        let dir = tempdir().unwrap();
        ProcessAutomation::new(dir.path())
    }

    #[tokio::test]
    async fn test_execute() -> Result<()> {
        let automation = automation();
        let output = automation.execute("echo", &["Hello, World!"]).await?;
        assert!(output.contains("Hello, World!"));
        Ok(())
    }

    #[tokio::test]
    async fn test_spawn_and_kill() -> Result<()> {
        let automation = automation();
        let id = automation.spawn("sleep", &["1"]).await?;
        automation.kill(id).await?;
        Ok(())
    }

    #[tokio::test]
    async fn test_cleanup() -> Result<()> {
        let automation = automation();
        automation.spawn("sleep", &["1"]).await?;
        automation.spawn("sleep", &["2"]).await?;
        automation.cleanup().await?;

        let processes = automation.processes.lock().await;
        assert!(processes.is_empty());

        Ok(())
    }
}
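A hedged usage sketch of the ProcessAutomation API above (the working directory and commands are illustrative):

// Run a one-shot command, then spawn and reap a long-lived process.
#[tokio::main]
async fn main() -> gb_core::Result<()> {
    let automation = ProcessAutomation::new("/tmp");

    let out = automation.execute("echo", &["hello"]).await?;
    println!("stdout: {out}");

    let id = automation.spawn("sleep", &["30"]).await?;
    automation.kill(id).await?;  // stop it early
    automation.cleanup().await?; // kill anything still tracked
    Ok(())
}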
gb-automation/src/web.rs (new file, 61 lines)
@@ -0,0 +1,61 @@
use chromiumoxide::{Browser, Element};
use chromiumoxide::page::Page;
use chromiumoxide::browser::BrowserConfig;
use futures_util::StreamExt;
use gb_core::{Error, Result};
use std::time::Duration;

pub struct WebAutomation {
    browser: Browser,
}

impl WebAutomation {
    pub async fn new() -> Result<Self> {
        let config = BrowserConfig::builder()
            .build()
            .map_err(|e| Error::internal(e.to_string()))?;

        let (browser, handler) = Browser::launch(config)
            .await
            .map_err(|e| Error::internal(e.to_string()))?;

        // Spawn the handler in the background
        tokio::spawn(async move {
            handler.for_each(|_| async {}).await;
        });

        Ok(Self { browser })
    }

    pub async fn new_page(&self) -> Result<Page> {
        self.browser
            .new_page("about:blank")
            .await
            .map_err(|e| Error::internal(e.to_string()))
    }

    pub async fn navigate(&self, page: &Page, url: &str) -> Result<()> {
        page.goto(url)
            .await
            .map_err(|e| Error::internal(e.to_string()))?;
        Ok(())
    }

    pub async fn take_screenshot(&self, page: &Page) -> Result<Vec<u8>> {
        let params = chromiumoxide::page::ScreenshotParams::builder().build();

        page.screenshot(params)
            .await
            .map_err(|e| Error::internal(e.to_string()))
    }

    pub async fn find_element(&self, page: &Page, selector: &str, timeout: Duration) -> Result<Element> {
        tokio::time::timeout(
            timeout,
            page.find_element(selector)
        )
        .await
        .map_err(|_| Error::internal("Timeout waiting for element"))?
        .map_err(|e| Error::internal(e.to_string()))
    }
}
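A hedged usage sketch of WebAutomation (URL, selector, and output path are illustrative; assumes a Chromium binary is available for chromiumoxide to launch):

// Open a page, wait for a selector, and save a screenshot.
#[tokio::main]
async fn main() -> gb_core::Result<()> {
    let automation = WebAutomation::new().await?;
    let page = automation.new_page().await?;
    automation.navigate(&page, "https://example.com").await?;

    let _el = automation
        .find_element(&page, "h1", std::time::Duration::from_secs(5))
        .await?;

    let png = automation.take_screenshot(&page).await?;
    std::fs::write("page.png", png).map_err(|e| gb_core::Error::internal(e.to_string()))?;
    Ok(())
}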
gb-calendar/Cargo.toml (new file, 9 lines)
@@ -0,0 +1,9 @@
[package]
name = "gb-calendar"
version = { workspace = true }
edition = { workspace = true }
authors = { workspace = true }
license = { workspace = true }

[dependencies]
gb-core = { path = "../gb-core" }

gb-calendar/src/lib.rs (new file, empty)
gb-cloud/Cargo.toml (new file, 19 lines)
@@ -0,0 +1,19 @@
[package]
name = "gb-cloud"
version = { workspace = true }
edition = { workspace = true }
authors = { workspace = true }
license = { workspace = true }

[dependencies]
gb-core = { path = "../gb-core" }
async-trait = { workspace = true }
tokio = { workspace = true }
serde = { version = "1.0", features = ["derive"] }
thiserror = { workspace = true }
tracing = { workspace = true }

[dev-dependencies]
rstest = { workspace = true }
tokio-test = "0.4"
tempfile = "3.8"

gb-cloud/src/lib.rs (new file, empty)
gb-core/Cargo.toml (new file, 33 lines)
@@ -0,0 +1,33 @@
[package]
name = "gb-core"
version = { workspace = true }
edition = { workspace = true }
authors = { workspace = true }
license = { workspace = true }

[dependencies]
tokio-tungstenite = { workspace = true }
async-trait = { workspace = true }
serde = { workspace = true }
uuid = { workspace = true }
tokio = { workspace = true }
thiserror = { workspace = true }
chrono = { workspace = true }
tracing = { workspace = true }
axum = { version = "0.7", features = ["json"] }
serde_json = "1.0"
sqlx = { workspace = true }
redis = { workspace = true }
minio = { workspace = true }
zitadel = { workspace = true }
rdkafka = { workspace = true }
tonic = { workspace = true }
actix-web = { workspace = true }
anyhow = { workspace = true }
jsonwebtoken = { workspace = true }
lettre = { workspace = true }

[dev-dependencies]
mockall = { workspace = true }
rstest = { workspace = true }
tokio-test = { workspace = true }
gb-core/src/config.rs (new file, 114 lines)
@@ -0,0 +1,114 @@
use serde::Deserialize;
use std::env;

#[derive(Clone, Debug, Deserialize)]
pub struct AppConfig {
    pub server: ServerConfig,
    pub database: DatabaseConfig,
    pub redis: RedisConfig,
    pub kafka: KafkaConfig,
    // pub zitadel: ZitadelConfig,
    pub minio: MinioConfig,
    pub email: EmailConfig,
}

#[derive(Clone, Debug, Deserialize)]
pub struct ServerConfig {
    pub host: String,
    pub port: u16,
}

#[derive(Clone, Debug, Deserialize)]
pub struct DatabaseConfig {
    pub url: String,
    pub max_connections: u32,
}

#[derive(Clone, Debug, Deserialize)]
pub struct RedisConfig {
    pub url: String,
}

#[derive(Clone, Debug, Deserialize)]
pub struct KafkaConfig {
    pub brokers: String,
}

#[derive(Clone, Debug, Deserialize)]
pub struct ZitadelConfig {
    pub domain: String,
    pub client_id: String,
    pub client_secret: String,
    pub access_token: String,
}

#[derive(Clone, Debug, Deserialize)]
pub struct MinioConfig {
    pub endpoint: String,
    pub access_key: String,
    pub secret_key: String,
    pub use_ssl: bool,
    pub bucket: String,
}

#[derive(Clone, Debug, Deserialize)]
pub struct EmailConfig {
    pub smtp_server: String,
    pub smtp_port: u16,
    pub username: String,
    pub password: String,
    pub from_email: String,
}

impl AppConfig {
    pub fn from_env() -> Self {
        Self {
            server: ServerConfig {
                host: env::var("SERVER_HOST").unwrap_or_else(|_| "127.0.0.1".to_string()),
                port: env::var("SERVER_PORT")
                    .unwrap_or_else(|_| "8080".to_string())
                    .parse()
                    .expect("Invalid SERVER_PORT"),
            },
            database: DatabaseConfig {
                url: env::var("DATABASE_URL").expect("DATABASE_URL must be set"),
                max_connections: env::var("DATABASE_MAX_CONNECTIONS")
                    .unwrap_or_else(|_| "5".to_string())
                    .parse()
                    .expect("Invalid DATABASE_MAX_CONNECTIONS"),
            },
            redis: RedisConfig {
                url: env::var("REDIS_URL").expect("REDIS_URL must be set"),
            },
            kafka: KafkaConfig {
                brokers: env::var("KAFKA_BROKERS").expect("KAFKA_BROKERS must be set"),
            },
            // zitadel: ZitadelConfig {
            //     domain: env::var("ZITADEL_DOMAIN").expect("ZITADEL_DOMAIN must be set"),
            //     client_id: env::var("ZITADEL_CLIENT_ID").expect("ZITADEL_CLIENT_ID must be set"),
            //     client_secret: env::var("ZITADEL_CLIENT_SECRET")
            //         .expect("ZITADEL_CLIENT_SECRET must be set"),
            // },
            minio: MinioConfig {
                endpoint: env::var("MINIO_ENDPOINT").expect("MINIO_ENDPOINT must be set"),
                access_key: env::var("MINIO_ACCESS_KEY").expect("MINIO_ACCESS_KEY must be set"),
                secret_key: env::var("MINIO_SECRET_KEY").expect("MINIO_SECRET_KEY must be set"),
                use_ssl: env::var("MINIO_USE_SSL")
                    .unwrap_or_else(|_| "false".to_string())
                    .parse()
                    .expect("Invalid MINIO_USE_SSL"),
                bucket: env::var("MINIO_BUCKET").expect("MINIO_BUCKET must be set"),
            },
            email: EmailConfig {
                smtp_server: env::var("EMAIL_SMTP_SERVER").expect("EMAIL_SMTP_SERVER must be set"),
                smtp_port: env::var("EMAIL_SMTP_PORT")
                    .unwrap_or_else(|_| "587".to_string())
                    .parse()
                    .expect("Invalid EMAIL_SMTP_PORT"),
                username: env::var("EMAIL_USERNAME").expect("EMAIL_USERNAME must be set"),
                password: env::var("EMAIL_PASSWORD").expect("EMAIL_PASSWORD must be set"),
                from_email: env::var("EMAIL_FROM").expect("EMAIL_FROM must be set"),
            },
        }
    }
}
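AppConfig::from_env panics on any missing required variable, so a local run or test has to provide the whole set up front; a minimal sketch (values are illustrative):

// Set the required variables, then build the config.
fn demo() {
    std::env::set_var("DATABASE_URL", "postgres://gbuser:gbpassword@localhost:5432/generalbots");
    std::env::set_var("REDIS_URL", "redis://localhost:6379");
    std::env::set_var("KAFKA_BROKERS", "localhost:9092");
    std::env::set_var("MINIO_ENDPOINT", "localhost:9000");
    std::env::set_var("MINIO_ACCESS_KEY", "minio");
    std::env::set_var("MINIO_SECRET_KEY", "minio123");
    std::env::set_var("MINIO_BUCKET", "gb");
    std::env::set_var("EMAIL_SMTP_SERVER", "smtp.example.com");
    std::env::set_var("EMAIL_USERNAME", "bot");
    std::env::set_var("EMAIL_PASSWORD", "secret");
    std::env::set_var("EMAIL_FROM", "bot@example.com");

    let config = gb_core::config::AppConfig::from_env();
    assert_eq!(config.server.port, 8080); // SERVER_PORT falls back to 8080
}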
gb-core/src/db.rs (new file, 69 lines)
@@ -0,0 +1,69 @@
use crate::config::AppConfig;
use anyhow::Result;
use minio::s3::client::{Client as MinioClient, ClientBuilder as MinioClientBuilder};
use minio::s3::creds::StaticProvider;
use minio::s3::http::BaseUrl;
use rdkafka::producer::FutureProducer;
use rdkafka::ClientConfig;
use redis::aio::ConnectionManager as RedisConnectionManager;
use sqlx::postgres::{PgPool, PgPoolOptions};
use std::str::FromStr;
// use zitadel::api::clients::ClientBuilder;
// use zitadel::api::interceptors::AccessTokenInterceptor;
// use zitadel::api::zitadel::auth::v1::auth_service_client::AuthServiceClient;

pub async fn init_postgres(config: &AppConfig) -> Result<PgPool> {
    let pool = PgPoolOptions::new()
        .max_connections(config.database.max_connections)
        .connect(&config.database.url)
        .await
        .map_err(|e| anyhow::anyhow!(e))?;

    Ok(pool)
}

pub async fn init_redis(config: &AppConfig) -> Result<RedisConnectionManager> {
    let client = redis::Client::open(config.redis.url.as_str())?;
    let connection_manager = RedisConnectionManager::new(client).await?;

    Ok(connection_manager)
}

pub async fn init_kafka(config: &AppConfig) -> Result<FutureProducer> {
    let producer: FutureProducer = ClientConfig::new()
        .set("bootstrap.servers", &config.kafka.brokers)
        .set("message.timeout.ms", "5000")
        .create()?;

    Ok(producer)
}

pub async fn init_zitadel(
    _config: &AppConfig,
) -> Result<(), Box<dyn std::error::Error>> {
    // TODO: https://github.com/smartive/zitadel-rust/blob/be389ca08c7f82d36fc1bcc36d2d9eb8666b22cd/examples/fetch_profile_with_service_account.rs#L18
    Ok(())
}

pub async fn init_minio(
    config: &AppConfig,
) -> Result<MinioClient, Box<dyn std::error::Error + Send + Sync>> {
    // Construct the base URL
    let base_url = format!("https://{}", config.minio.endpoint);
    let base_url = BaseUrl::from_str(&base_url)?;

    // Create credentials provider
    let credentials = StaticProvider::new(&config.minio.access_key, &config.minio.secret_key, None);

    // Build the MinIO client
    let client = MinioClientBuilder::new(base_url.clone())
        .provider(Some(Box::new(credentials)))
        //.secure(config.minio.use_ssl)
        .build()?;

    Ok(client)
}
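A hedged sketch wiring the config into these initializers (assumes the backing services from the config are actually reachable):

// Build the config, open the connections, and sanity-check Postgres.
#[tokio::main]
async fn main() -> anyhow::Result<()> {
    let config = gb_core::config::AppConfig::from_env();

    let pg = gb_core::db::init_postgres(&config).await?;
    let _redis = gb_core::db::init_redis(&config).await?;
    let _kafka = gb_core::db::init_kafka(&config).await?;

    let row: (i32,) = sqlx::query_as("SELECT 1").fetch_one(&pg).await?;
    assert_eq!(row.0, 1);
    Ok(())
}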
gb-core/src/errors.rs (new file, 123 lines)
@@ -0,0 +1,123 @@
use thiserror::Error;
use axum::{
    response::{IntoResponse, Response},
    http::StatusCode,
    Json,
};
use serde_json::json;

#[derive(Error, Debug)]
pub enum ErrorKind {
    #[error("Database error: {0}")]
    Database(String),

    #[error("Configuration error: {0}")]
    Configuration(String),

    #[error("Redis error: {0}")]
    Redis(String),

    #[error("Kafka error: {0}")]
    Kafka(String),

    #[error("Invalid input: {0}")]
    InvalidInput(String),

    #[error("Not found: {0}")]
    NotFound(String),

    #[error("Authentication error: {0}")]
    Authentication(String),

    #[error("Authorization error: {0}")]
    Authorization(String),

    #[error("Internal error: {0}")]
    Internal(String),

    #[error("External service error: {0}")]
    ExternalService(String),

    #[error("WebSocket error: {0}")]
    WebSocket(String),

    #[error("Messaging error: {0}")]
    Messaging(String),

    #[error("API HTTP Server error: {0}")]
    Server(String),
}

#[derive(Debug)]
pub struct Error {
    pub kind: ErrorKind,
    pub message: String,
}

impl Error {
    pub fn new(kind: ErrorKind, message: impl Into<String>) -> Self {
        Self {
            kind,
            message: message.into(),
        }
    }

    pub fn internal<T: std::fmt::Display>(msg: T) -> Self {
        Self::new(ErrorKind::Internal(msg.to_string()), msg.to_string())
    }

    pub fn redis<T: std::fmt::Display>(msg: T) -> Self {
        Self::new(ErrorKind::Redis(msg.to_string()), msg.to_string())
    }

    pub fn kafka<T: std::fmt::Display>(msg: T) -> Self {
        Self::new(ErrorKind::Kafka(msg.to_string()), msg.to_string())
    }

    pub fn database<T: std::fmt::Display>(msg: T) -> Self {
        Self::new(ErrorKind::Database(msg.to_string()), msg.to_string())
    }

    pub fn websocket<T: std::fmt::Display>(msg: T) -> Self {
        Self::new(ErrorKind::WebSocket(msg.to_string()), msg.to_string())
    }
}

impl std::fmt::Display for Error {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "{}: {}", self.kind, self.message)
    }
}

impl std::error::Error for Error {}

pub type Result<T> = std::result::Result<T, Error>;

#[derive(Debug)]
pub enum AuthError {
    InvalidToken,
    MissingToken,
    InvalidCredentials,
    Internal(String),
}

impl IntoResponse for Error {
    fn into_response(self) -> Response {
        let status = match self.kind {
            ErrorKind::NotFound(_) => StatusCode::NOT_FOUND,
            ErrorKind::Authentication(_) => StatusCode::UNAUTHORIZED,
            ErrorKind::Authorization(_) => StatusCode::FORBIDDEN,
            ErrorKind::InvalidInput(_) => StatusCode::BAD_REQUEST,
            _ => StatusCode::INTERNAL_SERVER_ERROR,
        };

        let body = Json(json!({
            "error": self.message,
            "kind": format!("{:?}", self.kind)
        }));

        (status, body).into_response()
    }
}
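Because Error implements IntoResponse, axum handlers can return Result<_, Error> directly and get status-mapped JSON bodies. A hedged handler sketch (the route and lookup are hypothetical):

use axum::Json;
use gb_core::{Error, ErrorKind, Result};

// A NotFound error becomes a 404 with {"error": ..., "kind": ...}.
async fn get_user(id: String) -> Result<Json<serde_json::Value>> {
    if id.is_empty() {
        return Err(Error::new(ErrorKind::NotFound("user".into()), "no such user"));
    }
    Ok(Json(serde_json::json!({ "id": id })))
}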
gb-core/src/lib.rs (new file, 9 lines)
@@ -0,0 +1,9 @@
pub mod db;
pub mod errors;
pub mod models;
pub mod traits;
pub mod config;
pub mod utils;
pub use errors::{Error, ErrorKind, Result};
pub use utils::{create_response, extract_user_id};
396
gb-core/src/models.rs
Normal file
396
gb-core/src/models.rs
Normal file
|
@ -0,0 +1,396 @@
|
|||
use chrono::{DateTime, Utc};
|
||||
use minio::s3::client::Client as MinioClient;
|
||||
use rdkafka::producer::FutureProducer;
|
||||
use redis::aio::ConnectionManager as RedisConnectionManager;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use sqlx::PgPool;
|
||||
use uuid::Uuid;
|
||||
//use zitadel::api::zitadel::auth::v1::auth_service_client::AuthServiceClient;
|
||||
use serde_json::Value as JsonValue;
|
||||
use std::str::FromStr;
|
||||
|
||||
use crate::config::AppConfig;
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct CoreError(pub String);
|
||||
|
||||
// Add these near the top with other type definitions
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub enum CustomerStatus {
|
||||
Active,
|
||||
Inactive,
|
||||
Suspended,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub enum SubscriptionTier {
|
||||
Free,
|
||||
Pro,
|
||||
Enterprise,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
pub struct Instance {
|
||||
pub id: Uuid,
|
||||
pub customer_id: Uuid,
|
||||
pub name: String,
|
||||
pub status: String,
|
||||
pub shard_id: i32,
|
||||
pub region: String,
|
||||
pub config: JsonValue,
|
||||
pub created_at: DateTime<Utc>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
pub struct Room {
|
||||
pub id: Uuid,
|
||||
pub instance_id: Uuid,
|
||||
pub name: String,
|
||||
pub kind: String,
|
||||
pub status: String,
|
||||
pub config: JsonValue,
|
||||
pub created_at: DateTime<Utc>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct Message {
|
||||
pub id: Uuid,
|
||||
pub customer_id: Uuid,
|
||||
pub instance_id: Uuid,
|
||||
pub conversation_id: Uuid,
|
||||
pub sender_id: Uuid,
|
||||
pub kind: String,
|
||||
pub content: String,
|
||||
pub metadata: JsonValue,
|
||||
pub created_at: Option<DateTime<Utc>>,
|
||||
pub shard_key: Option<i32>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
pub struct MessageFilter {
|
||||
pub conversation_id: Option<Uuid>,
|
||||
pub sender_id: Option<Uuid>,
|
||||
pub from_date: Option<DateTime<Utc>>,
|
||||
pub to_date: Option<DateTime<Utc>>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
pub struct SearchQuery {
|
||||
pub query: String,
|
||||
pub conversation_id: Option<Uuid>,
|
||||
pub from_date: Option<DateTime<Utc>>,
|
||||
pub to_date: Option<DateTime<Utc>>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
pub struct FileUpload {
|
||||
pub content: Vec<u8>,
|
||||
pub filename: String,
|
||||
pub content_type: String,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
pub struct FileContent {
|
||||
pub content: Vec<u8>,
|
||||
pub content_type: String,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
pub struct Status {
|
||||
pub code: String,
|
||||
pub timestamp: DateTime<Utc>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
pub enum UserStatus {
|
||||
Active,
|
||||
Inactive,
|
||||
Suspended,
|
||||
}
|
||||
|
||||
impl FromStr for UserStatus {
|
||||
type Err = CoreError;
|
||||
|
||||
fn from_str(s: &str) -> Result<Self, Self::Err> {
|
||||
match s {
|
||||
"active" => Ok(UserStatus::Active),
|
||||
"inactive" => Ok(UserStatus::Inactive),
|
||||
"suspended" => Ok(UserStatus::Suspended),
|
||||
_ => Ok(UserStatus::Inactive),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct Track {
|
||||
pub id: Uuid,
|
||||
pub room_id: Uuid,
|
||||
pub user_id: Uuid,
|
||||
pub media_type: String,
|
||||
pub created_at: DateTime<Utc>,
|
||||
pub updated_at: DateTime<Utc>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
pub struct User {
|
||||
pub id: Uuid,
|
||||
pub customer_id: Uuid,
|
||||
pub instance_id: Uuid,
|
||||
pub name: String,
|
||||
pub email: String,
|
||||
pub password_hash: String,
|
||||
pub status: UserStatus,
|
||||
pub metadata: JsonValue,
|
||||
pub created_at: DateTime<Utc>,
|
||||
}
|
||||
|
||||
// Update the Customer struct to include these fields
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct Customer {
|
||||
pub id: Uuid,
|
||||
pub name: String,
|
||||
pub max_instances: u32,
|
||||
pub email: String,
|
||||
pub status: CustomerStatus, // Add this field
|
||||
pub subscription_tier: SubscriptionTier, // Add this field
|
||||
pub created_at: DateTime<Utc>,
|
||||
pub updated_at: DateTime<Utc>,
|
||||
}
|
||||
|
||||
impl Customer {
|
||||
pub fn new(
|
||||
name: String,
|
||||
email: String,
|
||||
subscription_tier: SubscriptionTier,
|
||||
max_instances: u32,
|
||||
) -> Self {
|
||||
Customer {
|
||||
id: Uuid::new_v4(),
|
||||
name,
|
||||
email,
|
||||
max_instances,
|
||||
subscription_tier,
|
||||
status: CustomerStatus::Active, // Default to Active
|
||||
created_at: Utc::now(),
|
||||
updated_at: Utc::now(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct RoomConfig {
|
||||
pub instance_id: Uuid,
|
||||
pub name: String,
|
||||
pub max_participants: u32,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct Connection {
|
||||
pub id: Uuid,
|
||||
pub room_id: Uuid,
|
||||
pub user_id: Uuid,
|
||||
pub connected_at: DateTime<Utc>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct TrackInfo {
|
||||
pub room_id: Uuid,
|
||||
pub user_id: Uuid,
|
||||
pub media_type: String,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct Subscription {
|
||||
pub id: Uuid,
|
||||
pub track_id: Uuid,
|
||||
pub subscriber_id: Uuid,
|
||||
pub created_at: DateTime<Utc>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct Participant {
|
||||
pub user_id: Uuid,
|
||||
pub room_id: Uuid,
|
||||
pub joined_at: DateTime<Utc>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct RoomStats {
|
||||
pub participant_count: u32,
|
||||
pub track_count: u32,
|
||||
pub duration: i64,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct MessageId(pub Uuid);
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
pub struct FileInfo {
|
||||
pub id: Uuid,
|
||||
pub filename: String,
|
||||
pub content_type: String,
|
||||
pub size: usize,
|
||||
pub url: String,
|
||||
pub created_at: DateTime<Utc>,
|
||||
}
|
||||
|
||||
// App state shared across all handlers
|
||||
pub struct AppState {
|
||||
pub minio_client: Option<MinioClient>,
|
||||
pub config: Option<AppConfig>,
|
||||
pub db_pool: Option<PgPool>,
|
||||
}
|
||||
|
||||
// File models
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
pub struct File {
|
||||
pub id: Uuid,
|
||||
pub user_id: Uuid,
|
||||
pub folder_id: Option<Uuid>,
|
||||
pub name: String,
|
||||
pub path: String,
|
||||
pub mime_type: String,
|
||||
pub size: i64,
|
||||
pub created_at: DateTime<Utc>,
|
||||
pub updated_at: DateTime<Utc>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
pub struct Folder {
|
||||
pub id: Uuid,
|
||||
pub user_id: Uuid,
|
||||
pub parent_id: Option<Uuid>,
|
||||
pub name: String,
|
||||
pub path: String,
|
||||
pub created_at: DateTime<Utc>,
|
||||
pub updated_at: DateTime<Utc>,
|
||||
}
|
||||
|
||||
// Conversation models
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
pub struct Conversation {
|
||||
pub id: Uuid,
|
||||
pub name: String,
|
||||
pub created_by: Uuid,
|
||||
pub created_at: DateTime<Utc>,
|
||||
pub updated_at: DateTime<Utc>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
pub struct ConversationMember {
|
||||
pub conversation_id: Uuid,
|
||||
pub user_id: Uuid,
|
||||
pub joined_at: DateTime<Utc>,
|
||||
}
|
||||
|
||||
// Calendar models
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
pub struct CalendarEvent {
|
||||
pub id: Uuid,
|
||||
pub title: String,
|
||||
pub description: Option<String>,
|
||||
pub location: Option<String>,
|
||||
pub start_time: DateTime<Utc>,
|
||||
pub end_time: DateTime<Utc>,
|
||||
pub user_id: Uuid,
|
||||
pub created_at: DateTime<Utc>,
|
||||
pub updated_at: DateTime<Utc>,
|
||||
}
|
||||
|
||||
// Task models
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
pub struct Task {
|
||||
pub id: Uuid,
|
||||
pub title: String,
|
||||
pub description: Option<String>,
|
||||
pub due_date: Option<DateTime<Utc>>,
|
||||
pub status: TaskStatus,
|
||||
pub priority: TaskPriority,
|
||||
pub user_id: Uuid,
|
||||
pub created_at: DateTime<Utc>,
|
||||
pub updated_at: DateTime<Utc>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
pub enum TaskStatus {
|
||||
Pending,
|
||||
InProgress,
|
||||
Completed,
|
||||
Cancelled,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
pub enum TaskPriority {
|
||||
Low,
|
||||
Medium,
|
||||
High,
|
||||
Urgent,
|
||||
}
|
||||
|
||||
// Response models
|
||||
#[derive(Debug, Serialize)]
|
||||
pub struct ApiResponse<T> {
|
||||
pub success: bool,
|
||||
pub message: Option<String>,
|
||||
pub data: Option<T>,
|
||||
}
|
||||
|
||||
// Error models
|
||||
#[derive(Debug, thiserror::Error)]
|
||||
pub enum AppError {
|
||||
#[error("Database error: {0}")]
|
||||
Database(#[from] sqlx::Error),
|
||||
|
||||
#[error("Redis error: {0}")]
|
||||
Redis(#[from] redis::RedisError),
|
||||
|
||||
#[error("Kafka error: {0}")]
|
||||
Kafka(String),
|
||||
|
||||
#[error("Zitadel error: {0}")]
|
||||
Zitadel(#[from] tonic::Status),
|
||||
|
||||
#[error("Minio error: {0}")]
|
||||
Minio(String),
|
||||
|
||||
#[error("Validation error: {0}")]
|
||||
Validation(String),
|
||||
|
||||
#[error("Not found: {0}")]
|
||||
NotFound(String),
|
||||
|
||||
#[error("Unauthorized: {0}")]
|
||||
Unauthorized(String),
|
||||
|
||||
#[error("Forbidden: {0}")]
|
||||
Forbidden(String),
|
||||
|
||||
#[error("Internal server error: {0}")]
|
||||
Internal(String),
|
||||
}
|
||||
|
||||
impl actix_web::ResponseError for AppError {
|
||||
fn error_response(&self) -> actix_web::HttpResponse {
|
||||
let (status, error_message) = match self {
|
||||
AppError::Validation(_) => (actix_web::http::StatusCode::BAD_REQUEST, self.to_string()),
|
||||
AppError::NotFound(_) => (actix_web::http::StatusCode::NOT_FOUND, self.to_string()),
|
||||
AppError::Unauthorized(_) => {
|
||||
(actix_web::http::StatusCode::UNAUTHORIZED, self.to_string())
|
||||
}
|
||||
AppError::Forbidden(_) => (actix_web::http::StatusCode::FORBIDDEN, self.to_string()),
|
||||
_ => (
|
||||
actix_web::http::StatusCode::INTERNAL_SERVER_ERROR,
|
||||
"Internal server error".to_string(),
|
||||
),
|
||||
};
|
||||
|
||||
actix_web::HttpResponse::build(status).json(ApiResponse::<()> {
|
||||
success: false,
|
||||
message: Some(error_message),
|
||||
data: None,
|
||||
})
|
||||
}
|
||||
}
|
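Because AppError implements ResponseError, handlers can return it directly and actix-web renders the JSON envelope above. A minimal sketch (route and message are illustrative, not from this codebase):

#[actix_web::get("/items/{id}")]
pub async fn get_item(path: actix_web::web::Path<Uuid>) -> Result<actix_web::HttpResponse, AppError> {
    // Illustrative: an unknown id surfaces as a 404 carrying the ApiResponse body.
    Err(AppError::NotFound(format!("item {}", path.into_inner())))
}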
0
gb-core/src/traits.rs
Normal file
155
gb-core/src/utils.rs
Normal file
|
@ -0,0 +1,155 @@
|
|||
use actix_web::{HttpRequest, HttpResponse};
|
||||
use chrono::{DateTime, Utc};
|
||||
use jsonwebtoken::{decode, encode, DecodingKey, EncodingKey, Header, Validation};
|
||||
use lettre::message::header::ContentType;
|
||||
use lettre::transport::smtp::authentication::Credentials;
|
||||
use lettre::{Message, SmtpTransport, Transport};
|
||||
use rdkafka::producer::{FutureProducer, FutureRecord};
|
||||
use rdkafka::util::Timeout;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::time::{Duration, SystemTime};
|
||||
use uuid::Uuid;
|
||||
|
||||
use crate::config::AppConfig;
|
||||
use crate::models::{ApiResponse, AppError, User};
|
||||
|
||||
// JWT Claims
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
pub struct Claims {
|
||||
pub sub: String, // subject (user ID)
|
||||
pub exp: usize, // expiration time
|
||||
pub iat: usize, // issued at
|
||||
pub email: String, // user email
|
||||
pub username: String, // username
|
||||
}
|
||||
|
||||
// Generate JWT token
|
||||
pub fn generate_jwt(user: &User, secret: &str) -> Result<String, AppError> {
|
||||
let expiration = SystemTime::now()
|
||||
.duration_since(SystemTime::UNIX_EPOCH)
|
||||
.unwrap()
|
||||
.as_secs() as usize + 86400; // 24 hours
|
||||
|
||||
let issued_at = SystemTime::now()
|
||||
.duration_since(SystemTime::UNIX_EPOCH)
|
||||
.unwrap()
|
||||
.as_secs() as usize;
|
||||
|
||||
let claims = Claims {
|
||||
sub: user.id.to_string(),
|
||||
exp: expiration,
|
||||
iat: issued_at,
|
||||
email: user.email.clone(),
|
||||
username: user.name.clone(),
|
||||
};
|
||||
|
||||
encode(
|
||||
&Header::default(),
|
||||
&claims,
|
||||
&EncodingKey::from_secret(secret.as_bytes()),
|
||||
)
|
||||
.map_err(|e| AppError::Internal(format!("Failed to generate JWT: {}", e)))
|
||||
}
|
||||
|
||||
// Validate JWT token
|
||||
pub fn validate_jwt(token: &str, secret: &str) -> Result<Claims, AppError> {
|
||||
let validation = Validation::default();
|
||||
|
||||
decode::<Claims>(
|
||||
token,
|
||||
&DecodingKey::from_secret(secret.as_bytes()),
|
||||
&validation,
|
||||
)
|
||||
.map(|data| data.claims)
|
||||
.map_err(|e| AppError::Unauthorized(format!("Invalid token: {}", e)))
|
||||
}
|
||||
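A round-trip sketch (the secret and the user value are illustrative; in practice the secret should come from configuration):

let token = generate_jwt(&user, "dev-secret")?;
let claims = validate_jwt(&token, "dev-secret")?;
assert_eq!(claims.sub, user.id.to_string());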
|
||||
// Extract user ID from request
|
||||
pub fn extract_user_id(req: &HttpRequest) -> Result<Uuid, AppError> {
|
||||
let auth_header = req
|
||||
.headers()
|
||||
.get("Authorization")
|
||||
.ok_or_else(|| AppError::Unauthorized("Missing Authorization header".to_string()))?
|
||||
.to_str()
|
||||
.map_err(|_| AppError::Unauthorized("Invalid Authorization header".to_string()))?;
|
||||
|
||||
if !auth_header.starts_with("Bearer ") {
|
||||
return Err(AppError::Unauthorized("Invalid Authorization header format".to_string()));
|
||||
}
|
||||
|
||||
let token = &auth_header[7..];
|
||||
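// TODO: load this secret from configuration; the literal below is a placeholder.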
let claims = validate_jwt(token, "your-secret-key")?;
|
||||
|
||||
Uuid::parse_str(&claims.sub)
|
||||
.map_err(|_| AppError::Unauthorized("Invalid user ID in token".to_string()))
|
||||
}
|
||||
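A handler sketch built on extract_user_id (route and payload are illustrative):

#[actix_web::get("/me")]
pub async fn whoami(req: HttpRequest) -> Result<HttpResponse, AppError> {
    // Rejects with 401 unless a valid "Authorization: Bearer <jwt>" header is present.
    let user_id = extract_user_id(&req)?;
    Ok(create_response(user_id, None))
}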
|
||||
// Send email
|
||||
pub async fn send_email(
|
||||
config: &AppConfig,
|
||||
to_email: &str,
|
||||
subject: &str,
|
||||
body: &str,
|
||||
) -> Result<(), AppError> {
|
||||
let email = Message::builder()
|
||||
.from(config.email.from_email.parse().map_err(|e| AppError::Internal(format!("Invalid from address: {}", e)))?)
|
||||
.to(to_email.parse().map_err(|e| AppError::Internal(format!("Invalid recipient address: {}", e)))?)
|
||||
.subject(subject)
|
||||
.header(ContentType::TEXT_PLAIN)
|
||||
.body(body.to_string())
|
||||
.map_err(|e| AppError::Internal(format!("Failed to create email: {}", e)))?;
|
||||
|
||||
let creds = Credentials::new(
|
||||
config.email.username.clone(),
|
||||
config.email.password.clone(),
|
||||
);
|
||||
|
||||
// Open a remote connection to the SMTP server
|
||||
let mailer = SmtpTransport::relay(&config.email.smtp_server)
|
||||
.map_err(|e| AppError::Internal(format!("Failed to create SMTP transport: {}", e)))?
|
||||
.credentials(creds)
|
||||
.build();
|
||||
|
||||
// Send the email
|
||||
mailer.send(&email)
|
||||
.map_err(|e| AppError::Internal(format!("Failed to send email: {}", e)))?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
// Send message to Kafka
|
||||
pub async fn send_to_kafka(
|
||||
producer: &FutureProducer,
|
||||
topic: &str,
|
||||
key: &str,
|
||||
payload: &str,
|
||||
) -> Result<(), AppError> {
|
||||
producer
|
||||
.send(
|
||||
FutureRecord::to(topic)
|
||||
.key(key)
|
||||
.payload(payload),
|
||||
Timeout::After(Duration::from_secs(5)),
|
||||
)
|
||||
.await
|
||||
.map_err(|(e, _)| AppError::Kafka(format!("Failed to send message to Kafka: {}", e)))?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
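A usage sketch, assuming a broker on localhost:9092 and a topic named "messages" (both illustrative):

use rdkafka::config::ClientConfig;

async fn publish_example() -> Result<(), AppError> {
    let producer: FutureProducer = ClientConfig::new()
        .set("bootstrap.servers", "localhost:9092") // assumed broker address
        .create()
        .map_err(|e| AppError::Kafka(e.to_string()))?;
    send_to_kafka(&producer, "messages", "msg-1", r#"{"hello":"world"}"#).await
}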
|
||||
// Format datetime for JSON responses
|
||||
pub fn format_datetime(dt: DateTime<Utc>) -> String {
|
||||
dt.to_rfc3339()
|
||||
}
|
||||
|
||||
// Create a standard API response
|
||||
pub fn create_response<T: Serialize>(
|
||||
data: T,
|
||||
message: Option<String>,
|
||||
) -> HttpResponse {
|
||||
HttpResponse::Ok().json(ApiResponse {
|
||||
success: true,
|
||||
message,
|
||||
data: Some(data),
|
||||
})
|
||||
}
|
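A handler sketch that uses the helper (route is illustrative):

#[actix_web::get("/health")]
pub async fn health() -> HttpResponse {
    // Wraps the payload in the standard ApiResponse envelope.
    create_response("ok", None)
}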
25
gb-document/Cargo.toml
Normal file
|
@ -0,0 +1,25 @@
|
|||
[package]
|
||||
name = "gb-document"
|
||||
version = { workspace = true }
|
||||
edition = { workspace = true }
|
||||
authors = { workspace = true }
|
||||
license = { workspace = true }
|
||||
|
||||
[dependencies]
|
||||
gb-core = { path = "../gb-core" }
|
||||
lopdf = "0.31"
|
||||
docx-rs = "0.4"
|
||||
calamine = "0.21"
|
||||
async-trait= { workspace = true }
|
||||
tokio= { workspace = true }
|
||||
serde= { workspace = true }
|
||||
serde_json= { workspace = true }
|
||||
thiserror= { workspace = true }
|
||||
tracing= { workspace = true }
|
||||
encoding_rs = "0.8"
|
||||
zip = "0.6"
|
||||
|
||||
[dev-dependencies]
|
||||
rstest= { workspace = true }
|
||||
tokio-test = "0.4"
|
||||
tempfile = "3.8"
|
35
gb-document/src/excel.rs
Normal file
|
@ -0,0 +1,35 @@
|
|||
use gb_core::{Result, Error};
|
||||
use calamine::{Reader, Xlsx};
|
||||
use std::io::Cursor;
|
||||
use tracing::instrument;
|
||||
|
||||
pub struct ExcelProcessor;
|
||||
|
||||
impl ExcelProcessor {
|
||||
#[instrument(skip(data))]
|
||||
pub fn extract_data(data: &[u8]) -> Result<Vec<Vec<String>>> {
|
||||
let cursor = Cursor::new(data);
|
||||
let mut workbook = Xlsx::new(cursor)
|
||||
.map_err(|e| Error::internal(format!("Failed to read Excel file: {}", e)))?;
|
||||
|
||||
let sheet_name = workbook.sheet_names()[0].clone();
|
||||
let range = workbook.worksheet_range(&sheet_name)
|
||||
.ok_or_else(|| Error::internal("Failed to get worksheet".to_string()))?
|
||||
.map_err(|e| Error::internal(format!("Failed to read range: {}", e)))?;
|
||||
|
||||
let mut result = Vec::new();
|
||||
for row in range.rows() {
|
||||
let row_data: Vec<String> = row.iter()
|
||||
.map(|cell| cell.to_string())
|
||||
.collect();
|
||||
result.push(row_data);
|
||||
}
|
||||
|
||||
Ok(result)
|
||||
}
|
||||
|
||||
#[instrument(skip(headers, data))]
|
||||
pub fn create_excel(headers: &[&str], data: &[Vec<String>]) -> Result<Vec<u8>> {
|
||||
todo!("Implement Excel creation using a suitable library");
|
||||
}
}
|
||||
|
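A quick sketch of feeding a workbook from disk through extract_data (the path is illustrative):

let bytes = std::fs::read("report.xlsx").expect("read workbook"); // illustrative path
let rows = ExcelProcessor::extract_data(&bytes)?;
for row in &rows {
    println!("{}", row.join("\t"));
}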
0
gb-document/src/lib.rs
Normal file
127
gb-document/src/pdf.rs
Normal file
|
@ -0,0 +1,127 @@
|
|||
use gb_core::{Result, Error};
|
||||
use lopdf::{Document, Object, StringFormat};
|
||||
use std::io::Cursor;
|
||||
use tracing::instrument;
|
||||
|
||||
pub struct PdfProcessor;
|
||||
|
||||
impl PdfProcessor {
|
||||
#[instrument(skip(data))]
|
||||
pub fn extract_text(data: &[u8]) -> Result<String> {
|
||||
let doc = Document::load_from(Cursor::new(data))
|
||||
.map_err(|e| Error::internal(format!("Failed to load PDF: {}", e)))?;
|
||||
|
||||
let mut text = String::new();
|
||||
for page_num in 1..=doc.get_pages().len() as u32 {
|
||||
if let Ok(page_text) = Self::extract_page_text(&doc, page_num) {
|
||||
text.push_str(&page_text);
|
||||
text.push('\n');
|
||||
}
|
||||
}
|
||||
|
||||
Ok(text)
|
||||
}
|
||||
|
||||
#[instrument(skip(doc))]
|
||||
fn extract_page_text(doc: &Document, page_num: u32) -> Result<String> {
|
||||
let page = doc.get_page(page_num)
|
||||
.map_err(|e| Error::internal(format!("Failed to get page {}: {}", page_num, e)))?;
|
||||
|
||||
let contents = doc.get_page_content(page)
|
||||
.map_err(|e| Error::internal(format!("Failed to get page content: {}", e)))?;
|
||||
|
||||
let mut text = String::new();
|
||||
for content in contents.iter() {
|
||||
if let Ok(Object::String(s, StringFormat::Literal)) = content {
|
||||
if let Ok(decoded) = String::from_utf8(s.clone()) {
|
||||
text.push_str(&decoded);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(text)
|
||||
}
|
||||
|
||||
#[instrument(skip(pdfs))]
|
||||
pub fn merge_pdfs(pdfs: Vec<&[u8]>) -> Result<Vec<u8>> {
|
||||
let mut merged = Document::new();
|
||||
|
||||
|
||||
for pdf_data in pdfs {
|
||||
let doc = Document::load_from(Cursor::new(pdf_data))
|
||||
.map_err(|e| Error::internal(format!("Failed to load PDF: {}", e)))?;
|
||||
|
||||
for (_, page) in doc.get_pages() {
|
||||
merged.add_page(page.clone());
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
let mut output = Vec::new();
|
||||
merged.save_to(&mut Cursor::new(&mut output))
|
||||
.map_err(|e| Error::internal(format!("Failed to save merged PDF: {}", e)))?;
|
||||
|
||||
Ok(output)
|
||||
}
|
||||
|
||||
#[instrument(skip(data))]
|
||||
pub fn split_pdf(data: &[u8], pages: &[u32]) -> Result<Vec<Vec<u8>>> {
|
||||
let doc = Document::load_from(Cursor::new(data))
|
||||
.map_err(|e| Error::internal(format!("Failed to load PDF: {}", e)))?;
|
||||
|
||||
let mut result = Vec::new();
|
||||
for &page_num in pages {
|
||||
let mut new_doc = Document::new();
|
||||
if let Ok(page) = doc.get_page(page_num) {
|
||||
new_doc.add_page(page.clone());
|
||||
let mut output = Vec::new();
|
||||
new_doc.save_to(&mut Cursor::new(&mut output))
|
||||
.map_err(|e| Error::internal(format!("Failed to save split PDF: {}", e)))?;
|
||||
result.push(output);
|
||||
}
|
||||
}
|
||||
|
||||
Ok(result)
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use rstest::*;
|
||||
|
||||
fn create_test_pdf() -> Vec<u8> {
|
||||
let mut doc = Document::new();
|
||||
doc.add_page(lopdf::dictionary! {
|
||||
"Type" => "Page",
|
||||
"Contents" => Object::String(b"BT /F1 12 Tf 72 712 Td (Test Page) Tj ET".to_vec(), StringFormat::Literal),
|
||||
});
|
||||
let mut output = Vec::new();
|
||||
doc.save_to(&mut Cursor::new(&mut output)).unwrap();
|
||||
output
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
fn test_extract_text() -> Result<()> {
|
||||
let pdf_data = create_test_pdf();
|
||||
let text = PdfProcessor::extract_text(&pdf_data)?;
|
||||
assert!(text.contains("Test Page"));
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
fn test_merge_pdfs() -> Result<()> {
|
||||
let pdf1 = create_test_pdf();
|
||||
let pdf2 = create_test_pdf();
|
||||
let merged = PdfProcessor::merge_pdfs(vec![&pdf1, &pdf2])?;
        assert!(!merged.is_empty());
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
fn test_split_pdf() -> Result<()> {
|
||||
let pdf_data = create_test_pdf();
|
||||
let split = PdfProcessor::split_pdf(&pdf_data, &[1])?;
|
||||
assert_eq!(split.len(), 1);
|
||||
Ok(())
|
||||
}
|
||||
}
|
105
gb-document/src/word.rs
Normal file
|
@ -0,0 +1,105 @@
|
|||
use gb_core::{Result, Error};
|
||||
use docx_rs::{Docx, Paragraph, Run, RunText};
|
||||
use std::io::Cursor;
|
||||
use tracing::instrument;
|
||||
|
||||
pub struct WordProcessor;
|
||||
|
||||
impl WordProcessor {
|
||||
#[instrument(skip(data))]
|
||||
pub fn extract_text(data: &[u8]) -> Result<String> {
|
||||
let doc = Docx::from_reader(Cursor::new(data))
|
||||
.map_err(|e| Error::internal(format!("Failed to read DOCX: {}", e)))?;
|
||||
|
||||
let mut text = String::new();
|
||||
for para in doc.document.paragraphs() {
|
||||
for run in para.runs() {
|
||||
if let Some(text_content) = run.text() {
|
||||
text.push_str(text_content);
|
||||
}
|
||||
text.push(' ');
|
||||
}
|
||||
text.push('\n');
|
||||
}
|
||||
|
||||
Ok(text)
|
||||
}
|
||||
|
||||
#[instrument(skip(content))]
|
||||
pub fn create_document(content: &str) -> Result<Vec<u8>> {
|
||||
let mut docx = Docx::new();
|
||||
|
||||
for line in content.lines() {
|
||||
let paragraph = Paragraph::new()
|
||||
.add_run(
|
||||
Run::new().add_text(RunText::new(line))
|
||||
);
|
||||
docx = docx.add_paragraph(paragraph);
|
||||
}
|
||||
|
||||
let mut output = Vec::new();
|
||||
docx.build()
|
||||
.pack(&mut Cursor::new(&mut output))
|
||||
.map_err(|e| Error::internal(format!("Failed to create DOCX: {}", e)))?;
|
||||
|
||||
Ok(output)
|
||||
}
|
||||
|
||||
#[instrument(skip(template_data, variables))]
|
||||
pub fn fill_template(template_data: &[u8], variables: &serde_json::Value) -> Result<Vec<u8>> {
|
||||
let doc = Docx::from_reader(Cursor::new(template_data))
|
||||
.map_err(|e| Error::internal(format!("Failed to read template: {}", e)))?;
|
||||
|
||||
let mut new_doc = doc.clone();
|
||||
|
||||
for para in new_doc.document.paragraphs_mut() {
|
||||
for run in para.runs_mut() {
|
||||
if let Some(text) = run.text_mut() {
|
||||
let mut new_text = text.clone();
|
||||
for (key, value) in variables.as_object().unwrap() {
|
||||
let placeholder = format!("{{{}}}", key);
|
||||
new_text = new_text.replace(&placeholder, value.as_str().unwrap_or(""));
|
||||
}
|
||||
*text = new_text;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let mut output = Vec::new();
|
||||
new_doc.build()
|
||||
.pack(&mut Cursor::new(&mut output))
|
||||
.map_err(|e| Error::internal(format!("Failed to save filled template: {}", e)))?;
|
||||
|
||||
Ok(output)
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use rstest::*;
|
||||
use serde_json::json;
|
||||
|
||||
#[rstest]
|
||||
fn test_create_document() -> Result<()> {
|
||||
let content = "Test document\nSecond line";
|
||||
let doc_data = WordProcessor::create_document(content)?;
|
||||
|
||||
let extracted_text = WordProcessor::extract_text(&doc_data)?;
|
||||
assert!(extracted_text.contains("Test document"));
|
||||
assert!(extracted_text.contains("Second line"));
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
fn test_fill_template() -> Result<()> {
|
||||
let template = WordProcessor::create_document("Hello, {name}!")?;
|
||||
"name": "World"
|
||||
});
|
||||
|
||||
let filled = WordProcessor::fill_template(&template, &variables)?;
|
||||
let text = WordProcessor::extract_text(&filled)?;
|
||||
assert!(text.contains("Hello, World!"));
|
||||
Ok(())
|
||||
}
|
||||
}
|
32
gb-file/Cargo.toml
Normal file
|
@ -0,0 +1,32 @@
|
|||
[package]
|
||||
name = "gb-file"
|
||||
version = { workspace = true }
|
||||
edition = { workspace = true }
|
||||
authors = { workspace = true }
|
||||
license = { workspace = true }
|
||||
|
||||
[dependencies]
|
||||
gb-core = { path = "../gb-core" }
|
||||
async-trait= { workspace = true }
|
||||
tokio= { workspace = true }
|
||||
serde = { workspace = true , features = ["derive"] }
|
||||
serde_json ={ workspace = true }
|
||||
thiserror= { workspace = true }
|
||||
tracing= { workspace = true }
|
||||
futures ={ workspace = true }
|
||||
uuid = { workspace = true }
|
||||
jsonwebtoken = { workspace = true }
|
||||
lettre= { workspace = true }
|
||||
minio = { workspace = true }
|
||||
actix-web ={ workspace = true }
|
||||
actix-multipart ={ workspace = true }
|
||||
sanitize-filename = { workspace = true }
|
||||
tempfile = { workspace = true }
|
||||
log = { workspace = true }
|
||||
env_logger = { workspace = true }
|
||||
tokio-stream = { workspace = true }
|
||||
|
||||
[dev-dependencies]
|
||||
rstest= { workspace = true }
|
||||
tokio-test = "0.4"
|
||||
tempfile = "3.8"
|
|
@ -1,9 +1,8 @@
|
|||
|
||||
|
||||
use actix_web::{ web};
|
||||
|
||||
use actix_multipart::Multipart;
|
||||
use actix_web::{post, HttpResponse};
|
||||
use actix_web::{post, web, HttpRequest, HttpResponse};
|
||||
use gb_core::models::AppError;
|
||||
use gb_core::models::AppState;
|
||||
use gb_core::utils::{create_response, extract_user_id};
|
||||
use minio::s3::builders::ObjectContent;
|
||||
use minio::s3::Client;
|
||||
use std::io::Write;
|
||||
|
@ -11,31 +10,6 @@ use tempfile::NamedTempFile;
|
|||
use minio::s3::types::ToStream;
|
||||
use tokio_stream::StreamExt;
|
||||
|
||||
use minio::s3::client::{Client as MinioClient, ClientBuilder as MinioClientBuilder};
|
||||
use minio::s3::creds::StaticProvider;
|
||||
use minio::s3::http::BaseUrl;
|
||||
use std::str::FromStr;
|
||||
|
||||
use crate::services::config::{AppConfig};
|
||||
use crate::services::state::AppState;
|
||||
|
||||
pub async fn init_minio(config: &AppConfig) -> Result<MinioClient, minio::s3::error::Error> {
|
||||
let scheme = if config.minio.use_ssl { "https" } else { "http" };
|
||||
let base_url = format!("{}://{}", scheme, config.minio.server);
|
||||
let base_url = BaseUrl::from_str(&base_url)?;
|
||||
let credentials = StaticProvider::new(
|
||||
&config.minio.access_key,
|
||||
&config.minio.secret_key,
|
||||
None,
|
||||
);
|
||||
|
||||
let minio_client = MinioClientBuilder::new(base_url)
|
||||
.provider(Some(credentials))
|
||||
.build()?;
|
||||
|
||||
Ok(minio_client)
|
||||
}
|
||||
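A startup sketch (the AppConfig loader is an assumption, not shown in this diff):

let config = load_app_config(); // assumed helper returning AppConfig
let minio_client = init_minio(&config)
    .await
    .expect("failed to connect to MinIO");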
|
||||
#[post("/files/upload/{folder_path}")]
|
||||
pub async fn upload_file(
|
||||
folder_path: web::Path<String>,
|
||||
|
@ -79,7 +53,7 @@ pub async fn upload_file(
|
|||
|
||||
// Upload the file to the MinIO bucket
|
||||
let client: Client = state.minio_client.clone().unwrap();
|
||||
let bucket_name = state.config.as_ref().unwrap().minio.bucket.clone();
|
||||
let bucket_name = "file-upload-rust-bucket";
|
||||
|
||||
let content = ObjectContent::from(temp_file.path());
|
||||
client
|
||||
|
@ -104,6 +78,19 @@ pub async fn upload_file(
|
|||
)))
|
||||
}
|
||||
|
||||
#[actix_web::post("/files/delete")]
|
||||
pub async fn delete_file(
|
||||
req: HttpRequest,
|
||||
_state: web::Data<AppState>,
|
||||
_file_path: web::Json<String>,
|
||||
) -> Result<HttpResponse, AppError> {
|
||||
let _user_id = extract_user_id(&req)?;
|
||||
|
||||
Ok(create_response(
|
||||
true,
|
||||
Some("File deleted successfully".to_string()),
|
||||
))
|
||||
}
|
||||
|
||||
|
||||
#[post("/files/list/{folder_path}")]
|
||||
|
@ -142,4 +129,4 @@ pub async fn list_file(
|
|||
}
|
||||
|
||||
Ok(HttpResponse::Ok().json(file_list))
|
||||
}
|
||||
}
|
1
gb-file/src/lib.rs
Normal file
|
@ -0,0 +1 @@
|
|||
pub mod handlers;
|
27
gb-image/Cargo.toml
Normal file
|
@ -0,0 +1,27 @@
|
|||
[package]
|
||||
name = "gb-image"
|
||||
version = { workspace = true }
|
||||
edition = { workspace = true }
|
||||
authors = { workspace = true }
|
||||
license = { workspace = true }
|
||||
|
||||
[dependencies]
|
||||
gb-core = { path = "../gb-core" }
|
||||
image = { version = "0.24", features = ["webp", "jpeg", "png", "gif"] }
|
||||
imageproc = "0.23"
|
||||
rusttype = "0.9"
|
||||
async-trait= { workspace = true }
|
||||
tokio= { workspace = true }
|
||||
serde= { workspace = true }
|
||||
serde_json= { workspace = true }
|
||||
thiserror= { workspace = true }
|
||||
tracing= { workspace = true }
|
||||
tempfile = "3.8"
|
||||
|
||||
[dev-dependencies]
|
||||
rstest= { workspace = true }
|
||||
tokio-test = "0.4"
|
||||
|
||||
|
||||
[build-dependencies]
|
||||
reqwest = { version = "0.11", features = ["blocking"] }
|
1794
gb-image/assets/DejaVuSans.ttf
Normal file
File diff suppressed because one or more lines are too long
21
gb-image/src/build.rs
Normal file
|
@ -0,0 +1,21 @@
|
|||
use std::fs;
|
||||
use std::path::Path;
|
||||
|
||||
fn main() {
|
||||
let assets_dir = Path::new("assets");
|
||||
if !assets_dir.exists() {
|
||||
fs::create_dir(assets_dir).expect("Failed to create assets directory");
|
||||
}
|
||||
|
||||
let font_path = assets_dir.join("DejaVuSans.ttf");
|
||||
if !font_path.exists() {
|
||||
let font_url = "https://github.com/dejavu-fonts/dejavu-fonts/raw/master/ttf/DejaVuSans.ttf";
|
||||
let response = reqwest::blocking::get(font_url)
|
||||
.expect("Failed to download font")
|
||||
.bytes()
|
||||
.expect("Failed to get font bytes");
|
||||
|
||||
fs::write(font_path, response)
|
||||
.expect("Failed to save font file");
|
||||
}
|
||||
}
|
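Note: Cargo only runs a build script named build.rs at the package root by default. With the script placed at src/build.rs as above, the manifest needs build = "src/build.rs" under [package]; otherwise the font download step never executes.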
74
gb-image/src/converter.rs
Normal file
|
@ -0,0 +1,74 @@
|
|||
use std::io::Cursor;
|
||||
use gb_core::{Result, Error};
|
||||
use image::{ImageOutputFormat, DynamicImage};
|
||||
use tracing::instrument;
|
||||
|
||||
pub struct ImageConverter;
|
||||
|
||||
impl ImageConverter {
|
||||
#[instrument]
|
||||
pub fn to_jpeg(img: &DynamicImage, quality: u8) -> Result<Vec<u8>> {
|
||||
let mut buffer = Cursor::new(Vec::new());
|
||||
img.write_to(&mut buffer, ImageOutputFormat::Jpeg(quality))
|
||||
.map_err(|e| Error::internal(format!("JPEG conversion failed: {}", e)))?;
|
||||
Ok(buffer.into_inner())
|
||||
}
|
||||
|
||||
#[instrument]
|
||||
pub fn to_png(img: &DynamicImage) -> Result<Vec<u8>> {
|
||||
let mut buffer = Cursor::new(Vec::new());
|
||||
img.write_to(&mut buffer, ImageOutputFormat::Png)
|
||||
.map_err(|e| Error::internal(format!("PNG conversion failed: {}", e)))?;
|
||||
Ok(buffer.into_inner())
|
||||
}
|
||||
|
||||
#[instrument]
|
||||
// Note: the image crate's WebP encoder used here does not take a quality
    // setting, so the parameter is kept only for API symmetry with to_jpeg.
    pub fn to_webp(img: &DynamicImage, _quality: u8) -> Result<Vec<u8>> {
|
||||
let mut buffer = Cursor::new(Vec::new());
|
||||
img.write_to(&mut buffer, ImageOutputFormat::WebP)
|
||||
.map_err(|e| Error::internal(format!("WebP conversion failed: {}", e)))?;
|
||||
Ok(buffer.into_inner())
|
||||
}
|
||||
|
||||
#[instrument]
|
||||
pub fn to_gif(img: &DynamicImage) -> Result<Vec<u8>> {
|
||||
let mut buffer = Cursor::new(Vec::new());
|
||||
img.write_to(&mut buffer, ImageOutputFormat::Gif)
|
||||
.map_err(|e| Error::internal(format!("GIF conversion failed: {}", e)))?;
|
||||
Ok(buffer.into_inner())
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use rstest::*;
|
||||
|
||||
#[fixture]
|
||||
fn test_image() -> DynamicImage {
|
||||
DynamicImage::new_rgb8(100, 100)
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
fn test_jpeg_conversion(test_image: DynamicImage) -> Result<()> {
|
||||
let jpeg_data = ImageConverter::to_jpeg(&test_image, 80)?;
|
||||
assert!(!jpeg_data.is_empty());
|
||||
assert_eq!(image::guess_format(&jpeg_data).unwrap(), image::ImageFormat::Jpeg);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
fn test_png_conversion(test_image: DynamicImage) -> Result<()> {
|
||||
let png_data = ImageConverter::to_png(&test_image)?;
|
||||
assert!(!png_data.is_empty());
|
||||
assert_eq!(image::guess_format(&png_data).unwrap(), image::ImageFormat::Png);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
fn test_webp_conversion(test_image: DynamicImage) -> Result<()> {
|
||||
let webp_data = ImageConverter::to_webp(&test_image, 80)?;
|
||||
assert!(!webp_data.is_empty());
|
||||
Ok(())
|
||||
}
|
||||
}
|
54
gb-image/src/lib.rs
Normal file
|
@ -0,0 +1,54 @@
|
|||
pub mod processor;
|
||||
pub mod converter;
|
||||
|
||||
pub use processor::ImageProcessor;
|
||||
pub use converter::ImageConverter;
|
||||
// Re-export ImageFormat from the image crate so callers avoid a direct dependency
|
||||
pub use image::ImageFormat;
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use gb_core::Result;
|
||||
use image::{DynamicImage, Rgba};
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_image_processing_integration() -> Result<()> {
|
||||
// Initialize components
|
||||
let processor = ImageProcessor::new();
|
||||
|
||||
// Create test image
|
||||
let mut image = DynamicImage::new_rgba8(200, 200); // RGBA-backed so add_text below can borrow it as RGBA
|
||||
|
||||
// Test image processing operations
|
||||
let resized = processor.resize(&image, 100, 100);
|
||||
assert_eq!(resized.width(), 100);
|
||||
assert_eq!(resized.height(), 100);
|
||||
|
||||
let cropped = processor.crop(&image, 50, 50, 100, 100)?;
|
||||
assert_eq!(cropped.width(), 100);
|
||||
assert_eq!(cropped.height(), 100);
|
||||
|
||||
let _blurred = processor.apply_blur(&image, 1.0);
|
||||
let _brightened = processor.adjust_brightness(&image, 10);
|
||||
let _contrasted = processor.adjust_contrast(&image, 1.2);
|
||||
|
||||
// Test text addition
|
||||
processor.add_text(
|
||||
&mut image,
|
||||
"Integration Test",
|
||||
10,
|
||||
10,
|
||||
24.0,
|
||||
Rgba([0, 0, 0, 255]),
|
||||
)?;
|
||||
|
||||
// Test format conversion
|
||||
let _webp_data = ImageConverter::to_webp(&image, 80)?;
|
||||
let _jpeg_data = ImageConverter::to_jpeg(&image, 80)?;
|
||||
let _png_data = ImageConverter::to_png(&image)?;
|
||||
let _gif_data = ImageConverter::to_gif(&image)?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
74
gb-image/src/processor.rs
Normal file
|
@ -0,0 +1,74 @@
|
|||
use gb_core::{Error, Result};
|
||||
use image::{DynamicImage, Rgba};
|
||||
use imageproc::drawing::draw_text_mut;
|
||||
use rusttype::{Font, Scale};
|
||||
use std::fs;
|
||||
|
||||
|
||||
pub struct ImageProcessor;
|
||||
|
||||
impl ImageProcessor {
|
||||
pub fn new() -> Self {
|
||||
Self
|
||||
}
|
||||
|
||||
|
||||
|
||||
pub fn resize(&self, image: &DynamicImage, width: u32, height: u32) -> DynamicImage {
|
||||
image.resize(width, height, image::imageops::FilterType::Lanczos3)
|
||||
}
|
||||
|
||||
pub fn crop(&self, image: &DynamicImage, x: u32, y: u32, width: u32, height: u32) -> Result<DynamicImage> {
|
||||
if x + width > image.width() || y + height > image.height() {
|
||||
return Err(Error::internal("Crop dimensions exceed image bounds".to_string()));
|
||||
}
|
||||
Ok(image.crop_imm(x, y, width, height))
|
||||
}
|
||||
|
||||
pub fn apply_blur(&self, image: &DynamicImage, sigma: f32) -> DynamicImage {
|
||||
image.blur(sigma)
|
||||
}
|
||||
|
||||
pub fn adjust_brightness(&self, image: &DynamicImage, value: i32) -> DynamicImage {
|
||||
image.brighten(value)
|
||||
}
|
||||
|
||||
pub fn adjust_contrast(&self, image: &DynamicImage, value: f32) -> DynamicImage {
|
||||
image.adjust_contrast(value)
|
||||
}
|
||||
|
||||
pub fn add_text(
|
||||
&self,
|
||||
image: &mut DynamicImage,
|
||||
text: &str,
|
||||
x: i32,
|
||||
y: i32,
|
||||
size: f32,
|
||||
color: Rgba<u8>,
|
||||
) -> Result<()> {
|
||||
// Load the font file from assets (downloaded in build.rs)
|
||||
let font_data = fs::read("assets/DejaVuSans.ttf")
|
||||
.map_err(|e| Error::internal(format!("Failed to load font: {}", e)))?;
|
||||
|
||||
let font = Font::try_from_vec(font_data)
|
||||
.ok_or_else(|| Error::internal("Failed to parse font data".to_string()))?;
|
||||
|
||||
let scale = Scale::uniform(size);
|
||||
let image_buffer = image.as_mut_rgba8()
|
||||
.ok_or_else(|| Error::internal("Failed to convert image to RGBA".to_string()))?;
|
||||
|
||||
draw_text_mut(
|
||||
image_buffer,
|
||||
color,
|
||||
x,
|
||||
y,
|
||||
scale,
|
||||
&font,
|
||||
text
|
||||
);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
|
||||
}
|
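A usage sketch; note that add_text only succeeds on RGBA-backed images (coordinates and color are illustrative):

let mut img = image::DynamicImage::new_rgba8(64, 64); // RGBA so as_mut_rgba8 succeeds
let processor = ImageProcessor::new();
processor.add_text(&mut img, "hello", 4, 4, 16.0, image::Rgba([255, 0, 0, 255]))?;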
18
gb-infra/Cargo.toml
Normal file
|
@ -0,0 +1,18 @@
|
|||
[package]
|
||||
name = "gb-infra"
|
||||
version.workspace = true
|
||||
edition.workspace = true
|
||||
authors.workspace = true
|
||||
license.workspace = true
|
||||
|
||||
[dependencies]
|
||||
dotenv = { workspace = true }
|
||||
ctrlc = { workspace = true }
|
||||
tokio = { workspace = true }
|
||||
serde = { workspace = true }
|
||||
serde_json = { workspace = true }
|
||||
reqwest = { workspace = true }
|
||||
flate2 = { workspace = true }
|
||||
tar = { workspace = true }
|
||||
zip = { workspace = true }
|
||||
|
20
gb-infra/README.md
Normal file
|
@ -0,0 +1,20 @@
|
|||
|
||||
# Backup
|
||||
|
||||
## Fastest way to transfer files between servers over TCP/IP
|
||||
|
||||
rsync -avz --progress --bwlimit=0 -e "ssh -p 22 -T -c aes128-gcm@openssh.com -o Compression=no -o IPQoS=throughput" gbbackup@host.com.br:/opt/gbo/backup /home/user/Desktop
|
||||
|
||||
|
||||
# Security
|
||||
apt update && apt install -y fail2ban iptables-persistent

systemctl enable fail2ban
systemctl enable netfilter-persistent
|
||||
|
||||
# To add

DocuSeal (self-hosted DocuSign alternative): https://www.docuseal.com/on-premises
|
BIN
gb-infra/architecture.png
Normal file
Binary file not shown.
78
gb-infra/architecture.txt
Normal file
|
@ -0,0 +1,78 @@
|
|||
flowchart TB
|
||||
%% Style definitions
|
||||
classDef users fill:#FF9900,stroke:#FF6600,stroke-width:2px,color:white,font-weight:bold
|
||||
classDef identity fill:#4285F4,stroke:#2956B2,stroke-width:2px,color:white,font-weight:bold
|
||||
classDef content fill:#0F9D58,stroke:#0B8043,stroke-width:2px,color:white,font-weight:bold
|
||||
classDef communication fill:#DB4437,stroke:#B31412,stroke-width:2px,color:white,font-weight:bold
|
||||
classDef ai fill:#9C27B0,stroke:#7B1FA2,stroke-width:2px,color:white,font-weight:bold
|
||||
classDef bot fill:#FB8C00,stroke:#EF6C00,stroke-width:2px,color:white,font-weight:bold
|
||||
|
||||
%% Main user node
|
||||
User((👤 Users))
|
||||
|
||||
subgraph "Identity & Access Management"
|
||||
Zitadel["🔐 Identity Provider<br>(Zitadel)"]
|
||||
Stalwart["✉️ Mail Server<br>(Stalwart)"]
|
||||
end
|
||||
|
||||
subgraph "Content & Collaboration"
|
||||
Forgejo["📊 ALM Server<br>(Forgejo)"]
|
||||
Static["🌐 Static Site Generator<br>(Website Static)"]
|
||||
Minio["💾 Object Storage<br>(MinIO)"]
|
||||
end
|
||||
|
||||
subgraph "Communication & Delivery"
|
||||
Caddy["🔄 Reverse Proxy<br>(Caddy)"]
|
||||
LiveKit["💬 Real-time Communication<br>(LiveKit)"]
|
||||
end
|
||||
|
||||
subgraph "AI & Integration Layer"
|
||||
LLM["🧠 LLM Engine<br>(Nomic LLM)"]
|
||||
|
||||
subgraph "Bot Framework"
|
||||
BotFramework["🤖 Bot Framework"]
|
||||
|
||||
subgraph "Bot Capabilities"
|
||||
MCP["📨 Message Control Protocol<br>(MCP)"]
|
||||
GET["🔗 GET Web Service Calls"]
|
||||
BASIC["⚙️ BASIC Engine"]
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
%% Connection lines with colors
|
||||
User --> |"User Access"| Caddy
|
||||
Caddy --> |"Auth"| Zitadel
|
||||
Caddy --> |"Code & Issues"| Forgejo
|
||||
Caddy --> |"Content"| Static
|
||||
Caddy --> |"Real-time"| LiveKit
|
||||
Caddy --> |"AI & Bots"| BotFramework
|
||||
|
||||
Zitadel --> |"SSO"| Forgejo
|
||||
Zitadel --> |"Auth"| LiveKit
|
||||
Zitadel --> |"Identity"| BotFramework
|
||||
|
||||
Forgejo --> |"Store"| Minio
|
||||
Static --> |"Assets"| Minio
|
||||
|
||||
BotFramework --> MCP
|
||||
BotFramework --> GET
|
||||
BotFramework --> BASIC
|
||||
BotFramework --> |"NLP"| LLM
|
||||
|
||||
Stalwart --> |"Email"| BotFramework
|
||||
LiveKit --> |"Messaging"| BotFramework
|
||||
|
||||
%% Integration flows - dashed lines with colors
|
||||
MCP -.-> |"Message Routing"| Stalwart
|
||||
GET -.-> |"API Calls"| Forgejo
|
||||
BASIC -.-> |"Scripting"| Minio
|
||||
LLM -.-> |"Content Generation"| Static
|
||||
|
||||
%% Apply styles
|
||||
class User users
|
||||
class Zitadel,Stalwart identity
|
||||
class Forgejo,Static,Minio content
|
||||
class Caddy,LiveKit communication
|
||||
class LLM ai
|
||||
class BotFramework,MCP,GET,BASIC bot
|
9
gb-infra/src/lib.rs
Normal file
|
@ -0,0 +1,9 @@
|
|||
pub mod manager;
|
||||
pub mod setup;
|
||||
pub mod services {
|
||||
pub mod minio;
|
||||
pub mod nginx;
|
||||
pub mod postgresql;
|
||||
pub mod stalwart;
|
||||
pub mod zitadel;
|
||||
}
|
60
gb-infra/src/manager.rs
Normal file
|
@ -0,0 +1,60 @@
|
|||
use crate::services::{zitadel, stalwart, minio, postgresql, nginx};
|
||||
use dotenv::dotenv;
|
||||
use std::sync::{Arc, Mutex};
|
||||
use std::thread;
|
||||
use std::time::Duration;
|
||||
|
||||
pub struct ServiceManager {
|
||||
services: Vec<Box<dyn Service>>,
|
||||
}
|
||||
|
||||
impl ServiceManager {
|
||||
pub fn new() -> Self {
|
||||
dotenv().ok();
|
||||
ServiceManager {
|
||||
services: vec![
|
||||
Box::new(zitadel::Zitadel::new()),
|
||||
Box::new(stalwart::Stalwart::new()),
|
||||
Box::new(minio::MinIO::new()),
|
||||
Box::new(postgresql::PostgreSQL::new()),
|
||||
Box::new(nginx::NGINX::new()),
|
||||
],
|
||||
}
|
||||
}
|
||||
|
||||
pub fn start(&mut self) {
|
||||
for service in &mut self.services {
|
||||
service.start().unwrap();
|
||||
}
|
||||
}
|
||||
|
||||
pub fn stop(&mut self) {
|
||||
for service in &mut self.services {
|
||||
service.stop().unwrap();
|
||||
}
|
||||
}
|
||||
|
||||
pub fn run(&mut self) {
|
||||
self.start();
|
||||
let running = Arc::new(Mutex::new(true));
|
||||
let running_clone = Arc::clone(&running);
|
||||
|
||||
ctrlc::set_handler(move || {
|
||||
println!("Exiting service manager...");
|
||||
let mut running = running_clone.lock().unwrap();
|
||||
*running = false;
|
||||
})
|
||||
.expect("Failed to set Ctrl+C handler.");
|
||||
|
||||
while *running.lock().unwrap() {
|
||||
thread::sleep(Duration::from_secs(1));
|
||||
}
|
||||
|
||||
self.stop();
|
||||
}
|
||||
}
|
||||
|
||||
pub trait Service {
|
||||
fn start(&mut self) -> Result<(), String>;
|
||||
fn stop(&mut self) -> Result<(), String>;
|
||||
}
|
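A minimal entry-point sketch: start every managed service and block until Ctrl+C:

fn main() {
    let mut manager = gb_infra::manager::ServiceManager::new();
    manager.run(); // starts all services, waits for Ctrl+C, then stops them
}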
54
gb-infra/src/services/minio.rs
Normal file
|
@ -0,0 +1,54 @@
|
|||
use crate::manager::Service;
|
||||
use std::env;
|
||||
use std::process::Command;
|
||||
use std::collections::HashMap;
|
||||
use dotenv::dotenv;
|
||||
|
||||
pub struct MinIO {
|
||||
env_vars: HashMap<String, String>,
|
||||
process: Option<std::process::Child>,
|
||||
}
|
||||
|
||||
impl MinIO {
|
||||
pub fn new() -> Self {
|
||||
dotenv().ok();
|
||||
let env_vars = vec![
|
||||
"MINIO_ROOT_USER",
|
||||
"MINIO_ROOT_PASSWORD",
|
||||
"MINIO_VOLUMES",
|
||||
"MINIO_ADDRESS",
|
||||
]
|
||||
.into_iter()
|
||||
.filter_map(|key| env::var(key).ok().map(|value| (key.to_string(), value)))
|
||||
.collect();
|
||||
|
||||
MinIO {
|
||||
env_vars,
|
||||
process: None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Service for MinIO {
|
||||
fn start(&mut self) -> Result<(), String> {
|
||||
if self.process.is_some() {
|
||||
return Err("MinIO is already running.".to_string());
|
||||
}
|
||||
|
||||
let mut command = Command::new("/opt/gbo/bin/minio");
|
||||
for (key, value) in &self.env_vars {
|
||||
command.env(key, value);
|
||||
}
|
||||
|
||||
self.process = Some(command.spawn().map_err(|e| e.to_string())?);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn stop(&mut self) -> Result<(), String> {
|
||||
if let Some(mut child) = self.process.take() {
|
||||
child.kill().map_err(|e| e.to_string())?;
|
||||
child.wait().map_err(|e| e.to_string())?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
}
|
83
gb-infra/src/services/nginx.rs
Normal file
|
@ -0,0 +1,83 @@
|
|||
use crate::manager::Service;
|
||||
use std::env;
|
||||
use std::process::Command;
|
||||
use std::collections::HashMap;
|
||||
use dotenv::dotenv;
|
||||
|
||||
pub struct NGINX {
|
||||
env_vars: HashMap<String, String>,
|
||||
process: Option<std::process::Child>,
|
||||
}
|
||||
|
||||
impl NGINX {
|
||||
pub fn new() -> Self {
|
||||
dotenv().ok();
|
||||
let env_vars = vec![
|
||||
"NGINX_ERROR_LOG",
|
||||
"NGINX_ACCESS_LOG",
|
||||
]
|
||||
.into_iter()
|
||||
.filter_map(|key| env::var(key).ok().map(|value| (key.to_string(), value)))
|
||||
.collect();
|
||||
|
||||
NGINX {
|
||||
env_vars,
|
||||
process: None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Service for NGINX {
|
||||
fn start(&mut self) -> Result<(), String> {
|
||||
if self.process.is_some() {
|
||||
return Err("NGINX is already running.".to_string());
|
||||
}
|
||||
|
||||
// Configure NGINX logs
|
||||
let error_log = self.env_vars.get("NGINX_ERROR_LOG").unwrap();
|
||||
let access_log = self.env_vars.get("NGINX_ACCESS_LOG").unwrap();
|
||||
|
||||
// Update NGINX configuration
|
||||
let nginx_conf = format!(
|
||||
r#"
|
||||
error_log {} debug;
|
||||
access_log {};
|
||||
events {{}}
|
||||
http {{
|
||||
server {{
|
||||
listen 80;
|
||||
server_name localhost;
|
||||
location / {{
|
||||
root /var/www/html;
|
||||
}}
|
||||
}}
|
||||
}}
|
||||
"#,
|
||||
error_log, access_log
|
||||
);
|
||||
|
||||
// Write the configuration to /etc/nginx/nginx.conf
|
||||
std::fs::write("/etc/nginx/nginx.conf", nginx_conf).map_err(|e| e.to_string())?;
|
||||
|
||||
// Start NGINX
|
||||
let mut command = Command::new("nginx");
|
||||
self.process = Some(command.spawn().map_err(|e| e.to_string())?);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn stop(&mut self) -> Result<(), String> {
|
||||
if let Some(mut child) = self.process.take() {
|
||||
child.kill().map_err(|e| e.to_string())?;
|
||||
child.wait().map_err(|e| e.to_string())?;
|
||||
}
|
||||
|
||||
// Stop NGINX
|
||||
Command::new("nginx")
|
||||
.arg("-s")
|
||||
.arg("stop")
|
||||
.status()
|
||||
.map_err(|e| e.to_string())?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
89
gb-infra/src/services/postgresql.rs
Normal file
|
@ -0,0 +1,89 @@
|
|||
use crate::manager::Service;
|
||||
use std::env;
|
||||
use std::process::Command;
|
||||
use std::collections::HashMap;
|
||||
use dotenv::dotenv;
|
||||
|
||||
pub struct PostgreSQL {
|
||||
env_vars: HashMap<String, String>,
|
||||
process: Option<std::process::Child>,
|
||||
}
|
||||
|
||||
impl PostgreSQL {
|
||||
pub fn new() -> Self {
|
||||
dotenv().ok();
|
||||
let env_vars = vec![
|
||||
"POSTGRES_DATA_DIR",
|
||||
"POSTGRES_PORT",
|
||||
"POSTGRES_USER",
|
||||
"POSTGRES_PASSWORD",
|
||||
]
|
||||
.into_iter()
|
||||
.filter_map(|key| env::var(key).ok().map(|value| (key.to_string(), value)))
|
||||
.collect();
|
||||
|
||||
PostgreSQL {
|
||||
env_vars,
|
||||
process: None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Service for PostgreSQL {
|
||||
fn start(&mut self) -> Result<(), String> {
|
||||
if self.process.is_some() {
|
||||
return Err("PostgreSQL is already running.".to_string());
|
||||
}
|
||||
|
||||
// Initialize PostgreSQL data directory if it doesn't exist
|
||||
let data_dir = self.env_vars.get("POSTGRES_DATA_DIR").unwrap();
|
||||
if !std::path::Path::new(data_dir).exists() {
|
||||
Command::new("sudo")
|
||||
.arg("-u")
|
||||
.arg("postgres")
|
||||
.arg("/usr/lib/postgresql/14/bin/initdb")
|
||||
.arg("-D")
|
||||
.arg(data_dir)
|
||||
.status()
|
||||
.map_err(|e| e.to_string())?;
|
||||
}
|
||||
|
||||
// Start PostgreSQL
|
||||
let mut command = Command::new("sudo");
|
||||
command
|
||||
.arg("-u")
|
||||
.arg("postgres")
|
||||
.arg("/usr/lib/postgresql/14/bin/pg_ctl")
|
||||
.arg("start")
|
||||
.arg("-D")
|
||||
.arg(data_dir);
|
||||
|
||||
for (key, value) in &self.env_vars {
|
||||
command.env(key, value);
|
||||
}
|
||||
|
||||
self.process = Some(command.spawn().map_err(|e| e.to_string())?);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn stop(&mut self) -> Result<(), String> {
|
||||
if let Some(mut child) = self.process.take() {
|
||||
child.kill().map_err(|e| e.to_string())?;
|
||||
child.wait().map_err(|e| e.to_string())?;
|
||||
}
|
||||
|
||||
// Stop PostgreSQL
|
||||
let data_dir = self.env_vars.get("POSTGRES_DATA_DIR").unwrap();
|
||||
Command::new("sudo")
|
||||
.arg("-u")
|
||||
.arg("postgres")
|
||||
.arg("/usr/lib/postgresql/14/bin/pg_ctl")
|
||||
.arg("stop")
|
||||
.arg("-D")
|
||||
.arg(data_dir)
|
||||
.status()
|
||||
.map_err(|e| e.to_string())?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
58
gb-infra/src/services/stalwart.rs
Normal file
|
@ -0,0 +1,58 @@
|
|||
use crate::manager::Service;
|
||||
use std::env;
|
||||
use std::process::Command;
|
||||
use std::collections::HashMap;
|
||||
use dotenv::dotenv;
|
||||
|
||||
pub struct Stalwart {
|
||||
env_vars: HashMap<String, String>,
|
||||
process: Option<std::process::Child>,
|
||||
}
|
||||
|
||||
impl Stalwart {
|
||||
pub fn new() -> Self {
|
||||
dotenv().ok();
|
||||
let env_vars = vec![
|
||||
"STALWART_LOG_LEVEL",
|
||||
"STALWART_OAUTH_PROVIDER",
|
||||
"STALWART_OAUTH_CLIENT_ID",
|
||||
"STALWART_OAUTH_CLIENT_SECRET",
|
||||
"STALWART_OAUTH_AUTHORIZATION_ENDPOINT",
|
||||
"STALWART_OAUTH_TOKEN_ENDPOINT",
|
||||
"STALWART_OAUTH_USERINFO_ENDPOINT",
|
||||
"STALWART_OAUTH_SCOPE",
|
||||
]
|
||||
.into_iter()
|
||||
.filter_map(|key| env::var(key).ok().map(|value| (key.to_string(), value)))
|
||||
.collect();
|
||||
|
||||
Stalwart {
|
||||
env_vars,
|
||||
process: None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Service for Stalwart {
|
||||
fn start(&mut self) -> Result<(), String> {
|
||||
if self.process.is_some() {
|
||||
return Err("Stalwart Mail is already running.".to_string());
|
||||
}
|
||||
|
||||
let mut command = Command::new("/opt/gbo/bin/stalwart");
|
||||
for (key, value) in &self.env_vars {
|
||||
command.env(key, value);
|
||||
}
|
||||
|
||||
self.process = Some(command.spawn().map_err(|e| e.to_string())?);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn stop(&mut self) -> Result<(), String> {
|
||||
if let Some(mut child) = self.process.take() {
|
||||
child.kill().map_err(|e| e.to_string())?;
|
||||
child.wait().map_err(|e| e.to_string())?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
}
|
63
gb-infra/src/services/zitadel.rs
Normal file
|
@ -0,0 +1,63 @@
|
|||
use crate::manager::Service;
|
||||
use std::env;
|
||||
use std::process::Command;
|
||||
use std::collections::HashMap;
|
||||
use dotenv::dotenv;
|
||||
|
||||
pub struct Zitadel {
|
||||
env_vars: HashMap<String, String>,
|
||||
process: Option<std::process::Child>,
|
||||
}
|
||||
|
||||
impl Zitadel {
|
||||
pub fn new() -> Self {
|
||||
dotenv().ok();
|
||||
let env_vars = vec![
|
||||
"ZITADEL_DEFAULTINSTANCE_INSTANCENAME",
|
||||
"ZITADEL_DEFAULTINSTANCE_ORG_NAME",
|
||||
"ZITADEL_DATABASE_POSTGRES_HOST",
|
||||
"ZITADEL_DATABASE_POSTGRES_PORT",
|
||||
"ZITADEL_DATABASE_POSTGRES_DATABASE",
|
||||
"ZITADEL_DATABASE_POSTGRES_USER_USERNAME",
|
||||
"ZITADEL_DATABASE_POSTGRES_USER_PASSWORD",
|
||||
"ZITADEL_DATABASE_POSTGRES_ADMIN_SSL_MODE",
|
||||
"ZITADEL_DATABASE_POSTGRES_USER_SSL_MODE",
|
||||
"ZITADEL_DATABASE_POSTGRES_ADMIN_USERNAME",
|
||||
"ZITADEL_DATABASE_POSTGRES_ADMIN_PASSWORD",
|
||||
"ZITADEL_EXTERNALSECURE",
|
||||
"ZITADEL_MASTERKEY",
|
||||
]
|
||||
.into_iter()
|
||||
.filter_map(|key| env::var(key).ok().map(|value| (key.to_string(), value)))
|
||||
.collect();
|
||||
|
||||
Zitadel {
|
||||
env_vars,
|
||||
process: None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Service for Zitadel {
|
||||
fn start(&mut self) -> Result<(), String> {
|
||||
if self.process.is_some() {
|
||||
return Err("Zitadel is already running.".to_string());
|
||||
}
|
||||
|
||||
let mut command = Command::new("/opt/gbo/bin/zitadel");
|
||||
for (key, value) in &self.env_vars {
|
||||
command.env(key, value);
|
||||
}
|
||||
|
||||
self.process = Some(command.spawn().map_err(|e| e.to_string())?);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn stop(&mut self) -> Result<(), String> {
|
||||
if let Some(mut child) = self.process.take() {
|
||||
child.kill().map_err(|e| e.to_string())?;
|
||||
child.wait().map_err(|e| e.to_string())?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
}
|
276
gb-infra/src/setup.rs
Normal file
|
@ -0,0 +1,276 @@
|
|||
use std::{
|
||||
fs::{self, File},
|
||||
io::{self, Write},
|
||||
path::{Path, PathBuf},
|
||||
process::Command,
|
||||
env,
|
||||
};
|
||||
|
||||
use reqwest::blocking::Client;
|
||||
use flate2::read::GzDecoder;
|
||||
use tar::Archive;
|
||||
use zip::ZipArchive;
|
||||
|
||||
// TODO: https://helpcenter.onlyoffice.com/docs/installation/docs-community-install-ubuntu.aspx
|
||||
|
||||
|
||||
const INSTALL_DIR: &str = "/opt/gbo";
|
||||
const TEMP_DIR: &str = "/tmp/gbotemp";
|
||||
|
||||
#[derive(Debug)]
|
||||
struct Component {
|
||||
name: &'static str,
|
||||
bin_dir: &'static str,
|
||||
download_url: Option<&'static str>,
|
||||
archive_type: ArchiveType,
|
||||
binaries: Vec<&'static str>,
|
||||
config_files: Vec<ConfigFile>,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
struct ConfigFile {
|
||||
src_url: Option<&'static str>,
|
||||
src_path: Option<&'static str>,
|
||||
dest_name: &'static str,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
enum ArchiveType {
|
||||
TarGz,
|
||||
Zip,
|
||||
Binary,
|
||||
}
|
||||
|
||||
#[allow(non_snake_case)] // name kept as-is so existing callers keep compiling
pub fn doIt() -> io::Result<()> {
|
||||
// Define all components
|
||||
let components = [
|
||||
// Directory (Zitadel)
|
||||
Component {
|
||||
name: "zitadel",
|
||||
bin_dir: "directory",
|
||||
download_url: Some("https://github.com/zitadel/zitadel/releases/latest/download/zitadel_Linux_x86_64.tar.gz"),
|
||||
archive_type: ArchiveType::TarGz,
|
||||
binaries: vec!["zitadel"],
|
||||
config_files: vec![ConfigFile {
|
||||
src_url: None,
|
||||
src_path: Some("src/config/directory/zitadel.yaml"),
|
||||
dest_name: "zitadel.yaml",
|
||||
}],
|
||||
},
|
||||
// Mail (Stalwart)
|
||||
Component {
|
||||
name: "stalwart-mail",
|
||||
bin_dir: "mail",
|
||||
download_url: Some("https://github.com/stalwartlabs/mail-server/releases/latest/download/stalwart-linux-x86_64.tar.gz"),
|
||||
archive_type: ArchiveType::TarGz,
|
||||
binaries: vec!["stalwart-mail"],
|
||||
config_files: vec![ConfigFile {
|
||||
src_url: Some("https://raw.githubusercontent.com/stalwartlabs/mail-server/main/resources/config/config.toml"),
|
||||
src_path: None,
|
||||
dest_name: "config.toml",
|
||||
}],
|
||||
},
|
||||
// Tabular (PostgreSQL)
|
||||
Component {
|
||||
name: "postgresql",
|
||||
bin_dir: "tabular",
|
||||
download_url: Some("https://get.enterprisedb.com/postgresql/postgresql-14.10-1-linux-x64-binaries.tar.gz"),
|
||||
archive_type: ArchiveType::TarGz,
|
||||
binaries: vec!["postgres", "pg_ctl", "psql", "pg_dump", "pg_restore"],
|
||||
config_files: vec![],
|
||||
},
|
||||
// Object (MinIO)
|
||||
Component {
|
||||
name: "minio",
|
||||
bin_dir: "object",
|
||||
download_url: Some("https://dl.min.io/server/minio/release/linux-amd64/minio"),
|
||||
archive_type: ArchiveType::Binary,
|
||||
binaries: vec!["minio"],
|
||||
config_files: vec![],
|
||||
},
|
||||
// Webserver (Caddy)
|
||||
Component {
|
||||
name: "caddy",
|
||||
bin_dir: "webserver",
|
||||
download_url: Some("https://github.com/caddyserver/caddy/releases/latest/download/caddy_linux_amd64.tar.gz"),
|
||||
archive_type: ArchiveType::TarGz,
|
||||
binaries: vec!["caddy"],
|
||||
config_files: vec![ConfigFile {
|
||||
src_url: None,
|
||||
src_path: Some("src/config/webserver/Caddyfile"),
|
||||
dest_name: "Caddyfile",
|
||||
}],
|
||||
},
|
||||
];
|
||||
|
||||
// Create directories
|
||||
create_directories()?;
|
||||
|
||||
// Install dependencies
|
||||
install_dependencies()?;
|
||||
|
||||
// Create HTTP client
|
||||
let client = Client::new();
|
||||
|
||||
// Process all components
|
||||
for component in components.iter() {
|
||||
install_component(&component, &client)?;
|
||||
}
|
||||
|
||||
// Clean up temp directory
|
||||
fs::remove_dir_all(TEMP_DIR)?;
|
||||
|
||||
println!("All binaries downloaded to {}", INSTALL_DIR);
|
||||
println!("Use the start-stop script to manually control all components");
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn create_directories() -> io::Result<()> {
|
||||
println!("Creating directories...");
|
||||
|
||||
// Main directories
|
||||
fs::create_dir_all(INSTALL_DIR)?;
|
||||
Command::new("chmod").args(["777", INSTALL_DIR]).status()?;
|
||||
fs::create_dir_all(TEMP_DIR)?;
|
||||
|
||||
// Component directories
|
||||
let dirs = [
|
||||
"bin/bot", "bin/mail", "bin/tabular", "bin/object",
|
||||
"bin/directory", "bin/alm", "bin/webserver", "bin/meeting",
|
||||
"config/bot", "config/mail", "config/tabular", "config/object",
|
||||
"config/directory", "config/alm", "config/webserver", "config/meeting",
|
||||
"data/bot", "data/mail", "data/tabular", "data/object",
|
||||
"data/directory", "data/alm", "data/webserver", "data/meeting",
|
||||
"logs", "certs"
|
||||
];
|
||||
|
||||
for dir in dirs {
|
||||
fs::create_dir_all(format!("{}/{}", INSTALL_DIR, dir))?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn install_dependencies() -> io::Result<()> {
|
||||
println!("Installing system dependencies...");
|
||||
Command::new("apt-get").args(["update"]).status()?;
|
||||
Command::new("apt-get").args(["install", "-y",
|
||||
"apt-transport-https", "ca-certificates", "curl",
|
||||
"software-properties-common", "gnupg", "wget",
|
||||
"unzip", "tar", "postgresql-client", "redis-tools"
|
||||
]).status()?;
|
||||
Ok(())
|
||||
}

fn install_component(component: &Component, client: &Client) -> io::Result<()> {
    println!("Installing {}...", component.name);

    if let Some(url) = component.download_url {
        let temp_path = format!("{}/{}", TEMP_DIR, component.name);
        let target_dir = format!("{}/bin/{}", INSTALL_DIR, component.bin_dir);

        // Download the file
        download_file(client, url, &temp_path)?;

        match component.archive_type {
            ArchiveType::TarGz => {
                // Extract tar.gz archive
                let tar_gz = File::open(&temp_path)?;
                let tar = GzDecoder::new(tar_gz);
                let mut archive = Archive::new(tar);
                archive.unpack(TEMP_DIR)?;

                // Move binaries to target directory
                for binary in &component.binaries {
                    let src = format!("{}/{}", TEMP_DIR, binary);
                    let dest = format!("{}/{}", target_dir, binary);

                    if Path::new(&src).exists() {
                        fs::rename(&src, &dest)?;
                        set_executable(&dest)?;
                    } else {
                        // For PostgreSQL, which has its binaries in pgsql/bin/
                        let pg_src = format!("{}/pgsql/bin/{}", TEMP_DIR, binary);
                        if Path::new(&pg_src).exists() {
                            fs::rename(&pg_src, &dest)?;
                            set_executable(&dest)?;
                        }
                    }
                }
            },
            ArchiveType::Zip => {
                // Extract zip archive
                let file = File::open(&temp_path)?;
                let mut archive = ZipArchive::new(file)?;
                archive.extract(TEMP_DIR)?;

                // Move binaries to target directory
                for binary in &component.binaries {
                    let src = format!("{}/{}", TEMP_DIR, binary);
                    let dest = format!("{}/{}", target_dir, binary);

                    if Path::new(&src).exists() {
                        fs::rename(&src, &dest)?;
                        set_executable(&dest)?;
                    }
                }
            },
            ArchiveType::Binary => {
                // Single binary - just move it to the target location
                let dest = format!("{}/{}", target_dir, component.name);
                fs::rename(&temp_path, &dest)?;
                set_executable(&dest)?;
            },
        }

        // Clean up downloaded file
        fs::remove_file(temp_path)?;
    }

    // Handle config files
    for config in &component.config_files {
        let config_dir = format!("{}/config/{}", INSTALL_DIR, component.bin_dir);
        let dest_path = format!("{}/{}", config_dir, config.dest_name);

        if let Some(url) = config.src_url {
            // Download config from URL
            download_file(client, url, &dest_path)?;
        } else if let Some(src_path) = config.src_path {
            // Copy config from local source (placeholder)
            println!("Would copy config from {} to {}", src_path, dest_path);
            // fs::copy(src_path, dest_path)?;
        }
    }

    println!("{} installed successfully!", component.name);
    Ok(())
}
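
Editor's note: `fs::rename` fails with `EXDEV` when TEMP_DIR and INSTALL_DIR live on different filesystems (e.g. /tmp on tmpfs). A hedged fallback sketch, std only, with illustrative names:

    use std::fs;
    use std::io;

    // Try a cheap rename first; fall back to copy + remove across filesystems.
    fn move_file(src: &str, dest: &str) -> io::Result<()> {
        match fs::rename(src, dest) {
            Ok(()) => Ok(()),
            Err(_) => {
                fs::copy(src, dest)?;
                fs::remove_file(src)
            }
        }
    }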

fn download_file(client: &Client, url: &str, dest_path: &str) -> io::Result<()> {
    println!("Downloading {} to {}", url, dest_path);

    let mut response = client.get(url)
        .send()
        .map_err(|e| io::Error::new(io::ErrorKind::Other, e))?;

    if !response.status().is_success() {
        return Err(io::Error::new(
            io::ErrorKind::Other,
            format!("Failed to download file: HTTP {}", response.status()),
        ));
    }

    let mut dest_file = File::create(dest_path)?;
    response.copy_to(&mut dest_file)
        .map_err(|e| io::Error::new(io::ErrorKind::Other, e))?;

    Ok(())
}
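
Editor's note: transient network failures are the usual failure mode for these release downloads; a small retry wrapper around `download_file` might look like this (sketch, assuming the same blocking `Client` used above; `tries` must be at least 1):

    use std::{thread, time::Duration};

    // Retry download_file with exponential backoff before giving up.
    fn download_with_retry(client: &Client, url: &str, dest: &str, tries: u32) -> io::Result<()> {
        let mut last_err = None;
        for attempt in 1..=tries {
            match download_file(client, url, dest) {
                Ok(()) => return Ok(()),
                Err(e) => {
                    eprintln!("attempt {}/{} failed: {}", attempt, tries, e);
                    last_err = Some(e);
                    thread::sleep(Duration::from_secs(2u64.pow(attempt)));
                }
            }
        }
        Err(last_err.unwrap())
    }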

fn set_executable(path: &str) -> io::Result<()> {
    Command::new("chmod")
        .args(["+x", path])
        .status()?;
    Ok(())
}
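
Editor's note: shelling out to chmod works, but on Unix the same effect is available through std; a sketch of a native alternative:

    use std::fs;
    use std::io;
    use std::os::unix::fs::PermissionsExt;

    // Set the executable bits without spawning an external process.
    fn set_executable_native(path: &str) -> io::Result<()> {
        let mut perms = fs::metadata(path)?.permissions();
        perms.set_mode(perms.mode() | 0o111); // add u+x, g+x, o+x
        fs::set_permissions(path, perms)
    }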

@@ -73,33 +73,27 @@ export OPENCV4NODEJS_DISABLE_AUTOBUILD=1
export OPENCV_LIB_DIR=/usr/lib/x86_64-linux-gnu

sudo apt install -y curl gnupg ca-certificates git

# Install Node.js 22.x
curl -fsSL https://deb.nodesource.com/setup_22.x | sudo bash -
sudo apt install -y nodejs
npm install -g pnpm@latest

# Install Rust 1.85
apt-get install -y libssl-dev pkg-config
sudo apt-get install -y \
    apt-transport-https \
    software-properties-common \
    gnupg \
    cmake \
    build-essential \
    clang \
    libclang-dev \
    libz-dev \
    libssl-dev \
    pkg-config \
    wget \
    unzip \
    tar

# Install Node.js 22.x
curl -fsSL https://deb.nodesource.com/setup_22.x | sudo bash -
sudo apt install -y nodejs

# Install Rust 1.85
curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- --default-toolchain 1.85.1 -y
source ~/.cargo/env
rustc --version

# Install Xvfb and other dependencies
sudo apt install -y xvfb libgbm-dev lxd-client
sudo apt install -y xvfb libgbm-dev

# Create directories
mkdir -p "$BIN_PATH" /opt/gbo/data /opt/gbo/conf /opt/gbo/logs || { echo 'Directory creation failed'; exit 1; }

@@ -12,7 +12,6 @@ lxc launch images:debian/12 "$PARAM_TENANT"-bot -c security.privileged=true
sleep 15

lxc exec "$PARAM_TENANT"-bot -- bash -c "
apt-get update && apt-get install -y \
    build-essential cmake git pkg-config libjpeg-dev libtiff-dev \
    libpng-dev libavcodec-dev libavformat-dev libswscale-dev \

@@ -59,19 +58,19 @@ mkdir -p /opt/gbo/data /opt/gbo/conf /opt/gbo/logs
sudo apt update
sudo apt install -y curl gnupg ca-certificates git

# Install Node.js 22.x
curl -fsSL https://deb.nodesource.com/setup_22.x | sudo bash -
sudo apt install -y nodejs

# Install Xvfb and other dependencies
sudo apt install -y xvfb libgbm-dev

wget https://dl.google.com/linux/chrome/deb/pool/main/g/google-chrome-stable/google-chrome-stable_128.0.6613.119-1_amd64.deb
sudo apt install ./google-chrome-stable_128.0.6613.119-1_amd64.deb

# Clone and set up the bot server
cd /opt/gbo/data
git clone https://alm.pragmatismo.com.br/generalbots/botserver.git
cd botserver
npm install

npx puppeteer browsers install chrome
./node_modules/.bin/tsc
cd packages/default.gbui
npm install

@@ -110,4 +109,4 @@ sudo systemctl start bot.service
lxc config device remove "$PARAM_TENANT"-bot bot-proxy 2>/dev/null || true
lxc config device add "$PARAM_TENANT"-bot bot-proxy proxy \
    listen=tcp:0.0.0.0:"$PARAM_BOT_PORT" \
    connect=tcp:127.0.0.1:"$PARAM_BOT_PORT"

@@ -36,12 +36,12 @@ curl -s https://brave-browser-apt-release.s3.brave.com/brave-core.asc | gpg --de
echo "deb [arch=amd64 signed-by=/usr/share/keyrings/brave-browser-archive-keyring.gpg] https://brave-browser-apt-release.s3.brave.com/ stable main" > /etc/apt/sources.list.d/brave-browser-release.list
apt update && apt install -y brave-browser

sudo apt install gnome-tweaks

# Cedilla input fix, set in /etc/environment
GTK_IM_MODULE=cedilla
QT_IM_MODULE=cedilla
"

port=3389
lxc config device remove "$PARAM_TENANT"-desktop "port-$port" 2>/dev/null || true
lxc config device add "$PARAM_TENANT"-desktop "port-$port" proxy listen=tcp:0.0.0.0:$port connect=tcp:127.0.0.1:$port

sudo iptables -t nat -A PREROUTING -p tcp --dport 3389 -j DNAT --to-destination CONTAINER_IP:3389
sudo iptables -A FORWARD -p tcp -d CONTAINER_IP --dport 3389 -j ACCEPT

@@ -32,7 +32,7 @@ User=minio-user
Group=minio-user
Environment="MINIO_ROOT_USER='"${PARAM_DRIVE_USER}"'"
Environment="MINIO_ROOT_PASSWORD='"${PARAM_DRIVE_PASSWORD}"'"
ExecStart=/usr/local/bin/minio server --address ":'"${PARAM_DRIVE_PORT}"'" --console-address ":'"${PARAM_PORT}"'" /data
ExecStart=/usr/local/bin/minio server --console-address ":'"${PARAM_DRIVE_PORT}"'" /data
StandardOutput=append:/var/log/minio/output.log
StandardError=append:/var/log/minio/error.log

@@ -53,4 +53,4 @@ lxc config device add "${PARAM_TENANT}-drive" minio-proxy proxy \
lxc config device remove "${PARAM_TENANT}-drive" console-proxy 2>/dev/null || true
lxc config device add "${PARAM_TENANT}-drive" console-proxy proxy \
    listen=tcp:0.0.0.0:"${PARAM_DRIVE_PORT}" \
    connect=tcp:127.0.0.1:"${PARAM_DRIVE_PORT}"

@@ -1,6 +1,11 @@
#!/bin/bash
PUBLIC_INTERFACE="eth0"  # Your host's public network interface

# Enable IP forwarding
echo "[HOST] Enabling IP forwarding..."
echo "net.ipv4.ip_forward=1" | sudo tee -a /etc/sysctl.conf
sudo sysctl -p

# Configure firewall
echo "[HOST] Configuring firewall..."
sudo iptables -A FORWARD -i $PUBLIC_INTERFACE -o lxcbr0 -p tcp -m multiport --dports 25,80,110,143,465,587,993,995,4190 -j ACCEPT

@@ -17,7 +22,7 @@ fi

# Create directory structure
echo "[CONTAINER] Creating directories..."
HOST_BASE="/opt/email"
HOST_BASE="/opt/gbo/tenants/$PARAM_TENANT/email"
HOST_DATA="$HOST_BASE/data"
HOST_CONF="$HOST_BASE/conf"
HOST_LOGS="$HOST_BASE/logs"

@@ -32,13 +37,9 @@ sleep 15

echo "[CONTAINER] Installing Stalwart Mail..."
lxc exec "$PARAM_TENANT"-email -- bash -c "

echo "nameserver $PARAM_DNS_INTERNAL_IP" > /etc/resolv.conf

apt install resolvconf -y
apt-get update && apt-get install -y wget libcap2-bin
wget -O /tmp/stalwart.tar.gz https://github.com/stalwartlabs/stalwart/releases/download/v0.13.1/stalwart-x86_64-unknown-linux-gnu.tar.gz
wget -O /tmp/stalwart.tar.gz https://github.com/stalwartlabs/stalwart/releases/download/v0.12.4/stalwart-x86_64-unknown-linux-gnu.tar.gz

tar -xzf /tmp/stalwart.tar.gz -C /tmp
mkdir -p /opt/gbo/bin
mv /tmp/stalwart /opt/gbo/bin/stalwart

@@ -61,9 +62,9 @@ sudo chown -R "$HOST_EMAIL_UID:$HOST_EMAIL_GID" "$HOST_BASE"

# Mount directories
echo "[CONTAINER] Mounting directories..."
lxc config device add emailprofile emaildata disk source="$HOST_DATA" path=/opt/gbo/data
lxc config device add emailprofile emailconf disk source="$HOST_CONF" path=/opt/gbo/conf
lxc config device add emailprofile emaillogs disk source="$HOST_LOGS" path=/opt/gbo/logs
lxc config device add "$PARAM_TENANT"-email emaildata disk source="$HOST_DATA" path=/opt/gbo/data
lxc config device add "$PARAM_TENANT"-email emailconf disk source="$HOST_CONF" path=/opt/gbo/conf
lxc config device add "$PARAM_TENANT"-email emaillogs disk source="$HOST_LOGS" path=/opt/gbo/logs

# Create systemd service
echo "[CONTAINER] Creating email service..."

@@ -91,8 +92,3 @@ systemctl daemon-reload
systemctl enable email
systemctl start email
"

for port in 25 80 110 143 465 587 993 995 4190; do
    lxc config device remove email "port-$port" 2>/dev/null || true
    lxc config device add email "port-$port" proxy listen=tcp:0.0.0.0:$port connect=tcp:127.0.0.1:$port
done

@@ -47,10 +47,10 @@ chown -R gbuser:gbuser /opt/gbo/{bin,data,conf,logs}
systemctl enable proxy
"

for port in 80 443; do
    lxc config device remove "$PARAM_TENANT"-proxy "port-$port" 2>/dev/null || true
    lxc config device add "$PARAM_TENANT"-proxy "port-$port" proxy listen=tcp:0.0.0.0:$port connect=tcp:127.0.0.1:$port
done

lxc config set "$PARAM_TENANT"-proxy security.syscalls.intercept.mknod true
lxc config set "$PARAM_TENANT"-proxy security.syscalls.intercept.setxattr true

@@ -0,0 +1,5 @@
printf "%-20s %-10s %-10s %-10s %-6s %s\n" "CONTAINER" "USED" "AVAIL" "TOTAL" "USE%" "MOUNT"
for container in $(lxc list -c n --format csv); do
    disk_info=$(lxc exec $container -- df -h / --output=used,avail,size,pcent | tail -n 1)
    printf "%-20s %s\n" "$container" "$disk_info"
done

@@ -3,40 +3,36 @@

# Define container limits in an associative array
declare -A container_limits=(
    # Pattern            Memory    CPU Allowance
    ["*tables*"]="4096MB:100ms/100ms"
    ["*dns*"]="2048MB:100ms/100ms"
    ["*doc-editor*"]="512MB:10ms/100ms"
    ["*proxy*"]="2048MB:100ms/100ms"
    ["*directory*"]="1024MB:50ms/100ms"
    ["*drive*"]="4096MB:50ms/100ms"
    ["*email*"]="4096MB:100ms/100ms"
    ["*webmail*"]="4096MB:100ms/100ms"
    ["*bot*"]="4096MB:50ms/100ms"
    ["*meeting*"]="4096MB:100ms/100ms"
    ["*alm*"]="512MB:50ms/100ms"
    ["*alm-ci*"]="4096MB:100ms/100ms"
    ["*system*"]="4096MB:50ms/100ms"
    ["*mailer*"]="4096MB:25ms/100ms"
    ["*tables*"]="2048MB:25ms/100ms"
    ["*proxy*"]="512MB:25ms/100ms"
    ["*directory*"]="512MB:25ms/100ms"
    ["*drive*"]="1024MB:25ms/100ms"
    ["*email*"]="1024MB:20ms/100ms"
    ["*webmail*"]="1024MB:20ms/100ms"
    ["*bot*"]="2048MB:50ms/100ms"
    ["*meeting*"]="1024MB:20ms/100ms"
    ["*alm*"]="512MB:20ms/100ms"
    ["*alm-ci*"]="1024MB:20ms/100ms"
)

# Default values (for containers that don't match any pattern)
DEFAULT_MEMORY="1024MB"
DEFAULT_CPU_ALLOWANCE="15ms/100ms"
CPU_COUNT=2
CPU_PRIORITY=10
CPU_PRIORITY=5

for pattern in "${!container_limits[@]}"; do
    echo "Configuring $container..."

    memory=$DEFAULT_MEMORY
    cpu_allowance=$DEFAULT_CPU_ALLOWANCE

# Configure all containers
for container in $(lxc list -c n --format csv); do
    # Check if container matches any pattern
    if [[ $container == $pattern ]]; then
        IFS=':' read -r memory cpu_allowance <<< "${container_limits[$pattern]}"

        # Apply configuration
        lxc config set "$container" limits.memory "$memory"
        lxc config set "$container" limits.cpu.allowance "$cpu_allowance"

@@ -50,4 +46,4 @@ for pattern in "${!container_limits[@]}"; do
            break
        fi
    done
done
done

@@ -0,0 +1,33 @@
for container in $(lxc list --format csv -c n); do
    echo "Processing $container..."

    # Stop container safely
    lxc stop "$container"

    # Set new 5GB limit (works for most drivers)
    if ! lxc config device override "$container" root size=5GB; then
        echo "Failed to set config, trying alternative method..."
        lxc config device set "$container" root size=5GB
    fi

    # Start container
    lxc start "$container"

    # Find root device inside container
    ROOT_DEV=$(lxc exec "$container" -- df / --output=source | tail -1)

    # Resize filesystem (with proper error handling)
    if lxc exec "$container" -- which resize2fs >/dev/null 2>&1; then
        echo "Resizing filesystem for $container..."
        if [[ "$ROOT_DEV" == /dev/* ]]; then
            lxc exec "$container" -- growpart "$(dirname "$ROOT_DEV")" "$(basename "$ROOT_DEV")"
            lxc exec "$container" -- resize2fs "$ROOT_DEV"
        else
            echo "Non-standard root device $ROOT_DEV - manual resize needed"
        fi
    else
        echo "resize2fs not available in $container - install it first"
    fi

    echo "Completed $container"
done

@@ -8,3 +8,4 @@ chmod +x /etc/profile.d/notimeout.sh
sed -i '/pam_exec.so/s/quiet/quiet set_timeout=0/' /etc/pam.d/sshd 2>/dev/null
source /etc/profile

@@ -0,0 +1,57 @@
#!/bin/bash
STORAGE_PATH="/opt/gbo/tenants/$PARAM_TENANT/system/data"
LOGS_PATH="/opt/gbo/tenants/$PARAM_TENANT/system/logs"

mkdir -p "${STORAGE_PATH}" "${LOGS_PATH}"
chmod -R 770 "${STORAGE_PATH}" "${LOGS_PATH}"
chown -R 100999:100999 "${STORAGE_PATH}" "${LOGS_PATH}"

lxc launch images:debian/12 "${PARAM_TENANT}-system" -c security.privileged=true
sleep 15

lxc config device add "${PARAM_TENANT}-system" storage disk source="${STORAGE_PATH}" path=/data
lxc config device add "${PARAM_TENANT}-system" logs disk source="${LOGS_PATH}" path=/var/log/minio

lxc exec "${PARAM_TENANT}-system" -- bash -c '
apt-get update && apt-get install -y wget
wget https://dl.min.io/server/minio/release/linux-amd64/minio -O /usr/local/bin/minio
chmod +x /usr/local/bin/minio

useradd -r -s /bin/false minio-user || true
mkdir -p /var/log/minio /data
chown -R minio-user:minio-user /var/log/minio /data

cat > /etc/systemd/system/minio.service <<EOF
[Unit]
Description=MinIO
After=network.target

[Service]
Type=simple
User=minio-user
Group=minio-user
Environment="MINIO_ROOT_USER='"${PARAM_system_USER}"'"
Environment="MINIO_ROOT_PASSWORD='"${PARAM_system_PASSWORD}"'"
ExecStart=/usr/local/bin/minio server --console-address ":'"${PARAM_system_PORT}"'" /data
StandardOutput=append:/var/log/minio/output.log
StandardError=append:/var/log/minio/error.log

[Install]
WantedBy=multi-user.target
EOF

systemctl daemon-reload
systemctl enable minio
systemctl start minio
'

lxc config device remove "${PARAM_TENANT}-system" minio-proxy 2>/dev/null || true
lxc config device add "${PARAM_TENANT}-system" minio-proxy proxy \
    listen=tcp:0.0.0.0:"${PARAM_system_API_PORT}" \
    connect=tcp:127.0.0.1:"${PARAM_system_API_PORT}"

lxc config device remove "${PARAM_TENANT}-system" console-proxy 2>/dev/null || true
lxc config device add "${PARAM_TENANT}-system" console-proxy proxy \
    listen=tcp:0.0.0.0:"${PARAM_system_PORT}" \
    connect=tcp:127.0.0.1:"${PARAM_system_PORT}"

@@ -0,0 +1,91 @@

HOST_BASE="/opt/gbo/tenants/$PARAM_TENANT/tables"
HOST_DATA="$HOST_BASE/data"
HOST_CONF="$HOST_BASE/conf"
HOST_LOGS="$HOST_BASE/logs"

mkdir -p "$HOST_DATA" "$HOST_CONF" "$HOST_LOGS"

lxc launch images:debian/12 "$PARAM_TENANT"-tables -c security.privileged=true

until lxc exec "$PARAM_TENANT"-tables -- test -f /bin/bash; do
    sleep 5
done
sleep 10

lxc exec "$PARAM_TENANT"-tables -- bash -c "
set -e
export DEBIAN_FRONTEND=noninteractive
apt-get update
apt-get install -y wget gnupg2 sudo lsb-release
CODENAME=\$(lsb_release -cs)

wget --quiet -O - https://www.postgresql.org/media/keys/ACCC4CF8.asc | gpg --dearmor > /etc/apt/trusted.gpg.d/postgresql.gpg
apt-get install -y postgresql-14 postgresql-client-14
if ! id postgres &>/dev/null; then
    exit 1
fi
systemctl stop postgresql@14-main 2>/dev/null || systemctl stop postgresql 2>/dev/null || true
"

POSTGRES_UID=$(lxc exec "$PARAM_TENANT"-tables -- id -u postgres)
POSTGRES_GID=$(lxc exec "$PARAM_TENANT"-tables -- id -g postgres)

HOST_POSTGRES_UID=$((100000 + POSTGRES_UID))
HOST_POSTGRES_GID=$((100000 + POSTGRES_GID))

chown -R "$HOST_POSTGRES_UID:$HOST_POSTGRES_GID" "$HOST_BASE"
chmod -R 750 "$HOST_BASE"

lxc config device add "$PARAM_TENANT"-tables pgdata disk source="$HOST_DATA" path=/var/lib/postgresql/14/main
lxc config device add "$PARAM_TENANT"-tables pgconf disk source="$HOST_CONF" path=/etc/postgresql/14/main
lxc config device add "$PARAM_TENANT"-tables pglogs disk source="$HOST_LOGS" path=/var/log/postgresql

mkdir -p /var/lib/postgresql/14/main
mkdir -p /etc/postgresql/14/main
mkdir -p /var/log/postgresql
chown -R postgres:postgres /var/lib/postgresql/14/main
chown -R postgres:postgres /etc/postgresql/14/main
chown -R postgres:postgres /var/log/postgresql
chmod 700 /var/lib/postgresql/14/main

sudo -u postgres /usr/lib/postgresql/14/bin/initdb -D /var/lib/postgresql/14/main

cat > /etc/postgresql/14/main/postgresql.conf <<EOF
data_directory = '/var/lib/postgresql/14/main'
hba_file = '/etc/postgresql/14/main/pg_hba.conf'
ident_file = '/etc/postgresql/14/main/pg_ident.conf'
listen_addresses = '*'
port = $PARAM_TABLES_PORT
max_connections = 100
shared_buffers = 128MB
log_destination = 'stderr'
logging_collector = on
log_directory = '/var/log/postgresql'
log_filename = 'postgresql-%Y-%m-%d_%H%M%S.log'
EOF

cat > /etc/postgresql/14/main/pg_hba.conf <<EOF
local   all   postgres                        peer
local   all   all                             peer
host    all   all       127.0.0.1/32          md5
host    all   all       ::1/128               md5
host    all   all       0.0.0.0/0             md5
EOF

systemctl start postgresql@14-main
systemctl enable postgresql@14-main

lxc config device remove "$PARAM_TENANT"-tables postgres-proxy 2>/dev/null || true
lxc config device add "$PARAM_TENANT"-tables postgres-proxy proxy \
    listen=tcp:0.0.0.0:"$PARAM_TABLES_PORT" \
    connect=tcp:127.0.0.1:"$PARAM_TABLES_PORT"

cd /var/lib/postgresql
until sudo -u postgres psql -p $PARAM_TABLES_PORT -c '\q' 2>/dev/null; do
    sleep 3
done
sudo -u "$PARAM_TABLES_USER" psql -p $PARAM_TABLES_PORT -c "CREATE USER $PARAM_TENANT WITH PASSWORD '$PARAM_TABLES_PASSWORD';" 2>/dev/null
sudo -u "$PARAM_TABLES_USER" psql -p $PARAM_TABLES_PORT -c "CREATE DATABASE ${PARAM_TENANT}_db OWNER $PARAM_TENANT;" 2>/dev/null
sudo -u "$PARAM_TABLES_USER" psql -p $PARAM_TABLES_PORT -c "GRANT ALL PRIVILEGES ON DATABASE ${PARAM_TENANT}_db TO $PARAM_TENANT;" 2>/dev/null
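
Editor's note: the user, password, and port provisioned above are the pieces a server-side `DATABASE_URL` is assembled from; a hedged sketch of that assembly (std only; the env-var names mirror the PARAM_* values of this script and are illustrative, not the project's API):

    use std::env;

    // Build a postgres:// URL from the tenant parameters set by this script.
    fn database_url() -> String {
        let user = env::var("PARAM_TENANT").unwrap_or_else(|_| "tenant".into());
        let pass = env::var("PARAM_TABLES_PASSWORD").unwrap_or_default();
        let port = env::var("PARAM_TABLES_PORT").unwrap_or_else(|_| "5432".into());
        format!("postgres://{user}:{pass}@127.0.0.1:{port}/{user}_db")
    }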

@@ -6,20 +6,20 @@ company = QUERY "SELECT Company FROM Opportunities WHERE Id = ${opportunity}"

doc = FILL template

' Generate email subject and content based on conversation history
# Generate email subject and content based on conversation history
subject = REWRITE "Based on this ${history}, generate a subject for a proposal email to ${company}"
contents = REWRITE "Based on this ${history}, and ${subject}, generate the e-mail body for ${to}, signed by ${user}, including key points from our proposal"

' Add proposal to CRM
# Add proposal to CRM
CALL "/files/upload", ".gbdrive/Proposals/${company}-proposal.docx", doc
CALL "/files/permissions", ".gbdrive/Proposals/${company}-proposal.docx", "sales-team", "edit"

' Record activity in CRM
# Record activity in CRM
CALL "/crm/activities/create", opportunity, "email_sent", {
    "subject": subject,
    "description": "Proposal sent to " + company,
    "date": NOW()
}

' Send the email
# Send the email
CALL "/comm/email/send", to, subject, contents, doc