new(all): Initial import.
This commit is contained in:
parent
091b5f675d
commit
8542f26cab
20 changed files with 949 additions and 371 deletions
2
Cargo.lock
generated
2
Cargo.lock
generated
|
@ -2349,6 +2349,7 @@ version = "0.1.0"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"async-trait",
|
"async-trait",
|
||||||
"axum 0.7.9",
|
"axum 0.7.9",
|
||||||
|
"futures-util",
|
||||||
"gb-core",
|
"gb-core",
|
||||||
"gb-messaging",
|
"gb-messaging",
|
||||||
"gb-monitoring",
|
"gb-monitoring",
|
||||||
|
@ -2406,6 +2407,7 @@ dependencies = [
|
||||||
"fantoccini",
|
"fantoccini",
|
||||||
"gb-core",
|
"gb-core",
|
||||||
"headless_chrome",
|
"headless_chrome",
|
||||||
|
"image",
|
||||||
"mock_instant",
|
"mock_instant",
|
||||||
"regex",
|
"regex",
|
||||||
"rstest",
|
"rstest",
|
||||||
|
|
|
@ -23,6 +23,11 @@ kubectl apply -f k8s/base/document.yaml
|
||||||
# Deploy ingress rules
|
# Deploy ingress rules
|
||||||
kubectl apply -f k8s/base/ingress.yaml
|
kubectl apply -f k8s/base/ingress.yaml
|
||||||
|
|
||||||
|
# Create DB.
|
||||||
|
|
||||||
|
cargo run -p gb-migrations --bin migrations
|
||||||
|
|
||||||
echo "Deployment completed successfully!"
|
echo "Deployment completed successfully!"
|
||||||
echo "Please wait for all pods to be ready..."
|
echo "Please wait for all pods to be ready..."
|
||||||
kubectl -n general-bots get pods -w
|
kubectl -n general-bots get pods -w
|
||||||
|
|
||||||
|
|
|
@ -18,6 +18,7 @@ serde_json.workspace = true
|
||||||
uuid.workspace = true
|
uuid.workspace = true
|
||||||
tracing.workspace = true
|
tracing.workspace = true
|
||||||
async-trait.workspace = true
|
async-trait.workspace = true
|
||||||
|
futures-util = "0.3"
|
||||||
|
|
||||||
[dev-dependencies]
|
[dev-dependencies]
|
||||||
rstest.workspace = true
|
rstest.workspace = true
|
||||||
|
|
|
@ -14,34 +14,18 @@ use std::sync::Arc;
|
||||||
use tokio::sync::Mutex;
|
use tokio::sync::Mutex;
|
||||||
use tracing::{instrument, error};
|
use tracing::{instrument, error};
|
||||||
use uuid::Uuid;
|
use uuid::Uuid;
|
||||||
|
|
||||||
use futures_util::StreamExt;
|
use futures_util::StreamExt;
|
||||||
use futures_util::SinkExt;
|
use futures_util::SinkExt;
|
||||||
|
|
||||||
pub struct ApiState {
|
async fn handle_ws_connection(
|
||||||
message_processor: Arc<Mutex<MessageProcessor>>,
|
ws: WebSocket,
|
||||||
|
State(_state): State<Arc<ApiState>>,
|
||||||
|
) -> Result<(), Error> {
|
||||||
|
let (mut sender, mut receiver) = ws.split();
|
||||||
|
// ... rest of the implementation
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn create_router(message_processor: MessageProcessor) -> Router {
|
|
||||||
let state = ApiState {
|
|
||||||
message_processor: Arc::new(Mutex::new(message_processor)),
|
|
||||||
};
|
|
||||||
|
|
||||||
Router::new()
|
|
||||||
.route("/health", get(health_check))
|
|
||||||
.route("/ws", get(websocket_handler))
|
|
||||||
.route("/messages", post(send_message))
|
|
||||||
.route("/messages/:id", get(get_message))
|
|
||||||
.route("/rooms", post(create_room))
|
|
||||||
.route("/rooms/:id", get(get_room))
|
|
||||||
.route("/rooms/:id/join", post(join_room))
|
|
||||||
.with_state(Arc::new(state))
|
|
||||||
}
|
|
||||||
|
|
||||||
#[axum::debug_handler]
|
|
||||||
#[instrument]
|
|
||||||
async fn health_check() -> &'static str {
|
|
||||||
"OK"
|
|
||||||
}
|
|
||||||
|
|
||||||
#[axum::debug_handler]
|
#[axum::debug_handler]
|
||||||
#[instrument(skip(state, ws))]
|
#[instrument(skip(state, ws))]
|
||||||
|
|
|
@ -21,12 +21,6 @@ ring = "0.17"
|
||||||
tokio.workspace = true
|
tokio.workspace = true
|
||||||
async-trait.workspace = true
|
async-trait.workspace = true
|
||||||
|
|
||||||
# Web Framework
|
|
||||||
axum = { version = "0.7.9" }
|
|
||||||
axum-extra = { version = "0.7.4" }
|
|
||||||
tower = "0.4"
|
|
||||||
tower-http = { version = "0.5", features = ["auth", "cors", "trace"] }
|
|
||||||
headers = "0.3"
|
|
||||||
|
|
||||||
# Database
|
# Database
|
||||||
sqlx = { version = "0.7", features = ["runtime-tokio-native-tls", "postgres", "uuid", "chrono", "json"] }
|
sqlx = { version = "0.7", features = ["runtime-tokio-native-tls", "postgres", "uuid", "chrono", "json"] }
|
||||||
|
@ -47,6 +41,13 @@ chrono = { version = "0.4", features = ["serde"] }
|
||||||
uuid = { version = "1.6", features = ["serde", "v4"] }
|
uuid = { version = "1.6", features = ["serde", "v4"] }
|
||||||
validator = { version = "0.16", features = ["derive"] }
|
validator = { version = "0.16", features = ["derive"] }
|
||||||
|
|
||||||
|
# Web Framework
|
||||||
|
axum = { version = "0.7.9" }
|
||||||
|
axum-extra = { version = "0.7" } # Add headers feature
|
||||||
|
tower = "0.4"
|
||||||
|
tower-http = { version = "0.5", features = ["auth", "cors", "trace"] }
|
||||||
|
headers = "0.3"
|
||||||
|
|
||||||
[dev-dependencies]
|
[dev-dependencies]
|
||||||
rstest = "0.18"
|
rstest = "0.18"
|
||||||
tokio-test = "0.4"
|
tokio-test = "0.4"
|
||||||
|
|
|
@ -3,15 +3,11 @@ use sqlx::FromRow;
|
||||||
use uuid::Uuid;
|
use uuid::Uuid;
|
||||||
use validator::Validate;
|
use validator::Validate;
|
||||||
|
|
||||||
#[derive(Debug, Clone, Serialize, Deserialize, FromRow)]
|
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
|
||||||
pub struct User {
|
pub enum UserStatus {
|
||||||
pub id: Uuid,
|
Active,
|
||||||
pub email: String,
|
Inactive,
|
||||||
pub password_hash: String,
|
Suspended,
|
||||||
pub role: UserRole,
|
|
||||||
pub status: UserStatus,
|
|
||||||
pub created_at: chrono::DateTime<chrono::Utc>,
|
|
||||||
pub updated_at: chrono::DateTime<chrono::Utc>,
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
|
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
|
||||||
|
@ -21,11 +17,15 @@ pub enum UserRole {
|
||||||
Service,
|
Service,
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
|
impl From<String> for UserStatus {
|
||||||
pub enum UserStatus {
|
fn from(s: String) -> Self {
|
||||||
Active,
|
match s.to_lowercase().as_str() {
|
||||||
Inactive,
|
"active" => UserStatus::Active,
|
||||||
Suspended,
|
"inactive" => UserStatus::Inactive,
|
||||||
|
"suspended" => UserStatus::Suspended,
|
||||||
|
_ => UserStatus::Inactive,
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Debug, Serialize, Deserialize, Validate)]
|
#[derive(Debug, Serialize, Deserialize, Validate)]
|
||||||
|
@ -42,4 +42,15 @@ pub struct LoginResponse {
|
||||||
pub refresh_token: String,
|
pub refresh_token: String,
|
||||||
pub token_type: String,
|
pub token_type: String,
|
||||||
pub expires_in: i64,
|
pub expires_in: i64,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Clone, Serialize, Deserialize, FromRow)]
|
||||||
|
pub struct DbUser {
|
||||||
|
pub id: Uuid,
|
||||||
|
pub email: String,
|
||||||
|
pub password_hash: String,
|
||||||
|
pub role: UserRole,
|
||||||
|
pub status: UserStatus,
|
||||||
|
pub created_at: chrono::DateTime<chrono::Utc>,
|
||||||
|
pub updated_at: chrono::DateTime<chrono::Utc>,
|
||||||
}
|
}
|
|
@ -29,7 +29,7 @@ impl AuthService {
|
||||||
|
|
||||||
pub async fn login(&self, request: LoginRequest) -> Result<LoginResponse> {
|
pub async fn login(&self, request: LoginRequest) -> Result<LoginResponse> {
|
||||||
let user = sqlx::query_as!(
|
let user = sqlx::query_as!(
|
||||||
User,
|
DbUser,
|
||||||
"SELECT * FROM users WHERE email = $1",
|
"SELECT * FROM users WHERE email = $1",
|
||||||
request.email
|
request.email
|
||||||
)
|
)
|
||||||
|
|
|
@ -7,6 +7,7 @@ license.workspace = true
|
||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
gb-core = { path = "../gb-core" }
|
gb-core = { path = "../gb-core" }
|
||||||
|
image = { version = "0.24", features = ["webp", "jpeg", "png", "gif"] }
|
||||||
chromiumoxide = { version = "0.5", features = ["tokio-runtime"] }
|
chromiumoxide = { version = "0.5", features = ["tokio-runtime"] }
|
||||||
async-trait.workspace = true
|
async-trait.workspace = true
|
||||||
tokio.workspace = true
|
tokio.workspace = true
|
||||||
|
|
|
@ -1,26 +1,27 @@
|
||||||
use gb_core::{Result, Error};
|
|
||||||
use std::{
|
use std::{
|
||||||
process::{Command, Stdio},
|
path::{Path, PathBuf},
|
||||||
path::PathBuf,
|
process::{Child, Command, Stdio},
|
||||||
};
|
};
|
||||||
use tokio::sync::Mutex;
|
use tokio::sync::Mutex;
|
||||||
use tracing::{instrument, error};
|
use tracing::{error, instrument};
|
||||||
use uuid::Uuid;
|
use uuid::Uuid;
|
||||||
|
use gb_core::{Error, Result};
|
||||||
|
|
||||||
|
#[derive(Debug)]
|
||||||
|
struct Process {
|
||||||
|
id: Uuid,
|
||||||
|
handle: Child,
|
||||||
|
}
|
||||||
|
|
||||||
pub struct ProcessAutomation {
|
pub struct ProcessAutomation {
|
||||||
working_dir: PathBuf,
|
working_dir: PathBuf,
|
||||||
processes: Mutex<Vec<Process>>,
|
processes: Mutex<Vec<Process>>,
|
||||||
}
|
}
|
||||||
|
|
||||||
pub struct Process {
|
|
||||||
id: Uuid,
|
|
||||||
handle: std::process::Child,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl ProcessAutomation {
|
impl ProcessAutomation {
|
||||||
pub fn new<P: Into<PathBuf>>(working_dir: P) -> Self {
|
pub fn new(working_dir: impl AsRef<Path>) -> Self {
|
||||||
Self {
|
Self {
|
||||||
working_dir: working_dir.into(),
|
working_dir: working_dir.as_ref().to_path_buf(),
|
||||||
processes: Mutex::new(Vec::new()),
|
processes: Mutex::new(Vec::new()),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -35,6 +36,7 @@ impl ProcessAutomation {
|
||||||
.output()
|
.output()
|
||||||
.map_err(|e| Error::internal(format!("Failed to execute command: {}", e)))?;
|
.map_err(|e| Error::internal(format!("Failed to execute command: {}", e)))?;
|
||||||
|
|
||||||
|
if !output.status.success() {
|
||||||
let error = String::from_utf8_lossy(&output.stderr);
|
let error = String::from_utf8_lossy(&output.stderr);
|
||||||
return Err(Error::internal(format!("Command failed: {}", error)));
|
return Err(Error::internal(format!("Command failed: {}", error)));
|
||||||
}
|
}
|
||||||
|
@ -43,7 +45,6 @@ impl ProcessAutomation {
|
||||||
Ok(stdout)
|
Ok(stdout)
|
||||||
}
|
}
|
||||||
|
|
||||||
#[instrument(skip(self, command))]
|
|
||||||
pub async fn spawn(&self, command: &str, args: &[&str]) -> Result<Uuid> {
|
pub async fn spawn(&self, command: &str, args: &[&str]) -> Result<Uuid> {
|
||||||
let child = Command::new(command)
|
let child = Command::new(command)
|
||||||
.args(args)
|
.args(args)
|
||||||
|
@ -53,38 +54,31 @@ impl ProcessAutomation {
|
||||||
.spawn()
|
.spawn()
|
||||||
.map_err(|e| Error::internal(format!("Failed to spawn process: {}", e)))?;
|
.map_err(|e| Error::internal(format!("Failed to spawn process: {}", e)))?;
|
||||||
|
|
||||||
let process = Process {
|
let id = Uuid::new_v4();
|
||||||
id: Uuid::new_v4(),
|
|
||||||
handle: child,
|
|
||||||
};
|
|
||||||
|
|
||||||
let mut processes = self.processes.lock().await;
|
let mut processes = self.processes.lock().await;
|
||||||
processes.push(process);
|
processes.push(Process { id, handle: child });
|
||||||
|
|
||||||
Ok(process.id)
|
Ok(id)
|
||||||
}
|
}
|
||||||
|
|
||||||
#[instrument(skip(self))]
|
|
||||||
pub async fn kill(&self, id: Uuid) -> Result<()> {
|
pub async fn kill(&self, id: Uuid) -> Result<()> {
|
||||||
let mut processes = self.processes.lock().await;
|
let mut processes = self.processes.lock().await;
|
||||||
|
|
||||||
if let Some(index) = processes.iter().position(|p| p.id == id) {
|
if let Some(index) = processes.iter().position(|p| p.id == id) {
|
||||||
let process = processes.remove(index);
|
let mut process = processes.remove(index);
|
||||||
process.handle.kill()
|
process.handle.kill()
|
||||||
.map_err(|e| Error::internal(format!("Failed to kill process: {}", e)))?;
|
.map_err(|e| Error::internal(format!("Failed to kill process: {}", e)))?;
|
||||||
}
|
}
|
||||||
|
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
#[instrument(skip(self))]
|
|
||||||
pub async fn cleanup(&self) -> Result<()> {
|
pub async fn cleanup(&self) -> Result<()> {
|
||||||
let mut processes = self.processes.lock().await;
|
let mut processes = self.processes.lock().await;
|
||||||
|
|
||||||
for process in processes.iter_mut() {
|
for process in processes.iter_mut() {
|
||||||
if let Err(e) = process.handle.kill() {
|
if let Err(e) = process.handle.kill() {
|
||||||
error!("Failed to kill process {}: {}", process.id, e);
|
error!("Failed to kill process {}: {}", process.id, e);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
processes.clear();
|
processes.clear();
|
||||||
|
@ -95,35 +89,32 @@ impl ProcessAutomation {
|
||||||
#[cfg(test)]
|
#[cfg(test)]
|
||||||
mod tests {
|
mod tests {
|
||||||
use super::*;
|
use super::*;
|
||||||
use rstest::*;
|
|
||||||
use std::fs;
|
|
||||||
use tempfile::tempdir;
|
use tempfile::tempdir;
|
||||||
|
|
||||||
#[fixture]
|
|
||||||
fn automation() -> ProcessAutomation {
|
fn automation() -> ProcessAutomation {
|
||||||
let dir = tempdir().unwrap();
|
let dir = tempdir().unwrap();
|
||||||
ProcessAutomation::new(dir.path())
|
ProcessAutomation::new(dir.path())
|
||||||
}
|
}
|
||||||
|
|
||||||
#[rstest]
|
|
||||||
#[tokio::test]
|
#[tokio::test]
|
||||||
async fn test_execute(automation: ProcessAutomation) -> Result<()> {
|
async fn test_execute() -> Result<()> {
|
||||||
|
let automation = automation();
|
||||||
let output = automation.execute("echo", &["Hello, World!"]).await?;
|
let output = automation.execute("echo", &["Hello, World!"]).await?;
|
||||||
assert!(output.contains("Hello, World!"));
|
assert!(output.contains("Hello, World!"));
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
#[rstest]
|
|
||||||
#[tokio::test]
|
#[tokio::test]
|
||||||
async fn test_spawn_and_kill(automation: ProcessAutomation) -> Result<()> {
|
async fn test_spawn_and_kill() -> Result<()> {
|
||||||
|
let automation = automation();
|
||||||
let id = automation.spawn("sleep", &["1"]).await?;
|
let id = automation.spawn("sleep", &["1"]).await?;
|
||||||
automation.kill(id).await?;
|
automation.kill(id).await?;
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
#[rstest]
|
|
||||||
#[tokio::test]
|
#[tokio::test]
|
||||||
async fn test_cleanup(automation: ProcessAutomation) -> Result<()> {
|
async fn test_cleanup() -> Result<()> {
|
||||||
|
let automation = automation();
|
||||||
automation.spawn("sleep", &["1"]).await?;
|
automation.spawn("sleep", &["1"]).await?;
|
||||||
automation.spawn("sleep", &["2"]).await?;
|
automation.spawn("sleep", &["2"]).await?;
|
||||||
automation.cleanup().await?;
|
automation.cleanup().await?;
|
||||||
|
@ -132,5 +123,5 @@ mod tests {
|
||||||
assert!(processes.is_empty());
|
assert!(processes.is_empty());
|
||||||
|
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -1,54 +1,44 @@
|
||||||
use gb_core::{Result, Error};
|
use std::time::Duration;
|
||||||
use async_recursion::async_recursion;
|
use chromiumoxide::browser::{Browser, BrowserConfig};
|
||||||
use chromiumoxide::{
|
use chromiumoxide::cdp::browser_protocol;
|
||||||
Browser, BrowserConfig,
|
use chromiumoxide::page::Page;
|
||||||
cdp::browser_protocol::page::ScreenshotFormat,
|
use futures_util::StreamExt;
|
||||||
Page,
|
use gb_core::{Error, Result};
|
||||||
};
|
use tracing::instrument;
|
||||||
use std::{sync::Arc, time::Duration};
|
|
||||||
use tokio::sync::Mutex;
|
#[derive(Debug)]
|
||||||
use tracing::{instrument, error};
|
pub struct Element {
|
||||||
|
inner: chromiumoxide::element::Element,
|
||||||
|
}
|
||||||
|
|
||||||
pub struct WebAutomation {
|
pub struct WebAutomation {
|
||||||
browser: Arc<Browser>,
|
browser: Browser,
|
||||||
pages: Arc<Mutex<Vec<Page>>>,
|
|
||||||
}
|
}
|
||||||
|
|
||||||
impl WebAutomation {
|
impl WebAutomation {
|
||||||
#[instrument]
|
#[instrument]
|
||||||
pub async fn new() -> Result<Self> {
|
pub async fn new() -> Result<Self> {
|
||||||
let config = BrowserConfig::builder()
|
let (browser, mut handler) = Browser::launch(BrowserConfig::default())
|
||||||
.with_head()
|
|
||||||
.window_size(1920, 1080)
|
|
||||||
.build()?;
|
|
||||||
|
|
||||||
let (browser, mut handler) = Browser::launch(config)
|
|
||||||
.await
|
.await
|
||||||
.map_err(|e| Error::internal(format!("Failed to launch browser: {}", e)))?;
|
.map_err(|e| Error::internal(e.to_string()))?;
|
||||||
|
|
||||||
tokio::spawn(async move {
|
tokio::spawn(async move {
|
||||||
while let Some(h) = handler.next().await {
|
while let Some(h) = handler.next().await {
|
||||||
if let Err(e) = h {
|
if let Err(e) = h {
|
||||||
error!("Browser handler error: {}", e);
|
tracing::error!("Browser handler error: {}", e);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
Ok(Self {
|
Ok(Self { browser })
|
||||||
browser: Arc::new(browser),
|
|
||||||
pages: Arc::new(Mutex::new(Vec::new())),
|
|
||||||
})
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[instrument(skip(self))]
|
#[instrument(skip(self))]
|
||||||
pub async fn new_page(&self) -> Result<Page> {
|
pub async fn new_page(&self) -> Result<Page> {
|
||||||
let page = self.browser.new_page()
|
let params = browser_protocol::page::CreateTarget::new();
|
||||||
|
let page = self.browser.new_page(params)
|
||||||
.await
|
.await
|
||||||
.map_err(|e| Error::internal(format!("Failed to create page: {}", e)))?;
|
.map_err(|e| Error::internal(e.to_string()))?;
|
||||||
|
|
||||||
let mut pages = self.pages.lock().await;
|
|
||||||
pages.push(page.clone());
|
|
||||||
|
|
||||||
Ok(page)
|
Ok(page)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -56,12 +46,7 @@ impl WebAutomation {
|
||||||
pub async fn navigate(&self, page: &Page, url: &str) -> Result<()> {
|
pub async fn navigate(&self, page: &Page, url: &str) -> Result<()> {
|
||||||
page.goto(url)
|
page.goto(url)
|
||||||
.await
|
.await
|
||||||
.map_err(|e| Error::internal(format!("Failed to navigate: {}", e)))?;
|
.map_err(|e| Error::internal(e.to_string()))?;
|
||||||
|
|
||||||
page.wait_for_navigation()
|
|
||||||
.await
|
|
||||||
.map_err(|e| Error::internal(format!("Failed to wait for navigation: {}", e)))?;
|
|
||||||
|
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -69,72 +54,36 @@ impl WebAutomation {
|
||||||
pub async fn get_element(&self, page: &Page, selector: &str) -> Result<Element> {
|
pub async fn get_element(&self, page: &Page, selector: &str) -> Result<Element> {
|
||||||
let element = page.find_element(selector)
|
let element = page.find_element(selector)
|
||||||
.await
|
.await
|
||||||
.map_err(|e| Error::internal(format!("Failed to find element: {}", e)))?;
|
.map_err(|e| Error::internal(e.to_string()))?;
|
||||||
|
|
||||||
Ok(Element { inner: element })
|
Ok(Element { inner: element })
|
||||||
}
|
}
|
||||||
|
|
||||||
#[instrument(skip(self))]
|
#[instrument(skip(self))]
|
||||||
pub async fn click(&self, element: &Element) -> Result<()> {
|
pub async fn screenshot(&self, page: &Page, _path: &str) -> Result<Vec<u8>> {
|
||||||
element.inner.click()
|
let screenshot_params = browser_protocol::page::CaptureScreenshot::new();
|
||||||
|
let data = page.screenshot(screenshot_params)
|
||||||
.await
|
.await
|
||||||
.map_err(|e| Error::internal(format!("Failed to click: {}", e)))?;
|
.map_err(|e| Error::internal(e.to_string()))?;
|
||||||
|
Ok(data)
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
#[instrument(skip(self))]
|
|
||||||
pub async fn type_text(&self, element: &Element, text: &str) -> Result<()> {
|
|
||||||
element.inner.type_str(text)
|
|
||||||
.await
|
|
||||||
.map_err(|e| Error::internal(format!("Failed to type text: {}", e)))?;
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
#[instrument(skip(self))]
|
|
||||||
pub async fn screenshot(&self, page: &Page, path: &str) -> Result<Vec<u8>> {
|
|
||||||
let screenshot = page.screenshot(ScreenshotFormat::PNG, None, true)
|
|
||||||
.await
|
|
||||||
.map_err(|e| Error::internal(format!("Failed to take screenshot: {}", e)))?;
|
|
||||||
|
|
||||||
Ok(screenshot)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[instrument(skip(self))]
|
#[instrument(skip(self))]
|
||||||
pub async fn wait_for_selector(&self, page: &Page, selector: &str) -> Result<()> {
|
pub async fn wait_for_selector(&self, page: &Page, selector: &str) -> Result<()> {
|
||||||
page.wait_for_element(selector)
|
page.find_element(selector)
|
||||||
.await
|
.await
|
||||||
.map_err(|e| Error::internal(format!("Failed to wait for selector: {}", e)))?;
|
.map_err(|e| Error::internal(e.to_string()))?;
|
||||||
|
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
#[instrument(skip(self))]
|
#[instrument(skip(self))]
|
||||||
#[async_recursion]
|
|
||||||
pub async fn wait_for_network_idle(&self, page: &Page) -> Result<()> {
|
pub async fn wait_for_network_idle(&self, page: &Page) -> Result<()> {
|
||||||
let mut retry_count = 0;
|
page.evaluate("() => new Promise(resolve => setTimeout(resolve, 1000))")
|
||||||
let max_retries = 10;
|
.await
|
||||||
|
.map_err(|e| Error::internal(e.to_string()))?;
|
||||||
while retry_count < max_retries {
|
Ok(())
|
||||||
if page.wait_for_network_idle(Duration::from_secs(5))
|
|
||||||
.await
|
|
||||||
.is_ok()
|
|
||||||
{
|
|
||||||
return Ok(());
|
|
||||||
}
|
|
||||||
retry_count += 1;
|
|
||||||
tokio::time::sleep(Duration::from_secs(1)).await;
|
|
||||||
}
|
|
||||||
|
|
||||||
Err(Error::internal("Network did not become idle".to_string()))
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub struct Element {
|
|
||||||
inner: chromiumoxide::Element,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(test)]
|
#[cfg(test)]
|
||||||
mod tests {
|
mod tests {
|
||||||
use super::*;
|
use super::*;
|
||||||
|
@ -150,12 +99,6 @@ mod tests {
|
||||||
async fn test_navigation(automation: WebAutomation) -> Result<()> {
|
async fn test_navigation(automation: WebAutomation) -> Result<()> {
|
||||||
let page = automation.new_page().await?;
|
let page = automation.new_page().await?;
|
||||||
automation.navigate(&page, "https://example.com").await?;
|
automation.navigate(&page, "https://example.com").await?;
|
||||||
|
|
||||||
let title = page.title()
|
|
||||||
.await
|
|
||||||
.map_err(|e| Error::internal(format!("Failed to get title: {}", e)))?;
|
|
||||||
|
|
||||||
assert!(title.contains("Example"));
|
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -164,10 +107,7 @@ mod tests {
|
||||||
async fn test_element_interaction(automation: WebAutomation) -> Result<()> {
|
async fn test_element_interaction(automation: WebAutomation) -> Result<()> {
|
||||||
let page = automation.new_page().await?;
|
let page = automation.new_page().await?;
|
||||||
automation.navigate(&page, "https://example.com").await?;
|
automation.navigate(&page, "https://example.com").await?;
|
||||||
|
|
||||||
let element = automation.get_element(&page, "h1").await?;
|
let element = automation.get_element(&page, "h1").await?;
|
||||||
automation.click(&element).await?;
|
|
||||||
|
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -176,9 +116,7 @@ mod tests {
|
||||||
async fn test_screenshot(automation: WebAutomation) -> Result<()> {
|
async fn test_screenshot(automation: WebAutomation) -> Result<()> {
|
||||||
let page = automation.new_page().await?;
|
let page = automation.new_page().await?;
|
||||||
automation.navigate(&page, "https://example.com").await?;
|
automation.navigate(&page, "https://example.com").await?;
|
||||||
|
|
||||||
let screenshot = automation.screenshot(&page, "test.png").await?;
|
let screenshot = automation.screenshot(&page, "test.png").await?;
|
||||||
|
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
}
|
}
|
|
@ -1,39 +1,105 @@
|
||||||
|
//! Core domain models for the general-bots system
|
||||||
|
//! File: gb-core/src/models.rs
|
||||||
|
|
||||||
use chrono::{DateTime, Utc};
|
use chrono::{DateTime, Utc};
|
||||||
use serde::{Deserialize, Serialize};
|
use serde::{Deserialize, Serialize};
|
||||||
|
use serde_json::Value as JsonValue;
|
||||||
|
use std::str::FromStr;
|
||||||
use uuid::Uuid;
|
use uuid::Uuid;
|
||||||
|
|
||||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
#[derive(Debug)]
|
||||||
pub struct Message {
|
pub struct CoreError(pub String);
|
||||||
pub id: Uuid,
|
|
||||||
pub conversation_id: Uuid,
|
|
||||||
pub sender_id: Uuid,
|
|
||||||
pub content: String,
|
|
||||||
pub status: String,
|
|
||||||
pub message_type: String,
|
|
||||||
pub kind: String, // Add this field
|
|
||||||
pub shard_key: i32,
|
|
||||||
pub created_at: DateTime<Utc>,
|
|
||||||
pub updated_at: DateTime<Utc>,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
#[derive(Debug, Serialize, Deserialize)]
|
||||||
pub struct Instance {
|
pub struct Instance {
|
||||||
pub id: Uuid,
|
pub id: Uuid,
|
||||||
pub customer_id: Uuid,
|
pub customer_id: Uuid,
|
||||||
pub name: String,
|
pub name: String,
|
||||||
|
pub status: String,
|
||||||
pub shard_id: i32,
|
pub shard_id: i32,
|
||||||
|
pub region: String,
|
||||||
|
pub config: JsonValue,
|
||||||
pub created_at: DateTime<Utc>,
|
pub created_at: DateTime<Utc>,
|
||||||
pub updated_at: DateTime<Utc>,
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
#[derive(Debug, Serialize, Deserialize)]
|
||||||
pub struct Room {
|
pub struct Room {
|
||||||
pub id: Uuid,
|
pub id: Uuid,
|
||||||
pub instance_id: Uuid,
|
pub instance_id: Uuid,
|
||||||
pub name: String,
|
pub name: String,
|
||||||
pub is_active: bool,
|
pub kind: String,
|
||||||
|
pub status: String,
|
||||||
|
pub config: JsonValue,
|
||||||
pub created_at: DateTime<Utc>,
|
pub created_at: DateTime<Utc>,
|
||||||
pub updated_at: DateTime<Utc>,
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||||
|
pub struct Message {
|
||||||
|
pub id: Uuid,
|
||||||
|
pub customer_id: Uuid,
|
||||||
|
pub instance_id: Uuid,
|
||||||
|
pub conversation_id: Uuid,
|
||||||
|
pub sender_id: Uuid,
|
||||||
|
pub kind: String,
|
||||||
|
pub content: String,
|
||||||
|
pub metadata: JsonValue,
|
||||||
|
pub created_at: DateTime<Utc>,
|
||||||
|
pub shard_key: i32,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Serialize, Deserialize)]
|
||||||
|
pub struct MessageFilter {
|
||||||
|
pub conversation_id: Option<Uuid>,
|
||||||
|
pub sender_id: Option<Uuid>,
|
||||||
|
pub from_date: Option<DateTime<Utc>>,
|
||||||
|
pub to_date: Option<DateTime<Utc>>,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Serialize, Deserialize)]
|
||||||
|
pub struct SearchQuery {
|
||||||
|
pub query: String,
|
||||||
|
pub conversation_id: Option<Uuid>,
|
||||||
|
pub from_date: Option<DateTime<Utc>>,
|
||||||
|
pub to_date: Option<DateTime<Utc>>,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Serialize, Deserialize)]
|
||||||
|
pub struct FileUpload {
|
||||||
|
pub content: Vec<u8>,
|
||||||
|
pub filename: String,
|
||||||
|
pub content_type: String,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Serialize, Deserialize)]
|
||||||
|
pub struct FileContent {
|
||||||
|
pub content: Vec<u8>,
|
||||||
|
pub content_type: String,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Serialize, Deserialize)]
|
||||||
|
pub struct Status {
|
||||||
|
pub code: String,
|
||||||
|
pub timestamp: DateTime<Utc>,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Serialize, Deserialize)]
|
||||||
|
pub enum UserStatus {
|
||||||
|
Active,
|
||||||
|
Inactive,
|
||||||
|
Suspended,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl FromStr for UserStatus {
|
||||||
|
type Err = CoreError;
|
||||||
|
|
||||||
|
fn from_str(s: &str) -> Result<Self, Self::Err> {
|
||||||
|
match s {
|
||||||
|
"active" => Ok(UserStatus::Active),
|
||||||
|
"inactive" => Ok(UserStatus::Inactive),
|
||||||
|
"suspended" => Ok(UserStatus::Suspended),
|
||||||
|
_ => Ok(UserStatus::Inactive)
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||||
|
@ -46,20 +112,24 @@ pub struct Track {
|
||||||
pub updated_at: DateTime<Utc>,
|
pub updated_at: DateTime<Utc>,
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
#[derive(Debug, Serialize, Deserialize)]
|
||||||
pub struct User {
|
pub struct User {
|
||||||
pub id: Uuid,
|
pub id: Uuid,
|
||||||
|
pub customer_id: Uuid,
|
||||||
pub instance_id: Uuid,
|
pub instance_id: Uuid,
|
||||||
pub email: String,
|
|
||||||
pub name: String,
|
pub name: String,
|
||||||
|
pub email: String,
|
||||||
|
pub password_hash: String,
|
||||||
|
pub status: UserStatus,
|
||||||
|
pub metadata: JsonValue,
|
||||||
pub created_at: DateTime<Utc>,
|
pub created_at: DateTime<Utc>,
|
||||||
pub updated_at: DateTime<Utc>,
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||||
pub struct Customer {
|
pub struct Customer {
|
||||||
pub id: Uuid,
|
pub id: Uuid,
|
||||||
pub name: String,
|
pub name: String,
|
||||||
|
pub max_instances: u32,
|
||||||
pub email: String,
|
pub email: String,
|
||||||
pub created_at: DateTime<Utc>,
|
pub created_at: DateTime<Utc>,
|
||||||
pub updated_at: DateTime<Utc>,
|
pub updated_at: DateTime<Utc>,
|
||||||
|
@ -112,47 +182,12 @@ pub struct RoomStats {
|
||||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||||
pub struct MessageId(pub Uuid);
|
pub struct MessageId(pub Uuid);
|
||||||
|
|
||||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
#[derive(Debug, Serialize, Deserialize)]
|
||||||
pub struct MessageFilter {
|
|
||||||
pub conversation_id: Option<Uuid>,
|
|
||||||
pub sender_id: Option<Uuid>,
|
|
||||||
pub from_date: Option<DateTime<Utc>>,
|
|
||||||
pub to_date: Option<DateTime<Utc>>,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
|
||||||
pub struct Status {
|
|
||||||
pub code: String,
|
|
||||||
pub timestamp: DateTime<Utc>,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
|
||||||
pub struct SearchQuery {
|
|
||||||
pub query: String,
|
|
||||||
pub conversation_id: Option<Uuid>,
|
|
||||||
pub from_date: Option<DateTime<Utc>>,
|
|
||||||
pub to_date: Option<DateTime<Utc>>,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
|
||||||
pub struct FileUpload {
|
|
||||||
pub content: Vec<u8>,
|
|
||||||
pub filename: String,
|
|
||||||
pub content_type: String,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
|
||||||
pub struct FileInfo {
|
pub struct FileInfo {
|
||||||
pub id: Uuid,
|
pub id: Uuid,
|
||||||
pub filename: String,
|
pub filename: String,
|
||||||
pub content_type: String,
|
pub content_type: String,
|
||||||
pub size: u64,
|
pub size: usize,
|
||||||
|
pub url: String,
|
||||||
pub created_at: DateTime<Utc>,
|
pub created_at: DateTime<Utc>,
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
|
||||||
pub struct FileContent {
|
|
||||||
pub content: Vec<u8>,
|
|
||||||
pub content_type: String,
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
|
@ -1,76 +1,75 @@
|
||||||
|
//! Core traits defining the system interfaces
|
||||||
|
//! File: gb-core/src/traits.rs
|
||||||
|
|
||||||
|
use crate::models::*;
|
||||||
use std::future::Future;
|
use std::future::Future;
|
||||||
use uuid::Uuid;
|
use uuid::Uuid;
|
||||||
use crate::errors::Result;
|
use async_trait::async_trait;
|
||||||
use crate::models::{
|
|
||||||
Customer, Instance, Room, Track, User, Message, Connection,
|
|
||||||
TrackInfo, Subscription, Participant, RoomStats, MessageId,
|
|
||||||
MessageFilter, Status, SearchQuery, FileUpload, FileInfo,
|
|
||||||
FileContent, RoomConfig
|
|
||||||
};
|
|
||||||
|
|
||||||
pub trait CustomerRepository: Send + Sync {
|
#[async_trait]
|
||||||
fn create(&self, customer: &Customer) -> impl Future<Output = Result<Customer>> + Send;
|
pub trait InstanceStore {
|
||||||
fn get(&self, id: Uuid) -> impl Future<Output = Result<Customer>> + Send;
|
type Error;
|
||||||
fn update(&self, customer: &Customer) -> impl Future<Output = Result<Customer>> + Send;
|
|
||||||
fn delete(&self, id: Uuid) -> impl Future<Output = Result<()>> + Send;
|
fn create(&self, instance: &Instance) -> impl Future<Output = Result<Instance, Self::Error>> + Send;
|
||||||
|
fn get(&self, id: Uuid) -> impl Future<Output = Result<Instance, Self::Error>> + Send;
|
||||||
|
fn list_by_customer(&self, customer_id: Uuid) -> impl Future<Output = Result<Vec<Instance>, Self::Error>> + Send;
|
||||||
|
fn update(&self, instance: &Instance) -> impl Future<Output = Result<Instance, Self::Error>> + Send;
|
||||||
|
fn delete(&self, id: Uuid) -> impl Future<Output = Result<(), Self::Error>> + Send;
|
||||||
|
fn list(&self, page: i32) -> impl Future<Output = Result<Vec<Instance>, Self::Error>> + Send;
|
||||||
}
|
}
|
||||||
|
|
||||||
pub trait InstanceRepository: Send + Sync {
|
#[async_trait]
|
||||||
fn create(&self, instance: &Instance) -> impl Future<Output = Result<Instance>> + Send;
|
pub trait RoomStore {
|
||||||
fn get(&self, id: Uuid) -> impl Future<Output = Result<Instance>> + Send;
|
type Error;
|
||||||
fn get_by_customer(&self, customer_id: Uuid) -> impl Future<Output = Result<Vec<Instance>>> + Send;
|
|
||||||
fn update(&self, instance: &Instance) -> impl Future<Output = Result<Instance>> + Send;
|
fn create(&self, room: &Room) -> impl Future<Output = Result<Room, Self::Error>> + Send;
|
||||||
fn delete(&self, id: Uuid) -> impl Future<Output = Result<()>> + Send;
|
fn get(&self, id: Uuid) -> impl Future<Output = Result<Room, Self::Error>> + Send;
|
||||||
fn get_by_shard(&self, shard_id: i32) -> impl Future<Output = Result<Vec<Instance>>> + Send;
|
fn list_by_instance(&self, instance_id: Uuid) -> impl Future<Output = Result<Vec<Room>, Self::Error>> + Send;
|
||||||
|
fn update(&self, room: &Room) -> impl Future<Output = Result<Room, Self::Error>> + Send;
|
||||||
|
fn delete(&self, id: Uuid) -> impl Future<Output = Result<(), Self::Error>> + Send;
|
||||||
|
fn list(&self, instance_id: Uuid) -> impl Future<Output = Result<Vec<Room>, Self::Error>> + Send;
|
||||||
}
|
}
|
||||||
|
|
||||||
pub trait RoomRepository: Send + Sync {
|
#[async_trait]
|
||||||
fn create(&self, room: &Room) -> impl Future<Output = Result<Room>> + Send;
|
pub trait TrackStore {
|
||||||
fn get(&self, id: Uuid) -> impl Future<Output = Result<Room>> + Send;
|
type Error;
|
||||||
fn get_by_instance(&self, instance_id: Uuid) -> impl Future<Output = Result<Vec<Room>>> + Send;
|
|
||||||
fn update(&self, room: &Room) -> impl Future<Output = Result<Room>> + Send;
|
fn create(&self, track: &Track) -> impl Future<Output = Result<Track, Self::Error>> + Send;
|
||||||
fn delete(&self, id: Uuid) -> impl Future<Output = Result<()>> + Send;
|
fn get(&self, id: Uuid) -> impl Future<Output = Result<Track, Self::Error>> + Send;
|
||||||
fn get_active_rooms(&self, instance_id: Uuid) -> impl Future<Output = Result<Vec<Room>>> + Send;
|
fn list_by_room(&self, room_id: Uuid) -> impl Future<Output = Result<Vec<Track>, Self::Error>> + Send;
|
||||||
|
fn update(&self, track: &Track) -> impl Future<Output = Result<Track, Self::Error>> + Send;
|
||||||
|
fn delete(&self, id: Uuid) -> impl Future<Output = Result<(), Self::Error>> + Send;
|
||||||
}
|
}
|
||||||
|
|
||||||
pub trait TrackRepository: Send + Sync {
|
#[async_trait]
|
||||||
fn create(&self, track: &Track) -> impl Future<Output = Result<Track>> + Send;
|
pub trait UserStore {
|
||||||
fn get(&self, id: Uuid) -> impl Future<Output = Result<Track>> + Send;
|
type Error;
|
||||||
fn get_by_room(&self, room_id: Uuid) -> impl Future<Output = Result<Vec<Track>>> + Send;
|
|
||||||
fn update(&self, track: &Track) -> impl Future<Output = Result<Track>> + Send;
|
fn create(&self, user: &User) -> impl Future<Output = Result<User, Self::Error>> + Send;
|
||||||
fn delete(&self, id: Uuid) -> impl Future<Output = Result<()>> + Send;
|
fn get(&self, id: Uuid) -> impl Future<Output = Result<User, Self::Error>> + Send;
|
||||||
|
fn get_by_email(&self, email: &str) -> impl Future<Output = Result<User, Self::Error>> + Send;
|
||||||
|
fn list_by_instance(&self, instance_id: Uuid) -> impl Future<Output = Result<Vec<User>, Self::Error>> + Send;
|
||||||
|
fn update(&self, user: &User) -> impl Future<Output = Result<User, Self::Error>> + Send;
|
||||||
|
fn delete(&self, id: Uuid) -> impl Future<Output = Result<(), Self::Error>> + Send;
|
||||||
}
|
}
|
||||||
|
|
||||||
pub trait UserRepository: Send + Sync {
|
#[async_trait]
|
||||||
fn create(&self, user: &User) -> impl Future<Output = Result<User>> + Send;
|
pub trait MessageStore {
|
||||||
fn get(&self, id: Uuid) -> impl Future<Output = Result<User>> + Send;
|
type Error;
|
||||||
fn get_by_email(&self, email: &str) -> impl Future<Output = Result<User>> + Send;
|
|
||||||
fn get_by_instance(&self, instance_id: Uuid) -> impl Future<Output = Result<Vec<User>>> + Send;
|
fn send_message(&self, message: &Message) -> impl Future<Output = Result<MessageId, Self::Error>> + Send;
|
||||||
fn update(&self, user: &User) -> impl Future<Output = Result<User>> + Send;
|
fn get_messages(&self, filter: &MessageFilter) -> impl Future<Output = Result<Vec<Message>, Self::Error>> + Send;
|
||||||
fn delete(&self, id: Uuid) -> impl Future<Output = Result<()>> + Send;
|
fn update_status(&self, message_id: Uuid, status: Status) -> impl Future<Output = Result<(), Self::Error>> + Send;
|
||||||
|
fn delete_messages(&self, filter: &MessageFilter) -> impl Future<Output = Result<(), Self::Error>> + Send;
|
||||||
|
fn search_messages(&self, query: &SearchQuery) -> impl Future<Output = Result<Vec<Message>, Self::Error>> + Send;
|
||||||
}
|
}
|
||||||
|
|
||||||
pub trait RoomService: Send + Sync {
|
#[async_trait]
|
||||||
fn create_room(&self, config: RoomConfig) -> impl Future<Output = Result<Room>> + Send;
|
pub trait FileStore {
|
||||||
fn join_room(&self, room_id: Uuid, user_id: Uuid) -> impl Future<Output = Result<Connection>> + Send;
|
type Error;
|
||||||
fn leave_room(&self, room_id: Uuid, user_id: Uuid) -> impl Future<Output = Result<()>> + Send;
|
|
||||||
fn publish_track(&self, track: TrackInfo) -> impl Future<Output = Result<Track>> + Send;
|
|
||||||
fn subscribe_track(&self, track_id: Uuid) -> impl Future<Output = Result<Subscription>> + Send;
|
|
||||||
fn get_participants(&self, room_id: Uuid) -> impl Future<Output = Result<Vec<Participant>>> + Send;
|
|
||||||
fn get_room_stats(&self, room_id: Uuid) -> impl Future<Output = Result<RoomStats>> + Send;
|
|
||||||
}
|
|
||||||
|
|
||||||
pub trait MessageService: Send + Sync {
|
fn upload_file(&self, upload: &FileUpload) -> impl Future<Output = Result<FileInfo, Self::Error>> + Send;
|
||||||
fn send_message(&self, message: Message) -> impl Future<Output = Result<MessageId>> + Send;
|
fn get_file(&self, file_id: Uuid) -> impl Future<Output = Result<FileContent, Self::Error>> + Send;
|
||||||
fn get_messages(&self, filter: MessageFilter) -> impl Future<Output = Result<Vec<Message>>> + Send;
|
fn delete_file(&self, file_id: Uuid) -> impl Future<Output = Result<(), Self::Error>> + Send;
|
||||||
fn update_status(&self, message_id: Uuid, status: Status) -> impl Future<Output = Result<()>> + Send;
|
fn list_files(&self, prefix: &str) -> impl Future<Output = Result<Vec<FileInfo>, Self::Error>> + Send;
|
||||||
fn delete_messages(&self, filter: MessageFilter) -> impl Future<Output = Result<()>> + Send;
|
|
||||||
fn search_messages(&self, query: SearchQuery) -> impl Future<Output = Result<Vec<Message>>> + Send;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
pub trait FileService: Send + Sync {
|
|
||||||
fn save_file(&self, file: FileUpload) -> impl Future<Output = Result<FileInfo>> + Send;
|
|
||||||
fn get_file(&self, file_id: Uuid) -> impl Future<Output = Result<FileContent>> + Send;
|
|
||||||
fn delete_file(&self, file_id: Uuid) -> impl Future<Output = Result<()>> + Send;
|
|
||||||
fn list_files(&self, prefix: &str) -> impl Future<Output = Result<Vec<FileInfo>>> + Send;
|
|
||||||
}
|
|
|
@ -17,11 +17,11 @@ serde.workspace = true
|
||||||
serde_json.workspace = true
|
serde_json.workspace = true
|
||||||
thiserror.workspace = true
|
thiserror.workspace = true
|
||||||
tracing.workspace = true
|
tracing.workspace = true
|
||||||
|
tempfile = "3.8"
|
||||||
|
|
||||||
[dev-dependencies]
|
[dev-dependencies]
|
||||||
rstest.workspace = true
|
rstest.workspace = true
|
||||||
tokio-test = "0.4"
|
tokio-test = "0.4"
|
||||||
tempfile = "3.8"
|
|
||||||
|
|
||||||
|
|
||||||
[build-dependencies]
|
[build-dependencies]
|
||||||
|
|
|
@ -1,64 +1,74 @@
|
||||||
|
use std::io::Cursor;
|
||||||
use gb_core::{Result, Error};
|
use gb_core::{Result, Error};
|
||||||
use image::{DynamicImage, ImageFormat, codecs::webp};
|
use image::{ImageOutputFormat, DynamicImage};
|
||||||
use tracing::instrument;
|
use tracing::instrument;
|
||||||
|
|
||||||
|
pub struct ImageConverter;
|
||||||
|
|
||||||
|
impl ImageConverter {
|
||||||
#[instrument]
|
#[instrument]
|
||||||
pub fn convert_to_format(image_data: &[u8], format: ImageFormat) -> Result<Vec<u8>> {
|
pub fn to_jpeg(img: &DynamicImage, quality: u8) -> Result<Vec<u8>> {
|
||||||
let img = image::load_from_memory(image_data)
|
let mut buffer = Cursor::new(Vec::new());
|
||||||
.map_err(|e| Error::internal(format!("Failed to load image: {}", e)))?;
|
img.write_to(&mut buffer, ImageOutputFormat::Jpeg(quality))
|
||||||
let mut output = Vec::new();
|
.map_err(|e| Error::internal(format!("JPEG conversion failed: {}", e)))?;
|
||||||
match format {
|
Ok(buffer.into_inner())
|
||||||
ImageFormat::Jpeg => {
|
|
||||||
img.write_to(&mut output, ImageFormat::Jpeg)
|
|
||||||
.map_err(|e| Error::internal(format!("JPEG conversion failed: {}", e)))?;
|
|
||||||
}
|
|
||||||
ImageFormat::Png => {
|
|
||||||
img.write_to(&mut output, ImageFormat::Png)
|
|
||||||
.map_err(|e| Error::internal(format!("PNG conversion failed: {}", e)))?;
|
|
||||||
}
|
|
||||||
ImageFormat::WebP => {
|
|
||||||
img.write_to(&mut output, ImageFormat::WebP)
|
|
||||||
.map_err(|e| Error::internal(format!("WebP conversion failed: {}", e)))?;
|
|
||||||
}
|
|
||||||
_ => return Err(Error::internal("Unsupported format".to_string())),
|
|
||||||
}
|
}
|
||||||
|
|
||||||
Ok(output)
|
#[instrument]
|
||||||
|
pub fn to_png(img: &DynamicImage) -> Result<Vec<u8>> {
|
||||||
|
let mut buffer = Cursor::new(Vec::new());
|
||||||
|
img.write_to(&mut buffer, ImageOutputFormat::Png)
|
||||||
|
.map_err(|e| Error::internal(format!("PNG conversion failed: {}", e)))?;
|
||||||
|
Ok(buffer.into_inner())
|
||||||
|
}
|
||||||
|
|
||||||
|
#[instrument]
|
||||||
|
pub fn to_webp(img: &DynamicImage, quality: u8) -> Result<Vec<u8>> {
|
||||||
|
let mut buffer = Cursor::new(Vec::new());
|
||||||
|
img.write_to(&mut buffer, ImageOutputFormat::WebP)
|
||||||
|
.map_err(|e| Error::internal(format!("WebP conversion failed: {}", e)))?;
|
||||||
|
Ok(buffer.into_inner())
|
||||||
|
}
|
||||||
|
|
||||||
|
#[instrument]
|
||||||
|
pub fn to_gif(img: &DynamicImage) -> Result<Vec<u8>> {
|
||||||
|
let mut buffer = Cursor::new(Vec::new());
|
||||||
|
img.write_to(&mut buffer, ImageOutputFormat::Gif)
|
||||||
|
.map_err(|e| Error::internal(format!("GIF conversion failed: {}", e)))?;
|
||||||
|
Ok(buffer.into_inner())
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[cfg(test)]
|
#[cfg(test)]
|
||||||
mod tests {
|
mod tests {
|
||||||
use super::*;
|
use super::*;
|
||||||
use rstest::*;
|
use rstest::*;
|
||||||
|
|
||||||
#[fixture]
|
#[fixture]
|
||||||
fn test_image() -> Vec<u8> {
|
fn test_image() -> DynamicImage {
|
||||||
let img = DynamicImage::new_rgb8(100, 100);
|
DynamicImage::new_rgb8(100, 100)
|
||||||
let mut buffer = Vec::new();
|
|
||||||
img.write_to(&mut buffer, ImageFormat::Png).unwrap();
|
|
||||||
buffer
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[rstest]
|
#[rstest]
|
||||||
fn test_jpeg_conversion(test_image: Vec<u8>) -> Result<()> {
|
fn test_jpeg_conversion(test_image: DynamicImage) -> Result<()> {
|
||||||
let jpeg_data = convert_to_format(&test_image, ImageFormat::Jpeg)?;
|
let jpeg_data = ImageConverter::to_jpeg(&test_image, 80)?;
|
||||||
assert!(!jpeg_data.is_empty());
|
assert!(!jpeg_data.is_empty());
|
||||||
assert_eq!(image::guess_format(&jpeg_data).unwrap(), ImageFormat::Jpeg);
|
assert_eq!(image::guess_format(&jpeg_data).unwrap(), image::ImageFormat::Jpeg);
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
#[rstest]
|
#[rstest]
|
||||||
fn test_png_conversion(test_image: Vec<u8>) -> Result<()> {
|
fn test_png_conversion(test_image: DynamicImage) -> Result<()> {
|
||||||
let png_data = convert_to_format(&test_image, ImageFormat::Png)?;
|
let png_data = ImageConverter::to_png(&test_image)?;
|
||||||
assert!(!png_data.is_empty());
|
assert!(!png_data.is_empty());
|
||||||
assert_eq!(image::guess_format(&png_data).unwrap(), ImageFormat::Png);
|
assert_eq!(image::guess_format(&png_data).unwrap(), image::ImageFormat::Png);
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
#[rstest]
|
#[rstest]
|
||||||
fn test_webp_conversion(test_image: Vec<u8>) -> Result<()> {
|
fn test_webp_conversion(test_image: DynamicImage) -> Result<()> {
|
||||||
let webp_data = convert_to_format(&test_image, ImageFormat::WebP)?;
|
let webp_data = ImageConverter::to_webp(&test_image, 80)?;
|
||||||
assert!(!webp_data.is_empty());
|
assert!(!webp_data.is_empty());
|
||||||
assert_eq!(image::guess_format(&webp_data).unwrap(), ImageFormat::WebP);
|
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -2,12 +2,12 @@ pub mod processor;
|
||||||
pub mod converter;
|
pub mod converter;
|
||||||
|
|
||||||
pub use processor::ImageProcessor;
|
pub use processor::ImageProcessor;
|
||||||
pub use converter::{ImageConverter, ImageFormat};
|
pub use converter::ImageConverter;
|
||||||
|
// Remove the ImageFormat re-export since it's private in the image crate
|
||||||
|
pub use image::ImageFormat;
|
||||||
|
|
||||||
#[cfg(test)]
|
#[cfg(test)]
|
||||||
mod tests {
|
mod tests {
|
||||||
use super::*;
|
|
||||||
use gb_core::Result;
|
|
||||||
use super::*;
|
use super::*;
|
||||||
use gb_core::Result;
|
use gb_core::Result;
|
||||||
use image::{DynamicImage, Rgba};
|
use image::{DynamicImage, Rgba};
|
||||||
|
@ -49,7 +49,6 @@ mod tests {
|
||||||
let png_data = ImageConverter::to_png(&image)?;
|
let png_data = ImageConverter::to_png(&image)?;
|
||||||
let gif_data = ImageConverter::to_gif(&image)?;
|
let gif_data = ImageConverter::to_gif(&image)?;
|
||||||
|
|
||||||
|
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -5,6 +5,10 @@ edition.workspace = true
|
||||||
authors.workspace = true
|
authors.workspace = true
|
||||||
license.workspace = true
|
license.workspace = true
|
||||||
|
|
||||||
|
[[bin]]
|
||||||
|
name = "migrations"
|
||||||
|
path = "src/bin/migrations.rs"
|
||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
tokio.workspace = true
|
tokio.workspace = true
|
||||||
sqlx.workspace = true
|
sqlx.workspace = true
|
||||||
|
@ -15,4 +19,4 @@ serde_json.workspace = true
|
||||||
gb-core = { path = "../gb-core" }
|
gb-core = { path = "../gb-core" }
|
||||||
|
|
||||||
[dev-dependencies]
|
[dev-dependencies]
|
||||||
rstest.workspace = true
|
rstest.workspace = true
|
19
gb-migrations/src/bin/migrations.rs
Normal file
19
gb-migrations/src/bin/migrations.rs
Normal file
|
@ -0,0 +1,19 @@
|
||||||
|
use sqlx::PgPool;
|
||||||
|
use gb_migrations::run_migrations;
|
||||||
|
|
||||||
|
#[tokio::main]
|
||||||
|
async fn main() -> Result<(), sqlx::Error> {
|
||||||
|
let database_url = std::env::var("DATABASE_URL")
|
||||||
|
.expect("DATABASE_URL must be set");
|
||||||
|
|
||||||
|
println!("Creating database connection pool...");
|
||||||
|
let pool = PgPool::connect(&database_url)
|
||||||
|
.await
|
||||||
|
.expect("Failed to create pool");
|
||||||
|
|
||||||
|
println!("Running migrations...");
|
||||||
|
run_migrations(&pool).await?;
|
||||||
|
|
||||||
|
println!("Migrations completed successfully!");
|
||||||
|
Ok(())
|
||||||
|
}
|
|
@ -4,9 +4,10 @@ use tracing::info;
|
||||||
pub async fn run_migrations(pool: &PgPool) -> Result<(), sqlx::Error> {
|
pub async fn run_migrations(pool: &PgPool) -> Result<(), sqlx::Error> {
|
||||||
info!("Running database migrations");
|
info!("Running database migrations");
|
||||||
|
|
||||||
sqlx::query(
|
// Create tables
|
||||||
r#"
|
let table_queries = [
|
||||||
CREATE TABLE IF NOT EXISTS customers (
|
// Customers table
|
||||||
|
r#"CREATE TABLE IF NOT EXISTS customers (
|
||||||
id UUID PRIMARY KEY,
|
id UUID PRIMARY KEY,
|
||||||
name VARCHAR(255) NOT NULL,
|
name VARCHAR(255) NOT NULL,
|
||||||
subscription_tier VARCHAR(50) NOT NULL,
|
subscription_tier VARCHAR(50) NOT NULL,
|
||||||
|
@ -14,9 +15,10 @@ pub async fn run_migrations(pool: &PgPool) -> Result<(), sqlx::Error> {
|
||||||
max_instances INTEGER NOT NULL,
|
max_instances INTEGER NOT NULL,
|
||||||
metadata JSONB NOT NULL DEFAULT '{}',
|
metadata JSONB NOT NULL DEFAULT '{}',
|
||||||
created_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP
|
created_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP
|
||||||
);
|
)"#,
|
||||||
|
|
||||||
CREATE TABLE IF NOT EXISTS instances (
|
// Instances table
|
||||||
|
r#"CREATE TABLE IF NOT EXISTS instances (
|
||||||
id UUID PRIMARY KEY,
|
id UUID PRIMARY KEY,
|
||||||
customer_id UUID NOT NULL REFERENCES customers(id),
|
customer_id UUID NOT NULL REFERENCES customers(id),
|
||||||
name VARCHAR(255) NOT NULL,
|
name VARCHAR(255) NOT NULL,
|
||||||
|
@ -25,9 +27,10 @@ pub async fn run_migrations(pool: &PgPool) -> Result<(), sqlx::Error> {
|
||||||
region VARCHAR(50) NOT NULL,
|
region VARCHAR(50) NOT NULL,
|
||||||
config JSONB NOT NULL DEFAULT '{}',
|
config JSONB NOT NULL DEFAULT '{}',
|
||||||
created_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP
|
created_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP
|
||||||
);
|
)"#,
|
||||||
|
|
||||||
CREATE TABLE IF NOT EXISTS rooms (
|
// Rooms table
|
||||||
|
r#"CREATE TABLE IF NOT EXISTS rooms (
|
||||||
id UUID PRIMARY KEY,
|
id UUID PRIMARY KEY,
|
||||||
customer_id UUID NOT NULL REFERENCES customers(id),
|
customer_id UUID NOT NULL REFERENCES customers(id),
|
||||||
instance_id UUID NOT NULL REFERENCES instances(id),
|
instance_id UUID NOT NULL REFERENCES instances(id),
|
||||||
|
@ -36,9 +39,10 @@ pub async fn run_migrations(pool: &PgPool) -> Result<(), sqlx::Error> {
|
||||||
status VARCHAR(50) NOT NULL,
|
status VARCHAR(50) NOT NULL,
|
||||||
config JSONB NOT NULL DEFAULT '{}',
|
config JSONB NOT NULL DEFAULT '{}',
|
||||||
created_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP
|
created_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP
|
||||||
);
|
)"#,
|
||||||
|
|
||||||
CREATE TABLE IF NOT EXISTS messages (
|
// Messages table
|
||||||
|
r#"CREATE TABLE IF NOT EXISTS messages (
|
||||||
id UUID PRIMARY KEY,
|
id UUID PRIMARY KEY,
|
||||||
customer_id UUID NOT NULL REFERENCES customers(id),
|
customer_id UUID NOT NULL REFERENCES customers(id),
|
||||||
instance_id UUID NOT NULL REFERENCES instances(id),
|
instance_id UUID NOT NULL REFERENCES instances(id),
|
||||||
|
@ -49,9 +53,10 @@ pub async fn run_migrations(pool: &PgPool) -> Result<(), sqlx::Error> {
|
||||||
metadata JSONB NOT NULL DEFAULT '{}',
|
metadata JSONB NOT NULL DEFAULT '{}',
|
||||||
created_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
created_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||||
shard_key INTEGER NOT NULL
|
shard_key INTEGER NOT NULL
|
||||||
);
|
)"#,
|
||||||
|
|
||||||
CREATE TABLE IF NOT EXISTS users (
|
// Users table
|
||||||
|
r#"CREATE TABLE IF NOT EXISTS users (
|
||||||
id UUID PRIMARY KEY,
|
id UUID PRIMARY KEY,
|
||||||
customer_id UUID NOT NULL REFERENCES customers(id),
|
customer_id UUID NOT NULL REFERENCES customers(id),
|
||||||
instance_id UUID NOT NULL REFERENCES instances(id),
|
instance_id UUID NOT NULL REFERENCES instances(id),
|
||||||
|
@ -60,9 +65,10 @@ pub async fn run_migrations(pool: &PgPool) -> Result<(), sqlx::Error> {
|
||||||
status VARCHAR(50) NOT NULL,
|
status VARCHAR(50) NOT NULL,
|
||||||
metadata JSONB NOT NULL DEFAULT '{}',
|
metadata JSONB NOT NULL DEFAULT '{}',
|
||||||
created_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP
|
created_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP
|
||||||
);
|
)"#,
|
||||||
|
|
||||||
CREATE TABLE IF NOT EXISTS tracks (
|
// Tracks table
|
||||||
|
r#"CREATE TABLE IF NOT EXISTS tracks (
|
||||||
id UUID PRIMARY KEY,
|
id UUID PRIMARY KEY,
|
||||||
room_id UUID NOT NULL REFERENCES rooms(id),
|
room_id UUID NOT NULL REFERENCES rooms(id),
|
||||||
user_id UUID NOT NULL REFERENCES users(id),
|
user_id UUID NOT NULL REFERENCES users(id),
|
||||||
|
@ -70,29 +76,43 @@ pub async fn run_migrations(pool: &PgPool) -> Result<(), sqlx::Error> {
|
||||||
status VARCHAR(50) NOT NULL,
|
status VARCHAR(50) NOT NULL,
|
||||||
metadata JSONB NOT NULL DEFAULT '{}',
|
metadata JSONB NOT NULL DEFAULT '{}',
|
||||||
created_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP
|
created_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP
|
||||||
);
|
)"#,
|
||||||
|
|
||||||
CREATE TABLE IF NOT EXISTS subscriptions (
|
// Subscriptions table
|
||||||
|
r#"CREATE TABLE IF NOT EXISTS subscriptions (
|
||||||
id UUID PRIMARY KEY,
|
id UUID PRIMARY KEY,
|
||||||
track_id UUID NOT NULL REFERENCES tracks(id),
|
track_id UUID NOT NULL REFERENCES tracks(id),
|
||||||
user_id UUID NOT NULL REFERENCES users(id),
|
user_id UUID NOT NULL REFERENCES users(id),
|
||||||
status VARCHAR(50) NOT NULL,
|
status VARCHAR(50) NOT NULL,
|
||||||
metadata JSONB NOT NULL DEFAULT '{}',
|
metadata JSONB NOT NULL DEFAULT '{}',
|
||||||
created_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP
|
created_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP
|
||||||
);
|
)"#,
|
||||||
|
];
|
||||||
|
|
||||||
-- Create indexes for performance
|
// Create indexes
|
||||||
CREATE INDEX IF NOT EXISTS idx_instances_customer_id ON instances(customer_id);
|
let index_queries = [
|
||||||
CREATE INDEX IF NOT EXISTS idx_rooms_instance_id ON rooms(instance_id);
|
"CREATE INDEX IF NOT EXISTS idx_instances_customer_id ON instances(customer_id)",
|
||||||
CREATE INDEX IF NOT EXISTS idx_messages_conversation_id ON messages(conversation_id);
|
"CREATE INDEX IF NOT EXISTS idx_rooms_instance_id ON rooms(instance_id)",
|
||||||
CREATE INDEX IF NOT EXISTS idx_messages_shard_key ON messages(shard_key);
|
"CREATE INDEX IF NOT EXISTS idx_messages_conversation_id ON messages(conversation_id)",
|
||||||
CREATE INDEX IF NOT EXISTS idx_tracks_room_id ON tracks(room_id);
|
"CREATE INDEX IF NOT EXISTS idx_messages_shard_key ON messages(shard_key)",
|
||||||
CREATE INDEX IF NOT EXISTS idx_subscriptions_track_id ON subscriptions(track_id);
|
"CREATE INDEX IF NOT EXISTS idx_tracks_room_id ON tracks(room_id)",
|
||||||
CREATE INDEX IF NOT EXISTS idx_users_email ON users(email);
|
"CREATE INDEX IF NOT EXISTS idx_subscriptions_track_id ON subscriptions(track_id)",
|
||||||
"#,
|
"CREATE INDEX IF NOT EXISTS idx_users_email ON users(email)",
|
||||||
)
|
];
|
||||||
.execute(pool)
|
|
||||||
.await?;
|
// Execute table creation queries
|
||||||
|
for query in table_queries {
|
||||||
|
sqlx::query(query)
|
||||||
|
.execute(pool)
|
||||||
|
.await?;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Execute index creation queries
|
||||||
|
for query in index_queries {
|
||||||
|
sqlx::query(query)
|
||||||
|
.execute(pool)
|
||||||
|
.await?;
|
||||||
|
}
|
||||||
|
|
||||||
info!("Migrations completed successfully");
|
info!("Migrations completed successfully");
|
||||||
Ok(())
|
Ok(())
|
||||||
|
|
0
process.rs
Normal file
0
process.rs
Normal file
558
prompt.md
Normal file
558
prompt.md
Normal file
|
@ -0,0 +1,558 @@
|
||||||
|
You are a distributed systems architect for a billion-scale real-time communication platform called General Bots or gb. The system combines bot capabilities, WebRTC communication, and massive-scale messaging with the following architecture:
|
||||||
|
|
||||||
|
1. Core Domains and Models:
|
||||||
|
|
||||||
|
|
||||||
|
A. Customer Hierarchy:
|
||||||
|
- Customer (top-level organization)
|
||||||
|
- Multiple Instances
|
||||||
|
- Subscription Management
|
||||||
|
- Resource Quotas
|
||||||
|
- Regional Distribution
|
||||||
|
- Billing & Usage Tracking
|
||||||
|
|
||||||
|
B. Instance Management:
|
||||||
|
- Per-customer instances
|
||||||
|
- Resource isolation
|
||||||
|
- Regional deployment
|
||||||
|
- Feature toggles
|
||||||
|
- Usage monitoring
|
||||||
|
- Shard management
|
||||||
|
|
||||||
|
2. Communication Infrastructure:
|
||||||
|
|
||||||
|
A. Real-time Rooms:
|
||||||
|
- WebRTC-based communication
|
||||||
|
- Track management (audio/video)
|
||||||
|
- Participant handling
|
||||||
|
- Room scaling
|
||||||
|
- Media processing
|
||||||
|
- Recording capabilities
|
||||||
|
- Video based rooms like Zoom.
|
||||||
|
- Tiktok lives - like
|
||||||
|
|
||||||
|
B. Messaging System:
|
||||||
|
- Sharded message queues
|
||||||
|
- Message persistence
|
||||||
|
- Real-time delivery
|
||||||
|
- Message routing
|
||||||
|
- Delivery status tracking
|
||||||
|
- Message search
|
||||||
|
|
||||||
|
4. Database Schema:
|
||||||
|
|
||||||
|
A. Core Tables:
|
||||||
|
```sql
|
||||||
|
CREATE TABLE customers (
|
||||||
|
id UUID PRIMARY KEY,
|
||||||
|
name VARCHAR(255),
|
||||||
|
subscription_tier VARCHAR(50),
|
||||||
|
status VARCHAR(50),
|
||||||
|
max_instances INTEGER,
|
||||||
|
metadata JSONB,
|
||||||
|
created_at TIMESTAMPTZ
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE TABLE instances (
|
||||||
|
id UUID PRIMARY KEY,
|
||||||
|
customer_id UUID,
|
||||||
|
name VARCHAR(255),
|
||||||
|
status VARCHAR(50),
|
||||||
|
shard_id INTEGER,
|
||||||
|
region VARCHAR(50),
|
||||||
|
config JSONB,
|
||||||
|
created_at TIMESTAMPTZ
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE TABLE rooms (
|
||||||
|
id UUID PRIMARY KEY,
|
||||||
|
customer_id UUID,
|
||||||
|
instance_id UUID,
|
||||||
|
name VARCHAR(255),
|
||||||
|
kind VARCHAR(50),
|
||||||
|
status VARCHAR(50),
|
||||||
|
config JSONB,
|
||||||
|
created_at TIMESTAMPTZ
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE TABLE messages (
|
||||||
|
id UUID PRIMARY KEY,
|
||||||
|
customer_id UUID,
|
||||||
|
instance_id UUID,
|
||||||
|
conversation_id UUID,
|
||||||
|
sender_id UUID,
|
||||||
|
kind VARCHAR(50),
|
||||||
|
content TEXT,
|
||||||
|
metadata JSONB,
|
||||||
|
created_at TIMESTAMPTZ,
|
||||||
|
shard_key INTEGER
|
||||||
|
);
|
||||||
|
```
|
||||||
|
Also consider every table here even if you reorganize: BOnlineSubscription
|
||||||
|
GuaribasAdmin
|
||||||
|
GuaribasAnswer
|
||||||
|
GuaribasApplications
|
||||||
|
GuaribasChannel
|
||||||
|
GuaribasConversation
|
||||||
|
GuaribasConversationMessage
|
||||||
|
GuaribasGroup
|
||||||
|
GuaribasInstance
|
||||||
|
GuaribasLog
|
||||||
|
GuaribasPackage
|
||||||
|
GuaribasQuestion
|
||||||
|
GuaribasQuestionAlternate
|
||||||
|
GuaribasSchedule
|
||||||
|
GuaribasSubject
|
||||||
|
GuaribasUser
|
||||||
|
GuaribasUserGroup
|
||||||
|
|
||||||
|
|
||||||
|
5. Scaling Architecture:
|
||||||
|
|
||||||
|
A. Storage Layer:
|
||||||
|
- PostgreSQL (relational data)
|
||||||
|
- Sharded by customer_id
|
||||||
|
- Partitioned tables
|
||||||
|
- Read replicas
|
||||||
|
- TiKV (distributed KV)
|
||||||
|
- Real-time data
|
||||||
|
- Cache layer
|
||||||
|
- Fast lookups
|
||||||
|
- Redis (caching)
|
||||||
|
- Session data
|
||||||
|
- Rate limiting
|
||||||
|
- Temporary storage
|
||||||
|
|
||||||
|
B. Message Queue:
|
||||||
|
- Kafka clusters
|
||||||
|
- Sharded topics
|
||||||
|
- Message routing
|
||||||
|
- Event streaming
|
||||||
|
- Redis Pub/Sub
|
||||||
|
- Real-time updates
|
||||||
|
- Presence information
|
||||||
|
- Status changes
|
||||||
|
|
||||||
|
C. Media Handling:
|
||||||
|
- WebRTC media servers
|
||||||
|
- Track multiplexing
|
||||||
|
- Media processing
|
||||||
|
- Recording storage
|
||||||
|
|
||||||
|
6. API Structure:
|
||||||
|
|
||||||
|
A. System APIs:
|
||||||
|
```rust
|
||||||
|
pub trait SystemAPI {
|
||||||
|
async fn call_vm(&self, pid: Uuid, text: String) -> Result<String>;
|
||||||
|
async fn wait(&self, pid: Uuid, seconds: i32) -> Result<()>;
|
||||||
|
async fn save_file(&self, pid: Uuid, data: Vec<u8>) -> Result<FileInfo>;
|
||||||
|
async fn execute_sql(&self, pid: Uuid, sql: String) -> Result<QueryResult>;
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
B. Room APIs:
|
||||||
|
```rust
|
||||||
|
pub trait RoomAPI {
|
||||||
|
async fn create_room(&self, config: RoomConfig) -> Result<Room>;
|
||||||
|
async fn join_room(&self, room_id: Uuid, user_id: Uuid) -> Result<Connection>;
|
||||||
|
async fn publish_track(&self, track: TrackInfo) -> Result<Track>;
|
||||||
|
async fn subscribe_track(&self, track_id: Uuid) -> Result<Subscription>;
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
C. Message APIs:
|
||||||
|
```rust
|
||||||
|
pub trait MessageAPI {
|
||||||
|
async fn send_message(&self, message: Message) -> Result<MessageId>;
|
||||||
|
async fn get_messages(&self, filter: MessageFilter) -> Result<Vec<Message>>;
|
||||||
|
async fn update_status(&self, message_id: Uuid, status: Status) -> Result<()>;
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
7. Monitoring & Operations:
|
||||||
|
|
||||||
|
A. Metrics:
|
||||||
|
- System health
|
||||||
|
- Resource usage
|
||||||
|
- Message throughput
|
||||||
|
- Media quality
|
||||||
|
- Error rates
|
||||||
|
- API latency
|
||||||
|
|
||||||
|
B. Scaling Operations:
|
||||||
|
- Auto-scaling rules
|
||||||
|
- Shard management
|
||||||
|
- Load balancing
|
||||||
|
- Failover handling
|
||||||
|
- Data migration
|
||||||
|
|
||||||
|
C. Security:
|
||||||
|
- Authentication
|
||||||
|
- Authorization
|
||||||
|
- Rate limiting
|
||||||
|
- Data encryption
|
||||||
|
- Audit logging
|
||||||
|
|
||||||
|
Implementation Guidelines:
|
||||||
|
|
||||||
|
1. Use Rust for:
|
||||||
|
- Performance critical paths
|
||||||
|
- Memory safety
|
||||||
|
- Concurrent processing
|
||||||
|
- System reliability
|
||||||
|
|
||||||
|
2. Sharding Strategy:
|
||||||
|
- Shard by customer_id
|
||||||
|
- Instance isolation
|
||||||
|
- Regional distribution
|
||||||
|
- Data locality
|
||||||
|
|
||||||
|
3. Performance Targets:
|
||||||
|
- Billion concurrent connections
|
||||||
|
- Sub-second message delivery
|
||||||
|
- 4K video streaming
|
||||||
|
- Petabyte-scale storage
|
||||||
|
|
||||||
|
4. Reliability Requirements:
|
||||||
|
- 99.99% uptime
|
||||||
|
- No message loss
|
||||||
|
- Automatic failover
|
||||||
|
- Data redundancy
|
||||||
|
|
||||||
|
When implementing features, consider:
|
||||||
|
1. Multi-tenant isolation
|
||||||
|
2. Resource quotas
|
||||||
|
3. Security boundaries
|
||||||
|
4. Performance impact
|
||||||
|
5. Scaling implications
|
||||||
|
6. Monitoring requirements
|
||||||
|
|
||||||
|
The system should handle:
|
||||||
|
1. Billions of active users
|
||||||
|
2. Millions of concurrent rooms
|
||||||
|
3. Petabytes of message history
|
||||||
|
4. Global distribution
|
||||||
|
5. Real-time communication
|
||||||
|
6. Bot automation
|
||||||
|
|
||||||
|
|
||||||
|
API:
|
||||||
|
System Keywords:
|
||||||
|
|
||||||
|
POST /systemKeywords/callVM
|
||||||
|
POST /systemKeywords/append
|
||||||
|
POST /systemKeywords/seeCaption
|
||||||
|
POST /systemKeywords/seeText
|
||||||
|
POST /systemKeywords/sortBy
|
||||||
|
POST /systemKeywords/JSONAsGBTable
|
||||||
|
POST /systemKeywords/renderTable
|
||||||
|
POST /systemKeywords/closeHandles
|
||||||
|
POST /systemKeywords/asPDF
|
||||||
|
POST /systemKeywords/asImage
|
||||||
|
POST /systemKeywords/executeSQL
|
||||||
|
POST /systemKeywords/getFileContents
|
||||||
|
POST /systemKeywords/getRandomId
|
||||||
|
POST /systemKeywords/getStock
|
||||||
|
POST /systemKeywords/wait
|
||||||
|
POST /systemKeywords/talkTo
|
||||||
|
POST /systemKeywords/getUser
|
||||||
|
POST /systemKeywords/sendSmsTo
|
||||||
|
POST /systemKeywords/set
|
||||||
|
POST /systemKeywords/internalGetDocument
|
||||||
|
POST /systemKeywords/saveFile
|
||||||
|
POST /systemKeywords/uploadFile
|
||||||
|
POST /systemKeywords/note
|
||||||
|
POST /systemKeywords/saveToStorageBatch
|
||||||
|
POST /systemKeywords/saveToStorage
|
||||||
|
POST /systemKeywords/saveToStorageWithJSON
|
||||||
|
POST /systemKeywords/save
|
||||||
|
POST /systemKeywords/getHttp
|
||||||
|
POST /systemKeywords/isValidDate
|
||||||
|
POST /systemKeywords/isValidNumber
|
||||||
|
POST /systemKeywords/isValidHour
|
||||||
|
POST /systemKeywords/getFilter
|
||||||
|
POST /systemKeywords/find
|
||||||
|
POST /systemKeywords/getDateFromLocaleString
|
||||||
|
POST /systemKeywords/createFolder
|
||||||
|
POST /systemKeywords/shareFolder
|
||||||
|
POST /systemKeywords/internalCreateDocument
|
||||||
|
POST /systemKeywords/createDocument
|
||||||
|
POST /systemKeywords/copyFile
|
||||||
|
POST /systemKeywords/convert
|
||||||
|
POST /systemKeywords/generatePassword
|
||||||
|
POST /systemKeywords/flattenJSON
|
||||||
|
POST /systemKeywords/getCustomToken
|
||||||
|
POST /systemKeywords/getByHttp
|
||||||
|
POST /systemKeywords/putByHttp
|
||||||
|
POST /systemKeywords/postByHttp
|
||||||
|
POST /systemKeywords/numberOnly
|
||||||
|
POST /systemKeywords/createLead
|
||||||
|
POST /systemKeywords/fill
|
||||||
|
POST /systemKeywords/screenCapture
|
||||||
|
POST /systemKeywords/numberToLetters
|
||||||
|
POST /systemKeywords/getTableFromName
|
||||||
|
POST /systemKeywords/merge
|
||||||
|
POST /systemKeywords/tweet
|
||||||
|
POST /systemKeywords/rewrite
|
||||||
|
POST /systemKeywords/pay
|
||||||
|
POST /systemKeywords/autoSave
|
||||||
|
POST /systemKeywords/internalAutoSave
|
||||||
|
POST /systemKeywords/deleteFromStorage
|
||||||
|
POST /systemKeywords/deleteFile
|
||||||
|
POST /systemKeywords/getExtensionInfo
|
||||||
|
POST /systemKeywords/dirFolder
|
||||||
|
POST /systemKeywords/log
|
||||||
|
Dialog Keywords:
|
||||||
|
|
||||||
|
POST /dialogKeywords/chart
|
||||||
|
POST /dialogKeywords/getOCR
|
||||||
|
POST /dialogKeywords/getToday
|
||||||
|
POST /dialogKeywords/exit
|
||||||
|
POST /dialogKeywords/getActiveTasks
|
||||||
|
POST /dialogKeywords/createDeal
|
||||||
|
POST /dialogKeywords/findContact
|
||||||
|
POST /dialogKeywords/getContentLocaleWithCulture
|
||||||
|
POST /dialogKeywords/getCoded
|
||||||
|
POST /dialogKeywords/getWeekFromDate
|
||||||
|
POST /dialogKeywords/getDateDiff
|
||||||
|
POST /dialogKeywords/format
|
||||||
|
POST /dialogKeywords/dateAdd [...and many more dialog-related endpoints]
|
||||||
|
Web Automation:
|
||||||
|
|
||||||
|
POST /webAutomation/isSelector
|
||||||
|
POST /webAutomation/cyrb53
|
||||||
|
POST /webAutomation/closeHandles
|
||||||
|
POST /webAutomation/openPage
|
||||||
|
POST /webAutomation/getPageByHandle
|
||||||
|
POST /webAutomation/getBySelector
|
||||||
|
POST /webAutomation/getByFrame
|
||||||
|
POST /webAutomation/hover
|
||||||
|
POST /webAutomation/click [...and more web automation endpoints]
|
||||||
|
|
||||||
|
Image Processing:
|
||||||
|
|
||||||
|
POST /imageProcessing/sharpen
|
||||||
|
POST /imageProcessing/mergeImage
|
||||||
|
POST /imageProcessing/blur
|
||||||
|
|
||||||
|
Debugger Service:
|
||||||
|
|
||||||
|
There must have be a webassymbly that convert BASIC code using a compiler to webassymbly and support remotedebugging by API.
|
||||||
|
|
||||||
|
POST /debuggerService/setBreakpoint
|
||||||
|
POST /debuggerService/refactor
|
||||||
|
POST /debuggerService/resume
|
||||||
|
POST /debuggerService/stop
|
||||||
|
POST /debuggerService/step
|
||||||
|
POST /debuggerService/getContext
|
||||||
|
POST /debuggerService/start
|
||||||
|
POST /debuggerService/sendMessage
|
||||||
|
|
||||||
|
Dependencies original, migrate everything to workspace.dependencies
|
||||||
|
"@azure/arm-appservice": "15.0.0",
|
||||||
|
"@azure/arm-cognitiveservices": "7.5.0",
|
||||||
|
"@azure/arm-resources": "5.2.0",
|
||||||
|
"@azure/arm-search": "3.2.0",
|
||||||
|
"@azure/arm-sql": "10.0.0",
|
||||||
|
"@azure/arm-subscriptions": "5.1.0",
|
||||||
|
"@azure/cognitiveservices-computervision": "8.2.0",
|
||||||
|
"@azure/keyvault-keys": "4.8.0",
|
||||||
|
"@azure/ms-rest-js": "2.7.0",
|
||||||
|
"@azure/msal-node": "2.13.1",
|
||||||
|
"@azure/openai": "2.0.0-beta.1",
|
||||||
|
"@azure/search-documents": "12.1.0",
|
||||||
|
"@azure/storage-blob": "12.24.0",
|
||||||
|
"@google-cloud/pubsub": "4.7.0",
|
||||||
|
"@google-cloud/translate": "8.5.0",
|
||||||
|
"@hubspot/api-client": "11.2.0",
|
||||||
|
"@koa/cors": "5.0.0",
|
||||||
|
"@langchain/anthropic": "^0.3.7",
|
||||||
|
"@langchain/community": "0.2.31",
|
||||||
|
"@langchain/core": "^0.3.17",
|
||||||
|
"@langchain/openai": "0.2.8",
|
||||||
|
"@microsoft/microsoft-graph-client": "3.0.7",
|
||||||
|
"@nlpjs/basic": "4.27.0",
|
||||||
|
"@nosferatu500/textract": "3.1.3",
|
||||||
|
"@push-rpc/core": "1.9.0",
|
||||||
|
"@push-rpc/http": "1.9.0",
|
||||||
|
"@push-rpc/openapi": "1.9.0",
|
||||||
|
"@push-rpc/websocket": "1.9.0",
|
||||||
|
"@semantic-release/changelog": "6.0.3",
|
||||||
|
"@semantic-release/exec": "6.0.3",
|
||||||
|
"@semantic-release/git": "10.0.1",
|
||||||
|
"@sendgrid/mail": "8.1.3",
|
||||||
|
"@sequelize/core": "7.0.0-alpha.37",
|
||||||
|
"@types/node": "22.5.2",
|
||||||
|
"@types/validator": "13.12.1",
|
||||||
|
"adm-zip": "0.5.16",
|
||||||
|
"ai2html": "^0.121.1",
|
||||||
|
"alasql": "4.5.1",
|
||||||
|
"any-shell-escape": "0.1.1",
|
||||||
|
"arraybuffer-to-buffer": "0.0.7",
|
||||||
|
"async-mutex": "0.5.0",
|
||||||
|
"async-promises": "0.2.3",
|
||||||
|
"async-retry": "1.3.3",
|
||||||
|
"basic-auth": "2.0.1",
|
||||||
|
"billboard.js": "3.13.0",
|
||||||
|
"bluebird": "3.7.2",
|
||||||
|
"body-parser": "1.20.2",
|
||||||
|
"botbuilder": "4.23.0",
|
||||||
|
"botbuilder-adapter-facebook": "1.0.12",
|
||||||
|
"botbuilder-ai": "4.23.0",
|
||||||
|
"botbuilder-dialogs": "4.23.0",
|
||||||
|
"botframework-connector": "4.23.0",
|
||||||
|
"botlib": "5.0.0",
|
||||||
|
"c3-chart-maker": "0.2.8",
|
||||||
|
"cd": "0.3.3",
|
||||||
|
"chalk-animation": "2.0.3",
|
||||||
|
"chatgpt": "5.2.5",
|
||||||
|
"chrome-remote-interface": "0.33.2",
|
||||||
|
"cli-progress": "3.12.0",
|
||||||
|
"cli-spinner": "0.2.10",
|
||||||
|
"core-js": "3.38.1",
|
||||||
|
"cors": "2.8.5",
|
||||||
|
"csv-database": "0.9.2",
|
||||||
|
"data-forge": "1.10.2",
|
||||||
|
"date-diff": "1.0.2",
|
||||||
|
"docximager": "0.0.4",
|
||||||
|
"docxtemplater": "3.50.0",
|
||||||
|
"dotenv-extended": "2.9.0",
|
||||||
|
"electron": "32.0.1",
|
||||||
|
"exceljs": "4.4.0",
|
||||||
|
"express": "4.19.2",
|
||||||
|
"express-remove-route": "1.0.0",
|
||||||
|
"facebook-nodejs-business-sdk": "^20.0.2",
|
||||||
|
"ffmpeg-static": "5.2.0",
|
||||||
|
"formidable": "^3.5.1",
|
||||||
|
"get-image-colors": "4.0.1",
|
||||||
|
"glob": "^11.0.0",
|
||||||
|
"google-libphonenumber": "3.2.38",
|
||||||
|
"googleapis": "143.0.0",
|
||||||
|
"hnswlib-node": "3.0.0",
|
||||||
|
"html-to-md": "0.8.6",
|
||||||
|
"http-proxy": "1.18.1",
|
||||||
|
"ibm-watson": "9.1.0",
|
||||||
|
"icojs": "^0.19.4",
|
||||||
|
"instagram-private-api": "1.46.1",
|
||||||
|
"iso-639-1": "3.1.3",
|
||||||
|
"isomorphic-fetch": "3.0.0",
|
||||||
|
"jimp": "1.6.0",
|
||||||
|
"js-md5": "0.8.3",
|
||||||
|
"json-schema-to-zod": "2.4.0",
|
||||||
|
"jsqr": "^1.4.0",
|
||||||
|
"just-indent": "0.0.1",
|
||||||
|
"keyv": "5.0.1",
|
||||||
|
"koa": "2.15.3",
|
||||||
|
"koa-body": "6.0.1",
|
||||||
|
"koa-ratelimit": "5.1.0",
|
||||||
|
"koa-router": "12.0.1",
|
||||||
|
"langchain": "0.2.17",
|
||||||
|
"language-tags": "1.0.9",
|
||||||
|
"line-replace": "2.0.1",
|
||||||
|
"lodash": "4.17.21",
|
||||||
|
"luxon": "3.5.0",
|
||||||
|
"mammoth": "1.8.0",
|
||||||
|
"mariadb": "3.3.1",
|
||||||
|
"mime-types": "2.1.35",
|
||||||
|
"moment": "2.30.1",
|
||||||
|
"ms-rest-azure": "3.0.2",
|
||||||
|
"mysql": "^2.18.1",
|
||||||
|
"nexmo": "2.9.1",
|
||||||
|
"ngrok": "5.0.0-beta.2",
|
||||||
|
"node-cron": "3.0.3",
|
||||||
|
"node-html-parser": "6.1.13",
|
||||||
|
"node-nlp": "4.27.0",
|
||||||
|
"node-tesseract-ocr": "2.2.1",
|
||||||
|
"nodemon": "^3.1.7",
|
||||||
|
"npm": "10.8.3",
|
||||||
|
"open": "10.1.0",
|
||||||
|
"open-docxtemplater-image-module": "1.0.3",
|
||||||
|
"openai": "4.57.0",
|
||||||
|
"pdf-extraction": "1.0.2",
|
||||||
|
"pdf-parse": "1.1.1",
|
||||||
|
"pdf-to-png-converter": "3.3.0",
|
||||||
|
"pdfjs-dist": "4.6.82",
|
||||||
|
"pdfkit": "0.15.0",
|
||||||
|
"phone": "3.1.50",
|
||||||
|
"pizzip": "3.1.7",
|
||||||
|
"pptxtemplater": "1.0.5",
|
||||||
|
"pragmatismo-io-framework": "1.1.1",
|
||||||
|
"prism-media": "1.3.5",
|
||||||
|
"public-ip": "7.0.1",
|
||||||
|
"punycode": "2.3.1",
|
||||||
|
"puppeteer": "23.2.2",
|
||||||
|
"puppeteer-extra": "3.3.6",
|
||||||
|
"puppeteer-extra-plugin-minmax": "1.1.2",
|
||||||
|
"puppeteer-extra-plugin-stealth": "2.11.2",
|
||||||
|
"qr-scanner": "1.4.2",
|
||||||
|
"qrcode": "1.5.4",
|
||||||
|
"qrcode-reader": "^1.0.4",
|
||||||
|
"qrcode-terminal": "0.12.0",
|
||||||
|
"readline": "1.3.0",
|
||||||
|
"reflect-metadata": "0.2.2",
|
||||||
|
"rimraf": "6.0.1",
|
||||||
|
"safe-buffer": "5.2.1",
|
||||||
|
"scanf": "1.2.0",
|
||||||
|
"sequelize": "6.37.3",
|
||||||
|
"sequelize-cli": "6.6.2",
|
||||||
|
"sequelize-typescript": "2.1.6",
|
||||||
|
"simple-git": "3.26.0",
|
||||||
|
"speakingurl": "14.0.1",
|
||||||
|
"sqlite3": "5.1.7",
|
||||||
|
"ssr-for-bots": "1.0.1-c",
|
||||||
|
"strict-password-generator": "1.1.2",
|
||||||
|
"svg2img": "^1.0.0-beta.2",
|
||||||
|
"swagger-client": "3.29.2",
|
||||||
|
"swagger-ui-dist": "5.17.14",
|
||||||
|
"tabulator-tables": "6.2.5",
|
||||||
|
"tedious": "18.6.1",
|
||||||
|
"textract": "2.5.0",
|
||||||
|
"twilio": "5.2.3",
|
||||||
|
"twitter-api-v2": "1.17.2",
|
||||||
|
"typeorm": "0.3.20",
|
||||||
|
"typescript": "5.5.4",
|
||||||
|
"url-join": "5.0.0",
|
||||||
|
"vhost": "3.0.2",
|
||||||
|
"vm2": "3.9.19",
|
||||||
|
"vm2-process": "2.1.5",
|
||||||
|
"walk-promise": "0.2.0",
|
||||||
|
"washyourmouthoutwithsoap": "1.0.2",
|
||||||
|
"webdav-server": "2.6.2",
|
||||||
|
"webp-converter": "^2.3.3",
|
||||||
|
"whatsapp-cloud-api": "0.3.1",
|
||||||
|
"whatsapp-web.js": "1.26.1-alpha.1",
|
||||||
|
"winston": "3.14.2",
|
||||||
|
"ws": "8.18.0",
|
||||||
|
"yaml": "2.5.0",
|
||||||
|
"yarn": "1.22.22",
|
||||||
|
"zod-to-json-schema": "3.23.2"
|
||||||
|
},
|
||||||
|
"devDependencies": {
|
||||||
|
"@types/qrcode": "1.5.5",
|
||||||
|
"@types/url-join": "4.0.3",
|
||||||
|
"@typescript-eslint/eslint-plugin": "8.4.0",
|
||||||
|
"@typescript-eslint/parser": "8.4.0",
|
||||||
|
"ban-sensitive-files": "1.10.5",
|
||||||
|
"commitizen": "4.3.0",
|
||||||
|
"cz-conventional-changelog": "3.3.0",
|
||||||
|
"dependency-check": "4.1.0",
|
||||||
|
"git-issues": "1.3.1",
|
||||||
|
"license-checker": "25.0.1",
|
||||||
|
"prettier-standard": "16.4.1",
|
||||||
|
"semantic-release": "24.1.0",
|
||||||
|
"simple-commit-message": "4.1.3",
|
||||||
|
"super-strong-password-generator": "2.0.2",
|
||||||
|
"super-strong-password-generator-es": "2.0.2",
|
||||||
|
"travis-deploy-once": "5.0.11",
|
||||||
|
"tslint": "6.1.3",
|
||||||
|
"tsx": "^4.19.1",
|
||||||
|
"vitest": "2.0.5"
|
||||||
|
|
||||||
|
migrate them to rust compatible,
|
||||||
|
|
||||||
|
- do not skip items, migrate everything, in way better, in your interpretation.
|
||||||
|
- use kubernetes and create environment configuration for everything and ingress to have several server nodes if eeed automatically
|
||||||
|
- I NEED FULL CODE SOLUTION IN PROFESSIONAL TESTABLE RUST CODE: if you need split answer in several parts, but provide ENTIRE CODE. Complete working balenced aserver. IMPORTANTE: Generate the project in a .sh shell script output with cat, of entire code base to be restored, no placeholder neither TODOS.
|
||||||
|
- VERY IMPORNTANT: DO NOT put things like // Add other system routes... you should WRITE ACUTAL CODE
|
||||||
|
- Need tests for every line of code written.
|
Loading…
Add table
Reference in a new issue