feat: add diesel_migrations and update dependencies

Added the diesel_migrations crate (declared as 2.1.0 in Cargo.toml, resolved to v2.3.0 in Cargo.lock) to enable database migration functionality. Updated Cargo.toml and Cargo.lock to include the new dependency along with its required sub-dependencies (migrations_internals and migrations_macros); a sketch of the resulting migration flow follows the summary below. Also made minor cleanups in the codebase:
- Removed unused UI code from platform README
- Cleaned up LLM server initialization code
- Added additional build dependencies in documentation
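
For context, here is a minimal sketch of the embedded-migration flow this commit wires up (it mirrors the apply_migrations change in the bootstrap diff further down; the run_migrations wrapper name is illustrative, not part of the commit):

```rust
use diesel::PgConnection;
use diesel_migrations::{
    embed_migrations, EmbeddedMigrations, HarnessWithOutput, MigrationHarness,
};

// embed_migrations! compiles every migration under ./migrations into the binary.
const MIGRATIONS: EmbeddedMigrations = embed_migrations!("migrations");

fn run_migrations(conn: &mut PgConnection) -> anyhow::Result<()> {
    // Wrap the connection so each migration's name is printed as it runs.
    let mut harness = HarnessWithOutput::write_to_stdout(conn);
    harness
        .run_pending_migrations(MIGRATIONS)
        .map_err(|e| anyhow::anyhow!("Migration error: {}", e))?;
    Ok(())
}
```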
Rodrigo Rodriguez (Pragmatismo) committed 2025-11-12 12:48:06 -03:00
parent b68f4ee760
commit 4cf71b2c6e
25 changed files with 234 additions and 195 deletions

Cargo.lock (generated, 33 lines changed)

@ -1349,6 +1349,7 @@ dependencies = [
"crossterm 0.29.0",
"csv",
"diesel",
"diesel_migrations",
"dotenvy",
"downloader",
"env_logger",
@ -2472,6 +2473,17 @@ dependencies = [
"syn 2.0.108",
]
[[package]]
name = "diesel_migrations"
version = "2.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ee060f709c3e3b1cadd83fcd0f61711f7a8cf493348f758d3a1c1147d70b3c97"
dependencies = [
"diesel",
"migrations_internals",
"migrations_macros",
]
[[package]]
name = "diesel_table_macro_syntax"
version = "0.3.0"
@ -4736,6 +4748,27 @@ dependencies = [
"autocfg",
]
[[package]]
name = "migrations_internals"
version = "2.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "36c791ecdf977c99f45f23280405d7723727470f6689a5e6dbf513ac547ae10d"
dependencies = [
"serde",
"toml 0.9.8",
]
[[package]]
name = "migrations_macros"
version = "2.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "36fc5ac76be324cfd2d3f2cf0fdf5d5d3c4f14ed8aaebadb09e304ba42282703"
dependencies = [
"migrations_internals",
"proc-macro2",
"quote",
]
[[package]]
name = "mime"
version = "0.3.17"


@ -66,6 +66,7 @@ cron = "0.15.0"
crossterm = "0.29.0"
csv = "1.3"
diesel = { version = "2.1", features = ["postgres", "uuid", "chrono", "serde_json", "r2d2"] }
diesel_migrations = "2.1.0"
dotenvy = "0.15"
downloader = "0.2"
env_logger = "0.11"


@ -10,9 +10,17 @@ source "$HOME/.cargo/env"
git clone https://alm.pragmatismo.com.br/generalbots/gbserver
apt install -y build-essential \
libcap2-bin \
pkg-config \
libssl-dev \
gcc-multilib \
git-lfs \
curl \
coturn \
gnupg \
ca-certificates \
g++-multilib \
clang \
lld \


@ -0,0 +1,12 @@
DROP TABLE public.usage_analytics;
DROP TABLE public.message_history;
DROP TABLE public.context_injections;
DROP TABLE public.whatsapp_numbers;
DROP TABLE public.user_sessions;
DROP TABLE public.bot_channels;
DROP TABLE public.users;
DROP TABLE public.tools;
DROP TABLE public.system_automations;
DROP TABLE public.organizations;
DROP TABLE public.clicks;
DROP TABLE public.bots;


@ -0,0 +1,3 @@
DROP INDEX idx_bot_memories_key;
DROP INDEX idx_bot_memories_bot_id;
DROP TABLE bot_memories;


@ -0,0 +1,23 @@
-- Drop triggers
DROP TRIGGER IF EXISTS update_basic_tools_updated_at ON basic_tools;
DROP TRIGGER IF EXISTS update_kb_collections_updated_at ON kb_collections;
DROP TRIGGER IF EXISTS update_kb_documents_updated_at ON kb_documents;
-- Drop function
DROP FUNCTION IF EXISTS update_updated_at_column;
-- Drop indexes
DROP INDEX IF EXISTS idx_basic_tools_active;
DROP INDEX IF EXISTS idx_basic_tools_name;
DROP INDEX IF EXISTS idx_basic_tools_bot_id;
DROP INDEX IF EXISTS idx_kb_collections_name;
DROP INDEX IF EXISTS idx_kb_collections_bot_id;
DROP INDEX IF EXISTS idx_kb_documents_indexed_at;
DROP INDEX IF EXISTS idx_kb_documents_hash;
DROP INDEX IF EXISTS idx_kb_documents_collection;
DROP INDEX IF EXISTS idx_kb_documents_bot_id;
-- Drop tables
DROP TABLE IF EXISTS basic_tools;
DROP TABLE IF EXISTS kb_collections;
DROP TABLE IF EXISTS kb_documents;


@ -0,0 +1,11 @@
-- Drop indexes
DROP INDEX IF EXISTS idx_session_tool_name;
DROP INDEX IF EXISTS idx_session_tool_session;
DROP INDEX IF EXISTS idx_user_kb_website;
DROP INDEX IF EXISTS idx_user_kb_name;
DROP INDEX IF EXISTS idx_user_kb_bot_id;
DROP INDEX IF EXISTS idx_user_kb_user_id;
-- Drop tables
DROP TABLE IF EXISTS session_tool_associations;
DROP TABLE IF EXISTS user_kb_associations;


@ -0,0 +1,54 @@
-- Drop indexes first
DROP INDEX IF EXISTS idx_gbot_sync_bot;
DROP INDEX IF EXISTS idx_component_logs_created;
DROP INDEX IF EXISTS idx_component_logs_level;
DROP INDEX IF EXISTS idx_component_logs_component;
DROP INDEX IF EXISTS idx_component_status;
DROP INDEX IF EXISTS idx_component_name;
DROP INDEX IF EXISTS idx_connection_config_active;
DROP INDEX IF EXISTS idx_connection_config_name;
DROP INDEX IF EXISTS idx_connection_config_bot;
DROP INDEX IF EXISTS idx_model_config_default;
DROP INDEX IF EXISTS idx_model_config_active;
DROP INDEX IF EXISTS idx_model_config_type;
DROP INDEX IF EXISTS idx_bot_config_key;
DROP INDEX IF EXISTS idx_bot_config_bot;
DROP INDEX IF EXISTS idx_tenant_config_key;
DROP INDEX IF EXISTS idx_tenant_config_tenant;
DROP INDEX IF EXISTS idx_server_config_type;
DROP INDEX IF EXISTS idx_server_config_key;
-- Drop tables
DROP TABLE IF EXISTS gbot_config_sync;
DROP TABLE IF EXISTS component_logs;
DROP TABLE IF EXISTS component_installations;
DROP TABLE IF EXISTS connection_configurations;
DROP TABLE IF EXISTS model_configurations;
DROP TABLE IF EXISTS bot_configuration;
DROP TABLE IF EXISTS tenant_configuration;
DROP TABLE IF EXISTS server_configuration;
-- Remove added columns if they exist
DO $$
BEGIN
IF EXISTS (
SELECT 1 FROM information_schema.columns
WHERE table_name = 'user_sessions' AND column_name = 'tenant_id'
) THEN
ALTER TABLE user_sessions DROP COLUMN tenant_id;
END IF;
IF EXISTS (
SELECT 1 FROM information_schema.columns
WHERE table_name = 'bots' AND column_name = 'tenant_id'
) THEN
ALTER TABLE bots DROP COLUMN tenant_id;
END IF;
END $$;
-- Drop tenant indexes if they exist
DROP INDEX IF EXISTS idx_user_sessions_tenant;
DROP INDEX IF EXISTS idx_bots_tenant;
-- Remove default tenant
DELETE FROM tenants WHERE slug = 'default';


@ -0,0 +1,26 @@
-- Revert clicks table changes
CREATE TABLE IF NOT EXISTS public.old_clicks (
campaign_id text NOT NULL,
email text NOT NULL,
updated_at timestamptz DEFAULT now() NULL,
CONSTRAINT clicks_campaign_id_email_key UNIQUE (campaign_id, email)
);
INSERT INTO public.old_clicks (campaign_id, email, updated_at)
SELECT campaign_id, email, updated_at FROM public.clicks;
DROP TABLE public.clicks;
ALTER TABLE public.old_clicks RENAME TO clicks;
-- Remove system_automations constraints and indexes
DROP INDEX IF EXISTS idx_system_automations_bot_kind_param;
ALTER TABLE public.system_automations DROP CONSTRAINT IF EXISTS system_automations_bot_kind_param_unique;
DROP INDEX IF EXISTS idx_system_automations_bot_id;
ALTER TABLE public.system_automations DROP COLUMN IF EXISTS bot_id;
DROP INDEX IF EXISTS idx_system_automations_name;
ALTER TABLE public.system_automations DROP COLUMN IF EXISTS name;
-- Remove bot_configuration constraint
ALTER TABLE bot_configuration DROP CONSTRAINT IF EXISTS bot_configuration_config_key_unique;


@ -12,6 +12,7 @@ When initial attempts fail, sequentially try these LLMs:
- **On unresolved error**: Stop, use add-req.sh, and consult Claude for guidance; also try DeepSeek with DeepThink and Web search turned on.
- **Change progression**: Start with DeepSeek, conclude with gpt-oss-120b
- If a big requirement fails, point to a @code file with a similar pattern, or a sample from the official docs.
- **Warning removal**: As the last task before committing, create a task list of warnings to remove and work through it with cargo check.
- **Final validation**: Use prompt "cargo check" with gpt-oss-120b
- Be humble: one requirement, one commit. But sometimes the freedom of chaos is welcome, when no deadlines are set.
- Fix manually in case of dangerous trouble.


@ -4,7 +4,7 @@ use crate::shared::utils::establish_pg_connection;
use anyhow::Result;
use aws_config::BehaviorVersion;
use aws_sdk_s3::Client;
use diesel::connection::SimpleConnection;
use dotenvy::dotenv;
use log::{error, trace};
use rand::distr::Alphanumeric;
use std::io::{self, Write};
@ -73,28 +73,23 @@ impl BootstrapManager {
.collect()
}
pub async fn bootstrap(&mut self) {
let db_env_path = std::env::current_dir().unwrap().join(".env");
pub async fn bootstrap(&mut self) -> Result<()> {
let env_path = std::env::current_dir().unwrap().join(".env");
let db_password = self.generate_secure_password(32);
let database_url = std::env::var("DATABASE_URL").unwrap_or_else(|_| {
format!("postgres://gbuser:{}@localhost:5432/botserver", db_password)
});
let db_line = format!("DATABASE_URL={}\n", database_url);
let drive_password = self.generate_secure_password(16);
let drive_user = "gbdriveuser".to_string();
let env_path = std::env::current_dir().unwrap().join(".env");
let env_content = format!(
let drive_env = format!(
"\nDRIVE_SERVER=http://localhost:9000\nDRIVE_ACCESSKEY={}\nDRIVE_SECRET={}\n",
drive_user, drive_password
);
let _ = std::fs::OpenOptions::new()
.create(true)
.append(true)
.open(&env_path)
.and_then(|mut file| std::io::Write::write_all(&mut file, env_content.as_bytes()));
let contents_env = format!("DATABASE_URL={}\n{}", database_url, drive_env);
let _ = std::fs::write(&env_path, contents_env);
dotenv().ok();
let _ = std::fs::write(&db_env_path, db_line);
let pm = PackageManager::new(self.install_mode.clone(), self.tenant.clone()).unwrap();
let required_components = vec!["tables", "drive", "cache", "llm"];
@ -136,38 +131,11 @@ impl BootstrapManager {
_ = pm.install(component).await;
if component == "tables" {
let mut conn = establish_pg_connection().unwrap();
let migration_dir = include_dir::include_dir!("./migrations");
let mut migration_files: Vec<_> = migration_dir
.files()
.filter_map(|file| {
let path = file.path();
if path.extension()? == "sql" {
Some(file)
} else {
None
}
})
.collect();
migration_files.sort_by_key(|f| f.path());
for migration_file in migration_files {
let migration = migration_file
.contents_utf8()
.ok_or_else(|| anyhow::anyhow!("Migration file is not valid UTF-8"));
if let Err(e) = conn.batch_execute(migration.unwrap()) {
log::error!(
"Failed to execute migration {}: {}",
migration_file.path().display(),
e
);
}
trace!(
"Successfully executed migration: {}",
migration_file.path().display()
);
}
self.apply_migrations(&mut conn)?;
}
}
}
Ok(())
}
async fn get_drive_client(config: &AppConfig) -> Client {
@ -304,38 +272,18 @@ impl BootstrapManager {
Ok(())
})
}
fn apply_migrations(&self, conn: &mut diesel::PgConnection) -> Result<()> {
let migrations_dir = std::path::Path::new("migrations");
if !migrations_dir.exists() {
return Ok(());
}
let mut sql_files: Vec<_> = std::fs::read_dir(migrations_dir)?
.filter_map(|entry| entry.ok())
.filter(|entry| {
entry
.path()
.extension()
.and_then(|s| s.to_str())
.map(|s| s == "sql")
.unwrap_or(false)
})
.collect();
sql_files.sort_by_key(|entry| entry.path());
for entry in sql_files {
let path = entry.path();
let filename = path.file_name().unwrap().to_string_lossy();
match std::fs::read_to_string(&path) {
Ok(sql) => match conn.batch_execute(&sql) {
Err(e) => {
log::warn!("Migration {} failed: {}", filename, e);
}
_ => {}
},
Err(e) => {
log::warn!("Failed to read migration {}: {}", filename, e);
}
}
pub fn apply_migrations(&self, conn: &mut diesel::PgConnection) -> Result<()> {
use diesel_migrations::{embed_migrations, EmbeddedMigrations, MigrationHarness};
use diesel_migrations::HarnessWithOutput;
const MIGRATIONS: EmbeddedMigrations = embed_migrations!("migrations");
let mut harness = HarnessWithOutput::write_to_stdout(conn);
if let Err(e) = harness.run_pending_migrations(MIGRATIONS) {
error!("Failed to apply migrations: {}", e);
return Err(anyhow::anyhow!("Migration error: {}", e));
}
Ok(())
}
}
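
The new SQL files added earlier in this commit are down.sql scripts; diesel_migrations expects each migration to live in its own directory under migrations/, containing an up.sql and an optional down.sql, and the same harness API can revert them. A minimal sketch, reusing the embedded MIGRATIONS constant from the diff above (the revert_last helper name is illustrative):

```rust
use diesel::PgConnection;
use diesel_migrations::{embed_migrations, EmbeddedMigrations, MigrationHarness};

const MIGRATIONS: EmbeddedMigrations = embed_migrations!("migrations");

// Rolls back the most recently applied migration by running its down.sql.
fn revert_last(conn: &mut PgConnection) -> anyhow::Result<()> {
    conn.revert_last_migration(MIGRATIONS)
        .map_err(|e| anyhow::anyhow!("Revert error: {}", e))?;
    Ok(())
}
```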


@ -1,5 +1,4 @@
mod ui;
use crate::bot::ui::BotUI;
use crate::config::ConfigManager;
use crate::drive_monitor::DriveMonitor;
use crate::llm::OpenAIClient;
@ -355,7 +354,7 @@ impl BotOrchestrator {
.rposition(|(role, _content)| role == "compact")
{
history = history.split_off(last_compacted_index);
for (role, content) in history.iter_mut() {
for (role, _content) in history.iter_mut() {
if role == "compact" {
*role = "user".to_string();
}
@ -513,7 +512,6 @@ impl BotOrchestrator {
"Total tokens (context + prompt + response): {}",
total_tokens
);
let config_manager = ConfigManager::new(self.state.conn.clone());
{
let mut sm = self.state.session_manager.lock().await;
sm.save_message(session.id, user_id, 2, &full_response, 1)?;


@ -1,33 +1 @@
use ratatui::{
backend::CrosstermBackend,
layout::{Constraint, Direction, Layout},
style::{Color, Modifier, Style},
widgets::{Block, Borders, Gauge, Paragraph},
Terminal,
};
use std::io::{self, Stdout};
use crate::nvidia::get_system_metrics;
pub struct BotUI {
terminal: Terminal<CrosstermBackend<Stdout>>,
}
impl BotUI {
pub fn new() -> io::Result<Self> {
let stdout = io::stdout();
let backend = CrosstermBackend::new(stdout);
let terminal = Terminal::new(backend)?;
Ok(Self { terminal })
}
fn render_warning(&mut self, message: &str) -> io::Result<()> {
self.terminal.draw(|f| {
let block = Block::default()
.title("⚠️ NVIDIA Warning")
.borders(Borders::ALL)
.border_style(Style::default().fg(Color::Red));
let paragraph = Paragraph::new(message)
.style(Style::default().fg(Color::Red).add_modifier(Modifier::BOLD))
.block(block);
f.render_widget(paragraph, f.area());
})?;
Ok(())
}
}


@ -69,11 +69,6 @@ pub async fn ensure_llama_servers_running(
info!(" LLM Server Path: {}", llm_server_path);
info!("Restarting any existing llama-server processes...");
if let Err(e) = tokio::process::Command::new("sh")
.arg("-c")
.arg("pkill llama-server -9 || true")
@ -168,7 +163,7 @@ pub async fn ensure_llama_servers_running(
info!("All llama.cpp servers are ready and responding!");
// Update LLM provider with new endpoints
let llm_provider1 = Arc::new(crate::llm::OpenAIClient::new(
let _llm_provider1 = Arc::new(crate::llm::OpenAIClient::new(
llm_model.clone(),
Some(llm_url.clone()),
));


@ -28,7 +28,6 @@ pub trait LLMProvider: Send + Sync {
}
pub struct OpenAIClient {
client: reqwest::Client,
api_key: String,
base_url: String,
}
@ -83,7 +82,7 @@ impl LLMProvider for OpenAIClient {
.post(&format!("{}/v1/chat/completions", self.base_url))
.header("Authorization", format!("Bearer {}", key))
.json(&serde_json::json!({
"model": model.clone(),
"model": model,
"messages": if messages.is_array() && !messages.as_array().unwrap().is_empty() {
info!("Using provided messages: {:?}", messages);
messages
@ -127,10 +126,9 @@ impl LLMProvider for OpenAIClient {
}
impl OpenAIClient {
pub fn new(api_key: String, base_url: Option<String>) -> Self {
pub fn new(_api_key: String, base_url: Option<String>) -> Self {
Self {
client: reqwest::Client::new(),
api_key,
base_url: base_url.unwrap()
}
}


@ -176,7 +176,7 @@ async fn main() -> std::io::Result<()> {
Err(_) => AppConfig::from_env().expect("Failed to load config from env"),
}
} else {
bootstrap.bootstrap().await;
_ = bootstrap.bootstrap().await;
progress_tx_clone
.send(BootstrapProgress::StartingComponent(

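Note that `_ = bootstrap.bootstrap().await;` silences the unused-Result warning but drops any bootstrap error. A minimal alternative sketch that logs the failure instead (hypothetical, not part of the commit):

```rust
if let Err(e) = bootstrap.bootstrap().await {
    log::error!("Bootstrap failed: {}", e);
}
```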

@ -1,8 +1,8 @@
use crate::package_manager::component::ComponentConfig;
use crate::package_manager::installer::PackageManager;
use crate::package_manager::OsType;
use crate::shared::utils;
use crate::package_manager::InstallMode;
use crate::package_manager::OsType;
use crate::shared::utils::{self, parse_database_url};
use anyhow::{Context, Result};
use log::{error, trace, warn};
use reqwest::Client;
@ -69,7 +69,11 @@ impl PackageManager {
if !component.data_download_list.is_empty() {
for url in &component.data_download_list {
let filename = url.split('/').last().unwrap_or("download.tmp");
let output_path = self.base_path.join("data").join(&component.name).join(filename);
let output_path = self
.base_path
.join("data")
.join(&component.name)
.join(filename);
utils::download_file(url, output_path.to_str().unwrap()).await?;
}
}
@ -123,7 +127,7 @@ impl PackageManager {
let pkg_list = packages.join(" ");
self.exec_in_container(
&container_name,
&format!("apt-get update && apt-get install -y {}", pkg_list),
&format!("apt-get install -y {}", pkg_list),
)?;
}
if let Some(url) = &component.download_url {
@ -552,7 +556,12 @@ impl PackageManager {
exec_cmd: &str,
env_vars: &HashMap<String, String>,
) -> Result<()> {
let database_url = std::env::var("DATABASE_URL").unwrap();
let (_db_username, db_password, _db_server, _db_port, _db_name) =
parse_database_url(&database_url);
let rendered_cmd = exec_cmd
.replace("{{DB_PASSWORD}}", &db_password)
.replace("{{BIN_PATH}}", "/opt/gbo/bin")
.replace("{{DATA_PATH}}", "/opt/gbo/data")
.replace("{{CONF_PATH}}", "/opt/gbo/conf")


@ -1,7 +1,6 @@
use crate::package_manager::component::ComponentConfig;
use crate::package_manager::os::detect_os;
use crate::package_manager::{InstallMode, OsType};
use crate::shared::utils::parse_database_url;
use anyhow::Result;
use log::trace;
use std::collections::HashMap;
@ -53,8 +52,7 @@ impl PackageManager {
self.register_doc_editor();
self.register_desktop();
self.register_devtools();
self.register_bot();
self.register_system();
self.register_botserver();
self.register_vector_db();
self.register_host();
}
@ -82,8 +80,8 @@ impl PackageManager {
pre_install_cmds_windows: vec![],
post_install_cmds_windows: vec![],
env_vars: HashMap::from([
("MINIO_ROOT_USER".to_string(), "$DRIVE_USER".to_string()),
("MINIO_ROOT_PASSWORD".to_string(), "$DRIVE_ACCESSKEY".to_string()),
("MINIO_ROOT_USER".to_string(), "$DRIVE_ACCESSKEY".to_string()),
("MINIO_ROOT_PASSWORD".to_string(), "$DRIVE_SECRET".to_string()),
]),
data_download_list: Vec::new(),
exec_cmd: "nohup {{BIN_PATH}}/minio server {{DATA_PATH}} --address :9000 --console-address :9001 > {{LOGS_PATH}}/minio.log 2>&1 &".to_string(),
@ -97,9 +95,6 @@ impl PackageManager {
fn register_tables(&mut self) {
let database_url = std::env::var("DATABASE_URL").unwrap();
let (_db_username, db_password, _db_server, _db_port, _db_name) =
parse_database_url(&database_url);
self.components.insert(
"tables".to_string(),
@ -117,7 +112,7 @@ impl PackageManager {
pre_install_cmds_linux: vec![],
post_install_cmds_linux: vec![
"chmod +x ./bin/*".to_string(),
format!("if [ ! -d \"{{{{DATA_PATH}}}}/pgdata\" ]; then PG_PASSWORD={} ./bin/initdb -D {{{{DATA_PATH}}}}/pgdata -U gbuser --pwfile=<(echo $PG_PASSWORD); fi", db_password),
format!("if [ ! -d \"{{{{DATA_PATH}}}}/pgdata\" ]; then PG_PASSWORD={{DB_PASSWORD}} ./bin/initdb -D {{{{DATA_PATH}}}}/pgdata -U gbuser --pwfile=<(echo $PG_PASSWORD); fi"),
"echo \"data_directory = '{{DATA_PATH}}/pgdata'\" > {{CONF_PATH}}/postgresql.conf".to_string(),
"echo \"ident_file = '{{CONF_PATH}}/pg_ident.conf'\" >> {{CONF_PATH}}/postgresql.conf".to_string(),
"echo \"port = 5432\" >> {{CONF_PATH}}/postgresql.conf".to_string(),
@ -130,12 +125,12 @@ impl PackageManager {
"sleep 5".to_string(),
"for i in $(seq 1 30); do ./bin/pg_isready -h localhost -p 5432 -U gbuser >/dev/null 2>&1 && echo 'PostgreSQL is ready' && break || echo \"Waiting for PostgreSQL... attempt $i/30\" >&2; sleep 2; done".to_string(),
"./bin/pg_isready -h localhost -p 5432 -U gbuser || { echo 'ERROR: PostgreSQL failed to start properly' >&2; cat {{LOGS_PATH}}/postgres.log >&2; exit 1; }".to_string(),
format!("PGPASSWORD={} ./bin/psql -h localhost -p 5432 -U gbuser -d postgres -c \"CREATE DATABASE botserver WITH OWNER gbuser\" 2>&1 | grep -v 'already exists' || true", db_password),
format!("PGPASSWORD={{DB_PASSWORD}} ./bin/psql -h localhost -p 5432 -U gbuser -d postgres -c \"CREATE DATABASE botserver WITH OWNER gbuser\" 2>&1 | grep -v 'already exists' || true"),
],
pre_install_cmds_macos: vec![],
post_install_cmds_macos: vec![
"chmod +x ./bin/*".to_string(),
"if [ ! -d \"{{DATA_PATH}}/pgdata\" ]; then ./bin/initdb -D {{DATA_PATH}}/pgdata -U postgres; fi".to_string(),
"if [ ! -d \"{{DATA_PATH}}/pgdata\" ]; then ./bin/initdb -A -D {{DATA_PATH}}/pgdata -U postgres; fi".to_string(),
],
pre_install_cmds_windows: vec![],
post_install_cmds_windows: vec![],
@ -186,8 +181,8 @@ impl PackageManager {
ports: vec![8081, 8082],
dependencies: vec![],
linux_packages: vec!["unzip".to_string()],
macos_packages: vec!["unzip".to_string()],
linux_packages: vec![],
macos_packages: vec![],
windows_packages: vec![],
download_url: Some(
"https://github.com/ggml-org/llama.cpp/releases/download/b6148/llama-b6148-bin-ubuntu-x64.zip".to_string(),
@ -217,7 +212,7 @@ impl PackageManager {
name: "email".to_string(),
ports: vec![25, 80, 110, 143, 465, 587, 993, 995, 4190],
dependencies: vec![],
linux_packages: vec!["libcap2-bin".to_string(), "resolvconf".to_string()],
linux_packages: vec![],
macos_packages: vec![],
windows_packages: vec![],
download_url: Some(
@ -247,7 +242,7 @@ impl PackageManager {
name: "proxy".to_string(),
ports: vec![80, 443],
dependencies: vec![],
linux_packages: vec!["libcap2-bin".to_string()],
linux_packages: vec![],
macos_packages: vec![],
windows_packages: vec![],
download_url: Some(
@ -278,7 +273,7 @@ impl PackageManager {
ports: vec![8080],
dependencies: vec![],
linux_packages: vec!["libcap2-bin".to_string()],
linux_packages: vec![],
macos_packages: vec![],
windows_packages: vec![],
download_url: Some(
@ -309,8 +304,8 @@ impl PackageManager {
ports: vec![3000],
dependencies: vec![],
linux_packages: vec!["git".to_string(), "git-lfs".to_string()],
macos_packages: vec!["git".to_string(), "git-lfs".to_string()],
linux_packages: vec![],
macos_packages: vec![],
windows_packages: vec![],
download_url: Some(
"https://codeberg.org/forgejo/forgejo/releases/download/v10.0.2/forgejo-10.0.2-linux-amd64".to_string(),
@ -342,11 +337,6 @@ impl PackageManager {
ports: vec![],
dependencies: vec!["alm".to_string()],
linux_packages: vec![
"git".to_string(),
"curl".to_string(),
"gnupg".to_string(),
"ca-certificates".to_string(),
"build-essential".to_string(),
],
macos_packages: vec!["git".to_string(), "node".to_string()],
windows_packages: vec![],
@ -355,12 +345,10 @@ impl PackageManager {
),
binary_name: Some("forgejo-runner".to_string()),
pre_install_cmds_linux: vec![
"curl -fsSL https://deb.nodesource.com/setup_22.x | bash -".to_string(),
"apt-get install -y nodejs".to_string(),
],
post_install_cmds_linux: vec!["npm install -g pnpm@latest".to_string()],
post_install_cmds_linux: vec![],
pre_install_cmds_macos: vec![],
post_install_cmds_macos: vec!["npm install -g pnpm@latest".to_string()],
post_install_cmds_macos: vec![],
pre_install_cmds_windows: vec![],
post_install_cmds_windows: vec![],
env_vars: HashMap::new(),
@ -444,7 +432,7 @@ impl PackageManager {
ports: vec![7880, 3478],
dependencies: vec![],
linux_packages: vec!["coturn".to_string()],
linux_packages: vec![],
macos_packages: vec![],
windows_packages: vec![],
download_url: Some(
@ -473,7 +461,7 @@ impl PackageManager {
ports: vec![5757],
dependencies: vec!["tables".to_string()],
linux_packages: vec!["curl".to_string()],
linux_packages: vec![],
macos_packages: vec![],
windows_packages: vec![],
download_url: Some("http://get.nocodb.com/linux-x64".to_string()),
@ -500,7 +488,7 @@ impl PackageManager {
ports: vec![9980],
dependencies: vec![],
linux_packages: vec!["gnupg".to_string()],
linux_packages: vec![],
macos_packages: vec![],
windows_packages: vec![],
download_url: None,
@ -573,42 +561,7 @@ impl PackageManager {
);
}
fn register_bot(&mut self) {
self.components.insert(
"bot".to_string(),
ComponentConfig {
name: "bot".to_string(),
ports: vec![3000],
dependencies: vec![],
linux_packages: vec![
"curl".to_string(),
"gnupg".to_string(),
"ca-certificates".to_string(),
"git".to_string(),
],
macos_packages: vec!["node".to_string()],
windows_packages: vec![],
download_url: None,
binary_name: None,
pre_install_cmds_linux: vec![
"curl -fsSL https://deb.nodesource.com/setup_22.x | bash -".to_string(),
"apt-get install -y nodejs".to_string(),
],
post_install_cmds_linux: vec![],
pre_install_cmds_macos: vec![],
post_install_cmds_macos: vec![],
pre_install_cmds_windows: vec![],
post_install_cmds_windows: vec![],
env_vars: HashMap::from([("DISPLAY".to_string(), ":99".to_string())]),
data_download_list: Vec::new(),
exec_cmd: "".to_string(),
check_cmd: "".to_string(),
},
);
}
fn register_system(&mut self) {
fn register_botserver(&mut self) {
self.components.insert(
"system".to_string(),
ComponentConfig {
@ -738,11 +691,9 @@ impl PackageManager {
rendered_cmd
);
// Create new env vars map with evaluated $VAR references
let mut evaluated_envs = HashMap::new();
for (k, v) in &component.env_vars {
if v.starts_with('$') {
let var_name = &v[1..];
evaluated_envs.insert(k.clone(), std::env::var(var_name).unwrap_or_default());


@ -27,11 +27,11 @@ impl StatusPanel {
pub async fn update(&mut self) -> Result<(), std::io::Error> {
self.system.refresh_all();
// Force fresh metrics by using different token counts
let tokens = (std::time::SystemTime::now()
let _tokens = (std::time::SystemTime::now()
.duration_since(std::time::UNIX_EPOCH)
.unwrap()
.as_secs() % 1000) as usize;
let system_metrics = nvidia::get_system_metrics().unwrap_or_default();
let _system_metrics = nvidia::get_system_metrics().unwrap_or_default();
self.cached_content = self.render(None);
self.last_update = std::time::Instant::now();
Ok(())