- Tables is installing.
This commit is contained in:
parent
88ca214366
commit
6f30517526
8 changed files with 61 additions and 60 deletions
|
|
@ -2,11 +2,13 @@
|
|||
{
|
||||
"label": "Debug BotServer",
|
||||
"build": {
|
||||
"command": "cargo",
|
||||
"args": ["run"]
|
||||
"command": "rm -rf ./botserver-stack && cargo",
|
||||
"args": ["build"]
|
||||
},
|
||||
"program": "$ZED_WORKTREE_ROOT/target/debug/botserver",
|
||||
|
||||
"env": {
|
||||
"RUST_LOG": "trace"
|
||||
},
|
||||
"sourceLanguages": ["rust"],
|
||||
"request": "launch",
|
||||
"adapter": "CodeLLDB"
|
||||
|
|
|
|||
3
gbot.sh
3
gbot.sh
|
|
@ -1,3 +1,2 @@
|
|||
clear && \
|
||||
cargo build && \
|
||||
sudo RUST_BACKTRACE=1 ./target/debug/botserver install tables
|
||||
RUST_LOG=trace cargo run install tables
|
||||
|
|
|
|||
|
|
@ -11,7 +11,8 @@ MOST IMPORTANT CODE GENERATION RULES:
|
|||
- Every part must be executable and self-contained, with real implementations - only.
|
||||
- DO NOT WRITE ANY ERROR HANDLING CODE LET IT CRASH.
|
||||
- Never generate two or more trace messages that are equal!
|
||||
- Return *only the modified* files as a single `.sh` script using `cat`, so the code can be restored directly.
|
||||
- Return *only the modified* files as a single `.sh` script using `cat`, so the code can be - restored directly.
|
||||
- Pay attention to shared::utils and shared::models to reuse shared things.
|
||||
- NEVER return an untouched file in output. Just files that need to be updated.
|
||||
|
||||
- You MUST return exactly this example format:
|
||||
|
|
|
|||
|
|
@ -25,9 +25,10 @@ impl BootstrapManager {
|
|||
|
||||
let pm = PackageManager::new(self.install_mode.clone(), self.tenant.clone())?;
|
||||
|
||||
let required_components = vec!["drive", "cache", "tables", "llm"];
|
||||
let required_components = vec!["tables", "cache", "drive", "llm"];
|
||||
|
||||
for component in required_components {
|
||||
info!("Checking component: {}", component);
|
||||
if !pm.is_installed(component) {
|
||||
info!("Installing required component: {}", component);
|
||||
futures::executor::block_on(pm.install(component))?;
|
||||
|
|
|
|||
|
|
@ -56,7 +56,7 @@ use crate::web_server::{index, static_files};
|
|||
use crate::whatsapp::whatsapp_webhook_verify;
|
||||
use crate::whatsapp::WhatsAppAdapter;
|
||||
|
||||
#[actix_web::main]
|
||||
#[tokio::main]
|
||||
async fn main() -> std::io::Result<()> {
|
||||
let args: Vec<String> = std::env::args().collect();
|
||||
|
||||
|
|
|
|||
|
|
@ -2,10 +2,11 @@ use anyhow::{Context, Result};
|
|||
use log::{debug, info, trace, warn};
|
||||
use reqwest::Client;
|
||||
use std::collections::HashMap;
|
||||
use std::io::Write;
|
||||
use std::path::PathBuf;
|
||||
use std::process::Command;
|
||||
|
||||
use crate::shared::utils; // Adjust based on your actual module structure
|
||||
|
||||
use crate::package_manager::component::ComponentConfig;
|
||||
use crate::package_manager::installer::PackageManager;
|
||||
use crate::package_manager::OsType;
|
||||
|
|
@ -79,11 +80,6 @@ impl PackageManager {
|
|||
.await?;
|
||||
self.run_commands(post_cmds, "local", &component.name)?;
|
||||
}
|
||||
|
||||
if self.os_type == OsType::Linux && !component.exec_cmd.is_empty() {
|
||||
self.create_service_file(&component.name, &component.exec_cmd, &component.env_vars)?;
|
||||
}
|
||||
|
||||
info!("Local installation of '{}' completed", component.name);
|
||||
Ok(())
|
||||
}
|
||||
|
|
@ -342,7 +338,11 @@ impl PackageManager {
|
|||
std::fs::create_dir_all(&bin_path)?;
|
||||
|
||||
let filename = url.split('/').last().unwrap_or("download.tmp");
|
||||
let temp_file = bin_path.join(filename);
|
||||
let temp_file = if filename.starts_with('/') {
|
||||
PathBuf::from(filename)
|
||||
} else {
|
||||
bin_path.join(filename)
|
||||
};
|
||||
|
||||
info!("Downloading from: {} to {:?}", url, temp_file);
|
||||
|
||||
|
|
@ -406,24 +406,23 @@ impl PackageManager {
|
|||
|
||||
pub async fn attempt_reqwest_download(
|
||||
&self,
|
||||
client: &Client,
|
||||
_client: &Client, // We won't use this if using shared utils
|
||||
url: &str,
|
||||
temp_file: &PathBuf,
|
||||
) -> Result<u64> {
|
||||
let response = client
|
||||
.get(url)
|
||||
.send()
|
||||
.await
|
||||
.context("Failed to send request")?;
|
||||
info!("Downloading from: {} to {:?}", url, temp_file);
|
||||
|
||||
let mut file = std::fs::File::create(temp_file).context("Failed to create output file")?;
|
||||
let bytes = response
|
||||
.bytes()
|
||||
// Convert PathBuf to string for the shared function
|
||||
let output_path = temp_file.to_str().context("Invalid temp file path")?;
|
||||
|
||||
// Use the shared download_file utility
|
||||
utils::download_file(url, output_path)
|
||||
.await
|
||||
.context("Failed to read response bytes")?;
|
||||
let size = bytes.len() as u64;
|
||||
file.write_all(&bytes)
|
||||
.context("Failed to write response to file")?;
|
||||
.map_err(|e| anyhow::anyhow!("Failed to download file using shared utility: {}", e))?;
|
||||
|
||||
// Get file size to return
|
||||
let metadata = std::fs::metadata(temp_file).context("Failed to get file metadata")?;
|
||||
let size = metadata.len();
|
||||
|
||||
info!("Downloaded {} bytes", size);
|
||||
Ok(size)
|
||||
|
|
@ -470,10 +469,9 @@ impl PackageManager {
|
|||
|
||||
pub fn extract_tar_gz(&self, temp_file: &PathBuf, bin_path: &PathBuf) -> Result<()> {
|
||||
info!("Extracting tar.gz archive to {:?}", bin_path);
|
||||
|
||||
let output = Command::new("tar")
|
||||
.current_dir(bin_path)
|
||||
.args(&["-xzf", temp_file.to_str().unwrap()])
|
||||
.args(&["-xzf", temp_file.to_str().unwrap(), "--strip-components=1"])
|
||||
.output()?;
|
||||
|
||||
if !output.status.success() {
|
||||
|
|
|
|||
|
|
@ -21,7 +21,7 @@ impl PackageManager {
|
|||
let base_path = if mode == InstallMode::Container {
|
||||
PathBuf::from("/opt/gbo")
|
||||
} else {
|
||||
PathBuf::from("./botserver-stack")
|
||||
std::env::current_dir()?.join("botserver-stack")
|
||||
};
|
||||
|
||||
let tenant = tenant.unwrap_or_else(|| "default".to_string());
|
||||
|
|
@ -45,9 +45,9 @@ impl PackageManager {
|
|||
}
|
||||
|
||||
fn register_components(&mut self) {
|
||||
self.register_drive();
|
||||
self.register_cache();
|
||||
self.register_tables();
|
||||
self.register_cache();
|
||||
self.register_drive();
|
||||
self.register_llm();
|
||||
self.register_email();
|
||||
self.register_proxy();
|
||||
|
|
@ -110,8 +110,8 @@ impl PackageManager {
|
|||
download_url: None,
|
||||
binary_name: Some("valkey-server".to_string()),
|
||||
pre_install_cmds_linux: vec![
|
||||
"curl -fsSL https://packages.redis.io/gpg | gpg --dearmor -o /usr/share/keyrings/valkey.gpg".to_string(),
|
||||
"echo 'deb [signed-by=/usr/share/keyrings/valkey.gpg] https://packages.redis.io/deb $(lsb_release -cs) main' | tee /etc/apt/sources.list.d/valkey.list".to_string(),
|
||||
"if [ ! -f /usr/share/keyrings/valkey.gpg ]; then curl -fsSL https://packages.redis.io/gpg | gpg --dearmor -o /usr/share/keyrings/valkey.gpg; fi".to_string(),
|
||||
"if [ ! -f /etc/apt/sources.list.d/valkey.list ]; then echo 'deb [signed-by=/usr/share/keyrings/valkey.gpg] https://packages.redis.io/deb $(lsb_release -cs) main' | tee /etc/apt/sources.list.d/valkey.list; fi".to_string(),
|
||||
"apt-get update && apt-get install -y valkey".to_string()
|
||||
],
|
||||
post_install_cmds_linux: vec![],
|
||||
|
|
@ -137,9 +137,7 @@ impl PackageManager {
|
|||
binary_name: Some("postgres".to_string()),
|
||||
pre_install_cmds_linux: vec![],
|
||||
post_install_cmds_linux: vec![
|
||||
"tar -xzf postgresql-18.0.0-x86_64-unknown-linux-gnu.tar.gz".to_string(),
|
||||
"mv pgsql/* . && rm -rf pgsql".to_string(),
|
||||
"if [ ! -d \"{{DATA_PATH}}/pgdata\" ]; then ./initdb -D {{DATA_PATH}}/pgdata -U postgres; fi".to_string(),
|
||||
"if [ ! -d \"{{DATA_PATH}}/pgdata\" ]; then ./bin/initdb -D {{DATA_PATH}}/pgdata -U postgres; fi".to_string(),
|
||||
"if [ ! -f \"{{CONF_PATH}}/postgresql.conf\" ]; then echo \"data_directory = '{{DATA_PATH}}/pgdata'\" > {{CONF_PATH}}/postgresql.conf; fi".to_string(),
|
||||
"if [ ! -f \"{{CONF_PATH}}/postgresql.conf\" ]; then echo \"hba_file = '{{CONF_PATH}}/pg_hba.conf'\" >> {{CONF_PATH}}/postgresql.conf; fi".to_string(),
|
||||
"if [ ! -f \"{{CONF_PATH}}/postgresql.conf\" ]; then echo \"ident_file = '{{CONF_PATH}}/pg_ident.conf'\" >> {{CONF_PATH}}/postgresql.conf; fi".to_string(),
|
||||
|
|
@ -149,18 +147,16 @@ impl PackageManager {
|
|||
"if [ ! -f \"{{CONF_PATH}}/postgresql.conf\" ]; then echo \"logging_collector = on\" >> {{CONF_PATH}}/postgresql.conf; fi".to_string(),
|
||||
"if [ ! -f \"{{CONF_PATH}}/pg_hba.conf\" ]; then echo \"host all all all md5\" > {{CONF_PATH}}/pg_hba.conf; fi".to_string(),
|
||||
"if [ ! -f \"{{CONF_PATH}}/pg_ident.conf\" ]; then touch {{CONF_PATH}}/pg_ident.conf; fi".to_string(),
|
||||
"if [ ! -d \"{{DATA_PATH}}/pgdata\" ]; then ./pg_ctl -D {{DATA_PATH}}/pgdata -l {{LOGS_PATH}}/postgres.log start; sleep 5; ./psql -p 5432 -d postgres -c \"CREATE USER default WITH PASSWORD 'defaultpass'\"; ./psql -p 5432 -d postgres -c \"CREATE DATABASE default_db OWNER default\"; ./psql -p 5432 -d postgres -c \"GRANT ALL PRIVILEGES ON DATABASE default_db TO default\"; ./pg_ctl -D {{DATA_PATH}}/pgdata stop; fi".to_string()
|
||||
"if [ ! -d \"{{DATA_PATH}}/pgdata\" ]; then ./bin/pg_ctl -D {{DATA_PATH}}/pgdata -l {{LOGS_PATH}}/postgres.log start; sleep 5; ./bin/psql -p 5432 -d postgres -c \" CREATE USER default WITH PASSWORD 'defaultpass'\"; ./bin/psql -p 5432 -d postgres -c \"CREATE DATABASE default_db OWNER default\"; ./bin/psql -p 5432 -d postgres -c \"GRANT ALL PRIVILEGES ON DATABASE default_db TO default\"; pkill postgres; fi".to_string()
|
||||
],
|
||||
pre_install_cmds_macos: vec![],
|
||||
post_install_cmds_macos: vec![
|
||||
"tar -xzf postgresql-18.0-1-linux-x64-binaries.tar.gz".to_string(),
|
||||
"mv pgsql/* . && rm -rf pgsql".to_string(),
|
||||
"if [ ! -d \"{{DATA_PATH}}/pgdata\" ]; then ./initdb -D {{DATA_PATH}}/pgdata -U postgres; fi".to_string(),
|
||||
"if [ ! -d \"{{DATA_PATH}}/pgdata\" ]; then ./bin/initdb -D {{DATA_PATH}}/pgdata -U postgres; fi".to_string(),
|
||||
],
|
||||
pre_install_cmds_windows: vec![],
|
||||
post_install_cmds_windows: vec![],
|
||||
env_vars: HashMap::new(),
|
||||
exec_cmd: "./pg_ctl -D {{DATA_PATH}}/pgdata -l {{LOGS_PATH}}/postgres.log start".to_string(),
|
||||
exec_cmd: "./bin/pg_ctl -D {{DATA_PATH}}/pgdata -l {{LOGS_PATH}}/postgres.log start".to_string(),
|
||||
});
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -7,7 +7,7 @@ use std::fs::File;
|
|||
use std::io::BufReader;
|
||||
use std::path::Path;
|
||||
use tokio::fs::File as TokioFile;
|
||||
use tokio_stream::StreamExt;
|
||||
|
||||
use zip::ZipArchive;
|
||||
|
||||
use crate::config::AIConfig;
|
||||
|
|
@ -79,24 +79,28 @@ pub fn to_array(value: Dynamic) -> Array {
|
|||
}
|
||||
}
|
||||
|
||||
pub async fn download_file(url: &str, output_path: &str) -> Result<(), Box<dyn std::error::Error>> {
|
||||
pub async fn download_file(
|
||||
url: &str,
|
||||
output_path: &str,
|
||||
) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
|
||||
let url = url.to_string();
|
||||
let output_path = output_path.to_string();
|
||||
let download_handle = tokio::spawn(async move {
|
||||
let client = Client::new();
|
||||
let response = client.get(url).send().await?;
|
||||
let response = client.get(&url).send().await?;
|
||||
|
||||
if response.status().is_success() {
|
||||
let mut file = TokioFile::create(output_path).await?;
|
||||
|
||||
let mut stream = response.bytes_stream();
|
||||
|
||||
while let Some(chunk) = stream.next().await {
|
||||
file.write_all(&chunk?).await?;
|
||||
}
|
||||
let mut file = TokioFile::create(&output_path).await?;
|
||||
let bytes = response.bytes().await?;
|
||||
file.write_all(&bytes).await?;
|
||||
debug!("File downloaded successfully to {}", output_path);
|
||||
} else {
|
||||
return Err("Failed to download file".into());
|
||||
}
|
||||
|
||||
Ok(())
|
||||
} else {
|
||||
Err(format!("HTTP {}: {}", response.status(), url).into())
|
||||
}
|
||||
});
|
||||
|
||||
download_handle.await?
|
||||
}
|
||||
|
||||
pub fn parse_filter(filter_str: &str) -> Result<(String, Vec<String>), Box<dyn Error>> {
|
||||
|
|
|
|||
Loading…
Add table
Reference in a new issue