feat: add offline installer cache and health endpoints
- Add /health and /api/health endpoints for botui connectivity
- Create 3rdparty.toml with all download URLs for offline bundles
- Add botserver-installers/ cache directory for downloaded files
- Implement DownloadCache module with:
  - Automatic cache lookup before downloading
  - Support for pre-populated offline bundles
  - SHA256 checksum verification (optional)
  - Cache management utilities (list, clear, size)
- Update download_and_install to use the cache system
- Data files (models) are also cached for reuse

Cache behavior:
- First run: downloads to botserver-installers/
- Subsequent runs: uses cached files
- botserver-stack/ can be deleted without losing downloads
- Pre-populate the cache for fully offline installation
This commit is contained in:
parent
89ff716bef
commit
89e92a4739
7 changed files with 905 additions and 36 deletions
2
.gitignore
vendored
2
.gitignore
vendored
|
|
@ -12,3 +12,5 @@ botserver-stack
|
|||
*-log*
|
||||
docs/book
|
||||
*.rdb
|
||||
botserver-installers/*
|
||||
!botserver-installers/.gitkeep
|
||||
|
|
|
|||
154
3rdparty.toml
Normal file
154
3rdparty.toml
Normal file
|
|
@ -0,0 +1,154 @@
|
|||
# Third-Party Dependencies Configuration
|
||||
# ======================================
|
||||
# This file lists all external downloads required by botserver.
|
||||
#
|
||||
# Caching Behavior:
|
||||
# - On first run, files are downloaded from the URLs below
|
||||
# - Downloaded files are cached in ./botserver-installers/
|
||||
# - On subsequent runs, cached files are used instead of downloading
|
||||
# - To force re-download, delete the cached file
|
||||
#
|
||||
# Offline Installation:
|
||||
# - Pre-download all files to ./botserver-installers/
|
||||
# - The installer will use cached files automatically
|
||||
# - You can safely delete ./botserver-stack/ without losing downloads
|
||||
|
||||
[cache_settings]
|
||||
# Directory where downloaded files are cached (relative to botserver root)
|
||||
cache_dir = "botserver-installers"
|
||||
|
||||
# Components
|
||||
# ==========
|
||||
# Each component has:
|
||||
# - url: Download URL
|
||||
# - filename: Local filename in cache
|
||||
# - sha256: Optional checksum for verification (empty = skip verification)
|
||||
|
||||
[components.drive]
|
||||
name = "MinIO Object Storage"
|
||||
url = "https://dl.min.io/server/minio/release/linux-amd64/minio"
|
||||
filename = "minio"
|
||||
sha256 = ""
|
||||
|
||||
[components.tables]
|
||||
name = "PostgreSQL Database"
|
||||
url = "https://github.com/theseus-rs/postgresql-binaries/releases/download/17.2.0/postgresql-17.2.0-x86_64-unknown-linux-gnu.tar.gz"
|
||||
filename = "postgresql-17.2.0-x86_64-unknown-linux-gnu.tar.gz"
|
||||
sha256 = ""
|
||||
|
||||
[components.cache]
|
||||
name = "Valkey Cache (Redis-compatible)"
|
||||
# Note: Valkey doesn't provide prebuilt binaries, using source tarball
|
||||
# You may need to compile from source or use system package manager
|
||||
url = "https://github.com/valkey-io/valkey/archive/refs/tags/8.0.2.tar.gz"
|
||||
filename = "valkey-8.0.2.tar.gz"
|
||||
sha256 = ""
|
||||
# Alternative: Use Redis from system package or Docker
|
||||
# For prebuilt, consider: https://download.redis.io/releases/redis-7.2.4.tar.gz
|
||||
|
||||
[components.llm]
|
||||
name = "Llama.cpp Server"
|
||||
url = "https://github.com/ggml-org/llama.cpp/releases/download/b4547/llama-b4547-bin-ubuntu-x64.zip"
|
||||
filename = "llama-b4547-bin-ubuntu-x64.zip"
|
||||
sha256 = ""
|
||||
|
||||
[components.email]
|
||||
name = "Stalwart Mail Server"
|
||||
url = "https://github.com/stalwartlabs/mail-server/releases/download/v0.10.7/stalwart-mail-x86_64-linux.tar.gz"
|
||||
filename = "stalwart-mail-x86_64-linux.tar.gz"
|
||||
sha256 = ""
|
||||
|
||||
[components.proxy]
|
||||
name = "Caddy Web Server"
|
||||
url = "https://github.com/caddyserver/caddy/releases/download/v2.9.1/caddy_2.9.1_linux_amd64.tar.gz"
|
||||
filename = "caddy_2.9.1_linux_amd64.tar.gz"
|
||||
sha256 = ""
|
||||
|
||||
[components.directory]
|
||||
name = "Zitadel Identity Provider"
|
||||
url = "https://github.com/zitadel/zitadel/releases/download/v2.70.4/zitadel-linux-amd64.tar.gz"
|
||||
filename = "zitadel-linux-amd64.tar.gz"
|
||||
sha256 = ""
|
||||
|
||||
[components.alm]
|
||||
name = "Forgejo Git Server"
|
||||
url = "https://codeberg.org/forgejo/forgejo/releases/download/v10.0.2/forgejo-10.0.2-linux-amd64"
|
||||
filename = "forgejo-10.0.2-linux-amd64"
|
||||
sha256 = ""
|
||||
|
||||
[components.alm_ci]
|
||||
name = "Forgejo Actions Runner"
|
||||
url = "https://code.forgejo.org/forgejo/runner/releases/download/v6.3.1/forgejo-runner-6.3.1-linux-amd64"
|
||||
filename = "forgejo-runner-6.3.1-linux-amd64"
|
||||
sha256 = ""
|
||||
|
||||
[components.dns]
|
||||
name = "CoreDNS Server"
|
||||
url = "https://github.com/coredns/coredns/releases/download/v1.11.1/coredns_1.11.1_linux_amd64.tgz"
|
||||
filename = "coredns_1.11.1_linux_amd64.tgz"
|
||||
sha256 = ""
|
||||
|
||||
[components.webmail]
|
||||
name = "Roundcube Webmail"
|
||||
url = "https://github.com/roundcube/roundcubemail/releases/download/1.6.6/roundcubemail-1.6.6-complete.tar.gz"
|
||||
filename = "roundcubemail-1.6.6-complete.tar.gz"
|
||||
sha256 = ""
|
||||
|
||||
[components.meet]
|
||||
name = "LiveKit Media Server"
|
||||
url = "https://github.com/livekit/livekit/releases/download/v2.8.2/livekit_2.8.2_linux_amd64.tar.gz"
|
||||
filename = "livekit_2.8.2_linux_amd64.tar.gz"
|
||||
sha256 = ""
|
||||
|
||||
[components.table_editor]
|
||||
name = "NocoDB"
|
||||
url = "http://get.nocodb.com/linux-x64"
|
||||
filename = "nocodb-linux-x64"
|
||||
sha256 = ""
|
||||
|
||||
[components.vector_db]
|
||||
name = "Qdrant Vector Database"
|
||||
url = "https://github.com/qdrant/qdrant/releases/latest/download/qdrant-x86_64-unknown-linux-gnu.tar.gz"
|
||||
filename = "qdrant-x86_64-unknown-linux-gnu.tar.gz"
|
||||
sha256 = ""
|
||||
|
||||
[components.timeseries_db]
|
||||
name = "InfluxDB Time Series Database"
|
||||
url = "https://download.influxdata.com/influxdb/releases/influxdb2-2.7.5-linux-amd64.tar.gz"
|
||||
filename = "influxdb2-2.7.5-linux-amd64.tar.gz"
|
||||
sha256 = ""
|
||||
|
||||
[components.vault]
|
||||
name = "HashiCorp Vault"
|
||||
url = "https://releases.hashicorp.com/vault/1.15.4/vault_1.15.4_linux_amd64.zip"
|
||||
filename = "vault_1.15.4_linux_amd64.zip"
|
||||
sha256 = ""
|
||||
|
||||
[components.observability]
|
||||
name = "Vector Log Aggregator"
|
||||
url = "https://packages.timber.io/vector/0.35.0/vector-0.35.0-x86_64-unknown-linux-gnu.tar.gz"
|
||||
filename = "vector-0.35.0-x86_64-unknown-linux-gnu.tar.gz"
|
||||
sha256 = ""
|
||||
|
||||
# LLM Models
|
||||
# ==========
|
||||
# Large model files for AI/ML functionality
|
||||
|
||||
[models.deepseek_small]
|
||||
name = "DeepSeek R1 Distill Qwen 1.5B (Q3_K_M)"
|
||||
url = "https://huggingface.co/bartowski/DeepSeek-R1-Distill-Qwen-1.5B-GGUF/resolve/main/DeepSeek-R1-Distill-Qwen-1.5B-Q3_K_M.gguf"
|
||||
filename = "DeepSeek-R1-Distill-Qwen-1.5B-Q3_K_M.gguf"
|
||||
sha256 = ""
|
||||
|
||||
[models.bge_embedding]
|
||||
name = "BGE Small EN v1.5 Embedding Model"
|
||||
url = "https://huggingface.co/CompendiumLabs/bge-small-en-v1.5-gguf/resolve/main/bge-small-en-v1.5-f32.gguf"
|
||||
filename = "bge-small-en-v1.5-f32.gguf"
|
||||
sha256 = ""
|
||||
|
||||
# Optional larger models (uncomment to include)
|
||||
# [models.gpt_oss_20b]
|
||||
# name = "GPT-OSS 20B F16 (requires 16GB+ VRAM)"
|
||||
# url = "https://huggingface.co/unsloth/gpt-oss-20b-GGUF/resolve/main/gpt-oss-20b-F16.gguf"
|
||||
# filename = "gpt-oss-20b-F16.gguf"
|
||||
# sha256 = ""
|
||||
|
|
@ -1,7 +1,7 @@
|
|||
{
|
||||
"base_url": "http://localhost:8080",
|
||||
"default_org": {
|
||||
"id": "350139980594544654",
|
||||
"id": "350166510557200398",
|
||||
"name": "default",
|
||||
"domain": "default.localhost"
|
||||
},
|
||||
|
|
@ -13,8 +13,8 @@
|
|||
"first_name": "Admin",
|
||||
"last_name": "User"
|
||||
},
|
||||
"admin_token": "WFe8gHNf6oPO6B9S1sPlRyIgrAz1hsrao4k1NwuGOXb0GyGb4U2ZKysQb4jab0YLDMQKZ4o",
|
||||
"admin_token": "ljVnxOognfMMqF_EOrkBse3YPEV7pUJDn78z9RFzIgGdzejdC35UfiYK7TSljt2mVaV6p40",
|
||||
"project_id": "",
|
||||
"client_id": "350139982339440654",
|
||||
"client_secret": "iQ3yc8eQpjJtttpOvzZs2pcUp2tkRwnhqvzC13dbj2tKGrfaeksjRg5wMFO0pAKP"
|
||||
"client_id": "350166511245131790",
|
||||
"client_secret": "JovJLn0DFlriDW4mtaDCTZ7cZPWPWCkLQgL7dVoOmhRqD3zMEkEGPTmLb8rISKCO"
|
||||
}
|
||||
592
src/core/package_manager/cache.rs
Normal file
592
src/core/package_manager/cache.rs
Normal file
|
|
@ -0,0 +1,592 @@
|
|||
//! Download Cache Module
|
||||
//!
|
||||
//! Provides caching functionality for third-party downloads.
|
||||
//! Files are cached in `botserver-installers/` directory and reused
|
||||
//! on subsequent runs, allowing offline installation.
|
||||
//!
|
||||
//! Configuration is read from `3rdparty.toml` at the botserver root.
|
||||
|
||||
use anyhow::{Context, Result};
|
||||
use log::{debug, info, trace, warn};
|
||||
use serde::Deserialize;
|
||||
use std::collections::HashMap;
|
||||
use std::fs;
|
||||
use std::path::{Path, PathBuf};
|
||||
|
||||
/// Default cache directory relative to botserver root
|
||||
const DEFAULT_CACHE_DIR: &str = "botserver-installers";
|
||||
|
||||
/// Configuration file name
|
||||
const CONFIG_FILE: &str = "3rdparty.toml";
|
||||
|
||||
/// Third-party dependencies configuration
///
/// Deserialized from `3rdparty.toml`. Every section is optional; missing
/// sections fall back to their `Default` values so an absent or partial
/// config file is never an error.
#[derive(Debug, Deserialize, Default)]
pub struct ThirdPartyConfig {
    // [cache_settings] table; defaults to `botserver-installers` dir.
    #[serde(default)]
    pub cache_settings: CacheSettings,
    // [components.*] tables, keyed by component name (e.g. "drive", "llm").
    #[serde(default)]
    pub components: HashMap<String, ComponentDownload>,
    // [models.*] tables for large model downloads (same shape as components).
    #[serde(default)]
    pub models: HashMap<String, ComponentDownload>,
}

/// Cache settings
#[derive(Debug, Deserialize)]
pub struct CacheSettings {
    // Cache directory, relative to the botserver root.
    #[serde(default = "default_cache_dir")]
    pub cache_dir: String,
}

impl Default for CacheSettings {
    fn default() -> Self {
        Self {
            cache_dir: default_cache_dir(),
        }
    }
}

// Serde `default = "..."` requires a function; keep it in sync with
// `CacheSettings::default()` above.
fn default_cache_dir() -> String {
    DEFAULT_CACHE_DIR.to_string()
}

/// Component download configuration
///
/// One downloadable artifact: where to fetch it, what to call it locally,
/// and an optional integrity hash.
#[derive(Debug, Deserialize, Clone)]
pub struct ComponentDownload {
    // Human-readable display name (used in log messages).
    pub name: String,
    // Download URL.
    pub url: String,
    // Filename under the cache directory.
    pub filename: String,
    // Expected SHA256 hex digest; empty string means "skip verification".
    #[serde(default)]
    pub sha256: String,
}
|
||||
|
||||
/// Download cache manager
///
/// Owns the resolved cache directory and the parsed `3rdparty.toml`
/// configuration. Construct via [`DownloadCache::new`]; all lookups and
/// cache-management operations go through this type.
#[derive(Debug)]
pub struct DownloadCache {
    /// Base path for the botserver (where 3rdparty.toml lives)
    base_path: PathBuf,
    /// Cache directory path (base_path joined with the configured cache_dir)
    cache_dir: PathBuf,
    /// Loaded configuration (defaults when 3rdparty.toml is absent)
    config: ThirdPartyConfig,
}
|
||||
|
||||
impl DownloadCache {
|
||||
/// Create a new download cache manager
|
||||
///
|
||||
/// # Arguments
|
||||
/// * `base_path` - Base path for botserver (typically current directory or botserver root)
|
||||
pub fn new(base_path: impl AsRef<Path>) -> Result<Self> {
|
||||
let base_path = base_path.as_ref().to_path_buf();
|
||||
let config = Self::load_config(&base_path)?;
|
||||
let cache_dir = base_path.join(&config.cache_settings.cache_dir);
|
||||
|
||||
// Ensure cache directory exists
|
||||
if !cache_dir.exists() {
|
||||
fs::create_dir_all(&cache_dir)
|
||||
.with_context(|| format!("Failed to create cache directory: {:?}", cache_dir))?;
|
||||
info!("Created cache directory: {:?}", cache_dir);
|
||||
}
|
||||
|
||||
Ok(Self {
|
||||
base_path,
|
||||
cache_dir,
|
||||
config,
|
||||
})
|
||||
}
|
||||
|
||||
/// Load configuration from 3rdparty.toml
|
||||
fn load_config(base_path: &Path) -> Result<ThirdPartyConfig> {
|
||||
let config_path = base_path.join(CONFIG_FILE);
|
||||
|
||||
if !config_path.exists() {
|
||||
debug!(
|
||||
"No {} found at {:?}, using defaults",
|
||||
CONFIG_FILE, config_path
|
||||
);
|
||||
return Ok(ThirdPartyConfig::default());
|
||||
}
|
||||
|
||||
let content = fs::read_to_string(&config_path)
|
||||
.with_context(|| format!("Failed to read config file: {:?}", config_path))?;
|
||||
|
||||
let config: ThirdPartyConfig = toml::from_str(&content)
|
||||
.with_context(|| format!("Failed to parse config file: {:?}", config_path))?;
|
||||
|
||||
debug!(
|
||||
"Loaded {} with {} components and {} models",
|
||||
CONFIG_FILE,
|
||||
config.components.len(),
|
||||
config.models.len()
|
||||
);
|
||||
|
||||
Ok(config)
|
||||
}
|
||||
|
||||
/// Get the cache directory path
|
||||
pub fn cache_dir(&self) -> &Path {
|
||||
&self.cache_dir
|
||||
}
|
||||
|
||||
/// Get the base path
|
||||
pub fn base_path(&self) -> &Path {
|
||||
&self.base_path
|
||||
}
|
||||
|
||||
/// Check if a file is cached
|
||||
///
|
||||
/// # Arguments
|
||||
/// * `filename` - The filename to check in the cache
|
||||
pub fn is_cached(&self, filename: &str) -> bool {
|
||||
let cached_path = self.cache_dir.join(filename);
|
||||
if cached_path.exists() {
|
||||
// Also check that file is not empty
|
||||
if let Ok(metadata) = fs::metadata(&cached_path) {
|
||||
return metadata.len() > 0;
|
||||
}
|
||||
}
|
||||
false
|
||||
}
|
||||
|
||||
/// Get the cached file path if it exists
|
||||
///
|
||||
/// # Arguments
|
||||
/// * `filename` - The filename to get from cache
|
||||
pub fn get_cached_path(&self, filename: &str) -> Option<PathBuf> {
|
||||
let cached_path = self.cache_dir.join(filename);
|
||||
if self.is_cached(filename) {
|
||||
Some(cached_path)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
/// Get the path where a file should be cached
|
||||
///
|
||||
/// # Arguments
|
||||
/// * `filename` - The filename
|
||||
pub fn get_cache_path(&self, filename: &str) -> PathBuf {
|
||||
self.cache_dir.join(filename)
|
||||
}
|
||||
|
||||
/// Look up component download info by component name
|
||||
///
|
||||
/// # Arguments
|
||||
/// * `component` - Component name (e.g., "drive", "tables", "llm")
|
||||
pub fn get_component(&self, component: &str) -> Option<&ComponentDownload> {
|
||||
self.config.components.get(component)
|
||||
}
|
||||
|
||||
/// Look up model download info by model name
|
||||
///
|
||||
/// # Arguments
|
||||
/// * `model` - Model name (e.g., "deepseek_small", "bge_embedding")
|
||||
pub fn get_model(&self, model: &str) -> Option<&ComponentDownload> {
|
||||
self.config.models.get(model)
|
||||
}
|
||||
|
||||
/// Get all component downloads
|
||||
pub fn all_components(&self) -> &HashMap<String, ComponentDownload> {
|
||||
&self.config.components
|
||||
}
|
||||
|
||||
/// Get all model downloads
|
||||
pub fn all_models(&self) -> &HashMap<String, ComponentDownload> {
|
||||
&self.config.models
|
||||
}
|
||||
|
||||
/// Resolve a URL to either a cached file path or the original URL
|
||||
///
|
||||
/// This is the main method to use when downloading. It will:
|
||||
/// 1. Extract filename from URL
|
||||
/// 2. Check if file exists in cache
|
||||
/// 3. Return cached path if available, otherwise return original URL
|
||||
///
|
||||
/// # Arguments
|
||||
/// * `url` - The download URL
|
||||
///
|
||||
/// # Returns
|
||||
/// * `CacheResult` - Either a cached file path or the URL to download from
|
||||
pub fn resolve_url(&self, url: &str) -> CacheResult {
|
||||
let filename = Self::extract_filename(url);
|
||||
|
||||
if let Some(cached_path) = self.get_cached_path(&filename) {
|
||||
info!("Using cached file: {:?}", cached_path);
|
||||
CacheResult::Cached(cached_path)
|
||||
} else {
|
||||
trace!("File not in cache, will download: {}", url);
|
||||
CacheResult::Download {
|
||||
url: url.to_string(),
|
||||
cache_path: self.get_cache_path(&filename),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Resolve a URL for a specific component
|
||||
///
|
||||
/// Uses the filename from config if available, otherwise extracts from URL
|
||||
///
|
||||
/// # Arguments
|
||||
/// * `component` - Component name
|
||||
/// * `url` - Fallback URL if component not in config
|
||||
pub fn resolve_component_url(&self, component: &str, url: &str) -> CacheResult {
|
||||
// Check if we have config for this component
|
||||
if let Some(comp) = self.get_component(component) {
|
||||
let cached_path = self.cache_dir.join(&comp.filename);
|
||||
if cached_path.exists()
|
||||
&& fs::metadata(&cached_path)
|
||||
.map(|m| m.len() > 0)
|
||||
.unwrap_or(false)
|
||||
{
|
||||
info!("Using cached {} from: {:?}", comp.name, cached_path);
|
||||
return CacheResult::Cached(cached_path);
|
||||
}
|
||||
// Use URL from config
|
||||
trace!("Will download {} from config URL", comp.name);
|
||||
return CacheResult::Download {
|
||||
url: comp.url.clone(),
|
||||
cache_path: self.cache_dir.join(&comp.filename),
|
||||
};
|
||||
}
|
||||
|
||||
// Fall back to URL-based resolution
|
||||
self.resolve_url(url)
|
||||
}
|
||||
|
||||
/// Save a downloaded file to the cache
|
||||
///
|
||||
/// # Arguments
|
||||
/// * `source` - Path to the downloaded file
|
||||
/// * `filename` - Filename to use in the cache
|
||||
pub fn save_to_cache(&self, source: &Path, filename: &str) -> Result<PathBuf> {
|
||||
let cache_path = self.cache_dir.join(filename);
|
||||
|
||||
// If source is already in the cache directory, just return it
|
||||
if source == cache_path {
|
||||
return Ok(cache_path);
|
||||
}
|
||||
|
||||
// Copy to cache
|
||||
fs::copy(source, &cache_path)
|
||||
.with_context(|| format!("Failed to copy {:?} to cache at {:?}", source, cache_path))?;
|
||||
|
||||
info!("Cached file: {:?}", cache_path);
|
||||
Ok(cache_path)
|
||||
}
|
||||
|
||||
/// Extract filename from a URL
|
||||
pub fn extract_filename(url: &str) -> String {
|
||||
url.split('/')
|
||||
.last()
|
||||
.unwrap_or("download.tmp")
|
||||
.split('?')
|
||||
.next()
|
||||
.unwrap_or("download.tmp")
|
||||
.to_string()
|
||||
}
|
||||
|
||||
/// Verify a cached file's checksum if sha256 is provided
|
||||
///
|
||||
/// # Arguments
|
||||
/// * `filename` - The cached filename
|
||||
/// * `expected_sha256` - Expected SHA256 hash (empty string to skip)
|
||||
pub fn verify_checksum(&self, filename: &str, expected_sha256: &str) -> Result<bool> {
|
||||
if expected_sha256.is_empty() {
|
||||
return Ok(true); // Skip verification if no hash provided
|
||||
}
|
||||
|
||||
let cached_path = self.cache_dir.join(filename);
|
||||
if !cached_path.exists() {
|
||||
return Ok(false);
|
||||
}
|
||||
|
||||
let content = fs::read(&cached_path)?;
|
||||
let computed = sha256_hex(&content);
|
||||
|
||||
if computed == expected_sha256.to_lowercase() {
|
||||
trace!("Checksum verified for {}", filename);
|
||||
Ok(true)
|
||||
} else {
|
||||
warn!(
|
||||
"Checksum mismatch for {}: expected {}, got {}",
|
||||
filename, expected_sha256, computed
|
||||
);
|
||||
Ok(false)
|
||||
}
|
||||
}
|
||||
|
||||
/// List all cached files
|
||||
pub fn list_cached(&self) -> Result<Vec<String>> {
|
||||
let mut files = Vec::new();
|
||||
|
||||
if self.cache_dir.exists() {
|
||||
for entry in fs::read_dir(&self.cache_dir)? {
|
||||
let entry = entry?;
|
||||
if entry.file_type()?.is_file() {
|
||||
if let Some(name) = entry.file_name().to_str() {
|
||||
files.push(name.to_string());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
files.sort();
|
||||
Ok(files)
|
||||
}
|
||||
|
||||
/// Get total size of cached files in bytes
|
||||
pub fn cache_size(&self) -> Result<u64> {
|
||||
let mut total = 0u64;
|
||||
|
||||
if self.cache_dir.exists() {
|
||||
for entry in fs::read_dir(&self.cache_dir)? {
|
||||
let entry = entry?;
|
||||
if entry.file_type()?.is_file() {
|
||||
total += entry.metadata()?.len();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(total)
|
||||
}
|
||||
|
||||
/// Clear all cached files
|
||||
pub fn clear_cache(&self) -> Result<()> {
|
||||
if self.cache_dir.exists() {
|
||||
for entry in fs::read_dir(&self.cache_dir)? {
|
||||
let entry = entry?;
|
||||
if entry.file_type()?.is_file() {
|
||||
fs::remove_file(entry.path())?;
|
||||
}
|
||||
}
|
||||
info!("Cleared cache directory: {:?}", self.cache_dir);
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
/// Result of resolving a URL through the cache
///
/// Produced by [`DownloadCache::resolve_url`] and
/// [`DownloadCache::resolve_component_url`]: either the artifact is already
/// present locally, or the caller must download it to the given cache path.
#[derive(Debug)]
pub enum CacheResult {
    /// File was found in cache (path points at the existing, non-empty file)
    Cached(PathBuf),
    /// File needs to be downloaded
    Download {
        /// URL to download from
        url: String,
        /// Path where file should be cached
        cache_path: PathBuf,
    },
}
|
||||
|
||||
impl CacheResult {
|
||||
/// Check if result is a cached file
|
||||
pub fn is_cached(&self) -> bool {
|
||||
matches!(self, CacheResult::Cached(_))
|
||||
}
|
||||
|
||||
/// Get the path (either cached or target cache path)
|
||||
pub fn path(&self) -> &Path {
|
||||
match self {
|
||||
CacheResult::Cached(p) => p,
|
||||
CacheResult::Download { cache_path, .. } => cache_path,
|
||||
}
|
||||
}
|
||||
|
||||
/// Get the URL if this is a download result
|
||||
pub fn url(&self) -> Option<&str> {
|
||||
match self {
|
||||
CacheResult::Cached(_) => None,
|
||||
CacheResult::Download { url, .. } => Some(url),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Compute SHA256 hash of data and return as lowercase hex string
|
||||
fn sha256_hex(data: &[u8]) -> String {
|
||||
use sha2::{Digest, Sha256};
|
||||
let mut hasher = Sha256::new();
|
||||
hasher.update(data);
|
||||
let result = hasher.finalize();
|
||||
hex::encode(result)
|
||||
}
|
||||
|
||||
#[cfg(test)]
/// Unit tests for the download cache. Each test builds an isolated
/// `DownloadCache` inside a `TempDir` seeded with a minimal `3rdparty.toml`
/// via `create_test_config`, so tests never touch the real cache directory.
mod tests {
    use super::*;
    use std::io::Write;
    use tempfile::TempDir;

    // Writes a minimal 3rdparty.toml (one component, one model, custom
    // cache dir "test-cache") into `dir` for the tests below.
    fn create_test_config(dir: &Path) -> Result<()> {
        let config = r#"
[cache_settings]
cache_dir = "test-cache"

[components.test]
name = "Test Component"
url = "https://example.com/test.tar.gz"
filename = "test.tar.gz"
sha256 = ""

[models.test_model]
name = "Test Model"
url = "https://example.com/model.gguf"
filename = "model.gguf"
sha256 = ""
"#;
        let config_path = dir.join(CONFIG_FILE);
        fs::write(config_path, config)?;
        Ok(())
    }

    // extract_filename: plain path, query-string stripping, and the
    // trailing-slash edge case (which yields an empty string).
    #[test]
    fn test_extract_filename() {
        assert_eq!(
            DownloadCache::extract_filename("https://example.com/path/file.tar.gz"),
            "file.tar.gz"
        );
        assert_eq!(
            DownloadCache::extract_filename("https://example.com/file.zip?token=abc"),
            "file.zip"
        );
        assert_eq!(DownloadCache::extract_filename("https://example.com/"), "");
    }

    // new() must create the configured cache directory on first use.
    #[test]
    fn test_cache_creation() -> Result<()> {
        let temp_dir = TempDir::new()?;
        create_test_config(temp_dir.path())?;

        let cache = DownloadCache::new(temp_dir.path())?;

        assert!(cache.cache_dir().exists());
        assert_eq!(cache.cache_dir().file_name().unwrap(), "test-cache");

        Ok(())
    }

    // is_cached: false when absent, true for a non-empty file, and —
    // crucially — false for a zero-byte (partial/failed) download.
    #[test]
    fn test_is_cached() -> Result<()> {
        let temp_dir = TempDir::new()?;
        create_test_config(temp_dir.path())?;

        let cache = DownloadCache::new(temp_dir.path())?;

        // Initially not cached
        assert!(!cache.is_cached("test.tar.gz"));

        // Create a cached file
        let cache_path = cache.get_cache_path("test.tar.gz");
        let mut file = fs::File::create(&cache_path)?;
        file.write_all(b"test content")?;

        // Now it should be cached
        assert!(cache.is_cached("test.tar.gz"));

        // Empty file should not count as cached
        let empty_path = cache.get_cache_path("empty.tar.gz");
        fs::File::create(&empty_path)?;
        assert!(!cache.is_cached("empty.tar.gz"));

        Ok(())
    }

    // resolve_url: Download variant (carrying the original URL) when the
    // file is absent, Cached variant once the file exists in the cache.
    #[test]
    fn test_resolve_url() -> Result<()> {
        let temp_dir = TempDir::new()?;
        create_test_config(temp_dir.path())?;

        let cache = DownloadCache::new(temp_dir.path())?;

        // Test with uncached URL
        let result = cache.resolve_url("https://example.com/newfile.tar.gz");
        assert!(!result.is_cached());
        assert_eq!(result.url(), Some("https://example.com/newfile.tar.gz"));

        // Create cached file
        let cache_path = cache.get_cache_path("newfile.tar.gz");
        let mut file = fs::File::create(&cache_path)?;
        file.write_all(b"cached content")?;

        // Now it should resolve to cached
        let result = cache.resolve_url("https://example.com/newfile.tar.gz");
        assert!(result.is_cached());
        assert!(result.url().is_none());

        Ok(())
    }

    // get_component: hit for a declared component, None for an unknown key.
    #[test]
    fn test_get_component() -> Result<()> {
        let temp_dir = TempDir::new()?;
        create_test_config(temp_dir.path())?;

        let cache = DownloadCache::new(temp_dir.path())?;

        let component = cache.get_component("test");
        assert!(component.is_some());
        assert_eq!(component.unwrap().name, "Test Component");

        let missing = cache.get_component("nonexistent");
        assert!(missing.is_none());

        Ok(())
    }

    // list_cached: returns exactly the files written into the cache dir.
    #[test]
    fn test_list_cached() -> Result<()> {
        let temp_dir = TempDir::new()?;
        create_test_config(temp_dir.path())?;

        let cache = DownloadCache::new(temp_dir.path())?;

        // Create some cached files
        fs::write(cache.get_cache_path("file1.tar.gz"), "content1")?;
        fs::write(cache.get_cache_path("file2.zip"), "content2")?;

        let files = cache.list_cached()?;
        assert_eq!(files.len(), 2);
        assert!(files.contains(&"file1.tar.gz".to_string()));
        assert!(files.contains(&"file2.zip".to_string()));

        Ok(())
    }

    // cache_size: zero when empty, then the byte sum of all cached files.
    #[test]
    fn test_cache_size() -> Result<()> {
        let temp_dir = TempDir::new()?;
        create_test_config(temp_dir.path())?;

        let cache = DownloadCache::new(temp_dir.path())?;

        // Initially empty
        assert_eq!(cache.cache_size()?, 0);

        // Add files
        fs::write(cache.get_cache_path("file1.txt"), "12345")?; // 5 bytes
        fs::write(cache.get_cache_path("file2.txt"), "1234567890")?; // 10 bytes

        assert_eq!(cache.cache_size()?, 15);

        Ok(())
    }

    // clear_cache: removes every cached file, leaving an empty listing.
    #[test]
    fn test_clear_cache() -> Result<()> {
        let temp_dir = TempDir::new()?;
        create_test_config(temp_dir.path())?;

        let cache = DownloadCache::new(temp_dir.path())?;

        // Create some cached files
        fs::write(cache.get_cache_path("file1.tar.gz"), "content1")?;
        fs::write(cache.get_cache_path("file2.zip"), "content2")?;

        assert_eq!(cache.list_cached()?.len(), 2);

        cache.clear_cache()?;

        assert_eq!(cache.list_cached()?.len(), 0);

        Ok(())
    }
}
|
||||
|
|
@ -1,10 +1,11 @@
|
|||
use crate::package_manager::cache::{CacheResult, DownloadCache};
|
||||
use crate::package_manager::component::ComponentConfig;
|
||||
use crate::package_manager::installer::PackageManager;
|
||||
use crate::package_manager::InstallMode;
|
||||
use crate::package_manager::OsType;
|
||||
use crate::shared::utils::{self, get_database_url_sync, parse_database_url};
|
||||
use anyhow::{Context, Result};
|
||||
use log::{error, trace, warn};
|
||||
use log::{error, info, trace, warn};
|
||||
use reqwest::Client;
|
||||
use std::collections::HashMap;
|
||||
use std::path::PathBuf;
|
||||
|
|
@ -67,14 +68,54 @@ impl PackageManager {
|
|||
.await?;
|
||||
}
|
||||
if !component.data_download_list.is_empty() {
|
||||
// Initialize cache for data files (models, etc.)
|
||||
let cache_base = self.base_path.parent().unwrap_or(&self.base_path);
|
||||
let cache = DownloadCache::new(cache_base).ok();
|
||||
|
||||
for url in &component.data_download_list {
|
||||
let filename = url.split('/').last().unwrap_or("download.tmp");
|
||||
let filename = DownloadCache::extract_filename(url);
|
||||
let output_path = self
|
||||
.base_path
|
||||
.join("data")
|
||||
.join(&component.name)
|
||||
.join(filename);
|
||||
utils::download_file(url, output_path.to_str().unwrap()).await?;
|
||||
.join(&filename);
|
||||
|
||||
// Check if already exists at destination
|
||||
if output_path.exists() {
|
||||
info!("Data file already exists: {:?}", output_path);
|
||||
continue;
|
||||
}
|
||||
|
||||
// Ensure data directory exists
|
||||
if let Some(parent) = output_path.parent() {
|
||||
std::fs::create_dir_all(parent)?;
|
||||
}
|
||||
|
||||
// Check cache first
|
||||
if let Some(ref c) = cache {
|
||||
if let Some(cached_path) = c.get_cached_path(&filename) {
|
||||
info!("Using cached data file: {:?}", cached_path);
|
||||
std::fs::copy(&cached_path, &output_path)?;
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
// Download to cache if available, otherwise directly to destination
|
||||
let download_target = if let Some(ref c) = cache {
|
||||
c.get_cache_path(&filename)
|
||||
} else {
|
||||
output_path.clone()
|
||||
};
|
||||
|
||||
info!("Downloading data file: {}", url);
|
||||
println!("Downloading {}", url);
|
||||
utils::download_file(url, download_target.to_str().unwrap()).await?;
|
||||
|
||||
// Copy from cache to destination if we downloaded to cache
|
||||
if cache.is_some() && download_target != output_path {
|
||||
std::fs::copy(&download_target, &output_path)?;
|
||||
info!("Copied cached file to: {:?}", output_path);
|
||||
}
|
||||
}
|
||||
}
|
||||
self.run_commands(post_cmds, "local", &component.name)?;
|
||||
|
|
@ -121,10 +162,7 @@ impl PackageManager {
|
|||
};
|
||||
if !packages.is_empty() {
|
||||
let pkg_list = packages.join(" ");
|
||||
self.exec_in_container(
|
||||
&container_name,
|
||||
&format!("apt-get install -y {}", pkg_list),
|
||||
)?;
|
||||
self.exec_in_container(&container_name, &format!("apt-get install -y {}", pkg_list))?;
|
||||
}
|
||||
if let Some(url) = &component.download_url {
|
||||
self.download_in_container(
|
||||
|
|
@ -156,7 +194,7 @@ impl PackageManager {
|
|||
);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
|
||||
pub fn remove(&self, component_name: &str) -> Result<()> {
|
||||
let component = self
|
||||
.components
|
||||
|
|
@ -272,25 +310,57 @@ impl PackageManager {
|
|||
) -> Result<()> {
|
||||
let bin_path = self.base_path.join("bin").join(component);
|
||||
std::fs::create_dir_all(&bin_path)?;
|
||||
let filename = url.split('/').last().unwrap_or("download.tmp");
|
||||
let temp_file = if filename.starts_with('/') {
|
||||
PathBuf::from(filename)
|
||||
} else {
|
||||
bin_path.join(filename)
|
||||
|
||||
// Initialize cache - use parent of base_path (botserver root) for cache
|
||||
let cache_base = self.base_path.parent().unwrap_or(&self.base_path);
|
||||
let cache = DownloadCache::new(cache_base).unwrap_or_else(|e| {
|
||||
warn!("Failed to initialize download cache: {}", e);
|
||||
// Create a fallback cache in base_path
|
||||
DownloadCache::new(&self.base_path).expect("Failed to create fallback cache")
|
||||
});
|
||||
|
||||
// Check cache first
|
||||
let cache_result = cache.resolve_component_url(component, url);
|
||||
|
||||
let source_file = match cache_result {
|
||||
CacheResult::Cached(cached_path) => {
|
||||
info!("Using cached file for {}: {:?}", component, cached_path);
|
||||
cached_path
|
||||
}
|
||||
CacheResult::Download {
|
||||
url: download_url,
|
||||
cache_path,
|
||||
} => {
|
||||
info!("Downloading {} from {}", component, download_url);
|
||||
println!("Downloading {}", download_url);
|
||||
|
||||
// Download to cache directory
|
||||
self.download_with_reqwest(&download_url, &cache_path, component)
|
||||
.await?;
|
||||
|
||||
info!("Cached {} to {:?}", component, cache_path);
|
||||
cache_path
|
||||
}
|
||||
};
|
||||
self.download_with_reqwest(url, &temp_file, component)
|
||||
.await?;
|
||||
self.handle_downloaded_file(&temp_file, &bin_path, binary_name)?;
|
||||
|
||||
// Now extract/install from the source file (either cached or freshly downloaded)
|
||||
self.handle_downloaded_file(&source_file, &bin_path, binary_name)?;
|
||||
Ok(())
|
||||
}
|
||||
pub async fn download_with_reqwest(
|
||||
&self,
|
||||
url: &str,
|
||||
temp_file: &PathBuf,
|
||||
target_file: &PathBuf,
|
||||
component: &str,
|
||||
) -> Result<()> {
|
||||
const MAX_RETRIES: u32 = 3;
|
||||
const RETRY_DELAY: std::time::Duration = std::time::Duration::from_secs(2);
|
||||
|
||||
// Ensure parent directory exists
|
||||
if let Some(parent) = target_file.parent() {
|
||||
std::fs::create_dir_all(parent)?;
|
||||
}
|
||||
|
||||
let client = Client::builder()
|
||||
.timeout(std::time::Duration::from_secs(30))
|
||||
.user_agent("botserver-package-manager/1.0")
|
||||
|
|
@ -306,7 +376,10 @@ impl PackageManager {
|
|||
);
|
||||
std::thread::sleep(RETRY_DELAY * attempt);
|
||||
}
|
||||
match self.attempt_reqwest_download(&client, url, temp_file).await {
|
||||
match self
|
||||
.attempt_reqwest_download(&client, url, target_file)
|
||||
.await
|
||||
{
|
||||
Ok(_size) => {
|
||||
if attempt > 0 {
|
||||
trace!("Download succeeded on retry attempt {}", attempt);
|
||||
|
|
@ -316,7 +389,7 @@ impl PackageManager {
|
|||
Err(e) => {
|
||||
warn!("Download attempt {} failed: {}", attempt + 1, e);
|
||||
last_error = Some(e);
|
||||
let _ = std::fs::remove_file(temp_file);
|
||||
let _ = std::fs::remove_file(target_file);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -446,7 +519,7 @@ impl PackageManager {
|
|||
} else {
|
||||
PathBuf::from("/opt/gbo/logs")
|
||||
};
|
||||
|
||||
|
||||
// Get DB password from Vault for commands that need it (e.g., PostgreSQL initdb)
|
||||
let db_password = match get_database_url_sync() {
|
||||
Ok(url) => {
|
||||
|
|
@ -460,7 +533,7 @@ impl PackageManager {
|
|||
String::new()
|
||||
}
|
||||
};
|
||||
|
||||
|
||||
for cmd in commands {
|
||||
let rendered_cmd = cmd
|
||||
.replace("{{BIN_PATH}}", &bin_path.to_string_lossy())
|
||||
|
|
|
|||
|
|
@ -1,7 +1,9 @@
|
|||
pub mod cache;
|
||||
pub mod component;
|
||||
pub mod installer;
|
||||
pub mod os;
|
||||
pub mod setup;
|
||||
pub use cache::{CacheResult, DownloadCache};
|
||||
pub use installer::PackageManager;
|
||||
pub mod cli;
|
||||
pub mod facade;
|
||||
|
|
|
|||
66
src/main.rs
66
src/main.rs
|
|
@ -1,4 +1,6 @@
|
|||
use axum::extract::Extension;
|
||||
use axum::extract::{Extension, State};
|
||||
use axum::http::StatusCode;
|
||||
use axum::Json;
|
||||
use axum::{
|
||||
routing::{get, post},
|
||||
Router,
|
||||
|
|
@ -95,6 +97,42 @@ use crate::shared::utils::create_s3_operator;
|
|||
// Use BootstrapProgress from lib.rs
|
||||
use botserver::BootstrapProgress;
|
||||
|
||||
/// Health check endpoint handler
|
||||
/// Returns server health status for monitoring and load balancers
|
||||
async fn health_check(State(state): State<Arc<AppState>>) -> (StatusCode, Json<serde_json::Value>) {
|
||||
// Check database connectivity
|
||||
let db_ok = state.conn.get().is_ok();
|
||||
|
||||
let status = if db_ok { "healthy" } else { "degraded" };
|
||||
let code = if db_ok {
|
||||
StatusCode::OK
|
||||
} else {
|
||||
StatusCode::SERVICE_UNAVAILABLE
|
||||
};
|
||||
|
||||
(
|
||||
code,
|
||||
Json(serde_json::json!({
|
||||
"status": status,
|
||||
"service": "botserver",
|
||||
"version": env!("CARGO_PKG_VERSION"),
|
||||
"database": db_ok
|
||||
})),
|
||||
)
|
||||
}
|
||||
|
||||
/// Simple health check without state (for basic liveness probes)
|
||||
async fn health_check_simple() -> (StatusCode, Json<serde_json::Value>) {
|
||||
(
|
||||
StatusCode::OK,
|
||||
Json(serde_json::json!({
|
||||
"status": "ok",
|
||||
"service": "botserver",
|
||||
"version": env!("CARGO_PKG_VERSION")
|
||||
})),
|
||||
)
|
||||
}
|
||||
|
||||
async fn run_axum_server(
|
||||
app_state: Arc<AppState>,
|
||||
port: u16,
|
||||
|
|
@ -111,6 +149,9 @@ async fn run_axum_server(
|
|||
|
||||
// Build API router with module-specific routes
|
||||
let mut api_router = Router::new()
|
||||
// Health check endpoints - both /health and /api/health for compatibility
|
||||
.route("/health", get(health_check_simple))
|
||||
.route(ApiUrls::HEALTH, get(health_check))
|
||||
.route(ApiUrls::SESSIONS, post(create_session))
|
||||
.route(ApiUrls::SESSIONS, get(get_sessions))
|
||||
.route(
|
||||
|
|
@ -213,8 +254,13 @@ async fn run_axum_server(
|
|||
// Bind to address
|
||||
let addr = SocketAddr::from(([0, 0, 0, 0], port));
|
||||
|
||||
// Check if certificates exist
|
||||
if cert_path.exists() && key_path.exists() {
|
||||
// Check if TLS is disabled via environment variable (for local development)
|
||||
let disable_tls = std::env::var("BOTSERVER_DISABLE_TLS")
|
||||
.map(|v| v == "true" || v == "1")
|
||||
.unwrap_or(false);
|
||||
|
||||
// Check if certificates exist and TLS is not disabled
|
||||
if !disable_tls && cert_path.exists() && key_path.exists() {
|
||||
// Use HTTPS with existing certificates
|
||||
let tls_config = axum_server::tls_rustls::RustlsConfig::from_pem_file(cert_path, key_path)
|
||||
.await
|
||||
|
|
@ -228,15 +274,15 @@ async fn run_axum_server(
|
|||
.serve(app.into_make_service())
|
||||
.await
|
||||
} else {
|
||||
// Generate self-signed certificate if not present
|
||||
warn!("TLS certificates not found, generating self-signed certificate...");
|
||||
// Use HTTP - either TLS is disabled or certificates don't exist
|
||||
if disable_tls {
|
||||
info!("TLS disabled via BOTSERVER_DISABLE_TLS environment variable");
|
||||
} else {
|
||||
warn!("TLS certificates not found, using HTTP");
|
||||
}
|
||||
|
||||
// Fall back to HTTP temporarily (bootstrap will generate certs)
|
||||
let listener = tokio::net::TcpListener::bind(addr).await?;
|
||||
info!(
|
||||
"HTTP server listening on {} (certificates will be generated on next restart)",
|
||||
addr
|
||||
);
|
||||
info!("HTTP server listening on {}", addr);
|
||||
axum::serve(listener, app.into_make_service())
|
||||
.await
|
||||
.map_err(|e| std::io::Error::new(std::io::ErrorKind::Other, e))
|
||||
|
|
|
|||
Loading…
Add table
Reference in a new issue