fix: Bootstrap should check Valkey is running and use correct component name
All checks were successful
BotServer CI / build (push) Successful in 8m56s
All checks were successful
BotServer CI / build (push) Successful in 8m56s
- Fix component name mismatch: "redis" -> "cache" in bootstrap_manager
- Add cache_health_check() function to verify Valkey is responding
- Add health check loop after starting cache (12s wait with PING test)
- Ensures cache is ready before proceeding with bootstrap

This fixes the issue where botserver would hang waiting for cache connection because the cache component was never started.

Co-Authored-By: Claude Sonnet 4.5 <noreply@anthropic.com>
This commit is contained in:
parent
dec3a086ab
commit
ba70bd97bc
2 changed files with 54 additions and 9 deletions
|
|
@ -1,6 +1,6 @@
|
||||||
// Bootstrap manager implementation
|
// Bootstrap manager implementation
|
||||||
use crate::core::bootstrap::bootstrap_types::{BootstrapManager, BootstrapProgress};
|
use crate::core::bootstrap::bootstrap_types::{BootstrapManager, BootstrapProgress};
|
||||||
use crate::core::bootstrap::bootstrap_utils::{safe_pkill, vault_health_check};
|
use crate::core::bootstrap::bootstrap_utils::{cache_health_check, safe_pkill, vault_health_check};
|
||||||
use crate::core::config::AppConfig;
|
use crate::core::config::AppConfig;
|
||||||
use crate::core::package_manager::{InstallMode, PackageManager};
|
use crate::core::package_manager::{InstallMode, PackageManager};
|
||||||
use log::{info, warn};
|
use log::{info, warn};
|
||||||
|
|
@ -102,14 +102,30 @@ impl BootstrapManager {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
if pm.is_installed("redis") {
|
if pm.is_installed("cache") {
|
||||||
info!("Starting Redis...");
|
let cache_already_running = cache_health_check();
|
||||||
match pm.start("redis") {
|
if cache_already_running {
|
||||||
Ok(_child) => {
|
info!("Valkey cache is already running");
|
||||||
info!("Redis started");
|
} else {
|
||||||
}
|
info!("Starting Valkey cache...");
|
||||||
Err(e) => {
|
match pm.start("cache") {
|
||||||
warn!("Failed to start Redis: {}", e);
|
Ok(_child) => {
|
||||||
|
info!("Valkey cache process started, waiting for readiness...");
|
||||||
|
// Wait for cache to be ready
|
||||||
|
for i in 0..12 {
|
||||||
|
sleep(Duration::from_secs(1)).await;
|
||||||
|
if cache_health_check() {
|
||||||
|
info!("Valkey cache is responding");
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
if i == 11 {
|
||||||
|
warn!("Valkey cache did not respond after 12 seconds");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Err(e) => {
|
||||||
|
warn!("Failed to start Valkey cache: {}", e);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -91,6 +91,35 @@ pub fn vault_health_check() -> bool {
|
||||||
false
|
false
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Check whether the Valkey/Redis cache at 127.0.0.1:6379 is healthy.
///
/// Strategy:
/// 1. Preferred: run `redis-cli ... ping` and accept a "PONG" reply, which
///    proves the server is actually answering the RESP protocol.
/// 2. Fallback (redis-cli binary not on PATH): attempt a plain TCP
///    connection with a 1-second timeout. This only proves something is
///    listening on the port, not that it speaks RESP — a weaker signal,
///    but enough for the bootstrap readiness loop.
///
/// Returns `true` when the cache looks reachable, `false` otherwise.
/// Never panics; the fallback path blocks for at most ~1 second.
pub fn cache_health_check() -> bool {
    // Try to PING the cache server via redis-cli first.
    match Command::new("redis-cli")
        .args(["-h", "127.0.0.1", "-p", "6379", "ping"])
        .output()
    {
        Ok(output) => {
            if output.status.success() {
                // A healthy server answers "PONG"; compare case-insensitively
                // and ignore the trailing newline. eq_ignore_ascii_case avoids
                // the allocation that to_uppercase() would make.
                let response = String::from_utf8_lossy(&output.stdout);
                response.trim().eq_ignore_ascii_case("PONG")
            } else {
                false
            }
        }
        Err(_) => {
            // redis-cli could not be spawned (e.g. not installed). Probe the
            // port with a raw TCP connect instead of shelling out to bash's
            // /dev/tcp trick, which silently fails on systems lacking bash
            // or the `timeout` utility.
            let addr = std::net::SocketAddr::from(([127, 0, 0, 1], 6379));
            std::net::TcpStream::connect_timeout(&addr, std::time::Duration::from_secs(1)).is_ok()
        }
    }
}
|
||||||
|
|
||||||
/// Get current user safely
|
/// Get current user safely
|
||||||
pub fn safe_fuser() -> String {
|
pub fn safe_fuser() -> String {
|
||||||
// Return shell command that uses $USER environment variable
|
// Return shell command that uses $USER environment variable
|
||||||
|
|
|
||||||
Loading…
Add table
Reference in a new issue