Refactor server code and add auth API fixes

parent a8f52451c5
commit 8f96cd1015

11 changed files with 891 additions and 1520 deletions
add-req.sh (67 changed lines, Normal file → Executable file)

@@ -3,7 +3,8 @@
 SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
 PROJECT_ROOT="$SCRIPT_DIR"
 OUTPUT_FILE="$SCRIPT_DIR/prompt.out"
-rm $OUTPUT_FILE
+rm -f "$OUTPUT_FILE"
 echo "Consolidated LLM Context" > "$OUTPUT_FILE"

 prompts=(
@@ -13,8 +14,10 @@ prompts=(
 )

 for file in "${prompts[@]}"; do
-    cat "$file" >> "$OUTPUT_FILE"
-    echo "" >> "$OUTPUT_FILE"
+    if [ -f "$file" ]; then
+        cat "$file" >> "$OUTPUT_FILE"
+        echo "" >> "$OUTPUT_FILE"
+    fi
 done

 dirs=(
@@ -22,8 +25,8 @@ dirs=(
     #"automation"
     #"basic"
     "bot"
-    #"channels"
-    "config"
+    "channels"
+    #"config"
     #"context"
     #"email"
     #"file"
@@ -37,25 +40,55 @@ dirs=(
     #"web_automation"
     #"whatsapp"
 )

+filter_rust_file() {
+    sed -E '/^\s*\/\//d' "$1" | \
+    sed -E '/info!\s*\(/d' | \
+    sed -E '/debug!\s*\(/d' | \
+    sed -E '/trace!\s*\(/d'
+}
+
 for dir in "${dirs[@]}"; do
-    find "$PROJECT_ROOT/src/$dir" -name "*.rs" | while read file; do
-        echo $file >> "$OUTPUT_FILE"
-        cat "$file" >> "$OUTPUT_FILE"
+    find "$PROJECT_ROOT/src/$dir" -name "*.rs" | while read -r file; do
+        echo "$file" >> "$OUTPUT_FILE"
+        filter_rust_file "$file" >> "$OUTPUT_FILE"
         echo "" >> "$OUTPUT_FILE"
     done
 done

-# Also append the specific files you mentioned
-echo "$PROJECT_ROOT/src/main.rs" >> "$OUTPUT_FILE"
-cat "$PROJECT_ROOT/src/main.rs" >> "$OUTPUT_FILE"
-cat "$PROJECT_ROOT/src/basic/keywords/hear_talk.rs" >> "$OUTPUT_FILE"
-echo "$PROJECT_ROOT/src/basic/mod.rs">> "$OUTPUT_FILE"
-cat "$PROJECT_ROOT/src/basic/mod.rs" >> "$OUTPUT_FILE"
-echo "$PROJECT_ROOT/templates/annoucements.gbai/annoucements.gbdialog/start.bas" >> "$OUTPUT_FILE"
-cat "$PROJECT_ROOT/templates/annoucements.gbai/annoucements.gbdialog/start.bas" >> "$OUTPUT_FILE"
+# Additional specific files
+files=(
+    "$PROJECT_ROOT/src/main.rs"
+    "$PROJECT_ROOT/src/basic/keywords/hear_talk.rs"
+    "$PROJECT_ROOT/templates/annoucements.gbai/annoucements.gbdialog/start.bas"
+    "$PROJECT_ROOT/templates/annoucements.gbai/annoucements.gbdialog/auth.bas"
+    "$PROJECT_ROOT/web/index.html"
+)
+
+for file in "${files[@]}"; do
+    if [[ "$file" == *.rs ]]; then
+        echo "$file" >> "$OUTPUT_FILE"
+        filter_rust_file "$file" >> "$OUTPUT_FILE"
+    else
+        echo "$file" >> "$OUTPUT_FILE"
+        cat "$file" >> "$OUTPUT_FILE"
+    fi
+done
+
+# Remove all blank lines and reduce whitespace greater than 1 space
+sed -i 's/[[:space:]]*$//' "$OUTPUT_FILE"
+sed -i '/^$/d' "$OUTPUT_FILE"
+sed -i 's/ \+/ /g' "$OUTPUT_FILE"
+
+# Calculate and display token count (approximation: words * 1.3)
+WORD_COUNT=$(wc -w < "$OUTPUT_FILE")
+TOKEN_COUNT=$(echo "$WORD_COUNT * 1.3 / 1" | bc)
+FILE_SIZE=$(wc -c < "$OUTPUT_FILE")

 echo "" >> "$OUTPUT_FILE"

-# cargo build --message-format=short 2>&1 | grep -E 'error' >> "$OUTPUT_FILE"
+echo "Approximate token count: $TOKEN_COUNT"
+echo "Context size: $FILE_SIZE bytes"
+
+cat "$OUTPUT_FILE" | xclip -selection clipboard
+echo "Content copied to clipboard (xclip)"
@@ -18,3 +18,10 @@ cargo install cargo-edit

 cargo upgrade
 cargo audit
+
+apt install xclip
+
+# Prompt add-ons
+
+- Prompt add-ons: Fill the file with info!, trace! and debug! macros.
+-
@@ -2,9 +2,11 @@ MOST IMPORTANT CODE GENERATION RULES:
 - No placeholders, never comment/uncomment code, no explanations, no filler text.
 - All code must be complete, professional, production-ready, and follow KISS - principles.
 - NEVER return placeholders of any kind, neither commented code, only REAL PRODUCTION GRADE code.
-- Always increment logging with info! to give birth to the console.
+- Always increment logging with (all-in-one-line) info!, debug!, trace! to give birth to the console.
 - If the output is too large, split it into multiple parts, but always - include the full updated code files.
 - Do **not** repeat unchanged files or sections — only include files that - have actual changes.
 - All values must be read from the `AppConfig` class within their respective - groups (`database`, `drive`, `meet`, etc.); never use hardcoded or magic - values.
 - Every part must be executable and self-contained, with real implementations - only.
 - Only generated production ready enterprise grade VERY condensed no commented code.
+- DO NOT WRITE ANY ERROR HANDLING CODE LET IT CRASH.
+- Never generate two ore more trace mensages that are equal!
@@ -141,7 +141,7 @@ pub fn set_user_keyword(state: Arc<AppState>, user: UserSession, engine: &mut En

 if let Err(e) = session_manager.update_user_id(user_clone_spawn.id, user_id)
 {
-    debug!("Failed to update user ID in session: {}", e);
+    error!("Failed to update user ID in session: {}", e);
 } else {
     info!(
         "Updated session {} to user ID: {}",
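For reference, a minimal Rust sketch of what this hunk amounts to. Only the log levels and messages come from the diff; the helper name, the u64 types, and the Result-returning shape of update_user_id are assumptions for illustration:

use log::{error, info};

// Hypothetical helper showing the change: failures to update the user ID are
// now logged at error level instead of debug, successes stay at info.
fn log_user_id_update<E: std::fmt::Display>(session_id: u64, user_id: u64, result: Result<(), E>) {
    match result {
        Err(e) => error!("Failed to update user ID in session: {}", e),
        Ok(()) => info!("Updated session {} to user ID: {}", session_id, user_id),
    }
}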
@@ -82,8 +82,6 @@ impl ScriptService {
         let trimmed = line.trim();

         if trimmed.is_empty() || trimmed.starts_with("//") || trimmed.starts_with("REM") {
-            result.push_str(line);
-            result.push('\n');
             continue;
         }

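The net effect of this hunk is that blank, "//", and REM lines no longer survive preprocessing. A self-contained sketch of the loop under that reading; the method name and signature are not shown in the diff and are assumed here:

struct ScriptService;

impl ScriptService {
    // Hypothetical preprocessing pass over a BASIC-style script.
    fn strip_comments(&self, script: &str) -> String {
        let mut result = String::new();
        for line in script.lines() {
            let trimmed = line.trim();
            // Before this commit, these lines were copied into `result` before
            // the `continue`; now they are dropped from the output entirely.
            if trimmed.is_empty() || trimmed.starts_with("//") || trimmed.starts_with("REM") {
                continue;
            }
            result.push_str(line);
            result.push('\n');
        }
        result
    }
}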
src/bot/mod.rs (600 changed lines): file diff suppressed because it is too large.
@@ -78,7 +78,7 @@ impl AppConfig {

     pub fn from_env() -> Self {
         let database = DatabaseConfig {
-            username: env::var("TABLES_USERNAME").unwrap_or_else(|_| "user".to_string()),
+            username: env::var("TABLES_USERNAME").unwrap(),
             password: env::var("TABLES_PASSWORD").unwrap_or_else(|_| "pass".to_string()),
             server: env::var("TABLES_SERVER").unwrap_or_else(|_| "localhost".to_string()),
             port: env::var("TABLES_PORT")
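A short sketch of the behavioral change in this hunk: TABLES_USERNAME becomes required at startup and a missing value panics, while the other fields keep their fallbacks. The free function and two-field struct are simplifications for illustration, not the real AppConfig:

use std::env;

struct DatabaseConfig {
    username: String,
    password: String,
}

fn database_config_from_env() -> DatabaseConfig {
    DatabaseConfig {
        // panics with a VarError if TABLES_USERNAME is unset
        // (previously defaulted to "user")
        username: env::var("TABLES_USERNAME").unwrap(),
        // password still falls back to a default value
        password: env::var("TABLES_PASSWORD").unwrap_or_else(|_| "pass".to_string()),
    }
}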
src/main.rs (30 changed lines)

@@ -1,5 +1,4 @@
 #![allow(dead_code)]
-
 use actix_cors::Cors;
 use actix_web::middleware::Logger;
 use actix_web::{web, App, HttpServer};
@@ -7,7 +6,6 @@ use dotenvy::dotenv;
 use log::info;
 use std::collections::HashMap;
 use std::sync::{Arc, Mutex};
-
 mod auth;
 mod automation;
 mod basic;
@@ -25,10 +23,9 @@ mod session;
 mod shared;
 mod tools;
 mod whatsapp;
-
 use crate::bot::{
-    create_session, get_session_history, get_sessions, index, set_mode_handler, start_session,
-    static_files, voice_start, voice_stop, websocket_handler, whatsapp_webhook,
+    auth_handler, create_session, get_session_history, get_sessions, index, set_mode_handler,
+    start_session, static_files, voice_start, voice_stop, websocket_handler,
     whatsapp_webhook_verify,
 };
 use crate::channels::{VoiceAdapter, WebChannelAdapter};
@@ -49,14 +46,12 @@ async fn main() -> std::io::Result<()> {
     dotenv().ok();
     env_logger::Builder::from_env(env_logger::Env::default().default_filter_or("info")).init();
-
-    info!("Starting General Bots 6.0...");

     let cfg = AppConfig::from_env();
     let config = std::sync::Arc::new(cfg.clone());

     let db_pool = match diesel::Connection::establish(&cfg.database_url()) {
         Ok(conn) => {
-            info!("Connected to main database");
+            info!("Connected to main database successfully");
             Arc::new(Mutex::new(conn))
         }
         Err(e) => {
@@ -79,7 +74,7 @@ async fn main() -> std::io::Result<()> {

     let db_custom_pool = match diesel::Connection::establish(&custom_db_url) {
         Ok(conn) => {
-            info!("Connected to custom database using constructed URL");
+            info!("Connected to custom database successfully");
             Arc::new(Mutex::new(conn))
         }
         Err(e2) => {
@@ -97,7 +92,7 @@ async fn main() -> std::io::Result<()> {

     let redis_client = match redis::Client::open("redis://127.0.0.1/") {
         Ok(client) => {
-            info!("Connected to Redis");
+            info!("Connected to Redis successfully");
             Some(Arc::new(client))
         }
         Err(e) => {
@@ -109,7 +104,6 @@ async fn main() -> std::io::Result<()> {
     let tool_manager = Arc::new(tools::ToolManager::new());
     let llama_url =
         std::env::var("LLM_URL").unwrap_or_else(|_| "http://localhost:8081".to_string());
-
     let llm_provider = Arc::new(crate::llm::OpenAIClient::new(
         "empty".to_string(),
         Some(llama_url.clone()),
@@ -121,13 +115,11 @@ async fn main() -> std::io::Result<()> {
         "api_key".to_string(),
         "api_secret".to_string(),
     ));
-
     let whatsapp_adapter = Arc::new(WhatsAppAdapter::new(
         "whatsapp_token".to_string(),
         "phone_number_id".to_string(),
         "verify_token".to_string(),
     ));
-
     let tool_api = Arc::new(tools::ToolApi::new());

     let session_manager = Arc::new(tokio::sync::Mutex::new(session::SessionManager::new(
@@ -150,7 +142,14 @@ async fn main() -> std::io::Result<()> {
         tool_manager: tool_manager.clone(),
         llm_provider: llm_provider.clone(),
         auth_service: auth_service.clone(),
-        channels: Arc::new(Mutex::new(HashMap::new())),
+        channels: Arc::new(Mutex::new({
+            let mut map = HashMap::new();
+            map.insert(
+                "web".to_string(),
+                web_adapter.clone() as Arc<dyn crate::channels::ChannelAdapter>,
+            );
+            map
+        })),
         response_channels: Arc::new(tokio::sync::Mutex::new(HashMap::new())),
         web_adapter: web_adapter.clone(),
         voice_adapter: voice_adapter.clone(),
@@ -171,7 +170,6 @@ async fn main() -> std::io::Result<()> {
         .max_age(3600);
-
     let app_state_clone = app_state.clone();

     let mut app = App::new()
         .wrap(cors)
         .wrap(Logger::default())
@@ -183,8 +181,8 @@ async fn main() -> std::io::Result<()> {
         .service(index)
         .service(static_files)
         .service(websocket_handler)
+        .service(auth_handler)
         .service(whatsapp_webhook_verify)
-        .service(whatsapp_webhook)
         .service(voice_start)
         .service(voice_stop)
         .service(create_session)
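The main functional change in src/main.rs is that the channels map is seeded with the web adapter instead of starting empty, so the "web" channel is routable as soon as the server starts. A self-contained sketch of just that expression, using a stand-in ChannelAdapter trait and WebChannelAdapter type (the real ones live in crate::channels):

use std::collections::HashMap;
use std::sync::{Arc, Mutex};

// Stand-ins for the real crate::channels types, for illustration only.
trait ChannelAdapter: Send + Sync {}

struct WebChannelAdapter;
impl ChannelAdapter for WebChannelAdapter {}

fn build_channels(
    web_adapter: Arc<WebChannelAdapter>,
) -> Arc<Mutex<HashMap<String, Arc<dyn ChannelAdapter>>>> {
    Arc::new(Mutex::new({
        let mut map = HashMap::new();
        // Pre-register the web channel under the "web" key.
        map.insert("web".to_string(), web_adapter as Arc<dyn ChannelAdapter>);
        map
    }))
}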
@@ -1,8 +0,0 @@
-TALK "Welcome to General Bots! What is your name?"
-HEAR name
-TALK "Hello, " + name
-
-text = GET "default.pdf"
-SET_CONTEXT text
-
-resume = LLM "Build a resume from " + text
@@ -1,5 +1,7 @@
 TALK "Welcome to General Bots!"
-TALK "What is your name?"
-HEAR name
-TALK "Hello " + name + ", nice to meet you!"
-SET_USER "92fcffaa-bf0a-41a9-8d99-5541709d695b"
+REM text = GET "default.pdf"
+REM resume = LLM "Build a resume from " + text
+REM TALK resume
+
+REM SET_CONTEXT text
web/index.html (1679 changed lines): file diff suppressed because it is too large.