- Postgres updated to 18.

Rodrigo Rodriguez (Pragmatismo) 2025-10-18 22:25:59 -03:00
parent 88a52f172e
commit d970d48aa7
6 changed files with 770 additions and 377 deletions

.gitignore

@@ -4,3 +4,4 @@ target
 work
 *.out
 bin
+botserver-stack


@@ -38,3 +38,41 @@ valkey-cli -p 6379 monitor
 - Prompt add-ons: Fill the file with info!, trace! and debug! macros.
 -
+
+# Zed Agents
+```
+"language_models": {
+  "openai_compatible": {
+    "Groq GPT 120b": {
+      "api_url": "https://api.groq.com/openai/v1",
+      "available_models": [
+        {
+          "name": "meta-llama/llama-4-scout-17b-16e-instruct",
+          "max_tokens": 30000,
+          "capabilities": {
+            "tools": true,
+            "images": false,
+            "parallel_tool_calls": false,
+            "prompt_cache_key": false
+          }
+        },
+        {
+          "name": "groq/compound",
+          "max_tokens": 70000
+        },
+        {
+          "name": "openai/gpt-oss-120b",
+          "max_tokens": 8000,
+          "capabilities": {
+            "tools": true,
+            "images": false,
+            "parallel_tool_calls": false,
+            "prompt_cache_key": false
+          }
+        }
+      ]
+    }
+  }
+},
+```
+


@@ -2,14 +2,14 @@
 SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
 PROJECT_ROOT="$SCRIPT_DIR"
-OUTPUT_FILE="$SCRIPT_DIR/prompt.out"
+OUTPUT_FILE="/tmp/prompt.out"
 rm $OUTPUT_FILE
 echo "Please, fix this consolidated LLM Context" > "$OUTPUT_FILE"
 prompts=(
 "./prompts/dev/shared.md"
 "./prompts/dev/platform/shared.md"
 "./Cargo.toml"
 "./prompts/dev/fix.md"
 "./prompts/dev/platform/fix-errors.md"
 )
 for file in "${prompts[@]}"; do
@@ -21,7 +21,8 @@ dirs=(
 #"auth"
 #"automation"
 #"basic"
-"bot"
+#"bot"
+"bootstrap"
 #"channels"
 #"config"
 #"context"
@@ -49,11 +50,22 @@ done
 echo "$PROJECT_ROOT/src/main.rs" >> "$OUTPUT_FILE"
 cat "$PROJECT_ROOT/src/main.rs" >> "$OUTPUT_FILE"
+cat "$PROJECT_ROOT/src/basic/keywords/hear_talk.rs" >> "$OUTPUT_FILE"
+echo "$PROJECT_ROOT/src/basic/mod.rs" >> "$OUTPUT_FILE"
+cat "$PROJECT_ROOT/src/basic/mod.rs" >> "$OUTPUT_FILE"
 echo "" >> "$OUTPUT_FILE"
+cargo build --message-format=short 2>&1 | grep -E 'error' >> "$OUTPUT_FILE"
+# Calculate and display token count (approximation: words * 1.3)
+WORD_COUNT=$(wc -w < "$OUTPUT_FILE")
+TOKEN_COUNT=$(echo "$WORD_COUNT * 1.3 / 1" | bc)
+FILE_SIZE=$(wc -c < "$OUTPUT_FILE")
+echo "" >> "$OUTPUT_FILE"
+echo "Approximate token count: $TOKEN_COUNT"
+echo "Context size: $FILE_SIZE bytes"
 cat "$OUTPUT_FILE" | xclip -selection clipboard
 echo "Content copied to clipboard (xclip)"
 rm -f "$OUTPUT_FILE"
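A note on the token arithmetic added above: `WORD_COUNT * 1.3` is a crude words-to-tokens heuristic, and the trailing `/ 1` only forces `bc` to truncate the result to an integer. A minimal Rust sketch of the same heuristic (illustrative names, not code from this repo):

```rust
// Crude LLM token estimate from whitespace-separated words, mirroring
// the script's WORD_COUNT * 1.3 heuristic. A real tokenizer will differ.
fn approx_tokens(text: &str) -> usize {
    let words = text.split_whitespace().count();
    (words as f64 * 1.3) as usize // truncate, like bc's integer division
}

fn main() {
    let sample = "Please, fix this consolidated LLM Context";
    println!("~{} tokens", approx_tokens(sample));
}
```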


@@ -1,2 +1,2 @@
 echo Starting General Bots...
-npm run start
+clear && cargo build && sudo RUST_BACKTRACE=1 ./target/debug/botserver


@@ -424,10 +424,10 @@ impl BootstrapManager {
     fn generate_password(&self) -> String {
         use rand::Rng;
         const CHARSET: &[u8] = b"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789";
-        let mut rng = rand::rng();
+        let mut rng = rand::thread_rng();
         (0..16)
             .map(|_| {
-                let idx = rng.random_range(0..CHARSET.len());
+                let idx = rng.gen_range(0..CHARSET.len());
                 CHARSET[idx] as char
             })
             .collect()
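The two changed lines track the rand crate's API rename: `rand::rng()` and `Rng::random_range` are the rand 0.9 names, while `rand::thread_rng()` and `Rng::gen_range` are the rand 0.8 names this commit settles on. A self-contained sketch of the resulting generator, assuming rand 0.8 in Cargo.toml (the free function and length parameter are illustrative):

```rust
// Sketch assuming rand 0.8: thread_rng()/gen_range() are the 0.8 names;
// under rand 0.9 they become rand::rng()/random_range().
use rand::Rng;

fn random_password(len: usize) -> String {
    const CHARSET: &[u8] = b"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789";
    let mut rng = rand::thread_rng();
    (0..len)
        .map(|_| CHARSET[rng.gen_range(0..CHARSET.len())] as char)
        .collect()
}

fn main() {
    println!("{}", random_password(16)); // 16 chars, as in the diff above
}
```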
@@ -479,9 +479,14 @@ impl BootstrapManager {
         let start = std::time::Instant::now();
         while start.elapsed().as_secs() < timeout_secs {
-            if TcpListener::bind((host, port)).is_err() {
-                info!("Service {}:{} is ready", host, port);
-                return Ok(());
-            }
+            match TcpListener::bind((host, port)) {
+                Ok(_) => {
+                    thread::sleep(Duration::from_secs(1));
+                }
+                Err(_) => {
+                    info!("Service {}:{} is ready", host, port);
+                    return Ok(());
+                }
+            }
             thread::sleep(Duration::from_secs(1));
         }
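This readiness probe works by attempted binding: if `TcpListener::bind` on the service's host and port fails, some process (presumably the service) already owns the port, so it is treated as ready; if the bind succeeds, the port is still free, and the loop sleeps and retries. A self-contained sketch of the same technique (host, port, and timeout are illustrative; a `TcpStream::connect` probe would test reachability without briefly claiming the port):

```rust
// Bind-probe readiness check: binding succeeds only while the port is
// free, so a bind *failure* is the signal that the service is listening.
use std::net::TcpListener;
use std::thread;
use std::time::{Duration, Instant};

fn wait_for_port(host: &str, port: u16, timeout_secs: u64) -> Result<(), String> {
    let start = Instant::now();
    while start.elapsed().as_secs() < timeout_secs {
        match TcpListener::bind((host, port)) {
            // Port still free: the listener drops here, releasing the port.
            Ok(_listener) => thread::sleep(Duration::from_secs(1)),
            // Port taken: assume the awaited service holds it.
            Err(_) => return Ok(()),
        }
    }
    Err(format!("timed out waiting for {}:{}", host, port))
}

fn main() {
    match wait_for_port("127.0.0.1", 5432, 30) {
        Ok(()) => println!("service is up"),
        Err(e) => eprintln!("{}", e),
    }
}
```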

File diff suppressed because it is too large.