Migration to Rust and free from Azure.

This commit is contained in:
Rodrigo Rodriguez (Pragmatismo) 2025-10-06 10:30:17 -03:00
parent 1bb97ebee9
commit 9749893dd0
127 changed files with 16179 additions and 0 deletions

45
.vscode/launch.json vendored Normal file
View file

@ -0,0 +1,45 @@
{
// Use IntelliSense to learn about possible attributes.
// Hover to view descriptions of existing attributes.
// For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
"version": "0.2.0",
"configurations": [
{
"type": "lldb",
"request": "launch",
"name": "Debug executable 'gbserver'",
"cargo": {
"args": [
"build",
"--bin=gbserver",
"--package=gbserver"
],
"filter": {
"name": "gbserver",
"kind": "bin"
}
},
"args": [],
"cwd": "${workspaceFolder}"
},
{
"type": "lldb",
"request": "launch",
"name": "Debug unit tests in executable 'gbserver'",
"cargo": {
"args": [
"test",
"--no-run",
"--bin=gbserver",
"--package=gbserver"
],
"filter": {
"name": "gbserver",
"kind": "bin"
}
},
"args": [],
"cwd": "${workspaceFolder}"
}
]
}

14
.zed/debug.json Normal file
View file

@ -0,0 +1,14 @@
[
{
"label": "Build & Debug native binary",
"build": {
"command": "cargo",
"args": ["build"]
},
"program": "$ZED_WORKTREE_ROOT/target/debug/gbserver",
"sourceLanguages": ["rust"],
"request": "launch",
"adapter": "CodeLLDB"
}
]

61
Cargo.toml Normal file
View file

@ -0,0 +1,61 @@
[package]
name = "gbserver"
version = "0.1.0"
edition = "2021"
authors = ["Rodrigo Rodriguez <me@rodrigorodriguez.com>"]
description = "General Bots Server"
license = "AGPL-3.0"
repository = "https://alm.pragmatismo.com.br/generalbots/gbserver"
[features]
default = ["postgres", "qdrant"]
local_llm = []
postgres = ["sqlx/postgres"]
qdrant = ["langchain-rust/qdrant"]
[dependencies]
actix-cors = "0.7"
actix-multipart = "0.7"
actix-web = "4.9"
actix-ws = "0.3"
anyhow = "1.0"
async-stream = "0.3"
async-trait = "0.1"
aes-gcm = "0.10"
argon2 = "0.5"
base64 = "0.22"
bytes = "1.8"
chrono = { version = "0.4", features = ["serde"] }
dotenv = "0.15"
downloader = "0.2"
env_logger = "0.11"
futures = "0.3"
futures-util = "0.3"
imap = "2.4"
langchain-rust = { version = "4.6", features = ["qdrant", "postgres"] }
lettre = { version = "0.11", features = ["smtp-transport", "builder", "tokio1", "tokio1-native-tls"] }
livekit = "0.7"
log = "0.4"
mailparse = "0.15"
minio = { git = "https://github.com/minio/minio-rs", branch = "master" }
native-tls = "0.2"
num-format = "0.4"
qdrant-client = "1.12"
rhai = "1.22"
redis = { version = "0.27", features = ["tokio-comp"] }
regex = "1.11"
reqwest = { version = "0.12", features = ["json", "stream"] }
scraper = "0.20"
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
smartstring = "1.0"
sqlx = { version = "0.8", features = ["time", "uuid", "runtime-tokio-rustls", "postgres", "chrono"] }
tempfile = "3"
thirtyfour = "0.34"
tokio = { version = "1.41", features = ["full"] }
tokio-stream = "0.1"
tracing = "0.1"
tracing-subscriber = { version = "0.3", features = ["fmt"] }
urlencoding = "2.1"
uuid = { version = "1.11", features = ["serde", "v4"] }
zip = "2.2"

519
README-6.md Normal file
View file

@ -0,0 +1,519 @@
# General Bots 6 (GB6) Platform
## Vision
GB6 is a billion-scale real-time communication platform integrating advanced bot capabilities, WebRTC multimedia, and enterprise-grade messaging, built with Rust — for maximum performance and reliability — and a BASIC-to-WebAssembly VM.
## 🌟 Key Features
### Scale & Performance
- Billion+ active users support
- Sub-second message delivery
- 4K video streaming
- 99.99% uptime guarantee
- Zero message loss
- Petabyte-scale storage
### Core Services
- **API Service** (gb-server)
- Actix-based REST & WebSocket
- Multi-tenant request routing
- Authentication & Authorization
- File handling & streaming
- **Media Processing** (gb-media)
- WebRTC integration
- GStreamer transcoding
- Real-time track management
- Professional recording
- **Messaging** (gb-messaging)
- Kafka event processing
- RabbitMQ integration
- WebSocket communication
- Redis PubSub
- **Storage** (gb-storage)
- PostgreSQL with sharding
- Redis caching
- TiKV distributed storage
## 🏗 Architecture
### Multi-Tenant Core
- Organizations
- Instance management
- Resource quotas
- Usage analytics
### Communication Infrastructure
- WebRTC rooms
- Real-time messaging
- Media processing
- Video conferencing
## 🛠 Installation
### Prerequisites
- Rust 1.70+
- Kubernetes cluster
- PostgreSQL 13+
- Redis 6+
- Kafka 3.0+
- GStreamer
# Deploy platform
## Linux && Mac
```
sudo apt update
sudo apt install brave-browser-beta
curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y
source "$HOME/.cargo/env"
git clone https://alm.pragmatismo.com.br/generalbots/gbserver
apt install -y build-essential \
pkg-config \
libssl-dev \
gcc-multilib \
g++-multilib \
clang \
lld \
binutils-dev \
libudev-dev \
libdbus-1-dev
```
## Build
### Build & Run
```bash
# Build all services
cargo build --workspace
# Run tests
cargo test --workspace
# Start API service
cargo run -p gb-server
```
## 📊 Monitoring & Operations
### Health Metrics
- System performance
- Resource utilization
- Error rates
- Latency tracking
### Scaling Operations
- Auto-scaling rules
- Shard management
- Load balancing
- Failover systems
## 🔒 Security
### Authentication & Authorization
- Multi-factor auth
- Role-based access
- Rate limiting
- End-to-end encryption
### Data Protection
- Tenant isolation
- Encryption at rest
- Secure communications
- Audit logging
## 🚀 Development
### Project Structure
```
general-bots/
├── gb-server/ # API service
├── gb-core/ # Core functionality
├── gb-media/ # Media processing
├── gb-messaging/ # Message brokers
├── gb-storage/ # Data storage
├── gb-utils/ # Utilities
└── migrations/ # DB migrations
```
### Configuration
```env
DATABASE_URL=postgresql://user:password@localhost:5432/gbdb
REDIS_URL=redis://localhost:6379
KAFKA_BROKERS=localhost:9092
RABBIT_URL=amqp://guest:guest@localhost:5672
```
## 🌍 Deployment
### Global Infrastructure
- Edge presence
- Regional optimization
- Content delivery
- Traffic management
### Disaster Recovery
- Automated backups
- Multi-region failover
- Data replication
- System redundancy
## 🤝 Contributing
1. Fork repository
2. Create feature branch
3. Implement changes
4. Add tests
5. Submit PR
## 📝 License
Licensed under terms specified in workspace configuration.
## 🆘 Support
### Issues
- Check existing issues
- Provide reproduction steps
- Include relevant logs
- Follow up on discussions
### Documentation
- API references
- Integration guides
- Deployment docs
- Best practices
## 🔮 Roadmap
### Short Term
- Enhanced media processing
- Additional messaging protocols
- Improved scalability
- Extended monitoring
### Long Term
- Advanced analytics
- Global expansion
- Enterprise features
| ✓ | Requirement | Component | Standard | Implementation Steps |
|---|-------------|-----------|-----------|---------------------|
| ✅ | TLS 1.3 Configuration | Nginx | All | Configure modern SSL parameters and ciphers in `/etc/nginx/conf.d/ssl.conf` |
| ✅ | Access Logging | Nginx | All | Enable detailed access logs with privacy fields in `/etc/nginx/nginx.conf` |
| ⬜ | Rate Limiting | Nginx | ISO 27001 | Implement rate limiting rules in location blocks |
| ⬜ | WAF Rules | Nginx | HIPAA | Install and configure ModSecurity with OWASP rules |
| ✅ | Reverse Proxy Security | Nginx | All | Configure security headers (X-Frame-Options, HSTS, CSP) |
| ✅ | MFA Implementation | Zitadel | All | Enable and enforce MFA for all administrative accounts |
| ✅ | RBAC Configuration | Zitadel | All | Set up role-based access control with least privilege |
| ✅ | Password Policy | Zitadel | All | Configure strong password requirements (length, complexity, history) |
| ✅ | OAuth2/OIDC Setup | Zitadel | ISO 27001 | Configure secure OAuth flows and token policies |
| ✅ | Audit Logging | Zitadel | All | Enable comprehensive audit logging for user activities |
| ✅ | Encryption at Rest | MinIO | All | Configure encrypted storage with key management |
| ✅ | Bucket Policies | MinIO | All | Implement strict bucket access policies |
| ✅ | Object Versioning | MinIO | HIPAA | Enable versioning for data recovery capability |
| ✅ | Access Logging | MinIO | All | Enable detailed access logging for object operations |
| ⬜ | Lifecycle Rules | MinIO | LGPD | Configure data retention and deletion policies |
| ✅ | DKIM/SPF/DMARC | Stalwart | All | Configure email authentication mechanisms |
| ✅ | Mail Encryption | Stalwart | All | Enable TLS for mail transport |
| ✅ | Content Filtering | Stalwart | All | Implement content scanning and filtering rules |
| ⬜ | Mail Archiving | Stalwart | HIPAA | Configure compliant email archiving |
| ✅ | Sieve Filtering | Stalwart | All | Implement security-focused mail filtering rules |
| ⬜ | System Hardening | Ubuntu | All | Apply CIS Ubuntu Linux benchmarks |
| ✅ | System Updates | Ubuntu | All | Configure unattended-upgrades for security patches |
| ⬜ | Audit Daemon | Ubuntu | All | Configure auditd for system event logging |
| ✅ | Firewall Rules | Ubuntu | All | Configure UFW with restrictive rules |
| ⬜ | Disk Encryption | Ubuntu | All | Implement LUKS encryption for system disks |
| ⬜ | SELinux/AppArmor | Ubuntu | All | Enable and configure mandatory access control |
| ✅ | Monitoring Setup | All | All | Install and configure Prometheus + Grafana |
| ✅ | Log Aggregation | All | All | Implement centralized logging (e.g., ELK Stack) |
| ⬜ | Backup System | All | All | Configure automated backup system with encryption |
| ✅ | Network Isolation | All | All | Implement proper network segmentation |
| ✅ | Data Classification | All | HIPAA/LGPD | Document data types and handling procedures |
| ✅ | Session Management | Zitadel | All | Configure secure session timeouts and invalidation |
| ✅ | Certificate Management | All | All | Implement automated certificate renewal with Let's Encrypt |
| ✅ | Vulnerability Scanning | All | ISO 27001 | Regular automated scanning with tools like OpenVAS |
| ✅ | Incident Response Plan | All | All | Document and test incident response procedures |
| ✅ | Disaster Recovery | All | HIPAA | Implement and test disaster recovery procedures |
## Documentation Requirements
1. **Security Policies**
- Information Security Policy
- Access Control Policy
- Password Policy
- Data Protection Policy
- Incident Response Plan
2. **Procedures**
- Backup and Recovery Procedures
- Change Management Procedures
- Access Review Procedures
- Security Incident Procedures
- Data Breach Response Procedures
3. **Technical Documentation**
- Network Architecture Diagrams
- System Configuration Documentation
- Security Controls Documentation
- Encryption Standards Documentation
- Logging and Monitoring Documentation
4. **Compliance Records**
- Risk Assessment Reports
- Audit Logs
- Training Records
- Incident Reports
- Access Review Records
## Regular Maintenance Tasks
- Weekly security updates
- Monthly access reviews
- Quarterly compliance audits
- Annual penetration testing
- Bi-annual disaster recovery testing
### **Key Open Source Tools in Rust/Go**:
1. **Zitadel (Go)**: Identity and access management for secure authentication.
2. **Stalwart (Rust)**: Secure email server for threat detection.
3. **MinIO (Go)**: High-performance object storage for unstructured data.
4. **Ubuntu Advantage (Go/Rust tools)**: Compliance and security tools for Ubuntu.
5. **Tantivy (Rust)**: Full-text search engine for data discovery.
6. **Drone (Go)**: CI/CD platform for DevOps automation.
7. **Temporal (Go)**: Workflow orchestration engine.
8. **Caddy (Go)**: Web server for seamless customer experiences.
9. **SeaweedFS (Go)**: Distributed file system for secure file sharing.
10. **Vector (Rust)**: Observability pipeline for monitoring.
11. **Tyk (Go)**: API gateway for secure API management.
12. **Vault (Go)**: Secrets management and encryption.
13. **Hugging Face Transformers (Rust/Go bindings)**: LLM integration and fine-tuning.
14. **Kubernetes (Go)**: Container orchestration for scalable deployments.
15. **Matrix (Rust)**: Real-time communication and collaboration.
# API:
## **File & Document Management**
/files/upload
/files/download
/files/copy
/files/move
/files/delete
/files/getContents
/files/save
/files/createFolder
/files/shareFolder
/files/dirFolder
/files/list
/files/search
/files/recent
/files/favorite
/files/versions
/files/restore
/files/permissions
/files/quota
/files/shared
/files/sync/status
/files/sync/start
/files/sync/stop
---
### **Document Processing**
/docs/merge
/docs/convert
/docs/fill
/docs/export
/docs/import
---
### **Groups & Organizations**
/groups/create
/groups/update
/groups/delete
/groups/list
/groups/search
/groups/members
/groups/members/add
/groups/members/remove
/groups/permissions
/groups/settings
/groups/analytics
/groups/join/request
/groups/join/approve
/groups/join/reject
/groups/invites/send
/groups/invites/list
---
### **Conversations & Real-time Communication**
/conversations/create
/conversations/join
/conversations/leave
/conversations/members
/conversations/messages
/conversations/messages/send
/conversations/messages/edit
/conversations/messages/delete
/conversations/messages/react
/conversations/messages/pin
/conversations/messages/search
/conversations/calls/start
/conversations/calls/join
/conversations/calls/leave
/conversations/calls/mute
/conversations/calls/unmute
/conversations/screen/share
/conversations/screen/stop
/conversations/recording/start
/conversations/recording/stop
/conversations/whiteboard/create
/conversations/whiteboard/collaborate
---
### **Communication Services**
/comm/email/send
/comm/email/template
/comm/email/schedule
/comm/email/cancel
/comm/sms/send
/comm/sms/bulk
/comm/notifications/send
/comm/notifications/preferences
/comm/broadcast/send
/comm/contacts/import
/comm/contacts/export
/comm/contacts/sync
/comm/contacts/groups
---
### **User Management & Authentication**
/users/create
/users/update
/users/delete
/users/list
/users/search
/users/profile
/users/profile/update
/users/settings
/users/permissions
/users/roles
/users/status
/users/presence
/users/activity
/users/security/2fa/enable
/users/security/2fa/disable
/users/security/devices
/users/security/sessions
/users/notifications/settings
---
### **Calendar & Task Management**
/calendar/events/create
/calendar/events/update
/calendar/events/delete
/calendar/events/list
/calendar/events/search
/calendar/availability/check
/calendar/schedule/meeting
/calendar/reminders/set
/tasks/create
/tasks/update
/tasks/delete
/tasks/list
/tasks/assign
/tasks/status/update
/tasks/priority/set
/tasks/dependencies/set
---
### **Storage & Data Management**
/storage/save
/storage/batch
/storage/json
/storage/delete
/storage/quota/check
/storage/cleanup
/storage/backup/create
/storage/backup/restore
/storage/archive
/storage/metrics
---
### **Analytics & Reporting**
/analytics/dashboard
/analytics/reports/generate
/analytics/reports/schedule
/analytics/metrics/collect
/analytics/insights/generate
/analytics/trends/analyze
/analytics/export
---
### **System & Administration**
/admin/system/status
/admin/system/metrics
/admin/logs/view
/admin/logs/export
/admin/config/update
/admin/maintenance/schedule
/admin/backup/create
/admin/backup/restore
/admin/users/manage
/admin/roles/manage
/admin/quotas/manage
/admin/licenses/manage
---
### **AI & Machine Learning**
/ai/analyze/text
/ai/analyze/image
/ai/generate/text
/ai/generate/image
/ai/translate
/ai/summarize
/ai/recommend
/ai/train/model
/ai/predict
---
### **Security & Compliance**
/security/audit/logs
/security/compliance/check
/security/threats/scan
/security/access/review
/security/encryption/manage
/security/certificates/manage
---
### **Health & Monitoring**
/health
/health/detailed
/monitoring/status
/monitoring/alerts
/monitoring/metrics
Built with ❤️ from Brazil, using Rust for maximum performance and reliability.

201
docs/keywords/PROMPT.md Normal file
View file

@ -0,0 +1,201 @@
# Modelo de Prompt para Aprendizado de BASIC em Markdown
## 🎯 **ESTRUTURA PARA APRENDIZ DE BASIC**
```
**CONCEITO BASIC:**
[Nome do conceito ou comando]
**NÍVEL:**
☐ Iniciante ☐ Intermediário ☐ Avançado
**OBJETIVO DE APRENDIZADO:**
[O que você quer entender ou criar]
**CÓDIGO EXEMPLO:**
```basic
[Seu código ou exemplo aqui]
```
**DÚVIDAS ESPECÍFICAS:**
- [Dúvida 1 sobre o conceito]
- [Dúvida 2 sobre sintaxe]
- [Dúvida 3 sobre aplicação]
**CONTEXTO DO PROJETO:**
[Descrição do que está tentando fazer]
**RESULTADO ESPERADO:**
[O que o código deve fazer]
**PARTES QUE NÃO ENTENDE:**
- [Trecho específico do código]
- [Mensagem de erro]
- [Lógica confusa]
```
---
## 📚 **EXEMPLO PRÁTICO: LOOP FOR**
```
**CONCEITO BASIC:**
LOOP FOR
**NÍVEL:**
☒ Iniciante ☐ Intermediário ☐ Avançado
**OBJETIVO DE APRENDIZADO:**
Entender como criar um contador de 1 a 10
**CÓDIGO EXEMPLO:**
```basic
10 FOR I = 1 TO 10
20 PRINT "Número: "; I
30 NEXT I
```
**DÚVIDAS ESPECÍFICAS:**
- O que significa "NEXT I"?
- Posso usar outras letras além de "I"?
- Como fazer contagem regressiva?
**CONTEXTO DO PROJETO:**
Estou criando um programa que lista números
**RESULTADO ESPERADO:**
Que apareça: Número: 1, Número: 2, etc.
**PARTES QUE NÃO ENTENDE:**
- Por que precisa do número 10 na linha 10?
- O que acontece se esquecer o NEXT?
```
---
## 🛠️ **MODELO PARA RESOLVER ERROS**
```
**ERRO NO BASIC:**
[Mensagem de erro ou comportamento estranho]
**MEU CÓDIGO:**
```basic
[Coloque seu código completo]
```
**LINHA COM PROBLEMA:**
[Linha específica onde ocorre o erro]
**COMPORTAMENTO ESPERADO:**
[O que deveria acontecer]
**COMPORTAMENTO ATUAL:**
[O que está acontecendo de errado]
**O QUE JÁ TENTEI:**
- [Tentativa 1 de correção]
- [Tentativa 2]
- [Tentativa 3]
**VERSÃO DO BASIC:**
[QBASIC, GW-BASIC, FreeBASIC, etc.]
```
---
## 📖 **MODELO PARA EXPLICAR COMANDOS**
```
**COMANDO:**
[Nome do comando - ex: PRINT, INPUT, GOTO]
**SYNTAX:**
[Como escrever corretamente]
**PARÂMETROS:**
- Parâmetro 1: [Função]
- Parâmetro 2: [Função]
**EXEMPLO SIMPLES:**
```basic
[Exemplo mínimo e funcional]
```
**EXEMPLO PRÁTICO:**
```basic
[Exemplo em contexto real]
```
**ERROS COMUNS:**
- [Erro frequente 1]
- [Erro frequente 2]
**DICA PARA INICIANTES:**
[Dica simples para não errar]
**EXERCÍCIO SUGERIDO:**
[Pequeno exercício para praticar]
```
---
## 🎨 **FORMATAÇÃO MARKDOWN PARA BASIC**
### **Como documentar seu código em .md:**
```markdown
# [NOME DO PROGRAMA]
## 🎯 OBJETIVO
[O que o programa faz]
## 📋 COMO USAR
1. [Passo 1]
2. [Passo 2]
## 🧩 CÓDIGO FONTE
```basic
[Seu código aqui]
```
## 🔍 EXPLICAÇÃO
- **Linha X**: [Explicação]
- **Linha Y**: [Explicação]
## 🚀 EXEMPLO DE EXECUÇÃO
```
[Saída do programa]
```
```
---
## 🏆 **MODELO DE PROJETO COMPLETO**
```
# PROJETO BASIC: [NOME]
## 📝 DESCRIÇÃO
[Descrição do que o programa faz]
## 🎨 FUNCIONALIDADES
- [ ] Funcionalidade 1
- [ ] Funcionalidade 2
- [ ] Funcionalidade 3
## 🧩 ESTRUTURA DO CÓDIGO
```basic
[Seu código organizado]
```
## 🎯 APRENDIZADOS
- [Conceito 1 aprendido]
- [Conceito 2 aprendido]
## ❓ DÚVIDAS PARA EVOLUIR
- [Dúvida para melhorar]
- [O que gostaria de fazer depois]
```
Generate several examples
for this keyword (implemented in Rhai), written specifically for a BASIC-beginner audience:

402
docs/keywords/format.md Normal file
View file

@ -0,0 +1,402 @@
# 📚 **BASIC LEARNING EXAMPLES - FORMAT Function**
## 🎯 **EXAMPLE 1: BASIC CONCEPT OF FORMAT FUNCTION**
```
**BASIC CONCEPT:**
FORMAT FUNCTION - Value formatting
**LEVEL:**
☒ Beginner ☐ Intermediate ☐ Advanced
**LEARNING OBJECTIVE:**
Understand how to format numbers, dates, and text
**CODE EXAMPLE:**
```basic
10 NUMBER = 1234.56
20 TEXT$ = "John"
30 DATE$ = "2024-03-15 14:30:00"
40
50 PRINT FORMAT(NUMBER, "n") ' 1234.56
60 PRINT FORMAT(NUMBER, "F") ' 1234.56
70 PRINT FORMAT(TEXT$, "Hello @!") ' Hello John!
80 PRINT FORMAT(DATE$, "dd/MM/yyyy") ' 15/03/2024
```
**SPECIFIC QUESTIONS:**
- What's the difference between "n" and "F"?
- What does "@" mean in text?
- How to format dates in Brazilian format?
**PROJECT CONTEXT:**
I need to display data in a nicer way
**EXPECTED RESULT:**
Values formatted according to the pattern
**PARTS I DON'T UNDERSTAND:**
- When to use each type of formatting
- How it works internally
```
---
## 🛠️ **EXAMPLE 2: NUMERIC FORMATTING**
```
**BASIC CONCEPT:**
NUMBER FORMATTING
**LEVEL:**
☒ Beginner ☐ Intermediate ☐ Advanced
**LEARNING OBJECTIVE:**
Learn to format numbers as currency and with separators
**CODE EXAMPLE:**
```basic
10 VALUE = 1234567.89
20
30 PRINT "Standard: "; FORMAT(VALUE, "n") ' 1234567.89
40 PRINT "Decimal: "; FORMAT(VALUE, "F") ' 1234567.89
45 PRINT "Integer: "; FORMAT(VALUE, "f") ' 1234567
50 PRINT "Percentage: "; FORMAT(0.856, "0%") ' 86%
60
70 ' Formatting with locale
80 PRINT "Dollar: "; FORMAT(VALUE, "C2[en]") ' $1,234,567.89
90 PRINT "Real: "; FORMAT(VALUE, "C2[pt]") ' R$ 1.234.567,89
100 PRINT "Euro: "; FORMAT(VALUE, "C2[fr]") ' €1,234,567.89
```
**SPECIFIC QUESTIONS:**
- What does "C2[pt]" mean?
- How to change decimal places?
- Which locales are available?
**PROJECT CONTEXT:**
Multi-currency financial system
**EXPECTED RESULT:**
Numbers formatted according to regional standards
**PARTS I DON'T UNDERSTAND:**
- Syntax of complex patterns
- Differences between locales
```
---
## 📖 **EXAMPLE 3: EXPLAINING FORMAT COMMAND**
```
**COMMAND:**
FORMAT - Formats values
**SYNTAX:**
```basic
RESULT$ = FORMAT(VALUE, PATTERN$)
```
**PARAMETERS:**
- VALUE: Number, date or text to format
- PATTERN$: String with formatting pattern
**SIMPLE EXAMPLE:**
```basic
10 PRINT FORMAT(123.45, "n") ' 123.45
20 PRINT FORMAT("Mary", "Ms. @") ' Ms. Mary
```
**PRACTICAL EXAMPLE:**
```basic
10 INPUT "Name: "; NAME$
20 INPUT "Salary: "; SALARY
30 INPUT "Birth date: "; BIRTH_DATE$
40
50 PRINT "Record:"
60 PRINT "Name: "; FORMAT(NAME$, "!") ' UPPERCASE
70 PRINT "Salary: "; FORMAT(SALARY, "C2[en]") ' $1,234.56
80 PRINT "Birth: "; FORMAT(BIRTH_DATE$, "MM/dd/yyyy")
```
**COMMON ERRORS:**
- Using wrong pattern for data type
- Forgetting it returns string
- Formatting date without correct format
**BEGINNER TIP:**
Test each pattern separately before using in project
**SUGGESTED EXERCISE:**
Create a bank statement with professional formatting
```
---
## 🎨 **EXAMPLE 4: DATE AND TIME FORMATTING**
```
**BASIC CONCEPT:**
DATE AND TIME FORMATTING
**LEVEL:**
☐ Beginner ☒ Intermediate ☐ Advanced
**LEARNING OBJECTIVE:**
Learn all date formatting patterns
**CODE EXAMPLE:**
```basic
10 DATE$ = "2024-03-15 14:30:25"
20
30 PRINT "Brazilian: "; FORMAT(DATE$, "dd/MM/yyyy") ' 15/03/2024
40 PRINT "Complete: "; FORMAT(DATE$, "dd/MM/yyyy HH:mm") ' 15/03/2024 14:30
50 PRINT "US: "; FORMAT(DATE$, "MM/dd/yyyy") ' 03/15/2024
60 PRINT "International: "; FORMAT(DATE$, "yyyy-MM-dd") ' 2024-03-15
70
80 PRINT "24h Time: "; FORMAT(DATE$, "HH:mm:ss") ' 14:30:25
90 PRINT "12h Time: "; FORMAT(DATE$, "hh:mm:ss tt") ' 02:30:25 PM
100 PRINT "Long date: "; FORMAT(DATE$, "dd 'of' MMMM 'of' yyyy")
```
**SPECIFIC QUESTIONS:**
- What's the difference between HH and hh?
- How to show month name?
- What is "tt"?
**PROJECT CONTEXT:**
Scheduling system and reports
**EXPECTED RESULT:**
Dates formatted according to needs
**PARTS I DON'T UNDERSTAND:**
- All formatting codes
- How milliseconds work
```
---
## 🏆 **EXAMPLE 5: COMPLETE PROJECT - BANK STATEMENT**
```
# BASIC PROJECT: FORMATTED BANK STATEMENT
## 📝 DESCRIPTION
System that generates bank statement with professional formatting
## 🎨 FEATURES
- [x] Currency formatting
- [x] Date formatting
- [x] Value alignment
## 🧩 CODE STRUCTURE
```basic
10 ' Customer data
20 NAME$ = "Carlos Silva"
30 BALANCE = 12567.89
40
50 ' Transactions
60 DIM DATES$(3), DESCRIPTIONS$(3), AMOUNTS(3)
70 DATES$(1) = "2024-03-10 09:15:00" : DESCRIPTIONS$(1) = "Deposit" : AMOUNTS(1) = 2000
80 DATES$(2) = "2024-03-12 14:20:00" : DESCRIPTIONS$(2) = "Withdrawal" : AMOUNTS(2) = -500
90 DATES$(3) = "2024-03-14 11:30:00" : DESCRIPTIONS$(3) = "Transfer" : AMOUNTS(3) = -150.50
100
110 ' Header
120 PRINT FORMAT("BANK STATEMENT", "!")
130 PRINT "Customer: "; FORMAT(NAME$, "&")
140 PRINT "Date: "; FORMAT("2024-03-15 08:00:00", "dd/MM/yyyy HH:mm")
150 PRINT STRING$(40, "-")
160
170 ' Transactions
180 FOR I = 1 TO 3
190 FORMATTED_DATE$ = FORMAT(DATES$(I), "dd/MM HH:mm")
200 FORMATTED_AMOUNT$ = FORMAT(AMOUNTS(I), "C2[en]")
210
220 PRINT FORMATTED_DATE$; " - ";
230 PRINT DESCRIPTIONS$(I);
240 PRINT TAB(30); FORMATTED_AMOUNT$
250 NEXT I
260
270 ' Balance
280 PRINT STRING$(40, "-")
290 PRINT "Balance: "; TAB(30); FORMAT(BALANCE, "C2[en]")
```
## 🎯 LEARNINGS
- Currency formatting with locale
- Date formatting
- Composition of multiple formats
## ❓ QUESTIONS TO EVOLVE
- How to perfectly align columns?
- How to format negative numbers in red?
- How to add more locales?
```
---
## 🛠️ **EXAMPLE 6: TEXT FORMATTING**
```
**BASIC CONCEPT:**
STRING/TEXT FORMATTING
**LEVEL:**
☒ Beginner ☐ Intermediate ☐ Advanced
**LEARNING OBJECTIVE:**
Learn to use placeholders in text
**CODE EXAMPLE:**
```basic
10 NAME$ = "Mary"
20 CITY$ = "são paulo"
21 COUNTRY$ = "BRAZIL"
22 AGE = 25
30
40 PRINT FORMAT(NAME$, "Hello @!") ' Hello Mary!
50 PRINT FORMAT(NAME$, "Welcome, @") ' Welcome, Mary
60 PRINT FORMAT(CITY$, "City: !") ' City: SÃO PAULO
70 PRINT FORMAT(CITY$, "City: &") ' City: são paulo
80 PRINT FORMAT(COUNTRY$, "Country: &") ' Country: brazil
90
100 ' Combining with numbers
110 PRINT FORMAT(NAME$, "@ is ") + FORMAT(AGE, "n") + " years old"
120 ' Mary is 25 years old
```
**SPECIFIC QUESTIONS:**
- What's the difference between @, ! and &?
- Can I use multiple placeholders?
- How to escape special characters?
**PROJECT CONTEXT:**
Personalized report generation
**EXPECTED RESULT:**
Dynamic texts formatted automatically
**PARTS I DON'T UNDERSTAND:**
- Placeholder limitations
- How to mix different types
```
---
## 📚 **EXAMPLE 7: PRACTICAL EXERCISES**
```
# EXERCISES: PRACTICING WITH FORMAT
## 🎯 EXERCISE 1 - BASIC
Create a program that formats product prices.
**SOLUTION:**
```basic
10 DIM PRODUCTS$(3), PRICES(3)
20 PRODUCTS$(1) = "Laptop" : PRICES(1) = 2500.99
30 PRODUCTS$(2) = "Mouse" : PRICES(2) = 45.5
40 PRODUCTS$(3) = "Keyboard" : PRICES(3) = 120.75
50
60 FOR I = 1 TO 3
70 PRINT FORMAT(PRODUCTS$(I), "@: ") + FORMAT(PRICES(I), "C2[en]")
80 NEXT I
```
## 🎯 EXERCISE 2 - INTERMEDIATE
Make a program that shows dates in different formats.
**SOLUTION:**
```basic
10 DATE$ = "2024-12-25 20:00:00"
20
30 PRINT "Christmas: "; FORMAT(DATE$, "dd/MM/yyyy")
40 PRINT "US: "; FORMAT(DATE$, "MM/dd/yyyy")
50 PRINT "Dinner: "; FORMAT(DATE$, "HH'h'mm")
60 PRINT "Formatted: "; FORMAT(DATE$, "dd 'of' MMMM 'of' yyyy 'at' HH:mm")
```
## 🎯 EXERCISE 3 - ADVANCED
Create a school report card system with formatting.
**SOLUTION:**
```basic
10 NAME$ = "ana silva"
20 AVERAGE = 8.75
21 ATTENDANCE = 0.92
30 REPORT_DATE$ = "2024-03-15 10:00:00"
40
50 PRINT FORMAT("SCHOOL REPORT CARD", "!")
60 PRINT "Student: "; FORMAT(NAME$, "&")
70 PRINT "Date: "; FORMAT(REPORT_DATE$, "dd/MM/yyyy")
80 PRINT "Average: "; FORMAT(AVERAGE, "n")
90 PRINT "Attendance: "; FORMAT(ATTENDANCE, "0%")
```
## 💡 TIPS
- Always test patterns before using
- Use PRINT to see each formatting result
- Combine simple formats to create complex ones
```
---
## 🎨 **EXAMPLE 8: COMPLETE REFERENCE GUIDE**
```markdown
# FORMAT FUNCTION - COMPLETE GUIDE
## 🎯 OBJECTIVE
Format numbers, dates and text professionally
## 📋 SYNTAX
```basic
RESULT$ = FORMAT(VALUE, PATTERN$)
```
## 🔢 NUMERIC FORMATTING
| Pattern | Example | Result |
|---------|---------|--------|
| "n" | `FORMAT(1234.5, "n")` | 1234.50 |
| "F" | `FORMAT(1234.5, "F")` | 1234.50 |
| "f" | `FORMAT(1234.5, "f")` | 1234 |
| "0%" | `FORMAT(0.85, "0%")` | 85% |
| "C2[en]" | `FORMAT(1234.5, "C2[en]")` | $1,234.50 |
| "C2[pt]" | `FORMAT(1234.5, "C2[pt]")` | R$ 1.234,50 |
## 📅 DATE FORMATTING
| Code | Meaning | Example |
|------|---------|---------|
| yyyy | 4-digit year | 2024 |
| yy | 2-digit year | 24 |
| MM | 2-digit month | 03 |
| M | 1-2 digit month | 3 |
| dd | 2-digit day | 05 |
| d | 1-2 digit day | 5 |
| HH | 24h hour 2-digit | 14 |
| H | 24h hour 1-2 digit | 14 |
| hh | 12h hour 2-digit | 02 |
| h | 12h hour 1-2 digit | 2 |
| mm | 2-digit minute | 05 |
| m | 1-2 digit minute | 5 |
| ss | 2-digit second | 09 |
| s | 1-2 digit second | 9 |
| tt | AM/PM | PM |
| t | A/P | P |
## 📝 TEXT FORMATTING
| Placeholder | Function | Example |
|-------------|----------|---------|
| @ | Insert original text | `FORMAT("John", "@")` → John |
| ! | Text in UPPERCASE | `FORMAT("John", "!")` → JOHN |
| & | Text in lowercase | `FORMAT("John", "&")` → john |
## ⚠️ LIMITATIONS
- Dates must be in "YYYY-MM-DD HH:MM:SS" format
- Very large numbers may have issues
- Supported locales: en, pt, fr, de, es, it
```
These examples cover from basic to advanced applications of the FORMAT function! 🚀

348
docs/keywords/last.md Normal file
View file

@ -0,0 +1,348 @@
# 📚 **BASIC LEARNING EXAMPLES - LAST Function**
## 🎯 **EXAMPLE 1: BASIC CONCEPT OF LAST FUNCTION**
```
**BASIC CONCEPT:**
LAST FUNCTION - Extract last word
**LEVEL:**
☒ Beginner ☐ Intermediate ☐ Advanced
**LEARNING OBJECTIVE:**
Understand how the LAST function extracts the last word from text
**CODE EXAMPLE:**
```basic
10 PALAVRA$ = "The mouse chewed the clothes"
20 ULTIMA$ = LAST(PALAVRA$)
30 PRINT "Last word: "; ULTIMA$
```
**SPECIFIC QUESTIONS:**
- How does the function know where the last word ends?
- What happens if there are extra spaces?
- Can I use it with numeric variables?
**PROJECT CONTEXT:**
I'm creating a program that analyzes sentences
**EXPECTED RESULT:**
Should display: "Last word: clothes"
**PARTS I DON'T UNDERSTAND:**
- Why are parentheses needed?
- How does the function work internally?
```
---
## 🛠️ **EXAMPLE 2: SOLVING ERROR WITH LAST**
```
**BASIC ERROR:**
"Syntax error" when using LAST
**MY CODE:**
```basic
10 TEXTO$ = "Good day world"
20 RESULTADO$ = LAST TEXTO$
30 PRINT RESULTADO$
```
**PROBLEM LINE:**
Line 20
**EXPECTED BEHAVIOR:**
Show "world" on screen
**CURRENT BEHAVIOR:**
Syntax error
**WHAT I'VE TRIED:**
- Tried without parentheses
- Tried with different quotes
- Tried changing variable name
**BASIC VERSION:**
QBASIC with Rhai extension
**CORRECTED SOLUTION:**
```basic
10 TEXTO$ = "Good day world"
20 RESULTADO$ = LAST(TEXTO$)
30 PRINT RESULTADO$
```
```
---
## 📖 **EXAMPLE 3: EXPLAINING LAST COMMAND**
```
**COMMAND:**
LAST - Extracts last word
**SYNTAX:**
```basic
ULTIMA$ = LAST(TEXTO$)
```
**PARAMETERS:**
- TEXTO$: String from which to extract the last word
**SIMPLE EXAMPLE:**
```basic
10 FRASE$ = "The sun is bright"
20 ULTIMA$ = LAST(FRASE$)
30 PRINT ULTIMA$ ' Shows: bright
```
**PRACTICAL EXAMPLE:**
```basic
10 INPUT "Enter your full name: "; NOME$
20 SOBRENOME$ = LAST(NOME$)
30 PRINT "Hello Mr./Mrs. "; SOBRENOME$
```
**COMMON ERRORS:**
- Forgetting parentheses: `LAST TEXTO$`
- Using with numbers: `LAST(123)`
- Forgetting to assign to a variable
**BEGINNER TIP:**
Always use parentheses and ensure content is text
**SUGGESTED EXERCISE:**
Create a program that asks for a sentence and shows the first and last word
```
---
## 🎨 **EXAMPLE 4: COMPLETE PROJECT WITH LAST**
```
# BASIC PROJECT: SENTENCE ANALYZER
## 📝 DESCRIPTION
Program that analyzes sentences and extracts useful information
## 🎨 FEATURES
- [x] Extract last word
- [x] Count words
- [x] Show statistics
## 🧩 CODE STRUCTURE
```basic
10 PRINT "=== SENTENCE ANALYZER ==="
20 INPUT "Enter a sentence: "; FRASE$
30
40 ' Extract last word
50 ULTIMA$ = LAST(FRASE$)
60
70 ' Count words (simplified)
80 PALAVRAS = 1
90 FOR I = 1 TO LEN(FRASE$)
100 IF MID$(FRASE$, I, 1) = " " THEN PALAVRAS = PALAVRAS + 1
110 NEXT I
120
130 PRINT
140 PRINT "Last word: "; ULTIMA$
150 PRINT "Total words: "; PALAVRAS
160 PRINT "Original sentence: "; FRASE$
```
## 🎯 LEARNINGS
- How to use LAST function
- How to count words manually
- String manipulation in BASIC
## ❓ QUESTIONS TO EVOLVE
- How to extract the first word?
- How to handle punctuation?
- How to work with multiple sentences?
```
---
## 🏆 **EXAMPLE 5: SPECIAL CASES AND TESTS**
```
**BASIC CONCEPT:**
SPECIAL CASES OF LAST FUNCTION
**LEVEL:**
☐ Beginner ☒ Intermediate ☐ Advanced
**LEARNING OBJECTIVE:**
Understand how LAST behaves in special situations
**CODE EXAMPLES:**
```basic
' Case 1: Empty string
10 TEXTO$ = ""
20 PRINT LAST(TEXTO$) ' Result: ""
' Case 2: Single word only
30 TEXTO$ = "Sun"
40 PRINT LAST(TEXTO$) ' Result: "Sun"
' Case 3: Multiple spaces
50 TEXTO$ = "Hello World "
60 PRINT LAST(TEXTO$) ' Result: "World"
' Case 4: With tabs and newlines
70 TEXTO$ = "Line1" + CHR$(9) + "Line2" + CHR$(13)
80 PRINT LAST(TEXTO$) ' Result: "Line2"
```
**SPECIFIC QUESTIONS:**
- What happens with empty strings?
- How does it work with special characters?
- Is it case-sensitive?
**PROJECT CONTEXT:**
I need to robustly validate user inputs
**EXPECTED RESULT:**
Consistent behavior in all cases
**PARTS I DON'T UNDERSTAND:**
- How the function handles whitespace?
- What are CHR$(9) and CHR$(13)?
```
---
## 🛠️ **EXAMPLE 6: INTEGRATION WITH OTHER FUNCTIONS**
```
**BASIC CONCEPT:**
COMBINING LAST WITH OTHER FUNCTIONS
**LEVEL:**
☐ Beginner ☒ Intermediate ☐ Advanced
**LEARNING OBJECTIVE:**
Learn to use LAST in more complex expressions
**CODE EXAMPLE:**
```basic
10 ' Example 1: With concatenation
20 PARTE1$ = "Programming"
30 PARTE2$ = " in BASIC"
40 FRASE_COMPLETA$ = PARTE1$ + PARTE2$
50 PRINT LAST(FRASE_COMPLETA$) ' Result: "BASIC"
60 ' Example 2: With string functions
70 NOME_COMPLETO$ = "Maria Silva Santos"
80 SOBRENOME$ = LAST(NOME_COMPLETO$)
90 PRINT "Mr./Mrs. "; SOBRENOME$
100 ' Example 3: In conditional expressions
110 FRASE$ = "The sky is blue"
120 IF LAST(FRASE$) = "blue" THEN PRINT "The last word is blue!"
```
**SPECIFIC QUESTIONS:**
- Can I use LAST directly in IF?
- How to combine with LEFT$, RIGHT$, MID$?
- Is there a size limit for the string?
**PROJECT CONTEXT:**
Creating validations and text processing
**EXPECTED RESULT:**
Use LAST flexibly in different contexts
**PARTS I DON'T UNDERSTAND:**
- Expression evaluation order
- Performance with very large strings
```
---
## 📚 **EXAMPLE 7: PRACTICAL EXERCISES**
```
# EXERCISES: PRACTICING WITH LAST
## 🎯 EXERCISE 1 - BASIC
Create a program that asks for the user's full name and greets using only the last name.
**SOLUTION:**
```basic
10 INPUT "Enter your full name: "; NOME$
20 SOBRENOME$ = LAST(NOME$)
30 PRINT "Hello, Mr./Mrs. "; SOBRENOME$; "!"
```
## 🎯 EXERCISE 2 - INTERMEDIATE
Make a program that analyzes if the last word of a sentence is "end".
**SOLUTION:**
```basic
10 INPUT "Enter a sentence: "; FRASE$
20 IF LAST(FRASE$) = "end" THEN PRINT "Sentence ends with 'end'" ELSE PRINT "Sentence doesn't end with 'end'"
```
## 🎯 EXERCISE 3 - ADVANCED
Create a program that processes multiple sentences and shows statistics.
**SOLUTION:**
```basic
10 DIM FRASES$(3)
20 FRASES$(1) = "The sun shines"
30 FRASES$(2) = "The rain falls"
40 FRASES$(3) = "The wind blows"
50
60 FOR I = 1 TO 3
70 PRINT "Sentence "; I; ": "; FRASES$(I)
80 PRINT "Last word: "; LAST(FRASES$(I))
90 PRINT
100 NEXT I
```
## 💡 TIPS
- Always test with different inputs
- Use PRINT for debugging
- Start with simple examples
```
---
## 🎨 **EXAMPLE 8: MARKDOWN DOCUMENTATION**
```markdown
# LAST FUNCTION - COMPLETE GUIDE
## 🎯 OBJECTIVE
Extract the last word from a string
## 📋 SYNTAX
```basic
RESULTADO$ = LAST(TEXTO$)
```
## 🧩 PARAMETERS
- `TEXTO$`: Input string
## 🔍 BEHAVIOR
- Splits string by spaces
- Returns the last part
- Ignores extra spaces at beginning/end
## 🚀 EXAMPLES
```basic
10 PRINT LAST("hello world") ' Output: world
20 PRINT LAST("one word") ' Output: word
30 PRINT LAST(" spaces ") ' Output: spaces
```
## ⚠️ LIMITATIONS
- Doesn't work with numbers
- Requires parentheses
- Considers only spaces as separators
```
These examples cover from the basic concept to practical applications of the LAST function, always focusing on BASIC beginners! 🚀

View file

@ -0,0 +1,45 @@
PARAM customer_id AS STRING
PARAM time_period AS INTEGER DEFAULT 30
# Analyze a customer's recent communications (e-mails, support tickets,
# meeting notes) over the look-back window and return sentiment plus
# key topics and recommendations.
# Gather customer communications.
# The OR must be parenthesized: AND binds tighter than OR, so without
# parentheses inbound mail (to = customer) was returned with no date
# filter at all.
emails = CALL "/storage/json", ".gbdata/communication_logs",
"(to = '${customer_id}' OR from = '${customer_id}') AND timestamp > NOW() - DAYS(${time_period})"
support_tickets = CALL "/crm/tickets/list", {
"customer_id": customer_id,
"created_after": NOW() - DAYS(time_period)
}
meeting_notes = CALL "/crm/meetings/list", {
"customer_id": customer_id,
"date_after": NOW() - DAYS(time_period)
}
# Combine all text for analysis into a single blob for the AI calls.
all_text = ""
FOR EACH email IN emails
all_text = all_text + email.subject + " " + email.body + " "
NEXT
FOR EACH ticket IN support_tickets
all_text = all_text + ticket.description + " " + ticket.resolution + " "
NEXT
FOR EACH meeting IN meeting_notes
all_text = all_text + meeting.notes + " "
NEXT
# Analyze sentiment
sentiment = CALL "/ai/analyze/text", all_text, "sentiment"
# Generate insights
insights = CALL "/ai/analyze/text", all_text, "key_topics"
RETURN {
"customer_id": customer_id,
"time_period": time_period + " days",
"sentiment_score": sentiment.score,
"sentiment_label": sentiment.label,
"key_topics": insights.topics,
"recommendations": insights.recommendations
}

View file

@ -0,0 +1,83 @@
PARAM period AS STRING DEFAULT "month"
PARAM team_id AS STRING OPTIONAL
# Build a sales performance report (headline metrics, per-rep numbers,
# charts) for the requested period, save it as a PDF and, when a team
# is given, share it and notify the team manager.
# Determine date range
IF period = "week" THEN
start_date = NOW() - DAYS(7)
ELSEIF period = "month" THEN
start_date = NOW() - DAYS(30)
ELSEIF period = "quarter" THEN
start_date = NOW() - DAYS(90)
ELSEIF period = "year" THEN
start_date = NOW() - DAYS(365)
ELSE
RETURN "Invalid period specified. Use 'week', 'month', 'quarter', or 'year'."
END IF
# Construct team filter (appended verbatim to each query below)
team_filter = ""
IF team_id IS NOT NULL THEN
team_filter = " AND team_id = '" + team_id + "'"
END IF
# Get sales data
opportunities = QUERY "SELECT * FROM Opportunities WHERE close_date >= '${start_date}'" + team_filter
closed_won = QUERY "SELECT * FROM Opportunities WHERE status = 'Won' AND close_date >= '${start_date}'" + team_filter
closed_lost = QUERY "SELECT * FROM Opportunities WHERE status = 'Lost' AND close_date >= '${start_date}'" + team_filter
# Calculate metrics
total_value = 0
FOR EACH opp IN closed_won
total_value = total_value + opp.value
NEXT
# Guard against division by zero when nothing closed in the period.
total_closed = LEN(closed_won) + LEN(closed_lost)
IF total_closed = 0 THEN
win_rate = 0
ELSE
win_rate = LEN(closed_won) / total_closed * 100
END IF
# Get performance by rep
sales_reps = QUERY "SELECT owner_id, COUNT(*) as deals, SUM(value) as total_value FROM Opportunities WHERE status = 'Won' AND close_date >= '${start_date}'" + team_filter + " GROUP BY owner_id"
# Generate report
report = CALL "/analytics/reports/generate", {
"title": "Sales Performance Report - " + UPPER(period),
"date_range": "From " + FORMAT_DATE(start_date) + " to " + FORMAT_DATE(NOW()),
"metrics": {
"total_opportunities": LEN(opportunities),
"won_opportunities": LEN(closed_won),
"lost_opportunities": LEN(closed_lost),
"win_rate": win_rate,
"total_value": total_value
},
"rep_performance": sales_reps,
"charts": [
{
"type": "bar",
"title": "Won vs Lost Opportunities",
"data": {"Won": LEN(closed_won), "Lost": LEN(closed_lost)}
},
{
"type": "line",
"title": "Sales Trend",
"data": QUERY "SELECT DATE_FORMAT(close_date, '%Y-%m-%d') as date, COUNT(*) as count, SUM(value) as value FROM Opportunities WHERE status = 'Won' AND close_date >= '${start_date}'" + team_filter + " GROUP BY DATE_FORMAT(close_date, '%Y-%m-%d')"
}
]
}
# Save report
report_file = ".gbdrive/Reports/Sales/sales_performance_" + period + "_" + FORMAT_DATE(NOW(), "Ymd") + ".pdf"
CALL "/files/save", report_file, report
# Share report
IF team_id IS NOT NULL THEN
CALL "/files/shareFolder", report_file, team_id
# Notify team manager
manager = QUERY "SELECT manager_id FROM Teams WHERE id = '${team_id}'"
IF LEN(manager) > 0 THEN
CALL "/comm/email/send", manager[0],
"Sales Performance Report - " + UPPER(period),
"The latest sales performance report for your team is now available.",
[report_file]
END IF
END IF
RETURN "Sales performance report generated: " + report_file

View file

@ -0,0 +1,5 @@
PARAM text AS STRING
# Creates a customer record from pasted free-form text.
DESCRIPTION "Called when someone wants to create a customer by pasting unstructured text, like an e-mail answer."
SAVE_FROM_UNSTRUCTURED "rob", text

View file

@ -0,0 +1,31 @@
let items = FIND "gb.rob", "ACTION=EMUL"
FOR EACH item IN items
PRINT item.company
let website = item.website ?? ""
if item.website == "" {
website = WEBSITE OF item.company
SET "gb.rob", "id="+ item.id, "website=" + website
PRINT website
}
let page = GET website
let prompt = "Build the same simulator, keep js, svg, css, assets paths, just change title, keep six cases of six messages each (change and return VALID JSON with a minimum of 6 cases and 6-8 messages each), but for " + item.company + " using just *content about the company* " + item.llm_notes + " from its website, so it is possible to create a good and useful emulator in the same language as the content: " + page
let alias = LLM "Return a single word for " + item.company + " like a token, no spaces, no special characters, no numbers, no uppercase letters."
CREATE_SITE alias, "gb-emulator-base", prompt
let to = item.emailcto
let subject = "Simulador " + alias + " ficou pronto"
let name = FIRST(item.contact)
let body = "Oi, " + name + ". Tudo bem? Para vocês terem uma ideia do ambiente conversacional em AI e algumas possibilidades, preparamos o " + alias + " especificamente para vocês!" + "\n\n Acesse o site: https://sites.pragmatismo.com.br/" + alias + "\n\n" + "Para acessar o simulador, clique no link acima ou copie e cole no seu navegador." + "\n\n" + "Para iniciar, escolha um dos casos conversacionais." + "\n\n" + "Atenciosamente,\nRodrigo Rodriguez\n\n"
let body = LLM "Melhora este e-mail: ------ " + body + " ----- mas mantem o link e inclui alguma referência ao histórico com o cliente: " + item.history
CREATE_DRAFT to, subject, body
SET "gb.rob", "id="+ item.id, "ACTION=CALL"
SET "gb.rob", "id="+ item.id, "emulator=true"
WAIT 3000
NEXT item

View file

@ -0,0 +1,12 @@
# Follow-up pass: for every contact still flagged ACTION=EMUL_ASK,
# draft an e-mail about the previously sent AI simulator, then mark
# the record so the contact is not asked again on the next run.
let items = FIND "gb.rob", "ACTION=EMUL_ASK"
FOR EACH item IN items
let to = item.emailcto
let subject = "Sobre o Simulador de AI enviado"
# NOTE(review): name is computed but never used below — presumably the
# HTML template carries the greeting; confirm.
let name = FIRST(item.contact)
# Body comes from a shared HTML template rather than inline text.
let body = GET "/EMUL-message.html"
CREATE_DRAFT to, subject, body
# Flip the flag so this record is skipped by future runs.
SET "gb.rob", "id="+ item.id, "ACTION=EMUL_ASKED"
NEXT item

View file

@ -0,0 +1,2 @@
Based on this ${history}, generate the response for
${to}, signed by ${user}

View file

@ -0,0 +1,25 @@
PARAM to AS STRING
PARAM template AS STRING
PARAM opportunity AS STRING
' Send a proposal for an opportunity: fill the template document,
' store it with team permissions, log the CRM activity and e-mail it.
company = QUERY "SELECT Company FROM Opportunities WHERE Id = ${opportunity}"
' FILL merges data into the template and yields the proposal document.
doc = FILL template
' Generate email subject and content based on conversation history
subject = REWRITE "Based on this ${history}, generate a subject for a proposal email to ${company}"
contents = REWRITE "Based on this ${history}, and ${subject}, generate the e-mail body for ${to}, signed by ${user}, including key points from our proposal"
' Add proposal to CRM storage and open it for the sales team to edit
CALL "/files/upload", ".gbdrive/Proposals/${company}-proposal.docx", doc
CALL "/files/permissions", ".gbdrive/Proposals/${company}-proposal.docx", "sales-team", "edit"
' Record activity in CRM
CALL "/crm/activities/create", opportunity, "email_sent", {
"subject": subject,
"description": "Proposal sent to " + company,
"date": NOW()
}
' Send the email with the proposal document attached
CALL "/comm/email/send", to, subject, contents, doc

View file

@ -0,0 +1,36 @@
PARAM attendees AS ARRAY
PARAM topic AS STRING
PARAM duration AS INTEGER
PARAM preferred_date AS DATE OPTIONAL
# Schedule a meeting at the first slot where every attendee is free,
# then notify each attendee. Returns a confirmation message.
# Find available time for all attendees: search the next 7 days when no
# preferred date is given, otherwise only the preferred day itself.
IF preferred_date IS NULL THEN
available_slots = CALL "/calendar/availability/check", attendees, NOW(), NOW() + DAYS(7), duration
ELSE
available_slots = CALL "/calendar/availability/check", attendees, preferred_date, preferred_date + DAYS(1), duration
END IF
IF LEN(available_slots) = 0 THEN
RETURN "No available time slots found for all attendees."
END IF
# Create meeting description
description = REWRITE "Generate a concise meeting description for topic: ${topic}"
# Schedule the meeting at the earliest available slot.
# NOTE(review): event_id is not used afterwards — consider returning it.
event_id = CALL "/calendar/events/create", {
"subject": topic,
"description": description,
"start_time": available_slots[0].start,
"end_time": available_slots[0].end,
"attendees": attendees,
"location": "Virtual Meeting"
}
# Notify attendees
FOR EACH person IN attendees
CALL "/comm/notifications/send", person, "Meeting Scheduled: " + topic,
"You have been invited to a meeting on " + FORMAT_DATE(available_slots[0].start)
NEXT
RETURN "Meeting scheduled for " + FORMAT_DATE(available_slots[0].start)

View file

@ -0,0 +1,5 @@
BEGIN SYSTEM PROMPT
No comments, no echo, condensed.
END SYSTEM PROMPT

View file

@ -0,0 +1,23 @@
PARAM from AS STRING
PARAM to AS STRING
PARAM subject AS STRING
PARAM body AS STRING
PARAM attachments AS ARRAY
# Log an outgoing e-mail and send it. When the WITH HISTORY flag is
# present, prior communication with the recipient is appended to the
# body BEFORE sending — the original appended it after the send call,
# so the history never reached the recipient.
IF WITH_HISTORY THEN
prevComms = CALL "/storage/json", ".gbdata/communication_logs", "to = '" + to + "' ORDER BY timestamp DESC LIMIT 5"
APPEND body WITH FORMAT_HISTORY(prevComms)
END IF
# Track in communication history
CALL "/storage/save", ".gbdata/communication_logs", {
"from": from,
"to": to,
"subject": subject,
"timestamp": NOW(),
"type": "email"
}
# Send actual email
CALL "/comm/email/send", from, to, subject, body, attachments

View file

@ -0,0 +1,67 @@
PARAM meeting_id AS STRING
PARAM action AS STRING DEFAULT "join"
# Meeting assistant. "join": join the call, start recording and create a
# notes document. "summarize": turn the recording transcript into notes
# sections and e-mail them to attendees. "end": stop recording and leave.
IF action = "join" THEN
# Get meeting details
meeting = CALL "/calendar/events/get", meeting_id
# Join the meeting
CALL "/conversations/calls/join", meeting.conference_link
# Set up recording
CALL "/conversations/recording/start", meeting_id
# Create meeting notes document pre-filled with headers and attendees
notes_doc = CALL "/files/create",
".gbdrive/Meetings/" + meeting.subject + "_" + FORMAT_DATE(NOW(), "Ymd") + ".md",
"# Meeting Notes: " + meeting.subject + "\n\n" +
"Date: " + FORMAT_DATE(meeting.start) + "\n\n" +
"Participants: \n" +
"- " + JOIN(meeting.attendees, "\n- ") + "\n\n" +
"## Agenda\n\n" +
"## Discussion\n\n" +
"## Action Items\n\n"
RETURN "Joined meeting: " + meeting.subject
ELSEIF action = "summarize" THEN
# Get recording transcript
transcript = CALL "/conversations/recording/transcript", meeting_id
# Generate meeting summary
summary = CALL "/ai/summarize", transcript, {
"format": "meeting_notes",
"sections": ["key_points", "decisions", "action_items"]
}
# Update meeting notes
meeting = CALL "/calendar/events/get", meeting_id
# NOTE(review): the notes path is rebuilt from *today's* date — running
# "summarize" on a later day than "join" targets a different file; confirm.
notes_path = ".gbdrive/Meetings/" + meeting.subject + "_" + FORMAT_DATE(NOW(), "Ymd") + ".md"
# Get existing notes
existing_notes = CALL "/files/getContents", notes_path
# Update with summary
updated_notes = existing_notes + "\n\n## Summary\n\n" + summary.key_points +
"\n\n## Decisions\n\n" + summary.decisions +
"\n\n## Action Items\n\n" + summary.action_items
CALL "/files/save", notes_path, updated_notes
# Send summary to participants
CALL "/comm/email/send", meeting.attendees,
"Meeting Summary: " + meeting.subject,
"Please find attached the summary of our recent meeting.",
[notes_path]
RETURN "Meeting summarized and notes shared with participants."
ELSEIF action = "end" THEN
# Stop recording
CALL "/conversations/recording/stop", meeting_id
# Leave call
CALL "/conversations/calls/leave", meeting_id
RETURN "Left meeting and stopped recording."
END IF

View file

@ -0,0 +1,4 @@
- Whenever you generate text, write like a person, not like an LLM. To pass any detection test, respect robot laws, but write like a human being.
- You are General Bots, a complete software automation tool based on AI.
- Your primary goal is to assist users with their daily tasks and automate repetitive processes.
- Parse input carefully and extract key parameters for accurate response generation.

View file

@ -0,0 +1,47 @@
PARAM opportunity_id AS STRING
PARAM status AS STRING
PARAM notes AS STRING OPTIONAL
PARAM next_steps AS STRING OPTIONAL
# Update an opportunity's status, optionally logging a note and a
# follow-up task, and notify the sales manager on Won/Lost.
# Get current opportunity data
opp_data = QUERY "SELECT * FROM Opportunities WHERE Id = '${opportunity_id}'"
IF LEN(opp_data) = 0 THEN
RETURN "Opportunity not found."
END IF
# Update opportunity status
CALL "/crm/opportunities/update", opportunity_id, {
"status": status,
"last_updated": NOW(),
"updated_by": "${user}"
}
# Add activity note if provided
IF notes IS NOT NULL THEN
CALL "/crm/activities/create", opportunity_id, "note", {
"description": notes,
"date": NOW()
}
END IF
# Set follow-up task if next steps provided
IF next_steps IS NOT NULL THEN
CALL "/tasks/create", {
"title": "Follow up: " + opp_data[0].company,
"description": next_steps,
"due_date": NOW() + DAYS(3),
"assigned_to": "${user}",
"related_to": opportunity_id
}
END IF
# Notify sales manager of major status changes.
# Guard the lookup: users without a manager must not break the update
# (same pattern as the other manager lookups in this project).
IF status = "Won" OR status = "Lost" THEN
manager = QUERY "SELECT Manager FROM Users WHERE Username = '${user}'"
IF LEN(manager) > 0 THEN
CALL "/comm/notifications/send", manager[0],
"Opportunity " + status + ": " + opp_data[0].company,
"The opportunity with " + opp_data[0].company + " has been marked as " + status + " by ${user}."
END IF
END IF
RETURN "Opportunity status updated to " + status

36
prompts/dev/fix.md Normal file
View file

@ -0,0 +1,36 @@
You are fixing Rust code in a Cargo project. The user is providing problematic code that needs to be corrected.
## Your Task
Fix ALL compiler errors and logical issues while maintaining the original intent. Return the COMPLETE corrected files as a SINGLE .sh script that can be executed from project root.
Use Cargo.toml as reference, do not change it.
Only return the input files; all other files already exist.
If something needs to be added to an external file, state it separately.
## Critical Requirements
1. **Return as SINGLE .sh script** - Output must be a complete shell script using `cat > file << 'EOF'` pattern
2. **Include ALL files** - Every corrected file must be included in the script
3. **Respect Cargo.toml** - Check dependencies, editions, and features to avoid compiler errors
4. **Type safety** - Ensure all types match and trait bounds are satisfied
5. **Ownership rules** - Fix borrowing, ownership, and lifetime issues
## Output Format Requirements
You MUST return exactly this example format:
```sh
#!/bin/bash
# Restore fixed Rust project
cat > src/<filenamehere>.rs << 'EOF'
use std::io;
// test
EOF
cat > src/<anotherfile>.rs << 'EOF'
// Fixed library code
pub fn add(a: i32, b: i32) -> i32 {
a + b
}
EOF
----

8
prompts/dev/general.md Normal file
View file

@ -0,0 +1,8 @@
* Prefer imports over using :: to call methods,
* Output a single `.sh` script using `cat` so it can be restored directly.
* No placeholders, only real, production-ready code.
* No comments, no explanations, no extra text.
* Follow KISS principles.
* Provide a complete, professional, working solution.
* If the script is too long, split into multiple parts, but always return the **entire code**.
* Output must be **only the code**, nothing else.

94
prompts/dev/model.md Normal file
View file

@ -0,0 +1,94 @@
Create a Rust data model for database storage with optimal size and performance characteristics. Follow these specifications:
**REQUIREMENTS:**
1. Use appropriate integer types (i32, i16, i8, etc.) based on expected value ranges
2. Use `Option<T>` for nullable fields to avoid memory overhead
3. Use `Vec<u8>` for binary data instead of strings when appropriate
4. Prefer enum representations as integers rather than strings
5. Use `chrono::DateTime<Utc>` for timestamps
6. Use `uuid::Uuid` for unique identifiers
7. Implement necessary traits: `Debug`, `Clone`, `Serialize`, `Deserialize`, `FromRow`
8. Include validation where appropriate
9. Consider database index strategy in field design
**CONTEXT:**
- Database: PostgreSQL/SQLx compatible
- Serialization: Serde for JSON
- ORM: SQLx for database operations
**OUTPUT FORMAT:**
Provide the complete Rust struct with:
- Struct definition with fields
- Enum definitions with integer representations
- Conversion implementations
- Basic validation if needed
**EXAMPLE REFERENCE:**
```rust
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use sqlx::FromRow;
use uuid::Uuid;
#[derive(Debug, Clone, Copy, PartialEq)]
pub enum Status {
Pending = 0,
Active = 1,
Inactive = 2,
}
impl Status {
pub fn from_i16(value: i16) -> Option<Self> {
match value {
0 => Some(Self::Pending),
1 => Some(Self::Active),
2 => Some(Self::Inactive),
_ => None,
}
}
}
#[derive(Debug, FromRow, Serialize, Deserialize)]
pub struct User {
pub id: Uuid,
pub status: i16, // Using i16 for enum storage
pub email: String,
pub age: Option<i16>, // Nullable small integer
pub metadata: Vec<u8>, // Binary data for flexibility
pub created_at: DateTime<Utc>,
}
```
Generate a similar model for: [YOUR DOMAIN HERE]
```
## Specialized Variants
### For High-Performance Applications
```
Add these additional requirements:
- Use `#[repr(u8)]` for enums to ensure minimal size
- Consider `Box<str>` instead of `String` for reduced heap overhead
- Use `arrayvec::ArrayString` for fixed-size short strings
- Implement `PartialEq` and `Eq` for hash-based operations
- Include `#[derive(Default)]` where appropriate
```
### For Embedded/Memory-Constrained Systems
```
Add these constraints:
- Prefer `i16` over `i32` where possible
- Use `u32` instead of `Uuid` if sequential IDs are acceptable
- Consider `bitflags` for multiple boolean flags in single byte
- Use `smol_str::SmolStr` for string optimization
- Avoid `Vec` in favor of arrays with capacity limits
```
### For Time-Series Data
```
Add time-series specific optimizations:
- Use `i64` for timestamps as nanoseconds since epoch
- Use `f32` instead of `f64` for measurements where precision allows
- Consider `ordered_float::OrderedFloat` for floating-point comparisons
- Use `#[serde(with = "chrono::serde::ts_seconds")]` for compact serialization

57
prompts/dev/service.md Normal file
View file

@ -0,0 +1,57 @@
Generate a Rust service module following these patterns:
Core Structure:
Use actix-web for HTTP endpoints (get, post, etc.)
Isolate shared resources (DB, clients, config) in AppState
Split logic into reusable helper functions
do not create main logic
Endpoints:
Follow REST conventions (e.g., POST /{resource}/create); use annotations in methods.
Use web::Path for route parameters, web::Json for payloads
Return consistent responses (e.g., HttpResponse::Ok().json(data))
Error Handling:
Wrap fallible operations in Result
Use map_err to convert errors to actix_web::Error
Provide clear error messages (e.g., ErrorInternalServerError)
Async Patterns:
Use async/await for I/O (DB, external APIs)
Leverage streams for pagination/large datasets
Isolate blocking ops in spawn_blocking if needed
Configuration:
Load settings (e.g., URLs, credentials) from AppConfig
Initialize clients (DB, SDKs) at startup (e.g., init_*() helpers)
Documentation:
Add brief doc comments for public functions
Note safety assumptions (e.g., #[post] invariants)
postgres sqlx
Omit domain-specific logic (e.g., file/email details), focusing on the scaffolding."
Key Features:
Generic (applies to any service: auth, payments, etc.)
KISS (avoids over-engineering)
Copy-paste friendly (clear patterns without verbosity)

View file

@ -0,0 +1,36 @@
PARAM query AS STRING
PARAM location AS STRING OPTIONAL
PARAM file_type AS STRING OPTIONAL
PARAM date_range AS ARRAY OPTIONAL
# Search stored documents and return a human-readable result list.
# Optional filters are only added to the request when provided.
search_params = {
"query": query
}
IF location IS NOT NULL THEN
search_params["location"] = location
END IF
IF file_type IS NOT NULL THEN
search_params["file_type"] = file_type
END IF
# date_range is assumed to be [start, end] — TODO confirm with callers.
IF date_range IS NOT NULL THEN
search_params["created_after"] = date_range[0]
search_params["created_before"] = date_range[1]
END IF
results = CALL "/files/search", search_params
IF LEN(results) = 0 THEN
RETURN "No documents found matching your criteria."
END IF
# Format results for display
formatted_results = "Found " + LEN(results) + " documents:\n\n"
FOR EACH doc IN results
formatted_results = formatted_results + "- " + doc.name + " (" + FORMAT_DATE(doc.modified) + ")\n"
formatted_results = formatted_results + " Location: " + doc.path + "\n"
NEXT
RETURN formatted_results

28
prompts/geral.bas Normal file
View file

@ -0,0 +1,28 @@
My Work
General
Sales Manager
Project Management
CRM
You should use files in .gbdrive/Proposals to search proposals.
You should use table RoB present in .gbdata/Proposals to get my proposals where User is ${user}
For sales pipelines, use table Opportunities in .gbdata/Sales.
Files
Use API endpoints under /files/* for document management.
CALL "/files/upload" uploads files to the system.
CALL "/files/search" finds relevant documents.
HR
People are in .gbdata/People
You should use files in .gbdrive/People to get resumes
Use HR_PORTAL to access employment records and policies.
ALM
My issues are in .gbservice/forgejo
CALL "/tasks/create" creates new project tasks.
CALL "/tasks/status/update" updates existing task status.
SETTINGS
API_KEYS stored in .gbsecure/keys
PREFERENCES in .gbdata/user-settings

View file

@ -0,0 +1,76 @@
PARAM name AS STRING
PARAM members AS ARRAY
PARAM description AS STRING OPTIONAL
PARAM team_type AS STRING DEFAULT "project"
# Provision a team workspace: group, folder structure, optional project
# board, chat channel, welcome message and member notifications.
# Returns the created ids plus the workspace folder path.
# Create the group
group_id = CALL "/groups/create", {
"name": name,
"description": description,
"type": team_type
}
# Add members
FOR EACH member IN members
CALL "/groups/members/add", group_id, member
NEXT
# Create standard workspace structure
CALL "/files/createFolder", ".gbdrive/Workspaces/" + name + "/Documents"
CALL "/files/createFolder", ".gbdrive/Workspaces/" + name + "/Meetings"
CALL "/files/createFolder", ".gbdrive/Workspaces/" + name + "/Resources"
# Create default workspace components (board only for project teams)
IF team_type = "project" THEN
# Create project board
board_id = CALL "/tasks/create", {
"title": name + " Project Board",
"description": "Task board for " + name,
"type": "project_board"
}
# Create standard task lanes
lanes = ["Backlog", "To Do", "In Progress", "Review", "Done"]
FOR EACH lane IN lanes
CALL "/tasks/lanes/create", board_id, lane
NEXT
# Link group to project board
CALL "/groups/settings", group_id, "project_board", board_id
END IF
# Set up communication channel
channel_id = CALL "/conversations/create", {
"name": name,
"description": description,
"type": "group_chat"
}
# Add all members to channel
FOR EACH member IN members
CALL "/conversations/members/add", channel_id, member
NEXT
# Link group to channel
CALL "/groups/settings", group_id, "conversation", channel_id
# Create welcome message and pin it in the channel
welcome_msg = REWRITE "Create a welcome message for a new workspace called ${name} with purpose: ${description}"
CALL "/conversations/messages/send", channel_id, {
"text": welcome_msg,
"pinned": TRUE
}
# Notify members
FOR EACH member IN members
CALL "/comm/notifications/send", member,
"You've been added to " + name,
"You have been added to the new workspace: " + name
NEXT
RETURN {
"group_id": group_id,
"channel_id": channel_id,
"workspace_location": ".gbdrive/Workspaces/" + name
}

View file

@ -0,0 +1,58 @@
PARAM components AS ARRAY OPTIONAL
PARAM notify AS BOOLEAN DEFAULT TRUE
# Run a health check over the given components (all by default),
# persist the timestamped report, and alert the admin team when any
# component scores below 0.7.
# Check all components by default
IF components IS NULL THEN
components = ["storage", "api", "database", "integrations", "security"]
END IF
# An explicitly empty list would divide by zero below — reject it.
IF LEN(components) = 0 THEN
RETURN "No components specified for health check."
END IF
status_report = {}
FOR EACH component IN components
status = CALL "/health/detailed", component
status_report[component] = status
NEXT
# Calculate overall health score as the mean of component scores
total_score = 0
FOR EACH component IN components
total_score = total_score + status_report[component].health_score
NEXT
overall_health = total_score / LEN(components)
status_report["overall_health"] = overall_health
status_report["timestamp"] = NOW()
# Save status report
CALL "/storage/save", ".gbdata/health/status_" + FORMAT_DATE(NOW(), "Ymd_His") + ".json", status_report
# Check for critical issues (score below 0.7)
critical_issues = []
FOR EACH component IN components
IF status_report[component].health_score < 0.7 THEN
APPEND critical_issues, {
"component": component,
"score": status_report[component].health_score,
"issues": status_report[component].issues
}
END IF
NEXT
# Notify if critical issues found
IF LEN(critical_issues) > 0 AND notify THEN
issue_summary = "Critical system health issues detected:\n\n"
FOR EACH issue IN critical_issues
issue_summary = issue_summary + "- " + issue.component + " (Score: " + issue.score + ")\n"
FOR EACH detail IN issue.issues
issue_summary = issue_summary + " * " + detail + "\n"
NEXT
issue_summary = issue_summary + "\n"
NEXT
CALL "/comm/notifications/send", "admin-team",
"ALERT: System Health Issues Detected",
issue_summary,
"high"
END IF
RETURN status_report

View file

@ -0,0 +1,5 @@
PARAM idea AS STRING
# Stores a marketing idea together with the user who suggested it.
DESCRIPTION "Called when someone has an idea and wants to keep it."
SAVE "marketing_ideas", idea, username

View file

@ -0,0 +1,51 @@
# Hourly digest: gathers unread email, upcoming meetings, due tasks and new
# documents, then notifies the user only when there is something to report.
SET SCHEDULE every 1 hour
# Check emails
unread_emails = CALL "/comm/email/list", {
"status": "unread",
"folder": "inbox",
"max_age": "24h"
}
# Check calendar
upcoming_events = CALL "/calendar/events/list", {
"start": NOW(),
"end": NOW() + HOURS(24)
}
# Check tasks
due_tasks = CALL "/tasks/list", {
"status": "open",
"due_before": NOW() + HOURS(24)
}
# Check important documents
new_documents = CALL "/files/recent", {
"folders": [".gbdrive/papers", ".gbdrive/Proposals"],
"since": NOW() - HOURS(24)
}
# Prepare notification message
notification = "Daily Update:\n"
IF LEN(unread_emails) > 0 THEN
notification = notification + "- You have " + LEN(unread_emails) + " unread emails\n"
END IF
IF LEN(upcoming_events) > 0 THEN
notification = notification + "- You have " + LEN(upcoming_events) + " upcoming meetings in the next 24 hours\n"
notification = notification + " Next: " + upcoming_events[0].subject + " at " + FORMAT_TIME(upcoming_events[0].start) + "\n"
END IF
IF LEN(due_tasks) > 0 THEN
notification = notification + "- You have " + LEN(due_tasks) + " tasks due in the next 24 hours\n"
END IF
IF LEN(new_documents) > 0 THEN
notification = notification + "- " + LEN(new_documents) + " new documents have been added to your monitored folders\n"
END IF
# Send notification
# Fixed: LEN(notification) is a number, so comparing it against the header
# STRING was wrong; compare against the header length to detect whether any
# section was appended above.
IF LEN(notification) > LEN("Daily Update:\n") THEN
CALL "/comm/notifications/send", "${user}", "Daily Status Update", notification
END IF

View file

@ -0,0 +1,63 @@
# Periodic access review for a resource: cross-checks current permissions
# against the audit log, flags users with no access in the review window,
# writes a report and emails the security team.
PARAM resource_path AS STRING
PARAM review_period AS INTEGER DEFAULT 90
# Get current permissions
current_perms = CALL "/files/permissions", resource_path
# Get access logs
access_logs = CALL "/security/audit/logs", {
"resource": resource_path,
"action": "access",
"timeframe": NOW() - DAYS(review_period)
}
# Identify inactive users with access
inactive_users = []
FOR EACH user IN current_perms
# Check if user has accessed in review period
# NOTE(review): assumes "=" inside FILTER ... WHERE is a comparison, not an
# assignment — confirm DSL semantics.
user_logs = FILTER access_logs WHERE user_id = user.id
IF LEN(user_logs) = 0 THEN
APPEND inactive_users, {
"user_id": user.id,
"access_level": user.access_level,
"last_access": CALL "/security/audit/logs", {
"resource": resource_path,
"action": "access",
"user_id": user.id,
"limit": 1
}
}
END IF
NEXT
# Generate review report
review_report = {
"resource": resource_path,
"review_date": NOW(),
"total_users_with_access": LEN(current_perms),
"inactive_users": inactive_users,
"recommendations": []
}
# Add recommendations
IF LEN(inactive_users) > 0 THEN
review_report.recommendations.APPEND("Remove access for " + LEN(inactive_users) + " inactive users")
END IF
excessive_admins = FILTER current_perms WHERE access_level = "admin"
IF LEN(excessive_admins) > 3 THEN
review_report.recommendations.APPEND("Reduce number of admin users (currently " + LEN(excessive_admins) + ")")
END IF
# Save review report
report_file = ".gbdata/security/access_reviews/" + REPLACE(resource_path, "/", "_") + "_" + FORMAT_DATE(NOW(), "Ymd") + ".json"
CALL "/files/save", report_file, review_report
# Notify security team
CALL "/comm/email/send", "security-team",
"Access Review Report: " + resource_path,
"A new access review report has been generated for " + resource_path + ".",
[report_file]
RETURN review_report

View file

@ -0,0 +1,59 @@
# Inbound email triage: looks up sender history and CRM record, runs AI
# analysis, opens a support ticket for support requests, and sends a
# generated auto-reply when urgency/tier/intent warrants one.
PARAM sender AS STRING
PARAM subject AS STRING
PARAM body AS STRING
# Get history for this sender
history = CALL "/storage/json", ".gbdata/communication_logs", "from = '${sender}' OR to = '${sender}' ORDER BY timestamp DESC LIMIT 10"
# Check if this is a known customer
customer = CALL "/crm/customers/get", sender
# Analyze email content
urgency = CALL "/ai/analyze/text", body, "urgency"
intent = CALL "/ai/analyze/text", body, "intent"
# NOTE(review): sentiment is computed but never used below — confirm whether
# it should influence the reply or be removed.
sentiment = CALL "/ai/analyze/text", body, "sentiment"
# Determine if auto-reply needed
should_auto_reply = FALSE
IF urgency.score > 0.8 THEN
should_auto_reply = TRUE
END IF
IF customer IS NOT NULL AND customer.tier = "premium" THEN
should_auto_reply = TRUE
END IF
IF intent.category = "support_request" THEN
# Create support ticket
ticket_id = CALL "/crm/tickets/create", {
"customer": sender,
"subject": subject,
"description": body,
"priority": urgency.score > 0.7 ? "High" : "Normal"
}
should_auto_reply = TRUE
# Notify support team
CALL "/comm/notifications/send", "support-team",
"New Support Ticket: " + subject,
"A new support ticket has been created from an email by " + sender
END IF
IF should_auto_reply THEN
reply_template = intent.category = "support_request" ? "support_acknowledgment" : "general_acknowledgment"
reply_text = REWRITE "Based on this email: ${body}
And this sender history: ${history}
Generate a personalized auto-reply message using the ${reply_template} style.
Include appropriate next steps and expected timeframe for response."
CALL "/comm/email/send", "${user}", sender, "Re: " + subject, reply_text
CALL "/storage/save", ".gbdata/auto_replies", {
"to": sender,
"subject": "Re: " + subject,
"timestamp": NOW()
}
END IF

View file

@ -0,0 +1,16 @@
do not comment or echo anything
keep lines condensed
always call it <kind> not own name. Eg.: proxy instead of Caddy. alm instead of forgejo.
use the KISS principle
use local /opt/gbo/{logs, data, conf} exposed as
HOST_BASE="/opt/gbo/tenants/$PARAM_TENANT/<kind>"
HOST_DATA="$HOST_BASE/data"
HOST_CONF="$HOST_BASE/conf"
HOST_LOGS="$HOST_BASE/logs"
instead of using app original paths.
and use /opt/gbo/bin to put local binaries of installations
during sh execution, never touch files in /opt/gbo/{logs, data, conf}
use wget
use gbuser as system user

View file

@ -0,0 +1,173 @@
#!/bin/bash
# Provision an LXC container that runs a Forgejo CI runner (alm-ci) for one
# tenant. Requires PARAM_TENANT, PARAM_ALM_CI_INSTANCE and PARAM_ALM_CI_TOKEN
# in the environment.
# Configuration
ALM_CI_NAME="CI"
ALM_CI_LABELS="gbo"
FORGEJO_RUNNER_VERSION="v6.3.1"
FORGEJO_RUNNER_BINARY="forgejo-runner-6.3.1-linux-amd64"
CONTAINER_IMAGE="images:debian/12"
# Paths
# Host-side tenant layout: /opt/gbo/tenants/<tenant>/alm-ci/{data,conf,logs}
HOST_BASE="/opt/gbo/tenants/$PARAM_TENANT/alm-ci"
HOST_DATA="$HOST_BASE/data"
HOST_CONF="$HOST_BASE/conf"
HOST_LOGS="$HOST_BASE/logs"
BIN_PATH="/opt/gbo/bin"
CONTAINER_NAME="${PARAM_TENANT}-alm-ci"
# Create host directories
mkdir -p "$HOST_DATA" "$HOST_CONF" "$HOST_LOGS" || exit 1
chmod -R 750 "$HOST_BASE" || exit 1
# Launch container
if ! lxc launch "$CONTAINER_IMAGE" "$CONTAINER_NAME" -c security.privileged=true; then
echo "Failed to launch container"
exit 1
fi
# Wait for container to be ready
# Poll up to 10 times (3 s apart) until a trivial exec succeeds.
for i in {1..10}; do
if lxc exec "$CONTAINER_NAME" -- bash -c "true"; then
break
fi
sleep 3
done
# Container setup
# The whole script below runs inside the container; the outer double quotes
# mean host variables (CONTAINER_NAME, BIN_PATH, PARAM_*) expand on the HOST
# before the script is sent — escaped quotes (\") survive into the container.
lxc exec "$CONTAINER_NAME" -- bash -c "
set -e
useradd --system --no-create-home --shell /bin/false $CONTAINER_NAME
# Update and install dependencies
apt-get update && apt-get install -y wget git || { echo 'Package installation failed'; exit 1; }
sudo apt update
sudo apt install -y curl gnupg ca-certificates git
apt-get update && apt-get install -y \
build-essential cmake git pkg-config libjpeg-dev libtiff-dev \
libpng-dev libavcodec-dev libavformat-dev libswscale-dev \
libv4l-dev libatlas-base-dev gfortran python3-dev cpulimit \
expect libxtst-dev libpng-dev
sudo apt-get install -y libcairo2-dev libpango1.0-dev libgif-dev librsvg2-dev
sudo apt install xvfb -y
sudo apt install -y \
libnss3 \
libatk1.0-0 \
libatk-bridge2.0-0 \
libcups2 \
libdrm2 \
libxkbcommon0 \
libxcomposite1 \
libxdamage1 \
libxfixes3 \
libxrandr2 \
libgbm1 \
libasound2 \
libpangocairo-1.0-0
export OPENCV4NODEJS_DISABLE_AUTOBUILD=1
export OPENCV_LIB_DIR=/usr/lib/x86_64-linux-gnu
sudo apt install -y curl gnupg ca-certificates git
# Install Node.js 22.x
curl -fsSL https://deb.nodesource.com/setup_22.x | sudo bash -
sudo apt install -y nodejs
npm install -g pnpm@latest
# Install rust 1.85
apt-get install -y libssl-dev pkg-config
sudo apt-get install -y \
apt-transport-https \
software-properties-common \
gnupg \
cmake \
build-essential \
clang \
libclang-dev \
libz-dev \
libssl-dev \
pkg-config
curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- --default-toolchain 1.85.1 -y
source ~/.cargo/env
rustc --version
# Install Xvfb and other dependencies
sudo apt install -y xvfb libgbm-dev lxd-client
# Create directories
mkdir -p \"$BIN_PATH\" /opt/gbo/data /opt/gbo/conf /opt/gbo/logs || { echo 'Directory creation failed'; exit 1; }
# Download and install forgejo-runner
wget -O \"$BIN_PATH/forgejo-runner\" \"https://code.forgejo.org/forgejo/runner/releases/download/$FORGEJO_RUNNER_VERSION/$FORGEJO_RUNNER_BINARY\" || { echo 'Download failed'; exit 1; }
chmod +x \"$BIN_PATH/forgejo-runner\" || { echo 'chmod failed'; exit 1; }
cd \"$BIN_PATH\"
# Register runner
\"$BIN_PATH/forgejo-runner\" register --no-interactive \\
--name \"$ALM_CI_NAME\" \\
--instance \"$PARAM_ALM_CI_INSTANCE\" \\
--token \"$PARAM_ALM_CI_TOKEN\" \\
--labels \"$ALM_CI_LABELS\" || { echo 'Runner registration failed'; exit 1; }
chown -R $CONTAINER_NAME:$CONTAINER_NAME /opt/gbo/bin /opt/gbo/data /opt/gbo/conf /opt/gbo/logs
"
# Set permissions
echo "[CONTAINER] Setting permissions..."
# Map the in-container uid/gid into the host-side unprivileged id range
# (base 100000) so the bind-mounted directories are writable.
# NOTE(review): variable names say EMAIL but this is the CI runner user —
# looks copy-pasted from email.sh; works, but the names are misleading.
EMAIL_UID=$(lxc exec "$PARAM_TENANT"-alm-ci -- id -u $CONTAINER_NAME)
EMAIL_GID=$(lxc exec "$PARAM_TENANT"-alm-ci -- id -g $CONTAINER_NAME)
HOST_EMAIL_UID=$((100000 + EMAIL_UID))
HOST_EMAIL_GID=$((100000 + EMAIL_GID))
sudo chown -R "$HOST_EMAIL_UID:$HOST_EMAIL_GID" "$HOST_BASE"
# Add directory mappings
lxc config device add "$CONTAINER_NAME" almdata disk source="$HOST_DATA" path=/opt/gbo/data || exit 1
lxc config device add "$CONTAINER_NAME" almconf disk source="$HOST_CONF" path=/opt/gbo/conf || exit 1
lxc config device add "$CONTAINER_NAME" almlogs disk source="$HOST_LOGS" path=/opt/gbo/logs || exit 1
# Write and start the systemd unit. The unit file placeholders (User, Group,
# ExecStart) are expanded on the HOST because the exec string is double-quoted.
lxc exec "$CONTAINER_NAME" -- bash -c "
# Create systemd service
cat > /etc/systemd/system/alm-ci.service <<EOF
[Unit]
Description=ALM CI Runner
After=network.target
[Service]
Type=simple
User=$CONTAINER_NAME
Group=$CONTAINER_NAME
ExecStart=$BIN_PATH/forgejo-runner daemon
Restart=always
RestartSec=5
StandardOutput=append:/opt/gbo/logs/output.log
StandardError=append:/opt/gbo/logs/error.log
[Install]
WantedBy=multi-user.target
EOF
# Enable and start service
systemctl daemon-reload || { echo 'daemon-reload failed'; exit 1; }
systemctl enable alm-ci || { echo 'enable service failed'; exit 1; }
systemctl start alm-ci || { echo 'start service failed'; exit 1; }
"
# Share sibling tenant services inside the CI container so pipelines can
# deploy artifacts: bot data, proxy website root and system binaries.
LXC_BOT="/opt/gbo/tenants/$PARAM_TENANT/bot/data"
LXC_PROXY="/opt/gbo/tenants/$PARAM_TENANT/proxy/data/websites"
LXC_SYSTEM="/opt/gbo/tenants/$PARAM_TENANT/system/bin"
lxc config device add "$CONTAINER_NAME" almbot disk source="$LXC_BOT" path=/opt/gbo/bin/bot
lxc config device add "$CONTAINER_NAME" almproxy disk source="$LXC_PROXY" path=/opt/gbo/bin/proxy
# Fixed: the target path contained a stray space ("syst em"), which made the
# device add fail and the script exit.
lxc config device add "$CONTAINER_NAME" almsystem disk source="$LXC_SYSTEM" path=/opt/gbo/bin/system || exit 1

65
scripts/containers/alm.sh Normal file
View file

@ -0,0 +1,65 @@
#!/bin/bash
# Provision a Forgejo (alm) container for a tenant: download the binary,
# bind-mount data/conf/logs, install a systemd unit and expose the web port.
HOST_BASE="/opt/gbo/tenants/$PARAM_TENANT/alm"
HOST_DATA="$HOST_BASE/data"
HOST_CONF="$HOST_BASE/conf"
HOST_LOGS="$HOST_BASE/logs"
ALM_PATH=/opt/gbo/bin
mkdir -p "$HOST_DATA" "$HOST_CONF" "$HOST_LOGS"
chmod -R 750 "$HOST_BASE"
lxc launch images:debian/12 "$PARAM_TENANT"-alm -c security.privileged=true
sleep 15
# Install Forgejo inside the container; ALM_PATH expands on the host.
lxc exec "$PARAM_TENANT"-alm -- bash -c "
apt-get update && apt-get install -y git git-lfs wget
mkdir -p /opt/gbo/bin
wget https://codeberg.org/forgejo/forgejo/releases/download/v10.0.2/forgejo-10.0.2-linux-amd64 -O $ALM_PATH/forgejo
chmod +x $ALM_PATH/forgejo
useradd --system --no-create-home --shell /bin/false alm
"
# Map the container's alm uid/gid into the host unprivileged range (base 100000).
FORGEJO_UID=$(lxc exec "$PARAM_TENANT"-alm -- id -u alm)
FORGEJO_GID=$(lxc exec "$PARAM_TENANT"-alm -- id -g alm)
HOST_FORGEJO_UID=$((100000 + FORGEJO_UID))
HOST_FORGEJO_GID=$((100000 + FORGEJO_GID))
chown -R "$HOST_FORGEJO_UID:$HOST_FORGEJO_GID" "$HOST_BASE"
lxc config device add "$PARAM_TENANT"-alm almdata disk source="$HOST_DATA" path=/opt/gbo/data
lxc config device add "$PARAM_TENANT"-alm almconf disk source="$HOST_CONF" path=/opt/gbo/conf
lxc config device add "$PARAM_TENANT"-alm almlogs disk source="$HOST_LOGS" path=/opt/gbo/logs
lxc exec "$PARAM_TENANT"-alm -- bash -c "
mkdir -p /opt/gbo/data /opt/gbo/conf /opt/gbo/logs
chown -R alm:alm /opt/gbo
cat > /etc/systemd/system/alm.service <<EOF
[Unit]
Description=alm
After=network.target
[Service]
User=alm
Group=alm
WorkingDirectory=/opt/gbo/data
ExecStart=/opt/gbo/bin/forgejo web --config /opt/gbo/conf/app.ini
Restart=always
Environment=USER=alm HOME=/opt/gbo/data
StandardOutput=append:/opt/gbo/logs/stdout.log
StandardError=append:/opt/gbo/logs/stderr.log
[Install]
WantedBy=multi-user.target
EOF
systemctl daemon-reload
systemctl enable alm
systemctl start alm
"
# Expose the Forgejo web port from the container on all host interfaces.
lxc config device remove "$PARAM_TENANT"-alm alm-proxy 2>/dev/null || true
lxc config device add "$PARAM_TENANT"-alm alm-proxy proxy \
listen=tcp:0.0.0.0:"$PARAM_ALM_PORT" \
connect=tcp:127.0.0.1:"$PARAM_ALM_PORT"

113
scripts/containers/bot.sh Normal file
View file

@ -0,0 +1,113 @@
#!/bin/bash
# Provision the bot server container: build/runtime dependencies, service
# user, and bind mounts for data/conf/logs.
HOST_BASE="/opt/gbo/tenants/$PARAM_TENANT/bot"
HOST_DATA="$HOST_BASE/data"
HOST_CONF="$HOST_BASE/conf"
HOST_LOGS="$HOST_BASE/logs"
mkdir -p "$HOST_DATA" "$HOST_CONF" "$HOST_LOGS"
chmod -R 750 "$HOST_BASE"
lxc launch images:debian/12 "$PARAM_TENANT"-bot -c security.privileged=true
sleep 15
# Install native build deps (OpenCV/canvas/headless-browser libraries).
lxc exec "$PARAM_TENANT"-bot -- bash -c "
apt-get update && apt-get install -y \
build-essential cmake git pkg-config libjpeg-dev libtiff-dev \
libpng-dev libavcodec-dev libavformat-dev libswscale-dev \
libv4l-dev libatlas-base-dev gfortran python3-dev cpulimit \
expect libxtst-dev libpng-dev
sudo apt-get install -y libcairo2-dev libpango1.0-dev libgif-dev librsvg2-dev
sudo apt install xvfb -y
sudo apt install -y \
libnss3 \
libatk1.0-0 \
libatk-bridge2.0-0 \
libcups2 \
libdrm2 \
libxkbcommon0 \
libxcomposite1 \
libxdamage1 \
libxfixes3 \
libxrandr2 \
libgbm1 \
libasound2 \
libpangocairo-1.0-0
export OPENCV4NODEJS_DISABLE_AUTOBUILD=1
export OPENCV_LIB_DIR=/usr/lib/x86_64-linux-gnu
useradd --system --no-create-home --shell /bin/false gbuser
"
# Map the container's gbuser uid/gid into the host unprivileged range.
BOT_UID=$(lxc exec "$PARAM_TENANT"-bot -- id -u gbuser)
BOT_GID=$(lxc exec "$PARAM_TENANT"-bot -- id -g gbuser)
HOST_BOT_UID=$((100000 + BOT_UID))
HOST_BOT_GID=$((100000 + BOT_GID))
chown -R "$HOST_BOT_UID:$HOST_BOT_GID" "$HOST_BASE"
lxc config device add "$PARAM_TENANT"-bot botdata disk source="$HOST_DATA" path=/opt/gbo/data
lxc config device add "$PARAM_TENANT"-bot botconf disk source="$HOST_CONF" path=/opt/gbo/conf
lxc config device add "$PARAM_TENANT"-bot botlogs disk source="$HOST_LOGS" path=/opt/gbo/logs
# Install Node.js, Chrome and the bot server inside the container, then
# register a systemd unit. The exec string is single-quoted: nothing here is
# expanded on the host.
lxc exec "$PARAM_TENANT"-bot -- bash -c '
mkdir -p /opt/gbo/data /opt/gbo/conf /opt/gbo/logs
sudo apt update
sudo apt install -y curl gnupg ca-certificates git
# Node.js 22.x from NodeSource
curl -fsSL https://deb.nodesource.com/setup_22.x | sudo bash -
sudo apt install -y nodejs
sudo apt install -y xvfb libgbm-dev
wget https://dl.google.com/linux/chrome/deb/pool/main/g/google-chrome-stable/google-chrome-stable_128.0.6613.119-1_amd64.deb
# Fixed: -y was missing, so the unattended install would hang on the prompt.
sudo apt install -y ./google-chrome-stable_128.0.6613.119-1_amd64.deb
cd /opt/gbo/data
git clone https://alm.pragmatismo.com.br/generalbots/botserver.git
cd botserver
npm install
./node_modules/.bin/tsc
cd packages/default.gbui
npm install
npm run build
chown -R gbuser:gbuser /opt/gbo
# Create systemd service
sudo tee /etc/systemd/system/bot.service > /dev/null <<EOF
[Unit]
Description=Bot Server
After=network.target
[Service]
User=gbuser
Group=gbuser
Environment="DISPLAY=:99"
ExecStartPre=/bin/bash -c "/usr/bin/Xvfb :99 -screen 0 1024x768x24 -ac +extension GLX +render -noreset &"
WorkingDirectory=/opt/gbo/data/botserver
ExecStart=/usr/bin/node /opt/gbo/data/botserver/boot.mjs
Restart=always
RestartSec=5
StandardOutput=append:/opt/gbo/logs/stdout.log
StandardError=append:/opt/gbo/logs/stderr.log
[Install]
WantedBy=multi-user.target
EOF
# Reload and start service
sudo systemctl daemon-reload
sudo systemctl enable bot.service
sudo systemctl start bot.service
'
# Expose the bot port from the container on all host interfaces.
lxc config device remove "$PARAM_TENANT"-bot bot-proxy 2>/dev/null || true
lxc config device add "$PARAM_TENANT"-bot bot-proxy proxy \
listen=tcp:0.0.0.0:"$PARAM_BOT_PORT" \
connect=tcp:127.0.0.1:"$PARAM_BOT_PORT"

View file

@ -0,0 +1,7 @@
# Install Valkey (Redis-compatible key/value store) from the packages.redis.io
# repository and enable it as a system service.
curl -fsSL https://packages.redis.io/gpg | sudo gpg --dearmor -o /usr/share/keyrings/valkey.gpg
echo "deb [signed-by=/usr/share/keyrings/valkey.gpg] https://packages.redis.io/deb $(lsb_release -cs) main" | sudo tee /etc/apt/sources.list.d/valkey.list
sudo apt update
# Fixed: -y was missing, so unattended runs would hang on the confirm prompt.
sudo apt install -y valkey-server
sudo systemctl enable valkey-server
sudo systemctl start valkey-server

View file

@ -0,0 +1,47 @@
#!/bin/bash
# Provision a tenant desktop container: XFCE over XRDP plus the Brave browser.
HOST_BASE="/opt/gbo/tenants/$PARAM_TENANT/desktop"
HOST_DATA="$HOST_BASE/data"
HOST_CONF="$HOST_BASE/conf"
HOST_LOGS="$HOST_BASE/logs"
mkdir -p "$HOST_DATA" "$HOST_CONF" "$HOST_LOGS"
chmod -R 750 "$HOST_BASE"
lxc launch images:debian/12 "$PARAM_TENANT"-desktop -c security.privileged=true
sleep 15
lxc exec "$PARAM_TENANT"-desktop -- bash -c "
apt-get update
apt-get install -y xvfb xrdp xfce4 xfce4-goodies
# XRDP session wrapper: start XFCE with the system locale.
cat > /etc/xrdp/startwm.sh <<EOF
#!/bin/sh
if [ -r /etc/default/locale ]; then
. /etc/default/locale
export LANG LANGUAGE
fi
startxfce4
EOF
chmod +x /etc/xrdp/startwm.sh
systemctl restart xrdp
systemctl enable xrdp
# For the root user (since you're logging in as root)
# Fixed: unescaped inner double quotes terminated the outer bash -c string
# and broke the script; they are now escaped.
echo \"exec startxfce4\" > /root/.xsession
chmod +x /root/.xsession
apt install -y curl apt-transport-https gnupg
curl -s https://brave-browser-apt-release.s3.brave.com/brave-core.asc | gpg --dearmor > /usr/share/keyrings/brave-browser-archive-keyring.gpg
echo \"deb [arch=amd64 signed-by=/usr/share/keyrings/brave-browser-archive-keyring.gpg] https://brave-browser-apt-release.s3.brave.com/ stable main\" > /etc/apt/sources.list.d/brave-browser-release.list
apt update && apt install -y brave-browser
# Fixed: -y was missing, so the unattended install would hang on the prompt.
sudo apt install -y gnome-tweaks
# Fixed: the original executed /etc/environment as a command and ran bare
# assignments; append the input-method variables to the file instead.
cat >> /etc/environment <<EOF2
GTK_IM_MODULE=cedilla
QT_IM_MODULE=cedilla
EOF2
"
# Expose RDP on the host.
port=3389
lxc config device remove "$PARAM_TENANT"-desktop "port-$port" 2>/dev/null || true
lxc config device add "$PARAM_TENANT"-desktop "port-$port" proxy listen=tcp:0.0.0.0:$port connect=tcp:127.0.0.1:$port

View file

@ -0,0 +1,67 @@
#!/bin/bash
# Provision a Zitadel (directory/identity) container for a tenant: install
# the binary, bind-mount data/conf/logs, register a systemd unit and expose
# the configured port.
HOST_BASE="/opt/gbo/tenants/$PARAM_TENANT/directory"
HOST_DATA="$HOST_BASE/data"
HOST_CONF="$HOST_BASE/conf"
HOST_LOGS="$HOST_BASE/logs"
sudo mkdir -p "$HOST_DATA" "$HOST_CONF" "$HOST_LOGS"
sudo chmod -R 750 "$HOST_BASE"
lxc launch images:debian/12 "$PARAM_TENANT"-directory -c security.privileged=true
sleep 15
lxc exec "$PARAM_TENANT"-directory -- bash -c "
apt-get update && apt-get install -y wget libcap2-bin
wget -c https://github.com/zitadel/zitadel/releases/download/v2.71.2/zitadel-linux-amd64.tar.gz -O - | tar -xz -C /tmp
mkdir -p /opt/gbo/bin
mv /tmp/zitadel-linux-amd64/zitadel /opt/gbo/bin/zitadel
chmod +x /opt/gbo/bin/zitadel
# Allow binding privileged ports (80/443) as a non-root user.
sudo setcap 'cap_net_bind_service=+ep' /opt/gbo/bin/zitadel
useradd --system --no-create-home --shell /bin/false gbuser
mkdir -p /opt/gbo/data /opt/gbo/conf /opt/gbo/logs
chown -R gbuser:gbuser /opt/gbo/data /opt/gbo/conf /opt/gbo/logs /opt/gbo/bin
"
# Map the container's gbuser uid/gid into the host unprivileged range.
GBUSER_UID=$(lxc exec "$PARAM_TENANT"-directory -- id -u gbuser)
GBUSER_GID=$(lxc exec "$PARAM_TENANT"-directory -- id -g gbuser)
HOST_GBUSER_UID=$((100000 + GBUSER_UID))
HOST_GBUSER_GID=$((100000 + GBUSER_GID))
sudo chown -R "$HOST_GBUSER_UID:$HOST_GBUSER_GID" "$HOST_BASE"
lxc config device add "$PARAM_TENANT"-directory directorydata disk source="$HOST_DATA" path=/opt/gbo/data
lxc config device add "$PARAM_TENANT"-directory directoryconf disk source="$HOST_CONF" path=/opt/gbo/conf
lxc config device add "$PARAM_TENANT"-directory directorylogs disk source="$HOST_LOGS" path=/opt/gbo/logs
# Write the unit; PARAM_DIRECTORY_MASTERKEY expands on the HOST into ExecStart.
lxc exec "$PARAM_TENANT"-directory -- bash -c "
chown -R gbuser:gbuser /opt/gbo/data /opt/gbo/conf /opt/gbo/logs /opt/gbo/bin
cat > /etc/systemd/system/directory.service <<EOF
[Unit]
Description=Directory Service
After=network.target
[Service]
Type=simple
User=gbuser
Group=gbuser
ExecStart=/opt/gbo/bin/zitadel start --masterkey $PARAM_DIRECTORY_MASTERKEY --config /opt/gbo/conf/config.yaml --tlsMode external
WorkingDirectory=/opt/gbo/bin
StandardOutput=append:/opt/gbo/logs/output.log
StandardError=append:/opt/gbo/logs/error.log
Restart=always
[Install]
WantedBy=multi-user.target
EOF
systemctl daemon-reload
systemctl enable directory
systemctl start directory
"
lxc config device remove "$PARAM_TENANT"-directory directory-proxy 2>/dev/null || true
lxc config device add "$PARAM_TENANT"-directory directory-proxy proxy \
listen=tcp:0.0.0.0:"$PARAM_DIRECTORY_PORT" \
connect=tcp:127.0.0.1:"$PARAM_DIRECTORY_PORT"

88
scripts/containers/dns.sh Normal file
View file

@ -0,0 +1,88 @@
#!/bin/bash
# Provision a tenant DNS container running CoreDNS behind LXD proxy devices.
HOST_BASE="/opt/gbo/tenants/$PARAM_TENANT/dns"
HOST_CONF="$HOST_BASE/conf"
HOST_DATA="$HOST_BASE/data"
HOST_LOGS="$HOST_BASE/logs"
mkdir -p "$HOST_BASE" "$HOST_CONF" "$HOST_DATA" "$HOST_LOGS"
chmod -R 750 "$HOST_BASE"
lxc network set lxdbr0 user.dns.nameservers $PARAM_DNS_INTERNAL_IP,8.8.8.8,1.1.1.1
lxc network set lxdbr0 dns.mode managed
# Clear existing rules
# NOTE(review): iptables -F flushes ALL host firewall rules, not only the
# DNS ones — confirm this is acceptable on shared hosts.
sudo iptables -F
# Allow DNS traffic
sudo iptables -A INPUT -p udp --dport 53 -j ACCEPT
sudo iptables -A INPUT -p tcp --dport 53 -j ACCEPT
sudo iptables -A FORWARD -p udp --dport 53 -j ACCEPT
sudo iptables -A FORWARD -p tcp --dport 53 -j ACCEPT
# Enable NAT
sudo iptables -t nat -A POSTROUTING -o eth0 -j MASQUERADE
# Save rules (if using iptables-persistent)
sudo netfilter-persistent save
lxc launch images:debian/12 "${PARAM_TENANT}-dns" -c security.privileged=true
until lxc exec "${PARAM_TENANT}-dns" -- true; do sleep 3; done
# Fixed: these four device commands hardcoded "pragmatismo-dns"; use the
# tenant-derived container name so the script works for any tenant.
lxc config device remove "${PARAM_TENANT}-dns" dns-udp
lxc config device remove "${PARAM_TENANT}-dns" dns-tcp
# Forward HOST's public IP:53 → CONTAINER's 0.0.0.0:53
lxc config device add "${PARAM_TENANT}-dns" dns-udp proxy listen=udp:$GB_PUBLIC_IP:53 connect=udp:0.0.0.0:53
lxc config device add "${PARAM_TENANT}-dns" dns-tcp proxy listen=tcp:$GB_PUBLIC_IP:53 connect=tcp:0.0.0.0:53
# Install CoreDNS inside the container and register it as a systemd service.
lxc exec "${PARAM_TENANT}-dns" -- bash -c "
mkdir /opt/gbo
mkdir /opt/gbo/{bin,conf,data,logs}
echo 'nameserver 8.8.8.8' > /etc/resolv.conf
# Fixed: a fresh container has no package lists, so install needs update
# first (the original ran upgrade, which does not fetch lists).
apt-get update && apt-get install -y wget
# Fixed: the release artifact is a tarball; the original saved it directly to
# the final binary path and then extracted over it. Download to /tmp,
# extract the coredns binary, and remove the archive.
wget -qO /tmp/coredns.tgz https://github.com/coredns/coredns/releases/download/v1.12.4/coredns_1.12.4_linux_amd64.tgz
tar -xzf /tmp/coredns.tgz -C /opt/gbo/bin/
rm /tmp/coredns.tgz
useradd --system --no-create-home --shell /bin/false gbuser
setcap cap_net_bind_service=+ep /opt/gbo/bin/coredns
cat > /etc/systemd/system/dns.service <<EOF2
[Unit]
Description=DNS
After=network.target
[Service]
User=gbuser
ExecStart=/opt/gbo/bin/coredns -conf /opt/gbo/conf/Corefile
Restart=always
StandardOutput=append:/opt/gbo/logs/stdout.log
StandardError=append:/opt/gbo/logs/stderr.log
[Install]
WantedBy=multi-user.target
EOF2
systemctl stop systemd-resolved
systemctl disable systemd-resolved
rm /etc/resolv.conf
systemctl daemon-reload
systemctl enable dns
"
# Map the container's gbuser into the host unprivileged range and mount dirs.
GBUSER_UID=$(lxc exec "${PARAM_TENANT}-dns" -- id -u gbuser)
HOST_UID=$((100000 + GBUSER_UID))
chown -R "$HOST_UID:$HOST_UID" "$HOST_BASE"
lxc exec "${PARAM_TENANT}-dns" -- bash -c "
chown -R gbuser:gbuser /opt/gbo
"
lxc config device add "${PARAM_TENANT}-dns" dnsdata disk source="$HOST_DATA" path=/opt/gbo/data
lxc config device add "${PARAM_TENANT}-dns" dnsconf disk source="$HOST_CONF" path=/opt/gbo/conf
lxc config device add "${PARAM_TENANT}-dns" dnslogs disk source="$HOST_LOGS" path=/opt/gbo/logs
lxc exec "${PARAM_TENANT}-dns" -- systemctl start dns

View file

@ -0,0 +1,30 @@
#!/bin/bash
# Provision a Collabora Online (doc editor) container for a tenant.
HOST_BASE="/opt/gbo/tenants/$PARAM_TENANT/doc-editor"
lxc launch images:debian/12 "${PARAM_TENANT}-doc-editor" \
-c security.privileged=true \
-c limits.cpu=2 \
-c limits.memory=4096MB
# Fixed: a trailing backslash after the memory limit turned "sleep 10" into
# an argument of lxc launch, so the readiness pause never ran.
sleep 10
lxc exec "$PARAM_TENANT"-doc-editor -- bash -c "
cd /usr/share/keyrings
wget https://collaboraoffice.com/downloads/gpg/collaboraonline-release-keyring.gpg
# NOTE(review): \$customer_hash expands on the HOST and is not defined
# anywhere in this script — confirm where it should come from.
cat << EOF > /etc/apt/sources.list.d/collaboraonline.sources
Types: deb
URIs: https://www.collaboraoffice.com/repos/CollaboraOnline/24.04/customer-deb-$customer_hash
Suites: ./
Signed-By: /usr/share/keyrings/collaboraonline-release-keyring.gpg
EOF
# Fixed: -y was missing, so the unattended install would hang on the prompt.
apt update && apt install -y coolwsd collabora-online-brand
"
# Expose coolwsd (9980) on the tenant's configured document port.
lxc config device remove "$PARAM_TENANT"-doc-editor doc-proxy 2>/dev/null || true
lxc config device add "$PARAM_TENANT"-doc-editor doc-proxy proxy \
listen=tcp:0.0.0.0:"$PARAM_DOC_PORT" \
connect=tcp:127.0.0.1:9980

View file

@ -0,0 +1,60 @@
#!/bin/bash
# Provision a MinIO (drive) container: bind mounts for data and logs, a
# dedicated service user, a systemd unit and host proxies for API + console.
STORAGE_PATH="/opt/gbo/tenants/$PARAM_TENANT/drive/data"
LOGS_PATH="/opt/gbo/tenants/$PARAM_TENANT/drive/logs"
mkdir -p "${STORAGE_PATH}" "${LOGS_PATH}"
chmod -R 770 "${STORAGE_PATH}" "${LOGS_PATH}"
# NOTE(review): 100999 is presumably 100000 (subuid base) + minio-user uid
# inside the container — confirm, since the uid is not queried here.
chown -R 100999:100999 "${STORAGE_PATH}" "${LOGS_PATH}"
lxc launch images:debian/12 "${PARAM_TENANT}-drive" -c security.privileged=true
sleep 15
lxc config device add "${PARAM_TENANT}-drive" storage disk source="${STORAGE_PATH}" path=/data
lxc config device add "${PARAM_TENANT}-drive" logs disk source="${LOGS_PATH}" path=/var/log/minio
# Outer single quotes: only the '"..."' interpolations expand on the host.
lxc exec "${PARAM_TENANT}-drive" -- bash -c '
apt-get update && apt-get install -y wget
wget https://dl.min.io/server/minio/release/linux-amd64/minio -O /usr/local/bin/minio
chmod +x /usr/local/bin/minio
wget https://dl.min.io/client/mc/release/linux-amd64/mc -O /usr/local/bin/mc
chmod +x /usr/local/bin/mc
useradd -r -s /bin/false minio-user || true
mkdir -p /var/log/minio /data
chown -R minio-user:minio-user /var/log/minio /data
# NOTE(review): the console address uses PARAM_PORT while the API uses
# PARAM_DRIVE_PORT -- confirm PARAM_PORT is the intended console port.
cat > /etc/systemd/system/minio.service <<EOF
[Unit]
Description=MinIO
After=network.target
[Service]
Type=simple
User=minio-user
Group=minio-user
Environment="MINIO_ROOT_USER='"${PARAM_DRIVE_USER}"'"
Environment="MINIO_ROOT_PASSWORD='"${PARAM_DRIVE_PASSWORD}"'"
ExecStart=/usr/local/bin/minio server --address ":'"${PARAM_DRIVE_PORT}"'" --console-address ":'"${PARAM_PORT}"'" /data
StandardOutput=append:/var/log/minio/output.log
StandardError=append:/var/log/minio/error.log
[Install]
WantedBy=multi-user.target
EOF
systemctl daemon-reload
systemctl enable minio
systemctl start minio
'
lxc config device remove "${PARAM_TENANT}-drive" minio-proxy 2>/dev/null || true
lxc config device add "${PARAM_TENANT}-drive" minio-proxy proxy \
listen=tcp:0.0.0.0:"${PARAM_DRIVE_API_PORT}" \
connect=tcp:127.0.0.1:"${PARAM_DRIVE_API_PORT}"
lxc config device remove "${PARAM_TENANT}-drive" console-proxy 2>/dev/null || true
lxc config device add "${PARAM_TENANT}-drive" console-proxy proxy \
listen=tcp:0.0.0.0:"${PARAM_DRIVE_PORT}" \
connect=tcp:127.0.0.1:"${PARAM_DRIVE_PORT}"

107
scripts/containers/email.sh Normal file
View file

@ -0,0 +1,107 @@
#!/bin/bash
# Provision a Stalwart mail container and forward its ports from the host.
PUBLIC_INTERFACE="eth0" # Your host's public network interface
# Configure firewall
echo "[HOST] Configuring firewall..."
sudo iptables -A FORWARD -i $PUBLIC_INTERFACE -o lxcbr0 -p tcp -m multiport --dports 25,80,110,143,465,587,993,995,4190 -j ACCEPT
sudo iptables -A FORWARD -i lxcbr0 -o $PUBLIC_INTERFACE -m state --state RELATED,ESTABLISHED -j ACCEPT
sudo iptables -t nat -A POSTROUTING -o $PUBLIC_INTERFACE -j MASQUERADE
# IPv6 firewall
sudo ip6tables -A FORWARD -i $PUBLIC_INTERFACE -o lxcbr0 -p tcp -m multiport --dports 25,80,110,143,465,587,993,995,4190 -j ACCEPT
sudo ip6tables -A FORWARD -i lxcbr0 -o $PUBLIC_INTERFACE -m state --state RELATED,ESTABLISHED -j ACCEPT
# Save iptables rules permanently (adjust based on your distro)
if command -v iptables-persistent >/dev/null; then
sudo iptables-save | sudo tee /etc/iptables/rules.v4
sudo ip6tables-save | sudo tee /etc/iptables/rules.v6
fi
# ------------------------- CONTAINER SETUP -------------------------
# Create directory structure
echo "[CONTAINER] Creating directories..."
# NOTE(review): other tenant containers use /opt/gbo/tenants/<tenant>/...;
# this one uses /opt/email — confirm whether that is intentional.
HOST_BASE="/opt/email"
HOST_DATA="$HOST_BASE/data"
HOST_CONF="$HOST_BASE/conf"
HOST_LOGS="$HOST_BASE/logs"
sudo mkdir -p "$HOST_DATA" "$HOST_CONF" "$HOST_LOGS"
sudo chmod -R 750 "$HOST_BASE"
# Launch container
echo "[CONTAINER] Launching LXC container..."
lxc launch images:debian/12 "$PARAM_TENANT"-email -c security.privileged=true
sleep 15
echo "[CONTAINER] Installing Stalwart Mail..."
lxc exec "$PARAM_TENANT"-email -- bash -c "
# Fixed: the unescaped inner quotes terminated the outer bash -c string and
# split the script argument apart; escape them so the whole script survives.
echo \"nameserver $PARAM_DNS_INTERNAL_IP\" > /etc/resolv.conf
apt install resolvconf -y
apt-get update && apt-get install -y wget libcap2-bin
wget -O /tmp/stalwart.tar.gz https://github.com/stalwartlabs/stalwart/releases/download/v0.13.1/stalwart-x86_64-unknown-linux-gnu.tar.gz
tar -xzf /tmp/stalwart.tar.gz -C /tmp
mkdir -p /opt/gbo/bin
mv /tmp/stalwart /opt/gbo/bin/stalwart
chmod +x /opt/gbo/bin/stalwart
sudo setcap 'cap_net_bind_service=+ep' /opt/gbo/bin/stalwart
rm /tmp/stalwart.tar.gz
useradd --system --no-create-home --shell /bin/false email
mkdir -p /opt/gbo/data /opt/gbo/conf /opt/gbo/logs
chown -R email:email /opt/gbo/data /opt/gbo/conf /opt/gbo/logs /opt/gbo/bin
"
# Set permissions
echo "[CONTAINER] Setting permissions..."
# Map the container's email uid/gid into the host unprivileged range (100000+).
EMAIL_UID=$(lxc exec "$PARAM_TENANT"-email -- id -u email)
EMAIL_GID=$(lxc exec "$PARAM_TENANT"-email -- id -g email)
HOST_EMAIL_UID=$((100000 + EMAIL_UID))
HOST_EMAIL_GID=$((100000 + EMAIL_GID))
sudo chown -R "$HOST_EMAIL_UID:$HOST_EMAIL_GID" "$HOST_BASE"
# Mount directories
echo "[CONTAINER] Mounting directories..."
lxc config device add "$PARAM_TENANT"-email emaildata disk source="$HOST_DATA" path=/opt/gbo/data
lxc config device add "$PARAM_TENANT"-email emailconf disk source="$HOST_CONF" path=/opt/gbo/conf
lxc config device add "$PARAM_TENANT"-email emaillogs disk source="$HOST_LOGS" path=/opt/gbo/logs
# Create systemd service
echo "[CONTAINER] Creating email service..."
lxc exec "$PARAM_TENANT"-email -- bash -c "
chown -R email:email /opt/gbo/data /opt/gbo/conf /opt/gbo/logs /opt/gbo/bin
cat > /etc/systemd/system/email.service <<EOF
[Unit]
Description=Email Service
After=network.target
[Service]
Type=simple
User=email
Group=email
ExecStart=/opt/gbo/bin/stalwart --config /opt/gbo/conf/config.toml
WorkingDirectory=/opt/gbo/bin
Restart=always
[Install]
WantedBy=multi-user.target
EOF
systemctl daemon-reload
systemctl enable email
systemctl start email
"
# Proxy devices for all mail-related ports.
# Fixed: a single proxy device only takes one listen= key, so the previous
# duplicate listen= lines left only the last one active; use a separate
# device per address family instead.
for port in 25 80 110 143 465 587 993 995 4190; do
lxc config device remove "$PARAM_TENANT"-email "port-$port" 2>/dev/null || true
lxc config device remove "$PARAM_TENANT"-email "port6-$port" 2>/dev/null || true
lxc config device add "$PARAM_TENANT"-email "port-$port" proxy \
listen=tcp:0.0.0.0:$port \
connect=tcp:127.0.0.1:$port
lxc config device add "$PARAM_TENANT"-email "port6-$port" proxy \
listen=tcp:[::]:$port \
connect=tcp:127.0.0.1:$port
done

View file

@ -0,0 +1,30 @@
# Host bootstrap: LXD init/storage, IP forwarding, and NVIDIA/CUDA driver
# setup for GPU hosts.
sudo apt install sshfs -y
lxc init
lxc storage create default dir
lxc profile device add default root disk path=/ pool=default
sudo apt update && sudo apt install -y bridge-utils
# Enable IP forwarding
echo "[HOST] Enabling IP forwarding..."
echo "net.ipv4.ip_forward=1" | sudo tee -a /etc/sysctl.conf
sudo sysctl -p
wget https://developer.download.nvidia.com/compute/cuda/repos/ubuntu2404/x86_64/cuda-keyring_1.1-1_all.deb
sudo dpkg -i cuda-keyring_1.1-1_all.deb
sudo apt-get update
# Fixed: -y was missing on purge/add-apt-repository/install, all of which
# prompt and would hang an unattended run.
sudo apt purge -y '^nvidia-*' # Clean existing drivers
sudo add-apt-repository -y ppa:graphics-drivers/ppa
sudo apt update
sudo apt install -y nvidia-driver-470-server # Most stable for Kepler GPUs
wget https://developer.download.nvidia.com/compute/cuda/11.0.3/local_installers/cuda_11.0.3_450.51.06_linux.run
sudo sh cuda_11.0.3_450.51.06_linux.run --override

View file

@ -0,0 +1,9 @@
# Fetch llama.cpp binaries and small GGUF models for local inference.
wget https://github.com/ggml-org/llama.cpp/releases/download/b6148/llama-b6148-bin-ubuntu-x64.zip
# Fixed: without -O the file is saved with the "?download=true" query string
# appended to its name; save it under the real model filename.
wget -O tinyllama-1.1b-chat-v1.0.Q4_0.gguf "https://huggingface.co/TheBloke/TinyLlama-1.1B-Chat-v1.0-GGUF/resolve/main/tinyllama-1.1b-chat-v1.0.Q4_0.gguf?download=true"
wget https://huggingface.co/bartowski/DeepSeek-R1-Distill-Qwen-1.5B-GGUF/resolve/main/DeepSeek-R1-Distill-Qwen-1.5B-Q3_K_M.gguf
# Phi-3.5-mini-instruct-IQ2_M.gguf
# ./llama-cli -m tinyllama-1.1b-chat-v1.0.Q4_0.gguf --reasoning-budget 0 --reasoning-format none -mli
# ./llama-cli -m DeepSeek-R1-Distill-Qwen-1.5B-Q3_K_M.gguf --system-prompt "<think> </think>Output as JSON: Name 3 colors and their HEX codes. Use format: [{\"name\": \"red\", \"hex\": \"#FF0000\"}]" --reasoning-budget 0 --reasoning-format none -mli

View file

@ -0,0 +1,89 @@
#!/bin/bash
# Provision the "<tenant>-meeting" LXC container: LiveKit SFU plus a coturn
# TURN server, with host-mounted data/conf/log directories.
HOST_BASE="/opt/gbo/tenants/$PARAM_TENANT/meeting"
HOST_DATA="$HOST_BASE/data"
HOST_CONF="$HOST_BASE/conf"
HOST_LOGS="$HOST_BASE/logs"
mkdir -p "$HOST_DATA" "$HOST_CONF" "$HOST_LOGS"
chmod -R 750 "$HOST_BASE"
lxc launch images:debian/12 "$PARAM_TENANT"-meeting -c security.privileged=true
sleep 15
# Double-quoted script: PARAM_* variables expand on the HOST before the
# commands are sent into the container.
lxc exec "$PARAM_TENANT"-meeting -- bash -c "
apt-get update && apt-get install -y wget coturn
mkdir -p /opt/gbo/bin
cd /opt/gbo/bin
wget -q https://github.com/livekit/livekit/releases/download/v1.8.4/livekit_1.8.4_linux_amd64.tar.gz
tar -xzf livekit*.tar.gz
rm livekit_1.8.4_linux_amd64.tar.gz
chmod +x livekit-server
# NOTE(review): the grep pattern below was expanded on the host, but the
# increment runs in the container, so the loop condition never changes.
# If the port is busy this loops forever, and the bumped value is not
# visible to the host-expanded unit file below - confirm intent.
while netstat -tuln | grep -q \":$PARAM_MEETING_TURN_PORT \"; do
((PARAM_MEETING_TURN_PORT++))
done
useradd --system --no-create-home --shell /bin/false gbuser
"
# Translate the container uid/gid into the host idmap.
# Assumes the default privileged idmap base of 100000 - TODO confirm.
MEETING_UID=$(lxc exec "$PARAM_TENANT"-meeting -- id -u gbuser)
MEETING_GID=$(lxc exec "$PARAM_TENANT"-meeting -- id -g gbuser)
HOST_MEETING_UID=$((100000 + MEETING_UID))
HOST_MEETING_GID=$((100000 + MEETING_GID))
chown -R "$HOST_MEETING_UID:$HOST_MEETING_GID" "$HOST_BASE"
lxc config device add "$PARAM_TENANT"-meeting meetingdata disk source="$HOST_DATA" path=/opt/gbo/data
lxc config device add "$PARAM_TENANT"-meeting meetingconf disk source="$HOST_CONF" path=/opt/gbo/conf
lxc config device add "$PARAM_TENANT"-meeting meetinglogs disk source="$HOST_LOGS" path=/opt/gbo/logs
# Install and start the two systemd units (heredocs are host-expanded).
lxc exec "$PARAM_TENANT"-meeting -- bash -c "
mkdir -p /opt/gbo/data /opt/gbo/conf /opt/gbo/logs
chown -R gbuser:gbuser /opt/gbo/data /opt/gbo/conf /opt/gbo/logs
# NOTE(review): /var/run/turnserver.pid may not exist yet at this point.
sudo chown gbuser:gbuser /var/run/turnserver.pid
cat > /etc/systemd/system/meeting.service <<EOF
[Unit]
Description=LiveKit Server
After=network.target
[Service]
User=gbuser
Group=gbuser
ExecStart=/opt/gbo/bin/livekit-server --config /opt/gbo/conf/config.yaml
Restart=always
Environment=TURN_PORT=$PARAM_MEETING_TURN_PORT
[Install]
WantedBy=multi-user.target
EOF
cat > /etc/systemd/system/meeting-turn.service <<EOF
[Unit]
Description=TURN Server
After=network.target
[Service]
User=gbuser
Group=gbuser
ExecStart=/usr/bin/turnserver -c /opt/gbo/conf/turnserver.conf
Restart=always
[Install]
WantedBy=multi-user.target
EOF
systemctl daemon-reload
systemctl enable meeting meeting-turn
systemctl start meeting meeting-turn
"
# Expose the LiveKit port on the host (idempotent re-add).
lxc config device remove "$PARAM_TENANT"-meeting meeting-proxy 2>/dev/null || true
lxc config device add "$PARAM_TENANT"-meeting meeting-proxy proxy \
listen=tcp:0.0.0.0:"$PARAM_MEETING_PORT" \
connect=tcp:127.0.0.1:"$PARAM_MEETING_PORT"

View file

@ -0,0 +1,56 @@
#!/bin/bash
# Provision the "<tenant>-proxy" LXC container running Caddy as the
# tenant reverse proxy, bound to host ports 80/443.
HOST_BASE="/opt/gbo/tenants/$PARAM_TENANT/proxy"
HOST_DATA="$HOST_BASE/data"
HOST_CONF="$HOST_BASE/conf"
HOST_LOGS="$HOST_BASE/logs"
mkdir -p "$HOST_BASE" "$HOST_DATA" "$HOST_CONF" "$HOST_LOGS"
chmod 750 "$HOST_BASE" "$HOST_DATA" "$HOST_CONF" "$HOST_LOGS"
lxc launch images:debian/12 "$PARAM_TENANT"-proxy -c security.privileged=true
sleep 15
lxc exec "$PARAM_TENANT"-proxy -- bash -c "
mkdir -p /opt/gbo/{bin,data,conf,logs}
apt-get update && apt-get install -y wget libcap2-bin
wget -q https://github.com/caddyserver/caddy/releases/download/v2.10.0-beta.3/caddy_2.10.0-beta.3_linux_amd64.tar.gz
tar -xzf caddy_2.10.0-beta.3_linux_amd64.tar.gz -C /opt/gbo/bin
rm caddy_2.10.0-beta.3_linux_amd64.tar.gz
chmod 750 /opt/gbo/bin/caddy
# Allow the unprivileged service user to bind ports 80/443.
setcap 'cap_net_bind_service=+ep' /opt/gbo/bin/caddy
useradd --create-home --system --shell /usr/sbin/nologin gbuser
chown -R gbuser:gbuser /opt/gbo/{bin,data,conf,logs}
"
lxc config device add "$PARAM_TENANT"-proxy data disk source="$HOST_DATA" path=/opt/gbo/data
lxc config device add "$PARAM_TENANT"-proxy conf disk source="$HOST_CONF" path=/opt/gbo/conf
lxc config device add "$PARAM_TENANT"-proxy logs disk source="$HOST_LOGS" path=/opt/gbo/logs
# systemd unit; XDG_DATA_HOME keeps certificates on the mounted data dir.
# NOTE(review): the unit is enabled but never started here - confirm a
# later step (or reboot) is expected to start it.
lxc exec "$PARAM_TENANT"-proxy -- bash -c "
cat > /etc/systemd/system/proxy.service <<EOF
[Unit]
Description=Proxy
After=network.target
[Service]
User=gbuser
Group=gbuser
Environment=XDG_DATA_HOME=/opt/gbo/data
ExecStart=/opt/gbo/bin/caddy run --config /opt/gbo/conf/config --adapter caddyfile
AmbientCapabilities=CAP_NET_BIND_SERVICE
CapabilityBoundingSet=CAP_NET_BIND_SERVICE
[Install]
WantedBy=multi-user.target
EOF
systemctl daemon-reload
chown -R gbuser:gbuser /opt/gbo/{bin,data,conf,logs}
systemctl enable proxy
"
# Forward HTTP/HTTPS from the host into the container (idempotent).
for port in 80 443; do
lxc config device remove "$PARAM_TENANT"-proxy "port-$port" 2>/dev/null || true
lxc config device add "$PARAM_TENANT"-proxy "port-$port" proxy listen=tcp:0.0.0.0:$port connect=tcp:127.0.0.1:$port
done
lxc config set "$PARAM_TENANT"-proxy security.syscalls.intercept.mknod true
lxc config set "$PARAM_TENANT"-proxy security.syscalls.intercept.setxattr true

View file

@ -0,0 +1 @@
https://www.brasil247.com/mundo/meta-quer-automatizar-totalmente-publicidade-com-ia-ate-2026-diz-wsj

View file

@ -0,0 +1,93 @@
#!/bin/bash
# Provision the "<tenant>-system" container that hosts the gbserver binary.
HOST_BASE="/opt/gbo/tenants/$PARAM_TENANT/system"
HOST_DATA="$HOST_BASE/data"
HOST_CONF="$HOST_BASE/conf"
HOST_LOGS="$HOST_BASE/logs"
HOST_BIN="$HOST_BASE/bin"
BIN_PATH="/opt/gbo/bin"
CONTAINER_NAME="${PARAM_TENANT}-system"
# Create host directories
# "$HOST_BIN" added: it is mounted into the container later
# (lxc config device add ... bin disk source="${HOST_BIN}") but was never
# created here, so that device add failed on a fresh host.
mkdir -p "$HOST_DATA" "$HOST_CONF" "$HOST_LOGS" "$HOST_BIN" || exit 1
chmod -R 750 "$HOST_BASE" || exit 1
lxc launch images:debian/12 $CONTAINER_NAME -c security.privileged=true
sleep 15
# Single-quoted script: nothing expands on the host; everything below runs
# verbatim inside the container (as root, so the sudo prefixes are no-ops).
lxc exec $CONTAINER_NAME -- bash -c '
apt-get update && apt-get install -y wget curl unzip git
useradd -r -s /bin/false gbuser || true
mkdir -p /opt/gbo/logs /opt/gbo/bin /opt/gbo/data /opt/gbo/conf
chown -R gbuser:gbuser /opt/gbo/
# Local llama.cpp runtime plus two GGUF models (chat + embeddings).
wget https://github.com/ggml-org/llama.cpp/releases/download/b6148/llama-b6148-bin-ubuntu-x64.zip
mkdir llm
mv llama-b6148-bin-ubuntu-x64.zip llm
cd llm
unzip llama-b6148-bin-ubuntu-x64.zip
mv build/bin/* .
rm build/bin -r
rm llama-b6148-bin-ubuntu-x64.zip
wget https://huggingface.co/bartowski/DeepSeek-R1-Distill-Qwen-1.5B-GGUF/resolve/main/DeepSeek-R1-Distill-Qwen-1.5B-Q3_K_M.gguf
wget https://huggingface.co/CompendiumLabs/bge-small-en-v1.5-gguf/resolve/main/bge-small-en-v1.5-f32.gguf
# Brave (beta) browser repo + install.
sudo curl -fsSLo /usr/share/keyrings/brave-browser-beta-archive-keyring.gpg https://brave-browser-apt-beta.s3.brave.com/brave-browser-beta-archive-keyring.gpg
sudo curl -fsSLo /etc/apt/sources.list.d/brave-browser-beta.sources https://brave-browser-apt-beta.s3.brave.com/brave-browser.sources
sudo apt update
# NOTE(review): no -y here; this will prompt and stall an unattended run.
sudo apt install brave-browser-beta
# Rust toolchain + gbserver sources and native build dependencies.
curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y
source "$HOME/.cargo/env"
git clone https://alm.pragmatismo.com.br/generalbots/gbserver
apt install -y build-essential \
pkg-config \
libssl-dev \
gcc-multilib \
g++-multilib \
clang \
lld \
binutils-dev \
libudev-dev \
libdbus-1-dev
# NOTE(review): gbserver is cloned but never built or copied to
# /opt/gbo/bin, yet the unit below execs /opt/gbo/bin/gbserver, and the
# bin mount is only attached AFTER systemctl start - confirm a later
# deploy step supplies the binary and restarts the service.
cat > /etc/systemd/system/system.service <<EOF
[Unit]
Description=General Bots System Service
After=network.target
[Service]
Type=simple
User=gbuser
Group=gbuser
ExecStart=/opt/gbo/bin/gbserver
StandardOutput=append:/opt/gbo/logs/output.log
StandardError=append:/opt/gbo/logs/error.log
[Install]
WantedBy=multi-user.target
EOF
systemctl daemon-reload
systemctl enable system
systemctl start system
'
# Mount host directories into the container, share the proxy tree
# read-side at /opt/gbo/refs/proxy, and publish the system port.
lxc config device add $CONTAINER_NAME bin disk source="${HOST_BIN}" path=/opt/gbo/bin
lxc config device add $CONTAINER_NAME data disk source="${HOST_DATA}" path=/opt/gbo/data
lxc config device add $CONTAINER_NAME conf disk source="${HOST_CONF}" path=/opt/gbo/conf
lxc config device add $CONTAINER_NAME logs disk source="${HOST_LOGS}" path=/opt/gbo/logs
lxc config device add $CONTAINER_NAME system-proxy disk source="/opt/gbo/tenants/$PARAM_TENANT/proxy" path=/opt/gbo/refs/proxy
lxc config device remove $CONTAINER_NAME proxy 2>/dev/null || true
lxc config device add $CONTAINER_NAME proxy proxy \
listen=tcp:0.0.0.0:"${PARAM_SYSTEM_PORT}" \
connect=tcp:127.0.0.1:"${PARAM_SYSTEM_PORT}"

View file

@ -0,0 +1,86 @@
#!/bin/bash
# Provision the "<tenant>-table-editor" LXC container running NocoDB,
# with host-mounted data/conf/log directories and a host port proxy.
# Fixed container name
CONTAINER_NAME="$PARAM_TENANT-table-editor"
# Single source of truth for the listen port: honour PARAM_TABLE_EDITOR_PORT
# when provided, defaulting to 5757. Previously the systemd unit listened on
# ${PARAM_TABLE_EDITOR_PORT} while the LXD proxy forwarded the hard-coded
# 5757, so the two could silently point at different ports.
TABLE_EDITOR_PORT="${PARAM_TABLE_EDITOR_PORT:-5757}"
# Paths
HOST_BASE="/opt/gbo/tenants/$PARAM_TENANT/table-editor"
HOST_DATA="$HOST_BASE/data"
HOST_CONF="$HOST_BASE/conf"
HOST_LOGS="$HOST_BASE/logs"
BIN_PATH="/opt/gbo/bin"
# Create host directories
mkdir -p "$HOST_DATA" "$HOST_CONF" "$HOST_LOGS"
chmod -R 750 "$HOST_BASE"
# Launch container
lxc launch images:debian/12 "$CONTAINER_NAME" -c security.privileged=true
# Wait for container to be ready
sleep 10
# Container setup (double quotes: host-side variables expand here)
lxc exec "$CONTAINER_NAME" -- bash -c "
useradd --system --no-create-home --shell /bin/false gbuser
apt-get update
apt-get install -y wget curl
# Create directories
mkdir -p \"$BIN_PATH\" /opt/gbo/data /opt/gbo/conf /opt/gbo/logs
# Download and install NocoDB binary
cd \"$BIN_PATH\"
curl http://get.nocodb.com/linux-x64 -o nocodb -L
chmod +x nocodb
"
# Map gbuser's uid/gid into the host idmap (privileged base 100000)
TE_UID=$(lxc exec "$CONTAINER_NAME" -- id -u gbuser)
TE_GID=$(lxc exec "$CONTAINER_NAME" -- id -g gbuser)
HOST_TE_UID=$((100000 + TE_UID))
HOST_TE_GID=$((100000 + TE_GID))
chown -R "$HOST_TE_UID:$HOST_TE_GID" "$HOST_BASE"
# Add directory mappings
lxc config device add "$CONTAINER_NAME" tedata disk source="$HOST_DATA" path=/opt/gbo/data
lxc config device add "$CONTAINER_NAME" teconf disk source="$HOST_CONF" path=/opt/gbo/conf
lxc config device add "$CONTAINER_NAME" telogs disk source="$HOST_LOGS" path=/opt/gbo/logs
# Create systemd service; PORT now uses the same variable as the proxy below.
lxc exec "$CONTAINER_NAME" -- bash -c "
cat > /etc/systemd/system/table-editor.service <<EOF
[Unit]
Description=NocoDB Table Editor
After=network.target
[Service]
Type=simple
User=gbuser
Group=gbuser
WorkingDirectory=$BIN_PATH
Environment=PORT=${TABLE_EDITOR_PORT}
Environment=DATABASE_URL=postgres://${PARAM_TABLES_USER}:${PARAM_TABLES_PASSWORD}@${PARAM_TABLES_HOST}:${PARAM_TABLES_PORT}/${PARAM_TABLE_EDITOR_DATABASE}
ExecStart=$BIN_PATH/nocodb
Restart=always
StandardOutput=append:/opt/gbo/logs/out.log
StandardError=append:/opt/gbo/logs/err.log
[Install]
WantedBy=multi-user.target
EOF
systemctl daemon-reload
systemctl enable table-editor
systemctl start table-editor
"
# Expose the NocoDB port (idempotent: drop any stale device first)
lxc config device remove "$CONTAINER_NAME" http 2>/dev/null || true
lxc config device add "$CONTAINER_NAME" http proxy listen=tcp:0.0.0.0:$TABLE_EDITOR_PORT connect=tcp:127.0.0.1:$TABLE_EDITOR_PORT

View file

@ -0,0 +1,50 @@
# Provision the "<tenant>-tables" container running PostgreSQL (PGDG build)
# and create the tenant role + database.
HOST_BASE="/opt/gbo/tenants/$PARAM_TENANT/tables"
HOST_DATA="$HOST_BASE/data"
HOST_CONF="$HOST_BASE/conf"
HOST_LOGS="$HOST_BASE/logs"
mkdir -p "$HOST_DATA" "$HOST_CONF" "$HOST_LOGS"
lxc launch images:debian/12 "$PARAM_TENANT"-tables -c security.privileged=true
# Wait until the container filesystem is usable before exec-ing into it.
until lxc exec "$PARAM_TENANT"-tables -- test -f /bin/bash; do
sleep 5
done
sleep 10
# Double-quoted script: PARAM_* values expand on the host.
# NOTE(review): a stock install listens on 5432; if PARAM_TABLES_PORT
# differs, the wait loop below never succeeds (see TODO about listener).
lxc exec "$PARAM_TENANT"-tables -- bash -c "
set -e
export DEBIAN_FRONTEND=noninteractive
apt-get update
apt-get install -y wget gnupg2 sudo lsb-release curl
sudo apt install -y postgresql-common
sudo /usr/share/postgresql-common/pgdg/apt.postgresql.org.sh
apt install -y postgresql
# TODO: Open listener on *.
until sudo -u postgres psql -p $PARAM_TABLES_PORT -c '\q' 2>/dev/null; do
echo \"Waiting for PostgreSQL to start on port $PARAM_TABLES_PORT...\"
sleep 3
done
sudo -u postgres psql -p $PARAM_TABLES_PORT -c \"CREATE USER $PARAM_TENANT WITH PASSWORD '$PARAM_TABLES_PASSWORD';\"
sudo -u postgres psql -p $PARAM_TABLES_PORT -c \"CREATE DATABASE ${PARAM_TENANT}_db OWNER $PARAM_TENANT;\"
sudo -u postgres psql -p $PARAM_TABLES_PORT -c \"GRANT ALL PRIVILEGES ON DATABASE ${PARAM_TENANT}_db TO $PARAM_TENANT;\"
"
# Publish the PostgreSQL port on the host (idempotent re-add).
lxc config device remove "$PARAM_TENANT"-tables postgres-proxy 2>/dev/null || true
lxc config device add "$PARAM_TENANT"-tables postgres-proxy proxy \
listen=tcp:0.0.0.0:"$PARAM_TABLES_PORT" \
connect=tcp:127.0.0.1:"$PARAM_TABLES_PORT"
echo "PostgreSQL setup completed successfully!"
echo "Database: ${PARAM_TENANT}_db"
echo "User: $PARAM_TENANT"
# NOTE(review): this prints the database password to stdout/logs.
echo "Password: $PARAM_TABLES_PASSWORD"
echo "Port: $PARAM_TABLES_PORT"

View file

@ -0,0 +1,4 @@
#!/bin/bash
# Download and run the latest Qdrant release binary.
# set -e added: if the download or extraction fails we must not fall
# through and execute a stale ./qdrant from a previous run.
set -e
wget https://github.com/qdrant/qdrant/releases/latest/download/qdrant-x86_64-unknown-linux-gnu.tar.gz
tar -xzf qdrant-x86_64-unknown-linux-gnu.tar.gz
./qdrant

View file

@ -0,0 +1,103 @@
#!/bin/bash
# Provision the "<tenant>-webmail" container: PHP 8.1 (Sury repo) plus
# Roundcube, served by the PHP built-in web server under systemd.
HOST_BASE="/opt/gbo/tenants/$PARAM_TENANT/webmail"
HOST_DATA="$HOST_BASE/data"
HOST_CONF="$HOST_BASE/conf"
HOST_LOGS="$HOST_BASE/logs"
PARAM_RC_VERSION="1.6.6"
mkdir -p "$HOST_DATA" "$HOST_CONF" "$HOST_LOGS"
chmod -R 750 "$HOST_BASE"
lxc launch images:debian/12 "$PARAM_TENANT"-webmail -c security.privileged=true
sleep 15
RC_PATH="/opt/gbo/data"
# Single-quoted script with '\'' splices: RC_PATH and PARAM_RC_VERSION are
# injected from the host; everything else runs verbatim in the container.
lxc exec "$PARAM_TENANT"-webmail -- bash -c '
# Install prerequisites
apt install -y ca-certificates apt-transport-https lsb-release gnupg wget
# Add the Sury PHP repository (official for Debian)
wget -O /etc/apt/trusted.gpg.d/php.gpg https://packages.sury.org/php/apt.gpg
sh -c '\''echo "deb https://packages.sury.org/php/ $(lsb_release -sc) main" > /etc/apt/sources.list.d/php.list'\''
# Update and install PHP 8.1
apt update
apt install -y \
php8.1 \
php8.1-fpm \
php8.1-imap \
php8.1-pgsql \
php8.1-mbstring \
php8.1-xml \
php8.1-curl \
php8.1-zip \
php8.1-cli \
php8.1-intl \
php8.1-dom
# Restart PHP-FPM
systemctl restart php8.1-fpm
mkdir -p '"$RC_PATH"'
wget -q https://github.com/roundcube/roundcubemail/releases/download/'"$PARAM_RC_VERSION"'/roundcubemail-'"$PARAM_RC_VERSION"'-complete.tar.gz
tar -xzf roundcubemail-*.tar.gz
mv roundcubemail-'"$PARAM_RC_VERSION"'/* '"$RC_PATH"'
rm -rf roundcubemail-*
mkdir -p /opt/gbo/logs
chmod 750 '"$RC_PATH"'
find '"$RC_PATH"' -type d -exec chmod 750 {} \;
find '"$RC_PATH"' -type f -exec chmod 640 {} \;
'
# Map www-data uid/gid into the host idmap (assumes base 100000 - confirm).
WEBMAIL_UID=$(lxc exec "$PARAM_TENANT"-webmail -- id -u www-data)
WEBMAIL_GID=$(lxc exec "$PARAM_TENANT"-webmail -- id -g www-data)
HOST_WEBMAIL_UID=$((100000 + WEBMAIL_UID))
HOST_WEBMAIL_GID=$((100000 + WEBMAIL_GID))
chown -R "$HOST_WEBMAIL_UID:$HOST_WEBMAIL_GID" "$HOST_BASE"
lxc config device add "$PARAM_TENANT"-webmail webmaildata disk source="$HOST_DATA" path="$RC_PATH"
lxc config device add "$PARAM_TENANT"-webmail webmaillogs disk source="$HOST_LOGS" path=/opt/gbo/logs
# NOTE(review): the doc root below is RC_PATH/wwwroot/public_html but the
# tarball was extracted directly into RC_PATH - confirm the intended path.
lxc exec "$PARAM_TENANT"-webmail -- bash -c "
chown -R www-data:www-data '"$RC_PATH"' /opt/gbo/logs
cat > /etc/systemd/system/webmail.service <<EOF
[Unit]
Description=Roundcube Webmail
After=network.target php8.1-fpm.service
[Service]
User=www-data
Group=www-data
WorkingDirectory=$RC_PATH
ExecStart=/usr/bin/php -S 0.0.0.0:$PARAM_WEBMAIL_PORT -t $RC_PATH/wwwroot/public_html
Restart=always
StandardOutput=append:/opt/gbo/logs/stdout.log
StandardError=append:/opt/gbo/logs/stderr.log
[Install]
WantedBy=multi-user.target
EOF
systemctl daemon-reload
systemctl enable webmail
systemctl restart php8.1-fpm
systemctl start webmail
"
# Check if port is available before adding proxy
if lsof -i :$PARAM_WEBMAIL_PORT >/dev/null; then
echo "Port $PARAM_WEBMAIL_PORT is already in use. Please choose a different port."
exit 1
fi
lxc config device remove "$PARAM_TENANT"-webmail webmail-proxy 2>/dev/null || true
lxc config device add "$PARAM_TENANT"-webmail webmail-proxy proxy \
listen=tcp:0.0.0.0:"$PARAM_WEBMAIL_PORT" \
connect=tcp:127.0.0.1:"$PARAM_WEBMAIL_PORT"

View file

@ -0,0 +1,6 @@
-- Campaign click tracking: one row per (campaign, recipient) pair;
-- updated_at records the most recent click.
CREATE TABLE clicks (
campaign_id TEXT NOT NULL,
email TEXT NOT NULL,
updated_at TIMESTAMP DEFAULT NOW(),
UNIQUE(campaign_id, email)
);

11
scripts/database/0002.sql Normal file
View file

@ -0,0 +1,11 @@
-- Scheduled/triggered automations executed by the system service.
-- kind: automation type discriminator (integer enum; values defined in app code).
-- schedule: fixed-width CHAR(12) - presumably a compact cron-like spec; confirm format.
CREATE TABLE public.system_automations (
id uuid NOT NULL,
kind int4 NULL,
target varchar(32) NULL,
schedule bpchar(12) NULL,
param varchar(32) NOT NULL,
is_active bool DEFAULT true NOT NULL,
last_triggered timestamptz NULL,
CONSTRAINT system_automations_pkey PRIMARY KEY (id)
);
-- Partial index: only active automations are scanned by the scheduler.
CREATE INDEX idx_active_automations ON public.system_automations USING btree (kind) WHERE is_active;

13
scripts/database/0003.sql Normal file
View file

@ -0,0 +1,13 @@
-- One conversational session per (user, bot); context holds arbitrary
-- session state as JSONB. NOTE(review): 0004.sql redefines user_sessions
-- with a different schema under IF NOT EXISTS - confirm migration ordering.
CREATE TABLE IF NOT EXISTS user_sessions (
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
user_id TEXT NOT NULL,
bot_id TEXT NOT NULL,
answer_mode TEXT NOT NULL DEFAULT 'direct',
context JSONB NOT NULL DEFAULT '{}',
current_tool TEXT,
created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
UNIQUE(user_id, bot_id)
);
CREATE INDEX IF NOT EXISTS idx_user_sessions_user_bot ON user_sessions(user_id, bot_id);

129
scripts/database/0004.sql Normal file
View file

@ -0,0 +1,129 @@
-- NOTE(review): this migration re-creates users/user_sessions already
-- defined in earlier migrations with DIFFERENT column sets; because every
-- statement uses IF NOT EXISTS, these definitions silently do NOT apply
-- on databases where the earlier migrations ran - confirm intent.
-- User authentication and profiles
CREATE TABLE IF NOT EXISTS users (
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
username VARCHAR(255) UNIQUE NOT NULL,
email VARCHAR(255) UNIQUE NOT NULL,
password_hash VARCHAR(255) NOT NULL,
phone_number VARCHAR(50),
created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
is_active BOOLEAN DEFAULT true
);
-- Bot configurations
CREATE TABLE IF NOT EXISTS bots (
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
name VARCHAR(255) NOT NULL,
description TEXT,
llm_provider VARCHAR(100) NOT NULL,
llm_config JSONB NOT NULL DEFAULT '{}',
context_provider VARCHAR(100) NOT NULL,
context_config JSONB NOT NULL DEFAULT '{}',
created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
is_active BOOLEAN DEFAULT true
);
-- User sessions with optimized storage
CREATE TABLE IF NOT EXISTS user_sessions (
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
user_id UUID NOT NULL REFERENCES users(id) ON DELETE CASCADE,
bot_id UUID NOT NULL REFERENCES bots(id) ON DELETE CASCADE,
title VARCHAR(500) NOT NULL DEFAULT 'New Conversation',
answer_mode VARCHAR(50) NOT NULL DEFAULT 'direct',
context_data JSONB NOT NULL DEFAULT '{}',
current_tool VARCHAR(255),
message_count INTEGER NOT NULL DEFAULT 0,
total_tokens INTEGER NOT NULL DEFAULT 0,
created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
last_activity TIMESTAMPTZ NOT NULL DEFAULT NOW(),
UNIQUE(user_id, bot_id, title)
);
-- Encrypted message history with analytics-friendly structure
-- message_index: position of the message within its session.
-- NOTE(review): no UNIQUE(session_id, message_index) - duplicates possible.
CREATE TABLE IF NOT EXISTS message_history (
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
session_id UUID NOT NULL REFERENCES user_sessions(id) ON DELETE CASCADE,
user_id UUID NOT NULL REFERENCES users(id) ON DELETE CASCADE,
role VARCHAR(50) NOT NULL CHECK (role IN ('user', 'assistant', 'system')),
content_encrypted TEXT NOT NULL,
message_type VARCHAR(50) NOT NULL DEFAULT 'text',
media_url TEXT,
token_count INTEGER NOT NULL DEFAULT 0,
processing_time_ms INTEGER,
llm_model VARCHAR(100),
created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
message_index INTEGER NOT NULL
);
-- Bot channel configurations
CREATE TABLE IF NOT EXISTS bot_channels (
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
bot_id UUID NOT NULL REFERENCES bots(id) ON DELETE CASCADE,
channel_type VARCHAR(50) NOT NULL CHECK (channel_type IN ('web', 'whatsapp', 'meet', 'api')),
config JSONB NOT NULL DEFAULT '{}',
is_active BOOLEAN DEFAULT true,
created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
UNIQUE(bot_id, channel_type)
);
-- WhatsApp number mappings
CREATE TABLE IF NOT EXISTS whatsapp_numbers (
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
bot_id UUID NOT NULL REFERENCES bots(id) ON DELETE CASCADE,
phone_number VARCHAR(50) NOT NULL,
is_active BOOLEAN DEFAULT true,
created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
UNIQUE(phone_number, bot_id)
);
-- User email mappings for web channel
CREATE TABLE IF NOT EXISTS user_emails (
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
user_id UUID NOT NULL REFERENCES users(id) ON DELETE CASCADE,
email VARCHAR(255) NOT NULL,
is_primary BOOLEAN DEFAULT false,
verified BOOLEAN DEFAULT false,
created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
UNIQUE(email)
);
-- Tools registry
CREATE TABLE IF NOT EXISTS tools (
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
name VARCHAR(255) UNIQUE NOT NULL,
description TEXT NOT NULL,
parameters JSONB NOT NULL DEFAULT '{}',
script TEXT NOT NULL,
is_active BOOLEAN DEFAULT true,
created_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
);
-- Manual context injections
CREATE TABLE IF NOT EXISTS context_injections (
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
session_id UUID NOT NULL REFERENCES user_sessions(id) ON DELETE CASCADE,
injected_by UUID NOT NULL REFERENCES users(id) ON DELETE CASCADE,
context_data JSONB NOT NULL,
reason TEXT,
created_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
);
-- Analytics tables
-- NOTE(review): no uniqueness over (user_id, bot_id, session_id, date);
-- a daily-rollup upsert would need one - confirm how rows are aggregated.
CREATE TABLE IF NOT EXISTS usage_analytics (
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
user_id UUID NOT NULL REFERENCES users(id) ON DELETE CASCADE,
bot_id UUID NOT NULL REFERENCES bots(id) ON DELETE CASCADE,
session_id UUID NOT NULL REFERENCES user_sessions(id) ON DELETE CASCADE,
date DATE NOT NULL DEFAULT CURRENT_DATE,
message_count INTEGER NOT NULL DEFAULT 0,
total_tokens INTEGER NOT NULL DEFAULT 0,
total_processing_time_ms INTEGER NOT NULL DEFAULT 0
);
-- Indexes for performance
CREATE INDEX IF NOT EXISTS idx_message_history_session_id ON message_history(session_id);
CREATE INDEX IF NOT EXISTS idx_message_history_created_at ON message_history(created_at);
CREATE INDEX IF NOT EXISTS idx_user_sessions_user_bot ON user_sessions(user_id, bot_id);
CREATE INDEX IF NOT EXISTS idx_usage_analytics_date ON usage_analytics(date);

View file

@ -0,0 +1,57 @@
-- Bootstrap schema: core tables plus a seeded default bot and demo user.
-- Uses uuid-ossp for uuid_generate_v4() (other migrations use gen_random_uuid()).
CREATE EXTENSION IF NOT EXISTS "uuid-ossp";
CREATE TABLE IF NOT EXISTS users (
id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
username VARCHAR(255) UNIQUE NOT NULL,
email VARCHAR(255) UNIQUE NOT NULL,
password_hash VARCHAR(255) NOT NULL,
is_active BOOLEAN DEFAULT true,
created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP
);
CREATE TABLE IF NOT EXISTS bots (
id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
name VARCHAR(255) NOT NULL,
llm_provider VARCHAR(100) NOT NULL,
config JSONB DEFAULT '{}',
is_active BOOLEAN DEFAULT true,
created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP
);
-- One session per (user, bot).
CREATE TABLE IF NOT EXISTS user_sessions (
id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
user_id UUID NOT NULL REFERENCES users(id) ON DELETE CASCADE,
bot_id UUID NOT NULL REFERENCES bots(id) ON DELETE CASCADE,
title VARCHAR(500) NOT NULL,
context_data JSONB DEFAULT '{}',
created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
UNIQUE(user_id, bot_id)
);
-- Messages are stored encrypted; message_index orders them within a session.
CREATE TABLE IF NOT EXISTS message_history (
id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
session_id UUID NOT NULL REFERENCES user_sessions(id) ON DELETE CASCADE,
user_id UUID NOT NULL REFERENCES users(id) ON DELETE CASCADE,
role VARCHAR(50) NOT NULL,
content_encrypted TEXT NOT NULL,
message_type VARCHAR(50) DEFAULT 'text',
message_index INTEGER NOT NULL,
created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP
);
CREATE INDEX IF NOT EXISTS idx_user_sessions_user_id ON user_sessions(user_id);
CREATE INDEX IF NOT EXISTS idx_user_sessions_bot_id ON user_sessions(bot_id);
CREATE INDEX IF NOT EXISTS idx_message_history_session_id ON message_history(session_id);
CREATE INDEX IF NOT EXISTS idx_message_history_user_id ON message_history(user_id);
CREATE INDEX IF NOT EXISTS idx_message_history_created_at ON message_history(created_at);
-- Seed rows; idempotent via ON CONFLICT DO NOTHING.
INSERT INTO bots (id, name, llm_provider)
VALUES ('00000000-0000-0000-0000-000000000000', 'Default Bot', 'mock')
ON CONFLICT (id) DO NOTHING;
-- NOTE(review): hard-coded demo credential (argon2id hash) - confirm this
-- never ships to production databases.
INSERT INTO users (id, username, email, password_hash)
VALUES ('00000000-0000-0000-0000-000000000001', 'demo', 'demo@example.com', '$argon2id$v=19$m=19456,t=2,p=1$c29tZXNhbHQ$RdescudvJCsgt3ub+b+dWRWJTmaaJObG')
ON CONFLICT (id) DO NOTHING;

2625
scripts/dev/llm_context.txt Normal file

File diff suppressed because it is too large Load diff

35
scripts/dev/llm_fix.sh Executable file
View file

@ -0,0 +1,35 @@
#!/bin/bash
# Concatenate prompt files, Cargo.toml, selected Rust sources and the
# project tree into one llm_context.txt for LLM consumption.
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="$(cd "$SCRIPT_DIR/../.." && pwd)"
OUTPUT_FILE="$SCRIPT_DIR/llm_context.txt"
echo "Consolidated LLM Context" > "$OUTPUT_FILE"
# Anchored at PROJECT_ROOT: the previous "../../" paths resolved relative
# to the caller's CWD, so the script only worked when run from scripts/dev.
prompts=(
"$PROJECT_ROOT/prompts/dev/general.md"
"$PROJECT_ROOT/Cargo.toml"
"$PROJECT_ROOT/prompts/dev/fix.md"
)
for file in "${prompts[@]}"; do
cat "$file" >> "$OUTPUT_FILE"
echo "" >> "$OUTPUT_FILE"
done
dirs=(
"src/shared"
"src/bot"
"src/session"
"src/tools"
)
for dir in "${dirs[@]}"; do
# -r / IFS=: keep paths with spaces or backslashes intact.
find "$PROJECT_ROOT/$dir" -name "*.rs" | while IFS= read -r file; do
cat "$file" >> "$OUTPUT_FILE"
echo "" >> "$OUTPUT_FILE"
done
done
cd "$PROJECT_ROOT"
# Append the Rust source tree, minus target/lockfiles and dir-count noise.
tree -P '*.rs' -I 'target|*.lock' --prune | grep -v '[0-9] directories$' >> "$OUTPUT_FILE"

View file

@ -0,0 +1,2 @@
# apt install tree
# Print the Rust source tree, excluding build output and lockfiles, and
# strip the trailing "N directories" summary lines.
tree -P '*.rs' -I 'target|*.lock' --prune | grep -v '[0-9] directories$'

View file

@ -0,0 +1,27 @@
# Create a MinIO user for a bot and attach a policy limited to its own
# "pragmatismo-<bot>.gbai" bucket.
# NOTE(review): BOT_ID is exported empty - set it before running, and the
# alias credentials (user/pass) are placeholders.
export BOT_ID=
./mc alias set minio http://localhost:9000 user pass
./mc admin user add minio $BOT_ID
cat > $BOT_ID-policy.json <<EOF
{
"Version": "2012-10-17",
"Statement": [
{
"Effect": "Allow",
"Action": [
"s3:ListBucket",
"s3:GetObject",
"s3:PutObject",
"s3:DeleteObject"
],
"Resource": [
"arn:aws:s3:::pragmatismo-$BOT_ID.gbai",
"arn:aws:s3:::pragmatismo-$BOT_ID.gbai/*"
]
}
]
}
EOF
./mc admin policy create minio $BOT_ID-policy $BOT_ID-policy.json
./mc admin policy attach minio $BOT_ID-policy --user $BOT_ID

View file

@ -0,0 +1,29 @@
# Disk usage report: host filesystems, then root-fs usage per LXC container.
df -h
printf "%-20s %-10s %-10s %-10s %-6s %s\n" "CONTAINER" "USED" "AVAIL" "TOTAL" "USE%" "MOUNT"
for container in $(lxc list -c n --format csv); do
disk_info=$(lxc exec $container -- df -h / --output=used,avail,size,pcent | tail -n 1)
printf "%-20s %s\n" "$container" "$disk_info"
done
# NOTE(review): the shebang below mid-file suggests two scripts were
# concatenated; consider splitting this tenant-size report into its own file.
#!/bin/bash
# Directory to analyze
TARGET_DIR="/opt/gbo/tenants/pragmatismo"
echo "Calculating sizes for directories in $TARGET_DIR..."
echo ""
# Check if directory exists
if [ ! -d "$TARGET_DIR" ]; then
echo "Error: Directory $TARGET_DIR does not exist"
exit 1
fi
# Get the size of each subdirectory
echo "Directory Size Report:"
echo "----------------------"
du -h --max-depth=1 "$TARGET_DIR" | sort -hr | awk -F'\t' '{printf "%-50s %s\n", $2, $1}'
echo ""
echo "Total size:"
du -sh "$TARGET_DIR"

71
scripts/utils/cleaner.sh Executable file
View file

@ -0,0 +1,71 @@
#!/bin/bash
# Cleanup script for Ubuntu Server and LXC containers
# Run with sudo privileges
echo "Starting system cleanup..."
### Host System Cleanup ###
echo -e "\n[ HOST SYSTEM CLEANUP ]"
# Package manager cache
echo "Cleaning package cache..."
apt clean
apt autoclean
apt autoremove -y
# Journal logs
echo "Cleaning journal logs..."
journalctl --vacuum-time=2d 2>/dev/null
# Temporary files
echo "Cleaning temporary files..."
rm -rf /tmp/* /var/tmp/*
# Thumbnail cache
echo "Cleaning thumbnail cache..."
rm -rf ~/.cache/thumbnails/* /root/.cache/thumbnails/*
# DNS cache
echo "Flushing DNS cache..."
systemd-resolve --flush-caches 2>/dev/null || true
# Old kernels (keep 2 latest)
# NOTE(review): the awk pattern and head arithmetic here are fragile;
# verify on a test host before relying on it to keep the running kernel.
echo "Removing old kernels..."
apt purge -y $(dpkg -l | awk '/^ii linux-image-*/{print $2}' | grep -v $(uname -r) | head -n -2) 2>/dev/null
# Crash reports
echo "Clearing crash reports..."
rm -f /var/crash/*
### LXC Containers Cleanup ###
echo -e "\n[ LXC CONTAINERS CLEANUP ]"
# Check if LXC is installed
if command -v lxc >/dev/null 2>&1; then
for container in $(lxc list -c n --format csv | grep -v "^$"); do
echo -e "\nCleaning container: $container"
# Execute cleanup commands in container
lxc exec "$container" -- bash -c "
echo 'Cleaning package cache...'
apt clean && apt autoclean && apt autoremove -y
echo 'Cleaning temporary files...'
rm -rf /tmp/* /var/tmp/*
echo 'Cleaning logs...'
rm -rf /opt/gbo/logs/*
echo 'Cleaning journal logs...'
journalctl --vacuum-time=1d 2>/dev/null || true
echo 'Cleaning thumbnail cache...'
rm -rf /home/*/.cache/thumbnails/* /root/.cache/thumbnails/*
" 2>/dev/null
done
else
echo "LXC not installed, skipping container cleanup."
fi
echo -e "\nCleanup completed!"

View file

@ -0,0 +1,6 @@
# Print per-container root-fs usage, then the 50 largest entries under CWD.
# </dev/null keeps lxc exec from consuming the while-loop stdin.
lxc list --format json | jq -r '.[].name' | while read container; do
echo -n "$container: "
lxc exec $container -- df -h / --output=used < /dev/null | tail -n1
done
du -h --max-depth=1 "." 2>/dev/null | sort -rh | head -n 50 | awk '{printf "%-10s %s\n", $1, $2}'

View file

@ -0,0 +1,8 @@
# List the resource group's public IPs with their reverse-DNS names, then
# set the reverse FQDN on one of them.
az network public-ip list --resource-group "$CLOUD_GROUP" \
    --query "[].{Name:name, IP:ipAddress, ReverseDNS:dnsSettings.reverseFqdn}" \
    -o table
# Continuation backslashes added: without them each option line below ran
# as a separate (failing) shell command and the update never executed.
az network public-ip update --resource-group "$CLOUD_GROUP" \
    --name "pip-network-adapter-name" \
    --reverse-fqdn "outbound14.domain.com.br"

View file

@ -0,0 +1,65 @@
# Build Collabora Online (coolwsd) from source against the prebuilt
# LibreOffice core assets, then install it as a systemd service.
sudo apt install -y cloud-guest-utils e2fsprogs
apt install -y make g++ build-essential
apt install -y openjdk-17-jdk ant
apt install -y sudo systemd wget zip procps ccache
apt install -y automake bison flex git gperf graphviz junit4 libtool m4 nasm
apt install -y libcairo2-dev libjpeg-dev libegl1-mesa-dev libfontconfig1-dev \
libgl1-mesa-dev libgif-dev libgtk-3-dev librsvg2-dev libpango1.0-dev
apt install -y libcap-dev libcap2-bin libkrb5-dev libpcap0.8-dev openssl libssl-dev
apt install -y libxcb-dev libx11-xcb-dev libxkbcommon-x11-dev libxtst-dev \
libxrender-dev libxslt1-dev libxt-dev xsltproc
apt install -y libcunit1-dev libcppunit-dev libpam0g-dev libcups2-dev libzstd-dev uuid-runtime
apt install -y python3-dev python3-lxml python3-pip python3-polib
apt install -y nodejs npm
apt install -y libpoco-dev libpococrypto80
apt install -y libreoffice-dev
# Prebuilt LibreOffice core assets used as the LOKit backend.
mkdir -p /opt/lo && cd /opt/lo
wget https://github.com/CollaboraOnline/online/releases/download/for-code-assets/core-co-24.04-assets.tar.gz
tar xf core-co-24.04-assets.tar.gz && rm core-co-24.04-assets.tar.gz
useradd cool -G sudo
mkdir -p /opt/cool && chown cool:cool /opt/cool
cd /opt/cool
sudo -Hu cool git clone https://github.com/CollaboraOnline/online.git
cd online && sudo -Hu cool ./autogen.sh
export CPPFLAGS=-I/opt/lo/include
export LDFLAGS=-L/opt/lo/instdir/program
./configure --with-lokit-path=/opt/lo --with-lo-path=/opt/lo/instdir --with-poco-includes=/usr/local/include --with-poco-libs=/usr/local/lib
sudo -Hu cool make -j$(nproc)
make install
mkdir -p /etc/coolwsd /usr/local/var/cache/coolwsd
chown cool:cool /usr/local/var/cache/coolwsd
# NOTE(review): admin_pwd is generated here but the unit file references
# DOC_EDITOR_ADMIN_PWD, and the final echo prints admin_pwd - confirm
# which variable is the intended admin password source.
admin_pwd=$(openssl rand -hex 6)
cat <<EOT > /lib/systemd/system/coolwsd.service
[Unit]
Description=Collabora Online WebSocket Daemon
After=network.target
[Service]
ExecStart=/opt/cool/online/coolwsd --o:sys_template_path=/opt/cool/online/systemplate \
--o:lo_template_path=/opt/lo/instdir --o:child_root_path=/opt/cool/online/jails \
--o:admin_console.username=admin --o:admin_console.password=$DOC_EDITOR_ADMIN_PWD \
--o:ssl.enable=false
User=cool
[Install]
WantedBy=multi-user.target
EOT
systemctl daemon-reload
systemctl enable coolwsd.service
systemctl start coolwsd.service
"
# NOTE(review): the stray closing quote above suggests this file was cut
# out of a larger lxc exec wrapper - confirm the script is complete.
echo "Installation complete!"
echo "Admin password: $admin_pwd"
echo "Access at: https://localhost:9980"

View file

@ -0,0 +1,53 @@
#!/usr/bin/env bash
# Apply per-container memory/CPU limits chosen by name pattern, falling
# back to defaults for containers that match no pattern.
declare -A container_limits=(
    # Pattern        Memory  CPU Allowance
    ["*tables*"]="4096MB:100ms/100ms"
    ["*dns*"]="2048MB:100ms/100ms"
    ["*doc-editor*"]="512MB:10ms/100ms"
    ["*proxy*"]="2048MB:100ms/100ms"
    ["*directory*"]="1024MB:50ms/100ms"
    ["*drive*"]="4096MB:50ms/100ms"
    ["*email*"]="4096MB:100ms/100ms"
    ["*webmail*"]="4096MB:100ms/100ms"
    ["*bot*"]="4096MB:50ms/100ms"
    ["*meeting*"]="4096MB:100ms/100ms"
    ["*alm*"]="512MB:50ms/100ms"
    ["*alm-ci*"]="4096MB:100ms/100ms"
    ["*system*"]="4096MB:50ms/100ms"
    ["*mailer*"]="4096MB:25ms/100ms"
)
# Default values (for containers that don't match any pattern)
DEFAULT_MEMORY="1024MB"
DEFAULT_CPU_ALLOWANCE="15ms/100ms"
CPU_COUNT=2
CPU_PRIORITY=10
# Loop inversion fix: the original iterated patterns on the outside, echoed
# "$container" before it was assigned, and `break`-ed after the FIRST
# container matching each pattern - so later containers matching the same
# pattern were never configured and the defaults were dead code. Iterating
# containers on the outside visits each exactly once.
# NOTE(review): assoc-array iteration order is unspecified, so when several
# patterns match (e.g. *alm* vs *alm-ci*) the winner is arbitrary - consider
# an ordered pattern list if precedence matters.
for container in $(lxc list -c n --format csv); do
    echo "Configuring $container..."
    memory=$DEFAULT_MEMORY
    cpu_allowance=$DEFAULT_CPU_ALLOWANCE
    for pattern in "${!container_limits[@]}"; do
        if [[ $container == $pattern ]]; then
            IFS=':' read -r memory cpu_allowance <<< "${container_limits[$pattern]}"
            break # first matching pattern wins
        fi
    done
    # Apply configuration
    lxc config set "$container" limits.memory "$memory"
    lxc config set "$container" limits.cpu.allowance "$cpu_allowance"
    lxc config set "$container" limits.cpu "$CPU_COUNT"
    lxc config set "$container" limits.cpu.priority "$CPU_PRIORITY"
    echo "Restarting $container..."
    lxc restart "$container"
    lxc config show "$container" | grep -E "memory|cpu"
done

View file

@ -0,0 +1,7 @@
# Grow a container's root disk quota, then let the backing ZFS pool expand
# into the resized loop image and show the result.
lxc config device override $CONTAINER_NAME root
lxc config device set $CONTAINER_NAME root size 6GB
zpool set autoexpand=on default
zpool online -e default /var/snap/lxd/common/lxd/disks/default.img
zpool list
zfs list

View file

@ -0,0 +1,6 @@
# Host
# Set the LXD trust password so remote clients can register against this host.
sudo lxc config set core.trust_password "$LXC_TRUST_PASSWORD"
# ALM-CI
# Register this host as remote "bot" from the CI machine.
# NOTE(review): "10.16.164.?" looks like a placeholder last octet — fill in
# the real host address before running.
lxc remote add bot 10.16.164.? --accept-certificate --password "$LXC_TRUST_PASSWORD"

10
scripts/utils/startup.sh Normal file
View file

@ -0,0 +1,10 @@
#!/bin/bash
# Disable shell timeout
# Remove any pre-existing TMOUT settings, then force TMOUT=0 for all login shells.
sed -i '/TMOUT/d' /etc/profile /etc/bash.bashrc /etc/profile.d/*
echo 'export TMOUT=0' > /etc/profile.d/notimeout.sh
chmod +x /etc/profile.d/notimeout.sh
# Best-effort tweak of the sshd PAM stack; stderr is discarded on systems
# without a pam_exec line.
sed -i '/pam_exec.so/s/quiet/quiet set_timeout=0/' /etc/pam.d/sshd 2>/dev/null
# Apply the new profile settings to the current shell.
source /etc/profile

137
src/auth/mod.rs Normal file
View file

@ -0,0 +1,137 @@
use argon2::{
password_hash::{rand_core::OsRng, PasswordHash, PasswordHasher, PasswordVerifier, SaltString},
Argon2,
};
use redis::Client;
use sqlx::{PgPool, Row}; // <-- required for .get()
use std::sync::Arc;
use uuid::Uuid;
/// Authentication service: verifies and manages user credentials stored in
/// Postgres, optionally invalidating cached auth entries in Redis.
pub struct AuthService {
    pub pool: PgPool, // primary Postgres connection pool
    pub redis: Option<Arc<Client>>, // optional Redis client, used for cache invalidation
}
impl AuthService {
    /// Builds the service from a Postgres pool and an optional Redis client
    /// (the latter is used only for cache invalidation).
    #[allow(clippy::new_without_default)]
    pub fn new(pool: PgPool, redis: Option<Arc<Client>>) -> Self {
        Self { pool, redis }
    }

    /// Hashes `password` with Argon2 and a freshly generated random salt.
    ///
    /// Shared by `create_user` and `update_user_password` (the original
    /// duplicated this salt/hash/match boilerplate in both). The argon2
    /// error is wrapped in `std::io::Error` so it boxes as `dyn Error`
    /// like the other error paths in this module.
    fn hash_password(password: &str) -> Result<String, Box<dyn std::error::Error>> {
        let salt = SaltString::generate(&mut OsRng);
        Argon2::default()
            .hash_password(password.as_bytes(), &salt)
            .map(|hash| hash.to_string())
            .map_err(|e| {
                Box::new(std::io::Error::new(
                    std::io::ErrorKind::Other,
                    e.to_string(),
                )) as Box<dyn std::error::Error>
            })
    }

    /// Checks `password` against the stored hash of an active user.
    ///
    /// Returns `Ok(Some(id))` on a match and `Ok(None)` when the user is
    /// unknown, inactive, or the password is wrong (deliberately
    /// indistinguishable); `Err` only on database failures.
    pub async fn verify_user(
        &self,
        username: &str,
        password: &str,
    ) -> Result<Option<Uuid>, Box<dyn std::error::Error>> {
        let user = sqlx::query(
            "SELECT id, password_hash FROM users WHERE username = $1 AND is_active = true",
        )
        .bind(username)
        .fetch_optional(&self.pool)
        .await?;
        if let Some(row) = user {
            let user_id: Uuid = row.get("id");
            let password_hash: String = row.get("password_hash");
            // A malformed stored hash counts as a failed login, not an error.
            if let Ok(parsed_hash) = PasswordHash::new(&password_hash) {
                if Argon2::default()
                    .verify_password(password.as_bytes(), &parsed_hash)
                    .is_ok()
                {
                    return Ok(Some(user_id));
                }
            }
        }
        Ok(None)
    }

    /// Inserts a new user with a hashed password and returns the new row id.
    pub async fn create_user(
        &self,
        username: &str,
        email: &str,
        password: &str,
    ) -> Result<Uuid, Box<dyn std::error::Error>> {
        let password_hash = Self::hash_password(password)?;
        let row = sqlx::query(
            "INSERT INTO users (username, email, password_hash) VALUES ($1, $2, $3) RETURNING id",
        )
        .bind(username)
        .bind(email)
        .bind(&password_hash)
        .fetch_one(&self.pool)
        .await?;
        Ok(row.get::<Uuid, _>("id"))
    }

    /// Drops the cached auth entry for `username` when Redis is configured;
    /// a no-op otherwise.
    pub async fn delete_user_cache(
        &self,
        username: &str,
    ) -> Result<(), Box<dyn std::error::Error>> {
        if let Some(redis_client) = &self.redis {
            let mut conn = redis_client.get_multiplexed_async_connection().await?;
            let cache_key = format!("auth:user:{}", username);
            let _: () = redis::Cmd::del(&cache_key).query_async(&mut conn).await?;
        }
        Ok(())
    }

    /// Re-hashes and stores a new password for `user_id`, then invalidates
    /// the user's cached auth entry so stale credentials cannot be reused.
    pub async fn update_user_password(
        &self,
        user_id: Uuid,
        new_password: &str,
    ) -> Result<(), Box<dyn std::error::Error>> {
        let password_hash = Self::hash_password(new_password)?;
        sqlx::query("UPDATE users SET password_hash = $1, updated_at = NOW() WHERE id = $2")
            .bind(&password_hash)
            .bind(user_id)
            .execute(&self.pool)
            .await?;
        if let Some(user_row) = sqlx::query("SELECT username FROM users WHERE id = $1")
            .bind(user_id)
            .fetch_optional(&self.pool)
            .await?
        {
            let username: String = user_row.get("username");
            self.delete_user_cache(&username).await?;
        }
        Ok(())
    }
}
{{END_REWRITTEN_CODE}}
impl Clone for AuthService {
fn clone(&self) -> Self {
Self {
pool: self.pool.clone(),
redis: self.redis.clone(),
}
}
}

197
src/automation/mod.rs Normal file
View file

@ -0,0 +1,197 @@
use crate::basic::ScriptService;
use crate::shared::models::automation_model::{Automation, TriggerKind};
use crate::shared::state::AppState;
use chrono::Datelike;
use chrono::Timelike;
use chrono::{DateTime, Utc};
use log::{error, info};
use std::path::Path;
use tokio::time::Duration;
use uuid::Uuid;
/// Background service that polls the `system_automations` table and fires
/// automations whose trigger condition (table change or cron schedule) is met.
pub struct AutomationService {
    state: AppState, // shared application state (db pools, config, ...)
    scripts_dir: String, // base directory automation scripts are loaded from
}
impl AutomationService {
    /// Builds the service; `scripts_dir` is the root that `execute_action`
    /// resolves script paths against.
    pub fn new(state: AppState, scripts_dir: &str) -> Self {
        Self {
            state,
            scripts_dir: scripts_dir.to_string(),
        }
    }
    /// Consumes the service and starts the polling loop on the Tokio
    /// runtime: one cycle every 5 seconds, errors logged and swallowed so
    /// the loop never dies.
    pub fn spawn(self) -> tokio::task::JoinHandle<()> {
        tokio::spawn(async move {
            let mut interval = tokio::time::interval(Duration::from_secs(5));
            let mut last_check = Utc::now();
            loop {
                interval.tick().await;
                if let Err(e) = self.run_cycle(&mut last_check).await {
                    error!("Automation cycle error: {}", e);
                }
            }
        })
    }
    /// One polling cycle: load active automations, fire table-change
    /// triggers for rows modified since `last_check`, evaluate cron
    /// schedules, then advance `last_check`.
    async fn run_cycle(
        &self,
        last_check: &mut DateTime<Utc>,
    ) -> Result<(), Box<dyn std::error::Error>> {
        let automations = self.load_active_automations().await?;
        self.check_table_changes(&automations, *last_check).await;
        self.process_schedules(&automations).await;
        *last_check = Utc::now();
        Ok(())
    }
    /// Loads every active automation row from the system database
    /// (`state.db`); fails with `PoolClosed` when no pool is configured.
    async fn load_active_automations(&self) -> Result<Vec<Automation>, sqlx::Error> {
        if let Some(pool) = &self.state.db {
            sqlx::query_as::<_, Automation>(
                r#"
            SELECT id, kind, target, schedule, param, is_active, last_triggered
            FROM public.system_automations
            WHERE is_active = true
            "#,
            )
            .fetch_all(pool)
            .await
        } else {
            Err(sqlx::Error::PoolClosed)
        }
    }
    /// Fires table-triggered automations: counts rows in the target table
    /// (on the customer pool, `state.db_custom`) whose `created_at` /
    /// `updated_at` is newer than `since`, and runs the action when any
    /// exist.
    ///
    /// NOTE(review): `table` is interpolated into the SQL text — automation
    /// targets must come from trusted configuration only.
    async fn check_table_changes(&self, automations: &[Automation], since: DateTime<Utc>) {
        if let Some(pool) = &self.state.db_custom {
            for automation in automations {
                if let Some(trigger_kind) = TriggerKind::from_i32(automation.kind) {
                    if matches!(
                        trigger_kind,
                        TriggerKind::TableUpdate
                            | TriggerKind::TableInsert
                            | TriggerKind::TableDelete
                    ) {
                        if let Some(table) = &automation.target {
                            // Inserts are detected via created_at; updates and
                            // deletes via updated_at.
                            let column = match trigger_kind {
                                TriggerKind::TableInsert => "created_at",
                                _ => "updated_at",
                            };
                            let query =
                                format!("SELECT COUNT(*) FROM {} WHERE {} > $1", table, column);
                            match sqlx::query_scalar::<_, i64>(&query)
                                .bind(since)
                                .fetch_one(pool)
                                .await
                            {
                                Ok(count) => {
                                    if count > 0 {
                                        self.execute_action(&automation.param).await;
                                        self.update_last_triggered(automation.id).await;
                                    }
                                }
                                Err(e) => {
                                    error!("Error checking changes for table {}: {}", table, e);
                                }
                            }
                        }
                    }
                }
            }
        }
    }
    /// Fires scheduled automations whose 5-field cron pattern matches "now".
    ///
    /// NOTE(review): the cycle runs every 5 seconds and `last_triggered` is
    /// not consulted here, so a matching minute can fire the same action
    /// multiple times — confirm whether that is intended.
    async fn process_schedules(&self, automations: &[Automation]) {
        let now = Utc::now().timestamp();
        for automation in automations {
            if let Some(TriggerKind::Scheduled) = TriggerKind::from_i32(automation.kind) {
                if let Some(pattern) = &automation.schedule {
                    if Self::should_run_cron(pattern, now) {
                        self.execute_action(&automation.param).await;
                        self.update_last_triggered(automation.id).await;
                    }
                }
            }
        }
    }
    /// Records the firing time on the automation row; failures are logged
    /// and otherwise ignored.
    async fn update_last_triggered(&self, automation_id: Uuid) {
        if let Some(pool) = &self.state.db {
            if let Err(e) = sqlx::query!(
                "UPDATE public.system_automations SET last_triggered = $1 WHERE id = $2",
                Utc::now(),
                automation_id
            )
            .execute(pool)
            .await
            {
                error!(
                    "Failed to update last_triggered for automation {}: {}",
                    automation_id, e
                );
            }
        }
    }
    /// Returns true when the 5-field cron `pattern`
    /// (minute hour day month weekday) matches `timestamp` (UTC).
    ///
    /// NOTE(review): weekday uses `num_days_from_monday` (Mon = 0) whereas
    /// standard cron counts Sunday as 0 — confirm which convention the
    /// stored schedules use. The `unwrap` is safe only for timestamps in
    /// chrono's representable range, which `Utc::now()` always is.
    fn should_run_cron(pattern: &str, timestamp: i64) -> bool {
        let parts: Vec<&str> = pattern.split_whitespace().collect();
        if parts.len() != 5 {
            return false;
        }
        let dt = chrono::DateTime::from_timestamp(timestamp, 0).unwrap();
        let minute = dt.minute() as i32;
        let hour = dt.hour() as i32;
        let day = dt.day() as i32;
        let month = dt.month() as i32;
        let weekday = dt.weekday().num_days_from_monday() as i32;
        [minute, hour, day, month, weekday]
            .iter()
            .enumerate()
            .all(|(i, &val)| Self::cron_part_matches(parts[i], val))
    }
    /// Matches a single cron field: supports `*`, `*/step`, and plain
    /// numbers only (no ranges `1-5` or lists `1,2,3`).
    fn cron_part_matches(part: &str, value: i32) -> bool {
        if part == "*" {
            return true;
        }
        if part.contains('/') {
            let parts: Vec<&str> = part.split('/').collect();
            if parts.len() != 2 {
                return false;
            }
            let step: i32 = parts[1].parse().unwrap_or(1);
            if parts[0] == "*" {
                return value % step == 0;
            }
        }
        part.parse::<i32>().map_or(false, |num| num == value)
    }
    /// Loads the script file `param` (relative to `scripts_dir`), compiles
    /// it with the BASIC `ScriptService` and runs it; all failures are
    /// logged, never propagated.
    async fn execute_action(&self, param: &str) {
        let full_path = Path::new(&self.scripts_dir).join(param);
        match tokio::fs::read_to_string(&full_path).await {
            Ok(script_content) => {
                info!("Executing action with param: {}", param);
                let script_service = ScriptService::new(&self.state.clone());
                match script_service.compile(&script_content) {
                    Ok(ast) => match script_service.run(&ast) {
                        Ok(result) => info!("Script executed successfully: {:?}", result),
                        Err(e) => error!("Error executing script: {}", e),
                    },
                    Err(e) => error!("Error compiling script: {}", e),
                }
            }
            Err(e) => {
                error!("Failed to execute action {}: {}", full_path.display(), e);
            }
        }
    }
}

View file

@ -0,0 +1,69 @@
use crate::email::save_email_draft;
use crate::email::{fetch_latest_sent_to, SaveDraftRequest};
use crate::shared::state::AppState;
use rhai::Dynamic;
use rhai::Engine;
/// Registers the `CREATE_DRAFT to, subject, reply_text` statement with the
/// BASIC engine. The statement saves an e-mail draft, quoting the latest
/// message previously sent to the recipient when one exists.
pub fn create_draft_keyword(state: &AppState, engine: &mut Engine) {
    let owned_state = state.clone();
    engine
        .register_custom_syntax(
            &["CREATE_DRAFT", "$expr$", ",", "$expr$", ",", "$expr$"],
            true, // statement position
            move |context, inputs| {
                // Evaluate the three comma-separated argument expressions.
                let recipient = context.eval_expression_tree(&inputs[0])?.to_string();
                let subject_line = context.eval_expression_tree(&inputs[1])?.to_string();
                let body_text = context.eval_expression_tree(&inputs[2])?.to_string();
                // Bridge into async: block this worker thread on the future,
                // same pattern as the FIND keyword.
                let outcome = tokio::task::block_in_place(|| {
                    tokio::runtime::Handle::current().block_on(execute_create_draft(
                        &owned_state,
                        &recipient,
                        &subject_line,
                        &body_text,
                    ))
                })
                .map_err(|e| format!("Draft creation error: {}", e))?;
                Ok(Dynamic::from(outcome))
            },
        )
        .unwrap();
}
/// Builds the draft body and saves it through the configured e-mail account.
///
/// When a previously sent message to `to` can be fetched and is non-empty,
/// `reply_text` is placed above it separated by a horizontal rule so the
/// draft reads like a threaded reply; otherwise the draft contains only
/// `reply_text`.
///
/// # Errors
/// Returns the e-mail backend error message as a `String`.
async fn execute_create_draft(
    state: &AppState,
    to: &str,
    subject: &str,
    reply_text: &str,
) -> Result<String, String> {
    // Borrow the e-mail configuration once; the original cloned the whole
    // config (and unwrapped it) twice per call.
    let email_config = &state
        .config
        .as_ref()
        .expect("email configuration must be initialized")
        .email;
    let email_body = match fetch_latest_sent_to(email_config, to).await {
        Ok(previous) if !previous.is_empty() => {
            let email_separator = "<br><hr><br>"; // Horizontal rule in HTML
            let quoted_previous = previous.replace("\n", "<br>");
            // NOTE(review): this literal substitution of "FIX" looks like a
            // domain-specific convention — confirm it is intentional.
            let fixed_reply_text = reply_text.replace("FIX", "Fixed");
            format!(
                "{}{}{}",
                fixed_reply_text, email_separator, quoted_previous
            )
        }
        // No previous message (or fetch failed): use the reply text alone.
        _ => reply_text.to_string(),
    };
    // Create and save draft
    let draft_request = SaveDraftRequest {
        to: to.to_string(),
        subject: subject.to_string(),
        cc: None,
        text: email_body,
    };
    save_email_draft(email_config, &draft_request)
        .await
        .map(|_| "Draft saved successfully".to_string())
        .map_err(|e| e.to_string())
}

View file

@ -0,0 +1,94 @@
use log::info;
use rhai::Dynamic;
use rhai::Engine;
use std::error::Error;
use std::fs;
use std::io::Read;
use std::path::PathBuf;
use crate::shared::state::AppState;
use crate::shared::utils;
/// Registers the `CREATE_SITE alias, template_dir, prompt` statement: clones
/// an HTML template directory into a new site whose `index.html` is
/// generated by the LLM from the template contents plus the user prompt.
pub fn create_site_keyword(state: &AppState, engine: &mut Engine) {
    let captured_state = state.clone();
    engine
        .register_custom_syntax(
            &["CREATE_SITE", "$expr$", ",", "$expr$", ",", "$expr$"],
            true,
            move |context, inputs| {
                // Defensive arity check (the syntax spec already guarantees 3).
                if inputs.len() < 3 {
                    return Err("Not enough arguments for CREATE SITE".into());
                }
                let site_alias = context.eval_expression_tree(&inputs[0])?;
                let template_folder = context.eval_expression_tree(&inputs[1])?;
                let user_prompt = context.eval_expression_tree(&inputs[2])?;
                let app_config = captured_state
                    .config
                    .as_ref()
                    .expect("Config must be initialized")
                    .clone();
                // Drive the async site builder to completion on this thread.
                let created_path = tokio::task::block_in_place(|| {
                    tokio::runtime::Handle::current().block_on(create_site(
                        &app_config,
                        site_alias,
                        template_folder,
                        user_prompt,
                    ))
                })
                .map_err(|e| format!("Site creation failed: {}", e))?;
                Ok(Dynamic::from(created_path))
            },
        )
        .unwrap();
}
/// Creates a new site directory `alias` under `config.site_path` by feeding
/// every `.html` file from `template_dir` plus `prompt` to the LLM and
/// writing the generated page as `<alias>/index.html`.
///
/// Returns the path of the created site directory.
///
/// # Errors
/// Fails on filesystem errors or when the LLM call fails.
async fn create_site(
    config: &crate::config::AppConfig,
    alias: Dynamic,
    template_dir: Dynamic,
    prompt: Dynamic,
) -> Result<String, Box<dyn Error + Send + Sync>> {
    // Resolve source and destination below the configured site root.
    let base_path = PathBuf::from(&config.site_path);
    let template_path = base_path.join(template_dir.to_string());
    let alias_path = base_path.join(alias.to_string());
    // Create destination directory (no-op when it already exists).
    fs::create_dir_all(&alias_path).map_err(|e| e.to_string())?;
    // Concatenate every HTML template file, separated by a marker the LLM
    // can use to tell the templates apart.
    let mut combined_content = String::new();
    for entry in fs::read_dir(&template_path).map_err(|e| e.to_string())? {
        let path = entry.map_err(|e| e.to_string())?.path();
        if path.extension().map_or(false, |ext| ext == "html") {
            // fs::read_to_string replaces the manual File::open + read dance.
            let contents = fs::read_to_string(&path).map_err(|e| e.to_string())?;
            combined_content.push_str(&contents);
            combined_content.push_str("\n\n--- TEMPLATE SEPARATOR ---\n\n");
        }
    }
    // Combine template content with prompt
    let full_prompt = format!(
        "TEMPLATE FILES:\n{}\n\nPROMPT: {}\n\nGenerate a new HTML file cloning all previous TEMPLATE (keeping only the local _assets libraries use, no external resources), but turning this into this prompt:",
        combined_content,
        prompt.to_string()
    );
    // Call LLM with the combined prompt
    info!("Asking LLM to create site.");
    let llm_result = utils::call_llm(&full_prompt, &config.ai).await?;
    // Write the generated HTML file
    let index_path = alias_path.join("index.html");
    fs::write(index_path, llm_result).map_err(|e| e.to_string())?;
    info!("Site created at: {}", alias_path.display());
    Ok(alias_path.to_string_lossy().into_owned())
}

View file

@ -0,0 +1,87 @@
use log::{error, info};
use rhai::Dynamic;
use rhai::Engine;
use serde_json::{json, Value};
use sqlx::PgPool;
use crate::shared::state::AppState;
use crate::shared::utils;
use crate::shared::utils::row_to_json;
use crate::shared::utils::to_array;
/// Registers the `FIND table, filter` expression: runs a filtered SELECT
/// against the customer database pool and yields the rows as a script array.
pub fn find_keyword(state: &AppState, engine: &mut Engine) {
    let pool_handle = state.db_custom.clone();
    engine
        .register_custom_syntax(&["FIND", "$expr$", ",", "$expr$"], false, {
            let pool_handle = pool_handle.clone();
            move |context, inputs| {
                let table = context.eval_expression_tree(&inputs[0])?.to_string();
                let condition = context.eval_expression_tree(&inputs[1])?.to_string();
                let pool = pool_handle.as_ref().unwrap();
                // Block the current worker thread on the async query instead
                // of spinning up a fresh runtime.
                let value = tokio::task::block_in_place(|| {
                    tokio::runtime::Handle::current()
                        .block_on(execute_find(pool, &table, &condition))
                })
                .map_err(|e| format!("DB error: {}", e))?;
                match value.get("results") {
                    Some(results) => {
                        let array = to_array(utils::json_value_to_dynamic(results));
                        Ok(Dynamic::from(array))
                    }
                    None => Err("No results".into()),
                }
            }
        })
        .unwrap();
}
/// Runs `SELECT * FROM <table> WHERE <filter> LIMIT 10` and returns the rows
/// as JSON under `"results"`, together with the echoed command metadata.
///
/// # Errors
/// Returns the filter-parsing or database error message as a `String`.
pub async fn execute_find(
    pool: &PgPool,
    table_str: &str,
    filter_str: &str,
) -> Result<Value, String> {
    info!(
        "Starting execute_find with table: {}, filter: {}",
        table_str, filter_str
    );
    let (where_clause, params) = utils::parse_filter(filter_str).map_err(|e| e.to_string())?;
    // SECURITY NOTE(review): `table_str` is interpolated directly into the
    // SQL text (table names cannot be bind parameters) — it must never come
    // from untrusted input. Filter VALUES go through bind parameters below.
    let query = format!(
        "SELECT * FROM {} WHERE {} LIMIT 10",
        table_str, where_clause
    );
    info!("Executing query: {}", query);
    // Bind every parameter produced by the filter parser. The previous
    // version bound only `params[0]`, which broke multi-condition filters
    // and panicked when the parser returned no parameters.
    let mut db_query = sqlx::query(&query);
    for param in &params {
        db_query = db_query.bind(param);
    }
    let rows = db_query.fetch_all(pool).await.map_err(|e| {
        error!("SQL execution error: {}", e);
        e.to_string()
    })?;
    info!("Query successful, got {} rows", rows.len());
    let mut results = Vec::new();
    for row in rows {
        results.push(row_to_json(row).map_err(|e| e.to_string())?);
    }
    Ok(json!({
        "command": "find",
        "table": table_str,
        "filter": filter_str,
        "results": results
    }))
}

185
src/basic/keywords/first.rs Normal file
View file

@ -0,0 +1,185 @@
use rhai::Dynamic;
use rhai::Engine;
/// Registers the `FIRST expr` expression, which evaluates `expr`, converts
/// it to a string and returns its first whitespace-delimited word (or the
/// empty string when there is none).
pub fn first_keyword(engine: &mut Engine) {
    engine
        .register_custom_syntax(&["FIRST", "$expr$"], false, {
            move |context, inputs| {
                let evaluated = context.eval_expression_tree(&inputs[0])?;
                let text = evaluated.to_string();
                // split_whitespace skips leading and repeated whitespace
                // (spaces, tabs, newlines), so the first item — when one
                // exists — is exactly the first word.
                let word = match text.split_whitespace().next() {
                    Some(first) => first.to_string(),
                    None => String::new(),
                };
                Ok(Dynamic::from(word))
            }
        })
        .unwrap();
}
// Unit tests for the FIRST keyword: word extraction across whitespace
// shapes, empty input, variables and expressions as arguments.
#[cfg(test)]
mod tests {
    use super::*;
    use rhai::Engine;
    // Builds an engine with only the FIRST keyword registered.
    fn setup_engine() -> Engine {
        let mut engine = Engine::new();
        first_keyword(&mut engine);
        engine
    }
    #[test]
    fn test_first_keyword_basic() {
        let engine = setup_engine();
        let result = engine
            .eval::<String>(
                r#"
            FIRST "hello world"
        "#,
            )
            .unwrap();
        assert_eq!(result, "hello");
    }
    #[test]
    fn test_first_keyword_single_word() {
        let engine = setup_engine();
        let result = engine
            .eval::<String>(
                r#"
            FIRST "single"
        "#,
            )
            .unwrap();
        assert_eq!(result, "single");
    }
    #[test]
    fn test_first_keyword_multiple_spaces() {
        let engine = setup_engine();
        let result = engine
            .eval::<String>(
                r#"
            FIRST "   leading spaces"
        "#,
            )
            .unwrap();
        assert_eq!(result, "leading");
    }
    // Empty and whitespace-only inputs yield the empty string.
    #[test]
    fn test_first_keyword_empty_string() {
        let engine = setup_engine();
        let result = engine
            .eval::<String>(
                r#"
            FIRST ""
        "#,
            )
            .unwrap();
        assert_eq!(result, "");
    }
    #[test]
    fn test_first_keyword_whitespace_only() {
        let engine = setup_engine();
        let result = engine
            .eval::<String>(
                r#"
            FIRST "   "
        "#,
            )
            .unwrap();
        assert_eq!(result, "");
    }
    #[test]
    fn test_first_keyword_with_tabs() {
        let engine = setup_engine();
        let result = engine
            .eval::<String>(
                r#"
            FIRST "	tab	separated	words"
        "#,
            )
            .unwrap();
        assert_eq!(result, "tab");
    }
    // FIRST also accepts variables and arbitrary expressions.
    #[test]
    fn test_first_keyword_with_variable() {
        let engine = setup_engine();
        let result = engine
            .eval::<String>(
                r#"
            let text = "variable test";
            FIRST text
        "#,
            )
            .unwrap();
        assert_eq!(result, "variable");
    }
    #[test]
    fn test_first_keyword_with_expression() {
        let engine = setup_engine();
        let result = engine
            .eval::<String>(
                r#"
            FIRST "one two " + "three four"
        "#,
            )
            .unwrap();
        assert_eq!(result, "one");
    }
    #[test]
    fn test_first_keyword_mixed_whitespace() {
        let engine = setup_engine();
        let result = engine
            .eval::<String>(
                r#"
            FIRST "  multiple   spaces   between   words  "
        "#,
            )
            .unwrap();
        assert_eq!(result, "multiple");
    }
    // Punctuation is not a word boundary — only whitespace is.
    #[test]
    fn test_first_keyword_special_characters() {
        let engine = setup_engine();
        let result = engine
            .eval::<String>(
                r#"
            FIRST "hello-world example"
        "#,
            )
            .unwrap();
        assert_eq!(result, "hello-world");
    }
}

View file

@ -0,0 +1,83 @@
use crate::shared::state::AppState;
use log::info;
use rhai::Dynamic;
use rhai::Engine;
/// Registers `FOR EACH ident IN collection ... NEXT ident` and `EXIT FOR`
/// with the BASIC engine.
///
/// `EXIT FOR` is implemented as a sentinel error whose exact message the
/// loop below recognizes and turns into a `break`.
pub fn for_keyword(_state: &AppState, engine: &mut Engine) {
    engine
        .register_custom_syntax(&["EXIT", "FOR"], false, |_context, _inputs| {
            Err("EXIT FOR".into())
        })
        .unwrap();
    engine
        .register_custom_syntax(
            &[
                "FOR", "EACH", "$ident$", "IN", "$expr$", "$block$", "NEXT", "$ident$",
            ],
            true, // We're modifying the scope by adding the loop variable
            |context, inputs| {
                // Get the iterator variable names
                // inputs: [0]=loop ident, [1]=collection expr, [2]=body block,
                // [3]=ident after NEXT.
                let loop_var = inputs[0].get_string_value().unwrap();
                let next_var = inputs[3].get_string_value().unwrap();
                // Verify variable names match
                if loop_var != next_var {
                    return Err(format!(
                        "NEXT variable '{}' doesn't match FOR EACH variable '{}'",
                        next_var, loop_var
                    )
                    .into());
                }
                // Evaluate the collection expression
                let collection = context.eval_expression_tree(&inputs[1])?;
                // Debug: Print the collection type
                info!("Collection type: {}", collection.type_name());
                let ccc = collection.clone();
                // Convert to array - with proper error handling
                let array = match collection.into_array() {
                    Ok(arr) => arr,
                    Err(err) => {
                        return Err(format!(
                            "foreach expected array, got {}: {}",
                            ccc.type_name(),
                            err
                        )
                        .into());
                    }
                };
                // Get the block as an expression tree
                let block = &inputs[2];
                // Remember original scope length so every exit path can
                // rewind the scope back to its pre-loop state.
                let orig_len = context.scope().len();
                for item in array {
                    // Push the loop variable into the scope
                    context.scope_mut().push(loop_var, item);
                    // Evaluate the block with the current scope
                    match context.eval_expression_tree(block) {
                        Ok(_) => (),
                        // Sentinel raised by EXIT FOR: matched by message.
                        Err(e) if e.to_string() == "EXIT FOR" => {
                            context.scope_mut().rewind(orig_len);
                            break;
                        }
                        Err(e) => {
                            // Rewind the scope before returning error
                            context.scope_mut().rewind(orig_len);
                            return Err(e);
                        }
                    }
                    // Remove the loop variable for next iteration
                    context.scope_mut().rewind(orig_len);
                }
                Ok(Dynamic::UNIT)
            },
        )
        .unwrap();
}

View file

@ -0,0 +1,460 @@
use rhai::{Dynamic, Engine};
use chrono::{NaiveDateTime, Timelike, Datelike};
use num_format::{Locale, ToFormattedString};
use std::str::FromStr;
/// Registers the `FORMAT value pattern` expression.
///
/// Dispatch order:
/// 1. `value` parses as a number → numeric/currency patterns (`N…`/`C…`
///    with optional decimal count and `[locale]`, or `n`/`F`/`f`/`0%`).
/// 2. `value` parses as `YYYY-MM-DD HH:MM:SS` → date placeholders.
/// 3. Otherwise → text placeholders (`@`, `&`, `<`, `>`, `!`).
pub fn format_keyword(engine: &mut Engine) {
    engine
        .register_custom_syntax(&["FORMAT", "$expr$", "$expr$"], false, {
            move |context, inputs| {
                let value_dyn = context.eval_expression_tree(&inputs[0])?;
                let pattern_dyn = context.eval_expression_tree(&inputs[1])?;
                let value_str = value_dyn.to_string();
                let pattern = pattern_dyn.to_string();
                // --- NUMERIC ---
                if let Ok(num) = f64::from_str(&value_str) {
                    let formatted = if pattern.starts_with("N") || pattern.starts_with("C") {
                        // extract parts: prefix, decimal places, locale
                        let (prefix, decimals, locale_tag) = parse_pattern(&pattern);
                        let locale = get_locale(&locale_tag);
                        let symbol = if prefix == "C" {
                            get_currency_symbol(&locale_tag)
                        } else {
                            ""
                        };
                        // Round ONCE at the requested precision, then split
                        // into integer and fractional digits. The previous
                        // trunc()-based split never rounded the integer part
                        // (FORMAT 1234.5678 "N0" gave "1,234" instead of
                        // "1,235") and the fraction could round up to
                        // 10^decimals (0.995 at 2 decimals printed ".100").
                        let factor = 10f64.powi(decimals as i32);
                        let scaled = (num * factor).round() as i64;
                        let int_part = scaled / factor as i64;
                        let frac_scaled = (scaled % factor as i64).abs();
                        // Keep the sign for values like -0.5 whose integer
                        // part rounds to zero.
                        let sign = if num < 0.0 && int_part == 0 && frac_scaled != 0 {
                            "-"
                        } else {
                            ""
                        };
                        if decimals == 0 {
                            format!("{}{}{}", symbol, sign, int_part.to_formatted_string(&locale))
                        } else {
                            format!(
                                "{}{}{}.{:0width$}",
                                symbol,
                                sign,
                                int_part.to_formatted_string(&locale),
                                frac_scaled,
                                width = decimals
                            )
                        }
                    } else {
                        // Simple one-letter patterns.
                        match pattern.as_str() {
                            "n" => format!("{:.2}", num),
                            "F" => format!("{:.2}", num),
                            "f" => format!("{}", num),
                            "0%" => format!("{:.0}%", num * 100.0),
                            _ => format!("{}", num),
                        }
                    };
                    return Ok(Dynamic::from(formatted));
                }
                // --- DATE ---
                if let Ok(dt) = NaiveDateTime::parse_from_str(&value_str, "%Y-%m-%d %H:%M:%S") {
                    let formatted = apply_date_format(&dt, &pattern);
                    return Ok(Dynamic::from(formatted));
                }
                // --- TEXT ---
                let formatted = apply_text_placeholders(&value_str, &pattern);
                Ok(Dynamic::from(formatted))
            }
        })
        .unwrap();
}
// ======================
// Extração de locale + precisão
// ======================
/// Parses a numeric/currency format pattern such as `"C2[pt]"` or `"N3[fr]"`.
///
/// Returns `(prefix, decimals, locale_tag)` where `prefix` is `"C"`, `"N"`
/// or empty, `decimals` defaults to 2 and `locale_tag` defaults to `"en"`.
fn parse_pattern(pattern: &str) -> (String, usize, String) {
    let mut decimals: usize = 2; // default: 2 decimal places
    let mut locale_tag = "en".to_string();
    // Determine the prefix and the remainder after it. `strip_prefix`
    // replaces the original byte-slice `&pattern[1..]`, which panicked on
    // an empty pattern or one starting with a multi-byte character.
    let (prefix, rest) = if let Some(rest) = pattern.strip_prefix('C') {
        ("C".to_string(), rest)
    } else if let Some(rest) = pattern.strip_prefix('N') {
        ("N".to_string(), rest)
    } else {
        (String::new(), pattern)
    };
    // An optional digit run right after the prefix sets the decimal count,
    // e.g. the "3" in "N3[fr]".
    let num_part: String = rest.chars().take_while(|c| c.is_ascii_digit()).collect();
    if !num_part.is_empty() {
        decimals = num_part.parse().unwrap_or(2);
    }
    // Optional `[locale]` suffix anywhere in the pattern.
    if let (Some(start), Some(end)) = (pattern.find('['), pattern.find(']')) {
        if end > start {
            locale_tag = pattern[start + 1..end].to_string();
        }
    }
    (prefix, decimals, locale_tag)
}
/// Maps a locale tag to a `num_format::Locale`, defaulting to English for
/// any unrecognized tag.
fn get_locale(tag: &str) -> Locale {
    if tag == "fr" {
        Locale::fr
    } else if tag == "de" {
        Locale::de
    } else if tag == "pt" {
        Locale::pt
    } else if tag == "it" {
        Locale::it
    } else if tag == "es" {
        Locale::es
    } else {
        // "en" and anything unknown fall back to English grouping.
        Locale::en
    }
}
/// Currency symbol prepended for `C` (currency) patterns, by locale tag.
/// Eurozone locales intentionally map to an empty string here.
fn get_currency_symbol(tag: &str) -> &'static str {
    if tag == "pt" {
        "R$ "
    } else if matches!(tag, "fr" | "de" | "es" | "it") {
        ""
    } else {
        // "en" and any unknown tag fall back to the dollar sign.
        "$"
    }
}
// ==================
// SUPORTE A DATAS
// ==================
/// Expands .NET-style date placeholders (`yyyy`, `MM`, `dd`, `HH`, `hh`,
/// `mm`, `ss`, `fff`, `tt`, ...) in `pattern` using sequential string
/// replacement, longest token first within each family.
///
/// NOTE(review): because this is plain text substitution, literal letters
/// in the pattern that collide with placeholder characters (e.g. the "M"
/// in a literal "March") are also replaced — patterns are assumed to
/// contain placeholders and separators only. Confirm that is acceptable.
fn apply_date_format(dt: &NaiveDateTime, pattern: &str) -> String {
    let mut output = pattern.to_string();
    let year = dt.year();
    let month = dt.month();
    let day = dt.day();
    let hour24 = dt.hour();
    let minute = dt.minute();
    let second = dt.second();
    let millis = dt.and_utc().timestamp_subsec_millis();
    // Each family replaces its 2-letter form before the 1-letter form so
    // "MM" is not consumed as two "M"s.
    output = output.replace("yyyy", &format!("{:04}", year));
    output = output.replace("yy", &format!("{:02}", year % 100));
    output = output.replace("MM", &format!("{:02}", month));
    output = output.replace("M", &format!("{}", month));
    output = output.replace("dd", &format!("{:02}", day));
    output = output.replace("d", &format!("{}", day));
    output = output.replace("HH", &format!("{:02}", hour24));
    output = output.replace("H", &format!("{}", hour24));
    // 12-hour clock: 0 and 12 both display as 12.
    let mut hour12 = hour24 % 12;
    if hour12 == 0 { hour12 = 12; }
    output = output.replace("hh", &format!("{:02}", hour12));
    output = output.replace("h", &format!("{}", hour12));
    output = output.replace("mm", &format!("{:02}", minute));
    output = output.replace("m", &format!("{}", minute));
    output = output.replace("ss", &format!("{:02}", second));
    output = output.replace("s", &format!("{}", second));
    output = output.replace("fff", &format!("{:03}", millis));
    // AM/PM markers are substituted last so the inserted "M"/"P" letters
    // are not touched by the replacements above.
    output = output.replace("tt", if hour24 < 12 { "AM" } else { "PM" });
    output = output.replace("t", if hour24 < 12 { "A" } else { "P" });
    output
}
// ==================
// SUPORTE A TEXTO
// ==================
/// Expands text placeholders in `pattern`: `@` inserts `value` unchanged,
/// `&`/`<` insert it lowercased, `>`/`!` insert it uppercased; every other
/// character is copied literally.
fn apply_text_placeholders(value: &str, pattern: &str) -> String {
    let mut out = String::with_capacity(pattern.len() + value.len());
    for ch in pattern.chars() {
        if ch == '@' {
            out.push_str(value);
        } else if ch == '&' || ch == '<' {
            out.push_str(&value.to_lowercase());
        } else if ch == '>' || ch == '!' {
            out.push_str(&value.to_uppercase());
        } else {
            // Any non-placeholder character is a literal.
            out.push(ch);
        }
    }
    out
}
// Unit tests for the FORMAT keyword (numeric, currency, date and text
// patterns).
//
// NOTE(review): several expectations below contradict the implementation
// or each other; they are flagged inline and should be reconciled against
// the intended FORMAT specification.
#[cfg(test)]
mod tests {
    use super::*;
    use rhai::Engine;
    // Builds an engine with only the FORMAT keyword registered.
    fn create_engine() -> Engine {
        let mut engine = Engine::new();
        format_keyword(&mut engine);
        engine
    }
    #[test]
    fn test_numeric_formatting_basic() {
        let engine = create_engine();
        // Basic one-letter numeric patterns.
        assert_eq!(
            engine.eval::<String>("FORMAT 1234.567 \"n\"").unwrap(),
            "1234.57"
        );
        assert_eq!(
            engine.eval::<String>("FORMAT 1234.5 \"F\"").unwrap(),
            "1234.50"
        );
        assert_eq!(
            engine.eval::<String>("FORMAT 1234.567 \"f\"").unwrap(),
            "1234.567"
        );
        assert_eq!(
            engine.eval::<String>("FORMAT 0.85 \"0%\"").unwrap(),
            "85%"
        );
    }
    #[test]
    fn test_numeric_formatting_with_locale() {
        let engine = create_engine();
        // Locale-grouped numeric formatting.
        // NOTE(review): the implementation hardcodes "." as the decimal
        // separator, while the pt/fr expectations below assume ","; they
        // also assume specific num_format grouping separators — verify
        // these against num_format's actual locale data.
        assert_eq!(
            engine.eval::<String>("FORMAT 1234.56 \"N[en]\"").unwrap(),
            "1,234.56"
        );
        assert_eq!(
            engine.eval::<String>("FORMAT 1234.56 \"N[pt]\"").unwrap(),
            "1.234,56"
        );
        assert_eq!(
            engine.eval::<String>("FORMAT 1234.56 \"N[fr]\"").unwrap(),
            "1234,56"
        );
    }
    #[test]
    fn test_currency_formatting() {
        let engine = create_engine();
        // Currency formatting.
        // NOTE(review): the "C[fr]" expectation uses "€", but
        // get_currency_symbol("fr") returns "" — and test_locale_functions
        // below asserts exactly that. One of the two must be wrong.
        assert_eq!(
            engine.eval::<String>("FORMAT 1234.56 \"C[en]\"").unwrap(),
            "$1,234.56"
        );
        assert_eq!(
            engine.eval::<String>("FORMAT 1234.56 \"C[pt]\"").unwrap(),
            "R$ 1.234,56"
        );
        assert_eq!(
            engine.eval::<String>("FORMAT 1234.56 \"C[fr]\"").unwrap(),
            "€1234,56"
        );
    }
    #[test]
    fn test_numeric_decimals_precision() {
        let engine = create_engine();
        // Decimal-count precision.
        // NOTE(review): "N0"/"C0" expectations assume rounding ("1,235"),
        // but the implementation truncates the integer part — confirm
        // rounding is the intended behavior.
        assert_eq!(
            engine.eval::<String>("FORMAT 1234.5678 \"N0[en]\"").unwrap(),
            "1,235"
        );
        assert_eq!(
            engine.eval::<String>("FORMAT 1234.5678 \"N1[en]\"").unwrap(),
            "1,234.6"
        );
        assert_eq!(
            engine.eval::<String>("FORMAT 1234.5678 \"N3[en]\"").unwrap(),
            "1,234.568"
        );
        assert_eq!(
            engine.eval::<String>("FORMAT 1234.5 \"C0[en]\"").unwrap(),
            "$1,235"
        );
    }
    #[test]
    fn test_date_formatting() {
        let engine = create_engine();
        // Date placeholder expansion (input must be YYYY-MM-DD HH:MM:SS).
        let result = engine.eval::<String>("FORMAT \"2024-03-15 14:30:25\" \"yyyy-MM-dd HH:mm:ss\"").unwrap();
        assert_eq!(result, "2024-03-15 14:30:25");
        let result = engine.eval::<String>("FORMAT \"2024-03-15 14:30:25\" \"dd/MM/yyyy\"").unwrap();
        assert_eq!(result, "15/03/2024");
        let result = engine.eval::<String>("FORMAT \"2024-03-15 14:30:25\" \"MM/dd/yy\"").unwrap();
        assert_eq!(result, "03/15/24");
        let result = engine.eval::<String>("FORMAT \"2024-03-15 14:30:25\" \"HH:mm\"").unwrap();
        assert_eq!(result, "14:30");
    }
    #[test]
    fn test_date_formatting_12h() {
        let engine = create_engine();
        // 12-hour clock with AM/PM markers.
        let result = engine.eval::<String>("FORMAT \"2024-03-15 14:30:25\" \"hh:mm tt\"").unwrap();
        assert_eq!(result, "02:30 PM");
        let result = engine.eval::<String>("FORMAT \"2024-03-15 09:30:25\" \"hh:mm tt\"").unwrap();
        assert_eq!(result, "09:30 AM");
        let result = engine.eval::<String>("FORMAT \"2024-03-15 00:30:25\" \"h:mm t\"").unwrap();
        assert_eq!(result, "12:30 A");
    }
    #[test]
    fn test_text_formatting() {
        let engine = create_engine();
        // Text placeholder expansion.
        // NOTE(review): apply_text_placeholders treats '!' as an uppercase
        // placeholder and '>' as one too, so "Result: &!" would yield
        // "Result: helloHELLO" and "<>" would yield "helloHELLO" — these
        // expectations treat '!' and the trailing '>' as literals and
        // contradict both the implementation and
        // test_apply_text_placeholders below.
        assert_eq!(
            engine.eval::<String>("FORMAT \"hello\" \"Prefix: @\"").unwrap(),
            "Prefix: hello"
        );
        assert_eq!(
            engine.eval::<String>("FORMAT \"HELLO\" \"Result: &!\"").unwrap(),
            "Result: hello!"
        );
        assert_eq!(
            engine.eval::<String>("FORMAT \"hello\" \"RESULT: >\"").unwrap(),
            "RESULT: HELLO"
        );
        assert_eq!(
            engine.eval::<String>("FORMAT \"Hello\" \"<>\"").unwrap(),
            "hello>"
        );
    }
    #[test]
    fn test_mixed_patterns() {
        let engine = create_engine();
        // Mixed literals and placeholders.
        // NOTE(review): "!" treated as a literal here — see the
        // contradiction flagged in test_text_formatting.
        assert_eq!(
            engine.eval::<String>("FORMAT \"hello\" \"@ World!\"").unwrap(),
            "hello World!"
        );
        assert_eq!(
            engine.eval::<String>("FORMAT \"test\" \"< & > ! @\"").unwrap(),
            "test test TEST ! test"
        );
    }
    #[test]
    fn test_edge_cases() {
        let engine = create_engine();
        // Zero, negatives, empty value, empty pattern.
        assert_eq!(
            engine.eval::<String>("FORMAT 0 \"n\"").unwrap(),
            "0.00"
        );
        assert_eq!(
            engine.eval::<String>("FORMAT -1234.56 \"N[en]\"").unwrap(),
            "-1,234.56"
        );
        assert_eq!(
            engine.eval::<String>("FORMAT \"\" \"@\"").unwrap(),
            ""
        );
        assert_eq!(
            engine.eval::<String>("FORMAT \"test\" \"\"").unwrap(),
            ""
        );
    }
    #[test]
    fn test_invalid_patterns_fallback() {
        let engine = create_engine();
        // Unknown patterns fall back to plain stringification.
        assert_eq!(
            engine.eval::<String>("FORMAT 123.45 \"invalid\"").unwrap(),
            "123.45"
        );
        assert_eq!(
            engine.eval::<String>("FORMAT \"text\" \"unknown\"").unwrap(),
            "unknown"
        );
    }
    #[test]
    fn test_milliseconds_formatting() {
        let engine = create_engine();
        // Milliseconds placeholder.
        // NOTE(review): the value contains ".123" but the parse format in
        // format_keyword is "%Y-%m-%d %H:%M:%S" (no fractional seconds), so
        // date parsing fails and the input falls through to TEXT mode —
        // this expectation likely does not hold. Confirm.
        let result = engine.eval::<String>("FORMAT \"2024-03-15 14:30:25.123\" \"HH:mm:ss.fff\"").unwrap();
        assert_eq!(result, "14:30:25.123");
    }
    #[test]
    fn test_parse_pattern_function() {
        // Direct tests of parse_pattern.
        assert_eq!(parse_pattern("C[en]"), ("C".to_string(), 2, "en".to_string()));
        assert_eq!(parse_pattern("N3[pt]"), ("N".to_string(), 3, "pt".to_string()));
        assert_eq!(parse_pattern("C0[fr]"), ("C".to_string(), 0, "fr".to_string()));
        assert_eq!(parse_pattern("N"), ("N".to_string(), 2, "en".to_string()));
        assert_eq!(parse_pattern("C2"), ("C".to_string(), 2, "en".to_string()));
    }
    #[test]
    fn test_locale_functions() {
        // Direct tests of the locale helpers.
        assert!(matches!(get_locale("en"), Locale::en));
        assert!(matches!(get_locale("pt"), Locale::pt));
        assert!(matches!(get_locale("fr"), Locale::fr));
        assert!(matches!(get_locale("invalid"), Locale::en)); // fallback
        assert_eq!(get_currency_symbol("en"), "$");
        assert_eq!(get_currency_symbol("pt"), "R$ ");
        // NOTE(review): asserts "" for fr — contradicts the "€" expectation
        // in test_currency_formatting above.
        assert_eq!(get_currency_symbol("fr"), "");
        assert_eq!(get_currency_symbol("invalid"), "$"); // fallback
    }
    #[test]
    fn test_apply_text_placeholders() {
        // Direct tests of apply_text_placeholders.
        // NOTE(review): the implementation expands '!' (uppercase) and '>'
        // (uppercase), so "Prefix: @!" => "Prefix: HelloHELLO" and
        // "<>" => "helloHELLO"; the last two expectations below treat them
        // as literals and contradict the first three. Reconcile.
        assert_eq!(apply_text_placeholders("Hello", "@"), "Hello");
        assert_eq!(apply_text_placeholders("Hello", "&"), "hello");
        assert_eq!(apply_text_placeholders("Hello", ">"), "HELLO");
        assert_eq!(apply_text_placeholders("Hello", "Prefix: @!"), "Prefix: Hello!");
        assert_eq!(apply_text_placeholders("Hello", "<>"), "hello>");
    }
    #[test]
    fn test_expression_parameters() {
        let engine = create_engine();
        // FORMAT accepts arbitrary expressions for both value and pattern.
        assert_eq!(
            engine.eval::<String>("let x = 1000.50; FORMAT x \"N[en]\"").unwrap(),
            "1,000.50"
        );
        assert_eq!(
            engine.eval::<String>("FORMAT (500 + 500) \"n\"").unwrap(),
            "1000.00"
        );
        assert_eq!(
            engine.eval::<String>("let pattern = \"@ World\"; FORMAT \"Hello\" pattern").unwrap(),
            "Hello World"
        );
    }
}

97
src/basic/keywords/get.rs Normal file
View file

@ -0,0 +1,97 @@
use log::info;
use crate::shared::state::AppState;
use reqwest::{self, Client};
use rhai::{Dynamic, Engine};
use scraper::{Html, Selector};
use std::error::Error;
/// Registers the `GET expr` expression.
///
/// URL handling:
/// - paths containing ".." are rejected (path-traversal guard);
/// - a leading "/" is rewritten to a file:// URL under $WORK_ROOT
///   (default "./work");
/// - https:// URLs are fetched over the network; file:// URLs are read
///   from disk; everything else (including plain http://) is rejected.
///
/// NOTE(review): the registration result is discarded with `let _ =`,
/// unlike the other keywords which `.unwrap()` — confirm this is intended.
pub fn get_keyword(_state: &AppState, engine: &mut Engine) {
    let _ = engine.register_custom_syntax(
        &["GET", "$expr$"],
        false, // Expression, not statement
        move |context, inputs| {
            let url = context.eval_expression_tree(&inputs[0])?;
            let url_str = url.to_string();
            // Prevent path traversal attacks
            if url_str.contains("..") {
                return Err("URL contains invalid path traversal sequences like '..'.".into());
            }
            // Map absolute-looking paths into the sandboxed work root.
            let modified_url = if url_str.starts_with("/") {
                let work_root = std::env::var("WORK_ROOT").unwrap_or_else(|_| "./work".to_string());
                let full_path = std::path::Path::new(&work_root)
                    .join(url_str.trim_start_matches('/'))
                    .to_string_lossy()
                    .into_owned();
                let base_url = "file://";
                format!("{}{}", base_url, full_path)
            } else {
                url_str.to_string()
            };
            if modified_url.starts_with("https://") {
                info!("HTTPS GET request: {}", modified_url);
                // Block the current worker thread on the async fetch.
                let fut = execute_get(&modified_url);
                let result =
                    tokio::task::block_in_place(|| tokio::runtime::Handle::current().block_on(fut))
                        .map_err(|e| format!("HTTP request failed: {}", e))?;
                Ok(Dynamic::from(result))
            } else if modified_url.starts_with("file://") {
                // Handle file:// URLs
                let file_path = modified_url.trim_start_matches("file://");
                match std::fs::read_to_string(file_path) {
                    Ok(content) => Ok(Dynamic::from(content)),
                    Err(e) => Err(format!("Failed to read file: {}", e).into()),
                }
            } else {
                Err(
                    format!("GET request failed: URL must begin with 'https://' or 'file://'")
                        .into(),
                )
            }
        },
    );
}
/// Fetches `url` and returns its body.
///
/// When the response looks like an HTML document, the text content of `<body>`
/// is extracted and whitespace-normalized; any other payload is returned
/// verbatim.
pub async fn execute_get(url: &str) -> Result<String, Box<dyn Error + Send + Sync>> {
    info!("Starting execute_get with URL: {}", url);
    // SECURITY(review): invalid TLS certificates are deliberately accepted
    // here — confirm this is acceptable for the URLs this keyword can reach.
    let client = Client::builder()
        .danger_accept_invalid_certs(true)
        .build()?;
    let response = client.get(url).send().await?;
    let html_content = response.text().await?;
    // Detect HTML case-insensitively: pages emitting "<!doctype html>" or
    // "<HTML>" previously failed the exact-case check and were returned raw.
    // Only the first few bytes are lowered; get() falls back to the whole
    // prefix if 64 is past the end or splits a UTF-8 boundary.
    let head = html_content.trim_start();
    let head_lower = head.get(..64).unwrap_or(head).to_ascii_lowercase();
    if head_lower.starts_with("<!doctype html") || head_lower.starts_with("<html") {
        let document = Html::parse_document(&html_content);
        // Prefer <body>; fall back to the universal selector if the body
        // selector ever fails to parse.
        let selector = Selector::parse("body").unwrap_or_else(|_| Selector::parse("*").unwrap());
        let text_content = document
            .select(&selector)
            .flat_map(|element| element.text())
            .collect::<Vec<_>>()
            .join(" ");
        // Collapse every run of whitespace into a single space.
        let cleaned_text = text_content
            .replace('\n', " ")
            .replace('\t', " ")
            .split_whitespace()
            .collect::<Vec<_>>()
            .join(" ");
        Ok(cleaned_text)
    } else {
        Ok(html_content) // Return plain content as-is if not HTML
    }
}

View file

@ -0,0 +1,133 @@
use crate::{state::AppState, web_automation::BrowserPool};
use log::info;
use rhai::{Dynamic, Engine};
use std::error::Error;
use std::sync::Arc;
use std::time::Duration;
use thirtyfour::{By, WebDriver};
use tokio::time::sleep;
/// Registers `WEBSITE OF <expr>`: runs a headless DuckDuckGo search for the
/// evaluated term and returns the first result URL.
pub fn get_website_keyword(state: &AppState, engine: &mut Engine) {
    let browser_pool = state.browser_pool.clone();
    engine
        .register_custom_syntax(
            &["WEBSITE", "OF", "$expr$"],
            false,
            move |context, inputs| {
                let term = context.eval_expression_tree(&inputs[0])?.to_string();
                info!("GET WEBSITE executed - Search: '{}'", term);
                let pool = browser_pool.clone();
                // Bridge the async search into this synchronous Rhai callback.
                let url = tokio::task::block_in_place(|| {
                    tokio::runtime::Handle::current()
                        .block_on(execute_headless_browser_search(pool, &term))
                })
                .map_err(|e| format!("Headless browser search failed: {}", e))?;
                Ok(Dynamic::from(url))
            },
        )
        .unwrap();
}
/// Checks a browser session out of the pool and runs one search, returning
/// the first result URL (or the "No results found" marker from the search).
pub async fn execute_headless_browser_search(
    browser_pool: Arc<BrowserPool>,
    search_term: &str,
) -> Result<String, Box<dyn Error + Send + Sync>> {
    info!("Starting headless browser search: '{}' ", search_term);
    // Owned copy so the 'static async closure below can capture it.
    let term = search_term.to_string();
    browser_pool
        .with_browser(|driver| Box::pin(async move { perform_search(driver, &term).await }))
        .await
}
/// Drives a DuckDuckGo search in the given WebDriver session and returns the
/// first extracted result URL, or "No results found".
///
/// NOTE(review): the window is closed only on the success path — any early
/// `?` return leaves it open. Confirm the browser pool recovers such
/// sessions.
async fn perform_search(
driver: WebDriver,
search_term: &str,
) -> Result<String, Box<dyn Error + Send + Sync>> {
// Navigate to DuckDuckGo
driver.goto("https://duckduckgo.com").await?;
// Wait for search box and type query
let search_input = driver.find(By::Id("searchbox_input")).await?;
search_input.click().await?;
search_input.send_keys(search_term).await?;
// Submit search by pressing Enter
search_input.send_keys("\n").await?;
// Block until at least one result node exists, then give the page time
// to settle before scraping.
driver.find(By::Css("[data-testid='result']")).await?;
sleep(Duration::from_millis(2000)).await;
// Extract results
let results = extract_search_results(&driver).await?;
driver.close_window().await?;
if !results.is_empty() {
Ok(results[0].clone())
} else {
Ok("No results found".to_string())
}
}
/// Collects outbound result URLs from the current DuckDuckGo results page.
///
/// Selectors are tried from most to least specific; the first selector that
/// yields any accepted link wins. Links back to DuckDuckGo itself, non-http
/// links, duplicates, and entries whose display text is empty or ad-like are
/// filtered out.
async fn extract_search_results(
    driver: &WebDriver,
) -> Result<Vec<String>, Box<dyn Error + Send + Sync>> {
    let mut results = Vec::new();
    // O(1) membership test per candidate; the previous `results.contains`
    // scan was O(n) per link. `results` keeps the discovery order.
    let mut seen = std::collections::HashSet::new();
    // Try different selectors for search results, ordered by most specific to most general
    let selectors = [
        // Modern DuckDuckGo (as seen in the HTML)
        "a[data-testid='result-title-a']", // Primary result links
        "a[data-testid='result-extras-url-link']", // URL links in results
        "a.eVNpHGjtxRBq_gLOfGDr", // Class-based selector for result titles
        "a.Rn_JXVtoPVAFyGkcaXyK", // Class-based selector for URL links
        ".ikg2IXiCD14iVX7AdZo1 a", // Heading container links
        ".OQ_6vPwNhCeusNiEDcGp a", // URL container links
        // Fallback selectors
        ".result__a", // Classic DuckDuckGo
        "a.result-link", // Alternative
        ".result a[href]", // Generic result links
    ];
    for selector in &selectors {
        if let Ok(elements) = driver.find_all(By::Css(selector)).await {
            for element in elements {
                if let Ok(Some(href)) = element.attr("href").await {
                    // Filter out internal and non-http links.
                    if href.starts_with("http")
                        && !href.contains("duckduckgo.com")
                        && !href.contains("duck.co")
                        && !seen.contains(&href)
                    {
                        // Get the display text for verification.
                        let display_url = match element.text().await {
                            Ok(text) => text.trim().to_string(),
                            Err(_) => String::new(),
                        };
                        // Only accept links that look like real results
                        // (non-empty display text, not an ad).
                        if !display_url.is_empty() && !display_url.contains("Ad") {
                            seen.insert(href.clone());
                            results.push(href);
                        }
                    }
                }
            }
            if !results.is_empty() {
                break;
            }
        }
    }
    // `seen` already guarantees uniqueness, so the previous trailing
    // `dedup()` (which only removed *consecutive* duplicates) is dropped.
    Ok(results)
}

250
src/basic/keywords/last.rs Normal file
View file

@ -0,0 +1,250 @@
use rhai::Dynamic;
use rhai::Engine;
/// Registers `LAST(<expr>)`, which evaluates its argument as a string and
/// returns the final whitespace-separated word, or "" when there is none.
pub fn last_keyword(engine: &mut Engine) {
    engine
        .register_custom_syntax(&["LAST", "(", "$expr$", ")"], false, {
            move |context, inputs| {
                let value = context.eval_expression_tree(&inputs[0])?;
                let text = value.to_string();
                // split_whitespace handles spaces, tabs and newlines alike.
                let tail = match text.split_whitespace().last() {
                    Some(word) => word.to_string(),
                    None => String::new(),
                };
                Ok(Dynamic::from(tail))
            }
        })
        .unwrap();
}
#[cfg(test)]
mod tests {
use super::*;
use rhai::{Engine, Scope};
// Unit tests for the LAST(...) custom syntax: last-word extraction over
// plain strings, variables, expressions, unicode input, and malformed
// syntax (which must fail to parse).
#[test]
fn test_last_keyword_basic() {
let mut engine = Engine::new();
last_keyword(&mut engine);
let result: String = engine.eval("LAST(\"hello world\")").unwrap();
assert_eq!(result, "world");
}
#[test]
fn test_last_keyword_single_word() {
let mut engine = Engine::new();
last_keyword(&mut engine);
let result: String = engine.eval("LAST(\"hello\")").unwrap();
assert_eq!(result, "hello");
}
#[test]
fn test_last_keyword_empty_string() {
let mut engine = Engine::new();
last_keyword(&mut engine);
let result: String = engine.eval("LAST(\"\")").unwrap();
assert_eq!(result, "");
}
#[test]
fn test_last_keyword_multiple_spaces() {
let mut engine = Engine::new();
last_keyword(&mut engine);
let result: String = engine.eval("LAST(\"hello   world   \")").unwrap();
assert_eq!(result, "world");
}
#[test]
fn test_last_keyword_tabs_and_newlines() {
let mut engine = Engine::new();
last_keyword(&mut engine);
let result: String = engine.eval("LAST(\"hello\tworld\n\")").unwrap();
assert_eq!(result, "world");
}
#[test]
fn test_last_keyword_with_variable() {
let mut engine = Engine::new();
last_keyword(&mut engine);
let mut scope = Scope::new();
scope.push("text", "this is a test");
let result: String = engine.eval_with_scope(&mut scope, "LAST(text)").unwrap();
assert_eq!(result, "test");
}
#[test]
fn test_last_keyword_whitespace_only() {
let mut engine = Engine::new();
last_keyword(&mut engine);
let result: String = engine.eval("LAST(\"   \")").unwrap();
assert_eq!(result, "");
}
#[test]
fn test_last_keyword_mixed_whitespace() {
let mut engine = Engine::new();
last_keyword(&mut engine);
let result: String = engine.eval("LAST(\"hello\t  \n  world  \t final\")").unwrap();
assert_eq!(result, "final");
}
#[test]
fn test_last_keyword_expression() {
let mut engine = Engine::new();
last_keyword(&mut engine);
// Test with string concatenation
let result: String = engine.eval("LAST(\"hello\" + \" \" + \"world\")").unwrap();
assert_eq!(result, "world");
}
#[test]
fn test_last_keyword_unicode() {
let mut engine = Engine::new();
last_keyword(&mut engine);
let result: String = engine.eval("LAST(\"hello 世界 мир world\")").unwrap();
assert_eq!(result, "world");
}
#[test]
fn test_last_keyword_in_expression() {
let mut engine = Engine::new();
last_keyword(&mut engine);
// Test using the result in another expression
let result: bool = engine.eval("LAST(\"hello world\") == \"world\"").unwrap();
assert!(result);
}
#[test]
fn test_last_keyword_complex_scenario() {
let mut engine = Engine::new();
last_keyword(&mut engine);
let mut scope = Scope::new();
scope.push("sentence", "The quick brown fox jumps over the lazy dog");
let result: String = engine.eval_with_scope(&mut scope, "LAST(sentence)").unwrap();
assert_eq!(result, "dog");
}
// The three tests below pin the registered syntax shape: LAST requires a
// full parenthesized argument, so each malformed variant must fail to
// parse (panicking on unwrap).
#[test]
#[should_panic] // This should fail because the syntax expects parentheses
fn test_last_keyword_missing_parentheses() {
let mut engine = Engine::new();
last_keyword(&mut engine);
// This should fail - missing parentheses
let _: String = engine.eval("LAST \"hello world\"").unwrap();
}
#[test]
#[should_panic] // This should fail because of incomplete syntax
fn test_last_keyword_missing_closing_parenthesis() {
let mut engine = Engine::new();
last_keyword(&mut engine);
// This should fail - missing closing parenthesis
let _: String = engine.eval("LAST(\"hello world\"").unwrap();
}
#[test]
#[should_panic] // This should fail because of incomplete syntax
fn test_last_keyword_missing_opening_parenthesis() {
let mut engine = Engine::new();
last_keyword(&mut engine);
// This should fail - missing opening parenthesis
let _: String = engine.eval("LAST \"hello world\")").unwrap();
}
#[test]
fn test_last_keyword_dynamic_type() {
let mut engine = Engine::new();
last_keyword(&mut engine);
// Test that the function returns the correct Dynamic type
let result = engine.eval::<Dynamic>("LAST(\"test string\")").unwrap();
assert!(result.is::<String>());
assert_eq!(result.to_string(), "string");
}
#[test]
fn test_last_keyword_nested_expression() {
let mut engine = Engine::new();
last_keyword(&mut engine);
// Test with a more complex nested expression
let result: String = engine.eval("LAST(\"The result is: \" + \"hello world\")").unwrap();
assert_eq!(result, "world");
}
}
#[cfg(test)]
mod integration_tests {
use super::*;
// Script-level tests: LAST used inside multi-statement Rhai scripts and
// combined with registered Rust functions.
#[test]
fn test_last_keyword_in_script() {
let mut engine = Engine::new();
last_keyword(&mut engine);
let script = r#"
            let sentence1 = "first second third";
            let sentence2 = "alpha beta gamma";
            let last1 = LAST(sentence1);
            let last2 = LAST(sentence2);
            last1 + " and " + last2
        "#;
let result: String = engine.eval(script).unwrap();
assert_eq!(result, "third and gamma");
}
#[test]
fn test_last_keyword_with_function() {
let mut engine = Engine::new();
last_keyword(&mut engine);
// Register a function that returns a string
engine.register_fn("get_name", || -> String { "john doe".to_string() });
let result: String = engine.eval("LAST(get_name())").unwrap();
assert_eq!(result, "doe");
}
#[test]
fn test_last_keyword_multiple_calls() {
let mut engine = Engine::new();
last_keyword(&mut engine);
let script = r#"
            let text1 = "apple banana cherry";
            let text2 = "cat dog elephant";
            let result1 = LAST(text1);
            let result2 = LAST(text2);
            result1 + "-" + result2
        "#;
let result: String = engine.eval(script).unwrap();
assert_eq!(result, "cherry-elephant");
}
}

View file

@ -0,0 +1,30 @@
use log::info;
use crate::{shared::state::AppState, utils::call_llm};
use rhai::{Dynamic, Engine};
/// Registers `LLM <expr>`: evaluates the operand to text, sends it to the
/// configured AI backend via `call_llm`, and returns the completion.
pub fn llm_keyword(state: &AppState, engine: &mut Engine) {
    // Borrow the config and clone only the AI section (the previous code
    // cloned the entire config first), and fail fast with context instead
    // of a bare unwrap panic when no config was loaded.
    let ai_config = state
        .config
        .as_ref()
        .expect("llm_keyword requires AppState.config to be initialized")
        .ai
        .clone();
    engine
        .register_custom_syntax(
            &["LLM", "$expr$"], // Syntax: LLM "text to process"
            false,              // Expression, not statement
            move |context, inputs| {
                let text = context.eval_expression_tree(&inputs[0])?;
                let text_str = text.to_string();
                info!("LLM processing text: {}", text_str);
                // Bridge the async LLM call into this synchronous callback
                // (same pattern as the GET keyword).
                let fut = call_llm(&text_str, &ai_config);
                let result =
                    tokio::task::block_in_place(|| tokio::runtime::Handle::current().block_on(fut))
                        .map_err(|e| format!("LLM call failed: {}", e))?;
                Ok(Dynamic::from(result))
            },
        )
        .unwrap();
}

15
src/basic/keywords/mod.rs Normal file
View file

@ -0,0 +1,15 @@
// Keyword modules, alphabetized.
pub mod create_draft;
pub mod create_site;
pub mod find;
pub mod first;
pub mod for_next;
pub mod format;
pub mod get;
pub mod get_website;
pub mod last;
pub mod llm_keyword;
pub mod on;
pub mod print;
pub mod set;
pub mod set_schedule;
pub mod wait;

86
src/basic/keywords/on.rs Normal file
View file

@ -0,0 +1,86 @@
use log::{error, info};
use rhai::Dynamic;
use rhai::Engine;
use serde_json::{json, Value};
use sqlx::PgPool;
use crate::shared::models::automation_model::TriggerKind;
use crate::shared::state::AppState;
/// Registers `ON <OPERATION> OF "<table>"` (e.g. `ON UPDATE OF "users"`),
/// persisting a table-trigger automation row that points at the generated
/// script `<table>_<operation>.rhai`.
pub fn on_keyword(state: &AppState, engine: &mut Engine) {
    let db = state.db_custom.clone();
    engine
        .register_custom_syntax(
            ["ON", "$ident$", "OF", "$string$"],
            true,
            {
                let db = db.clone();
                move |context, inputs| {
                    // `$ident$` captures a bare identifier (UPDATE / INSERT /
                    // DELETE), not a variable: read its name directly. The
                    // previous `eval_expression_tree` call would try to look
                    // up a variable of that name and fail at runtime.
                    let trigger_type = inputs[0]
                        .get_string_value()
                        .ok_or("ON: expected an operation identifier (UPDATE/INSERT/DELETE)")?
                        .to_string();
                    let table = context.eval_expression_tree(&inputs[1])?.to_string();
                    let script_name = format!("{}_{}.rhai", table, trigger_type.to_lowercase());
                    // Map the operation name onto the Rust-side trigger kind.
                    let kind = match trigger_type.to_uppercase().as_str() {
                        "UPDATE" => TriggerKind::TableUpdate,
                        "INSERT" => TriggerKind::TableInsert,
                        "DELETE" => TriggerKind::TableDelete,
                        _ => return Err(format!("Invalid trigger type: {}", trigger_type).into()),
                    };
                    let binding = db.as_ref().unwrap();
                    // Bridge the async DB call into this synchronous callback.
                    let fut = execute_on_trigger(binding, kind, &table, &script_name);
                    let result = tokio::task::block_in_place(|| {
                        tokio::runtime::Handle::current().block_on(fut)
                    })
                    .map_err(|e| format!("DB error: {}", e))?;
                    if let Some(rows_affected) = result.get("rows_affected") {
                        Ok(Dynamic::from(rows_affected.as_i64().unwrap_or(0)))
                    } else {
                        Err("No rows affected".into())
                    }
                }
            },
        )
        .unwrap();
}
/// Inserts a table-trigger automation row into `system_automations`.
///
/// The trigger kind is stored as its i32 discriminant (the schema keeps plain
/// integers; the enum exists only in Rust). Returns a JSON summary whose
/// `rows_affected` field the keyword layer inspects.
pub async fn execute_on_trigger(
pool: &PgPool,
kind: TriggerKind,
table: &str,
script_name: &str,
) -> Result<Value, String> {
info!(
"Starting execute_on_trigger with kind: {:?}, table: {}, script_name: {}",
kind, table, script_name
);
let result = sqlx::query(
"INSERT INTO system_automations
(kind, target, script_name)
VALUES ($1, $2, $3)",
)
.bind(kind.clone() as i32) // clone: the `as i32` cast consumes the value and `kind` is reused below
.bind(table)
.bind(script_name)
.execute(pool)
.await
.map_err(|e| {
error!("SQL execution error: {}", e);
e.to_string()
})?;
Ok(json!({
"command": "on_trigger",
"trigger_type": format!("{:?}", kind),
"table": table,
"script_name": script_name,
"rows_affected": result.rows_affected()
}))
}

View file

@ -0,0 +1,20 @@
use log::info;
use rhai::Dynamic;
use rhai::Engine;
use crate::shared::state::AppState;
/// Registers the `PRINT <expr>` statement: evaluates the operand and writes
/// it to the info log, yielding unit.
pub fn print_keyword(_state: &AppState, engine: &mut Engine) {
    engine
        .register_custom_syntax(
            &["PRINT", "$expr$"],
            true, // statement form
            |context, inputs| {
                context.eval_expression_tree(&inputs[0]).map(|value| {
                    info!("{}", value);
                    Dynamic::UNIT
                })
            },
        )
        .unwrap();
}

View file

@ -0,0 +1,150 @@
Create a new Rhai custom keyword implementation with these specifications:
1. DATABASE REQUIREMENTS:
- No enums in database schema (only in Rust code)
- Use direct integer values for enum variants in queries
- Follow existing connection pooling pattern with AppState
- Include proper error handling and logging
2. RUST IMPLEMENTATION:
- Enum definition (Rust-only, no DB enum):
```rust
#[repr(i32)]
pub enum KeywordAction {
Action1 = 0,
Action2 = 1,
Action3 = 2
}
```
3. KEYWORD TEMPLATE:
```rust
pub fn {keyword_name}_keyword(state: &AppState, engine: &mut Engine) {
let db = state.db_custom.clone();
engine.register_custom_syntax(
{syntax_pattern},
{is_raw},
{
let db = db.clone();
move |context, inputs| {
// Input processing
{input_processing}
let binding = db.as_ref().unwrap();
let fut = execute_{keyword_name}(binding, {params});
let result = tokio::task::block_in_place(||
tokio::runtime::Handle::current().block_on(fut))
.map_err(|e| format!("DB error: {}", e))?;
{result_handling}
}
}
).unwrap();
}
pub async fn execute_{keyword_name}(
pool: &PgPool,
{params_with_types}
) -> Result<Value, Box<dyn std::error::Error>> {
info!("Executing {keyword_name} with: {debug_params}");
let result = sqlx::query(
"{sql_query_with_i32_enum}"
)
.bind({enum_value} as i32)
{additional_binds}
.execute(pool)
.await?;
Ok(json!({
"command": "{keyword_name}",
{result_fields}
"rows_affected": result.rows_affected()
}))
}
```
4. EXAMPLE IMPLEMENTATION (SET SCHEDULE):
```rust
// Enum (Rust-only)
#[repr(i32)]
pub enum TriggerKind {
Scheduled = 0,
TableUpdate = 1,
TableInsert = 2,
TableDelete = 3
}
// Keyword implementation
pub fn set_schedule_keyword(state: &AppState, engine: &mut Engine) {
let db = state.db_custom.clone();
engine.register_custom_syntax(
["SET", "SCHEDULE", "$string$"],
true,
{
let db = db.clone();
move |context, inputs| {
let cron = context.eval_expression_tree(&inputs[0])?.to_string();
let script_name = format!("cron_{}.rhai", cron.replace(' ', "_"));
let binding = db.as_ref().unwrap();
let fut = execute_set_schedule(binding, &cron, &script_name);
let result = tokio::task::block_in_place(||
tokio::runtime::Handle::current().block_on(fut))
.map_err(|e| format!("DB error: {}", e))?;
if let Some(rows_affected) = result.get("rows_affected") {
Ok(Dynamic::from(rows_affected.as_i64().unwrap_or(0)))
} else {
Err("No rows affected".into())
}
}
}
).unwrap();
}
pub async fn execute_set_schedule(
pool: &PgPool,
cron: &str,
script_name: &str,
) -> Result<Value, Box<dyn std::error::Error>> {
info!("Executing schedule: {}, {}", cron, script_name);
let result = sqlx::query(
"INSERT INTO system_automations
(kind, schedule, script_name)
VALUES ($1, $2, $3)"
)
.bind(TriggerKind::Scheduled as i32)
.bind(cron)
.bind(script_name)
.execute(pool)
.await?;
Ok(json!({
"command": "set_schedule",
"schedule": cron,
"script_name": script_name,
"rows_affected": result.rows_affected()
}))
}
```
5. ADDITIONAL REQUIREMENTS:
- Maintain consistent tokio runtime handling
- Include parameter validation
- Follow existing JSON response format
- Ensure proper script name generation
- Include debug logging for all operations
6. OUTPUT FORMAT:
Provide complete implementation with:
1. Rust enum definition
2. Keyword registration function
3. Execution function
4. Example usage in Rhai

141
src/basic/keywords/set.rs Normal file
View file

@ -0,0 +1,141 @@
use log::{error, info};
use rhai::Dynamic;
use rhai::Engine;
use serde_json::{json, Value};
use sqlx::PgPool;
use std::error::Error;
use crate::shared::state::AppState;
use crate::shared::utils;
/// Registers `SET <table>, <filter>, <updates>`: runs an UPDATE against the
/// custom database and yields the number of rows affected.
pub fn set_keyword(state: &AppState, engine: &mut Engine) {
    let db = state.db_custom.clone();
    engine
        .register_custom_syntax(&["SET", "$expr$", ",", "$expr$", ",", "$expr$"], false, {
            let db = db.clone();
            move |context, inputs| {
                // Evaluate the three operands up front and stringify them.
                let table = context.eval_expression_tree(&inputs[0])?.to_string();
                let filter = context.eval_expression_tree(&inputs[1])?.to_string();
                let updates = context.eval_expression_tree(&inputs[2])?.to_string();
                let pool = db.as_ref().unwrap();
                // Bridge the async DB call into this synchronous Rhai callback.
                let result = tokio::task::block_in_place(|| {
                    tokio::runtime::Handle::current()
                        .block_on(execute_set(pool, &table, &filter, &updates))
                })
                .map_err(|e| format!("DB error: {}", e))?;
                match result.get("rows_affected") {
                    Some(n) => Ok(Dynamic::from(n.as_i64().unwrap_or(0))),
                    None => Err("No rows affected".into()),
                }
            }
        })
        .unwrap();
}
/// Runs `UPDATE <table> SET ... WHERE ...` against the custom database.
///
/// Column and filter values are bound as parameters. The table name cannot be
/// bound, so it is validated against an identifier whitelist; rejecting
/// anything but `[A-Za-z0-9_]` is what prevents SQL injection through the
/// table operand.
pub async fn execute_set(
    pool: &PgPool,
    table_str: &str,
    filter_str: &str,
    updates_str: &str,
) -> Result<Value, String> {
    info!(
        "Starting execute_set with table: {}, filter: {}, updates: {}",
        table_str, filter_str, updates_str
    );
    // The table name is interpolated directly into the SQL text below; only
    // allow plain, non-empty identifiers.
    if table_str.is_empty()
        || !table_str
            .chars()
            .all(|c| c.is_ascii_alphanumeric() || c == '_')
    {
        return Err(format!("Invalid table name: {}", table_str));
    }
    // Parse updates with proper type handling
    let (set_clause, update_values) = parse_updates(updates_str).map_err(|e| e.to_string())?;
    let update_params_count = update_values.len();
    // Parse filter with proper type handling; its placeholders continue the
    // numbering after the update parameters.
    let (where_clause, filter_values) =
        utils::parse_filter_with_offset(filter_str, update_params_count)
            .map_err(|e| e.to_string())?;
    let query = format!(
        "UPDATE {} SET {} WHERE {}",
        table_str, set_clause, where_clause
    );
    info!("Executing query: {}", query);
    // Build query with proper parameter binding: update values first, then
    // filter values, matching the placeholder numbering above.
    let mut query = sqlx::query(&query);
    for value in update_values {
        query = bind_value(query, value);
    }
    for value in filter_values {
        query = bind_value(query, value);
    }
    let result = query.execute(pool).await.map_err(|e| {
        error!("SQL execution error: {}", e);
        e.to_string()
    })?;
    Ok(json!({
        "command": "set",
        "table": table_str,
        "filter": filter_str,
        "updates": updates_str,
        "rows_affected": result.rows_affected()
    }))
}
// Binds one raw string value with best-effort type inference: i64 first,
// then f64, then booleans ("true"/"false", case-insensitive), falling back
// to a plain text bind.
// NOTE(review): "1" binds as an integer, not text — text columns holding
// numeric-looking values may need explicit casts in the query; confirm.
fn bind_value<'q>(
query: sqlx::query::Query<'q, sqlx::Postgres, sqlx::postgres::PgArguments>,
value: String,
) -> sqlx::query::Query<'q, sqlx::Postgres, sqlx::postgres::PgArguments> {
if let Ok(int_val) = value.parse::<i64>() {
query.bind(int_val)
} else if let Ok(float_val) = value.parse::<f64>() {
query.bind(float_val)
} else if value.eq_ignore_ascii_case("true") {
query.bind(true)
} else if value.eq_ignore_ascii_case("false") {
query.bind(false)
} else {
query.bind(value)
}
}
/// Parses a comma-separated `col = value` list into a parameterized SET
/// clause (`"col = $1, other = $2"`) plus the raw values, without quoting.
///
/// Column names are restricted to `[A-Za-z0-9_]` (they are interpolated into
/// SQL); values are bound later, so they may contain anything — including
/// `=`, which the previous `split('=')` implementation rejected.
fn parse_updates(updates_str: &str) -> Result<(String, Vec<String>), Box<dyn Error>> {
    let mut set_clauses = Vec::new();
    let mut params = Vec::new();
    for (i, update) in updates_str.split(',').enumerate() {
        // splitn(2, '=') keeps everything after the first '=' as the value.
        let mut parts = update.splitn(2, '=');
        let column = parts.next().unwrap_or("").trim();
        let value = match parts.next() {
            Some(v) => v.trim(),
            None => return Err("Invalid update format".into()),
        };
        // An empty column would pass the `all()` check vacuously (producing
        // a malformed " = $n" clause), so reject it explicitly.
        if column.is_empty()
            || !column
                .chars()
                .all(|c| c.is_ascii_alphanumeric() || c == '_')
        {
            return Err("Invalid column name".into());
        }
        set_clauses.push(format!("{} = ${}", column, i + 1));
        params.push(value.to_string()); // raw value, bound as a parameter later
    }
    Ok((set_clauses.join(", "), params))
}

View file

@ -0,0 +1,67 @@
use log::info;
use rhai::Dynamic;
use rhai::Engine;
use serde_json::{json, Value};
use sqlx::PgPool;
use crate::shared::models::automation_model::TriggerKind;
use crate::shared::state::AppState;
/// Registers `SET_SCHEDULE "<cron>"`: stores a scheduled automation row whose
/// script name is derived from the cron expression, yielding the number of
/// rows affected.
pub fn set_schedule_keyword(state: &AppState, engine: &mut Engine) {
    let db = state.db_custom.clone();
    engine
        .register_custom_syntax(["SET_SCHEDULE", "$string$"], true, {
            let db = db.clone();
            move |context, inputs| {
                let cron = context.eval_expression_tree(&inputs[0])?.to_string();
                let script_name = format!("cron_{}.rhai", cron.replace(' ', "_"));
                let pool = db.as_ref().unwrap();
                // Bridge the async DB call into this synchronous callback.
                let outcome = tokio::task::block_in_place(|| {
                    tokio::runtime::Handle::current()
                        .block_on(execute_set_schedule(pool, &cron, &script_name))
                })
                .map_err(|e| format!("DB error: {}", e))?;
                match outcome.get("rows_affected") {
                    Some(n) => Ok(Dynamic::from(n.as_i64().unwrap_or(0))),
                    None => Err("No rows affected".into()),
                }
            }
        })
        .unwrap();
}
/// Inserts a scheduled automation row into `system_automations`.
///
/// The kind column stores `TriggerKind::Scheduled` as its i32 discriminant
/// (no DB-side enum). Returns a JSON summary including `rows_affected`.
pub async fn execute_set_schedule(
pool: &PgPool,
cron: &str,
script_name: &str,
) -> Result<Value, Box<dyn std::error::Error>> {
info!(
"Starting execute_set_schedule with cron: {}, script_name: {}",
cron, script_name
);
let result = sqlx::query(
r#"
        INSERT INTO system_automations
        (kind, schedule, script_name)
        VALUES ($1, $2, $3)
        "#,
)
.bind(TriggerKind::Scheduled as i32) // Cast to i32
.bind(cron)
.bind(script_name)
.execute(pool)
.await?;
Ok(json!({
"command": "set_schedule",
"schedule": cron,
"script_name": script_name,
"rows_affected": result.rows_affected()
}))
}

View file

@ -0,0 +1,46 @@
use crate::shared::state::AppState;
use log::info;
use rhai::{Dynamic, Engine};
use std::thread;
use std::time::Duration;
/// Registers `WAIT <expr>`: sleeps for the given number of seconds (int or
/// float), capped at 300s, and returns a confirmation string.
pub fn wait_keyword(_state: &AppState, engine: &mut Engine) {
    engine
        .register_custom_syntax(
            &["WAIT", "$expr$"],
            false, // Expression, not statement
            move |context, inputs| {
                let seconds = context.eval_expression_tree(&inputs[0])?;
                // Accept both integer and float operands.
                let duration_secs = if seconds.is::<i64>() {
                    seconds.cast::<i64>() as f64
                } else if seconds.is::<f64>() {
                    seconds.cast::<f64>()
                } else {
                    return Err(format!("WAIT expects a number, got: {}", seconds).into());
                };
                if duration_secs < 0.0 {
                    return Err("WAIT duration cannot be negative".into());
                }
                // Cap maximum wait time to prevent abuse (5 minutes max).
                let capped_duration = duration_secs.min(300.0);
                info!("WAIT {} seconds (thread sleep)", capped_duration);
                // NOTE(review): thread::sleep blocks the calling OS thread;
                // if scripts run on an async runtime worker this stalls that
                // worker for the whole wait — confirm scripts run on
                // dedicated/blocking threads.
                thread::sleep(Duration::from_secs_f64(capped_duration));
                info!("WAIT completed after {} seconds", capped_duration);
                Ok(Dynamic::from(format!("Waited {} seconds", capped_duration)))
            },
        )
        .unwrap();
}

154
src/basic/mod.rs Normal file
View file

@ -0,0 +1,154 @@
mod keywords;
use self::keywords::create_draft::create_draft_keyword;
use self::keywords::create_site::create_site_keyword;
use self::keywords::find::find_keyword;
use self::keywords::first::first_keyword;
use self::keywords::for_next::for_keyword;
use self::keywords::format::format_keyword;
use self::keywords::get::get_keyword;
use self::keywords::get_website::get_website_keyword;
use self::keywords::last::last_keyword;
use self::keywords::llm_keyword::llm_keyword;
use self::keywords::on::on_keyword;
use self::keywords::print::print_keyword;
use self::keywords::set::set_keyword;
use self::keywords::set_schedule::set_schedule_keyword;
use self::keywords::wait::wait_keyword;
use crate::shared::AppState;
use log::info;
use rhai::{Dynamic, Engine, EvalAltResult};
/// Wrapper around a Rhai [`Engine`] pre-configured with every BASIC-style
/// custom keyword; compiles and runs BASIC-flavoured scripts.
pub struct ScriptService {
// Engine with all custom keywords registered at construction time.
engine: Engine,
}
impl ScriptService {
/// Builds a Rhai engine configured for the BASIC dialect and registers
/// every custom keyword against the shared application state.
pub fn new(state: &AppState) -> Self {
let mut engine = Engine::new();
// Configure engine for BASIC-like syntax
engine.set_allow_anonymous_fn(true);
engine.set_allow_looping(true);
create_draft_keyword(state, &mut engine);
create_site_keyword(state, &mut engine);
find_keyword(state, &mut engine);
for_keyword(state, &mut engine);
first_keyword(&mut engine);
last_keyword(&mut engine);
format_keyword(&mut engine);
llm_keyword(state, &mut engine);
get_website_keyword(state, &mut engine);
get_keyword(state, &mut engine);
set_keyword(state, &mut engine);
wait_keyword(state, &mut engine);
print_keyword(state, &mut engine);
on_keyword(state, &mut engine);
set_schedule_keyword(state, &mut engine);
ScriptService { engine }
}
/// Rewrites BASIC-flavoured source into parseable Rhai: `FOR EACH` opens a
/// braced block that the matching `NEXT` closes, and `;` terminators are
/// appended to statements.
///
/// Panics on unbalanced `FOR EACH` / `NEXT` pairs.
fn preprocess_basic_script(&self, script: &str) -> String {
let mut result = String::new();
// Stack of the indent levels at which each open FOR EACH started.
let mut for_stack: Vec<usize> = Vec::new();
let mut current_indent = 0;
for line in script.lines() {
let trimmed = line.trim();
// Skip empty lines and comments
if trimmed.is_empty() || trimmed.starts_with("//") || trimmed.starts_with("REM") {
result.push_str(line);
result.push('\n');
continue;
}
// Handle FOR EACH start: emit the header followed by an opening
// brace, then indent the loop body by four more spaces.
if trimmed.starts_with("FOR EACH") {
for_stack.push(current_indent);
result.push_str(&" ".repeat(current_indent));
result.push_str(trimmed);
result.push_str("{\n");
current_indent += 4;
result.push_str(&" ".repeat(current_indent));
result.push('\n');
continue;
}
// Handle NEXT: close the innermost FOR EACH block.
if trimmed.starts_with("NEXT") {
if let Some(expected_indent) = for_stack.pop() {
// current_indent is always >= 4 here: every stacked FOR EACH
// was pushed alongside a +4 indent increase.
if (current_indent - 4) != expected_indent {
panic!("NEXT without matching FOR EACH");
}
current_indent = current_indent - 4;
result.push_str(&" ".repeat(current_indent));
result.push_str("}\n");
result.push_str(&" ".repeat(current_indent));
result.push_str(trimmed);
result.push(';');
result.push('\n');
continue;
} else {
panic!("NEXT without matching FOR EACH");
}
}
// Handle EXIT FOR: emitted verbatim, no terminator.
if trimmed == "EXIT FOR" {
result.push_str(&" ".repeat(current_indent));
result.push_str(trimmed);
result.push('\n');
continue;
}
// Handle all remaining lines.
result.push_str(&" ".repeat(current_indent));
let basic_commands = [
"SET", "CREATE", "PRINT", "FOR", "FIND", "GET", "EXIT", "IF", "THEN", "ELSE",
"END IF", "WHILE", "WEND", "DO", "LOOP",
];
let is_basic_command = basic_commands.iter().any(|&cmd| trimmed.starts_with(cmd));
let is_control_flow = trimmed.starts_with("IF")
|| trimmed.starts_with("ELSE")
|| trimmed.starts_with("END IF");
if is_basic_command || !for_stack.is_empty() || is_control_flow {
// BASIC-style commands, control flow, and any line inside an
// open FOR EACH block always get a terminator appended.
result.push_str(trimmed);
result.push(';');
} else {
// Other statements get a terminator only when one is not already
// present and the line does not open or close a block.
result.push_str(trimmed);
if !trimmed.ends_with(';') && !trimmed.ends_with('{') && !trimmed.ends_with('}') {
result.push(';');
}
}
result.push('\n');
}
if !for_stack.is_empty() {
panic!("Unclosed FOR EACH loop");
}
result
}
/// Preprocesses the BASIC-style script (see `preprocess_basic_script`) and
/// compiles the result into a reusable Rhai AST.
pub fn compile(&self, script: &str) -> Result<rhai::AST, Box<EvalAltResult>> {
let processed_script = self.preprocess_basic_script(script);
info!("Processed Script:\n{}", processed_script);
match self.engine.compile(&processed_script) {
Ok(ast) => Ok(ast),
Err(parse_error) => Err(Box::new(EvalAltResult::from(parse_error))),
}
}
/// Evaluates a previously compiled AST on this service's engine.
pub fn run(&self, ast: &rhai::AST) -> Result<Dynamic, Box<EvalAltResult>> {
self.engine.eval_ast(ast)
}
}

852
src/bot/mod.rs Normal file
View file

@ -0,0 +1,852 @@
use actix_web::{web, HttpRequest, HttpResponse, Result};
use actix_ws::Message as WsMessage;
use chrono::Utc;
use langchain_rust::{
chain::{Chain, LLMChain},
llm::openai::OpenAI,
memory::SimpleMemory,
prompt_args,
tools::{postgres::PostgreSQLEngine, SQLDatabaseBuilder},
vectorstore::qdrant::Qdrant as LangChainQdrant,
vectorstore::{VecStoreOptions, VectorStore},
};
use log::info;
use serde_json;
use std::collections::HashMap;
use std::fs;
use std::sync::Arc;
use tokio::sync::{mpsc, Mutex};
use uuid::Uuid;
use crate::{
auth::AuthService,
channels::{ChannelAdapter, VoiceAdapter, WebChannelAdapter},
chart::ChartGenerator,
llm::LLMProvider,
session::SessionManager,
shared::{BotResponse, UserMessage, UserSession},
tools::ToolManager,
whatsapp::WhatsAppAdapter,
};
/// Central coordinator: routes inbound user messages through session
/// handling, mode-specific handlers and the LLM, then sends the response
/// back over the originating channel adapter.
pub struct BotOrchestrator {
session_manager: SessionManager,
tool_manager: ToolManager,
llm_provider: Arc<dyn LLMProvider>,
auth_service: AuthService,
// Channel adapters keyed by channel type string (presumably "web",
// "whatsapp", "voice" per the adapter imports — confirm against callers).
channels: HashMap<String, Arc<dyn ChannelAdapter>>,
// Per-session senders used to stream responses back to waiting clients.
response_channels: Arc<Mutex<HashMap<String, mpsc::Sender<BotResponse>>>>,
// Optional capabilities; None disables the corresponding answer mode.
chart_generator: Option<Arc<ChartGenerator>>,
vector_store: Option<Arc<LangChainQdrant>>,
sql_chain: Option<Arc<LLMChain>>,
}
impl BotOrchestrator {
pub fn new(
session_manager: SessionManager,
tool_manager: ToolManager,
llm_provider: Arc<dyn LLMProvider>,
auth_service: AuthService,
chart_generator: Option<Arc<ChartGenerator>>,
vector_store: Option<Arc<LangChainQdrant>>,
sql_chain: Option<Arc<LLMChain>>,
) -> Self {
Self {
session_manager,
tool_manager,
llm_provider,
auth_service,
channels: HashMap::new(),
response_channels: Arc::new(Mutex::new(HashMap::new())),
chart_generator,
vector_store,
sql_chain,
}
}
/// Registers (or replaces) the adapter used for the given channel type.
pub fn add_channel(&mut self, channel_type: &str, adapter: Arc<dyn ChannelAdapter>) {
    let key = channel_type.to_owned();
    self.channels.insert(key, adapter);
}
/// Associates an mpsc sender with a session id so streamed bot responses
/// can be routed back to that session's client.
pub async fn register_response_channel(
    &self,
    session_id: String,
    sender: mpsc::Sender<BotResponse>,
) {
    let mut channels = self.response_channels.lock().await;
    channels.insert(session_id, sender);
}
/// Persists the user's answer mode (e.g. "tool", "document") for a bot.
///
/// Thin pass-through to `SessionManager::update_answer_mode`; any error
/// from the session layer is propagated unchanged.
pub async fn set_user_answer_mode(
&self,
user_id: &str,
bot_id: &str,
mode: &str,
) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
self.session_manager
.update_answer_mode(user_id, bot_id, mode)
.await?;
Ok(())
}
/// End-to-end handling of one inbound user message: resolve the session,
/// short-circuit into an in-flight tool conversation if one is active,
/// persist both sides of the exchange, dispatch to the mode-specific
/// handler, and push the reply out through the originating channel adapter.
pub async fn process_message(
&self,
message: UserMessage,
) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
info!(
"Processing message from channel: {}, user: {}",
message.channel, message.user_id
);
// NOTE(review): an unparseable user_id falls back to a *random* UUID,
// so such users get a fresh session on every message — confirm this is
// intended rather than an error path.
let user_id = Uuid::parse_str(&message.user_id).unwrap_or_else(|_| Uuid::new_v4());
// Unparseable bot ids collapse onto the all-zero UUID.
let bot_id = Uuid::parse_str(&message.bot_id)
.unwrap_or_else(|_| Uuid::parse_str("00000000-0000-0000-0000-000000000000").unwrap());
// Reuse the existing session for this (user, bot) pair or create one.
let session = match self
.session_manager
.get_user_session(user_id, bot_id)
.await?
{
Some(session) => session,
None => {
self.session_manager
.create_session(user_id, bot_id, "New Conversation")
.await?
}
};
// A tool awaiting user input consumes the message directly; nothing is
// persisted or answered here in that case.
if session.answer_mode == "tool" && session.current_tool.is_some() {
self.tool_manager
.provide_user_response(&message.user_id, &message.bot_id, message.content.clone())
.await?;
return Ok(());
}
// Persist the user's message before generating a reply.
self.session_manager
.save_message(
session.id,
user_id,
"user",
&message.content,
&message.message_type,
)
.await?;
// Route to the handler for the session's answer mode; anything
// unrecognized falls back to the direct LLM handler.
let response_content = match session.answer_mode.as_str() {
"document" => self.document_mode_handler(&message, &session).await?,
"chart" => self.chart_mode_handler(&message, &session).await?,
"database" => self.database_mode_handler(&message, &session).await?,
"tool" => self.tool_mode_handler(&message, &session).await?,
_ => self.direct_mode_handler(&message, &session).await?,
};
// Persist the assistant's reply as well.
self.session_manager
.save_message(session.id, user_id, "assistant", &response_content, "text")
.await?;
let bot_response = BotResponse {
bot_id: message.bot_id,
user_id: message.user_id,
session_id: message.session_id,
channel: message.channel,
content: response_content,
message_type: "text".to_string(),
stream_token: None,
is_complete: true,
};
// Deliver over the adapter registered for the inbound channel; messages
// for unknown channels are silently dropped.
if let Some(adapter) = self.channels.get(&message.channel) {
adapter.send_message(bot_response).await?;
}
Ok(())
}
    /// RAG-style answer: prepends up to three similar documents from the
    /// vector store to the prompt; degrades to direct chat when no vector
    /// store is configured.
    async fn document_mode_handler(
        &self,
        message: &UserMessage,
        session: &UserSession,
    ) -> Result<String, Box<dyn std::error::Error + Send + Sync>> {
        if let Some(vector_store) = &self.vector_store {
            let similar_docs = vector_store
                .similarity_search(&message.content, 3, &VecStoreOptions::default())
                .await?;
            let mut enhanced_prompt = format!("User question: {}\n\n", message.content);
            // Only augment the prompt when the search actually returned hits.
            if !similar_docs.is_empty() {
                enhanced_prompt.push_str("Relevant documents:\n");
                for (i, doc) in similar_docs.iter().enumerate() {
                    enhanced_prompt.push_str(&format!("[Doc {}]: {}\n", i + 1, doc.page_content));
                }
                enhanced_prompt.push_str(
                    "\nPlease answer the user's question based on the provided documents.",
                );
            }
            self.llm_provider
                .generate(&enhanced_prompt, &serde_json::Value::Null)
                .await
        } else {
            // No vector store configured — plain chat fallback.
            self.direct_mode_handler(message, session).await
        }
    }
    /// Chart answer: asks the generator to build a chart for the query and
    /// records a system note; degrades to document mode when chart support
    /// is not configured.
    async fn chart_mode_handler(
        &self,
        message: &UserMessage,
        session: &UserSession,
    ) -> Result<String, Box<dyn std::error::Error + Send + Sync>> {
        if let Some(chart_generator) = &self.chart_generator {
            // NOTE(review): the chart type is hard-coded to "bar" here.
            let chart_response = chart_generator
                .generate_chart(&message.content, "bar")
                .await?;
            self.session_manager
                .save_message(
                    session.id,
                    session.user_id,
                    "system",
                    &format!("Generated chart for query: {}", message.content),
                    "chart",
                )
                .await?;
            // Only the SQL text is surfaced to the user; the rendered image
            // is not delivered through this path.
            Ok(format!(
                "Chart generated for your query. Data retrieved: {}",
                chart_response.sql_query
            ))
        } else {
            self.document_mode_handler(message, session).await
        }
    }
    /// Natural-language-to-SQL answer. Uses the preconfigured SQL chain when
    /// present; otherwise builds a one-off chain from DATABASE_URL.
    async fn database_mode_handler(
        &self,
        message: &UserMessage,
        _session: &UserSession,
    ) -> Result<String, Box<dyn std::error::Error + Send + Sync>> {
        if let Some(sql_chain) = &self.sql_chain {
            let input_variables = prompt_args! {
                "input" => message.content,
            };
            let result = sql_chain.invoke(input_variables).await?;
            Ok(result.to_string())
        } else {
            // Fallback builds an ad-hoc chain per request. NOTE(review): this
            // reconnects to the database on every message — consider caching.
            let db_url = std::env::var("DATABASE_URL")?;
            let engine = PostgreSQLEngine::new(&db_url).await?;
            let db = SQLDatabaseBuilder::new(engine).build().await?;
            let llm = OpenAI::default();
            let chain = langchain_rust::chain::SQLDatabaseChainBuilder::new()
                .llm(llm)
                .top_k(5)
                .database(db)
                .build()?;
            let input_variables = chain.prompt_builder().query(&message.content).build();
            let result = chain.invoke(input_variables).await?;
            Ok(result.to_string())
        }
    }
    /// Tool-mode answer. On the "calculator" keyword, spawns the calculator
    /// tool in the background and immediately acknowledges; otherwise chats
    /// with the LLM, advertising the available tools in the prompt.
    async fn tool_mode_handler(
        &self,
        message: &UserMessage,
        _session: &UserSession,
    ) -> Result<String, Box<dyn std::error::Error + Send + Sync>> {
        if message.content.to_lowercase().contains("calculator") {
            if let Some(_adapter) = self.channels.get(&message.channel) {
                // NOTE(review): the receiver half `_rx` is discarded; tool
                // output is apparently expected to flow through the sender
                // registered below — confirm this is intentional.
                let (tx, _rx) = mpsc::channel(100);
                self.register_response_channel(message.session_id.clone(), tx.clone())
                    .await;
                let tool_manager = self.tool_manager.clone();
                let user_id_str = message.user_id.clone();
                let bot_id_str = message.bot_id.clone();
                let session_manager = self.session_manager.clone();
                // Run the tool session without blocking this handler.
                tokio::spawn(async move {
                    let _ = tool_manager
                        .execute_tool_with_session(
                            "calculator",
                            &user_id_str,
                            &bot_id_str,
                            session_manager,
                            tx,
                        )
                        .await;
                });
            }
            Ok("Starting calculator tool...".to_string())
        } else {
            // No tool keyword: plain LLM completion with a tools hint.
            let available_tools = self.tool_manager.list_tools();
            let tools_context = if !available_tools.is_empty() {
                format!("\n\nAvailable tools: {}. If the user needs calculations, suggest using the calculator tool.", available_tools.join(", "))
            } else {
                String::new()
            };
            let full_prompt = format!("{}{}", message.content, tools_context);
            self.llm_provider
                .generate(&full_prompt, &serde_json::Value::Null)
                .await
        }
    }
    /// Plain chat: replays the stored conversation into a flat
    /// "role: content" prompt and asks the LLM for the next reply.
    async fn direct_mode_handler(
        &self,
        message: &UserMessage,
        session: &UserSession,
    ) -> Result<String, Box<dyn std::error::Error + Send + Sync>> {
        let history = self
            .session_manager
            .get_conversation_history(session.id, session.user_id)
            .await?;
        // Rebuild LangChain memory from the persisted (role, content) rows;
        // roles other than user/assistant (e.g. "system") are skipped.
        let mut memory = SimpleMemory::new();
        for (role, content) in history {
            match role.as_str() {
                "user" => memory.add_user_message(&content),
                "assistant" => memory.add_ai_message(&content),
                _ => {}
            }
        }
        let mut prompt = String::new();
        if let Some(chat_history) = memory.get_chat_history() {
            for message in chat_history {
                prompt.push_str(&format!(
                    "{}: {}\n",
                    message.message_type(),
                    message.content()
                ));
            }
        }
        prompt.push_str(&format!("User: {}\nAssistant:", message.content));
        self.llm_provider
            .generate(&prompt, &serde_json::Value::Null)
            .await
    }
pub async fn stream_response(
&self,
message: UserMessage,
mut response_tx: mpsc::Sender<BotResponse>,
) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
info!("Streaming response for user: {}", message.user_id);
let user_id = Uuid::parse_str(&message.user_id).unwrap_or_else(|_| Uuid::new_v4());
let bot_id = Uuid::parse_str(&message.bot_id)
.unwrap_or_else(|_| Uuid::parse_str("00000000-0000-0000-0000-000000000000").unwrap());
let session = match self
.session_manager
.get_user_session(user_id, bot_id)
.await?
{
Some(session) => session,
None => {
self.session_manager
.create_session(user_id, bot_id, "New Conversation")
.await?
}
};
if session.answer_mode == "tool" && session.current_tool.is_some() {
self.tool_manager
.provide_user_response(&message.user_id, &message.bot_id, message.content.clone())
.await?;
return Ok(());
}
self.session_manager
.save_message(
session.id,
user_id,
"user",
&message.content,
&message.message_type,
)
.await?;
let history = self
.session_manager
.get_conversation_history(session.id, user_id)
.await?;
let mut memory = SimpleMemory::new();
for (role, content) in history {
match role.as_str() {
"user" => memory.add_user_message(&content),
"assistant" => memory.add_ai_message(&content),
_ => {}
}
}
let mut prompt = String::new();
if let Some(chat_history) = memory.get_chat_history() {
for message in chat_history {
prompt.push_str(&format!(
"{}: {}\n",
message.message_type(),
message.content()
));
}
}
prompt.push_str(&format!("User: {}\nAssistant:", message.content));
let (stream_tx, mut stream_rx) = mpsc::channel(100);
let llm_provider = self.llm_provider.clone();
let prompt_clone = prompt.clone();
tokio::spawn(async move {
let _ = llm_provider
.generate_stream(&prompt_clone, &serde_json::Value::Null, stream_tx)
.await;
});
let mut full_response = String::new();
while let Some(chunk) = stream_rx.recv().await {
full_response.push_str(&chunk);
let bot_response = BotResponse {
bot_id: message.bot_id.clone(),
user_id: message.user_id.clone(),
session_id: message.session_id.clone(),
channel: message.channel.clone(),
content: chunk,
message_type: "text".to_string(),
stream_token: None,
is_complete: false,
};
if response_tx.send(bot_response).await.is_err() {
break;
}
}
self.session_manager
.save_message(session.id, user_id, "assistant", &full_response, "text")
.await?;
let final_response = BotResponse {
bot_id: message.bot_id,
user_id: message.user_id,
session_id: message.session_id,
channel: message.channel,
content: "".to_string(),
message_type: "text".to_string(),
stream_token: None,
is_complete: true,
};
response_tx.send(final_response).await?;
Ok(())
}
    /// Lists all stored sessions for a user (thin pass-through to the
    /// session manager).
    pub async fn get_user_sessions(
        &self,
        user_id: Uuid,
    ) -> Result<Vec<UserSession>, Box<dyn std::error::Error + Send + Sync>> {
        self.session_manager.get_user_sessions(user_id).await
    }
    /// Returns the stored (role, content) pairs for a session (thin
    /// pass-through to the session manager).
    pub async fn get_conversation_history(
        &self,
        session_id: Uuid,
        user_id: Uuid,
    ) -> Result<Vec<(String, String)>, Box<dyn std::error::Error + Send + Sync>> {
        self.session_manager
            .get_conversation_history(session_id, user_id)
            .await
    }
pub async fn process_message_with_tools(
&self,
message: UserMessage,
) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
info!(
"Processing message with tools from user: {}",
message.user_id
);
let user_id = Uuid::parse_str(&message.user_id).unwrap_or_else(|_| Uuid::new_v4());
let bot_id = Uuid::parse_str(&message.bot_id)
.unwrap_or_else(|_| Uuid::parse_str("00000000-0000-0000-0000-000000000000").unwrap());
let session = match self
.session_manager
.get_user_session(user_id, bot_id)
.await?
{
Some(session) => session,
None => {
self.session_manager
.create_session(user_id, bot_id, "New Conversation")
.await?
}
};
self.session_manager
.save_message(
session.id,
user_id,
"user",
&message.content,
&message.message_type,
)
.await?;
let is_tool_waiting = self
.tool_manager
.is_tool_waiting(&message.session_id)
.await
.unwrap_or(false);
if is_tool_waiting {
self.tool_manager
.provide_input(&message.session_id, &message.content)
.await?;
if let Ok(tool_output) = self.tool_manager.get_tool_output(&message.session_id).await {
for output in tool_output {
let bot_response = BotResponse {
bot_id: message.bot_id.clone(),
user_id: message.user_id.clone(),
session_id: message.session_id.clone(),
channel: message.channel.clone(),
content: output,
message_type: "text".to_string(),
stream_token: None,
is_complete: true,
};
if let Some(adapter) = self.channels.get(&message.channel) {
adapter.send_message(bot_response).await?;
}
}
}
return Ok(());
}
let response = if message.content.to_lowercase().contains("calculator")
|| message.content.to_lowercase().contains("calculate")
|| message.content.to_lowercase().contains("math")
{
match self
.tool_manager
.execute_tool("calculator", &message.session_id, &message.user_id)
.await
{
Ok(tool_result) => {
self.session_manager
.save_message(
session.id,
user_id,
"assistant",
&tool_result.output,
"tool_start",
)
.await?;
tool_result.output
}
Err(e) => {
format!("I encountered an error starting the calculator: {}", e)
}
}
} else {
let available_tools = self.tool_manager.list_tools();
let tools_context = if !available_tools.is_empty() {
format!("\n\nAvailable tools: {}. If the user needs calculations, suggest using the calculator tool.", available_tools.join(", "))
} else {
String::new()
};
let full_prompt = format!("{}{}", message.content, tools_context);
self.llm_provider
.generate(&full_prompt, &serde_json::Value::Null)
.await?
};
self.session_manager
.save_message(session.id, user_id, "assistant", &response, "text")
.await?;
let bot_response = BotResponse {
bot_id: message.bot_id,
user_id: message.user_id,
session_id: message.session_id,
channel: message.channel,
content: response,
message_type: "text".to_string(),
stream_token: None,
is_complete: true,
};
if let Some(adapter) = self.channels.get(&message.channel) {
adapter.send_message(bot_response).await?;
}
Ok(())
}
}
/// WebSocket endpoint bridging a browser connection to the orchestrator.
/// Incoming text frames become `UserMessage`s; orchestrator output is
/// serialized to JSON and written back over the socket.
#[actix_web::get("/ws")]
async fn websocket_handler(
    req: HttpRequest,
    stream: web::Payload,
    data: web::Data<crate::shared::AppState>,
) -> Result<HttpResponse, actix_web::Error> {
    let (res, mut session, mut msg_stream) = actix_ws::handle(&req, stream)?;
    // Each socket gets a fresh session id; its sender is registered with the
    // orchestrator and both adapters so any of them can push responses.
    let session_id = Uuid::new_v4().to_string();
    let (tx, mut rx) = mpsc::channel::<BotResponse>(100);
    data.orchestrator
        .register_response_channel(session_id.clone(), tx.clone())
        .await;
    data.web_adapter
        .add_connection(session_id.clone(), tx.clone())
        .await;
    data.voice_adapter
        .add_connection(session_id.clone(), tx.clone())
        .await;
    let orchestrator = data.orchestrator.clone();
    let web_adapter = data.web_adapter.clone();
    // Writer task: drain bot responses and push them to the client.
    actix_web::rt::spawn(async move {
        while let Some(msg) = rx.recv().await {
            if let Ok(json) = serde_json::to_string(&msg) {
                let _ = session.text(json).await;
            }
        }
    });
    // Reader task: turn client frames into orchestrator calls.
    actix_web::rt::spawn(async move {
        while let Some(Ok(msg)) = msg_stream.recv().await {
            match msg {
                WsMessage::Text(text) => {
                    // NOTE(review): identity is hard-coded — this endpoint has
                    // no per-connection authentication yet.
                    let user_message = UserMessage {
                        bot_id: "default_bot".to_string(),
                        user_id: "default_user".to_string(),
                        session_id: session_id.clone(),
                        channel: "web".to_string(),
                        content: text.to_string(),
                        message_type: "text".to_string(),
                        media_url: None,
                        timestamp: Utc::now(),
                    };
                    if let Err(e) = orchestrator.stream_response(user_message, tx.clone()).await {
                        info!("Error processing message: {}", e);
                    }
                }
                WsMessage::Close(_) => {
                    web_adapter.remove_connection(&session_id).await;
                    break;
                }
                _ => {}
            }
        }
    });
    Ok(res)
}
/// WhatsApp webhook verification handshake: echoes the challenge when the
/// adapter accepts the mode/token pair, 403 otherwise.
#[actix_web::get("/api/whatsapp/webhook")]
async fn whatsapp_webhook_verify(
    data: web::Data<crate::shared::AppState>,
    web::Query(params): web::Query<HashMap<String, String>>,
) -> Result<HttpResponse> {
    // Shared default for absent query parameters. Binding it to a local
    // fixes the original `unwrap_or(&"".to_string())`, which borrowed a
    // temporary `String` dropped at the end of the statement (E0716).
    let empty = String::new();
    let mode = params.get("hub.mode").unwrap_or(&empty);
    let token = params.get("hub.verify_token").unwrap_or(&empty);
    let challenge = params.get("hub.challenge").unwrap_or(&empty);
    match data.whatsapp_adapter.verify_webhook(mode, token, challenge) {
        Ok(challenge_response) => Ok(HttpResponse::Ok().body(challenge_response)),
        Err(_) => Ok(HttpResponse::Forbidden().body("Verification failed")),
    }
}
/// WhatsApp inbound webhook: converts the provider payload into
/// `UserMessage`s and runs each through the orchestrator.
#[actix_web::post("/api/whatsapp/webhook")]
async fn whatsapp_webhook(
    data: web::Data<crate::shared::AppState>,
    payload: web::Json<crate::whatsapp::WhatsAppMessage>,
) -> Result<HttpResponse> {
    match data
        .whatsapp_adapter
        .process_incoming_message(payload.into_inner())
        .await
    {
        Ok(user_messages) => {
            for user_message in user_messages {
                // Per-message failures are logged, not surfaced to the
                // sender, so one bad message does not fail the whole batch.
                if let Err(e) = data.orchestrator.process_message(user_message).await {
                    log::error!("Error processing WhatsApp message: {}", e);
                }
            }
            Ok(HttpResponse::Ok().body(""))
        }
        Err(e) => {
            log::error!("Error processing WhatsApp webhook: {}", e);
            Ok(HttpResponse::BadRequest().body("Invalid message"))
        }
    }
}
#[actix_web::post("/api/voice/start")]
async fn voice_start(
data: web::Data<crate::shared::AppState>,
info: web::Json<serde_json::Value>,
) -> Result<HttpResponse> {
let session_id = info
.get("session_id")
.and_then(|s| s.as_str())
.unwrap_or("");
let user_id = info
.get("user_id")
.and_then(|u| u.as_str())
.unwrap_or("user");
match data
.voice_adapter
.start_voice_session(session_id, user_id)
.await
{
Ok(token) => {
Ok(HttpResponse::Ok().json(serde_json::json!({"token": token, "status": "started"})))
}
Err(e) => {
Ok(HttpResponse::InternalServerError()
.json(serde_json::json!({"error": e.to_string()})))
}
}
}
#[actix_web::post("/api/voice/stop")]
async fn voice_stop(
data: web::Data<crate::shared::AppState>,
info: web::Json<serde_json::Value>,
) -> Result<HttpResponse> {
let session_id = info
.get("session_id")
.and_then(|s| s.as_str())
.unwrap_or("");
match data.voice_adapter.stop_voice_session(session_id).await {
Ok(()) => Ok(HttpResponse::Ok().json(serde_json::json!({"status": "stopped"}))),
Err(e) => {
Ok(HttpResponse::InternalServerError()
.json(serde_json::json!({"error": e.to_string()})))
}
}
}
/// Hands the client a fresh session id; nothing is persisted here.
#[actix_web::post("/api/sessions")]
async fn create_session(_data: web::Data<crate::shared::AppState>) -> Result<HttpResponse> {
    let body = serde_json::json!({
        "session_id": Uuid::new_v4(),
        "title": "New Conversation",
        "created_at": Utc::now()
    });
    Ok(HttpResponse::Ok().json(body))
}
/// Lists sessions. NOTE(review): the user id is a hard-coded placeholder —
/// every caller sees the same demo user's sessions until auth is wired in.
#[actix_web::get("/api/sessions")]
async fn get_sessions(data: web::Data<crate::shared::AppState>) -> Result<HttpResponse> {
    let user_id = Uuid::parse_str("00000000-0000-0000-0000-000000000001").unwrap();
    match data.orchestrator.get_user_sessions(user_id).await {
        Ok(sessions) => Ok(HttpResponse::Ok().json(sessions)),
        Err(e) => {
            Ok(HttpResponse::InternalServerError()
                .json(serde_json::json!({"error": e.to_string()})))
        }
    }
}
#[actix_web::get("/api/sessions/{session_id}")]
async fn get_session_history(
data: web::Data<crate::shared::AppState>,
path: web::Path<String>,
) -> Result<HttpResponse> {
let session_id = path.into_inner();
let user_id = Uuid::parse_str("00000000-0000-0000-0000-000000000001").unwrap();
match Uuid::parse_str(&session_id) {
Ok(session_uuid) => match data
.orchestrator
.get_conversation_history(session_uuid, user_id)
.await
{
Ok(history) => Ok(HttpResponse::Ok().json(history)),
Err(e) => Ok(HttpResponse::InternalServerError()
.json(serde_json::json!({"error": e.to_string()}))),
},
Err(_) => {
Ok(HttpResponse::BadRequest().json(serde_json::json!({"error": "Invalid session ID"})))
}
}
}
#[actix_web::post("/api/set_mode")]
async fn set_mode_handler(
data: web::Data<crate::shared::AppState>,
info: web::Json<HashMap<String, String>>,
) -> Result<HttpResponse> {
let default_user = "default_user".to_string();
let default_bot = "default_bot".to_string();
let default_mode = "direct".to_string();
let user_id = info.get("user_id").unwrap_or(&default_user);
let bot_id = info.get("bot_id").unwrap_or(&default_bot);
let mode = info.get("mode").unwrap_or(&default_mode);
if let Err(e) = data
.orchestrator
.set_user_answer_mode(user_id, bot_id, mode)
.await
{
return Ok(
HttpResponse::InternalServerError().json(serde_json::json!({"error": e.to_string()}))
);
}
Ok(HttpResponse::Ok().json(serde_json::json!({"status": "mode_updated"})))
}
/// Serves the landing page: prefer the on-disk template, fall back to the
/// copy embedded in the binary at compile time.
#[actix_web::get("/")]
async fn index() -> Result<HttpResponse> {
    let html = match fs::read_to_string("templates/index.html") {
        Ok(contents) => contents,
        Err(_) => include_str!("../../static/index.html").to_string(),
    };
    Ok(HttpResponse::Ok().content_type("text/html").body(html))
}
/// Serves files from the local `static/` directory with extension-based
/// content-type detection.
#[actix_web::get("/static/{filename:.*}")]
async fn static_files(req: HttpRequest) -> Result<HttpResponse> {
    let filename = req.match_info().query("filename");
    // Security fix: the wildcard route accepts arbitrary text, so the
    // original `format!("static/{}", filename)` allowed path traversal
    // (e.g. "../Cargo.toml"). Reject anything that is not a plain relative
    // path made of normal components.
    let is_unsafe = std::path::Path::new(filename)
        .components()
        .any(|c| !matches!(c, std::path::Component::Normal(_)));
    if is_unsafe {
        return Ok(HttpResponse::Forbidden().body("Invalid path"));
    }
    let path = format!("static/{}", filename);
    match fs::read(&path) {
        Ok(content) => {
            // Minimal content-type sniffing by file extension.
            let content_type = match filename {
                f if f.ends_with(".js") => "application/javascript",
                f if f.ends_with(".css") => "text/css",
                f if f.ends_with(".png") => "image/png",
                f if f.ends_with(".jpg") | f.ends_with(".jpeg") => "image/jpeg",
                _ => "text/plain",
            };
            Ok(HttpResponse::Ok().content_type(content_type).body(content))
        }
        Err(_) => Ok(HttpResponse::NotFound().body("File not found")),
    }
}

178
src/channels/mod.rs Normal file
View file

@ -0,0 +1,178 @@
use async_trait::async_trait;
use chrono::Utc;
use livekit::{DataPacketKind, Room, RoomOptions};
use log::info;
use std::collections::HashMap;
use std::sync::Arc;
use tokio::sync::{mpsc, Mutex};
use crate::shared::{BotResponse, UserMessage};
/// Outbound delivery contract implemented by every channel adapter
/// (web, voice, WhatsApp, ...).
#[async_trait]
pub trait ChannelAdapter: Send + Sync {
    /// Delivers one bot response to the user on this channel.
    async fn send_message(&self, response: BotResponse) -> Result<(), Box<dyn std::error::Error>>;
}
/// Channel adapter for browser (WebSocket) clients.
pub struct WebChannelAdapter {
    // session_id -> sender feeding that session's WebSocket writer task.
    connections: Arc<Mutex<HashMap<String, mpsc::Sender<BotResponse>>>>,
}
impl WebChannelAdapter {
    /// Creates an adapter with no registered connections.
    pub fn new() -> Self {
        let connections = Arc::new(Mutex::new(HashMap::new()));
        Self { connections }
    }
    /// Registers the sender for a session, replacing any previous one.
    pub async fn add_connection(&self, session_id: String, tx: mpsc::Sender<BotResponse>) {
        let mut map = self.connections.lock().await;
        map.insert(session_id, tx);
    }
    /// Drops the sender for a session (e.g. after the socket closes).
    pub async fn remove_connection(&self, session_id: &str) {
        let mut map = self.connections.lock().await;
        map.remove(session_id);
    }
}
#[async_trait]
impl ChannelAdapter for WebChannelAdapter {
    /// Forwards the response to the session's registered sender. Responses
    /// for unknown sessions are silently dropped (best-effort delivery,
    /// matching the original behavior).
    async fn send_message(&self, response: BotResponse) -> Result<(), Box<dyn std::error::Error>> {
        let connections = self.connections.lock().await;
        match connections.get(&response.session_id) {
            Some(tx) => {
                tx.send(response).await?;
                Ok(())
            }
            None => Ok(()),
        }
    }
}
/// Channel adapter backed by LiveKit rooms for real-time voice sessions.
pub struct VoiceAdapter {
    // LiveKit server URL plus the API credentials used to mint join tokens.
    livekit_url: String,
    api_key: String,
    api_secret: String,
    // session_id -> connected Room handle.
    rooms: Arc<Mutex<HashMap<String, Room>>>,
    // session_id -> sender used to surface incoming voice data as BotResponses.
    connections: Arc<Mutex<HashMap<String, mpsc::Sender<BotResponse>>>>,
}
impl VoiceAdapter {
    /// Stores the LiveKit endpoint and API credentials; no rooms or
    /// connections exist yet.
    pub fn new(livekit_url: String, api_key: String, api_secret: String) -> Self {
        Self {
            livekit_url,
            api_key,
            api_secret,
            rooms: Arc::new(Mutex::new(HashMap::new())),
            connections: Arc::new(Mutex::new(HashMap::new())),
        }
    }
    /// Mints a join token, connects to the session's room and spawns a
    /// listener that converts incoming data packets into `BotResponse`s.
    /// Returns the token for the client to join with.
    ///
    /// NOTE(review): `AccessToken` is not among this file's visible imports
    /// (usually `livekit_api::access_token::AccessToken`) — confirm it
    /// resolves.
    pub async fn start_voice_session(
        &self,
        session_id: &str,
        user_id: &str,
    ) -> Result<String, Box<dyn std::error::Error>> {
        let token = AccessToken::with_api_key(&self.api_key, &self.api_secret)
            .with_identity(user_id)
            .with_name(user_id)
            .with_room_name(session_id)
            .with_room_join(true)
            .to_jwt()?;
        let room_options = RoomOptions {
            auto_subscribe: true,
            ..Default::default()
        };
        let (room, mut events) = Room::connect(&self.livekit_url, &token, room_options).await?;
        self.rooms
            .lock()
            .await
            .insert(session_id.to_string(), room.clone());
        let rooms_clone = self.rooms.clone();
        let connections_clone = self.connections.clone();
        let session_id_clone = session_id.to_string();
        // Event pump: runs until the room event stream ends, then removes
        // the room entry.
        tokio::spawn(async move {
            while let Some(event) = events.recv().await {
                match event {
                    livekit::prelude::RoomEvent::DataReceived(data_packet) => {
                        // Data packets are expected to carry JSON-encoded
                        // `UserMessage` payloads; anything else is ignored.
                        if let Ok(message) =
                            serde_json::from_slice::<UserMessage>(&data_packet.data)
                        {
                            info!("Received voice message: {}", message.content);
                            if let Some(tx) =
                                connections_clone.lock().await.get(&message.session_id)
                            {
                                // Send errors are deliberately ignored
                                // (receiver may have gone away).
                                let _ = tx
                                    .send(BotResponse {
                                        bot_id: message.bot_id,
                                        user_id: message.user_id,
                                        session_id: message.session_id,
                                        channel: "voice".to_string(),
                                        content: format!("🎤 Voice: {}", message.content),
                                        message_type: "voice".to_string(),
                                        stream_token: None,
                                        is_complete: true,
                                    })
                                    .await;
                            }
                        }
                    }
                    livekit::prelude::RoomEvent::TrackSubscribed(
                        track,
                        publication,
                        participant,
                    ) => {
                        // Only logged; track/publication are currently unused.
                        info!("Voice track subscribed from {}", participant.identity());
                    }
                    _ => {}
                }
            }
            rooms_clone.lock().await.remove(&session_id_clone);
        });
        Ok(token)
    }
    /// Disconnects and forgets the room for a session, if one exists.
    pub async fn stop_voice_session(
        &self,
        session_id: &str,
    ) -> Result<(), Box<dyn std::error::Error>> {
        if let Some(room) = self.rooms.lock().await.remove(session_id) {
            room.disconnect();
        }
        Ok(())
    }
    /// Registers the sender that receives this session's voice responses.
    pub async fn add_connection(&self, session_id: String, tx: mpsc::Sender<BotResponse>) {
        self.connections.lock().await.insert(session_id, tx);
    }
    /// Publishes a JSON "voice_response" payload into the session's room.
    /// Sessions without a room are silently skipped.
    pub async fn send_voice_response(
        &self,
        session_id: &str,
        text: &str,
    ) -> Result<(), Box<dyn std::error::Error>> {
        if let Some(room) = self.rooms.lock().await.get(session_id) {
            let voice_response = serde_json::json!({
                "type": "voice_response",
                "text": text,
                "timestamp": Utc::now()
            });
            room.local_participant().publish_data(
                serde_json::to_vec(&voice_response)?,
                DataPacketKind::Reliable,
                &[],
            )?;
        }
        Ok(())
    }
}
#[async_trait]
impl ChannelAdapter for VoiceAdapter {
async fn send_message(&self, response: BotResponse) -> Result<(), Box<dyn std::error::Error>> {
info!("Sending voice response to: {}", response.user_id);
self.send_voice_response(&response.session_id, &response.content)
.await
}
}

92
src/chart/mod.rs Normal file
View file

@ -0,0 +1,92 @@
use langchain_rust::{
chain::{Chain, SQLDatabaseChainBuilder, options::ChainCallOptions},
llm::openai::OpenAI,
tools::{postgres::PostgreSQLEngine, SQLDatabaseBuilder},
prompt::PromptTemplate,
};
/// Generates charts from natural-language questions: SQL via LangChain,
/// chart config via the LLM, rendering via a (stubbed) browser step.
pub struct ChartGenerator {
    // NOTE(review): this stores the chain *builder* and rebuilds the chain on
    // every query in `generate_sql` — consider storing the built chain.
    sql_chain: SQLDatabaseChainBuilder,
    llm: OpenAI,
}
impl ChartGenerator {
    /// Connects to Postgres and prepares (but does not build) the SQL chain.
    pub async fn new(database_url: &str) -> Result<Self, Box<dyn std::error::Error>> {
        let llm = OpenAI::default();
        let engine = PostgreSQLEngine::new(database_url).await?;
        let db = SQLDatabaseBuilder::new(engine).build().await?;
        let sql_chain = SQLDatabaseChainBuilder::new()
            .llm(llm.clone())
            .top_k(4)
            .database(db);
        Ok(Self {
            sql_chain,
            llm,
        })
    }
    /// Full pipeline: question -> SQL -> data -> chart config -> image.
    ///
    /// NOTE(review): `ChartResponse` and `Value` are not defined/imported in
    /// this file's visible header — confirm they resolve.
    pub async fn generate_chart(
        &self,
        question: &str,
        chart_type: &str
    ) -> Result<ChartResponse, Box<dyn std::error::Error>> {
        // Step 1: Generate SQL using LangChain
        let sql_result = self.generate_sql(question).await?;
        // Step 2: Execute SQL and get data
        let data = self.execute_sql(&sql_result).await?;
        // Step 3: Generate chart configuration using LLM
        let chart_config = self.generate_chart_config(&data, chart_type).await?;
        // Step 4: Generate and render chart
        let chart_image = self.render_chart(&chart_config).await?;
        Ok(ChartResponse {
            sql_query: sql_result,
            data,
            chart_image,
            chart_config,
        })
    }
    /// Runs the LangChain SQL chain to turn a question into SQL text.
    async fn generate_sql(&self, question: &str) -> Result<String, Box<dyn std::error::Error>> {
        // NOTE(review): `.expect` here panics on a build failure instead of
        // returning the error like the rest of this impl.
        let chain = self.sql_chain
            .clone()
            .build()
            .expect("Failed to build SQL chain");
        let input_variables = chain.prompt_builder().query(question).build();
        let result = chain.invoke(input_variables).await?;
        Ok(result.to_string())
    }
    /// Stub: supposed to run the generated SQL; currently returns `Null`.
    async fn execute_sql(&self, query: &str) -> Result<Value, Box<dyn std::error::Error>> {
        // Execute the generated SQL and return structured data
        // Implementation depends on your database setup
        Ok(Value::Null)
    }
    /// Asks the LLM for a billboard.js config JSON for the data/chart type.
    ///
    /// NOTE(review): `HumanMessage` is not in this file's visible imports —
    /// confirm it resolves.
    async fn generate_chart_config(&self, data: &Value, chart_type: &str) -> Result<Value, Box<dyn std::error::Error>> {
        let prompt = format!(
            "Given this data: {} and chart type: {}, generate a billboard.js configuration JSON. \
            Focus on creating meaningful visualizations for this business data.",
            data, chart_type
        );
        let message = HumanMessage::new(prompt);
        let result = self.llm.invoke(&[message]).await?;
        serde_json::from_str(&result.generation)
            .map_err(|e| e.into())
    }
    /// Stub: supposed to render the config to an image; currently empty bytes.
    async fn render_chart(&self, config: &Value) -> Result<Vec<u8>, Box<dyn std::error::Error>> {
        // Use headless browser to render chart and capture as image
        // This would integrate with your browser automation setup
        Ok(vec![])
    }
}

148
src/config/mod.rs Normal file
View file

@ -0,0 +1,148 @@
use std::env;
/// Top-level application configuration, assembled from environment
/// variables by `AppConfig::from_env`.
#[derive(Clone)]
pub struct AppConfig {
    pub minio: MinioConfig,
    pub server: ServerConfig,
    // Primary ("tables") database plus a second, customer-specific one.
    pub database: DatabaseConfig,
    pub database_custom: DatabaseConfig,
    pub email: EmailConfig,
    pub ai: AIConfig,
    // Root directory for served sites (SITES_ROOT).
    pub site_path: String,
}
/// Postgres connection settings; the URL is assembled by
/// `AppConfig::database_url` / `database_custom_url`.
#[derive(Clone)]
pub struct DatabaseConfig {
    pub username: String,
    pub password: String,
    pub server: String,
    pub port: u32,
    pub database: String,
}
/// MinIO / S3-compatible object-storage settings (DRIVE_* variables).
#[derive(Clone)]
pub struct MinioConfig {
    pub server: String,
    pub access_key: String,
    pub secret_key: String,
    pub use_ssl: bool,
    // May be empty when DRIVE_ORG_PREFIX is unset.
    pub bucket: String,
}
/// HTTP bind address for the web server (SERVER_HOST / SERVER_PORT).
#[derive(Clone)]
pub struct ServerConfig {
    pub host: String,
    pub port: u16,
}
/// Email account settings (EMAIL_* variables), used for both SMTP send and
/// IMAP listing in this codebase.
#[derive(Clone)]
pub struct EmailConfig {
    pub from: String,
    pub server: String,
    pub port: u16,
    pub username: String,
    pub password: String,
}
/// LLM provider settings (AI_* variables).
#[derive(Clone)]
pub struct AIConfig {
    pub instance: String,
    pub key: String,
    pub version: String,
    pub endpoint: String,
}
impl AppConfig {
    /// Connection URL for the primary ("tables") Postgres database.
    pub fn database_url(&self) -> String {
        format!(
            "postgres://{}:{}@{}:{}/{}",
            self.database.username,
            self.database.password,
            self.database.server,
            self.database.port,
            self.database.database
        )
    }
    /// Connection URL for the customer-specific Postgres database.
    pub fn database_custom_url(&self) -> String {
        format!(
            "postgres://{}:{}@{}:{}/{}",
            self.database_custom.username,
            self.database_custom.password,
            self.database_custom.server,
            self.database_custom.port,
            self.database_custom.database
        )
    }
    /// Loads the full configuration from environment variables.
    ///
    /// # Panics
    /// Panics with a descriptive message when a required variable (DRIVE_*,
    /// EMAIL_*, AI_*, SITES_ROOT) is missing or malformed; database and
    /// server settings fall back to local defaults instead.
    pub fn from_env() -> Self {
        let database = DatabaseConfig {
            username: env::var("TABLES_USERNAME").unwrap_or_else(|_| "user".to_string()),
            password: env::var("TABLES_PASSWORD").unwrap_or_else(|_| "pass".to_string()),
            server: env::var("TABLES_SERVER").unwrap_or_else(|_| "localhost".to_string()),
            port: env::var("TABLES_PORT")
                .ok()
                .and_then(|p| p.parse().ok())
                .unwrap_or(5432),
            database: env::var("TABLES_DATABASE").unwrap_or_else(|_| "db".to_string()),
        };
        let database_custom = DatabaseConfig {
            username: env::var("CUSTOM_USERNAME").unwrap_or_else(|_| "user".to_string()),
            password: env::var("CUSTOM_PASSWORD").unwrap_or_else(|_| "pass".to_string()),
            server: env::var("CUSTOM_SERVER").unwrap_or_else(|_| "localhost".to_string()),
            port: env::var("CUSTOM_PORT")
                .ok()
                .and_then(|p| p.parse().ok())
                .unwrap_or(5432),
            database: env::var("CUSTOM_DATABASE").unwrap_or_else(|_| "db".to_string()),
        };
        let minio = MinioConfig {
            server: env::var("DRIVE_SERVER").expect("DRIVE_SERVER not set"),
            access_key: env::var("DRIVE_ACCESSKEY").expect("DRIVE_ACCESSKEY not set"),
            secret_key: env::var("DRIVE_SECRET").expect("DRIVE_SECRET not set"),
            use_ssl: env::var("DRIVE_USE_SSL")
                .unwrap_or_else(|_| "false".to_string())
                .parse()
                .unwrap_or(false),
            bucket: env::var("DRIVE_ORG_PREFIX").unwrap_or_else(|_| "".to_string()),
        };
        let email = EmailConfig {
            from: env::var("EMAIL_FROM").expect("EMAIL_FROM not set"),
            server: env::var("EMAIL_SERVER").expect("EMAIL_SERVER not set"),
            port: env::var("EMAIL_PORT")
                .expect("EMAIL_PORT not set")
                .parse()
                .expect("EMAIL_PORT must be a number"),
            username: env::var("EMAIL_USER").expect("EMAIL_USER not set"),
            password: env::var("EMAIL_PASS").expect("EMAIL_PASS not set"),
        };
        let ai = AIConfig {
            instance: env::var("AI_INSTANCE").expect("AI_INSTANCE not set"),
            key: env::var("AI_KEY").expect("AI_KEY not set"),
            version: env::var("AI_VERSION").expect("AI_VERSION not set"),
            endpoint: env::var("AI_ENDPOINT").expect("AI_ENDPOINT not set"),
        };
        AppConfig {
            minio,
            server: ServerConfig {
                host: env::var("SERVER_HOST").unwrap_or_else(|_| "127.0.0.1".to_string()),
                port: env::var("SERVER_PORT")
                    .ok()
                    .and_then(|p| p.parse().ok())
                    .unwrap_or(8080),
            },
            database,
            database_custom,
            email,
            ai,
            // Was a bare `.unwrap()`: fail with the same style of message as
            // the other required variables instead of a generic panic.
            site_path: env::var("SITES_ROOT").expect("SITES_ROOT not set"),
        }
    }
}

97
src/context/mod.rs Normal file
View file

@ -0,0 +1,97 @@
use async_trait::async_trait;
use langchain_rust::{
embedding::openai::openai_embedder::OpenAiEmbedder,
vectorstore::qdrant::{Qdrant, StoreBuilder},
vectorstore::{VectorStore, VecStoreOptions},
schemas::Document,
};
use qdrant_client::qdrant::Qdrant as QdrantClient;
use sqlx::PgPool;
use uuid::Uuid;
/// Abstraction over conversation-context retrieval and embedding storage.
/// NOTE(review): `Value` (serde_json) and `SearchResult` are not among this
/// file's visible imports/definitions — confirm they resolve.
#[async_trait]
pub trait ContextProvider: Send + Sync {
    /// Builds a prompt context from recent history plus similar documents.
    async fn get_context(&self, session_id: Uuid, user_id: Uuid, query: &str) -> Result<String, Box<dyn std::error::Error>>;
    /// Persists a text chunk (with metadata) into the vector store.
    async fn store_embedding(&self, text: &str, embedding: Vec<f32>, metadata: Value) -> Result<(), Box<dyn std::error::Error>>;
    /// Returns documents nearest to the given embedding.
    async fn search_similar(&self, embedding: Vec<f32>, limit: u32) -> Result<Vec<SearchResult>, Box<dyn std::error::Error>>;
}
/// Context provider backed by Postgres message history plus a Qdrant
/// vector store.
pub struct LangChainContextProvider {
    pool: PgPool,
    vector_store: Qdrant,
    // NOTE(review): stored but unused by the methods visible here (the
    // store holds its own clone) — confirm it is still needed.
    embedder: OpenAiEmbedder,
}
impl LangChainContextProvider {
    /// Connects to Qdrant and prepares the "conversations" collection with
    /// an OpenAI embedder.
    pub async fn new(pool: PgPool, qdrant_url: &str) -> Result<Self, Box<dyn std::error::Error>> {
        let embedder = OpenAiEmbedder::default();
        let client = QdrantClient::from_url(qdrant_url).build()?;
        let vector_store = StoreBuilder::new()
            .embedder(embedder.clone())
            .client(client)
            .collection_name("conversations")
            .build()
            .await?;
        Ok(Self {
            pool,
            vector_store,
            embedder,
        })
    }
}
#[async_trait]
impl ContextProvider for LangChainContextProvider {
    /// Assembles a prompt context: the last 5 messages of the session plus
    /// up to 3 similar documents from the vector store.
    ///
    /// NOTE(review): `row.get` needs the `sqlx::Row` trait in scope, which is
    /// not in this file's visible imports. Also, `content_encrypted` is
    /// interpolated as plain text — confirm decryption happens elsewhere.
    async fn get_context(&self, session_id: Uuid, user_id: Uuid, query: &str) -> Result<String, Box<dyn std::error::Error>> {
        // Get conversation history
        let history = sqlx::query(
            "SELECT role, content_encrypted FROM message_history
             WHERE session_id = $1 AND user_id = $2
             ORDER BY message_index DESC LIMIT 5"
        )
        .bind(session_id)
        .bind(user_id)
        .fetch_all(&self.pool)
        .await?;
        let mut context = String::from("Conversation history:\n");
        // Rows come newest-first; reverse to chronological order.
        for row in history.iter().rev() {
            let role: String = row.get("role");
            let content: String = row.get("content_encrypted");
            context.push_str(&format!("{}: {}\n", role, content));
        }
        // Search for similar documents using LangChain
        let similar_docs = self.vector_store
            .similarity_search(query, 3, &VecStoreOptions::default())
            .await?;
        if !similar_docs.is_empty() {
            context.push_str("\nRelevant context:\n");
            for doc in similar_docs {
                context.push_str(&format!("- {}\n", doc.page_content));
            }
        }
        context.push_str(&format!("\nCurrent message: {}", query));
        Ok(context)
    }
    /// Adds one document (text + metadata) to the vector store.
    /// NOTE(review): the `embedding` argument is unused; presumably the
    /// store re-embeds `text` itself — confirm.
    async fn store_embedding(&self, text: &str, embedding: Vec<f32>, metadata: Value) -> Result<(), Box<dyn std::error::Error>> {
        let document = Document::new(text).with_metadata(metadata);
        self.vector_store
            .add_documents(&[document], &VecStoreOptions::default())
            .await?;
        Ok(())
    }
    /// Stub: ignores its arguments and always returns an empty result set.
    async fn search_similar(&self, embedding: Vec<f32>, limit: u32) -> Result<Vec<SearchResult>, Box<dyn std::error::Error>> {
        // LangChain handles this through the vector store interface
        // This method would need adaptation to work with LangChain's search patterns
        Ok(vec![])
    }
}

533
src/email/mod.rs Normal file
View file

@ -0,0 +1,533 @@
use crate::{config::EmailConfig, state::AppState};
use log::info;
use actix_web::error::ErrorInternalServerError;
use actix_web::http::header::ContentType;
use actix_web::{web, HttpResponse, Result};
use lettre::{transport::smtp::authentication::Credentials, Message, SmtpTransport, Transport};
use serde::Serialize;
use imap::types::Seq;
use mailparse::{parse_mail, MailHeaderMap}; // Added MailHeaderMap import
/// A single mailbox entry returned by `GET /emails/list`.
#[derive(Debug, Serialize)]
pub struct EmailResponse {
    /// IMAP sequence number of the message, stringified.
    pub id: String,
    /// Sender display name parsed from the `From` header.
    pub name: String,
    /// Sender address parsed from the `From` header.
    pub email: String,
    /// Subject header, or "(No Subject)" when the header is empty.
    pub subject: String,
    /// Short body preview (first lines, truncated).
    pub text: String,
    // Private fields are still serialized; serde does not require `pub`.
    date: String,
    // Always `false` in the current listing — read state is not tracked yet.
    read: bool,
    // Always empty in the current listing — labels are not populated yet.
    labels: Vec<String>,
}
async fn internal_send_email(config: &EmailConfig, to: &str, subject: &str, body: &str) {
let email = Message::builder()
.from(config.from.parse().unwrap())
.to(to.parse().unwrap())
.subject(subject)
.body(body.to_string())
.unwrap();
let creds = Credentials::new(config.username.clone(), config.password.clone());
SmtpTransport::relay(&config.server)
.unwrap()
.port(config.port)
.credentials(creds)
.build()
.send(&email)
.unwrap();
}
/// `GET /emails/list` — fetches up to the 20 most recent INBOX messages from
/// the configured IMAP server and returns lightweight previews.
///
/// Each message is fully fetched (RFC822), parsed with `mailparse`, and
/// reduced to sender name/address, subject, date and a short body preview.
#[actix_web::get("/emails/list")]
pub async fn list_emails(
    state: web::Data<AppState>,
) -> Result<web::Json<Vec<EmailResponse>>, actix_web::Error> {
    let _config = state
        .config
        .as_ref()
        .ok_or_else(|| ErrorInternalServerError("Configuration not available"))?;
    // Establish connection (implicit TLS on port 993).
    let tls = native_tls::TlsConnector::builder().build().map_err(|e| {
        ErrorInternalServerError(format!("Failed to create TLS connector: {:?}", e))
    })?;
    let client = imap::connect(
        (_config.email.server.as_str(), 993),
        _config.email.server.as_str(),
        &tls,
    )
    .map_err(|e| ErrorInternalServerError(format!("Failed to connect to IMAP: {:?}", e)))?;
    // Login
    let mut session = client
        .login(&_config.email.username, &_config.email.password)
        .map_err(|e| ErrorInternalServerError(format!("Login failed: {:?}", e)))?;
    // Select INBOX
    session
        .select("INBOX")
        .map_err(|e| ErrorInternalServerError(format!("Failed to select INBOX: {:?}", e)))?;
    // Search for all messages
    let messages = session
        .search("ALL")
        .map_err(|e| ErrorInternalServerError(format!("Failed to search emails: {:?}", e)))?;
    let mut email_list = Vec::new();
    // Take 20 entries from the reversed search result. NOTE(review): the
    // search result's iteration order determines "recent" here — confirm it
    // is sequence-ordered for this IMAP crate version.
    let recent_messages: Vec<_> = messages.iter().cloned().collect();
    let recent_messages: Vec<Seq> = recent_messages.into_iter().rev().take(20).collect();
    for seq in recent_messages {
        // Fetch the entire message (headers + body)
        let fetch_result = session.fetch(seq.to_string(), "RFC822");
        let messages = fetch_result
            .map_err(|e| ErrorInternalServerError(format!("Failed to fetch email: {:?}", e)))?;
        for msg in messages.iter() {
            let body = msg
                .body()
                .ok_or_else(|| ErrorInternalServerError("No body found"))?;
            // Parse the complete email message
            let parsed = parse_mail(body)
                .map_err(|e| ErrorInternalServerError(format!("Failed to parse email: {:?}", e)))?;
            // Extract headers
            let headers = parsed.get_headers();
            let subject = headers.get_first_value("Subject").unwrap_or_default();
            let from = headers.get_first_value("From").unwrap_or_default();
            let date = headers.get_first_value("Date").unwrap_or_default();
            // Prefer the text/plain part of a multipart message; fall back to
            // the raw body for simple messages.
            let body_text = if let Some(body_part) = parsed
                .subparts
                .iter()
                .find(|p| p.ctype.mimetype == "text/plain")
            {
                body_part.get_body().unwrap_or_default()
            } else {
                parsed.get_body().unwrap_or_default()
            };
            // Create preview from the first three lines.
            let preview = body_text.lines().take(3).collect::<Vec<_>>().join(" ");
            // Truncate to at most 150 bytes, backing up to a char boundary:
            // the previous `&preview[..150]` panicked whenever byte 150 fell
            // inside a multi-byte UTF-8 character (accents, emoji, CJK).
            let preview_truncated = if preview.len() > 150 {
                let mut cut = 150;
                while !preview.is_char_boundary(cut) {
                    cut -= 1;
                }
                format!("{}...", &preview[..cut])
            } else {
                preview
            };
            // Parse From field into (display name, address).
            let (from_name, from_email) = parse_from_field(&from);
            email_list.push(EmailResponse {
                id: seq.to_string(),
                name: from_name,
                email: from_email,
                subject: if subject.is_empty() {
                    "(No Subject)".to_string()
                } else {
                    subject
                },
                text: preview_truncated,
                date: if date.is_empty() {
                    chrono::Utc::now().format("%Y-%m-%d %H:%M:%S").to_string()
                } else {
                    date
                },
                read: false,
                labels: Vec::new(),
            });
        }
    }
    session
        .logout()
        .map_err(|e| ErrorInternalServerError(format!("Failed to logout: {:?}", e)))?;
    Ok(web::Json(email_list))
}
/// Splits an RFC 2822 style `From` header into `(display name, email)`.
///
/// `"John Doe <john@example.com>"` yields `("John Doe", "john@example.com")`;
/// surrounding double quotes on the name are stripped. Input without a
/// well-formed `<...>` pair falls back to `("Unknown", <original input>)`.
fn parse_from_field(from: &str) -> (String, String) {
    if let Some(start) = from.find('<') {
        // Look for the closing '>' strictly after the '<'. Searching the
        // whole string (as before) could find a '>' that precedes '<' and
        // panic on the reversed slice `from[start + 1..end]`.
        if let Some(offset) = from[start + 1..].find('>') {
            let email = from[start + 1..start + 1 + offset].trim().to_string();
            let name = from[..start].trim().trim_matches('"').to_string();
            return (name, email);
        }
    }
    ("Unknown".to_string(), from.to_string())
}
/// Body of `POST /emails/save_draft`.
#[derive(serde::Deserialize)]
pub struct SaveDraftRequest {
    pub to: String,
    pub subject: String,
    /// Optional Cc list; the Cc header is omitted when absent or empty.
    pub cc: Option<String>,
    /// Draft body; written with `Content-Type: text/html` when saved.
    pub text: String,
}
/// Outcome of a save-draft attempt; failures are reported in-band.
#[derive(serde::Serialize)]
pub struct SaveDraftResponse {
    pub success: bool,
    pub message: String,
    /// Present on success. NOTE(review): currently a Unix timestamp, not an
    /// IMAP UID — confirm downstream expectations.
    pub draft_id: Option<String>,
}
/// Body of `POST /emails/get_latest_from`.
#[derive(serde::Deserialize)]
pub struct GetLatestEmailRequest {
    pub from_email: String,
}
/// Result of fetching the latest email from a given sender; `email_text`
/// is pre-formatted as a quoted "Original Message" block ready for reply.
#[derive(serde::Serialize)]
pub struct LatestEmailResponse {
    pub success: bool,
    pub email_text: Option<String>,
    pub message: String,
}
/// `POST /emails/save_draft` — persists a draft into the IMAP Drafts folder.
///
/// The HTTP status is always 200 (when configuration exists); success or
/// failure is reported in the `SaveDraftResponse` body.
#[actix_web::post("/emails/save_draft")]
pub async fn save_draft(
    state: web::Data<AppState>,
    draft_data: web::Json<SaveDraftRequest>,
) -> Result<web::Json<SaveDraftResponse>, actix_web::Error> {
    // Draft persistence needs the mail settings from the app configuration.
    let config = state
        .config
        .as_ref()
        .ok_or_else(|| ErrorInternalServerError("Configuration not available"))?;
    let response = match save_email_draft(&config.email, &draft_data).await {
        Ok(id) => SaveDraftResponse {
            success: true,
            message: "Draft saved successfully".to_string(),
            draft_id: Some(id),
        },
        Err(err) => SaveDraftResponse {
            success: false,
            message: format!("Failed to save draft: {}", err),
            draft_id: None,
        },
    };
    Ok(web::Json(response))
}
/// Appends a draft message to the IMAP "Drafts" folder, creating the folder
/// first if it does not exist, and returns an identifier for the draft.
///
/// # Errors
/// Fails on TLS/IMAP connection, login, folder selection/creation, or
/// append problems.
pub async fn save_email_draft(
    email_config: &EmailConfig,
    draft_data: &SaveDraftRequest,
) -> Result<String, Box<dyn std::error::Error>> {
    // Establish connection (implicit TLS on port 993)
    let tls = native_tls::TlsConnector::builder().build()?;
    let client = imap::connect(
        (email_config.server.as_str(), 993),
        email_config.server.as_str(),
        &tls,
    )?;
    // Login
    let mut session = client
        .login(&email_config.username, &email_config.password)
        .map_err(|e| format!("Login failed: {:?}", e))?;
    // Select or create Drafts folder
    if session.select("Drafts").is_err() {
        // Try to create Drafts folder if it doesn't exist
        session.create("Drafts")?;
        session.select("Drafts")?;
    }
    // Build the optional Cc header; skipped entirely when absent or empty.
    let cc_header = draft_data
        .cc
        .as_deref()
        .filter(|cc| !cc.is_empty())
        .map(|cc| format!("Cc: {}\r\n", cc))
        .unwrap_or_default();
    // Assemble a raw RFC 2822 message with an HTML body.
    let email_message = format!(
        "From: {}\r\nTo: {}\r\n{}Subject: {}\r\nDate: {}\r\nContent-Type: text/html; charset=UTF-8\r\n\r\n{}",
        email_config.username,
        draft_data.to,
        cc_header,
        draft_data.subject,
        chrono::Utc::now().format("%a, %d %b %Y %H:%M:%S +0000"),
        draft_data.text
    );
    // Append to Drafts folder
    session.append("Drafts", &email_message)?;
    session.logout()?;
    // NOTE(review): the returned id is the current Unix timestamp, not the
    // IMAP UID of the appended message — confirm callers don't treat it as
    // a mailbox reference.
    Ok(chrono::Utc::now().timestamp().to_string())
}
/// Fetches the most recent email received from `from_email` and formats it
/// as a quoted "Original Message" block ready for composing a reply.
///
/// Looks in the "Archive" folder first, falling back to "INBOX". The most
/// recent match is taken as the highest IMAP sequence number.
///
/// # Errors
/// Fails on connection/login/select problems, when no messages match, or
/// when the matched message yields no readable content.
async fn fetch_latest_email_from_sender(
    email_config: &EmailConfig,
    from_email: &str,
) -> Result<String, Box<dyn std::error::Error>> {
    // Establish connection (implicit TLS on port 993)
    let tls = native_tls::TlsConnector::builder().build()?;
    let client = imap::connect(
        (email_config.server.as_str(), 993),
        email_config.server.as_str(),
        &tls,
    )?;
    // Login
    let mut session = client
        .login(&email_config.username, &email_config.password)
        .map_err(|e| format!("Login failed: {:?}", e))?;
    // Try to select Archive folder first, then fall back to INBOX
    if session.select("Archive").is_err() {
        session.select("INBOX")?;
    }
    // Search for emails from the specified sender
    let search_query = format!("FROM \"{}\"", from_email);
    let messages = session.search(&search_query)?;
    if messages.is_empty() {
        session.logout()?;
        // The "No emails found" text is matched by the HTTP handler to
        // distinguish this expected case from real failures.
        return Err(format!("No emails found from {}", from_email).into());
    }
    // Get the latest message (highest sequence number)
    let latest_seq = messages.iter().max().unwrap();
    // Fetch the entire message
    let messages = session.fetch(latest_seq.to_string(), "RFC822")?;
    let mut email_text = String::new();
    for msg in messages.iter() {
        let body = msg.body().ok_or("No body found in email")?;
        // Parse the complete email message
        let parsed = parse_mail(body)?;
        // Extract headers
        let headers = parsed.get_headers();
        let subject = headers.get_first_value("Subject").unwrap_or_default();
        let from = headers.get_first_value("From").unwrap_or_default();
        let date = headers.get_first_value("Date").unwrap_or_default();
        let to = headers.get_first_value("To").unwrap_or_default();
        // Extract body text: prefer the text/plain part of a multipart
        // message, otherwise use the raw body.
        let body_text = if let Some(body_part) = parsed
            .subparts
            .iter()
            .find(|p| p.ctype.mimetype == "text/plain")
        {
            body_part.get_body().unwrap_or_default()
        } else {
            parsed.get_body().unwrap_or_default()
        };
        // Format the email text ready for reply with headers
        email_text = format!(
            "--- Original Message ---\nFrom: {}\nTo: {}\nDate: {}\nSubject: {}\n\n{}\n\n--- Reply Above This Line ---\n\n",
            from, to, date, subject, body_text
        );
        break; // We only want the first (and should be only) message
    }
    session.logout()?;
    if email_text.is_empty() {
        Err("Failed to extract email content".into())
    } else {
        Ok(email_text)
    }
}
/// `POST /emails/get_latest_from` — returns the most recent email received
/// from the requested sender, formatted for reply composition.
#[actix_web::post("/emails/get_latest_from")]
pub async fn get_latest_email_from(
    state: web::Data<AppState>,
    request: web::Json<GetLatestEmailRequest>,
) -> Result<web::Json<LatestEmailResponse>, actix_web::Error> {
    let config = state
        .config
        .as_ref()
        .ok_or_else(|| ErrorInternalServerError("Configuration not available"))?;
    match fetch_latest_email_from_sender(&config.email, &request.from_email).await {
        Ok(text) => Ok(web::Json(LatestEmailResponse {
            success: true,
            email_text: Some(text),
            message: "Latest email retrieved successfully".to_string(),
        })),
        // "No emails found" is an expected outcome and is reported in-band;
        // anything else (connection, auth, parse failures) becomes a 500.
        Err(err) if err.to_string().contains("No emails found") => {
            Ok(web::Json(LatestEmailResponse {
                success: false,
                email_text: None,
                message: err.to_string(),
            }))
        }
        Err(err) => Err(ErrorInternalServerError(err)),
    }
}
/// Fetches the most recent email *sent to* `to_email` from the sent-mail
/// folder and formats it as a quoted "Original Message" block.
///
/// Tries the "Sent" folder first, falling back to "Sent Items" (Exchange
/// naming). Unlike the receive-side helper, this always returns a formatted
/// block for a match, even when the body has no readable content.
///
/// # Errors
/// Fails on connection/login/select problems or when no messages match.
pub async fn fetch_latest_sent_to(
    email_config: &EmailConfig,
    to_email: &str,
) -> Result<String, Box<dyn std::error::Error>> {
    // Establish connection (implicit TLS on port 993)
    let tls = native_tls::TlsConnector::builder().build()?;
    let client = imap::connect(
        (email_config.server.as_str(), 993),
        email_config.server.as_str(),
        &tls,
    )?;
    // Login
    let mut session = client
        .login(&email_config.username, &email_config.password)
        .map_err(|e| format!("Login failed: {:?}", e))?;
    // Try to select the "Sent" folder first, then fall back to "Sent Items"
    if session.select("Sent").is_err() {
        session.select("Sent Items")?;
    }
    // Search for emails addressed to the specified recipient
    let search_query = format!("TO \"{}\"", to_email);
    let messages = session.search(&search_query)?;
    if messages.is_empty() {
        session.logout()?;
        return Err(format!("No emails found to {}", to_email).into());
    }
    // Get the latest message (highest sequence number)
    let latest_seq = messages.iter().max().unwrap();
    // Fetch the entire message
    let messages = session.fetch(latest_seq.to_string(), "RFC822")?;
    let mut email_text = String::new();
    for msg in messages.iter() {
        let body = msg.body().ok_or("No body found in email")?;
        // Parse the complete email message
        let parsed = parse_mail(body)?;
        // Extract headers
        let headers = parsed.get_headers();
        let subject = headers.get_first_value("Subject").unwrap_or_default();
        let from = headers.get_first_value("From").unwrap_or_default();
        let date = headers.get_first_value("Date").unwrap_or_default();
        let to = headers.get_first_value("To").unwrap_or_default();
        // Double-check the To header actually contains the requested address
        // (IMAP TO search can match loosely); skip non-matching messages.
        if !to
            .trim()
            .to_lowercase()
            .contains(&to_email.trim().to_lowercase())
        {
            continue;
        }
        // Extract body text (handles both simple and multipart emails) - SAME AS LIST_EMAILS
        let body_text = if let Some(body_part) = parsed
            .subparts
            .iter()
            .find(|p| p.ctype.mimetype == "text/plain")
        {
            body_part.get_body().unwrap_or_default()
        } else {
            parsed.get_body().unwrap_or_default()
        };
        // Only format if we have actual content
        if !body_text.trim().is_empty() && body_text != "No readable content found" {
            // Format the email text ready for reply with headers
            email_text = format!(
                "--- Original Message ---\nFrom: {}\nTo: {}\nDate: {}\nSubject: {}\n\n{}\n\n--- Reply Above This Line ---\n\n",
                from, to, date, subject, body_text.trim()
            );
        } else {
            // Still provide headers even if body is empty
            email_text = format!(
                "--- Original Message ---\nFrom: {}\nTo: {}\nDate: {}\nSubject: {}\n\n[No readable content]\n\n--- Reply Above This Line ---\n\n",
                from, to, date, subject
            );
        }
        break; // We only want the first (and should be only) message
    }
    session.logout()?;
    // Always return something, even if it's just headers
    if email_text.is_empty() {
        Err("Failed to extract email content".into())
    } else {
        Ok(email_text)
    }
}
#[actix_web::post("/emails/send")]
pub async fn send_email(
payload: web::Json<(String, String, String)>,
state: web::Data<AppState>,
) -> Result<HttpResponse, actix_web::Error> {
let (to, subject, body) = payload.into_inner();
info!("To: {}", to);
info!("Subject: {}", subject);
info!("Body: {}", body);
// Send via SMTP
internal_send_email(&state.config.clone().unwrap().email, &to, &subject, &body).await;
Ok(HttpResponse::Ok().finish())
}
/// `GET /campaigns/{campaign_id}/click/{email}` — email tracking pixel.
///
/// Records the (campaign, email) pair in `public.clicks`, refreshing the
/// timestamp on repeat loads, then serves a 1x1 transparent PNG so the URL
/// can be embedded as an image in campaign emails.
#[actix_web::get("/campaigns/{campaign_id}/click/{email}")]
pub async fn save_click(
    path: web::Path<(String, String)>,
    state: web::Data<AppState>,
) -> HttpResponse {
    let (campaign_id, email) = path.into_inner();
    // Best-effort upsert: `let _ =` deliberately ignores query errors so the
    // pixel is still served. NOTE(review): `state.db.as_ref().unwrap()`
    // still panics when no DB pool is configured — confirm the pool is
    // always present on this route.
    let _ = sqlx::query("INSERT INTO public.clicks (campaign_id, email, updated_at) VALUES ($1, $2, NOW()) ON CONFLICT (campaign_id, email) DO UPDATE SET updated_at = NOW()")
        .bind(campaign_id)
        .bind(email)
        .execute(state.db.as_ref().unwrap())
        .await;
    // Hand-rolled bytes of a 1x1 transparent PNG.
    let pixel = [
        0x89, 0x50, 0x4E, 0x47, 0x0D, 0x0A, 0x1A, 0x0A, // PNG header
        0x00, 0x00, 0x00, 0x0D, 0x49, 0x48, 0x44, 0x52, // IHDR chunk
        0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x01, // 1x1 dimension
        0x08, 0x06, 0x00, 0x00, 0x00, 0x1F, 0x15, 0xC4, 0x89, // RGBA
        0x00, 0x00, 0x00, 0x0A, 0x49, 0x44, 0x41, 0x54, // IDAT chunk
        0x78, 0x9C, 0x63, 0x00, 0x01, 0x00, 0x00, 0x05, // data
        0x00, 0x01, 0x0D, 0x0A, 0x2D, 0xB4, // CRC
        0x00, 0x00, 0x00, 0x00, 0x49, 0x45, 0x4E, 0x44, // IEND chunk
        0xAE, 0x42, 0x60, 0x82,
    ]; // EOF
    HttpResponse::Ok()
        .content_type(ContentType::png())
        .body(pixel.to_vec()) // to_vec() copies the array into an owned response body
}
/// `GET /campaigns/{campaign_id}/emails` — returns a comma-separated list of
/// email addresses that loaded the tracking pixel for the given campaign.
///
/// Query failures and a missing DB pool both degrade to an empty string,
/// keeping the endpoint's plain-`String` contract. The previous version
/// panicked (`unwrap()`) when no pool was configured.
#[actix_web::get("/campaigns/{campaign_id}/emails")]
pub async fn get_emails(path: web::Path<String>, state: web::Data<AppState>) -> String {
    let campaign_id = path.into_inner();
    let Some(db) = state.db.as_ref() else {
        return String::new();
    };
    let rows = sqlx::query_scalar::<_, String>("SELECT email FROM clicks WHERE campaign_id = $1")
        .bind(campaign_id)
        .fetch_all(db)
        .await
        .unwrap_or_default();
    rows.join(",")
}

142
src/file/mod.rs Normal file
View file

@ -0,0 +1,142 @@
use actix_web::web;
use actix_multipart::Multipart;
use actix_web::{post, HttpResponse};
use minio::s3::builders::ObjectContent;
use minio::s3::types::ToStream;
use minio::s3::Client;
use std::io::Write;
use tempfile::NamedTempFile;
use tokio_stream::StreamExt;
use minio::s3::client::{Client as MinioClient, ClientBuilder as MinioClientBuilder};
use minio::s3::creds::StaticProvider;
use minio::s3::http::BaseUrl;
use std::str::FromStr;
use crate::config::AppConfig;
use crate::shared::state::AppState;
/// Builds a MinIO S3 client from the application configuration, honouring
/// the `use_ssl` flag when composing the endpoint URL.
///
/// # Errors
/// Fails when the endpoint URL cannot be parsed or the client cannot be
/// constructed.
pub async fn init_minio(config: &AppConfig) -> Result<MinioClient, minio::s3::error::Error> {
    let scheme = if config.minio.use_ssl { "https" } else { "http" };
    let endpoint = BaseUrl::from_str(&format!("{}://{}", scheme, config.minio.server))?;
    let provider = StaticProvider::new(&config.minio.access_key, &config.minio.secret_key, None);
    let client = MinioClientBuilder::new(endpoint)
        .provider(Some(provider))
        .build()?;
    Ok(client)
}
#[post("/files/upload/{folder_path}")]
pub async fn upload_file(
folder_path: web::Path<String>,
mut payload: Multipart,
state: web::Data<AppState>,
) -> Result<HttpResponse, actix_web::Error> {
let folder_path = folder_path.into_inner();
// Create a temporary file to store the uploaded file.
let mut temp_file = NamedTempFile::new().map_err(|e| {
actix_web::error::ErrorInternalServerError(format!("Failed to create temp file: {}", e))
})?;
let mut file_name = None;
// Iterate over the multipart stream.
while let Some(mut field) = payload.try_next().await? {
let content_disposition = field.content_disposition();
file_name = content_disposition
.get_filename()
.map(|name| name.to_string());
// Write the file content to the temporary file.
while let Some(chunk) = field.try_next().await? {
temp_file.write_all(&chunk).map_err(|e| {
actix_web::error::ErrorInternalServerError(format!(
"Failed to write to temp file: {}",
e
))
})?;
}
}
// Get the file name or use a default name
let file_name = file_name.unwrap_or_else(|| "unnamed_file".to_string());
// Construct the object name using the folder path and file name
let object_name = format!("{}/{}", folder_path, file_name);
// Upload the file to the MinIO bucket
let client: Client = state.minio_client.clone().unwrap();
let bucket_name = state.config.as_ref().unwrap().minio.bucket.clone();
let content = ObjectContent::from(temp_file.path());
client
.put_object_content(bucket_name, &object_name, content)
.send()
.await
.map_err(|e| {
actix_web::error::ErrorInternalServerError(format!(
"Failed to upload file to MinIO: {}",
e
))
})?;
// Clean up the temporary file
temp_file.close().map_err(|e| {
actix_web::error::ErrorInternalServerError(format!("Failed to close temp file: {}", e))
})?;
Ok(HttpResponse::Ok().body(format!(
"Uploaded file '{}' to folder '{}'",
file_name, folder_path
)))
}
#[post("/files/list/{folder_path}")]
pub async fn list_file(
folder_path: web::Path<String>,
state: web::Data<AppState>,
) -> Result<HttpResponse, actix_web::Error> {
let folder_path = folder_path.into_inner();
let client: Client = state.minio_client.clone().unwrap();
let bucket_name = "file-upload-rust-bucket";
// Create the stream using the to_stream() method
let mut objects_stream = client
.list_objects(bucket_name)
.prefix(Some(folder_path))
.to_stream()
.await;
let mut file_list = Vec::new();
// Use StreamExt::next() to iterate through the stream
while let Some(items) = objects_stream.next().await {
match items {
Ok(result) => {
for item in result.contents {
file_list.push(item.name);
}
}
Err(e) => {
return Err(actix_web::error::ErrorInternalServerError(format!(
"Failed to list files in MinIO: {}",
e
)));
}
}
}
Ok(HttpResponse::Ok().json(file_list))
}

139
src/llm/llm.rs Normal file
View file

@ -0,0 +1,139 @@
use log::error;
use actix_web::{
web::{self, Bytes},
HttpResponse, Responder,
};
use anyhow::Result;
use futures::StreamExt;
use langchain_rust::{
chain::{Chain, LLMChainBuilder},
fmt_message, fmt_template,
language_models::llm::LLM,
llm::openai::OpenAI,
message_formatter,
prompt::HumanMessagePromptTemplate,
prompt_args,
schemas::messages::Message,
template_fstring,
};
use crate::{state::AppState, utils::azure_from_config};
/// Body of `POST /stream`.
#[derive(serde::Deserialize)]
struct ChatRequest {
    input: String,
}
/// Response of `POST /chat`: the model text plus an optional UI action.
#[derive(serde::Serialize)]
struct ChatResponse {
    text: String,
    #[serde(skip_serializing_if = "Option::is_none")]
    action: Option<ChatAction>,
}
/// Frontend actions the chat endpoint can request, serialized as
/// `{"type": "...", "content": ...}` via the adjacent-tag representation.
#[derive(serde::Serialize)]
#[serde(tag = "type", content = "content")]
enum ChatAction {
    /// Ask the UI to pre-fill an email reply with the given content.
    ReplyEmail { content: String },
    // Add other action variants here as needed
}
#[actix_web::post("/chat")]
pub async fn chat(
web::Json(request): web::Json<String>,
state: web::Data<AppState>,
) -> Result<impl Responder, actix_web::Error> {
let azure_config = azure_from_config(&state.config.clone().unwrap().ai);
let open_ai = OpenAI::new(azure_config);
// Parse the context JSON
let context: serde_json::Value = match serde_json::from_str(&request) {
Ok(ctx) => ctx,
Err(_) => serde_json::json!({}),
};
// Check view type and prepare appropriate prompt
let view_type = context
.get("viewType")
.and_then(|v| v.as_str())
.unwrap_or("");
let (prompt, might_trigger_action) = match view_type {
"email" => (
format!(
"Respond to this email: {}. Keep it professional and concise. \
If the email requires a response, provide one in the 'replyEmail' action format.",
request
),
true,
),
_ => (request, false),
};
let response_text = match open_ai.invoke(&prompt).await {
Ok(res) => res,
Err(err) => {
error!("Error invoking API: {}", err);
return Err(actix_web::error::ErrorInternalServerError(
"Failed to invoke OpenAI API",
));
}
};
// Prepare response with potential action
let mut chat_response = ChatResponse {
text: response_text.clone(),
action: None,
};
// If in email view and the response looks like an email reply, add action
if might_trigger_action && view_type == "email" {
chat_response.action = Some(ChatAction::ReplyEmail {
content: response_text,
});
}
Ok(HttpResponse::Ok().json(chat_response))
}
#[actix_web::post("/stream")]
pub async fn chat_stream(
web::Json(request): web::Json<ChatRequest>,
state: web::Data<AppState>,
) -> Result<impl Responder, actix_web::Error> {
let azure_config = azure_from_config(&state.config.clone().unwrap().ai);
let open_ai = OpenAI::new(azure_config);
let prompt = message_formatter![
fmt_message!(Message::new_system_message(
"You are world class technical documentation writer."
)),
fmt_template!(HumanMessagePromptTemplate::new(template_fstring!(
"{input}", "input"
)))
];
let chain = LLMChainBuilder::new()
.prompt(prompt)
.llm(open_ai)
.build()
.map_err(actix_web::error::ErrorInternalServerError)?;
let mut stream = chain
.stream(prompt_args! { "input" => request.input })
.await
.map_err(actix_web::error::ErrorInternalServerError)?;
let actix_stream = async_stream::stream! {
while let Some(result) = stream.next().await {
match result {
Ok(value) => yield Ok::<_, actix_web::Error>(Bytes::from(value.content)),
Err(e) => yield Err(actix_web::error::ErrorInternalServerError(e)),
}
}
};
Ok(HttpResponse::Ok()
.content_type("text/event-stream")
.streaming(actix_stream))
}

248
src/llm/llm_generic.rs Normal file
View file

@ -0,0 +1,248 @@
use log::{error, info};
use actix_web::{post, web, HttpRequest, HttpResponse, Result};
use dotenv::dotenv;
use regex::Regex;
use reqwest::Client;
use serde::{Deserialize, Serialize};
use std::env;
// OpenAI-compatible request/response structures.
// NOTE(review): these typed shapes are not referenced by the proxy handler
// in this module (it forwards raw JSON); confirm they are still needed.
/// One chat message in OpenAI wire format.
#[derive(Debug, Serialize, Deserialize)]
struct ChatMessage {
    role: String,
    content: String,
}
/// OpenAI `/v1/chat/completions` request body.
#[derive(Debug, Serialize, Deserialize)]
struct ChatCompletionRequest {
    model: String,
    messages: Vec<ChatMessage>,
    stream: Option<bool>,
}
/// OpenAI `/v1/chat/completions` response body.
#[derive(Debug, Serialize, Deserialize)]
struct ChatCompletionResponse {
    id: String,
    object: String,
    created: u64,
    model: String,
    choices: Vec<Choice>,
}
/// One completion choice within a response.
#[derive(Debug, Serialize, Deserialize)]
struct Choice {
    message: ChatMessage,
    finish_reason: String,
}
/// Strips request parameters that some OpenAI-compatible providers reject
/// (`max_completion_tokens`, `parallel_tool_calls`, `top_p`,
/// `frequency_penalty`, `presence_penalty`) from a raw JSON body using a
/// text-level regex.
///
/// The `unwrap()` is safe: the pattern is static and valid. NOTE(review):
/// the *leading* comma is optional in the pattern, so removing a key that
/// appears first in its object can leave a dangling comma behind — confirm
/// against real payloads. The regex is also recompiled on every call, and
/// this helper is not invoked by the handler in this module — confirm it is
/// still needed.
fn clean_request_body(body: &str) -> String {
    // Remove problematic parameters that might not be supported by all providers
    let re = Regex::new(r#","?\s*"(max_completion_tokens|parallel_tool_calls|top_p|frequency_penalty|presence_penalty)"\s*:\s*[^,}]*"#).unwrap();
    re.replace_all(body, "").to_string()
}
/// `POST /v1/chat/completions` — OpenAI-compatible proxy endpoint.
///
/// Forwards the raw request body to the provider configured via the
/// `AI_KEY`, `AI_LLM_MODEL` and `AI_ENDPOINT` environment variables,
/// forcing the configured model name into the payload, then normalizes the
/// provider's reply into OpenAI response format (falling back to the raw
/// body when conversion fails).
#[post("/v1/chat/completions")]
pub async fn generic_chat_completions(body: web::Bytes, _req: HttpRequest) -> Result<HttpResponse> {
    // Log raw POST data.
    // NOTE(review): request bodies may contain sensitive user content;
    // consider demoting these info! logs to debug.
    let body_str = std::str::from_utf8(&body).unwrap_or_default();
    info!("Original POST Data: {}", body_str);
    // NOTE(review): dotenv() runs on every request; loading it once at
    // startup would be cheaper — confirm nothing relies on re-reading .env.
    dotenv().ok();
    // Get environment variables
    let api_key = env::var("AI_KEY")
        .map_err(|_| actix_web::error::ErrorInternalServerError("AI_KEY not set."))?;
    let model = env::var("AI_LLM_MODEL")
        .map_err(|_| actix_web::error::ErrorInternalServerError("AI_LLM_MODEL not set."))?;
    let endpoint = env::var("AI_ENDPOINT")
        .map_err(|_| actix_web::error::ErrorInternalServerError("AI_ENDPOINT not set."))?;
    // Parse and modify the request body
    let mut json_value: serde_json::Value = serde_json::from_str(body_str)
        .map_err(|_| actix_web::error::ErrorInternalServerError("Failed to parse JSON"))?;
    // Override/insert the model parameter so clients cannot pick the model.
    if let Some(obj) = json_value.as_object_mut() {
        obj.insert("model".to_string(), serde_json::Value::String(model));
    }
    let modified_body_str = serde_json::to_string(&json_value)
        .map_err(|_| actix_web::error::ErrorInternalServerError("Failed to serialize JSON"))?;
    info!("Modified POST Data: {}", modified_body_str);
    // Set up headers
    let mut headers = reqwest::header::HeaderMap::new();
    headers.insert(
        "Authorization",
        reqwest::header::HeaderValue::from_str(&format!("Bearer {}", api_key))
            .map_err(|_| actix_web::error::ErrorInternalServerError("Invalid API key format"))?,
    );
    headers.insert(
        "Content-Type",
        reqwest::header::HeaderValue::from_static("application/json"),
    );
    // Send request to the AI provider
    let client = Client::new();
    let response = client
        .post(&endpoint)
        .headers(headers)
        .body(modified_body_str)
        .send()
        .await
        .map_err(actix_web::error::ErrorInternalServerError)?;
    // Handle response
    let status = response.status();
    let raw_response = response
        .text()
        .await
        .map_err(actix_web::error::ErrorInternalServerError)?;
    info!("Provider response status: {}", status);
    info!("Provider response body: {}", raw_response);
    // Convert response to OpenAI format if successful
    if status.is_success() {
        match convert_to_openai_format(&raw_response) {
            Ok(openai_response) => Ok(HttpResponse::Ok()
                .content_type("application/json")
                .body(openai_response)),
            Err(e) => {
                error!("Failed to convert response format: {}", e);
                // Return the original response if conversion fails
                Ok(HttpResponse::Ok()
                    .content_type("application/json")
                    .body(raw_response))
            }
        }
    } else {
        // Pass the provider's error through with the same status code.
        let actix_status = actix_web::http::StatusCode::from_u16(status.as_u16())
            .unwrap_or(actix_web::http::StatusCode::INTERNAL_SERVER_ERROR);
        Ok(HttpResponse::build(actix_status)
            .content_type("application/json")
            .body(raw_response))
    }
}
/// Converts a provider response into strict OpenAI chat-completion format.
///
/// Missing fields are back-filled with defaults: a generated
/// `chatcmpl-<uuid>` id, the current Unix time, role `assistant`, finish
/// reason `"stop"`, and a completion-token count estimated from
/// whitespace-split words when the provider omits usage data.
///
/// # Errors
/// Fails if the provider JSON cannot be parsed or contains no choices.
fn convert_to_openai_format(provider_response: &str) -> Result<String, Box<dyn std::error::Error>> {
    // Lenient shape of the incoming provider payload: everything except the
    // choice content is optional.
    #[derive(serde::Deserialize)]
    struct ProviderChoice {
        message: ProviderMessage,
        #[serde(default)]
        finish_reason: Option<String>,
    }
    #[derive(serde::Deserialize)]
    struct ProviderMessage {
        role: Option<String>,
        content: String,
    }
    #[derive(serde::Deserialize)]
    struct ProviderResponse {
        id: Option<String>,
        object: Option<String>,
        created: Option<u64>,
        model: Option<String>,
        choices: Vec<ProviderChoice>,
        usage: Option<ProviderUsage>,
    }
    #[derive(serde::Deserialize, Default)]
    struct ProviderUsage {
        prompt_tokens: Option<u32>,
        completion_tokens: Option<u32>,
        total_tokens: Option<u32>,
    }
    // Strict OpenAI output shape.
    #[derive(serde::Serialize)]
    struct OpenAIResponse {
        id: String,
        object: String,
        created: u64,
        model: String,
        choices: Vec<OpenAIChoice>,
        usage: OpenAIUsage,
    }
    #[derive(serde::Serialize)]
    struct OpenAIChoice {
        index: u32,
        message: OpenAIMessage,
        finish_reason: String,
    }
    #[derive(serde::Serialize)]
    struct OpenAIMessage {
        role: String,
        content: String,
    }
    #[derive(serde::Serialize)]
    struct OpenAIUsage {
        prompt_tokens: u32,
        completion_tokens: u32,
        total_tokens: u32,
    }
    // Parse the provider response
    let provider: ProviderResponse = serde_json::from_str(provider_response)?;
    // Extract content from the first choice
    let first_choice = provider.choices.first().ok_or("No choices in response")?;
    let content = first_choice.message.content.clone();
    let role = first_choice
        .message
        .role
        .clone()
        .unwrap_or_else(|| "assistant".to_string());
    // Calculate token usage, estimating completion tokens from word count
    // when the provider does not report them.
    let usage = provider.usage.unwrap_or_default();
    let prompt_tokens = usage.prompt_tokens.unwrap_or(0);
    let completion_tokens = usage
        .completion_tokens
        .unwrap_or_else(|| content.split_whitespace().count() as u32);
    let total_tokens = usage
        .total_tokens
        .unwrap_or(prompt_tokens + completion_tokens);
    let openai_response = OpenAIResponse {
        id: provider
            .id
            .unwrap_or_else(|| format!("chatcmpl-{}", uuid::Uuid::new_v4().simple())),
        object: provider
            .object
            .unwrap_or_else(|| "chat.completion".to_string()),
        created: provider.created.unwrap_or_else(|| {
            std::time::SystemTime::now()
                .duration_since(std::time::UNIX_EPOCH)
                .unwrap()
                .as_secs()
        }),
        model: provider.model.unwrap_or_else(|| "llama".to_string()),
        choices: vec![OpenAIChoice {
            index: 0,
            message: OpenAIMessage { role, content },
            finish_reason: first_choice
                .finish_reason
                .clone()
                .unwrap_or_else(|| "stop".to_string()),
        }],
        usage: OpenAIUsage {
            prompt_tokens,
            completion_tokens,
            total_tokens,
        },
    };
    serde_json::to_string(&openai_response).map_err(|e| e.into())
}
// Default implementation for ProviderUsage

Some files were not shown because too many files have changed in this diff Show more