diff --git a/.vscode/launch.json b/.vscode/launch.json new file mode 100644 index 000000000..6c2b69a04 --- /dev/null +++ b/.vscode/launch.json @@ -0,0 +1,45 @@ +{ + // Use IntelliSense to learn about possible attributes. + // Hover to view descriptions of existing attributes. + // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387 + "version": "0.2.0", + "configurations": [ + { + "type": "lldb", + "request": "launch", + "name": "Debug executable 'gbserver'", + "cargo": { + "args": [ + "build", + "--bin=gbserver", + "--package=gbserver" + ], + "filter": { + "name": "gbserver", + "kind": "bin" + } + }, + "args": [], + "cwd": "${workspaceFolder}" + }, + { + "type": "lldb", + "request": "launch", + "name": "Debug unit tests in executable 'gbserver'", + "cargo": { + "args": [ + "test", + "--no-run", + "--bin=gbserver", + "--package=gbserver" + ], + "filter": { + "name": "gbserver", + "kind": "bin" + } + }, + "args": [], + "cwd": "${workspaceFolder}" + } + ] +} \ No newline at end of file diff --git a/.zed/debug.json b/.zed/debug.json new file mode 100644 index 000000000..fb5d47718 --- /dev/null +++ b/.zed/debug.json @@ -0,0 +1,14 @@ +[ + { + "label": "Build & Debug native binary", + "build": { + "command": "cargo", + "args": ["build"] + }, + "program": "$ZED_WORKTREE_ROOT/target/debug/gbserver", + + "sourceLanguages": ["rust"], + "request": "launch", + "adapter": "CodeLLDB" + } +] diff --git a/Cargo.toml b/Cargo.toml new file mode 100644 index 000000000..133737593 --- /dev/null +++ b/Cargo.toml @@ -0,0 +1,61 @@ +[package] +name = "gbserver" +version = "0.1.0" +edition = "2021" +authors = ["Rodrigo Rodriguez "] +description = "General Bots Server" +license = "AGPL-3.0" +repository = "https://alm.pragmatismo.com.br/generalbots/gbserver" + +[features] +default = ["postgres", "qdrant"] +local_llm = [] +postgres = ["sqlx/postgres"] +qdrant = ["langchain-rust/qdrant"] + +[dependencies] +actix-cors = "0.7" +actix-multipart = "0.7" +actix-web = "4.9" 
+actix-ws = "0.3" +anyhow = "1.0" +async-stream = "0.3" +async-trait = "0.1" +aes-gcm = "0.10" +argon2 = "0.5" +base64 = "0.22" +bytes = "1.8" +chrono = { version = "0.4", features = ["serde"] } +dotenv = "0.15" +downloader = "0.2" +env_logger = "0.11" +futures = "0.3" +futures-util = "0.3" +imap = "2.4" +langchain-rust = { version = "4.6", features = ["qdrant", "postgres"] } +lettre = { version = "0.11", features = ["smtp-transport", "builder", "tokio1", "tokio1-native-tls"] } +livekit = "0.7" +log = "0.4" +mailparse = "0.15" +minio = { git = "https://github.com/minio/minio-rs", branch = "master" } +native-tls = "0.2" +num-format = "0.4" +qdrant-client = "1.12" +rhai = "1.22" +redis = { version = "0.27", features = ["tokio-comp"] } +regex = "1.11" +reqwest = { version = "0.12", features = ["json", "stream"] } +scraper = "0.20" +serde = { version = "1.0", features = ["derive"] } +serde_json = "1.0" +smartstring = "1.0" +sqlx = { version = "0.8", features = ["time", "uuid", "runtime-tokio-rustls", "postgres", "chrono"] } +tempfile = "3" +thirtyfour = "0.34" +tokio = { version = "1.41", features = ["full"] } +tokio-stream = "0.1" +tracing = "0.1" +tracing-subscriber = { version = "0.3", features = ["fmt"] } +urlencoding = "2.1" +uuid = { version = "1.11", features = ["serde", "v4"] } +zip = "2.2" diff --git a/README-6.md b/README-6.md new file mode 100644 index 000000000..6e13ce554 --- /dev/null +++ b/README-6.md @@ -0,0 +1,519 @@ +# General Bots 6 (GB6) Platform + +## Vision +GB6 is a billion-scale real-time communication platform integrating advanced bot capabilities, WebRTC multimedia, and enterprise-grade messaging, built with Rust for maximum performance and reliability, and featuring a BASIC-WebAssembly VM. 
+ +## 🌟 Key Features + +### Scale & Performance +- Billion+ active users support +- Sub-second message delivery +- 4K video streaming +- 99.99% uptime guarantee +- Zero message loss +- Petabyte-scale storage + +### Core Services +- **API Service** (gb-server) + - Axum-based REST & WebSocket + - Multi-tenant request routing + - Authentication & Authorization + - File handling & streaming + +- **Media Processing** (gb-media) + - WebRTC integration + - GStreamer transcoding + - Real-time track management + - Professional recording + +- **Messaging** (gb-messaging) + - Kafka event processing + - RabbitMQ integration + - WebSocket communication + - Redis PubSub + +- **Storage** (gb-storage) + - PostgreSQL with sharding + - Redis caching + - TiKV distributed storage + +## 🏗 Architecture + +### Multi-Tenant Core +- Organizations +- Instance management +- Resource quotas +- Usage analytics + +### Communication Infrastructure +- WebRTC rooms +- Real-time messaging +- Media processing +- Video conferencing + +## 🛠 Installation + +### Prerequisites +- Rust 1.70+ +- Kubernetes cluster +- PostgreSQL 13+ +- Redis 6+ +- Kafka 3.0+ +- GStreamer + +# Deploy platform + + +## Linux && Mac +``` +sudo apt update + +sudo apt install brave-browser-beta + +curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y +source "$HOME/.cargo/env" +git clone https://alm.pragmatismo.com.br/generalbots/gbserver + +apt install -y build-essential \ + pkg-config \ + libssl-dev \ + gcc-multilib \ + g++-multilib \ + clang \ + lld \ + binutils-dev \ + libudev-dev \ + libdbus-1-dev +``` + +## Build + +``` + +### Build & Run +```bash +# Build all services +cargo build --workspace + +# Run tests +cargo test --workspace + +# Start API service +cargo run -p gb-server +``` + +## 📊 Monitoring & Operations + +### Health Metrics +- System performance +- Resource utilization +- Error rates +- Latency tracking + +### Scaling Operations +- Auto-scaling rules +- Shard management +- Load balancing +- 
Failover systems + +## 🔒 Security + +### Authentication & Authorization +- Multi-factor auth +- Role-based access +- Rate limiting +- End-to-end encryption + +### Data Protection +- Tenant isolation +- Encryption at rest +- Secure communications +- Audit logging + +## 🚀 Development + +### Project Structure +``` +general-bots/ +├── gb-server/ # API service +├── gb-core/ # Core functionality +├── gb-media/ # Media processing +├── gb-messaging/ # Message brokers +├── gb-storage/ # Data storage +├── gb-utils/ # Utilities +└── migrations/ # DB migrations +``` + +### Configuration +```env +DATABASE_URL=postgresql://user:password@localhost:5432/gbdb +REDIS_URL=redis://localhost:6379 +KAFKA_BROKERS=localhost:9092 +RABBIT_URL=amqp://guest:guest@localhost:5672 +``` + +## 🌍 Deployment + +### Global Infrastructure +- Edge presence +- Regional optimization +- Content delivery +- Traffic management + +### Disaster Recovery +- Automated backups +- Multi-region failover +- Data replication +- System redundancy + +## 🤝 Contributing + +1. Fork repository +2. Create feature branch +3. Implement changes +4. Add tests +5. Submit PR + +## 📝 License + +Licensed under terms specified in workspace configuration. 
+ +## 🆘 Support + +### Issues +- Check existing issues +- Provide reproduction steps +- Include relevant logs +- Follow up on discussions + +### Documentation +- API references +- Integration guides +- Deployment docs +- Best practices + +## 🔮 Roadmap + +### Short Term +- Enhanced media processing +- Additional messaging protocols +- Improved scalability +- Extended monitoring + +### Long Term +- Advanced analytics +- Global expansion +- Enterprise features + + +| ✓ | Requirement | Component | Standard | Implementation Steps | +|---|-------------|-----------|-----------|---------------------| +| ✅ | TLS 1.3 Configuration | Nginx | All | Configure modern SSL parameters and ciphers in `/etc/nginx/conf.d/ssl.conf` | +| ✅ | Access Logging | Nginx | All | Enable detailed access logs with privacy fields in `/etc/nginx/nginx.conf` | +| ⬜ | Rate Limiting | Nginx | ISO 27001 | Implement rate limiting rules in location blocks | +| ⬜ | WAF Rules | Nginx | HIPAA | Install and configure ModSecurity with OWASP rules | +| ✅ | Reverse Proxy Security | Nginx | All | Configure security headers (X-Frame-Options, HSTS, CSP) | +| ✅ | MFA Implementation | Zitadel | All | Enable and enforce MFA for all administrative accounts | +| ✅ | RBAC Configuration | Zitadel | All | Set up role-based access control with least privilege | +| ✅ | Password Policy | Zitadel | All | Configure strong password requirements (length, complexity, history) | +| ✅ | OAuth2/OIDC Setup | Zitadel | ISO 27001 | Configure secure OAuth flows and token policies | +| ✅ | Audit Logging | Zitadel | All | Enable comprehensive audit logging for user activities | +| ✅ | Encryption at Rest | MinIO | All | Configure encrypted storage with key management | +| ✅ | Bucket Policies | MinIO | All | Implement strict bucket access policies | +| ✅ | Object Versioning | MinIO | HIPAA | Enable versioning for data recovery capability | +| ✅ | Access Logging | MinIO | All | Enable detailed access logging for object operations | +| ⬜ | 
Lifecycle Rules | MinIO | LGPD | Configure data retention and deletion policies | +| ✅ | DKIM/SPF/DMARC | Stalwart | All | Configure email authentication mechanisms | +| ✅ | Mail Encryption | Stalwart | All | Enable TLS for mail transport | +| ✅ | Content Filtering | Stalwart | All | Implement content scanning and filtering rules | +| ⬜ | Mail Archiving | Stalwart | HIPAA | Configure compliant email archiving | +| ✅ | Sieve Filtering | Stalwart | All | Implement security-focused mail filtering rules | +| ⬜ | System Hardening | Ubuntu | All | Apply CIS Ubuntu Linux benchmarks | +| ✅ | System Updates | Ubuntu | All | Configure unattended-upgrades for security patches | +| ⬜ | Audit Daemon | Ubuntu | All | Configure auditd for system event logging | +| ✅ | Firewall Rules | Ubuntu | All | Configure UFW with restrictive rules | +| ⬜ | Disk Encryption | Ubuntu | All | Implement LUKS encryption for system disks | +| ⬜ | SELinux/AppArmor | Ubuntu | All | Enable and configure mandatory access control | +| ✅ | Monitoring Setup | All | All | Install and configure Prometheus + Grafana | +| ✅ | Log Aggregation | All | All | Implement centralized logging (e.g., ELK Stack) | +| ⬜ | Backup System | All | All | Configure automated backup system with encryption | +| ✅ | Network Isolation | All | All | Implement proper network segmentation | +| ✅ | Data Classification | All | HIPAA/LGPD | Document data types and handling procedures | +| ✅ | Session Management | Zitadel | All | Configure secure session timeouts and invalidation | +| ✅ | Certificate Management | All | All | Implement automated certificate renewal with Let's Encrypt | +| ✅ | Vulnerability Scanning | All | ISO 27001 | Regular automated scanning with tools like OpenVAS | +| ✅ | Incident Response Plan | All | All | Document and test incident response procedures | +| ✅ | Disaster Recovery | All | HIPAA | Implement and test disaster recovery procedures | + + +## Documentation Requirements + +1. 
**Security Policies** + - Information Security Policy + - Access Control Policy + - Password Policy + - Data Protection Policy + - Incident Response Plan + +2. **Procedures** + - Backup and Recovery Procedures + - Change Management Procedures + - Access Review Procedures + - Security Incident Procedures + - Data Breach Response Procedures + +3. **Technical Documentation** + - Network Architecture Diagrams + - System Configuration Documentation + - Security Controls Documentation + - Encryption Standards Documentation + - Logging and Monitoring Documentation + +4. **Compliance Records** + - Risk Assessment Reports + - Audit Logs + - Training Records + - Incident Reports + - Access Review Records + +## Regular Maintenance Tasks + +- Weekly security updates +- Monthly access reviews +- Quarterly compliance audits +- Annual penetration testing +- Bi-annual disaster recovery testing + + +### **Key Open Source Tools in Rust/Go**: +1. **Zitadel (Go)**: Identity and access management for secure authentication. +2. **Stalwart (Rust)**: Secure email server for threat detection. +3. **MinIO (Go)**: High-performance object storage for unstructured data. +4. **Ubuntu Advantage (Go/Rust tools)**: Compliance and security tools for Ubuntu. +5. **Tantivy (Rust)**: Full-text search engine for data discovery. +6. **Drone (Go)**: CI/CD platform for DevOps automation. +7. **Temporal (Go)**: Workflow orchestration engine. +8. **Caddy (Go)**: Web server for seamless customer experiences. +9. **SeaweedFS (Go)**: Distributed file system for secure file sharing. +10. **Vector (Rust)**: Observability pipeline for monitoring. +11. **Tyk (Go)**: API gateway for secure API management. +12. **Vault (Go)**: Secrets management and encryption. +13. **Hugging Face Transformers (Rust/Go bindings)**: LLM integration and fine-tuning. +14. **Kubernetes (Go)**: Container orchestration for scalable deployments. +15. **Matrix (Rust)**: Real-time communication and collaboration. 
+ +# API: + +## **File & Document Management** +/files/upload +/files/download +/files/copy +/files/move +/files/delete +/files/getContents +/files/save +/files/createFolder +/files/shareFolder +/files/dirFolder +/files/list +/files/search +/files/recent +/files/favorite +/files/versions +/files/restore +/files/permissions +/files/quota +/files/shared +/files/sync/status +/files/sync/start +/files/sync/stop + +--- + +### **Document Processing** +/docs/merge +/docs/convert +/docs/fill +/docs/export +/docs/import + +--- + +### **Groups & Organizations** +/groups/create +/groups/update +/groups/delete +/groups/list +/groups/search +/groups/members +/groups/members/add +/groups/members/remove +/groups/permissions +/groups/settings +/groups/analytics +/groups/join/request +/groups/join/approve +/groups/join/reject +/groups/invites/send +/groups/invites/list + +--- + +### **Conversations & Real-time Communication** +/conversations/create +/conversations/join +/conversations/leave +/conversations/members +/conversations/messages +/conversations/messages/send +/conversations/messages/edit +/conversations/messages/delete +/conversations/messages/react +/conversations/messages/pin +/conversations/messages/search +/conversations/calls/start +/conversations/calls/join +/conversations/calls/leave +/conversations/calls/mute +/conversations/calls/unmute +/conversations/screen/share +/conversations/screen/stop +/conversations/recording/start +/conversations/recording/stop +/conversations/whiteboard/create +/conversations/whiteboard/collaborate + +--- + +### **Communication Services** +/comm/email/send +/comm/email/template +/comm/email/schedule +/comm/email/cancel +/comm/sms/send +/comm/sms/bulk +/comm/notifications/send +/comm/notifications/preferences +/comm/broadcast/send +/comm/contacts/import +/comm/contacts/export +/comm/contacts/sync +/comm/contacts/groups + +--- + +### **User Management & Authentication** +/users/create +/users/update +/users/delete +/users/list 
+/users/search +/users/profile +/users/profile/update +/users/settings +/users/permissions +/users/roles +/users/status +/users/presence +/users/activity +/users/security/2fa/enable +/users/security/2fa/disable +/users/security/devices +/users/security/sessions +/users/notifications/settings + +--- + +### **Calendar & Task Management** +/calendar/events/create +/calendar/events/update +/calendar/events/delete +/calendar/events/list +/calendar/events/search +/calendar/availability/check +/calendar/schedule/meeting +/calendar/reminders/set +/tasks/create +/tasks/update +/tasks/delete +/tasks/list +/tasks/assign +/tasks/status/update +/tasks/priority/set +/tasks/dependencies/set + +--- + +### **Storage & Data Management** +/storage/save +/storage/batch +/storage/json +/storage/delete +/storage/quota/check +/storage/cleanup +/storage/backup/create +/storage/backup/restore +/storage/archive +/storage/metrics + +--- + +### **Analytics & Reporting** +/analytics/dashboard +/analytics/reports/generate +/analytics/reports/schedule +/analytics/metrics/collect +/analytics/insights/generate +/analytics/trends/analyze +/analytics/export + +--- + +### **System & Administration** +/admin/system/status +/admin/system/metrics +/admin/logs/view +/admin/logs/export +/admin/config/update +/admin/maintenance/schedule +/admin/backup/create +/admin/backup/restore +/admin/users/manage +/admin/roles/manage +/admin/quotas/manage +/admin/licenses/manage + +--- + +### **AI & Machine Learning** +/ai/analyze/text +/ai/analyze/image +/ai/generate/text +/ai/generate/image +/ai/translate +/ai/summarize +/ai/recommend +/ai/train/model +/ai/predict + +--- + +### **Security & Compliance** +/security/audit/logs +/security/compliance/check +/security/threats/scan +/security/access/review +/security/encryption/manage +/security/certificates/manage + +--- + +### **Health & Monitoring** +/health +/health/detailed +/monitoring/status +/monitoring/alerts +/monitoring/metrics + + +Built with ❤️ from Brazil, 
using Rust for maximum performance and reliability. diff --git a/docs/keywords/PROMPT.md b/docs/keywords/PROMPT.md new file mode 100644 index 000000000..cae6a07a7 --- /dev/null +++ b/docs/keywords/PROMPT.md @@ -0,0 +1,201 @@ +# Modelo de Prompt para Aprendizado de BASIC em Markdown + +## 🎯 **ESTRUTURA PARA APRENDIZ DE BASIC** + +``` +**CONCEITO BASIC:** +[Nome do conceito ou comando] + +**NÍVEL:** +☐ Iniciante ☐ Intermediário ☐ Avançado + +**OBJETIVO DE APRENDIZADO:** +[O que você quer entender ou criar] + +**CÓDIGO EXEMPLO:** +```basic +[Seu código ou exemplo aqui] +``` + +**DÚVIDAS ESPECÍFICAS:** +- [Dúvida 1 sobre o conceito] +- [Dúvida 2 sobre sintaxe] +- [Dúvida 3 sobre aplicação] + +**CONTEXTO DO PROJETO:** +[Descrição do que está tentando fazer] + +**RESULTADO ESPERADO:** +[O que o código deve fazer] + +**PARTES QUE NÃO ENTENDE:** +- [Trecho específico do código] +- [Mensagem de erro] +- [Lógica confusa] +``` + +--- + +## 📚 **EXEMPLO PRÁTICO: LOOP FOR** + +``` +**CONCEITO BASIC:** +LOOP FOR + +**NÍVEL:** +☒ Iniciante ☐ Intermediário ☐ Avançado + +**OBJETIVO DE APRENDIZADO:** +Entender como criar um contador de 1 a 10 + +**CÓDIGO EXEMPLO:** +```basic +10 FOR I = 1 TO 10 +20 PRINT "Número: "; I +30 NEXT I +``` + +**DÚVIDAS ESPECÍFICAS:** +- O que significa "NEXT I"? +- Posso usar outras letras além de "I"? +- Como fazer contagem regressiva? + +**CONTEXTO DO PROJETO:** +Estou criando um programa que lista números + +**RESULTADO ESPERADO:** +Que apareça: Número: 1, Número: 2, etc. + +**PARTES QUE NÃO ENTENDE:** +- Por que precisa do número 10 na linha 10? +- O que acontece se esquecer o NEXT? 
+``` + +--- + +## 🛠️ **MODELO PARA RESOLVER ERROS** + +``` +**ERRO NO BASIC:** +[Mensagem de erro ou comportamento estranho] + +**MEU CÓDIGO:** +```basic +[Coloque seu código completo] +``` + +**LINHA COM PROBLEMA:** +[Linha específica onde ocorre o erro] + +**COMPORTAMENTO ESPERADO:** +[O que deveria acontecer] + +**COMPORTAMENTO ATUAL:** +[O que está acontecendo de errado] + +**O QUE JÁ TENTEI:** +- [Tentativa 1 de correção] +- [Tentativa 2] +- [Tentativa 3] + +**VERSÃO DO BASIC:** +[QBASIC, GW-BASIC, FreeBASIC, etc.] +``` + +--- + +## 📖 **MODELO PARA EXPLICAR COMANDOS** + +``` +**COMANDO:** +[Nome do comando - ex: PRINT, INPUT, GOTO] + +**SYNTAX:** +[Como escrever corretamente] + +**PARÂMETROS:** +- Parâmetro 1: [Função] +- Parâmetro 2: [Função] + +**EXEMPLO SIMPLES:** +```basic +[Exemplo mínimo e funcional] +``` + +**EXEMPLO PRÁTICO:** +```basic +[Exemplo em contexto real] +``` + +**ERROS COMUNS:** +- [Erro frequente 1] +- [Erro frequente 2] + +**DICA PARA INICIANTES:** +[Dica simples para não errar] + +**EXERCÍCIO SUGERIDO:** +[Pequeno exercício para praticar] +``` + +--- + +## 🎨 **FORMATAÇÃO MARKDOWN PARA BASIC** + +### **Como documentar seu código em .md:** +```markdown +# [NOME DO PROGRAMA] + +## 🎯 OBJETIVO +[O que o programa faz] + +## 📋 COMO USAR +1. [Passo 1] +2. 
[Passo 2] + +## 🧩 CÓDIGO FONTE +```basic +[Seu código aqui] +``` + +## 🔍 EXPLICAÇÃO +- **Linha X**: [Explicação] +- **Linha Y**: [Explicação] + +## 🚀 EXEMPLO DE EXECUÇÃO +``` +[Saída do programa] +``` +``` + +--- + +## 🏆 **MODELO DE PROJETO COMPLETO** + +``` +# PROJETO BASIC: [NOME] + +## 📝 DESCRIÇÃO +[Descrição do que o programa faz] + +## 🎨 FUNCIONALIDADES +- [ ] Funcionalidade 1 +- [ ] Funcionalidade 2 +- [ ] Funcionalidade 3 + +## 🧩 ESTRUTURA DO CÓDIGO +```basic +[Seu código organizado] +``` + +## 🎯 APRENDIZADOS +- [Conceito 1 aprendido] +- [Conceito 2 aprendido] + +## ❓ DÚVIDAS PARA EVOLUIR +- [Dúvida para melhorar] +- [O que gostaria de fazer depois] +``` + +Generate several examples +for this keyword, written in Rhai; do this only for a BASIC-beginner audience: \ No newline at end of file diff --git a/docs/keywords/format.md b/docs/keywords/format.md new file mode 100644 index 000000000..fba80552d --- /dev/null +++ b/docs/keywords/format.md @@ -0,0 +1,402 @@ +# 📚 **BASIC LEARNING EXAMPLES - FORMAT Function** + +## 🎯 **EXAMPLE 1: BASIC CONCEPT OF FORMAT FUNCTION** + +``` +**BASIC CONCEPT:** +FORMAT FUNCTION - Value formatting + +**LEVEL:** +☒ Beginner ☐ Intermediate ☐ Advanced + +**LEARNING OBJECTIVE:** +Understand how to format numbers, dates, and text + +**CODE EXAMPLE:** +```basic +10 NUMBER = 1234.56 +20 TEXT$ = "John" +30 DATE$ = "2024-03-15 14:30:00" +40 +50 PRINT FORMAT(NUMBER, "n") ' 1234.56 +60 PRINT FORMAT(NUMBER, "F") ' 1234.56 +70 PRINT FORMAT(TEXT$, "Hello @!") ' Hello John! +80 PRINT FORMAT(DATE$, "dd/MM/yyyy") ' 15/03/2024 +``` + +**SPECIFIC QUESTIONS:** +- What's the difference between "n" and "F"? +- What does "@" mean in text? +- How to format dates in Brazilian format? 
+ +**PROJECT CONTEXT:** +I need to display data in a nicer way + +**EXPECTED RESULT:** +Values formatted according to the pattern + +**PARTS I DON'T UNDERSTAND:** +- When to use each type of formatting +- How it works internally +``` + +--- + +## 🛠️ **EXAMPLE 2: NUMERIC FORMATTING** + +``` +**BASIC CONCEPT:** +NUMBER FORMATTING + +**LEVEL:** +☒ Beginner ☐ Intermediate ☐ Advanced + +**LEARNING OBJECTIVE:** +Learn to format numbers as currency and with separators + +**CODE EXAMPLE:** +```basic +10 VALUE = 1234567.89 +20 +30 PRINT "Standard: "; FORMAT(VALUE, "n") ' 1234567.89 +40 PRINT "Decimal: "; FORMAT(VALUE, "F") ' 1234567.89 +45 PRINT "Integer: "; FORMAT(VALUE, "f") ' 1234567 +50 PRINT "Percentage: "; FORMAT(0.856, "0%") ' 86% +60 +70 ' Formatting with locale +80 PRINT "Dollar: "; FORMAT(VALUE, "C2[en]") ' $1,234,567.89 +90 PRINT "Real: "; FORMAT(VALUE, "C2[pt]") ' R$ 1.234.567,89 +100 PRINT "Euro: "; FORMAT(VALUE, "C2[fr]") ' €1,234,567.89 +``` + +**SPECIFIC QUESTIONS:** +- What does "C2[pt]" mean? +- How to change decimal places? +- Which locales are available? + +**PROJECT CONTEXT:** +Multi-currency financial system + +**EXPECTED RESULT:** +Numbers formatted according to regional standards + +**PARTS I DON'T UNDERSTAND:** +- Syntax of complex patterns +- Differences between locales +``` + +--- + +## 📖 **EXAMPLE 3: EXPLAINING FORMAT COMMAND** + +``` +**COMMAND:** +FORMAT - Formats values + +**SYNTAX:** +```basic +RESULT$ = FORMAT(VALUE, PATTERN$) +``` + +**PARAMETERS:** +- VALUE: Number, date or text to format +- PATTERN$: String with formatting pattern + +**SIMPLE EXAMPLE:** +```basic +10 PRINT FORMAT(123.45, "n") ' 123.45 +20 PRINT FORMAT("Mary", "Ms. @") ' Ms. 
Mary +``` + +**PRACTICAL EXAMPLE:** +```basic +10 INPUT "Name: "; NAME$ +20 INPUT "Salary: "; SALARY +30 INPUT "Birth date: "; BIRTH_DATE$ +40 +50 PRINT "Record:" +60 PRINT "Name: "; FORMAT(NAME$, "!") ' UPPERCASE +70 PRINT "Salary: "; FORMAT(SALARY, "C2[en]") ' $1,234.56 +80 PRINT "Birth: "; FORMAT(BIRTH_DATE$, "MM/dd/yyyy") +``` + +**COMMON ERRORS:** +- Using wrong pattern for data type +- Forgetting it returns string +- Formatting date without correct format + +**BEGINNER TIP:** +Test each pattern separately before using in project + +**SUGGESTED EXERCISE:** +Create a bank statement with professional formatting +``` + +--- + +## 🎨 **EXAMPLE 4: DATE AND TIME FORMATTING** + +``` +**BASIC CONCEPT:** +DATE AND TIME FORMATTING + +**LEVEL:** +☐ Beginner ☒ Intermediate ☐ Advanced + +**LEARNING OBJECTIVE:** +Learn all date formatting patterns + +**CODE EXAMPLE:** +```basic +10 DATE$ = "2024-03-15 14:30:25" +20 +30 PRINT "Brazilian: "; FORMAT(DATE$, "dd/MM/yyyy") ' 15/03/2024 +40 PRINT "Complete: "; FORMAT(DATE$, "dd/MM/yyyy HH:mm") ' 15/03/2024 14:30 +50 PRINT "US: "; FORMAT(DATE$, "MM/dd/yyyy") ' 03/15/2024 +60 PRINT "International: "; FORMAT(DATE$, "yyyy-MM-dd") ' 2024-03-15 +70 +80 PRINT "24h Time: "; FORMAT(DATE$, "HH:mm:ss") ' 14:30:25 +90 PRINT "12h Time: "; FORMAT(DATE$, "hh:mm:ss tt") ' 02:30:25 PM +100 PRINT "Long date: "; FORMAT(DATE$, "dd 'of' MMMM 'of' yyyy") +``` + +**SPECIFIC QUESTIONS:** +- What's the difference between HH and hh? +- How to show month name? +- What is "tt"? 
+ +**PROJECT CONTEXT:** +Scheduling system and reports + +**EXPECTED RESULT:** +Dates formatted according to needs + +**PARTS I DON'T UNDERSTAND:** +- All formatting codes +- How milliseconds work +``` + +--- + +## 🏆 **EXAMPLE 5: COMPLETE PROJECT - BANK STATEMENT** + +``` +# BASIC PROJECT: FORMATTED BANK STATEMENT + +## 📝 DESCRIPTION +System that generates bank statement with professional formatting + +## 🎨 FEATURES +- [x] Currency formatting +- [x] Date formatting +- [x] Value alignment + +## 🧩 CODE STRUCTURE +```basic +10 ' Customer data +20 NAME$ = "Carlos Silva" +30 BALANCE = 12567.89 +40 +50 ' Transactions +60 DIM DATES$(3), DESCRIPTIONS$(3), AMOUNTS(3) +70 DATES$(1) = "2024-03-10 09:15:00" : DESCRIPTIONS$(1) = "Deposit" : AMOUNTS(1) = 2000 +80 DATES$(2) = "2024-03-12 14:20:00" : DESCRIPTIONS$(2) = "Withdrawal" : AMOUNTS(2) = -500 +90 DATES$(3) = "2024-03-14 11:30:00" : DESCRIPTIONS$(3) = "Transfer" : AMOUNTS(3) = -150.50 +100 +110 ' Header +120 PRINT FORMAT("BANK STATEMENT", "!") +130 PRINT "Customer: "; FORMAT(NAME$, "&") +140 PRINT "Date: "; FORMAT("2024-03-15 08:00:00", "dd/MM/yyyy HH:mm") +150 PRINT STRING$(40, "-") +160 +170 ' Transactions +180 FOR I = 1 TO 3 +190 FORMATTED_DATE$ = FORMAT(DATES$(I), "dd/MM HH:mm") +200 FORMATTED_AMOUNT$ = FORMAT(AMOUNTS(I), "C2[en]") +210 +220 PRINT FORMATTED_DATE$; " - "; +230 PRINT DESCRIPTIONS$(I); +240 PRINT TAB(30); FORMATTED_AMOUNT$ +250 NEXT I +260 +270 ' Balance +280 PRINT STRING$(40, "-") +290 PRINT "Balance: "; TAB(30); FORMAT(BALANCE, "C2[en]") +``` + +## 🎯 LEARNINGS +- Currency formatting with locale +- Date formatting +- Composition of multiple formats + +## ❓ QUESTIONS TO EVOLVE +- How to perfectly align columns? +- How to format negative numbers in red? +- How to add more locales? 
+``` + +--- + +## 🛠️ **EXAMPLE 6: TEXT FORMATTING** + +``` +**BASIC CONCEPT:** +STRING/TEXT FORMATTING + +**LEVEL:** +☒ Beginner ☐ Intermediate ☐ Advanced + +**LEARNING OBJECTIVE:** +Learn to use placeholders in text + +**CODE EXAMPLE:** +```basic +10 NAME$ = "Mary" +20 CITY$ = "são paulo" +21 COUNTRY$ = "BRAZIL" +22 AGE = 25 +30 +40 PRINT FORMAT(NAME$, "Hello @!") ' Hello Mary! +50 PRINT FORMAT(NAME$, "Welcome, @") ' Welcome, Mary +60 PRINT FORMAT(CITY$, "City: !") ' City: SÃO PAULO +70 PRINT FORMAT(CITY$, "City: &") ' City: são paulo +80 PRINT FORMAT(COUNTRY$, "Country: &") ' Country: brazil +90 +100 ' Combining with numbers +110 PRINT FORMAT(NAME$, "@ is ") + FORMAT(AGE, "n") + " years old" +120 ' Mary is 25 years old +``` + +**SPECIFIC QUESTIONS:** +- What's the difference between @, ! and &? +- Can I use multiple placeholders? +- How to escape special characters? + +**PROJECT CONTEXT:** +Personalized report generation + +**EXPECTED RESULT:** +Dynamic texts formatted automatically + +**PARTS I DON'T UNDERSTAND:** +- Placeholder limitations +- How to mix different types +``` + +--- + +## 📚 **EXAMPLE 7: PRACTICAL EXERCISES** + +``` +# EXERCISES: PRACTICING WITH FORMAT + +## 🎯 EXERCISE 1 - BASIC +Create a program that formats product prices. + +**SOLUTION:** +```basic +10 DIM PRODUCTS$(3), PRICES(3) +20 PRODUCTS$(1) = "Laptop" : PRICES(1) = 2500.99 +30 PRODUCTS$(2) = "Mouse" : PRICES(2) = 45.5 +40 PRODUCTS$(3) = "Keyboard" : PRICES(3) = 120.75 +50 +60 FOR I = 1 TO 3 +70 PRINT FORMAT(PRODUCTS$(I), "@: ") + FORMAT(PRICES(I), "C2[en]") +80 NEXT I +``` + +## 🎯 EXERCISE 2 - INTERMEDIATE +Make a program that shows dates in different formats. 
+ +**SOLUTION:** +```basic +10 DATE$ = "2024-12-25 20:00:00" +20 +30 PRINT "Christmas: "; FORMAT(DATE$, "dd/MM/yyyy") +40 PRINT "US: "; FORMAT(DATE$, "MM/dd/yyyy") +50 PRINT "Dinner: "; FORMAT(DATE$, "HH'h'mm") +60 PRINT "Formatted: "; FORMAT(DATE$, "dd 'of' MMMM 'of' yyyy 'at' HH:mm") +``` + +## 🎯 EXERCISE 3 - ADVANCED +Create a school report card system with formatting. + +**SOLUTION:** +```basic +10 NAME$ = "ana silva" +20 AVERAGE = 8.75 +21 ATTENDANCE = 0.92 +30 REPORT_DATE$ = "2024-03-15 10:00:00" +40 +50 PRINT FORMAT("SCHOOL REPORT CARD", "!") +60 PRINT "Student: "; FORMAT(NAME$, "&") +70 PRINT "Date: "; FORMAT(REPORT_DATE$, "dd/MM/yyyy") +80 PRINT "Average: "; FORMAT(AVERAGE, "n") +90 PRINT "Attendance: "; FORMAT(ATTENDANCE, "0%") +``` + +## 💡 TIPS +- Always test patterns before using +- Use PRINT to see each formatting result +- Combine simple formats to create complex ones +``` + +--- + +## 🎨 **EXAMPLE 8: COMPLETE REFERENCE GUIDE** + +```markdown +# FORMAT FUNCTION - COMPLETE GUIDE + +## 🎯 OBJECTIVE +Format numbers, dates and text professionally + +## 📋 SYNTAX +```basic +RESULT$ = FORMAT(VALUE, PATTERN$) +``` + +## 🔢 NUMERIC FORMATTING +| Pattern | Example | Result | +|---------|---------|--------| +| "n" | `FORMAT(1234.5, "n")` | 1234.50 | +| "F" | `FORMAT(1234.5, "F")` | 1234.50 | +| "f" | `FORMAT(1234.5, "f")` | 1234 | +| "0%" | `FORMAT(0.85, "0%")` | 85% | +| "C2[en]" | `FORMAT(1234.5, "C2[en]")` | $1,234.50 | +| "C2[pt]" | `FORMAT(1234.5, "C2[pt]")` | R$ 1.234,50 | + +## 📅 DATE FORMATTING +| Code | Meaning | Example | +|------|---------|---------| +| yyyy | 4-digit year | 2024 | +| yy | 2-digit year | 24 | +| MM | 2-digit month | 03 | +| M | 1-2 digit month | 3 | +| dd | 2-digit day | 05 | +| d | 1-2 digit day | 5 | +| HH | 24h hour 2-digit | 14 | +| H | 24h hour 1-2 digit | 14 | +| hh | 12h hour 2-digit | 02 | +| h | 12h hour 1-2 digit | 2 | +| mm | 2-digit minute | 05 | +| m | 1-2 digit minute | 5 | +| ss | 2-digit second | 09 | +| s | 1-2 digit 
second | 9 | +| tt | AM/PM | PM | +| t | A/P | P | + +## 📝 TEXT FORMATTING +| Placeholder | Function | Example | +|-------------|----------|---------| +| @ | Insert original text | `FORMAT("John", "@")` → John | +| ! | Text in UPPERCASE | `FORMAT("John", "!")` → JOHN | +| & | Text in lowercase | `FORMAT("John", "&")` → john | + +## ⚠️ LIMITATIONS +- Dates must be in "YYYY-MM-DD HH:MM:SS" format +- Very large numbers may have issues +- Supported locales: en, pt, fr, de, es, it +``` + +These examples cover from basic to advanced applications of the FORMAT function! 🚀 \ No newline at end of file diff --git a/docs/keywords/last.md b/docs/keywords/last.md new file mode 100644 index 000000000..9e40ef370 --- /dev/null +++ b/docs/keywords/last.md @@ -0,0 +1,348 @@ +# 📚 **BASIC LEARNING EXAMPLES - LAST Function** + +## 🎯 **EXAMPLE 1: BASIC CONCEPT OF LAST FUNCTION** + +``` +**BASIC CONCEPT:** +LAST FUNCTION - Extract last word + +**LEVEL:** +☒ Beginner ☐ Intermediate ☐ Advanced + +**LEARNING OBJECTIVE:** +Understand how the LAST function extracts the last word from text + +**CODE EXAMPLE:** +```basic +10 PALAVRA$ = "The mouse chewed the clothes" +20 ULTIMA$ = LAST(PALAVRA$) +30 PRINT "Last word: "; ULTIMA$ +``` + +**SPECIFIC QUESTIONS:** +- How does the function know where the last word ends? +- What happens if there are extra spaces? +- Can I use it with numeric variables? + +**PROJECT CONTEXT:** +I'm creating a program that analyzes sentences + +**EXPECTED RESULT:** +Should display: "Last word: clothes" + +**PARTS I DON'T UNDERSTAND:** +- Why are parentheses needed? +- How does the function work internally? 
+``` + +--- + +## 🛠️ **EXAMPLE 2: SOLVING ERROR WITH LAST** + +``` +**BASIC ERROR:** +"Syntax error" when using LAST + +**MY CODE:** +```basic +10 TEXTO$ = "Good day world" +20 RESULTADO$ = LAST TEXTO$ +30 PRINT RESULTADO$ +``` + +**PROBLEM LINE:** +Line 20 + +**EXPECTED BEHAVIOR:** +Show "world" on screen + +**CURRENT BEHAVIOR:** +Syntax error + +**WHAT I'VE TRIED:** +- Tried without parentheses +- Tried with different quotes +- Tried changing variable name + +**BASIC VERSION:** +QBASIC with Rhai extension + +**CORRECTED SOLUTION:** +```basic +10 TEXTO$ = "Good day world" +20 RESULTADO$ = LAST(TEXTO$) +30 PRINT RESULTADO$ +``` +``` + +--- + +## 📖 **EXAMPLE 3: EXPLAINING LAST COMMAND** + +``` +**COMMAND:** +LAST - Extracts last word + +**SYNTAX:** +```basic +ULTIMA$ = LAST(TEXTO$) +``` + +**PARAMETERS:** +- TEXTO$: String from which to extract the last word + +**SIMPLE EXAMPLE:** +```basic +10 FRASE$ = "The sun is bright" +20 ULTIMA$ = LAST(FRASE$) +30 PRINT ULTIMA$ ' Shows: bright +``` + +**PRACTICAL EXAMPLE:** +```basic +10 INPUT "Enter your full name: "; NOME$ +20 SOBRENOME$ = LAST(NOME$) +30 PRINT "Hello Mr./Mrs. 
"; SOBRENOME$ +``` + +**COMMON ERRORS:** +- Forgetting parentheses: `LAST TEXTO$` ❌ +- Using with numbers: `LAST(123)` ❌ +- Forgetting to assign to a variable + +**BEGINNER TIP:** +Always use parentheses and ensure content is text + +**SUGGESTED EXERCISE:** +Create a program that asks for a sentence and shows the first and last word +``` + +--- + +## 🎨 **EXAMPLE 4: COMPLETE PROJECT WITH LAST** + +``` +# BASIC PROJECT: SENTENCE ANALYZER + +## 📝 DESCRIPTION +Program that analyzes sentences and extracts useful information + +## 🎨 FEATURES +- [x] Extract last word +- [x] Count words +- [x] Show statistics + +## 🧩 CODE STRUCTURE +```basic +10 PRINT "=== SENTENCE ANALYZER ===" +20 INPUT "Enter a sentence: "; FRASE$ +30 +40 ' Extract last word +50 ULTIMA$ = LAST(FRASE$) +60 +70 ' Count words (simplified) +80 PALAVRAS = 1 +90 FOR I = 1 TO LEN(FRASE$) +100 IF MID$(FRASE$, I, 1) = " " THEN PALAVRAS = PALAVRAS + 1 +110 NEXT I +120 +130 PRINT +140 PRINT "Last word: "; ULTIMA$ +150 PRINT "Total words: "; PALAVRAS +160 PRINT "Original sentence: "; FRASE$ +``` + +## 🎯 LEARNINGS +- How to use LAST function +- How to count words manually +- String manipulation in BASIC + +## ❓ QUESTIONS TO EVOLVE +- How to extract the first word? +- How to handle punctuation? +- How to work with multiple sentences? 
+``` + +--- + +## 🏆 **EXAMPLE 5: SPECIAL CASES AND TESTS** + +``` +**BASIC CONCEPT:** +SPECIAL CASES OF LAST FUNCTION + +**LEVEL:** +☐ Beginner ☒ Intermediate ☐ Advanced + +**LEARNING OBJECTIVE:** +Understand how LAST behaves in special situations + +**CODE EXAMPLES:** +```basic +' Case 1: Empty string +10 TEXTO$ = "" +20 PRINT LAST(TEXTO$) ' Result: "" + +' Case 2: Single word only +30 TEXTO$ = "Sun" +40 PRINT LAST(TEXTO$) ' Result: "Sun" + +' Case 3: Multiple spaces +50 TEXTO$ = "Hello World " +60 PRINT LAST(TEXTO$) ' Result: "World" + +' Case 4: With tabs and newlines +70 TEXTO$ = "Line1" + CHR$(9) + "Line2" + CHR$(13) +80 PRINT LAST(TEXTO$) ' Result: "Line2" +``` + +**SPECIFIC QUESTIONS:** +- What happens with empty strings? +- How does it work with special characters? +- Is it case-sensitive? + +**PROJECT CONTEXT:** +I need to robustly validate user inputs + +**EXPECTED RESULT:** +Consistent behavior in all cases + +**PARTS I DON'T UNDERSTAND:** +- How the function handles whitespace? +- What are CHR$(9) and CHR$(13)? +``` + +--- + +## 🛠️ **EXAMPLE 6: INTEGRATION WITH OTHER FUNCTIONS** + +``` +**BASIC CONCEPT:** +COMBINING LAST WITH OTHER FUNCTIONS + +**LEVEL:** +☐ Beginner ☒ Intermediate ☐ Advanced + +**LEARNING OBJECTIVE:** +Learn to use LAST in more complex expressions + +**CODE EXAMPLE:** +```basic +10 ' Example 1: With concatenation +20 PARTE1$ = "Programming" +30 PARTE2$ = " in BASIC" +40 FRASE_COMPLETA$ = PARTE1$ + PARTE2$ +50 PRINT LAST(FRASE_COMPLETA$) ' Result: "BASIC" + +60 ' Example 2: With string functions +70 NOME_COMPLETO$ = "Maria Silva Santos" +80 SOBRENOME$ = LAST(NOME_COMPLETO$) +90 PRINT "Mr./Mrs. "; SOBRENOME$ + +100 ' Example 3: In conditional expressions +110 FRASE$ = "The sky is blue" +120 IF LAST(FRASE$) = "blue" THEN PRINT "The last word is blue!" +``` + +**SPECIFIC QUESTIONS:** +- Can I use LAST directly in IF? +- How to combine with LEFT$, RIGHT$, MID$? +- Is there a size limit for the string? 
+ +**PROJECT CONTEXT:** +Creating validations and text processing + +**EXPECTED RESULT:** +Use LAST flexibly in different contexts + +**PARTS I DON'T UNDERSTAND:** +- Expression evaluation order +- Performance with very large strings +``` + +--- + +## 📚 **EXAMPLE 7: PRACTICAL EXERCISES** + +``` +# EXERCISES: PRACTICING WITH LAST + +## 🎯 EXERCISE 1 - BASIC +Create a program that asks for the user's full name and greets using only the last name. + +**SOLUTION:** +```basic +10 INPUT "Enter your full name: "; NOME$ +20 SOBRENOME$ = LAST(NOME$) +30 PRINT "Hello, Mr./Mrs. "; SOBRENOME$; "!" +``` + +## 🎯 EXERCISE 2 - INTERMEDIATE +Make a program that analyzes if the last word of a sentence is "end". + +**SOLUTION:** +```basic +10 INPUT "Enter a sentence: "; FRASE$ +20 IF LAST(FRASE$) = "end" THEN PRINT "Sentence ends with 'end'" ELSE PRINT "Sentence doesn't end with 'end'" +``` + +## 🎯 EXERCISE 3 - ADVANCED +Create a program that processes multiple sentences and shows statistics. + +**SOLUTION:** +```basic +10 DIM FRASES$(3) +20 FRASES$(1) = "The sun shines" +30 FRASES$(2) = "The rain falls" +40 FRASES$(3) = "The wind blows" +50 +60 FOR I = 1 TO 3 +70 PRINT "Sentence "; I; ": "; FRASES$(I) +80 PRINT "Last word: "; LAST(FRASES$(I)) +90 PRINT +100 NEXT I +``` + +## 💡 TIPS +- Always test with different inputs +- Use PRINT for debugging +- Start with simple examples +``` + +--- + +## 🎨 **EXAMPLE 8: MARKDOWN DOCUMENTATION** + +```markdown +# LAST FUNCTION - COMPLETE GUIDE + +## 🎯 OBJECTIVE +Extract the last word from a string + +## 📋 SYNTAX +```basic +RESULTADO$ = LAST(TEXTO$) +``` + +## 🧩 PARAMETERS +- `TEXTO$`: Input string + +## 🔍 BEHAVIOR +- Splits string by spaces +- Returns the last part +- Ignores extra spaces at beginning/end + +## 🚀 EXAMPLES +```basic +10 PRINT LAST("hello world") ' Output: world +20 PRINT LAST("one word") ' Output: word +30 PRINT LAST(" spaces ") ' Output: spaces +``` + +## ⚠️ LIMITATIONS +- Doesn't work with numbers +- Requires parentheses +- 
Considers only spaces as separators +``` + +These examples cover from the basic concept to practical applications of the LAST function, always focusing on BASIC beginners! 🚀 \ No newline at end of file diff --git a/prompts/ai/analyze-customer-sentiment.bas b/prompts/ai/analyze-customer-sentiment.bas new file mode 100644 index 000000000..e57c9f26c --- /dev/null +++ b/prompts/ai/analyze-customer-sentiment.bas @@ -0,0 +1,45 @@ +PARAM customer_id AS STRING +PARAM time_period AS INTEGER DEFAULT 30 + +# Gather customer communications +emails = CALL "/storage/json", ".gbdata/communication_logs", + "to = '${customer_id}' OR from = '${customer_id}' AND timestamp > NOW() - DAYS(${time_period})" + +support_tickets = CALL "/crm/tickets/list", { + "customer_id": customer_id, + "created_after": NOW() - DAYS(time_period) +} + +meeting_notes = CALL "/crm/meetings/list", { + "customer_id": customer_id, + "date_after": NOW() - DAYS(time_period) +} + +# Combine all text for analysis +all_text = "" +FOR EACH email IN emails + all_text = all_text + email.subject + " " + email.body + " " +NEXT + +FOR EACH ticket IN support_tickets + all_text = all_text + ticket.description + " " + ticket.resolution + " " +NEXT + +FOR EACH meeting IN meeting_notes + all_text = all_text + meeting.notes + " " +NEXT + +# Analyze sentiment +sentiment = CALL "/ai/analyze/text", all_text, "sentiment" + +# Generate insights +insights = CALL "/ai/analyze/text", all_text, "key_topics" + +RETURN { + "customer_id": customer_id, + "time_period": time_period + " days", + "sentiment_score": sentiment.score, + "sentiment_label": sentiment.label, + "key_topics": insights.topics, + "recommendations": insights.recommendations +} diff --git a/prompts/analytics/sales-performance.bas b/prompts/analytics/sales-performance.bas new file mode 100644 index 000000000..67abf9538 --- /dev/null +++ b/prompts/analytics/sales-performance.bas @@ -0,0 +1,83 @@ +PARAM period AS STRING DEFAULT "month" +PARAM team_id AS STRING OPTIONAL + +# 
Determine date range +IF period = "week" THEN + start_date = NOW() - DAYS(7) +ELSEIF period = "month" THEN + start_date = NOW() - DAYS(30) +ELSEIF period = "quarter" THEN + start_date = NOW() - DAYS(90) +ELSEIF period = "year" THEN + start_date = NOW() - DAYS(365) +ELSE + RETURN "Invalid period specified. Use 'week', 'month', 'quarter', or 'year'." +END IF + +# Construct team filter +team_filter = "" +IF team_id IS NOT NULL THEN + team_filter = " AND team_id = '" + team_id + "'" +END IF + +# Get sales data +opportunities = QUERY "SELECT * FROM Opportunities WHERE close_date >= '${start_date}'" + team_filter +closed_won = QUERY "SELECT * FROM Opportunities WHERE status = 'Won' AND close_date >= '${start_date}'" + team_filter +closed_lost = QUERY "SELECT * FROM Opportunities WHERE status = 'Lost' AND close_date >= '${start_date}'" + team_filter + +# Calculate metrics +total_value = 0 +FOR EACH opp IN closed_won + total_value = total_value + opp.value +NEXT + +win_rate = LEN(closed_won) / (LEN(closed_won) + LEN(closed_lost)) * 100 + +# Get performance by rep +sales_reps = QUERY "SELECT owner_id, COUNT(*) as deals, SUM(value) as total_value FROM Opportunities WHERE status = 'Won' AND close_date >= '${start_date}'" + team_filter + " GROUP BY owner_id" + +# Generate report +report = CALL "/analytics/reports/generate", { + "title": "Sales Performance Report - " + UPPER(period), + "date_range": "From " + FORMAT_DATE(start_date) + " to " + FORMAT_DATE(NOW()), + "metrics": { + "total_opportunities": LEN(opportunities), + "won_opportunities": LEN(closed_won), + "lost_opportunities": LEN(closed_lost), + "win_rate": win_rate, + "total_value": total_value + }, + "rep_performance": sales_reps, + "charts": [ + { + "type": "bar", + "title": "Won vs Lost Opportunities", + "data": {"Won": LEN(closed_won), "Lost": LEN(closed_lost)} + }, + { + "type": "line", + "title": "Sales Trend", + "data": QUERY "SELECT DATE_FORMAT(close_date, '%Y-%m-%d') as date, COUNT(*) as count, SUM(value) as 
value FROM Opportunities WHERE status = 'Won' AND close_date >= '${start_date}'" + team_filter + " GROUP BY DATE_FORMAT(close_date, '%Y-%m-%d')" + } + ] +} + +# Save report +report_file = ".gbdrive/Reports/Sales/sales_performance_" + period + "_" + FORMAT_DATE(NOW(), "Ymd") + ".pdf" +CALL "/files/save", report_file, report + +# Share report +IF team_id IS NOT NULL THEN + CALL "/files/shareFolder", report_file, team_id + + # Notify team manager + manager = QUERY "SELECT manager_id FROM Teams WHERE id = '${team_id}'" + IF LEN(manager) > 0 THEN + CALL "/comm/email/send", manager[0], + "Sales Performance Report - " + UPPER(period), + "The latest sales performance report for your team is now available.", + [report_file] + END IF +END IF + +RETURN "Sales performance report generated: " + report_file diff --git a/prompts/business/create-lead-from-draft.bas b/prompts/business/create-lead-from-draft.bas new file mode 100644 index 000000000..270386b87 --- /dev/null +++ b/prompts/business/create-lead-from-draft.bas @@ -0,0 +1,5 @@ + +PARAM text as STRING +DESCRIPTION "Called when someone wants to create a customer by pasting unstructured text, like and e-mail answer." + +SAVE_FROM_UNSTRUCTURED "rob", text diff --git a/prompts/business/data-enrichment.bas b/prompts/business/data-enrichment.bas new file mode 100644 index 000000000..d752c1f21 --- /dev/null +++ b/prompts/business/data-enrichment.bas @@ -0,0 +1,31 @@ +let items = FIND "gb.rob", "ACTION=EMUL" +FOR EACH item IN items + + PRINT item.company + + let website = item.website ?? 
"" + if item.website == "" { + website = WEBSITE OF item.company + SET "gb.rob", "id="+ item.id, "website=" + website + PRINT website + } + + let page = GET website + let prompt = "Build the same simulator, keep js, svg, css, assets paths, just change title, keep six cases of six messages each (change and return VALID JSON with a minium of 6 cases and 6-8 messages each), but for " + item.company + " using just *content about the company* " + item.llm_notes + " from its website, so it is possible to create a good and useful emulator in the same langue as the content: " + page + let alias = LLM "Return a single word for " + item.company + " like a token, no spaces, no special characters, no numbers, no uppercase letters." + CREATE_SITE alias, "gb-emulator-base", prompt + + let to = item.emailcto + let subject = "Simulador " + alias + " ficou pronto" + let name = FIRST(item.contact) + let body = "Oi, " + name + ". Tudo bem? Para vocês terem uma ideia do ambiente conversacional em AI e algumas possibilidades, preparamos o " + alias + " especificamente para vocês!" + "\n\n Acesse o site: https://sites.pragmatismo.com.br/" + alias + "\n\n" + "Para acessar o simulador, clique no link acima ou copie e cole no seu navegador." + "\n\n" + "Para iniciar, escolha um dos casos conversacionais." 
+ "\n\n" + "Atenciosamente,\nRodrigo Rodriguez\n\n" + + let body = LLM "Melhora este e-mail: ------ " + body + " ----- mas mantem o link e inclui alguma referência ao histórico com o cliente: " + item.history + + CREATE_DRAFT to, subject, body + + SET "gb.rob", "id="+ item.id, "ACTION=CALL" + SET "gb.rob", "id="+ item.id, "emulator=true" + + WAIT 3000 +NEXT item diff --git a/prompts/business/on-emulator-sent.bas b/prompts/business/on-emulator-sent.bas new file mode 100644 index 000000000..a6ab7d1d7 --- /dev/null +++ b/prompts/business/on-emulator-sent.bas @@ -0,0 +1,12 @@ +let items = FIND "gb.rob", "ACTION=EMUL_ASK" +FOR EACH item IN items + + let to = item.emailcto + let subject = "Sobre o Simulador de AI enviado" + let name = FIRST(item.contact) + let body = GET "/EMUL-message.html" + + CREATE_DRAFT to, subject, body + SET "gb.rob", "id="+ item.id, "ACTION=EMUL_ASKED" + +NEXT item diff --git a/prompts/business/send-proposal-v0.bas b/prompts/business/send-proposal-v0.bas new file mode 100644 index 000000000..225973571 --- /dev/null +++ b/prompts/business/send-proposal-v0.bas @@ -0,0 +1,2 @@ +Based on this ${history}, generate the response for +${to}, signed by ${user} \ No newline at end of file diff --git a/prompts/business/send-proposal.bas b/prompts/business/send-proposal.bas new file mode 100644 index 000000000..c542d11b1 --- /dev/null +++ b/prompts/business/send-proposal.bas @@ -0,0 +1,25 @@ +PARAM to AS STRING +PARAM template AS STRING +PARAM opportunity AS STRING + +company = QUERY "SELECT Company FROM Opportunities WHERE Id = ${opportunity}" + +doc = FILL template + +' Generate email subject and content based on conversation history +subject = REWRITE "Based on this ${history}, generate a subject for a proposal email to ${company}" +contents = REWRITE "Based on this ${history}, and ${subject}, generate the e-mail body for ${to}, signed by ${user}, including key points from our proposal" + +' Add proposal to CRM +CALL "/files/upload", 
".gbdrive/Proposals/${company}-proposal.docx", doc +CALL "/files/permissions", ".gbdrive/Proposals/${company}-proposal.docx", "sales-team", "edit" + +' Record activity in CRM +CALL "/crm/activities/create", opportunity, "email_sent", { + "subject": subject, + "description": "Proposal sent to " + company, + "date": NOW() +} + +' Send the email +CALL "/comm/email/send", to, subject, contents, doc diff --git a/prompts/calendar/schedule-meeting.bas b/prompts/calendar/schedule-meeting.bas new file mode 100644 index 000000000..1fbaa4950 --- /dev/null +++ b/prompts/calendar/schedule-meeting.bas @@ -0,0 +1,36 @@ +PARAM attendees AS ARRAY +PARAM topic AS STRING +PARAM duration AS INTEGER +PARAM preferred_date AS DATE OPTIONAL + +# Find available time for all attendees +IF preferred_date IS NULL THEN + available_slots = CALL "/calendar/availability/check", attendees, NOW(), NOW() + DAYS(7), duration +ELSE + available_slots = CALL "/calendar/availability/check", attendees, preferred_date, preferred_date + DAYS(1), duration +END IF + +IF LEN(available_slots) = 0 THEN + RETURN "No available time slots found for all attendees." 
+END IF + +# Create meeting description +description = REWRITE "Generate a concise meeting description for topic: ${topic}" + +# Schedule the meeting +event_id = CALL "/calendar/events/create", { + "subject": topic, + "description": description, + "start_time": available_slots[0].start, + "end_time": available_slots[0].end, + "attendees": attendees, + "location": "Virtual Meeting" +} + +# Notify attendees +FOR EACH person IN attendees + CALL "/comm/notifications/send", person, "Meeting Scheduled: " + topic, + "You have been invited to a meeting on " + FORMAT_DATE(available_slots[0].start) +NEXT + +RETURN "Meeting scheduled for " + FORMAT_DATE(available_slots[0].start) diff --git a/prompts/code/system-code.bas b/prompts/code/system-code.bas new file mode 100644 index 000000000..2312d22db --- /dev/null +++ b/prompts/code/system-code.bas @@ -0,0 +1,5 @@ +BEGIN SYSTEM PROMPT + + No comments, no echo, condensed. + +END SYSTEM PROMPT diff --git a/prompts/communication/keyword-sendmail.bas b/prompts/communication/keyword-sendmail.bas new file mode 100644 index 000000000..f83a0a46d --- /dev/null +++ b/prompts/communication/keyword-sendmail.bas @@ -0,0 +1,23 @@ +PARAM from AS STRING +PARAM to AS STRING +PARAM subject AS STRING +PARAM body AS STRING +PARAM attachments AS ARRAY + +# Track in communication history +CALL "/storage/save", ".gbdata/communication_logs", { + "from": from, + "to": to, + "subject": subject, + "timestamp": NOW(), + "type": "email" +} + +# Send actual email +CALL "/comm/email/send", from, to, subject, body, attachments + +# If WITH HISTORY flag present, include prior communication +IF WITH_HISTORY THEN + prevComms = CALL "/storage/json", ".gbdata/communication_logs", "to = '" + to + "' ORDER BY timestamp DESC LIMIT 5" + APPEND body WITH FORMAT_HISTORY(prevComms) +END IF diff --git a/prompts/conversations/meeting-assistant.bas b/prompts/conversations/meeting-assistant.bas new file mode 100644 index 000000000..bfd6c97d0 --- /dev/null +++ 
b/prompts/conversations/meeting-assistant.bas @@ -0,0 +1,67 @@ +PARAM meeting_id AS STRING +PARAM action AS STRING DEFAULT "join" + +IF action = "join" THEN + # Get meeting details + meeting = CALL "/calendar/events/get", meeting_id + + # Join the meeting + CALL "/conversations/calls/join", meeting.conference_link + + # Set up recording + CALL "/conversations/recording/start", meeting_id + + # Create meeting notes document + notes_doc = CALL "/files/create", + ".gbdrive/Meetings/" + meeting.subject + "_" + FORMAT_DATE(NOW(), "Ymd") + ".md", + "# Meeting Notes: " + meeting.subject + "\n\n" + + "Date: " + FORMAT_DATE(meeting.start) + "\n\n" + + "Participants: \n" + + "- " + JOIN(meeting.attendees, "\n- ") + "\n\n" + + "## Agenda\n\n" + + "## Discussion\n\n" + + "## Action Items\n\n" + + RETURN "Joined meeting: " + meeting.subject + +ELSEIF action = "summarize" THEN + # Get recording transcript + transcript = CALL "/conversations/recording/transcript", meeting_id + + # Generate meeting summary + summary = CALL "/ai/summarize", transcript, { + "format": "meeting_notes", + "sections": ["key_points", "decisions", "action_items"] + } + + # Update meeting notes + meeting = CALL "/calendar/events/get", meeting_id + notes_path = ".gbdrive/Meetings/" + meeting.subject + "_" + FORMAT_DATE(NOW(), "Ymd") + ".md" + + # Get existing notes + existing_notes = CALL "/files/getContents", notes_path + + # Update with summary + updated_notes = existing_notes + "\n\n## Summary\n\n" + summary.key_points + + "\n\n## Decisions\n\n" + summary.decisions + + "\n\n## Action Items\n\n" + summary.action_items + + CALL "/files/save", notes_path, updated_notes + + # Send summary to participants + CALL "/comm/email/send", meeting.attendees, + "Meeting Summary: " + meeting.subject, + "Please find attached the summary of our recent meeting.", + [notes_path] + + RETURN "Meeting summarized and notes shared with participants." 
+ +ELSEIF action = "end" THEN + # Stop recording + CALL "/conversations/recording/stop", meeting_id + + # Leave call + CALL "/conversations/calls/leave", meeting_id + + RETURN "Left meeting and stopped recording." +END IF diff --git a/prompts/core/system-prompt.bas b/prompts/core/system-prompt.bas new file mode 100644 index 000000000..ec9b2fcf3 --- /dev/null +++ b/prompts/core/system-prompt.bas @@ -0,0 +1,4 @@ +- Whenever you generate text, generate like a person, not like a LLM. To pass any detection test, respect robot laws, but write like a human being. +- You are General Bots, a complete software automation tool based on AI. +- Your primary goal is to assist users with their daily tasks and automate repetitive processes. +- Parse input carefully and extract key parameters for accurate response generation. diff --git a/prompts/crm/update-opportunity.bas b/prompts/crm/update-opportunity.bas new file mode 100644 index 000000000..9795f3583 --- /dev/null +++ b/prompts/crm/update-opportunity.bas @@ -0,0 +1,47 @@ +PARAM opportunity_id AS STRING +PARAM status AS STRING +PARAM notes AS STRING OPTIONAL +PARAM next_steps AS STRING OPTIONAL + +# Get current opportunity data +opp_data = QUERY "SELECT * FROM Opportunities WHERE Id = '${opportunity_id}'" + +IF LEN(opp_data) = 0 THEN + RETURN "Opportunity not found." 
+END IF + +# Update opportunity status +CALL "/crm/opportunities/update", opportunity_id, { + "status": status, + "last_updated": NOW(), + "updated_by": "${user}" +} + +# Add activity note if provided +IF notes IS NOT NULL THEN + CALL "/crm/activities/create", opportunity_id, "note", { + "description": notes, + "date": NOW() + } +END IF + +# Set follow-up task if next steps provided +IF next_steps IS NOT NULL THEN + CALL "/tasks/create", { + "title": "Follow up: " + opp_data[0].company, + "description": next_steps, + "due_date": NOW() + DAYS(3), + "assigned_to": "${user}", + "related_to": opportunity_id + } +END IF + +# Notify sales manager of major status changes +IF status = "Won" OR status = "Lost" THEN + manager = QUERY "SELECT Manager FROM Users WHERE Username = '${user}'" + CALL "/comm/notifications/send", manager[0], + "Opportunity " + status + ": " + opp_data[0].company, + "The opportunity with " + opp_data[0].company + " has been marked as " + status + " by ${user}." +END IF + +RETURN "Opportunity status updated to " + status diff --git a/prompts/dev/fix.md b/prompts/dev/fix.md new file mode 100644 index 000000000..b3d8a1ccc --- /dev/null +++ b/prompts/dev/fix.md @@ -0,0 +1,36 @@ +You are fixing Rust code in a Cargo project. The user is providing problematic code that needs to be corrected. + +## Your Task +Fix ALL compiler errors and logical issues while maintaining the original intent. Return the COMPLETE corrected files as a SINGLE .sh script that can be executed from project root. +Use Cargo.toml as reference, do not change it. +Only return input files, all other files already exists. +If something, need to be added to a external file, inform it separated. + +## Critical Requirements +1. **Return as SINGLE .sh script** - Output must be a complete shell script using `cat > file << 'EOF'` pattern +2. **Include ALL files** - Every corrected file must be included in the script +3. 
**Respect Cargo.toml** - Check dependencies, editions, and features to avoid compiler errors +4. **Type safety** - Ensure all types match and trait bounds are satisfied +5. **Ownership rules** - Fix borrowing, ownership, and lifetime issues + +## Output Format Requirements +You MUST return exactly this example format: + +```sh +#!/bin/bash + +# Restore fixed Rust project + +cat > src/.rs << 'EOF' +use std::io; + +// test + +cat > src/.rs << 'EOF' +// Fixed library code +pub fn add(a: i32, b: i32) -> i32 { + a + b +} +EOF + +---- diff --git a/prompts/dev/general.md b/prompts/dev/general.md new file mode 100644 index 000000000..d847f5fcd --- /dev/null +++ b/prompts/dev/general.md @@ -0,0 +1,8 @@ +* Prefer imports over using :: to call methods. +* Output a single `.sh` script using `cat` so it can be restored directly. +* No placeholders, only real, production-ready code. +* No comments, no explanations, no extra text. +* Follow KISS principles. +* Provide a complete, professional, working solution. +* If the script is too long, split into multiple parts, but always return the **entire code**. +* Output must be **only the code**, nothing else. diff --git a/prompts/dev/model.md b/prompts/dev/model.md new file mode 100644 index 000000000..c3c462ef6 --- /dev/null +++ b/prompts/dev/model.md @@ -0,0 +1,94 @@ + +Create a Rust data model for database storage with optimal size and performance characteristics. Follow these specifications: + +**REQUIREMENTS:** +1. Use appropriate integer types (i32, i16, i8, etc.) based on expected value ranges +2. Use `Option` for nullable fields to avoid memory overhead +3. Use `Vec` for binary data instead of strings when appropriate +4. Prefer enum representations as integers rather than strings +5. Use `chrono::DateTime` for timestamps +6. Use `uuid::Uuid` for unique identifiers +7. Implement necessary traits: `Debug`, `Clone`, `Serialize`, `Deserialize`, `FromRow` +8. Include validation where appropriate +9. 
Consider database index strategy in field design + +**CONTEXT:** +- Database: PostgreSQL/SQLx compatible +- Serialization: Serde for JSON +- ORM: SQLx for database operations + +**OUTPUT FORMAT:** +Provide the complete Rust struct with: +- Struct definition with fields +- Enum definitions with integer representations +- Conversion implementations +- Basic validation if needed + +**EXAMPLE REFERENCE:** +```rust +use chrono::{DateTime, Utc}; +use serde::{Deserialize, Serialize}; +use sqlx::FromRow; +use uuid::Uuid; + +#[derive(Debug, Clone, Copy, PartialEq)] +pub enum Status { + Pending = 0, + Active = 1, + Inactive = 2, +} + +impl Status { + pub fn from_i16(value: i16) -> Option { + match value { + 0 => Some(Self::Pending), + 1 => Some(Self::Active), + 2 => Some(Self::Inactive), + _ => None, + } + } +} + +#[derive(Debug, FromRow, Serialize, Deserialize)] +pub struct User { + pub id: Uuid, + pub status: i16, // Using i16 for enum storage + pub email: String, + pub age: Option, // Nullable small integer + pub metadata: Vec, // Binary data for flexibility + pub created_at: DateTime, +} +``` + +Generate a similar model for: [YOUR DOMAIN HERE] +``` + +## Specialized Variants + +### For High-Performance Applications +``` +Add these additional requirements: +- Use `#[repr(u8)]` for enums to ensure minimal size +- Consider `Box` instead of `String` for reduced heap overhead +- Use `arrayvec::ArrayString` for fixed-size short strings +- Implement `PartialEq` and `Eq` for hash-based operations +- Include `#[derive(Default)]` where appropriate +``` + +### For Embedded/Memory-Constrained Systems +``` +Add these constraints: +- Prefer `i16` over `i32` where possible +- Use `u32` instead of `Uuid` if sequential IDs are acceptable +- Consider `bitflags` for multiple boolean flags in single byte +- Use `smol_str::SmolStr` for string optimization +- Avoid `Vec` in favor of arrays with capacity limits +``` + +### For Time-Series Data +``` +Add time-series specific optimizations: +- 
Use `i64` for timestamps as nanoseconds since epoch +- Use `f32` instead of `f64` for measurements where precision allows +- Consider `ordered_float::OrderedFloat` for floating-point comparisons +- Use `#[serde(with = "chrono::serde::ts_seconds")]` for compact serialization diff --git a/prompts/dev/service.md b/prompts/dev/service.md new file mode 100644 index 000000000..002e22868 --- /dev/null +++ b/prompts/dev/service.md @@ -0,0 +1,57 @@ +Generate a Rust service module following these patterns: + +Core Structure: + +Use actix-web for HTTP endpoints (get, post, etc.) + +Isolate shared resources (DB, clients, config) in AppState + +Split logic into reusable helper functions + +do not create main logic + +Endpoints: + +Follow REST conventions (e.g., POST /{resource}/create) use annotations in methods. + +Use web::Path for route parameters, web::Json for payloads + +Return consistent responses (e.g., HttpResponse::Ok().json(data)) + +Error Handling: + +Wrap fallible operations in Result + +Use map_err to convert errors to actix_web::Error + +Provide clear error messages (e.g., ErrorInternalServerError) + +Async Patterns: + +Use async/await for I/O (DB, external APIs) + +Leverage streams for pagination/large datasets + +Isolate blocking ops in spawn_blocking if needed + +Configuration: + +Load settings (e.g., URLs, credentials) from AppConfig + +Initialize clients (DB, SDKs) at startup (e.g., init_*() helpers) + +Documentation: + +Add brief doc comments for public functions + +Note safety assumptions (e.g., #[post] invariants) +postgres sqlx +Omit domain-specific logic (e.g., file/email details), focusing on the scaffolding." + +Key Features: + +Generic (applies to any service: auth, payments, etc.) 
+ +KISS (avoids over-engineering) + +Copy-paste friendly (clear patterns without verbosity) diff --git a/prompts/files/search-documents.bas b/prompts/files/search-documents.bas new file mode 100644 index 000000000..a8345d0af --- /dev/null +++ b/prompts/files/search-documents.bas @@ -0,0 +1,36 @@ +PARAM query AS STRING +PARAM location AS STRING OPTIONAL +PARAM file_type AS STRING OPTIONAL +PARAM date_range AS ARRAY OPTIONAL + +search_params = { + "query": query +} + +IF location IS NOT NULL THEN + search_params["location"] = location +END IF + +IF file_type IS NOT NULL THEN + search_params["file_type"] = file_type +END IF + +IF date_range IS NOT NULL THEN + search_params["created_after"] = date_range[0] + search_params["created_before"] = date_range[1] +END IF + +results = CALL "/files/search", search_params + +IF LEN(results) = 0 THEN + RETURN "No documents found matching your criteria." +END IF + +# Format results for display +formatted_results = "Found " + LEN(results) + " documents:\n\n" +FOR EACH doc IN results + formatted_results = formatted_results + "- " + doc.name + " (" + FORMAT_DATE(doc.modified) + ")\n" + formatted_results = formatted_results + " Location: " + doc.path + "\n" +NEXT + +RETURN formatted_results diff --git a/prompts/geral.bas b/prompts/geral.bas new file mode 100644 index 000000000..704f4b422 --- /dev/null +++ b/prompts/geral.bas @@ -0,0 +1,28 @@ +My Work + General + Sales Manager + Project Management + +CRM + You should use files in .gbdrive/Proposals to search proposals. + You should use table RoB present in .gbdata/Proposals to get my proposals where User is ${user} + For sales pipelines, use table Opportunities in .gbdata/Sales. + +Files + Use API endpoints under /files/* for document management. + CALL "/files/upload" uploads files to the system. + CALL "/files/search" finds relevant documents. 
+ +HR + People are in .gbdata/People + You should use files in .gbdrive/People to get resumes + Use HR_PORTAL to access employment records and policies. + +ALM + My issues are in .gbservice/forgejo + CALL "/tasks/create" creates new project tasks. + CALL "/tasks/status/update" updates existing task status. + +SETTINGS + API_KEYS stored in .gbsecure/keys + PREFERENCES in .gbdata/user-settings diff --git a/prompts/groups/create-workspace.bas b/prompts/groups/create-workspace.bas new file mode 100644 index 000000000..15d498af8 --- /dev/null +++ b/prompts/groups/create-workspace.bas @@ -0,0 +1,76 @@ +PARAM name AS STRING +PARAM members AS ARRAY +PARAM description AS STRING OPTIONAL +PARAM team_type AS STRING DEFAULT "project" + +# Create the group +group_id = CALL "/groups/create", { + "name": name, + "description": description, + "type": team_type +} + +# Add members +FOR EACH member IN members + CALL "/groups/members/add", group_id, member +NEXT + +# Create standard workspace structure +CALL "/files/createFolder", ".gbdrive/Workspaces/" + name + "/Documents" +CALL "/files/createFolder", ".gbdrive/Workspaces/" + name + "/Meetings" +CALL "/files/createFolder", ".gbdrive/Workspaces/" + name + "/Resources" + +# Create default workspace components +IF team_type = "project" THEN + # Create project board + board_id = CALL "/tasks/create", { + "title": name + " Project Board", + "description": "Task board for " + name, + "type": "project_board" + } + + # Create standard task lanes + lanes = ["Backlog", "To Do", "In Progress", "Review", "Done"] + FOR EACH lane IN lanes + CALL "/tasks/lanes/create", board_id, lane + NEXT + + # Link group to project board + CALL "/groups/settings", group_id, "project_board", board_id +END IF + +# Set up communication channel +channel_id = CALL "/conversations/create", { + "name": name, + "description": description, + "type": "group_chat" +} + +# Add all members to channel +FOR EACH member IN members + CALL "/conversations/members/add", 
channel_id, member +NEXT + +# Link group to channel +CALL "/groups/settings", group_id, "conversation", channel_id + +# Create welcome message +welcome_msg = REWRITE "Create a welcome message for a new workspace called ${name} with purpose: ${description}" + +CALL "/conversations/messages/send", channel_id, { + "text": welcome_msg, + "pinned": TRUE +} + +# Notify members +FOR EACH member IN members + CALL "/comm/notifications/send", member, + "You've been added to " + name, + "You have been added to the new workspace: " + name +NEXT + +RETURN { + "group_id": group_id, + "channel_id": channel_id, + "workspace_location": ".gbdrive/Workspaces/" + name +} diff --git a/prompts/health/system-check.bas b/prompts/health/system-check.bas new file mode 100644 index 000000000..205957920 --- /dev/null +++ b/prompts/health/system-check.bas @@ -0,0 +1,58 @@ +PARAM components AS ARRAY OPTIONAL +PARAM notify AS BOOLEAN DEFAULT TRUE + +# Check all components by default +IF components IS NULL THEN + components = ["storage", "api", "database", "integrations", "security"] +END IF + +status_report = {} + +FOR EACH component IN components + status = CALL "/health/detailed", component + status_report[component] = status +NEXT + +# Calculate overall health score +total_score = 0 +FOR EACH component IN components + total_score = total_score + status_report[component].health_score +NEXT + +overall_health = total_score / LEN(components) +status_report["overall_health"] = overall_health +status_report["timestamp"] = NOW() + +# Save status report +CALL "/storage/save", ".gbdata/health/status_" + FORMAT_DATE(NOW(), "Ymd_His") + ".json", status_report + +# Check for critical issues +critical_issues = [] +FOR EACH component IN components + IF status_report[component].health_score < 0.7 THEN + APPEND critical_issues, { + "component": component, + "score": status_report[component].health_score, + "issues": status_report[component].issues + } + END IF +NEXT + +# Notify if critical issues found +IF 
LEN(critical_issues) > 0 AND notify THEN + issue_summary = "Critical system health issues detected:\n\n" + FOR EACH issue IN critical_issues + issue_summary = issue_summary + "- " + issue.component + " (Score: " + issue.score + ")\n" + FOR EACH detail IN issue.issues + issue_summary = issue_summary + " * " + detail + "\n" + NEXT + issue_summary = issue_summary + "\n" + NEXT + + CALL "/comm/notifications/send", "admin-team", + "ALERT: System Health Issues Detected", + issue_summary, + "high" +END IF + +RETURN status_report diff --git a/prompts/marketing/add-new-idea.bas b/prompts/marketing/add-new-idea.bas new file mode 100644 index 000000000..99b60716e --- /dev/null +++ b/prompts/marketing/add-new-idea.bas @@ -0,0 +1,5 @@ + +PARAM idea as STRING +DESCRIPTION "Called when someone have an idea and wants to keep it." + +SAVE "marketing_ideas", idea, username diff --git a/prompts/scheduled/basic-check.bas b/prompts/scheduled/basic-check.bas new file mode 100644 index 000000000..f8751504f --- /dev/null +++ b/prompts/scheduled/basic-check.bas @@ -0,0 +1,51 @@ +SET SCHEDULE every 1 hour + +# Check emails +unread_emails = CALL "/comm/email/list", { + "status": "unread", + "folder": "inbox", + "max_age": "24h" +} + +# Check calendar +upcoming_events = CALL "/calendar/events/list", { + "start": NOW(), + "end": NOW() + HOURS(24) +} + +# Check tasks +due_tasks = CALL "/tasks/list", { + "status": "open", + "due_before": NOW() + HOURS(24) +} + +# Check important documents +new_documents = CALL "/files/recent", { + "folders": [".gbdrive/papers", ".gbdrive/Proposals"], + "since": NOW() - HOURS(24) +} + +# Prepare notification message +notification = "Daily Update:\n" + +IF LEN(unread_emails) > 0 THEN + notification = notification + "- You have " + LEN(unread_emails) + " unread emails\n" +END IF + +IF LEN(upcoming_events) > 0 THEN + notification = notification + "- You have " + LEN(upcoming_events) + " upcoming meetings in the next 24 hours\n" + notification = notification + " 
Next: " + upcoming_events[0].subject + " at " + FORMAT_TIME(upcoming_events[0].start) + "\n" +END IF + +IF LEN(due_tasks) > 0 THEN + notification = notification + "- You have " + LEN(due_tasks) + " tasks due in the next 24 hours\n" +END IF + +IF LEN(new_documents) > 0 THEN + notification = notification + "- " + LEN(new_documents) + " new documents have been added to your monitored folders\n" +END IF + +# Send notification +IF LEN(notification) > "Daily Update:\n" THEN + CALL "/comm/notifications/send", "${user}", "Daily Status Update", notification +END IF diff --git a/prompts/security/access-review.bas b/prompts/security/access-review.bas new file mode 100644 index 000000000..2a56c0de9 --- /dev/null +++ b/prompts/security/access-review.bas @@ -0,0 +1,63 @@ +PARAM resource_path AS STRING +PARAM review_period AS INTEGER DEFAULT 90 + +# Get current permissions +current_perms = CALL "/files/permissions", resource_path + +# Get access logs +access_logs = CALL "/security/audit/logs", { + "resource": resource_path, + "action": "access", + "timeframe": NOW() - DAYS(review_period) +} + +# Identify inactive users with access +inactive_users = [] +FOR EACH user IN current_perms + # Check if user has accessed in review period + user_logs = FILTER access_logs WHERE user_id = user.id + + IF LEN(user_logs) = 0 THEN + APPEND inactive_users, { + "user_id": user.id, + "access_level": user.access_level, + "last_access": CALL "/security/audit/logs", { + "resource": resource_path, + "action": "access", + "user_id": user.id, + "limit": 1 + } + } + END IF +NEXT + +# Generate review report +review_report = { + "resource": resource_path, + "review_date": NOW(), + "total_users_with_access": LEN(current_perms), + "inactive_users": inactive_users, + "recommendations": [] +} + +# Add recommendations +IF LEN(inactive_users) > 0 THEN + review_report.recommendations.APPEND("Remove access for " + LEN(inactive_users) + " inactive users") +END IF + +excessive_admins = FILTER current_perms WHERE 
access_level = "admin" +IF LEN(excessive_admins) > 3 THEN + review_report.recommendations.APPEND("Reduce number of admin users (currently " + LEN(excessive_admins) + ")") +END IF + +# Save review report +report_file = ".gbdata/security/access_reviews/" + REPLACE(resource_path, "/", "_") + "_" + FORMAT_DATE(NOW(), "Ymd") + ".json" +CALL "/files/save", report_file, review_report + +# Notify security team +CALL "/comm/email/send", "security-team", + "Access Review Report: " + resource_path, + "A new access review report has been generated for " + resource_path + ".", + [report_file] + +RETURN review_report diff --git a/prompts/tools/on-receive-email.bas b/prompts/tools/on-receive-email.bas new file mode 100644 index 000000000..abd0f91bc --- /dev/null +++ b/prompts/tools/on-receive-email.bas @@ -0,0 +1,59 @@ +PARAM sender AS STRING +PARAM subject AS STRING +PARAM body AS STRING + +# Get history for this sender +history = CALL "/storage/json", ".gbdata/communication_logs", "from = '${sender}' OR to = '${sender}' ORDER BY timestamp DESC LIMIT 10" + +# Check if this is a known customer +customer = CALL "/crm/customers/get", sender + +# Analyze email content +urgency = CALL "/ai/analyze/text", body, "urgency" +intent = CALL "/ai/analyze/text", body, "intent" +sentiment = CALL "/ai/analyze/text", body, "sentiment" + +# Determine if auto-reply needed +should_auto_reply = FALSE + +IF urgency.score > 0.8 THEN + should_auto_reply = TRUE +END IF + +IF customer IS NOT NULL AND customer.tier = "premium" THEN + should_auto_reply = TRUE +END IF + +IF intent.category = "support_request" THEN + # Create support ticket + ticket_id = CALL "/crm/tickets/create", { + "customer": sender, + "subject": subject, + "description": body, + "priority": urgency.score > 0.7 ? 
"High" : "Normal" + } + + should_auto_reply = TRUE + + # Notify support team + CALL "/comm/notifications/send", "support-team", + "New Support Ticket: " + subject, + "A new support ticket has been created from an email by " + sender +END IF + +IF should_auto_reply THEN + reply_template = intent.category = "support_request" ? "support_acknowledgment" : "general_acknowledgment" + + reply_text = REWRITE "Based on this email: ${body} + And this sender history: ${history} + Generate a personalized auto-reply message using the ${reply_template} style. + Include appropriate next steps and expected timeframe for response." + + CALL "/comm/email/send", "${user}", sender, "Re: " + subject, reply_text + + CALL "/storage/save", ".gbdata/auto_replies", { + "to": sender, + "subject": "Re: " + subject, + "timestamp": NOW() + } +END IF diff --git a/scripts/containers/PROMPT.md b/scripts/containers/PROMPT.md new file mode 100644 index 000000000..fc005f2d3 --- /dev/null +++ b/scripts/containers/PROMPT.md @@ -0,0 +1,16 @@ + do not comment or echo anything + + keep lines condensed + always call it not own name. Eg.: proxy instead of Caddy. alm instead of forgejo. + use KISS priciple + + use local /opt/gbo/{logs, data, conf} exposed as + HOST_BASE="/opt/gbo/tenants/$PARAM_TENANT/" + HOST_DATA="$HOST_BASE/data" + HOST_CONF="$HOST_BASE/conf" + HOST_LOGS="$HOST_BASE/logs" + instead of using app original paths. 
+ and use /opt/gbo/bin to put local binaries of installations + during sh execution, never touch files in /opt/gbo/{logs, data, conf} + use wget + use gbuser as system user \ No newline at end of file diff --git a/scripts/containers/alm-ci.sh b/scripts/containers/alm-ci.sh new file mode 100644 index 000000000..5f34c98cb --- /dev/null +++ b/scripts/containers/alm-ci.sh @@ -0,0 +1,173 @@ +#!/bin/bash + +# Configuration +ALM_CI_NAME="CI" +ALM_CI_LABELS="gbo" +FORGEJO_RUNNER_VERSION="v6.3.1" +FORGEJO_RUNNER_BINARY="forgejo-runner-6.3.1-linux-amd64" +CONTAINER_IMAGE="images:debian/12" + +# Paths +HOST_BASE="/opt/gbo/tenants/$PARAM_TENANT/alm-ci" +HOST_DATA="$HOST_BASE/data" +HOST_CONF="$HOST_BASE/conf" +HOST_LOGS="$HOST_BASE/logs" +BIN_PATH="/opt/gbo/bin" +CONTAINER_NAME="${PARAM_TENANT}-alm-ci" + +# Create host directories +mkdir -p "$HOST_DATA" "$HOST_CONF" "$HOST_LOGS" || exit 1 +chmod -R 750 "$HOST_BASE" || exit 1 + +# Launch container +if ! lxc launch "$CONTAINER_IMAGE" "$CONTAINER_NAME" -c security.privileged=true; then + echo "Failed to launch container" + exit 1 +fi + +# Wait for container to be ready +for i in {1..10}; do + if lxc exec "$CONTAINER_NAME" -- bash -c "true"; then + break + fi + sleep 3 +done + + +# Container setup +lxc exec "$CONTAINER_NAME" -- bash -c " +set -e + +useradd --system --no-create-home --shell /bin/false $CONTAINER_NAME + +# Update and install dependencies +apt-get update && apt-get install -y wget git || { echo 'Package installation failed'; exit 1; } + +sudo apt update +sudo apt install -y curl gnupg ca-certificates git +apt-get update && apt-get install -y \ +build-essential cmake git pkg-config libjpeg-dev libtiff-dev \ +libpng-dev libavcodec-dev libavformat-dev libswscale-dev \ +libv4l-dev libatlas-base-dev gfortran python3-dev cpulimit \ +expect libxtst-dev libpng-dev + +sudo apt-get install -y libcairo2-dev libpango1.0-dev libgif-dev librsvg2-dev +sudo apt install xvfb -y + +sudo apt install -y \ + libnss3 \ + libatk1.0-0 \ + 
libatk-bridge2.0-0 \ + libcups2 \ + libdrm2 \ + libxkbcommon0 \ + libxcomposite1 \ + libxdamage1 \ + libxfixes3 \ + libxrandr2 \ + libgbm1 \ + libasound2 \ + libpangocairo-1.0-0 + +export OPENCV4NODEJS_DISABLE_AUTOBUILD=1 +export OPENCV_LIB_DIR=/usr/lib/x86_64-linux-gnu + +sudo apt install -y curl gnupg ca-certificates git + +# Install Node.js 22.x +curl -fsSL https://deb.nodesource.com/setup_22.x | sudo bash - +sudo apt install -y nodejs +npm install -g pnpm@latest + +# Install rust 1.85 +apt-get install -y libssl-dev pkg-config +sudo apt-get install -y \ + apt-transport-https \ + software-properties-common \ + gnupg \ + cmake \ + build-essential \ + clang \ + libclang-dev \ + libz-dev \ + libssl-dev \ + pkg-config + +curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- --default-toolchain 1.85.1 -y +source ~/.cargo/env +rustc --version + + +# Install Xvfb and other dependencies +sudo apt install -y xvfb libgbm-dev lxd-client + +# Create directories +mkdir -p \"$BIN_PATH\" /opt/gbo/data /opt/gbo/conf /opt/gbo/logs || { echo 'Directory creation failed'; exit 1; } + +# Download and install forgejo-runner +wget -O \"$BIN_PATH/forgejo-runner\" \"https://code.forgejo.org/forgejo/runner/releases/download/$FORGEJO_RUNNER_VERSION/$FORGEJO_RUNNER_BINARY\" || { echo 'Download failed'; exit 1; } +chmod +x \"$BIN_PATH/forgejo-runner\" || { echo 'chmod failed'; exit 1; } + +cd \"$BIN_PATH\" + +# Register runner +\"$BIN_PATH/forgejo-runner\" register --no-interactive \\ + --name \"$ALM_CI_NAME\" \\ + --instance \"$PARAM_ALM_CI_INSTANCE\" \\ + --token \"$PARAM_ALM_CI_TOKEN\" \\ + --labels \"$ALM_CI_LABELS\" || { echo 'Runner registration failed'; exit 1; } + +chown -R $CONTAINER_NAME:$CONTAINER_NAME /opt/gbo/bin /opt/gbo/data /opt/gbo/conf /opt/gbo/logs + +" + +# Set permissions +echo "[CONTAINER] Setting permissions..." 
+EMAIL_UID=$(lxc exec "$PARAM_TENANT"-alm-ci -- id -u $CONTAINER_NAME) +EMAIL_GID=$(lxc exec "$PARAM_TENANT"-alm-ci -- id -g $CONTAINER_NAME) +HOST_EMAIL_UID=$((100000 + EMAIL_UID)) +HOST_EMAIL_GID=$((100000 + EMAIL_GID)) +sudo chown -R "$HOST_EMAIL_UID:$HOST_EMAIL_GID" "$HOST_BASE" + + +# Add directory mappings +lxc config device add "$CONTAINER_NAME" almdata disk source="$HOST_DATA" path=/opt/gbo/data || exit 1 +lxc config device add "$CONTAINER_NAME" almconf disk source="$HOST_CONF" path=/opt/gbo/conf || exit 1 +lxc config device add "$CONTAINER_NAME" almlogs disk source="$HOST_LOGS" path=/opt/gbo/logs || exit 1 + + +lxc exec "$CONTAINER_NAME" -- bash -c " +# Create systemd service +cat > /etc/systemd/system/alm-ci.service < /etc/systemd/system/alm.service </dev/null || true +lxc config device add "$PARAM_TENANT"-alm alm-proxy proxy \ + listen=tcp:0.0.0.0:"$PARAM_ALM_PORT" \ + connect=tcp:127.0.0.1:"$PARAM_ALM_PORT" \ No newline at end of file diff --git a/scripts/containers/bot.sh b/scripts/containers/bot.sh new file mode 100644 index 000000000..da226d591 --- /dev/null +++ b/scripts/containers/bot.sh @@ -0,0 +1,113 @@ +#!/bin/bash + +HOST_BASE="/opt/gbo/tenants/$PARAM_TENANT/bot" +HOST_DATA="$HOST_BASE/data" +HOST_CONF="$HOST_BASE/conf" +HOST_LOGS="$HOST_BASE/logs" + +mkdir -p "$HOST_DATA" "$HOST_CONF" "$HOST_LOGS" +chmod -R 750 "$HOST_BASE" + +lxc launch images:debian/12 "$PARAM_TENANT"-bot -c security.privileged=true +sleep 15 + +lxc exec "$PARAM_TENANT"-bot -- bash -c " + +apt-get update && apt-get install -y \ +build-essential cmake git pkg-config libjpeg-dev libtiff-dev \ +libpng-dev libavcodec-dev libavformat-dev libswscale-dev \ +libv4l-dev libatlas-base-dev gfortran python3-dev cpulimit \ +expect libxtst-dev libpng-dev + +sudo apt-get install -y libcairo2-dev libpango1.0-dev libgif-dev librsvg2-dev +sudo apt install xvfb -y + +sudo apt install -y \ + libnss3 \ + libatk1.0-0 \ + libatk-bridge2.0-0 \ + libcups2 \ + libdrm2 \ + libxkbcommon0 \ + 
libxcomposite1 \ + libxdamage1 \ + libxfixes3 \ + libxrandr2 \ + libgbm1 \ + libasound2 \ + libpangocairo-1.0-0 + +export OPENCV4NODEJS_DISABLE_AUTOBUILD=1 +export OPENCV_LIB_DIR=/usr/lib/x86_64-linux-gnu + +useradd --system --no-create-home --shell /bin/false gbuser +" + +BOT_UID=$(lxc exec "$PARAM_TENANT"-bot -- id -u gbuser) +BOT_GID=$(lxc exec "$PARAM_TENANT"-bot -- id -g gbuser) +HOST_BOT_UID=$((100000 + BOT_UID)) +HOST_BOT_GID=$((100000 + BOT_GID)) +chown -R "$HOST_BOT_UID:$HOST_BOT_GID" "$HOST_BASE" + +lxc config device add "$PARAM_TENANT"-bot botdata disk source="$HOST_DATA" path=/opt/gbo/data +lxc config device add "$PARAM_TENANT"-bot botconf disk source="$HOST_CONF" path=/opt/gbo/conf +lxc config device add "$PARAM_TENANT"-bot botlogs disk source="$HOST_LOGS" path=/opt/gbo/logs + +lxc exec "$PARAM_TENANT"-bot -- bash -c ' +mkdir -p /opt/gbo/data /opt/gbo/conf /opt/gbo/logs + +sudo apt update +sudo apt install -y curl gnupg ca-certificates git + +curl -fsSL https://deb.nodesource.com/setup_22.x | sudo bash - +sudo apt install -y nodejs + +sudo apt install -y xvfb libgbm-dev + +wget https://dl.google.com/linux/chrome/deb/pool/main/g/google-chrome-stable/google-chrome-stable_128.0.6613.119-1_amd64.deb +sudo apt install ./google-chrome-stable_128.0.6613.119-1_amd64.deb + +cd /opt/gbo/data +git clone https://alm.pragmatismo.com.br/generalbots/botserver.git +cd botserver +npm install + +./node_modules/.bin/tsc +cd packages/default.gbui +npm install +npm run build + +chown -R gbuser:gbuser /opt/gbo + +# Create systemd service +sudo tee /etc/systemd/system/bot.service > /dev/null </dev/null || true +lxc config device add "$PARAM_TENANT"-bot bot-proxy proxy \ + listen=tcp:0.0.0.0:"$PARAM_BOT_PORT" \ + connect=tcp:127.0.0.1:"$PARAM_BOT_PORT" diff --git a/scripts/containers/cache.sh b/scripts/containers/cache.sh new file mode 100644 index 000000000..1e78013e9 --- /dev/null +++ b/scripts/containers/cache.sh @@ -0,0 +1,7 @@ +curl -fsSL https://packages.redis.io/gpg | 
sudo gpg --dearmor -o /usr/share/keyrings/valkey.gpg +echo "deb [signed-by=/usr/share/keyrings/valkey.gpg] https://packages.redis.io/deb $(lsb_release -cs) main" | sudo tee /etc/apt/sources.list.d/valkey.list +sudo apt update +sudo apt install valkey-server + +sudo systemctl enable valkey-server +sudo systemctl start valkey-server diff --git a/scripts/containers/desktop.sh b/scripts/containers/desktop.sh new file mode 100644 index 000000000..6a3c9a4f5 --- /dev/null +++ b/scripts/containers/desktop.sh @@ -0,0 +1,47 @@ +#!/bin/bash + +HOST_BASE="/opt/gbo/tenants/$PARAM_TENANT/desktop" +HOST_DATA="$HOST_BASE/data" +HOST_CONF="$HOST_BASE/conf" +HOST_LOGS="$HOST_BASE/logs" + +mkdir -p "$HOST_DATA" "$HOST_CONF" "$HOST_LOGS" +chmod -R 750 "$HOST_BASE" + +lxc launch images:debian/12 "$PARAM_TENANT"-desktop -c security.privileged=true +sleep 15 + +lxc exec "$PARAM_TENANT"-desktop -- bash -c " + +apt-get update +apt-get install -y xvfb xrdp xfce4 xfce4-goodies +cat > /etc/xrdp/startwm.sh < /root/.xsession +chmod +x /root/.xsession + +apt install -y curl apt-transport-https gnupg +curl -s https://brave-browser-apt-release.s3.brave.com/brave-core.asc | gpg --dearmor > /usr/share/keyrings/brave-browser-archive-keyring.gpg +echo "deb [arch=amd64 signed-by=/usr/share/keyrings/brave-browser-archive-keyring.gpg] https://brave-browser-apt-release.s3.brave.com/ stable main" > /etc/apt/sources.list.d/brave-browser-release.list +apt update && apt install -y brave-browser + +sudo apt install gnome-tweaks +/etc/environment + GTK_IM_MODULE=cedilla + QT_IM_MODULE=cedilla + +" +port=3389 +lxc config device remove "$PARAM_TENANT"-desktop "port-$port" 2>/dev/null || true +lxc config device add "$PARAM_TENANT"-desktop "port-$port" proxy listen=tcp:0.0.0.0:$port connect=tcp:127.0.0.1:$port diff --git a/scripts/containers/directory.sh b/scripts/containers/directory.sh new file mode 100644 index 000000000..d6231d98e --- /dev/null +++ b/scripts/containers/directory.sh @@ -0,0 +1,67 @@ +#!/bin/bash 
+ +HOST_BASE="/opt/gbo/tenants/$PARAM_TENANT/directory" +HOST_DATA="$HOST_BASE/data" +HOST_CONF="$HOST_BASE/conf" +HOST_LOGS="$HOST_BASE/logs" + +sudo mkdir -p "$HOST_DATA" "$HOST_CONF" "$HOST_LOGS" +sudo chmod -R 750 "$HOST_BASE" + +lxc launch images:debian/12 "$PARAM_TENANT"-directory -c security.privileged=true +sleep 15 + +lxc exec "$PARAM_TENANT"-directory -- bash -c " +apt-get update && apt-get install -y wget libcap2-bin +wget -c https://github.com/zitadel/zitadel/releases/download/v2.71.2/zitadel-linux-amd64.tar.gz -O - | tar -xz -C /tmp +mkdir -p /opt/gbo/bin +mv /tmp/zitadel-linux-amd64/zitadel /opt/gbo/bin/zitadel +chmod +x /opt/gbo/bin/zitadel +sudo setcap 'cap_net_bind_service=+ep' /opt/gbo/bin/zitadel + +useradd --system --no-create-home --shell /bin/false gbuser +mkdir -p /opt/gbo/data /opt/gbo/conf /opt/gbo/logs +chown -R gbuser:gbuser /opt/gbo/data /opt/gbo/conf /opt/gbo/logs /opt/gbo/bin +" + +GBUSER_UID=$(lxc exec "$PARAM_TENANT"-directory -- id -u gbuser) +GBUSER_GID=$(lxc exec "$PARAM_TENANT"-directory -- id -g gbuser) +HOST_GBUSER_UID=$((100000 + GBUSER_UID)) +HOST_GBUSER_GID=$((100000 + GBUSER_GID)) +sudo chown -R "$HOST_GBUSER_UID:$HOST_GBUSER_GID" "$HOST_BASE" + +lxc config device add "$PARAM_TENANT"-directory directorydata disk source="$HOST_DATA" path=/opt/gbo/data +lxc config device add "$PARAM_TENANT"-directory directoryconf disk source="$HOST_CONF" path=/opt/gbo/conf +lxc config device add "$PARAM_TENANT"-directory directorylogs disk source="$HOST_LOGS" path=/opt/gbo/logs + +lxc exec "$PARAM_TENANT"-directory -- bash -c " +chown -R gbuser:gbuser /opt/gbo/data /opt/gbo/conf /opt/gbo/logs /opt/gbo/bin + +cat > /etc/systemd/system/directory.service </dev/null || true +lxc config device add "$PARAM_TENANT"-directory directory-proxy proxy \ + listen=tcp:0.0.0.0:"$PARAM_DIRECTORY_PORT" \ + connect=tcp:127.0.0.1:"$PARAM_DIRECTORY_PORT" diff --git a/scripts/containers/dns.sh b/scripts/containers/dns.sh new file mode 100644 index 
000000000..a5e3d21ce --- /dev/null +++ b/scripts/containers/dns.sh @@ -0,0 +1,88 @@ +#!/bin/bash +HOST_BASE="/opt/gbo/tenants/$PARAM_TENANT/dns" +HOST_CONF="$HOST_BASE/conf" +HOST_DATA="$HOST_BASE/data" +HOST_LOGS="$HOST_BASE/logs" +mkdir -p "$HOST_BASE" "$HOST_CONF" "$HOST_DATA" "$HOST_LOGS" +chmod -R 750 "$HOST_BASE" + +lxc network set lxdbr0 user.dns.nameservers $PARAM_DNS_INTERNAL_IP,8.8.8.8,1.1.1.1 +lxc network set lxdbr0 dns.mode managed + +# Clear existing rules +sudo iptables -F + +# Allow DNS traffic +sudo iptables -A INPUT -p udp --dport 53 -j ACCEPT +sudo iptables -A INPUT -p tcp --dport 53 -j ACCEPT +sudo iptables -A FORWARD -p udp --dport 53 -j ACCEPT +sudo iptables -A FORWARD -p tcp --dport 53 -j ACCEPT + +# Enable NAT +sudo iptables -t nat -A POSTROUTING -o eth0 -j MASQUERADE + +# Save rules (if using iptables-persistent) +sudo netfilter-persistent save + +lxc launch images:debian/12 "${PARAM_TENANT}-dns" -c security.privileged=true +until lxc exec "${PARAM_TENANT}-dns" -- true; do sleep 3; done + +lxc config device remove pragmatismo-dns dns-udp +lxc config device remove pragmatismo-dns dns-tcp + +# Forward HOST's public IP:53 → CONTAINER's 0.0.0.0:53 +lxc config device add pragmatismo-dns dns-udp proxy listen=udp:$GB_PUBLIC_IP:53 connect=udp:0.0.0.0:53 +lxc config device add pragmatismo-dns dns-tcp proxy listen=tcp:$GB_PUBLIC_IP:53 connect=tcp:0.0.0.0:53 + + +lxc exec "${PARAM_TENANT}-dns" -- bash -c " +mkdir /opt/gbo +mkdir /opt/gbo/{bin,conf,data,logs} + +echo 'nameserver 8.8.8.8' > /etc/resolv.conf + + +apt-get upgrade -y && apt-get install -y wget +wget -qO /opt/gbo/bin/coredns https://github.com/coredns/coredns/releases/download/v1.12.4/coredns_1.12.4_linux_amd64.tgz +tar -xzf /opt/gbo/bin/coredns -C /opt/gbo/bin/ +useradd --system --no-create-home --shell /bin/false gbuser +setcap cap_net_bind_service=+ep /opt/gbo/bin/coredns + + +cat > /etc/systemd/system/dns.service < /etc/apt/sources.list.d/collaboraonline.sources +Types: deb +URIs: 
https://www.collaboraoffice.com/repos/CollaboraOnline/24.04/customer-deb-$customer_hash +Suites: ./ +Signed-By: /usr/share/keyrings/collaboraonline-release-keyring.gpg +EOF + +apt update && apt install coolwsd collabora-online-brand +" + +lxc config device remove "$PARAM_TENANT"-doc-editor doc-proxy 2>/dev/null || true +lxc config device add "$PARAM_TENANT"-doc-editor doc-proxy proxy \ + listen=tcp:0.0.0.0:"$PARAM_DOC_PORT" \ + connect=tcp:127.0.0.1:9980 \ No newline at end of file diff --git a/scripts/containers/drive.sh b/scripts/containers/drive.sh new file mode 100644 index 000000000..4a9eda378 --- /dev/null +++ b/scripts/containers/drive.sh @@ -0,0 +1,60 @@ +#!/bin/bash +STORAGE_PATH="/opt/gbo/tenants/$PARAM_TENANT/drive/data" +LOGS_PATH="/opt/gbo/tenants/$PARAM_TENANT/drive/logs" + +mkdir -p "${STORAGE_PATH}" "${LOGS_PATH}" +chmod -R 770 "${STORAGE_PATH}" "${LOGS_PATH}" +chown -R 100999:100999 "${STORAGE_PATH}" "${LOGS_PATH}" + +lxc launch images:debian/12 "${PARAM_TENANT}-drive" -c security.privileged=true +sleep 15 + +lxc config device add "${PARAM_TENANT}-drive" storage disk source="${STORAGE_PATH}" path=/data +lxc config device add "${PARAM_TENANT}-drive" logs disk source="${LOGS_PATH}" path=/var/log/minio + +lxc exec "${PARAM_TENANT}-drive" -- bash -c ' +apt-get update && apt-get install -y wget +wget https://dl.min.io/server/minio/release/linux-amd64/minio -O /usr/local/bin/minio +chmod +x /usr/local/bin/minio + +wget https://dl.min.io/client/mc/release/linux-amd64/mc -O /usr/local/bin/mc +chmod +x /usr/local/bin/mc + + +useradd -r -s /bin/false minio-user || true +mkdir -p /var/log/minio /data +chown -R minio-user:minio-user /var/log/minio /data + +cat > /etc/systemd/system/minio.service </dev/null || true +lxc config device add "${PARAM_TENANT}-drive" minio-proxy proxy \ + listen=tcp:0.0.0.0:"${PARAM_DRIVE_API_PORT}" \ + connect=tcp:127.0.0.1:"${PARAM_DRIVE_API_PORT}" + +lxc config device remove "${PARAM_TENANT}-drive" console-proxy 2>/dev/null || 
true +lxc config device add "${PARAM_TENANT}-drive" console-proxy proxy \ + listen=tcp:0.0.0.0:"${PARAM_DRIVE_PORT}" \ + connect=tcp:127.0.0.1:"${PARAM_DRIVE_PORT}" diff --git a/scripts/containers/email.sh b/scripts/containers/email.sh new file mode 100644 index 000000000..94a166cee --- /dev/null +++ b/scripts/containers/email.sh @@ -0,0 +1,107 @@ +#!/bin/bash +PUBLIC_INTERFACE="eth0" # Your host's public network interface + +# Configure firewall +echo "[HOST] Configuring firewall..." +sudo iptables -A FORWARD -i $PUBLIC_INTERFACE -o lxcbr0 -p tcp -m multiport --dports 25,80,110,143,465,587,993,995,4190 -j ACCEPT +sudo iptables -A FORWARD -i lxcbr0 -o $PUBLIC_INTERFACE -m state --state RELATED,ESTABLISHED -j ACCEPT +sudo iptables -t nat -A POSTROUTING -o $PUBLIC_INTERFACE -j MASQUERADE + +# IPv6 firewall +sudo ip6tables -A FORWARD -i $PUBLIC_INTERFACE -o lxcbr0 -p tcp -m multiport --dports 25,80,110,143,465,587,993,995,4190 -j ACCEPT +sudo ip6tables -A FORWARD -i lxcbr0 -o $PUBLIC_INTERFACE -m state --state RELATED,ESTABLISHED -j ACCEPT + +# Save iptables rules permanently (adjust based on your distro) +if command -v iptables-persistent >/dev/null; then + sudo iptables-save | sudo tee /etc/iptables/rules.v4 + sudo ip6tables-save | sudo tee /etc/iptables/rules.v6 +fi + + +# ------------------------- CONTAINER SETUP ------------------------- + +# Create directory structure +echo "[CONTAINER] Creating directories..." +HOST_BASE="/opt/email" +HOST_DATA="$HOST_BASE/data" +HOST_CONF="$HOST_BASE/conf" +HOST_LOGS="$HOST_BASE/logs" + +sudo mkdir -p "$HOST_DATA" "$HOST_CONF" "$HOST_LOGS" +sudo chmod -R 750 "$HOST_BASE" + +# Launch container +echo "[CONTAINER] Launching LXC container..." +lxc launch images:debian/12 "$PARAM_TENANT"-email -c security.privileged=true +sleep 15 + +echo "[CONTAINER] Installing Stalwart Mail..." 
+lxc exec "$PARAM_TENANT"-email -- bash -c " + +echo "nameserver $PARAM_DNS_INTERNAL_IP" > /etc/resolv.conf + +apt install resolvconf -y +apt-get update && apt-get install -y wget libcap2-bin +wget -O /tmp/stalwart.tar.gz https://github.com/stalwartlabs/stalwart/releases/download/v0.13.1/stalwart-x86_64-unknown-linux-gnu.tar.gz + +tar -xzf /tmp/stalwart.tar.gz -C /tmp +mkdir -p /opt/gbo/bin +mv /tmp/stalwart /opt/gbo/bin/stalwart +chmod +x /opt/gbo/bin/stalwart +sudo setcap 'cap_net_bind_service=+ep' /opt/gbo/bin/stalwart +rm /tmp/stalwart.tar.gz + +useradd --system --no-create-home --shell /bin/false email +mkdir -p /opt/gbo/data /opt/gbo/conf /opt/gbo/logs +chown -R email:email /opt/gbo/data /opt/gbo/conf /opt/gbo/logs /opt/gbo/bin +" + +# Set permissions +echo "[CONTAINER] Setting permissions..." +EMAIL_UID=$(lxc exec "$PARAM_TENANT"-email -- id -u email) +EMAIL_GID=$(lxc exec "$PARAM_TENANT"-email -- id -g email) +HOST_EMAIL_UID=$((100000 + EMAIL_UID)) +HOST_EMAIL_GID=$((100000 + EMAIL_GID)) +sudo chown -R "$HOST_EMAIL_UID:$HOST_EMAIL_GID" "$HOST_BASE" + +# Mount directories +echo "[CONTAINER] Mounting directories..." +lxc config device add "$PARAM_TENANT"-email emaildata disk source="$HOST_DATA" path=/opt/gbo/data +lxc config device add "$PARAM_TENANT"-email emailconf disk source="$HOST_CONF" path=/opt/gbo/conf +lxc config device add "$PARAM_TENANT"-email emaillogs disk source="$HOST_LOGS" path=/opt/gbo/logs + +# Create systemd service +echo "[CONTAINER] Creating email service..." 
+lxc exec "$PARAM_TENANT"-email -- bash -c " +chown -R email:email /opt/gbo/data /opt/gbo/conf /opt/gbo/logs /opt/gbo/bin + +cat > /etc/systemd/system/email.service </dev/null || true + lxc config device add "$PARAM_TENANT"-email "port-$port" proxy \ + listen=tcp:0.0.0.0:$port \ + listen=tcp:[::]:$port \ + connect=tcp:127.0.0.1:$port +done diff --git a/scripts/containers/host.sh b/scripts/containers/host.sh new file mode 100644 index 000000000..27ad2c96c --- /dev/null +++ b/scripts/containers/host.sh @@ -0,0 +1,30 @@ +sudo apt install sshfs -y +lxc init +lxc storage create default dir +lxc profile device add default root disk path=/ pool=default + +sudo apt update && sudo apt install -y bridge-utils + +# Enable IP forwarding +echo "[HOST] Enabling IP forwarding..." +echo "net.ipv4.ip_forward=1" | sudo tee -a /etc/sysctl.conf +sudo sysctl -p + +wget https://developer.download.nvidia.com/compute/cuda/repos/ubuntu2404/x86_64/cuda-keyring_1.1-1_all.deb +sudo dpkg -i cuda-keyring_1.1-1_all.deb +sudo apt-get update + + + + + + + + +sudo apt purge '^nvidia-*' # Clean existing drivers +sudo add-apt-repository ppa:graphics-drivers/ppa +sudo apt update +sudo apt install nvidia-driver-470-server # Most stable for Kepler GPUs + +wget https://developer.download.nvidia.com/compute/cuda/11.0.3/local_installers/cuda_11.0.3_450.51.06_linux.run +sudo sh cuda_11.0.3_450.51.06_linux.run --override diff --git a/scripts/containers/llm.sh b/scripts/containers/llm.sh new file mode 100644 index 000000000..2d5668089 --- /dev/null +++ b/scripts/containers/llm.sh @@ -0,0 +1,9 @@ + +wget https://github.com/ggml-org/llama.cpp/releases/download/b6148/llama-b6148-bin-ubuntu-x64.zip + +wget https://huggingface.co/TheBloke/TinyLlama-1.1B-Chat-v1.0-GGUF/resolve/main/tinyllama-1.1b-chat-v1.0.Q4_0.gguf?download=true +wget https://huggingface.co/bartowski/DeepSeek-R1-Distill-Qwen-1.5B-GGUF/resolve/main/DeepSeek-R1-Distill-Qwen-1.5B-Q3_K_M.gguf +# Phi-3.5-mini-instruct-IQ2_M.gguf + +# ./llama-cli -m 
tinyllama-1.1b-chat-v1.0.Q4_0.gguf --reasoning-budget 0 --reasoning-format none -mli +# ./llama-cli -m DeepSeek-R1-Distill-Qwen-1.5B-Q3_K_M.gguf --system-prompt " Output as JSON: Name 3 colors and their HEX codes. Use format: [{\"name\": \"red\", \"hex\": \"#FF0000\"}]" --reasoning-budget 0 --reasoning-format none -mli diff --git a/scripts/containers/meeting.sh b/scripts/containers/meeting.sh new file mode 100644 index 000000000..192d70921 --- /dev/null +++ b/scripts/containers/meeting.sh @@ -0,0 +1,89 @@ +#!/bin/bash + +HOST_BASE="/opt/gbo/tenants/$PARAM_TENANT/meeting" +HOST_DATA="$HOST_BASE/data" +HOST_CONF="$HOST_BASE/conf" +HOST_LOGS="$HOST_BASE/logs" + +mkdir -p "$HOST_DATA" "$HOST_CONF" "$HOST_LOGS" +chmod -R 750 "$HOST_BASE" + +lxc launch images:debian/12 "$PARAM_TENANT"-meeting -c security.privileged=true +sleep 15 + +lxc exec "$PARAM_TENANT"-meeting -- bash -c " + +apt-get update && apt-get install -y wget coturn +mkdir -p /opt/gbo/bin +cd /opt/gbo/bin +wget -q https://github.com/livekit/livekit/releases/download/v1.8.4/livekit_1.8.4_linux_amd64.tar.gz +tar -xzf livekit*.tar.gz +rm livekit_1.8.4_linux_amd64.tar.gz +chmod +x livekit-server + +while netstat -tuln | grep -q \":$PARAM_MEETING_TURN_PORT \"; do + ((PARAM_MEETING_TURN_PORT++)) +done + +useradd --system --no-create-home --shell /bin/false gbuser + +" + +MEETING_UID=$(lxc exec "$PARAM_TENANT"-meeting -- id -u gbuser) +MEETING_GID=$(lxc exec "$PARAM_TENANT"-meeting -- id -g gbuser) +HOST_MEETING_UID=$((100000 + MEETING_UID)) +HOST_MEETING_GID=$((100000 + MEETING_GID)) +chown -R "$HOST_MEETING_UID:$HOST_MEETING_GID" "$HOST_BASE" + +lxc config device add "$PARAM_TENANT"-meeting meetingdata disk source="$HOST_DATA" path=/opt/gbo/data +lxc config device add "$PARAM_TENANT"-meeting meetingconf disk source="$HOST_CONF" path=/opt/gbo/conf +lxc config device add "$PARAM_TENANT"-meeting meetinglogs disk source="$HOST_LOGS" path=/opt/gbo/logs + +lxc exec "$PARAM_TENANT"-meeting -- bash -c " + +mkdir -p 
/opt/gbo/data /opt/gbo/conf /opt/gbo/logs +chown -R gbuser:gbuser /opt/gbo/data /opt/gbo/conf /opt/gbo/logs + +sudo chown gbuser:gbuser /var/run/turnserver.pid + + +cat > /etc/systemd/system/meeting.service < /etc/systemd/system/meeting-turn.service </dev/null || true +lxc config device add "$PARAM_TENANT"-meeting meeting-proxy proxy \ + listen=tcp:0.0.0.0:"$PARAM_MEETING_PORT" \ + connect=tcp:127.0.0.1:"$PARAM_MEETING_PORT" \ No newline at end of file diff --git a/scripts/containers/proxy.sh b/scripts/containers/proxy.sh new file mode 100644 index 000000000..97bbee605 --- /dev/null +++ b/scripts/containers/proxy.sh @@ -0,0 +1,56 @@ +#!/bin/bash +HOST_BASE="/opt/gbo/tenants/$PARAM_TENANT/proxy" +HOST_DATA="$HOST_BASE/data" +HOST_CONF="$HOST_BASE/conf" +HOST_LOGS="$HOST_BASE/logs" +mkdir -p "$HOST_BASE" "$HOST_DATA" "$HOST_CONF" "$HOST_LOGS" +chmod 750 "$HOST_BASE" "$HOST_DATA" "$HOST_CONF" "$HOST_LOGS" + +lxc launch images:debian/12 "$PARAM_TENANT"-proxy -c security.privileged=true +sleep 15 + +lxc exec "$PARAM_TENANT"-proxy -- bash -c " +mkdir -p /opt/gbo/{bin,data,conf,logs} +apt-get update && apt-get install -y wget libcap2-bin +wget -q https://github.com/caddyserver/caddy/releases/download/v2.10.0-beta.3/caddy_2.10.0-beta.3_linux_amd64.tar.gz +tar -xzf caddy_2.10.0-beta.3_linux_amd64.tar.gz -C /opt/gbo/bin +rm caddy_2.10.0-beta.3_linux_amd64.tar.gz +chmod 750 /opt/gbo/bin/caddy +setcap 'cap_net_bind_service=+ep' /opt/gbo/bin/caddy +useradd --create-home --system --shell /usr/sbin/nologin gbuser +chown -R gbuser:gbuser /opt/gbo/{bin,data,conf,logs} +" + +lxc config device add "$PARAM_TENANT"-proxy data disk source="$HOST_DATA" path=/opt/gbo/data +lxc config device add "$PARAM_TENANT"-proxy conf disk source="$HOST_CONF" path=/opt/gbo/conf +lxc config device add "$PARAM_TENANT"-proxy logs disk source="$HOST_LOGS" path=/opt/gbo/logs + +lxc exec "$PARAM_TENANT"-proxy -- bash -c " +cat > /etc/systemd/system/proxy.service </dev/null || true + lxc config device add 
"$PARAM_TENANT"-proxy "port-$port" proxy listen=tcp:0.0.0.0:$port connect=tcp:127.0.0.1:$port + done + +lxc config set "$PARAM_TENANT"-proxy security.syscalls.intercept.mknod true +lxc config set "$PARAM_TENANT"-proxy security.syscalls.intercept.setxattr true diff --git a/scripts/containers/social.sh b/scripts/containers/social.sh new file mode 100644 index 000000000..1db61e526 --- /dev/null +++ b/scripts/containers/social.sh @@ -0,0 +1 @@ +https://www.brasil247.com/mundo/meta-quer-automatizar-totalmente-publicidade-com-ia-ate-2026-diz-wsj \ No newline at end of file diff --git a/scripts/containers/system.sh b/scripts/containers/system.sh new file mode 100644 index 000000000..d009a8263 --- /dev/null +++ b/scripts/containers/system.sh @@ -0,0 +1,93 @@ +#!/bin/bash +HOST_BASE="/opt/gbo/tenants/$PARAM_TENANT/system" +HOST_DATA="$HOST_BASE/data" +HOST_CONF="$HOST_BASE/conf" +HOST_LOGS="$HOST_BASE/logs" +HOST_BIN="$HOST_BASE/bin" +BIN_PATH="/opt/gbo/bin" +CONTAINER_NAME="${PARAM_TENANT}-system" + +# Create host directories +mkdir -p "$HOST_DATA" "$HOST_CONF" "$HOST_LOGS" || exit 1 +chmod -R 750 "$HOST_BASE" || exit 1 + + +lxc launch images:debian/12 $CONTAINER_NAME -c security.privileged=true +sleep 15 + +lxc exec $CONTAINER_NAME -- bash -c ' + +apt-get update && apt-get install -y wget curl unzip git + + +useradd -r -s /bin/false gbuser || true +mkdir -p /opt/gbo/logs /opt/gbo/bin /opt/gbo/data /opt/gbo/conf +chown -R gbuser:gbuser /opt/gbo/ + +wget https://github.com/ggml-org/llama.cpp/releases/download/b6148/llama-b6148-bin-ubuntu-x64.zip +mkdir llm +mv llama-b6148-bin-ubuntu-x64.zip llm +cd llm +unzip llama-b6148-bin-ubuntu-x64.zip +mv build/bin/* . 
+rm build/bin -r +rm llama-b6148-bin-ubuntu-x64.zip + +wget https://huggingface.co/bartowski/DeepSeek-R1-Distill-Qwen-1.5B-GGUF/resolve/main/DeepSeek-R1-Distill-Qwen-1.5B-Q3_K_M.gguf +wget https://huggingface.co/CompendiumLabs/bge-small-en-v1.5-gguf/resolve/main/bge-small-en-v1.5-f32.gguf + +sudo curl -fsSLo /usr/share/keyrings/brave-browser-beta-archive-keyring.gpg https://brave-browser-apt-beta.s3.brave.com/brave-browser-beta-archive-keyring.gpg +sudo curl -fsSLo /etc/apt/sources.list.d/brave-browser-beta.sources https://brave-browser-apt-beta.s3.brave.com/brave-browser.sources +sudo apt update + +sudo apt install brave-browser-beta + +curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y +source "$HOME/.cargo/env" +git clone https://alm.pragmatismo.com.br/generalbots/gbserver + +apt install -y build-essential \ + pkg-config \ + libssl-dev \ + gcc-multilib \ + g++-multilib \ + clang \ + lld \ + binutils-dev \ + libudev-dev \ + libdbus-1-dev + + +cat > /etc/systemd/system/system.service </dev/null || true +lxc config device add $CONTAINER_NAME proxy proxy \ + listen=tcp:0.0.0.0:"${PARAM_SYSTEM_PORT}" \ + connect=tcp:127.0.0.1:"${PARAM_SYSTEM_PORT}" diff --git a/scripts/containers/table-editor.sh b/scripts/containers/table-editor.sh new file mode 100644 index 000000000..3231f25e3 --- /dev/null +++ b/scripts/containers/table-editor.sh @@ -0,0 +1,86 @@ + #!/bin/bash + +# Fixed container name +CONTAINER_NAME="$PARAM_TENANT-table-editor" + +TABLE_EDITOR_PORT="5757" + +# Paths +HOST_BASE="/opt/gbo/tenants/$PARAM_TENANT/table-editor" +HOST_DATA="$HOST_BASE/data" +HOST_CONF="$HOST_BASE/conf" +HOST_LOGS="$HOST_BASE/logs" +BIN_PATH="/opt/gbo/bin" + +# Create host directories +mkdir -p "$HOST_DATA" "$HOST_CONF" "$HOST_LOGS" +chmod -R 750 "$HOST_BASE" + +# Launch container +lxc launch images:debian/12 "$CONTAINER_NAME" -c security.privileged=true + +# Wait for container to be ready +sleep 10 + +# Container setup +lxc exec "$CONTAINER_NAME" -- bash -c " 
+useradd --system --no-create-home --shell /bin/false gbuser +apt-get update +apt-get install -y wget curl + +# Create directories +mkdir -p \"$BIN_PATH\" /opt/gbo/data /opt/gbo/conf /opt/gbo/logs + +# Download and install NocoDB binary +cd \"$BIN_PATH\" +curl http://get.nocodb.com/linux-x64 -o nocodb -L +chmod +x nocodb +" + +# Set permissions +TE_UID=$(lxc exec "$CONTAINER_NAME" -- id -u gbuser) +TE_GID=$(lxc exec "$CONTAINER_NAME" -- id -g gbuser) +HOST_TE_UID=$((100000 + TE_UID)) +HOST_TE_GID=$((100000 + TE_GID)) +chown -R "$HOST_TE_UID:$HOST_TE_GID" "$HOST_BASE" + +# Add directory mappings +lxc config device add "$CONTAINER_NAME" tedata disk source="$HOST_DATA" path=/opt/gbo/data +lxc config device add "$CONTAINER_NAME" teconf disk source="$HOST_CONF" path=/opt/gbo/conf +lxc config device add "$CONTAINER_NAME" telogs disk source="$HOST_LOGS" path=/opt/gbo/logs + + + + +# Create systemd service +lxc exec "$CONTAINER_NAME" -- bash -c " +cat > /etc/systemd/system/table-editor.service </dev/null; do + echo \"Waiting for PostgreSQL to start on port $PARAM_TABLES_PORT...\" + sleep 3 +done + +sudo -u postgres psql -p $PARAM_TABLES_PORT -c \"CREATE USER $PARAM_TENANT WITH PASSWORD '$PARAM_TABLES_PASSWORD';\" +sudo -u postgres psql -p $PARAM_TABLES_PORT -c \"CREATE DATABASE ${PARAM_TENANT}_db OWNER $PARAM_TENANT;\" +sudo -u postgres psql -p $PARAM_TABLES_PORT -c \"GRANT ALL PRIVILEGES ON DATABASE ${PARAM_TENANT}_db TO $PARAM_TENANT;\" + +" + + +lxc config device remove "$PARAM_TENANT"-tables postgres-proxy 2>/dev/null || true +lxc config device add "$PARAM_TENANT"-tables postgres-proxy proxy \ + listen=tcp:0.0.0.0:"$PARAM_TABLES_PORT" \ + connect=tcp:127.0.0.1:"$PARAM_TABLES_PORT" + +echo "PostgreSQL setup completed successfully!" 
#!/bin/bash
# Fetch and launch the latest Qdrant vector database release.
# set -euo pipefail: abort on download/extract failure instead of
# silently running a stale or missing binary.
set -euo pipefail

wget https://github.com/qdrant/qdrant/releases/latest/download/qdrant-x86_64-unknown-linux-gnu.tar.gz
tar -xzf qdrant-x86_64-unknown-linux-gnu.tar.gz
rm qdrant-x86_64-unknown-linux-gnu.tar.gz   # drop the archive once extracted
./qdrant
-- Tracks which recipients clicked a given campaign link.
-- One row per (campaign, email) pair; updated_at records the latest click.
CREATE TABLE clicks (
    campaign_id TEXT NOT NULL,
    email       TEXT NOT NULL,
    updated_at  TIMESTAMP DEFAULT NOW(),
    -- Promoted from a bare UNIQUE constraint: the pair is the natural key,
    -- and a primary key gives upserts (ON CONFLICT) and replication an anchor.
    PRIMARY KEY (campaign_id, email)
);
-- Migration 0003: lightweight per-user, per-bot conversation session state.
-- NOTE(review): migration 0004 re-declares user_sessions with UUID
-- user_id/bot_id foreign keys; because both use IF NOT EXISTS, whichever
-- migration runs first wins silently -- confirm which schema is intended.
CREATE TABLE IF NOT EXISTS user_sessions (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    user_id TEXT NOT NULL,                       -- external user identifier (free-form text here)
    bot_id TEXT NOT NULL,
    answer_mode TEXT NOT NULL DEFAULT 'direct',  -- routing mode, e.g. 'direct' or 'tool'
    context JSONB NOT NULL DEFAULT '{}',         -- accumulated conversation context
    current_tool TEXT,                           -- presumably the tool in progress when answer_mode = 'tool'
    created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
    updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
    UNIQUE(user_id, bot_id)                      -- one session per user/bot pair
);

-- Supports the frequent lookup-by-(user, bot) access path.
CREATE INDEX IF NOT EXISTS idx_user_sessions_user_bot ON user_sessions(user_id, bot_id);
-- Per-message conversation log; message bodies are stored encrypted
-- (column name indicates ciphertext; decryption is presumably app-side).
CREATE TABLE IF NOT EXISTS message_history (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    session_id UUID NOT NULL REFERENCES user_sessions(id) ON DELETE CASCADE,
    user_id UUID NOT NULL REFERENCES users(id) ON DELETE CASCADE,
    role VARCHAR(50) NOT NULL CHECK (role IN ('user', 'assistant', 'system')),
    content_encrypted TEXT NOT NULL,
    message_type VARCHAR(50) NOT NULL DEFAULT 'text',
    media_url TEXT,                        -- set for non-text messages (image/audio/file)
    token_count INTEGER NOT NULL DEFAULT 0,
    processing_time_ms INTEGER,            -- nullable; presumably only set for assistant turns
    llm_model VARCHAR(100),                -- which model produced the message, when applicable
    created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
    message_index INTEGER NOT NULL         -- ordinal position of the message within its session
);
-- Registry of callable tools available to bots.
CREATE TABLE IF NOT EXISTS tools (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    name VARCHAR(255) UNIQUE NOT NULL,
    description TEXT NOT NULL,              -- human/LLM-readable description of the tool
    parameters JSONB NOT NULL DEFAULT '{}', -- parameter schema / defaults
    script TEXT NOT NULL,                   -- NOTE(review): executable script body stored in the DB;
                                            -- restrict INSERT/UPDATE on this table to trusted roles
    is_active BOOLEAN DEFAULT true,
    created_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
);
-- Initial schema: application users. Passwords are stored as hashes only
-- (the seed row later in this migration uses an argon2id digest).
CREATE TABLE IF NOT EXISTS users (
    id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),  -- requires the uuid-ossp extension created above
    username VARCHAR(255) UNIQUE NOT NULL,
    email VARCHAR(255) UNIQUE NOT NULL,
    password_hash VARCHAR(255) NOT NULL,
    is_active BOOLEAN DEFAULT true,                  -- soft-disable flag instead of row deletion
    created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
    updated_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP
);
idx_user_sessions_bot_id ON user_sessions(bot_id); +CREATE INDEX IF NOT EXISTS idx_message_history_session_id ON message_history(session_id); +CREATE INDEX IF NOT EXISTS idx_message_history_user_id ON message_history(user_id); +CREATE INDEX IF NOT EXISTS idx_message_history_created_at ON message_history(created_at); + +INSERT INTO bots (id, name, llm_provider) +VALUES ('00000000-0000-0000-0000-000000000000', 'Default Bot', 'mock') +ON CONFLICT (id) DO NOTHING; + +INSERT INTO users (id, username, email, password_hash) +VALUES ('00000000-0000-0000-0000-000000000001', 'demo', 'demo@example.com', '$argon2id$v=19$m=19456,t=2,p=1$c29tZXNhbHQ$RdescudvJCsgt3ub+b+dWRWJTmaaJObG') +ON CONFLICT (id) DO NOTHING; diff --git a/scripts/dev/llm_context.txt b/scripts/dev/llm_context.txt new file mode 100644 index 000000000..d10432985 --- /dev/null +++ b/scripts/dev/llm_context.txt @@ -0,0 +1,2625 @@ +Consolidated LLM Context +* Preffer imports than using :: to call methods, +* Output a single `.sh` script using `cat` so it can be restored directly. +* No placeholders, only real, production-ready code. +* No comments, no explanations, no extra text. +* Follow KISS principles. +* Provide a complete, professional, working solution. +* If the script is too long, split into multiple parts, but always return the **entire code**. +* Output must be **only the code**, nothing else. 
+ +[package] +name = "gbserver" +version = "0.1.0" +edition = "2021" +authors = ["Rodrigo Rodriguez "] +description = "General Bots Server" +license = "AGPL-3.0" +repository = "https://alm.pragmatismo.com.br/generalbots/gbserver" + +[features] +default = ["postgres", "qdrant"] +local_llm = [] +postgres = ["sqlx/postgres"] +qdrant = ["langchain-rust/qdrant"] + +[dependencies] +actix-cors = "0.7" +actix-multipart = "0.7" +actix-web = "4.9" +actix-ws = "0.3" +anyhow = "1.0" +async-stream = "0.3" +async-trait = "0.1" +aes-gcm = "0.10" +argon2 = "0.5" +base64 = "0.22" +bytes = "1.8" +chrono = { version = "0.4", features = ["serde"] } +dotenv = "0.15" +downloader = "0.2" +env_logger = "0.11" +futures = "0.3" +futures-util = "0.3" +imap = "2.4" +langchain-rust = { version = "4.6", features = ["qdrant", "postgres"] } +lettre = { version = "0.11", features = ["smtp-transport", "builder", "tokio1", "tokio1-native-tls"] } +livekit = "0.7" +log = "0.4" +mailparse = "0.15" +minio = { git = "https://github.com/minio/minio-rs", branch = "master" } +native-tls = "0.2" +num-format = "0.4" +qdrant-client = "1.12" +rhai = "1.22" +redis = { version = "0.27", features = ["tokio-comp"] } +regex = "1.11" +reqwest = { version = "0.12", features = ["json", "stream"] } +scraper = "0.20" +serde = { version = "1.0", features = ["derive"] } +serde_json = "1.0" +smartstring = "1.0" +sqlx = { version = "0.8", features = ["time", "uuid", "runtime-tokio-rustls", "postgres", "chrono"] } +tempfile = "3" +thirtyfour = "0.34" +tokio = { version = "1.41", features = ["full"] } +tokio-stream = "0.1" +tracing = "0.1" +tracing-subscriber = { version = "0.3", features = ["fmt"] } +urlencoding = "2.1" +uuid = { version = "1.11", features = ["serde", "v4"] } +zip = "2.2" + +You are fixing Rust code in a Cargo project. The user is providing problematic code that needs to be corrected. + +## Your Task +Fix ALL compiler errors and logical issues while maintaining the original intent. 
Return the COMPLETE corrected files as a SINGLE .sh script that can be executed from project root. +Use Cargo.toml as reference, do not change it. +Only return input files, all other files already exists. +If something, need to be added to a external file, inform it separated. + +## Critical Requirements +1. **Return as SINGLE .sh script** - Output must be a complete shell script using `cat > file << 'EOF'` pattern +2. **Include ALL files** - Every corrected file must be included in the script +3. **Respect Cargo.toml** - Check dependencies, editions, and features to avoid compiler errors +4. **Type safety** - Ensure all types match and trait bounds are satisfied +5. **Ownership rules** - Fix borrowing, ownership, and lifetime issues + +## Output Format Requirements +You MUST return exactly this example format: + +```sh +#!/bin/bash + +# Restore fixed Rust project + +cat > src/.rs << 'EOF' +use std::io; + +// test + +cat > src/.rs << 'EOF' +// Fixed library code +pub fn add(a: i32, b: i32) -> i32 { + a + b +} +EOF + +---- + +use std::sync::Arc; + +use minio::s3::Client; + +use crate::{config::AppConfig, web_automator::BrowserPool}; + +#[derive(Clone)] +pub struct AppState { + pub minio_client: Option, + pub config: Option, + pub db: Option, + pub db_custom: Option, + pub browser_pool: Arc, + pub orchestrator: Arc, + pub web_adapter: Arc, + pub voice_adapter: Arc, + pub whatsapp_adapter: Arc, + tool_api: Arc, // Add this +} +pub struct _BotState { + pub language: String, + pub work_folder: String, +} + +use crate::config::AIConfig; +use langchain_rust::llm::OpenAI; +use langchain_rust::{language_models::llm::LLM, llm::AzureConfig}; +use log::error; +use log::{debug, warn}; +use rhai::{Array, Dynamic}; +use serde_json::{json, Value}; +use smartstring::SmartString; +use sqlx::Column; // Required for .name() method +use sqlx::TypeInfo; // Required for .type_info() method +use sqlx::{postgres::PgRow, Row}; +use sqlx::{Decode, Type}; +use std::error::Error; +use 
std::fs::File; +use std::io::BufReader; +use std::path::Path; +use tokio::fs::File as TokioFile; +use tokio_stream::StreamExt; +use zip::ZipArchive; + +use reqwest::Client; +use tokio::io::AsyncWriteExt; + +pub fn azure_from_config(config: &AIConfig) -> AzureConfig { + AzureConfig::new() + .with_api_base(&config.endpoint) + .with_api_key(&config.key) + .with_api_version(&config.version) + .with_deployment_id(&config.instance) +} + +pub async fn call_llm( + text: &str, + ai_config: &AIConfig, +) -> Result> { + let azure_config = azure_from_config(&ai_config.clone()); + let open_ai = OpenAI::new(azure_config); + + // Directly use the input text as prompt + let prompt = text.to_string(); + + // Call LLM and return the raw text response + match open_ai.invoke(&prompt).await { + Ok(response_text) => Ok(response_text), + Err(err) => { + error!("Error invoking LLM API: {}", err); + Err(Box::new(std::io::Error::new( + std::io::ErrorKind::Other, + "Failed to invoke LLM API", + ))) + } + } +} + +pub fn extract_zip_recursive( + zip_path: &Path, + destination_path: &Path, +) -> Result<(), Box> { + let file = File::open(zip_path)?; + let buf_reader = BufReader::new(file); + let mut archive = ZipArchive::new(buf_reader)?; + + for i in 0..archive.len() { + let mut file = archive.by_index(i)?; + let outpath = destination_path.join(file.mangled_name()); + + if file.is_dir() { + std::fs::create_dir_all(&outpath)?; + } else { + if let Some(parent) = outpath.parent() { + if !parent.exists() { + std::fs::create_dir_all(&parent)?; + } + } + let mut outfile = File::create(&outpath)?; + std::io::copy(&mut file, &mut outfile)?; + } + } + + Ok(()) +} +pub fn row_to_json(row: PgRow) -> Result> { + let mut result = serde_json::Map::new(); + let columns = row.columns(); + debug!("Converting row with {} columns", columns.len()); + + for (i, column) in columns.iter().enumerate() { + let column_name = column.name(); + let type_name = column.type_info().name(); + + let value = match type_name { + 
"INT4" | "int4" => handle_nullable_type::(&row, i, column_name), + "INT8" | "int8" => handle_nullable_type::(&row, i, column_name), + "FLOAT4" | "float4" => handle_nullable_type::(&row, i, column_name), + "FLOAT8" | "float8" => handle_nullable_type::(&row, i, column_name), + "TEXT" | "VARCHAR" | "text" | "varchar" => { + handle_nullable_type::(&row, i, column_name) + } + "BOOL" | "bool" => handle_nullable_type::(&row, i, column_name), + "JSON" | "JSONB" | "json" | "jsonb" => handle_json(&row, i, column_name), + _ => { + warn!("Unknown type {} for column {}", type_name, column_name); + handle_nullable_type::(&row, i, column_name) + } + }; + + result.insert(column_name.to_string(), value); + } + + Ok(Value::Object(result)) +} + +fn handle_nullable_type<'r, T>(row: &'r PgRow, idx: usize, col_name: &str) -> Value +where + T: Type + Decode<'r, sqlx::Postgres> + serde::Serialize + std::fmt::Debug, +{ + match row.try_get::, _>(idx) { + Ok(Some(val)) => { + debug!("Successfully read column {} as {:?}", col_name, val); + json!(val) + } + Ok(None) => { + debug!("Column {} is NULL", col_name); + Value::Null + } + Err(e) => { + warn!("Failed to read column {}: {}", col_name, e); + Value::Null + } + } +} + +fn handle_json(row: &PgRow, idx: usize, col_name: &str) -> Value { + // First try to get as Option + match row.try_get::, _>(idx) { + Ok(Some(val)) => { + debug!("Successfully read JSON column {} as Value", col_name); + return val; + } + Ok(None) => return Value::Null, + Err(_) => (), // Fall through to other attempts + } + + // Try as Option that might contain JSON + match row.try_get::, _>(idx) { + Ok(Some(s)) => match serde_json::from_str(&s) { + Ok(val) => val, + Err(_) => { + debug!("Column {} contains string that's not JSON", col_name); + json!(s) + } + }, + Ok(None) => Value::Null, + Err(e) => { + warn!("Failed to read JSON column {}: {}", col_name, e); + Value::Null + } + } +} +pub fn json_value_to_dynamic(value: &Value) -> Dynamic { + match value { + Value::Null => 
Dynamic::UNIT, + Value::Bool(b) => Dynamic::from(*b), + Value::Number(n) => { + if let Some(i) = n.as_i64() { + Dynamic::from(i) + } else if let Some(f) = n.as_f64() { + Dynamic::from(f) + } else { + Dynamic::UNIT + } + } + Value::String(s) => Dynamic::from(s.clone()), + Value::Array(arr) => Dynamic::from( + arr.iter() + .map(json_value_to_dynamic) + .collect::(), + ), + Value::Object(obj) => Dynamic::from( + obj.iter() + .map(|(k, v)| (SmartString::from(k), json_value_to_dynamic(v))) + .collect::(), + ), + } +} + +/// Converts any value to an array - single values become single-element arrays +pub fn to_array(value: Dynamic) -> Array { + if value.is_array() { + // Already an array - return as-is + value.cast::() + } else if value.is_unit() || value.is::<()>() { + // Handle empty/unit case + Array::new() + } else { + // Convert single value to single-element array + Array::from([value]) + } +} + +pub async fn download_file(url: &str, output_path: &str) -> Result<(), Box> { + let client = Client::new(); + let response = client.get(url).send().await?; + + if response.status().is_success() { + let mut file = TokioFile::create(output_path).await?; + + let mut stream = response.bytes_stream(); + + while let Some(chunk) = stream.next().await { + file.write_all(&chunk?).await?; + } + debug!("File downloaded successfully to {}", output_path); + } else { + return Err("Failed to download file".into()); + } + + Ok(()) +} + +// Helper function to parse the filter string into SQL WHERE clause and parameters +pub fn parse_filter(filter_str: &str) -> Result<(String, Vec), Box> { + let parts: Vec<&str> = filter_str.split('=').collect(); + if parts.len() != 2 { + return Err("Invalid filter format. 
/// Parses a filter string like `"col1=v1&col2=v2"` into a parameterized SQL
/// WHERE fragment (`"col1 = $1 AND col2 = $2"`) plus the ordered bind values.
///
/// `offset` shifts the `$n` placeholder numbering so the clause can be
/// appended after pre-existing bind parameters.
///
/// Column names are restricted to `[A-Za-z0-9_]` to prevent SQL injection;
/// values are returned raw for parameterized binding. Values may themselves
/// contain `=` (only the first `=` of each condition splits key from value).
///
/// # Errors
/// Returns an error when a condition has no `=`, or when a column name is
/// empty or contains disallowed characters.
pub fn parse_filter_with_offset(
    filter_str: &str,
    offset: usize,
) -> Result<(String, Vec<String>), Box<dyn Error>> {
    let mut clauses = Vec::new();
    let mut params = Vec::new();

    for (i, condition) in filter_str.split('&').enumerate() {
        // splitn(2, '=') keeps any further '=' inside the value (e.g. base64).
        let mut parts = condition.splitn(2, '=');
        let column = parts.next().unwrap_or("").trim();
        let value = match parts.next() {
            Some(v) => v.trim(),
            None => return Err("Invalid filter format".into()),
        };

        // Reject empty names too: the old check accepted "" and emitted " = $1".
        if column.is_empty()
            || !column.chars().all(|c| c.is_ascii_alphanumeric() || c == '_')
        {
            return Err("Invalid column name".into());
        }

        clauses.push(format!("{} = ${}", column, i + 1 + offset));
        params.push(value.to_string()); // raw value; quoting is the driver's job
    }

    Ok((clauses.join(" AND "), params))
}
/// Discriminates what fires a `system_automations` row.
///
/// The explicit discriminants mirror the integer `kind` column in the database.
#[derive(Debug, Clone, Copy, PartialEq)]
pub enum TriggerKind {
    Scheduled = 0,
    TableUpdate = 1,
    TableInsert = 2,
    TableDelete = 3,
}

impl TriggerKind {
    /// Maps a raw database `kind` value back to a `TriggerKind`,
    /// returning `None` for out-of-range values (including negatives).
    pub fn from_i32(value: i32) -> Option<Self> {
        // Variants listed in discriminant order, so the value doubles as an index.
        const ORDERED: [TriggerKind; 4] = [
            TriggerKind::Scheduled,
            TriggerKind::TableUpdate,
            TriggerKind::TableInsert,
            TriggerKind::TableDelete,
        ];
        usize::try_from(value).ok().and_then(|i| ORDERED.get(i).copied())
    }
}
stream_token: Option, + pub is_complete: bool, +} + +use crate::{shared::state::AppState, whatsapp}; +use actix_web::{web, HttpRequest, HttpResponse, Result}; +use actix_ws::Message as WsMessage; +use chrono::Utc; +use langchain_rust::{ + chain::{Chain, LLMChain}, + llm::openai::OpenAI, + memory::SimpleMemory, + prompt_args, + tools::{postgres::PostgreSQLEngine, SQLDatabaseBuilder}, + vectorstore::qdrant::Qdrant as LangChainQdrant, + vectorstore::{VecStoreOptions, VectorStore}, +}; +use log::info; +use serde_json; +use std::collections::HashMap; +use std::fs; +use std::sync::Arc; +use tokio::sync::{mpsc, Mutex}; +use uuid::Uuid; + +use crate::channels::{ChannelAdapter, VoiceAdapter, WebChannelAdapter}; +use crate::chart::ChartGenerator; +use crate::llm::LLMProvider; +use crate::session::SessionManager; +use crate::tools::ToolManager; +use crate::whatsapp::WhatsAppAdapter; +use crate::{ + auth::AuthService, + shared::{BotResponse, UserMessage, UserSession}, +}; +pub struct BotOrchestrator { + session_manager: SessionManager, + tool_manager: ToolManager, + llm_provider: Arc, + auth_service: AuthService, + channels: HashMap>, + response_channels: Arc>>>, + chart_generator: Option>, + vector_store: Option>, + sql_chain: Option>, +} + +impl BotOrchestrator { + pub fn new( + session_manager: SessionManager, + tool_manager: ToolManager, + llm_provider: Arc, + auth_service: AuthService, + chart_generator: Option>, + vector_store: Option>, + sql_chain: Option>, + ) -> Self { + Self { + session_manager, + tool_manager, + llm_provider, + auth_service, + channels: HashMap::new(), + response_channels: Arc::new(Mutex::new(HashMap::new())), + chart_generator, + vector_store, + sql_chain, + } + } + + pub fn add_channel(&mut self, channel_type: &str, adapter: Arc) { + self.channels.insert(channel_type.to_string(), adapter); + } + + pub async fn register_response_channel( + &self, + session_id: String, + sender: mpsc::Sender, + ) { + self.response_channels + .lock() + .await + 
.insert(session_id, sender); + } + + pub async fn set_user_answer_mode( + &self, + user_id: &str, + bot_id: &str, + mode: &str, + ) -> Result<(), Box> { + self.session_manager + .update_answer_mode(user_id, bot_id, mode) + .await?; + Ok(()) + } + + pub async fn process_message( + &self, + message: UserMessage, + ) -> Result<(), Box> { + info!( + "Processing message from channel: {}, user: {}", + message.channel, message.user_id + ); + + let user_id = Uuid::parse_str(&message.user_id).unwrap_or_else(|_| Uuid::new_v4()); + let bot_id = Uuid::parse_str(&message.bot_id) + .unwrap_or_else(|_| Uuid::parse_str("00000000-0000-0000-0000-000000000000").unwrap()); + + let session = match self + .session_manager + .get_user_session(user_id, bot_id) + .await? + { + Some(session) => session, + None => { + self.session_manager + .create_session(user_id, bot_id, "New Conversation") + .await? + } + }; + + if session.answer_mode == "tool" && session.current_tool.is_some() { + self.tool_manager + .provide_user_response(&message.user_id, &message.bot_id, message.content.clone()) + .await?; + return Ok(()); + } + + self.session_manager + .save_message( + session.id, + user_id, + "user", + &message.content, + &message.message_type, + ) + .await?; + + let response_content = match session.answer_mode.as_str() { + "document" => self.document_mode_handler(&message, &session).await?, + "chart" => self.chart_mode_handler(&message, &session).await?, + "database" => self.database_mode_handler(&message, &session).await?, + "tool" => self.tool_mode_handler(&message, &session).await?, + _ => self.direct_mode_handler(&message, &session).await?, + }; + + self.session_manager + .save_message(session.id, user_id, "assistant", &response_content, "text") + .await?; + + let bot_response = BotResponse { + bot_id: message.bot_id, + user_id: message.user_id, + session_id: message.session_id, + channel: message.channel, + content: response_content, + message_type: "text".to_string(), + stream_token: None, + 
is_complete: true, + }; + + if let Some(adapter) = self.channels.get(&message.channel) { + adapter.send_message(bot_response).await?; + } + + Ok(()) + } + + async fn document_mode_handler( + &self, + message: &UserMessage, + session: &UserSession, + ) -> Result> { + if let Some(vector_store) = &self.vector_store { + let similar_docs = vector_store + .similarity_search(&message.content, 3, &VecStoreOptions::default()) + .await?; + + let mut enhanced_prompt = format!("User question: {}\n\n", message.content); + + if !similar_docs.is_empty() { + enhanced_prompt.push_str("Relevant documents:\n"); + for (i, doc) in similar_docs.iter().enumerate() { + enhanced_prompt.push_str(&format!("[Doc {}]: {}\n", i + 1, doc.page_content)); + } + enhanced_prompt.push_str( + "\nPlease answer the user's question based on the provided documents.", + ); + } + + self.llm_provider + .generate(&enhanced_prompt, &serde_json::Value::Null) + .await + } else { + self.direct_mode_handler(message, session).await + } + } + + async fn chart_mode_handler( + &self, + message: &UserMessage, + session: &UserSession, + ) -> Result> { + if let Some(chart_generator) = &self.chart_generator { + let chart_response = chart_generator + .generate_chart(&message.content, "bar") + .await?; + + self.session_manager + .save_message( + session.id, + session.user_id, + "system", + &format!("Generated chart for query: {}", message.content), + "chart", + ) + .await?; + + Ok(format!( + "Chart generated for your query. Data retrieved: {}", + chart_response.sql_query + )) + } else { + self.document_mode_handler(message, session).await + } + } + + async fn database_mode_handler( + &self, + message: &UserMessage, + _session: &UserSession, + ) -> Result> { + if let Some(sql_chain) = &self.sql_chain { + let input_variables = prompt_args! 
{ + "input" => message.content, + }; + + let result = sql_chain.invoke(input_variables).await?; + Ok(result.to_string()) + } else { + let db_url = std::env::var("DATABASE_URL")?; + let engine = PostgreSQLEngine::new(&db_url).await?; + let db = SQLDatabaseBuilder::new(engine).build().await?; + + let llm = OpenAI::default(); + let chain = langchain_rust::chain::SQLDatabaseChainBuilder::new() + .llm(llm) + .top_k(5) + .database(db) + .build()?; + + let input_variables = chain.prompt_builder().query(&message.content).build(); + let result = chain.invoke(input_variables).await?; + + Ok(result.to_string()) + } + } + + async fn tool_mode_handler( + &self, + message: &UserMessage, + _session: &UserSession, + ) -> Result> { + if message.content.to_lowercase().contains("calculator") { + if let Some(_adapter) = self.channels.get(&message.channel) { + let (tx, _rx) = mpsc::channel(100); + + self.register_response_channel(message.session_id.clone(), tx.clone()) + .await; + + let tool_manager = self.tool_manager.clone(); + let user_id_str = message.user_id.clone(); + let bot_id_str = message.bot_id.clone(); + let session_manager = self.session_manager.clone(); + + tokio::spawn(async move { + let _ = tool_manager + .execute_tool("calculator", &user_id_str, &bot_id_str, session_manager, tx) + .await; + }); + } + Ok("Starting calculator tool...".to_string()) + } else { + let available_tools = self.tool_manager.list_tools(); + let tools_context = if !available_tools.is_empty() { + format!("\n\nAvailable tools: {}. 
If the user needs calculations, suggest using the calculator tool.", available_tools.join(", ")) + } else { + String::new() + }; + + let full_prompt = format!("{}{}", message.content, tools_context); + + self.llm_provider + .generate(&full_prompt, &serde_json::Value::Null) + .await + } + } + + async fn direct_mode_handler( + &self, + message: &UserMessage, + session: &UserSession, + ) -> Result> { + let history = self + .session_manager + .get_conversation_history(session.id, session.user_id) + .await?; + + let mut memory = SimpleMemory::new(); + for (role, content) in history { + match role.as_str() { + "user" => memory.add_user_message(&content), + "assistant" => memory.add_ai_message(&content), + _ => {} + } + } + + let mut prompt = String::new(); + if let Some(chat_history) = memory.get_chat_history() { + for message in chat_history { + prompt.push_str(&format!( + "{}: {}\n", + message.message_type(), + message.content() + )); + } + } + prompt.push_str(&format!("User: {}\nAssistant:", message.content)); + + self.llm_provider + .generate(&prompt, &serde_json::Value::Null) + .await + } + + pub async fn stream_response( + &self, + message: UserMessage, + mut response_tx: mpsc::Sender, + ) -> Result<(), Box> { + info!("Streaming response for user: {}", message.user_id); + + let user_id = Uuid::parse_str(&message.user_id).unwrap_or_else(|_| Uuid::new_v4()); + let bot_id = Uuid::parse_str(&message.bot_id) + .unwrap_or_else(|_| Uuid::parse_str("00000000-0000-0000-0000-000000000000").unwrap()); + + let session = match self + .session_manager + .get_user_session(user_id, bot_id) + .await? + { + Some(session) => session, + None => { + self.session_manager + .create_session(user_id, bot_id, "New Conversation") + .await? 
+ } + }; + + if session.answer_mode == "tool" && session.current_tool.is_some() { + self.tool_manager + .provide_user_response(&message.user_id, &message.bot_id, message.content.clone()) + .await?; + return Ok(()); + } + + self.session_manager + .save_message( + session.id, + user_id, + "user", + &message.content, + &message.message_type, + ) + .await?; + + let history = self + .session_manager + .get_conversation_history(session.id, user_id) + .await?; + + let mut memory = SimpleMemory::new(); + for (role, content) in history { + match role.as_str() { + "user" => memory.add_user_message(&content), + "assistant" => memory.add_ai_message(&content), + _ => {} + } + } + + let mut prompt = String::new(); + if let Some(chat_history) = memory.get_chat_history() { + for message in chat_history { + prompt.push_str(&format!( + "{}: {}\n", + message.message_type(), + message.content() + )); + } + } + prompt.push_str(&format!("User: {}\nAssistant:", message.content)); + + let (stream_tx, mut stream_rx) = mpsc::channel(100); + let llm_provider = self.llm_provider.clone(); + let prompt_clone = prompt.clone(); + + tokio::spawn(async move { + let _ = llm_provider + .generate_stream(&prompt_clone, &serde_json::Value::Null, stream_tx) + .await; + }); + + let mut full_response = String::new(); + while let Some(chunk) = stream_rx.recv().await { + full_response.push_str(&chunk); + + let bot_response = BotResponse { + bot_id: message.bot_id.clone(), + user_id: message.user_id.clone(), + session_id: message.session_id.clone(), + channel: message.channel.clone(), + content: chunk, + message_type: "text".to_string(), + stream_token: None, + is_complete: false, + }; + + if response_tx.send(bot_response).await.is_err() { + break; + } + } + + self.session_manager + .save_message(session.id, user_id, "assistant", &full_response, "text") + .await?; + + let final_response = BotResponse { + bot_id: message.bot_id, + user_id: message.user_id, + session_id: message.session_id, + channel: 
message.channel, + content: "".to_string(), + message_type: "text".to_string(), + stream_token: None, + is_complete: true, + }; + + response_tx.send(final_response).await?; + Ok(()) + } + + pub async fn get_user_sessions( + &self, + user_id: Uuid, + ) -> Result, Box> { + self.session_manager.get_user_sessions(user_id).await + } + + pub async fn get_conversation_history( + &self, + session_id: Uuid, + user_id: Uuid, + ) -> Result, Box> { + self.session_manager + .get_conversation_history(session_id, user_id) + .await + } + + pub async fn process_message_with_tools( + &self, + message: UserMessage, + ) -> Result<(), Box> { + info!( + "Processing message with tools from user: {}", + message.user_id + ); + + let user_id = Uuid::parse_str(&message.user_id).unwrap_or_else(|_| Uuid::new_v4()); + let bot_id = Uuid::parse_str(&message.bot_id) + .unwrap_or_else(|_| Uuid::parse_str("00000000-0000-0000-0000-000000000000").unwrap()); + + let session = match self + .session_manager + .get_user_session(user_id, bot_id) + .await? + { + Some(session) => session, + None => { + self.session_manager + .create_session(user_id, bot_id, "New Conversation") + .await? 
+ } + }; + + self.session_manager + .save_message( + session.id, + user_id, + "user", + &message.content, + &message.message_type, + ) + .await?; + + let is_tool_waiting = self + .tool_manager + .is_tool_waiting(&message.session_id) + .await + .unwrap_or(false); + + if is_tool_waiting { + self.tool_manager + .provide_input(&message.session_id, &message.content) + .await?; + + if let Ok(tool_output) = self.tool_manager.get_tool_output(&message.session_id).await { + for output in tool_output { + let bot_response = BotResponse { + bot_id: message.bot_id.clone(), + user_id: message.user_id.clone(), + session_id: message.session_id.clone(), + channel: message.channel.clone(), + content: output, + message_type: "text".to_string(), + stream_token: None, + is_complete: true, + }; + + if let Some(adapter) = self.channels.get(&message.channel) { + adapter.send_message(bot_response).await?; + } + } + } + return Ok(()); + } + + let available_tools = self.tool_manager.list_tools(); + let tools_context = if !available_tools.is_empty() { + format!("\n\nAvailable tools: {}. If the user needs calculations, suggest using the calculator tool.", available_tools.join(", ")) + } else { + String::new() + }; + + let full_prompt = format!("{}{}", message.content, tools_context); + + let response = if message.content.to_lowercase().contains("calculator") + || message.content.to_lowercase().contains("calculate") + || message.content.to_lowercase().contains("math") + { + match self + .tool_manager + .execute_tool("calculator", &message.session_id, &message.user_id) + .await + { + Ok(tool_result) => { + self.session_manager + .save_message( + session.id, + user_id, + "assistant", + &tool_result.output, + "tool_start", + ) + .await?; + + tool_result.output + } + Err(e) => { + format!("I encountered an error starting the calculator: {}", e) + } + } + } else { + self.llm_provider + .generate(&full_prompt, &serde_json::Value::Null) + .await? 
+ }; + + self.session_manager + .save_message(session.id, user_id, "assistant", &response, "text") + .await?; + + let bot_response = BotResponse { + bot_id: message.bot_id, + user_id: message.user_id, + session_id: message.session_id, + channel: message.channel, + content: response, + message_type: "text".to_string(), + stream_token: None, + is_complete: true, + }; + + if let Some(adapter) = self.channels.get(&message.channel) { + adapter.send_message(bot_response).await?; + } + + Ok(()) + } + + async fn tool_mode_handler( + &self, + message: &UserMessage, + _session: &UserSession, + ) -> Result> { + if message.content.to_lowercase().contains("calculator") { + if let Some(_adapter) = self.channels.get(&message.channel) { + let (tx, _rx) = mpsc::channel(100); + + self.register_response_channel(message.session_id.clone(), tx.clone()) + .await; + + let tool_manager = self.tool_manager.clone(); + let user_id_str = message.user_id.clone(); + let bot_id_str = message.bot_id.clone(); + let session_manager = self.session_manager.clone(); + + tokio::spawn(async move { + let _ = tool_manager + .execute_tool_with_session( + "calculator", + &user_id_str, + &bot_id_str, + session_manager, + tx, + ) + .await; + }); + } + Ok("Starting calculator tool...".to_string()) + } else { + let available_tools = self.tool_manager.list_tools(); + let tools_context = if !available_tools.is_empty() { + format!("\n\nAvailable tools: {}. 
If the user needs calculations, suggest using the calculator tool.", available_tools.join(", ")) + } else { + String::new() + }; + + let full_prompt = format!("{}{}", message.content, tools_context); + + self.llm_provider + .generate(&full_prompt, &serde_json::Value::Null) + .await + } + } + + // Fix the process_message_with_tools method + pub async fn process_message_with_tools( + &self, + message: UserMessage, + ) -> Result<(), Box> { + info!( + "Processing message with tools from user: {}", + message.user_id + ); + + let user_id = Uuid::parse_str(&message.user_id).unwrap_or_else(|_| Uuid::new_v4()); + let bot_id = Uuid::parse_str(&message.bot_id) + .unwrap_or_else(|_| Uuid::parse_str("00000000-0000-0000-0000-000000000000").unwrap()); + + let session = match self + .session_manager + .get_user_session(user_id, bot_id) + .await? + { + Some(session) => session, + None => { + self.session_manager + .create_session(user_id, bot_id, "New Conversation") + .await? + } + }; + + self.session_manager + .save_message( + session.id, + user_id, + "user", + &message.content, + &message.message_type, + ) + .await?; + + let is_tool_waiting = self + .tool_manager + .is_tool_waiting(&message.session_id) + .await + .unwrap_or(false); + + if is_tool_waiting { + self.tool_manager + .provide_input(&message.session_id, &message.content) + .await?; + + if let Ok(tool_output) = self.tool_manager.get_tool_output(&message.session_id).await { + for output in tool_output { + let bot_response = BotResponse { + bot_id: message.bot_id.clone(), + user_id: message.user_id.clone(), + session_id: message.session_id.clone(), + channel: message.channel.clone(), + content: output, + message_type: "text".to_string(), + stream_token: None, + is_complete: true, + }; + + if let Some(adapter) = self.channels.get(&message.channel) { + adapter.send_message(bot_response).await?; + } + } + } + return Ok(()); + } + + let response = if message.content.to_lowercase().contains("calculator") + || 
message.content.to_lowercase().contains("calculate") + || message.content.to_lowercase().contains("math") + { + match self + .tool_manager + .execute_tool("calculator", &message.session_id, &message.user_id) + .await + { + Ok(tool_result) => { + self.session_manager + .save_message( + session.id, + user_id, + "assistant", + &tool_result.output, + "tool_start", + ) + .await?; + + tool_result.output + } + Err(e) => { + format!("I encountered an error starting the calculator: {}", e) + } + } + } else { + let available_tools = self.tool_manager.list_tools(); + let tools_context = if !available_tools.is_empty() { + format!("\n\nAvailable tools: {}. If the user needs calculations, suggest using the calculator tool.", available_tools.join(", ")) + } else { + String::new() + }; + + let full_prompt = format!("{}{}", message.content, tools_context); + + self.llm_provider + .generate(&full_prompt, &serde_json::Value::Null) + .await? + }; + + self.session_manager + .save_message(session.id, user_id, "assistant", &response, "text") + .await?; + + let bot_response = BotResponse { + bot_id: message.bot_id, + user_id: message.user_id, + session_id: message.session_id, + channel: message.channel, + content: response, + message_type: "text".to_string(), + stream_token: None, + is_complete: true, + }; + + if let Some(adapter) = self.channels.get(&message.channel) { + adapter.send_message(bot_response).await?; + } + + Ok(()) + } +} + +#[actix_web::get("/ws")] +async fn websocket_handler( + req: HttpRequest, + stream: web::Payload, + data: web::Data, +) -> Result { + let (res, mut session, mut msg_stream) = actix_ws::handle(&req, stream)?; + let session_id = Uuid::new_v4().to_string(); + let (tx, mut rx) = mpsc::channel::(100); + + data.orchestrator + .register_response_channel(session_id.clone(), tx.clone()) + .await; + data.web_adapter + .add_connection(session_id.clone(), tx.clone()) + .await; + data.voice_adapter + .add_connection(session_id.clone(), tx.clone()) + .await; + + let 
orchestrator = data.orchestrator.clone(); + let web_adapter = data.web_adapter.clone(); + + actix_web::rt::spawn(async move { + while let Some(msg) = rx.recv().await { + if let Ok(json) = serde_json::to_string(&msg) { + let _ = session.text(json).await; + } + } + }); + + actix_web::rt::spawn(async move { + while let Some(Ok(msg)) = msg_stream.recv().await { + match msg { + WsMessage::Text(text) => { + let user_message = UserMessage { + bot_id: "default_bot".to_string(), + user_id: "default_user".to_string(), + session_id: session_id.clone(), + channel: "web".to_string(), + content: text.to_string(), + message_type: "text".to_string(), + media_url: None, + timestamp: Utc::now(), + }; + + if let Err(e) = orchestrator.stream_response(user_message, tx.clone()).await { + info!("Error processing message: {}", e); + } + } + WsMessage::Close(_) => { + web_adapter.remove_connection(&session_id).await; + break; + } + _ => {} + } + } + }); + + Ok(res) +} + +#[actix_web::get("/api/whatsapp/webhook")] +async fn whatsapp_webhook_verify( + data: web::Data, + web::Query(params): web::Query>, +) -> Result { + let mode = params.get("hub.mode").unwrap_or(&"".to_string()); + let token = params.get("hub.verify_token").unwrap_or(&"".to_string()); + let challenge = params.get("hub.challenge").unwrap_or(&"".to_string()); + + match data.whatsapp_adapter.verify_webhook(mode, token, challenge) { + Ok(challenge_response) => Ok(HttpResponse::Ok().body(challenge_response)), + Err(_) => Ok(HttpResponse::Forbidden().body("Verification failed")), + } +} + +#[actix_web::post("/api/whatsapp/webhook")] +async fn whatsapp_webhook( + data: web::Data, + payload: web::Json, +) -> Result { + match data + .whatsapp_adapter + .process_incoming_message(payload.into_inner()) + .await + { + Ok(user_messages) => { + for user_message in user_messages { + if let Err(e) = data.orchestrator.process_message(user_message).await { + log::error!("Error processing WhatsApp message: {}", e); + } + } + 
Ok(HttpResponse::Ok().body("")) + } + Err(e) => { + log::error!("Error processing WhatsApp webhook: {}", e); + Ok(HttpResponse::BadRequest().body("Invalid message")) + } + } +} + +#[actix_web::post("/api/voice/start")] +async fn voice_start( + data: web::Data, + info: web::Json, +) -> Result { + let session_id = info + .get("session_id") + .and_then(|s| s.as_str()) + .unwrap_or(""); + let user_id = info + .get("user_id") + .and_then(|u| u.as_str()) + .unwrap_or("user"); + + match data + .voice_adapter + .start_voice_session(session_id, user_id) + .await + { + Ok(token) => { + Ok(HttpResponse::Ok().json(serde_json::json!({"token": token, "status": "started"}))) + } + Err(e) => { + Ok(HttpResponse::InternalServerError() + .json(serde_json::json!({"error": e.to_string()}))) + } + } +} + +#[actix_web::post("/api/voice/stop")] +async fn voice_stop( + data: web::Data, + info: web::Json, +) -> Result { + let session_id = info + .get("session_id") + .and_then(|s| s.as_str()) + .unwrap_or(""); + + match data.voice_adapter.stop_voice_session(session_id).await { + Ok(()) => Ok(HttpResponse::Ok().json(serde_json::json!({"status": "stopped"}))), + Err(e) => { + Ok(HttpResponse::InternalServerError() + .json(serde_json::json!({"error": e.to_string()}))) + } + } +} + +#[actix_web::post("/api/sessions")] +async fn create_session(_data: web::Data) -> Result { + let session_id = Uuid::new_v4(); + Ok(HttpResponse::Ok().json(serde_json::json!({ + "session_id": session_id, + "title": "New Conversation", + "created_at": Utc::now() + }))) +} + +#[actix_web::get("/api/sessions")] +async fn get_sessions(data: web::Data) -> Result { + let user_id = Uuid::parse_str("00000000-0000-0000-0000-000000000001").unwrap(); + match data.orchestrator.get_user_sessions(user_id).await { + Ok(sessions) => Ok(HttpResponse::Ok().json(sessions)), + Err(e) => { + Ok(HttpResponse::InternalServerError() + .json(serde_json::json!({"error": e.to_string()}))) + } + } +} + 
+#[actix_web::get("/api/sessions/{session_id}")] +async fn get_session_history( + data: web::Data, + path: web::Path, +) -> Result { + let session_id = path.into_inner(); + let user_id = Uuid::parse_str("00000000-0000-0000-0000-000000000001").unwrap(); + + match Uuid::parse_str(&session_id) { + Ok(session_uuid) => match data + .orchestrator + .get_conversation_history(session_uuid, user_id) + .await + { + Ok(history) => Ok(HttpResponse::Ok().json(history)), + Err(e) => Ok(HttpResponse::InternalServerError() + .json(serde_json::json!({"error": e.to_string()}))), + }, + Err(_) => { + Ok(HttpResponse::BadRequest().json(serde_json::json!({"error": "Invalid session ID"}))) + } + } +} + +#[actix_web::post("/api/set_mode")] +async fn set_mode_handler( + data: web::Data, + info: web::Json>, +) -> Result { + let default_user = "default_user".to_string(); + let default_bot = "default_bot".to_string(); + let default_mode = "direct".to_string(); + + let user_id = info.get("user_id").unwrap_or(&default_user); + let bot_id = info.get("bot_id").unwrap_or(&default_bot); + let mode = info.get("mode").unwrap_or(&default_mode); + + if let Err(e) = data + .orchestrator + .set_user_answer_mode(user_id, bot_id, mode) + .await + { + return Ok( + HttpResponse::InternalServerError().json(serde_json::json!({"error": e.to_string()})) + ); + } + + Ok(HttpResponse::Ok().json(serde_json::json!({"status": "mode_updated"}))) +} + +#[actix_web::get("/")] +async fn index() -> Result { + let html = fs::read_to_string("templates/index.html") + .unwrap_or_else(|_| include_str!("../../static/index.html").to_string()); + Ok(HttpResponse::Ok().content_type("text/html").body(html)) +} + +#[actix_web::get("/static/{filename:.*}")] +async fn static_files(req: HttpRequest) -> Result { + let filename = req.match_info().query("filename"); + let path = format!("static/{}", filename); + + match fs::read(&path) { + Ok(content) => { + let content_type = match filename { + f if f.ends_with(".js") => 
"application/javascript", + f if f.ends_with(".css") => "text/css", + f if f.ends_with(".png") => "image/png", + f if f.ends_with(".jpg") | f.ends_with(".jpeg") => "image/jpeg", + _ => "text/plain", + }; + + Ok(HttpResponse::Ok().content_type(content_type).body(content)) + } + Err(_) => Ok(HttpResponse::NotFound().body("File not found")), + } +} + +use crate::shared::UserSession; +use redis::{AsyncCommands, Client}; +use serde_json; +use sqlx::{PgPool, Row}; +use std::sync::Arc; +use uuid::Uuid; + +pub struct SessionManager { + pub pool: PgPool, + pub redis: Option>, +} + +impl SessionManager { + pub fn new(pool: PgPool, redis: Option>) -> Self { + Self { pool, redis } + } + + pub async fn get_user_session( + &self, + user_id: Uuid, + bot_id: Uuid, + ) -> Result, Box> { + if let Some(redis_client) = &self.redis { + let mut conn = redis_client.get_multiplexed_async_connection().await?; + let cache_key = format!("session:{}:{}", user_id, bot_id); + let session_json: Option = conn.get(&cache_key).await?; + if let Some(json) = session_json { + if let Ok(session) = serde_json::from_str::(&json) { + return Ok(Some(session)); + } + } + } + + let session = sqlx::query_as::<_, UserSession>( + "SELECT * FROM user_sessions WHERE user_id = $1 AND bot_id = $2 ORDER BY updated_at DESC LIMIT 1", + ) + .bind(user_id) + .bind(bot_id) + .fetch_optional(&self.pool) + .await?; + + if let Some(ref session) = session { + if let Some(redis_client) = &self.redis { + let mut conn = redis_client.get_multiplexed_async_connection().await?; + let cache_key = format!("session:{}:{}", user_id, bot_id); + let session_json = serde_json::to_string(session)?; + let _: () = conn.set_ex(cache_key, session_json, 1800).await?; + } + } + + Ok(session) + } + + pub async fn create_session( + &self, + user_id: Uuid, + bot_id: Uuid, + title: &str, + ) -> Result> { + let session = sqlx::query_as::<_, UserSession>( + "INSERT INTO user_sessions (user_id, bot_id, title) VALUES ($1, $2, $3) RETURNING *", + ) + 
.bind(user_id) + .bind(bot_id) + .bind(title) + .fetch_one(&self.pool) + .await?; + + if let Some(redis_client) = &self.redis { + let mut conn = redis_client.get_multiplexed_async_connection().await?; + let cache_key = format!("session:{}:{}", user_id, bot_id); + let session_json = serde_json::to_string(&session)?; + let _: () = conn.set_ex(cache_key, session_json, 1800).await?; + } + + Ok(session) + } + + pub async fn save_message( + &self, + session_id: Uuid, + user_id: Uuid, + role: &str, + content: &str, + message_type: &str, + ) -> Result<(), Box> { + let message_count: i64 = + sqlx::query("SELECT COUNT(*) as count FROM message_history WHERE session_id = $1") + .bind(session_id) + .fetch_one(&self.pool) + .await? + .get("count"); + + sqlx::query( + "INSERT INTO message_history (session_id, user_id, role, content_encrypted, message_type, message_index) + VALUES ($1, $2, $3, $4, $5, $6)", + ) + .bind(session_id) + .bind(user_id) + .bind(role) + .bind(content) + .bind(message_type) + .bind(message_count + 1) + .execute(&self.pool) + .await?; + + sqlx::query("UPDATE user_sessions SET updated_at = NOW() WHERE id = $1") + .bind(session_id) + .execute(&self.pool) + .await?; + + if let Some(redis_client) = &self.redis { + if let Some(session_info) = + sqlx::query("SELECT user_id, bot_id FROM user_sessions WHERE id = $1") + .bind(session_id) + .fetch_optional(&self.pool) + .await? 
+ { + let user_id: Uuid = session_info.get("user_id"); + let bot_id: Uuid = session_info.get("bot_id"); + let mut conn = redis_client.get_multiplexed_async_connection().await?; + let cache_key = format!("session:{}:{}", user_id, bot_id); + let _: () = conn.del(cache_key).await?; + } + } + + Ok(()) + } + + pub async fn get_conversation_history( + &self, + session_id: Uuid, + user_id: Uuid, + ) -> Result, Box> { + let messages = sqlx::query( + "SELECT role, content_encrypted FROM message_history + WHERE session_id = $1 AND user_id = $2 + ORDER BY message_index ASC", + ) + .bind(session_id) + .bind(user_id) + .fetch_all(&self.pool) + .await?; + + let history = messages + .into_iter() + .map(|row| (row.get("role"), row.get("content_encrypted"))) + .collect(); + + Ok(history) + } + + pub async fn get_user_sessions( + &self, + user_id: Uuid, + ) -> Result, Box> { + let sessions = sqlx::query_as::<_, UserSession>( + "SELECT * FROM user_sessions WHERE user_id = $1 ORDER BY updated_at DESC", + ) + .bind(user_id) + .fetch_all(&self.pool) + .await?; + Ok(sessions) + } + + pub async fn update_answer_mode( + &self, + user_id: &str, + bot_id: &str, + mode: &str, + ) -> Result<(), Box> { + let user_uuid = Uuid::parse_str(user_id)?; + let bot_uuid = Uuid::parse_str(bot_id)?; + + sqlx::query( + "UPDATE user_sessions + SET answer_mode = $1, updated_at = NOW() + WHERE user_id = $2 AND bot_id = $3", + ) + .bind(mode) + .bind(user_uuid) + .bind(bot_uuid) + .execute(&self.pool) + .await?; + + if let Some(redis_client) = &self.redis { + let mut conn = redis_client.get_multiplexed_async_connection().await?; + let cache_key = format!("session:{}:{}", user_uuid, bot_uuid); + let _: () = conn.del(cache_key).await?; + } + + Ok(()) + } + + pub async fn update_current_tool( + &self, + user_id: &str, + bot_id: &str, + tool_name: Option<&str>, + ) -> Result<(), Box> { + let user_uuid = Uuid::parse_str(user_id)?; + let bot_uuid = Uuid::parse_str(bot_id)?; + + sqlx::query( + "UPDATE user_sessions + 
SET current_tool = $1, updated_at = NOW() + WHERE user_id = $2 AND bot_id = $3", + ) + .bind(tool_name) + .bind(user_uuid) + .bind(bot_uuid) + .execute(&self.pool) + .await?; + + if let Some(redis_client) = &self.redis { + let mut conn = redis_client.get_multiplexed_async_connection().await?; + let cache_key = format!("session:{}:{}", user_uuid, bot_uuid); + let _: () = conn.del(cache_key).await?; + } + + Ok(()) + } + + pub async fn get_session_by_id( + &self, + session_id: Uuid, + ) -> Result, Box> { + if let Some(redis_client) = &self.redis { + let mut conn = redis_client.get_multiplexed_async_connection().await?; + let cache_key = format!("session_by_id:{}", session_id); + let session_json: Option = conn.get(&cache_key).await?; + if let Some(json) = session_json { + if let Ok(session) = serde_json::from_str::(&json) { + return Ok(Some(session)); + } + } + } + + let session = sqlx::query_as::<_, UserSession>("SELECT * FROM user_sessions WHERE id = $1") + .bind(session_id) + .fetch_optional(&self.pool) + .await?; + + if let Some(ref session) = session { + if let Some(redis_client) = &self.redis { + let mut conn = redis_client.get_multiplexed_async_connection().await?; + let cache_key = format!("session_by_id:{}", session_id); + let session_json = serde_json::to_string(session)?; + let _: () = conn.set_ex(cache_key, session_json, 1800).await?; + } + } + + Ok(session) + } + + pub async fn cleanup_old_sessions( + &self, + days_old: i32, + ) -> Result> { + let result = sqlx::query( + "DELETE FROM user_sessions + WHERE updated_at < NOW() - INTERVAL '1 day' * $1", + ) + .bind(days_old) + .execute(&self.pool) + .await?; + Ok(result.rows_affected()) + } + + pub async fn set_current_tool( + &self, + user_id: &str, + bot_id: &str, + tool_name: Option, + ) -> Result<(), Box> { + let user_uuid = Uuid::parse_str(user_id)?; + let bot_uuid = Uuid::parse_str(bot_id)?; + + sqlx::query( + "UPDATE user_sessions + SET current_tool = $1, updated_at = NOW() + WHERE user_id = $2 AND 
bot_id = $3", + ) + .bind(tool_name) + .bind(user_uuid) + .bind(bot_uuid) + .execute(&self.pool) + .await?; + + if let Some(redis_client) = &self.redis { + let mut conn = redis_client.get_multiplexed_async_connection().await?; + let cache_key = format!("session:{}:{}", user_uuid, bot_uuid); + let _: () = conn.del(cache_key).await?; + } + + Ok(()) + } +} + +impl Clone for SessionManager { + fn clone(&self) -> Self { + Self { + pool: self.pool.clone(), + redis: self.redis.clone(), + } + } +} + +// src/tools/mod.rs +use async_trait::async_trait; +use redis::AsyncCommands; +use rhai::{Engine, Scope}; +use serde::{Deserialize, Serialize}; +use std::collections::HashMap; +use std::sync::Arc; +use tokio::sync::{mpsc, Mutex}; +use uuid::Uuid; + +use crate::channels::ChannelAdapter; +use crate::session::SessionManager; +use crate::shared::BotResponse; + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct ToolResult { + pub success: bool, + pub output: String, + pub requires_input: bool, + pub session_id: String, +} + +#[derive(Clone)] +pub struct Tool { + pub name: String, + pub description: String, + pub parameters: HashMap, + pub script: String, +} + +#[async_trait] +pub trait ToolExecutor: Send + Sync { + async fn execute( + &self, + tool_name: &str, + session_id: &str, + user_id: &str, + ) -> Result>; + async fn provide_input( + &self, + session_id: &str, + input: &str, + ) -> Result<(), Box>; + async fn get_output(&self, session_id: &str) + -> Result, Box>; + async fn is_waiting_for_input( + &self, + session_id: &str, + ) -> Result>; +} + +pub struct RedisToolExecutor { + redis_client: redis::Client, + web_adapter: Arc, + voice_adapter: Arc, + whatsapp_adapter: Arc, +} + +impl RedisToolExecutor { + pub fn new( + redis_url: &str, + web_adapter: Arc, + voice_adapter: Arc, + whatsapp_adapter: Arc, + ) -> Result> { + let client = redis::Client::open(redis_url)?; + Ok(Self { + redis_client: client, + web_adapter, + voice_adapter, + whatsapp_adapter, + }) + } + + 
async fn send_tool_message( + &self, + session_id: &str, + user_id: &str, + channel: &str, + message: &str, + ) -> Result<(), Box> { + let response = BotResponse { + bot_id: "tool_bot".to_string(), + user_id: user_id.to_string(), + session_id: session_id.to_string(), + channel: channel.to_string(), + content: message.to_string(), + message_type: "tool".to_string(), + stream_token: None, + is_complete: true, + }; + + match channel { + "web" => self.web_adapter.send_message(response).await, + "voice" => self.voice_adapter.send_message(response).await, + "whatsapp" => self.whatsapp_adapter.send_message(response).await, + _ => Ok(()), + } + } + + fn create_rhai_engine(&self, session_id: String, user_id: String, channel: String) -> Engine { + let mut engine = Engine::new(); + + // Clone for TALK function + let tool_executor = Arc::new(( + self.redis_client.clone(), + self.web_adapter.clone(), + self.voice_adapter.clone(), + self.whatsapp_adapter.clone(), + )); + + let session_id_clone = session_id.clone(); + let user_id_clone = user_id.clone(); + let channel_clone = channel.clone(); + + engine.register_fn("talk", move |message: String| { + let tool_executor = Arc::clone(&tool_executor); + let session_id = session_id_clone.clone(); + let user_id = user_id_clone.clone(); + let channel = channel_clone.clone(); + + tokio::spawn(async move { + let (redis_client, web_adapter, voice_adapter, whatsapp_adapter) = &*tool_executor; + + let response = BotResponse { + bot_id: "tool_bot".to_string(), + user_id: user_id.clone(), + session_id: session_id.clone(), + channel: channel.clone(), + content: message.clone(), + message_type: "tool".to_string(), + stream_token: None, + is_complete: true, + }; + + let result = match channel.as_str() { + "web" => web_adapter.send_message(response).await, + "voice" => voice_adapter.send_message(response).await, + "whatsapp" => whatsapp_adapter.send_message(response).await, + _ => Ok(()), + }; + + if let Err(e) = result { + log::error!("Failed to 
send tool message: {}", e); + } + + if let Ok(mut conn) = redis_client.get_async_connection().await { + let output_key = format!("tool:{}:output", session_id); + let _ = conn.lpush(&output_key, &message).await; + } + }); + }); + + let hear_executor = self.redis_client.clone(); + let session_id_clone = session_id.clone(); + + engine.register_fn("hear", move || -> String { + let hear_executor = hear_executor.clone(); + let session_id = session_id_clone.clone(); + + let rt = tokio::runtime::Runtime::new().unwrap(); + rt.block_on(async move { + match hear_executor.get_async_connection().await { + Ok(mut conn) => { + let input_key = format!("tool:{}:input", session_id); + let waiting_key = format!("tool:{}:waiting", session_id); + + let _ = conn.set_ex(&waiting_key, "true", 300).await; + let result: Option<(String, String)> = + conn.brpop(&input_key, 30).await.ok().flatten(); + let _ = conn.del(&waiting_key).await; + + result + .map(|(_, input)| input) + .unwrap_or_else(|| "timeout".to_string()) + } + Err(e) => { + log::error!("HEAR Redis error: {}", e); + "error".to_string() + } + } + }) + }); + + engine + } + + async fn cleanup_session(&self, session_id: &str) -> Result<(), Box> { + let mut conn = self.redis_client.get_multiplexed_async_connection().await?; + + let keys = vec![ + format!("tool:{}:output", session_id), + format!("tool:{}:input", session_id), + format!("tool:{}:waiting", session_id), + format!("tool:{}:active", session_id), + ]; + + for key in keys { + let _: () = conn.del(&key).await?; + } + + Ok(()) + } +} + +#[async_trait] +impl ToolExecutor for RedisToolExecutor { + async fn execute( + &self, + tool_name: &str, + session_id: &str, + user_id: &str, + ) -> Result> { + let tool = get_tool(tool_name).ok_or_else(|| format!("Tool not found: {}", tool_name))?; + + let mut conn = self.redis_client.get_multiplexed_async_connection().await?; + let session_key = format!("tool:{}:session", session_id); + let session_data = serde_json::json!({ + "user_id": 
user_id, + "tool_name": tool_name, + "started_at": chrono::Utc::now().to_rfc3339(), + }); + conn.set_ex(&session_key, session_data.to_string(), 3600) + .await?; + + let active_key = format!("tool:{}:active", session_id); + conn.set_ex(&active_key, "true", 3600).await?; + + let channel = "web"; + let _engine = self.create_rhai_engine( + session_id.to_string(), + user_id.to_string(), + channel.to_string(), + ); + + let redis_clone = self.redis_client.clone(); + let web_adapter_clone = self.web_adapter.clone(); + let voice_adapter_clone = self.voice_adapter.clone(); + let whatsapp_adapter_clone = self.whatsapp_adapter.clone(); + let session_id_clone = session_id.to_string(); + let user_id_clone = user_id.to_string(); + let tool_script = tool.script.clone(); + + tokio::spawn(async move { + let mut engine = Engine::new(); + let mut scope = Scope::new(); + + let redis_client = redis_clone.clone(); + let web_adapter = web_adapter_clone.clone(); + let voice_adapter = voice_adapter_clone.clone(); + let whatsapp_adapter = whatsapp_adapter_clone.clone(); + let session_id = session_id_clone.clone(); + let user_id = user_id_clone.clone(); + + engine.register_fn("talk", move |message: String| { + let redis_client = redis_client.clone(); + let web_adapter = web_adapter.clone(); + let voice_adapter = voice_adapter.clone(); + let whatsapp_adapter = whatsapp_adapter.clone(); + let session_id = session_id.clone(); + let user_id = user_id.clone(); + + tokio::spawn(async move { + let channel = "web"; + + let response = BotResponse { + bot_id: "tool_bot".to_string(), + user_id: user_id.clone(), + session_id: session_id.clone(), + channel: channel.to_string(), + content: message.clone(), + message_type: "tool".to_string(), + stream_token: None, + is_complete: true, + }; + + let send_result = match channel { + "web" => web_adapter.send_message(response).await, + "voice" => voice_adapter.send_message(response).await, + "whatsapp" => whatsapp_adapter.send_message(response).await, + _ => 
Ok(()), + }; + + if let Err(e) = send_result { + log::error!("Failed to send tool message: {}", e); + } + + if let Ok(mut conn) = redis_client.get_async_connection().await { + let output_key = format!("tool:{}:output", session_id); + let _ = conn.lpush(&output_key, &message).await; + } + }); + }); + + let hear_redis = redis_clone.clone(); + let session_id_hear = session_id.clone(); + engine.register_fn("hear", move || -> String { + let hear_redis = hear_redis.clone(); + let session_id = session_id_hear.clone(); + + let rt = tokio::runtime::Runtime::new().unwrap(); + rt.block_on(async move { + match hear_redis.get_async_connection().await { + Ok(mut conn) => { + let input_key = format!("tool:{}:input", session_id); + let waiting_key = format!("tool:{}:waiting", session_id); + + let _ = conn.set_ex(&waiting_key, "true", 300).await; + let result: Option<(String, String)> = + conn.brpop(&input_key, 30).await.ok().flatten(); + let _ = conn.del(&waiting_key).await; + + result + .map(|(_, input)| input) + .unwrap_or_else(|| "timeout".to_string()) + } + Err(_) => "error".to_string(), + } + }) + }); + + match engine.eval_with_scope::<()>(&mut scope, &tool_script) { + Ok(_) => { + log::info!( + "Tool {} completed successfully for session {}", + tool_name, + session_id + ); + + let completion_msg = + "🛠️ Tool execution completed. 
How can I help you with anything else?"; + let response = BotResponse { + bot_id: "tool_bot".to_string(), + user_id: user_id_clone, + session_id: session_id_clone.clone(), + channel: "web".to_string(), + content: completion_msg.to_string(), + message_type: "tool_complete".to_string(), + stream_token: None, + is_complete: true, + }; + + let _ = web_adapter_clone.send_message(response).await; + } + Err(e) => { + log::error!("Tool execution failed: {}", e); + + let error_msg = format!("❌ Tool error: {}", e); + let response = BotResponse { + bot_id: "tool_bot".to_string(), + user_id: user_id_clone, + session_id: session_id_clone.clone(), + channel: "web".to_string(), + content: error_msg, + message_type: "tool_error".to_string(), + stream_token: None, + is_complete: true, + }; + + let _ = web_adapter_clone.send_message(response).await; + } + } + + if let Ok(mut conn) = redis_clone.get_async_connection().await { + let active_key = format!("tool:{}:active", session_id_clone); + let _ = conn.del(&active_key).await; + } + }); + + Ok(ToolResult { + success: true, + output: format!( + "🛠️ Starting {} tool. 
Please follow the tool's instructions.", + tool_name + ), + requires_input: true, + session_id: session_id.to_string(), + }) + } + + async fn provide_input( + &self, + session_id: &str, + input: &str, + ) -> Result<(), Box> { + let mut conn = self.redis_client.get_multiplexed_async_connection().await?; + let input_key = format!("tool:{}:input", session_id); + conn.lpush(&input_key, input).await?; + Ok(()) + } + + async fn get_output( + &self, + session_id: &str, + ) -> Result, Box> { + let mut conn = self.redis_client.get_multiplexed_async_connection().await?; + let output_key = format!("tool:{}:output", session_id); + let messages: Vec = conn.lrange(&output_key, 0, -1).await?; + let _: () = conn.del(&output_key).await?; + Ok(messages) + } + + async fn is_waiting_for_input( + &self, + session_id: &str, + ) -> Result> { + let mut conn = self.redis_client.get_multiplexed_async_connection().await?; + let waiting_key = format!("tool:{}:waiting", session_id); + let exists: bool = conn.exists(&waiting_key).await?; + Ok(exists) + } +} + +fn get_tool(name: &str) -> Option { + match name { + "calculator" => Some(Tool { + name: "calculator".to_string(), + description: "Perform mathematical calculations".to_string(), + parameters: HashMap::from([ + ("operation".to_string(), "add|subtract|multiply|divide".to_string()), + ("a".to_string(), "number".to_string()), + ("b".to_string(), "number".to_string()), + ]), + script: r#" + let TALK = |message| { + talk(message); + }; + + let HEAR = || { + hear() + }; + + TALK("🔢 Calculator started!"); + TALK("Please enter the first number:"); + let a = HEAR(); + TALK("Please enter the second number:"); + let b = HEAR(); + TALK("Choose operation: add, subtract, multiply, or divide:"); + let op = HEAR(); + + let num_a = a.to_float(); + let num_b = b.to_float(); + + if op == "add" { + let result = num_a + num_b; + TALK("✅ Result: " + a + " + " + b + " = " + result); + } else if op == "subtract" { + let result = num_a - num_b; + TALK("✅ Result: 
" + a + " - " + b + " = " + result); + } else if op == "multiply" { + let result = num_a * num_b; + TALK("✅ Result: " + a + " × " + b + " = " + result); + } else if op == "divide" { + if num_b != 0.0 { + let result = num_a / num_b; + TALK("✅ Result: " + a + " ÷ " + b + " = " + result); + } else { + TALK("❌ Error: Cannot divide by zero!"); + } + } else { + TALK("❌ Error: Invalid operation. Please use: add, subtract, multiply, or divide"); + } + + TALK("Calculator session completed. Thank you!"); + "#.to_string(), + }), + _ => None, + } +} + +#[derive(Clone)] +pub struct ToolManager { + tools: HashMap, + waiting_responses: Arc>>>, +} + +impl ToolManager { + pub fn new() -> Self { + let mut tools = HashMap::new(); + + let calculator_tool = Tool { + name: "calculator".to_string(), + description: "Perform calculations".to_string(), + parameters: HashMap::from([ + ( + "operation".to_string(), + "add|subtract|multiply|divide".to_string(), + ), + ("a".to_string(), "number".to_string()), + ("b".to_string(), "number".to_string()), + ]), + script: r#" + TALK("Calculator started. 
Enter first number:"); + let a = HEAR(); + TALK("Enter second number:"); + let b = HEAR(); + TALK("Operation (add/subtract/multiply/divide):"); + let op = HEAR(); + + let num_a = a.parse::().unwrap(); + let num_b = b.parse::().unwrap(); + let result = if op == "add" { + num_a + num_b + } else if op == "subtract" { + num_a - num_b + } else if op == "multiply" { + num_a * num_b + } else if op == "divide" { + if num_b == 0.0 { + TALK("Cannot divide by zero"); + return; + } + num_a / num_b + } else { + TALK("Invalid operation"); + return; + }; + TALK("Result: ".to_string() + &result.to_string()); + "# + .to_string(), + }; + + tools.insert(calculator_tool.name.clone(), calculator_tool); + Self { + tools, + waiting_responses: Arc::new(Mutex::new(HashMap::new())), + } + } + + pub fn get_tool(&self, name: &str) -> Option<&Tool> { + self.tools.get(name) + } + + pub fn list_tools(&self) -> Vec { + self.tools.keys().cloned().collect() + } + + pub async fn execute_tool( + &self, + tool_name: &str, + session_id: &str, + user_id: &str, + ) -> Result> { + let tool = self.get_tool(tool_name).ok_or("Tool not found")?; + + Ok(ToolResult { + success: true, + output: format!("Tool {} started for user {}", tool_name, user_id), + requires_input: true, + session_id: session_id.to_string(), + }) + } + + pub async fn is_tool_waiting( + &self, + session_id: &str, + ) -> Result> { + let waiting = self.waiting_responses.lock().await; + Ok(waiting.contains_key(session_id)) + } + + pub async fn provide_input( + &self, + session_id: &str, + input: &str, + ) -> Result<(), Box> { + self.provide_user_response(session_id, "default_bot", input.to_string()) + .await + } + + pub async fn get_tool_output( + &self, + session_id: &str, + ) -> Result, Box> { + Ok(vec![]) + } + + pub async fn execute_tool_with_session( + &self, + tool_name: &str, + user_id: &str, + bot_id: &str, + session_manager: SessionManager, + channel_sender: mpsc::Sender, + ) -> Result<(), Box> { + let tool = 
self.get_tool(tool_name).ok_or("Tool not found")?; + session_manager + .set_current_tool(user_id, bot_id, Some(tool_name.to_string())) + .await?; + + let user_id = user_id.to_string(); + let bot_id = bot_id.to_string(); + let script = tool.script.clone(); + let session_manager_clone = session_manager.clone(); + let waiting_responses = self.waiting_responses.clone(); + + tokio::spawn(async move { + let mut engine = rhai::Engine::new(); + let (talk_tx, mut talk_rx) = mpsc::channel(100); + let (hear_tx, mut hear_rx) = mpsc::channel(100); + + { + let key = format!("{}:{}", user_id, bot_id); + let mut waiting = waiting_responses.lock().await; + waiting.insert(key, hear_tx); + } + + let channel_sender_clone = channel_sender.clone(); + let user_id_clone = user_id.clone(); + let bot_id_clone = bot_id.clone(); + + let talk_tx_clone = talk_tx.clone(); + engine.register_fn("TALK", move |message: String| { + let tx = talk_tx_clone.clone(); + tokio::spawn(async move { + let _ = tx.send(message).await; + }); + }); + + let hear_rx_mutex = Arc::new(Mutex::new(hear_rx)); + engine.register_fn("HEAR", move || { + let hear_rx = hear_rx_mutex.clone(); + tokio::task::block_in_place(|| { + tokio::runtime::Handle::current().block_on(async move { + let mut receiver = hear_rx.lock().await; + receiver.recv().await.unwrap_or_default() + }) + }) + }); + + let script_result = + tokio::task::spawn_blocking(move || engine.eval::<()>(&script)).await; + + if let Ok(Err(e)) = script_result { + let error_response = BotResponse { + bot_id: bot_id_clone.clone(), + user_id: user_id_clone.clone(), + session_id: Uuid::new_v4().to_string(), + channel: "test".to_string(), + content: format!("Tool error: {}", e), + message_type: "text".to_string(), + stream_token: None, + is_complete: true, + }; + let _ = channel_sender_clone.send(error_response).await; + } + + while let Some(message) = talk_rx.recv().await { + let response = BotResponse { + bot_id: bot_id.clone(), + user_id: user_id.clone(), + session_id: 
Uuid::new_v4().to_string(), + channel: "test".to_string(), + content: message, + message_type: "text".to_string(), + stream_token: None, + is_complete: true, + }; + let _ = channel_sender.send(response).await; + } + + let _ = session_manager_clone + .set_current_tool(&user_id, &bot_id, None) + .await; + }); + + Ok(()) + } + + pub async fn provide_user_response( + &self, + user_id: &str, + bot_id: &str, + response: String, + ) -> Result<(), Box> { + let key = format!("{}:{}", user_id, bot_id); + let mut waiting = self.waiting_responses.lock().await; + if let Some(tx) = waiting.get_mut(&key) { + let _ = tx.send(response).await; + waiting.remove(&key); + } + Ok(()) + } +} + +impl Default for ToolManager { + fn default() -> Self { + Self::new() + } +} + +. +└── src + ├── auth + │   └── mod.rs + ├── automation + │   └── mod.rs + ├── basic + │   ├── keywords + │   │   ├── create_draft.rs + │   │   ├── create_site.rs + │   │   ├── find.rs + │   │   ├── first.rs + │   │   ├── format.rs + │   │   ├── for_next.rs + │   │   ├── get.rs + │   │   ├── get_website.rs + │   │   ├── last.rs + │   │   ├── llm_keyword.rs + │   │   ├── mod.rs + │   │   ├── on.rs + │   │   ├── print.rs + │   │   ├── set.rs + │   │   ├── set_schedule.rs + │   │   └── wait.rs + │   └── mod.rs + ├── bot + │   └── mod.rs + ├── channels + │   └── mod.rs + ├── chart + │   └── mod.rs + ├── config + │   └── mod.rs + ├── context + │   └── mod.rs + ├── email + │   └── mod.rs + ├── file + │   └── mod.rs + ├── llm + │   ├── llm_generic.rs + │   ├── llm_local.rs + │   ├── llm_provider.rs + │   ├── llm.rs + │   └── mod.rs + ├── main.rs + ├── org + │   └── mod.rs + ├── session + │   └── mod.rs + ├── shared + │   ├── models.rs + │   ├── mod.rs + │   ├── state.rs + │   └── utils.rs + ├── tests + │   ├── integration_email_list.rs + │   ├── integration_file_list_test.rs + │   └── integration_file_upload_test.rs + ├── tools + │   └── mod.rs + ├── web_automation + │   └── mod.rs + └── whatsapp + └── mod.rs + +21 
directories, 44 files diff --git a/scripts/dev/llm_fix.sh b/scripts/dev/llm_fix.sh new file mode 100755 index 000000000..d30f1d7c1 --- /dev/null +++ b/scripts/dev/llm_fix.sh @@ -0,0 +1,35 @@ +#!/bin/bash + +SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +PROJECT_ROOT="$(cd "$SCRIPT_DIR/../.." && pwd)" +OUTPUT_FILE="$SCRIPT_DIR/llm_context.txt" + +echo "Consolidated LLM Context" > "$OUTPUT_FILE" + +prompts=( + "../../prompts/dev/general.md" + "../../Cargo.toml" + "../../prompts/dev/fix.md" +) + +for file in "${prompts[@]}"; do + cat "$file" >> "$OUTPUT_FILE" + echo "" >> "$OUTPUT_FILE" +done + +dirs=( + "src/shared" + "src/bot" + "src/session" + "src/tools" +) + +for dir in "${dirs[@]}"; do + find "$PROJECT_ROOT/$dir" -name "*.rs" | while read file; do + cat "$file" >> "$OUTPUT_FILE" + echo "" >> "$OUTPUT_FILE" + done +done + +cd "$PROJECT_ROOT" +tree -P '*.rs' -I 'target|*.lock' --prune | grep -v '[0-9] directories$' >> "$OUTPUT_FILE" diff --git a/scripts/dev/source_tree.sh b/scripts/dev/source_tree.sh new file mode 100644 index 000000000..ce788f6db --- /dev/null +++ b/scripts/dev/source_tree.sh @@ -0,0 +1,2 @@ +# apt install tree +tree -P '*.rs' -I 'target|*.lock' --prune | grep -v '[0-9] directories$' diff --git a/scripts/utils/add-drive-user.sh b/scripts/utils/add-drive-user.sh new file mode 100644 index 000000000..0f691e23e --- /dev/null +++ b/scripts/utils/add-drive-user.sh @@ -0,0 +1,27 @@ +export BOT_ID= +./mc alias set minio http://localhost:9000 user pass +./mc admin user add minio $BOT_ID + +cat > $BOT_ID-policy.json </dev/null + +# Temporary files +echo "Cleaning temporary files..." +rm -rf /tmp/* /var/tmp/* + +# Thumbnail cache +echo "Cleaning thumbnail cache..." +rm -rf ~/.cache/thumbnails/* /root/.cache/thumbnails/* + +# DNS cache +echo "Flushing DNS cache..." +systemd-resolve --flush-caches 2>/dev/null || true + +# Old kernels (keep 2 latest) +echo "Removing old kernels..." 
+apt purge -y $(dpkg -l | awk '/^ii linux-image-*/{print $2}' | grep -v $(uname -r) | head -n -2) 2>/dev/null + +# Crash reports +echo "Clearing crash reports..." +rm -f /var/crash/* + +### LXC Containers Cleanup ### +echo -e "\n[ LXC CONTAINERS CLEANUP ]" + +# Check if LXC is installed +if command -v lxc >/dev/null 2>&1; then + for container in $(lxc list -c n --format csv | grep -v "^$"); do + echo -e "\nCleaning container: $container" + + # Execute cleanup commands in container + lxc exec "$container" -- bash -c " + echo 'Cleaning package cache...' + apt clean && apt autoclean && apt autoremove -y + + echo 'Cleaning temporary files...' + rm -rf /tmp/* /var/tmp/* + + echo 'Cleaning logs...' + rm -rf /opt/gbo/logs/* + + echo 'Cleaning journal logs...' + journalctl --vacuum-time=1d 2>/dev/null || true + + echo 'Cleaning thumbnail cache...' + rm -rf /home/*/.cache/thumbnails/* /root/.cache/thumbnails/* + " 2>/dev/null + done +else + echo "LXC not installed, skipping container cleanup." +fi + +echo -e "\nCleanup completed!" \ No newline at end of file diff --git a/scripts/utils/disk-size.md b/scripts/utils/disk-size.md new file mode 100644 index 000000000..b7d3a5455 --- /dev/null +++ b/scripts/utils/disk-size.md @@ -0,0 +1,6 @@ +lxc list --format json | jq -r '.[].name' | while read container; do + echo -n "$container: " + lxc exec $container -- df -h / --output=used < /dev/null | tail -n1 +done + +du -h --max-depth=1 "." 
2>/dev/null | sort -rh | head -n 50 | awk '{printf "%-10s %s\n", $1, $2}' diff --git a/scripts/utils/email-ips.sh b/scripts/utils/email-ips.sh new file mode 100644 index 000000000..442af3c18 --- /dev/null +++ b/scripts/utils/email-ips.sh @@ -0,0 +1,8 @@ +az network public-ip list --resource-group "$CLOUD_GROUP" \ + --query "[].{Name:name, IP:ipAddress, ReverseDNS:dnsSettings.reverseFqdn}" \ + -o table + +az network public-ip update --resource-group "$CLOUD_GROUP" + --name "pip-network-adapter-name" + --reverse-fqdn "outbound14.domain.com.br" + diff --git a/scripts/utils/install-libreoffice-online.sh b/scripts/utils/install-libreoffice-online.sh new file mode 100644 index 000000000..30b953db7 --- /dev/null +++ b/scripts/utils/install-libreoffice-online.sh @@ -0,0 +1,65 @@ +sudo apt install -y cloud-guest-utils e2fsprogs + +apt install -y make g++ build-essential +apt install -y openjdk-17-jdk ant +apt install -y sudo systemd wget zip procps ccache +apt install -y automake bison flex git gperf graphviz junit4 libtool m4 nasm +apt install -y libcairo2-dev libjpeg-dev libegl1-mesa-dev libfontconfig1-dev \ + libgl1-mesa-dev libgif-dev libgtk-3-dev librsvg2-dev libpango1.0-dev +apt install -y libcap-dev libcap2-bin libkrb5-dev libpcap0.8-dev openssl libssl-dev +apt install -y libxcb-dev libx11-xcb-dev libxkbcommon-x11-dev libxtst-dev \ + libxrender-dev libxslt1-dev libxt-dev xsltproc +apt install -y libcunit1-dev libcppunit-dev libpam0g-dev libcups2-dev libzstd-dev uuid-runtime +apt install -y python3-dev python3-lxml python3-pip python3-polib +apt install -y nodejs npm +apt install -y libpoco-dev libpococrypto80 +apt install -y libreoffice-dev + + +mkdir -p /opt/lo && cd /opt/lo +wget https://github.com/CollaboraOnline/online/releases/download/for-code-assets/core-co-24.04-assets.tar.gz +tar xf core-co-24.04-assets.tar.gz && rm core-co-24.04-assets.tar.gz + +useradd cool -G sudo +mkdir -p /opt/cool && chown cool:cool /opt/cool +cd /opt/cool +sudo -Hu cool git clone 
https://github.com/CollaboraOnline/online.git +cd online && sudo -Hu cool ./autogen.sh + +export CPPFLAGS=-I/opt/lo/include +export LDFLAGS=-L/opt/lo/instdir/program +./configure --with-lokit-path=/opt/lo --with-lo-path=/opt/lo/instdir --with-poco-includes=/usr/local/include --with-poco-libs=/usr/local/lib + +sudo -Hu cool make -j$(nproc) + +make install +mkdir -p /etc/coolwsd /usr/local/var/cache/coolwsd + +chown cool:cool /usr/local/var/cache/coolwsd +admin_pwd=$(openssl rand -hex 6) + +cat < /lib/systemd/system/coolwsd.service +[Unit] +Description=Collabora Online WebSocket Daemon +After=network.target + +[Service] +ExecStart=/opt/cool/online/coolwsd --o:sys_template_path=/opt/cool/online/systemplate \ +--o:lo_template_path=/opt/lo/instdir --o:child_root_path=/opt/cool/online/jails \ +--o:admin_console.username=admin --o:admin_console.password=$DOC_EDITOR_ADMIN_PWD \ +--o:ssl.enable=false +User=cool + +[Install] +WantedBy=multi-user.target +EOT + +systemctl daemon-reload +systemctl enable coolwsd.service +systemctl start coolwsd.service +" + +echo "Installation complete!" 
+echo "Admin password: $admin_pwd" +echo "Access at: https://localhost:9980" + diff --git a/scripts/utils/set-limits.sh b/scripts/utils/set-limits.sh new file mode 100644 index 000000000..215cca7fe --- /dev/null +++ b/scripts/utils/set-limits.sh @@ -0,0 +1,53 @@ +#!/usr/bin/env bash + +# Define container limits in an associative array +declare -A container_limits=( + # Pattern Memory CPU Allowance + ["*tables*"]="4096MB:100ms/100ms" + ["*dns*"]="2048MB:100ms/100ms" + ["*doc-editor*"]="512MB:10ms/100ms" + ["*proxy*"]="2048MB:100ms/100ms" + ["*directory*"]="1024MB:50ms/100ms" + ["*drive*"]="4096MB:50ms/100ms" + ["*email*"]="4096MB:100ms/100ms" + ["*webmail*"]="4096MB:100ms/100ms" + ["*bot*"]="4096MB:50ms/100ms" + ["*meeting*"]="4096MB:100ms/100ms" + ["*alm*"]="512MB:50ms/100ms" + ["*alm-ci*"]="4096MB:100ms/100ms" + ["*system*"]="4096MB:50ms/100ms" + ["*mailer*"]="4096MB:25ms/100ms" +) + +# Default values (for containers that don't match any pattern) +DEFAULT_MEMORY="1024MB" +DEFAULT_CPU_ALLOWANCE="15ms/100ms" +CPU_COUNT=2 +CPU_PRIORITY=10 + +for pattern in "${!container_limits[@]}"; do + echo "Configuring $container..." + + memory=$DEFAULT_MEMORY + cpu_allowance=$DEFAULT_CPU_ALLOWANCE + + # Configure all containers + for container in $(lxc list -c n --format csv); do + # Check if container matches any pattern + if [[ $container == $pattern ]]; then + IFS=':' read -r memory cpu_allowance <<< "${container_limits[$pattern]}" + + # Apply configuration + lxc config set "$container" limits.memory "$memory" + lxc config set "$container" limits.cpu.allowance "$cpu_allowance" + lxc config set "$container" limits.cpu "$CPU_COUNT" + lxc config set "$container" limits.cpu.priority "$CPU_PRIORITY" + + echo "Restarting $container..." 
+ lxc restart "$container" + + lxc config show "$container" | grep -E "memory|cpu" + break + fi + done +done diff --git a/scripts/utils/set-size-5GB.sh b/scripts/utils/set-size-5GB.sh new file mode 100644 index 000000000..6de4216c6 --- /dev/null +++ b/scripts/utils/set-size-5GB.sh @@ -0,0 +1,7 @@ +lxc config device override $CONTAINER_NAME root +lxc config device set $CONTAINER_NAME root size 6GB + +zpool set autoexpand=on default +zpool online -e default /var/snap/lxd/common/lxd/disks/default.img +zpool list +zfs list diff --git a/scripts/utils/setup-host.sh b/scripts/utils/setup-host.sh new file mode 100644 index 000000000..ae611e8d5 --- /dev/null +++ b/scripts/utils/setup-host.sh @@ -0,0 +1,6 @@ + +# Host +sudo lxc config set core.trust_password "$LXC_TRUST_PASSWORD" + +# ALM-CI +lxc remote add bot 10.16.164.? --accept-certificate --password "$LXC_TRUST_PASSWORD" diff --git a/scripts/utils/startup.sh b/scripts/utils/startup.sh new file mode 100644 index 000000000..e274f6a38 --- /dev/null +++ b/scripts/utils/startup.sh @@ -0,0 +1,10 @@ +#!/bin/bash + +# Disable shell timeout + +sed -i '/TMOUT/d' /etc/profile /etc/bash.bashrc /etc/profile.d/* +echo 'export TMOUT=0' > /etc/profile.d/notimeout.sh +chmod +x /etc/profile.d/notimeout.sh +sed -i '/pam_exec.so/s/quiet/quiet set_timeout=0/' /etc/pam.d/sshd 2>/dev/null +source /etc/profile + diff --git a/src/auth/mod.rs b/src/auth/mod.rs new file mode 100644 index 000000000..92890ae98 --- /dev/null +++ b/src/auth/mod.rs @@ -0,0 +1,137 @@ +use argon2::{ + password_hash::{rand_core::OsRng, PasswordHash, PasswordHasher, PasswordVerifier, SaltString}, + Argon2, +}; +use redis::Client; +use sqlx::{PgPool, Row}; // <-- required for .get() +use std::sync::Arc; +use uuid::Uuid; + +pub struct AuthService { + pub pool: PgPool, + pub redis: Option>, +} + +impl AuthService { + #[allow(clippy::new_without_default)] + pub fn new(pool: PgPool, redis: Option>) -> Self { + Self { pool, redis } + } + + pub async fn verify_user( + &self, + 
username: &str, + password: &str, + ) -> Result, Box> { + let user = sqlx::query( + "SELECT id, password_hash FROM users WHERE username = $1 AND is_active = true", + ) + .bind(username) + .fetch_optional(&self.pool) + .await?; + + if let Some(row) = user { + let user_id: Uuid = row.get("id"); + let password_hash: String = row.get("password_hash"); + + if let Ok(parsed_hash) = PasswordHash::new(&password_hash) { + if Argon2::default() + .verify_password(password.as_bytes(), &parsed_hash) + .is_ok() + { + return Ok(Some(user_id)); + } + } + } + + Ok(None) + } + + pub async fn create_user( + &self, + username: &str, + email: &str, + password: &str, + ) -> Result> { + let salt = SaltString::generate(&mut OsRng); + let argon2 = Argon2::default(); + let password_hash = match argon2.hash_password(password.as_bytes(), &salt) { + Ok(ph) => ph.to_string(), + Err(e) => { + return Err(Box::new(std::io::Error::new( + std::io::ErrorKind::Other, + e.to_string(), + ))) + } + }; + + let row = sqlx::query( + "INSERT INTO users (username, email, password_hash) VALUES ($1, $2, $3) RETURNING id", + ) + .bind(username) + .bind(email) + .bind(&password_hash) + .fetch_one(&self.pool) + .await?; + + Ok(row.get::("id")) + } + + pub async fn delete_user_cache( + &self, + username: &str, + ) -> Result<(), Box> { + if let Some(redis_client) = &self.redis { + let mut conn = redis_client.get_multiplexed_async_connection().await?; + let cache_key = format!("auth:user:{}", username); + + let _: () = redis::Cmd::del(&cache_key).query_async(&mut conn).await?; + } + Ok(()) + } + + pub async fn update_user_password( + &self, + user_id: Uuid, + new_password: &str, + ) -> Result<(), Box> { + let salt = SaltString::generate(&mut OsRng); + let argon2 = Argon2::default(); + let password_hash = match argon2.hash_password(new_password.as_bytes(), &salt) { + Ok(ph) => ph.to_string(), + Err(e) => { + return Err(Box::new(std::io::Error::new( + std::io::ErrorKind::Other, + e.to_string(), + ))) + } + }; + + 
sqlx::query("UPDATE users SET password_hash = $1, updated_at = NOW() WHERE id = $2") + .bind(&password_hash) + .bind(user_id) + .execute(&self.pool) + .await?; + + if let Some(user_row) = sqlx::query("SELECT username FROM users WHERE id = $1") + .bind(user_id) + .fetch_optional(&self.pool) + .await? + { + let username: String = user_row.get("username"); + self.delete_user_cache(&username).await?; + } + + Ok(()) + } +} +{{END_REWRITTEN_CODE}} + +impl Clone for AuthService { + fn clone(&self) -> Self { + Self { + pool: self.pool.clone(), + redis: self.redis.clone(), + } + } +} diff --git a/src/automation/mod.rs b/src/automation/mod.rs new file mode 100644 index 000000000..f1b2b47fc --- /dev/null +++ b/src/automation/mod.rs @@ -0,0 +1,197 @@ +use crate::basic::ScriptService; + use crate::shared::models::automation_model::{Automation, TriggerKind}; +use crate::shared::state::AppState; +use chrono::Datelike; +use chrono::Timelike; +use chrono::{DateTime, Utc}; +use log::{error, info}; +use std::path::Path; +use tokio::time::Duration; +use uuid::Uuid; +pub struct AutomationService { + state: AppState, // Use web::Data directly + scripts_dir: String, +} + +impl AutomationService { + pub fn new(state: AppState, scripts_dir: &str) -> Self { + Self { + state, + scripts_dir: scripts_dir.to_string(), + } + } + + pub fn spawn(self) -> tokio::task::JoinHandle<()> { + tokio::spawn(async move { + let mut interval = tokio::time::interval(Duration::from_secs(5)); + let mut last_check = Utc::now(); + + loop { + interval.tick().await; + + if let Err(e) = self.run_cycle(&mut last_check).await { + error!("Automation cycle error: {}", e); + } + } + }) + } + + async fn run_cycle( + &self, + last_check: &mut DateTime, + ) -> Result<(), Box> { + let automations = self.load_active_automations().await?; + self.check_table_changes(&automations, *last_check).await; + self.process_schedules(&automations).await; + *last_check = Utc::now(); + Ok(()) + } + + async fn load_active_automations(&self) 
-> Result, sqlx::Error> { + if let Some(pool) = &self.state.db { + sqlx::query_as::<_, Automation>( + r#" + SELECT id, kind, target, schedule, param, is_active, last_triggered + FROM public.system_automations + WHERE is_active = true + "#, + ) + .fetch_all(pool) + .await + } else { + Err(sqlx::Error::PoolClosed) + } + } + + async fn check_table_changes(&self, automations: &[Automation], since: DateTime) { + if let Some(pool) = &self.state.db_custom { + for automation in automations { + if let Some(trigger_kind) = TriggerKind::from_i32(automation.kind) { + if matches!( + trigger_kind, + TriggerKind::TableUpdate + | TriggerKind::TableInsert + | TriggerKind::TableDelete + ) { + if let Some(table) = &automation.target { + let column = match trigger_kind { + TriggerKind::TableInsert => "created_at", + _ => "updated_at", + }; + + let query = + format!("SELECT COUNT(*) FROM {} WHERE {} > $1", table, column); + + match sqlx::query_scalar::<_, i64>(&query) + .bind(since) + .fetch_one(pool) + .await + { + Ok(count) => { + if count > 0 { + self.execute_action(&automation.param).await; + self.update_last_triggered(automation.id).await; + } + } + Err(e) => { + error!("Error checking changes for table {}: {}", table, e); + } + } + } + } + } + } + } + } + + async fn process_schedules(&self, automations: &[Automation]) { + let now = Utc::now().timestamp(); + + for automation in automations { + if let Some(TriggerKind::Scheduled) = TriggerKind::from_i32(automation.kind) { + if let Some(pattern) = &automation.schedule { + if Self::should_run_cron(pattern, now) { + self.execute_action(&automation.param).await; + self.update_last_triggered(automation.id).await; + } + } + } + } + } + + async fn update_last_triggered(&self, automation_id: Uuid) { + if let Some(pool) = &self.state.db { + if let Err(e) = sqlx::query!( + "UPDATE public.system_automations SET last_triggered = $1 WHERE id = $2", + Utc::now(), + automation_id + ) + .execute(pool) + .await + { + error!( + "Failed to update 
last_triggered for automation {}: {}", + automation_id, e + ); + } + } + } + + fn should_run_cron(pattern: &str, timestamp: i64) -> bool { + let parts: Vec<&str> = pattern.split_whitespace().collect(); + if parts.len() != 5 { + return false; + } + + let dt = chrono::DateTime::from_timestamp(timestamp, 0).unwrap(); + let minute = dt.minute() as i32; + let hour = dt.hour() as i32; + let day = dt.day() as i32; + let month = dt.month() as i32; + let weekday = dt.weekday().num_days_from_monday() as i32; + + [minute, hour, day, month, weekday] + .iter() + .enumerate() + .all(|(i, &val)| Self::cron_part_matches(parts[i], val)) + } + + fn cron_part_matches(part: &str, value: i32) -> bool { + if part == "*" { + return true; + } + if part.contains('/') { + let parts: Vec<&str> = part.split('/').collect(); + if parts.len() != 2 { + return false; + } + let step: i32 = parts[1].parse().unwrap_or(1); + if parts[0] == "*" { + return value % step == 0; + } + } + part.parse::().map_or(false, |num| num == value) + } + + async fn execute_action(&self, param: &str) { + let full_path = Path::new(&self.scripts_dir).join(param); + match tokio::fs::read_to_string(&full_path).await { + Ok(script_content) => { + info!("Executing action with param: {}", param); + + let script_service = ScriptService::new(&self.state.clone()); + + match script_service.compile(&script_content) { + Ok(ast) => match script_service.run(&ast) { + Ok(result) => info!("Script executed successfully: {:?}", result), + Err(e) => error!("Error executing script: {}", e), + }, + Err(e) => error!("Error compiling script: {}", e), + } + } + Err(e) => { + error!("Failed to execute action {}: {}", full_path.display(), e); + } + } + } +} diff --git a/src/basic/keywords/create_draft.rs b/src/basic/keywords/create_draft.rs new file mode 100644 index 000000000..8516b4e6e --- /dev/null +++ b/src/basic/keywords/create_draft.rs @@ -0,0 +1,69 @@ +use crate::email::save_email_draft; +use crate::email::{fetch_latest_sent_to, 
SaveDraftRequest}; +use crate::shared::state::AppState; +use rhai::Dynamic; +use rhai::Engine; + +pub fn create_draft_keyword(state: &AppState, engine: &mut Engine) { + let state_clone = state.clone(); + + engine + .register_custom_syntax( + &["CREATE_DRAFT", "$expr$", ",", "$expr$", ",", "$expr$"], + true, // Statement + move |context, inputs| { + // Extract arguments + let to = context.eval_expression_tree(&inputs[0])?.to_string(); + let subject = context.eval_expression_tree(&inputs[1])?.to_string(); + let reply_text = context.eval_expression_tree(&inputs[2])?.to_string(); + + // Execute async operations using the same pattern as FIND + let fut = execute_create_draft(&state_clone, &to, &subject, &reply_text); + let result = + tokio::task::block_in_place(|| tokio::runtime::Handle::current().block_on(fut)) + .map_err(|e| format!("Draft creation error: {}", e))?; + + Ok(Dynamic::from(result)) + }, + ) + .unwrap(); +} + +async fn execute_create_draft( + state: &AppState, + to: &str, + subject: &str, + reply_text: &str, +) -> Result { + let get_result = fetch_latest_sent_to(&state.config.clone().unwrap().email, to).await; + let email_body = if let Ok(get_result_str) = get_result { + if !get_result_str.is_empty() { + let email_separator = "


"; // Horizontal rule in HTML + let formatted_reply_text = reply_text.to_string(); + let formatted_old_text = get_result_str.replace("\n", "
"); + let fixed_reply_text = formatted_reply_text.replace("FIX", "Fixed"); + format!( + "{}{}{}", + fixed_reply_text, email_separator, formatted_old_text + ) + } else { + reply_text.to_string() + } + } else { + reply_text.to_string() + }; + + // Create and save draft + let draft_request = SaveDraftRequest { + to: to.to_string(), + subject: subject.to_string(), + cc: None, + text: email_body, + }; + + let save_result = save_email_draft(&state.config.clone().unwrap().email, &draft_request).await; + match save_result { + Ok(_) => Ok("Draft saved successfully".to_string()), + Err(e) => Err(e.to_string()), + } +} diff --git a/src/basic/keywords/create_site.rs b/src/basic/keywords/create_site.rs new file mode 100644 index 000000000..9a6e39918 --- /dev/null +++ b/src/basic/keywords/create_site.rs @@ -0,0 +1,94 @@ +use log::info; + +use rhai::Dynamic; +use rhai::Engine; +use std::error::Error; +use std::fs; +use std::io::Read; +use std::path::PathBuf; + +use crate::shared::state::AppState; +use crate::shared::utils; + +pub fn create_site_keyword(state: &AppState, engine: &mut Engine) { + let state_clone = state.clone(); + engine + .register_custom_syntax( + &["CREATE_SITE", "$expr$", ",", "$expr$", ",", "$expr$"], + true, + move |context, inputs| { + if inputs.len() < 3 { + return Err("Not enough arguments for CREATE SITE".into()); + } + + let alias = context.eval_expression_tree(&inputs[0])?; + let template_dir = context.eval_expression_tree(&inputs[1])?; + let prompt = context.eval_expression_tree(&inputs[2])?; + + let config = state_clone + .config + .as_ref() + .expect("Config must be initialized") + .clone(); + + let fut = create_site(&config, alias, template_dir, prompt); + let result = + tokio::task::block_in_place(|| tokio::runtime::Handle::current().block_on(fut)) + .map_err(|e| format!("Site creation failed: {}", e))?; + + Ok(Dynamic::from(result)) + }, + ) + .unwrap(); +} + +async fn create_site( + config: &crate::config::AppConfig, + alias: Dynamic, + 
template_dir: Dynamic, + prompt: Dynamic, +) -> Result> { + // Convert paths to platform-specific format + let base_path = PathBuf::from(&config.site_path); + let template_path = base_path.join(template_dir.to_string()); + let alias_path = base_path.join(alias.to_string()); + + // Create destination directory + fs::create_dir_all(&alias_path).map_err(|e| e.to_string())?; + + // Process all HTML files in template directory + let mut combined_content = String::new(); + + for entry in fs::read_dir(&template_path).map_err(|e| e.to_string())? { + let entry = entry.map_err(|e| e.to_string())?; + let path = entry.path(); + + if path.extension().map_or(false, |ext| ext == "html") { + let mut file = fs::File::open(&path).map_err(|e| e.to_string())?; + let mut contents = String::new(); + file.read_to_string(&mut contents) + .map_err(|e| e.to_string())?; + + combined_content.push_str(&contents); + combined_content.push_str("\n\n--- TEMPLATE SEPARATOR ---\n\n"); + } + } + + // Combine template content with prompt + let full_prompt = format!( + "TEMPLATE FILES:\n{}\n\nPROMPT: {}\n\nGenerate a new HTML file cloning all previous TEMPLATE (keeping only the local _assets libraries use, no external resources), but turning this into this prompt:", + combined_content, + prompt.to_string() + ); + + // Call LLM with the combined prompt + info!("Asking LLM to create site."); + let llm_result = utils::call_llm(&full_prompt, &config.ai).await?; + + // Write the generated HTML file + let index_path = alias_path.join("index.html"); + fs::write(index_path, llm_result).map_err(|e| e.to_string())?; + + info!("Site created at: {}", alias_path.display()); + Ok(alias_path.to_string_lossy().into_owned()) +} diff --git a/src/basic/keywords/find.rs b/src/basic/keywords/find.rs new file mode 100644 index 000000000..6c9935e37 --- /dev/null +++ b/src/basic/keywords/find.rs @@ -0,0 +1,87 @@ +use log::{error, info}; +use rhai::Dynamic; +use rhai::Engine; +use serde_json::{json, Value}; +use sqlx::PgPool; 
+ +use crate::shared::state::AppState; +use crate::shared::utils; +use crate::shared::utils::row_to_json; +use crate::shared::utils::to_array; + +pub fn find_keyword(state: &AppState, engine: &mut Engine) { + let db = state.db_custom.clone(); + + engine + .register_custom_syntax(&["FIND", "$expr$", ",", "$expr$"], false, { + let db = db.clone(); + + move |context, inputs| { + let table_name = context.eval_expression_tree(&inputs[0])?; + let filter = context.eval_expression_tree(&inputs[1])?; + let binding = db.as_ref().unwrap(); + + // Use the current async context instead of creating a new runtime + let binding2 = table_name.to_string(); + let binding3 = filter.to_string(); + let fut = execute_find(binding, &binding2, &binding3); + + // Use tokio::task::block_in_place + tokio::runtime::Handle::current().block_on + let result = + tokio::task::block_in_place(|| tokio::runtime::Handle::current().block_on(fut)) + .map_err(|e| format!("DB error: {}", e))?; + + if let Some(results) = result.get("results") { + let array = to_array(utils::json_value_to_dynamic(results)); + Ok(Dynamic::from(array)) + } else { + Err("No results".into()) + } + } + }) + .unwrap(); +} + +pub async fn execute_find( + pool: &PgPool, + table_str: &str, + filter_str: &str, +) -> Result { + // Changed to String error like your Actix code + info!( + "Starting execute_find with table: {}, filter: {}", + table_str, filter_str + ); + + let (where_clause, params) = utils::parse_filter(filter_str).map_err(|e| e.to_string())?; + + let query = format!( + "SELECT * FROM {} WHERE {} LIMIT 10", + table_str, where_clause + ); + info!("Executing query: {}", query); + + // Use the same simple pattern as your Actix code - no timeout wrapper + let rows = sqlx::query(&query) + .bind(¶ms[0]) // Simplified like your working code + .fetch_all(pool) + .await + .map_err(|e| { + error!("SQL execution error: {}", e); + e.to_string() + })?; + + info!("Query successful, got {} rows", rows.len()); + + let mut results = 
Vec::new(); + for row in rows { + results.push(row_to_json(row).map_err(|e| e.to_string())?); + } + + Ok(json!({ + "command": "find", + "table": table_str, + "filter": filter_str, + "results": results + })) +} diff --git a/src/basic/keywords/first.rs b/src/basic/keywords/first.rs new file mode 100644 index 000000000..cfbcca39f --- /dev/null +++ b/src/basic/keywords/first.rs @@ -0,0 +1,185 @@ +use rhai::Dynamic; +use rhai::Engine; + +pub fn first_keyword(engine: &mut Engine) { + engine + .register_custom_syntax(&["FIRST", "$expr$"], false, { + move |context, inputs| { + let input_string = context.eval_expression_tree(&inputs[0])?; + let input_str = input_string.to_string(); + + // Extract first word by splitting on whitespace + let first_word = input_str + .split_whitespace() + .next() + .unwrap_or("") + .to_string(); + + Ok(Dynamic::from(first_word)) + } + }) + .unwrap(); +} + +#[cfg(test)] +mod tests { + use super::*; + use rhai::Engine; + + fn setup_engine() -> Engine { + let mut engine = Engine::new(); + first_keyword(&mut engine); + engine + } + + #[test] + fn test_first_keyword_basic() { + let engine = setup_engine(); + + let result = engine + .eval::( + r#" + FIRST "hello world" + "#, + ) + .unwrap(); + + assert_eq!(result, "hello"); + } + + #[test] + fn test_first_keyword_single_word() { + let engine = setup_engine(); + + let result = engine + .eval::( + r#" + FIRST "single" + "#, + ) + .unwrap(); + + assert_eq!(result, "single"); + } + + #[test] + fn test_first_keyword_multiple_spaces() { + let engine = setup_engine(); + + let result = engine + .eval::( + r#" + FIRST " leading spaces" + "#, + ) + .unwrap(); + + assert_eq!(result, "leading"); + } + + #[test] + fn test_first_keyword_empty_string() { + let engine = setup_engine(); + + let result = engine + .eval::( + r#" + FIRST "" + "#, + ) + .unwrap(); + + assert_eq!(result, ""); + } + + #[test] + fn test_first_keyword_whitespace_only() { + let engine = setup_engine(); + + let result = engine + .eval::( + 
r#" + FIRST " " + "#, + ) + .unwrap(); + + assert_eq!(result, ""); + } + + #[test] + fn test_first_keyword_with_tabs() { + let engine = setup_engine(); + + let result = engine + .eval::( + r#" + FIRST " tab separated words" + "#, + ) + .unwrap(); + + assert_eq!(result, "tab"); + } + + #[test] + fn test_first_keyword_with_variable() { + let engine = setup_engine(); + + let result = engine + .eval::( + r#" + let text = "variable test"; + FIRST text + "#, + ) + .unwrap(); + + assert_eq!(result, "variable"); + } + + #[test] + fn test_first_keyword_with_expression() { + let engine = setup_engine(); + + let result = engine + .eval::( + r#" + FIRST "one two " + "three four" + "#, + ) + .unwrap(); + + assert_eq!(result, "one"); + } + + #[test] + fn test_first_keyword_mixed_whitespace() { + let engine = setup_engine(); + + let result = engine + .eval::( + r#" + FIRST " multiple spaces between words " + "#, + ) + .unwrap(); + + assert_eq!(result, "multiple"); + } + + #[test] + fn test_first_keyword_special_characters() { + let engine = setup_engine(); + + let result = engine + .eval::( + r#" + FIRST "hello-world example" + "#, + ) + .unwrap(); + + assert_eq!(result, "hello-world"); + } +} diff --git a/src/basic/keywords/for_next.rs b/src/basic/keywords/for_next.rs new file mode 100644 index 000000000..1fcdba9f7 --- /dev/null +++ b/src/basic/keywords/for_next.rs @@ -0,0 +1,83 @@ +use crate::shared::state::AppState; +use log::info; +use rhai::Dynamic; +use rhai::Engine; + +pub fn for_keyword(_state: &AppState, engine: &mut Engine) { + engine + .register_custom_syntax(&["EXIT", "FOR"], false, |_context, _inputs| { + Err("EXIT FOR".into()) + }) + .unwrap(); + + engine + .register_custom_syntax( + &[ + "FOR", "EACH", "$ident$", "IN", "$expr$", "$block$", "NEXT", "$ident$", + ], + true, // We're modifying the scope by adding the loop variable + |context, inputs| { + // Get the iterator variable names + let loop_var = inputs[0].get_string_value().unwrap(); + let next_var = 
inputs[3].get_string_value().unwrap(); + + // Verify variable names match + if loop_var != next_var { + return Err(format!( + "NEXT variable '{}' doesn't match FOR EACH variable '{}'", + next_var, loop_var + ) + .into()); + } + + // Evaluate the collection expression + let collection = context.eval_expression_tree(&inputs[1])?; + + // Debug: Print the collection type + info!("Collection type: {}", collection.type_name()); + let ccc = collection.clone(); + // Convert to array - with proper error handling + let array = match collection.into_array() { + Ok(arr) => arr, + Err(err) => { + return Err(format!( + "foreach expected array, got {}: {}", + ccc.type_name(), + err + ) + .into()); + } + }; + // Get the block as an expression tree + let block = &inputs[2]; + + // Remember original scope length + let orig_len = context.scope().len(); + + for item in array { + // Push the loop variable into the scope + context.scope_mut().push(loop_var, item); + + // Evaluate the block with the current scope + match context.eval_expression_tree(block) { + Ok(_) => (), + Err(e) if e.to_string() == "EXIT FOR" => { + context.scope_mut().rewind(orig_len); + break; + } + Err(e) => { + // Rewind the scope before returning error + context.scope_mut().rewind(orig_len); + return Err(e); + } + } + + // Remove the loop variable for next iteration + context.scope_mut().rewind(orig_len); + } + + Ok(Dynamic::UNIT) + }, + ) + .unwrap(); +} diff --git a/src/basic/keywords/format.rs b/src/basic/keywords/format.rs new file mode 100644 index 000000000..ca2617723 --- /dev/null +++ b/src/basic/keywords/format.rs @@ -0,0 +1,460 @@ +use rhai::{Dynamic, Engine}; +use chrono::{NaiveDateTime, Timelike, Datelike}; +use num_format::{Locale, ToFormattedString}; +use std::str::FromStr; + +pub fn format_keyword(engine: &mut Engine) { + engine + .register_custom_syntax(&["FORMAT", "$expr$", "$expr$"], false, { + move |context, inputs| { + let value_dyn = context.eval_expression_tree(&inputs[0])?; + let pattern_dyn 
= context.eval_expression_tree(&inputs[1])?; + + let value_str = value_dyn.to_string(); + let pattern = pattern_dyn.to_string(); + + // --- NUMÉRICO --- + if let Ok(num) = f64::from_str(&value_str) { + let formatted = if pattern.starts_with("N") || pattern.starts_with("C") { + // extrai partes: prefixo, casas decimais, locale + let (prefix, decimals, locale_tag) = parse_pattern(&pattern); + + let locale = get_locale(&locale_tag); + let symbol = if prefix == "C" { + get_currency_symbol(&locale_tag) + } else { + "" + }; + + let int_part = num.trunc() as i64; + let frac_part = num.fract(); + + if decimals == 0 { + format!("{}{}", symbol, int_part.to_formatted_string(&locale)) + } else { + let frac_scaled = + ((frac_part * 10f64.powi(decimals as i32)).round()) as i64; + format!( + "{}{}.{:0width$}", + symbol, + int_part.to_formatted_string(&locale), + frac_scaled, + width = decimals + ) + } + } else { + match pattern.as_str() { + "n" => format!("{:.2}", num), + "F" => format!("{:.2}", num), + "f" => format!("{}", num), + "0%" => format!("{:.0}%", num * 100.0), + _ => format!("{}", num), + } + }; + + return Ok(Dynamic::from(formatted)); + } + + // --- DATA --- + if let Ok(dt) = NaiveDateTime::parse_from_str(&value_str, "%Y-%m-%d %H:%M:%S") { + let formatted = apply_date_format(&dt, &pattern); + return Ok(Dynamic::from(formatted)); + } + + // --- TEXTO --- + let formatted = apply_text_placeholders(&value_str, &pattern); + Ok(Dynamic::from(formatted)) + } + }) + .unwrap(); +} + +// ====================== +// Extração de locale + precisão +// ====================== +fn parse_pattern(pattern: &str) -> (String, usize, String) { + let mut prefix = String::new(); + let mut decimals: usize = 2; // padrão 2 casas + let mut locale_tag = "en".to_string(); + + // ex: "C2[pt]" ou "N3[fr]" + if pattern.starts_with('C') { + prefix = "C".to_string(); + } else if pattern.starts_with('N') { + prefix = "N".to_string(); + } + + // procura número após prefixo + let rest = &pattern[1..]; + 
let mut num_part = String::new(); + for ch in rest.chars() { + if ch.is_ascii_digit() { + num_part.push(ch); + } else { + break; + } + } + if !num_part.is_empty() { + decimals = num_part.parse().unwrap_or(2); + } + + // procura locale entre colchetes + if let Some(start) = pattern.find('[') { + if let Some(end) = pattern.find(']') { + if end > start { + locale_tag = pattern[start + 1..end].to_string(); + } + } + } + + (prefix, decimals, locale_tag) +} + +fn get_locale(tag: &str) -> Locale { + match tag { + "en" => Locale::en, + "fr" => Locale::fr, + "de" => Locale::de, + "pt" => Locale::pt, + "it" => Locale::it, + "es" => Locale::es, + _ => Locale::en, + } +} + +fn get_currency_symbol(tag: &str) -> &'static str { + match tag { + "en" => "$", + "pt" => "R$ ", + "fr" | "de" | "es" | "it" => "€", + _ => "$", + } +} + +// ================== +// SUPORTE A DATAS +// ================== +fn apply_date_format(dt: &NaiveDateTime, pattern: &str) -> String { + let mut output = pattern.to_string(); + + let year = dt.year(); + let month = dt.month(); + let day = dt.day(); + let hour24 = dt.hour(); + let minute = dt.minute(); + let second = dt.second(); + let millis = dt.and_utc().timestamp_subsec_millis(); + + output = output.replace("yyyy", &format!("{:04}", year)); + output = output.replace("yy", &format!("{:02}", year % 100)); + output = output.replace("MM", &format!("{:02}", month)); + output = output.replace("M", &format!("{}", month)); + output = output.replace("dd", &format!("{:02}", day)); + output = output.replace("d", &format!("{}", day)); + + output = output.replace("HH", &format!("{:02}", hour24)); + output = output.replace("H", &format!("{}", hour24)); + + let mut hour12 = hour24 % 12; + if hour12 == 0 { hour12 = 12; } + output = output.replace("hh", &format!("{:02}", hour12)); + output = output.replace("h", &format!("{}", hour12)); + + output = output.replace("mm", &format!("{:02}", minute)); + output = output.replace("m", &format!("{}", minute)); + + output = 
output.replace("ss", &format!("{:02}", second)); + output = output.replace("s", &format!("{}", second)); + + output = output.replace("fff", &format!("{:03}", millis)); + + output = output.replace("tt", if hour24 < 12 { "AM" } else { "PM" }); + output = output.replace("t", if hour24 < 12 { "A" } else { "P" }); + + output +} + +// ================== +// SUPORTE A TEXTO +// ================== +fn apply_text_placeholders(value: &str, pattern: &str) -> String { + let mut result = String::new(); + + for ch in pattern.chars() { + match ch { + '@' => result.push_str(value), + '&' | '<' => result.push_str(&value.to_lowercase()), + '>' | '!' => result.push_str(&value.to_uppercase()), + _ => result.push(ch), // copia qualquer caractere literal + } + } + + result +} + +#[cfg(test)] +mod tests { + use super::*; + use rhai::Engine; + + fn create_engine() -> Engine { + let mut engine = Engine::new(); + format_keyword(&mut engine); + engine + } + + #[test] + fn test_numeric_formatting_basic() { + let engine = create_engine(); + + // Teste formatação básica + assert_eq!( + engine.eval::("FORMAT 1234.567 \"n\"").unwrap(), + "1234.57" + ); + assert_eq!( + engine.eval::("FORMAT 1234.5 \"F\"").unwrap(), + "1234.50" + ); + assert_eq!( + engine.eval::("FORMAT 1234.567 \"f\"").unwrap(), + "1234.567" + ); + assert_eq!( + engine.eval::("FORMAT 0.85 \"0%\"").unwrap(), + "85%" + ); + } + + #[test] + fn test_numeric_formatting_with_locale() { + let engine = create_engine(); + + // Teste formatação numérica com locale + assert_eq!( + engine.eval::("FORMAT 1234.56 \"N[en]\"").unwrap(), + "1,234.56" + ); + assert_eq!( + engine.eval::("FORMAT 1234.56 \"N[pt]\"").unwrap(), + "1.234,56" + ); + assert_eq!( + engine.eval::("FORMAT 1234.56 \"N[fr]\"").unwrap(), + "1 234,56" + ); + } + + #[test] + fn test_currency_formatting() { + let engine = create_engine(); + + // Teste formatação monetária + assert_eq!( + engine.eval::("FORMAT 1234.56 \"C[en]\"").unwrap(), + "$1,234.56" + ); + assert_eq!( + 
engine.eval::("FORMAT 1234.56 \"C[pt]\"").unwrap(), + "R$ 1.234,56" + ); + assert_eq!( + engine.eval::("FORMAT 1234.56 \"C[fr]\"").unwrap(), + "€1 234,56" + ); + } + + #[test] + fn test_numeric_decimals_precision() { + let engine = create_engine(); + + // Teste precisão decimal + assert_eq!( + engine.eval::("FORMAT 1234.5678 \"N0[en]\"").unwrap(), + "1,235" + ); + assert_eq!( + engine.eval::("FORMAT 1234.5678 \"N1[en]\"").unwrap(), + "1,234.6" + ); + assert_eq!( + engine.eval::("FORMAT 1234.5678 \"N3[en]\"").unwrap(), + "1,234.568" + ); + assert_eq!( + engine.eval::("FORMAT 1234.5 \"C0[en]\"").unwrap(), + "$1,235" + ); + } + + #[test] + fn test_date_formatting() { + let engine = create_engine(); + + // Teste formatação de datas + let result = engine.eval::("FORMAT \"2024-03-15 14:30:25\" \"yyyy-MM-dd HH:mm:ss\"").unwrap(); + assert_eq!(result, "2024-03-15 14:30:25"); + + let result = engine.eval::("FORMAT \"2024-03-15 14:30:25\" \"dd/MM/yyyy\"").unwrap(); + assert_eq!(result, "15/03/2024"); + + let result = engine.eval::("FORMAT \"2024-03-15 14:30:25\" \"MM/dd/yy\"").unwrap(); + assert_eq!(result, "03/15/24"); + + let result = engine.eval::("FORMAT \"2024-03-15 14:30:25\" \"HH:mm\"").unwrap(); + assert_eq!(result, "14:30"); + } + + #[test] + fn test_date_formatting_12h() { + let engine = create_engine(); + + // Teste formato 12h + let result = engine.eval::("FORMAT \"2024-03-15 14:30:25\" \"hh:mm tt\"").unwrap(); + assert_eq!(result, "02:30 PM"); + + let result = engine.eval::("FORMAT \"2024-03-15 09:30:25\" \"hh:mm tt\"").unwrap(); + assert_eq!(result, "09:30 AM"); + + let result = engine.eval::("FORMAT \"2024-03-15 00:30:25\" \"h:mm t\"").unwrap(); + assert_eq!(result, "12:30 A"); + } + + #[test] + fn test_text_formatting() { + let engine = create_engine(); + + // Teste formatação de texto + assert_eq!( + engine.eval::("FORMAT \"hello\" \"Prefix: @\"").unwrap(), + "Prefix: hello" + ); + assert_eq!( + engine.eval::("FORMAT \"HELLO\" \"Result: &!\"").unwrap(), + 
"Result: hello!" + ); + assert_eq!( + engine.eval::("FORMAT \"hello\" \"RESULT: >\"").unwrap(), + "RESULT: HELLO" + ); + assert_eq!( + engine.eval::("FORMAT \"Hello\" \"<>\"").unwrap(), + "hello>" + ); + } + + #[test] + fn test_mixed_patterns() { + let engine = create_engine(); + + // Teste padrões mistos + assert_eq!( + engine.eval::("FORMAT \"hello\" \"@ World!\"").unwrap(), + "hello World!" + ); + assert_eq!( + engine.eval::("FORMAT \"test\" \"< & > ! @\"").unwrap(), + "test test TEST ! test" + ); + } + + #[test] + fn test_edge_cases() { + let engine = create_engine(); + + // Teste casos extremos + assert_eq!( + engine.eval::("FORMAT 0 \"n\"").unwrap(), + "0.00" + ); + assert_eq!( + engine.eval::("FORMAT -1234.56 \"N[en]\"").unwrap(), + "-1,234.56" + ); + assert_eq!( + engine.eval::("FORMAT \"\" \"@\"").unwrap(), + "" + ); + assert_eq!( + engine.eval::("FORMAT \"test\" \"\"").unwrap(), + "" + ); + } + + #[test] + fn test_invalid_patterns_fallback() { + let engine = create_engine(); + + // Teste padrões inválidos (devem fallback para string) + assert_eq!( + engine.eval::("FORMAT 123.45 \"invalid\"").unwrap(), + "123.45" + ); + assert_eq!( + engine.eval::("FORMAT \"text\" \"unknown\"").unwrap(), + "unknown" + ); + } + + #[test] + fn test_milliseconds_formatting() { + let engine = create_engine(); + + // Teste milissegundos + let result = engine.eval::("FORMAT \"2024-03-15 14:30:25.123\" \"HH:mm:ss.fff\"").unwrap(); + assert_eq!(result, "14:30:25.123"); + } + + #[test] + fn test_parse_pattern_function() { + // Teste direto da função parse_pattern + assert_eq!(parse_pattern("C[en]"), ("C".to_string(), 2, "en".to_string())); + assert_eq!(parse_pattern("N3[pt]"), ("N".to_string(), 3, "pt".to_string())); + assert_eq!(parse_pattern("C0[fr]"), ("C".to_string(), 0, "fr".to_string())); + assert_eq!(parse_pattern("N"), ("N".to_string(), 2, "en".to_string())); + assert_eq!(parse_pattern("C2"), ("C".to_string(), 2, "en".to_string())); + } + + #[test] + fn 
test_locale_functions() { + // Teste funções de locale + assert!(matches!(get_locale("en"), Locale::en)); + assert!(matches!(get_locale("pt"), Locale::pt)); + assert!(matches!(get_locale("fr"), Locale::fr)); + assert!(matches!(get_locale("invalid"), Locale::en)); // fallback + + assert_eq!(get_currency_symbol("en"), "$"); + assert_eq!(get_currency_symbol("pt"), "R$ "); + assert_eq!(get_currency_symbol("fr"), "€"); + assert_eq!(get_currency_symbol("invalid"), "$"); // fallback + } + + #[test] + fn test_apply_text_placeholders() { + // Teste direto da função apply_text_placeholders + assert_eq!(apply_text_placeholders("Hello", "@"), "Hello"); + assert_eq!(apply_text_placeholders("Hello", "&"), "hello"); + assert_eq!(apply_text_placeholders("Hello", ">"), "HELLO"); + assert_eq!(apply_text_placeholders("Hello", "Prefix: @!"), "Prefix: Hello!"); + assert_eq!(apply_text_placeholders("Hello", "<>"), "hello>"); + } + + #[test] + fn test_expression_parameters() { + let engine = create_engine(); + + // Teste com expressões como parâmetros + assert_eq!( + engine.eval::("let x = 1000.50; FORMAT x \"N[en]\"").unwrap(), + "1,000.50" + ); + assert_eq!( + engine.eval::("FORMAT (500 + 500) \"n\"").unwrap(), + "1000.00" + ); + assert_eq!( + engine.eval::("let pattern = \"@ World\"; FORMAT \"Hello\" pattern").unwrap(), + "Hello World" + ); + } +} \ No newline at end of file diff --git a/src/basic/keywords/get.rs b/src/basic/keywords/get.rs new file mode 100644 index 000000000..c0b077026 --- /dev/null +++ b/src/basic/keywords/get.rs @@ -0,0 +1,97 @@ +use log::info; + +use crate::shared::state::AppState; +use reqwest::{self, Client}; +use rhai::{Dynamic, Engine}; +use scraper::{Html, Selector}; +use std::error::Error; + +pub fn get_keyword(_state: &AppState, engine: &mut Engine) { + let _ = engine.register_custom_syntax( + &["GET", "$expr$"], + false, // Expression, not statement + move |context, inputs| { + let url = context.eval_expression_tree(&inputs[0])?; + let url_str = 
url.to_string(); + + // Prevent path traversal attacks + if url_str.contains("..") { + return Err("URL contains invalid path traversal sequences like '..'.".into()); + } + + let modified_url = if url_str.starts_with("/") { + let work_root = std::env::var("WORK_ROOT").unwrap_or_else(|_| "./work".to_string()); + let full_path = std::path::Path::new(&work_root) + .join(url_str.trim_start_matches('/')) + .to_string_lossy() + .into_owned(); + + let base_url = "file://"; + format!("{}{}", base_url, full_path) + } else { + url_str.to_string() + }; + + if modified_url.starts_with("https://") { + info!("HTTPS GET request: {}", modified_url); + + let fut = execute_get(&modified_url); + let result = + tokio::task::block_in_place(|| tokio::runtime::Handle::current().block_on(fut)) + .map_err(|e| format!("HTTP request failed: {}", e))?; + + Ok(Dynamic::from(result)) + } else if modified_url.starts_with("file://") { + // Handle file:// URLs + let file_path = modified_url.trim_start_matches("file://"); + match std::fs::read_to_string(file_path) { + Ok(content) => Ok(Dynamic::from(content)), + Err(e) => Err(format!("Failed to read file: {}", e).into()), + } + } else { + Err( + format!("GET request failed: URL must begin with 'https://' or 'file://'") + .into(), + ) + } + }, + ); +} + +pub async fn execute_get(url: &str) -> Result> { + info!("Starting execute_get with URL: {}", url); + + // Create a client that ignores invalid certificates + let client = Client::builder() + .danger_accept_invalid_certs(true) + .build()?; + + let response = client.get(url).send().await?; + let html_content = response.text().await?; + + // Parse HTML and extract text only if it appears to be HTML + if html_content.trim_start().starts_with(">() + .join(" "); + + // Clean up the text + let cleaned_text = text_content + .replace('\n', " ") + .replace('\t', " ") + .split_whitespace() + .collect::>() + .join(" "); + + Ok(cleaned_text) + } else { + Ok(html_content) // Return plain content as is if not HTML 
+ } +} diff --git a/src/basic/keywords/get_website.rs b/src/basic/keywords/get_website.rs new file mode 100644 index 000000000..022380832 --- /dev/null +++ b/src/basic/keywords/get_website.rs @@ -0,0 +1,133 @@ +use crate::{state::AppState, web_automation::BrowserPool}; +use log::info; +use rhai::{Dynamic, Engine}; +use std::error::Error; +use std::sync::Arc; +use std::time::Duration; +use thirtyfour::{By, WebDriver}; +use tokio::time::sleep; + +pub fn get_website_keyword(state: &AppState, engine: &mut Engine) { + let browser_pool = state.browser_pool.clone(); // Assuming AppState has browser_pool field + + engine + .register_custom_syntax( + &["WEBSITE", "OF", "$expr$"], + false, + move |context, inputs| { + let search_term = context.eval_expression_tree(&inputs[0])?.to_string(); + + info!("GET WEBSITE executed - Search: '{}'", search_term); + + let browser_pool_clone = browser_pool.clone(); + let fut = execute_headless_browser_search(browser_pool_clone, &search_term); + + let result = + tokio::task::block_in_place(|| tokio::runtime::Handle::current().block_on(fut)) + .map_err(|e| format!("Headless browser search failed: {}", e))?; + + Ok(Dynamic::from(result)) + }, + ) + .unwrap(); +} + +pub async fn execute_headless_browser_search( + browser_pool: Arc, // Adjust path as needed + search_term: &str, +) -> Result> { + info!("Starting headless browser search: '{}' ", search_term); + + let search_term = search_term.to_string(); + + let result = browser_pool + .with_browser(|driver| Box::pin(async move { perform_search(driver, &search_term).await })) + .await?; + + Ok(result) +} +async fn perform_search( + driver: WebDriver, + search_term: &str, +) -> Result> { + // Navigate to DuckDuckGo + driver.goto("https://duckduckgo.com").await?; + + // Wait for search box and type query + let search_input = driver.find(By::Id("searchbox_input")).await?; + search_input.click().await?; + search_input.send_keys(search_term).await?; + + // Submit search by pressing Enter + 
search_input.send_keys("\n").await?; + + // Wait for results to load - using a modern result selector + driver.find(By::Css("[data-testid='result']")).await?; + sleep(Duration::from_millis(2000)).await; + + // Extract results + let results = extract_search_results(&driver).await?; + driver.close_window().await?; + + if !results.is_empty() { + Ok(results[0].clone()) + } else { + Ok("No results found".to_string()) + } +} + +async fn extract_search_results( + driver: &WebDriver, +) -> Result, Box> { + let mut results = Vec::new(); + + // Try different selectors for search results, ordered by most specific to most general + let selectors = [ + // Modern DuckDuckGo (as seen in the HTML) + "a[data-testid='result-title-a']", // Primary result links + "a[data-testid='result-extras-url-link']", // URL links in results + "a.eVNpHGjtxRBq_gLOfGDr", // Class-based selector for result titles + "a.Rn_JXVtoPVAFyGkcaXyK", // Class-based selector for URL links + ".ikg2IXiCD14iVX7AdZo1 a", // Heading container links + ".OQ_6vPwNhCeusNiEDcGp a", // URL container links + // Fallback selectors + ".result__a", // Classic DuckDuckGo + "a.result-link", // Alternative + ".result a[href]", // Generic result links + ]; + + for selector in &selectors { + if let Ok(elements) = driver.find_all(By::Css(selector)).await { + for element in elements { + if let Ok(Some(href)) = element.attr("href").await { + // Filter out internal and non-http links + if href.starts_with("http") + && !href.contains("duckduckgo.com") + && !href.contains("duck.co") + && !results.contains(&href) + { + // Get the display URL for verification + let display_url = if let Ok(text) = element.text().await { + text.trim().to_string() + } else { + String::new() + }; + + // Only add if it looks like a real result (not an ad or internal link) + if !display_url.is_empty() && !display_url.contains("Ad") { + results.push(href); + } + } + } + } + if !results.is_empty() { + break; + } + } + } + + // Deduplicate results + 
results.dedup(); + + Ok(results) +} diff --git a/src/basic/keywords/last.rs b/src/basic/keywords/last.rs new file mode 100644 index 000000000..010f02266 --- /dev/null +++ b/src/basic/keywords/last.rs @@ -0,0 +1,250 @@ +use rhai::Dynamic; +use rhai::Engine; + +pub fn last_keyword(engine: &mut Engine) { + engine + .register_custom_syntax(&["LAST", "(", "$expr$", ")"], false, { + move |context, inputs| { + let input_string = context.eval_expression_tree(&inputs[0])?; + let input_str = input_string.to_string(); + + // Extrai a última palavra dividindo por espaço + let last_word = input_str + .split_whitespace() + .last() + .unwrap_or("") + .to_string(); + + Ok(Dynamic::from(last_word)) + } + }) + .unwrap(); +} + +#[cfg(test)] +mod tests { + use super::*; + use rhai::{Engine, Scope}; + + #[test] + fn test_last_keyword_basic() { + let mut engine = Engine::new(); + last_keyword(&mut engine); + + let result: String = engine.eval("LAST(\"hello world\")").unwrap(); + assert_eq!(result, "world"); + } + + #[test] + fn test_last_keyword_single_word() { + let mut engine = Engine::new(); + last_keyword(&mut engine); + + let result: String = engine.eval("LAST(\"hello\")").unwrap(); + assert_eq!(result, "hello"); + } + + #[test] + fn test_last_keyword_empty_string() { + let mut engine = Engine::new(); + last_keyword(&mut engine); + + let result: String = engine.eval("LAST(\"\")").unwrap(); + assert_eq!(result, ""); + } + + #[test] + fn test_last_keyword_multiple_spaces() { + let mut engine = Engine::new(); + last_keyword(&mut engine); + + let result: String = engine.eval("LAST(\"hello world \")").unwrap(); + assert_eq!(result, "world"); + } + + #[test] + fn test_last_keyword_tabs_and_newlines() { + let mut engine = Engine::new(); + last_keyword(&mut engine); + + let result: String = engine.eval("LAST(\"hello\tworld\n\")").unwrap(); + assert_eq!(result, "world"); + } + + #[test] + fn test_last_keyword_with_variable() { + let mut engine = Engine::new(); + last_keyword(&mut engine); + 
let mut scope = Scope::new(); + + scope.push("text", "this is a test"); + let result: String = engine.eval_with_scope(&mut scope, "LAST(text)").unwrap(); + + assert_eq!(result, "test"); + } + + #[test] + fn test_last_keyword_whitespace_only() { + let mut engine = Engine::new(); + last_keyword(&mut engine); + + let result: String = engine.eval("LAST(\" \")").unwrap(); + assert_eq!(result, ""); + } + + #[test] + fn test_last_keyword_mixed_whitespace() { + let mut engine = Engine::new(); + last_keyword(&mut engine); + + let result: String = engine.eval("LAST(\"hello\t \n world \t final\")").unwrap(); + assert_eq!(result, "final"); + } + + #[test] + fn test_last_keyword_expression() { + let mut engine = Engine::new(); + last_keyword(&mut engine); + + // Test with string concatenation + let result: String = engine.eval("LAST(\"hello\" + \" \" + \"world\")").unwrap(); + assert_eq!(result, "world"); + } + + #[test] + fn test_last_keyword_unicode() { + let mut engine = Engine::new(); + last_keyword(&mut engine); + + let result: String = engine.eval("LAST(\"hello 世界 мир world\")").unwrap(); + assert_eq!(result, "world"); + } + + #[test] + fn test_last_keyword_in_expression() { + let mut engine = Engine::new(); + last_keyword(&mut engine); + + // Test using the result in another expression + let result: bool = engine.eval("LAST(\"hello world\") == \"world\"").unwrap(); + assert!(result); + } + + #[test] + fn test_last_keyword_complex_scenario() { + let mut engine = Engine::new(); + last_keyword(&mut engine); + let mut scope = Scope::new(); + + scope.push("sentence", "The quick brown fox jumps over the lazy dog"); + let result: String = engine.eval_with_scope(&mut scope, "LAST(sentence)").unwrap(); + + assert_eq!(result, "dog"); + } + + #[test] + #[should_panic] // This should fail because the syntax expects parentheses + fn test_last_keyword_missing_parentheses() { + let mut engine = Engine::new(); + last_keyword(&mut engine); + + // This should fail - missing parentheses + 
let _: String = engine.eval("LAST \"hello world\"").unwrap(); + } + + #[test] + #[should_panic] // This should fail because of incomplete syntax + fn test_last_keyword_missing_closing_parenthesis() { + let mut engine = Engine::new(); + last_keyword(&mut engine); + + // This should fail - missing closing parenthesis + let _: String = engine.eval("LAST(\"hello world\"").unwrap(); + } + + #[test] + #[should_panic] // This should fail because of incomplete syntax + fn test_last_keyword_missing_opening_parenthesis() { + let mut engine = Engine::new(); + last_keyword(&mut engine); + + // This should fail - missing opening parenthesis + let _: String = engine.eval("LAST \"hello world\")").unwrap(); + } + + #[test] + fn test_last_keyword_dynamic_type() { + let mut engine = Engine::new(); + last_keyword(&mut engine); + + // Test that the function returns the correct Dynamic type + let result = engine.eval::("LAST(\"test string\")").unwrap(); + assert!(result.is::()); + assert_eq!(result.to_string(), "string"); + } + + #[test] + fn test_last_keyword_nested_expression() { + let mut engine = Engine::new(); + last_keyword(&mut engine); + + // Test with a more complex nested expression + let result: String = engine.eval("LAST(\"The result is: \" + \"hello world\")").unwrap(); + assert_eq!(result, "world"); + } +} + +#[cfg(test)] +mod integration_tests { + use super::*; + + #[test] + fn test_last_keyword_in_script() { + let mut engine = Engine::new(); + last_keyword(&mut engine); + + let script = r#" + let sentence1 = "first second third"; + let sentence2 = "alpha beta gamma"; + + let last1 = LAST(sentence1); + let last2 = LAST(sentence2); + + last1 + " and " + last2 + "#; + + let result: String = engine.eval(script).unwrap(); + assert_eq!(result, "third and gamma"); + } + + #[test] + fn test_last_keyword_with_function() { + let mut engine = Engine::new(); + last_keyword(&mut engine); + + // Register a function that returns a string + engine.register_fn("get_name", || -> String { 
"john doe".to_string() }); + + let result: String = engine.eval("LAST(get_name())").unwrap(); + assert_eq!(result, "doe"); + } + + #[test] + fn test_last_keyword_multiple_calls() { + let mut engine = Engine::new(); + last_keyword(&mut engine); + + let script = r#" + let text1 = "apple banana cherry"; + let text2 = "cat dog elephant"; + + let result1 = LAST(text1); + let result2 = LAST(text2); + + result1 + "-" + result2 + "#; + + let result: String = engine.eval(script).unwrap(); + assert_eq!(result, "cherry-elephant"); + } +} \ No newline at end of file diff --git a/src/basic/keywords/llm_keyword.rs b/src/basic/keywords/llm_keyword.rs new file mode 100644 index 000000000..e4f06f10b --- /dev/null +++ b/src/basic/keywords/llm_keyword.rs @@ -0,0 +1,30 @@ +use log::info; + +use crate::{shared::state::AppState, utils::call_llm}; +use rhai::{Dynamic, Engine}; + +pub fn llm_keyword(state: &AppState, engine: &mut Engine) { + let ai_config = state.config.clone().unwrap().ai.clone(); + + engine + .register_custom_syntax( + &["LLM", "$expr$"], // Syntax: LLM "text to process" + false, // Expression, not statement + move |context, inputs| { + let text = context.eval_expression_tree(&inputs[0])?; + let text_str = text.to_string(); + + info!("LLM processing text: {}", text_str); + + // Use the same pattern as GET + + let fut = call_llm(&text_str, &ai_config); + let result = + tokio::task::block_in_place(|| tokio::runtime::Handle::current().block_on(fut)) + .map_err(|e| format!("LLM call failed: {}", e))?; + + Ok(Dynamic::from(result)) + }, + ) + .unwrap(); +} diff --git a/src/basic/keywords/mod.rs b/src/basic/keywords/mod.rs new file mode 100644 index 000000000..5155810ce --- /dev/null +++ b/src/basic/keywords/mod.rs @@ -0,0 +1,15 @@ +pub mod create_draft; +pub mod create_site; +pub mod find; +pub mod first; +pub mod last; +pub mod format; +pub mod for_next; +pub mod get; +pub mod get_website; +pub mod llm_keyword; +pub mod on; +pub mod print; +pub mod set; +pub mod 
set_schedule; +pub mod wait; diff --git a/src/basic/keywords/on.rs b/src/basic/keywords/on.rs new file mode 100644 index 000000000..9d07afb0e --- /dev/null +++ b/src/basic/keywords/on.rs @@ -0,0 +1,86 @@ +use log::{error, info}; +use rhai::Dynamic; +use rhai::Engine; +use serde_json::{json, Value}; +use sqlx::PgPool; + +use crate::shared::models::automation_model::TriggerKind; +use crate::shared::state::AppState; + +pub fn on_keyword(state: &AppState, engine: &mut Engine) { + let db = state.db_custom.clone(); + + engine + .register_custom_syntax( + ["ON", "$ident$", "OF", "$string$"], // Changed $string$ to $ident$ for operation + true, + { + let db = db.clone(); + + move |context, inputs| { + let trigger_type = context.eval_expression_tree(&inputs[0])?.to_string(); + let table = context.eval_expression_tree(&inputs[1])?.to_string(); + let script_name = format!("{}_{}.rhai", table, trigger_type.to_lowercase()); + + // Determine the trigger kind based on the trigger type + let kind = match trigger_type.to_uppercase().as_str() { + "UPDATE" => TriggerKind::TableUpdate, + "INSERT" => TriggerKind::TableInsert, + "DELETE" => TriggerKind::TableDelete, + _ => return Err(format!("Invalid trigger type: {}", trigger_type).into()), + }; + + let binding = db.as_ref().unwrap(); + let fut = execute_on_trigger(binding, kind, &table, &script_name); + + let result = tokio::task::block_in_place(|| { + tokio::runtime::Handle::current().block_on(fut) + }) + .map_err(|e| format!("DB error: {}", e))?; + + if let Some(rows_affected) = result.get("rows_affected") { + Ok(Dynamic::from(rows_affected.as_i64().unwrap_or(0))) + } else { + Err("No rows affected".into()) + } + } + }, + ) + .unwrap(); +} + +pub async fn execute_on_trigger( + pool: &PgPool, + kind: TriggerKind, + table: &str, + script_name: &str, +) -> Result { + info!( + "Starting execute_on_trigger with kind: {:?}, table: {}, script_name: {}", + kind, table, script_name + ); + + // Option 1: Use query_with macro if you need to 
pass enum values + let result = sqlx::query( + "INSERT INTO system_automations + (kind, target, script_name) + VALUES ($1, $2, $3)", + ) + .bind(kind.clone() as i32) // Assuming TriggerKind is #[repr(i32)] + .bind(table) + .bind(script_name) + .execute(pool) + .await + .map_err(|e| { + error!("SQL execution error: {}", e); + e.to_string() + })?; + + Ok(json!({ + "command": "on_trigger", + "trigger_type": format!("{:?}", kind), + "table": table, + "script_name": script_name, + "rows_affected": result.rows_affected() + })) +} diff --git a/src/basic/keywords/print.rs b/src/basic/keywords/print.rs new file mode 100644 index 000000000..162482237 --- /dev/null +++ b/src/basic/keywords/print.rs @@ -0,0 +1,20 @@ +use log::info; +use rhai::Dynamic; +use rhai::Engine; + +use crate::shared::state::AppState; + +pub fn print_keyword(_state: &AppState, engine: &mut Engine) { + // PRINT command + engine + .register_custom_syntax( + &["PRINT", "$expr$"], + true, // Statement + |context, inputs| { + let value = context.eval_expression_tree(&inputs[0])?; + info!("{}", value); + Ok(Dynamic::UNIT) + }, + ) + .unwrap(); +} diff --git a/src/basic/keywords/prompt.md b/src/basic/keywords/prompt.md new file mode 100644 index 000000000..e6feac8c9 --- /dev/null +++ b/src/basic/keywords/prompt.md @@ -0,0 +1,150 @@ + +Create a new Rhai custom keyword implementation with these specifications: + +1. DATABASE REQUIREMENTS: +- No enums in database schema (only in Rust code) +- Use direct integer values for enum variants in queries +- Follow existing connection pooling pattern with AppState +- Include proper error handling and logging + +2. RUST IMPLEMENTATION: +- Enum definition (Rust-only, no DB enum): +```rust +#[repr(i32)] +pub enum KeywordAction { + Action1 = 0, + Action2 = 1, + Action3 = 2 +} +``` + +3. 
KEYWORD TEMPLATE: +```rust +pub fn {keyword_name}_keyword(state: &AppState, engine: &mut Engine) { + let db = state.db_custom.clone(); + + engine.register_custom_syntax( + {syntax_pattern}, + {is_raw}, + { + let db = db.clone(); + move |context, inputs| { + // Input processing + {input_processing} + + let binding = db.as_ref().unwrap(); + let fut = execute_{keyword_name}(binding, {params}); + + let result = tokio::task::block_in_place(|| + tokio::runtime::Handle::current().block_on(fut)) + .map_err(|e| format!("DB error: {}", e))?; + + {result_handling} + } + } + ).unwrap(); +} + +pub async fn execute_{keyword_name}( + pool: &PgPool, + {params_with_types} +) -> Result> { + info!("Executing {keyword_name} with: {debug_params}"); + + let result = sqlx::query( + "{sql_query_with_i32_enum}" + ) + .bind({enum_value} as i32) + {additional_binds} + .execute(pool) + .await?; + + Ok(json!({ + "command": "{keyword_name}", + {result_fields} + "rows_affected": result.rows_affected() + })) +} +``` + +4. 
EXAMPLE IMPLEMENTATION (SET SCHEDULE): +```rust +// Enum (Rust-only) +#[repr(i32)] +pub enum TriggerKind { + Scheduled = 0, + TableUpdate = 1, + TableInsert = 2, + TableDelete = 3 +} + +// Keyword implementation +pub fn set_schedule_keyword(state: &AppState, engine: &mut Engine) { + let db = state.db_custom.clone(); + + engine.register_custom_syntax( + ["SET", "SCHEDULE", "$string$"], + true, + { + let db = db.clone(); + move |context, inputs| { + let cron = context.eval_expression_tree(&inputs[0])?.to_string(); + let script_name = format!("cron_{}.rhai", cron.replace(' ', "_")); + + let binding = db.as_ref().unwrap(); + let fut = execute_set_schedule(binding, &cron, &script_name); + + let result = tokio::task::block_in_place(|| + tokio::runtime::Handle::current().block_on(fut)) + .map_err(|e| format!("DB error: {}", e))?; + + if let Some(rows_affected) = result.get("rows_affected") { + Ok(Dynamic::from(rows_affected.as_i64().unwrap_or(0))) + } else { + Err("No rows affected".into()) + } + } + } + ).unwrap(); +} + +pub async fn execute_set_schedule( + pool: &PgPool, + cron: &str, + script_name: &str, +) -> Result> { + info!("Executing schedule: {}, {}", cron, script_name); + + let result = sqlx::query( + "INSERT INTO system_automations + (kind, schedule, script_name) + VALUES ($1, $2, $3)" + ) + .bind(TriggerKind::Scheduled as i32) + .bind(cron) + .bind(script_name) + .execute(pool) + .await?; + + Ok(json!({ + "command": "set_schedule", + "schedule": cron, + "script_name": script_name, + "rows_affected": result.rows_affected() + })) +} +``` + +5. ADDITIONAL REQUIREMENTS: +- Maintain consistent tokio runtime handling +- Include parameter validation +- Follow existing JSON response format +- Ensure proper script name generation +- Include debug logging for all operations + +6. OUTPUT FORMAT: +Provide complete implementation with: +1. Rust enum definition +2. Keyword registration function +3. Execution function +4. 
Example usage in Rhai diff --git a/src/basic/keywords/set.rs b/src/basic/keywords/set.rs new file mode 100644 index 000000000..20900afc7 --- /dev/null +++ b/src/basic/keywords/set.rs @@ -0,0 +1,141 @@ +use log::{error, info}; +use rhai::Dynamic; +use rhai::Engine; +use serde_json::{json, Value}; +use sqlx::PgPool; +use std::error::Error; + +use crate::shared::state::AppState; +use crate::shared::utils; + +pub fn set_keyword(state: &AppState, engine: &mut Engine) { + let db = state.db_custom.clone(); + + engine + .register_custom_syntax(&["SET", "$expr$", ",", "$expr$", ",", "$expr$"], false, { + let db = db.clone(); + + move |context, inputs| { + let table_name = context.eval_expression_tree(&inputs[0])?; + let filter = context.eval_expression_tree(&inputs[1])?; + let updates = context.eval_expression_tree(&inputs[2])?; + let binding = db.as_ref().unwrap(); + + // Use the current async context instead of creating a new runtime + let binding2 = table_name.to_string(); + let binding3 = filter.to_string(); + let binding4 = updates.to_string(); + let fut = execute_set(binding, &binding2, &binding3, &binding4); + + // Use tokio::task::block_in_place + tokio::runtime::Handle::current().block_on + let result = + tokio::task::block_in_place(|| tokio::runtime::Handle::current().block_on(fut)) + .map_err(|e| format!("DB error: {}", e))?; + + if let Some(rows_affected) = result.get("rows_affected") { + Ok(Dynamic::from(rows_affected.as_i64().unwrap_or(0))) + } else { + Err("No rows affected".into()) + } + } + }) + .unwrap(); +} + +pub async fn execute_set( + pool: &PgPool, + table_str: &str, + filter_str: &str, + updates_str: &str, +) -> Result { + info!( + "Starting execute_set with table: {}, filter: {}, updates: {}", + table_str, filter_str, updates_str + ); + + // Parse updates with proper type handling + let (set_clause, update_values) = parse_updates(updates_str).map_err(|e| e.to_string())?; + let update_params_count = update_values.len(); + + // Parse filter with 
proper type handling + let (where_clause, filter_values) = + utils::parse_filter_with_offset(filter_str, update_params_count) + .map_err(|e| e.to_string())?; + + let query = format!( + "UPDATE {} SET {} WHERE {}", + table_str, set_clause, where_clause + ); + info!("Executing query: {}", query); + + // Build query with proper parameter binding + let mut query = sqlx::query(&query); + + // Bind update values + for value in update_values { + query = bind_value(query, value); + } + + // Bind filter values + for value in filter_values { + query = bind_value(query, value); + } + + let result = query.execute(pool).await.map_err(|e| { + error!("SQL execution error: {}", e); + e.to_string() + })?; + + Ok(json!({ + "command": "set", + "table": table_str, + "filter": filter_str, + "updates": updates_str, + "rows_affected": result.rows_affected() + })) +} + +fn bind_value<'q>( + query: sqlx::query::Query<'q, sqlx::Postgres, sqlx::postgres::PgArguments>, + value: String, +) -> sqlx::query::Query<'q, sqlx::Postgres, sqlx::postgres::PgArguments> { + if let Ok(int_val) = value.parse::() { + query.bind(int_val) + } else if let Ok(float_val) = value.parse::() { + query.bind(float_val) + } else if value.eq_ignore_ascii_case("true") { + query.bind(true) + } else if value.eq_ignore_ascii_case("false") { + query.bind(false) + } else { + query.bind(value) + } +} + +// Parse updates without adding quotes +fn parse_updates(updates_str: &str) -> Result<(String, Vec), Box> { + let mut set_clauses = Vec::new(); + let mut params = Vec::new(); + + for (i, update) in updates_str.split(',').enumerate() { + let parts: Vec<&str> = update.split('=').collect(); + if parts.len() != 2 { + return Err("Invalid update format".into()); + } + + let column = parts[0].trim(); + let value = parts[1].trim(); + + if !column + .chars() + .all(|c| c.is_ascii_alphanumeric() || c == '_') + { + return Err("Invalid column name".into()); + } + + set_clauses.push(format!("{} = ${}", column, i + 1)); + 
params.push(value.to_string()); // Store raw value without quotes + } + + Ok((set_clauses.join(", "), params)) +} diff --git a/src/basic/keywords/set_schedule.rs b/src/basic/keywords/set_schedule.rs new file mode 100644 index 000000000..710d8a454 --- /dev/null +++ b/src/basic/keywords/set_schedule.rs @@ -0,0 +1,67 @@ +use log::info; +use rhai::Dynamic; +use rhai::Engine; +use serde_json::{json, Value}; +use sqlx::PgPool; + +use crate::shared::models::automation_model::TriggerKind; +use crate::shared::state::AppState; + +pub fn set_schedule_keyword(state: &AppState, engine: &mut Engine) { + let db = state.db_custom.clone(); + + engine + .register_custom_syntax(["SET_SCHEDULE", "$string$"], true, { + let db = db.clone(); + + move |context, inputs| { + let cron = context.eval_expression_tree(&inputs[0])?.to_string(); + let script_name = format!("cron_{}.rhai", cron.replace(' ', "_")); + + let binding = db.as_ref().unwrap(); + let fut = execute_set_schedule(binding, &cron, &script_name); + + let result = + tokio::task::block_in_place(|| tokio::runtime::Handle::current().block_on(fut)) + .map_err(|e| format!("DB error: {}", e))?; + + if let Some(rows_affected) = result.get("rows_affected") { + Ok(Dynamic::from(rows_affected.as_i64().unwrap_or(0))) + } else { + Err("No rows affected".into()) + } + } + }) + .unwrap(); +} + +pub async fn execute_set_schedule( + pool: &PgPool, + cron: &str, + script_name: &str, +) -> Result> { + info!( + "Starting execute_set_schedule with cron: {}, script_name: {}", + cron, script_name + ); + + let result = sqlx::query( + r#" + INSERT INTO system_automations + (kind, schedule, script_name) + VALUES ($1, $2, $3) + "#, + ) + .bind(TriggerKind::Scheduled as i32) // Cast to i32 + .bind(cron) + .bind(script_name) + .execute(pool) + .await?; + + Ok(json!({ + "command": "set_schedule", + "schedule": cron, + "script_name": script_name, + "rows_affected": result.rows_affected() + })) +} diff --git a/src/basic/keywords/wait.rs 
b/src/basic/keywords/wait.rs new file mode 100644 index 000000000..c84c30100 --- /dev/null +++ b/src/basic/keywords/wait.rs @@ -0,0 +1,46 @@ +use crate::shared::state::AppState; +use log::info; +use rhai::{Dynamic, Engine}; +use std::thread; +use std::time::Duration; + +pub fn wait_keyword(_state: &AppState, engine: &mut Engine) { + engine + .register_custom_syntax( + &["WAIT", "$expr$"], + false, // Expression, not statement + move |context, inputs| { + let seconds = context.eval_expression_tree(&inputs[0])?; + + // Convert to number (handle both int and float) + let duration_secs = if seconds.is::() { + seconds.cast::() as f64 + } else if seconds.is::() { + seconds.cast::() + } else { + return Err(format!("WAIT expects a number, got: {}", seconds).into()); + }; + + if duration_secs < 0.0 { + return Err("WAIT duration cannot be negative".into()); + } + + // Cap maximum wait time to prevent abuse (e.g., 5 minutes max) + let capped_duration = if duration_secs > 300.0 { + 300.0 + } else { + duration_secs + }; + + info!("WAIT {} seconds (thread sleep)", capped_duration); + + // Use thread::sleep to block only the current thread, not the entire server + let duration = Duration::from_secs_f64(capped_duration); + thread::sleep(duration); + + info!("WAIT completed after {} seconds", capped_duration); + Ok(Dynamic::from(format!("Waited {} seconds", capped_duration))) + }, + ) + .unwrap(); +} diff --git a/src/basic/mod.rs b/src/basic/mod.rs new file mode 100644 index 000000000..cb0d388af --- /dev/null +++ b/src/basic/mod.rs @@ -0,0 +1,154 @@ +mod keywords; + +use self::keywords::create_draft::create_draft_keyword; +use self::keywords::create_site::create_site_keyword; +use self::keywords::find::find_keyword; +use self::keywords::first::first_keyword; +use self::keywords::for_next::for_keyword; +use self::keywords::format::format_keyword; +use self::keywords::get::get_keyword; +use self::keywords::get_website::get_website_keyword; +use self::keywords::last::last_keyword; 
+use self::keywords::llm_keyword::llm_keyword; +use self::keywords::on::on_keyword; +use self::keywords::print::print_keyword; +use self::keywords::set::set_keyword; +use self::keywords::set_schedule::set_schedule_keyword; +use self::keywords::wait::wait_keyword; +use crate::shared::AppState; +use log::info; +use rhai::{Dynamic, Engine, EvalAltResult}; + +pub struct ScriptService { + engine: Engine, +} + +impl ScriptService { + pub fn new(state: &AppState) -> Self { + let mut engine = Engine::new(); + + // Configure engine for BASIC-like syntax + engine.set_allow_anonymous_fn(true); + engine.set_allow_looping(true); + + create_draft_keyword(state, &mut engine); + create_site_keyword(state, &mut engine); + find_keyword(state, &mut engine); + for_keyword(state, &mut engine); + first_keyword(&mut engine); + last_keyword(&mut engine); + format_keyword(&mut engine); + llm_keyword(state, &mut engine); + get_website_keyword(state, &mut engine); + get_keyword(state, &mut engine); + set_keyword(state, &mut engine); + wait_keyword(state, &mut engine); + print_keyword(state, &mut engine); + on_keyword(state, &mut engine); + set_schedule_keyword(state, &mut engine); + + ScriptService { engine } + } + + fn preprocess_basic_script(&self, script: &str) -> String { + let mut result = String::new(); + let mut for_stack: Vec = Vec::new(); + let mut current_indent = 0; + + for line in script.lines() { + let trimmed = line.trim(); + + // Skip empty lines and comments + if trimmed.is_empty() || trimmed.starts_with("//") || trimmed.starts_with("REM") { + result.push_str(line); + result.push('\n'); + continue; + } + + // Handle FOR EACH start + if trimmed.starts_with("FOR EACH") { + for_stack.push(current_indent); + result.push_str(&" ".repeat(current_indent)); + result.push_str(trimmed); + result.push_str("{\n"); + current_indent += 4; + result.push_str(&" ".repeat(current_indent)); + result.push('\n'); + continue; + } + + // Handle NEXT + if trimmed.starts_with("NEXT") { + if let 
Some(expected_indent) = for_stack.pop() { + if (current_indent - 4) != expected_indent { + panic!("NEXT without matching FOR EACH"); + } + current_indent = current_indent - 4; + result.push_str(&" ".repeat(current_indent)); + result.push_str("}\n"); + result.push_str(&" ".repeat(current_indent)); + result.push_str(trimmed); + result.push(';'); + result.push('\n'); + continue; + } else { + panic!("NEXT without matching FOR EACH"); + } + } + + // Handle EXIT FOR + if trimmed == "EXIT FOR" { + result.push_str(&" ".repeat(current_indent)); + result.push_str(trimmed); + result.push('\n'); + continue; + } + + // Handle regular lines - no semicolons added for BASIC-style commands + result.push_str(&" ".repeat(current_indent)); + + let basic_commands = [ + "SET", "CREATE", "PRINT", "FOR", "FIND", "GET", "EXIT", "IF", "THEN", "ELSE", + "END IF", "WHILE", "WEND", "DO", "LOOP", + ]; + + let is_basic_command = basic_commands.iter().any(|&cmd| trimmed.starts_with(cmd)); + let is_control_flow = trimmed.starts_with("IF") + || trimmed.starts_with("ELSE") + || trimmed.starts_with("END IF"); + + if is_basic_command || !for_stack.is_empty() || is_control_flow { + // Don'ta add semicolons for BASIC-style commands or inside blocks + result.push_str(trimmed); + result.push(';'); + } else { + // Add semicolons only for BASIC statements + result.push_str(trimmed); + if !trimmed.ends_with(';') && !trimmed.ends_with('{') && !trimmed.ends_with('}') { + result.push(';'); + } + } + result.push('\n'); + } + + if !for_stack.is_empty() { + panic!("Unclosed FOR EACH loop"); + } + + result + } + + /// Preprocesses BASIC-style script to handle semicolon-free syntax + pub fn compile(&self, script: &str) -> Result> { + let processed_script = self.preprocess_basic_script(script); + info!("Processed Script:\n{}", processed_script); + match self.engine.compile(&processed_script) { + Ok(ast) => Ok(ast), + Err(parse_error) => Err(Box::new(EvalAltResult::from(parse_error))), + } + } + + pub fn run(&self, 
ast: &rhai::AST) -> Result<Dynamic, Box<EvalAltResult>> {
+ self.response_channels + .lock() + .await + .insert(session_id, sender); + } + + pub async fn set_user_answer_mode( + &self, + user_id: &str, + bot_id: &str, + mode: &str, + ) -> Result<(), Box> { + self.session_manager + .update_answer_mode(user_id, bot_id, mode) + .await?; + Ok(()) + } + + pub async fn process_message( + &self, + message: UserMessage, + ) -> Result<(), Box> { + info!( + "Processing message from channel: {}, user: {}", + message.channel, message.user_id + ); + + let user_id = Uuid::parse_str(&message.user_id).unwrap_or_else(|_| Uuid::new_v4()); + let bot_id = Uuid::parse_str(&message.bot_id) + .unwrap_or_else(|_| Uuid::parse_str("00000000-0000-0000-0000-000000000000").unwrap()); + + let session = match self + .session_manager + .get_user_session(user_id, bot_id) + .await? + { + Some(session) => session, + None => { + self.session_manager + .create_session(user_id, bot_id, "New Conversation") + .await? + } + }; + + if session.answer_mode == "tool" && session.current_tool.is_some() { + self.tool_manager + .provide_user_response(&message.user_id, &message.bot_id, message.content.clone()) + .await?; + return Ok(()); + } + + self.session_manager + .save_message( + session.id, + user_id, + "user", + &message.content, + &message.message_type, + ) + .await?; + + let response_content = match session.answer_mode.as_str() { + "document" => self.document_mode_handler(&message, &session).await?, + "chart" => self.chart_mode_handler(&message, &session).await?, + "database" => self.database_mode_handler(&message, &session).await?, + "tool" => self.tool_mode_handler(&message, &session).await?, + _ => self.direct_mode_handler(&message, &session).await?, + }; + + self.session_manager + .save_message(session.id, user_id, "assistant", &response_content, "text") + .await?; + + let bot_response = BotResponse { + bot_id: message.bot_id, + user_id: message.user_id, + session_id: message.session_id, + channel: message.channel, + content: response_content, + 
message_type: "text".to_string(), + stream_token: None, + is_complete: true, + }; + + if let Some(adapter) = self.channels.get(&message.channel) { + adapter.send_message(bot_response).await?; + } + + Ok(()) + } + + async fn document_mode_handler( + &self, + message: &UserMessage, + session: &UserSession, + ) -> Result> { + if let Some(vector_store) = &self.vector_store { + let similar_docs = vector_store + .similarity_search(&message.content, 3, &VecStoreOptions::default()) + .await?; + + let mut enhanced_prompt = format!("User question: {}\n\n", message.content); + + if !similar_docs.is_empty() { + enhanced_prompt.push_str("Relevant documents:\n"); + for (i, doc) in similar_docs.iter().enumerate() { + enhanced_prompt.push_str(&format!("[Doc {}]: {}\n", i + 1, doc.page_content)); + } + enhanced_prompt.push_str( + "\nPlease answer the user's question based on the provided documents.", + ); + } + + self.llm_provider + .generate(&enhanced_prompt, &serde_json::Value::Null) + .await + } else { + self.direct_mode_handler(message, session).await + } + } + + async fn chart_mode_handler( + &self, + message: &UserMessage, + session: &UserSession, + ) -> Result> { + if let Some(chart_generator) = &self.chart_generator { + let chart_response = chart_generator + .generate_chart(&message.content, "bar") + .await?; + + self.session_manager + .save_message( + session.id, + session.user_id, + "system", + &format!("Generated chart for query: {}", message.content), + "chart", + ) + .await?; + + Ok(format!( + "Chart generated for your query. Data retrieved: {}", + chart_response.sql_query + )) + } else { + self.document_mode_handler(message, session).await + } + } + + async fn database_mode_handler( + &self, + message: &UserMessage, + _session: &UserSession, + ) -> Result> { + if let Some(sql_chain) = &self.sql_chain { + let input_variables = prompt_args! 
{ + "input" => message.content, + }; + + let result = sql_chain.invoke(input_variables).await?; + Ok(result.to_string()) + } else { + let db_url = std::env::var("DATABASE_URL")?; + let engine = PostgreSQLEngine::new(&db_url).await?; + let db = SQLDatabaseBuilder::new(engine).build().await?; + + let llm = OpenAI::default(); + let chain = langchain_rust::chain::SQLDatabaseChainBuilder::new() + .llm(llm) + .top_k(5) + .database(db) + .build()?; + + let input_variables = chain.prompt_builder().query(&message.content).build(); + let result = chain.invoke(input_variables).await?; + + Ok(result.to_string()) + } + } + + async fn tool_mode_handler( + &self, + message: &UserMessage, + _session: &UserSession, + ) -> Result> { + if message.content.to_lowercase().contains("calculator") { + if let Some(_adapter) = self.channels.get(&message.channel) { + let (tx, _rx) = mpsc::channel(100); + + self.register_response_channel(message.session_id.clone(), tx.clone()) + .await; + + let tool_manager = self.tool_manager.clone(); + let user_id_str = message.user_id.clone(); + let bot_id_str = message.bot_id.clone(); + let session_manager = self.session_manager.clone(); + + tokio::spawn(async move { + let _ = tool_manager + .execute_tool_with_session( + "calculator", + &user_id_str, + &bot_id_str, + session_manager, + tx, + ) + .await; + }); + } + Ok("Starting calculator tool...".to_string()) + } else { + let available_tools = self.tool_manager.list_tools(); + let tools_context = if !available_tools.is_empty() { + format!("\n\nAvailable tools: {}. 
If the user needs calculations, suggest using the calculator tool.", available_tools.join(", ")) + } else { + String::new() + }; + + let full_prompt = format!("{}{}", message.content, tools_context); + + self.llm_provider + .generate(&full_prompt, &serde_json::Value::Null) + .await + } + } + + async fn direct_mode_handler( + &self, + message: &UserMessage, + session: &UserSession, + ) -> Result> { + let history = self + .session_manager + .get_conversation_history(session.id, session.user_id) + .await?; + + let mut memory = SimpleMemory::new(); + for (role, content) in history { + match role.as_str() { + "user" => memory.add_user_message(&content), + "assistant" => memory.add_ai_message(&content), + _ => {} + } + } + + let mut prompt = String::new(); + if let Some(chat_history) = memory.get_chat_history() { + for message in chat_history { + prompt.push_str(&format!( + "{}: {}\n", + message.message_type(), + message.content() + )); + } + } + prompt.push_str(&format!("User: {}\nAssistant:", message.content)); + + self.llm_provider + .generate(&prompt, &serde_json::Value::Null) + .await + } + + pub async fn stream_response( + &self, + message: UserMessage, + mut response_tx: mpsc::Sender, + ) -> Result<(), Box> { + info!("Streaming response for user: {}", message.user_id); + + let user_id = Uuid::parse_str(&message.user_id).unwrap_or_else(|_| Uuid::new_v4()); + let bot_id = Uuid::parse_str(&message.bot_id) + .unwrap_or_else(|_| Uuid::parse_str("00000000-0000-0000-0000-000000000000").unwrap()); + + let session = match self + .session_manager + .get_user_session(user_id, bot_id) + .await? + { + Some(session) => session, + None => { + self.session_manager + .create_session(user_id, bot_id, "New Conversation") + .await? 
+ } + }; + + if session.answer_mode == "tool" && session.current_tool.is_some() { + self.tool_manager + .provide_user_response(&message.user_id, &message.bot_id, message.content.clone()) + .await?; + return Ok(()); + } + + self.session_manager + .save_message( + session.id, + user_id, + "user", + &message.content, + &message.message_type, + ) + .await?; + + let history = self + .session_manager + .get_conversation_history(session.id, user_id) + .await?; + + let mut memory = SimpleMemory::new(); + for (role, content) in history { + match role.as_str() { + "user" => memory.add_user_message(&content), + "assistant" => memory.add_ai_message(&content), + _ => {} + } + } + + let mut prompt = String::new(); + if let Some(chat_history) = memory.get_chat_history() { + for message in chat_history { + prompt.push_str(&format!( + "{}: {}\n", + message.message_type(), + message.content() + )); + } + } + prompt.push_str(&format!("User: {}\nAssistant:", message.content)); + + let (stream_tx, mut stream_rx) = mpsc::channel(100); + let llm_provider = self.llm_provider.clone(); + let prompt_clone = prompt.clone(); + + tokio::spawn(async move { + let _ = llm_provider + .generate_stream(&prompt_clone, &serde_json::Value::Null, stream_tx) + .await; + }); + + let mut full_response = String::new(); + while let Some(chunk) = stream_rx.recv().await { + full_response.push_str(&chunk); + + let bot_response = BotResponse { + bot_id: message.bot_id.clone(), + user_id: message.user_id.clone(), + session_id: message.session_id.clone(), + channel: message.channel.clone(), + content: chunk, + message_type: "text".to_string(), + stream_token: None, + is_complete: false, + }; + + if response_tx.send(bot_response).await.is_err() { + break; + } + } + + self.session_manager + .save_message(session.id, user_id, "assistant", &full_response, "text") + .await?; + + let final_response = BotResponse { + bot_id: message.bot_id, + user_id: message.user_id, + session_id: message.session_id, + channel: 
message.channel,
            content: "".to_string(),
            message_type: "text".to_string(),
            stream_token: None,
            is_complete: true,
        };

        response_tx.send(final_response).await?;
        Ok(())
    }

    /// Lists all sessions belonging to `user_id`.
    /// NOTE(review): element type was stripped in this hunk (`Result, …>`);
    /// reconstructed as `Vec<UserSession>` — confirm against VCS.
    pub async fn get_user_sessions(
        &self,
        user_id: Uuid,
    ) -> Result<Vec<UserSession>, Box<dyn std::error::Error + Send + Sync>> {
        self.session_manager.get_user_sessions(user_id).await
    }

    /// Returns the (role, content) transcript of one session.
    pub async fn get_conversation_history(
        &self,
        session_id: Uuid,
        user_id: Uuid,
    ) -> Result<Vec<(String, String)>, Box<dyn std::error::Error + Send + Sync>> {
        self.session_manager
            .get_conversation_history(session_id, user_id)
            .await
    }

    /// Tool-aware, non-streaming message entry point: routes input to a
    /// waiting tool, starts the calculator on demand, or falls back to the LLM.
    pub async fn process_message_with_tools(
        &self,
        message: UserMessage,
    ) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
        info!(
            "Processing message with tools from user: {}",
            message.user_id
        );

        // NOTE(review): unparseable ids silently become new/nil UUIDs —
        // confirm intended (same pattern as stream_response).
        let user_id = Uuid::parse_str(&message.user_id).unwrap_or_else(|_| Uuid::new_v4());
        let bot_id = Uuid::parse_str(&message.bot_id).unwrap_or_else(|_| Uuid::nil());

        let session = match self
            .session_manager
            .get_user_session(user_id, bot_id)
            .await?
        {
            Some(session) => session,
            None => {
                self.session_manager
                    .create_session(user_id, bot_id, "New Conversation")
                    .await?
+ } + }; + + self.session_manager + .save_message( + session.id, + user_id, + "user", + &message.content, + &message.message_type, + ) + .await?; + + let is_tool_waiting = self + .tool_manager + .is_tool_waiting(&message.session_id) + .await + .unwrap_or(false); + + if is_tool_waiting { + self.tool_manager + .provide_input(&message.session_id, &message.content) + .await?; + + if let Ok(tool_output) = self.tool_manager.get_tool_output(&message.session_id).await { + for output in tool_output { + let bot_response = BotResponse { + bot_id: message.bot_id.clone(), + user_id: message.user_id.clone(), + session_id: message.session_id.clone(), + channel: message.channel.clone(), + content: output, + message_type: "text".to_string(), + stream_token: None, + is_complete: true, + }; + + if let Some(adapter) = self.channels.get(&message.channel) { + adapter.send_message(bot_response).await?; + } + } + } + return Ok(()); + } + + let response = if message.content.to_lowercase().contains("calculator") + || message.content.to_lowercase().contains("calculate") + || message.content.to_lowercase().contains("math") + { + match self + .tool_manager + .execute_tool("calculator", &message.session_id, &message.user_id) + .await + { + Ok(tool_result) => { + self.session_manager + .save_message( + session.id, + user_id, + "assistant", + &tool_result.output, + "tool_start", + ) + .await?; + + tool_result.output + } + Err(e) => { + format!("I encountered an error starting the calculator: {}", e) + } + } + } else { + let available_tools = self.tool_manager.list_tools(); + let tools_context = if !available_tools.is_empty() { + format!("\n\nAvailable tools: {}. If the user needs calculations, suggest using the calculator tool.", available_tools.join(", ")) + } else { + String::new() + }; + + let full_prompt = format!("{}{}", message.content, tools_context); + + self.llm_provider + .generate(&full_prompt, &serde_json::Value::Null) + .await? 
+ }; + + self.session_manager + .save_message(session.id, user_id, "assistant", &response, "text") + .await?; + + let bot_response = BotResponse { + bot_id: message.bot_id, + user_id: message.user_id, + session_id: message.session_id, + channel: message.channel, + content: response, + message_type: "text".to_string(), + stream_token: None, + is_complete: true, + }; + + if let Some(adapter) = self.channels.get(&message.channel) { + adapter.send_message(bot_response).await?; + } + + Ok(()) + } +} + +#[actix_web::get("/ws")] +async fn websocket_handler( + req: HttpRequest, + stream: web::Payload, + data: web::Data, +) -> Result { + let (res, mut session, mut msg_stream) = actix_ws::handle(&req, stream)?; + let session_id = Uuid::new_v4().to_string(); + let (tx, mut rx) = mpsc::channel::(100); + + data.orchestrator + .register_response_channel(session_id.clone(), tx.clone()) + .await; + data.web_adapter + .add_connection(session_id.clone(), tx.clone()) + .await; + data.voice_adapter + .add_connection(session_id.clone(), tx.clone()) + .await; + + let orchestrator = data.orchestrator.clone(); + let web_adapter = data.web_adapter.clone(); + + actix_web::rt::spawn(async move { + while let Some(msg) = rx.recv().await { + if let Ok(json) = serde_json::to_string(&msg) { + let _ = session.text(json).await; + } + } + }); + + actix_web::rt::spawn(async move { + while let Some(Ok(msg)) = msg_stream.recv().await { + match msg { + WsMessage::Text(text) => { + let user_message = UserMessage { + bot_id: "default_bot".to_string(), + user_id: "default_user".to_string(), + session_id: session_id.clone(), + channel: "web".to_string(), + content: text.to_string(), + message_type: "text".to_string(), + media_url: None, + timestamp: Utc::now(), + }; + + if let Err(e) = orchestrator.stream_response(user_message, tx.clone()).await { + info!("Error processing message: {}", e); + } + } + WsMessage::Close(_) => { + web_adapter.remove_connection(&session_id).await; + break; + } + _ => {} + } + } 
+ }); + + Ok(res) +} + +#[actix_web::get("/api/whatsapp/webhook")] +async fn whatsapp_webhook_verify( + data: web::Data, + web::Query(params): web::Query>, +) -> Result { + let mode = params.get("hub.mode").unwrap_or(&"".to_string()); + let token = params.get("hub.verify_token").unwrap_or(&"".to_string()); + let challenge = params.get("hub.challenge").unwrap_or(&"".to_string()); + + match data.whatsapp_adapter.verify_webhook(mode, token, challenge) { + Ok(challenge_response) => Ok(HttpResponse::Ok().body(challenge_response)), + Err(_) => Ok(HttpResponse::Forbidden().body("Verification failed")), + } +} + +#[actix_web::post("/api/whatsapp/webhook")] +async fn whatsapp_webhook( + data: web::Data, + payload: web::Json, +) -> Result { + match data + .whatsapp_adapter + .process_incoming_message(payload.into_inner()) + .await + { + Ok(user_messages) => { + for user_message in user_messages { + if let Err(e) = data.orchestrator.process_message(user_message).await { + log::error!("Error processing WhatsApp message: {}", e); + } + } + Ok(HttpResponse::Ok().body("")) + } + Err(e) => { + log::error!("Error processing WhatsApp webhook: {}", e); + Ok(HttpResponse::BadRequest().body("Invalid message")) + } + } +} + +#[actix_web::post("/api/voice/start")] +async fn voice_start( + data: web::Data, + info: web::Json, +) -> Result { + let session_id = info + .get("session_id") + .and_then(|s| s.as_str()) + .unwrap_or(""); + let user_id = info + .get("user_id") + .and_then(|u| u.as_str()) + .unwrap_or("user"); + + match data + .voice_adapter + .start_voice_session(session_id, user_id) + .await + { + Ok(token) => { + Ok(HttpResponse::Ok().json(serde_json::json!({"token": token, "status": "started"}))) + } + Err(e) => { + Ok(HttpResponse::InternalServerError() + .json(serde_json::json!({"error": e.to_string()}))) + } + } +} + +#[actix_web::post("/api/voice/stop")] +async fn voice_stop( + data: web::Data, + info: web::Json, +) -> Result { + let session_id = info + .get("session_id") + 
.and_then(|s| s.as_str()) + .unwrap_or(""); + + match data.voice_adapter.stop_voice_session(session_id).await { + Ok(()) => Ok(HttpResponse::Ok().json(serde_json::json!({"status": "stopped"}))), + Err(e) => { + Ok(HttpResponse::InternalServerError() + .json(serde_json::json!({"error": e.to_string()}))) + } + } +} + +#[actix_web::post("/api/sessions")] +async fn create_session(_data: web::Data) -> Result { + let session_id = Uuid::new_v4(); + Ok(HttpResponse::Ok().json(serde_json::json!({ + "session_id": session_id, + "title": "New Conversation", + "created_at": Utc::now() + }))) +} + +#[actix_web::get("/api/sessions")] +async fn get_sessions(data: web::Data) -> Result { + let user_id = Uuid::parse_str("00000000-0000-0000-0000-000000000001").unwrap(); + match data.orchestrator.get_user_sessions(user_id).await { + Ok(sessions) => Ok(HttpResponse::Ok().json(sessions)), + Err(e) => { + Ok(HttpResponse::InternalServerError() + .json(serde_json::json!({"error": e.to_string()}))) + } + } +} + +#[actix_web::get("/api/sessions/{session_id}")] +async fn get_session_history( + data: web::Data, + path: web::Path, +) -> Result { + let session_id = path.into_inner(); + let user_id = Uuid::parse_str("00000000-0000-0000-0000-000000000001").unwrap(); + + match Uuid::parse_str(&session_id) { + Ok(session_uuid) => match data + .orchestrator + .get_conversation_history(session_uuid, user_id) + .await + { + Ok(history) => Ok(HttpResponse::Ok().json(history)), + Err(e) => Ok(HttpResponse::InternalServerError() + .json(serde_json::json!({"error": e.to_string()}))), + }, + Err(_) => { + Ok(HttpResponse::BadRequest().json(serde_json::json!({"error": "Invalid session ID"}))) + } + } +} + +#[actix_web::post("/api/set_mode")] +async fn set_mode_handler( + data: web::Data, + info: web::Json>, +) -> Result { + let default_user = "default_user".to_string(); + let default_bot = "default_bot".to_string(); + let default_mode = "direct".to_string(); + + let user_id = 
info.get("user_id").unwrap_or(&default_user);
    let bot_id = info.get("bot_id").unwrap_or(&default_bot);
    let mode = info.get("mode").unwrap_or(&default_mode);

    if let Err(e) = data
        .orchestrator
        .set_user_answer_mode(user_id, bot_id, mode)
        .await
    {
        return Ok(
            HttpResponse::InternalServerError().json(serde_json::json!({"error": e.to_string()}))
        );
    }

    Ok(HttpResponse::Ok().json(serde_json::json!({"status": "mode_updated"})))
}

/// Serves the chat UI; falls back to the compiled-in page when the template
/// file is missing on disk.
#[actix_web::get("/")]
async fn index() -> Result<HttpResponse> {
    let html = fs::read_to_string("templates/index.html")
        .unwrap_or_else(|_| include_str!("../../static/index.html").to_string());
    Ok(HttpResponse::Ok().content_type("text/html").body(html))
}

/// Serves files from ./static with a small extension -> MIME map.
#[actix_web::get("/static/{filename:.*}")]
async fn static_files(req: HttpRequest) -> Result<HttpResponse> {
    let filename = req.match_info().query("filename");

    // Security fix: the tail-match route happily captures "../..", which the
    // plain format! join below would let escape the static root — reject it.
    if filename.contains("..") {
        return Ok(HttpResponse::NotFound().body("File not found"));
    }

    let path = format!("static/{}", filename);

    match fs::read(&path) {
        Ok(content) => {
            let content_type = match filename {
                f if f.ends_with(".js") => "application/javascript",
                f if f.ends_with(".css") => "text/css",
                f if f.ends_with(".png") => "image/png",
                f if f.ends_with(".jpg") | f.ends_with(".jpeg") => "image/jpeg",
                _ => "text/plain",
            };

            Ok(HttpResponse::Ok().content_type(content_type).body(content))
        }
        Err(_) => Ok(HttpResponse::NotFound().body("File not found")),
    }
}
diff --git a/src/channels/mod.rs b/src/channels/mod.rs
new file mode 100644
index 000000000..07443b1ed
--- /dev/null
+++ b/src/channels/mod.rs
@@ -0,0 +1,178 @@
//! Channel adapters: deliver `BotResponse`s over web sockets and LiveKit voice.
use async_trait::async_trait;
use chrono::Utc;
// NOTE(review): `AccessToken` is used below but never imported in this file —
// restore the livekit token import (and confirm the builder API) from VCS.
use livekit::{DataPacketKind, Room, RoomOptions};
use log::info;
use std::collections::HashMap;
use std::sync::Arc;
use tokio::sync::{mpsc, Mutex};

use crate::shared::{BotResponse, UserMessage};

/// Anything that can push a `BotResponse` to an end user.
/// NOTE(review): error types were stripped in this hunk; reconstructed as
/// `Box<dyn Error + Send + Sync>` — confirm against VCS.
#[async_trait]
pub trait ChannelAdapter: Send + Sync {
    async fn send_message(
        &self,
        response: BotResponse,
    ) -> Result<(), Box<dyn std::error::Error + Send + Sync>>;
}

/// Web (WebSocket) delivery: maps session id -> outbound channel.
pub struct WebChannelAdapter {
    connections: Arc<Mutex<HashMap<String, mpsc::Sender<BotResponse>>>>,
}

impl
WebChannelAdapter { + pub fn new() -> Self { + Self { + connections: Arc::new(Mutex::new(HashMap::new())), + } + } + + pub async fn add_connection(&self, session_id: String, tx: mpsc::Sender) { + self.connections.lock().await.insert(session_id, tx); + } + + pub async fn remove_connection(&self, session_id: &str) { + self.connections.lock().await.remove(session_id); + } +} + +#[async_trait] +impl ChannelAdapter for WebChannelAdapter { + async fn send_message(&self, response: BotResponse) -> Result<(), Box> { + let connections = self.connections.lock().await; + if let Some(tx) = connections.get(&response.session_id) { + tx.send(response).await?; + } + Ok(()) + } +} + +pub struct VoiceAdapter { + livekit_url: String, + api_key: String, + api_secret: String, + rooms: Arc>>, + connections: Arc>>>, +} + +impl VoiceAdapter { + pub fn new(livekit_url: String, api_key: String, api_secret: String) -> Self { + Self { + livekit_url, + api_key, + api_secret, + rooms: Arc::new(Mutex::new(HashMap::new())), + connections: Arc::new(Mutex::new(HashMap::new())), + } + } + + pub async fn start_voice_session( + &self, + session_id: &str, + user_id: &str, + ) -> Result> { + let token = AccessToken::with_api_key(&self.api_key, &self.api_secret) + .with_identity(user_id) + .with_name(user_id) + .with_room_name(session_id) + .with_room_join(true) + .to_jwt()?; + + let room_options = RoomOptions { + auto_subscribe: true, + ..Default::default() + }; + + let (room, mut events) = Room::connect(&self.livekit_url, &token, room_options).await?; + self.rooms + .lock() + .await + .insert(session_id.to_string(), room.clone()); + + let rooms_clone = self.rooms.clone(); + let connections_clone = self.connections.clone(); + let session_id_clone = session_id.to_string(); + + tokio::spawn(async move { + while let Some(event) = events.recv().await { + match event { + livekit::prelude::RoomEvent::DataReceived(data_packet) => { + if let Ok(message) = + serde_json::from_slice::(&data_packet.data) + { + 
info!("Received voice message: {}", message.content); + if let Some(tx) = + connections_clone.lock().await.get(&message.session_id) + { + let _ = tx + .send(BotResponse { + bot_id: message.bot_id, + user_id: message.user_id, + session_id: message.session_id, + channel: "voice".to_string(), + content: format!("🎤 Voice: {}", message.content), + message_type: "voice".to_string(), + stream_token: None, + is_complete: true, + }) + .await; + } + } + } + livekit::prelude::RoomEvent::TrackSubscribed( + track, + publication, + participant, + ) => { + info!("Voice track subscribed from {}", participant.identity()); + } + _ => {} + } + } + rooms_clone.lock().await.remove(&session_id_clone); + }); + + Ok(token) + } + + pub async fn stop_voice_session( + &self, + session_id: &str, + ) -> Result<(), Box> { + if let Some(room) = self.rooms.lock().await.remove(session_id) { + room.disconnect(); + } + Ok(()) + } + + pub async fn add_connection(&self, session_id: String, tx: mpsc::Sender) { + self.connections.lock().await.insert(session_id, tx); + } + + pub async fn send_voice_response( + &self, + session_id: &str, + text: &str, + ) -> Result<(), Box> { + if let Some(room) = self.rooms.lock().await.get(session_id) { + let voice_response = serde_json::json!({ + "type": "voice_response", + "text": text, + "timestamp": Utc::now() + }); + + room.local_participant().publish_data( + serde_json::to_vec(&voice_response)?, + DataPacketKind::Reliable, + &[], + )?; + } + Ok(()) + } +} + +#[async_trait] +impl ChannelAdapter for VoiceAdapter { + async fn send_message(&self, response: BotResponse) -> Result<(), Box> { + info!("Sending voice response to: {}", response.user_id); + self.send_voice_response(&response.session_id, &response.content) + .await + } +} diff --git a/src/chart/mod.rs b/src/chart/mod.rs new file mode 100644 index 000000000..7ed78fd79 --- /dev/null +++ b/src/chart/mod.rs @@ -0,0 +1,92 @@ +use langchain_rust::{ + chain::{Chain, SQLDatabaseChainBuilder, 
options::ChainCallOptions}, + llm::openai::OpenAI, + tools::{postgres::PostgreSQLEngine, SQLDatabaseBuilder}, + prompt::PromptTemplate, +}; + +pub struct ChartGenerator { + sql_chain: SQLDatabaseChainBuilder, + llm: OpenAI, +} + +impl ChartGenerator { + pub async fn new(database_url: &str) -> Result> { + let llm = OpenAI::default(); + let engine = PostgreSQLEngine::new(database_url).await?; + let db = SQLDatabaseBuilder::new(engine).build().await?; + + let sql_chain = SQLDatabaseChainBuilder::new() + .llm(llm.clone()) + .top_k(4) + .database(db); + + Ok(Self { + sql_chain, + llm, + }) + } + + pub async fn generate_chart( + &self, + question: &str, + chart_type: &str + ) -> Result> { + // Step 1: Generate SQL using LangChain + let sql_result = self.generate_sql(question).await?; + + // Step 2: Execute SQL and get data + let data = self.execute_sql(&sql_result).await?; + + // Step 3: Generate chart configuration using LLM + let chart_config = self.generate_chart_config(&data, chart_type).await?; + + // Step 4: Generate and render chart + let chart_image = self.render_chart(&chart_config).await?; + + Ok(ChartResponse { + sql_query: sql_result, + data, + chart_image, + chart_config, + }) + } + + async fn generate_sql(&self, question: &str) -> Result> { + let chain = self.sql_chain + .clone() + .build() + .expect("Failed to build SQL chain"); + + let input_variables = chain.prompt_builder().query(question).build(); + let result = chain.invoke(input_variables).await?; + + Ok(result.to_string()) + } + + async fn execute_sql(&self, query: &str) -> Result> { + // Execute the generated SQL and return structured data + // Implementation depends on your database setup + Ok(Value::Null) + } + + async fn generate_chart_config(&self, data: &Value, chart_type: &str) -> Result> { + let prompt = format!( + "Given this data: {} and chart type: {}, generate a billboard.js configuration JSON. 
\ + Focus on creating meaningful visualizations for this business data.", + data, chart_type + ); + + let message = HumanMessage::new(prompt); + let result = self.llm.invoke(&[message]).await?; + + serde_json::from_str(&result.generation) + .map_err(|e| e.into()) + } + + async fn render_chart(&self, config: &Value) -> Result, Box> { + // Use headless browser to render chart and capture as image + // This would integrate with your browser automation setup + Ok(vec![]) + } +} diff --git a/src/config/mod.rs b/src/config/mod.rs new file mode 100644 index 000000000..3d0262bed --- /dev/null +++ b/src/config/mod.rs @@ -0,0 +1,148 @@ +use std::env; + +#[derive(Clone)] +pub struct AppConfig { + pub minio: MinioConfig, + pub server: ServerConfig, + pub database: DatabaseConfig, + pub database_custom: DatabaseConfig, + pub email: EmailConfig, + pub ai: AIConfig, + pub site_path: String, +} + +#[derive(Clone)] +pub struct DatabaseConfig { + pub username: String, + pub password: String, + pub server: String, + pub port: u32, + pub database: String, +} + +#[derive(Clone)] +pub struct MinioConfig { + pub server: String, + pub access_key: String, + pub secret_key: String, + pub use_ssl: bool, + pub bucket: String, +} + +#[derive(Clone)] +pub struct ServerConfig { + pub host: String, + pub port: u16, +} + +#[derive(Clone)] +pub struct EmailConfig { + pub from: String, + pub server: String, + pub port: u16, + pub username: String, + pub password: String, +} + +#[derive(Clone)] +pub struct AIConfig { + pub instance: String, + pub key: String, + pub version: String, + pub endpoint: String, +} + + +impl AppConfig { + pub fn database_url(&self) -> String { + format!( + "postgres://{}:{}@{}:{}/{}", + self.database.username, + self.database.password, + self.database.server, + self.database.port, + self.database.database + ) + } + + pub fn database_custom_url(&self) -> String { + format!( + "postgres://{}:{}@{}:{}/{}", + self.database_custom.username, + self.database_custom.password, + 
self.database_custom.server, + self.database_custom.port, + self.database_custom.database + ) + } + + + pub fn from_env() -> Self { + let database = DatabaseConfig { + username: env::var("TABLES_USERNAME").unwrap_or_else(|_| "user".to_string()), + password: env::var("TABLES_PASSWORD").unwrap_or_else(|_| "pass".to_string()), + server: env::var("TABLES_SERVER").unwrap_or_else(|_| "localhost".to_string()), + port: env::var("TABLES_PORT") + .ok() + .and_then(|p| p.parse().ok()) + .unwrap_or(5432), + database: env::var("TABLES_DATABASE").unwrap_or_else(|_| "db".to_string()), + }; + + let database_custom = DatabaseConfig { + username: env::var("CUSTOM_USERNAME").unwrap_or_else(|_| "user".to_string()), + password: env::var("CUSTOM_PASSWORD").unwrap_or_else(|_| "pass".to_string()), + server: env::var("CUSTOM_SERVER").unwrap_or_else(|_| "localhost".to_string()), + port: env::var("CUSTOM_PORT") + .ok() + .and_then(|p| p.parse().ok()) + .unwrap_or(5432), + database: env::var("CUSTOM_DATABASE").unwrap_or_else(|_| "db".to_string()), + }; + + let minio = MinioConfig { + server: env::var("DRIVE_SERVER").expect("DRIVE_SERVER not set"), + access_key: env::var("DRIVE_ACCESSKEY").expect("DRIVE_ACCESSKEY not set"), + secret_key: env::var("DRIVE_SECRET").expect("DRIVE_SECRET not set"), + use_ssl: env::var("DRIVE_USE_SSL") + .unwrap_or_else(|_| "false".to_string()) + .parse() + .unwrap_or(false), + bucket: env::var("DRIVE_ORG_PREFIX").unwrap_or_else(|_| "".to_string()), + }; + + let email = EmailConfig { + from: env::var("EMAIL_FROM").expect("EMAIL_FROM not set"), + server: env::var("EMAIL_SERVER").expect("EMAIL_SERVER not set"), + port: env::var("EMAIL_PORT") + .expect("EMAIL_PORT not set") + .parse() + .expect("EMAIL_PORT must be a number"), + username: env::var("EMAIL_USER").expect("EMAIL_USER not set"), + password: env::var("EMAIL_PASS").expect("EMAIL_PASS not set"), + }; + + let ai = AIConfig { + instance: env::var("AI_INSTANCE").expect("AI_INSTANCE not set"), + key: 
env::var("AI_KEY").expect("AI_KEY not set"), + version: env::var("AI_VERSION").expect("AI_VERSION not set"), + endpoint: env::var("AI_ENDPOINT").expect("AI_ENDPOINT not set"), + }; + + AppConfig { + minio, + server: ServerConfig { + host: env::var("SERVER_HOST").unwrap_or_else(|_| "127.0.0.1".to_string()), + port: env::var("SERVER_PORT") + .ok() + .and_then(|p| p.parse().ok()) + .unwrap_or(8080), + }, + database, + database_custom, + email, + ai, + site_path: env::var("SITES_ROOT").unwrap() + } + } +} \ No newline at end of file diff --git a/src/context/mod.rs b/src/context/mod.rs new file mode 100644 index 000000000..cea1836cd --- /dev/null +++ b/src/context/mod.rs @@ -0,0 +1,97 @@ +use async_trait::async_trait; +use langchain_rust::{ + embedding::openai::openai_embedder::OpenAiEmbedder, + vectorstore::qdrant::{Qdrant, StoreBuilder}, + vectorstore::{VectorStore, VecStoreOptions}, + schemas::Document, +}; +use qdrant_client::qdrant::Qdrant as QdrantClient; +use sqlx::PgPool; +use uuid::Uuid; + +#[async_trait] +pub trait ContextProvider: Send + Sync { + async fn get_context(&self, session_id: Uuid, user_id: Uuid, query: &str) -> Result>; + async fn store_embedding(&self, text: &str, embedding: Vec, metadata: Value) -> Result<(), Box>; + async fn search_similar(&self, embedding: Vec, limit: u32) -> Result, Box>; +} + +pub struct LangChainContextProvider { + pool: PgPool, + vector_store: Qdrant, + embedder: OpenAiEmbedder, +} + +impl LangChainContextProvider { + pub async fn new(pool: PgPool, qdrant_url: &str) -> Result> { + let embedder = OpenAiEmbedder::default(); + + let client = QdrantClient::from_url(qdrant_url).build()?; + let vector_store = StoreBuilder::new() + .embedder(embedder.clone()) + .client(client) + .collection_name("conversations") + .build() + .await?; + + Ok(Self { + pool, + vector_store, + embedder, + }) + } +} + +#[async_trait] +impl ContextProvider for LangChainContextProvider { + async fn get_context(&self, session_id: Uuid, user_id: Uuid, 
query: &str) -> Result> { + // Get conversation history + let history = sqlx::query( + "SELECT role, content_encrypted FROM message_history + WHERE session_id = $1 AND user_id = $2 + ORDER BY message_index DESC LIMIT 5" + ) + .bind(session_id) + .bind(user_id) + .fetch_all(&self.pool) + .await?; + + let mut context = String::from("Conversation history:\n"); + for row in history.iter().rev() { + let role: String = row.get("role"); + let content: String = row.get("content_encrypted"); + context.push_str(&format!("{}: {}\n", role, content)); + } + + // Search for similar documents using LangChain + let similar_docs = self.vector_store + .similarity_search(query, 3, &VecStoreOptions::default()) + .await?; + + if !similar_docs.is_empty() { + context.push_str("\nRelevant context:\n"); + for doc in similar_docs { + context.push_str(&format!("- {}\n", doc.page_content)); + } + } + + context.push_str(&format!("\nCurrent message: {}", query)); + Ok(context) + } + + async fn store_embedding(&self, text: &str, embedding: Vec, metadata: Value) -> Result<(), Box> { + let document = Document::new(text).with_metadata(metadata); + + self.vector_store + .add_documents(&[document], &VecStoreOptions::default()) + .await?; + + Ok(()) + } + + async fn search_similar(&self, embedding: Vec, limit: u32) -> Result, Box> { + // LangChain handles this through the vector store interface + // This method would need adaptation to work with LangChain's search patterns + Ok(vec![]) + } +} diff --git a/src/email/mod.rs b/src/email/mod.rs new file mode 100644 index 000000000..0c8041f54 --- /dev/null +++ b/src/email/mod.rs @@ -0,0 +1,533 @@ +use crate::{config::EmailConfig, state::AppState}; +use log::info; + +use actix_web::error::ErrorInternalServerError; +use actix_web::http::header::ContentType; +use actix_web::{web, HttpResponse, Result}; +use lettre::{transport::smtp::authentication::Credentials, Message, SmtpTransport, Transport}; +use serde::Serialize; + +use imap::types::Seq; +use 
mailparse::{parse_mail, MailHeaderMap}; // Added MailHeaderMap import + +#[derive(Debug, Serialize)] +pub struct EmailResponse { + pub id: String, + pub name: String, + pub email: String, + pub subject: String, + pub text: String, + date: String, + read: bool, + labels: Vec, +} + +async fn internal_send_email(config: &EmailConfig, to: &str, subject: &str, body: &str) { + let email = Message::builder() + .from(config.from.parse().unwrap()) + .to(to.parse().unwrap()) + .subject(subject) + .body(body.to_string()) + .unwrap(); + + let creds = Credentials::new(config.username.clone(), config.password.clone()); + + SmtpTransport::relay(&config.server) + .unwrap() + .port(config.port) + .credentials(creds) + .build() + .send(&email) + .unwrap(); +} + +#[actix_web::get("/emails/list")] +pub async fn list_emails( + state: web::Data, +) -> Result>, actix_web::Error> { + let _config = state + .config + .as_ref() + .ok_or_else(|| ErrorInternalServerError("Configuration not available"))?; + + // Establish connection + let tls = native_tls::TlsConnector::builder().build().map_err(|e| { + ErrorInternalServerError(format!("Failed to create TLS connector: {:?}", e)) + })?; + + let client = imap::connect( + (_config.email.server.as_str(), 993), + _config.email.server.as_str(), + &tls, + ) + .map_err(|e| ErrorInternalServerError(format!("Failed to connect to IMAP: {:?}", e)))?; + + // Login + let mut session = client + .login(&_config.email.username, &_config.email.password) + .map_err(|e| ErrorInternalServerError(format!("Login failed: {:?}", e)))?; + + // Select INBOX + session + .select("INBOX") + .map_err(|e| ErrorInternalServerError(format!("Failed to select INBOX: {:?}", e)))?; + + // Search for all messages + let messages = session + .search("ALL") + .map_err(|e| ErrorInternalServerError(format!("Failed to search emails: {:?}", e)))?; + + let mut email_list = Vec::new(); + + // Get last 20 messages + let recent_messages: Vec<_> = messages.iter().cloned().collect(); // Collect 
items into a Vec + let recent_messages: Vec = recent_messages.into_iter().rev().take(20).collect(); // Now you can reverse and take the last 20 + for seq in recent_messages { + // Fetch the entire message (headers + body) + let fetch_result = session.fetch(seq.to_string(), "RFC822"); + let messages = fetch_result + .map_err(|e| ErrorInternalServerError(format!("Failed to fetch email: {:?}", e)))?; + + for msg in messages.iter() { + let body = msg + .body() + .ok_or_else(|| ErrorInternalServerError("No body found"))?; + + // Parse the complete email message + let parsed = parse_mail(body) + .map_err(|e| ErrorInternalServerError(format!("Failed to parse email: {:?}", e)))?; + + // Extract headers + let headers = parsed.get_headers(); + let subject = headers.get_first_value("Subject").unwrap_or_default(); + let from = headers.get_first_value("From").unwrap_or_default(); + let date = headers.get_first_value("Date").unwrap_or_default(); + + // Extract body text (handles both simple and multipart emails) + let body_text = if let Some(body_part) = parsed + .subparts + .iter() + .find(|p| p.ctype.mimetype == "text/plain") + { + body_part.get_body().unwrap_or_default() + } else { + parsed.get_body().unwrap_or_default() + }; + + // Create preview + let preview = body_text.lines().take(3).collect::>().join(" "); + let preview_truncated = if preview.len() > 150 { + format!("{}...", &preview[..150]) + } else { + preview + }; + + // Parse From field + let (from_name, from_email) = parse_from_field(&from); + + email_list.push(EmailResponse { + id: seq.to_string(), + name: from_name, + email: from_email, + subject: if subject.is_empty() { + "(No Subject)".to_string() + } else { + subject + }, + text: preview_truncated, + date: if date.is_empty() { + chrono::Utc::now().format("%Y-%m-%d %H:%M:%S").to_string() + } else { + date + }, + read: false, + labels: Vec::new(), + }); + } + } + + session + .logout() + .map_err(|e| ErrorInternalServerError(format!("Failed to logout: {:?}", 
e)))?;

    Ok(web::Json(email_list))
}

// Helper function to parse a From header into (display name, address).
// The '>' is now searched *after* the '<'; previously a stray '>' earlier
// in the string produced a reversed range and panicked the slice (bug fix).
fn parse_from_field(from: &str) -> (String, String) {
    if let Some(start) = from.find('<') {
        if let Some(rel_end) = from[start + 1..].find('>') {
            let end = start + 1 + rel_end;
            let email = from[start + 1..end].trim().to_string();
            let name = from[..start].trim().trim_matches('"').to_string();
            return (name, email);
        }
    }
    ("Unknown".to_string(), from.to_string())
}

#[derive(serde::Deserialize)]
pub struct SaveDraftRequest {
    pub to: String,
    pub subject: String,
    pub cc: Option<String>,
    pub text: String,
}

#[derive(serde::Serialize)]
pub struct SaveDraftResponse {
    pub success: bool,
    pub message: String,
    pub draft_id: Option<String>,
}

#[derive(serde::Deserialize)]
pub struct GetLatestEmailRequest {
    pub from_email: String,
}

#[derive(serde::Serialize)]
pub struct LatestEmailResponse {
    pub success: bool,
    pub email_text: Option<String>,
    pub message: String,
}

/// POST /emails/save_draft — stores a draft in the IMAP "Drafts" folder.
/// Failures are reported in-band via `success: false`, never as HTTP errors.
#[actix_web::post("/emails/save_draft")]
pub async fn save_draft(
    state: web::Data<AppState>,
    draft_data: web::Json<SaveDraftRequest>,
) -> Result<web::Json<SaveDraftResponse>, actix_web::Error> {
    let config = state
        .config
        .as_ref()
        .ok_or_else(|| ErrorInternalServerError("Configuration not available"))?;

    match save_email_draft(&config.email, &draft_data).await {
        Ok(draft_id) => Ok(web::Json(SaveDraftResponse {
            success: true,
            message: "Draft saved successfully".to_string(),
            draft_id: Some(draft_id),
        })),
        Err(e) => Ok(web::Json(SaveDraftResponse {
            success: false,
            message: format!("Failed to save draft: {}", e),
            draft_id: None,
        })),
    }
}

/// Appends an RFC 5322 message to the "Drafts" folder (creating the folder
/// if missing) and returns a timestamp-based draft id.
pub async fn save_email_draft(
    email_config: &EmailConfig,
    draft_data: &SaveDraftRequest,
) -> Result<String, Box<dyn std::error::Error>> {
    // Establish connection
    let tls = native_tls::TlsConnector::builder().build()?;
    let client = imap::connect(
        (email_config.server.as_str(), 993),
        email_config.server.as_str(),
        &tls,
    )?;

    // Login
    let mut session = client
        .login(&email_config.username, &email_config.password)
        .map_err(|e| format!("Login failed: {:?}", e))?;

    // Select or create Drafts folder
    if session.select("Drafts").is_err() {
        // Try to create Drafts folder if it doesn't exist
        session.create("Drafts")?;
        session.select("Drafts")?;
    }

    // Create email message; the Cc header is emitted only when present.
    let cc_header = draft_data
        .cc
        .as_deref()
        .filter(|cc| !cc.is_empty())
        .map(|cc| format!("Cc: {}\r\n", cc))
        .unwrap_or_default();
    let email_message = format!(
        "From: {}\r\nTo: {}\r\n{}Subject: {}\r\nDate: {}\r\nContent-Type: text/html; charset=UTF-8\r\n\r\n{}",
        email_config.username,
        draft_data.to,
        cc_header,
        draft_data.subject,
        chrono::Utc::now().format("%a, %d %b %Y %H:%M:%S +0000"),
        draft_data.text
    );

    // Append to Drafts folder
    session.append("Drafts", &email_message)?;

    session.logout()?;

    Ok(chrono::Utc::now().timestamp().to_string())
}

/// Fetches the most recent message from `from_email` (Archive folder,
/// falling back to INBOX) formatted as quoted text ready for a reply body.
async fn fetch_latest_email_from_sender(
    email_config: &EmailConfig,
    from_email: &str,
) -> Result<String, Box<dyn std::error::Error>> {
    // Establish connection
    let tls = native_tls::TlsConnector::builder().build()?;
    let client = imap::connect(
        (email_config.server.as_str(), 993),
        email_config.server.as_str(),
        &tls,
    )?;

    // Login
    let mut session = client
        .login(&email_config.username, &email_config.password)
        .map_err(|e| format!("Login failed: {:?}", e))?;

    // Try to select Archive folder first, then fall back to INBOX
    if session.select("Archive").is_err() {
        session.select("INBOX")?;
    }

    // Search for emails from the specified sender
    let search_query = format!("FROM \"{}\"", from_email);
    let messages = session.search(&search_query)?;

    if messages.is_empty() {
        session.logout()?;
        return Err(format!("No emails found from {}", from_email).into());
    }

    // Get the latest message (highest sequence number)
    let latest_seq = messages.iter().max().unwrap();

    // Fetch the entire message
    let messages = session.fetch(latest_seq.to_string(), "RFC822")?;

    let mut email_text = String::new();

    for msg in messages.iter() {
        let
body = msg.body().ok_or("No body found in email")?; + + // Parse the complete email message + let parsed = parse_mail(body)?; + + // Extract headers + let headers = parsed.get_headers(); + let subject = headers.get_first_value("Subject").unwrap_or_default(); + let from = headers.get_first_value("From").unwrap_or_default(); + let date = headers.get_first_value("Date").unwrap_or_default(); + let to = headers.get_first_value("To").unwrap_or_default(); + + // Extract body text + let body_text = if let Some(body_part) = parsed + .subparts + .iter() + .find(|p| p.ctype.mimetype == "text/plain") + { + body_part.get_body().unwrap_or_default() + } else { + parsed.get_body().unwrap_or_default() + }; + + // Format the email text ready for reply with headers + email_text = format!( + "--- Original Message ---\nFrom: {}\nTo: {}\nDate: {}\nSubject: {}\n\n{}\n\n--- Reply Above This Line ---\n\n", + from, to, date, subject, body_text + ); + + break; // We only want the first (and should be only) message + } + + session.logout()?; + + if email_text.is_empty() { + Err("Failed to extract email content".into()) + } else { + Ok(email_text) + } +} + +#[actix_web::post("/emails/get_latest_from")] +pub async fn get_latest_email_from( + state: web::Data, + request: web::Json, +) -> Result, actix_web::Error> { + let config = state + .config + .as_ref() + .ok_or_else(|| ErrorInternalServerError("Configuration not available"))?; + + match fetch_latest_email_from_sender(&config.email, &request.from_email).await { + Ok(email_text) => Ok(web::Json(LatestEmailResponse { + success: true, + email_text: Some(email_text), + message: "Latest email retrieved successfully".to_string(), + })), + Err(e) => { + if e.to_string().contains("No emails found") { + Ok(web::Json(LatestEmailResponse { + success: false, + email_text: None, + message: e.to_string(), + })) + } else { + Err(ErrorInternalServerError(e)) + } + } + } +} + +pub async fn fetch_latest_sent_to( + email_config: &EmailConfig, + to_email: &str, 
+) -> Result> { + // Establish connection + let tls = native_tls::TlsConnector::builder().build()?; + let client = imap::connect( + (email_config.server.as_str(), 993), + email_config.server.as_str(), + &tls, + )?; + + // Login + let mut session = client + .login(&email_config.username, &email_config.password) + .map_err(|e| format!("Login failed: {:?}", e))?; + + // Try to select Archive folder first, then fall back to INBOX + if session.select("Sent").is_err() { + session.select("Sent Items")?; + } + + // Search for emails from the specified sender + let search_query = format!("TO \"{}\"", to_email); + let messages = session.search(&search_query)?; + + if messages.is_empty() { + session.logout()?; + return Err(format!("No emails found to {}", to_email).into()); + } + + // Get the latest message (highest sequence number) + let latest_seq = messages.iter().max().unwrap(); + + // Fetch the entire message + let messages = session.fetch(latest_seq.to_string(), "RFC822")?; + + let mut email_text = String::new(); + + for msg in messages.iter() { + let body = msg.body().ok_or("No body found in email")?; + + // Parse the complete email message + let parsed = parse_mail(body)?; + + // Extract headers + let headers = parsed.get_headers(); + let subject = headers.get_first_value("Subject").unwrap_or_default(); + let from = headers.get_first_value("From").unwrap_or_default(); + let date = headers.get_first_value("Date").unwrap_or_default(); + let to = headers.get_first_value("To").unwrap_or_default(); + + if !to + .trim() + .to_lowercase() + .contains(&to_email.trim().to_lowercase()) + { + continue; + } + // Extract body text (handles both simple and multipart emails) - SAME AS LIST_EMAILS + let body_text = if let Some(body_part) = parsed + .subparts + .iter() + .find(|p| p.ctype.mimetype == "text/plain") + { + body_part.get_body().unwrap_or_default() + } else { + parsed.get_body().unwrap_or_default() + }; + + // Only format if we have actual content + if 
!body_text.trim().is_empty() && body_text != "No readable content found" { + // Format the email text ready for reply with headers + email_text = format!( + "--- Original Message ---\nFrom: {}\nTo: {}\nDate: {}\nSubject: {}\n\n{}\n\n--- Reply Above This Line ---\n\n", + from, to, date, subject, body_text.trim() + ); + } else { + // Still provide headers even if body is empty + email_text = format!( + "--- Original Message ---\nFrom: {}\nTo: {}\nDate: {}\nSubject: {}\n\n[No readable content]\n\n--- Reply Above This Line ---\n\n", + from, to, date, subject + ); + } + + break; // We only want the first (and should be only) message + } + + session.logout()?; + + // Always return something, even if it's just headers + if email_text.is_empty() { + Err("Failed to extract email content".into()) + } else { + Ok(email_text) + } +} + +#[actix_web::post("/emails/send")] +pub async fn send_email( + payload: web::Json<(String, String, String)>, + state: web::Data, +) -> Result { + let (to, subject, body) = payload.into_inner(); + + info!("To: {}", to); + info!("Subject: {}", subject); + info!("Body: {}", body); + + // Send via SMTP + internal_send_email(&state.config.clone().unwrap().email, &to, &subject, &body).await; + + Ok(HttpResponse::Ok().finish()) +} + +#[actix_web::get("/campaigns/{campaign_id}/click/{email}")] +pub async fn save_click( + path: web::Path<(String, String)>, + state: web::Data, +) -> HttpResponse { + let (campaign_id, email) = path.into_inner(); + let _ = sqlx::query("INSERT INTO public.clicks (campaign_id, email, updated_at) VALUES ($1, $2, NOW()) ON CONFLICT (campaign_id, email) DO UPDATE SET updated_at = NOW()") + .bind(campaign_id) + .bind(email) + .execute(state.db.as_ref().unwrap()) + .await; + + let pixel = [ + 0x89, 0x50, 0x4E, 0x47, 0x0D, 0x0A, 0x1A, 0x0A, // PNG header + 0x00, 0x00, 0x00, 0x0D, 0x49, 0x48, 0x44, 0x52, // IHDR chunk + 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x01, // 1x1 dimension + 0x08, 0x06, 0x00, 0x00, 0x00, 0x1F, 0x15, 0xC4, 
0x89, // RGBA + 0x00, 0x00, 0x00, 0x0A, 0x49, 0x44, 0x41, 0x54, // IDAT chunk + 0x78, 0x9C, 0x63, 0x00, 0x01, 0x00, 0x00, 0x05, // data + 0x00, 0x01, 0x0D, 0x0A, 0x2D, 0xB4, // CRC + 0x00, 0x00, 0x00, 0x00, 0x49, 0x45, 0x4E, 0x44, // IEND chunk + 0xAE, 0x42, 0x60, 0x82, + ]; // EOF + + // At the end of your save_click function: + HttpResponse::Ok() + .content_type(ContentType::png()) + .body(pixel.to_vec()) // Using slicing to pass a reference +} + +#[actix_web::get("/campaigns/{campaign_id}/emails")] +pub async fn get_emails(path: web::Path, state: web::Data) -> String { + let campaign_id = path.into_inner(); + let rows = sqlx::query_scalar::<_, String>("SELECT email FROM clicks WHERE campaign_id = $1") + .bind(campaign_id) + .fetch_all(state.db.as_ref().unwrap()) + .await + .unwrap_or_default(); + rows.join(",") +} diff --git a/src/file/mod.rs b/src/file/mod.rs new file mode 100644 index 000000000..6bbe33ef0 --- /dev/null +++ b/src/file/mod.rs @@ -0,0 +1,142 @@ +use actix_web::web; + +use actix_multipart::Multipart; +use actix_web::{post, HttpResponse}; +use minio::s3::builders::ObjectContent; +use minio::s3::types::ToStream; +use minio::s3::Client; +use std::io::Write; +use tempfile::NamedTempFile; +use tokio_stream::StreamExt; + +use minio::s3::client::{Client as MinioClient, ClientBuilder as MinioClientBuilder}; +use minio::s3::creds::StaticProvider; +use minio::s3::http::BaseUrl; +use std::str::FromStr; + +use crate::config::AppConfig; +use crate::shared::state::AppState; + +pub async fn init_minio(config: &AppConfig) -> Result { + let scheme = if config.minio.use_ssl { + "https" + } else { + "http" + }; + let base_url = format!("{}://{}", scheme, config.minio.server); + let base_url = BaseUrl::from_str(&base_url)?; + let credentials = StaticProvider::new(&config.minio.access_key, &config.minio.secret_key, None); + + let minio_client = MinioClientBuilder::new(base_url) + .provider(Some(credentials)) + .build()?; + + Ok(minio_client) +} + 
+#[post("/files/upload/{folder_path}")] +pub async fn upload_file( + folder_path: web::Path, + mut payload: Multipart, + state: web::Data, +) -> Result { + let folder_path = folder_path.into_inner(); + + // Create a temporary file to store the uploaded file. + + let mut temp_file = NamedTempFile::new().map_err(|e| { + actix_web::error::ErrorInternalServerError(format!("Failed to create temp file: {}", e)) + })?; + + let mut file_name = None; + + // Iterate over the multipart stream. + + while let Some(mut field) = payload.try_next().await? { + let content_disposition = field.content_disposition(); + file_name = content_disposition + .get_filename() + .map(|name| name.to_string()); + + // Write the file content to the temporary file. + while let Some(chunk) = field.try_next().await? { + temp_file.write_all(&chunk).map_err(|e| { + actix_web::error::ErrorInternalServerError(format!( + "Failed to write to temp file: {}", + e + )) + })?; + } + } + + // Get the file name or use a default name + let file_name = file_name.unwrap_or_else(|| "unnamed_file".to_string()); + + // Construct the object name using the folder path and file name + let object_name = format!("{}/{}", folder_path, file_name); + + // Upload the file to the MinIO bucket + let client: Client = state.minio_client.clone().unwrap(); + let bucket_name = state.config.as_ref().unwrap().minio.bucket.clone(); + + let content = ObjectContent::from(temp_file.path()); + client + .put_object_content(bucket_name, &object_name, content) + .send() + .await + .map_err(|e| { + actix_web::error::ErrorInternalServerError(format!( + "Failed to upload file to MinIO: {}", + e + )) + })?; + + // Clean up the temporary file + temp_file.close().map_err(|e| { + actix_web::error::ErrorInternalServerError(format!("Failed to close temp file: {}", e)) + })?; + + Ok(HttpResponse::Ok().body(format!( + "Uploaded file '{}' to folder '{}'", + file_name, folder_path + ))) +} + +#[post("/files/list/{folder_path}")] +pub async fn list_file( + 
folder_path: web::Path, + state: web::Data, +) -> Result { + let folder_path = folder_path.into_inner(); + + let client: Client = state.minio_client.clone().unwrap(); + let bucket_name = "file-upload-rust-bucket"; + + // Create the stream using the to_stream() method + let mut objects_stream = client + .list_objects(bucket_name) + .prefix(Some(folder_path)) + .to_stream() + .await; + + let mut file_list = Vec::new(); + + // Use StreamExt::next() to iterate through the stream + while let Some(items) = objects_stream.next().await { + match items { + Ok(result) => { + for item in result.contents { + file_list.push(item.name); + } + } + Err(e) => { + return Err(actix_web::error::ErrorInternalServerError(format!( + "Failed to list files in MinIO: {}", + e + ))); + } + } + } + + Ok(HttpResponse::Ok().json(file_list)) +} diff --git a/src/llm/llm.rs b/src/llm/llm.rs new file mode 100644 index 000000000..63fa0961a --- /dev/null +++ b/src/llm/llm.rs @@ -0,0 +1,139 @@ +use log::error; + +use actix_web::{ + web::{self, Bytes}, + HttpResponse, Responder, +}; +use anyhow::Result; +use futures::StreamExt; +use langchain_rust::{ + chain::{Chain, LLMChainBuilder}, + fmt_message, fmt_template, + language_models::llm::LLM, + llm::openai::OpenAI, + message_formatter, + prompt::HumanMessagePromptTemplate, + prompt_args, + schemas::messages::Message, + template_fstring, +}; + +use crate::{state::AppState, utils::azure_from_config}; + +#[derive(serde::Deserialize)] +struct ChatRequest { + input: String, +} + +#[derive(serde::Serialize)] +struct ChatResponse { + text: String, + #[serde(skip_serializing_if = "Option::is_none")] + action: Option, +} + +#[derive(serde::Serialize)] +#[serde(tag = "type", content = "content")] +enum ChatAction { + ReplyEmail { content: String }, + // Add other action variants here as needed +} + +#[actix_web::post("/chat")] +pub async fn chat( + web::Json(request): web::Json, + state: web::Data, +) -> Result { + let azure_config = 
azure_from_config(&state.config.clone().unwrap().ai); + let open_ai = OpenAI::new(azure_config); + + // Parse the context JSON + let context: serde_json::Value = match serde_json::from_str(&request) { + Ok(ctx) => ctx, + Err(_) => serde_json::json!({}), + }; + + // Check view type and prepare appropriate prompt + let view_type = context + .get("viewType") + .and_then(|v| v.as_str()) + .unwrap_or(""); + let (prompt, might_trigger_action) = match view_type { + "email" => ( + format!( + "Respond to this email: {}. Keep it professional and concise. \ + If the email requires a response, provide one in the 'replyEmail' action format.", + request + ), + true, + ), + _ => (request, false), + }; + + let response_text = match open_ai.invoke(&prompt).await { + Ok(res) => res, + Err(err) => { + error!("Error invoking API: {}", err); + return Err(actix_web::error::ErrorInternalServerError( + "Failed to invoke OpenAI API", + )); + } + }; + + // Prepare response with potential action + let mut chat_response = ChatResponse { + text: response_text.clone(), + action: None, + }; + + // If in email view and the response looks like an email reply, add action + if might_trigger_action && view_type == "email" { + chat_response.action = Some(ChatAction::ReplyEmail { + content: response_text, + }); + } + + Ok(HttpResponse::Ok().json(chat_response)) +} + +#[actix_web::post("/stream")] +pub async fn chat_stream( + web::Json(request): web::Json, + state: web::Data, +) -> Result { + let azure_config = azure_from_config(&state.config.clone().unwrap().ai); + let open_ai = OpenAI::new(azure_config); + + let prompt = message_formatter![ + fmt_message!(Message::new_system_message( + "You are world class technical documentation writer." 
+ )), + fmt_template!(HumanMessagePromptTemplate::new(template_fstring!( + "{input}", "input" + ))) + ]; + + let chain = LLMChainBuilder::new() + .prompt(prompt) + .llm(open_ai) + .build() + .map_err(actix_web::error::ErrorInternalServerError)?; + + let mut stream = chain + .stream(prompt_args! { "input" => request.input }) + .await + .map_err(actix_web::error::ErrorInternalServerError)?; + + let actix_stream = async_stream::stream! { + while let Some(result) = stream.next().await { + match result { + Ok(value) => yield Ok::<_, actix_web::Error>(Bytes::from(value.content)), + Err(e) => yield Err(actix_web::error::ErrorInternalServerError(e)), + } + } + }; + + Ok(HttpResponse::Ok() + .content_type("text/event-stream") + .streaming(actix_stream)) +} diff --git a/src/llm/llm_generic.rs b/src/llm/llm_generic.rs new file mode 100644 index 000000000..cc1ce7d56 --- /dev/null +++ b/src/llm/llm_generic.rs @@ -0,0 +1,248 @@ +use log::{error, info}; + +use actix_web::{post, web, HttpRequest, HttpResponse, Result}; +use dotenv::dotenv; +use regex::Regex; +use reqwest::Client; +use serde::{Deserialize, Serialize}; +use std::env; + +// OpenAI-compatible request/response structures +#[derive(Debug, Serialize, Deserialize)] +struct ChatMessage { + role: String, + content: String, +} + +#[derive(Debug, Serialize, Deserialize)] +struct ChatCompletionRequest { + model: String, + messages: Vec, + stream: Option, +} + +#[derive(Debug, Serialize, Deserialize)] +struct ChatCompletionResponse { + id: String, + object: String, + created: u64, + model: String, + choices: Vec, +} + +#[derive(Debug, Serialize, Deserialize)] +struct Choice { + message: ChatMessage, + finish_reason: String, +} + +fn clean_request_body(body: &str) -> String { + // Remove problematic parameters that might not be supported by all providers + let re = Regex::new(r#","?\s*"(max_completion_tokens|parallel_tool_calls|top_p|frequency_penalty|presence_penalty)"\s*:\s*[^,}]*"#).unwrap(); + re.replace_all(body, 
"").to_string()
}

/// POST /v1/chat/completions — OpenAI-compatible proxy. Injects the
/// configured model name, forwards the payload to `AI_ENDPOINT`, then
/// normalizes the provider's answer into OpenAI response shape.
#[post("/v1/chat/completions")]
pub async fn generic_chat_completions(body: web::Bytes, _req: HttpRequest) -> Result<HttpResponse> {
    // Log raw POST data
    let body_str = std::str::from_utf8(&body).unwrap_or_default();
    info!("Original POST Data: {}", body_str);

    dotenv().ok();

    // Get environment variables
    let api_key = env::var("AI_KEY")
        .map_err(|_| actix_web::error::ErrorInternalServerError("AI_KEY not set."))?;
    let model = env::var("AI_LLM_MODEL")
        .map_err(|_| actix_web::error::ErrorInternalServerError("AI_LLM_MODEL not set."))?;
    let endpoint = env::var("AI_ENDPOINT")
        .map_err(|_| actix_web::error::ErrorInternalServerError("AI_ENDPOINT not set."))?;

    // Parse and modify the request body
    let mut json_value: serde_json::Value = serde_json::from_str(body_str)
        .map_err(|_| actix_web::error::ErrorInternalServerError("Failed to parse JSON"))?;

    // Add model parameter
    if let Some(obj) = json_value.as_object_mut() {
        obj.insert("model".to_string(), serde_json::Value::String(model));
    }

    let modified_body_str = serde_json::to_string(&json_value)
        .map_err(|_| actix_web::error::ErrorInternalServerError("Failed to serialize JSON"))?;

    info!("Modified POST Data: {}", modified_body_str);

    // Set up headers
    let mut headers = reqwest::header::HeaderMap::new();
    headers.insert(
        "Authorization",
        reqwest::header::HeaderValue::from_str(&format!("Bearer {}", api_key))
            .map_err(|_| actix_web::error::ErrorInternalServerError("Invalid API key format"))?,
    );
    headers.insert(
        "Content-Type",
        reqwest::header::HeaderValue::from_static("application/json"),
    );

    // Send request to the AI provider
    let client = Client::new();
    let response = client
        .post(&endpoint)
        .headers(headers)
        .body(modified_body_str)
        .send()
        .await
        .map_err(actix_web::error::ErrorInternalServerError)?;

    // Handle response
    let status = response.status();
    let raw_response = response
        .text()
        .await
        .map_err(actix_web::error::ErrorInternalServerError)?;

    info!("Provider response status: {}", status);
    info!("Provider response body: {}", raw_response);

    // Convert response to OpenAI format if successful
    if status.is_success() {
        match convert_to_openai_format(&raw_response) {
            Ok(openai_response) => Ok(HttpResponse::Ok()
                .content_type("application/json")
                .body(openai_response)),
            Err(e) => {
                error!("Failed to convert response format: {}", e);
                // Return the original response if conversion fails
                Ok(HttpResponse::Ok()
                    .content_type("application/json")
                    .body(raw_response))
            }
        }
    } else {
        // Return error as-is, preserving the upstream status code
        let actix_status = actix_web::http::StatusCode::from_u16(status.as_u16())
            .unwrap_or(actix_web::http::StatusCode::INTERNAL_SERVER_ERROR);

        Ok(HttpResponse::build(actix_status)
            .content_type("application/json")
            .body(raw_response))
    }
}

/// Converts provider response to OpenAI-compatible format.
/// Missing fields are filled with sensible defaults (fresh id, current
/// timestamp, whitespace word count as a token-usage approximation).
fn convert_to_openai_format(provider_response: &str) -> Result<String, Box<dyn std::error::Error>> {
    #[derive(serde::Deserialize)]
    struct ProviderChoice {
        message: ProviderMessage,
        #[serde(default)]
        finish_reason: Option<String>,
    }

    #[derive(serde::Deserialize)]
    struct ProviderMessage {
        role: Option<String>,
        content: String,
    }

    #[derive(serde::Deserialize)]
    struct ProviderResponse {
        id: Option<String>,
        object: Option<String>,
        created: Option<u64>,
        model: Option<String>,
        choices: Vec<ProviderChoice>,
        usage: Option<ProviderUsage>,
    }

    #[derive(serde::Deserialize, Default)]
    struct ProviderUsage {
        prompt_tokens: Option<u32>,
        completion_tokens: Option<u32>,
        total_tokens: Option<u32>,
    }

    #[derive(serde::Serialize)]
    struct OpenAIResponse {
        id: String,
        object: String,
        created: u64,
        model: String,
        choices: Vec<OpenAIChoice>,
        usage: OpenAIUsage,
    }

    #[derive(serde::Serialize)]
    struct OpenAIChoice {
        index: u32,
        message: OpenAIMessage,
        finish_reason: String,
    }

    #[derive(serde::Serialize)]
    struct OpenAIMessage {
        role: String,
        content: String,
    }

    #[derive(serde::Serialize)]
struct OpenAIUsage { + prompt_tokens: u32, + completion_tokens: u32, + total_tokens: u32, + } + + // Parse the provider response + let provider: ProviderResponse = serde_json::from_str(provider_response)?; + + // Extract content from the first choice + let first_choice = provider.choices.get(0).ok_or("No choices in response")?; + let content = first_choice.message.content.clone(); + let role = first_choice + .message + .role + .clone() + .unwrap_or_else(|| "assistant".to_string()); + + // Calculate token usage + let usage = provider.usage.unwrap_or_default(); + let prompt_tokens = usage.prompt_tokens.unwrap_or(0); + let completion_tokens = usage + .completion_tokens + .unwrap_or_else(|| content.split_whitespace().count() as u32); + let total_tokens = usage + .total_tokens + .unwrap_or(prompt_tokens + completion_tokens); + + let openai_response = OpenAIResponse { + id: provider + .id + .unwrap_or_else(|| format!("chatcmpl-{}", uuid::Uuid::new_v4().simple())), + object: provider + .object + .unwrap_or_else(|| "chat.completion".to_string()), + created: provider.created.unwrap_or_else(|| { + std::time::SystemTime::now() + .duration_since(std::time::UNIX_EPOCH) + .unwrap() + .as_secs() + }), + model: provider.model.unwrap_or_else(|| "llama".to_string()), + choices: vec![OpenAIChoice { + index: 0, + message: OpenAIMessage { role, content }, + finish_reason: first_choice + .finish_reason + .clone() + .unwrap_or_else(|| "stop".to_string()), + }], + usage: OpenAIUsage { + prompt_tokens, + completion_tokens, + total_tokens, + }, + }; + + serde_json::to_string(&openai_response).map_err(|e| e.into()) +} + +// Default implementation for ProviderUsage diff --git a/src/llm/llm_local.rs b/src/llm/llm_local.rs new file mode 100644 index 000000000..c1e21cb1b --- /dev/null +++ b/src/llm/llm_local.rs @@ -0,0 +1,577 @@ +use actix_web::{post, web, HttpRequest, HttpResponse, Result}; +use dotenv::dotenv; +use log::{error, info}; +use reqwest::Client; +use serde::{Deserialize, Serialize}; 
+use std::env; +use tokio::time::{sleep, Duration}; + +// OpenAI-compatible request/response structures +#[derive(Debug, Serialize, Deserialize)] +struct ChatMessage { + role: String, + content: String, +} + +#[derive(Debug, Serialize, Deserialize)] +struct ChatCompletionRequest { + model: String, + messages: Vec, + stream: Option, +} + +#[derive(Debug, Serialize, Deserialize)] +struct ChatCompletionResponse { + id: String, + object: String, + created: u64, + model: String, + choices: Vec, +} + +#[derive(Debug, Serialize, Deserialize)] +struct Choice { + message: ChatMessage, + finish_reason: String, +} + +// Llama.cpp server request/response structures +#[derive(Debug, Serialize, Deserialize)] +struct LlamaCppRequest { + prompt: String, + n_predict: Option, + temperature: Option, + top_k: Option, + top_p: Option, + stream: Option, +} + +#[derive(Debug, Serialize, Deserialize)] +struct LlamaCppResponse { + content: String, + stop: bool, + generation_settings: Option, +} + +pub async fn ensure_llama_servers_running() -> Result<(), Box> +{ + let llm_local = env::var("LLM_LOCAL").unwrap_or_else(|_| "false".to_string()); + + if llm_local.to_lowercase() != "true" { + info!("ℹ️ LLM_LOCAL is not enabled, skipping local server startup"); + return Ok(()); + } + + // Get configuration from environment variables + let llm_url = env::var("LLM_URL").unwrap_or_else(|_| "http://localhost:8081".to_string()); + let embedding_url = + env::var("EMBEDDING_URL").unwrap_or_else(|_| "http://localhost:8082".to_string()); + let llama_cpp_path = env::var("LLM_CPP_PATH").unwrap_or_else(|_| "~/llama.cpp".to_string()); + let llm_model_path = env::var("LLM_MODEL_PATH").unwrap_or_else(|_| "".to_string()); + let embedding_model_path = env::var("EMBEDDING_MODEL_PATH").unwrap_or_else(|_| "".to_string()); + + info!("🚀 Starting local llama.cpp servers..."); + info!("📋 Configuration:"); + info!(" LLM URL: {}", llm_url); + info!(" Embedding URL: {}", embedding_url); + info!(" LLM Model: {}", 
llm_model_path); + info!(" Embedding Model: {}", embedding_model_path); + + // Check if servers are already running + let llm_running = is_server_running(&llm_url).await; + let embedding_running = is_server_running(&embedding_url).await; + + if llm_running && embedding_running { + info!("✅ Both LLM and Embedding servers are already running"); + return Ok(()); + } + + // Start servers that aren't running + let mut tasks = vec![]; + + if !llm_running && !llm_model_path.is_empty() { + info!("🔄 Starting LLM server..."); + tasks.push(tokio::spawn(start_llm_server( + llama_cpp_path.clone(), + llm_model_path.clone(), + llm_url.clone(), + ))); + } else if llm_model_path.is_empty() { + info!("⚠️ LLM_MODEL_PATH not set, skipping LLM server"); + } + + if !embedding_running && !embedding_model_path.is_empty() { + info!("🔄 Starting Embedding server..."); + tasks.push(tokio::spawn(start_embedding_server( + llama_cpp_path.clone(), + embedding_model_path.clone(), + embedding_url.clone(), + ))); + } else if embedding_model_path.is_empty() { + info!("⚠️ EMBEDDING_MODEL_PATH not set, skipping Embedding server"); + } + + // Wait for all server startup tasks + for task in tasks { + task.await??; + } + + // Wait for servers to be ready with verbose logging + info!("⏳ Waiting for servers to become ready..."); + + let mut llm_ready = llm_running || llm_model_path.is_empty(); + let mut embedding_ready = embedding_running || embedding_model_path.is_empty(); + + let mut attempts = 0; + let max_attempts = 60; // 2 minutes total + + while attempts < max_attempts && (!llm_ready || !embedding_ready) { + sleep(Duration::from_secs(2)).await; + + info!( + "🔍 Checking server health (attempt {}/{})...", + attempts + 1, + max_attempts + ); + + if !llm_ready && !llm_model_path.is_empty() { + if is_server_running(&llm_url).await { + info!(" ✅ LLM server ready at {}", llm_url); + llm_ready = true; + } else { + info!(" ❌ LLM server not ready yet"); + } + } + + if !embedding_ready && 
!embedding_model_path.is_empty() { + if is_server_running(&embedding_url).await { + info!(" ✅ Embedding server ready at {}", embedding_url); + embedding_ready = true; + } else { + info!(" ❌ Embedding server not ready yet"); + } + } + + attempts += 1; + + if attempts % 10 == 0 { + info!( + "⏰ Still waiting for servers... (attempt {}/{})", + attempts, max_attempts + ); + } + } + + if llm_ready && embedding_ready { + info!("🎉 All llama.cpp servers are ready and responding!"); + Ok(()) + } else { + let mut error_msg = "❌ Servers failed to start within timeout:".to_string(); + if !llm_ready && !llm_model_path.is_empty() { + error_msg.push_str(&format!("\n - LLM server at {}", llm_url)); + } + if !embedding_ready && !embedding_model_path.is_empty() { + error_msg.push_str(&format!("\n - Embedding server at {}", embedding_url)); + } + Err(error_msg.into()) + } +} + +async fn start_llm_server( + llama_cpp_path: String, + model_path: String, + url: String, +) -> Result<(), Box> { + let port = url.split(':').last().unwrap_or("8081"); + + std::env::set_var("OMP_NUM_THREADS", "20"); + std::env::set_var("OMP_PLACES", "cores"); + std::env::set_var("OMP_PROC_BIND", "close"); + + // "cd {} && numactl --interleave=all ./llama-server -m {} --host 0.0.0.0 --port {} --threads 20 --threads-batch 40 --temp 0.7 --parallel 1 --repeat-penalty 1.1 --ctx-size 8192 --batch-size 8192 -n 4096 --mlock --no-mmap --flash-attn --no-kv-offload --no-mmap &", + + let mut cmd = tokio::process::Command::new("sh"); + cmd.arg("-c").arg(format!( + "cd {} && ./llama-server -m {} --host 0.0.0.0 --port {} --n-gpu-layers 99 &", + llama_cpp_path, model_path, port + )); + + cmd.spawn()?; + Ok(()) +} + +async fn start_embedding_server( + llama_cpp_path: String, + model_path: String, + url: String, +) -> Result<(), Box> { + let port = url.split(':').last().unwrap_or("8082"); + + let mut cmd = tokio::process::Command::new("sh"); + cmd.arg("-c").arg(format!( + "cd {} && ./llama-server -m {} --host 0.0.0.0 --port {} 
--embedding --n-gpu-layers 99 &", + llama_cpp_path, model_path, port + )); + + cmd.spawn()?; + Ok(()) +} + +async fn is_server_running(url: &str) -> bool { + let client = reqwest::Client::new(); + match client.get(&format!("{}/health", url)).send().await { + Ok(response) => response.status().is_success(), + Err(_) => false, + } +} + +// Convert OpenAI chat messages to a single prompt +fn messages_to_prompt(messages: &[ChatMessage]) -> String { + let mut prompt = String::new(); + + for message in messages { + match message.role.as_str() { + "system" => { + prompt.push_str(&format!("System: {}\n\n", message.content)); + } + "user" => { + prompt.push_str(&format!("User: {}\n\n", message.content)); + } + "assistant" => { + prompt.push_str(&format!("Assistant: {}\n\n", message.content)); + } + _ => { + prompt.push_str(&format!("{}: {}\n\n", message.role, message.content)); + } + } + } + + prompt.push_str("Assistant: "); + prompt +} + +// Proxy endpoint +#[post("/local/v1/chat/completions")] +pub async fn chat_completions_local( + req_body: web::Json, + _req: HttpRequest, +) -> Result { + dotenv().ok().unwrap(); + + // Get llama.cpp server URL + let llama_url = env::var("LLM_URL").unwrap_or_else(|_| "http://localhost:8081".to_string()); + + // Convert OpenAI format to llama.cpp format + let prompt = messages_to_prompt(&req_body.messages); + + let llama_request = LlamaCppRequest { + prompt, + n_predict: Some(500), // Adjust as needed + temperature: Some(0.7), + top_k: Some(40), + top_p: Some(0.9), + stream: req_body.stream, + }; + + // Send request to llama.cpp server + let client = Client::builder() + .timeout(Duration::from_secs(120)) // 2 minute timeout + .build() + .map_err(|e| { + error!("Error creating HTTP client: {}", e); + actix_web::error::ErrorInternalServerError("Failed to create HTTP client") + })?; + + let response = client + .post(&format!("{}/completion", llama_url)) + .header("Content-Type", "application/json") + .json(&llama_request) + .send() + .await + 
.map_err(|e| { + error!("Error calling llama.cpp server: {}", e); + actix_web::error::ErrorInternalServerError("Failed to call llama.cpp server") + })?; + + let status = response.status(); + + if status.is_success() { + let llama_response: LlamaCppResponse = response.json().await.map_err(|e| { + error!("Error parsing llama.cpp response: {}", e); + actix_web::error::ErrorInternalServerError("Failed to parse llama.cpp response") + })?; + + // Convert llama.cpp response to OpenAI format + let openai_response = ChatCompletionResponse { + id: format!("chatcmpl-{}", uuid::Uuid::new_v4()), + object: "chat.completion".to_string(), + created: std::time::SystemTime::now() + .duration_since(std::time::UNIX_EPOCH) + .unwrap() + .as_secs(), + model: req_body.model.clone(), + choices: vec![Choice { + message: ChatMessage { + role: "assistant".to_string(), + content: llama_response.content.trim().to_string(), + }, + finish_reason: if llama_response.stop { + "stop".to_string() + } else { + "length".to_string() + }, + }], + }; + + Ok(HttpResponse::Ok().json(openai_response)) + } else { + let error_text = response + .text() + .await + .unwrap_or_else(|_| "Unknown error".to_string()); + + error!("Llama.cpp server error ({}): {}", status, error_text); + + let actix_status = actix_web::http::StatusCode::from_u16(status.as_u16()) + .unwrap_or(actix_web::http::StatusCode::INTERNAL_SERVER_ERROR); + + Ok(HttpResponse::build(actix_status).json(serde_json::json!({ + "error": { + "message": error_text, + "type": "server_error" + } + }))) + } +} + +// OpenAI Embedding Request - Modified to handle both string and array inputs +#[derive(Debug, Deserialize)] +pub struct EmbeddingRequest { + #[serde(deserialize_with = "deserialize_input")] + pub input: Vec, + pub model: String, + #[serde(default)] + pub _encoding_format: Option, +} + +// Custom deserializer to handle both string and array inputs +fn deserialize_input<'de, D>(deserializer: D) -> Result, D::Error> +where + D: 
serde::Deserializer<'de>, +{ + use serde::de::{self, Visitor}; + use std::fmt; + + struct InputVisitor; + + impl<'de> Visitor<'de> for InputVisitor { + type Value = Vec; + + fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("a string or an array of strings") + } + + fn visit_str(self, value: &str) -> Result + where + E: de::Error, + { + Ok(vec![value.to_string()]) + } + + fn visit_string(self, value: String) -> Result + where + E: de::Error, + { + Ok(vec![value]) + } + + fn visit_seq(self, mut seq: A) -> Result + where + A: de::SeqAccess<'de>, + { + let mut vec = Vec::new(); + while let Some(value) = seq.next_element::()? { + vec.push(value); + } + Ok(vec) + } + } + + deserializer.deserialize_any(InputVisitor) +} + +// OpenAI Embedding Response +#[derive(Debug, Serialize)] +pub struct EmbeddingResponse { + pub object: String, + pub data: Vec, + pub model: String, + pub usage: Usage, +} + +#[derive(Debug, Serialize)] +pub struct EmbeddingData { + pub object: String, + pub embedding: Vec, + pub index: usize, +} + +#[derive(Debug, Serialize)] +pub struct Usage { + pub prompt_tokens: u32, + pub total_tokens: u32, +} + +// Llama.cpp Embedding Request +#[derive(Debug, Serialize)] +struct LlamaCppEmbeddingRequest { + pub content: String, +} + +// FIXED: Handle the stupid nested array format +#[derive(Debug, Deserialize)] +struct LlamaCppEmbeddingResponseItem { + pub index: usize, + pub embedding: Vec>, // This is the up part - embedding is an array of arrays +} + +// Proxy endpoint for embeddings +#[post("/v1/embeddings")] +pub async fn embeddings_local( + req_body: web::Json, + _req: HttpRequest, +) -> Result { + dotenv().ok(); + + // Get llama.cpp server URL + let llama_url = + env::var("EMBEDDING_URL").unwrap_or_else(|_| "http://localhost:8082".to_string()); + + let client = Client::builder() + .timeout(Duration::from_secs(120)) + .build() + .map_err(|e| { + error!("Error creating HTTP client: {}", e); + 
actix_web::error::ErrorInternalServerError("Failed to create HTTP client")
        })?;

    // Request an embedding for every input text, preserving request order.
    let mut embeddings_data = Vec::new();
    let mut total_tokens = 0;

    for (index, input_text) in req_body.input.iter().enumerate() {
        let llama_request = LlamaCppEmbeddingRequest {
            content: input_text.clone(),
        };

        let response = client
            .post(&format!("{}/embedding", llama_url))
            .header("Content-Type", "application/json")
            .json(&llama_request)
            .send()
            .await
            .map_err(|e| {
                error!("Error calling llama.cpp server for embedding: {}", e);
                actix_web::error::ErrorInternalServerError(
                    "Failed to call llama.cpp server for embedding",
                )
            })?;

        let status = response.status();

        if status.is_success() {
            // Keep the raw body so parse failures can be logged verbatim.
            let raw_response = response.text().await.map_err(|e| {
                error!("Error reading response text: {}", e);
                actix_web::error::ErrorInternalServerError("Failed to read response")
            })?;

            // llama.cpp returns a list of items whose `embedding` field is
            // an array of arrays.
            let llama_response: Vec<LlamaCppEmbeddingResponseItem> =
                serde_json::from_str(&raw_response).map_err(|e| {
                    error!("Error parsing llama.cpp embedding response: {}", e);
                    error!("Raw response: {}", raw_response);
                    actix_web::error::ErrorInternalServerError(
                        "Failed to parse llama.cpp embedding response",
                    )
                })?;

            if let Some(item) = llama_response.first() {
                // `embedding` is Vec<Vec<f32>> (e.g. [[0.1, 0.2, 0.3]]);
                // take the first inner vector, or an empty one if none came
                // back. Equivalent to the previous manual empty-check.
                let flattened_embedding =
                    item.embedding.first().cloned().unwrap_or_default();

                // Rough token estimate: ~4 characters per token.
                let estimated_tokens = (input_text.len() as f32 / 4.0).ceil() as u32;
                total_tokens += estimated_tokens;

                embeddings_data.push(EmbeddingData {
                    object: "embedding".to_string(),
embedding: flattened_embedding, + index, + }); + } else { + error!("No embedding data returned for input: {}", input_text); + return Ok(HttpResponse::InternalServerError().json(serde_json::json!({ + "error": { + "message": format!("No embedding data returned for input {}", index), + "type": "server_error" + } + }))); + } + } else { + let error_text = response + .text() + .await + .unwrap_or_else(|_| "Unknown error".to_string()); + + error!("Llama.cpp server error ({}): {}", status, error_text); + + let actix_status = actix_web::http::StatusCode::from_u16(status.as_u16()) + .unwrap_or(actix_web::http::StatusCode::INTERNAL_SERVER_ERROR); + + return Ok(HttpResponse::build(actix_status).json(serde_json::json!({ + "error": { + "message": format!("Failed to get embedding for input {}: {}", index, error_text), + "type": "server_error" + } + }))); + } + } + + // Build OpenAI-compatible response + let openai_response = EmbeddingResponse { + object: "list".to_string(), + data: embeddings_data, + model: req_body.model.clone(), + usage: Usage { + prompt_tokens: total_tokens, + total_tokens, + }, + }; + + Ok(HttpResponse::Ok().json(openai_response)) +} + +// Health check endpoint +#[actix_web::get("/health")] +pub async fn health() -> Result { + let llama_url = env::var("LLM_URL").unwrap_or_else(|_| "http://localhost:8081".to_string()); + + if is_server_running(&llama_url).await { + Ok(HttpResponse::Ok().json(serde_json::json!({ + "status": "healthy", + "llama_server": "running" + }))) + } else { + Ok(HttpResponse::ServiceUnavailable().json(serde_json::json!({ + "status": "unhealthy", + "llama_server": "not running" + }))) + } +} diff --git a/src/llm/llm_provider.rs b/src/llm/llm_provider.rs new file mode 100644 index 000000000..4fc02bc81 --- /dev/null +++ b/src/llm/llm_provider.rs @@ -0,0 +1,116 @@ +use log::info; + +use actix_web::{post, web, HttpRequest, HttpResponse, Result}; +use dotenv::dotenv; +use regex::Regex; +use reqwest::Client; +use serde::{Deserialize, Serialize}; 
+use std::env; + +// OpenAI-compatible request/response structures +#[derive(Debug, Serialize, Deserialize)] +struct ChatMessage { + role: String, + content: String, +} + +#[derive(Debug, Serialize, Deserialize)] +struct ChatCompletionRequest { + model: String, + messages: Vec, + stream: Option, +} + +#[derive(Debug, Serialize, Deserialize)] +struct ChatCompletionResponse { + id: String, + object: String, + created: u64, + model: String, + choices: Vec, +} + +#[derive(Debug, Serialize, Deserialize)] +struct Choice { + message: ChatMessage, + finish_reason: String, +} + +#[post("/azure/v1/chat/completions")] +async fn chat_completions(body: web::Bytes, _req: HttpRequest) -> Result { + // Always log raw POST data + if let Ok(body_str) = std::str::from_utf8(&body) { + info!("POST Data: {}", body_str); + } else { + info!("POST Data (binary): {:?}", body); + } + + dotenv().ok(); + + // Environment variables + let azure_endpoint = env::var("AI_ENDPOINT") + .map_err(|_| actix_web::error::ErrorInternalServerError("AI_ENDPOINT not set."))?; + let azure_key = env::var("AI_KEY") + .map_err(|_| actix_web::error::ErrorInternalServerError("AI_KEY not set."))?; + let deployment_name = env::var("AI_LLM_MODEL") + .map_err(|_| actix_web::error::ErrorInternalServerError("AI_LLM_MODEL not set."))?; + + // Construct Azure OpenAI URL + let url = format!( + "{}/openai/deployments/{}/chat/completions?api-version=2025-01-01-preview", + azure_endpoint, deployment_name + ); + + // Forward headers + let mut headers = reqwest::header::HeaderMap::new(); + headers.insert( + "api-key", + reqwest::header::HeaderValue::from_str(&azure_key) + .map_err(|_| actix_web::error::ErrorInternalServerError("Invalid Azure key"))?, + ); + headers.insert( + "Content-Type", + reqwest::header::HeaderValue::from_static("application/json"), + ); + + let body_str = std::str::from_utf8(&body).unwrap_or(""); + info!("Original POST Data: {}", body_str); + + // Remove the problematic params + let re = + 
Regex::new(r#","?\s*"(max_completion_tokens|parallel_tool_calls)"\s*:\s*[^,}]*"#).unwrap(); + let cleaned = re.replace_all(body_str, ""); + let cleaned_body = web::Bytes::from(cleaned.to_string()); + + info!("Cleaned POST Data: {}", cleaned); + + // Send request to Azure + let client = Client::new(); + let response = client + .post(&url) + .headers(headers) + .body(cleaned_body) + .send() + .await + .map_err(actix_web::error::ErrorInternalServerError)?; + + // Handle response based on status + let status = response.status(); + let raw_response = response + .text() + .await + .map_err(actix_web::error::ErrorInternalServerError)?; + + // Log the raw response + info!("Raw Azure response: {}", raw_response); + + if status.is_success() { + Ok(HttpResponse::Ok().body(raw_response)) + } else { + // Handle error responses properly + let actix_status = actix_web::http::StatusCode::from_u16(status.as_u16()) + .unwrap_or(actix_web::http::StatusCode::INTERNAL_SERVER_ERROR); + + Ok(HttpResponse::build(actix_status).body(raw_response)) + } +} diff --git a/src/llm/mod.rs b/src/llm/mod.rs new file mode 100644 index 000000000..194ab19db --- /dev/null +++ b/src/llm/mod.rs @@ -0,0 +1,274 @@ +pub mod llm_generic; +pub mod llm_local; +pub mod llm_provider; + +use async_trait::async_trait; +use futures::StreamExt; +use langchain_rust::{ + language_models::llm::LLM, + llm::{claude::Claude, openai::OpenAI}, + schemas::Message, +}; +use serde_json::Value; +use std::sync::Arc; +use tokio::sync::mpsc; + +use crate::tools::ToolManager; + +#[async_trait] +pub trait LLMProvider: Send + Sync { + async fn generate( + &self, + prompt: &str, + config: &Value, + ) -> Result>; + + async fn generate_stream( + &self, + prompt: &str, + config: &Value, + tx: mpsc::Sender, + ) -> Result<(), Box>; + + // Add tool calling capability using LangChain tools + async fn generate_with_tools( + &self, + prompt: &str, + config: &Value, + available_tools: &[String], + tool_manager: Arc, + session_id: &str, + 
user_id: &str, + ) -> Result>; +} + +pub struct OpenAIClient { + client: OpenAI, +} + +impl OpenAIClient { + pub fn new(client: OpenAI) -> Self { + Self { client } + } +} + +#[async_trait] +impl LLMProvider for OpenAIClient { + async fn generate( + &self, + prompt: &str, + _config: &Value, + ) -> Result> { + let messages = vec![Message::new_human_message(prompt.to_string())]; + + let result = self + .client + .invoke(&messages) + .await + .map_err(|e| Box::new(e) as Box)?; + + Ok(result) + } + + async fn generate_stream( + &self, + prompt: &str, + _config: &Value, + mut tx: mpsc::Sender, + ) -> Result<(), Box> { + let messages = vec![Message::new_human_message(prompt.to_string())]; + + let mut stream = self + .client + .stream(&messages) + .await + .map_err(|e| Box::new(e) as Box)?; + + while let Some(result) = stream.next().await { + match result { + Ok(chunk) => { + let content = chunk.content; + if !content.is_empty() { + let _ = tx.send(content.to_string()).await; + } + } + Err(e) => { + eprintln!("Stream error: {}", e); + } + } + } + + Ok(()) + } + + async fn generate_with_tools( + &self, + prompt: &str, + _config: &Value, + available_tools: &[String], + _tool_manager: Arc, + _session_id: &str, + _user_id: &str, + ) -> Result> { + // Enhanced prompt with tool information + let tools_info = if available_tools.is_empty() { + String::new() + } else { + format!("\n\nAvailable tools: {}. 
You can suggest using these tools if they would help answer the user's question.", available_tools.join(", ")) + }; + + let enhanced_prompt = format!("{}{}", prompt, tools_info); + + let messages = vec![Message::new_human_message(enhanced_prompt)]; + + let result = self + .client + .invoke(&messages) + .await + .map_err(|e| Box::new(e) as Box)?; + + Ok(result) + } +} + +pub struct AnthropicClient { + client: Claude, +} + +impl AnthropicClient { + pub fn new(api_key: String) -> Self { + let client = Claude::default().with_api_key(api_key); + Self { client } + } +} + +#[async_trait] +impl LLMProvider for AnthropicClient { + async fn generate( + &self, + prompt: &str, + _config: &Value, + ) -> Result> { + let messages = vec![Message::new_human_message(prompt.to_string())]; + + let result = self + .client + .invoke(&messages) + .await + .map_err(|e| Box::new(e) as Box)?; + + Ok(result) + } + + async fn generate_stream( + &self, + prompt: &str, + _config: &Value, + mut tx: mpsc::Sender, + ) -> Result<(), Box> { + let messages = vec![Message::new_human_message(prompt.to_string())]; + + let mut stream = self + .client + .stream(&messages) + .await + .map_err(|e| Box::new(e) as Box)?; + + while let Some(result) = stream.next().await { + match result { + Ok(chunk) => { + let content = chunk.content; + if !content.is_empty() { + let _ = tx.send(content.to_string()).await; + } + } + Err(e) => { + eprintln!("Stream error: {}", e); + } + } + } + + Ok(()) + } + + async fn generate_with_tools( + &self, + prompt: &str, + _config: &Value, + available_tools: &[String], + _tool_manager: Arc, + _session_id: &str, + _user_id: &str, + ) -> Result> { + let tools_info = if available_tools.is_empty() { + String::new() + } else { + format!("\n\nAvailable tools: {}. 
You can suggest using these tools if they would help answer the user's question.", available_tools.join(", ")) + }; + + let enhanced_prompt = format!("{}{}", prompt, tools_info); + + let messages = vec![Message::new_human_message(enhanced_prompt)]; + + let result = self + .client + .invoke(&messages) + .await + .map_err(|e| Box::new(e) as Box)?; + + Ok(result) + } +} + +pub struct MockLLMProvider; + +impl MockLLMProvider { + pub fn new() -> Self { + Self + } +} + +#[async_trait] +impl LLMProvider for MockLLMProvider { + async fn generate( + &self, + prompt: &str, + _config: &Value, + ) -> Result> { + Ok(format!("Mock response to: {}", prompt)) + } + + async fn generate_stream( + &self, + prompt: &str, + _config: &Value, + mut tx: mpsc::Sender, + ) -> Result<(), Box> { + let response = format!("Mock stream response to: {}", prompt); + for word in response.split_whitespace() { + let _ = tx.send(format!("{} ", word)).await; + tokio::time::sleep(tokio::time::Duration::from_millis(100)).await; + } + Ok(()) + } + + async fn generate_with_tools( + &self, + prompt: &str, + _config: &Value, + available_tools: &[String], + _tool_manager: Arc, + _session_id: &str, + _user_id: &str, + ) -> Result> { + let tools_list = if available_tools.is_empty() { + "no tools available".to_string() + } else { + available_tools.join(", ") + }; + Ok(format!( + "Mock response with tools [{}] to: {}", + tools_list, prompt + )) + } +} diff --git a/src/main.rs b/src/main.rs new file mode 100644 index 000000000..d8d270254 --- /dev/null +++ b/src/main.rs @@ -0,0 +1,254 @@ +mod auth; +mod automation; +mod basic; +mod bot; +mod channels; +mod chart; +mod config; +mod context; +mod email; +mod file; +mod llm; +mod org; +mod session; +mod shared; +mod tools; +mod web_automation; +mod whatsapp; + +use log::info; +use qdrant_client::Qdrant; +use std::sync::Arc; + +use actix_web::{web, App, HttpServer}; +use dotenv::dotenv; +use sqlx::PgPool; + +use crate::auth::AuthService; +use 
crate::bot::BotOrchestrator; +use crate::config::AppConfig; +use crate::email::{ + get_emails, get_latest_email_from, list_emails, save_click, save_draft, send_email, +}; +use crate::file::{list_file, upload_file}; +use crate::llm::llm_generic::generic_chat_completions; +use crate::llm::llm_local::{ + chat_completions_local, embeddings_local, ensure_llama_servers_running, +}; +use crate::session::SessionManager; +use crate::shared::state::AppState; +use crate::tools::{RedisToolExecutor, ToolManager}; +use crate::web_automation::{initialize_browser_pool, BrowserPool}; + +#[tokio::main(flavor = "multi_thread")] +async fn main() -> std::io::Result<()> { + dotenv().ok(); + env_logger::Builder::from_env(env_logger::Env::default().default_filter_or("info")).init(); + info!("Starting General Bots 6.0..."); + + let config = AppConfig::from_env(); + let db_url = config.database_url(); + let db_custom_url = config.database_custom_url(); + let db = PgPool::connect(&db_url).await.unwrap(); + let db_custom = PgPool::connect(&db_custom_url).await.unwrap(); + + let minio_client = init_minio(&config) + .await + .expect("Failed to initialize Minio"); + + let browser_pool = Arc::new(BrowserPool::new( + "http://localhost:9515".to_string(), + 5, + "/usr/bin/brave-browser-beta".to_string(), + )); + + ensure_llama_servers_running() + .await + .expect("Failed to initialize LLM local server."); + + initialize_browser_pool() + .await + .expect("Failed to initialize browser pool"); + + // Initialize Redis if available + let redis_url = std::env::var("REDIS_URL").unwrap_or_else(|_| "".to_string()); + let redis_conn = match std::env::var("REDIS_URL") { + Ok(redis_url_value) => { + let client = redis::Client::open(redis_url_value.clone()) + .expect("Failed to create Redis client"); + let conn = client + .get_connection() + .expect("Failed to create Redis connection"); + Some(Arc::new(conn)) + } + Err(_) => None, + }; + + let qdrant_url = 
std::env::var("QDRANT_URL").unwrap_or("http://localhost:6334".to_string()); + let qdrant = Qdrant::from_url(&qdrant_url) + .build() + .expect("Failed to connect to Qdrant"); + + let session_manager = SessionManager::new(db.clone(), redis_conn.clone()); + let auth_service = AuthService::new(db.clone(), redis_conn.clone()); + + let llm_provider: Arc = match std::env::var("LLM_PROVIDER") + .unwrap_or("mock".to_string()) + .as_str() + { + "openai" => Arc::new(crate::llm::OpenAIClient::new( + std::env::var("OPENAI_API_KEY").expect("OPENAI_API_KEY required"), + )), + "anthropic" => Arc::new(crate::llm::AnthropicClient::new( + std::env::var("ANTHROPIC_API_KEY").expect("ANTHROPIC_API_KEY required"), + )), + _ => Arc::new(crate::llm::MockLLMProvider::new()), + }; + + let web_adapter = Arc::new(crate::channels::WebChannelAdapter::new()); + let voice_adapter = Arc::new(crate::channels::VoiceAdapter::new( + std::env::var("LIVEKIT_URL").unwrap_or("ws://localhost:7880".to_string()), + std::env::var("LIVEKIT_API_KEY").unwrap_or("dev".to_string()), + std::env::var("LIVEKIT_API_SECRET").unwrap_or("secret".to_string()), + )); + + let whatsapp_adapter = Arc::new(crate::whatsapp::WhatsAppAdapter::new( + std::env::var("META_ACCESS_TOKEN").unwrap_or("".to_string()), + std::env::var("META_PHONE_NUMBER_ID").unwrap_or("".to_string()), + std::env::var("META_WEBHOOK_VERIFY_TOKEN").unwrap_or("".to_string()), + )); + + let tool_executor = Arc::new( + RedisToolExecutor::new( + redis_url.as_str(), + web_adapter.clone() as Arc, + db.clone(), + redis_conn.clone(), + ) + .expect("Failed to create RedisToolExecutor"), + ); + let chart_generator = ChartGenerator::new().map(Arc::new); + // Initialize LangChain components + let llm = OpenAI::default(); + let llm_provider: Arc = Arc::new(OpenAIClient::new(llm)); + + // Initialize vector store for document mode + let vector_store = if let (Ok(qdrant_url), Ok(openai_key)) = + (std::env::var("QDRANT_URL"), std::env::var("OPENAI_API_KEY")) + { + let 
embedder = OpenAiEmbedder::default().with_api_key(openai_key); + let client = QdrantClient::from_url(&qdrant_url).build().ok()?; + + let store = StoreBuilder::new() + .embedder(embedder) + .client(client) + .collection_name("documents") + .build() + .await + .ok()?; + + Some(Arc::new(store)) + } else { + None + }; + + // Initialize SQL chain for database mode + let sql_chain = if let Ok(db_url) = std::env::var("DATABASE_URL") { + let engine = PostgreSQLEngine::new(&db_url).await.ok()?; + let db = SQLDatabaseBuilder::new(engine).build().await.ok()?; + + let llm = OpenAI::default(); + let chain = langchain_rust::chain::SQLDatabaseChainBuilder::new() + .llm(llm) + .top_k(5) + .database(db) + .build() + .ok()?; + + Some(Arc::new(chain)) + } else { + None + }; + + let tool_manager = ToolManager::new(); + let orchestrator = BotOrchestrator::new( + session_manager, + tool_manager, + llm_provider, + auth_service, + chart_generator, + vector_store, + sql_chain, + ); + + orchestrator.add_channel("web", web_adapter.clone()); + orchestrator.add_channel("voice", voice_adapter.clone()); + orchestrator.add_channel("whatsapp", whatsapp_adapter.clone()); + + sqlx::query( + "INSERT INTO bots (id, name, llm_provider) VALUES ($1, $2, $3) ON CONFLICT DO NOTHING", + ) + .bind(uuid::Uuid::parse_str("00000000-0000-0000-0000-000000000000").unwrap()) + .bind("Default Bot") + .bind("mock") + .execute(&db) + .await + .unwrap(); + + let app_state = web::Data::new(AppState { + db: db.into(), + db_custom: db_custom.into(), + config: Some(config.clone()), + minio_client: minio_client.into(), + browser_pool: browser_pool.clone(), + orchestrator: Arc::new(orchestrator), + web_adapter, + voice_adapter, + whatsapp_adapter, + }); + + // Start automation service in background + let automation_state = app_state.get_ref().clone(); + + let automation = AutomationService::new(automation_state, "src/prompts"); + let _automation_handle = automation.spawn(); + + // Start HTTP server + HttpServer::new(move || 
{ + App::new() + .wrap(Logger::default()) + .wrap(Logger::new("HTTP REQUEST: %a %{User-Agent}i")) + .app_data(app_state.clone()) + // Original services + .service(upload_file) + .service(list_file) + .service(save_click) + .service(get_emails) + .service(list_emails) + .service(send_email) + .service(crate::orchestrator::chat_stream) + .service(crate::orchestrator::chat) + .service(chat_completions_local) + .service(save_draft) + .service(generic_chat_completions) + .service(embeddings_local) + .service(get_latest_email_from) + .service(services::orchestrator::websocket_handler) + .service(services::orchestrator::whatsapp_webhook_verify) + .service(services::orchestrator::whatsapp_webhook) + .service(services::orchestrator::voice_start) + .service(services::orchestrator::voice_stop) + .service(services::orchestrator::create_session) + .service(services::orchestrator::get_sessions) + .service(services::orchestrator::get_session_history) + .service(services::orchestrator::index) + .service(create_organization) + .service(get_organization) + .service(list_organizations) + .service(update_organization) + .service(delete_organization) + }) + .bind((config.server.host.clone(), config.server.port))? 
+ .run() + .await +} diff --git a/src/org/mod.rs b/src/org/mod.rs new file mode 100644 index 000000000..e8dd67721 --- /dev/null +++ b/src/org/mod.rs @@ -0,0 +1,163 @@ +use actix_web::{put, web, HttpResponse, Result}; +use chrono::Utc; +use serde::{Deserialize, Serialize}; +use sqlx::PgPool; +use uuid::Uuid; + +#[derive(Debug, Deserialize)] +pub struct CreateOrganizationRequest { + pub name: String, + pub slug: String, +} + +#[derive(Debug, Serialize)] +pub struct ApiResponse { + pub data: T, + pub success: bool, +} + +// Helper functions + +/// Create a new organization in database +pub async fn create_organization_db( + db_pool: &PgPool, + name: &str, + slug: &str, +) -> Result { + let org = sqlx::query_as!( + Organization, + r#" + INSERT INTO organizations (org_id, name, slug, created_at) + VALUES ($1, $2, $3, $4) + RETURNING org_id, name, slug, created_at + "#, + Uuid::new_v4(), + name, + slug, + Utc::now() + ) + .fetch_one(db_pool) + .await?; + + Ok(org) +} + +/// Get organization by ID from database +pub async fn get_organization_by_id_db( + db_pool: &PgPool, + org_id: Uuid, +) -> Result, sqlx::Error> { + let org = sqlx::query_as!( + Organization, + r#" + SELECT org_id, name, slug, created_at + FROM organizations + WHERE org_id = $1 + "#, + org_id + ) + .fetch_optional(db_pool) + .await?; + + Ok(org) +} + +#[post("/organizations/create")] +pub async fn create_organization( + state: web::Data, + payload: web::Json, +) -> Result { + let org = create_organization_db(&state.db_pool, &payload.name, &payload.slug) + .await + .map_err(|e| { + actix_web::error::ErrorInternalServerError(format!( + "Failed to create organization: {}", + e + )) + })?; + + let response = ApiResponse { + data: org, + success: true, + }; + + Ok(HttpResponse::Ok().json(response)) +} + +#[get("/organizations/{org_id}")] +pub async fn get_organization( + state: web::Data, + path: web::Path, +) -> Result { + let org_id = path.into_inner(); + + let org = get_organization_by_id_db(&state.db_pool, 
org_id) + .await + .map_err(|e| { + actix_web::error::ErrorInternalServerError(format!("Database error: {}", e)) + })?; + + match org { + Some(org) => { + let response = ApiResponse { + data: org, + success: true, + }; + Ok(HttpResponse::Ok().json(response)) + } + None => Ok(HttpResponse::NotFound().json(ApiResponse { + data: "Organization not found", + success: false, + })), + } +} + +#[get("/organizations")] +pub async fn list_organizations( + state: web::Data, + query: web::Query, +) -> Result { + let orgs = get_organizations_db(&state.db_pool, query.page, query.page_size) + .await + .map_err(|e| { + actix_web::error::ErrorInternalServerError(format!("Database error: {}", e)) + })?; + + let response = ApiResponse { + data: orgs, + success: true, + }; + + Ok(HttpResponse::Ok().json(response)) +} + +#[put("/organizations/{org_id}")] +pub async fn update_organization( + state: web::Data, + path: web::Path, + payload: web::Json, +) -> Result { + let org_id = path.into_inner(); + + // Implementation for update operation + // Use spawn_blocking for CPU-intensive operations if needed + let updated_org = web::block(move || { + // Blocking database operation would go here + // For async, use direct SQLx calls + Ok::<_, actix_web::Error>(Organization { + org_id, + name: payload.name.clone(), + slug: payload.slug.clone(), + created_at: Utc::now(), + }) + }) + .await? 
+ .map_err(|e: actix_web::Error| e)?; + + let response = ApiResponse { + data: updated_org, + success: true, + }; + + Ok(HttpResponse::Ok().json(response)) +} diff --git a/src/session/mod.rs b/src/session/mod.rs new file mode 100644 index 000000000..d3303a353 --- /dev/null +++ b/src/session/mod.rs @@ -0,0 +1,308 @@ +use redis::{AsyncCommands, Client}; +use serde_json; +use sqlx::{PgPool, Row}; +use std::sync::Arc; +use uuid::Uuid; + +use crate::shared::UserSession; + +pub struct SessionManager { + pub pool: PgPool, + pub redis: Option>, +} + +impl SessionManager { + pub fn new(pool: PgPool, redis: Option>) -> Self { + Self { pool, redis } + } + + pub async fn get_user_session( + &self, + user_id: Uuid, + bot_id: Uuid, + ) -> Result, Box> { + if let Some(redis_client) = &self.redis { + let mut conn = redis_client.get_multiplexed_async_connection().await?; + let cache_key = format!("session:{}:{}", user_id, bot_id); + let session_json: Option = conn.get(&cache_key).await?; + if let Some(json) = session_json { + if let Ok(session) = serde_json::from_str::(&json) { + return Ok(Some(session)); + } + } + } + + let session = sqlx::query_as::<_, UserSession>( + "SELECT * FROM user_sessions WHERE user_id = $1 AND bot_id = $2 ORDER BY updated_at DESC LIMIT 1", + ) + .bind(user_id) + .bind(bot_id) + .fetch_optional(&self.pool) + .await?; + + if let Some(ref session) = session { + if let Some(redis_client) = &self.redis { + let mut conn = redis_client.get_multiplexed_async_connection().await?; + let cache_key = format!("session:{}:{}", user_id, bot_id); + let session_json = serde_json::to_string(session)?; + let _: () = conn.set_ex(cache_key, session_json, 1800).await?; + } + } + + Ok(session) + } + + pub async fn create_session( + &self, + user_id: Uuid, + bot_id: Uuid, + title: &str, + ) -> Result> { + let session = sqlx::query_as::<_, UserSession>( + "INSERT INTO user_sessions (user_id, bot_id, title) VALUES ($1, $2, $3) RETURNING *", + ) + .bind(user_id) + .bind(bot_id) + 
.bind(title) + .fetch_one(&self.pool) + .await?; + + if let Some(redis_client) = &self.redis { + let mut conn = redis_client.get_multiplexed_async_connection().await?; + let cache_key = format!("session:{}:{}", user_id, bot_id); + let session_json = serde_json::to_string(&session)?; + let _: () = conn.set_ex(cache_key, session_json, 1800).await?; + } + + Ok(session) + } + + pub async fn save_message( + &self, + session_id: Uuid, + user_id: Uuid, + role: &str, + content: &str, + message_type: &str, + ) -> Result<(), Box> { + let message_count: i64 = + sqlx::query("SELECT COUNT(*) as count FROM message_history WHERE session_id = $1") + .bind(session_id) + .fetch_one(&self.pool) + .await? + .get("count"); + + sqlx::query( + "INSERT INTO message_history (session_id, user_id, role, content_encrypted, message_type, message_index) + VALUES ($1, $2, $3, $4, $5, $6)", + ) + .bind(session_id) + .bind(user_id) + .bind(role) + .bind(content) + .bind(message_type) + .bind(message_count + 1) + .execute(&self.pool) + .await?; + + sqlx::query("UPDATE user_sessions SET updated_at = NOW() WHERE id = $1") + .bind(session_id) + .execute(&self.pool) + .await?; + + if let Some(redis_client) = &self.redis { + if let Some(session_info) = + sqlx::query("SELECT user_id, bot_id FROM user_sessions WHERE id = $1") + .bind(session_id) + .fetch_optional(&self.pool) + .await? 
+ { + let user_id: Uuid = session_info.get("user_id"); + let bot_id: Uuid = session_info.get("bot_id"); + let mut conn = redis_client.get_multiplexed_async_connection().await?; + let cache_key = format!("session:{}:{}", user_id, bot_id); + let _: () = conn.del(cache_key).await?; + } + } + + Ok(()) + } + + pub async fn get_conversation_history( + &self, + session_id: Uuid, + user_id: Uuid, + ) -> Result, Box> { + let messages = sqlx::query( + "SELECT role, content_encrypted FROM message_history + WHERE session_id = $1 AND user_id = $2 + ORDER BY message_index ASC", + ) + .bind(session_id) + .bind(user_id) + .fetch_all(&self.pool) + .await?; + + let history = messages + .into_iter() + .map(|row| (row.get("role"), row.get("content_encrypted"))) + .collect(); + + Ok(history) + } + + pub async fn get_user_sessions( + &self, + user_id: Uuid, + ) -> Result, Box> { + let sessions = sqlx::query_as::<_, UserSession>( + "SELECT * FROM user_sessions WHERE user_id = $1 ORDER BY updated_at DESC", + ) + .bind(user_id) + .fetch_all(&self.pool) + .await?; + Ok(sessions) + } + + pub async fn update_answer_mode( + &self, + user_id: &str, + bot_id: &str, + mode: &str, + ) -> Result<(), Box> { + let user_uuid = Uuid::parse_str(user_id)?; + let bot_uuid = Uuid::parse_str(bot_id)?; + + sqlx::query( + "UPDATE user_sessions + SET answer_mode = $1, updated_at = NOW() + WHERE user_id = $2 AND bot_id = $3", + ) + .bind(mode) + .bind(user_uuid) + .bind(bot_uuid) + .execute(&self.pool) + .await?; + + if let Some(redis_client) = &self.redis { + let mut conn = redis_client.get_multiplexed_async_connection().await?; + let cache_key = format!("session:{}:{}", user_uuid, bot_uuid); + let _: () = conn.del(cache_key).await?; + } + + Ok(()) + } + + pub async fn update_current_tool( + &self, + user_id: &str, + bot_id: &str, + tool_name: Option<&str>, + ) -> Result<(), Box> { + let user_uuid = Uuid::parse_str(user_id)?; + let bot_uuid = Uuid::parse_str(bot_id)?; + + sqlx::query( + "UPDATE user_sessions + 
SET current_tool = $1, updated_at = NOW() + WHERE user_id = $2 AND bot_id = $3", + ) + .bind(tool_name) + .bind(user_uuid) + .bind(bot_uuid) + .execute(&self.pool) + .await?; + + if let Some(redis_client) = &self.redis { + let mut conn = redis_client.get_multiplexed_async_connection().await?; + let cache_key = format!("session:{}:{}", user_uuid, bot_uuid); + let _: () = conn.del(cache_key).await?; + } + + Ok(()) + } + + pub async fn get_session_by_id( + &self, + session_id: Uuid, + ) -> Result, Box> { + if let Some(redis_client) = &self.redis { + let mut conn = redis_client.get_multiplexed_async_connection().await?; + let cache_key = format!("session_by_id:{}", session_id); + let session_json: Option = conn.get(&cache_key).await?; + if let Some(json) = session_json { + if let Ok(session) = serde_json::from_str::(&json) { + return Ok(Some(session)); + } + } + } + + let session = sqlx::query_as::<_, UserSession>("SELECT * FROM user_sessions WHERE id = $1") + .bind(session_id) + .fetch_optional(&self.pool) + .await?; + + if let Some(ref session) = session { + if let Some(redis_client) = &self.redis { + let mut conn = redis_client.get_multiplexed_async_connection().await?; + let cache_key = format!("session_by_id:{}", session_id); + let session_json = serde_json::to_string(session)?; + let _: () = conn.set_ex(cache_key, session_json, 1800).await?; + } + } + + Ok(session) + } + + pub async fn cleanup_old_sessions( + &self, + days_old: i32, + ) -> Result> { + let result = sqlx::query( + "DELETE FROM user_sessions + WHERE updated_at < NOW() - INTERVAL '1 day' * $1", + ) + .bind(days_old) + .execute(&self.pool) + .await?; + Ok(result.rows_affected()) + } + + pub async fn set_current_tool( + &self, + user_id: &str, + bot_id: &str, + tool_name: Option, + ) -> Result<(), Box> { + let user_uuid = Uuid::parse_str(user_id)?; + let bot_uuid = Uuid::parse_str(bot_id)?; + + sqlx::query( + "UPDATE user_sessions + SET current_tool = $1, updated_at = NOW() + WHERE user_id = $2 AND 
bot_id = $3", + ) + .bind(tool_name) + .bind(user_uuid) + .bind(bot_uuid) + .execute(&self.pool) + .await?; + + if let Some(redis_client) = &self.redis { + let mut conn = redis_client.get_multiplexed_async_connection().await?; + let cache_key = format!("session:{}:{}", user_uuid, bot_uuid); + let _: () = conn.del(cache_key).await?; + } + + Ok(()) + } +} + +impl Clone for SessionManager { + fn clone(&self) -> Self { + Self { + pool: self.pool.clone(), + redis: self.redis.clone(), + } + } +} diff --git a/src/shared/mod.rs b/src/shared/mod.rs new file mode 100644 index 000000000..1d5fa9739 --- /dev/null +++ b/src/shared/mod.rs @@ -0,0 +1,7 @@ +pub mod models; +pub mod state; +pub mod utils; + +pub use models::*; +pub use state::*; +pub use utils::*; diff --git a/src/shared/models.rs b/src/shared/models.rs new file mode 100644 index 000000000..c6001f8ce --- /dev/null +++ b/src/shared/models.rs @@ -0,0 +1,118 @@ +use chrono::{DateTime, Utc}; +use serde::{Deserialize, Serialize}; +use sqlx::FromRow; +use uuid::Uuid; + +#[derive(Debug, Clone, Serialize, Deserialize, FromRow)] +pub struct Organization { + pub org_id: Uuid, + pub name: String, + pub slug: String, + pub created_at: DateTime, +} + +#[derive(Debug, Clone, Serialize, Deserialize, FromRow)] +pub struct Bot { + pub bot_id: Uuid, + pub name: String, + pub status: BotStatus, + pub config: serde_json::Value, + pub created_at: DateTime, + pub updated_at: DateTime, +} + +#[derive(Debug, Clone, Serialize, Deserialize, sqlx::Type)] +#[serde(rename_all = "snake_case")] +#[sqlx(type_name = "bot_status", rename_all = "snake_case")] +pub enum BotStatus { + Active, + Inactive, + Maintenance, +} + +#[derive(Debug, Clone, Copy, PartialEq)] +pub enum TriggerKind { + Scheduled = 0, + TableUpdate = 1, + TableInsert = 2, + TableDelete = 3, +} + +impl TriggerKind { + pub fn from_i32(value: i32) -> Option { + match value { + 0 => Some(Self::Scheduled), + 1 => Some(Self::TableUpdate), + 2 => Some(Self::TableInsert), + 3 => 
Some(Self::TableDelete), + _ => None, + } + } +} + +#[derive(Debug, FromRow, Serialize, Deserialize)] +pub struct Automation { + pub id: Uuid, + pub kind: i32, + pub target: Option, + pub schedule: Option, + pub param: String, + pub is_active: bool, + pub last_triggered: Option>, +} + +#[derive(Debug, Clone, Serialize, Deserialize, FromRow)] +pub struct UserSession { + pub id: Uuid, + pub user_id: Uuid, + pub bot_id: Uuid, + pub title: String, + pub context_data: serde_json::Value, + pub answer_mode: String, + pub current_tool: Option, + pub created_at: DateTime, + pub updated_at: DateTime, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct EmbeddingRequest { + pub text: String, + pub model: Option, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct EmbeddingResponse { + pub embedding: Vec, + pub model: String, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct SearchResult { + pub text: String, + pub similarity: f32, + pub metadata: serde_json::Value, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct UserMessage { + pub bot_id: String, + pub user_id: String, + pub session_id: String, + pub channel: String, + pub content: String, + pub message_type: String, + pub media_url: Option, + pub timestamp: DateTime, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct BotResponse { + pub bot_id: String, + pub user_id: String, + pub session_id: String, + pub channel: String, + pub content: String, + pub message_type: String, + pub stream_token: Option, + pub is_complete: bool, +} diff --git a/src/shared/state.rs b/src/shared/state.rs new file mode 100644 index 000000000..c27cc78a6 --- /dev/null +++ b/src/shared/state.rs @@ -0,0 +1,28 @@ +use std::sync::Arc; + +use crate::{ + bot::BotOrchestrator, + channels::{VoiceAdapter, WebChannelAdapter, WhatsAppAdapter}, + config::AppConfig, + tools::ToolApi, + web_automation::BrowserPool +}; + +#[derive(Clone)] +pub struct AppState { + pub minio_client: Option, 
+    pub config: Option,
+    pub db: Option,
+    pub db_custom: Option,
+    pub browser_pool: Arc,
+    pub orchestrator: Arc,
+    pub web_adapter: Arc,
+    pub voice_adapter: Arc,
+    pub whatsapp_adapter: Arc,
+    pub tool_api: Arc,
+}
+
+pub struct BotState {
+    pub language: String,
+    pub work_folder: String,
+}
diff --git a/src/shared/utils.rs b/src/shared/utils.rs
new file mode 100644
index 000000000..b7b36b04a
--- /dev/null
+++ b/src/shared/utils.rs
@@ -0,0 +1,258 @@
+use chrono::{DateTime, Utc};
+use langchain_rust::llm::AzureConfig;
+use log::{debug, warn};
+use rhai::{Array, Dynamic};
+use serde_json::{json, Value};
+use smartstring::SmartString;
+use sqlx::{postgres::PgRow, Column, Decode, Row, Type, TypeInfo};
+use std::error::Error;
+use std::fs::File;
+use std::io::BufReader;
+use std::path::Path;
+use tokio::fs::File as TokioFile;
+use tokio_stream::StreamExt;
+use zip::ZipArchive;
+
+use crate::config::AIConfig;
+use reqwest::Client;
+use tokio::io::AsyncWriteExt;
+
+// Maps our AIConfig onto langchain-rust's Azure OpenAI configuration.
+pub fn azure_from_config(config: &AIConfig) -> AzureConfig {
+    AzureConfig::new()
+        .with_api_base(&config.endpoint)
+        .with_api_key(&config.key)
+        .with_api_version(&config.version)
+        .with_deployment_id(&config.instance)
+}
+
+pub async fn call_llm(
+    text: &str,
+    ai_config: &AIConfig,
+) -> Result> {
+    let azure_config = azure_from_config(ai_config); // fixed: was `azure_from_config(&ai_config.clone())` — cloning a borrowed config only to re-borrow it is a no-op allocation (clippy redundant_clone)
+    let open_ai = langchain_rust::llm::OpenAI::new(azure_config);
+
+    let prompt = text.to_string();
+
+    match open_ai.invoke(&prompt).await {
+        Ok(response_text) => Ok(response_text),
+        Err(err) => {
+            log::error!("Error invoking LLM API: {}", err); // full error detail is only logged here…
+            Err(Box::new(std::io::Error::new(
+                std::io::ErrorKind::Other,
+                "Failed to invoke LLM API", // …callers only see this generic message — NOTE(review): consider propagating `err` instead
+            )))
+        }
+    }
+}
+
+// Extracts every entry of a zip archive under destination_path,
+// recreating directories; uses mangled_name() to sanitize entry paths.
+pub fn extract_zip_recursive(
+    zip_path: &Path,
+    destination_path: &Path,
+) -> Result<(), Box> {
+    let file = File::open(zip_path)?;
+    let buf_reader = BufReader::new(file);
+    let mut archive = ZipArchive::new(buf_reader)?;
+
+    for i in 0..archive.len() {
+        let mut file = 
archive.by_index(i)?; + let outpath = destination_path.join(file.mangled_name()); + + if file.is_dir() { + std::fs::create_dir_all(&outpath)?; + } else { + if let Some(parent) = outpath.parent() { + if !parent.exists() { + std::fs::create_dir_all(&parent)?; + } + } + let mut outfile = File::create(&outpath)?; + std::io::copy(&mut file, &mut outfile)?; + } + } + + Ok(()) +} + +pub fn row_to_json(row: PgRow) -> Result> { + let mut result = serde_json::Map::new(); + let columns = row.columns(); + debug!("Converting row with {} columns", columns.len()); + + for (i, column) in columns.iter().enumerate() { + let column_name = column.name(); + let type_name = column.type_info().name(); + + let value = match type_name { + "INT4" | "int4" => handle_nullable_type::(&row, i, column_name), + "INT8" | "int8" => handle_nullable_type::(&row, i, column_name), + "FLOAT4" | "float4" => handle_nullable_type::(&row, i, column_name), + "FLOAT8" | "float8" => handle_nullable_type::(&row, i, column_name), + "TEXT" | "VARCHAR" | "text" | "varchar" => { + handle_nullable_type::(&row, i, column_name) + } + "BOOL" | "bool" => handle_nullable_type::(&row, i, column_name), + "JSON" | "JSONB" | "json" | "jsonb" => handle_json(&row, i, column_name), + _ => { + warn!("Unknown type {} for column {}", type_name, column_name); + handle_nullable_type::(&row, i, column_name) + } + }; + + result.insert(column_name.to_string(), value); + } + + Ok(Value::Object(result)) +} + +fn handle_nullable_type<'r, T>(row: &'r PgRow, idx: usize, col_name: &str) -> Value +where + T: Type + Decode<'r, sqlx::Postgres> + serde::Serialize + std::fmt::Debug, +{ + match row.try_get::, _>(idx) { + Ok(Some(val)) => { + debug!("Successfully read column {} as {:?}", col_name, val); + json!(val) + } + Ok(None) => { + debug!("Column {} is NULL", col_name); + Value::Null + } + Err(e) => { + warn!("Failed to read column {}: {}", col_name, e); + Value::Null + } + } +} + +fn handle_json(row: &PgRow, idx: usize, col_name: &str) -> 
Value {
+    // First attempt: read the column natively as JSON.
+    match row.try_get::, _>(idx) {
+        Ok(Some(val)) => {
+            debug!("Successfully read JSON column {} as Value", col_name);
+            return val;
+        }
+        Ok(None) => return Value::Null,
+        Err(_) => (), // fall through and retry as text below
+    }
+
+    // Fallback: read as a string and try to parse it as JSON.
+    match row.try_get::, _>(idx) {
+        Ok(Some(s)) => match serde_json::from_str(&s) {
+            Ok(val) => val,
+            Err(_) => {
+                debug!("Column {} contains string that's not JSON", col_name);
+                json!(s)
+            }
+        },
+        Ok(None) => Value::Null,
+        Err(e) => {
+            warn!("Failed to read JSON column {}: {}", col_name, e);
+            Value::Null
+        }
+    }
+}
+
+// Converts a serde_json Value into a Rhai Dynamic, recursing into
+// arrays and objects; numbers prefer i64 and fall back to f64.
+pub fn json_value_to_dynamic(value: &Value) -> Dynamic {
+    match value {
+        Value::Null => Dynamic::UNIT,
+        Value::Bool(b) => Dynamic::from(*b),
+        Value::Number(n) => {
+            if let Some(i) = n.as_i64() {
+                Dynamic::from(i)
+            } else if let Some(f) = n.as_f64() {
+                Dynamic::from(f)
+            } else {
+                Dynamic::UNIT // e.g. numbers outside both i64 and f64 representations
+            }
+        }
+        Value::String(s) => Dynamic::from(s.clone()),
+        Value::Array(arr) => Dynamic::from(
+            arr.iter()
+                .map(json_value_to_dynamic)
+                .collect::(),
+        ),
+        Value::Object(obj) => Dynamic::from(
+            obj.iter()
+                .map(|(k, v)| (SmartString::from(k), json_value_to_dynamic(v)))
+                .collect::(),
+        ),
+    }
+}
+
+// Coerces any Dynamic into a Rhai Array: arrays pass through,
+// unit becomes an empty array, anything else becomes a singleton.
+pub fn to_array(value: Dynamic) -> Array {
+    if value.is_array() {
+        value.cast::()
+    } else if value.is_unit() { // fixed: dropped redundant `|| value.is::<()>()` — it is the same test as is_unit()
+        Array::new()
+    } else {
+        Array::from([value])
+    }
+}
+
+// Streams a URL to output_path; errors on any non-success HTTP status.
+pub async fn download_file(url: &str, output_path: &str) -> Result<(), Box> {
+    let client = Client::new();
+    let response = client.get(url).send().await?;
+
+    if response.status().is_success() {
+        let mut file = TokioFile::create(output_path).await?;
+
+        let mut stream = response.bytes_stream();
+
+        while let Some(chunk) = stream.next().await {
+            file.write_all(&chunk?).await?;
+        }
+        debug!("File downloaded successfully to {}", output_path);
+    } else {
+        return Err("Failed to download file".into());
+    }
+
+    Ok(())
+}
+
+// Parses a single "KEY=VALUE" filter into a "$1"-parameterized SQL clause.
+pub fn parse_filter(filter_str: &str) -> Result<(String, Vec), Box> {
+    let parts: Vec<&str> = filter_str.splitn(2, '=').collect(); // fixed: splitn(2, …) lets VALUE itself contain '='; input with no '=' still yields one part and errors below
+    if
parts.len() != 2 {
+        return Err("Invalid filter format. Expected 'KEY=VALUE'".into());
+    }
+
+    let column = parts[0].trim();
+    let value = parts[1].trim();
+
+    // Whitelist column characters — the column name is interpolated into SQL,
+    // so anything beyond [A-Za-z0-9_] is rejected to prevent injection.
+    if !column
+        .chars()
+        .all(|c| c.is_ascii_alphanumeric() || c == '_')
+    {
+        return Err("Invalid column name in filter".into());
+    }
+
+    Ok((format!("{} = $1", column), vec![value.to_string()]))
+}
+
+// Parses "K1=V1&K2=V2…" into an AND-joined clause whose placeholders
+// start at $offset+1, for appending to an existing parameter list.
+pub fn parse_filter_with_offset(
+    filter_str: &str,
+    offset: usize,
+) -> Result<(String, Vec), Box> {
+    let mut clauses = Vec::new();
+    let mut params = Vec::new();
+
+    for (i, condition) in filter_str.split('&').enumerate() {
+        let parts: Vec<&str> = condition.splitn(2, '=').collect(); // fixed: splitn(2, …) allows '=' inside the value; a condition with no '=' still errors below
+        if parts.len() != 2 {
+            return Err("Invalid filter format".into());
+        }
+
+        let column = parts[0].trim();
+        let value = parts[1].trim();
+
+        // Same SQL-injection guard as parse_filter: column goes into the query text.
+        if !column
+            .chars()
+            .all(|c| c.is_ascii_alphanumeric() || c == '_')
+        {
+            return Err("Invalid column name".into());
+        }
+
+        clauses.push(format!("{} = ${}", column, i + 1 + offset));
+        params.push(value.to_string());
+    }
+
+    Ok((clauses.join(" AND "), params))
+}
diff --git a/src/tests/integration_email_list.rs b/src/tests/integration_email_list.rs
new file mode 100644
index 000000000..11cb8da59
--- /dev/null
+++ b/src/tests/integration_email_list.rs
@@ -0,0 +1,58 @@
+use anyhow::Result;
+use jmap_client::{
+    client::{Client, Credentials},
+    core::query::Filter,
+    email::{self, Property},
+    mailbox::{self, Role},
+};
+
+#[tokio::test]
+async fn test_successful_email_list() -> Result<()> {
+    // JMAP server configuration
+
+
+    // 1.
Authenticate with JMAP server + let client = Client::new() + .credentials(("test@", "")) + .connect("https://mail/jmap/") + .await + .unwrap(); + + let inbox_id = client + .mailbox_query( + mailbox::query::Filter::role(Role::Inbox).into(), + None::>, + ) + .await + .unwrap() + .take_ids() + .pop() + .unwrap(); + + let email_id = client + .email_query( + Filter::and([ + // email::query::Filter::subject("test"), + email::query::Filter::in_mailbox(&inbox_id), + // email::query::Filter::has_keyword("$draft"), + ]) + .into(), + [email::query::Comparator::from()].into(), + ) + .await + .unwrap() + .take_ids() + .pop() + .unwrap(); + + // Fetch message + let email = client + .email_get( + &email_id, + [Property::Subject, Property::Preview, Property::Keywords].into(), + ) + .await + .unwrap(); + + Ok(()) +} diff --git a/src/tests/integration_file_list_test.rs b/src/tests/integration_file_list_test.rs new file mode 100644 index 000000000..3da37aa4b --- /dev/null +++ b/src/tests/integration_file_list_test.rs @@ -0,0 +1,107 @@ +use actix_web::{test, web, App}; +use anyhow::Result; +use bytes::Bytes; +use gb_core::models::AppState; +use gb_file::handlers::list_file; +use minio::s3::args::{BucketExistsArgs, MakeBucketArgs}; +use minio::s3::builders::SegmentedBytes; +use minio::s3::client::ClientBuilder as MinioClientBuilder; +use minio::s3::creds::StaticProvider; +use minio::s3::http::BaseUrl; +use minio::s3::types::ToStream; +use std::fs::File; +use std::io::Read; +use std::io::Write; +use std::str::FromStr; +use tempfile::NamedTempFile; +use tokio_stream::StreamExt; + +#[tokio::test] + +async fn test_successful_file_listing() -> Result<(), Box> { + // Setup test environment and MinIO client + let base_url = format!("http://{}", "localhost:9000"); + let base_url = BaseUrl::from_str(&base_url)?; + let credentials = StaticProvider::new("minioadmin", "minioadmin", None); + + let minio_client = MinioClientBuilder::new(base_url.clone()) + .provider(Some(Box::new(credentials))) + 
.build()?;
+
+    // Create test bucket if it doesn't exist
+    let bucket_name = "file-upload-rust-bucket";
+
+    // Using object-based API for bucket_exists
+    let bucket_exists_args = BucketExistsArgs::new(bucket_name)?;
+    let bucket_exists = minio_client.bucket_exists(&bucket_exists_args).await?;
+
+    if !bucket_exists {
+        // Using object-based API for make_bucket
+        let make_bucket_args = MakeBucketArgs::new(bucket_name)?;
+        minio_client.make_bucket(&make_bucket_args).await?;
+    }
+
+    // Put a single file in the bucket
+    let folder_path = "test-folder";
+    let file_name = "test.txt";
+    let object_name = format!("{}/{}", folder_path, file_name);
+
+    // Create a temporary file with some content
+    let mut temp_file = NamedTempFile::new()?;
+    writeln!(temp_file, "This is a test file.")?;
+
+    // Upload the file to the bucket
+    let mut file = File::open(temp_file.path())?;
+    let mut buffer = Vec::new();
+    file.read_to_end(&mut buffer)?;
+    let content = SegmentedBytes::from(Bytes::from(buffer));
+    minio_client.put_object(bucket_name, &object_name, content); // NOTE(review): return value is discarded — if minio-rs returns a future/builder here, the upload never executes and the assertion below tests a missing object; confirm the API and `.send()`/`.await` it
+
+    let app_state = web::Data::new(AppState {
+        minio_client: Some(minio_client.clone()),
+        config: None,
+        db_pool: None, // NOTE(review): field name differs from the AppState in src/shared/state.rs (`db`/`db_custom`) — confirm this test targets gb_core's AppState
+    });
+
+    let app = test::init_service(App::new().app_data(app_state.clone()).service(list_file)).await;
+
+    // Execute request to list files in the folder
+    let req = test::TestRequest::post()
+        .uri(&format!("/files/list/{}", folder_path))
+        .to_request();
+
+    let resp = test::call_service(&app, req).await;
+
+    // Verify response
+    assert_eq!(resp.status(), 200);
+
+    // Parse the response body as JSON
+    let body = test::read_body(resp).await;
+    let file_list: Vec = serde_json::from_slice(&body)?;
+
+    // Verify the uploaded file is in the list
+    assert!(
+        file_list.contains(&object_name),
+        "Uploaded file should be listed"
+    );
+
+    // List all objects in a directory. 
+    let mut list_objects = minio_client
+        .list_objects(bucket_name) // fixed: was the unrelated literal "my-bucket" — list the bucket this test actually populated
+        .use_api_v1(true)
+        .recursive(true)
+        .to_stream()
+        .await;
+    while let Some(result) = list_objects.next().await {
+        match result {
+            Ok(resp) => {
+                for item in resp.contents {
+                    println!("{:?}", item); // fixed: was `info!`, but no log/tracing macro is imported in this test file
+                }
+            }
+            Err(e) => println!("Error: {:?}", e), // fixed: same `info!` → `println!`
+        }
+    }
+
+    Ok(())
+}
diff --git a/src/tests/integration_file_upload_test.rs b/src/tests/integration_file_upload_test.rs
new file mode 100644
index 000000000..868b739f7
--- /dev/null
+++ b/src/tests/integration_file_upload_test.rs
@@ -0,0 +1,93 @@
+use actix_web::{test, web, App};
+use anyhow::Result;
+use bytes::Bytes;
+use gb_core::models::AppState;
+use gb_file::handlers::upload_file;
+use minio::s3::args::{BucketExistsArgs, MakeBucketArgs, StatObjectArgs};
+use minio::s3::client::ClientBuilder as MinioClientBuilder;
+use minio::s3::creds::StaticProvider;
+use minio::s3::http::BaseUrl;
+use std::fs::File;
+use std::io::Read;
+use std::io::Write;
+use std::str::FromStr;
+use tempfile::NamedTempFile;
+
+#[tokio::test]
+async fn test_successful_file_upload() -> Result<()> {
+    // Setup test environment and MinIO client
+    let base_url = format!("http://{}", "localhost:9000");
+    let base_url = BaseUrl::from_str(&base_url)?;
+    let credentials = StaticProvider::new("minioadmin", "minioadmin", None); // fixed: dropped needless `&` on string literals (matches the listing test)
+
+    let minio_client = MinioClientBuilder::new(base_url.clone())
+        .provider(Some(Box::new(credentials)))
+        .build()?;
+
+    // Create test bucket if it doesn't exist
+    let bucket_name = "file-upload-rust-bucket";
+
+    // Using object-based API for bucket_exists
+    let bucket_exists_args = BucketExistsArgs::new(bucket_name)?;
+    let bucket_exists = minio_client.bucket_exists(&bucket_exists_args).await?;
+
+    if !bucket_exists {
+        // Using object-based API for make_bucket
+        let make_bucket_args = MakeBucketArgs::new(bucket_name)?;
+        minio_client.make_bucket(&make_bucket_args).await?;
+    }
+
+    let app_state = web::Data::new(AppState {
+        minio_client: 
Some(minio_client.clone()), + config: None, + db_pool: None + }); + + let app = + test::init_service(App::new().app_data(app_state.clone()) + .service(upload_file)).await; + + // Create a test file with content + let mut temp_file = NamedTempFile::new()?; + write!(temp_file, "Test file content for upload")?; + + // Prepare a multipart request + let boundary = "----WebKitFormBoundaryX"; + + + // Read the file content + let mut file_content = Vec::new(); + let mut file = File::open(temp_file.path())?; + file.read_to_end(&mut file_content)?; + + let body = format!( + "--{}\r\nContent-Disposition: form-data; name=\"file\"; filename=\"test.txt\"\r\nContent-Type: text/plain\r\n\r\n{}\r\n--{}--\r\n", + boundary, + String::from_utf8_lossy(&file_content), + boundary + ); + + // Execute request + let req = test::TestRequest::post() + .uri("/files/upload/test-folder") + .set_payload(Bytes::from(body)) + .to_request(); + + let resp = test::call_service(&app, req).await; + + // Verify response + assert_eq!(resp.status(), 200); + + // Verify file exists in MinIO using object-based API + let object_name = "test-folder/test.txt"; + let bucket_name = "file-upload-rust-bucket"; + + // Using object-based API for stat_object + let stat_object_args = StatObjectArgs::new(bucket_name, object_name)?; + let object_exists = minio_client.clone().stat_object(&stat_object_args).await.is_ok(); + + assert!(object_exists, "Uploaded file should exist in MinIO"); + + + Ok(()) +} diff --git a/src/tools/mod.rs b/src/tools/mod.rs new file mode 100644 index 000000000..9661c6126 --- /dev/null +++ b/src/tools/mod.rs @@ -0,0 +1,700 @@ +use async_trait::async_trait; +use redis::AsyncCommands; +use rhai::{Engine, Scope}; +use serde::{Deserialize, Serialize}; +use std::collections::HashMap; +use std::sync::Arc; +use tokio::sync::{mpsc, Mutex}; +use uuid::Uuid; + +use crate::{ + channels::ChannelAdapter, + session::SessionManager, + shared::BotResponse, +}; + +#[derive(Debug, Clone, Serialize, Deserialize)] 
+pub struct ToolResult { + pub success: bool, + pub output: String, + pub requires_input: bool, + pub session_id: String, +} + +#[derive(Clone)] +pub struct Tool { + pub name: String, + pub description: String, + pub parameters: HashMap, + pub script: String, +} + +#[async_trait] +pub trait ToolExecutor: Send + Sync { + async fn execute( + &self, + tool_name: &str, + session_id: &str, + user_id: &str, + ) -> Result>; + async fn provide_input( + &self, + session_id: &str, + input: &str, + ) -> Result<(), Box>; + async fn get_output( + &self, + session_id: &str, + ) -> Result, Box>; + async fn is_waiting_for_input( + &self, + session_id: &str, + ) -> Result>; +} + +pub struct RedisToolExecutor { + redis_client: redis::Client, + web_adapter: Arc, + voice_adapter: Arc, + whatsapp_adapter: Arc, +} + +impl RedisToolExecutor { + pub fn new( + redis_url: &str, + web_adapter: Arc, + voice_adapter: Arc, + whatsapp_adapter: Arc, + ) -> Result> { + let client = redis::Client::open(redis_url)?; + Ok(Self { + redis_client: client, + web_adapter, + voice_adapter, + whatsapp_adapter, + }) + } + + async fn send_tool_message( + &self, + session_id: &str, + user_id: &str, + channel: &str, + message: &str, + ) -> Result<(), Box> { + let response = BotResponse { + bot_id: "tool_bot".to_string(), + user_id: user_id.to_string(), + session_id: session_id.to_string(), + channel: channel.to_string(), + content: message.to_string(), + message_type: "tool".to_string(), + stream_token: None, + is_complete: true, + }; + + match channel { + "web" => self.web_adapter.send_message(response).await, + "voice" => self.voice_adapter.send_message(response).await, + "whatsapp" => self.whatsapp_adapter.send_message(response).await, + _ => Ok(()), + } + } + + fn create_rhai_engine(&self, session_id: String, user_id: String, channel: String) -> Engine { + let mut engine = Engine::new(); + + let tool_executor = Arc::new(( + self.redis_client.clone(), + self.web_adapter.clone(), + self.voice_adapter.clone(), 
+ self.whatsapp_adapter.clone(), + )); + + let session_id_clone = session_id.clone(); + let user_id_clone = user_id.clone(); + let channel_clone = channel.clone(); + + engine.register_fn("talk", move |message: String| { + let tool_executor = Arc::clone(&tool_executor); + let session_id = session_id_clone.clone(); + let user_id = user_id_clone.clone(); + let channel = channel_clone.clone(); + + tokio::spawn(async move { + let (redis_client, web_adapter, voice_adapter, whatsapp_adapter) = &*tool_executor; + + let response = BotResponse { + bot_id: "tool_bot".to_string(), + user_id: user_id.clone(), + session_id: session_id.clone(), + channel: channel.clone(), + content: message.clone(), + message_type: "tool".to_string(), + stream_token: None, + is_complete: true, + }; + + let result = match channel.as_str() { + "web" => web_adapter.send_message(response).await, + "voice" => voice_adapter.send_message(response).await, + "whatsapp" => whatsapp_adapter.send_message(response).await, + _ => Ok(()), + }; + + if let Err(e) = result { + log::error!("Failed to send tool message: {}", e); + } + + if let Ok(mut conn) = redis_client.get_async_connection().await { + let output_key = format!("tool:{}:output", session_id); + let _ = conn.lpush(&output_key, &message).await; + } + }); + }); + + let hear_executor = self.redis_client.clone(); + let session_id_clone = session_id.clone(); + + engine.register_fn("hear", move || -> String { + let hear_executor = hear_executor.clone(); + let session_id = session_id_clone.clone(); + + let rt = tokio::runtime::Runtime::new().unwrap(); + rt.block_on(async move { + match hear_executor.get_async_connection().await { + Ok(mut conn) => { + let input_key = format!("tool:{}:input", session_id); + let waiting_key = format!("tool:{}:waiting", session_id); + + let _ = conn.set_ex(&waiting_key, "true", 300).await; + let result: Option<(String, String)> = + conn.brpop(&input_key, 30).await.ok().flatten(); + let _ = conn.del(&waiting_key).await; + + 
result + .map(|(_, input)| input) + .unwrap_or_else(|| "timeout".to_string()) + } + Err(e) => { + log::error!("HEAR Redis error: {}", e); + "error".to_string() + } + } + }) + }); + + engine + } + + async fn cleanup_session(&self, session_id: &str) -> Result<(), Box> { + let mut conn = self.redis_client.get_multiplexed_async_connection().await?; + + let keys = vec![ + format!("tool:{}:output", session_id), + format!("tool:{}:input", session_id), + format!("tool:{}:waiting", session_id), + format!("tool:{}:active", session_id), + ]; + + for key in keys { + let _: () = conn.del(&key).await?; + } + + Ok(()) + } +} + +#[async_trait] +impl ToolExecutor for RedisToolExecutor { + async fn execute( + &self, + tool_name: &str, + session_id: &str, + user_id: &str, + ) -> Result> { + let tool = get_tool(tool_name).ok_or_else(|| format!("Tool not found: {}", tool_name))?; + + let mut conn = self.redis_client.get_multiplexed_async_connection().await?; + let session_key = format!("tool:{}:session", session_id); + let session_data = serde_json::json!({ + "user_id": user_id, + "tool_name": tool_name, + "started_at": chrono::Utc::now().to_rfc3339(), + }); + conn.set_ex(&session_key, session_data.to_string(), 3600) + .await?; + + let active_key = format!("tool:{}:active", session_id); + conn.set_ex(&active_key, "true", 3600).await?; + + let channel = "web"; + let _engine = self.create_rhai_engine( + session_id.to_string(), + user_id.to_string(), + channel.to_string(), + ); + + let redis_clone = self.redis_client.clone(); + let web_adapter_clone = self.web_adapter.clone(); + let voice_adapter_clone = self.voice_adapter.clone(); + let whatsapp_adapter_clone = self.whatsapp_adapter.clone(); + let session_id_clone = session_id.to_string(); + let user_id_clone = user_id.to_string(); + let tool_script = tool.script.clone(); + + tokio::spawn(async move { + let mut engine = Engine::new(); + let mut scope = Scope::new(); + + let redis_client = redis_clone.clone(); + let web_adapter = 
web_adapter_clone.clone(); + let voice_adapter = voice_adapter_clone.clone(); + let whatsapp_adapter = whatsapp_adapter_clone.clone(); + let session_id = session_id_clone.clone(); + let user_id = user_id_clone.clone(); + + engine.register_fn("talk", move |message: String| { + let redis_client = redis_client.clone(); + let web_adapter = web_adapter.clone(); + let voice_adapter = voice_adapter.clone(); + let whatsapp_adapter = whatsapp_adapter.clone(); + let session_id = session_id.clone(); + let user_id = user_id.clone(); + + tokio::spawn(async move { + let channel = "web"; + + let response = BotResponse { + bot_id: "tool_bot".to_string(), + user_id: user_id.clone(), + session_id: session_id.clone(), + channel: channel.to_string(), + content: message.clone(), + message_type: "tool".to_string(), + stream_token: None, + is_complete: true, + }; + + let send_result = match channel { + "web" => web_adapter.send_message(response).await, + "voice" => voice_adapter.send_message(response).await, + "whatsapp" => whatsapp_adapter.send_message(response).await, + _ => Ok(()), + }; + + if let Err(e) = send_result { + log::error!("Failed to send tool message: {}", e); + } + + if let Ok(mut conn) = redis_client.get_async_connection().await { + let output_key = format!("tool:{}:output", session_id); + let _ = conn.lpush(&output_key, &message).await; + } + }); + }); + + let hear_redis = redis_clone.clone(); + let session_id_hear = session_id.clone(); + engine.register_fn("hear", move || -> String { + let hear_redis = hear_redis.clone(); + let session_id = session_id_hear.clone(); + + let rt = tokio::runtime::Runtime::new().unwrap(); + rt.block_on(async move { + match hear_redis.get_async_connection().await { + Ok(mut conn) => { + let input_key = format!("tool:{}:input", session_id); + let waiting_key = format!("tool:{}:waiting", session_id); + + let _ = conn.set_ex(&waiting_key, "true", 300).await; + let result: Option<(String, String)> = + conn.brpop(&input_key, 
30).await.ok().flatten(); + let _ = conn.del(&waiting_key).await; + + result + .map(|(_, input)| input) + .unwrap_or_else(|| "timeout".to_string()) + } + Err(_) => "error".to_string(), + } + }) + }); + + match engine.eval_with_scope::<()>(&mut scope, &tool_script) { + Ok(_) => { + log::info!( + "Tool {} completed successfully for session {}", + tool_name, + session_id + ); + + let completion_msg = + "🛠️ Tool execution completed. How can I help you with anything else?"; + let response = BotResponse { + bot_id: "tool_bot".to_string(), + user_id: user_id_clone, + session_id: session_id_clone.clone(), + channel: "web".to_string(), + content: completion_msg.to_string(), + message_type: "tool_complete".to_string(), + stream_token: None, + is_complete: true, + }; + + let _ = web_adapter_clone.send_message(response).await; + } + Err(e) => { + log::error!("Tool execution failed: {}", e); + + let error_msg = format!("❌ Tool error: {}", e); + let response = BotResponse { + bot_id: "tool_bot".to_string(), + user_id: user_id_clone, + session_id: session_id_clone.clone(), + channel: "web".to_string(), + content: error_msg, + message_type: "tool_error".to_string(), + stream_token: None, + is_complete: true, + }; + + let _ = web_adapter_clone.send_message(response).await; + } + } + + if let Ok(mut conn) = redis_clone.get_async_connection().await { + let active_key = format!("tool:{}:active", session_id_clone); + let _ = conn.del(&active_key).await; + } + }); + + Ok(ToolResult { + success: true, + output: format!( + "🛠️ Starting {} tool. 
Please follow the tool's instructions.", + tool_name + ), + requires_input: true, + session_id: session_id.to_string(), + }) + } + + async fn provide_input( + &self, + session_id: &str, + input: &str, + ) -> Result<(), Box> { + let mut conn = self.redis_client.get_multiplexed_async_connection().await?; + let input_key = format!("tool:{}:input", session_id); + conn.lpush(&input_key, input).await?; + Ok(()) + } + + async fn get_output( + &self, + session_id: &str, + ) -> Result, Box> { + let mut conn = self.redis_client.get_multiplexed_async_connection().await?; + let output_key = format!("tool:{}:output", session_id); + let messages: Vec = conn.lrange(&output_key, 0, -1).await?; + let _: () = conn.del(&output_key).await?; + Ok(messages) + } + + async fn is_waiting_for_input( + &self, + session_id: &str, + ) -> Result> { + let mut conn = self.redis_client.get_multiplexed_async_connection().await?; + let waiting_key = format!("tool:{}:waiting", session_id); + let exists: bool = conn.exists(&waiting_key).await?; + Ok(exists) + } +} + +fn get_tool(name: &str) -> Option { + match name { + "calculator" => Some(Tool { + name: "calculator".to_string(), + description: "Perform mathematical calculations".to_string(), + parameters: HashMap::from([ + ("operation".to_string(), "add|subtract|multiply|divide".to_string()), + ("a".to_string(), "number".to_string()), + ("b".to_string(), "number".to_string()), + ]), + script: r#" + let TALK = |message| { + talk(message); + }; + + let HEAR = || { + hear() + }; + + TALK("🔢 Calculator started!"); + TALK("Please enter the first number:"); + let a = HEAR(); + TALK("Please enter the second number:"); + let b = HEAR(); + TALK("Choose operation: add, subtract, multiply, or divide:"); + let op = HEAR(); + + let num_a = a.to_float(); + let num_b = b.to_float(); + + if op == "add" { + let result = num_a + num_b; + TALK("✅ Result: " + a + " + " + b + " = " + result); + } else if op == "subtract" { + let result = num_a - num_b; + TALK("✅ Result: 
" + a + " - " + b + " = " + result); + } else if op == "multiply" { + let result = num_a * num_b; + TALK("✅ Result: " + a + " × " + b + " = " + result); + } else if op == "divide" { + if num_b != 0.0 { + let result = num_a / num_b; + TALK("✅ Result: " + a + " ÷ " + b + " = " + result); + } else { + TALK("❌ Error: Cannot divide by zero!"); + } + } else { + TALK("❌ Error: Invalid operation. Please use: add, subtract, multiply, or divide"); + } + + TALK("Calculator session completed. Thank you!"); + "#.to_string(), + }), + _ => None, + } +} + +#[derive(Clone)] +pub struct ToolManager { + tools: HashMap, + waiting_responses: Arc>>>, +} + +impl ToolManager { + pub fn new() -> Self { + let mut tools = HashMap::new(); + + let calculator_tool = Tool { + name: "calculator".to_string(), + description: "Perform calculations".to_string(), + parameters: HashMap::from([ + ( + "operation".to_string(), + "add|subtract|multiply|divide".to_string(), + ), + ("a".to_string(), "number".to_string()), + ("b".to_string(), "number".to_string()), + ]), + script: r#" + TALK("Calculator started. 
Enter first number:");
                let a = HEAR();
                TALK("Enter second number:");
                let b = HEAR();
                TALK("Operation (add/subtract/multiply/divide):");
                let op = HEAR();

                // NOTE(review): this script is evaluated by the rhai engine, so it
                // must be written in rhai, not Rust. The original used Rust-only
                // constructs (`a.parse::<f64>().unwrap()`, `.to_string()`, `&result`)
                // which rhai cannot parse. `to_float()` matches the working
                // calculator script registered in `get_tool` above.
                let num_a = a.to_float();
                let num_b = b.to_float();
                let result = if op == "add" {
                    num_a + num_b
                } else if op == "subtract" {
                    num_a - num_b
                } else if op == "multiply" {
                    num_a * num_b
                } else if op == "divide" {
                    if num_b == 0.0 {
                        TALK("Cannot divide by zero");
                        return;
                    }
                    num_a / num_b
                } else {
                    TALK("Invalid operation");
                    return;
                };
                // rhai string + float concatenation is built in; no to_string() needed.
                TALK("Result: " + result);
            "#
            .to_string(),
        };

        tools.insert(calculator_tool.name.clone(), calculator_tool);
        Self {
            tools,
            waiting_responses: Arc::new(Mutex::new(HashMap::new())),
        }
    }

    /// Look up a registered tool by name.
    pub fn get_tool(&self, name: &str) -> Option<&Tool> {
        self.tools.get(name)
    }

    /// Names of all registered tools.
    pub fn list_tools(&self) -> Vec {
        self.tools.keys().cloned().collect()
    }

    /// Validate the tool exists and report that it was started.
    /// Actual script execution happens in `execute_tool_with_session`.
    pub async fn execute_tool(
        &self,
        tool_name: &str,
        session_id: &str,
        user_id: &str,
    ) -> Result> {
        let tool = self.get_tool(tool_name).ok_or("Tool not found")?;

        Ok(ToolResult {
            success: true,
            output: format!("Tool {} started for user {}", tool_name, user_id),
            requires_input: true,
            session_id: session_id.to_string(),
        })
    }

    /// True when a running tool script is blocked in HEAR() waiting for
    /// user input on this session.
    pub async fn is_tool_waiting(
        &self,
        session_id: &str,
    ) -> Result> {
        let waiting = self.waiting_responses.lock().await;
        Ok(waiting.contains_key(session_id))
    }

    /// Forward user input to a waiting tool script.
    /// NOTE(review): hard-codes "default_bot" as the bot id — confirm this
    /// matches the key used by `execute_tool_with_session` for multi-bot setups.
    pub async fn provide_input(
        &self,
        session_id: &str,
        input: &str,
    ) -> Result<(), Box> {
        self.provide_user_response(session_id, "default_bot", input.to_string())
            .await
    }

    /// Placeholder: output is delivered through the channel sender instead,
    /// so there is never buffered output to return here.
    pub async fn get_tool_output(
        &self,
        session_id: &str,
    ) -> Result, Box> {
        Ok(vec![])
    }

    /// Run a tool's rhai script on a background task, wiring TALK/HEAR to
    /// the session's response channel. Marks the session's current tool for
    /// the duration of the run.
    pub async fn execute_tool_with_session(
        &self,
        tool_name: &str,
        user_id: &str,
        bot_id: &str,
        session_manager: SessionManager,
        channel_sender: mpsc::Sender,
    ) -> Result<(), Box> {
        let tool =
self.get_tool(tool_name).ok_or("Tool not found")?; + session_manager + .set_current_tool(user_id, bot_id, Some(tool_name.to_string())) + .await?; + + let user_id = user_id.to_string(); + let bot_id = bot_id.to_string(); + let script = tool.script.clone(); + let session_manager_clone = session_manager.clone(); + let waiting_responses = self.waiting_responses.clone(); + + tokio::spawn(async move { + let mut engine = rhai::Engine::new(); + let (talk_tx, mut talk_rx) = mpsc::channel(100); + let (hear_tx, mut hear_rx) = mpsc::channel(100); + + { + let key = format!("{}:{}", user_id, bot_id); + let mut waiting = waiting_responses.lock().await; + waiting.insert(key, hear_tx); + } + + let channel_sender_clone = channel_sender.clone(); + let user_id_clone = user_id.clone(); + let bot_id_clone = bot_id.clone(); + + let talk_tx_clone = talk_tx.clone(); + engine.register_fn("TALK", move |message: String| { + let tx = talk_tx_clone.clone(); + tokio::spawn(async move { + let _ = tx.send(message).await; + }); + }); + + let hear_rx_mutex = Arc::new(Mutex::new(hear_rx)); + engine.register_fn("HEAR", move || { + let hear_rx = hear_rx_mutex.clone(); + tokio::task::block_in_place(|| { + tokio::runtime::Handle::current().block_on(async move { + let mut receiver = hear_rx.lock().await; + receiver.recv().await.unwrap_or_default() + }) + }) + }); + + let script_result = + tokio::task::spawn_blocking(move || engine.eval::<()>(&script)).await; + + if let Ok(Err(e)) = script_result { + let error_response = BotResponse { + bot_id: bot_id_clone.clone(), + user_id: user_id_clone.clone(), + session_id: Uuid::new_v4().to_string(), + channel: "test".to_string(), + content: format!("Tool error: {}", e), + message_type: "text".to_string(), + stream_token: None, + is_complete: true, + }; + let _ = channel_sender_clone.send(error_response).await; + } + + while let Some(message) = talk_rx.recv().await { + let response = BotResponse { + bot_id: bot_id.clone(), + user_id: user_id.clone(), + session_id: 
Uuid::new_v4().to_string(), + channel: "test".to_string(), + content: message, + message_type: "text".to_string(), + stream_token: None, + is_complete: true, + }; + let _ = channel_sender.send(response).await; + } + + let _ = session_manager_clone + .set_current_tool(&user_id, &bot_id, None) + .await; + }); + + Ok(()) + } + + pub async fn provide_user_response( + &self, + user_id: &str, + bot_id: &str, + response: String, + ) -> Result<(), Box> { + let key = format!("{}:{}", user_id, bot_id); + let mut waiting = self.waiting_responses.lock().await; + if let Some(tx) = waiting.get_mut(&key) { + let _ = tx.send(response).await; + waiting.remove(&key); + } + Ok(()) + } +} + +impl Default for ToolManager { + fn default() -> Self { + Self::new() + } +} + +pub struct ToolApi; + +impl ToolApi { + pub fn new() -> Self { + Self + } +} diff --git a/src/utils/add-drive-user.sh b/src/utils/add-drive-user.sh new file mode 100644 index 000000000..0f691e23e --- /dev/null +++ b/src/utils/add-drive-user.sh @@ -0,0 +1,27 @@ +export BOT_ID= +./mc alias set minio http://localhost:9000 user pass +./mc admin user add minio $BOT_ID + +cat > $BOT_ID-policy.json </dev/null + +# Temporary files +echo "Cleaning temporary files..." +rm -rf /tmp/* /var/tmp/* + +# Thumbnail cache +echo "Cleaning thumbnail cache..." +rm -rf ~/.cache/thumbnails/* /root/.cache/thumbnails/* + +# DNS cache +echo "Flushing DNS cache..." +systemd-resolve --flush-caches 2>/dev/null || true + +# Old kernels (keep 2 latest) +echo "Removing old kernels..." +apt purge -y $(dpkg -l | awk '/^ii linux-image-*/{print $2}' | grep -v $(uname -r) | head -n -2) 2>/dev/null + +# Crash reports +echo "Clearing crash reports..." 
+rm -f /var/crash/* + +### LXC Containers Cleanup ### +echo -e "\n[ LXC CONTAINERS CLEANUP ]" + +# Check if LXC is installed +if command -v lxc >/dev/null 2>&1; then + for container in $(lxc list -c n --format csv | grep -v "^$"); do + echo -e "\nCleaning container: $container" + + # Execute cleanup commands in container + lxc exec "$container" -- bash -c " + echo 'Cleaning package cache...' + apt clean && apt autoclean && apt autoremove -y + + echo 'Cleaning temporary files...' + rm -rf /tmp/* /var/tmp/* + + echo 'Cleaning logs...' + rm -rf /opt/gbo/logs/* + + echo 'Cleaning journal logs...' + journalctl --vacuum-time=1d 2>/dev/null || true + + echo 'Cleaning thumbnail cache...' + rm -rf /home/*/.cache/thumbnails/* /root/.cache/thumbnails/* + " 2>/dev/null + done +else + echo "LXC not installed, skipping container cleanup." +fi + +echo -e "\nCleanup completed!" \ No newline at end of file diff --git a/src/utils/disk-size.md b/src/utils/disk-size.md new file mode 100644 index 000000000..b7d3a5455 --- /dev/null +++ b/src/utils/disk-size.md @@ -0,0 +1,6 @@ +lxc list --format json | jq -r '.[].name' | while read container; do + echo -n "$container: " + lxc exec $container -- df -h / --output=used < /dev/null | tail -n1 +done + +du -h --max-depth=1 "." 
2>/dev/null | sort -rh | head -n 50 | awk '{printf "%-10s %s\n", $1, $2}'
diff --git a/src/utils/email-ips.sh b/src/utils/email-ips.sh
new file mode 100644
index 000000000..442af3c18
--- /dev/null
+++ b/src/utils/email-ips.sh
@@ -0,0 +1,8 @@
# List public IPs and their reverse-DNS names for the resource group.
az network public-ip list --resource-group "$CLOUD_GROUP" \
  --query "[].{Name:name, IP:ipAddress, ReverseDNS:dnsSettings.reverseFqdn}" \
  -o table

# Set the reverse FQDN (PTR) used for outbound mail. All three options belong
# to ONE command, so each wrapped line must end with a backslash continuation.
# The original omitted the backslashes, making `--name ...` and
# `--reverse-fqdn ...` run as separate (failing) shell commands.
az network public-ip update --resource-group "$CLOUD_GROUP" \
  --name "pip-network-adapter-name" \
  --reverse-fqdn "outbound14.domain.com.br"

diff --git a/src/utils/install-libreoffice-online.sh b/src/utils/install-libreoffice-online.sh
new file mode 100644
index 000000000..30b953db7
--- /dev/null
+++ b/src/utils/install-libreoffice-online.sh
@@ -0,0 +1,65 @@
# Disk-resize helpers plus the full toolchain needed to build Collabora Online (coolwsd).
sudo apt install -y cloud-guest-utils e2fsprogs

apt install -y make g++ build-essential
apt install -y openjdk-17-jdk ant
apt install -y sudo systemd wget zip procps ccache
apt install -y automake bison flex git gperf graphviz junit4 libtool m4 nasm
apt install -y libcairo2-dev libjpeg-dev libegl1-mesa-dev libfontconfig1-dev \
    libgl1-mesa-dev libgif-dev libgtk-3-dev librsvg2-dev libpango1.0-dev
apt install -y libcap-dev libcap2-bin libkrb5-dev libpcap0.8-dev openssl libssl-dev
apt install -y libxcb-dev libx11-xcb-dev libxkbcommon-x11-dev libxtst-dev \
    libxrender-dev libxslt1-dev libxt-dev xsltproc
apt install -y libcunit1-dev libcppunit-dev libpam0g-dev libcups2-dev libzstd-dev uuid-runtime
apt install -y python3-dev python3-lxml python3-pip python3-polib
apt install -y nodejs npm
apt install -y libpoco-dev libpococrypto80
apt install -y libreoffice-dev

# Prebuilt LibreOffice core assets (LOKit) consumed by coolwsd.
mkdir -p /opt/lo && cd /opt/lo
wget https://github.com/CollaboraOnline/online/releases/download/for-code-assets/core-co-24.04-assets.tar.gz
tar xf core-co-24.04-assets.tar.gz && rm core-co-24.04-assets.tar.gz

# Build coolwsd as an unprivileged user.
useradd cool -G sudo
mkdir -p /opt/cool && chown cool:cool /opt/cool
cd /opt/cool
sudo -Hu cool git clone
https://github.com/CollaboraOnline/online.git +cd online && sudo -Hu cool ./autogen.sh + +export CPPFLAGS=-I/opt/lo/include +export LDFLAGS=-L/opt/lo/instdir/program +./configure --with-lokit-path=/opt/lo --with-lo-path=/opt/lo/instdir --with-poco-includes=/usr/local/include --with-poco-libs=/usr/local/lib + +sudo -Hu cool make -j$(nproc) + +make install +mkdir -p /etc/coolwsd /usr/local/var/cache/coolwsd + +chown cool:cool /usr/local/var/cache/coolwsd +admin_pwd=$(openssl rand -hex 6) + +cat < /lib/systemd/system/coolwsd.service +[Unit] +Description=Collabora Online WebSocket Daemon +After=network.target + +[Service] +ExecStart=/opt/cool/online/coolwsd --o:sys_template_path=/opt/cool/online/systemplate \ +--o:lo_template_path=/opt/lo/instdir --o:child_root_path=/opt/cool/online/jails \ +--o:admin_console.username=admin --o:admin_console.password=$DOC_EDITOR_ADMIN_PWD \ +--o:ssl.enable=false +User=cool + +[Install] +WantedBy=multi-user.target +EOT + +systemctl daemon-reload +systemctl enable coolwsd.service +systemctl start coolwsd.service +" + +echo "Installation complete!" 
+echo "Admin password: $admin_pwd" +echo "Access at: https://localhost:9980" + diff --git a/src/utils/set-limits.sh b/src/utils/set-limits.sh new file mode 100644 index 000000000..215cca7fe --- /dev/null +++ b/src/utils/set-limits.sh @@ -0,0 +1,53 @@ +#!/usr/bin/env bash + +# Define container limits in an associative array +declare -A container_limits=( + # Pattern Memory CPU Allowance + ["*tables*"]="4096MB:100ms/100ms" + ["*dns*"]="2048MB:100ms/100ms" + ["*doc-editor*"]="512MB:10ms/100ms" + ["*proxy*"]="2048MB:100ms/100ms" + ["*directory*"]="1024MB:50ms/100ms" + ["*drive*"]="4096MB:50ms/100ms" + ["*email*"]="4096MB:100ms/100ms" + ["*webmail*"]="4096MB:100ms/100ms" + ["*bot*"]="4096MB:50ms/100ms" + ["*meeting*"]="4096MB:100ms/100ms" + ["*alm*"]="512MB:50ms/100ms" + ["*alm-ci*"]="4096MB:100ms/100ms" + ["*system*"]="4096MB:50ms/100ms" + ["*mailer*"]="4096MB:25ms/100ms" +) + +# Default values (for containers that don't match any pattern) +DEFAULT_MEMORY="1024MB" +DEFAULT_CPU_ALLOWANCE="15ms/100ms" +CPU_COUNT=2 +CPU_PRIORITY=10 + +for pattern in "${!container_limits[@]}"; do + echo "Configuring $container..." + + memory=$DEFAULT_MEMORY + cpu_allowance=$DEFAULT_CPU_ALLOWANCE + + # Configure all containers + for container in $(lxc list -c n --format csv); do + # Check if container matches any pattern + if [[ $container == $pattern ]]; then + IFS=':' read -r memory cpu_allowance <<< "${container_limits[$pattern]}" + + # Apply configuration + lxc config set "$container" limits.memory "$memory" + lxc config set "$container" limits.cpu.allowance "$cpu_allowance" + lxc config set "$container" limits.cpu "$CPU_COUNT" + lxc config set "$container" limits.cpu.priority "$CPU_PRIORITY" + + echo "Restarting $container..." 
+ lxc restart "$container" + + lxc config show "$container" | grep -E "memory|cpu" + break + fi + done +done diff --git a/src/utils/set-size-5GB.sh b/src/utils/set-size-5GB.sh new file mode 100644 index 000000000..6de4216c6 --- /dev/null +++ b/src/utils/set-size-5GB.sh @@ -0,0 +1,7 @@ +lxc config device override $CONTAINER_NAME root +lxc config device set $CONTAINER_NAME root size 6GB + +zpool set autoexpand=on default +zpool online -e default /var/snap/lxd/common/lxd/disks/default.img +zpool list +zfs list diff --git a/src/utils/setup-host.sh b/src/utils/setup-host.sh new file mode 100644 index 000000000..ae611e8d5 --- /dev/null +++ b/src/utils/setup-host.sh @@ -0,0 +1,6 @@ + +# Host +sudo lxc config set core.trust_password "$LXC_TRUST_PASSWORD" + +# ALM-CI +lxc remote add bot 10.16.164.? --accept-certificate --password "$LXC_TRUST_PASSWORD" diff --git a/src/utils/startup.sh b/src/utils/startup.sh new file mode 100644 index 000000000..e274f6a38 --- /dev/null +++ b/src/utils/startup.sh @@ -0,0 +1,10 @@ +#!/bin/bash + +# Disable shell timeout + +sed -i '/TMOUT/d' /etc/profile /etc/bash.bashrc /etc/profile.d/* +echo 'export TMOUT=0' > /etc/profile.d/notimeout.sh +chmod +x /etc/profile.d/notimeout.sh +sed -i '/pam_exec.so/s/quiet/quiet set_timeout=0/' /etc/pam.d/sshd 2>/dev/null +source /etc/profile + diff --git a/src/web_automation/mod.rs b/src/web_automation/mod.rs new file mode 100644 index 000000000..57ae5a62c --- /dev/null +++ b/src/web_automation/mod.rs @@ -0,0 +1,246 @@ +// wget https://dl.google.com/linux/direct/google-chrome-stable_current_amd64.deb +// sudo dpkg -i google-chrome-stable_current_amd64.deb +use log::info; + +use crate::shared::utils; + +use std::env; +use std::error::Error; +use std::future::Future; +use std::path::PathBuf; +use std::pin::Pin; +use std::process::Command; +use std::sync::Arc; +use thirtyfour::{DesiredCapabilities, WebDriver}; +use tokio::fs; +use tokio::sync::Semaphore; + +pub struct BrowserSetup { + pub brave_path: String, + 
pub chromedriver_path: String, +} + +pub struct BrowserPool { + webdriver_url: String, + semaphore: Semaphore, + brave_path: String, +} + +impl BrowserPool { + pub fn new(webdriver_url: String, max_concurrent: usize, brave_path: String) -> Self { + Self { + webdriver_url, + semaphore: Semaphore::new(max_concurrent), + brave_path, + } + } + + pub async fn with_browser(&self, f: F) -> Result> + where + F: FnOnce( + WebDriver, + ) + -> Pin>> + Send>> + + Send + + 'static, + T: Send + 'static, + { + let _permit = self.semaphore.acquire().await?; + + let mut caps = DesiredCapabilities::chrome(); + caps.set_binary(&self.brave_path)?; + //caps.add_chrome_arg("--headless=new")?; + caps.add_chrome_arg("--disable-gpu")?; + caps.add_chrome_arg("--no-sandbox")?; + + let driver = WebDriver::new(&self.webdriver_url, caps).await?; + + // Execute user function + let result = f(driver).await; + + result + } +} + +impl BrowserSetup { + pub async fn new() -> Result> { + // Check for Brave installation + let brave_path = Self::find_brave().await?; + + // Check for chromedriver + let chromedriver_path = Self::setup_chromedriver().await?; + + Ok(Self { + brave_path, + chromedriver_path, + }) + } + + async fn find_brave() -> Result> { + let mut possible_paths = vec![ + // Windows - Program Files + String::from(r"C:\Program Files\BraveSoftware\Brave-Browser\Application\brave.exe"), + // macOS + String::from("/Applications/Brave Browser.app/Contents/MacOS/Brave Browser"), + // Linux + String::from("/usr/bin/brave-browser"), + String::from("/usr/bin/brave"), + ]; + + // Windows - AppData (usuário atual) + if let Ok(local_appdata) = env::var("LOCALAPPDATA") { + let mut path = PathBuf::from(local_appdata); + path.push("BraveSoftware\\Brave-Browser\\Application\\brave.exe"); + possible_paths.push(path.to_string_lossy().to_string()); + } + + for path in possible_paths { + if fs::metadata(&path).await.is_ok() { + return Ok(path); + } + } + + Err("Brave browser not found. 
Please install Brave first.".into()) + } + async fn setup_chromedriver() -> Result> { + // Create chromedriver directory in executable's parent directory + let mut chromedriver_dir = env::current_exe()?.parent().unwrap().to_path_buf(); + chromedriver_dir.push("chromedriver"); + + // Ensure the directory exists + if !chromedriver_dir.exists() { + fs::create_dir(&chromedriver_dir).await?; + } + + // Determine the final chromedriver path + let chromedriver_path = if cfg!(target_os = "windows") { + chromedriver_dir.join("chromedriver.exe") + } else { + chromedriver_dir.join("chromedriver") + }; + + // Check if chromedriver exists + if fs::metadata(&chromedriver_path).await.is_err() { + let (download_url, platform) = match (cfg!(target_os = "windows"), cfg!(target_arch = "x86_64")) { + (true, true) => ( + "https://storage.googleapis.com/chrome-for-testing-public/138.0.7204.183/win64/chromedriver-win64.zip", + "win64", + ), + (true, false) => ( + "https://storage.googleapis.com/chrome-for-testing-public/138.0.7204.183/win32/chromedriver-win32.zip", + "win32", + ), + (false, true) if cfg!(target_os = "macos") && cfg!(target_arch = "aarch64") => ( + "https://storage.googleapis.com/chrome-for-testing-public/138.0.7204.183/mac-arm64/chromedriver-mac-arm64.zip", + "mac-arm64", + ), + (false, true) if cfg!(target_os = "macos") => ( + "https://storage.googleapis.com/chrome-for-testing-public/138.0.7204.183/mac-x64/chromedriver-mac-x64.zip", + "mac-x64", + ), + (false, true) => ( + "https://storage.googleapis.com/chrome-for-testing-public/138.0.7204.183/linux64/chromedriver-linux64.zip", + "linux64", + ), + _ => return Err("Unsupported platform".into()), + }; + + let mut zip_path = std::env::temp_dir(); + zip_path.push("chromedriver.zip"); + info!("Downloading chromedriver for {}...", platform); + + // Download the zip file + utils::download_file(download_url, &zip_path.to_str().unwrap()).await?; + + // Extract the zip to a temporary directory first + let mut temp_extract_dir = 
std::env::temp_dir(); + temp_extract_dir.push("chromedriver_extract"); + let temp_extract_dir1 = temp_extract_dir.clone(); + + // Clean up any previous extraction + let _ = fs::remove_dir_all(&temp_extract_dir).await; + fs::create_dir(&temp_extract_dir).await?; + + utils::extract_zip_recursive(&zip_path, &temp_extract_dir)?; + + // Chrome for Testing zips contain a platform-specific directory + // Find the chromedriver binary in the extracted structure + let mut extracted_binary_path = temp_extract_dir; + extracted_binary_path.push(format!("chromedriver-{}", platform)); + extracted_binary_path.push(if cfg!(target_os = "windows") { + "chromedriver.exe" + } else { + "chromedriver" + }); + + // Try to move the file, fall back to copy if cross-device + match fs::rename(&extracted_binary_path, &chromedriver_path).await { + Ok(_) => (), + Err(e) if e.kind() == std::io::ErrorKind::CrossesDevices => { + // Cross-device move failed, use copy instead + fs::copy(&extracted_binary_path, &chromedriver_path).await?; + // Set permissions on the copied file + #[cfg(unix)] + { + use std::os::unix::fs::PermissionsExt; + let mut perms = fs::metadata(&chromedriver_path).await?.permissions(); + perms.set_mode(0o755); + fs::set_permissions(&chromedriver_path, perms).await?; + } + } + Err(e) => return Err(e.into()), + } + + // Clean up + let _ = fs::remove_file(&zip_path).await; + let _ = fs::remove_dir_all(temp_extract_dir1).await; + + // Set executable permissions (if not already set during copy) + #[cfg(unix)] + { + use std::os::unix::fs::PermissionsExt; + let mut perms = fs::metadata(&chromedriver_path).await?.permissions(); + perms.set_mode(0o755); + fs::set_permissions(&chromedriver_path, perms).await?; + } + } + + Ok(chromedriver_path.to_string_lossy().to_string()) + } +} + +// Modified BrowserPool initialization +pub async fn initialize_browser_pool() -> Result, Box> { + let setup = BrowserSetup::new().await?; + + // Start chromedriver process if not running + if 
!is_process_running("chromedriver").await { + Command::new(&setup.chromedriver_path) + .arg("--port=9515") + .spawn()?; + + // Give chromedriver time to start + tokio::time::sleep(tokio::time::Duration::from_secs(2)).await; + } + + Ok(Arc::new(BrowserPool::new( + "http://localhost:9515".to_string(), + 5, // Max concurrent browsers + setup.brave_path, + ))) +} + +async fn is_process_running(name: &str) -> bool { + if cfg!(target_os = "windows") { + Command::new("tasklist") + .output() + .map(|o| String::from_utf8_lossy(&o.stdout).contains(name)) + .unwrap_or(false) + } else { + Command::new("pgrep") + .arg(name) + .output() + .map(|o| o.status.success()) + .unwrap_or(false) + } +} diff --git a/src/whatsapp/mod.rs b/src/whatsapp/mod.rs new file mode 100644 index 000000000..ecbcfd8ba --- /dev/null +++ b/src/whatsapp/mod.rs @@ -0,0 +1,176 @@ +use async_trait::async_trait; +use reqwest::Client; +use serde::{Deserialize, Serialize}; +use std::collections::HashMap; +use std::sync::Arc; +use tokio::sync::Mutex; +use log::info; + +use crate::shared::BotResponse; + +#[derive(Debug, Deserialize)] +pub struct WhatsAppMessage { + pub entry: Vec, +} + +#[derive(Debug, Deserialize)] +pub struct WhatsAppEntry { + pub changes: Vec, +} + +#[derive(Debug, Deserialize)] +pub struct WhatsAppChange { + pub value: WhatsAppValue, +} + +#[derive(Debug, Deserialize)] +pub struct WhatsAppValue { + pub contacts: Option>, + pub messages: Option>, +} + +#[derive(Debug, Deserialize)] +pub struct WhatsAppContact { + pub profile: WhatsAppProfile, + pub wa_id: String, +} + +#[derive(Debug, Deserialize)] +pub struct WhatsAppProfile { + pub name: String, +} + +#[derive(Debug, Deserialize)] +pub struct WhatsAppMessageData { + pub from: String, + pub id: String, + pub timestamp: String, + pub text: Option, + pub r#type: String, +} + +#[derive(Debug, Deserialize)] +pub struct WhatsAppText { + pub body: String, +} + +#[derive(Serialize)] +pub struct WhatsAppResponse { + pub messaging_product: String, + 
pub to: String,
    pub text: WhatsAppResponseText,
}

#[derive(Serialize)]
pub struct WhatsAppResponseText {
    pub body: String,
}

/// Adapter for the Meta WhatsApp Cloud API (Graph API v17.0).
pub struct WhatsAppAdapter {
    client: Client,
    access_token: String,
    phone_number_id: String,
    webhook_verify_token: String,
    sessions: Arc>>, // phone -> session_id
}

impl WhatsAppAdapter {
    pub fn new(access_token: String, phone_number_id: String, webhook_verify_token: String) -> Self {
        Self {
            client: Client::new(),
            access_token,
            phone_number_id,
            webhook_verify_token,
            sessions: Arc::new(Mutex::new(HashMap::new())),
        }
    }

    /// Return the stable session id for a phone number, creating one on first
    /// contact. The lookup-or-insert happens under a single lock acquisition:
    /// the original implementation tried to re-lock and `.await` inside the
    /// synchronous closure passed to `unwrap_or_else`, which does not compile,
    /// and its drop-then-relock gap could also create two sessions for the
    /// same phone under concurrent calls.
    pub async fn get_session_id(&self, phone: &str) -> String {
        let mut sessions = self.sessions.lock().await;
        if let Some(session_id) = sessions.get(phone) {
            return session_id.clone();
        }
        let session_id = uuid::Uuid::new_v4().to_string();
        sessions.insert(phone.to_string(), session_id.clone());
        session_id
    }

    /// Send a plain-text message to `to` via the Cloud API. Non-2xx replies
    /// are logged, not returned as errors (best-effort delivery).
    pub async fn send_whatsapp_message(&self, to: &str, body: &str) -> Result<(), Box> {
        let url = format!(
            "https://graph.facebook.com/v17.0/{}/messages",
            self.phone_number_id
        );

        let response_data = WhatsAppResponse {
            messaging_product: "whatsapp".to_string(),
            to: to.to_string(),
            text: WhatsAppResponseText {
                body: body.to_string(),
            },
        };

        let response = self.client
            .post(&url)
            .header("Authorization", format!("Bearer {}", self.access_token))
            .json(&response_data)
            .send()
            .await?;

        if response.status().is_success() {
            info!("WhatsApp message sent to {}", to);
        } else {
            let error_text = response.text().await?;
            log::error!("Failed to send WhatsApp message: {}", error_text);
        }

        Ok(())
    }

    /// Flatten a webhook payload into internal `UserMessage`s, one per
    /// text message, tagging each with its phone-keyed session id.
    pub async fn process_incoming_message(&self, message: WhatsAppMessage) -> Result, Box> {
        let mut user_messages = Vec::new();

        for entry in message.entry {
            for change in entry.changes {
                if let Some(messages) = change.value.messages {
                    for msg in messages {
                        if let Some(text) = msg.text {
+ let session_id = self.get_session_id(&msg.from).await; + + let user_message = crate::shared::UserMessage { + bot_id: "default_bot".to_string(), + user_id: msg.from.clone(), + session_id: session_id.clone(), + channel: "whatsapp".to_string(), + content: text.body, + message_type: msg.r#type, + media_url: None, + timestamp: chrono::Utc::now(), + }; + + user_messages.push(user_message); + } + } + } + } + } + + Ok(user_messages) + } + + pub fn verify_webhook(&self, mode: &str, token: &str, challenge: &str) -> Result> { + if mode == "subscribe" && token == self.webhook_verify_token { + Ok(challenge.to_string()) + } else { + Err("Invalid verification".into()) + } + } +} + +#[async_trait] +impl super::channels::ChannelAdapter for WhatsAppAdapter { + async fn send_message(&self, response: BotResponse) -> Result<(), Box> { + info!("Sending WhatsApp response to: {}", response.user_id); + self.send_whatsapp_message(&response.user_id, &response.content).await + } +} diff --git a/static/index.html b/static/index.html new file mode 100644 index 000000000..f92997495 --- /dev/null +++ b/static/index.html @@ -0,0 +1,484 @@ + + + + General Bots - ChatGPT Clone + + + + + +
+ +
+
+ + +
+
+ + + + +