Fix build errors and unused imports in core, security and package_manager modules

This commit is contained in:
Rodrigo Rodriguez (Pragmatismo) 2026-01-24 22:04:47 -03:00
parent 8e282177d1
commit 0a24cd4b50
142 changed files with 5291 additions and 5414 deletions

View file

@ -1,194 +0,0 @@
# App Launcher Integration Guide
## Overview
The `apps-manifest.json` file provides a complete mapping between Cargo.toml features and user-friendly app descriptions for the botui app launcher.
## File Location
```
botserver/apps-manifest.json
```
## Structure
### Categories
Apps are organized into 8 categories:
1. **Communication** (💬) - Chat, Mail, Meet, WhatsApp, Telegram, etc.
2. **Productivity** (⚡) - Tasks, Calendar, Project, Goals, Workspaces, etc.
3. **Documents** (📄) - Drive, Docs, Sheet, Slides, Paper
4. **Media** (🎬) - Video, Player, Canvas
5. **Learning** (📚) - Learn, Research, Sources
6. **Analytics** (📈) - Analytics, Dashboards, Monitoring
7. **Development** (⚙️) - Automation, Designer, Editor
8. **Administration** (🔐) - Attendant, Security, Settings, Directory
9. **Core** (🏗️) - Cache, LLM, Vector DB
### App Schema
Each app includes:
```json
{
"id": "tasks",
"name": "Tasks",
"description": "Task management with scheduling",
"feature": "tasks",
"icon": "✅",
"enabled_by_default": true,
"dependencies": ["automation", "drive", "monitoring"]
}
```
### Bundles
Pre-configured feature sets:
- **minimal** - Essential infrastructure (chat, automation, drive, cache)
- **lightweight** - Basic productivity (chat, drive, tasks, people)
- **full** - Complete feature set
- **communications** - All communication apps
- **productivity** - Productivity suite
- **documents** - Document suite
## Integration with botui
### Reading the Manifest
```javascript
// In botui/ui/suite/js/app-launcher.js
fetch('/api/apps/manifest')
.then(res => res.json())
.then(manifest => {
renderAppLauncher(manifest);
});
```
### Rendering Apps
```javascript
function renderAppLauncher(manifest) {
const categories = manifest.categories;
for (const [categoryId, category] of Object.entries(categories)) {
const categoryEl = createCategory(category);
category.apps.forEach(app => {
const appCard = createAppCard(app);
categoryEl.appendChild(appCard);
});
}
}
```
### App Card Template
```html
<div class="app-card" data-feature="${app.feature}">
<div class="app-icon">${app.icon}</div>
<div class="app-name">${app.name}</div>
<div class="app-description">${app.description}</div>
<div class="app-toggle">
<input type="checkbox"
${app.enabled_by_default ? 'checked' : ''}
${app.core_dependency ? 'disabled' : ''}>
</div>
${app.dependencies.length > 0 ?
`<div class="app-deps">Requires: ${app.dependencies.join(', ')}</div>`
: ''}
</div>
```
## Backend API Endpoint
Add to `botserver/src/main.rs`:
```rust
async fn get_apps_manifest() -> Json<serde_json::Value> {
let manifest = include_str!("../apps-manifest.json");
let value: serde_json::Value = serde_json::from_str(manifest)
.expect("Invalid apps-manifest.json");
Json(value)
}
// In router configuration:
api_router = api_router.route("/api/apps/manifest", get(get_apps_manifest));
```
## Compilation Testing
Use the `test_apps.sh` script to verify all apps compile:
```bash
cd /home/rodriguez/src/gb
./test_apps.sh
```
This will:
1. Test each app feature individually
2. Report which apps pass/fail compilation
3. Provide a summary of results
## Core Dependencies
These apps cannot be disabled (marked with `core_dependency: true`):
- **automation** - Required for .gbot script execution
- **drive** - S3 storage used throughout
- **cache** - Redis integrated into sessions
## Feature Bundling
When a user enables an app, all its dependencies are automatically enabled:
- Enable `tasks` → Automatically enables `automation`, `drive`, `monitoring`
- Enable `mail` → Automatically enables `mail_core`, `drive`
- Enable `research` → Automatically enables `llm`, `vectordb`
## Syncing with Cargo.toml
When adding new features to `Cargo.toml`:
1. Add the feature definition in `Cargo.toml`
2. Add the app entry in `apps-manifest.json`
3. Update the app launcher UI in botui
4. Run `./test_apps.sh` to verify compilation
5. Commit both files together
## Example: Adding a New App
### 1. In Cargo.toml
```toml
[features]
myapp = ["dep:myapp-crate", "drive"]
```
### 2. In apps-manifest.json
```json
{
"id": "myapp",
"name": "My App",
"description": "My awesome app",
"feature": "myapp",
"icon": "🚀",
"enabled_by_default": false,
"dependencies": ["drive"]
}
```
### 3. Test
```bash
cargo check -p botserver --no-default-features --features myapp
```
## Notes
- Icons use emoji for cross-platform compatibility
- Dependencies are automatically resolved by Cargo
- Core dependencies are shown but cannot be toggled off
- The manifest version matches botserver version

View file

@ -13,7 +13,8 @@ features = ["database", "i18n"]
default = ["chat", "automation", "drive", "tasks", "cache", "directory"] default = ["chat", "automation", "drive", "tasks", "cache", "directory"]
# ===== CORE INFRASTRUCTURE (Can be used standalone) ===== # ===== CORE INFRASTRUCTURE (Can be used standalone) =====
automation = ["dep:rhai", "dep:cron"] scripting = ["dep:rhai"]
automation = ["scripting", "dep:cron"]
drive = ["dep:aws-config", "dep:aws-sdk-s3", "dep:aws-smithy-async", "dep:pdf-extract"] drive = ["dep:aws-config", "dep:aws-sdk-s3", "dep:aws-smithy-async", "dep:pdf-extract"]
cache = ["dep:redis"] cache = ["dep:redis"]
directory = [] directory = []
@ -25,10 +26,6 @@ people = ["automation", "drive", "cache"]
mail = ["automation", "drive", "cache", "dep:lettre", "dep:mailparse", "dep:imap", "dep:native-tls"] mail = ["automation", "drive", "cache", "dep:lettre", "dep:mailparse", "dep:imap", "dep:native-tls"]
meet = ["automation", "drive", "cache", "dep:livekit"] meet = ["automation", "drive", "cache", "dep:livekit"]
social = ["automation", "drive", "cache"] social = ["automation", "drive", "cache"]
whatsapp = ["automation", "drive", "cache"]
telegram = ["automation", "drive", "cache"]
instagram = ["automation", "drive", "cache"]
msteams = ["automation", "drive", "cache"]
# Productivity # Productivity
calendar = ["automation", "drive", "cache"] calendar = ["automation", "drive", "cache"]
@ -41,7 +38,7 @@ billing = ["automation", "drive", "cache"]
# Documents # Documents
docs = ["automation", "drive", "cache", "docx-rs", "ooxmlsdk"] docs = ["automation", "drive", "cache", "docx-rs", "ooxmlsdk"]
sheet = ["automation", "drive", "cache", "calamine", "spreadsheet-ods"] sheet = ["automation", "drive", "cache", "calamine", "spreadsheet-ods", "dep:rust_xlsxwriter", "dep:umya-spreadsheet"]
slides = ["automation", "drive", "cache", "ooxmlsdk"] slides = ["automation", "drive", "cache", "ooxmlsdk"]
paper = ["automation", "drive", "cache"] paper = ["automation", "drive", "cache"]
@ -69,6 +66,10 @@ attendant = ["automation", "drive", "cache"]
security = ["automation", "drive", "cache"] security = ["automation", "drive", "cache"]
settings = ["automation", "drive", "cache"] settings = ["automation", "drive", "cache"]
whatsapp = ["automation", "drive", "cache"]
telegram = ["automation", "drive", "cache"]
instagram = ["automation", "drive", "cache"]
msteams = ["automation", "drive", "cache"]
# Core Tech # Core Tech
llm = ["automation", "drive", "cache"] llm = ["automation", "drive", "cache"]
vectordb = ["automation", "drive", "cache", "dep:qdrant-client"] vectordb = ["automation", "drive", "cache", "dep:qdrant-client"]
@ -163,9 +164,11 @@ qdrant-client = { workspace = true, optional = true }
# Document Processing # Document Processing
docx-rs = { workspace = true, optional = true } docx-rs = { workspace = true, optional = true }
ooxmlsdk = { workspace = true, optional = true } ooxmlsdk = { workspace = true, optional = true, features = ["parts"] }
calamine = { workspace = true, optional = true } calamine = { workspace = true, optional = true }
spreadsheet-ods = { workspace = true, optional = true } spreadsheet-ods = { workspace = true, optional = true }
rust_xlsxwriter = { workspace = true, optional = true }
umya-spreadsheet = { workspace = true, optional = true }
# File Storage & Drive (drive feature) # File Storage & Drive (drive feature)
aws-config = { workspace = true, features = ["behavior-version-latest", "rt-tokio", "rustls"], optional = true } aws-config = { workspace = true, features = ["behavior-version-latest", "rt-tokio", "rustls"], optional = true }

View file

@ -1,125 +0,0 @@
# Professional Dependency & Feature Architecture Plan
## Objective
Create a robust, "ease-of-selection" feature architecture where enabling a high-level **App** (e.g., `tasks`) automatically enables all required **Capabilities** (e.g., `drive`, `automation`). Simultaneously ensure the codebase compiles cleanly in a **Minimal** state (no default features).
## Current Status: ✅ MINIMAL BUILD WORKING
### Completed Work
**Cargo.toml restructuring** - Feature bundling implemented
**AppState guards** - Conditional fields for `drive`, `cache`, `tasks`
**main.rs guards** - Initialization logic properly guarded
**SessionManager guards** - Redis usage conditionally compiled
**bootstrap guards** - S3/Drive operations feature-gated
**compiler guards** - SET SCHEDULE conditionally compiled
**Task/NewTask exports** - Properly guarded in shared/mod.rs
**Minimal build compiles** - `cargo check -p botserver --no-default-features --features minimal` ✅ SUCCESS
### Architecture Decision Made
**Accepted Core Dependencies:**
- **`automation`** (Rhai scripting) - Required for .gbot script execution (100+ files depend on it)
- **`drive`** (S3 storage) - Used in 80+ places throughout codebase
- **`cache`** (Redis) - Integrated into session management and state
**Minimal Feature Set:**
```toml
minimal = ["chat", "automation", "drive", "cache"]
```
This provides a functional bot with:
- Chat capabilities
- Script execution (.gbot files)
- File storage (S3)
- Session caching (Redis)
## Part 1: Feature Architecture (Cargo.toml) ✅
**Status: COMPLETE**
We successfully restructured `Cargo.toml` using a **Bundle Pattern**:
- User selects **Apps** → Apps select **Capabilities** → Capabilities select **Dependencies**
### Implemented Hierarchy
#### User-Facing Apps (The Menu)
* **`tasks`** → includes `automation`, `drive`, `monitoring`
* **`drive`** → includes `storage_core`, `pdf`
* **`chat`** → includes (base functionality)
* **`mail`** → includes `mail_core`, `drive`
#### Core Capabilities (Internal Bundles)
* `automation_core``rhai`, `cron`
* `storage_core``aws-sdk-s3`, `aws-config`, `aws-smithy-async`
* `cache_core``redis`
* `mail_core``lettre`, `mailparse`, `imap`, `native-tls`
* `realtime_core``livekit`
* `pdf_core``pdf-extract`
## Part 2: Codebase Compilation Fixes ✅
### Completed Guards
1. ✅ **`AppState` Struct** (`src/core/shared/state.rs`)
* Fields `s3_client`, `drive`, `redis`, `task_engine`, `task_scheduler` are guarded
2. ✅ **`main.rs` Initialization**
* S3 client creation guarded with `#[cfg(feature = "drive")]`
* Redis client creation guarded with `#[cfg(feature = "cache")]`
* Task engine/scheduler guarded with `#[cfg(feature = "tasks")]`
3. ✅ **`bootstrap/mod.rs` Logic**
* `get_drive_client()` guarded with `#[cfg(feature = "drive")]`
* `upload_templates_to_drive()` has both feature-enabled and disabled versions
4. ✅ **`SessionManager`** (`src/core/session/mod.rs`)
* Redis imports and usage properly guarded with `#[cfg(feature = "cache")]`
5. ✅ **`compiler/mod.rs`**
* `execute_set_schedule` import and usage guarded with `#[cfg(feature = "tasks")]`
* Graceful degradation when tasks feature is disabled
6. ✅ **`shared/mod.rs`**
* `Task` and `NewTask` types properly exported with `#[cfg(feature = "tasks")]`
* Separate pub use statements for conditional compilation
## Verification Results
### ✅ Minimal Build
```bash
cargo check -p botserver --no-default-features --features minimal
# Result: SUCCESS ✅ (Exit code: 0)
```
### Feature Bundle Test
```bash
# Test tasks bundle (should include automation, drive, monitoring)
cargo check -p botserver --no-default-features --features tasks
# Expected: SUCCESS (includes all dependencies)
```
## Success Criteria ✅
**ACHIEVED**:
- `cargo check --no-default-features --features minimal` compiles successfully ✅
- Feature bundles work as expected (enabling `tasks` enables `automation`, `drive`, `monitoring`)
- All direct dependencies are maintained and secure
- GTK3 transitive warnings are documented as accepted risk
- Clippy warnings in botserver eliminated
## Summary
The feature bundling architecture is **successfully implemented** and the minimal build is **working**.
**Key Achievements:**
1. ✅ Feature bundling pattern allows easy selection (e.g., `tasks``automation` + `drive` + `monitoring`)
2. ✅ Minimal build compiles with core infrastructure (`chat` + `automation` + `drive` + `cache`)
3. ✅ Conditional compilation guards properly applied throughout codebase
4. ✅ No compilation warnings in botserver
**Accepted Trade-offs:**
- `automation` (Rhai) is a core dependency - too deeply integrated to make optional
- `drive` (S3) is a core dependency - used throughout for file storage
- `cache` (Redis) is a core dependency - integrated into session management
This provides a solid foundation for feature selection while maintaining a working minimal build.

290
TASKS.md
View file

@ -1,290 +0,0 @@
# Cargo Audit Migration Strategy - Task Breakdown
## Project Context
**Tauri Desktop Application** using GTK3 bindings for Linux support with 1143 total dependencies.
---
## CRITICAL: 1 Vulnerability (Fix Immediately)
### Task 1.1: Fix idna Punycode Vulnerability ⚠️ HIGH PRIORITY
**Issue**: RUSTSEC-2024-0421 - Accepts invalid Punycode labels
**Status**: ✅ FIXED (Updated validator to 0.20)
### Task 2.1: Replace atty (Used by clap 2.34.0)
**Issue**: RUSTSEC-2024-0375 + RUSTSEC-2021-0145 (unmaintained + unsound)
**Status**: ✅ FIXED (Replaced `ksni` with `tray-icon`)
### Task 2.2: Replace ansi_term (Used by clap 2.34.0)
**Issue**: RUSTSEC-2021-0139 (unmaintained)
**Status**: ✅ FIXED (Replaced `ksni` with `tray-icon`)
### Task 2.3: Replace rustls-pemfile
**Issue**: RUSTSEC-2025-0134 (unmaintained)
**Status**: ✅ FIXED (Updated axum-server to 0.8 and qdrant-client to 1.16)
### Task 2.4: Fix aws-smithy-runtime (Yanked Version)
**Issue**: Version 1.9.6 was yanked
**Status**: ✅ FIXED (Updated aws-sdk-s3 to 1.120.0)
### Task 2.5: Replace fxhash
**Issue**: RUSTSEC-2025-0057 (unmaintained)
**Current**: `fxhash 0.2.1`
**Used by**: `selectors 0.24.0``kuchikiki` (speedreader fork) → Tauri
**Status**: ⏳ PENDING (Wait for upstream Tauri update)
### Task 2.6: Replace instant
**Issue**: RUSTSEC-2024-0384 (unmaintained)
**Status**: ✅ FIXED (Updated rhai)
### Task 2.7: Replace lru (Unsound Iterator)
**Issue**: RUSTSEC-2026-0002 (unsound - violates Stacked Borrows)
**Status**: ✅ FIXED (Updated ratatui to 0.30 and aws-sdk-s3 to 1.120.0)
---
## MEDIUM PRIORITY: Tauri/GTK Stack (Major Effort)
### Task 3.1: Evaluate GTK3 → Tauri Pure Approach
**Issue**: All GTK3 crates unmaintained (12 crates total)
**Current**: Using Tauri with GTK3 Linux backend
**Strategic Question**: Do you actually need GTK3?
**Investigation Items**:
- [ ] Audit what GTK3 features you're using:
- System tray? (ksni 0.2.2 uses it)
- Native file dialogs? (rfd 0.15.4)
- Native menus? (muda 0.17.1)
- WebView? (wry uses webkit2gtk)
- [ ] Check if Tauri v2 can work without GTK3 on Linux
- [ ] Test if removing `ksni` and using Tauri's built-in tray works
**Decision Point**:
- **If GTK3 is only for tray/dialogs**: Migrate to pure Tauri approach
- **If GTK3 is deeply integrated**: Plan GTK4 migration
**Estimated effort**: 4-8 hours investigation
---
### Task 3.2: Option A - Migrate to Tauri Pure (Recommended)
**If Task 3.1 shows GTK3 isn't essential**
**Action Items**:
- [ ] Replace `ksni` with Tauri's `tauri-plugin-tray` or `tray-icon`
- [ ] Remove direct GTK dependencies from Cargo.toml
- [ ] Update Tauri config to use modern Linux backend
- [ ] Test on: Ubuntu 22.04+, Fedora, Arch
- [ ] Verify all system integrations work
**Benefits**:
- Removes 12 unmaintained crates
- Lighter dependency tree
- Better cross-platform consistency
**Estimated effort**: 1-2 days
---
### Task 3.3: Option B - Migrate to GTK4 (If GTK Required)
**If Task 3.1 shows GTK3 is essential**
**Action Items**:
- [ ] Create migration branch
- [ ] Update Cargo.toml GTK dependencies:
```toml
# Remove:
gtk = "0.18"
gdk = "0.18"
# Add:
gtk4 = "0.9"
gdk4 = "0.9"
```
- [ ] Rewrite GTK code following [gtk-rs migration guide](https://gtk-rs.org/gtk4-rs/stable/latest/book/migration/)
- [ ] Key API changes:
- `gtk::Window``gtk4::Window`
- Event handling completely redesigned
- Widget hierarchy changes
- CSS theming changes
- [ ] Test thoroughly on all Linux distros
**Estimated effort**: 1-2 weeks (significant API changes)
---
## LOW PRIORITY: Transitive Dependencies
### Task 4.1: Replace proc-macro-error
**Issue**: RUSTSEC-2024-0370 (unmaintained)
**Current**: `proc-macro-error 1.0.4`
**Used by**: `validator_derive` and `gtk3-macros` and `glib-macros`
**Action Items**:
- [ ] Update `validator` crate (may have migrated to `proc-macro-error2`)
- [ ] GTK macros will be fixed by Task 3.2 or 3.3
- [ ] Run `cargo update -p validator`
**Estimated effort**: 30 minutes (bundled with Task 1.1)
---
### Task 4.2: Replace paste
**Issue**: RUSTSEC-2024-0436 (unmaintained, no vulnerabilities)
**Current**: `paste 1.0.15`
**Used by**: `tikv-jemalloc-ctl`, `rav1e`, `ratatui`
**Action Items**:
- [ ] Low priority - no security issues
- [ ] Will likely be fixed by updating parent crates
- [ ] Monitor for updates when updating other deps
**Estimated effort**: Passive (wait for upstream)
---
### Task 4.3: Replace UNIC crates
**Issue**: All unmaintained (5 crates)
**Current**: Used by `urlpattern 0.3.0``tauri-utils`
**Action Items**:
- [ ] Update Tauri to latest version
- [ ] Check if Tauri has migrated to `unicode-*` crates
- [ ] Run `cargo update -p tauri -p tauri-utils`
**Estimated effort**: 30 minutes (bundled with Tauri updates)
---
### Task 4.4: Fix glib Unsoundness
**Issue**: RUSTSEC-2024-0429 (unsound iterator)
**Current**: `glib 0.18.5` (part of GTK3 stack)
**Status**: 🛑 Transitive / Accepted Risk (Requires GTK4 migration)
**Action Items**:
- [ ] Document as accepted transitive risk until Tauri migrates to GTK4
**Estimated effort**: N/A (Waiting for upstream)
---
## Recommended Migration Order
### Phase 1: Critical Fixes (Week 1)
1. ✅ Task 1.1 - Fix idna vulnerability
2. ✅ Task 2.4 - Fix AWS yanked version
3. ✅ Task 2.3 - Update rustls-pemfile
4. ✅ Task 2.6 - Update instant/rhai
5. ✅ Task 2.7 - Update lru
**Result**: No vulnerabilities, no yanked crates
---
### Phase 2: Direct Dependency Cleanup (Week 2)
6. ✅ Task 3.1 - Evaluate GTK3 usage (Determined ksni was main usage, replaced)
7. ✅ Task 2.1/2.2 - Fix atty/ansi_term via clap (Removed ksni)
8. ⏳ Task 2.5 - Fix fxhash (Waiting for upstream Tauri update, currently on v2)
**Result**: All direct unmaintained crates addressed
---
### Phase 3: GTK Migration (Weeks 3-4)
9. 🛑 Task 3.1/3.2/3.3 - GTK Migration halted.
- **Reason**: GTK3 is a hard dependency of Tauri on Linux (via `wry` -> `webkit2gtk`).
- **Decision**: Accept the ~11-12 transitive GTK3 warnings as they are unavoidable without changing frameworks.
- **Action**: Suppress warnings if possible, otherwise document as known transitive issues.
10. ✅ Task 4.1 - Update validator/proc-macro-error (Verified validator 0.20)
11. ✅ Task 4.3 - Update UNIC crates via Tauri (Verified Tauri v2)
**Result**: All actionable warnings addressed. GTK3 warnings acknowledged as transitive/upstream.
---
## Testing Checklist
After each phase, verify:
- [ ] `cargo audit` shows 0 vulnerabilities, 0 actionable warnings (GTK3 warnings accepted)
- [ ] `cargo build --release` succeeds
- [ ] `cargo test` passes
- [ ] Manual testing:
- [ ] botapp launches and renders correctly
- [ ] System tray works (Linux)
- [ ] File dialogs work
- [ ] Web view renders content
- [ ] HTTP/gRPC endpoints respond (botserver)
- [ ] S3 operations work (botserver)
- [ ] Database connections work
- [ ] Scripting engine works (botserver)
---
## Quick Commands Reference
```bash
# Phase 1 - Critical fixes
cargo update -p validator # Task 1.1
cargo update -p aws-config -p aws-sdk-s3 -p aws-sdk-sts # Task 2.4
cargo update -p tonic -p axum-server # Task 2.3
cargo update -p rhai # Task 2.6
cargo update -p ratatui -p aws-sdk-s3 # Task 2.7
# Phase 2 - Direct deps
cargo update -p dbus-codegen # Task 2.1 (if possible)
cargo update -p tauri -p wry # Task 2.5
# Verify after each update
cargo audit
cargo build --release
cargo test
```
---
## Risk Assessment
| Task | Risk Level | Breaking Changes | Rollback Difficulty |
|------|-----------|------------------|---------------------|
| 1.1 idna | Low | None expected | Easy |
| 2.1 atty/clap | Medium | Possible CLI changes | Medium |
| 2.3 rustls | Low | Internal only | Easy |
| 2.4 AWS | Low | None expected | Easy |
| 2.5 fxhash | Medium | Depends on upstream | Hard (may need fork) |
| 3.2 Tauri Pure | Medium | API changes | Medium |
| 3.3 GTK4 | **High** | **Major API rewrite** | **Hard** |
---
## Estimated Total Effort
- **Phase 1 (Critical)**: 2-4 hours
- **Phase 2 (Cleanup)**: 4-8 hours
- **Phase 3 Option A (Tauri Pure)**: 1-2 days
- **Phase 3 Option B (GTK4)**: 1-2 weeks
**Recommended**: Start Phase 1 immediately, then do Task 3.1 investigation before committing to Option A or B.
---
## Success Criteria
**Complete when**:
- `cargo audit` returns: `Success! 0 vulnerabilities found` (ignoring transitive GTK warnings)
- All direct dependencies are maintained and secure
- All automated tests pass
- Manual testing confirms no regressions
- Application runs on target Linux distributions
---
## Notes
- Most issues are **transitive dependencies** - updating direct deps often fixes them
- **GTK3 → GTK4** is the biggest effort but solves 12 warnings at once
- Consider **Tauri Pure** approach to avoid GUI framework entirely
- Some fixes (like fxhash) may require upstream updates - don't block on them
- Document any temporary workarounds for future reference

38
TODO.md
View file

@ -34,13 +34,13 @@ Compilar cada feature individualmente do botserver com `cargo check --no-default
### Grupo 5: Aprendizado ### Grupo 5: Aprendizado
- [x] `learn` - [x] `learn`
- [ ] `research` (Failed: missing EmailDocument struct, unknown field email_db, type inference errors) - [x] `research` (Fixed: gated email dependencies, added missing imports)
- [x] `sources` - [x] `sources`
### Grupo 6: Analytics ### Grupo 6: Analytics
- [x] `analytics` - [x] `analytics`
- [x] `dashboards` - [x] `dashboards`
- [ ] `monitoring` (Failed: E0308 type mismatch in SVG generation) - [x] `monitoring` (Fixed: E0308 type mismatch in SVG generation)
### Grupo 7: Desenvolvimento ### Grupo 7: Desenvolvimento
- [x] `designer` - [x] `designer`
@ -55,25 +55,25 @@ Compilar cada feature individualmente do botserver com `cargo check --no-default
### Erros de Compilação (Bloqueios) ### Erros de Compilação (Bloqueios)
- [ ] **meet**: Falha no build C++ da dependência `webrtc-sys` (header `absl/container/inlined_vector.h` não encontrado). - [ ] **meet**: Falha no build C++ da dependência `webrtc-sys` (header `absl/container/inlined_vector.h` não encontrado).
- [ ] **research**: Diversos erros de tipo e campos ausentes: - Requer instalação de dependências de sistema (não resolvido neste ambiente).
- `EmailDocument` não encontrado no escopo.
- Campo `email_db` desconhecido na struct `UserIndexingJob`.
- Erros de inferência de tipo em `vectordb_indexer.rs`.
- [ ] **monitoring**: Erro `E0308` (mismatched types) na geração de SVG em `app_generator.rs` (conflito entre `f32` e `f64`).
### Avisos Comuns (Shared) ### Avisos Comuns (Shared)
- `botserver/src/basic/compiler/mod.rs:358:25`: `unused mut` e `unused variable` (`conn`). - [x] Fixed all shared warnings (unused variables/mut/imports in compiler, state, drive_monitor).
- `botserver/src/basic/compiler/mod.rs:357:25`: `unused variable` (`cron`).
- `botserver/src/core/shared/state.rs:469:13`: `unused mut` (`debug`).
- `botserver/src/drive/drive_monitor/mod.rs:20:7`: `KB_INDEXING_TIMEOUT_SECS` (dead code).
- `botserver/src/drive/drive_monitor/mod.rs:39:5`: `kb_indexing_in_progress` (dead code).
### Avisos Específicos de Feature ### Avisos Específicos de Feature
- **mail**: Unused imports em `src/core/shared/schema/mail.rs`. - [x] **mail**: Fixed unused imports.
- **tasks**: Unused imports em `src/core/shared/schema/tasks.rs`. - [x] **tasks**: Fixed unused imports.
- **project**: Unused imports em `src/core/shared/schema/project.rs`. - [x] **project**: Fixed unused imports.
- **tickets**: Unused imports em `src/core/shared/schema/tickets.rs`. - [x] **tickets**: Fixed unused imports.
- **learn**: Unused imports em `src/core/shared/schema/learn.rs`. - [x] **learn**: Fixed unused imports.
- **analytics**: Unused import em `src/analytics/mod.rs`. - [x] **analytics**: Fixed unused imports.
- **designer**: Unused variable `_messages`. - [x] **designer**: Fixed unused variable `messages`.
## Remaining Warnings Plan (From TODO.tmp)
1. **Automated Fixes**: Run `cargo clippy --fix --workspace` to resolve simple warnings (unused imports/variables/mut).
- [ ] Execution in progress.
2. **Manual Fixes**: Address warnings not resolvable by auto-fix.
- [ ] Complex logic changes.
- [ ] Feature gating adjustments.
3. **Verification**: Run `cargo check --workspace` to ensure zero warnings.

663
TODO.tmp Normal file
View file

@ -0,0 +1,663 @@
Checking bottest v6.1.0 (/home/rodriguez/src/gb/bottest)
Compiling botapp v6.1.0 (/home/rodriguez/src/gb/botapp)
Checking botserver v6.1.0 (/home/rodriguez/src/gb/botserver)
warning: this function has too many arguments (8/7)
--> botserver/src/auto_task/app_logs.rs:117:5
|
117 | / pub fn log(
118 | | &self,
119 | | app_name: &str,
120 | | level: LogLevel,
... |
125 | | user_id: Option<Uuid>,
126 | | ) {
| |_____^
|
= help: for further information visit https://rust-lang.github.io/rust-clippy/rust-1.92.0/index.html#too_many_arguments
= note: `#[warn(clippy::too_many_arguments)]` on by default
warning: this function has too many arguments (8/7)
--> botserver/src/auto_task/app_logs.rs:154:5
|
154 | / pub fn log_error(
155 | | &self,
156 | | app_name: &str,
157 | | source: LogSource,
... |
162 | | stack_trace: Option<&str>,
163 | | ) {
| |_____^
|
= help: for further information visit https://rust-lang.github.io/rust-clippy/rust-1.92.0/index.html#too_many_arguments
warning: this function has too many arguments (8/7)
--> botserver/src/auto_task/task_manifest.rs:938:1
|
938 | / pub fn create_manifest_from_llm_response(
939 | | app_name: &str,
940 | | description: &str,
941 | | tables: Vec<TableDefinition>,
... |
946 | | monitors: Vec<MonitorDefinition>,
947 | | ) -> TaskManifest {
| |_________________^
|
= help: for further information visit https://rust-lang.github.io/rust-clippy/rust-1.92.0/index.html#too_many_arguments
warning: this function has too many arguments (11/7)
--> botserver/src/basic/keywords/human_approval.rs:256:5
|
256 | / pub fn create_request(
257 | | &self,
258 | | bot_id: Uuid,
259 | | session_id: Uuid,
... |
267 | | default_action: Option<ApprovalDecision>,
268 | | ) -> ApprovalRequest {
| |________________________^
|
= help: for further information visit https://rust-lang.github.io/rust-clippy/rust-1.92.0/index.html#too_many_arguments
warning: this function has too many arguments (8/7)
--> botserver/src/basic/keywords/create_site.rs:111:1
|
111 | / async fn create_site(
112 | | config: crate::core::config::AppConfig,
113 | | s3: Option<std::sync::Arc<aws_sdk_s3::Client>>,
114 | | bucket: String,
... |
119 | | prompt: Dynamic,
120 | | ) -> Result<String, Box<dyn Error + Send + Sync>> {
| |_________________________________________________^
|
= help: for further information visit https://rust-lang.github.io/rust-clippy/rust-1.92.0/index.html#too_many_arguments
warning: method `from_str` can be confused for the standard trait method `std::str::FromStr::from_str`
--> botserver/src/channels/media_upload.rs:44:5
|
44 | / pub fn from_str(s: &str) -> Option<Self> {
45 | | match s.to_lowercase().as_str() {
46 | | "twitter" | "x" => Some(Self::Twitter),
47 | | "facebook" | "fb" => Some(Self::Facebook),
... |
61 | | }
| |_____^
|
= help: consider implementing the trait `std::str::FromStr` or choosing a less ambiguous method name
= help: for further information visit https://rust-lang.github.io/rust-clippy/rust-1.92.0/index.html#should_implement_trait
= note: `#[warn(clippy::should_implement_trait)]` on by default
warning: match expression looks like `matches!` macro
--> botserver/src/channels/oauth.rs:52:9
|
52 | / match self {
53 | | Self::Bluesky | Self::Telegram | Self::Twilio => false,
54 | | _ => true,
55 | | }
| |_________^ help: try: `!matches!(self, Self::Bluesky | Self::Telegram | Self::Twilio)`
|
= help: for further information visit https://rust-lang.github.io/rust-clippy/rust-1.92.0/index.html#match_like_matches_macro
= note: `#[warn(clippy::match_like_matches_macro)]` on by default
warning: very complex type used. Consider factoring parts into `type` definitions
--> botserver/src/core/middleware.rs:501:6
|
501 | ) -> impl Fn(Request<Body>, Next) -> std::pin::Pin<Box<dyn std::future::Future<Output = Result<Response, Response>> + Send>>
| ______^
502 | | + Clone
503 | | + Send {
| |_____________^
|
= help: for further information visit https://rust-lang.github.io/rust-clippy/rust-1.92.0/index.html#type_complexity
= note: `#[warn(clippy::type_complexity)]` on by default
warning: stripping a prefix manually
--> botserver/src/core/middleware.rs:691:9
|
691 | &auth_header[7..]
| ^^^^^^^^^^^^^^^^^
|
note: the prefix was tested here
--> botserver/src/core/middleware.rs:690:17
|
690 | let token = if auth_header.starts_with("Bearer ") {
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
= help: for further information visit https://rust-lang.github.io/rust-clippy/rust-1.92.0/index.html#manual_strip
= note: `#[warn(clippy::manual_strip)]` on by default
help: try using the `strip_prefix` method
|
690 ~ let token = if let Some(<stripped>) = auth_header.strip_prefix("Bearer ") {
691 ~ <stripped>
|
warning: method `from_str` can be confused for the standard trait method `std::str::FromStr::from_str`
--> botserver/src/core/organization_invitations.rs:37:5
|
37 | / pub fn from_str(s: &str) -> Option<Self> {
38 | | match s.to_lowercase().as_str() {
39 | | "owner" => Some(Self::Owner),
40 | | "admin" => Some(Self::Admin),
... |
47 | | }
| |_____^
|
= help: consider implementing the trait `std::str::FromStr` or choosing a less ambiguous method name
= help: for further information visit https://rust-lang.github.io/rust-clippy/rust-1.92.0/index.html#should_implement_trait
warning: this function has too many arguments (10/7)
--> botserver/src/core/organization_invitations.rs:184:5
|
184 | / pub async fn create_invitation(
185 | | &self,
186 | | organization_id: Uuid,
187 | | organization_name: &str,
... |
194 | | expires_in_days: i64,
195 | | ) -> Result<OrganizationInvitation, String> {
| |_______________________________________________^
|
= help: for further information visit https://rust-lang.github.io/rust-clippy/rust-1.92.0/index.html#too_many_arguments
warning: this function has too many arguments (9/7)
--> botserver/src/core/organization_invitations.rs:249:5
|
249 | / pub async fn bulk_invite(
250 | | &self,
251 | | organization_id: Uuid,
252 | | organization_name: &str,
... |
258 | | message: Option<String>,
259 | | ) -> BulkInviteResponse {
| |___________________________^
|
= help: for further information visit https://rust-lang.github.io/rust-clippy/rust-1.92.0/index.html#too_many_arguments
warning: clamp-like pattern without using clamp function
--> botserver/src/core/organization_invitations.rs:651:27
|
651 | let expires_in_days = req.expires_in_days.unwrap_or(7).max(1).min(30);
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: replace with clamp: `req.expires_in_days.unwrap_or(7).clamp(1, 30)`
|
= note: clamp will panic if max < min
= help: for further information visit https://rust-lang.github.io/rust-clippy/rust-1.92.0/index.html#manual_clamp
= note: `#[warn(clippy::manual_clamp)]` on by default
warning: very complex type used. Consider factoring parts into `type` definitions
--> botserver/src/core/organization_rbac.rs:246:17
|
246 | user_roles: Arc<RwLock<HashMap<(Uuid, Uuid), Vec<Uuid>>>>,
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
= help: for further information visit https://rust-lang.github.io/rust-clippy/rust-1.92.0/index.html#type_complexity
warning: this function has too many arguments (8/7)
--> botserver/src/core/package_manager/setup/directory_setup.rs:221:5
|
221 | / pub async fn create_user(
222 | | &mut self,
223 | | org_id: &str,
224 | | username: &str,
... |
229 | | is_admin: bool,
230 | | ) -> Result<DefaultUser> {
| |____________________________^
|
= help: for further information visit https://rust-lang.github.io/rust-clippy/rust-1.92.0/index.html#too_many_arguments
warning: very complex type used. Consider factoring parts into `type` definitions
--> botserver/src/core/performance.rs:740:16
|
740 | processor: Arc<dyn Fn(Vec<T>) -> std::pin::Pin<Box<dyn std::future::Future<Output = ()> + Send>> + Send + Sync>,
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
= help: for further information visit https://rust-lang.github.io/rust-clippy/rust-1.92.0/index.html#type_complexity
warning: very complex type used. Consider factoring parts into `type` definitions
--> botserver/src/core/performance.rs:749:28
|
749 | let processor_arc: Arc<dyn Fn(Vec<T>) -> std::pin::Pin<Box<dyn std::future::Future<Output = ()> + Send>> + Send + Sync> =
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
= help: for further information visit https://rust-lang.github.io/rust-clippy/rust-1.92.0/index.html#type_complexity
warning: method `from_str` can be confused for the standard trait method `std::str::FromStr::from_str`
--> botserver/src/security/api_keys.rs:65:5
|
65 | / pub fn from_str(s: &str) -> Option<Self> {
66 | | match s {
67 | | "read" => Some(Self::Read),
68 | | "write" => Some(Self::Write),
... |
85 | | }
| |_____^
|
= help: consider implementing the trait `std::str::FromStr` or choosing a less ambiguous method name
= help: for further information visit https://rust-lang.github.io/rust-clippy/rust-1.92.0/index.html#should_implement_trait
warning: method `from_str` can be confused for the standard trait method `std::str::FromStr::from_str`
--> botserver/src/security/auth.rs:150:5
|
150 | / pub fn from_str(s: &str) -> Self {
151 | | match s.to_lowercase().as_str() {
152 | | "anonymous" => Self::Anonymous,
153 | | "user" => Self::User,
... |
164 | | }
| |_____^
|
= help: consider implementing the trait `std::str::FromStr` or choosing a less ambiguous method name
= help: for further information visit https://rust-lang.github.io/rust-clippy/rust-1.92.0/index.html#should_implement_trait
warning: very complex type used. Consider factoring parts into `type` definitions
--> botserver/src/security/passkey.rs:898:10
|
898 | ) -> Result<(Vec<u8>, Vec<u8>, Option<Vec<u8>>), PasskeyError> {
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
= help: for further information visit https://rust-lang.github.io/rust-clippy/rust-1.92.0/index.html#type_complexity
warning: method `from_str` can be confused for the standard trait method `std::str::FromStr::from_str`
--> botserver/src/security/protection/manager.rs:36:5
|
36 | / pub fn from_str(s: &str) -> Option<Self> {
37 | | match s.to_lowercase().as_str() {
38 | | "lynis" => Some(Self::Lynis),
39 | | "rkhunter" => Some(Self::RKHunter),
... |
46 | | }
| |_____^
|
= help: consider implementing the trait `std::str::FromStr` or choosing a less ambiguous method name
= help: for further information visit https://rust-lang.github.io/rust-clippy/rust-1.92.0/index.html#should_implement_trait
warning: method `from_str` can be confused for the standard trait method `std::str::FromStr::from_str`
--> botserver/src/security/secrets.rs:13:5
|
13 | / pub fn from_str(secret: &str) -> Self {
14 | | Self {
15 | | inner: secret.to_string(),
16 | | }
17 | | }
| |_____^
|
= help: consider implementing the trait `std::str::FromStr` or choosing a less ambiguous method name
= help: for further information visit https://rust-lang.github.io/rust-clippy/rust-1.92.0/index.html#should_implement_trait
warning: method `from_str` can be confused for the standard trait method `std::str::FromStr::from_str`
--> botserver/src/botmodels/python_bridge.rs:124:5
|
124 | / pub fn from_str(s: &str) -> Option<Self> {
125 | | match s.to_lowercase().as_str() {
126 | | "mediapipe" => Some(Self::MediaPipe),
127 | | "deepface" => Some(Self::DeepFace),
... |
134 | | }
| |_____^
|
= help: consider implementing the trait `std::str::FromStr` or choosing a less ambiguous method name
= help: for further information visit https://rust-lang.github.io/rust-clippy/rust-1.92.0/index.html#should_implement_trait
warning: `botserver` (bin "botserver") generated 23 warnings
warning: variable does not need to be mutable
--> botserver/src/botmodels/opencv.rs:613:13
|
613 | let mut detector = OpenCvFaceDetector::new(config);
| ----^^^^^^^^
| |
| help: remove this `mut`
|
= note: `#[warn(unused_mut)]` (part of `#[warn(unused)]`) on by default
warning: this `impl` can be derived
--> botserver/src/core/session/mod.rs:551:5
|
551 | / impl Default for Role {
552 | | fn default() -> Self {
553 | | Self::User
554 | | }
555 | | }
| |_____^
|
= help: for further information visit https://rust-lang.github.io/rust-clippy/rust-1.92.0/index.html#derivable_impls
= note: `#[warn(clippy::derivable_impls)]` on by default
help: replace the manual implementation with a derive attribute and mark the default variant
|
544 ~ #[derive(Default)]
545 ~ pub enum Role {
546 | Admin,
547 | Attendant,
548 ~ #[default]
549 ~ User,
550 | Guest,
551 | }
552 |
553 ~
|
warning: this `impl` can be derived
--> botserver/src/core/session/mod.rs:593:5
|
593 | / impl Default for Channel {
594 | | fn default() -> Self {
595 | | Self::WhatsApp
596 | | }
597 | | }
| |_____^
|
= help: for further information visit https://rust-lang.github.io/rust-clippy/rust-1.92.0/index.html#derivable_impls
help: replace the manual implementation with a derive attribute and mark the default variant
|
584 ~ #[derive(Default)]
585 ~ pub enum Channel {
586 ~ #[default]
587 ~ WhatsApp,
588 | Teams,
...
594 |
595 ~
|
warning: this `impl` can be derived
--> botserver/src/core/session/mod.rs:668:5
|
668 | / impl Default for SessionState {
669 | | fn default() -> Self {
670 | | Self::Active
671 | | }
672 | | }
| |_____^
|
= help: for further information visit https://rust-lang.github.io/rust-clippy/rust-1.92.0/index.html#derivable_impls
help: replace the manual implementation with a derive attribute and mark the default variant
|
661 ~ #[derive(Default)]
662 ~ pub enum SessionState {
663 ~ #[default]
664 ~ Active,
665 | Waiting,
...
669 |
670 ~
|
warning: this `impl` can be derived
--> botserver/src/core/session/mod.rs:723:5
|
723 | / impl Default for ContentType {
724 | | fn default() -> Self {
725 | | Self::Text
726 | | }
727 | | }
| |_____^
|
= help: for further information visit https://rust-lang.github.io/rust-clippy/rust-1.92.0/index.html#derivable_impls
help: replace the manual implementation with a derive attribute and mark the default variant
|
712 ~ #[derive(Default)]
713 ~ pub enum ContentType {
714 ~ #[default]
715 ~ Text,
716 | Image,
...
724 |
725 ~
|
warning: this `impl` can be derived
--> botserver/src/core/session/mod.rs:763:5
|
763 | / impl Default for Priority {
764 | | fn default() -> Self {
765 | | Self::Normal
766 | | }
767 | | }
| |_____^
|
= help: for further information visit https://rust-lang.github.io/rust-clippy/rust-1.92.0/index.html#derivable_impls
help: replace the manual implementation with a derive attribute and mark the default variant
|
756 ~ #[derive(Default)]
757 ~ pub enum Priority {
758 | Low = 0,
759 ~ #[default]
760 ~ Normal = 1,
761 | High = 2,
...
764 |
765 ~
|
warning: this `impl` can be derived
--> botserver/src/core/session/mod.rs:779:5
|
779 | / impl Default for QueueStatus {
780 | | fn default() -> Self {
781 | | Self::Waiting
782 | | }
783 | | }
| |_____^
|
= help: for further information visit https://rust-lang.github.io/rust-clippy/rust-1.92.0/index.html#derivable_impls
help: replace the manual implementation with a derive attribute and mark the default variant
|
771 ~ #[derive(Default)]
772 ~ pub enum QueueStatus {
773 ~ #[default]
774 ~ Waiting,
775 | Assigned,
...
780 |
781 ~
|
warning: this `impl` can be derived
--> botserver/src/core/session/mod.rs:824:5
|
824 | / impl Default for ConversationState {
825 | | fn default() -> Self {
826 | | Self::Initial
827 | | }
828 | | }
| |_____^
|
= help: for further information visit https://rust-lang.github.io/rust-clippy/rust-1.92.0/index.html#derivable_impls
help: replace the manual implementation with a derive attribute and mark the default variant
|
815 ~ #[derive(Default)]
816 ~ pub enum ConversationState {
817 ~ #[default]
818 ~ Initial,
819 | WaitingForUser,
...
825 |
826 ~
|
error: this comparison involving the minimum or maximum element for this type contains a case that is always true or always false
--> botserver/src/core/shared/memory_monitor.rs:500:36
|
500 | assert!(stats.rss_bytes > 0 || stats.virtual_bytes >= 0);
| ^^^^^^^^^^^^^^^^^^^^^^^^
|
= help: because `0` is the minimum value for this type, this comparison is always true
= help: for further information visit https://rust-lang.github.io/rust-clippy/rust-1.92.0/index.html#absurd_extreme_comparisons
= note: `#[deny(clippy::absurd_extreme_comparisons)]` on by default
warning: field assignment outside of initializer for an instance created with Default::default()
--> botserver/src/security/csrf.rs:606:9
|
606 | config.token_expiry_minutes = 0;
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
note: consider initializing the variable with `security::csrf::CsrfConfig { token_expiry_minutes: 0, ..Default::default() }` and removing relevant reassignments
--> botserver/src/security/csrf.rs:605:9
|
605 | let mut config = CsrfConfig::default();
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
= help: for further information visit https://rust-lang.github.io/rust-clippy/rust-1.92.0/index.html#field_reassign_with_default
= note: `#[warn(clippy::field_reassign_with_default)]` on by default
warning: field assignment outside of initializer for an instance created with Default::default()
--> botserver/src/security/dlp.rs:1079:9
|
1079 | config.scan_inbound = false;
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
note: consider initializing the variable with `security::dlp::DlpConfig { scan_inbound: false, ..Default::default() }` and removing relevant reassignments
--> botserver/src/security/dlp.rs:1078:9
|
1078 | let mut config = DlpConfig::default();
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
= help: for further information visit https://rust-lang.github.io/rust-clippy/rust-1.92.0/index.html#field_reassign_with_default
warning: field assignment outside of initializer for an instance created with Default::default()
--> botserver/src/security/encryption.rs:622:9
|
622 | config.envelope_encryption = true;
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
note: consider initializing the variable with `security::encryption::EncryptionConfig { envelope_encryption: true, ..Default::default() }` and removing relevant reassignments
--> botserver/src/security/encryption.rs:621:9
|
621 | let mut config = EncryptionConfig::default();
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
= help: for further information visit https://rust-lang.github.io/rust-clippy/rust-1.92.0/index.html#field_reassign_with_default
error: this comparison involving the minimum or maximum element for this type contains a case that is always true or always false
--> botserver/src/security/password.rs:720:17
|
720 | assert!(result.strength.score() >= 0);
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
= help: because `0` is the minimum value for this type, this comparison is always true
= help: for further information visit https://rust-lang.github.io/rust-clippy/rust-1.92.0/index.html#absurd_extreme_comparisons
warning: field assignment outside of initializer for an instance created with Default::default()
--> botserver/src/security/security_monitoring.rs:1011:9
|
1011 | config.brute_force_threshold = 3;
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
note: consider initializing the variable with `security::security_monitoring::SecurityMonitoringConfig { brute_force_threshold: 3, ..Default::default() }` and removing relevant reassignments
--> botserver/src/security/security_monitoring.rs:1010:9
|
1010 | let mut config = SecurityMonitoringConfig::default();
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
= help: for further information visit https://rust-lang.github.io/rust-clippy/rust-1.92.0/index.html#field_reassign_with_default
warning: field assignment outside of initializer for an instance created with Default::default()
--> botserver/src/security/security_monitoring.rs:1033:9
|
1033 | config.brute_force_threshold = 2;
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
note: consider initializing the variable with `security::security_monitoring::SecurityMonitoringConfig { brute_force_threshold: 2, ..Default::default() }` and removing relevant reassignments
--> botserver/src/security/security_monitoring.rs:1032:9
|
1032 | let mut config = SecurityMonitoringConfig::default();
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
= help: for further information visit https://rust-lang.github.io/rust-clippy/rust-1.92.0/index.html#field_reassign_with_default
warning: field assignment outside of initializer for an instance created with Default::default()
--> botserver/src/security/security_monitoring.rs:1183:9
|
1183 | config.retention_hours = 0;
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
note: consider initializing the variable with `security::security_monitoring::SecurityMonitoringConfig { retention_hours: 0, ..Default::default() }` and removing relevant reassignments
--> botserver/src/security/security_monitoring.rs:1182:9
|
1182 | let mut config = SecurityMonitoringConfig::default();
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
= help: for further information visit https://rust-lang.github.io/rust-clippy/rust-1.92.0/index.html#field_reassign_with_default
warning: field assignment outside of initializer for an instance created with Default::default()
--> botserver/src/security/session.rs:715:9
|
715 | config.max_concurrent_sessions = 2;
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
note: consider initializing the variable with `security::session::SessionConfig { max_concurrent_sessions: 2, ..Default::default() }` and removing relevant reassignments
--> botserver/src/security/session.rs:714:9
|
714 | let mut config = SessionConfig::default();
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
= help: for further information visit https://rust-lang.github.io/rust-clippy/rust-1.92.0/index.html#field_reassign_with_default
warning: field assignment outside of initializer for an instance created with Default::default()
--> botserver/src/security/webhook.rs:701:9
|
701 | config.timestamp_tolerance_seconds = 60;
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
note: consider initializing the variable with `security::webhook::WebhookConfig { timestamp_tolerance_seconds: 60, ..Default::default() }` and removing relevant reassignments
--> botserver/src/security/webhook.rs:700:9
|
700 | let mut config = WebhookConfig::default();
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
= help: for further information visit https://rust-lang.github.io/rust-clippy/rust-1.92.0/index.html#field_reassign_with_default
warning: field assignment outside of initializer for an instance created with Default::default()
--> botserver/src/security/webhook.rs:732:9
|
732 | config.require_https = false;
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
note: consider initializing the variable with `security::webhook::WebhookConfig { require_https: false, ..Default::default() }` and removing relevant reassignments
--> botserver/src/security/webhook.rs:731:9
|
731 | let mut config = WebhookConfig::default();
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
= help: for further information visit https://rust-lang.github.io/rust-clippy/rust-1.92.0/index.html#field_reassign_with_default
warning: field assignment outside of initializer for an instance created with Default::default()
--> botserver/src/security/webhook.rs:742:9
|
742 | config.max_payload_size = 100;
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
note: consider initializing the variable with `security::webhook::WebhookConfig { max_payload_size: 100, ..Default::default() }` and removing relevant reassignments
--> botserver/src/security/webhook.rs:741:9
|
741 | let mut config = WebhookConfig::default();
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
= help: for further information visit https://rust-lang.github.io/rust-clippy/rust-1.92.0/index.html#field_reassign_with_default
warning: field assignment outside of initializer for an instance created with Default::default()
--> botserver/src/security/webhook.rs:871:9
|
871 | config.replay_window_seconds = 0;
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
note: consider initializing the variable with `security::webhook::WebhookConfig { replay_window_seconds: 0, ..Default::default() }` and removing relevant reassignments
--> botserver/src/security/webhook.rs:870:9
|
870 | let mut config = WebhookConfig::default();
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
= help: for further information visit https://rust-lang.github.io/rust-clippy/rust-1.92.0/index.html#field_reassign_with_default
warning: useless use of `vec!`
--> botserver/src/security/command_guard.rs:597:24
|
597 | let _allowed = vec![PathBuf::from("/tmp")];
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: you can use an array directly: `[PathBuf::from("/tmp")]`
|
= help: for further information visit https://rust-lang.github.io/rust-clippy/rust-1.92.0/index.html#useless_vec
= note: `#[warn(clippy::useless_vec)]` on by default
warning: comparison is useless due to type limits
--> botserver/src/core/shared/memory_monitor.rs:500:36
|
500 | assert!(stats.rss_bytes > 0 || stats.virtual_bytes >= 0);
| ^^^^^^^^^^^^^^^^^^^^^^^^
|
= note: `#[warn(unused_comparisons)]` on by default
warning: comparison is useless due to type limits
--> botserver/src/security/password.rs:720:17
|
720 | assert!(result.strength.score() >= 0);
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
warning: `botserver` (bin "botserver" test) generated 45 warnings (23 duplicates)
error: could not compile `botserver` (bin "botserver" test) due to 2 previous errors; 45 warnings emitted

View file

@ -1,468 +0,0 @@
{
"version": "6.1.0",
"description": "Available apps and features for GeneralBots",
"categories": {
"communication": {
"name": "Communication",
"icon": "💬",
"apps": [
{
"id": "chat",
"name": "Chat",
"description": "Real-time messaging and conversations",
"feature": "chat",
"icon": "💬",
"enabled_by_default": true,
"dependencies": []
},
{
"id": "people",
"name": "People",
"description": "Contact management and CRM",
"feature": "people",
"icon": "👥",
"enabled_by_default": false,
"dependencies": []
},
{
"id": "mail",
"name": "Mail",
"description": "Email integration (SMTP/IMAP)",
"feature": "mail",
"icon": "📧",
"enabled_by_default": false,
"dependencies": ["mail_core", "drive"]
},
{
"id": "meet",
"name": "Meet",
"description": "Video conferencing with LiveKit",
"feature": "meet",
"icon": "📹",
"enabled_by_default": false,
"dependencies": ["realtime_core"]
},
{
"id": "social",
"name": "Social",
"description": "Social media integration",
"feature": "social",
"icon": "🌐",
"enabled_by_default": false,
"dependencies": []
},
{
"id": "whatsapp",
"name": "WhatsApp",
"description": "WhatsApp Business API",
"feature": "whatsapp",
"icon": "📱",
"enabled_by_default": false,
"dependencies": []
},
{
"id": "telegram",
"name": "Telegram",
"description": "Telegram Bot API",
"feature": "telegram",
"icon": "✈️",
"enabled_by_default": false,
"dependencies": []
},
{
"id": "instagram",
"name": "Instagram",
"description": "Instagram messaging",
"feature": "instagram",
"icon": "📷",
"enabled_by_default": false,
"dependencies": []
},
{
"id": "msteams",
"name": "MS Teams",
"description": "Microsoft Teams integration",
"feature": "msteams",
"icon": "👔",
"enabled_by_default": false,
"dependencies": []
}
]
},
"productivity": {
"name": "Productivity",
"icon": "⚡",
"apps": [
{
"id": "tasks",
"name": "Tasks",
"description": "Task management with scheduling",
"feature": "tasks",
"icon": "✅",
"enabled_by_default": true,
"dependencies": ["automation", "drive", "monitoring"]
},
{
"id": "calendar",
"name": "Calendar",
"description": "Calendar and event management",
"feature": "calendar",
"icon": "📅",
"enabled_by_default": false,
"dependencies": []
},
{
"id": "project",
"name": "Project",
"description": "Project management",
"feature": "project",
"icon": "📊",
"enabled_by_default": false,
"dependencies": ["quick-xml"]
},
{
"id": "goals",
"name": "Goals",
"description": "Goal tracking and OKRs",
"feature": "goals",
"icon": "🎯",
"enabled_by_default": false,
"dependencies": []
},
{
"id": "workspaces",
"name": "Workspaces",
"description": "Team workspaces",
"feature": "workspaces",
"icon": "🏢",
"enabled_by_default": false,
"dependencies": ["workspace"]
},
{
"id": "tickets",
"name": "Tickets",
"description": "Support ticket system",
"feature": "tickets",
"icon": "🎫",
"enabled_by_default": false,
"dependencies": []
},
{
"id": "billing",
"name": "Billing",
"description": "Invoicing and payments",
"feature": "billing",
"icon": "💰",
"enabled_by_default": false,
"dependencies": []
}
]
},
"documents": {
"name": "Documents",
"icon": "📄",
"apps": [
{
"id": "drive",
"name": "Drive",
"description": "Cloud file storage (S3)",
"feature": "drive",
"icon": "💾",
"enabled_by_default": true,
"dependencies": ["storage_core", "pdf"]
},
{
"id": "docs",
"name": "Docs",
"description": "Document editor (DOCX)",
"feature": "docs",
"icon": "📝",
"enabled_by_default": false,
"dependencies": ["docx-rs", "ooxmlsdk"]
},
{
"id": "sheet",
"name": "Sheet",
"description": "Spreadsheet editor",
"feature": "sheet",
"icon": "📊",
"enabled_by_default": false,
"dependencies": ["calamine", "spreadsheet-ods"]
},
{
"id": "slides",
"name": "Slides",
"description": "Presentation editor",
"feature": "slides",
"icon": "🎞️",
"enabled_by_default": false,
"dependencies": ["ooxmlsdk"]
},
{
"id": "paper",
"name": "Paper",
"description": "Note-taking with PDF support",
"feature": "paper",
"icon": "📋",
"enabled_by_default": false,
"dependencies": ["docs", "pdf"]
}
]
},
"media": {
"name": "Media",
"icon": "🎬",
"apps": [
{
"id": "video",
"name": "Video",
"description": "Video management",
"feature": "video",
"icon": "🎥",
"enabled_by_default": false,
"dependencies": []
},
{
"id": "player",
"name": "Player",
"description": "Media player",
"feature": "player",
"icon": "▶️",
"enabled_by_default": false,
"dependencies": []
},
{
"id": "canvas",
"name": "Canvas",
"description": "Drawing and design",
"feature": "canvas",
"icon": "🎨",
"enabled_by_default": false,
"dependencies": []
}
]
},
"learning": {
"name": "Learning & Research",
"icon": "📚",
"apps": [
{
"id": "learn",
"name": "Learn",
"description": "Learning management",
"feature": "learn",
"icon": "🎓",
"enabled_by_default": false,
"dependencies": []
},
{
"id": "research",
"name": "Research",
"description": "Research tools with AI",
"feature": "research",
"icon": "🔬",
"enabled_by_default": false,
"dependencies": ["llm", "vectordb"]
},
{
"id": "sources",
"name": "Sources",
"description": "Source management",
"feature": "sources",
"icon": "📖",
"enabled_by_default": false,
"dependencies": []
}
]
},
"analytics": {
"name": "Analytics",
"icon": "📈",
"apps": [
{
"id": "analytics",
"name": "Analytics",
"description": "Data analytics",
"feature": "analytics",
"icon": "📊",
"enabled_by_default": false,
"dependencies": []
},
{
"id": "dashboards",
"name": "Dashboards",
"description": "Custom dashboards",
"feature": "dashboards",
"icon": "📉",
"enabled_by_default": false,
"dependencies": []
},
{
"id": "monitoring",
"name": "Monitoring",
"description": "System monitoring",
"feature": "monitoring",
"icon": "🔍",
"enabled_by_default": false,
"dependencies": ["sysinfo"]
}
]
},
"development": {
"name": "Development",
"icon": "⚙️",
"apps": [
{
"id": "automation",
"name": "Automation",
"description": "Scripting with Rhai (.gbot files)",
"feature": "automation",
"icon": "🤖",
"enabled_by_default": true,
"core_dependency": true,
"dependencies": ["automation_core"]
},
{
"id": "designer",
"name": "Designer",
"description": "UI/UX designer",
"feature": "designer",
"icon": "🎨",
"enabled_by_default": false,
"dependencies": []
},
{
"id": "editor",
"name": "Editor",
"description": "Code editor",
"feature": "editor",
"icon": "💻",
"enabled_by_default": false,
"dependencies": []
}
]
},
"admin": {
"name": "Administration",
"icon": "🔐",
"apps": [
{
"id": "attendant",
"name": "Attendant",
"description": "Human attendant interface",
"feature": "attendant",
"icon": "👤",
"enabled_by_default": false,
"dependencies": []
},
{
"id": "security",
"name": "Security",
"description": "Security settings",
"feature": "security",
"icon": "🔒",
"enabled_by_default": false,
"dependencies": []
},
{
"id": "settings",
"name": "Settings",
"description": "System settings",
"feature": "settings",
"icon": "⚙️",
"enabled_by_default": false,
"dependencies": []
},
{
"id": "directory",
"name": "Directory",
"description": "User directory (Zitadel)",
"feature": "directory",
"icon": "📇",
"enabled_by_default": true,
"dependencies": []
}
]
},
"core": {
"name": "Core Infrastructure",
"icon": "🏗️",
"apps": [
{
"id": "cache",
"name": "Cache",
"description": "Redis caching",
"feature": "cache",
"icon": "⚡",
"enabled_by_default": true,
"core_dependency": true,
"dependencies": ["cache_core"]
},
{
"id": "llm",
"name": "LLM",
"description": "Large Language Models",
"feature": "llm",
"icon": "🧠",
"enabled_by_default": false,
"dependencies": []
},
{
"id": "vectordb",
"name": "Vector DB",
"description": "Qdrant vector database",
"feature": "vectordb",
"icon": "🗄️",
"enabled_by_default": false,
"dependencies": ["qdrant-client"]
}
]
}
},
"bundles": {
"minimal": {
"name": "Minimal",
"description": "Essential infrastructure only",
"features": ["chat", "automation", "drive", "cache"]
},
"lightweight": {
"name": "Lightweight",
"description": "Basic productivity suite",
"features": ["chat", "drive", "tasks", "people"]
},
"full": {
"name": "Full Suite",
"description": "Complete feature set",
"features": ["chat", "people", "mail", "tasks", "calendar", "drive", "docs", "llm", "cache", "compliance"]
},
"communications": {
"name": "Communications",
"description": "All communication apps",
"features": ["chat", "people", "mail", "meet", "social", "whatsapp", "telegram", "instagram", "msteams", "cache"]
},
"productivity": {
"name": "Productivity",
"description": "Productivity suite",
"features": ["calendar", "tasks", "project", "goals", "workspaces", "cache"]
},
"documents": {
"name": "Documents",
"description": "Document suite",
"features": ["paper", "docs", "sheet", "slides", "drive"]
}
},
"core_dependencies": {
"automation": {
"reason": "Required for .gbot script execution (100+ files depend on it)",
"removable": false
},
"drive": {
"reason": "S3 storage used in 80+ places throughout codebase",
"removable": false
},
"cache": {
"reason": "Redis integrated into session management",
"removable": false
}
}
}

View file

@ -54,7 +54,10 @@ install_debian_ubuntu() {
zlib1g \ zlib1g \
ca-certificates \ ca-certificates \
curl \ curl \
wget wget \
libabseil-dev \
libclang-dev \
pkg-config
# LXC/LXD for container management (optional but recommended) # LXC/LXD for container management (optional but recommended)
echo "" echo ""

View file

@ -1,8 +1,11 @@
#[cfg(feature = "goals")]
pub mod goals; pub mod goals;
#[cfg(feature = "goals")]
pub mod goals_ui; pub mod goals_ui;
pub mod insights; pub mod insights;
use crate::core::urls::ApiUrls; use crate::core::urls::ApiUrls;
#[cfg(feature = "llm")]
use crate::llm::observability::{ObservabilityConfig, ObservabilityManager, QuickStats}; use crate::llm::observability::{ObservabilityConfig, ObservabilityManager, QuickStats};
use crate::shared::state::AppState; use crate::shared::state::AppState;
use axum::{ use axum::{
@ -15,6 +18,7 @@ use diesel::prelude::*;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use std::fmt::Write as FmtWrite; use std::fmt::Write as FmtWrite;
use std::sync::Arc; use std::sync::Arc;
#[cfg(feature = "llm")]
use tokio::sync::RwLock; use tokio::sync::RwLock;
#[derive(Debug, Clone, Serialize, Deserialize, Queryable)] #[derive(Debug, Clone, Serialize, Deserialize, Queryable)]
@ -55,11 +59,13 @@ pub struct AnalyticsQuery {
pub time_range: Option<String>, pub time_range: Option<String>,
} }
#[cfg(feature = "llm")]
#[derive(Debug)] #[derive(Debug)]
pub struct AnalyticsService { pub struct AnalyticsService {
observability: Arc<RwLock<ObservabilityManager>>, observability: Arc<RwLock<ObservabilityManager>>,
} }
#[cfg(feature = "llm")]
impl AnalyticsService { impl AnalyticsService {
pub fn new() -> Self { pub fn new() -> Self {
let config = ObservabilityConfig::default(); let config = ObservabilityConfig::default();
@ -86,6 +92,7 @@ impl AnalyticsService {
} }
} }
#[cfg(feature = "llm")]
impl Default for AnalyticsService { impl Default for AnalyticsService {
fn default() -> Self { fn default() -> Self {
Self::new() Self::new()
@ -93,7 +100,7 @@ impl Default for AnalyticsService {
} }
pub fn configure_analytics_routes() -> Router<Arc<AppState>> { pub fn configure_analytics_routes() -> Router<Arc<AppState>> {
Router::new() let router = Router::new()
.route(ApiUrls::ANALYTICS_MESSAGES_COUNT, get(handle_message_count)) .route(ApiUrls::ANALYTICS_MESSAGES_COUNT, get(handle_message_count))
.route( .route(
ApiUrls::ANALYTICS_SESSIONS_ACTIVE, ApiUrls::ANALYTICS_SESSIONS_ACTIVE,
@ -127,9 +134,14 @@ pub fn configure_analytics_routes() -> Router<Arc<AppState>> {
get(handle_recent_activity), get(handle_recent_activity),
) )
.route(ApiUrls::ANALYTICS_QUERIES_TOP, get(handle_top_queries)) .route(ApiUrls::ANALYTICS_QUERIES_TOP, get(handle_top_queries))
.route(ApiUrls::ANALYTICS_CHAT, post(handle_analytics_chat)) .route(ApiUrls::ANALYTICS_CHAT, post(handle_analytics_chat));
#[cfg(feature = "llm")]
let router = router
.route(ApiUrls::ANALYTICS_LLM_STATS, get(handle_llm_stats)) .route(ApiUrls::ANALYTICS_LLM_STATS, get(handle_llm_stats))
.route(ApiUrls::ANALYTICS_BUDGET_STATUS, get(handle_budget_status)) .route(ApiUrls::ANALYTICS_BUDGET_STATUS, get(handle_budget_status));
router
} }
pub async fn handle_message_count(State(state): State<Arc<AppState>>) -> impl IntoResponse { pub async fn handle_message_count(State(state): State<Arc<AppState>>) -> impl IntoResponse {
@ -792,6 +804,7 @@ pub async fn handle_analytics_chat(
Html(html) Html(html)
} }
#[cfg(feature = "llm")]
pub async fn handle_llm_stats(State(_state): State<Arc<AppState>>) -> impl IntoResponse { pub async fn handle_llm_stats(State(_state): State<Arc<AppState>>) -> impl IntoResponse {
let service = AnalyticsService::new(); let service = AnalyticsService::new();
let stats = service.get_quick_stats().await; let stats = service.get_quick_stats().await;
@ -808,6 +821,7 @@ pub async fn handle_llm_stats(State(_state): State<Arc<AppState>>) -> impl IntoR
Html(html) Html(html)
} }
#[cfg(feature = "llm")]
pub async fn handle_budget_status(State(_state): State<Arc<AppState>>) -> impl IntoResponse { pub async fn handle_budget_status(State(_state): State<Arc<AppState>>) -> impl IntoResponse {
let status = { let status = {
let service = AnalyticsService::new(); let service = AnalyticsService::new();

View file

@ -1,5 +1,6 @@
pub mod drive; pub mod drive;
pub mod keyword_services; pub mod keyword_services;
#[cfg(feature = "llm")]
pub mod llm_assist; pub mod llm_assist;
pub mod queue; pub mod queue;
@ -8,6 +9,7 @@ pub use keyword_services::{
AttendanceCommand, AttendanceRecord, AttendanceResponse, AttendanceService, KeywordConfig, AttendanceCommand, AttendanceRecord, AttendanceResponse, AttendanceService, KeywordConfig,
KeywordParser, ParsedCommand, KeywordParser, ParsedCommand,
}; };
#[cfg(feature = "llm")]
pub use llm_assist::{ pub use llm_assist::{
AttendantTip, ConversationMessage, ConversationSummary, LlmAssistConfig, PolishRequest, AttendantTip, ConversationMessage, ConversationSummary, LlmAssistConfig, PolishRequest,
PolishResponse, SentimentAnalysis, SentimentResponse, SmartRepliesRequest, PolishResponse, SentimentAnalysis, SentimentResponse, SmartRepliesRequest,
@ -45,7 +47,7 @@ use tokio::sync::broadcast;
use uuid::Uuid; use uuid::Uuid;
pub fn configure_attendance_routes() -> Router<Arc<AppState>> { pub fn configure_attendance_routes() -> Router<Arc<AppState>> {
Router::new() let router = Router::new()
.route(ApiUrls::ATTENDANCE_QUEUE, get(queue::list_queue)) .route(ApiUrls::ATTENDANCE_QUEUE, get(queue::list_queue))
.route(ApiUrls::ATTENDANCE_ATTENDANTS, get(queue::list_attendants)) .route(ApiUrls::ATTENDANCE_ATTENDANTS, get(queue::list_attendants))
.route(ApiUrls::ATTENDANCE_ASSIGN, post(queue::assign_conversation)) .route(ApiUrls::ATTENDANCE_ASSIGN, post(queue::assign_conversation))
@ -56,7 +58,10 @@ pub fn configure_attendance_routes() -> Router<Arc<AppState>> {
.route(ApiUrls::ATTENDANCE_RESOLVE, post(queue::resolve_conversation)) .route(ApiUrls::ATTENDANCE_RESOLVE, post(queue::resolve_conversation))
.route(ApiUrls::ATTENDANCE_INSIGHTS, get(queue::get_insights)) .route(ApiUrls::ATTENDANCE_INSIGHTS, get(queue::get_insights))
.route(ApiUrls::ATTENDANCE_RESPOND, post(attendant_respond)) .route(ApiUrls::ATTENDANCE_RESPOND, post(attendant_respond))
.route(ApiUrls::WS_ATTENDANT, get(attendant_websocket_handler)) .route(ApiUrls::WS_ATTENDANT, get(attendant_websocket_handler));
#[cfg(feature = "llm")]
let router = router
.route( .route(
ApiUrls::ATTENDANCE_LLM_TIPS, ApiUrls::ATTENDANCE_LLM_TIPS,
post(llm_assist::generate_tips), post(llm_assist::generate_tips),
@ -74,7 +79,9 @@ pub fn configure_attendance_routes() -> Router<Arc<AppState>> {
ApiUrls::ATTENDANCE_LLM_SENTIMENT, ApiUrls::ATTENDANCE_LLM_SENTIMENT,
post(llm_assist::analyze_sentiment), post(llm_assist::analyze_sentiment),
) )
.route(ApiUrls::ATTENDANCE_LLM_CONFIG, get(llm_assist::get_llm_config)) .route(ApiUrls::ATTENDANCE_LLM_CONFIG, get(llm_assist::get_llm_config));
router
} }
#[derive(Debug, Deserialize)] #[derive(Debug, Deserialize)]

View file

@ -13,6 +13,7 @@ use crate::basic::keywords::table_definition::{
use crate::core::shared::get_content_type; use crate::core::shared::get_content_type;
use crate::core::shared::models::UserSession; use crate::core::shared::models::UserSession;
use crate::core::shared::state::{AgentActivity, AppState}; use crate::core::shared::state::{AgentActivity, AppState};
#[cfg(feature = "drive")]
use aws_sdk_s3::primitives::ByteStream; use aws_sdk_s3::primitives::ByteStream;
use chrono::{DateTime, Utc}; use chrono::{DateTime, Utc};
use diesel::prelude::*; use diesel::prelude::*;
@ -21,6 +22,10 @@ use log::{error, info, trace, warn};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use std::sync::Arc; use std::sync::Arc;
#[cfg(feature = "llm")]
use crate::core::config::ConfigManager;
#[cfg(feature = "llm")]
use tokio::sync::mpsc;
use uuid::Uuid; use uuid::Uuid;
#[derive(Debug, Clone, Serialize, Deserialize)] #[derive(Debug, Clone, Serialize, Deserialize)]
@ -382,7 +387,7 @@ impl AppGenerator {
crate::core::shared::state::TaskProgressEvent::new( crate::core::shared::state::TaskProgressEvent::new(
task_id, task_id,
"manifest_update", "manifest_update",
&format!("Manifest updated: {}", manifest.app_name), format!("Manifest updated: {}", manifest.app_name),
) )
.with_event_type("manifest_update") .with_event_type("manifest_update")
.with_progress(manifest.completed_steps as u8, manifest.total_steps as u8) .with_progress(manifest.completed_steps as u8, manifest.total_steps as u8)
@ -390,7 +395,7 @@ impl AppGenerator {
crate::core::shared::state::TaskProgressEvent::new( crate::core::shared::state::TaskProgressEvent::new(
task_id, task_id,
"manifest_update", "manifest_update",
&format!("Manifest updated: {}", manifest.app_name), format!("Manifest updated: {}", manifest.app_name),
) )
.with_event_type("manifest_update") .with_event_type("manifest_update")
.with_progress(manifest.completed_steps as u8, manifest.total_steps as u8) .with_progress(manifest.completed_steps as u8, manifest.total_steps as u8)
@ -686,7 +691,7 @@ impl AppGenerator {
// Check items directly in section // Check items directly in section
for item in &mut section.items { for item in &mut section.items {
if item.name == item_name { if item.name == item_name {
item.status = status.clone(); item.status = status;
if status == crate::auto_task::ItemStatus::Running { if status == crate::auto_task::ItemStatus::Running {
item.started_at = Some(Utc::now()); item.started_at = Some(Utc::now());
} else if status == crate::auto_task::ItemStatus::Completed { } else if status == crate::auto_task::ItemStatus::Completed {
@ -704,7 +709,7 @@ impl AppGenerator {
for child in &mut section.children { for child in &mut section.children {
for item in &mut child.items { for item in &mut child.items {
if item.name == item_name { if item.name == item_name {
item.status = status.clone(); item.status = status;
if status == crate::auto_task::ItemStatus::Running { if status == crate::auto_task::ItemStatus::Running {
item.started_at = Some(Utc::now()); item.started_at = Some(Utc::now());
} else if status == crate::auto_task::ItemStatus::Completed { } else if status == crate::auto_task::ItemStatus::Completed {
@ -1375,7 +1380,7 @@ impl AppGenerator {
.with_tables(self.tables_synced.clone()); .with_tables(self.tables_synced.clone());
// Include app_url in the completion event // Include app_url in the completion event
let event = crate::core::shared::state::TaskProgressEvent::new(task_id, "complete", &format!( let event = crate::core::shared::state::TaskProgressEvent::new(task_id, "complete", format!(
"App '{}' created: {} files, {} tables, {} bytes in {}s", "App '{}' created: {} files, {} tables, {} bytes in {}s",
llm_app.name, pages.len(), tables.len(), self.bytes_generated, elapsed llm_app.name, pages.len(), tables.len(), self.bytes_generated, elapsed
)) ))
@ -2615,12 +2620,13 @@ NO QUESTIONS. JUST BUILD."#
&self, &self,
bucket: &str, bucket: &str,
) -> Result<(), Box<dyn std::error::Error + Send + Sync>> { ) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
#[cfg(feature = "drive")]
if let Some(ref s3) = self.state.drive { if let Some(ref s3) = self.state.drive {
// Check if bucket exists // Check if bucket exists
match s3.head_bucket().bucket(bucket).send().await { match s3.head_bucket().bucket(bucket).send().await {
Ok(_) => { Ok(_) => {
trace!("Bucket {} already exists", bucket); trace!("Bucket {} already exists", bucket);
return Ok(()); Ok(())
} }
Err(_) => { Err(_) => {
// Bucket doesn't exist, try to create it // Bucket doesn't exist, try to create it
@ -2628,7 +2634,7 @@ NO QUESTIONS. JUST BUILD."#
match s3.create_bucket().bucket(bucket).send().await { match s3.create_bucket().bucket(bucket).send().await {
Ok(_) => { Ok(_) => {
info!("Created bucket: {}", bucket); info!("Created bucket: {}", bucket);
return Ok(()); Ok(())
} }
Err(e) => { Err(e) => {
// Check if error is "bucket already exists" (race condition) // Check if error is "bucket already exists" (race condition)
@ -2638,7 +2644,7 @@ NO QUESTIONS. JUST BUILD."#
return Ok(()); return Ok(());
} }
error!("Failed to create bucket {}: {}", bucket, e); error!("Failed to create bucket {}: {}", bucket, e);
return Err(Box::new(e)); Err(Box::new(e))
} }
} }
} }
@ -2648,6 +2654,13 @@ NO QUESTIONS. JUST BUILD."#
trace!("No S3 client, using DB fallback for storage"); trace!("No S3 client, using DB fallback for storage");
Ok(()) Ok(())
} }
#[cfg(not(feature = "drive"))]
{
let _ = bucket;
trace!("Drive feature not enabled, no bucket check needed");
Ok(())
}
} }
async fn write_to_drive( async fn write_to_drive(
@ -2658,6 +2671,7 @@ NO QUESTIONS. JUST BUILD."#
) -> Result<(), Box<dyn std::error::Error + Send + Sync>> { ) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
info!("write_to_drive: bucket={}, path={}, content_len={}", bucket, path, content.len()); info!("write_to_drive: bucket={}, path={}, content_len={}", bucket, path, content.len());
#[cfg(feature = "drive")]
if let Some(ref s3) = self.state.drive { if let Some(ref s3) = self.state.drive {
let body = ByteStream::from(content.as_bytes().to_vec()); let body = ByteStream::from(content.as_bytes().to_vec());
let content_type = get_content_type(path); let content_type = get_content_type(path);
@ -2707,6 +2721,12 @@ NO QUESTIONS. JUST BUILD."#
self.write_to_db_fallback(bucket, path, content)?; self.write_to_db_fallback(bucket, path, content)?;
} }
#[cfg(not(feature = "drive"))]
{
warn!("Drive feature not enabled, using DB fallback for {}/{}", bucket, path);
self.write_to_db_fallback(bucket, path, content)?;
}
Ok(()) Ok(())
} }

View file

@ -10,6 +10,8 @@ use serde::{Deserialize, Serialize};
use std::fmt::Write; use std::fmt::Write;
use std::sync::Arc; use std::sync::Arc;
use uuid::Uuid; use uuid::Uuid;
#[cfg(feature = "llm")]
use crate::core::config::ConfigManager;
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)] #[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "SCREAMING_SNAKE_CASE")] #[serde(rename_all = "SCREAMING_SNAKE_CASE")]

View file

@ -4,6 +4,8 @@ use crate::basic::ScriptService;
use crate::shared::models::UserSession; use crate::shared::models::UserSession;
use crate::shared::state::AppState; use crate::shared::state::AppState;
#[cfg(feature = "llm")]
use crate::core::config::ConfigManager;
use chrono::{DateTime, Utc}; use chrono::{DateTime, Utc};
use diesel::prelude::*; use diesel::prelude::*;
use diesel::sql_query; use diesel::sql_query;

View file

@ -1,6 +1,8 @@
use crate::shared::models::UserSession; use crate::shared::models::UserSession;
use crate::shared::state::AppState; use crate::shared::state::AppState;
#[cfg(feature = "llm")]
use crate::core::config::ConfigManager;
use chrono::{DateTime, Utc}; use chrono::{DateTime, Utc};
use diesel::prelude::*; use diesel::prelude::*;
use log::{error, info, trace, warn}; use log::{error, info, trace, warn};
@ -91,34 +93,28 @@ pub struct PlanStep {
} }
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)] #[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
#[derive(Default)]
pub enum StepPriority { pub enum StepPriority {
Critical, Critical,
High, High,
#[default]
Medium, Medium,
Low, Low,
Optional, Optional,
} }
impl Default for StepPriority {
fn default() -> Self {
Self::Medium
}
}
#[derive(Debug, Clone, Copy, Serialize, Deserialize, PartialEq, Eq, PartialOrd, Ord)] #[derive(Debug, Clone, Copy, Serialize, Deserialize, PartialEq, Eq, PartialOrd, Ord)]
#[derive(Default)]
pub enum RiskLevel { pub enum RiskLevel {
None, None,
#[default]
Low, Low,
Medium, Medium,
High, High,
Critical, Critical,
} }
impl Default for RiskLevel {
fn default() -> Self {
Self::Low
}
}
#[derive(Debug, Clone, Serialize, Deserialize)] #[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ApiCallSpec { pub struct ApiCallSpec {
@ -132,7 +128,9 @@ pub struct ApiCallSpec {
} }
#[derive(Debug, Clone, Serialize, Deserialize)] #[derive(Debug, Clone, Serialize, Deserialize)]
#[derive(Default)]
pub enum AuthType { pub enum AuthType {
#[default]
None, None,
ApiKey { ApiKey {
header: String, header: String,
@ -151,11 +149,6 @@ pub enum AuthType {
}, },
} }
impl Default for AuthType {
fn default() -> Self {
Self::None
}
}
#[derive(Debug, Clone, Serialize, Deserialize)] #[derive(Debug, Clone, Serialize, Deserialize)]
pub struct RetryConfig { pub struct RetryConfig {
@ -184,18 +177,15 @@ pub struct ApprovalLevel {
} }
#[derive(Debug, Clone, Serialize, Deserialize)] #[derive(Debug, Clone, Serialize, Deserialize)]
#[derive(Default)]
pub enum DefaultApprovalAction { pub enum DefaultApprovalAction {
Approve, Approve,
Reject, Reject,
Escalate, Escalate,
#[default]
Pause, Pause,
} }
impl Default for DefaultApprovalAction {
fn default() -> Self {
Self::Pause
}
}
#[derive(Debug, Clone, Serialize, Deserialize)] #[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AlternativeInterpretation { pub struct AlternativeInterpretation {

View file

@ -82,18 +82,15 @@ impl std::fmt::Display for ConstraintType {
} }
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq, Ord, PartialOrd)] #[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq, Ord, PartialOrd)]
#[derive(Default)]
pub enum ConstraintSeverity { pub enum ConstraintSeverity {
Info = 0, Info = 0,
#[default]
Warning = 1, Warning = 1,
Error = 2, Error = 2,
Critical = 3, Critical = 3,
} }
impl Default for ConstraintSeverity {
fn default() -> Self {
Self::Warning
}
}
#[derive(Debug, Clone, Serialize, Deserialize)] #[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Constraint { pub struct Constraint {
@ -187,19 +184,16 @@ impl Default for ImpactAssessment {
} }
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq, Ord, PartialOrd)] #[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq, Ord, PartialOrd)]
#[derive(Default)]
pub enum RiskLevel { pub enum RiskLevel {
None = 0, None = 0,
#[default]
Low = 1, Low = 1,
Medium = 2, Medium = 2,
High = 3, High = 3,
Critical = 4, Critical = 4,
} }
impl Default for RiskLevel {
fn default() -> Self {
Self::Low
}
}
impl std::fmt::Display for RiskLevel { impl std::fmt::Display for RiskLevel {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
@ -264,6 +258,7 @@ impl Default for CostImpact {
} }
#[derive(Debug, Clone, Serialize, Deserialize)] #[derive(Debug, Clone, Serialize, Deserialize)]
#[derive(Default)]
pub struct TimeImpact { pub struct TimeImpact {
pub estimated_duration_seconds: i32, pub estimated_duration_seconds: i32,
pub blocking: bool, pub blocking: bool,
@ -271,16 +266,6 @@ pub struct TimeImpact {
pub affects_deadline: bool, pub affects_deadline: bool,
} }
impl Default for TimeImpact {
fn default() -> Self {
Self {
estimated_duration_seconds: 0,
blocking: false,
delayed_tasks: Vec::new(),
affects_deadline: false,
}
}
}
#[derive(Debug, Clone, Serialize, Deserialize)] #[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SecurityImpact { pub struct SecurityImpact {

View file

@ -36,7 +36,9 @@ pub struct DecisionPoint {
} }
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)] #[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
#[derive(Default)]
pub enum ManifestStatus { pub enum ManifestStatus {
#[default]
Planning, Planning,
Ready, Ready,
Running, Running,
@ -45,11 +47,6 @@ pub enum ManifestStatus {
Failed, Failed,
} }
impl Default for ManifestStatus {
fn default() -> Self {
Self::Planning
}
}
#[derive(Debug, Clone, Serialize, Deserialize)] #[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ManifestSection { pub struct ManifestSection {
@ -100,7 +97,9 @@ impl std::fmt::Display for SectionType {
} }
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)] #[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
#[derive(Default)]
pub enum SectionStatus { pub enum SectionStatus {
#[default]
Pending, Pending,
Running, Running,
Completed, Completed,
@ -108,11 +107,6 @@ pub enum SectionStatus {
Skipped, Skipped,
} }
impl Default for SectionStatus {
fn default() -> Self {
Self::Pending
}
}
#[derive(Debug, Clone, Serialize, Deserialize)] #[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ManifestItem { pub struct ManifestItem {
@ -182,7 +176,9 @@ pub enum ItemType {
} }
#[derive(Debug, Clone, Copy, Serialize, Deserialize, PartialEq, Eq)] #[derive(Debug, Clone, Copy, Serialize, Deserialize, PartialEq, Eq)]
#[derive(Default)]
pub enum ItemStatus { pub enum ItemStatus {
#[default]
Pending, Pending,
Running, Running,
Completed, Completed,
@ -190,11 +186,6 @@ pub enum ItemStatus {
Skipped, Skipped,
} }
impl Default for ItemStatus {
fn default() -> Self {
Self::Pending
}
}
#[derive(Debug, Clone, Serialize, Deserialize)] #[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TerminalLine { pub struct TerminalLine {
@ -476,7 +467,7 @@ impl TaskManifest {
"total": self.total_steps, "total": self.total_steps,
"percentage": self.progress_percentage() "percentage": self.progress_percentage()
}, },
"sections": self.sections.iter().map(|s| section_to_web_json(s)).collect::<Vec<_>>(), "sections": self.sections.iter().map(section_to_web_json).collect::<Vec<_>>(),
"terminal": { "terminal": {
"lines": self.terminal_output.iter().map(|l| serde_json::json!({ "lines": self.terminal_output.iter().map(|l| serde_json::json!({
"content": l.content, "content": l.content,
@ -688,7 +679,7 @@ fn section_to_web_json(section: &ManifestSection) -> serde_json::Value {
"global_current": global_current, "global_current": global_current,
"global_start": section.global_step_start "global_start": section.global_step_start
}, },
"duration": section.duration_seconds.map(|d| format_duration(d)), "duration": section.duration_seconds.map(format_duration),
"duration_seconds": section.duration_seconds, "duration_seconds": section.duration_seconds,
"items": section.items.iter().map(|i| { "items": section.items.iter().map(|i| {
let item_checkbox = match i.status { let item_checkbox = match i.status {
@ -703,7 +694,7 @@ fn section_to_web_json(section: &ManifestSection) -> serde_json::Value {
"type": format!("{:?}", i.item_type), "type": format!("{:?}", i.item_type),
"status": format!("{:?}", i.status), "status": format!("{:?}", i.status),
"details": i.details, "details": i.details,
"duration": i.duration_seconds.map(|d| format_duration(d)), "duration": i.duration_seconds.map(format_duration),
"duration_seconds": i.duration_seconds "duration_seconds": i.duration_seconds
}) })
}).collect::<Vec<_>>(), }).collect::<Vec<_>>(),
@ -719,11 +710,11 @@ fn section_to_web_json(section: &ManifestSection) -> serde_json::Value {
"items": g.items, "items": g.items,
"checkbox": group_checkbox, "checkbox": group_checkbox,
"status": format!("{:?}", g.status), "status": format!("{:?}", g.status),
"duration": g.duration_seconds.map(|d| format_duration(d)), "duration": g.duration_seconds.map(format_duration),
"duration_seconds": g.duration_seconds "duration_seconds": g.duration_seconds
}) })
}).collect::<Vec<_>>(), }).collect::<Vec<_>>(),
"children": section.children.iter().map(|c| section_to_web_json(c)).collect::<Vec<_>>() "children": section.children.iter().map(section_to_web_json).collect::<Vec<_>>()
}) })
} }

View file

@ -73,7 +73,9 @@ pub struct AutoTask {
} }
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)] #[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
#[derive(Default)]
pub enum AutoTaskStatus { pub enum AutoTaskStatus {
#[default]
Draft, Draft,
Compiling, Compiling,
@ -103,11 +105,6 @@ pub enum AutoTaskStatus {
RolledBack, RolledBack,
} }
impl Default for AutoTaskStatus {
fn default() -> Self {
Self::Draft
}
}
impl std::fmt::Display for AutoTaskStatus { impl std::fmt::Display for AutoTaskStatus {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
@ -131,9 +128,11 @@ impl std::fmt::Display for AutoTaskStatus {
} }
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)] #[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
#[derive(Default)]
pub enum ExecutionMode { pub enum ExecutionMode {
FullyAutomatic, FullyAutomatic,
#[default]
SemiAutomatic, SemiAutomatic,
Supervised, Supervised,
@ -143,26 +142,18 @@ pub enum ExecutionMode {
DryRun, DryRun,
} }
impl Default for ExecutionMode {
fn default() -> Self {
Self::SemiAutomatic
}
}
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Ord, PartialOrd, Eq)] #[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Ord, PartialOrd, Eq)]
#[derive(Default)]
pub enum TaskPriority { pub enum TaskPriority {
Critical = 4, Critical = 4,
High = 3, High = 3,
#[default]
Medium = 2, Medium = 2,
Low = 1, Low = 1,
Background = 0, Background = 0,
} }
impl Default for TaskPriority {
fn default() -> Self {
Self::Medium
}
}
#[derive(Debug, Clone, Serialize, Deserialize)] #[derive(Debug, Clone, Serialize, Deserialize)]
pub struct StepExecutionResult { pub struct StepExecutionResult {
@ -258,18 +249,15 @@ pub struct ImpactEstimate {
} }
#[derive(Debug, Clone, Serialize, Deserialize)] #[derive(Debug, Clone, Serialize, Deserialize)]
#[derive(Default)]
pub enum TimeoutAction { pub enum TimeoutAction {
UseDefault, UseDefault,
#[default]
Pause, Pause,
Cancel, Cancel,
Escalate, Escalate,
} }
impl Default for TimeoutAction {
fn default() -> Self {
Self::Pause
}
}
#[derive(Debug, Clone, Serialize, Deserialize)] #[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PendingApproval { pub struct PendingApproval {
@ -300,33 +288,27 @@ pub enum ApprovalType {
} }
#[derive(Debug, Clone, Serialize, Deserialize)] #[derive(Debug, Clone, Serialize, Deserialize)]
#[derive(Default)]
pub enum ApprovalDefault { pub enum ApprovalDefault {
Approve, Approve,
Reject, Reject,
#[default]
Pause, Pause,
Escalate, Escalate,
} }
impl Default for ApprovalDefault {
fn default() -> Self {
Self::Pause
}
}
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Ord, PartialOrd, Eq)] #[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Ord, PartialOrd, Eq)]
#[derive(Default)]
pub enum RiskLevel { pub enum RiskLevel {
None = 0, None = 0,
#[default]
Low = 1, Low = 1,
Medium = 2, Medium = 2,
High = 3, High = 3,
Critical = 4, Critical = 4,
} }
impl Default for RiskLevel {
fn default() -> Self {
Self::Low
}
}
#[derive(Debug, Clone, Serialize, Deserialize)] #[derive(Debug, Clone, Serialize, Deserialize)]
pub struct RiskSummary { pub struct RiskSummary {
@ -396,6 +378,7 @@ pub struct TaskError {
} }
#[derive(Debug, Clone, Serialize, Deserialize)] #[derive(Debug, Clone, Serialize, Deserialize)]
#[derive(Default)]
pub struct RollbackState { pub struct RollbackState {
pub available: bool, pub available: bool,
pub steps_rolled_back: Vec<String>, pub steps_rolled_back: Vec<String>,
@ -404,17 +387,6 @@ pub struct RollbackState {
pub completed_at: Option<DateTime<Utc>>, pub completed_at: Option<DateTime<Utc>>,
} }
impl Default for RollbackState {
fn default() -> Self {
Self {
available: false,
steps_rolled_back: Vec::new(),
rollback_data: HashMap::new(),
started_at: None,
completed_at: None,
}
}
}
#[derive(Debug, Clone, Serialize, Deserialize)] #[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TaskSchedule { pub struct TaskSchedule {

View file

@ -354,19 +354,23 @@ impl BasicCompiler {
has_schedule = true; has_schedule = true;
let parts: Vec<&str> = normalized.split('"').collect(); let parts: Vec<&str> = normalized.split('"').collect();
if parts.len() >= 3 { if parts.len() >= 3 {
#[cfg(feature = "tasks")]
{
#[allow(unused_variables, unused_mut)]
let cron = parts[1]; let cron = parts[1];
#[allow(unused_variables, unused_mut)]
let mut conn = self let mut conn = self
.state .state
.conn .conn
.get() .get()
.map_err(|e| format!("Failed to get database connection: {e}"))?; .map_err(|e| format!("Failed to get database connection: {e}"))?;
#[cfg(feature = "tasks")]
if let Err(e) = execute_set_schedule(&mut conn, cron, &script_name, bot_id) { if let Err(e) = execute_set_schedule(&mut conn, cron, &script_name, bot_id) {
log::error!( log::error!(
"Failed to schedule SET SCHEDULE during preprocessing: {}", "Failed to schedule SET SCHEDULE during preprocessing: {}",
e e
); );
} }
}
#[cfg(not(feature = "tasks"))] #[cfg(not(feature = "tasks"))]
log::warn!("SET SCHEDULE requires 'tasks' feature - ignoring"); log::warn!("SET SCHEDULE requires 'tasks' feature - ignoring");
} else { } else {

View file

@ -594,7 +594,7 @@ fn add_bot_to_session(
.map_err(|e| format!("Failed to get bot ID: {e}"))? .map_err(|e| format!("Failed to get bot ID: {e}"))?
} else { } else {
let new_bot_id = Uuid::new_v4(); let new_bot_id = Uuid::new_v4();
let db_name = format!("bot_{}", bot_name.replace('-', "_").replace(' ', "_").to_lowercase()); let db_name = format!("bot_{}", bot_name.replace(['-', ' '], "_").to_lowercase());
diesel::sql_query( diesel::sql_query(
"INSERT INTO bots (id, name, description, is_active, database_name, created_at) "INSERT INTO bots (id, name, description, is_active, database_name, created_at)
VALUES ($1, $2, $3, true, $4, NOW()) VALUES ($1, $2, $3, true, $4, NOW())
@ -608,7 +608,7 @@ fn add_bot_to_session(
.execute(&mut *conn) .execute(&mut *conn)
.map_err(|e| format!("Failed to create bot: {e}"))?; .map_err(|e| format!("Failed to create bot: {e}"))?;
if let Err(e) = create_bot_database(&mut *conn, &db_name) { if let Err(e) = create_bot_database(&mut conn, &db_name) {
log::warn!("Failed to create database for bot {bot_name}: {e}"); log::warn!("Failed to create database for bot {bot_name}: {e}");
} }

View file

@ -9,7 +9,9 @@ use std::sync::Arc;
use uuid::Uuid; use uuid::Uuid;
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)] #[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
#[derive(Default)]
pub enum ReflectionType { pub enum ReflectionType {
#[default]
ConversationQuality, ConversationQuality,
ResponseAccuracy, ResponseAccuracy,
ToolUsage, ToolUsage,
@ -18,11 +20,6 @@ pub enum ReflectionType {
Custom(String), Custom(String),
} }
impl Default for ReflectionType {
fn default() -> Self {
Self::ConversationQuality
}
}
impl From<&str> for ReflectionType { impl From<&str> for ReflectionType {
fn from(s: &str) -> Self { fn from(s: &str) -> Self {

View file

@ -182,7 +182,7 @@ impl ApiToolGenerator {
let mut generated_count = 0; let mut generated_count = 0;
for endpoint in &endpoints { for endpoint in &endpoints {
let bas_content = Self::generate_bas_file(&api_name, endpoint)?; let bas_content = Self::generate_bas_file(api_name, endpoint)?;
let file_path = format!("{}/{}.bas", api_folder, endpoint.operation_id); let file_path = format!("{}/{}.bas", api_folder, endpoint.operation_id);
std::fs::write(&file_path, &bas_content) std::fs::write(&file_path, &bas_content)

View file

@ -103,7 +103,7 @@ pub async fn serve_vendor_file(
let bot_name = state.bucket_name let bot_name = state.bucket_name
.trim_end_matches(".gbai") .trim_end_matches(".gbai")
.to_string(); .to_string();
let sanitized_bot_name = bot_name.to_lowercase().replace(' ', "-").replace('_', "-"); let sanitized_bot_name = bot_name.to_lowercase().replace([' ', '_'], "-");
let bucket = format!("{}.gbai", sanitized_bot_name); let bucket = format!("{}.gbai", sanitized_bot_name);
let key = format!("{}.gblib/vendor/{}", sanitized_bot_name, file_path); let key = format!("{}.gblib/vendor/{}", sanitized_bot_name, file_path);
@ -243,7 +243,7 @@ async fn serve_app_file_internal(state: &AppState, app_name: &str, file_path: &s
let bot_name = state.bucket_name let bot_name = state.bucket_name
.trim_end_matches(".gbai") .trim_end_matches(".gbai")
.to_string(); .to_string();
let sanitized_bot_name = bot_name.to_lowercase().replace(' ', "-").replace('_', "-"); let sanitized_bot_name = bot_name.to_lowercase().replace([' ', '_'], "-");
// MinIO bucket and path: botname.gbai / botname.gbapp/appname/file // MinIO bucket and path: botname.gbai / botname.gbapp/appname/file
let bucket = format!("{}.gbai", sanitized_bot_name); let bucket = format!("{}.gbai", sanitized_bot_name);

View file

@ -12,6 +12,7 @@ use tokio::time::timeout;
use uuid::Uuid; use uuid::Uuid;
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)] #[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
#[derive(Default)]
pub enum SandboxRuntime { pub enum SandboxRuntime {
LXC, LXC,
@ -19,14 +20,10 @@ pub enum SandboxRuntime {
Firecracker, Firecracker,
#[default]
Process, Process,
} }
impl Default for SandboxRuntime {
fn default() -> Self {
Self::Process
}
}
impl From<&str> for SandboxRuntime { impl From<&str> for SandboxRuntime {
fn from(s: &str) -> Self { fn from(s: &str) -> Self {
@ -340,8 +337,8 @@ impl CodeSandbox {
.and_then(|c| c.arg(&code_file)); .and_then(|c| c.arg(&code_file));
match cmd_result { match cmd_result {
Ok(cmd) => cmd.execute_async().await.map_err(|e| std::io::Error::new(std::io::ErrorKind::Other, e.to_string())), Ok(cmd) => cmd.execute_async().await.map_err(|e| std::io::Error::other(e.to_string())),
Err(e) => Err(std::io::Error::new(std::io::ErrorKind::Other, e.to_string())), Err(e) => Err(std::io::Error::other(e.to_string())),
} }
}) })
.await; .await;
@ -409,8 +406,8 @@ impl CodeSandbox {
.and_then(|c| c.args(&args.iter().map(|s| s.as_str()).collect::<Vec<_>>())); .and_then(|c| c.args(&args.iter().map(|s| s.as_str()).collect::<Vec<_>>()));
match cmd_result { match cmd_result {
Ok(cmd) => cmd.execute_async().await.map_err(|e| std::io::Error::new(std::io::ErrorKind::Other, e.to_string())), Ok(cmd) => cmd.execute_async().await.map_err(|e| std::io::Error::other(e.to_string())),
Err(e) => Err(std::io::Error::new(std::io::ErrorKind::Other, e.to_string())), Err(e) => Err(std::io::Error::other(e.to_string())),
} }
}) })
.await; .await;
@ -471,8 +468,8 @@ impl CodeSandbox {
.and_then(|c| c.working_dir(std::path::Path::new(&temp_dir))); .and_then(|c| c.working_dir(std::path::Path::new(&temp_dir)));
match cmd_result { match cmd_result {
Ok(cmd) => cmd.execute_async().await.map_err(|e| std::io::Error::new(std::io::ErrorKind::Other, e.to_string())), Ok(cmd) => cmd.execute_async().await.map_err(|e| std::io::Error::other(e.to_string())),
Err(e) => Err(std::io::Error::new(std::io::ErrorKind::Other, e.to_string())), Err(e) => Err(std::io::Error::other(e.to_string())),
} }
}) })
.await; .await;

View file

@ -3,6 +3,8 @@ use crate::llm::LLMProvider;
use crate::shared::models::UserSession; use crate::shared::models::UserSession;
use crate::shared::state::AppState; use crate::shared::state::AppState;
use log::{debug, info}; use log::{debug, info};
#[cfg(feature = "llm")]
use log::warn;
use rhai::Dynamic; use rhai::Dynamic;
use rhai::Engine; use rhai::Engine;
#[cfg(feature = "llm")] #[cfg(feature = "llm")]

View file

@ -368,7 +368,7 @@ fn parse_due_date(due_date: &str) -> Result<Option<DateTime<Utc>>, String> {
return Ok(Some(now + Duration::days(30))); return Ok(Some(now + Duration::days(30)));
} }
if let Ok(date) = NaiveDate::parse_from_str(&due_date, "%Y-%m-%d") { if let Ok(date) = NaiveDate::parse_from_str(due_date, "%Y-%m-%d") {
if let Some(time) = date.and_hms_opt(0, 0, 0) { if let Some(time) = date.and_hms_opt(0, 0, 0) {
return Ok(Some(time.and_utc())); return Ok(Some(time.and_utc()));
} }

View file

@ -10,21 +10,21 @@ use rhai::Engine;
use std::sync::Arc; use std::sync::Arc;
pub fn register_datetime_functions(state: &Arc<AppState>, user: UserSession, engine: &mut Engine) { pub fn register_datetime_functions(state: &Arc<AppState>, user: UserSession, engine: &mut Engine) {
now::now_keyword(&state, user.clone(), engine); now::now_keyword(state, user.clone(), engine);
now::today_keyword(&state, user.clone(), engine); now::today_keyword(state, user.clone(), engine);
now::time_keyword(&state, user.clone(), engine); now::time_keyword(state, user.clone(), engine);
now::timestamp_keyword(&state, user.clone(), engine); now::timestamp_keyword(state, user.clone(), engine);
extract::year_keyword(&state, user.clone(), engine); extract::year_keyword(state, user.clone(), engine);
extract::month_keyword(&state, user.clone(), engine); extract::month_keyword(state, user.clone(), engine);
extract::day_keyword(&state, user.clone(), engine); extract::day_keyword(state, user.clone(), engine);
extract::hour_keyword(&state, user.clone(), engine); extract::hour_keyword(state, user.clone(), engine);
extract::minute_keyword(&state, user.clone(), engine); extract::minute_keyword(state, user.clone(), engine);
extract::second_keyword(&state, user.clone(), engine); extract::second_keyword(state, user.clone(), engine);
extract::weekday_keyword(&state, user.clone(), engine); extract::weekday_keyword(state, user.clone(), engine);
dateadd::dateadd_keyword(&state, user.clone(), engine); dateadd::dateadd_keyword(state, user.clone(), engine);
datediff::datediff_keyword(&state, user.clone(), engine); datediff::datediff_keyword(state, user.clone(), engine);
extract::format_date_keyword(&state, user.clone(), engine); extract::format_date_keyword(state, user.clone(), engine);
extract::isdate_keyword(&state, user, engine); extract::isdate_keyword(state, user, engine);
debug!("Registered all datetime functions"); debug!("Registered all datetime functions");
} }

View file

@ -1312,7 +1312,7 @@ async fn execute_compress(
.and_then(|n| n.to_str()) .and_then(|n| n.to_str())
.unwrap_or(file_path); .unwrap_or(file_path);
zip.start_file(file_name, options.clone())?; zip.start_file(file_name, options)?;
zip.write_all(content.as_bytes())?; zip.write_all(content.as_bytes())?;
} }

View file

@ -52,7 +52,9 @@ pub struct ApprovalRequest {
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)] #[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
#[serde(rename_all = "lowercase")] #[serde(rename_all = "lowercase")]
#[derive(Default)]
pub enum ApprovalStatus { pub enum ApprovalStatus {
#[default]
Pending, Pending,
Approved, Approved,
@ -68,11 +70,6 @@ pub enum ApprovalStatus {
Error, Error,
} }
impl Default for ApprovalStatus {
fn default() -> Self {
Self::Pending
}
}
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)] #[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
#[serde(rename_all = "lowercase")] #[serde(rename_all = "lowercase")]
@ -86,7 +83,9 @@ pub enum ApprovalDecision {
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)] #[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
#[serde(rename_all = "lowercase")] #[serde(rename_all = "lowercase")]
#[derive(Default)]
pub enum ApprovalChannel { pub enum ApprovalChannel {
#[default]
Email, Email,
Sms, Sms,
Mobile, Mobile,
@ -96,11 +95,6 @@ pub enum ApprovalChannel {
InApp, InApp,
} }
impl Default for ApprovalChannel {
fn default() -> Self {
Self::Email
}
}
impl std::fmt::Display for ApprovalChannel { impl std::fmt::Display for ApprovalChannel {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
@ -205,6 +199,19 @@ pub struct ApprovalConfig {
pub approval_base_url: Option<String>, pub approval_base_url: Option<String>,
} }
pub struct CreateApprovalRequestParams<'a> {
pub bot_id: Uuid,
pub session_id: Uuid,
pub initiated_by: Uuid,
pub approval_type: &'a str,
pub channel: ApprovalChannel,
pub recipient: &'a str,
pub context: serde_json::Value,
pub message: &'a str,
pub timeout_seconds: Option<u64>,
pub default_action: Option<ApprovalDecision>,
}
impl Default for ApprovalConfig { impl Default for ApprovalConfig {
fn default() -> Self { fn default() -> Self {
Self { Self {
@ -261,33 +268,24 @@ impl ApprovalManager {
pub fn create_request( pub fn create_request(
&self, &self,
bot_id: Uuid, params: CreateApprovalRequestParams<'_>,
session_id: Uuid,
initiated_by: Uuid,
approval_type: &str,
channel: ApprovalChannel,
recipient: &str,
context: serde_json::Value,
message: &str,
timeout_seconds: Option<u64>,
default_action: Option<ApprovalDecision>,
) -> ApprovalRequest { ) -> ApprovalRequest {
let timeout = timeout_seconds.unwrap_or(self.config.default_timeout); let timeout = params.timeout_seconds.unwrap_or(self.config.default_timeout);
let now = Utc::now(); let now = Utc::now();
ApprovalRequest { ApprovalRequest {
id: Uuid::new_v4(), id: Uuid::new_v4(),
bot_id, bot_id: params.bot_id,
session_id, session_id: params.session_id,
initiated_by, initiated_by: params.initiated_by,
approval_type: approval_type.to_string(), approval_type: params.approval_type.to_string(),
status: ApprovalStatus::Pending, status: ApprovalStatus::Pending,
channel, channel: params.channel,
recipient: recipient.to_string(), recipient: params.recipient.to_string(),
context, context: params.context,
message: message.to_string(), message: params.message.to_string(),
timeout_seconds: timeout, timeout_seconds: timeout,
default_action, default_action: params.default_action,
current_level: 1, current_level: 1,
total_levels: 1, total_levels: 1,
created_at: now, created_at: now,

View file

@ -523,7 +523,7 @@ fn parse_csv_line(line: &str) -> Vec<String> {
fn escape_csv_value(value: &str) -> String { fn escape_csv_value(value: &str) -> String {
if value.contains(',') || value.contains('"') || value.contains('\n') { if value.contains(',') || value.contains('"') || value.contains('\n') {
format!("{}", value.replace('"', "")) value.replace('"', "").to_string()
} else { } else {
value.to_string() value.to_string()
} }

View file

@ -12,26 +12,26 @@ use rhai::Engine;
use std::sync::Arc; use std::sync::Arc;
pub fn register_math_functions(state: &Arc<AppState>, user: UserSession, engine: &mut Engine) { pub fn register_math_functions(state: &Arc<AppState>, user: UserSession, engine: &mut Engine) {
abs::abs_keyword(&state, user.clone(), engine); abs::abs_keyword(state, user.clone(), engine);
round::round_keyword(&state, user.clone(), engine); round::round_keyword(state, user.clone(), engine);
basic_math::int_keyword(&state, user.clone(), engine); basic_math::int_keyword(state, user.clone(), engine);
basic_math::floor_keyword(&state, user.clone(), engine); basic_math::floor_keyword(state, user.clone(), engine);
basic_math::ceil_keyword(&state, user.clone(), engine); basic_math::ceil_keyword(state, user.clone(), engine);
basic_math::max_keyword(&state, user.clone(), engine); basic_math::max_keyword(state, user.clone(), engine);
basic_math::min_keyword(&state, user.clone(), engine); basic_math::min_keyword(state, user.clone(), engine);
basic_math::mod_keyword(&state, user.clone(), engine); basic_math::mod_keyword(state, user.clone(), engine);
basic_math::sgn_keyword(&state, user.clone(), engine); basic_math::sgn_keyword(state, user.clone(), engine);
basic_math::sqrt_keyword(&state, user.clone(), engine); basic_math::sqrt_keyword(state, user.clone(), engine);
basic_math::pow_keyword(&state, user.clone(), engine); basic_math::pow_keyword(state, user.clone(), engine);
random::random_keyword(&state, user.clone(), engine); random::random_keyword(state, user.clone(), engine);
trig::sin_keyword(&state, user.clone(), engine); trig::sin_keyword(state, user.clone(), engine);
trig::cos_keyword(&state, user.clone(), engine); trig::cos_keyword(state, user.clone(), engine);
trig::tan_keyword(&state, user.clone(), engine); trig::tan_keyword(state, user.clone(), engine);
trig::log_keyword(&state, user.clone(), engine); trig::log_keyword(state, user.clone(), engine);
trig::exp_keyword(&state, user.clone(), engine); trig::exp_keyword(state, user.clone(), engine);
trig::pi_keyword(&state, user.clone(), engine); trig::pi_keyword(state, user.clone(), engine);
aggregate::sum_keyword(&state, user.clone(), engine); aggregate::sum_keyword(state, user.clone(), engine);
aggregate::avg_keyword(&state, user, engine); aggregate::avg_keyword(state, user, engine);
debug!("Registered all math functions"); debug!("Registered all math functions");
} }

View file

@ -132,7 +132,9 @@ impl Default for McpConnection {
} }
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)] #[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
#[derive(Default)]
pub enum ConnectionType { pub enum ConnectionType {
#[default]
Http, Http,
WebSocket, WebSocket,
@ -146,11 +148,6 @@ pub enum ConnectionType {
Tcp, Tcp,
} }
impl Default for ConnectionType {
fn default() -> Self {
Self::Http
}
}
#[derive(Debug, Clone, Serialize, Deserialize)] #[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TlsConfig { pub struct TlsConfig {
@ -178,7 +175,9 @@ impl Default for McpAuth {
} }
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)] #[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
#[derive(Default)]
pub enum McpAuthType { pub enum McpAuthType {
#[default]
None, None,
ApiKey, ApiKey,
Bearer, Bearer,
@ -188,14 +187,11 @@ pub enum McpAuthType {
Custom(String), Custom(String),
} }
impl Default for McpAuthType {
fn default() -> Self {
Self::None
}
}
#[derive(Debug, Clone, Serialize, Deserialize)] #[derive(Debug, Clone, Serialize, Deserialize)]
#[derive(Default)]
pub enum McpCredentials { pub enum McpCredentials {
#[default]
None, None,
ApiKey { ApiKey {
header_name: String, header_name: String,
@ -221,11 +217,6 @@ pub enum McpCredentials {
Custom(HashMap<String, String>), Custom(HashMap<String, String>),
} }
impl Default for McpCredentials {
fn default() -> Self {
Self::None
}
}
#[derive(Debug, Clone, Serialize, Deserialize)] #[derive(Debug, Clone, Serialize, Deserialize)]
pub struct McpTool { pub struct McpTool {
@ -251,19 +242,16 @@ pub struct McpTool {
} }
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)] #[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
#[derive(Default)]
pub enum ToolRiskLevel { pub enum ToolRiskLevel {
Safe, Safe,
#[default]
Low, Low,
Medium, Medium,
High, High,
Critical, Critical,
} }
impl Default for ToolRiskLevel {
fn default() -> Self {
Self::Low
}
}
#[derive(Debug, Clone, Serialize, Deserialize, Default)] #[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct McpCapabilities { pub struct McpCapabilities {
@ -283,8 +271,10 @@ pub struct McpCapabilities {
} }
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)] #[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
#[derive(Default)]
pub enum McpServerStatus { pub enum McpServerStatus {
Active, Active,
#[default]
Inactive, Inactive,
Connecting, Connecting,
Error(String), Error(String),
@ -292,13 +282,9 @@ pub enum McpServerStatus {
Unknown, Unknown,
} }
impl Default for McpServerStatus {
fn default() -> Self {
Self::Inactive
}
}
#[derive(Debug, Clone, Serialize, Deserialize)] #[derive(Debug, Clone, Serialize, Deserialize)]
#[derive(Default)]
pub struct HealthStatus { pub struct HealthStatus {
pub healthy: bool, pub healthy: bool,
pub last_check: Option<DateTime<Utc>>, pub last_check: Option<DateTime<Utc>>,
@ -307,17 +293,6 @@ pub struct HealthStatus {
pub consecutive_failures: i32, pub consecutive_failures: i32,
} }
impl Default for HealthStatus {
fn default() -> Self {
Self {
healthy: false,
last_check: None,
response_time_ms: None,
error_message: None,
consecutive_failures: 0,
}
}
}
#[derive(Debug, Clone, Serialize, Deserialize)] #[derive(Debug, Clone, Serialize, Deserialize)]
pub struct McpRequest { pub struct McpRequest {

View file

@ -1,16 +1,23 @@
// ===== CORE KEYWORDS (always available) ===== // ===== CORE KEYWORDS (always available) =====
#[cfg(feature = "chat")]
pub mod add_bot; pub mod add_bot;
#[cfg(feature = "chat")]
pub mod add_member; pub mod add_member;
#[cfg(feature = "chat")]
pub mod add_suggestion; pub mod add_suggestion;
pub mod agent_reflection; pub mod agent_reflection;
#[cfg(feature = "llm")]
pub mod ai_tools; pub mod ai_tools;
#[cfg(feature = "automation")]
pub mod api_tool_generator; pub mod api_tool_generator;
pub mod app_server; pub mod app_server;
pub mod arrays; pub mod arrays;
pub mod bot_memory; pub mod bot_memory;
pub mod clear_tools; pub mod clear_tools;
#[cfg(feature = "automation")]
pub mod code_sandbox; pub mod code_sandbox;
pub mod core_functions; pub mod core_functions;
#[cfg(feature = "people")]
pub mod crm; pub mod crm;
pub mod data_operations; pub mod data_operations;
pub mod datetime; pub mod datetime;
@ -18,6 +25,7 @@ pub mod db_api;
pub mod errors; pub mod errors;
pub mod find; pub mod find;
pub mod first; pub mod first;
#[cfg(feature = "billing")]
pub mod products; pub mod products;
pub mod search; pub mod search;
pub mod for_next; pub mod for_next;
@ -32,14 +40,18 @@ pub mod llm_keyword;
#[cfg(feature = "llm")] #[cfg(feature = "llm")]
pub mod llm_macros; pub mod llm_macros;
pub mod math; pub mod math;
#[cfg(feature = "automation")]
pub mod mcp_client; pub mod mcp_client;
#[cfg(feature = "automation")]
pub mod mcp_directory; pub mod mcp_directory;
pub mod messaging; pub mod messaging;
pub mod on; pub mod on;
#[cfg(feature = "automation")]
pub mod on_form_submit; pub mod on_form_submit;
pub mod print; pub mod print;
pub mod procedures; pub mod procedures;
pub mod qrcode; pub mod qrcode;
#[cfg(feature = "security")]
pub mod security_protection; pub mod security_protection;
pub mod set; pub mod set;
pub mod set_context; pub mod set_context;
@ -55,6 +67,7 @@ pub mod user_memory;
pub mod validation; pub mod validation;
pub mod wait; pub mod wait;
pub mod web_data; pub mod web_data;
#[cfg(feature = "automation")]
pub mod webhook; pub mod webhook;
// ===== CALENDAR FEATURE KEYWORDS ===== // ===== CALENDAR FEATURE KEYWORDS =====
@ -79,7 +92,7 @@ pub mod set_schedule;
// ===== SOCIAL FEATURE KEYWORDS ===== // ===== SOCIAL FEATURE KEYWORDS =====
#[cfg(feature = "social")] #[cfg(feature = "social")]
pub mod post_to;
#[cfg(feature = "social")] #[cfg(feature = "social")]
pub mod social; pub mod social;
#[cfg(feature = "social")] #[cfg(feature = "social")]
@ -149,13 +162,16 @@ pub mod create_site;
pub use app_server::configure_app_server_routes; pub use app_server::configure_app_server_routes;
pub use db_api::configure_db_routes; pub use db_api::configure_db_routes;
#[cfg(feature = "automation")]
pub use mcp_client::{McpClient, McpRequest, McpResponse, McpServer, McpTool}; pub use mcp_client::{McpClient, McpRequest, McpResponse, McpServer, McpTool};
#[cfg(feature = "security")]
pub use security_protection::{ pub use security_protection::{
security_get_report, security_hardening_score, security_install_tool, security_run_scan, security_get_report, security_hardening_score, security_install_tool, security_run_scan,
security_service_is_running, security_start_service, security_stop_service, security_service_is_running, security_start_service, security_stop_service,
security_tool_is_installed, security_tool_status, security_update_definitions, security_tool_is_installed, security_tool_status, security_update_definitions,
SecurityScanResult, SecurityToolResult, SecurityScanResult, SecurityToolResult,
}; };
#[cfg(feature = "automation")]
pub use mcp_directory::{McpDirectoryScanResult, McpDirectoryScanner, McpServerConfig}; pub use mcp_directory::{McpDirectoryScanResult, McpDirectoryScanner, McpServerConfig};
pub use table_access::{ pub use table_access::{
check_field_write_access, check_table_access, filter_fields_by_role, load_table_access_info, check_field_write_access, check_table_access, filter_fields_by_role, load_table_access_info,

View file

@ -431,7 +431,7 @@ fn get_primary_text_column(conn: &mut PgConnection, table_name: &str) -> Result<
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use super::*;
#[test] #[test]
fn test_sanitize_search_query() { fn test_sanitize_search_query() {

View file

@ -443,7 +443,7 @@ impl SynchronizeService {
} }
} }
if body.is_object() && !body.as_object().map_or(true, |o| o.is_empty()) { if body.is_object() && !body.as_object().is_none_or(|o| o.is_empty()) {
return Ok(vec![body.clone()]); return Ok(vec![body.clone()]);
} }

View file

@ -64,18 +64,15 @@ pub struct Attendant {
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)] #[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
#[serde(rename_all = "snake_case")] #[serde(rename_all = "snake_case")]
#[derive(Default)]
pub enum AttendantStatus { pub enum AttendantStatus {
Online, Online,
Busy, Busy,
Away, Away,
#[default]
Offline, Offline,
} }
impl Default for AttendantStatus {
fn default() -> Self {
Self::Offline
}
}
pub fn is_crm_enabled(bot_id: Uuid, work_path: &str) -> bool { pub fn is_crm_enabled(bot_id: Uuid, work_path: &str) -> bool {
let config_path = PathBuf::from(work_path) let config_path = PathBuf::from(work_path)

View file

@ -14,19 +14,19 @@ pub fn register_validation_functions(
user: UserSession, user: UserSession,
engine: &mut Engine, engine: &mut Engine,
) { ) {
str_val::val_keyword(&state, user.clone(), engine); str_val::val_keyword(state, user.clone(), engine);
str_val::str_keyword(&state, user.clone(), engine); str_val::str_keyword(state, user.clone(), engine);
str_val::cint_keyword(&state, user.clone(), engine); str_val::cint_keyword(state, user.clone(), engine);
str_val::cdbl_keyword(&state, user.clone(), engine); str_val::cdbl_keyword(state, user.clone(), engine);
isnull::isnull_keyword(&state, user.clone(), engine); isnull::isnull_keyword(state, user.clone(), engine);
isempty::isempty_keyword(&state, user.clone(), engine); isempty::isempty_keyword(state, user.clone(), engine);
typeof_check::typeof_keyword(&state, user.clone(), engine); typeof_check::typeof_keyword(state, user.clone(), engine);
typeof_check::isarray_keyword(&state, user.clone(), engine); typeof_check::isarray_keyword(state, user.clone(), engine);
typeof_check::isnumber_keyword(&state, user.clone(), engine); typeof_check::isnumber_keyword(state, user.clone(), engine);
typeof_check::isstring_keyword(&state, user.clone(), engine); typeof_check::isstring_keyword(state, user.clone(), engine);
typeof_check::isbool_keyword(&state, user.clone(), engine); typeof_check::isbool_keyword(state, user.clone(), engine);
nvl_iif::nvl_keyword(&state, user.clone(), engine); nvl_iif::nvl_keyword(state, user.clone(), engine);
nvl_iif::iif_keyword(&state, user, engine); nvl_iif::iif_keyword(state, user, engine);
debug!("Registered all validation functions"); debug!("Registered all validation functions");
} }

View file

@ -9,8 +9,8 @@ pub fn wait_keyword(_state: &AppState, _user: UserSession, engine: &mut Engine)
.register_custom_syntax(["WAIT", "$expr$"], false, move |context, inputs| { .register_custom_syntax(["WAIT", "$expr$"], false, move |context, inputs| {
let seconds = context.eval_expression_tree(&inputs[0])?; let seconds = context.eval_expression_tree(&inputs[0])?;
let duration_secs = if seconds.is::<i64>() { let duration_secs = if seconds.is::<i64>() {
let val = seconds.cast::<i64>() as f64;
val seconds.cast::<i64>() as f64
} else if seconds.is::<f64>() { } else if seconds.is::<f64>() {
seconds.cast::<f64>() seconds.cast::<f64>()
} else { } else {

View file

@ -1,3 +1,4 @@
#[cfg(feature = "chat")]
use crate::basic::keywords::add_suggestion::clear_suggestions_keyword; use crate::basic::keywords::add_suggestion::clear_suggestions_keyword;
use crate::basic::keywords::set_user::set_user_keyword; use crate::basic::keywords::set_user::set_user_keyword;
use crate::basic::keywords::string_functions::register_string_functions; use crate::basic::keywords::string_functions::register_string_functions;
@ -21,9 +22,13 @@ struct ParamConfigRow {
} }
// ===== CORE KEYWORD IMPORTS (always available) ===== // ===== CORE KEYWORD IMPORTS (always available) =====
#[cfg(feature = "chat")]
use self::keywords::add_bot::register_bot_keywords; use self::keywords::add_bot::register_bot_keywords;
#[cfg(feature = "chat")]
use self::keywords::add_member::add_member_keyword; use self::keywords::add_member::add_member_keyword;
#[cfg(feature = "chat")]
use self::keywords::add_suggestion::add_suggestion_keyword; use self::keywords::add_suggestion::add_suggestion_keyword;
#[cfg(feature = "llm")]
use self::keywords::ai_tools::register_ai_tools_keywords; use self::keywords::ai_tools::register_ai_tools_keywords;
use self::keywords::bot_memory::{get_bot_memory_keyword, set_bot_memory_keyword}; use self::keywords::bot_memory::{get_bot_memory_keyword, set_bot_memory_keyword};
use self::keywords::clear_tools::clear_tools_keyword; use self::keywords::clear_tools::clear_tools_keyword;
@ -31,6 +36,7 @@ use self::keywords::core_functions::register_core_functions;
use self::keywords::data_operations::register_data_operations; use self::keywords::data_operations::register_data_operations;
use self::keywords::find::find_keyword; use self::keywords::find::find_keyword;
use self::keywords::search::search_keyword; use self::keywords::search::search_keyword;
#[cfg(feature = "billing")]
use self::keywords::products::products_keyword; use self::keywords::products::products_keyword;
use self::keywords::first::first_keyword; use self::keywords::first::first_keyword;
use self::keywords::for_next::for_keyword; use self::keywords::for_next::for_keyword;
@ -39,11 +45,13 @@ use self::keywords::get::get_keyword;
use self::keywords::hear_talk::{hear_keyword, talk_keyword}; use self::keywords::hear_talk::{hear_keyword, talk_keyword};
use self::keywords::http_operations::register_http_operations; use self::keywords::http_operations::register_http_operations;
use self::keywords::last::last_keyword; use self::keywords::last::last_keyword;
#[cfg(feature = "automation")]
use self::keywords::on_form_submit::on_form_submit_keyword; use self::keywords::on_form_submit::on_form_submit_keyword;
use self::keywords::switch_case::preprocess_switch; use self::keywords::switch_case::preprocess_switch;
use self::keywords::use_tool::use_tool_keyword; use self::keywords::use_tool::use_tool_keyword;
use self::keywords::use_website::{clear_websites_keyword, use_website_keyword}; use self::keywords::use_website::{clear_websites_keyword, use_website_keyword};
use self::keywords::web_data::register_web_data_keywords; use self::keywords::web_data::register_web_data_keywords;
#[cfg(feature = "automation")]
use self::keywords::webhook::webhook_keyword; use self::keywords::webhook::webhook_keyword;
#[cfg(feature = "llm")] #[cfg(feature = "llm")]
use self::keywords::llm_keyword::llm_keyword; use self::keywords::llm_keyword::llm_keyword;
@ -128,6 +136,7 @@ impl ScriptService {
get_bot_memory_keyword(state.clone(), user.clone(), &mut engine); get_bot_memory_keyword(state.clone(), user.clone(), &mut engine);
find_keyword(&state, user.clone(), &mut engine); find_keyword(&state, user.clone(), &mut engine);
search_keyword(&state, user.clone(), &mut engine); search_keyword(&state, user.clone(), &mut engine);
#[cfg(feature = "billing")]
products_keyword(&state, user.clone(), &mut engine); products_keyword(&state, user.clone(), &mut engine);
for_keyword(&state, user.clone(), &mut engine); for_keyword(&state, user.clone(), &mut engine);
first_keyword(&mut engine); first_keyword(&mut engine);
@ -144,13 +153,17 @@ impl ScriptService {
talk_keyword(state.clone(), user.clone(), &mut engine); talk_keyword(state.clone(), user.clone(), &mut engine);
set_context_keyword(state.clone(), user.clone(), &mut engine); set_context_keyword(state.clone(), user.clone(), &mut engine);
set_user_keyword(state.clone(), user.clone(), &mut engine); set_user_keyword(state.clone(), user.clone(), &mut engine);
#[cfg(feature = "chat")]
clear_suggestions_keyword(state.clone(), user.clone(), &mut engine); clear_suggestions_keyword(state.clone(), user.clone(), &mut engine);
use_tool_keyword(state.clone(), user.clone(), &mut engine); use_tool_keyword(state.clone(), user.clone(), &mut engine);
clear_tools_keyword(state.clone(), user.clone(), &mut engine); clear_tools_keyword(state.clone(), user.clone(), &mut engine);
use_website_keyword(state.clone(), user.clone(), &mut engine); use_website_keyword(state.clone(), user.clone(), &mut engine);
clear_websites_keyword(state.clone(), user.clone(), &mut engine); clear_websites_keyword(state.clone(), user.clone(), &mut engine);
#[cfg(feature = "chat")]
add_suggestion_keyword(state.clone(), user.clone(), &mut engine); add_suggestion_keyword(state.clone(), user.clone(), &mut engine);
#[cfg(feature = "chat")]
add_member_keyword(state.clone(), user.clone(), &mut engine); add_member_keyword(state.clone(), user.clone(), &mut engine);
#[cfg(feature = "chat")]
register_bot_keywords(&state, &user, &mut engine); register_bot_keywords(&state, &user, &mut engine);
keywords::universal_messaging::register_universal_messaging( keywords::universal_messaging::register_universal_messaging(
state.clone(), state.clone(),
@ -161,8 +174,11 @@ impl ScriptService {
switch_keyword(&state, user.clone(), &mut engine); switch_keyword(&state, user.clone(), &mut engine);
register_http_operations(state.clone(), user.clone(), &mut engine); register_http_operations(state.clone(), user.clone(), &mut engine);
register_data_operations(state.clone(), user.clone(), &mut engine); register_data_operations(state.clone(), user.clone(), &mut engine);
#[cfg(feature = "automation")]
webhook_keyword(&state, user.clone(), &mut engine); webhook_keyword(&state, user.clone(), &mut engine);
#[cfg(feature = "automation")]
on_form_submit_keyword(state.clone(), user.clone(), &mut engine); on_form_submit_keyword(state.clone(), user.clone(), &mut engine);
#[cfg(feature = "llm")]
register_ai_tools_keywords(state.clone(), user.clone(), &mut engine); register_ai_tools_keywords(state.clone(), user.clone(), &mut engine);
register_web_data_keywords(state.clone(), user.clone(), &mut engine); register_web_data_keywords(state.clone(), user.clone(), &mut engine);
register_core_functions(state.clone(), user.clone(), &mut engine); register_core_functions(state.clone(), user.clone(), &mut engine);

View file

@ -452,23 +452,7 @@ pub async fn daily_snapshot_job(
mod tests { mod tests {
use super::*; use super::*;
#[test]
fn test_usage_metering_service_new() {
let service = UsageMeteringService::new();
assert_eq!(service.aggregation_interval(), 3600);
}
#[test]
fn test_usage_metering_service_with_interval() {
let service = UsageMeteringService::with_aggregation_interval(1800);
assert_eq!(service.aggregation_interval(), 1800);
}
#[test]
fn test_usage_metering_service_default() {
let service = UsageMeteringService::default();
assert_eq!(service.aggregation_interval(), 3600);
}
#[tokio::test] #[tokio::test]
async fn test_record_event() { async fn test_record_event() {

View file

@ -494,24 +494,7 @@ mod tests {
} }
} }
#[test]
fn test_quota_manager_new() {
let manager = QuotaManager::new();
assert_eq!(manager.alert_thresholds, vec![80.0, 90.0, 100.0]);
}
#[test]
fn test_quota_manager_with_thresholds() {
let thresholds = vec![50.0, 75.0, 90.0];
let manager = QuotaManager::with_thresholds(thresholds.clone());
assert_eq!(manager.alert_thresholds, thresholds);
}
#[test]
fn test_quota_manager_default() {
let manager = QuotaManager::default();
assert_eq!(manager.alert_thresholds, vec![80.0, 90.0, 100.0]);
}
#[tokio::test] #[tokio::test]
async fn test_set_and_get_quotas() { async fn test_set_and_get_quotas() {

View file

@ -7,8 +7,10 @@ use tokio::sync::RwLock;
use uuid::Uuid; use uuid::Uuid;
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)] #[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
#[derive(Default)]
pub enum InsightFaceModel { pub enum InsightFaceModel {
#[serde(rename = "buffalo_l")] #[serde(rename = "buffalo_l")]
#[default]
BuffaloL, BuffaloL,
#[serde(rename = "buffalo_m")] #[serde(rename = "buffalo_m")]
BuffaloM, BuffaloM,
@ -26,11 +28,6 @@ pub enum InsightFaceModel {
W600kMbf, W600kMbf,
} }
impl Default for InsightFaceModel {
fn default() -> Self {
Self::BuffaloL
}
}
#[derive(Debug, Clone, Serialize, Deserialize)] #[derive(Debug, Clone, Serialize, Deserialize)]
pub struct InsightFaceConfig { pub struct InsightFaceConfig {
@ -235,17 +232,14 @@ pub struct FaceIndex {
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)] #[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
#[serde(rename_all = "snake_case")] #[serde(rename_all = "snake_case")]
#[derive(Default)]
pub enum DistanceMetric { pub enum DistanceMetric {
#[default]
Cosine, Cosine,
Euclidean, Euclidean,
DotProduct, DotProduct,
} }
impl Default for DistanceMetric {
fn default() -> Self {
Self::Cosine
}
}
#[derive(Debug, Clone, Serialize, Deserialize)] #[derive(Debug, Clone, Serialize, Deserialize)]
pub struct IndexedFace { pub struct IndexedFace {

View file

@ -469,8 +469,8 @@ impl OpenCvFaceDetector {
return Err(OpenCvError::InvalidImage("Image data too small".to_string())); return Err(OpenCvError::InvalidImage("Image data too small".to_string()));
} }
if image_data.starts_with(&[0x89, 0x50, 0x4E, 0x47]) { if image_data.starts_with(&[0x89, 0x50, 0x4E, 0x47])
if image_data.len() >= 24 { && image_data.len() >= 24 {
let width = u32::from_be_bytes([ let width = u32::from_be_bytes([
image_data[16], image_data[16],
image_data[17], image_data[17],
@ -488,7 +488,6 @@ impl OpenCvFaceDetector {
height, height,
}); });
} }
}
if image_data.starts_with(&[0xFF, 0xD8, 0xFF]) { if image_data.starts_with(&[0xFF, 0xD8, 0xFF]) {
return Ok(ImageInfo { return Ok(ImageInfo {
@ -497,8 +496,8 @@ impl OpenCvFaceDetector {
}); });
} }
if image_data.starts_with(b"BM") { if image_data.starts_with(b"BM")
if image_data.len() >= 26 { && image_data.len() >= 26 {
let width = i32::from_le_bytes([ let width = i32::from_le_bytes([
image_data[18], image_data[18],
image_data[19], image_data[19],
@ -517,7 +516,6 @@ impl OpenCvFaceDetector {
height, height,
}); });
} }
}
Ok(ImageInfo { Ok(ImageInfo {
width: 640, width: 640,

View file

@ -98,7 +98,9 @@ pub enum PythonResponse {
} }
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)] #[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
#[derive(Default)]
pub enum PythonModel { pub enum PythonModel {
#[default]
MediaPipe, MediaPipe,
DeepFace, DeepFace,
FaceRecognition, FaceRecognition,
@ -118,26 +120,28 @@ impl PythonModel {
Self::OpenCV => "opencv", Self::OpenCV => "opencv",
} }
} }
}
pub fn from_str(s: &str) -> Option<Self> { impl std::str::FromStr for PythonModel {
type Err = ();
fn from_str(s: &str) -> Result<Self, Self::Err> {
match s.to_lowercase().as_str() { match s.to_lowercase().as_str() {
"mediapipe" => Some(Self::MediaPipe), "mediapipe" => Ok(Self::MediaPipe),
"deepface" => Some(Self::DeepFace), "deepface" => Ok(Self::DeepFace),
"face_recognition" => Some(Self::FaceRecognition), "face_recognition" => Ok(Self::FaceRecognition),
"insightface" => Some(Self::InsightFace), "insightface" => Ok(Self::InsightFace),
"dlib" => Some(Self::Dlib), "dlib" => Ok(Self::Dlib),
"opencv" => Some(Self::OpenCV), "opencv" => Ok(Self::OpenCV),
_ => None, _ => Err(()),
} }
} }
} }
impl Default for PythonModel { impl PythonModel {
fn default() -> Self {
Self::MediaPipe
}
} }
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub struct PythonBridgeConfig { pub struct PythonBridgeConfig {
pub python_path: String, pub python_path: String,
@ -573,9 +577,9 @@ mod tests {
#[test] #[test]
fn test_python_model_from_str() { fn test_python_model_from_str() {
assert_eq!(PythonModel::from_str("mediapipe"), Some(PythonModel::MediaPipe)); assert_eq!("mediapipe".parse::<PythonModel>(), Ok(PythonModel::MediaPipe));
assert_eq!(PythonModel::from_str("deepface"), Some(PythonModel::DeepFace)); assert_eq!("deepface".parse::<PythonModel>(), Ok(PythonModel::DeepFace));
assert_eq!(PythonModel::from_str("unknown"), None); assert!("unknown".parse::<PythonModel>().is_err());
} }
#[test] #[test]
@ -607,9 +611,10 @@ mod tests {
} }
#[test] #[test]
fn test_command_serialization() { fn test_command_serialization() -> Result<(), Box<dyn std::error::Error>> {
let cmd = PythonCommand::Health; let cmd = PythonCommand::Health;
let json = serde_json::to_string(&cmd).unwrap(); let json = serde_json::to_string(&cmd)?;
assert!(json.contains("health")); assert!(json.contains("health"));
Ok(())
} }
} }

View file

@ -553,6 +553,12 @@ pub struct RekognitionService {
face_details: Arc<RwLock<HashMap<String, RekognitionFace>>>, face_details: Arc<RwLock<HashMap<String, RekognitionFace>>>,
} }
impl Default for RekognitionService {
fn default() -> Self {
Self::new()
}
}
impl RekognitionService { impl RekognitionService {
pub fn new() -> Self { pub fn new() -> Self {
Self { Self {

View file

@ -249,7 +249,7 @@ impl ChannelProvider for BlueskyProvider {
let rkey = response let rkey = response
.uri .uri
.split('/') .split('/')
.last() .next_back()
.unwrap_or("") .unwrap_or("")
.to_string(); .to_string();

View file

@ -143,7 +143,7 @@ impl RedditChannel {
pub async fn exchange_code(&self, code: &str) -> Result<RedditTokens, RedditError> { pub async fn exchange_code(&self, code: &str) -> Result<RedditTokens, RedditError> {
let response = self let response = self
.http_client .http_client
.post(&format!("{}/access_token", self.oauth_url)) .post(format!("{}/access_token", self.oauth_url))
.basic_auth(&self.config.client_id, Some(&self.config.client_secret)) .basic_auth(&self.config.client_id, Some(&self.config.client_secret))
.form(&[ .form(&[
("grant_type", "authorization_code"), ("grant_type", "authorization_code"),
@ -187,7 +187,7 @@ impl RedditChannel {
let response = self let response = self
.http_client .http_client
.post(&format!("{}/access_token", self.oauth_url)) .post(format!("{}/access_token", self.oauth_url))
.basic_auth(&self.config.client_id, Some(&self.config.client_secret)) .basic_auth(&self.config.client_id, Some(&self.config.client_secret))
.form(&[ .form(&[
("grant_type", "refresh_token"), ("grant_type", "refresh_token"),
@ -234,7 +234,7 @@ impl RedditChannel {
let response = self let response = self
.http_client .http_client
.post(&format!("{}/access_token", self.oauth_url)) .post(format!("{}/access_token", self.oauth_url))
.basic_auth(&self.config.client_id, Some(&self.config.client_secret)) .basic_auth(&self.config.client_id, Some(&self.config.client_secret))
.form(&[ .form(&[
("grant_type", "password"), ("grant_type", "password"),
@ -286,7 +286,7 @@ impl RedditChannel {
let response = self let response = self
.http_client .http_client
.get(&format!("{}/api/v1/me", self.base_url)) .get(format!("{}/api/v1/me", self.base_url))
.bearer_auth(&token) .bearer_auth(&token)
.send() .send()
.await .await
@ -346,7 +346,7 @@ impl RedditChannel {
let response = self let response = self
.http_client .http_client
.post(&format!("{}/api/submit", self.base_url)) .post(format!("{}/api/submit", self.base_url))
.bearer_auth(&token) .bearer_auth(&token)
.form(&params) .form(&params)
.send() .send()
@ -388,7 +388,7 @@ impl RedditChannel {
let response = self let response = self
.http_client .http_client
.post(&format!("{}/api/comment", self.base_url)) .post(format!("{}/api/comment", self.base_url))
.bearer_auth(&token) .bearer_auth(&token)
.form(&[ .form(&[
("api_type", "json"), ("api_type", "json"),
@ -449,7 +449,7 @@ impl RedditChannel {
let response = self let response = self
.http_client .http_client
.get(&format!("{}/api/info?id={}", self.base_url, id)) .get(format!("{}/api/info?id={}", self.base_url, id))
.bearer_auth(&token) .bearer_auth(&token)
.send() .send()
.await .await
@ -471,7 +471,7 @@ impl RedditChannel {
.into_iter() .into_iter()
.next() .next()
.and_then(|c| c.data) .and_then(|c| c.data)
.ok_or_else(|| RedditError::PostNotFound)?; .ok_or(RedditError::PostNotFound)?;
Ok(RedditPost { Ok(RedditPost {
id: post_data.id.unwrap_or_default(), id: post_data.id.unwrap_or_default(),
@ -494,7 +494,7 @@ impl RedditChannel {
let response = self let response = self
.http_client .http_client
.get(&format!("{}/r/{}/about", self.base_url, name)) .get(format!("{}/r/{}/about", self.base_url, name))
.bearer_auth(&token) .bearer_auth(&token)
.send() .send()
.await .await
@ -541,7 +541,7 @@ impl RedditChannel {
let response = self let response = self
.http_client .http_client
.get(&format!( .get(format!(
"{}/r/{}/{}?limit={}", "{}/r/{}/{}?limit={}",
self.base_url, subreddit, sort_str, limit self.base_url, subreddit, sort_str, limit
)) ))
@ -595,7 +595,7 @@ impl RedditChannel {
let response = self let response = self
.http_client .http_client
.post(&format!("{}/api/vote", self.base_url)) .post(format!("{}/api/vote", self.base_url))
.bearer_auth(&token) .bearer_auth(&token)
.form(&[("id", thing_id), ("dir", dir)]) .form(&[("id", thing_id), ("dir", dir)])
.send() .send()
@ -615,7 +615,7 @@ impl RedditChannel {
let response = self let response = self
.http_client .http_client
.post(&format!("{}/api/del", self.base_url)) .post(format!("{}/api/del", self.base_url))
.bearer_auth(&token) .bearer_auth(&token)
.form(&[("id", thing_id)]) .form(&[("id", thing_id)])
.send() .send()
@ -635,7 +635,7 @@ impl RedditChannel {
let response = self let response = self
.http_client .http_client
.post(&format!("{}/api/editusertext", self.base_url)) .post(format!("{}/api/editusertext", self.base_url))
.bearer_auth(&token) .bearer_auth(&token)
.form(&[ .form(&[
("api_type", "json"), ("api_type", "json"),
@ -661,7 +661,7 @@ impl RedditChannel {
let response = self let response = self
.http_client .http_client
.post(&format!("{}/api/subscribe", self.base_url)) .post(format!("{}/api/subscribe", self.base_url))
.bearer_auth(&token) .bearer_auth(&token)
.form(&[ .form(&[
("action", action), ("action", action),

View file

@ -910,8 +910,7 @@ impl TikTokVideo {
/// Get video creation time as DateTime /// Get video creation time as DateTime
pub fn created_at(&self) -> Option<chrono::DateTime<chrono::Utc>> { pub fn created_at(&self) -> Option<chrono::DateTime<chrono::Utc>> {
self.create_time self.create_time
.map(|ts| chrono::DateTime::from_timestamp(ts, 0)) .and_then(|ts| chrono::DateTime::from_timestamp(ts, 0))
.flatten()
} }
} }

View file

@ -745,7 +745,7 @@ impl WeChatProvider {
) -> bool { ) -> bool {
use sha1::{Digest, Sha1}; use sha1::{Digest, Sha1};
let mut params = vec![token, timestamp, nonce]; let mut params = [token, timestamp, nonce];
params.sort(); params.sort();
let joined = params.join(""); let joined = params.join("");

View file

@ -625,7 +625,7 @@ impl CalendarIntegrationService {
let from_date = query.from_date; let from_date = query.from_date;
let to_date = query.to_date; let to_date = query.to_date;
tokio::task::spawn_blocking(move || { tokio::task::spawn_blocking(move || -> Result<Vec<ContactEventWithDetails>, CalendarIntegrationError> {
let mut conn = pool.get().map_err(|_| CalendarIntegrationError::DatabaseError)?; let mut conn = pool.get().map_err(|_| CalendarIntegrationError::DatabaseError)?;
// Get events for the contact's organization in the date range // Get events for the contact's organization in the date range
@ -674,7 +674,10 @@ impl CalendarIntegrationService {
Ok(events) Ok(events)
}) })
.await .await
.map_err(|_| CalendarIntegrationError::DatabaseError)? .map_err(|e: tokio::task::JoinError| {
log::error!("Spawn blocking error: {}", e);
CalendarIntegrationError::DatabaseError
})?
} }
async fn get_contact_summary( async fn get_contact_summary(
@ -738,7 +741,7 @@ impl CalendarIntegrationService {
let pool = self.db_pool.clone(); let pool = self.db_pool.clone();
let exclude = exclude.to_vec(); let exclude = exclude.to_vec();
tokio::task::spawn_blocking(move || { tokio::task::spawn_blocking(move || -> Result<Vec<ContactSummary>, CalendarIntegrationError> {
let mut conn = pool.get().map_err(|_| CalendarIntegrationError::DatabaseError)?; let mut conn = pool.get().map_err(|_| CalendarIntegrationError::DatabaseError)?;
// Find other contacts in the same organization, excluding specified ones // Find other contacts in the same organization, excluding specified ones
@ -780,7 +783,10 @@ impl CalendarIntegrationService {
Ok(contacts) Ok(contacts)
}) })
.await .await
.map_err(|_| CalendarIntegrationError::DatabaseError)? .map_err(|e: tokio::task::JoinError| {
log::error!("Spawn blocking error: {}", e);
CalendarIntegrationError::DatabaseError
})?
} }
async fn find_same_company_contacts( async fn find_same_company_contacts(
@ -792,7 +798,7 @@ impl CalendarIntegrationService {
let pool = self.db_pool.clone(); let pool = self.db_pool.clone();
let exclude = exclude.to_vec(); let exclude = exclude.to_vec();
tokio::task::spawn_blocking(move || { tokio::task::spawn_blocking(move || -> Result<Vec<ContactSummary>, CalendarIntegrationError> {
let mut conn = pool.get().map_err(|_| CalendarIntegrationError::DatabaseError)?; let mut conn = pool.get().map_err(|_| CalendarIntegrationError::DatabaseError)?;
// Find contacts with company field set // Find contacts with company field set

View file

@ -1,7 +1,9 @@
#[cfg(feature = "calendar")]
pub mod calendar_integration; pub mod calendar_integration;
pub mod crm; pub mod crm;
pub mod crm_ui; pub mod crm_ui;
pub mod external_sync; pub mod external_sync;
#[cfg(feature = "tasks")]
pub mod tasks_integration; pub mod tasks_integration;
use axum::{ use axum::{

View file

@ -5,7 +5,8 @@ use serde::{Deserialize, Serialize};
use std::collections::HashMap; use std::collections::HashMap;
use uuid::Uuid; use uuid::Uuid;
use crate::core::shared::schema::{crm_contacts, people, tasks}; use crate::core::shared::schema::people::{crm_contacts as crm_contacts_table, people as people_table};
use crate::core::shared::schema::tasks::tasks as tasks_table;
use crate::shared::utils::DbPool; use crate::shared::utils::DbPool;
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
@ -813,11 +814,11 @@ impl TasksIntegrationService {
let mut conn = pool.get().map_err(|e| TasksIntegrationError::DatabaseError(e.to_string()))?; let mut conn = pool.get().map_err(|e| TasksIntegrationError::DatabaseError(e.to_string()))?;
// Get the contact's email to find the corresponding person // Get the contact's email to find the corresponding person
let contact_email: Option<String> = crm_contacts::table let contact_email: Option<String> = crm_contacts_table::table
.filter(crm_contacts::id.eq(contact_id)) .filter(crm_contacts_table::id.eq(contact_id))
.select(crm_contacts::email) .select(crm_contacts_table::email)
.first(&mut conn) .first(&mut conn)
.map_err(|e| TasksIntegrationError::DatabaseError(format!("Contact not found: {}", e)))?; .map_err(|e: diesel::result::Error| TasksIntegrationError::DatabaseError(format!("Contact not found: {}", e)))?;
let contact_email = match contact_email { let contact_email = match contact_email {
Some(email) => email, Some(email) => email,
@ -825,18 +826,18 @@ impl TasksIntegrationService {
}; };
// Find the person with this email // Find the person with this email
let person_id: Result<uuid::Uuid, _> = people::table let person_id: Result<uuid::Uuid, _> = people_table::table
.filter(people::email.eq(&contact_email)) .filter(people_table::email.eq(&contact_email))
.select(people::id) .select(people_table::id)
.first(&mut conn); .first(&mut conn);
if let Ok(pid) = person_id { if let Ok(pid) = person_id {
// Update the task's assigned_to field if this is an assignee // Update the task's assigned_to field if this is an assignee
if role == "assignee" { if role == "assignee" {
diesel::update(tasks::table.filter(tasks::id.eq(task_id))) diesel::update(tasks_table::table.filter(tasks_table::id.eq(task_id)))
.set(tasks::assignee_id.eq(Some(pid))) .set(tasks_table::assignee_id.eq(Some(pid)))
.execute(&mut conn) .execute(&mut conn)
.map_err(|e| TasksIntegrationError::DatabaseError(format!("Failed to update task: {}", e)))?; .map_err(|e: diesel::result::Error| TasksIntegrationError::DatabaseError(format!("Failed to update task: {}", e)))?;
} }
} }
@ -857,9 +858,9 @@ impl TasksIntegrationService {
let mut conn = pool.get().map_err(|e| TasksIntegrationError::DatabaseError(e.to_string()))?; let mut conn = pool.get().map_err(|e| TasksIntegrationError::DatabaseError(e.to_string()))?;
// Get task assignees from tasks table and look up corresponding contacts // Get task assignees from tasks table and look up corresponding contacts
let task_row: Result<(Uuid, Option<Uuid>, DateTime<Utc>), _> = tasks::table let task_row: Result<(Uuid, Option<Uuid>, DateTime<Utc>), _> = tasks_table::table
.filter(tasks::id.eq(task_id)) .filter(tasks_table::id.eq(task_id))
.select((tasks::id, tasks::assignee_id, tasks::created_at)) .select((tasks_table::id, tasks_table::assignee_id, tasks_table::created_at))
.first(&mut conn); .first(&mut conn);
let mut task_contacts = Vec::new(); let mut task_contacts = Vec::new();
@ -867,16 +868,16 @@ impl TasksIntegrationService {
if let Ok((tid, assigned_to, created_at)) = task_row { if let Ok((tid, assigned_to, created_at)) = task_row {
if let Some(assignee_id) = assigned_to { if let Some(assignee_id) = assigned_to {
// Look up person -> email -> contact // Look up person -> email -> contact
let person_email: Result<Option<String>, _> = people::table let person_email: Result<Option<String>, _> = people_table::table
.filter(people::id.eq(assignee_id)) .filter(people_table::id.eq(assignee_id))
.select(people::email) .select(people_table::email)
.first(&mut conn); .first(&mut conn);
if let Ok(Some(email)) = person_email { if let Ok(Some(email)) = person_email {
// Find contact with this email // Find contact with this email
let contact_result: Result<Uuid, _> = crm_contacts::table let contact_result: Result<Uuid, _> = crm_contacts_table::table
.filter(crm_contacts::email.eq(&email)) .filter(crm_contacts_table::email.eq(&email))
.select(crm_contacts::id) .select(crm_contacts_table::id)
.first(&mut conn); .first(&mut conn);
if let Ok(contact_id) = contact_result { if let Ok(contact_id) = contact_result {
@ -910,34 +911,34 @@ impl TasksIntegrationService {
let pool = self.db_pool.clone(); let pool = self.db_pool.clone();
let status_filter = query.status.clone(); let status_filter = query.status.clone();
tokio::task::spawn_blocking(move || { tokio::task::spawn_blocking(move || -> Result<Vec<ContactTaskWithDetails>, TasksIntegrationError> {
let mut conn = pool.get().map_err(|e| TasksIntegrationError::DatabaseError(e.to_string()))?; let mut conn = pool.get().map_err(|e| TasksIntegrationError::DatabaseError(e.to_string()))?;
let mut db_query = tasks::table let mut db_query = tasks_table::table
.filter(tasks::status.ne("deleted")) .filter(tasks_table::status.ne("deleted"))
.into_boxed(); .into_boxed();
if let Some(status) = status_filter { if let Some(status) = status_filter {
db_query = db_query.filter(tasks::status.eq(status)); db_query = db_query.filter(tasks_table::status.eq(status));
} }
let rows: Vec<(Uuid, String, Option<String>, String, String, Option<DateTime<Utc>>, Option<Uuid>, i32, DateTime<Utc>, DateTime<Utc>)> = db_query let rows: Vec<(Uuid, String, Option<String>, String, String, Option<DateTime<Utc>>, Option<Uuid>, i32, DateTime<Utc>, DateTime<Utc>)> = db_query
.order(tasks::created_at.desc()) .order(tasks_table::created_at.desc())
.select(( .select((
tasks::id, tasks_table::id,
tasks::title, tasks_table::title,
tasks::description, tasks_table::description,
tasks::status, tasks_table::status,
tasks::priority, tasks_table::priority,
tasks::due_date, tasks_table::due_date,
tasks::project_id, tasks_table::project_id,
tasks::progress, tasks_table::progress,
tasks::created_at, tasks_table::created_at,
tasks::updated_at, tasks_table::updated_at,
)) ))
.limit(50) .limit(50)
.load(&mut conn) .load(&mut conn)
.map_err(|e| TasksIntegrationError::DatabaseError(e.to_string()))?; .map_err(|e: diesel::result::Error| TasksIntegrationError::DatabaseError(e.to_string()))?;
let tasks_list = rows.into_iter().map(|row| { let tasks_list = rows.into_iter().map(|row| {
ContactTaskWithDetails { ContactTaskWithDetails {
@ -971,7 +972,7 @@ impl TasksIntegrationService {
Ok(tasks_list) Ok(tasks_list)
}) })
.await .await
.map_err(|e| TasksIntegrationError::DatabaseError(e.to_string()))? .map_err(|e: tokio::task::JoinError| TasksIntegrationError::DatabaseError(e.to_string()))?
} }
async fn get_contact_summary( async fn get_contact_summary(
@ -1017,27 +1018,27 @@ impl TasksIntegrationService {
tokio::task::spawn_blocking(move || { tokio::task::spawn_blocking(move || {
let mut conn = pool.get().map_err(|e| TasksIntegrationError::DatabaseError(e.to_string()))?; let mut conn = pool.get().map_err(|e| TasksIntegrationError::DatabaseError(e.to_string()))?;
let assignee_id: Option<Uuid> = tasks::table let assignee_id: Option<Uuid> = tasks_table::table
.filter(tasks::id.eq(task_id)) .filter(tasks_table::id.eq(task_id))
.select(tasks::assignee_id) .select(tasks_table::assignee_id)
.first(&mut conn) .first(&mut conn)
.optional() .optional()
.map_err(|e| TasksIntegrationError::DatabaseError(e.to_string()))? .map_err(|e: diesel::result::Error| TasksIntegrationError::DatabaseError(e.to_string()))?
.flatten(); .flatten();
if let Some(user_id) = assignee_id { if let Some(user_id) = assignee_id {
let person_email: Option<String> = people::table let person_email: Option<String> = people_table::table
.filter(people::user_id.eq(user_id)) .filter(people_table::user_id.eq(user_id))
.select(people::email) .select(people_table::email)
.first(&mut conn) .first(&mut conn)
.optional() .optional()
.map_err(|e| TasksIntegrationError::DatabaseError(e.to_string()))? .map_err(|e: diesel::result::Error| TasksIntegrationError::DatabaseError(e.to_string()))?
.flatten(); .flatten();
if let Some(email) = person_email { if let Some(email) = person_email {
let contact_ids: Vec<Uuid> = crm_contacts::table let contact_ids: Vec<Uuid> = crm_contacts_table::table
.filter(crm_contacts::email.eq(&email)) .filter(crm_contacts_table::email.eq(&email))
.select(crm_contacts::id) .select(crm_contacts_table::id)
.load(&mut conn) .load(&mut conn)
.unwrap_or_default(); .unwrap_or_default();
@ -1095,26 +1096,26 @@ impl TasksIntegrationService {
tokio::task::spawn_blocking(move || { tokio::task::spawn_blocking(move || {
let mut conn = pool.get().map_err(|e| TasksIntegrationError::DatabaseError(e.to_string()))?; let mut conn = pool.get().map_err(|e| TasksIntegrationError::DatabaseError(e.to_string()))?;
let mut query = crm_contacts::table let mut query = crm_contacts_table::table
.filter(crm_contacts::status.eq("active")) .filter(crm_contacts_table::status.eq("active"))
.into_boxed(); .into_boxed();
for exc in &exclude { for exc in &exclude {
query = query.filter(crm_contacts::id.ne(*exc)); query = query.filter(crm_contacts_table::id.ne(*exc));
} }
let rows: Vec<(Uuid, Option<String>, Option<String>, Option<String>, Option<String>, Option<String>)> = query let rows: Vec<(Uuid, Option<String>, Option<String>, Option<String>, Option<String>, Option<String>)> = query
.select(( .select((
crm_contacts::id, crm_contacts_table::id,
crm_contacts::first_name, crm_contacts_table::first_name,
crm_contacts::last_name, crm_contacts_table::last_name,
crm_contacts::email, crm_contacts_table::email,
crm_contacts::company, crm_contacts_table::company,
crm_contacts::job_title, crm_contacts_table::job_title,
)) ))
.limit(limit as i64) .limit(limit as i64)
.load(&mut conn) .load(&mut conn)
.map_err(|e| TasksIntegrationError::DatabaseError(e.to_string()))?; .map_err(|e: diesel::result::Error| TasksIntegrationError::DatabaseError(e.to_string()))?;
let contacts = rows.into_iter().map(|row| { let contacts = rows.into_iter().map(|row| {
let summary = ContactSummary { let summary = ContactSummary {
@ -1155,22 +1156,22 @@ impl TasksIntegrationService {
tokio::task::spawn_blocking(move || { tokio::task::spawn_blocking(move || {
let mut conn = pool.get().map_err(|e| TasksIntegrationError::DatabaseError(e.to_string()))?; let mut conn = pool.get().map_err(|e| TasksIntegrationError::DatabaseError(e.to_string()))?;
let mut query = crm_contacts::table let mut query = crm_contacts_table::table
.filter(crm_contacts::status.eq("active")) .filter(crm_contacts_table::status.eq("active"))
.into_boxed(); .into_boxed();
for exc in &exclude { for exc in &exclude {
query = query.filter(crm_contacts::id.ne(*exc)); query = query.filter(crm_contacts_table::id.ne(*exc));
} }
let rows: Vec<(Uuid, Option<String>, Option<String>, Option<String>, Option<String>, Option<String>)> = query let rows: Vec<(Uuid, Option<String>, Option<String>, Option<String>, Option<String>, Option<String>)> = query
.select(( .select((
crm_contacts::id, crm_contacts_table::id,
crm_contacts::first_name, crm_contacts_table::first_name,
crm_contacts::last_name, crm_contacts_table::last_name,
crm_contacts::email, crm_contacts_table::email,
crm_contacts::company, crm_contacts_table::company,
crm_contacts::job_title, crm_contacts_table::job_title,
)) ))
.limit(limit as i64) .limit(limit as i64)
.load(&mut conn) .load(&mut conn)
@ -1215,22 +1216,22 @@ impl TasksIntegrationService {
tokio::task::spawn_blocking(move || { tokio::task::spawn_blocking(move || {
let mut conn = pool.get().map_err(|e| TasksIntegrationError::DatabaseError(e.to_string()))?; let mut conn = pool.get().map_err(|e| TasksIntegrationError::DatabaseError(e.to_string()))?;
let mut query = crm_contacts::table let mut query = crm_contacts_table::table
.filter(crm_contacts::status.eq("active")) .filter(crm_contacts_table::status.eq("active"))
.into_boxed(); .into_boxed();
for exc in &exclude { for exc in &exclude {
query = query.filter(crm_contacts::id.ne(*exc)); query = query.filter(crm_contacts_table::id.ne(*exc));
} }
let rows: Vec<(Uuid, Option<String>, Option<String>, Option<String>, Option<String>, Option<String>)> = query let rows: Vec<(Uuid, Option<String>, Option<String>, Option<String>, Option<String>, Option<String>)> = query
.select(( .select((
crm_contacts::id, crm_contacts_table::id,
crm_contacts::first_name, crm_contacts_table::first_name,
crm_contacts::last_name, crm_contacts_table::last_name,
crm_contacts::email, crm_contacts_table::email,
crm_contacts::company, crm_contacts_table::company,
crm_contacts::job_title, crm_contacts_table::job_title,
)) ))
.limit(limit as i64) .limit(limit as i64)
.load(&mut conn) .load(&mut conn)

View file

@ -18,17 +18,12 @@ use std::fs;
use std::os::unix::fs::PermissionsExt; use std::os::unix::fs::PermissionsExt;
use std::path::{Path, PathBuf}; use std::path::{Path, PathBuf};
fn safe_pkill(args: &[&str]) { fn safe_pkill(args: &[&str]) {
if let Ok(cmd) = SafeCommand::new("pkill") if let Ok(cmd) = SafeCommand::new("pkill").and_then(|c| c.args(args)) {
.and_then(|c| c.args(args))
{
let _ = cmd.execute(); let _ = cmd.execute();
} }
} }
fn safe_pgrep(args: &[&str]) -> Option<std::process::Output> { fn safe_pgrep(args: &[&str]) -> Option<std::process::Output> {
SafeCommand::new("pgrep") SafeCommand::new("pgrep")
.and_then(|c| c.args(args)) .and_then(|c| c.args(args))
@ -46,23 +41,19 @@ fn safe_sh_command(script: &str) -> Option<std::process::Output> {
fn safe_curl(args: &[&str]) -> Option<std::process::Output> { fn safe_curl(args: &[&str]) -> Option<std::process::Output> {
match SafeCommand::new("curl") { match SafeCommand::new("curl") {
Ok(cmd) => { Ok(cmd) => match cmd.args(args) {
match cmd.args(args) { Ok(cmd_with_args) => match cmd_with_args.execute() {
Ok(cmd_with_args) => {
match cmd_with_args.execute() {
Ok(output) => Some(output), Ok(output) => Some(output),
Err(e) => { Err(e) => {
log::warn!("safe_curl execute failed: {}", e); log::warn!("safe_curl execute failed: {}", e);
None None
} }
} },
}
Err(e) => { Err(e) => {
log::warn!("safe_curl args failed: {} - args: {:?}", e, args); log::warn!("safe_curl args failed: {} - args: {:?}", e, args);
None None
} }
} },
}
Err(e) => { Err(e) => {
log::warn!("safe_curl new failed: {}", e); log::warn!("safe_curl new failed: {}", e);
None None
@ -71,8 +62,10 @@ fn safe_curl(args: &[&str]) -> Option<std::process::Output> {
} }
fn vault_health_check() -> bool { fn vault_health_check() -> bool {
let client_cert = std::path::Path::new("./botserver-stack/conf/system/certificates/botserver/client.crt"); let client_cert =
let client_key = std::path::Path::new("./botserver-stack/conf/system/certificates/botserver/client.key"); std::path::Path::new("./botserver-stack/conf/system/certificates/botserver/client.crt");
let client_key =
std::path::Path::new("./botserver-stack/conf/system/certificates/botserver/client.key");
let certs_exist = client_cert.exists() && client_key.exists(); let certs_exist = client_cert.exists() && client_key.exists();
log::info!("Vault health check: certs_exist={}", certs_exist); log::info!("Vault health check: certs_exist={}", certs_exist);
@ -80,23 +73,39 @@ fn vault_health_check() -> bool {
let result = if certs_exist { let result = if certs_exist {
log::info!("Using mTLS for Vault health check"); log::info!("Using mTLS for Vault health check");
safe_curl(&[ safe_curl(&[
"-f", "-sk", "--connect-timeout", "2", "-m", "5", "-f",
"--cert", "./botserver-stack/conf/system/certificates/botserver/client.crt", "-sk",
"--key", "./botserver-stack/conf/system/certificates/botserver/client.key", "--connect-timeout",
"https://localhost:8200/v1/sys/health?standbyok=true&uninitcode=200&sealedcode=200" "2",
"-m",
"5",
"--cert",
"./botserver-stack/conf/system/certificates/botserver/client.crt",
"--key",
"./botserver-stack/conf/system/certificates/botserver/client.key",
"https://localhost:8200/v1/sys/health?standbyok=true&uninitcode=200&sealedcode=200",
]) ])
} else { } else {
log::info!("Using plain TLS for Vault health check (no client certs yet)"); log::info!("Using plain TLS for Vault health check (no client certs yet)");
safe_curl(&[ safe_curl(&[
"-f", "-sk", "--connect-timeout", "2", "-m", "5", "-f",
"https://localhost:8200/v1/sys/health?standbyok=true&uninitcode=200&sealedcode=200" "-sk",
"--connect-timeout",
"2",
"-m",
"5",
"https://localhost:8200/v1/sys/health?standbyok=true&uninitcode=200&sealedcode=200",
]) ])
}; };
match &result { match &result {
Some(output) => { Some(output) => {
let success = output.status.success(); let success = output.status.success();
log::info!("Vault health check result: success={}, status={:?}", success, output.status.code()); log::info!(
"Vault health check result: success={}, status={:?}",
success,
output.status.code()
);
if !success { if !success {
let stderr = String::from_utf8_lossy(&output.stderr); let stderr = String::from_utf8_lossy(&output.stderr);
let stdout = String::from_utf8_lossy(&output.stdout); let stdout = String::from_utf8_lossy(&output.stdout);
@ -113,9 +122,7 @@ fn vault_health_check() -> bool {
} }
fn safe_fuser(args: &[&str]) { fn safe_fuser(args: &[&str]) {
if let Ok(cmd) = SafeCommand::new("fuser") if let Ok(cmd) = SafeCommand::new("fuser").and_then(|c| c.args(args)) {
.and_then(|c| c.args(args))
{
let _ = cmd.execute(); let _ = cmd.execute();
} }
} }
@ -377,7 +384,9 @@ impl BootstrapManager {
for attempt in 1..=30 { for attempt in 1..=30 {
tokio::time::sleep(tokio::time::Duration::from_secs(1)).await; tokio::time::sleep(tokio::time::Duration::from_secs(1)).await;
let status = SafeCommand::new("pg_isready") let status = SafeCommand::new("pg_isready")
.and_then(|c| c.args(&["-h", "localhost", "-p", "5432", "-U", "gbuser"])) .and_then(|c| {
c.args(&["-h", "localhost", "-p", "5432", "-U", "gbuser"])
})
.ok() .ok()
.and_then(|cmd| cmd.execute().ok()) .and_then(|cmd| cmd.execute().ok())
.map(|o| o.status.success()) .map(|o| o.status.success())
@ -388,7 +397,10 @@ impl BootstrapManager {
break; break;
} }
if attempt % 5 == 0 { if attempt % 5 == 0 {
info!("Waiting for PostgreSQL to be ready... (attempt {}/30)", attempt); info!(
"Waiting for PostgreSQL to be ready... (attempt {}/30)",
attempt
);
} }
} }
if !ready { if !ready {
@ -746,8 +758,7 @@ impl BootstrapManager {
info!("Vault unsealed successfully"); info!("Vault unsealed successfully");
} }
} else { } else {
let vault_pid = safe_pgrep(&["-f", "vault server"]) let vault_pid = safe_pgrep(&["-f", "vault server"]).and_then(|o| {
.and_then(|o| {
String::from_utf8_lossy(&o.stdout) String::from_utf8_lossy(&o.stdout)
.trim() .trim()
.parse::<i32>() .parse::<i32>()
@ -766,7 +777,10 @@ impl BootstrapManager {
std::env::set_var("VAULT_ADDR", vault_addr); std::env::set_var("VAULT_ADDR", vault_addr);
std::env::set_var("VAULT_TOKEN", &root_token); std::env::set_var("VAULT_TOKEN", &root_token);
std::env::set_var("VAULT_CACERT", "./botserver-stack/conf/system/certificates/ca/ca.crt"); std::env::set_var(
"VAULT_CACERT",
"./botserver-stack/conf/system/certificates/ca/ca.crt",
);
std::env::set_var( std::env::set_var(
"VAULT_CACERT", "VAULT_CACERT",
@ -816,7 +830,15 @@ impl BootstrapManager {
let pm = PackageManager::new(self.install_mode.clone(), self.tenant.clone())?; let pm = PackageManager::new(self.install_mode.clone(), self.tenant.clone())?;
let required_components = vec!["vault", "tables", "directory", "drive", "cache", "llm", "vector_db"]; let required_components = vec![
"vault",
"tables",
"directory",
"drive",
"cache",
"llm",
"vector_db",
];
let vault_needs_setup = !self.stack_dir("conf/vault/init.json").exists(); let vault_needs_setup = !self.stack_dir("conf/vault/init.json").exists();
@ -1074,7 +1096,11 @@ impl BootstrapManager {
std::env::current_dir()?.join(self.stack_dir("conf/directory/admin-pat.txt")) std::env::current_dir()?.join(self.stack_dir("conf/directory/admin-pat.txt"))
}; };
fs::create_dir_all(zitadel_config_path.parent().ok_or_else(|| anyhow::anyhow!("Invalid zitadel config path"))?)?; fs::create_dir_all(
zitadel_config_path
.parent()
.ok_or_else(|| anyhow::anyhow!("Invalid zitadel config path"))?,
)?;
let zitadel_db_password = Self::generate_secure_password(24); let zitadel_db_password = Self::generate_secure_password(24);
@ -1188,7 +1214,11 @@ DefaultInstance:
fn setup_caddy_proxy(&self) -> Result<()> { fn setup_caddy_proxy(&self) -> Result<()> {
let caddy_config = self.stack_dir("conf/proxy/Caddyfile"); let caddy_config = self.stack_dir("conf/proxy/Caddyfile");
fs::create_dir_all(caddy_config.parent().ok_or_else(|| anyhow::anyhow!("Invalid caddy config path"))?)?; fs::create_dir_all(
caddy_config
.parent()
.ok_or_else(|| anyhow::anyhow!("Invalid caddy config path"))?,
)?;
let config = format!( let config = format!(
r"{{ r"{{
@ -1240,7 +1270,11 @@ meet.botserver.local {{
fn setup_coredns(&self) -> Result<()> { fn setup_coredns(&self) -> Result<()> {
let dns_config = self.stack_dir("conf/dns/Corefile"); let dns_config = self.stack_dir("conf/dns/Corefile");
fs::create_dir_all(dns_config.parent().ok_or_else(|| anyhow::anyhow!("Invalid dns config path"))?)?; fs::create_dir_all(
dns_config
.parent()
.ok_or_else(|| anyhow::anyhow!("Invalid dns config path"))?,
)?;
let zone_file = self.stack_dir("conf/dns/botserver.local.zone"); let zone_file = self.stack_dir("conf/dns/botserver.local.zone");
@ -1359,15 +1393,15 @@ meet IN A 127.0.0.1
let user_password = Self::generate_secure_password(16); let user_password = Self::generate_secure_password(16);
match setup match setup
.create_user( .create_user(crate::package_manager::setup::CreateUserParams {
&org_id, org_id: &org_id,
"user", username: "user",
"user@default", email: "user@default",
&user_password, password: &user_password,
"User", first_name: "User",
"Default", last_name: "Default",
false, is_admin: false,
) })
.await .await
{ {
Ok(regular_user) => { Ok(regular_user) => {
@ -1856,7 +1890,9 @@ VAULT_CACHE_TTL=300
.credentials_provider(aws_sdk_s3::config::Credentials::new( .credentials_provider(aws_sdk_s3::config::Credentials::new(
access_key, secret_key, None, None, "static", access_key, secret_key, None, None, "static",
)) ))
.sleep_impl(std::sync::Arc::new(aws_smithy_async::rt::sleep::TokioSleep::new())) .sleep_impl(std::sync::Arc::new(
aws_smithy_async::rt::sleep::TokioSleep::new(),
))
.load() .load()
.await; .await;
@ -1904,7 +1940,10 @@ VAULT_CACHE_TTL=300
.to_string_lossy() .to_string_lossy()
.ends_with(".gbai") .ends_with(".gbai")
{ {
let bot_name = path.file_name().map(|n| n.to_string_lossy().to_string()).unwrap_or_default(); let bot_name = path
.file_name()
.map(|n| n.to_string_lossy().to_string())
.unwrap_or_default();
let bucket = bot_name.trim_start_matches('/').to_string(); let bucket = bot_name.trim_start_matches('/').to_string();
let bucket_exists = client.head_bucket().bucket(&bucket).send().await.is_ok(); let bucket_exists = client.head_bucket().bucket(&bucket).send().await.is_ok();
if bucket_exists { if bucket_exists {
@ -1912,11 +1951,15 @@ VAULT_CACHE_TTL=300
continue; continue;
} }
if let Err(e) = client.create_bucket().bucket(&bucket).send().await { if let Err(e) = client.create_bucket().bucket(&bucket).send().await {
warn!("S3/MinIO not available, skipping bucket {}: {:?}", bucket, e); warn!(
"S3/MinIO not available, skipping bucket {}: {:?}",
bucket, e
);
continue; continue;
} }
info!("Created new bucket {}, uploading templates...", bucket); info!("Created new bucket {}, uploading templates...", bucket);
if let Err(e) = Self::upload_directory_recursive(&client, &path, &bucket, "/").await { if let Err(e) = Self::upload_directory_recursive(&client, &path, &bucket, "/").await
{
warn!("Failed to upload templates to bucket {}: {}", bucket, e); warn!("Failed to upload templates to bucket {}: {}", bucket, e);
} }
} }
@ -2089,7 +2132,10 @@ VAULT_CACHE_TTL=300
let mut read_dir = tokio::fs::read_dir(local_path).await?; let mut read_dir = tokio::fs::read_dir(local_path).await?;
while let Some(entry) = read_dir.next_entry().await? { while let Some(entry) = read_dir.next_entry().await? {
let path = entry.path(); let path = entry.path();
let file_name = path.file_name().map(|n| n.to_string_lossy().to_string()).unwrap_or_default(); let file_name = path
.file_name()
.map(|n| n.to_string_lossy().to_string())
.unwrap_or_default();
let mut key = prefix.trim_matches('/').to_string(); let mut key = prefix.trim_matches('/').to_string();
if !key.is_empty() { if !key.is_empty() {
key.push('/'); key.push('/');
@ -2167,10 +2213,7 @@ log_level = "info"
fs::create_dir_all(self.stack_dir("data/vault"))?; fs::create_dir_all(self.stack_dir("data/vault"))?;
info!( info!("Created Vault config with TLS at {}", config_path.display());
"Created Vault config with TLS at {}",
config_path.display()
);
Ok(()) Ok(())
} }
@ -2340,9 +2383,7 @@ log_level = "info"
for san in sans { for san in sans {
if let Ok(ip) = san.parse::<std::net::IpAddr>() { if let Ok(ip) = san.parse::<std::net::IpAddr>() {
params params.subject_alt_names.push(rcgen::SanType::IpAddress(ip));
.subject_alt_names
.push(rcgen::SanType::IpAddress(ip));
} else { } else {
params params
.subject_alt_names .subject_alt_names
@ -2362,7 +2403,10 @@ log_level = "info"
let minio_certs_dir = PathBuf::from("./botserver-stack/conf/drive/certs"); let minio_certs_dir = PathBuf::from("./botserver-stack/conf/drive/certs");
fs::create_dir_all(&minio_certs_dir)?; fs::create_dir_all(&minio_certs_dir)?;
let drive_cert_dir = cert_dir.join("drive"); let drive_cert_dir = cert_dir.join("drive");
fs::copy(drive_cert_dir.join("server.crt"), minio_certs_dir.join("public.crt"))?; fs::copy(
drive_cert_dir.join("server.crt"),
minio_certs_dir.join("public.crt"),
)?;
let drive_key_src = drive_cert_dir.join("server.key"); let drive_key_src = drive_cert_dir.join("server.key");
let drive_key_dst = minio_certs_dir.join("private.key"); let drive_key_dst = minio_certs_dir.join("private.key");

View file

@ -1,3 +1,4 @@
#[cfg(any(feature = "research", feature = "llm"))]
pub mod kb_context; pub mod kb_context;
#[cfg(feature = "llm")] #[cfg(feature = "llm")]
use crate::core::config::ConfigManager; use crate::core::config::ConfigManager;
@ -22,6 +23,8 @@ use axum::{
use diesel::PgConnection; use diesel::PgConnection;
use futures::{sink::SinkExt, stream::StreamExt}; use futures::{sink::SinkExt, stream::StreamExt};
use log::{error, info, warn}; use log::{error, info, warn};
#[cfg(feature = "llm")]
use log::trace;
use serde_json; use serde_json;
use std::collections::HashMap; use std::collections::HashMap;
use std::sync::Arc; use std::sync::Arc;

View file

@ -191,8 +191,7 @@ impl BotDatabaseManager {
format!( format!(
"bot_{}", "bot_{}",
bot_name bot_name
.replace('-', "_") .replace(['-', ' '], "_")
.replace(' ', "_")
.to_lowercase() .to_lowercase()
.chars() .chars()
.filter(|c| c.is_alphanumeric() || *c == '_') .filter(|c| c.is_alphanumeric() || *c == '_')

View file

@ -1,4 +1,5 @@
use anyhow::Result; use anyhow::Result;
#[cfg(feature = "drive")]
use aws_sdk_s3::Client as S3Client; use aws_sdk_s3::Client as S3Client;
use diesel::r2d2::{ConnectionManager, Pool}; use diesel::r2d2::{ConnectionManager, Pool};
use diesel::PgConnection; use diesel::PgConnection;
@ -11,7 +12,10 @@ pub type DbPool = Pool<ConnectionManager<PgConnection>>;
pub struct UserProvisioningService { pub struct UserProvisioningService {
db_pool: DbPool, db_pool: DbPool,
#[cfg(feature = "drive")]
s3_client: Option<Arc<S3Client>>, s3_client: Option<Arc<S3Client>>,
#[cfg(not(feature = "drive"))]
s3_client: Option<Arc<()>>,
base_url: String, base_url: String,
} }
@ -51,6 +55,7 @@ pub enum UserRole {
} }
impl UserProvisioningService { impl UserProvisioningService {
#[cfg(feature = "drive")]
pub fn new(db_pool: DbPool, s3_client: Option<Arc<S3Client>>, base_url: String) -> Self { pub fn new(db_pool: DbPool, s3_client: Option<Arc<S3Client>>, base_url: String) -> Self {
Self { Self {
db_pool, db_pool,
@ -59,6 +64,15 @@ impl UserProvisioningService {
} }
} }
#[cfg(not(feature = "drive"))]
pub fn new(db_pool: DbPool, _s3_client: Option<Arc<()>>, base_url: String) -> Self {
Self {
db_pool,
s3_client: None,
base_url,
}
}
pub fn get_base_url(&self) -> &str { pub fn get_base_url(&self) -> &str {
&self.base_url &self.base_url
} }
@ -130,6 +144,8 @@ impl UserProvisioningService {
} }
async fn create_s3_home(&self, account: &UserAccount, bot_access: &BotAccess) -> Result<()> { async fn create_s3_home(&self, account: &UserAccount, bot_access: &BotAccess) -> Result<()> {
#[cfg(feature = "drive")]
{
let Some(s3_client) = &self.s3_client else { let Some(s3_client) = &self.s3_client else {
log::warn!("S3 client not configured, skipping S3 home creation"); log::warn!("S3 client not configured, skipping S3 home creation");
return Ok(()); return Ok(());
@ -176,6 +192,15 @@ impl UserProvisioningService {
account.username, account.username,
bucket_name bucket_name
); );
}
#[cfg(not(feature = "drive"))]
{
let _ = account;
let _ = bot_access;
log::debug!("Drive feature not enabled, skipping S3 home creation");
}
Ok(()) Ok(())
} }
@ -275,6 +300,7 @@ impl UserProvisioningService {
} }
async fn remove_s3_data(&self, username: &str) -> Result<()> { async fn remove_s3_data(&self, username: &str) -> Result<()> {
#[cfg(feature = "drive")]
if let Some(s3_client) = &self.s3_client { if let Some(s3_client) = &self.s3_client {
let buckets_result = s3_client.list_buckets().send().await?; let buckets_result = s3_client.list_buckets().send().await?;
@ -309,6 +335,12 @@ impl UserProvisioningService {
} }
} }
#[cfg(not(feature = "drive"))]
{
let _ = username;
log::debug!("Drive feature not enabled, bypassing S3 data removal");
}
Ok(()) Ok(())
} }

View file

@ -33,8 +33,8 @@ pub const COMPILED_FEATURES: &[&str] = &[
"analytics", "analytics",
#[cfg(feature = "monitoring")] #[cfg(feature = "monitoring")]
"monitoring", "monitoring",
#[cfg(feature = "admin")] #[cfg(feature = "settings")]
"admin", "settings",
#[cfg(feature = "automation")] #[cfg(feature = "automation")]
"automation", "automation",
#[cfg(feature = "cache")] #[cfg(feature = "cache")]
@ -46,8 +46,8 @@ pub const COMPILED_FEATURES: &[&str] = &[
"project", "project",
#[cfg(feature = "goals")] #[cfg(feature = "goals")]
"goals", "goals",
#[cfg(feature = "workspace")] #[cfg(feature = "workspaces")]
"workspace", "workspaces",
#[cfg(feature = "tickets")] #[cfg(feature = "tickets")]
"tickets", "tickets",
#[cfg(feature = "billing")] #[cfg(feature = "billing")]

View file

@ -218,6 +218,8 @@ impl DocumentProcessor {
fn extract_pdf_with_library(&self, file_path: &Path) -> Result<String> { fn extract_pdf_with_library(&self, file_path: &Path) -> Result<String> {
let _ = self; // Suppress unused self warning let _ = self; // Suppress unused self warning
#[cfg(feature = "drive")]
{
use pdf_extract::extract_text; use pdf_extract::extract_text;
match extract_text(file_path) { match extract_text(file_path) {
@ -226,25 +228,30 @@ impl DocumentProcessor {
"Successfully extracted PDF with library: {}", "Successfully extracted PDF with library: {}",
file_path.display() file_path.display()
); );
Ok(text) return Ok(text);
} }
Err(e) => { Err(e) => {
warn!("PDF library extraction failed: {}", e); warn!("PDF library extraction failed: {}", e);
}
}
}
Self::extract_pdf_basic_sync(file_path) Self::extract_pdf_basic_sync(file_path)
} }
fn extract_pdf_basic_sync(file_path: &Path) -> Result<String> {
#[cfg(feature = "drive")]
{
if let Ok(text) = pdf_extract::extract_text(file_path) {
if !text.is_empty() {
return Ok(text);
}
} }
} }
fn extract_pdf_basic_sync(file_path: &Path) -> Result<String> { Err(anyhow::anyhow!(
pdf_extract::extract_text(file_path)
.ok()
.filter(|text| !text.is_empty())
.ok_or_else(|| {
anyhow::anyhow!(
"Could not extract text from PDF. Please ensure pdftotext is installed." "Could not extract text from PDF. Please ensure pdftotext is installed."
) ))
})
} }
async fn extract_docx_text(&self, file_path: &Path) -> Result<String> { async fn extract_docx_text(&self, file_path: &Path) -> Result<String> {

View file

@ -114,16 +114,13 @@ pub struct PaginatedQuery {
#[derive(Debug, Clone, Copy, Serialize, Deserialize, PartialEq, Eq)] #[derive(Debug, Clone, Copy, Serialize, Deserialize, PartialEq, Eq)]
#[serde(rename_all = "snake_case")] #[serde(rename_all = "snake_case")]
#[derive(Default)]
pub enum SortDirection { pub enum SortDirection {
#[default]
Asc, Asc,
Desc, Desc,
} }
impl Default for SortDirection {
fn default() -> Self {
Self::Asc
}
}
#[derive(Debug, Clone, Serialize, Deserialize)] #[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PaginatedResult<T> { pub struct PaginatedResult<T> {
@ -237,7 +234,7 @@ impl LargeOrgOptimizer {
Vec::new() Vec::new()
}; };
let total_pages = (cached.total_count + query.page_size - 1) / query.page_size; let total_pages = cached.total_count.div_ceil(query.page_size);
PaginatedResult { PaginatedResult {
items, items,
@ -255,10 +252,10 @@ impl LargeOrgOptimizer {
query: &PaginatedQuery, query: &PaginatedQuery,
) -> Result<PaginatedResult<Uuid>, LargeOrgError> { ) -> Result<PaginatedResult<Uuid>, LargeOrgError> {
let items = Vec::new(); let items = Vec::new();
let total_count = 0; let total_count: usize = 0;
let total_pages = if total_count > 0 { let total_pages = if total_count > 0 {
(total_count + query.page_size - 1) / query.page_size total_count.div_ceil(query.page_size)
} else { } else {
0 0
}; };

View file

@ -12,6 +12,7 @@ use std::sync::Arc;
use tokio::sync::RwLock; use tokio::sync::RwLock;
use uuid::Uuid; use uuid::Uuid;
#[cfg(any(feature = "research", feature = "llm"))]
use crate::core::kb::permissions::{build_qdrant_permission_filter, UserContext}; use crate::core::kb::permissions::{build_qdrant_permission_filter, UserContext};
use crate::shared::utils::DbPool; use crate::shared::utils::DbPool;
@ -154,6 +155,7 @@ impl AuthenticatedUser {
} }
/// Convert to UserContext for KB permission checks /// Convert to UserContext for KB permission checks
#[cfg(any(feature = "research", feature = "llm"))]
pub fn to_user_context(&self) -> UserContext { pub fn to_user_context(&self) -> UserContext {
if self.is_authenticated() { if self.is_authenticated() {
UserContext::authenticated(self.user_id, self.email.clone(), self.organization_id) UserContext::authenticated(self.user_id, self.email.clone(), self.organization_id)
@ -165,6 +167,7 @@ impl AuthenticatedUser {
} }
/// Get Qdrant permission filter for this user /// Get Qdrant permission filter for this user
#[cfg(any(feature = "research", feature = "llm"))]
pub fn get_qdrant_filter(&self) -> serde_json::Value { pub fn get_qdrant_filter(&self) -> serde_json::Value {
build_qdrant_permission_filter(&self.to_user_context()) build_qdrant_permission_filter(&self.to_user_context())
} }
@ -684,8 +687,8 @@ async fn extract_and_validate_user(
.and_then(|v| v.to_str().ok()) .and_then(|v| v.to_str().ok())
.ok_or(AuthError::MissingToken)?; .ok_or(AuthError::MissingToken)?;
let token = if auth_header.starts_with("Bearer ") { let token = if let Some(stripped) = auth_header.strip_prefix("Bearer ") {
&auth_header[7..] stripped
} else { } else {
return Err(AuthError::InvalidFormat); return Err(AuthError::InvalidFormat);
}; };
@ -990,6 +993,7 @@ pub fn can_access_resource(
} }
/// Build permission filter for Qdrant searches based on user context /// Build permission filter for Qdrant searches based on user context
#[cfg(any(feature = "research", feature = "llm"))]
pub fn build_search_permission_filter(context: &RequestContext) -> serde_json::Value { pub fn build_search_permission_filter(context: &RequestContext) -> serde_json::Value {
context.user.get_qdrant_filter() context.user.get_qdrant_filter()
} }

View file

@ -4,10 +4,12 @@ pub mod bootstrap;
pub mod bot; pub mod bot;
pub mod bot_database; pub mod bot_database;
pub mod config; pub mod config;
#[cfg(feature = "directory")]
pub mod directory; pub mod directory;
pub mod dns; pub mod dns;
pub mod features; pub mod features;
pub mod i18n; pub mod i18n;
#[cfg(any(feature = "research", feature = "llm"))]
pub mod kb; pub mod kb;
pub mod large_org_optimizer; pub mod large_org_optimizer;
pub mod manifest; pub mod manifest;

View file

@ -558,11 +558,10 @@ impl BotAccessConfig {
} }
// Organization-wide access // Organization-wide access
if self.visibility == BotVisibility::Organization { if self.visibility == BotVisibility::Organization
if user.organization_id == Some(self.organization_id) { && user.organization_id == Some(self.organization_id) {
return AccessCheckResult::Allowed; return AccessCheckResult::Allowed;
} }
}
AccessCheckResult::Denied("Access not granted".to_string()) AccessCheckResult::Denied("Access not granted".to_string())
} }
@ -702,11 +701,10 @@ impl AppAccessConfig {
} }
// Organization-wide // Organization-wide
if self.visibility == AppVisibility::Organization { if self.visibility == AppVisibility::Organization
if user.organization_id == Some(self.organization_id) { && user.organization_id == Some(self.organization_id) {
return AccessCheckResult::Allowed; return AccessCheckResult::Allowed;
} }
}
AccessCheckResult::Denied("Access not granted".to_string()) AccessCheckResult::Denied("Access not granted".to_string())
} }

View file

@ -33,19 +33,23 @@ pub enum InvitationRole {
Guest, Guest,
} }
impl InvitationRole { impl std::str::FromStr for InvitationRole {
pub fn from_str(s: &str) -> Option<Self> { type Err = ();
match s.to_lowercase().as_str() {
"owner" => Some(Self::Owner),
"admin" => Some(Self::Admin),
"manager" => Some(Self::Manager),
"member" => Some(Self::Member),
"viewer" => Some(Self::Viewer),
"guest" => Some(Self::Guest),
_ => None,
}
}
fn from_str(s: &str) -> Result<Self, Self::Err> {
match s.to_lowercase().as_str() {
"owner" => Ok(Self::Owner),
"admin" => Ok(Self::Admin),
"manager" => Ok(Self::Manager),
"member" => Ok(Self::Member),
"viewer" => Ok(Self::Viewer),
"guest" => Ok(Self::Guest),
_ => Err(()),
}
}
}
impl InvitationRole {
pub fn as_str(&self) -> &'static str { pub fn as_str(&self) -> &'static str {
match self { match self {
Self::Owner => "owner", Self::Owner => "owner",
@ -172,6 +176,29 @@ impl Default for InvitationService {
} }
} }
pub struct CreateInvitationParams<'a> {
pub organization_id: Uuid,
pub organization_name: &'a str,
pub email: &'a str,
pub role: InvitationRole,
pub groups: Vec<String>,
pub invited_by: Uuid,
pub invited_by_name: &'a str,
pub message: Option<String>,
pub expires_in_days: i64,
}
pub struct BulkInviteParams<'a> {
pub organization_id: Uuid,
pub organization_name: &'a str,
pub emails: Vec<String>,
pub role: InvitationRole,
pub groups: Vec<String>,
pub invited_by: Uuid,
pub invited_by_name: &'a str,
pub message: Option<String>,
}
impl InvitationService { impl InvitationService {
pub fn new() -> Self { pub fn new() -> Self {
Self { Self {
@ -183,23 +210,17 @@ impl InvitationService {
pub async fn create_invitation( pub async fn create_invitation(
&self, &self,
organization_id: Uuid, params: CreateInvitationParams<'_>,
organization_name: &str,
email: &str,
role: InvitationRole,
groups: Vec<String>,
invited_by: Uuid,
invited_by_name: &str,
message: Option<String>,
expires_in_days: i64,
) -> Result<OrganizationInvitation, String> { ) -> Result<OrganizationInvitation, String> {
let email_lower = email.to_lowercase().trim().to_string(); let email_lower = params.email.to_lowercase().trim().to_string();
if !self.is_valid_email(&email_lower) { if !self.is_valid_email(&email_lower) {
return Err("Invalid email address".to_string()); return Err("Invalid email address".to_string());
} }
let existing = self.find_pending_invitation(&organization_id, &email_lower).await; let existing = self
.find_pending_invitation(&params.organization_id, &email_lower)
.await;
if existing.is_some() { if existing.is_some() {
return Err("An invitation already exists for this email".to_string()); return Err("An invitation already exists for this email".to_string());
} }
@ -210,16 +231,16 @@ impl InvitationService {
let invitation = OrganizationInvitation { let invitation = OrganizationInvitation {
id: invitation_id, id: invitation_id,
organization_id, organization_id: params.organization_id,
email: email_lower, email: email_lower,
role, role: params.role,
groups, groups: params.groups,
invited_by, invited_by: params.invited_by,
invited_by_name: invited_by_name.to_string(), invited_by_name: params.invited_by_name.to_string(),
status: InvitationStatus::Pending, status: InvitationStatus::Pending,
token: token.clone(), token: token.clone(),
message, message: params.message,
expires_at: now + Duration::days(expires_in_days), expires_at: now + Duration::days(params.expires_in_days),
created_at: now, created_at: now,
updated_at: now, updated_at: now,
accepted_at: None, accepted_at: None,
@ -238,45 +259,39 @@ impl InvitationService {
{ {
let mut by_org = self.invitations_by_org.write().await; let mut by_org = self.invitations_by_org.write().await;
by_org.entry(organization_id).or_default().push(invitation_id); by_org
.entry(params.organization_id)
.or_default()
.push(invitation_id);
} }
self.send_invitation_email(&invitation, organization_name).await; self.send_invitation_email(&invitation, params.organization_name)
.await;
Ok(invitation) Ok(invitation)
} }
pub async fn bulk_invite( pub async fn bulk_invite(&self, params: BulkInviteParams<'_>) -> BulkInviteResponse {
&self,
organization_id: Uuid,
organization_name: &str,
emails: Vec<String>,
role: InvitationRole,
groups: Vec<String>,
invited_by: Uuid,
invited_by_name: &str,
message: Option<String>,
) -> BulkInviteResponse {
let mut successful = Vec::new(); let mut successful = Vec::new();
let mut failed = Vec::new(); let mut failed = Vec::new();
for email in emails { for email in params.emails {
match self match self
.create_invitation( .create_invitation(CreateInvitationParams {
organization_id, organization_id: params.organization_id,
organization_name, organization_name: params.organization_name,
&email, email: &email,
role.clone(), role: params.role.clone(),
groups.clone(), groups: params.groups.clone(),
invited_by, invited_by: params.invited_by,
invited_by_name, invited_by_name: params.invited_by_name,
message.clone(), message: params.message.clone(),
7, expires_in_days: 7,
) })
.await .await
{ {
Ok(invitation) => { Ok(invitation) => {
successful.push(self.to_response(&invitation, organization_name)); successful.push(self.to_response(&invitation, params.organization_name));
} }
Err(error) => { Err(error) => {
failed.push(BulkInviteError { email, error }); failed.push(BulkInviteError { email, error });
@ -435,7 +450,7 @@ impl InvitationService {
filtered.sort_by(|a, b| b.created_at.cmp(&a.created_at)); filtered.sort_by(|a, b| b.created_at.cmp(&a.created_at));
let total = filtered.len() as u32; let total = filtered.len() as u32;
let total_pages = (total + per_page - 1) / per_page; let total_pages = total.div_ceil(per_page);
let start = ((page - 1) * per_page) as usize; let start = ((page - 1) * per_page) as usize;
let end = (start + per_page as usize).min(filtered.len()); let end = (start + per_page as usize).min(filtered.len());
@ -507,7 +522,11 @@ impl InvitationService {
None None
} }
fn to_response(&self, invitation: &OrganizationInvitation, org_name: &str) -> InvitationResponse { fn to_response(
&self,
invitation: &OrganizationInvitation,
org_name: &str,
) -> InvitationResponse {
let now = Utc::now(); let now = Utc::now();
InvitationResponse { InvitationResponse {
id: invitation.id, id: invitation.id,
@ -586,11 +605,11 @@ impl InvitationService {
pub fn configure() -> Router<Arc<AppState>> { pub fn configure() -> Router<Arc<AppState>> {
Router::new() Router::new()
.route("/organizations/:org_id/invitations", get(list_invitations)) .route("/organizations/:org_id/invitations", get(list_invitations))
.route("/organizations/:org_id/invitations", post(create_invitation))
.route( .route(
"/organizations/:org_id/invitations/bulk", "/organizations/:org_id/invitations",
post(bulk_invite), post(create_invitation),
) )
.route("/organizations/:org_id/invitations/bulk", post(bulk_invite))
.route( .route(
"/organizations/:org_id/invitations/:invitation_id", "/organizations/:org_id/invitations/:invitation_id",
get(get_invitation), get(get_invitation),
@ -641,29 +660,29 @@ async fn create_invitation(
) -> Result<Json<InvitationResponse>, (StatusCode, Json<serde_json::Value>)> { ) -> Result<Json<InvitationResponse>, (StatusCode, Json<serde_json::Value>)> {
let service = InvitationService::new(); let service = InvitationService::new();
let role = InvitationRole::from_str(&req.role).ok_or_else(|| { let role: InvitationRole = req.role.parse().map_err(|_| {
( (
StatusCode::BAD_REQUEST, StatusCode::BAD_REQUEST,
Json(serde_json::json!({"error": "Invalid role"})), Json(serde_json::json!({"error": "Invalid role"})),
) )
})?; })?;
let expires_in_days = req.expires_in_days.unwrap_or(7).max(1).min(30); let expires_in_days = req.expires_in_days.unwrap_or(7).clamp(1, 30);
let invited_by = Uuid::new_v4(); let invited_by = Uuid::new_v4();
match service match service
.create_invitation( .create_invitation(CreateInvitationParams {
org_id, organization_id: org_id,
"Organization", organization_name: "Organization",
&req.email, email: &req.email,
role, role,
req.groups, groups: req.groups,
invited_by, invited_by,
"Admin User", invited_by_name: "Admin User",
req.message, message: req.message,
expires_in_days, expires_in_days,
) })
.await .await
{ {
Ok(invitation) => Ok(Json(service.to_response(&invitation, "Organization"))), Ok(invitation) => Ok(Json(service.to_response(&invitation, "Organization"))),
@ -681,7 +700,7 @@ async fn bulk_invite(
) -> Result<Json<BulkInviteResponse>, (StatusCode, Json<serde_json::Value>)> { ) -> Result<Json<BulkInviteResponse>, (StatusCode, Json<serde_json::Value>)> {
let service = InvitationService::new(); let service = InvitationService::new();
let role = InvitationRole::from_str(&req.role).ok_or_else(|| { let role = req.role.parse::<InvitationRole>().map_err(|_| {
( (
StatusCode::BAD_REQUEST, StatusCode::BAD_REQUEST,
Json(serde_json::json!({"error": "Invalid role"})), Json(serde_json::json!({"error": "Invalid role"})),
@ -705,16 +724,16 @@ async fn bulk_invite(
let invited_by = Uuid::new_v4(); let invited_by = Uuid::new_v4();
let response = service let response = service
.bulk_invite( .bulk_invite(BulkInviteParams {
org_id, organization_id: org_id,
"Organization", organization_name: "Organization",
req.emails, emails: req.emails,
role, role,
req.groups, groups: req.groups,
invited_by, invited_by,
"Admin User", invited_by_name: "Admin User",
req.message, message: req.message,
) })
.await; .await;
Ok(Json(response)) Ok(Json(response))
@ -748,7 +767,9 @@ async fn revoke_invitation(
let service = InvitationService::new(); let service = InvitationService::new();
match service.revoke_invitation(invitation_id).await { match service.revoke_invitation(invitation_id).await {
Ok(()) => Ok(Json(serde_json::json!({"success": true, "message": "Invitation revoked"}))), Ok(()) => Ok(Json(
serde_json::json!({"success": true, "message": "Invitation revoked"}),
)),
Err(error) => Err(( Err(error) => Err((
StatusCode::BAD_REQUEST, StatusCode::BAD_REQUEST,
Json(serde_json::json!({"error": error})), Json(serde_json::json!({"error": error})),
@ -801,7 +822,9 @@ async fn decline_invitation(
let service = InvitationService::new(); let service = InvitationService::new();
match service.decline_invitation(&req.token).await { match service.decline_invitation(&req.token).await {
Ok(()) => Ok(Json(serde_json::json!({"success": true, "message": "Invitation declined"}))), Ok(()) => Ok(Json(
serde_json::json!({"success": true, "message": "Invitation declined"}),
)),
Err(error) => Err(( Err(error) => Err((
StatusCode::BAD_REQUEST, StatusCode::BAD_REQUEST,
Json(serde_json::json!({"error": error})), Json(serde_json::json!({"error": error})),
@ -935,13 +958,12 @@ mod tests {
.await .await
.unwrap(); .unwrap();
let result = service let result = service.accept_invitation(&invitation.token, user_id).await;
.accept_invitation(&invitation.token, user_id)
.await;
assert!(result.is_ok()); assert!(result.is_ok());
let accepted = result.unwrap(); result.unwrap();
assert_eq!(accepted.status, InvitationStatus::Accepted); let updated = service.get_invitation(invitation.id).await.unwrap();
assert!(accepted.accepted_at.is_some()); assert_eq!(updated.status, InvitationStatus::Accepted);
assert!(updated.accepted_at.is_some());
} }
} }

View file

@ -239,11 +239,13 @@ pub struct PolicyPrincipals {
pub resource_owner: bool, pub resource_owner: bool,
} }
type UserRolesMap = HashMap<(Uuid, Uuid), Vec<Uuid>>;
pub struct OrganizationRbacService { pub struct OrganizationRbacService {
roles: Arc<RwLock<HashMap<Uuid, OrganizationRole>>>, roles: Arc<RwLock<HashMap<Uuid, OrganizationRole>>>,
groups: Arc<RwLock<HashMap<Uuid, OrganizationGroup>>>, groups: Arc<RwLock<HashMap<Uuid, OrganizationGroup>>>,
policies: Arc<RwLock<HashMap<Uuid, ResourcePolicy>>>, policies: Arc<RwLock<HashMap<Uuid, ResourcePolicy>>>,
user_roles: Arc<RwLock<HashMap<(Uuid, Uuid), Vec<Uuid>>>>, user_roles: Arc<RwLock<UserRolesMap>>,
audit_log: Arc<RwLock<Vec<AccessAuditEntry>>>, audit_log: Arc<RwLock<Vec<AccessAuditEntry>>>,
} }
@ -261,6 +263,12 @@ pub struct AccessAuditEntry {
pub user_agent: Option<String>, pub user_agent: Option<String>,
} }
impl Default for OrganizationRbacService {
fn default() -> Self {
Self::new()
}
}
impl OrganizationRbacService { impl OrganizationRbacService {
pub fn new() -> Self { pub fn new() -> Self {
Self { Self {

View file

@ -74,10 +74,10 @@ fn get_llama_cpp_url() -> Option<String> {
} }
info!("Using standard Ubuntu x64 build (CPU)"); info!("Using standard Ubuntu x64 build (CPU)");
return Some(format!( Some(format!(
"{}/llama-{}-bin-ubuntu-x64.zip", "{}/llama-{}-bin-ubuntu-x64.zip",
base_url, LLAMA_CPP_VERSION base_url, LLAMA_CPP_VERSION
)); ))
} }
#[cfg(target_arch = "s390x")] #[cfg(target_arch = "s390x")]
@ -1155,9 +1155,9 @@ EOF"#.to_string(),
component.name component.name
); );
SafeCommand::noop_child() SafeCommand::noop_child()
.map_err(|e| anyhow::anyhow!("Failed to create noop process: {}", e).into()) .map_err(|e| anyhow::anyhow!("Failed to create noop process: {}", e))
} else { } else {
Err(e.into()) Err(e)
} }
} }
} }

View file

@ -57,6 +57,16 @@ pub struct DefaultUser {
pub last_name: String, pub last_name: String,
} }
pub struct CreateUserParams<'a> {
pub org_id: &'a str,
pub username: &'a str,
pub email: &'a str,
pub password: &'a str,
pub first_name: &'a str,
pub last_name: &'a str,
pub is_admin: bool,
}
#[derive(Debug, Serialize, Deserialize)] #[derive(Debug, Serialize, Deserialize)]
pub struct DirectoryConfig { pub struct DirectoryConfig {
pub base_url: String, pub base_url: String,
@ -220,13 +230,7 @@ impl DirectorySetup {
pub async fn create_user( pub async fn create_user(
&mut self, &mut self,
org_id: &str, params: CreateUserParams<'_>,
username: &str,
email: &str,
password: &str,
first_name: &str,
last_name: &str,
is_admin: bool,
) -> Result<DefaultUser> { ) -> Result<DefaultUser> {
self.ensure_admin_token()?; self.ensure_admin_token()?;
@ -235,19 +239,19 @@ impl DirectorySetup {
.post(format!("{}/management/v1/users/human", self.base_url)) .post(format!("{}/management/v1/users/human", self.base_url))
.bearer_auth(self.admin_token.as_ref().unwrap_or(&String::new())) .bearer_auth(self.admin_token.as_ref().unwrap_or(&String::new()))
.json(&json!({ .json(&json!({
"userName": username, "userName": params.username,
"profile": { "profile": {
"firstName": first_name, "firstName": params.first_name,
"lastName": last_name, "lastName": params.last_name,
"displayName": format!("{} {}", first_name, last_name) "displayName": format!("{} {}", params.first_name, params.last_name)
}, },
"email": { "email": {
"email": email, "email": params.email,
"isEmailVerified": true "isEmailVerified": true
}, },
"password": password, "password": params.password,
"organisation": { "organisation": {
"orgId": org_id "orgId": params.org_id
} }
})) }))
.send() .send()
@ -262,15 +266,15 @@ impl DirectorySetup {
let user = DefaultUser { let user = DefaultUser {
id: result["userId"].as_str().unwrap_or("").to_string(), id: result["userId"].as_str().unwrap_or("").to_string(),
username: username.to_string(), username: params.username.to_string(),
email: email.to_string(), email: params.email.to_string(),
password: password.to_string(), password: params.password.to_string(),
first_name: first_name.to_string(), first_name: params.first_name.to_string(),
last_name: last_name.to_string(), last_name: params.last_name.to_string(),
}; };
if is_admin { if params.is_admin {
self.grant_user_permissions(org_id, &user.id).await?; self.grant_user_permissions(params.org_id, &user.id).await?;
} }
Ok(user) Ok(user)

View file

@ -2,6 +2,6 @@ pub mod directory_setup;
pub mod email_setup; pub mod email_setup;
pub mod vector_db_setup; pub mod vector_db_setup;
pub use directory_setup::{DirectorySetup, DefaultUser}; pub use directory_setup::{DirectorySetup, DefaultUser, CreateUserParams};
pub use email_setup::EmailSetup; pub use email_setup::EmailSetup;
pub use vector_db_setup::VectorDbSetup; pub use vector_db_setup::VectorDbSetup;

View file

@ -30,7 +30,7 @@ impl VectorDbSetup {
} }
} }
pub fn generate_qdrant_config(data_dir: &PathBuf, cert_dir: &PathBuf) -> String { pub fn generate_qdrant_config(data_dir: &std::path::Path, cert_dir: &std::path::Path) -> String {
let data_path = data_dir.to_string_lossy(); let data_path = data_dir.to_string_lossy();
let cert_path = cert_dir.join("server.crt").to_string_lossy().to_string(); let cert_path = cert_dir.join("server.crt").to_string_lossy().to_string();
let key_path = cert_dir.join("server.key").to_string_lossy().to_string(); let key_path = cert_dir.join("server.key").to_string_lossy().to_string();

View file

@ -734,10 +734,12 @@ pub struct ConnectionPoolMetrics {
pub pool_utilization: f64, pub pool_utilization: f64,
} }
type BatchProcessorFunc<T> = Arc<dyn Fn(Vec<T>) -> std::pin::Pin<Box<dyn std::future::Future<Output = ()> + Send>> + Send + Sync>;
pub struct BatchProcessor<T> { pub struct BatchProcessor<T> {
batch_size: usize, batch_size: usize,
buffer: Arc<RwLock<Vec<T>>>, buffer: Arc<RwLock<Vec<T>>>,
processor: Arc<dyn Fn(Vec<T>) -> std::pin::Pin<Box<dyn std::future::Future<Output = ()> + Send>> + Send + Sync>, processor: BatchProcessorFunc<T>,
} }
impl<T: Clone + Send + Sync + 'static> BatchProcessor<T> { impl<T: Clone + Send + Sync + 'static> BatchProcessor<T> {

View file

@ -275,9 +275,7 @@ impl AnonymousSessionManager {
let sessions = self.sessions.read().await; let sessions = self.sessions.read().await;
let session = sessions.get(&session_id)?; let session = sessions.get(&session_id)?;
if session.upgraded_to_user_id.is_none() { session.upgraded_to_user_id?;
return None;
}
let messages = self.messages.read().await; let messages = self.messages.read().await;
messages.get(&session_id).cloned() messages.get(&session_id).cloned()
@ -365,7 +363,7 @@ impl AnonymousSessionManager {
let mut sessions = self.sessions.write().await; let mut sessions = self.sessions.write().await;
if let Some(session) = sessions.get_mut(&session_id) { if let Some(session) = sessions.get_mut(&session_id) {
if session.is_active { if session.is_active {
session.expires_at = session.expires_at + Duration::minutes(additional_minutes); session.expires_at += Duration::minutes(additional_minutes);
return true; return true;
} }
} }

View file

@ -990,26 +990,7 @@ mod tests {
// Tests // Tests
#[test]
fn test_admin_user() {
let user = admin_user();
assert_eq!(user.role, Role::Admin);
assert_eq!(user.email, "admin@test.com");
}
#[test]
fn test_customer_factory() {
let c = customer("+15559876543");
assert_eq!(c.phone, Some("+15559876543".to_string()));
assert_eq!(c.channel, Channel::WhatsApp);
}
#[test]
fn test_bot_with_kb() {
let bot = bot_with_kb("kb-bot");
assert!(bot.kb_enabled);
assert!(bot.llm_enabled);
}
#[test] #[test]
fn test_session_for() { fn test_session_for() {

View file

@ -26,7 +26,9 @@ use std::io::Write;
#[diesel(sql_type = SmallInt)] #[diesel(sql_type = SmallInt)]
#[serde(rename_all = "snake_case")] #[serde(rename_all = "snake_case")]
#[repr(i16)] #[repr(i16)]
#[derive(Default)]
pub enum ChannelType { pub enum ChannelType {
#[default]
Web = 0, Web = 0,
WhatsApp = 1, WhatsApp = 1,
Telegram = 2, Telegram = 2,
@ -39,11 +41,6 @@ pub enum ChannelType {
Api = 9, Api = 9,
} }
impl Default for ChannelType {
fn default() -> Self {
Self::Web
}
}
impl ToSql<SmallInt, Pg> for ChannelType { impl ToSql<SmallInt, Pg> for ChannelType {
fn to_sql<'b>(&'b self, out: &mut Output<'b, '_, Pg>) -> serialize::Result { fn to_sql<'b>(&'b self, out: &mut Output<'b, '_, Pg>) -> serialize::Result {
@ -117,7 +114,9 @@ impl std::str::FromStr for ChannelType {
#[diesel(sql_type = SmallInt)] #[diesel(sql_type = SmallInt)]
#[serde(rename_all = "snake_case")] #[serde(rename_all = "snake_case")]
#[repr(i16)] #[repr(i16)]
#[derive(Default)]
pub enum MessageRole { pub enum MessageRole {
#[default]
User = 1, User = 1,
Assistant = 2, Assistant = 2,
System = 3, System = 3,
@ -126,11 +125,6 @@ pub enum MessageRole {
Compact = 10, Compact = 10,
} }
impl Default for MessageRole {
fn default() -> Self {
Self::User
}
}
impl ToSql<SmallInt, Pg> for MessageRole { impl ToSql<SmallInt, Pg> for MessageRole {
fn to_sql<'b>(&'b self, out: &mut Output<'b, '_, Pg>) -> serialize::Result { fn to_sql<'b>(&'b self, out: &mut Output<'b, '_, Pg>) -> serialize::Result {
@ -192,7 +186,9 @@ impl std::str::FromStr for MessageRole {
#[diesel(sql_type = SmallInt)] #[diesel(sql_type = SmallInt)]
#[serde(rename_all = "snake_case")] #[serde(rename_all = "snake_case")]
#[repr(i16)] #[repr(i16)]
#[derive(Default)]
pub enum MessageType { pub enum MessageType {
#[default]
Text = 0, Text = 0,
Image = 1, Image = 1,
Audio = 2, Audio = 2,
@ -204,11 +200,6 @@ pub enum MessageType {
Reaction = 8, Reaction = 8,
} }
impl Default for MessageType {
fn default() -> Self {
Self::Text
}
}
impl ToSql<SmallInt, Pg> for MessageType { impl ToSql<SmallInt, Pg> for MessageType {
fn to_sql<'b>(&'b self, out: &mut Output<'b, '_, Pg>) -> serialize::Result { fn to_sql<'b>(&'b self, out: &mut Output<'b, '_, Pg>) -> serialize::Result {
@ -261,7 +252,9 @@ impl std::fmt::Display for MessageType {
#[diesel(sql_type = SmallInt)] #[diesel(sql_type = SmallInt)]
#[serde(rename_all = "snake_case")] #[serde(rename_all = "snake_case")]
#[repr(i16)] #[repr(i16)]
#[derive(Default)]
pub enum LlmProvider { pub enum LlmProvider {
#[default]
OpenAi = 0, OpenAi = 0,
Anthropic = 1, Anthropic = 1,
AzureOpenAi = 2, AzureOpenAi = 2,
@ -274,11 +267,6 @@ pub enum LlmProvider {
Cohere = 9, Cohere = 9,
} }
impl Default for LlmProvider {
fn default() -> Self {
Self::OpenAi
}
}
impl ToSql<SmallInt, Pg> for LlmProvider { impl ToSql<SmallInt, Pg> for LlmProvider {
fn to_sql<'b>(&'b self, out: &mut Output<'b, '_, Pg>) -> serialize::Result { fn to_sql<'b>(&'b self, out: &mut Output<'b, '_, Pg>) -> serialize::Result {
@ -333,8 +321,10 @@ impl std::fmt::Display for LlmProvider {
#[diesel(sql_type = SmallInt)] #[diesel(sql_type = SmallInt)]
#[serde(rename_all = "snake_case")] #[serde(rename_all = "snake_case")]
#[repr(i16)] #[repr(i16)]
#[derive(Default)]
pub enum ContextProvider { pub enum ContextProvider {
None = 0, None = 0,
#[default]
Qdrant = 1, Qdrant = 1,
Pinecone = 2, Pinecone = 2,
Weaviate = 3, Weaviate = 3,
@ -343,11 +333,6 @@ pub enum ContextProvider {
Elasticsearch = 6, Elasticsearch = 6,
} }
impl Default for ContextProvider {
fn default() -> Self {
Self::Qdrant
}
}
impl ToSql<SmallInt, Pg> for ContextProvider { impl ToSql<SmallInt, Pg> for ContextProvider {
fn to_sql<'b>(&'b self, out: &mut Output<'b, '_, Pg>) -> serialize::Result { fn to_sql<'b>(&'b self, out: &mut Output<'b, '_, Pg>) -> serialize::Result {
@ -382,7 +367,9 @@ impl FromSql<SmallInt, Pg> for ContextProvider {
#[diesel(sql_type = SmallInt)] #[diesel(sql_type = SmallInt)]
#[serde(rename_all = "snake_case")] #[serde(rename_all = "snake_case")]
#[repr(i16)] #[repr(i16)]
#[derive(Default)]
pub enum TaskStatus { pub enum TaskStatus {
#[default]
Pending = 0, Pending = 0,
Ready = 1, Ready = 1,
Running = 2, Running = 2,
@ -393,11 +380,6 @@ pub enum TaskStatus {
Cancelled = 7, Cancelled = 7,
} }
impl Default for TaskStatus {
fn default() -> Self {
Self::Pending
}
}
impl ToSql<SmallInt, Pg> for TaskStatus { impl ToSql<SmallInt, Pg> for TaskStatus {
fn to_sql<'b>(&'b self, out: &mut Output<'b, '_, Pg>) -> serialize::Result { fn to_sql<'b>(&'b self, out: &mut Output<'b, '_, Pg>) -> serialize::Result {
@ -465,19 +447,16 @@ impl std::str::FromStr for TaskStatus {
#[diesel(sql_type = SmallInt)] #[diesel(sql_type = SmallInt)]
#[serde(rename_all = "snake_case")] #[serde(rename_all = "snake_case")]
#[repr(i16)] #[repr(i16)]
#[derive(Default)]
pub enum TaskPriority { pub enum TaskPriority {
Low = 0, Low = 0,
#[default]
Normal = 1, Normal = 1,
High = 2, High = 2,
Urgent = 3, Urgent = 3,
Critical = 4, Critical = 4,
} }
impl Default for TaskPriority {
fn default() -> Self {
Self::Normal
}
}
impl ToSql<SmallInt, Pg> for TaskPriority { impl ToSql<SmallInt, Pg> for TaskPriority {
fn to_sql<'b>(&'b self, out: &mut Output<'b, '_, Pg>) -> serialize::Result { fn to_sql<'b>(&'b self, out: &mut Output<'b, '_, Pg>) -> serialize::Result {
@ -536,17 +515,14 @@ impl std::str::FromStr for TaskPriority {
#[diesel(sql_type = SmallInt)] #[diesel(sql_type = SmallInt)]
#[serde(rename_all = "snake_case")] #[serde(rename_all = "snake_case")]
#[repr(i16)] #[repr(i16)]
#[derive(Default)]
pub enum ExecutionMode { pub enum ExecutionMode {
Manual = 0, Manual = 0,
#[default]
Supervised = 1, Supervised = 1,
Autonomous = 2, Autonomous = 2,
} }
impl Default for ExecutionMode {
fn default() -> Self {
Self::Supervised
}
}
impl ToSql<SmallInt, Pg> for ExecutionMode { impl ToSql<SmallInt, Pg> for ExecutionMode {
fn to_sql<'b>(&'b self, out: &mut Output<'b, '_, Pg>) -> serialize::Result { fn to_sql<'b>(&'b self, out: &mut Output<'b, '_, Pg>) -> serialize::Result {
@ -587,19 +563,16 @@ impl std::fmt::Display for ExecutionMode {
#[diesel(sql_type = SmallInt)] #[diesel(sql_type = SmallInt)]
#[serde(rename_all = "snake_case")] #[serde(rename_all = "snake_case")]
#[repr(i16)] #[repr(i16)]
#[derive(Default)]
pub enum RiskLevel { pub enum RiskLevel {
None = 0, None = 0,
#[default]
Low = 1, Low = 1,
Medium = 2, Medium = 2,
High = 3, High = 3,
Critical = 4, Critical = 4,
} }
impl Default for RiskLevel {
fn default() -> Self {
Self::Low
}
}
impl ToSql<SmallInt, Pg> for RiskLevel { impl ToSql<SmallInt, Pg> for RiskLevel {
fn to_sql<'b>(&'b self, out: &mut Output<'b, '_, Pg>) -> serialize::Result { fn to_sql<'b>(&'b self, out: &mut Output<'b, '_, Pg>) -> serialize::Result {
@ -644,7 +617,9 @@ impl std::fmt::Display for RiskLevel {
#[diesel(sql_type = SmallInt)] #[diesel(sql_type = SmallInt)]
#[serde(rename_all = "snake_case")] #[serde(rename_all = "snake_case")]
#[repr(i16)] #[repr(i16)]
#[derive(Default)]
pub enum ApprovalStatus { pub enum ApprovalStatus {
#[default]
Pending = 0, Pending = 0,
Approved = 1, Approved = 1,
Rejected = 2, Rejected = 2,
@ -652,11 +627,6 @@ pub enum ApprovalStatus {
Skipped = 4, Skipped = 4,
} }
impl Default for ApprovalStatus {
fn default() -> Self {
Self::Pending
}
}
impl ToSql<SmallInt, Pg> for ApprovalStatus { impl ToSql<SmallInt, Pg> for ApprovalStatus {
fn to_sql<'b>(&'b self, out: &mut Output<'b, '_, Pg>) -> serialize::Result { fn to_sql<'b>(&'b self, out: &mut Output<'b, '_, Pg>) -> serialize::Result {
@ -746,7 +716,9 @@ impl std::fmt::Display for ApprovalDecision {
#[diesel(sql_type = SmallInt)] #[diesel(sql_type = SmallInt)]
#[serde(rename_all = "SCREAMING_SNAKE_CASE")] #[serde(rename_all = "SCREAMING_SNAKE_CASE")]
#[repr(i16)] #[repr(i16)]
#[derive(Default)]
pub enum IntentType { pub enum IntentType {
#[default]
Unknown = 0, Unknown = 0,
AppCreate = 1, AppCreate = 1,
Todo = 2, Todo = 2,
@ -758,11 +730,6 @@ pub enum IntentType {
Query = 8, Query = 8,
} }
impl Default for IntentType {
fn default() -> Self {
Self::Unknown
}
}
impl ToSql<SmallInt, Pg> for IntentType { impl ToSql<SmallInt, Pg> for IntentType {
fn to_sql<'b>(&'b self, out: &mut Output<'b, '_, Pg>) -> serialize::Result { fn to_sql<'b>(&'b self, out: &mut Output<'b, '_, Pg>) -> serialize::Result {

View file

@ -438,7 +438,7 @@ tokio::spawn(async move {
); );
// Log jemalloc stats every 5 ticks if available // Log jemalloc stats every 5 ticks if available
if tick_count % 5 == 0 { if tick_count.is_multiple_of(5) {
log_jemalloc_stats(); log_jemalloc_stats();
} }

View file

@ -1,3 +1,5 @@
use crate::core::shared::schema::core::{organizations, bots};
diesel::table! { diesel::table! {
dashboards (id) { dashboards (id) {
id -> Uuid, id -> Uuid,

View file

@ -1,3 +1,5 @@
use crate::core::shared::schema::core::{organizations, bots};
diesel::table! { diesel::table! {
attendant_queues (id) { attendant_queues (id) {
id -> Uuid, id -> Uuid,

View file

@ -1,3 +1,5 @@
use crate::core::shared::schema::core::{organizations, bots};
diesel::table! { diesel::table! {
billing_invoices (id) { billing_invoices (id) {
id -> Uuid, id -> Uuid,

View file

@ -1,3 +1,5 @@
use crate::core::shared::schema::core::{organizations, bots};
diesel::table! { diesel::table! {
calendars (id) { calendars (id) {
id -> Uuid, id -> Uuid,

View file

@ -1,3 +1,5 @@
use crate::core::shared::schema::core::{organizations, bots};
diesel::table! { diesel::table! {
canvases (id) { canvases (id) {
id -> Uuid, id -> Uuid,

View file

@ -1,3 +1,5 @@
use crate::core::shared::schema::core::{organizations, bots};
diesel::table! { diesel::table! {
legal_documents (id) { legal_documents (id) {
id -> Uuid, id -> Uuid,

View file

@ -0,0 +1,95 @@
use diesel::prelude::*;
table! {
dashboards (id) {
id -> Uuid,
org_id -> Uuid,
bot_id -> Uuid,
owner_id -> Uuid,
name -> Text,
description -> Nullable<Text>,
layout -> Jsonb,
refresh_interval -> Nullable<Int4>,
is_public -> Bool,
is_template -> Bool,
tags -> Array<Text>,
created_at -> Timestamptz,
updated_at -> Timestamptz,
}
}
table! {
dashboard_widgets (id) {
id -> Uuid,
dashboard_id -> Uuid,
widget_type -> Text,
title -> Text,
position_x -> Int4,
position_y -> Int4,
width -> Int4,
height -> Int4,
config -> Jsonb,
data_query -> Nullable<Jsonb>,
style -> Jsonb,
created_at -> Timestamptz,
updated_at -> Timestamptz,
}
}
table! {
dashboard_data_sources (id) {
id -> Uuid,
org_id -> Uuid,
bot_id -> Uuid,
name -> Text,
description -> Nullable<Text>,
source_type -> Text,
connection -> Jsonb,
schema_definition -> Jsonb,
refresh_schedule -> Nullable<Text>,
last_sync -> Nullable<Timestamptz>,
status -> Text,
created_at -> Timestamptz,
updated_at -> Timestamptz,
}
}
table! {
dashboard_filters (id) {
id -> Uuid,
dashboard_id -> Uuid,
name -> Text,
field -> Text,
filter_type -> Text,
default_value -> Nullable<Jsonb>,
options -> Jsonb,
linked_widgets -> Jsonb,
created_at -> Timestamptz,
}
}
table! {
conversational_queries (id) {
id -> Uuid,
org_id -> Uuid,
bot_id -> Uuid,
dashboard_id -> Nullable<Uuid>,
user_id -> Uuid,
natural_language -> Text,
generated_query -> Nullable<Text>,
result_widget_config -> Nullable<Jsonb>,
created_at -> Timestamptz,
}
}
joinable!(dashboard_widgets -> dashboards (dashboard_id));
joinable!(dashboard_filters -> dashboards (dashboard_id));
joinable!(conversational_queries -> dashboards (dashboard_id));
allow_tables_to_appear_in_same_query!(
dashboards,
dashboard_widgets,
dashboard_data_sources,
dashboard_filters,
conversational_queries,
);

View file

@ -1,3 +1,5 @@
use crate::core::shared::schema::core::{organizations, bots};
diesel::table! { diesel::table! {
okr_objectives (id) { okr_objectives (id) {
id -> Uuid, id -> Uuid,

View file

@ -1,3 +1,5 @@
// use crate::core::shared::schema::core::{organizations, bots};
use diesel::prelude::*; use diesel::prelude::*;
diesel::table! { diesel::table! {

View file

@ -1,3 +1,5 @@
// use crate::core::shared::schema::core::{organizations, bots};
diesel::table! { diesel::table! {
global_email_signatures (id) { global_email_signatures (id) {
id -> Uuid, id -> Uuid,

View file

@ -1,3 +1,5 @@
use crate::core::shared::schema::core::{organizations, bots};
diesel::table! { diesel::table! {
meeting_rooms (id) { meeting_rooms (id) {
id -> Uuid, id -> Uuid,

View file

@ -83,4 +83,11 @@ pub use self::learn::*;
#[cfg(feature = "project")] #[cfg(feature = "project")]
pub mod project; pub mod project;
#[cfg(feature = "project")] #[cfg(feature = "project")]
#[cfg(feature = "project")]
pub use self::project::*; pub use self::project::*;
#[cfg(feature = "dashboards")]
pub mod dashboards;
#[cfg(feature = "dashboards")]
pub use self::dashboards::*;

View file

@ -1,3 +1,5 @@
use crate::core::shared::schema::core::{organizations, bots};
diesel::table! { diesel::table! {
crm_contacts (id) { crm_contacts (id) {
id -> Uuid, id -> Uuid,

View file

@ -1,3 +1,5 @@
// use crate::core::shared::schema::core::{organizations, bots};
use diesel::prelude::*; use diesel::prelude::*;
diesel::table! { diesel::table! {

View file

@ -1,3 +1,5 @@
use crate::core::shared::schema::core::{organizations, bots};
diesel::table! { diesel::table! {
kb_documents (id) { kb_documents (id) {
id -> Uuid, id -> Uuid,

View file

@ -1,3 +1,5 @@
use crate::core::shared::schema::core::{organizations, bots};
diesel::table! { diesel::table! {
social_communities (id) { social_communities (id) {
id -> Uuid, id -> Uuid,

View file

@ -1,3 +1,5 @@
// use crate::core::shared::schema::core::{organizations, bots};
diesel::table! { diesel::table! {
tasks (id) { tasks (id) {
id -> Uuid, id -> Uuid,

View file

@ -1,3 +1,5 @@
// use crate::core::shared::schema::core::{organizations, bots};
diesel::table! { diesel::table! {
support_tickets (id) { support_tickets (id) {
id -> Uuid, id -> Uuid,

View file

@ -1,3 +1,5 @@
use crate::core::shared::schema::core::{organizations, bots};
diesel::table! { diesel::table! {
workspaces (id) { workspaces (id) {
id -> Uuid, id -> Uuid,
@ -131,3 +133,14 @@ diesel::joinable!(workspace_comments -> workspace_pages (page_id));
diesel::joinable!(workspace_comment_reactions -> workspace_comments (comment_id)); diesel::joinable!(workspace_comment_reactions -> workspace_comments (comment_id));
diesel::joinable!(workspace_templates -> organizations (org_id)); diesel::joinable!(workspace_templates -> organizations (org_id));
diesel::joinable!(workspace_templates -> bots (bot_id)); diesel::joinable!(workspace_templates -> bots (bot_id));
diesel::allow_tables_to_appear_in_same_query!(
workspaces,
workspace_members,
workspace_pages,
workspace_page_versions,
workspace_page_permissions,
workspace_comments,
workspace_comment_reactions,
workspace_templates,
);

View file

@ -2,6 +2,7 @@ use crate::auto_task::TaskManifest;
use crate::core::bot::channels::{ChannelAdapter, VoiceAdapter, WebChannelAdapter}; use crate::core::bot::channels::{ChannelAdapter, VoiceAdapter, WebChannelAdapter};
use crate::core::bot_database::BotDatabaseManager; use crate::core::bot_database::BotDatabaseManager;
use crate::core::config::AppConfig; use crate::core::config::AppConfig;
#[cfg(any(feature = "research", feature = "llm"))]
use crate::core::kb::KnowledgeBaseManager; use crate::core::kb::KnowledgeBaseManager;
use crate::core::session::SessionManager; use crate::core::session::SessionManager;
use crate::core::shared::analytics::MetricsCollector; use crate::core::shared::analytics::MetricsCollector;
@ -365,6 +366,7 @@ pub struct AppState {
pub response_channels: Arc<tokio::sync::Mutex<HashMap<String, mpsc::Sender<BotResponse>>>>, pub response_channels: Arc<tokio::sync::Mutex<HashMap<String, mpsc::Sender<BotResponse>>>>,
pub web_adapter: Arc<WebChannelAdapter>, pub web_adapter: Arc<WebChannelAdapter>,
pub voice_adapter: Arc<VoiceAdapter>, pub voice_adapter: Arc<VoiceAdapter>,
#[cfg(any(feature = "research", feature = "llm"))]
pub kb_manager: Option<Arc<KnowledgeBaseManager>>, pub kb_manager: Option<Arc<KnowledgeBaseManager>>,
#[cfg(feature = "tasks")] #[cfg(feature = "tasks")]
pub task_engine: Arc<TaskEngine>, pub task_engine: Arc<TaskEngine>,
@ -404,6 +406,7 @@ impl Clone for AppState {
llm_provider: Arc::clone(&self.llm_provider), llm_provider: Arc::clone(&self.llm_provider),
#[cfg(feature = "directory")] #[cfg(feature = "directory")]
auth_service: Arc::clone(&self.auth_service), auth_service: Arc::clone(&self.auth_service),
#[cfg(any(feature = "research", feature = "llm"))]
kb_manager: self.kb_manager.clone(), kb_manager: self.kb_manager.clone(),
channels: Arc::clone(&self.channels), channels: Arc::clone(&self.channels),
response_channels: Arc::clone(&self.response_channels), response_channels: Arc::clone(&self.response_channels),
@ -449,6 +452,10 @@ impl std::fmt::Debug for AppState {
.field("session_manager", &"Arc<Mutex<SessionManager>>") .field("session_manager", &"Arc<Mutex<SessionManager>>")
.field("metrics_collector", &"MetricsCollector"); .field("metrics_collector", &"MetricsCollector");
#[cfg(any(feature = "research", feature = "llm"))]
debug.field("kb_manager", &self.kb_manager.is_some());
#[cfg(feature = "tasks")] #[cfg(feature = "tasks")]
debug.field("task_scheduler", &self.task_scheduler.is_some()); debug.field("task_scheduler", &self.task_scheduler.is_some());
@ -462,8 +469,10 @@ impl std::fmt::Debug for AppState {
.field("channels", &"Arc<Mutex<HashMap>>") .field("channels", &"Arc<Mutex<HashMap>>")
.field("response_channels", &"Arc<Mutex<HashMap>>") .field("response_channels", &"Arc<Mutex<HashMap>>")
.field("web_adapter", &self.web_adapter) .field("web_adapter", &self.web_adapter)
.field("voice_adapter", &self.voice_adapter) .field("voice_adapter", &self.voice_adapter);
.field("kb_manager", &self.kb_manager.is_some());
#[cfg(any(feature = "research", feature = "llm"))]
debug.field("kb_manager", &self.kb_manager.is_some());
#[cfg(feature = "tasks")] #[cfg(feature = "tasks")]
debug.field("task_engine", &"Arc<TaskEngine>"); debug.field("task_engine", &"Arc<TaskEngine>");
@ -617,12 +626,14 @@ impl Default for AppState {
response_channels: Arc::new(tokio::sync::Mutex::new(HashMap::new())), response_channels: Arc::new(tokio::sync::Mutex::new(HashMap::new())),
web_adapter: Arc::new(WebChannelAdapter::new()), web_adapter: Arc::new(WebChannelAdapter::new()),
voice_adapter: Arc::new(VoiceAdapter::new()), voice_adapter: Arc::new(VoiceAdapter::new()),
#[cfg(any(feature = "research", feature = "llm"))]
kb_manager: None, kb_manager: None,
#[cfg(feature = "tasks")] #[cfg(feature = "tasks")]
task_engine: Arc::new(TaskEngine::new(pool)), task_engine: Arc::new(TaskEngine::new(pool)),
extensions: Extensions::new(), extensions: Extensions::new(),
attendant_broadcast: Some(attendant_tx), attendant_broadcast: Some(attendant_tx),
task_progress_broadcast: Some(task_progress_tx), task_progress_broadcast: Some(task_progress_tx),
billing_alert_broadcast: None,
task_manifests: Arc::new(std::sync::RwLock::new(HashMap::new())), task_manifests: Arc::new(std::sync::RwLock::new(HashMap::new())),
#[cfg(feature = "project")] #[cfg(feature = "project")]
project_service: Arc::new(RwLock::new(crate::project::ProjectService::new())), project_service: Arc::new(RwLock::new(crate::project::ProjectService::new())),

View file

@ -12,6 +12,7 @@ use crate::directory::AuthService;
use crate::llm::LLMProvider; use crate::llm::LLMProvider;
use crate::shared::models::BotResponse; use crate::shared::models::BotResponse;
use crate::shared::utils::{get_database_url_sync, DbPool}; use crate::shared::utils::{get_database_url_sync, DbPool};
#[cfg(feature = "tasks")]
use crate::tasks::TaskEngine; use crate::tasks::TaskEngine;
use async_trait::async_trait; use async_trait::async_trait;
use diesel::r2d2::{ConnectionManager, Pool}; use diesel::r2d2::{ConnectionManager, Pool};
@ -194,6 +195,7 @@ impl TestAppStateBuilder {
Ok(AppState { Ok(AppState {
#[cfg(feature = "drive")] #[cfg(feature = "drive")]
drive: None, drive: None,
#[cfg(feature = "drive")]
s3_client: None, s3_client: None,
#[cfg(feature = "cache")] #[cfg(feature = "cache")]
cache: None, cache: None,
@ -204,6 +206,7 @@ impl TestAppStateBuilder {
bot_database_manager, bot_database_manager,
session_manager: Arc::new(tokio::sync::Mutex::new(session_manager)), session_manager: Arc::new(tokio::sync::Mutex::new(session_manager)),
metrics_collector: MetricsCollector::new(), metrics_collector: MetricsCollector::new(),
#[cfg(feature = "tasks")]
task_scheduler: None, task_scheduler: None,
#[cfg(feature = "llm")] #[cfg(feature = "llm")]
llm_provider: Arc::new(MockLLMProvider::new()), llm_provider: Arc::new(MockLLMProvider::new()),
@ -213,6 +216,7 @@ impl TestAppStateBuilder {
response_channels: Arc::new(tokio::sync::Mutex::new(HashMap::new())), response_channels: Arc::new(tokio::sync::Mutex::new(HashMap::new())),
web_adapter: Arc::new(WebChannelAdapter::new()), web_adapter: Arc::new(WebChannelAdapter::new()),
voice_adapter: Arc::new(VoiceAdapter::new()), voice_adapter: Arc::new(VoiceAdapter::new()),
#[cfg(any(feature = "research", feature = "llm"))]
kb_manager: None, kb_manager: None,
#[cfg(feature = "tasks")] #[cfg(feature = "tasks")]
task_engine: Arc::new(TaskEngine::new(pool)), task_engine: Arc::new(TaskEngine::new(pool)),

View file

@ -451,7 +451,7 @@ pub fn run_migrations_on_conn(conn: &mut diesel::PgConnection) -> Result<(), Box
} }
// Workspaces // Workspaces
#[cfg(feature = "workspace")] #[cfg(feature = "workspaces")]
{ {
const WORKSPACE_MIGRATIONS: EmbeddedMigrations = embed_migrations!("migrations/workspaces"); const WORKSPACE_MIGRATIONS: EmbeddedMigrations = embed_migrations!("migrations/workspaces");
conn.run_pending_migrations(WORKSPACE_MIGRATIONS).map_err(|e| Box::new(std::io::Error::other(format!("Workspace migration error: {}", e))) as Box<dyn std::error::Error + Send + Sync>)?; conn.run_pending_migrations(WORKSPACE_MIGRATIONS).map_err(|e| Box::new(std::io::Error::other(format!("Workspace migration error: {}", e))) as Box<dyn std::error::Error + Send + Sync>)?;

View file

@ -289,6 +289,21 @@ impl ApiUrls {
pub const MONITORING_LOGS: &'static str = "/api/ui/monitoring/logs"; pub const MONITORING_LOGS: &'static str = "/api/ui/monitoring/logs";
pub const MONITORING_LLM: &'static str = "/api/ui/monitoring/llm"; pub const MONITORING_LLM: &'static str = "/api/ui/monitoring/llm";
pub const MONITORING_HEALTH: &'static str = "/api/ui/monitoring/health"; pub const MONITORING_HEALTH: &'static str = "/api/ui/monitoring/health";
pub const MONITORING_ALERTS: &'static str = "/api/monitoring/alerts";
// Monitoring - Metrics & Widgets
pub const MONITORING_TIMESTAMP: &'static str = "/api/ui/monitoring/timestamp";
pub const MONITORING_BOTS: &'static str = "/api/ui/monitoring/bots";
pub const MONITORING_SERVICES_STATUS: &'static str = "/api/ui/monitoring/services/status";
pub const MONITORING_RESOURCES_BARS: &'static str = "/api/ui/monitoring/resources/bars";
pub const MONITORING_ACTIVITY_LATEST: &'static str = "/api/ui/monitoring/activity/latest";
pub const MONITORING_METRIC_SESSIONS: &'static str = "/api/ui/monitoring/metric/sessions";
pub const MONITORING_METRIC_MESSAGES: &'static str = "/api/ui/monitoring/metric/messages";
pub const MONITORING_METRIC_RESPONSE_TIME: &'static str = "/api/ui/monitoring/metric/response_time";
pub const MONITORING_TREND_SESSIONS: &'static str = "/api/ui/monitoring/trend/sessions";
pub const MONITORING_RATE_MESSAGES: &'static str = "/api/ui/monitoring/rate/messages";
pub const MONITORING_SESSIONS_PANEL: &'static str = "/api/ui/monitoring/sessions";
pub const MONITORING_MESSAGES_PANEL: &'static str = "/api/ui/monitoring/messages";
// MS Teams - JSON APIs // MS Teams - JSON APIs
pub const MSTEAMS_MESSAGES: &'static str = "/api/msteams/messages"; pub const MSTEAMS_MESSAGES: &'static str = "/api/msteams/messages";

View file

@ -8,7 +8,7 @@ use std::sync::Arc;
use uuid::Uuid; use uuid::Uuid;
use crate::bot::get_default_bot; use crate::bot::get_default_bot;
use crate::core::shared::schema::{dashboard_filters, dashboard_widgets, dashboards}; use crate::core::shared::schema::dashboards::{dashboard_filters, dashboard_widgets, dashboards};
use crate::shared::state::AppState; use crate::shared::state::AppState;
use crate::dashboards::error::DashboardsError; use crate::dashboards::error::DashboardsError;
@ -58,7 +58,7 @@ pub async fn handle_list_dashboards(
.offset(offset) .offset(offset)
.limit(limit) .limit(limit)
.load(&mut conn) .load(&mut conn)
.map_err(|e| DashboardsError::Database(e.to_string()))?; .map_err(|e: diesel::result::Error| DashboardsError::Database(e.to_string()))?;
let mut result_dashboards = Vec::new(); let mut result_dashboards = Vec::new();
for db_dash in db_dashboards { for db_dash in db_dashboards {
@ -79,10 +79,10 @@ pub async fn handle_list_dashboards(
result_dashboards.push(db_dashboard_to_dashboard(db_dash, widgets, filters)); result_dashboards.push(db_dashboard_to_dashboard(db_dash, widgets, filters));
} }
Ok::<_, DashboardsError>(result_dashboards) Ok::<Vec<Dashboard>, DashboardsError>(result_dashboards)
}) })
.await .await
.map_err(|e| DashboardsError::Internal(e.to_string()))??; .map_err(|e: tokio::task::JoinError| DashboardsError::Internal(e.to_string()))??;
Ok(Json(result)) Ok(Json(result))
} }
@ -123,12 +123,12 @@ pub async fn handle_create_dashboard(
diesel::insert_into(dashboards::table) diesel::insert_into(dashboards::table)
.values(&db_dashboard) .values(&db_dashboard)
.execute(&mut conn) .execute(&mut conn)
.map_err(|e| DashboardsError::Database(e.to_string()))?; .map_err(|e: diesel::result::Error| DashboardsError::Database(e.to_string()))?;
Ok::<_, DashboardsError>(db_dashboard_to_dashboard(db_dashboard, vec![], vec![])) Ok::<Dashboard, DashboardsError>(db_dashboard_to_dashboard(db_dashboard, vec![], vec![]))
}) })
.await .await
.map_err(|e| DashboardsError::Internal(e.to_string()))??; .map_err(|e: tokio::task::JoinError| DashboardsError::Internal(e.to_string()))??;
Ok(Json(result)) Ok(Json(result))
} }
@ -148,7 +148,7 @@ pub async fn handle_get_dashboard(
.find(dashboard_id) .find(dashboard_id)
.first(&mut conn) .first(&mut conn)
.optional() .optional()
.map_err(|e| DashboardsError::Database(e.to_string()))?; .map_err(|e: diesel::result::Error| DashboardsError::Database(e.to_string()))?;
match db_dash { match db_dash {
Some(db) => { Some(db) => {
@ -165,13 +165,13 @@ pub async fn handle_get_dashboard(
let filters: Vec<DashboardFilter> = let filters: Vec<DashboardFilter> =
filters_db.into_iter().map(db_filter_to_filter).collect(); filters_db.into_iter().map(db_filter_to_filter).collect();
Ok::<_, DashboardsError>(Some(db_dashboard_to_dashboard(db, widgets, filters))) Ok::<Option<Dashboard>, DashboardsError>(Some(db_dashboard_to_dashboard(db, widgets, filters)))
} }
None => Ok(None), None => Ok(None),
} }
}) })
.await .await
.map_err(|e| DashboardsError::Internal(e.to_string()))??; .map_err(|e: tokio::task::JoinError| DashboardsError::Internal(e.to_string()))??;
Ok(Json(result)) Ok(Json(result))
} }
@ -216,7 +216,7 @@ pub async fn handle_update_dashboard(
diesel::update(dashboards::table.find(dashboard_id)) diesel::update(dashboards::table.find(dashboard_id))
.set(&db_dash) .set(&db_dash)
.execute(&mut conn) .execute(&mut conn)
.map_err(|e| DashboardsError::Database(e.to_string()))?; .map_err(|e: diesel::result::Error| DashboardsError::Database(e.to_string()))?;
let widgets_db: Vec<DbWidget> = dashboard_widgets::table let widgets_db: Vec<DbWidget> = dashboard_widgets::table
.filter(dashboard_widgets::dashboard_id.eq(dashboard_id)) .filter(dashboard_widgets::dashboard_id.eq(dashboard_id))
@ -231,10 +231,10 @@ pub async fn handle_update_dashboard(
let filters: Vec<DashboardFilter> = let filters: Vec<DashboardFilter> =
filters_db.into_iter().map(db_filter_to_filter).collect(); filters_db.into_iter().map(db_filter_to_filter).collect();
Ok::<_, DashboardsError>(db_dashboard_to_dashboard(db_dash, widgets, filters)) Ok::<Dashboard, DashboardsError>(db_dashboard_to_dashboard(db_dash, widgets, filters))
}) })
.await .await
.map_err(|e| DashboardsError::Internal(e.to_string()))??; .map_err(|e: tokio::task::JoinError| DashboardsError::Internal(e.to_string()))??;
Ok(Json(result)) Ok(Json(result))
} }
@ -252,16 +252,16 @@ pub async fn handle_delete_dashboard(
let deleted = diesel::delete(dashboards::table.find(dashboard_id)) let deleted = diesel::delete(dashboards::table.find(dashboard_id))
.execute(&mut conn) .execute(&mut conn)
.map_err(|e| DashboardsError::Database(e.to_string()))?; .map_err(|e: diesel::result::Error| DashboardsError::Database(e.to_string()))?;
if deleted == 0 { if deleted == 0 {
return Err(DashboardsError::NotFound("Dashboard not found".to_string())); return Err(DashboardsError::NotFound("Dashboard not found".to_string()));
} }
Ok::<_, DashboardsError>(()) Ok::<(), DashboardsError>(())
}) })
.await .await
.map_err(|e| DashboardsError::Internal(e.to_string()))??; .map_err(|e: tokio::task::JoinError| DashboardsError::Internal(e.to_string()))??;
Ok(Json(serde_json::json!({ "success": true }))) Ok(Json(serde_json::json!({ "success": true })))
} }
@ -282,17 +282,17 @@ pub async fn handle_get_templates(
.filter(dashboards::is_template.eq(true)) .filter(dashboards::is_template.eq(true))
.order(dashboards::created_at.desc()) .order(dashboards::created_at.desc())
.load(&mut conn) .load(&mut conn)
.map_err(|e| DashboardsError::Database(e.to_string()))?; .map_err(|e: diesel::result::Error| DashboardsError::Database(e.to_string()))?;
let templates: Vec<Dashboard> = db_dashboards let templates: Vec<Dashboard> = db_dashboards
.into_iter() .into_iter()
.map(|db| db_dashboard_to_dashboard(db, vec![], vec![])) .map(|db| db_dashboard_to_dashboard(db, vec![], vec![]))
.collect(); .collect();
Ok::<_, DashboardsError>(templates) Ok::<Vec<Dashboard>, DashboardsError>(templates)
}) })
.await .await
.map_err(|e| DashboardsError::Internal(e.to_string()))??; .map_err(|e: tokio::task::JoinError| DashboardsError::Internal(e.to_string()))??;
Ok(Json(result)) Ok(Json(result))
} }

View file

@ -8,7 +8,7 @@ use std::sync::Arc;
use uuid::Uuid; use uuid::Uuid;
use crate::bot::get_default_bot; use crate::bot::get_default_bot;
use crate::core::shared::schema::{conversational_queries, dashboard_data_sources}; use crate::core::shared::schema::dashboards::{conversational_queries, dashboard_data_sources};
use crate::shared::state::AppState; use crate::shared::state::AppState;
use crate::dashboards::error::DashboardsError; use crate::dashboards::error::DashboardsError;
@ -33,16 +33,16 @@ pub async fn handle_list_data_sources(
.filter(dashboard_data_sources::bot_id.eq(bot_id)) .filter(dashboard_data_sources::bot_id.eq(bot_id))
.order(dashboard_data_sources::created_at.desc()) .order(dashboard_data_sources::created_at.desc())
.load(&mut conn) .load(&mut conn)
.map_err(|e| DashboardsError::Database(e.to_string()))?; .map_err(|e: diesel::result::Error| DashboardsError::Database(e.to_string()))?;
let sources: Vec<DataSource> = db_sources let sources: Vec<DataSource> = db_sources
.into_iter() .into_iter()
.map(db_data_source_to_data_source) .map(db_data_source_to_data_source)
.collect(); .collect();
Ok::<_, DashboardsError>(sources) Ok::<Vec<DataSource>, DashboardsError>(sources)
}) })
.await .await
.map_err(|e| DashboardsError::Internal(e.to_string()))??; .map_err(|e: tokio::task::JoinError| DashboardsError::Internal(e.to_string()))??;
Ok(Json(result)) Ok(Json(result))
} }
@ -80,12 +80,12 @@ pub async fn handle_create_data_source(
diesel::insert_into(dashboard_data_sources::table) diesel::insert_into(dashboard_data_sources::table)
.values(&db_source) .values(&db_source)
.execute(&mut conn) .execute(&mut conn)
.map_err(|e| DashboardsError::Database(e.to_string()))?; .map_err(|e: diesel::result::Error| DashboardsError::Database(e.to_string()))?;
Ok::<_, DashboardsError>(db_data_source_to_data_source(db_source)) Ok::<DataSource, DashboardsError>(db_data_source_to_data_source(db_source))
}) })
.await .await
.map_err(|e| DashboardsError::Internal(e.to_string()))??; .map_err(|e: tokio::task::JoinError| DashboardsError::Internal(e.to_string()))??;
Ok(Json(result)) Ok(Json(result))
} }
@ -120,12 +120,12 @@ pub async fn handle_delete_data_source(
diesel::delete(dashboard_data_sources::table.find(source_id)) diesel::delete(dashboard_data_sources::table.find(source_id))
.execute(&mut conn) .execute(&mut conn)
.map_err(|e| DashboardsError::Database(e.to_string()))?; .map_err(|e: diesel::result::Error| DashboardsError::Database(e.to_string()))?;
Ok::<_, DashboardsError>(()) Ok::<(), DashboardsError>(())
}) })
.await .await
.map_err(|e| DashboardsError::Internal(e.to_string()))??; .map_err(|e: tokio::task::JoinError| DashboardsError::Internal(e.to_string()))??;
Ok(Json(serde_json::json!({ "success": true }))) Ok(Json(serde_json::json!({ "success": true })))
} }
@ -228,7 +228,7 @@ pub async fn handle_conversational_query(
diesel::insert_into(conversational_queries::table) diesel::insert_into(conversational_queries::table)
.values(&db_query) .values(&db_query)
.execute(&mut conn) .execute(&mut conn)
.map_err(|e| DashboardsError::Database(e.to_string()))?; .map_err(|e: diesel::result::Error| DashboardsError::Database(e.to_string()))?;
let (suggested_viz, explanation) = analyze_query_intent(&query_text); let (suggested_viz, explanation) = analyze_query_intent(&query_text);
@ -242,7 +242,7 @@ pub async fn handle_conversational_query(
created_at: db_query.created_at, created_at: db_query.created_at,
}; };
Ok::<_, DashboardsError>(ConversationalQueryResponse { Ok::<ConversationalQueryResponse, DashboardsError>(ConversationalQueryResponse {
query: conv_query, query: conv_query,
data: Some(serde_json::json!([])), data: Some(serde_json::json!([])),
suggested_visualization: Some(suggested_viz), suggested_visualization: Some(suggested_viz),
@ -250,7 +250,7 @@ pub async fn handle_conversational_query(
}) })
}) })
.await .await
.map_err(|e| DashboardsError::Internal(e.to_string()))??; .map_err(|e: tokio::task::JoinError| DashboardsError::Internal(e.to_string()))??;
Ok(Json(result)) Ok(Json(result))
} }

View file

@ -7,7 +7,7 @@ use diesel::prelude::*;
use std::sync::Arc; use std::sync::Arc;
use uuid::Uuid; use uuid::Uuid;
use crate::core::shared::schema::dashboard_widgets; use crate::core::shared::schema::dashboards::dashboard_widgets;
use crate::shared::state::AppState; use crate::shared::state::AppState;
use crate::dashboards::error::DashboardsError; use crate::dashboards::error::DashboardsError;
@ -46,7 +46,7 @@ pub async fn handle_add_widget(
diesel::insert_into(dashboard_widgets::table) diesel::insert_into(dashboard_widgets::table)
.values(&db_widget) .values(&db_widget)
.execute(&mut conn) .execute(&mut conn)
.map_err(|e| DashboardsError::Database(e.to_string()))?; .map_err(|e: diesel::result::Error| DashboardsError::Database(e.to_string()))?;
Ok::<_, DashboardsError>(db_widget_to_widget(db_widget)) Ok::<_, DashboardsError>(db_widget_to_widget(db_widget))
}) })
@ -97,7 +97,7 @@ pub async fn handle_update_widget(
diesel::update(dashboard_widgets::table.find(widget_id)) diesel::update(dashboard_widgets::table.find(widget_id))
.set(&db_widget) .set(&db_widget)
.execute(&mut conn) .execute(&mut conn)
.map_err(|e| DashboardsError::Database(e.to_string()))?; .map_err(|e: diesel::result::Error| DashboardsError::Database(e.to_string()))?;
Ok::<_, DashboardsError>(db_widget_to_widget(db_widget)) Ok::<_, DashboardsError>(db_widget_to_widget(db_widget))
}) })
@ -124,7 +124,7 @@ pub async fn handle_delete_widget(
.filter(dashboard_widgets::dashboard_id.eq(dashboard_id)), .filter(dashboard_widgets::dashboard_id.eq(dashboard_id)),
) )
.execute(&mut conn) .execute(&mut conn)
.map_err(|e| DashboardsError::Database(e.to_string()))?; .map_err(|e: diesel::result::Error| DashboardsError::Database(e.to_string()))?;
if deleted == 0 { if deleted == 0 {
return Err(DashboardsError::NotFound("Widget not found".to_string())); return Err(DashboardsError::NotFound("Widget not found".to_string()));

View file

@ -3,7 +3,7 @@ use diesel::prelude::*;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use uuid::Uuid; use uuid::Uuid;
use crate::core::shared::schema::{ use crate::core::shared::schema::dashboards::{
conversational_queries, dashboard_data_sources, dashboard_filters, dashboard_widgets, conversational_queries, dashboard_data_sources, dashboard_filters, dashboard_widgets,
dashboards, dashboards,
}; };

View file

@ -1217,6 +1217,8 @@ async fn call_designer_llm(
.get_config(&uuid::Uuid::nil(), "llm-key", None) .get_config(&uuid::Uuid::nil(), "llm-key", None)
.unwrap_or_default(); .unwrap_or_default();
#[cfg(feature = "llm")]
let response_text = {
let system_prompt = "You are a web designer AI. Respond only with valid JSON."; let system_prompt = "You are a web designer AI. Respond only with valid JSON.";
let messages = serde_json::json!({ let messages = serde_json::json!({
"messages": [ "messages": [
@ -1224,8 +1226,11 @@ async fn call_designer_llm(
{"role": "user", "content": prompt} {"role": "user", "content": prompt}
] ]
}); });
state.llm_provider.generate(prompt, &messages, &model, &api_key).await?
};
let response_text = state.llm_provider.generate(prompt, &messages, &model, &api_key).await?; #[cfg(not(feature = "llm"))]
let response_text = String::from("{}"); // Fallback or handling for when LLM is missing
let json_text = if response_text.contains("```json") { let json_text = if response_text.contains("```json") {
response_text response_text

Some files were not shown because too many files have changed in this diff Show more