Looking at the diff, I can see this commit removes the api_router.rs
file and distributes its functionality to individual modules. The calendar and task modules now have their own route configuration and API handlers. Remove centralized API router in favor of module-based routing Decentralizes API route configuration by moving route definitions and handlers to their respective modules. Each module now exports its own `configure_*_routes()` function that is merged in main.rs. - Delete api_router.rs with its mon
This commit is contained in:
parent
472f7a8d9c
commit
6888b5e449
14 changed files with 1135 additions and 1016 deletions
|
|
@ -1,916 +0,0 @@
|
||||||
//! Comprehensive API Router
|
|
||||||
//!
|
|
||||||
//! Combines all API endpoints from all specialized modules into a unified router.
|
|
||||||
//! This provides a centralized configuration for all REST API routes.
|
|
||||||
|
|
||||||
use axum::{
|
|
||||||
extract::{Path, Query, State},
|
|
||||||
http::StatusCode,
|
|
||||||
response::Json,
|
|
||||||
routing::{delete, get, post, put},
|
|
||||||
Router,
|
|
||||||
};
|
|
||||||
use serde_json::json;
|
|
||||||
use std::sync::Arc;
|
|
||||||
|
|
||||||
use crate::shared::state::AppState;
|
|
||||||
|
|
||||||
/// Configure all API routes from all modules
|
|
||||||
pub fn configure_api_routes() -> Router<Arc<AppState>> {
|
|
||||||
let mut router = Router::new()
|
|
||||||
// ===== File & Document Management (drive module) =====
|
|
||||||
.merge(crate::drive::configure())
|
|
||||||
// ===== User Management (auth/users module) =====
|
|
||||||
.route("/users/create", post(crate::directory::users::create_user))
|
|
||||||
.route(
|
|
||||||
"/users/:id/update",
|
|
||||||
put(crate::directory::users::update_user),
|
|
||||||
)
|
|
||||||
.route(
|
|
||||||
"/users/:id/delete",
|
|
||||||
delete(crate::directory::users::delete_user),
|
|
||||||
)
|
|
||||||
.route("/users/list", get(crate::directory::users::list_users))
|
|
||||||
// .route("/users/search", get(crate::directory::users::search_users))
|
|
||||||
.route(
|
|
||||||
"/users/:id/profile",
|
|
||||||
get(crate::directory::users::get_user_profile),
|
|
||||||
)
|
|
||||||
// .route(
|
|
||||||
// "/users/profile/update",
|
|
||||||
// put(crate::directory::users::update_profile),
|
|
||||||
// )
|
|
||||||
// .route(
|
|
||||||
// "/users/:id/settings",
|
|
||||||
// get(crate::directory::users::get_user_settings),
|
|
||||||
// )
|
|
||||||
// .route(
|
|
||||||
// "/users/:id/permissions",
|
|
||||||
// get(crate::directory::users::get_user_permissions),
|
|
||||||
// )
|
|
||||||
// .route("/users/:id/roles", get(crate::directory::users::get_user_roles))
|
|
||||||
// .route("/users/:id/roles", put(crate::directory::users::set_user_roles))
|
|
||||||
// .route(
|
|
||||||
// "/users/:id/status",
|
|
||||||
// get(crate::directory::users::get_user_status),
|
|
||||||
// )
|
|
||||||
// .route(
|
|
||||||
// "/users/:id/status",
|
|
||||||
// put(crate::directory::users::set_user_status),
|
|
||||||
// )
|
|
||||||
// .route(
|
|
||||||
// "/users/:id/presence",
|
|
||||||
// get(crate::directory::users::get_user_presence),
|
|
||||||
// )
|
|
||||||
// .route(
|
|
||||||
// "/users/:id/activity",
|
|
||||||
// get(crate::directory::users::get_user_activity),
|
|
||||||
// )
|
|
||||||
// .route(
|
|
||||||
// "/users/security/2fa/enable",
|
|
||||||
// post(crate::directory::users::enable_2fa),
|
|
||||||
// )
|
|
||||||
// .route(
|
|
||||||
// "/users/security/2fa/disable",
|
|
||||||
// post(crate::directory::users::disable_2fa),
|
|
||||||
// )
|
|
||||||
// .route(
|
|
||||||
// "/users/security/devices",
|
|
||||||
// get(crate::directory::users::list_user_devices),
|
|
||||||
// )
|
|
||||||
// .route(
|
|
||||||
// "/users/security/sessions",
|
|
||||||
// get(crate::directory::users::list_user_sessions),
|
|
||||||
// )
|
|
||||||
// .route(
|
|
||||||
// "/users/notifications/settings",
|
|
||||||
// put(crate::directory::users::update_notification_settings),
|
|
||||||
// )
|
|
||||||
// ===== Groups & Organizations (auth/groups module) =====
|
|
||||||
.route(
|
|
||||||
"/groups/create",
|
|
||||||
post(crate::directory::groups::create_group),
|
|
||||||
)
|
|
||||||
.route(
|
|
||||||
"/groups/:id/update",
|
|
||||||
put(crate::directory::groups::update_group),
|
|
||||||
)
|
|
||||||
.route(
|
|
||||||
"/groups/:id/delete",
|
|
||||||
delete(crate::directory::groups::delete_group),
|
|
||||||
)
|
|
||||||
.route("/groups/list", get(crate::directory::groups::list_groups))
|
|
||||||
// .route("/groups/search", get(crate::directory::groups::search_groups))
|
|
||||||
.route(
|
|
||||||
"/groups/:id/members",
|
|
||||||
get(crate::directory::groups::get_group_members),
|
|
||||||
)
|
|
||||||
.route(
|
|
||||||
"/groups/:id/members/add",
|
|
||||||
post(crate::directory::groups::add_group_member),
|
|
||||||
)
|
|
||||||
.route(
|
|
||||||
"/groups/:id/members/remove",
|
|
||||||
delete(crate::directory::groups::remove_group_member),
|
|
||||||
);
|
|
||||||
// .route(
|
|
||||||
// "/groups/:id/permissions",
|
|
||||||
// get(crate::directory::groups::get_group_permissions),
|
|
||||||
// )
|
|
||||||
// .route(
|
|
||||||
// "/groups/:id/permissions",
|
|
||||||
// put(crate::directory::groups::set_group_permissions),
|
|
||||||
// )
|
|
||||||
// .route(
|
|
||||||
// "/groups/:id/settings",
|
|
||||||
// get(crate::directory::groups::get_group_settings),
|
|
||||||
// )
|
|
||||||
// .route(
|
|
||||||
// "/groups/:id/settings",
|
|
||||||
// put(crate::directory::groups::update_group_settings),
|
|
||||||
// )
|
|
||||||
// .route(
|
|
||||||
// "/groups/:id/analytics",
|
|
||||||
// get(crate::directory::groups::get_group_analytics),
|
|
||||||
// )
|
|
||||||
// .route(
|
|
||||||
// "/groups/:id/join/request",
|
|
||||||
// post(crate::directory::groups::request_join_group),
|
|
||||||
// )
|
|
||||||
// .route(
|
|
||||||
// "/groups/:id/join/approve",
|
|
||||||
// post(crate::directory::groups::approve_join_request),
|
|
||||||
// )
|
|
||||||
// .route(
|
|
||||||
// "/groups/:id/join/reject",
|
|
||||||
// post(crate::directory::groups::reject_join_request),
|
|
||||||
// )
|
|
||||||
// .route(
|
|
||||||
// "/groups/:id/invites/send",
|
|
||||||
// post(crate::directory::groups::send_group_invites),
|
|
||||||
// )
|
|
||||||
// .route(
|
|
||||||
// "/groups/:id/invites/list",
|
|
||||||
// get(crate::directory::groups::list_group_invites),
|
|
||||||
// )
|
|
||||||
|
|
||||||
// ===== Conversations & Real-time Communication (meet module) =====
|
|
||||||
#[cfg(feature = "meet")]
|
|
||||||
{
|
|
||||||
router = router.merge(crate::meet::configure());
|
|
||||||
}
|
|
||||||
|
|
||||||
// ===== Calendar & Task Management (calendar_engine & task_engine modules) =====
|
|
||||||
router = router
|
|
||||||
.route(
|
|
||||||
"/calendar/events/create",
|
|
||||||
post(handle_calendar_event_create),
|
|
||||||
)
|
|
||||||
.route("/calendar/events/update", put(handle_calendar_event_update))
|
|
||||||
.route(
|
|
||||||
"/calendar/events/delete",
|
|
||||||
delete(handle_calendar_event_delete),
|
|
||||||
)
|
|
||||||
.route("/calendar/events/list", get(handle_calendar_events_list))
|
|
||||||
.route(
|
|
||||||
"/calendar/events/search",
|
|
||||||
get(handle_calendar_events_search),
|
|
||||||
)
|
|
||||||
.route(
|
|
||||||
"/calendar/availability/check",
|
|
||||||
get(handle_calendar_availability),
|
|
||||||
)
|
|
||||||
.route(
|
|
||||||
"/calendar/schedule/meeting",
|
|
||||||
post(handle_calendar_schedule_meeting),
|
|
||||||
)
|
|
||||||
.route(
|
|
||||||
"/calendar/reminders/set",
|
|
||||||
post(handle_calendar_set_reminder),
|
|
||||||
)
|
|
||||||
.route("/tasks/create", post(handle_task_create))
|
|
||||||
.route("/tasks/update", put(handle_task_update))
|
|
||||||
.route("/tasks/delete", delete(handle_task_delete))
|
|
||||||
.route("/tasks/list", get(handle_task_list))
|
|
||||||
.route("/tasks/assign", post(handle_task_assign))
|
|
||||||
.route("/tasks/status/update", put(handle_task_status_update))
|
|
||||||
.route("/tasks/priority/set", put(handle_task_priority_set))
|
|
||||||
.route("/tasks/dependencies/set", put(handle_task_dependencies_set))
|
|
||||||
// ===== Storage & Data Management =====
|
|
||||||
.route("/storage/save", post(handle_storage_save))
|
|
||||||
.route("/storage/batch", post(handle_storage_batch))
|
|
||||||
.route("/storage/json", post(handle_storage_json))
|
|
||||||
.route("/storage/delete", delete(handle_storage_delete))
|
|
||||||
.route("/storage/quota/check", get(handle_storage_quota_check))
|
|
||||||
.route("/storage/cleanup", post(handle_storage_cleanup))
|
|
||||||
.route("/storage/backup/create", post(handle_storage_backup_create))
|
|
||||||
.route(
|
|
||||||
"/storage/backup/restore",
|
|
||||||
post(handle_storage_backup_restore),
|
|
||||||
)
|
|
||||||
.route("/storage/archive", post(handle_storage_archive))
|
|
||||||
.route("/storage/metrics", get(handle_storage_metrics))
|
|
||||||
// ===== Analytics & Reporting (shared/analytics module) =====
|
|
||||||
.route(
|
|
||||||
"/analytics/dashboard",
|
|
||||||
get(crate::shared::analytics::get_dashboard),
|
|
||||||
)
|
|
||||||
.route(
|
|
||||||
"/analytics/reports/generate",
|
|
||||||
post(crate::shared::analytics::generate_report),
|
|
||||||
)
|
|
||||||
.route(
|
|
||||||
"/analytics/reports/schedule",
|
|
||||||
post(crate::shared::analytics::schedule_report),
|
|
||||||
)
|
|
||||||
.route(
|
|
||||||
"/analytics/metrics/collect",
|
|
||||||
post(crate::shared::analytics::collect_metrics),
|
|
||||||
)
|
|
||||||
.route(
|
|
||||||
"/analytics/insights/generate",
|
|
||||||
post(crate::shared::analytics::generate_insights),
|
|
||||||
)
|
|
||||||
.route(
|
|
||||||
"/analytics/trends/analyze",
|
|
||||||
post(crate::shared::analytics::analyze_trends),
|
|
||||||
)
|
|
||||||
.route(
|
|
||||||
"/analytics/export",
|
|
||||||
post(crate::shared::analytics::export_analytics),
|
|
||||||
)
|
|
||||||
// ===== System & Administration (shared/admin module) =====
|
|
||||||
.route(
|
|
||||||
"/admin/system/status",
|
|
||||||
get(crate::shared::admin::get_system_status),
|
|
||||||
)
|
|
||||||
.route(
|
|
||||||
"/admin/system/metrics",
|
|
||||||
get(crate::shared::admin::get_system_metrics),
|
|
||||||
)
|
|
||||||
.route("/admin/logs/view", get(crate::shared::admin::view_logs))
|
|
||||||
.route(
|
|
||||||
"/admin/logs/export",
|
|
||||||
post(crate::shared::admin::export_logs),
|
|
||||||
)
|
|
||||||
.route("/admin/config", get(crate::shared::admin::get_config))
|
|
||||||
.route(
|
|
||||||
"/admin/config/update",
|
|
||||||
put(crate::shared::admin::update_config),
|
|
||||||
)
|
|
||||||
.route(
|
|
||||||
"/admin/maintenance/schedule",
|
|
||||||
post(crate::shared::admin::schedule_maintenance),
|
|
||||||
)
|
|
||||||
.route(
|
|
||||||
"/admin/backup/create",
|
|
||||||
post(crate::shared::admin::create_backup),
|
|
||||||
)
|
|
||||||
.route(
|
|
||||||
"/admin/backup/restore",
|
|
||||||
post(crate::shared::admin::restore_backup),
|
|
||||||
)
|
|
||||||
.route("/admin/backups", get(crate::shared::admin::list_backups))
|
|
||||||
.route(
|
|
||||||
"/admin/users/manage",
|
|
||||||
post(crate::shared::admin::manage_users),
|
|
||||||
)
|
|
||||||
.route("/admin/roles", get(crate::shared::admin::get_roles))
|
|
||||||
.route(
|
|
||||||
"/admin/roles/manage",
|
|
||||||
post(crate::shared::admin::manage_roles),
|
|
||||||
)
|
|
||||||
.route("/admin/quotas", get(crate::shared::admin::get_quotas))
|
|
||||||
.route(
|
|
||||||
"/admin/quotas/manage",
|
|
||||||
post(crate::shared::admin::manage_quotas),
|
|
||||||
)
|
|
||||||
.route("/admin/licenses", get(crate::shared::admin::get_licenses))
|
|
||||||
.route(
|
|
||||||
"/admin/licenses/manage",
|
|
||||||
post(crate::shared::admin::manage_licenses),
|
|
||||||
)
|
|
||||||
// ===== AI & Machine Learning =====
|
|
||||||
.route("/ai/analyze/text", post(handle_ai_analyze_text))
|
|
||||||
.route("/ai/analyze/image", post(handle_ai_analyze_image))
|
|
||||||
.route("/ai/generate/text", post(handle_ai_generate_text))
|
|
||||||
.route("/ai/generate/image", post(handle_ai_generate_image))
|
|
||||||
.route("/ai/translate", post(handle_ai_translate))
|
|
||||||
.route("/ai/summarize", post(handle_ai_summarize))
|
|
||||||
.route("/ai/recommend", post(handle_ai_recommend))
|
|
||||||
.route("/ai/train/model", post(handle_ai_train_model))
|
|
||||||
.route("/ai/predict", post(handle_ai_predict))
|
|
||||||
// ===== Security & Compliance =====
|
|
||||||
.route("/security/audit/logs", get(handle_security_audit_logs))
|
|
||||||
.route(
|
|
||||||
"/security/compliance/check",
|
|
||||||
post(handle_security_compliance_check),
|
|
||||||
)
|
|
||||||
.route("/security/threats/scan", post(handle_security_threats_scan))
|
|
||||||
.route(
|
|
||||||
"/security/access/review",
|
|
||||||
get(handle_security_access_review),
|
|
||||||
)
|
|
||||||
.route(
|
|
||||||
"/security/encryption/manage",
|
|
||||||
post(handle_security_encryption_manage),
|
|
||||||
)
|
|
||||||
.route(
|
|
||||||
"/security/certificates/manage",
|
|
||||||
post(handle_security_certificates_manage),
|
|
||||||
)
|
|
||||||
// ===== Health & Monitoring =====
|
|
||||||
.route("/health", get(handle_health))
|
|
||||||
.route("/health/detailed", get(handle_health_detailed))
|
|
||||||
.route("/monitoring/status", get(handle_monitoring_status))
|
|
||||||
.route("/monitoring/alerts", get(handle_monitoring_alerts))
|
|
||||||
.route("/monitoring/metrics", get(handle_monitoring_metrics));
|
|
||||||
|
|
||||||
// ===== Communication Services (email module) =====
|
|
||||||
#[cfg(feature = "email")]
|
|
||||||
{
|
|
||||||
router = router.merge(crate::email::configure());
|
|
||||||
}
|
|
||||||
|
|
||||||
router
|
|
||||||
}
|
|
||||||
|
|
||||||
// ===== Placeholder handlers for endpoints not yet fully implemented =====
|
|
||||||
// These forward to existing functionality or provide basic responses
|
|
||||||
|
|
||||||
// Using imports from top of file
|
|
||||||
|
|
||||||
async fn handle_calendar_event_create(
|
|
||||||
State(state): State<Arc<AppState>>,
|
|
||||||
Json(payload): Json<serde_json::Value>,
|
|
||||||
) -> Result<Json<serde_json::Value>, StatusCode> {
|
|
||||||
Ok(Json(
|
|
||||||
serde_json::json!({"success": true, "message": "Calendar event created"}),
|
|
||||||
))
|
|
||||||
}
|
|
||||||
|
|
||||||
async fn handle_calendar_event_update(
|
|
||||||
State(state): State<Arc<AppState>>,
|
|
||||||
Json(payload): Json<serde_json::Value>,
|
|
||||||
) -> Result<Json<serde_json::Value>, StatusCode> {
|
|
||||||
Ok(Json(
|
|
||||||
serde_json::json!({"success": true, "message": "Calendar event updated"}),
|
|
||||||
))
|
|
||||||
}
|
|
||||||
|
|
||||||
async fn handle_calendar_event_delete(
|
|
||||||
State(state): State<Arc<AppState>>,
|
|
||||||
) -> Result<Json<serde_json::Value>, StatusCode> {
|
|
||||||
Ok(Json(
|
|
||||||
serde_json::json!({"success": true, "message": "Calendar event deleted"}),
|
|
||||||
))
|
|
||||||
}
|
|
||||||
|
|
||||||
async fn handle_calendar_events_list(
|
|
||||||
State(state): State<Arc<AppState>>,
|
|
||||||
) -> Result<Json<serde_json::Value>, StatusCode> {
|
|
||||||
Ok(Json(serde_json::json!({"events": []})))
|
|
||||||
}
|
|
||||||
|
|
||||||
async fn handle_calendar_events_search(
|
|
||||||
State(state): State<Arc<AppState>>,
|
|
||||||
) -> Result<Json<serde_json::Value>, StatusCode> {
|
|
||||||
Ok(Json(serde_json::json!({"events": []})))
|
|
||||||
}
|
|
||||||
|
|
||||||
async fn handle_calendar_availability(
|
|
||||||
State(state): State<Arc<AppState>>,
|
|
||||||
) -> Result<Json<serde_json::Value>, StatusCode> {
|
|
||||||
Ok(Json(serde_json::json!({"available": true})))
|
|
||||||
}
|
|
||||||
|
|
||||||
async fn handle_calendar_schedule_meeting(
|
|
||||||
State(state): State<Arc<AppState>>,
|
|
||||||
Json(payload): Json<serde_json::Value>,
|
|
||||||
) -> Result<Json<serde_json::Value>, StatusCode> {
|
|
||||||
Ok(Json(
|
|
||||||
serde_json::json!({"success": true, "meeting_id": "meeting-123"}),
|
|
||||||
))
|
|
||||||
}
|
|
||||||
|
|
||||||
async fn handle_calendar_set_reminder(
|
|
||||||
State(state): State<Arc<AppState>>,
|
|
||||||
Json(payload): Json<serde_json::Value>,
|
|
||||||
) -> Result<Json<serde_json::Value>, StatusCode> {
|
|
||||||
Ok(Json(
|
|
||||||
serde_json::json!({"success": true, "reminder_id": "reminder-123"}),
|
|
||||||
))
|
|
||||||
}
|
|
||||||
|
|
||||||
async fn handle_task_create(
|
|
||||||
State(state): State<Arc<AppState>>,
|
|
||||||
Json(payload): Json<serde_json::Value>,
|
|
||||||
) -> Result<Json<serde_json::Value>, StatusCode> {
|
|
||||||
Ok(Json(
|
|
||||||
serde_json::json!({"success": true, "task_id": "task-123"}),
|
|
||||||
))
|
|
||||||
}
|
|
||||||
|
|
||||||
async fn handle_task_update(
|
|
||||||
State(state): State<Arc<AppState>>,
|
|
||||||
Json(payload): Json<serde_json::Value>,
|
|
||||||
) -> Result<Json<serde_json::Value>, StatusCode> {
|
|
||||||
Ok(Json(serde_json::json!({"success": true})))
|
|
||||||
}
|
|
||||||
|
|
||||||
async fn handle_task_delete(
|
|
||||||
State(state): State<Arc<AppState>>,
|
|
||||||
) -> Result<Json<serde_json::Value>, StatusCode> {
|
|
||||||
Ok(Json(serde_json::json!({"success": true})))
|
|
||||||
}
|
|
||||||
|
|
||||||
async fn handle_task_list(
|
|
||||||
State(state): State<Arc<AppState>>,
|
|
||||||
) -> Result<Json<serde_json::Value>, StatusCode> {
|
|
||||||
Ok(Json(serde_json::json!({"tasks": []})))
|
|
||||||
}
|
|
||||||
|
|
||||||
async fn handle_task_assign(
|
|
||||||
State(state): State<Arc<AppState>>,
|
|
||||||
Json(payload): Json<serde_json::Value>,
|
|
||||||
) -> Result<Json<serde_json::Value>, StatusCode> {
|
|
||||||
Ok(Json(serde_json::json!({"success": true})))
|
|
||||||
}
|
|
||||||
|
|
||||||
async fn handle_task_status_update(
|
|
||||||
State(state): State<Arc<AppState>>,
|
|
||||||
Json(payload): Json<serde_json::Value>,
|
|
||||||
) -> Result<Json<serde_json::Value>, StatusCode> {
|
|
||||||
Ok(Json(serde_json::json!({"success": true})))
|
|
||||||
}
|
|
||||||
|
|
||||||
async fn handle_task_priority_set(
|
|
||||||
State(state): State<Arc<AppState>>,
|
|
||||||
Json(payload): Json<serde_json::Value>,
|
|
||||||
) -> Result<Json<serde_json::Value>, StatusCode> {
|
|
||||||
Ok(Json(serde_json::json!({"success": true})))
|
|
||||||
}
|
|
||||||
|
|
||||||
async fn handle_task_dependencies_set(
|
|
||||||
State(state): State<Arc<AppState>>,
|
|
||||||
Json(payload): Json<serde_json::Value>,
|
|
||||||
) -> Result<Json<serde_json::Value>, StatusCode> {
|
|
||||||
Ok(Json(serde_json::json!({"success": true})))
|
|
||||||
}
|
|
||||||
|
|
||||||
async fn handle_storage_save(
|
|
||||||
State(state): State<Arc<AppState>>,
|
|
||||||
Json(payload): Json<serde_json::Value>,
|
|
||||||
) -> Result<Json<serde_json::Value>, StatusCode> {
|
|
||||||
let key = payload["key"].as_str().ok_or(StatusCode::BAD_REQUEST)?;
|
|
||||||
let content = payload["content"].as_str().ok_or(StatusCode::BAD_REQUEST)?;
|
|
||||||
let bucket = payload["bucket"].as_str().unwrap_or("default");
|
|
||||||
|
|
||||||
// Use the drive module for S3/MinIO operations
|
|
||||||
match crate::drive::files::save_to_s3(&state, bucket, key, content.as_bytes()).await {
|
|
||||||
Ok(_) => Ok(Json(serde_json::json!({
|
|
||||||
"success": true,
|
|
||||||
"key": key,
|
|
||||||
"bucket": bucket,
|
|
||||||
"size": content.len()
|
|
||||||
}))),
|
|
||||||
Err(e) => {
|
|
||||||
log::error!("Storage save failed: {}", e);
|
|
||||||
Err(StatusCode::INTERNAL_SERVER_ERROR)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
async fn handle_storage_batch(
|
|
||||||
State(state): State<Arc<AppState>>,
|
|
||||||
Json(payload): Json<serde_json::Value>,
|
|
||||||
) -> Result<Json<serde_json::Value>, StatusCode> {
|
|
||||||
let operations = payload["operations"]
|
|
||||||
.as_array()
|
|
||||||
.ok_or(StatusCode::BAD_REQUEST)?;
|
|
||||||
let bucket = payload["bucket"].as_str().unwrap_or("default");
|
|
||||||
|
|
||||||
let mut results = Vec::new();
|
|
||||||
for op in operations {
|
|
||||||
let key = op["key"].as_str().unwrap_or("");
|
|
||||||
let content = op["content"].as_str().unwrap_or("");
|
|
||||||
let operation = op["operation"].as_str().unwrap_or("save");
|
|
||||||
|
|
||||||
let result = match operation {
|
|
||||||
"save" => crate::drive::files::save_to_s3(&state, bucket, key, content.as_bytes())
|
|
||||||
.await
|
|
||||||
.map(|_| serde_json::json!({"key": key, "success": true}))
|
|
||||||
.unwrap_or_else(
|
|
||||||
|e| serde_json::json!({"key": key, "success": false, "error": e.to_string()}),
|
|
||||||
),
|
|
||||||
"delete" => crate::drive::files::delete_from_s3(&state, bucket, key)
|
|
||||||
.await
|
|
||||||
.map(|_| serde_json::json!({"key": key, "success": true}))
|
|
||||||
.unwrap_or_else(
|
|
||||||
|e| serde_json::json!({"key": key, "success": false, "error": e.to_string()}),
|
|
||||||
),
|
|
||||||
_ => serde_json::json!({"key": key, "success": false, "error": "Invalid operation"}),
|
|
||||||
};
|
|
||||||
results.push(result);
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(Json(serde_json::json!({
|
|
||||||
"success": true,
|
|
||||||
"results": results,
|
|
||||||
"total": results.len()
|
|
||||||
})))
|
|
||||||
}
|
|
||||||
|
|
||||||
async fn handle_storage_json(
|
|
||||||
State(state): State<Arc<AppState>>,
|
|
||||||
Json(payload): Json<serde_json::Value>,
|
|
||||||
) -> Result<Json<serde_json::Value>, StatusCode> {
|
|
||||||
let key = payload["key"].as_str().ok_or(StatusCode::BAD_REQUEST)?;
|
|
||||||
let data = &payload["data"];
|
|
||||||
let bucket = payload["bucket"].as_str().unwrap_or("default");
|
|
||||||
|
|
||||||
let json_content = serde_json::to_vec_pretty(data).map_err(|_| StatusCode::BAD_REQUEST)?;
|
|
||||||
|
|
||||||
match crate::drive::files::save_to_s3(&state, bucket, key, &json_content).await {
|
|
||||||
Ok(_) => Ok(Json(serde_json::json!({
|
|
||||||
"success": true,
|
|
||||||
"key": key,
|
|
||||||
"bucket": bucket,
|
|
||||||
"size": json_content.len()
|
|
||||||
}))),
|
|
||||||
Err(e) => {
|
|
||||||
log::error!("JSON storage failed: {}", e);
|
|
||||||
Err(StatusCode::INTERNAL_SERVER_ERROR)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
async fn handle_storage_delete(
|
|
||||||
State(state): State<Arc<AppState>>,
|
|
||||||
Query(params): Query<std::collections::HashMap<String, String>>,
|
|
||||||
) -> Result<Json<serde_json::Value>, StatusCode> {
|
|
||||||
let key = params.get("key").ok_or(StatusCode::BAD_REQUEST)?;
|
|
||||||
let bucket = params
|
|
||||||
.get("bucket")
|
|
||||||
.map(|s| s.as_str())
|
|
||||||
.unwrap_or("default");
|
|
||||||
|
|
||||||
match crate::drive::files::delete_from_s3(&state, bucket, key).await {
|
|
||||||
Ok(_) => Ok(Json(serde_json::json!({
|
|
||||||
"success": true,
|
|
||||||
"key": key,
|
|
||||||
"bucket": bucket
|
|
||||||
}))),
|
|
||||||
Err(e) => {
|
|
||||||
log::error!("Storage delete failed: {}", e);
|
|
||||||
Err(StatusCode::INTERNAL_SERVER_ERROR)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
async fn handle_storage_quota_check(
|
|
||||||
State(state): State<Arc<AppState>>,
|
|
||||||
Query(params): Query<std::collections::HashMap<String, String>>,
|
|
||||||
) -> Result<Json<serde_json::Value>, StatusCode> {
|
|
||||||
let bucket = params
|
|
||||||
.get("bucket")
|
|
||||||
.map(|s| s.as_str())
|
|
||||||
.unwrap_or("default");
|
|
||||||
|
|
||||||
match crate::drive::files::get_bucket_stats(&state, bucket).await {
|
|
||||||
Ok(stats) => {
|
|
||||||
let total = 10_737_418_240i64; // 10GB default quota
|
|
||||||
let used = stats.total_size as i64;
|
|
||||||
let available = (total - used).max(0);
|
|
||||||
|
|
||||||
Ok(Json(serde_json::json!({
|
|
||||||
"total": total,
|
|
||||||
"used": used,
|
|
||||||
"available": available,
|
|
||||||
"file_count": stats.object_count,
|
|
||||||
"bucket": bucket
|
|
||||||
})))
|
|
||||||
}
|
|
||||||
Err(_) => {
|
|
||||||
// Return default quota if stats unavailable
|
|
||||||
Ok(Json(serde_json::json!({
|
|
||||||
"total": 10737418240i64,
|
|
||||||
"used": 0,
|
|
||||||
"available": 10737418240i64,
|
|
||||||
"file_count": 0,
|
|
||||||
"bucket": bucket
|
|
||||||
})))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
async fn handle_storage_cleanup(
|
|
||||||
State(state): State<Arc<AppState>>,
|
|
||||||
Json(payload): Json<serde_json::Value>,
|
|
||||||
) -> Result<Json<serde_json::Value>, StatusCode> {
|
|
||||||
let bucket = payload["bucket"].as_str().unwrap_or("default");
|
|
||||||
let older_than_days = payload["older_than_days"].as_u64().unwrap_or(30);
|
|
||||||
|
|
||||||
let cutoff_date = chrono::Utc::now() - chrono::Duration::days(older_than_days as i64);
|
|
||||||
|
|
||||||
match crate::drive::files::cleanup_old_files(&state, bucket, cutoff_date).await {
|
|
||||||
Ok((deleted_count, freed_bytes)) => Ok(Json(serde_json::json!({
|
|
||||||
"success": true,
|
|
||||||
"deleted_files": deleted_count,
|
|
||||||
"freed_bytes": freed_bytes,
|
|
||||||
"bucket": bucket
|
|
||||||
}))),
|
|
||||||
Err(e) => {
|
|
||||||
log::error!("Storage cleanup failed: {}", e);
|
|
||||||
Ok(Json(serde_json::json!({
|
|
||||||
"success": false,
|
|
||||||
"error": e.to_string()
|
|
||||||
})))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
async fn handle_storage_backup_create(
|
|
||||||
State(state): State<Arc<AppState>>,
|
|
||||||
Json(payload): Json<serde_json::Value>,
|
|
||||||
) -> Result<Json<serde_json::Value>, StatusCode> {
|
|
||||||
let bucket = payload["bucket"].as_str().unwrap_or("default");
|
|
||||||
let backup_name = payload["name"].as_str().unwrap_or("backup");
|
|
||||||
|
|
||||||
let backup_id = format!("backup-{}-{}", backup_name, chrono::Utc::now().timestamp());
|
|
||||||
let archive_bucket = format!("{}-backups", bucket);
|
|
||||||
|
|
||||||
match crate::drive::files::create_bucket_backup(&state, bucket, &archive_bucket, &backup_id)
|
|
||||||
.await
|
|
||||||
{
|
|
||||||
Ok(file_count) => Ok(Json(serde_json::json!({
|
|
||||||
"success": true,
|
|
||||||
"backup_id": backup_id,
|
|
||||||
"files_backed_up": file_count,
|
|
||||||
"backup_bucket": archive_bucket
|
|
||||||
}))),
|
|
||||||
Err(e) => {
|
|
||||||
log::error!("Backup creation failed: {}", e);
|
|
||||||
Err(StatusCode::INTERNAL_SERVER_ERROR)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
async fn handle_storage_backup_restore(
|
|
||||||
State(state): State<Arc<AppState>>,
|
|
||||||
Json(payload): Json<serde_json::Value>,
|
|
||||||
) -> Result<Json<serde_json::Value>, StatusCode> {
|
|
||||||
let backup_id = payload["backup_id"]
|
|
||||||
.as_str()
|
|
||||||
.ok_or(StatusCode::BAD_REQUEST)?;
|
|
||||||
let target_bucket = payload["target_bucket"].as_str().unwrap_or("default");
|
|
||||||
let default_source = format!("{}-backups", target_bucket);
|
|
||||||
let source_bucket = payload["source_bucket"].as_str().unwrap_or(&default_source);
|
|
||||||
|
|
||||||
match crate::drive::files::restore_bucket_backup(
|
|
||||||
&state,
|
|
||||||
&source_bucket,
|
|
||||||
target_bucket,
|
|
||||||
backup_id,
|
|
||||||
)
|
|
||||||
.await
|
|
||||||
{
|
|
||||||
Ok(file_count) => Ok(Json(serde_json::json!({
|
|
||||||
"success": true,
|
|
||||||
"backup_id": backup_id,
|
|
||||||
"files_restored": file_count,
|
|
||||||
"target_bucket": target_bucket
|
|
||||||
}))),
|
|
||||||
Err(e) => {
|
|
||||||
log::error!("Backup restore failed: {}", e);
|
|
||||||
Err(StatusCode::INTERNAL_SERVER_ERROR)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
async fn handle_storage_archive(
|
|
||||||
State(state): State<Arc<AppState>>,
|
|
||||||
Json(payload): Json<serde_json::Value>,
|
|
||||||
) -> Result<Json<serde_json::Value>, StatusCode> {
|
|
||||||
let bucket = payload["bucket"].as_str().unwrap_or("default");
|
|
||||||
let prefix = payload["prefix"].as_str().unwrap_or("");
|
|
||||||
let archive_name = payload["name"].as_str().unwrap_or("archive");
|
|
||||||
|
|
||||||
let archive_id = format!(
|
|
||||||
"archive-{}-{}",
|
|
||||||
archive_name,
|
|
||||||
chrono::Utc::now().timestamp()
|
|
||||||
);
|
|
||||||
let archive_key = format!("archives/{}.tar.gz", archive_id);
|
|
||||||
|
|
||||||
match crate::drive::files::create_archive(&state, bucket, prefix, &archive_key).await {
|
|
||||||
Ok(archive_size) => Ok(Json(serde_json::json!({
|
|
||||||
"success": true,
|
|
||||||
"archive_id": archive_id,
|
|
||||||
"archive_key": archive_key,
|
|
||||||
"archive_size": archive_size,
|
|
||||||
"bucket": bucket
|
|
||||||
}))),
|
|
||||||
Err(e) => {
|
|
||||||
log::error!("Archive creation failed: {}", e);
|
|
||||||
Err(StatusCode::INTERNAL_SERVER_ERROR)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
async fn handle_storage_metrics(
|
|
||||||
State(state): State<Arc<AppState>>,
|
|
||||||
Query(params): Query<std::collections::HashMap<String, String>>,
|
|
||||||
) -> Result<Json<serde_json::Value>, StatusCode> {
|
|
||||||
let bucket = params
|
|
||||||
.get("bucket")
|
|
||||||
.map(|s| s.as_str())
|
|
||||||
.unwrap_or("default");
|
|
||||||
|
|
||||||
match crate::drive::files::get_bucket_metrics(&state, bucket).await {
|
|
||||||
Ok(metrics) => Ok(Json(serde_json::json!({
|
|
||||||
"total_files": metrics.object_count,
|
|
||||||
"total_size_bytes": metrics.total_size,
|
|
||||||
"avg_file_size": if metrics.object_count > 0 {
|
|
||||||
metrics.total_size / metrics.object_count as u64
|
|
||||||
} else {
|
|
||||||
0
|
|
||||||
},
|
|
||||||
"bucket": bucket,
|
|
||||||
"last_modified": metrics.last_modified
|
|
||||||
}))),
|
|
||||||
Err(e) => {
|
|
||||||
log::error!("Failed to get storage metrics: {}", e);
|
|
||||||
Ok(Json(serde_json::json!({
|
|
||||||
"total_files": 0,
|
|
||||||
"total_size_bytes": 0,
|
|
||||||
"error": e.to_string()
|
|
||||||
})))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
async fn handle_ai_analyze_text(
|
|
||||||
State(state): State<Arc<AppState>>,
|
|
||||||
Json(payload): Json<serde_json::Value>,
|
|
||||||
) -> Result<Json<serde_json::Value>, StatusCode> {
|
|
||||||
Ok(Json(
|
|
||||||
serde_json::json!({"sentiment": "positive", "keywords": ["example"], "entities": []}),
|
|
||||||
))
|
|
||||||
}
|
|
||||||
|
|
||||||
async fn handle_ai_analyze_image(
|
|
||||||
State(state): State<Arc<AppState>>,
|
|
||||||
Json(payload): Json<serde_json::Value>,
|
|
||||||
) -> Result<Json<serde_json::Value>, StatusCode> {
|
|
||||||
Ok(Json(
|
|
||||||
serde_json::json!({"objects": [], "faces": 0, "labels": []}),
|
|
||||||
))
|
|
||||||
}
|
|
||||||
|
|
||||||
async fn handle_ai_generate_text(
|
|
||||||
State(state): State<Arc<AppState>>,
|
|
||||||
Json(payload): Json<serde_json::Value>,
|
|
||||||
) -> Result<Json<serde_json::Value>, StatusCode> {
|
|
||||||
Ok(Json(
|
|
||||||
serde_json::json!({"generated_text": "This is generated text based on your input."}),
|
|
||||||
))
|
|
||||||
}
|
|
||||||
|
|
||||||
async fn handle_ai_generate_image(
|
|
||||||
State(state): State<Arc<AppState>>,
|
|
||||||
Json(payload): Json<serde_json::Value>,
|
|
||||||
) -> Result<Json<serde_json::Value>, StatusCode> {
|
|
||||||
Ok(Json(
|
|
||||||
serde_json::json!({"image_url": "/generated/image-123.png"}),
|
|
||||||
))
|
|
||||||
}
|
|
||||||
|
|
||||||
async fn handle_ai_translate(
|
|
||||||
State(state): State<Arc<AppState>>,
|
|
||||||
Json(payload): Json<serde_json::Value>,
|
|
||||||
) -> Result<Json<serde_json::Value>, StatusCode> {
|
|
||||||
Ok(Json(
|
|
||||||
serde_json::json!({"translated_text": "Translated content", "source_lang": "en", "target_lang": "es"}),
|
|
||||||
))
|
|
||||||
}
|
|
||||||
|
|
||||||
async fn handle_ai_summarize(
|
|
||||||
State(state): State<Arc<AppState>>,
|
|
||||||
Json(payload): Json<serde_json::Value>,
|
|
||||||
) -> Result<Json<serde_json::Value>, StatusCode> {
|
|
||||||
Ok(Json(
|
|
||||||
serde_json::json!({"summary": "This is a summary of the provided text."}),
|
|
||||||
))
|
|
||||||
}
|
|
||||||
|
|
||||||
async fn handle_ai_recommend(
|
|
||||||
State(state): State<Arc<AppState>>,
|
|
||||||
Json(payload): Json<serde_json::Value>,
|
|
||||||
) -> Result<Json<serde_json::Value>, StatusCode> {
|
|
||||||
Ok(Json(serde_json::json!({"recommendations": []})))
|
|
||||||
}
|
|
||||||
|
|
||||||
async fn handle_ai_train_model(
|
|
||||||
State(state): State<Arc<AppState>>,
|
|
||||||
Json(payload): Json<serde_json::Value>,
|
|
||||||
) -> Result<Json<serde_json::Value>, StatusCode> {
|
|
||||||
Ok(Json(
|
|
||||||
serde_json::json!({"success": true, "model_id": "model-123", "status": "training"}),
|
|
||||||
))
|
|
||||||
}
|
|
||||||
|
|
||||||
async fn handle_ai_predict(
|
|
||||||
State(state): State<Arc<AppState>>,
|
|
||||||
Json(payload): Json<serde_json::Value>,
|
|
||||||
) -> Result<Json<serde_json::Value>, StatusCode> {
|
|
||||||
Ok(Json(
|
|
||||||
serde_json::json!({"prediction": 0.85, "confidence": 0.92}),
|
|
||||||
))
|
|
||||||
}
|
|
||||||
|
|
||||||
async fn handle_security_audit_logs(
|
|
||||||
State(state): State<Arc<AppState>>,
|
|
||||||
) -> Result<Json<serde_json::Value>, StatusCode> {
|
|
||||||
Ok(Json(serde_json::json!({"audit_logs": []})))
|
|
||||||
}
|
|
||||||
|
|
||||||
async fn handle_security_compliance_check(
|
|
||||||
State(state): State<Arc<AppState>>,
|
|
||||||
Json(payload): Json<serde_json::Value>,
|
|
||||||
) -> Result<Json<serde_json::Value>, StatusCode> {
|
|
||||||
Ok(Json(serde_json::json!({"compliant": true, "issues": []})))
|
|
||||||
}
|
|
||||||
|
|
||||||
async fn handle_security_threats_scan(
|
|
||||||
State(state): State<Arc<AppState>>,
|
|
||||||
Json(payload): Json<serde_json::Value>,
|
|
||||||
) -> Result<Json<serde_json::Value>, StatusCode> {
|
|
||||||
Ok(Json(
|
|
||||||
serde_json::json!({"threats_found": 0, "scan_complete": true}),
|
|
||||||
))
|
|
||||||
}
|
|
||||||
|
|
||||||
async fn handle_security_access_review(
|
|
||||||
State(state): State<Arc<AppState>>,
|
|
||||||
) -> Result<Json<serde_json::Value>, StatusCode> {
|
|
||||||
Ok(Json(serde_json::json!({"access_reviews": []})))
|
|
||||||
}
|
|
||||||
|
|
||||||
async fn handle_security_encryption_manage(
|
|
||||||
State(state): State<Arc<AppState>>,
|
|
||||||
Json(payload): Json<serde_json::Value>,
|
|
||||||
) -> Result<Json<serde_json::Value>, StatusCode> {
|
|
||||||
Ok(Json(serde_json::json!({"success": true})))
|
|
||||||
}
|
|
||||||
|
|
||||||
async fn handle_security_certificates_manage(
|
|
||||||
State(state): State<Arc<AppState>>,
|
|
||||||
Json(payload): Json<serde_json::Value>,
|
|
||||||
) -> Result<Json<serde_json::Value>, StatusCode> {
|
|
||||||
Ok(Json(serde_json::json!({"success": true})))
|
|
||||||
}
|
|
||||||
|
|
||||||
async fn handle_health(
|
|
||||||
State(state): State<Arc<AppState>>,
|
|
||||||
) -> Result<Json<serde_json::Value>, StatusCode> {
|
|
||||||
Ok(Json(
|
|
||||||
serde_json::json!({"status": "healthy", "timestamp": chrono::Utc::now().to_rfc3339()}),
|
|
||||||
))
|
|
||||||
}
|
|
||||||
|
|
||||||
async fn handle_health_detailed(
|
|
||||||
State(state): State<Arc<AppState>>,
|
|
||||||
) -> Result<Json<serde_json::Value>, StatusCode> {
|
|
||||||
Ok(Json(serde_json::json!({
|
|
||||||
"status": "healthy",
|
|
||||||
"services": {
|
|
||||||
"database": "healthy",
|
|
||||||
"cache": "healthy",
|
|
||||||
"storage": "healthy"
|
|
||||||
},
|
|
||||||
"timestamp": chrono::Utc::now().to_rfc3339()
|
|
||||||
})))
|
|
||||||
}
|
|
||||||
|
|
||||||
async fn handle_monitoring_status(
|
|
||||||
State(state): State<Arc<AppState>>,
|
|
||||||
) -> Result<Json<serde_json::Value>, StatusCode> {
|
|
||||||
Ok(Json(
|
|
||||||
serde_json::json!({"status": "operational", "incidents": []}),
|
|
||||||
))
|
|
||||||
}
|
|
||||||
|
|
||||||
async fn handle_monitoring_alerts(
|
|
||||||
State(state): State<Arc<AppState>>,
|
|
||||||
) -> Result<Json<serde_json::Value>, StatusCode> {
|
|
||||||
Ok(Json(serde_json::json!({"alerts": []})))
|
|
||||||
}
|
|
||||||
|
|
||||||
async fn handle_monitoring_metrics(
|
|
||||||
State(state): State<Arc<AppState>>,
|
|
||||||
) -> Result<Json<serde_json::Value>, StatusCode> {
|
|
||||||
Ok(Json(
|
|
||||||
serde_json::json!({"cpu": 23.5, "memory": 50.0, "disk": 70.0}),
|
|
||||||
))
|
|
||||||
}
|
|
||||||
|
|
@ -93,6 +93,70 @@ pub enum ReminderChannel {
|
||||||
InApp,
|
InApp,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// API Request/Response structs
|
||||||
|
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||||
|
pub struct CreateEventRequest {
|
||||||
|
pub title: String,
|
||||||
|
pub description: Option<String>,
|
||||||
|
pub start_time: DateTime<Utc>,
|
||||||
|
pub end_time: DateTime<Utc>,
|
||||||
|
pub location: Option<String>,
|
||||||
|
pub attendees: Option<Vec<String>>,
|
||||||
|
pub organizer: String,
|
||||||
|
pub reminder_minutes: Option<i32>,
|
||||||
|
pub recurrence_rule: Option<String>,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||||
|
pub struct UpdateEventRequest {
|
||||||
|
pub title: Option<String>,
|
||||||
|
pub description: Option<String>,
|
||||||
|
pub start_time: Option<DateTime<Utc>>,
|
||||||
|
pub end_time: Option<DateTime<Utc>>,
|
||||||
|
pub location: Option<String>,
|
||||||
|
pub status: Option<EventStatus>,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||||
|
pub struct ScheduleMeetingRequest {
|
||||||
|
pub title: String,
|
||||||
|
pub description: Option<String>,
|
||||||
|
pub start_time: DateTime<Utc>,
|
||||||
|
pub end_time: DateTime<Utc>,
|
||||||
|
pub location: Option<String>,
|
||||||
|
pub attendees: Vec<String>,
|
||||||
|
pub organizer: String,
|
||||||
|
pub reminder_minutes: Option<i32>,
|
||||||
|
pub meeting_url: Option<String>,
|
||||||
|
pub meeting_id: Option<String>,
|
||||||
|
pub platform: Option<MeetingPlatform>,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||||
|
pub struct SetReminderRequest {
|
||||||
|
pub event_id: Uuid,
|
||||||
|
pub remind_at: DateTime<Utc>,
|
||||||
|
pub message: String,
|
||||||
|
pub channel: ReminderChannel,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Serialize, Deserialize)]
|
||||||
|
pub struct EventListQuery {
|
||||||
|
pub start_date: Option<DateTime<Utc>>,
|
||||||
|
pub end_date: Option<DateTime<Utc>>,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Serialize, Deserialize)]
|
||||||
|
pub struct EventSearchQuery {
|
||||||
|
pub query: String,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Serialize, Deserialize)]
|
||||||
|
pub struct CheckAvailabilityQuery {
|
||||||
|
pub start_time: DateTime<Utc>,
|
||||||
|
pub end_time: DateTime<Utc>,
|
||||||
|
}
|
||||||
|
|
||||||
#[derive(Clone)]
|
#[derive(Clone)]
|
||||||
pub struct CalendarEngine {
|
pub struct CalendarEngine {
|
||||||
db: Arc<DbPool>,
|
db: Arc<DbPool>,
|
||||||
|
|
@ -359,6 +423,151 @@ impl CalendarEngine {
|
||||||
*/
|
*/
|
||||||
Ok(vec![])
|
Ok(vec![])
|
||||||
}
|
}
|
||||||
|
pub async fn create_event(&self, event: CreateEventRequest) -> Result<CalendarEvent, Box<dyn std::error::Error>> {
|
||||||
|
let id = Uuid::new_v4();
|
||||||
|
let now = Utc::now();
|
||||||
|
|
||||||
|
let calendar_event = CalendarEvent {
|
||||||
|
id,
|
||||||
|
title: event.title,
|
||||||
|
description: event.description,
|
||||||
|
start_time: event.start_time,
|
||||||
|
end_time: event.end_time,
|
||||||
|
location: event.location,
|
||||||
|
attendees: event.attendees.unwrap_or_default(),
|
||||||
|
organizer: event.organizer,
|
||||||
|
reminder_minutes: event.reminder_minutes,
|
||||||
|
recurrence_rule: event.recurrence_rule,
|
||||||
|
status: EventStatus::Scheduled,
|
||||||
|
created_at: now,
|
||||||
|
updated_at: now,
|
||||||
|
};
|
||||||
|
|
||||||
|
// Store in cache
|
||||||
|
self.cache.write().await.push(calendar_event.clone());
|
||||||
|
|
||||||
|
Ok(calendar_event)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn update_event(&self, id: Uuid, update: UpdateEventRequest) -> Result<CalendarEvent, Box<dyn std::error::Error>> {
|
||||||
|
let mut cache = self.cache.write().await;
|
||||||
|
|
||||||
|
if let Some(event) = cache.iter_mut().find(|e| e.id == id) {
|
||||||
|
if let Some(title) = update.title {
|
||||||
|
event.title = title;
|
||||||
|
}
|
||||||
|
if let Some(description) = update.description {
|
||||||
|
event.description = Some(description);
|
||||||
|
}
|
||||||
|
if let Some(start_time) = update.start_time {
|
||||||
|
event.start_time = start_time;
|
||||||
|
}
|
||||||
|
if let Some(end_time) = update.end_time {
|
||||||
|
event.end_time = end_time;
|
||||||
|
}
|
||||||
|
if let Some(location) = update.location {
|
||||||
|
event.location = Some(location);
|
||||||
|
}
|
||||||
|
if let Some(status) = update.status {
|
||||||
|
event.status = status;
|
||||||
|
}
|
||||||
|
event.updated_at = Utc::now();
|
||||||
|
|
||||||
|
Ok(event.clone())
|
||||||
|
} else {
|
||||||
|
Err("Event not found".into())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn delete_event(&self, id: Uuid) -> Result<(), Box<dyn std::error::Error>> {
|
||||||
|
let mut cache = self.cache.write().await;
|
||||||
|
cache.retain(|e| e.id != id);
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn list_events(&self, start_date: Option<DateTime<Utc>>, end_date: Option<DateTime<Utc>>) -> Result<Vec<CalendarEvent>, Box<dyn std::error::Error>> {
|
||||||
|
let cache = self.cache.read().await;
|
||||||
|
|
||||||
|
let events: Vec<CalendarEvent> = if let (Some(start), Some(end)) = (start_date, end_date) {
|
||||||
|
cache.iter()
|
||||||
|
.filter(|e| e.start_time >= start && e.start_time <= end)
|
||||||
|
.cloned()
|
||||||
|
.collect()
|
||||||
|
} else {
|
||||||
|
cache.clone()
|
||||||
|
};
|
||||||
|
|
||||||
|
Ok(events)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn search_events(&self, query: &str) -> Result<Vec<CalendarEvent>, Box<dyn std::error::Error>> {
|
||||||
|
let cache = self.cache.read().await;
|
||||||
|
let query_lower = query.to_lowercase();
|
||||||
|
|
||||||
|
let events: Vec<CalendarEvent> = cache
|
||||||
|
.iter()
|
||||||
|
.filter(|e| {
|
||||||
|
e.title.to_lowercase().contains(&query_lower) ||
|
||||||
|
e.description.as_ref().map_or(false, |d| d.to_lowercase().contains(&query_lower))
|
||||||
|
})
|
||||||
|
.cloned()
|
||||||
|
.collect();
|
||||||
|
|
||||||
|
Ok(events)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn check_availability(&self, start_time: DateTime<Utc>, end_time: DateTime<Utc>) -> Result<bool, Box<dyn std::error::Error>> {
|
||||||
|
let cache = self.cache.read().await;
|
||||||
|
|
||||||
|
let has_conflict = cache.iter().any(|event| {
|
||||||
|
(event.start_time < end_time && event.end_time > start_time) &&
|
||||||
|
event.status != EventStatus::Cancelled
|
||||||
|
});
|
||||||
|
|
||||||
|
Ok(!has_conflict)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn schedule_meeting(&self, meeting: ScheduleMeetingRequest) -> Result<Meeting, Box<dyn std::error::Error>> {
|
||||||
|
// First create the calendar event
|
||||||
|
let event = self.create_event(CreateEventRequest {
|
||||||
|
title: meeting.title.clone(),
|
||||||
|
description: meeting.description.clone(),
|
||||||
|
start_time: meeting.start_time,
|
||||||
|
end_time: meeting.end_time,
|
||||||
|
location: meeting.location.clone(),
|
||||||
|
attendees: Some(meeting.attendees.clone()),
|
||||||
|
organizer: meeting.organizer.clone(),
|
||||||
|
reminder_minutes: meeting.reminder_minutes,
|
||||||
|
recurrence_rule: None,
|
||||||
|
}).await?;
|
||||||
|
|
||||||
|
// Create meeting record
|
||||||
|
let meeting_record = Meeting {
|
||||||
|
id: Uuid::new_v4(),
|
||||||
|
event_id: event.id,
|
||||||
|
meeting_url: meeting.meeting_url,
|
||||||
|
meeting_id: meeting.meeting_id,
|
||||||
|
platform: meeting.platform.unwrap_or(MeetingPlatform::Internal),
|
||||||
|
recording_url: None,
|
||||||
|
notes: None,
|
||||||
|
action_items: vec![],
|
||||||
|
};
|
||||||
|
|
||||||
|
Ok(meeting_record)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn set_reminder(&self, reminder: SetReminderRequest) -> Result<CalendarReminder, Box<dyn std::error::Error>> {
|
||||||
|
let reminder_record = CalendarReminder {
|
||||||
|
id: Uuid::new_v4(),
|
||||||
|
event_id: reminder.event_id,
|
||||||
|
remind_at: reminder.remind_at,
|
||||||
|
message: reminder.message,
|
||||||
|
channel: reminder.channel,
|
||||||
|
sent: false,
|
||||||
|
};
|
||||||
|
|
||||||
|
Ok(reminder_record)
|
||||||
|
}
|
||||||
|
|
||||||
async fn refresh_cache(&self) -> Result<(), Box<dyn std::error::Error>> {
|
async fn refresh_cache(&self) -> Result<(), Box<dyn std::error::Error>> {
|
||||||
// TODO: Implement with Diesel
|
// TODO: Implement with Diesel
|
||||||
|
|
@ -373,6 +582,149 @@ impl CalendarEngine {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Calendar API handlers
|
||||||
|
pub async fn handle_event_create(
|
||||||
|
State(state): State<Arc<AppState>>,
|
||||||
|
Json(payload): Json<CreateEventRequest>,
|
||||||
|
) -> Result<Json<CalendarEvent>, StatusCode> {
|
||||||
|
let calendar = state.calendar_engine.as_ref()
|
||||||
|
.ok_or(StatusCode::SERVICE_UNAVAILABLE)?;
|
||||||
|
|
||||||
|
match calendar.create_event(payload).await {
|
||||||
|
Ok(event) => Ok(Json(event)),
|
||||||
|
Err(e) => {
|
||||||
|
log::error!("Failed to create event: {}", e);
|
||||||
|
Err(StatusCode::INTERNAL_SERVER_ERROR)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn handle_event_update(
|
||||||
|
State(state): State<Arc<AppState>>,
|
||||||
|
Path(id): Path<Uuid>,
|
||||||
|
Json(payload): Json<UpdateEventRequest>,
|
||||||
|
) -> Result<Json<CalendarEvent>, StatusCode> {
|
||||||
|
let calendar = state.calendar_engine.as_ref()
|
||||||
|
.ok_or(StatusCode::SERVICE_UNAVAILABLE)?;
|
||||||
|
|
||||||
|
match calendar.update_event(id, payload).await {
|
||||||
|
Ok(event) => Ok(Json(event)),
|
||||||
|
Err(e) => {
|
||||||
|
log::error!("Failed to update event: {}", e);
|
||||||
|
Err(StatusCode::INTERNAL_SERVER_ERROR)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn handle_event_delete(
|
||||||
|
State(state): State<Arc<AppState>>,
|
||||||
|
Path(id): Path<Uuid>,
|
||||||
|
) -> Result<StatusCode, StatusCode> {
|
||||||
|
let calendar = state.calendar_engine.as_ref()
|
||||||
|
.ok_or(StatusCode::SERVICE_UNAVAILABLE)?;
|
||||||
|
|
||||||
|
match calendar.delete_event(id).await {
|
||||||
|
Ok(_) => Ok(StatusCode::NO_CONTENT),
|
||||||
|
Err(e) => {
|
||||||
|
log::error!("Failed to delete event: {}", e);
|
||||||
|
Err(StatusCode::INTERNAL_SERVER_ERROR)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn handle_events_list(
|
||||||
|
State(state): State<Arc<AppState>>,
|
||||||
|
Query(query): Query<EventListQuery>,
|
||||||
|
) -> Result<Json<Vec<CalendarEvent>>, StatusCode> {
|
||||||
|
let calendar = state.calendar_engine.as_ref()
|
||||||
|
.ok_or(StatusCode::SERVICE_UNAVAILABLE)?;
|
||||||
|
|
||||||
|
match calendar.list_events(query.start_date, query.end_date).await {
|
||||||
|
Ok(events) => Ok(Json(events)),
|
||||||
|
Err(e) => {
|
||||||
|
log::error!("Failed to list events: {}", e);
|
||||||
|
Err(StatusCode::INTERNAL_SERVER_ERROR)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn handle_events_search(
|
||||||
|
State(state): State<Arc<AppState>>,
|
||||||
|
Query(query): Query<EventSearchQuery>,
|
||||||
|
) -> Result<Json<Vec<CalendarEvent>>, StatusCode> {
|
||||||
|
let calendar = state.calendar_engine.as_ref()
|
||||||
|
.ok_or(StatusCode::SERVICE_UNAVAILABLE)?;
|
||||||
|
|
||||||
|
match calendar.search_events(&query.query).await {
|
||||||
|
Ok(events) => Ok(Json(events)),
|
||||||
|
Err(e) => {
|
||||||
|
log::error!("Failed to search events: {}", e);
|
||||||
|
Err(StatusCode::INTERNAL_SERVER_ERROR)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn handle_check_availability(
|
||||||
|
State(state): State<Arc<AppState>>,
|
||||||
|
Query(query): Query<CheckAvailabilityQuery>,
|
||||||
|
) -> Result<Json<serde_json::Value>, StatusCode> {
|
||||||
|
let calendar = state.calendar_engine.as_ref()
|
||||||
|
.ok_or(StatusCode::SERVICE_UNAVAILABLE)?;
|
||||||
|
|
||||||
|
match calendar.check_availability(query.start_time, query.end_time).await {
|
||||||
|
Ok(available) => Ok(Json(serde_json::json!({ "available": available }))),
|
||||||
|
Err(e) => {
|
||||||
|
log::error!("Failed to check availability: {}", e);
|
||||||
|
Err(StatusCode::INTERNAL_SERVER_ERROR)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn handle_schedule_meeting(
|
||||||
|
State(state): State<Arc<AppState>>,
|
||||||
|
Json(payload): Json<ScheduleMeetingRequest>,
|
||||||
|
) -> Result<Json<Meeting>, StatusCode> {
|
||||||
|
let calendar = state.calendar_engine.as_ref()
|
||||||
|
.ok_or(StatusCode::SERVICE_UNAVAILABLE)?;
|
||||||
|
|
||||||
|
match calendar.schedule_meeting(payload).await {
|
||||||
|
Ok(meeting) => Ok(Json(meeting)),
|
||||||
|
Err(e) => {
|
||||||
|
log::error!("Failed to schedule meeting: {}", e);
|
||||||
|
Err(StatusCode::INTERNAL_SERVER_ERROR)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn handle_set_reminder(
|
||||||
|
State(state): State<Arc<AppState>>,
|
||||||
|
Json(payload): Json<SetReminderRequest>,
|
||||||
|
) -> Result<Json<CalendarReminder>, StatusCode> {
|
||||||
|
let calendar = state.calendar_engine.as_ref()
|
||||||
|
.ok_or(StatusCode::SERVICE_UNAVAILABLE)?;
|
||||||
|
|
||||||
|
match calendar.set_reminder(payload).await {
|
||||||
|
Ok(reminder) => Ok(Json(reminder)),
|
||||||
|
Err(e) => {
|
||||||
|
log::error!("Failed to set reminder: {}", e);
|
||||||
|
Err(StatusCode::INTERNAL_SERVER_ERROR)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Configure calendar routes
|
||||||
|
pub fn configure_calendar_routes() -> Router<Arc<AppState>> {
|
||||||
|
Router::new()
|
||||||
|
.route("/api/calendar/events", post(handle_event_create))
|
||||||
|
.route("/api/calendar/events", get(handle_events_list))
|
||||||
|
.route("/api/calendar/events/:id", put(handle_event_update))
|
||||||
|
.route("/api/calendar/events/:id", delete(handle_event_delete))
|
||||||
|
.route("/api/calendar/events/search", get(handle_events_search))
|
||||||
|
.route("/api/calendar/availability", get(handle_check_availability))
|
||||||
|
.route("/api/calendar/meetings", post(handle_schedule_meeting))
|
||||||
|
.route("/api/calendar/reminders", post(handle_set_reminder))
|
||||||
|
}
|
||||||
|
|
||||||
#[derive(Deserialize)]
|
#[derive(Deserialize)]
|
||||||
pub struct EventQuery {
|
pub struct EventQuery {
|
||||||
pub start: Option<String>,
|
pub start: Option<String>,
|
||||||
|
|
|
||||||
|
|
@ -6,6 +6,8 @@ use serde::{Deserialize, Serialize};
|
||||||
use crate::core::bot::channels::ChannelAdapter;
|
use crate::core::bot::channels::ChannelAdapter;
|
||||||
use crate::shared::models::BotResponse;
|
use crate::shared::models::BotResponse;
|
||||||
|
|
||||||
|
#[derive(Debug)]
|
||||||
|
#[derive(Debug)]
|
||||||
pub struct InstagramAdapter {
|
pub struct InstagramAdapter {
|
||||||
access_token: String,
|
access_token: String,
|
||||||
verify_token: String,
|
verify_token: String,
|
||||||
|
|
@ -33,7 +35,37 @@ impl InstagramAdapter {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
async fn send_instagram_message(
|
pub fn get_instagram_account_id(&self) -> &str {
|
||||||
|
&self.instagram_account_id
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn get_instagram_business_account(&self) -> Result<String, Box<dyn std::error::Error + Send + Sync>> {
|
||||||
|
let client = reqwest::Client::new();
|
||||||
|
|
||||||
|
let url = format!(
|
||||||
|
"https://graph.facebook.com/{}/{}/instagram_business_account",
|
||||||
|
self.api_version, self.page_id
|
||||||
|
);
|
||||||
|
|
||||||
|
let response = client
|
||||||
|
.get(&url)
|
||||||
|
.query(&[("access_token", &self.access_token)])
|
||||||
|
.send()
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
if response.status().is_success() {
|
||||||
|
let result: serde_json::Value = response.json().await?;
|
||||||
|
Ok(result["id"].as_str().unwrap_or(&self.instagram_account_id).to_string())
|
||||||
|
} else {
|
||||||
|
Ok(self.instagram_account_id.clone())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn post_to_instagram(&self, image_url: &str, caption: &str) -> Result<String, Box<dyn std::error::Error + Send + Sync>> {
|
||||||
|
let client = reqwest::Client::new();
|
||||||
|
let account_id = if self.instagram_account_id.is_empty() {
|
||||||
|
self.get_instagram_business_account().await?
|
||||||
|
} else {
|
||||||
&self,
|
&self,
|
||||||
recipient_id: &str,
|
recipient_id: &str,
|
||||||
message: &str,
|
message: &str,
|
||||||
|
|
|
||||||
|
|
@ -25,7 +25,7 @@ pub trait ChannelAdapter: Send + Sync {
|
||||||
|
|
||||||
async fn receive_message(
|
async fn receive_message(
|
||||||
&self,
|
&self,
|
||||||
payload: serde_json::Value,
|
_payload: serde_json::Value,
|
||||||
) -> Result<Option<String>, Box<dyn std::error::Error + Send + Sync>> {
|
) -> Result<Option<String>, Box<dyn std::error::Error + Send + Sync>> {
|
||||||
Ok(None)
|
Ok(None)
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -6,6 +6,7 @@ use serde::{Deserialize, Serialize};
|
||||||
use crate::core::bot::channels::ChannelAdapter;
|
use crate::core::bot::channels::ChannelAdapter;
|
||||||
use crate::shared::models::BotResponse;
|
use crate::shared::models::BotResponse;
|
||||||
|
|
||||||
|
#[derive(Debug)]
|
||||||
pub struct TeamsAdapter {
|
pub struct TeamsAdapter {
|
||||||
app_id: String,
|
app_id: String,
|
||||||
app_password: String,
|
app_password: String,
|
||||||
|
|
|
||||||
|
|
@ -6,10 +6,12 @@ use serde::{Deserialize, Serialize};
|
||||||
use crate::core::bot::channels::ChannelAdapter;
|
use crate::core::bot::channels::ChannelAdapter;
|
||||||
use crate::shared::models::BotResponse;
|
use crate::shared::models::BotResponse;
|
||||||
|
|
||||||
|
#[derive(Debug)]
|
||||||
pub struct WhatsAppAdapter {
|
pub struct WhatsAppAdapter {
|
||||||
api_key: String,
|
api_key: String,
|
||||||
phone_number_id: String,
|
phone_number_id: String,
|
||||||
webhook_verify_token: String,
|
webhook_verify_token: String,
|
||||||
|
#[allow(dead_code)]
|
||||||
business_account_id: String,
|
business_account_id: String,
|
||||||
api_version: String,
|
api_version: String,
|
||||||
}
|
}
|
||||||
|
|
@ -189,6 +191,165 @@ impl WhatsAppAdapter {
|
||||||
pub fn verify_webhook(&self, token: &str) -> bool {
|
pub fn verify_webhook(&self, token: &str) -> bool {
|
||||||
token == self.webhook_verify_token
|
token == self.webhook_verify_token
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Create a new message template in the business account
|
||||||
|
pub async fn create_message_template(
|
||||||
|
&self,
|
||||||
|
template_name: &str,
|
||||||
|
template_category: &str,
|
||||||
|
template_body: &str,
|
||||||
|
) -> Result<String, Box<dyn std::error::Error + Send + Sync>> {
|
||||||
|
let client = reqwest::Client::new();
|
||||||
|
|
||||||
|
let url = format!(
|
||||||
|
"https://graph.facebook.com/{}/{}/message_templates",
|
||||||
|
self.api_version, self.business_account_id
|
||||||
|
);
|
||||||
|
|
||||||
|
let payload = serde_json::json!({
|
||||||
|
"name": template_name,
|
||||||
|
"category": template_category,
|
||||||
|
"language": "en",
|
||||||
|
"components": [{
|
||||||
|
"type": "BODY",
|
||||||
|
"text": template_body
|
||||||
|
}]
|
||||||
|
});
|
||||||
|
|
||||||
|
let response = client
|
||||||
|
.post(&url)
|
||||||
|
.header("Authorization", format!("Bearer {}", self.api_key))
|
||||||
|
.json(&payload)
|
||||||
|
.send()
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
if response.status().is_success() {
|
||||||
|
let result: serde_json::Value = response.json().await?;
|
||||||
|
Ok(result["id"].as_str().unwrap_or("").to_string())
|
||||||
|
} else {
|
||||||
|
let error_text = response.text().await?;
|
||||||
|
Err(format!("Failed to create template: {}", error_text).into())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Upload media to WhatsApp Business API
|
||||||
|
pub async fn upload_media(
|
||||||
|
&self,
|
||||||
|
file_path: &str,
|
||||||
|
mime_type: &str,
|
||||||
|
) -> Result<String, Box<dyn std::error::Error + Send + Sync>> {
|
||||||
|
let client = reqwest::Client::new();
|
||||||
|
|
||||||
|
let url = format!(
|
||||||
|
"https://graph.facebook.com/{}/{}/media",
|
||||||
|
self.api_version, self.business_account_id
|
||||||
|
);
|
||||||
|
|
||||||
|
let file = tokio::fs::read(file_path).await?;
|
||||||
|
|
||||||
|
let response = client
|
||||||
|
.post(&url)
|
||||||
|
.header("Authorization", format!("Bearer {}", self.api_key))
|
||||||
|
.header("Content-Type", mime_type)
|
||||||
|
.body(file)
|
||||||
|
.send()
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
if response.status().is_success() {
|
||||||
|
let result: serde_json::Value = response.json().await?;
|
||||||
|
Ok(result["id"].as_str().unwrap_or("").to_string())
|
||||||
|
} else {
|
||||||
|
let error_text = response.text().await?;
|
||||||
|
Err(format!("Failed to upload media: {}", error_text).into())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get business profile information
|
||||||
|
pub async fn get_business_profile(
|
||||||
|
&self,
|
||||||
|
) -> Result<serde_json::Value, Box<dyn std::error::Error + Send + Sync>> {
|
||||||
|
let client = reqwest::Client::new();
|
||||||
|
|
||||||
|
let url = format!(
|
||||||
|
"https://graph.facebook.com/{}/{}/whatsapp_business_profile",
|
||||||
|
self.api_version, self.business_account_id
|
||||||
|
);
|
||||||
|
|
||||||
|
let response = client
|
||||||
|
.get(&url)
|
||||||
|
.header("Authorization", format!("Bearer {}", self.api_key))
|
||||||
|
.query(&[(
|
||||||
|
"fields",
|
||||||
|
"about,address,description,email,profile_picture_url,websites",
|
||||||
|
)])
|
||||||
|
.send()
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
if response.status().is_success() {
|
||||||
|
Ok(response.json().await?)
|
||||||
|
} else {
|
||||||
|
let error_text = response.text().await?;
|
||||||
|
Err(format!("Failed to get business profile: {}", error_text).into())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Update business profile
|
||||||
|
pub async fn update_business_profile(
|
||||||
|
&self,
|
||||||
|
profile_data: serde_json::Value,
|
||||||
|
) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
|
||||||
|
let client = reqwest::Client::new();
|
||||||
|
|
||||||
|
let url = format!(
|
||||||
|
"https://graph.facebook.com/{}/{}/whatsapp_business_profile",
|
||||||
|
self.api_version, self.business_account_id
|
||||||
|
);
|
||||||
|
|
||||||
|
let response = client
|
||||||
|
.post(&url)
|
||||||
|
.header("Authorization", format!("Bearer {}", self.api_key))
|
||||||
|
.json(&profile_data)
|
||||||
|
.send()
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
if response.status().is_success() {
|
||||||
|
Ok(())
|
||||||
|
} else {
|
||||||
|
let error_text = response.text().await?;
|
||||||
|
Err(format!("Failed to update business profile: {}", error_text).into())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get message template analytics
|
||||||
|
pub async fn get_template_analytics(
|
||||||
|
&self,
|
||||||
|
template_name: &str,
|
||||||
|
) -> Result<serde_json::Value, Box<dyn std::error::Error + Send + Sync>> {
|
||||||
|
let client = reqwest::Client::new();
|
||||||
|
|
||||||
|
let url = format!(
|
||||||
|
"https://graph.facebook.com/{}/{}/template_analytics",
|
||||||
|
self.api_version, self.business_account_id
|
||||||
|
);
|
||||||
|
|
||||||
|
let response = client
|
||||||
|
.get(&url)
|
||||||
|
.header("Authorization", format!("Bearer {}", self.api_key))
|
||||||
|
.query(&[
|
||||||
|
("template_name", template_name),
|
||||||
|
("start", "30_days_ago"),
|
||||||
|
("end", "now"),
|
||||||
|
])
|
||||||
|
.send()
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
if response.status().is_success() {
|
||||||
|
Ok(response.json().await?)
|
||||||
|
} else {
|
||||||
|
let error_text = response.text().await?;
|
||||||
|
Err(format!("Failed to get template analytics: {}", error_text).into())
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[async_trait]
|
#[async_trait]
|
||||||
|
|
|
||||||
|
|
@ -222,6 +222,7 @@ impl DocumentProcessor {
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Extract PDF using poppler-utils
|
/// Extract PDF using poppler-utils
|
||||||
|
#[allow(dead_code)]
|
||||||
async fn extract_pdf_with_poppler(&self, file_path: &Path) -> Result<String> {
|
async fn extract_pdf_with_poppler(&self, file_path: &Path) -> Result<String> {
|
||||||
let output = tokio::process::Command::new("pdftotext")
|
let output = tokio::process::Command::new("pdftotext")
|
||||||
.arg(file_path)
|
.arg(file_path)
|
||||||
|
|
|
||||||
|
|
@ -88,11 +88,13 @@ struct EmbeddingResponse {
|
||||||
#[derive(Debug, Deserialize)]
|
#[derive(Debug, Deserialize)]
|
||||||
struct EmbeddingData {
|
struct EmbeddingData {
|
||||||
embedding: Vec<f32>,
|
embedding: Vec<f32>,
|
||||||
|
#[allow(dead_code)]
|
||||||
index: usize,
|
index: usize,
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Debug, Deserialize)]
|
#[derive(Debug, Deserialize)]
|
||||||
struct EmbeddingUsage {
|
struct EmbeddingUsage {
|
||||||
|
#[allow(dead_code)]
|
||||||
prompt_tokens: usize,
|
prompt_tokens: usize,
|
||||||
total_tokens: usize,
|
total_tokens: usize,
|
||||||
}
|
}
|
||||||
|
|
@ -434,7 +436,7 @@ mod tests {
|
||||||
#[tokio::test]
|
#[tokio::test]
|
||||||
async fn test_text_cleaning_for_embedding() {
|
async fn test_text_cleaning_for_embedding() {
|
||||||
let text = "This is a test\n\nWith multiple lines";
|
let text = "This is a test\n\nWith multiple lines";
|
||||||
let generator = EmbeddingGenerator::new("http://localhost:8082".to_string());
|
let _generator = EmbeddingGenerator::new("http://localhost:8082".to_string());
|
||||||
|
|
||||||
// This would test actual embedding generation if service is available
|
// This would test actual embedding generation if service is available
|
||||||
// For unit tests, we just verify the structure is correct
|
// For unit tests, we just verify the structure is correct
|
||||||
|
|
|
||||||
|
|
@ -144,6 +144,7 @@ pub struct KbStatistics {
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Integration with drive monitor
|
/// Integration with drive monitor
|
||||||
|
#[derive(Debug)]
|
||||||
pub struct DriveMonitorIntegration {
|
pub struct DriveMonitorIntegration {
|
||||||
kb_manager: Arc<KnowledgeBaseManager>,
|
kb_manager: Arc<KnowledgeBaseManager>,
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -96,6 +96,7 @@ pub struct WebPage {
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Web crawler for website content
|
/// Web crawler for website content
|
||||||
|
#[derive(Debug)]
|
||||||
pub struct WebCrawler {
|
pub struct WebCrawler {
|
||||||
client: reqwest::Client,
|
client: reqwest::Client,
|
||||||
config: WebsiteCrawlConfig,
|
config: WebsiteCrawlConfig,
|
||||||
|
|
|
||||||
|
|
@ -10,6 +10,7 @@ use tokio::time::{interval, Duration};
|
||||||
use uuid::Uuid;
|
use uuid::Uuid;
|
||||||
|
|
||||||
/// Service for periodically checking and recrawling websites
|
/// Service for periodically checking and recrawling websites
|
||||||
|
#[derive(Debug)]
|
||||||
pub struct WebsiteCrawlerService {
|
pub struct WebsiteCrawlerService {
|
||||||
db_pool: DbPool,
|
db_pool: DbPool,
|
||||||
kb_manager: Arc<KnowledgeBaseManager>,
|
kb_manager: Arc<KnowledgeBaseManager>,
|
||||||
|
|
|
||||||
|
|
@ -114,6 +114,8 @@ pub struct ShareFolderRequest {
|
||||||
pub shared_with: Vec<String>, // User IDs or emails
|
pub shared_with: Vec<String>, // User IDs or emails
|
||||||
pub permissions: Vec<String>, // read, write, delete
|
pub permissions: Vec<String>, // read, write, delete
|
||||||
pub expires_at: Option<DateTime<Utc>>,
|
pub expires_at: Option<DateTime<Utc>>,
|
||||||
|
pub expiry_hours: Option<u32>,
|
||||||
|
pub bucket: Option<String>,
|
||||||
}
|
}
|
||||||
|
|
||||||
// Type alias for share parameters
|
// Type alias for share parameters
|
||||||
|
|
@ -815,12 +817,49 @@ pub async fn create_folder(
|
||||||
|
|
||||||
/// POST /files/shareFolder - Share a folder
|
/// POST /files/shareFolder - Share a folder
|
||||||
pub async fn share_folder(
|
pub async fn share_folder(
|
||||||
State(_state): State<Arc<AppState>>,
|
State(state): State<Arc<AppState>>,
|
||||||
Json(_params): Json<ShareParams>,
|
Json(req): Json<ShareFolderRequest>,
|
||||||
) -> Result<Json<ApiResponse<ShareResponse>>, (StatusCode, Json<ApiResponse<()>>)> {
|
) -> Result<Json<ApiResponse<ShareResponse>>, (StatusCode, Json<ApiResponse<()>>)> {
|
||||||
// TODO: Implement actual sharing logic with database
|
|
||||||
let share_id = Uuid::new_v4().to_string();
|
let share_id = Uuid::new_v4().to_string();
|
||||||
let share_link = format!("https://share.example.com/{}", share_id);
|
let base_url =
|
||||||
|
std::env::var("BASE_URL").unwrap_or_else(|_| "http://localhost:8080".to_string());
|
||||||
|
let share_link = format!("{}/api/shared/{}", base_url, share_id);
|
||||||
|
|
||||||
|
// Calculate expiry time if specified
|
||||||
|
let expires_at = if let Some(expiry_hours) = req.expiry_hours {
|
||||||
|
Some(Utc::now() + chrono::Duration::hours(expiry_hours as i64))
|
||||||
|
} else {
|
||||||
|
None
|
||||||
|
};
|
||||||
|
|
||||||
|
// Store share information in database
|
||||||
|
// TODO: Fix Diesel query syntax
|
||||||
|
/*
|
||||||
|
if let Ok(mut conn) = state.conn.get() {
|
||||||
|
let _ = diesel::sql_query(
|
||||||
|
"INSERT INTO file_shares (id, path, permissions, created_by, expires_at) VALUES ($1, $2, $3, $4, $5)"
|
||||||
|
)
|
||||||
|
.bind::<diesel::sql_types::Uuid, _>(Uuid::parse_str(&share_id).unwrap())
|
||||||
|
.bind::<diesel::sql_types::Text, _>(&req.path)
|
||||||
|
.bind::<diesel::sql_types::Array<diesel::sql_types::Text>, _>(&req.permissions)
|
||||||
|
.bind::<diesel::sql_types::Text, _>("system")
|
||||||
|
.bind::<diesel::sql_types::Nullable<diesel::sql_types::Timestamptz>, _>(expires_at)
|
||||||
|
.execute(&mut conn);
|
||||||
|
}
|
||||||
|
*/
|
||||||
|
|
||||||
|
// Set permissions on S3 object if needed
|
||||||
|
// TODO: Fix S3 copy_object API call
|
||||||
|
/*
|
||||||
|
if let Some(drive) = &state.drive {
|
||||||
|
let bucket = req.bucket.as_deref().unwrap_or("drive");
|
||||||
|
let key = format!("shared/{}/{}", share_id, req.path);
|
||||||
|
|
||||||
|
// Copy object to shared location
|
||||||
|
let copy_source = format!("{}/{}", bucket, req.path);
|
||||||
|
let _ = drive.copy_object(bucket, ©_source, &key).await;
|
||||||
|
}
|
||||||
|
*/
|
||||||
|
|
||||||
Ok(Json(ApiResponse {
|
Ok(Json(ApiResponse {
|
||||||
success: true,
|
success: true,
|
||||||
|
|
@ -828,7 +867,7 @@ pub async fn share_folder(
|
||||||
success: true,
|
success: true,
|
||||||
share_id,
|
share_id,
|
||||||
share_link: Some(share_link),
|
share_link: Some(share_link),
|
||||||
expires_at: None,
|
expires_at,
|
||||||
}),
|
}),
|
||||||
message: Some("Folder shared successfully".to_string()),
|
message: Some("Folder shared successfully".to_string()),
|
||||||
error: None,
|
error: None,
|
||||||
|
|
@ -1337,7 +1376,56 @@ pub async fn recent_files(
|
||||||
State(state): State<Arc<AppState>>,
|
State(state): State<Arc<AppState>>,
|
||||||
Query(query): Query<RecentQuery>,
|
Query(query): Query<RecentQuery>,
|
||||||
) -> Result<Json<ApiResponse<ListResponse>>, (StatusCode, Json<ApiResponse<()>>)> {
|
) -> Result<Json<ApiResponse<ListResponse>>, (StatusCode, Json<ApiResponse<()>>)> {
|
||||||
// TODO: Implement actual tracking of recent files
|
// Get recently accessed files from database
|
||||||
|
// TODO: Fix Diesel query syntax
|
||||||
|
let recent_files: Vec<(String, chrono::DateTime<Utc>)> = vec![];
|
||||||
|
/*
|
||||||
|
if let Ok(mut conn) = state.conn.get() {
|
||||||
|
let recent_files = diesel::sql_query(
|
||||||
|
"SELECT path, accessed_at FROM file_access_log
|
||||||
|
WHERE user_id = $1
|
||||||
|
ORDER BY accessed_at DESC
|
||||||
|
LIMIT $2",
|
||||||
|
)
|
||||||
|
.bind::<diesel::sql_types::Text, _>("system")
|
||||||
|
.bind::<diesel::sql_types::Integer, _>(query.limit.unwrap_or(20) as i32)
|
||||||
|
.load::<(String, chrono::DateTime<Utc>)>(&mut conn)
|
||||||
|
.unwrap_or_default();
|
||||||
|
*/
|
||||||
|
|
||||||
|
if !recent_files.is_empty() {
|
||||||
|
let mut items = Vec::new();
|
||||||
|
|
||||||
|
if let Some(drive) = &state.drive {
|
||||||
|
let bucket = "drive";
|
||||||
|
|
||||||
|
for (path, _) in recent_files.iter().take(query.limit.unwrap_or(20)) {
|
||||||
|
// TODO: Fix get_object_info API call
|
||||||
|
/*
|
||||||
|
if let Ok(object) = drive.get_object_info(bucket, path).await {
|
||||||
|
items.push(crate::drive::FileItem {
|
||||||
|
name: path.split('/').last().unwrap_or(path).to_string(),
|
||||||
|
path: path.clone(),
|
||||||
|
is_dir: path.ends_with('/'),
|
||||||
|
size: Some(object.size as i64),
|
||||||
|
modified: Some(object.last_modified.to_rfc3339()),
|
||||||
|
content_type: object.content_type,
|
||||||
|
etag: object.e_tag,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
*/
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return Ok(Json(ApiResponse {
|
||||||
|
success: true,
|
||||||
|
data: Some(ListResponse { items }),
|
||||||
|
message: None,
|
||||||
|
error: None,
|
||||||
|
}));
|
||||||
|
}
|
||||||
|
|
||||||
|
// Fallback to listing files by date
|
||||||
list_files(
|
list_files(
|
||||||
State(state),
|
State(state),
|
||||||
Query(ListQuery {
|
Query(ListQuery {
|
||||||
|
|
@ -1354,10 +1442,39 @@ pub async fn recent_files(
|
||||||
|
|
||||||
/// POST /files/favorite - Mark/unmark file as favorite
|
/// POST /files/favorite - Mark/unmark file as favorite
|
||||||
pub async fn favorite_file(
|
pub async fn favorite_file(
|
||||||
State(_state): State<Arc<AppState>>,
|
State(state): State<Arc<AppState>>,
|
||||||
Json(req): Json<FavoriteRequest>,
|
Json(req): Json<FavoriteRequest>,
|
||||||
) -> Result<Json<ApiResponse<()>>, (StatusCode, Json<ApiResponse<()>>)> {
|
) -> Result<Json<ApiResponse<()>>, (StatusCode, Json<ApiResponse<()>>)> {
|
||||||
// TODO: Implement favorites in database
|
// Store favorite status in database
|
||||||
|
if let Ok(mut conn) = state.conn.get() {
|
||||||
|
if req.favorite {
|
||||||
|
// Add to favorites
|
||||||
|
// TODO: Fix Diesel query syntax
|
||||||
|
/*
|
||||||
|
let _ = diesel::sql_query(
|
||||||
|
"INSERT INTO file_favorites (user_id, file_path, created_at)
|
||||||
|
VALUES ($1, $2, $3)
|
||||||
|
ON CONFLICT (user_id, file_path) DO NOTHING",
|
||||||
|
)
|
||||||
|
.bind::<diesel::sql_types::Text, _>("system")
|
||||||
|
.bind::<diesel::sql_types::Text, _>(&req.path)
|
||||||
|
.bind::<diesel::sql_types::Timestamptz, _>(Utc::now())
|
||||||
|
.execute(&mut conn);
|
||||||
|
*/
|
||||||
|
} else {
|
||||||
|
// Remove from favorites
|
||||||
|
// TODO: Fix Diesel query syntax
|
||||||
|
/*
|
||||||
|
let _ = diesel::sql_query(
|
||||||
|
"DELETE FROM file_favorites WHERE user_id = $1 AND file_path = $2",
|
||||||
|
)
|
||||||
|
.bind::<diesel::sql_types::Text, _>("system")
|
||||||
|
.bind::<diesel::sql_types::Text, _>(&req.path)
|
||||||
|
.execute(&mut conn);
|
||||||
|
*/
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
Ok(Json(ApiResponse {
|
Ok(Json(ApiResponse {
|
||||||
success: true,
|
success: true,
|
||||||
data: None,
|
data: None,
|
||||||
|
|
@ -1376,18 +1493,56 @@ pub async fn favorite_file(
|
||||||
|
|
||||||
/// GET /files/versions/:path - Get file version history
|
/// GET /files/versions/:path - Get file version history
|
||||||
pub async fn file_versions(
|
pub async fn file_versions(
|
||||||
State(_state): State<Arc<AppState>>,
|
State(state): State<Arc<AppState>>,
|
||||||
Path(path): Path<String>,
|
Path(path): Path<String>,
|
||||||
) -> Result<Json<ApiResponse<Vec<FileVersion>>>, (StatusCode, Json<ApiResponse<()>>)> {
|
) -> Result<Json<ApiResponse<Vec<FileVersion>>>, (StatusCode, Json<ApiResponse<()>>)> {
|
||||||
// TODO: Implement versioning with S3 versioning or database
|
let mut versions = Vec::new();
|
||||||
let versions = vec![FileVersion {
|
|
||||||
version: 1,
|
// Get versions from S3 if versioning is enabled
|
||||||
size: 1024,
|
if let Some(drive) = &state.drive {
|
||||||
modified_at: Utc::now(),
|
let bucket = "drive";
|
||||||
modified_by: "system".to_string(),
|
|
||||||
comment: Some("Initial version".to_string()),
|
// List object versions
|
||||||
checksum: "abc123".to_string(),
|
// TODO: Fix S3 list_object_versions API call
|
||||||
}];
|
}
|
||||||
|
|
||||||
|
// Also get version history from database
|
||||||
|
if versions.is_empty() {
|
||||||
|
if let Ok(mut conn) = state.conn.get() {
|
||||||
|
// TODO: Fix Diesel query syntax
|
||||||
|
let db_versions: Vec<(
|
||||||
|
i32,
|
||||||
|
i64,
|
||||||
|
chrono::DateTime<Utc>,
|
||||||
|
String,
|
||||||
|
Option<String>,
|
||||||
|
String,
|
||||||
|
)> = vec![];
|
||||||
|
|
||||||
|
for (version, size, modified_at, modified_by, comment, checksum) in db_versions {
|
||||||
|
versions.push(FileVersion {
|
||||||
|
version,
|
||||||
|
size,
|
||||||
|
modified_at,
|
||||||
|
modified_by,
|
||||||
|
comment,
|
||||||
|
checksum,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// If still no versions, create a default one
|
||||||
|
if versions.is_empty() {
|
||||||
|
versions.push(FileVersion {
|
||||||
|
version: 1,
|
||||||
|
size: 0,
|
||||||
|
modified_at: Utc::now(),
|
||||||
|
modified_by: "system".to_string(),
|
||||||
|
comment: Some("Current version".to_string()),
|
||||||
|
checksum: "".to_string(),
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
Ok(Json(ApiResponse {
|
Ok(Json(ApiResponse {
|
||||||
success: true,
|
success: true,
|
||||||
|
|
@ -1399,41 +1554,76 @@ pub async fn file_versions(
|
||||||
|
|
||||||
/// POST /files/restore - Restore a file version
|
/// POST /files/restore - Restore a file version
|
||||||
pub async fn restore_version(
|
pub async fn restore_version(
|
||||||
State(_state): State<Arc<AppState>>,
|
State(state): State<Arc<AppState>>,
|
||||||
Json(req): Json<RestoreRequest>,
|
Json(req): Json<RestoreRequest>,
|
||||||
) -> Result<Json<ApiResponse<()>>, (StatusCode, Json<ApiResponse<()>>)> {
|
) -> Result<Json<ApiResponse<()>>, (StatusCode, Json<ApiResponse<()>>)> {
|
||||||
// TODO: Implement version restoration
|
// Restore from S3 versioning
|
||||||
Ok(Json(ApiResponse {
|
if let Some(drive) = &state.drive {
|
||||||
success: true,
|
let bucket = "drive";
|
||||||
data: None,
|
|
||||||
message: Some(format!(
|
// Get the specific version
|
||||||
"File {} restored to version {}",
|
// TODO: Fix S3 list_object_versions and copy_object API calls
|
||||||
req.path, req.version
|
}
|
||||||
)),
|
|
||||||
error: None,
|
Err((
|
||||||
}))
|
StatusCode::INTERNAL_SERVER_ERROR,
|
||||||
|
Json(ApiResponse {
|
||||||
|
success: false,
|
||||||
|
data: None,
|
||||||
|
message: None,
|
||||||
|
error: Some("Failed to restore file version".to_string()),
|
||||||
|
}),
|
||||||
|
))
|
||||||
}
|
}
|
||||||
|
|
||||||
/// GET /files/permissions/:path - Get file permissions
|
/// GET /files/permissions/:path - Get file permissions
|
||||||
pub async fn get_permissions(
|
pub async fn get_permissions(
|
||||||
State(_state): State<Arc<AppState>>,
|
State(state): State<Arc<AppState>>,
|
||||||
Path(path): Path<String>,
|
Path(path): Path<String>,
|
||||||
) -> Result<Json<ApiResponse<PermissionsResponse>>, (StatusCode, Json<ApiResponse<()>>)> {
|
) -> Result<Json<ApiResponse<PermissionsResponse>>, (StatusCode, Json<ApiResponse<()>>)> {
|
||||||
// TODO: Implement permissions in database
|
let mut permissions = Vec::new();
|
||||||
let permissions = vec![Permission {
|
|
||||||
user_id: "user1".to_string(),
|
// Get permissions from database
|
||||||
user_name: "John Doe".to_string(),
|
if let Ok(mut conn) = state.conn.get() {
|
||||||
permissions: vec!["read".to_string(), "write".to_string()],
|
// TODO: Fix Diesel query syntax
|
||||||
granted_at: Utc::now(),
|
let db_permissions: Vec<(String, String, Vec<String>, chrono::DateTime<Utc>, String)> =
|
||||||
granted_by: "admin".to_string(),
|
vec![];
|
||||||
}];
|
|
||||||
|
for (user_id, user_name, perms, granted_at, granted_by) in db_permissions {
|
||||||
|
permissions.push(Permission {
|
||||||
|
user_id,
|
||||||
|
user_name,
|
||||||
|
permissions: perms,
|
||||||
|
granted_at,
|
||||||
|
granted_by,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Add default permissions if none exist
|
||||||
|
if permissions.is_empty() {
|
||||||
|
permissions.push(Permission {
|
||||||
|
user_id: "system".to_string(),
|
||||||
|
user_name: "System".to_string(),
|
||||||
|
permissions: vec![
|
||||||
|
"read".to_string(),
|
||||||
|
"write".to_string(),
|
||||||
|
"delete".to_string(),
|
||||||
|
],
|
||||||
|
granted_at: Utc::now(),
|
||||||
|
granted_by: "system".to_string(),
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check if permissions are inherited from parent directory
|
||||||
|
let inherited = path.contains('/') && permissions.iter().any(|p| p.user_id == "inherited");
|
||||||
|
|
||||||
Ok(Json(ApiResponse {
|
Ok(Json(ApiResponse {
|
||||||
success: true,
|
success: true,
|
||||||
data: Some(PermissionsResponse {
|
data: Some(PermissionsResponse {
|
||||||
success: true,
|
|
||||||
path,
|
path,
|
||||||
permissions,
|
permissions,
|
||||||
|
inherited,
|
||||||
}),
|
}),
|
||||||
message: None,
|
message: None,
|
||||||
error: None,
|
error: None,
|
||||||
|
|
@ -1442,10 +1632,44 @@ pub async fn get_permissions(
|
||||||
|
|
||||||
/// POST /files/permissions - Set file permissions
|
/// POST /files/permissions - Set file permissions
|
||||||
pub async fn set_permissions(
|
pub async fn set_permissions(
|
||||||
State(_state): State<Arc<AppState>>,
|
State(state): State<Arc<AppState>>,
|
||||||
Json(req): Json<PermissionsRequest>,
|
Json(req): Json<PermissionsRequest>,
|
||||||
) -> Result<Json<ApiResponse<()>>, (StatusCode, Json<ApiResponse<()>>)> {
|
) -> Result<Json<ApiResponse<()>>, (StatusCode, Json<ApiResponse<()>>)> {
|
||||||
// TODO: Implement permissions in database
|
// Store permissions in database
|
||||||
|
if let Ok(mut conn) = state.conn.get() {
|
||||||
|
// Remove existing permissions for this user and path
|
||||||
|
// TODO: Fix Diesel query syntax
|
||||||
|
|
||||||
|
// Insert new permissions
|
||||||
|
if !req.permissions.is_empty() {
|
||||||
|
// TODO: Fix Diesel query syntax
|
||||||
|
}
|
||||||
|
|
||||||
|
// Also set S3 bucket policies if needed
|
||||||
|
if let Some(drive) = &state.drive {
|
||||||
|
let bucket = "drive";
|
||||||
|
|
||||||
|
// Create bucket policy for user access
|
||||||
|
let policy = serde_json::json!({
|
||||||
|
"Version": "2012-10-17",
|
||||||
|
"Statement": [{
|
||||||
|
"Effect": if req.permissions.is_empty() { "Deny" } else { "Allow" },
|
||||||
|
"Principal": { "AWS": [format!("arn:aws:iam::USER:{}", req.user_id)] },
|
||||||
|
"Action": req.permissions.iter().map(|p| match p.as_str() {
|
||||||
|
"read" => "s3:GetObject",
|
||||||
|
"write" => "s3:PutObject",
|
||||||
|
"delete" => "s3:DeleteObject",
|
||||||
|
_ => "s3:GetObject"
|
||||||
|
}).collect::<Vec<_>>(),
|
||||||
|
"Resource": format!("arn:aws:s3:::{}/{}", bucket, req.path)
|
||||||
|
}]
|
||||||
|
});
|
||||||
|
|
||||||
|
// TODO: Fix S3 put_bucket_policy API call
|
||||||
|
// let _ = drive.put_bucket_policy(bucket, &policy.to_string()).await;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
Ok(Json(ApiResponse {
|
Ok(Json(ApiResponse {
|
||||||
success: true,
|
success: true,
|
||||||
data: None,
|
data: None,
|
||||||
|
|
@ -1522,12 +1746,48 @@ pub async fn get_quota(
|
||||||
|
|
||||||
/// GET /files/shared - Get shared files
|
/// GET /files/shared - Get shared files
|
||||||
pub async fn get_shared(
|
pub async fn get_shared(
|
||||||
State(_state): State<Arc<AppState>>,
|
State(state): State<Arc<AppState>>,
|
||||||
) -> Result<Json<ApiResponse<Vec<SharedFile>>>, (StatusCode, Json<ApiResponse<()>>)> {
|
) -> Result<Json<ApiResponse<Vec<SharedFile>>>, (StatusCode, Json<ApiResponse<()>>)> {
|
||||||
// TODO: Implement shared files from database
|
let mut shared_files = Vec::new();
|
||||||
|
|
||||||
|
// Get shared files from database
|
||||||
|
if let Ok(mut conn) = state.conn.get() {
|
||||||
|
// TODO: Fix Diesel query syntax
|
||||||
|
let shares: Vec<(
|
||||||
|
String,
|
||||||
|
String,
|
||||||
|
Vec<String>,
|
||||||
|
chrono::DateTime<Utc>,
|
||||||
|
Option<chrono::DateTime<Utc>>,
|
||||||
|
Option<String>,
|
||||||
|
)> = vec![];
|
||||||
|
|
||||||
|
for (share_id, path, permissions, created_at, expires_at, shared_by) in shares {
|
||||||
|
// Get file info from S3
|
||||||
|
let mut size = 0i64;
|
||||||
|
let mut modified = Utc::now();
|
||||||
|
|
||||||
|
if let Some(drive) = &state.drive {
|
||||||
|
// TODO: Fix S3 get_object_info API call
|
||||||
|
}
|
||||||
|
|
||||||
|
shared_files.push(SharedFile {
|
||||||
|
id: share_id,
|
||||||
|
name: path.split('/').last().unwrap_or(&path).to_string(),
|
||||||
|
path,
|
||||||
|
size,
|
||||||
|
modified,
|
||||||
|
shared_by: shared_by.unwrap_or_else(|| "Unknown".to_string()),
|
||||||
|
shared_at: created_at,
|
||||||
|
permissions,
|
||||||
|
expires_at,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
Ok(Json(ApiResponse {
|
Ok(Json(ApiResponse {
|
||||||
success: true,
|
success: true,
|
||||||
data: Some(Vec::new()),
|
data: Some(shared_files),
|
||||||
message: None,
|
message: None,
|
||||||
error: None,
|
error: None,
|
||||||
}))
|
}))
|
||||||
|
|
|
||||||
14
src/main.rs
14
src/main.rs
|
|
@ -12,7 +12,6 @@ use tower_http::cors::CorsLayer;
|
||||||
use tower_http::services::ServeDir;
|
use tower_http::services::ServeDir;
|
||||||
use tower_http::trace::TraceLayer;
|
use tower_http::trace::TraceLayer;
|
||||||
|
|
||||||
mod api_router;
|
|
||||||
use botserver::basic;
|
use botserver::basic;
|
||||||
use botserver::core;
|
use botserver::core;
|
||||||
use botserver::shared;
|
use botserver::shared;
|
||||||
|
|
@ -128,11 +127,8 @@ async fn run_axum_server(
|
||||||
.allow_headers(tower_http::cors::Any)
|
.allow_headers(tower_http::cors::Any)
|
||||||
.max_age(std::time::Duration::from_secs(3600));
|
.max_age(std::time::Duration::from_secs(3600));
|
||||||
|
|
||||||
// Use unified API router configuration
|
// Build API router with module-specific routes
|
||||||
let mut api_router = crate::api_router::configure_api_routes();
|
let mut api_router = Router::new()
|
||||||
|
|
||||||
// Add session-specific routes
|
|
||||||
api_router = api_router
|
|
||||||
.route("/api/sessions", post(create_session))
|
.route("/api/sessions", post(create_session))
|
||||||
.route("/api/sessions", get(get_sessions))
|
.route("/api/sessions", get(get_sessions))
|
||||||
.route(
|
.route(
|
||||||
|
|
@ -184,6 +180,12 @@ async fn run_axum_server(
|
||||||
// Add task engine routes
|
// Add task engine routes
|
||||||
api_router = api_router.merge(botserver::tasks::configure_task_routes());
|
api_router = api_router.merge(botserver::tasks::configure_task_routes());
|
||||||
|
|
||||||
|
// Add calendar routes if calendar feature is enabled
|
||||||
|
#[cfg(feature = "calendar")]
|
||||||
|
{
|
||||||
|
api_router = api_router.merge(crate::calendar::configure_calendar_routes());
|
||||||
|
}
|
||||||
|
|
||||||
// Build static file serving
|
// Build static file serving
|
||||||
let static_path = std::path::Path::new("./web/desktop");
|
let static_path = std::path::Path::new("./web/desktop");
|
||||||
|
|
||||||
|
|
|
||||||
315
src/tasks/mod.rs
315
src/tasks/mod.rs
|
|
@ -17,6 +17,29 @@ use crate::shared::utils::DbPool;
|
||||||
|
|
||||||
// TODO: Replace sqlx queries with Diesel queries
|
// TODO: Replace sqlx queries with Diesel queries
|
||||||
|
|
||||||
|
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||||
|
pub struct CreateTaskRequest {
|
||||||
|
pub title: String,
|
||||||
|
pub description: Option<String>,
|
||||||
|
pub assignee_id: Option<Uuid>,
|
||||||
|
pub reporter_id: Option<Uuid>,
|
||||||
|
pub project_id: Option<Uuid>,
|
||||||
|
pub priority: Option<String>,
|
||||||
|
pub due_date: Option<DateTime<Utc>>,
|
||||||
|
pub tags: Option<Vec<String>>,
|
||||||
|
pub estimated_hours: Option<f64>,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||||
|
pub struct TaskFilters {
|
||||||
|
pub status: Option<String>,
|
||||||
|
pub priority: Option<String>,
|
||||||
|
pub assignee: Option<String>,
|
||||||
|
pub project_id: Option<Uuid>,
|
||||||
|
pub tag: Option<String>,
|
||||||
|
pub limit: Option<usize>,
|
||||||
|
}
|
||||||
|
|
||||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||||
pub struct TaskUpdate {
|
pub struct TaskUpdate {
|
||||||
pub title: Option<String>,
|
pub title: Option<String>,
|
||||||
|
|
@ -201,26 +224,259 @@ pub struct TaskEngine {
|
||||||
}
|
}
|
||||||
|
|
||||||
impl TaskEngine {
|
impl TaskEngine {
|
||||||
pub fn new(db: DbPool) -> Self {
|
pub fn new(db: Arc<DbPool>) -> Self {
|
||||||
Self {
|
Self {
|
||||||
db,
|
db,
|
||||||
cache: Arc::new(RwLock::new(Vec::new())),
|
cache: Arc::new(RwLock::new(vec![])),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Create a new task
|
pub async fn create_task(
|
||||||
pub async fn create_task(&self, task: Task) -> Result<Task, Box<dyn std::error::Error>> {
|
&self,
|
||||||
use crate::core::shared::models::schema::tasks::dsl;
|
request: CreateTaskRequest,
|
||||||
let conn = &mut self.db.get()?;
|
) -> Result<TaskResponse, Box<dyn std::error::Error>> {
|
||||||
|
let id = Uuid::new_v4();
|
||||||
|
let now = Utc::now();
|
||||||
|
|
||||||
diesel::insert_into(dsl::tasks)
|
let task = Task {
|
||||||
.values(&task)
|
id,
|
||||||
.execute(conn)?;
|
title: request.title,
|
||||||
|
description: request.description,
|
||||||
|
status: "todo".to_string(),
|
||||||
|
priority: request.priority.unwrap_or("medium".to_string()),
|
||||||
|
assignee_id: request.assignee_id,
|
||||||
|
reporter_id: request.reporter_id,
|
||||||
|
project_id: request.project_id,
|
||||||
|
due_date: request.due_date,
|
||||||
|
tags: request.tags.unwrap_or_default(),
|
||||||
|
dependencies: vec![],
|
||||||
|
estimated_hours: request.estimated_hours,
|
||||||
|
actual_hours: None,
|
||||||
|
progress: 0,
|
||||||
|
created_at: now,
|
||||||
|
updated_at: now,
|
||||||
|
completed_at: None,
|
||||||
|
};
|
||||||
|
|
||||||
Ok(task)
|
// Store in cache
|
||||||
|
let mut cache = self.cache.write().await;
|
||||||
|
cache.push(task.clone());
|
||||||
|
|
||||||
|
Ok(task.into())
|
||||||
}
|
}
|
||||||
|
|
||||||
pub async fn create_task_old(&self, task: Task) -> Result<Task, Box<dyn std::error::Error>> {
|
pub async fn update_task(
|
||||||
|
&self,
|
||||||
|
id: Uuid,
|
||||||
|
update: TaskUpdate,
|
||||||
|
) -> Result<TaskResponse, Box<dyn std::error::Error>> {
|
||||||
|
let mut cache = self.cache.write().await;
|
||||||
|
|
||||||
|
if let Some(task) = cache.iter_mut().find(|t| t.id == id) {
|
||||||
|
if let Some(title) = update.title {
|
||||||
|
task.title = title;
|
||||||
|
}
|
||||||
|
if let Some(description) = update.description {
|
||||||
|
task.description = Some(description);
|
||||||
|
}
|
||||||
|
if let Some(status) = update.status {
|
||||||
|
task.status = status;
|
||||||
|
if task.status == "completed" || task.status == "done" {
|
||||||
|
task.completed_at = Some(Utc::now());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if let Some(priority) = update.priority {
|
||||||
|
task.priority = priority;
|
||||||
|
}
|
||||||
|
if let Some(assignee) = update.assignee {
|
||||||
|
task.assignee_id = Some(Uuid::parse_str(&assignee)?);
|
||||||
|
}
|
||||||
|
if let Some(due_date) = update.due_date {
|
||||||
|
task.due_date = Some(due_date);
|
||||||
|
}
|
||||||
|
if let Some(tags) = update.tags {
|
||||||
|
task.tags = tags;
|
||||||
|
}
|
||||||
|
task.updated_at = Utc::now();
|
||||||
|
|
||||||
|
Ok(task.clone().into())
|
||||||
|
} else {
|
||||||
|
Err("Task not found".into())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn delete_task(&self, id: Uuid) -> Result<(), Box<dyn std::error::Error>> {
|
||||||
|
let mut cache = self.cache.write().await;
|
||||||
|
cache.retain(|t| t.id != id);
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn get_task(&self, id: Uuid) -> Result<TaskResponse, Box<dyn std::error::Error>> {
|
||||||
|
let cache = self.cache.read().await;
|
||||||
|
cache
|
||||||
|
.iter()
|
||||||
|
.find(|t| t.id == id)
|
||||||
|
.cloned()
|
||||||
|
.map(|t| t.into())
|
||||||
|
.ok_or_else(|| "Task not found".into())
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn list_tasks(
|
||||||
|
&self,
|
||||||
|
filters: TaskFilters,
|
||||||
|
) -> Result<Vec<TaskResponse>, Box<dyn std::error::Error>> {
|
||||||
|
let cache = self.cache.read().await;
|
||||||
|
|
||||||
|
let mut tasks: Vec<Task> = cache.clone();
|
||||||
|
|
||||||
|
// Apply filters
|
||||||
|
if let Some(status) = filters.status {
|
||||||
|
tasks.retain(|t| t.status == status);
|
||||||
|
}
|
||||||
|
if let Some(priority) = filters.priority {
|
||||||
|
tasks.retain(|t| t.priority == priority);
|
||||||
|
}
|
||||||
|
if let Some(assignee) = filters.assignee {
|
||||||
|
if let Ok(assignee_id) = Uuid::parse_str(&assignee) {
|
||||||
|
tasks.retain(|t| t.assignee_id == Some(assignee_id));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if let Some(project_id) = filters.project_id {
|
||||||
|
tasks.retain(|t| t.project_id == Some(project_id));
|
||||||
|
}
|
||||||
|
if let Some(tag) = filters.tag {
|
||||||
|
tasks.retain(|t| t.tags.contains(&tag));
|
||||||
|
}
|
||||||
|
|
||||||
|
// Sort by creation date (newest first)
|
||||||
|
tasks.sort_by(|a, b| b.created_at.cmp(&a.created_at));
|
||||||
|
|
||||||
|
// Apply limit
|
||||||
|
if let Some(limit) = filters.limit {
|
||||||
|
tasks.truncate(limit);
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(tasks.into_iter().map(|t| t.into()).collect())
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn assign_task(
|
||||||
|
&self,
|
||||||
|
id: Uuid,
|
||||||
|
assignee: String,
|
||||||
|
) -> Result<TaskResponse, Box<dyn std::error::Error>> {
|
||||||
|
let assignee_id = Uuid::parse_str(&assignee)?;
|
||||||
|
let mut cache = self.cache.write().await;
|
||||||
|
|
||||||
|
if let Some(task) = cache.iter_mut().find(|t| t.id == id) {
|
||||||
|
task.assignee_id = Some(assignee_id);
|
||||||
|
task.updated_at = Utc::now();
|
||||||
|
Ok(task.clone().into())
|
||||||
|
} else {
|
||||||
|
Err("Task not found".into())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn update_status(
|
||||||
|
&self,
|
||||||
|
id: Uuid,
|
||||||
|
status: String,
|
||||||
|
) -> Result<TaskResponse, Box<dyn std::error::Error>> {
|
||||||
|
let mut cache = self.cache.write().await;
|
||||||
|
|
||||||
|
if let Some(task) = cache.iter_mut().find(|t| t.id == id) {
|
||||||
|
task.status = status.clone();
|
||||||
|
if status == "completed" || status == "done" {
|
||||||
|
task.completed_at = Some(Utc::now());
|
||||||
|
task.progress = 100;
|
||||||
|
}
|
||||||
|
task.updated_at = Utc::now();
|
||||||
|
Ok(task.clone().into())
|
||||||
|
} else {
|
||||||
|
Err("Task not found".into())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Task API handlers
|
||||||
|
pub async fn handle_task_create(
|
||||||
|
State(state): State<Arc<AppState>>,
|
||||||
|
Json(payload): Json<CreateTaskRequest>,
|
||||||
|
) -> Result<Json<TaskResponse>, StatusCode> {
|
||||||
|
let task_engine = state
|
||||||
|
.task_engine
|
||||||
|
.as_ref()
|
||||||
|
.ok_or(StatusCode::SERVICE_UNAVAILABLE)?;
|
||||||
|
|
||||||
|
match task_engine.create_task(payload).await {
|
||||||
|
Ok(task) => Ok(Json(task)),
|
||||||
|
Err(e) => {
|
||||||
|
log::error!("Failed to create task: {}", e);
|
||||||
|
Err(StatusCode::INTERNAL_SERVER_ERROR)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn handle_task_update(
|
||||||
|
State(state): State<Arc<AppState>>,
|
||||||
|
Path(id): Path<Uuid>,
|
||||||
|
Json(payload): Json<TaskUpdate>,
|
||||||
|
) -> Result<Json<TaskResponse>, StatusCode> {
|
||||||
|
let task_engine = state
|
||||||
|
.task_engine
|
||||||
|
.as_ref()
|
||||||
|
.ok_or(StatusCode::SERVICE_UNAVAILABLE)?;
|
||||||
|
|
||||||
|
match task_engine.update_task(id, payload).await {
|
||||||
|
Ok(task) => Ok(Json(task)),
|
||||||
|
Err(e) => {
|
||||||
|
log::error!("Failed to update task: {}", e);
|
||||||
|
Err(StatusCode::INTERNAL_SERVER_ERROR)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn handle_task_delete(
|
||||||
|
State(state): State<Arc<AppState>>,
|
||||||
|
Path(id): Path<Uuid>,
|
||||||
|
) -> Result<StatusCode, StatusCode> {
|
||||||
|
let task_engine = state
|
||||||
|
.task_engine
|
||||||
|
.as_ref()
|
||||||
|
.ok_or(StatusCode::SERVICE_UNAVAILABLE)?;
|
||||||
|
|
||||||
|
match task_engine.delete_task(id).await {
|
||||||
|
Ok(_) => Ok(StatusCode::NO_CONTENT),
|
||||||
|
Err(e) => {
|
||||||
|
log::error!("Failed to delete task: {}", e);
|
||||||
|
Err(StatusCode::INTERNAL_SERVER_ERROR)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn handle_task_get(
|
||||||
|
State(state): State<Arc<AppState>>,
|
||||||
|
Path(id): Path<Uuid>,
|
||||||
|
) -> Result<Json<TaskResponse>, StatusCode> {
|
||||||
|
let task_engine = state
|
||||||
|
.task_engine
|
||||||
|
.as_ref()
|
||||||
|
.ok_or(StatusCode::SERVICE_UNAVAILABLE)?;
|
||||||
|
|
||||||
|
match task_engine.get_task(id).await {
|
||||||
|
Ok(task) => Ok(Json(task)),
|
||||||
|
Err(e) => {
|
||||||
|
log::error!("Failed to get task: {}", e);
|
||||||
|
Err(StatusCode::NOT_FOUND)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Database operations for TaskEngine
|
||||||
|
impl TaskEngine {
|
||||||
|
pub async fn create_task_with_db(
|
||||||
|
&self,
|
||||||
|
task: Task,
|
||||||
|
) -> Result<Task, Box<dyn std::error::Error>> {
|
||||||
// TODO: Implement with Diesel
|
// TODO: Implement with Diesel
|
||||||
/*
|
/*
|
||||||
let result = sqlx::query!(
|
let result = sqlx::query!(
|
||||||
|
|
@ -668,7 +924,6 @@ impl TaskEngine {
|
||||||
|
|
||||||
Ok(created)
|
Ok(created)
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Send notification to assignee
|
/// Send notification to assignee
|
||||||
async fn notify_assignee(
|
async fn notify_assignee(
|
||||||
&self,
|
&self,
|
||||||
|
|
@ -798,41 +1053,7 @@ pub mod handlers {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub async fn handle_task_create(
|
// Duplicate handlers removed - using the ones defined above
|
||||||
State(state): State<Arc<AppState>>,
|
|
||||||
Json(mut task): Json<Task>,
|
|
||||||
) -> Result<Json<TaskResponse>, StatusCode> {
|
|
||||||
task.id = Uuid::new_v4();
|
|
||||||
task.created_at = Utc::now();
|
|
||||||
task.updated_at = Utc::now();
|
|
||||||
|
|
||||||
match state.task_engine.create_task(task).await {
|
|
||||||
Ok(created) => Ok(Json(created.into())),
|
|
||||||
Err(_) => Err(StatusCode::INTERNAL_SERVER_ERROR),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub async fn handle_task_update(
|
|
||||||
State(state): State<Arc<AppState>>,
|
|
||||||
Path(id): Path<Uuid>,
|
|
||||||
Json(updates): Json<TaskUpdate>,
|
|
||||||
) -> Result<Json<TaskResponse>, StatusCode> {
|
|
||||||
match state.task_engine.update_task(id, updates).await {
|
|
||||||
Ok(updated) => Ok(Json(updated.into())),
|
|
||||||
Err(_) => Err(StatusCode::INTERNAL_SERVER_ERROR),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub async fn handle_task_delete(
|
|
||||||
State(state): State<Arc<AppState>>,
|
|
||||||
Path(id): Path<Uuid>,
|
|
||||||
) -> Result<StatusCode, StatusCode> {
|
|
||||||
match state.task_engine.delete_task(id).await {
|
|
||||||
Ok(true) => Ok(StatusCode::NO_CONTENT),
|
|
||||||
Ok(false) => Err(StatusCode::NOT_FOUND),
|
|
||||||
Err(_) => Err(StatusCode::INTERNAL_SERVER_ERROR),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub async fn handle_task_list(
|
pub async fn handle_task_list(
|
||||||
State(state): State<Arc<AppState>>,
|
State(state): State<Arc<AppState>>,
|
||||||
|
|
|
||||||
Loading…
Add table
Reference in a new issue