2025-11-22 22:55:35 -03:00
|
|
|
use axum::{
|
|
|
|
|
extract::{Path, Query, State},
|
|
|
|
|
http::StatusCode,
|
|
|
|
|
response::Json,
|
|
|
|
|
routing::{delete, get, post, put},
|
|
|
|
|
Router,
|
|
|
|
|
};
|
|
|
|
|
use chrono::{DateTime, Utc};
|
|
|
|
|
use diesel::prelude::*;
|
|
|
|
|
use serde::{Deserialize, Serialize};
|
|
|
|
|
use std::sync::Arc;
|
|
|
|
|
use tokio::sync::RwLock;
|
|
|
|
|
use uuid::Uuid;
|
|
|
|
|
|
2025-11-27 08:34:24 -03:00
|
|
|
use crate::shared::state::AppState;
|
2025-11-22 22:55:35 -03:00
|
|
|
use crate::shared::utils::DbPool;
|
|
|
|
|
|
|
|
|
|
// TODO: Replace sqlx queries with Diesel queries
|
|
|
|
|
|
2025-11-27 09:38:50 -03:00
|
|
|
/// Request payload for creating a new task via the API.
///
/// Only `title` is required; every other field is optional and falls back
/// to engine defaults (`status = "todo"`, `priority = "medium"`, empty
/// tags) in `TaskEngine::create_task`.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CreateTaskRequest {
    /// Human-readable task title (required).
    pub title: String,
    /// Optional longer description.
    pub description: Option<String>,
    /// UUID of the user the task is assigned to, if any.
    pub assignee_id: Option<Uuid>,
    /// UUID of the user reporting/creating the task, if any.
    pub reporter_id: Option<Uuid>,
    /// UUID of the owning project, if any.
    pub project_id: Option<Uuid>,
    /// Free-form priority string; defaults to "medium" when absent.
    pub priority: Option<String>,
    /// Optional due date (UTC).
    pub due_date: Option<DateTime<Utc>>,
    /// Optional tag list; defaults to empty when absent.
    pub tags: Option<Vec<String>>,
    /// Optional estimate in hours.
    pub estimated_hours: Option<f64>,
}
|
|
|
|
|
|
|
|
|
|
/// Query-string filters accepted by the task listing endpoint.
///
/// All filters are optional and combined with AND semantics in
/// `TaskEngine::list_tasks`.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TaskFilters {
    /// Exact match against the task's status string.
    pub status: Option<String>,
    /// Exact match against the task's priority string.
    pub priority: Option<String>,
    /// Assignee user id as a string; parsed to a `Uuid` before filtering.
    pub assignee: Option<String>,
    /// Exact match against the owning project id.
    pub project_id: Option<Uuid>,
    /// Keep only tasks whose tag list contains this tag.
    pub tag: Option<String>,
    /// Maximum number of results to return (applied after sorting).
    pub limit: Option<usize>,
}
|
|
|
|
|
|
2025-11-22 22:55:35 -03:00
|
|
|
/// Partial-update payload for an existing task.
///
/// `None` fields are left untouched by `TaskEngine::update_task`;
/// `Some` fields overwrite the stored value.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TaskUpdate {
    pub title: Option<String>,
    pub description: Option<String>,
    /// New status string; setting "completed"/"done" also stamps
    /// `completed_at` in `TaskEngine::update_task`.
    pub status: Option<String>,
    pub priority: Option<String>,
    /// Assignee user id as a string; parsed to a `Uuid` when applied.
    pub assignee: Option<String>,
    pub due_date: Option<DateTime<Utc>>,
    /// Replaces the whole tag list when present.
    pub tags: Option<Vec<String>>,
}
|
|
|
|
|
|
2025-11-27 08:34:24 -03:00
|
|
|
// Database model - matches schema exactly
|
|
|
|
|
// Database model - matches schema exactly
/// Row type for the `tasks` table (Diesel `Queryable` + `Insertable`).
///
/// Status and priority are stored as free-form strings here and mapped to
/// the `TaskStatus`/`TaskPriority` enums by `From<Task> for TaskResponse`.
#[derive(Debug, Clone, Serialize, Deserialize, Queryable, Insertable)]
#[diesel(table_name = crate::core::shared::models::schema::tasks)]
pub struct Task {
    pub id: Uuid,
    pub title: String,
    pub description: Option<String>,
    pub status: String, // Changed to String to match schema
    pub priority: String, // Changed to String to match schema
    pub assignee_id: Option<Uuid>, // Changed to match schema
    pub reporter_id: Option<Uuid>, // Changed to match schema
    pub project_id: Option<Uuid>, // Added to match schema
    pub due_date: Option<DateTime<Utc>>,
    /// Free-form tag list.
    pub tags: Vec<String>,
    /// IDs of tasks this task depends on.
    pub dependencies: Vec<Uuid>,
    pub estimated_hours: Option<f64>, // Changed to f64 to match Float8
    pub actual_hours: Option<f64>, // Changed to f64 to match Float8
    /// Completion percentage, 0-100.
    pub progress: i32, // Added to match schema
    pub created_at: DateTime<Utc>,
    pub updated_at: DateTime<Utc>,
    /// Set when the status transitions to "completed"/"done".
    pub completed_at: Option<DateTime<Utc>>,
}
|
|
|
|
|
|
|
|
|
|
// API request/response model - includes additional fields for convenience
|
|
|
|
|
// API request/response model - includes additional fields for convenience
/// API-facing task representation returned by the handlers.
///
/// Built from the database `Task` via `From<Task>`. NOTE: that conversion
/// currently leaves `parent_task_id`, `subtasks`, `attachments` and
/// `comments` empty — populating them would require separate queries.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TaskResponse {
    pub id: Uuid,
    pub title: String,
    pub description: Option<String>,
    pub assignee: Option<String>, // Converted from assignee_id
    // Empty string when the task has no reporter_id.
    pub reporter: String, // Converted from reporter_id
    pub status: TaskStatus,
    pub priority: TaskPriority,
    pub due_date: Option<DateTime<Utc>>,
    pub estimated_hours: Option<f64>,
    pub actual_hours: Option<f64>,
    pub tags: Vec<String>,
    pub parent_task_id: Option<Uuid>, // For subtask relationships
    pub subtasks: Vec<Uuid>, // List of subtask IDs
    pub dependencies: Vec<Uuid>,
    pub attachments: Vec<String>, // File paths/URLs
    pub comments: Vec<TaskComment>, // Embedded comments
    pub created_at: DateTime<Utc>,
    pub updated_at: DateTime<Utc>,
    pub completed_at: Option<DateTime<Utc>>,
    /// Completion percentage, 0-100.
    pub progress: i32,
}
|
|
|
|
|
|
2025-11-27 08:34:24 -03:00
|
|
|
// Convert database Task to API TaskResponse
|
|
|
|
|
impl From<Task> for TaskResponse {
|
|
|
|
|
fn from(task: Task) -> Self {
|
|
|
|
|
TaskResponse {
|
|
|
|
|
id: task.id,
|
|
|
|
|
title: task.title,
|
|
|
|
|
description: task.description,
|
|
|
|
|
assignee: task.assignee_id.map(|id| id.to_string()),
|
|
|
|
|
reporter: task
|
|
|
|
|
.reporter_id
|
|
|
|
|
.map(|id| id.to_string())
|
|
|
|
|
.unwrap_or_default(),
|
|
|
|
|
status: match task.status.as_str() {
|
|
|
|
|
"todo" => TaskStatus::Todo,
|
|
|
|
|
"in_progress" | "in-progress" => TaskStatus::InProgress,
|
|
|
|
|
"completed" | "done" => TaskStatus::Completed,
|
|
|
|
|
"on_hold" | "on-hold" => TaskStatus::OnHold,
|
|
|
|
|
"review" => TaskStatus::Review,
|
|
|
|
|
"blocked" => TaskStatus::Blocked,
|
|
|
|
|
"cancelled" => TaskStatus::Cancelled,
|
|
|
|
|
_ => TaskStatus::Todo,
|
|
|
|
|
},
|
|
|
|
|
priority: match task.priority.as_str() {
|
|
|
|
|
"low" => TaskPriority::Low,
|
|
|
|
|
"medium" => TaskPriority::Medium,
|
|
|
|
|
"high" => TaskPriority::High,
|
|
|
|
|
"urgent" => TaskPriority::Urgent,
|
|
|
|
|
_ => TaskPriority::Medium,
|
|
|
|
|
},
|
|
|
|
|
due_date: task.due_date,
|
|
|
|
|
estimated_hours: task.estimated_hours,
|
|
|
|
|
actual_hours: task.actual_hours,
|
|
|
|
|
tags: task.tags,
|
|
|
|
|
parent_task_id: None, // Would need separate query
|
|
|
|
|
subtasks: vec![], // Would need separate query
|
|
|
|
|
dependencies: task.dependencies,
|
|
|
|
|
attachments: vec![], // Would need separate query
|
|
|
|
|
comments: vec![], // Would need separate query
|
|
|
|
|
created_at: task.created_at,
|
|
|
|
|
updated_at: task.updated_at,
|
|
|
|
|
completed_at: task.completed_at,
|
|
|
|
|
progress: task.progress,
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/// API-level task status.
///
/// NOTE(review): `Done` duplicates `Completed` — `From<Task>` maps the
/// "done" status string to `Completed`, so `Done` is never produced by
/// the conversion; consider removing it (check external callers first).
/// Also note there is no `#[serde(rename_all = ...)]` here, unlike
/// `TaskPriority`, so variants serialize in PascalCase — confirm that is
/// the intended wire format before changing it.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
pub enum TaskStatus {
    Todo,
    InProgress,
    Completed,
    OnHold,
    Review,
    Blocked,
    Cancelled,
    Done,
}
|
|
|
|
|
|
|
|
|
|
/// API-level task priority.
///
/// Serialized in lowercase ("low", "medium", "high", "urgent"), matching
/// the strings stored in the database `priority` column.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum TaskPriority {
    Low,
    Medium,
    High,
    Urgent,
}
|
|
|
|
|
|
|
|
|
|
/// A comment attached to a task.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TaskComment {
    pub id: Uuid,
    /// Task this comment belongs to.
    pub task_id: Uuid,
    /// Display name or id of the comment author (free-form string).
    pub author: String,
    pub content: String,
    pub created_at: DateTime<Utc>,
    /// Set only when the comment has been edited.
    pub updated_at: Option<DateTime<Utc>>,
}
|
|
|
|
|
|
|
|
|
|
/// Reusable blueprint for creating tasks with preset defaults.
///
/// Used by `TaskEngine::create_from_template`; note that loading
/// templates from the database is not implemented yet there.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TaskTemplate {
    pub id: Uuid,
    /// Becomes the created task's title.
    pub name: String,
    pub description: Option<String>,
    pub default_assignee: Option<String>,
    pub default_priority: TaskPriority,
    pub default_tags: Vec<String>,
    /// Checklist items to instantiate alongside the task.
    pub checklist: Vec<ChecklistItem>,
}
|
|
|
|
|
|
|
|
|
|
/// A single checklist entry belonging to a task.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ChecklistItem {
    pub id: Uuid,
    /// Task this checklist item belongs to.
    pub task_id: Uuid,
    pub description: String,
    pub completed: bool,
    /// Who checked the item off, if anyone.
    pub completed_by: Option<String>,
    pub completed_at: Option<DateTime<Utc>>,
}
|
|
|
|
|
|
|
|
|
|
/// A kanban-style board grouping tasks into columns.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TaskBoard {
    pub id: Uuid,
    pub name: String,
    pub description: Option<String>,
    /// Ordered columns; each maps to a `TaskStatus`.
    pub columns: Vec<BoardColumn>,
    /// Board owner (free-form user identifier).
    pub owner: String,
    /// Users with access to the board.
    pub members: Vec<String>,
    pub created_at: DateTime<Utc>,
}
|
|
|
|
|
|
|
|
|
|
/// One column on a `TaskBoard`.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct BoardColumn {
    pub id: Uuid,
    pub name: String,
    /// Zero-based ordering of the column on the board.
    pub position: i32,
    /// Status a task acquires when moved into this column.
    pub status_mapping: TaskStatus,
    /// Tasks currently shown in this column, in display order.
    pub task_ids: Vec<Uuid>,
    /// Optional work-in-progress limit; `None` means unlimited.
    pub wip_limit: Option<i32>,
}
|
|
|
|
|
|
|
|
|
|
pub struct TaskEngine {
|
2025-11-27 08:34:24 -03:00
|
|
|
db: DbPool,
|
2025-11-22 22:55:35 -03:00
|
|
|
cache: Arc<RwLock<Vec<Task>>>,
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// NOTE(review): `update_task`, `delete_task`, `get_task` and `assign_task`
// below are re-declared in a second `impl TaskEngine` later in this file.
// Rust rejects duplicate inherent methods (E0592) — one of the two sets
// needs to be renamed or removed before this module compiles.
impl TaskEngine {
    /// Build an engine around a shared pool with an empty task cache.
    pub fn new(db: Arc<DbPool>) -> Self {
        Self {
            db,
            cache: Arc::new(RwLock::new(vec![])),
        }
    }

    /// Create a task from an API request.
    ///
    /// Defaults: status "todo", priority "medium", empty tags/dependencies,
    /// progress 0. NOTE(review): the task is only pushed onto the in-memory
    /// cache — nothing is persisted to the database yet.
    pub async fn create_task(
        &self,
        request: CreateTaskRequest,
    ) -> Result<TaskResponse, Box<dyn std::error::Error>> {
        let id = Uuid::new_v4();
        let now = Utc::now();

        let task = Task {
            id,
            title: request.title,
            description: request.description,
            status: "todo".to_string(),
            priority: request.priority.unwrap_or("medium".to_string()),
            assignee_id: request.assignee_id,
            reporter_id: request.reporter_id,
            project_id: request.project_id,
            due_date: request.due_date,
            tags: request.tags.unwrap_or_default(),
            dependencies: vec![],
            estimated_hours: request.estimated_hours,
            actual_hours: None,
            progress: 0,
            created_at: now,
            updated_at: now,
            completed_at: None,
        };

        // Store in cache
        let mut cache = self.cache.write().await;
        cache.push(task.clone());

        Ok(task.into())
    }

    /// Apply a partial update to a cached task.
    ///
    /// Errors when the id is unknown or when `update.assignee` is not a
    /// valid UUID. Moving to "completed"/"done" stamps `completed_at`.
    pub async fn update_task(
        &self,
        id: Uuid,
        update: TaskUpdate,
    ) -> Result<TaskResponse, Box<dyn std::error::Error>> {
        let mut cache = self.cache.write().await;

        if let Some(task) = cache.iter_mut().find(|t| t.id == id) {
            if let Some(title) = update.title {
                task.title = title;
            }
            if let Some(description) = update.description {
                task.description = Some(description);
            }
            if let Some(status) = update.status {
                task.status = status;
                if task.status == "completed" || task.status == "done" {
                    task.completed_at = Some(Utc::now());
                }
            }
            if let Some(priority) = update.priority {
                task.priority = priority;
            }
            if let Some(assignee) = update.assignee {
                // Invalid UUID aborts the update with an error; earlier
                // field assignments above are NOT rolled back in that case.
                task.assignee_id = Some(Uuid::parse_str(&assignee)?);
            }
            if let Some(due_date) = update.due_date {
                task.due_date = Some(due_date);
            }
            if let Some(tags) = update.tags {
                task.tags = tags;
            }
            task.updated_at = Utc::now();

            Ok(task.clone().into())
        } else {
            Err("Task not found".into())
        }
    }

    /// Remove a task from the cache. Succeeds even when the id is unknown.
    pub async fn delete_task(&self, id: Uuid) -> Result<(), Box<dyn std::error::Error>> {
        let mut cache = self.cache.write().await;
        cache.retain(|t| t.id != id);
        Ok(())
    }

    /// Look up a single cached task by id.
    pub async fn get_task(&self, id: Uuid) -> Result<TaskResponse, Box<dyn std::error::Error>> {
        let cache = self.cache.read().await;
        cache
            .iter()
            .find(|t| t.id == id)
            .cloned()
            .map(|t| t.into())
            .ok_or_else(|| "Task not found".into())
    }

    /// List cached tasks, AND-combining all provided filters, sorted
    /// newest-first, truncated to `filters.limit` when given.
    ///
    /// An unparseable `filters.assignee` UUID is silently ignored (no
    /// filtering by assignee in that case).
    pub async fn list_tasks(
        &self,
        filters: TaskFilters,
    ) -> Result<Vec<TaskResponse>, Box<dyn std::error::Error>> {
        let cache = self.cache.read().await;

        let mut tasks: Vec<Task> = cache.clone();

        // Apply filters
        if let Some(status) = filters.status {
            tasks.retain(|t| t.status == status);
        }
        if let Some(priority) = filters.priority {
            tasks.retain(|t| t.priority == priority);
        }
        if let Some(assignee) = filters.assignee {
            if let Ok(assignee_id) = Uuid::parse_str(&assignee) {
                tasks.retain(|t| t.assignee_id == Some(assignee_id));
            }
        }
        if let Some(project_id) = filters.project_id {
            tasks.retain(|t| t.project_id == Some(project_id));
        }
        if let Some(tag) = filters.tag {
            tasks.retain(|t| t.tags.contains(&tag));
        }

        // Sort by creation date (newest first)
        tasks.sort_by(|a, b| b.created_at.cmp(&a.created_at));

        // Apply limit
        if let Some(limit) = filters.limit {
            tasks.truncate(limit);
        }

        Ok(tasks.into_iter().map(|t| t.into()).collect())
    }

    /// Assign a cached task to a user given the user's UUID as a string.
    /// Errors when the UUID is invalid or the task is unknown.
    pub async fn assign_task(
        &self,
        id: Uuid,
        assignee: String,
    ) -> Result<TaskResponse, Box<dyn std::error::Error>> {
        let assignee_id = Uuid::parse_str(&assignee)?;
        let mut cache = self.cache.write().await;

        if let Some(task) = cache.iter_mut().find(|t| t.id == id) {
            task.assignee_id = Some(assignee_id);
            task.updated_at = Utc::now();
            Ok(task.clone().into())
        } else {
            Err("Task not found".into())
        }
    }

    /// Set a cached task's status. "completed"/"done" also stamps
    /// `completed_at` and forces progress to 100.
    pub async fn update_status(
        &self,
        id: Uuid,
        status: String,
    ) -> Result<TaskResponse, Box<dyn std::error::Error>> {
        let mut cache = self.cache.write().await;

        if let Some(task) = cache.iter_mut().find(|t| t.id == id) {
            task.status = status.clone();
            if status == "completed" || status == "done" {
                task.completed_at = Some(Utc::now());
                task.progress = 100;
            }
            task.updated_at = Utc::now();
            Ok(task.clone().into())
        } else {
            Err("Task not found".into())
        }
    }
}
|
|
|
|
|
|
|
|
|
|
// Task API handlers
|
|
|
|
|
pub async fn handle_task_create(
|
|
|
|
|
State(state): State<Arc<AppState>>,
|
|
|
|
|
Json(payload): Json<CreateTaskRequest>,
|
|
|
|
|
) -> Result<Json<TaskResponse>, StatusCode> {
|
|
|
|
|
let task_engine = state
|
|
|
|
|
.task_engine
|
|
|
|
|
.as_ref()
|
|
|
|
|
.ok_or(StatusCode::SERVICE_UNAVAILABLE)?;
|
|
|
|
|
|
|
|
|
|
match task_engine.create_task(payload).await {
|
|
|
|
|
Ok(task) => Ok(Json(task)),
|
|
|
|
|
Err(e) => {
|
|
|
|
|
log::error!("Failed to create task: {}", e);
|
|
|
|
|
Err(StatusCode::INTERNAL_SERVER_ERROR)
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
pub async fn handle_task_update(
|
|
|
|
|
State(state): State<Arc<AppState>>,
|
|
|
|
|
Path(id): Path<Uuid>,
|
|
|
|
|
Json(payload): Json<TaskUpdate>,
|
|
|
|
|
) -> Result<Json<TaskResponse>, StatusCode> {
|
|
|
|
|
let task_engine = state
|
|
|
|
|
.task_engine
|
|
|
|
|
.as_ref()
|
|
|
|
|
.ok_or(StatusCode::SERVICE_UNAVAILABLE)?;
|
|
|
|
|
|
|
|
|
|
match task_engine.update_task(id, payload).await {
|
|
|
|
|
Ok(task) => Ok(Json(task)),
|
|
|
|
|
Err(e) => {
|
|
|
|
|
log::error!("Failed to update task: {}", e);
|
|
|
|
|
Err(StatusCode::INTERNAL_SERVER_ERROR)
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
pub async fn handle_task_delete(
|
|
|
|
|
State(state): State<Arc<AppState>>,
|
|
|
|
|
Path(id): Path<Uuid>,
|
|
|
|
|
) -> Result<StatusCode, StatusCode> {
|
|
|
|
|
let task_engine = state
|
|
|
|
|
.task_engine
|
|
|
|
|
.as_ref()
|
|
|
|
|
.ok_or(StatusCode::SERVICE_UNAVAILABLE)?;
|
|
|
|
|
|
|
|
|
|
match task_engine.delete_task(id).await {
|
|
|
|
|
Ok(_) => Ok(StatusCode::NO_CONTENT),
|
|
|
|
|
Err(e) => {
|
|
|
|
|
log::error!("Failed to delete task: {}", e);
|
|
|
|
|
Err(StatusCode::INTERNAL_SERVER_ERROR)
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
pub async fn handle_task_get(
|
|
|
|
|
State(state): State<Arc<AppState>>,
|
|
|
|
|
Path(id): Path<Uuid>,
|
|
|
|
|
) -> Result<Json<TaskResponse>, StatusCode> {
|
|
|
|
|
let task_engine = state
|
|
|
|
|
.task_engine
|
|
|
|
|
.as_ref()
|
|
|
|
|
.ok_or(StatusCode::SERVICE_UNAVAILABLE)?;
|
|
|
|
|
|
|
|
|
|
match task_engine.get_task(id).await {
|
|
|
|
|
Ok(task) => Ok(Json(task)),
|
|
|
|
|
Err(e) => {
|
|
|
|
|
log::error!("Failed to get task: {}", e);
|
|
|
|
|
Err(StatusCode::NOT_FOUND)
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Database operations for TaskEngine
|
|
|
|
|
impl TaskEngine {
|
|
|
|
|
pub async fn create_task_with_db(
|
|
|
|
|
&self,
|
|
|
|
|
task: Task,
|
|
|
|
|
) -> Result<Task, Box<dyn std::error::Error>> {
|
2025-11-22 22:55:35 -03:00
|
|
|
// TODO: Implement with Diesel
|
|
|
|
|
/*
|
|
|
|
|
let result = sqlx::query!(
|
|
|
|
|
r#"
|
|
|
|
|
INSERT INTO tasks
|
|
|
|
|
(id, title, description, assignee, reporter, status, priority,
|
|
|
|
|
due_date, estimated_hours, tags, parent_task_id, created_at, updated_at)
|
|
|
|
|
VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13)
|
|
|
|
|
RETURNING *
|
|
|
|
|
"#,
|
|
|
|
|
task.id,
|
|
|
|
|
task.title,
|
|
|
|
|
task.description,
|
2025-11-27 08:34:24 -03:00
|
|
|
task.assignee_id.map(|id| id.to_string()),
|
|
|
|
|
task.reporter_id.map(|id| id.to_string()),
|
2025-11-22 22:55:35 -03:00
|
|
|
serde_json::to_value(&task.status)?,
|
|
|
|
|
serde_json::to_value(&task.priority)?,
|
|
|
|
|
task.due_date,
|
|
|
|
|
task.estimated_hours,
|
|
|
|
|
&task.tags[..],
|
2025-11-27 08:34:24 -03:00
|
|
|
None, // parent_task_id field doesn't exist in Task struct
|
2025-11-22 22:55:35 -03:00
|
|
|
task.created_at,
|
|
|
|
|
task.updated_at
|
|
|
|
|
)
|
|
|
|
|
.fetch_one(self.db.as_ref())
|
|
|
|
|
.await?;
|
|
|
|
|
|
|
|
|
|
let created_task: Task = serde_json::from_value(serde_json::to_value(result)?)?;
|
|
|
|
|
*/
|
|
|
|
|
|
|
|
|
|
let created_task = task.clone();
|
|
|
|
|
|
|
|
|
|
// Update cache
|
|
|
|
|
let mut cache = self.cache.write().await;
|
|
|
|
|
cache.push(created_task.clone());
|
|
|
|
|
|
|
|
|
|
Ok(created_task)
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
    /// Update an existing task
    ///
    /// NOTE(review): duplicate of `update_task` in the earlier cache-only
    /// `impl TaskEngine` block — Rust rejects this (E0592).
    ///
    /// NOTE(review): the database UPDATE is not implemented yet; this
    /// builds a placeholder `Task` purely from `updates`, which means
    /// fields absent from the update are NOT preserved from the stored
    /// row (`created_at` is reset to now, `reporter_id` becomes a fresh
    /// random UUID, `project_id`/hours/tags are dropped). Treat the
    /// return value as provisional until the Diesel port lands.
    pub async fn update_task(
        &self,
        id: Uuid,
        updates: TaskUpdate,
    ) -> Result<Task, Box<dyn std::error::Error>> {
        // use crate::core::shared::models::schema::tasks::dsl;
        // NOTE(review): the connection is acquired but never used below
        // (it was for the removed sqlx/pending Diesel UPDATE); as written
        // it only serves to fail early when the pool is exhausted.
        let conn = &mut self.db.get()?;
        // NOTE(review): also unused — the placeholder below calls
        // `Utc::now()` directly instead.
        let updated_at = Utc::now();

        // Check if status is changing to Done
        let completing = updates
            .status
            .as_ref()
            .map(|s| s == "completed")
            .unwrap_or(false);

        let completed_at = if completing { Some(Utc::now()) } else { None };

        // TODO: Implement with Diesel — the intended SQL was an
        // UPDATE ... SET col = COALESCE($n, col) ... WHERE id = $1
        // RETURNING *, applying only the fields present in `updates`.

        // Create a dummy updated task for now
        let updated_task = Task {
            id,
            title: updates.title.unwrap_or_else(|| "Updated Task".to_string()),
            description: updates.description,
            status: updates.status.unwrap_or("todo".to_string()),
            priority: updates.priority.unwrap_or("medium".to_string()),
            // Invalid assignee UUIDs are silently dropped here (become None).
            assignee_id: updates
                .assignee
                .and_then(|s| uuid::Uuid::parse_str(&s).ok()),
            reporter_id: Some(uuid::Uuid::new_v4()),
            project_id: None,
            due_date: updates.due_date,
            tags: updates.tags.unwrap_or_default(),
            dependencies: Vec::new(),
            estimated_hours: None,
            actual_hours: None,
            progress: 0,
            created_at: Utc::now(),
            updated_at: Utc::now(),
            completed_at,
        };
        self.refresh_cache().await?;

        Ok(updated_task)
    }
|
|
|
|
|
|
|
|
|
|
    /// Delete a task
    ///
    /// Refuses to delete a task that other tasks depend on.
    ///
    /// NOTE(review): duplicate of `delete_task` in the earlier cache-only
    /// `impl TaskEngine` block — Rust rejects this (E0592). Also, the
    /// actual DELETE is not implemented yet, so this always returns
    /// `Ok(false)` ("nothing deleted") on the success path.
    pub async fn delete_task(&self, id: Uuid) -> Result<bool, Box<dyn std::error::Error>> {
        // First, check for dependencies
        let dependencies = self.get_task_dependencies(id).await?;
        if !dependencies.is_empty() {
            return Err("Cannot delete task with dependencies".into());
        }

        // TODO: Implement with Diesel — DELETE FROM tasks WHERE id = $1,
        // then return whether a row was actually removed.

        self.refresh_cache().await?;
        Ok(false)
    }
|
|
|
|
|
|
|
|
|
|
    /// Get tasks for a specific user
    ///
    /// NOTE(review): stub — the query (tasks where the user is assignee
    /// OR reporter, ordered by priority then due date) is not implemented
    /// yet, so this always returns an empty `Vec`.
    pub async fn get_user_tasks(
        &self,
        _user_id: &str,
    ) -> Result<Vec<Task>, Box<dyn std::error::Error>> {
        // TODO: Implement with Diesel — SELECT * FROM tasks WHERE
        // assignee = $1 OR reporter = $1 ORDER BY priority DESC, due_date ASC.
        Ok(vec![])
    }
|
|
|
|
|
|
|
|
|
|
/// Get tasks by status
|
|
|
|
|
pub async fn get_tasks_by_status(
|
|
|
|
|
&self,
|
2025-11-27 08:34:24 -03:00
|
|
|
status: String,
|
2025-11-22 22:55:35 -03:00
|
|
|
) -> Result<Vec<Task>, Box<dyn std::error::Error>> {
|
2025-11-27 08:34:24 -03:00
|
|
|
use crate::core::shared::models::schema::tasks::dsl;
|
|
|
|
|
let conn = &mut self.db.get()?;
|
2025-11-22 22:55:35 -03:00
|
|
|
|
2025-11-27 08:34:24 -03:00
|
|
|
let tasks = dsl::tasks
|
|
|
|
|
.filter(dsl::status.eq(status))
|
|
|
|
|
.order(dsl::created_at.desc())
|
|
|
|
|
.load::<Task>(conn)?;
|
|
|
|
|
|
|
|
|
|
Ok(tasks)
|
2025-11-22 22:55:35 -03:00
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/// Get overdue tasks
|
|
|
|
|
pub async fn get_overdue_tasks(&self) -> Result<Vec<Task>, Box<dyn std::error::Error>> {
|
2025-11-27 08:34:24 -03:00
|
|
|
use crate::core::shared::models::schema::tasks::dsl;
|
|
|
|
|
let conn = &mut self.db.get()?;
|
2025-11-22 22:55:35 -03:00
|
|
|
let now = Utc::now();
|
|
|
|
|
|
2025-11-27 08:34:24 -03:00
|
|
|
let tasks = dsl::tasks
|
|
|
|
|
.filter(dsl::due_date.lt(Some(now)))
|
|
|
|
|
.filter(dsl::status.ne("completed"))
|
|
|
|
|
.filter(dsl::status.ne("cancelled"))
|
|
|
|
|
.order(dsl::due_date.asc())
|
|
|
|
|
.load::<Task>(conn)?;
|
|
|
|
|
|
|
|
|
|
Ok(tasks)
|
2025-11-22 22:55:35 -03:00
|
|
|
}
|
|
|
|
|
|
|
|
|
|
    /// Add a comment to a task
    ///
    /// NOTE(review): the INSERT into `task_comments` is not implemented
    /// yet — the comment is built and returned but never persisted.
    pub async fn add_comment(
        &self,
        task_id: Uuid,
        author: &str,
        content: &str,
    ) -> Result<TaskComment, Box<dyn std::error::Error>> {
        let comment = TaskComment {
            id: Uuid::new_v4(),
            task_id,
            author: author.to_string(),
            content: content.to_string(),
            created_at: Utc::now(),
            updated_at: None,
        };

        // TODO: Implement with Diesel — INSERT INTO task_comments
        // (id, task_id, author, content, created_at).

        Ok(comment)
    }
|
|
|
|
|
|
|
|
|
|
    /// Create a subtask
    ///
    /// NOTE(review): `parent_id` is currently unused — the parent link
    /// (junction table) is not written yet. Also, `self.create_task` in
    /// the earlier impl takes a `CreateTaskRequest`, not a `Task`; this
    /// call presumably intended `create_task_with_db` — confirm and fix
    /// alongside the duplicate-impl cleanup.
    pub async fn create_subtask(
        &self,
        parent_id: Uuid,
        subtask: Task,
    ) -> Result<Task, Box<dyn std::error::Error>> {
        // For subtasks, we store parent relationship separately
        // or in a separate junction table
        let created = self.create_task(subtask).await?;

        // Update parent's subtasks list
        // TODO: Implement with Diesel — record (created.id, parent_id)
        // in the parent/subtask junction table.

        Ok(created)
    }
|
|
|
|
|
|
|
|
|
|
    /// Get task dependencies
    ///
    /// Resolves each id in the task's `dependencies` list to a full
    /// `Task`. Dependencies that fail to load are silently skipped, so
    /// the returned list may be shorter than `task.dependencies`.
    pub async fn get_task_dependencies(
        &self,
        task_id: Uuid,
    ) -> Result<Vec<Task>, Box<dyn std::error::Error>> {
        let task = self.get_task(task_id).await?;
        let mut dependencies = Vec::new();

        // Sequential lookups; fine for small dependency lists.
        for dep_id in task.dependencies {
            if let Ok(dep_task) = self.get_task(dep_id).await {
                dependencies.push(dep_task);
            }
        }

        Ok(dependencies)
    }
|
|
|
|
|
|
|
|
|
|
/// Get a single task by ID
|
2025-11-27 08:34:24 -03:00
|
|
|
pub async fn get_task(&self, id: Uuid) -> Result<Task, Box<dyn std::error::Error>> {
|
|
|
|
|
use crate::core::shared::models::schema::tasks::dsl;
|
|
|
|
|
let conn = &mut self.db.get()?;
|
2025-11-22 22:55:35 -03:00
|
|
|
|
2025-11-27 08:34:24 -03:00
|
|
|
let task = dsl::tasks.filter(dsl::id.eq(id)).first::<Task>(conn)?;
|
|
|
|
|
|
|
|
|
|
Ok(task)
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/// Get all tasks
|
|
|
|
|
pub async fn get_all_tasks(&self) -> Result<Vec<Task>, Box<dyn std::error::Error>> {
|
|
|
|
|
use crate::core::shared::models::schema::tasks::dsl;
|
|
|
|
|
let conn = &mut self.db.get()?;
|
|
|
|
|
|
|
|
|
|
let tasks = dsl::tasks
|
|
|
|
|
.order(dsl::created_at.desc())
|
|
|
|
|
.load::<Task>(conn)?;
|
|
|
|
|
|
|
|
|
|
Ok(tasks)
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/// Assign a task to a user
|
|
|
|
|
pub async fn assign_task(
|
|
|
|
|
&self,
|
|
|
|
|
id: Uuid,
|
|
|
|
|
assignee: String,
|
|
|
|
|
) -> Result<Task, Box<dyn std::error::Error>> {
|
|
|
|
|
use crate::core::shared::models::schema::tasks::dsl;
|
|
|
|
|
let conn = &mut self.db.get()?;
|
|
|
|
|
|
|
|
|
|
let assignee_id = Uuid::parse_str(&assignee).ok();
|
|
|
|
|
let updated_at = Utc::now();
|
|
|
|
|
|
|
|
|
|
diesel::update(dsl::tasks.filter(dsl::id.eq(id)))
|
|
|
|
|
.set((
|
|
|
|
|
dsl::assignee_id.eq(assignee_id),
|
|
|
|
|
dsl::updated_at.eq(updated_at),
|
|
|
|
|
))
|
|
|
|
|
.execute(conn)?;
|
|
|
|
|
|
|
|
|
|
self.get_task(id).await
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/// Set task dependencies
|
|
|
|
|
pub async fn set_dependencies(
|
|
|
|
|
&self,
|
|
|
|
|
id: Uuid,
|
|
|
|
|
dependencies: Vec<Uuid>,
|
|
|
|
|
) -> Result<Task, Box<dyn std::error::Error>> {
|
|
|
|
|
use crate::core::shared::models::schema::tasks::dsl;
|
|
|
|
|
let conn = &mut self.db.get()?;
|
|
|
|
|
|
|
|
|
|
let updated_at = Utc::now();
|
|
|
|
|
|
|
|
|
|
diesel::update(dsl::tasks.filter(dsl::id.eq(id)))
|
|
|
|
|
.set((
|
|
|
|
|
dsl::dependencies.eq(dependencies),
|
|
|
|
|
dsl::updated_at.eq(updated_at),
|
|
|
|
|
))
|
|
|
|
|
.execute(conn)?;
|
|
|
|
|
|
|
|
|
|
self.get_task(id).await
|
2025-11-22 22:55:35 -03:00
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/// Calculate task progress (percentage)
|
2025-11-26 22:54:22 -03:00
|
|
|
pub async fn calculate_progress(
|
|
|
|
|
&self,
|
|
|
|
|
task_id: Uuid,
|
|
|
|
|
) -> Result<f32, Box<dyn std::error::Error>> {
|
2025-11-22 22:55:35 -03:00
|
|
|
let task = self.get_task(task_id).await?;
|
|
|
|
|
|
2025-11-27 08:34:24 -03:00
|
|
|
// Calculate progress based on status
|
|
|
|
|
Ok(match task.status.as_str() {
|
|
|
|
|
"todo" => 0.0,
|
|
|
|
|
"in_progress" | "in-progress" => 50.0,
|
|
|
|
|
"review" => 75.0,
|
|
|
|
|
"completed" | "done" => 100.0,
|
|
|
|
|
"blocked" => {
|
|
|
|
|
(task.actual_hours.unwrap_or(0.0) / task.estimated_hours.unwrap_or(1.0) * 100.0)
|
|
|
|
|
as f32
|
2025-11-22 22:55:35 -03:00
|
|
|
}
|
2025-11-27 08:34:24 -03:00
|
|
|
"cancelled" => 0.0,
|
|
|
|
|
_ => 0.0,
|
|
|
|
|
})
|
2025-11-22 22:55:35 -03:00
|
|
|
}
|
|
|
|
|
|
|
|
|
|
    /// Create a task from template
    ///
    /// NOTE(review): loading the template by `_template_id` is not
    /// implemented yet — a hard-coded "Default Template" placeholder is
    /// used instead, so the parameter is currently ignored. The
    /// template's `default_priority`/`default_assignee` are also not
    /// applied (priority is hard-coded to "medium"). And as with
    /// `create_subtask`, `self.create_task` in the earlier impl takes a
    /// `CreateTaskRequest`, not a `Task` — presumably this intended
    /// `create_task_with_db`; confirm during the Diesel port.
    pub async fn create_from_template(
        &self,
        _template_id: Uuid,
        assignee: Option<String>,
    ) -> Result<Task, Box<dyn std::error::Error>> {
        // TODO: Implement with Diesel — SELECT the template row by id.
        let template = TaskTemplate {
            id: Uuid::new_v4(),
            name: "Default Template".to_string(),
            description: Some("Default template".to_string()),
            default_assignee: None,
            default_priority: TaskPriority::Medium,
            default_tags: vec![],
            checklist: vec![],
        };

        let task = Task {
            id: Uuid::new_v4(),
            title: template.name,
            description: template.description,
            status: "todo".to_string(),
            priority: "medium".to_string(),
            // Invalid assignee UUIDs are silently dropped (become None).
            assignee_id: assignee.and_then(|s| uuid::Uuid::parse_str(&s).ok()),
            reporter_id: Some(uuid::Uuid::new_v4()),
            project_id: None,
            due_date: None,
            estimated_hours: None,
            actual_hours: None,
            tags: template.default_tags,
            dependencies: Vec::new(),
            progress: 0,
            created_at: Utc::now(),
            updated_at: Utc::now(),
            completed_at: None,
        };

        let created = self.create_task(task).await?;

        // Create checklist items
        // NOTE(review): with the placeholder template the checklist is
        // always empty, and the items are never persisted anyway.
        for item in template.checklist {
            let _checklist_item = ChecklistItem {
                id: Uuid::new_v4(),
                task_id: created.id,
                description: item.description,
                completed: false,
                completed_by: None,
                completed_at: None,
            };

            // TODO: Implement with Diesel — INSERT INTO task_checklists
            // (id, task_id, description, completed).
        }

        Ok(created)
    }
|
|
|
|
|
/// Send notification to assignee
|
|
|
|
|
async fn notify_assignee(
|
|
|
|
|
&self,
|
|
|
|
|
assignee: &str,
|
|
|
|
|
task: &Task,
|
|
|
|
|
) -> Result<(), Box<dyn std::error::Error>> {
|
|
|
|
|
// This would integrate with your notification system
|
|
|
|
|
// For now, just log it
|
|
|
|
|
log::info!(
|
|
|
|
|
"Notifying {} about new task assignment: {}",
|
|
|
|
|
assignee,
|
|
|
|
|
task.title
|
|
|
|
|
);
|
|
|
|
|
Ok(())
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/// Refresh the cache from database
|
|
|
|
|
async fn refresh_cache(&self) -> Result<(), Box<dyn std::error::Error>> {
|
|
|
|
|
// TODO: Implement with Diesel
|
|
|
|
|
/*
|
|
|
|
|
let results = sqlx::query!("SELECT * FROM tasks ORDER BY created_at DESC")
|
|
|
|
|
.fetch_all(self.db.as_ref())
|
|
|
|
|
.await?;
|
|
|
|
|
|
|
|
|
|
let tasks: Vec<Task> = results
|
|
|
|
|
.into_iter()
|
|
|
|
|
.map(|r| serde_json::from_value(serde_json::to_value(r).unwrap()).unwrap())
|
|
|
|
|
.collect();
|
|
|
|
|
*/
|
|
|
|
|
|
|
|
|
|
let tasks: Vec<Task> = vec![];
|
|
|
|
|
|
|
|
|
|
let mut cache = self.cache.write().await;
|
|
|
|
|
*cache = tasks;
|
|
|
|
|
|
|
|
|
|
Ok(())
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/// Get task statistics for reporting
|
|
|
|
|
pub async fn get_statistics(
|
|
|
|
|
&self,
|
|
|
|
|
user_id: Option<&str>,
|
|
|
|
|
) -> Result<serde_json::Value, Box<dyn std::error::Error>> {
|
2025-11-27 08:34:24 -03:00
|
|
|
let _base_query = if let Some(uid) = user_id {
|
2025-11-22 22:55:35 -03:00
|
|
|
format!("WHERE assignee = '{}' OR reporter = '{}'", uid, uid)
|
|
|
|
|
} else {
|
|
|
|
|
String::new()
|
|
|
|
|
};
|
|
|
|
|
|
|
|
|
|
// TODO: Implement with Diesel
|
|
|
|
|
/*
|
|
|
|
|
let stats = sqlx::query(&format!(
|
|
|
|
|
r#"
|
|
|
|
|
SELECT
|
|
|
|
|
COUNT(*) FILTER (WHERE status = 'todo') as todo_count,
|
|
|
|
|
COUNT(*) FILTER (WHERE status = 'in_progress') as in_progress_count,
|
|
|
|
|
COUNT(*) FILTER (WHERE status = 'done') as done_count,
|
|
|
|
|
COUNT(*) FILTER (WHERE due_date < NOW() AND status != 'done') as overdue_count,
|
|
|
|
|
AVG(actual_hours / NULLIF(estimated_hours, 0)) as avg_completion_ratio
|
|
|
|
|
FROM tasks
|
|
|
|
|
{}
|
|
|
|
|
"#,
|
|
|
|
|
base_query
|
|
|
|
|
))
|
|
|
|
|
.fetch_one(self.db.as_ref())
|
|
|
|
|
.await?;
|
|
|
|
|
*/
|
|
|
|
|
|
|
|
|
|
// Return empty stats for now
|
|
|
|
|
Ok(serde_json::json!({
|
|
|
|
|
"todo_count": 0,
|
|
|
|
|
"in_progress_count": 0,
|
|
|
|
|
"done_count": 0,
|
|
|
|
|
"overdue_count": 0,
|
|
|
|
|
"avg_completion_ratio": null
|
|
|
|
|
}))
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/// HTTP API handlers
|
|
|
|
|
pub mod handlers {
|
|
|
|
|
use super::*;
|
2025-11-26 22:54:22 -03:00
|
|
|
use axum::extract::{Path as AxumPath, Query as AxumQuery, State as AxumState};
|
2025-11-22 22:55:35 -03:00
|
|
|
use axum::http::StatusCode;
|
2025-11-26 22:54:22 -03:00
|
|
|
use axum::response::{IntoResponse, Json as AxumJson};
|
2025-11-22 22:55:35 -03:00
|
|
|
|
|
|
|
|
pub async fn create_task_handler<S>(
|
|
|
|
|
AxumState(_engine): AxumState<S>,
|
2025-11-27 08:34:24 -03:00
|
|
|
AxumJson(task): AxumJson<TaskResponse>,
|
2025-11-22 22:55:35 -03:00
|
|
|
) -> impl IntoResponse {
|
|
|
|
|
// TODO: Implement with actual engine
|
|
|
|
|
let created = task;
|
|
|
|
|
(StatusCode::OK, AxumJson(serde_json::json!(created)))
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
pub async fn get_tasks_handler<S>(
|
|
|
|
|
AxumState(_engine): AxumState<S>,
|
|
|
|
|
AxumQuery(_query): AxumQuery<serde_json::Value>,
|
|
|
|
|
) -> impl IntoResponse {
|
|
|
|
|
// TODO: Implement with actual engine
|
2025-11-27 08:34:24 -03:00
|
|
|
let tasks: Vec<TaskResponse> = vec![];
|
2025-11-22 22:55:35 -03:00
|
|
|
(StatusCode::OK, AxumJson(serde_json::json!(tasks)))
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
pub async fn update_task_handler<S>(
|
|
|
|
|
AxumState(_engine): AxumState<S>,
|
|
|
|
|
AxumPath(_id): AxumPath<Uuid>,
|
|
|
|
|
AxumJson(_updates): AxumJson<TaskUpdate>,
|
|
|
|
|
) -> impl IntoResponse {
|
|
|
|
|
// TODO: Implement with actual engine
|
|
|
|
|
let updated = serde_json::json!({"message": "Task updated"});
|
|
|
|
|
(StatusCode::OK, AxumJson(updated))
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
pub async fn get_statistics_handler<S>(
|
|
|
|
|
AxumState(_engine): AxumState<S>,
|
|
|
|
|
AxumQuery(_query): AxumQuery<serde_json::Value>,
|
|
|
|
|
) -> impl IntoResponse {
|
|
|
|
|
// TODO: Implement with actual engine
|
|
|
|
|
let stats = serde_json::json!({
|
|
|
|
|
"todo_count": 0,
|
|
|
|
|
"in_progress_count": 0,
|
|
|
|
|
"done_count": 0,
|
|
|
|
|
"overdue_count": 0
|
|
|
|
|
});
|
|
|
|
|
(StatusCode::OK, AxumJson(stats))
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2025-11-27 09:38:50 -03:00
|
|
|
// Duplicate handlers removed - using the ones defined above
|
2025-11-26 22:54:22 -03:00
|
|
|
|
|
|
|
|
pub async fn handle_task_list(
|
2025-11-27 08:34:24 -03:00
|
|
|
State(state): State<Arc<AppState>>,
|
2025-11-26 22:54:22 -03:00
|
|
|
Query(params): Query<std::collections::HashMap<String, String>>,
|
2025-11-27 08:34:24 -03:00
|
|
|
) -> Result<Json<Vec<TaskResponse>>, StatusCode> {
|
2025-11-26 22:54:22 -03:00
|
|
|
let tasks = if let Some(user_id) = params.get("user_id") {
|
2025-11-27 08:34:24 -03:00
|
|
|
state.task_engine.get_user_tasks(user_id).await
|
2025-11-26 22:54:22 -03:00
|
|
|
} else if let Some(status_str) = params.get("status") {
|
|
|
|
|
let status = match status_str.as_str() {
|
2025-11-27 08:34:24 -03:00
|
|
|
"todo" => "todo",
|
|
|
|
|
"in_progress" => "in_progress",
|
|
|
|
|
"review" => "review",
|
|
|
|
|
"done" => "completed",
|
|
|
|
|
"blocked" => "blocked",
|
|
|
|
|
"cancelled" => "cancelled",
|
|
|
|
|
_ => "todo",
|
2025-11-26 22:54:22 -03:00
|
|
|
};
|
2025-11-27 08:34:24 -03:00
|
|
|
state
|
|
|
|
|
.task_engine
|
|
|
|
|
.get_tasks_by_status(status.to_string())
|
|
|
|
|
.await
|
2025-11-26 22:54:22 -03:00
|
|
|
} else {
|
2025-11-27 08:34:24 -03:00
|
|
|
state.task_engine.get_all_tasks().await
|
2025-11-26 22:54:22 -03:00
|
|
|
};
|
|
|
|
|
|
|
|
|
|
match tasks {
|
2025-11-27 08:34:24 -03:00
|
|
|
Ok(task_list) => Ok(Json(
|
|
|
|
|
task_list
|
|
|
|
|
.into_iter()
|
|
|
|
|
.map(|t| t.into())
|
|
|
|
|
.collect::<Vec<TaskResponse>>(),
|
|
|
|
|
)),
|
2025-11-26 22:54:22 -03:00
|
|
|
Err(_) => Err(StatusCode::INTERNAL_SERVER_ERROR),
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
pub async fn handle_task_assign(
|
2025-11-27 08:34:24 -03:00
|
|
|
State(state): State<Arc<AppState>>,
|
2025-11-26 22:54:22 -03:00
|
|
|
Path(id): Path<Uuid>,
|
|
|
|
|
Json(payload): Json<serde_json::Value>,
|
2025-11-27 08:34:24 -03:00
|
|
|
) -> Result<Json<TaskResponse>, StatusCode> {
|
2025-11-26 22:54:22 -03:00
|
|
|
let assignee = payload["assignee"]
|
|
|
|
|
.as_str()
|
|
|
|
|
.ok_or(StatusCode::BAD_REQUEST)?;
|
|
|
|
|
|
2025-11-27 08:34:24 -03:00
|
|
|
match state
|
|
|
|
|
.task_engine
|
|
|
|
|
.assign_task(id, assignee.to_string())
|
|
|
|
|
.await
|
|
|
|
|
{
|
|
|
|
|
Ok(updated) => Ok(Json(updated.into())),
|
2025-11-26 22:54:22 -03:00
|
|
|
Err(_) => Err(StatusCode::INTERNAL_SERVER_ERROR),
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
pub async fn handle_task_status_update(
|
2025-11-27 08:34:24 -03:00
|
|
|
State(state): State<Arc<AppState>>,
|
2025-11-26 22:54:22 -03:00
|
|
|
Path(id): Path<Uuid>,
|
|
|
|
|
Json(payload): Json<serde_json::Value>,
|
2025-11-27 08:34:24 -03:00
|
|
|
) -> Result<Json<TaskResponse>, StatusCode> {
|
2025-11-26 22:54:22 -03:00
|
|
|
let status_str = payload["status"].as_str().ok_or(StatusCode::BAD_REQUEST)?;
|
|
|
|
|
let status = match status_str {
|
2025-11-27 08:34:24 -03:00
|
|
|
"todo" => "todo",
|
|
|
|
|
"in_progress" => "in_progress",
|
|
|
|
|
"review" => "review",
|
|
|
|
|
"done" => "completed",
|
|
|
|
|
"blocked" => "blocked",
|
|
|
|
|
"cancelled" => "cancelled",
|
2025-11-26 22:54:22 -03:00
|
|
|
_ => return Err(StatusCode::BAD_REQUEST),
|
|
|
|
|
};
|
|
|
|
|
|
|
|
|
|
let updates = TaskUpdate {
|
|
|
|
|
title: None,
|
|
|
|
|
description: None,
|
2025-11-27 08:34:24 -03:00
|
|
|
status: Some(status.to_string()),
|
2025-11-26 22:54:22 -03:00
|
|
|
priority: None,
|
|
|
|
|
assignee: None,
|
|
|
|
|
due_date: None,
|
|
|
|
|
tags: None,
|
|
|
|
|
};
|
|
|
|
|
|
2025-11-27 08:34:24 -03:00
|
|
|
match state.task_engine.update_task(id, updates).await {
|
|
|
|
|
Ok(updated) => Ok(Json(updated.into())),
|
2025-11-26 22:54:22 -03:00
|
|
|
Err(_) => Err(StatusCode::INTERNAL_SERVER_ERROR),
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
pub async fn handle_task_priority_set(
|
2025-11-27 08:34:24 -03:00
|
|
|
State(state): State<Arc<AppState>>,
|
2025-11-26 22:54:22 -03:00
|
|
|
Path(id): Path<Uuid>,
|
|
|
|
|
Json(payload): Json<serde_json::Value>,
|
2025-11-27 08:34:24 -03:00
|
|
|
) -> Result<Json<TaskResponse>, StatusCode> {
|
2025-11-26 22:54:22 -03:00
|
|
|
let priority_str = payload["priority"]
|
|
|
|
|
.as_str()
|
|
|
|
|
.ok_or(StatusCode::BAD_REQUEST)?;
|
|
|
|
|
let priority = match priority_str {
|
2025-11-27 08:34:24 -03:00
|
|
|
"low" => "low",
|
|
|
|
|
"medium" => "medium",
|
|
|
|
|
"high" => "high",
|
|
|
|
|
"urgent" => "urgent",
|
2025-11-26 22:54:22 -03:00
|
|
|
_ => return Err(StatusCode::BAD_REQUEST),
|
|
|
|
|
};
|
|
|
|
|
|
|
|
|
|
let updates = TaskUpdate {
|
|
|
|
|
title: None,
|
|
|
|
|
description: None,
|
|
|
|
|
status: None,
|
2025-11-27 08:34:24 -03:00
|
|
|
priority: Some(priority.to_string()),
|
2025-11-26 22:54:22 -03:00
|
|
|
assignee: None,
|
|
|
|
|
due_date: None,
|
|
|
|
|
tags: None,
|
|
|
|
|
};
|
|
|
|
|
|
2025-11-27 08:34:24 -03:00
|
|
|
match state.task_engine.update_task(id, updates).await {
|
|
|
|
|
Ok(updated) => Ok(Json(updated.into())),
|
2025-11-26 22:54:22 -03:00
|
|
|
Err(_) => Err(StatusCode::INTERNAL_SERVER_ERROR),
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2025-11-27 08:34:24 -03:00
|
|
|
pub async fn handle_task_set_dependencies(
|
|
|
|
|
State(state): State<Arc<AppState>>,
|
2025-11-26 22:54:22 -03:00
|
|
|
Path(id): Path<Uuid>,
|
|
|
|
|
Json(payload): Json<serde_json::Value>,
|
2025-11-27 08:34:24 -03:00
|
|
|
) -> Result<Json<TaskResponse>, StatusCode> {
|
2025-11-26 22:54:22 -03:00
|
|
|
let deps = payload["dependencies"]
|
|
|
|
|
.as_array()
|
|
|
|
|
.ok_or(StatusCode::BAD_REQUEST)?
|
|
|
|
|
.iter()
|
|
|
|
|
.filter_map(|v| v.as_str().and_then(|s| Uuid::parse_str(s).ok()))
|
|
|
|
|
.collect::<Vec<_>>();
|
|
|
|
|
|
2025-11-27 08:34:24 -03:00
|
|
|
match state.task_engine.set_dependencies(id, deps).await {
|
|
|
|
|
Ok(updated) => Ok(Json(updated.into())),
|
2025-11-26 22:54:22 -03:00
|
|
|
Err(_) => Err(StatusCode::INTERNAL_SERVER_ERROR),
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2025-11-22 22:55:35 -03:00
|
|
|
/// Configure task engine routes
|
2025-11-27 08:34:24 -03:00
|
|
|
pub fn configure_task_routes() -> Router<Arc<AppState>> {
|
2025-11-26 22:54:22 -03:00
|
|
|
Router::new()
|
|
|
|
|
.route("/api/tasks", post(handle_task_create))
|
|
|
|
|
.route("/api/tasks", get(handle_task_list))
|
|
|
|
|
.route("/api/tasks/:id", put(handle_task_update))
|
|
|
|
|
.route("/api/tasks/:id", delete(handle_task_delete))
|
|
|
|
|
.route("/api/tasks/:id/assign", post(handle_task_assign))
|
|
|
|
|
.route("/api/tasks/:id/status", put(handle_task_status_update))
|
|
|
|
|
.route("/api/tasks/:id/priority", put(handle_task_priority_set))
|
|
|
|
|
.route(
|
|
|
|
|
"/api/tasks/:id/dependencies",
|
2025-11-27 08:34:24 -03:00
|
|
|
put(handle_task_set_dependencies),
|
2025-11-26 22:54:22 -03:00
|
|
|
)
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/// Configure task engine routes (legacy)
|
2025-11-22 22:55:35 -03:00
|
|
|
pub fn configure<S>(router: Router<S>) -> Router<S>
|
|
|
|
|
where
|
|
|
|
|
S: Clone + Send + Sync + 'static,
|
|
|
|
|
{
|
|
|
|
|
use axum::routing::{get, post, put};
|
|
|
|
|
|
|
|
|
|
router
|
|
|
|
|
.route("/api/tasks", post(handlers::create_task_handler::<S>))
|
|
|
|
|
.route("/api/tasks", get(handlers::get_tasks_handler::<S>))
|
|
|
|
|
.route("/api/tasks/:id", put(handlers::update_task_handler::<S>))
|
2025-11-26 22:54:22 -03:00
|
|
|
.route(
|
|
|
|
|
"/api/tasks/statistics",
|
|
|
|
|
get(handlers::get_statistics_handler::<S>),
|
|
|
|
|
)
|
2025-11-22 22:55:35 -03:00
|
|
|
}
|