feat(security): add BASIC keywords for security protection tools

Add security_protection.rs with 8 new BASIC keywords:
- SECURITY TOOL STATUS - Check if tool is installed/running
- SECURITY RUN SCAN - Execute security scan
- SECURITY GET REPORT - Get latest scan report
- SECURITY UPDATE DEFINITIONS - Update signatures
- SECURITY START SERVICE - Start security service
- SECURITY STOP SERVICE - Stop security service
- SECURITY INSTALL TOOL - Install security tool
- SECURITY HARDENING SCORE - Get Lynis hardening index

Also:
- Registered protection routes in main.rs
- Added Security Protection category to keywords list
- All functions use proper error handling (no unwrap/expect)
This commit is contained in:
Rodrigo Rodriguez (Pragmatismo) 2026-01-10 20:32:56 -03:00
parent b4003e3e0a
commit 46695c0f75
8 changed files with 947 additions and 5 deletions

View file

@ -207,6 +207,11 @@ qrcode = { version = "0.14", default-features = false }
# Excel/Spreadsheet Support
calamine = "0.26"
rust_xlsxwriter = "0.79"
spreadsheet-ods = "1.0"
# Word/PowerPoint Support
docx-rs = "0.4"
ppt-rs = { version = "0.2", default-features = false }
# Error handling
thiserror = "2.0"

View file

@ -55,6 +55,7 @@ pub mod procedures;
pub mod qrcode;
pub mod remember;
pub mod save_from_unstructured;
pub mod security_protection;
pub mod send_mail;
pub mod send_template;
pub mod set;
@ -85,6 +86,12 @@ pub mod webhook;
pub use app_server::configure_app_server_routes;
pub use db_api::configure_db_routes;
pub use mcp_client::{McpClient, McpRequest, McpResponse, McpServer, McpTool};
pub use security_protection::{
security_get_report, security_hardening_score, security_install_tool, security_run_scan,
security_service_is_running, security_start_service, security_stop_service,
security_tool_is_installed, security_tool_status, security_update_definitions,
SecurityScanResult, SecurityToolResult,
};
pub use mcp_directory::{McpDirectoryScanResult, McpDirectoryScanner, McpServerConfig};
pub use table_access::{
check_field_write_access, check_table_access, filter_fields_by_role, load_table_access_info,
@ -201,6 +208,14 @@ pub fn get_all_keywords() -> Vec<String> {
"OPTION A OR B".to_string(),
"DECIDE".to_string(),
"ESCALATE".to_string(),
"SECURITY TOOL STATUS".to_string(),
"SECURITY RUN SCAN".to_string(),
"SECURITY GET REPORT".to_string(),
"SECURITY UPDATE DEFINITIONS".to_string(),
"SECURITY START SERVICE".to_string(),
"SECURITY STOP SERVICE".to_string(),
"SECURITY INSTALL TOOL".to_string(),
"SECURITY HARDENING SCORE".to_string(),
]
}
@ -325,5 +340,19 @@ pub fn get_keyword_categories() -> std::collections::HashMap<String, Vec<String>
],
);
categories.insert(
"Security Protection".to_string(),
vec![
"SECURITY TOOL STATUS".to_string(),
"SECURITY RUN SCAN".to_string(),
"SECURITY GET REPORT".to_string(),
"SECURITY UPDATE DEFINITIONS".to_string(),
"SECURITY START SERVICE".to_string(),
"SECURITY STOP SERVICE".to_string(),
"SECURITY INSTALL TOOL".to_string(),
"SECURITY HARDENING SCORE".to_string(),
],
);
categories
}

View file

@ -0,0 +1,325 @@
use crate::security::protection::{ProtectionManager, ProtectionTool, ProtectionConfig};
use crate::shared::state::AppState;
use serde::{Deserialize, Serialize};
use std::sync::Arc;
/// Outcome of a tool-level operation (status check, install, service
/// start/stop, definitions update) as returned to BASIC keyword callers.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SecurityToolResult {
    /// Tool identifier, lowercased (e.g. "lynis", "clamav").
    pub tool: String,
    /// Whether the requested operation itself succeeded.
    pub success: bool,
    /// Whether the tool is installed on the host.
    pub installed: bool,
    /// Detected tool version, when available.
    pub version: Option<String>,
    /// Service state: `Some(true)`/`Some(false)` when known, `None` when the
    /// tool has no background service or the state could not be determined.
    pub running: Option<bool>,
    /// Human-readable summary of the outcome.
    pub message: String,
}
/// Outcome of a security scan as returned to BASIC keyword callers.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SecurityScanResult {
    /// Tool identifier, lowercased.
    pub tool: String,
    /// Whether the scan ran successfully.
    pub success: bool,
    /// Scan status string reported by the underlying tool.
    pub status: String,
    /// Number of findings produced by the scan.
    pub findings_count: usize,
    /// Number of warnings produced by the scan.
    pub warnings_count: usize,
    /// Hardening/score value when the tool reports one (seen populated for
    /// Lynis; presumably `None` for tools without a scoring concept).
    pub score: Option<i32>,
    /// Path to the generated report file, when one was written.
    pub report_path: Option<String>,
}
/// Checks whether a security tool is installed and whether its service runs.
///
/// Always returns `Ok` with the outcome encoded in `success`/`message` so
/// BASIC scripts can branch on it; `Err` is reserved for an unknown tool name.
pub async fn security_tool_status(
    _state: Arc<AppState>,
    tool_name: &str,
) -> Result<SecurityToolResult, String> {
    let tool = parse_tool_name(tool_name)?;
    let manager = ProtectionManager::new(ProtectionConfig::default());
    let tool_label = tool_name.to_lowercase();
    let result = match manager.check_tool_status(tool).await {
        Ok(status) => {
            let summary = if status.installed {
                "Tool is installed"
            } else {
                "Tool is not installed"
            };
            SecurityToolResult {
                tool: tool_label,
                success: true,
                installed: status.installed,
                version: status.version,
                running: status.service_running,
                message: summary.to_string(),
            }
        }
        Err(e) => SecurityToolResult {
            tool: tool_label,
            success: false,
            installed: false,
            version: None,
            running: None,
            message: format!("Failed to check status: {e}"),
        },
    };
    Ok(result)
}
/// Executes a security scan with the given tool.
///
/// Always returns `Ok` with the outcome encoded in `success`/`status` so
/// BASIC scripts can branch on it; `Err` is reserved for an unknown tool name.
pub async fn security_run_scan(
    _state: Arc<AppState>,
    tool_name: &str,
) -> Result<SecurityScanResult, String> {
    let tool = parse_tool_name(tool_name)?;
    let manager = ProtectionManager::new(ProtectionConfig::default());
    match manager.run_scan(tool).await {
        Ok(result) => Ok(SecurityScanResult {
            tool: tool_name.to_lowercase(),
            success: true,
            status: result.status,
            findings_count: result.findings.len(),
            warnings_count: result.warnings,
            score: result.score,
            report_path: result.report_path,
        }),
        // The failure cause was previously bound to an unused `e` and dropped.
        // SecurityScanResult has no message field, so surface it in `status`
        // instead of discarding it silently.
        Err(e) => Ok(SecurityScanResult {
            tool: tool_name.to_lowercase(),
            success: false,
            status: format!("error: {e}"),
            findings_count: 0,
            warnings_count: 0,
            score: None,
            report_path: None,
        }),
    }
}
/// Fetches the latest scan report for the given tool as raw text.
///
/// # Errors
/// Returns `Err` for an unknown tool name or when the report cannot be read.
pub async fn security_get_report(
    _state: Arc<AppState>,
    tool_name: &str,
) -> Result<String, String> {
    let tool = parse_tool_name(tool_name)?;
    let manager = ProtectionManager::new(ProtectionConfig::default());
    match manager.get_report(tool).await {
        Ok(report) => Ok(report),
        Err(e) => Err(format!("Failed to get report: {e}")),
    }
}
/// Updates signature/definition files for the given tool.
///
/// Always returns `Ok` with the outcome encoded in `success`/`message`;
/// `Err` is reserved for an unknown tool name. `installed` is reported as
/// `true` on both paths, mirroring the assumption that an update was only
/// attempted on an installed tool.
pub async fn security_update_definitions(
    _state: Arc<AppState>,
    tool_name: &str,
) -> Result<SecurityToolResult, String> {
    let tool = parse_tool_name(tool_name)?;
    let manager = ProtectionManager::new(ProtectionConfig::default());
    let (success, message) = match manager.update_definitions(tool).await {
        Ok(()) => (true, "Definitions updated successfully".to_string()),
        Err(e) => (false, format!("Failed to update definitions: {e}")),
    };
    Ok(SecurityToolResult {
        tool: tool_name.to_lowercase(),
        success,
        installed: true,
        version: None,
        running: None,
        message,
    })
}
/// Starts the background service of the given security tool.
///
/// Always returns `Ok` with the outcome encoded in `success`/`message`;
/// `Err` is reserved for an unknown tool name. `running` mirrors the
/// start outcome: `Some(true)` on success, `Some(false)` on failure.
pub async fn security_start_service(
    _state: Arc<AppState>,
    tool_name: &str,
) -> Result<SecurityToolResult, String> {
    let tool = parse_tool_name(tool_name)?;
    let manager = ProtectionManager::new(ProtectionConfig::default());
    let outcome = manager.start_service(tool).await;
    let started = outcome.is_ok();
    let message = match outcome {
        Ok(()) => "Service started successfully".to_string(),
        Err(e) => format!("Failed to start service: {e}"),
    };
    Ok(SecurityToolResult {
        tool: tool_name.to_lowercase(),
        success: started,
        installed: true,
        version: None,
        running: Some(started),
        message,
    })
}
/// Stops the background service of the given security tool.
///
/// Always returns `Ok` with the outcome encoded in `success`/`message`;
/// `Err` is reserved for an unknown tool name. On success the service is
/// known to be stopped (`Some(false)`); on failure its state is unknown
/// (`None`).
pub async fn security_stop_service(
    _state: Arc<AppState>,
    tool_name: &str,
) -> Result<SecurityToolResult, String> {
    let tool = parse_tool_name(tool_name)?;
    let manager = ProtectionManager::new(ProtectionConfig::default());
    let (success, running, message) = match manager.stop_service(tool).await {
        Ok(()) => (true, Some(false), "Service stopped successfully".to_string()),
        Err(e) => (false, None, format!("Failed to stop service: {e}")),
    };
    Ok(SecurityToolResult {
        tool: tool_name.to_lowercase(),
        success,
        installed: true,
        version: None,
        running,
        message,
    })
}
/// Installs the given security tool on the host.
///
/// Always returns `Ok` with the outcome encoded in `success`/`message`;
/// `Err` is reserved for an unknown tool name. `installed` tracks the
/// install outcome directly.
pub async fn security_install_tool(
    _state: Arc<AppState>,
    tool_name: &str,
) -> Result<SecurityToolResult, String> {
    let tool = parse_tool_name(tool_name)?;
    let manager = ProtectionManager::new(ProtectionConfig::default());
    let (success, message) = match manager.install_tool(tool).await {
        Ok(()) => (true, "Tool installed successfully".to_string()),
        Err(e) => (false, format!("Failed to install tool: {e}")),
    };
    Ok(SecurityToolResult {
        tool: tool_name.to_lowercase(),
        success,
        installed: success,
        version: None,
        running: None,
        message,
    })
}
/// Returns the Lynis hardening index by running a Lynis scan.
///
/// # Errors
/// Fails when the scan cannot be run or when it produced no score.
pub async fn security_hardening_score(_state: Arc<AppState>) -> Result<i32, String> {
    let manager = ProtectionManager::new(ProtectionConfig::default());
    let scan = manager
        .run_scan(ProtectionTool::Lynis)
        .await
        .map_err(|e| format!("Failed to get hardening score: {e}"))?;
    scan.score
        .ok_or_else(|| "No hardening score available".to_string())
}
/// True when the status result reports the tool as installed on the host.
pub fn security_tool_is_installed(status: &SecurityToolResult) -> bool {
    status.installed
}
/// True only when the tool's service is positively known to be running;
/// an unknown state (`None`) counts as not running.
pub fn security_service_is_running(status: &SecurityToolResult) -> bool {
    matches!(status.running, Some(true))
}
/// Maps a user-supplied tool name (case-insensitive, per the tests) to a
/// `ProtectionTool`, listing the accepted names in the error message when
/// the input is unknown.
fn parse_tool_name(name: &str) -> Result<ProtectionTool, String> {
    ProtectionTool::from_str(name)
        .ok_or_else(|| format!("Unknown security tool: {name}. Valid tools: lynis, rkhunter, chkrootkit, suricata, lmd, clamav"))
}
#[cfg(test)]
mod tests {
    // Unit tests cover the pure helpers only; ProtectionManager-backed
    // functions require a host environment and are not exercised here.
    use super::*;
    // Tool-name parsing accepts every supported tool, case-insensitively.
    #[test]
    fn test_parse_tool_name_valid() {
        assert!(parse_tool_name("lynis").is_ok());
        assert!(parse_tool_name("LYNIS").is_ok());
        assert!(parse_tool_name("Lynis").is_ok());
        assert!(parse_tool_name("rkhunter").is_ok());
        assert!(parse_tool_name("chkrootkit").is_ok());
        assert!(parse_tool_name("suricata").is_ok());
        assert!(parse_tool_name("lmd").is_ok());
        assert!(parse_tool_name("clamav").is_ok());
    }
    // Unknown and empty names must be rejected with an error.
    #[test]
    fn test_parse_tool_name_invalid() {
        assert!(parse_tool_name("unknown").is_err());
        assert!(parse_tool_name("").is_err());
        assert!(parse_tool_name("invalid_tool").is_err());
    }
    // The installed-check simply mirrors the `installed` flag.
    #[test]
    fn test_security_tool_is_installed() {
        let installed = SecurityToolResult {
            tool: "lynis".to_string(),
            success: true,
            installed: true,
            version: Some("3.0.9".to_string()),
            running: None,
            message: "Tool is installed".to_string(),
        };
        assert!(security_tool_is_installed(&installed));
        let not_installed = SecurityToolResult {
            tool: "lynis".to_string(),
            success: true,
            installed: false,
            version: None,
            running: None,
            message: "Tool is not installed".to_string(),
        };
        assert!(!security_tool_is_installed(&not_installed));
    }
    // Running-check: Some(true) => running; Some(false) and None => not.
    #[test]
    fn test_security_service_is_running() {
        let running = SecurityToolResult {
            tool: "suricata".to_string(),
            success: true,
            installed: true,
            version: None,
            running: Some(true),
            message: "Service running".to_string(),
        };
        assert!(security_service_is_running(&running));
        let stopped = SecurityToolResult {
            tool: "suricata".to_string(),
            success: true,
            installed: true,
            version: None,
            running: Some(false),
            message: "Service stopped".to_string(),
        };
        assert!(!security_service_is_running(&stopped));
        let unknown = SecurityToolResult {
            tool: "lynis".to_string(),
            success: true,
            installed: true,
            version: None,
            running: None,
            message: "No service".to_string(),
        };
        assert!(!security_service_is_running(&unknown));
    }
    // Serde serialization keeps the plain field names used by BASIC scripts.
    #[test]
    fn test_security_scan_result_serialization() {
        let result = SecurityScanResult {
            tool: "lynis".to_string(),
            success: true,
            status: "completed".to_string(),
            findings_count: 5,
            warnings_count: 12,
            score: Some(78),
            report_path: Some("/var/log/lynis-report.dat".to_string()),
        };
        let json = serde_json::to_string(&result).expect("Failed to serialize");
        assert!(json.contains("\"tool\":\"lynis\""));
        assert!(json.contains("\"score\":78"));
    }
}

View file

@ -7,7 +7,6 @@
//! - AI-powered writing assistance
//! - Export to multiple formats (PDF, DOCX, HTML, TXT, MD)
use crate::core::urls::ApiUrls;
use crate::shared::state::AppState;
use aws_sdk_s3::primitives::ByteStream;
@ -31,6 +30,11 @@ use std::collections::HashMap;
use std::sync::Arc;
use tokio::sync::broadcast;
use uuid::Uuid;
use docx_rs::{
Docx, Paragraph, Run, Table, TableRow, TableCell,
AlignmentType, BreakType, RunFonts, TableBorders, BorderType,
WidthType, TableCellWidth,
};
// =============================================================================
// COLLABORATION TYPES
@ -1129,11 +1133,110 @@ pub async fn handle_export_pdf(
}
pub async fn handle_export_docx(
State(_state): State<Arc<AppState>>,
Query(_params): Query<ExportQuery>,
State(state): State<Arc<AppState>>,
headers: HeaderMap,
Query(params): Query<ExportQuery>,
) -> impl IntoResponse {
// DOCX export would require a library like docx-rs
Html("<p>DOCX export coming soon</p>".to_string())
let (_user_id, user_identifier) = match get_current_user(&state, &headers).await {
Ok(u) => u,
Err(_) => return (
axum::http::StatusCode::UNAUTHORIZED,
[(axum::http::header::CONTENT_TYPE, "text/plain")],
Vec::new(),
),
};
let doc_id = match params.id {
Some(id) => id,
None => return (
axum::http::StatusCode::BAD_REQUEST,
[(axum::http::header::CONTENT_TYPE, "text/plain")],
Vec::new(),
),
};
match load_document_from_drive(&state, &user_identifier, &doc_id).await {
Ok(Some(doc)) => {
match html_to_docx(&doc.title, &doc.content) {
Ok(bytes) => (
axum::http::StatusCode::OK,
[(axum::http::header::CONTENT_TYPE, "application/vnd.openxmlformats-officedocument.wordprocessingml.document")],
bytes,
),
Err(_) => (
axum::http::StatusCode::INTERNAL_SERVER_ERROR,
[(axum::http::header::CONTENT_TYPE, "text/plain")],
Vec::new(),
),
}
}
_ => (
axum::http::StatusCode::NOT_FOUND,
[(axum::http::header::CONTENT_TYPE, "text/plain")],
Vec::new(),
),
}
}
/// Builds a DOCX document from a document title plus its HTML body and
/// returns the serialized .docx bytes.
///
/// The HTML is flattened to plain text via `strip_html`; blank-line-separated
/// chunks become paragraphs, and leading '#' runs are rendered as
/// Markdown-style headings (deeper level => smaller font).
fn html_to_docx(title: &str, html_content: &str) -> Result<Vec<u8>, String> {
    let mut docx = Docx::new();
    // Centered bold title. NOTE(review): docx run sizes are half-points, so
    // 48 renders as 24pt — confirm these sizes are intentional.
    let title_para = Paragraph::new()
        .add_run(
            Run::new()
                .add_text(title)
                .size(48)
                .bold()
                .fonts(RunFonts::new().ascii("Calibri"))
        )
        .align(AlignmentType::Center);
    docx = docx.add_paragraph(title_para);
    // Blank spacer line between the title and the body.
    docx = docx.add_paragraph(Paragraph::new());
    let plain_text = strip_html(html_content);
    // Double newlines delimit paragraphs in the stripped text.
    let paragraphs: Vec<&str> = plain_text.split("\n\n").collect();
    for para_text in paragraphs {
        let trimmed = para_text.trim();
        if trimmed.is_empty() {
            continue;
        }
        // '#'-prefixed paragraphs are treated as headings, Markdown-style.
        let is_heading = trimmed.starts_with('#');
        let (text, size, bold) = if is_heading {
            let level = trimmed.chars().take_while(|c| *c == '#').count();
            let heading_text = trimmed.trim_start_matches('#').trim();
            // Half-point sizes: h1=18pt, h2=14pt, h3=12pt, deeper=11pt.
            let heading_size = match level {
                1 => 36,
                2 => 28,
                3 => 24,
                _ => 22,
            };
            (heading_text, heading_size, true)
        } else {
            (trimmed, 22, false)
        };
        let mut run = Run::new()
            .add_text(text)
            .size(size)
            .fonts(RunFonts::new().ascii("Calibri"));
        if bold {
            run = run.bold();
        }
        let para = Paragraph::new().add_run(run);
        docx = docx.add_paragraph(para);
    }
    // Pack the document into an in-memory buffer rather than a file.
    let mut buffer = Vec::new();
    docx.build()
        .pack(&mut std::io::Cursor::new(&mut buffer))
        .map_err(|e| format!("Failed to generate DOCX: {}", e))?;
    Ok(buffer)
}
pub async fn handle_export_md(

View file

@ -174,10 +174,24 @@ pub struct BucketInfo {
pub is_gbai: bool,
}
/// Request body for `/api/files/open`: identifies a drive object by bucket
/// and object path.
#[derive(Debug, Deserialize)]
pub struct OpenRequest {
    /// Drive (S3) bucket containing the file.
    pub bucket: String,
    /// Object key / path of the file within the bucket.
    pub path: String,
}
/// Response for `/api/files/open`: which suite app should handle the file,
/// the URL to open it with, and the detected MIME type.
#[derive(Debug, Serialize)]
pub struct OpenResponse {
    /// Suite app id (e.g. "sheet", "docs", "editor", "download").
    pub app: String,
    /// Ready-to-navigate URL with bucket/path encoded in the query string.
    pub url: String,
    /// MIME content type derived from the file extension.
    pub content_type: String,
}
pub fn configure() -> Router<Arc<AppState>> {
Router::new()
.route("/api/files/buckets", get(list_buckets))
.route("/api/files/list", get(list_files))
.route("/api/files/open", post(open_file))
.route("/api/files/read", post(read_file))
.route("/api/files/write", post(write_file))
.route("/api/files/save", post(write_file))
@ -209,6 +223,88 @@ pub fn configure() -> Router<Arc<AppState>> {
.route("/api/docs/import", post(document_processing::import_document))
}
/// Resolves which suite application should open a drive file, based solely on
/// its extension, and returns the app id, a ready-to-use URL (bucket and path
/// percent-encoded into the query string) and a MIME content type.
pub async fn open_file(
    Json(req): Json<OpenRequest>,
) -> Result<Json<OpenResponse>, (StatusCode, Json<serde_json::Value>)> {
    // For a dotless name like "Makefile", rsplit('.') yields the whole name,
    // so "makefile"/"dockerfile" still match the editor arm below.
    let ext = req.path
        .rsplit('.')
        .next()
        .unwrap_or("")
        .to_lowercase();
    let params = format!("bucket={}&path={}",
        urlencoding::encode(&req.bucket),
        urlencoding::encode(&req.path));
    let (app, url, content_type) = match ext.as_str() {
        // Designer - BASIC dialogs
        "bas" => ("designer", format!("/suite/designer.html?{params}"), "text/x-basic"),
        // Sheet - Spreadsheets
        "csv" => ("sheet", format!("/suite/sheet/sheet.html?{params}"), "text/csv"),
        "tsv" => ("sheet", format!("/suite/sheet/sheet.html?{params}"), "text/tab-separated-values"),
        "xlsx" => ("sheet", format!("/suite/sheet/sheet.html?{params}"), "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet"),
        "xls" => ("sheet", format!("/suite/sheet/sheet.html?{params}"), "application/vnd.ms-excel"),
        "ods" => ("sheet", format!("/suite/sheet/sheet.html?{params}"), "application/vnd.oasis.opendocument.spreadsheet"),
        "numbers" => ("sheet", format!("/suite/sheet/sheet.html?{params}"), "application/vnd.apple.numbers"),
        // Docs - Documents
        "docx" => ("docs", format!("/suite/docs/docs.html?{params}"), "application/vnd.openxmlformats-officedocument.wordprocessingml.document"),
        "doc" => ("docs", format!("/suite/docs/docs.html?{params}"), "application/msword"),
        "odt" => ("docs", format!("/suite/docs/docs.html?{params}"), "application/vnd.oasis.opendocument.text"),
        "rtf" => ("docs", format!("/suite/docs/docs.html?{params}"), "application/rtf"),
        "pdf" => ("docs", format!("/suite/docs/docs.html?{params}"), "application/pdf"),
        "md" => ("docs", format!("/suite/docs/docs.html?{params}"), "text/markdown"),
        "markdown" => ("docs", format!("/suite/docs/docs.html?{params}"), "text/markdown"),
        "txt" => ("docs", format!("/suite/docs/docs.html?{params}"), "text/plain"),
        "tex" => ("docs", format!("/suite/docs/docs.html?{params}"), "application/x-tex"),
        "latex" => ("docs", format!("/suite/docs/docs.html?{params}"), "application/x-latex"),
        "epub" => ("docs", format!("/suite/docs/docs.html?{params}"), "application/epub+zip"),
        "pages" => ("docs", format!("/suite/docs/docs.html?{params}"), "application/vnd.apple.pages"),
        // Slides - Presentations
        "pptx" => ("slides", format!("/suite/slides/slides.html?{params}"), "application/vnd.openxmlformats-officedocument.presentationml.presentation"),
        "ppt" => ("slides", format!("/suite/slides/slides.html?{params}"), "application/vnd.ms-powerpoint"),
        "odp" => ("slides", format!("/suite/slides/slides.html?{params}"), "application/vnd.oasis.opendocument.presentation"),
        "key" => ("slides", format!("/suite/slides/slides.html?{params}"), "application/vnd.apple.keynote"),
        // Images - use video player (supports images too)
        "png" | "jpg" | "jpeg" | "gif" | "webp" | "svg" | "bmp" | "ico" | "tiff" | "tif" | "heic" | "heif" =>
            ("video", format!("/suite/video/video.html?{params}"), "image/*"),
        // Video
        "mp4" | "webm" | "mov" | "avi" | "mkv" | "wmv" | "flv" | "m4v" =>
            ("video", format!("/suite/video/video.html?{params}"), "video/*"),
        // Audio - use player
        "mp3" | "wav" | "ogg" | "oga" | "flac" | "aac" | "m4a" | "wma" | "aiff" | "aif" =>
            ("player", format!("/suite/player/player.html?{params}"), "audio/*"),
        // Archives - direct download
        "zip" | "rar" | "7z" | "tar" | "gz" | "bz2" | "xz" =>
            ("download", format!("/api/files/download?{params}"), "application/octet-stream"),
        // Code/Config - Editor
        "json" | "xml" | "yaml" | "yml" | "toml" | "ini" | "conf" | "config" |
        "js" | "ts" | "jsx" | "tsx" | "css" | "scss" | "sass" | "less" |
        "html" | "htm" | "vue" | "svelte" |
        "py" | "rb" | "php" | "java" | "c" | "cpp" | "h" | "hpp" | "cs" |
        "rs" | "go" | "swift" | "kt" | "scala" | "r" | "lua" | "pl" | "sh" | "bash" |
        "sql" | "graphql" | "proto" |
        "dockerfile" | "makefile" | "gitignore" | "env" | "log" =>
            ("editor", format!("/suite/editor/editor.html?{params}"), "text/plain"),
        // Default - Editor for unknown text files
        _ => ("editor", format!("/suite/editor/editor.html?{params}"), "application/octet-stream"),
    };
    Ok(Json(OpenResponse {
        app: app.to_string(),
        url,
        content_type: content_type.to_string(),
    }))
}
pub async fn list_buckets(
State(state): State<Arc<AppState>>,
) -> Result<Json<Vec<BucketInfo>>, (StatusCode, Json<serde_json::Value>)> {

View file

@ -394,6 +394,7 @@ async fn run_axum_server(
api_router = api_router.merge(botserver::designer::configure_designer_routes());
api_router = api_router.merge(botserver::dashboards::configure_dashboards_routes());
api_router = api_router.merge(botserver::monitoring::configure());
api_router = api_router.merge(crate::security::configure_protection_routes());
api_router = api_router.merge(botserver::settings::configure_settings_routes());
api_router = api_router.merge(botserver::basic::keywords::configure_db_routes());
api_router = api_router.merge(botserver::basic::keywords::configure_app_server_routes());

View file

@ -9,7 +9,9 @@ use axum::{
routing::{get, post},
Json, Router,
};
use calamine::{open_workbook_auto, Reader, Data};
use chrono::{DateTime, Datelike, Local, NaiveDate, Utc};
use rust_xlsxwriter::{Workbook, Format, Color, FormatAlign, FormatBorder};
use futures_util::{SinkExt, StreamExt};
use log::{error, info};
use serde::{Deserialize, Serialize};
@ -248,6 +250,12 @@ pub struct LoadQuery {
pub id: String,
}
/// Request body for `/api/sheet/load-from-drive`: identifies the spreadsheet
/// object to import by bucket and object path.
#[derive(Debug, Deserialize)]
pub struct LoadFromDriveRequest {
    /// Drive (S3) bucket containing the spreadsheet file.
    pub bucket: String,
    /// Object key / path of the file within the bucket.
    pub path: String,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SearchQuery {
pub q: Option<String>,
@ -447,6 +455,7 @@ pub fn configure_sheet_routes() -> Router<Arc<AppState>> {
.route("/api/sheet/list", get(handle_list_sheets))
.route("/api/sheet/search", get(handle_search_sheets))
.route("/api/sheet/load", get(handle_load_sheet))
.route("/api/sheet/load-from-drive", post(handle_load_from_drive))
.route("/api/sheet/save", post(handle_save_sheet))
.route("/api/sheet/delete", post(handle_delete_sheet))
.route("/api/sheet/cell", post(handle_update_cell))
@ -700,6 +709,175 @@ pub async fn handle_load_sheet(
}
}
pub async fn handle_load_from_drive(
State(state): State<Arc<AppState>>,
Json(req): Json<LoadFromDriveRequest>,
) -> Result<Json<Spreadsheet>, (StatusCode, Json<serde_json::Value>)> {
let drive = state.drive.as_ref().ok_or_else(|| {
(StatusCode::SERVICE_UNAVAILABLE, Json(serde_json::json!({ "error": "Drive not available" })))
})?;
let result = drive
.get_object()
.bucket(&req.bucket)
.key(&req.path)
.send()
.await
.map_err(|e| {
(StatusCode::NOT_FOUND, Json(serde_json::json!({ "error": format!("File not found: {e}") })))
})?;
let bytes = result.body.collect().await
.map_err(|e| {
(StatusCode::INTERNAL_SERVER_ERROR, Json(serde_json::json!({ "error": format!("Failed to read file: {e}") })))
})?
.into_bytes();
let ext = req.path.rsplit('.').next().unwrap_or("").to_lowercase();
let file_name = req.path.rsplit('/').next().unwrap_or("Spreadsheet");
let sheet_name = file_name.rsplit('.').last().unwrap_or("Spreadsheet").to_string();
let worksheets = match ext.as_str() {
"csv" | "tsv" => {
let delimiter = if ext == "tsv" { b'\t' } else { b',' };
parse_csv_to_worksheets(&bytes, delimiter, &sheet_name)?
}
"xlsx" | "xls" | "ods" | "xlsb" | "xlsm" => {
parse_excel_to_worksheets(&bytes, &ext)?
}
_ => {
return Err((StatusCode::BAD_REQUEST, Json(serde_json::json!({ "error": format!("Unsupported format: .{ext}") }))));
}
};
let user_id = get_current_user_id();
let sheet = Spreadsheet {
id: Uuid::new_v4().to_string(),
name: sheet_name,
owner_id: user_id,
worksheets,
created_at: Utc::now(),
updated_at: Utc::now(),
};
Ok(Json(sheet))
}
/// Parses delimiter-separated text into a single `Worksheet`.
///
/// Sparse representation: only non-empty cells are stored, keyed "row,col".
/// NOTE(review): this is a naive splitter, not RFC 4180 — quoted fields that
/// contain the delimiter or embedded newlines will be split incorrectly.
fn parse_csv_to_worksheets(
    bytes: &[u8],
    delimiter: u8,
    sheet_name: &str,
) -> Result<Vec<Worksheet>, (StatusCode, Json<serde_json::Value>)> {
    let content = String::from_utf8_lossy(bytes);
    // Honor whatever single-byte delimiter the caller chose. (Previously any
    // delimiter other than '\t' silently fell back to splitting on ','.)
    let sep = char::from(delimiter);
    let mut data: HashMap<String, CellData> = HashMap::new();
    for (row_idx, line) in content.lines().enumerate() {
        for (col_idx, raw) in line.split(sep).enumerate() {
            // Drop surrounding whitespace and simple quoting.
            let clean_value = raw.trim().trim_matches('"').to_string();
            if !clean_value.is_empty() {
                let key = format!("{row_idx},{col_idx}");
                data.insert(key, CellData {
                    value: Some(clean_value),
                    formula: None,
                    style: None,
                    format: None,
                    note: None,
                });
            }
        }
    }
    Ok(vec![Worksheet {
        name: sheet_name.to_string(),
        data,
        column_widths: None,
        row_heights: None,
        frozen_rows: None,
        frozen_cols: None,
        merged_cells: None,
        filters: None,
        hidden_rows: None,
        validations: None,
        conditional_formats: None,
        charts: None,
    }])
}
fn parse_excel_to_worksheets(
bytes: &[u8],
_ext: &str,
) -> Result<Vec<Worksheet>, (StatusCode, Json<serde_json::Value>)> {
use std::io::Cursor;
let cursor = Cursor::new(bytes);
let mut workbook = open_workbook_auto(cursor).map_err(|e| {
(StatusCode::BAD_REQUEST, Json(serde_json::json!({ "error": format!("Failed to parse spreadsheet: {e}") })))
})?;
let sheet_names: Vec<String> = workbook.sheet_names().to_vec();
let mut worksheets = Vec::new();
for sheet_name in sheet_names {
let range = workbook.worksheet_range(&sheet_name).map_err(|e| {
(StatusCode::INTERNAL_SERVER_ERROR, Json(serde_json::json!({ "error": format!("Failed to read sheet {sheet_name}: {e}") })))
})?;
let mut data: HashMap<String, CellData> = HashMap::new();
for (row_idx, row) in range.rows().enumerate() {
for (col_idx, cell) in row.iter().enumerate() {
let value = match cell {
Data::Empty => continue,
Data::String(s) => s.clone(),
Data::Int(i) => i.to_string(),
Data::Float(f) => f.to_string(),
Data::Bool(b) => b.to_string(),
Data::DateTime(dt) => dt.to_string(),
Data::Error(e) => format!("#ERR:{e:?}"),
Data::DateTimeIso(s) => s.clone(),
Data::DurationIso(s) => s.clone(),
};
let key = format!("{row_idx},{col_idx}");
data.insert(key, CellData {
value: Some(value),
formula: None,
style: None,
format: None,
note: None,
});
}
}
worksheets.push(Worksheet {
name: sheet_name,
data,
column_widths: None,
row_heights: None,
frozen_rows: None,
frozen_cols: None,
merged_cells: None,
filters: None,
hidden_rows: None,
validations: None,
conditional_formats: None,
charts: None,
});
}
if worksheets.is_empty() {
return Err((StatusCode::BAD_REQUEST, Json(serde_json::json!({ "error": "Spreadsheet has no sheets" }))));
}
Ok(worksheets)
}
pub async fn handle_save_sheet(
State(state): State<Arc<AppState>>,
Json(req): Json<SaveRequest>,
@ -1967,6 +2145,12 @@ pub async fn handle_export_sheet(
let csv = export_to_csv(&sheet);
Ok(([(axum::http::header::CONTENT_TYPE, "text/csv")], csv))
}
"xlsx" => {
let xlsx = export_to_xlsx(&sheet).map_err(|e| {
(StatusCode::INTERNAL_SERVER_ERROR, Json(serde_json::json!({ "error": e })))
})?;
Ok(([(axum::http::header::CONTENT_TYPE, "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet")], xlsx))
}
"json" => {
let json = serde_json::to_string_pretty(&sheet).unwrap_or_default();
Ok(([(axum::http::header::CONTENT_TYPE, "application/json")], json))
@ -1975,6 +2159,125 @@ pub async fn handle_export_sheet(
}
}
/// Serializes a `Spreadsheet` into an XLSX workbook and returns it as a
/// base64-encoded string (the export endpoint ships it as text).
///
/// Carries over cell styling (colours, bold, italic, alignment, font size),
/// formulas, numeric coercion, column widths, row heights and freeze panes.
/// (A dead pre-pass that computed max_row/max_col without using them was
/// removed.)
fn export_to_xlsx(sheet: &Spreadsheet) -> Result<String, String> {
    let mut workbook = Workbook::new();
    for ws in &sheet.worksheets {
        let worksheet = workbook.add_worksheet();
        worksheet.set_name(&ws.name).map_err(|e| e.to_string())?;
        for (key, cell) in &ws.data {
            // Keys are "row,col" strings; skip anything malformed.
            let parts: Vec<&str> = key.split(',').collect();
            if parts.len() != 2 {
                continue;
            }
            let (row, col) = match (parts[0].parse::<u32>(), parts[1].parse::<u16>()) {
                (Ok(r), Ok(c)) => (r, c),
                _ => continue,
            };
            let value = cell.value.as_deref().unwrap_or("");
            let mut format = Format::new();
            if let Some(ref style) = cell.style {
                if let Some(ref bg) = style.background {
                    if let Some(color) = parse_color(bg) {
                        format = format.set_background_color(color);
                    }
                }
                if let Some(ref fg) = style.color {
                    if let Some(color) = parse_color(fg) {
                        format = format.set_font_color(color);
                    }
                }
                if let Some(ref weight) = style.font_weight {
                    if weight == "bold" {
                        format = format.set_bold();
                    }
                }
                if let Some(ref style_val) = style.font_style {
                    if style_val == "italic" {
                        format = format.set_italic();
                    }
                }
                if let Some(ref align) = style.text_align {
                    format = match align.as_str() {
                        "center" => format.set_align(FormatAlign::Center),
                        "right" => format.set_align(FormatAlign::Right),
                        _ => format.set_align(FormatAlign::Left),
                    };
                }
                if let Some(ref size) = style.font_size {
                    format = format.set_font_size(*size as f64);
                }
            }
            // Formula wins over the literal; numeric-looking values are
            // written as numbers so Excel can aggregate them, the rest as
            // text.
            if let Some(ref formula) = cell.formula {
                worksheet.write_formula_with_format(row, col, formula, &format)
                    .map_err(|e| e.to_string())?;
            } else if let Ok(num) = value.parse::<f64>() {
                worksheet.write_number_with_format(row, col, num, &format)
                    .map_err(|e| e.to_string())?;
            } else {
                worksheet.write_string_with_format(row, col, value, &format)
                    .map_err(|e| e.to_string())?;
            }
        }
        if let Some(ref widths) = ws.column_widths {
            for (col_str, width) in widths {
                if let Ok(col) = col_str.parse::<u16>() {
                    worksheet.set_column_width(col, *width).map_err(|e| e.to_string())?;
                }
            }
        }
        if let Some(ref heights) = ws.row_heights {
            for (row_str, height) in heights {
                if let Ok(row) = row_str.parse::<u32>() {
                    worksheet.set_row_height(row, *height).map_err(|e| e.to_string())?;
                }
            }
        }
        // NOTE(review): panes freeze only when BOTH frozen_rows and
        // frozen_cols are set — confirm whether rows-only freezing should
        // pass 0 for the column count instead.
        if let Some(frozen_rows) = ws.frozen_rows {
            if let Some(frozen_cols) = ws.frozen_cols {
                worksheet.set_freeze_panes(frozen_rows, frozen_cols as u16)
                    .map_err(|e| e.to_string())?;
            }
        }
    }
    let buffer = workbook.save_to_buffer().map_err(|e| e.to_string())?;
    Ok(base64::engine::general_purpose::STANDARD.encode(&buffer))
}
/// Parses a CSS-style hex colour ("#RRGGBB" or shorthand "#RGB", leading '#'
/// optional) into an xlsxwriter `Color`. Returns `None` for anything else.
fn parse_color(color_str: &str) -> Option<Color> {
    let hex = color_str.trim_start_matches('#');
    // Guard the byte-index slicing below: on multi-byte UTF-8 input a slice
    // like &hex[0..2] could split a character and panic.
    if !hex.is_ascii() {
        return None;
    }
    let (r, g, b) = match hex.len() {
        6 => (
            u8::from_str_radix(&hex[0..2], 16).ok()?,
            u8::from_str_radix(&hex[2..4], 16).ok()?,
            u8::from_str_radix(&hex[4..6], 16).ok()?,
        ),
        // Shorthand "#abc" expands to "#aabbcc": each digit doubled (x * 17).
        3 => {
            let digit = |i: usize| u8::from_str_radix(&hex[i..=i], 16).ok();
            (digit(0)? * 17, digit(1)? * 17, digit(2)? * 17)
        }
        _ => return None,
    };
    Some(Color::RGB(((r as u32) << 16) | ((g as u32) << 8) | (b as u32)))
}
fn export_to_csv(sheet: &Spreadsheet) -> String {
let mut csv = String::new();
if let Some(worksheet) = sheet.worksheets.first() {

View file

@ -12,6 +12,7 @@ use axum::{
use chrono::{DateTime, Utc};
use futures_util::{SinkExt, StreamExt};
use log::{error, info};
use ppt_rs::{Pptx, Slide as PptSlide, TextBox, Shape, ShapeType};
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use std::sync::Arc;
@ -1159,10 +1160,89 @@ pub async fn handle_export_presentation(
let html = export_to_html(&presentation);
Ok(([(axum::http::header::CONTENT_TYPE, "text/html")], html))
}
"pptx" => {
match export_to_pptx(&presentation) {
Ok(bytes) => {
let encoded = base64::engine::general_purpose::STANDARD.encode(&bytes);
Ok(([(axum::http::header::CONTENT_TYPE, "application/vnd.openxmlformats-officedocument.presentationml.presentation")], encoded))
}
Err(e) => Err((StatusCode::INTERNAL_SERVER_ERROR, Json(serde_json::json!({ "error": e })))),
}
}
_ => Err((StatusCode::BAD_REQUEST, Json(serde_json::json!({ "error": "Unsupported format" })))),
}
}
/// Converts an in-memory `Presentation` into PPTX bytes via ppt-rs.
///
/// Supported element types: "text" (as positioned text boxes, carrying font
/// size/bold/colour) and "shape" (rectangle/ellipse/triangle with optional
/// fill). Any other element type is silently skipped.
fn export_to_pptx(presentation: &Presentation) -> Result<Vec<u8>, String> {
    let mut pptx = Pptx::new();
    for slide in &presentation.slides {
        let mut ppt_slide = PptSlide::new();
        for element in &slide.elements {
            match element.element_type.as_str() {
                "text" => {
                    let content = element.content.as_deref().unwrap_or("");
                    // NOTE(review): x/y/width/height are passed through
                    // unconverted — confirm the editor's units match what
                    // ppt-rs expects (EMU vs px vs pt).
                    let x = element.x as f64;
                    let y = element.y as f64;
                    let width = element.width as f64;
                    let height = element.height as f64;
                    let mut text_box = TextBox::new(content)
                        .position(x, y)
                        .size(width, height);
                    if let Some(ref style) = element.style {
                        if let Some(size) = style.font_size {
                            text_box = text_box.font_size(size as f64);
                        }
                        if let Some(ref weight) = style.font_weight {
                            if weight == "bold" {
                                text_box = text_box.bold(true);
                            }
                        }
                        if let Some(ref color) = style.color {
                            text_box = text_box.font_color(color);
                        }
                    }
                    ppt_slide = ppt_slide.add_text_box(text_box);
                }
                "shape" => {
                    // Unknown shape names fall back to a rectangle.
                    let shape_type = element.shape_type.as_deref().unwrap_or("rectangle");
                    let x = element.x as f64;
                    let y = element.y as f64;
                    let width = element.width as f64;
                    let height = element.height as f64;
                    let ppt_shape_type = match shape_type {
                        "ellipse" | "circle" => ShapeType::Ellipse,
                        "triangle" => ShapeType::Triangle,
                        _ => ShapeType::Rectangle,
                    };
                    let mut shape = Shape::new(ppt_shape_type)
                        .position(x, y)
                        .size(width, height);
                    if let Some(ref style) = element.style {
                        if let Some(ref fill) = style.background {
                            shape = shape.fill_color(fill);
                        }
                    }
                    ppt_slide = ppt_slide.add_shape(shape);
                }
                // Unsupported element types (images, videos, ...) are dropped.
                _ => {}
            }
        }
        pptx = pptx.add_slide(ppt_slide);
    }
    pptx.save_to_buffer().map_err(|e| format!("Failed to generate PPTX: {}", e))
}
fn export_to_html(presentation: &Presentation) -> String {
let mut html = format!(
r#"<!DOCTYPE html>