2025-11-26 22:54:22 -03:00
|
|
|
use crate::shared::models::UserSession;
|
|
|
|
|
use crate::shared::state::AppState;
|
|
|
|
|
use diesel::prelude::*;
|
|
|
|
|
use log::{error, info, trace};
|
|
|
|
|
use rhai::{Dynamic, Engine};
|
|
|
|
|
use std::sync::Arc;
|
|
|
|
|
use uuid::Uuid;
|
|
|
|
|
|
|
|
|
|
pub fn use_website_keyword(state: Arc<AppState>, user: UserSession, engine: &mut Engine) {
|
|
|
|
|
let state_clone = Arc::clone(&state);
|
2025-12-26 08:59:25 -03:00
|
|
|
let user_clone = user;
|
2025-11-26 22:54:22 -03:00
|
|
|
|
|
|
|
|
engine
|
2025-11-30 22:33:54 -03:00
|
|
|
.register_custom_syntax(
|
2025-12-26 08:59:25 -03:00
|
|
|
["USE", "WEBSITE", "$expr$"],
|
2025-11-30 22:33:54 -03:00
|
|
|
false,
|
|
|
|
|
move |context, inputs| {
|
|
|
|
|
let url = context.eval_expression_tree(&inputs[0])?;
|
|
|
|
|
let url_str = url.to_string().trim_matches('"').to_string();
|
|
|
|
|
|
|
|
|
|
trace!(
|
|
|
|
|
"USE WEBSITE command executed: {} for session: {}",
|
|
|
|
|
url_str,
|
|
|
|
|
user_clone.id
|
|
|
|
|
);
|
|
|
|
|
|
|
|
|
|
let is_valid = url_str.starts_with("http://") || url_str.starts_with("https://");
|
|
|
|
|
if !is_valid {
|
|
|
|
|
return Err(Box::new(rhai::EvalAltResult::ErrorRuntime(
|
|
|
|
|
"Invalid URL format. Must start with http:// or https://".into(),
|
|
|
|
|
rhai::Position::NONE,
|
|
|
|
|
)));
|
2025-11-26 22:54:22 -03:00
|
|
|
}
|
2025-11-30 22:33:54 -03:00
|
|
|
|
|
|
|
|
let state_for_task = Arc::clone(&state_clone);
|
|
|
|
|
let user_for_task = user_clone.clone();
|
2025-12-26 08:59:25 -03:00
|
|
|
let url_for_task = url_str;
|
2025-11-30 22:33:54 -03:00
|
|
|
let (tx, rx) = std::sync::mpsc::channel();
|
|
|
|
|
|
|
|
|
|
std::thread::spawn(move || {
|
|
|
|
|
let rt = tokio::runtime::Builder::new_multi_thread()
|
|
|
|
|
.worker_threads(2)
|
|
|
|
|
.enable_all()
|
|
|
|
|
.build();
|
|
|
|
|
|
feat(autotask): Implement AutoTask system with intent classification and app generation
- Add IntentClassifier with 7 intent types (APP_CREATE, TODO, MONITOR, ACTION, SCHEDULE, GOAL, TOOL)
- Add AppGenerator with LLM-powered app structure analysis
- Add DesignerAI for modifying apps through conversation
- Add app_server for serving generated apps with clean URLs
- Add db_api for CRUD operations on bot database tables
- Add ask_later keyword for pending info collection
- Add migration 6.1.1 with tables: pending_info, auto_tasks, execution_plans, task_approvals, task_decisions, safety_audit_log, generated_apps, intent_classifications, designer_changes
- Write apps to S3 drive and sync to SITE_ROOT for serving
- Clean URL structure: /apps/{app_name}/
- Integrate with DriveMonitor for file sync
Based on Chapter 17 - Autonomous Tasks specification
2025-12-27 21:10:09 -03:00
|
|
|
let send_err = if let Ok(_rt) = rt {
|
|
|
|
|
let result = associate_website_with_session(
|
|
|
|
|
&state_for_task,
|
|
|
|
|
&user_for_task,
|
|
|
|
|
&url_for_task,
|
|
|
|
|
);
|
2025-11-30 22:33:54 -03:00
|
|
|
tx.send(result).err()
|
|
|
|
|
} else {
|
|
|
|
|
tx.send(Err("Failed to build tokio runtime".to_string()))
|
|
|
|
|
.err()
|
|
|
|
|
};
|
|
|
|
|
|
|
|
|
|
if send_err.is_some() {
|
|
|
|
|
error!("Failed to send result from thread");
|
|
|
|
|
}
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
match rx.recv_timeout(std::time::Duration::from_secs(10)) {
|
|
|
|
|
Ok(Ok(message)) => Ok(Dynamic::from(message)),
|
|
|
|
|
Ok(Err(e)) => Err(Box::new(rhai::EvalAltResult::ErrorRuntime(
|
|
|
|
|
e.into(),
|
2025-11-26 22:54:22 -03:00
|
|
|
rhai::Position::NONE,
|
2025-11-30 22:33:54 -03:00
|
|
|
))),
|
|
|
|
|
Err(std::sync::mpsc::RecvTimeoutError::Timeout) => {
|
|
|
|
|
Err(Box::new(rhai::EvalAltResult::ErrorRuntime(
|
|
|
|
|
"USE WEBSITE timed out".into(),
|
|
|
|
|
rhai::Position::NONE,
|
|
|
|
|
)))
|
|
|
|
|
}
|
|
|
|
|
Err(e) => Err(Box::new(rhai::EvalAltResult::ErrorRuntime(
|
|
|
|
|
format!("USE WEBSITE failed: {}", e).into(),
|
|
|
|
|
rhai::Position::NONE,
|
|
|
|
|
))),
|
2025-11-26 22:54:22 -03:00
|
|
|
}
|
2025-11-30 22:33:54 -03:00
|
|
|
},
|
|
|
|
|
)
|
feat(security): Complete security infrastructure implementation
SECURITY MODULES ADDED:
- security/auth.rs: Full RBAC with roles (Anonymous, User, Moderator, Admin, SuperAdmin, Service, Bot, BotOwner, BotOperator, BotViewer) and permissions
- security/cors.rs: Hardened CORS (no wildcard in production, env-based config)
- security/panic_handler.rs: Panic catching middleware with safe 500 responses
- security/path_guard.rs: Path traversal protection, null byte prevention
- security/request_id.rs: UUID request tracking with correlation IDs
- security/error_sanitizer.rs: Sensitive data redaction from responses
- security/zitadel_auth.rs: Zitadel token introspection and role mapping
- security/sql_guard.rs: SQL injection prevention with table whitelist
- security/command_guard.rs: Command injection prevention
- security/secrets.rs: Zeroizing secret management
- security/validation.rs: Input validation utilities
- security/rate_limiter.rs: Rate limiting with governor crate
- security/headers.rs: Security headers (CSP, HSTS, X-Frame-Options)
MAIN.RS UPDATES:
- Replaced tower_http::cors::Any with hardened create_cors_layer()
- Added panic handler middleware
- Added request ID tracking middleware
- Set global panic hook
SECURITY STATUS:
- 0 unwrap() in production code
- 0 panic! in production code
- 0 unsafe blocks
- cargo audit: PASS (no vulnerabilities)
- Estimated completion: ~98%
Remaining: Wire auth middleware to handlers, audit logs for sensitive data
2025-12-28 19:29:18 -03:00
|
|
|
.expect("valid syntax registration");
|
2025-11-26 22:54:22 -03:00
|
|
|
}
|
|
|
|
|
|
feat(autotask): Implement AutoTask system with intent classification and app generation
- Add IntentClassifier with 7 intent types (APP_CREATE, TODO, MONITOR, ACTION, SCHEDULE, GOAL, TOOL)
- Add AppGenerator with LLM-powered app structure analysis
- Add DesignerAI for modifying apps through conversation
- Add app_server for serving generated apps with clean URLs
- Add db_api for CRUD operations on bot database tables
- Add ask_later keyword for pending info collection
- Add migration 6.1.1 with tables: pending_info, auto_tasks, execution_plans, task_approvals, task_decisions, safety_audit_log, generated_apps, intent_classifications, designer_changes
- Write apps to S3 drive and sync to SITE_ROOT for serving
- Clean URL structure: /apps/{app_name}/
- Integrate with DriveMonitor for file sync
Based on Chapter 17 - Autonomous Tasks specification
2025-12-27 21:10:09 -03:00
|
|
|
fn associate_website_with_session(
|
2025-11-26 22:54:22 -03:00
|
|
|
state: &AppState,
|
|
|
|
|
user: &UserSession,
|
|
|
|
|
url: &str,
|
|
|
|
|
) -> Result<String, String> {
|
|
|
|
|
info!("Associating website {} with session {}", url, user.id);
|
|
|
|
|
|
|
|
|
|
let mut conn = state.conn.get().map_err(|e| format!("DB error: {}", e))?;
|
|
|
|
|
|
|
|
|
|
let collection_name = format!("website_{}", sanitize_url_for_collection(url));
|
|
|
|
|
|
|
|
|
|
let website_status = check_website_crawl_status(&mut conn, &user.bot_id, url)?;
|
|
|
|
|
|
|
|
|
|
match website_status {
|
|
|
|
|
WebsiteCrawlStatus::NotRegistered => {
|
|
|
|
|
return Err(format!(
|
|
|
|
|
"Website {} has not been registered for crawling. It should be added to the script for preprocessing.",
|
|
|
|
|
url
|
|
|
|
|
));
|
|
|
|
|
}
|
|
|
|
|
WebsiteCrawlStatus::Pending => {
|
|
|
|
|
info!("Website {} is pending crawl, associating anyway", url);
|
|
|
|
|
}
|
|
|
|
|
WebsiteCrawlStatus::Crawled => {
|
|
|
|
|
info!("Website {} is already crawled and ready", url);
|
|
|
|
|
}
|
|
|
|
|
WebsiteCrawlStatus::Failed => {
|
|
|
|
|
return Err(format!(
|
|
|
|
|
"Website {} crawling failed. Please check the logs.",
|
|
|
|
|
url
|
|
|
|
|
));
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
add_website_to_session(&mut conn, &user.id, &user.bot_id, url, &collection_name)?;
|
|
|
|
|
|
|
|
|
|
Ok(format!(
|
|
|
|
|
"Website {} is now available in this conversation.",
|
|
|
|
|
url
|
|
|
|
|
))
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
enum WebsiteCrawlStatus {
|
|
|
|
|
NotRegistered,
|
|
|
|
|
Pending,
|
|
|
|
|
Crawled,
|
|
|
|
|
Failed,
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
fn check_website_crawl_status(
|
|
|
|
|
conn: &mut PgConnection,
|
|
|
|
|
bot_id: &Uuid,
|
|
|
|
|
url: &str,
|
|
|
|
|
) -> Result<WebsiteCrawlStatus, String> {
|
|
|
|
|
#[derive(QueryableByName)]
|
|
|
|
|
struct CrawlStatus {
|
|
|
|
|
#[diesel(sql_type = diesel::sql_types::Nullable<diesel::sql_types::Integer>)]
|
|
|
|
|
crawl_status: Option<i32>,
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
let query =
|
|
|
|
|
diesel::sql_query("SELECT crawl_status FROM website_crawls WHERE bot_id = $1 AND url = $2")
|
|
|
|
|
.bind::<diesel::sql_types::Uuid, _>(bot_id)
|
|
|
|
|
.bind::<diesel::sql_types::Text, _>(url);
|
|
|
|
|
|
|
|
|
|
let result: Result<CrawlStatus, _> = query.get_result(conn);
|
|
|
|
|
|
|
|
|
|
match result {
|
|
|
|
|
Ok(status) => match status.crawl_status {
|
|
|
|
|
Some(0) => Ok(WebsiteCrawlStatus::Pending),
|
|
|
|
|
Some(1) => Ok(WebsiteCrawlStatus::Crawled),
|
|
|
|
|
Some(2) => Ok(WebsiteCrawlStatus::Failed),
|
|
|
|
|
_ => Ok(WebsiteCrawlStatus::NotRegistered),
|
|
|
|
|
},
|
|
|
|
|
Err(_) => Ok(WebsiteCrawlStatus::NotRegistered),
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
pub fn register_website_for_crawling(
|
|
|
|
|
conn: &mut PgConnection,
|
|
|
|
|
bot_id: &Uuid,
|
|
|
|
|
url: &str,
|
|
|
|
|
) -> Result<(), String> {
|
2025-12-23 18:40:58 -03:00
|
|
|
let expires_policy = "1d";
|
2025-11-26 22:54:22 -03:00
|
|
|
|
|
|
|
|
let query = diesel::sql_query(
|
|
|
|
|
"INSERT INTO website_crawls (id, bot_id, url, expires_policy, crawl_status, next_crawl)
|
|
|
|
|
VALUES (gen_random_uuid(), $1, $2, $3, 0, NOW())
|
|
|
|
|
ON CONFLICT (bot_id, url) DO UPDATE SET next_crawl =
|
|
|
|
|
CASE
|
|
|
|
|
WHEN website_crawls.crawl_status = 2 THEN NOW() -- Failed, retry now
|
|
|
|
|
ELSE website_crawls.next_crawl -- Keep existing schedule
|
|
|
|
|
END",
|
|
|
|
|
)
|
|
|
|
|
.bind::<diesel::sql_types::Uuid, _>(bot_id)
|
|
|
|
|
.bind::<diesel::sql_types::Text, _>(url)
|
|
|
|
|
.bind::<diesel::sql_types::Text, _>(expires_policy);
|
|
|
|
|
|
|
|
|
|
query
|
|
|
|
|
.execute(conn)
|
|
|
|
|
.map_err(|e| format!("Failed to register website for crawling: {}", e))?;
|
|
|
|
|
|
|
|
|
|
info!("Website {} registered for crawling for bot {}", url, bot_id);
|
|
|
|
|
Ok(())
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
pub fn execute_use_website_preprocessing(
|
|
|
|
|
conn: &mut PgConnection,
|
|
|
|
|
url: &str,
|
|
|
|
|
bot_id: Uuid,
|
|
|
|
|
) -> Result<serde_json::Value, Box<dyn std::error::Error>> {
|
|
|
|
|
trace!("Preprocessing USE_WEBSITE: {}, bot_id: {:?}", url, bot_id);
|
|
|
|
|
|
|
|
|
|
if !url.starts_with("http://") && !url.starts_with("https://") {
|
|
|
|
|
return Err(format!(
|
|
|
|
|
"Invalid URL format: {}. Must start with http:// or https://",
|
|
|
|
|
url
|
|
|
|
|
)
|
|
|
|
|
.into());
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
register_website_for_crawling(conn, &bot_id, url)?;
|
|
|
|
|
|
|
|
|
|
Ok(serde_json::json!({
|
|
|
|
|
"command": "use_website",
|
|
|
|
|
"url": url,
|
|
|
|
|
"bot_id": bot_id.to_string(),
|
|
|
|
|
"status": "registered_for_crawling"
|
|
|
|
|
}))
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
fn add_website_to_session(
|
|
|
|
|
conn: &mut PgConnection,
|
|
|
|
|
session_id: &Uuid,
|
|
|
|
|
bot_id: &Uuid,
|
|
|
|
|
url: &str,
|
|
|
|
|
collection_name: &str,
|
|
|
|
|
) -> Result<(), String> {
|
|
|
|
|
let assoc_id = Uuid::new_v4();
|
|
|
|
|
|
|
|
|
|
diesel::sql_query(
|
|
|
|
|
"INSERT INTO session_website_associations
|
|
|
|
|
(id, session_id, bot_id, website_url, collection_name, is_active, added_at)
|
|
|
|
|
VALUES ($1, $2, $3, $4, $5, true, NOW())
|
|
|
|
|
ON CONFLICT (session_id, website_url)
|
|
|
|
|
DO UPDATE SET is_active = true, added_at = NOW()",
|
|
|
|
|
)
|
|
|
|
|
.bind::<diesel::sql_types::Uuid, _>(assoc_id)
|
|
|
|
|
.bind::<diesel::sql_types::Uuid, _>(session_id)
|
|
|
|
|
.bind::<diesel::sql_types::Uuid, _>(bot_id)
|
|
|
|
|
.bind::<diesel::sql_types::Text, _>(url)
|
|
|
|
|
.bind::<diesel::sql_types::Text, _>(collection_name)
|
|
|
|
|
.execute(conn)
|
|
|
|
|
.map_err(|e| format!("Failed to add website to session: {}", e))?;
|
|
|
|
|
|
|
|
|
|
info!(
|
2025-12-09 07:55:11 -03:00
|
|
|
" Added website '{}' to session {} (collection: {})",
|
2025-11-26 22:54:22 -03:00
|
|
|
url, session_id, collection_name
|
|
|
|
|
);
|
|
|
|
|
|
|
|
|
|
Ok(())
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
pub fn clear_websites_keyword(state: Arc<AppState>, user: UserSession, engine: &mut Engine) {
|
|
|
|
|
let state_clone = Arc::clone(&state);
|
2025-12-26 08:59:25 -03:00
|
|
|
let user_clone = user;
|
2025-11-26 22:54:22 -03:00
|
|
|
|
|
|
|
|
engine
|
2025-12-26 08:59:25 -03:00
|
|
|
.register_custom_syntax(["CLEAR", "WEBSITES"], true, move |_context, _inputs| {
|
2025-11-26 22:54:22 -03:00
|
|
|
info!(
|
2025-11-30 22:33:54 -03:00
|
|
|
"CLEAR WEBSITES keyword executed for session: {}",
|
2025-11-26 22:54:22 -03:00
|
|
|
user_clone.id
|
|
|
|
|
);
|
|
|
|
|
|
|
|
|
|
let session_id = user_clone.id;
|
|
|
|
|
let conn = state_clone.conn.clone();
|
|
|
|
|
|
|
|
|
|
let result = std::thread::spawn(move || clear_all_websites(conn, session_id)).join();
|
|
|
|
|
|
|
|
|
|
match result {
|
|
|
|
|
Ok(Ok(count)) => {
|
|
|
|
|
info!(
|
|
|
|
|
"Successfully cleared {} websites from session {}",
|
|
|
|
|
count, user_clone.id
|
|
|
|
|
);
|
|
|
|
|
Ok(Dynamic::from(format!(
|
|
|
|
|
"{} website(s) removed from conversation",
|
|
|
|
|
count
|
|
|
|
|
)))
|
|
|
|
|
}
|
|
|
|
|
Ok(Err(e)) => {
|
|
|
|
|
error!("Failed to clear websites: {}", e);
|
|
|
|
|
Err(format!("CLEAR_WEBSITES failed: {}", e).into())
|
|
|
|
|
}
|
|
|
|
|
Err(e) => {
|
|
|
|
|
error!("Thread panic in CLEAR_WEBSITES: {:?}", e);
|
|
|
|
|
Err("CLEAR_WEBSITES failed: thread panic".into())
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
})
|
feat(security): Complete security infrastructure implementation
SECURITY MODULES ADDED:
- security/auth.rs: Full RBAC with roles (Anonymous, User, Moderator, Admin, SuperAdmin, Service, Bot, BotOwner, BotOperator, BotViewer) and permissions
- security/cors.rs: Hardened CORS (no wildcard in production, env-based config)
- security/panic_handler.rs: Panic catching middleware with safe 500 responses
- security/path_guard.rs: Path traversal protection, null byte prevention
- security/request_id.rs: UUID request tracking with correlation IDs
- security/error_sanitizer.rs: Sensitive data redaction from responses
- security/zitadel_auth.rs: Zitadel token introspection and role mapping
- security/sql_guard.rs: SQL injection prevention with table whitelist
- security/command_guard.rs: Command injection prevention
- security/secrets.rs: Zeroizing secret management
- security/validation.rs: Input validation utilities
- security/rate_limiter.rs: Rate limiting with governor crate
- security/headers.rs: Security headers (CSP, HSTS, X-Frame-Options)
MAIN.RS UPDATES:
- Replaced tower_http::cors::Any with hardened create_cors_layer()
- Added panic handler middleware
- Added request ID tracking middleware
- Set global panic hook
SECURITY STATUS:
- 0 unwrap() in production code
- 0 panic! in production code
- 0 unsafe blocks
- cargo audit: PASS (no vulnerabilities)
- Estimated completion: ~98%
Remaining: Wire auth middleware to handlers, audit logs for sensitive data
2025-12-28 19:29:18 -03:00
|
|
|
.expect("valid syntax registration");
|
2025-11-26 22:54:22 -03:00
|
|
|
}
|
|
|
|
|
|
|
|
|
|
fn clear_all_websites(
|
|
|
|
|
conn_pool: crate::shared::utils::DbPool,
|
|
|
|
|
session_id: Uuid,
|
|
|
|
|
) -> Result<usize, String> {
|
|
|
|
|
let mut conn = conn_pool
|
|
|
|
|
.get()
|
|
|
|
|
.map_err(|e| format!("Failed to get DB connection: {}", e))?;
|
|
|
|
|
|
|
|
|
|
let rows_affected = diesel::sql_query(
|
|
|
|
|
"UPDATE session_website_associations
|
|
|
|
|
SET is_active = false
|
|
|
|
|
WHERE session_id = $1 AND is_active = true",
|
|
|
|
|
)
|
|
|
|
|
.bind::<diesel::sql_types::Uuid, _>(session_id)
|
|
|
|
|
.execute(&mut conn)
|
|
|
|
|
.map_err(|e| format!("Failed to clear websites: {}", e))?;
|
|
|
|
|
|
|
|
|
|
Ok(rows_affected)
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
pub fn get_active_websites_for_session(
|
|
|
|
|
conn_pool: &crate::shared::utils::DbPool,
|
|
|
|
|
session_id: Uuid,
|
|
|
|
|
) -> Result<Vec<(String, String)>, String> {
|
|
|
|
|
let mut conn = conn_pool
|
|
|
|
|
.get()
|
|
|
|
|
.map_err(|e| format!("Failed to get DB connection: {}", e))?;
|
|
|
|
|
|
|
|
|
|
#[derive(QueryableByName, Debug)]
|
|
|
|
|
struct ActiveWebsiteResult {
|
|
|
|
|
#[diesel(sql_type = diesel::sql_types::Text)]
|
|
|
|
|
website_url: String,
|
|
|
|
|
#[diesel(sql_type = diesel::sql_types::Text)]
|
|
|
|
|
collection_name: String,
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
let results: Vec<ActiveWebsiteResult> = diesel::sql_query(
|
|
|
|
|
"SELECT website_url, collection_name
|
|
|
|
|
FROM session_website_associations
|
|
|
|
|
WHERE session_id = $1 AND is_active = true
|
|
|
|
|
ORDER BY added_at DESC",
|
|
|
|
|
)
|
|
|
|
|
.bind::<diesel::sql_types::Uuid, _>(session_id)
|
|
|
|
|
.load(&mut conn)
|
|
|
|
|
.map_err(|e| format!("Failed to get active websites: {}", e))?;
|
|
|
|
|
|
|
|
|
|
Ok(results
|
|
|
|
|
.into_iter()
|
|
|
|
|
.map(|r| (r.website_url, r.collection_name))
|
|
|
|
|
.collect())
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
fn sanitize_url_for_collection(url: &str) -> String {
|
|
|
|
|
url.replace("http://", "")
|
|
|
|
|
.replace("https://", "")
|
2025-12-26 08:59:25 -03:00
|
|
|
.replace(['/', ':', '.'], "_")
|
2025-11-26 22:54:22 -03:00
|
|
|
.chars()
|
|
|
|
|
.filter(|c| c.is_alphanumeric() || *c == '_' || *c == '-')
|
|
|
|
|
.collect::<String>()
|
|
|
|
|
.to_lowercase()
|
|
|
|
|
}
|
2025-12-26 08:59:25 -03:00
|
|
|
|
|
|
|
|
#[cfg(test)]
|
|
|
|
|
mod tests {
|
|
|
|
|
use super::*;
|
|
|
|
|
|
|
|
|
|
#[test]
|
|
|
|
|
fn test_url_sanitization() {
|
|
|
|
|
assert_eq!(
|
|
|
|
|
sanitize_url_for_collection("https://docs.example.com/path"),
|
|
|
|
|
"docs_example_com_path"
|
|
|
|
|
);
|
|
|
|
|
assert_eq!(
|
|
|
|
|
sanitize_url_for_collection("http://test.site:8080"),
|
|
|
|
|
"test_site_8080"
|
|
|
|
|
);
|
|
|
|
|
}
|
|
|
|
|
}
|