2025-11-02 20:57:53 -03:00
|
|
|
use crate::basic::keywords::set_schedule::execute_set_schedule;
|
2025-11-30 15:07:29 -03:00
|
|
|
use crate::basic::keywords::table_definition::process_table_definitions;
|
Add tar/flate2 deps and document new BASIC keywords
Add flate2 and tar dependencies for archive extraction support in file
operations. Update documentation with:
- New BASIC keywords: SWITCH/CASE, WEBHOOK, INSTR, IS_NUMERIC
- HTTP operations: POST, PUT, PATCH, DELETE_HTTP, GRAPHQL, SOAP
- Data operations: SAVE, INSERT, UPDATE, DELETE, MERGE, FILTER, etc.
- File operations: READ, WRITE, COMPRESS, EXTRACT, GENERATE_PDF, etc.
Simplify README and add appendices for external services and environment
variables. Add monitoring dashboard and player UI docs.
2025-11-30 07:53:58 -03:00
|
|
|
use crate::basic::keywords::webhook::execute_webhook_registration;
|
2025-11-11 09:42:52 -03:00
|
|
|
use crate::shared::models::TriggerKind;
|
|
|
|
|
use crate::shared::state::AppState;
|
|
|
|
|
use diesel::ExpressionMethods;
|
|
|
|
|
use diesel::QueryDsl;
|
|
|
|
|
use diesel::RunQueryDsl;
|
2025-11-06 16:15:54 -03:00
|
|
|
use log::warn;
|
2025-10-18 18:20:02 -03:00
|
|
|
use serde::{Deserialize, Serialize};
|
|
|
|
|
use std::collections::HashMap;
|
2025-11-05 10:15:36 -03:00
|
|
|
use std::collections::HashSet;
|
2025-10-18 18:20:02 -03:00
|
|
|
use std::error::Error;
|
|
|
|
|
use std::fs;
|
|
|
|
|
use std::path::Path;
|
|
|
|
|
use std::sync::Arc;
|
|
|
|
|
/// A single `PARAM` declaration parsed from a BASIC tool script.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ParamDeclaration {
    /// Parameter name (the second whitespace-separated token of the `PARAM` line).
    pub name: String,
    /// JSON-schema type name after normalization ("string", "integer", "number", ...).
    pub param_type: String,
    /// Example value taken from an optional `LIKE "..."` clause.
    pub example: Option<String>,
    /// Human-readable text from an optional `DESCRIPTION "..."` clause; empty when absent.
    pub description: String,
    /// Whether the parameter is required (the parser currently marks every param required).
    pub required: bool,
}
|
|
|
|
|
/// Intermediate representation of one BASIC script's tool interface,
/// built by `parse_tool_definition` and consumed by the MCP/OpenAI generators.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ToolDefinition {
    /// Tool name, derived from the source file's stem.
    pub name: String,
    /// Tool-level description from the script's `DESCRIPTION` line.
    pub description: String,
    /// All `PARAM` declarations found in the script, in source order.
    pub parameters: Vec<ParamDeclaration>,
    /// Path of the .bas file this definition was parsed from.
    pub source_file: String,
}
|
|
|
|
|
/// Serializable MCP (Model Context Protocol) tool manifest.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct MCPTool {
    /// Tool name exposed to MCP clients.
    pub name: String,
    /// Tool description shown to the model.
    pub description: String,
    /// JSON-schema describing the tool's input object.
    pub input_schema: MCPInputSchema,
}
|
|
|
|
|
/// JSON-schema fragment for an MCP tool's input; always an "object" schema here.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct MCPInputSchema {
    // Serialized as "type" — `type` is a Rust keyword, hence the rename.
    #[serde(rename = "type")]
    pub schema_type: String,
    /// Map of parameter name to its property schema.
    pub properties: HashMap<String, MCPProperty>,
    /// Names of parameters that must be supplied.
    pub required: Vec<String>,
}
|
|
|
|
|
/// One property entry inside an MCP input schema.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct MCPProperty {
    // Serialized as "type" — `type` is a Rust keyword, hence the rename.
    #[serde(rename = "type")]
    pub prop_type: String,
    /// Description of this parameter.
    pub description: String,
    // Omitted from the JSON entirely when no example was declared.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub example: Option<String>,
}
|
|
|
|
|
/// OpenAI function-calling tool manifest (top-level wrapper).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct OpenAITool {
    // Serialized as "type"; always "function" for function-calling tools.
    #[serde(rename = "type")]
    pub tool_type: String,
    /// The function declaration itself.
    pub function: OpenAIFunction,
}
|
|
|
|
|
/// Function declaration inside an OpenAI tool manifest.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct OpenAIFunction {
    /// Function name exposed to the model.
    pub name: String,
    /// Function description shown to the model.
    pub description: String,
    /// JSON-schema for the function's arguments object.
    pub parameters: OpenAIParameters,
}
|
|
|
|
|
/// JSON-schema for an OpenAI function's parameters; always an "object" schema here.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct OpenAIParameters {
    // Serialized as "type" — `type` is a Rust keyword, hence the rename.
    #[serde(rename = "type")]
    pub param_type: String,
    /// Map of parameter name to its property schema.
    pub properties: HashMap<String, OpenAIProperty>,
    /// Names of parameters that must be supplied.
    pub required: Vec<String>,
}
|
|
|
|
|
/// One property entry inside an OpenAI parameters schema.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct OpenAIProperty {
    // Serialized as "type" — `type` is a Rust keyword, hence the rename.
    #[serde(rename = "type")]
    pub prop_type: String,
    /// Description of this parameter.
    pub description: String,
    // Omitted from the JSON entirely when no example was declared.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub example: Option<String>,
}
|
2025-11-22 12:26:16 -03:00
|
|
|
/// Compiles BASIC (.bas) scripts for a single bot: preprocesses the source,
/// emits an .ast file, and generates MCP / OpenAI tool manifests.
#[derive(Debug)]
pub struct BasicCompiler {
    /// Shared application state (holds the database connection pool).
    state: Arc<AppState>,
    /// Bot that owns the scripts being compiled.
    bot_id: uuid::Uuid,
    /// Script names that registered a `SET SCHEDULE` in an earlier compile;
    /// used to clean up stale schedules when a script drops its schedule.
    previous_schedules: HashSet<String>,
}
|
|
|
|
|
impl BasicCompiler {
|
2025-11-27 15:19:17 -03:00
|
|
|
#[must_use]
|
2025-11-02 20:57:53 -03:00
|
|
|
pub fn new(state: Arc<AppState>, bot_id: uuid::Uuid) -> Self {
|
2025-11-05 21:10:03 -03:00
|
|
|
Self {
|
|
|
|
|
state,
|
2025-11-05 10:15:36 -03:00
|
|
|
bot_id,
|
|
|
|
|
previous_schedules: HashSet::new(),
|
|
|
|
|
}
|
2025-10-18 18:20:02 -03:00
|
|
|
}
|
|
|
|
|
pub fn compile_file(
|
2025-11-05 10:15:36 -03:00
|
|
|
&mut self,
|
2025-10-18 18:20:02 -03:00
|
|
|
source_path: &str,
|
|
|
|
|
output_dir: &str,
|
|
|
|
|
) -> Result<CompilationResult, Box<dyn Error + Send + Sync>> {
|
|
|
|
|
let source_content = fs::read_to_string(source_path)
|
2025-11-27 15:19:17 -03:00
|
|
|
.map_err(|e| format!("Failed to read source file: {e}"))?;
|
2025-11-30 15:07:29 -03:00
|
|
|
|
|
|
|
|
// Process TABLE...END TABLE definitions (creates tables on external DBs)
|
|
|
|
|
if let Err(e) =
|
|
|
|
|
process_table_definitions(Arc::clone(&self.state), self.bot_id, &source_content)
|
|
|
|
|
{
|
|
|
|
|
log::warn!("Failed to process TABLE definitions: {}", e);
|
|
|
|
|
}
|
|
|
|
|
|
2025-10-18 18:20:02 -03:00
|
|
|
let tool_def = self.parse_tool_definition(&source_content, source_path)?;
|
|
|
|
|
let file_name = Path::new(source_path)
|
|
|
|
|
.file_stem()
|
|
|
|
|
.and_then(|s| s.to_str())
|
|
|
|
|
.ok_or("Invalid file name")?;
|
2025-11-27 15:19:17 -03:00
|
|
|
let ast_path = format!("{output_dir}/{file_name}.ast");
|
2025-11-02 20:57:53 -03:00
|
|
|
let ast_content = self.preprocess_basic(&source_content, source_path, self.bot_id)?;
|
2025-11-27 15:19:17 -03:00
|
|
|
fs::write(&ast_path, &ast_content).map_err(|e| format!("Failed to write AST file: {e}"))?;
|
|
|
|
|
let (mcp_json, tool_json) = if tool_def.parameters.is_empty() {
|
|
|
|
|
(None, None)
|
|
|
|
|
} else {
|
2025-10-18 18:20:02 -03:00
|
|
|
let mcp = self.generate_mcp_tool(&tool_def)?;
|
|
|
|
|
let openai = self.generate_openai_tool(&tool_def)?;
|
2025-11-27 15:19:17 -03:00
|
|
|
let mcp_path = format!("{output_dir}/{file_name}.mcp.json");
|
|
|
|
|
let tool_path = format!("{output_dir}/{file_name}.tool.json");
|
2025-10-18 18:20:02 -03:00
|
|
|
let mcp_json_str = serde_json::to_string_pretty(&mcp)?;
|
|
|
|
|
fs::write(&mcp_path, mcp_json_str)
|
2025-11-27 15:19:17 -03:00
|
|
|
.map_err(|e| format!("Failed to write MCP JSON: {e}"))?;
|
2025-10-18 18:20:02 -03:00
|
|
|
let tool_json_str = serde_json::to_string_pretty(&openai)?;
|
|
|
|
|
fs::write(&tool_path, tool_json_str)
|
2025-11-27 15:19:17 -03:00
|
|
|
.map_err(|e| format!("Failed to write tool JSON: {e}"))?;
|
2025-10-18 18:20:02 -03:00
|
|
|
(Some(mcp), Some(openai))
|
|
|
|
|
};
|
|
|
|
|
Ok(CompilationResult {
|
|
|
|
|
mcp_tool: mcp_json,
|
2025-11-07 09:37:45 -03:00
|
|
|
_openai_tool: tool_json,
|
2025-10-18 18:20:02 -03:00
|
|
|
})
|
|
|
|
|
}
|
2025-11-04 23:11:33 -03:00
|
|
|
pub fn parse_tool_definition(
|
2025-10-18 18:20:02 -03:00
|
|
|
&self,
|
|
|
|
|
source: &str,
|
|
|
|
|
source_path: &str,
|
|
|
|
|
) -> Result<ToolDefinition, Box<dyn Error + Send + Sync>> {
|
|
|
|
|
let mut params = Vec::new();
|
|
|
|
|
let mut description = String::new();
|
|
|
|
|
let lines: Vec<&str> = source.lines().collect();
|
|
|
|
|
let mut i = 0;
|
|
|
|
|
while i < lines.len() {
|
|
|
|
|
let line = lines[i].trim();
|
|
|
|
|
if line.starts_with("PARAM ") {
|
|
|
|
|
if let Some(param) = self.parse_param_line(line)? {
|
|
|
|
|
params.push(param);
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
if line.starts_with("DESCRIPTION ") {
|
|
|
|
|
let desc_start = line.find('"').unwrap_or(0);
|
|
|
|
|
let desc_end = line.rfind('"').unwrap_or(line.len());
|
|
|
|
|
if desc_start < desc_end {
|
|
|
|
|
description = line[desc_start + 1..desc_end].to_string();
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
i += 1;
|
|
|
|
|
}
|
|
|
|
|
let tool_name = Path::new(source_path)
|
|
|
|
|
.file_stem()
|
|
|
|
|
.and_then(|s| s.to_str())
|
|
|
|
|
.unwrap_or("unknown")
|
|
|
|
|
.to_string();
|
|
|
|
|
Ok(ToolDefinition {
|
|
|
|
|
name: tool_name,
|
|
|
|
|
description,
|
|
|
|
|
parameters: params,
|
|
|
|
|
source_file: source_path.to_string(),
|
|
|
|
|
})
|
|
|
|
|
}
|
|
|
|
|
/// Parse one `PARAM <name> AS <type> [LIKE "example"] [DESCRIPTION "text"]`
/// line into a [`ParamDeclaration`].
///
/// Returns `Ok(None)` for non-PARAM lines and for PARAM lines with fewer than
/// four whitespace-separated tokens (those are logged and skipped).
///
/// # Errors
/// Currently never returns `Err`; the `Result` is kept for interface stability.
fn parse_param_line(
    &self,
    line: &str,
) -> Result<Option<ParamDeclaration>, Box<dyn Error + Send + Sync>> {
    let line = line.trim();
    if !line.starts_with("PARAM ") {
        return Ok(None);
    }
    let parts: Vec<&str> = line.split_whitespace().collect();
    // Minimum useful shape: PARAM <name> AS <type>  (4 tokens).
    if parts.len() < 4 {
        warn!("Invalid PARAM line: {line}");
        return Ok(None);
    }
    let name = parts[1].to_string();
    // Type comes from the token after "AS"; defaults to "string" when the
    // AS clause is absent or dangling.
    let as_index = parts.iter().position(|&p| p == "AS");
    let param_type = if let Some(idx) = as_index {
        if idx + 1 < parts.len() {
            parts[idx + 1].to_lowercase()
        } else {
            "string".to_string()
        }
    } else {
        "string".to_string()
    };
    // Example: first quoted string after the LIKE keyword.
    // NOTE(review): `find("LIKE")` matches the substring anywhere on the line
    // (e.g. inside a description) — confirm params never contain "LIKE" text.
    let example = if let Some(like_pos) = line.find("LIKE") {
        let rest = &line[like_pos + 4..].trim();
        if let Some(start) = rest.find('"') {
            if let Some(end) = rest[start + 1..].find('"') {
                Some(rest[start + 1..start + 1 + end].to_string())
            } else {
                // Opening quote without a closing one: treat as no example.
                None
            }
        } else {
            None
        }
    } else {
        None
    };
    // Description: text between the first quote after DESCRIPTION and the
    // LAST quote on the rest of the line (rfind), so embedded quotes survive.
    let description = if let Some(desc_pos) = line.find("DESCRIPTION") {
        let rest = &line[desc_pos + 11..].trim();
        if let Some(start) = rest.find('"') {
            if let Some(end) = rest[start + 1..].rfind('"') {
                rest[start + 1..start + 1 + end].to_string()
            } else {
                "".to_string()
            }
        } else {
            "".to_string()
        }
    } else {
        "".to_string()
    };
    Ok(Some(ParamDeclaration {
        name,
        param_type: self.normalize_type(&param_type),
        example,
        description,
        // Every parsed parameter is currently treated as required; there is
        // no OPTIONAL syntax in the grammar yet.
        required: true,
    }))
}
|
|
|
|
|
fn normalize_type(&self, basic_type: &str) -> String {
|
|
|
|
|
match basic_type.to_lowercase().as_str() {
|
|
|
|
|
"string" | "text" => "string".to_string(),
|
|
|
|
|
"integer" | "int" | "number" => "integer".to_string(),
|
|
|
|
|
"float" | "double" | "decimal" => "number".to_string(),
|
|
|
|
|
"boolean" | "bool" => "boolean".to_string(),
|
2025-11-05 10:15:36 -03:00
|
|
|
"date" | "datetime" => "string".to_string(),
|
2025-10-18 18:20:02 -03:00
|
|
|
"array" | "list" => "array".to_string(),
|
|
|
|
|
"object" | "map" => "object".to_string(),
|
2025-11-05 10:15:36 -03:00
|
|
|
_ => "string".to_string(),
|
2025-10-18 18:20:02 -03:00
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
fn generate_mcp_tool(
|
|
|
|
|
&self,
|
|
|
|
|
tool_def: &ToolDefinition,
|
|
|
|
|
) -> Result<MCPTool, Box<dyn Error + Send + Sync>> {
|
|
|
|
|
let mut properties = HashMap::new();
|
|
|
|
|
let mut required = Vec::new();
|
|
|
|
|
for param in &tool_def.parameters {
|
|
|
|
|
properties.insert(
|
|
|
|
|
param.name.clone(),
|
|
|
|
|
MCPProperty {
|
|
|
|
|
prop_type: param.param_type.clone(),
|
|
|
|
|
description: param.description.clone(),
|
|
|
|
|
example: param.example.clone(),
|
|
|
|
|
},
|
|
|
|
|
);
|
|
|
|
|
if param.required {
|
|
|
|
|
required.push(param.name.clone());
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
Ok(MCPTool {
|
|
|
|
|
name: tool_def.name.clone(),
|
|
|
|
|
description: tool_def.description.clone(),
|
|
|
|
|
input_schema: MCPInputSchema {
|
|
|
|
|
schema_type: "object".to_string(),
|
|
|
|
|
properties,
|
|
|
|
|
required,
|
|
|
|
|
},
|
|
|
|
|
})
|
|
|
|
|
}
|
|
|
|
|
fn generate_openai_tool(
|
|
|
|
|
&self,
|
|
|
|
|
tool_def: &ToolDefinition,
|
|
|
|
|
) -> Result<OpenAITool, Box<dyn Error + Send + Sync>> {
|
|
|
|
|
let mut properties = HashMap::new();
|
|
|
|
|
let mut required = Vec::new();
|
|
|
|
|
for param in &tool_def.parameters {
|
|
|
|
|
properties.insert(
|
|
|
|
|
param.name.clone(),
|
|
|
|
|
OpenAIProperty {
|
|
|
|
|
prop_type: param.param_type.clone(),
|
|
|
|
|
description: param.description.clone(),
|
|
|
|
|
example: param.example.clone(),
|
|
|
|
|
},
|
|
|
|
|
);
|
|
|
|
|
if param.required {
|
|
|
|
|
required.push(param.name.clone());
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
Ok(OpenAITool {
|
|
|
|
|
tool_type: "function".to_string(),
|
|
|
|
|
function: OpenAIFunction {
|
|
|
|
|
name: tool_def.name.clone(),
|
|
|
|
|
description: tool_def.description.clone(),
|
|
|
|
|
parameters: OpenAIParameters {
|
|
|
|
|
param_type: "object".to_string(),
|
|
|
|
|
properties,
|
|
|
|
|
required,
|
|
|
|
|
},
|
|
|
|
|
},
|
|
|
|
|
})
|
|
|
|
|
}
|
2025-11-11 09:42:52 -03:00
|
|
|
/// Preprocess a BASIC script into its executable form.
///
/// Side effects, in order:
/// 1. Deletes any existing scheduled automations for this (bot, script) pair.
/// 2. For each `SET SCHEDULE "cron"` line: re-registers the schedule in the DB.
/// 3. For each `WEBHOOK "endpoint"` line: registers the webhook endpoint.
/// 4. For each `USE WEBSITE "url"` line: registers the site for crawling.
/// 5. Updates `self.previous_schedules` so a later compile that drops the
///    schedule line also removes the stale DB row.
///
/// Trigger/declaration lines (`SET SCHEDULE`, `WEBHOOK`, `USE WEBSITE`,
/// `PARAM`, `DESCRIPTION`) and comments are stripped from the output; every
/// other line is emitted with a handful of keywords normalized to their
/// underscore form.
///
/// # Errors
/// Returns an error only when a database connection cannot be obtained;
/// individual registration failures are logged and skipped.
fn preprocess_basic(
    &mut self,
    source: &str,
    source_path: &str,
    bot_id: uuid::Uuid,
) -> Result<String, Box<dyn Error + Send + Sync>> {
    // Alias needed because the diesel `dsl::*` glob below shadows `bot_id`
    // with the table column of the same name.
    let bot_uuid = bot_id;
    let mut result = String::new();
    let mut has_schedule = false;
    // NOTE(review): `has_webhook` is set but never read afterwards — possibly
    // reserved for webhook cleanup symmetrical to `previous_schedules`; confirm.
    let mut has_webhook = false;
    let script_name = Path::new(source_path)
        .file_stem()
        .and_then(|s| s.to_str())
        .unwrap_or("unknown")
        .to_string();
    // Wipe this script's existing schedules up front; surviving SET SCHEDULE
    // lines re-register below. Errors are ignored (best-effort cleanup).
    {
        let mut conn = self
            .state
            .conn
            .get()
            .map_err(|e| format!("Failed to get database connection: {e}"))?;
        use crate::shared::models::system_automations::dsl::*;
        diesel::delete(
            system_automations
                .filter(bot_id.eq(bot_uuid))
                .filter(kind.eq(TriggerKind::Scheduled as i32))
                .filter(param.eq(&script_name)),
        )
        .execute(&mut conn)
        .ok();
    }
    for line in source.lines() {
        let trimmed = line.trim();
        // Skip blanks and comments.
        // NOTE(review): `starts_with("REM")` also drops lines beginning with
        // words like "REMOVE" — confirm that cannot start a valid statement.
        if trimmed.is_empty()
            || trimmed.starts_with("'")
            || trimmed.starts_with("//")
            || trimmed.starts_with("REM")
        {
            continue;
        }
        // Keywords now use spaces directly in Rhai registration
        // Only normalize keywords that still need it for special preprocessing
        let normalized = trimmed
            .replace("FOR EACH", "FOR_EACH")
            .replace("EXIT FOR", "EXIT_FOR")
            .replace("GENERATE PDF", "GENERATE_PDF")
            .replace("MERGE PDF", "MERGE_PDF")
            .replace("GROUP BY", "GROUP_BY");
        // SET SCHEDULE "cron": register the schedule and drop the line.
        if normalized.starts_with("SET SCHEDULE") || trimmed.starts_with("SET SCHEDULE") {
            has_schedule = true;
            // Splitting on '"' puts the quoted cron expression at index 1.
            let parts: Vec<&str> = normalized.split('"').collect();
            if parts.len() >= 3 {
                let cron = parts[1];
                let mut conn = self
                    .state
                    .conn
                    .get()
                    .map_err(|e| format!("Failed to get database connection: {e}"))?;
                if let Err(e) = execute_set_schedule(&mut conn, cron, &script_name, bot_id) {
                    log::error!(
                        "Failed to schedule SET SCHEDULE during preprocessing: {}",
                        e
                    );
                }
            } else {
                log::warn!("Malformed SET SCHEDULE line ignored: {}", trimmed);
            }
            continue;
        }
        // Handle WEBHOOK preprocessing - register webhook endpoint
        if normalized.starts_with("WEBHOOK") {
            has_webhook = true;
            let parts: Vec<&str> = normalized.split('"').collect();
            if parts.len() >= 2 {
                let endpoint = parts[1];
                let mut conn = self
                    .state
                    .conn
                    .get()
                    .map_err(|e| format!("Failed to get database connection: {}", e))?;
                if let Err(e) =
                    execute_webhook_registration(&mut conn, endpoint, &script_name, bot_id)
                {
                    log::error!("Failed to register WEBHOOK during preprocessing: {}", e);
                } else {
                    log::info!(
                        "Registered webhook endpoint {} for script {} during preprocessing",
                        endpoint,
                        script_name
                    );
                }
            } else {
                log::warn!("Malformed WEBHOOK line ignored: {}", normalized);
            }
            continue;
        }

        // USE WEBSITE "url": register the site for crawling and drop the line.
        if trimmed.starts_with("USE WEBSITE") {
            let parts: Vec<&str> = normalized.split('"').collect();
            if parts.len() >= 2 {
                let url = parts[1];
                let mut conn = self
                    .state
                    .conn
                    .get()
                    .map_err(|e| format!("Failed to get database connection: {}", e))?;
                if let Err(e) =
                    crate::basic::keywords::use_website::execute_use_website_preprocessing(
                        &mut conn, url, bot_id,
                    )
                {
                    log::error!("Failed to register USE_WEBSITE during preprocessing: {}", e);
                } else {
                    log::info!(
                        "Registered website {} for crawling during preprocessing",
                        url
                    );
                }
            } else {
                log::warn!("Malformed USE_WEBSITE line ignored: {}", normalized);
            }
            continue;
        }
        // PARAM/DESCRIPTION are interface metadata, not executable code.
        if normalized.starts_with("PARAM ") || normalized.starts_with("DESCRIPTION ") {
            continue;
        }
        result.push_str(&normalized);
        result.push('\n');
    }
    // The script had a schedule last compile but no longer does: remove the
    // stale DB row (best-effort; failure is logged via map_err then ignored).
    if self.previous_schedules.contains(&script_name) && !has_schedule {
        let mut conn = self
            .state
            .conn
            .get()
            .map_err(|e| format!("Failed to get database connection: {}", e))?;
        use crate::shared::models::system_automations::dsl::*;
        diesel::delete(
            system_automations
                .filter(bot_id.eq(bot_uuid))
                .filter(kind.eq(TriggerKind::Scheduled as i32))
                .filter(param.eq(&script_name)),
        )
        .execute(&mut conn)
        .map_err(|e| log::error!("Failed to remove schedule for {}: {}", script_name, e))
        .ok();
    }
    // Track schedule presence for the next compile of this script.
    if has_schedule {
        self.previous_schedules.insert(script_name);
    } else {
        self.previous_schedules.remove(&script_name);
    }
    Ok(result)
}
|
|
|
|
|
}
|
|
|
|
|
/// Output of [`BasicCompiler::compile_file`]. Both manifests are `None` when
/// the script declares no parameters (no tool files are written in that case).
#[derive(Debug)]
pub struct CompilationResult {
    /// Generated MCP manifest, if the script declared parameters.
    pub mcp_tool: Option<MCPTool>,
    /// Generated OpenAI manifest; the leading underscore marks it as
    /// currently unused by callers.
    pub _openai_tool: Option<OpenAITool>,
}
|