feat: simplify LLM prompt and add debug logging
- Simplified build_llm_prompt by removing redundant formatting
- Added info logging for LLM model and processed content
- Updated README with development philosophy note
- Adjusted announcement schedule timing from 55 to 59 minutes past the hour
parent aac36afaba
commit cc741b378e

4 changed files with 10 additions and 6 deletions
@@ -14,3 +14,4 @@ When initial attempts fail, sequentially try these LLMs:
 - **On error**: Stop and consult Claude for guidance
 - **Change progression**: Start with DeepSeek, conclude with gpt-oss-120b
 - **Final validation**: Use prompt "cargo check" with gpt-oss-120b
+- Be humble, one requirement, one commit. But sometimes, freedom of chaos is welcome - when no deadlines are set.
@@ -65,10 +65,7 @@ pub fn llm_keyword(state: Arc<AppState>, _user: UserSession, engine: &mut Engine
 /// Builds a consistent LLM prompt used by all Rhai scripts.
 /// You can change the style/structure here to guide the model's behavior.
 fn build_llm_prompt(user_text: &str) -> String {
-    format!(
-        "User: {}",
-        user_text.trim()
-    )
+    user_text.trim().to_string()
 }
 
 /// Runs the async LLM provider call safely.
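The net effect of this hunk is that build_llm_prompt now returns the trimmed user text as-is, without the previous "User: " prefix. A minimal test sketch of the new behavior (the test module is illustrative and not part of the commit):

```rust
#[cfg(test)]
mod tests {
    use super::build_llm_prompt;

    #[test]
    fn prompt_is_the_trimmed_user_text() {
        // After this change the prompt is just the trimmed input;
        // previously it came out as "User: <text>".
        assert_eq!(build_llm_prompt("  hello  "), "hello");
        assert_eq!(build_llm_prompt("hello"), "hello");
    }
}
```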
@@ -81,11 +78,14 @@ pub async fn execute_llm_generation(
         .get_config(&Uuid::nil(), "llm-model", None)
         .unwrap_or_default();
 
+    info!("Using LLM model: {}", model);
     let handler = crate::llm_models::get_handler(&model);
     let raw_response = state
         .llm_provider
         .generate(&prompt, &serde_json::Value::Null)
         .await?;
 
-    Ok(handler.process_content(&raw_response))
+    let processed = handler.process_content(&raw_response);
+    info!("Processed content: {}", processed);
+    Ok(processed)
 }
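The two new info! calls record the selected model and the full processed response. A standalone sketch of how that output surfaces at runtime, assuming the log facade with env_logger as the backend (the project's actual logger setup is not shown in this diff):

```rust
use log::info;

fn main() {
    // Run with RUST_LOG=info so the messages added in this commit are visible.
    env_logger::init();

    let model = "gpt-oss-120b";                  // illustrative "llm-model" config value
    let processed = "example processed content"; // illustrative LLM output

    info!("Using LLM model: {}", model);
    info!("Processed content: {}", processed);
}
```

Logging the entire processed content may be verbose for long responses; truncating it or logging only its length is a possible follow-up.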
@@ -1,4 +1,4 @@
-SET_SCHEDULE "55 * * * *"
+SET_SCHEDULE "59 * * * *"
 
 let text = GET "announcements.gbkb/news/news.pdf"
 let resume = LLM "In a few words, resume this: " + text
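Assuming SET_SCHEDULE takes a standard five-field cron expression, this change moves the announcement job from minute 55 to minute 59 of every hour (every day, every month).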
test_llm.rhai (new file, 3 additions)
@@ -0,0 +1,3 @@
+// Simple test script for LLM keyword
+let result = LLM "Hello world";
+result
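The new test_llm.rhai exercises the LLM keyword end to end. A hedged sketch of driving it from Rust, assuming the keyword has already been registered on the engine by llm_keyword from the hunk above (the helper name and String result type are illustrative):

```rust
use rhai::{Engine, EvalAltResult};

// Illustrative helper: evaluate test_llm.rhai on an engine that already has
// the LLM custom keyword registered (e.g. by llm_keyword from this commit).
fn run_test_script(engine: &Engine) -> Result<String, Box<EvalAltResult>> {
    let script = std::fs::read_to_string("test_llm.rhai")
        .expect("test_llm.rhai is added by this commit");
    engine.eval::<String>(&script)
}
```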