From 44e3ecc0776f4a956886873b0c7c61cc1b530db4 Mon Sep 17 00:00:00 2001
From: "me@rodrigorodriguez.com"
Date: Thu, 17 Oct 2024 16:52:04 -0300
Subject: [PATCH] new(core.gbapp): LLM alerts for data.

---
 packages/llm.gblib/services/ChatServices.ts | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/packages/llm.gblib/services/ChatServices.ts b/packages/llm.gblib/services/ChatServices.ts
index 874bb64b..ddfe5c9e 100644
--- a/packages/llm.gblib/services/ChatServices.ts
+++ b/packages/llm.gblib/services/ChatServices.ts
@@ -551,9 +551,9 @@ export class ChatServices {
     let dataSource;
     if (answerSource === 'cache') {
-      let sqliteFilePath =
-        path.join('work', GBUtil.getGBAIPath(min.botId), `${con['name']}.sqlite`);
+      let sqliteFilePath = path.join('work', GBUtil.getGBAIPath(min.botId), `${con['name']}.sqlite`);
+
       GBLogEx.info(min, `Using data from cache: ${path.basename(sqliteFilePath)}.`);
       dataSource = new DataSource({
         type: 'sqlite',
@@ -619,7 +619,7 @@ export class ChatServices {
      * Create the final prompt template which is tasked with getting the natural
      * language response to the SQL query.
      */
-    const finalResponsePrompt =
+    const finalResponsePrompt =
       PromptTemplate.fromTemplate(`Based on the table schema below, question, SQL query, and SQL response, write a natural language response: Optimize answers for KPI people. ${systemPrompt}
 ------------