fix (templates): llm-server OK.

Rodrigo Rodriguez 2024-09-15 18:32:20 -03:00
parent cbcb1ae95b
commit 52d766667a
2 changed files with 13 additions and 12 deletions

@@ -877,7 +877,7 @@ export class GBVMService extends GBService {
     // Auto-NLP generates BASIC variables related to entities.
-    if (step ? step.context.activity.originalText : null && min['nerEngine']) {
+    if (step?.context.activity.originalText && min['nerEngine']) {
       const result = await min['nerEngine'].process(step.context.activity.originalText);
       for (let i = 0; i < result.entities.length; i++) {
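
Why the condition changed: && binds tighter than ?:, so the old test parsed as step ? step.context.activity.originalText : (null && min['nerEngine']). With a truthy step, min['nerEngine'] was never checked at all, and the NER branch could run without an engine. Optional chaining expresses the intended both-must-exist check. A minimal sketch of the parse difference (type-check only, declarations stand in for the real objects):

    // The old expression, parenthesized the way TypeScript actually reads it:
    //   step ? step.context.activity.originalText : (null && min['nerEngine'])
    // With a truthy `step`, min['nerEngine'] is never consulted.
    declare const step: { context: { activity: { originalText?: string } } } | undefined;
    declare const min: Record<string, unknown>;

    if (step?.context.activity.originalText && min['nerEngine']) {
      // Both conditions verified: the text is present and the NER engine is configured.
    }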

@@ -381,14 +381,14 @@ export class ChatServices {
       new MessagesPlaceholder('chat_history'),
       HumanMessagePromptTemplate.fromTemplate(`Follow Up Input: {question}
     Standalone question:`)
-    ]);
+    ] as any);

     const directPrompt = ChatPromptTemplate.fromMessages([
       SystemMessagePromptTemplate.fromTemplate(systemPrompt),
       new MessagesPlaceholder('chat_history'),
       HumanMessagePromptTemplate.fromTemplate(`Follow Up Input: {question}
     Standalone question:`)
-    ]);
+    ] as any);

     const toolsResultPrompt = ChatPromptTemplate.fromMessages([
       SystemMessagePromptTemplate.fromTemplate(
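
The ] as any) cast above recurs through the rest of the commit. The likely cause (an assumption; the commit message does not say) is a structural-typing mismatch in ChatPromptTemplate.fromMessages, as happens when prompt classes resolve from a different @langchain/core copy or version than the template; the cast silences the error rather than fixing the types. Reproduced in isolation (the variable name and system prompt are placeholders):

    import {
      ChatPromptTemplate,
      HumanMessagePromptTemplate,
      MessagesPlaceholder,
      SystemMessagePromptTemplate
    } from '@langchain/core/prompts';

    const systemPrompt = 'You are a helpful assistant.'; // built upstream in ChatServices

    // Casting the whole array to `any` gives up compile-time checking of the
    // message list; it is a pragmatic unblock rather than a typed fix.
    const questionGeneratorPrompt = ChatPromptTemplate.fromMessages([
      SystemMessagePromptTemplate.fromTemplate(systemPrompt),
      new MessagesPlaceholder('chat_history'),
      HumanMessagePromptTemplate.fromTemplate(`Follow Up Input: {question}
    Standalone question:`)
    ] as any);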
@@ -408,7 +408,7 @@ export class ChatServices {
       HumanMessagePromptTemplate.fromTemplate(`Tool output: {tool_output}
     Folowing answer:`)
-    ]);
+    ] as any);

     const jsonInformation = `VERY IMPORTANT: ALWAYS return VALID standard JSON with the folowing structure: 'text' as answer,
       sources as an array of ('file' indicating the PDF filename and 'page' indicating the page number) listing all segmented context.
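
The jsonInformation instruction pins down the response contract: a 'text' answer plus a sources array of file/page pairs. A sketch of that shape and a defensive parse (the LLMAnswer type and parseLLMAnswer helper are illustrative, not from the repository):

    // Illustrative types for the contract stated in jsonInformation.
    interface LLMAnswer {
      text: string;                               // the answer shown to the user
      sources: { file: string; page: number }[];  // PDF filename and page per context segment
    }

    function parseLLMAnswer(raw: string): LLMAnswer | null {
      try {
        const parsed = JSON.parse(raw);
        return typeof parsed.text === 'string' && Array.isArray(parsed.sources)
          ? (parsed as LLMAnswer)
          : null; // valid JSON but wrong shape
      } catch {
        return null; // not JSON at all; caller should fall back to plain text
      }
    }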
@@ -442,7 +442,7 @@ export class ChatServices {
       ),
       new MessagesPlaceholder('chat_history'),
       HumanMessagePromptTemplate.fromTemplate('Question: {question}')
-    ]);
+    ] as any);

     const directChain = RunnableSequence.from([
       {
@@ -455,7 +455,7 @@ export class ChatServices {
       directPrompt,
       model,
       new StringOutputParser()
-    ]);
+    ] as any);

     const callToolChain = RunnableSequence.from([
       {
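
directChain, callToolChain, and the other chains in this file follow the same LCEL shape: an input-mapping object, then prompt, model, and output parser composed with RunnableSequence.from. A self-contained sketch of that pattern, assuming @langchain/core and an OpenAI chat model (ChatServices configures the real model elsewhere):

    import { StringOutputParser } from '@langchain/core/output_parsers';
    import { ChatPromptTemplate } from '@langchain/core/prompts';
    import { RunnableSequence } from '@langchain/core/runnables';
    import { ChatOpenAI } from '@langchain/openai';

    // Hypothetical model; any chat model works here.
    const model = new ChatOpenAI({ model: 'gpt-4o-mini' });

    const prompt = ChatPromptTemplate.fromMessages([
      ['system', 'Answer briefly.'],
      ['human', 'Question: {question}']
    ]);

    // Same shape as directChain above: an input-mapping step, then
    // prompt -> model -> parser, composed left to right.
    const chain = RunnableSequence.from([
      { question: (input: { question: string }) => input.question },
      prompt,
      model,
      new StringOutputParser()
    ]);

    // await chain.invoke({ question: 'What is LCEL?' }) resolves to a plain string.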
@@ -476,7 +476,7 @@ export class ChatServices {
       toolsResultPrompt,
       model,
       new StringOutputParser()
-    ]);
+    ] as any);

     const combineDocumentsChain = RunnableSequence.from([
       {
@@ -493,7 +493,7 @@ export class ChatServices {
       combineDocumentsPrompt,
       model,
       new GBLLMOutputParser(min, null, null)
-    ]);
+    ] as any);

     const conversationalToolChain = RunnableSequence.from([
       {
@@ -507,7 +507,7 @@ export class ChatServices {
       modelWithTools,
       new GBLLMOutputParser(min, callToolChain, docsContext?.docstore?._docs.length > 0 ? combineDocumentsChain : null),
       new StringOutputParser()
-    ]);
+    ] as any);

     GBLogEx.info(min, `Calling LLM...`);
     let result, sources;
@@ -591,7 +591,7 @@ export class ChatServices {
       prompt,
       model,
       new StringOutputParser()
-    ]);
+    ] as any);

     /**
      * Create the final prompt template which is tasked with getting the natural
@@ -633,12 +633,13 @@ export class ChatServices {
         table_info: () => 'any'
       },
       {
-        result: finalResponsePrompt.pipe(model).pipe(new StringOutputParser()),
+        result: finalResponsePrompt.pipe(model).pipe(
+          new StringOutputParser() as any),
         // Pipe the query through here unchanged so it gets logged alongside the result.
         sql: previousStepResult => previousStepResult.query
       }
-    ]);
+    ] as any);

     result = await finalChain.invoke({
       question: question
     });
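
The last hunk splits the pipe call across lines and casts the parser, but runtime behavior is unchanged: .pipe() composes runnables left to right, equivalent to listing them in RunnableSequence.from. A sketch with hypothetical stand-ins for the SQL answer prompt and model:

    import { StringOutputParser } from '@langchain/core/output_parsers';
    import { PromptTemplate } from '@langchain/core/prompts';
    import { ChatOpenAI } from '@langchain/openai';

    // Stand-ins: the real finalResponsePrompt and model come from ChatServices.
    const model = new ChatOpenAI({ model: 'gpt-4o-mini' });
    const finalResponsePrompt = PromptTemplate.fromTemplate(
      'SQL result: {sql_result}\nQuestion: {question}\nAnswer in plain language.'
    );

    // .pipe() chains runnables left to right; splitting the call across
    // lines, as the commit does, changes nothing at runtime.
    const answerChain = finalResponsePrompt.pipe(model).pipe(new StringOutputParser());

    // await answerChain.invoke({ sql_result: '42', question: 'How many rows?' })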