From 69ae9249377229a5ca5e042d7ce65898fede5167 Mon Sep 17 00:00:00 2001
From: Rodrigo Rodriguez
Date: Fri, 24 May 2024 11:29:52 -0300
Subject: [PATCH] new(all): Auto import for logo, colors and website content.

---
 packages/gpt.gblib/services/ChatServices.ts | 46 ++++++++++++++-------
 1 file changed, 30 insertions(+), 16 deletions(-)

diff --git a/packages/gpt.gblib/services/ChatServices.ts b/packages/gpt.gblib/services/ChatServices.ts
index 90187636..b4d4a4aa 100644
--- a/packages/gpt.gblib/services/ChatServices.ts
+++ b/packages/gpt.gblib/services/ChatServices.ts
@@ -267,16 +267,13 @@ export class ChatServices {
       memory = new BufferWindowMemory({
         returnMessages: true,
         memoryKey: 'chat_history',
-        humanPrefix: 'input',
-        aiPrefix: 'output',
+        inputKey: 'input',
         k: 2
-        });
+      });
 
       this.memoryMap[user.userSystemId] = memory;
-    }
-    else
-    {
-      memory = this.memoryMap[user.userSystemId]
+    } else {
+      memory = this.memoryMap[user.userSystemId];
     }
 
     const systemPrompt = user ? this.userSystemPrompt[user.userSystemId] : '';
@@ -312,6 +309,13 @@ export class ChatServices {
       Standalone question:`)
     ]);
 
+    const directPrompt = ChatPromptTemplate.fromMessages([
+      ['system', systemPrompt],
+      new MessagesPlaceholder('chat_history'),
+      HumanMessagePromptTemplate.fromTemplate(`Follow Up Input: {question}
+      Standalone question:`)
+    ]);
+
     const toolsResultPrompt = ChatPromptTemplate.fromMessages([
       AIMessagePromptTemplate.fromTemplate(
         `The tool just returned value in last call. Using {chat_history}
@@ -348,7 +352,7 @@ export class ChatServices {
       of the answer, utilize any relevant context provided to answer the question effectively.
       Don“t output MD images tags url previously shown.
 
-      ${LLMMode==='document-ref'? jsonInformation: ''}
+      ${LLMMode === 'document-ref' ? jsonInformation : ''}
 
       And based on this chat history and question, answer combined.
       `
@@ -357,6 +361,19 @@ export class ChatServices {
       HumanMessagePromptTemplate.fromTemplate('Question: {question}')
     ]);
 
+    const directChain = RunnableSequence.from([
+      {
+        question: (question: string) => question,
+        chat_history: async () => {
+          const { chat_history } = await memory.loadMemoryVariables({});
+          return chat_history;
+        }
+      },
+      directPrompt,
+      model,
+      new StringOutputParser()
+    ]);
+
     const callToolChain = RunnableSequence.from([
       {
         tool_output: async (output: object) => {
@@ -415,16 +432,13 @@ export class ChatServices {
 
     // .gbot switch LLMMode and choose the corresponding chain.
 
     if (LLMMode === 'direct') {
-      result = await (tools.length > 0 ? modelWithTools : model).invoke(`
-      ${systemPrompt}
-
-      ${question}`);
-      result = result.content;
-    } else if (LLMMode === 'document-ref' || LLMMode === 'document') {
-      const res = await combineDocumentsChain.invoke(question);
+      result = await directChain.invoke(question);
 
-      result = res.text? res.text: res;
+    } else if (LLMMode === 'document-ref' || LLMMode === 'document') {
+      const res = await combineDocumentsChain.invoke(question);
+
+      result = res.text ? res.text : res;
       sources = res.sources;
     } else if (LLMMode === 'function') {
       result = await conversationalToolChain.invoke({
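
Note: the sketch below shows the "direct" chain pattern this patch introduces, as a
self-contained program that can run outside the bot. It is a minimal sketch assuming
the 2024-era LangChain JS package layout (@langchain/core, @langchain/openai,
langchain); the ChatOpenAI model name, the system prompt text, and the ask() helper
are illustrative stand-ins, not code from this commit.

import { ChatOpenAI } from '@langchain/openai';
import {
  ChatPromptTemplate,
  HumanMessagePromptTemplate,
  MessagesPlaceholder
} from '@langchain/core/prompts';
import { RunnableSequence } from '@langchain/core/runnables';
import { StringOutputParser } from '@langchain/core/output_parsers';
import { BufferWindowMemory } from 'langchain/memory';

// Placeholder model; the bot resolves its model from .gbot config instead.
const model = new ChatOpenAI({ modelName: 'gpt-4o' });

// Same memory shape as the patch: keep only the last k = 2 exchanges,
// returned as message objects under the 'chat_history' key.
const memory = new BufferWindowMemory({
  returnMessages: true,
  memoryKey: 'chat_history',
  inputKey: 'input',
  k: 2
});

// Placeholder system prompt; in the bot this comes from userSystemPrompt.
const directPrompt = ChatPromptTemplate.fromMessages([
  ['system', 'You are a helpful assistant.'],
  new MessagesPlaceholder('chat_history'),
  HumanMessagePromptTemplate.fromTemplate('{question}')
]);

// Same pipeline as the patched directChain: a map step injects the raw
// question and the stored history, then prompt -> model -> string parser.
const directChain = RunnableSequence.from([
  {
    question: (question: string) => question,
    chat_history: async () => {
      const { chat_history } = await memory.loadMemoryVariables({});
      return chat_history;
    }
  },
  directPrompt,
  model,
  new StringOutputParser()
]);

// Illustrative helper: invoke() returns a Promise, so it must be awaited;
// saving the turn keeps the window memory in sync for the next call.
async function ask(question: string): Promise<string> {
  const answer = await directChain.invoke(question);
  await memory.saveContext({ input: question }, { output: answer });
  return answer;
}

The detail the chain relies on is that BufferWindowMemory with returnMessages: true
yields an array of chat messages, which is exactly what MessagesPlaceholder('chat_history')
expects when the prompt is rendered.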