new(all): Auto import for logo, colors and website content.

Rodrigo Rodriguez 2024-05-24 11:29:52 -03:00
parent 533fefe0da
commit 69ae924937

@@ -267,16 +267,13 @@ export class ChatServices {
       memory = new BufferWindowMemory({
         returnMessages: true,
         memoryKey: 'chat_history',
         humanPrefix: 'input',
         aiPrefix: 'output',
         inputKey: 'input',
         k: 2
-      });
       });
       this.memoryMap[user.userSystemId] = memory;
-    }
-    else
-    {
-      memory = this.memoryMap[user.userSystemId]
+    } else {
+      memory = this.memoryMap[user.userSystemId];
     }
     const systemPrompt = user ? this.userSystemPrompt[user.userSystemId] : '';
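
Note: this hunk tidies the lazy per-user memory cache: one BufferWindowMemory (a sliding window over the last k = 2 exchanges) per userSystemId. A minimal standalone sketch of the pattern, assuming langchain's BufferWindowMemory; the getMemory helper and the map shape are illustrative, not the class's actual fields:

import { BufferWindowMemory } from 'langchain/memory';

// One windowed memory per user, created lazily on first contact.
const memoryMap: Record<string, BufferWindowMemory> = {};

function getMemory(userSystemId: string): BufferWindowMemory {
  if (!memoryMap[userSystemId]) {
    memoryMap[userSystemId] = new BufferWindowMemory({
      returnMessages: true,     // yield BaseMessage[] instead of one string
      memoryKey: 'chat_history',
      inputKey: 'input',
      k: 2                      // keep only the two most recent exchanges
    });
  }
  return memoryMap[userSystemId];
}

// After each turn, persist the exchange so the window slides forward:
//   await getMemory(id).saveContext({ input: question }, { output: answer });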
@@ -312,6 +309,13 @@ export class ChatServices {
       Standalone question:`)
     ]);
+    const directPrompt = ChatPromptTemplate.fromMessages([
+      ['system', systemPrompt],
+      new MessagesPlaceholder('chat_history'),
+      HumanMessagePromptTemplate.fromTemplate(`Follow Up Input: {question}
+      Standalone question:`)
+    ]);
     const toolsResultPrompt = ChatPromptTemplate.fromMessages([
       AIMessagePromptTemplate.fromTemplate(
         `The tool just returned value in last call. Using {chat_history}
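
Note: the added directPrompt stacks three parts: the per-user system prompt, the replayed chat_history (via MessagesPlaceholder), and the current user turn. A small sketch of how such a prompt resolves into messages, assuming the @langchain/core/prompts and @langchain/core/messages entry points; the literal strings stand in for the real systemPrompt and question:

import {
  ChatPromptTemplate,
  HumanMessagePromptTemplate,
  MessagesPlaceholder
} from '@langchain/core/prompts';
import { AIMessage, HumanMessage } from '@langchain/core/messages';

const directPrompt = ChatPromptTemplate.fromMessages([
  ['system', 'You answer questions about this bot.'],  // stands in for systemPrompt
  new MessagesPlaceholder('chat_history'),             // filled from the window memory
  HumanMessagePromptTemplate.fromTemplate(`Follow Up Input: {question}
  Standalone question:`)
]);

async function demo() {
  // chat_history must be BaseMessage[] because the memory sets returnMessages: true.
  const messages = await directPrompt.formatMessages({
    chat_history: [new HumanMessage('hi'), new AIMessage('hello!')],
    question: 'Which colors were imported?'
  });
  console.log(messages);  // [SystemMessage, HumanMessage, AIMessage, HumanMessage]
}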
@@ -348,7 +352,7 @@ export class ChatServices {
       of the answer, utilize any relevant context provided to answer the question effectively.
       Don´t output MD images tags url previously shown.
-      ${LLMMode==='document-ref'? jsonInformation: ''}
+      ${LLMMode === 'document-ref' ? jsonInformation : ''}
       And based on this chat history and question, answer combined.
       `
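
Note: the line fixed above sits inside a template literal, so the ternary splices the document-ref formatting instructions into the system prompt and contributes an empty string in every other mode. The shape of the pattern, with hypothetical placeholder values:

// Hypothetical values, for illustration only.
const LLMMode: string = 'document-ref';
const jsonInformation = 'Cite sources as a JSON array of { file, page }.';

const systemText = `
  Answer using any relevant context provided.
  ${LLMMode === 'document-ref' ? jsonInformation : ''}
`;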
@@ -357,6 +361,19 @@ export class ChatServices {
       HumanMessagePromptTemplate.fromTemplate('Question: {question}')
     ]);
+    const directChain = RunnableSequence.from([
+      {
+        question: (question: string) => question,
+        chat_history: async () => {
+          const { chat_history } = await memory.loadMemoryVariables({});
+          return chat_history;
+        }
+      },
+      directPrompt,
+      model,
+      new StringOutputParser()
+    ]);
     const callToolChain = RunnableSequence.from([
       {
         tool_output: async (output: object) => {
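
Note: the added directChain is a four-step RunnableSequence: an input map fans the raw question out into { question, chat_history } (history pulled from the window memory), then the prompt, the model, and a string parser run in order. A self-contained sketch of that pipeline, assuming @langchain/openai for the model (any chat model would do) and simplified stand-ins for the memory and prompt from the hunks above:

import { RunnableSequence } from '@langchain/core/runnables';
import { StringOutputParser } from '@langchain/core/output_parsers';
import {
  ChatPromptTemplate,
  HumanMessagePromptTemplate,
  MessagesPlaceholder
} from '@langchain/core/prompts';
import { BufferWindowMemory } from 'langchain/memory';
import { ChatOpenAI } from '@langchain/openai';

const memory = new BufferWindowMemory({
  returnMessages: true,
  memoryKey: 'chat_history',
  inputKey: 'input',
  k: 2
});

const directPrompt = ChatPromptTemplate.fromMessages([
  ['system', 'You answer questions about this bot.'],
  new MessagesPlaceholder('chat_history'),
  HumanMessagePromptTemplate.fromTemplate('Question: {question}')
]);

const model = new ChatOpenAI({ model: 'gpt-4o-mini' });  // assumed model name

const directChain = RunnableSequence.from([
  {
    // Each entry of this map is computed from the chain input (the raw question).
    question: (question: string) => question,
    chat_history: async () => {
      const { chat_history } = await memory.loadMemoryVariables({});
      return chat_history;
    }
  },
  directPrompt,               // formats system + history + question into messages
  model,                      // produces an AIMessage
  new StringOutputParser()    // collapses it to a plain string
]);

// const answer = await directChain.invoke('What does the bot import automatically?');

One detail worth noting: invoke returns a Promise<string>, and in the dispatch hunk below result = directChain.invoke(question) is assigned without await, so result holds the pending promise at that point.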
@@ -415,16 +432,13 @@ export class ChatServices {
     // .gbot switch LLMMode and choose the corresponding chain.
     if (LLMMode === 'direct') {
-      result = await (tools.length > 0 ? modelWithTools : model).invoke(`
-      ${systemPrompt}
-      ${question}`);
-      result = result.content;
-    } else if (LLMMode === 'document-ref' || LLMMode === 'document') {
-      const res = await combineDocumentsChain.invoke(question);
-      result = res.text? res.text: res;
+      result = directChain.invoke(question);
+    } else if (LLMMode === 'document-ref' || LLMMode === 'document') {
+      const res = await combineDocumentsChain.invoke(question);
+      result = res.text ? res.text : res;
       sources = res.sources;
     } else if (LLMMode === 'function') {
       result = await conversationalToolChain.invoke({