/*****************************************************************************\
| █████ █████ ██  █ █████ █████ ████  ██      ████   █████ █████ ███ ® |
| ██      █   ███ █ █       █   ██  ██ ██    ██  ██ ██  ██   █   █       |
| ██  ███ ████   █ ██ █     ████   █████ ██████ ██    ████   █   █   ██  |
| ██  ██  █   █  ███ █      █   ██  ██ ██    ██  ██ ██  ██   █   █    █  |
| █████ █████ █  ███ █████ ██  ██ ██ ██ █████   ████ █████   █   ███     |
|                                                                        |
| General Bots Copyright (c) pragmatismo.com.br. All rights reserved.    |
| Licensed under the AGPL-3.0.                                           |
|                                                                        |
| According to our dual licensing model, this program can be used either |
| under the terms of the GNU Affero General Public License, version 3,   |
| or under a proprietary license.                                        |
|                                                                        |
| The texts of the GNU Affero General Public License with an additional  |
| permission and of our proprietary license can be found at and          |
| in the LICENSE file you have received along with this program.         |
|                                                                        |
| This program is distributed in the hope that it will be useful,        |
| but WITHOUT ANY WARRANTY, without even the implied warranty of         |
| MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the           |
| GNU Affero General Public License for more details.                    |
|                                                                        |
| "General Bots" is a registered trademark of pragmatismo.com.br.        |
| The licensing of the program under the AGPLv3 does not imply a         |
| trademark license. Therefore any rights, title and interest in         |
| our trademarks remain entirely with us.                                |
|                                                                        |
\*****************************************************************************/
'use strict';
|
|
|
|
|
|
2023-08-02 13:58:11 -03:00
|
|
|
|
import { GBMinInstance } from 'botlib';
|
2024-02-05 12:36:20 -03:00
|
|
|
|
import { CallbackManager } from 'langchain/callbacks';
|
|
|
|
|
import { ChatPromptTemplate, HumanMessagePromptTemplate, SystemMessagePromptTemplate } from 'langchain/prompts';
|
2024-03-04 20:05:56 -03:00
|
|
|
|
import { ConversationChain, LLMChain } from 'langchain/chains';
|
2024-02-05 12:36:20 -03:00
|
|
|
|
import { BufferWindowMemory } from 'langchain/memory';
|
2023-09-17 21:40:34 -03:00
|
|
|
|
import { CollectionUtil } from 'pragmatismo-io-framework';
|
2023-09-21 11:24:08 -03:00
|
|
|
|
import { DialogKeywords } from '../../basic.gblib/services/DialogKeywords.js';
|
2023-09-20 15:25:44 -03:00
|
|
|
|
import Path from 'path';
|
|
|
|
|
import * as Fs from 'fs';
|
2024-02-05 12:36:20 -03:00
|
|
|
|
import { HNSWLib } from 'langchain/vectorstores/hnswlib';
|
|
|
|
|
import { GuaribasSubject } from '../../kb.gbapp/models/index.js';
|
|
|
|
|
import { GBConfigService } from '../../core.gbapp/services/GBConfigService.js';
|
2024-03-04 20:05:56 -03:00
|
|
|
|
import { ChatOpenAI } from "@langchain/openai";
|
|
|
|
|
import { JsonOutputFunctionsParser } from 'langchain/dist/output_parsers/openai_functions.js';
|
|
|
|
|
import { GBVMService } from '../../basic.gblib/services/GBVMService.js';
|
|
|
|
|
|
2023-07-23 10:59:59 -03:00
|
|
|
|
|
2023-07-23 15:39:05 -03:00
|
|
|
|
export class ChatServices {
|
2023-09-17 21:40:34 -03:00
|
|
|
|
|
2024-02-05 12:36:20 -03:00
|
|
|
|
private static async getRelevantContext(
|
|
|
|
|
vectorStore: HNSWLib,
|
|
|
|
|
sanitizedQuestion: string,
|
|
|
|
|
numDocuments: number
|
|
|
|
|
): Promise<string> {
|
|
|
|
|
const documents = await vectorStore.similaritySearch(sanitizedQuestion, numDocuments);
|
|
|
|
|
return documents
|
|
|
|
|
.map((doc) => doc.pageContent)
|
|
|
|
|
.join(', ')
|
|
|
|
|
.trim()
|
|
|
|
|
.replaceAll('\n', ' ');
|
|
|
|
|
}
|
|
|
|
|
|
2023-07-23 10:59:59 -03:00
|
|
|
|
/**
|
|
|
|
|
* Generate text
|
2023-08-02 13:58:11 -03:00
|
|
|
|
*
|
2023-07-23 10:59:59 -03:00
|
|
|
|
* CONTINUE keword.
|
2023-08-02 13:58:11 -03:00
|
|
|
|
*
|
2023-07-23 10:59:59 -03:00
|
|
|
|
* result = CONTINUE text
|
2023-08-02 13:58:11 -03:00
|
|
|
|
*
|
2023-07-23 10:59:59 -03:00
|
|
|
|
*/
|
2023-08-02 13:58:11 -03:00
|
|
|
|
public static async continue(min: GBMinInstance, text: string, chatId) {
|
2023-07-23 10:59:59 -03:00
|
|
|
|
|
|
|
|
|
}
|
2024-02-05 12:36:20 -03:00
|
|
|
|
|
2024-03-04 20:05:56 -03:00
|
|
|
|
|
|
|
|
|
public static async answerByGPT(min: GBMinInstance, pid,
|
2024-02-05 12:36:20 -03:00
|
|
|
|
query: string,
|
|
|
|
|
searchScore: number,
|
|
|
|
|
subjects: GuaribasSubject[]
|
|
|
|
|
) {
|
|
|
|
|
|
|
|
|
|
if (!process.env.OPENAI_KEY) {
|
|
|
|
|
return { answer: undefined, questionId: 0 };
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
const systemPrompt = SystemMessagePromptTemplate.fromTemplate(
|
2024-03-04 20:05:56 -03:00
|
|
|
|
`You are $${min.botId}`);
|
2024-02-05 12:36:20 -03:00
|
|
|
|
|
|
|
|
|
const contentLocale = min.core.getParam(
|
|
|
|
|
min.instance,
|
|
|
|
|
'Default Content Language',
|
|
|
|
|
GBConfigService.get('DEFAULT_CONTENT_LANGUAGE')
|
|
|
|
|
);
|
2024-03-03 16:20:50 -03:00
|
|
|
|
|
|
|
|
|
|
2024-03-04 20:05:56 -03:00
|
|
|
|
let functions = [];
|
|
|
|
|
|
|
|
|
|
// Adds .gbdialog as functions if any to GPT Functions.
|
|
|
|
|
|
|
|
|
|
await CollectionUtil.asyncForEach(Object.values(min.scriptMap), async script => {
|
|
|
|
|
const path = DialogKeywords.getGBAIPath(min.botId, "gbdialog", null);
|
|
|
|
|
const localFolder = Path.join('work', path, `${script}.json`);
|
|
|
|
|
|
|
|
|
|
if (Fs.existsSync(localFolder)) {
|
|
|
|
|
const func = Fs.readFileSync(localFolder).toJSON();
|
|
|
|
|
functions.push(func);
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
let SystemPromptTailored = ''; // TODO: Load from user context.
|
|
|
|
|
|
|
|
|
|
// Generates function definition for each function
|
|
|
|
|
// in plain text to be used in system prompt.
|
|
|
|
|
|
|
|
|
|
let functionDef = Object.keys(functions)
|
|
|
|
|
.map((toolname) => `${toolname}: ${functions[toolname].description}`)
|
|
|
|
|
.join("\n");
|
|
|
|
|
|
|
|
|
|
let promptTemplate = `Answer in ${contentLocale}.
|
2024-02-05 12:36:20 -03:00
|
|
|
|
You have access to the context (RELEVANTDOCS) provided by the user.
|
|
|
|
|
|
|
|
|
|
When answering think about whether the question in RELEVANTDOCS, but never mention
|
|
|
|
|
to user about the source.
|
|
|
|
|
Don’t justify your answers. Don't refer to yourself in any of the created content.
|
|
|
|
|
Don´t prefix RESPONSE: when answering the user.
|
|
|
|
|
RELEVANTDOCS: {context}
|
|
|
|
|
|
|
|
|
|
QUESTION: """{input}"""
|
|
|
|
|
|
2024-03-04 20:05:56 -03:00
|
|
|
|
${SystemPromptTailored}
|
|
|
|
|
|
2024-03-03 16:20:50 -03:00
|
|
|
|
You have the following tools that you can invoke based on the user inquiry.
|
|
|
|
|
Tools:
|
|
|
|
|
|
2024-03-04 20:05:56 -03:00
|
|
|
|
${functionDef}
|
2024-03-03 16:20:50 -03:00
|
|
|
|
|
2024-03-04 20:05:56 -03:00
|
|
|
|
`;
|
2024-03-03 16:20:50 -03:00
|
|
|
|
|
2024-03-04 20:05:56 -03:00
|
|
|
|
const chatPrompt = ChatPromptTemplate.fromPromptMessages([
|
|
|
|
|
systemPrompt,
|
|
|
|
|
HumanMessagePromptTemplate.fromTemplate(promptTemplate),]);
|
2024-03-03 16:20:50 -03:00
|
|
|
|
|
2024-02-05 12:36:20 -03:00
|
|
|
|
const windowMemory = new BufferWindowMemory({
|
|
|
|
|
returnMessages: false,
|
|
|
|
|
memoryKey: 'immediate_history',
|
|
|
|
|
inputKey: 'input',
|
|
|
|
|
k: 2,
|
|
|
|
|
});
|
|
|
|
|
|
2024-03-04 20:05:56 -03:00
|
|
|
|
const llm = new ChatOpenAI({
|
|
|
|
|
modelName: "gpt-3.5-turbo-0125",
|
|
|
|
|
temperature: 0,
|
2024-02-05 12:36:20 -03:00
|
|
|
|
});
|
2024-03-04 20:05:56 -03:00
|
|
|
|
|
|
|
|
|
const llmWithTools = llm.bind({
|
|
|
|
|
tools: functions
|
2024-02-05 12:36:20 -03:00
|
|
|
|
});
|
2024-03-04 20:05:56 -03:00
|
|
|
|
|
2024-02-05 12:36:20 -03:00
|
|
|
|
const chain = new LLMChain({
|
|
|
|
|
memory: windowMemory,
|
2024-03-04 20:05:56 -03:00
|
|
|
|
prompt: chatPrompt,
|
|
|
|
|
llm: llmWithTools as any,
|
2024-02-05 12:36:20 -03:00
|
|
|
|
});
|
|
|
|
|
|
2024-03-04 20:05:56 -03:00
|
|
|
|
const contextVectorStore = min['vectorStore'];
|
|
|
|
|
const question = query.trim().replaceAll('\n', ' ');
|
|
|
|
|
const context = await this.getRelevantContext(contextVectorStore, question, 1);
|
2024-02-05 12:36:20 -03:00
|
|
|
|
|
2024-03-04 20:05:56 -03:00
|
|
|
|
let prompt;
|
|
|
|
|
|
|
|
|
|
// allow the LLM to iterate until it finds a final answer
|
|
|
|
|
while (true) {
|
|
|
|
|
const response = await chain.call({
|
|
|
|
|
input: question,
|
|
|
|
|
context,
|
|
|
|
|
history: '',
|
|
|
|
|
immediate_history: '',
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
// add this to the prompt
|
|
|
|
|
prompt += response;
|
2024-02-05 12:36:20 -03:00
|
|
|
|
|
2024-03-04 20:05:56 -03:00
|
|
|
|
const action = response.match(/Action: (.*)/)?.[1];
|
|
|
|
|
if (action) {
|
|
|
|
|
// execute the action specified by the LLMs
|
|
|
|
|
const actionInput = response.match(/Action Input: "?(.*)"?/)?.[1];
|
|
|
|
|
const text = '';
|
|
|
|
|
|
|
|
|
|
const result = await GBVMService.callVM(actionInput, min, false, pid,false, [text]);
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
prompt += `Observation: ${result}\n`;
|
|
|
|
|
} else {
|
|
|
|
|
return response.match(/Final Answer: (.*)/)?.[1];
|
|
|
|
|
}
|
|
|
|
|
}
|
2024-02-05 12:36:20 -03:00
|
|
|
|
return { answer: undefined, questionId: 0 };
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
2023-08-02 13:58:11 -03:00
|
|
|
|
}
|