fix(basic.gblib): FUNCTION GPT.

This commit is contained in:
Rodrigo Rodriguez 2024-03-04 20:05:56 -03:00
parent 26b05aa394
commit 49af4cfab7
7 changed files with 183 additions and 180 deletions

View file

@ -82,6 +82,7 @@
"@google-cloud/translate": "7.0.4", "@google-cloud/translate": "7.0.4",
"@hubspot/api-client": "7.1.2", "@hubspot/api-client": "7.1.2",
"@koa/cors": "4.0.0", "@koa/cors": "4.0.0",
"@langchain/openai": "0.0.15",
"@microsoft/microsoft-graph-client": "3.0.4", "@microsoft/microsoft-graph-client": "3.0.4",
"@nlpjs/basic": "4.26.1", "@nlpjs/basic": "4.26.1",
"@nosferatu500/textract": "3.1.2", "@nosferatu500/textract": "3.1.2",
@ -211,7 +212,8 @@
"winston": "3.8.2", "winston": "3.8.2",
"winston-logs-display": "1.0.0", "winston-logs-display": "1.0.0",
"ws": "8.12.1", "ws": "8.12.1",
"yarn": "1.22.19" "yarn": "1.22.19",
"zod-to-json-schema": "^3.22.4"
}, },
"devDependencies": { "devDependencies": {
"@types/qrcode": "1.5.0", "@types/qrcode": "1.5.0",

View file

@ -425,14 +425,15 @@ export class AzureDeployerService implements IGBInstallationDeployer {
instance.storageDialect = 'mssql'; instance.storageDialect = 'mssql';
instance.storageServer = `${storageServer}.database.windows.net`; instance.storageServer = `${storageServer}.database.windows.net`;
GBLog.info(`Deploying Search...`); // TODO: Enable in .env
const searchName = `${name}-search`.toLowerCase(); // GBLog.info(`Deploying Search...`);
await this.createSearch(name, searchName, instance.cloudLocation); // const searchName = `${name}-search`.toLowerCase();
const searchKeys = await this.searchClient.adminKeys.get(name, searchName); // await this.createSearch(name, searchName, instance.cloudLocation);
instance.searchHost = `${searchName}.search.windows.net`; // const searchKeys = await this.searchClient.adminKeys.get(name, searchName);
instance.searchIndex = 'azuresql-index'; // instance.searchHost = `${searchName}.search.windows.net`;
instance.searchIndexer = 'azuresql-indexer'; // instance.searchIndex = 'azuresql-index';
instance.searchKey = searchKeys.primaryKey; // instance.searchIndexer = 'azuresql-indexer';
// instance.searchKey = searchKeys.primaryKey;
// GBLog.info(`Deploying Speech...`); // GBLog.info(`Deploying Speech...`);
// const speech = await this.createSpeech(name, `${name}speech`, instance.cloudLocation); // const speech = await this.createSpeech(name, `${name}speech`, instance.cloudLocation);

View file

@ -52,6 +52,8 @@ import { GBLogEx } from '../../core.gbapp/services/GBLogEx.js';
import { GuaribasUser } from '../../security.gbapp/models/index.js'; import { GuaribasUser } from '../../security.gbapp/models/index.js';
import { SystemKeywords } from './SystemKeywords.js'; import { SystemKeywords } from './SystemKeywords.js';
import { Sequelize, QueryTypes } from '@sequelize/core'; import { Sequelize, QueryTypes } from '@sequelize/core';
import { z } from "zod";
import { zodToJsonSchema } from "zod-to-json-schema";
/** /**
* @fileoverview Decision was to prioritize security(isolation) and debugging, * @fileoverview Decision was to prioritize security(isolation) and debugging,
@ -415,7 +417,7 @@ export class GBVMService extends GBService {
min.instance, min.instance,
'Synchronize Database', 'Synchronize Database',
false false
); );
if (sync && shouldSync) { if (sync && shouldSync) {
@ -641,7 +643,7 @@ export class GBVMService extends GBService {
try{ try{
await ensureTokens(true); await ensureTokens(true);
${ code } ${code}
} }
catch(e){ catch(e){
console.log(e); console.log(e);
@ -723,42 +725,55 @@ export class GBVMService extends GBService {
} }
public static getMetadata(mainName: string, propertiesText, description) { public static getMetadata(mainName: string, propertiesText, description) {
const properties = []; let properties;
if (!propertiesText) {
if (propertiesText) { return {}
const getType = asClause => {
if (asClause.indexOf('AS STRING')) {
return 'string';
} else {
return 'enum';
}
};
for (let i = 0; i < propertiesText.length; i++) {
const propertiesExp = propertiesText[i];
const t = getType(propertiesExp[2]);
let element = {};
element['type'] = t;
if (t === 'enum') {
element['enum'] = propertiesExp[2];
} else if (t === 'string') {
element['description'] = propertiesExp[2];
}
properties.push(element);
}
} }
const getType = asClause => {
let json = { if (asClause.indexOf('AS STRING') !== -1) {
name: `${mainName}`, return 'string';
description: description ? description[1] : '', }
parameters: { else if (asClause.indexOf('AS OBJECT') !== -1) {
type: 'object', return 'object';
properties: properties ? properties : [] }
else if (asClause.indexOf('AS INTEGER') !== -1 || asClause.indexOf('AS NUMBER') !== -1) {
return 'number';
} else {
return 'enum';
} }
}; };
for (let i = 0; i < propertiesText.length; i++) {
const propertiesExp = propertiesText[i];
const t = getType(propertiesExp[2]);
let element;
if (t === 'enum') {
element = z.enum(propertiesExp[2].split(','));
} else if (t === 'string') {
element = z.string();
} else if (t === 'object') {
element = z.quotelessJson({});
} else if (t === 'number') {
element = z.number();
}
element.describe(propertiesExp[3]);
element['type'] = t;
properties[propertiesExp[1]] = element;
}
let json = {
type: "function",
function: {
name: `${mainName}`,
description: description ? description[1] : '',
parameters: zodToJsonSchema(z.object(properties))
}
}
return json; return json;
} }
@ -766,15 +781,15 @@ export class GBVMService extends GBService {
let required = line.indexOf('*') !== -1; let required = line.indexOf('*') !== -1;
let unique = /\bunique\b/gi.test(line); let unique = /\bunique\b/gi.test(line);
let primaryKey = /\bkey\b/gi.test(line); let primaryKey = /\bkey\b/gi.test(line);
let autoIncrement = /\bauto\b/gi.test(line); let autoIncrement = /\bauto\b/gi.test(line);
if (primaryKey){ if (primaryKey) {
autoIncrement = true; autoIncrement = true;
unique = true; unique = true;
required = true; required = true;
} }
line = line.replace('*', ''); line = line.replace('*', '');
const fieldRegExp = /^\s*(\w+)\s*(\w+)(?:\((.*)\))?/gim; const fieldRegExp = /^\s*(\w+)\s*(\w+)(?:\((.*)\))?/gim;
@ -783,9 +798,11 @@ export class GBVMService extends GBService {
const name = reg[1]; const name = reg[1];
const t = reg[2]; const t = reg[2];
let definition = { allowNull: !required, let definition = {
unique: unique, primaryKey: primaryKey, allowNull: !required,
autoIncrement: autoIncrement }; unique: unique, primaryKey: primaryKey,
autoIncrement: autoIncrement
};
definition['type'] = t; definition['type'] = t;
if (reg[3]) { if (reg[3]) {
@ -829,7 +846,7 @@ export class GBVMService extends GBService {
line = line.replace(/^\s*\d+\s*/gi, ''); line = line.replace(/^\s*\d+\s*/gi, '');
if (!table && !talk){ if (!table && !talk) {
for (let j = 0; j < keywords.length; j++) { for (let j = 0; j < keywords.length; j++) {
line = line.replace(keywords[j][0], keywords[j][1]); // TODO: Investigate delay here. line = line.replace(keywords[j][0], keywords[j][1]); // TODO: Investigate delay here.
} }
@ -838,7 +855,7 @@ export class GBVMService extends GBService {
// Pre-process "off-line" static KEYWORDS. // Pre-process "off-line" static KEYWORDS.
let emmit = true; let emmit = true;
const params = /^\s*PARAM\s*(.*)\s*AS\s*(.*)/gim; const params = /^\s*PARAM\s*(.*)\s*AS\s*(.*)\s*LIKE\s*(.*)/gim;
const param = params.exec(line); const param = params.exec(line);
if (param) { if (param) {
properties.push(param); properties.push(param);
@ -856,7 +873,7 @@ export class GBVMService extends GBService {
let endTalkReg = endTalkKeyword.exec(line); let endTalkReg = endTalkKeyword.exec(line);
if (endTalkReg && talk) { if (endTalkReg && talk) {
line = talk + '`})'; line = talk + '`})';
talk = null; talk = null;
emmit = true; emmit = true;
} }
@ -912,8 +929,8 @@ export class GBVMService extends GBService {
let add = emmit ? line.split(/\r\n|\r|\n/).length : 0; let add = emmit ? line.split(/\r\n|\r|\n/).length : 0;
current = current + (add ? add : 0); current = current + (add ? add : 0);
if (emmit){ if (emmit) {
emmitIndex ++; emmitIndex++;
map[emmitIndex] = current; map[emmitIndex] = current;
outputLines[emmitIndex - 1] = line; outputLines[emmitIndex - 1] = line;
} }
@ -1000,15 +1017,15 @@ export class GBVMService extends GBService {
let code = min.sandBoxMap[text]; let code = min.sandBoxMap[text];
const channel = step ? step.context.activity.channelId : 'web'; const channel = step ? step.context.activity.channelId : 'web';
const dk = new DialogKeywords(); const dk = new DialogKeywords();
const sys = new SystemKeywords(); const sys = new SystemKeywords();
await dk.setFilter({ pid: pid, value: null }); await dk.setFilter({ pid: pid, value: null });
// Find all tokens in .gbot Config. // Find all tokens in .gbot Config.
const strFind = ' Client ID'; const strFind = ' Client ID';
const tokens = await min.core['findParam'](min.instance, strFind); const tokens = await min.core['findParam'](min.instance, strFind);
let tokensList = []; let tokensList = [];

View file

@ -1011,6 +1011,13 @@ export class GBMinService {
}); });
} }
} }
let pid = step.context.activity['pid'];
if (!pid){
pid = GBVMService.createProcessInfo(user, min, step.context.activity.channelId, null);
}
step.context.activity['pid'] = pid;
// Required for MSTEAMS handling of persisted conversations. // Required for MSTEAMS handling of persisted conversations.
if (step.context.activity.channelId === 'msteams') { if (step.context.activity.channelId === 'msteams') {
@ -1065,12 +1072,7 @@ export class GBMinService {
const startDialog = min.core.getParam(min.instance, 'Start Dialog', null); const startDialog = min.core.getParam(min.instance, 'Start Dialog', null);
let pid = step.context.activity['pid'];
if (!pid){
pid = GBVMService.createProcessInfo(user, min, step.context.activity.channelId, null);
}
step.context.activity['pid'] = pid;
if (context.activity.type === 'installationUpdate') { if (context.activity.type === 'installationUpdate') {
GBLog.info(`Bot installed on Teams.`); GBLog.info(`Bot installed on Teams.`);
} else if (context.activity.type === 'conversationUpdate' && context.activity.membersAdded.length > 0) { } else if (context.activity.type === 'conversationUpdate' && context.activity.membersAdded.length > 0) {
@ -1151,6 +1153,7 @@ export class GBMinService {
* Called to handle all event sent by .gbui clients. * Called to handle all event sent by .gbui clients.
*/ */
private async processEventActivity(min, user, context, step: GBDialogStep) { private async processEventActivity(min, user, context, step: GBDialogStep) {
const pid = step.context.activity['pid'];
if (context.activity.name === 'whoAmI') { if (context.activity.name === 'whoAmI') {
await step.beginDialog('/whoAmI'); await step.beginDialog('/whoAmI');
} else if (context.activity.name === 'showSubjects') { } else if (context.activity.name === 'showSubjects') {

View file

@ -31,11 +31,9 @@
'use strict'; 'use strict';
import { GBMinInstance } from 'botlib'; import { GBMinInstance } from 'botlib';
import OpenAI from "openai";
import { OpenAIChat } from 'langchain/llms/openai';
import { CallbackManager } from 'langchain/callbacks'; import { CallbackManager } from 'langchain/callbacks';
import { ChatPromptTemplate, HumanMessagePromptTemplate, SystemMessagePromptTemplate } from 'langchain/prompts'; import { ChatPromptTemplate, HumanMessagePromptTemplate, SystemMessagePromptTemplate } from 'langchain/prompts';
import { LLMChain } from 'langchain/chains'; import { ConversationChain, LLMChain } from 'langchain/chains';
import { BufferWindowMemory } from 'langchain/memory'; import { BufferWindowMemory } from 'langchain/memory';
import { CollectionUtil } from 'pragmatismo-io-framework'; import { CollectionUtil } from 'pragmatismo-io-framework';
import { DialogKeywords } from '../../basic.gblib/services/DialogKeywords.js'; import { DialogKeywords } from '../../basic.gblib/services/DialogKeywords.js';
@ -44,6 +42,10 @@ import * as Fs from 'fs';
import { HNSWLib } from 'langchain/vectorstores/hnswlib'; import { HNSWLib } from 'langchain/vectorstores/hnswlib';
import { GuaribasSubject } from '../../kb.gbapp/models/index.js'; import { GuaribasSubject } from '../../kb.gbapp/models/index.js';
import { GBConfigService } from '../../core.gbapp/services/GBConfigService.js'; import { GBConfigService } from '../../core.gbapp/services/GBConfigService.js';
import { ChatOpenAI } from "@langchain/openai";
import { JsonOutputFunctionsParser } from 'langchain/dist/output_parsers/openai_functions.js';
import { GBVMService } from '../../basic.gblib/services/GBVMService.js';
export class ChatServices { export class ChatServices {
@ -60,19 +62,39 @@ export class ChatServices {
.replaceAll('\n', ' '); .replaceAll('\n', ' ');
} }
/**
* Generate text
*
* CONTINUE keyword.
*
* result = CONTINUE text
*
*/
public static async continue(min: GBMinInstance, text: string, chatId) {
public static async sendMessage(min: GBMinInstance, text: string) { }
let key;
if (process.env.OPENAI_KEY) {
key = process.env.OPENAI_KEY; public static async answerByGPT(min: GBMinInstance, pid,
} query: string,
else { searchScore: number,
key = min.core.getParam(min.instance, 'Open AI Key', null); subjects: GuaribasSubject[]
) {
if (!process.env.OPENAI_KEY) {
return { answer: undefined, questionId: 0 };
} }
if (!key) { const systemPrompt = SystemMessagePromptTemplate.fromTemplate(
throw new Error('Open AI Key not configured in .gbot.'); `You are $${min.botId}`);
}
const contentLocale = min.core.getParam(
min.instance,
'Default Content Language',
GBConfigService.get('DEFAULT_CONTENT_LANGUAGE')
);
let functions = []; let functions = [];
// Adds .gbdialog as functions if any to GPT Functions. // Adds .gbdialog as functions if any to GPT Functions.
@ -88,82 +110,17 @@ export class ChatServices {
}); });
// Calls Model.
const openai = new OpenAI({ let SystemPromptTailored = ''; // TODO: Load from user context.
apiKey: key
});
const chatCompletion = await openai.chat.completions.create({
model: "gpt-3.5-turbo",
messages: [{ role: "user", content: text }],
functions: functions
});
return chatCompletion.choices[0].message.content;
}
// Generates function definition for each function
// in plain text to be used in system prompt.
let functionDef = Object.keys(functions)
.map((toolname) => `${toolname}: ${functions[toolname].description}`)
.join("\n");
/** let promptTemplate = `Answer in ${contentLocale}.
* Generate text
*
* CONTINUE keyword.
*
* result = CONTINUE text
*
*/
public static async continue(min: GBMinInstance, text: string, chatId) {
let key;
if (process.env.OPENAI_KEY) {
key = process.env.OPENAI_KEY;
}
else {
key = min.core.getParam(min.instance, 'Open AI Key', null);
}
if (!key) {
throw new Error('Open AI Key not configured in .gbot.');
}
// const openai = new OpenAI({
// apiKey: key
// });
// const chatCompletion = await openai.chat.completions.create({
// model: "gpt-3.5-turbo",
// messages: [{ role: "user", content: text }]
// });
// return chatCompletion.choices[0].message.content;
}
public static async answerByGPT(min: GBMinInstance,
query: string,
searchScore: number,
subjects: GuaribasSubject[]
) {
if (!process.env.OPENAI_KEY) {
return { answer: undefined, questionId: 0 };
}
const contextVectorStore = min['vectorStore'];
const question = query.trim().replaceAll('\n', ' ');
const context = await this.getRelevantContext(contextVectorStore, question, 1);
const systemPrompt = SystemMessagePromptTemplate.fromTemplate(
`You are $${min.botId}`);
const contentLocale = min.core.getParam(
min.instance,
'Default Content Language',
GBConfigService.get('DEFAULT_CONTENT_LANGUAGE')
);
const tools = ""// TODO: add func list.
const chatPrompt = ChatPromptTemplate.fromPromptMessages([
systemPrompt,
HumanMessagePromptTemplate.fromTemplate(`Answer in ${contentLocale}.
You have access to the context (RELEVANTDOCS) provided by the user. You have access to the context (RELEVANTDOCS) provided by the user.
When answering think about whether the question is in RELEVANTDOCS, but never mention When answering think about whether the question is in RELEVANTDOCS, but never mention
@ -174,16 +131,18 @@ export class ChatServices {
QUESTION: """{input}""" QUESTION: """{input}"""
${SystemPromptTailored}
You have the following tools that you can invoke based on the user inquiry. You have the following tools that you can invoke based on the user inquiry.
Tools: Tools:
${tools} ${functionDef}
`),
]);
`;
const chatPrompt = ChatPromptTemplate.fromPromptMessages([
systemPrompt,
HumanMessagePromptTemplate.fromTemplate(promptTemplate),]);
const windowMemory = new BufferWindowMemory({ const windowMemory = new BufferWindowMemory({
returnMessages: false, returnMessages: false,
@ -192,36 +151,53 @@ export class ChatServices {
k: 2, k: 2,
}); });
const callbackManager = CallbackManager.fromHandlers({ const llm = new ChatOpenAI({
// This function is called when the LLM generates a new token (i.e., a prediction for the next word) modelName: "gpt-3.5-turbo-0125",
async handleLLMNewToken(token: string) { temperature: 0,
},
}); });
const llm = new OpenAIChat({ const llmWithTools = llm.bind({
streaming: true, tools: functions
callbackManager,
modelName: 'gpt-3.5-turbo',
}); });
const chain = new LLMChain({ const chain = new LLMChain({
prompt: chatPrompt,
memory: windowMemory, memory: windowMemory,
llm, prompt: chatPrompt,
llm: llmWithTools as any,
}); });
const response = await chain.call({ const contextVectorStore = min['vectorStore'];
input: question, const question = query.trim().replaceAll('\n', ' ');
context, const context = await this.getRelevantContext(contextVectorStore, question, 1);
history: '',
immediate_history: '',
});
if (response) {
return { answer: response.text, questionId: 0 }; let prompt;
// allow the LLM to iterate until it finds a final answer
while (true) {
const response = await chain.call({
input: question,
context,
history: '',
immediate_history: '',
});
// add this to the prompt
prompt += response;
const action = response.match(/Action: (.*)/)?.[1];
if (action) {
// execute the action specified by the LLMs
const actionInput = response.match(/Action Input: "?(.*)"?/)?.[1];
const text = '';
const result = await GBVMService.callVM(actionInput, min, false, pid,false, [text]);
prompt += `Observation: ${result}\n`;
} else {
return response.match(/Final Answer: (.*)/)?.[1];
}
} }
return { answer: undefined, questionId: 0 }; return { answer: undefined, questionId: 0 };
} }

View file

@ -199,7 +199,8 @@ export class AskDialog extends IGBDialog {
if (!text && step.context.activity.channelId === 'msteams') { if (!text && step.context.activity.channelId === 'msteams') {
const startDialog = min.core.getParam(min.instance, 'Start Dialog', null); const startDialog = min.core.getParam(min.instance, 'Start Dialog', null);
if (startDialog) { if (startDialog) {
await GBVMService.callVM(startDialog.toLowerCase().trim(), min, step, user, this.deployer, false); const pid = step.context.activity['pid'];
await GBVMService.callVM(startDialog.toLowerCase().trim(), min, step, pid);
} }
return await step.endDialog(); return await step.endDialog();
@ -229,7 +230,7 @@ export class AskDialog extends IGBDialog {
// TODO: https://github.com/GeneralBots/BotServer/issues/9 user.lastQuestion = text; // TODO: https://github.com/GeneralBots/BotServer/issues/9 user.lastQuestion = text;
const resultsA = await service.ask(min, text, searchScore, null /* user.subjects */); const resultsA = await service.ask(min,step.context.activity['pid'], text, searchScore, null /* user.subjects */);
// If there is some result, answer immediately. // If there is some result, answer immediately.
@ -343,7 +344,8 @@ export class AskDialog extends IGBDialog {
if (text.endsWith('.docx')) { if (text.endsWith('.docx')) {
const mainName = GBVMService.getMethodNameFromVBSFilename(text); const mainName = GBVMService.getMethodNameFromVBSFilename(text);
await step.endDialog(); await step.endDialog();
return await GBVMService.callVM(mainName, min, step, user, this.deployer, false); const pid = step.context.activity['pid'];
return await GBVMService.callVM(mainName, min, step, pid);
} else if (text.startsWith('/')) { } else if (text.startsWith('/')) {
return await step.replaceDialog(text, { answer: answer }); return await step.replaceDialog(text, { answer: answer });
} else { } else {
@ -445,8 +447,9 @@ export class AskDialog extends IGBDialog {
); );
await step.endDialog(); await step.endDialog();
const pid = step.context.activity['pid'];
await GBVMService.callVM(dialogName.toLowerCase(), min, step, user, this.deployer, false); await GBVMService.callVM(dialogName.toLowerCase(), min, step, pid);
} }
} }
]; ];

View file

@ -269,6 +269,7 @@ export class KBService implements IGBKBService {
public async ask( public async ask(
min: GBMinInstance, min: GBMinInstance,
pid,
query: string, query: string,
searchScore: number, searchScore: number,
subjects: GuaribasSubject[] subjects: GuaribasSubject[]
@ -375,7 +376,7 @@ export class KBService implements IGBKBService {
returnedScore: ${returnedScore} < required (searchScore): ${searchScore}` returnedScore: ${returnedScore} < required (searchScore): ${searchScore}`
); );
return await ChatServices.answerByGPT(min, return await ChatServices.answerByGPT(min,pid,
query, query,
searchScore, searchScore,
subjects subjects