fix(basic.gblib): FUNCTION GPT.

This commit is contained in:
Rodrigo Rodriguez 2024-03-04 20:05:56 -03:00
parent 26b05aa394
commit 49af4cfab7
7 changed files with 183 additions and 180 deletions

View file

@ -82,6 +82,7 @@
"@google-cloud/translate": "7.0.4",
"@hubspot/api-client": "7.1.2",
"@koa/cors": "4.0.0",
"@langchain/openai": "0.0.15",
"@microsoft/microsoft-graph-client": "3.0.4",
"@nlpjs/basic": "4.26.1",
"@nosferatu500/textract": "3.1.2",
@ -211,7 +212,8 @@
"winston": "3.8.2",
"winston-logs-display": "1.0.0",
"ws": "8.12.1",
"yarn": "1.22.19"
"yarn": "1.22.19",
"zod-to-json-schema": "^3.22.4"
},
"devDependencies": {
"@types/qrcode": "1.5.0",

View file

@ -425,14 +425,15 @@ export class AzureDeployerService implements IGBInstallationDeployer {
instance.storageDialect = 'mssql';
instance.storageServer = `${storageServer}.database.windows.net`;
GBLog.info(`Deploying Search...`);
const searchName = `${name}-search`.toLowerCase();
await this.createSearch(name, searchName, instance.cloudLocation);
const searchKeys = await this.searchClient.adminKeys.get(name, searchName);
instance.searchHost = `${searchName}.search.windows.net`;
instance.searchIndex = 'azuresql-index';
instance.searchIndexer = 'azuresql-indexer';
instance.searchKey = searchKeys.primaryKey;
// TODO: Enable in .env
// GBLog.info(`Deploying Search...`);
// const searchName = `${name}-search`.toLowerCase();
// await this.createSearch(name, searchName, instance.cloudLocation);
// const searchKeys = await this.searchClient.adminKeys.get(name, searchName);
// instance.searchHost = `${searchName}.search.windows.net`;
// instance.searchIndex = 'azuresql-index';
// instance.searchIndexer = 'azuresql-indexer';
// instance.searchKey = searchKeys.primaryKey;
// GBLog.info(`Deploying Speech...`);
// const speech = await this.createSpeech(name, `${name}speech`, instance.cloudLocation);

View file

@ -52,6 +52,8 @@ import { GBLogEx } from '../../core.gbapp/services/GBLogEx.js';
import { GuaribasUser } from '../../security.gbapp/models/index.js';
import { SystemKeywords } from './SystemKeywords.js';
import { Sequelize, QueryTypes } from '@sequelize/core';
import { z } from "zod";
import { zodToJsonSchema } from "zod-to-json-schema";
/**
* @fileoverview Decision was to prioritize security (isolation) and debugging,
@ -415,7 +417,7 @@ export class GBVMService extends GBService {
min.instance,
'Synchronize Database',
false
);
);
if (sync && shouldSync) {
@ -641,7 +643,7 @@ export class GBVMService extends GBService {
try{
await ensureTokens(true);
${ code }
${code}
}
catch(e){
console.log(e);
@ -723,42 +725,55 @@ export class GBVMService extends GBService {
}
public static getMetadata(mainName: string, propertiesText, description) {
const properties = [];
let properties;
if (!propertiesText) {
if (propertiesText) {
const getType = asClause => {
if (asClause.indexOf('AS STRING')) {
return 'string';
} else {
return 'enum';
}
};
for (let i = 0; i < propertiesText.length; i++) {
const propertiesExp = propertiesText[i];
const t = getType(propertiesExp[2]);
let element = {};
element['type'] = t;
if (t === 'enum') {
element['enum'] = propertiesExp[2];
} else if (t === 'string') {
element['description'] = propertiesExp[2];
}
properties.push(element);
}
return {}
}
let json = {
name: `${mainName}`,
description: description ? description[1] : '',
parameters: {
type: 'object',
properties: properties ? properties : []
const getType = asClause => {
if (asClause.indexOf('AS STRING') !== -1) {
return 'string';
}
else if (asClause.indexOf('AS OBJECT') !== -1) {
return 'object';
}
else if (asClause.indexOf('AS INTEGER') !== -1 || asClause.indexOf('AS NUMBER') !== -1) {
return 'number';
} else {
return 'enum';
}
};
for (let i = 0; i < propertiesText.length; i++) {
const propertiesExp = propertiesText[i];
const t = getType(propertiesExp[2]);
let element;
if (t === 'enum') {
element = z.enum(propertiesExp[2].split(','));
} else if (t === 'string') {
element = z.string();
} else if (t === 'object') {
element = z.quotelessJson({});
} else if (t === 'number') {
element = z.number();
}
element.describe(propertiesExp[3]);
element['type'] = t;
properties[propertiesExp[1]] = element;
}
let json = {
type: "function",
function: {
name: `${mainName}`,
description: description ? description[1] : '',
parameters: zodToJsonSchema(z.object(properties))
}
}
return json;
}
@ -766,15 +781,15 @@ export class GBVMService extends GBService {
let required = line.indexOf('*') !== -1;
let unique = /\bunique\b/gi.test(line);
let primaryKey = /\bkey\b/gi.test(line);
let autoIncrement = /\bauto\b/gi.test(line);
let primaryKey = /\bkey\b/gi.test(line);
let autoIncrement = /\bauto\b/gi.test(line);
if (primaryKey){
if (primaryKey) {
autoIncrement = true;
unique = true;
required = true;
}
line = line.replace('*', '');
const fieldRegExp = /^\s*(\w+)\s*(\w+)(?:\((.*)\))?/gim;
@ -783,9 +798,11 @@ export class GBVMService extends GBService {
const name = reg[1];
const t = reg[2];
let definition = { allowNull: !required,
unique: unique, primaryKey: primaryKey,
autoIncrement: autoIncrement };
let definition = {
allowNull: !required,
unique: unique, primaryKey: primaryKey,
autoIncrement: autoIncrement
};
definition['type'] = t;
if (reg[3]) {
@ -829,7 +846,7 @@ export class GBVMService extends GBService {
line = line.replace(/^\s*\d+\s*/gi, '');
if (!table && !talk){
if (!table && !talk) {
for (let j = 0; j < keywords.length; j++) {
line = line.replace(keywords[j][0], keywords[j][1]); // TODO: Investigate delay here.
}
@ -838,7 +855,7 @@ export class GBVMService extends GBService {
// Pre-process "off-line" static KEYWORDS.
let emmit = true;
const params = /^\s*PARAM\s*(.*)\s*AS\s*(.*)/gim;
const params = /^\s*PARAM\s*(.*)\s*AS\s*(.*)\s*LIKE\s*(.*)/gim;
const param = params.exec(line);
if (param) {
properties.push(param);
@ -856,7 +873,7 @@ export class GBVMService extends GBService {
let endTalkReg = endTalkKeyword.exec(line);
if (endTalkReg && talk) {
line = talk + '`})';
talk = null;
emmit = true;
}
@ -912,8 +929,8 @@ export class GBVMService extends GBService {
let add = emmit ? line.split(/\r\n|\r|\n/).length : 0;
current = current + (add ? add : 0);
if (emmit){
emmitIndex ++;
if (emmit) {
emmitIndex++;
map[emmitIndex] = current;
outputLines[emmitIndex - 1] = line;
}
@ -1000,15 +1017,15 @@ export class GBVMService extends GBService {
let code = min.sandBoxMap[text];
const channel = step ? step.context.activity.channelId : 'web';
const dk = new DialogKeywords();
const sys = new SystemKeywords();
await dk.setFilter({ pid: pid, value: null });
// Find all tokens in .gbot Config.
const strFind = ' Client ID';
const tokens = await min.core['findParam'](min.instance, strFind);
let tokensList = [];

View file

@ -1011,6 +1011,13 @@ export class GBMinService {
});
}
}
let pid = step.context.activity['pid'];
if (!pid){
pid = GBVMService.createProcessInfo(user, min, step.context.activity.channelId, null);
}
step.context.activity['pid'] = pid;
// Required for MSTEAMS handling of persisted conversations.
if (step.context.activity.channelId === 'msteams') {
@ -1065,12 +1072,7 @@ export class GBMinService {
const startDialog = min.core.getParam(min.instance, 'Start Dialog', null);
let pid = step.context.activity['pid'];
if (!pid){
pid = GBVMService.createProcessInfo(user, min, step.context.activity.channelId, null);
}
step.context.activity['pid'] = pid;
if (context.activity.type === 'installationUpdate') {
GBLog.info(`Bot installed on Teams.`);
} else if (context.activity.type === 'conversationUpdate' && context.activity.membersAdded.length > 0) {
@ -1151,6 +1153,7 @@ export class GBMinService {
* Called to handle all event sent by .gbui clients.
*/
private async processEventActivity(min, user, context, step: GBDialogStep) {
const pid = step.context.activity['pid'];
if (context.activity.name === 'whoAmI') {
await step.beginDialog('/whoAmI');
} else if (context.activity.name === 'showSubjects') {

View file

@ -31,11 +31,9 @@
'use strict';
import { GBMinInstance } from 'botlib';
import OpenAI from "openai";
import { OpenAIChat } from 'langchain/llms/openai';
import { CallbackManager } from 'langchain/callbacks';
import { ChatPromptTemplate, HumanMessagePromptTemplate, SystemMessagePromptTemplate } from 'langchain/prompts';
import { LLMChain } from 'langchain/chains';
import { ConversationChain, LLMChain } from 'langchain/chains';
import { BufferWindowMemory } from 'langchain/memory';
import { CollectionUtil } from 'pragmatismo-io-framework';
import { DialogKeywords } from '../../basic.gblib/services/DialogKeywords.js';
@ -44,6 +42,10 @@ import * as Fs from 'fs';
import { HNSWLib } from 'langchain/vectorstores/hnswlib';
import { GuaribasSubject } from '../../kb.gbapp/models/index.js';
import { GBConfigService } from '../../core.gbapp/services/GBConfigService.js';
import { ChatOpenAI } from "@langchain/openai";
import { JsonOutputFunctionsParser } from 'langchain/dist/output_parsers/openai_functions.js';
import { GBVMService } from '../../basic.gblib/services/GBVMService.js';
export class ChatServices {
@ -60,19 +62,39 @@ export class ChatServices {
.replaceAll('\n', ' ');
}
/**
* Generate text
*
* CONTINUE keyword.
*
* result = CONTINUE text
*
*/
public static async continue(min: GBMinInstance, text: string, chatId) {
public static async sendMessage(min: GBMinInstance, text: string) {
let key;
if (process.env.OPENAI_KEY) {
key = process.env.OPENAI_KEY;
}
else {
key = min.core.getParam(min.instance, 'Open AI Key', null);
}
public static async answerByGPT(min: GBMinInstance, pid,
query: string,
searchScore: number,
subjects: GuaribasSubject[]
) {
if (!process.env.OPENAI_KEY) {
return { answer: undefined, questionId: 0 };
}
if (!key) {
throw new Error('Open AI Key not configured in .gbot.');
}
const systemPrompt = SystemMessagePromptTemplate.fromTemplate(
`You are $${min.botId}`);
const contentLocale = min.core.getParam(
min.instance,
'Default Content Language',
GBConfigService.get('DEFAULT_CONTENT_LANGUAGE')
);
let functions = [];
// Adds .gbdialog as functions if any to GPT Functions.
@ -88,82 +110,17 @@ export class ChatServices {
});
// Calls Model.
const openai = new OpenAI({
apiKey: key
});
const chatCompletion = await openai.chat.completions.create({
model: "gpt-3.5-turbo",
messages: [{ role: "user", content: text }],
functions: functions
});
return chatCompletion.choices[0].message.content;
}
let SystemPromptTailored = ''; // TODO: Load from user context.
// Generates function definition for each function
// in plain text to be used in system prompt.
let functionDef = Object.keys(functions)
.map((toolname) => `${toolname}: ${functions[toolname].description}`)
.join("\n");
/**
* Generate text
*
* CONTINUE keyword.
*
* result = CONTINUE text
*
*/
public static async continue(min: GBMinInstance, text: string, chatId) {
let key;
if (process.env.OPENAI_KEY) {
key = process.env.OPENAI_KEY;
}
else {
key = min.core.getParam(min.instance, 'Open AI Key', null);
}
if (!key) {
throw new Error('Open AI Key not configured in .gbot.');
}
// const openai = new OpenAI({
// apiKey: key
// });
// const chatCompletion = await openai.chat.completions.create({
// model: "gpt-3.5-turbo",
// messages: [{ role: "user", content: text }]
// });
// return chatCompletion.choices[0].message.content;
}
public static async answerByGPT(min: GBMinInstance,
query: string,
searchScore: number,
subjects: GuaribasSubject[]
) {
if (!process.env.OPENAI_KEY) {
return { answer: undefined, questionId: 0 };
}
const contextVectorStore = min['vectorStore'];
const question = query.trim().replaceAll('\n', ' ');
const context = await this.getRelevantContext(contextVectorStore, question, 1);
const systemPrompt = SystemMessagePromptTemplate.fromTemplate(
`You are $${min.botId}`);
const contentLocale = min.core.getParam(
min.instance,
'Default Content Language',
GBConfigService.get('DEFAULT_CONTENT_LANGUAGE')
);
const tools = ""// TODO: add func list.
const chatPrompt = ChatPromptTemplate.fromPromptMessages([
systemPrompt,
HumanMessagePromptTemplate.fromTemplate(`Answer in ${contentLocale}.
let promptTemplate = `Answer in ${contentLocale}.
You have access to the context (RELEVANTDOCS) provided by the user.
When answering think about whether the question in RELEVANTDOCS, but never mention
@ -174,16 +131,18 @@ export class ChatServices {
QUESTION: """{input}"""
${SystemPromptTailored}
You have the following tools that you can invoke based on the user inquiry.
Tools:
${tools}
`),
]);
${functionDef}
`;
const chatPrompt = ChatPromptTemplate.fromPromptMessages([
systemPrompt,
HumanMessagePromptTemplate.fromTemplate(promptTemplate),]);
const windowMemory = new BufferWindowMemory({
returnMessages: false,
@ -192,36 +151,53 @@ export class ChatServices {
k: 2,
});
const callbackManager = CallbackManager.fromHandlers({
// This function is called when the LLM generates a new token (i.e., a prediction for the next word)
async handleLLMNewToken(token: string) {
},
const llm = new ChatOpenAI({
modelName: "gpt-3.5-turbo-0125",
temperature: 0,
});
const llm = new OpenAIChat({
streaming: true,
callbackManager,
modelName: 'gpt-3.5-turbo',
const llmWithTools = llm.bind({
tools: functions
});
const chain = new LLMChain({
prompt: chatPrompt,
memory: windowMemory,
llm,
prompt: chatPrompt,
llm: llmWithTools as any,
});
const response = await chain.call({
input: question,
context,
history: '',
immediate_history: '',
});
if (response) {
const contextVectorStore = min['vectorStore'];
const question = query.trim().replaceAll('\n', ' ');
const context = await this.getRelevantContext(contextVectorStore, question, 1);
return { answer: response.text, questionId: 0 };
let prompt;
// allow the LLM to iterate until it finds a final answer
while (true) {
const response = await chain.call({
input: question,
context,
history: '',
immediate_history: '',
});
// add this to the prompt
prompt += response;
const action = response.match(/Action: (.*)/)?.[1];
if (action) {
// execute the action specified by the LLMs
const actionInput = response.match(/Action Input: "?(.*)"?/)?.[1];
const text = '';
const result = await GBVMService.callVM(actionInput, min, false, pid,false, [text]);
prompt += `Observation: ${result}\n`;
} else {
return response.match(/Final Answer: (.*)/)?.[1];
}
}
return { answer: undefined, questionId: 0 };
}

View file

@ -199,7 +199,8 @@ export class AskDialog extends IGBDialog {
if (!text && step.context.activity.channelId === 'msteams') {
const startDialog = min.core.getParam(min.instance, 'Start Dialog', null);
if (startDialog) {
await GBVMService.callVM(startDialog.toLowerCase().trim(), min, step, user, this.deployer, false);
const pid = step.context.activity['pid'];
await GBVMService.callVM(startDialog.toLowerCase().trim(), min, step, pid);
}
return await step.endDialog();
@ -229,7 +230,7 @@ export class AskDialog extends IGBDialog {
// TODO: https://github.com/GeneralBots/BotServer/issues/9 user.lastQuestion = text;
const resultsA = await service.ask(min, text, searchScore, null /* user.subjects */);
const resultsA = await service.ask(min,step.context.activity['pid'], text, searchScore, null /* user.subjects */);
// If there is some result, answer immediately.
@ -343,7 +344,8 @@ export class AskDialog extends IGBDialog {
if (text.endsWith('.docx')) {
const mainName = GBVMService.getMethodNameFromVBSFilename(text);
await step.endDialog();
return await GBVMService.callVM(mainName, min, step, user, this.deployer, false);
const pid = step.context.activity['pid'];
return await GBVMService.callVM(mainName, min, step, pid);
} else if (text.startsWith('/')) {
return await step.replaceDialog(text, { answer: answer });
} else {
@ -445,8 +447,9 @@ export class AskDialog extends IGBDialog {
);
await step.endDialog();
const pid = step.context.activity['pid'];
await GBVMService.callVM(dialogName.toLowerCase(), min, step, user, this.deployer, false);
await GBVMService.callVM(dialogName.toLowerCase(), min, step, pid);
}
}
];

View file

@ -269,6 +269,7 @@ export class KBService implements IGBKBService {
public async ask(
min: GBMinInstance,
pid,
query: string,
searchScore: number,
subjects: GuaribasSubject[]
@ -375,7 +376,7 @@ export class KBService implements IGBKBService {
returnedScore: ${returnedScore} < required (searchScore): ${searchScore}`
);
return await ChatServices.answerByGPT(min,
return await ChatServices.answerByGPT(min,pid,
query,
searchScore,
subjects