fix(kb.gbapp): #361 GPT functions in Word.
parent 853de9df17
commit 722e86c47d

3 changed files with 86 additions and 10 deletions
@@ -389,7 +389,8 @@ export class GBVMService extends GBService {
     step,
     user: GuaribasUser,
     deployer: GBDeployer,
-    debug: boolean = false
+    debug: boolean = false,
+    params = []
   ) {
     // Creates a class DialogKeywords which is the *this* pointer
     // in BASIC.
@@ -414,6 +415,14 @@ export class GBVMService extends GBService {
       }
     }
 
+    // Adds params as variables to be added later as global objects.
+
+    let keys = Object.keys(params);
+    for (let j = 0; j < keys.length; j++) {
+      variables[keys[j]] = params[keys[j]];
+    }
+
+
     const botId = min.botId;
     const path = DialogKeywords.getGBAIPath(min.botId, `gbdialog`);
     const gbdialogPath = urlJoin(process.cwd(), 'work', path);
@@ -489,7 +498,6 @@ export class GBVMService extends GBService {
       }
     } catch (error) {
       throw new Error(`BASIC RUNTIME ERR: ${error.message ? error.message : error}\n Stack:${error.stack}`);
     } finally {
     }
-
     return result;
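As a minimal sketch (not part of the diff), this is how the new `params` argument is meant to be used: the caller passes a plain object, and the loop above copies each key into `variables`, so the .gbdialog script sees them as ordinary globals. The import path, script name, and parameter values below are illustrative; the call shape mirrors the GBVMService.callVM(script, min, null, null, null, false, params) invocation added in GBMinService.ensureAPI further down.

    import { GBMinInstance } from 'botlib';
    // Illustrative import path; adjust to where GBVMService lives in this repo.
    import { GBVMService } from './GBVMService';

    // Run a .gbdialog script with named parameters; each key of `params`
    // is copied into the script's variables and is readable from BASIC.
    async function runDialogWithParams(min: GBMinInstance) {
      const params = { location: 'Rio de Janeiro', unit: 'celsius' }; // example values
      await GBVMService.callVM('start', min, null, null, null, false, params);
    }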
@@ -172,8 +172,6 @@ export class GBMinService {
       GBServer.globals.server.get('/instances/:botId', this.handleGetInstanceForClient.bind(this));
     }
 
-    await this.ensureAPI();
-
     // Calls mountBot event to all bots.
 
     let i = 1;
@@ -209,6 +207,10 @@ export class GBMinService {
       this.bar1.stop();
     }
 
+    // Loads API.
+
+    await this.ensureAPI();
+
     // Loads schedules.
 
     GBLog.info(`Loading SET SCHEDULE entries...`);
@@ -1474,8 +1476,7 @@ export class GBMinService {
 
   public async ensureAPI() {
 
-    const instances = GBServer.globals.minInstances;
-
+    const mins = GBServer.globals.minInstances;
 
     function getRemoteId(ctx: Koa.Context) {
       return '1'; // Each bot has its own API.
@@ -1500,16 +1501,35 @@ export class GBMinService {
 
     await close();
 
     let proxies = {};
-    await CollectionUtil.asyncForEach(instances, async instance => {
+    await CollectionUtil.asyncForEach(mins, async min => {
+
+      let dialogs = {};
+      await CollectionUtil.asyncForEach(Object.values(min.scriptMap), async script => {
+
+        const f = new Function()
+
+        dialogs[script] = async (data) => {
+          let params;
+          if (data) {
+            params = JSON.parse(data);
+          }
+
+          await GBVMService.callVM(script, min, null, null, null, false, params);
+        }
+      });
+
       const proxy = {
         dk: new DialogKeywords(),
         wa: new WebAutomationServices(),
         sys: new SystemKeywords(),
         dbg: new DebuggerService(),
-        img: new ImageProcessingServices()
+        img: new ImageProcessingServices(),
+        dialogs: dialogs
       };
-      proxies[instance.botId] = proxy;
+      proxies[min.botId] = proxy;
     });
 
     const opts = {
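For context, a sketch (not part of the diff) of what the new per-bot `dialogs` map enables: every script in `min.scriptMap` becomes an async function that accepts a JSON string, parses it into `params`, and forwards it to GBVMService.callVM. How the proxy is published over the API (the `opts` object and the RPC layer) is outside this hunk, so the example simply exercises the map directly; the bot id, script name, and payload are illustrative.

    // Call one of the generated dialog wrappers directly. In the real code path
    // `proxies` is built by ensureAPI and keyed by min.botId.
    async function callDialog(proxies: Record<string, any>, botId: string) {
      const dialogs = proxies[botId].dialogs;
      // The wrapper JSON.parses the payload and runs
      // GBVMService.callVM(script, min, null, null, null, false, params).
      await dialogs['start'](JSON.stringify({ location: 'Rio de Janeiro' }));
    }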
@@ -35,8 +35,55 @@
 import { GBMinInstance } from 'botlib';
 import OpenAI from "openai";
 import { ChatGPTAPIBrowser, getOpenAIAuth } from 'chatgpt'
+import { CollectionUtil } from 'pragmatismo-io-framework';
 
 export class ChatServices {
 
+  public static async sendMessage(min: GBMinInstance, text: string) {
+    let key;
+    if (process.env.OPENAI_KEY) {
+      key = process.env.OPENAI_KEY;
+    }
+    else {
+      key = min.core.getParam(min.instance, 'Open AI Key', null);
+    }
+
+    if (!key) {
+      throw new Error('Open AI Key not configured in .gbot.');
+    }
+    let functions = [];
+
+    await CollectionUtil.asyncForEach(Object.values(min.scriptMap), async script => {
+
+      functions.push({
+        "name": "get_current_weather",
+        "description": "Get the current weather in a given location",
+        "parameters": {
+          "type": "object",
+          "properties": {
+            "location": {
+              "type": "string",
+              "description": "The city and state, e.g. San Francisco, CA",
+            },
+            "unit": { "type": "string", "enum": ["celsius", "fahrenheit"] },
+          },
+          "required": ["location"],
+        }
+      });
+    });
+
+    const openai = new OpenAI({
+      apiKey: key
+    });
+    const chatCompletion = await openai.chat.completions.create({
+      model: "gpt-3.5-turbo",
+      messages: [{ role: "user", content: text }],
+      functions: functions
+    });
+    return chatCompletion.choices[0].message.content;
+  }
+
+
 /**
  * Generate text
  *
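The hunk above registers a placeholder get_current_weather schema for every entry in min.scriptMap and only returns message.content. As a hedged sketch of the step this commit points toward (not something the commit implements), a reply that carries a function_call could be routed into the same callVM path that ensureAPI uses, with the JSON arguments becoming script parameters; the import path and wiring below are illustrative.

    import OpenAI from "openai";
    import { GBMinInstance } from 'botlib';
    // Illustrative import path for GBVMService.
    import { GBVMService } from '../../basic.gblib/services/GBVMService';

    // Ask the model with function definitions; if it chooses one, dispatch it
    // to the corresponding .gbdialog script instead of returning text.
    async function completeWithFunctions(
      openai: OpenAI, min: GBMinInstance, text: string, functions: any[]
    ) {
      const completion = await openai.chat.completions.create({
        model: 'gpt-3.5-turbo',
        messages: [{ role: 'user', content: text }],
        functions: functions
      });

      const message = completion.choices[0].message;
      if (message.function_call) {
        // `arguments` is a JSON string of the parameters declared in the schema;
        // the function name would have to match a script name in min.scriptMap.
        const args = JSON.parse(message.function_call.arguments);
        await GBVMService.callVM(message.function_call.name, min, null, null, null, false, args);
        return null; // a follow-up completion could turn the script result into prose
      }
      return message.content;
    }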
@@ -62,7 +109,8 @@ export class ChatServices {
     });
     const chatCompletion = await openai.chat.completions.create({
       model: "gpt-3.5-turbo",
-      messages: [{ role: "user", content: text }],
+      messages: [{ role: "user", content: text }]
+
     });
     return chatCompletion.choices[0].message.content;
   }
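A minimal usage sketch for the new entry point (question text and import path illustrative): sendMessage prefers the OPENAI_KEY environment variable and falls back to the bot's 'Open AI Key' parameter, throwing before any OpenAI call when neither is set.

    import { GBMinInstance } from 'botlib';
    // Illustrative import path for the class changed above.
    import { ChatServices } from './ChatServices';

    // Send a user question through the bot's configured OpenAI key and
    // return the model's text answer.
    async function ask(min: GBMinInstance) {
      return await ChatServices.sendMessage(min, 'Summarize the open support tickets.');
    }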