new(gpt.gblib): GPT Tools and .gbdialog.
commit 359c1beb02
parent a0c3481c7d
7 changed files with 192 additions and 98 deletions
@@ -1,3 +1,4 @@
 /*****************************************************************************\
 | █████ █████ ██ █ █████ █████ ████ ██ ████ █████ █████ ███ ® |
 | ██ █ ███ █ █ ██ ██ ██ ██ ██ ██ █ ██ ██ █ █ |
@@ -1249,25 +1249,33 @@ export class DialogKeywords {
     const { min, user } = await DialogKeywords.getProcessInfo(pid);
     GBLogEx.info(min,`MESSAGE BOT: ${text}.`);

-    const conversation = min['apiConversations'][pid];
+    const { conversation, client} = min['apiConversations'][pid];

-    conversation.client.apis.Conversations.Conversations_PostActivity({
+    await client.apis.Conversations.Conversations_PostActivity({
       conversationId: conversation.conversationId,
       activity: {
         textFormat: 'plain',
         text: text,
         type: 'message',
         from: {
-          id: 'word',
-          name: 'word'
+          id: user.userSystemId,
+          name: user.userName
         }
       }
     });
-    const watermarkMap = conversation.watermarkMap;

     let messages = [];

-    const response = await conversation.client.apis.Conversations.Conversations_GetActivities({
+    GBLog.info(`MessageBot: Starting message polling ${conversation.conversationId}).`);
+
+    const worker = async () => {
+      try {
+        const response = await client.apis.Conversations.Conversations_GetActivities({
           conversationId: conversation.conversationId,
           watermark: conversation.watermark
         });
@@ -1286,6 +1294,15 @@ export class DialogKeywords {
         }

         return messages.join('\n');

+      } catch (err) {
+        GBLog.error(
+          `Error calling printMessages API ${err.data === undefined ? err : err.data} ${err.errObj ? err.errObj.message : ''
+          }`
+        );
+      }
+    };
+    setInterval(worker, DEFAULT_HEAR_POLL_INTERVAL);
   }
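Taken together, the two hunks above turn MESSAGE BOT into a post-then-poll loop over the Direct Line 3.0 client stored in min['apiConversations']. A consolidated sketch of that pattern, assuming the same client and conversation objects as in the hunks (the interval constant here is an assumption, standing in for DEFAULT_HEAR_POLL_INTERVAL):

// Sketch only: post one activity, then poll for replies using a watermark.
const POLL_INTERVAL_MS = 3000; // assumed value; the real code uses DEFAULT_HEAR_POLL_INTERVAL

async function postAndPoll(client, conversation, user, text: string) {
  await client.apis.Conversations.Conversations_PostActivity({
    conversationId: conversation.conversationId,
    activity: {
      textFormat: 'plain',
      text: text,
      type: 'message',
      from: { id: user.userSystemId, name: user.userName }
    }
  });

  const worker = async () => {
    try {
      const response = await client.apis.Conversations.Conversations_GetActivities({
        conversationId: conversation.conversationId,
        watermark: conversation.watermark
      });
      // Keep the watermark so the next tick only sees new activities.
      conversation.watermark = response.obj.watermark;
    } catch (err) {
      // Transient polling errors are logged and retried on the next tick.
    }
  };
  setInterval(worker, POLL_INTERVAL_MS);
}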
@@ -645,6 +645,8 @@ export class GBVMService extends GBService {
           }
           catch(e){
             console.log(e);
+
+            reject ({message: e.message, name: e.name});
           }
           finally{

@@ -652,6 +654,8 @@ export class GBVMService extends GBService {

             await wa.closeHandles({pid: pid});
             await sys.closeHandles({pid: pid});
+
+            resolve(true);
           }
         })();
       `;
@@ -1085,8 +1089,9 @@ export class GBVMService extends GBService {
     try {
       if (GBConfigService.get('GBVM') === 'false') {
         return await (async () => {
-          return await new Promise(resolve => {
+          return await new Promise((resolve, reject) => {
             sandbox['resolve'] = resolve;
+            sandbox['reject'] = reject;
             const vm1 = new NodeVM({
               allowAsync: true,
               sandbox: sandbox,
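With this change the generated wrapper from the first GBVMService hunks can settle the outer promise either way: the sandbox now exposes both resolve and reject, the script body calls reject({message, name}) on error and resolve(true) after handles are closed. A minimal sketch of that wiring, assuming vm2's NodeVM as used in the hunk; the helper name is illustrative:

// Minimal sketch (assumption: vm2 NodeVM, as in the hunk above).
import { NodeVM } from 'vm2';

function runGeneratedScript(code: string, sandbox: any): Promise<any> {
  return new Promise((resolve, reject) => {
    // Expose the promise callbacks to the sandboxed code, mirroring the hunk.
    sandbox['resolve'] = resolve;
    sandbox['reject'] = reject;

    const vm1 = new NodeVM({ allowAsync: true, sandbox: sandbox });
    try {
      // The emitted wrapper ends by calling resolve(true) or reject({message, name}).
      vm1.run(code);
    } catch (e) {
      reject({ message: e.message, name: e.name });
    }
  });
}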
@@ -714,7 +714,7 @@ export class KeywordsExpressions {
     keywords[i++] = [
       /\= NEW OBJECT/gim,
       ($0, $1, $2, $3) => {
-        return ` = {pid: pid}`;
+        return ` = {}`;
       }
     ];

@@ -1440,16 +1440,19 @@ export class SystemKeywords {
     return ret;
   }

-  public static async setSystemPrompt({ pid, systemPrompt }) {
+  public async setSystemPrompt({ pid, text }) {

     let { min, user } = await DialogKeywords.getProcessInfo(pid);
     const sec = new SecService();

     if (user) {
-      user['systemPrompt'] = systemPrompt;
+      ChatServices.userSystemPrompt[user.userSystemId] = text;

       const path = DialogKeywords.getGBAIPath(min.botId);
       const systemPromptFile = urlJoin(process.cwd(), 'work', path, 'users',user.userSystemId, 'systemPrompt.txt');
-      Fs.writeFileSync(systemPromptFile, systemPrompt);
+      Fs.writeFileSync(systemPromptFile, text);
     }
   }

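The keyword now stores the prompt per user, both in memory (ChatServices.userSystemPrompt, keyed by userSystemId) and on disk under the bot's work folder, where answerByGPT picks it up on the next question. A hedged sketch of a caller; how the keyword is actually dispatched from a .gbdialog script is not shown in this hunk, so the instantiation and pid wiring below are assumptions:

// Sketch only: pid is assumed to come from an existing process.
const sys = new SystemKeywords();

await sys.setSystemPrompt({
  pid,
  text: 'You are a billing assistant. Keep replies short.'
});
// ChatServices.answerByGPT will read this from userSystemPrompt[user.userSystemId].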
@@ -455,8 +455,7 @@ export class GBMinService {
     if (!res) {
       return 'GeneralBots';
     }
-    if (req.body?.AccountSid)
-    {
+    if (req.body?.AccountSid) {
       return 'official';
     }
     return req.body.phone_id ? 'maytapi' : 'chatapi';
@@ -1027,7 +1026,7 @@ export class GBMinService {
     }

     let pid = step.context.activity['pid'];
-    if (!pid){
+    if (!pid) {
       pid = GBVMService.createProcessInfo(user, min, step.context.activity.channelId, null);
     }
     step.context.activity['pid'] = pid;
@@ -1421,7 +1420,7 @@ export class GBMinService {
       GBLog.info(
         `Auto start (whatsapp) dialog is now being called: ${startDialog} for ${min.instance.instanceId}...`
       );
-      await GBVMService.callVM(startDialog.toLowerCase(), min, step,pid);
+      await GBVMService.callVM(startDialog.toLowerCase(), min, step, pid);

       return;
     }
@@ -1613,8 +1612,40 @@ export class GBMinService {
     await CollectionUtil.asyncForEach(Object.values(min.scriptMap), async script => {

       dialogs[script] = async (data) => {
-        let params = JSON.parse(data);
-        return await GBVMService.callVM(script, min, null, params.pid, false, params);
+        let sec = new SecService();
+        const user = await sec.ensureUser(
+          min,
+          data.userSystemId,
+          data.userSystemId,
+          '',
+          'api',
+          data.userSystemId,
+          null
+        );
+
+        let pid = data?.pid;
+        if (script === 'start'){
+          pid = GBVMService.createProcessInfo(user, min, 'api', null);
+
+          const client = await new SwaggerClient({
+            spec: JSON.parse(Fs.readFileSync('directline-3.0.json', 'utf8')),
+            requestInterceptor: req => {
+              req.headers['Authorization'] = `Bearer ${min.instance.webchatKey}`;
+            }
+          });
+          const response = await client.apis.Conversations.Conversations_StartConversation();
+
+          min['apiConversations'][pid] = {conversation: response.obj, client: client};
+        }
+
+        let ret = await GBVMService.callVM(script, min, null, pid, false, data);
+
+        if (script === 'start')
+        {
+          ret = pid;
+        }
+        return ret;
       }
     });

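With this hunk every .gbdialog script in min.scriptMap becomes an API dialog: calling the start script ensures the user, creates an 'api' process and a Direct Line conversation, and returns the pid that later calls must pass back. A hedged sketch of the calling convention; how the dialogs map is exposed to external callers is outside this commit, and 'getBalance' is a hypothetical script name:

// Illustrative only: `dialogs` is the map built in the hunk above.
async function demo(dialogs: { [name: string]: (data: any) => Promise<any> }) {
  // 'start' ensures the user, opens a Direct Line conversation and returns the pid.
  const pid = await dialogs['start']({ userSystemId: 'user@example.com' });

  // Later calls pass that pid back so they run inside the same API conversation.
  const answer = await dialogs['getBalance']({ pid, userSystemId: 'user@example.com' });
  console.log(answer);
}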
@@ -69,7 +69,7 @@ export class CustomLLMOutputParser extends BaseLLMOutputParser<ExpectedOutput> {
   private toolChain: RunnableSequence
   private documentChain: RunnableSequence;

-  constructor( toolChain: RunnableSequence, documentChain: RunnableSequence) {
+  constructor(toolChain: RunnableSequence, documentChain: RunnableSequence) {
     super();
     this.toolChain = toolChain;
     this.documentChain = documentChain;
@@ -83,19 +83,19 @@ export class CustomLLMOutputParser extends BaseLLMOutputParser<ExpectedOutput> {
         "Output parser did not receive any generations."
       );
     }
-    let parsedOutput;
+    let result;

     if (llmOutputs[0]['message'].lc_kwargs.additional_kwargs.tool_calls) {
-      this.toolChain.invoke({func: llmOutputs[0]['message'].lc_kwargs.additional_kwargs.tool_calls});
+      return this.toolChain.invoke({ func: llmOutputs[0]['message'].lc_kwargs.additional_kwargs.tool_calls });
     }

     if (isChatGeneration(llmOutputs[0])) {
-      parsedOutput = llmOutputs[0].message.content;
+      result = llmOutputs[0].message.content;
     } else {
-      parsedOutput = llmOutputs[0].text;
+      result = llmOutputs[0].text;
     }

-    return this.documentChain.invoke(parsedOutput);
+    return this.documentChain ? this.documentChain.invoke(result) : result;

   }
 }
@@ -131,6 +131,8 @@ export class ChatServices {

   }

+  private static memoryMap = {};
+  public static userSystemPrompt = {};

   public static async answerByGPT(min: GBMinInstance, user, pid,
     question: string,
@@ -142,22 +144,24 @@ export class ChatServices {
       return { answer: undefined, questionId: 0 };
     }

     const contentLocale = min.core.getParam(
       min.instance,
       'Default Content Language',
       GBConfigService.get('DEFAULT_CONTENT_LANGUAGE')
     );

+    const docsContext = min['vectorStore'];
+
-    const context = min['vectorStore'];
-    const memory = new BufferWindowMemory({
+    if (!this.memoryMap[user.userSystemId]) {
+      this.memoryMap[user.userSystemId] = new BufferWindowMemory({
         returnMessages: true,
         memoryKey: 'chat_history',
         inputKey: 'input',
         k: 2,
-    });
+      })
+    }
+    const memory = this.memoryMap[user.userSystemId];
+    const systemPrompt = this.userSystemPrompt[user.userSystemId];

     const model = new ChatOpenAI({
       openAIApiKey: process.env.OPENAI_API_KEY,
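Memory is now cached per user instead of being rebuilt on every question, so chat_history survives across calls for the same userSystemId, and the per-user system prompt set by SET SYSTEM PROMPT is read from the new static map. A short sketch of the BufferWindowMemory API this relies on (window of k exchanges, loadMemoryVariables/saveContext); the example strings are made up:

// Sketch of the memory API used above (langchain's BufferWindowMemory).
import { BufferWindowMemory } from 'langchain/memory';

const memory = new BufferWindowMemory({
  returnMessages: true,      // return message objects, not a flat string
  memoryKey: 'chat_history', // key injected into prompts via MessagesPlaceholder
  inputKey: 'input',
  k: 2                       // keep only the last 2 exchanges
});

// After answering, store the turn so the next call sees it in chat_history.
await memory.saveContext({ input: 'What is my balance?' }, { output: 'R$ 120,00.' });
const { chat_history } = await memory.loadMemoryVariables({});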
@@ -168,15 +172,19 @@ export class ChatServices {

     let tools = await ChatServices.getTools(min);
     let toolsAsText = ChatServices.getToolsAsText(tools);

     const modelWithTools = model.bind({
       tools: tools.map(convertToOpenAITool)
     });

     const questionGeneratorTemplate = ChatPromptTemplate.fromMessages([
       AIMessagePromptTemplate.fromTemplate(
-        `Answer the question without calling any tool, but if there is a need to call:
+        `
+        Answer the question without calling any tool, but if there is a need to call:
         You have access to the following set of tools. Here are the names and descriptions for each tool:
         ${toolsAsText}
+
+        Do not use any previous tools output in the chat_history.
         `
       ),
       new MessagesPlaceholder("chat_history"),
@@ -184,10 +192,22 @@ export class ChatServices {
       Standalone question:`),
     ]);

+    const toolsResultPrompt = ChatPromptTemplate.fromMessages([
+      AIMessagePromptTemplate.fromTemplate(
+        `The tool just returned value in last call. Using {chat_history}
+        rephrase the answer to the user using this tool output.
+        `
+      ),
+      new MessagesPlaceholder("chat_history"),
+      AIMessagePromptTemplate.fromTemplate(`Tool output: {tool_output}
+      Standalone question:`),
+    ]);
+
     const combineDocumentsPrompt = ChatPromptTemplate.fromMessages([
       AIMessagePromptTemplate.fromTemplate(
-        `Use the following pieces of context to answer the question at the end.
+        `
         If you don't know the answer, just say that you don't know, don't try to make up an answer.
+        Use the following pieces, if any, of context to answer the question at the end.
         \n\n{context}\n\n
         `
       ),
@@ -197,13 +217,11 @@ export class ChatServices {

     const callToolChain = RunnableSequence.from([
       {
-        func: async (output: object) =>{
+        tool_output: async (output: object) => {
           const name = output['func'][0].function.name;
           const args = JSON.parse(output['func'][0].function.arguments);

           GBLog.info(`Running .gbdialog '${name}' as GPT tool...`);

           const pid = GBVMService.createProcessInfo(null, min, 'gpt', null);

           return await GBVMService.callVM(name, min, false, pid, false, args);
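callToolChain receives the raw tool_calls array emitted by the model (handed over as func by CustomLLMOutputParser) and maps the first call onto a .gbdialog script by name. For reference, a sketch of the OpenAI-style payload it unpacks; the field values are illustrative:

// Shape of one entry in additional_kwargs.tool_calls (values are made up).
const toolCall = {
  id: 'call_abc123',
  type: 'function',
  function: {
    name: 'getBalance',               // resolved to a .gbdialog script of the same name
    arguments: '{"accountId": "42"}'  // a JSON string, not an object
  }
};

const name = toolCall.function.name;
const args = JSON.parse(toolCall.function.arguments); // hence the JSON.parse in the hunk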
@@ -212,12 +230,13 @@ export class ChatServices {
           const { chat_history } = await memory.loadMemoryVariables({});
           return chat_history;
         },
       },
+      toolsResultPrompt,
+      model,
       new StringOutputParser()
     ]);

     const combineDocumentsChain = RunnableSequence.from([
       {
         question: (output: string) => output,
@@ -226,7 +245,8 @@ export class ChatServices {
           return chat_history;
         },
         context: async (output: string) => {
-          return await ChatServices.getRelevantContext(context, output, 1);
+          const c = await ChatServices.getRelevantContext(docsContext, output, 1);
+          return c ?? 'answer just with user question.';
         },
       },
@@ -245,34 +265,63 @@ export class ChatServices {
       },
       questionGeneratorTemplate,
       modelWithTools,
-      new CustomLLMOutputParser(callToolChain, combineDocumentsChain)
+      new CustomLLMOutputParser(callToolChain, docsContext?.docstore?._docs.length > 0 ? combineDocumentsChain : null),
+      new StringOutputParser()
     ]);

-    const systemPrompt = user['systemPrompt'];
-
-    let result = await conversationalQaChain.invoke({
+    const directChain = RunnableSequence.from([
+      {
+        question: (i: { question: string }) => {
+          return `
+          ${systemPrompt}
+
+          ${i.question}`
+        },
+        chat_history: async () => {
+          const { chat_history } = await memory.loadMemoryVariables({});
+          return chat_history;
+        },
+      },
+      modelWithTools,
+      new CustomLLMOutputParser(callToolChain, docsContext?.docstore?._docs.length > 0 ? combineDocumentsChain : null),
+      new StringOutputParser()
+    ]);
+
+    const direct = true;
+    let result;
+
+    if (direct) {
+      result = await (tools.length > 0 ? modelWithTools : model).invoke(`
+      ${systemPrompt}
+
+      ${question}`);
+
+      result = result.content;
+    }
+    else {
+      result = await (directChain ?? conversationalQaChain).invoke({
        question,
      });
+    }

-    if (result['name']) {
-      const func = result['func'];
-      await func.func(min, result['args']);
-
-    } else {
-
-      // await memory.saveContext(
-      //   {
-      //     input: query,
-      //   },
-      //   {
-      //     output: result,
-      //   }
-      // );
+    await memory.saveContext(
+      {
+        input: question,
+      },
+      {
+        output: result,
+      }
+    );

       GBLog.info(`GPT Result: ${result.toString()}`);
       return { answer: result.toString(), questionId: 0 };

-    }
   }

   private static getToolsAsText(tools) {
@@ -286,14 +335,17 @@ export class ChatServices {

     // Adds .gbdialog as functions if any to GPT Functions.
     await CollectionUtil.asyncForEach(Object.keys(min.scriptMap), async (script) => {

       const path = DialogKeywords.getGBAIPath(min.botId, "gbdialog", null);
       const jsonFile = Path.join('work', path, `${script}.json`);

-      if (Fs.existsSync(jsonFile)) {
+      if (Fs.existsSync(jsonFile) && script.toLowerCase() !== 'start.vbs') {

         const funcJSON = JSON.parse(Fs.readFileSync(jsonFile, 'utf8'));
         const funcObj = funcJSON?.function;

-        if (funcObj){
+        if (funcObj) {

           // TODO: Use ajv.
           funcObj.schema = eval(jsonSchemaToZod(funcObj.parameters));
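Each ${script}.json is expected to hold an OpenAI-style function definition under a function key, and jsonSchemaToZod converts its parameters JSON Schema into zod source code that is then eval'd into a runtime schema. A hedged sketch of that path; the concrete file contents and the DynamicStructuredTool wiring are assumptions, not shown in this hunk:

// Sketch only: illustrative tool definition and conversion, modeled on the calls above.
import { DynamicStructuredTool } from '@langchain/core/tools';
import { jsonSchemaToZod } from 'json-schema-to-zod';
import { z } from 'zod';

// Hypothetical contents of work/<path>/getBalance.json
const funcJSON = {
  function: {
    name: 'getBalance',
    description: 'Returns the current balance for an account.',
    parameters: {
      type: 'object',
      properties: { accountId: { type: 'string' } },
      required: ['accountId']
    }
  }
};

const funcObj = funcJSON.function;
// jsonSchemaToZod emits zod code as a string, hence the eval in the hunk.
const schema = eval(jsonSchemaToZod(funcObj.parameters));

const tool = new DynamicStructuredTool({
  name: funcObj.name,
  description: funcObj.description,
  schema,
  // The real implementation runs the matching .gbdialog via GBVMService.callVM.
  func: async (args) => JSON.stringify(args)
});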
@@ -304,21 +356,6 @@ export class ChatServices {

     });

-    const multiplyTool = new DynamicStructuredTool({
-      name: "multiply",
-      description: "Multiply two integers together.",
-      schema: z.object({
-        firstInt: z.number(),
-        secondInt: z.number(),
-      }),
-      func: async ({ firstInt, secondInt }) => {
-        return (firstInt * secondInt).toString();
-      },
-    });
-
-    functions.push(multiplyTool);
-
     return functions;
   }
 }