new(gpt.gblib): GPT Tools and .gbdialog.

This commit is contained in:
Rodrigo Rodriguez 2024-03-13 20:12:05 -03:00
parent e4a4c127c9
commit ce36ac476e
4 changed files with 69 additions and 80 deletions

1
.vscode/launch.json vendored
View file

@@ -6,6 +6,7 @@
"request": "launch", "request": "launch",
"sourceMaps": true, "sourceMaps": true,
"name": "Debug Program", "name": "Debug Program",
"runtimeExecutable": "/root/.nvm/versions/node/v19.9.0/bin/node",
"program": "${workspaceRoot}/boot.mjs", "program": "${workspaceRoot}/boot.mjs",
"cwd": "${workspaceRoot}", "cwd": "${workspaceRoot}",
"env": { "env": {

View file

@@ -1,24 +1,23 @@
#!/usr/bin/env node #!/usr/bin/env node
import chalkAnimation from 'chalk-animation';
import Fs from 'fs'; import Fs from 'fs';
import os from 'node:os';
import Path from 'path'; import Path from 'path';
import { exec } from 'child_process'; import { exec } from 'child_process';
import pjson from './package.json' assert { type: 'json' }; import pjson from './package.json' assert { type: 'json' };
import { GBUtil } from './dist/src/util.js';
// Displays version of Node JS being used at runtime and others attributes. // Displays version of Node JS being used at runtime and others attributes.
console.log(`General Bots is loading source code files...`); process.stdout.write(`General Bots. BotServer@${pjson.version}, botlib@${pjson.dependencies.botlib}, botbuilder@${pjson.dependencies.botbuilder}, node@${process.version.replace('v', '')}, ${process.platform} ${process.arch} `);
os.setPriority(process.pid, os.constants.priority.PRIORITY_HIGHEST);
console.log(`\nLoading virtual machine source code files...`);
var __dirname = process.env.PWD || process.cwd(); var __dirname = process.env.PWD || process.cwd();
try { try {
var run = () => { var run = () => {
import('./dist/src/app.js').then((gb)=> { import('./dist/src/app.js').then((gb)=> {
console.log(`\n`);
process.stdout.write(`${pjson.version}, botlib@${pjson.dependencies.botlib}, botbuilder@${pjson.dependencies.botbuilder}, node@${process.version.replace('v', '')}, ${process.platform} ${process.arch} `);
console.log(`\n`);
gb.GBServer.run() gb.GBServer.run()
}); });
}; };

View file

@@ -56,8 +56,8 @@ import MicrosoftGraph from '@microsoft/microsoft-graph-client';
import { GBLogEx } from './GBLogEx.js'; import { GBLogEx } from './GBLogEx.js';
import { DialogKeywords } from '../../basic.gblib/services/DialogKeywords.js'; import { DialogKeywords } from '../../basic.gblib/services/DialogKeywords.js';
import { GBUtil } from '../../../src/util.js'; import { GBUtil } from '../../../src/util.js';
import { HNSWLib } from 'langchain/vectorstores/hnswlib'; import { HNSWLib } from '@langchain/community/vectorstores/hnswlib';
import { OpenAIEmbeddings } from 'langchain/embeddings/openai'; import { OpenAIEmbeddings } from '@langchain/openai';
/** /**
* Deployer service for bots, themes, ai and more. * Deployer service for bots, themes, ai and more.

View file

@@ -48,16 +48,6 @@ import { GBConfigService } from '../../core.gbapp/services/GBConfigService.js';
import { GuaribasSubject } from '../../kb.gbapp/models/index.js'; import { GuaribasSubject } from '../../kb.gbapp/models/index.js';
import { z } from "zod"; import { z } from "zod";
import { DynamicStructuredTool } from "@langchain/core/tools"; import { DynamicStructuredTool } from "@langchain/core/tools";
import { JsonOutputToolsParser } from "langchain/output_parsers";
import {
RunnableLambda,
RunnablePassthrough,
} from "@langchain/core/runnables";
import {
CombiningOutputParser,
} from "langchain/output_parsers";
import { import {
BaseLLMOutputParser, BaseLLMOutputParser,
OutputParserException, OutputParserException,
@@ -65,8 +55,6 @@ import {
import { ChatGeneration, Generation } from "@langchain/core/outputs"; import { ChatGeneration, Generation } from "@langchain/core/outputs";
export interface CustomOutputParserFields { } export interface CustomOutputParserFields { }
// This can be more generic, like Record<string, string>
export type ExpectedOutput = string; export type ExpectedOutput = string;
function isChatGeneration( function isChatGeneration(
@@ -78,8 +66,13 @@ function isChatGeneration(
export class CustomLLMOutputParser extends BaseLLMOutputParser<ExpectedOutput> { export class CustomLLMOutputParser extends BaseLLMOutputParser<ExpectedOutput> {
lc_namespace = ["langchain", "output_parsers"]; lc_namespace = ["langchain", "output_parsers"];
constructor(fields?: CustomOutputParserFields) { private toolChain: RunnableSequence
super(fields); private documentChain: RunnableSequence;
constructor( toolChain: RunnableSequence, documentChain: RunnableSequence) {
super();
this.toolChain = toolChain;
this.documentChain = documentChain;
} }
async parseResult( async parseResult(
@@ -92,15 +85,19 @@ export class CustomLLMOutputParser extends BaseLLMOutputParser<ExpectedOutput> {
} }
let parsedOutput; let parsedOutput;
if (llmOutputs[0]['message'].lc_kwargs.additional_kwargs.tool_calls) {
this.toolChain.invoke({func: llmOutputs[0]['message'].lc_kwargs.additional_kwargs.tool_calls});
}
if (isChatGeneration(llmOutputs[0])) { if (isChatGeneration(llmOutputs[0])) {
parsedOutput = llmOutputs[0].message.content; parsedOutput = llmOutputs[0].message.content;
} else { } else {
parsedOutput = llmOutputs[0].text; parsedOutput = llmOutputs[0].text;
} }
let parsedText;
parsedText = parsedOutput; this.documentChain.invoke(parsedOutput);
return parsedText;
return ``;
} }
} }
@@ -153,12 +150,8 @@ export class ChatServices {
GBConfigService.get('DEFAULT_CONTENT_LANGUAGE') GBConfigService.get('DEFAULT_CONTENT_LANGUAGE')
); );
let tools = await ChatServices.getTools(min);
let toolsAsText = ChatServices.getToolsAsText(tools);
const toolMap: Record<string, any> = { const context = min['vectorStore'];
multiply: tools[0]
};
const memory = new BufferWindowMemory({ const memory = new BufferWindowMemory({
returnMessages: true, returnMessages: true,
@@ -173,41 +166,17 @@ export class ChatServices {
temperature: 0, temperature: 0,
}); });
const context = min['vectorStore'];
let tools = await ChatServices.getTools(min);
let toolsAsText = ChatServices.getToolsAsText(tools);
const modelWithTools = model.bind({ const modelWithTools = model.bind({
tools: tools.map(convertToOpenAITool) tools: tools.map(convertToOpenAITool)
}); });
// Function for dynamically constructing the end of the chain based on the model-selected tool.
const callSelectedTool = RunnableLambda.from(
(toolInvocation: Record<string, any>) => {
const selectedTool = toolMap[toolInvocation.type];
if (!selectedTool) {
throw new Error(
`No matching tool available for requested type "${toolInvocation.type}".`
);
}
const toolCallChain = RunnableSequence.from([
(toolInvocation) => toolInvocation.args,
selectedTool,
]);
// We use `RunnablePassthrough.assign` here to return the intermediate `toolInvocation` params
// as well, but you can omit if you only care about the answer.
return RunnablePassthrough.assign({
output: toolCallChain,
});
},
);
const questionGeneratorTemplate = ChatPromptTemplate.fromMessages([ const questionGeneratorTemplate = ChatPromptTemplate.fromMessages([
AIMessagePromptTemplate.fromTemplate( AIMessagePromptTemplate.fromTemplate(
`Answer the question without calling any tool, but if there is a need to call: `Answer the question without calling any tool, but if there is a need to call:
You have access to the following set of tools. Here are the names and descriptions for each tool: You have access to the following set of tools. Here are the names and descriptions for each tool:
${toolsAsText} ${toolsAsText}
` `
), ),
@@ -227,6 +196,26 @@ export class ChatServices {
HumanMessagePromptTemplate.fromTemplate("Question: {question}"), HumanMessagePromptTemplate.fromTemplate("Question: {question}"),
]); ]);
const callToolChain = RunnableSequence.from([
{
func: async (output: object) =>{
const pid = 1;
const name = output['func'][0].function.name;
const args = JSON.parse(output['func'][0].function.arguments);
return await GBVMService.callVM(name, min, false, pid, false, args);
},
chat_history: async () => {
const { chat_history } = await memory.loadMemoryVariables({});
return chat_history;
},
},
new StringOutputParser()
]);
const combineDocumentsChain = RunnableSequence.from([ const combineDocumentsChain = RunnableSequence.from([
{ {
question: (output: string) => output, question: (output: string) => output,
@@ -254,7 +243,7 @@ export class ChatServices {
}, },
questionGeneratorTemplate, questionGeneratorTemplate,
modelWithTools, modelWithTools,
new CustomLLMOutputParser() new CustomLLMOutputParser(callToolChain, combineDocumentsChain)
]); ]);
const systemPrompt = user['systemPrompt']; const systemPrompt = user['systemPrompt'];
@@ -262,6 +251,13 @@ export class ChatServices {
let result = await conversationalQaChain.invoke({ let result = await conversationalQaChain.invoke({
question, question,
}); });
if (result['name']) {
const func = result['func'];
await func.func(min, result['args']);
} else {
// await memory.saveContext( // await memory.saveContext(
// { // {
// input: query, // input: query,
@@ -270,11 +266,12 @@ export class ChatServices {
// output: result, // output: result,
// } // }
// ); // );
GBLog.info(`GPT Result: ${result.toString()}`); GBLog.info(`GPT Result: ${result.toString()}`);
return { answer: result.toString(), questionId: 0 }; return { answer: result.toString(), questionId: 0 };
} }
}
private static getToolsAsText(tools) { private static getToolsAsText(tools) {
return Object.keys(tools) return Object.keys(tools)
@@ -293,14 +290,6 @@ export class ChatServices {
if (Fs.existsSync(functionJSON)) { if (Fs.existsSync(functionJSON)) {
const func = JSON.parse(Fs.readFileSync(functionJSON, 'utf8')); const func = JSON.parse(Fs.readFileSync(functionJSON, 'utf8'));
func.schema = jsonSchemaToZod(func.properties, { module: "esm" }); func.schema = jsonSchemaToZod(func.properties, { module: "esm" });
func.func = async () => {
const name = '';
const pid = 1;
const text = ''; // TODO:
const result = await GBVMService.callVM(name, min, false, pid, false, [text]);
}
functions.push(func); functions.push(func);
} }