/*****************************************************************************\
|  █████  █████ ██    █ █████ █████   ████  ██      ████   █████ █████  ███ ® |
|  █      █     ███   █ █     █   █  █      █      █    █  █   █   █   █      |
|  █ ███  ████  █ ██  █ ████  █████  █████  █      █    █  █████   █    ██    |
|  █   █  █     █  █ █  █     █   █      █  █      █    █  █   █   █      █   |
|   ████  █████ █   ██  █████ █   █  ████   █████   ████   █████   █   ███    |
|                                                                             |
| General Bots Copyright (c) pragmatismo.com.br. All rights reserved.         |
| Licensed under the AGPL-3.0.                                                |
|                                                                             |
| According to our dual licensing model, this program can be used either      |
| under the terms of the GNU Affero General Public License, version 3,        |
| or under a proprietary license.                                             |
|                                                                             |
| The texts of the GNU Affero General Public License with an additional       |
| permission and of our proprietary license can be found at and               |
| in the LICENSE file you have received along with this program.              |
|                                                                             |
| This program is distributed in the hope that it will be useful,             |
| but WITHOUT ANY WARRANTY, without even the implied warranty of              |
| MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the                |
| GNU Affero General Public License for more details.                         |
|                                                                             |
| "General Bots" is a registered trademark of pragmatismo.com.br.             |
| The licensing of the program under the AGPLv3 does not imply a              |
| trademark license. Therefore any rights, title and interest in              |
| our trademarks remain entirely with us.                                     |
|                                                                             |
\*****************************************************************************/
'use strict';

import { HNSWLib } from '@langchain/community/vectorstores/hnswlib';
import {
  BaseLLMOutputParser,
  OutputParserException,
  StringOutputParser
} from '@langchain/core/output_parsers';
import {
  AIMessagePromptTemplate,
  ChatPromptTemplate,
  HumanMessagePromptTemplate,
  MessagesPlaceholder
} from '@langchain/core/prompts';
import { RunnableSequence } from '@langchain/core/runnables';
import { convertToOpenAITool } from '@langchain/core/utils/function_calling';
import { ChatOpenAI } from '@langchain/openai';
import { GBLog, GBMinInstance } from 'botlib';
import * as Fs from 'fs';
import { jsonSchemaToZod } from 'json-schema-to-zod';
import { BufferWindowMemory } from 'langchain/memory';
import Path from 'path';
import { CollectionUtil } from 'pragmatismo-io-framework';
import { DialogKeywords } from '../../basic.gblib/services/DialogKeywords.js';
import { GBVMService } from '../../basic.gblib/services/GBVMService.js';
import { GBConfigService } from '../../core.gbapp/services/GBConfigService.js';
import { GuaribasSubject } from '../../kb.gbapp/models/index.js';
import { Serialized } from '@langchain/core/load/serializable';
import { BaseCallbackHandler } from '@langchain/core/callbacks/base';
import { pdfToPng, PngPageOutput } from 'pdf-to-png-converter';
import { DynamicStructuredTool } from '@langchain/core/tools';
import { WikipediaQueryRun } from '@langchain/community/tools/wikipedia_query_run';
import { ChatGeneration, Generation } from '@langchain/core/outputs';
import { GBAdminService } from '../../admin.gbapp/services/GBAdminService.js';
import { GBServer } from '../../../src/app.js';
import urlJoin from 'url-join';
import { getDocument } from 'pdfjs-dist/legacy/build/pdf.mjs';
export interface CustomOutputParserFields {}

export type ExpectedOutput = string;

function isChatGeneration(
  llmOutput: ChatGeneration | Generation
): llmOutput is ChatGeneration {
  return 'message' in llmOutput;
}
class CustomHandler extends BaseCallbackHandler {
  name = 'custom_handler';

  handleLLMNewToken(token: string) {
    GBLog.info(`LLM: token: ${JSON.stringify(token)}`);
  }

  handleLLMStart(llm: Serialized, _prompts: string[]) {
    GBLog.info(`LLM: handleLLMStart ${JSON.stringify(llm)}, Prompts: ${_prompts.join('\n')}`);
  }

  handleChainStart(chain: Serialized) {
    GBLog.info(`LLM: handleChainStart: ${JSON.stringify(chain)}`);
  }

  handleToolStart(tool: Serialized) {
    GBLog.info(`LLM: handleToolStart: ${JSON.stringify(tool)}`);
  }
}

const logHandler = new CustomHandler();
export class GBLLMOutputParser extends BaseLLMOutputParser<ExpectedOutput> {
  lc_namespace = ['langchain', 'output_parsers'];

  private toolChain: RunnableSequence;

  private documentChain: RunnableSequence;

  private min;

  constructor(min, toolChain: RunnableSequence, documentChain: RunnableSequence) {
    super();
    this.min = min;
    this.toolChain = toolChain;
    this.documentChain = documentChain;
  }
  async parseResult(
    llmOutputs: ChatGeneration[] | Generation[]
  ): Promise<ExpectedOutput> {
    if (!llmOutputs.length) {
      throw new OutputParserException(
        'Output parser did not receive any generations.'
      );
    }

    let result;

    // If the model requested a tool call, delegate to the tool chain.
    if (llmOutputs[0]['message'].lc_kwargs.additional_kwargs.tool_calls) {
      return this.toolChain.invoke({ func: llmOutputs[0]['message'].lc_kwargs.additional_kwargs.tool_calls });
    }

    if (isChatGeneration(llmOutputs[0])) {
      result = llmOutputs[0].message.content;
    } else {
      result = llmOutputs[0].text;
    }

    // Splits off the JSON block the document prompt asks the model to append,
    // returning the metadata and the remaining answer text separately.
    const naiveJSONFromText = (text) => {
      const match = text.match(/\{[\s\S]*\}/);
      if (!match) return null;

      try {
        return {
          metadata: JSON.parse(match[0]),
          text: text.replace(match[0], '')
        };
      } catch {
        return null;
      }
    };

    if (result) {
      const res = naiveJSONFromText(result);

      if (res) {
        const { metadata, text } = res;

        // The prompt asks the model for 'file' and 'page' keys; they are used
        // here to render the cited PDF page as an image.
        const { url } = await ChatServices.pdfPageAsImage(this.min, metadata.file, metadata.page);

        // Prepends the rendered page image to the user-visible answer,
        // keeping the JSON block out of it.
        result = `![alt text](${url})
${text}`;
      }
    }

    return result;
  }
}
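
// A minimal sketch (hypothetical values) of the model output that
// GBLLMOutputParser expects in document mode: the answer text followed by the
// JSON block requested by combineDocumentsPrompt below.
//
//   The warranty covers two years of repairs.
//   { "file": "manual.pdf", "page": 3 }
//
// parseResult() splits the JSON off, renders page 3 of manual.pdf as a PNG
// and prepends its public URL to the returned text.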
export class ChatServices {
  public static async pdfPageAsImage(min, filename, pageNumber) {
    const gbaiName = DialogKeywords.getGBAIPath(min.botId, 'gbkb');
    const localName = Path.join('work', gbaiName, 'docs', filename);

    // Converts the PDF to PNG.
    const pngPages: PngPageOutput[] = await pdfToPng(localName, {
      disableFontFace: true,
      useSystemFonts: true,
      viewportScale: 2.0,
      pagesToProcess: [pageNumber],
      strictPagesToProcess: false,
      verbosityLevel: 0
    });

    // Prepares an image on cache and returns the GBFILE information.
    if (pngPages.length > 0) {
      const buffer = pngPages[0].content;
      const gbaiName = DialogKeywords.getGBAIPath(min.botId, null);
      const localName = Path.join('work', gbaiName, 'cache', `img${GBAdminService.getRndReadableIdentifier()}.png`);
      const url = urlJoin(GBServer.globals.publicAddress, min.botId, 'cache', Path.basename(localName));

      Fs.writeFileSync(localName, buffer, { encoding: null });

      return { localName: localName, url: url, data: buffer };
    }
  }
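
  // A minimal usage sketch (hypothetical bot and file names): renders page 3
  // of a .gbkb document and serves it from the public cache.
  //
  //   const file = await ChatServices.pdfPageAsImage(min, 'manual.pdf', 3);
  //   // file => { localName: 'work/.../cache/img….png', url: 'https://…/cache/img….png', data: <Buffer> }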
  private static async getRelevantContext(
    vectorStore: HNSWLib,
    sanitizedQuestion: string,
    numDocuments: number = 10
  ): Promise<string> {
    if (sanitizedQuestion === '') {
      return '';
    }

    const documents = await vectorStore.similaritySearch(sanitizedQuestion, numDocuments);
    let output = '';

    await CollectionUtil.asyncForEach(documents, async (doc) => {
      const metadata = doc.metadata;
      const filename = Path.basename(metadata.source);
      const page = await ChatServices.findPageForText(doc.metadata.source, doc.pageContent);

      output = `${output}\n\n\n\nThe following context is coming from ${filename} at page: ${page},
      memorize this block among document information and return it when you are referring to this part of the content:\n\n\n\n${doc.pageContent}\n\n\n\n.`;
    });

    return output;
  }
  private static async findPageForText(pdfPath, searchText) {
    const data = new Uint8Array(Fs.readFileSync(pdfPath));
    const pdf = await getDocument({ data }).promise;

    // Whitespace is stripped on both sides so the match is layout-independent.
    searchText = searchText.replace(/\s/g, '');

    for (let i = 1; i <= pdf.numPages; i++) {
      const page = await pdf.getPage(i);
      const textContent = await page.getTextContent();
      const text = textContent.items.map(item => item['str']).join('').replace(/\s/g, '');

      if (text.includes(searchText)) return i;
    }

    return -1; // Text not found.
  }
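
  // A minimal usage sketch (hypothetical path and excerpt): returns the
  // 1-based page number containing the excerpt, or -1 when no page matches.
  //
  //   const page = await ChatServices.findPageForText(
  //     'work/mybot.gbai/mybot.gbkb/docs/manual.pdf', 'Warranty terms');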
  /**
   * Generates text.
   *
   * CONTINUE keyword.
   *
   * result = CONTINUE text
   *
   */
  public static async continue(min: GBMinInstance, question: string, chatId) {
  }
  private static memoryMap = {};
  public static userSystemPrompt = {};

  public static async answerByGPT(
    min: GBMinInstance,
    user,
    pid,
    question: string,
    searchScore: number,
    subjects: GuaribasSubject[]
  ) {
    if (!process.env.OPENAI_API_KEY) {
      return { answer: undefined, questionId: 0 };
    }

    const LLMMode = min.core.getParam(
      min.instance,
      'Answer Mode',
      'direct'
    );
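
    // 'Answer Mode' in Config.xlsx selects the chain used below: 'direct'
    // (plain model call), 'document' (vector-store context), 'function'
    // (.gbdialog tool calling) or 'full' (not implemented yet).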
    const docsContext = min['vectorStore'];

    if (!this.memoryMap[user.userSystemId]) {
      this.memoryMap[user.userSystemId] = new BufferWindowMemory({
        returnMessages: true,
        memoryKey: 'chat_history',
        inputKey: 'input',
        k: 2
      });
    }

    const memory = this.memoryMap[user.userSystemId];
    const systemPrompt = this.userSystemPrompt[user.userSystemId];

    const model = new ChatOpenAI({
      openAIApiKey: process.env.OPENAI_API_KEY,
      modelName: 'gpt-3.5-turbo-0125',
      temperature: 0,
      callbacks: [logHandler]
    });
    let tools = await ChatServices.getTools(min);
    let toolsAsText = ChatServices.getToolsAsText(tools);

    const modelWithTools = model.bind({
      tools: tools.map(convertToOpenAITool)
    });

    const questionGeneratorTemplate = ChatPromptTemplate.fromMessages([
      AIMessagePromptTemplate.fromTemplate(
        `
        Answer the question without calling any tool, but if a tool call is needed:
        You have access to the following set of tools.
        Here are the names and descriptions for each tool:

        ${toolsAsText}

        Do not use any previous tool output in the chat_history.
        `
      ),
      new MessagesPlaceholder('chat_history'),
      AIMessagePromptTemplate.fromTemplate(`Follow Up Input: {question}
      Standalone question:`),
    ]);

    const toolsResultPrompt = ChatPromptTemplate.fromMessages([
      AIMessagePromptTemplate.fromTemplate(
        `The tool has just returned a value in the last call. Using {chat_history},
        rephrase the answer to the user using this tool output.
        `
      ),
      new MessagesPlaceholder('chat_history'),
      AIMessagePromptTemplate.fromTemplate(`Tool output: {tool_output}
      Standalone question:`),
    ]);
    const combineDocumentsPrompt = ChatPromptTemplate.fromMessages([
      AIMessagePromptTemplate.fromTemplate(
        `
        This is a segmented context.
        VERY IMPORTANT: When responding, include the following information at the end of your message as JSON: 'file' indicating the PDF filename and 'page' indicating the page number. Example JSON format: "file": "filename.pdf", "page": 3, return valid JSON with brackets. Avoid explaining the context directly to the user; instead, refer to the document source.

        \n\n{context}\n\n

        And based on \n\n{chat_history}\n\n
        rephrase the response to the user using the aforementioned context. If you're unsure of the answer, simply state that you don't know. Do not invent an answer. Utilize any relevant context provided to answer the question effectively.
        `
      ),
      new MessagesPlaceholder('chat_history'),
      HumanMessagePromptTemplate.fromTemplate('Question: {question}'),
    ]);
    const callToolChain = RunnableSequence.from([
      {
        tool_output: async (output: object) => {
          const name = output['func'][0].function.name;
          const args = JSON.parse(output['func'][0].function.arguments);

          GBLog.info(`Running .gbdialog '${name}' as GPT tool...`);

          const pid = GBVMService.createProcessInfo(null, min, 'gpt', null);

          return await GBVMService.callVM(name, min, false, pid, false, args);
        },
        chat_history: async () => {
          const { chat_history } = await memory.loadMemoryVariables({});
          return chat_history;
        }
      },
      toolsResultPrompt,
      model,
      new StringOutputParser()
    ]);
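
    // A minimal sketch (hypothetical values) of the tool_calls payload that
    // callToolChain receives from GBLLMOutputParser, following the OpenAI
    // tool-call shape read above (function.name / function.arguments):
    //
    //   { func: [{ function: { name: 'SaveOrder', arguments: '{"id": 42}' } }] }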
    const combineDocumentsChain = RunnableSequence.from([
      {
        question: (question: string) => question,
        chat_history: async () => {
          const { chat_history } = await memory.loadMemoryVariables({});
          return chat_history;
        },
        context: async (output: string) => {
          const c = await ChatServices.getRelevantContext(docsContext, output);
          return `${systemPrompt}\n${c ? 'Use this context to answer:\n' + c : 'answer just with user question.'}`;
        }
      },
      combineDocumentsPrompt,
      model,
      new GBLLMOutputParser(min, null, null)
    ]);
    const conversationalToolChain = RunnableSequence.from([
      {
        question: (i: { question: string }) => i.question,
        chat_history: async () => {
          const { chat_history } = await memory.loadMemoryVariables({});
          return chat_history;
        }
      },
      questionGeneratorTemplate,
      modelWithTools,
      new GBLLMOutputParser(min, callToolChain, docsContext?.docstore?._docs.length > 0 ? combineDocumentsChain : null),
      new StringOutputParser()
    ]);
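
    // Data flow of the chain above: the question plus windowed chat_history
    // go through questionGeneratorTemplate into the tool-bound model;
    // GBLLMOutputParser then either runs callToolChain (when the model
    // requested a tool) or returns the plain answer text.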
    let result;

    // Chooses the answer-generation mode from the .gbot 'Answer Mode' switch
    // and runs the corresponding chain.
    if (LLMMode === 'direct') {
      result = await (tools.length > 0 ? modelWithTools : model).invoke(`
      ${systemPrompt}

      ${question}`);

      result = result.content;
    } else if (LLMMode === 'document') {
      result = await combineDocumentsChain.invoke(question);
    } else if (LLMMode === 'function') {
      result = await conversationalToolChain.invoke({
        question
      });
    } else if (LLMMode === 'full') {
      throw new Error('Not implemented.'); // TODO: #407.
    } else {
      GBLog.info(`Invalid Answer Mode in Config.xlsx: ${LLMMode}.`);
    }
    await memory.saveContext(
      {
        input: question
      },
      {
        output: result
      }
    );

    GBLog.info(`GPT Result: ${result.toString()}`);

    return { answer: result.toString(), questionId: 0 };
  }
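
  // A minimal usage sketch (hypothetical arguments): answers a user question
  // with the mode configured in Config.xlsx; searchScore and subjects are
  // accepted but not used by the current implementation.
  //
  //   const { answer } = await ChatServices.answerByGPT(
  //     min, user, pid, 'What does the warranty cover?', 0, []);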
  private static getToolsAsText(tools) {
    return Object.keys(tools)
      .map((toolname) => `- ${tools[toolname].name}: ${tools[toolname].description}`)
      .join('\n');
  }
  private static async getTools(min: GBMinInstance) {
    let functions = [];

    // Adds .gbdialog scripts, if any, as GPT tools.
    await CollectionUtil.asyncForEach(Object.keys(min.scriptMap), async (script) => {
      const path = DialogKeywords.getGBAIPath(min.botId, 'gbdialog', null);
      const jsonFile = Path.join('work', path, `${script}.json`);

      if (Fs.existsSync(jsonFile) && script.toLowerCase() !== 'start.vbs') {
        const funcJSON = JSON.parse(Fs.readFileSync(jsonFile, 'utf8'));
        const funcObj = funcJSON?.function;

        if (funcObj) {
          // TODO: Use ajv.
          funcObj.schema = eval(jsonSchemaToZod(funcObj.parameters));
          functions.push(new DynamicStructuredTool(funcObj));
        }
      }
    });
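
    // A minimal sketch (hypothetical content) of the `${script}.json`
    // descriptor read above: an OpenAI-style function definition whose
    // JSON-schema `parameters` are converted to a Zod schema for
    // DynamicStructuredTool.
    //
    //   {
    //     "function": {
    //       "name": "SaveOrder",
    //       "description": "Saves an order for the current customer.",
    //       "parameters": {
    //         "type": "object",
    //         "properties": { "id": { "type": "number" } },
    //         "required": ["id"]
    //       }
    //     }
    //   }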
    // Adds the Wikipedia search tool.
    const tool = new WikipediaQueryRun({
      topKResults: 3,
      maxDocContentLength: 4000
    });
    functions.push(tool);

    return functions;
  }
}