new(basic.gblib): GPT replacing ALLEN NLP Reading Comp.

parent ff6adacf9b
commit 3ddeabdf19

4 changed files with 43 additions and 3 deletions
@@ -119,7 +119,7 @@
     "express-remove-route": "1.0.0",
     "ffmpeg-static": "5.1.0",
     "google-libphonenumber": "3.2.31",
-    "googleapis": "109.0.1",
+    "googleapis": "126.0.1",
     "ibm-watson": "7.1.2",
     "join-images-updated": "1.1.4",
     "js-md5": "0.8.3",
@@ -128,6 +128,7 @@
     "koa": "2.13.4",
     "koa-body": "6.0.1",
     "koa-router": "12.0.0",
+    "langchain": "^0.0.163",
     "line-replace": "2.0.1",
     "lodash": "4.17.21",
     "luxon": "3.1.0",
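
The dependency changes above bump googleapis and pull in langchain 0.0.163, which supplies the LLM, prompt, memory and vector-store modules used in the hunks below. A minimal sketch of what the new dependency enables (not part of the commit; the prompt text is illustrative and OPENAI_API_KEY is assumed to be set in the environment):

// Minimal sketch only, not code from this commit.
// Assumes langchain@0.0.163 is installed and OPENAI_API_KEY is set.
import { OpenAIChat } from 'langchain/llms/openai';

async function main() {
  const llm = new OpenAIChat({ modelName: 'gpt-3.5-turbo' });
  const reply = await llm.call('Say hello from General Bots.'); // single-prompt completion
  console.log(reply);
}

main().catch(console.error);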
@@ -56,6 +56,8 @@ import MicrosoftGraph from '@microsoft/microsoft-graph-client';
 import { GBLogEx } from './GBLogEx.js';
 import { DialogKeywords } from '../../basic.gblib/services/DialogKeywords.js';
 import { GBUtil } from '../../../src/util.js';
+import { HNSWLib } from 'langchain/vectorstores/hnswlib.js';
+import { OpenAIEmbeddings } from 'langchain/embeddings/openai';

 /**
  * Deployer service for bots, themes, ai and more.
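
The two imports added to the deployer suggest it now builds or loads an HNSWLib vector store backed by OpenAI embeddings. A hedged sketch of how those two classes are typically combined in langchain 0.0.x; the document content and storage path below are illustrative assumptions, not taken from the commit:

// Hedged sketch, not the deployer's actual code.
import { HNSWLib } from 'langchain/vectorstores/hnswlib';
import { OpenAIEmbeddings } from 'langchain/embeddings/openai';
import { Document } from 'langchain/document';

async function buildStore() {
  // Hypothetical content and path, for illustration only.
  const docs = [new Document({ pageContent: 'General Bots is an open-source bot server.' })];
  const store = await HNSWLib.fromDocuments(docs, new OpenAIEmbeddings());
  await store.save('work/example.gbai/vectors');                         // persist the index to disk
  const reloaded = await HNSWLib.load('work/example.gbai/vectors', new OpenAIEmbeddings());
  return reloaded.similaritySearch('what is General Bots?', 1);          // top-1 nearest chunk
}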
@@ -230,7 +230,7 @@ export class AskDialog extends IGBDialog {

       // TODO: https://github.com/GeneralBots/BotServer/issues/9 user.lastQuestion = text;

-      const resultsA = await service.ask(min.instance, text, searchScore, null /* user.subjects */);
+      const resultsA = await service.ask(min, text, searchScore, null /* user.subjects */);

       // If there is some result, answer immediately.
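
The only change in AskDialog is the first argument to service.ask: the whole GBMinInstance is passed instead of min.instance, which lets KBService reach more than the bot's database record (for example, the per-bot vector store used by the GPT path further down). A hedged sketch of the call site after the change; the wrapper function, types and threshold are assumptions for orientation, not repository code:

// Hedged sketch; only the service.ask call mirrors the diff above.
async function answerQuestion(service: any, min: any /* GBMinInstance */, text: string) {
  const searchScore = 0.45; // assumed threshold, not taken from the commit
  const resultsA = await service.ask(min, text, searchScore, null /* user.subjects */);

  // If there is some result, answer immediately (see the context line above).
  return resultsA;
}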
@@ -34,12 +34,16 @@

 import Path from 'path';
 import Fs from 'fs';
+import { OpenAIChat } from 'langchain/llms/openai';
+import { CallbackManager } from 'langchain/callbacks';
 import urlJoin from 'url-join';
 import asyncPromise from 'async-promises';
 import walkPromise from 'walk-promise';
 import { SearchClient } from '@azure/search-documents';
 import Excel from 'exceljs';
 import getSlug from 'speakingurl';
+import { ChatPromptTemplate, HumanMessagePromptTemplate, SystemMessagePromptTemplate } from 'langchain/prompts';
+import { LLMChain } from 'langchain/chains';
 import { GBServer } from '../../../src/app.js';
 import { HNSWLib } from 'langchain/vectorstores/hnswlib';
 import { JSONLoader } from 'langchain/document_loaders/fs/json';
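
OpenAIChat and CallbackManager are imported so that answers can be streamed token by token. The hunk further down leaves handleLLMNewToken empty; a hedged sketch of how it is typically filled in langchain 0.0.x (the onToken forwarding target is an assumption, not the commit's code):

// Hedged sketch, not from this commit.
import { OpenAIChat } from 'langchain/llms/openai';
import { CallbackManager } from 'langchain/callbacks';

const onToken = (t: string) => process.stdout.write(t); // e.g. forward partial text to the channel

const llm = new OpenAIChat({
  modelName: 'gpt-3.5-turbo',
  streaming: true,
  callbackManager: CallbackManager.fromHandlers({
    async handleLLMNewToken(token: string) {
      onToken(token); // invoked once per generated token while streaming
    },
  }),
});

llm.call('Stream a short greeting.').catch(console.error);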
@@ -49,9 +53,9 @@ import { DocxLoader } from 'langchain/document_loaders/fs/docx';
 import { EPubLoader } from 'langchain/document_loaders/fs/epub';
 import { CSVLoader } from 'langchain/document_loaders/fs/csv';
 import { RecursiveCharacterTextSplitter } from 'langchain/text_splitter';
+import { BufferWindowMemory } from 'langchain/memory';
 import { Document } from 'langchain/document';
 import path from 'path';
 import { YoutubeTranscript } from 'youtube-transcript';

 import {
   GBDialogStep,
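
The document-loader and text-splitter imports (JSONLoader, DocxLoader, EPubLoader, CSVLoader, RecursiveCharacterTextSplitter) cover the ingestion side that is not visible in this diff: knowledge-base files are loaded, split into chunks, and embedded into the HNSWLib store that the new ask path queries. A hedged sketch of that pipeline under those assumptions; the file name and chunk sizes are illustrative only:

// Hedged sketch of the ingestion pipeline implied by the imports; not the commit's code.
import { DocxLoader } from 'langchain/document_loaders/fs/docx';
import { RecursiveCharacterTextSplitter } from 'langchain/text_splitter';
import { HNSWLib } from 'langchain/vectorstores/hnswlib';
import { OpenAIEmbeddings } from 'langchain/embeddings/openai';

async function indexDocument() {
  const rawDocs = await new DocxLoader('manual.docx').load();   // hypothetical source file
  const splitter = new RecursiveCharacterTextSplitter({ chunkSize: 1000, chunkOverlap: 100 });
  const chunks = await splitter.splitDocuments(rawDocs);
  return HNSWLib.fromDocuments(chunks, new OpenAIEmbeddings()); // store queried by the ask path
}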
@@ -395,6 +399,39 @@ export class KBService implements IGBKBService {
     const question = query.trim().replaceAll('\n', ' ');
+    const context = await this.getRelevantContext(contextVectorStore, question, 1);
+
+    const systemPrompt = SystemMessagePromptTemplate.fromTemplate(
+      "You are General Bots");
+
+    const chatPrompt = ChatPromptTemplate.fromPromptMessages([
+      systemPrompt,
+      HumanMessagePromptTemplate.fromTemplate('QUESTION: """{input}"""'),
+    ]);
+    const windowMemory = new BufferWindowMemory({
+      returnMessages: false,
+      memoryKey: 'immediate_history',
+      inputKey: 'input',
+      k: 2,
+    });
+
+    const callbackManager = CallbackManager.fromHandlers({
+      // This function is called when the LLM generates a new token (i.e., a prediction for the next word).
+      async handleLLMNewToken(token: string) {
+
+      },
+    });
+
+    const llm = new OpenAIChat({
+      streaming: true,
+      callbackManager,
+      modelName: 'gpt-3.5-turbo',
+    });
+
+    const chain = new LLMChain({
+      prompt: chatPrompt,
+      memory: windowMemory,
+      llm,
+    });
+
+    const response = await chain.call({
+      input: question,
+      context,
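
The hunk above is cut off after `context,`, so the end of the chain.call and the getRelevantContext helper it relies on are not shown. A hedged sketch of how both could look, assuming LLMChain's default output key 'text' and HNSWLib's similaritySearchWithScore; both members below are assumptions, not the commit's code. Note that the prompt template above only references {input}, so the retrieved context value reaches the model only if the template is extended with a {context} placeholder:

// Hedged sketch only; the commit's actual continuation is not visible in this diff.
import { LLMChain } from 'langchain/chains';
import { HNSWLib } from 'langchain/vectorstores/hnswlib';

class KBServiceSketch {
  // Assumed continuation of the ask path shown in the hunk above.
  async answerWithGPT(chain: LLMChain, question: string, context: string): Promise<string> {
    const response = await chain.call({
      input: question,
      context, // only reaches the model if the prompt template gains a {context} placeholder
    });
    return response.text; // LLMChain stores the completion under 'text' by default
  }

  // Assumed shape of the helper called at the top of the hunk.
  async getRelevantContext(store: HNSWLib, question: string, numDocuments: number): Promise<string> {
    const hits = await store.similaritySearchWithScore(question, numDocuments);
    return hits
      .map(([doc]) => doc.pageContent)
      .join('\n')
      .replaceAll('\n', ' ')
      .trim();
  }
}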