- LocalLLM Embedded.

Rodrigo Rodriguez (Pragmatismo) 2025-09-10 11:11:32 -03:00
parent b5bf44fd70
commit d7d6afa5cc
4 changed files with 195 additions and 121 deletions


@@ -270,21 +270,18 @@ export class GBDeployer implements IGBDeployer {
   * Verifies if bot exists on bot catalog.
   */
  public async botExists(botId: string): Promise<boolean> {
    if (GBConfigService.get('GB_MODE') !== 'legacy') {
      const where = { botId: botId };
      return (
        (await GuaribasInstance.findOne({
          where: where
        })) !== null
      );
    } else {
      const service = await AzureDeployerService.createInstance(this);
      return await service.botExists(botId);
    }
  }
@@ -339,33 +336,68 @@ export class GBDeployer implements IGBDeployer {
  public async loadOrCreateEmptyVectorStore(min: GBMinInstance): Promise<HNSWLib> {
    let vectorStore: HNSWLib;

    // Get AI mode (default to 'azure' for backward compatibility)
    const aiMode = (await (min.core as any)['getParam'](min.instance, 'AI Mode', 'azure', true)) || 'azure';

    let embedding;

    if (aiMode === 'local') {
      // Local embedding configuration
      const localEmbeddingEndpoint = await (min.core as any)['getParam'](
        min.instance,
        'Local Embedding Endpoint',
        'http://localhost:5858/v1',
        true
      );
      const localEmbeddingModel = await (min.core as any)['getParam'](
        min.instance,
        'Local Embedding Model',
        'model',
        true
      );

      if (!localEmbeddingEndpoint || !localEmbeddingModel) {
        GBLogEx.error(min, 'Local embedding configuration incomplete. Please set Local Embedding Endpoint and Model.');
        return;
      }

      embedding = new OpenAIEmbeddings({
        maxConcurrency: 5,
        openAIApiKey: 'null', // Required field but not used for local
        modelName: localEmbeddingModel,
        configuration: {
          baseURL: localEmbeddingEndpoint
        }
      } as any);
    } else {
      // Azure OpenAI configuration (original code)
      const azureOpenAIKey = await (min.core as any)['getParam'](min.instance, 'Azure Open AI Key', null, true);
      const azureOpenAIVersion = await (min.core as any)['getParam'](min.instance, 'Azure Open AI Version', null, true);
      const azureOpenAIApiInstanceName = await (min.core as any)['getParam'](
        min.instance,
        'Azure Open AI Instance',
        null,
        true
      );
      const azureOpenAIEmbeddingModel = await (min.core as any)['getParam'](
        min.instance,
        'Azure Open AI Embedding Model',
        null,
        true
      );

      if (!azureOpenAIEmbeddingModel) {
        return;
      }

      embedding = new OpenAIEmbeddings({
        maxConcurrency: 5,
        azureOpenAIApiKey: azureOpenAIKey,
        azureOpenAIApiDeploymentName: azureOpenAIEmbeddingModel,
        azureOpenAIApiVersion: azureOpenAIVersion,
        azureOpenAIApiInstanceName: azureOpenAIApiInstanceName
      });
    }

    try {
      vectorStore = await HNSWLib.load(min['vectorStorePath'], embedding);
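
Note: the local branch above is driven by three bot parameters read through getParam. A minimal sketch of the corresponding entries, assuming the usual name/value parameter sheet of the bot's .gbot package; the values shown are only the illustrative defaults from the code above, not part of this commit:

name,value
AI Mode,local
Local Embedding Endpoint,http://localhost:5858/v1
Local Embedding Model,model
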
@@ -376,7 +408,7 @@ export class GBDeployer implements IGBDeployer {
        {}, // Optional metadata
        embedding,
        {
          space: 'cosine'
        } as any
      );
      const dir = path.dirname(min['vectorStorePath']);
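
The local path assumes an OpenAI-compatible embeddings server (llama.cpp, vLLM, LM Studio or similar) behind the configured endpoint. A minimal TypeScript sketch, not part of this commit, to check that such a server answers before pointing OpenAIEmbeddings at it; the endpoint and model name are the illustrative defaults used above:

// Sketch only: probe an OpenAI-compatible /embeddings endpoint (Node 18+ global fetch).
async function probeLocalEmbeddings(text: string): Promise<number[]> {
  const response = await fetch('http://localhost:5858/v1/embeddings', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ model: 'model', input: text })
  });
  if (!response.ok) {
    throw new Error(`Embedding server returned HTTP ${response.status}`);
  }
  const payload = await response.json();
  // OpenAI-compatible servers reply with { data: [{ embedding: number[] }] }.
  return payload.data[0].embedding;
}
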
@@ -494,15 +526,16 @@ export class GBDeployer implements IGBDeployer {
      } else {
        return [];
      }
      1;
      await asyncPromise.eachSeries(rows, async (line: any) => {
        if (line && line.length > 0) {
          const key = line[1];
          let value = line[2];
          if (key && value) {
            if (value.text) {
              value = value.text;
            }
            obj[key] = value;
          }
        }
@@ -521,7 +554,8 @@ export class GBDeployer implements IGBDeployer {
    localPath: string,
    remotePath: string,
    baseUrl: string = null,
    client = null,
    onlyTextFiles = false
  ): Promise<any> {
    const storageMode = process.env.GB_MODE;
@@ -531,7 +565,7 @@ export class GBDeployer implements IGBDeployer {
        port: parseInt(process.env.DRIVE_PORT || '9000', 10),
        useSSL: process.env.DRIVE_USE_SSL === 'true',
        accessKey: process.env.DRIVE_ACCESSKEY,
        secretKey: process.env.DRIVE_SECRET
      });
      const bucketName = (process.env.DRIVE_ORG_PREFIX + min.botId + '.gbai').toLowerCase();
@@ -566,7 +600,6 @@ export class GBDeployer implements IGBDeployer {
          }
        }
        if (download) {
          await minioClient.fGetObject(bucketName, obj.name, itemPath);
          await fs.utimes(itemPath, new Date(), new Date(obj.lastModified));
@@ -585,7 +618,7 @@ export class GBDeployer implements IGBDeployer {
        await fs.mkdir(pathBase, { recursive: true });
      }
      await CollectionUtil.asyncForEach(parts, async item => {
        pathBase = path.join(pathBase, item);
        if (!(await GBUtil.exists(pathBase))) {
          await fs.mkdir(pathBase, { recursive: true });
@@ -609,7 +642,7 @@ export class GBDeployer implements IGBDeployer {
        return null;
      }
      await CollectionUtil.asyncForEach(documents, async item => {
        const itemPath = path.join(localPath, remotePath, item.name);
        if (item.folder) {
@@ -639,7 +672,6 @@ export class GBDeployer implements IGBDeployer {
        });
      }
    }
  }

  /**
@@ -699,11 +731,9 @@ export class GBDeployer implements IGBDeployer {
        await GBUtil.copyIfNewerRecursive(filePath, packageWorkFolder, false);
      }
    } else {
      if (packageType === '.gbdrive' || packageType === '.gbdata') {
        await this.downloadFolder(min, path.join('work', `${gbai}`), packageName, undefined, undefined, true);
      } else {
        await this.downloadFolder(min, path.join('work', `${gbai}`), packageName);
      }
    }


@@ -29,7 +29,7 @@
\*****************************************************************************/

'use strict';

import { ChatAnthropic } from '@langchain/anthropic';
import { PromptTemplate } from '@langchain/core/prompts';
import { WikipediaQueryRun } from '@langchain/community/tools/wikipedia_query_run';
import { HNSWLib } from '@langchain/community/vectorstores/hnswlib';
@@ -60,8 +60,8 @@ import { DialogKeywords } from '../../basic.gblib/services/DialogKeywords.js';
import { GBVMService } from '../../basic.gblib/services/GBVMService.js';
import { GBLogEx } from '../../core.gbapp/services/GBLogEx.js';
import { GBUtil } from '../../../src/util.js';
import { GBConfigService } from '../../core.gbapp/services/GBConfigService.js';

export interface CustomOutputParserFields {}
export type ExpectedOutput = any;

function isChatGeneration(llmOutput: ChatGeneration | Generation): llmOutput is ChatGeneration {
@@ -135,16 +135,13 @@ export class GBLLMOutputParser extends BaseLLMOutputParser<ExpectedOutput> {
    let securityEnabled = false;

    if (!sources) {
      GBLogEx.verbose(this.min, `LLM JSON output sources is NULL.`);
    } else {
      await CollectionUtil.asyncForEach(sources, async source => {
        let found = false;

        if (securityEnabled) {
          GBLogEx.info(this.min, `LLM JSON output security enabled.`);
        }

        if (source && source.file.endsWith('.pdf')) {
@@ -156,11 +153,14 @@ export class GBLLMOutputParser extends BaseLLMOutputParser<ExpectedOutput> {
          if (!isNaN(this.user.userSystemId)) {
            await this.min.whatsAppDirectLine.sendFileToDevice(
              this.user.userSystemId,
              pngs[0].url,
              localName,
              null,
              undefined,
              true
            );
          } else {
            text = `![alt text](${pngs[0].url})
            ${text}`;
          }
@@ -179,8 +179,6 @@ export class GBLLMOutputParser extends BaseLLMOutputParser<ExpectedOutput> {
}

export class ChatServices {
  private static async getRelevantContext(
    vectorStore: HNSWLib,
    sanitizedQuestion: string,
@@ -189,7 +187,7 @@ export class ChatServices {
    if (sanitizedQuestion === '' || !vectorStore) {
      return '';
    }

    let documents = await vectorStore.similaritySearch(sanitizedQuestion, numDocuments);
    const uniqueDocuments = {};
    const MAX_DOCUMENTS = numDocuments;
@@ -219,16 +217,14 @@ export class ChatServices {
          page = await ChatServices.findPageForText(metadata.source, doc.pageContent);
        }

        output = `${output}\n\n\n\nUse also the following context which is coming from Source Document: ${filename} at page: ${page ? page : 'entire document'}
        (you will fill the JSON sources collection field later),
        Use other page if this block is an index or table of contents (TOC).
        And memorize this block (if it is not an Index or TOC) among document
        information and return when you
        are refering this part of content:\n\n\n\n ${doc.pageContent} \n\n\n\n.`;
      }
    }
    return output;
  }
@@ -258,7 +254,7 @@ export class ChatServices {
    model = await ChatServices.getModel(min);
    return await model.invoke(text);
  }

  public static memoryMap = {};
@@ -266,29 +262,41 @@ export class ChatServices {
  public static usersMode = {};

  private static async getModel(min: GBMinInstance) {
    const provider = await (min.core as any)['getParam'](min.instance, 'LLM Provider', null, 'openai');
    let model;
    if (provider === 'claude') {
      model = new ChatAnthropic({
        model: 'claude-3-haiku-20240307',
        temperature: 0,
        maxTokens: undefined,
        maxRetries: 2
      });
    } else if (process.env.AI_MODE === 'local') {
      const azureOpenAIKey = process.env.AZURE_OPEN_AI_KEY;
      const azureOpenAILLMModel = process.env.AZURE_OPEN_AI_LLM_MODEL;
      const azureOpenAIVersion = process.env.AZURE_OPEN_AI_VERSION;
      const azureOpenAIApiInstanceName = process.env.AZURE_OPEN_AI_INSTANCE;
      const azureOpenAIEndPoint = process.env.AZURE_OPEN_AI_ENDPOINT;

      model = new ChatOpenAI({
        model: process.env.LOCAL_LLM_MODEL,
        apiKey: 'empty',
        azureOpenAIApiDeploymentName: 'v1',
        azureOpenAIApiInstanceName: 'v1',
        azureOpenAIApiKey: 'empty',
        azureOpenAIApiVersion: 'empty',
        azureOpenAIBasePath: process.env.LOCAL_LLM_ENDPOINT,
        openAIApiKey: 'empty',
        configuration: {
          baseURL: process.env.LOCAL_LLM_ENDPOINT
        }
      });
    } else {
      const azureOpenAIKey = process.env.AZURE_OPEN_AI_KEY;
      const azureOpenAILLMModel = process.env.AZURE_OPEN_AI_LLM_MODEL;
      const azureOpenAIVersion = process.env.AZURE_OPEN_AI_VERSION;
      const azureOpenAIApiInstanceName = process.env.AZURE_OPEN_AI_INSTANCE;
      const azureOpenAIEndPoint = process.env.AZURE_OPEN_AI_ENDPOINT;

      model = new ChatOpenAI({
        azureOpenAIApiKey: azureOpenAIKey,
@@ -296,7 +304,7 @@ export class ChatServices {
        azureOpenAIApiDeploymentName: azureOpenAILLMModel,
        azureOpenAIApiVersion: azureOpenAIVersion,
        azureOpenAIBasePath: azureOpenAIEndPoint,
        temperature: 0
      });
    }
    return model;
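
In the local branch only the model name and configuration.baseURL actually matter; the Azure fields are placeholders kept to satisfy the constructor. A standalone TypeScript sketch, not part of this commit, for exercising an OpenAI-compatible chat endpoint the same way (environment values are illustrative):

import { ChatOpenAI } from '@langchain/openai';

// Sketch only: talk to a local OpenAI-compatible server (llama.cpp, vLLM, LM Studio, ...).
process.env.LOCAL_LLM_ENDPOINT ??= 'http://localhost:5858/v1';
process.env.LOCAL_LLM_MODEL ??= 'model';

const localModel = new ChatOpenAI({
  model: process.env.LOCAL_LLM_MODEL,
  openAIApiKey: 'empty', // required by the client, ignored by local servers
  configuration: { baseURL: process.env.LOCAL_LLM_ENDPOINT },
  temperature: 0
});

const reply = await localModel.invoke('Reply with the single word: ok');
console.log(reply.content);
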
@@ -313,7 +321,6 @@ export class ChatServices {
    const LLMMode = (mode ?? answerMode).toLowerCase();

    let memory;
    if (user && !this.memoryMap[user.userSystemId]) {
      memory = new BufferWindowMemory({
@ -406,7 +413,6 @@ export class ChatServices {
Folowing answer:`) Folowing answer:`)
] as any); ] as any);
const jsonInformation = ` const jsonInformation = `
RESPONSE FORMAT: Return only a single valid JSON object with no surrounding text. Structure: RESPONSE FORMAT: Return only a single valid JSON object with no surrounding text. Structure:
{{"text": "Complete response as a single string, using \\n for all line breaks, \n1. bullets and; \n2.lists.", "sources": [{{"file": "filename", "page": number}}]}} {{"text": "Complete response as a single string, using \\n for all line breaks, \n1. bullets and; \n2.lists.", "sources": [{{"file": "filename", "page": number}}]}}
@@ -431,7 +437,6 @@ export class ChatServices {
    ERROR IF: Line breaks in JSON, text outside JSON, invalid format`;

    const combineDocumentsPrompt = ChatPromptTemplate.fromMessages([
      AIMessagePromptTemplate.fromTemplate(
        `
@@ -496,10 +501,10 @@ export class ChatServices {
          const { chat_history } = await memory.loadMemoryVariables({});
          return chat_history;
        },
        context: async (output: string) => {
          const c = await ChatServices.getRelevantContext(min['vectorStore'], output);
          return `${systemPrompt} \n ${c ? 'Use this context to answer:\n' + c : 'answer just with user question.'}`;
        }
      },
      combineDocumentsPrompt,
      model,
@@ -516,7 +521,12 @@ export class ChatServices {
      },
      questionGeneratorTemplate,
      modelWithTools,
      new GBLLMOutputParser(
        min,
        user,
        callToolChain,
        min['vectorStore']?.docstore?._docs.length > 0 ? combineDocumentsChain : null
      ),
      new StringOutputParser()
    ] as any);
@@ -544,15 +554,13 @@ export class ChatServices {
    let tables = con['storageTables'];
    tables = tables ? tables.split(';') : null;

    const answerSource = await (min.core as any)['getParam'](min.instance, 'Answer Source', 'server');
    GBLogEx.info(min, `Answer Source = ${answerSource}.`);

    let dataSource;
    if (answerSource === 'cache') {
      let sqliteFilePath = path.join('work', GBUtil.getGBAIPath(min.botId), `${con['name']}.sqlite`);
      GBLogEx.info(min, `Using data from cache: Path.basename(${sqliteFilePath}).`);

      dataSource = new DataSource({
@@ -562,8 +570,6 @@ export class ChatServices {
        logging: true
      });
    } else {
      if (dialect === 'sqlite') {
        const storageFile = con['storageFile'];
        dataSource = new DataSource({
@@ -572,9 +578,7 @@ export class ChatServices {
          synchronize: false,
          logging: true
        });
      } else {
        const host = con['storageServer'];
        const port = con['storagePort'];
        const storageName = con['storageName'];
@@ -624,7 +628,7 @@ export class ChatServices {
        schema: async () => db.getTableInfo(tables),
        question: (input: { question: string }) => input.question,
        top_k: () => 10,
        table_info: () => 'any'
      },
      prompt,
      model,
@@ -672,8 +676,7 @@ export class ChatServices {
        table_names_to_use: () => tables
      },
      {
        result: finalResponsePrompt.pipe(model).pipe(new StringOutputParser() as any),

        // Pipe the query through here unchanged so it gets logged alongside the result.
        sql: previousStepResult => previousStepResult.query
@@ -752,7 +755,7 @@ export class ChatServices {
    const packagePath = GBUtil.getGBAIPath(min.botId, 'gbdialog', null);
    const jsonFile = path.join('work', packagePath, `${script}.json`);

    if ((await GBUtil.exists(jsonFile)) && script.toLowerCase() !== 'start.vbs') {
      const funcJSON = JSON.parse(await fs.readFile(jsonFile, 'utf8'));
      const funcObj = funcJSON?.function;


@@ -0,0 +1 @@
O céu é azul-lilás e pronto.

View file

@@ -0,0 +1,40 @@
TALK "Por favor, digite a mensagem que deseja enviar:"
HEAR message
TALK "Analisando template ... (antes de mandar para a META)"
report = LLM "Esta mensagem vai ser aprovada pelo WhatsApp META como Template? Tem recomendação? Se estiver OK, responda o texto: OK. Do contrário, avalie o que deve ser feito."
IF report <> "OK" THEN
TALK "A mensagem não será aprovada pela Meta. " + report
END IF
TALK "Envie agora o arquivo de imagem de cabeçalho:"
HEAR plan AS FILE
TALK "É para um arquivo ou todos?"
HEAR in AS FILE
PUBLISH
IF in.isValid THEN
list = FIND in.filename, "Perfil=" + grupos
ELSE
list = GET "broadcast"
END IF
SET MAX LINES 2020
index = 1
DO WHILE index < UBOUND(list)
row = list[index]
SEND TEMPLATE TO row.telefone, filename
WAIT 0.1
index = index + 1
LOOP
TALK "OK, o envio foi realizado. Para saber mais, digite /report."