fix(basic.gblib): #394 UPLOAD keyword.
parent 0dc09db4e7
commit 825d6c1a0f
4 changed files with 99 additions and 93 deletions
@@ -47,7 +47,7 @@ import * as Fs from 'fs';
 import { CollectionUtil } from 'pragmatismo-io-framework';
 import { GBConversationalService } from '../../core.gbapp/services/GBConversationalService.js';
 import libphonenumber from 'google-libphonenumber';
-import DateDiff from 'date-diff';
+import * as df from 'date-diff';
 import tesseract from 'node-tesseract-ocr';
 import Path from 'path';
 import sgMail from '@sendgrid/mail';

@@ -288,7 +288,7 @@ export class DialogKeywords {
   * @example days = DATEDIFF date1,date2,mode
   *
   */
-  public async dateDiff(date1, date2, mode) {
+  public async getDateDiff({pid, date1, date2, mode}) {
     let dt1 = date1;
     let dt2 = date2;
     if (!(dt1 instanceof Date)) {

@@ -297,7 +297,9 @@ export class DialogKeywords {
     if (!(dt2 instanceof Date)) {
       dt2 = new Date(dt2);
     }
-    const diff = new DateDiff(date1, date2);
+    const diff1 = df.default.constructor(date1, date2);
+    const diff = Date['diff'](date1, date2);
+
     switch (mode) {
       case 'year':
         return diff.years();

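The DATEDIFF keyword now receives the process id alongside the two dates. For context, the date arithmetic it exposes can be sketched with the date-diff package as below; this is illustrative only, and the units other than 'year' are assumed, since the hunk above only shows the 'year' branch.

// Illustrative sketch: difference between two dates in a given unit,
// normalizing string inputs to Date first, as getDateDiff does above.
import DateDiff from 'date-diff';

function dateDiffSketch(date1: Date | string, date2: Date | string, mode: string): number {
  const dt1 = date1 instanceof Date ? date1 : new Date(date1);
  const dt2 = date2 instanceof Date ? date2 : new Date(date2);
  const diff = new DateDiff(dt1, dt2);
  switch (mode) {
    case 'year':
      return diff.years();
    case 'month':
      return diff.months(); // assumed unit
    case 'day':
      return diff.days();   // assumed unit
    default:
      return diff.hours();  // assumed fallback
  }
}
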
@@ -509,6 +509,7 @@ export class GBVMService extends GBService {
     let httpPs = this.httpPs;
     let today = this.today;
     let now = this.now;
     let date = new Date();
+    let page = null;
     const files = [];
     let col = 1;

@@ -103,20 +103,19 @@ export class KeywordsExpressions {
       ($0, $1, $2) => {

         let separator;
-        if ($1.indexOf(',') > -1){
-          separator = ',';
+        if ($1.indexOf(',') > -1) {
+          separator = ',';
         }
-        else if ($1.indexOf(';') > -1){
-          separator = ';';
+        else if ($1.indexOf(';') > -1) {
+          separator = ';';
         }
         let parts;
-        if ( separator && (parts = $1.split(separator)) && parts.length > 1){
+        if (separator && (parts = $1.split(separator)) && parts.length > 1) {
           return `
             TALK ${parts[0]}
             HEAR ${parts[1]}`;
         }
-        else
-        {
+        else {
           return `
             HEAR ${$1}`;
         }

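This handler and the one in the next hunk share the same separator detection. As a standalone sketch (the helper name is illustrative, not part of the codebase):

// Illustrative sketch: split a raw BASIC argument list on ',' or ';' when one
// is present, mirroring the separator detection in the handler above.
function splitArgs(raw: string): string[] {
  let separator: string | undefined;
  if (raw.indexOf(',') > -1) {
    separator = ',';
  } else if (raw.indexOf(';') > -1) {
    separator = ';';
  }
  return separator ? raw.split(separator) : [raw];
}

// Example: splitArgs('"Which e-mail?"; email') -> ['"Which e-mail?"', ' email'],
// which the handler above turns into a TALK line followed by a HEAR line.
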
@@ -154,11 +153,11 @@ export class KeywordsExpressions {
         $1 = $1.substr($1.indexOf(',') + 1);

         let separator;
-        if ($1.indexOf(',') > -1){
-          separator = ',';
+        if ($1.indexOf(',') > -1) {
+          separator = ',';
         }
-        else if ($1.indexOf(';') > -1){
-          separator = ';';
+        else if ($1.indexOf(';') > -1) {
+          separator = ';';
         }
         let items;
         if (separator && (items = $1.split(separator)) && items.length > 1) {

@@ -205,7 +204,7 @@ export class KeywordsExpressions {
           const jParams = JSON.parse(`{${params}}`);
           const filename = `${jParams.url.substr(0, jParams.url.lastIndexOf("."))}.xlsx`;
           let code =
-            `
+            `
             col = 1
             await sys.save({pid: pid,file: "${filename}", args: [id] })
             await dk.setFilter ({pid: pid, value: "id=" + id })

@@ -213,7 +212,7 @@ export class KeywordsExpressions {
           `;
           return code;
         } else {
-          sessionName = sessionName?`"${sessionName}"`:null;
+          sessionName = sessionName ? `"${sessionName}"` : null;
           kind = `"${kind}"`;
           return `page = await wa.openPage({pid: pid, handle: page, sessionKind: ${kind}, sessionName: ${sessionName}, ${params}})`;
         }

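The generated call reuses the page handle declared in GBVMService (the let page = null added in the hunk further up), so the first open creates a session and later calls pass the existing handle back in. A hedged sketch of the string-building branch above, with placeholder argument values:

// Illustrative sketch of the else-branch above: build the generated line of
// code for a page-opening keyword when no spreadsheet shortcut applies.
function emitOpenPage(kind: string, sessionName: string | null, params: string): string {
  const quotedName = sessionName ? `"${sessionName}"` : null;
  const quotedKind = `"${kind}"`;
  return `page = await wa.openPage({pid: pid, handle: page, sessionKind: ${quotedKind}, sessionName: ${quotedName}, ${params}})`;
}

// Example (placeholder values): emitOpenPage('chrome', null, 'url: "https://example.com"')
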
@@ -251,7 +250,6 @@ export class KeywordsExpressions {

     keywords[i++] = [/^\s*for +(.*to.*)/gim, 'for ($1) {'];

-    keywords[i++] = [/^\s*next *$/gim, '}'];

     keywords[i++] = [
       /^\s*((?:[a-z]+.?)(?:(?:\w+).)(?:\w+)*)\s*=\s*pay\s*(.*)/gim,

@@ -349,9 +347,9 @@ export class KeywordsExpressions {
           __data = ${$2};
           __pageMode = __data?.pageMode ? __data.pageMode : "none";

-          __url = __data.links?.next?.uri;
-          __seekToken = __data.links?.self?.headers["MS-ContinuationToken"]
-          __totalCount = __data["totalCount"] ? __data["totalCount"] : __data.length;
+          __url = __data?.links?.next?.uri;
+          __seekToken = __data?.links?.self?.headers["MS-ContinuationToken"]
+          __totalCount = __data?.totalCount ? __data.totalCount : __data.length;

           while (__next && __totalCount)
           {

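Switching from __data.links to __data?.links lets the generated paging code accept responses that are plain arrays as well as paged envelopes carrying links and totalCount. The same guard as a small self-contained sketch; the envelope shape is inferred from the generated code, not from a documented API:

// Illustrative sketch: derive paging state from a response that is either a
// paged envelope ({ links, totalCount }) or a plain array of rows.
function pagingState(data: any): { url?: string; seekToken?: string; totalCount: number } {
  const url = data?.links?.next?.uri;
  const seekToken = data?.links?.self?.headers?.['MS-ContinuationToken'];
  const totalCount = data?.totalCount ? data.totalCount : data.length;
  return { url, seekToken, totalCount };
}

// pagingState([1, 2, 3]) -> { url: undefined, seekToken: undefined, totalCount: 3 }
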
@@ -360,8 +358,7 @@ export class KeywordsExpressions {
         }
     ];

-    keywords[i++] = [
-      /^\s*END FOR\s*/gim,
+    keywords[i++] = [/^\s*next *$/gim,
       ($0, $1, $2) => {

         return `

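For context on how these entries are consumed: each keywords[i++] entry pairs a regular expression with either a literal replacement or a function returning generated code, and the transpiler applies the table to the BASIC source. A minimal sketch of that mechanism; the runner below is illustrative, not the project's actual implementation:

// Illustrative sketch: apply a table of [pattern, replacement] pairs to one
// line of BASIC source, in table order.
function applyKeywords(line: string, keywords: [RegExp, string | ((...m: any[]) => string)][]): string {
  for (const [pattern, replacement] of keywords) {
    line = line.replace(pattern, replacement as any);
  }
  return line;
}

// Example entry mirroring the FOR mapping above:
//   applyKeywords('FOR x = 1 TO 10', [[/^\s*for +(.*to.*)/gim, 'for ($1) {']])
//   returns 'for (x = 1 TO 10) {'
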
@@ -382,9 +379,9 @@ export class KeywordsExpressions {

           // Updates current variable handlers.

-          __url = __data.links?.next?.uri;
-          __seekToken = __data.links?.self?.headers["MS-ContinuationToken"]
-          __totalCount = __data["totalCount"];
+          __url = __data?.links?.next?.uri;
+          __seekToken = __data?.links?.self?.headers["MS-ContinuationToken"]
+          __totalCount = __data?.totalCount ? __data.totalCount : __data.length;

           __index = 0;
           __calls++;

@@ -1091,6 +1088,7 @@ export class KeywordsExpressions {
       ($0, $1, $2, $3, $4) => {
         $3 = $3.replace(/\'/g, '');
         $3 = $3.replace(/\"/g, '');
+        $3 = $3.replace(/\`/g, '');
         $4 = $4.substr(2);
         return `await sys.save({pid: pid, file: "${$3}", args: [${$4}]})`;
       }

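The added replace strips backticks in addition to single and double quotes from the file argument before it is interpolated into the generated sys.save call. As a standalone sketch (the function name is illustrative):

// Illustrative sketch: remove quote characters from a keyword argument before
// embedding it in generated code, as the SAVE handler above does.
function stripQuotes(value: string): string {
  return value.replace(/'/g, '').replace(/"/g, '').replace(/`/g, '');
}

// stripQuotes('`report.xlsx`') -> 'report.xlsx'
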
@@ -1109,7 +1107,7 @@ export class KeywordsExpressions {
         $3 = $3.replace(/\'/g, '');
         $3 = $3.replace(/\"/g, '');
         let fields = $3.split(',');
-        const table = fields[0].trim();
+        const table = fields[0].trim();
         fields.shift();

         const fieldsAsText = fields.join(',');

@@ -1126,7 +1124,7 @@ export class KeywordsExpressions {
           const fieldRegExp = /(?:.*\.)(.*)/gim;
           let name = fieldRegExp.exec(field)[1]

-          fieldsNamesOnly.push (`'${name}'`);
+          fieldsNamesOnly.push(`'${name}'`);
         });
         let fieldsNames = fieldsNamesOnly.join(',');

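The fieldRegExp above keeps only the part of a qualified field name after its last dot, because the non-capturing (?:.*\.) is greedy. A quick illustration; the sample input is hypothetical:

// Illustrative sketch: extract the bare field name from a qualified reference.
const fieldRegExp = /(?:.*\.)(.*)/gim;
const match = fieldRegExp.exec('row.customer.name'); // hypothetical input
const name = match ? match[1] : null;                // 'name'
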
@@ -658,62 +658,63 @@ export class SystemKeywords {
    */
   public async uploadFile({ pid, file }): Promise<any> {
     const { min, user } = await DialogKeywords.getProcessInfo(pid);
-    GBLog.info(`BASIC: Saving Blob'${file}' (SAVE file).`);
+    GBLog.info(`BASIC: UPLOAD '${file.name}' ${file.size} bytes.`);

     // Checks if it is a GB FILE object.

-    try {
-      const accountName = min.getParam('Blob Account');
-      const accountKey = min.getParam('Blob Key');
-      const blobName = min.getParam('Blob Name');
-      const sharedKeyCredential = new StorageSharedKeyCredential(
-        accountName,
-        accountKey
-      );
-      const baseUrl = `https://${accountName}.blob.core.windows.net`;
+    const accountName = min.core.getParam(min.instance, 'Blob Account');
+    const accountKey = min.core.getParam(min.instance, 'Blob Key');
+    const blobName = min.core.getParam(min.instance, 'Blob Name');
+    const sharedKeyCredential = new StorageSharedKeyCredential(
+      accountName,
+      accountKey
+    );
+    const baseUrl = `https://${accountName}.blob.core.windows.net`;

-      const blobServiceClient = new BlobServiceClient(
-        `${baseUrl}`,
-        sharedKeyCredential
-      );
+    const blobServiceClient = new BlobServiceClient(
+      `${baseUrl}`,
+      sharedKeyCredential
+    );


-      let data;
-      // It is an SharePoint object that needs to be downloaded.
+    // It is an SharePoint object that needs to be downloaded.

-      const gbaiName = DialogKeywords.getGBAIPath(min.botId);
-      const localName = Path.join('work', gbaiName, 'cache', `${GBAdminService.getRndReadableIdentifier()}.tmp`);
-      const url = file['url'];
-      const response = await fetch(url);
-      Fs.writeFileSync(localName, Buffer.from(await response.arrayBuffer()), { encoding: null });
+    const gbaiName = DialogKeywords.getGBAIPath(min.botId);
+    const localName = Path.join('work', gbaiName, 'cache', `${GBAdminService.getRndReadableIdentifier()}.tmp`);
+    const url = file['url'];
+    const response = await fetch(url);

-      const container = blobServiceClient.getContainerClient(accountName);
-      const hash = new Uint8Array(md5.array(data));
-      const blockBlobClient: BlockBlobClient = container.getBlockBlobClient(blobName);
+    // Writes it to disk and calculate hash.

-      const res = await blockBlobClient.uploadFile(localName,
-        {
-          blobHTTPHeaders: {
-            blobContentMD5: hash
-          }
-        });
+    const data = await response.arrayBuffer();
+    Fs.writeFileSync(localName, Buffer.from(data), { encoding: null });
+    const hash = new Uint8Array(md5.array(data));

-      if (res._response.status === 200 && res.contentMD5 === hash) {
-        Fs.rmSync(localName);
-      }
-      else {
-        GBLog.error(`BASIC: BLOB HTTP ${res.errorCode} ${res._response.status} .`);
-      }
+    // Performs uploading passing local hash.

-    } catch (error) {
-      if (error.code === 'itemNotFound') {
-        GBLog.info(`BASIC: BASIC source file not found: ${file}.`);
-      } else if (error.code === 'nameAlreadyExists') {
-        GBLog.info(`BASIC: BASIC destination file already exists: ${file}.`);
-      }
-      throw error;
-    }
+    const container = blobServiceClient.getContainerClient(accountName);
+    const blockBlobClient: BlockBlobClient = container.getBlockBlobClient(blobName);
+    const res = await blockBlobClient.uploadFile(localName,
+      {
+        blobHTTPHeaders: {
+          blobContentMD5: hash
+        }
+      });

+    // If upload is OK including hash check, removes the temporary file.

+    if ((res._response.status === 200 || res._response.status === 201) && res.contentMD5 === hash) {
+      Fs.rmSync(localName);

+      file['md5'] = res.contentMD5;

+      return file;

+    }
+    else {
+      GBLog.error(`BASIC: BLOB HTTP ${res.errorCode} ${res._response.status} .`);
+    }

   }

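The rewritten UPLOAD flow amounts to: read the Blob Account, Blob Key and Blob Name bot parameters, download the file from its SharePoint URL into a temporary cache file, compute an MD5 of the bytes, upload with @azure/storage-blob passing the hash in blobHTTPHeaders, and delete the temporary file once the service confirms the upload and the hash. A condensed, hedged sketch of that flow outside the class: the account values are placeholders, the MD5 helper is assumed to be the js-md5 package, Node 18+ global fetch is assumed, and the byte-wise hash comparison is a simplification of the check in the diff.

// Condensed sketch of the UPLOAD flow above (placeholders for account settings).
import * as Fs from 'fs';
import Path from 'path';
import md5 from 'js-md5';
import { BlobServiceClient, StorageSharedKeyCredential } from '@azure/storage-blob';

async function uploadToBlobSketch(downloadUrl: string): Promise<boolean> {
  const accountName = 'myaccount'; // placeholder for the 'Blob Account' param
  const accountKey = '<key>';      // placeholder for the 'Blob Key' param
  const blobName = 'mybot';        // placeholder for the 'Blob Name' param

  const credential = new StorageSharedKeyCredential(accountName, accountKey);
  const client = new BlobServiceClient(`https://${accountName}.blob.core.windows.net`, credential);

  // Download to a temporary local file and hash the bytes.
  const localName = Path.join('work', 'cache', 'upload.tmp');
  const response = await fetch(downloadUrl);
  const data = await response.arrayBuffer();
  Fs.writeFileSync(localName, Buffer.from(data), { encoding: null });
  const hash = new Uint8Array(md5.array(data));

  // Upload, sending the local MD5 so the service can verify integrity.
  const blockBlob = client.getContainerClient(accountName).getBlockBlobClient(blobName);
  const res = await blockBlob.uploadFile(localName, { blobHTTPHeaders: { blobContentMD5: hash } });

  // Keep the temporary file around only if something looks wrong.
  const hashOk = res.contentMD5 && Buffer.compare(Buffer.from(res.contentMD5), Buffer.from(hash)) === 0;
  if ((res._response.status === 200 || res._response.status === 201) && hashOk) {
    Fs.rmSync(localName);
    return true;
  }
  return false;
}
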
@@ -2156,7 +2157,7 @@ export class SystemKeywords {

     while (page === 0 || count === pageSize) {
       const paged = await t.findAll(
-        {offset:page * pageSize, limit:pageSize, subquery:false, where:{}}
+        { offset: page * pageSize, limit: pageSize, subquery: false, where: {} }
       );
       rows = [...paged, ...rows];
       page++;

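The loop above pulls rows from a Sequelize-style model in pages of pageSize until a short page signals the end. The same pattern as a compact, self-contained sketch; the model type is illustrative:

// Illustrative sketch: fetch every row in pages, prepending each page and
// stopping when a page comes back smaller than pageSize.
async function fetchAllRows(t: { findAll(options: object): Promise<any[]> }, pageSize = 100): Promise<any[]> {
  let rows: any[] = [];
  let page = 0;
  let count = pageSize;
  while (page === 0 || count === pageSize) {
    const paged = await t.findAll({ offset: page * pageSize, limit: pageSize, subquery: false, where: {} });
    count = paged.length;
    rows = [...paged, ...rows];
    page++;
  }
  return rows;
}
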
@@ -2580,9 +2581,13 @@ export class SystemKeywords {
     const { min } = await DialogKeywords.getProcessInfo(pid);
+    GBLogEx.info(min, `dirFolder: remotePath=${remotePath}, baseUrl=${baseUrl}`);
+
+    if (!array) {
+      array = [];
+    }

-    if (!baseUrl) {
-      let client;
-      [baseUrl, client] = await GBDeployer.internalGetDriveClient(min);
+    let { baseUrl, client } = await GBDeployer.internalGetDriveClient(min);

     remotePath = remotePath.replace(/\\/gi, '/');

@@ -2619,10 +2624,10 @@ export class SystemKeywords {
         obj['url'] = item['@microsoft.graph.downloadUrl'];

         array.push(obj);

-        return array;
-      }
       });

+      return array;
+    }
     }
   }
