fix(basic.gblib): Allow MERGE keyword in storage #380. @othonlima
parent a7308ef1e8
commit f5f64fd09b
3 changed files with 115 additions and 94 deletions
@@ -41,7 +41,6 @@ import Path from 'path';
 */
 export class KeywordsExpressions {
   private static getParams = (text: string, names) => {
-    let ret = {};
     const splitParamsButIgnoreCommasInDoublequotes = (str: string) => {
       return str.split(',').reduce(
         (accum, curr) => {
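The context above ends inside splitParamsButIgnoreCommasInDoublequotes, the reduce-based helper getParams uses to split an argument list without breaking quoted strings. The helper body is not shown in the hunk, so the standalone sketch below is an assumption about that quote-counting approach, not the repository's exact code:

// Sketch of a comma splitter that ignores commas inside double quotes. The
// accumulator tracks whether we are currently inside a quoted fragment: a
// fragment containing an odd number of '"' characters toggles that state, and
// while inside quotes each piece is re-joined to the previous one.
const splitParamsButIgnoreCommasInDoublequotes = (str: string): string[] => {
  return str.split(',').reduce(
    (accum, curr) => {
      if (accum.isConcatting) {
        accum.soFar[accum.soFar.length - 1] += ',' + curr;
      } else {
        accum.soFar.push(curr);
      }
      if (curr.split('"').length % 2 === 0) {
        accum.isConcatting = !accum.isConcatting;
      }
      return accum;
    },
    { soFar: [] as string[], isConcatting: false }
  ).soFar;
};

// splitParamsButIgnoreCommasInDoublequotes('file, "a, b", 3')
//   => ['file', ' "a, b"', ' 3']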
@@ -428,16 +427,16 @@ export class KeywordsExpressions

     keywords[i++] = [
       /^\s*(DELETE)(\s*)(.*)/gim,
-      ($0, $1, $2, $3, $4) => {
-        const params = this.getParams($4, ['file']);
+      ($0, $1, $2, $3) => {
+        const params = this.getParams($3, ['file']);
         return `await sys.deleteFile ({pid: pid, ${params}})`;
       }
     ];

     keywords[i++] = [
       /^\s*(.*)\=\s*(UPLOAD)(\s*)(.*)/gim,
-      ($0, $1, $2, $3, $4) => {
-        const params = this.getParams($4, ['file']);
+      ($0, $1, $2, $3) => {
+        const params = this.getParams($3, ['file']);
         return `await sys.uploadFile ({pid: pid, ${params}})`;
       }
     ];
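Each keywords[i] entry pairs a regular expression with a replacer that is applied to the BASIC source line via String.replace. For the DELETE entry the regex has three capturing groups, so the argument list arrives as $3, which is why the commit drops the unused $4 parameter and reads the parameters from $3. A simplified, hypothetical illustration follows; it inlines the parameter handling instead of calling getParams:

// Hypothetical sketch: rewriting a BASIC DELETE line into a sys.deleteFile call.
const deleteRegex = /^\s*(DELETE)(\s*)(.*)/gim;
const basicLine = 'DELETE "logs/old.txt"';

const transpiled = basicLine.replace(deleteRegex, ($0, $1, $2, $3) => {
  // The real transpiler builds this fragment with this.getParams($3, ['file']);
  // it is inlined here only to keep the sketch self-contained.
  return `await sys.deleteFile ({pid: pid, file: ${$3}})`;
});

// transpiled === 'await sys.deleteFile ({pid: pid, file: "logs/old.txt"})'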
@@ -2139,7 +2139,37 @@ export class SystemKeywords {
     if (!this.cachedMerge[pid][file]) {
       await retry(
         async (bail) => {
-          rows = await t.findAll({});
+          let rows = [];
+
+          const paginate = (query, { page, pageSize }) => {
+            const offset = page * pageSize;
+            const limit = pageSize;
+
+            return {
+              ...query,
+              offset,
+              limit,
+            };
+          };
+
+          let page = 0, pageSize = 1000;
+          let count = 0;
+
+          do {
+
+            rows = [
+              t.findAll(
+                paginate(
+                  {
+                    where: {},
+                  },
+                  { page, pageSize },
+                ),
+              ), ...rows];
+
+            count = rows.length;
+
+          } while (count !== 1000)
         },
         {
           retries: 5,
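The new MERGE path drains the table in pages of 1000 rows instead of a single findAll. Below is a minimal standalone sketch of the same pagination idea with Sequelize; unlike the committed loop it awaits each page and advances page between iterations, which is an assumption about the intended behaviour rather than a copy of the code above:

import { FindOptions, Model } from 'sequelize';

// Add offset/limit to a Sequelize query for the requested page.
const paginate = (query: FindOptions, { page, pageSize }: { page: number; pageSize: number }): FindOptions => ({
  ...query,
  offset: page * pageSize,
  limit: pageSize,
});

// Read every row of a model in fixed-size pages, stopping on the first
// partial (or empty) page.
async function readAllRows(t: { findAll(options: FindOptions): Promise<Model[]> }): Promise<Model[]> {
  const pageSize = 1000;
  let page = 0;
  let rows: Model[] = [];
  let batch: Model[] = [];

  do {
    batch = await t.findAll(paginate({ where: {} }, { page, pageSize }));
    rows = [...rows, ...batch];
    page++;
  } while (batch.length === pageSize);

  return rows;
}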
@@ -2543,16 +2573,14 @@ export class SystemKeywords {
   /**
    * Loads all para from tabular file Config.xlsx.
    */
-  public async dirFolder(
-    min: GBMinInstance,
-    remotePath: string,
-    baseUrl: string = null,
-    array = null
-  ): Promise<any> {
+  public async dirFolder({ pid, remotePath, baseUrl = null, array = null }) {
+    const { min } = await DialogKeywords.getProcessInfo(pid);
+
     GBLogEx.info(min, `dirFolder: remotePath=${remotePath}, baseUrl=${baseUrl}`);

     if (!baseUrl) {
-      let { baseUrl, client } = await GBDeployer.internalGetDriveClient(min);
+      let client;
+      [baseUrl, client] = await GBDeployer.internalGetDriveClient(min);

       remotePath = remotePath.replace(/\\/gi, '/');

@@ -2574,8 +2602,8 @@ export class SystemKeywords {
     await CollectionUtil.asyncForEach(documents, async item => {

       if (item.folder) {
-        const nextFolder = urlJoin(remotePath, item.name);
-        array = [array, ... await this.dirFolder(min, null, nextFolder, array)];
+        remotePath = urlJoin(remotePath, item.name);
+        array = [array, ... await this.dirFolder({ pid, remotePath, baseUrl, array })];
       } else {

         // TODO: https://raw.githubusercontent.com/ishanarora04/quickxorhash/master/quickxorhash.js
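dirFolder now takes a single options object keyed by pid, resolves min through DialogKeywords.getProcessInfo and recurses by re-passing the same { pid, remotePath, baseUrl, array } bag. The local-filesystem sketch below mirrors only that recursion shape; the drive client, GBLogEx logging and hashing are left out, and the names are illustrative:

import { readdir } from 'node:fs/promises';
import { join } from 'node:path';

// Depth-first walk with an accumulator that is re-passed on every recursive
// call, mirroring how dirFolder threads `array` through itself.
async function dirFolderSketch({ remotePath, array = [] }: { remotePath: string; array?: string[] }): Promise<string[]> {
  const items = await readdir(remotePath, { withFileTypes: true });

  for (const item of items) {
    const itemPath = join(remotePath, item.name);
    if (item.isDirectory()) {
      array = await dirFolderSketch({ remotePath: itemPath, array });
    } else {
      array.push(itemPath);
    }
  }

  return array;
}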
@@ -106,17 +106,11 @@ export class GBDeployer implements IGBDeployer {
   public static async internalGetDriveClient(min: GBMinInstance) {
     let token;

-    // TODO: Add expiration logic.
-
-    if (min['cacheToken'] && null) {
-      return min['cacheToken'];
-    } else {
-
     // Get token as root only if the bot does not have
     // an custom tenant for retrieving packages.

     token = await (min.adminService as any)['acquireElevatedToken']
-      (min.instance.instanceId, min.instance.authenticatorTenant?false:true);
+      (min.instance.instanceId, min.instance.authenticatorTenant ? false : true);

     const siteId = process.env.STORAGE_SITE_ID;
     const libraryId = process.env.STORAGE_LIBRARY;
@@ -130,7 +124,7 @@ export class GBDeployer implements IGBDeployer {
     min['cacheToken'] = { baseUrl, client };

     return min['cacheToken'];
-    }
+
   }

   /**
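The removed block cached the drive client unconditionally (and its `&& null` guard had already disabled the read path), while the dropped TODO pointed at missing expiration logic. The sketch below is purely hypothetical and shows one way such an expiring cache could look; none of these names exist in GBDeployer:

// Hypothetical expiring cache for the { baseUrl, client } pair.
interface CachedDriveClient {
  baseUrl: string;
  client: unknown;
  expiresAt: number; // epoch milliseconds
}

// Assumed TTL, chosen to stay well under a typical token lifetime.
const DRIVE_CACHE_TTL_MS = 30 * 60 * 1000;

function getFreshCache(cache?: CachedDriveClient): CachedDriveClient | undefined {
  return cache && cache.expiresAt > Date.now() ? cache : undefined;
}

function buildCache(baseUrl: string, client: unknown): CachedDriveClient {
  return { baseUrl, client, expiresAt: Date.now() + DRIVE_CACHE_TTL_MS };
}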
@@ -624,12 +618,12 @@ export class GBDeployer implements IGBDeployer {
       const connectionName = t.replace(strFind, '');
       let con = {};
       con['name'] = connectionName;
-      con['storageServer']= min.core.getParam<string>(min.instance, `${connectionName} Server`, null),
-      con['storageName']= min.core.getParam<string>(min.instance, `${connectionName} Name`, null),
-      con['storageUsername']= min.core.getParam<string>(min.instance, `${connectionName} Username`, null),
-      con['storagePort']= min.core.getParam<string>(min.instance, `${connectionName} Port`, null),
-      con['storagePassword']= min.core.getParam<string>(min.instance, `${connectionName} Password`, null),
-      con['storageDriver']= min.core.getParam<string>(min.instance, `${connectionName} Driver`, null)
+      con['storageServer'] = min.core.getParam<string>(min.instance, `${connectionName} Server`, null),
+      con['storageName'] = min.core.getParam<string>(min.instance, `${connectionName} Name`, null),
+      con['storageUsername'] = min.core.getParam<string>(min.instance, `${connectionName} Username`, null),
+      con['storagePort'] = min.core.getParam<string>(min.instance, `${connectionName} Port`, null),
+      con['storagePassword'] = min.core.getParam<string>(min.instance, `${connectionName} Password`, null),
+      con['storageDriver'] = min.core.getParam<string>(min.instance, `${connectionName} Driver`, null)
       connections.push(con);
     });

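These per-connection parameters (Server, Name, Username, Port, Password, Driver) describe the external storage that MERGE can now target. One plausible way such a record could be turned into a Sequelize connection, offered only as a hedged sketch since the actual wiring elsewhere in the deployer may differ:

import { Sequelize } from 'sequelize';

// Sketch: build a Sequelize instance from a `con` record like the one above.
function connectFromParams(con: Record<string, string>): Sequelize {
  return new Sequelize(con['storageName'], con['storageUsername'], con['storagePassword'], {
    host: con['storageServer'],
    port: parseInt(con['storagePort'], 10),
    dialect: con['storageDriver'] as any // e.g. 'mssql', 'postgres', 'mysql'
  });
}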
@@ -731,7 +725,7 @@ export class GBDeployer implements IGBDeployer {
     const service = new KBService(this.core.sequelize);
     rimraf.sync(localPath);

-    if (p){
+    if (p) {
       await service.undeployKbFromStorage(instance, this, p.packageId);
     }
