fix(basic.gblib): Allow MERGE keyword in storage #380. @othonlima

parent a7308ef1e8
commit f5f64fd09b

3 changed files with 115 additions and 94 deletions
@@ -41,7 +41,6 @@ import Path from 'path';
 */
 export class KeywordsExpressions {
   private static getParams = (text: string, names) => {
-    let ret = {};
     const splitParamsButIgnoreCommasInDoublequotes = (str: string) => {
       return str.split(',').reduce(
         (accum, curr) => {
@@ -424,20 +423,20 @@ export class KeywordsExpressions {
         const params = this.getParams($4, ['path']);
         return `await sys.dirFolder ({pid: pid, ${params}})`;
       }
     ];

     keywords[i++] = [
       /^\s*(DELETE)(\s*)(.*)/gim,
-      ($0, $1, $2, $3, $4) => {
-        const params = this.getParams($4, ['file']);
+      ($0, $1, $2, $3) => {
+        const params = this.getParams($3, ['file']);
         return `await sys.deleteFile ({pid: pid, ${params}})`;
       }
     ];

     keywords[i++] = [
       /^\s*(.*)\=\s*(UPLOAD)(\s*)(.*)/gim,
-      ($0, $1, $2, $3, $4) => {
-        const params = this.getParams($4, ['file']);
+      ($0, $1, $2, $3) => {
+        const params = this.getParams($3, ['file']);
         return `await sys.uploadFile ({pid: pid, ${params}})`;
       }
     ];
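For DELETE, the pattern `/^\s*(DELETE)(\s*)(.*)/gim` has only three capture groups, so in a `String.replace` callback the parameter after `$3` is the match offset (a number), not another group; the old code was therefore passing the offset to `getParams`. A minimal check of the corrected mapping (the input line is hypothetical):

```ts
// $1 = 'DELETE', $2 = whitespace, $3 = the keyword's arguments.
'DELETE file.pdf'.replace(/^\s*(DELETE)(\s*)(.*)/gim, ($0, $1, $2, $3) => {
  console.log($3); // 'file.pdf' — what getParams should receive
  return $0;
});
```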
@@ -2139,7 +2139,37 @@ export class SystemKeywords {
     if (!this.cachedMerge[pid][file]) {
       await retry(
         async (bail) => {
-          rows = await t.findAll({});
+          let rows = [];
+
+          const paginate = (query, { page, pageSize }) => {
+            const offset = page * pageSize;
+            const limit = pageSize;
+
+            return {
+              ...query,
+              offset,
+              limit,
+            };
+          };
+
+          let page = 0, pageSize = 1000;
+          let count = 0;
+
+          do {
+
+            rows = [
+              t.findAll(
+                paginate(
+                  {
+                    where: {},
+                  },
+                  { page, pageSize },
+                ),
+              ), ...rows];
+
+            count = rows.length;
+
+          } while (count !== 1000)
         },
         {
          retries: 5,
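The added `paginate` helper only merges Sequelize's `offset`/`limit` options into an existing query object. A usage sketch (the page values and the model `t` are illustrative):

```ts
// Zero-based page 2 with a page size of 1000:
const query = paginate({ where: {} }, { page: 2, pageSize: 1000 });
// query => { where: {}, offset: 2000, limit: 1000 }
const rows = await t.findAll(query);
```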
@@ -2543,16 +2573,14 @@ export class SystemKeywords {
   /**
    * Loads all para from tabular file Config.xlsx.
    */
-  public async dirFolder(
-    min: GBMinInstance,
-    remotePath: string,
-    baseUrl: string = null,
-    array = null
-  ): Promise<any> {
+  public async dirFolder({ pid, remotePath, baseUrl = null, array = null }) {
+    const { min } = await DialogKeywords.getProcessInfo(pid);
+
     GBLogEx.info(min, `dirFolder: remotePath=${remotePath}, baseUrl=${baseUrl}`);

     if (!baseUrl) {
-      let { baseUrl, client } = await GBDeployer.internalGetDriveClient(min);
+      let client;
+      [baseUrl, client] = await GBDeployer.internalGetDriveClient(min);

       remotePath = remotePath.replace(/\\/gi, '/');
@@ -2574,8 +2602,8 @@ export class SystemKeywords {
     await CollectionUtil.asyncForEach(documents, async item => {

       if (item.folder) {
-        const nextFolder = urlJoin(remotePath, item.name);
-        array = [array, ... await this.dirFolder(min, null, nextFolder, array)];
+        remotePath = urlJoin(remotePath, item.name);
+        array = [array, ... await this.dirFolder({ pid, remotePath, baseUrl, array })];
       } else {

         // TODO: https://raw.githubusercontent.com/ishanarora04/quickxorhash/master/quickxorhash.js
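`dirFolder` now receives one object keyed by `pid` and resolves the bot instance itself through `DialogKeywords.getProcessInfo(pid)`, so the recursive call above can simply forward the same fields instead of threading `min` through. A hypothetical top-level call, assuming `system` is a `SystemKeywords` instance and the remote path is illustrative:

```ts
const files = await system.dirFolder({
  pid,                // process id; used internally to look up `min`
  remotePath: 'mybot.gbdrive/docs',
  baseUrl: null,      // when null, resolved via GBDeployer.internalGetDriveClient
  array: null
});
```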
@@ -106,17 +106,11 @@ export class GBDeployer implements IGBDeployer {
   public static async internalGetDriveClient(min: GBMinInstance) {
     let token;

-    // TODO: Add expiration logic.
-
-    if (min['cacheToken'] && null) {
-      return min['cacheToken'];
-    } else {

     // Get token as root only if the bot does not have
     // an custom tenant for retrieving packages.

     token = await (min.adminService as any)['acquireElevatedToken']
-      (min.instance.instanceId, min.instance.authenticatorTenant?false:true);
+      (min.instance.instanceId, min.instance.authenticatorTenant ? false : true);

     const siteId = process.env.STORAGE_SITE_ID;
     const libraryId = process.env.STORAGE_LIBRARY;
@@ -130,7 +124,7 @@ export class GBDeployer implements IGBDeployer {
     min['cacheToken'] = { baseUrl, client };

     return min['cacheToken'];
-    }
   }

   /**
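The deleted guard `if (min['cacheToken'] && null)` could never be taken, since `expr && null` always evaluates to `null` (falsy), so the cached-token read path was dead code and every call re-acquired a token:

```ts
// Always false, whatever the left operand is:
console.log(Boolean({ baseUrl: 'x', client: {} } && null)); // false
```

Note that the cache is still written at the end of the method (`min['cacheToken'] = { baseUrl, client };`); only the dead read path was removed.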
@@ -624,19 +618,19 @@ export class GBDeployer implements IGBDeployer {
       const connectionName = t.replace(strFind, '');
       let con = {};
       con['name'] = connectionName;
-      con['storageServer']= min.core.getParam<string>(min.instance, `${connectionName} Server`, null),
-      con['storageName']= min.core.getParam<string>(min.instance, `${connectionName} Name`, null),
-      con['storageUsername']= min.core.getParam<string>(min.instance, `${connectionName} Username`, null),
-      con['storagePort']= min.core.getParam<string>(min.instance, `${connectionName} Port`, null),
-      con['storagePassword']= min.core.getParam<string>(min.instance, `${connectionName} Password`, null),
-      con['storageDriver']= min.core.getParam<string>(min.instance, `${connectionName} Driver`, null)
+      con['storageServer'] = min.core.getParam<string>(min.instance, `${connectionName} Server`, null),
+      con['storageName'] = min.core.getParam<string>(min.instance, `${connectionName} Name`, null),
+      con['storageUsername'] = min.core.getParam<string>(min.instance, `${connectionName} Username`, null),
+      con['storagePort'] = min.core.getParam<string>(min.instance, `${connectionName} Port`, null),
+      con['storagePassword'] = min.core.getParam<string>(min.instance, `${connectionName} Password`, null),
+      con['storageDriver'] = min.core.getParam<string>(min.instance, `${connectionName} Driver`, null)
       connections.push(con);
     });

     const path = DialogKeywords.getGBAIPath(min.botId, null);
     const localFolder = Path.join('work', path, 'connections.json');
     Fs.writeFileSync(localFolder, JSON.stringify(connections), { encoding: null });

     // Updates instance object.

     await this.core.saveInstance(min.instance);
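Each connection entry is assembled from the `<name> Server`, `<name> Name`, `<name> Username`, `<name> Port`, `<name> Password`, and `<name> Driver` bot parameters and serialized to `connections.json` in the bot's work folder. One entry would look roughly like this (all values hypothetical):

```ts
const con = {
  name: 'MyDb',
  storageServer: 'db.example.com',
  storageName: 'gbdata',
  storageUsername: 'sa',
  storagePort: '1433',
  storagePassword: 'secret',
  storageDriver: 'mssql'
};
```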
@@ -685,15 +679,15 @@ export class GBDeployer implements IGBDeployer {
     }
   }

   /**
    * Removes the package local files from cache.
    */
   public async cleanupPackage(instance: IGBInstance, packageName: string) {
     const path = DialogKeywords.getGBAIPath(instance.botId, null, packageName);
     const localFolder = Path.join('work', path);
     rimraf.sync(localFolder);
   }

   /**
    * Removes the package from the storage and local work folders.
    */
@@ -705,7 +699,7 @@ export class GBDeployer implements IGBDeployer {

     const path = DialogKeywords.getGBAIPath(instance.botId, null, packageName);
     const localFolder = Path.join('work', path);

     return await this.undeployPackageFromLocalPath(instance, localFolder);
   }
@@ -731,7 +725,7 @@ export class GBDeployer implements IGBDeployer {
     const service = new KBService(this.core.sequelize);
     rimraf.sync(localPath);

-    if (p){
+    if (p) {
       await service.undeployKbFromStorage(instance, this, p.packageId);
     }
@@ -769,68 +763,68 @@ export class GBDeployer implements IGBDeployer {

     // TODO: Semaphore logic.
     //try {
     GBLogEx.info(instance.instanceId, `rebuildIndex running...`);
     // release = await GBServer.globals.indexSemaphore.acquire();
     // GBLogEx.info(instance.instanceId, `Acquire rebuildIndex done.`);

     const key = instance.searchKey ? instance.searchKey : GBServer.globals.minBoot.instance.searchKey;
     const searchIndex = instance.searchIndex ? instance.searchIndex : GBServer.globals.minBoot.instance.searchIndex;
     const searchIndexer = instance.searchIndexer
       ? instance.searchIndexer
       : GBServer.globals.minBoot.instance.searchIndexer;
     const host = instance.searchHost ? instance.searchHost : GBServer.globals.minBoot.instance.searchHost;

     // Prepares search.

     const search = new AzureSearch(
       key,
       host,
       searchIndex,
       searchIndexer
     );
     const connectionString = GBDeployer.getConnectionStringFromInstance(GBServer.globals.minBoot.instance);
     const dsName = 'gb';

     // Removes any previous index.

     try {
       await search.deleteDataSource(dsName);
     } catch (err) {
       // If it is a 404 there is nothing to delete as it is the first creation.

       if (err.code !== 404) {
-        throw err;
-      }
-    }
-
-    // Removes the index.
-
-    try {
-      await search.deleteIndex();
-    } catch (err) {
-      // If it is a 404 there is nothing to delete as it is the first creation.
-
-      if (err.code !== 404 && err.code !== 'OperationNotAllowed') {
-        throw err;
-      }
-    }
-
-    // Creates the data source and index on the cloud.
-
-    try {
-      await search.createDataSource(dsName, dsName, 'GuaribasQuestion', 'azuresql', connectionString);
-    } catch (err) {
-      GBLog.error(err);
         throw err;
       }
-    await search.createIndex(searchSchema, dsName);
+    }

-    // release();
-    GBLogEx.info(instance.instanceId, `Released rebuildIndex mutex.`);
+    // Removes the index.
+    try {
+      await search.deleteIndex();
+    } catch (err) {
+      // If it is a 404 there is nothing to delete as it is the first creation.
+
+      if (err.code !== 404 && err.code !== 'OperationNotAllowed') {
+        throw err;
+      }
+    }
+
+    // Creates the data source and index on the cloud.
+
+    try {
+      await search.createDataSource(dsName, dsName, 'GuaribasQuestion', 'azuresql', connectionString);
+    } catch (err) {
+      GBLog.error(err);
+      throw err;
+    }
+    await search.createIndex(searchSchema, dsName);
+
+    // release();
+    GBLogEx.info(instance.instanceId, `Released rebuildIndex mutex.`);
     //} catch {
     //  if (release) {
     //    release();
     //  }
     //}
   }
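Both delete calls in `rebuildIndex` tolerate a 404 (and, for the index, `OperationNotAllowed`), since on a first deployment there is nothing to remove yet. A condensed sketch of that pattern, using a hypothetical `safeDelete` helper:

```ts
async function safeDelete(fn: () => Promise<void>) {
  try {
    await fn();
  } catch (err: any) {
    // Nothing to delete on first creation; anything else is a real failure.
    if (err.code !== 404 && err.code !== 'OperationNotAllowed') {
      throw err;
    }
  }
}

// Mirroring the order used above:
// await safeDelete(() => search.deleteDataSource(dsName));
// await safeDelete(() => search.deleteIndex());
```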