SYNC: Merge pull request #3 from dbgate/feature/zip

This commit is contained in:
Jan Prochazka
2025-04-23 13:17:54 +02:00
committed by Diflow
parent 54c53f0b56
commit 8f4118a6b8
82 changed files with 3981 additions and 2814 deletions

View File

@@ -87,6 +87,9 @@ module.exports = ({ editMenu, isMac }) => [
{ command: 'folder.showData', hideDisabled: true },
{ command: 'new.gist', hideDisabled: true },
{ command: 'app.resetSettings', hideDisabled: true },
{ divider: true },
{ command: 'app.exportConnections', hideDisabled: true },
{ command: 'app.importConnections', hideDisabled: true },
],
},
...(isMac

View File

@@ -468,15 +468,15 @@ describe('Data browser data', () => {
cy.themeshot('database-model-table-yaml');
});
it('Data duplicator', () => {
it('Data replicator', () => {
cy.contains('MySql-connection').click();
cy.contains('MyChinook').click();
cy.testid('WidgetIconPanel_archive').click();
cy.contains('chinook-archive').rightclick();
cy.contains('Data duplicator').click();
cy.contains('Data replicator').click();
cy.contains('Dry run').click();
cy.testid('DataDuplicatorTab_importIntoDb').click();
cy.contains('Duplicated Album, inserted 347 rows, mapped 0 rows, missing 0 rows, skipped 0 rows');
cy.themeshot('data-duplicator');
cy.testid('DataReplicatorTab_importIntoDb').click();
cy.contains('Replicated Album, inserted 347 rows, mapped 0 rows, missing 0 rows, skipped 0 rows');
cy.themeshot('data-replicator');
});
});

View File

@@ -1,160 +0,0 @@
const engines = require('../engines');
const stream = require('stream');
const { testWrapper } = require('../tools');
const dataDuplicator = require('dbgate-api/src/shell/dataDuplicator');
const { runCommandOnDriver, runQueryOnDriver } = require('dbgate-tools');
describe('Data duplicator', () => {
test.each(engines.filter(x => !x.skipDataDuplicator).map(engine => [engine.label, engine]))(
'Insert simple data - %s',
testWrapper(async (conn, driver, engine) => {
runCommandOnDriver(conn, driver, dmp =>
dmp.createTable({
pureName: 't1',
columns: [
{ columnName: 'id', dataType: 'int', autoIncrement: true, notNull: true },
{ columnName: 'val', dataType: 'varchar(50)' },
],
primaryKey: {
columns: [{ columnName: 'id' }],
},
})
);
runCommandOnDriver(conn, driver, dmp =>
dmp.createTable({
pureName: 't2',
columns: [
{ columnName: 'id', dataType: 'int', autoIncrement: true, notNull: true },
{ columnName: 'val', dataType: 'varchar(50)' },
{ columnName: 'valfk', dataType: 'int', notNull: true },
],
primaryKey: {
columns: [{ columnName: 'id' }],
},
foreignKeys: [{ refTableName: 't1', columns: [{ columnName: 'valfk', refColumnName: 'id' }] }],
})
);
const gett1 = () =>
stream.Readable.from([
{ __isStreamHeader: true, __isDynamicStructure: true },
{ id: 1, val: 'v1' },
{ id: 2, val: 'v2' },
{ id: 3, val: 'v3' },
]);
const gett2 = () =>
stream.Readable.from([
{ __isStreamHeader: true, __isDynamicStructure: true },
{ id: 1, val: 'v1', valfk: 1 },
{ id: 2, val: 'v2', valfk: 2 },
{ id: 3, val: 'v3', valfk: 3 },
]);
await dataDuplicator({
systemConnection: conn,
driver,
items: [
{
name: 't1',
operation: 'copy',
openStream: gett1,
},
{
name: 't2',
operation: 'copy',
openStream: gett2,
},
],
});
await dataDuplicator({
systemConnection: conn,
driver,
items: [
{
name: 't1',
operation: 'copy',
openStream: gett1,
},
{
name: 't2',
operation: 'copy',
openStream: gett2,
},
],
});
const res1 = await runQueryOnDriver(conn, driver, dmp => dmp.put(`select count(*) as ~cnt from ~t1`));
expect(res1.rows[0].cnt.toString()).toEqual('6');
const res2 = await runQueryOnDriver(conn, driver, dmp => dmp.put(`select count(*) as ~cnt from ~t2`));
expect(res2.rows[0].cnt.toString()).toEqual('6');
})
);
test.each(engines.filter(x => !x.skipDataDuplicator).map(engine => [engine.label, engine]))(
'Skip nullable weak refs - %s',
testWrapper(async (conn, driver, engine) => {
runCommandOnDriver(conn, driver, dmp =>
dmp.createTable({
pureName: 't1',
columns: [
{ columnName: 'id', dataType: 'int', notNull: true },
{ columnName: 'val', dataType: 'varchar(50)' },
],
primaryKey: {
columns: [{ columnName: 'id' }],
},
})
);
runCommandOnDriver(conn, driver, dmp =>
dmp.createTable({
pureName: 't2',
columns: [
{ columnName: 'id', dataType: 'int', autoIncrement: true, notNull: true },
{ columnName: 'val', dataType: 'varchar(50)' },
{ columnName: 'valfk', dataType: 'int', notNull: false },
],
primaryKey: {
columns: [{ columnName: 'id' }],
},
foreignKeys: [{ refTableName: 't1', columns: [{ columnName: 'valfk', refColumnName: 'id' }] }],
})
);
runCommandOnDriver(conn, driver, dmp => dmp.put("insert into ~t1 (~id, ~val) values (1, 'first')"));
const gett2 = () =>
stream.Readable.from([
{ __isStreamHeader: true, __isDynamicStructure: true },
{ id: 1, val: 'v1', valfk: 1 },
{ id: 2, val: 'v2', valfk: 2 },
]);
await dataDuplicator({
systemConnection: conn,
driver,
items: [
{
name: 't2',
operation: 'copy',
openStream: gett2,
},
],
options: {
setNullForUnresolvedNullableRefs: true,
},
});
const res1 = await runQueryOnDriver(conn, driver, dmp => dmp.put(`select count(*) as ~cnt from ~t1`));
expect(res1.rows[0].cnt.toString()).toEqual('1');
const res2 = await runQueryOnDriver(conn, driver, dmp => dmp.put(`select count(*) as ~cnt from ~t2`));
expect(res2.rows[0].cnt.toString()).toEqual('2');
const res3 = await runQueryOnDriver(conn, driver, dmp =>
dmp.put(`select count(*) as ~cnt from ~t2 where ~valfk is not null`)
);
expect(res3.rows[0].cnt.toString()).toEqual('1');
})
);
});

View File

@@ -0,0 +1,305 @@
const engines = require('../engines');
const stream = require('stream');
const { testWrapper } = require('../tools');
const dataReplicator = require('dbgate-api/src/shell/dataReplicator');
const deployDb = require('dbgate-api/src/shell/deployDb');
const storageModel = require('dbgate-api/src/storageModel');
const { runCommandOnDriver, runQueryOnDriver } = require('dbgate-tools');
describe('Data replicator', () => {
test.each(engines.filter(x => !x.skipDataReplicator).map(engine => [engine.label, engine]))(
'Insert simple data - %s',
testWrapper(async (conn, driver, engine) => {
runCommandOnDriver(conn, driver, dmp =>
dmp.createTable({
pureName: 't1',
columns: [
{ columnName: 'id', dataType: 'int', autoIncrement: true, notNull: true },
{ columnName: 'val', dataType: 'varchar(50)' },
],
primaryKey: {
columns: [{ columnName: 'id' }],
},
})
);
runCommandOnDriver(conn, driver, dmp =>
dmp.createTable({
pureName: 't2',
columns: [
{ columnName: 'id', dataType: 'int', autoIncrement: true, notNull: true },
{ columnName: 'val', dataType: 'varchar(50)' },
{ columnName: 'valfk', dataType: 'int', notNull: true },
],
primaryKey: {
columns: [{ columnName: 'id' }],
},
foreignKeys: [{ refTableName: 't1', columns: [{ columnName: 'valfk', refColumnName: 'id' }] }],
})
);
const gett1 = () =>
stream.Readable.from([
{ __isStreamHeader: true, __isDynamicStructure: true },
{ id: 1, val: 'v1' },
{ id: 2, val: 'v2' },
{ id: 3, val: 'v3' },
]);
const gett2 = () =>
stream.Readable.from([
{ __isStreamHeader: true, __isDynamicStructure: true },
{ id: 1, val: 'v1', valfk: 1 },
{ id: 2, val: 'v2', valfk: 2 },
{ id: 3, val: 'v3', valfk: 3 },
]);
await dataReplicator({
systemConnection: conn,
driver,
items: [
{
name: 't1',
createNew: true,
openStream: gett1,
},
{
name: 't2',
createNew: true,
openStream: gett2,
},
],
});
await dataReplicator({
systemConnection: conn,
driver,
items: [
{
name: 't1',
createNew: true,
openStream: gett1,
},
{
name: 't2',
createNew: true,
openStream: gett2,
},
],
});
const res1 = await runQueryOnDriver(conn, driver, dmp => dmp.put(`select count(*) as ~cnt from ~t1`));
expect(res1.rows[0].cnt.toString()).toEqual('6');
const res2 = await runQueryOnDriver(conn, driver, dmp => dmp.put(`select count(*) as ~cnt from ~t2`));
expect(res2.rows[0].cnt.toString()).toEqual('6');
})
);
test.each(engines.filter(x => !x.skipDataReplicator).map(engine => [engine.label, engine]))(
'Skip nullable weak refs - %s',
testWrapper(async (conn, driver, engine) => {
runCommandOnDriver(conn, driver, dmp =>
dmp.createTable({
pureName: 't1',
columns: [
{ columnName: 'id', dataType: 'int', notNull: true },
{ columnName: 'val', dataType: 'varchar(50)' },
],
primaryKey: {
columns: [{ columnName: 'id' }],
},
})
);
runCommandOnDriver(conn, driver, dmp =>
dmp.createTable({
pureName: 't2',
columns: [
{ columnName: 'id', dataType: 'int', autoIncrement: true, notNull: true },
{ columnName: 'val', dataType: 'varchar(50)' },
{ columnName: 'valfk', dataType: 'int', notNull: false },
],
primaryKey: {
columns: [{ columnName: 'id' }],
},
foreignKeys: [{ refTableName: 't1', columns: [{ columnName: 'valfk', refColumnName: 'id' }] }],
})
);
runCommandOnDriver(conn, driver, dmp => dmp.put("insert into ~t1 (~id, ~val) values (1, 'first')"));
await dataReplicator({
systemConnection: conn,
driver,
items: [
{
name: 't2',
createNew: true,
jsonArray: [
{ id: 1, val: 'v1', valfk: 1 },
{ id: 2, val: 'v2', valfk: 2 },
],
},
],
options: {
setNullForUnresolvedNullableRefs: true,
},
});
const res1 = await runQueryOnDriver(conn, driver, dmp => dmp.put(`select count(*) as ~cnt from ~t1`));
expect(res1.rows[0].cnt.toString()).toEqual('1');
const res2 = await runQueryOnDriver(conn, driver, dmp => dmp.put(`select count(*) as ~cnt from ~t2`));
expect(res2.rows[0].cnt.toString()).toEqual('2');
const res3 = await runQueryOnDriver(conn, driver, dmp =>
dmp.put(`select count(*) as ~cnt from ~t2 where ~valfk is not null`)
);
expect(res3.rows[0].cnt.toString()).toEqual('1');
})
);
test.each(engines.filter(x => !x.skipDataReplicator).map(engine => [engine.label, engine]))(
'Import storage DB - %s',
testWrapper(async (conn, driver, engine) => {
await deployDb({
systemConnection: conn,
driver,
loadedDbModel: storageModel,
targetSchema: driver.defaultSchemaName,
});
async function queryValue(sql) {
const res1 = await runQueryOnDriver(conn, driver, dmp => dmp.put(sql));
return res1.rows[0].val?.toString();
}
expect(await queryValue(`select count(*) as ~val from ~auth_methods`)).toEqual('2');
expect(
await queryValue(
`select ~is_disabled as ~val from ~auth_methods where ~amoid='790ca4d2-7f01-4800-955b-d691b890cc50'`
)
).toBeFalsy();
const DB1 = {
auth_methods: [
{ id: -1, name: 'Anonymous', amoid: '790ca4d2-7f01-4800-955b-d691b890cc50', is_disabled: 1 },
{ id: 10, name: 'OAuth', amoid: '4269b660-54b6-11ef-a3aa-a9021250bf4b' },
],
auth_methods_config: [{ id: 20, auth_method_id: 10, key: 'oauthClient', value: 'dbgate' }],
config: [
{ group: 'admin', key: 'encyptKey', value: '1234' },
{ group: 'admin', key: 'adminPasswordState', value: 'set' },
{ group: 'license', key: 'licenseKey', value: '123467' },
],
roles: [
{ id: -3, name: 'superadmin' },
{ id: -2, name: 'logged-user' },
{ id: -1, name: 'anonymous-user' },
],
role_permissions: [
{ id: 14, role_id: -1, permission: 'perm1' },
{ id: 29, role_id: -1, permission: 'perm2' },
{ id: 1, role_id: -1, permission: 'perm3' },
],
};
const DB2 = {
auth_methods: [{ id: 10, name: 'My Auth', amoid: 'myauth1' }],
auth_methods_config: [{ id: 20, auth_method_id: 10, key: 'my authClient', value: 'mydbgate' }],
config: [],
roles: [{ id: 1, name: 'test' }],
role_permissions: [{ id: 14, role_id: 1, permission: 'permxx' }],
};
function createDuplConfig(db) {
return {
systemConnection: conn,
driver,
items: [
{
name: 'auth_methods',
findExisting: true,
updateExisting: true,
createNew: true,
matchColumns: ['amoid'],
jsonArray: db.auth_methods,
},
{
name: 'auth_methods_config',
findExisting: true,
updateExisting: true,
createNew: true,
matchColumns: ['auth_method_id', 'key'],
jsonArray: db.auth_methods_config,
},
{
name: 'config',
findExisting: true,
updateExisting: true,
createNew: true,
matchColumns: ['group', 'key'],
jsonArray: db.config,
},
{
name: 'roles',
findExisting: true,
updateExisting: true,
createNew: true,
matchColumns: ['name'],
jsonArray: db.roles,
},
{
name: 'role_permissions',
findExisting: true,
updateExisting: true,
createNew: true,
deleteMissing: true,
matchColumns: ['role_id', 'permission'],
deleteRestrictionColumns: ['role_id'],
jsonArray: db.role_permissions,
},
],
};
}
await dataReplicator(createDuplConfig(DB1));
expect(
await queryValue(
`select ~is_disabled as ~val from ~auth_methods where ~amoid='790ca4d2-7f01-4800-955b-d691b890cc50'`
)
).toBeTruthy();
expect(await queryValue(`select count(*) as ~val from ~auth_methods`)).toEqual('3');
expect(await queryValue(`select count(*) as ~val from ~auth_methods_config`)).toEqual('1');
expect(await queryValue(`select count(*) as ~val from ~config`)).toEqual('3');
expect(await queryValue(`select ~value as ~val from ~auth_methods_config`)).toEqual('dbgate');
expect(
await queryValue(`select ~value as ~val from ~config where ~group='license' and ~key='licenseKey'`)
).toEqual('123467');
expect(await queryValue(`select count(*) as ~val from ~role_permissions`)).toEqual('3');
DB1.auth_methods_config[0].value = 'dbgate2';
DB1.config[2].value = '567';
DB1.role_permissions.splice(2, 1);
await dataReplicator(createDuplConfig(DB1));
expect(await queryValue(`select count(*) as ~val from ~auth_methods_config`)).toEqual('1');
expect(await queryValue(`select count(*) as ~val from ~config`)).toEqual('3');
expect(await queryValue(`select ~value as ~val from ~auth_methods_config`)).toEqual('dbgate2');
expect(
await queryValue(`select ~value as ~val from ~config where ~group='license' and ~key='licenseKey'`)
).toEqual('567');
expect(await queryValue(`select count(*) as ~val from ~role_permissions`)).toEqual('2');
// now add DB2
await dataReplicator(createDuplConfig(DB2));
expect(await queryValue(`select count(*) as ~val from ~auth_methods`)).toEqual('4');
expect(await queryValue(`select count(*) as ~val from ~auth_methods_config`)).toEqual('2');
expect(await queryValue(`select count(*) as ~val from ~role_permissions`)).toEqual('3');
DB1.role_permissions.splice(1, 1);
await dataReplicator(createDuplConfig(DB1));
expect(await queryValue(`select count(*) as ~val from ~role_permissions`)).toEqual('2');
})
);
});

View File

@@ -188,7 +188,7 @@ describe('Query', () => {
})
);
test.each(engines.filter(x => !x.skipDataDuplicator).map(engine => [engine.label, engine]))(
test.each(engines.filter(x => !x.skipDataReplicator).map(engine => [engine.label, engine]))(
'Select scope identity - %s',
testWrapper(async (conn, driver, engine) => {
await runCommandOnDriver(conn, driver, dmp =>

View File

@@ -8,14 +8,14 @@ services:
# ports:
# - 15000:5432
#
# mariadb:
# image: mariadb
# command: --default-authentication-plugin=mysql_native_password
# restart: always
# ports:
# - 15004:3306
# environment:
# - MYSQL_ROOT_PASSWORD=Pwd2020Db
mariadb:
image: mariadb
command: --default-authentication-plugin=mysql_native_password
restart: always
ports:
- 15004:3306
environment:
- MYSQL_ROOT_PASSWORD=Pwd2020Db
# mysql:
# image: mysql:8.0.18
@@ -25,7 +25,7 @@ services:
# - 15001:3306
# environment:
# - MYSQL_ROOT_PASSWORD=Pwd2020Db
#
# cassandradb:
# image: cassandra:5.0.2
@@ -81,11 +81,11 @@ services:
# ports:
# - 15006:1521
libsql:
image: ghcr.io/tursodatabase/libsql-server:latest
platform: linux/amd64
ports:
- '8080:8080'
- '5002:5001'
volumes:
- ./data/libsql:/var/lib/sqld
# libsql:
# image: ghcr.io/tursodatabase/libsql-server:latest
# platform: linux/amd64
# ports:
# - '8080:8080'
# - '5002:5001'
# volumes:
# - ./data/libsql:/var/lib/sqld

View File

@@ -551,7 +551,7 @@ const clickhouseEngine = {
skipUnique: true,
skipAutoIncrement: true,
skipPkColumnTesting: true,
skipDataDuplicator: true,
skipDataReplicator: true,
skipStringLength: true,
alterTableAddColumnSyntax: true,
dbSnapshotBySeconds: true,
@@ -643,7 +643,7 @@ const cassandraEngine = {
skipOrderBy: true,
skipAutoIncrement: true,
skipDataModifications: true,
skipDataDuplicator: true,
skipDataReplicator: true,
skipDeploy: true,
skipImportModel: true,
@@ -673,14 +673,14 @@ const enginesOnLocal = [
// all engines, which would be run on local test
// cassandraEngine,
// mysqlEngine,
// mariaDbEngine,
mariaDbEngine,
// postgreSqlEngine,
// sqlServerEngine,
// sqliteEngine,
// cockroachDbEngine,
// clickhouseEngine,
// libsqlFileEngine,
libsqlWsEngine,
// libsqlWsEngine,
// oracleEngine,
];

View File

@@ -12,7 +12,7 @@
"wait:local": "cross-env DEVMODE=1 LOCALTEST=1 node wait.js",
"wait:ci": "cross-env DEVMODE=1 CITEST=1 node wait.js",
"test:local": "cross-env DEVMODE=1 LOCALTEST=1 jest --testTimeout=5000",
"test:local:path": "cross-env DEVMODE=1 LOCALTEST=1 jest --runTestsByPath __tests__/data-duplicator.spec.js",
"test:local:path": "cross-env DEVMODE=1 LOCALTEST=1 jest --runTestsByPath __tests__/data-replicator.spec.js",
"test:ci": "cross-env DEVMODE=1 CITEST=1 jest --runInBand --json --outputFile=result.json --testLocationInResults --detectOpenHandles --forceExit --testTimeout=10000",
"run:local": "docker-compose down && docker-compose up -d && yarn wait:local && yarn test:local"
},

View File

@@ -22,6 +22,7 @@
"dependencies": {
"@aws-sdk/rds-signer": "^3.665.0",
"activedirectory2": "^2.1.0",
"archiver": "^7.0.1",
"async-lock": "^1.2.6",
"axios": "^0.21.1",
"body-parser": "^1.19.0",
@@ -62,7 +63,8 @@
"simple-encryptor": "^4.0.0",
"ssh2": "^1.16.0",
"stream-json": "^1.8.0",
"tar": "^6.0.5"
"tar": "^6.0.5",
"yauzl": "^3.2.0"
},
"scripts": {
"start": "env-cmd -f .env node src/index.js --listen-api",

View File

@@ -2,14 +2,20 @@ const fs = require('fs-extra');
const readline = require('readline');
const crypto = require('crypto');
const path = require('path');
const { archivedir, clearArchiveLinksCache, resolveArchiveFolder } = require('../utility/directories');
const { archivedir, clearArchiveLinksCache, resolveArchiveFolder, uploadsdir } = require('../utility/directories');
const socket = require('../utility/socket');
const loadFilesRecursive = require('../utility/loadFilesRecursive');
const getJslFileName = require('../utility/getJslFileName');
const { getLogger, extractErrorLogData } = require('dbgate-tools');
const { getLogger, extractErrorLogData, jsonLinesParse } = require('dbgate-tools');
const dbgateApi = require('../shell');
const jsldata = require('./jsldata');
const platformInfo = require('../utility/platformInfo');
const { isProApp } = require('../utility/checkLicense');
const listZipEntries = require('../utility/listZipEntries');
const unzipJsonLinesFile = require('../shell/unzipJsonLinesFile');
const { zip } = require('lodash');
const zipDirectory = require('../shell/zipDirectory');
const unzipDirectory = require('../shell/unzipDirectory');
const logger = getLogger('archive');
@@ -47,9 +53,31 @@ module.exports = {
return folder;
},
async getZipFiles({ file }) {
const entries = await listZipEntries(path.join(archivedir(), file));
const files = entries.map(entry => {
let name = entry.fileName;
if (isProApp() && entry.fileName.endsWith('.jsonl')) {
name = entry.fileName.slice(0, -6);
}
return {
name: name,
label: name,
type: isProApp() && entry.fileName.endsWith('.jsonl') ? 'jsonl' : 'other',
};
});
return files;
},
files_meta: true,
async files({ folder }) {
try {
if (folder.endsWith('.zip')) {
if (await fs.exists(path.join(archivedir(), folder))) {
return this.getZipFiles({ file: folder });
}
return [];
}
const dir = resolveArchiveFolder(folder);
if (!(await fs.exists(dir))) return [];
const files = await loadFilesRecursive(dir); // fs.readdir(dir);
@@ -91,6 +119,16 @@ module.exports = {
return true;
},
createFile_meta: true,
async createFile({ folder, file, fileType, tableInfo }) {
await fs.writeFile(
path.join(resolveArchiveFolder(folder), `${file}.${fileType}`),
tableInfo ? JSON.stringify({ __isStreamHeader: true, tableInfo }) : ''
);
socket.emitChanged(`archive-files-changed`, { folder });
return true;
},
deleteFile_meta: true,
async deleteFile({ folder, file, fileType }) {
await fs.unlink(path.join(resolveArchiveFolder(folder), `${file}.${fileType}`));
@@ -158,7 +196,7 @@ module.exports = {
deleteFolder_meta: true,
async deleteFolder({ folder }) {
if (!folder) throw new Error('Missing folder parameter');
if (folder.endsWith('.link')) {
if (folder.endsWith('.link') || folder.endsWith('.zip')) {
await fs.unlink(path.join(archivedir(), folder));
} else {
await fs.rmdir(path.join(archivedir(), folder), { recursive: true });
@@ -204,9 +242,10 @@ module.exports = {
},
async getNewArchiveFolder({ database }) {
const isLink = database.endsWith(database);
const name = isLink ? database.slice(0, -5) : database;
const suffix = isLink ? '.link' : '';
const isLink = database.endsWith('.link');
const isZip = database.endsWith('.zip');
const name = isLink ? database.slice(0, -5) : isZip ? database.slice(0, -4) : database;
const suffix = isLink ? '.link' : isZip ? '.zip' : '';
if (!(await fs.exists(path.join(archivedir(), database)))) return database;
let index = 2;
while (await fs.exists(path.join(archivedir(), `${name}${index}${suffix}`))) {
@@ -214,4 +253,58 @@ module.exports = {
}
return `${name}${index}${suffix}`;
},
getArchiveData_meta: true,
async getArchiveData({ folder, file }) {
let rows;
if (folder.endsWith('.zip')) {
rows = await unzipJsonLinesFile(path.join(archivedir(), folder), `${file}.jsonl`);
} else {
rows = jsonLinesParse(await fs.readFile(path.join(archivedir(), folder, `${file}.jsonl`), { encoding: 'utf8' }));
}
return rows.filter(x => !x.__isStreamHeader);
},
saveUploadedZip_meta: true,
async saveUploadedZip({ filePath, fileName }) {
if (!fileName?.endsWith('.zip')) {
throw new Error(`${fileName} is not a ZIP file`);
}
const folder = await this.getNewArchiveFolder({ database: fileName });
await fs.copyFile(filePath, path.join(archivedir(), folder));
socket.emitChanged(`archive-folders-changed`);
return null;
},
zip_meta: true,
async zip({ folder }) {
const newFolder = await this.getNewArchiveFolder({ database: folder + '.zip' });
await zipDirectory(path.join(archivedir(), folder), path.join(archivedir(), newFolder));
socket.emitChanged(`archive-folders-changed`);
return null;
},
unzip_meta: true,
async unzip({ folder }) {
const newFolder = await this.getNewArchiveFolder({ database: folder.slice(0, -4) });
await unzipDirectory(path.join(archivedir(), folder), path.join(archivedir(), newFolder));
socket.emitChanged(`archive-folders-changed`);
return null;
},
getZippedPath_meta: true,
async getZippedPath({ folder }) {
if (folder.endsWith('.zip')) {
return { filePath: path.join(archivedir(), folder) };
}
const uploadName = crypto.randomUUID();
const filePath = path.join(uploadsdir(), uploadName);
await zipDirectory(path.join(archivedir(), folder), filePath);
return { filePath };
},
};

View File

@@ -19,6 +19,14 @@ const storage = require('./storage');
const { getAuthProxyUrl } = require('../utility/authProxy');
const { getPublicHardwareFingerprint } = require('../utility/hardwareFingerprint');
const { extractErrorMessage } = require('dbgate-tools');
const {
generateTransportEncryptionKey,
createTransportEncryptor,
recryptConnection,
getInternalEncryptor,
recryptUser,
recryptObjectPasswordFieldInPlace,
} = require('../utility/crypting');
const lock = new AsyncLock();
@@ -107,6 +115,7 @@ module.exports = {
datadir(),
processArgs.runE2eTests ? 'connections-e2etests.jsonl' : 'connections.jsonl'
),
supportCloudAutoUpgrade: !!process.env.CLOUD_UPGRADE_FILE,
...currentVersion,
};
@@ -144,7 +153,7 @@ module.exports = {
const res = {
...value,
};
if (value['app.useNativeMenu'] !== true && value['app.useNativeMenu'] !== false) {
if (platformInfo.isElectron && value['app.useNativeMenu'] !== true && value['app.useNativeMenu'] !== false) {
// res['app.useNativeMenu'] = os.platform() == 'darwin' ? true : false;
res['app.useNativeMenu'] = false;
}
@@ -161,14 +170,19 @@ module.exports = {
async loadSettings() {
try {
const settingsText = await fs.readFile(
path.join(datadir(), processArgs.runE2eTests ? 'settings-e2etests.json' : 'settings.json'),
{ encoding: 'utf-8' }
);
return {
...this.fillMissingSettings(JSON.parse(settingsText)),
'other.licenseKey': platformInfo.isElectron ? await this.loadLicenseKey() : undefined,
};
if (process.env.STORAGE_DATABASE) {
const settings = await storage.readConfig({ group: 'settings' });
return this.fillMissingSettings(settings);
} else {
const settingsText = await fs.readFile(
path.join(datadir(), processArgs.runE2eTests ? 'settings-e2etests.json' : 'settings.json'),
{ encoding: 'utf-8' }
);
return {
...this.fillMissingSettings(JSON.parse(settingsText)),
'other.licenseKey': platformInfo.isElectron ? await this.loadLicenseKey() : undefined,
};
}
} catch (err) {
return this.fillMissingSettings({});
}
@@ -246,19 +260,31 @@ module.exports = {
const res = await lock.acquire('settings', async () => {
const currentValue = await this.loadSettings();
try {
const updated = {
...currentValue,
..._.omit(values, ['other.licenseKey']),
};
await fs.writeFile(
path.join(datadir(), processArgs.runE2eTests ? 'settings-e2etests.json' : 'settings.json'),
JSON.stringify(updated, undefined, 2)
);
// this.settingsValue = updated;
let updated = currentValue;
if (process.env.STORAGE_DATABASE) {
updated = {
...currentValue,
...values,
};
await storage.writeConfig({
group: 'settings',
config: updated,
});
} else {
updated = {
...currentValue,
..._.omit(values, ['other.licenseKey']),
};
await fs.writeFile(
path.join(datadir(), processArgs.runE2eTests ? 'settings-e2etests.json' : 'settings.json'),
JSON.stringify(updated, undefined, 2)
);
// this.settingsValue = updated;
if (currentValue['other.licenseKey'] != values['other.licenseKey']) {
await this.saveLicenseKey({ licenseKey: values['other.licenseKey'] });
socket.emitChanged(`config-changed`);
if (currentValue['other.licenseKey'] != values['other.licenseKey']) {
await this.saveLicenseKey({ licenseKey: values['other.licenseKey'] });
socket.emitChanged(`config-changed`);
}
}
socket.emitChanged(`settings-changed`);
@@ -281,4 +307,91 @@ module.exports = {
const resp = await checkLicenseKey(licenseKey);
return resp;
},
recryptDatabaseForExport(db) {
const encryptionKey = generateTransportEncryptionKey();
const transportEncryptor = createTransportEncryptor(encryptionKey);
const config = _.cloneDeep([
...(db.config?.filter(c => !(c.group == 'admin' && c.key == 'encryptionKey')) || []),
{ group: 'admin', key: 'encryptionKey', value: encryptionKey },
]);
const adminPassword = config.find(c => c.group == 'admin' && c.key == 'adminPassword');
recryptObjectPasswordFieldInPlace(adminPassword, 'value', getInternalEncryptor(), transportEncryptor);
return {
...db,
connections: db.connections?.map(conn => recryptConnection(conn, getInternalEncryptor(), transportEncryptor)),
users: db.users?.map(conn => recryptUser(conn, getInternalEncryptor(), transportEncryptor)),
config,
};
},
recryptDatabaseFromImport(db) {
const encryptionKey = db.config?.find(c => c.group == 'admin' && c.key == 'encryptionKey')?.value;
if (!encryptionKey) {
throw new Error('Missing encryption key in the database');
}
const config = _.cloneDeep(db.config || []).filter(c => !(c.group == 'admin' && c.key == 'encryptionKey'));
const transportEncryptor = createTransportEncryptor(encryptionKey);
const adminPassword = config.find(c => c.group == 'admin' && c.key == 'adminPassword');
recryptObjectPasswordFieldInPlace(adminPassword, 'value', transportEncryptor, getInternalEncryptor());
return {
...db,
connections: db.connections?.map(conn => recryptConnection(conn, transportEncryptor, getInternalEncryptor())),
users: db.users?.map(conn => recryptUser(conn, transportEncryptor, getInternalEncryptor())),
config,
};
},
exportConnectionsAndSettings_meta: true,
async exportConnectionsAndSettings(_params, req) {
if (!hasPermission(`admin/config`, req)) {
throw new Error('Permission denied: admin/config');
}
if (connections.portalConnections) {
throw new Error('Not allowed');
}
if (process.env.STORAGE_DATABASE) {
const db = await storage.getExportedDatabase();
return this.recryptDatabaseForExport(db);
}
return this.recryptDatabaseForExport({
connections: (await connections.list(null, req)).map((conn, index) => ({
..._.omit(conn, ['_id']),
id: index + 1,
conid: conn._id,
})),
});
},
importConnectionsAndSettings_meta: true,
async importConnectionsAndSettings({ db }, req) {
if (!hasPermission(`admin/config`, req)) {
throw new Error('Permission denied: admin/config');
}
if (connections.portalConnections) {
throw new Error('Not allowed');
}
const recryptedDb = this.recryptDatabaseFromImport(db);
if (process.env.STORAGE_DATABASE) {
await storage.replicateImportedDatabase(recryptedDb);
} else {
await connections.importFromArray(
recryptedDb.connections.map(conn => ({
..._.omit(conn, ['conid', 'id']),
_id: conn.conid,
}))
);
}
return true;
},
};

View File

@@ -102,8 +102,8 @@ function getPortalCollections() {
trustServerCertificate: process.env[`SSL_TRUST_CERTIFICATE_${id}`],
}));
for(const conn of connections) {
for(const prop in process.env) {
for (const conn of connections) {
for (const prop in process.env) {
if (prop.startsWith(`CONNECTION_${conn._id}_`)) {
const name = prop.substring(`CONNECTION_${conn._id}_`.length);
conn[name] = process.env[prop];
@@ -316,6 +316,18 @@ module.exports = {
return res;
},
importFromArray(list) {
this.datastore.transformAll(connections => {
const mapped = connections.map(x => {
const found = list.find(y => y._id == x._id);
if (found) return found;
return x;
});
return [...mapped, ...list.filter(x => !connections.find(y => y._id == x._id))];
});
socket.emitChanged('connection-list-changed');
},
async checkUnsavedConnectionsLimit() {
if (!this.datastore) {
return;

View File

@@ -9,6 +9,9 @@ const scheduler = require('./scheduler');
const getDiagramExport = require('../utility/getDiagramExport');
const apps = require('./apps');
const getMapExport = require('../utility/getMapExport');
const dbgateApi = require('../shell');
const { getLogger } = require('dbgate-tools');
const logger = getLogger('files');
function serialize(format, data) {
if (format == 'text') return data;
@@ -219,4 +222,60 @@ module.exports = {
return path.join(dir, file);
}
},
createZipFromJsons_meta: true,
async createZipFromJsons({ db, filePath }) {
logger.info(`Creating zip file from JSONS ${filePath}`);
await dbgateApi.zipJsonLinesData(db, filePath);
return true;
},
getJsonsFromZip_meta: true,
async getJsonsFromZip({ filePath }) {
const res = await dbgateApi.unzipJsonLinesData(filePath);
return res;
},
downloadText_meta: true,
async downloadText({ uri }, req) {
if (!uri) return null;
const filePath = await dbgateApi.download(uri);
const text = await fs.readFile(filePath, {
encoding: 'utf-8',
});
return text;
},
saveUploadedFile_meta: true,
async saveUploadedFile({ filePath, fileName }) {
const FOLDERS = ['sql', 'sqlite'];
for (const folder of FOLDERS) {
if (fileName.toLowerCase().endsWith('.' + folder)) {
logger.info(`Saving ${folder} file ${fileName}`);
await fs.copyFile(filePath, path.join(filesdir(), folder, fileName));
socket.emitChanged(`files-changed`, { folder: folder });
socket.emitChanged(`all-files-changed`);
return {
name: path.basename(filePath),
folder: folder,
};
}
}
throw new Error(`${fileName} doesn't have one of supported extensions: ${FOLDERS.join(', ')}`);
},
exportFile_meta: true,
async exportFile({ folder, file, filePath }, req) {
if (!hasPermission(`files/${folder}/read`, req)) return false;
await fs.copyFile(path.join(filesdir(), folder, file), filePath);
return true;
},
simpleCopy_meta: true,
async simpleCopy({ sourceFilePath, targetFilePath }, req) {
await fs.copyFile(sourceFilePath, targetFilePath);
return true;
},
};

View File

@@ -8,6 +8,8 @@ const getJslFileName = require('../utility/getJslFileName');
const JsonLinesDatastore = require('../utility/JsonLinesDatastore');
const requirePluginFunction = require('../utility/requirePluginFunction');
const socket = require('../utility/socket');
const crypto = require('crypto');
const dbgateApi = require('../shell');
function readFirstLine(file) {
return new Promise((resolve, reject) => {
@@ -293,4 +295,11 @@ module.exports = {
})),
};
},
downloadJslData_meta: true,
async downloadJslData({ uri }) {
const jslid = crypto.randomUUID();
await dbgateApi.download(uri, { targetFile: getJslFileName(jslid) });
return { jslid };
},
};

View File

@@ -96,9 +96,9 @@ module.exports = {
handle_ping() {},
handle_freeData(runid, { freeData }) {
handle_dataResult(runid, { dataResult }) {
const { resolve } = this.requests[runid];
resolve(freeData);
resolve(dataResult);
delete this.requests[runid];
},
@@ -328,4 +328,24 @@ module.exports = {
});
return promise;
},
scriptResult_meta: true,
// Runs a JSON-described script in a worker process and resolves with its data
// result. Only JSON script descriptors are accepted (arbitrary javascript is
// rejected to keep execution controlled).
async scriptResult({ script }) {
  if (script.type != 'json') {
    return { errorMessage: 'Only JSON scripts are allowed' };
  }
  const promise = new Promise((resolve, reject) => {
    const runid = crypto.randomUUID();
    // resolved later by the 'dataResult' message handler matching this runid
    this.requests[runid] = { resolve, reject, exitOnStreamError: true };
    // substitute the generated runid into { $replace: 'runid' } placeholders
    // of the script descriptor
    const cloned = _.cloneDeepWith(script, node => {
      if (node?.$replace == 'runid') {
        return runid;
      }
    });
    const js = jsonScriptToJavascript(cloned);
    this.startCore(runid, scriptTemplate(js, false));
  });
  return promise;
},
};

View File

@@ -39,52 +39,6 @@ module.exports = {
});
},
uploadDataFile_meta: {
method: 'post',
raw: true,
},
uploadDataFile(req, res) {
const { data } = req.files || {};
if (!data) {
res.json(null);
return;
}
if (data.name.toLowerCase().endsWith('.sql')) {
logger.info(`Uploading SQL file ${data.name}, size=${data.size}`);
data.mv(path.join(filesdir(), 'sql', data.name), () => {
res.json({
name: data.name,
folder: 'sql',
});
socket.emitChanged(`files-changed`, { folder: 'sql' });
socket.emitChanged(`all-files-changed`);
});
return;
}
res.json(null);
},
saveDataFile_meta: true,
async saveDataFile({ filePath }) {
if (filePath.toLowerCase().endsWith('.sql')) {
logger.info(`Saving SQL file ${filePath}`);
await fs.copyFile(filePath, path.join(filesdir(), 'sql', path.basename(filePath)));
socket.emitChanged(`files-changed`, { folder: 'sql' });
socket.emitChanged(`all-files-changed`);
return {
name: path.basename(filePath),
folder: 'sql',
};
}
return null;
},
get_meta: {
method: 'get',
raw: true,

View File

@@ -3,7 +3,9 @@ const { archivedir, resolveArchiveFolder } = require('../utility/directories');
const jsonLinesReader = require('./jsonLinesReader');
function archiveReader({ folderName, fileName, ...other }) {
const jsonlFile = path.join(resolveArchiveFolder(folderName), `${fileName}.jsonl`);
const jsonlFile = folderName.endsWith('.zip')
? `zip://archive:${folderName}//${fileName}.jsonl`
: path.join(resolveArchiveFolder(folderName), `${fileName}.jsonl`);
const res = jsonLinesReader({ fileName: jsonlFile, ...other });
return res;
}

View File

@@ -15,9 +15,9 @@ class CollectorWriterStream extends stream.Writable {
_final(callback) {
process.send({
msgtype: 'freeData',
msgtype: 'dataResult',
runid: this.runid,
freeData: { rows: this.rows, structure: this.structure },
dataResult: { rows: this.rows, structure: this.structure },
});
callback();
}

View File

@@ -1,61 +0,0 @@
const stream = require('stream');
const path = require('path');
const { quoteFullName, fullNameToString, getLogger } = require('dbgate-tools');
const requireEngineDriver = require('../utility/requireEngineDriver');
const { connectUtility } = require('../utility/connectUtility');
const logger = getLogger('dataDuplicator');
const { DataDuplicator } = require('dbgate-datalib');
const copyStream = require('./copyStream');
const jsonLinesReader = require('./jsonLinesReader');
const { resolveArchiveFolder } = require('../utility/directories');
/**
 * Duplicates JSON-lines data from an archive folder into a target database
 * using the DataDuplicator engine from dbgate-datalib.
 *
 * @param {object} params
 * @param {object} params.connection - target DB connection definition
 * @param {string} [params.archive] - archive folder name (resolved inside the archive dir)
 * @param {string} [params.folder] - alternative source folder; 'archive:' prefix resolves inside the archive dir
 * @param {Array} params.items - per-table descriptors (name, operation, matchColumns, optional openStream)
 * @param {object} [params.options] - duplicator options passed through
 * @param {object} [params.analysedStructure] - pre-analysed DB structure; analysed on demand when omitted
 * @param {object} [params.driver] - engine driver; derived from connection when omitted
 * @param {object} [params.systemConnection] - already-open connection to reuse (not closed here)
 */
async function dataDuplicator({
  connection,
  archive,
  folder,
  items,
  options,
  analysedStructure = null,
  driver,
  systemConnection,
}) {
  if (!driver) driver = requireEngineDriver(connection);
  // reuse provided connection, otherwise open a writable one
  const dbhan = systemConnection || (await connectUtility(driver, connection, 'write'));
  try {
    if (!analysedStructure) {
      analysedStructure = await driver.analyseFull(dbhan);
    }
    // archive name wins over folder; 'archive:'-prefixed folders map into the archive dir
    const sourceDir = archive
      ? resolveArchiveFolder(archive)
      : folder?.startsWith('archive:')
      ? resolveArchiveFolder(folder.substring('archive:'.length))
      : folder;
    const dupl = new DataDuplicator(
      dbhan,
      driver,
      analysedStructure,
      items.map(item => ({
        name: item.name,
        operation: item.operation,
        matchColumns: item.matchColumns,
        // default source: <sourceDir>/<table>.jsonl, unless caller supplies a stream factory
        openStream:
          item.openStream || (() => jsonLinesReader({ fileName: path.join(sourceDir, `${item.name}.jsonl`) })),
      })),
      stream,
      copyStream,
      options
    );
    await dupl.run();
  } finally {
    // close only connections we opened ourselves
    if (!systemConnection) {
      await driver.close(dbhan);
    }
  }
}
module.exports = dataDuplicator;

View File

@@ -0,0 +1,96 @@
const stream = require('stream');
const path = require('path');
const { quoteFullName, fullNameToString, getLogger } = require('dbgate-tools');
const requireEngineDriver = require('../utility/requireEngineDriver');
const { connectUtility } = require('../utility/connectUtility');
const logger = getLogger('datareplicator');
const { DataReplicator } = require('dbgate-datalib');
const { compileCompoudEvalCondition } = require('dbgate-filterparser');
const copyStream = require('./copyStream');
const jsonLinesReader = require('./jsonLinesReader');
const { resolveArchiveFolder } = require('../utility/directories');
const { evaluateCondition } = require('dbgate-sqltree');
/**
 * Builds a per-row predicate for a replicator operation.
 *
 * @param {boolean} enabled - whether the operation is enabled at all
 * @param {object} [condition] - optional compound filter condition; when
 *   absent, the operation applies to every row
 * @returns {(row: object) => boolean} predicate deciding whether the
 *   operation runs for a given row
 */
function compileOperationFunction(enabled, condition) {
  // Disabled operation: never applies, regardless of condition.
  if (!enabled) return _row => false;
  // No condition configured: applies to every row.
  if (!condition) return _row => true;
  // FIX: compile only when a condition actually exists; the original compiled
  // unconditionally, doing needless work (and relying on the compiler
  // tolerating a nullish condition) even when the result was never used.
  const conditionCompiled = compileCompoudEvalCondition(condition);
  return row => evaluateCondition(conditionCompiled, row);
}
/**
 * Replicates data from JSON-lines sources (archive folder, *.zip archive,
 * in-memory arrays or explicit streams) into a target database using the
 * DataReplicator engine from dbgate-datalib.
 *
 * @param {object} params
 * @param {object} params.connection - target DB connection definition
 * @param {string} [params.archive] - archive folder name, or a *.zip archive name
 * @param {string} [params.folder] - source folder; 'archive:' prefix resolves inside the archive dir
 * @param {Array} params.items - per-table replication descriptors
 * @param {object} [params.options] - replicator options; options.runid enables IPC result reporting
 * @param {object} [params.analysedStructure] - pre-analysed DB structure; analysed on demand when omitted
 * @param {object} [params.driver] - engine driver; derived from connection when omitted
 * @param {object} [params.systemConnection] - already-open connection to reuse (not closed here)
 * @returns {Promise<object>} replication result summary (repl.result)
 */
async function dataReplicator({
  connection,
  archive,
  folder,
  items,
  options,
  analysedStructure = null,
  driver,
  systemConnection,
}) {
  if (!driver) driver = requireEngineDriver(connection);
  // reuse provided connection, otherwise open a writable one
  const dbhan = systemConnection || (await connectUtility(driver, connection, 'write'));
  try {
    if (!analysedStructure) {
      analysedStructure = await driver.analyseFull(dbhan);
    }
    // Build the source-file path resolver; ZIP archives use the zip:// pseudo-URI
    let joinPath;
    if (archive?.endsWith('.zip')) {
      joinPath = file => `zip://archive:${archive}//${file}`;
    } else {
      const sourceDir = archive
        ? resolveArchiveFolder(archive)
        : folder?.startsWith('archive:')
        ? resolveArchiveFolder(folder.substring('archive:'.length))
        : folder;
      joinPath = file => path.join(sourceDir, file);
    }
    const repl = new DataReplicator(
      dbhan,
      driver,
      analysedStructure,
      items.map(item => {
        return {
          name: item.name,
          matchColumns: item.matchColumns,
          // each operation is an enabled flag plus an optional row-filter condition
          findExisting: compileOperationFunction(item.findExisting, item.findCondition),
          createNew: compileOperationFunction(item.createNew, item.createCondition),
          updateExisting: compileOperationFunction(item.updateExisting, item.updateCondition),
          deleteMissing: !!item.deleteMissing,
          deleteRestrictionColumns: item.deleteRestrictionColumns ?? [],
          // source priority: explicit stream factory > in-memory array > <name>.jsonl file
          openStream: item.openStream
            ? item.openStream
            : item.jsonArray
            ? () => stream.Readable.from(item.jsonArray)
            : () => jsonLinesReader({ fileName: joinPath(`${item.name}.jsonl`) }),
        };
      }),
      stream,
      copyStream,
      options
    );
    await repl.run();
    // When invoked as a forked worker, report the result back over IPC
    if (options?.runid) {
      process.send({
        msgtype: 'dataResult',
        runid: options?.runid,
        dataResult: repl.result,
      });
    }
    return repl.result;
  } finally {
    // close only connections we opened ourselves
    if (!systemConnection) {
      await driver.close(dbhan);
    }
  }
}
module.exports = dataReplicator;

View File

@@ -1,14 +1,30 @@
const crypto = require('crypto');
const path = require('path');
const { uploadsdir } = require('../utility/directories');
const { uploadsdir, archivedir } = require('../utility/directories');
const { downloadFile } = require('../utility/downloader');
const extractSingleFileFromZip = require('../utility/extractSingleFileFromZip');
async function download(url) {
if (url && url.match(/(^http:\/\/)|(^https:\/\/)/)) {
const tmpFile = path.join(uploadsdir(), crypto.randomUUID());
await downloadFile(url, tmpFile);
return tmpFile;
/**
 * Resolves a URI to a local file path.
 * - http(s) URLs are downloaded into a temp file (or `targetFile` when given)
 * - `zip://<zipfile>//<entry>` URIs extract a single entry from a ZIP file;
 *   an 'archive:' prefix in <zipfile> resolves inside the archive directory
 * - any other value is assumed to already be a local path and returned as-is
 *
 * @param {string} url - URI or local file path
 * @param {{targetFile?: string}} [options] - optional explicit destination path
 * @returns {Promise<string>} local file path (or the input when not a known URI)
 */
async function download(url, options = {}) {
  const { targetFile } = options || {};
  if (url) {
    if (url.match(/(^http:\/\/)|(^https:\/\/)/)) {
      const destFile = targetFile || path.join(uploadsdir(), crypto.randomUUID());
      await downloadFile(url, destFile);
      return destFile;
    }
    // NOTE(review): both capture groups are greedy, so the URI is split at the
    // LAST '//' - entry names containing '//' are presumably not expected; verify
    const zipMatch = url.match(/^zip\:\/\/(.*)\/\/(.*)$/);
    if (zipMatch) {
      const destFile = targetFile || path.join(uploadsdir(), crypto.randomUUID());
      let zipFile = zipMatch[1];
      if (zipFile.startsWith('archive:')) {
        // 'archive:foo.zip' -> <archivedir>/foo.zip
        zipFile = path.join(archivedir(), zipFile.substring('archive:'.length));
      }
      await extractSingleFileFromZip(zipFile, zipMatch[2], destFile);
      return destFile;
    }
  }
  return url;
}

View File

@@ -25,7 +25,7 @@ const importDatabase = require('./importDatabase');
const loadDatabase = require('./loadDatabase');
const generateModelSql = require('./generateModelSql');
const modifyJsonLinesReader = require('./modifyJsonLinesReader');
const dataDuplicator = require('./dataDuplicator');
const dataReplicator = require('./dataReplicator');
const dbModelToJson = require('./dbModelToJson');
const jsonToDbModel = require('./jsonToDbModel');
const jsonReader = require('./jsonReader');
@@ -35,6 +35,11 @@ const autoIndexForeignKeysTransform = require('./autoIndexForeignKeysTransform')
const generateDeploySql = require('./generateDeploySql');
const dropAllDbObjects = require('./dropAllDbObjects');
const importDbFromFolder = require('./importDbFromFolder');
const zipDirectory = require('./zipDirectory');
const unzipDirectory = require('./unzipDirectory');
const zipJsonLinesData = require('./zipJsonLinesData');
const unzipJsonLinesData = require('./unzipJsonLinesData');
const unzipJsonLinesFile = require('./unzipJsonLinesFile');
const dbgateApi = {
queryReader,
@@ -64,7 +69,7 @@ const dbgateApi = {
loadDatabase,
generateModelSql,
modifyJsonLinesReader,
dataDuplicator,
dataReplicator,
dbModelToJson,
jsonToDbModel,
dataTypeMapperTransform,
@@ -73,6 +78,11 @@ const dbgateApi = {
generateDeploySql,
dropAllDbObjects,
importDbFromFolder,
zipDirectory,
unzipDirectory,
zipJsonLinesData,
unzipJsonLinesData,
unzipJsonLinesFile,
};
requirePlugin.initializeDbgateApi(dbgateApi);

View File

@@ -36,9 +36,10 @@ async function jsonLinesWriter({ fileName, encoding = 'utf-8', header = true })
logger.info(`Writing file ${fileName}`);
const stringify = new StringifyStream({ header });
const fileStream = fs.createWriteStream(fileName, encoding);
stringify.pipe(fileStream);
stringify['finisher'] = fileStream;
return stringify;
return [stringify, fileStream];
// stringify.pipe(fileStream);
// stringify['finisher'] = fileStream;
// return stringify;
}
module.exports = jsonLinesWriter;

View File

@@ -0,0 +1,91 @@
const yauzl = require('yauzl');
const fs = require('fs');
const path = require('path');
const { getLogger, extractErrorLogData } = require('dbgate-tools');
const logger = getLogger('unzipDirectory');
/**
* Extracts an entire ZIP file, preserving its internal directory layout.
*
* @param {string} zipPath Path to the ZIP file on disk.
* @param {string} outputDirectory Folder to create / overwrite with the contents.
* @returns {Promise<boolean>} Resolves `true` on success, rejects on error.
*/
/**
 * Extracts an entire ZIP file, preserving its internal directory layout.
 *
 * @param {string} zipPath Path to the ZIP file on disk.
 * @param {string} outputDirectory Folder to create / overwrite with the contents.
 * @returns {Promise<boolean>} Resolves `true` on success, rejects on error.
 */
function unzipDirectory(zipPath, outputDirectory) {
  return new Promise((resolve, reject) => {
    yauzl.open(zipPath, { lazyEntries: true }, (err, zipFile) => {
      if (err) return reject(err);

      /** Pending per-file extractions - the main promise resolves after they are all done */
      const pending = [];

      // kick things off
      zipFile.readEntry();

      zipFile.on('entry', entry => {
        const destPath = path.join(outputDirectory, entry.fileName);

        // Handle directories (their names always end with "/" in ZIPs)
        if (/\/$/.test(entry.fileName)) {
          // Ensure directory exists, then continue to next entry
          fs.promises
            .mkdir(destPath, { recursive: true })
            .then(() => zipFile.readEntry())
            .catch(reject);
          return;
        }

        // Handle files
        const filePromise = fs.promises
          .mkdir(path.dirname(destPath), { recursive: true }) // make sure parent dirs exist
          .then(
            () =>
              new Promise((res, rej) => {
                zipFile.openReadStream(entry, (err, readStream) => {
                  if (err) return rej(err);

                  const writeStream = fs.createWriteStream(destPath);
                  readStream.pipe(writeStream);

                  // proceed to next entry once this one is fully consumed
                  readStream.on('end', () => zipFile.readEntry());

                  // FIX: without this handler a failing entry stream left the
                  // per-file promise pending forever, hanging the extraction
                  readStream.on('error', readErr => {
                    logger.error(extractErrorLogData(readErr), `Error reading "${entry.fileName}" from "${zipPath}".`);
                    rej(readErr);
                  });

                  writeStream.on('finish', () => {
                    logger.info(`Extracted "${entry.fileName}" → "${destPath}".`);
                    res();
                  });

                  writeStream.on('error', writeErr => {
                    logger.error(
                      extractErrorLogData(writeErr),
                      `Error extracting "${entry.fileName}" from "${zipPath}".`
                    );
                    rej(writeErr);
                  });
                });
              })
          );

        pending.push(filePromise);
      });

      // Entire archive enumerated; wait for all streams to finish
      zipFile.on('end', () => {
        Promise.all(pending)
          .then(() => {
            logger.info(`Archive "${zipPath}" fully extracted to "${outputDirectory}".`);
            resolve(true);
          })
          .catch(reject);
      });

      zipFile.on('error', err => {
        logger.error(extractErrorLogData(err), `ZIP file error in ${zipPath}.`);
        reject(err);
      });
    });
  });
}
module.exports = unzipDirectory;

View File

@@ -0,0 +1,60 @@
const yauzl = require('yauzl');
const fs = require('fs');
const { jsonLinesParse } = require('dbgate-tools');
/**
 * Reads all *.jsonl entries from a ZIP archive and parses their contents.
 *
 * @param {string} zipPath - path to the ZIP file on disk
 * @returns {Promise<object>} map of entry name (without the .jsonl suffix)
 *   to the parsed JSON-lines rows
 */
function unzipJsonLinesData(zipPath) {
  return new Promise((resolve, reject) => {
    // Open the zip file
    yauzl.open(zipPath, { lazyEntries: true }, (err, zipfile) => {
      if (err) {
        return reject(err);
      }
      const results = {};
      // Start reading entries
      zipfile.readEntry();
      zipfile.on('entry', entry => {
        // Only process .jsonl files
        if (/\.jsonl$/i.test(entry.fileName)) {
          zipfile.openReadStream(entry, (err, readStream) => {
            if (err) {
              return reject(err);
            }
            const chunks = [];
            readStream.on('data', chunk => chunks.push(chunk));
            // FIX: propagate entry-stream failures; previously an error here
            // left the promise pending forever
            readStream.on('error', streamErr => reject(streamErr));
            readStream.on('end', () => {
              try {
                const fileContent = Buffer.concat(chunks).toString('utf-8');
                const parsedJson = jsonLinesParse(fileContent);
                // FIX: strip the suffix case-insensitively, matching the test
                // above (an 'X.JSONL' entry previously kept its suffix in the key)
                results[entry.fileName.replace(/\.jsonl$/i, '')] = parsedJson;
              } catch (parseError) {
                return reject(parseError);
              }
              // Move to the next entry
              zipfile.readEntry();
            });
          });
        } else {
          // Not a JSON-lines file, skip
          zipfile.readEntry();
        }
      });
      // Resolve when no more entries
      zipfile.on('end', () => {
        resolve(results);
      });
      // Catch errors from zipfile
      zipfile.on('error', zipErr => {
        reject(zipErr);
      });
    });
  });
}
module.exports = unzipJsonLinesData;

View File

@@ -0,0 +1,59 @@
const yauzl = require('yauzl');
const fs = require('fs');
const { jsonLinesParse } = require('dbgate-tools');
/**
 * Extracts a single JSON-lines entry from a ZIP archive and parses it.
 *
 * @param {string} zipPath - path to the ZIP file on disk
 * @param {string} fileInZip - exact entry name inside the archive
 * @returns {Promise<any[]|null>} parsed rows, or null when the entry is not present
 */
function unzipJsonLinesFile(zipPath, fileInZip) {
  return new Promise((resolve, reject) => {
    // Open the zip file
    yauzl.open(zipPath, { lazyEntries: true }, (err, zipfile) => {
      if (err) {
        return reject(err);
      }
      let result = null;
      // Start reading entries
      zipfile.readEntry();
      zipfile.on('entry', entry => {
        if (entry.fileName == fileInZip) {
          zipfile.openReadStream(entry, (err, readStream) => {
            if (err) {
              return reject(err);
            }
            const chunks = [];
            readStream.on('data', chunk => chunks.push(chunk));
            // FIX: propagate entry-stream failures; previously an error here
            // left the promise pending forever
            readStream.on('error', streamErr => reject(streamErr));
            readStream.on('end', () => {
              try {
                const fileContent = Buffer.concat(chunks).toString('utf-8');
                const parsedJson = jsonLinesParse(fileContent);
                result = parsedJson;
              } catch (parseError) {
                return reject(parseError);
              }
              // Move to the next entry
              zipfile.readEntry();
            });
          });
        } else {
          // Not the requested file, skip
          zipfile.readEntry();
        }
      });
      // Resolve when no more entries
      zipfile.on('end', () => {
        resolve(result);
      });
      // Catch errors from zipfile
      zipfile.on('error', zipErr => {
        reject(zipErr);
      });
    });
  });
}
module.exports = unzipJsonLinesFile;

View File

@@ -0,0 +1,49 @@
const fs = require('fs');
const path = require('path');
const archiver = require('archiver');
const { getLogger, extractErrorLogData } = require('dbgate-tools');
const { archivedir } = require('../utility/directories');
const logger = getLogger('compressDirectory');
/**
 * Compresses a directory into a ZIP file, excluding nested *.zip files.
 *
 * @param {string} inputDirectory - folder whose contents are archived
 * @param {string} outputFile - target ZIP path; an 'archive:' prefix resolves
 *   inside the DbGate archive directory
 * @returns {Promise<void>} resolves when the ZIP has been fully written
 */
function zipDirectory(inputDirectory, outputFile) {
  if (outputFile.startsWith('archive:')) {
    outputFile = path.join(archivedir(), outputFile.substring('archive:'.length));
  }
  return new Promise((resolve, reject) => {
    const output = fs.createWriteStream(outputFile);
    const archive = archiver('zip', { zlib: { level: 9 } }); // level: 9 => best compression

    // Listen for all archive data to be written
    output.on('close', () => {
      logger.info(`ZIP file created (${archive.pointer()} total bytes)`);
      resolve();
    });

    // FIX: a write-stream failure (bad path, disk full) previously left the
    // promise pending forever; reject instead
    output.on('error', err => {
      logger.error(extractErrorLogData(err), `Error writing ZIP file: ${err.message}`);
      reject(err);
    });

    archive.on('warning', err => {
      logger.warn(extractErrorLogData(err), `Warning while creating ZIP: ${err.message}`);
    });

    archive.on('error', err => {
      logger.error(extractErrorLogData(err), `Error while creating ZIP: ${err.message}`);
      reject(err);
    });

    // Pipe archive data to the file
    archive.pipe(output);

    // Append files from a folder, skipping already-zipped entries
    archive.directory(inputDirectory, false, entryData => {
      if (entryData.name.endsWith('.zip')) {
        return false; // returning false means "do not include"
      }
      // otherwise, include it
      return entryData;
    });

    // Finalize the archive
    archive.finalize();
  });
}
module.exports = zipDirectory;

View File

@@ -0,0 +1,49 @@
const fs = require('fs');
const _ = require('lodash');
const path = require('path');
const archiver = require('archiver');
const { getLogger, extractErrorLogData, jsonLinesStringify } = require('dbgate-tools');
const { archivedir } = require('../utility/directories');
const logger = getLogger('compressDirectory');
/**
 * Writes an in-memory JSON "database" into a ZIP file: every array-valued key
 * becomes a `<key>.jsonl` entry (JSON-lines format); non-array values are skipped.
 *
 * @param {object} jsonDb - map of collection name to array of row objects
 * @param {string} outputFile - target ZIP path; an 'archive:' prefix resolves
 *   inside the DbGate archive directory
 * @returns {Promise<void>} resolves when the ZIP has been fully written
 */
function zipJsonLinesData(jsonDb, outputFile) {
  // FIX: renamed from zipDirectory (copy-paste leftover from zipDirectory.js);
  // the local name now matches the module's purpose; the exported value is unchanged
  if (outputFile.startsWith('archive:')) {
    outputFile = path.join(archivedir(), outputFile.substring('archive:'.length));
  }
  return new Promise((resolve, reject) => {
    const output = fs.createWriteStream(outputFile);
    const archive = archiver('zip', { zlib: { level: 9 } }); // level: 9 => best compression

    // Listen for all archive data to be written
    output.on('close', () => {
      logger.info(`ZIP file created (${archive.pointer()} total bytes)`);
      resolve();
    });

    // FIX: a write-stream failure previously left the promise pending forever
    output.on('error', err => {
      logger.error(extractErrorLogData(err), `Error writing ZIP file: ${err.message}`);
      reject(err);
    });

    archive.on('warning', err => {
      logger.warn(extractErrorLogData(err), `Warning while creating ZIP: ${err.message}`);
    });

    archive.on('error', err => {
      logger.error(extractErrorLogData(err), `Error while creating ZIP: ${err.message}`);
      reject(err);
    });

    // Pipe archive data to the file
    archive.pipe(output);

    // Append one JSON-lines entry per array-valued key
    for (const key in jsonDb) {
      const data = jsonDb[key];
      if (_.isArray(data)) {
        const jsonString = jsonLinesStringify(data);
        archive.append(jsonString, { name: `${key}.jsonl` });
      }
    }

    // Finalize the archive
    archive.finalize();
  });
}
module.exports = zipJsonLinesData;

View File

@@ -4,11 +4,20 @@ const fsp = require('fs/promises');
const semver = require('semver');
const currentVersion = require('../currentVersion');
const { getLogger, extractErrorLogData } = require('dbgate-tools');
const { storageReadConfig } = require('../controllers/storageDb');
const logger = getLogger('cloudUpgrade');
async function checkCloudUpgrade() {
try {
if (process.env.STORAGE_DATABASE) {
const settings = await storageReadConfig('settings');
if (settings['cloud.useAutoUpgrade'] != 1) {
// auto-upgrade not allowed
return;
}
}
const resp = await axios.default.get('https://api.github.com/repos/dbgate/dbgate/releases/latest');
const json = resp.data;
const version = json.name.substring(1);
@@ -43,7 +52,11 @@ async function checkCloudUpgrade() {
logger.info(`Downloaded new version from ${zipUrl}`);
} else {
logger.info(`Checked version ${version} is not newer than ${cloudDownloadedVersion ?? currentVersion.version}, upgrade skippped`);
logger.info(
`Checked version ${version} is not newer than ${
cloudDownloadedVersion ?? currentVersion.version
}, upgrade skippped`
);
}
} catch (err) {
logger.error(extractErrorLogData(err), 'Error checking cloud upgrade');

View File

@@ -59,7 +59,7 @@ async function loadEncryptionKeyFromExternal(storedValue, setStoredValue) {
let _encryptor = null;
function getEncryptor() {
function getInternalEncryptor() {
if (_encryptor) {
return _encryptor;
}
@@ -69,14 +69,14 @@ function getEncryptor() {
function encryptPasswordString(password) {
if (password && !password.startsWith('crypt:')) {
return 'crypt:' + getEncryptor().encrypt(password);
return 'crypt:' + getInternalEncryptor().encrypt(password);
}
return password;
}
function decryptPasswordString(password) {
if (password && password.startsWith('crypt:')) {
return getEncryptor().decrypt(password.substring('crypt:'.length));
return getInternalEncryptor().decrypt(password.substring('crypt:'.length));
}
return password;
}
@@ -85,7 +85,7 @@ function encryptObjectPasswordField(obj, field) {
if (obj && obj[field] && !obj[field].startsWith('crypt:')) {
return {
...obj,
[field]: 'crypt:' + getEncryptor().encrypt(obj[field]),
[field]: 'crypt:' + getInternalEncryptor().encrypt(obj[field]),
};
}
return obj;
@@ -95,7 +95,7 @@ function decryptObjectPasswordField(obj, field) {
if (obj && obj[field] && obj[field].startsWith('crypt:')) {
return {
...obj,
[field]: getEncryptor().decrypt(obj[field].substring('crypt:'.length)),
[field]: getInternalEncryptor().decrypt(obj[field].substring('crypt:'.length)),
};
}
return obj;
@@ -156,6 +156,49 @@ function getEncryptionKey() {
return _encryptionKey;
}
// Generates a fresh random transport encryption key, wrapped with the
// default (built-in) encryptor so it can be handed to the counterparty.
// NOTE(review): presumably simple-encryptor serializes the object before
// encrypting - verify against the simple-encryptor API.
function generateTransportEncryptionKey() {
  const encryptor = simpleEncryptor.createEncryptor(defaultEncryptionKey);
  const result = {
    // 256-bit random key, hex-encoded
    encryptionKey: crypto.randomBytes(32).toString('hex'),
  };
  return encryptor.encrypt(result);
}
// Unwraps data produced by generateTransportEncryptionKey (decrypting with the
// default key) and builds an encryptor that uses the contained transport key.
function createTransportEncryptor(encryptionData) {
  const encryptor = simpleEncryptor.createEncryptor(defaultEncryptionKey);
  const data = encryptor.decrypt(encryptionData);
  const res = simpleEncryptor.createEncryptor(data['encryptionKey']);
  return res;
}
// Re-encrypts a single 'crypt:'-prefixed field of an object with a different
// encryptor. Returns a shallow copy with the field re-encrypted; the input is
// returned unchanged when the field is absent or not encrypted.
function recryptObjectPasswordField(obj, field, decryptEncryptor, encryptEncryptor) {
  const value = obj?.[field];
  if (!value || !value.startsWith('crypt:')) {
    return obj;
  }
  const plain = decryptEncryptor.decrypt(value.substring('crypt:'.length));
  return {
    ...obj,
    [field]: 'crypt:' + encryptEncryptor.encrypt(plain),
  };
}
// In-place variant of recryptObjectPasswordField: mutates obj[field] when it
// carries a 'crypt:'-prefixed value, otherwise leaves the object untouched.
function recryptObjectPasswordFieldInPlace(obj, field, decryptEncryptor, encryptEncryptor) {
  const value = obj?.[field];
  if (value && value.startsWith('crypt:')) {
    const plain = decryptEncryptor.decrypt(value.substring('crypt:'.length));
    obj[field] = 'crypt:' + encryptEncryptor.encrypt(plain);
  }
}
// Re-encrypts every password-bearing field of a connection definition
// (password, SSH password, SSH keyfile password) with a new encryptor.
function recryptConnection(connection, decryptEncryptor, encryptEncryptor) {
  let result = connection;
  for (const field of ['password', 'sshPassword', 'sshKeyfilePassword']) {
    result = recryptObjectPasswordField(result, field, decryptEncryptor, encryptEncryptor);
  }
  return result;
}
// Re-encrypts the password field of a user record with a new encryptor.
function recryptUser(user, decryptEncryptor, encryptEncryptor) {
  return recryptObjectPasswordField(user, 'password', decryptEncryptor, encryptEncryptor);
}
module.exports = {
loadEncryptionKey,
encryptConnection,
@@ -169,4 +212,12 @@ module.exports = {
setEncryptionKey,
encryptPasswordString,
decryptPasswordString,
getInternalEncryptor,
recryptConnection,
recryptUser,
generateTransportEncryptionKey,
createTransportEncryptor,
recryptObjectPasswordField,
recryptObjectPasswordFieldInPlace,
};

View File

@@ -0,0 +1,77 @@
const yauzl = require('yauzl');
const fs = require('fs');
const { getLogger, extractErrorLogData } = require('dbgate-tools');
const logger = getLogger('extractSingleFileFromZip');
/**
* Extracts a single file from a ZIP using yauzl.
* Stops reading the rest of the archive once the file is found.
*
* @param {string} zipPath - Path to the ZIP file on disk.
* @param {string} fileInZip - The file path *inside* the ZIP to extract.
* @param {string} outputPath - Where to write the extracted file on disk.
* @returns {Promise<boolean>} - Resolves with a success message or a "not found" message.
*/
/**
 * Extracts a single file from a ZIP using yauzl.
 * Stops reading the rest of the archive once the file is found.
 *
 * @param {string} zipPath - Path to the ZIP file on disk.
 * @param {string} fileInZip - The file path *inside* the ZIP to extract.
 * @param {string} outputPath - Where to write the extracted file on disk.
 * @returns {Promise<boolean>} - Resolves true when extracted, false when the entry was not found.
 */
function extractSingleFileFromZip(zipPath, fileInZip, outputPath) {
  return new Promise((resolve, reject) => {
    yauzl.open(zipPath, { lazyEntries: true }, (err, zipFile) => {
      if (err) return reject(err);
      let fileFound = false;
      // Start reading the first entry
      zipFile.readEntry();
      zipFile.on('entry', entry => {
        // Compare the entry name to the file we want
        if (entry.fileName === fileInZip) {
          fileFound = true;
          // Open a read stream for this entry
          zipFile.openReadStream(entry, (err, readStream) => {
            if (err) return reject(err);
            // Create a write stream to outputPath
            const writeStream = fs.createWriteStream(outputPath);
            readStream.pipe(writeStream);
            // FIX: reject on read-stream failure; previously such an error
            // left the promise pending forever
            readStream.on('error', readErr => {
              logger.error(extractErrorLogData(readErr), `Error reading "${fileInZip}" from "${zipPath}".`);
              reject(readErr);
            });
            // When the read stream ends, we can close the zipFile
            readStream.on('end', () => {
              // We won't read further entries
              zipFile.close();
            });
            // When the file is finished writing, resolve
            writeStream.on('finish', () => {
              logger.info(`File "${fileInZip}" extracted to "${outputPath}".`);
              resolve(true);
            });
            // Handle write errors
            writeStream.on('error', writeErr => {
              logger.error(extractErrorLogData(writeErr), `Error extracting "${fileInZip}" from "${zipPath}".`);
              reject(writeErr);
            });
          });
        } else {
          // Not the file we want; skip to the next entry
          zipFile.readEntry();
        }
      });
      // If we reach the end without finding the file
      zipFile.on('end', () => {
        if (!fileFound) {
          resolve(false);
        }
      });
      // Handle general errors
      zipFile.on('error', err => {
        logger.error(extractErrorLogData(err), `ZIP file error in ${zipPath}.`);
        reject(err);
      });
    });
  });
}
module.exports = extractSingleFileFromZip;

View File

@@ -0,0 +1,41 @@
const yauzl = require('yauzl');
const path = require('path');
/**
* Lists the files in a ZIP archive using yauzl,
* returning an array of { fileName, uncompressedSize } objects.
*
* @param {string} zipPath - The path to the ZIP file.
* @returns {Promise<Array<{fileName: string, uncompressedSize: number}>>}
*/
/**
 * Lists the files in a ZIP archive using yauzl,
 * returning an array of { fileName, uncompressedSize } objects.
 *
 * @param {string} zipPath - The path to the ZIP file.
 * @returns {Promise<Array<{fileName: string, uncompressedSize: number}>>}
 */
function listZipEntries(zipPath) {
  return new Promise((resolve, reject) => {
    yauzl.open(zipPath, { lazyEntries: true }, (openErr, zipfile) => {
      if (openErr) {
        reject(openErr);
        return;
      }
      const collected = [];
      zipfile.on('entry', ({ fileName, uncompressedSize }) => {
        collected.push({ fileName, uncompressedSize });
        // advance to the next entry; entry data itself is never read
        zipfile.readEntry();
      });
      // All entries enumerated
      zipfile.on('end', () => resolve(collected));
      zipfile.on('error', zipErr => reject(zipErr));
      // request the first entry
      zipfile.readEntry();
    });
  });
}
module.exports = listZipEntries;

View File

@@ -572,6 +572,27 @@ export function changeSetInsertDocuments(
};
}
/**
 * Builds a ChangeSet that merges rows into a table: `updatedRows` become
 * UPDATE commands keyed by `mergeKey`, `insertedRows` become INSERT commands.
 */
export function createMergedRowsChangeSet(
  table: TableInfo,
  updatedRows: any[],
  insertedRows: any[],
  mergeKey: string[]
): ChangeSet {
  const { pureName, schemaName } = table;
  const changeSet = createChangeSet();
  // non-key columns are updated, key columns form the WHERE condition
  changeSet.updates = updatedRows.map(row => ({
    pureName,
    schemaName,
    fields: _.omit(row, mergeKey),
    condition: _.pick(row, mergeKey),
  }));
  // inserted rows are written whole
  changeSet.inserts = insertedRows.map(row => ({
    pureName,
    schemaName,
    fields: row,
  }));
  return changeSet;
}
export function changeSetContainsChanges(changeSet: ChangeSet) {
if (!changeSet) return false;
return (

View File

@@ -1,326 +0,0 @@
import {
createAsyncWriteStream,
extractErrorLogData,
getLogger,
runCommandOnDriver,
runQueryOnDriver,
} from 'dbgate-tools';
import { DatabaseInfo, EngineDriver, ForeignKeyInfo, TableInfo } from 'dbgate-types';
import _pick from 'lodash/pick';
import _omit from 'lodash/omit';
const logger = getLogger('dataDuplicator');
export interface DataDuplicatorItem {
openStream: () => Promise<ReadableStream>;
name: string;
operation: 'copy' | 'lookup' | 'insertMissing';
matchColumns: string[];
}
export interface DataDuplicatorOptions {
rollbackAfterFinish?: boolean;
skipRowsWithUnresolvedRefs?: boolean;
setNullForUnresolvedNullableRefs?: boolean;
}
class DuplicatorReference {
constructor(
public base: DuplicatorItemHolder,
public ref: DuplicatorItemHolder,
public isMandatory: boolean,
public foreignKey: ForeignKeyInfo
) {}
get columnName() {
return this.foreignKey.columns[0].columnName;
}
}
class DuplicatorWeakReference {
constructor(public base: DuplicatorItemHolder, public ref: TableInfo, public foreignKey: ForeignKeyInfo) {}
get columnName() {
return this.foreignKey.columns[0].columnName;
}
}
class DuplicatorItemHolder {
references: DuplicatorReference[] = [];
backReferences: DuplicatorReference[] = [];
// not mandatory references to entities out of the model
weakReferences: DuplicatorWeakReference[] = [];
table: TableInfo;
isPlanned = false;
idMap = {};
autoColumn: string;
refByColumn: { [columnName: string]: DuplicatorReference } = {};
isReferenced: boolean;
get name() {
return this.item.name;
}
constructor(public item: DataDuplicatorItem, public duplicator: DataDuplicator) {
this.table = duplicator.db.tables.find(x => x.pureName.toUpperCase() == item.name.toUpperCase());
this.autoColumn = this.table.columns.find(x => x.autoIncrement)?.columnName;
if (
this.table.primaryKey?.columns?.length != 1 ||
this.table.primaryKey?.columns?.[0]?.columnName != this.autoColumn
) {
this.autoColumn = null;
}
}
initializeReferences() {
for (const fk of this.table.foreignKeys) {
if (fk.columns?.length != 1) continue;
const refHolder = this.duplicator.itemHolders.find(y => y.name.toUpperCase() == fk.refTableName.toUpperCase());
const isMandatory = this.table.columns.find(x => x.columnName == fk.columns[0]?.columnName)?.notNull;
if (refHolder == null) {
if (!isMandatory) {
const weakref = new DuplicatorWeakReference(
this,
this.duplicator.db.tables.find(x => x.pureName == fk.refTableName),
fk
);
this.weakReferences.push(weakref);
}
} else {
const newref = new DuplicatorReference(this, refHolder, isMandatory, fk);
this.references.push(newref);
this.refByColumn[newref.columnName] = newref;
refHolder.isReferenced = true;
}
}
}
createInsertObject(chunk, weakrefcols: string[]) {
const res = _omit(
_pick(
chunk,
this.table.columns.map(x => x.columnName)
),
[this.autoColumn, ...this.backReferences.map(x => x.columnName), ...weakrefcols]
);
for (const key in res) {
const ref = this.refByColumn[key];
if (ref) {
// remap id
res[key] = ref.ref.idMap[res[key]];
if (ref.isMandatory && res[key] == null) {
// mandatory refertence not matched
if (this.duplicator.options.skipRowsWithUnresolvedRefs) {
return null;
}
throw new Error(`Unresolved reference, base=${ref.base.name}, ref=${ref.ref.name}, ${key}=${chunk[key]}`);
}
}
}
return res;
}
// returns list of columns that are weak references and are not resolved
async getMissingWeakRefsForRow(row): Promise<string[]> {
if (!this.duplicator.options.setNullForUnresolvedNullableRefs || !this.weakReferences?.length) {
return [];
}
const qres = await runQueryOnDriver(this.duplicator.pool, this.duplicator.driver, dmp => {
dmp.put('^select ');
dmp.putCollection(',', this.weakReferences, weakref => {
dmp.put(
'(^case ^when ^exists (^select * ^from %f where %i = %v) ^then 1 ^else 0 ^end) as %i',
weakref.ref,
weakref.foreignKey.columns[0].refColumnName,
row[weakref.foreignKey.columns[0].columnName],
weakref.foreignKey.columns[0].columnName
);
});
if (this.duplicator.driver.dialect.requireFromDual) {
dmp.put(' ^from ^dual');
}
});
const qrow = qres.rows[0];
return this.weakReferences.filter(x => qrow[x.columnName] == 0).map(x => x.columnName);
}
// Streams all rows of this archive item into its target table, honoring the
// item operation ('copy' | 'insertMissing' | 'lookup') and remembering the
// old->new identity mapping in this.idMap for dependent tables.
// Returns per-item statistics: { inserted, mapped, missing, skipped }.
async runImport() {
  const readStream = await this.item.openStream();
  const driver = this.duplicator.driver;
  const pool = this.duplicator.pool;
  // running counters reported back to the caller and logged periodically
  let inserted = 0;
  let mapped = 0;
  let missing = 0;
  let skipped = 0;
  let lastLogged = new Date();
  // NOTE(review): existingWeakRefs is never read or written below — looks like leftover dead state
  const existingWeakRefs = {};
  const writeStream = createAsyncWriteStream(this.duplicator.stream, {
    processItem: async chunk => {
      // first chunk of a jsonl stream is a structural header, not a data row
      if (chunk.__isStreamHeader) {
        return;
      }
      // Inserts the current row, remapping foreign keys and dropping the
      // auto-increment column; captures the new identity if this table is referenced.
      const doCopy = async () => {
        // console.log('chunk', this.name, JSON.stringify(chunk));
        const weakrefcols = await this.getMissingWeakRefsForRow(chunk);
        const insertedObj = this.createInsertObject(chunk, weakrefcols);
        // console.log('insertedObj', this.name, JSON.stringify(insertedObj));
        // null means the row was rejected (unresolved mandatory reference with skip option on)
        if (insertedObj == null) {
          skipped += 1;
          return;
        }
        let res = await runQueryOnDriver(pool, driver, dmp => {
          dmp.put(
            '^insert ^into %f (%,i) ^values (%,v)',
            this.table,
            Object.keys(insertedObj),
            Object.values(insertedObj)
          );
          // some dialects allow SELECT SCOPE_IDENTITY() in the same batch as the INSERT
          if (
            this.autoColumn &&
            this.isReferenced &&
            !this.duplicator.driver.dialect.requireStandaloneSelectForScopeIdentity
          ) {
            dmp.selectScopeIdentity(this.table);
          }
        });
        inserted += 1;
        if (this.autoColumn && this.isReferenced) {
          // dialects like MySQL need the identity fetched by a separate statement
          if (this.duplicator.driver.dialect.requireStandaloneSelectForScopeIdentity) {
            res = await runQueryOnDriver(pool, driver, dmp => dmp.selectScopeIdentity(this.table));
          }
          // console.log('IDRES', JSON.stringify(res));
          // console.log('*********** ENTRIES OF', res?.rows?.[0]);
          // identity value is the first (and only) column of the first row, whatever its alias
          const resId = Object.entries(res?.rows?.[0])?.[0]?.[1];
          if (resId != null) {
            this.idMap[chunk[this.autoColumn]] = resId;
          }
        }
      };
      switch (this.item.operation) {
        case 'copy': {
          await doCopy();
          break;
        }
        case 'insertMissing':
        case 'lookup': {
          // find an existing row by the first match column and map its identity
          const res = await runQueryOnDriver(pool, driver, dmp =>
            dmp.put(
              '^select %i ^from %f ^where %i = %v',
              this.autoColumn,
              this.table,
              this.item.matchColumns[0],
              chunk[this.item.matchColumns[0]]
            )
          );
          const resId = Object.entries(res?.rows?.[0])?.[0]?.[1];
          if (resId != null) {
            mapped += 1;
            this.idMap[chunk[this.autoColumn]] = resId;
          } else if (this.item.operation == 'insertMissing') {
            // not found: insertMissing falls back to inserting the row
            await doCopy();
          } else {
            // lookup only: count the unmatched row and continue
            missing += 1;
          }
          break;
        }
      }
      // progress logging, throttled to once per 5 seconds
      if (new Date().getTime() - lastLogged.getTime() > 5000) {
        logger.info(
          `Duplicating ${this.item.name} in progress, inserted ${inserted} rows, mapped ${mapped} rows, missing ${missing} rows, skipped ${skipped} rows`
        );
        lastLogged = new Date();
      }
      // this.idMap[oldId] = newId;
    },
  });
  await this.duplicator.copyStream(readStream, writeStream);
  // await this.duplicator.driver.writeQueryStream(this.duplicator.pool, {
  //   mapResultId: (oldId, newId) => {
  //     this.idMap[oldId] = newId;
  //   },
  // });
  return { inserted, mapped, missing, skipped };
}
}
// Orchestrates duplication of a set of archive items into a database:
// builds a dependency-ordered plan from foreign keys, then imports each
// item inside a single transaction (rolled back on failure or on request).
export class DataDuplicator {
  itemHolders: DuplicatorItemHolder[];
  itemPlan: DuplicatorItemHolder[] = [];

  constructor(
    public pool: any,
    public driver: EngineDriver,
    public db: DatabaseInfo,
    public items: DataDuplicatorItem[],
    public stream,
    public copyStream: (input, output) => Promise<void>,
    public options: DataDuplicatorOptions = {}
  ) {
    this.itemHolders = items.map(item => new DuplicatorItemHolder(item, this));
    for (const holder of this.itemHolders) {
      holder.initializeReferences();
    }
  }

  // Picks the next unplanned item. Preference order:
  //  1) all of its references already planned;
  //  2) only optional references unplanned — those become back-references.
  // Throws when mandatory references form a cycle.
  findItemToPlan(): DuplicatorItemHolder {
    const fullySatisfied = this.itemHolders.find(
      holder => !holder.isPlanned && holder.references.every(r => r.ref.isPlanned)
    );
    if (fullySatisfied) {
      return fullySatisfied;
    }
    for (const holder of this.itemHolders) {
      if (holder.isPlanned) continue;
      const hasUnsatisfiedMandatory = holder.references.some(r => !r.ref.isPlanned && r.isMandatory);
      if (!hasUnsatisfiedMandatory) {
        // remember unplanned (optional) references so their columns are skipped on insert
        holder.backReferences = holder.references.filter(r => !r.ref.isPlanned);
        return holder;
      }
    }
    throw new Error('Cycle in mandatory references');
  }

  // Repeatedly selects plannable items until every holder is ordered in itemPlan.
  createPlan() {
    while (this.itemPlan.length < this.itemHolders.length) {
      const next = this.findItemToPlan();
      next.isPlanned = true;
      this.itemPlan.push(next);
    }
  }

  // Runs the whole duplication inside one transaction; any import failure
  // rolls everything back. rollbackAfterFinish turns the run into a dry run.
  async run() {
    this.createPlan();
    await runCommandOnDriver(this.pool, this.driver, dmp => dmp.beginTransaction());
    try {
      for (const item of this.itemPlan) {
        const stats = await item.runImport();
        logger.info(
          `Duplicated ${item.name}, inserted ${stats.inserted} rows, mapped ${stats.mapped} rows, missing ${stats.missing} rows, skipped ${stats.skipped} rows`
        );
      }
    } catch (err) {
      logger.error(extractErrorLogData(err), `Failed duplicator job, rollbacking. ${err.message}`);
      await runCommandOnDriver(this.pool, this.driver, dmp => dmp.rollbackTransaction());
      return;
    }
    if (this.options.rollbackAfterFinish) {
      logger.info('Rollbacking transaction, nothing was changed');
      await runCommandOnDriver(this.pool, this.driver, dmp => dmp.rollbackTransaction());
    } else {
      logger.info('Committing duplicator transaction');
      await runCommandOnDriver(this.pool, this.driver, dmp => dmp.commitTransaction());
    }
  }
}

View File

@@ -0,0 +1,509 @@
import {
createAsyncWriteStream,
extractErrorLogData,
getLogger,
isTypeNumber,
runCommandOnDriver,
runQueryOnDriver,
SqlDumper,
} from 'dbgate-tools';
import { DatabaseInfo, EngineDriver, ForeignKeyInfo, NamedObjectInfo, QueryResult, TableInfo } from 'dbgate-types';
import _pick from 'lodash/pick';
import _omit from 'lodash/omit';
import stableStringify from 'json-stable-stringify';
const logger = getLogger('dataReplicator');
// One source item (table) to be replicated into the target database.
export interface DataReplicatorItem {
  // opens the jsonl data stream for this item
  openStream: () => Promise<ReadableStream>;
  // table name (matched case-insensitively against the database model)
  name: string;
  // per-row predicate: should we try to find an existing matching row?
  findExisting: (row: any) => boolean;
  // per-row predicate: should we insert the row when no match was found?
  createNew: (row: any) => boolean;
  // per-row predicate: should we update the row when a match was found?
  updateExisting: (row: any) => boolean;
  // when true, target rows absent from the source stream are deleted
  deleteMissing: boolean;
  // columns restricting the scope of deleteMissing to rows sharing these values
  deleteRestrictionColumns: string[];
  // columns used to pair source rows with existing target rows
  matchColumns: string[];
}
// Global options controlling a DataReplicator run.
export interface DataReplicatorOptions {
  // dry run: roll the transaction back after all items were processed
  rollbackAfterFinish?: boolean;
  // skip rows whose mandatory references cannot be remapped (instead of throwing)
  skipRowsWithUnresolvedRefs?: boolean;
  // set nullable FK columns to NULL when their out-of-model target row is missing
  setNullForUnresolvedNullableRefs?: boolean;
  // collect SQL into a dumper script instead of executing against the database
  generateSqlScript?: boolean;
  // identifier of the hosting runner job, if any
  runid?: string;
}
// Single-column foreign key from one replicated item (base) to another (ref).
class ReplicatorReference {
  constructor(
    public base: ReplicatorItemHolder,
    public ref: ReplicatorItemHolder,
    // true when the FK column is NOT NULL in the base table
    public isMandatory: boolean,
    public foreignKey: ForeignKeyInfo
  ) {}

  // FK column name on the base table (single-column FKs only)
  get columnName() {
    return this.foreignKey.columns[0].columnName;
  }
}
// Nullable single-column foreign key pointing to a table that is NOT part of
// the replicated item set; resolved against the live target database instead.
class ReplicatorWeakReference {
  constructor(public base: ReplicatorItemHolder, public ref: TableInfo, public foreignKey: ForeignKeyInfo) {}

  // FK column name on the base table (single-column FKs only)
  get columnName() {
    return this.foreignKey.columns[0].columnName;
  }
}
// Runtime state for one replicated item: its resolved table, foreign-key
// references to other replicated items, identity mapping (source id -> target id),
// and the streaming import itself.
class ReplicatorItemHolder {
  references: ReplicatorReference[] = [];
  backReferences: ReplicatorReference[] = [];
  // not mandatory references to entities out of the model
  weakReferences: ReplicatorWeakReference[] = [];
  table: TableInfo;
  isPlanned = false;
  // maps source identity values to target identity values
  idMap = {};
  // single-column numeric PK used as identity; null when no suitable PK exists
  autoColumn: string;
  // identity values are generated by the replicator (MAX+1), not by the database
  isManualAutoColumn: boolean;
  refByColumn: { [columnName: string]: ReplicatorReference } = {};
  // true when some other replicated item references this one
  isReferenced: boolean;

  get name() {
    return this.item.name;
  }

  constructor(public item: DataReplicatorItem, public replicator: DataReplicator) {
    this.table = replicator.db.tables.find(x => x.pureName.toUpperCase() == item.name.toUpperCase());
    this.autoColumn = this.table.columns.find(x => x.autoIncrement)?.columnName;
    // accept the auto-increment column only when it is the sole primary-key column
    if (
      this.table.primaryKey?.columns?.length != 1 ||
      this.table.primaryKey?.columns?.[0]?.columnName != this.autoColumn
    ) {
      this.autoColumn = null;
    }
    // no auto-increment: a single numeric PK column can still serve as identity,
    // but its values must be generated manually
    if (!this.autoColumn && this.table.primaryKey?.columns?.length == 1) {
      const name = this.table.primaryKey.columns[0].columnName;
      const column = this.table.columns.find(x => x.columnName == name);
      if (isTypeNumber(column?.dataType)) {
        this.autoColumn = name;
        this.isManualAutoColumn = true;
      }
    }
    // SQL-script generation cannot read scope identity, so always generate manually
    if (this.autoColumn && this.replicator.options.generateSqlScript) {
      this.isManualAutoColumn = true;
    }
  }

  // Classifies each single-column FK as a strong reference (target is a replicated
  // item), a weak reference (nullable FK to an out-of-model table), or ignores it
  // (mandatory FK to an out-of-model table).
  initializeReferences() {
    for (const fk of this.table.foreignKeys) {
      if (fk.columns?.length != 1) continue;
      const refHolder = this.replicator.itemHolders.find(y => y.name.toUpperCase() == fk.refTableName.toUpperCase());
      const isMandatory = this.table.columns.find(x => x.columnName == fk.columns[0]?.columnName)?.notNull;
      if (refHolder == null) {
        if (!isMandatory) {
          const weakref = new ReplicatorWeakReference(
            this,
            this.replicator.db.tables.find(x => x.pureName == fk.refTableName),
            fk
          );
          this.weakReferences.push(weakref);
        }
      } else {
        const newref = new ReplicatorReference(this, refHolder, isMandatory, fk);
        this.references.push(newref);
        this.refByColumn[newref.columnName] = newref;
        refHolder.isReferenced = true;
      }
    }
  }

  // Builds the column->value object for INSERT: keeps only model columns, drops
  // the identity column, back-reference columns and the given unresolved weak-ref
  // columns, and remaps FK values through the referenced item's idMap.
  // Returns null when a mandatory reference is unresolved and skipping is enabled.
  createInsertObject(chunk, weakrefcols?: string[]) {
    const res = _omit(
      _pick(
        chunk,
        this.table.columns.map(x => x.columnName)
      ),
      [this.autoColumn, ...this.backReferences.map(x => x.columnName), ...(weakrefcols ? weakrefcols : [])]
    );
    for (const key in res) {
      const ref = this.refByColumn[key];
      if (ref) {
        // remap id
        res[key] = ref.ref.idMap[res[key]];
        if (ref.isMandatory && res[key] == null) {
          // mandatory reference not matched
          if (this.replicator.options.skipRowsWithUnresolvedRefs) {
            return null;
          }
          throw new Error(`Unresolved reference, base=${ref.base.name}, ref=${ref.ref.name}, ${key}=${chunk[key]}`);
        }
      }
    }
    return res;
  }

  // Builds the column->value object for UPDATE: model columns minus the identity
  // column and all reference columns (references are never updated).
  createUpdateObject(chunk) {
    const res = _omit(
      _pick(
        chunk,
        this.table.columns.map(x => x.columnName)
      ),
      [this.autoColumn, ...this.backReferences.map(x => x.columnName), ...this.references.map(x => x.columnName)]
    );
    return res;
  }

  // returns list of columns that are weak references and are not resolved
  // (queried with one EXISTS expression per weak reference against the live database)
  async getMissingWeakRefsForRow(row): Promise<string[]> {
    if (!this.replicator.options.setNullForUnresolvedNullableRefs || !this.weakReferences?.length) {
      return [];
    }
    const qres = await runQueryOnDriver(this.replicator.pool, this.replicator.driver, dmp => {
      dmp.put('^select ');
      dmp.putCollection(',', this.weakReferences, weakref => {
        dmp.put(
          '(^case ^when ^exists (^select * ^from %f where %i = %v) ^then 1 ^else 0 ^end) as %i',
          weakref.ref,
          weakref.foreignKey.columns[0].refColumnName,
          row[weakref.foreignKey.columns[0].columnName],
          weakref.foreignKey.columns[0].columnName
        );
      });
      if (this.replicator.driver.dialect.requireFromDual) {
        dmp.put(' ^from ^dual');
      }
    });
    const qrow = qres.rows[0];
    return this.weakReferences.filter(x => qrow[x.columnName] == 0).map(x => x.columnName);
  }

  // Streams all rows of this item into the target table, performing per-row
  // find/update/insert per the item's predicates, then optionally deleting
  // target rows missing from the source. Returns run statistics.
  async runImport() {
    const readStream = await this.item.openStream();
    const driver = this.replicator.driver;
    const pool = this.replicator.pool;
    let inserted = 0;
    let mapped = 0;
    let updated = 0;
    let deleted = 0;
    let missing = 0;
    let skipped = 0;
    let lastLogged = new Date();
    const { deleteMissing, deleteRestrictionColumns } = this.item;
    // restriction tuples and matched-key tuples collected for the final DELETE
    const deleteRestrictions = {};
    const usedKeyRows = {};
    const writeStream = createAsyncWriteStream(this.replicator.stream, {
      processItem: async chunk => {
        if (chunk.__isStreamHeader) {
          return;
        }
        // Looks up an existing row by matchColumns; records its identity in idMap.
        const doFind = async () => {
          let insertedObj = this.createInsertObject(chunk);
          const res = await runQueryOnDriver(pool, driver, dmp => {
            dmp.put('^select %i ^from %f ^where ', this.autoColumn, this.table);
            dmp.putCollection(' and ', this.item.matchColumns, x => {
              dmp.put('%i = %v', x, insertedObj[x]);
            });
          });
          const resId = Object.entries(res?.rows?.[0] || {})?.[0]?.[1];
          if (resId != null) {
            mapped += 1;
            this.idMap[chunk[this.autoColumn]] = resId;
          }
          return resId;
        };
        // Updates the matched row with non-reference columns.
        const doUpdate = async recordId => {
          const updateObj = this.createUpdateObject(chunk);
          if (Object.keys(updateObj).length == 0) {
            skipped += 1;
            return;
          }
          await this.replicator.runDumperCommand(dmp => {
            // NOTE(review): '^ set' looks like a typo for '^set' — verify SqlDumper keyword handling
            dmp.put('^update %f ^ set ', this.table);
            dmp.putCollection(',', Object.keys(updateObj), x => {
              dmp.put('%i = %v', x, updateObj[x]);
            });
            dmp.put(' ^where %i = %v', this.autoColumn, recordId);
            dmp.endCommand();
          });
          updated += 1;
        };
        // Inserts the row; for manual identities the value is pre-generated,
        // otherwise the database-assigned identity is read back when needed.
        const doInsert = async () => {
          const weakrefcols = await this.getMissingWeakRefsForRow(chunk);
          let insertedObj = this.createInsertObject(chunk, weakrefcols);
          if (insertedObj == null) {
            skipped += 1;
            return;
          }
          if (this.isManualAutoColumn) {
            const maxId = await this.replicator.generateIdentityValue(this.autoColumn, this.table);
            insertedObj = {
              ...insertedObj,
              [this.autoColumn]: maxId,
            };
            this.idMap[chunk[this.autoColumn]] = maxId;
          }
          let res = await this.replicator.runDumperQuery(dmp => {
            dmp.put(
              '^insert ^into %f (%,i) ^values (%,v)',
              this.table,
              Object.keys(insertedObj),
              Object.values(insertedObj)
            );
            dmp.endCommand();
            if (
              this.autoColumn &&
              this.isReferenced &&
              !this.replicator.driver.dialect.requireStandaloneSelectForScopeIdentity &&
              !this.isManualAutoColumn
            ) {
              dmp.selectScopeIdentity(this.table);
            }
          });
          inserted += 1;
          if (this.autoColumn && this.isReferenced && !this.isManualAutoColumn) {
            if (this.replicator.driver.dialect.requireStandaloneSelectForScopeIdentity) {
              res = await runQueryOnDriver(pool, driver, dmp => dmp.selectScopeIdentity(this.table));
            }
            // guard with || {} like doFind does: an empty result set must not throw
            const resId = Object.entries(res?.rows?.[0] || {})?.[0]?.[1];
            if (resId != null) {
              this.idMap[chunk[this.autoColumn]] = resId;
            }
            return resId;
          }
        };
        // Records this row's restriction tuple and match-key tuple so the final
        // DELETE only removes rows in scope that were not seen in the source.
        const doMarkDelete = () => {
          const insertedObj = this.createInsertObject(chunk);
          if (deleteRestrictionColumns?.length > 0) {
            const restriction = _pick(insertedObj, deleteRestrictionColumns);
            const key = stableStringify(restriction);
            deleteRestrictions[key] = restriction;
          }
          const usedKey = _pick(insertedObj, this.item.matchColumns);
          usedKeyRows[stableStringify(usedKey)] = usedKey;
        };
        const findExisting = this.item.findExisting(chunk);
        const updateExisting = this.item.updateExisting(chunk);
        const createNew = this.item.createNew(chunk);
        if (deleteMissing) {
          doMarkDelete();
        }
        let recordId = null;
        if (findExisting) {
          recordId = await doFind();
        }
        if (updateExisting && recordId != null) {
          await doUpdate(recordId);
        }
        if (createNew && recordId == null) {
          recordId = await doInsert();
        }
        if (recordId == null && findExisting) {
          missing += 1;
        }
        // progress logging, throttled to once per 5 seconds
        if (new Date().getTime() - lastLogged.getTime() > 5000) {
          logger.info(
            `Replicating ${this.item.name} in progress, inserted ${inserted} rows, mapped ${mapped} rows, missing ${missing} rows, skipped ${skipped} rows, updated ${updated} rows`
          );
          lastLogged = new Date();
        }
      },
    });
    // Emits OR-of-ANDs (positive) or AND-of-NOTs (negative) over the given tuples.
    const dumpConditionArray = (dmp: SqlDumper, array: any[], positive: boolean) => {
      dmp.putCollection(positive ? ' or ' : ' and ', array, x => {
        dmp.put('(');
        dmp.putCollection(positive ? ' and ' : ' or ', Object.keys(x), y => {
          dmp.put(positive ? '%i = %v' : 'not (%i = %v)', y, x[y]);
        });
        dmp.put(')');
      });
    };
    // WHERE clause for deleteMissing: inside the restriction scope AND not among seen keys.
    const dumpDeleteCondition = (dmp: SqlDumper) => {
      const deleteRestrictionValues = Object.values(deleteRestrictions);
      const usedKeyRowsValues = Object.values(usedKeyRows);
      if (deleteRestrictionValues.length == 0 && usedKeyRowsValues.length == 0) {
        return;
      }
      dmp.put(' ^where ');
      if (deleteRestrictionColumns?.length > 0) {
        dmp.put('(');
        dumpConditionArray(dmp, deleteRestrictionValues, true);
        dmp.put(')');
        if (usedKeyRowsValues.length > 0) {
          dmp.put(' ^and ');
        }
      }
      dumpConditionArray(dmp, Object.values(usedKeyRows), false);
    };
    // Counts candidate rows first, then deletes them (count also feeds statistics).
    const doDelete = async () => {
      const countRes = await runQueryOnDriver(pool, driver, dmp => {
        dmp.put('^select count(*) as ~cnt ^from %f', this.table);
        dumpDeleteCondition(dmp);
        dmp.endCommand();
      });
      // NOTE(review): assumes the driver returns the alias as lowercase 'cnt' — verify per dialect
      const count = parseInt(countRes.rows[0].cnt, 10);
      if (count > 0) {
        await this.replicator.runDumperCommand(dmp => {
          dmp.put('^delete ^from %f', this.table);
          dumpDeleteCondition(dmp);
          dmp.endCommand();
        });
        deleted += count;
      }
    };
    await this.replicator.copyStream(readStream, writeStream, {});
    if (deleteMissing) {
      await doDelete();
    }
    return { inserted, mapped, missing, skipped, updated, deleted };
  }
}
// Orchestrates replication of a set of archive items into a database:
// plans a dependency-safe import order from foreign keys, runs every item
// in one transaction, and can alternatively collect the SQL into a script
// (options.generateSqlScript) instead of executing it.
export class DataReplicator {
  itemHolders: ReplicatorItemHolder[];
  itemPlan: ReplicatorItemHolder[] = [];
  // generated SQL script text; only populated at the end of run() — stays '' when run() fails early
  result: string = '';
  dumper: SqlDumper;
  // per-table next manual identity value, keyed by `${schemaName}.${pureName}`
  identityValues: { [fullTableName: string]: number } = {};
  constructor(
    public pool: any,
    public driver: EngineDriver,
    public db: DatabaseInfo,
    public items: DataReplicatorItem[],
    public stream,
    public copyStream: (input, output, options) => Promise<void>,
    public options: DataReplicatorOptions = {}
  ) {
    this.itemHolders = items.map(x => new ReplicatorItemHolder(x, this));
    this.itemHolders.forEach(x => x.initializeReferences());
    // @ts-ignore
    this.dumper = driver.createDumper();
  }
  // Picks the next unplanned item: first one with all references planned;
  // otherwise one whose unplanned references are all optional (those become
  // back-references). Throws on a cycle of mandatory references.
  findItemToPlan(): ReplicatorItemHolder {
    for (const item of this.itemHolders) {
      if (item.isPlanned) continue;
      if (item.references.every(x => x.ref.isPlanned)) {
        return item;
      }
    }
    for (const item of this.itemHolders) {
      if (item.isPlanned) continue;
      if (item.references.every(x => x.ref.isPlanned || !x.isMandatory)) {
        const backReferences = item.references.filter(x => !x.ref.isPlanned);
        item.backReferences = backReferences;
        return item;
      }
    }
    throw new Error('Cycle in mandatory references');
  }
  // Orders all items into itemPlan so references are imported before referrers.
  createPlan() {
    while (this.itemPlan.length < this.itemHolders.length) {
      const item = this.findItemToPlan();
      item.isPlanned = true;
      this.itemPlan.push(item);
    }
  }
  // Executes a command against the database, or appends it to the SQL script
  // when generateSqlScript is on.
  async runDumperCommand(cmd: (dmp: SqlDumper) => void | string): Promise<void> {
    if (this.options.generateSqlScript) {
      cmd(this.dumper);
    } else {
      await runCommandOnDriver(this.pool, this.driver, cmd);
    }
  }
  // Like runDumperCommand but returns a result set; in script mode the statement
  // is only recorded and an empty result is returned.
  async runDumperQuery(cmd: (dmp: SqlDumper) => void | string): Promise<QueryResult> {
    if (this.options.generateSqlScript) {
      cmd(this.dumper);
      return {
        rows: [],
      };
    } else {
      return await runQueryOnDriver(this.pool, this.driver, cmd);
    }
  }
  // Returns the next manual identity value for a table: first call queries
  // MAX(column)+1 (always against the live database, even in script mode),
  // subsequent calls increment the cached value.
  async generateIdentityValue(column: string, table: NamedObjectInfo): Promise<number> {
    const tableKey = `${table.schemaName}.${table.pureName}`;
    if (!(tableKey in this.identityValues)) {
      const max = await runQueryOnDriver(this.pool, this.driver, dmp => {
        dmp.put('^select max(%i) as ~maxid ^from %f', column, table);
      });
      // NOTE(review): assumes the alias comes back as lowercase 'maxid' — verify per dialect
      const maxId = Math.max(max.rows[0]['maxid'] ?? 0, 0) + 1;
      this.identityValues[tableKey] = maxId;
      return maxId;
    }
    this.identityValues[tableKey] += 1;
    return this.identityValues[tableKey];
  }
  // Runs the whole replication inside one transaction. Any item failure rolls
  // everything back and returns early; rollbackAfterFinish makes it a dry run.
  // On completion the accumulated dumper script is exposed via this.result.
  async run() {
    this.createPlan();
    await this.runDumperCommand(dmp => dmp.beginTransaction());
    try {
      for (const item of this.itemPlan) {
        const stats = await item.runImport();
        logger.info(
          `Replicated ${item.name}, inserted ${stats.inserted} rows, mapped ${stats.mapped} rows, missing ${stats.missing} rows, skipped ${stats.skipped} rows, updated ${stats.updated} rows, deleted ${stats.deleted} rows`
        );
      }
    } catch (err) {
      logger.error(extractErrorLogData(err), `Failed replicator job, rollbacking. ${err.message}`);
      await this.runDumperCommand(dmp => dmp.rollbackTransaction());
      return;
    }
    if (this.options.rollbackAfterFinish) {
      logger.info('Rollbacking transaction, nothing was changed');
      await this.runDumperCommand(dmp => dmp.rollbackTransaction());
    } else {
      logger.info('Committing replicator transaction');
      await this.runDumperCommand(dmp => dmp.commitTransaction());
    }
    this.result = this.dumper.s;
  }
}

View File

@@ -18,7 +18,7 @@ export * from './processPerspectiveDefaultColunns';
export * from './PerspectiveDataPattern';
export * from './PerspectiveDataLoader';
export * from './perspectiveTools';
export * from './DataDuplicator';
export * from './DataReplicator';
export * from './FreeTableGridDisplay';
export * from './FreeTableModel';
export * from './CustomGridDisplay';

View File

@@ -1,5 +1,5 @@
import type { EngineDriver, SqlDumper } from 'dbgate-types';
import { Command, Condition } from './types';
import { Command, Condition, Select, Source } from './types';
import { dumpSqlCommand } from './dumpSqlCommand';
export function treeToSql<T>(driver: EngineDriver, object: T, func: (dmp: SqlDumper, obj: T) => void) {
@@ -43,3 +43,43 @@ export function mergeConditions(condition1: Condition, condition2: Condition): C
conditions: [condition1, condition2],
};
}
/**
 * Builds a Select AST fetching the key columns of the rows identified by the
 * given key tuples: SELECT keyColumns FROM table WHERE (k1=v11 AND k2=v12) OR ...
 *
 * @param options.pureName    table name
 * @param options.schemaName  schema name
 * @param options.keyColumns  key column names (was mistyped as the empty tuple `[]`)
 * @param options.loadKeys    key tuples; each inner array is ordered like keyColumns
 * @returns Select command tree consumable by dumpSqlCommand
 */
export function selectKeysFromTable(options: {
  pureName: string;
  schemaName: string;
  keyColumns: string[];
  loadKeys: any[][];
}): Select {
  const source: Source = {
    name: { pureName: options.pureName, schemaName: options.schemaName },
  };

  const res: Select = {
    commandType: 'select',
    columns: options.keyColumns.map(col => ({
      exprType: 'column',
      columnName: col,
      source,
    })),
    from: source,
    where: {
      // one OR branch per requested key tuple
      conditionType: 'or',
      conditions: options.loadKeys.map(key => ({
        // each branch matches every key column of the tuple
        conditionType: 'and',
        conditions: key.map((keyValue, index) => ({
          conditionType: 'binary',
          operator: '=',
          left: {
            exprType: 'column',
            columnName: options.keyColumns[index],
            source,
          },
          right: {
            exprType: 'value',
            value: keyValue,
          },
        })),
      })),
    },
  };
  return res;
}

View File

@@ -54,8 +54,8 @@ export class ScriptWriter {
this._put(`await dbgateApi.importDatabase(${JSON.stringify(options)});`);
}
dataDuplicator(options) {
this._put(`await dbgateApi.dataDuplicator(${JSON.stringify(options, null, 2)});`);
dataReplicator(options) {
this._put(`await dbgateApi.dataReplicator(${JSON.stringify(options, null, 2)});`);
}
comment(s) {
@@ -72,6 +72,10 @@ export class ScriptWriter {
return prefix + this.s;
}
zipDirectory(inputDirectory, outputFile) {
this._put(`await dbgateApi.zipDirectory('${inputDirectory}', '${outputFile}');`);
}
}
export class ScriptWriterJson {
@@ -138,13 +142,21 @@ export class ScriptWriterJson {
});
}
dataDuplicator(options) {
dataReplicator(options) {
this.commands.push({
type: 'dataDuplicator',
type: 'dataReplicator',
options,
});
}
zipDirectory(inputDirectory, outputFile) {
this.commands.push({
type: 'zipDirectory',
inputDirectory,
outputFile,
});
}
getScript(schedule = null) {
return {
type: 'json',
@@ -185,8 +197,11 @@ export function jsonScriptToJavascript(json) {
case 'importDatabase':
script.importDatabase(cmd.options);
break;
case 'dataDuplicator':
script.dataDuplicator(cmd.options);
case 'dataReplicator':
script.dataReplicator(cmd.options);
break;
case 'zipDirectory':
script.zipDirectory(cmd.inputDirectory, cmd.outputFile);
break;
}
}

View File

@@ -100,7 +100,9 @@ export function createBulkInsertStreamBase(driver: EngineDriver, stream, dbhan,
dmp.putRaw(';');
// require('fs').writeFileSync('/home/jena/test.sql', dmp.s);
// console.log(dmp.s);
await driver.query(dbhan, dmp.s, { discardResult: true });
if (rows.length > 0) {
await driver.query(dbhan, dmp.s, { discardResult: true });
}
writable.rowsReporter.add(rows.length);
} else {
for (const row of rows) {

View File

@@ -549,3 +549,20 @@ export function pinoLogRecordToMessageRecord(logRecord, defaultSeverity = 'info'
severity: levelToSeverity[level] ?? defaultSeverity,
};
}
// Serializes an array into JSON Lines (NDJSON): one JSON document per line.
export function jsonLinesStringify(jsonArray: any[]): string {
  const lines: string[] = [];
  for (const item of jsonArray) {
    lines.push(JSON.stringify(item));
  }
  return lines.join('\n');
}
// Parses JSON Lines (NDJSON) text in best-effort mode: blank lines, lines that
// fail to parse, and lines whose parsed value is falsy are silently dropped.
export function jsonLinesParse(jsonLines: string): any[] {
  const result: any[] = [];
  for (const line of jsonLines.split('\n')) {
    if (!line.trim()) {
      continue;
    }
    let parsed = null;
    try {
      parsed = JSON.parse(line);
    } catch (e) {
      // unparsable line — skip it, matching the lenient contract above
      parsed = null;
    }
    if (parsed) {
      result.push(parsed);
    }
  }
  return result;
}

View File

@@ -31,7 +31,7 @@ export type TestEngineInfo = {
skipUnique?: boolean;
skipAutoIncrement?: boolean;
skipPkColumnTesting?: boolean;
skipDataDuplicator?: boolean;
skipDataReplicator?: boolean;
skipDeploy?: boolean;
skipStringLength?: boolean;
skipChangeColumn?: boolean;

View File

@@ -157,6 +157,7 @@
}
.snackbar-container {
z-index: 1000;
position: fixed;
right: 0;
bottom: var(--dim-statusbar-height);

View File

@@ -1,5 +1,5 @@
<script lang="ts" context="module">
function openArchive(fileName, folderName) {
async function openArchive(fileName, folderName) {
openNewTab({
title: fileName,
icon: 'img archive',
@@ -10,17 +10,21 @@
archiveFolder: folderName,
},
});
// }
}
async function openTextFile(fileName, fileType, folderName, tabComponent, icon) {
const connProps: any = {};
let tooltip = undefined;
const isZipped = folderName.endsWith('.zip');
const resp = await apiCall('files/load', {
folder: 'archive:' + folderName,
file: fileName + '.' + fileType,
format: 'text',
});
const resp = isZipped
? await apiCall('files/download-text', { uri: `zip://archive:${folderName}//${fileName}.jsonl` })
: await apiCall('files/load', {
folder: 'archive:' + folderName,
file: fileName + '.' + fileType,
format: 'text',
});
openNewTab(
{
@@ -58,7 +62,7 @@
if (data.fileType == 'jsonl') {
return 'img archive';
}
return ARCHIVE_ICONS[data.fileType];
return ARCHIVE_ICONS[data.fileType] ?? 'img anyfile';
}
</script>
@@ -79,6 +83,7 @@
import { openImportExportTab } from '../utility/importExportTools';
export let data;
$: isZipped = data.folderName?.endsWith('.zip');
const handleRename = () => {
showModal(InputTextModal, {
@@ -112,6 +117,9 @@
openArchive(data.fileName, data.folderName);
};
const handleClick = () => {
if (!data.fileType) {
return;
}
if (data.fileType == 'jsonl') {
handleOpenArchive();
}
@@ -133,11 +141,15 @@
};
function createMenu() {
if (!data.fileType) {
return [];
}
return [
data.fileType == 'jsonl' && { text: 'Open', onClick: handleOpenArchive },
data.fileType == 'jsonl' && { text: 'Open in text editor', onClick: handleOpenJsonLinesText },
{ text: 'Delete', onClick: handleDelete },
{ text: 'Rename', onClick: handleRename },
!isZipped && { text: 'Delete', onClick: handleDelete },
!isZipped && { text: 'Rename', onClick: handleRename },
data.fileType == 'jsonl' &&
createQuickExportMenu(
fmt => async () => {
@@ -174,29 +186,30 @@
),
data.fileType.endsWith('.sql') && { text: 'Open SQL', onClick: handleOpenSqlFile },
data.fileType.endsWith('.yaml') && { text: 'Open YAML', onClick: handleOpenYamlFile },
data.fileType == 'jsonl' && {
text: 'Open in profiler',
submenu: getExtensions()
.drivers.filter(eng => eng.profilerFormatterFunction)
.map(eng => ({
text: eng.title,
onClick: () => {
openNewTab({
title: 'Profiler',
icon: 'img profiler',
tabComponent: 'ProfilerTab',
props: {
jslidLoad: `archive://${data.folderName}/${data.fileName}`,
engine: eng.engine,
// profilerFormatterFunction: eng.profilerFormatterFunction,
// profilerTimestampFunction: eng.profilerTimestampFunction,
// profilerChartAggregateFunction: eng.profilerChartAggregateFunction,
// profilerChartMeasures: eng.profilerChartMeasures,
},
});
},
})),
},
!isZipped &&
data.fileType == 'jsonl' && {
text: 'Open in profiler',
submenu: getExtensions()
.drivers.filter(eng => eng.profilerFormatterFunction)
.map(eng => ({
text: eng.title,
onClick: () => {
openNewTab({
title: 'Profiler',
icon: 'img profiler',
tabComponent: 'ProfilerTab',
props: {
jslidLoad: `archive://${data.folderName}/${data.fileName}`,
engine: eng.engine,
// profilerFormatterFunction: eng.profilerFormatterFunction,
// profilerTimestampFunction: eng.profilerTimestampFunction,
// profilerChartAggregateFunction: eng.profilerChartAggregateFunction,
// profilerChartMeasures: eng.profilerChartMeasures,
},
});
},
})),
},
];
}
</script>

View File

@@ -20,6 +20,7 @@
import hasPermission from '../utility/hasPermission';
import { isProApp } from '../utility/proTools';
import { extractShellConnection } from '../impexp/createImpExpScript';
import { saveFileToDisk } from '../utility/exportFileTools';
export let data;
@@ -100,7 +101,7 @@ await dbgateApi.deployDb(${JSON.stringify(
props: {
conid: $currentDatabase?.connection?._id,
database: $currentDatabase?.name,
}
},
},
{
editor: {
@@ -113,12 +114,12 @@ await dbgateApi.deployDb(${JSON.stringify(
);
};
const handleOpenDuplicatorTab = () => {
const handleOpenDataDeployTab = () => {
openNewTab(
{
title: data.name,
icon: 'img duplicator',
tabComponent: 'DataDuplicatorTab',
icon: 'img data-deploy',
tabComponent: 'DataDeployTab',
props: {
conid: $currentDatabase?.connection?._id,
database: $currentDatabase?.name,
@@ -127,21 +128,56 @@ await dbgateApi.deployDb(${JSON.stringify(
{
editor: {
archiveFolder: data.name,
conid: $currentDatabase?.connection?._id,
database: $currentDatabase?.name,
},
}
);
};
const handleZipUnzip = async method => {
await apiCall(method, {
folder: data.name,
});
};
const handleDownloadZip = async () => {
saveFileToDisk(
async filePath => {
const zipped = await apiCall('archive/get-zipped-path', {
folder: data.name,
});
await apiCall('files/simple-copy', {
sourceFilePath: zipped.filePath,
targetFilePath: filePath,
});
},
{
formatLabel: 'ZIP files',
formatExtension: 'zip',
defaultFileName: data.name?.endsWith('.zip') ? data.name : data.name + '.zip',
}
);
};
function createMenu() {
return [
data.name != 'default' && { text: 'Delete', onClick: handleDelete },
data.name != 'default' && { text: 'Rename', onClick: handleRename },
data.name != 'default' &&
$currentDatabase && [
{ text: 'Data duplicator', onClick: handleOpenDuplicatorTab },
isProApp() && { text: 'Data deployer', onClick: handleOpenDataDeployTab },
{ text: 'Generate deploy DB SQL', onClick: handleGenerateDeploySql },
{ text: 'Shell: Deploy DB', onClick: handleGenerateDeployScript },
],
data.name != 'default' &&
isProApp() &&
data.name.endsWith('.zip') && { text: 'Unpack ZIP', onClick: () => handleZipUnzip('archive/unzip') },
data.name != 'default' &&
isProApp() &&
!data.name.endsWith('.zip') && { text: 'Pack (create ZIP)', onClick: () => handleZipUnzip('archive/zip') },
isProApp() && { text: 'Download ZIP', onClick: handleDownloadZip },
data.name != 'default' &&
hasPermission('dbops/model/compare') &&
@@ -158,7 +194,7 @@ await dbgateApi.deployDb(${JSON.stringify(
{...$$restProps}
{data}
title={data.name.endsWith('.link') ? data.name.slice(0, -5) : data.name}
icon={data.name.endsWith('.link') ? 'img link' : 'img archive-folder'}
icon={data.name.endsWith('.link') ? 'img link' : data.name.endsWith('.zip') ? 'img zipfile' : 'img archive-folder'}
isBold={data.name == $currentArchive}
on:click={() => ($currentArchive = data.name)}
menu={createMenu}

View File

@@ -330,15 +330,15 @@ await dbgateApi.dropAllDbObjects(${JSON.stringify(
});
};
const handleImportWithDbDuplicator = () => {
const handleShowDataDeployer = () => {
showModal(ChooseArchiveFolderModal, {
message: 'Choose archive folder for import from',
message: 'Choose archive folder for data deployer',
onConfirm: archiveFolder => {
openNewTab(
{
title: archiveFolder,
icon: 'img duplicator',
tabComponent: 'DataDuplicatorTab',
icon: 'img replicator',
tabComponent: 'DataDeployerTab',
props: {
conid: connection?._id,
database: name,
@@ -439,8 +439,8 @@ await dbgateApi.dropAllDbObjects(${JSON.stringify(
driver?.databaseEngineTypes?.includes('sql') &&
hasPermission(`dbops/import`) && {
onClick: handleImportWithDbDuplicator,
text: 'Import with DB duplicator',
onClick: handleShowDataDeployer,
text: 'Data deployer',
},
{ divider: true },

View File

@@ -7,6 +7,8 @@
tabComponent: string;
folder: string;
currentConnection: boolean;
extension: string;
label: string;
}
const sql: FileTypeHandler = {
@@ -15,6 +17,8 @@
tabComponent: 'QueryTab',
folder: 'sql',
currentConnection: true,
extension: 'sql',
label: 'SQL file',
};
const shell: FileTypeHandler = {
@@ -23,6 +27,8 @@
tabComponent: 'ShellTab',
folder: 'shell',
currentConnection: false,
extension: 'js',
label: 'JavaScript Shell script',
};
const markdown: FileTypeHandler = {
@@ -31,6 +37,8 @@
tabComponent: 'MarkdownEditorTab',
folder: 'markdown',
currentConnection: false,
extension: 'md',
label: 'Markdown file',
};
const charts: FileTypeHandler = {
@@ -39,6 +47,8 @@
tabComponent: 'ChartTab',
folder: 'charts',
currentConnection: true,
extension: 'json',
label: 'Chart file',
};
const query: FileTypeHandler = {
@@ -47,6 +57,8 @@
tabComponent: 'QueryDesignTab',
folder: 'query',
currentConnection: true,
extension: 'json',
label: 'Query design file',
};
const sqlite: FileTypeHandler = {
@@ -55,6 +67,8 @@
tabComponent: null,
folder: 'sqlite',
currentConnection: true,
extension: 'sqlite',
label: 'SQLite database',
};
const diagrams: FileTypeHandler = {
@@ -63,22 +77,52 @@
tabComponent: 'DiagramTab',
folder: 'diagrams',
currentConnection: true,
extension: 'json',
label: 'Diagram file',
};
const jobs: FileTypeHandler = {
const impexp: FileTypeHandler = {
icon: 'img export',
format: 'json',
tabComponent: 'ImportExportTab',
folder: 'jobs',
folder: 'impexp',
currentConnection: false,
extension: 'json',
label: 'Import/Export file',
};
const datadeploy: FileTypeHandler = isProApp()
? {
icon: 'img data-deploy',
format: 'json',
tabComponent: 'DataDeployTab',
folder: 'datadeploy',
currentConnection: false,
extension: 'json',
label: 'Data deploy file',
}
: undefined;
const dbcompare: FileTypeHandler = isProApp()
? {
icon: 'img compare',
format: 'json',
tabComponent: 'CompareModelTab',
folder: 'dbcompare',
currentConnection: false,
extension: 'json',
label: 'Database compare file',
}
: undefined;
const perspectives: FileTypeHandler = {
icon: 'img perspective',
format: 'json',
tabComponent: 'PerspectiveTab',
folder: 'pesrpectives',
currentConnection: true,
extension: 'json',
label: 'Perspective file',
};
const modtrans: FileTypeHandler = {
@@ -87,6 +131,8 @@
tabComponent: 'ModelTransformTab',
folder: 'modtrans',
currentConnection: false,
extension: 'json',
label: 'Model transform file',
};
export const SAVED_FILE_HANDLERS = {
@@ -98,8 +144,10 @@
sqlite,
diagrams,
perspectives,
jobs,
impexp,
modtrans,
datadeploy,
dbcompare,
};
export const extractKey = data => data.file;
@@ -122,6 +170,8 @@
import openNewTab from '../utility/openNewTab';
import AppObjectCore from './AppObjectCore.svelte';
import { isProApp } from '../utility/proTools';
import { saveFileToDisk } from '../utility/exportFileTools';
export let data;
@@ -148,6 +198,7 @@
hasPermission(`files/${data.folder}/write`) && { text: 'Create copy', onClick: handleCopy },
hasPermission(`files/${data.folder}/write`) && { text: 'Delete', onClick: handleDelete },
folder == 'markdown' && { text: 'Show page', onClick: showMarkdownPage },
{ text: 'Download', onClick: handleDownload },
];
}
@@ -182,6 +233,19 @@
});
};
const handleDownload = () => {
saveFileToDisk(
async filePath => {
await apiCall('files/export-file', {
folder,
file: data.file,
filePath,
});
},
{ formatLabel: handler.label, formatExtension: handler.format, defaultFileName: data.file }
);
};
async function openTab() {
const resp = await apiCall('files/load', { folder, file: data.file, format: handler.format });

View File

@@ -17,6 +17,7 @@
border-radius: 2px;
position: relative;
top: 3px;
font-size: 10pt;
}
label:hover:not(.disabled) {

View File

@@ -0,0 +1,61 @@
<script lang="ts">
  // Upload/open-file button. In Electron it opens a native file dialog and hands
  // the chosen path to onProcessFile; in the browser it renders a hidden file
  // input and uploads each selected file to the API before calling onProcessFile.
  import _ from 'lodash';
  import InlineButton from '../buttons/InlineButton.svelte';
  import FontIcon from '../icons/FontIcon.svelte';
  import getElectron from '../utility/getElectron';
  import InlineButtonLabel from '../buttons/InlineButtonLabel.svelte';
  import resolveApi, { resolveApiHeaders } from '../utility/resolveApi';
  import uuidv1 from 'uuid/v1';

  // file-type filters for the Electron open dialog
  export let filters;
  // callback invoked as onProcessFile(filePath, originalName) for every file
  export let onProcessFile;
  export let icon = 'icon plus-thick';

  // unique id ties the label to the hidden input when several buttons coexist
  const inputId = `uploadFileButton-${uuidv1()}`;
  const electron = getElectron();

  // Browser path: POST each selected file as multipart form data to the
  // uploads endpoint, then forward the server-side path to onProcessFile.
  async function handleUploadedFile(e) {
    const files = [...e.target.files];
    for (const file of files) {
      const formData = new FormData();
      formData.append('name', file.name);
      formData.append('data', file);

      const fetchOptions = {
        method: 'POST',
        body: formData,
        headers: resolveApiHeaders(),
      };
      const apiBase = resolveApi();
      const resp = await fetch(`${apiBase}/uploads/upload`, fetchOptions);
      const { filePath, originalName } = await resp.json();
      await onProcessFile(filePath, originalName);
    }
  }

  // Electron path: native dialog returns local paths directly, no upload needed.
  async function handleOpenElectronFile() {
    const filePaths = await electron.showOpenDialog({
      filters,
      properties: ['showHiddenFiles', 'openFile'],
    });
    const filePath = filePaths && filePaths[0];
    if (!filePath) return;
    // derive the display name from the last path segment (either separator)
    onProcessFile(filePath, filePath.split(/[\/\\]/).pop());
  }
</script>

{#if electron}
  <InlineButton on:click={handleOpenElectronFile} title="Open file" data-testid={$$props['data-testid']}>
    <FontIcon {icon} />
  </InlineButton>
{:else}
  <InlineButtonLabel on:click={() => {}} title="Upload file" data-testid={$$props['data-testid']} htmlFor={inputId}>
    <FontIcon {icon} />
  </InlineButtonLabel>
{/if}

<!-- hidden input backing the browser upload path; triggered via the label above -->
<input type="file" id={inputId} hidden on:change={handleUploadedFile} />

View File

@@ -13,7 +13,7 @@
}
</script>
<div class="button" on:click={handleClick} class:disabled class:fillHorizontal>
<div class="button" on:click={handleClick} class:disabled class:fillHorizontal data-testid={$$props['data-testid']}>
<div class="icon">
<FontIcon {icon} />
</div>

View File

@@ -46,6 +46,7 @@ import { openImportExportTab } from '../utility/importExportTools';
import newTable from '../tableeditor/newTable';
import { isProApp } from '../utility/proTools';
import { openWebLink } from '../utility/simpleTools';
import ExportImportConnectionsModal from '../modals/ExportImportConnectionsModal.svelte';
// function themeCommand(theme: ThemeDefinition) {
// return {
@@ -509,6 +510,44 @@ registerCommand({
},
});
// Opens the export dialog for saving connections to a file.
registerCommand({
  id: 'app.exportConnections',
  category: 'Settings',
  name: 'Export connections',
  // Desktop-only: requires the Electron build
  testEnabled: () => getElectron() != null,
  onClick: () => {
    showModal(ExportImportConnectionsModal, {
      mode: 'export',
    });
  },
});
// Lets the user pick a previously exported ZIP and opens the import dialog.
registerCommand({
  id: 'app.importConnections',
  category: 'Settings',
  name: 'Import connections',
  // Desktop-only: requires the Electron build (native file dialog)
  testEnabled: () => getElectron() != null,
  onClick: async () => {
    // NOTE(review): relies on a bare `electron` binding while testEnabled calls
    // getElectron() — confirm `electron` is defined at module level in this file.
    const files = await electron.showOpenDialog({
      properties: ['showHiddenFiles', 'openFile'],
      filters: [
        {
          name: `All supported files`,
          extensions: ['zip'],
        },
        // NOTE(review): duplicates the entry above (both match only .zip)
        { name: `ZIP files`, extensions: ['zip'] },
      ],
    });
    if (files?.length > 0) {
      showModal(ExportImportConnectionsModal, {
        mode: 'import',
        uploadedFilePath: files[0],
      });
    }
  },
});
registerCommand({
id: 'file.import',
category: 'File',

View File

@@ -21,6 +21,7 @@
export let isModifiedCell = false;
export let isInserted = false;
export let isDeleted = false;
export let isMissing = false;
export let isAutofillSelected = false;
export let isFocusedColumn = false;
export let domCell = undefined;
@@ -33,6 +34,9 @@
export let onSetValue;
export let editorTypes = null;
export let isReadonly;
export let hasOverlayValue = false;
export let overlayValue = null;
export let isMissingOverlayField = false;
$: value = col.isStructured ? _.get(rowData || {}, col.uniquePath) : (rowData || {})[col.uniqueName];
@@ -68,75 +72,88 @@
class:isModifiedCell
class:isInserted
class:isDeleted
class:isMissing
class:isAutofillSelected
class:isFocusedColumn
class:hasOverlayValue
class:isMissingOverlayField
class:alignRight={_.isNumber(value) && !showHint}
{style}
>
<CellValue
{rowData}
{value}
{jsonParsedValue}
{editorTypes}
rightMargin={_.isNumber(value) && !showHint && (editorTypes?.explicitDataType || col.foreignKey)}
/>
{#if showHint}
<span class="hint"
>{col.hintColumnNames.map(hintColumnName => rowData[hintColumnName]).join(col.hintColumnDelimiter || ' ')}</span
>
{/if}
{#if editorTypes?.explicitDataType}
{#if value !== undefined}
<ShowFormDropDownButton
icon={detectTypeIcon(value)}
menu={() => getConvertValueMenu(value, onSetValue, editorTypes)}
/>
{#if hasOverlayValue}
<div class="flex1 flex">
<div class="replacedValue overlayCell overlayCell1">
<CellValue {rowData} {value} {jsonParsedValue} {editorTypes} />
</div>
<div class="overlayCell overlayCell2">
<CellValue {rowData} value={overlayValue} {editorTypes} />
</div>
</div>
{:else}
<CellValue
{rowData}
{value}
{jsonParsedValue}
{editorTypes}
rightMargin={_.isNumber(value) && !showHint && (editorTypes?.explicitDataType || col.foreignKey)}
/>
{#if showHint}
<span class="hint"
>{col.hintColumnNames.map(hintColumnName => rowData[hintColumnName]).join(col.hintColumnDelimiter || ' ')}</span
>
{/if}
{#if _.isPlainObject(value)}
<ShowFormButton secondary icon="icon open-in-new" on:click={() => openJsonDocument(value, undefined, true)} />
{/if}
{#if _.isArray(value)}
{#if editorTypes?.explicitDataType}
{#if value !== undefined}
<ShowFormDropDownButton
icon={detectTypeIcon(value)}
menu={() => getConvertValueMenu(value, onSetValue, editorTypes)}
/>
{/if}
{#if _.isPlainObject(value)}
<ShowFormButton secondary icon="icon open-in-new" on:click={() => openJsonDocument(value, undefined, true)} />
{/if}
{#if _.isArray(value)}
<ShowFormButton
secondary
icon="icon open-in-new"
on:click={() => {
if (_.every(value, x => _.isPlainObject(x))) {
openJsonLinesData(value);
} else {
openJsonDocument(value, undefined, true);
}
}}
/>
{/if}
{:else if col.foreignKey && rowData && rowData[col.uniqueName] && !isCurrentCell}
<ShowFormButton on:click={() => onSetFormView(rowData, col)} />
{:else if col.foreignKey && isCurrentCell && onDictionaryLookup && !isReadonly}
<ShowFormButton icon="icon dots-horizontal" on:click={onDictionaryLookup} />
{:else if isJson}
<ShowFormButton icon="icon open-in-new" on:click={() => openJsonDocument(value, undefined, true)} />
{:else if jsonParsedValue && _.isPlainObject(jsonParsedValue)}
<ShowFormButton icon="icon open-in-new" on:click={() => openJsonDocument(jsonParsedValue, undefined, true)} />
{:else if _.isArray(jsonParsedValue || value)}
<ShowFormButton
secondary
icon="icon open-in-new"
on:click={() => {
if (_.every(value, x => _.isPlainObject(x))) {
openJsonLinesData(value);
if (_.every(jsonParsedValue || value, x => _.isPlainObject(x))) {
openJsonLinesData(jsonParsedValue || value);
} else {
openJsonDocument(value, undefined, true);
openJsonDocument(jsonParsedValue || value, undefined, true);
}
}}
/>
{/if}
{:else if col.foreignKey && rowData && rowData[col.uniqueName] && !isCurrentCell}
<ShowFormButton on:click={() => onSetFormView(rowData, col)} />
{:else if col.foreignKey && isCurrentCell && onDictionaryLookup && !isReadonly}
<ShowFormButton icon="icon dots-horizontal" on:click={onDictionaryLookup} />
{:else if isJson}
<ShowFormButton icon="icon open-in-new" on:click={() => openJsonDocument(value, undefined, true)} />
{:else if jsonParsedValue && _.isPlainObject(jsonParsedValue)}
<ShowFormButton icon="icon open-in-new" on:click={() => openJsonDocument(jsonParsedValue, undefined, true)} />
{:else if _.isArray(jsonParsedValue || value)}
<ShowFormButton
icon="icon open-in-new"
on:click={() => {
if (_.every(jsonParsedValue || value, x => _.isPlainObject(x))) {
openJsonLinesData(jsonParsedValue || value);
} else {
openJsonDocument(jsonParsedValue || value, undefined, true);
}
}}
/>
{/if}
{#if isAutoFillMarker}
<div class="autoFillMarker autofillHandleMarker" />
{/if}
{#if isAutoFillMarker}
<div class="autoFillMarker autofillHandleMarker" />
{/if}
{#if showSlot}
<slot />
{#if showSlot}
<slot />
{/if}
{/if}
</td>
@@ -181,6 +198,9 @@
td.isDeleted {
background: var(--theme-bg-volcano);
}
td.isMissing {
background: var(--theme-bg-volcano);
}
td.isSelected {
background: var(--theme-bg-3);
}
@@ -188,9 +208,9 @@
background: var(--theme-bg-selected);
}
td.isDeleted {
background-image: url('data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAUAAAAFCAYAAACNbyblAAAAEElEQVQImWNgIAX8x4KJBAD+agT8INXz9wAAAABJRU5ErkJggg==');
background-repeat: repeat-x;
background-position: 50% 50%;
background-image: url('data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAUAAAAFCAYAAACNbyblAAAAEElEQVQImWNgIAX8x4KJBAD+agT8INXz9wAAAABJRU5ErkJggg==') !important;
background-repeat: repeat-x !important;
background-position: 50% 50% !important;
}
.hint {
@@ -213,4 +233,31 @@
color: var(--theme-icon-green);
text-align: var(--data-grid-numbers-align);
}
.hasOverlayValue .overlayCell {
width: 50%;
overflow: hidden;
}
.hasOverlayValue .overlayCell1 {
margin-right: 5px;
}
.hasOverlayValue .overlayCell2 {
margin-left: 5px;
}
.replacedValue {
background-image: url('data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAUAAAAFCAYAAACNbyblAAAAEElEQVQImWNgIAX8x4KJBAD+agT8INXz9wAAAABJRU5ErkJggg==');
background-repeat: repeat-x;
background-position: 50% 50%;
}
td.isMissingOverlayField {
background: var(--theme-bg-orange);
background-image: url('data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAUAAAAFCAYAAACNbyblAAAAEElEQVQImWNgIAX8x4KJBAD+agT8INXz9wAAAABJRU5ErkJggg==');
background-repeat: repeat-x;
background-position: 50% 50%;
}
</style>

View File

@@ -282,48 +282,59 @@
testEnabled: () => getCurrentDataGrid()?.editCellValueEnabled(),
onClick: () => getCurrentDataGrid().editCellValue(),
});
registerCommand({
id: 'dataGrid.mergeSelectedCellsIntoMirror',
category: 'Data grid',
name: 'Merge selected cells',
testEnabled: () => getCurrentDataGrid()?.mirrorWriteEnabled(true),
onClick: () => getCurrentDataGrid().mergeSelectionIntoMirror({ mergeMode: 'merge', fullRows: false }),
});
registerCommand({
id: 'dataGrid.mergeSelectedRowsIntoMirror',
category: 'Data grid',
name: 'Merge selected rows',
testEnabled: () => getCurrentDataGrid()?.mirrorWriteEnabled(true),
onClick: () => getCurrentDataGrid().mergeSelectionIntoMirror({ mergeMode: 'merge', fullRows: true }),
});
registerCommand({
id: 'dataGrid.appendSelectedCellsIntoMirror',
category: 'Data grid',
name: 'Append selected cells',
testEnabled: () => getCurrentDataGrid()?.mirrorWriteEnabled(true),
onClick: () => getCurrentDataGrid().mergeSelectionIntoMirror({ mergeMode: 'append', fullRows: false }),
});
registerCommand({
id: 'dataGrid.appendSelectedRowsIntoMirror',
category: 'Data grid',
name: 'Append selected rows',
testEnabled: () => getCurrentDataGrid()?.mirrorWriteEnabled(true),
onClick: () => getCurrentDataGrid().mergeSelectionIntoMirror({ mergeMode: 'append', fullRows: true }),
});
registerCommand({
id: 'dataGrid.replaceSelectedCellsIntoMirror',
category: 'Data grid',
name: 'Replace with selected cells',
testEnabled: () => getCurrentDataGrid()?.mirrorWriteEnabled(true),
onClick: () => getCurrentDataGrid().mergeSelectionIntoMirror({ mergeMode: 'replace', fullRows: false }),
});
registerCommand({
id: 'dataGrid.replaceSelectedRowsIntoMirror',
category: 'Data grid',
name: 'Replace with selected rows',
testEnabled: () => getCurrentDataGrid()?.mirrorWriteEnabled(true),
onClick: () => getCurrentDataGrid().mergeSelectionIntoMirror({ mergeMode: 'replace', fullRows: true }),
});
if (isProApp()) {
registerCommand({
id: 'dataGrid.sendToDataDeploy',
category: 'Data grid',
name: 'Send to data deployer',
testEnabled: () => getCurrentDataGrid()?.sendToDataDeployEnabled(),
onClick: () => getCurrentDataGrid().sendToDataDeploy(),
});
}
// registerCommand({
// id: 'dataGrid.mergeSelectedCellsIntoMirror',
// category: 'Data grid',
// name: 'Merge selected cells',
// testEnabled: () => getCurrentDataGrid()?.mirrorWriteEnabled(true),
// onClick: () => getCurrentDataGrid().mergeSelectionIntoMirror({ mergeMode: 'merge', fullRows: false }),
// });
// registerCommand({
// id: 'dataGrid.mergeSelectedRowsIntoMirror',
// category: 'Data grid',
// name: 'Merge selected rows',
// testEnabled: () => getCurrentDataGrid()?.mirrorWriteEnabled(true),
// onClick: () => getCurrentDataGrid().mergeSelectionIntoMirror({ mergeMode: 'merge', fullRows: true }),
// });
// registerCommand({
// id: 'dataGrid.appendSelectedCellsIntoMirror',
// category: 'Data grid',
// name: 'Append selected cells',
// testEnabled: () => getCurrentDataGrid()?.mirrorWriteEnabled(true),
// onClick: () => getCurrentDataGrid().mergeSelectionIntoMirror({ mergeMode: 'append', fullRows: false }),
// });
// registerCommand({
// id: 'dataGrid.appendSelectedRowsIntoMirror',
// category: 'Data grid',
// name: 'Append selected rows',
// testEnabled: () => getCurrentDataGrid()?.mirrorWriteEnabled(true),
// onClick: () => getCurrentDataGrid().mergeSelectionIntoMirror({ mergeMode: 'append', fullRows: true }),
// });
// registerCommand({
// id: 'dataGrid.replaceSelectedCellsIntoMirror',
// category: 'Data grid',
// name: 'Replace with selected cells',
// testEnabled: () => getCurrentDataGrid()?.mirrorWriteEnabled(true),
// onClick: () => getCurrentDataGrid().mergeSelectionIntoMirror({ mergeMode: 'replace', fullRows: false }),
// });
// registerCommand({
// id: 'dataGrid.replaceSelectedRowsIntoMirror',
// category: 'Data grid',
// name: 'Replace with selected rows',
// testEnabled: () => getCurrentDataGrid()?.mirrorWriteEnabled(true),
// onClick: () => getCurrentDataGrid().mergeSelectionIntoMirror({ mergeMode: 'replace', fullRows: true }),
// });
function getSelectedCellsInfo(selectedCells, grider, realColumnUniqueNames, selectedRowData) {
if (selectedCells.length > 1 && selectedCells.every(x => _.isNumber(x[0]) && _.isNumber(x[1]))) {
@@ -418,6 +429,8 @@
import contextMenuActivator from '../utility/contextMenuActivator';
import InputTextModal from '../modals/InputTextModal.svelte';
import { _t } from '../translations';
import { isProApp } from '../utility/proTools';
import SaveArchiveModal from '../modals/SaveArchiveModal.svelte';
export let onLoadNextData = undefined;
export let grider = undefined;
@@ -454,6 +467,8 @@
export let jslid;
// export let generalAllowSave = false;
export let hideGridLeftColumn = false;
export let overlayDefinition = null;
export let onGetSelectionMenu = null;
export const activator = createActivator('DataGridCore', false);
@@ -482,6 +497,7 @@
const domFilterControlsRef = createRef({});
let isGridFocused = false;
let selectionMenu = null;
const tabid = getContext('tabid');
@@ -1003,11 +1019,11 @@
});
}
export function mirrorWriteEnabled(requireKey) {
return requireKey ? !!display.baseTable?.primaryKey || !!display.baseCollection : !!display.baseTableOrSimilar;
export function sendToDataDeployEnabled() {
return !!display.baseTable?.primaryKey || !!display.baseCollection;
}
export async function mergeSelectionIntoMirror({ fullRows, mergeMode = 'merge' }) {
export async function sendToDataDeploy() {
const file = display.baseTableOrSimilar?.pureName;
const mergeKey = display.baseCollection
? display.baseCollection?.uniqueKey?.map(x => x.columnName)
@@ -1019,20 +1035,77 @@
const rows = rowIndexes.map(rowIndex => grider.getRowData(rowIndex));
// @ts-ignore
const columns = colIndexes.map(col => realColumnUniqueNames[col]);
const mergedRows = fullRows ? rows : rows.map(x => _.pick(x, _.uniq([...columns, ...mergeKey])));
const res = await apiCall('archive/modify-file', {
const mergedRows = rows.map(x => _.pick(x, _.uniq([...columns, ...mergeKey])));
showModal(SaveArchiveModal, {
folder: $currentArchive,
file,
mergedRows,
mergeKey,
mergeMode,
fileIsReadOnly: true,
onSave: async folder => {
const res = await apiCall('archive/modify-file', {
folder,
file,
mergedRows,
mergeKey,
mergeMode: 'merge',
});
if (res) {
showSnackbarSuccess(`Merged ${mergedRows.length} rows into ${file} in archive ${folder}`);
openNewTab(
{
title: folder,
icon: 'img data-deploy',
tabComponent: 'DataDeployTab',
props: {
conid,
database,
},
},
{
editor: {
archiveFolder: folder,
conid,
database,
},
}
);
}
},
});
if (res) {
showSnackbarSuccess(`Merged ${mergedRows.length} rows into ${file} in archive ${$currentArchive}`);
}
}
// export function mirrorWriteEnabled(requireKey) {
// return requireKey ? !!display.baseTable?.primaryKey || !!display.baseCollection : !!display.baseTableOrSimilar;
// }
// export async function mergeSelectionIntoMirror({ fullRows, mergeMode = 'merge' }) {
// const file = display.baseTableOrSimilar?.pureName;
// const mergeKey = display.baseCollection
// ? display.baseCollection?.uniqueKey?.map(x => x.columnName)
// : display.baseTable?.primaryKey.columns.map(x => x.columnName);
// const cells = cellsToRegularCells(selectedCells);
// const rowIndexes = _.sortBy(_.uniq(cells.map(x => x[0])));
// const colIndexes = _.sortBy(_.uniq(cells.map(x => x[1])));
// const rows = rowIndexes.map(rowIndex => grider.getRowData(rowIndex));
// // @ts-ignore
// const columns = colIndexes.map(col => realColumnUniqueNames[col]);
// const mergedRows = fullRows ? rows : rows.map(x => _.pick(x, _.uniq([...columns, ...mergeKey])));
// const res = await apiCall('archive/modify-file', {
// folder: $currentArchive,
// file,
// mergedRows,
// mergeKey,
// mergeMode,
// });
// if (res) {
// showSnackbarSuccess(`Merged ${mergedRows.length} rows into ${file} in archive ${$currentArchive}`);
// }
// }
export function canShowLeftPanel() {
return !hideGridLeftColumn;
}
@@ -1152,8 +1225,16 @@
onChangeSelectedColumns(getSelectedColumns().map(x => x.columnName));
}
let publishedCells = null;
if (onPublishedCellsChanged) {
onPublishedCellsChanged(getCellsPublished(selectedCells));
if (!publishedCells) publishedCells = getCellsPublished(selectedCells);
onPublishedCellsChanged(publishedCells);
}
if (onGetSelectionMenu) {
if (!publishedCells) publishedCells = getCellsPublished(selectedCells);
selectionMenu = onGetSelectionMenu(publishedCells);
}
}
});
@@ -1192,6 +1273,7 @@
engine: display?.driver,
condition: display?.getChangeSetCondition(rowData),
insertedRowIndex: grider?.getInsertedRowIndex(row),
rowStatus: grider.getRowStatus(row),
};
})
.filter(x => x.column);
@@ -1747,14 +1829,14 @@
{ placeTag: 'save' },
{ command: 'dataGrid.revertRowChanges', hideDisabled: true },
{ command: 'dataGrid.revertAllChanges', hideDisabled: true },
{ command: 'dataGrid.deleteSelectedRows' },
{ command: 'dataGrid.insertNewRow' },
{ command: 'dataGrid.cloneRows' },
{ command: 'dataGrid.deleteSelectedRows', hideDisabled: true },
{ command: 'dataGrid.insertNewRow', hideDisabled: true },
{ command: 'dataGrid.cloneRows', hideDisabled: true },
{ command: 'dataGrid.setNull', hideDisabled: true },
{ command: 'dataGrid.removeField', hideDisabled: true },
{ placeTag: 'edit' },
{ divider: true },
{ command: 'dataGrid.findColumn' },
{ command: 'dataGrid.findColumn', hideDisabled: true },
{ command: 'dataGrid.hideColumn', hideDisabled: true },
{ command: 'dataGrid.filterSelected' },
{ command: 'dataGrid.clearFilter' },
@@ -1773,17 +1855,18 @@
// { command: 'dataGrid.copyJsonDocument', hideDisabled: true },
{ divider: true },
{ placeTag: 'export' },
{
label: 'Save to current archive',
submenu: [
{ command: 'dataGrid.mergeSelectedCellsIntoMirror' },
{ command: 'dataGrid.mergeSelectedRowsIntoMirror' },
{ command: 'dataGrid.appendSelectedCellsIntoMirror' },
{ command: 'dataGrid.appendSelectedRowsIntoMirror' },
{ command: 'dataGrid.replaceSelectedCellsIntoMirror' },
{ command: 'dataGrid.replaceSelectedRowsIntoMirror' },
],
},
// {
// label: 'Save to current archive',
// submenu: [
// { command: 'dataGrid.mergeSelectedCellsIntoMirror' },
// { command: 'dataGrid.mergeSelectedRowsIntoMirror' },
// { command: 'dataGrid.appendSelectedCellsIntoMirror' },
// { command: 'dataGrid.appendSelectedRowsIntoMirror' },
// { command: 'dataGrid.replaceSelectedCellsIntoMirror' },
// { command: 'dataGrid.replaceSelectedRowsIntoMirror' },
// ],
// },
isProApp() && { command: 'dataGrid.sendToDataDeploy' },
{ command: 'dataGrid.generateSqlFromData' },
{ command: 'dataGrid.openFreeTable' },
{ command: 'dataGrid.openChartFromSelection' },
@@ -2017,6 +2100,7 @@
onSetFormView={formViewAvailable && display?.baseTable?.primaryKey ? handleSetFormView : null}
{dataEditorTypesBehaviourOverride}
{gridColoringMode}
{overlayDefinition}
/>
{/each}
</tbody>
@@ -2053,7 +2137,19 @@
on:scroll={e => (firstVisibleRowScrollIndex = e.detail)}
bind:this={domVerticalScroll}
/>
{#if selectedCellsInfo}
{#if selectionMenu}
<div class="selection-menu">
{#each selectionMenu as item}
<InlineButton
on:click={() => {
item.onClick();
}}
>
{item.text}
</InlineButton>
{/each}
</div>
{:else if selectedCellsInfo}
<div class="row-count-label">
{selectedCellsInfo}
</div>
@@ -2118,6 +2214,13 @@
bottom: 20px;
}
.selection-menu {
position: absolute;
background-color: var(--theme-bg-2);
right: 40px;
bottom: 20px;
}
.no-rows-info {
margin-top: 60px;
}

View File

@@ -1,5 +1,19 @@
<script lang="ts" context="module">
  // Row-header icon shown per overlay-diff status (database rows compared
  // against archive/overlay data); keys are GriderRowStatus.status values.
  const OVERLAY_STATUS_ICONS = {
    regular: 'icon equal',
    updated: 'icon not-equal',
    missing: 'img table',
    inserted: 'img archive',
  };

  // Human-readable tooltip for each of the same statuses.
  const OVERLAY_STATUS_TOOLTIPS = {
    regular: 'Row is the same in database and archive',
    updated: 'Row is different in database and archive',
    missing: 'Row is only in database',
    inserted: 'Row is only in archive',
  };
</script>
<script lang="ts">
import openReferenceForm from '../formview/openReferenceForm';
import DictionaryLookupModal from '../modals/DictionaryLookupModal.svelte';
import { showModal } from '../modals/modalTools';
@@ -27,6 +41,7 @@
export let database;
export let driver;
export let gridColoringMode = '36';
export let overlayDefinition = null;
export let dataEditorTypesBehaviourOverride = null;
@@ -51,10 +66,17 @@
onConfirm: value => grider.setCellValue(rowIndex, col.uniqueName, value),
});
}
// $: console.log('rowStatus', rowStatus);
</script>
<tr style={`height: ${rowHeight}px`} class={`coloring-mode-${gridColoringMode}`}>
<RowHeaderCell {rowIndex} onShowForm={onSetFormView ? () => onSetFormView(rowData, null) : null} />
<RowHeaderCell
{rowIndex}
onShowForm={onSetFormView && !overlayDefinition ? () => onSetFormView(rowData, null) : null}
extraIcon={overlayDefinition ? OVERLAY_STATUS_ICONS[rowStatus.status] : null}
extraIconTooltip={overlayDefinition ? OVERLAY_STATUS_TOOLTIPS[rowStatus.status] : null}
/>
{#each visibleRealColumns as col (col.uniqueName)}
{#if inplaceEditorState.cell && rowIndex == inplaceEditorState.cell[0] && col.colIndex == inplaceEditorState.cell[1]}
<InplaceEditor
@@ -83,11 +105,15 @@
isAutofillSelected={cellIsSelected(rowIndex, col.colIndex, autofillSelectedCells)}
isFocusedColumn={focusedColumns?.includes(col.uniqueName)}
isModifiedCell={rowStatus.modifiedFields && rowStatus.modifiedFields.has(col.uniqueName)}
overlayValue={rowStatus.overlayFields?.[col.uniqueName]}
hasOverlayValue={rowStatus.overlayFields && col.uniqueName in rowStatus.overlayFields}
isMissingOverlayField={rowStatus.missingOverlayFields && rowStatus.missingOverlayFields.has(col.uniqueName)}
isModifiedRow={rowStatus.status == 'updated'}
isInserted={rowStatus.status == 'inserted' ||
(rowStatus.insertedFields && rowStatus.insertedFields.has(col.uniqueName))}
isDeleted={rowStatus.status == 'deleted' ||
(rowStatus.deletedFields && rowStatus.deletedFields.has(col.uniqueName))}
isMissing={rowStatus.status == 'missing'}
{onSetFormView}
{isDynamicStructure}
isAutoFillMarker={autofillMarkerCell &&

View File

@@ -1,8 +1,10 @@
export interface GriderRowStatus {
status: 'regular' | 'updated' | 'deleted' | 'inserted';
status: 'regular' | 'updated' | 'deleted' | 'inserted' | 'missing';
modifiedFields?: Set<string>;
insertedFields?: Set<string>;
deletedFields?: Set<string>;
overlayFields?: { [field: string]: string };
missingOverlayFields?: Set<string>;
}
export default abstract class Grider {
@@ -61,4 +63,7 @@ export default abstract class Grider {
this.setCellValue(index, key, changeObject[key]);
}
}
getInsertedRowIndex(index) {
return null;
}
}

View File

@@ -20,7 +20,7 @@
export let allowChangeChangeSetStructure = false;
export let infoLoadCounter = 0;
export let driver;
export let driver = null;
let loadedRows;
let infoCounter = 0;

View File

@@ -0,0 +1,110 @@
import { GridDisplay } from 'dbgate-datalib';
import Grider from './Grider';
import { GriderRowStatus } from './Grider';
import _uniq from 'lodash/uniq';
/**
 * Grider showing a diff of database rows against "overlay" data (e.g. an
 * archive file). Overlay rows with no matching DB row are shown first as
 * "inserted"; DB rows are marked "regular", "updated" (with per-field overlay
 * values) or "missing" (no overlay counterpart). Rows and statuses are
 * computed lazily and cached per row index.
 */
export default class OverlayDiffGrider extends Grider {
  // Overlay rows without a matching DB row; rendered before sourceRows
  private insertedOverlayRows: any[];
  // Row indexes whose data + status have already been computed
  private cachedIndexes: Set<number>;
  private dataByIndex;
  private statusByIndex;
  // Overlay rows keyed by their joined match-column values
  private overlayByKey: { [key: string]: any };

  constructor(
    public sourceRows: any[],
    public display: GridDisplay,
    public matchColumns: string[],
    public overlayData: any[],
    public matchedDbKeys: any[][]
  ) {
    super();
    // Keys of DB rows that have a counterpart in the overlay
    const matchedKeySet = new Set(matchedDbKeys.map(key => key.join('||')));
    this.insertedOverlayRows = overlayData.filter(row => !matchedKeySet.has(this.keyOf(row)));

    this.overlayByKey = {};
    for (const overlayRow of overlayData) {
      this.overlayByKey[this.keyOf(overlayRow)] = overlayRow;
    }

    this.dataByIndex = {};
    this.statusByIndex = {};
    this.cachedIndexes = new Set();
  }

  // Composite match key of a row: match-column values joined with '||'
  private keyOf(row): string {
    return this.matchColumns.map(col => row[col]).join('||');
  }

  // Stores computed data + status for a row index and marks it cached
  private storeCache(index: number, data, status) {
    this.dataByIndex[index] = data;
    this.statusByIndex[index] = status;
    this.cachedIndexes.add(index);
  }

  requireRowCache(index: number) {
    if (this.cachedIndexes.has(index)) return;

    // Leading region: overlay-only rows, shown as inserted
    if (index < this.insertedOverlayRows.length) {
      this.storeCache(index, this.insertedOverlayRows[index], { status: 'inserted' });
      return;
    }

    const dbRow = this.sourceRows[index - this.insertedOverlayRows.length];
    if (!dbRow) {
      this.storeCache(index, dbRow, { status: 'missing' });
      return;
    }

    const overlayRow = this.overlayByKey[this.keyOf(dbRow)];
    if (!overlayRow) {
      // DB row without overlay counterpart
      this.storeCache(index, dbRow, { status: 'missing' });
      return;
    }

    // Field-by-field comparison against the overlay row
    const overlayFields = {};
    const missingOverlayFields = new Set();
    for (const field of this.display.columns.map(col => col.columnName)) {
      if (!(field in overlayRow)) {
        missingOverlayFields.add(field);
      } else if (dbRow[field] != overlayRow[field]) {
        // loose equality intentional? (original used !=) — tolerates 1 vs '1'
        overlayFields[field] = overlayRow[field];
      }
    }

    if (Object.keys(overlayFields).length > 0 || missingOverlayFields.size > 0) {
      this.storeCache(index, dbRow, {
        status: 'updated',
        overlayFields,
        missingOverlayFields,
        modifiedFields: new Set(Object.keys(overlayFields)),
      });
    } else {
      this.storeCache(index, dbRow, { status: 'regular' });
    }
  }

  getRowData(index: number) {
    this.requireRowCache(index);
    return this.dataByIndex[index];
  }

  getRowStatus(index): GriderRowStatus {
    this.requireRowCache(index);
    return this.statusByIndex[index];
  }

  get rowCount() {
    return this.sourceRows.length + this.insertedOverlayRows.length;
  }
}

View File

@@ -1,9 +1,13 @@
<script lang="ts">
import ShowFormButton from '../formview/ShowFormButton.svelte';
import FontIcon from '../icons/FontIcon.svelte';
export let rowIndex;
export let onShowForm;
export let extraIcon = null;
export let extraIconTooltip = null;
let mouseIn = false;
</script>
@@ -18,6 +22,11 @@
{#if mouseIn && onShowForm}
<ShowFormButton on:click={onShowForm} />
{/if}
{#if extraIcon}
<div class="extraIcon" title={extraIconTooltip}>
<FontIcon icon={extraIcon} />
</div>
{/if}
</td>
<style>
@@ -29,4 +38,9 @@
overflow: hidden;
position: relative;
}
.extraIcon {
position: absolute;
right: 0px;
top: 1px;
}
</style>

View File

@@ -83,6 +83,7 @@
import hasPermission from '../utility/hasPermission';
import { openImportExportTab } from '../utility/importExportTools';
import { getIntSettingsValue } from '../settings/settingsTools';
import OverlayDiffGrider from './OverlayDiffGrider';
export let conid;
export let display;
@@ -92,6 +93,7 @@
export let config;
export let changeSetState;
export let dispatchChangeSet;
export let overlayDefinition = null;
export let macroPreview;
export let macroValues;
@@ -110,7 +112,7 @@
// $: console.log('loadedRows BIND', loadedRows);
$: {
if (macroPreview) {
if (!overlayDefinition && macroPreview) {
grider = new ChangeSetGrider(
loadedRows,
changeSetState,
@@ -124,13 +126,25 @@
}
// prevent recreate grider, if no macro is selected, so there is no need to selectedcells in macro
$: {
if (!macroPreview) {
if (!overlayDefinition && !macroPreview) {
grider = new ChangeSetGrider(loadedRows, changeSetState, dispatchChangeSet, display);
}
}
// $: console.log('GRIDER', grider);
// $: if (onChangeGrider) onChangeGrider(grider);
$: {
if (overlayDefinition) {
grider = new OverlayDiffGrider(
loadedRows,
display,
overlayDefinition.matchColumns,
overlayDefinition.overlayData,
overlayDefinition.matchedDbKeys
);
}
}
export async function exportGrid() {
const coninfo = await getConnectionInfo({ conid });

View File

@@ -47,6 +47,8 @@
export let isRawMode = false;
export let forceReadOnly = false;
$: connection = useConnectionInfo({ conid });
$: dbinfo = useDatabaseInfo({ conid, database });
$: serverVersion = useDatabaseServerVersion({ conid, database });
@@ -73,7 +75,7 @@
{ showHintColumns: getBoolSettingsValue('dataGrid.showHintColumns', true) },
$serverVersion,
table => getDictionaryDescription(table, conid, database, $apps, $connections),
$connection?.isReadOnly,
forceReadOnly || $connection?.isReadOnly,
isRawMode
)
: null;
@@ -161,7 +163,7 @@
formViewComponent={SqlFormView}
{display}
showReferences
showMacros={!$connection?.isReadOnly}
showMacros={!forceReadOnly && !$connection?.isReadOnly}
hasMultiColumnFilter
onRunMacro={handleRunMacro}
macroCondition={macro => macro.type == 'transformValue'}

View File

@@ -7,6 +7,7 @@
props?: any;
formatter?: any;
slot?: number;
slotKey?: string;
isHighlighted?: Function;
sortable?: boolean;
filterable?: boolean;
@@ -25,12 +26,15 @@
import { evalFilterBehaviour } from 'dbgate-tools';
import { evaluateCondition } from 'dbgate-sqltree';
import { compileCompoudEvalCondition } from 'dbgate-filterparser';
import { chevronExpandIcon } from '../icons/expandIcons';
export let columns: (TableControlColumn | false)[];
export let rows;
export let rows = null;
export let groupedRows = null;
export let focusOnCreate = false;
export let selectable = false;
export let selectedIndex = 0;
export let selectedKey = null;
export let clickable = false;
export let disableFocusOutline = false;
export let emptyMessage = null;
@@ -41,9 +45,12 @@
export let checkedKeys = null;
export let onSetCheckedKeys = null;
export let extractCheckedKey = x => x.id;
export let extractTableItemKey = x => x.id;
export let itemSupportsCheckbox = x => true;
export let filters = null;
export let selectionMode: 'index' | 'key' = 'index';
const dispatch = createEventDispatcher();
$: columnList = _.compact(_.flatten(columns));
@@ -53,37 +60,120 @@
});
const handleKeyDown = event => {
if (event.keyCode == keycodes.downArrow) {
selectedIndex = Math.min(selectedIndex + 1, sortedRows.length - 1);
const oldSelectedIndex =
selectionMode == 'index' ? selectedIndex : _.findIndex(flatRowsShown, x => extractTableItemKey(x) == selectedKey);
let newIndex = oldSelectedIndex;
switch (event.keyCode) {
case keycodes.downArrow:
newIndex = Math.min(newIndex + 1, flatRowsShown.length - 1);
break;
case keycodes.upArrow:
newIndex = Math.max(0, newIndex - 1);
break;
case keycodes.home:
newIndex = 0;
break;
case keycodes.end:
newIndex = rows.length - 1;
break;
case keycodes.pageUp:
newIndex -= 10;
break;
case keycodes.pageDown:
newIndex += 10;
break;
}
if (event.keyCode == keycodes.upArrow) {
selectedIndex = Math.max(0, selectedIndex - 1);
if (newIndex < 0) {
newIndex = 0;
}
if (newIndex >= flatRowsShown.length) {
newIndex = flatRowsShown.length - 1;
}
if (clickable && oldSelectedIndex != newIndex) {
event.preventDefault();
domRows[newIndex]?.scrollIntoView();
if (clickable) {
dispatch('clickrow', flatRowsShown[newIndex]);
}
if (selectionMode == 'index') {
selectedIndex = newIndex;
} else {
selectedKey = extractTableItemKey(flatRowsShown[newIndex]);
}
}
};
function filterRows(rows, filters) {
function filterRows(grows, filters) {
const condition = compileCompoudEvalCondition(filters);
if (!condition) return rows;
if (!condition) return grows;
return rows.filter(row => {
const newrow = { ...row };
for (const col of columnList) {
if (col.filteredExpression) {
newrow[col.fieldName] = col.filteredExpression(row);
}
}
return evaluateCondition(condition, newrow);
});
return grows
.map(gitem => {
return {
group: gitem.group,
rows: gitem.rows.filter(row => {
const newrow = { ...row };
for (const col of columnList) {
if (col.filteredExpression) {
newrow[col.fieldName] = col.filteredExpression(row);
}
}
return evaluateCondition(condition, newrow);
}),
};
})
.filter(gitem => gitem.rows.length > 0);
}
// function computeGroupedRows(array) {
// if (!extractGroupName) {
// return [{ label: null, rows: array }];
// }
// const res = [];
// let lastGroupName = null;
// let buildArray = [];
// for (const item of array) {
// const groupName = extractGroupName(item);
// if (lastGroupName != groupName) {
// if (buildArray.length > 0) {
// res.push({ label: lastGroupName, rows: buildArray });
// }
// lastGroupName = groupName;
// buildArray = [];
// }
// buildArray.push(item);
// }
// if (buildArray.length > 0) {
// res.push({ label: lastGroupName, rows: buildArray });
// }
// }
let sortedByField = null;
let sortOrderIsDesc = false;
let collapsedGroupIndexes = [];
let domRows = {};
$: filteredRows = filters ? filterRows(rows, $filters) : rows;
$: rowsSource = groupedRows ? groupedRows : [{ group: null, rows }];
$: sortedRowsTmp = sortedByField ? _.sortBy(filteredRows || [], sortedByField) : filteredRows;
$: sortedRows = sortOrderIsDesc ? [...sortedRowsTmp].reverse() : sortedRowsTmp;
$: filteredRows = filters ? filterRows(rowsSource, $filters) : rowsSource;
$: sortedRows = sortedByField
? filteredRows.map(gitem => {
let res = _.sortBy(gitem.rows, sortedByField);
if (sortOrderIsDesc) res = [...res].reverse();
return { group: gitem.group, rows: res };
})
: filteredRows;
// $: console.log('sortedRows', sortedRows);
$: flatRowsShown = sortedRows.map(gitem => gitem.rows).flat();
$: checkableFlatRowsShown = flatRowsShown.filter(x => itemSupportsCheckbox(x));
// $: groupedRows = computeGroupedRows(sortedRows);
</script>
<table
@@ -98,7 +188,16 @@
<thead class:stickyHeader>
<tr>
{#if checkedKeys}
<th></th>
<th>
<input
type="checkbox"
checked={checkableFlatRowsShown.every(r => checkedKeys.includes(extractTableItemKey(r)))}
on:change={e => {
if (e.target['checked']) onSetCheckedKeys(checkableFlatRowsShown.map(r => extractTableItemKey(r)));
else onSetCheckedKeys([]);
}}
/>
</th>
{/if}
{#each columnList as col}
<th
@@ -129,7 +228,7 @@
{#if filters}
<tr>
{#if checkedKeys}
<td></td>
<td class="empty-cell"></td>
{/if}
{#each columnList as col}
<td class="filter-cell" class:empty-cell={!col.filterable}>
@@ -147,58 +246,91 @@
{/if}
</thead>
<tbody>
{#each sortedRows as row, index}
<tr
class:selected={selectable && selectedIndex == index}
class:clickable
on:click={() => {
if (selectable) {
selectedIndex = index;
domTable.focus();
}
if (clickable) {
dispatch('clickrow', row);
}
}}
>
{#if checkedKeys}
<td>
<input
type="checkbox"
checked={checkedKeys.includes(extractCheckedKey(row))}
on:change={e => {
if (e.target['checked']) onSetCheckedKeys(_.uniq([...checkedKeys, extractCheckedKey(row)]));
else onSetCheckedKeys(checkedKeys.filter(x => x != extractCheckedKey(row)));
}}
/>
{#each sortedRows as gitem, groupIndex}
{#if gitem.group}
<tr class="group-row">
<td
colspan={columnList.length + (checkedKeys ? 1 : 0)}
class="groupcell"
on:click={() => {
if (collapsedGroupIndexes.includes(groupIndex)) {
collapsedGroupIndexes = collapsedGroupIndexes.filter(x => x != groupIndex);
} else {
collapsedGroupIndexes = [...collapsedGroupIndexes, groupIndex];
}
}}
>
<FontIcon icon={chevronExpandIcon(!collapsedGroupIndexes.includes(groupIndex))} padRight />
<strong>{gitem.group} ({gitem.rows.length})</strong>
</td>
{/if}
{#each columnList as col}
{@const rowProps = { ...col.props, ...(col.getProps ? col.getProps(row) : null) }}
<td class:isHighlighted={col.isHighlighted && col.isHighlighted(row)} class:noCellPadding>
{#if col.component}
<svelte:component this={col.component} {...rowProps} />
{:else if col.formatter}
{col.formatter(row)}
{:else if col.slot != null}
{#if col.slot == -1}<slot name="-1" {row} {col} {index} />
{:else if col.slot == 0}<slot name="0" {row} {col} {index} {...rowProps} />
{:else if col.slot == 1}<slot name="1" {row} {col} {index} {...rowProps} />
{:else if col.slot == 2}<slot name="2" {row} {col} {index} {...rowProps} />
{:else if col.slot == 3}<slot name="3" {row} {col} {index} {...rowProps} />
{:else if col.slot == 4}<slot name="4" {row} {col} {index} {...rowProps} />
{:else if col.slot == 5}<slot name="5" {row} {col} {index} {...rowProps} />
{:else if col.slot == 6}<slot name="6" {row} {col} {index} {...rowProps} />
{:else if col.slot == 7}<slot name="7" {row} {col} {index} {...rowProps} />
{:else if col.slot == 8}<slot name="8" {row} {col} {index} {...rowProps} />
{:else if col.slot == 9}<slot name="9" {row} {col} {index} {...rowProps} />
{/if}
{:else}
{row[col.fieldName] || ''}
</tr>
{/if}
{#if !collapsedGroupIndexes.includes(groupIndex)}
{#each gitem.rows as row}
{@const index = _.indexOf(flatRowsShown, row)}
<tr
class:selected={selectable &&
(selectionMode == 'index' ? selectedIndex == index : selectedKey == extractTableItemKey(row))}
class:clickable
bind:this={domRows[index]}
on:click={() => {
if (selectable) {
if (selectionMode == 'index') {
selectedIndex = index;
} else {
selectedKey = extractTableItemKey(row);
}
domTable.focus();
}
if (clickable) {
dispatch('clickrow', row);
}
}}
>
{#if checkedKeys}
<td>
{#if itemSupportsCheckbox(row)}
<input
type="checkbox"
checked={checkedKeys.includes(extractTableItemKey(row))}
on:change={e => {
if (e.target['checked']) onSetCheckedKeys(_.uniq([...checkedKeys, extractTableItemKey(row)]));
else onSetCheckedKeys(checkedKeys.filter(x => x != extractTableItemKey(row)));
}}
/>
{/if}
</td>
{/if}
</td>
{#each columnList as col}
{@const rowProps = { ...col.props, ...(col.getProps ? col.getProps(row) : null) }}
<td class:isHighlighted={col.isHighlighted && col.isHighlighted(row)} class:noCellPadding>
{#if col.component}
<svelte:component this={col.component} {...rowProps} />
{:else if col.formatter}
{col.formatter(row)}
{:else if col.slot != null}
{#key row[col.slotKey] || 'key'}
{#if col.slot == -1}<slot name="-1" {row} {col} {index} />
{:else if col.slot == 0}<slot name="0" {row} {col} {index} {...rowProps} />
{:else if col.slot == 1}<slot name="1" {row} {col} {index} {...rowProps} />
{:else if col.slot == 2}<slot name="2" {row} {col} {index} {...rowProps} />
{:else if col.slot == 3}<slot name="3" {row} {col} {index} {...rowProps} />
{:else if col.slot == 4}<slot name="4" {row} {col} {index} {...rowProps} />
{:else if col.slot == 5}<slot name="5" {row} {col} {index} {...rowProps} />
{:else if col.slot == 6}<slot name="6" {row} {col} {index} {...rowProps} />
{:else if col.slot == 7}<slot name="7" {row} {col} {index} {...rowProps} />
{:else if col.slot == 8}<slot name="8" {row} {col} {index} {...rowProps} />
{:else if col.slot == 9}<slot name="9" {row} {col} {index} {...rowProps} />
{/if}
{/key}
{:else}
{row[col.fieldName] || ''}
{/if}
</td>
{/each}
</tr>
{/each}
</tr>
{/if}
{/each}
{#if emptyMessage && sortedRows.length == 0}
<tr>
@@ -223,6 +355,9 @@
background: var(--theme-bg-0);
}
tbody tr.selected {
background: var(--theme-bg-3);
}
table:focus tbody tr.selected {
background: var(--theme-bg-selected);
}
tbody tr.clickable:hover {
@@ -287,4 +422,9 @@
.empty-cell {
background-color: var(--theme-bg-1);
}
.groupcell {
background-color: var(--theme-bg-1);
cursor: pointer;
}
</style>

View File

@@ -10,14 +10,17 @@
export let folderName;
export let name;
export let filterExtension = null;
const { setFieldValue, values } = getFormContext();
$: files = useArchiveFiles({ folder: folderName });
$: filesOptions = ($files || []).map(x => ({
value: x.name,
label: x.name,
}));
$: filesOptions = ($files || [])
.filter(x => (filterExtension ? x.name.endsWith('.' + filterExtension) : true))
.map(x => ({
value: x.name,
label: x.name,
}));
</script>
<div class="wrapper">

View File

@@ -11,16 +11,22 @@
export let additionalFolders = [];
export let name;
export let allowCreateNew = false;
export let zipFilesOnly = false;
export let skipZipFiles = false;
const { setFieldValue } = getFormContext();
const folders = useArchiveFolders();
$: folderOptions = [
...($folders || []).map(folder => ({
value: folder.name,
label: folder.name,
})),
...($folders || [])
.filter(folder => (zipFilesOnly ? folder.name.endsWith('.zip') : true))
.filter(folder => (skipZipFiles ? !folder.name.endsWith('.zip') : true))
.map(folder => ({
value: folder.name,
label: folder.name,
})),
...additionalFolders
.filter(x => x != '@create')
.filter(x => !($folders || []).find(y => y.name == x))
@@ -28,7 +34,7 @@
value: folder,
label: folder,
})),
{
allowCreateNew && {
label: '(Create new)',
value: '@create',
},

View File

@@ -22,7 +22,7 @@
? { disabled: true }
: {
onClick: () => {
setFieldValue(name, !$values[name]);
setFieldValue(name, $values?.[name] == 0 ? true : $values?.[name] == 1 ? false : !$values?.[name]);
dispatch('change');
},
}}

View File

@@ -11,4 +11,9 @@
}
</script>
<CheckboxField {...$$restProps} checked={$values[name] ?? defaultValue} on:change={handleChange} on:change />
<CheckboxField
{...$$restProps}
checked={$values?.[name] == 0 ? false : $values?.[name] == '1' ? true : ($values?.[name] ?? defaultValue)}
on:change={handleChange}
on:change
/>

View File

@@ -110,6 +110,7 @@
'icon history': 'mdi mdi-history',
'icon structure': 'mdi mdi-tools',
'icon square': 'mdi mdi-square',
'icon data-deploy': 'mdi mdi-database-settings',
'icon edit': 'mdi mdi-pencil',
'icon delete': 'mdi mdi-delete',
@@ -206,6 +207,8 @@
'icon type-objectid': 'mdi mdi-alpha-i-box',
'icon type-null': 'mdi mdi-code-equal',
'icon type-unknown': 'mdi mdi-help-box',
'icon equal': 'mdi mdi-equal',
'icon not-equal': 'mdi mdi-not-equal-variant',
'icon at': 'mdi mdi-at',
'icon expand-all': 'mdi mdi-expand-all',
@@ -218,6 +221,7 @@
'icon autocommit-off': 'mdi mdi-check-circle-outline',
'icon premium': 'mdi mdi-star',
'icon upload': 'mdi mdi-upload',
'img ok': 'mdi mdi-check-circle color-icon-green',
'img ok-inv': 'mdi mdi-check-circle color-icon-inv-green',
@@ -232,12 +236,14 @@
'img archive': 'mdi mdi-table color-icon-gold',
'img archive-folder': 'mdi mdi-database-outline color-icon-green',
'img zipfile': 'mdi mdi-zip-box color-icon-gold',
'img autoincrement': 'mdi mdi-numeric-1-box-multiple-outline',
'img column': 'mdi mdi-table-column',
'img server': 'mdi mdi-server color-icon-blue',
'img primary-key': 'mdi mdi-key-star color-icon-yellow',
'img foreign-key': 'mdi mdi-key-link',
'img sql-file': 'mdi mdi-file',
'img anyfile': 'mdi mdi-file-question color-icon-red',
'img shell': 'mdi mdi-flash color-icon-blue',
'img chart': 'mdi mdi-chart-bar color-icon-magenta',
'img markdown': 'mdi mdi-application color-icon-red',
@@ -301,7 +307,7 @@
'img type-rejson': 'mdi mdi-color-json color-icon-blue',
'img keydb': 'mdi mdi-key color-icon-blue',
'img duplicator': 'mdi mdi-content-duplicate color-icon-green',
'img replicator': 'mdi mdi-content-duplicate color-icon-green',
'img import': 'mdi mdi-database-import color-icon-green',
'img export': 'mdi mdi-database-export color-icon-green',
'img transform': 'mdi mdi-rotate-orbit color-icon-blue',
@@ -311,6 +317,8 @@
'img db-backup': 'mdi mdi-database-export color-icon-yellow',
'img db-restore': 'mdi mdi-database-import color-icon-red',
'img settings': 'mdi mdi-cog color-icon-blue',
'img data-deploy': 'mdi mdi-database-settings color-icon-green',
};
</script>

View File

@@ -5,7 +5,7 @@
import { getConnectionLabel } from 'dbgate-tools';
export let allowChooseModel = false;
export let direction;
export let direction = 'source';
$: connections = useConnectionList();
$: connectionOptions = [

View File

@@ -25,6 +25,9 @@
import { _t } from '../translations';
import { showModal } from '../modals/modalTools';
import InputTextModal from '../modals/InputTextModal.svelte';
import FormCheckboxField from '../forms/FormCheckboxField.svelte';
import { isProApp } from '../utility/proTools';
import FormTextField from '../forms/FormTextField.svelte';
export let direction;
export let storageTypeField;
@@ -133,6 +136,41 @@
label="Storage type"
/>
{#if format && isProApp()}
{#if direction == 'source'}
<FormCheckboxField
name={`importFromZipFile`}
label={_t('importExport.importFromZipFile', { defaultMessage: 'Import from ZIP file (in archive folder)' })}
/>
{#if $values.importFromZipFile}
<FormArchiveFolderSelect
label={_t('importExport.importFromZipArchive', { defaultMessage: 'Input ZIP archive' })}
name={archiveFolderField}
additionalFolders={_.compact([$values[archiveFolderField]])}
zipFilesOnly
/>
{/if}
{/if}
{#if direction == 'target'}
<FormCheckboxField
name={`exportToZipFile`}
label={_t('importExport.exportToZipFile', { defaultMessage: 'Export to ZIP file' })}
/>
{#if $values.exportToZipFile}
<FormCheckboxField
name={`createZipFileInArchive`}
label={_t('importExport.createZipFileInArchive', { defaultMessage: 'Create ZIP file in archive' })}
/>
<FormTextField
label={_t('importExport.exportToZipArchive', { defaultMessage: 'Output ZIP archive' })}
name={archiveFolderField}
placeholder={'zip-archive-yyyy-mm-dd-hh-mm-ss.zip'}
/>
{/if}
{/if}
{/if}
{#if storageType == 'database' || storageType == 'query'}
<FormConnectionSelect name={connectionIdField} label="Server" {direction} />
<FormDatabaseSelect conidName={connectionIdField} name={databaseNameField} label="Database" />
@@ -173,18 +211,20 @@
label="Archive folder"
name={archiveFolderField}
additionalFolders={_.compact([$values[archiveFolderField]])}
allowCreateNew={direction == 'target'}
/>
{/if}
{#if storageType == 'archive' && direction == 'source'}
{#if direction == 'source' && (storageType == 'archive' || $values.importFromZipFile)}
<FormArchiveFilesSelect
label={_t('importExport.sourceFiles', { defaultMessage: 'Source files' })}
folderName={$values[archiveFolderField]}
name={tablesField}
filterExtension={format?.extension}
/>
{/if}
{#if format && direction == 'source'}
{#if format && direction == 'source' && !$values.importFromZipFile}
<FilesInput {setPreviewSource} />
{/if}

View File

@@ -1,4 +1,5 @@
import _ from 'lodash';
import moment from 'moment';
import { ScriptWriter, ScriptWriterJson } from 'dbgate-tools';
import getAsArray from '../utility/getAsArray';
import { getConnectionInfo } from '../utility/metadataLoaders';
@@ -93,7 +94,13 @@ function getSourceExpr(extensions, sourceName, values, sourceConnection, sourceD
return [
format.readerFunc,
{
..._.omit(sourceFile, ['isDownload']),
...(sourceFile
? _.omit(sourceFile, ['isDownload'])
: {
fileName: values.importFromZipFile
? `zip://archive:${values.sourceArchiveFolder}//${sourceName}`
: sourceName,
}),
...extractFormatApiParameters(values, 'source', format),
},
];
@@ -237,6 +244,13 @@ export default async function createImpExpScript(extensions, values, forceScript
script.copyStream(sourceVar, targetVar, colmapVar, sourceName);
script.endLine();
}
if (values.exportToZipFile) {
let zipFileName = values.exportToZipFileName || `zip-archive-${moment().format('YYYY-MM-DD-HH-mm-ss')}.zip`;
if (!zipFileName.endsWith('.zip')) zipFileName += '.zip';
script.zipDirectory('.', values.createZipFileInArchive ? 'archive:' + zipFileName : zipFileName);
}
return script.getScript(values.schedule);
}

View File

@@ -17,7 +17,7 @@
<div>{message}</div>
<FormArchiveFolderSelect label="Archive folder" name="archiveFolder" isNative />
<FormArchiveFolderSelect label="Archive folder" name="archiveFolder" isNative allowCreateNew />
<svelte:fragment slot="footer">
<FormSubmit

View File

@@ -0,0 +1,368 @@
<script lang="ts">
import { onMount } from 'svelte';
import FormStyledButton from '../buttons/FormStyledButton.svelte';
import FormProvider from '../forms/FormProvider.svelte';
import ModalBase from './ModalBase.svelte';
import { closeCurrentModal } from './modalTools';
import { _t } from '../translations';
import { apiCall } from '../utility/api';
import TabControl from '../elements/TabControl.svelte';
import TableControl from '../elements/TableControl.svelte';
import { writable } from 'svelte/store';
import LargeButton from '../buttons/LargeButton.svelte';
import { downloadFromApi } from '../utility/exportFileTools';
import getElectron from '../utility/getElectron';
import { showSnackbarSuccess } from '../utility/snackbar';
import { format } from 'date-fns';
import Link from '../elements/Link.svelte';
import _ from 'lodash';
export let mode: 'export' | 'import';
export let uploadedFilePath = undefined;
let fullData: any = {};
async function loadExportedData() {
fullData = await apiCall('config/export-connections-and-settings');
initFromFullData();
}
async function loadImportedData() {
fullData = await apiCall('files/get-jsons-from-zip', { filePath: uploadedFilePath });
initFromFullData();
}
function initFromFullData() {
connections = fullData.connections || [];
users = fullData.users || [];
roles = fullData.roles || [];
authMethods = fullData.auth_methods || [];
config = fullData.config || [];
handleCheckAll(true);
}
function handleCheckAll(checked) {
if (checked) {
checkedConnections = connections.map(x => x.id);
checkedUsers = users.map(x => x.id);
checkedRoles = roles.map(x => x.id);
checkedAuthMethods = authMethods.map(x => x.id);
checkedConfig = config.map(x => x.id);
} else {
checkedConnections = [];
checkedUsers = [];
checkedRoles = [];
checkedAuthMethods = [];
checkedConfig = [];
}
}
onMount(() => {
if (mode == 'export') {
loadExportedData();
}
if (mode == 'import') {
loadImportedData();
}
});
function getLimitedData() {
const limitedData: any = {
connections: fullData.connections?.filter(x => checkedConnections.includes(x.id)),
users: fullData.users?.filter(x => checkedUsers.includes(x.id)),
user_connections: fullData.user_connections?.filter(
x => checkedUsers.includes(x.user_id) && checkedConnections.includes(x.connection_id)
),
user_roles: fullData.user_roles?.filter(x => checkedUsers.includes(x.user_id) && checkedRoles.includes(x.role_id)),
user_permissions: fullData.user_permissions?.filter(x => checkedUsers.includes(x.user_id)),
roles: fullData.roles?.filter(x => checkedRoles.includes(x.id)),
role_connections: fullData.role_connections?.filter(
x => checkedRoles.includes(x.role_id) && checkedConnections.includes(x.connection_id)
),
role_permissions: fullData.role_permissions?.filter(x => checkedRoles.includes(x.role_id)),
auth_methods: fullData.auth_methods?.filter(x => checkedAuthMethods.includes(x.id)),
auth_methods_config: fullData.auth_methods_config?.filter(x => checkedAuthMethods.includes(x.auth_method_id)),
config: fullData.config?.filter(
x => checkedConfig.includes(x.id) || (x.group == 'admin' && x.key == 'encryptionKey')
),
};
return limitedData;
}
async function handleExport() {
const electron = getElectron();
let filePath;
let fileName;
if (electron) {
const electron = getElectron();
filePath = await electron.showSaveDialog({
filters: [
{ name: `ZIP files`, extensions: ['zip'] },
{ name: `All files`, extensions: ['*'] },
],
defaultPath: `dbgateconfig.zip`,
properties: ['showOverwriteConfirmation'],
});
} else {
const resp = await apiCall('files/generate-uploads-file', { extension: 'sql' });
filePath = resp.filePath;
fileName = resp.fileName;
}
console.log('SELECTED PATH', filePath);
if (!filePath) {
return;
}
await apiCall('files/create-zip-from-jsons', { db: getLimitedData(), filePath });
if (electron) {
showSnackbarSuccess(`Saved to file ${filePath}`);
} else {
await downloadFromApi(`uploads/get?file=${fileName}`, `dbgateconfig.zip`);
}
}
async function handleSaveToArchive() {
const filePath = `archive:dbgateconfig-${format(new Date(), 'yyyy-MM-dd-HH-mm-ss')}.zip`;
await apiCall('files/create-zip-from-jsons', { db: getLimitedData(), filePath });
showSnackbarSuccess(`Saved to ${filePath}`);
}
async function handleImport() {
await apiCall('config/import-connections-and-settings', { db: getLimitedData() });
showSnackbarSuccess(`Imported connections and settings`);
}
let connections = [];
let checkedConnections = [];
let users = [];
let checkedUsers = [];
let roles = [];
let checkedRoles = [];
let authMethods = [];
let checkedAuthMethods = [];
let config = [];
let checkedConfig = [];
const connectionFilters = writable({});
const userFilters = writable({});
const roleFilters = writable({});
const authMethodFilters = writable({});
const configFilters = writable({});
</script>
<FormProvider>
<ModalBase {...$$restProps} fullScreen>
<div slot="header">
{mode == 'export' ? 'Export' : 'Import'} connections &amp; settings
<span class="check-uncheck">
<Link onClick={() => handleCheckAll(true)}>Check all</Link>
|
<Link onClick={() => handleCheckAll(false)}>Uncheck all</Link>
</span>
</div>
<div class="tabs">
<TabControl
tabs={_.compact([
connections?.length && {
label: `Connections (${checkedConnections?.length}/${connections?.length})`,
slot: 1,
},
users?.length && { label: `Users (${checkedUsers?.length}/${users?.length})`, slot: 2 },
roles?.length && { label: `Roles (${checkedRoles?.length}/${roles?.length})`, slot: 3 },
authMethods?.length && {
label: `Auth methods (${checkedAuthMethods?.length}/${authMethods?.length})`,
slot: 4,
},
config?.length && { label: `Config (${checkedConfig?.length}/${config?.length})`, slot: 5 },
])}
>
<svelte:fragment slot="1">
<div class="tablewrap">
<TableControl
filters={connectionFilters}
stickyHeader
columns={[
{ header: 'ID', fieldName: 'id', sortable: true, filterable: true },
{ header: 'Display name', fieldName: 'displayName', sortable: true, filterable: true },
{ header: 'Engine', fieldName: 'engine', sortable: true, filterable: true },
{ header: 'Server', fieldName: 'server', sortable: true, filterable: true },
{ header: 'User', fieldName: 'user', sortable: true, filterable: true },
]}
clickable
rows={connections}
on:clickrow={event => {
checkedConnections = checkedConnections.includes(event.detail.id)
? checkedConnections.filter(id => id !== event.detail.id)
: [...checkedConnections, event.detail.id];
}}
checkedKeys={checkedConnections}
onSetCheckedKeys={keys => {
checkedConnections = keys;
}}
></TableControl>
</div>
</svelte:fragment>
<svelte:fragment slot="2">
<div class="tablewrap">
<TableControl
filters={userFilters}
stickyHeader
columns={[
{ header: 'ID', fieldName: 'id', sortable: true, filterable: true },
{ header: 'Login', fieldName: 'login', sortable: true, filterable: true },
{ header: 'E-mail', fieldName: 'email', sortable: true, filterable: true },
]}
clickable
rows={users}
on:clickrow={event => {
checkedUsers = checkedUsers.includes(event.detail.id)
? checkedUsers.filter(id => id !== event.detail.id)
: [...checkedUsers, event.detail.id];
}}
checkedKeys={checkedUsers}
onSetCheckedKeys={keys => {
checkedUsers = keys;
}}
></TableControl>
</div>
</svelte:fragment>
<svelte:fragment slot="3">
<div class="tablewrap">
<TableControl
filters={roleFilters}
stickyHeader
columns={[
{ header: 'ID', fieldName: 'id', sortable: true, filterable: true },
{ header: 'Name', fieldName: 'name', sortable: true, filterable: true },
]}
clickable
rows={roles}
on:clickrow={event => {
checkedRoles = checkedRoles.includes(event.detail.id)
? checkedRoles.filter(id => id !== event.detail.id)
: [...checkedRoles, event.detail.id];
}}
checkedKeys={checkedRoles}
onSetCheckedKeys={keys => {
checkedRoles = keys;
}}
></TableControl>
</div>
</svelte:fragment>
<svelte:fragment slot="4">
<div class="tablewrap">
<TableControl
filters={authMethodFilters}
stickyHeader
columns={[
{ header: 'ID', fieldName: 'id', sortable: true, filterable: true },
{ header: 'Name', fieldName: 'name', sortable: true, filterable: true },
{ header: 'Type', fieldName: 'type', sortable: true, filterable: true },
]}
clickable
rows={authMethods}
on:clickrow={event => {
checkedAuthMethods = checkedAuthMethods.includes(event.detail.id)
? checkedAuthMethods.filter(id => id !== event.detail.id)
: [...checkedAuthMethods, event.detail.id];
}}
checkedKeys={checkedAuthMethods}
onSetCheckedKeys={keys => {
checkedAuthMethods = keys;
}}
></TableControl>
</div>
</svelte:fragment>
<svelte:fragment slot="5">
<div class="tablewrap">
<TableControl
filters={configFilters}
stickyHeader
columns={[
{ header: 'ID', fieldName: 'id', sortable: true, filterable: true },
{ header: 'Group', fieldName: 'group', sortable: true, filterable: true },
{ header: 'Key', fieldName: 'key', sortable: true, filterable: true },
{ header: 'Value', fieldName: 'value', sortable: true, filterable: true },
]}
clickable
rows={config}
on:clickrow={event => {
checkedConfig = checkedConfig.includes(event.detail.id)
? checkedConfig.filter(id => id !== event.detail.id)
: [...checkedConfig, event.detail.id];
}}
checkedKeys={checkedConfig}
onSetCheckedKeys={keys => {
checkedConfig = keys;
}}
></TableControl>
</div>
</svelte:fragment>
</TabControl>
</div>
<div slot="footer">
<div class="flex m-2">
{#if mode == 'export'}
<LargeButton
data-testid="ExportImportConnectionsModal_exportButton"
icon="icon export"
on:click={handleExport}>{_t('common.export', { defaultMessage: 'Export' })}</LargeButton
>
<LargeButton
data-testid="ExportImportConnectionsModal_saveToArchive"
icon="icon archive"
on:click={handleSaveToArchive}
>{_t('common.saveToArchive', { defaultMessage: 'Save to archive' })}</LargeButton
>
{/if}
{#if mode == 'import'}
<LargeButton
data-testid="ExportImportConnectionsModal_importButton"
icon="icon import"
on:click={handleImport}>{_t('common.import', { defaultMessage: 'Import' })}</LargeButton
>
{/if}
<LargeButton icon="icon close" on:click={closeCurrentModal} data-testid="EditJsonModal_closeButton"
>Close</LargeButton
>
</div>
</div>
</ModalBase>
</FormProvider>
<style>
.tablewrap {
overflow: auto;
width: 100%;
height: calc(100vh - 220px);
margin: 1rem;
}
.tabs {
flex: 1;
}
.check-uncheck {
margin-left: 1rem;
font-size: 0.8rem;
}
</style>

View File

@@ -13,6 +13,7 @@
export let file = 'new-table';
export let folder = $currentArchive;
export let onSave;
export let fileIsReadOnly = false;
const handleSubmit = async e => {
const { file, folder } = e.detail;
@@ -25,8 +26,8 @@
<ModalBase {...$$restProps}>
<svelte:fragment slot="header">Save to archive</svelte:fragment>
<FormArchiveFolderSelect label="Folder" name="folder" isNative />
<FormTextField label="File name" name="file" />
<FormArchiveFolderSelect label="Folder" name="folder" isNative allowCreateNew skipZipFiles />
<FormTextField label="File name" name="file" disabled={fileIsReadOnly} />
<svelte:fragment slot="footer">
<FormSubmit value={_t('common.save', { defaultMessage: 'Save' })} on:click={handleSubmit} />

View File

@@ -42,60 +42,63 @@
{#if !files || files.length == 0}
<ErrorInfo message="No output files" icon="img alert" />
{:else}
<TableControl
rows={files}
columns={[
{ fieldName: 'name', header: 'Name' },
{ fieldName: 'size', header: 'Size', formatter: row => formatFileSize(row.size) },
!electron && {
fieldName: 'download',
header: 'Download',
slot: 0,
},
electron && {
fieldName: 'copy',
header: 'Copy',
slot: 1,
},
electron && {
fieldName: 'show',
header: 'Show',
slot: 2,
},
]}
>
<svelte:fragment slot="0" let:row>
<Link
onClick={() => {
downloadFromApi(`runners/data/${runnerId}/${row.name}`, row.name);
}}
>
download
</Link>
</svelte:fragment>
<div class="flex1 scroll">
<TableControl
rows={files}
stickyHeader
columns={[
{ fieldName: 'name', header: 'Name' },
{ fieldName: 'size', header: 'Size', formatter: row => formatFileSize(row.size) },
!electron && {
fieldName: 'download',
header: 'Download',
slot: 0,
},
electron && {
fieldName: 'copy',
header: 'Copy',
slot: 1,
},
electron && {
fieldName: 'show',
header: 'Show',
slot: 2,
},
]}
>
<svelte:fragment slot="0" let:row>
<Link
onClick={() => {
downloadFromApi(`runners/data/${runnerId}/${row.name}`, row.name);
}}
>
download
</Link>
</svelte:fragment>
<svelte:fragment slot="1" let:row>
<Link
onClick={async () => {
const file = await electron.showSaveDialog({});
if (file) {
const fs = window.require('fs');
fs.copyFile(row.path, file, () => {});
}
}}
>
save
</Link>
</svelte:fragment>
<svelte:fragment slot="1" let:row>
<Link
onClick={async () => {
const file = await electron.showSaveDialog({});
if (file) {
const fs = window.require('fs');
fs.copyFile(row.path, file, () => {});
}
}}
>
save
</Link>
</svelte:fragment>
<svelte:fragment slot="2" let:row>
<Link
onClick={() => {
electron.showItemInFolder(row.path);
}}
>
show
</Link>
</svelte:fragment>
</TableControl>
<svelte:fragment slot="2" let:row>
<Link
onClick={() => {
electron.showItemInFolder(row.path);
}}
>
show
</Link>
</svelte:fragment>
</TableControl>
</div>
{/if}

View File

@@ -57,8 +57,10 @@
export let jslid = undefined;
export let tabid;
let infoLoadCounter = 0;
let jslidChecked = false;
let extractedJslId = null;
const quickExportHandlerRef = createQuickExportHandlerRef();
@@ -155,6 +157,14 @@
}
}
}
if (archiveFolder?.endsWith('.zip')) {
const resp = await apiCall('jsldata/download-jsl-data', {
uri: `zip://archive:${archiveFolder}//${archiveFile}.jsonl`,
});
extractedJslId = resp.jslid;
}
jslidChecked = true;
}
@@ -166,7 +176,7 @@
<ToolStripContainer>
{#if jslidChecked || !jslid}
<JslDataGrid
jslid={jslid || `archive://${archiveFolder}/${archiveFile}`}
jslid={extractedJslId || jslid || `archive://${archiveFolder}/${archiveFile}`}
supportsReload
allowChangeChangeSetStructure
changeSetState={$changeSetStore}

View File

@@ -1,469 +0,0 @@
<script lang="ts" context="module">
const getCurrentEditor = () => getActiveComponent('DataDuplicatorTab');
registerCommand({
id: 'dataDuplicator.run',
category: 'Data duplicator',
name: 'Import into DB',
keyText: 'F5 | CtrlOrCommand+Enter',
toolbar: true,
isRelatedToTab: true,
icon: 'icon run',
testEnabled: () => getCurrentEditor()?.canRun(),
onClick: () => getCurrentEditor().run(),
});
registerCommand({
id: 'dataDuplicator.kill',
category: 'Data duplicator',
icon: 'icon close',
name: 'Kill',
toolbar: true,
isRelatedToTab: true,
testEnabled: () => getCurrentEditor()?.canKill(),
onClick: () => getCurrentEditor().kill(),
});
registerCommand({
id: 'dataDuplicator.generateScript',
category: 'Data duplicator',
icon: 'img shell',
name: 'Generate Script',
toolbar: true,
isRelatedToTab: true,
testEnabled: () => getCurrentEditor()?.canRun(),
onClick: () => getCurrentEditor().generateScript(),
});
</script>
<script lang="ts">
import { ScriptWriter, ScriptWriterJson } from 'dbgate-tools';
import _ from 'lodash';
import ToolStripCommandButton from '../buttons/ToolStripCommandButton.svelte';
import ToolStripContainer from '../buttons/ToolStripContainer.svelte';
import invalidateCommands from '../commands/invalidateCommands';
import registerCommand from '../commands/registerCommand';
import Link from '../elements/Link.svelte';
import ObjectConfigurationControl from '../elements/ObjectConfigurationControl.svelte';
import TableControl from '../elements/TableControl.svelte';
import VerticalSplitter from '../elements/VerticalSplitter.svelte';
import CheckboxField from '../forms/CheckboxField.svelte';
import FormFieldTemplateLarge from '../forms/FormFieldTemplateLarge.svelte';
import SelectField from '../forms/SelectField.svelte';
import FontIcon from '../icons/FontIcon.svelte';
import { extractShellConnection } from '../impexp/createImpExpScript';
import SocketMessageView from '../query/SocketMessageView.svelte';
import useEditorData from '../query/useEditorData';
import { getCurrentConfig } from '../stores';
import { apiCall, apiOff, apiOn } from '../utility/api';
import { changeTab } from '../utility/common';
import createActivator, { getActiveComponent } from '../utility/createActivator';
import { useArchiveFiles, useArchiveFolders, useConnectionInfo, useDatabaseInfo } from '../utility/metadataLoaders';
import openNewTab from '../utility/openNewTab';
import useEffect from '../utility/useEffect';
import useTimerLabel from '../utility/useTimerLabel';
import appObjectTypes from '../appobj';
// Connection id and database name of the target database; tabid identifies this tab.
export let conid;
export let database;
export let tabid;

// True while a duplicator run is in progress (disables Run, enables Kill).
let busy = false;
// Id of the currently started runner process; used for event subscription and cancel.
let runnerId = null;
// Bumped on each run so SocketMessageView clears its previous messages.
let executeNumber = 0;

export const activator = createActivator('DataDuplicatorTab', true);

const timerLabel = useTimerLabel();

// Reactive metadata stores for the selected connection/database and archives.
$: connection = useConnectionInfo({ conid });
$: dbinfo = useDatabaseInfo({ conid, database });
$: archiveFolders = useArchiveFolders();
$: archiveFiles = useArchiveFiles({ folder: $editorState?.value?.archiveFolder });

// Archive file names that have a database table of the same name
// (case-insensitive match), sorted alphabetically.
$: pairedNames = _.sortBy(
  _.intersectionBy(
    $dbinfo?.tables?.map(x => x.pureName),
    $archiveFiles?.map(x => x.name),
    (x: string) => _.toUpper(x)
  )
);

// Propagate busy state into the tab header.
$: {
  changeTab(tabid, tab => ({ ...tab, busy }));
}

// Re-evaluate command enablement whenever run state or table rows change.
$: {
  busy;
  runnerId;
  tableRows;
  invalidateCommands();
}

// Persisted editor state: archive folder, per-table settings, option flags.
const { editorState, editorValue, setEditorData } = useEditorData({
  tabid,
  onInitialData: value => {
    invalidateCommands();
  },
});
// Persists the edited per-table settings row under its table name in editor state.
function changeTable(row) {
  setEditorData(old => {
    const tables = { ...old?.tables, [row.name]: row };
    return { ...old, tables };
  });
}
// Builds the data-duplicator script from the current configuration.
// Emits a JSON job definition unless shell scripting is allowed by config
// (or forced by the caller, e.g. for the "Generate Script" command).
function createScript(forceScript = false) {
  const config = getCurrentConfig();
  const useShell = config.allowShellScripting || forceScript;
  const writer = useShell ? new ScriptWriter() : new ScriptWriterJson();

  // Only checked tables are included in the job.
  const items = tableRows
    .filter(row => row.isChecked)
    .map(row => ({
      name: row.name,
      operation: row.operation,
      matchColumns: _.compact([row.matchColumn1]),
    }));

  writer.dataDuplicator({
    connection: extractShellConnection($connection, database),
    archive: $editorState.value.archiveFolder,
    items,
    options: {
      rollbackAfterFinish: !!$editorState.value?.rollbackAfterFinish,
      skipRowsWithUnresolvedRefs: !!$editorState.value?.skipRowsWithUnresolvedRefs,
      setNullForUnresolvedNullableRefs: !!$editorState.value?.setNullForUnresolvedNullableRefs,
    },
  });

  return writer.getScript();
}
// Run is allowed when no job is active and at least one table is checked.
export function canRun() {
  return !busy && tableRows.some(x => x.isChecked);
}
// Starts the duplicator runner with the generated script and begins the timer.
// Fixes: removed the dead `runid` local (its initial value from `runnerId` was
// never used), and `busy` is now reset if starting the runner throws - the
// original left the tab permanently locked because no runner-done event would
// ever arrive for a runner that failed to start.
export async function run() {
  if (busy) return;
  executeNumber += 1;
  busy = true;
  try {
    const script = await createScript();
    const resp = await apiCall('runners/start', { script });
    runnerId = resp.runid;
    timerLabel.start();
  } catch (err) {
    // Startup failed - no runner-done event will fire, so unlock here.
    busy = false;
    throw err;
  }
}
// Generates the shell script and opens it in a new shell tab for review/editing.
export async function generateScript() {
  const code = await createScript();
  const tabDefinition = {
    title: 'Shell #',
    icon: 'img shell',
    tabComponent: 'ShellTab',
  };
  openNewTab(tabDefinition, { editor: code });
}
// Re-subscribe to the runner-done event whenever runnerId changes.
$: effect = useEffect(() => registerRunnerDone(runnerId));

// Subscribes to the runner-done event for the given runner id and returns
// the matching unsubscribe callback for useEffect cleanup. With no runner,
// returns a no-op cleanup.
function registerRunnerDone(rid) {
  if (!rid) {
    return () => {};
  }
  apiOn(`runner-done-${rid}`, handleRunnerDone);
  return () => {
    apiOff(`runner-done-${rid}`, handleRunnerDone);
  };
}

$: $effect;
// Runner finished (any outcome) - unlock the tab and stop the elapsed-time label.
const handleRunnerDone = () => {
  busy = false;
  timerLabel.stop();
};
// Killing is only possible while an import is running.
export function canKill() {
  return busy;
}

// Cancels the running duplicator job and stops the elapsed-time label.
// The busy flag is cleared later by the runner-done event handler.
export function kill() {
  apiCall('runners/cancel', { runid: runnerId });
  timerLabel.stop();
}
// $: console.log('$archiveFiles', $archiveFiles);
// $: console.log('$editorState', $editorState.value);

// One row per paired archive-file/table; merges saved per-table settings with
// defaults: checked, 'copy' operation, match column = first PK column or first column.
$: tableRows = pairedNames.map(name => {
  const item = $editorState?.value?.tables?.[name];
  const isChecked = item?.isChecked ?? true;
  const operation = item?.operation ?? 'copy';
  // Case-insensitive table lookup, matching how pairedNames was computed.
  const tableInfo = $dbinfo?.tables?.find(x => x.pureName?.toUpperCase() == name.toUpperCase());
  const matchColumn1 =
    item?.matchColumn1 ?? tableInfo?.primaryKey?.columns?.[0]?.columnName ?? tableInfo?.columns?.[0]?.columnName;
  return {
    name,
    isChecked,
    operation,
    matchColumn1,
    file: name,
    // Display name: schema-qualified when the table has a schema.
    table: tableInfo?.schemaName ? `${tableInfo?.schemaName}.${tableInfo?.pureName}` : tableInfo?.pureName,
    schemaName: tableInfo?.schemaName,
    pureName: tableInfo?.pureName,
    tableInfo,
  };
});
// $: console.log('$archiveFolders', $archiveFolders);
// Returns a click handler that checks/unchecks every paired table at once,
// preserving any other per-table settings already stored in editor state.
const changeCheckStatus = isChecked => () => {
  setEditorData(old => {
    const updated = Object.fromEntries(
      pairedNames.map(name => [name, { ...old?.tables?.[name], isChecked }])
    );
    return {
      ...old,
      tables: { ...old?.tables, ...updated },
    };
  });
};
</script>
<ToolStripContainer>
  <VerticalSplitter initialValue="70%">
    <!-- Top pane: duplicator configuration and per-table import settings -->
    <svelte:fragment slot="1">
      <div class="wrapper">
        <ObjectConfigurationControl title="Configuration">
          <!-- Source archive folder whose files are offered for import -->
          <FormFieldTemplateLarge label="Source archive" type="combo">
            <SelectField
              isNative
              value={$editorState.value?.archiveFolder}
              on:change={e => {
                setEditorData(old => ({
                  ...old,
                  archiveFolder: e.detail,
                }));
              }}
              options={$archiveFolders?.map(x => ({
                label: x.name,
                value: x.name,
              })) || []}
            />
          </FormFieldTemplateLarge>
          <!-- Option checkboxes; clicking the label toggles the value as well -->
          <FormFieldTemplateLarge
            label="Dry run - no changes (rollback when finished)"
            type="checkbox"
            labelProps={{
              onClick: () => {
                setEditorData(old => ({
                  ...old,
                  rollbackAfterFinish: !$editorState.value?.rollbackAfterFinish,
                }));
              },
            }}
          >
            <CheckboxField
              checked={$editorState.value?.rollbackAfterFinish}
              on:change={e => {
                setEditorData(old => ({
                  ...old,
                  rollbackAfterFinish: e.target.checked,
                }));
              }}
            />
          </FormFieldTemplateLarge>
          <FormFieldTemplateLarge
            label="Skip rows with unresolved mandatory references"
            type="checkbox"
            labelProps={{
              onClick: () => {
                setEditorData(old => ({
                  ...old,
                  skipRowsWithUnresolvedRefs: !$editorState.value?.skipRowsWithUnresolvedRefs,
                }));
              },
            }}
          >
            <CheckboxField
              checked={$editorState.value?.skipRowsWithUnresolvedRefs}
              on:change={e => {
                setEditorData(old => ({
                  ...old,
                  skipRowsWithUnresolvedRefs: e.target.checked,
                }));
              }}
            />
          </FormFieldTemplateLarge>
          <FormFieldTemplateLarge
            label="Set NULL for nullable unresolved references"
            type="checkbox"
            labelProps={{
              onClick: () => {
                setEditorData(old => ({
                  ...old,
                  setNullForUnresolvedNullableRefs: !$editorState.value?.setNullForUnresolvedNullableRefs,
                }));
              },
            }}
          >
            <CheckboxField
              checked={$editorState.value?.setNullForUnresolvedNullableRefs}
              on:change={e => {
                setEditorData(old => ({
                  ...old,
                  setNullForUnresolvedNullableRefs: e.target.checked,
                }));
              }}
            />
          </FormFieldTemplateLarge>
        </ObjectConfigurationControl>
        <!-- Per-table settings for each archive file paired with a DB table -->
        <ObjectConfigurationControl title="Imported files">
          <div class="mb-2">
            <Link onClick={changeCheckStatus(true)}>Check all</Link>
            |
            <Link onClick={changeCheckStatus(false)}>Uncheck all</Link>
          </div>
          <TableControl
            rows={tableRows}
            columns={[
              { header: '', fieldName: 'isChecked', slot: 1 },
              { header: 'Source file', fieldName: 'file', slot: 4 },
              { header: 'Target table', fieldName: 'table', slot: 5 },
              { header: 'Operation', fieldName: 'operation', slot: 2 },
              { header: 'Match column', fieldName: 'matchColumn1', slot: 3 },
            ]}
          >
            <!-- Slot 1: include/exclude the table from the import -->
            <svelte:fragment slot="1" let:row>
              <CheckboxField
                checked={row.isChecked}
                on:change={e => {
                  changeTable({ ...row, isChecked: e.target.checked });
                }}
              />
            </svelte:fragment>
            <!-- Slot 2: import operation for the table -->
            <svelte:fragment slot="2" let:row>
              <SelectField
                isNative
                value={row.operation}
                on:change={e => {
                  changeTable({ ...row, operation: e.detail });
                }}
                disabled={!row.isChecked}
                options={[
                  { label: 'Copy row', value: 'copy' },
                  { label: 'Lookup (find matching row)', value: 'lookup' },
                  { label: 'Insert if not exists', value: 'insertMissing' },
                ]}
              />
            </svelte:fragment>
            <!-- Slot 3: match column - only shown for non-copy operations -->
            <svelte:fragment slot="3" let:row>
              {#if row.operation != 'copy'}
                <SelectField
                  isNative
                  value={row.matchColumn1}
                  on:change={e => {
                    changeTable({ ...row, matchColumn1: e.detail });
                  }}
                  disabled={!row.isChecked}
                  options={$dbinfo?.tables
                    ?.find(x => x.pureName?.toUpperCase() == row.name.toUpperCase())
                    ?.columns?.map(col => ({
                      label: col.columnName,
                      value: col.columnName,
                    })) || []}
                />
              {/if}
            </svelte:fragment>
            <!-- Slot 4: source archive file link - opens the archive file tab -->
            <svelte:fragment slot="4" let:row>
              <Link
                onClick={() => {
                  openNewTab({
                    title: row.file,
                    icon: 'img archive',
                    tooltip: `${$editorState.value?.archiveFolder}\n${row.file}`,
                    tabComponent: 'ArchiveFileTab',
                    props: {
                      archiveFile: row.file,
                      archiveFolder: $editorState.value?.archiveFolder,
                    },
                  });
                }}><FontIcon icon="img archive" /> {row.file}</Link
              >
            </svelte:fragment>
            <!-- Slot 5: target table link with object context menu - opens table data tab -->
            <svelte:fragment slot="5" let:row>
              <Link
                menu={appObjectTypes.DatabaseObjectAppObject.createAppObjectMenu({ ...row.tableInfo, conid, database })}
                onClick={() => {
                  openNewTab({
                    title: row.pureName,
                    icon: 'img table',
                    tabComponent: 'TableDataTab',
                    props: {
                      schemaName: row.schemaName,
                      pureName: row.pureName,
                      conid,
                      database,
                      objectTypeField: 'tables',
                    },
                  });
                }}><FontIcon icon="img table" /> {row.table}</Link
              >
            </svelte:fragment>
          </TableControl>
        </ObjectConfigurationControl>
      </div>
    </svelte:fragment>
    <!-- Bottom pane: live runner log messages -->
    <svelte:fragment slot="2">
      <SocketMessageView
        eventName={runnerId ? `runner-info-${runnerId}` : null}
        {executeNumber}
        showNoMessagesAlert
        showCaller
      />
    </svelte:fragment>
  </VerticalSplitter>
  <svelte:fragment slot="toolstrip">
    <ToolStripCommandButton command="dataDuplicator.run" data-testid="DataDuplicatorTab_importIntoDb" />
    <ToolStripCommandButton command="dataDuplicator.kill" data-testid="DataDuplicatorTab_kill" />
    <ToolStripCommandButton command="dataDuplicator.generateScript" data-testid="DataDuplicatorTab_generateScript" />
  </svelte:fragment>
</ToolStripContainer>
<!-- <div>
{#each pairedNames as name}
<div>{name}</div>
{/each}
</div> -->
<!-- <style>
.title {
font-weight: bold;
}
</style> -->
<style>
  /* Scrollable column filling the top pane of the splitter */
  .wrapper {
    overflow-y: auto;
    background-color: var(--theme-bg-0);
    flex: 1;
    display: flex;
    flex-direction: column;
  }
</style>

View File

@@ -2,12 +2,12 @@
const getCurrentEditor = () => getActiveComponent('ImportExportTab');
registerFileCommands({
idPrefix: 'job',
category: 'Job',
idPrefix: 'impexp',
category: 'Import & Export',
getCurrentEditor,
folder: 'jobs',
folder: 'impexp',
format: 'json',
fileExtension: 'job',
fileExtension: 'impexp',
// undoRedo: true,
});
@@ -319,7 +319,7 @@
<ToolStripButton icon="img shell" on:click={handleGenerateScript} data-testid="ImportExportTab_generateScriptButton"
>Generate script</ToolStripButton
>
<ToolStripSaveButton idPrefix="job" />
<ToolStripSaveButton idPrefix="impexp" />
</svelte:fragment>
</ToolStripContainer>

View File

@@ -24,7 +24,6 @@ import * as ConnectionTab from './ConnectionTab.svelte';
import * as MapTab from './MapTab.svelte';
import * as ServerSummaryTab from './ServerSummaryTab.svelte';
import * as ProfilerTab from './ProfilerTab.svelte';
import * as DataDuplicatorTab from './DataDuplicatorTab.svelte';
import * as ImportExportTab from './ImportExportTab.svelte';
import * as SqlObjectTab from './SqlObjectTab.svelte';
@@ -57,7 +56,6 @@ export default {
MapTab,
ServerSummaryTab,
ProfilerTab,
DataDuplicatorTab,
ImportExportTab,
SqlObjectTab,
...protabs,

View File

@@ -184,7 +184,7 @@ export async function exportQuickExportFile(dataName, reader, format: QuickExpor
export async function saveFileToDisk(
filePathFunc,
options: any = { formatLabel: 'HTML page', formatExtension: 'html' }
options: any = { formatLabel: 'HTML page', formatExtension: 'html', defaultFileName: null }
) {
const { formatLabel, formatExtension } = options;
const electron = getElectron();
@@ -193,7 +193,7 @@ export async function saveFileToDisk(
const filters = [{ name: formatLabel, extensions: [formatExtension] }];
const filePath = await electron.showSaveDialog({
filters,
defaultPath: `file.${formatExtension}`,
defaultPath: options.defaultFileName ?? `file.${formatExtension}`,
properties: ['showOverwriteConfirmation'],
});
if (!filePath) return;
@@ -202,7 +202,7 @@ export async function saveFileToDisk(
} else {
const resp = await apiCall('files/generate-uploads-file');
await filePathFunc(resp.filePath);
await downloadFromApi(`uploads/get?file=${resp.fileName}`, `file.${formatExtension}`);
await downloadFromApi(`uploads/get?file=${resp.fileName}`, options.defaultFileName ?? `file.${formatExtension}`);
}
}

View File

@@ -1,6 +1,6 @@
export default function formatFileSize(size) {
if (size > 1000000000) return `${Math.round(size / 10000000000) * 10} GB`;
if (size > 1000000) return `${Math.round(size / 10000000) * 10} MB`;
if (size > 1000) return `${Math.round(size / 10000) * 10} KB`;
if (size > 1000000000) return `${Math.round(size / 100000000) / 10} GB`;
if (size > 1000000) return `${Math.round(size / 100000) / 10} MB`;
if (size > 1000) return `${Math.round(size / 100) / 10} KB`;
return `${size} bytes`;
}

View File

@@ -14,6 +14,8 @@
import { apiCall } from '../utility/api';
import { useArchiveFolders } from '../utility/metadataLoaders';
import WidgetsInnerContainer from './WidgetsInnerContainer.svelte';
import InlineUploadButton from '../buttons/InlineUploadButton.svelte';
import { isProApp } from '../utility/proTools';
let filter = '';
@@ -22,11 +24,47 @@
const handleRefreshFolders = () => {
apiCall('archive/refresh-folders');
};
async function handleUploadedFile(filePath, fileName) {
await apiCall('archive/save-uploaded-zip', { filePath, fileName });
}
</script>
<SearchBoxWrapper>
<SearchInput placeholder="Search archive folders" bind:value={filter} />
<CloseSearchButton bind:filter />
{#if isProApp()}
<InlineUploadButton
icon="icon upload"
filters={[
{
name: `All supported files`,
extensions: ['zip'],
},
{ name: `ZIP files`, extensions: ['zip'] },
]}
onProcessFile={handleUploadedFile}
/>
{/if}
<!-- {#if electron}
<InlineButton on:click={handleOpenElectronFile} title="Add file" data-testid="ArchiveFolderList_uploadZipFile">
<FontIcon icon="icon plus-thick" />
</InlineButton>
{:else}
<InlineButtonLabel
on:click={() => {}}
title="Add file"
data-testid="ArchiveFolderList_uploadZipFile"
htmlFor="uploadZipFileButton"
>
<FontIcon icon="icon plus-thick" />
</InlineButtonLabel>
{/if}
<input type="file" id="uploadZipFileButton" hidden on:change={handleUploadedFile} /> -->
<InlineButton on:click={() => runCommand('new.archiveFolder')} title="Add new archive folder">
<FontIcon icon="icon plus-thick" />
</InlineButton>

View File

@@ -0,0 +1,75 @@
<script lang="ts">
  import { evalFilterBehaviour } from 'dbgate-tools';
  import DataFilterControl from '../datagrid/DataFilterControl.svelte';
  import InlineButton from '../buttons/InlineButton.svelte';
  import SelectField from '../forms/SelectField.svelte';
  import _ from 'lodash';
  import FontIcon from '../icons/FontIcon.svelte';

  // Map of column name -> filter expression.
  // NOTE(review): "compoud" looks like a typo for "compound", but it is part of
  // the public prop interface - renaming would break callers; confirm before fixing.
  export let compoudFilter: { [key: string]: string };
  // Called with the complete new filter map whenever any part of it changes.
  export let onSetCompoudFilter;
  // Column names offered in the column selectors.
  export let columnNames: string[];
  export let filterBehaviour = evalFilterBehaviour;

  // Columns currently present in the filter; fall back to the first column
  // so at least one filter row is always rendered.
  $: columnsReal = Object.keys(compoudFilter || {});
  $: columnsUsed = columnsReal.length > 0 ? columnsReal : [columnNames[0]];
</script>

{#each columnsUsed as column, index}
  <div class="flex">
    <!-- Column selector; renaming a key keeps its filter value (zip of keys/values) -->
    <SelectField
      isNative
      value={column}
      on:change={e => {
        const keys = Object.keys(compoudFilter || {});
        const values = Object.values(compoudFilter || {});
        keys[index] = e.detail;
        const newFilter = _.zipObject(keys, values);
        onSetCompoudFilter(newFilter);
      }}
      options={columnNames.map(col => ({
        label: col,
        value: col,
      })) || []}
    />
    <DataFilterControl
      {filterBehaviour}
      filter={compoudFilter?.[column] ?? ''}
      setFilter={value => {
        onSetCompoudFilter({
          ...compoudFilter,
          [column]: value,
        });
      }}
      placeholder="Filter"
    />
    <!-- First row carries the "add column" button; other rows carry "remove" -->
    {#if index == 0}
      <InlineButton
        on:click={() => {
          const newColumn = columnNames.find(x => !columnsUsed.includes(x));
          if (!newColumn) return;
          onSetCompoudFilter({
            ...compoudFilter,
            [newColumn]: '',
          });
        }}
        title="Add filter column"
        square
      >
        <FontIcon icon="icon plus-thick" />
      </InlineButton>
    {:else}
      <InlineButton
        on:click={() => {
          onSetCompoudFilter(_.omit(compoudFilter, column));
        }}
        title="Remove filter column"
        square
      >
        <FontIcon icon="icon minus-thick" />
      </InlineButton>
    {/if}
  </div>
{/each}

View File

@@ -10,9 +10,8 @@
import { apiCall } from '../utility/api';
import { useFiles } from '../utility/metadataLoaders';
import WidgetsInnerContainer from './WidgetsInnerContainer.svelte';
import getElectron from '../utility/getElectron';
import InlineButtonLabel from '../buttons/InlineButtonLabel.svelte';
import resolveApi, { resolveApiHeaders } from '../utility/resolveApi';
import { isProApp } from '../utility/proTools';
import InlineUploadButton from '../buttons/InlineUploadButton.svelte';
let filter = '';
@@ -23,12 +22,12 @@
const queryFiles = useFiles({ folder: 'query' });
const sqliteFiles = useFiles({ folder: 'sqlite' });
const diagramFiles = useFiles({ folder: 'diagrams' });
const jobFiles = useFiles({ folder: 'jobs' });
const importExportJobFiles = useFiles({ folder: 'impexp' });
const dataDeployJobFiles = useFiles({ folder: 'datadeploy' });
const dbCompareJobFiles = useFiles({ folder: 'dbcompare' });
const perspectiveFiles = useFiles({ folder: 'perspectives' });
const modelTransformFiles = useFiles({ folder: 'modtrans' });
const electron = getElectron();
$: files = [
...($sqlFiles || []),
...($shellFiles || []),
@@ -38,8 +37,10 @@
...($sqliteFiles || []),
...($diagramFiles || []),
...($perspectiveFiles || []),
...($jobFiles || []),
...($importExportJobFiles || []),
...($modelTransformFiles || []),
...((isProApp() && $dataDeployJobFiles) || []),
...((isProApp() && $dbCompareJobFiles) || []),
];
function handleRefreshFiles() {
@@ -53,50 +54,23 @@
'sqlite',
'diagrams',
'perspectives',
'jobs',
'impexp',
'modtrans',
'datadeploy',
'dbcompare',
],
});
}
function dataFolderTitle(folder) {
if (folder == 'modtrans') return 'Model transforms';
if (folder == 'datadeploy') return 'Data deploy jobs';
if (folder == 'dbcompare') return 'Database compare jobs';
return _.startCase(folder);
}
async function handleUploadedFile(e) {
const files = [...e.target.files];
for (const file of files) {
const formData = new FormData();
formData.append('name', file.name);
formData.append('data', file);
const fetchOptions = {
method: 'POST',
body: formData,
headers: resolveApiHeaders(),
};
const apiBase = resolveApi();
const resp = await fetch(`${apiBase}/uploads/upload-data-file`, fetchOptions);
const fileData = await resp.json();
}
}
async function handleOpenElectronFile() {
const filePaths = await electron.showOpenDialog({
filters: [
{
name: `All supported files`,
extensions: ['sql'],
},
{ name: `SQL files`, extensions: ['sql'] },
],
properties: ['showHiddenFiles', 'openFile'],
});
const filePath = filePaths && filePaths[0];
await apiCall('uploads/save-data-file', { filePath });
async function handleUploadedFile(filePath, fileName) {
await apiCall('files/save-uploaded-file', { filePath, fileName });
}
</script>
@@ -104,26 +78,20 @@
<SearchBoxWrapper>
<SearchInput placeholder="Search saved files" bind:value={filter} />
<CloseSearchButton bind:filter />
{#if electron}
<InlineButton on:click={handleOpenElectronFile} title="Add file" data-testid="SavedFileList_buttonAddFile">
<FontIcon icon="icon plus-thick" />
</InlineButton>
{:else}
<InlineButtonLabel
on:click={() => {}}
title="Add file"
data-testid="SavedFileList_buttonAddFile"
htmlFor="uploadSavedFileButton"
>
<FontIcon icon="icon plus-thick" />
</InlineButtonLabel>
{/if}
<InlineUploadButton
filters={[
{
name: `All supported files`,
extensions: ['sql'],
},
{ name: `SQL files`, extensions: ['sql'] },
]}
onProcessFile={handleUploadedFile}
/>
<InlineButton on:click={handleRefreshFiles} title="Refresh files" data-testid="SavedFileList_buttonRefresh">
<FontIcon icon="icon refresh" />
</InlineButton>
</SearchBoxWrapper>
<input type="file" id="uploadSavedFileButton" hidden on:change={handleUploadedFile} />
<AppObjectList list={files} module={savedFileAppObject} groupFunc={data => dataFolderTitle(data.folder)} {filter} />
</WidgetsInnerContainer>

1711
yarn.lock

File diff suppressed because it is too large Load Diff