mirror of
https://github.com/DeNNiiInc/dbgate.git
synced 2026-04-17 21:26:00 +00:00
Merge branch 'master' into feature/duckdb-2
This commit is contained in:
2
.github/workflows/build-app-pro-beta.yaml
vendored
2
.github/workflows/build-app-pro-beta.yaml
vendored
@@ -39,7 +39,7 @@ jobs:
|
||||
repository: dbgate/dbgate-pro
|
||||
token: ${{ secrets.GH_TOKEN }}
|
||||
path: dbgate-pro
|
||||
ref: 00da2abe10e1ec8a3887b49dfabd42ccda365514
|
||||
ref: bc38030228a7d77b45032476b4d920b830120d3d
|
||||
- name: Merge dbgate/dbgate-pro
|
||||
run: |
|
||||
mkdir ../dbgate-pro
|
||||
|
||||
2
.github/workflows/build-app-pro.yaml
vendored
2
.github/workflows/build-app-pro.yaml
vendored
@@ -39,7 +39,7 @@ jobs:
|
||||
repository: dbgate/dbgate-pro
|
||||
token: ${{ secrets.GH_TOKEN }}
|
||||
path: dbgate-pro
|
||||
ref: 00da2abe10e1ec8a3887b49dfabd42ccda365514
|
||||
ref: bc38030228a7d77b45032476b4d920b830120d3d
|
||||
- name: Merge dbgate/dbgate-pro
|
||||
run: |
|
||||
mkdir ../dbgate-pro
|
||||
|
||||
2
.github/workflows/build-cloud-pro.yaml
vendored
2
.github/workflows/build-cloud-pro.yaml
vendored
@@ -39,7 +39,7 @@ jobs:
|
||||
repository: dbgate/dbgate-pro
|
||||
token: ${{ secrets.GH_TOKEN }}
|
||||
path: dbgate-pro
|
||||
ref: 00da2abe10e1ec8a3887b49dfabd42ccda365514
|
||||
ref: bc38030228a7d77b45032476b4d920b830120d3d
|
||||
- name: Merge dbgate/dbgate-pro
|
||||
run: |
|
||||
mkdir ../dbgate-pro
|
||||
|
||||
2
.github/workflows/build-docker-pro.yaml
vendored
2
.github/workflows/build-docker-pro.yaml
vendored
@@ -44,7 +44,7 @@ jobs:
|
||||
repository: dbgate/dbgate-pro
|
||||
token: ${{ secrets.GH_TOKEN }}
|
||||
path: dbgate-pro
|
||||
ref: 00da2abe10e1ec8a3887b49dfabd42ccda365514
|
||||
ref: bc38030228a7d77b45032476b4d920b830120d3d
|
||||
- name: Merge dbgate/dbgate-pro
|
||||
run: |
|
||||
mkdir ../dbgate-pro
|
||||
|
||||
2
.github/workflows/build-npm-pro.yaml
vendored
2
.github/workflows/build-npm-pro.yaml
vendored
@@ -32,7 +32,7 @@ jobs:
|
||||
repository: dbgate/dbgate-pro
|
||||
token: ${{ secrets.GH_TOKEN }}
|
||||
path: dbgate-pro
|
||||
ref: 00da2abe10e1ec8a3887b49dfabd42ccda365514
|
||||
ref: bc38030228a7d77b45032476b4d920b830120d3d
|
||||
- name: Merge dbgate/dbgate-pro
|
||||
run: |
|
||||
mkdir ../dbgate-pro
|
||||
|
||||
2
.github/workflows/e2e-pro.yaml
vendored
2
.github/workflows/e2e-pro.yaml
vendored
@@ -26,7 +26,7 @@ jobs:
|
||||
repository: dbgate/dbgate-pro
|
||||
token: ${{ secrets.GH_TOKEN }}
|
||||
path: dbgate-pro
|
||||
ref: 00da2abe10e1ec8a3887b49dfabd42ccda365514
|
||||
ref: bc38030228a7d77b45032476b4d920b830120d3d
|
||||
- name: Merge dbgate/dbgate-pro
|
||||
run: |
|
||||
mkdir ../dbgate-pro
|
||||
|
||||
20
CHANGELOG.md
20
CHANGELOG.md
@@ -8,6 +8,26 @@ Builds:
|
||||
- linux - application for linux
|
||||
- win - application for Windows
|
||||
|
||||
## 6.4.0 - not released yet
|
||||
- ADDED: Data deployer (Premium)
|
||||
- ADDED: Compare data between JSON lines file in archive and database table
|
||||
- CHANGED: Data Duplicator => Data Replicator (suitable for update, create and delete data, much more customizable)
|
||||
- REMOVED: Data duplicator GUI (replaced with Data Deployer)
|
||||
- ADDED: Exporting to ZIP file
|
||||
- ADDED: Download SQL and SQLite files
|
||||
- ADDED: Upload SQLite files
|
||||
- ADDED: Upload archive as ZIP folder (Premium)
|
||||
- ADDED: Compress, uncompress archive folder (Premium)
|
||||
|
||||
## 6.3.3
|
||||
- CHANGED: New administration UI, redesigned administration of users, connections and roles
|
||||
- ADDED: Encrypting passwords in team-premium edition
|
||||
- ADDED: Show scale bar on map #1090
|
||||
- FIXED: Fixed native backup/restore for MySQL+PostgreSQL over SSH tunnel #1092
|
||||
- CHANGED: Column mapping dialog - fixes and improvements for copying from one existing table into another
|
||||
- ADDED: Search in columns in table editor
|
||||
- ADDED: Line Wrap for JSON viewer #768
|
||||
|
||||
### 6.3.2
|
||||
- ADDED: "Use system theme" switch, use changed system theme without restart #1084
|
||||
- ADDED: "Skip SETNAME instruction" option for Redis #1077
|
||||
|
||||
@@ -88,6 +88,9 @@ module.exports = ({ editMenu, isMac }) => [
|
||||
{ command: 'folder.showData', hideDisabled: true },
|
||||
{ command: 'new.gist', hideDisabled: true },
|
||||
{ command: 'app.resetSettings', hideDisabled: true },
|
||||
{ divider: true },
|
||||
{ command: 'app.exportConnections', hideDisabled: true },
|
||||
{ command: 'app.importConnections', hideDisabled: true },
|
||||
],
|
||||
},
|
||||
...(isMac
|
||||
|
||||
@@ -126,7 +126,7 @@ describe('Data browser data', () => {
|
||||
cy.themeshot('data-browser-form-view');
|
||||
});
|
||||
|
||||
it.only('Column search', () => {
|
||||
it('Column search', () => {
|
||||
cy.contains('MySql-connection').click();
|
||||
cy.contains('MyChinook').click();
|
||||
cy.contains('Customer').click();
|
||||
@@ -468,15 +468,24 @@ describe('Data browser data', () => {
|
||||
cy.themeshot('database-model-table-yaml');
|
||||
});
|
||||
|
||||
it('Data duplicator', () => {
|
||||
it('Data replicator', () => {
|
||||
cy.contains('MySql-connection').click();
|
||||
cy.contains('MyChinook').click();
|
||||
cy.testid('WidgetIconPanel_archive').click();
|
||||
cy.contains('chinook-archive').rightclick();
|
||||
cy.contains('Data duplicator').click();
|
||||
cy.contains('Data deployer').click();
|
||||
cy.contains('Dry run').click();
|
||||
cy.testid('DataDuplicatorTab_importIntoDb').click();
|
||||
cy.contains('Duplicated Album, inserted 347 rows, mapped 0 rows, missing 0 rows, skipped 0 rows');
|
||||
cy.themeshot('data-duplicator');
|
||||
cy.testid('TableControl_row_2_checkbox').click();
|
||||
cy.testid('TableControl_row_2').click();
|
||||
cy.testid('DataDeploySettings_find_checkbox').click();
|
||||
cy.testid('DataDeploySettings_create_checkbox').click();
|
||||
cy.testid('WidgetIconPanel_archive').click();
|
||||
cy.themeshot('data-deployer');
|
||||
cy.testid('DataDeployTab_importIntoDb').click();
|
||||
cy.testid('ConfirmDataDeployModal_okButton').click();
|
||||
cy.contains('Replicated Customer, inserted 59 rows');
|
||||
cy.contains('Finished job script');
|
||||
cy.testid('DataDeployTab_importIntoDb').click();
|
||||
cy.themeshot('data-replicator');
|
||||
});
|
||||
});
|
||||
|
||||
@@ -18,6 +18,10 @@ describe('Team edition tests', () => {
|
||||
cy.contains('logged-user').click();
|
||||
cy.themeshot('role-administration');
|
||||
|
||||
cy.testid('AdminMenuWidget_itemUsers').click();
|
||||
cy.contains('New user').click();
|
||||
cy.themeshot('user-administration');
|
||||
|
||||
cy.testid('AdminMenuWidget_itemAuthentication').click();
|
||||
cy.contains('Add authentication').click();
|
||||
cy.contains('Use database login').click();
|
||||
|
||||
@@ -21,8 +21,8 @@ services:
|
||||
build: containers/mysql-ssh-login
|
||||
restart: always
|
||||
ports:
|
||||
- 16005:3306
|
||||
- "16015:22"
|
||||
- 16017:3306
|
||||
- "16012:22"
|
||||
|
||||
mysql-ssh-keyfile:
|
||||
build: containers/mysql-ssh-keyfile
|
||||
|
||||
@@ -1,160 +0,0 @@
|
||||
const engines = require('../engines');
|
||||
const stream = require('stream');
|
||||
const { testWrapper } = require('../tools');
|
||||
const dataDuplicator = require('dbgate-api/src/shell/dataDuplicator');
|
||||
const { runCommandOnDriver, runQueryOnDriver } = require('dbgate-tools');
|
||||
|
||||
describe('Data duplicator', () => {
|
||||
test.each(engines.filter(x => !x.skipDataDuplicator).map(engine => [engine.label, engine]))(
|
||||
'Insert simple data - %s',
|
||||
testWrapper(async (conn, driver, engine) => {
|
||||
runCommandOnDriver(conn, driver, dmp =>
|
||||
dmp.createTable({
|
||||
pureName: 't1',
|
||||
columns: [
|
||||
{ columnName: 'id', dataType: 'int', autoIncrement: true, notNull: true },
|
||||
{ columnName: 'val', dataType: 'varchar(50)' },
|
||||
],
|
||||
primaryKey: {
|
||||
columns: [{ columnName: 'id' }],
|
||||
},
|
||||
})
|
||||
);
|
||||
runCommandOnDriver(conn, driver, dmp =>
|
||||
dmp.createTable({
|
||||
pureName: 't2',
|
||||
columns: [
|
||||
{ columnName: 'id', dataType: 'int', autoIncrement: true, notNull: true },
|
||||
{ columnName: 'val', dataType: 'varchar(50)' },
|
||||
{ columnName: 'valfk', dataType: 'int', notNull: true },
|
||||
],
|
||||
primaryKey: {
|
||||
columns: [{ columnName: 'id' }],
|
||||
},
|
||||
foreignKeys: [{ refTableName: 't1', columns: [{ columnName: 'valfk', refColumnName: 'id' }] }],
|
||||
})
|
||||
);
|
||||
|
||||
const gett1 = () =>
|
||||
stream.Readable.from([
|
||||
{ __isStreamHeader: true, __isDynamicStructure: true },
|
||||
{ id: 1, val: 'v1' },
|
||||
{ id: 2, val: 'v2' },
|
||||
{ id: 3, val: 'v3' },
|
||||
]);
|
||||
const gett2 = () =>
|
||||
stream.Readable.from([
|
||||
{ __isStreamHeader: true, __isDynamicStructure: true },
|
||||
{ id: 1, val: 'v1', valfk: 1 },
|
||||
{ id: 2, val: 'v2', valfk: 2 },
|
||||
{ id: 3, val: 'v3', valfk: 3 },
|
||||
]);
|
||||
|
||||
await dataDuplicator({
|
||||
systemConnection: conn,
|
||||
driver,
|
||||
items: [
|
||||
{
|
||||
name: 't1',
|
||||
operation: 'copy',
|
||||
openStream: gett1,
|
||||
},
|
||||
{
|
||||
name: 't2',
|
||||
operation: 'copy',
|
||||
openStream: gett2,
|
||||
},
|
||||
],
|
||||
});
|
||||
|
||||
await dataDuplicator({
|
||||
systemConnection: conn,
|
||||
driver,
|
||||
items: [
|
||||
{
|
||||
name: 't1',
|
||||
operation: 'copy',
|
||||
openStream: gett1,
|
||||
},
|
||||
{
|
||||
name: 't2',
|
||||
operation: 'copy',
|
||||
openStream: gett2,
|
||||
},
|
||||
],
|
||||
});
|
||||
|
||||
const res1 = await runQueryOnDriver(conn, driver, dmp => dmp.put(`select count(*) as ~cnt from ~t1`));
|
||||
expect(res1.rows[0].cnt.toString()).toEqual('6');
|
||||
|
||||
const res2 = await runQueryOnDriver(conn, driver, dmp => dmp.put(`select count(*) as ~cnt from ~t2`));
|
||||
expect(res2.rows[0].cnt.toString()).toEqual('6');
|
||||
})
|
||||
);
|
||||
|
||||
test.each(engines.filter(x => !x.skipDataDuplicator).map(engine => [engine.label, engine]))(
|
||||
'Skip nullable weak refs - %s',
|
||||
testWrapper(async (conn, driver, engine) => {
|
||||
runCommandOnDriver(conn, driver, dmp =>
|
||||
dmp.createTable({
|
||||
pureName: 't1',
|
||||
columns: [
|
||||
{ columnName: 'id', dataType: 'int', notNull: true },
|
||||
{ columnName: 'val', dataType: 'varchar(50)' },
|
||||
],
|
||||
primaryKey: {
|
||||
columns: [{ columnName: 'id' }],
|
||||
},
|
||||
})
|
||||
);
|
||||
runCommandOnDriver(conn, driver, dmp =>
|
||||
dmp.createTable({
|
||||
pureName: 't2',
|
||||
columns: [
|
||||
{ columnName: 'id', dataType: 'int', autoIncrement: true, notNull: true },
|
||||
{ columnName: 'val', dataType: 'varchar(50)' },
|
||||
{ columnName: 'valfk', dataType: 'int', notNull: false },
|
||||
],
|
||||
primaryKey: {
|
||||
columns: [{ columnName: 'id' }],
|
||||
},
|
||||
foreignKeys: [{ refTableName: 't1', columns: [{ columnName: 'valfk', refColumnName: 'id' }] }],
|
||||
})
|
||||
);
|
||||
runCommandOnDriver(conn, driver, dmp => dmp.put("insert into ~t1 (~id, ~val) values (1, 'first')"));
|
||||
|
||||
const gett2 = () =>
|
||||
stream.Readable.from([
|
||||
{ __isStreamHeader: true, __isDynamicStructure: true },
|
||||
{ id: 1, val: 'v1', valfk: 1 },
|
||||
{ id: 2, val: 'v2', valfk: 2 },
|
||||
]);
|
||||
|
||||
await dataDuplicator({
|
||||
systemConnection: conn,
|
||||
driver,
|
||||
items: [
|
||||
{
|
||||
name: 't2',
|
||||
operation: 'copy',
|
||||
openStream: gett2,
|
||||
},
|
||||
],
|
||||
options: {
|
||||
setNullForUnresolvedNullableRefs: true,
|
||||
},
|
||||
});
|
||||
|
||||
const res1 = await runQueryOnDriver(conn, driver, dmp => dmp.put(`select count(*) as ~cnt from ~t1`));
|
||||
expect(res1.rows[0].cnt.toString()).toEqual('1');
|
||||
|
||||
const res2 = await runQueryOnDriver(conn, driver, dmp => dmp.put(`select count(*) as ~cnt from ~t2`));
|
||||
expect(res2.rows[0].cnt.toString()).toEqual('2');
|
||||
|
||||
const res3 = await runQueryOnDriver(conn, driver, dmp =>
|
||||
dmp.put(`select count(*) as ~cnt from ~t2 where ~valfk is not null`)
|
||||
);
|
||||
expect(res3.rows[0].cnt.toString()).toEqual('1');
|
||||
})
|
||||
);
|
||||
});
|
||||
306
integration-tests/__tests__/data-replicator.spec.js
Normal file
306
integration-tests/__tests__/data-replicator.spec.js
Normal file
@@ -0,0 +1,306 @@
|
||||
const engines = require('../engines');
|
||||
const stream = require('stream');
|
||||
const { testWrapper } = require('../tools');
|
||||
const dataReplicator = require('dbgate-api/src/shell/dataReplicator');
|
||||
const deployDb = require('dbgate-api/src/shell/deployDb');
|
||||
const storageModel = require('dbgate-api/src/storageModel');
|
||||
const { runCommandOnDriver, runQueryOnDriver } = require('dbgate-tools');
|
||||
|
||||
describe('Data replicator', () => {
|
||||
test.each(engines.filter(x => !x.skipDataReplicator).map(engine => [engine.label, engine]))(
|
||||
'Insert simple data - %s',
|
||||
testWrapper(async (conn, driver, engine) => {
|
||||
runCommandOnDriver(conn, driver, dmp =>
|
||||
dmp.createTable({
|
||||
pureName: 't1',
|
||||
columns: [
|
||||
{ columnName: 'id', dataType: 'int', autoIncrement: true, notNull: true },
|
||||
{ columnName: 'val', dataType: 'varchar(50)' },
|
||||
],
|
||||
primaryKey: {
|
||||
columns: [{ columnName: 'id' }],
|
||||
},
|
||||
})
|
||||
);
|
||||
runCommandOnDriver(conn, driver, dmp =>
|
||||
dmp.createTable({
|
||||
pureName: 't2',
|
||||
columns: [
|
||||
{ columnName: 'id', dataType: 'int', autoIncrement: true, notNull: true },
|
||||
{ columnName: 'val', dataType: 'varchar(50)' },
|
||||
{ columnName: 'valfk', dataType: 'int', notNull: true },
|
||||
],
|
||||
primaryKey: {
|
||||
columns: [{ columnName: 'id' }],
|
||||
},
|
||||
foreignKeys: [{ refTableName: 't1', columns: [{ columnName: 'valfk', refColumnName: 'id' }] }],
|
||||
})
|
||||
);
|
||||
|
||||
const gett1 = () =>
|
||||
stream.Readable.from([
|
||||
{ __isStreamHeader: true, __isDynamicStructure: true },
|
||||
{ id: 1, val: 'v1' },
|
||||
{ id: 2, val: 'v2' },
|
||||
{ id: 3, val: 'v3' },
|
||||
]);
|
||||
const gett2 = () =>
|
||||
stream.Readable.from([
|
||||
{ __isStreamHeader: true, __isDynamicStructure: true },
|
||||
{ id: 1, val: 'v1', valfk: 1 },
|
||||
{ id: 2, val: 'v2', valfk: 2 },
|
||||
{ id: 3, val: 'v3', valfk: 3 },
|
||||
]);
|
||||
|
||||
await dataReplicator({
|
||||
systemConnection: conn,
|
||||
driver,
|
||||
items: [
|
||||
{
|
||||
name: 't1',
|
||||
createNew: true,
|
||||
openStream: gett1,
|
||||
},
|
||||
{
|
||||
name: 't2',
|
||||
createNew: true,
|
||||
openStream: gett2,
|
||||
},
|
||||
],
|
||||
});
|
||||
|
||||
await dataReplicator({
|
||||
systemConnection: conn,
|
||||
driver,
|
||||
items: [
|
||||
{
|
||||
name: 't1',
|
||||
createNew: true,
|
||||
openStream: gett1,
|
||||
},
|
||||
{
|
||||
name: 't2',
|
||||
createNew: true,
|
||||
openStream: gett2,
|
||||
},
|
||||
],
|
||||
});
|
||||
|
||||
const res1 = await runQueryOnDriver(conn, driver, dmp => dmp.put(`select count(*) as ~cnt from ~t1`));
|
||||
expect(res1.rows[0].cnt.toString()).toEqual('6');
|
||||
|
||||
const res2 = await runQueryOnDriver(conn, driver, dmp => dmp.put(`select count(*) as ~cnt from ~t2`));
|
||||
expect(res2.rows[0].cnt.toString()).toEqual('6');
|
||||
})
|
||||
);
|
||||
|
||||
test.each(engines.filter(x => !x.skipDataReplicator).map(engine => [engine.label, engine]))(
|
||||
'Skip nullable weak refs - %s',
|
||||
testWrapper(async (conn, driver, engine) => {
|
||||
runCommandOnDriver(conn, driver, dmp =>
|
||||
dmp.createTable({
|
||||
pureName: 't1',
|
||||
columns: [
|
||||
{ columnName: 'id', dataType: 'int', notNull: true },
|
||||
{ columnName: 'val', dataType: 'varchar(50)' },
|
||||
],
|
||||
primaryKey: {
|
||||
columns: [{ columnName: 'id' }],
|
||||
},
|
||||
})
|
||||
);
|
||||
runCommandOnDriver(conn, driver, dmp =>
|
||||
dmp.createTable({
|
||||
pureName: 't2',
|
||||
columns: [
|
||||
{ columnName: 'id', dataType: 'int', autoIncrement: true, notNull: true },
|
||||
{ columnName: 'val', dataType: 'varchar(50)' },
|
||||
{ columnName: 'valfk', dataType: 'int', notNull: false },
|
||||
],
|
||||
primaryKey: {
|
||||
columns: [{ columnName: 'id' }],
|
||||
},
|
||||
foreignKeys: [{ refTableName: 't1', columns: [{ columnName: 'valfk', refColumnName: 'id' }] }],
|
||||
})
|
||||
);
|
||||
runCommandOnDriver(conn, driver, dmp => dmp.put("insert into ~t1 (~id, ~val) values (1, 'first')"));
|
||||
|
||||
await dataReplicator({
|
||||
systemConnection: conn,
|
||||
driver,
|
||||
items: [
|
||||
{
|
||||
name: 't2',
|
||||
createNew: true,
|
||||
jsonArray: [
|
||||
{ id: 1, val: 'v1', valfk: 1 },
|
||||
{ id: 2, val: 'v2', valfk: 2 },
|
||||
],
|
||||
},
|
||||
],
|
||||
options: {
|
||||
setNullForUnresolvedNullableRefs: true,
|
||||
},
|
||||
});
|
||||
|
||||
const res1 = await runQueryOnDriver(conn, driver, dmp => dmp.put(`select count(*) as ~cnt from ~t1`));
|
||||
expect(res1.rows[0].cnt.toString()).toEqual('1');
|
||||
|
||||
const res2 = await runQueryOnDriver(conn, driver, dmp => dmp.put(`select count(*) as ~cnt from ~t2`));
|
||||
expect(res2.rows[0].cnt.toString()).toEqual('2');
|
||||
|
||||
const res3 = await runQueryOnDriver(conn, driver, dmp =>
|
||||
dmp.put(`select count(*) as ~cnt from ~t2 where ~valfk is not null`)
|
||||
);
|
||||
expect(res3.rows[0].cnt.toString()).toEqual('1');
|
||||
})
|
||||
);
|
||||
|
||||
test.each(engines.filter(x => !x.skipDataReplicator).map(engine => [engine.label, engine]))(
|
||||
'Import storage DB - %s',
|
||||
testWrapper(async (conn, driver, engine) => {
|
||||
await deployDb({
|
||||
systemConnection: conn,
|
||||
driver,
|
||||
loadedDbModel: storageModel,
|
||||
targetSchema: engine.defaultSchemaName,
|
||||
});
|
||||
|
||||
async function queryValue(sql) {
|
||||
const res1 = await runQueryOnDriver(conn, driver, dmp => dmp.put(sql));
|
||||
return res1.rows[0].val?.toString();
|
||||
}
|
||||
|
||||
expect(await queryValue(`select count(*) as ~val from ~auth_methods`)).toEqual('2');
|
||||
expect(
|
||||
await queryValue(
|
||||
`select ~is_disabled as ~val from ~auth_methods where ~amoid='790ca4d2-7f01-4800-955b-d691b890cc50'`
|
||||
)
|
||||
).toBeFalsy();
|
||||
|
||||
const DB1 = {
|
||||
auth_methods: [
|
||||
{ id: -1, name: 'Anonymous', amoid: '790ca4d2-7f01-4800-955b-d691b890cc50', is_disabled: 1 },
|
||||
{ id: 10, name: 'OAuth', amoid: '4269b660-54b6-11ef-a3aa-a9021250bf4b' },
|
||||
],
|
||||
auth_methods_config: [{ id: 20, auth_method_id: 10, key: 'oauthClient', value: 'dbgate' }],
|
||||
config: [
|
||||
{ group: 'admin', key: 'encyptKey', value: '1234' },
|
||||
{ group: 'admin', key: 'adminPasswordState', value: 'set' },
|
||||
{ group: 'license', key: 'licenseKey', value: '123467' },
|
||||
],
|
||||
roles: [
|
||||
{ id: -3, name: 'superadmin' },
|
||||
{ id: -2, name: 'logged-user' },
|
||||
{ id: -1, name: 'anonymous-user' },
|
||||
],
|
||||
role_permissions: [
|
||||
{ id: 14, role_id: -1, permission: 'perm1' },
|
||||
{ id: 29, role_id: -1, permission: 'perm2' },
|
||||
{ id: 1, role_id: -1, permission: 'perm3' },
|
||||
],
|
||||
};
|
||||
|
||||
const DB2 = {
|
||||
auth_methods: [{ id: 10, name: 'My Auth', amoid: 'myauth1' }],
|
||||
auth_methods_config: [{ id: 20, auth_method_id: 10, key: 'my authClient', value: 'mydbgate' }],
|
||||
config: [],
|
||||
roles: [{ id: 1, name: 'test' }],
|
||||
role_permissions: [{ id: 14, role_id: 1, permission: 'permxx' }],
|
||||
};
|
||||
|
||||
function createDuplConfig(db) {
|
||||
return {
|
||||
systemConnection: conn,
|
||||
driver,
|
||||
items: [
|
||||
{
|
||||
name: 'auth_methods',
|
||||
findExisting: true,
|
||||
updateExisting: true,
|
||||
createNew: true,
|
||||
matchColumns: ['amoid'],
|
||||
jsonArray: db.auth_methods,
|
||||
},
|
||||
{
|
||||
name: 'auth_methods_config',
|
||||
findExisting: true,
|
||||
updateExisting: true,
|
||||
createNew: true,
|
||||
matchColumns: ['auth_method_id', 'key'],
|
||||
jsonArray: db.auth_methods_config,
|
||||
},
|
||||
{
|
||||
name: 'config',
|
||||
findExisting: true,
|
||||
updateExisting: true,
|
||||
createNew: true,
|
||||
matchColumns: ['group', 'key'],
|
||||
jsonArray: db.config,
|
||||
},
|
||||
{
|
||||
name: 'roles',
|
||||
findExisting: true,
|
||||
updateExisting: true,
|
||||
createNew: true,
|
||||
matchColumns: ['name'],
|
||||
jsonArray: db.roles,
|
||||
},
|
||||
{
|
||||
name: 'role_permissions',
|
||||
findExisting: true,
|
||||
updateExisting: true,
|
||||
createNew: true,
|
||||
deleteMissing: true,
|
||||
matchColumns: ['role_id', 'permission'],
|
||||
deleteRestrictionColumns: ['role_id'],
|
||||
jsonArray: db.role_permissions,
|
||||
},
|
||||
],
|
||||
};
|
||||
}
|
||||
|
||||
await dataReplicator(createDuplConfig(DB1));
|
||||
|
||||
expect(
|
||||
await queryValue(
|
||||
`select ~is_disabled as ~val from ~auth_methods where ~amoid='790ca4d2-7f01-4800-955b-d691b890cc50'`
|
||||
)
|
||||
).toBeTruthy();
|
||||
|
||||
expect(await queryValue(`select count(*) as ~val from ~auth_methods`)).toEqual('3');
|
||||
expect(await queryValue(`select count(*) as ~val from ~auth_methods_config`)).toEqual('1');
|
||||
expect(await queryValue(`select count(*) as ~val from ~config`)).toEqual('3');
|
||||
expect(await queryValue(`select ~value as ~val from ~auth_methods_config`)).toEqual('dbgate');
|
||||
expect(
|
||||
await queryValue(`select ~value as ~val from ~config where ~group='license' and ~key='licenseKey'`)
|
||||
).toEqual('123467');
|
||||
expect(await queryValue(`select count(*) as ~val from ~role_permissions`)).toEqual('3');
|
||||
|
||||
DB1.auth_methods_config[0].value = 'dbgate2';
|
||||
DB1.config[2].value = '567';
|
||||
DB1.role_permissions.splice(2, 1);
|
||||
|
||||
await dataReplicator(createDuplConfig(DB1));
|
||||
expect(await queryValue(`select count(*) as ~val from ~auth_methods_config`)).toEqual('1');
|
||||
expect(await queryValue(`select count(*) as ~val from ~config`)).toEqual('3');
|
||||
expect(await queryValue(`select ~value as ~val from ~auth_methods_config`)).toEqual('dbgate2');
|
||||
expect(
|
||||
await queryValue(`select ~value as ~val from ~config where ~group='license' and ~key='licenseKey'`)
|
||||
).toEqual('567');
|
||||
expect(await queryValue(`select count(*) as ~val from ~role_permissions`)).toEqual('2');
|
||||
|
||||
// now add DB2
|
||||
await dataReplicator(createDuplConfig(DB2));
|
||||
|
||||
expect(await queryValue(`select count(*) as ~val from ~auth_methods`)).toEqual('4');
|
||||
expect(await queryValue(`select count(*) as ~val from ~auth_methods_config`)).toEqual('2');
|
||||
expect(await queryValue(`select count(*) as ~val from ~role_permissions`)).toEqual('3');
|
||||
|
||||
DB1.role_permissions.splice(1, 1);
|
||||
await dataReplicator(createDuplConfig(DB1));
|
||||
expect(await queryValue(`select count(*) as ~val from ~role_permissions`)).toEqual('2');
|
||||
}),
|
||||
15 * 1000
|
||||
);
|
||||
});
|
||||
@@ -190,7 +190,7 @@ describe('Query', () => {
|
||||
})
|
||||
);
|
||||
|
||||
test.each(engines.filter(x => !x.skipDataDuplicator).map(engine => [engine.label, engine]))(
|
||||
test.each(engines.filter(x => !x.skipDataReplicator).map(engine => [engine.label, engine]))(
|
||||
'Select scope identity - %s',
|
||||
testWrapper(async (conn, driver, engine) => {
|
||||
await runCommandOnDriver(conn, driver, dmp =>
|
||||
|
||||
@@ -8,14 +8,14 @@ services:
|
||||
# ports:
|
||||
# - 15000:5432
|
||||
#
|
||||
# mariadb:
|
||||
# image: mariadb
|
||||
# command: --default-authentication-plugin=mysql_native_password
|
||||
# restart: always
|
||||
# ports:
|
||||
# - 15004:3306
|
||||
# environment:
|
||||
# - MYSQL_ROOT_PASSWORD=Pwd2020Db
|
||||
mariadb:
|
||||
image: mariadb
|
||||
command: --default-authentication-plugin=mysql_native_password
|
||||
restart: always
|
||||
ports:
|
||||
- 15004:3306
|
||||
environment:
|
||||
- MYSQL_ROOT_PASSWORD=Pwd2020Db
|
||||
|
||||
# mysql:
|
||||
# image: mysql:8.0.18
|
||||
@@ -25,7 +25,7 @@ services:
|
||||
# - 15001:3306
|
||||
# environment:
|
||||
# - MYSQL_ROOT_PASSWORD=Pwd2020Db
|
||||
#
|
||||
|
||||
|
||||
# cassandradb:
|
||||
# image: cassandra:5.0.2
|
||||
@@ -81,11 +81,11 @@ services:
|
||||
# ports:
|
||||
# - 15006:1521
|
||||
|
||||
libsql:
|
||||
image: ghcr.io/tursodatabase/libsql-server:latest
|
||||
platform: linux/amd64
|
||||
ports:
|
||||
- '8080:8080'
|
||||
- '5002:5001'
|
||||
volumes:
|
||||
- ./data/libsql:/var/lib/sqld
|
||||
# libsql:
|
||||
# image: ghcr.io/tursodatabase/libsql-server:latest
|
||||
# platform: linux/amd64
|
||||
# ports:
|
||||
# - '8080:8080'
|
||||
# - '5002:5001'
|
||||
# volumes:
|
||||
# - ./data/libsql:/var/lib/sqld
|
||||
|
||||
@@ -551,7 +551,7 @@ const clickhouseEngine = {
|
||||
skipUnique: true,
|
||||
skipAutoIncrement: true,
|
||||
skipPkColumnTesting: true,
|
||||
skipDataDuplicator: true,
|
||||
skipDataReplicator: true,
|
||||
skipStringLength: true,
|
||||
alterTableAddColumnSyntax: true,
|
||||
dbSnapshotBySeconds: true,
|
||||
@@ -643,7 +643,7 @@ const cassandraEngine = {
|
||||
skipOrderBy: true,
|
||||
skipAutoIncrement: true,
|
||||
skipDataModifications: true,
|
||||
skipDataDuplicator: true,
|
||||
skipDataReplicator: true,
|
||||
skipDeploy: true,
|
||||
skipImportModel: true,
|
||||
|
||||
@@ -693,7 +693,7 @@ const enginesOnLocal = [
|
||||
// all engines, which would be run on local test
|
||||
// cassandraEngine,
|
||||
// mysqlEngine,
|
||||
// mariaDbEngine,
|
||||
mariaDbEngine,
|
||||
// postgreSqlEngine,
|
||||
// sqlServerEngine,
|
||||
// sqliteEngine,
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"private": true,
|
||||
"version": "6.3.2",
|
||||
"version": "6.3.4-premium-beta.1",
|
||||
"name": "dbgate-all",
|
||||
"workspaces": [
|
||||
"packages/*",
|
||||
|
||||
@@ -22,6 +22,7 @@
|
||||
"dependencies": {
|
||||
"@aws-sdk/rds-signer": "^3.665.0",
|
||||
"activedirectory2": "^2.1.0",
|
||||
"archiver": "^7.0.1",
|
||||
"async-lock": "^1.2.6",
|
||||
"axios": "^0.21.1",
|
||||
"body-parser": "^1.19.0",
|
||||
@@ -62,7 +63,8 @@
|
||||
"simple-encryptor": "^4.0.0",
|
||||
"ssh2": "^1.16.0",
|
||||
"stream-json": "^1.8.0",
|
||||
"tar": "^6.0.5"
|
||||
"tar": "^6.0.5",
|
||||
"yauzl": "^3.2.0"
|
||||
},
|
||||
"scripts": {
|
||||
"start": "env-cmd -f .env node src/index.js --listen-api",
|
||||
|
||||
@@ -2,14 +2,20 @@ const fs = require('fs-extra');
|
||||
const readline = require('readline');
|
||||
const crypto = require('crypto');
|
||||
const path = require('path');
|
||||
const { archivedir, clearArchiveLinksCache, resolveArchiveFolder } = require('../utility/directories');
|
||||
const { archivedir, clearArchiveLinksCache, resolveArchiveFolder, uploadsdir } = require('../utility/directories');
|
||||
const socket = require('../utility/socket');
|
||||
const loadFilesRecursive = require('../utility/loadFilesRecursive');
|
||||
const getJslFileName = require('../utility/getJslFileName');
|
||||
const { getLogger, extractErrorLogData } = require('dbgate-tools');
|
||||
const { getLogger, extractErrorLogData, jsonLinesParse } = require('dbgate-tools');
|
||||
const dbgateApi = require('../shell');
|
||||
const jsldata = require('./jsldata');
|
||||
const platformInfo = require('../utility/platformInfo');
|
||||
const { isProApp } = require('../utility/checkLicense');
|
||||
const listZipEntries = require('../utility/listZipEntries');
|
||||
const unzipJsonLinesFile = require('../shell/unzipJsonLinesFile');
|
||||
const { zip } = require('lodash');
|
||||
const zipDirectory = require('../shell/zipDirectory');
|
||||
const unzipDirectory = require('../shell/unzipDirectory');
|
||||
|
||||
const logger = getLogger('archive');
|
||||
|
||||
@@ -47,9 +53,31 @@ module.exports = {
|
||||
return folder;
|
||||
},
|
||||
|
||||
async getZipFiles({ file }) {
|
||||
const entries = await listZipEntries(path.join(archivedir(), file));
|
||||
const files = entries.map(entry => {
|
||||
let name = entry.fileName;
|
||||
if (isProApp() && entry.fileName.endsWith('.jsonl')) {
|
||||
name = entry.fileName.slice(0, -6);
|
||||
}
|
||||
return {
|
||||
name: name,
|
||||
label: name,
|
||||
type: isProApp() && entry.fileName.endsWith('.jsonl') ? 'jsonl' : 'other',
|
||||
};
|
||||
});
|
||||
return files;
|
||||
},
|
||||
|
||||
files_meta: true,
|
||||
async files({ folder }) {
|
||||
try {
|
||||
if (folder.endsWith('.zip')) {
|
||||
if (await fs.exists(path.join(archivedir(), folder))) {
|
||||
return this.getZipFiles({ file: folder });
|
||||
}
|
||||
return [];
|
||||
}
|
||||
const dir = resolveArchiveFolder(folder);
|
||||
if (!(await fs.exists(dir))) return [];
|
||||
const files = await loadFilesRecursive(dir); // fs.readdir(dir);
|
||||
@@ -91,6 +119,16 @@ module.exports = {
|
||||
return true;
|
||||
},
|
||||
|
||||
createFile_meta: true,
|
||||
async createFile({ folder, file, fileType, tableInfo }) {
|
||||
await fs.writeFile(
|
||||
path.join(resolveArchiveFolder(folder), `${file}.${fileType}`),
|
||||
tableInfo ? JSON.stringify({ __isStreamHeader: true, tableInfo }) : ''
|
||||
);
|
||||
socket.emitChanged(`archive-files-changed`, { folder });
|
||||
return true;
|
||||
},
|
||||
|
||||
deleteFile_meta: true,
|
||||
async deleteFile({ folder, file, fileType }) {
|
||||
await fs.unlink(path.join(resolveArchiveFolder(folder), `${file}.${fileType}`));
|
||||
@@ -158,7 +196,7 @@ module.exports = {
|
||||
deleteFolder_meta: true,
|
||||
async deleteFolder({ folder }) {
|
||||
if (!folder) throw new Error('Missing folder parameter');
|
||||
if (folder.endsWith('.link')) {
|
||||
if (folder.endsWith('.link') || folder.endsWith('.zip')) {
|
||||
await fs.unlink(path.join(archivedir(), folder));
|
||||
} else {
|
||||
await fs.rmdir(path.join(archivedir(), folder), { recursive: true });
|
||||
@@ -204,9 +242,10 @@ module.exports = {
|
||||
},
|
||||
|
||||
async getNewArchiveFolder({ database }) {
|
||||
const isLink = database.endsWith(database);
|
||||
const name = isLink ? database.slice(0, -5) : database;
|
||||
const suffix = isLink ? '.link' : '';
|
||||
const isLink = database.endsWith('.link');
|
||||
const isZip = database.endsWith('.zip');
|
||||
const name = isLink ? database.slice(0, -5) : isZip ? database.slice(0, -4) : database;
|
||||
const suffix = isLink ? '.link' : isZip ? '.zip' : '';
|
||||
if (!(await fs.exists(path.join(archivedir(), database)))) return database;
|
||||
let index = 2;
|
||||
while (await fs.exists(path.join(archivedir(), `${name}${index}${suffix}`))) {
|
||||
@@ -214,4 +253,58 @@ module.exports = {
|
||||
}
|
||||
return `${name}${index}${suffix}`;
|
||||
},
|
||||
|
||||
getArchiveData_meta: true,
|
||||
async getArchiveData({ folder, file }) {
|
||||
let rows;
|
||||
if (folder.endsWith('.zip')) {
|
||||
rows = await unzipJsonLinesFile(path.join(archivedir(), folder), `${file}.jsonl`);
|
||||
} else {
|
||||
rows = jsonLinesParse(await fs.readFile(path.join(archivedir(), folder, `${file}.jsonl`), { encoding: 'utf8' }));
|
||||
}
|
||||
return rows.filter(x => !x.__isStreamHeader);
|
||||
},
|
||||
|
||||
saveUploadedZip_meta: true,
|
||||
async saveUploadedZip({ filePath, fileName }) {
|
||||
if (!fileName?.endsWith('.zip')) {
|
||||
throw new Error(`${fileName} is not a ZIP file`);
|
||||
}
|
||||
|
||||
const folder = await this.getNewArchiveFolder({ database: fileName });
|
||||
await fs.copyFile(filePath, path.join(archivedir(), folder));
|
||||
socket.emitChanged(`archive-folders-changed`);
|
||||
|
||||
return null;
|
||||
},
|
||||
|
||||
zip_meta: true,
|
||||
async zip({ folder }) {
|
||||
const newFolder = await this.getNewArchiveFolder({ database: folder + '.zip' });
|
||||
await zipDirectory(path.join(archivedir(), folder), path.join(archivedir(), newFolder));
|
||||
socket.emitChanged(`archive-folders-changed`);
|
||||
|
||||
return null;
|
||||
},
|
||||
|
||||
unzip_meta: true,
|
||||
async unzip({ folder }) {
|
||||
const newFolder = await this.getNewArchiveFolder({ database: folder.slice(0, -4) });
|
||||
await unzipDirectory(path.join(archivedir(), folder), path.join(archivedir(), newFolder));
|
||||
socket.emitChanged(`archive-folders-changed`);
|
||||
|
||||
return null;
|
||||
},
|
||||
|
||||
getZippedPath_meta: true,
|
||||
async getZippedPath({ folder }) {
|
||||
if (folder.endsWith('.zip')) {
|
||||
return { filePath: path.join(archivedir(), folder) };
|
||||
}
|
||||
|
||||
const uploadName = crypto.randomUUID();
|
||||
const filePath = path.join(uploadsdir(), uploadName);
|
||||
await zipDirectory(path.join(archivedir(), folder), filePath);
|
||||
return { filePath };
|
||||
},
|
||||
};
|
||||
|
||||
@@ -12,6 +12,7 @@ const {
|
||||
getAuthProviderById,
|
||||
} = require('../auth/authProvider');
|
||||
const storage = require('./storage');
|
||||
const { decryptPasswordString } = require('../utility/crypting');
|
||||
|
||||
const logger = getLogger('auth');
|
||||
|
||||
@@ -44,6 +45,7 @@ function authMiddleware(req, res, next) {
|
||||
'/connections/dblogin-auth',
|
||||
'/connections/dblogin-auth-token',
|
||||
'/health',
|
||||
'/__health',
|
||||
];
|
||||
|
||||
// console.log('********************* getAuthProvider()', getAuthProvider());
|
||||
@@ -95,7 +97,7 @@ module.exports = {
|
||||
let adminPassword = process.env.ADMIN_PASSWORD;
|
||||
if (!adminPassword) {
|
||||
const adminConfig = await storage.readConfig({ group: 'admin' });
|
||||
adminPassword = adminConfig?.adminPassword;
|
||||
adminPassword = decryptPasswordString(adminConfig?.adminPassword);
|
||||
}
|
||||
if (adminPassword && adminPassword == password) {
|
||||
return {
|
||||
|
||||
@@ -19,6 +19,14 @@ const storage = require('./storage');
|
||||
const { getAuthProxyUrl } = require('../utility/authProxy');
|
||||
const { getPublicHardwareFingerprint } = require('../utility/hardwareFingerprint');
|
||||
const { extractErrorMessage } = require('dbgate-tools');
|
||||
const {
|
||||
generateTransportEncryptionKey,
|
||||
createTransportEncryptor,
|
||||
recryptConnection,
|
||||
getInternalEncryptor,
|
||||
recryptUser,
|
||||
recryptObjectPasswordFieldInPlace,
|
||||
} = require('../utility/crypting');
|
||||
|
||||
const lock = new AsyncLock();
|
||||
|
||||
@@ -107,6 +115,7 @@ module.exports = {
|
||||
datadir(),
|
||||
processArgs.runE2eTests ? 'connections-e2etests.jsonl' : 'connections.jsonl'
|
||||
),
|
||||
supportCloudAutoUpgrade: !!process.env.CLOUD_UPGRADE_FILE,
|
||||
...currentVersion,
|
||||
};
|
||||
|
||||
@@ -144,7 +153,7 @@ module.exports = {
|
||||
const res = {
|
||||
...value,
|
||||
};
|
||||
if (value['app.useNativeMenu'] !== true && value['app.useNativeMenu'] !== false) {
|
||||
if (platformInfo.isElectron && value['app.useNativeMenu'] !== true && value['app.useNativeMenu'] !== false) {
|
||||
// res['app.useNativeMenu'] = os.platform() == 'darwin' ? true : false;
|
||||
res['app.useNativeMenu'] = false;
|
||||
}
|
||||
@@ -161,14 +170,19 @@ module.exports = {
|
||||
|
||||
async loadSettings() {
|
||||
try {
|
||||
const settingsText = await fs.readFile(
|
||||
path.join(datadir(), processArgs.runE2eTests ? 'settings-e2etests.json' : 'settings.json'),
|
||||
{ encoding: 'utf-8' }
|
||||
);
|
||||
return {
|
||||
...this.fillMissingSettings(JSON.parse(settingsText)),
|
||||
'other.licenseKey': platformInfo.isElectron ? await this.loadLicenseKey() : undefined,
|
||||
};
|
||||
if (process.env.STORAGE_DATABASE) {
|
||||
const settings = await storage.readConfig({ group: 'settings' });
|
||||
return this.fillMissingSettings(settings);
|
||||
} else {
|
||||
const settingsText = await fs.readFile(
|
||||
path.join(datadir(), processArgs.runE2eTests ? 'settings-e2etests.json' : 'settings.json'),
|
||||
{ encoding: 'utf-8' }
|
||||
);
|
||||
return {
|
||||
...this.fillMissingSettings(JSON.parse(settingsText)),
|
||||
'other.licenseKey': platformInfo.isElectron ? await this.loadLicenseKey() : undefined,
|
||||
};
|
||||
}
|
||||
} catch (err) {
|
||||
return this.fillMissingSettings({});
|
||||
}
|
||||
@@ -246,19 +260,31 @@ module.exports = {
|
||||
const res = await lock.acquire('settings', async () => {
|
||||
const currentValue = await this.loadSettings();
|
||||
try {
|
||||
const updated = {
|
||||
...currentValue,
|
||||
..._.omit(values, ['other.licenseKey']),
|
||||
};
|
||||
await fs.writeFile(
|
||||
path.join(datadir(), processArgs.runE2eTests ? 'settings-e2etests.json' : 'settings.json'),
|
||||
JSON.stringify(updated, undefined, 2)
|
||||
);
|
||||
// this.settingsValue = updated;
|
||||
let updated = currentValue;
|
||||
if (process.env.STORAGE_DATABASE) {
|
||||
updated = {
|
||||
...currentValue,
|
||||
...values,
|
||||
};
|
||||
await storage.writeConfig({
|
||||
group: 'settings',
|
||||
config: updated,
|
||||
});
|
||||
} else {
|
||||
updated = {
|
||||
...currentValue,
|
||||
..._.omit(values, ['other.licenseKey']),
|
||||
};
|
||||
await fs.writeFile(
|
||||
path.join(datadir(), processArgs.runE2eTests ? 'settings-e2etests.json' : 'settings.json'),
|
||||
JSON.stringify(updated, undefined, 2)
|
||||
);
|
||||
// this.settingsValue = updated;
|
||||
|
||||
if (currentValue['other.licenseKey'] != values['other.licenseKey']) {
|
||||
await this.saveLicenseKey({ licenseKey: values['other.licenseKey'] });
|
||||
socket.emitChanged(`config-changed`);
|
||||
if (currentValue['other.licenseKey'] != values['other.licenseKey']) {
|
||||
await this.saveLicenseKey({ licenseKey: values['other.licenseKey'] });
|
||||
socket.emitChanged(`config-changed`);
|
||||
}
|
||||
}
|
||||
|
||||
socket.emitChanged(`settings-changed`);
|
||||
@@ -281,4 +307,91 @@ module.exports = {
|
||||
const resp = await checkLicenseKey(licenseKey);
|
||||
return resp;
|
||||
},
|
||||
|
||||
recryptDatabaseForExport(db) {
|
||||
const encryptionKey = generateTransportEncryptionKey();
|
||||
const transportEncryptor = createTransportEncryptor(encryptionKey);
|
||||
|
||||
const config = _.cloneDeep([
|
||||
...(db.config?.filter(c => !(c.group == 'admin' && c.key == 'encryptionKey')) || []),
|
||||
{ group: 'admin', key: 'encryptionKey', value: encryptionKey },
|
||||
]);
|
||||
const adminPassword = config.find(c => c.group == 'admin' && c.key == 'adminPassword');
|
||||
recryptObjectPasswordFieldInPlace(adminPassword, 'value', getInternalEncryptor(), transportEncryptor);
|
||||
|
||||
return {
|
||||
...db,
|
||||
connections: db.connections?.map(conn => recryptConnection(conn, getInternalEncryptor(), transportEncryptor)),
|
||||
users: db.users?.map(conn => recryptUser(conn, getInternalEncryptor(), transportEncryptor)),
|
||||
config,
|
||||
};
|
||||
},
|
||||
|
||||
recryptDatabaseFromImport(db) {
|
||||
const encryptionKey = db.config?.find(c => c.group == 'admin' && c.key == 'encryptionKey')?.value;
|
||||
if (!encryptionKey) {
|
||||
throw new Error('Missing encryption key in the database');
|
||||
}
|
||||
const config = _.cloneDeep(db.config || []).filter(c => !(c.group == 'admin' && c.key == 'encryptionKey'));
|
||||
const transportEncryptor = createTransportEncryptor(encryptionKey);
|
||||
|
||||
const adminPassword = config.find(c => c.group == 'admin' && c.key == 'adminPassword');
|
||||
recryptObjectPasswordFieldInPlace(adminPassword, 'value', transportEncryptor, getInternalEncryptor());
|
||||
|
||||
return {
|
||||
...db,
|
||||
connections: db.connections?.map(conn => recryptConnection(conn, transportEncryptor, getInternalEncryptor())),
|
||||
users: db.users?.map(conn => recryptUser(conn, transportEncryptor, getInternalEncryptor())),
|
||||
config,
|
||||
};
|
||||
},
|
||||
|
||||
exportConnectionsAndSettings_meta: true,
|
||||
async exportConnectionsAndSettings(_params, req) {
|
||||
if (!hasPermission(`admin/config`, req)) {
|
||||
throw new Error('Permission denied: admin/config');
|
||||
}
|
||||
|
||||
if (connections.portalConnections) {
|
||||
throw new Error('Not allowed');
|
||||
}
|
||||
|
||||
if (process.env.STORAGE_DATABASE) {
|
||||
const db = await storage.getExportedDatabase();
|
||||
return this.recryptDatabaseForExport(db);
|
||||
}
|
||||
|
||||
return this.recryptDatabaseForExport({
|
||||
connections: (await connections.list(null, req)).map((conn, index) => ({
|
||||
..._.omit(conn, ['_id']),
|
||||
id: index + 1,
|
||||
conid: conn._id,
|
||||
})),
|
||||
});
|
||||
},
|
||||
|
||||
importConnectionsAndSettings_meta: true,
|
||||
async importConnectionsAndSettings({ db }, req) {
|
||||
if (!hasPermission(`admin/config`, req)) {
|
||||
throw new Error('Permission denied: admin/config');
|
||||
}
|
||||
|
||||
if (connections.portalConnections) {
|
||||
throw new Error('Not allowed');
|
||||
}
|
||||
|
||||
const recryptedDb = this.recryptDatabaseFromImport(db);
|
||||
if (process.env.STORAGE_DATABASE) {
|
||||
await storage.replicateImportedDatabase(recryptedDb);
|
||||
} else {
|
||||
await connections.importFromArray(
|
||||
recryptedDb.connections.map(conn => ({
|
||||
..._.omit(conn, ['conid', 'id']),
|
||||
_id: conn.conid,
|
||||
}))
|
||||
);
|
||||
}
|
||||
|
||||
return true;
|
||||
},
|
||||
};
|
||||
|
||||
@@ -107,8 +107,8 @@ function getPortalCollections() {
|
||||
trustServerCertificate: process.env[`SSL_TRUST_CERTIFICATE_${id}`],
|
||||
}));
|
||||
|
||||
for(const conn of connections) {
|
||||
for(const prop in process.env) {
|
||||
for (const conn of connections) {
|
||||
for (const prop in process.env) {
|
||||
if (prop.startsWith(`CONNECTION_${conn._id}_`)) {
|
||||
const name = prop.substring(`CONNECTION_${conn._id}_`.length);
|
||||
conn[name] = process.env[prop];
|
||||
@@ -321,6 +321,18 @@ module.exports = {
|
||||
return res;
|
||||
},
|
||||
|
||||
importFromArray(list) {
|
||||
this.datastore.transformAll(connections => {
|
||||
const mapped = connections.map(x => {
|
||||
const found = list.find(y => y._id == x._id);
|
||||
if (found) return found;
|
||||
return x;
|
||||
});
|
||||
return [...mapped, ...list.filter(x => !connections.find(y => y._id == x._id))];
|
||||
});
|
||||
socket.emitChanged('connection-list-changed');
|
||||
},
|
||||
|
||||
async checkUnsavedConnectionsLimit() {
|
||||
if (!this.datastore) {
|
||||
return;
|
||||
|
||||
@@ -37,6 +37,8 @@ const loadModelTransform = require('../utility/loadModelTransform');
|
||||
const exportDbModelSql = require('../utility/exportDbModelSql');
|
||||
const axios = require('axios');
|
||||
const { callTextToSqlApi, callCompleteOnCursorApi, callRefactorSqlQueryApi } = require('../utility/authProxy');
|
||||
const { decryptConnection } = require('../utility/crypting');
|
||||
const { getSshTunnel } = require('../utility/sshTunnel');
|
||||
|
||||
const logger = getLogger('databaseConnections');
|
||||
|
||||
@@ -140,6 +142,11 @@ module.exports = {
|
||||
if (newOpened.disconnected) return;
|
||||
this.close(conid, database, false);
|
||||
});
|
||||
subprocess.on('error', err => {
|
||||
logger.error(extractErrorLogData(err), 'Error in database connection subprocess');
|
||||
if (newOpened.disconnected) return;
|
||||
this.close(conid, database, false);
|
||||
});
|
||||
|
||||
subprocess.send({
|
||||
msgtype: 'connect',
|
||||
@@ -619,9 +626,26 @@ module.exports = {
|
||||
command,
|
||||
{ conid, database, outputFile, inputFile, options, selectedTables, skippedTables, argsFormat }
|
||||
) {
|
||||
const connection = await connections.getCore({ conid });
|
||||
const sourceConnection = await connections.getCore({ conid });
|
||||
const connection = {
|
||||
...decryptConnection(sourceConnection),
|
||||
};
|
||||
const driver = requireEngineDriver(connection);
|
||||
|
||||
if (!connection.port && driver.defaultPort) {
|
||||
connection.port = driver.defaultPort.toString();
|
||||
}
|
||||
|
||||
if (connection.useSshTunnel) {
|
||||
const tunnel = await getSshTunnel(connection);
|
||||
if (tunnel.state == 'error') {
|
||||
throw new Error(tunnel.message);
|
||||
}
|
||||
|
||||
connection.server = tunnel.localHost;
|
||||
connection.port = tunnel.localPort;
|
||||
}
|
||||
|
||||
const settingsValue = await config.getSettings();
|
||||
|
||||
const externalTools = {};
|
||||
|
||||
@@ -9,6 +9,9 @@ const scheduler = require('./scheduler');
|
||||
const getDiagramExport = require('../utility/getDiagramExport');
|
||||
const apps = require('./apps');
|
||||
const getMapExport = require('../utility/getMapExport');
|
||||
const dbgateApi = require('../shell');
|
||||
const { getLogger } = require('dbgate-tools');
|
||||
const logger = getLogger('files');
|
||||
|
||||
function serialize(format, data) {
|
||||
if (format == 'text') return data;
|
||||
@@ -219,4 +222,60 @@ module.exports = {
|
||||
return path.join(dir, file);
|
||||
}
|
||||
},
|
||||
|
||||
createZipFromJsons_meta: true,
|
||||
async createZipFromJsons({ db, filePath }) {
|
||||
logger.info(`Creating zip file from JSONS ${filePath}`);
|
||||
await dbgateApi.zipJsonLinesData(db, filePath);
|
||||
return true;
|
||||
},
|
||||
|
||||
getJsonsFromZip_meta: true,
|
||||
async getJsonsFromZip({ filePath }) {
|
||||
const res = await dbgateApi.unzipJsonLinesData(filePath);
|
||||
return res;
|
||||
},
|
||||
|
||||
downloadText_meta: true,
|
||||
async downloadText({ uri }, req) {
|
||||
if (!uri) return null;
|
||||
const filePath = await dbgateApi.download(uri);
|
||||
const text = await fs.readFile(filePath, {
|
||||
encoding: 'utf-8',
|
||||
});
|
||||
return text;
|
||||
},
|
||||
|
||||
saveUploadedFile_meta: true,
|
||||
async saveUploadedFile({ filePath, fileName }) {
|
||||
const FOLDERS = ['sql', 'sqlite'];
|
||||
for (const folder of FOLDERS) {
|
||||
if (fileName.toLowerCase().endsWith('.' + folder)) {
|
||||
logger.info(`Saving ${folder} file ${fileName}`);
|
||||
await fs.copyFile(filePath, path.join(filesdir(), folder, fileName));
|
||||
|
||||
socket.emitChanged(`files-changed`, { folder: folder });
|
||||
socket.emitChanged(`all-files-changed`);
|
||||
return {
|
||||
name: path.basename(filePath),
|
||||
folder: folder,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
throw new Error(`${fileName} doesn't have one of supported extensions: ${FOLDERS.join(', ')}`);
|
||||
},
|
||||
|
||||
exportFile_meta: true,
|
||||
async exportFile({ folder, file, filePath }, req) {
|
||||
if (!hasPermission(`files/${folder}/read`, req)) return false;
|
||||
await fs.copyFile(path.join(filesdir(), folder, file), filePath);
|
||||
return true;
|
||||
},
|
||||
|
||||
simpleCopy_meta: true,
|
||||
async simpleCopy({ sourceFilePath, targetFilePath }, req) {
|
||||
await fs.copyFile(sourceFilePath, targetFilePath);
|
||||
return true;
|
||||
},
|
||||
};
|
||||
|
||||
@@ -8,6 +8,8 @@ const getJslFileName = require('../utility/getJslFileName');
|
||||
const JsonLinesDatastore = require('../utility/JsonLinesDatastore');
|
||||
const requirePluginFunction = require('../utility/requirePluginFunction');
|
||||
const socket = require('../utility/socket');
|
||||
const crypto = require('crypto');
|
||||
const dbgateApi = require('../shell');
|
||||
|
||||
function readFirstLine(file) {
|
||||
return new Promise((resolve, reject) => {
|
||||
@@ -293,4 +295,11 @@ module.exports = {
|
||||
})),
|
||||
};
|
||||
},
|
||||
|
||||
downloadJslData_meta: true,
|
||||
async downloadJslData({ uri }) {
|
||||
const jslid = crypto.randomUUID();
|
||||
await dbgateApi.download(uri, { targetFile: getJslFileName(jslid) });
|
||||
return { jslid };
|
||||
},
|
||||
};
|
||||
|
||||
@@ -96,9 +96,9 @@ module.exports = {
|
||||
|
||||
handle_ping() {},
|
||||
|
||||
handle_freeData(runid, { freeData }) {
|
||||
handle_dataResult(runid, { dataResult }) {
|
||||
const { resolve } = this.requests[runid];
|
||||
resolve(freeData);
|
||||
resolve(dataResult);
|
||||
delete this.requests[runid];
|
||||
},
|
||||
|
||||
@@ -328,4 +328,24 @@ module.exports = {
|
||||
});
|
||||
return promise;
|
||||
},
|
||||
|
||||
scriptResult_meta: true,
|
||||
async scriptResult({ script }) {
|
||||
if (script.type != 'json') {
|
||||
return { errorMessage: 'Only JSON scripts are allowed' };
|
||||
}
|
||||
|
||||
const promise = new Promise((resolve, reject) => {
|
||||
const runid = crypto.randomUUID();
|
||||
this.requests[runid] = { resolve, reject, exitOnStreamError: true };
|
||||
const cloned = _.cloneDeepWith(script, node => {
|
||||
if (node?.$replace == 'runid') {
|
||||
return runid;
|
||||
}
|
||||
});
|
||||
const js = jsonScriptToJavascript(cloned);
|
||||
this.startCore(runid, scriptTemplate(js, false));
|
||||
});
|
||||
return promise;
|
||||
},
|
||||
};
|
||||
|
||||
@@ -98,6 +98,11 @@ module.exports = {
|
||||
if (newOpened.disconnected) return;
|
||||
this.close(conid, false);
|
||||
});
|
||||
subprocess.on('error', err => {
|
||||
logger.error(extractErrorLogData(err), 'Error in server connection subprocess');
|
||||
if (newOpened.disconnected) return;
|
||||
this.close(conid, false);
|
||||
});
|
||||
subprocess.send({ msgtype: 'connect', ...connection, globalSettings: await config.getSettings() });
|
||||
return newOpened;
|
||||
});
|
||||
|
||||
@@ -4,6 +4,10 @@ module.exports = {
|
||||
return null;
|
||||
},
|
||||
|
||||
async getExportedDatabase() {
|
||||
return {};
|
||||
},
|
||||
|
||||
getConnection_meta: true,
|
||||
async getConnection({ conid }) {
|
||||
return null;
|
||||
|
||||
@@ -39,52 +39,6 @@ module.exports = {
|
||||
});
|
||||
},
|
||||
|
||||
uploadDataFile_meta: {
|
||||
method: 'post',
|
||||
raw: true,
|
||||
},
|
||||
uploadDataFile(req, res) {
|
||||
const { data } = req.files || {};
|
||||
|
||||
if (!data) {
|
||||
res.json(null);
|
||||
return;
|
||||
}
|
||||
|
||||
if (data.name.toLowerCase().endsWith('.sql')) {
|
||||
logger.info(`Uploading SQL file ${data.name}, size=${data.size}`);
|
||||
data.mv(path.join(filesdir(), 'sql', data.name), () => {
|
||||
res.json({
|
||||
name: data.name,
|
||||
folder: 'sql',
|
||||
});
|
||||
|
||||
socket.emitChanged(`files-changed`, { folder: 'sql' });
|
||||
socket.emitChanged(`all-files-changed`);
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
res.json(null);
|
||||
},
|
||||
|
||||
saveDataFile_meta: true,
|
||||
async saveDataFile({ filePath }) {
|
||||
if (filePath.toLowerCase().endsWith('.sql')) {
|
||||
logger.info(`Saving SQL file ${filePath}`);
|
||||
await fs.copyFile(filePath, path.join(filesdir(), 'sql', path.basename(filePath)));
|
||||
|
||||
socket.emitChanged(`files-changed`, { folder: 'sql' });
|
||||
socket.emitChanged(`all-files-changed`);
|
||||
return {
|
||||
name: path.basename(filePath),
|
||||
folder: 'sql',
|
||||
};
|
||||
}
|
||||
|
||||
return null;
|
||||
},
|
||||
|
||||
get_meta: {
|
||||
method: 'get',
|
||||
raw: true,
|
||||
|
||||
@@ -38,7 +38,7 @@ const { getLogger } = require('dbgate-tools');
|
||||
const { getDefaultAuthProvider } = require('./auth/authProvider');
|
||||
const startCloudUpgradeTimer = require('./utility/cloudUpgrade');
|
||||
const { isProApp } = require('./utility/checkLicense');
|
||||
const getHealthStatus = require('./utility/healthStatus');
|
||||
const { getHealthStatus, getHealthStatusSprinx } = require('./utility/healthStatus');
|
||||
|
||||
const logger = getLogger('main');
|
||||
|
||||
@@ -124,6 +124,12 @@ function start() {
|
||||
res.end(JSON.stringify(health, null, 2));
|
||||
});
|
||||
|
||||
app.get(getExpressPath('/__health'), async function (req, res) {
|
||||
res.setHeader('Content-Type', 'application/json');
|
||||
const health = await getHealthStatusSprinx();
|
||||
res.end(JSON.stringify(health, null, 2));
|
||||
});
|
||||
|
||||
app.use(bodyParser.json({ limit: '50mb' }));
|
||||
|
||||
app.use(
|
||||
|
||||
@@ -4,6 +4,8 @@ const { connectUtility } = require('../utility/connectUtility');
|
||||
const { handleProcessCommunication } = require('../utility/processComm');
|
||||
const { pickSafeConnectionInfo } = require('../utility/crypting');
|
||||
const _ = require('lodash');
|
||||
const { getLogger, extractErrorLogData } = require('dbgate-tools');
|
||||
const logger = getLogger('connectProcess');
|
||||
|
||||
const formatErrorDetail = (e, connection) => `${e.stack}
|
||||
|
||||
@@ -23,12 +25,22 @@ function start() {
|
||||
try {
|
||||
const driver = requireEngineDriver(connection);
|
||||
const dbhan = await connectUtility(driver, connection, 'app');
|
||||
const res = await driver.getVersion(dbhan);
|
||||
let version = {
|
||||
version: 'Unknown',
|
||||
};
|
||||
try {
|
||||
version = await driver.getVersion(dbhan);
|
||||
} catch (err) {
|
||||
logger.error(extractErrorLogData(err), 'Error getting DB server version');
|
||||
version = {
|
||||
version: 'Unknown',
|
||||
};
|
||||
}
|
||||
let databases = undefined;
|
||||
if (requestDbList) {
|
||||
databases = await driver.listDatabases(dbhan);
|
||||
}
|
||||
process.send({ msgtype: 'connected', ...res, databases });
|
||||
process.send({ msgtype: 'connected', ...version, databases });
|
||||
await driver.close(dbhan);
|
||||
} catch (e) {
|
||||
console.error(e);
|
||||
|
||||
@@ -120,10 +120,15 @@ function setStatusName(name) {
|
||||
|
||||
async function readVersion() {
|
||||
const driver = requireEngineDriver(storedConnection);
|
||||
const version = await driver.getVersion(dbhan);
|
||||
logger.debug(`Got server version: ${version.version}`);
|
||||
process.send({ msgtype: 'version', version });
|
||||
serverVersion = version;
|
||||
try {
|
||||
const version = await driver.getVersion(dbhan);
|
||||
logger.debug(`Got server version: ${version.version}`);
|
||||
serverVersion = version;
|
||||
} catch (err) {
|
||||
logger.error(extractErrorLogData(err), 'Error getting DB server version');
|
||||
serverVersion = { version: 'Unknown' };
|
||||
}
|
||||
process.send({ msgtype: 'version', version: serverVersion });
|
||||
}
|
||||
|
||||
async function handleConnect({ connection, structure, globalSettings }) {
|
||||
|
||||
@@ -46,7 +46,13 @@ async function handleRefresh() {
|
||||
|
||||
async function readVersion() {
|
||||
const driver = requireEngineDriver(storedConnection);
|
||||
const version = await driver.getVersion(dbhan);
|
||||
let version;
|
||||
try {
|
||||
version = await driver.getVersion(dbhan);
|
||||
} catch (err) {
|
||||
logger.error(extractErrorLogData(err), 'Error getting DB server version');
|
||||
version = { version: 'Unknown' };
|
||||
}
|
||||
process.send({ msgtype: 'version', version });
|
||||
}
|
||||
|
||||
|
||||
@@ -3,7 +3,9 @@ const { archivedir, resolveArchiveFolder } = require('../utility/directories');
|
||||
const jsonLinesReader = require('./jsonLinesReader');
|
||||
|
||||
function archiveReader({ folderName, fileName, ...other }) {
|
||||
const jsonlFile = path.join(resolveArchiveFolder(folderName), `${fileName}.jsonl`);
|
||||
const jsonlFile = folderName.endsWith('.zip')
|
||||
? `zip://archive:${folderName}//${fileName}.jsonl`
|
||||
: path.join(resolveArchiveFolder(folderName), `${fileName}.jsonl`);
|
||||
const res = jsonLinesReader({ fileName: jsonlFile, ...other });
|
||||
return res;
|
||||
}
|
||||
|
||||
@@ -15,9 +15,9 @@ class CollectorWriterStream extends stream.Writable {
|
||||
|
||||
_final(callback) {
|
||||
process.send({
|
||||
msgtype: 'freeData',
|
||||
msgtype: 'dataResult',
|
||||
runid: this.runid,
|
||||
freeData: { rows: this.rows, structure: this.structure },
|
||||
dataResult: { rows: this.rows, structure: this.structure },
|
||||
});
|
||||
callback();
|
||||
}
|
||||
|
||||
@@ -1,61 +0,0 @@
|
||||
const stream = require('stream');
|
||||
const path = require('path');
|
||||
const { quoteFullName, fullNameToString, getLogger } = require('dbgate-tools');
|
||||
const requireEngineDriver = require('../utility/requireEngineDriver');
|
||||
const { connectUtility } = require('../utility/connectUtility');
|
||||
const logger = getLogger('dataDuplicator');
|
||||
const { DataDuplicator } = require('dbgate-datalib');
|
||||
const copyStream = require('./copyStream');
|
||||
const jsonLinesReader = require('./jsonLinesReader');
|
||||
const { resolveArchiveFolder } = require('../utility/directories');
|
||||
|
||||
async function dataDuplicator({
|
||||
connection,
|
||||
archive,
|
||||
folder,
|
||||
items,
|
||||
options,
|
||||
analysedStructure = null,
|
||||
driver,
|
||||
systemConnection,
|
||||
}) {
|
||||
if (!driver) driver = requireEngineDriver(connection);
|
||||
|
||||
const dbhan = systemConnection || (await connectUtility(driver, connection, 'write'));
|
||||
|
||||
try {
|
||||
if (!analysedStructure) {
|
||||
analysedStructure = await driver.analyseFull(dbhan);
|
||||
}
|
||||
|
||||
const sourceDir = archive
|
||||
? resolveArchiveFolder(archive)
|
||||
: folder?.startsWith('archive:')
|
||||
? resolveArchiveFolder(folder.substring('archive:'.length))
|
||||
: folder;
|
||||
|
||||
const dupl = new DataDuplicator(
|
||||
dbhan,
|
||||
driver,
|
||||
analysedStructure,
|
||||
items.map(item => ({
|
||||
name: item.name,
|
||||
operation: item.operation,
|
||||
matchColumns: item.matchColumns,
|
||||
openStream:
|
||||
item.openStream || (() => jsonLinesReader({ fileName: path.join(sourceDir, `${item.name}.jsonl`) })),
|
||||
})),
|
||||
stream,
|
||||
copyStream,
|
||||
options
|
||||
);
|
||||
|
||||
await dupl.run();
|
||||
} finally {
|
||||
if (!systemConnection) {
|
||||
await driver.close(dbhan);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = dataDuplicator;
|
||||
96
packages/api/src/shell/dataReplicator.js
Normal file
96
packages/api/src/shell/dataReplicator.js
Normal file
@@ -0,0 +1,96 @@
|
||||
const stream = require('stream');
|
||||
const path = require('path');
|
||||
const { quoteFullName, fullNameToString, getLogger } = require('dbgate-tools');
|
||||
const requireEngineDriver = require('../utility/requireEngineDriver');
|
||||
const { connectUtility } = require('../utility/connectUtility');
|
||||
const logger = getLogger('datareplicator');
|
||||
const { DataReplicator } = require('dbgate-datalib');
|
||||
const { compileCompoudEvalCondition } = require('dbgate-filterparser');
|
||||
const copyStream = require('./copyStream');
|
||||
const jsonLinesReader = require('./jsonLinesReader');
|
||||
const { resolveArchiveFolder } = require('../utility/directories');
|
||||
const { evaluateCondition } = require('dbgate-sqltree');
|
||||
|
||||
function compileOperationFunction(enabled, condition) {
|
||||
if (!enabled) return _row => false;
|
||||
const conditionCompiled = compileCompoudEvalCondition(condition);
|
||||
if (condition) {
|
||||
return row => evaluateCondition(conditionCompiled, row);
|
||||
}
|
||||
return _row => true;
|
||||
}
|
||||
|
||||
async function dataReplicator({
|
||||
connection,
|
||||
archive,
|
||||
folder,
|
||||
items,
|
||||
options,
|
||||
analysedStructure = null,
|
||||
driver,
|
||||
systemConnection,
|
||||
}) {
|
||||
if (!driver) driver = requireEngineDriver(connection);
|
||||
|
||||
const dbhan = systemConnection || (await connectUtility(driver, connection, 'write'));
|
||||
|
||||
try {
|
||||
if (!analysedStructure) {
|
||||
analysedStructure = await driver.analyseFull(dbhan);
|
||||
}
|
||||
|
||||
let joinPath;
|
||||
|
||||
if (archive?.endsWith('.zip')) {
|
||||
joinPath = file => `zip://archive:${archive}//${file}`;
|
||||
} else {
|
||||
const sourceDir = archive
|
||||
? resolveArchiveFolder(archive)
|
||||
: folder?.startsWith('archive:')
|
||||
? resolveArchiveFolder(folder.substring('archive:'.length))
|
||||
: folder;
|
||||
joinPath = file => path.join(sourceDir, file);
|
||||
}
|
||||
|
||||
const repl = new DataReplicator(
|
||||
dbhan,
|
||||
driver,
|
||||
analysedStructure,
|
||||
items.map(item => {
|
||||
return {
|
||||
name: item.name,
|
||||
matchColumns: item.matchColumns,
|
||||
findExisting: compileOperationFunction(item.findExisting, item.findCondition),
|
||||
createNew: compileOperationFunction(item.createNew, item.createCondition),
|
||||
updateExisting: compileOperationFunction(item.updateExisting, item.updateCondition),
|
||||
deleteMissing: !!item.deleteMissing,
|
||||
deleteRestrictionColumns: item.deleteRestrictionColumns ?? [],
|
||||
openStream: item.openStream
|
||||
? item.openStream
|
||||
: item.jsonArray
|
||||
? () => stream.Readable.from(item.jsonArray)
|
||||
: () => jsonLinesReader({ fileName: joinPath(`${item.name}.jsonl`) }),
|
||||
};
|
||||
}),
|
||||
stream,
|
||||
copyStream,
|
||||
options
|
||||
);
|
||||
|
||||
await repl.run();
|
||||
if (options?.runid) {
|
||||
process.send({
|
||||
msgtype: 'dataResult',
|
||||
runid: options?.runid,
|
||||
dataResult: repl.result,
|
||||
});
|
||||
}
|
||||
return repl.result;
|
||||
} finally {
|
||||
if (!systemConnection) {
|
||||
await driver.close(dbhan);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = dataReplicator;
|
||||
@@ -1,14 +1,30 @@
|
||||
const crypto = require('crypto');
|
||||
const path = require('path');
|
||||
const { uploadsdir } = require('../utility/directories');
|
||||
const { uploadsdir, archivedir } = require('../utility/directories');
|
||||
const { downloadFile } = require('../utility/downloader');
|
||||
const extractSingleFileFromZip = require('../utility/extractSingleFileFromZip');
|
||||
|
||||
async function download(url) {
|
||||
if (url && url.match(/(^http:\/\/)|(^https:\/\/)/)) {
|
||||
const tmpFile = path.join(uploadsdir(), crypto.randomUUID());
|
||||
await downloadFile(url, tmpFile);
|
||||
return tmpFile;
|
||||
async function download(url, options = {}) {
|
||||
const { targetFile } = options || {};
|
||||
if (url) {
|
||||
if (url.match(/(^http:\/\/)|(^https:\/\/)/)) {
|
||||
const destFile = targetFile || path.join(uploadsdir(), crypto.randomUUID());
|
||||
await downloadFile(url, destFile);
|
||||
return destFile;
|
||||
}
|
||||
const zipMatch = url.match(/^zip\:\/\/(.*)\/\/(.*)$/);
|
||||
if (zipMatch) {
|
||||
const destFile = targetFile || path.join(uploadsdir(), crypto.randomUUID());
|
||||
let zipFile = zipMatch[1];
|
||||
if (zipFile.startsWith('archive:')) {
|
||||
zipFile = path.join(archivedir(), zipFile.substring('archive:'.length));
|
||||
}
|
||||
|
||||
await extractSingleFileFromZip(zipFile, zipMatch[2], destFile);
|
||||
return destFile;
|
||||
}
|
||||
}
|
||||
|
||||
return url;
|
||||
}
|
||||
|
||||
|
||||
@@ -25,7 +25,7 @@ const importDatabase = require('./importDatabase');
|
||||
const loadDatabase = require('./loadDatabase');
|
||||
const generateModelSql = require('./generateModelSql');
|
||||
const modifyJsonLinesReader = require('./modifyJsonLinesReader');
|
||||
const dataDuplicator = require('./dataDuplicator');
|
||||
const dataReplicator = require('./dataReplicator');
|
||||
const dbModelToJson = require('./dbModelToJson');
|
||||
const jsonToDbModel = require('./jsonToDbModel');
|
||||
const jsonReader = require('./jsonReader');
|
||||
@@ -35,6 +35,11 @@ const autoIndexForeignKeysTransform = require('./autoIndexForeignKeysTransform')
|
||||
const generateDeploySql = require('./generateDeploySql');
|
||||
const dropAllDbObjects = require('./dropAllDbObjects');
|
||||
const importDbFromFolder = require('./importDbFromFolder');
|
||||
const zipDirectory = require('./zipDirectory');
|
||||
const unzipDirectory = require('./unzipDirectory');
|
||||
const zipJsonLinesData = require('./zipJsonLinesData');
|
||||
const unzipJsonLinesData = require('./unzipJsonLinesData');
|
||||
const unzipJsonLinesFile = require('./unzipJsonLinesFile');
|
||||
|
||||
const dbgateApi = {
|
||||
queryReader,
|
||||
@@ -64,7 +69,7 @@ const dbgateApi = {
|
||||
loadDatabase,
|
||||
generateModelSql,
|
||||
modifyJsonLinesReader,
|
||||
dataDuplicator,
|
||||
dataReplicator,
|
||||
dbModelToJson,
|
||||
jsonToDbModel,
|
||||
dataTypeMapperTransform,
|
||||
@@ -73,6 +78,11 @@ const dbgateApi = {
|
||||
generateDeploySql,
|
||||
dropAllDbObjects,
|
||||
importDbFromFolder,
|
||||
zipDirectory,
|
||||
unzipDirectory,
|
||||
zipJsonLinesData,
|
||||
unzipJsonLinesData,
|
||||
unzipJsonLinesFile,
|
||||
};
|
||||
|
||||
requirePlugin.initializeDbgateApi(dbgateApi);
|
||||
|
||||
@@ -36,9 +36,10 @@ async function jsonLinesWriter({ fileName, encoding = 'utf-8', header = true })
|
||||
logger.info(`Writing file ${fileName}`);
|
||||
const stringify = new StringifyStream({ header });
|
||||
const fileStream = fs.createWriteStream(fileName, encoding);
|
||||
stringify.pipe(fileStream);
|
||||
stringify['finisher'] = fileStream;
|
||||
return stringify;
|
||||
return [stringify, fileStream];
|
||||
// stringify.pipe(fileStream);
|
||||
// stringify['finisher'] = fileStream;
|
||||
// return stringify;
|
||||
}
|
||||
|
||||
module.exports = jsonLinesWriter;
|
||||
|
||||
91
packages/api/src/shell/unzipDirectory.js
Normal file
91
packages/api/src/shell/unzipDirectory.js
Normal file
@@ -0,0 +1,91 @@
|
||||
const yauzl = require('yauzl');
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
const { getLogger, extractErrorLogData } = require('dbgate-tools');
|
||||
|
||||
const logger = getLogger('unzipDirectory');
|
||||
|
||||
/**
|
||||
* Extracts an entire ZIP file, preserving its internal directory layout.
|
||||
*
|
||||
* @param {string} zipPath Path to the ZIP file on disk.
|
||||
* @param {string} outputDirectory Folder to create / overwrite with the contents.
|
||||
* @returns {Promise<boolean>} Resolves `true` on success, rejects on error.
|
||||
*/
|
||||
/**
 * Extracts an entire ZIP file, preserving its internal directory layout.
 *
 * Entries whose names would escape `outputDirectory` (e.g. containing `..`,
 * the classic "Zip Slip" attack) are rejected instead of being written.
 *
 * @param {string} zipPath Path to the ZIP file on disk.
 * @param {string} outputDirectory Folder to create / overwrite with the contents.
 * @returns {Promise<boolean>} Resolves `true` on success, rejects on error.
 */
function unzipDirectory(zipPath, outputDirectory) {
  return new Promise((resolve, reject) => {
    yauzl.open(zipPath, { lazyEntries: true }, (err, zipFile) => {
      if (err) return reject(err);

      /** Pending per-file extractions – we resolve the main promise after they’re all done */
      const pending = [];

      // Guard against "Zip Slip": the resolved destination must stay inside
      // outputDirectory; otherwise a crafted entry name could overwrite
      // arbitrary files on disk.
      const resolveDestPath = entryFileName => {
        const destPath = path.join(outputDirectory, entryFileName);
        const relative = path.relative(outputDirectory, destPath);
        if (relative.startsWith('..') || path.isAbsolute(relative)) {
          throw new Error(`Unsafe entry path "${entryFileName}" in archive "${zipPath}"`);
        }
        return destPath;
      };

      // kick things off
      zipFile.readEntry();

      zipFile.on('entry', entry => {
        let destPath;
        try {
          destPath = resolveDestPath(entry.fileName);
        } catch (pathErr) {
          logger.error(extractErrorLogData(pathErr), `Rejected unsafe ZIP entry in "${zipPath}".`);
          return reject(pathErr);
        }

        // Handle directories (their names always end with “/” in ZIPs)
        if (/\/$/.test(entry.fileName)) {
          // Ensure directory exists, then continue to next entry
          fs.promises
            .mkdir(destPath, { recursive: true })
            .then(() => zipFile.readEntry())
            .catch(reject);
          return;
        }

        // Handle files
        const filePromise = fs.promises
          .mkdir(path.dirname(destPath), { recursive: true }) // make sure parent dirs exist
          .then(
            () =>
              new Promise((res, rej) => {
                zipFile.openReadStream(entry, (streamErr, readStream) => {
                  if (streamErr) return rej(streamErr);

                  const writeStream = fs.createWriteStream(destPath);
                  readStream.pipe(writeStream);

                  // proceed to next entry once we’ve consumed *this* one
                  readStream.on('end', () => zipFile.readEntry());

                  writeStream.on('finish', () => {
                    logger.info(`Extracted "${entry.fileName}" → "${destPath}".`);
                    res();
                  });

                  writeStream.on('error', writeErr => {
                    logger.error(
                      extractErrorLogData(writeErr),
                      `Error extracting "${entry.fileName}" from "${zipPath}".`
                    );
                    rej(writeErr);
                  });
                });
              })
          );

        pending.push(filePromise);
      });

      // Entire archive enumerated; wait for all streams to finish
      zipFile.on('end', () => {
        Promise.all(pending)
          .then(() => {
            logger.info(`Archive "${zipPath}" fully extracted to "${outputDirectory}".`);
            resolve(true);
          })
          .catch(reject);
      });

      zipFile.on('error', zipErr => {
        logger.error(extractErrorLogData(zipErr), `ZIP file error in ${zipPath}.`);
        reject(zipErr);
      });
    });
  });
}

module.exports = unzipDirectory;
|
||||
60
packages/api/src/shell/unzipJsonLinesData.js
Normal file
60
packages/api/src/shell/unzipJsonLinesData.js
Normal file
@@ -0,0 +1,60 @@
|
||||
const yauzl = require('yauzl');
|
||||
const fs = require('fs');
|
||||
const { jsonLinesParse } = require('dbgate-tools');
|
||||
|
||||
/**
 * Reads every `*.jsonl` entry of a ZIP archive and parses it as JSON-lines.
 *
 * @param {string} zipPath Path to the ZIP file on disk.
 * @returns {Promise<Object>} Map of entry name (without the `.jsonl` suffix)
 *   to the parsed JSON-lines content. Non-`.jsonl` entries are ignored.
 */
function unzipJsonLinesData(zipPath) {
  return new Promise((resolve, reject) => {
    // Open the zip file in lazy mode so entries are pulled one at a time
    yauzl.open(zipPath, { lazyEntries: true }, (openErr, zipfile) => {
      if (openErr) {
        return reject(openErr);
      }

      const results = {};

      const handleEntry = entry => {
        if (!/\.jsonl$/i.test(entry.fileName)) {
          // Not a JSON-lines file, skip to the next entry
          zipfile.readEntry();
          return;
        }

        zipfile.openReadStream(entry, (streamErr, readStream) => {
          if (streamErr) {
            return reject(streamErr);
          }

          const buffers = [];
          readStream.on('data', part => buffers.push(part));
          readStream.on('end', () => {
            try {
              const text = Buffer.concat(buffers).toString('utf-8');
              results[entry.fileName.replace(/\.jsonl$/, '')] = jsonLinesParse(text);
            } catch (parseError) {
              return reject(parseError);
            }
            // Move to the next entry
            zipfile.readEntry();
          });
        });
      };

      zipfile.on('entry', handleEntry);

      // All entries consumed – hand back the collected data
      zipfile.on('end', () => resolve(results));

      // Catch errors from zipfile
      zipfile.on('error', zipErr => reject(zipErr));

      // Start the entry pump
      zipfile.readEntry();
    });
  });
}

module.exports = unzipJsonLinesData;
|
||||
59
packages/api/src/shell/unzipJsonLinesFile.js
Normal file
59
packages/api/src/shell/unzipJsonLinesFile.js
Normal file
@@ -0,0 +1,59 @@
|
||||
const yauzl = require('yauzl');
|
||||
const fs = require('fs');
|
||||
const { jsonLinesParse } = require('dbgate-tools');
|
||||
|
||||
/**
 * Extracts a single named entry from a ZIP archive and parses it as JSON-lines.
 *
 * Unlike a full scan, the archive is closed as soon as the requested entry has
 * been read and parsed, so trailing entries are never enumerated.
 *
 * @param {string} zipPath Path to the ZIP file on disk.
 * @param {string} fileInZip Exact entry name to extract.
 * @returns {Promise<any>} Parsed JSON-lines content, or `null` when the entry
 *   does not exist in the archive.
 */
function unzipJsonLinesFile(zipPath, fileInZip) {
  return new Promise((resolve, reject) => {
    // Open the zip file
    yauzl.open(zipPath, { lazyEntries: true }, (err, zipfile) => {
      if (err) {
        return reject(err);
      }

      // Set once the target entry has been parsed, so the final 'end'
      // fallback (entry not found => null) is skipped.
      let settled = false;

      // Start reading entries
      zipfile.readEntry();

      zipfile.on('entry', entry => {
        if (entry.fileName == fileInZip) {
          zipfile.openReadStream(entry, (streamErr, readStream) => {
            if (streamErr) {
              return reject(streamErr);
            }

            const chunks = [];
            readStream.on('data', chunk => chunks.push(chunk));
            readStream.on('end', () => {
              let parsedJson;
              try {
                const fileContent = Buffer.concat(chunks).toString('utf-8');
                parsedJson = jsonLinesParse(fileContent);
              } catch (parseError) {
                return reject(parseError);
              }

              // Target found – no need to enumerate the remaining entries
              settled = true;
              zipfile.close();
              resolve(parsedJson);
            });
          });
        } else {
          // Not the requested file, skip
          zipfile.readEntry();
        }
      });

      // Whole archive scanned without a match => resolve null
      zipfile.on('end', () => {
        if (!settled) {
          resolve(null);
        }
      });

      // Catch errors from zipfile
      zipfile.on('error', zipErr => {
        reject(zipErr);
      });
    });
  });
}

module.exports = unzipJsonLinesFile;
|
||||
49
packages/api/src/shell/zipDirectory.js
Normal file
49
packages/api/src/shell/zipDirectory.js
Normal file
@@ -0,0 +1,49 @@
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
const archiver = require('archiver');
|
||||
const { getLogger, extractErrorLogData } = require('dbgate-tools');
|
||||
const { archivedir } = require('../utility/directories');
|
||||
const logger = getLogger('compressDirectory');
|
||||
|
||||
/**
 * Compresses the contents of a directory into a ZIP file.
 *
 * Nested `.zip` files inside the input directory are excluded from the
 * archive. An `archive:` prefixed output path is resolved inside the
 * application archive directory.
 *
 * @param {string} inputDirectory Folder whose contents will be zipped.
 * @param {string} outputFile Target ZIP path (may use the `archive:` prefix).
 * @returns {Promise<void>} Resolves once all data is flushed to disk.
 */
function zipDirectory(inputDirectory, outputFile) {
  if (outputFile.startsWith('archive:')) {
    outputFile = path.join(archivedir(), outputFile.substring('archive:'.length));
  }

  return new Promise((resolve, reject) => {
    const archive = archiver('zip', { zlib: { level: 9 } }); // level: 9 => best compression
    const output = fs.createWriteStream(outputFile);

    // All archive data has been written and flushed => done
    output.on('close', () => {
      logger.info(`ZIP file created (${archive.pointer()} total bytes)`);
      resolve();
    });

    archive.on('warning', warnErr => {
      logger.warn(extractErrorLogData(warnErr), `Warning while creating ZIP: ${warnErr.message}`);
    });

    archive.on('error', archErr => {
      logger.error(extractErrorLogData(archErr), `Error while creating ZIP: ${archErr.message}`);
      reject(archErr);
    });

    // Route archive output into the file
    archive.pipe(output);

    // Append the folder contents, filtering out nested ZIP files
    archive.directory(inputDirectory, false, entryData =>
      entryData.name.endsWith('.zip') ? false : entryData
    );

    // Finalize the archive
    archive.finalize();
  });
}

module.exports = zipDirectory;
|
||||
49
packages/api/src/shell/zipJsonLinesData.js
Normal file
49
packages/api/src/shell/zipJsonLinesData.js
Normal file
@@ -0,0 +1,49 @@
|
||||
const fs = require('fs');
|
||||
const _ = require('lodash');
|
||||
const path = require('path');
|
||||
const archiver = require('archiver');
|
||||
const { getLogger, extractErrorLogData, jsonLinesStringify } = require('dbgate-tools');
|
||||
const { archivedir } = require('../utility/directories');
|
||||
const logger = getLogger('compressDirectory');
|
||||
|
||||
/**
 * Serializes the array-valued keys of `jsonDb` as JSON-lines entries inside a
 * new ZIP file; each array under key `k` becomes an entry named `k.jsonl`.
 * Non-array values are skipped.
 *
 * An `archive:` prefixed output path is resolved inside the application
 * archive directory.
 *
 * NOTE: the original function was misnamed `zipDirectory` (copy-paste from
 * zipDirectory.js); it is renamed here to match the module it implements.
 * The exported value is unchanged.
 *
 * @param {Object} jsonDb Map of collection name -> array of rows.
 * @param {string} outputFile Target ZIP path (may use the `archive:` prefix).
 * @returns {Promise<void>} Resolves once all data is flushed to disk.
 */
function zipJsonLinesData(jsonDb, outputFile) {
  if (outputFile.startsWith('archive:')) {
    outputFile = path.join(archivedir(), outputFile.substring('archive:'.length));
  }

  return new Promise((resolve, reject) => {
    const output = fs.createWriteStream(outputFile);
    const archive = archiver('zip', { zlib: { level: 9 } }); // level: 9 => best compression

    // Listen for all archive data to be written
    output.on('close', () => {
      logger.info(`ZIP file created (${archive.pointer()} total bytes)`);
      resolve();
    });

    archive.on('warning', err => {
      logger.warn(extractErrorLogData(err), `Warning while creating ZIP: ${err.message}`);
    });

    archive.on('error', err => {
      logger.error(extractErrorLogData(err), `Error while creating ZIP: ${err.message}`);
      reject(err);
    });

    // Pipe archive data to the file
    archive.pipe(output);

    // One .jsonl entry per array-valued key
    for (const key in jsonDb) {
      const data = jsonDb[key];
      if (_.isArray(data)) {
        const jsonString = jsonLinesStringify(data);
        archive.append(jsonString, { name: `${key}.jsonl` });
      }
    }

    // Finalize the archive
    archive.finalize();
  });
}

module.exports = zipJsonLinesData;
|
||||
819
packages/api/src/storageModel.js
Normal file
819
packages/api/src/storageModel.js
Normal file
@@ -0,0 +1,819 @@
|
||||
module.exports = {
|
||||
"tables": [
|
||||
{
|
||||
"pureName": "auth_methods",
|
||||
"columns": [
|
||||
{
|
||||
"pureName": "auth_methods",
|
||||
"columnName": "id",
|
||||
"dataType": "int",
|
||||
"notNull": true
|
||||
},
|
||||
{
|
||||
"pureName": "auth_methods",
|
||||
"columnName": "name",
|
||||
"dataType": "varchar(250)",
|
||||
"notNull": false
|
||||
},
|
||||
{
|
||||
"pureName": "auth_methods",
|
||||
"columnName": "type",
|
||||
"dataType": "varchar(250)",
|
||||
"notNull": false
|
||||
},
|
||||
{
|
||||
"pureName": "auth_methods",
|
||||
"columnName": "amoid",
|
||||
"dataType": "varchar(250)",
|
||||
"notNull": false
|
||||
},
|
||||
{
|
||||
"pureName": "auth_methods",
|
||||
"columnName": "is_disabled",
|
||||
"dataType": "int",
|
||||
"notNull": false
|
||||
},
|
||||
{
|
||||
"pureName": "auth_methods",
|
||||
"columnName": "is_default",
|
||||
"dataType": "int",
|
||||
"notNull": false
|
||||
},
|
||||
{
|
||||
"pureName": "auth_methods",
|
||||
"columnName": "is_collapsed",
|
||||
"dataType": "int",
|
||||
"notNull": false
|
||||
}
|
||||
],
|
||||
"foreignKeys": [],
|
||||
"primaryKey": {
|
||||
"pureName": "auth_methods",
|
||||
"constraintType": "primaryKey",
|
||||
"columns": [
|
||||
{
|
||||
"columnName": "id"
|
||||
}
|
||||
]
|
||||
},
|
||||
"preloadedRows": [
|
||||
{
|
||||
"id": -1,
|
||||
"amoid": "790ca4d2-7f01-4800-955b-d691b890cc50",
|
||||
"name": "Anonymous",
|
||||
"type": "none"
|
||||
},
|
||||
{
|
||||
"id": -2,
|
||||
"amoid": "53db1cbf-f488-44d9-8670-7162510eb09c",
|
||||
"name": "Local",
|
||||
"type": "local"
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"pureName": "auth_methods_config",
|
||||
"columns": [
|
||||
{
|
||||
"pureName": "auth_methods_config",
|
||||
"columnName": "id",
|
||||
"dataType": "int",
|
||||
"autoIncrement": true,
|
||||
"notNull": true
|
||||
},
|
||||
{
|
||||
"pureName": "auth_methods_config",
|
||||
"columnName": "auth_method_id",
|
||||
"dataType": "int",
|
||||
"notNull": true
|
||||
},
|
||||
{
|
||||
"pureName": "auth_methods_config",
|
||||
"columnName": "key",
|
||||
"dataType": "varchar(250)",
|
||||
"notNull": false
|
||||
},
|
||||
{
|
||||
"pureName": "auth_methods_config",
|
||||
"columnName": "value",
|
||||
"dataType": "varchar(250)",
|
||||
"notNull": false
|
||||
}
|
||||
],
|
||||
"foreignKeys": [
|
||||
{
|
||||
"constraintType": "foreignKey",
|
||||
"pureName": "auth_methods_config",
|
||||
"refTableName": "auth_methods",
|
||||
"deleteAction": "CASCADE",
|
||||
"columns": [
|
||||
{
|
||||
"columnName": "auth_method_id",
|
||||
"refColumnName": "id"
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
"primaryKey": {
|
||||
"pureName": "auth_methods_config",
|
||||
"constraintType": "primaryKey",
|
||||
"columns": [
|
||||
{
|
||||
"columnName": "id"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
{
|
||||
"pureName": "config",
|
||||
"columns": [
|
||||
{
|
||||
"pureName": "config",
|
||||
"columnName": "id",
|
||||
"dataType": "int",
|
||||
"autoIncrement": true,
|
||||
"notNull": true
|
||||
},
|
||||
{
|
||||
"pureName": "config",
|
||||
"columnName": "group",
|
||||
"dataType": "varchar(250)",
|
||||
"notNull": false
|
||||
},
|
||||
{
|
||||
"pureName": "config",
|
||||
"columnName": "key",
|
||||
"dataType": "varchar(250)",
|
||||
"notNull": false
|
||||
},
|
||||
{
|
||||
"pureName": "config",
|
||||
"columnName": "value",
|
||||
"dataType": "varchar(1000)",
|
||||
"notNull": false
|
||||
}
|
||||
],
|
||||
"foreignKeys": [],
|
||||
"primaryKey": {
|
||||
"pureName": "config",
|
||||
"constraintType": "primaryKey",
|
||||
"columns": [
|
||||
{
|
||||
"columnName": "id"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
{
|
||||
"pureName": "connections",
|
||||
"columns": [
|
||||
{
|
||||
"pureName": "connections",
|
||||
"columnName": "id",
|
||||
"dataType": "int",
|
||||
"autoIncrement": true,
|
||||
"notNull": true
|
||||
},
|
||||
{
|
||||
"pureName": "connections",
|
||||
"columnName": "conid",
|
||||
"dataType": "varchar(250)",
|
||||
"notNull": false
|
||||
},
|
||||
{
|
||||
"pureName": "connections",
|
||||
"columnName": "displayName",
|
||||
"dataType": "varchar(250)",
|
||||
"notNull": false
|
||||
},
|
||||
{
|
||||
"pureName": "connections",
|
||||
"columnName": "connectionColor",
|
||||
"dataType": "varchar(250)",
|
||||
"notNull": false
|
||||
},
|
||||
{
|
||||
"pureName": "connections",
|
||||
"columnName": "engine",
|
||||
"dataType": "varchar(250)",
|
||||
"notNull": false
|
||||
},
|
||||
{
|
||||
"pureName": "connections",
|
||||
"columnName": "server",
|
||||
"dataType": "varchar(250)",
|
||||
"notNull": false
|
||||
},
|
||||
{
|
||||
"pureName": "connections",
|
||||
"columnName": "databaseFile",
|
||||
"dataType": "varchar(250)",
|
||||
"notNull": false
|
||||
},
|
||||
{
|
||||
"pureName": "connections",
|
||||
"columnName": "useDatabaseUrl",
|
||||
"dataType": "int",
|
||||
"notNull": false
|
||||
},
|
||||
{
|
||||
"pureName": "connections",
|
||||
"columnName": "databaseUrl",
|
||||
"dataType": "varchar(250)",
|
||||
"notNull": false
|
||||
},
|
||||
{
|
||||
"pureName": "connections",
|
||||
"columnName": "authType",
|
||||
"dataType": "varchar(250)",
|
||||
"notNull": false
|
||||
},
|
||||
{
|
||||
"pureName": "connections",
|
||||
"columnName": "port",
|
||||
"dataType": "varchar(20)",
|
||||
"notNull": false
|
||||
},
|
||||
{
|
||||
"pureName": "connections",
|
||||
"columnName": "serviceName",
|
||||
"dataType": "varchar(250)",
|
||||
"notNull": false
|
||||
},
|
||||
{
|
||||
"pureName": "connections",
|
||||
"columnName": "serviceNameType",
|
||||
"dataType": "varchar(250)",
|
||||
"notNull": false
|
||||
},
|
||||
{
|
||||
"pureName": "connections",
|
||||
"columnName": "socketPath",
|
||||
"dataType": "varchar(250)",
|
||||
"notNull": false
|
||||
},
|
||||
{
|
||||
"pureName": "connections",
|
||||
"columnName": "user",
|
||||
"dataType": "varchar(250)",
|
||||
"notNull": false
|
||||
},
|
||||
{
|
||||
"pureName": "connections",
|
||||
"columnName": "password",
|
||||
"dataType": "varchar(250)",
|
||||
"notNull": false
|
||||
},
|
||||
{
|
||||
"pureName": "connections",
|
||||
"columnName": "passwordMode",
|
||||
"dataType": "varchar(250)",
|
||||
"notNull": false
|
||||
},
|
||||
{
|
||||
"pureName": "connections",
|
||||
"columnName": "treeKeySeparator",
|
||||
"dataType": "varchar(250)",
|
||||
"notNull": false
|
||||
},
|
||||
{
|
||||
"pureName": "connections",
|
||||
"columnName": "windowsDomain",
|
||||
"dataType": "varchar(250)",
|
||||
"notNull": false
|
||||
},
|
||||
{
|
||||
"pureName": "connections",
|
||||
"columnName": "isReadOnly",
|
||||
"dataType": "int",
|
||||
"notNull": false
|
||||
},
|
||||
{
|
||||
"pureName": "connections",
|
||||
"columnName": "trustServerCertificate",
|
||||
"dataType": "int",
|
||||
"notNull": false
|
||||
},
|
||||
{
|
||||
"pureName": "connections",
|
||||
"columnName": "defaultDatabase",
|
||||
"dataType": "varchar(250)",
|
||||
"notNull": false
|
||||
},
|
||||
{
|
||||
"pureName": "connections",
|
||||
"columnName": "singleDatabase",
|
||||
"dataType": "int",
|
||||
"notNull": false
|
||||
},
|
||||
{
|
||||
"pureName": "connections",
|
||||
"columnName": "useSshTunnel",
|
||||
"dataType": "int",
|
||||
"notNull": false
|
||||
},
|
||||
{
|
||||
"pureName": "connections",
|
||||
"columnName": "sshHost",
|
||||
"dataType": "varchar(250)",
|
||||
"notNull": false
|
||||
},
|
||||
{
|
||||
"pureName": "connections",
|
||||
"columnName": "sshPort",
|
||||
"dataType": "varchar(20)",
|
||||
"notNull": false
|
||||
},
|
||||
{
|
||||
"pureName": "connections",
|
||||
"columnName": "sshMode",
|
||||
"dataType": "varchar(250)",
|
||||
"notNull": false
|
||||
},
|
||||
{
|
||||
"pureName": "connections",
|
||||
"columnName": "sshKeyFile",
|
||||
"dataType": "varchar(250)",
|
||||
"notNull": false
|
||||
},
|
||||
{
|
||||
"pureName": "connections",
|
||||
"columnName": "sshKeyfilePassword",
|
||||
"dataType": "varchar(250)",
|
||||
"notNull": false
|
||||
},
|
||||
{
|
||||
"pureName": "connections",
|
||||
"columnName": "sshLogin",
|
||||
"dataType": "varchar(250)",
|
||||
"notNull": false
|
||||
},
|
||||
{
|
||||
"pureName": "connections",
|
||||
"columnName": "sshPassword",
|
||||
"dataType": "varchar(250)",
|
||||
"notNull": false
|
||||
},
|
||||
{
|
||||
"pureName": "connections",
|
||||
"columnName": "sshBastionHost",
|
||||
"dataType": "varchar(250)",
|
||||
"notNull": false
|
||||
},
|
||||
{
|
||||
"pureName": "connections",
|
||||
"columnName": "useSsl",
|
||||
"dataType": "int",
|
||||
"notNull": false
|
||||
},
|
||||
{
|
||||
"pureName": "connections",
|
||||
"columnName": "sslCaFile",
|
||||
"dataType": "varchar(250)",
|
||||
"notNull": false
|
||||
},
|
||||
{
|
||||
"pureName": "connections",
|
||||
"columnName": "sslCertFilePassword",
|
||||
"dataType": "varchar(250)",
|
||||
"notNull": false
|
||||
},
|
||||
{
|
||||
"pureName": "connections",
|
||||
"columnName": "sslKeyFile",
|
||||
"dataType": "varchar(250)",
|
||||
"notNull": false
|
||||
},
|
||||
{
|
||||
"pureName": "connections",
|
||||
"columnName": "sslRejectUnauthorized",
|
||||
"dataType": "int",
|
||||
"notNull": false
|
||||
},
|
||||
{
|
||||
"pureName": "connections",
|
||||
"columnName": "clientLibraryPath",
|
||||
"dataType": "varchar(250)",
|
||||
"notNull": false
|
||||
},
|
||||
{
|
||||
"pureName": "connections",
|
||||
"columnName": "useRedirectDbLogin",
|
||||
"dataType": "int",
|
||||
"notNull": false
|
||||
},
|
||||
{
|
||||
"pureName": "connections",
|
||||
"columnName": "allowedDatabases",
|
||||
"dataType": "varchar(500)",
|
||||
"notNull": false
|
||||
},
|
||||
{
|
||||
"pureName": "connections",
|
||||
"columnName": "allowedDatabasesRegex",
|
||||
"dataType": "varchar(250)",
|
||||
"notNull": false
|
||||
},
|
||||
{
|
||||
"pureName": "connections",
|
||||
"columnName": "endpoint",
|
||||
"dataType": "varchar(250)",
|
||||
"notNull": false
|
||||
},
|
||||
{
|
||||
"pureName": "connections",
|
||||
"columnName": "endpointKey",
|
||||
"dataType": "varchar(250)",
|
||||
"notNull": false
|
||||
},
|
||||
{
|
||||
"pureName": "connections",
|
||||
"columnName": "accessKeyId",
|
||||
"dataType": "varchar(250)",
|
||||
"notNull": false
|
||||
},
|
||||
{
|
||||
"pureName": "connections",
|
||||
"columnName": "secretAccessKey",
|
||||
"dataType": "varchar(250)",
|
||||
"notNull": false
|
||||
},
|
||||
{
|
||||
"pureName": "connections",
|
||||
"columnName": "awsRegion",
|
||||
"dataType": "varchar(250)",
|
||||
"notNull": false
|
||||
}
|
||||
],
|
||||
"foreignKeys": [],
|
||||
"primaryKey": {
|
||||
"pureName": "connections",
|
||||
"constraintType": "primaryKey",
|
||||
"columns": [
|
||||
{
|
||||
"columnName": "id"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
{
|
||||
"pureName": "roles",
|
||||
"columns": [
|
||||
{
|
||||
"pureName": "roles",
|
||||
"columnName": "id",
|
||||
"dataType": "int",
|
||||
"autoIncrement": true,
|
||||
"notNull": true
|
||||
},
|
||||
{
|
||||
"pureName": "roles",
|
||||
"columnName": "name",
|
||||
"dataType": "varchar(250)",
|
||||
"notNull": false
|
||||
}
|
||||
],
|
||||
"foreignKeys": [],
|
||||
"primaryKey": {
|
||||
"pureName": "roles",
|
||||
"constraintType": "primaryKey",
|
||||
"columns": [
|
||||
{
|
||||
"columnName": "id"
|
||||
}
|
||||
]
|
||||
},
|
||||
"preloadedRows": [
|
||||
{
|
||||
"id": -1,
|
||||
"name": "anonymous-user"
|
||||
},
|
||||
{
|
||||
"id": -2,
|
||||
"name": "logged-user"
|
||||
},
|
||||
{
|
||||
"id": -3,
|
||||
"name": "superadmin"
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"pureName": "role_connections",
|
||||
"columns": [
|
||||
{
|
||||
"pureName": "role_connections",
|
||||
"columnName": "id",
|
||||
"dataType": "int",
|
||||
"autoIncrement": true,
|
||||
"notNull": true
|
||||
},
|
||||
{
|
||||
"pureName": "role_connections",
|
||||
"columnName": "role_id",
|
||||
"dataType": "int",
|
||||
"notNull": true
|
||||
},
|
||||
{
|
||||
"pureName": "role_connections",
|
||||
"columnName": "connection_id",
|
||||
"dataType": "int",
|
||||
"notNull": true
|
||||
}
|
||||
],
|
||||
"foreignKeys": [
|
||||
{
|
||||
"constraintType": "foreignKey",
|
||||
"pureName": "role_connections",
|
||||
"refTableName": "roles",
|
||||
"deleteAction": "CASCADE",
|
||||
"columns": [
|
||||
{
|
||||
"columnName": "role_id",
|
||||
"refColumnName": "id"
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"constraintType": "foreignKey",
|
||||
"pureName": "role_connections",
|
||||
"refTableName": "connections",
|
||||
"deleteAction": "CASCADE",
|
||||
"columns": [
|
||||
{
|
||||
"columnName": "connection_id",
|
||||
"refColumnName": "id"
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
"primaryKey": {
|
||||
"pureName": "role_connections",
|
||||
"constraintType": "primaryKey",
|
||||
"columns": [
|
||||
{
|
||||
"columnName": "id"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
{
|
||||
"pureName": "role_permissions",
|
||||
"columns": [
|
||||
{
|
||||
"pureName": "role_permissions",
|
||||
"columnName": "id",
|
||||
"dataType": "int",
|
||||
"autoIncrement": true,
|
||||
"notNull": true
|
||||
},
|
||||
{
|
||||
"pureName": "role_permissions",
|
||||
"columnName": "role_id",
|
||||
"dataType": "int",
|
||||
"notNull": true
|
||||
},
|
||||
{
|
||||
"pureName": "role_permissions",
|
||||
"columnName": "permission",
|
||||
"dataType": "varchar(250)",
|
||||
"notNull": true
|
||||
}
|
||||
],
|
||||
"foreignKeys": [
|
||||
{
|
||||
"constraintType": "foreignKey",
|
||||
"pureName": "role_permissions",
|
||||
"refTableName": "roles",
|
||||
"deleteAction": "CASCADE",
|
||||
"columns": [
|
||||
{
|
||||
"columnName": "role_id",
|
||||
"refColumnName": "id"
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
"primaryKey": {
|
||||
"pureName": "role_permissions",
|
||||
"constraintType": "primaryKey",
|
||||
"columns": [
|
||||
{
|
||||
"columnName": "id"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
{
|
||||
"pureName": "users",
|
||||
"columns": [
|
||||
{
|
||||
"pureName": "users",
|
||||
"columnName": "id",
|
||||
"dataType": "int",
|
||||
"autoIncrement": true,
|
||||
"notNull": true
|
||||
},
|
||||
{
|
||||
"pureName": "users",
|
||||
"columnName": "login",
|
||||
"dataType": "varchar(250)",
|
||||
"notNull": false
|
||||
},
|
||||
{
|
||||
"pureName": "users",
|
||||
"columnName": "password",
|
||||
"dataType": "varchar(250)",
|
||||
"notNull": false
|
||||
},
|
||||
{
|
||||
"pureName": "users",
|
||||
"columnName": "email",
|
||||
"dataType": "varchar(250)",
|
||||
"notNull": false
|
||||
}
|
||||
],
|
||||
"foreignKeys": [],
|
||||
"primaryKey": {
|
||||
"pureName": "users",
|
||||
"constraintType": "primaryKey",
|
||||
"columns": [
|
||||
{
|
||||
"columnName": "id"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
{
|
||||
"pureName": "user_connections",
|
||||
"columns": [
|
||||
{
|
||||
"pureName": "user_connections",
|
||||
"columnName": "id",
|
||||
"dataType": "int",
|
||||
"autoIncrement": true,
|
||||
"notNull": true
|
||||
},
|
||||
{
|
||||
"pureName": "user_connections",
|
||||
"columnName": "user_id",
|
||||
"dataType": "int",
|
||||
"notNull": true
|
||||
},
|
||||
{
|
||||
"pureName": "user_connections",
|
||||
"columnName": "connection_id",
|
||||
"dataType": "int",
|
||||
"notNull": true
|
||||
}
|
||||
],
|
||||
"foreignKeys": [
|
||||
{
|
||||
"constraintType": "foreignKey",
|
||||
"pureName": "user_connections",
|
||||
"refTableName": "users",
|
||||
"deleteAction": "CASCADE",
|
||||
"columns": [
|
||||
{
|
||||
"columnName": "user_id",
|
||||
"refColumnName": "id"
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"constraintType": "foreignKey",
|
||||
"pureName": "user_connections",
|
||||
"refTableName": "connections",
|
||||
"deleteAction": "CASCADE",
|
||||
"columns": [
|
||||
{
|
||||
"columnName": "connection_id",
|
||||
"refColumnName": "id"
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
"primaryKey": {
|
||||
"pureName": "user_connections",
|
||||
"constraintType": "primaryKey",
|
||||
"columns": [
|
||||
{
|
||||
"columnName": "id"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
{
|
||||
"pureName": "user_permissions",
|
||||
"columns": [
|
||||
{
|
||||
"pureName": "user_permissions",
|
||||
"columnName": "id",
|
||||
"dataType": "int",
|
||||
"autoIncrement": true,
|
||||
"notNull": true
|
||||
},
|
||||
{
|
||||
"pureName": "user_permissions",
|
||||
"columnName": "user_id",
|
||||
"dataType": "int",
|
||||
"notNull": true
|
||||
},
|
||||
{
|
||||
"pureName": "user_permissions",
|
||||
"columnName": "permission",
|
||||
"dataType": "varchar(250)",
|
||||
"notNull": true
|
||||
}
|
||||
],
|
||||
"foreignKeys": [
|
||||
{
|
||||
"constraintType": "foreignKey",
|
||||
"pureName": "user_permissions",
|
||||
"refTableName": "users",
|
||||
"deleteAction": "CASCADE",
|
||||
"columns": [
|
||||
{
|
||||
"columnName": "user_id",
|
||||
"refColumnName": "id"
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
"primaryKey": {
|
||||
"pureName": "user_permissions",
|
||||
"constraintType": "primaryKey",
|
||||
"columns": [
|
||||
{
|
||||
"columnName": "id"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
{
|
||||
"pureName": "user_roles",
|
||||
"columns": [
|
||||
{
|
||||
"pureName": "user_roles",
|
||||
"columnName": "id",
|
||||
"dataType": "int",
|
||||
"autoIncrement": true,
|
||||
"notNull": true
|
||||
},
|
||||
{
|
||||
"pureName": "user_roles",
|
||||
"columnName": "user_id",
|
||||
"dataType": "int",
|
||||
"notNull": true
|
||||
},
|
||||
{
|
||||
"pureName": "user_roles",
|
||||
"columnName": "role_id",
|
||||
"dataType": "int",
|
||||
"notNull": true
|
||||
}
|
||||
],
|
||||
"foreignKeys": [
|
||||
{
|
||||
"constraintType": "foreignKey",
|
||||
"pureName": "user_roles",
|
||||
"refTableName": "users",
|
||||
"deleteAction": "CASCADE",
|
||||
"columns": [
|
||||
{
|
||||
"columnName": "user_id",
|
||||
"refColumnName": "id"
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"constraintType": "foreignKey",
|
||||
"pureName": "user_roles",
|
||||
"refTableName": "roles",
|
||||
"deleteAction": "CASCADE",
|
||||
"columns": [
|
||||
{
|
||||
"columnName": "role_id",
|
||||
"refColumnName": "id"
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
"primaryKey": {
|
||||
"pureName": "user_roles",
|
||||
"constraintType": "primaryKey",
|
||||
"columns": [
|
||||
{
|
||||
"columnName": "id"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
],
|
||||
"collections": [],
|
||||
"views": [],
|
||||
"matviews": [],
|
||||
"functions": [],
|
||||
"procedures": [],
|
||||
"triggers": []
|
||||
};
|
||||
@@ -60,6 +60,10 @@ class DatastoreProxy {
|
||||
// if (this.disconnected) return;
|
||||
this.subprocess = null;
|
||||
});
|
||||
this.subprocess.on('error', err => {
|
||||
logger.error(extractErrorLogData(err), 'Error in data store subprocess');
|
||||
this.subprocess = null;
|
||||
});
|
||||
this.subprocess.send({ msgtype: 'open', file: this.file });
|
||||
}
|
||||
return this.subprocess;
|
||||
|
||||
@@ -4,11 +4,20 @@ const fsp = require('fs/promises');
|
||||
const semver = require('semver');
|
||||
const currentVersion = require('../currentVersion');
|
||||
const { getLogger, extractErrorLogData } = require('dbgate-tools');
|
||||
const { storageReadConfig } = require('../controllers/storageDb');
|
||||
|
||||
const logger = getLogger('cloudUpgrade');
|
||||
|
||||
async function checkCloudUpgrade() {
|
||||
try {
|
||||
if (process.env.STORAGE_DATABASE) {
|
||||
const settings = await storageReadConfig('settings');
|
||||
if (settings['cloud.useAutoUpgrade'] != 1) {
|
||||
// auto-upgrade not allowed
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
const resp = await axios.default.get('https://api.github.com/repos/dbgate/dbgate/releases/latest');
|
||||
const json = resp.data;
|
||||
const version = json.name.substring(1);
|
||||
@@ -43,7 +52,11 @@ async function checkCloudUpgrade() {
|
||||
|
||||
logger.info(`Downloaded new version from ${zipUrl}`);
|
||||
} else {
|
||||
logger.info(`Checked version ${version} is not newer than ${cloudDownloadedVersion ?? currentVersion.version}, upgrade skippped`);
|
||||
logger.info(
|
||||
`Checked version ${version} is not newer than ${
|
||||
cloudDownloadedVersion ?? currentVersion.version
|
||||
}, upgrade skippped`
|
||||
);
|
||||
}
|
||||
} catch (err) {
|
||||
logger.error(extractErrorLogData(err), 'Error checking cloud upgrade');
|
||||
|
||||
@@ -96,7 +96,9 @@ async function connectUtility(driver, storedConnection, connectionMode, addition
|
||||
...decryptConnection(connectionLoaded),
|
||||
};
|
||||
|
||||
if (!connection.port && driver.defaultPort) connection.port = driver.defaultPort.toString();
|
||||
if (!connection.port && driver.defaultPort) {
|
||||
connection.port = driver.defaultPort.toString();
|
||||
}
|
||||
|
||||
if (connection.useSshTunnel) {
|
||||
const tunnel = await getSshTunnelProxy(connection);
|
||||
|
||||
@@ -5,12 +5,16 @@ const path = require('path');
|
||||
const _ = require('lodash');
|
||||
|
||||
const { datadir } = require('./directories');
|
||||
const { encryptionKeyArg } = require('./processArgs');
|
||||
|
||||
const defaultEncryptionKey = 'mQAUaXhavRGJDxDTXSCg7Ej0xMmGCrx6OKA07DIMBiDcYYkvkaXjTAzPUEHEHEf9';
|
||||
|
||||
let _encryptionKey = null;
|
||||
|
||||
function loadEncryptionKey() {
|
||||
if (encryptionKeyArg) {
|
||||
return encryptionKeyArg;
|
||||
}
|
||||
if (_encryptionKey) {
|
||||
return _encryptionKey;
|
||||
}
|
||||
@@ -55,7 +59,7 @@ async function loadEncryptionKeyFromExternal(storedValue, setStoredValue) {
|
||||
|
||||
let _encryptor = null;
|
||||
|
||||
function getEncryptor() {
|
||||
function getInternalEncryptor() {
|
||||
if (_encryptor) {
|
||||
return _encryptor;
|
||||
}
|
||||
@@ -63,11 +67,25 @@ function getEncryptor() {
|
||||
return _encryptor;
|
||||
}
|
||||
|
||||
// Encrypts a plain-text password into the 'crypt:'-prefixed storage format.
// Empty/null values and values already carrying the 'crypt:' prefix are
// returned unchanged, so the function is safe to call repeatedly.
function encryptPasswordString(password) {
  const skipEncryption = !password || password.startsWith('crypt:');
  if (skipEncryption) {
    return password;
  }
  return 'crypt:' + getInternalEncryptor().encrypt(password);
}
|
||||
|
||||
// Decrypts a 'crypt:'-prefixed password back to plain text.
// Anything without the prefix (including empty/null values) passes through
// untouched.
function decryptPasswordString(password) {
  if (!password) {
    return password;
  }
  const prefix = 'crypt:';
  if (!password.startsWith(prefix)) {
    return password;
  }
  return getInternalEncryptor().decrypt(password.substring(prefix.length));
}
|
||||
|
||||
function encryptObjectPasswordField(obj, field) {
|
||||
if (obj && obj[field] && !obj[field].startsWith('crypt:')) {
|
||||
return {
|
||||
...obj,
|
||||
[field]: 'crypt:' + getEncryptor().encrypt(obj[field]),
|
||||
[field]: 'crypt:' + getInternalEncryptor().encrypt(obj[field]),
|
||||
};
|
||||
}
|
||||
return obj;
|
||||
@@ -77,7 +95,7 @@ function decryptObjectPasswordField(obj, field) {
|
||||
if (obj && obj[field] && obj[field].startsWith('crypt:')) {
|
||||
return {
|
||||
...obj,
|
||||
[field]: getEncryptor().decrypt(obj[field].substring('crypt:'.length)),
|
||||
[field]: getInternalEncryptor().decrypt(obj[field].substring('crypt:'.length)),
|
||||
};
|
||||
}
|
||||
return obj;
|
||||
@@ -131,6 +149,54 @@ function pickSafeConnectionInfo(connection) {
|
||||
// Overrides the active encryption key (e.g. after it was loaded from an
// external source) and invalidates the cached encryptor so it is rebuilt
// lazily with the new key.
function setEncryptionKey(encryptionKey) {
  _encryptionKey = encryptionKey;
  // Drop the cached encryptor; it was built from the previous key.
  _encryptor = null;
  // Published globally so child processes receive the key via the
  // --encryption-key argument (see processArgs.getPassArgs).
  global.ENCRYPTION_KEY = encryptionKey;
}
|
||||
|
||||
// Returns the encryption key previously stored via setEncryptionKey,
// or null when no key has been set this way.
function getEncryptionKey() {
  return _encryptionKey;
}
|
||||
|
||||
// Creates a random 256-bit transport key and wraps it with the built-in
// default encryptor so it can be exchanged with the other party; the
// counterpart createTransportEncryptor unwraps it again.
function generateTransportEncryptionKey() {
  const wrapper = simpleEncryptor.createEncryptor(defaultEncryptionKey);
  const payload = {
    encryptionKey: crypto.randomBytes(32).toString('hex'),
  };
  return wrapper.encrypt(payload);
}
|
||||
|
||||
// Reverses generateTransportEncryptionKey: unwraps the transported payload
// with the built-in default encryptor and returns an encryptor keyed by the
// contained random key.
function createTransportEncryptor(encryptionData) {
  const wrapper = simpleEncryptor.createEncryptor(defaultEncryptionKey);
  const payload = wrapper.decrypt(encryptionData);
  return simpleEncryptor.createEncryptor(payload['encryptionKey']);
}
|
||||
|
||||
// Re-encrypts one 'crypt:'-prefixed field of obj under a different key:
// decrypts with decryptEncryptor, re-encrypts with encryptEncryptor.
// Returns a shallow copy with the field replaced; when the object is
// missing or the field is absent/not encrypted, obj is returned as-is.
function recryptObjectPasswordField(obj, field, decryptEncryptor, encryptEncryptor) {
  const value = obj?.[field];
  if (!value || !value.startsWith('crypt:')) {
    return obj;
  }
  const plain = decryptEncryptor.decrypt(value.substring('crypt:'.length));
  return {
    ...obj,
    [field]: 'crypt:' + encryptEncryptor.encrypt(plain),
  };
}
|
||||
|
||||
// In-place variant of recryptObjectPasswordField: mutates obj[field]
// directly instead of returning a copy. No-op when the object is missing
// or the field is absent/not 'crypt:'-prefixed.
function recryptObjectPasswordFieldInPlace(obj, field, decryptEncryptor, encryptEncryptor) {
  const value = obj?.[field];
  if (!value || !value.startsWith('crypt:')) {
    return;
  }
  const plain = decryptEncryptor.decrypt(value.substring('crypt:'.length));
  obj[field] = 'crypt:' + encryptEncryptor.encrypt(plain);
}
|
||||
|
||||
// Re-keys every encrypted secret stored on a connection object
// (password, sshPassword, sshKeyfilePassword), producing a new object
// whenever any field needed re-encryption.
function recryptConnection(connection, decryptEncryptor, encryptEncryptor) {
  const secretFields = ['password', 'sshPassword', 'sshKeyfilePassword'];
  let result = connection;
  for (const field of secretFields) {
    result = recryptObjectPasswordField(result, field, decryptEncryptor, encryptEncryptor);
  }
  return result;
}
|
||||
|
||||
// Re-keys the encrypted password of a user object; other fields are untouched.
function recryptUser(user, decryptEncryptor, encryptEncryptor) {
  return recryptObjectPasswordField(user, 'password', decryptEncryptor, encryptEncryptor);
}
|
||||
|
||||
module.exports = {
|
||||
@@ -142,4 +208,16 @@ module.exports = {
|
||||
maskConnection,
|
||||
pickSafeConnectionInfo,
|
||||
loadEncryptionKeyFromExternal,
|
||||
getEncryptionKey,
|
||||
setEncryptionKey,
|
||||
encryptPasswordString,
|
||||
decryptPasswordString,
|
||||
|
||||
getInternalEncryptor,
|
||||
recryptConnection,
|
||||
recryptUser,
|
||||
generateTransportEncryptionKey,
|
||||
createTransportEncryptor,
|
||||
recryptObjectPasswordField,
|
||||
recryptObjectPasswordFieldInPlace,
|
||||
};
|
||||
|
||||
77
packages/api/src/utility/extractSingleFileFromZip.js
Normal file
77
packages/api/src/utility/extractSingleFileFromZip.js
Normal file
@@ -0,0 +1,77 @@
|
||||
const yauzl = require('yauzl');
|
||||
const fs = require('fs');
|
||||
const { getLogger, extractErrorLogData } = require('dbgate-tools');
|
||||
const logger = getLogger('extractSingleFileFromZip');
|
||||
/**
 * Extracts a single file from a ZIP using yauzl.
 * Stops reading the rest of the archive once the file is found.
 *
 * @param {string} zipPath - Path to the ZIP file on disk.
 * @param {string} fileInZip - The file path *inside* the ZIP to extract.
 * @param {string} outputPath - Where to write the extracted file on disk.
 * @returns {Promise<boolean>} - Resolves true when the file was extracted,
 *   false when the archive does not contain it. Rejects on any ZIP or I/O error.
 */
function extractSingleFileFromZip(zipPath, fileInZip, outputPath) {
  return new Promise((resolve, reject) => {
    yauzl.open(zipPath, { lazyEntries: true }, (err, zipFile) => {
      if (err) return reject(err);

      let fileFound = false;

      // Start reading the first entry
      zipFile.readEntry();

      zipFile.on('entry', entry => {
        // Compare the entry name to the file we want
        if (entry.fileName === fileInZip) {
          fileFound = true;

          // Open a read stream for this entry
          zipFile.openReadStream(entry, (err, readStream) => {
            if (err) return reject(err);

            // Create a write stream to outputPath
            const writeStream = fs.createWriteStream(outputPath);
            readStream.pipe(writeStream);

            // FIX: the original version attached no 'error' handler to
            // readStream; a truncated/corrupted entry would emit an
            // unhandled stream error and leave this promise pending forever.
            readStream.on('error', readErr => {
              logger.error(extractErrorLogData(readErr), `Error reading "${fileInZip}" from "${zipPath}".`);
              writeStream.destroy();
              reject(readErr);
            });

            // When the read stream ends, we can close the zipFile
            readStream.on('end', () => {
              // We won't read further entries
              zipFile.close();
            });

            // When the file is finished writing, resolve
            writeStream.on('finish', () => {
              logger.info(`File "${fileInZip}" extracted to "${outputPath}".`);
              resolve(true);
            });

            // Handle write errors
            writeStream.on('error', writeErr => {
              logger.error(extractErrorLogData(writeErr), `Error extracting "${fileInZip}" from "${zipPath}".`);
              reject(writeErr);
            });
          });
        } else {
          // Not the file we want; skip to the next entry
          zipFile.readEntry();
        }
      });

      // If we reach the end without finding the file
      zipFile.on('end', () => {
        if (!fileFound) {
          resolve(false);
        }
      });

      // Handle general errors
      zipFile.on('error', err => {
        logger.error(extractErrorLogData(err), `ZIP file error in ${zipPath}.`);
        reject(err);
      });
    });
  });
}

module.exports = extractSingleFileFromZip;
|
||||
@@ -22,6 +22,8 @@ const getMapExport = (geoJson) => {
|
||||
})
|
||||
.addTo(map);
|
||||
|
||||
leaflet.control.scale().addTo(map);
|
||||
|
||||
const geoJsonObj = leaflet
|
||||
.geoJSON(${JSON.stringify(geoJson)}, {
|
||||
style: function () {
|
||||
|
||||
@@ -24,4 +24,15 @@ async function getHealthStatus() {
|
||||
};
|
||||
}
|
||||
|
||||
module.exports = getHealthStatus;
|
||||
/**
 * Returns a minimal health-check payload (Sprinx variant).
 *
 * @returns {Promise<{overallStatus: string, timeStamp: string, timeStampUnix: number}>}
 *   Static OK status plus the current time as an ISO string and a UNIX epoch.
 */
async function getHealthStatusSprinx() {
  // FIX: derive both timestamp fields from a single clock read. The original
  // called new Date() and Date.now() separately, which could straddle a
  // second boundary and yield a timeStamp/timeStampUnix pair describing
  // two different instants.
  const now = new Date();
  return {
    overallStatus: 'OK',
    timeStamp: now.toISOString(),
    timeStampUnix: Math.floor(now.getTime() / 1000),
  };
}
|
||||
|
||||
module.exports = {
|
||||
getHealthStatus,
|
||||
getHealthStatusSprinx,
|
||||
};
|
||||
|
||||
41
packages/api/src/utility/listZipEntries.js
Normal file
41
packages/api/src/utility/listZipEntries.js
Normal file
@@ -0,0 +1,41 @@
|
||||
const yauzl = require('yauzl');
|
||||
const path = require('path');
|
||||
|
||||
/**
 * Lists the files in a ZIP archive using yauzl,
 * returning an array of { fileName, uncompressedSize } objects.
 *
 * @param {string} zipPath - The path to the ZIP file.
 * @returns {Promise<Array<{fileName: string, uncompressedSize: number}>>}
 */
function listZipEntries(zipPath) {
  return new Promise((resolve, reject) => {
    yauzl.open(zipPath, { lazyEntries: true }, (openError, zipfile) => {
      if (openError) {
        reject(openError);
        return;
      }

      const collected = [];

      // Record metadata for each entry; file data itself is never opened.
      zipfile.on('entry', entry => {
        collected.push({
          fileName: entry.fileName,
          uncompressedSize: entry.uncompressedSize,
        });
        // Advance to the next entry (we're only listing, not reading data).
        zipfile.readEntry();
      });

      // All entries consumed - hand back the collected listing.
      zipfile.on('end', () => resolve(collected));

      // Surface any archive-level error to the caller.
      zipfile.on('error', zipError => reject(zipError));

      // Kick off iteration over the archive directory.
      zipfile.readEntry();
    });
  });
}

module.exports = listZipEntries;
|
||||
@@ -17,6 +17,7 @@ const processDisplayName = getNamedArg('--process-display-name');
|
||||
const listenApi = process.argv.includes('--listen-api');
|
||||
const listenApiChild = process.argv.includes('--listen-api-child') || listenApi;
|
||||
const runE2eTests = process.argv.includes('--run-e2e-tests');
|
||||
const encryptionKeyArg = getNamedArg('--encryption-key');
|
||||
|
||||
function getPassArgs() {
|
||||
const res = [];
|
||||
@@ -31,6 +32,9 @@ function getPassArgs() {
|
||||
if (runE2eTests) {
|
||||
res.push('--run-e2e-tests');
|
||||
}
|
||||
if (global['ENCRYPTION_KEY']) {
|
||||
res.push('--encryption-key', global['ENCRYPTION_KEY']);
|
||||
}
|
||||
return res;
|
||||
}
|
||||
|
||||
@@ -45,4 +49,5 @@ module.exports = {
|
||||
listenApiChild,
|
||||
processDisplayName,
|
||||
runE2eTests,
|
||||
encryptionKeyArg,
|
||||
};
|
||||
|
||||
@@ -57,10 +57,21 @@ function callForwardProcess(connection, tunnelConfig, tunnelCacheKey) {
|
||||
}
|
||||
});
|
||||
subprocess.on('exit', code => {
|
||||
logger.info('SSH forward process exited');
|
||||
logger.info(`SSH forward process exited with code ${code}`);
|
||||
delete sshTunnelCache[tunnelCacheKey];
|
||||
if (!promiseHandled) {
|
||||
reject(new Error('SSH forward process exited, try to change "Local host address for SSH connections" in Settings/Connections'));
|
||||
reject(
|
||||
new Error(
|
||||
'SSH forward process exited, try to change "Local host address for SSH connections" in Settings/Connections'
|
||||
)
|
||||
);
|
||||
}
|
||||
});
|
||||
subprocess.on('error', error => {
|
||||
logger.error(extractErrorLogData(error), 'SSH forward process error');
|
||||
delete sshTunnelCache[tunnelCacheKey];
|
||||
if (!promiseHandled) {
|
||||
reject(error);
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
@@ -572,6 +572,27 @@ export function changeSetInsertDocuments(
|
||||
};
|
||||
}
|
||||
|
||||
export function createMergedRowsChangeSet(
|
||||
table: TableInfo,
|
||||
updatedRows: any[],
|
||||
insertedRows: any[],
|
||||
mergeKey: string[]
|
||||
): ChangeSet {
|
||||
const res = createChangeSet();
|
||||
res.updates = updatedRows.map(row => ({
|
||||
pureName: table.pureName,
|
||||
schemaName: table.schemaName,
|
||||
fields: _.omit(row, mergeKey),
|
||||
condition: _.pick(row, mergeKey),
|
||||
}));
|
||||
res.inserts = insertedRows.map(row => ({
|
||||
pureName: table.pureName,
|
||||
schemaName: table.schemaName,
|
||||
fields: row,
|
||||
}));
|
||||
return res;
|
||||
}
|
||||
|
||||
export function changeSetContainsChanges(changeSet: ChangeSet) {
|
||||
if (!changeSet) return false;
|
||||
return (
|
||||
|
||||
@@ -1,326 +0,0 @@
|
||||
import {
|
||||
createAsyncWriteStream,
|
||||
extractErrorLogData,
|
||||
getLogger,
|
||||
runCommandOnDriver,
|
||||
runQueryOnDriver,
|
||||
} from 'dbgate-tools';
|
||||
import { DatabaseInfo, EngineDriver, ForeignKeyInfo, TableInfo } from 'dbgate-types';
|
||||
import _pick from 'lodash/pick';
|
||||
import _omit from 'lodash/omit';
|
||||
|
||||
const logger = getLogger('dataDuplicator');
|
||||
|
||||
export interface DataDuplicatorItem {
|
||||
openStream: () => Promise<ReadableStream>;
|
||||
name: string;
|
||||
operation: 'copy' | 'lookup' | 'insertMissing';
|
||||
matchColumns: string[];
|
||||
}
|
||||
|
||||
export interface DataDuplicatorOptions {
|
||||
rollbackAfterFinish?: boolean;
|
||||
skipRowsWithUnresolvedRefs?: boolean;
|
||||
setNullForUnresolvedNullableRefs?: boolean;
|
||||
}
|
||||
|
||||
class DuplicatorReference {
|
||||
constructor(
|
||||
public base: DuplicatorItemHolder,
|
||||
public ref: DuplicatorItemHolder,
|
||||
public isMandatory: boolean,
|
||||
public foreignKey: ForeignKeyInfo
|
||||
) {}
|
||||
|
||||
get columnName() {
|
||||
return this.foreignKey.columns[0].columnName;
|
||||
}
|
||||
}
|
||||
|
||||
class DuplicatorWeakReference {
|
||||
constructor(public base: DuplicatorItemHolder, public ref: TableInfo, public foreignKey: ForeignKeyInfo) {}
|
||||
|
||||
get columnName() {
|
||||
return this.foreignKey.columns[0].columnName;
|
||||
}
|
||||
}
|
||||
|
||||
class DuplicatorItemHolder {
|
||||
references: DuplicatorReference[] = [];
|
||||
backReferences: DuplicatorReference[] = [];
|
||||
// not mandatory references to entities out of the model
|
||||
weakReferences: DuplicatorWeakReference[] = [];
|
||||
table: TableInfo;
|
||||
isPlanned = false;
|
||||
idMap = {};
|
||||
autoColumn: string;
|
||||
refByColumn: { [columnName: string]: DuplicatorReference } = {};
|
||||
isReferenced: boolean;
|
||||
|
||||
get name() {
|
||||
return this.item.name;
|
||||
}
|
||||
|
||||
constructor(public item: DataDuplicatorItem, public duplicator: DataDuplicator) {
|
||||
this.table = duplicator.db.tables.find(x => x.pureName.toUpperCase() == item.name.toUpperCase());
|
||||
this.autoColumn = this.table.columns.find(x => x.autoIncrement)?.columnName;
|
||||
if (
|
||||
this.table.primaryKey?.columns?.length != 1 ||
|
||||
this.table.primaryKey?.columns?.[0]?.columnName != this.autoColumn
|
||||
) {
|
||||
this.autoColumn = null;
|
||||
}
|
||||
}
|
||||
|
||||
initializeReferences() {
|
||||
for (const fk of this.table.foreignKeys) {
|
||||
if (fk.columns?.length != 1) continue;
|
||||
const refHolder = this.duplicator.itemHolders.find(y => y.name.toUpperCase() == fk.refTableName.toUpperCase());
|
||||
const isMandatory = this.table.columns.find(x => x.columnName == fk.columns[0]?.columnName)?.notNull;
|
||||
if (refHolder == null) {
|
||||
if (!isMandatory) {
|
||||
const weakref = new DuplicatorWeakReference(
|
||||
this,
|
||||
this.duplicator.db.tables.find(x => x.pureName == fk.refTableName),
|
||||
fk
|
||||
);
|
||||
this.weakReferences.push(weakref);
|
||||
}
|
||||
} else {
|
||||
const newref = new DuplicatorReference(this, refHolder, isMandatory, fk);
|
||||
this.references.push(newref);
|
||||
this.refByColumn[newref.columnName] = newref;
|
||||
|
||||
refHolder.isReferenced = true;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
createInsertObject(chunk, weakrefcols: string[]) {
|
||||
const res = _omit(
|
||||
_pick(
|
||||
chunk,
|
||||
this.table.columns.map(x => x.columnName)
|
||||
),
|
||||
[this.autoColumn, ...this.backReferences.map(x => x.columnName), ...weakrefcols]
|
||||
);
|
||||
|
||||
for (const key in res) {
|
||||
const ref = this.refByColumn[key];
|
||||
if (ref) {
|
||||
// remap id
|
||||
res[key] = ref.ref.idMap[res[key]];
|
||||
if (ref.isMandatory && res[key] == null) {
|
||||
// mandatory refertence not matched
|
||||
if (this.duplicator.options.skipRowsWithUnresolvedRefs) {
|
||||
return null;
|
||||
}
|
||||
throw new Error(`Unresolved reference, base=${ref.base.name}, ref=${ref.ref.name}, ${key}=${chunk[key]}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return res;
|
||||
}
|
||||
|
||||
// returns list of columns that are weak references and are not resolved
|
||||
async getMissingWeakRefsForRow(row): Promise<string[]> {
|
||||
if (!this.duplicator.options.setNullForUnresolvedNullableRefs || !this.weakReferences?.length) {
|
||||
return [];
|
||||
}
|
||||
|
||||
const qres = await runQueryOnDriver(this.duplicator.pool, this.duplicator.driver, dmp => {
|
||||
dmp.put('^select ');
|
||||
dmp.putCollection(',', this.weakReferences, weakref => {
|
||||
dmp.put(
|
||||
'(^case ^when ^exists (^select * ^from %f where %i = %v) ^then 1 ^else 0 ^end) as %i',
|
||||
weakref.ref,
|
||||
weakref.foreignKey.columns[0].refColumnName,
|
||||
row[weakref.foreignKey.columns[0].columnName],
|
||||
weakref.foreignKey.columns[0].columnName
|
||||
);
|
||||
});
|
||||
if (this.duplicator.driver.dialect.requireFromDual) {
|
||||
dmp.put(' ^from ^dual');
|
||||
}
|
||||
});
|
||||
const qrow = qres.rows[0];
|
||||
return this.weakReferences.filter(x => qrow[x.columnName] == 0).map(x => x.columnName);
|
||||
}
|
||||
|
||||
async runImport() {
|
||||
const readStream = await this.item.openStream();
|
||||
const driver = this.duplicator.driver;
|
||||
const pool = this.duplicator.pool;
|
||||
let inserted = 0;
|
||||
let mapped = 0;
|
||||
let missing = 0;
|
||||
let skipped = 0;
|
||||
let lastLogged = new Date();
|
||||
|
||||
const existingWeakRefs = {};
|
||||
|
||||
const writeStream = createAsyncWriteStream(this.duplicator.stream, {
|
||||
processItem: async chunk => {
|
||||
if (chunk.__isStreamHeader) {
|
||||
return;
|
||||
}
|
||||
|
||||
const doCopy = async () => {
|
||||
// console.log('chunk', this.name, JSON.stringify(chunk));
|
||||
const weakrefcols = await this.getMissingWeakRefsForRow(chunk);
|
||||
const insertedObj = this.createInsertObject(chunk, weakrefcols);
|
||||
// console.log('insertedObj', this.name, JSON.stringify(insertedObj));
|
||||
if (insertedObj == null) {
|
||||
skipped += 1;
|
||||
return;
|
||||
}
|
||||
let res = await runQueryOnDriver(pool, driver, dmp => {
|
||||
dmp.put(
|
||||
'^insert ^into %f (%,i) ^values (%,v)',
|
||||
this.table,
|
||||
Object.keys(insertedObj),
|
||||
Object.values(insertedObj)
|
||||
);
|
||||
|
||||
if (
|
||||
this.autoColumn &&
|
||||
this.isReferenced &&
|
||||
!this.duplicator.driver.dialect.requireStandaloneSelectForScopeIdentity
|
||||
) {
|
||||
dmp.selectScopeIdentity(this.table);
|
||||
}
|
||||
});
|
||||
inserted += 1;
|
||||
if (this.autoColumn && this.isReferenced) {
|
||||
if (this.duplicator.driver.dialect.requireStandaloneSelectForScopeIdentity) {
|
||||
res = await runQueryOnDriver(pool, driver, dmp => dmp.selectScopeIdentity(this.table));
|
||||
}
|
||||
// console.log('IDRES', JSON.stringify(res));
|
||||
// console.log('*********** ENTRIES OF', res?.rows?.[0]);
|
||||
const resId = Object.entries(res?.rows?.[0])?.[0]?.[1];
|
||||
if (resId != null) {
|
||||
this.idMap[chunk[this.autoColumn]] = resId;
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
switch (this.item.operation) {
|
||||
case 'copy': {
|
||||
await doCopy();
|
||||
break;
|
||||
}
|
||||
case 'insertMissing':
|
||||
case 'lookup': {
|
||||
const res = await runQueryOnDriver(pool, driver, dmp =>
|
||||
dmp.put(
|
||||
'^select %i ^from %f ^where %i = %v',
|
||||
this.autoColumn,
|
||||
this.table,
|
||||
this.item.matchColumns[0],
|
||||
chunk[this.item.matchColumns[0]]
|
||||
)
|
||||
);
|
||||
const resId = Object.entries(res?.rows?.[0])?.[0]?.[1];
|
||||
if (resId != null) {
|
||||
mapped += 1;
|
||||
this.idMap[chunk[this.autoColumn]] = resId;
|
||||
} else if (this.item.operation == 'insertMissing') {
|
||||
await doCopy();
|
||||
} else {
|
||||
missing += 1;
|
||||
}
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if (new Date().getTime() - lastLogged.getTime() > 5000) {
|
||||
logger.info(
|
||||
`Duplicating ${this.item.name} in progress, inserted ${inserted} rows, mapped ${mapped} rows, missing ${missing} rows, skipped ${skipped} rows`
|
||||
);
|
||||
lastLogged = new Date();
|
||||
}
|
||||
// this.idMap[oldId] = newId;
|
||||
},
|
||||
});
|
||||
|
||||
await this.duplicator.copyStream(readStream, writeStream);
|
||||
|
||||
// await this.duplicator.driver.writeQueryStream(this.duplicator.pool, {
|
||||
// mapResultId: (oldId, newId) => {
|
||||
// this.idMap[oldId] = newId;
|
||||
// },
|
||||
// });
|
||||
|
||||
return { inserted, mapped, missing, skipped };
|
||||
}
|
||||
}
|
||||
|
||||
export class DataDuplicator {
|
||||
itemHolders: DuplicatorItemHolder[];
|
||||
itemPlan: DuplicatorItemHolder[] = [];
|
||||
|
||||
constructor(
|
||||
public pool: any,
|
||||
public driver: EngineDriver,
|
||||
public db: DatabaseInfo,
|
||||
public items: DataDuplicatorItem[],
|
||||
public stream,
|
||||
public copyStream: (input, output) => Promise<void>,
|
||||
public options: DataDuplicatorOptions = {}
|
||||
) {
|
||||
this.itemHolders = items.map(x => new DuplicatorItemHolder(x, this));
|
||||
this.itemHolders.forEach(x => x.initializeReferences());
|
||||
}
|
||||
|
||||
findItemToPlan(): DuplicatorItemHolder {
|
||||
for (const item of this.itemHolders) {
|
||||
if (item.isPlanned) continue;
|
||||
if (item.references.every(x => x.ref.isPlanned)) {
|
||||
return item;
|
||||
}
|
||||
}
|
||||
for (const item of this.itemHolders) {
|
||||
if (item.isPlanned) continue;
|
||||
if (item.references.every(x => x.ref.isPlanned || !x.isMandatory)) {
|
||||
const backReferences = item.references.filter(x => !x.ref.isPlanned);
|
||||
item.backReferences = backReferences;
|
||||
return item;
|
||||
}
|
||||
}
|
||||
throw new Error('Cycle in mandatory references');
|
||||
}
|
||||
|
||||
createPlan() {
|
||||
while (this.itemPlan.length < this.itemHolders.length) {
|
||||
const item = this.findItemToPlan();
|
||||
item.isPlanned = true;
|
||||
this.itemPlan.push(item);
|
||||
}
|
||||
}
|
||||
|
||||
async run() {
|
||||
this.createPlan();
|
||||
|
||||
await runCommandOnDriver(this.pool, this.driver, dmp => dmp.beginTransaction());
|
||||
try {
|
||||
for (const item of this.itemPlan) {
|
||||
const stats = await item.runImport();
|
||||
logger.info(
|
||||
`Duplicated ${item.name}, inserted ${stats.inserted} rows, mapped ${stats.mapped} rows, missing ${stats.missing} rows, skipped ${stats.skipped} rows`
|
||||
);
|
||||
}
|
||||
} catch (err) {
|
||||
logger.error(extractErrorLogData(err), `Failed duplicator job, rollbacking. ${err.message}`);
|
||||
await runCommandOnDriver(this.pool, this.driver, dmp => dmp.rollbackTransaction());
|
||||
return;
|
||||
}
|
||||
if (this.options.rollbackAfterFinish) {
|
||||
logger.info('Rollbacking transaction, nothing was changed');
|
||||
await runCommandOnDriver(this.pool, this.driver, dmp => dmp.rollbackTransaction());
|
||||
} else {
|
||||
logger.info('Committing duplicator transaction');
|
||||
await runCommandOnDriver(this.pool, this.driver, dmp => dmp.commitTransaction());
|
||||
}
|
||||
}
|
||||
}
|
||||
510
packages/datalib/src/DataReplicator.ts
Normal file
510
packages/datalib/src/DataReplicator.ts
Normal file
@@ -0,0 +1,510 @@
|
||||
import {
|
||||
createAsyncWriteStream,
|
||||
extractErrorLogData,
|
||||
getLogger,
|
||||
isTypeNumber,
|
||||
runCommandOnDriver,
|
||||
runQueryOnDriver,
|
||||
SqlDumper,
|
||||
} from 'dbgate-tools';
|
||||
import { DatabaseInfo, EngineDriver, ForeignKeyInfo, NamedObjectInfo, QueryResult, TableInfo } from 'dbgate-types';
|
||||
import _pick from 'lodash/pick';
|
||||
import _omit from 'lodash/omit';
|
||||
import stableStringify from 'json-stable-stringify';
|
||||
|
||||
const logger = getLogger('dataReplicator');
|
||||
|
||||
export interface DataReplicatorItem {
|
||||
openStream: () => Promise<ReadableStream>;
|
||||
name: string;
|
||||
findExisting: (row: any) => boolean;
|
||||
createNew: (row: any) => boolean;
|
||||
updateExisting: (row: any) => boolean;
|
||||
deleteMissing: boolean;
|
||||
deleteRestrictionColumns: string[];
|
||||
matchColumns: string[];
|
||||
}
|
||||
|
||||
export interface DataReplicatorOptions {
|
||||
rollbackAfterFinish?: boolean;
|
||||
skipRowsWithUnresolvedRefs?: boolean;
|
||||
setNullForUnresolvedNullableRefs?: boolean;
|
||||
generateSqlScript?: boolean;
|
||||
runid?: string;
|
||||
}
|
||||
|
||||
class ReplicatorReference {
|
||||
constructor(
|
||||
public base: ReplicatorItemHolder,
|
||||
public ref: ReplicatorItemHolder,
|
||||
public isMandatory: boolean,
|
||||
public foreignKey: ForeignKeyInfo
|
||||
) {}
|
||||
|
||||
get columnName() {
|
||||
return this.foreignKey.columns[0].columnName;
|
||||
}
|
||||
}
|
||||
|
||||
class ReplicatorWeakReference {
|
||||
constructor(public base: ReplicatorItemHolder, public ref: TableInfo, public foreignKey: ForeignKeyInfo) {}
|
||||
|
||||
get columnName() {
|
||||
return this.foreignKey.columns[0].columnName;
|
||||
}
|
||||
}
|
||||
|
||||
class ReplicatorItemHolder {
|
||||
references: ReplicatorReference[] = [];
|
||||
backReferences: ReplicatorReference[] = [];
|
||||
// not mandatory references to entities out of the model
|
||||
weakReferences: ReplicatorWeakReference[] = [];
|
||||
table: TableInfo;
|
||||
isPlanned = false;
|
||||
idMap = {};
|
||||
autoColumn: string;
|
||||
isManualAutoColumn: boolean;
|
||||
refByColumn: { [columnName: string]: ReplicatorReference } = {};
|
||||
isReferenced: boolean;
|
||||
|
||||
get name() {
|
||||
return this.item.name;
|
||||
}
|
||||
|
||||
constructor(public item: DataReplicatorItem, public replicator: DataReplicator) {
|
||||
this.table = replicator.db.tables.find(x => x.pureName.toUpperCase() == item.name.toUpperCase());
|
||||
this.autoColumn = this.table.columns.find(x => x.autoIncrement)?.columnName;
|
||||
if (
|
||||
this.table.primaryKey?.columns?.length != 1 ||
|
||||
this.table.primaryKey?.columns?.[0]?.columnName != this.autoColumn
|
||||
) {
|
||||
this.autoColumn = null;
|
||||
}
|
||||
if (!this.autoColumn && this.table.primaryKey?.columns?.length == 1) {
|
||||
const name = this.table.primaryKey.columns[0].columnName;
|
||||
const column = this.table.columns.find(x => x.columnName == name);
|
||||
if (isTypeNumber(column?.dataType)) {
|
||||
this.autoColumn = name;
|
||||
this.isManualAutoColumn = true;
|
||||
}
|
||||
}
|
||||
if (this.autoColumn && this.replicator.options.generateSqlScript) {
|
||||
this.isManualAutoColumn = true;
|
||||
}
|
||||
}
|
||||
|
||||
initializeReferences() {
|
||||
for (const fk of this.table.foreignKeys) {
|
||||
if (fk.columns?.length != 1) continue;
|
||||
const refHolder = this.replicator.itemHolders.find(y => y.name.toUpperCase() == fk.refTableName.toUpperCase());
|
||||
const isMandatory = this.table.columns.find(x => x.columnName == fk.columns[0]?.columnName)?.notNull;
|
||||
if (refHolder == null) {
|
||||
if (!isMandatory) {
|
||||
const weakref = new ReplicatorWeakReference(
|
||||
this,
|
||||
this.replicator.db.tables.find(x => x.pureName == fk.refTableName),
|
||||
fk
|
||||
);
|
||||
this.weakReferences.push(weakref);
|
||||
}
|
||||
} else {
|
||||
const newref = new ReplicatorReference(this, refHolder, isMandatory, fk);
|
||||
this.references.push(newref);
|
||||
this.refByColumn[newref.columnName] = newref;
|
||||
|
||||
refHolder.isReferenced = true;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
createInsertObject(chunk, weakrefcols?: string[]) {
|
||||
const res = _omit(
|
||||
_pick(
|
||||
chunk,
|
||||
this.table.columns.map(x => x.columnName)
|
||||
),
|
||||
[this.autoColumn, ...this.backReferences.map(x => x.columnName), ...(weakrefcols ? weakrefcols : [])]
|
||||
);
|
||||
|
||||
for (const key in res) {
|
||||
const ref = this.refByColumn[key];
|
||||
if (ref) {
|
||||
// remap id
|
||||
const oldId = res[key];
|
||||
res[key] = ref.ref.idMap[oldId];
|
||||
if (ref.isMandatory && res[key] == null) {
|
||||
// mandatory refertence not matched
|
||||
if (this.replicator.options.skipRowsWithUnresolvedRefs) {
|
||||
return null;
|
||||
}
|
||||
throw new Error(`Unresolved reference, base=${ref.base.name}, ref=${ref.ref.name}, ${key}=${chunk[key]}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return res;
|
||||
}
|
||||
|
||||
createUpdateObject(chunk) {
|
||||
const res = _omit(
|
||||
_pick(
|
||||
chunk,
|
||||
this.table.columns.map(x => x.columnName)
|
||||
),
|
||||
[this.autoColumn, ...this.backReferences.map(x => x.columnName), ...this.references.map(x => x.columnName)]
|
||||
);
|
||||
|
||||
return res;
|
||||
}
|
||||
|
||||
// returns list of columns that are weak references and are not resolved
|
||||
async getMissingWeakRefsForRow(row): Promise<string[]> {
|
||||
if (!this.replicator.options.setNullForUnresolvedNullableRefs || !this.weakReferences?.length) {
|
||||
return [];
|
||||
}
|
||||
|
||||
const qres = await runQueryOnDriver(this.replicator.pool, this.replicator.driver, dmp => {
|
||||
dmp.put('^select ');
|
||||
dmp.putCollection(',', this.weakReferences, weakref => {
|
||||
dmp.put(
|
||||
'(^case ^when ^exists (^select * ^from %f where %i = %v) ^then 1 ^else 0 ^end) as %i',
|
||||
weakref.ref,
|
||||
weakref.foreignKey.columns[0].refColumnName,
|
||||
row[weakref.foreignKey.columns[0].columnName],
|
||||
weakref.foreignKey.columns[0].columnName
|
||||
);
|
||||
});
|
||||
if (this.replicator.driver.dialect.requireFromDual) {
|
||||
dmp.put(' ^from ^dual');
|
||||
}
|
||||
});
|
||||
const qrow = qres.rows[0];
|
||||
return this.weakReferences.filter(x => qrow[x.columnName] == 0).map(x => x.columnName);
|
||||
}
|
||||
|
||||
async runImport() {
|
||||
const readStream = await this.item.openStream();
|
||||
const driver = this.replicator.driver;
|
||||
const pool = this.replicator.pool;
|
||||
let inserted = 0;
|
||||
let mapped = 0;
|
||||
let updated = 0;
|
||||
let deleted = 0;
|
||||
let missing = 0;
|
||||
let skipped = 0;
|
||||
let lastLogged = new Date();
|
||||
|
||||
const { deleteMissing, deleteRestrictionColumns } = this.item;
|
||||
const deleteRestrictions = {};
|
||||
const usedKeyRows = {};
|
||||
|
||||
const writeStream = createAsyncWriteStream(this.replicator.stream, {
|
||||
processItem: async chunk => {
|
||||
if (chunk.__isStreamHeader) {
|
||||
return;
|
||||
}
|
||||
|
||||
const doFind = async () => {
|
||||
let insertedObj = this.createInsertObject(chunk);
|
||||
|
||||
const res = await runQueryOnDriver(pool, driver, dmp => {
|
||||
dmp.put('^select %i ^from %f ^where ', this.autoColumn, this.table);
|
||||
dmp.putCollection(' and ', this.item.matchColumns, x => {
|
||||
dmp.put('%i = %v', x, insertedObj[x]);
|
||||
});
|
||||
});
|
||||
const resId = Object.entries(res?.rows?.[0] || {})?.[0]?.[1];
|
||||
if (resId != null) {
|
||||
mapped += 1;
|
||||
this.idMap[chunk[this.autoColumn]] = resId;
|
||||
}
|
||||
return resId;
|
||||
};
|
||||
|
||||
const doUpdate = async recordId => {
|
||||
const updateObj = this.createUpdateObject(chunk);
|
||||
if (Object.keys(updateObj).length == 0) {
|
||||
skipped += 1;
|
||||
return;
|
||||
}
|
||||
|
||||
await this.replicator.runDumperCommand(dmp => {
|
||||
dmp.put('^update %f ^ set ', this.table);
|
||||
dmp.putCollection(',', Object.keys(updateObj), x => {
|
||||
dmp.put('%i = %v', x, updateObj[x]);
|
||||
});
|
||||
dmp.put(' ^where %i = %v', this.autoColumn, recordId);
|
||||
dmp.endCommand();
|
||||
});
|
||||
updated += 1;
|
||||
};
|
||||
|
||||
const doInsert = async () => {
|
||||
// console.log('chunk', this.name, JSON.stringify(chunk));
|
||||
const weakrefcols = await this.getMissingWeakRefsForRow(chunk);
|
||||
let insertedObj = this.createInsertObject(chunk, weakrefcols);
|
||||
// console.log('insertedObj', this.name, JSON.stringify(insertedObj));
|
||||
if (insertedObj == null) {
|
||||
skipped += 1;
|
||||
return;
|
||||
}
|
||||
|
||||
if (this.isManualAutoColumn) {
|
||||
const maxId = await this.replicator.generateIdentityValue(this.autoColumn, this.table);
|
||||
insertedObj = {
|
||||
...insertedObj,
|
||||
[this.autoColumn]: maxId,
|
||||
};
|
||||
this.idMap[chunk[this.autoColumn]] = maxId;
|
||||
}
|
||||
|
||||
let res = await this.replicator.runDumperQuery(dmp => {
|
||||
dmp.put(
|
||||
'^insert ^into %f (%,i) ^values (%,v)',
|
||||
this.table,
|
||||
Object.keys(insertedObj),
|
||||
Object.values(insertedObj)
|
||||
);
|
||||
dmp.endCommand();
|
||||
|
||||
if (
|
||||
this.autoColumn &&
|
||||
this.isReferenced &&
|
||||
!this.replicator.driver.dialect.requireStandaloneSelectForScopeIdentity &&
|
||||
!this.isManualAutoColumn
|
||||
) {
|
||||
dmp.selectScopeIdentity(this.table);
|
||||
}
|
||||
});
|
||||
inserted += 1;
|
||||
if (this.autoColumn && this.isReferenced && !this.isManualAutoColumn) {
|
||||
if (this.replicator.driver.dialect.requireStandaloneSelectForScopeIdentity) {
|
||||
res = await runQueryOnDriver(pool, driver, dmp => dmp.selectScopeIdentity(this.table));
|
||||
}
|
||||
// console.log('IDRES', JSON.stringify(res));
|
||||
// console.log('*********** ENTRIES OF', res?.rows?.[0]);
|
||||
const resId = Object.entries(res?.rows?.[0])?.[0]?.[1];
|
||||
if (resId != null) {
|
||||
this.idMap[chunk[this.autoColumn]] = resId;
|
||||
}
|
||||
return resId;
|
||||
}
|
||||
};
|
||||
|
||||
const doMarkDelete = () => {
|
||||
const insertedObj = this.createInsertObject(chunk);
|
||||
if (deleteRestrictionColumns?.length > 0) {
|
||||
const restriction = _pick(insertedObj, deleteRestrictionColumns);
|
||||
const key = stableStringify(restriction);
|
||||
deleteRestrictions[key] = restriction;
|
||||
}
|
||||
|
||||
const usedKey = _pick(insertedObj, this.item.matchColumns);
|
||||
usedKeyRows[stableStringify(usedKey)] = usedKey;
|
||||
};
|
||||
|
||||
const findExisting = this.item.findExisting(chunk);
|
||||
const updateExisting = this.item.updateExisting(chunk);
|
||||
const createNew = this.item.createNew(chunk);
|
||||
|
||||
if (deleteMissing) {
|
||||
doMarkDelete();
|
||||
}
|
||||
|
||||
let recordId = null;
|
||||
if (findExisting) {
|
||||
recordId = await doFind();
|
||||
}
|
||||
|
||||
if (updateExisting && recordId != null) {
|
||||
await doUpdate(recordId);
|
||||
}
|
||||
|
||||
if (createNew && recordId == null) {
|
||||
recordId = await doInsert();
|
||||
}
|
||||
|
||||
if (recordId == null && findExisting) {
|
||||
missing += 1;
|
||||
}
|
||||
|
||||
if (new Date().getTime() - lastLogged.getTime() > 5000) {
|
||||
logger.info(
|
||||
`Replicating ${this.item.name} in progress, inserted ${inserted} rows, mapped ${mapped} rows, missing ${missing} rows, skipped ${skipped} rows, updated ${updated} rows`
|
||||
);
|
||||
lastLogged = new Date();
|
||||
}
|
||||
// this.idMap[oldId] = newId;
|
||||
},
|
||||
});
|
||||
|
||||
const dumpConditionArray = (dmp: SqlDumper, array: any[], positive: boolean) => {
|
||||
dmp.putCollection(positive ? ' or ' : ' and ', array, x => {
|
||||
dmp.put('(');
|
||||
dmp.putCollection(positive ? ' and ' : ' or ', Object.keys(x), y => {
|
||||
dmp.put(positive ? '%i = %v' : 'not (%i = %v)', y, x[y]);
|
||||
});
|
||||
dmp.put(')');
|
||||
});
|
||||
};
|
||||
const dumpDeleteCondition = (dmp: SqlDumper) => {
|
||||
const deleteRestrictionValues = Object.values(deleteRestrictions);
|
||||
const usedKeyRowsValues = Object.values(usedKeyRows);
|
||||
|
||||
if (deleteRestrictionValues.length == 0 && usedKeyRowsValues.length == 0) {
|
||||
return;
|
||||
}
|
||||
|
||||
dmp.put(' ^where ');
|
||||
if (deleteRestrictionColumns?.length > 0) {
|
||||
dmp.put('(');
|
||||
dumpConditionArray(dmp, deleteRestrictionValues, true);
|
||||
dmp.put(')');
|
||||
if (usedKeyRowsValues.length > 0) {
|
||||
dmp.put(' ^and ');
|
||||
}
|
||||
}
|
||||
dumpConditionArray(dmp, Object.values(usedKeyRows), false);
|
||||
};
|
||||
const doDelete = async () => {
|
||||
const countRes = await runQueryOnDriver(pool, driver, dmp => {
|
||||
dmp.put('^select count(*) as ~cnt ^from %f', this.table);
|
||||
dumpDeleteCondition(dmp);
|
||||
dmp.endCommand();
|
||||
});
|
||||
const count = parseInt(countRes.rows[0].cnt);
|
||||
if (count > 0) {
|
||||
await this.replicator.runDumperCommand(dmp => {
|
||||
dmp.put('^delete ^from %f', this.table);
|
||||
dumpDeleteCondition(dmp);
|
||||
dmp.endCommand();
|
||||
});
|
||||
deleted += count;
|
||||
}
|
||||
};
|
||||
|
||||
await this.replicator.copyStream(readStream, writeStream, {});
|
||||
|
||||
if (deleteMissing) {
|
||||
await doDelete();
|
||||
}
|
||||
|
||||
// await this.replicator.driver.writeQueryStream(this.replicator.pool, {
|
||||
// mapResultId: (oldId, newId) => {
|
||||
// this.idMap[oldId] = newId;
|
||||
// },
|
||||
// });
|
||||
|
||||
return { inserted, mapped, missing, skipped, updated, deleted };
|
||||
}
|
||||
}
|
||||
|
||||
export class DataReplicator {
|
||||
itemHolders: ReplicatorItemHolder[];
|
||||
itemPlan: ReplicatorItemHolder[] = [];
|
||||
result: string = '';
|
||||
dumper: SqlDumper;
|
||||
identityValues: { [fullTableName: string]: number } = {};
|
||||
|
||||
constructor(
|
||||
public pool: any,
|
||||
public driver: EngineDriver,
|
||||
public db: DatabaseInfo,
|
||||
public items: DataReplicatorItem[],
|
||||
public stream,
|
||||
public copyStream: (input, output, options) => Promise<void>,
|
||||
public options: DataReplicatorOptions = {}
|
||||
) {
|
||||
this.itemHolders = items.map(x => new ReplicatorItemHolder(x, this));
|
||||
this.itemHolders.forEach(x => x.initializeReferences());
|
||||
// @ts-ignore
|
||||
this.dumper = driver.createDumper();
|
||||
}
|
||||
|
||||
findItemToPlan(): ReplicatorItemHolder {
|
||||
for (const item of this.itemHolders) {
|
||||
if (item.isPlanned) continue;
|
||||
if (item.references.every(x => x.ref.isPlanned)) {
|
||||
return item;
|
||||
}
|
||||
}
|
||||
for (const item of this.itemHolders) {
|
||||
if (item.isPlanned) continue;
|
||||
if (item.references.every(x => x.ref.isPlanned || !x.isMandatory)) {
|
||||
const backReferences = item.references.filter(x => !x.ref.isPlanned);
|
||||
item.backReferences = backReferences;
|
||||
return item;
|
||||
}
|
||||
}
|
||||
throw new Error('Cycle in mandatory references');
|
||||
}
|
||||
|
||||
createPlan() {
|
||||
while (this.itemPlan.length < this.itemHolders.length) {
|
||||
const item = this.findItemToPlan();
|
||||
item.isPlanned = true;
|
||||
this.itemPlan.push(item);
|
||||
}
|
||||
}
|
||||
|
||||
async runDumperCommand(cmd: (dmp: SqlDumper) => void | string): Promise<void> {
|
||||
if (this.options.generateSqlScript) {
|
||||
cmd(this.dumper);
|
||||
} else {
|
||||
await runCommandOnDriver(this.pool, this.driver, cmd);
|
||||
}
|
||||
}
|
||||
|
||||
async runDumperQuery(cmd: (dmp: SqlDumper) => void | string): Promise<QueryResult> {
|
||||
if (this.options.generateSqlScript) {
|
||||
cmd(this.dumper);
|
||||
return {
|
||||
rows: [],
|
||||
};
|
||||
} else {
|
||||
return await runQueryOnDriver(this.pool, this.driver, cmd);
|
||||
}
|
||||
}
|
||||
|
||||
async generateIdentityValue(column: string, table: NamedObjectInfo): Promise<number> {
|
||||
const tableKey = `${table.schemaName}.${table.pureName}`;
|
||||
if (!(tableKey in this.identityValues)) {
|
||||
const max = await runQueryOnDriver(this.pool, this.driver, dmp => {
|
||||
dmp.put('^select max(%i) as ~maxid ^from %f', column, table);
|
||||
});
|
||||
const maxId = Math.max(max.rows[0]['maxid'] ?? 0, 0) + 1;
|
||||
this.identityValues[tableKey] = maxId;
|
||||
return maxId;
|
||||
}
|
||||
|
||||
this.identityValues[tableKey] += 1;
|
||||
return this.identityValues[tableKey];
|
||||
}
|
||||
|
||||
async run() {
|
||||
this.createPlan();
|
||||
|
||||
await this.runDumperCommand(dmp => dmp.beginTransaction());
|
||||
try {
|
||||
for (const item of this.itemPlan) {
|
||||
const stats = await item.runImport();
|
||||
logger.info(
|
||||
`Replicated ${item.name}, inserted ${stats.inserted} rows, mapped ${stats.mapped} rows, missing ${stats.missing} rows, skipped ${stats.skipped} rows, updated ${stats.updated} rows, deleted ${stats.deleted} rows`
|
||||
);
|
||||
}
|
||||
} catch (err) {
|
||||
logger.error(extractErrorLogData(err), `Failed replicator job, rollbacking. ${err.message}`);
|
||||
await this.runDumperCommand(dmp => dmp.rollbackTransaction());
|
||||
return;
|
||||
}
|
||||
if (this.options.rollbackAfterFinish) {
|
||||
logger.info('Rollbacking transaction, nothing was changed');
|
||||
await this.runDumperCommand(dmp => dmp.rollbackTransaction());
|
||||
} else {
|
||||
logger.info('Committing replicator transaction');
|
||||
await this.runDumperCommand(dmp => dmp.commitTransaction());
|
||||
}
|
||||
|
||||
this.result = this.dumper.s;
|
||||
}
|
||||
}
|
||||
@@ -18,7 +18,7 @@ export * from './processPerspectiveDefaultColunns';
|
||||
export * from './PerspectiveDataPattern';
|
||||
export * from './PerspectiveDataLoader';
|
||||
export * from './perspectiveTools';
|
||||
export * from './DataDuplicator';
|
||||
export * from './DataReplicator';
|
||||
export * from './FreeTableGridDisplay';
|
||||
export * from './FreeTableModel';
|
||||
export * from './CustomGridDisplay';
|
||||
|
||||
@@ -1,6 +1,9 @@
|
||||
import { arrayToHexString, isTypeDateTime } from 'dbgate-tools';
|
||||
import { arrayToHexString, evalFilterBehaviour, isTypeDateTime } from 'dbgate-tools';
|
||||
import { format, toDate } from 'date-fns';
|
||||
import _isString from 'lodash/isString';
|
||||
import _cloneDeepWith from 'lodash/cloneDeepWith';
|
||||
import { Condition, Expression } from 'dbgate-sqltree';
|
||||
import { parseFilter } from './parseFilter';
|
||||
|
||||
export type FilterMultipleValuesMode = 'is' | 'is_not' | 'contains' | 'begins' | 'ends';
|
||||
|
||||
@@ -61,3 +64,29 @@ export function createMultiLineFilter(mode: FilterMultipleValuesMode, text: stri
|
||||
}
|
||||
return res;
|
||||
}
|
||||
|
||||
export function compileCompoudEvalCondition(filters: { [column: string]: string }): Condition {
|
||||
if (!filters) return null;
|
||||
const conditions = [];
|
||||
for (const name in filters) {
|
||||
try {
|
||||
const condition = parseFilter(filters[name], evalFilterBehaviour);
|
||||
const replaced = _cloneDeepWith(condition, (expr: Expression) => {
|
||||
if (expr.exprType == 'placeholder')
|
||||
return {
|
||||
exprType: 'column',
|
||||
columnName: name,
|
||||
};
|
||||
});
|
||||
conditions.push(replaced);
|
||||
} catch (err) {
|
||||
// filter parse error - ignore filter
|
||||
}
|
||||
}
|
||||
|
||||
if (conditions.length == 0) return null;
|
||||
return {
|
||||
conditionType: 'and',
|
||||
conditions,
|
||||
};
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import type { EngineDriver, SqlDumper } from 'dbgate-types';
|
||||
import { Command, Condition } from './types';
|
||||
import { Command, Condition, Select, Source } from './types';
|
||||
import { dumpSqlCommand } from './dumpSqlCommand';
|
||||
|
||||
export function treeToSql<T>(driver: EngineDriver, object: T, func: (dmp: SqlDumper, obj: T) => void) {
|
||||
@@ -43,3 +43,43 @@ export function mergeConditions(condition1: Condition, condition2: Condition): C
|
||||
conditions: [condition1, condition2],
|
||||
};
|
||||
}
|
||||
|
||||
export function selectKeysFromTable(options: {
|
||||
pureName: string;
|
||||
schemaName: string;
|
||||
keyColumns: [];
|
||||
loadKeys: any[][];
|
||||
}): Select {
|
||||
const source: Source = {
|
||||
name: { pureName: options.pureName, schemaName: options.schemaName },
|
||||
};
|
||||
const res: Select = {
|
||||
commandType: 'select',
|
||||
columns: options.keyColumns.map(col => ({
|
||||
exprType: 'column',
|
||||
columnName: col,
|
||||
source,
|
||||
})),
|
||||
from: source,
|
||||
where: {
|
||||
conditionType: 'or',
|
||||
conditions: options.loadKeys.map(key => ({
|
||||
conditionType: 'and',
|
||||
conditions: key.map((keyValue, index) => ({
|
||||
conditionType: 'binary',
|
||||
operator: '=',
|
||||
left: {
|
||||
exprType: 'column',
|
||||
columnName: options.keyColumns[index],
|
||||
source,
|
||||
},
|
||||
right: {
|
||||
exprType: 'value',
|
||||
value: keyValue,
|
||||
},
|
||||
})),
|
||||
})),
|
||||
},
|
||||
};
|
||||
return res;
|
||||
}
|
||||
|
||||
@@ -54,8 +54,8 @@ export class ScriptWriter {
|
||||
this._put(`await dbgateApi.importDatabase(${JSON.stringify(options)});`);
|
||||
}
|
||||
|
||||
dataDuplicator(options) {
|
||||
this._put(`await dbgateApi.dataDuplicator(${JSON.stringify(options, null, 2)});`);
|
||||
dataReplicator(options) {
|
||||
this._put(`await dbgateApi.dataReplicator(${JSON.stringify(options, null, 2)});`);
|
||||
}
|
||||
|
||||
comment(s) {
|
||||
@@ -72,6 +72,10 @@ export class ScriptWriter {
|
||||
|
||||
return prefix + this.s;
|
||||
}
|
||||
|
||||
zipDirectory(inputDirectory, outputFile) {
|
||||
this._put(`await dbgateApi.zipDirectory('${inputDirectory}', '${outputFile}');`);
|
||||
}
|
||||
}
|
||||
|
||||
export class ScriptWriterJson {
|
||||
@@ -138,13 +142,21 @@ export class ScriptWriterJson {
|
||||
});
|
||||
}
|
||||
|
||||
dataDuplicator(options) {
|
||||
dataReplicator(options) {
|
||||
this.commands.push({
|
||||
type: 'dataDuplicator',
|
||||
type: 'dataReplicator',
|
||||
options,
|
||||
});
|
||||
}
|
||||
|
||||
zipDirectory(inputDirectory, outputFile) {
|
||||
this.commands.push({
|
||||
type: 'zipDirectory',
|
||||
inputDirectory,
|
||||
outputFile,
|
||||
});
|
||||
}
|
||||
|
||||
getScript(schedule = null) {
|
||||
return {
|
||||
type: 'json',
|
||||
@@ -185,8 +197,11 @@ export function jsonScriptToJavascript(json) {
|
||||
case 'importDatabase':
|
||||
script.importDatabase(cmd.options);
|
||||
break;
|
||||
case 'dataDuplicator':
|
||||
script.dataDuplicator(cmd.options);
|
||||
case 'dataReplicator':
|
||||
script.dataReplicator(cmd.options);
|
||||
break;
|
||||
case 'zipDirectory':
|
||||
script.zipDirectory(cmd.inputDirectory, cmd.outputFile);
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -7,7 +7,7 @@ export function isTypeNumeric(dataType) {
|
||||
}
|
||||
|
||||
export function isTypeFloat(dataType) {
|
||||
return dataType && /float|single|double/i.test(dataType);
|
||||
return dataType && /float|single|double|number/i.test(dataType);
|
||||
}
|
||||
|
||||
export function isTypeNumber(dataType) {
|
||||
|
||||
@@ -100,7 +100,9 @@ export function createBulkInsertStreamBase(driver: EngineDriver, stream, dbhan,
|
||||
dmp.putRaw(';');
|
||||
// require('fs').writeFileSync('/home/jena/test.sql', dmp.s);
|
||||
// console.log(dmp.s);
|
||||
await driver.query(dbhan, dmp.s, { discardResult: true });
|
||||
if (rows.length > 0) {
|
||||
await driver.query(dbhan, dmp.s, { discardResult: true });
|
||||
}
|
||||
writable.rowsReporter.add(rows.length);
|
||||
} else {
|
||||
for (const row of rows) {
|
||||
|
||||
@@ -549,3 +549,20 @@ export function pinoLogRecordToMessageRecord(logRecord, defaultSeverity = 'info'
|
||||
severity: levelToSeverity[level] ?? defaultSeverity,
|
||||
};
|
||||
}
|
||||
|
||||
export function jsonLinesStringify(jsonArray: any[]): string {
|
||||
return jsonArray.map(json => JSON.stringify(json)).join('\n');
|
||||
}
|
||||
export function jsonLinesParse(jsonLines: string): any[] {
|
||||
return jsonLines
|
||||
.split('\n')
|
||||
.filter(x => x.trim())
|
||||
.map(line => {
|
||||
try {
|
||||
return JSON.parse(line);
|
||||
} catch (e) {
|
||||
return null;
|
||||
}
|
||||
})
|
||||
.filter(x => x);
|
||||
}
|
||||
|
||||
2
packages/types/test-engines.d.ts
vendored
2
packages/types/test-engines.d.ts
vendored
@@ -31,7 +31,7 @@ export type TestEngineInfo = {
|
||||
skipUnique?: boolean;
|
||||
skipAutoIncrement?: boolean;
|
||||
skipPkColumnTesting?: boolean;
|
||||
skipDataDuplicator?: boolean;
|
||||
skipDataReplicator?: boolean;
|
||||
skipDeploy?: boolean;
|
||||
skipStringLength?: boolean;
|
||||
skipChangeColumn?: boolean;
|
||||
|
||||
@@ -157,6 +157,7 @@
|
||||
}
|
||||
|
||||
.snackbar-container {
|
||||
z-index: 1000;
|
||||
position: fixed;
|
||||
right: 0;
|
||||
bottom: var(--dim-statusbar-height);
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
<script lang="ts" context="module">
|
||||
function openArchive(fileName, folderName) {
|
||||
async function openArchive(fileName, folderName) {
|
||||
openNewTab({
|
||||
title: fileName,
|
||||
icon: 'img archive',
|
||||
@@ -10,17 +10,21 @@
|
||||
archiveFolder: folderName,
|
||||
},
|
||||
});
|
||||
// }
|
||||
}
|
||||
|
||||
async function openTextFile(fileName, fileType, folderName, tabComponent, icon) {
|
||||
const connProps: any = {};
|
||||
let tooltip = undefined;
|
||||
const isZipped = folderName.endsWith('.zip');
|
||||
|
||||
const resp = await apiCall('files/load', {
|
||||
folder: 'archive:' + folderName,
|
||||
file: fileName + '.' + fileType,
|
||||
format: 'text',
|
||||
});
|
||||
const resp = isZipped
|
||||
? await apiCall('files/download-text', { uri: `zip://archive:${folderName}//${fileName}.jsonl` })
|
||||
: await apiCall('files/load', {
|
||||
folder: 'archive:' + folderName,
|
||||
file: fileName + '.' + fileType,
|
||||
format: 'text',
|
||||
});
|
||||
|
||||
openNewTab(
|
||||
{
|
||||
@@ -58,7 +62,7 @@
|
||||
if (data.fileType == 'jsonl') {
|
||||
return 'img archive';
|
||||
}
|
||||
return ARCHIVE_ICONS[data.fileType];
|
||||
return ARCHIVE_ICONS[data.fileType] ?? 'img anyfile';
|
||||
}
|
||||
</script>
|
||||
|
||||
@@ -79,6 +83,7 @@
|
||||
import { openImportExportTab } from '../utility/importExportTools';
|
||||
|
||||
export let data;
|
||||
$: isZipped = data.folderName?.endsWith('.zip');
|
||||
|
||||
const handleRename = () => {
|
||||
showModal(InputTextModal, {
|
||||
@@ -112,6 +117,9 @@
|
||||
openArchive(data.fileName, data.folderName);
|
||||
};
|
||||
const handleClick = () => {
|
||||
if (!data.fileType) {
|
||||
return;
|
||||
}
|
||||
if (data.fileType == 'jsonl') {
|
||||
handleOpenArchive();
|
||||
}
|
||||
@@ -133,11 +141,15 @@
|
||||
};
|
||||
|
||||
function createMenu() {
|
||||
if (!data.fileType) {
|
||||
return [];
|
||||
}
|
||||
|
||||
return [
|
||||
data.fileType == 'jsonl' && { text: 'Open', onClick: handleOpenArchive },
|
||||
data.fileType == 'jsonl' && { text: 'Open in text editor', onClick: handleOpenJsonLinesText },
|
||||
{ text: 'Delete', onClick: handleDelete },
|
||||
{ text: 'Rename', onClick: handleRename },
|
||||
!isZipped && { text: 'Delete', onClick: handleDelete },
|
||||
!isZipped && { text: 'Rename', onClick: handleRename },
|
||||
data.fileType == 'jsonl' &&
|
||||
createQuickExportMenu(
|
||||
fmt => async () => {
|
||||
@@ -174,29 +186,30 @@
|
||||
),
|
||||
data.fileType.endsWith('.sql') && { text: 'Open SQL', onClick: handleOpenSqlFile },
|
||||
data.fileType.endsWith('.yaml') && { text: 'Open YAML', onClick: handleOpenYamlFile },
|
||||
data.fileType == 'jsonl' && {
|
||||
text: 'Open in profiler',
|
||||
submenu: getExtensions()
|
||||
.drivers.filter(eng => eng.profilerFormatterFunction)
|
||||
.map(eng => ({
|
||||
text: eng.title,
|
||||
onClick: () => {
|
||||
openNewTab({
|
||||
title: 'Profiler',
|
||||
icon: 'img profiler',
|
||||
tabComponent: 'ProfilerTab',
|
||||
props: {
|
||||
jslidLoad: `archive://${data.folderName}/${data.fileName}`,
|
||||
engine: eng.engine,
|
||||
// profilerFormatterFunction: eng.profilerFormatterFunction,
|
||||
// profilerTimestampFunction: eng.profilerTimestampFunction,
|
||||
// profilerChartAggregateFunction: eng.profilerChartAggregateFunction,
|
||||
// profilerChartMeasures: eng.profilerChartMeasures,
|
||||
},
|
||||
});
|
||||
},
|
||||
})),
|
||||
},
|
||||
!isZipped &&
|
||||
data.fileType == 'jsonl' && {
|
||||
text: 'Open in profiler',
|
||||
submenu: getExtensions()
|
||||
.drivers.filter(eng => eng.profilerFormatterFunction)
|
||||
.map(eng => ({
|
||||
text: eng.title,
|
||||
onClick: () => {
|
||||
openNewTab({
|
||||
title: 'Profiler',
|
||||
icon: 'img profiler',
|
||||
tabComponent: 'ProfilerTab',
|
||||
props: {
|
||||
jslidLoad: `archive://${data.folderName}/${data.fileName}`,
|
||||
engine: eng.engine,
|
||||
// profilerFormatterFunction: eng.profilerFormatterFunction,
|
||||
// profilerTimestampFunction: eng.profilerTimestampFunction,
|
||||
// profilerChartAggregateFunction: eng.profilerChartAggregateFunction,
|
||||
// profilerChartMeasures: eng.profilerChartMeasures,
|
||||
},
|
||||
});
|
||||
},
|
||||
})),
|
||||
},
|
||||
];
|
||||
}
|
||||
</script>
|
||||
|
||||
@@ -20,6 +20,7 @@
|
||||
import hasPermission from '../utility/hasPermission';
|
||||
import { isProApp } from '../utility/proTools';
|
||||
import { extractShellConnection } from '../impexp/createImpExpScript';
|
||||
import { saveFileToDisk } from '../utility/exportFileTools';
|
||||
|
||||
export let data;
|
||||
|
||||
@@ -100,7 +101,7 @@ await dbgateApi.deployDb(${JSON.stringify(
|
||||
props: {
|
||||
conid: $currentDatabase?.connection?._id,
|
||||
database: $currentDatabase?.name,
|
||||
}
|
||||
},
|
||||
},
|
||||
{
|
||||
editor: {
|
||||
@@ -113,12 +114,12 @@ await dbgateApi.deployDb(${JSON.stringify(
|
||||
);
|
||||
};
|
||||
|
||||
const handleOpenDuplicatorTab = () => {
|
||||
const handleOpenDataDeployTab = () => {
|
||||
openNewTab(
|
||||
{
|
||||
title: data.name,
|
||||
icon: 'img duplicator',
|
||||
tabComponent: 'DataDuplicatorTab',
|
||||
icon: 'img data-deploy',
|
||||
tabComponent: 'DataDeployTab',
|
||||
props: {
|
||||
conid: $currentDatabase?.connection?._id,
|
||||
database: $currentDatabase?.name,
|
||||
@@ -127,21 +128,56 @@ await dbgateApi.deployDb(${JSON.stringify(
|
||||
{
|
||||
editor: {
|
||||
archiveFolder: data.name,
|
||||
conid: $currentDatabase?.connection?._id,
|
||||
database: $currentDatabase?.name,
|
||||
},
|
||||
}
|
||||
);
|
||||
};
|
||||
|
||||
const handleZipUnzip = async method => {
|
||||
await apiCall(method, {
|
||||
folder: data.name,
|
||||
});
|
||||
};
|
||||
|
||||
const handleDownloadZip = async () => {
|
||||
saveFileToDisk(
|
||||
async filePath => {
|
||||
const zipped = await apiCall('archive/get-zipped-path', {
|
||||
folder: data.name,
|
||||
});
|
||||
await apiCall('files/simple-copy', {
|
||||
sourceFilePath: zipped.filePath,
|
||||
targetFilePath: filePath,
|
||||
});
|
||||
},
|
||||
{
|
||||
formatLabel: 'ZIP files',
|
||||
formatExtension: 'zip',
|
||||
defaultFileName: data.name?.endsWith('.zip') ? data.name : data.name + '.zip',
|
||||
}
|
||||
);
|
||||
};
|
||||
|
||||
function createMenu() {
|
||||
return [
|
||||
data.name != 'default' && { text: 'Delete', onClick: handleDelete },
|
||||
data.name != 'default' && { text: 'Rename', onClick: handleRename },
|
||||
data.name != 'default' &&
|
||||
$currentDatabase && [
|
||||
{ text: 'Data duplicator', onClick: handleOpenDuplicatorTab },
|
||||
isProApp() && { text: 'Data deployer', onClick: handleOpenDataDeployTab },
|
||||
{ text: 'Generate deploy DB SQL', onClick: handleGenerateDeploySql },
|
||||
{ text: 'Shell: Deploy DB', onClick: handleGenerateDeployScript },
|
||||
],
|
||||
data.name != 'default' &&
|
||||
isProApp() &&
|
||||
data.name.endsWith('.zip') && { text: 'Unpack ZIP', onClick: () => handleZipUnzip('archive/unzip') },
|
||||
data.name != 'default' &&
|
||||
isProApp() &&
|
||||
!data.name.endsWith('.zip') && { text: 'Pack (create ZIP)', onClick: () => handleZipUnzip('archive/zip') },
|
||||
|
||||
isProApp() && { text: 'Download ZIP', onClick: handleDownloadZip },
|
||||
|
||||
data.name != 'default' &&
|
||||
hasPermission('dbops/model/compare') &&
|
||||
@@ -158,7 +194,7 @@ await dbgateApi.deployDb(${JSON.stringify(
|
||||
{...$$restProps}
|
||||
{data}
|
||||
title={data.name.endsWith('.link') ? data.name.slice(0, -5) : data.name}
|
||||
icon={data.name.endsWith('.link') ? 'img link' : 'img archive-folder'}
|
||||
icon={data.name.endsWith('.link') ? 'img link' : data.name.endsWith('.zip') ? 'img zipfile' : 'img archive-folder'}
|
||||
isBold={data.name == $currentArchive}
|
||||
on:click={() => ($currentArchive = data.name)}
|
||||
menu={createMenu}
|
||||
|
||||
@@ -330,15 +330,15 @@ await dbgateApi.dropAllDbObjects(${JSON.stringify(
|
||||
});
|
||||
};
|
||||
|
||||
const handleImportWithDbDuplicator = () => {
|
||||
const handleShowDataDeployer = () => {
|
||||
showModal(ChooseArchiveFolderModal, {
|
||||
message: 'Choose archive folder for import from',
|
||||
message: 'Choose archive folder for data deployer',
|
||||
onConfirm: archiveFolder => {
|
||||
openNewTab(
|
||||
{
|
||||
title: archiveFolder,
|
||||
icon: 'img duplicator',
|
||||
tabComponent: 'DataDuplicatorTab',
|
||||
icon: 'img replicator',
|
||||
tabComponent: 'DataDeployerTab',
|
||||
props: {
|
||||
conid: connection?._id,
|
||||
database: name,
|
||||
@@ -439,8 +439,8 @@ await dbgateApi.dropAllDbObjects(${JSON.stringify(
|
||||
|
||||
driver?.databaseEngineTypes?.includes('sql') &&
|
||||
hasPermission(`dbops/import`) && {
|
||||
onClick: handleImportWithDbDuplicator,
|
||||
text: 'Import with DB duplicator',
|
||||
onClick: handleShowDataDeployer,
|
||||
text: 'Data deployer',
|
||||
},
|
||||
|
||||
{ divider: true },
|
||||
|
||||
@@ -7,6 +7,8 @@
|
||||
tabComponent: string;
|
||||
folder: string;
|
||||
currentConnection: boolean;
|
||||
extension: string;
|
||||
label: string;
|
||||
}
|
||||
|
||||
const sql: FileTypeHandler = {
|
||||
@@ -15,6 +17,8 @@
|
||||
tabComponent: 'QueryTab',
|
||||
folder: 'sql',
|
||||
currentConnection: true,
|
||||
extension: 'sql',
|
||||
label: 'SQL file',
|
||||
};
|
||||
|
||||
const shell: FileTypeHandler = {
|
||||
@@ -23,6 +27,8 @@
|
||||
tabComponent: 'ShellTab',
|
||||
folder: 'shell',
|
||||
currentConnection: false,
|
||||
extension: 'js',
|
||||
label: 'JavaScript Shell script',
|
||||
};
|
||||
|
||||
const markdown: FileTypeHandler = {
|
||||
@@ -31,6 +37,8 @@
|
||||
tabComponent: 'MarkdownEditorTab',
|
||||
folder: 'markdown',
|
||||
currentConnection: false,
|
||||
extension: 'md',
|
||||
label: 'Markdown file',
|
||||
};
|
||||
|
||||
const charts: FileTypeHandler = {
|
||||
@@ -39,6 +47,8 @@
|
||||
tabComponent: 'ChartTab',
|
||||
folder: 'charts',
|
||||
currentConnection: true,
|
||||
extension: 'json',
|
||||
label: 'Chart file',
|
||||
};
|
||||
|
||||
const query: FileTypeHandler = {
|
||||
@@ -47,6 +57,8 @@
|
||||
tabComponent: 'QueryDesignTab',
|
||||
folder: 'query',
|
||||
currentConnection: true,
|
||||
extension: 'json',
|
||||
label: 'Query design file',
|
||||
};
|
||||
|
||||
const sqlite: FileTypeHandler = {
|
||||
@@ -55,6 +67,8 @@
|
||||
tabComponent: null,
|
||||
folder: 'sqlite',
|
||||
currentConnection: true,
|
||||
extension: 'sqlite',
|
||||
label: 'SQLite database',
|
||||
};
|
||||
|
||||
const diagrams: FileTypeHandler = {
|
||||
@@ -63,22 +77,52 @@
|
||||
tabComponent: 'DiagramTab',
|
||||
folder: 'diagrams',
|
||||
currentConnection: true,
|
||||
extension: 'json',
|
||||
label: 'Diagram file',
|
||||
};
|
||||
|
||||
const jobs: FileTypeHandler = {
|
||||
const impexp: FileTypeHandler = {
|
||||
icon: 'img export',
|
||||
format: 'json',
|
||||
tabComponent: 'ImportExportTab',
|
||||
folder: 'jobs',
|
||||
folder: 'impexp',
|
||||
currentConnection: false,
|
||||
extension: 'json',
|
||||
label: 'Import/Export file',
|
||||
};
|
||||
|
||||
const datadeploy: FileTypeHandler = isProApp()
|
||||
? {
|
||||
icon: 'img data-deploy',
|
||||
format: 'json',
|
||||
tabComponent: 'DataDeployTab',
|
||||
folder: 'datadeploy',
|
||||
currentConnection: false,
|
||||
extension: 'json',
|
||||
label: 'Data deploy file',
|
||||
}
|
||||
: undefined;
|
||||
|
||||
const dbcompare: FileTypeHandler = isProApp()
|
||||
? {
|
||||
icon: 'img compare',
|
||||
format: 'json',
|
||||
tabComponent: 'CompareModelTab',
|
||||
folder: 'dbcompare',
|
||||
currentConnection: false,
|
||||
extension: 'json',
|
||||
label: 'Database compare file',
|
||||
}
|
||||
: undefined;
|
||||
|
||||
const perspectives: FileTypeHandler = {
|
||||
icon: 'img perspective',
|
||||
format: 'json',
|
||||
tabComponent: 'PerspectiveTab',
|
||||
folder: 'pesrpectives',
|
||||
currentConnection: true,
|
||||
extension: 'json',
|
||||
label: 'Perspective file',
|
||||
};
|
||||
|
||||
const modtrans: FileTypeHandler = {
|
||||
@@ -87,6 +131,8 @@
|
||||
tabComponent: 'ModelTransformTab',
|
||||
folder: 'modtrans',
|
||||
currentConnection: false,
|
||||
extension: 'json',
|
||||
label: 'Model transform file',
|
||||
};
|
||||
|
||||
export const SAVED_FILE_HANDLERS = {
|
||||
@@ -98,8 +144,10 @@
|
||||
sqlite,
|
||||
diagrams,
|
||||
perspectives,
|
||||
jobs,
|
||||
impexp,
|
||||
modtrans,
|
||||
datadeploy,
|
||||
dbcompare,
|
||||
};
|
||||
|
||||
export const extractKey = data => data.file;
|
||||
@@ -122,6 +170,8 @@
|
||||
import openNewTab from '../utility/openNewTab';
|
||||
|
||||
import AppObjectCore from './AppObjectCore.svelte';
|
||||
import { isProApp } from '../utility/proTools';
|
||||
import { saveFileToDisk } from '../utility/exportFileTools';
|
||||
|
||||
export let data;
|
||||
|
||||
@@ -148,6 +198,7 @@
|
||||
hasPermission(`files/${data.folder}/write`) && { text: 'Create copy', onClick: handleCopy },
|
||||
hasPermission(`files/${data.folder}/write`) && { text: 'Delete', onClick: handleDelete },
|
||||
folder == 'markdown' && { text: 'Show page', onClick: showMarkdownPage },
|
||||
{ text: 'Download', onClick: handleDownload },
|
||||
];
|
||||
}
|
||||
|
||||
@@ -182,6 +233,19 @@
|
||||
});
|
||||
};
|
||||
|
||||
const handleDownload = () => {
|
||||
saveFileToDisk(
|
||||
async filePath => {
|
||||
await apiCall('files/export-file', {
|
||||
folder,
|
||||
file: data.file,
|
||||
filePath,
|
||||
});
|
||||
},
|
||||
{ formatLabel: handler.label, formatExtension: handler.format, defaultFileName: data.file }
|
||||
);
|
||||
};
|
||||
|
||||
async function openTab() {
|
||||
const resp = await apiCall('files/load', { folder, file: data.file, format: handler.format });
|
||||
|
||||
|
||||
@@ -17,6 +17,7 @@
|
||||
border-radius: 2px;
|
||||
position: relative;
|
||||
top: 3px;
|
||||
font-size: 10pt;
|
||||
}
|
||||
|
||||
label:hover:not(.disabled) {
|
||||
|
||||
61
packages/web/src/buttons/InlineUploadButton.svelte
Normal file
61
packages/web/src/buttons/InlineUploadButton.svelte
Normal file
@@ -0,0 +1,61 @@
|
||||
<script lang="ts">
|
||||
import _ from 'lodash';
|
||||
import InlineButton from '../buttons/InlineButton.svelte';
|
||||
import FontIcon from '../icons/FontIcon.svelte';
|
||||
import getElectron from '../utility/getElectron';
|
||||
import InlineButtonLabel from '../buttons/InlineButtonLabel.svelte';
|
||||
import resolveApi, { resolveApiHeaders } from '../utility/resolveApi';
|
||||
|
||||
import uuidv1 from 'uuid/v1';
|
||||
|
||||
export let filters;
|
||||
export let onProcessFile;
|
||||
export let icon = 'icon plus-thick';
|
||||
|
||||
const inputId = `uploadFileButton-${uuidv1()}`;
|
||||
|
||||
const electron = getElectron();
|
||||
|
||||
async function handleUploadedFile(e) {
|
||||
const files = [...e.target.files];
|
||||
|
||||
for (const file of files) {
|
||||
const formData = new FormData();
|
||||
formData.append('name', file.name);
|
||||
formData.append('data', file);
|
||||
|
||||
const fetchOptions = {
|
||||
method: 'POST',
|
||||
body: formData,
|
||||
headers: resolveApiHeaders(),
|
||||
};
|
||||
|
||||
const apiBase = resolveApi();
|
||||
const resp = await fetch(`${apiBase}/uploads/upload`, fetchOptions);
|
||||
const { filePath, originalName } = await resp.json();
|
||||
await onProcessFile(filePath, originalName);
|
||||
}
|
||||
}
|
||||
|
||||
async function handleOpenElectronFile() {
|
||||
const filePaths = await electron.showOpenDialog({
|
||||
filters,
|
||||
properties: ['showHiddenFiles', 'openFile'],
|
||||
});
|
||||
const filePath = filePaths && filePaths[0];
|
||||
if (!filePath) return;
|
||||
onProcessFile(filePath, filePath.split(/[\/\\]/).pop());
|
||||
}
|
||||
</script>
|
||||
|
||||
{#if electron}
|
||||
<InlineButton on:click={handleOpenElectronFile} title="Open file" data-testid={$$props['data-testid']}>
|
||||
<FontIcon {icon} />
|
||||
</InlineButton>
|
||||
{:else}
|
||||
<InlineButtonLabel on:click={() => {}} title="Upload file" data-testid={$$props['data-testid']} htmlFor={inputId}>
|
||||
<FontIcon {icon} />
|
||||
</InlineButtonLabel>
|
||||
{/if}
|
||||
|
||||
<input type="file" id={inputId} hidden on:change={handleUploadedFile} />
|
||||
@@ -13,7 +13,7 @@
|
||||
}
|
||||
</script>
|
||||
|
||||
<div class="button" on:click={handleClick} class:disabled class:fillHorizontal>
|
||||
<div class="button" on:click={handleClick} class:disabled class:fillHorizontal data-testid={$$props['data-testid']}>
|
||||
<div class="icon">
|
||||
<FontIcon {icon} />
|
||||
</div>
|
||||
|
||||
@@ -47,6 +47,7 @@ import newTable from '../tableeditor/newTable';
|
||||
import { isProApp } from '../utility/proTools';
|
||||
import { openWebLink } from '../utility/simpleTools';
|
||||
import { _t } from '../translations';
|
||||
import ExportImportConnectionsModal from '../modals/ExportImportConnectionsModal.svelte';
|
||||
|
||||
// function themeCommand(theme: ThemeDefinition) {
|
||||
// return {
|
||||
@@ -530,6 +531,44 @@ registerCommand({
|
||||
},
|
||||
});
|
||||
|
||||
registerCommand({
|
||||
id: 'app.exportConnections',
|
||||
category: 'Settings',
|
||||
name: 'Export connections',
|
||||
testEnabled: () => getElectron() != null,
|
||||
onClick: () => {
|
||||
showModal(ExportImportConnectionsModal, {
|
||||
mode: 'export',
|
||||
});
|
||||
},
|
||||
});
|
||||
|
||||
registerCommand({
|
||||
id: 'app.importConnections',
|
||||
category: 'Settings',
|
||||
name: 'Import connections',
|
||||
testEnabled: () => getElectron() != null,
|
||||
onClick: async () => {
|
||||
const files = await electron.showOpenDialog({
|
||||
properties: ['showHiddenFiles', 'openFile'],
|
||||
filters: [
|
||||
{
|
||||
name: `All supported files`,
|
||||
extensions: ['zip'],
|
||||
},
|
||||
{ name: `ZIP files`, extensions: ['zip'] },
|
||||
],
|
||||
});
|
||||
|
||||
if (files?.length > 0) {
|
||||
showModal(ExportImportConnectionsModal, {
|
||||
mode: 'import',
|
||||
uploadedFilePath: files[0],
|
||||
});
|
||||
}
|
||||
},
|
||||
});
|
||||
|
||||
registerCommand({
|
||||
id: 'file.import',
|
||||
category: 'File',
|
||||
|
||||
@@ -7,6 +7,7 @@
|
||||
export let value;
|
||||
export let jsonParsedValue = undefined;
|
||||
export let editorTypes;
|
||||
export let rightMargin = false;
|
||||
|
||||
$: stringified = stringifyCellValue(
|
||||
value,
|
||||
@@ -20,7 +21,7 @@
|
||||
{#if rowData == null}
|
||||
<span class="null">(No row)</span>
|
||||
{:else}
|
||||
<span class={stringified.gridStyle} title={stringified.gridTitle}>{stringified.value}</span>
|
||||
<span class={stringified.gridStyle} title={stringified.gridTitle} class:rightMargin>{stringified.value}</span>
|
||||
{/if}
|
||||
|
||||
<style>
|
||||
@@ -31,4 +32,8 @@
|
||||
.valueCellStyle {
|
||||
color: var(--theme-icon-green);
|
||||
}
|
||||
|
||||
.rightMargin {
|
||||
margin-right: 16px;
|
||||
}
|
||||
</style>
|
||||
|
||||
@@ -21,6 +21,7 @@
|
||||
export let isModifiedCell = false;
|
||||
export let isInserted = false;
|
||||
export let isDeleted = false;
|
||||
export let isMissing = false;
|
||||
export let isAutofillSelected = false;
|
||||
export let isFocusedColumn = false;
|
||||
export let domCell = undefined;
|
||||
@@ -33,6 +34,9 @@
|
||||
export let onSetValue;
|
||||
export let editorTypes = null;
|
||||
export let isReadonly;
|
||||
export let hasOverlayValue = false;
|
||||
export let overlayValue = null;
|
||||
export let isMissingOverlayField = false;
|
||||
|
||||
$: value = col.isStructured ? _.get(rowData || {}, col.uniquePath) : (rowData || {})[col.uniqueName];
|
||||
|
||||
@@ -68,69 +72,88 @@
|
||||
class:isModifiedCell
|
||||
class:isInserted
|
||||
class:isDeleted
|
||||
class:isMissing
|
||||
class:isAutofillSelected
|
||||
class:isFocusedColumn
|
||||
class:hasOverlayValue
|
||||
class:isMissingOverlayField
|
||||
class:alignRight={_.isNumber(value) && !showHint}
|
||||
{style}
|
||||
>
|
||||
<CellValue {rowData} {value} {jsonParsedValue} {editorTypes} />
|
||||
|
||||
{#if showHint}
|
||||
<span class="hint"
|
||||
>{col.hintColumnNames.map(hintColumnName => rowData[hintColumnName]).join(col.hintColumnDelimiter || ' ')}</span
|
||||
>
|
||||
{/if}
|
||||
|
||||
{#if editorTypes?.explicitDataType}
|
||||
{#if value !== undefined}
|
||||
<ShowFormDropDownButton
|
||||
icon={detectTypeIcon(value)}
|
||||
menu={() => getConvertValueMenu(value, onSetValue, editorTypes)}
|
||||
/>
|
||||
{#if hasOverlayValue}
|
||||
<div class="flex1 flex">
|
||||
<div class="replacedValue overlayCell overlayCell1">
|
||||
<CellValue {rowData} {value} {jsonParsedValue} {editorTypes} />
|
||||
</div>
|
||||
<div class="overlayCell overlayCell2">
|
||||
<CellValue {rowData} value={overlayValue} {editorTypes} />
|
||||
</div>
|
||||
</div>
|
||||
{:else}
|
||||
<CellValue
|
||||
{rowData}
|
||||
{value}
|
||||
{jsonParsedValue}
|
||||
{editorTypes}
|
||||
rightMargin={_.isNumber(value) && !showHint && (editorTypes?.explicitDataType || col.foreignKey)}
|
||||
/>
|
||||
{#if showHint}
|
||||
<span class="hint"
|
||||
>{col.hintColumnNames.map(hintColumnName => rowData[hintColumnName]).join(col.hintColumnDelimiter || ' ')}</span
|
||||
>
|
||||
{/if}
|
||||
{#if _.isPlainObject(value)}
|
||||
<ShowFormButton secondary icon="icon open-in-new" on:click={() => openJsonDocument(value, undefined, true)} />
|
||||
{/if}
|
||||
{#if _.isArray(value)}
|
||||
|
||||
{#if editorTypes?.explicitDataType}
|
||||
{#if value !== undefined}
|
||||
<ShowFormDropDownButton
|
||||
icon={detectTypeIcon(value)}
|
||||
menu={() => getConvertValueMenu(value, onSetValue, editorTypes)}
|
||||
/>
|
||||
{/if}
|
||||
{#if _.isPlainObject(value)}
|
||||
<ShowFormButton secondary icon="icon open-in-new" on:click={() => openJsonDocument(value, undefined, true)} />
|
||||
{/if}
|
||||
{#if _.isArray(value)}
|
||||
<ShowFormButton
|
||||
secondary
|
||||
icon="icon open-in-new"
|
||||
on:click={() => {
|
||||
if (_.every(value, x => _.isPlainObject(x))) {
|
||||
openJsonLinesData(value);
|
||||
} else {
|
||||
openJsonDocument(value, undefined, true);
|
||||
}
|
||||
}}
|
||||
/>
|
||||
{/if}
|
||||
{:else if col.foreignKey && rowData && rowData[col.uniqueName] && !isCurrentCell}
|
||||
<ShowFormButton on:click={() => onSetFormView(rowData, col)} />
|
||||
{:else if col.foreignKey && isCurrentCell && onDictionaryLookup && !isReadonly}
|
||||
<ShowFormButton icon="icon dots-horizontal" on:click={onDictionaryLookup} />
|
||||
{:else if isJson}
|
||||
<ShowFormButton icon="icon open-in-new" on:click={() => openJsonDocument(value, undefined, true)} />
|
||||
{:else if jsonParsedValue && _.isPlainObject(jsonParsedValue)}
|
||||
<ShowFormButton icon="icon open-in-new" on:click={() => openJsonDocument(jsonParsedValue, undefined, true)} />
|
||||
{:else if _.isArray(jsonParsedValue || value)}
|
||||
<ShowFormButton
|
||||
secondary
|
||||
icon="icon open-in-new"
|
||||
on:click={() => {
|
||||
if (_.every(value, x => _.isPlainObject(x))) {
|
||||
openJsonLinesData(value);
|
||||
if (_.every(jsonParsedValue || value, x => _.isPlainObject(x))) {
|
||||
openJsonLinesData(jsonParsedValue || value);
|
||||
} else {
|
||||
openJsonDocument(value, undefined, true);
|
||||
openJsonDocument(jsonParsedValue || value, undefined, true);
|
||||
}
|
||||
}}
|
||||
/>
|
||||
{/if}
|
||||
{:else if col.foreignKey && rowData && rowData[col.uniqueName] && !isCurrentCell}
|
||||
<ShowFormButton on:click={() => onSetFormView(rowData, col)} />
|
||||
{:else if col.foreignKey && isCurrentCell && onDictionaryLookup && !isReadonly}
|
||||
<ShowFormButton icon="icon dots-horizontal" on:click={onDictionaryLookup} />
|
||||
{:else if isJson}
|
||||
<ShowFormButton icon="icon open-in-new" on:click={() => openJsonDocument(value, undefined, true)} />
|
||||
{:else if jsonParsedValue && _.isPlainObject(jsonParsedValue)}
|
||||
<ShowFormButton icon="icon open-in-new" on:click={() => openJsonDocument(jsonParsedValue, undefined, true)} />
|
||||
{:else if _.isArray(jsonParsedValue || value)}
|
||||
<ShowFormButton
|
||||
icon="icon open-in-new"
|
||||
on:click={() => {
|
||||
if (_.every(jsonParsedValue || value, x => _.isPlainObject(x))) {
|
||||
openJsonLinesData(jsonParsedValue || value);
|
||||
} else {
|
||||
openJsonDocument(jsonParsedValue || value, undefined, true);
|
||||
}
|
||||
}}
|
||||
/>
|
||||
{/if}
|
||||
|
||||
{#if isAutoFillMarker}
|
||||
<div class="autoFillMarker autofillHandleMarker" />
|
||||
{/if}
|
||||
{#if isAutoFillMarker}
|
||||
<div class="autoFillMarker autofillHandleMarker" />
|
||||
{/if}
|
||||
|
||||
{#if showSlot}
|
||||
<slot />
|
||||
{#if showSlot}
|
||||
<slot />
|
||||
{/if}
|
||||
{/if}
|
||||
</td>
|
||||
|
||||
@@ -175,6 +198,9 @@
|
||||
td.isDeleted {
|
||||
background: var(--theme-bg-volcano);
|
||||
}
|
||||
td.isMissing {
|
||||
background: var(--theme-bg-volcano);
|
||||
}
|
||||
td.isSelected {
|
||||
background: var(--theme-bg-3);
|
||||
}
|
||||
@@ -182,9 +208,9 @@
|
||||
background: var(--theme-bg-selected);
|
||||
}
|
||||
td.isDeleted {
|
||||
background-image: url('data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAUAAAAFCAYAAACNbyblAAAAEElEQVQImWNgIAX8x4KJBAD+agT8INXz9wAAAABJRU5ErkJggg==');
|
||||
background-repeat: repeat-x;
|
||||
background-position: 50% 50%;
|
||||
background-image: url('data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAUAAAAFCAYAAACNbyblAAAAEElEQVQImWNgIAX8x4KJBAD+agT8INXz9wAAAABJRU5ErkJggg==') !important;
|
||||
background-repeat: repeat-x !important;
|
||||
background-position: 50% 50% !important;
|
||||
}
|
||||
|
||||
.hint {
|
||||
@@ -207,4 +233,31 @@
|
||||
color: var(--theme-icon-green);
|
||||
text-align: var(--data-grid-numbers-align);
|
||||
}
|
||||
|
||||
.hasOverlayValue .overlayCell {
|
||||
width: 50%;
|
||||
overflow: hidden;
|
||||
}
|
||||
|
||||
.hasOverlayValue .overlayCell1 {
|
||||
margin-right: 5px;
|
||||
}
|
||||
|
||||
.hasOverlayValue .overlayCell2 {
|
||||
margin-left: 5px;
|
||||
}
|
||||
|
||||
.replacedValue {
|
||||
background-image: url('data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAUAAAAFCAYAAACNbyblAAAAEElEQVQImWNgIAX8x4KJBAD+agT8INXz9wAAAABJRU5ErkJggg==');
|
||||
background-repeat: repeat-x;
|
||||
background-position: 50% 50%;
|
||||
}
|
||||
|
||||
td.isMissingOverlayField {
|
||||
background: var(--theme-bg-orange);
|
||||
|
||||
background-image: url('data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAUAAAAFCAYAAACNbyblAAAAEElEQVQImWNgIAX8x4KJBAD+agT8INXz9wAAAABJRU5ErkJggg==');
|
||||
background-repeat: repeat-x;
|
||||
background-position: 50% 50%;
|
||||
}
|
||||
</style>
|
||||
|
||||
@@ -282,48 +282,59 @@
|
||||
testEnabled: () => getCurrentDataGrid()?.editCellValueEnabled(),
|
||||
onClick: () => getCurrentDataGrid().editCellValue(),
|
||||
});
|
||||
registerCommand({
|
||||
id: 'dataGrid.mergeSelectedCellsIntoMirror',
|
||||
category: 'Data grid',
|
||||
name: 'Merge selected cells',
|
||||
testEnabled: () => getCurrentDataGrid()?.mirrorWriteEnabled(true),
|
||||
onClick: () => getCurrentDataGrid().mergeSelectionIntoMirror({ mergeMode: 'merge', fullRows: false }),
|
||||
});
|
||||
registerCommand({
|
||||
id: 'dataGrid.mergeSelectedRowsIntoMirror',
|
||||
category: 'Data grid',
|
||||
name: 'Merge selected rows',
|
||||
testEnabled: () => getCurrentDataGrid()?.mirrorWriteEnabled(true),
|
||||
onClick: () => getCurrentDataGrid().mergeSelectionIntoMirror({ mergeMode: 'merge', fullRows: true }),
|
||||
});
|
||||
registerCommand({
|
||||
id: 'dataGrid.appendSelectedCellsIntoMirror',
|
||||
category: 'Data grid',
|
||||
name: 'Append selected cells',
|
||||
testEnabled: () => getCurrentDataGrid()?.mirrorWriteEnabled(true),
|
||||
onClick: () => getCurrentDataGrid().mergeSelectionIntoMirror({ mergeMode: 'append', fullRows: false }),
|
||||
});
|
||||
registerCommand({
|
||||
id: 'dataGrid.appendSelectedRowsIntoMirror',
|
||||
category: 'Data grid',
|
||||
name: 'Append selected rows',
|
||||
testEnabled: () => getCurrentDataGrid()?.mirrorWriteEnabled(true),
|
||||
onClick: () => getCurrentDataGrid().mergeSelectionIntoMirror({ mergeMode: 'append', fullRows: true }),
|
||||
});
|
||||
registerCommand({
|
||||
id: 'dataGrid.replaceSelectedCellsIntoMirror',
|
||||
category: 'Data grid',
|
||||
name: 'Replace with selected cells',
|
||||
testEnabled: () => getCurrentDataGrid()?.mirrorWriteEnabled(true),
|
||||
onClick: () => getCurrentDataGrid().mergeSelectionIntoMirror({ mergeMode: 'replace', fullRows: false }),
|
||||
});
|
||||
registerCommand({
|
||||
id: 'dataGrid.replaceSelectedRowsIntoMirror',
|
||||
category: 'Data grid',
|
||||
name: 'Replace with selected rows',
|
||||
testEnabled: () => getCurrentDataGrid()?.mirrorWriteEnabled(true),
|
||||
onClick: () => getCurrentDataGrid().mergeSelectionIntoMirror({ mergeMode: 'replace', fullRows: true }),
|
||||
});
|
||||
|
||||
if (isProApp()) {
|
||||
registerCommand({
|
||||
id: 'dataGrid.sendToDataDeploy',
|
||||
category: 'Data grid',
|
||||
name: 'Send to data deployer',
|
||||
testEnabled: () => getCurrentDataGrid()?.sendToDataDeployEnabled(),
|
||||
onClick: () => getCurrentDataGrid().sendToDataDeploy(),
|
||||
});
|
||||
}
|
||||
|
||||
// registerCommand({
|
||||
// id: 'dataGrid.mergeSelectedCellsIntoMirror',
|
||||
// category: 'Data grid',
|
||||
// name: 'Merge selected cells',
|
||||
// testEnabled: () => getCurrentDataGrid()?.mirrorWriteEnabled(true),
|
||||
// onClick: () => getCurrentDataGrid().mergeSelectionIntoMirror({ mergeMode: 'merge', fullRows: false }),
|
||||
// });
|
||||
// registerCommand({
|
||||
// id: 'dataGrid.mergeSelectedRowsIntoMirror',
|
||||
// category: 'Data grid',
|
||||
// name: 'Merge selected rows',
|
||||
// testEnabled: () => getCurrentDataGrid()?.mirrorWriteEnabled(true),
|
||||
// onClick: () => getCurrentDataGrid().mergeSelectionIntoMirror({ mergeMode: 'merge', fullRows: true }),
|
||||
// });
|
||||
// registerCommand({
|
||||
// id: 'dataGrid.appendSelectedCellsIntoMirror',
|
||||
// category: 'Data grid',
|
||||
// name: 'Append selected cells',
|
||||
// testEnabled: () => getCurrentDataGrid()?.mirrorWriteEnabled(true),
|
||||
// onClick: () => getCurrentDataGrid().mergeSelectionIntoMirror({ mergeMode: 'append', fullRows: false }),
|
||||
// });
|
||||
// registerCommand({
|
||||
// id: 'dataGrid.appendSelectedRowsIntoMirror',
|
||||
// category: 'Data grid',
|
||||
// name: 'Append selected rows',
|
||||
// testEnabled: () => getCurrentDataGrid()?.mirrorWriteEnabled(true),
|
||||
// onClick: () => getCurrentDataGrid().mergeSelectionIntoMirror({ mergeMode: 'append', fullRows: true }),
|
||||
// });
|
||||
// registerCommand({
|
||||
// id: 'dataGrid.replaceSelectedCellsIntoMirror',
|
||||
// category: 'Data grid',
|
||||
// name: 'Replace with selected cells',
|
||||
// testEnabled: () => getCurrentDataGrid()?.mirrorWriteEnabled(true),
|
||||
// onClick: () => getCurrentDataGrid().mergeSelectionIntoMirror({ mergeMode: 'replace', fullRows: false }),
|
||||
// });
|
||||
// registerCommand({
|
||||
// id: 'dataGrid.replaceSelectedRowsIntoMirror',
|
||||
// category: 'Data grid',
|
||||
// name: 'Replace with selected rows',
|
||||
// testEnabled: () => getCurrentDataGrid()?.mirrorWriteEnabled(true),
|
||||
// onClick: () => getCurrentDataGrid().mergeSelectionIntoMirror({ mergeMode: 'replace', fullRows: true }),
|
||||
// });
|
||||
|
||||
function getSelectedCellsInfo(selectedCells, grider, realColumnUniqueNames, selectedRowData) {
|
||||
if (selectedCells.length > 1 && selectedCells.every(x => _.isNumber(x[0]) && _.isNumber(x[1]))) {
|
||||
@@ -418,6 +429,8 @@
|
||||
import contextMenuActivator from '../utility/contextMenuActivator';
|
||||
import InputTextModal from '../modals/InputTextModal.svelte';
|
||||
import { _t } from '../translations';
|
||||
import { isProApp } from '../utility/proTools';
|
||||
import SaveArchiveModal from '../modals/SaveArchiveModal.svelte';
|
||||
|
||||
export let onLoadNextData = undefined;
|
||||
export let grider = undefined;
|
||||
@@ -454,6 +467,8 @@
|
||||
export let jslid;
|
||||
// export let generalAllowSave = false;
|
||||
export let hideGridLeftColumn = false;
|
||||
export let overlayDefinition = null;
|
||||
export let onGetSelectionMenu = null;
|
||||
|
||||
export const activator = createActivator('DataGridCore', false);
|
||||
|
||||
@@ -482,6 +497,7 @@
|
||||
const domFilterControlsRef = createRef({});
|
||||
|
||||
let isGridFocused = false;
|
||||
let selectionMenu = null;
|
||||
|
||||
const tabid = getContext('tabid');
|
||||
|
||||
@@ -1003,11 +1019,11 @@
|
||||
});
|
||||
}
|
||||
|
||||
export function mirrorWriteEnabled(requireKey) {
|
||||
return requireKey ? !!display.baseTable?.primaryKey || !!display.baseCollection : !!display.baseTableOrSimilar;
|
||||
export function sendToDataDeployEnabled() {
|
||||
return !!display.baseTable?.primaryKey || !!display.baseCollection;
|
||||
}
|
||||
|
||||
export async function mergeSelectionIntoMirror({ fullRows, mergeMode = 'merge' }) {
|
||||
export async function sendToDataDeploy() {
|
||||
const file = display.baseTableOrSimilar?.pureName;
|
||||
const mergeKey = display.baseCollection
|
||||
? display.baseCollection?.uniqueKey?.map(x => x.columnName)
|
||||
@@ -1019,20 +1035,77 @@
|
||||
const rows = rowIndexes.map(rowIndex => grider.getRowData(rowIndex));
|
||||
// @ts-ignore
|
||||
const columns = colIndexes.map(col => realColumnUniqueNames[col]);
|
||||
const mergedRows = fullRows ? rows : rows.map(x => _.pick(x, _.uniq([...columns, ...mergeKey])));
|
||||
|
||||
const res = await apiCall('archive/modify-file', {
|
||||
const mergedRows = rows.map(x => _.pick(x, _.uniq([...columns, ...mergeKey])));
|
||||
|
||||
showModal(SaveArchiveModal, {
|
||||
folder: $currentArchive,
|
||||
file,
|
||||
mergedRows,
|
||||
mergeKey,
|
||||
mergeMode,
|
||||
fileIsReadOnly: true,
|
||||
onSave: async folder => {
|
||||
const res = await apiCall('archive/modify-file', {
|
||||
folder,
|
||||
file,
|
||||
mergedRows,
|
||||
mergeKey,
|
||||
mergeMode: 'merge',
|
||||
});
|
||||
if (res) {
|
||||
showSnackbarSuccess(`Merged ${mergedRows.length} rows into ${file} in archive ${folder}`);
|
||||
|
||||
openNewTab(
|
||||
{
|
||||
title: folder,
|
||||
icon: 'img data-deploy',
|
||||
tabComponent: 'DataDeployTab',
|
||||
props: {
|
||||
conid,
|
||||
database,
|
||||
},
|
||||
},
|
||||
{
|
||||
editor: {
|
||||
archiveFolder: folder,
|
||||
conid,
|
||||
database,
|
||||
},
|
||||
}
|
||||
);
|
||||
}
|
||||
},
|
||||
});
|
||||
if (res) {
|
||||
showSnackbarSuccess(`Merged ${mergedRows.length} rows into ${file} in archive ${$currentArchive}`);
|
||||
}
|
||||
}
|
||||
|
||||
// export function mirrorWriteEnabled(requireKey) {
|
||||
// return requireKey ? !!display.baseTable?.primaryKey || !!display.baseCollection : !!display.baseTableOrSimilar;
|
||||
// }
|
||||
|
||||
// export async function mergeSelectionIntoMirror({ fullRows, mergeMode = 'merge' }) {
|
||||
// const file = display.baseTableOrSimilar?.pureName;
|
||||
// const mergeKey = display.baseCollection
|
||||
// ? display.baseCollection?.uniqueKey?.map(x => x.columnName)
|
||||
// : display.baseTable?.primaryKey.columns.map(x => x.columnName);
|
||||
|
||||
// const cells = cellsToRegularCells(selectedCells);
|
||||
// const rowIndexes = _.sortBy(_.uniq(cells.map(x => x[0])));
|
||||
// const colIndexes = _.sortBy(_.uniq(cells.map(x => x[1])));
|
||||
// const rows = rowIndexes.map(rowIndex => grider.getRowData(rowIndex));
|
||||
// // @ts-ignore
|
||||
// const columns = colIndexes.map(col => realColumnUniqueNames[col]);
|
||||
// const mergedRows = fullRows ? rows : rows.map(x => _.pick(x, _.uniq([...columns, ...mergeKey])));
|
||||
|
||||
// const res = await apiCall('archive/modify-file', {
|
||||
// folder: $currentArchive,
|
||||
// file,
|
||||
// mergedRows,
|
||||
// mergeKey,
|
||||
// mergeMode,
|
||||
// });
|
||||
// if (res) {
|
||||
// showSnackbarSuccess(`Merged ${mergedRows.length} rows into ${file} in archive ${$currentArchive}`);
|
||||
// }
|
||||
// }
|
||||
|
||||
export function canShowLeftPanel() {
|
||||
return !hideGridLeftColumn;
|
||||
}
|
||||
@@ -1152,8 +1225,16 @@
|
||||
onChangeSelectedColumns(getSelectedColumns().map(x => x.columnName));
|
||||
}
|
||||
|
||||
let publishedCells = null;
|
||||
|
||||
if (onPublishedCellsChanged) {
|
||||
onPublishedCellsChanged(getCellsPublished(selectedCells));
|
||||
if (!publishedCells) publishedCells = getCellsPublished(selectedCells);
|
||||
onPublishedCellsChanged(publishedCells);
|
||||
}
|
||||
|
||||
if (onGetSelectionMenu) {
|
||||
if (!publishedCells) publishedCells = getCellsPublished(selectedCells);
|
||||
selectionMenu = onGetSelectionMenu(publishedCells);
|
||||
}
|
||||
}
|
||||
});
|
||||
@@ -1192,6 +1273,7 @@
|
||||
engine: display?.driver,
|
||||
condition: display?.getChangeSetCondition(rowData),
|
||||
insertedRowIndex: grider?.getInsertedRowIndex(row),
|
||||
rowStatus: grider.getRowStatus(row),
|
||||
};
|
||||
})
|
||||
.filter(x => x.column);
|
||||
@@ -1747,14 +1829,14 @@
|
||||
{ placeTag: 'save' },
|
||||
{ command: 'dataGrid.revertRowChanges', hideDisabled: true },
|
||||
{ command: 'dataGrid.revertAllChanges', hideDisabled: true },
|
||||
{ command: 'dataGrid.deleteSelectedRows' },
|
||||
{ command: 'dataGrid.insertNewRow' },
|
||||
{ command: 'dataGrid.cloneRows' },
|
||||
{ command: 'dataGrid.deleteSelectedRows', hideDisabled: true },
|
||||
{ command: 'dataGrid.insertNewRow', hideDisabled: true },
|
||||
{ command: 'dataGrid.cloneRows', hideDisabled: true },
|
||||
{ command: 'dataGrid.setNull', hideDisabled: true },
|
||||
{ command: 'dataGrid.removeField', hideDisabled: true },
|
||||
{ placeTag: 'edit' },
|
||||
{ divider: true },
|
||||
{ command: 'dataGrid.findColumn' },
|
||||
{ command: 'dataGrid.findColumn', hideDisabled: true },
|
||||
{ command: 'dataGrid.hideColumn', hideDisabled: true },
|
||||
{ command: 'dataGrid.filterSelected' },
|
||||
{ command: 'dataGrid.clearFilter' },
|
||||
@@ -1773,17 +1855,18 @@
|
||||
// { command: 'dataGrid.copyJsonDocument', hideDisabled: true },
|
||||
{ divider: true },
|
||||
{ placeTag: 'export' },
|
||||
{
|
||||
label: 'Save to current archive',
|
||||
submenu: [
|
||||
{ command: 'dataGrid.mergeSelectedCellsIntoMirror' },
|
||||
{ command: 'dataGrid.mergeSelectedRowsIntoMirror' },
|
||||
{ command: 'dataGrid.appendSelectedCellsIntoMirror' },
|
||||
{ command: 'dataGrid.appendSelectedRowsIntoMirror' },
|
||||
{ command: 'dataGrid.replaceSelectedCellsIntoMirror' },
|
||||
{ command: 'dataGrid.replaceSelectedRowsIntoMirror' },
|
||||
],
|
||||
},
|
||||
// {
|
||||
// label: 'Save to current archive',
|
||||
// submenu: [
|
||||
// { command: 'dataGrid.mergeSelectedCellsIntoMirror' },
|
||||
// { command: 'dataGrid.mergeSelectedRowsIntoMirror' },
|
||||
// { command: 'dataGrid.appendSelectedCellsIntoMirror' },
|
||||
// { command: 'dataGrid.appendSelectedRowsIntoMirror' },
|
||||
// { command: 'dataGrid.replaceSelectedCellsIntoMirror' },
|
||||
// { command: 'dataGrid.replaceSelectedRowsIntoMirror' },
|
||||
// ],
|
||||
// },
|
||||
isProApp() && { command: 'dataGrid.sendToDataDeploy' },
|
||||
{ command: 'dataGrid.generateSqlFromData' },
|
||||
{ command: 'dataGrid.openFreeTable' },
|
||||
{ command: 'dataGrid.openChartFromSelection' },
|
||||
@@ -2017,6 +2100,7 @@
|
||||
onSetFormView={formViewAvailable && display?.baseTable?.primaryKey ? handleSetFormView : null}
|
||||
{dataEditorTypesBehaviourOverride}
|
||||
{gridColoringMode}
|
||||
{overlayDefinition}
|
||||
/>
|
||||
{/each}
|
||||
</tbody>
|
||||
@@ -2053,7 +2137,19 @@
|
||||
on:scroll={e => (firstVisibleRowScrollIndex = e.detail)}
|
||||
bind:this={domVerticalScroll}
|
||||
/>
|
||||
{#if selectedCellsInfo}
|
||||
{#if selectionMenu}
|
||||
<div class="selection-menu">
|
||||
{#each selectionMenu as item}
|
||||
<InlineButton
|
||||
on:click={() => {
|
||||
item.onClick();
|
||||
}}
|
||||
>
|
||||
{item.text}
|
||||
</InlineButton>
|
||||
{/each}
|
||||
</div>
|
||||
{:else if selectedCellsInfo}
|
||||
<div class="row-count-label">
|
||||
{selectedCellsInfo}
|
||||
</div>
|
||||
@@ -2118,6 +2214,13 @@
|
||||
bottom: 20px;
|
||||
}
|
||||
|
||||
.selection-menu {
|
||||
position: absolute;
|
||||
background-color: var(--theme-bg-2);
|
||||
right: 40px;
|
||||
bottom: 20px;
|
||||
}
|
||||
|
||||
.no-rows-info {
|
||||
margin-top: 60px;
|
||||
}
|
||||
|
||||
@@ -1,5 +1,19 @@
|
||||
<script lang="ts" context="module">
|
||||
const OVERLAY_STATUS_ICONS = {
|
||||
regular: 'icon equal',
|
||||
updated: 'icon not-equal',
|
||||
missing: 'img table',
|
||||
inserted: 'img archive',
|
||||
};
|
||||
const OVERLAY_STATUS_TOOLTIPS = {
|
||||
regular: 'Row is the same in database and archive',
|
||||
updated: 'Row is different in database and archive',
|
||||
missing: 'Row is only in database',
|
||||
inserted: 'Row is only in archive',
|
||||
};
|
||||
</script>
|
||||
|
||||
<script lang="ts">
|
||||
import openReferenceForm from '../formview/openReferenceForm';
|
||||
import DictionaryLookupModal from '../modals/DictionaryLookupModal.svelte';
|
||||
import { showModal } from '../modals/modalTools';
|
||||
|
||||
@@ -27,6 +41,7 @@
|
||||
export let database;
|
||||
export let driver;
|
||||
export let gridColoringMode = '36';
|
||||
export let overlayDefinition = null;
|
||||
|
||||
export let dataEditorTypesBehaviourOverride = null;
|
||||
|
||||
@@ -51,10 +66,17 @@
|
||||
onConfirm: value => grider.setCellValue(rowIndex, col.uniqueName, value),
|
||||
});
|
||||
}
|
||||
|
||||
// $: console.log('rowStatus', rowStatus);
|
||||
</script>
|
||||
|
||||
<tr style={`height: ${rowHeight}px`} class={`coloring-mode-${gridColoringMode}`}>
|
||||
<RowHeaderCell {rowIndex} onShowForm={onSetFormView ? () => onSetFormView(rowData, null) : null} />
|
||||
<RowHeaderCell
|
||||
{rowIndex}
|
||||
onShowForm={onSetFormView && !overlayDefinition ? () => onSetFormView(rowData, null) : null}
|
||||
extraIcon={overlayDefinition ? OVERLAY_STATUS_ICONS[rowStatus.status] : null}
|
||||
extraIconTooltip={overlayDefinition ? OVERLAY_STATUS_TOOLTIPS[rowStatus.status] : null}
|
||||
/>
|
||||
{#each visibleRealColumns as col (col.uniqueName)}
|
||||
{#if inplaceEditorState.cell && rowIndex == inplaceEditorState.cell[0] && col.colIndex == inplaceEditorState.cell[1]}
|
||||
<InplaceEditor
|
||||
@@ -83,11 +105,15 @@
|
||||
isAutofillSelected={cellIsSelected(rowIndex, col.colIndex, autofillSelectedCells)}
|
||||
isFocusedColumn={focusedColumns?.includes(col.uniqueName)}
|
||||
isModifiedCell={rowStatus.modifiedFields && rowStatus.modifiedFields.has(col.uniqueName)}
|
||||
overlayValue={rowStatus.overlayFields?.[col.uniqueName]}
|
||||
hasOverlayValue={rowStatus.overlayFields && col.uniqueName in rowStatus.overlayFields}
|
||||
isMissingOverlayField={rowStatus.missingOverlayFields && rowStatus.missingOverlayFields.has(col.uniqueName)}
|
||||
isModifiedRow={rowStatus.status == 'updated'}
|
||||
isInserted={rowStatus.status == 'inserted' ||
|
||||
(rowStatus.insertedFields && rowStatus.insertedFields.has(col.uniqueName))}
|
||||
isDeleted={rowStatus.status == 'deleted' ||
|
||||
(rowStatus.deletedFields && rowStatus.deletedFields.has(col.uniqueName))}
|
||||
isMissing={rowStatus.status == 'missing'}
|
||||
{onSetFormView}
|
||||
{isDynamicStructure}
|
||||
isAutoFillMarker={autofillMarkerCell &&
|
||||
|
||||
@@ -1,8 +1,10 @@
|
||||
export interface GriderRowStatus {
|
||||
status: 'regular' | 'updated' | 'deleted' | 'inserted';
|
||||
status: 'regular' | 'updated' | 'deleted' | 'inserted' | 'missing';
|
||||
modifiedFields?: Set<string>;
|
||||
insertedFields?: Set<string>;
|
||||
deletedFields?: Set<string>;
|
||||
overlayFields?: { [field: string]: string };
|
||||
missingOverlayFields?: Set<string>;
|
||||
}
|
||||
|
||||
export default abstract class Grider {
|
||||
@@ -61,4 +63,7 @@ export default abstract class Grider {
|
||||
this.setCellValue(index, key, changeObject[key]);
|
||||
}
|
||||
}
|
||||
getInsertedRowIndex(index) {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -20,7 +20,7 @@
|
||||
export let allowChangeChangeSetStructure = false;
|
||||
export let infoLoadCounter = 0;
|
||||
|
||||
export let driver;
|
||||
export let driver = null;
|
||||
|
||||
let loadedRows;
|
||||
let infoCounter = 0;
|
||||
|
||||
110
packages/web/src/datagrid/OverlayDiffGrider.ts
Normal file
110
packages/web/src/datagrid/OverlayDiffGrider.ts
Normal file
@@ -0,0 +1,110 @@
|
||||
import { GridDisplay } from 'dbgate-datalib';
|
||||
import Grider from './Grider';
|
||||
import { GriderRowStatus } from './Grider';
|
||||
import _uniq from 'lodash/uniq';
|
||||
|
||||
export default class OverlayDiffGrider extends Grider {
|
||||
private prependRows: any[];
|
||||
private rowCacheIndexes: Set<number>;
|
||||
private rowDataCache;
|
||||
private rowStatusCache;
|
||||
private overlayRowsByStr: { [key: string]: any };
|
||||
|
||||
constructor(
|
||||
public sourceRows: any[],
|
||||
public display: GridDisplay,
|
||||
public matchColumns: string[],
|
||||
public overlayData: any[],
|
||||
public matchedDbKeys: any[][]
|
||||
) {
|
||||
super();
|
||||
const matchedDbKeysByStr = new Set(matchedDbKeys.map(x => x.join('||')));
|
||||
this.prependRows = overlayData.filter(row => !matchedDbKeysByStr.has(matchColumns.map(x => row[x]).join('||')));
|
||||
this.overlayRowsByStr = {};
|
||||
for (const row of overlayData) {
|
||||
const key = matchColumns.map(x => row[x]).join('||');
|
||||
this.overlayRowsByStr[key] = row;
|
||||
}
|
||||
|
||||
this.rowDataCache = {};
|
||||
this.rowStatusCache = {};
|
||||
this.rowCacheIndexes = new Set();
|
||||
}
|
||||
|
||||
requireRowCache(index: number) {
|
||||
if (this.rowCacheIndexes.has(index)) return;
|
||||
|
||||
if (index < this.prependRows.length) {
|
||||
this.rowStatusCache[index] = {
|
||||
status: 'inserted',
|
||||
};
|
||||
this.rowDataCache[index] = this.prependRows[index];
|
||||
this.rowCacheIndexes.add(index);
|
||||
return;
|
||||
}
|
||||
|
||||
const row = this.sourceRows[index - this.prependRows.length];
|
||||
|
||||
if (!row) {
|
||||
this.rowStatusCache[index] = {
|
||||
status: 'missing',
|
||||
};
|
||||
this.rowDataCache[index] = row;
|
||||
this.rowCacheIndexes.add(index);
|
||||
return;
|
||||
}
|
||||
|
||||
const overlayKey = this.matchColumns.map(x => row[x]).join('||');
|
||||
const overlayRow = this.overlayRowsByStr[overlayKey];
|
||||
|
||||
if (!overlayRow) {
|
||||
this.rowStatusCache[index] = {
|
||||
status: 'missing',
|
||||
};
|
||||
this.rowDataCache[index] = row;
|
||||
this.rowCacheIndexes.add(index);
|
||||
return;
|
||||
}
|
||||
|
||||
const overlayFields = {};
|
||||
const missingOverlayFields = new Set();
|
||||
|
||||
for (const field of this.display.columns.map(x => x.columnName)) {
|
||||
if (!(field in overlayRow)) {
|
||||
missingOverlayFields.add(field);
|
||||
} else if (row[field] != overlayRow[field]) {
|
||||
overlayFields[field] = overlayRow[field];
|
||||
}
|
||||
}
|
||||
|
||||
if (Object.keys(overlayFields).length > 0 || missingOverlayFields.size > 0) {
|
||||
this.rowStatusCache[index] = {
|
||||
status: 'updated',
|
||||
overlayFields,
|
||||
missingOverlayFields,
|
||||
modifiedFields: new Set(Object.keys(overlayFields)),
|
||||
};
|
||||
this.rowDataCache[index] = row;
|
||||
} else {
|
||||
this.rowStatusCache[index] = {
|
||||
status: 'regular',
|
||||
};
|
||||
this.rowDataCache[index] = row;
|
||||
}
|
||||
this.rowCacheIndexes.add(index);
|
||||
}
|
||||
|
||||
getRowData(index: number) {
|
||||
this.requireRowCache(index);
|
||||
return this.rowDataCache[index];
|
||||
}
|
||||
|
||||
getRowStatus(index): GriderRowStatus {
|
||||
this.requireRowCache(index);
|
||||
return this.rowStatusCache[index];
|
||||
}
|
||||
|
||||
get rowCount() {
|
||||
return this.sourceRows.length + this.prependRows.length;
|
||||
}
|
||||
}
|
||||
@@ -1,9 +1,13 @@
|
||||
<script lang="ts">
|
||||
import ShowFormButton from '../formview/ShowFormButton.svelte';
|
||||
import FontIcon from '../icons/FontIcon.svelte';
|
||||
|
||||
export let rowIndex;
|
||||
export let onShowForm;
|
||||
|
||||
export let extraIcon = null;
|
||||
export let extraIconTooltip = null;
|
||||
|
||||
let mouseIn = false;
|
||||
</script>
|
||||
|
||||
@@ -18,6 +22,11 @@
|
||||
{#if mouseIn && onShowForm}
|
||||
<ShowFormButton on:click={onShowForm} />
|
||||
{/if}
|
||||
{#if extraIcon}
|
||||
<div class="extraIcon" title={extraIconTooltip}>
|
||||
<FontIcon icon={extraIcon} />
|
||||
</div>
|
||||
{/if}
|
||||
</td>
|
||||
|
||||
<style>
|
||||
@@ -29,4 +38,9 @@
|
||||
overflow: hidden;
|
||||
position: relative;
|
||||
}
|
||||
.extraIcon {
|
||||
position: absolute;
|
||||
right: 0px;
|
||||
top: 1px;
|
||||
}
|
||||
</style>
|
||||
|
||||
@@ -83,6 +83,7 @@
|
||||
import hasPermission from '../utility/hasPermission';
|
||||
import { openImportExportTab } from '../utility/importExportTools';
|
||||
import { getIntSettingsValue } from '../settings/settingsTools';
|
||||
import OverlayDiffGrider from './OverlayDiffGrider';
|
||||
|
||||
export let conid;
|
||||
export let display;
|
||||
@@ -92,6 +93,7 @@
|
||||
export let config;
|
||||
export let changeSetState;
|
||||
export let dispatchChangeSet;
|
||||
export let overlayDefinition = null;
|
||||
|
||||
export let macroPreview;
|
||||
export let macroValues;
|
||||
@@ -110,7 +112,7 @@
|
||||
// $: console.log('loadedRows BIND', loadedRows);
|
||||
|
||||
$: {
|
||||
if (macroPreview) {
|
||||
if (!overlayDefinition && macroPreview) {
|
||||
grider = new ChangeSetGrider(
|
||||
loadedRows,
|
||||
changeSetState,
|
||||
@@ -124,13 +126,25 @@
|
||||
}
|
||||
// prevent recreate grider, if no macro is selected, so there is no need to selectedcells in macro
|
||||
$: {
|
||||
if (!macroPreview) {
|
||||
if (!overlayDefinition && !macroPreview) {
|
||||
grider = new ChangeSetGrider(loadedRows, changeSetState, dispatchChangeSet, display);
|
||||
}
|
||||
}
|
||||
// $: console.log('GRIDER', grider);
|
||||
// $: if (onChangeGrider) onChangeGrider(grider);
|
||||
|
||||
$: {
|
||||
if (overlayDefinition) {
|
||||
grider = new OverlayDiffGrider(
|
||||
loadedRows,
|
||||
display,
|
||||
overlayDefinition.matchColumns,
|
||||
overlayDefinition.overlayData,
|
||||
overlayDefinition.matchedDbKeys
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
export async function exportGrid() {
|
||||
const coninfo = await getConnectionInfo({ conid });
|
||||
|
||||
|
||||
@@ -47,6 +47,8 @@
|
||||
|
||||
export let isRawMode = false;
|
||||
|
||||
export let forceReadOnly = false;
|
||||
|
||||
$: connection = useConnectionInfo({ conid });
|
||||
$: dbinfo = useDatabaseInfo({ conid, database });
|
||||
$: serverVersion = useDatabaseServerVersion({ conid, database });
|
||||
@@ -73,7 +75,7 @@
|
||||
{ showHintColumns: getBoolSettingsValue('dataGrid.showHintColumns', true) },
|
||||
$serverVersion,
|
||||
table => getDictionaryDescription(table, conid, database, $apps, $connections),
|
||||
$connection?.isReadOnly,
|
||||
forceReadOnly || $connection?.isReadOnly,
|
||||
isRawMode
|
||||
)
|
||||
: null;
|
||||
@@ -161,7 +163,7 @@
|
||||
formViewComponent={SqlFormView}
|
||||
{display}
|
||||
showReferences
|
||||
showMacros={!$connection?.isReadOnly}
|
||||
showMacros={!forceReadOnly && !$connection?.isReadOnly}
|
||||
hasMultiColumnFilter
|
||||
onRunMacro={handleRunMacro}
|
||||
macroCondition={macro => macro.type == 'transformValue'}
|
||||
|
||||
@@ -69,6 +69,8 @@
|
||||
})
|
||||
.addTo(map);
|
||||
|
||||
leaflet.control.scale().addTo(map);
|
||||
|
||||
addObjectToMap();
|
||||
});
|
||||
|
||||
|
||||
@@ -3,6 +3,7 @@
|
||||
import FontIcon from '../icons/FontIcon.svelte';
|
||||
import Link from './Link.svelte';
|
||||
import TableControl from './TableControl.svelte';
|
||||
import { writable } from 'svelte/store';
|
||||
|
||||
export let title;
|
||||
export let collection;
|
||||
@@ -12,6 +13,9 @@
|
||||
export let hideDisplayName = false;
|
||||
export let clickable = false;
|
||||
export let onAddNew = null;
|
||||
export let displayNameFieldName = null;
|
||||
|
||||
export let filters = writable({});
|
||||
|
||||
let collapsed = false;
|
||||
</script>
|
||||
@@ -43,14 +47,16 @@
|
||||
rows={collection || []}
|
||||
columns={_.compact([
|
||||
!hideDisplayName && {
|
||||
fieldName: 'displayName',
|
||||
fieldName: displayNameFieldName || 'displayName',
|
||||
header: 'Name',
|
||||
slot: -1,
|
||||
sortable: true,
|
||||
filterable: !!displayNameFieldName,
|
||||
},
|
||||
...columns,
|
||||
])}
|
||||
{clickable}
|
||||
{filters}
|
||||
on:clickrow
|
||||
>
|
||||
<svelte:fragment slot="-1" let:row let:col>
|
||||
|
||||
@@ -7,8 +7,11 @@
|
||||
props?: any;
|
||||
formatter?: any;
|
||||
slot?: number;
|
||||
slotKey?: string;
|
||||
isHighlighted?: Function;
|
||||
sortable?: boolean;
|
||||
filterable?: boolean;
|
||||
filteredExpression?: (row: any) => string;
|
||||
}
|
||||
</script>
|
||||
|
||||
@@ -19,12 +22,19 @@
|
||||
import keycodes from '../utility/keycodes';
|
||||
import { createEventDispatcher } from 'svelte';
|
||||
import FontIcon from '../icons/FontIcon.svelte';
|
||||
import DataFilterControl from '../datagrid/DataFilterControl.svelte';
|
||||
import { evalFilterBehaviour } from 'dbgate-tools';
|
||||
import { evaluateCondition } from 'dbgate-sqltree';
|
||||
import { compileCompoudEvalCondition } from 'dbgate-filterparser';
|
||||
import { chevronExpandIcon } from '../icons/expandIcons';
|
||||
|
||||
export let columns: (TableControlColumn | false)[];
|
||||
export let rows;
|
||||
export let rows = null;
|
||||
export let groupedRows = null;
|
||||
export let focusOnCreate = false;
|
||||
export let selectable = false;
|
||||
export let selectedIndex = 0;
|
||||
export let selectedKey = null;
|
||||
export let clickable = false;
|
||||
export let disableFocusOutline = false;
|
||||
export let emptyMessage = null;
|
||||
@@ -35,7 +45,11 @@
|
||||
|
||||
export let checkedKeys = null;
|
||||
export let onSetCheckedKeys = null;
|
||||
export let extractCheckedKey = x => x.id;
|
||||
export let extractTableItemKey = x => x.id;
|
||||
export let itemSupportsCheckbox = x => true;
|
||||
export let filters = null;
|
||||
|
||||
export let selectionMode: 'index' | 'key' = 'index';
|
||||
|
||||
const dispatch = createEventDispatcher();
|
||||
|
||||
@@ -46,19 +60,120 @@
|
||||
});
|
||||
|
||||
const handleKeyDown = event => {
|
||||
if (event.keyCode == keycodes.downArrow) {
|
||||
selectedIndex = Math.min(selectedIndex + 1, sortedRows.length - 1);
|
||||
const oldSelectedIndex =
|
||||
selectionMode == 'index' ? selectedIndex : _.findIndex(flatRowsShown, x => extractTableItemKey(x) == selectedKey);
|
||||
let newIndex = oldSelectedIndex;
|
||||
|
||||
switch (event.keyCode) {
|
||||
case keycodes.downArrow:
|
||||
newIndex = Math.min(newIndex + 1, flatRowsShown.length - 1);
|
||||
break;
|
||||
case keycodes.upArrow:
|
||||
newIndex = Math.max(0, newIndex - 1);
|
||||
break;
|
||||
case keycodes.home:
|
||||
newIndex = 0;
|
||||
break;
|
||||
case keycodes.end:
|
||||
newIndex = rows.length - 1;
|
||||
break;
|
||||
case keycodes.pageUp:
|
||||
newIndex -= 10;
|
||||
break;
|
||||
case keycodes.pageDown:
|
||||
newIndex += 10;
|
||||
break;
|
||||
}
|
||||
if (event.keyCode == keycodes.upArrow) {
|
||||
selectedIndex = Math.max(0, selectedIndex - 1);
|
||||
if (newIndex < 0) {
|
||||
newIndex = 0;
|
||||
}
|
||||
if (newIndex >= flatRowsShown.length) {
|
||||
newIndex = flatRowsShown.length - 1;
|
||||
}
|
||||
|
||||
if (clickable && oldSelectedIndex != newIndex) {
|
||||
event.preventDefault();
|
||||
domRows[newIndex]?.scrollIntoView();
|
||||
if (clickable) {
|
||||
dispatch('clickrow', flatRowsShown[newIndex]);
|
||||
}
|
||||
if (selectionMode == 'index') {
|
||||
selectedIndex = newIndex;
|
||||
} else {
|
||||
selectedKey = extractTableItemKey(flatRowsShown[newIndex]);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
function filterRows(grows, filters) {
|
||||
const condition = compileCompoudEvalCondition(filters);
|
||||
|
||||
if (!condition) return grows;
|
||||
|
||||
return grows
|
||||
.map(gitem => {
|
||||
return {
|
||||
group: gitem.group,
|
||||
rows: gitem.rows.filter(row => {
|
||||
const newrow = { ...row };
|
||||
for (const col of columnList) {
|
||||
if (col.filteredExpression) {
|
||||
newrow[col.fieldName] = col.filteredExpression(row);
|
||||
}
|
||||
}
|
||||
return evaluateCondition(condition, newrow);
|
||||
}),
|
||||
};
|
||||
})
|
||||
.filter(gitem => gitem.rows.length > 0);
|
||||
}
|
||||
|
||||
// function computeGroupedRows(array) {
|
||||
// if (!extractGroupName) {
|
||||
// return [{ label: null, rows: array }];
|
||||
// }
|
||||
// const res = [];
|
||||
// let lastGroupName = null;
|
||||
// let buildArray = [];
|
||||
// for (const item of array) {
|
||||
// const groupName = extractGroupName(item);
|
||||
// if (lastGroupName != groupName) {
|
||||
// if (buildArray.length > 0) {
|
||||
// res.push({ label: lastGroupName, rows: buildArray });
|
||||
// }
|
||||
// lastGroupName = groupName;
|
||||
// buildArray = [];
|
||||
// }
|
||||
// buildArray.push(item);
|
||||
// }
|
||||
// if (buildArray.length > 0) {
|
||||
// res.push({ label: lastGroupName, rows: buildArray });
|
||||
// }
|
||||
// }
|
||||
|
||||
let sortedByField = null;
|
||||
let sortOrderIsDesc = false;
|
||||
let collapsedGroupIndexes = [];
|
||||
let domRows = {};
|
||||
|
||||
$: sortedRowsTmp = sortedByField ? _.sortBy(rows || [], sortedByField) : rows;
|
||||
$: sortedRows = sortOrderIsDesc ? [...sortedRowsTmp].reverse() : sortedRowsTmp;
|
||||
$: rowsSource = groupedRows ? groupedRows : [{ group: null, rows }];
|
||||
|
||||
$: filteredRows = filters ? filterRows(rowsSource, $filters) : rowsSource;
|
||||
|
||||
$: sortedRows = sortedByField
|
||||
? filteredRows.map(gitem => {
|
||||
let res = _.sortBy(gitem.rows, sortedByField);
|
||||
if (sortOrderIsDesc) res = [...res].reverse();
|
||||
return { group: gitem.group, rows: res };
|
||||
})
|
||||
: filteredRows;
|
||||
|
||||
// $: console.log('sortedRows', sortedRows);
|
||||
|
||||
$: flatRowsShown = sortedRows.map(gitem => gitem.rows).flat();
|
||||
$: checkableFlatRowsShown = flatRowsShown.filter(x => itemSupportsCheckbox(x));
|
||||
|
||||
// $: groupedRows = computeGroupedRows(sortedRows);
|
||||
</script>
|
||||
|
||||
<table
|
||||
@@ -73,7 +188,17 @@
|
||||
<thead class:stickyHeader>
|
||||
<tr>
|
||||
{#if checkedKeys}
|
||||
<th></th>
|
||||
<th>
|
||||
<input
|
||||
type="checkbox"
|
||||
checked={checkableFlatRowsShown.every(r => checkedKeys.includes(extractTableItemKey(r)))}
|
||||
data-testid="TableControl_selectAllCheckBox"
|
||||
on:change={e => {
|
||||
if (e.target['checked']) onSetCheckedKeys(checkableFlatRowsShown.map(r => extractTableItemKey(r)));
|
||||
else onSetCheckedKeys([]);
|
||||
}}
|
||||
/>
|
||||
</th>
|
||||
{/if}
|
||||
{#each columnList as col}
|
||||
<th
|
||||
@@ -101,60 +226,114 @@
|
||||
</th>
|
||||
{/each}
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
{#each sortedRows as row, index}
|
||||
<tr
|
||||
class:selected={selectable && selectedIndex == index}
|
||||
class:clickable
|
||||
on:click={() => {
|
||||
if (selectable) {
|
||||
selectedIndex = index;
|
||||
domTable.focus();
|
||||
}
|
||||
if (clickable) {
|
||||
dispatch('clickrow', row);
|
||||
}
|
||||
}}
|
||||
>
|
||||
{#if filters}
|
||||
<tr>
|
||||
{#if checkedKeys}
|
||||
<td>
|
||||
<input
|
||||
type="checkbox"
|
||||
checked={checkedKeys.includes(extractCheckedKey(row))}
|
||||
on:change={e => {
|
||||
if (e.target['checked']) onSetCheckedKeys(_.uniq([...checkedKeys, extractCheckedKey(row)]));
|
||||
else onSetCheckedKeys(checkedKeys.filter(x => x != extractCheckedKey(row)));
|
||||
}}
|
||||
/>
|
||||
</td>
|
||||
<td class="empty-cell"></td>
|
||||
{/if}
|
||||
{#each columnList as col}
|
||||
{@const rowProps = { ...col.props, ...(col.getProps ? col.getProps(row) : null) }}
|
||||
<td class:isHighlighted={col.isHighlighted && col.isHighlighted(row)} class:noCellPadding>
|
||||
{#if col.component}
|
||||
<svelte:component this={col.component} {...rowProps} />
|
||||
{:else if col.formatter}
|
||||
{col.formatter(row)}
|
||||
{:else if col.slot != null}
|
||||
{#if col.slot == -1}<slot name="-1" {row} {col} {index} />
|
||||
{:else if col.slot == 0}<slot name="0" {row} {col} {index} {...rowProps} />
|
||||
{:else if col.slot == 1}<slot name="1" {row} {col} {index} {...rowProps} />
|
||||
{:else if col.slot == 2}<slot name="2" {row} {col} {index} {...rowProps} />
|
||||
{:else if col.slot == 3}<slot name="3" {row} {col} {index} {...rowProps} />
|
||||
{:else if col.slot == 4}<slot name="4" {row} {col} {index} {...rowProps} />
|
||||
{:else if col.slot == 5}<slot name="5" {row} {col} {index} {...rowProps} />
|
||||
{:else if col.slot == 6}<slot name="6" {row} {col} {index} {...rowProps} />
|
||||
{:else if col.slot == 7}<slot name="7" {row} {col} {index} {...rowProps} />
|
||||
{:else if col.slot == 8}<slot name="8" {row} {col} {index} {...rowProps} />
|
||||
{:else if col.slot == 9}<slot name="9" {row} {col} {index} {...rowProps} />
|
||||
{/if}
|
||||
{:else}
|
||||
{row[col.fieldName] || ''}
|
||||
<td class="filter-cell" class:empty-cell={!col.filterable}>
|
||||
{#if col.filterable}
|
||||
<DataFilterControl
|
||||
filterBehaviour={evalFilterBehaviour}
|
||||
filter={$filters[col.fieldName]}
|
||||
setFilter={value => filters.update(f => ({ ...f, [col.fieldName]: value }))}
|
||||
placeholder="Data filter"
|
||||
/>
|
||||
{/if}
|
||||
</td>
|
||||
{/each}
|
||||
</tr>
|
||||
{/if}
|
||||
</thead>
|
||||
<tbody>
|
||||
{#each sortedRows as gitem, groupIndex}
|
||||
{#if gitem.group}
|
||||
<tr class="group-row">
|
||||
<td
|
||||
colspan={columnList.length + (checkedKeys ? 1 : 0)}
|
||||
class="groupcell"
|
||||
on:click={() => {
|
||||
if (collapsedGroupIndexes.includes(groupIndex)) {
|
||||
collapsedGroupIndexes = collapsedGroupIndexes.filter(x => x != groupIndex);
|
||||
} else {
|
||||
collapsedGroupIndexes = [...collapsedGroupIndexes, groupIndex];
|
||||
}
|
||||
}}
|
||||
>
|
||||
<FontIcon icon={chevronExpandIcon(!collapsedGroupIndexes.includes(groupIndex))} padRight />
|
||||
<strong>{gitem.group} ({gitem.rows.length})</strong>
|
||||
</td>
|
||||
</tr>
|
||||
{/if}
|
||||
{#if !collapsedGroupIndexes.includes(groupIndex)}
|
||||
{#each gitem.rows as row}
|
||||
{@const index = _.indexOf(flatRowsShown, row)}
|
||||
<tr
|
||||
class:selected={selectable &&
|
||||
(selectionMode == 'index' ? selectedIndex == index : selectedKey == extractTableItemKey(row))}
|
||||
class:clickable
|
||||
bind:this={domRows[index]}
|
||||
on:click={() => {
|
||||
if (selectable) {
|
||||
if (selectionMode == 'index') {
|
||||
selectedIndex = index;
|
||||
} else {
|
||||
selectedKey = extractTableItemKey(row);
|
||||
}
|
||||
domTable.focus();
|
||||
}
|
||||
if (clickable) {
|
||||
dispatch('clickrow', row);
|
||||
}
|
||||
}}
|
||||
data-testid={`TableControl_row_${index}`}
|
||||
>
|
||||
{#if checkedKeys}
|
||||
<td>
|
||||
{#if itemSupportsCheckbox(row)}
|
||||
<input
|
||||
type="checkbox"
|
||||
checked={checkedKeys.includes(extractTableItemKey(row))}
|
||||
on:change={e => {
|
||||
if (e.target['checked']) onSetCheckedKeys(_.uniq([...checkedKeys, extractTableItemKey(row)]));
|
||||
else onSetCheckedKeys(checkedKeys.filter(x => x != extractTableItemKey(row)));
|
||||
}}
|
||||
data-testid={`TableControl_row_${index}_checkbox`}
|
||||
/>
|
||||
{/if}
|
||||
</td>
|
||||
{/if}
|
||||
{#each columnList as col}
|
||||
{@const rowProps = { ...col.props, ...(col.getProps ? col.getProps(row) : null) }}
|
||||
<td class:isHighlighted={col.isHighlighted && col.isHighlighted(row)} class:noCellPadding>
|
||||
{#if col.component}
|
||||
<svelte:component this={col.component} {...rowProps} />
|
||||
{:else if col.formatter}
|
||||
{col.formatter(row)}
|
||||
{:else if col.slot != null}
|
||||
{#key row[col.slotKey] || 'key'}
|
||||
{#if col.slot == -1}<slot name="-1" {row} {col} {index} />
|
||||
{:else if col.slot == 0}<slot name="0" {row} {col} {index} {...rowProps} />
|
||||
{:else if col.slot == 1}<slot name="1" {row} {col} {index} {...rowProps} />
|
||||
{:else if col.slot == 2}<slot name="2" {row} {col} {index} {...rowProps} />
|
||||
{:else if col.slot == 3}<slot name="3" {row} {col} {index} {...rowProps} />
|
||||
{:else if col.slot == 4}<slot name="4" {row} {col} {index} {...rowProps} />
|
||||
{:else if col.slot == 5}<slot name="5" {row} {col} {index} {...rowProps} />
|
||||
{:else if col.slot == 6}<slot name="6" {row} {col} {index} {...rowProps} />
|
||||
{:else if col.slot == 7}<slot name="7" {row} {col} {index} {...rowProps} />
|
||||
{:else if col.slot == 8}<slot name="8" {row} {col} {index} {...rowProps} />
|
||||
{:else if col.slot == 9}<slot name="9" {row} {col} {index} {...rowProps} />
|
||||
{/if}
|
||||
{/key}
|
||||
{:else}
|
||||
{row[col.fieldName] || ''}
|
||||
{/if}
|
||||
</td>
|
||||
{/each}
|
||||
</tr>
|
||||
{/each}
|
||||
{/if}
|
||||
{/each}
|
||||
{#if emptyMessage && sortedRows.length == 0}
|
||||
<tr>
|
||||
@@ -179,6 +358,9 @@
|
||||
background: var(--theme-bg-0);
|
||||
}
|
||||
tbody tr.selected {
|
||||
background: var(--theme-bg-3);
|
||||
}
|
||||
table:focus tbody tr.selected {
|
||||
background: var(--theme-bg-selected);
|
||||
}
|
||||
tbody tr.clickable:hover {
|
||||
@@ -232,4 +414,20 @@
|
||||
border-collapse: separate;
|
||||
border-left: 1px solid var(--theme-border);
|
||||
}
|
||||
|
||||
.filter-cell {
|
||||
text-align: left;
|
||||
overflow: hidden;
|
||||
margin: 0;
|
||||
padding: 0;
|
||||
}
|
||||
|
||||
.empty-cell {
|
||||
background-color: var(--theme-bg-1);
|
||||
}
|
||||
|
||||
.groupcell {
|
||||
background-color: var(--theme-bg-1);
|
||||
cursor: pointer;
|
||||
}
|
||||
</style>
|
||||
|
||||
@@ -10,14 +10,17 @@
|
||||
|
||||
export let folderName;
|
||||
export let name;
|
||||
export let filterExtension = null;
|
||||
|
||||
const { setFieldValue, values } = getFormContext();
|
||||
|
||||
$: files = useArchiveFiles({ folder: folderName });
|
||||
$: filesOptions = ($files || []).map(x => ({
|
||||
value: x.name,
|
||||
label: x.name,
|
||||
}));
|
||||
$: filesOptions = ($files || [])
|
||||
.filter(x => (filterExtension ? x.name.endsWith('.' + filterExtension) : true))
|
||||
.map(x => ({
|
||||
value: x.name,
|
||||
label: x.name,
|
||||
}));
|
||||
</script>
|
||||
|
||||
<div class="wrapper">
|
||||
|
||||
@@ -11,16 +11,22 @@
|
||||
|
||||
export let additionalFolders = [];
|
||||
export let name;
|
||||
export let allowCreateNew = false;
|
||||
export let zipFilesOnly = false;
|
||||
export let skipZipFiles = false;
|
||||
|
||||
const { setFieldValue } = getFormContext();
|
||||
|
||||
const folders = useArchiveFolders();
|
||||
|
||||
$: folderOptions = [
|
||||
...($folders || []).map(folder => ({
|
||||
value: folder.name,
|
||||
label: folder.name,
|
||||
})),
|
||||
...($folders || [])
|
||||
.filter(folder => (zipFilesOnly ? folder.name.endsWith('.zip') : true))
|
||||
.filter(folder => (skipZipFiles ? !folder.name.endsWith('.zip') : true))
|
||||
.map(folder => ({
|
||||
value: folder.name,
|
||||
label: folder.name,
|
||||
})),
|
||||
...additionalFolders
|
||||
.filter(x => x != '@create')
|
||||
.filter(x => !($folders || []).find(y => y.name == x))
|
||||
@@ -28,7 +34,7 @@
|
||||
value: folder,
|
||||
label: folder,
|
||||
})),
|
||||
{
|
||||
allowCreateNew && {
|
||||
label: '(Create new)',
|
||||
value: '@create',
|
||||
},
|
||||
@@ -43,7 +49,7 @@
|
||||
if (e.detail == '@create') {
|
||||
showModal(InputTextModal, {
|
||||
header: 'Archive',
|
||||
label: 'Name of new folder',
|
||||
label: 'Name of new archive folder',
|
||||
onConfirm: createOption,
|
||||
});
|
||||
}
|
||||
|
||||
@@ -22,7 +22,7 @@
|
||||
? { disabled: true }
|
||||
: {
|
||||
onClick: () => {
|
||||
setFieldValue(name, !$values[name]);
|
||||
setFieldValue(name, $values?.[name] == 0 ? true : $values?.[name] == 1 ? false : !$values?.[name]);
|
||||
dispatch('change');
|
||||
},
|
||||
}}
|
||||
|
||||
@@ -11,4 +11,9 @@
|
||||
}
|
||||
</script>
|
||||
|
||||
<CheckboxField {...$$restProps} checked={$values[name] ?? defaultValue} on:change={handleChange} on:change />
|
||||
<CheckboxField
|
||||
{...$$restProps}
|
||||
checked={$values?.[name] == 0 ? false : $values?.[name] == '1' ? true : ($values?.[name] ?? defaultValue)}
|
||||
on:change={handleChange}
|
||||
on:change
|
||||
/>
|
||||
|
||||
@@ -110,6 +110,7 @@
|
||||
'icon history': 'mdi mdi-history',
|
||||
'icon structure': 'mdi mdi-tools',
|
||||
'icon square': 'mdi mdi-square',
|
||||
'icon data-deploy': 'mdi mdi-database-settings',
|
||||
|
||||
'icon edit': 'mdi mdi-pencil',
|
||||
'icon delete': 'mdi mdi-delete',
|
||||
@@ -206,6 +207,8 @@
|
||||
'icon type-objectid': 'mdi mdi-alpha-i-box',
|
||||
'icon type-null': 'mdi mdi-code-equal',
|
||||
'icon type-unknown': 'mdi mdi-help-box',
|
||||
'icon equal': 'mdi mdi-equal',
|
||||
'icon not-equal': 'mdi mdi-not-equal-variant',
|
||||
|
||||
'icon at': 'mdi mdi-at',
|
||||
'icon expand-all': 'mdi mdi-expand-all',
|
||||
@@ -218,6 +221,7 @@
|
||||
'icon autocommit-off': 'mdi mdi-check-circle-outline',
|
||||
|
||||
'icon premium': 'mdi mdi-star',
|
||||
'icon upload': 'mdi mdi-upload',
|
||||
|
||||
'img ok': 'mdi mdi-check-circle color-icon-green',
|
||||
'img ok-inv': 'mdi mdi-check-circle color-icon-inv-green',
|
||||
@@ -232,12 +236,14 @@
|
||||
|
||||
'img archive': 'mdi mdi-table color-icon-gold',
|
||||
'img archive-folder': 'mdi mdi-database-outline color-icon-green',
|
||||
'img zipfile': 'mdi mdi-zip-box color-icon-gold',
|
||||
'img autoincrement': 'mdi mdi-numeric-1-box-multiple-outline',
|
||||
'img column': 'mdi mdi-table-column',
|
||||
'img server': 'mdi mdi-server color-icon-blue',
|
||||
'img primary-key': 'mdi mdi-key-star color-icon-yellow',
|
||||
'img foreign-key': 'mdi mdi-key-link',
|
||||
'img sql-file': 'mdi mdi-file',
|
||||
'img anyfile': 'mdi mdi-file-question color-icon-red',
|
||||
'img shell': 'mdi mdi-flash color-icon-blue',
|
||||
'img chart': 'mdi mdi-chart-bar color-icon-magenta',
|
||||
'img markdown': 'mdi mdi-application color-icon-red',
|
||||
@@ -301,7 +307,7 @@
|
||||
'img type-rejson': 'mdi mdi-color-json color-icon-blue',
|
||||
'img keydb': 'mdi mdi-key color-icon-blue',
|
||||
|
||||
'img duplicator': 'mdi mdi-content-duplicate color-icon-green',
|
||||
'img replicator': 'mdi mdi-content-duplicate color-icon-green',
|
||||
'img import': 'mdi mdi-database-import color-icon-green',
|
||||
'img export': 'mdi mdi-database-export color-icon-green',
|
||||
'img transform': 'mdi mdi-rotate-orbit color-icon-blue',
|
||||
@@ -311,6 +317,8 @@
|
||||
|
||||
'img db-backup': 'mdi mdi-database-export color-icon-yellow',
|
||||
'img db-restore': 'mdi mdi-database-import color-icon-red',
|
||||
'img settings': 'mdi mdi-cog color-icon-blue',
|
||||
'img data-deploy': 'mdi mdi-database-settings color-icon-green',
|
||||
};
|
||||
</script>
|
||||
|
||||
|
||||
@@ -5,7 +5,7 @@
|
||||
import { getConnectionLabel } from 'dbgate-tools';
|
||||
|
||||
export let allowChooseModel = false;
|
||||
export let direction;
|
||||
export let direction = 'source';
|
||||
|
||||
$: connections = useConnectionList();
|
||||
$: connectionOptions = [
|
||||
|
||||
@@ -23,6 +23,11 @@
|
||||
import { findEngineDriver } from 'dbgate-tools';
|
||||
import AceEditor from '../query/AceEditor.svelte';
|
||||
import { _t } from '../translations';
|
||||
import { showModal } from '../modals/modalTools';
|
||||
import InputTextModal from '../modals/InputTextModal.svelte';
|
||||
import FormCheckboxField from '../forms/FormCheckboxField.svelte';
|
||||
import { isProApp } from '../utility/proTools';
|
||||
import FormTextField from '../forms/FormTextField.svelte';
|
||||
|
||||
export let direction;
|
||||
export let storageTypeField;
|
||||
@@ -54,7 +59,7 @@
|
||||
{ value: 'query', label: _t('common.query', { defaultMessage: 'Query' }), directions: ['source'] },
|
||||
{
|
||||
value: 'archive',
|
||||
label: _t('common.archive', { defaultMessage: 'Archive' }),
|
||||
label: _t('common.archive', { defaultMessage: 'Archive (JSONL)' }),
|
||||
directions: ['source', 'target'],
|
||||
},
|
||||
];
|
||||
@@ -108,11 +113,18 @@
|
||||
<FormStyledButton
|
||||
value="New archive"
|
||||
on:click={() => {
|
||||
values.update(x => ({
|
||||
...x,
|
||||
[storageTypeField]: 'archive',
|
||||
[archiveFolderField]: `import-${moment().format('YYYY-MM-DD-hh-mm-ss')}`,
|
||||
}));
|
||||
showModal(InputTextModal, {
|
||||
header: 'Archive',
|
||||
label: 'Name of new archive folder',
|
||||
value: `import-${moment().format('YYYY-MM-DD-hh-mm-ss')}`,
|
||||
onConfirm: value => {
|
||||
values.update(x => ({
|
||||
...x,
|
||||
[storageTypeField]: 'archive',
|
||||
[archiveFolderField]: value,
|
||||
}));
|
||||
},
|
||||
});
|
||||
}}
|
||||
/>
|
||||
{/if}
|
||||
@@ -124,6 +136,41 @@
|
||||
label="Storage type"
|
||||
/>
|
||||
|
||||
{#if format && isProApp()}
|
||||
{#if direction == 'source'}
|
||||
<FormCheckboxField
|
||||
name={`importFromZipFile`}
|
||||
label={_t('importExport.importFromZipFile', { defaultMessage: 'Import from ZIP file (in archive folder)' })}
|
||||
/>
|
||||
{#if $values.importFromZipFile}
|
||||
<FormArchiveFolderSelect
|
||||
label={_t('importExport.importFromZipArchive', { defaultMessage: 'Input ZIP archive' })}
|
||||
name={archiveFolderField}
|
||||
additionalFolders={_.compact([$values[archiveFolderField]])}
|
||||
zipFilesOnly
|
||||
/>
|
||||
{/if}
|
||||
{/if}
|
||||
{#if direction == 'target'}
|
||||
<FormCheckboxField
|
||||
name={`exportToZipFile`}
|
||||
label={_t('importExport.exportToZipFile', { defaultMessage: 'Export to ZIP file' })}
|
||||
/>
|
||||
{#if $values.exportToZipFile}
|
||||
<FormCheckboxField
|
||||
name={`createZipFileInArchive`}
|
||||
label={_t('importExport.createZipFileInArchive', { defaultMessage: 'Create ZIP file in archive' })}
|
||||
/>
|
||||
|
||||
<FormTextField
|
||||
label={_t('importExport.exportToZipArchive', { defaultMessage: 'Output ZIP archive' })}
|
||||
name={archiveFolderField}
|
||||
placeholder={'zip-archive-yyyy-mm-dd-hh-mm-ss.zip'}
|
||||
/>
|
||||
{/if}
|
||||
{/if}
|
||||
{/if}
|
||||
|
||||
{#if storageType == 'database' || storageType == 'query'}
|
||||
<FormConnectionSelect name={connectionIdField} label="Server" {direction} />
|
||||
<FormDatabaseSelect conidName={connectionIdField} name={databaseNameField} label="Database" />
|
||||
@@ -164,18 +211,20 @@
|
||||
label="Archive folder"
|
||||
name={archiveFolderField}
|
||||
additionalFolders={_.compact([$values[archiveFolderField]])}
|
||||
allowCreateNew={direction == 'target'}
|
||||
/>
|
||||
{/if}
|
||||
|
||||
{#if storageType == 'archive' && direction == 'source'}
|
||||
{#if direction == 'source' && (storageType == 'archive' || $values.importFromZipFile)}
|
||||
<FormArchiveFilesSelect
|
||||
label={_t('importExport.sourceFiles', { defaultMessage: 'Source files' })}
|
||||
folderName={$values[archiveFolderField]}
|
||||
name={tablesField}
|
||||
filterExtension={format?.extension}
|
||||
/>
|
||||
{/if}
|
||||
|
||||
{#if format && direction == 'source'}
|
||||
{#if format && direction == 'source' && !$values.importFromZipFile}
|
||||
<FilesInput {setPreviewSource} />
|
||||
{/if}
|
||||
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
import _ from 'lodash';
|
||||
import moment from 'moment';
|
||||
import { ScriptWriter, ScriptWriterJson } from 'dbgate-tools';
|
||||
import getAsArray from '../utility/getAsArray';
|
||||
import { getConnectionInfo } from '../utility/metadataLoaders';
|
||||
@@ -93,7 +94,13 @@ function getSourceExpr(extensions, sourceName, values, sourceConnection, sourceD
|
||||
return [
|
||||
format.readerFunc,
|
||||
{
|
||||
..._.omit(sourceFile, ['isDownload']),
|
||||
...(sourceFile
|
||||
? _.omit(sourceFile, ['isDownload'])
|
||||
: {
|
||||
fileName: values.importFromZipFile
|
||||
? `zip://archive:${values.sourceArchiveFolder}//${sourceName}`
|
||||
: sourceName,
|
||||
}),
|
||||
...extractFormatApiParameters(values, 'source', format),
|
||||
},
|
||||
];
|
||||
@@ -237,6 +244,13 @@ export default async function createImpExpScript(extensions, values, forceScript
|
||||
script.copyStream(sourceVar, targetVar, colmapVar, sourceName);
|
||||
script.endLine();
|
||||
}
|
||||
|
||||
if (values.exportToZipFile) {
|
||||
let zipFileName = values.exportToZipFileName || `zip-archive-${moment().format('YYYY-MM-DD-HH-mm-ss')}.zip`;
|
||||
if (!zipFileName.endsWith('.zip')) zipFileName += '.zip';
|
||||
script.zipDirectory('.', values.createZipFileInArchive ? 'archive:' + zipFileName : zipFileName);
|
||||
}
|
||||
|
||||
return script.getScript(values.schedule);
|
||||
}
|
||||
|
||||
|
||||
@@ -17,7 +17,7 @@
|
||||
|
||||
<div>{message}</div>
|
||||
|
||||
<FormArchiveFolderSelect label="Archive folder" name="archiveFolder" isNative />
|
||||
<FormArchiveFolderSelect label="Archive folder" name="archiveFolder" isNative allowCreateNew />
|
||||
|
||||
<svelte:fragment slot="footer">
|
||||
<FormSubmit
|
||||
|
||||
@@ -15,9 +15,7 @@
|
||||
<FormProvider>
|
||||
<ModalBase {...$$restProps}>
|
||||
<svelte:fragment slot="header">
|
||||
{#if header}
|
||||
{header}
|
||||
{/if}
|
||||
{header || 'Confirm'}
|
||||
</svelte:fragment>
|
||||
|
||||
{message}
|
||||
|
||||
366
packages/web/src/modals/ExportImportConnectionsModal.svelte
Normal file
366
packages/web/src/modals/ExportImportConnectionsModal.svelte
Normal file
@@ -0,0 +1,366 @@
|
||||
<script lang="ts">
|
||||
import { onMount } from 'svelte';
|
||||
|
||||
import FormStyledButton from '../buttons/FormStyledButton.svelte';
|
||||
import FormProvider from '../forms/FormProvider.svelte';
|
||||
|
||||
import ModalBase from './ModalBase.svelte';
|
||||
import { closeCurrentModal } from './modalTools';
|
||||
import { _t } from '../translations';
|
||||
import { apiCall } from '../utility/api';
|
||||
import TabControl from '../elements/TabControl.svelte';
|
||||
import TableControl from '../elements/TableControl.svelte';
|
||||
import { writable } from 'svelte/store';
|
||||
import LargeButton from '../buttons/LargeButton.svelte';
|
||||
import { downloadFromApi } from '../utility/exportFileTools';
|
||||
import getElectron from '../utility/getElectron';
|
||||
import { showSnackbarSuccess } from '../utility/snackbar';
|
||||
import { format } from 'date-fns';
|
||||
import Link from '../elements/Link.svelte';
|
||||
import _ from 'lodash';
|
||||
|
||||
export let mode: 'export' | 'import';
|
||||
export let uploadedFilePath = undefined;
|
||||
|
||||
let fullData: any = {};
|
||||
|
||||
async function loadExportedData() {
|
||||
fullData = await apiCall('config/export-connections-and-settings');
|
||||
initFromFullData();
|
||||
}
|
||||
|
||||
async function loadImportedData() {
|
||||
fullData = await apiCall('files/get-jsons-from-zip', { filePath: uploadedFilePath });
|
||||
initFromFullData();
|
||||
}
|
||||
|
||||
function initFromFullData() {
|
||||
connections = fullData.connections || [];
|
||||
users = fullData.users || [];
|
||||
roles = fullData.roles || [];
|
||||
authMethods = fullData.auth_methods || [];
|
||||
config = fullData.config || [];
|
||||
|
||||
handleCheckAll(true);
|
||||
}
|
||||
|
||||
function handleCheckAll(checked) {
|
||||
if (checked) {
|
||||
checkedConnections = connections.map(x => x.id);
|
||||
checkedUsers = users.map(x => x.id);
|
||||
checkedRoles = roles.map(x => x.id);
|
||||
checkedAuthMethods = authMethods.map(x => x.id);
|
||||
checkedConfig = config.map(x => x.id);
|
||||
} else {
|
||||
checkedConnections = [];
|
||||
checkedUsers = [];
|
||||
checkedRoles = [];
|
||||
checkedAuthMethods = [];
|
||||
checkedConfig = [];
|
||||
}
|
||||
}
|
||||
|
||||
onMount(() => {
|
||||
if (mode == 'export') {
|
||||
loadExportedData();
|
||||
}
|
||||
if (mode == 'import') {
|
||||
loadImportedData();
|
||||
}
|
||||
});
|
||||
|
||||
function getLimitedData() {
|
||||
const limitedData: any = {
|
||||
connections: fullData.connections?.filter(x => checkedConnections.includes(x.id)),
|
||||
|
||||
users: fullData.users?.filter(x => checkedUsers.includes(x.id)),
|
||||
|
||||
user_connections: fullData.user_connections?.filter(
|
||||
x => checkedUsers.includes(x.user_id) && checkedConnections.includes(x.connection_id)
|
||||
),
|
||||
user_roles: fullData.user_roles?.filter(x => checkedUsers.includes(x.user_id) && checkedRoles.includes(x.role_id)),
|
||||
user_permissions: fullData.user_permissions?.filter(x => checkedUsers.includes(x.user_id)),
|
||||
|
||||
roles: fullData.roles?.filter(x => checkedRoles.includes(x.id)),
|
||||
role_connections: fullData.role_connections?.filter(
|
||||
x => checkedRoles.includes(x.role_id) && checkedConnections.includes(x.connection_id)
|
||||
),
|
||||
role_permissions: fullData.role_permissions?.filter(x => checkedRoles.includes(x.role_id)),
|
||||
|
||||
auth_methods: fullData.auth_methods?.filter(x => checkedAuthMethods.includes(x.id)),
|
||||
auth_methods_config: fullData.auth_methods_config?.filter(x => checkedAuthMethods.includes(x.auth_method_id)),
|
||||
|
||||
config: fullData.config?.filter(
|
||||
x => checkedConfig.includes(x.id) || (x.group == 'admin' && x.key == 'encryptionKey')
|
||||
),
|
||||
};
|
||||
return limitedData;
|
||||
}
|
||||
|
||||
async function handleExport() {
|
||||
const electron = getElectron();
|
||||
|
||||
let filePath;
|
||||
let fileName;
|
||||
|
||||
if (electron) {
|
||||
const electron = getElectron();
|
||||
filePath = await electron.showSaveDialog({
|
||||
filters: [
|
||||
{ name: `ZIP files`, extensions: ['zip'] },
|
||||
{ name: `All files`, extensions: ['*'] },
|
||||
],
|
||||
defaultPath: `dbgateconfig.zip`,
|
||||
properties: ['showOverwriteConfirmation'],
|
||||
});
|
||||
} else {
|
||||
const resp = await apiCall('files/generate-uploads-file', { extension: 'sql' });
|
||||
filePath = resp.filePath;
|
||||
fileName = resp.fileName;
|
||||
}
|
||||
|
||||
if (!filePath) {
|
||||
return;
|
||||
}
|
||||
|
||||
await apiCall('files/create-zip-from-jsons', { db: getLimitedData(), filePath });
|
||||
|
||||
if (electron) {
|
||||
showSnackbarSuccess(`Saved to file ${filePath}`);
|
||||
} else {
|
||||
await downloadFromApi(`uploads/get?file=${fileName}`, `dbgateconfig.zip`);
|
||||
}
|
||||
}
|
||||
|
||||
async function handleSaveToArchive() {
|
||||
const filePath = `archive:dbgateconfig-${format(new Date(), 'yyyy-MM-dd-HH-mm-ss')}.zip`;
|
||||
await apiCall('files/create-zip-from-jsons', { db: getLimitedData(), filePath });
|
||||
showSnackbarSuccess(`Saved to ${filePath}`);
|
||||
}
|
||||
|
||||
async function handleImport() {
|
||||
await apiCall('config/import-connections-and-settings', { db: getLimitedData() });
|
||||
showSnackbarSuccess(`Imported connections and settings`);
|
||||
}
|
||||
|
||||
// Rows shown in each tab, and the ids currently checked for export/import.
let connections = [];
let checkedConnections = [];

let users = [];
let checkedUsers = [];

let roles = [];
let checkedRoles = [];

let authMethods = [];
let checkedAuthMethods = [];

let config = [];
let checkedConfig = [];

// Per-tab filter state consumed by each TableControl's column filters.
const connectionFilters = writable({});
const userFilters = writable({});
const roleFilters = writable({});
const authMethodFilters = writable({});
const configFilters = writable({});
</script>
|
||||
|
||||
<!-- Full-screen modal with one tab per object type; rows are checkable to
     limit what gets exported or imported. -->
<FormProvider>
  <ModalBase {...$$restProps} fullScreen>
    <!-- Header: mode-dependent title plus bulk check/uncheck shortcuts -->
    <div slot="header">
      {mode == 'export' ? 'Export' : 'Import'} connections & settings
      <span class="check-uncheck">
        <Link onClick={() => handleCheckAll(true)}>Check all</Link>
        |
        <Link onClick={() => handleCheckAll(false)}>Uncheck all</Link>
      </span>
    </div>

    <div class="tabs">
      <!-- Tabs are hidden for empty sections; labels show checked/total counts -->
      <TabControl
        tabs={_.compact([
          connections?.length && {
            label: `Connections (${checkedConnections?.length}/${connections?.length})`,
            slot: 1,
          },
          users?.length && { label: `Users (${checkedUsers?.length}/${users?.length})`, slot: 2 },
          roles?.length && { label: `Roles (${checkedRoles?.length}/${roles?.length})`, slot: 3 },
          authMethods?.length && {
            label: `Auth methods (${checkedAuthMethods?.length}/${authMethods?.length})`,
            slot: 4,
          },
          config?.length && { label: `Config (${checkedConfig?.length}/${config?.length})`, slot: 5 },
        ])}
      >
        <!-- Connections tab: clicking a row toggles its checked state -->
        <svelte:fragment slot="1">
          <div class="tablewrap">
            <TableControl
              filters={connectionFilters}
              stickyHeader
              columns={[
                { header: 'ID', fieldName: 'id', sortable: true, filterable: true },
                { header: 'Display name', fieldName: 'displayName', sortable: true, filterable: true },
                { header: 'Engine', fieldName: 'engine', sortable: true, filterable: true },
                { header: 'Server', fieldName: 'server', sortable: true, filterable: true },
                { header: 'User', fieldName: 'user', sortable: true, filterable: true },
              ]}
              clickable
              rows={connections}
              on:clickrow={event => {
                checkedConnections = checkedConnections.includes(event.detail.id)
                  ? checkedConnections.filter(id => id !== event.detail.id)
                  : [...checkedConnections, event.detail.id];
              }}
              checkedKeys={checkedConnections}
              onSetCheckedKeys={keys => {
                checkedConnections = keys;
              }}
            ></TableControl>
          </div>
        </svelte:fragment>
        <!-- Users tab -->
        <svelte:fragment slot="2">
          <div class="tablewrap">
            <TableControl
              filters={userFilters}
              stickyHeader
              columns={[
                { header: 'ID', fieldName: 'id', sortable: true, filterable: true },
                { header: 'Login', fieldName: 'login', sortable: true, filterable: true },
                { header: 'E-mail', fieldName: 'email', sortable: true, filterable: true },
              ]}
              clickable
              rows={users}
              on:clickrow={event => {
                checkedUsers = checkedUsers.includes(event.detail.id)
                  ? checkedUsers.filter(id => id !== event.detail.id)
                  : [...checkedUsers, event.detail.id];
              }}
              checkedKeys={checkedUsers}
              onSetCheckedKeys={keys => {
                checkedUsers = keys;
              }}
            ></TableControl>
          </div>
        </svelte:fragment>
        <!-- Roles tab -->
        <svelte:fragment slot="3">
          <div class="tablewrap">
            <TableControl
              filters={roleFilters}
              stickyHeader
              columns={[
                { header: 'ID', fieldName: 'id', sortable: true, filterable: true },
                { header: 'Name', fieldName: 'name', sortable: true, filterable: true },
              ]}
              clickable
              rows={roles}
              on:clickrow={event => {
                checkedRoles = checkedRoles.includes(event.detail.id)
                  ? checkedRoles.filter(id => id !== event.detail.id)
                  : [...checkedRoles, event.detail.id];
              }}
              checkedKeys={checkedRoles}
              onSetCheckedKeys={keys => {
                checkedRoles = keys;
              }}
            ></TableControl>
          </div>
        </svelte:fragment>
        <!-- Auth methods tab -->
        <svelte:fragment slot="4">
          <div class="tablewrap">
            <TableControl
              filters={authMethodFilters}
              stickyHeader
              columns={[
                { header: 'ID', fieldName: 'id', sortable: true, filterable: true },
                { header: 'Name', fieldName: 'name', sortable: true, filterable: true },
                { header: 'Type', fieldName: 'type', sortable: true, filterable: true },
              ]}
              clickable
              rows={authMethods}
              on:clickrow={event => {
                checkedAuthMethods = checkedAuthMethods.includes(event.detail.id)
                  ? checkedAuthMethods.filter(id => id !== event.detail.id)
                  : [...checkedAuthMethods, event.detail.id];
              }}
              checkedKeys={checkedAuthMethods}
              onSetCheckedKeys={keys => {
                checkedAuthMethods = keys;
              }}
            ></TableControl>
          </div>
        </svelte:fragment>
        <!-- Config tab -->
        <svelte:fragment slot="5">
          <div class="tablewrap">
            <TableControl
              filters={configFilters}
              stickyHeader
              columns={[
                { header: 'ID', fieldName: 'id', sortable: true, filterable: true },
                { header: 'Group', fieldName: 'group', sortable: true, filterable: true },
                { header: 'Key', fieldName: 'key', sortable: true, filterable: true },
                { header: 'Value', fieldName: 'value', sortable: true, filterable: true },
              ]}
              clickable
              rows={config}
              on:clickrow={event => {
                checkedConfig = checkedConfig.includes(event.detail.id)
                  ? checkedConfig.filter(id => id !== event.detail.id)
                  : [...checkedConfig, event.detail.id];
              }}
              checkedKeys={checkedConfig}
              onSetCheckedKeys={keys => {
                checkedConfig = keys;
              }}
            ></TableControl>
          </div>
        </svelte:fragment>
      </TabControl>
    </div>

    <!-- Footer: action buttons depend on mode; Close is always present -->
    <div slot="footer">
      <div class="flex m-2">
        {#if mode == 'export'}
          <LargeButton
            data-testid="ExportImportConnectionsModal_exportButton"
            icon="icon export"
            on:click={handleExport}>{_t('common.export', { defaultMessage: 'Export' })}</LargeButton
          >
          <LargeButton
            data-testid="ExportImportConnectionsModal_saveToArchive"
            icon="icon archive"
            on:click={handleSaveToArchive}
            >{_t('common.saveToArchive', { defaultMessage: 'Save to archive' })}</LargeButton
          >
        {/if}
        {#if mode == 'import'}
          <LargeButton
            data-testid="ExportImportConnectionsModal_importButton"
            icon="icon import"
            on:click={handleImport}>{_t('common.import', { defaultMessage: 'Import' })}</LargeButton
          >
        {/if}
        <!-- NOTE(review): testid says EditJsonModal — looks like a copy-paste leftover;
             confirm e2e tests before renaming it. -->
        <LargeButton icon="icon close" on:click={closeCurrentModal} data-testid="EditJsonModal_closeButton"
          >Close</LargeButton
        >
      </div>
    </div>
  </ModalBase>
</FormProvider>
<style>
  .tablewrap {
    overflow: auto;
    width: 100%;
    /* leave room for the modal header and footer */
    height: calc(100vh - 220px);
    margin: 1rem;
  }

  .tabs {
    flex: 1;
  }

  .check-uncheck {
    margin-left: 1rem;
    font-size: 0.8rem;
  }
</style>
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user