Merge branch 'master' into feature/duckdb-2

This commit is contained in:
SPRINX0\prochazka
2025-04-24 09:25:45 +02:00
117 changed files with 5141 additions and 2829 deletions

View File

@@ -39,7 +39,7 @@ jobs:
repository: dbgate/dbgate-pro repository: dbgate/dbgate-pro
token: ${{ secrets.GH_TOKEN }} token: ${{ secrets.GH_TOKEN }}
path: dbgate-pro path: dbgate-pro
ref: 00da2abe10e1ec8a3887b49dfabd42ccda365514 ref: bc38030228a7d77b45032476b4d920b830120d3d
- name: Merge dbgate/dbgate-pro - name: Merge dbgate/dbgate-pro
run: | run: |
mkdir ../dbgate-pro mkdir ../dbgate-pro

View File

@@ -39,7 +39,7 @@ jobs:
repository: dbgate/dbgate-pro repository: dbgate/dbgate-pro
token: ${{ secrets.GH_TOKEN }} token: ${{ secrets.GH_TOKEN }}
path: dbgate-pro path: dbgate-pro
ref: 00da2abe10e1ec8a3887b49dfabd42ccda365514 ref: bc38030228a7d77b45032476b4d920b830120d3d
- name: Merge dbgate/dbgate-pro - name: Merge dbgate/dbgate-pro
run: | run: |
mkdir ../dbgate-pro mkdir ../dbgate-pro

View File

@@ -39,7 +39,7 @@ jobs:
repository: dbgate/dbgate-pro repository: dbgate/dbgate-pro
token: ${{ secrets.GH_TOKEN }} token: ${{ secrets.GH_TOKEN }}
path: dbgate-pro path: dbgate-pro
ref: 00da2abe10e1ec8a3887b49dfabd42ccda365514 ref: bc38030228a7d77b45032476b4d920b830120d3d
- name: Merge dbgate/dbgate-pro - name: Merge dbgate/dbgate-pro
run: | run: |
mkdir ../dbgate-pro mkdir ../dbgate-pro

View File

@@ -44,7 +44,7 @@ jobs:
repository: dbgate/dbgate-pro repository: dbgate/dbgate-pro
token: ${{ secrets.GH_TOKEN }} token: ${{ secrets.GH_TOKEN }}
path: dbgate-pro path: dbgate-pro
ref: 00da2abe10e1ec8a3887b49dfabd42ccda365514 ref: bc38030228a7d77b45032476b4d920b830120d3d
- name: Merge dbgate/dbgate-pro - name: Merge dbgate/dbgate-pro
run: | run: |
mkdir ../dbgate-pro mkdir ../dbgate-pro

View File

@@ -32,7 +32,7 @@ jobs:
repository: dbgate/dbgate-pro repository: dbgate/dbgate-pro
token: ${{ secrets.GH_TOKEN }} token: ${{ secrets.GH_TOKEN }}
path: dbgate-pro path: dbgate-pro
ref: 00da2abe10e1ec8a3887b49dfabd42ccda365514 ref: bc38030228a7d77b45032476b4d920b830120d3d
- name: Merge dbgate/dbgate-pro - name: Merge dbgate/dbgate-pro
run: | run: |
mkdir ../dbgate-pro mkdir ../dbgate-pro

View File

@@ -26,7 +26,7 @@ jobs:
repository: dbgate/dbgate-pro repository: dbgate/dbgate-pro
token: ${{ secrets.GH_TOKEN }} token: ${{ secrets.GH_TOKEN }}
path: dbgate-pro path: dbgate-pro
ref: 00da2abe10e1ec8a3887b49dfabd42ccda365514 ref: bc38030228a7d77b45032476b4d920b830120d3d
- name: Merge dbgate/dbgate-pro - name: Merge dbgate/dbgate-pro
run: | run: |
mkdir ../dbgate-pro mkdir ../dbgate-pro

View File

@@ -8,6 +8,26 @@ Builds:
- linux - application for linux - linux - application for linux
- win - application for Windows - win - application for Windows
## 6.4.0 - not released yet
- ADDED: Data deployer (Premium)
- ADDED: Compare data between JSON lines file in archive and database table
- CHANGED: Data Duplicator => Data Replicator (suitable for update, create and delete data, much more customizable)
- REMOVED: Data duplicator GUI (replaced with Data Deployer)
- ADDED: Exporting to ZIP file
- ADDED: Download SQL and SQLite files
- ADDED: Upload SQLite files
- ADDED: Upload archive as ZIP folder (Premium)
- ADDED: Compress, uncompress archive folder (Premium)
## 6.3.3
- CHANGED: New administration UI, redesigned administration of users, connections and roles
- ADDED: Encrypting passwords in team-premium edition
- ADDED: Show scale bar on map #1090
- FIXED: Fixed native backup/restore for MySQL+PostgreSQL over SSH tunnel #1092
- CHANGED: Column mapping dialog - fixes and improvements for copying from one existing table into another
- ADDED: Search in columns in table editor
- ADDED: Line Wrap for JSON viewer #768
### 6.3.2 ### 6.3.2
- ADDED: "Use system theme" switch, use changed system theme without restart #1084 - ADDED: "Use system theme" switch, use changed system theme without restart #1084
- ADDED: "Skip SETNAME instruction" option for Redis #1077 - ADDED: "Skip SETNAME instruction" option for Redis #1077

View File

@@ -88,6 +88,9 @@ module.exports = ({ editMenu, isMac }) => [
{ command: 'folder.showData', hideDisabled: true }, { command: 'folder.showData', hideDisabled: true },
{ command: 'new.gist', hideDisabled: true }, { command: 'new.gist', hideDisabled: true },
{ command: 'app.resetSettings', hideDisabled: true }, { command: 'app.resetSettings', hideDisabled: true },
{ divider: true },
{ command: 'app.exportConnections', hideDisabled: true },
{ command: 'app.importConnections', hideDisabled: true },
], ],
}, },
...(isMac ...(isMac

View File

@@ -126,7 +126,7 @@ describe('Data browser data', () => {
cy.themeshot('data-browser-form-view'); cy.themeshot('data-browser-form-view');
}); });
it.only('Column search', () => { it('Column search', () => {
cy.contains('MySql-connection').click(); cy.contains('MySql-connection').click();
cy.contains('MyChinook').click(); cy.contains('MyChinook').click();
cy.contains('Customer').click(); cy.contains('Customer').click();
@@ -468,15 +468,24 @@ describe('Data browser data', () => {
cy.themeshot('database-model-table-yaml'); cy.themeshot('database-model-table-yaml');
}); });
it('Data duplicator', () => { it('Data replicator', () => {
cy.contains('MySql-connection').click(); cy.contains('MySql-connection').click();
cy.contains('MyChinook').click(); cy.contains('MyChinook').click();
cy.testid('WidgetIconPanel_archive').click(); cy.testid('WidgetIconPanel_archive').click();
cy.contains('chinook-archive').rightclick(); cy.contains('chinook-archive').rightclick();
cy.contains('Data duplicator').click(); cy.contains('Data deployer').click();
cy.contains('Dry run').click(); cy.contains('Dry run').click();
cy.testid('DataDuplicatorTab_importIntoDb').click(); cy.testid('TableControl_row_2_checkbox').click();
cy.contains('Duplicated Album, inserted 347 rows, mapped 0 rows, missing 0 rows, skipped 0 rows'); cy.testid('TableControl_row_2').click();
cy.themeshot('data-duplicator'); cy.testid('DataDeploySettings_find_checkbox').click();
cy.testid('DataDeploySettings_create_checkbox').click();
cy.testid('WidgetIconPanel_archive').click();
cy.themeshot('data-deployer');
cy.testid('DataDeployTab_importIntoDb').click();
cy.testid('ConfirmDataDeployModal_okButton').click();
cy.contains('Replicated Customer, inserted 59 rows');
cy.contains('Finished job script');
cy.testid('DataDeployTab_importIntoDb').click();
cy.themeshot('data-replicator');
}); });
}); });

View File

@@ -18,6 +18,10 @@ describe('Team edition tests', () => {
cy.contains('logged-user').click(); cy.contains('logged-user').click();
cy.themeshot('role-administration'); cy.themeshot('role-administration');
cy.testid('AdminMenuWidget_itemUsers').click();
cy.contains('New user').click();
cy.themeshot('user-administration');
cy.testid('AdminMenuWidget_itemAuthentication').click(); cy.testid('AdminMenuWidget_itemAuthentication').click();
cy.contains('Add authentication').click(); cy.contains('Add authentication').click();
cy.contains('Use database login').click(); cy.contains('Use database login').click();

View File

@@ -21,8 +21,8 @@ services:
build: containers/mysql-ssh-login build: containers/mysql-ssh-login
restart: always restart: always
ports: ports:
- 16005:3306 - 16017:3306
- "16015:22" - "16012:22"
mysql-ssh-keyfile: mysql-ssh-keyfile:
build: containers/mysql-ssh-keyfile build: containers/mysql-ssh-keyfile

View File

@@ -1,160 +0,0 @@
const engines = require('../engines');
const stream = require('stream');
const { testWrapper } = require('../tools');
const dataDuplicator = require('dbgate-api/src/shell/dataDuplicator');
const { runCommandOnDriver, runQueryOnDriver } = require('dbgate-tools');
describe('Data duplicator', () => {
test.each(engines.filter(x => !x.skipDataDuplicator).map(engine => [engine.label, engine]))(
'Insert simple data - %s',
testWrapper(async (conn, driver, engine) => {
runCommandOnDriver(conn, driver, dmp =>
dmp.createTable({
pureName: 't1',
columns: [
{ columnName: 'id', dataType: 'int', autoIncrement: true, notNull: true },
{ columnName: 'val', dataType: 'varchar(50)' },
],
primaryKey: {
columns: [{ columnName: 'id' }],
},
})
);
runCommandOnDriver(conn, driver, dmp =>
dmp.createTable({
pureName: 't2',
columns: [
{ columnName: 'id', dataType: 'int', autoIncrement: true, notNull: true },
{ columnName: 'val', dataType: 'varchar(50)' },
{ columnName: 'valfk', dataType: 'int', notNull: true },
],
primaryKey: {
columns: [{ columnName: 'id' }],
},
foreignKeys: [{ refTableName: 't1', columns: [{ columnName: 'valfk', refColumnName: 'id' }] }],
})
);
const gett1 = () =>
stream.Readable.from([
{ __isStreamHeader: true, __isDynamicStructure: true },
{ id: 1, val: 'v1' },
{ id: 2, val: 'v2' },
{ id: 3, val: 'v3' },
]);
const gett2 = () =>
stream.Readable.from([
{ __isStreamHeader: true, __isDynamicStructure: true },
{ id: 1, val: 'v1', valfk: 1 },
{ id: 2, val: 'v2', valfk: 2 },
{ id: 3, val: 'v3', valfk: 3 },
]);
await dataDuplicator({
systemConnection: conn,
driver,
items: [
{
name: 't1',
operation: 'copy',
openStream: gett1,
},
{
name: 't2',
operation: 'copy',
openStream: gett2,
},
],
});
await dataDuplicator({
systemConnection: conn,
driver,
items: [
{
name: 't1',
operation: 'copy',
openStream: gett1,
},
{
name: 't2',
operation: 'copy',
openStream: gett2,
},
],
});
const res1 = await runQueryOnDriver(conn, driver, dmp => dmp.put(`select count(*) as ~cnt from ~t1`));
expect(res1.rows[0].cnt.toString()).toEqual('6');
const res2 = await runQueryOnDriver(conn, driver, dmp => dmp.put(`select count(*) as ~cnt from ~t2`));
expect(res2.rows[0].cnt.toString()).toEqual('6');
})
);
test.each(engines.filter(x => !x.skipDataDuplicator).map(engine => [engine.label, engine]))(
'Skip nullable weak refs - %s',
testWrapper(async (conn, driver, engine) => {
runCommandOnDriver(conn, driver, dmp =>
dmp.createTable({
pureName: 't1',
columns: [
{ columnName: 'id', dataType: 'int', notNull: true },
{ columnName: 'val', dataType: 'varchar(50)' },
],
primaryKey: {
columns: [{ columnName: 'id' }],
},
})
);
runCommandOnDriver(conn, driver, dmp =>
dmp.createTable({
pureName: 't2',
columns: [
{ columnName: 'id', dataType: 'int', autoIncrement: true, notNull: true },
{ columnName: 'val', dataType: 'varchar(50)' },
{ columnName: 'valfk', dataType: 'int', notNull: false },
],
primaryKey: {
columns: [{ columnName: 'id' }],
},
foreignKeys: [{ refTableName: 't1', columns: [{ columnName: 'valfk', refColumnName: 'id' }] }],
})
);
runCommandOnDriver(conn, driver, dmp => dmp.put("insert into ~t1 (~id, ~val) values (1, 'first')"));
const gett2 = () =>
stream.Readable.from([
{ __isStreamHeader: true, __isDynamicStructure: true },
{ id: 1, val: 'v1', valfk: 1 },
{ id: 2, val: 'v2', valfk: 2 },
]);
await dataDuplicator({
systemConnection: conn,
driver,
items: [
{
name: 't2',
operation: 'copy',
openStream: gett2,
},
],
options: {
setNullForUnresolvedNullableRefs: true,
},
});
const res1 = await runQueryOnDriver(conn, driver, dmp => dmp.put(`select count(*) as ~cnt from ~t1`));
expect(res1.rows[0].cnt.toString()).toEqual('1');
const res2 = await runQueryOnDriver(conn, driver, dmp => dmp.put(`select count(*) as ~cnt from ~t2`));
expect(res2.rows[0].cnt.toString()).toEqual('2');
const res3 = await runQueryOnDriver(conn, driver, dmp =>
dmp.put(`select count(*) as ~cnt from ~t2 where ~valfk is not null`)
);
expect(res3.rows[0].cnt.toString()).toEqual('1');
})
);
});

View File

@@ -0,0 +1,306 @@
const engines = require('../engines');
const stream = require('stream');
const { testWrapper } = require('../tools');
const dataReplicator = require('dbgate-api/src/shell/dataReplicator');
const deployDb = require('dbgate-api/src/shell/deployDb');
const storageModel = require('dbgate-api/src/storageModel');
const { runCommandOnDriver, runQueryOnDriver } = require('dbgate-tools');
describe('Data replicator', () => {
test.each(engines.filter(x => !x.skipDataReplicator).map(engine => [engine.label, engine]))(
'Insert simple data - %s',
testWrapper(async (conn, driver, engine) => {
runCommandOnDriver(conn, driver, dmp =>
dmp.createTable({
pureName: 't1',
columns: [
{ columnName: 'id', dataType: 'int', autoIncrement: true, notNull: true },
{ columnName: 'val', dataType: 'varchar(50)' },
],
primaryKey: {
columns: [{ columnName: 'id' }],
},
})
);
runCommandOnDriver(conn, driver, dmp =>
dmp.createTable({
pureName: 't2',
columns: [
{ columnName: 'id', dataType: 'int', autoIncrement: true, notNull: true },
{ columnName: 'val', dataType: 'varchar(50)' },
{ columnName: 'valfk', dataType: 'int', notNull: true },
],
primaryKey: {
columns: [{ columnName: 'id' }],
},
foreignKeys: [{ refTableName: 't1', columns: [{ columnName: 'valfk', refColumnName: 'id' }] }],
})
);
const gett1 = () =>
stream.Readable.from([
{ __isStreamHeader: true, __isDynamicStructure: true },
{ id: 1, val: 'v1' },
{ id: 2, val: 'v2' },
{ id: 3, val: 'v3' },
]);
const gett2 = () =>
stream.Readable.from([
{ __isStreamHeader: true, __isDynamicStructure: true },
{ id: 1, val: 'v1', valfk: 1 },
{ id: 2, val: 'v2', valfk: 2 },
{ id: 3, val: 'v3', valfk: 3 },
]);
await dataReplicator({
systemConnection: conn,
driver,
items: [
{
name: 't1',
createNew: true,
openStream: gett1,
},
{
name: 't2',
createNew: true,
openStream: gett2,
},
],
});
await dataReplicator({
systemConnection: conn,
driver,
items: [
{
name: 't1',
createNew: true,
openStream: gett1,
},
{
name: 't2',
createNew: true,
openStream: gett2,
},
],
});
const res1 = await runQueryOnDriver(conn, driver, dmp => dmp.put(`select count(*) as ~cnt from ~t1`));
expect(res1.rows[0].cnt.toString()).toEqual('6');
const res2 = await runQueryOnDriver(conn, driver, dmp => dmp.put(`select count(*) as ~cnt from ~t2`));
expect(res2.rows[0].cnt.toString()).toEqual('6');
})
);
test.each(engines.filter(x => !x.skipDataReplicator).map(engine => [engine.label, engine]))(
'Skip nullable weak refs - %s',
testWrapper(async (conn, driver, engine) => {
runCommandOnDriver(conn, driver, dmp =>
dmp.createTable({
pureName: 't1',
columns: [
{ columnName: 'id', dataType: 'int', notNull: true },
{ columnName: 'val', dataType: 'varchar(50)' },
],
primaryKey: {
columns: [{ columnName: 'id' }],
},
})
);
runCommandOnDriver(conn, driver, dmp =>
dmp.createTable({
pureName: 't2',
columns: [
{ columnName: 'id', dataType: 'int', autoIncrement: true, notNull: true },
{ columnName: 'val', dataType: 'varchar(50)' },
{ columnName: 'valfk', dataType: 'int', notNull: false },
],
primaryKey: {
columns: [{ columnName: 'id' }],
},
foreignKeys: [{ refTableName: 't1', columns: [{ columnName: 'valfk', refColumnName: 'id' }] }],
})
);
runCommandOnDriver(conn, driver, dmp => dmp.put("insert into ~t1 (~id, ~val) values (1, 'first')"));
await dataReplicator({
systemConnection: conn,
driver,
items: [
{
name: 't2',
createNew: true,
jsonArray: [
{ id: 1, val: 'v1', valfk: 1 },
{ id: 2, val: 'v2', valfk: 2 },
],
},
],
options: {
setNullForUnresolvedNullableRefs: true,
},
});
const res1 = await runQueryOnDriver(conn, driver, dmp => dmp.put(`select count(*) as ~cnt from ~t1`));
expect(res1.rows[0].cnt.toString()).toEqual('1');
const res2 = await runQueryOnDriver(conn, driver, dmp => dmp.put(`select count(*) as ~cnt from ~t2`));
expect(res2.rows[0].cnt.toString()).toEqual('2');
const res3 = await runQueryOnDriver(conn, driver, dmp =>
dmp.put(`select count(*) as ~cnt from ~t2 where ~valfk is not null`)
);
expect(res3.rows[0].cnt.toString()).toEqual('1');
})
);
test.each(engines.filter(x => !x.skipDataReplicator).map(engine => [engine.label, engine]))(
'Import storage DB - %s',
testWrapper(async (conn, driver, engine) => {
await deployDb({
systemConnection: conn,
driver,
loadedDbModel: storageModel,
targetSchema: engine.defaultSchemaName,
});
async function queryValue(sql) {
const res1 = await runQueryOnDriver(conn, driver, dmp => dmp.put(sql));
return res1.rows[0].val?.toString();
}
expect(await queryValue(`select count(*) as ~val from ~auth_methods`)).toEqual('2');
expect(
await queryValue(
`select ~is_disabled as ~val from ~auth_methods where ~amoid='790ca4d2-7f01-4800-955b-d691b890cc50'`
)
).toBeFalsy();
const DB1 = {
auth_methods: [
{ id: -1, name: 'Anonymous', amoid: '790ca4d2-7f01-4800-955b-d691b890cc50', is_disabled: 1 },
{ id: 10, name: 'OAuth', amoid: '4269b660-54b6-11ef-a3aa-a9021250bf4b' },
],
auth_methods_config: [{ id: 20, auth_method_id: 10, key: 'oauthClient', value: 'dbgate' }],
config: [
{ group: 'admin', key: 'encyptKey', value: '1234' },
{ group: 'admin', key: 'adminPasswordState', value: 'set' },
{ group: 'license', key: 'licenseKey', value: '123467' },
],
roles: [
{ id: -3, name: 'superadmin' },
{ id: -2, name: 'logged-user' },
{ id: -1, name: 'anonymous-user' },
],
role_permissions: [
{ id: 14, role_id: -1, permission: 'perm1' },
{ id: 29, role_id: -1, permission: 'perm2' },
{ id: 1, role_id: -1, permission: 'perm3' },
],
};
const DB2 = {
auth_methods: [{ id: 10, name: 'My Auth', amoid: 'myauth1' }],
auth_methods_config: [{ id: 20, auth_method_id: 10, key: 'my authClient', value: 'mydbgate' }],
config: [],
roles: [{ id: 1, name: 'test' }],
role_permissions: [{ id: 14, role_id: 1, permission: 'permxx' }],
};
function createDuplConfig(db) {
return {
systemConnection: conn,
driver,
items: [
{
name: 'auth_methods',
findExisting: true,
updateExisting: true,
createNew: true,
matchColumns: ['amoid'],
jsonArray: db.auth_methods,
},
{
name: 'auth_methods_config',
findExisting: true,
updateExisting: true,
createNew: true,
matchColumns: ['auth_method_id', 'key'],
jsonArray: db.auth_methods_config,
},
{
name: 'config',
findExisting: true,
updateExisting: true,
createNew: true,
matchColumns: ['group', 'key'],
jsonArray: db.config,
},
{
name: 'roles',
findExisting: true,
updateExisting: true,
createNew: true,
matchColumns: ['name'],
jsonArray: db.roles,
},
{
name: 'role_permissions',
findExisting: true,
updateExisting: true,
createNew: true,
deleteMissing: true,
matchColumns: ['role_id', 'permission'],
deleteRestrictionColumns: ['role_id'],
jsonArray: db.role_permissions,
},
],
};
}
await dataReplicator(createDuplConfig(DB1));
expect(
await queryValue(
`select ~is_disabled as ~val from ~auth_methods where ~amoid='790ca4d2-7f01-4800-955b-d691b890cc50'`
)
).toBeTruthy();
expect(await queryValue(`select count(*) as ~val from ~auth_methods`)).toEqual('3');
expect(await queryValue(`select count(*) as ~val from ~auth_methods_config`)).toEqual('1');
expect(await queryValue(`select count(*) as ~val from ~config`)).toEqual('3');
expect(await queryValue(`select ~value as ~val from ~auth_methods_config`)).toEqual('dbgate');
expect(
await queryValue(`select ~value as ~val from ~config where ~group='license' and ~key='licenseKey'`)
).toEqual('123467');
expect(await queryValue(`select count(*) as ~val from ~role_permissions`)).toEqual('3');
DB1.auth_methods_config[0].value = 'dbgate2';
DB1.config[2].value = '567';
DB1.role_permissions.splice(2, 1);
await dataReplicator(createDuplConfig(DB1));
expect(await queryValue(`select count(*) as ~val from ~auth_methods_config`)).toEqual('1');
expect(await queryValue(`select count(*) as ~val from ~config`)).toEqual('3');
expect(await queryValue(`select ~value as ~val from ~auth_methods_config`)).toEqual('dbgate2');
expect(
await queryValue(`select ~value as ~val from ~config where ~group='license' and ~key='licenseKey'`)
).toEqual('567');
expect(await queryValue(`select count(*) as ~val from ~role_permissions`)).toEqual('2');
// now add DB2
await dataReplicator(createDuplConfig(DB2));
expect(await queryValue(`select count(*) as ~val from ~auth_methods`)).toEqual('4');
expect(await queryValue(`select count(*) as ~val from ~auth_methods_config`)).toEqual('2');
expect(await queryValue(`select count(*) as ~val from ~role_permissions`)).toEqual('3');
DB1.role_permissions.splice(1, 1);
await dataReplicator(createDuplConfig(DB1));
expect(await queryValue(`select count(*) as ~val from ~role_permissions`)).toEqual('2');
}),
15 * 1000
);
});

View File

@@ -190,7 +190,7 @@ describe('Query', () => {
}) })
); );
test.each(engines.filter(x => !x.skipDataDuplicator).map(engine => [engine.label, engine]))( test.each(engines.filter(x => !x.skipDataReplicator).map(engine => [engine.label, engine]))(
'Select scope identity - %s', 'Select scope identity - %s',
testWrapper(async (conn, driver, engine) => { testWrapper(async (conn, driver, engine) => {
await runCommandOnDriver(conn, driver, dmp => await runCommandOnDriver(conn, driver, dmp =>

View File

@@ -8,14 +8,14 @@ services:
# ports: # ports:
# - 15000:5432 # - 15000:5432
# #
# mariadb: mariadb:
# image: mariadb image: mariadb
# command: --default-authentication-plugin=mysql_native_password command: --default-authentication-plugin=mysql_native_password
# restart: always restart: always
# ports: ports:
# - 15004:3306 - 15004:3306
# environment: environment:
# - MYSQL_ROOT_PASSWORD=Pwd2020Db - MYSQL_ROOT_PASSWORD=Pwd2020Db
# mysql: # mysql:
# image: mysql:8.0.18 # image: mysql:8.0.18
@@ -25,7 +25,7 @@ services:
# - 15001:3306 # - 15001:3306
# environment: # environment:
# - MYSQL_ROOT_PASSWORD=Pwd2020Db # - MYSQL_ROOT_PASSWORD=Pwd2020Db
#
# cassandradb: # cassandradb:
# image: cassandra:5.0.2 # image: cassandra:5.0.2
@@ -81,11 +81,11 @@ services:
# ports: # ports:
# - 15006:1521 # - 15006:1521
libsql: # libsql:
image: ghcr.io/tursodatabase/libsql-server:latest # image: ghcr.io/tursodatabase/libsql-server:latest
platform: linux/amd64 # platform: linux/amd64
ports: # ports:
- '8080:8080' # - '8080:8080'
- '5002:5001' # - '5002:5001'
volumes: # volumes:
- ./data/libsql:/var/lib/sqld # - ./data/libsql:/var/lib/sqld

View File

@@ -551,7 +551,7 @@ const clickhouseEngine = {
skipUnique: true, skipUnique: true,
skipAutoIncrement: true, skipAutoIncrement: true,
skipPkColumnTesting: true, skipPkColumnTesting: true,
skipDataDuplicator: true, skipDataReplicator: true,
skipStringLength: true, skipStringLength: true,
alterTableAddColumnSyntax: true, alterTableAddColumnSyntax: true,
dbSnapshotBySeconds: true, dbSnapshotBySeconds: true,
@@ -643,7 +643,7 @@ const cassandraEngine = {
skipOrderBy: true, skipOrderBy: true,
skipAutoIncrement: true, skipAutoIncrement: true,
skipDataModifications: true, skipDataModifications: true,
skipDataDuplicator: true, skipDataReplicator: true,
skipDeploy: true, skipDeploy: true,
skipImportModel: true, skipImportModel: true,
@@ -693,7 +693,7 @@ const enginesOnLocal = [
// all engines, which would be run on local test // all engines, which would be run on local test
// cassandraEngine, // cassandraEngine,
// mysqlEngine, // mysqlEngine,
// mariaDbEngine, mariaDbEngine,
// postgreSqlEngine, // postgreSqlEngine,
// sqlServerEngine, // sqlServerEngine,
// sqliteEngine, // sqliteEngine,

View File

@@ -1,6 +1,6 @@
{ {
"private": true, "private": true,
"version": "6.3.2", "version": "6.3.4-premium-beta.1",
"name": "dbgate-all", "name": "dbgate-all",
"workspaces": [ "workspaces": [
"packages/*", "packages/*",

View File

@@ -22,6 +22,7 @@
"dependencies": { "dependencies": {
"@aws-sdk/rds-signer": "^3.665.0", "@aws-sdk/rds-signer": "^3.665.0",
"activedirectory2": "^2.1.0", "activedirectory2": "^2.1.0",
"archiver": "^7.0.1",
"async-lock": "^1.2.6", "async-lock": "^1.2.6",
"axios": "^0.21.1", "axios": "^0.21.1",
"body-parser": "^1.19.0", "body-parser": "^1.19.0",
@@ -62,7 +63,8 @@
"simple-encryptor": "^4.0.0", "simple-encryptor": "^4.0.0",
"ssh2": "^1.16.0", "ssh2": "^1.16.0",
"stream-json": "^1.8.0", "stream-json": "^1.8.0",
"tar": "^6.0.5" "tar": "^6.0.5",
"yauzl": "^3.2.0"
}, },
"scripts": { "scripts": {
"start": "env-cmd -f .env node src/index.js --listen-api", "start": "env-cmd -f .env node src/index.js --listen-api",

View File

@@ -2,14 +2,20 @@ const fs = require('fs-extra');
const readline = require('readline'); const readline = require('readline');
const crypto = require('crypto'); const crypto = require('crypto');
const path = require('path'); const path = require('path');
const { archivedir, clearArchiveLinksCache, resolveArchiveFolder } = require('../utility/directories'); const { archivedir, clearArchiveLinksCache, resolveArchiveFolder, uploadsdir } = require('../utility/directories');
const socket = require('../utility/socket'); const socket = require('../utility/socket');
const loadFilesRecursive = require('../utility/loadFilesRecursive'); const loadFilesRecursive = require('../utility/loadFilesRecursive');
const getJslFileName = require('../utility/getJslFileName'); const getJslFileName = require('../utility/getJslFileName');
const { getLogger, extractErrorLogData } = require('dbgate-tools'); const { getLogger, extractErrorLogData, jsonLinesParse } = require('dbgate-tools');
const dbgateApi = require('../shell'); const dbgateApi = require('../shell');
const jsldata = require('./jsldata'); const jsldata = require('./jsldata');
const platformInfo = require('../utility/platformInfo'); const platformInfo = require('../utility/platformInfo');
const { isProApp } = require('../utility/checkLicense');
const listZipEntries = require('../utility/listZipEntries');
const unzipJsonLinesFile = require('../shell/unzipJsonLinesFile');
const { zip } = require('lodash');
const zipDirectory = require('../shell/zipDirectory');
const unzipDirectory = require('../shell/unzipDirectory');
const logger = getLogger('archive'); const logger = getLogger('archive');
@@ -47,9 +53,31 @@ module.exports = {
return folder; return folder;
}, },
async getZipFiles({ file }) {
const entries = await listZipEntries(path.join(archivedir(), file));
const files = entries.map(entry => {
let name = entry.fileName;
if (isProApp() && entry.fileName.endsWith('.jsonl')) {
name = entry.fileName.slice(0, -6);
}
return {
name: name,
label: name,
type: isProApp() && entry.fileName.endsWith('.jsonl') ? 'jsonl' : 'other',
};
});
return files;
},
files_meta: true, files_meta: true,
async files({ folder }) { async files({ folder }) {
try { try {
if (folder.endsWith('.zip')) {
if (await fs.exists(path.join(archivedir(), folder))) {
return this.getZipFiles({ file: folder });
}
return [];
}
const dir = resolveArchiveFolder(folder); const dir = resolveArchiveFolder(folder);
if (!(await fs.exists(dir))) return []; if (!(await fs.exists(dir))) return [];
const files = await loadFilesRecursive(dir); // fs.readdir(dir); const files = await loadFilesRecursive(dir); // fs.readdir(dir);
@@ -91,6 +119,16 @@ module.exports = {
return true; return true;
}, },
createFile_meta: true,
async createFile({ folder, file, fileType, tableInfo }) {
await fs.writeFile(
path.join(resolveArchiveFolder(folder), `${file}.${fileType}`),
tableInfo ? JSON.stringify({ __isStreamHeader: true, tableInfo }) : ''
);
socket.emitChanged(`archive-files-changed`, { folder });
return true;
},
deleteFile_meta: true, deleteFile_meta: true,
async deleteFile({ folder, file, fileType }) { async deleteFile({ folder, file, fileType }) {
await fs.unlink(path.join(resolveArchiveFolder(folder), `${file}.${fileType}`)); await fs.unlink(path.join(resolveArchiveFolder(folder), `${file}.${fileType}`));
@@ -158,7 +196,7 @@ module.exports = {
deleteFolder_meta: true, deleteFolder_meta: true,
async deleteFolder({ folder }) { async deleteFolder({ folder }) {
if (!folder) throw new Error('Missing folder parameter'); if (!folder) throw new Error('Missing folder parameter');
if (folder.endsWith('.link')) { if (folder.endsWith('.link') || folder.endsWith('.zip')) {
await fs.unlink(path.join(archivedir(), folder)); await fs.unlink(path.join(archivedir(), folder));
} else { } else {
await fs.rmdir(path.join(archivedir(), folder), { recursive: true }); await fs.rmdir(path.join(archivedir(), folder), { recursive: true });
@@ -204,9 +242,10 @@ module.exports = {
}, },
async getNewArchiveFolder({ database }) { async getNewArchiveFolder({ database }) {
const isLink = database.endsWith(database); const isLink = database.endsWith('.link');
const name = isLink ? database.slice(0, -5) : database; const isZip = database.endsWith('.zip');
const suffix = isLink ? '.link' : ''; const name = isLink ? database.slice(0, -5) : isZip ? database.slice(0, -4) : database;
const suffix = isLink ? '.link' : isZip ? '.zip' : '';
if (!(await fs.exists(path.join(archivedir(), database)))) return database; if (!(await fs.exists(path.join(archivedir(), database)))) return database;
let index = 2; let index = 2;
while (await fs.exists(path.join(archivedir(), `${name}${index}${suffix}`))) { while (await fs.exists(path.join(archivedir(), `${name}${index}${suffix}`))) {
@@ -214,4 +253,58 @@ module.exports = {
} }
return `${name}${index}${suffix}`; return `${name}${index}${suffix}`;
}, },
getArchiveData_meta: true,
async getArchiveData({ folder, file }) {
let rows;
if (folder.endsWith('.zip')) {
rows = await unzipJsonLinesFile(path.join(archivedir(), folder), `${file}.jsonl`);
} else {
rows = jsonLinesParse(await fs.readFile(path.join(archivedir(), folder, `${file}.jsonl`), { encoding: 'utf8' }));
}
return rows.filter(x => !x.__isStreamHeader);
},
saveUploadedZip_meta: true,
async saveUploadedZip({ filePath, fileName }) {
if (!fileName?.endsWith('.zip')) {
throw new Error(`${fileName} is not a ZIP file`);
}
const folder = await this.getNewArchiveFolder({ database: fileName });
await fs.copyFile(filePath, path.join(archivedir(), folder));
socket.emitChanged(`archive-folders-changed`);
return null;
},
zip_meta: true,
async zip({ folder }) {
const newFolder = await this.getNewArchiveFolder({ database: folder + '.zip' });
await zipDirectory(path.join(archivedir(), folder), path.join(archivedir(), newFolder));
socket.emitChanged(`archive-folders-changed`);
return null;
},
unzip_meta: true,
async unzip({ folder }) {
const newFolder = await this.getNewArchiveFolder({ database: folder.slice(0, -4) });
await unzipDirectory(path.join(archivedir(), folder), path.join(archivedir(), newFolder));
socket.emitChanged(`archive-folders-changed`);
return null;
},
getZippedPath_meta: true,
async getZippedPath({ folder }) {
if (folder.endsWith('.zip')) {
return { filePath: path.join(archivedir(), folder) };
}
const uploadName = crypto.randomUUID();
const filePath = path.join(uploadsdir(), uploadName);
await zipDirectory(path.join(archivedir(), folder), filePath);
return { filePath };
},
}; };

View File

@@ -12,6 +12,7 @@ const {
getAuthProviderById, getAuthProviderById,
} = require('../auth/authProvider'); } = require('../auth/authProvider');
const storage = require('./storage'); const storage = require('./storage');
const { decryptPasswordString } = require('../utility/crypting');
const logger = getLogger('auth'); const logger = getLogger('auth');
@@ -44,6 +45,7 @@ function authMiddleware(req, res, next) {
'/connections/dblogin-auth', '/connections/dblogin-auth',
'/connections/dblogin-auth-token', '/connections/dblogin-auth-token',
'/health', '/health',
'/__health',
]; ];
// console.log('********************* getAuthProvider()', getAuthProvider()); // console.log('********************* getAuthProvider()', getAuthProvider());
@@ -95,7 +97,7 @@ module.exports = {
let adminPassword = process.env.ADMIN_PASSWORD; let adminPassword = process.env.ADMIN_PASSWORD;
if (!adminPassword) { if (!adminPassword) {
const adminConfig = await storage.readConfig({ group: 'admin' }); const adminConfig = await storage.readConfig({ group: 'admin' });
adminPassword = adminConfig?.adminPassword; adminPassword = decryptPasswordString(adminConfig?.adminPassword);
} }
if (adminPassword && adminPassword == password) { if (adminPassword && adminPassword == password) {
return { return {

View File

@@ -19,6 +19,14 @@ const storage = require('./storage');
const { getAuthProxyUrl } = require('../utility/authProxy'); const { getAuthProxyUrl } = require('../utility/authProxy');
const { getPublicHardwareFingerprint } = require('../utility/hardwareFingerprint'); const { getPublicHardwareFingerprint } = require('../utility/hardwareFingerprint');
const { extractErrorMessage } = require('dbgate-tools'); const { extractErrorMessage } = require('dbgate-tools');
const {
generateTransportEncryptionKey,
createTransportEncryptor,
recryptConnection,
getInternalEncryptor,
recryptUser,
recryptObjectPasswordFieldInPlace,
} = require('../utility/crypting');
const lock = new AsyncLock(); const lock = new AsyncLock();
@@ -107,6 +115,7 @@ module.exports = {
datadir(), datadir(),
processArgs.runE2eTests ? 'connections-e2etests.jsonl' : 'connections.jsonl' processArgs.runE2eTests ? 'connections-e2etests.jsonl' : 'connections.jsonl'
), ),
supportCloudAutoUpgrade: !!process.env.CLOUD_UPGRADE_FILE,
...currentVersion, ...currentVersion,
}; };
@@ -144,7 +153,7 @@ module.exports = {
const res = { const res = {
...value, ...value,
}; };
if (value['app.useNativeMenu'] !== true && value['app.useNativeMenu'] !== false) { if (platformInfo.isElectron && value['app.useNativeMenu'] !== true && value['app.useNativeMenu'] !== false) {
// res['app.useNativeMenu'] = os.platform() == 'darwin' ? true : false; // res['app.useNativeMenu'] = os.platform() == 'darwin' ? true : false;
res['app.useNativeMenu'] = false; res['app.useNativeMenu'] = false;
} }
@@ -161,6 +170,10 @@ module.exports = {
async loadSettings() { async loadSettings() {
try { try {
if (process.env.STORAGE_DATABASE) {
const settings = await storage.readConfig({ group: 'settings' });
return this.fillMissingSettings(settings);
} else {
const settingsText = await fs.readFile( const settingsText = await fs.readFile(
path.join(datadir(), processArgs.runE2eTests ? 'settings-e2etests.json' : 'settings.json'), path.join(datadir(), processArgs.runE2eTests ? 'settings-e2etests.json' : 'settings.json'),
{ encoding: 'utf-8' } { encoding: 'utf-8' }
@@ -169,6 +182,7 @@ module.exports = {
...this.fillMissingSettings(JSON.parse(settingsText)), ...this.fillMissingSettings(JSON.parse(settingsText)),
'other.licenseKey': platformInfo.isElectron ? await this.loadLicenseKey() : undefined, 'other.licenseKey': platformInfo.isElectron ? await this.loadLicenseKey() : undefined,
}; };
}
} catch (err) { } catch (err) {
return this.fillMissingSettings({}); return this.fillMissingSettings({});
} }
@@ -246,7 +260,18 @@ module.exports = {
const res = await lock.acquire('settings', async () => { const res = await lock.acquire('settings', async () => {
const currentValue = await this.loadSettings(); const currentValue = await this.loadSettings();
try { try {
const updated = { let updated = currentValue;
if (process.env.STORAGE_DATABASE) {
updated = {
...currentValue,
...values,
};
await storage.writeConfig({
group: 'settings',
config: updated,
});
} else {
updated = {
...currentValue, ...currentValue,
..._.omit(values, ['other.licenseKey']), ..._.omit(values, ['other.licenseKey']),
}; };
@@ -260,6 +285,7 @@ module.exports = {
await this.saveLicenseKey({ licenseKey: values['other.licenseKey'] }); await this.saveLicenseKey({ licenseKey: values['other.licenseKey'] });
socket.emitChanged(`config-changed`); socket.emitChanged(`config-changed`);
} }
}
socket.emitChanged(`settings-changed`); socket.emitChanged(`settings-changed`);
return updated; return updated;
@@ -281,4 +307,91 @@ module.exports = {
const resp = await checkLicenseKey(licenseKey); const resp = await checkLicenseKey(licenseKey);
return resp; return resp;
}, },
// Prepares an exported storage database so that secrets are not shipped
// encrypted with the server's internal key: every password field is
// re-encrypted from the internal encryptor to a one-off transport encryptor,
// whose key travels inside the export itself (config entry admin/encryptionKey).
recryptDatabaseForExport(db) {
const encryptionKey = generateTransportEncryptionKey();
const transportEncryptor = createTransportEncryptor(encryptionKey);
// Drop any pre-existing encryptionKey entry and append the fresh one.
const config = _.cloneDeep([
...(db.config?.filter(c => !(c.group == 'admin' && c.key == 'encryptionKey')) || []),
{ group: 'admin', key: 'encryptionKey', value: encryptionKey },
]);
const adminPassword = config.find(c => c.group == 'admin' && c.key == 'adminPassword');
// NOTE(review): adminPassword is undefined when no admin password is stored;
// assumes recryptObjectPasswordFieldInPlace tolerates a missing object - confirm.
recryptObjectPasswordFieldInPlace(adminPassword, 'value', getInternalEncryptor(), transportEncryptor);
return {
...db,
connections: db.connections?.map(conn => recryptConnection(conn, getInternalEncryptor(), transportEncryptor)),
users: db.users?.map(conn => recryptUser(conn, getInternalEncryptor(), transportEncryptor)),
config,
};
},
// Inverse of recryptDatabaseForExport: takes an imported database whose
// password fields are encrypted with the transport key carried in its own
// config (admin/encryptionKey) and re-encrypts them with this server's
// internal encryptor. The transport key entry is stripped from the result.
recryptDatabaseFromImport(db) {
const encryptionKey = db.config?.find(c => c.group == 'admin' && c.key == 'encryptionKey')?.value;
if (!encryptionKey) {
throw new Error('Missing encryption key in the database');
}
// Remove the transport key entry - it must not be persisted locally.
const config = _.cloneDeep(db.config || []).filter(c => !(c.group == 'admin' && c.key == 'encryptionKey'));
const transportEncryptor = createTransportEncryptor(encryptionKey);
const adminPassword = config.find(c => c.group == 'admin' && c.key == 'adminPassword');
// NOTE(review): adminPassword may be undefined when the import carries no
// admin password; assumes the helper tolerates that - confirm.
recryptObjectPasswordFieldInPlace(adminPassword, 'value', transportEncryptor, getInternalEncryptor());
return {
...db,
connections: db.connections?.map(conn => recryptConnection(conn, transportEncryptor, getInternalEncryptor())),
users: db.users?.map(conn => recryptUser(conn, transportEncryptor, getInternalEncryptor())),
config,
};
},
exportConnectionsAndSettings_meta: true,
// Exports connections (and, in storage-database mode, users and settings)
// as a transport-encrypted database object. Admin-only; disabled when
// connections are externally defined (portal mode).
async exportConnectionsAndSettings(_params, req) {
if (!hasPermission(`admin/config`, req)) {
throw new Error('Permission denied: admin/config');
}
// Portal connections are predefined/read-only - exporting makes no sense.
if (connections.portalConnections) {
throw new Error('Not allowed');
}
if (process.env.STORAGE_DATABASE) {
// Storage-database mode: export the full database content.
const db = await storage.getExportedDatabase();
return this.recryptDatabaseForExport(db);
}
// File mode: synthesize a database-shaped object from the connection list.
// Sequential numeric ids are generated; the original _id is kept as `conid`.
return this.recryptDatabaseForExport({
connections: (await connections.list(null, req)).map((conn, index) => ({
..._.omit(conn, ['_id']),
id: index + 1,
conid: conn._id,
})),
});
},
importConnectionsAndSettings_meta: true,
// Imports a database object previously produced by exportConnectionsAndSettings,
// re-encrypting its secrets with the local internal key first. Admin-only;
// disabled in portal mode.
async importConnectionsAndSettings({ db }, req) {
if (!hasPermission(`admin/config`, req)) {
throw new Error('Permission denied: admin/config');
}
if (connections.portalConnections) {
throw new Error('Not allowed');
}
const recryptedDb = this.recryptDatabaseFromImport(db);
if (process.env.STORAGE_DATABASE) {
// Storage-database mode: replace the stored database with the import.
await storage.replicateImportedDatabase(recryptedDb);
} else {
// File mode: merge imported connections into the datastore, restoring
// the original _id from the `conid` field created on export.
await connections.importFromArray(
recryptedDb.connections.map(conn => ({
..._.omit(conn, ['conid', 'id']),
_id: conn.conid,
}))
);
}
return true;
},
}; };

View File

@@ -321,6 +321,18 @@ module.exports = {
return res; return res;
}, },
importFromArray(list) {
this.datastore.transformAll(connections => {
const mapped = connections.map(x => {
const found = list.find(y => y._id == x._id);
if (found) return found;
return x;
});
return [...mapped, ...list.filter(x => !connections.find(y => y._id == x._id))];
});
socket.emitChanged('connection-list-changed');
},
async checkUnsavedConnectionsLimit() { async checkUnsavedConnectionsLimit() {
if (!this.datastore) { if (!this.datastore) {
return; return;

View File

@@ -37,6 +37,8 @@ const loadModelTransform = require('../utility/loadModelTransform');
const exportDbModelSql = require('../utility/exportDbModelSql'); const exportDbModelSql = require('../utility/exportDbModelSql');
const axios = require('axios'); const axios = require('axios');
const { callTextToSqlApi, callCompleteOnCursorApi, callRefactorSqlQueryApi } = require('../utility/authProxy'); const { callTextToSqlApi, callCompleteOnCursorApi, callRefactorSqlQueryApi } = require('../utility/authProxy');
const { decryptConnection } = require('../utility/crypting');
const { getSshTunnel } = require('../utility/sshTunnel');
const logger = getLogger('databaseConnections'); const logger = getLogger('databaseConnections');
@@ -140,6 +142,11 @@ module.exports = {
if (newOpened.disconnected) return; if (newOpened.disconnected) return;
this.close(conid, database, false); this.close(conid, database, false);
}); });
subprocess.on('error', err => {
logger.error(extractErrorLogData(err), 'Error in database connection subprocess');
if (newOpened.disconnected) return;
this.close(conid, database, false);
});
subprocess.send({ subprocess.send({
msgtype: 'connect', msgtype: 'connect',
@@ -619,9 +626,26 @@ module.exports = {
command, command,
{ conid, database, outputFile, inputFile, options, selectedTables, skippedTables, argsFormat } { conid, database, outputFile, inputFile, options, selectedTables, skippedTables, argsFormat }
) { ) {
const connection = await connections.getCore({ conid }); const sourceConnection = await connections.getCore({ conid });
const connection = {
...decryptConnection(sourceConnection),
};
const driver = requireEngineDriver(connection); const driver = requireEngineDriver(connection);
if (!connection.port && driver.defaultPort) {
connection.port = driver.defaultPort.toString();
}
if (connection.useSshTunnel) {
const tunnel = await getSshTunnel(connection);
if (tunnel.state == 'error') {
throw new Error(tunnel.message);
}
connection.server = tunnel.localHost;
connection.port = tunnel.localPort;
}
const settingsValue = await config.getSettings(); const settingsValue = await config.getSettings();
const externalTools = {}; const externalTools = {};

View File

@@ -9,6 +9,9 @@ const scheduler = require('./scheduler');
const getDiagramExport = require('../utility/getDiagramExport'); const getDiagramExport = require('../utility/getDiagramExport');
const apps = require('./apps'); const apps = require('./apps');
const getMapExport = require('../utility/getMapExport'); const getMapExport = require('../utility/getMapExport');
const dbgateApi = require('../shell');
const { getLogger } = require('dbgate-tools');
const logger = getLogger('files');
function serialize(format, data) { function serialize(format, data) {
if (format == 'text') return data; if (format == 'text') return data;
@@ -219,4 +222,60 @@ module.exports = {
return path.join(dir, file); return path.join(dir, file);
} }
}, },
createZipFromJsons_meta: true,
// Writes the given data map into a ZIP file of JSON Lines entries
// (see dbgateApi.zipJsonLinesData). Returns true on success.
async createZipFromJsons({ db, filePath }) {
logger.info(`Creating zip file from JSONS ${filePath}`);
await dbgateApi.zipJsonLinesData(db, filePath);
return true;
},
getJsonsFromZip_meta: true,
// Reads a ZIP of .jsonl entries back into a map of parsed data
// (see dbgateApi.unzipJsonLinesData).
async getJsonsFromZip({ filePath }) {
const res = await dbgateApi.unzipJsonLinesData(filePath);
return res;
},
downloadText_meta: true,
// Resolves `uri` via dbgateApi.download (http(s) URL, zip:// reference, or a
// plain local path passed through) and returns the file content as UTF-8 text.
// Returns null for an empty uri.
async downloadText({ uri }, req) {
if (!uri) return null;
const filePath = await dbgateApi.download(uri);
const text = await fs.readFile(filePath, {
encoding: 'utf-8',
});
return text;
},
saveUploadedFile_meta: true,
async saveUploadedFile({ filePath, fileName }) {
const FOLDERS = ['sql', 'sqlite'];
for (const folder of FOLDERS) {
if (fileName.toLowerCase().endsWith('.' + folder)) {
logger.info(`Saving ${folder} file ${fileName}`);
await fs.copyFile(filePath, path.join(filesdir(), folder, fileName));
socket.emitChanged(`files-changed`, { folder: folder });
socket.emitChanged(`all-files-changed`);
return {
name: path.basename(filePath),
folder: folder,
};
}
}
throw new Error(`${fileName} doesn't have one of supported extensions: ${FOLDERS.join(', ')}`);
},
exportFile_meta: true,
// Copies a managed file (files/<folder>/<file>) to an arbitrary target path.
// Requires read permission on the folder; returns false when denied.
async exportFile({ folder, file, filePath }, req) {
if (!hasPermission(`files/${folder}/read`, req)) return false;
await fs.copyFile(path.join(filesdir(), folder, file), filePath);
return true;
},
simpleCopy_meta: true,
// Plain file copy between two caller-supplied paths.
// NOTE(review): unlike the sibling exportFile, this performs no permission
// check and copies arbitrary filesystem paths on behalf of the client -
// confirm this is intended and that both paths are trusted.
async simpleCopy({ sourceFilePath, targetFilePath }, req) {
await fs.copyFile(sourceFilePath, targetFilePath);
return true;
},
}; };

View File

@@ -8,6 +8,8 @@ const getJslFileName = require('../utility/getJslFileName');
const JsonLinesDatastore = require('../utility/JsonLinesDatastore'); const JsonLinesDatastore = require('../utility/JsonLinesDatastore');
const requirePluginFunction = require('../utility/requirePluginFunction'); const requirePluginFunction = require('../utility/requirePluginFunction');
const socket = require('../utility/socket'); const socket = require('../utility/socket');
const crypto = require('crypto');
const dbgateApi = require('../shell');
function readFirstLine(file) { function readFirstLine(file) {
return new Promise((resolve, reject) => { return new Promise((resolve, reject) => {
@@ -293,4 +295,11 @@ module.exports = {
})), })),
}; };
}, },
downloadJslData_meta: true,
// Downloads the resource at `uri` directly into the JSL cache under a freshly
// generated jslid, so the client can page through it like any other JSL set.
async downloadJslData({ uri }) {
const jslid = crypto.randomUUID();
await dbgateApi.download(uri, { targetFile: getJslFileName(jslid) });
return { jslid };
},
}; };

View File

@@ -96,9 +96,9 @@ module.exports = {
handle_ping() {}, handle_ping() {},
handle_freeData(runid, { freeData }) { handle_dataResult(runid, { dataResult }) {
const { resolve } = this.requests[runid]; const { resolve } = this.requests[runid];
resolve(freeData); resolve(dataResult);
delete this.requests[runid]; delete this.requests[runid];
}, },
@@ -328,4 +328,24 @@ module.exports = {
}); });
return promise; return promise;
}, },
scriptResult_meta: true,
// Runs a declarative JSON script in a subprocess and resolves with the data
// sent back via the 'dataResult' IPC message (handled by handle_dataResult,
// which looks up this.requests[runid] and resolves the stored promise).
async scriptResult({ script }) {
// Arbitrary javascript is rejected here; only JSON-defined scripts may run.
if (script.type != 'json') {
return { errorMessage: 'Only JSON scripts are allowed' };
}
const promise = new Promise((resolve, reject) => {
const runid = crypto.randomUUID();
this.requests[runid] = { resolve, reject, exitOnStreamError: true };
// Substitute the generated runid into the script wherever a node of the
// form { $replace: 'runid' } appears.
const cloned = _.cloneDeepWith(script, node => {
if (node?.$replace == 'runid') {
return runid;
}
});
const js = jsonScriptToJavascript(cloned);
this.startCore(runid, scriptTemplate(js, false));
});
return promise;
},
}; };

View File

@@ -98,6 +98,11 @@ module.exports = {
if (newOpened.disconnected) return; if (newOpened.disconnected) return;
this.close(conid, false); this.close(conid, false);
}); });
subprocess.on('error', err => {
logger.error(extractErrorLogData(err), 'Error in server connection subprocess');
if (newOpened.disconnected) return;
this.close(conid, false);
});
subprocess.send({ msgtype: 'connect', ...connection, globalSettings: await config.getSettings() }); subprocess.send({ msgtype: 'connect', ...connection, globalSettings: await config.getSettings() });
return newOpened; return newOpened;
}); });

View File

@@ -4,6 +4,10 @@ module.exports = {
return null; return null;
}, },
// Stub implementation - the storage database is not available in this build,
// so exporting yields an empty object.
async getExportedDatabase() {
return {};
},
getConnection_meta: true, getConnection_meta: true,
async getConnection({ conid }) { async getConnection({ conid }) {
return null; return null;

View File

@@ -39,52 +39,6 @@ module.exports = {
}); });
}, },
uploadDataFile_meta: {
method: 'post',
raw: true,
},
uploadDataFile(req, res) {
const { data } = req.files || {};
if (!data) {
res.json(null);
return;
}
if (data.name.toLowerCase().endsWith('.sql')) {
logger.info(`Uploading SQL file ${data.name}, size=${data.size}`);
data.mv(path.join(filesdir(), 'sql', data.name), () => {
res.json({
name: data.name,
folder: 'sql',
});
socket.emitChanged(`files-changed`, { folder: 'sql' });
socket.emitChanged(`all-files-changed`);
});
return;
}
res.json(null);
},
saveDataFile_meta: true,
async saveDataFile({ filePath }) {
if (filePath.toLowerCase().endsWith('.sql')) {
logger.info(`Saving SQL file ${filePath}`);
await fs.copyFile(filePath, path.join(filesdir(), 'sql', path.basename(filePath)));
socket.emitChanged(`files-changed`, { folder: 'sql' });
socket.emitChanged(`all-files-changed`);
return {
name: path.basename(filePath),
folder: 'sql',
};
}
return null;
},
get_meta: { get_meta: {
method: 'get', method: 'get',
raw: true, raw: true,

View File

@@ -38,7 +38,7 @@ const { getLogger } = require('dbgate-tools');
const { getDefaultAuthProvider } = require('./auth/authProvider'); const { getDefaultAuthProvider } = require('./auth/authProvider');
const startCloudUpgradeTimer = require('./utility/cloudUpgrade'); const startCloudUpgradeTimer = require('./utility/cloudUpgrade');
const { isProApp } = require('./utility/checkLicense'); const { isProApp } = require('./utility/checkLicense');
const getHealthStatus = require('./utility/healthStatus'); const { getHealthStatus, getHealthStatusSprinx } = require('./utility/healthStatus');
const logger = getLogger('main'); const logger = getLogger('main');
@@ -124,6 +124,12 @@ function start() {
res.end(JSON.stringify(health, null, 2)); res.end(JSON.stringify(health, null, 2));
}); });
app.get(getExpressPath('/__health'), async function (req, res) {
res.setHeader('Content-Type', 'application/json');
const health = await getHealthStatusSprinx();
res.end(JSON.stringify(health, null, 2));
});
app.use(bodyParser.json({ limit: '50mb' })); app.use(bodyParser.json({ limit: '50mb' }));
app.use( app.use(

View File

@@ -4,6 +4,8 @@ const { connectUtility } = require('../utility/connectUtility');
const { handleProcessCommunication } = require('../utility/processComm'); const { handleProcessCommunication } = require('../utility/processComm');
const { pickSafeConnectionInfo } = require('../utility/crypting'); const { pickSafeConnectionInfo } = require('../utility/crypting');
const _ = require('lodash'); const _ = require('lodash');
const { getLogger, extractErrorLogData } = require('dbgate-tools');
const logger = getLogger('connectProcess');
const formatErrorDetail = (e, connection) => `${e.stack} const formatErrorDetail = (e, connection) => `${e.stack}
@@ -23,12 +25,22 @@ function start() {
try { try {
const driver = requireEngineDriver(connection); const driver = requireEngineDriver(connection);
const dbhan = await connectUtility(driver, connection, 'app'); const dbhan = await connectUtility(driver, connection, 'app');
const res = await driver.getVersion(dbhan); let version = {
version: 'Unknown',
};
try {
version = await driver.getVersion(dbhan);
} catch (err) {
logger.error(extractErrorLogData(err), 'Error getting DB server version');
version = {
version: 'Unknown',
};
}
let databases = undefined; let databases = undefined;
if (requestDbList) { if (requestDbList) {
databases = await driver.listDatabases(dbhan); databases = await driver.listDatabases(dbhan);
} }
process.send({ msgtype: 'connected', ...res, databases }); process.send({ msgtype: 'connected', ...version, databases });
await driver.close(dbhan); await driver.close(dbhan);
} catch (e) { } catch (e) {
console.error(e); console.error(e);

View File

@@ -120,10 +120,15 @@ function setStatusName(name) {
async function readVersion() { async function readVersion() {
const driver = requireEngineDriver(storedConnection); const driver = requireEngineDriver(storedConnection);
try {
const version = await driver.getVersion(dbhan); const version = await driver.getVersion(dbhan);
logger.debug(`Got server version: ${version.version}`); logger.debug(`Got server version: ${version.version}`);
process.send({ msgtype: 'version', version });
serverVersion = version; serverVersion = version;
} catch (err) {
logger.error(extractErrorLogData(err), 'Error getting DB server version');
serverVersion = { version: 'Unknown' };
}
process.send({ msgtype: 'version', version: serverVersion });
} }
async function handleConnect({ connection, structure, globalSettings }) { async function handleConnect({ connection, structure, globalSettings }) {

View File

@@ -46,7 +46,13 @@ async function handleRefresh() {
async function readVersion() { async function readVersion() {
const driver = requireEngineDriver(storedConnection); const driver = requireEngineDriver(storedConnection);
const version = await driver.getVersion(dbhan); let version;
try {
version = await driver.getVersion(dbhan);
} catch (err) {
logger.error(extractErrorLogData(err), 'Error getting DB server version');
version = { version: 'Unknown' };
}
process.send({ msgtype: 'version', version }); process.send({ msgtype: 'version', version });
} }

View File

@@ -3,7 +3,9 @@ const { archivedir, resolveArchiveFolder } = require('../utility/directories');
const jsonLinesReader = require('./jsonLinesReader'); const jsonLinesReader = require('./jsonLinesReader');
function archiveReader({ folderName, fileName, ...other }) { function archiveReader({ folderName, fileName, ...other }) {
const jsonlFile = path.join(resolveArchiveFolder(folderName), `${fileName}.jsonl`); const jsonlFile = folderName.endsWith('.zip')
? `zip://archive:${folderName}//${fileName}.jsonl`
: path.join(resolveArchiveFolder(folderName), `${fileName}.jsonl`);
const res = jsonLinesReader({ fileName: jsonlFile, ...other }); const res = jsonLinesReader({ fileName: jsonlFile, ...other });
return res; return res;
} }

View File

@@ -15,9 +15,9 @@ class CollectorWriterStream extends stream.Writable {
_final(callback) { _final(callback) {
process.send({ process.send({
msgtype: 'freeData', msgtype: 'dataResult',
runid: this.runid, runid: this.runid,
freeData: { rows: this.rows, structure: this.structure }, dataResult: { rows: this.rows, structure: this.structure },
}); });
callback(); callback();
} }

View File

@@ -1,61 +0,0 @@
const stream = require('stream');
const path = require('path');
const { quoteFullName, fullNameToString, getLogger } = require('dbgate-tools');
const requireEngineDriver = require('../utility/requireEngineDriver');
const { connectUtility } = require('../utility/connectUtility');
const logger = getLogger('dataDuplicator');
const { DataDuplicator } = require('dbgate-datalib');
const copyStream = require('./copyStream');
const jsonLinesReader = require('./jsonLinesReader');
const { resolveArchiveFolder } = require('../utility/directories');
async function dataDuplicator({
connection,
archive,
folder,
items,
options,
analysedStructure = null,
driver,
systemConnection,
}) {
if (!driver) driver = requireEngineDriver(connection);
const dbhan = systemConnection || (await connectUtility(driver, connection, 'write'));
try {
if (!analysedStructure) {
analysedStructure = await driver.analyseFull(dbhan);
}
const sourceDir = archive
? resolveArchiveFolder(archive)
: folder?.startsWith('archive:')
? resolveArchiveFolder(folder.substring('archive:'.length))
: folder;
const dupl = new DataDuplicator(
dbhan,
driver,
analysedStructure,
items.map(item => ({
name: item.name,
operation: item.operation,
matchColumns: item.matchColumns,
openStream:
item.openStream || (() => jsonLinesReader({ fileName: path.join(sourceDir, `${item.name}.jsonl`) })),
})),
stream,
copyStream,
options
);
await dupl.run();
} finally {
if (!systemConnection) {
await driver.close(dbhan);
}
}
}
module.exports = dataDuplicator;

View File

@@ -0,0 +1,96 @@
const stream = require('stream');
const path = require('path');
const { quoteFullName, fullNameToString, getLogger } = require('dbgate-tools');
const requireEngineDriver = require('../utility/requireEngineDriver');
const { connectUtility } = require('../utility/connectUtility');
const logger = getLogger('datareplicator');
const { DataReplicator } = require('dbgate-datalib');
const { compileCompoudEvalCondition } = require('dbgate-filterparser');
const copyStream = require('./copyStream');
const jsonLinesReader = require('./jsonLinesReader');
const { resolveArchiveFolder } = require('../utility/directories');
const { evaluateCondition } = require('dbgate-sqltree');
/**
 * Builds a per-row predicate for one replicator operation.
 *
 * @param {boolean} enabled Whether the operation is enabled at all.
 * @param {object} [condition] Optional compound filter condition; when given,
 *   the operation applies only to rows matching it.
 * @returns {(row: object) => boolean} Predicate deciding whether the
 *   operation applies to a given row.
 */
function compileOperationFunction(enabled, condition) {
  // A disabled operation never applies, regardless of any condition.
  if (!enabled) return _row => false;
  if (condition) {
    // Compile the condition only when one is actually provided; the original
    // compiled unconditionally and discarded the result in the no-condition case.
    const conditionCompiled = compileCompoudEvalCondition(condition);
    return row => evaluateCondition(conditionCompiled, row);
  }
  // Enabled with no condition: applies to every row.
  return _row => true;
}
/**
 * Replicates JSON Lines data sets into database tables, driven per-item by
 * find/create/update predicates (see compileOperationFunction).
 *
 * @param {object} params
 * @param {object} params.connection Connection definition (used when no driver/systemConnection given).
 * @param {string} [params.archive] Archive folder name, or a .zip archive file name.
 * @param {string} [params.folder] Source folder; may use the 'archive:' prefix.
 * @param {Array}  params.items Per-table replication items (name, matchColumns,
 *   operation flags/conditions, optional openStream or inline jsonArray).
 * @param {object} [params.options] Replicator options; when options.runid is set,
 *   the result is also sent to the parent process as a 'dataResult' IPC message.
 * @param {object} [params.analysedStructure] Pre-analysed DB structure; analysed on demand when null.
 * @param {object} [params.driver] Engine driver; resolved from connection when missing.
 * @param {object} [params.systemConnection] Already-open DB handle; when given, it is NOT closed here.
 * @returns {Promise<object>} The replicator result (repl.result).
 */
async function dataReplicator({
connection,
archive,
folder,
items,
options,
analysedStructure = null,
driver,
systemConnection,
}) {
if (!driver) driver = requireEngineDriver(connection);
const dbhan = systemConnection || (await connectUtility(driver, connection, 'write'));
try {
if (!analysedStructure) {
analysedStructure = await driver.analyseFull(dbhan);
}
// Resolve how per-item .jsonl file names map to readable locations:
// zip archives use the zip:// pseudo-URL understood by dbgateApi.download,
// plain folders resolve to filesystem paths.
let joinPath;
if (archive?.endsWith('.zip')) {
joinPath = file => `zip://archive:${archive}//${file}`;
} else {
const sourceDir = archive
? resolveArchiveFolder(archive)
: folder?.startsWith('archive:')
? resolveArchiveFolder(folder.substring('archive:'.length))
: folder;
joinPath = file => path.join(sourceDir, file);
}
const repl = new DataReplicator(
dbhan,
driver,
analysedStructure,
items.map(item => {
return {
name: item.name,
matchColumns: item.matchColumns,
findExisting: compileOperationFunction(item.findExisting, item.findCondition),
createNew: compileOperationFunction(item.createNew, item.createCondition),
updateExisting: compileOperationFunction(item.updateExisting, item.updateCondition),
deleteMissing: !!item.deleteMissing,
deleteRestrictionColumns: item.deleteRestrictionColumns ?? [],
// Stream priority: explicit openStream, then inline jsonArray, then
// the <name>.jsonl file in the resolved source location.
openStream: item.openStream
? item.openStream
: item.jsonArray
? () => stream.Readable.from(item.jsonArray)
: () => jsonLinesReader({ fileName: joinPath(`${item.name}.jsonl`) }),
};
}),
stream,
copyStream,
options
);
await repl.run();
if (options?.runid) {
process.send({
msgtype: 'dataResult',
runid: options?.runid,
dataResult: repl.result,
});
}
return repl.result;
} finally {
// Close the handle only if we opened it ourselves.
if (!systemConnection) {
await driver.close(dbhan);
}
}
}
module.exports = dataReplicator;

View File

@@ -1,14 +1,30 @@
const crypto = require('crypto'); const crypto = require('crypto');
const path = require('path'); const path = require('path');
const { uploadsdir } = require('../utility/directories'); const { uploadsdir, archivedir } = require('../utility/directories');
const { downloadFile } = require('../utility/downloader'); const { downloadFile } = require('../utility/downloader');
const extractSingleFileFromZip = require('../utility/extractSingleFileFromZip');
async function download(url) { async function download(url, options = {}) {
if (url && url.match(/(^http:\/\/)|(^https:\/\/)/)) { const { targetFile } = options || {};
const tmpFile = path.join(uploadsdir(), crypto.randomUUID()); if (url) {
await downloadFile(url, tmpFile); if (url.match(/(^http:\/\/)|(^https:\/\/)/)) {
return tmpFile; const destFile = targetFile || path.join(uploadsdir(), crypto.randomUUID());
await downloadFile(url, destFile);
return destFile;
} }
const zipMatch = url.match(/^zip\:\/\/(.*)\/\/(.*)$/);
if (zipMatch) {
const destFile = targetFile || path.join(uploadsdir(), crypto.randomUUID());
let zipFile = zipMatch[1];
if (zipFile.startsWith('archive:')) {
zipFile = path.join(archivedir(), zipFile.substring('archive:'.length));
}
await extractSingleFileFromZip(zipFile, zipMatch[2], destFile);
return destFile;
}
}
return url; return url;
} }

View File

@@ -25,7 +25,7 @@ const importDatabase = require('./importDatabase');
const loadDatabase = require('./loadDatabase'); const loadDatabase = require('./loadDatabase');
const generateModelSql = require('./generateModelSql'); const generateModelSql = require('./generateModelSql');
const modifyJsonLinesReader = require('./modifyJsonLinesReader'); const modifyJsonLinesReader = require('./modifyJsonLinesReader');
const dataDuplicator = require('./dataDuplicator'); const dataReplicator = require('./dataReplicator');
const dbModelToJson = require('./dbModelToJson'); const dbModelToJson = require('./dbModelToJson');
const jsonToDbModel = require('./jsonToDbModel'); const jsonToDbModel = require('./jsonToDbModel');
const jsonReader = require('./jsonReader'); const jsonReader = require('./jsonReader');
@@ -35,6 +35,11 @@ const autoIndexForeignKeysTransform = require('./autoIndexForeignKeysTransform')
const generateDeploySql = require('./generateDeploySql'); const generateDeploySql = require('./generateDeploySql');
const dropAllDbObjects = require('./dropAllDbObjects'); const dropAllDbObjects = require('./dropAllDbObjects');
const importDbFromFolder = require('./importDbFromFolder'); const importDbFromFolder = require('./importDbFromFolder');
const zipDirectory = require('./zipDirectory');
const unzipDirectory = require('./unzipDirectory');
const zipJsonLinesData = require('./zipJsonLinesData');
const unzipJsonLinesData = require('./unzipJsonLinesData');
const unzipJsonLinesFile = require('./unzipJsonLinesFile');
const dbgateApi = { const dbgateApi = {
queryReader, queryReader,
@@ -64,7 +69,7 @@ const dbgateApi = {
loadDatabase, loadDatabase,
generateModelSql, generateModelSql,
modifyJsonLinesReader, modifyJsonLinesReader,
dataDuplicator, dataReplicator,
dbModelToJson, dbModelToJson,
jsonToDbModel, jsonToDbModel,
dataTypeMapperTransform, dataTypeMapperTransform,
@@ -73,6 +78,11 @@ const dbgateApi = {
generateDeploySql, generateDeploySql,
dropAllDbObjects, dropAllDbObjects,
importDbFromFolder, importDbFromFolder,
zipDirectory,
unzipDirectory,
zipJsonLinesData,
unzipJsonLinesData,
unzipJsonLinesFile,
}; };
requirePlugin.initializeDbgateApi(dbgateApi); requirePlugin.initializeDbgateApi(dbgateApi);

View File

@@ -36,9 +36,10 @@ async function jsonLinesWriter({ fileName, encoding = 'utf-8', header = true })
logger.info(`Writing file ${fileName}`); logger.info(`Writing file ${fileName}`);
const stringify = new StringifyStream({ header }); const stringify = new StringifyStream({ header });
const fileStream = fs.createWriteStream(fileName, encoding); const fileStream = fs.createWriteStream(fileName, encoding);
stringify.pipe(fileStream); return [stringify, fileStream];
stringify['finisher'] = fileStream; // stringify.pipe(fileStream);
return stringify; // stringify['finisher'] = fileStream;
// return stringify;
} }
module.exports = jsonLinesWriter; module.exports = jsonLinesWriter;

View File

@@ -0,0 +1,91 @@
const yauzl = require('yauzl');
const fs = require('fs');
const path = require('path');
const { getLogger, extractErrorLogData } = require('dbgate-tools');
const logger = getLogger('unzipDirectory');
/**
 * Extracts an entire ZIP file, preserving its internal directory layout.
 * Entry names are validated so that a malicious archive cannot write outside
 * of outputDirectory ("zip slip" protection).
 *
 * @param {string} zipPath Path to the ZIP file on disk.
 * @param {string} outputDirectory Folder to create / overwrite with the contents.
 * @returns {Promise<boolean>} Resolves `true` on success, rejects on error.
 */
function unzipDirectory(zipPath, outputDirectory) {
  return new Promise((resolve, reject) => {
    yauzl.open(zipPath, { lazyEntries: true }, (err, zipFile) => {
      if (err) return reject(err);

      // Pending per-file extractions - the main promise resolves after they are all done.
      const pending = [];

      // Resolve an entry's destination and guard against "zip slip": an entry
      // name containing ".." (or resolving to an absolute path elsewhere)
      // must not escape outputDirectory.
      const resolveDestPath = fileName => {
        const root = path.resolve(outputDirectory);
        const destPath = path.resolve(root, fileName);
        if (destPath !== root && !destPath.startsWith(root + path.sep)) {
          throw new Error(`Refusing to extract "${fileName}" outside of "${outputDirectory}"`);
        }
        return destPath;
      };

      // Kick things off.
      zipFile.readEntry();

      zipFile.on('entry', entry => {
        let destPath;
        try {
          destPath = resolveDestPath(entry.fileName);
        } catch (pathErr) {
          logger.error(extractErrorLogData(pathErr), `Rejected unsafe entry in "${zipPath}".`);
          reject(pathErr);
          return;
        }

        // Handle directories (their names always end with "/" in ZIPs).
        if (/\/$/.test(entry.fileName)) {
          // Ensure the directory exists, then continue to the next entry.
          fs.promises
            .mkdir(destPath, { recursive: true })
            .then(() => zipFile.readEntry())
            .catch(reject);
          return;
        }

        // Handle files.
        const filePromise = fs.promises
          .mkdir(path.dirname(destPath), { recursive: true }) // make sure parent dirs exist
          .then(
            () =>
              new Promise((res, rej) => {
                zipFile.openReadStream(entry, (err, readStream) => {
                  if (err) return rej(err);

                  const writeStream = fs.createWriteStream(destPath);
                  readStream.pipe(writeStream);

                  // Surface read-side errors too; pipe() does not forward them
                  // to the write stream.
                  readStream.on('error', readErr => {
                    logger.error(
                      extractErrorLogData(readErr),
                      `Error reading "${entry.fileName}" from "${zipPath}".`
                    );
                    rej(readErr);
                  });

                  // Proceed to the next entry once we have consumed *this* one.
                  readStream.on('end', () => zipFile.readEntry());

                  writeStream.on('finish', () => {
                    logger.info(`Extracted "${entry.fileName}" to "${destPath}".`);
                    res();
                  });

                  writeStream.on('error', writeErr => {
                    logger.error(
                      extractErrorLogData(writeErr),
                      `Error extracting "${entry.fileName}" from "${zipPath}".`
                    );
                    rej(writeErr);
                  });
                });
              })
          );

        pending.push(filePromise);
      });

      // Entire archive enumerated; wait for all streams to finish.
      zipFile.on('end', () => {
        Promise.all(pending)
          .then(() => {
            logger.info(`Archive "${zipPath}" fully extracted to "${outputDirectory}".`);
            resolve(true);
          })
          .catch(reject);
      });

      zipFile.on('error', err => {
        logger.error(extractErrorLogData(err), `ZIP file error in ${zipPath}.`);
        reject(err);
      });
    });
  });
}
module.exports = unzipDirectory;

View File

@@ -0,0 +1,60 @@
const yauzl = require('yauzl');
const fs = require('fs');
const { jsonLinesParse } = require('dbgate-tools');
/**
 * Reads all *.jsonl entries from a ZIP archive and parses them.
 *
 * @param {string} zipPath Path to the ZIP file on disk.
 * @returns {Promise<Object>} Map of entry name (without the .jsonl extension)
 *   to the parsed JSON Lines content.
 */
function unzipJsonLinesData(zipPath) {
  return new Promise((resolve, reject) => {
    // Open the zip file.
    yauzl.open(zipPath, { lazyEntries: true }, (err, zipfile) => {
      if (err) {
        return reject(err);
      }

      const results = {};

      // Start reading entries.
      zipfile.readEntry();

      zipfile.on('entry', entry => {
        // Only process .jsonl files (extension match is case-insensitive).
        if (/\.jsonl$/i.test(entry.fileName)) {
          zipfile.openReadStream(entry, (err, readStream) => {
            if (err) {
              return reject(err);
            }

            const chunks = [];
            // Surface stream read errors instead of leaving the promise hanging.
            readStream.on('error', reject);
            readStream.on('data', chunk => chunks.push(chunk));
            readStream.on('end', () => {
              try {
                const fileContent = Buffer.concat(chunks).toString('utf-8');
                const parsedJson = jsonLinesParse(fileContent);
                // BUG FIX: strip the extension with the same case-insensitive
                // pattern used for matching, so "DATA.JSONL" keys as "DATA"
                // rather than "DATA.JSONL".
                results[entry.fileName.replace(/\.jsonl$/i, '')] = parsedJson;
              } catch (parseError) {
                return reject(parseError);
              }
              // Move to the next entry.
              zipfile.readEntry();
            });
          });
        } else {
          // Not a JSON Lines file, skip.
          zipfile.readEntry();
        }
      });

      // Resolve when no more entries.
      zipfile.on('end', () => {
        resolve(results);
      });

      // Catch errors from zipfile.
      zipfile.on('error', zipErr => {
        reject(zipErr);
      });
    });
  });
}
module.exports = unzipJsonLinesData;

View File

@@ -0,0 +1,59 @@
const yauzl = require('yauzl');
const fs = require('fs');
const { jsonLinesParse } = require('dbgate-tools');
/**
 * Extracts one named entry from a ZIP archive and parses it as JSON Lines.
 *
 * @param {string} zipPath - Path to the ZIP file on disk.
 * @param {string} fileInZip - Exact entry name inside the ZIP to read.
 * @returns {Promise<any[] | null>} Parsed rows, or null when the entry was not found.
 */
function unzipJsonLinesFile(zipPath, fileInZip) {
  return new Promise((resolve, reject) => {
    // Open the zip file; lazyEntries means we drive iteration via readEntry()
    yauzl.open(zipPath, { lazyEntries: true }, (err, zipfile) => {
      if (err) {
        return reject(err);
      }
      let result = null;
      // Start reading entries
      zipfile.readEntry();
      zipfile.on('entry', entry => {
        // FIX: strict equality instead of loose ==
        if (entry.fileName === fileInZip) {
          zipfile.openReadStream(entry, (streamErr, readStream) => {
            if (streamErr) {
              return reject(streamErr);
            }
            const chunks = [];
            readStream.on('data', chunk => chunks.push(chunk));
            // FIX: propagate read-stream failures; previously the promise stayed pending forever
            readStream.on('error', readErr => reject(readErr));
            readStream.on('end', () => {
              try {
                const fileContent = Buffer.concat(chunks).toString('utf-8');
                const parsedJson = jsonLinesParse(fileContent);
                result = parsedJson;
              } catch (parseError) {
                return reject(parseError);
              }
              // Move to the next entry
              zipfile.readEntry();
            });
          });
        } else {
          // Not the requested entry, skip
          zipfile.readEntry();
        }
      });
      // Resolve when no more entries
      zipfile.on('end', () => {
        resolve(result);
      });
      // Catch errors from zipfile
      zipfile.on('error', zipErr => {
        reject(zipErr);
      });
    });
  });
}
module.exports = unzipJsonLinesFile;

View File

@@ -0,0 +1,49 @@
const fs = require('fs');
const path = require('path');
const archiver = require('archiver');
const { getLogger, extractErrorLogData } = require('dbgate-tools');
const { archivedir } = require('../utility/directories');
const logger = getLogger('compressDirectory');
/**
 * Compresses a directory into a ZIP file, excluding nested `.zip` entries.
 *
 * @param {string} inputDirectory - Directory whose contents are archived.
 * @param {string} outputFile - Target ZIP path; an `archive:` prefix is
 *   resolved relative to the archive directory.
 * @returns {Promise<void>} Resolves once the output stream has closed.
 */
function zipDirectory(inputDirectory, outputFile) {
  if (outputFile.startsWith('archive:')) {
    outputFile = path.join(archivedir(), outputFile.substring('archive:'.length));
  }
  return new Promise((resolve, reject) => {
    const output = fs.createWriteStream(outputFile);
    const archive = archiver('zip', { zlib: { level: 9 } }); // level: 9 => best compression
    // Listen for all archive data to be written
    output.on('close', () => {
      logger.info(`ZIP file created (${archive.pointer()} total bytes)`);
      resolve();
    });
    // FIX: a failing write stream (disk full, permissions) previously left the promise pending forever
    output.on('error', err => {
      logger.error(extractErrorLogData(err), `Error writing ZIP file: ${err.message}`);
      reject(err);
    });
    archive.on('warning', err => {
      logger.warn(extractErrorLogData(err), `Warning while creating ZIP: ${err.message}`);
    });
    archive.on('error', err => {
      logger.error(extractErrorLogData(err), `Error while creating ZIP: ${err.message}`);
      reject(err);
    });
    // Pipe archive data to the file
    archive.pipe(output);
    // Append files from a folder
    archive.directory(inputDirectory, false, entryData => {
      if (entryData.name.endsWith('.zip')) {
        return false; // returning false means "do not include"
      }
      // otherwise, include it
      return entryData;
    });
    // Finalize the archive
    archive.finalize();
  });
}
module.exports = zipDirectory;

View File

@@ -0,0 +1,49 @@
const fs = require('fs');
const _ = require('lodash');
const path = require('path');
const archiver = require('archiver');
const { getLogger, extractErrorLogData, jsonLinesStringify } = require('dbgate-tools');
const { archivedir } = require('../utility/directories');
const logger = getLogger('compressDirectory');
/**
 * Writes the array-valued members of a JSON database object into a ZIP file,
 * one JSON Lines entry (`<key>.jsonl`) per array.
 *
 * @param {Object<string, any>} jsonDb - Object whose array members are serialized.
 * @param {string} outputFile - Target ZIP path; an `archive:` prefix is
 *   resolved relative to the archive directory.
 * @returns {Promise<void>} Resolves once the output stream has closed.
 */
function zipDirectory(jsonDb, outputFile) {
  if (outputFile.startsWith('archive:')) {
    outputFile = path.join(archivedir(), outputFile.substring('archive:'.length));
  }
  return new Promise((resolve, reject) => {
    const output = fs.createWriteStream(outputFile);
    const archive = archiver('zip', { zlib: { level: 9 } }); // level: 9 => best compression
    // Listen for all archive data to be written
    output.on('close', () => {
      logger.info(`ZIP file created (${archive.pointer()} total bytes)`);
      resolve();
    });
    // FIX: a failing write stream (disk full, permissions) previously left the promise pending forever
    output.on('error', err => {
      logger.error(extractErrorLogData(err), `Error writing ZIP file: ${err.message}`);
      reject(err);
    });
    archive.on('warning', err => {
      logger.warn(extractErrorLogData(err), `Warning while creating ZIP: ${err.message}`);
    });
    archive.on('error', err => {
      logger.error(extractErrorLogData(err), `Error while creating ZIP: ${err.message}`);
      reject(err);
    });
    // Pipe archive data to the file
    archive.pipe(output);
    // Only array members are serialized; scalar/object members are skipped
    for (const [key, data] of Object.entries(jsonDb)) {
      if (Array.isArray(data)) {
        const jsonString = jsonLinesStringify(data);
        archive.append(jsonString, { name: `${key}.jsonl` });
      }
    }
    // Finalize the archive
    archive.finalize();
  });
}
module.exports = zipDirectory;

View File

@@ -0,0 +1,819 @@
module.exports = {
"tables": [
{
"pureName": "auth_methods",
"columns": [
{
"pureName": "auth_methods",
"columnName": "id",
"dataType": "int",
"notNull": true
},
{
"pureName": "auth_methods",
"columnName": "name",
"dataType": "varchar(250)",
"notNull": false
},
{
"pureName": "auth_methods",
"columnName": "type",
"dataType": "varchar(250)",
"notNull": false
},
{
"pureName": "auth_methods",
"columnName": "amoid",
"dataType": "varchar(250)",
"notNull": false
},
{
"pureName": "auth_methods",
"columnName": "is_disabled",
"dataType": "int",
"notNull": false
},
{
"pureName": "auth_methods",
"columnName": "is_default",
"dataType": "int",
"notNull": false
},
{
"pureName": "auth_methods",
"columnName": "is_collapsed",
"dataType": "int",
"notNull": false
}
],
"foreignKeys": [],
"primaryKey": {
"pureName": "auth_methods",
"constraintType": "primaryKey",
"columns": [
{
"columnName": "id"
}
]
},
"preloadedRows": [
{
"id": -1,
"amoid": "790ca4d2-7f01-4800-955b-d691b890cc50",
"name": "Anonymous",
"type": "none"
},
{
"id": -2,
"amoid": "53db1cbf-f488-44d9-8670-7162510eb09c",
"name": "Local",
"type": "local"
}
]
},
{
"pureName": "auth_methods_config",
"columns": [
{
"pureName": "auth_methods_config",
"columnName": "id",
"dataType": "int",
"autoIncrement": true,
"notNull": true
},
{
"pureName": "auth_methods_config",
"columnName": "auth_method_id",
"dataType": "int",
"notNull": true
},
{
"pureName": "auth_methods_config",
"columnName": "key",
"dataType": "varchar(250)",
"notNull": false
},
{
"pureName": "auth_methods_config",
"columnName": "value",
"dataType": "varchar(250)",
"notNull": false
}
],
"foreignKeys": [
{
"constraintType": "foreignKey",
"pureName": "auth_methods_config",
"refTableName": "auth_methods",
"deleteAction": "CASCADE",
"columns": [
{
"columnName": "auth_method_id",
"refColumnName": "id"
}
]
}
],
"primaryKey": {
"pureName": "auth_methods_config",
"constraintType": "primaryKey",
"columns": [
{
"columnName": "id"
}
]
}
},
{
"pureName": "config",
"columns": [
{
"pureName": "config",
"columnName": "id",
"dataType": "int",
"autoIncrement": true,
"notNull": true
},
{
"pureName": "config",
"columnName": "group",
"dataType": "varchar(250)",
"notNull": false
},
{
"pureName": "config",
"columnName": "key",
"dataType": "varchar(250)",
"notNull": false
},
{
"pureName": "config",
"columnName": "value",
"dataType": "varchar(1000)",
"notNull": false
}
],
"foreignKeys": [],
"primaryKey": {
"pureName": "config",
"constraintType": "primaryKey",
"columns": [
{
"columnName": "id"
}
]
}
},
{
"pureName": "connections",
"columns": [
{
"pureName": "connections",
"columnName": "id",
"dataType": "int",
"autoIncrement": true,
"notNull": true
},
{
"pureName": "connections",
"columnName": "conid",
"dataType": "varchar(250)",
"notNull": false
},
{
"pureName": "connections",
"columnName": "displayName",
"dataType": "varchar(250)",
"notNull": false
},
{
"pureName": "connections",
"columnName": "connectionColor",
"dataType": "varchar(250)",
"notNull": false
},
{
"pureName": "connections",
"columnName": "engine",
"dataType": "varchar(250)",
"notNull": false
},
{
"pureName": "connections",
"columnName": "server",
"dataType": "varchar(250)",
"notNull": false
},
{
"pureName": "connections",
"columnName": "databaseFile",
"dataType": "varchar(250)",
"notNull": false
},
{
"pureName": "connections",
"columnName": "useDatabaseUrl",
"dataType": "int",
"notNull": false
},
{
"pureName": "connections",
"columnName": "databaseUrl",
"dataType": "varchar(250)",
"notNull": false
},
{
"pureName": "connections",
"columnName": "authType",
"dataType": "varchar(250)",
"notNull": false
},
{
"pureName": "connections",
"columnName": "port",
"dataType": "varchar(20)",
"notNull": false
},
{
"pureName": "connections",
"columnName": "serviceName",
"dataType": "varchar(250)",
"notNull": false
},
{
"pureName": "connections",
"columnName": "serviceNameType",
"dataType": "varchar(250)",
"notNull": false
},
{
"pureName": "connections",
"columnName": "socketPath",
"dataType": "varchar(250)",
"notNull": false
},
{
"pureName": "connections",
"columnName": "user",
"dataType": "varchar(250)",
"notNull": false
},
{
"pureName": "connections",
"columnName": "password",
"dataType": "varchar(250)",
"notNull": false
},
{
"pureName": "connections",
"columnName": "passwordMode",
"dataType": "varchar(250)",
"notNull": false
},
{
"pureName": "connections",
"columnName": "treeKeySeparator",
"dataType": "varchar(250)",
"notNull": false
},
{
"pureName": "connections",
"columnName": "windowsDomain",
"dataType": "varchar(250)",
"notNull": false
},
{
"pureName": "connections",
"columnName": "isReadOnly",
"dataType": "int",
"notNull": false
},
{
"pureName": "connections",
"columnName": "trustServerCertificate",
"dataType": "int",
"notNull": false
},
{
"pureName": "connections",
"columnName": "defaultDatabase",
"dataType": "varchar(250)",
"notNull": false
},
{
"pureName": "connections",
"columnName": "singleDatabase",
"dataType": "int",
"notNull": false
},
{
"pureName": "connections",
"columnName": "useSshTunnel",
"dataType": "int",
"notNull": false
},
{
"pureName": "connections",
"columnName": "sshHost",
"dataType": "varchar(250)",
"notNull": false
},
{
"pureName": "connections",
"columnName": "sshPort",
"dataType": "varchar(20)",
"notNull": false
},
{
"pureName": "connections",
"columnName": "sshMode",
"dataType": "varchar(250)",
"notNull": false
},
{
"pureName": "connections",
"columnName": "sshKeyFile",
"dataType": "varchar(250)",
"notNull": false
},
{
"pureName": "connections",
"columnName": "sshKeyfilePassword",
"dataType": "varchar(250)",
"notNull": false
},
{
"pureName": "connections",
"columnName": "sshLogin",
"dataType": "varchar(250)",
"notNull": false
},
{
"pureName": "connections",
"columnName": "sshPassword",
"dataType": "varchar(250)",
"notNull": false
},
{
"pureName": "connections",
"columnName": "sshBastionHost",
"dataType": "varchar(250)",
"notNull": false
},
{
"pureName": "connections",
"columnName": "useSsl",
"dataType": "int",
"notNull": false
},
{
"pureName": "connections",
"columnName": "sslCaFile",
"dataType": "varchar(250)",
"notNull": false
},
{
"pureName": "connections",
"columnName": "sslCertFilePassword",
"dataType": "varchar(250)",
"notNull": false
},
{
"pureName": "connections",
"columnName": "sslKeyFile",
"dataType": "varchar(250)",
"notNull": false
},
{
"pureName": "connections",
"columnName": "sslRejectUnauthorized",
"dataType": "int",
"notNull": false
},
{
"pureName": "connections",
"columnName": "clientLibraryPath",
"dataType": "varchar(250)",
"notNull": false
},
{
"pureName": "connections",
"columnName": "useRedirectDbLogin",
"dataType": "int",
"notNull": false
},
{
"pureName": "connections",
"columnName": "allowedDatabases",
"dataType": "varchar(500)",
"notNull": false
},
{
"pureName": "connections",
"columnName": "allowedDatabasesRegex",
"dataType": "varchar(250)",
"notNull": false
},
{
"pureName": "connections",
"columnName": "endpoint",
"dataType": "varchar(250)",
"notNull": false
},
{
"pureName": "connections",
"columnName": "endpointKey",
"dataType": "varchar(250)",
"notNull": false
},
{
"pureName": "connections",
"columnName": "accessKeyId",
"dataType": "varchar(250)",
"notNull": false
},
{
"pureName": "connections",
"columnName": "secretAccessKey",
"dataType": "varchar(250)",
"notNull": false
},
{
"pureName": "connections",
"columnName": "awsRegion",
"dataType": "varchar(250)",
"notNull": false
}
],
"foreignKeys": [],
"primaryKey": {
"pureName": "connections",
"constraintType": "primaryKey",
"columns": [
{
"columnName": "id"
}
]
}
},
{
"pureName": "roles",
"columns": [
{
"pureName": "roles",
"columnName": "id",
"dataType": "int",
"autoIncrement": true,
"notNull": true
},
{
"pureName": "roles",
"columnName": "name",
"dataType": "varchar(250)",
"notNull": false
}
],
"foreignKeys": [],
"primaryKey": {
"pureName": "roles",
"constraintType": "primaryKey",
"columns": [
{
"columnName": "id"
}
]
},
"preloadedRows": [
{
"id": -1,
"name": "anonymous-user"
},
{
"id": -2,
"name": "logged-user"
},
{
"id": -3,
"name": "superadmin"
}
]
},
{
"pureName": "role_connections",
"columns": [
{
"pureName": "role_connections",
"columnName": "id",
"dataType": "int",
"autoIncrement": true,
"notNull": true
},
{
"pureName": "role_connections",
"columnName": "role_id",
"dataType": "int",
"notNull": true
},
{
"pureName": "role_connections",
"columnName": "connection_id",
"dataType": "int",
"notNull": true
}
],
"foreignKeys": [
{
"constraintType": "foreignKey",
"pureName": "role_connections",
"refTableName": "roles",
"deleteAction": "CASCADE",
"columns": [
{
"columnName": "role_id",
"refColumnName": "id"
}
]
},
{
"constraintType": "foreignKey",
"pureName": "role_connections",
"refTableName": "connections",
"deleteAction": "CASCADE",
"columns": [
{
"columnName": "connection_id",
"refColumnName": "id"
}
]
}
],
"primaryKey": {
"pureName": "role_connections",
"constraintType": "primaryKey",
"columns": [
{
"columnName": "id"
}
]
}
},
{
"pureName": "role_permissions",
"columns": [
{
"pureName": "role_permissions",
"columnName": "id",
"dataType": "int",
"autoIncrement": true,
"notNull": true
},
{
"pureName": "role_permissions",
"columnName": "role_id",
"dataType": "int",
"notNull": true
},
{
"pureName": "role_permissions",
"columnName": "permission",
"dataType": "varchar(250)",
"notNull": true
}
],
"foreignKeys": [
{
"constraintType": "foreignKey",
"pureName": "role_permissions",
"refTableName": "roles",
"deleteAction": "CASCADE",
"columns": [
{
"columnName": "role_id",
"refColumnName": "id"
}
]
}
],
"primaryKey": {
"pureName": "role_permissions",
"constraintType": "primaryKey",
"columns": [
{
"columnName": "id"
}
]
}
},
{
"pureName": "users",
"columns": [
{
"pureName": "users",
"columnName": "id",
"dataType": "int",
"autoIncrement": true,
"notNull": true
},
{
"pureName": "users",
"columnName": "login",
"dataType": "varchar(250)",
"notNull": false
},
{
"pureName": "users",
"columnName": "password",
"dataType": "varchar(250)",
"notNull": false
},
{
"pureName": "users",
"columnName": "email",
"dataType": "varchar(250)",
"notNull": false
}
],
"foreignKeys": [],
"primaryKey": {
"pureName": "users",
"constraintType": "primaryKey",
"columns": [
{
"columnName": "id"
}
]
}
},
{
"pureName": "user_connections",
"columns": [
{
"pureName": "user_connections",
"columnName": "id",
"dataType": "int",
"autoIncrement": true,
"notNull": true
},
{
"pureName": "user_connections",
"columnName": "user_id",
"dataType": "int",
"notNull": true
},
{
"pureName": "user_connections",
"columnName": "connection_id",
"dataType": "int",
"notNull": true
}
],
"foreignKeys": [
{
"constraintType": "foreignKey",
"pureName": "user_connections",
"refTableName": "users",
"deleteAction": "CASCADE",
"columns": [
{
"columnName": "user_id",
"refColumnName": "id"
}
]
},
{
"constraintType": "foreignKey",
"pureName": "user_connections",
"refTableName": "connections",
"deleteAction": "CASCADE",
"columns": [
{
"columnName": "connection_id",
"refColumnName": "id"
}
]
}
],
"primaryKey": {
"pureName": "user_connections",
"constraintType": "primaryKey",
"columns": [
{
"columnName": "id"
}
]
}
},
{
"pureName": "user_permissions",
"columns": [
{
"pureName": "user_permissions",
"columnName": "id",
"dataType": "int",
"autoIncrement": true,
"notNull": true
},
{
"pureName": "user_permissions",
"columnName": "user_id",
"dataType": "int",
"notNull": true
},
{
"pureName": "user_permissions",
"columnName": "permission",
"dataType": "varchar(250)",
"notNull": true
}
],
"foreignKeys": [
{
"constraintType": "foreignKey",
"pureName": "user_permissions",
"refTableName": "users",
"deleteAction": "CASCADE",
"columns": [
{
"columnName": "user_id",
"refColumnName": "id"
}
]
}
],
"primaryKey": {
"pureName": "user_permissions",
"constraintType": "primaryKey",
"columns": [
{
"columnName": "id"
}
]
}
},
{
"pureName": "user_roles",
"columns": [
{
"pureName": "user_roles",
"columnName": "id",
"dataType": "int",
"autoIncrement": true,
"notNull": true
},
{
"pureName": "user_roles",
"columnName": "user_id",
"dataType": "int",
"notNull": true
},
{
"pureName": "user_roles",
"columnName": "role_id",
"dataType": "int",
"notNull": true
}
],
"foreignKeys": [
{
"constraintType": "foreignKey",
"pureName": "user_roles",
"refTableName": "users",
"deleteAction": "CASCADE",
"columns": [
{
"columnName": "user_id",
"refColumnName": "id"
}
]
},
{
"constraintType": "foreignKey",
"pureName": "user_roles",
"refTableName": "roles",
"deleteAction": "CASCADE",
"columns": [
{
"columnName": "role_id",
"refColumnName": "id"
}
]
}
],
"primaryKey": {
"pureName": "user_roles",
"constraintType": "primaryKey",
"columns": [
{
"columnName": "id"
}
]
}
}
],
"collections": [],
"views": [],
"matviews": [],
"functions": [],
"procedures": [],
"triggers": []
};

View File

@@ -60,6 +60,10 @@ class DatastoreProxy {
// if (this.disconnected) return; // if (this.disconnected) return;
this.subprocess = null; this.subprocess = null;
}); });
this.subprocess.on('error', err => {
logger.error(extractErrorLogData(err), 'Error in data store subprocess');
this.subprocess = null;
});
this.subprocess.send({ msgtype: 'open', file: this.file }); this.subprocess.send({ msgtype: 'open', file: this.file });
} }
return this.subprocess; return this.subprocess;

View File

@@ -4,11 +4,20 @@ const fsp = require('fs/promises');
const semver = require('semver'); const semver = require('semver');
const currentVersion = require('../currentVersion'); const currentVersion = require('../currentVersion');
const { getLogger, extractErrorLogData } = require('dbgate-tools'); const { getLogger, extractErrorLogData } = require('dbgate-tools');
const { storageReadConfig } = require('../controllers/storageDb');
const logger = getLogger('cloudUpgrade'); const logger = getLogger('cloudUpgrade');
async function checkCloudUpgrade() { async function checkCloudUpgrade() {
try { try {
if (process.env.STORAGE_DATABASE) {
const settings = await storageReadConfig('settings');
if (settings['cloud.useAutoUpgrade'] != 1) {
// auto-upgrade not allowed
return;
}
}
const resp = await axios.default.get('https://api.github.com/repos/dbgate/dbgate/releases/latest'); const resp = await axios.default.get('https://api.github.com/repos/dbgate/dbgate/releases/latest');
const json = resp.data; const json = resp.data;
const version = json.name.substring(1); const version = json.name.substring(1);
@@ -43,7 +52,11 @@ async function checkCloudUpgrade() {
logger.info(`Downloaded new version from ${zipUrl}`); logger.info(`Downloaded new version from ${zipUrl}`);
} else { } else {
logger.info(`Checked version ${version} is not newer than ${cloudDownloadedVersion ?? currentVersion.version}, upgrade skippped`); logger.info(
`Checked version ${version} is not newer than ${
cloudDownloadedVersion ?? currentVersion.version
}, upgrade skippped`
);
} }
} catch (err) { } catch (err) {
logger.error(extractErrorLogData(err), 'Error checking cloud upgrade'); logger.error(extractErrorLogData(err), 'Error checking cloud upgrade');

View File

@@ -96,7 +96,9 @@ async function connectUtility(driver, storedConnection, connectionMode, addition
...decryptConnection(connectionLoaded), ...decryptConnection(connectionLoaded),
}; };
if (!connection.port && driver.defaultPort) connection.port = driver.defaultPort.toString(); if (!connection.port && driver.defaultPort) {
connection.port = driver.defaultPort.toString();
}
if (connection.useSshTunnel) { if (connection.useSshTunnel) {
const tunnel = await getSshTunnelProxy(connection); const tunnel = await getSshTunnelProxy(connection);

View File

@@ -5,12 +5,16 @@ const path = require('path');
const _ = require('lodash'); const _ = require('lodash');
const { datadir } = require('./directories'); const { datadir } = require('./directories');
const { encryptionKeyArg } = require('./processArgs');
const defaultEncryptionKey = 'mQAUaXhavRGJDxDTXSCg7Ej0xMmGCrx6OKA07DIMBiDcYYkvkaXjTAzPUEHEHEf9'; const defaultEncryptionKey = 'mQAUaXhavRGJDxDTXSCg7Ej0xMmGCrx6OKA07DIMBiDcYYkvkaXjTAzPUEHEHEf9';
let _encryptionKey = null; let _encryptionKey = null;
function loadEncryptionKey() { function loadEncryptionKey() {
if (encryptionKeyArg) {
return encryptionKeyArg;
}
if (_encryptionKey) { if (_encryptionKey) {
return _encryptionKey; return _encryptionKey;
} }
@@ -55,7 +59,7 @@ async function loadEncryptionKeyFromExternal(storedValue, setStoredValue) {
let _encryptor = null; let _encryptor = null;
function getEncryptor() { function getInternalEncryptor() {
if (_encryptor) { if (_encryptor) {
return _encryptor; return _encryptor;
} }
@@ -63,11 +67,25 @@ function getEncryptor() {
return _encryptor; return _encryptor;
} }
function encryptPasswordString(password) {
if (password && !password.startsWith('crypt:')) {
return 'crypt:' + getInternalEncryptor().encrypt(password);
}
return password;
}
function decryptPasswordString(password) {
if (password && password.startsWith('crypt:')) {
return getInternalEncryptor().decrypt(password.substring('crypt:'.length));
}
return password;
}
function encryptObjectPasswordField(obj, field) { function encryptObjectPasswordField(obj, field) {
if (obj && obj[field] && !obj[field].startsWith('crypt:')) { if (obj && obj[field] && !obj[field].startsWith('crypt:')) {
return { return {
...obj, ...obj,
[field]: 'crypt:' + getEncryptor().encrypt(obj[field]), [field]: 'crypt:' + getInternalEncryptor().encrypt(obj[field]),
}; };
} }
return obj; return obj;
@@ -77,7 +95,7 @@ function decryptObjectPasswordField(obj, field) {
if (obj && obj[field] && obj[field].startsWith('crypt:')) { if (obj && obj[field] && obj[field].startsWith('crypt:')) {
return { return {
...obj, ...obj,
[field]: getEncryptor().decrypt(obj[field].substring('crypt:'.length)), [field]: getInternalEncryptor().decrypt(obj[field].substring('crypt:'.length)),
}; };
} }
return obj; return obj;
@@ -131,6 +149,54 @@ function pickSafeConnectionInfo(connection) {
function setEncryptionKey(encryptionKey) { function setEncryptionKey(encryptionKey) {
_encryptionKey = encryptionKey; _encryptionKey = encryptionKey;
_encryptor = null; _encryptor = null;
global.ENCRYPTION_KEY = encryptionKey;
}
function getEncryptionKey() {
return _encryptionKey;
}
function generateTransportEncryptionKey() {
const encryptor = simpleEncryptor.createEncryptor(defaultEncryptionKey);
const result = {
encryptionKey: crypto.randomBytes(32).toString('hex'),
};
return encryptor.encrypt(result);
}
function createTransportEncryptor(encryptionData) {
const encryptor = simpleEncryptor.createEncryptor(defaultEncryptionKey);
const data = encryptor.decrypt(encryptionData);
const res = simpleEncryptor.createEncryptor(data['encryptionKey']);
return res;
}
function recryptObjectPasswordField(obj, field, decryptEncryptor, encryptEncryptor) {
if (obj && obj[field] && obj[field].startsWith('crypt:')) {
return {
...obj,
[field]: 'crypt:' + encryptEncryptor.encrypt(decryptEncryptor.decrypt(obj[field].substring('crypt:'.length))),
};
}
return obj;
}
function recryptObjectPasswordFieldInPlace(obj, field, decryptEncryptor, encryptEncryptor) {
if (obj && obj[field] && obj[field].startsWith('crypt:')) {
obj[field] = 'crypt:' + encryptEncryptor.encrypt(decryptEncryptor.decrypt(obj[field].substring('crypt:'.length)));
}
}
function recryptConnection(connection, decryptEncryptor, encryptEncryptor) {
connection = recryptObjectPasswordField(connection, 'password', decryptEncryptor, encryptEncryptor);
connection = recryptObjectPasswordField(connection, 'sshPassword', decryptEncryptor, encryptEncryptor);
connection = recryptObjectPasswordField(connection, 'sshKeyfilePassword', decryptEncryptor, encryptEncryptor);
return connection;
}
function recryptUser(user, decryptEncryptor, encryptEncryptor) {
user = recryptObjectPasswordField(user, 'password', decryptEncryptor, encryptEncryptor);
return user;
} }
module.exports = { module.exports = {
@@ -142,4 +208,16 @@ module.exports = {
maskConnection, maskConnection,
pickSafeConnectionInfo, pickSafeConnectionInfo,
loadEncryptionKeyFromExternal, loadEncryptionKeyFromExternal,
getEncryptionKey,
setEncryptionKey,
encryptPasswordString,
decryptPasswordString,
getInternalEncryptor,
recryptConnection,
recryptUser,
generateTransportEncryptionKey,
createTransportEncryptor,
recryptObjectPasswordField,
recryptObjectPasswordFieldInPlace,
}; };

View File

@@ -0,0 +1,77 @@
const yauzl = require('yauzl');
const fs = require('fs');
const { getLogger, extractErrorLogData } = require('dbgate-tools');
const logger = getLogger('extractSingleFileFromZip');
/**
* Extracts a single file from a ZIP using yauzl.
* Stops reading the rest of the archive once the file is found.
*
* @param {string} zipPath - Path to the ZIP file on disk.
* @param {string} fileInZip - The file path *inside* the ZIP to extract.
* @param {string} outputPath - Where to write the extracted file on disk.
* @returns {Promise<boolean>} - Resolves with a success message or a "not found" message.
*/
/**
 * Extracts a single file from a ZIP using yauzl.
 * Stops reading the rest of the archive once the file is found.
 *
 * @param {string} zipPath - Path to the ZIP file on disk.
 * @param {string} fileInZip - The file path *inside* the ZIP to extract.
 * @param {string} outputPath - Where to write the extracted file on disk.
 * @returns {Promise<boolean>} - Resolves true when extracted, false when not found.
 */
function extractSingleFileFromZip(zipPath, fileInZip, outputPath) {
  return new Promise((resolve, reject) => {
    yauzl.open(zipPath, { lazyEntries: true }, (err, zipFile) => {
      if (err) return reject(err);
      let fileFound = false;
      // Start reading the first entry
      zipFile.readEntry();
      zipFile.on('entry', entry => {
        // Compare the entry name to the file we want
        if (entry.fileName === fileInZip) {
          fileFound = true;
          // Open a read stream for this entry
          zipFile.openReadStream(entry, (streamErr, readStream) => {
            if (streamErr) return reject(streamErr);
            // Create a write stream to outputPath
            const writeStream = fs.createWriteStream(outputPath);
            readStream.pipe(writeStream);
            // FIX: handle read-side failures; previously only write errors rejected,
            // so a corrupted entry left the promise pending forever
            readStream.on('error', readErr => {
              logger.error(extractErrorLogData(readErr), `Error reading "${fileInZip}" from "${zipPath}".`);
              reject(readErr);
            });
            // When the read stream ends, we can close the zipFile
            readStream.on('end', () => {
              // We won't read further entries
              zipFile.close();
            });
            // When the file is finished writing, resolve
            writeStream.on('finish', () => {
              logger.info(`File "${fileInZip}" extracted to "${outputPath}".`);
              resolve(true);
            });
            // Handle write errors
            writeStream.on('error', writeErr => {
              logger.error(extractErrorLogData(writeErr), `Error extracting "${fileInZip}" from "${zipPath}".`);
              reject(writeErr);
            });
          });
        } else {
          // Not the file we want; skip to the next entry
          zipFile.readEntry();
        }
      });
      // If we reach the end without finding the file
      zipFile.on('end', () => {
        if (!fileFound) {
          resolve(false);
        }
      });
      // Handle general errors
      zipFile.on('error', err => {
        logger.error(extractErrorLogData(err), `ZIP file error in ${zipPath}.`);
        reject(err);
      });
    });
  });
}
module.exports = extractSingleFileFromZip;

View File

@@ -22,6 +22,8 @@ const getMapExport = (geoJson) => {
}) })
.addTo(map); .addTo(map);
leaflet.control.scale().addTo(map);
const geoJsonObj = leaflet const geoJsonObj = leaflet
.geoJSON(${JSON.stringify(geoJson)}, { .geoJSON(${JSON.stringify(geoJson)}, {
style: function () { style: function () {

View File

@@ -24,4 +24,15 @@ async function getHealthStatus() {
}; };
} }
module.exports = getHealthStatus; async function getHealthStatusSprinx() {
return {
overallStatus: 'OK',
timeStamp: new Date().toISOString(),
timeStampUnix: Math.floor(Date.now() / 1000),
};
}
module.exports = {
getHealthStatus,
getHealthStatusSprinx,
};

View File

@@ -0,0 +1,41 @@
const yauzl = require('yauzl');
const path = require('path');
/**
* Lists the files in a ZIP archive using yauzl,
* returning an array of { fileName, uncompressedSize } objects.
*
* @param {string} zipPath - The path to the ZIP file.
* @returns {Promise<Array<{fileName: string, uncompressedSize: number}>>}
*/
// Enumerates the entries of a ZIP archive (metadata only, no file data is read)
// and yields them as { fileName, uncompressedSize } records.
function listZipEntries(zipPath) {
  return new Promise((resolve, reject) => {
    yauzl.open(zipPath, { lazyEntries: true }, (openErr, archive) => {
      if (openErr) {
        reject(openErr);
        return;
      }
      const collected = [];
      // Each entry is recorded and iteration continues; openReadStream is never called
      archive.on('entry', item => {
        collected.push({
          fileName: item.fileName,
          uncompressedSize: item.uncompressedSize,
        });
        archive.readEntry();
      });
      // All entries enumerated
      archive.on('end', () => resolve(collected));
      // Surface archive-level failures
      archive.on('error', listErr => reject(listErr));
      // Kick off enumeration of the first entry
      archive.readEntry();
    });
  });
}
module.exports = listZipEntries;

View File

@@ -17,6 +17,7 @@ const processDisplayName = getNamedArg('--process-display-name');
const listenApi = process.argv.includes('--listen-api'); const listenApi = process.argv.includes('--listen-api');
const listenApiChild = process.argv.includes('--listen-api-child') || listenApi; const listenApiChild = process.argv.includes('--listen-api-child') || listenApi;
const runE2eTests = process.argv.includes('--run-e2e-tests'); const runE2eTests = process.argv.includes('--run-e2e-tests');
const encryptionKeyArg = getNamedArg('--encryption-key');
function getPassArgs() { function getPassArgs() {
const res = []; const res = [];
@@ -31,6 +32,9 @@ function getPassArgs() {
if (runE2eTests) { if (runE2eTests) {
res.push('--run-e2e-tests'); res.push('--run-e2e-tests');
} }
if (global['ENCRYPTION_KEY']) {
res.push('--encryption-key', global['ENCRYPTION_KEY']);
}
return res; return res;
} }
@@ -45,4 +49,5 @@ module.exports = {
listenApiChild, listenApiChild,
processDisplayName, processDisplayName,
runE2eTests, runE2eTests,
encryptionKeyArg,
}; };

View File

@@ -57,10 +57,21 @@ function callForwardProcess(connection, tunnelConfig, tunnelCacheKey) {
} }
}); });
subprocess.on('exit', code => { subprocess.on('exit', code => {
logger.info('SSH forward process exited'); logger.info(`SSH forward process exited with code ${code}`);
delete sshTunnelCache[tunnelCacheKey]; delete sshTunnelCache[tunnelCacheKey];
if (!promiseHandled) { if (!promiseHandled) {
reject(new Error('SSH forward process exited, try to change "Local host address for SSH connections" in Settings/Connections')); reject(
new Error(
'SSH forward process exited, try to change "Local host address for SSH connections" in Settings/Connections'
)
);
}
});
subprocess.on('error', error => {
logger.error(extractErrorLogData(error), 'SSH forward process error');
delete sshTunnelCache[tunnelCacheKey];
if (!promiseHandled) {
reject(error);
} }
}); });
}); });

View File

@@ -572,6 +572,27 @@ export function changeSetInsertDocuments(
}; };
} }
export function createMergedRowsChangeSet(
table: TableInfo,
updatedRows: any[],
insertedRows: any[],
mergeKey: string[]
): ChangeSet {
const res = createChangeSet();
res.updates = updatedRows.map(row => ({
pureName: table.pureName,
schemaName: table.schemaName,
fields: _.omit(row, mergeKey),
condition: _.pick(row, mergeKey),
}));
res.inserts = insertedRows.map(row => ({
pureName: table.pureName,
schemaName: table.schemaName,
fields: row,
}));
return res;
}
export function changeSetContainsChanges(changeSet: ChangeSet) { export function changeSetContainsChanges(changeSet: ChangeSet) {
if (!changeSet) return false; if (!changeSet) return false;
return ( return (

View File

@@ -1,326 +0,0 @@
import {
createAsyncWriteStream,
extractErrorLogData,
getLogger,
runCommandOnDriver,
runQueryOnDriver,
} from 'dbgate-tools';
import { DatabaseInfo, EngineDriver, ForeignKeyInfo, TableInfo } from 'dbgate-types';
import _pick from 'lodash/pick';
import _omit from 'lodash/omit';
const logger = getLogger('dataDuplicator');
export interface DataDuplicatorItem {
openStream: () => Promise<ReadableStream>;
name: string;
operation: 'copy' | 'lookup' | 'insertMissing';
matchColumns: string[];
}
export interface DataDuplicatorOptions {
rollbackAfterFinish?: boolean;
skipRowsWithUnresolvedRefs?: boolean;
setNullForUnresolvedNullableRefs?: boolean;
}
class DuplicatorReference {
constructor(
public base: DuplicatorItemHolder,
public ref: DuplicatorItemHolder,
public isMandatory: boolean,
public foreignKey: ForeignKeyInfo
) {}
get columnName() {
return this.foreignKey.columns[0].columnName;
}
}
class DuplicatorWeakReference {
constructor(public base: DuplicatorItemHolder, public ref: TableInfo, public foreignKey: ForeignKeyInfo) {}
get columnName() {
return this.foreignKey.columns[0].columnName;
}
}
/**
 * Per-table import state and logic for one DataDuplicatorItem.
 * Tracks single-column FK references to other duplicated tables and builds an
 * old-id -> new-id map while rows are inserted into the target database.
 */
class DuplicatorItemHolder {
  /** Single-column FKs pointing at other tables in the duplicated set. */
  references: DuplicatorReference[] = [];
  /** References left unresolved to break FK cycles; their columns are omitted from inserts. */
  backReferences: DuplicatorReference[] = [];
  // not mandatory references to entities out of the model
  weakReferences: DuplicatorWeakReference[] = [];
  /** Target table metadata, matched by name (case-insensitive) in the constructor. */
  table: TableInfo;
  /** Set by DataDuplicator.createPlan() when this table has been scheduled. */
  isPlanned = false;
  /** Maps source row id -> id assigned by the target database. */
  idMap = {};
  /** Name of the single-column auto-increment PK, or null when the table has none. */
  autoColumn: string;
  /** Lookup of references by their FK column name, used during id remapping. */
  refByColumn: { [columnName: string]: DuplicatorReference } = {};
  /** True when another duplicated table references this one (generated ids must be captured). */
  isReferenced: boolean;
  get name() {
    return this.item.name;
  }
  constructor(public item: DataDuplicatorItem, public duplicator: DataDuplicator) {
    this.table = duplicator.db.tables.find(x => x.pureName.toUpperCase() == item.name.toUpperCase());
    this.autoColumn = this.table.columns.find(x => x.autoIncrement)?.columnName;
    // only keep the auto column when it is exactly the single-column primary key
    if (
      this.table.primaryKey?.columns?.length != 1 ||
      this.table.primaryKey?.columns?.[0]?.columnName != this.autoColumn
    ) {
      this.autoColumn = null;
    }
  }
  /** Resolves FKs of this table against the other duplicated tables; must run after all holders exist. */
  initializeReferences() {
    for (const fk of this.table.foreignKeys) {
      // multi-column FKs are not supported
      if (fk.columns?.length != 1) continue;
      const refHolder = this.duplicator.itemHolders.find(y => y.name.toUpperCase() == fk.refTableName.toUpperCase());
      const isMandatory = this.table.columns.find(x => x.columnName == fk.columns[0]?.columnName)?.notNull;
      if (refHolder == null) {
        // FK points outside the duplicated set; track it only when nullable
        if (!isMandatory) {
          const weakref = new DuplicatorWeakReference(
            this,
            this.duplicator.db.tables.find(x => x.pureName == fk.refTableName),
            fk
          );
          this.weakReferences.push(weakref);
        }
      } else {
        const newref = new DuplicatorReference(this, refHolder, isMandatory, fk);
        this.references.push(newref);
        this.refByColumn[newref.columnName] = newref;
        refHolder.isReferenced = true;
      }
    }
  }
  /**
   * Builds the object to INSERT from a source row: keeps only real table
   * columns, drops the auto column, back-reference columns and the given
   * weak-ref columns, and remaps FK values through the referenced holders'
   * idMap. Returns null when a mandatory reference cannot be resolved and
   * options.skipRowsWithUnresolvedRefs is set; otherwise throws in that case.
   */
  createInsertObject(chunk, weakrefcols: string[]) {
    const res = _omit(
      _pick(
        chunk,
        this.table.columns.map(x => x.columnName)
      ),
      [this.autoColumn, ...this.backReferences.map(x => x.columnName), ...weakrefcols]
    );
    for (const key in res) {
      const ref = this.refByColumn[key];
      if (ref) {
        // remap id
        res[key] = ref.ref.idMap[res[key]];
        if (ref.isMandatory && res[key] == null) {
          // mandatory reference not matched
          if (this.duplicator.options.skipRowsWithUnresolvedRefs) {
            return null;
          }
          throw new Error(`Unresolved reference, base=${ref.base.name}, ref=${ref.ref.name}, ${key}=${chunk[key]}`);
        }
      }
    }
    return res;
  }
  // returns list of columns that are weak references and are not resolved
  async getMissingWeakRefsForRow(row): Promise<string[]> {
    if (!this.duplicator.options.setNullForUnresolvedNullableRefs || !this.weakReferences?.length) {
      return [];
    }
    // one SELECT returning an EXISTS flag (1/0) per weak reference for this row
    const qres = await runQueryOnDriver(this.duplicator.pool, this.duplicator.driver, dmp => {
      dmp.put('^select ');
      dmp.putCollection(',', this.weakReferences, weakref => {
        dmp.put(
          '(^case ^when ^exists (^select * ^from %f where %i = %v) ^then 1 ^else 0 ^end) as %i',
          weakref.ref,
          weakref.foreignKey.columns[0].refColumnName,
          row[weakref.foreignKey.columns[0].columnName],
          weakref.foreignKey.columns[0].columnName
        );
      });
      if (this.duplicator.driver.dialect.requireFromDual) {
        dmp.put(' ^from ^dual');
      }
    });
    const qrow = qres.rows[0];
    return this.weakReferences.filter(x => qrow[x.columnName] == 0).map(x => x.columnName);
  }
  /**
   * Streams all source rows of this table into the target database according
   * to item.operation, logging progress every 5 seconds.
   * @returns counters { inserted, mapped, missing, skipped }
   */
  async runImport() {
    const readStream = await this.item.openStream();
    const driver = this.duplicator.driver;
    const pool = this.duplicator.pool;
    let inserted = 0;
    let mapped = 0;
    let missing = 0;
    let skipped = 0;
    let lastLogged = new Date();
    // NOTE(review): appears unused in this method
    const existingWeakRefs = {};
    const writeStream = createAsyncWriteStream(this.duplicator.stream, {
      processItem: async chunk => {
        if (chunk.__isStreamHeader) {
          return;
        }
        // inserts the row, remapping FK ids; captures the generated identity when needed
        const doCopy = async () => {
          // console.log('chunk', this.name, JSON.stringify(chunk));
          const weakrefcols = await this.getMissingWeakRefsForRow(chunk);
          const insertedObj = this.createInsertObject(chunk, weakrefcols);
          // console.log('insertedObj', this.name, JSON.stringify(insertedObj));
          if (insertedObj == null) {
            skipped += 1;
            return;
          }
          let res = await runQueryOnDriver(pool, driver, dmp => {
            dmp.put(
              '^insert ^into %f (%,i) ^values (%,v)',
              this.table,
              Object.keys(insertedObj),
              Object.values(insertedObj)
            );
            // fetch the generated id in the same batch when the dialect allows it
            if (
              this.autoColumn &&
              this.isReferenced &&
              !this.duplicator.driver.dialect.requireStandaloneSelectForScopeIdentity
            ) {
              dmp.selectScopeIdentity(this.table);
            }
          });
          inserted += 1;
          if (this.autoColumn && this.isReferenced) {
            if (this.duplicator.driver.dialect.requireStandaloneSelectForScopeIdentity) {
              res = await runQueryOnDriver(pool, driver, dmp => dmp.selectScopeIdentity(this.table));
            }
            // console.log('IDRES', JSON.stringify(res));
            // console.log('*********** ENTRIES OF', res?.rows?.[0]);
            // first column of the first row holds the generated identity
            const resId = Object.entries(res?.rows?.[0])?.[0]?.[1];
            if (resId != null) {
              this.idMap[chunk[this.autoColumn]] = resId;
            }
          }
        };
        switch (this.item.operation) {
          case 'copy': {
            await doCopy();
            break;
          }
          case 'insertMissing':
          case 'lookup': {
            // try to find an existing row by the first match column
            const res = await runQueryOnDriver(pool, driver, dmp =>
              dmp.put(
                '^select %i ^from %f ^where %i = %v',
                this.autoColumn,
                this.table,
                this.item.matchColumns[0],
                chunk[this.item.matchColumns[0]]
              )
            );
            const resId = Object.entries(res?.rows?.[0])?.[0]?.[1];
            if (resId != null) {
              mapped += 1;
              this.idMap[chunk[this.autoColumn]] = resId;
            } else if (this.item.operation == 'insertMissing') {
              await doCopy();
            } else {
              missing += 1;
            }
            break;
          }
        }
        // throttled progress logging
        if (new Date().getTime() - lastLogged.getTime() > 5000) {
          logger.info(
            `Duplicating ${this.item.name} in progress, inserted ${inserted} rows, mapped ${mapped} rows, missing ${missing} rows, skipped ${skipped} rows`
          );
          lastLogged = new Date();
        }
        // this.idMap[oldId] = newId;
      },
    });
    await this.duplicator.copyStream(readStream, writeStream);
    // await this.duplicator.driver.writeQueryStream(this.duplicator.pool, {
    //   mapResultId: (oldId, newId) => {
    //     this.idMap[oldId] = newId;
    //   },
    // });
    return { inserted, mapped, missing, skipped };
  }
}
/**
 * Copies data of several related tables into a database, resolving FK
 * dependencies: tables are imported in topological order and FK values are
 * remapped to newly generated ids. The whole job runs in one transaction.
 */
export class DataDuplicator {
  itemHolders: DuplicatorItemHolder[];
  /** Import order computed by createPlan(). */
  itemPlan: DuplicatorItemHolder[] = [];
  /**
   * @param pool database connection handle
   * @param driver engine driver used to run commands/queries
   * @param db database structure (tables, FKs) of the target database
   * @param items tables to duplicate, with their source streams
   * @param stream stream module passed to createAsyncWriteStream
   * @param copyStream pipes a read stream into a write stream
   * @param options behaviour flags, see DataDuplicatorOptions
   */
  constructor(
    public pool: any,
    public driver: EngineDriver,
    public db: DatabaseInfo,
    public items: DataDuplicatorItem[],
    public stream,
    public copyStream: (input, output) => Promise<void>,
    public options: DataDuplicatorOptions = {}
  ) {
    this.itemHolders = items.map(x => new DuplicatorItemHolder(x, this));
    this.itemHolders.forEach(x => x.initializeReferences());
  }
  /**
   * Picks the next table to import: preferably one whose references are all
   * planned; otherwise one whose unplanned references are all nullable (those
   * become backReferences and their columns are skipped in inserts).
   * @throws Error on a cycle of mandatory references
   */
  findItemToPlan(): DuplicatorItemHolder {
    for (const item of this.itemHolders) {
      if (item.isPlanned) continue;
      if (item.references.every(x => x.ref.isPlanned)) {
        return item;
      }
    }
    for (const item of this.itemHolders) {
      if (item.isPlanned) continue;
      if (item.references.every(x => x.ref.isPlanned || !x.isMandatory)) {
        const backReferences = item.references.filter(x => !x.ref.isPlanned);
        item.backReferences = backReferences;
        return item;
      }
    }
    throw new Error('Cycle in mandatory references');
  }
  /** Orders all tables for import using findItemToPlan(). */
  createPlan() {
    while (this.itemPlan.length < this.itemHolders.length) {
      const item = this.findItemToPlan();
      item.isPlanned = true;
      this.itemPlan.push(item);
    }
  }
  /**
   * Runs the whole import inside one transaction. Rolls back on any error, or
   * at the end when options.rollbackAfterFinish is set; commits otherwise.
   */
  async run() {
    this.createPlan();
    await runCommandOnDriver(this.pool, this.driver, dmp => dmp.beginTransaction());
    try {
      for (const item of this.itemPlan) {
        const stats = await item.runImport();
        logger.info(
          `Duplicated ${item.name}, inserted ${stats.inserted} rows, mapped ${stats.mapped} rows, missing ${stats.missing} rows, skipped ${stats.skipped} rows`
        );
      }
    } catch (err) {
      logger.error(extractErrorLogData(err), `Failed duplicator job, rollbacking. ${err.message}`);
      await runCommandOnDriver(this.pool, this.driver, dmp => dmp.rollbackTransaction());
      return;
    }
    if (this.options.rollbackAfterFinish) {
      logger.info('Rollbacking transaction, nothing was changed');
      await runCommandOnDriver(this.pool, this.driver, dmp => dmp.rollbackTransaction());
    } else {
      logger.info('Committing duplicator transaction');
      await runCommandOnDriver(this.pool, this.driver, dmp => dmp.commitTransaction());
    }
  }
}

View File

@@ -0,0 +1,510 @@
import {
createAsyncWriteStream,
extractErrorLogData,
getLogger,
isTypeNumber,
runCommandOnDriver,
runQueryOnDriver,
SqlDumper,
} from 'dbgate-tools';
import { DatabaseInfo, EngineDriver, ForeignKeyInfo, NamedObjectInfo, QueryResult, TableInfo } from 'dbgate-types';
import _pick from 'lodash/pick';
import _omit from 'lodash/omit';
import stableStringify from 'json-stable-stringify';
const logger = getLogger('dataReplicator');
/** One table to be replicated; rows are read from the stream returned by openStream. */
export interface DataReplicatorItem {
  /** Opens the source row stream (chunks may include a __isStreamHeader record, which is skipped). */
  openStream: () => Promise<ReadableStream>;
  /** Table name; matched case-insensitively against db.tables. */
  name: string;
  /** Per-row predicate: should an existing row be looked up (by matchColumns)? */
  findExisting: (row: any) => boolean;
  /** Per-row predicate: should the row be inserted when no existing row was found? */
  createNew: (row: any) => boolean;
  /** Per-row predicate: should a found existing row be updated? */
  updateExisting: (row: any) => boolean;
  /** When true, target rows not present in the source stream are deleted after the import. */
  deleteMissing: boolean;
  /** Columns restricting the scope of deleteMissing; only rows matching the imported restriction values are considered. */
  deleteRestrictionColumns: string[];
  /** Columns identifying an existing target row for find/update. */
  matchColumns: string[];
}
/** Behaviour flags for DataReplicator.run(). */
export interface DataReplicatorOptions {
  /** When true, the transaction is rolled back at the end (dry run). */
  rollbackAfterFinish?: boolean;
  /** When true, rows with unresolved mandatory references are skipped instead of failing the job. */
  skipRowsWithUnresolvedRefs?: boolean;
  /** When true, unresolved nullable FK columns are omitted from inserts (so they end up NULL/default). */
  setNullForUnresolvedNullableRefs?: boolean;
  /** When true, write commands are dumped into an SQL script (DataReplicator.result) instead of executed. */
  generateSqlScript?: boolean;
  /** Run identifier; not used inside this module — presumably for callers/logging. TODO confirm. */
  runid?: string;
}
/** A single-column FK from table `base` to another replicated table `ref`; used for id remapping. */
class ReplicatorReference {
  constructor(
    public base: ReplicatorItemHolder,
    public ref: ReplicatorItemHolder,
    public isMandatory: boolean,
    public foreignKey: ForeignKeyInfo
  ) {}
  /** FK column on the base table (only single-column FKs are supported). */
  get columnName() {
    return this.foreignKey.columns[0].columnName;
  }
}
/** A nullable single-column FK from `base` to a table outside the replicated set. */
class ReplicatorWeakReference {
  constructor(public base: ReplicatorItemHolder, public ref: TableInfo, public foreignKey: ForeignKeyInfo) {}
  /** FK column on the base table. */
  get columnName() {
    return this.foreignKey.columns[0].columnName;
  }
}
/**
 * Per-table replication state and logic for one DataReplicatorItem.
 * Supports find/update/insert per row, optional delete of rows missing from
 * the source, and id remapping via single-column FK references.
 */
class ReplicatorItemHolder {
  /** Single-column FKs pointing at other tables in the replicated set. */
  references: ReplicatorReference[] = [];
  /** References left unresolved to break FK cycles; their columns are omitted from inserts. */
  backReferences: ReplicatorReference[] = [];
  // not mandatory references to entities out of the model
  weakReferences: ReplicatorWeakReference[] = [];
  /** Target table metadata, matched by name (case-insensitive) in the constructor. */
  table: TableInfo;
  /** Set by DataReplicator.createPlan() when this table has been scheduled. */
  isPlanned = false;
  /** Maps source row id -> id in the target database. */
  idMap = {};
  /** Single-column PK used for id mapping (auto-increment, or numeric PK in manual mode). */
  autoColumn: string;
  /** True when ids are generated client-side via DataReplicator.generateIdentityValue. */
  isManualAutoColumn: boolean;
  /** Lookup of references by their FK column name, used during id remapping. */
  refByColumn: { [columnName: string]: ReplicatorReference } = {};
  /** True when another replicated table references this one (ids must be captured). */
  isReferenced: boolean;
  get name() {
    return this.item.name;
  }
  constructor(public item: DataReplicatorItem, public replicator: DataReplicator) {
    this.table = replicator.db.tables.find(x => x.pureName.toUpperCase() == item.name.toUpperCase());
    this.autoColumn = this.table.columns.find(x => x.autoIncrement)?.columnName;
    // only keep the auto column when it is exactly the single-column primary key
    if (
      this.table.primaryKey?.columns?.length != 1 ||
      this.table.primaryKey?.columns?.[0]?.columnName != this.autoColumn
    ) {
      this.autoColumn = null;
    }
    // no auto-increment PK: fall back to a single numeric PK with manually generated ids
    if (!this.autoColumn && this.table.primaryKey?.columns?.length == 1) {
      const name = this.table.primaryKey.columns[0].columnName;
      const column = this.table.columns.find(x => x.columnName == name);
      if (isTypeNumber(column?.dataType)) {
        this.autoColumn = name;
        this.isManualAutoColumn = true;
      }
    }
    // script generation cannot read back scope identity, so ids must be generated manually
    if (this.autoColumn && this.replicator.options.generateSqlScript) {
      this.isManualAutoColumn = true;
    }
  }
  /** Resolves FKs of this table against the other replicated tables; must run after all holders exist. */
  initializeReferences() {
    for (const fk of this.table.foreignKeys) {
      // multi-column FKs are not supported
      if (fk.columns?.length != 1) continue;
      const refHolder = this.replicator.itemHolders.find(y => y.name.toUpperCase() == fk.refTableName.toUpperCase());
      const isMandatory = this.table.columns.find(x => x.columnName == fk.columns[0]?.columnName)?.notNull;
      if (refHolder == null) {
        // FK points outside the replicated set; track it only when nullable
        if (!isMandatory) {
          const weakref = new ReplicatorWeakReference(
            this,
            this.replicator.db.tables.find(x => x.pureName == fk.refTableName),
            fk
          );
          this.weakReferences.push(weakref);
        }
      } else {
        const newref = new ReplicatorReference(this, refHolder, isMandatory, fk);
        this.references.push(newref);
        this.refByColumn[newref.columnName] = newref;
        refHolder.isReferenced = true;
      }
    }
  }
  /**
   * Builds the object to INSERT from a source row: keeps only real table
   * columns, drops the auto column, back-reference columns and the given
   * weak-ref columns, and remaps FK values through the referenced holders'
   * idMap. Returns null when a mandatory reference cannot be resolved and
   * options.skipRowsWithUnresolvedRefs is set; otherwise throws in that case.
   */
  createInsertObject(chunk, weakrefcols?: string[]) {
    const res = _omit(
      _pick(
        chunk,
        this.table.columns.map(x => x.columnName)
      ),
      [this.autoColumn, ...this.backReferences.map(x => x.columnName), ...(weakrefcols ? weakrefcols : [])]
    );
    for (const key in res) {
      const ref = this.refByColumn[key];
      if (ref) {
        // remap id
        const oldId = res[key];
        res[key] = ref.ref.idMap[oldId];
        if (ref.isMandatory && res[key] == null) {
          // mandatory reference not matched
          if (this.replicator.options.skipRowsWithUnresolvedRefs) {
            return null;
          }
          throw new Error(`Unresolved reference, base=${ref.base.name}, ref=${ref.ref.name}, ${key}=${chunk[key]}`);
        }
      }
    }
    return res;
  }
  /** Builds the object for UPDATE: table columns minus the auto column, back-reference and all FK columns. */
  createUpdateObject(chunk) {
    const res = _omit(
      _pick(
        chunk,
        this.table.columns.map(x => x.columnName)
      ),
      [this.autoColumn, ...this.backReferences.map(x => x.columnName), ...this.references.map(x => x.columnName)]
    );
    return res;
  }
  // returns list of columns that are weak references and are not resolved
  async getMissingWeakRefsForRow(row): Promise<string[]> {
    if (!this.replicator.options.setNullForUnresolvedNullableRefs || !this.weakReferences?.length) {
      return [];
    }
    // one SELECT returning an EXISTS flag (1/0) per weak reference for this row
    const qres = await runQueryOnDriver(this.replicator.pool, this.replicator.driver, dmp => {
      dmp.put('^select ');
      dmp.putCollection(',', this.weakReferences, weakref => {
        dmp.put(
          '(^case ^when ^exists (^select * ^from %f where %i = %v) ^then 1 ^else 0 ^end) as %i',
          weakref.ref,
          weakref.foreignKey.columns[0].refColumnName,
          row[weakref.foreignKey.columns[0].columnName],
          weakref.foreignKey.columns[0].columnName
        );
      });
      if (this.replicator.driver.dialect.requireFromDual) {
        dmp.put(' ^from ^dual');
      }
    });
    const qrow = qres.rows[0];
    return this.weakReferences.filter(x => qrow[x.columnName] == 0).map(x => x.columnName);
  }
  /**
   * Streams all source rows of this table into the target database, applying
   * the find/update/insert predicates per row, then optionally deletes target
   * rows missing from the source.
   * @returns counters { inserted, mapped, missing, skipped, updated, deleted }
   */
  async runImport() {
    const readStream = await this.item.openStream();
    const driver = this.replicator.driver;
    const pool = this.replicator.pool;
    let inserted = 0;
    let mapped = 0;
    let updated = 0;
    let deleted = 0;
    let missing = 0;
    let skipped = 0;
    let lastLogged = new Date();
    const { deleteMissing, deleteRestrictionColumns } = this.item;
    // restriction values seen in the stream, keyed by stable JSON (dedup)
    const deleteRestrictions = {};
    // match-key values of all imported rows, keyed by stable JSON (dedup)
    const usedKeyRows = {};
    const writeStream = createAsyncWriteStream(this.replicator.stream, {
      processItem: async chunk => {
        if (chunk.__isStreamHeader) {
          return;
        }
        // looks up an existing row by matchColumns; records the mapping when found
        const doFind = async () => {
          let insertedObj = this.createInsertObject(chunk);
          const res = await runQueryOnDriver(pool, driver, dmp => {
            dmp.put('^select %i ^from %f ^where ', this.autoColumn, this.table);
            dmp.putCollection(' and ', this.item.matchColumns, x => {
              dmp.put('%i = %v', x, insertedObj[x]);
            });
          });
          const resId = Object.entries(res?.rows?.[0] || {})?.[0]?.[1];
          if (resId != null) {
            mapped += 1;
            this.idMap[chunk[this.autoColumn]] = resId;
          }
          return resId;
        };
        // updates the found row with non-key, non-FK columns
        const doUpdate = async recordId => {
          const updateObj = this.createUpdateObject(chunk);
          if (Object.keys(updateObj).length == 0) {
            skipped += 1;
            return;
          }
          await this.replicator.runDumperCommand(dmp => {
            // NOTE(review): '^ set ' (with a space after ^) differs from the '^set' style used elsewhere — verify dumper format handling
            dmp.put('^update %f ^ set ', this.table);
            dmp.putCollection(',', Object.keys(updateObj), x => {
              dmp.put('%i = %v', x, updateObj[x]);
            });
            dmp.put(' ^where %i = %v', this.autoColumn, recordId);
            dmp.endCommand();
          });
          updated += 1;
        };
        // inserts the row, remapping FK ids; captures or generates the identity when needed
        const doInsert = async () => {
          // console.log('chunk', this.name, JSON.stringify(chunk));
          const weakrefcols = await this.getMissingWeakRefsForRow(chunk);
          let insertedObj = this.createInsertObject(chunk, weakrefcols);
          // console.log('insertedObj', this.name, JSON.stringify(insertedObj));
          if (insertedObj == null) {
            skipped += 1;
            return;
          }
          if (this.isManualAutoColumn) {
            // client-side id generation (no auto-increment or script mode)
            const maxId = await this.replicator.generateIdentityValue(this.autoColumn, this.table);
            insertedObj = {
              ...insertedObj,
              [this.autoColumn]: maxId,
            };
            this.idMap[chunk[this.autoColumn]] = maxId;
          }
          let res = await this.replicator.runDumperQuery(dmp => {
            dmp.put(
              '^insert ^into %f (%,i) ^values (%,v)',
              this.table,
              Object.keys(insertedObj),
              Object.values(insertedObj)
            );
            dmp.endCommand();
            // fetch the generated id in the same batch when the dialect allows it
            if (
              this.autoColumn &&
              this.isReferenced &&
              !this.replicator.driver.dialect.requireStandaloneSelectForScopeIdentity &&
              !this.isManualAutoColumn
            ) {
              dmp.selectScopeIdentity(this.table);
            }
          });
          inserted += 1;
          if (this.autoColumn && this.isReferenced && !this.isManualAutoColumn) {
            if (this.replicator.driver.dialect.requireStandaloneSelectForScopeIdentity) {
              res = await runQueryOnDriver(pool, driver, dmp => dmp.selectScopeIdentity(this.table));
            }
            // console.log('IDRES', JSON.stringify(res));
            // console.log('*********** ENTRIES OF', res?.rows?.[0]);
            // NOTE(review): unlike doFind, rows[0] is not guarded with `|| {}` — throws if the query returns no rows
            const resId = Object.entries(res?.rows?.[0])?.[0]?.[1];
            if (resId != null) {
              this.idMap[chunk[this.autoColumn]] = resId;
            }
            return resId;
          }
        };
        // records this row's restriction/match-key values for the post-import delete
        const doMarkDelete = () => {
          const insertedObj = this.createInsertObject(chunk);
          if (deleteRestrictionColumns?.length > 0) {
            const restriction = _pick(insertedObj, deleteRestrictionColumns);
            const key = stableStringify(restriction);
            deleteRestrictions[key] = restriction;
          }
          const usedKey = _pick(insertedObj, this.item.matchColumns);
          usedKeyRows[stableStringify(usedKey)] = usedKey;
        };
        const findExisting = this.item.findExisting(chunk);
        const updateExisting = this.item.updateExisting(chunk);
        const createNew = this.item.createNew(chunk);
        if (deleteMissing) {
          doMarkDelete();
        }
        let recordId = null;
        if (findExisting) {
          recordId = await doFind();
        }
        if (updateExisting && recordId != null) {
          await doUpdate(recordId);
        }
        if (createNew && recordId == null) {
          recordId = await doInsert();
        }
        if (recordId == null && findExisting) {
          missing += 1;
        }
        // throttled progress logging
        if (new Date().getTime() - lastLogged.getTime() > 5000) {
          logger.info(
            `Replicating ${this.item.name} in progress, inserted ${inserted} rows, mapped ${mapped} rows, missing ${missing} rows, skipped ${skipped} rows, updated ${updated} rows`
          );
          lastLogged = new Date();
        }
        // this.idMap[oldId] = newId;
      },
    });
    // positive: OR of (AND of equals); negative: AND of (OR of not-equals), i.e. De Morgan of "none of these rows"
    const dumpConditionArray = (dmp: SqlDumper, array: any[], positive: boolean) => {
      dmp.putCollection(positive ? ' or ' : ' and ', array, x => {
        dmp.put('(');
        dmp.putCollection(positive ? ' and ' : ' or ', Object.keys(x), y => {
          dmp.put(positive ? '%i = %v' : 'not (%i = %v)', y, x[y]);
        });
        dmp.put(')');
      });
    };
    // WHERE clause: within seen restrictions AND not among the imported match keys
    const dumpDeleteCondition = (dmp: SqlDumper) => {
      const deleteRestrictionValues = Object.values(deleteRestrictions);
      const usedKeyRowsValues = Object.values(usedKeyRows);
      if (deleteRestrictionValues.length == 0 && usedKeyRowsValues.length == 0) {
        return;
      }
      dmp.put(' ^where ');
      if (deleteRestrictionColumns?.length > 0) {
        dmp.put('(');
        dumpConditionArray(dmp, deleteRestrictionValues, true);
        dmp.put(')');
        if (usedKeyRowsValues.length > 0) {
          dmp.put(' ^and ');
        }
      }
      dumpConditionArray(dmp, Object.values(usedKeyRows), false);
    };
    // counts matching rows first, then deletes them (count comes from the live DB even in script mode)
    const doDelete = async () => {
      const countRes = await runQueryOnDriver(pool, driver, dmp => {
        dmp.put('^select count(*) as ~cnt ^from %f', this.table);
        dumpDeleteCondition(dmp);
        dmp.endCommand();
      });
      const count = parseInt(countRes.rows[0].cnt);
      if (count > 0) {
        await this.replicator.runDumperCommand(dmp => {
          dmp.put('^delete ^from %f', this.table);
          dumpDeleteCondition(dmp);
          dmp.endCommand();
        });
        deleted += count;
      }
    };
    await this.replicator.copyStream(readStream, writeStream, {});
    if (deleteMissing) {
      await doDelete();
    }
    // await this.replicator.driver.writeQueryStream(this.replicator.pool, {
    //   mapResultId: (oldId, newId) => {
    //     this.idMap[oldId] = newId;
    //   },
    // });
    return { inserted, mapped, missing, skipped, updated, deleted };
  }
}
/**
 * Replicates data of several related tables into a database (insert / update /
 * delete-missing per table), resolving FK dependencies: tables are processed
 * in topological order and FK values are remapped. Runs in one transaction,
 * or — with options.generateSqlScript — dumps all write commands into an SQL
 * script exposed via `result`.
 */
export class DataReplicator {
  itemHolders: ReplicatorItemHolder[];
  /** Import order computed by createPlan(). */
  itemPlan: ReplicatorItemHolder[] = [];
  /** Generated SQL script (filled at the end of run() from the dumper). */
  result: string = '';
  /** Collects SQL in generateSqlScript mode. */
  dumper: SqlDumper;
  /** Per-table counter for client-side id generation, keyed by schema.table. */
  identityValues: { [fullTableName: string]: number } = {};
  /**
   * @param pool database connection handle
   * @param driver engine driver used to run commands/queries
   * @param db database structure (tables, FKs) of the target database
   * @param items tables to replicate, with their source streams and per-row predicates
   * @param stream stream module passed to createAsyncWriteStream
   * @param copyStream pipes a read stream into a write stream
   * @param options behaviour flags, see DataReplicatorOptions
   */
  constructor(
    public pool: any,
    public driver: EngineDriver,
    public db: DatabaseInfo,
    public items: DataReplicatorItem[],
    public stream,
    public copyStream: (input, output, options) => Promise<void>,
    public options: DataReplicatorOptions = {}
  ) {
    this.itemHolders = items.map(x => new ReplicatorItemHolder(x, this));
    this.itemHolders.forEach(x => x.initializeReferences());
    // @ts-ignore
    this.dumper = driver.createDumper();
  }
  /**
   * Picks the next table to process: preferably one whose references are all
   * planned; otherwise one whose unplanned references are all nullable (those
   * become backReferences and their columns are skipped in inserts).
   * @throws Error on a cycle of mandatory references
   */
  findItemToPlan(): ReplicatorItemHolder {
    for (const item of this.itemHolders) {
      if (item.isPlanned) continue;
      if (item.references.every(x => x.ref.isPlanned)) {
        return item;
      }
    }
    for (const item of this.itemHolders) {
      if (item.isPlanned) continue;
      if (item.references.every(x => x.ref.isPlanned || !x.isMandatory)) {
        const backReferences = item.references.filter(x => !x.ref.isPlanned);
        item.backReferences = backReferences;
        return item;
      }
    }
    throw new Error('Cycle in mandatory references');
  }
  /** Orders all tables for processing using findItemToPlan(). */
  createPlan() {
    while (this.itemPlan.length < this.itemHolders.length) {
      const item = this.findItemToPlan();
      item.isPlanned = true;
      this.itemPlan.push(item);
    }
  }
  /** Writes a command into the SQL dumper (script mode) or executes it on the database. */
  async runDumperCommand(cmd: (dmp: SqlDumper) => void | string): Promise<void> {
    if (this.options.generateSqlScript) {
      cmd(this.dumper);
    } else {
      await runCommandOnDriver(this.pool, this.driver, cmd);
    }
  }
  /** Like runDumperCommand, but returns query results; script mode yields no rows. */
  async runDumperQuery(cmd: (dmp: SqlDumper) => void | string): Promise<QueryResult> {
    if (this.options.generateSqlScript) {
      cmd(this.dumper);
      return {
        rows: [],
      };
    } else {
      return await runQueryOnDriver(this.pool, this.driver, cmd);
    }
  }
  /**
   * Generates the next id for a table with client-side identity: seeds the
   * per-table counter from SELECT MAX(column) on first use, then increments.
   */
  async generateIdentityValue(column: string, table: NamedObjectInfo): Promise<number> {
    const tableKey = `${table.schemaName}.${table.pureName}`;
    if (!(tableKey in this.identityValues)) {
      const max = await runQueryOnDriver(this.pool, this.driver, dmp => {
        dmp.put('^select max(%i) as ~maxid ^from %f', column, table);
      });
      // empty table (or NULL max) starts at 1
      const maxId = Math.max(max.rows[0]['maxid'] ?? 0, 0) + 1;
      this.identityValues[tableKey] = maxId;
      return maxId;
    }
    this.identityValues[tableKey] += 1;
    return this.identityValues[tableKey];
  }
  /**
   * Runs the whole replication inside one transaction. Rolls back on any
   * error (leaving `result` empty), or at the end when
   * options.rollbackAfterFinish is set; commits otherwise. Finally exposes the
   * dumped SQL in `result`.
   */
  async run() {
    this.createPlan();
    await this.runDumperCommand(dmp => dmp.beginTransaction());
    try {
      for (const item of this.itemPlan) {
        const stats = await item.runImport();
        logger.info(
          `Replicated ${item.name}, inserted ${stats.inserted} rows, mapped ${stats.mapped} rows, missing ${stats.missing} rows, skipped ${stats.skipped} rows, updated ${stats.updated} rows, deleted ${stats.deleted} rows`
        );
      }
    } catch (err) {
      logger.error(extractErrorLogData(err), `Failed replicator job, rollbacking. ${err.message}`);
      await this.runDumperCommand(dmp => dmp.rollbackTransaction());
      return;
    }
    if (this.options.rollbackAfterFinish) {
      logger.info('Rollbacking transaction, nothing was changed');
      await this.runDumperCommand(dmp => dmp.rollbackTransaction());
    } else {
      logger.info('Committing replicator transaction');
      await this.runDumperCommand(dmp => dmp.commitTransaction());
    }
    this.result = this.dumper.s;
  }
}

View File

@@ -18,7 +18,7 @@ export * from './processPerspectiveDefaultColunns';
export * from './PerspectiveDataPattern'; export * from './PerspectiveDataPattern';
export * from './PerspectiveDataLoader'; export * from './PerspectiveDataLoader';
export * from './perspectiveTools'; export * from './perspectiveTools';
export * from './DataDuplicator'; export * from './DataReplicator';
export * from './FreeTableGridDisplay'; export * from './FreeTableGridDisplay';
export * from './FreeTableModel'; export * from './FreeTableModel';
export * from './CustomGridDisplay'; export * from './CustomGridDisplay';

View File

@@ -1,6 +1,9 @@
import { arrayToHexString, isTypeDateTime } from 'dbgate-tools'; import { arrayToHexString, evalFilterBehaviour, isTypeDateTime } from 'dbgate-tools';
import { format, toDate } from 'date-fns'; import { format, toDate } from 'date-fns';
import _isString from 'lodash/isString'; import _isString from 'lodash/isString';
import _cloneDeepWith from 'lodash/cloneDeepWith';
import { Condition, Expression } from 'dbgate-sqltree';
import { parseFilter } from './parseFilter';
export type FilterMultipleValuesMode = 'is' | 'is_not' | 'contains' | 'begins' | 'ends'; export type FilterMultipleValuesMode = 'is' | 'is_not' | 'contains' | 'begins' | 'ends';
@@ -61,3 +64,29 @@ export function createMultiLineFilter(mode: FilterMultipleValuesMode, text: stri
} }
return res; return res;
} }
/**
 * Compiles a per-column filter map ({ columnName: filterText }) into a single
 * AND condition tree for client-side row evaluation. Placeholder expressions
 * produced by the filter parser are replaced with references to the filtered
 * column. Filters that fail to parse are silently skipped; returns null when
 * no filter parses (or when `filters` is falsy).
 */
export function compileCompoudEvalCondition(filters: { [column: string]: string }): Condition {
  if (!filters) return null;
  const parsedConditions = [];
  for (const [columnName, filterText] of Object.entries(filters)) {
    try {
      const parsed = parseFilter(filterText, evalFilterBehaviour);
      // swap every placeholder expression for a column reference
      const withColumn = _cloneDeepWith(parsed, (expr: Expression) => {
        if (expr.exprType == 'placeholder') {
          return { exprType: 'column', columnName };
        }
      });
      parsedConditions.push(withColumn);
    } catch (err) {
      // filter parse error - ignore filter
    }
  }
  if (parsedConditions.length == 0) return null;
  return { conditionType: 'and', conditions: parsedConditions };
}

View File

@@ -1,5 +1,5 @@
import type { EngineDriver, SqlDumper } from 'dbgate-types'; import type { EngineDriver, SqlDumper } from 'dbgate-types';
import { Command, Condition } from './types'; import { Command, Condition, Select, Source } from './types';
import { dumpSqlCommand } from './dumpSqlCommand'; import { dumpSqlCommand } from './dumpSqlCommand';
export function treeToSql<T>(driver: EngineDriver, object: T, func: (dmp: SqlDumper, obj: T) => void) { export function treeToSql<T>(driver: EngineDriver, object: T, func: (dmp: SqlDumper, obj: T) => void) {
@@ -43,3 +43,43 @@ export function mergeConditions(condition1: Condition, condition2: Condition): C
conditions: [condition1, condition2], conditions: [condition1, condition2],
}; };
} }
/**
 * Builds a SELECT command (dbgate-sqltree) returning the key columns of the
 * given table, restricted to the given set of key values.
 *
 * The WHERE clause is an OR over `loadKeys` entries; each entry produces an
 * AND of `keyColumns[i] = key[i]` comparisons, so every entry must be ordered
 * like `keyColumns`.
 *
 * @param options.pureName - table name
 * @param options.schemaName - table schema
 * @param options.keyColumns - names of the key columns
 *   (fixed: was annotated `[]`, the always-empty-tuple type — a typo, since
 *   elements are consumed as column names)
 * @param options.loadKeys - key tuples to load, each aligned with keyColumns
 * @returns the Select tree (not serialized SQL)
 */
export function selectKeysFromTable(options: {
  pureName: string;
  schemaName: string;
  keyColumns: string[];
  loadKeys: any[][];
}): Select {
  // single source object shared by the column list and all conditions
  const source: Source = {
    name: { pureName: options.pureName, schemaName: options.schemaName },
  };
  const res: Select = {
    commandType: 'select',
    columns: options.keyColumns.map(col => ({
      exprType: 'column',
      columnName: col,
      source,
    })),
    from: source,
    where: {
      conditionType: 'or',
      conditions: options.loadKeys.map(key => ({
        conditionType: 'and',
        // align each key value with its column by index
        conditions: key.map((keyValue, index) => ({
          conditionType: 'binary',
          operator: '=',
          left: {
            exprType: 'column',
            columnName: options.keyColumns[index],
            source,
          },
          right: {
            exprType: 'value',
            value: keyValue,
          },
        })),
      })),
    },
  };
  return res;
}

View File

@@ -54,8 +54,8 @@ export class ScriptWriter {
this._put(`await dbgateApi.importDatabase(${JSON.stringify(options)});`); this._put(`await dbgateApi.importDatabase(${JSON.stringify(options)});`);
} }
dataDuplicator(options) { dataReplicator(options) {
this._put(`await dbgateApi.dataDuplicator(${JSON.stringify(options, null, 2)});`); this._put(`await dbgateApi.dataReplicator(${JSON.stringify(options, null, 2)});`);
} }
comment(s) { comment(s) {
@@ -72,6 +72,10 @@ export class ScriptWriter {
return prefix + this.s; return prefix + this.s;
} }
zipDirectory(inputDirectory, outputFile) {
this._put(`await dbgateApi.zipDirectory('${inputDirectory}', '${outputFile}');`);
}
} }
export class ScriptWriterJson { export class ScriptWriterJson {
@@ -138,13 +142,21 @@ export class ScriptWriterJson {
}); });
} }
dataDuplicator(options) { dataReplicator(options) {
this.commands.push({ this.commands.push({
type: 'dataDuplicator', type: 'dataReplicator',
options, options,
}); });
} }
zipDirectory(inputDirectory, outputFile) {
this.commands.push({
type: 'zipDirectory',
inputDirectory,
outputFile,
});
}
getScript(schedule = null) { getScript(schedule = null) {
return { return {
type: 'json', type: 'json',
@@ -185,8 +197,11 @@ export function jsonScriptToJavascript(json) {
case 'importDatabase': case 'importDatabase':
script.importDatabase(cmd.options); script.importDatabase(cmd.options);
break; break;
case 'dataDuplicator': case 'dataReplicator':
script.dataDuplicator(cmd.options); script.dataReplicator(cmd.options);
break;
case 'zipDirectory':
script.zipDirectory(cmd.inputDirectory, cmd.outputFile);
break; break;
} }
} }

View File

@@ -7,7 +7,7 @@ export function isTypeNumeric(dataType) {
} }
export function isTypeFloat(dataType) { export function isTypeFloat(dataType) {
return dataType && /float|single|double/i.test(dataType); return dataType && /float|single|double|number/i.test(dataType);
} }
export function isTypeNumber(dataType) { export function isTypeNumber(dataType) {

View File

@@ -100,7 +100,9 @@ export function createBulkInsertStreamBase(driver: EngineDriver, stream, dbhan,
dmp.putRaw(';'); dmp.putRaw(';');
// require('fs').writeFileSync('/home/jena/test.sql', dmp.s); // require('fs').writeFileSync('/home/jena/test.sql', dmp.s);
// console.log(dmp.s); // console.log(dmp.s);
if (rows.length > 0) {
await driver.query(dbhan, dmp.s, { discardResult: true }); await driver.query(dbhan, dmp.s, { discardResult: true });
}
writable.rowsReporter.add(rows.length); writable.rowsReporter.add(rows.length);
} else { } else {
for (const row of rows) { for (const row of rows) {

View File

@@ -549,3 +549,20 @@ export function pinoLogRecordToMessageRecord(logRecord, defaultSeverity = 'info'
severity: levelToSeverity[level] ?? defaultSeverity, severity: levelToSeverity[level] ?? defaultSeverity,
}; };
} }
/**
 * Serializes an array of values into JSON Lines format: one JSON document per
 * line, joined with '\n' (no trailing newline).
 */
export function jsonLinesStringify(jsonArray: any[]): string {
  const lines = [];
  for (const item of jsonArray) {
    lines.push(JSON.stringify(item));
  }
  return lines.join('\n');
}
/**
 * Parses a JSON Lines string into an array of values.
 * Blank/whitespace-only lines and lines that fail to parse are skipped.
 * NOTE: lines parsing to a falsy value (null, 0, false, "") are dropped as
 * well — same truthiness filter as the original implementation.
 */
export function jsonLinesParse(jsonLines: string): any[] {
  const result = [];
  for (const line of jsonLines.split('\n')) {
    if (!line.trim()) continue;
    let parsed;
    try {
      parsed = JSON.parse(line);
    } catch (e) {
      parsed = null;
    }
    if (parsed) {
      result.push(parsed);
    }
  }
  return result;
}

View File

@@ -31,7 +31,7 @@ export type TestEngineInfo = {
skipUnique?: boolean; skipUnique?: boolean;
skipAutoIncrement?: boolean; skipAutoIncrement?: boolean;
skipPkColumnTesting?: boolean; skipPkColumnTesting?: boolean;
skipDataDuplicator?: boolean; skipDataReplicator?: boolean;
skipDeploy?: boolean; skipDeploy?: boolean;
skipStringLength?: boolean; skipStringLength?: boolean;
skipChangeColumn?: boolean; skipChangeColumn?: boolean;

View File

@@ -157,6 +157,7 @@
} }
.snackbar-container { .snackbar-container {
z-index: 1000;
position: fixed; position: fixed;
right: 0; right: 0;
bottom: var(--dim-statusbar-height); bottom: var(--dim-statusbar-height);

View File

@@ -1,5 +1,5 @@
<script lang="ts" context="module"> <script lang="ts" context="module">
function openArchive(fileName, folderName) { async function openArchive(fileName, folderName) {
openNewTab({ openNewTab({
title: fileName, title: fileName,
icon: 'img archive', icon: 'img archive',
@@ -10,13 +10,17 @@
archiveFolder: folderName, archiveFolder: folderName,
}, },
}); });
// }
} }
async function openTextFile(fileName, fileType, folderName, tabComponent, icon) { async function openTextFile(fileName, fileType, folderName, tabComponent, icon) {
const connProps: any = {}; const connProps: any = {};
let tooltip = undefined; let tooltip = undefined;
const isZipped = folderName.endsWith('.zip');
const resp = await apiCall('files/load', { const resp = isZipped
? await apiCall('files/download-text', { uri: `zip://archive:${folderName}//${fileName}.jsonl` })
: await apiCall('files/load', {
folder: 'archive:' + folderName, folder: 'archive:' + folderName,
file: fileName + '.' + fileType, file: fileName + '.' + fileType,
format: 'text', format: 'text',
@@ -58,7 +62,7 @@
if (data.fileType == 'jsonl') { if (data.fileType == 'jsonl') {
return 'img archive'; return 'img archive';
} }
return ARCHIVE_ICONS[data.fileType]; return ARCHIVE_ICONS[data.fileType] ?? 'img anyfile';
} }
</script> </script>
@@ -79,6 +83,7 @@
import { openImportExportTab } from '../utility/importExportTools'; import { openImportExportTab } from '../utility/importExportTools';
export let data; export let data;
$: isZipped = data.folderName?.endsWith('.zip');
const handleRename = () => { const handleRename = () => {
showModal(InputTextModal, { showModal(InputTextModal, {
@@ -112,6 +117,9 @@
openArchive(data.fileName, data.folderName); openArchive(data.fileName, data.folderName);
}; };
const handleClick = () => { const handleClick = () => {
if (!data.fileType) {
return;
}
if (data.fileType == 'jsonl') { if (data.fileType == 'jsonl') {
handleOpenArchive(); handleOpenArchive();
} }
@@ -133,11 +141,15 @@
}; };
function createMenu() { function createMenu() {
if (!data.fileType) {
return [];
}
return [ return [
data.fileType == 'jsonl' && { text: 'Open', onClick: handleOpenArchive }, data.fileType == 'jsonl' && { text: 'Open', onClick: handleOpenArchive },
data.fileType == 'jsonl' && { text: 'Open in text editor', onClick: handleOpenJsonLinesText }, data.fileType == 'jsonl' && { text: 'Open in text editor', onClick: handleOpenJsonLinesText },
{ text: 'Delete', onClick: handleDelete }, !isZipped && { text: 'Delete', onClick: handleDelete },
{ text: 'Rename', onClick: handleRename }, !isZipped && { text: 'Rename', onClick: handleRename },
data.fileType == 'jsonl' && data.fileType == 'jsonl' &&
createQuickExportMenu( createQuickExportMenu(
fmt => async () => { fmt => async () => {
@@ -174,6 +186,7 @@
), ),
data.fileType.endsWith('.sql') && { text: 'Open SQL', onClick: handleOpenSqlFile }, data.fileType.endsWith('.sql') && { text: 'Open SQL', onClick: handleOpenSqlFile },
data.fileType.endsWith('.yaml') && { text: 'Open YAML', onClick: handleOpenYamlFile }, data.fileType.endsWith('.yaml') && { text: 'Open YAML', onClick: handleOpenYamlFile },
!isZipped &&
data.fileType == 'jsonl' && { data.fileType == 'jsonl' && {
text: 'Open in profiler', text: 'Open in profiler',
submenu: getExtensions() submenu: getExtensions()

View File

@@ -20,6 +20,7 @@
import hasPermission from '../utility/hasPermission'; import hasPermission from '../utility/hasPermission';
import { isProApp } from '../utility/proTools'; import { isProApp } from '../utility/proTools';
import { extractShellConnection } from '../impexp/createImpExpScript'; import { extractShellConnection } from '../impexp/createImpExpScript';
import { saveFileToDisk } from '../utility/exportFileTools';
export let data; export let data;
@@ -100,7 +101,7 @@ await dbgateApi.deployDb(${JSON.stringify(
props: { props: {
conid: $currentDatabase?.connection?._id, conid: $currentDatabase?.connection?._id,
database: $currentDatabase?.name, database: $currentDatabase?.name,
} },
}, },
{ {
editor: { editor: {
@@ -113,12 +114,12 @@ await dbgateApi.deployDb(${JSON.stringify(
); );
}; };
const handleOpenDuplicatorTab = () => { const handleOpenDataDeployTab = () => {
openNewTab( openNewTab(
{ {
title: data.name, title: data.name,
icon: 'img duplicator', icon: 'img data-deploy',
tabComponent: 'DataDuplicatorTab', tabComponent: 'DataDeployTab',
props: { props: {
conid: $currentDatabase?.connection?._id, conid: $currentDatabase?.connection?._id,
database: $currentDatabase?.name, database: $currentDatabase?.name,
@@ -127,21 +128,56 @@ await dbgateApi.deployDb(${JSON.stringify(
{ {
editor: { editor: {
archiveFolder: data.name, archiveFolder: data.name,
conid: $currentDatabase?.connection?._id,
database: $currentDatabase?.name,
}, },
} }
); );
}; };
const handleZipUnzip = async method => {
await apiCall(method, {
folder: data.name,
});
};
const handleDownloadZip = async () => {
saveFileToDisk(
async filePath => {
const zipped = await apiCall('archive/get-zipped-path', {
folder: data.name,
});
await apiCall('files/simple-copy', {
sourceFilePath: zipped.filePath,
targetFilePath: filePath,
});
},
{
formatLabel: 'ZIP files',
formatExtension: 'zip',
defaultFileName: data.name?.endsWith('.zip') ? data.name : data.name + '.zip',
}
);
};
function createMenu() { function createMenu() {
return [ return [
data.name != 'default' && { text: 'Delete', onClick: handleDelete }, data.name != 'default' && { text: 'Delete', onClick: handleDelete },
data.name != 'default' && { text: 'Rename', onClick: handleRename }, data.name != 'default' && { text: 'Rename', onClick: handleRename },
data.name != 'default' && data.name != 'default' &&
$currentDatabase && [ $currentDatabase && [
{ text: 'Data duplicator', onClick: handleOpenDuplicatorTab }, isProApp() && { text: 'Data deployer', onClick: handleOpenDataDeployTab },
{ text: 'Generate deploy DB SQL', onClick: handleGenerateDeploySql }, { text: 'Generate deploy DB SQL', onClick: handleGenerateDeploySql },
{ text: 'Shell: Deploy DB', onClick: handleGenerateDeployScript }, { text: 'Shell: Deploy DB', onClick: handleGenerateDeployScript },
], ],
data.name != 'default' &&
isProApp() &&
data.name.endsWith('.zip') && { text: 'Unpack ZIP', onClick: () => handleZipUnzip('archive/unzip') },
data.name != 'default' &&
isProApp() &&
!data.name.endsWith('.zip') && { text: 'Pack (create ZIP)', onClick: () => handleZipUnzip('archive/zip') },
isProApp() && { text: 'Download ZIP', onClick: handleDownloadZip },
data.name != 'default' && data.name != 'default' &&
hasPermission('dbops/model/compare') && hasPermission('dbops/model/compare') &&
@@ -158,7 +194,7 @@ await dbgateApi.deployDb(${JSON.stringify(
{...$$restProps} {...$$restProps}
{data} {data}
title={data.name.endsWith('.link') ? data.name.slice(0, -5) : data.name} title={data.name.endsWith('.link') ? data.name.slice(0, -5) : data.name}
icon={data.name.endsWith('.link') ? 'img link' : 'img archive-folder'} icon={data.name.endsWith('.link') ? 'img link' : data.name.endsWith('.zip') ? 'img zipfile' : 'img archive-folder'}
isBold={data.name == $currentArchive} isBold={data.name == $currentArchive}
on:click={() => ($currentArchive = data.name)} on:click={() => ($currentArchive = data.name)}
menu={createMenu} menu={createMenu}

View File

@@ -330,15 +330,15 @@ await dbgateApi.dropAllDbObjects(${JSON.stringify(
}); });
}; };
const handleImportWithDbDuplicator = () => { const handleShowDataDeployer = () => {
showModal(ChooseArchiveFolderModal, { showModal(ChooseArchiveFolderModal, {
message: 'Choose archive folder for import from', message: 'Choose archive folder for data deployer',
onConfirm: archiveFolder => { onConfirm: archiveFolder => {
openNewTab( openNewTab(
{ {
title: archiveFolder, title: archiveFolder,
icon: 'img duplicator', icon: 'img replicator',
tabComponent: 'DataDuplicatorTab', tabComponent: 'DataDeployerTab',
props: { props: {
conid: connection?._id, conid: connection?._id,
database: name, database: name,
@@ -439,8 +439,8 @@ await dbgateApi.dropAllDbObjects(${JSON.stringify(
driver?.databaseEngineTypes?.includes('sql') && driver?.databaseEngineTypes?.includes('sql') &&
hasPermission(`dbops/import`) && { hasPermission(`dbops/import`) && {
onClick: handleImportWithDbDuplicator, onClick: handleShowDataDeployer,
text: 'Import with DB duplicator', text: 'Data deployer',
}, },
{ divider: true }, { divider: true },

View File

@@ -7,6 +7,8 @@
tabComponent: string; tabComponent: string;
folder: string; folder: string;
currentConnection: boolean; currentConnection: boolean;
extension: string;
label: string;
} }
const sql: FileTypeHandler = { const sql: FileTypeHandler = {
@@ -15,6 +17,8 @@
tabComponent: 'QueryTab', tabComponent: 'QueryTab',
folder: 'sql', folder: 'sql',
currentConnection: true, currentConnection: true,
extension: 'sql',
label: 'SQL file',
}; };
const shell: FileTypeHandler = { const shell: FileTypeHandler = {
@@ -23,6 +27,8 @@
tabComponent: 'ShellTab', tabComponent: 'ShellTab',
folder: 'shell', folder: 'shell',
currentConnection: false, currentConnection: false,
extension: 'js',
label: 'JavaScript Shell script',
}; };
const markdown: FileTypeHandler = { const markdown: FileTypeHandler = {
@@ -31,6 +37,8 @@
tabComponent: 'MarkdownEditorTab', tabComponent: 'MarkdownEditorTab',
folder: 'markdown', folder: 'markdown',
currentConnection: false, currentConnection: false,
extension: 'md',
label: 'Markdown file',
}; };
const charts: FileTypeHandler = { const charts: FileTypeHandler = {
@@ -39,6 +47,8 @@
tabComponent: 'ChartTab', tabComponent: 'ChartTab',
folder: 'charts', folder: 'charts',
currentConnection: true, currentConnection: true,
extension: 'json',
label: 'Chart file',
}; };
const query: FileTypeHandler = { const query: FileTypeHandler = {
@@ -47,6 +57,8 @@
tabComponent: 'QueryDesignTab', tabComponent: 'QueryDesignTab',
folder: 'query', folder: 'query',
currentConnection: true, currentConnection: true,
extension: 'json',
label: 'Query design file',
}; };
const sqlite: FileTypeHandler = { const sqlite: FileTypeHandler = {
@@ -55,6 +67,8 @@
tabComponent: null, tabComponent: null,
folder: 'sqlite', folder: 'sqlite',
currentConnection: true, currentConnection: true,
extension: 'sqlite',
label: 'SQLite database',
}; };
const diagrams: FileTypeHandler = { const diagrams: FileTypeHandler = {
@@ -63,22 +77,52 @@
tabComponent: 'DiagramTab', tabComponent: 'DiagramTab',
folder: 'diagrams', folder: 'diagrams',
currentConnection: true, currentConnection: true,
extension: 'json',
label: 'Diagram file',
}; };
const jobs: FileTypeHandler = { const impexp: FileTypeHandler = {
icon: 'img export', icon: 'img export',
format: 'json', format: 'json',
tabComponent: 'ImportExportTab', tabComponent: 'ImportExportTab',
folder: 'jobs', folder: 'impexp',
currentConnection: false, currentConnection: false,
extension: 'json',
label: 'Import/Export file',
}; };
const datadeploy: FileTypeHandler = isProApp()
? {
icon: 'img data-deploy',
format: 'json',
tabComponent: 'DataDeployTab',
folder: 'datadeploy',
currentConnection: false,
extension: 'json',
label: 'Data deploy file',
}
: undefined;
const dbcompare: FileTypeHandler = isProApp()
? {
icon: 'img compare',
format: 'json',
tabComponent: 'CompareModelTab',
folder: 'dbcompare',
currentConnection: false,
extension: 'json',
label: 'Database compare file',
}
: undefined;
const perspectives: FileTypeHandler = { const perspectives: FileTypeHandler = {
icon: 'img perspective', icon: 'img perspective',
format: 'json', format: 'json',
tabComponent: 'PerspectiveTab', tabComponent: 'PerspectiveTab',
folder: 'pesrpectives', folder: 'pesrpectives',
currentConnection: true, currentConnection: true,
extension: 'json',
label: 'Perspective file',
}; };
const modtrans: FileTypeHandler = { const modtrans: FileTypeHandler = {
@@ -87,6 +131,8 @@
tabComponent: 'ModelTransformTab', tabComponent: 'ModelTransformTab',
folder: 'modtrans', folder: 'modtrans',
currentConnection: false, currentConnection: false,
extension: 'json',
label: 'Model transform file',
}; };
export const SAVED_FILE_HANDLERS = { export const SAVED_FILE_HANDLERS = {
@@ -98,8 +144,10 @@
sqlite, sqlite,
diagrams, diagrams,
perspectives, perspectives,
jobs, impexp,
modtrans, modtrans,
datadeploy,
dbcompare,
}; };
export const extractKey = data => data.file; export const extractKey = data => data.file;
@@ -122,6 +170,8 @@
import openNewTab from '../utility/openNewTab'; import openNewTab from '../utility/openNewTab';
import AppObjectCore from './AppObjectCore.svelte'; import AppObjectCore from './AppObjectCore.svelte';
import { isProApp } from '../utility/proTools';
import { saveFileToDisk } from '../utility/exportFileTools';
export let data; export let data;
@@ -148,6 +198,7 @@
hasPermission(`files/${data.folder}/write`) && { text: 'Create copy', onClick: handleCopy }, hasPermission(`files/${data.folder}/write`) && { text: 'Create copy', onClick: handleCopy },
hasPermission(`files/${data.folder}/write`) && { text: 'Delete', onClick: handleDelete }, hasPermission(`files/${data.folder}/write`) && { text: 'Delete', onClick: handleDelete },
folder == 'markdown' && { text: 'Show page', onClick: showMarkdownPage }, folder == 'markdown' && { text: 'Show page', onClick: showMarkdownPage },
{ text: 'Download', onClick: handleDownload },
]; ];
} }
@@ -182,6 +233,19 @@
}); });
}; };
const handleDownload = () => {
saveFileToDisk(
async filePath => {
await apiCall('files/export-file', {
folder,
file: data.file,
filePath,
});
},
{ formatLabel: handler.label, formatExtension: handler.format, defaultFileName: data.file }
);
};
async function openTab() { async function openTab() {
const resp = await apiCall('files/load', { folder, file: data.file, format: handler.format }); const resp = await apiCall('files/load', { folder, file: data.file, format: handler.format });

View File

@@ -17,6 +17,7 @@
border-radius: 2px; border-radius: 2px;
position: relative; position: relative;
top: 3px; top: 3px;
font-size: 10pt;
} }
label:hover:not(.disabled) { label:hover:not(.disabled) {

View File

@@ -0,0 +1,61 @@
<script lang="ts">
import _ from 'lodash';
import InlineButton from '../buttons/InlineButton.svelte';
import FontIcon from '../icons/FontIcon.svelte';
import getElectron from '../utility/getElectron';
import InlineButtonLabel from '../buttons/InlineButtonLabel.svelte';
import resolveApi, { resolveApiHeaders } from '../utility/resolveApi';
import uuidv1 from 'uuid/v1';
export let filters;
export let onProcessFile;
export let icon = 'icon plus-thick';
const inputId = `uploadFileButton-${uuidv1()}`;
const electron = getElectron();
async function handleUploadedFile(e) {
const files = [...e.target.files];
for (const file of files) {
const formData = new FormData();
formData.append('name', file.name);
formData.append('data', file);
const fetchOptions = {
method: 'POST',
body: formData,
headers: resolveApiHeaders(),
};
const apiBase = resolveApi();
const resp = await fetch(`${apiBase}/uploads/upload`, fetchOptions);
const { filePath, originalName } = await resp.json();
await onProcessFile(filePath, originalName);
}
}
async function handleOpenElectronFile() {
const filePaths = await electron.showOpenDialog({
filters,
properties: ['showHiddenFiles', 'openFile'],
});
const filePath = filePaths && filePaths[0];
if (!filePath) return;
onProcessFile(filePath, filePath.split(/[\/\\]/).pop());
}
</script>
{#if electron}
<InlineButton on:click={handleOpenElectronFile} title="Open file" data-testid={$$props['data-testid']}>
<FontIcon {icon} />
</InlineButton>
{:else}
<InlineButtonLabel on:click={() => {}} title="Upload file" data-testid={$$props['data-testid']} htmlFor={inputId}>
<FontIcon {icon} />
</InlineButtonLabel>
{/if}
<input type="file" id={inputId} hidden on:change={handleUploadedFile} />

View File

@@ -13,7 +13,7 @@
} }
</script> </script>
<div class="button" on:click={handleClick} class:disabled class:fillHorizontal> <div class="button" on:click={handleClick} class:disabled class:fillHorizontal data-testid={$$props['data-testid']}>
<div class="icon"> <div class="icon">
<FontIcon {icon} /> <FontIcon {icon} />
</div> </div>

View File

@@ -47,6 +47,7 @@ import newTable from '../tableeditor/newTable';
import { isProApp } from '../utility/proTools'; import { isProApp } from '../utility/proTools';
import { openWebLink } from '../utility/simpleTools'; import { openWebLink } from '../utility/simpleTools';
import { _t } from '../translations'; import { _t } from '../translations';
import ExportImportConnectionsModal from '../modals/ExportImportConnectionsModal.svelte';
// function themeCommand(theme: ThemeDefinition) { // function themeCommand(theme: ThemeDefinition) {
// return { // return {
@@ -530,6 +531,44 @@ registerCommand({
}, },
}); });
registerCommand({
id: 'app.exportConnections',
category: 'Settings',
name: 'Export connections',
testEnabled: () => getElectron() != null,
onClick: () => {
showModal(ExportImportConnectionsModal, {
mode: 'export',
});
},
});
registerCommand({
id: 'app.importConnections',
category: 'Settings',
name: 'Import connections',
testEnabled: () => getElectron() != null,
onClick: async () => {
const files = await electron.showOpenDialog({
properties: ['showHiddenFiles', 'openFile'],
filters: [
{
name: `All supported files`,
extensions: ['zip'],
},
{ name: `ZIP files`, extensions: ['zip'] },
],
});
if (files?.length > 0) {
showModal(ExportImportConnectionsModal, {
mode: 'import',
uploadedFilePath: files[0],
});
}
},
});
registerCommand({ registerCommand({
id: 'file.import', id: 'file.import',
category: 'File', category: 'File',

View File

@@ -7,6 +7,7 @@
export let value; export let value;
export let jsonParsedValue = undefined; export let jsonParsedValue = undefined;
export let editorTypes; export let editorTypes;
export let rightMargin = false;
$: stringified = stringifyCellValue( $: stringified = stringifyCellValue(
value, value,
@@ -20,7 +21,7 @@
{#if rowData == null} {#if rowData == null}
<span class="null">(No row)</span> <span class="null">(No row)</span>
{:else} {:else}
<span class={stringified.gridStyle} title={stringified.gridTitle}>{stringified.value}</span> <span class={stringified.gridStyle} title={stringified.gridTitle} class:rightMargin>{stringified.value}</span>
{/if} {/if}
<style> <style>
@@ -31,4 +32,8 @@
.valueCellStyle { .valueCellStyle {
color: var(--theme-icon-green); color: var(--theme-icon-green);
} }
.rightMargin {
margin-right: 16px;
}
</style> </style>

View File

@@ -21,6 +21,7 @@
export let isModifiedCell = false; export let isModifiedCell = false;
export let isInserted = false; export let isInserted = false;
export let isDeleted = false; export let isDeleted = false;
export let isMissing = false;
export let isAutofillSelected = false; export let isAutofillSelected = false;
export let isFocusedColumn = false; export let isFocusedColumn = false;
export let domCell = undefined; export let domCell = undefined;
@@ -33,6 +34,9 @@
export let onSetValue; export let onSetValue;
export let editorTypes = null; export let editorTypes = null;
export let isReadonly; export let isReadonly;
export let hasOverlayValue = false;
export let overlayValue = null;
export let isMissingOverlayField = false;
$: value = col.isStructured ? _.get(rowData || {}, col.uniquePath) : (rowData || {})[col.uniqueName]; $: value = col.isStructured ? _.get(rowData || {}, col.uniquePath) : (rowData || {})[col.uniqueName];
@@ -68,13 +72,31 @@
class:isModifiedCell class:isModifiedCell
class:isInserted class:isInserted
class:isDeleted class:isDeleted
class:isMissing
class:isAutofillSelected class:isAutofillSelected
class:isFocusedColumn class:isFocusedColumn
class:hasOverlayValue
class:isMissingOverlayField
class:alignRight={_.isNumber(value) && !showHint} class:alignRight={_.isNumber(value) && !showHint}
{style} {style}
> >
{#if hasOverlayValue}
<div class="flex1 flex">
<div class="replacedValue overlayCell overlayCell1">
<CellValue {rowData} {value} {jsonParsedValue} {editorTypes} /> <CellValue {rowData} {value} {jsonParsedValue} {editorTypes} />
</div>
<div class="overlayCell overlayCell2">
<CellValue {rowData} value={overlayValue} {editorTypes} />
</div>
</div>
{:else}
<CellValue
{rowData}
{value}
{jsonParsedValue}
{editorTypes}
rightMargin={_.isNumber(value) && !showHint && (editorTypes?.explicitDataType || col.foreignKey)}
/>
{#if showHint} {#if showHint}
<span class="hint" <span class="hint"
>{col.hintColumnNames.map(hintColumnName => rowData[hintColumnName]).join(col.hintColumnDelimiter || ' ')}</span >{col.hintColumnNames.map(hintColumnName => rowData[hintColumnName]).join(col.hintColumnDelimiter || ' ')}</span
@@ -132,6 +154,7 @@
{#if showSlot} {#if showSlot}
<slot /> <slot />
{/if} {/if}
{/if}
</td> </td>
<!-- {#if _.isArray(value.data)} <!-- {#if _.isArray(value.data)}
@@ -175,6 +198,9 @@
td.isDeleted { td.isDeleted {
background: var(--theme-bg-volcano); background: var(--theme-bg-volcano);
} }
td.isMissing {
background: var(--theme-bg-volcano);
}
td.isSelected { td.isSelected {
background: var(--theme-bg-3); background: var(--theme-bg-3);
} }
@@ -182,9 +208,9 @@
background: var(--theme-bg-selected); background: var(--theme-bg-selected);
} }
td.isDeleted { td.isDeleted {
background-image: url('data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAUAAAAFCAYAAACNbyblAAAAEElEQVQImWNgIAX8x4KJBAD+agT8INXz9wAAAABJRU5ErkJggg=='); background-image: url('data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAUAAAAFCAYAAACNbyblAAAAEElEQVQImWNgIAX8x4KJBAD+agT8INXz9wAAAABJRU5ErkJggg==') !important;
background-repeat: repeat-x; background-repeat: repeat-x !important;
background-position: 50% 50%; background-position: 50% 50% !important;
} }
.hint { .hint {
@@ -207,4 +233,31 @@
color: var(--theme-icon-green); color: var(--theme-icon-green);
text-align: var(--data-grid-numbers-align); text-align: var(--data-grid-numbers-align);
} }
.hasOverlayValue .overlayCell {
width: 50%;
overflow: hidden;
}
.hasOverlayValue .overlayCell1 {
margin-right: 5px;
}
.hasOverlayValue .overlayCell2 {
margin-left: 5px;
}
.replacedValue {
background-image: url('data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAUAAAAFCAYAAACNbyblAAAAEElEQVQImWNgIAX8x4KJBAD+agT8INXz9wAAAABJRU5ErkJggg==');
background-repeat: repeat-x;
background-position: 50% 50%;
}
td.isMissingOverlayField {
background: var(--theme-bg-orange);
background-image: url('data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAUAAAAFCAYAAACNbyblAAAAEElEQVQImWNgIAX8x4KJBAD+agT8INXz9wAAAABJRU5ErkJggg==');
background-repeat: repeat-x;
background-position: 50% 50%;
}
</style> </style>

View File

@@ -282,48 +282,59 @@
testEnabled: () => getCurrentDataGrid()?.editCellValueEnabled(), testEnabled: () => getCurrentDataGrid()?.editCellValueEnabled(),
onClick: () => getCurrentDataGrid().editCellValue(), onClick: () => getCurrentDataGrid().editCellValue(),
}); });
if (isProApp()) {
registerCommand({ registerCommand({
id: 'dataGrid.mergeSelectedCellsIntoMirror', id: 'dataGrid.sendToDataDeploy',
category: 'Data grid', category: 'Data grid',
name: 'Merge selected cells', name: 'Send to data deployer',
testEnabled: () => getCurrentDataGrid()?.mirrorWriteEnabled(true), testEnabled: () => getCurrentDataGrid()?.sendToDataDeployEnabled(),
onClick: () => getCurrentDataGrid().mergeSelectionIntoMirror({ mergeMode: 'merge', fullRows: false }), onClick: () => getCurrentDataGrid().sendToDataDeploy(),
});
registerCommand({
id: 'dataGrid.mergeSelectedRowsIntoMirror',
category: 'Data grid',
name: 'Merge selected rows',
testEnabled: () => getCurrentDataGrid()?.mirrorWriteEnabled(true),
onClick: () => getCurrentDataGrid().mergeSelectionIntoMirror({ mergeMode: 'merge', fullRows: true }),
});
registerCommand({
id: 'dataGrid.appendSelectedCellsIntoMirror',
category: 'Data grid',
name: 'Append selected cells',
testEnabled: () => getCurrentDataGrid()?.mirrorWriteEnabled(true),
onClick: () => getCurrentDataGrid().mergeSelectionIntoMirror({ mergeMode: 'append', fullRows: false }),
});
registerCommand({
id: 'dataGrid.appendSelectedRowsIntoMirror',
category: 'Data grid',
name: 'Append selected rows',
testEnabled: () => getCurrentDataGrid()?.mirrorWriteEnabled(true),
onClick: () => getCurrentDataGrid().mergeSelectionIntoMirror({ mergeMode: 'append', fullRows: true }),
});
registerCommand({
id: 'dataGrid.replaceSelectedCellsIntoMirror',
category: 'Data grid',
name: 'Replace with selected cells',
testEnabled: () => getCurrentDataGrid()?.mirrorWriteEnabled(true),
onClick: () => getCurrentDataGrid().mergeSelectionIntoMirror({ mergeMode: 'replace', fullRows: false }),
});
registerCommand({
id: 'dataGrid.replaceSelectedRowsIntoMirror',
category: 'Data grid',
name: 'Replace with selected rows',
testEnabled: () => getCurrentDataGrid()?.mirrorWriteEnabled(true),
onClick: () => getCurrentDataGrid().mergeSelectionIntoMirror({ mergeMode: 'replace', fullRows: true }),
}); });
}
// registerCommand({
// id: 'dataGrid.mergeSelectedCellsIntoMirror',
// category: 'Data grid',
// name: 'Merge selected cells',
// testEnabled: () => getCurrentDataGrid()?.mirrorWriteEnabled(true),
// onClick: () => getCurrentDataGrid().mergeSelectionIntoMirror({ mergeMode: 'merge', fullRows: false }),
// });
// registerCommand({
// id: 'dataGrid.mergeSelectedRowsIntoMirror',
// category: 'Data grid',
// name: 'Merge selected rows',
// testEnabled: () => getCurrentDataGrid()?.mirrorWriteEnabled(true),
// onClick: () => getCurrentDataGrid().mergeSelectionIntoMirror({ mergeMode: 'merge', fullRows: true }),
// });
// registerCommand({
// id: 'dataGrid.appendSelectedCellsIntoMirror',
// category: 'Data grid',
// name: 'Append selected cells',
// testEnabled: () => getCurrentDataGrid()?.mirrorWriteEnabled(true),
// onClick: () => getCurrentDataGrid().mergeSelectionIntoMirror({ mergeMode: 'append', fullRows: false }),
// });
// registerCommand({
// id: 'dataGrid.appendSelectedRowsIntoMirror',
// category: 'Data grid',
// name: 'Append selected rows',
// testEnabled: () => getCurrentDataGrid()?.mirrorWriteEnabled(true),
// onClick: () => getCurrentDataGrid().mergeSelectionIntoMirror({ mergeMode: 'append', fullRows: true }),
// });
// registerCommand({
// id: 'dataGrid.replaceSelectedCellsIntoMirror',
// category: 'Data grid',
// name: 'Replace with selected cells',
// testEnabled: () => getCurrentDataGrid()?.mirrorWriteEnabled(true),
// onClick: () => getCurrentDataGrid().mergeSelectionIntoMirror({ mergeMode: 'replace', fullRows: false }),
// });
// registerCommand({
// id: 'dataGrid.replaceSelectedRowsIntoMirror',
// category: 'Data grid',
// name: 'Replace with selected rows',
// testEnabled: () => getCurrentDataGrid()?.mirrorWriteEnabled(true),
// onClick: () => getCurrentDataGrid().mergeSelectionIntoMirror({ mergeMode: 'replace', fullRows: true }),
// });
function getSelectedCellsInfo(selectedCells, grider, realColumnUniqueNames, selectedRowData) { function getSelectedCellsInfo(selectedCells, grider, realColumnUniqueNames, selectedRowData) {
if (selectedCells.length > 1 && selectedCells.every(x => _.isNumber(x[0]) && _.isNumber(x[1]))) { if (selectedCells.length > 1 && selectedCells.every(x => _.isNumber(x[0]) && _.isNumber(x[1]))) {
@@ -418,6 +429,8 @@
import contextMenuActivator from '../utility/contextMenuActivator'; import contextMenuActivator from '../utility/contextMenuActivator';
import InputTextModal from '../modals/InputTextModal.svelte'; import InputTextModal from '../modals/InputTextModal.svelte';
import { _t } from '../translations'; import { _t } from '../translations';
import { isProApp } from '../utility/proTools';
import SaveArchiveModal from '../modals/SaveArchiveModal.svelte';
export let onLoadNextData = undefined; export let onLoadNextData = undefined;
export let grider = undefined; export let grider = undefined;
@@ -454,6 +467,8 @@
export let jslid; export let jslid;
// export let generalAllowSave = false; // export let generalAllowSave = false;
export let hideGridLeftColumn = false; export let hideGridLeftColumn = false;
export let overlayDefinition = null;
export let onGetSelectionMenu = null;
export const activator = createActivator('DataGridCore', false); export const activator = createActivator('DataGridCore', false);
@@ -482,6 +497,7 @@
const domFilterControlsRef = createRef({}); const domFilterControlsRef = createRef({});
let isGridFocused = false; let isGridFocused = false;
let selectionMenu = null;
const tabid = getContext('tabid'); const tabid = getContext('tabid');
@@ -1003,11 +1019,11 @@
}); });
} }
export function mirrorWriteEnabled(requireKey) { export function sendToDataDeployEnabled() {
return requireKey ? !!display.baseTable?.primaryKey || !!display.baseCollection : !!display.baseTableOrSimilar; return !!display.baseTable?.primaryKey || !!display.baseCollection;
} }
export async function mergeSelectionIntoMirror({ fullRows, mergeMode = 'merge' }) { export async function sendToDataDeploy() {
const file = display.baseTableOrSimilar?.pureName; const file = display.baseTableOrSimilar?.pureName;
const mergeKey = display.baseCollection const mergeKey = display.baseCollection
? display.baseCollection?.uniqueKey?.map(x => x.columnName) ? display.baseCollection?.uniqueKey?.map(x => x.columnName)
@@ -1019,19 +1035,76 @@
const rows = rowIndexes.map(rowIndex => grider.getRowData(rowIndex)); const rows = rowIndexes.map(rowIndex => grider.getRowData(rowIndex));
// @ts-ignore // @ts-ignore
const columns = colIndexes.map(col => realColumnUniqueNames[col]); const columns = colIndexes.map(col => realColumnUniqueNames[col]);
const mergedRows = fullRows ? rows : rows.map(x => _.pick(x, _.uniq([...columns, ...mergeKey])));
const res = await apiCall('archive/modify-file', { const mergedRows = rows.map(x => _.pick(x, _.uniq([...columns, ...mergeKey])));
showModal(SaveArchiveModal, {
folder: $currentArchive, folder: $currentArchive,
file, file,
fileIsReadOnly: true,
onSave: async folder => {
const res = await apiCall('archive/modify-file', {
folder,
file,
mergedRows, mergedRows,
mergeKey, mergeKey,
mergeMode, mergeMode: 'merge',
}); });
if (res) { if (res) {
showSnackbarSuccess(`Merged ${mergedRows.length} rows into ${file} in archive ${$currentArchive}`); showSnackbarSuccess(`Merged ${mergedRows.length} rows into ${file} in archive ${folder}`);
openNewTab(
{
title: folder,
icon: 'img data-deploy',
tabComponent: 'DataDeployTab',
props: {
conid,
database,
},
},
{
editor: {
archiveFolder: folder,
conid,
database,
},
} }
);
} }
},
});
}
// export function mirrorWriteEnabled(requireKey) {
// return requireKey ? !!display.baseTable?.primaryKey || !!display.baseCollection : !!display.baseTableOrSimilar;
// }
// export async function mergeSelectionIntoMirror({ fullRows, mergeMode = 'merge' }) {
// const file = display.baseTableOrSimilar?.pureName;
// const mergeKey = display.baseCollection
// ? display.baseCollection?.uniqueKey?.map(x => x.columnName)
// : display.baseTable?.primaryKey.columns.map(x => x.columnName);
// const cells = cellsToRegularCells(selectedCells);
// const rowIndexes = _.sortBy(_.uniq(cells.map(x => x[0])));
// const colIndexes = _.sortBy(_.uniq(cells.map(x => x[1])));
// const rows = rowIndexes.map(rowIndex => grider.getRowData(rowIndex));
// // @ts-ignore
// const columns = colIndexes.map(col => realColumnUniqueNames[col]);
// const mergedRows = fullRows ? rows : rows.map(x => _.pick(x, _.uniq([...columns, ...mergeKey])));
// const res = await apiCall('archive/modify-file', {
// folder: $currentArchive,
// file,
// mergedRows,
// mergeKey,
// mergeMode,
// });
// if (res) {
// showSnackbarSuccess(`Merged ${mergedRows.length} rows into ${file} in archive ${$currentArchive}`);
// }
// }
export function canShowLeftPanel() { export function canShowLeftPanel() {
return !hideGridLeftColumn; return !hideGridLeftColumn;
@@ -1152,8 +1225,16 @@
onChangeSelectedColumns(getSelectedColumns().map(x => x.columnName)); onChangeSelectedColumns(getSelectedColumns().map(x => x.columnName));
} }
let publishedCells = null;
if (onPublishedCellsChanged) { if (onPublishedCellsChanged) {
onPublishedCellsChanged(getCellsPublished(selectedCells)); if (!publishedCells) publishedCells = getCellsPublished(selectedCells);
onPublishedCellsChanged(publishedCells);
}
if (onGetSelectionMenu) {
if (!publishedCells) publishedCells = getCellsPublished(selectedCells);
selectionMenu = onGetSelectionMenu(publishedCells);
} }
} }
}); });
@@ -1192,6 +1273,7 @@
engine: display?.driver, engine: display?.driver,
condition: display?.getChangeSetCondition(rowData), condition: display?.getChangeSetCondition(rowData),
insertedRowIndex: grider?.getInsertedRowIndex(row), insertedRowIndex: grider?.getInsertedRowIndex(row),
rowStatus: grider.getRowStatus(row),
}; };
}) })
.filter(x => x.column); .filter(x => x.column);
@@ -1747,14 +1829,14 @@
{ placeTag: 'save' }, { placeTag: 'save' },
{ command: 'dataGrid.revertRowChanges', hideDisabled: true }, { command: 'dataGrid.revertRowChanges', hideDisabled: true },
{ command: 'dataGrid.revertAllChanges', hideDisabled: true }, { command: 'dataGrid.revertAllChanges', hideDisabled: true },
{ command: 'dataGrid.deleteSelectedRows' }, { command: 'dataGrid.deleteSelectedRows', hideDisabled: true },
{ command: 'dataGrid.insertNewRow' }, { command: 'dataGrid.insertNewRow', hideDisabled: true },
{ command: 'dataGrid.cloneRows' }, { command: 'dataGrid.cloneRows', hideDisabled: true },
{ command: 'dataGrid.setNull', hideDisabled: true }, { command: 'dataGrid.setNull', hideDisabled: true },
{ command: 'dataGrid.removeField', hideDisabled: true }, { command: 'dataGrid.removeField', hideDisabled: true },
{ placeTag: 'edit' }, { placeTag: 'edit' },
{ divider: true }, { divider: true },
{ command: 'dataGrid.findColumn' }, { command: 'dataGrid.findColumn', hideDisabled: true },
{ command: 'dataGrid.hideColumn', hideDisabled: true }, { command: 'dataGrid.hideColumn', hideDisabled: true },
{ command: 'dataGrid.filterSelected' }, { command: 'dataGrid.filterSelected' },
{ command: 'dataGrid.clearFilter' }, { command: 'dataGrid.clearFilter' },
@@ -1773,17 +1855,18 @@
// { command: 'dataGrid.copyJsonDocument', hideDisabled: true }, // { command: 'dataGrid.copyJsonDocument', hideDisabled: true },
{ divider: true }, { divider: true },
{ placeTag: 'export' }, { placeTag: 'export' },
{ // {
label: 'Save to current archive', // label: 'Save to current archive',
submenu: [ // submenu: [
{ command: 'dataGrid.mergeSelectedCellsIntoMirror' }, // { command: 'dataGrid.mergeSelectedCellsIntoMirror' },
{ command: 'dataGrid.mergeSelectedRowsIntoMirror' }, // { command: 'dataGrid.mergeSelectedRowsIntoMirror' },
{ command: 'dataGrid.appendSelectedCellsIntoMirror' }, // { command: 'dataGrid.appendSelectedCellsIntoMirror' },
{ command: 'dataGrid.appendSelectedRowsIntoMirror' }, // { command: 'dataGrid.appendSelectedRowsIntoMirror' },
{ command: 'dataGrid.replaceSelectedCellsIntoMirror' }, // { command: 'dataGrid.replaceSelectedCellsIntoMirror' },
{ command: 'dataGrid.replaceSelectedRowsIntoMirror' }, // { command: 'dataGrid.replaceSelectedRowsIntoMirror' },
], // ],
}, // },
isProApp() && { command: 'dataGrid.sendToDataDeploy' },
{ command: 'dataGrid.generateSqlFromData' }, { command: 'dataGrid.generateSqlFromData' },
{ command: 'dataGrid.openFreeTable' }, { command: 'dataGrid.openFreeTable' },
{ command: 'dataGrid.openChartFromSelection' }, { command: 'dataGrid.openChartFromSelection' },
@@ -2017,6 +2100,7 @@
onSetFormView={formViewAvailable && display?.baseTable?.primaryKey ? handleSetFormView : null} onSetFormView={formViewAvailable && display?.baseTable?.primaryKey ? handleSetFormView : null}
{dataEditorTypesBehaviourOverride} {dataEditorTypesBehaviourOverride}
{gridColoringMode} {gridColoringMode}
{overlayDefinition}
/> />
{/each} {/each}
</tbody> </tbody>
@@ -2053,7 +2137,19 @@
on:scroll={e => (firstVisibleRowScrollIndex = e.detail)} on:scroll={e => (firstVisibleRowScrollIndex = e.detail)}
bind:this={domVerticalScroll} bind:this={domVerticalScroll}
/> />
{#if selectedCellsInfo} {#if selectionMenu}
<div class="selection-menu">
{#each selectionMenu as item}
<InlineButton
on:click={() => {
item.onClick();
}}
>
{item.text}
</InlineButton>
{/each}
</div>
{:else if selectedCellsInfo}
<div class="row-count-label"> <div class="row-count-label">
{selectedCellsInfo} {selectedCellsInfo}
</div> </div>
@@ -2118,6 +2214,13 @@
bottom: 20px; bottom: 20px;
} }
.selection-menu {
position: absolute;
background-color: var(--theme-bg-2);
right: 40px;
bottom: 20px;
}
.no-rows-info { .no-rows-info {
margin-top: 60px; margin-top: 60px;
} }

View File

@@ -1,5 +1,19 @@
<script lang="ts" context="module">
const OVERLAY_STATUS_ICONS = {
regular: 'icon equal',
updated: 'icon not-equal',
missing: 'img table',
inserted: 'img archive',
};
const OVERLAY_STATUS_TOOLTIPS = {
regular: 'Row is the same in database and archive',
updated: 'Row is different in database and archive',
missing: 'Row is only in database',
inserted: 'Row is only in archive',
};
</script>
<script lang="ts"> <script lang="ts">
import openReferenceForm from '../formview/openReferenceForm';
import DictionaryLookupModal from '../modals/DictionaryLookupModal.svelte'; import DictionaryLookupModal from '../modals/DictionaryLookupModal.svelte';
import { showModal } from '../modals/modalTools'; import { showModal } from '../modals/modalTools';
@@ -27,6 +41,7 @@
export let database; export let database;
export let driver; export let driver;
export let gridColoringMode = '36'; export let gridColoringMode = '36';
export let overlayDefinition = null;
export let dataEditorTypesBehaviourOverride = null; export let dataEditorTypesBehaviourOverride = null;
@@ -51,10 +66,17 @@
onConfirm: value => grider.setCellValue(rowIndex, col.uniqueName, value), onConfirm: value => grider.setCellValue(rowIndex, col.uniqueName, value),
}); });
} }
// $: console.log('rowStatus', rowStatus);
</script> </script>
<tr style={`height: ${rowHeight}px`} class={`coloring-mode-${gridColoringMode}`}> <tr style={`height: ${rowHeight}px`} class={`coloring-mode-${gridColoringMode}`}>
<RowHeaderCell {rowIndex} onShowForm={onSetFormView ? () => onSetFormView(rowData, null) : null} /> <RowHeaderCell
{rowIndex}
onShowForm={onSetFormView && !overlayDefinition ? () => onSetFormView(rowData, null) : null}
extraIcon={overlayDefinition ? OVERLAY_STATUS_ICONS[rowStatus.status] : null}
extraIconTooltip={overlayDefinition ? OVERLAY_STATUS_TOOLTIPS[rowStatus.status] : null}
/>
{#each visibleRealColumns as col (col.uniqueName)} {#each visibleRealColumns as col (col.uniqueName)}
{#if inplaceEditorState.cell && rowIndex == inplaceEditorState.cell[0] && col.colIndex == inplaceEditorState.cell[1]} {#if inplaceEditorState.cell && rowIndex == inplaceEditorState.cell[0] && col.colIndex == inplaceEditorState.cell[1]}
<InplaceEditor <InplaceEditor
@@ -83,11 +105,15 @@
isAutofillSelected={cellIsSelected(rowIndex, col.colIndex, autofillSelectedCells)} isAutofillSelected={cellIsSelected(rowIndex, col.colIndex, autofillSelectedCells)}
isFocusedColumn={focusedColumns?.includes(col.uniqueName)} isFocusedColumn={focusedColumns?.includes(col.uniqueName)}
isModifiedCell={rowStatus.modifiedFields && rowStatus.modifiedFields.has(col.uniqueName)} isModifiedCell={rowStatus.modifiedFields && rowStatus.modifiedFields.has(col.uniqueName)}
overlayValue={rowStatus.overlayFields?.[col.uniqueName]}
hasOverlayValue={rowStatus.overlayFields && col.uniqueName in rowStatus.overlayFields}
isMissingOverlayField={rowStatus.missingOverlayFields && rowStatus.missingOverlayFields.has(col.uniqueName)}
isModifiedRow={rowStatus.status == 'updated'} isModifiedRow={rowStatus.status == 'updated'}
isInserted={rowStatus.status == 'inserted' || isInserted={rowStatus.status == 'inserted' ||
(rowStatus.insertedFields && rowStatus.insertedFields.has(col.uniqueName))} (rowStatus.insertedFields && rowStatus.insertedFields.has(col.uniqueName))}
isDeleted={rowStatus.status == 'deleted' || isDeleted={rowStatus.status == 'deleted' ||
(rowStatus.deletedFields && rowStatus.deletedFields.has(col.uniqueName))} (rowStatus.deletedFields && rowStatus.deletedFields.has(col.uniqueName))}
isMissing={rowStatus.status == 'missing'}
{onSetFormView} {onSetFormView}
{isDynamicStructure} {isDynamicStructure}
isAutoFillMarker={autofillMarkerCell && isAutoFillMarker={autofillMarkerCell &&

View File

@@ -1,8 +1,10 @@
export interface GriderRowStatus { export interface GriderRowStatus {
status: 'regular' | 'updated' | 'deleted' | 'inserted'; status: 'regular' | 'updated' | 'deleted' | 'inserted' | 'missing';
modifiedFields?: Set<string>; modifiedFields?: Set<string>;
insertedFields?: Set<string>; insertedFields?: Set<string>;
deletedFields?: Set<string>; deletedFields?: Set<string>;
overlayFields?: { [field: string]: string };
missingOverlayFields?: Set<string>;
} }
export default abstract class Grider { export default abstract class Grider {
@@ -61,4 +63,7 @@ export default abstract class Grider {
this.setCellValue(index, key, changeObject[key]); this.setCellValue(index, key, changeObject[key]);
} }
} }
getInsertedRowIndex(index) {
return null;
}
} }

View File

@@ -20,7 +20,7 @@
export let allowChangeChangeSetStructure = false; export let allowChangeChangeSetStructure = false;
export let infoLoadCounter = 0; export let infoLoadCounter = 0;
export let driver; export let driver = null;
let loadedRows; let loadedRows;
let infoCounter = 0; let infoCounter = 0;

View File

@@ -0,0 +1,110 @@
import { GridDisplay } from 'dbgate-datalib';
import Grider from './Grider';
import { GriderRowStatus } from './Grider';
import _uniq from 'lodash/uniq';
/**
 * Read-only grider that diffs DB rows (sourceRows) against an archive overlay
 * (overlayData), matching rows by the values of matchColumns.
 *
 * Row order: overlay rows with no matching DB key come first (status 'inserted'),
 * followed by all DB rows. A DB row with no overlay counterpart is 'missing';
 * one whose fields differ from the overlay is 'updated' (with per-field detail);
 * otherwise it is 'regular'. Results are computed lazily and cached per index.
 */
export default class OverlayDiffGrider extends Grider {
  private prependRows: any[];
  private rowCacheIndexes: Set<number>;
  private rowDataCache;
  private rowStatusCache;
  private overlayRowsByStr: { [key: string]: any };

  constructor(
    public sourceRows: any[],
    public display: GridDisplay,
    public matchColumns: string[],
    public overlayData: any[],
    public matchedDbKeys: any[][]
  ) {
    super();
    // String keys ('||'-joined match-column values) of overlay rows that have a DB counterpart
    const matchedKeySet = new Set(matchedDbKeys.map(keyValues => keyValues.join('||')));
    // Overlay-only rows are prepended and later reported as 'inserted'
    this.prependRows = overlayData.filter(overlayRow => !matchedKeySet.has(this.overlayKeyOf(overlayRow)));
    // Index every overlay row by its match key for O(1) lookup per DB row
    this.overlayRowsByStr = {};
    for (const overlayRow of overlayData) {
      this.overlayRowsByStr[this.overlayKeyOf(overlayRow)] = overlayRow;
    }
    this.rowDataCache = {};
    this.rowStatusCache = {};
    this.rowCacheIndexes = new Set();
  }

  /** Builds the '||'-joined match key for a row (DB or overlay). */
  private overlayKeyOf(row): string {
    return this.matchColumns.map(columnName => row[columnName]).join('||');
  }

  /** Writes data + status for an index into the caches and marks it resolved. */
  private storeCacheEntry(index: number, rowData, rowStatus) {
    this.rowStatusCache[index] = rowStatus;
    this.rowDataCache[index] = rowData;
    this.rowCacheIndexes.add(index);
  }

  /** Lazily computes the diff status of the row at `index`; no-op when cached. */
  requireRowCache(index: number) {
    if (this.rowCacheIndexes.has(index)) {
      return;
    }

    // Leading range: overlay rows without a DB match
    if (index < this.prependRows.length) {
      this.storeCacheEntry(index, this.prependRows[index], { status: 'inserted' });
      return;
    }

    const row = this.sourceRows[index - this.prependRows.length];
    // Out-of-range / not-yet-loaded DB row — treated as 'missing'
    if (!row) {
      this.storeCacheEntry(index, row, { status: 'missing' });
      return;
    }

    const overlayRow = this.overlayRowsByStr[this.overlayKeyOf(row)];
    // DB row with no overlay counterpart
    if (!overlayRow) {
      this.storeCacheEntry(index, row, { status: 'missing' });
      return;
    }

    // Field-by-field comparison over the displayed columns.
    // Loose != is intentional: archive values may be stringified numbers.
    const overlayFields = {};
    const missingOverlayFields = new Set();
    for (const column of this.display.columns) {
      const field = column.columnName;
      if (!(field in overlayRow)) {
        missingOverlayFields.add(field);
      } else if (row[field] != overlayRow[field]) {
        overlayFields[field] = overlayRow[field];
      }
    }

    if (Object.keys(overlayFields).length > 0 || missingOverlayFields.size > 0) {
      this.storeCacheEntry(index, row, {
        status: 'updated',
        overlayFields,
        missingOverlayFields,
        modifiedFields: new Set(Object.keys(overlayFields)),
      });
    } else {
      this.storeCacheEntry(index, row, { status: 'regular' });
    }
  }

  /** Row data for `index` (overlay row in the prepended range, DB row after it). */
  getRowData(index: number) {
    this.requireRowCache(index);
    return this.rowDataCache[index];
  }

  /** Diff status ('inserted' | 'missing' | 'updated' | 'regular') for `index`. */
  getRowStatus(index): GriderRowStatus {
    this.requireRowCache(index);
    return this.rowStatusCache[index];
  }

  /** Total rows shown: unmatched overlay rows plus all DB rows. */
  get rowCount() {
    return this.sourceRows.length + this.prependRows.length;
  }
}

View File

@@ -1,9 +1,13 @@
<script lang="ts"> <script lang="ts">
import ShowFormButton from '../formview/ShowFormButton.svelte'; import ShowFormButton from '../formview/ShowFormButton.svelte';
import FontIcon from '../icons/FontIcon.svelte';
export let rowIndex; export let rowIndex;
export let onShowForm; export let onShowForm;
export let extraIcon = null;
export let extraIconTooltip = null;
let mouseIn = false; let mouseIn = false;
</script> </script>
@@ -18,6 +22,11 @@
{#if mouseIn && onShowForm} {#if mouseIn && onShowForm}
<ShowFormButton on:click={onShowForm} /> <ShowFormButton on:click={onShowForm} />
{/if} {/if}
{#if extraIcon}
<div class="extraIcon" title={extraIconTooltip}>
<FontIcon icon={extraIcon} />
</div>
{/if}
</td> </td>
<style> <style>
@@ -29,4 +38,9 @@
overflow: hidden; overflow: hidden;
position: relative; position: relative;
} }
.extraIcon {
position: absolute;
right: 0px;
top: 1px;
}
</style> </style>

View File

@@ -83,6 +83,7 @@
import hasPermission from '../utility/hasPermission'; import hasPermission from '../utility/hasPermission';
import { openImportExportTab } from '../utility/importExportTools'; import { openImportExportTab } from '../utility/importExportTools';
import { getIntSettingsValue } from '../settings/settingsTools'; import { getIntSettingsValue } from '../settings/settingsTools';
import OverlayDiffGrider from './OverlayDiffGrider';
export let conid; export let conid;
export let display; export let display;
@@ -92,6 +93,7 @@
export let config; export let config;
export let changeSetState; export let changeSetState;
export let dispatchChangeSet; export let dispatchChangeSet;
export let overlayDefinition = null;
export let macroPreview; export let macroPreview;
export let macroValues; export let macroValues;
@@ -110,7 +112,7 @@
// $: console.log('loadedRows BIND', loadedRows); // $: console.log('loadedRows BIND', loadedRows);
$: { $: {
if (macroPreview) { if (!overlayDefinition && macroPreview) {
grider = new ChangeSetGrider( grider = new ChangeSetGrider(
loadedRows, loadedRows,
changeSetState, changeSetState,
@@ -124,13 +126,25 @@
} }
// prevent recreate grider, if no macro is selected, so there is no need to selectedcells in macro // prevent recreate grider, if no macro is selected, so there is no need to selectedcells in macro
$: { $: {
if (!macroPreview) { if (!overlayDefinition && !macroPreview) {
grider = new ChangeSetGrider(loadedRows, changeSetState, dispatchChangeSet, display); grider = new ChangeSetGrider(loadedRows, changeSetState, dispatchChangeSet, display);
} }
} }
// $: console.log('GRIDER', grider); // $: console.log('GRIDER', grider);
// $: if (onChangeGrider) onChangeGrider(grider); // $: if (onChangeGrider) onChangeGrider(grider);
$: {
if (overlayDefinition) {
grider = new OverlayDiffGrider(
loadedRows,
display,
overlayDefinition.matchColumns,
overlayDefinition.overlayData,
overlayDefinition.matchedDbKeys
);
}
}
export async function exportGrid() { export async function exportGrid() {
const coninfo = await getConnectionInfo({ conid }); const coninfo = await getConnectionInfo({ conid });

View File

@@ -47,6 +47,8 @@
export let isRawMode = false; export let isRawMode = false;
export let forceReadOnly = false;
$: connection = useConnectionInfo({ conid }); $: connection = useConnectionInfo({ conid });
$: dbinfo = useDatabaseInfo({ conid, database }); $: dbinfo = useDatabaseInfo({ conid, database });
$: serverVersion = useDatabaseServerVersion({ conid, database }); $: serverVersion = useDatabaseServerVersion({ conid, database });
@@ -73,7 +75,7 @@
{ showHintColumns: getBoolSettingsValue('dataGrid.showHintColumns', true) }, { showHintColumns: getBoolSettingsValue('dataGrid.showHintColumns', true) },
$serverVersion, $serverVersion,
table => getDictionaryDescription(table, conid, database, $apps, $connections), table => getDictionaryDescription(table, conid, database, $apps, $connections),
$connection?.isReadOnly, forceReadOnly || $connection?.isReadOnly,
isRawMode isRawMode
) )
: null; : null;
@@ -161,7 +163,7 @@
formViewComponent={SqlFormView} formViewComponent={SqlFormView}
{display} {display}
showReferences showReferences
showMacros={!$connection?.isReadOnly} showMacros={!forceReadOnly && !$connection?.isReadOnly}
hasMultiColumnFilter hasMultiColumnFilter
onRunMacro={handleRunMacro} onRunMacro={handleRunMacro}
macroCondition={macro => macro.type == 'transformValue'} macroCondition={macro => macro.type == 'transformValue'}

View File

@@ -69,6 +69,8 @@
}) })
.addTo(map); .addTo(map);
leaflet.control.scale().addTo(map);
addObjectToMap(); addObjectToMap();
}); });

View File

@@ -3,6 +3,7 @@
import FontIcon from '../icons/FontIcon.svelte'; import FontIcon from '../icons/FontIcon.svelte';
import Link from './Link.svelte'; import Link from './Link.svelte';
import TableControl from './TableControl.svelte'; import TableControl from './TableControl.svelte';
import { writable } from 'svelte/store';
export let title; export let title;
export let collection; export let collection;
@@ -12,6 +13,9 @@
export let hideDisplayName = false; export let hideDisplayName = false;
export let clickable = false; export let clickable = false;
export let onAddNew = null; export let onAddNew = null;
export let displayNameFieldName = null;
export let filters = writable({});
let collapsed = false; let collapsed = false;
</script> </script>
@@ -43,14 +47,16 @@
rows={collection || []} rows={collection || []}
columns={_.compact([ columns={_.compact([
!hideDisplayName && { !hideDisplayName && {
fieldName: 'displayName', fieldName: displayNameFieldName || 'displayName',
header: 'Name', header: 'Name',
slot: -1, slot: -1,
sortable: true, sortable: true,
filterable: !!displayNameFieldName,
}, },
...columns, ...columns,
])} ])}
{clickable} {clickable}
{filters}
on:clickrow on:clickrow
> >
<svelte:fragment slot="-1" let:row let:col> <svelte:fragment slot="-1" let:row let:col>

View File

@@ -7,8 +7,11 @@
props?: any; props?: any;
formatter?: any; formatter?: any;
slot?: number; slot?: number;
slotKey?: string;
isHighlighted?: Function; isHighlighted?: Function;
sortable?: boolean; sortable?: boolean;
filterable?: boolean;
filteredExpression?: (row: any) => string;
} }
</script> </script>
@@ -19,12 +22,19 @@
import keycodes from '../utility/keycodes'; import keycodes from '../utility/keycodes';
import { createEventDispatcher } from 'svelte'; import { createEventDispatcher } from 'svelte';
import FontIcon from '../icons/FontIcon.svelte'; import FontIcon from '../icons/FontIcon.svelte';
import DataFilterControl from '../datagrid/DataFilterControl.svelte';
import { evalFilterBehaviour } from 'dbgate-tools';
import { evaluateCondition } from 'dbgate-sqltree';
import { compileCompoudEvalCondition } from 'dbgate-filterparser';
import { chevronExpandIcon } from '../icons/expandIcons';
export let columns: (TableControlColumn | false)[]; export let columns: (TableControlColumn | false)[];
export let rows; export let rows = null;
export let groupedRows = null;
export let focusOnCreate = false; export let focusOnCreate = false;
export let selectable = false; export let selectable = false;
export let selectedIndex = 0; export let selectedIndex = 0;
export let selectedKey = null;
export let clickable = false; export let clickable = false;
export let disableFocusOutline = false; export let disableFocusOutline = false;
export let emptyMessage = null; export let emptyMessage = null;
@@ -35,7 +45,11 @@
export let checkedKeys = null; export let checkedKeys = null;
export let onSetCheckedKeys = null; export let onSetCheckedKeys = null;
export let extractCheckedKey = x => x.id; export let extractTableItemKey = x => x.id;
export let itemSupportsCheckbox = x => true;
export let filters = null;
export let selectionMode: 'index' | 'key' = 'index';
const dispatch = createEventDispatcher(); const dispatch = createEventDispatcher();
@@ -46,19 +60,120 @@
}); });
const handleKeyDown = event => { const handleKeyDown = event => {
if (event.keyCode == keycodes.downArrow) { const oldSelectedIndex =
selectedIndex = Math.min(selectedIndex + 1, sortedRows.length - 1); selectionMode == 'index' ? selectedIndex : _.findIndex(flatRowsShown, x => extractTableItemKey(x) == selectedKey);
let newIndex = oldSelectedIndex;
switch (event.keyCode) {
case keycodes.downArrow:
newIndex = Math.min(newIndex + 1, flatRowsShown.length - 1);
break;
case keycodes.upArrow:
newIndex = Math.max(0, newIndex - 1);
break;
case keycodes.home:
newIndex = 0;
break;
case keycodes.end:
newIndex = rows.length - 1;
break;
case keycodes.pageUp:
newIndex -= 10;
break;
case keycodes.pageDown:
newIndex += 10;
break;
}
if (newIndex < 0) {
newIndex = 0;
}
if (newIndex >= flatRowsShown.length) {
newIndex = flatRowsShown.length - 1;
}
if (clickable && oldSelectedIndex != newIndex) {
event.preventDefault();
domRows[newIndex]?.scrollIntoView();
if (clickable) {
dispatch('clickrow', flatRowsShown[newIndex]);
}
if (selectionMode == 'index') {
selectedIndex = newIndex;
} else {
selectedKey = extractTableItemKey(flatRowsShown[newIndex]);
} }
if (event.keyCode == keycodes.upArrow) {
selectedIndex = Math.max(0, selectedIndex - 1);
} }
}; };
function filterRows(grows, filters) {
const condition = compileCompoudEvalCondition(filters);
if (!condition) return grows;
return grows
.map(gitem => {
return {
group: gitem.group,
rows: gitem.rows.filter(row => {
const newrow = { ...row };
for (const col of columnList) {
if (col.filteredExpression) {
newrow[col.fieldName] = col.filteredExpression(row);
}
}
return evaluateCondition(condition, newrow);
}),
};
})
.filter(gitem => gitem.rows.length > 0);
}
// function computeGroupedRows(array) {
// if (!extractGroupName) {
// return [{ label: null, rows: array }];
// }
// const res = [];
// let lastGroupName = null;
// let buildArray = [];
// for (const item of array) {
// const groupName = extractGroupName(item);
// if (lastGroupName != groupName) {
// if (buildArray.length > 0) {
// res.push({ label: lastGroupName, rows: buildArray });
// }
// lastGroupName = groupName;
// buildArray = [];
// }
// buildArray.push(item);
// }
// if (buildArray.length > 0) {
// res.push({ label: lastGroupName, rows: buildArray });
// }
// }
let sortedByField = null; let sortedByField = null;
let sortOrderIsDesc = false; let sortOrderIsDesc = false;
let collapsedGroupIndexes = [];
let domRows = {};
$: sortedRowsTmp = sortedByField ? _.sortBy(rows || [], sortedByField) : rows; $: rowsSource = groupedRows ? groupedRows : [{ group: null, rows }];
$: sortedRows = sortOrderIsDesc ? [...sortedRowsTmp].reverse() : sortedRowsTmp;
$: filteredRows = filters ? filterRows(rowsSource, $filters) : rowsSource;
$: sortedRows = sortedByField
? filteredRows.map(gitem => {
let res = _.sortBy(gitem.rows, sortedByField);
if (sortOrderIsDesc) res = [...res].reverse();
return { group: gitem.group, rows: res };
})
: filteredRows;
// $: console.log('sortedRows', sortedRows);
$: flatRowsShown = sortedRows.map(gitem => gitem.rows).flat();
$: checkableFlatRowsShown = flatRowsShown.filter(x => itemSupportsCheckbox(x));
// $: groupedRows = computeGroupedRows(sortedRows);
</script> </script>
<table <table
@@ -73,7 +188,17 @@
<thead class:stickyHeader> <thead class:stickyHeader>
<tr> <tr>
{#if checkedKeys} {#if checkedKeys}
<th></th> <th>
<input
type="checkbox"
checked={checkableFlatRowsShown.every(r => checkedKeys.includes(extractTableItemKey(r)))}
data-testid="TableControl_selectAllCheckBox"
on:change={e => {
if (e.target['checked']) onSetCheckedKeys(checkableFlatRowsShown.map(r => extractTableItemKey(r)));
else onSetCheckedKeys([]);
}}
/>
</th>
{/if} {/if}
{#each columnList as col} {#each columnList as col}
<th <th
@@ -101,32 +226,82 @@
</th> </th>
{/each} {/each}
</tr> </tr>
{#if filters}
<tr>
{#if checkedKeys}
<td class="empty-cell"></td>
{/if}
{#each columnList as col}
<td class="filter-cell" class:empty-cell={!col.filterable}>
{#if col.filterable}
<DataFilterControl
filterBehaviour={evalFilterBehaviour}
filter={$filters[col.fieldName]}
setFilter={value => filters.update(f => ({ ...f, [col.fieldName]: value }))}
placeholder="Data filter"
/>
{/if}
</td>
{/each}
</tr>
{/if}
</thead> </thead>
<tbody> <tbody>
{#each sortedRows as row, index} {#each sortedRows as gitem, groupIndex}
{#if gitem.group}
<tr class="group-row">
<td
colspan={columnList.length + (checkedKeys ? 1 : 0)}
class="groupcell"
on:click={() => {
if (collapsedGroupIndexes.includes(groupIndex)) {
collapsedGroupIndexes = collapsedGroupIndexes.filter(x => x != groupIndex);
} else {
collapsedGroupIndexes = [...collapsedGroupIndexes, groupIndex];
}
}}
>
<FontIcon icon={chevronExpandIcon(!collapsedGroupIndexes.includes(groupIndex))} padRight />
<strong>{gitem.group} ({gitem.rows.length})</strong>
</td>
</tr>
{/if}
{#if !collapsedGroupIndexes.includes(groupIndex)}
{#each gitem.rows as row}
{@const index = _.indexOf(flatRowsShown, row)}
<tr <tr
class:selected={selectable && selectedIndex == index} class:selected={selectable &&
(selectionMode == 'index' ? selectedIndex == index : selectedKey == extractTableItemKey(row))}
class:clickable class:clickable
bind:this={domRows[index]}
on:click={() => { on:click={() => {
if (selectable) { if (selectable) {
if (selectionMode == 'index') {
selectedIndex = index; selectedIndex = index;
} else {
selectedKey = extractTableItemKey(row);
}
domTable.focus(); domTable.focus();
} }
if (clickable) { if (clickable) {
dispatch('clickrow', row); dispatch('clickrow', row);
} }
}} }}
data-testid={`TableControl_row_${index}`}
> >
{#if checkedKeys} {#if checkedKeys}
<td> <td>
{#if itemSupportsCheckbox(row)}
<input <input
type="checkbox" type="checkbox"
checked={checkedKeys.includes(extractCheckedKey(row))} checked={checkedKeys.includes(extractTableItemKey(row))}
on:change={e => { on:change={e => {
if (e.target['checked']) onSetCheckedKeys(_.uniq([...checkedKeys, extractCheckedKey(row)])); if (e.target['checked']) onSetCheckedKeys(_.uniq([...checkedKeys, extractTableItemKey(row)]));
else onSetCheckedKeys(checkedKeys.filter(x => x != extractCheckedKey(row))); else onSetCheckedKeys(checkedKeys.filter(x => x != extractTableItemKey(row)));
}} }}
data-testid={`TableControl_row_${index}_checkbox`}
/> />
{/if}
</td> </td>
{/if} {/if}
{#each columnList as col} {#each columnList as col}
@@ -137,6 +312,7 @@
{:else if col.formatter} {:else if col.formatter}
{col.formatter(row)} {col.formatter(row)}
{:else if col.slot != null} {:else if col.slot != null}
{#key row[col.slotKey] || 'key'}
{#if col.slot == -1}<slot name="-1" {row} {col} {index} /> {#if col.slot == -1}<slot name="-1" {row} {col} {index} />
{:else if col.slot == 0}<slot name="0" {row} {col} {index} {...rowProps} /> {:else if col.slot == 0}<slot name="0" {row} {col} {index} {...rowProps} />
{:else if col.slot == 1}<slot name="1" {row} {col} {index} {...rowProps} /> {:else if col.slot == 1}<slot name="1" {row} {col} {index} {...rowProps} />
@@ -149,6 +325,7 @@
{:else if col.slot == 8}<slot name="8" {row} {col} {index} {...rowProps} /> {:else if col.slot == 8}<slot name="8" {row} {col} {index} {...rowProps} />
{:else if col.slot == 9}<slot name="9" {row} {col} {index} {...rowProps} /> {:else if col.slot == 9}<slot name="9" {row} {col} {index} {...rowProps} />
{/if} {/if}
{/key}
{:else} {:else}
{row[col.fieldName] || ''} {row[col.fieldName] || ''}
{/if} {/if}
@@ -156,6 +333,8 @@
{/each} {/each}
</tr> </tr>
{/each} {/each}
{/if}
{/each}
{#if emptyMessage && sortedRows.length == 0} {#if emptyMessage && sortedRows.length == 0}
<tr> <tr>
<td colspan={columnList.length}>{emptyMessage}</td> <td colspan={columnList.length}>{emptyMessage}</td>
@@ -179,6 +358,9 @@
background: var(--theme-bg-0); background: var(--theme-bg-0);
} }
tbody tr.selected { tbody tr.selected {
background: var(--theme-bg-3);
}
table:focus tbody tr.selected {
background: var(--theme-bg-selected); background: var(--theme-bg-selected);
} }
tbody tr.clickable:hover { tbody tr.clickable:hover {
@@ -232,4 +414,20 @@
border-collapse: separate; border-collapse: separate;
border-left: 1px solid var(--theme-border); border-left: 1px solid var(--theme-border);
} }
.filter-cell {
text-align: left;
overflow: hidden;
margin: 0;
padding: 0;
}
.empty-cell {
background-color: var(--theme-bg-1);
}
.groupcell {
background-color: var(--theme-bg-1);
cursor: pointer;
}
</style> </style>

View File

@@ -10,11 +10,14 @@
export let folderName; export let folderName;
export let name; export let name;
export let filterExtension = null;
const { setFieldValue, values } = getFormContext(); const { setFieldValue, values } = getFormContext();
$: files = useArchiveFiles({ folder: folderName }); $: files = useArchiveFiles({ folder: folderName });
$: filesOptions = ($files || []).map(x => ({ $: filesOptions = ($files || [])
.filter(x => (filterExtension ? x.name.endsWith('.' + filterExtension) : true))
.map(x => ({
value: x.name, value: x.name,
label: x.name, label: x.name,
})); }));

View File

@@ -11,13 +11,19 @@
export let additionalFolders = []; export let additionalFolders = [];
export let name; export let name;
export let allowCreateNew = false;
export let zipFilesOnly = false;
export let skipZipFiles = false;
const { setFieldValue } = getFormContext(); const { setFieldValue } = getFormContext();
const folders = useArchiveFolders(); const folders = useArchiveFolders();
$: folderOptions = [ $: folderOptions = [
...($folders || []).map(folder => ({ ...($folders || [])
.filter(folder => (zipFilesOnly ? folder.name.endsWith('.zip') : true))
.filter(folder => (skipZipFiles ? !folder.name.endsWith('.zip') : true))
.map(folder => ({
value: folder.name, value: folder.name,
label: folder.name, label: folder.name,
})), })),
@@ -28,7 +34,7 @@
value: folder, value: folder,
label: folder, label: folder,
})), })),
{ allowCreateNew && {
label: '(Create new)', label: '(Create new)',
value: '@create', value: '@create',
}, },
@@ -43,7 +49,7 @@
if (e.detail == '@create') { if (e.detail == '@create') {
showModal(InputTextModal, { showModal(InputTextModal, {
header: 'Archive', header: 'Archive',
label: 'Name of new folder', label: 'Name of new archive folder',
onConfirm: createOption, onConfirm: createOption,
}); });
} }

View File

@@ -22,7 +22,7 @@
? { disabled: true } ? { disabled: true }
: { : {
onClick: () => { onClick: () => {
setFieldValue(name, !$values[name]); setFieldValue(name, $values?.[name] == 0 ? true : $values?.[name] == 1 ? false : !$values?.[name]);
dispatch('change'); dispatch('change');
}, },
}} }}

View File

@@ -11,4 +11,9 @@
} }
</script> </script>
<CheckboxField {...$$restProps} checked={$values[name] ?? defaultValue} on:change={handleChange} on:change /> <CheckboxField
{...$$restProps}
checked={$values?.[name] == 0 ? false : $values?.[name] == '1' ? true : ($values?.[name] ?? defaultValue)}
on:change={handleChange}
on:change
/>

View File

@@ -110,6 +110,7 @@
'icon history': 'mdi mdi-history', 'icon history': 'mdi mdi-history',
'icon structure': 'mdi mdi-tools', 'icon structure': 'mdi mdi-tools',
'icon square': 'mdi mdi-square', 'icon square': 'mdi mdi-square',
'icon data-deploy': 'mdi mdi-database-settings',
'icon edit': 'mdi mdi-pencil', 'icon edit': 'mdi mdi-pencil',
'icon delete': 'mdi mdi-delete', 'icon delete': 'mdi mdi-delete',
@@ -206,6 +207,8 @@
'icon type-objectid': 'mdi mdi-alpha-i-box', 'icon type-objectid': 'mdi mdi-alpha-i-box',
'icon type-null': 'mdi mdi-code-equal', 'icon type-null': 'mdi mdi-code-equal',
'icon type-unknown': 'mdi mdi-help-box', 'icon type-unknown': 'mdi mdi-help-box',
'icon equal': 'mdi mdi-equal',
'icon not-equal': 'mdi mdi-not-equal-variant',
'icon at': 'mdi mdi-at', 'icon at': 'mdi mdi-at',
'icon expand-all': 'mdi mdi-expand-all', 'icon expand-all': 'mdi mdi-expand-all',
@@ -218,6 +221,7 @@
'icon autocommit-off': 'mdi mdi-check-circle-outline', 'icon autocommit-off': 'mdi mdi-check-circle-outline',
'icon premium': 'mdi mdi-star', 'icon premium': 'mdi mdi-star',
'icon upload': 'mdi mdi-upload',
'img ok': 'mdi mdi-check-circle color-icon-green', 'img ok': 'mdi mdi-check-circle color-icon-green',
'img ok-inv': 'mdi mdi-check-circle color-icon-inv-green', 'img ok-inv': 'mdi mdi-check-circle color-icon-inv-green',
@@ -232,12 +236,14 @@
'img archive': 'mdi mdi-table color-icon-gold', 'img archive': 'mdi mdi-table color-icon-gold',
'img archive-folder': 'mdi mdi-database-outline color-icon-green', 'img archive-folder': 'mdi mdi-database-outline color-icon-green',
'img zipfile': 'mdi mdi-zip-box color-icon-gold',
'img autoincrement': 'mdi mdi-numeric-1-box-multiple-outline', 'img autoincrement': 'mdi mdi-numeric-1-box-multiple-outline',
'img column': 'mdi mdi-table-column', 'img column': 'mdi mdi-table-column',
'img server': 'mdi mdi-server color-icon-blue', 'img server': 'mdi mdi-server color-icon-blue',
'img primary-key': 'mdi mdi-key-star color-icon-yellow', 'img primary-key': 'mdi mdi-key-star color-icon-yellow',
'img foreign-key': 'mdi mdi-key-link', 'img foreign-key': 'mdi mdi-key-link',
'img sql-file': 'mdi mdi-file', 'img sql-file': 'mdi mdi-file',
'img anyfile': 'mdi mdi-file-question color-icon-red',
'img shell': 'mdi mdi-flash color-icon-blue', 'img shell': 'mdi mdi-flash color-icon-blue',
'img chart': 'mdi mdi-chart-bar color-icon-magenta', 'img chart': 'mdi mdi-chart-bar color-icon-magenta',
'img markdown': 'mdi mdi-application color-icon-red', 'img markdown': 'mdi mdi-application color-icon-red',
@@ -301,7 +307,7 @@
'img type-rejson': 'mdi mdi-color-json color-icon-blue', 'img type-rejson': 'mdi mdi-color-json color-icon-blue',
'img keydb': 'mdi mdi-key color-icon-blue', 'img keydb': 'mdi mdi-key color-icon-blue',
'img duplicator': 'mdi mdi-content-duplicate color-icon-green', 'img replicator': 'mdi mdi-content-duplicate color-icon-green',
'img import': 'mdi mdi-database-import color-icon-green', 'img import': 'mdi mdi-database-import color-icon-green',
'img export': 'mdi mdi-database-export color-icon-green', 'img export': 'mdi mdi-database-export color-icon-green',
'img transform': 'mdi mdi-rotate-orbit color-icon-blue', 'img transform': 'mdi mdi-rotate-orbit color-icon-blue',
@@ -311,6 +317,8 @@
'img db-backup': 'mdi mdi-database-export color-icon-yellow', 'img db-backup': 'mdi mdi-database-export color-icon-yellow',
'img db-restore': 'mdi mdi-database-import color-icon-red', 'img db-restore': 'mdi mdi-database-import color-icon-red',
'img settings': 'mdi mdi-cog color-icon-blue',
'img data-deploy': 'mdi mdi-database-settings color-icon-green',
}; };
</script> </script>

View File

@@ -5,7 +5,7 @@
import { getConnectionLabel } from 'dbgate-tools'; import { getConnectionLabel } from 'dbgate-tools';
export let allowChooseModel = false; export let allowChooseModel = false;
export let direction; export let direction = 'source';
$: connections = useConnectionList(); $: connections = useConnectionList();
$: connectionOptions = [ $: connectionOptions = [

View File

@@ -23,6 +23,11 @@
import { findEngineDriver } from 'dbgate-tools'; import { findEngineDriver } from 'dbgate-tools';
import AceEditor from '../query/AceEditor.svelte'; import AceEditor from '../query/AceEditor.svelte';
import { _t } from '../translations'; import { _t } from '../translations';
import { showModal } from '../modals/modalTools';
import InputTextModal from '../modals/InputTextModal.svelte';
import FormCheckboxField from '../forms/FormCheckboxField.svelte';
import { isProApp } from '../utility/proTools';
import FormTextField from '../forms/FormTextField.svelte';
export let direction; export let direction;
export let storageTypeField; export let storageTypeField;
@@ -54,7 +59,7 @@
{ value: 'query', label: _t('common.query', { defaultMessage: 'Query' }), directions: ['source'] }, { value: 'query', label: _t('common.query', { defaultMessage: 'Query' }), directions: ['source'] },
{ {
value: 'archive', value: 'archive',
label: _t('common.archive', { defaultMessage: 'Archive' }), label: _t('common.archive', { defaultMessage: 'Archive (JSONL)' }),
directions: ['source', 'target'], directions: ['source', 'target'],
}, },
]; ];
@@ -108,11 +113,18 @@
<FormStyledButton <FormStyledButton
value="New archive" value="New archive"
on:click={() => { on:click={() => {
showModal(InputTextModal, {
header: 'Archive',
label: 'Name of new archive folder',
value: `import-${moment().format('YYYY-MM-DD-hh-mm-ss')}`,
onConfirm: value => {
values.update(x => ({ values.update(x => ({
...x, ...x,
[storageTypeField]: 'archive', [storageTypeField]: 'archive',
[archiveFolderField]: `import-${moment().format('YYYY-MM-DD-hh-mm-ss')}`, [archiveFolderField]: value,
})); }));
},
});
}} }}
/> />
{/if} {/if}
@@ -124,6 +136,41 @@
label="Storage type" label="Storage type"
/> />
{#if format && isProApp()}
{#if direction == 'source'}
<FormCheckboxField
name={`importFromZipFile`}
label={_t('importExport.importFromZipFile', { defaultMessage: 'Import from ZIP file (in archive folder)' })}
/>
{#if $values.importFromZipFile}
<FormArchiveFolderSelect
label={_t('importExport.importFromZipArchive', { defaultMessage: 'Input ZIP archive' })}
name={archiveFolderField}
additionalFolders={_.compact([$values[archiveFolderField]])}
zipFilesOnly
/>
{/if}
{/if}
{#if direction == 'target'}
<FormCheckboxField
name={`exportToZipFile`}
label={_t('importExport.exportToZipFile', { defaultMessage: 'Export to ZIP file' })}
/>
{#if $values.exportToZipFile}
<FormCheckboxField
name={`createZipFileInArchive`}
label={_t('importExport.createZipFileInArchive', { defaultMessage: 'Create ZIP file in archive' })}
/>
<FormTextField
label={_t('importExport.exportToZipArchive', { defaultMessage: 'Output ZIP archive' })}
name={archiveFolderField}
placeholder={'zip-archive-yyyy-mm-dd-hh-mm-ss.zip'}
/>
{/if}
{/if}
{/if}
{#if storageType == 'database' || storageType == 'query'} {#if storageType == 'database' || storageType == 'query'}
<FormConnectionSelect name={connectionIdField} label="Server" {direction} /> <FormConnectionSelect name={connectionIdField} label="Server" {direction} />
<FormDatabaseSelect conidName={connectionIdField} name={databaseNameField} label="Database" /> <FormDatabaseSelect conidName={connectionIdField} name={databaseNameField} label="Database" />
@@ -164,18 +211,20 @@
label="Archive folder" label="Archive folder"
name={archiveFolderField} name={archiveFolderField}
additionalFolders={_.compact([$values[archiveFolderField]])} additionalFolders={_.compact([$values[archiveFolderField]])}
allowCreateNew={direction == 'target'}
/> />
{/if} {/if}
{#if storageType == 'archive' && direction == 'source'} {#if direction == 'source' && (storageType == 'archive' || $values.importFromZipFile)}
<FormArchiveFilesSelect <FormArchiveFilesSelect
label={_t('importExport.sourceFiles', { defaultMessage: 'Source files' })} label={_t('importExport.sourceFiles', { defaultMessage: 'Source files' })}
folderName={$values[archiveFolderField]} folderName={$values[archiveFolderField]}
name={tablesField} name={tablesField}
filterExtension={format?.extension}
/> />
{/if} {/if}
{#if format && direction == 'source'} {#if format && direction == 'source' && !$values.importFromZipFile}
<FilesInput {setPreviewSource} /> <FilesInput {setPreviewSource} />
{/if} {/if}

View File

@@ -1,4 +1,5 @@
import _ from 'lodash'; import _ from 'lodash';
import moment from 'moment';
import { ScriptWriter, ScriptWriterJson } from 'dbgate-tools'; import { ScriptWriter, ScriptWriterJson } from 'dbgate-tools';
import getAsArray from '../utility/getAsArray'; import getAsArray from '../utility/getAsArray';
import { getConnectionInfo } from '../utility/metadataLoaders'; import { getConnectionInfo } from '../utility/metadataLoaders';
@@ -93,7 +94,13 @@ function getSourceExpr(extensions, sourceName, values, sourceConnection, sourceD
return [ return [
format.readerFunc, format.readerFunc,
{ {
..._.omit(sourceFile, ['isDownload']), ...(sourceFile
? _.omit(sourceFile, ['isDownload'])
: {
fileName: values.importFromZipFile
? `zip://archive:${values.sourceArchiveFolder}//${sourceName}`
: sourceName,
}),
...extractFormatApiParameters(values, 'source', format), ...extractFormatApiParameters(values, 'source', format),
}, },
]; ];
@@ -237,6 +244,13 @@ export default async function createImpExpScript(extensions, values, forceScript
script.copyStream(sourceVar, targetVar, colmapVar, sourceName); script.copyStream(sourceVar, targetVar, colmapVar, sourceName);
script.endLine(); script.endLine();
} }
if (values.exportToZipFile) {
let zipFileName = values.exportToZipFileName || `zip-archive-${moment().format('YYYY-MM-DD-HH-mm-ss')}.zip`;
if (!zipFileName.endsWith('.zip')) zipFileName += '.zip';
script.zipDirectory('.', values.createZipFileInArchive ? 'archive:' + zipFileName : zipFileName);
}
return script.getScript(values.schedule); return script.getScript(values.schedule);
} }

View File

@@ -17,7 +17,7 @@
<div>{message}</div> <div>{message}</div>
<FormArchiveFolderSelect label="Archive folder" name="archiveFolder" isNative /> <FormArchiveFolderSelect label="Archive folder" name="archiveFolder" isNative allowCreateNew />
<svelte:fragment slot="footer"> <svelte:fragment slot="footer">
<FormSubmit <FormSubmit

View File

@@ -15,9 +15,7 @@
<FormProvider> <FormProvider>
<ModalBase {...$$restProps}> <ModalBase {...$$restProps}>
<svelte:fragment slot="header"> <svelte:fragment slot="header">
{#if header} {header || 'Confirm'}
{header}
{/if}
</svelte:fragment> </svelte:fragment>
{message} {message}

View File

@@ -0,0 +1,366 @@
<script lang="ts">
import { onMount } from 'svelte';
import FormStyledButton from '../buttons/FormStyledButton.svelte';
import FormProvider from '../forms/FormProvider.svelte';
import ModalBase from './ModalBase.svelte';
import { closeCurrentModal } from './modalTools';
import { _t } from '../translations';
import { apiCall } from '../utility/api';
import TabControl from '../elements/TabControl.svelte';
import TableControl from '../elements/TableControl.svelte';
import { writable } from 'svelte/store';
import LargeButton from '../buttons/LargeButton.svelte';
import { downloadFromApi } from '../utility/exportFileTools';
import getElectron from '../utility/getElectron';
import { showSnackbarSuccess } from '../utility/snackbar';
import { format } from 'date-fns';
import Link from '../elements/Link.svelte';
import _ from 'lodash';
// 'export' shows the current server configuration for download;
// 'import' loads data from an already-uploaded ZIP file for applying.
export let mode: 'export' | 'import';
// Server-side path of the uploaded ZIP; only meaningful when mode == 'import'.
export let uploadedFilePath = undefined;
// Raw payload loaded from the API; tables (connections, users, ...) keyed by name.
let fullData: any = {};
// Fetch the full export payload from the server and populate the tab lists.
async function loadExportedData() {
  const payload = await apiCall('config/export-connections-and-settings');
  fullData = payload;
  initFromFullData();
}
// Read the JSON tables out of the uploaded ZIP file and populate the tab lists.
async function loadImportedData() {
  const payload = await apiCall('files/get-jsons-from-zip', { filePath: uploadedFilePath });
  fullData = payload;
  initFromFullData();
}
// Copy the table arrays out of fullData (defaulting missing ones to empty)
// and start with everything checked.
function initFromFullData() {
  const listOrEmpty = key => fullData[key] || [];
  connections = listOrEmpty('connections');
  users = listOrEmpty('users');
  roles = listOrEmpty('roles');
  authMethods = listOrEmpty('auth_methods');
  config = listOrEmpty('config');
  handleCheckAll(true);
}
// Check (checked == true) or uncheck (checked == false) every row in every tab.
function handleCheckAll(checked) {
  const idsOf = items => (checked ? items.map(x => x.id) : []);
  checkedConnections = idsOf(connections);
  checkedUsers = idsOf(users);
  checkedRoles = idsOf(roles);
  checkedAuthMethods = idsOf(authMethods);
  checkedConfig = idsOf(config);
}
// Load data for the selected mode once the modal is mounted.
onMount(() => {
  if (mode == 'export') loadExportedData();
  else if (mode == 'import') loadImportedData();
});
// Build the payload restricted to the rows the user checked, keeping the
// join tables (user_connections, role_permissions, ...) consistent with the
// checked parent rows. The admin encryptionKey config row is always included.
function getLimitedData() {
  // Precompute membership sets once; the original used .includes() inside
  // every filter callback, making each join below O(n*m).
  const connIds = new Set(checkedConnections);
  const userIds = new Set(checkedUsers);
  const roleIds = new Set(checkedRoles);
  const authIds = new Set(checkedAuthMethods);
  const configIds = new Set(checkedConfig);
  const limitedData: any = {
    connections: fullData.connections?.filter(x => connIds.has(x.id)),
    users: fullData.users?.filter(x => userIds.has(x.id)),
    user_connections: fullData.user_connections?.filter(
      x => userIds.has(x.user_id) && connIds.has(x.connection_id)
    ),
    user_roles: fullData.user_roles?.filter(x => userIds.has(x.user_id) && roleIds.has(x.role_id)),
    user_permissions: fullData.user_permissions?.filter(x => userIds.has(x.user_id)),
    roles: fullData.roles?.filter(x => roleIds.has(x.id)),
    role_connections: fullData.role_connections?.filter(
      x => roleIds.has(x.role_id) && connIds.has(x.connection_id)
    ),
    role_permissions: fullData.role_permissions?.filter(x => roleIds.has(x.role_id)),
    auth_methods: fullData.auth_methods?.filter(x => authIds.has(x.id)),
    auth_methods_config: fullData.auth_methods_config?.filter(x => authIds.has(x.auth_method_id)),
    config: fullData.config?.filter(
      x => configIds.has(x.id) || (x.group == 'admin' && x.key == 'encryptionKey')
    ),
  };
  return limitedData;
}
// Export the checked rows as a ZIP file: via a native save dialog in the
// desktop (Electron) app, or via a server-side uploads file + browser
// download in the web app.
async function handleExport() {
  const electron = getElectron();
  let filePath;
  let fileName;
  if (electron) {
    // Desktop: ask the user where to save the ZIP.
    // (The original re-declared `const electron = getElectron()` here,
    // shadowing the outer variable — removed as redundant.)
    filePath = await electron.showSaveDialog({
      filters: [
        { name: `ZIP files`, extensions: ['zip'] },
        { name: `All files`, extensions: ['*'] },
      ],
      defaultPath: `dbgateconfig.zip`,
      properties: ['showOverwriteConfirmation'],
    });
  } else {
    // Web: allocate a temporary uploads file on the server.
    // FIX: extension was 'sql', but the file written into it below is a ZIP
    // archive; request 'zip' so the temp file name matches its content.
    const resp = await apiCall('files/generate-uploads-file', { extension: 'zip' });
    filePath = resp.filePath;
    fileName = resp.fileName;
  }
  if (!filePath) {
    // User cancelled the save dialog.
    return;
  }
  await apiCall('files/create-zip-from-jsons', { db: getLimitedData(), filePath });
  if (electron) {
    showSnackbarSuccess(`Saved to file ${filePath}`);
  } else {
    await downloadFromApi(`uploads/get?file=${fileName}`, `dbgateconfig.zip`);
  }
}
// Save the checked rows as a timestamped ZIP into the DbGate archive folder.
async function handleSaveToArchive() {
  const stamp = format(new Date(), 'yyyy-MM-dd-HH-mm-ss');
  const filePath = `archive:dbgateconfig-${stamp}.zip`;
  await apiCall('files/create-zip-from-jsons', { db: getLimitedData(), filePath });
  showSnackbarSuccess(`Saved to ${filePath}`);
}
// Apply the checked rows from the uploaded ZIP to the server configuration.
async function handleImport() {
  const db = getLimitedData();
  await apiCall('config/import-connections-and-settings', { db });
  showSnackbarSuccess(`Imported connections and settings`);
}
// Rows shown in each tab, plus the ids currently checked in that tab.
// Top-level `let` assignments are required for Svelte reactivity.
let connections = [];
let checkedConnections = [];
let users = [];
let checkedUsers = [];
let roles = [];
let checkedRoles = [];
let authMethods = [];
let checkedAuthMethods = [];
let config = [];
let checkedConfig = [];
// Per-tab column-filter state passed to each TableControl.
const connectionFilters = writable({});
const userFilters = writable({});
const roleFilters = writable({});
const authMethodFilters = writable({});
const configFilters = writable({});
</script>
<FormProvider>
<ModalBase {...$$restProps} fullScreen>
<div slot="header">
{mode == 'export' ? 'Export' : 'Import'} connections &amp; settings
<span class="check-uncheck">
<Link onClick={() => handleCheckAll(true)}>Check all</Link>
|
<Link onClick={() => handleCheckAll(false)}>Uncheck all</Link>
</span>
</div>
<div class="tabs">
<TabControl
tabs={_.compact([
connections?.length && {
label: `Connections (${checkedConnections?.length}/${connections?.length})`,
slot: 1,
},
users?.length && { label: `Users (${checkedUsers?.length}/${users?.length})`, slot: 2 },
roles?.length && { label: `Roles (${checkedRoles?.length}/${roles?.length})`, slot: 3 },
authMethods?.length && {
label: `Auth methods (${checkedAuthMethods?.length}/${authMethods?.length})`,
slot: 4,
},
config?.length && { label: `Config (${checkedConfig?.length}/${config?.length})`, slot: 5 },
])}
>
<svelte:fragment slot="1">
<div class="tablewrap">
<TableControl
filters={connectionFilters}
stickyHeader
columns={[
{ header: 'ID', fieldName: 'id', sortable: true, filterable: true },
{ header: 'Display name', fieldName: 'displayName', sortable: true, filterable: true },
{ header: 'Engine', fieldName: 'engine', sortable: true, filterable: true },
{ header: 'Server', fieldName: 'server', sortable: true, filterable: true },
{ header: 'User', fieldName: 'user', sortable: true, filterable: true },
]}
clickable
rows={connections}
on:clickrow={event => {
checkedConnections = checkedConnections.includes(event.detail.id)
? checkedConnections.filter(id => id !== event.detail.id)
: [...checkedConnections, event.detail.id];
}}
checkedKeys={checkedConnections}
onSetCheckedKeys={keys => {
checkedConnections = keys;
}}
></TableControl>
</div>
</svelte:fragment>
<svelte:fragment slot="2">
<div class="tablewrap">
<TableControl
filters={userFilters}
stickyHeader
columns={[
{ header: 'ID', fieldName: 'id', sortable: true, filterable: true },
{ header: 'Login', fieldName: 'login', sortable: true, filterable: true },
{ header: 'E-mail', fieldName: 'email', sortable: true, filterable: true },
]}
clickable
rows={users}
on:clickrow={event => {
checkedUsers = checkedUsers.includes(event.detail.id)
? checkedUsers.filter(id => id !== event.detail.id)
: [...checkedUsers, event.detail.id];
}}
checkedKeys={checkedUsers}
onSetCheckedKeys={keys => {
checkedUsers = keys;
}}
></TableControl>
</div>
</svelte:fragment>
<svelte:fragment slot="3">
<div class="tablewrap">
<TableControl
filters={roleFilters}
stickyHeader
columns={[
{ header: 'ID', fieldName: 'id', sortable: true, filterable: true },
{ header: 'Name', fieldName: 'name', sortable: true, filterable: true },
]}
clickable
rows={roles}
on:clickrow={event => {
checkedRoles = checkedRoles.includes(event.detail.id)
? checkedRoles.filter(id => id !== event.detail.id)
: [...checkedRoles, event.detail.id];
}}
checkedKeys={checkedRoles}
onSetCheckedKeys={keys => {
checkedRoles = keys;
}}
></TableControl>
</div>
</svelte:fragment>
<svelte:fragment slot="4">
<div class="tablewrap">
<TableControl
filters={authMethodFilters}
stickyHeader
columns={[
{ header: 'ID', fieldName: 'id', sortable: true, filterable: true },
{ header: 'Name', fieldName: 'name', sortable: true, filterable: true },
{ header: 'Type', fieldName: 'type', sortable: true, filterable: true },
]}
clickable
rows={authMethods}
on:clickrow={event => {
checkedAuthMethods = checkedAuthMethods.includes(event.detail.id)
? checkedAuthMethods.filter(id => id !== event.detail.id)
: [...checkedAuthMethods, event.detail.id];
}}
checkedKeys={checkedAuthMethods}
onSetCheckedKeys={keys => {
checkedAuthMethods = keys;
}}
></TableControl>
</div>
</svelte:fragment>
<svelte:fragment slot="5">
<div class="tablewrap">
<TableControl
filters={configFilters}
stickyHeader
columns={[
{ header: 'ID', fieldName: 'id', sortable: true, filterable: true },
{ header: 'Group', fieldName: 'group', sortable: true, filterable: true },
{ header: 'Key', fieldName: 'key', sortable: true, filterable: true },
{ header: 'Value', fieldName: 'value', sortable: true, filterable: true },
]}
clickable
rows={config}
on:clickrow={event => {
checkedConfig = checkedConfig.includes(event.detail.id)
? checkedConfig.filter(id => id !== event.detail.id)
: [...checkedConfig, event.detail.id];
}}
checkedKeys={checkedConfig}
onSetCheckedKeys={keys => {
checkedConfig = keys;
}}
></TableControl>
</div>
</svelte:fragment>
</TabControl>
</div>
<div slot="footer">
<div class="flex m-2">
{#if mode == 'export'}
<LargeButton
data-testid="ExportImportConnectionsModal_exportButton"
icon="icon export"
on:click={handleExport}>{_t('common.export', { defaultMessage: 'Export' })}</LargeButton
>
<LargeButton
data-testid="ExportImportConnectionsModal_saveToArchive"
icon="icon archive"
on:click={handleSaveToArchive}
>{_t('common.saveToArchive', { defaultMessage: 'Save to archive' })}</LargeButton
>
{/if}
{#if mode == 'import'}
<LargeButton
data-testid="ExportImportConnectionsModal_importButton"
icon="icon import"
on:click={handleImport}>{_t('common.import', { defaultMessage: 'Import' })}</LargeButton
>
{/if}
<LargeButton icon="icon close" on:click={closeCurrentModal} data-testid="EditJsonModal_closeButton"
>Close</LargeButton
>
</div>
</div>
</ModalBase>
</FormProvider>
<style>
.tablewrap {
overflow: auto;
width: 100%;
height: calc(100vh - 220px);
margin: 1rem;
}
.tabs {
flex: 1;
}
.check-uncheck {
margin-left: 1rem;
font-size: 0.8rem;
}
</style>

Some files were not shown because too many files have changed in this diff Show More