Merge branch 'feature/separate-schemas-2'

This commit is contained in:
SPRINX0\prochazka
2024-09-20 14:54:07 +02:00
94 changed files with 816 additions and 569 deletions

View File

@@ -1,7 +1,7 @@
const stableStringify = require('json-stable-stringify'); const stableStringify = require('json-stable-stringify');
const _ = require('lodash'); const _ = require('lodash');
const fp = require('lodash/fp'); const fp = require('lodash/fp');
const { testWrapper } = require('../tools'); const { testWrapper, extractConnection } = require('../tools');
const engines = require('../engines'); const engines = require('../engines');
const { runCommandOnDriver } = require('dbgate-tools'); const { runCommandOnDriver } = require('dbgate-tools');
@@ -23,17 +23,17 @@ describe('Schema tests', () => {
testWrapper(async (conn, driver, engine) => { testWrapper(async (conn, driver, engine) => {
await baseStructure(conn, driver); await baseStructure(conn, driver);
const structure1 = await driver.analyseFull(conn); const structure1 = await driver.analyseFull(conn);
expect(structure1.schemas.find(x => x.schemaName == 'myschema')).toBeFalsy(); const schemas1 = await driver.listSchemas(conn);
const count = structure1.schemas.length; expect(schemas1.find(x => x.schemaName == 'myschema')).toBeFalsy();
const count = schemas1.length;
expect(structure1.tables.length).toEqual(2); expect(structure1.tables.length).toEqual(2);
await runCommandOnDriver(conn, driver, dmp => dmp.createSchema('myschema')); await runCommandOnDriver(conn, driver, dmp => dmp.createSchema('myschema'));
const structure2 = await driver.analyseIncremental(conn, structure1); const structure2 = await driver.analyseIncremental(conn, structure1);
expect(structure2.schemas.find(x => x.schemaName == 'myschema')).toBeTruthy(); const schemas2 = await driver.listSchemas(conn);
expect(structure2.tables.length).toEqual(2); expect(schemas2.find(x => x.schemaName == 'myschema')).toBeTruthy();
expect(structure2.schemas.length).toEqual(count + 1); expect(schemas2.length).toEqual(count + 1);
expect(schemas2.find(x => x.isDefault).schemaName).toEqual(engine.defaultSchemaName);
const structure3 = await driver.analyseIncremental(conn, structure2); expect(structure2).toBeNull();
expect(structure3).toBeNull();
}) })
); );
@@ -44,29 +44,33 @@ describe('Schema tests', () => {
await runCommandOnDriver(conn, driver, dmp => dmp.createSchema('myschema')); await runCommandOnDriver(conn, driver, dmp => dmp.createSchema('myschema'));
const structure1 = await driver.analyseFull(conn); const structure1 = await driver.analyseFull(conn);
expect(structure1.schemas.find(x => x.schemaName == 'myschema')).toBeTruthy(); const schemas1 = await driver.listSchemas(conn);
expect(schemas1.find(x => x.schemaName == 'myschema')).toBeTruthy();
expect(structure1.tables.length).toEqual(2); expect(structure1.tables.length).toEqual(2);
await runCommandOnDriver(conn, driver, dmp => dmp.dropSchema('myschema')); await runCommandOnDriver(conn, driver, dmp => dmp.dropSchema('myschema'));
const structure2 = await driver.analyseIncremental(conn, structure1); const structure2 = await driver.analyseIncremental(conn, structure1);
expect(structure2.schemas.find(x => x.schemaName == 'myschema')).toBeFalsy(); const schemas2 = await driver.listSchemas(conn);
expect(structure2.tables.length).toEqual(2); expect(schemas2.find(x => x.schemaName == 'myschema')).toBeFalsy();
expect(structure2).toBeNull();
const structure3 = await driver.analyseIncremental(conn, structure2);
expect(structure3).toBeNull();
}) })
); );
test.each(engines.filter(x => x.supportSchemas).map(engine => [engine.label, engine]))( test.each(engines.filter(x => x.supportSchemas && !x.skipSeparateSchemas).map(engine => [engine.label, engine]))(
'Create table - keep schemas - %s', 'Table inside schema - %s',
testWrapper(async (conn, driver, engine) => { testWrapper(async (handle, driver, engine) => {
await baseStructure(conn, driver); await baseStructure(handle, driver);
const structure1 = await driver.analyseFull(conn); await runCommandOnDriver(handle, driver, dmp => dmp.createSchema('myschema'));
const count = structure1.schemas.length;
expect(structure1.tables.length).toEqual(2); const schemaConnDef = {
await driver.query(conn, `create table t3 (id int not null primary key)`); ...extractConnection(engine),
const structure2 = await driver.analyseIncremental(conn, structure1); database: `${handle.database}::myschema`,
expect(structure2.tables.length).toEqual(3); };
expect(structure2.schemas.length).toEqual(count);
const schemaConn = await driver.connect(schemaConnDef);
await driver.query(schemaConn, `create table myschema.myt1 (id int not null primary key)`);
const structure1 = await driver.analyseFull(schemaConn);
expect(structure1.tables.length).toEqual(1);
expect(structure1.tables[0].pureName).toEqual('myt1');
}) })
); );
}); });

View File

@@ -82,6 +82,7 @@ const engines = [
}, },
], ],
supportSchemas: true, supportSchemas: true,
defaultSchemaName: 'public',
}, },
{ {
label: 'SQL Server', label: 'SQL Server',
@@ -107,6 +108,8 @@ const engines = [
}, },
], ],
supportSchemas: true, supportSchemas: true,
defaultSchemaName: 'dbo',
// skipSeparateSchemas: true,
}, },
{ {
label: 'SQLite', label: 'SQLite',
@@ -115,6 +118,7 @@ const engines = [
engine: 'sqlite@dbgate-plugin-sqlite', engine: 'sqlite@dbgate-plugin-sqlite',
}, },
objects: [views], objects: [views],
skipOnCI: false,
}, },
{ {
label: 'CockroachDB', label: 'CockroachDB',
@@ -161,9 +165,9 @@ const filterLocal = [
// filter local testing // filter local testing
'-MySQL', '-MySQL',
'-MariaDB', '-MariaDB',
'PostgreSQL', '-PostgreSQL',
'-SQL Server', '-SQL Server',
'-SQLite', 'SQLite',
'-CockroachDB', '-CockroachDB',
'-ClickHouse', '-ClickHouse',
]; ];

View File

@@ -13,7 +13,7 @@
"wait:ci": "cross-env DEVMODE=1 CITEST=1 node wait.js", "wait:ci": "cross-env DEVMODE=1 CITEST=1 node wait.js",
"test:local": "cross-env DEVMODE=1 LOCALTEST=1 jest", "test:local": "cross-env DEVMODE=1 LOCALTEST=1 jest",
"test:local:path": "cross-env DEVMODE=1 LOCALTEST=1 jest --runTestsByPath __tests__/data-duplicator.spec.js", "test:local:path": "cross-env DEVMODE=1 LOCALTEST=1 jest --runTestsByPath __tests__/data-duplicator.spec.js",
"test:ci": "cross-env DEVMODE=1 CITEST=1 jest --runInBand --json --outputFile=result.json --testLocationInResults", "test:ci": "cross-env DEVMODE=1 CITEST=1 jest --runInBand --json --outputFile=result.json --testLocationInResults --detectOpenHandles --forceExit",
"run:local": "docker-compose down && docker-compose up -d && yarn wait:local && yarn test:local" "run:local": "docker-compose down && docker-compose up -d && yarn wait:local && yarn test:local"
}, },
"jest": { "jest": {

View File

@@ -1,6 +1,6 @@
{ {
"private": true, "private": true,
"version": "5.4.5-beta.7", "version": "5.4.5-beta.10",
"name": "dbgate-all", "name": "dbgate-all",
"workspaces": [ "workspaces": [
"packages/*", "packages/*",

View File

@@ -76,6 +76,7 @@ function getPortalCollections() {
allowedDatabases: process.env[`ALLOWED_DATABASES_${id}`]?.replace(/\|/g, '\n'), allowedDatabases: process.env[`ALLOWED_DATABASES_${id}`]?.replace(/\|/g, '\n'),
allowedDatabasesRegex: process.env[`ALLOWED_DATABASES_REGEX_${id}`], allowedDatabasesRegex: process.env[`ALLOWED_DATABASES_REGEX_${id}`],
parent: process.env[`PARENT_${id}`] || undefined, parent: process.env[`PARENT_${id}`] || undefined,
useSeparateSchemas: !!process.env[`USE_SEPARATE_SCHEMAS_${id}`],
// SSH tunnel // SSH tunnel
useSshTunnel: process.env[`USE_SSH_${id}`], useSshTunnel: process.env[`USE_SSH_${id}`],

View File

@@ -213,6 +213,17 @@ module.exports = {
return res.result || null; return res.result || null;
}, },
schemaList_meta: true,
async schemaList({ conid, database }, req) {
testConnectionPermission(conid, req);
return this.loadDataCore('schemaList', { conid, database });
},
dispatchDatabaseChangedEvent_meta: true,
dispatchDatabaseChangedEvent({ event, conid, database }) {
socket.emitChanged(event, { conid, database });
},
loadKeys_meta: true, loadKeys_meta: true,
async loadKeys({ conid, database, root, filter }, req) { async loadKeys({ conid, database, root, filter }, req) {
testConnectionPermission(conid, req); testConnectionPermission(conid, req);

View File

@@ -11,7 +11,7 @@ const { dumpSqlSelect } = require('dbgate-sqltree');
const logger = getLogger('dbconnProcess'); const logger = getLogger('dbconnProcess');
let systemConnection; let dbhan;
let storedConnection; let storedConnection;
let afterConnectCallbacks = []; let afterConnectCallbacks = [];
let afterAnalyseCallbacks = []; let afterAnalyseCallbacks = [];
@@ -49,7 +49,7 @@ async function handleFullRefresh() {
loadingModel = true; loadingModel = true;
const driver = requireEngineDriver(storedConnection); const driver = requireEngineDriver(storedConnection);
setStatusName('loadStructure'); setStatusName('loadStructure');
analysedStructure = await checkedAsyncCall(driver.analyseFull(systemConnection, serverVersion)); analysedStructure = await checkedAsyncCall(driver.analyseFull(dbhan, serverVersion));
analysedTime = new Date().getTime(); analysedTime = new Date().getTime();
process.send({ msgtype: 'structure', structure: analysedStructure }); process.send({ msgtype: 'structure', structure: analysedStructure });
process.send({ msgtype: 'structureTime', analysedTime }); process.send({ msgtype: 'structureTime', analysedTime });
@@ -64,7 +64,7 @@ async function handleIncrementalRefresh(forceSend) {
const driver = requireEngineDriver(storedConnection); const driver = requireEngineDriver(storedConnection);
setStatusName('checkStructure'); setStatusName('checkStructure');
const newStructure = await checkedAsyncCall( const newStructure = await checkedAsyncCall(
driver.analyseIncremental(systemConnection, analysedStructure, serverVersion) driver.analyseIncremental(dbhan, analysedStructure, serverVersion)
); );
analysedTime = new Date().getTime(); analysedTime = new Date().getTime();
if (newStructure != null) { if (newStructure != null) {
@@ -103,7 +103,7 @@ function setStatusName(name) {
async function readVersion() { async function readVersion() {
const driver = requireEngineDriver(storedConnection); const driver = requireEngineDriver(storedConnection);
const version = await driver.getVersion(systemConnection); const version = await driver.getVersion(dbhan);
process.send({ msgtype: 'version', version }); process.send({ msgtype: 'version', version });
serverVersion = version; serverVersion = version;
} }
@@ -114,8 +114,8 @@ async function handleConnect({ connection, structure, globalSettings }) {
if (!structure) setStatusName('pending'); if (!structure) setStatusName('pending');
const driver = requireEngineDriver(storedConnection); const driver = requireEngineDriver(storedConnection);
systemConnection = await checkedAsyncCall(connectUtility(driver, storedConnection, 'app')); dbhan = await checkedAsyncCall(connectUtility(driver, storedConnection, 'app'));
systemConnection.feedback = feedback => setStatus({ feedback }); dbhan.feedback = feedback => setStatus({ feedback });
await checkedAsyncCall(readVersion()); await checkedAsyncCall(readVersion());
if (structure) { if (structure) {
analysedStructure = structure; analysedStructure = structure;
@@ -138,7 +138,7 @@ async function handleConnect({ connection, structure, globalSettings }) {
} }
function waitConnected() { function waitConnected() {
if (systemConnection) return Promise.resolve(); if (dbhan) return Promise.resolve();
return new Promise((resolve, reject) => { return new Promise((resolve, reject) => {
afterConnectCallbacks.push([resolve, reject]); afterConnectCallbacks.push([resolve, reject]);
}); });
@@ -163,7 +163,7 @@ async function handleRunScript({ msgid, sql, useTransaction }, skipReadonlyCheck
const driver = requireEngineDriver(storedConnection); const driver = requireEngineDriver(storedConnection);
try { try {
if (!skipReadonlyCheck) ensureExecuteCustomScript(driver); if (!skipReadonlyCheck) ensureExecuteCustomScript(driver);
await driver.script(systemConnection, sql, { useTransaction }); await driver.script(dbhan, sql, { useTransaction });
process.send({ msgtype: 'response', msgid }); process.send({ msgtype: 'response', msgid });
} catch (err) { } catch (err) {
process.send({ msgtype: 'response', msgid, errorMessage: err.message }); process.send({ msgtype: 'response', msgid, errorMessage: err.message });
@@ -175,7 +175,7 @@ async function handleRunOperation({ msgid, operation, useTransaction }, skipRead
const driver = requireEngineDriver(storedConnection); const driver = requireEngineDriver(storedConnection);
try { try {
if (!skipReadonlyCheck) ensureExecuteCustomScript(driver); if (!skipReadonlyCheck) ensureExecuteCustomScript(driver);
await driver.operation(systemConnection, operation, { useTransaction }); await driver.operation(dbhan, operation, { useTransaction });
process.send({ msgtype: 'response', msgid }); process.send({ msgtype: 'response', msgid });
} catch (err) { } catch (err) {
process.send({ msgtype: 'response', msgid, errorMessage: err.message }); process.send({ msgtype: 'response', msgid, errorMessage: err.message });
@@ -188,7 +188,7 @@ async function handleQueryData({ msgid, sql }, skipReadonlyCheck = false) {
try { try {
if (!skipReadonlyCheck) ensureExecuteCustomScript(driver); if (!skipReadonlyCheck) ensureExecuteCustomScript(driver);
// console.log(sql); // console.log(sql);
const res = await driver.query(systemConnection, sql); const res = await driver.query(dbhan, sql);
process.send({ msgtype: 'response', msgid, ...res }); process.send({ msgtype: 'response', msgid, ...res });
} catch (err) { } catch (err) {
process.send({ msgtype: 'response', msgid, errorMessage: err.message || 'Error executing SQL script' }); process.send({ msgtype: 'response', msgid, errorMessage: err.message || 'Error executing SQL script' });
@@ -213,20 +213,25 @@ async function handleDriverDataCore(msgid, callMethod) {
} }
} }
async function handleSchemaList({ msgid }) {
logger.debug('Loading schema list');
return handleDriverDataCore(msgid, driver => driver.listSchemas(dbhan));
}
async function handleCollectionData({ msgid, options }) { async function handleCollectionData({ msgid, options }) {
return handleDriverDataCore(msgid, driver => driver.readCollection(systemConnection, options)); return handleDriverDataCore(msgid, driver => driver.readCollection(dbhan, options));
} }
async function handleLoadKeys({ msgid, root, filter }) { async function handleLoadKeys({ msgid, root, filter }) {
return handleDriverDataCore(msgid, driver => driver.loadKeys(systemConnection, root, filter)); return handleDriverDataCore(msgid, driver => driver.loadKeys(dbhan, root, filter));
} }
async function handleExportKeys({ msgid, options }) { async function handleExportKeys({ msgid, options }) {
return handleDriverDataCore(msgid, driver => driver.exportKeys(systemConnection, options)); return handleDriverDataCore(msgid, driver => driver.exportKeys(dbhan, options));
} }
async function handleLoadKeyInfo({ msgid, key }) { async function handleLoadKeyInfo({ msgid, key }) {
return handleDriverDataCore(msgid, driver => driver.loadKeyInfo(systemConnection, key)); return handleDriverDataCore(msgid, driver => driver.loadKeyInfo(dbhan, key));
} }
async function handleCallMethod({ msgid, method, args }) { async function handleCallMethod({ msgid, method, args }) {
@@ -236,17 +241,17 @@ async function handleCallMethod({ msgid, method, args }) {
} }
ensureExecuteCustomScript(driver); ensureExecuteCustomScript(driver);
return driver.callMethod(systemConnection, method, args); return driver.callMethod(dbhan, method, args);
}); });
} }
async function handleLoadKeyTableRange({ msgid, key, cursor, count }) { async function handleLoadKeyTableRange({ msgid, key, cursor, count }) {
return handleDriverDataCore(msgid, driver => driver.loadKeyTableRange(systemConnection, key, cursor, count)); return handleDriverDataCore(msgid, driver => driver.loadKeyTableRange(dbhan, key, cursor, count));
} }
async function handleLoadFieldValues({ msgid, schemaName, pureName, field, search }) { async function handleLoadFieldValues({ msgid, schemaName, pureName, field, search }) {
return handleDriverDataCore(msgid, driver => return handleDriverDataCore(msgid, driver =>
driver.loadFieldValues(systemConnection, { schemaName, pureName }, field, search) driver.loadFieldValues(dbhan, { schemaName, pureName }, field, search)
); );
} }
@@ -264,7 +269,7 @@ async function handleUpdateCollection({ msgid, changeSet }) {
const driver = requireEngineDriver(storedConnection); const driver = requireEngineDriver(storedConnection);
try { try {
ensureExecuteCustomScript(driver); ensureExecuteCustomScript(driver);
const result = await driver.updateCollection(systemConnection, changeSet); const result = await driver.updateCollection(dbhan, changeSet);
process.send({ msgtype: 'response', msgid, result }); process.send({ msgtype: 'response', msgid, result });
} catch (err) { } catch (err) {
process.send({ msgtype: 'response', msgid, errorMessage: err.message }); process.send({ msgtype: 'response', msgid, errorMessage: err.message });
@@ -277,7 +282,7 @@ async function handleSqlPreview({ msgid, objects, options }) {
try { try {
const dmp = driver.createDumper(); const dmp = driver.createDumper();
const generator = new SqlGenerator(analysedStructure, options, objects, dmp, driver, systemConnection); const generator = new SqlGenerator(analysedStructure, options, objects, dmp, driver, dbhan);
await generator.dump(); await generator.dump();
process.send({ msgtype: 'response', msgid, sql: dmp.s, isTruncated: generator.isTruncated }); process.send({ msgtype: 'response', msgid, sql: dmp.s, isTruncated: generator.isTruncated });
@@ -297,7 +302,7 @@ async function handleGenerateDeploySql({ msgid, modelFolder }) {
try { try {
const res = await generateDeploySql({ const res = await generateDeploySql({
systemConnection, systemConnection: dbhan,
connection: storedConnection, connection: storedConnection,
analysedStructure, analysedStructure,
modelFolder, modelFolder,
@@ -337,6 +342,7 @@ const messageHandlers = {
loadFieldValues: handleLoadFieldValues, loadFieldValues: handleLoadFieldValues,
sqlSelect: handleSqlSelect, sqlSelect: handleSqlSelect,
exportKeys: handleExportKeys, exportKeys: handleExportKeys,
schemaList: handleSchemaList,
// runCommand: handleRunCommand, // runCommand: handleRunCommand,
}; };

View File

@@ -1,11 +1,11 @@
import { DatabaseInfo, DatabaseModification, EngineDriver, SqlDialect } from 'dbgate-types'; import { DatabaseHandle, DatabaseInfo, DatabaseModification, EngineDriver, SqlDialect } from 'dbgate-types';
import _sortBy from 'lodash/sortBy'; import _sortBy from 'lodash/sortBy';
import _groupBy from 'lodash/groupBy'; import _groupBy from 'lodash/groupBy';
import _pick from 'lodash/pick'; import _pick from 'lodash/pick';
import _compact from 'lodash/compact'; import _compact from 'lodash/compact';
import { getLogger } from './getLogger'; import { getLogger } from './getLogger';
import { type Logger } from 'pinomin'; import { type Logger } from 'pinomin';
import stableStringify from 'json-stable-stringify'; import { isCompositeDbName, splitCompositeDbName } from './schemaInfoTools';
const logger = getLogger('dbAnalyser'); const logger = getLogger('dbAnalyser');
@@ -41,7 +41,7 @@ export class DatabaseAnalyser {
dialect: SqlDialect; dialect: SqlDialect;
logger: Logger; logger: Logger;
constructor(public pool, public driver: EngineDriver, version) { constructor(public dbhan: DatabaseHandle, public driver: EngineDriver, version) {
this.dialect = (driver?.dialectByVersion && driver?.dialectByVersion(version)) || driver?.dialect; this.dialect = (driver?.dialectByVersion && driver?.dialectByVersion(version)) || driver?.dialect;
this.logger = logger; this.logger = logger;
} }
@@ -71,10 +71,7 @@ export class DatabaseAnalyser {
async fullAnalysis() { async fullAnalysis() {
const res = this.addEngineField(await this._runAnalysis()); const res = this.addEngineField(await this._runAnalysis());
// console.log('FULL ANALYSIS', res); // console.log('FULL ANALYSIS', res);
return { return res;
...res,
schemas: await this.readSchemaList(),
};
} }
async singleObjectAnalysis(name, typeField) { async singleObjectAnalysis(name, typeField) {
@@ -91,10 +88,6 @@ export class DatabaseAnalyser {
return obj; return obj;
} }
async readSchemaList() {
return undefined;
}
async incrementalAnalysis(structure) { async incrementalAnalysis(structure) {
this.structure = structure; this.structure = structure;
@@ -107,35 +100,22 @@ export class DatabaseAnalyser {
const structureModifications = modifications.filter(x => x.action != 'setTableRowCounts'); const structureModifications = modifications.filter(x => x.action != 'setTableRowCounts');
const setTableRowCounts = modifications.find(x => x.action == 'setTableRowCounts'); const setTableRowCounts = modifications.find(x => x.action == 'setTableRowCounts');
let structureUpdated = null; let structureWithRowCounts = null;
if (setTableRowCounts) { if (setTableRowCounts) {
const newStructure = mergeTableRowCounts(structure, setTableRowCounts.rowCounts); const newStructure = mergeTableRowCounts(structure, setTableRowCounts.rowCounts);
if (areDifferentRowCounts(structure, newStructure)) { if (areDifferentRowCounts(structure, newStructure)) {
structureUpdated = newStructure; structureWithRowCounts = newStructure;
} }
} }
const schemas = await this.readSchemaList();
const areSchemasDifferent = stableStringify(schemas) != stableStringify(this.structure.schemas);
if (areSchemasDifferent) {
structureUpdated = {
...structure,
...structureUpdated,
schemas,
};
}
if (structureModifications.length == 0) { if (structureModifications.length == 0) {
return structureUpdated ? this.addEngineField(structureUpdated) : null; return structureWithRowCounts ? this.addEngineField(structureWithRowCounts) : null;
} }
this.modifications = structureModifications; this.modifications = structureModifications;
if (structureUpdated) this.structure = structureUpdated; if (structureWithRowCounts) this.structure = structureWithRowCounts;
logger.info({ modifications: this.modifications }, 'DB modifications detected:'); logger.info({ modifications: this.modifications }, 'DB modifications detected:');
return { return this.addEngineField(this.mergeAnalyseResult(await this._runAnalysis()));
...this.addEngineField(this.mergeAnalyseResult(await this._runAnalysis())),
schemas,
};
} }
mergeAnalyseResult(newlyAnalysed) { mergeAnalyseResult(newlyAnalysed) {
@@ -201,8 +181,19 @@ export class DatabaseAnalyser {
// return this.createQueryCore('=OBJECT_ID_CONDITION', typeFields) != ' is not null'; // return this.createQueryCore('=OBJECT_ID_CONDITION', typeFields) != ' is not null';
// } // }
getDefaultSchemaNameCondition() {
return 'is not null';
}
createQuery(template, typeFields, replacements = {}) { createQuery(template, typeFields, replacements = {}) {
return this.createQueryCore(this.processQueryReplacements(template, replacements), typeFields); let query = this.createQueryCore(this.processQueryReplacements(template, replacements), typeFields);
const dbname = this.dbhan.database;
const schemaCondition = isCompositeDbName(dbname)
? `= '${splitCompositeDbName(dbname).schema}' `
: ` ${this.getDefaultSchemaNameCondition()} `;
return query?.replace(/=SCHEMA_NAME_CONDITION/g, schemaCondition);
} }
processQueryReplacements(query, replacements) { processQueryReplacements(query, replacements) {
@@ -263,8 +254,8 @@ export class DatabaseAnalyser {
} }
feedback(obj) { feedback(obj) {
if (this.pool.feedback) { if (this.dbhan.feedback) {
this.pool.feedback(obj); this.dbhan.feedback(obj);
} }
if (obj && obj.analysingMessage) { if (obj && obj.analysingMessage) {
logger.debug(obj.analysingMessage); logger.debug(obj.analysingMessage);
@@ -339,7 +330,7 @@ export class DatabaseAnalyser {
}; };
} }
try { try {
const res = await this.driver.query(this.pool, sql); const res = await this.driver.query(this.dbhan, sql);
this.logger.debug({ rows: res.rows.length, template }, `Loaded analyser query`); this.logger.debug({ rows: res.rows.length, template }, `Loaded analyser query`);
return res; return res;
} catch (err) { } catch (err) {
@@ -359,7 +350,6 @@ export class DatabaseAnalyser {
functions: [], functions: [],
procedures: [], procedures: [],
triggers: [], triggers: [],
schemas: [],
}; };
} }

View File

@@ -5,7 +5,7 @@ import { prepareTableForImport } from './tableTransforms';
const logger = getLogger('bulkStreamBase'); const logger = getLogger('bulkStreamBase');
export function createBulkInsertStreamBase(driver: EngineDriver, stream, pool, name, options: WriteTableOptions): any { export function createBulkInsertStreamBase(driver: EngineDriver, stream, dbhan, name, options: WriteTableOptions): any {
const fullNameQuoted = name.schemaName const fullNameQuoted = name.schemaName
? `${driver.dialect.quoteIdentifier(name.schemaName)}.${driver.dialect.quoteIdentifier(name.pureName)}` ? `${driver.dialect.quoteIdentifier(name.schemaName)}.${driver.dialect.quoteIdentifier(name.pureName)}`
: driver.dialect.quoteIdentifier(name.pureName); : driver.dialect.quoteIdentifier(name.pureName);
@@ -29,22 +29,22 @@ export function createBulkInsertStreamBase(driver: EngineDriver, stream, pool, n
}; };
writable.checkStructure = async () => { writable.checkStructure = async () => {
let structure = await driver.analyseSingleTable(pool, name); let structure = await driver.analyseSingleTable(dbhan, name);
// console.log('ANALYSING', name, structure); // console.log('ANALYSING', name, structure);
if (structure && options.dropIfExists) { if (structure && options.dropIfExists) {
logger.info(`Dropping table ${fullNameQuoted}`); logger.info(`Dropping table ${fullNameQuoted}`);
await driver.script(pool, `DROP TABLE ${fullNameQuoted}`); await driver.script(dbhan, `DROP TABLE ${fullNameQuoted}`);
} }
if (options.createIfNotExists && (!structure || options.dropIfExists)) { if (options.createIfNotExists && (!structure || options.dropIfExists)) {
const dmp = driver.createDumper(); const dmp = driver.createDumper();
const createdTableInfo = driver.adaptTableInfo(prepareTableForImport({ ...writable.structure, ...name })); const createdTableInfo = driver.adaptTableInfo(prepareTableForImport({ ...writable.structure, ...name }));
dmp.createTable(createdTableInfo); dmp.createTable(createdTableInfo);
logger.info({ sql: dmp.s }, `Creating table ${fullNameQuoted}`); logger.info({ sql: dmp.s }, `Creating table ${fullNameQuoted}`);
await driver.script(pool, dmp.s); await driver.script(dbhan, dmp.s);
structure = await driver.analyseSingleTable(pool, name); structure = await driver.analyseSingleTable(dbhan, name);
} }
if (options.truncate) { if (options.truncate) {
await driver.script(pool, `TRUNCATE TABLE ${fullNameQuoted}`); await driver.script(dbhan, `TRUNCATE TABLE ${fullNameQuoted}`);
} }
writable.columnNames = _intersection( writable.columnNames = _intersection(
@@ -74,7 +74,7 @@ export function createBulkInsertStreamBase(driver: EngineDriver, stream, pool, n
dmp.putRaw(';'); dmp.putRaw(';');
// require('fs').writeFileSync('/home/jena/test.sql', dmp.s); // require('fs').writeFileSync('/home/jena/test.sql', dmp.s);
// console.log(dmp.s); // console.log(dmp.s);
await driver.query(pool, dmp.s, { discardResult: true }); await driver.query(dbhan, dmp.s, { discardResult: true });
} else { } else {
for (const row of rows) { for (const row of rows) {
const dmp = driver.createDumper(); const dmp = driver.createDumper();
@@ -85,13 +85,13 @@ export function createBulkInsertStreamBase(driver: EngineDriver, stream, pool, n
dmp.putRaw('('); dmp.putRaw('(');
dmp.putCollection(',', writable.columnNames, col => dmp.putValue(row[col as string])); dmp.putCollection(',', writable.columnNames, col => dmp.putValue(row[col as string]));
dmp.putRaw(')'); dmp.putRaw(')');
await driver.query(pool, dmp.s, { discardResult: true }); await driver.query(dbhan, dmp.s, { discardResult: true });
} }
} }
if (options.commitAfterInsert) { if (options.commitAfterInsert) {
const dmp = driver.createDumper(); const dmp = driver.createDumper();
dmp.commitTransaction(); dmp.commitTransaction();
await driver.query(pool, dmp.s, { discardResult: true }); await driver.query(dbhan, dmp.s, { discardResult: true });
} }
}; };

View File

@@ -191,4 +191,8 @@ export const driverBase = {
adaptTableInfo(table) { adaptTableInfo(table) {
return table; return table;
}, },
async listSchemas(pool) {
return null;
},
}; };

View File

@@ -23,3 +23,4 @@ export * from './getLogger';
export * from './getConnectionLabel'; export * from './getConnectionLabel';
export * from './detectSqlFilterBehaviour'; export * from './detectSqlFilterBehaviour';
export * from './filterBehaviours'; export * from './filterBehaviours';
export * from './schemaInfoTools';

View File

@@ -0,0 +1,38 @@
import { SchemaInfo, SqlDialect } from 'dbgate-types';
export function findDefaultSchema(schemaList: SchemaInfo[], dialect: SqlDialect, schemaInStorage: string = null) {
if (!schemaList) {
return null;
}
if (schemaInStorage && schemaList.find(x => x.schemaName == schemaInStorage)) {
return schemaInStorage;
}
const dynamicDefaultSchema = schemaList.find(x => x.isDefault);
if (dynamicDefaultSchema) {
return dynamicDefaultSchema.schemaName;
}
if (dialect?.defaultSchemaName && schemaList.find(x => x.schemaName == dialect.defaultSchemaName)) {
return dialect.defaultSchemaName;
}
return schemaList[0]?.schemaName;
}
export function isCompositeDbName(name: string) {
return name?.includes('::');
}
export function splitCompositeDbName(name: string) {
if (!isCompositeDbName(name)) return null;
const [database, schema] = name.split('::');
return { database, schema };
}
export function extractDbNameFromComposite(name: string) {
return isCompositeDbName(name) ? splitCompositeDbName(name).database : name;
}
export function extractSchemaNameFromComposite(name: string) {
return splitCompositeDbName(name)?.schema;
}

View File

@@ -126,6 +126,7 @@ export interface TriggerInfo extends SqlObjectInfo {}
export interface SchemaInfo { export interface SchemaInfo {
objectId?: string; objectId?: string;
schemaName: string; schemaName: string;
isDefault?: boolean;
} }
export interface DatabaseInfoObjects { export interface DatabaseInfoObjects {
@@ -139,7 +140,5 @@ export interface DatabaseInfoObjects {
} }
export interface DatabaseInfo extends DatabaseInfoObjects { export interface DatabaseInfo extends DatabaseInfoObjects {
schemas?: SchemaInfo[];
engine?: string; engine?: string;
defaultSchema?: string;
} }

View File

@@ -11,6 +11,7 @@ import {
FunctionInfo, FunctionInfo,
TriggerInfo, TriggerInfo,
CollectionInfo, CollectionInfo,
SchemaInfo,
} from './dbinfo'; } from './dbinfo';
import { FilterBehaviour } from './filter-type'; import { FilterBehaviour } from './filter-type';
@@ -129,6 +130,15 @@ export interface FilterBehaviourProvider {
getFilterBehaviour(dataType: string, standardFilterBehaviours: { [id: string]: FilterBehaviour }): FilterBehaviour; getFilterBehaviour(dataType: string, standardFilterBehaviours: { [id: string]: FilterBehaviour }): FilterBehaviour;
} }
export interface DatabaseHandle {
client: any;
database?: string;
feedback?: (message: any) => void;
getDatabase?: () => any;
connectionType?: string;
treeKeySeparator?: string;
}
export interface EngineDriver extends FilterBehaviourProvider { export interface EngineDriver extends FilterBehaviourProvider {
engine: string; engine: string;
title: string; title: string;
@@ -170,52 +180,52 @@ export interface EngineDriver extends FilterBehaviourProvider {
defaultSocketPath?: string; defaultSocketPath?: string;
authTypeLabel?: string; authTypeLabel?: string;
importExportArgs?: any[]; importExportArgs?: any[];
connect({ server, port, user, password, database }): Promise<any>; connect({ server, port, user, password, database }): Promise<DatabaseHandle>;
close(pool): Promise<any>; close(dbhan: DatabaseHandle): Promise<any>;
query(pool: any, sql: string, options?: QueryOptions): Promise<QueryResult>; query(dbhan: DatabaseHandle, sql: string, options?: QueryOptions): Promise<QueryResult>;
stream(pool: any, sql: string, options: StreamOptions); stream(dbhan: DatabaseHandle, sql: string, options: StreamOptions);
readQuery(pool: any, sql: string, structure?: TableInfo): Promise<stream.Readable>; readQuery(dbhan: DatabaseHandle, sql: string, structure?: TableInfo): Promise<stream.Readable>;
readJsonQuery(pool: any, query: any, structure?: TableInfo): Promise<stream.Readable>; readJsonQuery(dbhan: DatabaseHandle, query: any, structure?: TableInfo): Promise<stream.Readable>;
writeTable(pool: any, name: NamedObjectInfo, options: WriteTableOptions): Promise<stream.Writable>; writeTable(dbhan: DatabaseHandle, name: NamedObjectInfo, options: WriteTableOptions): Promise<stream.Writable>;
analyseSingleObject( analyseSingleObject(
pool: any, dbhan: DatabaseHandle,
name: NamedObjectInfo, name: NamedObjectInfo,
objectTypeField: keyof DatabaseInfo objectTypeField: keyof DatabaseInfo
): Promise<TableInfo | ViewInfo | ProcedureInfo | FunctionInfo | TriggerInfo>; ): Promise<TableInfo | ViewInfo | ProcedureInfo | FunctionInfo | TriggerInfo>;
analyseSingleTable(pool: any, name: NamedObjectInfo): Promise<TableInfo>; analyseSingleTable(dbhan: DatabaseHandle, name: NamedObjectInfo): Promise<TableInfo>;
getVersion(pool: any): Promise<{ version: string }>; getVersion(dbhan: DatabaseHandle): Promise<{ version: string }>;
listDatabases(pool: any): Promise< listDatabases(dbhan: DatabaseHandle): Promise<
{ {
name: string; name: string;
}[] }[]
>; >;
loadKeys(pool, root: string, filter?: string): Promise; loadKeys(dbhan: DatabaseHandle, root: string, filter?: string): Promise;
exportKeys(pool, options: {}): Promise; exportKeys(dbhan: DatabaseHandle, options: {}): Promise;
loadKeyInfo(pool, key): Promise; loadKeyInfo(dbhan: DatabaseHandle, key): Promise;
loadKeyTableRange(pool, key, cursor, count): Promise; loadKeyTableRange(dbhan: DatabaseHandle, key, cursor, count): Promise;
loadFieldValues(pool: any, name: NamedObjectInfo, field: string, search: string): Promise; loadFieldValues(dbhan: DatabaseHandle, name: NamedObjectInfo, field: string, search: string): Promise;
analyseFull(pool: any, serverVersion): Promise<DatabaseInfo>; analyseFull(dbhan: DatabaseHandle, serverVersion): Promise<DatabaseInfo>;
analyseIncremental(pool: any, structure: DatabaseInfo, serverVersion): Promise<DatabaseInfo>; analyseIncremental(dbhan: DatabaseHandle, structure: DatabaseInfo, serverVersion): Promise<DatabaseInfo>;
dialect: SqlDialect; dialect: SqlDialect;
dialectByVersion(version): SqlDialect; dialectByVersion(version): SqlDialect;
createDumper(options = null): SqlDumper; createDumper(options = null): SqlDumper;
createBackupDumper(pool: any, options): Promise<SqlBackupDumper>; createBackupDumper(dbhan: DatabaseHandle, options): Promise<SqlBackupDumper>;
getAuthTypes(): EngineAuthType[]; getAuthTypes(): EngineAuthType[];
readCollection(pool: any, options: ReadCollectionOptions): Promise<any>; readCollection(dbhan: DatabaseHandle, options: ReadCollectionOptions): Promise<any>;
updateCollection(pool: any, changeSet: any): Promise<any>; updateCollection(dbhan: DatabaseHandle, changeSet: any): Promise<any>;
getCollectionUpdateScript(changeSet: any, collectionInfo: CollectionInfo): string; getCollectionUpdateScript(changeSet: any, collectionInfo: CollectionInfo): string;
createDatabase(pool: any, name: string): Promise; createDatabase(dbhan: DatabaseHandle, name: string): Promise;
dropDatabase(pool: any, name: string): Promise; dropDatabase(dbhan: DatabaseHandle, name: string): Promise;
getQuerySplitterOptions(usage: 'stream' | 'script' | 'editor'): any; getQuerySplitterOptions(usage: 'stream' | 'script' | 'editor'): any;
script(pool: any, sql: string, options?: RunScriptOptions): Promise; script(dbhan: DatabaseHandle, sql: string, options?: RunScriptOptions): Promise;
operation(pool: any, operation: {}, options?: RunScriptOptions): Promise; operation(dbhan: DatabaseHandle, operation: {}, options?: RunScriptOptions): Promise;
getNewObjectTemplates(): NewObjectTemplate[]; getNewObjectTemplates(): NewObjectTemplate[];
// direct call of pool method, only some methods could be supported, on only some drivers // direct call of dbhan.client method, only some methods could be supported, on only some drivers
callMethod(pool, method, args); callMethod(dbhan: DatabaseHandle, method, args);
serverSummary(pool): Promise<ServerSummary>; serverSummary(dbhan: DatabaseHandle): Promise<ServerSummary>;
summaryCommand(pool, command, row): Promise<void>; summaryCommand(dbhan: DatabaseHandle, command, row): Promise<void>;
startProfiler(pool, options): Promise<any>; startProfiler(dbhan: DatabaseHandle, options): Promise<any>;
stopProfiler(pool, profiler): Promise<void>; stopProfiler(dbhan: DatabaseHandle, profiler): Promise<void>;
getRedirectAuthUrl(connection, options): Promise<{ url: string; sid: string }>; getRedirectAuthUrl(connection, options): Promise<{ url: string; sid: string }>;
getAuthTokenFromCode(connection, options): Promise<string>; getAuthTokenFromCode(connection, options): Promise<string>;
getAccessTokenFromAuth(connection, req): Promise<string | null>; getAccessTokenFromAuth(connection, req): Promise<string | null>;
@@ -230,6 +240,7 @@ export interface EngineDriver extends FilterBehaviourProvider {
): any[]; ): any[];
// adapts table info from different source (import, other database) to be suitable for this database // adapts table info from different source (import, other database) to be suitable for this database
adaptTableInfo(table: TableInfo): TableInfo; adaptTableInfo(table: TableInfo): TableInfo;
listSchemas(dbhan: DatabaseHandle): SchemaInfo[];
analyserClass?: any; analyserClass?: any;
dumperClass?: any; dumperClass?: any;

View File

@@ -14,7 +14,7 @@
export function openConnection(connection) { export function openConnection(connection) {
const config = getCurrentConfig(); const config = getCurrentConfig();
if (connection.singleDatabase) { if (connection.singleDatabase) {
currentDatabase.set({ connection, name: connection.defaultDatabase }); switchCurrentDatabase({ connection, name: connection.defaultDatabase });
apiCall('database-connections/refresh', { apiCall('database-connections/refresh', {
conid: connection._id, conid: connection._id,
database: connection.defaultDatabase, database: connection.defaultDatabase,
@@ -60,7 +60,7 @@
if (electron) { if (electron) {
apiCall('database-connections/disconnect', { conid, database: currentDb.name }); apiCall('database-connections/disconnect', { conid, database: currentDb.name });
} }
currentDatabase.set(null); switchCurrentDatabase(null);
} }
closeMultipleTabs(closeCondition); closeMultipleTabs(closeCondition);
// if (data.unsaved) { // if (data.unsaved) {
@@ -107,6 +107,7 @@
import { tick } from 'svelte'; import { tick } from 'svelte';
import { getConnectionLabel } from 'dbgate-tools'; import { getConnectionLabel } from 'dbgate-tools';
import hasPermission from '../utility/hasPermission'; import hasPermission from '../utility/hasPermission';
import { switchCurrentDatabase } from '../utility/common';
export let data; export let data;
export let passProps; export let passProps;
@@ -142,7 +143,7 @@
return; return;
} }
if ($openedSingleDatabaseConnections.includes(data._id)) { if ($openedSingleDatabaseConnections.includes(data._id)) {
currentDatabase.set({ connection: data, name: data.defaultDatabase }); switchCurrentDatabase({ connection: data, name: data.defaultDatabase });
return; return;
} }
if ($openedConnections.includes(data._id)) { if ($openedConnections.includes(data._id)) {

View File

@@ -26,7 +26,7 @@
apiCall('database-connections/disconnect', { conid, database }); apiCall('database-connections/disconnect', { conid, database });
} }
if (getCurrentDatabase()?.connection?._id == conid && getCurrentDatabase()?.name == database) { if (getCurrentDatabase()?.connection?._id == conid && getCurrentDatabase()?.name == database) {
currentDatabase.set(null); switchCurrentDatabase(null);
} }
openedSingleDatabaseConnections.update(list => list.filter(x => x != conid)); openedSingleDatabaseConnections.update(list => list.filter(x => x != conid));
closeMultipleTabs(closeCondition); closeMultipleTabs(closeCondition);
@@ -262,6 +262,17 @@
}); });
}; };
// Broadcasts a 'schema-list-changed' event for this database so other views
// invalidate their cached schema lists, then reloads the list for this one.
const handleRefreshSchemas = () => {
  const dbid = { conid: connection._id, database: name };
  apiCall('database-connections/dispatch-database-changed-event', {
    event: 'schema-list-changed',
    ...dbid,
  });
  loadSchemaList(dbid.conid, dbid.database);
};
async function handleConfirmSql(sql) { async function handleConfirmSql(sql) {
saveScriptToDatabase({ conid: connection._id, database: name }, sql, false); saveScriptToDatabase({ conid: connection._id, database: name }, sql, false);
} }
@@ -290,6 +301,8 @@
onClick: handleNewPerspective, onClick: handleNewPerspective,
text: 'Design perspective query', text: 'Design perspective query',
}, },
connection.useSeparateSchemas && { onClick: handleRefreshSchemas, text: 'Refresh schemas' },
{ divider: true }, { divider: true },
isSqlOrDoc && isSqlOrDoc &&
!connection.isReadOnly && !connection.isReadOnly &&
@@ -364,6 +377,7 @@
getCurrentDatabase, getCurrentDatabase,
getExtensions, getExtensions,
getOpenedTabs, getOpenedTabs,
loadingSchemaLists,
openedConnections, openedConnections,
openedSingleDatabaseConnections, openedSingleDatabaseConnections,
pinnedDatabases, pinnedDatabases,
@@ -374,7 +388,7 @@
import openNewTab from '../utility/openNewTab'; import openNewTab from '../utility/openNewTab';
import AppObjectCore from './AppObjectCore.svelte'; import AppObjectCore from './AppObjectCore.svelte';
import { showSnackbarError, showSnackbarSuccess } from '../utility/snackbar'; import { showSnackbarError, showSnackbarSuccess } from '../utility/snackbar';
import { findEngineDriver, getConnectionLabel } from 'dbgate-tools'; import { extractDbNameFromComposite, findEngineDriver, getConnectionLabel } from 'dbgate-tools';
import InputTextModal from '../modals/InputTextModal.svelte'; import InputTextModal from '../modals/InputTextModal.svelte';
import { getDatabaseInfo, useUsedApps } from '../utility/metadataLoaders'; import { getDatabaseInfo, useUsedApps } from '../utility/metadataLoaders';
import { openJsonDocument } from '../tabs/JsonTab.svelte'; import { openJsonDocument } from '../tabs/JsonTab.svelte';
@@ -391,6 +405,7 @@
import hasPermission from '../utility/hasPermission'; import hasPermission from '../utility/hasPermission';
import { openImportExportTab } from '../utility/importExportTools'; import { openImportExportTab } from '../utility/importExportTools';
import newTable from '../tableeditor/newTable'; import newTable from '../tableeditor/newTable';
import { loadSchemaList, switchCurrentDatabase } from '../utility/common';
export let data; export let data;
export let passProps; export let passProps;
@@ -408,6 +423,7 @@
$: isPinned = !!$pinnedDatabases.find(x => x?.name == data.name && x?.connection?._id == data.connection?._id); $: isPinned = !!$pinnedDatabases.find(x => x?.name == data.name && x?.connection?._id == data.connection?._id);
$: apps = useUsedApps(); $: apps = useUsedApps();
$: isLoadingSchemas = $loadingSchemaLists[`${data?.connection?._id}::${data?.name}`];
</script> </script>
<AppObjectCore <AppObjectCore
@@ -419,8 +435,8 @@
colorMark={passProps?.connectionColorFactory && colorMark={passProps?.connectionColorFactory &&
passProps?.connectionColorFactory({ conid: _.get(data.connection, '_id'), database: data.name }, null, null, false)} passProps?.connectionColorFactory({ conid: _.get(data.connection, '_id'), database: data.name }, null, null, false)}
isBold={_.get($currentDatabase, 'connection._id') == _.get(data.connection, '_id') && isBold={_.get($currentDatabase, 'connection._id') == _.get(data.connection, '_id') &&
_.get($currentDatabase, 'name') == data.name} extractDbNameFromComposite(_.get($currentDatabase, 'name')) == data.name}
on:click={() => ($currentDatabase = data)} on:click={() => switchCurrentDatabase(data)}
on:dragstart on:dragstart
on:dragenter on:dragenter
on:dragend on:dragend
@@ -430,6 +446,7 @@
.find(x => x.isNewQuery) .find(x => x.isNewQuery)
.onClick(); .onClick();
}} }}
statusIcon={isLoadingSchemas ? 'icon loading' : ''}
menu={createMenu} menu={createMenu}
showPinnedInsteadOfUnpin={passProps?.showPinnedInsteadOfUnpin} showPinnedInsteadOfUnpin={passProps?.showPinnedInsteadOfUnpin}
onPin={isPinned ? null : () => pinnedDatabases.update(list => [...list, data])} onPin={isPinned ? null : () => pinnedDatabases.update(list => [...list, data])}

View File

@@ -46,7 +46,7 @@
databaseList.push({ databaseList.push({
text: `${db.name} on ${getConnectionLabel(connection)}`, text: `${db.name} on ${getConnectionLabel(connection)}`,
icon: 'img database', icon: 'img database',
onClick: () => currentDatabase.set({ connection, name: db.name }), onClick: () => switchCurrentDatabase({ connection, name: db.name }),
}); });
} }
} }
@@ -80,7 +80,7 @@
import { useConnectionList, useDatabaseInfo } from '../utility/metadataLoaders'; import { useConnectionList, useDatabaseInfo } from '../utility/metadataLoaders';
import { getLocalStorage } from '../utility/storageCache'; import { getLocalStorage } from '../utility/storageCache';
import registerCommand from './registerCommand'; import registerCommand from './registerCommand';
import { formatKeyText } from '../utility/common'; import { formatKeyText, switchCurrentDatabase } from '../utility/common';
let domInput; let domInput;
let filter = ''; let filter = '';

View File

@@ -3,6 +3,7 @@ import { currentDatabase, getCurrentDatabase } from '../stores';
import getElectron from '../utility/getElectron'; import getElectron from '../utility/getElectron';
import registerCommand from './registerCommand'; import registerCommand from './registerCommand';
import { apiCall } from '../utility/api'; import { apiCall } from '../utility/api';
import { switchCurrentDatabase } from '../utility/common';
registerCommand({ registerCommand({
id: 'database.changeState', id: 'database.changeState',
@@ -40,7 +41,7 @@ registerCommand({
onClick: () => { onClick: () => {
const electron = getElectron(); const electron = getElectron();
if (electron) apiCall('database-connections/disconnect', dbid); if (electron) apiCall('database-connections/disconnect', dbid);
currentDatabase.set(null); switchCurrentDatabase(null);
}, },
}, },
]; ];

View File

@@ -2,6 +2,7 @@ import _ from 'lodash';
import { recentDatabases, currentDatabase, getRecentDatabases } from '../stores'; import { recentDatabases, currentDatabase, getRecentDatabases } from '../stores';
import registerCommand from './registerCommand'; import registerCommand from './registerCommand';
import { getConnectionLabel } from 'dbgate-tools'; import { getConnectionLabel } from 'dbgate-tools';
import { switchCurrentDatabase } from '../utility/common';
currentDatabase.subscribe(value => { currentDatabase.subscribe(value => {
if (!value) return; if (!value) return;
@@ -17,7 +18,7 @@ currentDatabase.subscribe(value => {
function switchDatabaseCommand(db) { function switchDatabaseCommand(db) {
return { return {
text: `${db.name} on ${getConnectionLabel(db?.connection, { allowExplicitDatabase: false })}`, text: `${db.name} on ${getConnectionLabel(db?.connection, { allowExplicitDatabase: false })}`,
onClick: () => currentDatabase.set(db), onClick: () => switchCurrentDatabase(db),
}; };
} }

View File

@@ -35,7 +35,7 @@ import { apiCall } from '../utility/api';
import runCommand from './runCommand'; import runCommand from './runCommand';
import { openWebLink } from '../utility/exportFileTools'; import { openWebLink } from '../utility/exportFileTools';
import { getSettings } from '../utility/metadataLoaders'; import { getSettings } from '../utility/metadataLoaders';
import { isMac } from '../utility/common'; import { isMac, switchCurrentDatabase } from '../utility/common';
import { doLogout, internalRedirectTo } from '../clientAuth'; import { doLogout, internalRedirectTo } from '../clientAuth';
import { disconnectServerConnection } from '../appobj/ConnectionAppObject.svelte'; import { disconnectServerConnection } from '../appobj/ConnectionAppObject.svelte';
import UploadErrorModal from '../modals/UploadErrorModal.svelte'; import UploadErrorModal from '../modals/UploadErrorModal.svelte';
@@ -347,7 +347,7 @@ registerCommand({
onConfirm: async file => { onConfirm: async file => {
const resp = await apiCall('connections/new-sqlite-database', { file }); const resp = await apiCall('connections/new-sqlite-database', { file });
const connection = resp; const connection = resp;
currentDatabase.set({ connection, name: `${file}.sqlite` }); switchCurrentDatabase({ connection, name: `${file}.sqlite` });
}, },
}); });
}, },

View File

@@ -1,15 +1,16 @@
<script lang="ts"> <script lang="ts">
import _ from 'lodash';
import { getFormContext } from '../forms/FormProviderCore.svelte'; import { getFormContext } from '../forms/FormProviderCore.svelte';
import FormSelectField from '../forms/FormSelectField.svelte'; import FormSelectField from '../forms/FormSelectField.svelte';
import { useDatabaseInfo, useDatabaseList } from '../utility/metadataLoaders'; import { useSchemaList } from '../utility/metadataLoaders';
export let conidName; export let conidName;
export let databaseName; export let databaseName;
const { values } = getFormContext(); const { values } = getFormContext();
$: dbinfo = useDatabaseInfo({ conid: $values[conidName], database: values[databaseName] }); $: schemaList = useSchemaList({ conid: $values[conidName], database: values[databaseName] });
$: schemaOptions = (($dbinfo && $dbinfo.schemas) || []).map(schema => ({ $: schemaOptions = (_.isArray($schemaList) ? $schemaList : []).map(schema => ({
value: schema.schemaName, value: schema.schemaName,
label: schema.schemaName, label: schema.schemaName,
})); }));

View File

@@ -1,4 +1,4 @@
<script context="module"> <script context="module" lang="ts">
export async function saveScriptToDatabase({ conid, database }, sql, syncModel = true) { export async function saveScriptToDatabase({ conid, database }, sql, syncModel = true) {
const resp = await apiCall('database-connections/run-script', { const resp = await apiCall('database-connections/run-script', {
conid, conid,
@@ -15,7 +15,7 @@
} }
} }
export async function runOperationOnDatabase({ conid, database }, operation, syncModel = true) { export async function runOperationOnDatabase({ conid, database }, operation, syncModel: string | boolean = true) {
const resp = await apiCall('database-connections/run-operation', { const resp = await apiCall('database-connections/run-operation', {
conid, conid,
database, database,
@@ -27,7 +27,11 @@
showModal(ErrorMessageModal, { title: 'Error when executing operation', message: errorMessage }); showModal(ErrorMessageModal, { title: 'Error when executing operation', message: errorMessage });
} else { } else {
showSnackbarSuccess('Saved to database'); showSnackbarSuccess('Saved to database');
if (syncModel) apiCall('database-connections/sync-model', { conid, database }); if (_.isString(syncModel)) {
apiCall('database-connections/dispatch-database-changed-event', { event: syncModel, conid, database });
} else if (syncModel) {
apiCall('database-connections/sync-model', { conid, database });
}
} }
} }
</script> </script>

View File

@@ -1,8 +1,10 @@
import _ from 'lodash'; import _ from 'lodash';
import { addCompleter, setCompleters } from 'ace-builds/src-noconflict/ext-language_tools'; import { addCompleter, setCompleters } from 'ace-builds/src-noconflict/ext-language_tools';
import { getDatabaseInfo } from '../utility/metadataLoaders'; import { getConnectionInfo, getDatabaseInfo, getSchemaList } from '../utility/metadataLoaders';
import analyseQuerySources from './analyseQuerySources'; import analyseQuerySources from './analyseQuerySources';
import { getStringSettingsValue } from '../settings/settingsTools'; import { getStringSettingsValue } from '../settings/settingsTools';
import { findEngineDriver, findDefaultSchema } from 'dbgate-tools';
import { getExtensions } from '../stores';
const COMMON_KEYWORDS = [ const COMMON_KEYWORDS = [
'select', 'select',
@@ -24,9 +26,9 @@ const COMMON_KEYWORDS = [
'go', 'go',
]; ];
function createTableLikeList(dbinfo, schemaCondition) { function createTableLikeList(schemaList, dbinfo, schemaCondition) {
return [ return [
...(dbinfo.schemas?.map(x => ({ ...(schemaList?.map(x => ({
name: x.schemaName, name: x.schemaName,
value: x.schemaName, value: x.schemaName,
caption: x.schemaName, caption: x.schemaName,
@@ -78,6 +80,10 @@ export function mountCodeCompletion({ conid, database, editor, getText }) {
const cursor = session.selection.cursor; const cursor = session.selection.cursor;
const line = session.getLine(cursor.row).slice(0, cursor.column); const line = session.getLine(cursor.row).slice(0, cursor.column);
const dbinfo = await getDatabaseInfo({ conid, database }); const dbinfo = await getDatabaseInfo({ conid, database });
const schemaList = await getSchemaList({ conid, database });
const connection = await getConnectionInfo({ conid });
const driver = findEngineDriver(connection, getExtensions());
const defaultSchema = findDefaultSchema(schemaList, driver.dialect);
const convertUpper = getStringSettingsValue('sqlEditor.sqlCommandsCase', 'upperCase') == 'upperCase'; const convertUpper = getStringSettingsValue('sqlEditor.sqlCommandsCase', 'upperCase') == 'upperCase';
@@ -147,9 +153,9 @@ export function mountCodeCompletion({ conid, database, editor, getText }) {
]; ];
} }
} else { } else {
const schema = (dbinfo.schemas || []).find(x => x.schemaName == colMatch[1]); const schema = (schemaList || []).find(x => x.schemaName == colMatch[1]);
if (schema) { if (schema) {
list = createTableLikeList(dbinfo, x => x.schemaName == schema.schemaName); list = createTableLikeList(schemaList, dbinfo, x => x.schemaName == schema.schemaName);
} }
} }
} else { } else {
@@ -167,7 +173,7 @@ export function mountCodeCompletion({ conid, database, editor, getText }) {
} else { } else {
list = [ list = [
...(onlyTables ? [] : list), ...(onlyTables ? [] : list),
...createTableLikeList(dbinfo, x => !dbinfo.defaultSchema || dbinfo.defaultSchema == x.schemaName), ...createTableLikeList(schemaList, dbinfo, x => !defaultSchema || defaultSchema == x.schemaName),
...(onlyTables ...(onlyTables
? [] ? []

View File

@@ -249,6 +249,10 @@
<FormCheckboxField label={`Use only database ${defaultDatabase}`} name="singleDatabase" disabled={isConnected} /> <FormCheckboxField label={`Use only database ${defaultDatabase}`} name="singleDatabase" disabled={isConnected} />
{/if} {/if}
{#if driver?.showConnectionField('useSeparateSchemas', $values, showConnectionFieldArgs)}
<FormCheckboxField label={`Use schemas separately (use this if you have many large schemas)`} name="useSeparateSchemas" disabled={isConnected} />
{/if}
{#if driver} {#if driver}
<div class="row"> <div class="row">
<div class="col-6 mr-1"> <div class="col-6 mr-1">

View File

@@ -8,6 +8,7 @@ import _ from 'lodash';
import { safeJsonParse } from 'dbgate-tools'; import { safeJsonParse } from 'dbgate-tools';
import { apiCall } from './utility/api'; import { apiCall } from './utility/api';
import { getOpenedTabsStorageName, isAdminPage } from './utility/pageDefs'; import { getOpenedTabsStorageName, isAdminPage } from './utility/pageDefs';
import { switchCurrentDatabase } from './utility/common';
export interface TabDefinition { export interface TabDefinition {
title: string; title: string;
@@ -149,6 +150,7 @@ export const loadingPluginStore = writable({
}); });
export const activeDbKeysStore = writableWithStorage({}, 'activeDbKeysStore'); export const activeDbKeysStore = writableWithStorage({}, 'activeDbKeysStore');
export const appliedCurrentSchema = writable<string>(null); export const appliedCurrentSchema = writable<string>(null);
export const loadingSchemaLists = writable({}); // dict [`${conid}::${database}`]: true
export const currentThemeDefinition = derived([currentTheme, extensions], ([$currentTheme, $extensions]) => export const currentThemeDefinition = derived([currentTheme, extensions], ([$currentTheme, $extensions]) =>
$extensions.themes.find(x => x.themeClassName == $currentTheme) $extensions.themes.find(x => x.themeClassName == $currentTheme)
@@ -296,7 +298,7 @@ export function subscribeApiDependendStores() {
currentConfigValue = value; currentConfigValue = value;
invalidateCommands(); invalidateCommands();
if (value.singleDbConnection) { if (value.singleDbConnection) {
currentDatabase.set(value.singleDbConnection); switchCurrentDatabase(value.singleDbConnection);
} }
}); });
} }

View File

@@ -92,6 +92,7 @@
export let driver; export let driver;
export let resetCounter; export let resetCounter;
export let isCreateTable; export let isCreateTable;
export let schemaList;
$: isWritable = !!setTableInfo; $: isWritable = !!setTableInfo;
@@ -172,7 +173,7 @@
title="Table properties" title="Table properties"
fieldDefinitions={tableFormOptions ?? []} fieldDefinitions={tableFormOptions ?? []}
pureNameTitle={isCreateTable ? 'Table name' : null} pureNameTitle={isCreateTable ? 'Table name' : null}
schemaList={isCreateTable && dbInfo?.schemas?.length >= 0 ? dbInfo?.schemas : null} schemaList={isCreateTable && schemaList?.length >= 0 ? schemaList : null}
values={_.pick(tableInfo, ['schemaName', 'pureName', ...(tableFormOptions ?? []).map(x => x.name)])} values={_.pick(tableInfo, ['schemaName', 'pureName', ...(tableFormOptions ?? []).map(x => x.name)])}
onChangeValues={vals => { onChangeValues={vals => {
if (!_.isEmpty(vals)) { if (!_.isEmpty(vals)) {

View File

@@ -285,7 +285,7 @@
draggingTabTarget, draggingTabTarget,
} from '../stores'; } from '../stores';
import tabs from '../tabs'; import tabs from '../tabs';
import { setSelectedTab } from '../utility/common'; import { setSelectedTab, switchCurrentDatabase } from '../utility/common';
import contextMenu from '../utility/contextMenu'; import contextMenu from '../utility/contextMenu';
import { isElectronAvailable } from '../utility/getElectron'; import { isElectronAvailable } from '../utility/getElectron';
import { getConnectionInfo, useConnectionList } from '../utility/metadataLoaders'; import { getConnectionInfo, useConnectionList } from '../utility/metadataLoaders';
@@ -420,11 +420,11 @@
if (conid) { if (conid) {
const connection = await getConnectionInfo({ conid, database }); const connection = await getConnectionInfo({ conid, database });
if (connection) { if (connection) {
$currentDatabase = { connection, name: database }; switchCurrentDatabase({ connection, name: database });
return; return;
} }
} }
$currentDatabase = null; switchCurrentDatabase(null);
}; };
async function scrollInViewTab(tabid) { async function scrollInViewTab(tabid) {

View File

@@ -44,7 +44,7 @@
import TableEditor from '../tableeditor/TableEditor.svelte'; import TableEditor from '../tableeditor/TableEditor.svelte';
import createActivator, { getActiveComponent } from '../utility/createActivator'; import createActivator, { getActiveComponent } from '../utility/createActivator';
import { useConnectionInfo, useDatabaseInfo, useDbCore } from '../utility/metadataLoaders'; import { useConnectionInfo, useDatabaseInfo, useDbCore, useSchemaList } from '../utility/metadataLoaders';
import { showModal } from '../modals/modalTools'; import { showModal } from '../modals/modalTools';
import ConfirmSqlModal from '../modals/ConfirmSqlModal.svelte'; import ConfirmSqlModal from '../modals/ConfirmSqlModal.svelte';
import ErrorMessageModal from '../modals/ErrorMessageModal.svelte'; import ErrorMessageModal from '../modals/ErrorMessageModal.svelte';
@@ -75,6 +75,7 @@
$: tableInfoWithPairingId = $tableInfo ? generateTablePairingId($tableInfo) : null; $: tableInfoWithPairingId = $tableInfo ? generateTablePairingId($tableInfo) : null;
$: connection = useConnectionInfo({ conid }); $: connection = useConnectionInfo({ conid });
$: driver = findEngineDriver($connection, $extensions); $: driver = findEngineDriver($connection, $extensions);
$: schemaList = useSchemaList({ conid, database });
const { editorState, editorValue, setEditorData, clearEditorData } = useEditorData({ tabid }); const { editorState, editorValue, setEditorData, clearEditorData } = useEditorData({ tabid });
@@ -146,6 +147,7 @@
bind:this={domEditor} bind:this={domEditor}
tableInfo={showTable} tableInfo={showTable}
dbInfo={$dbInfo} dbInfo={$dbInfo}
schemaList={$schemaList}
{driver} {driver}
{resetCounter} {resetCounter}
isCreateTable={objectTypeField == 'tables' && !$editorValue?.base} isCreateTable={objectTypeField == 'tables' && !$editorValue?.base}

View File

@@ -3,6 +3,7 @@ import { currentDatabase, getCurrentDatabase, getLockedDatabaseMode, openedTabs
import { shouldShowTab } from '../tabpanel/TabsPanel.svelte'; import { shouldShowTab } from '../tabpanel/TabsPanel.svelte';
import { callWhenAppLoaded, getAppLoaded } from './appLoadManager'; import { callWhenAppLoaded, getAppLoaded } from './appLoadManager';
import { getConnectionInfo } from './metadataLoaders'; import { getConnectionInfo } from './metadataLoaders';
import { switchCurrentDatabase } from './common';
let lastCurrentTab = null; let lastCurrentTab = null;
@@ -20,7 +21,7 @@ openedTabs.subscribe(value => {
if (conid && database && (conid != lastTab?.props?.conid || database != lastTab?.props?.database)) { if (conid && database && (conid != lastTab?.props?.conid || database != lastTab?.props?.database)) {
const doWork = async () => { const doWork = async () => {
const connection = await getConnectionInfo({ conid }); const connection = await getConnectionInfo({ conid });
currentDatabase.set({ switchCurrentDatabase({
connection, connection,
name: database, name: database,
}); });

View File

@@ -1,5 +1,8 @@
import { getOpenedTabs, openedTabs } from '../stores'; import { findDefaultSchema, findEngineDriver, isCompositeDbName } from 'dbgate-tools';
import { currentDatabase, getExtensions, getOpenedTabs, loadingSchemaLists, openedTabs } from '../stores';
import _ from 'lodash'; import _ from 'lodash';
import { getSchemaList } from './metadataLoaders';
import { showSnackbarError } from './snackbar';
export class LoadingToken { export class LoadingToken {
isCanceled = false; isCanceled = false;
@@ -82,3 +85,37 @@ export function isCtrlOrCommandKey(event) {
} }
return event.ctrlKey; return event.ctrlKey;
} }
/**
 * Loads the schema list for the given connection/database, tracking an
 * in-progress flag in the `loadingSchemaLists` store (keyed `${conid}::${database}`)
 * so the UI can render a loading indicator.
 *
 * @param {string} conid - connection identifier
 * @param {string} database - database name
 * @returns {Promise<Array|undefined>} schema list, or undefined on failure
 */
export async function loadSchemaList(conid, database) {
  const loadingKey = `${conid}::${database}`;
  try {
    loadingSchemaLists.update(x => ({ ...x, [loadingKey]: true }));
    const schemas = await getSchemaList({ conid, database });
    // The API reports failures as an object carrying `errorMessage` rather than
    // throwing; also guard against a null/undefined response, which previously
    // crashed with a TypeError when reading `errorMessage`.
    if (!schemas || schemas.errorMessage) {
      const message = schemas?.errorMessage ?? 'no response';
      showSnackbarError(`Error loading schemas: ${message}`);
      console.error('Error loading schemas', message);
      return undefined;
    }
    return schemas;
  } finally {
    // Always clear the loading flag, even when the request throws
    loadingSchemaLists.update(x => _.omit(x, [loadingKey]));
  }
}
/**
 * Makes the given database current. For connections configured with
 * `useSeparateSchemas`, a plain database name is expanded to a composite
 * "database::schema" name, picking the schema remembered in localStorage
 * (key `selected-schema-${conid}-${database}`) or the driver's default.
 */
export async function switchCurrentDatabase(data) {
  const needsSchemaSuffix = data?.connection?.useSeparateSchemas && !isCompositeDbName(data.name);
  if (!needsSchemaSuffix) {
    currentDatabase.set(data);
    return;
  }

  const conid = data.connection._id;
  const database = data.name;
  const schemaInStorage = localStorage.getItem(`selected-schema-${conid}-${database}`);

  const schemas = await loadSchemaList(conid, database);
  // Loading failed (and was already reported); keep the current database unchanged
  if (!schemas) {
    return;
  }

  const driver = findEngineDriver(data.connection, getExtensions());
  const defaultSchema = findDefaultSchema(schemas, driver?.dialect, schemaInStorage);
  currentDatabase.set({
    ...data,
    name: `${database}::${defaultSchema}`,
  });
}

View File

@@ -13,6 +13,12 @@ const databaseInfoLoader = ({ conid, database }) => ({
transform: extendDatabaseInfo, transform: extendDatabaseInfo,
}); });
const schemaListLoader = ({ conid, database }) => ({
url: 'database-connections/schema-list',
params: { conid, database },
reloadTrigger: { key: `schema-list-changed`, conid, database },
});
// const tableInfoLoader = ({ conid, database, schemaName, pureName }) => ({ // const tableInfoLoader = ({ conid, database, schemaName, pureName }) => ({
// url: 'metadata/table-info', // url: 'metadata/table-info',
// params: { conid, database, schemaName, pureName }, // params: { conid, database, schemaName, pureName },
@@ -449,3 +455,9 @@ export function useAuthTypes(args) {
// export function useDatabaseKeys(args) { // export function useDatabaseKeys(args) {
// return useCore(databaseKeysLoader, args); // return useCore(databaseKeysLoader, args);
// } // }
export function getSchemaList(args) {
return getCore(schemaListLoader, args);
}
export function useSchemaList(args) {
return useCore(schemaListLoader, args);
}

View File

@@ -12,6 +12,7 @@ import { SAVED_FILE_HANDLERS } from '../appobj/SavedFileAppObject.svelte';
import _ from 'lodash'; import _ from 'lodash';
import ErrorMessageModal from '../modals/ErrorMessageModal.svelte'; import ErrorMessageModal from '../modals/ErrorMessageModal.svelte';
import { openImportExportTab } from './importExportTools'; import { openImportExportTab } from './importExportTools';
import { switchCurrentDatabase } from './common';
export function canOpenByElectron(file, extensions) { export function canOpenByElectron(file, extensions) {
if (!file) return false; if (!file) return false;
@@ -38,7 +39,7 @@ export async function openSqliteFile(filePath) {
singleDatabase: true, singleDatabase: true,
defaultDatabase, defaultDatabase,
}); });
currentDatabase.set({ switchCurrentDatabase({
connection: resp, connection: resp,
name: getDatabaseFileLabel(filePath), name: getDatabaseFileLabel(filePath),
}); });

View File

@@ -4,21 +4,26 @@
import _ from 'lodash'; import _ from 'lodash';
import FontIcon from '../icons/FontIcon.svelte'; import FontIcon from '../icons/FontIcon.svelte';
import { DatabaseInfo } from 'dbgate-types';
import { showModal } from '../modals/modalTools'; import { showModal } from '../modals/modalTools';
import ConfirmModal from '../modals/ConfirmModal.svelte'; import ConfirmModal from '../modals/ConfirmModal.svelte';
import { runOperationOnDatabase } from '../modals/ConfirmSqlModal.svelte'; import { runOperationOnDatabase } from '../modals/ConfirmSqlModal.svelte';
import InputTextModal from '../modals/InputTextModal.svelte'; import InputTextModal from '../modals/InputTextModal.svelte';
import { appliedCurrentSchema } from '../stores'; import { appliedCurrentSchema, currentDatabase } from '../stores';
import { switchCurrentDatabase } from '../utility/common';
import { extractDbNameFromComposite, extractSchemaNameFromComposite, findDefaultSchema } from 'dbgate-tools';
export let dbinfo: DatabaseInfo; export let schemaList;
export let selectedSchema;
export let objectList; export let objectList;
export let valueStorageKey;
export let conid; export let conid;
export let database; export let database;
export let connection;
export let driver;
let selectedSchema = null;
$: valueStorageKey = `selected-schema-${conid}-${database}`;
$: { $: {
if (selectedSchema != null) { if (selectedSchema != null) {
@@ -43,8 +48,8 @@
return res; return res;
} }
$: schemaList = _.uniq( $: realSchemaList = _.uniq(
_.compact([selectedSchema, ...Object.keys(countBySchema), ...(dbinfo?.schemas?.map(x => x.schemaName) ?? [])]) _.compact([selectedSchema, ...Object.keys(countBySchema), ...(schemaList?.map(x => x.schemaName) ?? [])])
); );
$: countBySchema = computeCountBySchema(objectList ?? []); $: countBySchema = computeCountBySchema(objectList ?? []);
@@ -55,10 +60,14 @@
label: 'Schema name', label: 'Schema name',
onConfirm: async name => { onConfirm: async name => {
const dbid = { conid, database }; const dbid = { conid, database };
await runOperationOnDatabase(dbid, { await runOperationOnDatabase(
dbid,
{
type: 'createSchema', type: 'createSchema',
schemaName: name, schemaName: name,
}); },
'schema-list-changed'
);
if (selectedSchema) { if (selectedSchema) {
selectedSchema = name; selectedSchema = name;
} }
@@ -70,32 +79,48 @@
message: `Really drop schema ${$appliedCurrentSchema}?`, message: `Really drop schema ${$appliedCurrentSchema}?`,
onConfirm: async () => { onConfirm: async () => {
const dbid = { conid, database }; const dbid = { conid, database };
runOperationOnDatabase(dbid, { runOperationOnDatabase(
dbid,
{
type: 'dropSchema', type: 'dropSchema',
schemaName: $appliedCurrentSchema, schemaName: $appliedCurrentSchema,
}); },
'schema-list-changed'
);
selectedSchema = null; selectedSchema = null;
}, },
}); });
} }
$: selectedSchema = localStorage.getItem(valueStorageKey ?? ''); $: if (connection?.useSeparateSchemas) {
selectedSchema =
extractSchemaNameFromComposite($currentDatabase?.name) ?? findDefaultSchema(schemaList, driver?.dialect);
} else {
selectedSchema = localStorage.getItem(valueStorageKey ?? '');
}
</script> </script>
{#if schemaList.length > 0} {#if realSchemaList.length > 0}
<div class="wrapper"> <div class="wrapper">
<div class="mr-1">Schema:</div> <div class="mr-1">Schema:</div>
<SelectField <SelectField
isNative isNative
options={[ options={connection?.useSeparateSchemas
? (schemaList?.map(x => ({ label: x.schemaName, value: x.schemaName })) ?? [])
: [
{ label: `All schemas (${objectList?.length ?? 0})`, value: '' }, { label: `All schemas (${objectList?.length ?? 0})`, value: '' },
...schemaList.map(x => ({ label: `${x} (${countBySchema[x] ?? 0})`, value: x })), ...realSchemaList.map(x => ({ label: `${x} (${countBySchema[x] ?? 0})`, value: x })),
// ...schemaList.filter(x => countBySchema[x]).map(x => ({ label: `${x} (${countBySchema[x] ?? 0})`, value: x })),
// ...schemaList.filter(x => !countBySchema[x]).map(x => ({ label: `${x} (${countBySchema[x] ?? 0})`, value: x })),
]} ]}
value={selectedSchema ?? $appliedCurrentSchema ?? ''} value={selectedSchema ?? $appliedCurrentSchema ?? ''}
on:change={e => { on:change={e => {
if (connection?.useSeparateSchemas) {
switchCurrentDatabase({
connection,
name: `${extractDbNameFromComposite(database)}::${e.detail}`,
});
} else {
selectedSchema = e.detail; selectedSchema = e.detail;
}
localStorage.setItem(valueStorageKey, e.detail); localStorage.setItem(valueStorageKey, e.detail);
}} }}
selectClass="schema-select" selectClass="schema-select"

View File

@@ -16,7 +16,13 @@
import InlineButton from '../buttons/InlineButton.svelte'; import InlineButton from '../buttons/InlineButton.svelte';
import SearchInput from '../elements/SearchInput.svelte'; import SearchInput from '../elements/SearchInput.svelte';
import WidgetsInnerContainer from './WidgetsInnerContainer.svelte'; import WidgetsInnerContainer from './WidgetsInnerContainer.svelte';
import { useConnectionInfo, useDatabaseInfo, useDatabaseStatus, useUsedApps } from '../utility/metadataLoaders'; import {
useConnectionInfo,
useDatabaseInfo,
useDatabaseStatus,
useSchemaList,
useUsedApps,
} from '../utility/metadataLoaders';
import SearchBoxWrapper from '../elements/SearchBoxWrapper.svelte'; import SearchBoxWrapper from '../elements/SearchBoxWrapper.svelte';
import AppObjectList from '../appobj/AppObjectList.svelte'; import AppObjectList from '../appobj/AppObjectList.svelte';
import _ from 'lodash'; import _ from 'lodash';
@@ -42,10 +48,10 @@
export let database; export let database;
let filter = ''; let filter = '';
let selectedSchema = null;
$: objects = useDatabaseInfo({ conid, database }); $: objects = useDatabaseInfo({ conid, database });
$: status = useDatabaseStatus({ conid, database }); $: status = useDatabaseStatus({ conid, database });
$: schemaList = useSchemaList({ conid, database });
$: connection = useConnectionInfo({ conid }); $: connection = useConnectionInfo({ conid });
$: driver = findEngineDriver($connection, $extensions); $: driver = findEngineDriver($connection, $extensions);
@@ -79,6 +85,7 @@
const handleRefreshDatabase = () => { const handleRefreshDatabase = () => {
apiCall('database-connections/refresh', { conid, database }); apiCall('database-connections/refresh', { conid, database });
apiCall('database-connections/dispatch-database-changed-event', { event: 'schema-list-changed', conid, database });
}; };
function createAddMenu() { function createAddMenu() {
@@ -116,6 +123,14 @@
<InlineButton on:click={handleRefreshDatabase}>Refresh</InlineButton> <InlineButton on:click={handleRefreshDatabase}>Refresh</InlineButton>
</WidgetsInnerContainer> </WidgetsInnerContainer>
{:else if objectList.length == 0 && $status && $status.name != 'pending' && $status.name != 'checkStructure' && $status.name != 'loadStructure' && $objects} {:else if objectList.length == 0 && $status && $status.name != 'pending' && $status.name != 'checkStructure' && $status.name != 'loadStructure' && $objects}
<SchemaSelector
schemaList={_.isArray($schemaList) ? $schemaList : null}
objectList={flatFilteredList}
connection={$connection}
{conid}
{database}
{driver}
/>
<WidgetsInnerContainer> <WidgetsInnerContainer>
<ErrorInfo <ErrorInfo
message={`Database ${database} is empty or structure is not loaded, press Refresh button to reload structure`} message={`Database ${database} is empty or structure is not loaded, press Refresh button to reload structure`}
@@ -144,12 +159,12 @@
</InlineButton> </InlineButton>
</SearchBoxWrapper> </SearchBoxWrapper>
<SchemaSelector <SchemaSelector
dbinfo={$objects} schemaList={_.isArray($schemaList) ? $schemaList : null}
bind:selectedSchema
objectList={flatFilteredList} objectList={flatFilteredList}
valueStorageKey={`sql-object-list-schema-${conid}-${database}`} connection={$connection}
{conid} {conid}
{database} {database}
{driver}
/> />
<WidgetsInnerContainer> <WidgetsInnerContainer>

View File

@@ -1,4 +1,4 @@
const { DatabaseAnalyser } = require('dbgate-tools'); const { DatabaseAnalyser } = global.DBGATE_PACKAGES['dbgate-tools'];
const sql = require('./sql'); const sql = require('./sql');
function extractDataType(dataType) { function extractDataType(dataType) {
@@ -24,7 +24,7 @@ class Analyser extends DatabaseAnalyser {
createQuery(resFileName, typeFields, replacements = {}) { createQuery(resFileName, typeFields, replacements = {}) {
let res = sql[resFileName]; let res = sql[resFileName];
res = res.replace('#DATABASE#', this.pool._database_name); res = res.replace('#DATABASE#', this.dbhan.database);
return super.createQuery(res, typeFields, replacements); return super.createQuery(res, typeFields, replacements);
} }
@@ -82,8 +82,8 @@ class Analyser extends DatabaseAnalyser {
async _computeSingleObjectId() { async _computeSingleObjectId() {
const { pureName } = this.singleObjectFilter; const { pureName } = this.singleObjectFilter;
const resId = await this.driver.query( const resId = await this.driver.query(
this.pool, this.dbhan,
`SELECT uuid as id FROM system.tables WHERE database = '${this.pool._database_name}' AND name='${pureName}'` `SELECT uuid as id FROM system.tables WHERE database = '${this.dbhan.database}' AND name='${pureName}'`
); );
this.singleObjectId = resId.rows[0]?.id; this.singleObjectId = resId.rows[0]?.id;
} }

View File

@@ -5,11 +5,11 @@ const _ = require('lodash');
* *
* @param {import('dbgate-types').EngineDriver} driver * @param {import('dbgate-types').EngineDriver} driver
*/ */
function createOracleBulkInsertStream(driver, stream, pool, name, options) { function createOracleBulkInsertStream(driver, stream, dbhan, name, options) {
const writable = createBulkInsertStreamBase(driver, stream, pool, name, options); const writable = createBulkInsertStreamBase(driver, stream, dbhan, name, options);
writable.send = async () => { writable.send = async () => {
await pool.insert({ await dbhan.client.insert({
table: name.pureName, table: name.pureName,
values: writable.buffer, values: writable.buffer,
format: 'JSONEachRow', format: 'JSONEachRow',

View File

@@ -15,16 +15,18 @@ const driver = {
url: databaseUrl, url: databaseUrl,
username: user, username: user,
password: password, password: password,
database: database, database,
}); });
client._database_name = database; return {
return client; client,
database,
};
}, },
// called for retrieve data (eg. browse in data grid) and for update database // called for retrieve data (eg. browse in data grid) and for update database
async query(client, query, options) { async query(dbhan, query, options) {
if (options?.discardResult) { if (options?.discardResult) {
await client.command({ await dbhan.client.command({
query, query,
}); });
return { return {
@@ -32,7 +34,7 @@ const driver = {
columns: [], columns: [],
}; };
} else { } else {
const resultSet = await client.query({ const resultSet = await dbhan.client.query({
query, query,
format: 'JSONCompactEachRowWithNamesAndTypes', format: 'JSONCompactEachRowWithNamesAndTypes',
}); });
@@ -57,10 +59,10 @@ const driver = {
} }
}, },
// called in query console // called in query console
async stream(client, query, options) { async stream(dbhan, query, options) {
try { try {
if (!query.match(/^\s*SELECT/i)) { if (!query.match(/^\s*SELECT/i)) {
const resp = await client.command({ const resp = await dbhan.client.command({
query, query,
}); });
// console.log('RESP', resp); // console.log('RESP', resp);
@@ -76,7 +78,7 @@ const driver = {
return; return;
} }
const resultSet = await client.query({ const resultSet = await dbhan.client.query({
query, query,
format: 'JSONCompactEachRowWithNamesAndTypes', format: 'JSONCompactEachRowWithNamesAndTypes',
}); });
@@ -138,13 +140,13 @@ const driver = {
} }
}, },
// called when exporting table or view // called when exporting table or view
async readQuery(client, query, structure) { async readQuery(dbhan, query, structure) {
const pass = new stream.PassThrough({ const pass = new stream.PassThrough({
objectMode: true, objectMode: true,
highWaterMark: 100, highWaterMark: 100,
}); });
const resultSet = await client.query({ const resultSet = await dbhan.client.query({
query, query,
format: 'JSONCompactEachRowWithNamesAndTypes', format: 'JSONCompactEachRowWithNamesAndTypes',
}); });
@@ -190,12 +192,12 @@ const driver = {
return pass; return pass;
}, },
async writeTable(pool, name, options) { async writeTable(dbhan, name, options) {
return createBulkInsertStream(this, stream, pool, name, options); return createBulkInsertStream(this, stream, dbhan, name, options);
}, },
// detect server version // detect server version
async getVersion(client) { async getVersion(dbhan) {
const resultSet = await client.query({ const resultSet = await dbhan.client.query({
query: 'SELECT version() as version', query: 'SELECT version() as version',
format: 'JSONEachRow', format: 'JSONEachRow',
}); });
@@ -203,8 +205,8 @@ const driver = {
return { version: dataset[0].version }; return { version: dataset[0].version };
}, },
// list databases on server // list databases on server
async listDatabases(client) { async listDatabases(dbhan) {
const resultSet = await client.query({ const resultSet = await dbhan.client.query({
query: `SELECT name query: `SELECT name
FROM system.databases FROM system.databases
WHERE name NOT IN ('system', 'information_schema', 'information_schema_ro', 'INFORMATION_SCHEMA')`, WHERE name NOT IN ('system', 'information_schema', 'information_schema_ro', 'INFORMATION_SCHEMA')`,
@@ -214,8 +216,8 @@ const driver = {
return dataset; return dataset;
}, },
async close(client) { async close(dbhan) {
return client.close(); return dbhan.client.close();
}, },
}; };

View File

@@ -1,12 +1,12 @@
const { DatabaseAnalyser } = global.DBGATE_PACKAGES['dbgate-tools']; const { DatabaseAnalyser } = global.DBGATE_PACKAGES['dbgate-tools'];
class Analyser extends DatabaseAnalyser { class Analyser extends DatabaseAnalyser {
constructor(pool, driver, version) { constructor(dbhan, driver, version) {
super(pool, driver, version); super(dbhan, driver, version);
} }
async _runAnalysis() { async _runAnalysis() {
const collectionsAndViews = await this.pool.__getDatabase().listCollections().toArray(); const collectionsAndViews = await this.dbhan.getDatabase().listCollections().toArray();
const collections = collectionsAndViews.filter((x) => x.type == 'collection'); const collections = collectionsAndViews.filter((x) => x.type == 'collection');
const views = collectionsAndViews.filter((x) => x.type == 'view'); const views = collectionsAndViews.filter((x) => x.type == 'view');
@@ -16,8 +16,8 @@ class Analyser extends DatabaseAnalyser {
collections collections
.filter((x) => x.type == 'collection') .filter((x) => x.type == 'collection')
.map((x) => .map((x) =>
this.pool this.dbhan
.__getDatabase() .getDatabase()
.collection(x.name) .collection(x.name)
.aggregate([{ $collStats: { count: {} } }]) .aggregate([{ $collStats: { count: {} } }])
.toArray() .toArray()

View File

@@ -5,9 +5,9 @@ const { EJSON } = require('bson');
const logger = getLogger('mongoBulkInsert'); const logger = getLogger('mongoBulkInsert');
function createBulkInsertStream(driver, stream, pool, name, options) { function createBulkInsertStream(driver, stream, dbhan, name, options) {
const collectionName = name.pureName; const collectionName = name.pureName;
const db = pool.__getDatabase(); const db = dbhan.getDatabase();
const writable = new stream.Writable({ const writable = new stream.Writable({
objectMode: true, objectMode: true,

View File

@@ -34,8 +34,8 @@ function findArrayResult(resValue) {
return null; return null;
} }
async function getScriptableDb(pool) { async function getScriptableDb(dbhan) {
const db = pool.__getDatabase(); const db = dbhan.getDatabase();
const collections = await db.listCollections().toArray(); const collections = await db.listCollections().toArray();
for (const collection of collections) { for (const collection of collections) {
_.set(db, collection.name, db.collection(collection.name)); _.set(db, collection.name, db.collection(collection.name));
@@ -77,42 +77,43 @@ const driver = {
options.tlsInsecure = !ssl.rejectUnauthorized; options.tlsInsecure = !ssl.rejectUnauthorized;
} }
const pool = new MongoClient(mongoUrl, options); const client = new MongoClient(mongoUrl, options);
await pool.connect(); await client.connect();
// const pool = await MongoClient.connect(mongoUrl); return {
pool.__getDatabase = database ? () => pool.db(database) : () => pool.db(); client,
pool.__databaseName = database; database,
return pool; getDatabase: database ? () => client.db(database) : () => client.db(),
};
}, },
// @ts-ignore // @ts-ignore
async query(pool, sql) { async query(dbhan, sql) {
return { return {
rows: [], rows: [],
columns: [], columns: [],
}; };
}, },
async script(pool, sql) { async script(dbhan, sql) {
let func; let func;
func = eval(`(db,ObjectId) => ${sql}`); func = eval(`(db,ObjectId) => ${sql}`);
const db = await getScriptableDb(pool); const db = await getScriptableDb(dbhan);
const res = func(db, ObjectId.createFromHexString); const res = func(db, ObjectId.createFromHexString);
if (isPromise(res)) await res; if (isPromise(res)) await res;
}, },
async operation(pool, operation, options) { async operation(dbhan, operation, options) {
const { type } = operation; const { type } = operation;
switch (type) { switch (type) {
case 'createCollection': case 'createCollection':
await this.script(pool, `db.createCollection('${operation.collection.name}')`); await this.script(dbhan, `db.createCollection('${operation.collection.name}')`);
break; break;
case 'dropCollection': case 'dropCollection':
await this.script(pool, `db.dropCollection('${operation.collection}')`); await this.script(dbhan, `db.dropCollection('${operation.collection}')`);
break; break;
case 'renameCollection': case 'renameCollection':
await this.script(pool, `db.renameCollection('${operation.collection}', '${operation.newName}')`); await this.script(dbhan, `db.renameCollection('${operation.collection}', '${operation.newName}')`);
break; break;
case 'cloneCollection': case 'cloneCollection':
await this.script( await this.script(
pool, dbhan,
`db.collection('${operation.collection}').aggregate([{$out: '${operation.newName}'}]).toArray()` `db.collection('${operation.collection}').aggregate([{$out: '${operation.newName}'}]).toArray()`
); );
break; break;
@@ -121,7 +122,7 @@ const driver = {
} }
// saveScriptToDatabase({ conid: connection._id, database: name }, `db.createCollection('${newCollection}')`); // saveScriptToDatabase({ conid: connection._id, database: name }, `db.createCollection('${newCollection}')`);
}, },
async stream(pool, sql, options) { async stream(dbhan, sql, options) {
let func; let func;
try { try {
func = eval(`(db,ObjectId) => ${sql}`); func = eval(`(db,ObjectId) => ${sql}`);
@@ -134,7 +135,7 @@ const driver = {
options.done(); options.done();
return; return;
} }
const db = await getScriptableDb(pool); const db = await getScriptableDb(dbhan);
let exprValue; let exprValue;
try { try {
@@ -192,8 +193,8 @@ const driver = {
options.done(); options.done();
}, },
async startProfiler(pool, options) { async startProfiler(dbhan, options) {
const db = await getScriptableDb(pool); const db = await getScriptableDb(dbhan);
const old = await db.command({ profile: -1 }); const old = await db.command({ profile: -1 });
await db.command({ profile: 2 }); await db.command({ profile: 2 });
const cursor = await db.collection('system.profile').find({ const cursor = await db.collection('system.profile').find({
@@ -230,12 +231,12 @@ const driver = {
old, old,
}; };
}, },
async stopProfiler(pool, { cursor, old }) { async stopProfiler(dbhan, { cursor, old }) {
cursor.close(); cursor.close();
const db = await getScriptableDb(pool); const db = await getScriptableDb(dbhan);
await db.command({ profile: old.was, slowms: old.slowms }); await db.command({ profile: old.was, slowms: old.slowms });
}, },
async readQuery(pool, sql, structure) { async readQuery(dbhan, sql, structure) {
try { try {
const json = JSON.parse(sql); const json = JSON.parse(sql);
if (json && json.pureName) { if (json && json.pureName) {
@@ -251,7 +252,7 @@ const driver = {
// }); // });
func = eval(`(db,ObjectId) => ${sql}`); func = eval(`(db,ObjectId) => ${sql}`);
const db = await getScriptableDb(pool); const db = await getScriptableDb(dbhan);
exprValue = func(db, ObjectId.createFromHexString); exprValue = func(db, ObjectId.createFromHexString);
const pass = new stream.PassThrough({ const pass = new stream.PassThrough({
@@ -278,27 +279,27 @@ const driver = {
// return pass; // return pass;
}, },
async writeTable(pool, name, options) { async writeTable(dbhan, name, options) {
return createBulkInsertStream(this, stream, pool, name, options); return createBulkInsertStream(this, stream, dbhan, name, options);
}, },
async getVersion(pool) { async getVersion(dbhan) {
const status = await pool.__getDatabase().admin().serverInfo(); const status = await dbhan.getDatabase().admin().serverInfo();
return { return {
...status, ...status,
versionText: `MongoDB ${status.version}`, versionText: `MongoDB ${status.version}`,
}; };
}, },
async listDatabases(pool) { async listDatabases(dbhan) {
const res = await pool.__getDatabase().admin().listDatabases(); const res = await dbhan.getDatabase().admin().listDatabases();
return res.databases; return res.databases;
}, },
async readCollection(pool, options) { async readCollection(dbhan, options) {
try { try {
const mongoCondition = convertToMongoCondition(options.condition); const mongoCondition = convertToMongoCondition(options.condition);
// console.log('******************* mongoCondition *****************'); // console.log('******************* mongoCondition *****************');
// console.log(JSON.stringify(mongoCondition, undefined, 2)); // console.log(JSON.stringify(mongoCondition, undefined, 2));
const collection = pool.__getDatabase().collection(options.pureName); const collection = dbhan.getDatabase().collection(options.pureName);
if (options.countDocuments) { if (options.countDocuments) {
const count = await collection.countDocuments(convertObjectId(mongoCondition) || {}); const count = await collection.countDocuments(convertObjectId(mongoCondition) || {});
return { count }; return { count };
@@ -326,7 +327,7 @@ const driver = {
return { errorMessage: err.message }; return { errorMessage: err.message };
} }
}, },
async updateCollection(pool, changeSet) { async updateCollection(dbhan, changeSet) {
const res = { const res = {
inserted: [], inserted: [],
updated: [], updated: [],
@@ -334,7 +335,7 @@ const driver = {
replaced: [], replaced: [],
}; };
try { try {
const db = pool.__getDatabase(); const db = dbhan.getDatabase();
for (const insert of changeSet.inserts) { for (const insert of changeSet.inserts) {
const collection = db.collection(insert.pureName); const collection = db.collection(insert.pureName);
const document = { const document = {
@@ -384,19 +385,19 @@ const driver = {
} }
}, },
async createDatabase(pool, name) { async createDatabase(dbhan, name) {
const db = pool.db(name); const db = dbhan.client.db(name);
await db.createCollection('collection1'); await db.createCollection('collection1');
}, },
async dropDatabase(pool, name) { async dropDatabase(dbhan, name) {
const db = pool.db(name); const db = dbhan.client.db(name);
await db.dropDatabase(); await db.dropDatabase();
}, },
async loadFieldValues(pool, name, field, search) { async loadFieldValues(dbhan, name, field, search) {
try { try {
const collection = pool.__getDatabase().collection(name.pureName); const collection = dbhan.getDatabase().collection(name.pureName);
// console.log('options.condition', JSON.stringify(options.condition, undefined, 2)); // console.log('options.condition', JSON.stringify(options.condition, undefined, 2));
const pipelineMatch = []; const pipelineMatch = [];
@@ -442,10 +443,10 @@ const driver = {
} }
}, },
readJsonQuery(pool, select, structure) { readJsonQuery(dbhan, select, structure) {
const { collection, condition, sort } = select; const { collection, condition, sort } = select;
const db = pool.__getDatabase(); const db = dbhan.getDatabase();
const res = db const res = db
.collection(collection) .collection(collection)
.find(condition || {}) .find(condition || {})
@@ -455,23 +456,23 @@ const driver = {
return res; return res;
}, },
async summaryCommand(pool, command, row) { async summaryCommand(dbhan, command, row) {
switch (command) { switch (command) {
case 'profileOff': case 'profileOff':
await pool.db(row.name).command({ profile: 0 }); await dbhan.client.db(row.name).command({ profile: 0 });
return; return;
case 'profileFiltered': case 'profileFiltered':
await pool.db(row.name).command({ profile: 1, slowms: 100 }); await dbhan.client.db(row.name).command({ profile: 1, slowms: 100 });
return; return;
case 'profileAll': case 'profileAll':
await pool.db(row.name).command({ profile: 2 }); await dbhan.client.db(row.name).command({ profile: 2 });
return; return;
} }
}, },
async serverSummary(pool) { async serverSummary(dbhan) {
const res = await pool.__getDatabase().admin().listDatabases(); const res = await dbhan.getDatabase().admin().listDatabases();
const profiling = await Promise.all(res.databases.map((x) => pool.db(x.name).command({ profile: -1 }))); const profiling = await Promise.all(res.databases.map((x) => dbhan.client.db(x.name).command({ profile: -1 })));
function formatProfiling(info) { function formatProfiling(info) {
switch (info.was) { switch (info.was) {

View File

@@ -63,8 +63,8 @@ function getColumnInfo({
} }
class MsSqlAnalyser extends DatabaseAnalyser { class MsSqlAnalyser extends DatabaseAnalyser {
constructor(pool, driver, version) { constructor(dbhan, driver, version) {
super(pool, driver, version); super(dbhan, driver, version);
} }
createQuery(resFileName, typeFields) { createQuery(resFileName, typeFields) {
@@ -75,16 +75,10 @@ class MsSqlAnalyser extends DatabaseAnalyser {
async _computeSingleObjectId() { async _computeSingleObjectId() {
const { schemaName, pureName, typeField } = this.singleObjectFilter; const { schemaName, pureName, typeField } = this.singleObjectFilter;
const fullName = schemaName ? `[${schemaName}].[${pureName}]` : pureName; const fullName = schemaName ? `[${schemaName}].[${pureName}]` : pureName;
const resId = await this.driver.query(this.pool, `SELECT OBJECT_ID('${fullName}') AS id`); const resId = await this.driver.query(this.dbhan, `SELECT OBJECT_ID('${fullName}') AS id`);
this.singleObjectId = resId.rows[0].id; this.singleObjectId = resId.rows[0].id;
} }
async readSchemaList() {
const schemaRows = await this.analyserQuery('getSchemas');
const schemas = schemaRows.rows;
return schemas;
}
async _runAnalysis() { async _runAnalysis() {
this.feedback({ analysingMessage: 'Loading tables' }); this.feedback({ analysingMessage: 'Loading tables' });
const tablesRows = await this.analyserQuery('tables', ['tables']); const tablesRows = await this.analyserQuery('tables', ['tables']);
@@ -98,8 +92,6 @@ class MsSqlAnalyser extends DatabaseAnalyser {
const indexesRows = await this.analyserQuery('indexes', ['tables']); const indexesRows = await this.analyserQuery('indexes', ['tables']);
this.feedback({ analysingMessage: 'Loading index columns' }); this.feedback({ analysingMessage: 'Loading index columns' });
const indexcolsRows = await this.analyserQuery('indexcols', ['tables']); const indexcolsRows = await this.analyserQuery('indexcols', ['tables']);
this.feedback({ analysingMessage: 'Loading default schema' });
const defaultSchemaRows = await this.driver.query(this.pool, 'SELECT SCHEMA_NAME() as name');
this.feedback({ analysingMessage: 'Loading table sizes' }); this.feedback({ analysingMessage: 'Loading table sizes' });
const tableSizes = await this.analyserQuery('tableSizes'); const tableSizes = await this.analyserQuery('tableSizes');
@@ -179,7 +171,6 @@ class MsSqlAnalyser extends DatabaseAnalyser {
views, views,
procedures, procedures,
functions, functions,
defaultSchema: defaultSchemaRows.rows[0] ? defaultSchemaRows.rows[0].name : undefined,
}; };
} }

View File

@@ -1,8 +1,8 @@
const { createBulkInsertStreamBase } = global.DBGATE_PACKAGES['dbgate-tools']; const { createBulkInsertStreamBase } = global.DBGATE_PACKAGES['dbgate-tools'];
function runBulkInsertBatch(pool, tableName, writable, rows) { function runBulkInsertBatch(dbhan, tableName, writable, rows) {
return new Promise((resolve, reject) => { return new Promise((resolve, reject) => {
const tableMgr = pool.tableMgr(); const tableMgr = dbhan.client.tableMgr();
tableMgr.bind(tableName, bulkMgr => { tableMgr.bind(tableName, bulkMgr => {
bulkMgr.insertRows(rows, err => { bulkMgr.insertRows(rows, err => {
if (err) reject(err); if (err) reject(err);
@@ -16,8 +16,8 @@ function runBulkInsertBatch(pool, tableName, writable, rows) {
* *
* @param {import('dbgate-types').EngineDriver} driver * @param {import('dbgate-types').EngineDriver} driver
*/ */
function createNativeBulkInsertStream(driver, stream, pool, name, options) { function createNativeBulkInsertStream(driver, stream, dbhan, name, options) {
const writable = createBulkInsertStreamBase(driver, stream, pool, name, options); const writable = createBulkInsertStreamBase(driver, stream, dbhan, name, options);
const fullName = name.schemaName ? `[${name.schemaName}].[${name.pureName}]` : name.pureName; const fullName = name.schemaName ? `[${name.schemaName}].[${name.pureName}]` : name.pureName;
@@ -25,7 +25,7 @@ function createNativeBulkInsertStream(driver, stream, pool, name, options) {
const rows = writable.buffer; const rows = writable.buffer;
writable.buffer = []; writable.buffer = [];
await runBulkInsertBatch(pool, fullName, writable, rows); await runBulkInsertBatch(dbhan, fullName, writable, rows);
}; };
return writable; return writable;

View File

@@ -3,12 +3,12 @@ const tedious = require('tedious');
const getConcreteType = require('./getConcreteType'); const getConcreteType = require('./getConcreteType');
const _ = require('lodash'); const _ = require('lodash');
function runBulkInsertBatch(pool, tableName, writable, rows) { function runBulkInsertBatch(dbhan, tableName, writable, rows) {
return new Promise((resolve, reject) => { return new Promise((resolve, reject) => {
var options = { keepNulls: true }; var options = { keepNulls: true };
// instantiate - provide the table where you'll be inserting to, options and a callback // instantiate - provide the table where you'll be inserting to, options and a callback
var bulkLoad = pool.newBulkLoad(tableName, options, (error, rowCount) => { var bulkLoad = dbhan.client.newBulkLoad(tableName, options, (error, rowCount) => {
if (error) reject(error); if (error) reject(error);
else resolve(); else resolve();
}); });
@@ -40,7 +40,7 @@ function runBulkInsertBatch(pool, tableName, writable, rows) {
); );
// console.log('IMPORT ROWS', rowsMapped); // console.log('IMPORT ROWS', rowsMapped);
pool.execBulkLoad(bulkLoad, rowsMapped); dbhan.client.execBulkLoad(bulkLoad, rowsMapped);
}); });
} }
@@ -48,8 +48,8 @@ function runBulkInsertBatch(pool, tableName, writable, rows) {
* *
* @param {import('dbgate-types').EngineDriver} driver * @param {import('dbgate-types').EngineDriver} driver
*/ */
function createTediousBulkInsertStream(driver, stream, pool, name, options) { function createTediousBulkInsertStream(driver, stream, dbhan, name, options) {
const writable = createBulkInsertStreamBase(driver, stream, pool, name, options); const writable = createBulkInsertStreamBase(driver, stream, dbhan, name, options);
const fullName = name.schemaName ? `[${name.schemaName}].[${name.pureName}]` : name.pureName; const fullName = name.schemaName ? `[${name.schemaName}].[${name.pureName}]` : name.pureName;
@@ -59,7 +59,7 @@ function createTediousBulkInsertStream(driver, stream, pool, name, options) {
? `${driver.dialect.quoteIdentifier(name.schemaName)}.${driver.dialect.quoteIdentifier(name.pureName)}` ? `${driver.dialect.quoteIdentifier(name.schemaName)}.${driver.dialect.quoteIdentifier(name.pureName)}`
: driver.dialect.quoteIdentifier(name.pureName); : driver.dialect.quoteIdentifier(name.pureName);
const respTemplate = await driver.query(pool, `SELECT * FROM ${fullNameQuoted} WHERE 1=0`, { const respTemplate = await driver.query(dbhan, `SELECT * FROM ${fullNameQuoted} WHERE 1=0`, {
addDriverNativeColumn: true, addDriverNativeColumn: true,
}); });
writable.templateColumns = respTemplate.columns; writable.templateColumns = respTemplate.columns;
@@ -68,7 +68,7 @@ function createTediousBulkInsertStream(driver, stream, pool, name, options) {
const rows = writable.buffer; const rows = writable.buffer;
writable.buffer = []; writable.buffer = [];
await runBulkInsertBatch(pool, fullName, writable, rows); await runBulkInsertBatch(dbhan, fullName, writable, rows);
}; };
return writable; return writable;

View File

@@ -79,50 +79,53 @@ const driver = {
async connect(conn) { async connect(conn) {
const { authType } = conn; const { authType } = conn;
if (requireMsnodesqlv8 && (authType == 'sspi' || authType == 'sql')) { const connectionType = requireMsnodesqlv8 && (authType == 'sspi' || authType == 'sql') ? 'msnodesqlv8' : 'tedious';
return nativeConnect(conn); const client = connectionType == 'msnodesqlv8' ? await nativeConnect(conn) : await tediousConnect(conn);
}
return tediousConnect(conn); return {
client,
connectionType,
database: conn.database,
};
}, },
async close(pool) { async close(dbhan) {
return pool.close(); return dbhan.client.close();
}, },
async queryCore(pool, sql, options) { async queryCore(dbhan, sql, options) {
if (pool._connectionType == 'msnodesqlv8') { if (dbhan.connectionType == 'msnodesqlv8') {
return nativeQueryCore(pool, sql, options); return nativeQueryCore(dbhan, sql, options);
} else { } else {
return tediousQueryCore(pool, sql, options); return tediousQueryCore(dbhan, sql, options);
} }
}, },
async query(pool, sql, options) { async query(dbhan, sql, options) {
return lock.acquire('connection', async () => { return lock.acquire('connection', async () => {
return this.queryCore(pool, sql, options); return this.queryCore(dbhan, sql, options);
}); });
}, },
async stream(pool, sql, options) { async stream(dbhan, sql, options) {
if (pool._connectionType == 'msnodesqlv8') { if (dbhan.connectionType == 'msnodesqlv8') {
return nativeStream(pool, sql, options); return nativeStream(dbhan, sql, options);
} else { } else {
return tediousStream(pool, sql, options); return tediousStream(dbhan, sql, options);
} }
}, },
async readQuery(pool, sql, structure) { async readQuery(dbhan, sql, structure) {
if (pool._connectionType == 'msnodesqlv8') { if (dbhan.connectionType == 'msnodesqlv8') {
return nativeReadQuery(pool, sql, structure); return nativeReadQuery(dbhan, sql, structure);
} else { } else {
return tediousReadQuery(pool, sql, structure); return tediousReadQuery(dbhan, sql, structure);
} }
}, },
async writeTable(pool, name, options) { async writeTable(dbhan, name, options) {
if (pool._connectionType == 'msnodesqlv8') { if (dbhan.connectionType == 'msnodesqlv8') {
return createNativeBulkInsertStream(this, stream, pool, name, options); return createNativeBulkInsertStream(this, stream, dbhan, name, options);
} else { } else {
return createTediousBulkInsertStream(this, stream, pool, name, options); return createTediousBulkInsertStream(this, stream, dbhan, name, options);
} }
}, },
async getVersion(pool) { async getVersion(dbhan) {
const res = (await this.query(pool, versionQuery)).rows[0]; const res = (await this.query(dbhan, versionQuery)).rows[0];
if (res.productVersion) { if (res.productVersion) {
const splitted = res.productVersion.split('.'); const splitted = res.productVersion.split('.');
@@ -133,8 +136,8 @@ const driver = {
} }
return res; return res;
}, },
async listDatabases(pool) { async listDatabases(dbhan) {
const { rows } = await this.query(pool, 'SELECT name FROM sys.databases order by name'); const { rows } = await this.query(dbhan, 'SELECT name FROM sys.databases order by name');
return rows; return rows;
}, },
getRedirectAuthUrl(connection, options) { getRedirectAuthUrl(connection, options) {
@@ -150,6 +153,19 @@ const driver = {
getAccessTokenFromAuth: (connection, req) => { getAccessTokenFromAuth: (connection, req) => {
return req?.user?.msentraToken; return req?.user?.msentraToken;
}, },
async listSchemas(dbhan) {
const { rows } = await this.query(dbhan, 'select schema_id as objectId, name as schemaName from sys.schemas');
const defaultSchemaRows = await this.query(dbhan, 'SELECT SCHEMA_NAME() as name');
const defaultSchema = defaultSchemaRows.rows[0]?.name;
logger.debug(`Loaded ${rows.length} mssql schemas`);
return rows.map(x => ({
...x,
isDefault: x.schemaName == defaultSchema,
}));
},
}; };
driver.initialize = dbgateEnv => { driver.initialize = dbgateEnv => {

View File

@@ -2,6 +2,7 @@ const _ = require('lodash');
const stream = require('stream'); const stream = require('stream');
const makeUniqueColumnNames = require('./makeUniqueColumnNames'); const makeUniqueColumnNames = require('./makeUniqueColumnNames');
let requireMsnodesqlv8; let requireMsnodesqlv8;
const { extractDbNameFromComposite } = global.DBGATE_PACKAGES['dbgate-tools'];
// async function nativeQueryCore(pool, sql, options) { // async function nativeQueryCore(pool, sql, options) {
// if (sql == null) { // if (sql == null) {
@@ -57,13 +58,12 @@ async function connectWithDriver({ server, port, user, password, database, authT
connectionString += `;Driver={${driver}}`; connectionString += `;Driver={${driver}}`;
if (authType == 'sspi') connectionString += ';Trusted_Connection=Yes'; if (authType == 'sspi') connectionString += ';Trusted_Connection=Yes';
else connectionString += `;UID=${user};PWD=${password}`; else connectionString += `;UID=${user};PWD=${password}`;
if (database) connectionString += `;Database=${database}`; if (database) connectionString += `;Database=${extractDbNameFromComposite(database)}`;
return new Promise((resolve, reject) => { return new Promise((resolve, reject) => {
getMsnodesqlv8().open(connectionString, (err, conn) => { getMsnodesqlv8().open(connectionString, (err, conn) => {
if (err) { if (err) {
reject(err); reject(err);
} else { } else {
conn._connectionType = 'msnodesqlv8';
resolve(conn); resolve(conn);
} }
}); });
@@ -88,7 +88,7 @@ async function nativeConnect(connection) {
} }
} }
async function nativeQueryCore(pool, sql, options) { async function nativeQueryCore(dbhan, sql, options) {
if (sql == null) { if (sql == null) {
return Promise.resolve({ return Promise.resolve({
rows: [], rows: [],
@@ -98,7 +98,7 @@ async function nativeQueryCore(pool, sql, options) {
return new Promise((resolve, reject) => { return new Promise((resolve, reject) => {
let columns = null; let columns = null;
let currentRow = null; let currentRow = null;
const q = pool.query(sql); const q = dbhan.client.query(sql);
const rows = []; const rows = [];
q.on('meta', meta => { q.on('meta', meta => {
@@ -128,7 +128,7 @@ async function nativeQueryCore(pool, sql, options) {
}); });
} }
async function nativeReadQuery(pool, sql, structure) { async function nativeReadQuery(dbhan, sql, structure) {
const pass = new stream.PassThrough({ const pass = new stream.PassThrough({
objectMode: true, objectMode: true,
highWaterMark: 100, highWaterMark: 100,
@@ -136,7 +136,7 @@ async function nativeReadQuery(pool, sql, structure) {
let columns = null; let columns = null;
let currentRow = null; let currentRow = null;
const q = pool.query(sql); const q = dbhan.client.query(sql);
q.on('meta', meta => { q.on('meta', meta => {
columns = extractNativeColumns(meta); columns = extractNativeColumns(meta);
@@ -168,7 +168,7 @@ async function nativeReadQuery(pool, sql, structure) {
return pass; return pass;
} }
async function nativeStream(pool, sql, options) { async function nativeStream(dbhan, sql, options) {
const handleInfo = info => { const handleInfo = info => {
const { message, lineNumber, procName } = info; const { message, lineNumber, procName } = info;
options.info({ options.info({
@@ -192,7 +192,7 @@ async function nativeStream(pool, sql, options) {
let columns = null; let columns = null;
let currentRow = null; let currentRow = null;
const q = pool.query(sql); const q = dbhan.client.query(sql);
q.on('meta', meta => { q.on('meta', meta => {
if (currentRow) options.row(currentRow); if (currentRow) options.row(currentRow);

View File

@@ -15,6 +15,6 @@ INNER JOIN sys.schemas u ON u.schema_id=o.schema_id
INNER JOIN INFORMATION_SCHEMA.COLUMNS col ON col.TABLE_NAME = o.name AND col.TABLE_SCHEMA = u.name and col.COLUMN_NAME = c.name INNER JOIN INFORMATION_SCHEMA.COLUMNS col ON col.TABLE_NAME = o.name AND col.TABLE_SCHEMA = u.name and col.COLUMN_NAME = c.name
left join sys.default_constraints d on c.default_object_id = d.object_id left join sys.default_constraints d on c.default_object_id = d.object_id
left join sys.computed_columns m on m.object_id = c.object_id and m.column_id = c.column_id left join sys.computed_columns m on m.object_id = c.object_id and m.column_id = c.column_id
where o.type = 'U' and o.object_id =OBJECT_ID_CONDITION where o.type = 'U' and o.object_id =OBJECT_ID_CONDITION and u.name =SCHEMA_NAME_CONDITION
order by c.column_id order by c.column_id
`; `;

View File

@@ -36,5 +36,5 @@ LEFT JOIN sys.schemas IXS ON IXT.schema_id = IXS.schema_id
inner join sys.objects o on FK.TABLE_NAME = o.name inner join sys.objects o on FK.TABLE_NAME = o.name
inner join sys.schemas s on o.schema_id = s.schema_id and FK.TABLE_SCHEMA = s.name inner join sys.schemas s on o.schema_id = s.schema_id and FK.TABLE_SCHEMA = s.name
where o.object_id =OBJECT_ID_CONDITION where o.object_id =OBJECT_ID_CONDITION and s.name =SCHEMA_NAME_CONDITION
`; `;

View File

@@ -1 +0,0 @@
module.exports = `select schema_id as objectId, name as schemaName from sys.schemas`;

View File

@@ -8,7 +8,6 @@ const loadSqlCode = require('./loadSqlCode');
const views = require('./views'); const views = require('./views');
const programmables = require('./programmables'); const programmables = require('./programmables');
const viewColumns = require('./viewColumns'); const viewColumns = require('./viewColumns');
const getSchemas = require('./getSchemas');
const indexes = require('./indexes'); const indexes = require('./indexes');
const indexcols = require('./indexcols'); const indexcols = require('./indexcols');
@@ -22,7 +21,6 @@ module.exports = {
views, views,
programmables, programmables,
viewColumns, viewColumns,
getSchemas,
indexes, indexes,
indexcols, indexcols,
tableSizes, tableSizes,

View File

@@ -5,6 +5,8 @@ select
c.is_descending_key as isDescending, c.is_included_column as isIncludedColumn c.is_descending_key as isDescending, c.is_included_column as isIncludedColumn
from sys.index_columns c from sys.index_columns c
inner join sys.columns col on c.object_id = col.object_id and c.column_id = col.column_id inner join sys.columns col on c.object_id = col.object_id and c.column_id = col.column_id
where c.object_id =OBJECT_ID_CONDITION inner join sys.objects o on c.object_id = o.object_id
INNER JOIN sys.schemas u ON u.schema_id=o.schema_id
where c.object_id =OBJECT_ID_CONDITION and u.name =SCHEMA_NAME_CONDITION
order by c.key_ordinal order by c.key_ordinal
`; `;

View File

@@ -1,5 +1,7 @@
module.exports = ` module.exports = `
select i.object_id, i.name as constraintName, i.type_desc as indexType, i.is_unique as isUnique,i.index_id, i.is_unique_constraint from sys.indexes i select i.object_id, i.name as constraintName, i.type_desc as indexType, i.is_unique as isUnique,i.index_id, i.is_unique_constraint from sys.indexes i
inner join sys.objects o on i.object_id = o.object_id
INNER JOIN sys.schemas u ON u.schema_id=o.schema_id
where i.is_primary_key=0 where i.is_primary_key=0
and i.is_hypothetical=0 and indexproperty(i.object_id, i.name, 'IsStatistics') = 0 and i.is_hypothetical=0 and indexproperty(i.object_id, i.name, 'IsStatistics') = 0
and objectproperty(i.object_id, 'IsUserTable') = 1 and objectproperty(i.object_id, 'IsUserTable') = 1
@@ -10,5 +12,5 @@ and i.index_id between 1 and 254
-- where o.parent_obj = i.object_id -- where o.parent_obj = i.object_id
-- and objectproperty(o.id, N'isConstraint') = 1.0) -- and objectproperty(o.id, N'isConstraint') = 1.0)
and i.object_id =OBJECT_ID_CONDITION and i.object_id =OBJECT_ID_CONDITION and u.name =SCHEMA_NAME_CONDITION
`; `;

View File

@@ -3,6 +3,6 @@ select s.name as pureName, u.name as schemaName, c.text AS codeText
from sys.objects s from sys.objects s
inner join sys.syscomments c on s.object_id = c.id inner join sys.syscomments c on s.object_id = c.id
inner join sys.schemas u on u.schema_id = s.schema_id inner join sys.schemas u on u.schema_id = s.schema_id
where (s.object_id =OBJECT_ID_CONDITION) where (s.object_id =OBJECT_ID_CONDITION) and u.name =SCHEMA_NAME_CONDITION
order by u.name, s.name, c.colid order by u.name, s.name, c.colid
`; `;

View File

@@ -3,4 +3,5 @@ select o.object_id as objectId, o.modify_date as modifyDate, o.type, o.name as p
from sys.objects o from sys.objects o
inner join sys.schemas s on o.schema_id = s.schema_id inner join sys.schemas s on o.schema_id = s.schema_id
where o.type in ('U', 'V', 'P', 'IF', 'FN', 'TF') -- , 'TR' - triggers disabled where o.type in ('U', 'V', 'P', 'IF', 'FN', 'TF') -- , 'TR' - triggers disabled
and s.name =SCHEMA_NAME_CONDITION
`; `;

View File

@@ -11,4 +11,5 @@ where
and c.Table_Name = t.Table_Name and c.Table_Name = t.Table_Name
and Constraint_Type = 'PRIMARY KEY' and Constraint_Type = 'PRIMARY KEY'
and o.object_id =OBJECT_ID_CONDITION and o.object_id =OBJECT_ID_CONDITION
and s.name =SCHEMA_NAME_CONDITION
`; `;

View File

@@ -2,5 +2,5 @@ module.exports = `
select o.name as pureName, s.name as schemaName, o.object_id as objectId, o.create_date as createDate, o.modify_date as modifyDate, o.type as sqlObjectType select o.name as pureName, s.name as schemaName, o.object_id as objectId, o.create_date as createDate, o.modify_date as modifyDate, o.type as sqlObjectType
from sys.objects o from sys.objects o
inner join sys.schemas s on o.schema_id = s.schema_id inner join sys.schemas s on o.schema_id = s.schema_id
where o.type in ('P', 'IF', 'FN', 'TF') and o.object_id =OBJECT_ID_CONDITION where o.type in ('P', 'IF', 'FN', 'TF') and o.object_id =OBJECT_ID_CONDITION and s.name =SCHEMA_NAME_CONDITION
`; `;

View File

@@ -8,8 +8,11 @@ INNER JOIN
sys.indexes i ON t.OBJECT_ID = i.object_id sys.indexes i ON t.OBJECT_ID = i.object_id
INNER JOIN INNER JOIN
sys.partitions p ON i.object_id = p.OBJECT_ID AND i.index_id = p.index_id sys.partitions p ON i.object_id = p.OBJECT_ID AND i.index_id = p.index_id
INNER JOIN
sys.schemas s ON t.schema_id = s.schema_id
WHERE WHERE
t.NAME NOT LIKE 'dt%' t.NAME NOT LIKE 'dt%'
AND t.is_ms_shipped = 0 AND t.is_ms_shipped = 0
AND i.OBJECT_ID > 255 AND i.OBJECT_ID > 255
AND s.name =SCHEMA_NAME_CONDITION
`; `;

View File

@@ -4,5 +4,5 @@ select
o.create_date as createDate, o.modify_date as modifyDate o.create_date as createDate, o.modify_date as modifyDate
from sys.tables o from sys.tables o
inner join sys.schemas s on o.schema_id = s.schema_id inner join sys.schemas s on o.schema_id = s.schema_id
where o.object_id =OBJECT_ID_CONDITION where o.object_id =OBJECT_ID_CONDITION and s.name =SCHEMA_NAME_CONDITION
`; `;

View File

@@ -13,6 +13,6 @@ select
FROM sys.objects o FROM sys.objects o
INNER JOIN sys.schemas u ON u.schema_id=o.schema_id INNER JOIN sys.schemas u ON u.schema_id=o.schema_id
INNER JOIN INFORMATION_SCHEMA.COLUMNS col ON col.TABLE_NAME = o.name AND col.TABLE_SCHEMA = u.name INNER JOIN INFORMATION_SCHEMA.COLUMNS col ON col.TABLE_NAME = o.name AND col.TABLE_SCHEMA = u.name
WHERE o.type in ('V') and o.object_id =OBJECT_ID_CONDITION WHERE o.type in ('V') and o.object_id =OBJECT_ID_CONDITION and u.name =SCHEMA_NAME_CONDITION
order by col.ORDINAL_POSITION order by col.ORDINAL_POSITION
`; `;

View File

@@ -6,5 +6,5 @@ SELECT
o.create_date as createDate, o.create_date as createDate,
o.modify_date as modifyDate o.modify_date as modifyDate
FROM sys.objects o INNER JOIN sys.schemas u ON u.schema_id=o.schema_id FROM sys.objects o INNER JOIN sys.schemas u ON u.schema_id=o.schema_id
WHERE type in ('V') and o.object_id =OBJECT_ID_CONDITION WHERE type in ('V') and o.object_id =OBJECT_ID_CONDITION and u.name =SCHEMA_NAME_CONDITION
`; `;

View File

@@ -2,6 +2,7 @@ const _ = require('lodash');
const stream = require('stream'); const stream = require('stream');
const tedious = require('tedious'); const tedious = require('tedious');
const makeUniqueColumnNames = require('./makeUniqueColumnNames'); const makeUniqueColumnNames = require('./makeUniqueColumnNames');
const { extractDbNameFromComposite } = global.DBGATE_PACKAGES['dbgate-tools'];
function extractTediousColumns(columns, addDriverNativeColumn = false) { function extractTediousColumns(columns, addDriverNativeColumn = false) {
const res = columns.map(col => { const res = columns.map(col => {
@@ -39,7 +40,7 @@ async function tediousConnect(storedConnection) {
}; };
if (database) { if (database) {
connectionOptions.database = database; connectionOptions.database = extractDbNameFromComposite(database);
} }
const authentication = const authentication =
@@ -68,14 +69,13 @@ async function tediousConnect(storedConnection) {
if (err) { if (err) {
reject(err); reject(err);
} }
connection._connectionType = 'tedious';
resolve(connection); resolve(connection);
}); });
connection.connect(); connection.connect();
}); });
} }
async function tediousQueryCore(pool, sql, options) { async function tediousQueryCore(dbhan, sql, options) {
if (sql == null) { if (sql == null) {
return Promise.resolve({ return Promise.resolve({
rows: [], rows: [],
@@ -103,12 +103,12 @@ async function tediousQueryCore(pool, sql, options) {
) )
); );
}); });
if (discardResult) pool.execSqlBatch(request); if (discardResult) dbhan.client.execSqlBatch(request);
else pool.execSql(request); else dbhan.client.execSql(request);
}); });
} }
async function tediousReadQuery(pool, sql, structure) { async function tediousReadQuery(dbhan, sql, structure) {
const pass = new stream.PassThrough({ const pass = new stream.PassThrough({
objectMode: true, objectMode: true,
highWaterMark: 100, highWaterMark: 100,
@@ -133,12 +133,12 @@ async function tediousReadQuery(pool, sql, structure) {
); );
pass.write(row); pass.write(row);
}); });
pool.execSql(request); dbhan.client.execSql(request);
return pass; return pass;
} }
async function tediousStream(pool, sql, options) { async function tediousStream(dbhan, sql, options) {
let currentColumns = []; let currentColumns = [];
const handleInfo = info => { const handleInfo = info => {
@@ -162,14 +162,14 @@ async function tediousStream(pool, sql, options) {
}); });
}; };
pool.on('infoMessage', handleInfo); dbhan.client.on('infoMessage', handleInfo);
pool.on('errorMessage', handleError); dbhan.client.on('errorMessage', handleError);
const request = new tedious.Request(sql, (err, rowCount) => { const request = new tedious.Request(sql, (err, rowCount) => {
// if (err) reject(err); // if (err) reject(err);
// else resolve(result); // else resolve(result);
options.done(); options.done();
pool.off('infoMessage', handleInfo); dbhan.client.off('infoMessage', handleInfo);
pool.off('errorMessage', handleError); dbhan.client.off('errorMessage', handleError);
options.info({ options.info({
message: `${rowCount} rows affected`, message: `${rowCount} rows affected`,
@@ -188,7 +188,7 @@ async function tediousStream(pool, sql, options) {
); );
options.row(row); options.row(row);
}); });
pool.execSqlBatch(request); dbhan.client.execSqlBatch(request);
} }
module.exports = { module.exports = {

View File

@@ -127,9 +127,17 @@ const driver = {
return dialect; return dialect;
}, },
showConnectionField: (field, values) => showConnectionField: (field, values) =>
['authType', 'server', 'port', 'user', 'password', 'defaultDatabase', 'singleDatabase', 'isReadOnly'].includes( [
field 'authType',
) || 'server',
'port',
'user',
'password',
'defaultDatabase',
'singleDatabase',
'isReadOnly',
'useSeparateSchemas',
].includes(field) ||
(field == 'trustServerCertificate' && values.authType != 'sql' && values.authType != 'sspi') || (field == 'trustServerCertificate' && values.authType != 'sql' && values.authType != 'sspi') ||
(field == 'windowsDomain' && values.authType != 'sql' && values.authType != 'sspi' && values.authType != 'msentra'), (field == 'windowsDomain' && values.authType != 'sql' && values.authType != 'sspi' && values.authType != 'msentra'),
// (field == 'useDatabaseUrl' && values.authType != 'sql' && values.authType != 'sspi') // (field == 'useDatabaseUrl' && values.authType != 'sql' && values.authType != 'sspi')

View File

@@ -62,13 +62,13 @@ function getColumnInfo(
} }
class Analyser extends DatabaseAnalyser { class Analyser extends DatabaseAnalyser {
constructor(pool, driver, version) { constructor(dbhan, driver, version) {
super(pool, driver, version); super(dbhan, driver, version);
} }
createQuery(resFileName, typeFields, replacements = {}) { createQuery(resFileName, typeFields, replacements = {}) {
let res = sql[resFileName]; let res = sql[resFileName];
res = res.replace('#DATABASE#', this.pool._database_name); res = res.replace('#DATABASE#', this.dbhan.database);
return super.createQuery(res, typeFields, replacements); return super.createQuery(res, typeFields, replacements);
} }

View File

@@ -48,17 +48,19 @@ const drivers = driverBases.map(driverBase => ({
// multipleStatements: true, // multipleStatements: true,
}; };
const connection = mysql2.createConnection(options); const client = mysql2.createConnection(options);
connection._database_name = database;
if (isReadOnly) { if (isReadOnly) {
await this.query(connection, 'SET SESSION TRANSACTION READ ONLY'); await this.query(client, 'SET SESSION TRANSACTION READ ONLY');
} }
return connection; return {
client,
database,
};
}, },
async close(pool) { async close(dbhan) {
return pool.close(); return dbhan.client.close();
}, },
query(connection, sql) { query(dbhan, sql) {
if (sql == null) { if (sql == null) {
return { return {
rows: [], rows: [],
@@ -67,15 +69,15 @@ const drivers = driverBases.map(driverBase => ({
} }
return new Promise((resolve, reject) => { return new Promise((resolve, reject) => {
connection.query(sql, function (error, results, fields) { dbhan.client.query(sql, function (error, results, fields) {
if (error) reject(error); if (error) reject(error);
const columns = extractColumns(fields); const columns = extractColumns(fields);
resolve({ rows: results && columns && results.map && results.map(row => zipDataRow(row, columns)), columns }); resolve({ rows: results && columns && results.map && results.map(row => zipDataRow(row, columns)), columns });
}); });
}); });
}, },
async stream(connection, sql, options) { async stream(dbhan, sql, options) {
const query = connection.query(sql); const query = dbhan.client.query(sql);
let columns = []; let columns = [];
// const handleInfo = (info) => { // const handleInfo = (info) => {
@@ -125,8 +127,8 @@ const drivers = driverBases.map(driverBase => ({
query.on('error', handleError).on('fields', handleFields).on('result', handleRow).on('end', handleEnd); query.on('error', handleError).on('fields', handleFields).on('result', handleRow).on('end', handleEnd);
}, },
async readQuery(connection, sql, structure) { async readQuery(dbhan, sql, structure) {
const query = connection.query(sql); const query = dbhan.client.query(sql);
const pass = new stream.PassThrough({ const pass = new stream.PassThrough({
objectMode: true, objectMode: true,
@@ -151,8 +153,8 @@ const drivers = driverBases.map(driverBase => ({
return pass; return pass;
}, },
async getVersion(connection) { async getVersion(dbhan) {
const { rows } = await this.query(connection, "show variables like 'version'"); const { rows } = await this.query(dbhan, "show variables like 'version'");
const version = rows[0].Value; const version = rows[0].Value;
if (version) { if (version) {
const m = version.match(/(.*)-MariaDB-/); const m = version.match(/(.*)-MariaDB-/);
@@ -169,18 +171,18 @@ const drivers = driverBases.map(driverBase => ({
versionText: `MySQL ${version}`, versionText: `MySQL ${version}`,
}; };
}, },
async listDatabases(connection) { async listDatabases(dbhan) {
const { rows } = await this.query(connection, 'show databases'); const { rows } = await this.query(dbhan, 'show databases');
return rows.map(x => ({ name: x.Database })); return rows.map(x => ({ name: x.Database }));
}, },
async writeTable(pool, name, options) { async writeTable(dbhan, name, options) {
// @ts-ignore // @ts-ignore
return createBulkInsertStreamBase(this, stream, pool, name, options); return createBulkInsertStreamBase(this, stream, dbhan, name, options);
}, },
async createBackupDumper(pool, options) { async createBackupDumper(dbhan, options) {
const { outputFile, databaseName, schemaName } = options; const { outputFile, databaseName, schemaName } = options;
const res = new MySqlDumper({ const res = new MySqlDumper({
connection: pool, connection: dbhan.client,
schema: databaseName || schemaName, schema: databaseName || schemaName,
outputFile, outputFile,
}); });

View File

@@ -30,8 +30,8 @@ function getColumnInfo(
} }
class Analyser extends DatabaseAnalyser { class Analyser extends DatabaseAnalyser {
constructor(pool, driver, version) { constructor(dbhan, driver, version) {
super(pool, driver, version); super(dbhan, driver, version);
} }
createQuery(resFileName, typeFields, replacements = {}) { createQuery(resFileName, typeFields, replacements = {}) {
@@ -47,32 +47,32 @@ class Analyser extends DatabaseAnalyser {
async _runAnalysis() { async _runAnalysis() {
this.feedback({ analysingMessage: 'Loading tables' }); this.feedback({ analysingMessage: 'Loading tables' });
const tables = await this.analyserQuery('tableList', ['tables'], { $owner: this.pool._schema_name }); const tables = await this.analyserQuery('tableList', ['tables'], { $owner: this.dbhan.database });
this.feedback({ analysingMessage: 'Loading columns' }); this.feedback({ analysingMessage: 'Loading columns' });
const columns = await this.analyserQuery('columns', ['tables', 'views'], { $owner: this.pool._schema_name }); const columns = await this.analyserQuery('columns', ['tables', 'views'], { $owner: this.dbhan.database });
this.feedback({ analysingMessage: 'Loading primary keys' }); this.feedback({ analysingMessage: 'Loading primary keys' });
const pkColumns = await this.analyserQuery('primaryKeys', ['tables'], { $owner: this.pool._schema_name }); const pkColumns = await this.analyserQuery('primaryKeys', ['tables'], { $owner: this.dbhan.database });
//let fkColumns = null; //let fkColumns = null;
this.feedback({ analysingMessage: 'Loading foreign keys' }); this.feedback({ analysingMessage: 'Loading foreign keys' });
const fkColumns = await this.analyserQuery('foreignKeys', ['tables'], { $owner: this.pool._schema_name }); const fkColumns = await this.analyserQuery('foreignKeys', ['tables'], { $owner: this.dbhan.database });
this.feedback({ analysingMessage: 'Loading views' }); this.feedback({ analysingMessage: 'Loading views' });
const views = await this.analyserQuery('views', ['views'], { $owner: this.pool._schema_name }); const views = await this.analyserQuery('views', ['views'], { $owner: this.dbhan.database });
this.feedback({ analysingMessage: 'Loading materialized views' }); this.feedback({ analysingMessage: 'Loading materialized views' });
const matviews = this.driver.dialect.materializedViews const matviews = this.driver.dialect.materializedViews
? await this.analyserQuery('matviews', ['matviews'], { $owner: this.pool._schema_name }) ? await this.analyserQuery('matviews', ['matviews'], { $owner: this.dbhan.database })
: null; : null;
this.feedback({ analysingMessage: 'Loading routines' }); this.feedback({ analysingMessage: 'Loading routines' });
const routines = await this.analyserQuery('routines', ['procedures', 'functions'], { const routines = await this.analyserQuery('routines', ['procedures', 'functions'], {
$owner: this.pool._schema_name, $owner: this.dbhan.database,
}); });
this.feedback({ analysingMessage: 'Loading indexes' }); this.feedback({ analysingMessage: 'Loading indexes' });
const indexes = await this.analyserQuery('indexes', ['tables'], { $owner: this.pool._schema_name }); const indexes = await this.analyserQuery('indexes', ['tables'], { $owner: this.dbhan.database });
this.feedback({ analysingMessage: 'Loading unique names' }); this.feedback({ analysingMessage: 'Loading unique names' });
const uniqueNames = await this.analyserQuery('uniqueNames', ['tables'], { $owner: this.pool._schema_name }); const uniqueNames = await this.analyserQuery('uniqueNames', ['tables'], { $owner: this.dbhan.database });
this.feedback({ analysingMessage: 'Finalizing DB structure' }); this.feedback({ analysingMessage: 'Finalizing DB structure' });
const fkColumnsMapped = fkColumns.rows.map(x => ({ const fkColumnsMapped = fkColumns.rows.map(x => ({

View File

@@ -5,12 +5,12 @@ const _ = require('lodash');
* *
* @param {import('dbgate-types').EngineDriver} driver * @param {import('dbgate-types').EngineDriver} driver
*/ */
function createOracleBulkInsertStream(driver, stream, pool, name, options) { function createOracleBulkInsertStream(driver, stream, dbhan, name, options) {
const fullNameQuoted = name.schemaName const fullNameQuoted = name.schemaName
? `${driver.dialect.quoteIdentifier(name.schemaName)}.${driver.dialect.quoteIdentifier(name.pureName)}` ? `${driver.dialect.quoteIdentifier(name.schemaName)}.${driver.dialect.quoteIdentifier(name.pureName)}`
: driver.dialect.quoteIdentifier(name.pureName); : driver.dialect.quoteIdentifier(name.pureName);
const writable = createBulkInsertStreamBase(driver, stream, pool, name, { const writable = createBulkInsertStreamBase(driver, stream, dbhan, name, {
...options, ...options,
// this is really not used, send method below is used instead // this is really not used, send method below is used instead
commitAfterInsert: true, commitAfterInsert: true,
@@ -28,7 +28,7 @@ function createOracleBulkInsertStream(driver, stream, pool, name, options) {
dmp.putRaw(')'); dmp.putRaw(')');
const rows = writable.buffer.map(row => _.mapKeys(row, (v, k) => `c${writable.columnNames.indexOf(k)}`)); const rows = writable.buffer.map(row => _.mapKeys(row, (v, k) => `c${writable.columnNames.indexOf(k)}`));
await pool.executeMany(dmp.s, rows, { autoCommit: true }); await dbhan.client.executeMany(dmp.s, rows, { autoCommit: true });
writable.buffer = []; writable.buffer = [];
}; };

View File

@@ -88,13 +88,15 @@ const driver = {
if (database) { if (database) {
await client.execute(`ALTER SESSION SET CURRENT_SCHEMA = ${database}`); await client.execute(`ALTER SESSION SET CURRENT_SCHEMA = ${database}`);
} }
client._schema_name = database; return {
return client; client,
database,
};
}, },
async close(pool) { async close(dbhan) {
return pool.end(); return dbhan.client.end();
}, },
async query(client, sql) { async query(dbhan, sql) {
if (sql == null || sql.trim() == '') { if (sql == null || sql.trim() == '') {
return { return {
rows: [], rows: [],
@@ -107,7 +109,7 @@ const driver = {
sql = mtrim[1]; sql = mtrim[1];
} }
const res = await client.execute(sql); const res = await dbhan.client.execute(sql);
try { try {
const columns = extractOracleColumns(res.metaData); const columns = extractOracleColumns(res.metaData);
return { rows: (res.rows || []).map(row => zipDataRow(row, columns)), columns }; return { rows: (res.rows || []).map(row => zipDataRow(row, columns)), columns };
@@ -118,7 +120,7 @@ const driver = {
}; };
} }
}, },
stream(client, sql, options) { stream(dbhan, sql, options) {
/* /*
const query = new pg.Query({ const query = new pg.Query({
text: sql, text: sql,
@@ -128,7 +130,7 @@ const driver = {
// console.log('queryStream', sql); // console.log('queryStream', sql);
if (sql.trim().toLowerCase().startsWith('select')) { if (sql.trim().toLowerCase().startsWith('select')) {
const query = client.queryStream(sql); const query = dbhan.client.queryStream(sql);
// const consumeStream = new Promise((resolve, reject) => { // const consumeStream = new Promise((resolve, reject) => {
let rowcount = 0; let rowcount = 0;
let wasHeader = false; let wasHeader = false;
@@ -202,7 +204,7 @@ const driver = {
}); });
//}); //});
} else { } else {
client.execute(sql, (err, res) => { dbhan.client.execute(sql, (err, res) => {
if (err) { if (err) {
console.log('Error query', err, sql); console.log('Error query', err, sql);
const lineNumber = (sql.substring(0, err.offset).match(/\n/g) || []).length; const lineNumber = (sql.substring(0, err.offset).match(/\n/g) || []).length;
@@ -237,23 +239,23 @@ const driver = {
//console.log('Rows selected: ' + numrows); //console.log('Rows selected: ' + numrows);
//client.query(query); //client.query(query);
}, },
async getVersionCore(client) { async getVersionCore(dbhan) {
try { try {
const { rows } = await this.query( const { rows } = await this.query(
client, dbhan,
"SELECT product || ' ' || version_full as \"version\" FROM product_component_version WHERE product LIKE 'Oracle%Database%'" "SELECT product || ' ' || version_full as \"version\" FROM product_component_version WHERE product LIKE 'Oracle%Database%'"
); );
return rows[0].version.replace(' ', ' '); return rows[0].version.replace(' ', ' ');
} catch (e) { } catch (e) {
const { rows } = await this.query(client, 'SELECT banner as "version" FROM v$version'); const { rows } = await this.query(dbhan, 'SELECT banner as "version" FROM v$version');
return rows[0].version; return rows[0].version;
} }
}, },
async getVersion(client) { async getVersion(dbhan) {
try { try {
//const { rows } = await this.query(client, "SELECT banner as version FROM v$version WHERE banner LIKE 'Oracle%'"); //const { rows } = await this.query(client, "SELECT banner as version FROM v$version WHERE banner LIKE 'Oracle%'");
// const { rows } = await this.query(client, 'SELECT version as "version" FROM v$instance'); // const { rows } = await this.query(client, 'SELECT version as "version" FROM v$instance');
const version = await this.getVersionCore(client); const version = await this.getVersionCore(dbhan);
const m = version.match(/(\d+[a-z]+)\s+(\w+).*?(\d+)\.(\d+)/); const m = version.match(/(\d+[a-z]+)\s+(\w+).*?(\d+)\.(\d+)/);
//console.log('M', m); //console.log('M', m);
@@ -281,7 +283,7 @@ const driver = {
}; };
} }
}, },
async readQuery(client, sql, structure) { async readQuery(dbhan, sql, structure) {
/* /*
const query = new pg.Query({ const query = new pg.Query({
text: sql, text: sql,
@@ -289,7 +291,7 @@ const driver = {
}); });
*/ */
// console.log('readQuery', sql, structure); // console.log('readQuery', sql, structure);
const query = await client.queryStream(sql); const query = await dbhan.client.queryStream(sql);
let wasHeader = false; let wasHeader = false;
let columns = null; let columns = null;
@@ -333,11 +335,11 @@ const driver = {
return pass; return pass;
}, },
async writeTable(pool, name, options) { async writeTable(dbhan, name, options) {
return createOracleBulkInsertStream(this, stream, pool, name, options); return createOracleBulkInsertStream(this, stream, dbhan, name, options);
}, },
async listDatabases(client) { async listDatabases(dbhan) {
const { rows } = await this.query(client, 'SELECT username as "name" from all_users order by username'); const { rows } = await this.query(dbhan, 'SELECT username as "name" from all_users order by username');
return rows; return rows;
}, },

View File

@@ -2,7 +2,8 @@ const fp = require('lodash/fp');
const _ = require('lodash'); const _ = require('lodash');
const sql = require('./sql'); const sql = require('./sql');
const { DatabaseAnalyser, isTypeString, isTypeNumeric } = global.DBGATE_PACKAGES['dbgate-tools']; const { DatabaseAnalyser, isTypeString, isTypeNumeric, isCompositeDbName, splitCompositeDbName } =
global.DBGATE_PACKAGES['dbgate-tools'];
function normalizeTypeName(dataType) { function normalizeTypeName(dataType) {
if (dataType == 'character varying') return 'varchar'; if (dataType == 'character varying') return 'varchar';
@@ -50,8 +51,8 @@ function getColumnInfo(
} }
class Analyser extends DatabaseAnalyser { class Analyser extends DatabaseAnalyser {
constructor(pool, driver, version) { constructor(dbhan, driver, version) {
super(pool, driver, version); super(dbhan, driver, version);
} }
createQuery(resFileName, typeFields, replacements = {}) { createQuery(resFileName, typeFields, replacements = {}) {
@@ -312,17 +313,6 @@ class Analyser extends DatabaseAnalyser {
return res; return res;
} }
async readSchemaList() {
const schemaRows = await this.analyserQuery('getSchemas');
const schemas = schemaRows.rows.map(x => ({
schemaName: x.schema_name,
objectId: `schemas:${x.schema_name}`,
}));
return schemas;
}
async _getFastSnapshot() { async _getFastSnapshot() {
const tableModificationsQueryData = this.driver.dialect.stringAgg const tableModificationsQueryData = this.driver.dialect.stringAgg
? await this.analyserQuery('tableModifications') ? await this.analyserQuery('tableModifications')
@@ -374,6 +364,10 @@ class Analyser extends DatabaseAnalyser {
})), })),
}; };
} }
getDefaultSchemaNameCondition() {
return `not in ('pg_catalog', 'pg_toast', 'information_schema')`;
}
} }
module.exports = Analyser; module.exports = Analyser;

View File

@@ -4,7 +4,8 @@ const stream = require('stream');
const driverBases = require('../frontend/drivers'); const driverBases = require('../frontend/drivers');
const Analyser = require('./Analyser'); const Analyser = require('./Analyser');
const pg = require('pg'); const pg = require('pg');
const { getLogger, createBulkInsertStreamBase, makeUniqueColumnNames } = global.DBGATE_PACKAGES['dbgate-tools'];; const { getLogger, createBulkInsertStreamBase, makeUniqueColumnNames, extractDbNameFromComposite } =
global.DBGATE_PACKAGES['dbgate-tools'];
const logger = getLogger('postreDriver'); const logger = getLogger('postreDriver');
@@ -76,7 +77,7 @@ const drivers = driverBases.map(driverBase => ({
port: authType == 'socket' ? null : port, port: authType == 'socket' ? null : port,
user, user,
password, password,
database: database || 'postgres', database: extractDbNameFromComposite(database) || 'postgres',
ssl, ssl,
application_name: 'DbGate', application_name: 'DbGate',
}; };
@@ -89,23 +90,26 @@ const drivers = driverBases.map(driverBase => ({
await this.query(client, 'SET SESSION CHARACTERISTICS AS TRANSACTION READ ONLY'); await this.query(client, 'SET SESSION CHARACTERISTICS AS TRANSACTION READ ONLY');
} }
return client; return {
client,
database,
};
}, },
async close(pool) { async close(dbhan) {
return pool.end(); return dbhan.client.end();
}, },
async query(client, sql) { async query(dbhan, sql) {
if (sql == null) { if (sql == null) {
return { return {
rows: [], rows: [],
columns: [], columns: [],
}; };
} }
const res = await client.query({ text: sql, rowMode: 'array' }); const res = await dbhan.client.query({ text: sql, rowMode: 'array' });
const columns = extractPostgresColumns(res); const columns = extractPostgresColumns(res);
return { rows: (res.rows || []).map(row => zipDataRow(row, columns)), columns }; return { rows: (res.rows || []).map(row => zipDataRow(row, columns)), columns };
}, },
stream(client, sql, options) { stream(dbhan, sql, options) {
const query = new pg.Query({ const query = new pg.Query({
text: sql, text: sql,
rowMode: 'array', rowMode: 'array',
@@ -164,10 +168,10 @@ const drivers = driverBases.map(driverBase => ({
options.done(); options.done();
}); });
client.query(query); dbhan.client.query(query);
}, },
async getVersion(client) { async getVersion(dbhan) {
const { rows } = await this.query(client, 'SELECT version()'); const { rows } = await this.query(dbhan, 'SELECT version()');
const { version } = rows[0]; const { version } = rows[0];
const isCockroach = version.toLowerCase().includes('cockroachdb'); const isCockroach = version.toLowerCase().includes('cockroachdb');
@@ -197,7 +201,7 @@ const drivers = driverBases.map(driverBase => ({
versionMinor, versionMinor,
}; };
}, },
async readQuery(client, sql, structure) { async readQuery(dbhan, sql, structure) {
const query = new pg.Query({ const query = new pg.Query({
text: sql, text: sql,
rowMode: 'array', rowMode: 'array',
@@ -242,16 +246,16 @@ const drivers = driverBases.map(driverBase => ({
pass.end(); pass.end();
}); });
client.query(query); dbhan.client.query(query);
return pass; return pass;
}, },
async writeTable(pool, name, options) { async writeTable(dbhan, name, options) {
// @ts-ignore // @ts-ignore
return createBulkInsertStreamBase(this, stream, pool, name, options); return createBulkInsertStreamBase(this, stream, dbhan, name, options);
}, },
async listDatabases(client) { async listDatabases(dbhan) {
const { rows } = await this.query(client, 'SELECT datname AS name FROM pg_database WHERE datistemplate = false'); const { rows } = await this.query(dbhan, 'SELECT datname AS name FROM pg_database WHERE datistemplate = false');
return rows; return rows;
}, },
@@ -267,6 +271,25 @@ const drivers = driverBases.map(driverBase => ({
}, },
]; ];
}, },
async listSchemas(dbhan) {
const schemaRows = await this.query(
dbhan,
'select oid as "object_id", nspname as "schema_name" from pg_catalog.pg_namespace'
);
const defaultSchemaRows = await this.query(dbhan, 'SHOW SEARCH_PATH;');
const searchPath = defaultSchemaRows.rows[0]?.search_path?.replace('"$user",', '')?.trim();
logger.debug(`Loaded ${schemaRows.rows.length} postgres schemas`);
const schemas = schemaRows.rows.map(x => ({
schemaName: x.schema_name,
objectId: x.object_id,
isDefault: x.schema_name == searchPath,
}));
return schemas;
},
})); }));
module.exports = drivers; module.exports = drivers;

View File

@@ -11,14 +11,12 @@ select
column_default as "default_value" column_default as "default_value"
from information_schema.columns from information_schema.columns
where where
table_schema <> 'information_schema' table_schema !~ '^_timescaledb_'
and table_schema <> 'pg_catalog'
and table_schema !~ '^pg_toast'
and table_schema !~ '^_timescaledb_'
and ( and (
('tables:' || table_schema || '.' || table_name) =OBJECT_ID_CONDITION ('tables:' || table_schema || '.' || table_name) =OBJECT_ID_CONDITION
or or
('views:' || table_schema || '.' || table_name) =OBJECT_ID_CONDITION ('views:' || table_schema || '.' || table_name) =OBJECT_ID_CONDITION
) )
and table_schema =SCHEMA_NAME_CONDITION
order by ordinal_position order by ordinal_position
`; `;

View File

@@ -7,5 +7,5 @@ select
basecol.table_name, basecol.table_name,
basecol.ordinal_position basecol.ordinal_position
from information_schema.key_column_usage basecol from information_schema.key_column_usage basecol
where ('tables:' || basecol.table_schema || '.' || basecol.table_name) =OBJECT_ID_CONDITION where ('tables:' || basecol.table_schema || '.' || basecol.table_name) =OBJECT_ID_CONDITION and basecol.table_schema =SCHEMA_NAME_CONDITION
`; `;

View File

@@ -7,4 +7,5 @@ select
fk.unique_constraint_name as "unique_constraint_name", fk.unique_constraint_name as "unique_constraint_name",
fk.unique_constraint_schema as "unique_constraint_schema" fk.unique_constraint_schema as "unique_constraint_schema"
from information_schema.referential_constraints fk from information_schema.referential_constraints fk
where fk.constraint_schema =SCHEMA_NAME_CONDITION
`; `;

View File

@@ -5,5 +5,5 @@ select
base.constraint_name as "constraint_name", base.constraint_name as "constraint_name",
base.constraint_schema as "constraint_schema" base.constraint_schema as "constraint_schema"
from information_schema.table_constraints base from information_schema.table_constraints base
where ('tables:' || base.table_schema || '.' || base.table_name) =OBJECT_ID_CONDITION where ('tables:' || base.table_schema || '.' || base.table_name) =OBJECT_ID_CONDITION and base.table_schema =SCHEMA_NAME_CONDITION
`; `;

View File

@@ -4,5 +4,5 @@ select
f_table_name as "pure_name", f_table_name as "pure_name",
f_geography_column as "column_name" f_geography_column as "column_name"
from public.geography_columns from public.geography_columns
where ('tables:' || f_table_schema || '.' || f_table_name) =OBJECT_ID_CONDITION where ('tables:' || f_table_schema || '.' || f_table_name) =OBJECT_ID_CONDITION and f_table_schema =SCHEMA_NAME_CONDITION
`; `;

View File

@@ -4,5 +4,5 @@ select
f_table_name as "pure_name", f_table_name as "pure_name",
f_geometry_column as "column_name" f_geometry_column as "column_name"
from public.geometry_columns from public.geometry_columns
where ('tables:' || f_table_schema || '.' || f_table_name) =OBJECT_ID_CONDITION where ('tables:' || f_table_schema || '.' || f_table_name) =OBJECT_ID_CONDITION and f_table_schema =SCHEMA_NAME_CONDITION
`; `;

View File

@@ -1 +0,0 @@
module.exports = `select oid as "object_id", nspname as "schema_name" from pg_catalog.pg_namespace`;

View File

@@ -12,7 +12,6 @@ const matviewColumns = require('./matviewColumns');
const indexes = require('./indexes'); const indexes = require('./indexes');
const indexcols = require('./indexcols'); const indexcols = require('./indexcols');
const uniqueNames = require('./uniqueNames'); const uniqueNames = require('./uniqueNames');
const getSchemas = require('./getSchemas');
const geometryColumns = require('./geometryColumns'); const geometryColumns = require('./geometryColumns');
const geographyColumns = require('./geographyColumns'); const geographyColumns = require('./geographyColumns');
@@ -40,5 +39,4 @@ module.exports = {
uniqueNames, uniqueNames,
geometryColumns, geometryColumns,
geographyColumns, geographyColumns,
getSchemas,
}; };

View File

@@ -19,6 +19,7 @@ module.exports = `
and t.relnamespace = c.oid and t.relnamespace = c.oid
and c.nspname != 'pg_catalog' and c.nspname != 'pg_catalog'
and ('tables:' || c.nspname || '.' || t.relname) =OBJECT_ID_CONDITION and ('tables:' || c.nspname || '.' || t.relname) =OBJECT_ID_CONDITION
and c.nspname =SCHEMA_NAME_CONDITION
order by order by
t.relname t.relname
`; `;

View File

@@ -21,6 +21,7 @@ module.exports = `
and t.relnamespace = c.oid and t.relnamespace = c.oid
and c.nspname != 'pg_catalog' and c.nspname != 'pg_catalog'
and ('tables:' || c.nspname || '.' || t.relname) =OBJECT_ID_CONDITION and ('tables:' || c.nspname || '.' || t.relname) =OBJECT_ID_CONDITION
and c.nspname =SCHEMA_NAME_CONDITION
order by order by
t.relname t.relname
`; `;

View File

@@ -12,6 +12,7 @@ FROM pg_catalog.pg_class
WHERE pg_class.relkind = 'm' WHERE pg_class.relkind = 'm'
AND pg_attribute.attnum >= 1 AND pg_attribute.attnum >= 1
AND ('matviews:' || pg_namespace.nspname || '.' || pg_class.relname) =OBJECT_ID_CONDITION AND ('matviews:' || pg_namespace.nspname || '.' || pg_class.relname) =OBJECT_ID_CONDITION
AND pg_namespace.nspname =SCHEMA_NAME_CONDITION
ORDER BY pg_attribute.attnum ORDER BY pg_attribute.attnum
`; `;

View File

@@ -4,5 +4,5 @@ select
schemaname as "schema_name", schemaname as "schema_name",
md5(definition) as "hash_code" md5(definition) as "hash_code"
from from
pg_catalog.pg_matviews WHERE schemaname NOT LIKE 'pg_%' pg_catalog.pg_matviews WHERE schemaname NOT LIKE 'pg_%' AND schemaname =SCHEMA_NAME_CONDITION
`; `;

View File

@@ -7,4 +7,5 @@ select
from from
pg_catalog.pg_matviews WHERE schemaname NOT LIKE 'pg_%' pg_catalog.pg_matviews WHERE schemaname NOT LIKE 'pg_%'
and ('matviews:' || schemaname || '.' || matviewname) =OBJECT_ID_CONDITION and ('matviews:' || schemaname || '.' || matviewname) =OBJECT_ID_CONDITION
and schemaname =SCHEMA_NAME_CONDITION
`; `;

View File

@@ -8,11 +8,9 @@ select
from information_schema.table_constraints from information_schema.table_constraints
inner join information_schema.key_column_usage on table_constraints.table_name = key_column_usage.table_name and table_constraints.constraint_name = key_column_usage.constraint_name inner join information_schema.key_column_usage on table_constraints.table_name = key_column_usage.table_name and table_constraints.constraint_name = key_column_usage.constraint_name
where where
table_constraints.table_schema <> 'information_schema' table_constraints.table_schema !~ '^_timescaledb_'
and table_constraints.table_schema <> 'pg_catalog'
and table_constraints.table_schema !~ '^pg_toast'
and table_constraints.table_schema !~ '^_timescaledb_'
and table_constraints.constraint_type = 'PRIMARY KEY' and table_constraints.constraint_type = 'PRIMARY KEY'
and ('tables:' || table_constraints.table_schema || '.' || table_constraints.table_name) =OBJECT_ID_CONDITION and ('tables:' || table_constraints.table_schema || '.' || table_constraints.table_name) =OBJECT_ID_CONDITION
and table_constraints.table_schema =SCHEMA_NAME_CONDITION
order by key_column_usage.ordinal_position order by key_column_usage.ordinal_position
`; `;

View File

@@ -5,6 +5,6 @@ select
md5(routine_definition) as "hash_code", md5(routine_definition) as "hash_code",
routine_type as "object_type" routine_type as "object_type"
from from
information_schema.routines where routine_schema != 'information_schema' and routine_schema != 'pg_catalog' and routine_schema !~ '^_timescaledb_' information_schema.routines where routine_schema !~ '^_timescaledb_'
and routine_type in ('PROCEDURE', 'FUNCTION') and routine_type in ('PROCEDURE', 'FUNCTION') and routine_schema =SCHEMA_NAME_CONDITION
`; `;

View File

@@ -8,7 +8,8 @@ select
$typeAggFunc(data_type $typeAggParam) as "data_type", $typeAggFunc(data_type $typeAggParam) as "data_type",
max(external_language) as "language" max(external_language) as "language"
from from
information_schema.routines where routine_schema != 'information_schema' and routine_schema != 'pg_catalog' and routine_schema !~ '^_timescaledb_' information_schema.routines where routine_schema !~ '^_timescaledb_'
and routine_schema =SCHEMA_NAME_CONDITION
and ( and (
(routine_type = 'PROCEDURE' and ('procedures:' || routine_schema || '.' || routine_name) =OBJECT_ID_CONDITION) (routine_type = 'PROCEDURE' and ('procedures:' || routine_schema || '.' || routine_name) =OBJECT_ID_CONDITION)
or or

View File

@@ -3,9 +3,7 @@ select infoTables.table_schema as "schema_name", infoTables.table_name as "pure_
from information_schema.tables infoTables from information_schema.tables infoTables
where infoTables.table_type not like '%VIEW%' where infoTables.table_type not like '%VIEW%'
and ('tables:' || infoTables.table_schema || '.' || infoTables.table_name) =OBJECT_ID_CONDITION and ('tables:' || infoTables.table_schema || '.' || infoTables.table_name) =OBJECT_ID_CONDITION
and infoTables.table_schema <> 'pg_catalog'
and infoTables.table_schema <> 'information_schema'
and infoTables.table_schema <> 'pg_internal' and infoTables.table_schema <> 'pg_internal'
and infoTables.table_schema !~ '^pg_toast'
and infoTables.table_schema !~ '^_timescaledb_' and infoTables.table_schema !~ '^_timescaledb_'
and infoTables.table_schema =SCHEMA_NAME_CONDITION
`; `;

View File

@@ -21,9 +21,7 @@ select infoTables.table_schema as "schema_name", infoTables.table_name as "pure_
from information_schema.tables infoTables from information_schema.tables infoTables
where infoTables.table_type not like '%VIEW%' where infoTables.table_type not like '%VIEW%'
and ('tables:' || infoTables.table_schema || '.' || infoTables.table_name) =OBJECT_ID_CONDITION and ('tables:' || infoTables.table_schema || '.' || infoTables.table_name) =OBJECT_ID_CONDITION
and infoTables.table_schema <> 'pg_catalog'
and infoTables.table_schema <> 'information_schema'
and infoTables.table_schema <> 'pg_internal' and infoTables.table_schema <> 'pg_internal'
and infoTables.table_schema !~ '^pg_toast'
and infoTables.table_schema !~ '^_timescaledb_' and infoTables.table_schema !~ '^_timescaledb_'
and infoTables.table_schema =SCHEMA_NAME_CONDITION
`; `;

View File

@@ -1,3 +1,3 @@
module.exports = ` module.exports = `
select conname as "constraint_name" from pg_constraint where contype = 'u' select conname as "constraint_name" from pg_constraint where contype = 'u' and connamespace =SCHEMA_NAME_CONDITION
`; `;

View File

@@ -4,5 +4,5 @@ select
table_schema as "schema_name", table_schema as "schema_name",
md5(view_definition) as "hash_code" md5(view_definition) as "hash_code"
from from
information_schema.views where table_schema != 'information_schema' and table_schema != 'pg_catalog' and table_schema !~ '^_timescaledb_' information_schema.views where table_schema != 'information_schema' and table_schema != 'pg_catalog' and table_schema !~ '^_timescaledb_' and table_schema =SCHEMA_NAME_CONDITION
`; `;

View File

@@ -6,6 +6,6 @@ select
md5(view_definition) as "hash_code" md5(view_definition) as "hash_code"
from from
information_schema.views information_schema.views
where table_schema != 'information_schema' and table_schema != 'pg_catalog' and table_schema !~ '^_timescaledb_' where table_schema !~ '^_timescaledb_' and table_schema =SCHEMA_NAME_CONDITION
and ('views:' || table_schema || '.' || table_name) =OBJECT_ID_CONDITION and ('views:' || table_schema || '.' || table_name) =OBJECT_ID_CONDITION
`; `;

View File

@@ -136,7 +136,9 @@ const postgresDriverBase = {
} }
return ( return (
['authType', 'user', 'password', 'defaultDatabase', 'singleDatabase', 'isReadOnly'].includes(field) || ['authType', 'user', 'password', 'defaultDatabase', 'singleDatabase', 'isReadOnly', 'useSeparateSchemas'].includes(
field
) ||
(values.authType == 'socket' && ['socketPath'].includes(field)) || (values.authType == 'socket' && ['socketPath'].includes(field)) ||
(values.authType != 'socket' && ['server', 'port'].includes(field)) (values.authType != 'socket' && ['server', 'port'].includes(field))
); );
@@ -242,7 +244,8 @@ const redshiftDriver = {
title: 'Amazon Redshift', title: 'Amazon Redshift',
defaultPort: 5439, defaultPort: 5439,
databaseUrlPlaceholder: 'e.g. redshift-cluster-1.xxxx.redshift.amazonaws.com:5439/dev', databaseUrlPlaceholder: 'e.g. redshift-cluster-1.xxxx.redshift.amazonaws.com:5439/dev',
showConnectionField: (field, values) => ['databaseUrl', 'user', 'password', 'isReadOnly'].includes(field), showConnectionField: (field, values) =>
['databaseUrl', 'user', 'password', 'isReadOnly', 'useSeparateSchemas'].includes(field),
beforeConnectionSave: connection => { beforeConnectionSave: connection => {
const { databaseUrl } = connection; const { databaseUrl } = connection;
if (databaseUrl) { if (databaseUrl) {

View File

@@ -1,8 +1,8 @@
const { DatabaseAnalyser } = global.DBGATE_PACKAGES['dbgate-tools'];; const { DatabaseAnalyser } = global.DBGATE_PACKAGES['dbgate-tools'];;
class Analyser extends DatabaseAnalyser { class Analyser extends DatabaseAnalyser {
constructor(pool, driver) { constructor(dbhan, driver) {
super(pool, driver); super(dbhan, driver);
} }
} }

View File

@@ -83,32 +83,34 @@ const driver = {
analyserClass: Analyser, analyserClass: Analyser,
async connect({ server, port, user, password, database, useDatabaseUrl, databaseUrl, treeKeySeparator }) { async connect({ server, port, user, password, database, useDatabaseUrl, databaseUrl, treeKeySeparator }) {
let db = 0; let db = 0;
let pool; let client;
if (useDatabaseUrl) { if (useDatabaseUrl) {
pool = new Redis(databaseUrl); client = new Redis(databaseUrl);
} else { } else {
if (_.isString(database) && database.startsWith('db')) db = parseInt(database.substring(2)); if (_.isString(database) && database.startsWith('db')) db = parseInt(database.substring(2));
if (_.isNumber(database)) db = database; if (_.isNumber(database)) db = database;
pool = new Redis({ client = new Redis({
host: server, host: server,
port, port,
username: user, username: user,
password, password,
db, db,
}); });
pool.__treeKeySeparator = treeKeySeparator || ':';
} }
return pool; return {
client,
treeKeySeparator: treeKeySeparator || ':',
};
}, },
// @ts-ignore // @ts-ignore
async query(pool, sql) { async query(dbhan, sql) {
return { return {
rows: [], rows: [],
columns: [], columns: [],
}; };
}, },
async stream(pool, sql, options) { async stream(dbhan, sql, options) {
const parts = splitCommandLine(sql); const parts = splitCommandLine(sql);
if (parts.length < 1) { if (parts.length < 1) {
options.done(); options.done();
@@ -116,7 +118,7 @@ const driver = {
} }
const command = parts[0].toLowerCase(); const command = parts[0].toLowerCase();
const args = parts.slice(1); const args = parts.slice(1);
const res = await pool.call(command, ...args); const res = await dbhan.client.call(command, ...args);
options.info({ options.info({
message: JSON.stringify(res), message: JSON.stringify(res),
@@ -126,7 +128,7 @@ const driver = {
options.done(); options.done();
}, },
async readQuery(pool, sql, structure) { async readQuery(dbhan, sql, structure) {
const pass = new stream.PassThrough({ const pass = new stream.PassThrough({
objectMode: true, objectMode: true,
highWaterMark: 100, highWaterMark: 100,
@@ -139,11 +141,11 @@ const driver = {
return pass; return pass;
}, },
async writeTable(pool, name, options) { async writeTable(dbhan, name, options) {
return createBulkInsertStreamBase(this, stream, pool, name, options); return createBulkInsertStreamBase(this, stream, dbhan, name, options);
}, },
async info(pool) { async info(dbhan) {
const info = await pool.info(); const info = await dbhan.client.info();
return _.fromPairs( return _.fromPairs(
info info
.split('\n') .split('\n')
@@ -151,30 +153,30 @@ const driver = {
.map((x) => x.split(':')) .map((x) => x.split(':'))
); );
}, },
async getVersion(pool) { async getVersion(dbhan) {
const info = await this.info(pool); const info = await this.info(dbhan);
return { return {
version: info.redis_version, version: info.redis_version,
versionText: `Redis ${info.redis_version}`, versionText: `Redis ${info.redis_version}`,
}; };
}, },
async listDatabases(pool) { async listDatabases(dbhan) {
const info = await this.info(pool); const info = await this.info(dbhan);
return _.range(16).map((index) => ({ name: `db${index}`, extInfo: info[`db${index}`], sortOrder: index })); return _.range(16).map((index) => ({ name: `db${index}`, extInfo: info[`db${index}`], sortOrder: index }));
}, },
async loadKeys(pool, root = '', filter = null) { async loadKeys(dbhan, root = '', filter = null) {
const keys = await this.getKeys(pool, root ? `${root}${pool.__treeKeySeparator}*` : '*'); const keys = await this.getKeys(dbhan, root ? `${root}${dbhan.__treeKeySeparator}*` : '*');
const keysFiltered = keys.filter((x) => filterName(filter, x)); const keysFiltered = keys.filter((x) => filterName(filter, x));
const res = this.extractKeysFromLevel(pool, root, keysFiltered); const res = this.extractKeysFromLevel(dbhan, root, keysFiltered);
await this.enrichKeyInfo(pool, res); await this.enrichKeyInfo(dbhan, res);
return res; return res;
}, },
async exportKeys(pool, options) { async exportKeys(dbhan, options) {
const dump = new RedisDump({ client: pool }); const dump = new RedisDump({ client: dbhan.client });
return new Promise((resolve, reject) => { return new Promise((resolve, reject) => {
dump.export({ dump.export({
type: 'redis', type: 'redis',
@@ -187,24 +189,24 @@ const driver = {
}); });
}, },
async getKeys(pool, keyQuery = '*') { async getKeys(dbhan, keyQuery = '*') {
const res = []; const res = [];
let cursor = 0; let cursor = 0;
do { do {
const [strCursor, keys] = await pool.scan(cursor, 'MATCH', keyQuery, 'COUNT', 100); const [strCursor, keys] = await dbhan.client.scan(cursor, 'MATCH', keyQuery, 'COUNT', 100);
res.push(...keys); res.push(...keys);
cursor = parseInt(strCursor); cursor = parseInt(strCursor);
} while (cursor > 0); } while (cursor > 0);
return res; return res;
}, },
extractKeysFromLevel(pool, root, keys) { extractKeysFromLevel(dbhan, root, keys) {
const prefix = root ? `${root}${pool.__treeKeySeparator}` : ''; const prefix = root ? `${root}${dbhan.treeKeySeparator}` : '';
const rootSplit = _.compact(root.split(pool.__treeKeySeparator)); const rootSplit = _.compact(root.split(dbhan.treeKeySeparator));
const res = {}; const res = {};
for (const key of keys) { for (const key of keys) {
if (!key.startsWith(prefix)) continue; if (!key.startsWith(prefix)) continue;
const keySplit = key.split(pool.__treeKeySeparator); const keySplit = key.split(dbhan.treeKeySeparator);
if (keySplit.length > rootSplit.length) { if (keySplit.length > rootSplit.length) {
const text = keySplit[rootSplit.length]; const text = keySplit[rootSplit.length];
if (keySplit.length == rootSplit.length + 1) { if (keySplit.length == rootSplit.length + 1) {
@@ -218,9 +220,9 @@ const driver = {
res[dctKey].count++; res[dctKey].count++;
} else { } else {
res[dctKey] = { res[dctKey] = {
text: text + pool.__treeKeySeparator + '*', text: text + dbhan.treeKeySeparator + '*',
type: 'dir', type: 'dir',
root: keySplit.slice(0, rootSplit.length + 1).join(pool.__treeKeySeparator), root: keySplit.slice(0, rootSplit.length + 1).join(dbhan.treeKeySeparator),
count: 1, count: 1,
}; };
} }
@@ -230,46 +232,46 @@ const driver = {
return Object.values(res); return Object.values(res);
}, },
async getKeyCardinality(pool, key, type) { async getKeyCardinality(dbhan, key, type) {
switch (type) { switch (type) {
case 'list': case 'list':
return pool.llen(key); return dbhan.client.llen(key);
case 'set': case 'set':
return pool.scard(key); return dbhan.client.scard(key);
case 'zset': case 'zset':
return pool.zcard(key); return dbhan.client.zcard(key);
case 'stream': case 'stream':
return pool.xlen(key); return dbhan.client.xlen(key);
case 'hash': case 'hash':
return pool.hlen(key); return dbhan.client.hlen(key);
} }
}, },
async enrichOneKeyInfo(pool, item) { async enrichOneKeyInfo(dbhan, item) {
item.type = await pool.type(item.key); item.type = await dbhan.client.type(item.key);
item.count = await this.getKeyCardinality(pool, item.key, item.type); item.count = await this.getKeyCardinality(dbhan, item.key, item.type);
}, },
async enrichKeyInfo(pool, levelInfo) { async enrichKeyInfo(dbhan, levelInfo) {
await async.eachLimit( await async.eachLimit(
levelInfo.filter((x) => x.key), levelInfo.filter((x) => x.key),
10, 10,
async (item) => await this.enrichOneKeyInfo(pool, item) async (item) => await this.enrichOneKeyInfo(dbhan, item)
); );
}, },
async loadKeyInfo(pool, key) { async loadKeyInfo(dbhan, key) {
const res = {}; const res = {};
const type = await pool.type(key); const type = await dbhan.client.type(key);
res.key = key; res.key = key;
res.type = type; res.type = type;
res.ttl = await pool.ttl(key); res.ttl = await dbhan.client.ttl(key);
res.count = await this.getKeyCardinality(pool, key, type); res.count = await this.getKeyCardinality(dbhan, key, type);
switch (type) { switch (type) {
case 'string': case 'string':
res.value = await pool.get(key); res.value = await dbhan.client.get(key);
break; break;
// case 'list': // case 'list':
// res.tableColumns = [{ name: 'value' }]; // res.tableColumns = [{ name: 'value' }];
@@ -297,16 +299,16 @@ const driver = {
return res; return res;
}, },
async deleteBranch(pool, keyQuery) { async deleteBranch(dbhan, keyQuery) {
const keys = await this.getKeys(pool, keyQuery); const keys = await this.getKeys(dbhan, keyQuery);
const keysChunked = _.chunk(keys, 10); const keysChunked = _.chunk(keys, 10);
await async.eachLimit(keysChunked, 10, async (keysChunk) => await pool.del(...keysChunk)); await async.eachLimit(keysChunked, 10, async (keysChunk) => await dbhan.client.del(...keysChunk));
}, },
async callMethod(pool, method, args) { async callMethod(dbhan, method, args) {
switch (method) { switch (method) {
case 'mdel': case 'mdel':
return await this.deleteBranch(pool, args[0]); return await this.deleteBranch(dbhan, args[0]);
case 'xaddjson': case 'xaddjson':
let json; let json;
try { try {
@@ -314,44 +316,44 @@ const driver = {
} catch (e) { } catch (e) {
throw new Error('Value must be valid JSON. ' + e.message); throw new Error('Value must be valid JSON. ' + e.message);
} }
return await pool.xadd(args[0], args[1] || '*', ..._.flatten(_.toPairs(json))); return await dbhan.client.xadd(args[0], args[1] || '*', ..._.flatten(_.toPairs(json)));
} }
return await pool[method](...args); return await dbhan.client[method](...args);
}, },
async loadKeyTableRange(pool, key, cursor, count) { async loadKeyTableRange(dbhan, key, cursor, count) {
const type = await pool.type(key); const type = await dbhan.client.type(key);
switch (type) { switch (type) {
case 'list': { case 'list': {
const res = await pool.lrange(key, cursor, cursor + count); const res = await dbhan.client.lrange(key, cursor, cursor + count);
return { return {
cursor: res.length > count ? cursor + count : 0, cursor: res.length > count ? cursor + count : 0,
items: res.map((value) => ({ value })).slice(0, count), items: res.map((value) => ({ value })).slice(0, count),
}; };
} }
case 'set': { case 'set': {
const res = await pool.sscan(key, cursor, 'COUNT', count); const res = await dbhan.client.sscan(key, cursor, 'COUNT', count);
return { return {
cursor: parseInt(res[0]), cursor: parseInt(res[0]),
items: res[1].map((value) => ({ value })), items: res[1].map((value) => ({ value })),
}; };
} }
case 'zset': { case 'zset': {
const res = await pool.zscan(key, cursor, 'COUNT', count); const res = await dbhan.client.zscan(key, cursor, 'COUNT', count);
return { return {
cursor: parseInt(res[0]), cursor: parseInt(res[0]),
items: _.chunk(res[1], 2).map((item) => ({ value: item[0], score: item[1] })), items: _.chunk(res[1], 2).map((item) => ({ value: item[0], score: item[1] })),
}; };
} }
case 'hash': { case 'hash': {
const res = await pool.hscan(key, cursor, 'COUNT', count); const res = await dbhan.client.hscan(key, cursor, 'COUNT', count);
return { return {
cursor: parseInt(res[0]), cursor: parseInt(res[0]),
items: _.chunk(res[1], 2).map((item) => ({ key: item[0], value: item[1] })), items: _.chunk(res[1], 2).map((item) => ({ key: item[0], value: item[1] })),
}; };
} }
case 'stream': { case 'stream': {
const res = await pool.xrange(key, cursor == 0 ? '-' : cursor, '+', 'COUNT', count); const res = await dbhan.client.xrange(key, cursor == 0 ? '-' : cursor, '+', 'COUNT', count);
let newCursor = 0; let newCursor = 0;
if (res.length > 0) { if (res.length > 0) {
const id = res[res.length - 1][0]; const id = res[res.length - 1][0];

View File

@@ -16,8 +16,8 @@ SELECT
`; `;
class Analyser extends DatabaseAnalyser { class Analyser extends DatabaseAnalyser {
constructor(pool, driver, version) { constructor(dbhan, driver, version) {
super(pool, driver, version); super(dbhan, driver, version);
} }
async _computeSingleObjectId() { async _computeSingleObjectId() {
@@ -26,8 +26,8 @@ class Analyser extends DatabaseAnalyser {
} }
async _getFastSnapshot() { async _getFastSnapshot() {
const objects = await this.driver.query(this.pool, "select * from sqlite_master where type='table' or type='view'"); const objects = await this.driver.query(this.dbhan, "select * from sqlite_master where type='table' or type='view'");
const indexcols = await this.driver.query(this.pool, indexcolsQuery); const indexcols = await this.driver.query(this.dbhan, indexcolsQuery);
return { return {
tables: objects.rows tables: objects.rows
@@ -79,7 +79,7 @@ class Analyser extends DatabaseAnalyser {
createSql: x.sql, createSql: x.sql,
})); }));
const indexcols = await this.driver.query(this.pool, indexcolsQuery); const indexcols = await this.driver.query(this.dbhan, indexcolsQuery);
for (const tableName of this.getRequestedObjectPureNames( for (const tableName of this.getRequestedObjectPureNames(
'tables', 'tables',
@@ -88,7 +88,7 @@ class Analyser extends DatabaseAnalyser {
const tableObj = tableList.find((x) => x.pureName == tableName); const tableObj = tableList.find((x) => x.pureName == tableName);
if (!tableObj) continue; if (!tableObj) continue;
const info = await this.driver.query(this.pool, `pragma table_info('${tableName}')`); const info = await this.driver.query(this.dbhan, `pragma table_info('${tableName}')`);
tableObj.columns = info.rows.map((col) => ({ tableObj.columns = info.rows.map((col) => ({
columnName: col.name, columnName: col.name,
dataType: col.type, dataType: col.type,
@@ -132,7 +132,7 @@ class Analyser extends DatabaseAnalyser {
}; };
} }
const fklist = await this.driver.query(this.pool, `pragma foreign_key_list('${tableName}')`); const fklist = await this.driver.query(this.dbhan, `pragma foreign_key_list('${tableName}')`);
tableObj.foreignKeys = _.values(_.groupBy(fklist.rows, 'id')).map((fkcols) => { tableObj.foreignKeys = _.values(_.groupBy(fklist.rows, 'id')).map((fkcols) => {
const fkcol = fkcols[0]; const fkcol = fkcols[0];
const fk = { const fk = {
@@ -157,7 +157,7 @@ class Analyser extends DatabaseAnalyser {
const viewObj = viewList.find((x) => x.pureName == viewName); const viewObj = viewList.find((x) => x.pureName == viewName);
if (!viewObj) continue; if (!viewObj) continue;
const info = await this.driver.query(this.pool, `pragma table_info('${viewName}')`); const info = await this.driver.query(this.dbhan, `pragma table_info('${viewName}')`);
viewObj.columns = info.rows.map((col) => ({ viewObj.columns = info.rows.map((col) => ({
columnName: col.name, columnName: col.name,
dataType: col.type, dataType: col.type,

View File

@@ -26,8 +26,8 @@ async function waitForDrain(stream) {
}); });
} }
function runStreamItem(client, sql, options, rowCounter) { function runStreamItem(dbhan, sql, options, rowCounter) {
const stmt = client.prepare(sql); const stmt = dbhan.client.prepare(sql);
if (stmt.reader) { if (stmt.reader) {
const columns = stmt.columns(); const columns = stmt.columns();
// const rows = stmt.all(); // const rows = stmt.all();
@@ -64,15 +64,17 @@ const driver = {
analyserClass: Analyser, analyserClass: Analyser,
async connect({ databaseFile, isReadOnly }) { async connect({ databaseFile, isReadOnly }) {
const Database = getBetterSqlite(); const Database = getBetterSqlite();
const pool = new Database(databaseFile, { readonly: !!isReadOnly }); const client = new Database(databaseFile, { readonly: !!isReadOnly });
return pool; return {
client,
};
}, },
async close(pool) { async close(dbhan) {
return pool.close(); return dbhan.client.close();
}, },
// @ts-ignore // @ts-ignore
async query(pool, sql) { async query(dbhan, sql) {
const stmt = pool.prepare(sql); const stmt = dbhan.client.prepare(sql);
// stmt.raw(); // stmt.raw();
if (stmt.reader) { if (stmt.reader) {
const columns = stmt.columns(); const columns = stmt.columns();
@@ -92,14 +94,14 @@ const driver = {
}; };
} }
}, },
async stream(client, sql, options) { async stream(dbhan, sql, options) {
const sqlSplitted = splitQuery(sql, sqliteSplitterOptions); const sqlSplitted = splitQuery(sql, sqliteSplitterOptions);
const rowCounter = { count: 0, date: null }; const rowCounter = { count: 0, date: null };
const inTransaction = client.transaction(() => { const inTransaction = dbhan.client.transaction(() => {
for (const sqlItem of sqlSplitted) { for (const sqlItem of sqlSplitted) {
runStreamItem(client, sqlItem, options, rowCounter); runStreamItem(dbhan, sqlItem, options, rowCounter);
} }
if (rowCounter.date) { if (rowCounter.date) {
@@ -128,10 +130,10 @@ const driver = {
options.done(); options.done();
// return stream; // return stream;
}, },
async script(client, sql) { async script(dbhan, sql) {
const inTransaction = client.transaction(() => { const inTransaction = dbhan.client.transaction(() => {
for (const sqlItem of splitQuery(sql, this.getQuerySplitterOptions('script'))) { for (const sqlItem of splitQuery(sql, this.getQuerySplitterOptions('script'))) {
const stmt = client.prepare(sqlItem); const stmt = dbhan.client.prepare(sqlItem);
stmt.run(); stmt.run();
} }
}); });
@@ -149,13 +151,13 @@ const driver = {
} }
pass.end(); pass.end();
}, },
async readQuery(pool, sql, structure) { async readQuery(dbhan, sql, structure) {
const pass = new stream.PassThrough({ const pass = new stream.PassThrough({
objectMode: true, objectMode: true,
highWaterMark: 100, highWaterMark: 100,
}); });
const stmt = pool.prepare(sql); const stmt = dbhan.client.prepare(sql);
const columns = stmt.columns(); const columns = stmt.columns();
pass.write({ pass.write({
@@ -171,11 +173,11 @@ const driver = {
return pass; return pass;
}, },
async writeTable(pool, name, options) { async writeTable(dbhan, name, options) {
return createBulkInsertStreamBase(this, stream, pool, name, options); return createBulkInsertStreamBase(this, stream, dbhan, name, options);
}, },
async getVersion(pool) { async getVersion(dbhan) {
const { rows } = await this.query(pool, 'select sqlite_version() as version'); const { rows } = await this.query(dbhan, 'select sqlite_version() as version');
const { version } = rows[0]; const { version } = rows[0];
return { return {