diff --git a/integration-tests/__tests__/schema-tests.spec.js b/integration-tests/__tests__/schema-tests.spec.js index 5f5b4f04e..116b15b17 100644 --- a/integration-tests/__tests__/schema-tests.spec.js +++ b/integration-tests/__tests__/schema-tests.spec.js @@ -1,7 +1,7 @@ const stableStringify = require('json-stable-stringify'); const _ = require('lodash'); const fp = require('lodash/fp'); -const { testWrapper } = require('../tools'); +const { testWrapper, extractConnection } = require('../tools'); const engines = require('../engines'); const { runCommandOnDriver } = require('dbgate-tools'); @@ -23,17 +23,17 @@ describe('Schema tests', () => { testWrapper(async (conn, driver, engine) => { await baseStructure(conn, driver); const structure1 = await driver.analyseFull(conn); - expect(structure1.schemas.find(x => x.schemaName == 'myschema')).toBeFalsy(); - const count = structure1.schemas.length; + const schemas1 = await driver.listSchemas(conn); + expect(schemas1.find(x => x.schemaName == 'myschema')).toBeFalsy(); + const count = schemas1.length; expect(structure1.tables.length).toEqual(2); await runCommandOnDriver(conn, driver, dmp => dmp.createSchema('myschema')); const structure2 = await driver.analyseIncremental(conn, structure1); - expect(structure2.schemas.find(x => x.schemaName == 'myschema')).toBeTruthy(); - expect(structure2.tables.length).toEqual(2); - expect(structure2.schemas.length).toEqual(count + 1); - - const structure3 = await driver.analyseIncremental(conn, structure2); - expect(structure3).toBeNull(); + const schemas2 = await driver.listSchemas(conn); + expect(schemas2.find(x => x.schemaName == 'myschema')).toBeTruthy(); + expect(schemas2.length).toEqual(count + 1); + expect(schemas2.find(x => x.isDefault).schemaName).toEqual(engine.defaultSchemaName); + expect(structure2).toBeNull(); }) ); @@ -44,29 +44,33 @@ describe('Schema tests', () => { await runCommandOnDriver(conn, driver, dmp => dmp.createSchema('myschema')); const structure1 = await 
driver.analyseFull(conn); - expect(structure1.schemas.find(x => x.schemaName == 'myschema')).toBeTruthy(); + const schemas1 = await driver.listSchemas(conn); + expect(schemas1.find(x => x.schemaName == 'myschema')).toBeTruthy(); expect(structure1.tables.length).toEqual(2); await runCommandOnDriver(conn, driver, dmp => dmp.dropSchema('myschema')); const structure2 = await driver.analyseIncremental(conn, structure1); - expect(structure2.schemas.find(x => x.schemaName == 'myschema')).toBeFalsy(); - expect(structure2.tables.length).toEqual(2); - - const structure3 = await driver.analyseIncremental(conn, structure2); - expect(structure3).toBeNull(); + const schemas2 = await driver.listSchemas(conn); + expect(schemas2.find(x => x.schemaName == 'myschema')).toBeFalsy(); + expect(structure2).toBeNull(); }) ); - test.each(engines.filter(x => x.supportSchemas).map(engine => [engine.label, engine]))( - 'Create table - keep schemas - %s', - testWrapper(async (conn, driver, engine) => { - await baseStructure(conn, driver); - const structure1 = await driver.analyseFull(conn); - const count = structure1.schemas.length; - expect(structure1.tables.length).toEqual(2); - await driver.query(conn, `create table t3 (id int not null primary key)`); - const structure2 = await driver.analyseIncremental(conn, structure1); - expect(structure2.tables.length).toEqual(3); - expect(structure2.schemas.length).toEqual(count); + test.each(engines.filter(x => x.supportSchemas && !x.skipSeparateSchemas).map(engine => [engine.label, engine]))( + 'Table inside schema - %s', + testWrapper(async (handle, driver, engine) => { + await baseStructure(handle, driver); + await runCommandOnDriver(handle, driver, dmp => dmp.createSchema('myschema')); + + const schemaConnDef = { + ...extractConnection(engine), + database: `${handle.database}::myschema`, + }; + + const schemaConn = await driver.connect(schemaConnDef); + await driver.query(schemaConn, `create table myschema.myt1 (id int not null primary key)`); + 
const structure1 = await driver.analyseFull(schemaConn); + expect(structure1.tables.length).toEqual(1); + expect(structure1.tables[0].pureName).toEqual('myt1'); }) ); }); diff --git a/integration-tests/engines.js b/integration-tests/engines.js index 380e8c3fd..8ff17d6c6 100644 --- a/integration-tests/engines.js +++ b/integration-tests/engines.js @@ -82,6 +82,7 @@ const engines = [ }, ], supportSchemas: true, + defaultSchemaName: 'public', }, { label: 'SQL Server', @@ -107,6 +108,8 @@ const engines = [ }, ], supportSchemas: true, + defaultSchemaName: 'dbo', + // skipSeparateSchemas: true, }, { label: 'SQLite', @@ -115,6 +118,7 @@ const engines = [ engine: 'sqlite@dbgate-plugin-sqlite', }, objects: [views], + skipOnCI: false, }, { label: 'CockroachDB', @@ -161,9 +165,9 @@ const filterLocal = [ // filter local testing '-MySQL', '-MariaDB', - 'PostgreSQL', + '-PostgreSQL', '-SQL Server', - '-SQLite', + 'SQLite', '-CockroachDB', '-ClickHouse', ]; diff --git a/integration-tests/package.json b/integration-tests/package.json index 6a0dea08c..5f232a029 100644 --- a/integration-tests/package.json +++ b/integration-tests/package.json @@ -13,7 +13,7 @@ "wait:ci": "cross-env DEVMODE=1 CITEST=1 node wait.js", "test:local": "cross-env DEVMODE=1 LOCALTEST=1 jest", "test:local:path": "cross-env DEVMODE=1 LOCALTEST=1 jest --runTestsByPath __tests__/data-duplicator.spec.js", - "test:ci": "cross-env DEVMODE=1 CITEST=1 jest --runInBand --json --outputFile=result.json --testLocationInResults", + "test:ci": "cross-env DEVMODE=1 CITEST=1 jest --runInBand --json --outputFile=result.json --testLocationInResults --detectOpenHandles --forceExit", "run:local": "docker-compose down && docker-compose up -d && yarn wait:local && yarn test:local" }, "jest": { diff --git a/package.json b/package.json index 3baedf7cb..1cae113a9 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "private": true, - "version": "5.4.5-beta.7", + "version": "5.4.5-beta.10", "name": "dbgate-all", "workspaces": 
[ "packages/*", diff --git a/packages/api/src/controllers/connections.js b/packages/api/src/controllers/connections.js index dc68ad14f..e16fdbcf9 100644 --- a/packages/api/src/controllers/connections.js +++ b/packages/api/src/controllers/connections.js @@ -76,6 +76,7 @@ function getPortalCollections() { allowedDatabases: process.env[`ALLOWED_DATABASES_${id}`]?.replace(/\|/g, '\n'), allowedDatabasesRegex: process.env[`ALLOWED_DATABASES_REGEX_${id}`], parent: process.env[`PARENT_${id}`] || undefined, + useSeparateSchemas: !!process.env[`USE_SEPARATE_SCHEMAS_${id}`], // SSH tunnel useSshTunnel: process.env[`USE_SSH_${id}`], diff --git a/packages/api/src/controllers/databaseConnections.js b/packages/api/src/controllers/databaseConnections.js index 8b731c37e..f9f1f11e7 100644 --- a/packages/api/src/controllers/databaseConnections.js +++ b/packages/api/src/controllers/databaseConnections.js @@ -213,6 +213,17 @@ module.exports = { return res.result || null; }, + schemaList_meta: true, + async schemaList({ conid, database }, req) { + testConnectionPermission(conid, req); + return this.loadDataCore('schemaList', { conid, database }); + }, + + dispatchDatabaseChangedEvent_meta: true, + dispatchDatabaseChangedEvent({ event, conid, database }) { + socket.emitChanged(event, { conid, database }); + }, + loadKeys_meta: true, async loadKeys({ conid, database, root, filter }, req) { testConnectionPermission(conid, req); diff --git a/packages/api/src/proc/databaseConnectionProcess.js b/packages/api/src/proc/databaseConnectionProcess.js index 894fba18c..7ca93323e 100644 --- a/packages/api/src/proc/databaseConnectionProcess.js +++ b/packages/api/src/proc/databaseConnectionProcess.js @@ -11,7 +11,7 @@ const { dumpSqlSelect } = require('dbgate-sqltree'); const logger = getLogger('dbconnProcess'); -let systemConnection; +let dbhan; let storedConnection; let afterConnectCallbacks = []; let afterAnalyseCallbacks = []; @@ -49,7 +49,7 @@ async function handleFullRefresh() { loadingModel = 
true; const driver = requireEngineDriver(storedConnection); setStatusName('loadStructure'); - analysedStructure = await checkedAsyncCall(driver.analyseFull(systemConnection, serverVersion)); + analysedStructure = await checkedAsyncCall(driver.analyseFull(dbhan, serverVersion)); analysedTime = new Date().getTime(); process.send({ msgtype: 'structure', structure: analysedStructure }); process.send({ msgtype: 'structureTime', analysedTime }); @@ -64,7 +64,7 @@ async function handleIncrementalRefresh(forceSend) { const driver = requireEngineDriver(storedConnection); setStatusName('checkStructure'); const newStructure = await checkedAsyncCall( - driver.analyseIncremental(systemConnection, analysedStructure, serverVersion) + driver.analyseIncremental(dbhan, analysedStructure, serverVersion) ); analysedTime = new Date().getTime(); if (newStructure != null) { @@ -103,7 +103,7 @@ function setStatusName(name) { async function readVersion() { const driver = requireEngineDriver(storedConnection); - const version = await driver.getVersion(systemConnection); + const version = await driver.getVersion(dbhan); process.send({ msgtype: 'version', version }); serverVersion = version; } @@ -114,8 +114,8 @@ async function handleConnect({ connection, structure, globalSettings }) { if (!structure) setStatusName('pending'); const driver = requireEngineDriver(storedConnection); - systemConnection = await checkedAsyncCall(connectUtility(driver, storedConnection, 'app')); - systemConnection.feedback = feedback => setStatus({ feedback }); + dbhan = await checkedAsyncCall(connectUtility(driver, storedConnection, 'app')); + dbhan.feedback = feedback => setStatus({ feedback }); await checkedAsyncCall(readVersion()); if (structure) { analysedStructure = structure; @@ -138,7 +138,7 @@ async function handleConnect({ connection, structure, globalSettings }) { } function waitConnected() { - if (systemConnection) return Promise.resolve(); + if (dbhan) return Promise.resolve(); return new 
Promise((resolve, reject) => { afterConnectCallbacks.push([resolve, reject]); }); @@ -163,7 +163,7 @@ async function handleRunScript({ msgid, sql, useTransaction }, skipReadonlyCheck const driver = requireEngineDriver(storedConnection); try { if (!skipReadonlyCheck) ensureExecuteCustomScript(driver); - await driver.script(systemConnection, sql, { useTransaction }); + await driver.script(dbhan, sql, { useTransaction }); process.send({ msgtype: 'response', msgid }); } catch (err) { process.send({ msgtype: 'response', msgid, errorMessage: err.message }); @@ -175,7 +175,7 @@ async function handleRunOperation({ msgid, operation, useTransaction }, skipRead const driver = requireEngineDriver(storedConnection); try { if (!skipReadonlyCheck) ensureExecuteCustomScript(driver); - await driver.operation(systemConnection, operation, { useTransaction }); + await driver.operation(dbhan, operation, { useTransaction }); process.send({ msgtype: 'response', msgid }); } catch (err) { process.send({ msgtype: 'response', msgid, errorMessage: err.message }); @@ -188,7 +188,7 @@ async function handleQueryData({ msgid, sql }, skipReadonlyCheck = false) { try { if (!skipReadonlyCheck) ensureExecuteCustomScript(driver); // console.log(sql); - const res = await driver.query(systemConnection, sql); + const res = await driver.query(dbhan, sql); process.send({ msgtype: 'response', msgid, ...res }); } catch (err) { process.send({ msgtype: 'response', msgid, errorMessage: err.message || 'Error executing SQL script' }); @@ -213,20 +213,25 @@ async function handleDriverDataCore(msgid, callMethod) { } } +async function handleSchemaList({ msgid }) { + logger.debug('Loading schema list'); + return handleDriverDataCore(msgid, driver => driver.listSchemas(dbhan)); +} + async function handleCollectionData({ msgid, options }) { - return handleDriverDataCore(msgid, driver => driver.readCollection(systemConnection, options)); + return handleDriverDataCore(msgid, driver => driver.readCollection(dbhan, 
options)); } async function handleLoadKeys({ msgid, root, filter }) { - return handleDriverDataCore(msgid, driver => driver.loadKeys(systemConnection, root, filter)); + return handleDriverDataCore(msgid, driver => driver.loadKeys(dbhan, root, filter)); } async function handleExportKeys({ msgid, options }) { - return handleDriverDataCore(msgid, driver => driver.exportKeys(systemConnection, options)); + return handleDriverDataCore(msgid, driver => driver.exportKeys(dbhan, options)); } async function handleLoadKeyInfo({ msgid, key }) { - return handleDriverDataCore(msgid, driver => driver.loadKeyInfo(systemConnection, key)); + return handleDriverDataCore(msgid, driver => driver.loadKeyInfo(dbhan, key)); } async function handleCallMethod({ msgid, method, args }) { @@ -236,17 +241,17 @@ async function handleCallMethod({ msgid, method, args }) { } ensureExecuteCustomScript(driver); - return driver.callMethod(systemConnection, method, args); + return driver.callMethod(dbhan, method, args); }); } async function handleLoadKeyTableRange({ msgid, key, cursor, count }) { - return handleDriverDataCore(msgid, driver => driver.loadKeyTableRange(systemConnection, key, cursor, count)); + return handleDriverDataCore(msgid, driver => driver.loadKeyTableRange(dbhan, key, cursor, count)); } async function handleLoadFieldValues({ msgid, schemaName, pureName, field, search }) { return handleDriverDataCore(msgid, driver => - driver.loadFieldValues(systemConnection, { schemaName, pureName }, field, search) + driver.loadFieldValues(dbhan, { schemaName, pureName }, field, search) ); } @@ -264,7 +269,7 @@ async function handleUpdateCollection({ msgid, changeSet }) { const driver = requireEngineDriver(storedConnection); try { ensureExecuteCustomScript(driver); - const result = await driver.updateCollection(systemConnection, changeSet); + const result = await driver.updateCollection(dbhan, changeSet); process.send({ msgtype: 'response', msgid, result }); } catch (err) { process.send({ msgtype: 
'response', msgid, errorMessage: err.message }); @@ -277,7 +282,7 @@ async function handleSqlPreview({ msgid, objects, options }) { try { const dmp = driver.createDumper(); - const generator = new SqlGenerator(analysedStructure, options, objects, dmp, driver, systemConnection); + const generator = new SqlGenerator(analysedStructure, options, objects, dmp, driver, dbhan); await generator.dump(); process.send({ msgtype: 'response', msgid, sql: dmp.s, isTruncated: generator.isTruncated }); @@ -297,7 +302,7 @@ async function handleGenerateDeploySql({ msgid, modelFolder }) { try { const res = await generateDeploySql({ - systemConnection, + systemConnection: dbhan, connection: storedConnection, analysedStructure, modelFolder, @@ -337,6 +342,7 @@ const messageHandlers = { loadFieldValues: handleLoadFieldValues, sqlSelect: handleSqlSelect, exportKeys: handleExportKeys, + schemaList: handleSchemaList, // runCommand: handleRunCommand, }; diff --git a/packages/tools/src/DatabaseAnalyser.ts b/packages/tools/src/DatabaseAnalyser.ts index 9ebceaac4..1c3d83a69 100644 --- a/packages/tools/src/DatabaseAnalyser.ts +++ b/packages/tools/src/DatabaseAnalyser.ts @@ -1,11 +1,11 @@ -import { DatabaseInfo, DatabaseModification, EngineDriver, SqlDialect } from 'dbgate-types'; +import { DatabaseHandle, DatabaseInfo, DatabaseModification, EngineDriver, SqlDialect } from 'dbgate-types'; import _sortBy from 'lodash/sortBy'; import _groupBy from 'lodash/groupBy'; import _pick from 'lodash/pick'; import _compact from 'lodash/compact'; import { getLogger } from './getLogger'; import { type Logger } from 'pinomin'; -import stableStringify from 'json-stable-stringify'; +import { isCompositeDbName, splitCompositeDbName } from './schemaInfoTools'; const logger = getLogger('dbAnalyser'); @@ -41,7 +41,7 @@ export class DatabaseAnalyser { dialect: SqlDialect; logger: Logger; - constructor(public pool, public driver: EngineDriver, version) { + constructor(public dbhan: DatabaseHandle, public driver: 
EngineDriver, version) { this.dialect = (driver?.dialectByVersion && driver?.dialectByVersion(version)) || driver?.dialect; this.logger = logger; } @@ -71,10 +71,7 @@ export class DatabaseAnalyser { async fullAnalysis() { const res = this.addEngineField(await this._runAnalysis()); // console.log('FULL ANALYSIS', res); - return { - ...res, - schemas: await this.readSchemaList(), - }; + return res; } async singleObjectAnalysis(name, typeField) { @@ -91,10 +88,6 @@ export class DatabaseAnalyser { return obj; } - async readSchemaList() { - return undefined; - } - async incrementalAnalysis(structure) { this.structure = structure; @@ -107,35 +100,22 @@ export class DatabaseAnalyser { const structureModifications = modifications.filter(x => x.action != 'setTableRowCounts'); const setTableRowCounts = modifications.find(x => x.action == 'setTableRowCounts'); - let structureUpdated = null; + let structureWithRowCounts = null; if (setTableRowCounts) { const newStructure = mergeTableRowCounts(structure, setTableRowCounts.rowCounts); if (areDifferentRowCounts(structure, newStructure)) { - structureUpdated = newStructure; + structureWithRowCounts = newStructure; } } - const schemas = await this.readSchemaList(); - const areSchemasDifferent = stableStringify(schemas) != stableStringify(this.structure.schemas); - if (areSchemasDifferent) { - structureUpdated = { - ...structure, - ...structureUpdated, - schemas, - }; - } - if (structureModifications.length == 0) { - return structureUpdated ? this.addEngineField(structureUpdated) : null; + return structureWithRowCounts ? 
this.addEngineField(structureWithRowCounts) : null; } this.modifications = structureModifications; - if (structureUpdated) this.structure = structureUpdated; + if (structureWithRowCounts) this.structure = structureWithRowCounts; logger.info({ modifications: this.modifications }, 'DB modifications detected:'); - return { - ...this.addEngineField(this.mergeAnalyseResult(await this._runAnalysis())), - schemas, - }; + return this.addEngineField(this.mergeAnalyseResult(await this._runAnalysis())); } mergeAnalyseResult(newlyAnalysed) { @@ -201,8 +181,19 @@ export class DatabaseAnalyser { // return this.createQueryCore('=OBJECT_ID_CONDITION', typeFields) != ' is not null'; // } + getDefaultSchemaNameCondition() { + return 'is not null'; + } + createQuery(template, typeFields, replacements = {}) { - return this.createQueryCore(this.processQueryReplacements(template, replacements), typeFields); + let query = this.createQueryCore(this.processQueryReplacements(template, replacements), typeFields); + + const dbname = this.dbhan.database; + const schemaCondition = isCompositeDbName(dbname) + ? 
`= '${splitCompositeDbName(dbname).schema}' ` + : ` ${this.getDefaultSchemaNameCondition()} `; + + return query?.replace(/=SCHEMA_NAME_CONDITION/g, schemaCondition); } processQueryReplacements(query, replacements) { @@ -263,8 +254,8 @@ export class DatabaseAnalyser { } feedback(obj) { - if (this.pool.feedback) { - this.pool.feedback(obj); + if (this.dbhan.feedback) { + this.dbhan.feedback(obj); } if (obj && obj.analysingMessage) { logger.debug(obj.analysingMessage); @@ -339,7 +330,7 @@ export class DatabaseAnalyser { }; } try { - const res = await this.driver.query(this.pool, sql); + const res = await this.driver.query(this.dbhan, sql); this.logger.debug({ rows: res.rows.length, template }, `Loaded analyser query`); return res; } catch (err) { @@ -359,7 +350,6 @@ export class DatabaseAnalyser { functions: [], procedures: [], triggers: [], - schemas: [], }; } diff --git a/packages/tools/src/createBulkInsertStreamBase.ts b/packages/tools/src/createBulkInsertStreamBase.ts index dcf93e40b..78b466aa8 100644 --- a/packages/tools/src/createBulkInsertStreamBase.ts +++ b/packages/tools/src/createBulkInsertStreamBase.ts @@ -5,7 +5,7 @@ import { prepareTableForImport } from './tableTransforms'; const logger = getLogger('bulkStreamBase'); -export function createBulkInsertStreamBase(driver: EngineDriver, stream, pool, name, options: WriteTableOptions): any { +export function createBulkInsertStreamBase(driver: EngineDriver, stream, dbhan, name, options: WriteTableOptions): any { const fullNameQuoted = name.schemaName ? 
`${driver.dialect.quoteIdentifier(name.schemaName)}.${driver.dialect.quoteIdentifier(name.pureName)}` : driver.dialect.quoteIdentifier(name.pureName); @@ -29,22 +29,22 @@ export function createBulkInsertStreamBase(driver: EngineDriver, stream, pool, n }; writable.checkStructure = async () => { - let structure = await driver.analyseSingleTable(pool, name); + let structure = await driver.analyseSingleTable(dbhan, name); // console.log('ANALYSING', name, structure); if (structure && options.dropIfExists) { logger.info(`Dropping table ${fullNameQuoted}`); - await driver.script(pool, `DROP TABLE ${fullNameQuoted}`); + await driver.script(dbhan, `DROP TABLE ${fullNameQuoted}`); } if (options.createIfNotExists && (!structure || options.dropIfExists)) { const dmp = driver.createDumper(); const createdTableInfo = driver.adaptTableInfo(prepareTableForImport({ ...writable.structure, ...name })); dmp.createTable(createdTableInfo); logger.info({ sql: dmp.s }, `Creating table ${fullNameQuoted}`); - await driver.script(pool, dmp.s); - structure = await driver.analyseSingleTable(pool, name); + await driver.script(dbhan, dmp.s); + structure = await driver.analyseSingleTable(dbhan, name); } if (options.truncate) { - await driver.script(pool, `TRUNCATE TABLE ${fullNameQuoted}`); + await driver.script(dbhan, `TRUNCATE TABLE ${fullNameQuoted}`); } writable.columnNames = _intersection( @@ -74,7 +74,7 @@ export function createBulkInsertStreamBase(driver: EngineDriver, stream, pool, n dmp.putRaw(';'); // require('fs').writeFileSync('/home/jena/test.sql', dmp.s); // console.log(dmp.s); - await driver.query(pool, dmp.s, { discardResult: true }); + await driver.query(dbhan, dmp.s, { discardResult: true }); } else { for (const row of rows) { const dmp = driver.createDumper(); @@ -85,13 +85,13 @@ export function createBulkInsertStreamBase(driver: EngineDriver, stream, pool, n dmp.putRaw('('); dmp.putCollection(',', writable.columnNames, col => dmp.putValue(row[col as string])); 
dmp.putRaw(')'); - await driver.query(pool, dmp.s, { discardResult: true }); + await driver.query(dbhan, dmp.s, { discardResult: true }); } } if (options.commitAfterInsert) { const dmp = driver.createDumper(); dmp.commitTransaction(); - await driver.query(pool, dmp.s, { discardResult: true }); + await driver.query(dbhan, dmp.s, { discardResult: true }); } }; diff --git a/packages/tools/src/driverBase.ts b/packages/tools/src/driverBase.ts index 251ea87b9..f3d892c73 100644 --- a/packages/tools/src/driverBase.ts +++ b/packages/tools/src/driverBase.ts @@ -191,4 +191,8 @@ export const driverBase = { adaptTableInfo(table) { return table; }, + + async listSchemas(pool) { + return null; + }, }; diff --git a/packages/tools/src/index.ts b/packages/tools/src/index.ts index 900f7aebe..46b6c36f0 100644 --- a/packages/tools/src/index.ts +++ b/packages/tools/src/index.ts @@ -23,3 +23,4 @@ export * from './getLogger'; export * from './getConnectionLabel'; export * from './detectSqlFilterBehaviour'; export * from './filterBehaviours'; +export * from './schemaInfoTools'; diff --git a/packages/tools/src/schemaInfoTools.ts b/packages/tools/src/schemaInfoTools.ts new file mode 100644 index 000000000..75711f2d9 --- /dev/null +++ b/packages/tools/src/schemaInfoTools.ts @@ -0,0 +1,38 @@ +import { SchemaInfo, SqlDialect } from 'dbgate-types'; + +export function findDefaultSchema(schemaList: SchemaInfo[], dialect: SqlDialect, schemaInStorage: string = null) { + if (!schemaList) { + return null; + } + + if (schemaInStorage && schemaList.find(x => x.schemaName == schemaInStorage)) { + return schemaInStorage; + } + + const dynamicDefaultSchema = schemaList.find(x => x.isDefault); + if (dynamicDefaultSchema) { + return dynamicDefaultSchema.schemaName; + } + if (dialect?.defaultSchemaName && schemaList.find(x => x.schemaName == dialect.defaultSchemaName)) { + return dialect.defaultSchemaName; + } + return schemaList[0]?.schemaName; +} + +export function isCompositeDbName(name: string) { + return 
name?.includes('::'); +} + +export function splitCompositeDbName(name: string) { + if (!isCompositeDbName(name)) return null; + const [database, schema] = name.split('::'); + return { database, schema }; +} + +export function extractDbNameFromComposite(name: string) { + return isCompositeDbName(name) ? splitCompositeDbName(name).database : name; +} + +export function extractSchemaNameFromComposite(name: string) { + return splitCompositeDbName(name)?.schema; +} diff --git a/packages/types/dbinfo.d.ts b/packages/types/dbinfo.d.ts index 038ba345e..dc41dfa20 100644 --- a/packages/types/dbinfo.d.ts +++ b/packages/types/dbinfo.d.ts @@ -126,6 +126,7 @@ export interface TriggerInfo extends SqlObjectInfo {} export interface SchemaInfo { objectId?: string; schemaName: string; + isDefault?: boolean; } export interface DatabaseInfoObjects { @@ -139,7 +140,5 @@ export interface DatabaseInfoObjects { } export interface DatabaseInfo extends DatabaseInfoObjects { - schemas?: SchemaInfo[]; engine?: string; - defaultSchema?: string; } diff --git a/packages/types/engines.d.ts b/packages/types/engines.d.ts index 1ee4b71a6..f100858b7 100644 --- a/packages/types/engines.d.ts +++ b/packages/types/engines.d.ts @@ -11,6 +11,7 @@ import { FunctionInfo, TriggerInfo, CollectionInfo, + SchemaInfo, } from './dbinfo'; import { FilterBehaviour } from './filter-type'; @@ -129,6 +130,15 @@ export interface FilterBehaviourProvider { getFilterBehaviour(dataType: string, standardFilterBehaviours: { [id: string]: FilterBehaviour }): FilterBehaviour; } +export interface DatabaseHandle { + client: any; + database?: string; + feedback?: (message: any) => void; + getDatabase?: () => any; + connectionType?: string; + treeKeySeparator?: string; +} + export interface EngineDriver extends FilterBehaviourProvider { engine: string; title: string; @@ -170,52 +180,52 @@ export interface EngineDriver extends FilterBehaviourProvider { defaultSocketPath?: string; authTypeLabel?: string; importExportArgs?: any[]; - 
connect({ server, port, user, password, database }): Promise; - close(pool): Promise; - query(pool: any, sql: string, options?: QueryOptions): Promise; - stream(pool: any, sql: string, options: StreamOptions); - readQuery(pool: any, sql: string, structure?: TableInfo): Promise; - readJsonQuery(pool: any, query: any, structure?: TableInfo): Promise; - writeTable(pool: any, name: NamedObjectInfo, options: WriteTableOptions): Promise; + connect({ server, port, user, password, database }): Promise; + close(dbhan: DatabaseHandle): Promise; + query(dbhan: DatabaseHandle, sql: string, options?: QueryOptions): Promise; + stream(dbhan: DatabaseHandle, sql: string, options: StreamOptions); + readQuery(dbhan: DatabaseHandle, sql: string, structure?: TableInfo): Promise; + readJsonQuery(dbhan: DatabaseHandle, query: any, structure?: TableInfo): Promise; + writeTable(dbhan: DatabaseHandle, name: NamedObjectInfo, options: WriteTableOptions): Promise; analyseSingleObject( - pool: any, + dbhan: DatabaseHandle, name: NamedObjectInfo, objectTypeField: keyof DatabaseInfo ): Promise; - analyseSingleTable(pool: any, name: NamedObjectInfo): Promise; - getVersion(pool: any): Promise<{ version: string }>; - listDatabases(pool: any): Promise< + analyseSingleTable(dbhan: DatabaseHandle, name: NamedObjectInfo): Promise; + getVersion(dbhan: DatabaseHandle): Promise<{ version: string }>; + listDatabases(dbhan: DatabaseHandle): Promise< { name: string; }[] >; - loadKeys(pool, root: string, filter?: string): Promise; - exportKeys(pool, options: {}): Promise; - loadKeyInfo(pool, key): Promise; - loadKeyTableRange(pool, key, cursor, count): Promise; - loadFieldValues(pool: any, name: NamedObjectInfo, field: string, search: string): Promise; - analyseFull(pool: any, serverVersion): Promise; - analyseIncremental(pool: any, structure: DatabaseInfo, serverVersion): Promise; + loadKeys(dbhan: DatabaseHandle, root: string, filter?: string): Promise; + exportKeys(dbhan: DatabaseHandle, options: {}): 
Promise; + loadKeyInfo(dbhan: DatabaseHandle, key): Promise; + loadKeyTableRange(dbhan: DatabaseHandle, key, cursor, count): Promise; + loadFieldValues(dbhan: DatabaseHandle, name: NamedObjectInfo, field: string, search: string): Promise; + analyseFull(dbhan: DatabaseHandle, serverVersion): Promise; + analyseIncremental(dbhan: DatabaseHandle, structure: DatabaseInfo, serverVersion): Promise; dialect: SqlDialect; dialectByVersion(version): SqlDialect; createDumper(options = null): SqlDumper; - createBackupDumper(pool: any, options): Promise; + createBackupDumper(dbhan: DatabaseHandle, options): Promise; getAuthTypes(): EngineAuthType[]; - readCollection(pool: any, options: ReadCollectionOptions): Promise; - updateCollection(pool: any, changeSet: any): Promise; + readCollection(dbhan: DatabaseHandle, options: ReadCollectionOptions): Promise; + updateCollection(dbhan: DatabaseHandle, changeSet: any): Promise; getCollectionUpdateScript(changeSet: any, collectionInfo: CollectionInfo): string; - createDatabase(pool: any, name: string): Promise; - dropDatabase(pool: any, name: string): Promise; + createDatabase(dbhan: DatabaseHandle, name: string): Promise; + dropDatabase(dbhan: DatabaseHandle, name: string): Promise; getQuerySplitterOptions(usage: 'stream' | 'script' | 'editor'): any; - script(pool: any, sql: string, options?: RunScriptOptions): Promise; - operation(pool: any, operation: {}, options?: RunScriptOptions): Promise; + script(dbhan: DatabaseHandle, sql: string, options?: RunScriptOptions): Promise; + operation(dbhan: DatabaseHandle, operation: {}, options?: RunScriptOptions): Promise; getNewObjectTemplates(): NewObjectTemplate[]; - // direct call of pool method, only some methods could be supported, on only some drivers - callMethod(pool, method, args); - serverSummary(pool): Promise; - summaryCommand(pool, command, row): Promise; - startProfiler(pool, options): Promise; - stopProfiler(pool, profiler): Promise; + // direct call of dbhan.client method, only 
some methods could be supported, on only some drivers + callMethod(dbhan: DatabaseHandle, method, args); + serverSummary(dbhan: DatabaseHandle): Promise; + summaryCommand(dbhan: DatabaseHandle, command, row): Promise; + startProfiler(dbhan: DatabaseHandle, options): Promise; + stopProfiler(dbhan: DatabaseHandle, profiler): Promise; getRedirectAuthUrl(connection, options): Promise<{ url: string; sid: string }>; getAuthTokenFromCode(connection, options): Promise; getAccessTokenFromAuth(connection, req): Promise; @@ -230,6 +240,7 @@ export interface EngineDriver extends FilterBehaviourProvider { ): any[]; // adapts table info from different source (import, other database) to be suitable for this database adaptTableInfo(table: TableInfo): TableInfo; + listSchemas(dbhan: DatabaseHandle): Promise<SchemaInfo[]>; analyserClass?: any; dumperClass?: any; diff --git a/packages/web/src/appobj/ConnectionAppObject.svelte b/packages/web/src/appobj/ConnectionAppObject.svelte index 8c3020e5b..88a71f67a 100644 --- a/packages/web/src/appobj/ConnectionAppObject.svelte +++ b/packages/web/src/appobj/ConnectionAppObject.svelte @@ -14,7 +14,7 @@ export function openConnection(connection) { const config = getCurrentConfig(); if (connection.singleDatabase) { - currentDatabase.set({ connection, name: connection.defaultDatabase }); + switchCurrentDatabase({ connection, name: connection.defaultDatabase }); apiCall('database-connections/refresh', { conid: connection._id, database: connection.defaultDatabase, @@ -60,7 +60,7 @@ if (electron) { apiCall('database-connections/disconnect', { conid, database: currentDb.name }); } - currentDatabase.set(null); + switchCurrentDatabase(null); } closeMultipleTabs(closeCondition); // if (data.unsaved) { @@ -107,6 +107,7 @@ import { tick } from 'svelte'; import { getConnectionLabel } from 'dbgate-tools'; import hasPermission from '../utility/hasPermission'; + import { switchCurrentDatabase } from '../utility/common'; export let data; export let passProps; @@ -142,7
+143,7 @@ return; } if ($openedSingleDatabaseConnections.includes(data._id)) { - currentDatabase.set({ connection: data, name: data.defaultDatabase }); + switchCurrentDatabase({ connection: data, name: data.defaultDatabase }); return; } if ($openedConnections.includes(data._id)) { diff --git a/packages/web/src/appobj/DatabaseAppObject.svelte b/packages/web/src/appobj/DatabaseAppObject.svelte index b817f6974..7285f8b6e 100644 --- a/packages/web/src/appobj/DatabaseAppObject.svelte +++ b/packages/web/src/appobj/DatabaseAppObject.svelte @@ -26,7 +26,7 @@ apiCall('database-connections/disconnect', { conid, database }); } if (getCurrentDatabase()?.connection?._id == conid && getCurrentDatabase()?.name == database) { - currentDatabase.set(null); + switchCurrentDatabase(null); } openedSingleDatabaseConnections.update(list => list.filter(x => x != conid)); closeMultipleTabs(closeCondition); @@ -262,6 +262,17 @@ }); }; + const handleRefreshSchemas = () => { + const conid = connection._id; + const database = name; + apiCall('database-connections/dispatch-database-changed-event', { + event: 'schema-list-changed', + conid, + database, + }); + loadSchemaList(conid, database); + }; + async function handleConfirmSql(sql) { saveScriptToDatabase({ conid: connection._id, database: name }, sql, false); } @@ -290,6 +301,8 @@ onClick: handleNewPerspective, text: 'Design perspective query', }, + connection.useSeparateSchemas && { onClick: handleRefreshSchemas, text: 'Refresh schemas' }, + { divider: true }, isSqlOrDoc && !connection.isReadOnly && @@ -364,6 +377,7 @@ getCurrentDatabase, getExtensions, getOpenedTabs, + loadingSchemaLists, openedConnections, openedSingleDatabaseConnections, pinnedDatabases, @@ -374,7 +388,7 @@ import openNewTab from '../utility/openNewTab'; import AppObjectCore from './AppObjectCore.svelte'; import { showSnackbarError, showSnackbarSuccess } from '../utility/snackbar'; - import { findEngineDriver, getConnectionLabel } from 'dbgate-tools'; + import { 
extractDbNameFromComposite, findEngineDriver, getConnectionLabel } from 'dbgate-tools'; import InputTextModal from '../modals/InputTextModal.svelte'; import { getDatabaseInfo, useUsedApps } from '../utility/metadataLoaders'; import { openJsonDocument } from '../tabs/JsonTab.svelte'; @@ -391,6 +405,7 @@ import hasPermission from '../utility/hasPermission'; import { openImportExportTab } from '../utility/importExportTools'; import newTable from '../tableeditor/newTable'; + import { loadSchemaList, switchCurrentDatabase } from '../utility/common'; export let data; export let passProps; @@ -408,6 +423,7 @@ $: isPinned = !!$pinnedDatabases.find(x => x?.name == data.name && x?.connection?._id == data.connection?._id); $: apps = useUsedApps(); + $: isLoadingSchemas = $loadingSchemaLists[`${data?.connection?._id}::${data?.name}`]; ($currentDatabase = data)} + extractDbNameFromComposite(_.get($currentDatabase, 'name')) == data.name} + on:click={() => switchCurrentDatabase(data)} on:dragstart on:dragenter on:dragend @@ -430,6 +446,7 @@ .find(x => x.isNewQuery) .onClick(); }} + statusIcon={isLoadingSchemas ? 'icon loading' : ''} menu={createMenu} showPinnedInsteadOfUnpin={passProps?.showPinnedInsteadOfUnpin} onPin={isPinned ? 
null : () => pinnedDatabases.update(list => [...list, data])} diff --git a/packages/web/src/commands/CommandPalette.svelte b/packages/web/src/commands/CommandPalette.svelte index fc8c96ce1..aea16a20a 100644 --- a/packages/web/src/commands/CommandPalette.svelte +++ b/packages/web/src/commands/CommandPalette.svelte @@ -46,7 +46,7 @@ databaseList.push({ text: `${db.name} on ${getConnectionLabel(connection)}`, icon: 'img database', - onClick: () => currentDatabase.set({ connection, name: db.name }), + onClick: () => switchCurrentDatabase({ connection, name: db.name }), }); } } @@ -80,7 +80,7 @@ import { useConnectionList, useDatabaseInfo } from '../utility/metadataLoaders'; import { getLocalStorage } from '../utility/storageCache'; import registerCommand from './registerCommand'; - import { formatKeyText } from '../utility/common'; + import { formatKeyText, switchCurrentDatabase } from '../utility/common'; let domInput; let filter = ''; diff --git a/packages/web/src/commands/changeDatabaseStatusCommand.ts b/packages/web/src/commands/changeDatabaseStatusCommand.ts index 0d71a7ef7..d2d3b00bc 100644 --- a/packages/web/src/commands/changeDatabaseStatusCommand.ts +++ b/packages/web/src/commands/changeDatabaseStatusCommand.ts @@ -3,6 +3,7 @@ import { currentDatabase, getCurrentDatabase } from '../stores'; import getElectron from '../utility/getElectron'; import registerCommand from './registerCommand'; import { apiCall } from '../utility/api'; +import { switchCurrentDatabase } from '../utility/common'; registerCommand({ id: 'database.changeState', @@ -40,7 +41,7 @@ registerCommand({ onClick: () => { const electron = getElectron(); if (electron) apiCall('database-connections/disconnect', dbid); - currentDatabase.set(null); + switchCurrentDatabase(null); }, }, ]; diff --git a/packages/web/src/commands/recentDatabaseSwitch.ts b/packages/web/src/commands/recentDatabaseSwitch.ts index ee550b933..2e64b22f4 100644 --- a/packages/web/src/commands/recentDatabaseSwitch.ts +++ 
b/packages/web/src/commands/recentDatabaseSwitch.ts @@ -2,6 +2,7 @@ import _ from 'lodash'; import { recentDatabases, currentDatabase, getRecentDatabases } from '../stores'; import registerCommand from './registerCommand'; import { getConnectionLabel } from 'dbgate-tools'; +import { switchCurrentDatabase } from '../utility/common'; currentDatabase.subscribe(value => { if (!value) return; @@ -17,7 +18,7 @@ currentDatabase.subscribe(value => { function switchDatabaseCommand(db) { return { text: `${db.name} on ${getConnectionLabel(db?.connection, { allowExplicitDatabase: false })}`, - onClick: () => currentDatabase.set(db), + onClick: () => switchCurrentDatabase(db), }; } diff --git a/packages/web/src/commands/stdCommands.ts b/packages/web/src/commands/stdCommands.ts index 70282233d..78423b620 100644 --- a/packages/web/src/commands/stdCommands.ts +++ b/packages/web/src/commands/stdCommands.ts @@ -35,7 +35,7 @@ import { apiCall } from '../utility/api'; import runCommand from './runCommand'; import { openWebLink } from '../utility/exportFileTools'; import { getSettings } from '../utility/metadataLoaders'; -import { isMac } from '../utility/common'; +import { isMac, switchCurrentDatabase } from '../utility/common'; import { doLogout, internalRedirectTo } from '../clientAuth'; import { disconnectServerConnection } from '../appobj/ConnectionAppObject.svelte'; import UploadErrorModal from '../modals/UploadErrorModal.svelte'; @@ -347,7 +347,7 @@ registerCommand({ onConfirm: async file => { const resp = await apiCall('connections/new-sqlite-database', { file }); const connection = resp; - currentDatabase.set({ connection, name: `${file}.sqlite` }); + switchCurrentDatabase({ connection, name: `${file}.sqlite` }); }, }); }, diff --git a/packages/web/src/impexp/FormSchemaSelect.svelte b/packages/web/src/impexp/FormSchemaSelect.svelte index 7096d8266..049fdad7e 100644 --- a/packages/web/src/impexp/FormSchemaSelect.svelte +++ b/packages/web/src/impexp/FormSchemaSelect.svelte @@ 
-1,15 +1,16 @@ diff --git a/packages/web/src/query/codeCompletion.ts b/packages/web/src/query/codeCompletion.ts index 924ff4b54..319a3ec81 100644 --- a/packages/web/src/query/codeCompletion.ts +++ b/packages/web/src/query/codeCompletion.ts @@ -1,8 +1,10 @@ import _ from 'lodash'; import { addCompleter, setCompleters } from 'ace-builds/src-noconflict/ext-language_tools'; -import { getDatabaseInfo } from '../utility/metadataLoaders'; +import { getConnectionInfo, getDatabaseInfo, getSchemaList } from '../utility/metadataLoaders'; import analyseQuerySources from './analyseQuerySources'; import { getStringSettingsValue } from '../settings/settingsTools'; +import { findEngineDriver, findDefaultSchema } from 'dbgate-tools'; +import { getExtensions } from '../stores'; const COMMON_KEYWORDS = [ 'select', @@ -24,9 +26,9 @@ const COMMON_KEYWORDS = [ 'go', ]; -function createTableLikeList(dbinfo, schemaCondition) { +function createTableLikeList(schemaList, dbinfo, schemaCondition) { return [ - ...(dbinfo.schemas?.map(x => ({ + ...(schemaList?.map(x => ({ name: x.schemaName, value: x.schemaName, caption: x.schemaName, @@ -78,6 +80,10 @@ export function mountCodeCompletion({ conid, database, editor, getText }) { const cursor = session.selection.cursor; const line = session.getLine(cursor.row).slice(0, cursor.column); const dbinfo = await getDatabaseInfo({ conid, database }); + const schemaList = await getSchemaList({ conid, database }); + const connection = await getConnectionInfo({ conid }); + const driver = findEngineDriver(connection, getExtensions()); + const defaultSchema = findDefaultSchema(schemaList, driver.dialect); const convertUpper = getStringSettingsValue('sqlEditor.sqlCommandsCase', 'upperCase') == 'upperCase'; @@ -147,9 +153,9 @@ export function mountCodeCompletion({ conid, database, editor, getText }) { ]; } } else { - const schema = (dbinfo.schemas || []).find(x => x.schemaName == colMatch[1]); + const schema = (schemaList || []).find(x => x.schemaName == 
colMatch[1]); if (schema) { - list = createTableLikeList(dbinfo, x => x.schemaName == schema.schemaName); + list = createTableLikeList(schemaList, dbinfo, x => x.schemaName == schema.schemaName); } } } else { @@ -167,7 +173,7 @@ export function mountCodeCompletion({ conid, database, editor, getText }) { } else { list = [ ...(onlyTables ? [] : list), - ...createTableLikeList(dbinfo, x => !dbinfo.defaultSchema || dbinfo.defaultSchema == x.schemaName), + ...createTableLikeList(schemaList, dbinfo, x => !defaultSchema || defaultSchema == x.schemaName), ...(onlyTables ? [] diff --git a/packages/web/src/settings/ConnectionDriverFields.svelte b/packages/web/src/settings/ConnectionDriverFields.svelte index 4d30797b1..24f1bcd5d 100644 --- a/packages/web/src/settings/ConnectionDriverFields.svelte +++ b/packages/web/src/settings/ConnectionDriverFields.svelte @@ -249,6 +249,10 @@ {/if} +{#if driver?.showConnectionField('useSeparateSchemas', $values, showConnectionFieldArgs)} + +{/if} + {#if driver}
diff --git a/packages/web/src/stores.ts b/packages/web/src/stores.ts index 5f33b9104..9d2ca81f6 100644 --- a/packages/web/src/stores.ts +++ b/packages/web/src/stores.ts @@ -8,6 +8,7 @@ import _ from 'lodash'; import { safeJsonParse } from 'dbgate-tools'; import { apiCall } from './utility/api'; import { getOpenedTabsStorageName, isAdminPage } from './utility/pageDefs'; +import { switchCurrentDatabase } from './utility/common'; export interface TabDefinition { title: string; @@ -149,6 +150,7 @@ export const loadingPluginStore = writable({ }); export const activeDbKeysStore = writableWithStorage({}, 'activeDbKeysStore'); export const appliedCurrentSchema = writable(null); +export const loadingSchemaLists = writable({}); // dict [`${conid}::${database}`]: true export const currentThemeDefinition = derived([currentTheme, extensions], ([$currentTheme, $extensions]) => $extensions.themes.find(x => x.themeClassName == $currentTheme) @@ -296,7 +298,7 @@ export function subscribeApiDependendStores() { currentConfigValue = value; invalidateCommands(); if (value.singleDbConnection) { - currentDatabase.set(value.singleDbConnection); + switchCurrentDatabase(value.singleDbConnection); } }); } diff --git a/packages/web/src/tableeditor/TableEditor.svelte b/packages/web/src/tableeditor/TableEditor.svelte index d3cb316eb..65d6f2961 100644 --- a/packages/web/src/tableeditor/TableEditor.svelte +++ b/packages/web/src/tableeditor/TableEditor.svelte @@ -92,6 +92,7 @@ export let driver; export let resetCounter; export let isCreateTable; + export let schemaList; $: isWritable = !!setTableInfo; @@ -172,7 +173,7 @@ title="Table properties" fieldDefinitions={tableFormOptions ?? []} pureNameTitle={isCreateTable ? 'Table name' : null} - schemaList={isCreateTable && dbInfo?.schemas?.length >= 0 ? dbInfo?.schemas : null} + schemaList={isCreateTable && schemaList?.length >= 0 ? schemaList : null} values={_.pick(tableInfo, ['schemaName', 'pureName', ...(tableFormOptions ?? 
[]).map(x => x.name)])} onChangeValues={vals => { if (!_.isEmpty(vals)) { diff --git a/packages/web/src/tabpanel/TabsPanel.svelte b/packages/web/src/tabpanel/TabsPanel.svelte index d859fd2eb..707522f6b 100644 --- a/packages/web/src/tabpanel/TabsPanel.svelte +++ b/packages/web/src/tabpanel/TabsPanel.svelte @@ -285,7 +285,7 @@ draggingTabTarget, } from '../stores'; import tabs from '../tabs'; - import { setSelectedTab } from '../utility/common'; + import { setSelectedTab, switchCurrentDatabase } from '../utility/common'; import contextMenu from '../utility/contextMenu'; import { isElectronAvailable } from '../utility/getElectron'; import { getConnectionInfo, useConnectionList } from '../utility/metadataLoaders'; @@ -420,11 +420,11 @@ if (conid) { const connection = await getConnectionInfo({ conid, database }); if (connection) { - $currentDatabase = { connection, name: database }; + switchCurrentDatabase({ connection, name: database }); return; } } - $currentDatabase = null; + switchCurrentDatabase(null); }; async function scrollInViewTab(tabid) { diff --git a/packages/web/src/tabs/TableStructureTab.svelte b/packages/web/src/tabs/TableStructureTab.svelte index 631664862..7bcb6bdf6 100644 --- a/packages/web/src/tabs/TableStructureTab.svelte +++ b/packages/web/src/tabs/TableStructureTab.svelte @@ -44,7 +44,7 @@ import TableEditor from '../tableeditor/TableEditor.svelte'; import createActivator, { getActiveComponent } from '../utility/createActivator'; - import { useConnectionInfo, useDatabaseInfo, useDbCore } from '../utility/metadataLoaders'; + import { useConnectionInfo, useDatabaseInfo, useDbCore, useSchemaList } from '../utility/metadataLoaders'; import { showModal } from '../modals/modalTools'; import ConfirmSqlModal from '../modals/ConfirmSqlModal.svelte'; import ErrorMessageModal from '../modals/ErrorMessageModal.svelte'; @@ -75,6 +75,7 @@ $: tableInfoWithPairingId = $tableInfo ? 
generateTablePairingId($tableInfo) : null; $: connection = useConnectionInfo({ conid }); $: driver = findEngineDriver($connection, $extensions); + $: schemaList = useSchemaList({ conid, database }); const { editorState, editorValue, setEditorData, clearEditorData } = useEditorData({ tabid }); @@ -146,6 +147,7 @@ bind:this={domEditor} tableInfo={showTable} dbInfo={$dbInfo} + schemaList={$schemaList} {driver} {resetCounter} isCreateTable={objectTypeField == 'tables' && !$editorValue?.base} diff --git a/packages/web/src/utility/changeCurrentDbByTab.ts b/packages/web/src/utility/changeCurrentDbByTab.ts index 86fcc4c88..7a8183643 100644 --- a/packages/web/src/utility/changeCurrentDbByTab.ts +++ b/packages/web/src/utility/changeCurrentDbByTab.ts @@ -3,6 +3,7 @@ import { currentDatabase, getCurrentDatabase, getLockedDatabaseMode, openedTabs import { shouldShowTab } from '../tabpanel/TabsPanel.svelte'; import { callWhenAppLoaded, getAppLoaded } from './appLoadManager'; import { getConnectionInfo } from './metadataLoaders'; +import { switchCurrentDatabase } from './common'; let lastCurrentTab = null; @@ -20,7 +21,7 @@ openedTabs.subscribe(value => { if (conid && database && (conid != lastTab?.props?.conid || database != lastTab?.props?.database)) { const doWork = async () => { const connection = await getConnectionInfo({ conid }); - currentDatabase.set({ + switchCurrentDatabase({ connection, name: database, }); diff --git a/packages/web/src/utility/common.ts b/packages/web/src/utility/common.ts index 3951e41df..e047e859a 100644 --- a/packages/web/src/utility/common.ts +++ b/packages/web/src/utility/common.ts @@ -1,5 +1,8 @@ -import { getOpenedTabs, openedTabs } from '../stores'; +import { findDefaultSchema, findEngineDriver, isCompositeDbName } from 'dbgate-tools'; +import { currentDatabase, getExtensions, getOpenedTabs, loadingSchemaLists, openedTabs } from '../stores'; import _ from 'lodash'; +import { getSchemaList } from './metadataLoaders'; +import { showSnackbarError 
} from './snackbar'; export class LoadingToken { isCanceled = false; @@ -82,3 +85,37 @@ export function isCtrlOrCommandKey(event) { } return event.ctrlKey; } + +export async function loadSchemaList(conid, database) { + try { + loadingSchemaLists.update(x => ({ ...x, [`${conid}::${database}`]: true })); + const schemas = await getSchemaList({ conid, database }); + if (schemas.errorMessage) { + showSnackbarError(`Error loading schemas: ${schemas.errorMessage}`); + console.error('Error loading schemas', schemas.errorMessage); + return; + } + return schemas; + } finally { + loadingSchemaLists.update(x => _.omit(x, [`${conid}::${database}`])); + } +} + +export async function switchCurrentDatabase(data) { + if (data?.connection?.useSeparateSchemas && !isCompositeDbName(data.name)) { + const conid = data.connection._id; + const database = data.name; + const storageKey = `selected-schema-${conid}-${database}`; + const schemaInStorage = localStorage.getItem(storageKey); + const schemas = await loadSchemaList(conid, database); + if (!schemas) return; + const driver = findEngineDriver(data.connection, getExtensions()); + const defaultSchema = findDefaultSchema(schemas, driver?.dialect, schemaInStorage); + currentDatabase.set({ + ...data, + name: `${data.name}::${defaultSchema}`, + }); + } else { + currentDatabase.set(data); + } +} diff --git a/packages/web/src/utility/metadataLoaders.ts b/packages/web/src/utility/metadataLoaders.ts index 3ea4c2764..7528ff478 100644 --- a/packages/web/src/utility/metadataLoaders.ts +++ b/packages/web/src/utility/metadataLoaders.ts @@ -13,6 +13,12 @@ const databaseInfoLoader = ({ conid, database }) => ({ transform: extendDatabaseInfo, }); +const schemaListLoader = ({ conid, database }) => ({ + url: 'database-connections/schema-list', + params: { conid, database }, + reloadTrigger: { key: `schema-list-changed`, conid, database }, +}); + // const tableInfoLoader = ({ conid, database, schemaName, pureName }) => ({ // url: 'metadata/table-info', // 
params: { conid, database, schemaName, pureName }, @@ -449,3 +455,9 @@ export function useAuthTypes(args) { // export function useDatabaseKeys(args) { // return useCore(databaseKeysLoader, args); // } +export function getSchemaList(args) { + return getCore(schemaListLoader, args); +} +export function useSchemaList(args) { + return useCore(schemaListLoader, args); +} diff --git a/packages/web/src/utility/openElectronFile.ts b/packages/web/src/utility/openElectronFile.ts index c4e0b09d0..cbfa8d501 100644 --- a/packages/web/src/utility/openElectronFile.ts +++ b/packages/web/src/utility/openElectronFile.ts @@ -12,6 +12,7 @@ import { SAVED_FILE_HANDLERS } from '../appobj/SavedFileAppObject.svelte'; import _ from 'lodash'; import ErrorMessageModal from '../modals/ErrorMessageModal.svelte'; import { openImportExportTab } from './importExportTools'; +import { switchCurrentDatabase } from './common'; export function canOpenByElectron(file, extensions) { if (!file) return false; @@ -38,7 +39,7 @@ export async function openSqliteFile(filePath) { singleDatabase: true, defaultDatabase, }); - currentDatabase.set({ + switchCurrentDatabase({ connection: resp, name: getDatabaseFileLabel(filePath), }); diff --git a/packages/web/src/widgets/SchemaSelector.svelte b/packages/web/src/widgets/SchemaSelector.svelte index 6f4350f6c..98d1d2cb5 100644 --- a/packages/web/src/widgets/SchemaSelector.svelte +++ b/packages/web/src/widgets/SchemaSelector.svelte @@ -4,21 +4,26 @@ import _ from 'lodash'; import FontIcon from '../icons/FontIcon.svelte'; - import { DatabaseInfo } from 'dbgate-types'; import { showModal } from '../modals/modalTools'; import ConfirmModal from '../modals/ConfirmModal.svelte'; import { runOperationOnDatabase } from '../modals/ConfirmSqlModal.svelte'; import InputTextModal from '../modals/InputTextModal.svelte'; - import { appliedCurrentSchema } from '../stores'; + import { appliedCurrentSchema, currentDatabase } from '../stores'; + import { switchCurrentDatabase } from 
'../utility/common'; + import { extractDbNameFromComposite, extractSchemaNameFromComposite, findDefaultSchema } from 'dbgate-tools'; - export let dbinfo: DatabaseInfo; - export let selectedSchema; + export let schemaList; export let objectList; - export let valueStorageKey; - export let conid; export let database; + export let connection; + + export let driver; + + let selectedSchema = null; + + $: valueStorageKey = `selected-schema-${conid}-${database}`; $: { if (selectedSchema != null) { @@ -43,8 +48,8 @@ return res; } - $: schemaList = _.uniq( - _.compact([selectedSchema, ...Object.keys(countBySchema), ...(dbinfo?.schemas?.map(x => x.schemaName) ?? [])]) + $: realSchemaList = _.uniq( + _.compact([selectedSchema, ...Object.keys(countBySchema), ...(schemaList?.map(x => x.schemaName) ?? [])]) ); $: countBySchema = computeCountBySchema(objectList ?? []); @@ -55,10 +60,14 @@ label: 'Schema name', onConfirm: async name => { const dbid = { conid, database }; - await runOperationOnDatabase(dbid, { - type: 'createSchema', - schemaName: name, - }); + await runOperationOnDatabase( + dbid, + { + type: 'createSchema', + schemaName: name, + }, + 'schema-list-changed' + ); if (selectedSchema) { selectedSchema = name; } @@ -70,32 +79,48 @@ message: `Really drop schema ${$appliedCurrentSchema}?`, onConfirm: async () => { const dbid = { conid, database }; - runOperationOnDatabase(dbid, { - type: 'dropSchema', - schemaName: $appliedCurrentSchema, - }); + runOperationOnDatabase( + dbid, + { + type: 'dropSchema', + schemaName: $appliedCurrentSchema, + }, + 'schema-list-changed' + ); selectedSchema = null; }, }); } - $: selectedSchema = localStorage.getItem(valueStorageKey ?? ''); + $: if (connection?.useSeparateSchemas) { + selectedSchema = + extractSchemaNameFromComposite($currentDatabase?.name) ?? findDefaultSchema(schemaList, driver?.dialect); + } else { + selectedSchema = localStorage.getItem(valueStorageKey ?? 
''); + } -{#if schemaList.length > 0} +{#if realSchemaList.length > 0}
Schema:
({ label: `${x} (${countBySchema[x] ?? 0})`, value: x })), - // ...schemaList.filter(x => countBySchema[x]).map(x => ({ label: `${x} (${countBySchema[x] ?? 0})`, value: x })), - // ...schemaList.filter(x => !countBySchema[x]).map(x => ({ label: `${x} (${countBySchema[x] ?? 0})`, value: x })), - ]} + options={connection?.useSeparateSchemas + ? (schemaList?.map(x => ({ label: x.schemaName, value: x.schemaName })) ?? []) + : [ + { label: `All schemas (${objectList?.length ?? 0})`, value: '' }, + ...realSchemaList.map(x => ({ label: `${x} (${countBySchema[x] ?? 0})`, value: x })), + ]} value={selectedSchema ?? $appliedCurrentSchema ?? ''} on:change={e => { - selectedSchema = e.detail; + if (connection?.useSeparateSchemas) { + switchCurrentDatabase({ + connection, + name: `${extractDbNameFromComposite(database)}::${e.detail}`, + }); + } else { + selectedSchema = e.detail; + } localStorage.setItem(valueStorageKey, e.detail); }} selectClass="schema-select" diff --git a/packages/web/src/widgets/SqlObjectList.svelte b/packages/web/src/widgets/SqlObjectList.svelte index 203f8eeeb..126d15e54 100644 --- a/packages/web/src/widgets/SqlObjectList.svelte +++ b/packages/web/src/widgets/SqlObjectList.svelte @@ -16,7 +16,13 @@ import InlineButton from '../buttons/InlineButton.svelte'; import SearchInput from '../elements/SearchInput.svelte'; import WidgetsInnerContainer from './WidgetsInnerContainer.svelte'; - import { useConnectionInfo, useDatabaseInfo, useDatabaseStatus, useUsedApps } from '../utility/metadataLoaders'; + import { + useConnectionInfo, + useDatabaseInfo, + useDatabaseStatus, + useSchemaList, + useUsedApps, + } from '../utility/metadataLoaders'; import SearchBoxWrapper from '../elements/SearchBoxWrapper.svelte'; import AppObjectList from '../appobj/AppObjectList.svelte'; import _ from 'lodash'; @@ -42,10 +48,10 @@ export let database; let filter = ''; - let selectedSchema = null; $: objects = useDatabaseInfo({ conid, database }); $: status = useDatabaseStatus({ conid, 
database }); + $: schemaList = useSchemaList({ conid, database }); $: connection = useConnectionInfo({ conid }); $: driver = findEngineDriver($connection, $extensions); @@ -79,6 +85,7 @@ const handleRefreshDatabase = () => { apiCall('database-connections/refresh', { conid, database }); + apiCall('database-connections/dispatch-database-changed-event', { event: 'schema-list-changed', conid, database }); }; function createAddMenu() { @@ -116,6 +123,14 @@ Refresh {:else if objectList.length == 0 && $status && $status.name != 'pending' && $status.name != 'checkStructure' && $status.name != 'loadStructure' && $objects} + diff --git a/plugins/dbgate-plugin-clickhouse/src/backend/Analyser.js b/plugins/dbgate-plugin-clickhouse/src/backend/Analyser.js index ca2e5553c..ea34369e1 100644 --- a/plugins/dbgate-plugin-clickhouse/src/backend/Analyser.js +++ b/plugins/dbgate-plugin-clickhouse/src/backend/Analyser.js @@ -1,4 +1,4 @@ -const { DatabaseAnalyser } = require('dbgate-tools'); +const { DatabaseAnalyser } = global.DBGATE_PACKAGES['dbgate-tools']; const sql = require('./sql'); function extractDataType(dataType) { @@ -24,7 +24,7 @@ class Analyser extends DatabaseAnalyser { createQuery(resFileName, typeFields, replacements = {}) { let res = sql[resFileName]; - res = res.replace('#DATABASE#', this.pool._database_name); + res = res.replace('#DATABASE#', this.dbhan.database); return super.createQuery(res, typeFields, replacements); } @@ -82,8 +82,8 @@ class Analyser extends DatabaseAnalyser { async _computeSingleObjectId() { const { pureName } = this.singleObjectFilter; const resId = await this.driver.query( - this.pool, - `SELECT uuid as id FROM system.tables WHERE database = '${this.pool._database_name}' AND name='${pureName}'` + this.dbhan, + `SELECT uuid as id FROM system.tables WHERE database = '${this.dbhan.database}' AND name='${pureName}'` ); this.singleObjectId = resId.rows[0]?.id; } diff --git a/plugins/dbgate-plugin-clickhouse/src/backend/createBulkInsertStream.js 
b/plugins/dbgate-plugin-clickhouse/src/backend/createBulkInsertStream.js index 7627febf9..c4e098c82 100644 --- a/plugins/dbgate-plugin-clickhouse/src/backend/createBulkInsertStream.js +++ b/plugins/dbgate-plugin-clickhouse/src/backend/createBulkInsertStream.js @@ -5,11 +5,11 @@ const _ = require('lodash'); * * @param {import('dbgate-types').EngineDriver} driver */ -function createOracleBulkInsertStream(driver, stream, pool, name, options) { - const writable = createBulkInsertStreamBase(driver, stream, pool, name, options); +function createOracleBulkInsertStream(driver, stream, dbhan, name, options) { + const writable = createBulkInsertStreamBase(driver, stream, dbhan, name, options); writable.send = async () => { - await pool.insert({ + await dbhan.client.insert({ table: name.pureName, values: writable.buffer, format: 'JSONEachRow', diff --git a/plugins/dbgate-plugin-clickhouse/src/backend/driver.js b/plugins/dbgate-plugin-clickhouse/src/backend/driver.js index 82e4a7364..827a1fe54 100644 --- a/plugins/dbgate-plugin-clickhouse/src/backend/driver.js +++ b/plugins/dbgate-plugin-clickhouse/src/backend/driver.js @@ -15,16 +15,18 @@ const driver = { url: databaseUrl, username: user, password: password, - database: database, + database, }); - client._database_name = database; - return client; + return { + client, + database, + }; }, // called for retrieve data (eg. 
browse in data grid) and for update database - async query(client, query, options) { + async query(dbhan, query, options) { if (options?.discardResult) { - await client.command({ + await dbhan.client.command({ query, }); return { @@ -32,7 +34,7 @@ const driver = { columns: [], }; } else { - const resultSet = await client.query({ + const resultSet = await dbhan.client.query({ query, format: 'JSONCompactEachRowWithNamesAndTypes', }); @@ -57,10 +59,10 @@ const driver = { } }, // called in query console - async stream(client, query, options) { + async stream(dbhan, query, options) { try { if (!query.match(/^\s*SELECT/i)) { - const resp = await client.command({ + const resp = await dbhan.client.command({ query, }); // console.log('RESP', resp); @@ -76,7 +78,7 @@ const driver = { return; } - const resultSet = await client.query({ + const resultSet = await dbhan.client.query({ query, format: 'JSONCompactEachRowWithNamesAndTypes', }); @@ -138,13 +140,13 @@ const driver = { } }, // called when exporting table or view - async readQuery(client, query, structure) { + async readQuery(dbhan, query, structure) { const pass = new stream.PassThrough({ objectMode: true, highWaterMark: 100, }); - const resultSet = await client.query({ + const resultSet = await dbhan.client.query({ query, format: 'JSONCompactEachRowWithNamesAndTypes', }); @@ -190,12 +192,12 @@ const driver = { return pass; }, - async writeTable(pool, name, options) { - return createBulkInsertStream(this, stream, pool, name, options); + async writeTable(dbhan, name, options) { + return createBulkInsertStream(this, stream, dbhan, name, options); }, // detect server version - async getVersion(client) { - const resultSet = await client.query({ + async getVersion(dbhan) { + const resultSet = await dbhan.client.query({ query: 'SELECT version() as version', format: 'JSONEachRow', }); @@ -203,8 +205,8 @@ const driver = { return { version: dataset[0].version }; }, // list databases on server - async listDatabases(client) { - 
const resultSet = await client.query({ + async listDatabases(dbhan) { + const resultSet = await dbhan.client.query({ query: `SELECT name FROM system.databases WHERE name NOT IN ('system', 'information_schema', 'information_schema_ro', 'INFORMATION_SCHEMA')`, @@ -214,8 +216,8 @@ const driver = { return dataset; }, - async close(client) { - return client.close(); + async close(dbhan) { + return dbhan.client.close(); }, }; diff --git a/plugins/dbgate-plugin-mongo/src/backend/Analyser.js b/plugins/dbgate-plugin-mongo/src/backend/Analyser.js index 0e4a80fb7..3d3c6e598 100644 --- a/plugins/dbgate-plugin-mongo/src/backend/Analyser.js +++ b/plugins/dbgate-plugin-mongo/src/backend/Analyser.js @@ -1,12 +1,12 @@ const { DatabaseAnalyser } = global.DBGATE_PACKAGES['dbgate-tools']; class Analyser extends DatabaseAnalyser { - constructor(pool, driver, version) { - super(pool, driver, version); + constructor(dbhan, driver, version) { + super(dbhan, driver, version); } async _runAnalysis() { - const collectionsAndViews = await this.pool.__getDatabase().listCollections().toArray(); + const collectionsAndViews = await this.dbhan.getDatabase().listCollections().toArray(); const collections = collectionsAndViews.filter((x) => x.type == 'collection'); const views = collectionsAndViews.filter((x) => x.type == 'view'); @@ -16,8 +16,8 @@ class Analyser extends DatabaseAnalyser { collections .filter((x) => x.type == 'collection') .map((x) => - this.pool - .__getDatabase() + this.dbhan + .getDatabase() .collection(x.name) .aggregate([{ $collStats: { count: {} } }]) .toArray() diff --git a/plugins/dbgate-plugin-mongo/src/backend/createBulkInsertStream.js b/plugins/dbgate-plugin-mongo/src/backend/createBulkInsertStream.js index 2259873cf..153f3f704 100644 --- a/plugins/dbgate-plugin-mongo/src/backend/createBulkInsertStream.js +++ b/plugins/dbgate-plugin-mongo/src/backend/createBulkInsertStream.js @@ -5,9 +5,9 @@ const { EJSON } = require('bson'); const logger = getLogger('mongoBulkInsert'); 
-function createBulkInsertStream(driver, stream, pool, name, options) { +function createBulkInsertStream(driver, stream, dbhan, name, options) { const collectionName = name.pureName; - const db = pool.__getDatabase(); + const db = dbhan.getDatabase(); const writable = new stream.Writable({ objectMode: true, diff --git a/plugins/dbgate-plugin-mongo/src/backend/driver.js b/plugins/dbgate-plugin-mongo/src/backend/driver.js index db2f637ec..4a795bc42 100644 --- a/plugins/dbgate-plugin-mongo/src/backend/driver.js +++ b/plugins/dbgate-plugin-mongo/src/backend/driver.js @@ -34,8 +34,8 @@ function findArrayResult(resValue) { return null; } -async function getScriptableDb(pool) { - const db = pool.__getDatabase(); +async function getScriptableDb(dbhan) { + const db = dbhan.getDatabase(); const collections = await db.listCollections().toArray(); for (const collection of collections) { _.set(db, collection.name, db.collection(collection.name)); @@ -77,42 +77,43 @@ const driver = { options.tlsInsecure = !ssl.rejectUnauthorized; } - const pool = new MongoClient(mongoUrl, options); - await pool.connect(); - // const pool = await MongoClient.connect(mongoUrl); - pool.__getDatabase = database ? () => pool.db(database) : () => pool.db(); - pool.__databaseName = database; - return pool; + const client = new MongoClient(mongoUrl, options); + await client.connect(); + return { + client, + database, + getDatabase: database ? 
() => client.db(database) : () => client.db(), + }; }, // @ts-ignore - async query(pool, sql) { + async query(dbhan, sql) { return { rows: [], columns: [], }; }, - async script(pool, sql) { + async script(dbhan, sql) { let func; func = eval(`(db,ObjectId) => ${sql}`); - const db = await getScriptableDb(pool); + const db = await getScriptableDb(dbhan); const res = func(db, ObjectId.createFromHexString); if (isPromise(res)) await res; }, - async operation(pool, operation, options) { + async operation(dbhan, operation, options) { const { type } = operation; switch (type) { case 'createCollection': - await this.script(pool, `db.createCollection('${operation.collection.name}')`); + await this.script(dbhan, `db.createCollection('${operation.collection.name}')`); break; case 'dropCollection': - await this.script(pool, `db.dropCollection('${operation.collection}')`); + await this.script(dbhan, `db.dropCollection('${operation.collection}')`); break; case 'renameCollection': - await this.script(pool, `db.renameCollection('${operation.collection}', '${operation.newName}')`); + await this.script(dbhan, `db.renameCollection('${operation.collection}', '${operation.newName}')`); break; case 'cloneCollection': await this.script( - pool, + dbhan, `db.collection('${operation.collection}').aggregate([{$out: '${operation.newName}'}]).toArray()` ); break; @@ -121,7 +122,7 @@ const driver = { } // saveScriptToDatabase({ conid: connection._id, database: name }, `db.createCollection('${newCollection}')`); }, - async stream(pool, sql, options) { + async stream(dbhan, sql, options) { let func; try { func = eval(`(db,ObjectId) => ${sql}`); @@ -134,7 +135,7 @@ const driver = { options.done(); return; } - const db = await getScriptableDb(pool); + const db = await getScriptableDb(dbhan); let exprValue; try { @@ -192,8 +193,8 @@ const driver = { options.done(); }, - async startProfiler(pool, options) { - const db = await getScriptableDb(pool); + async startProfiler(dbhan, options) { + const db = 
await getScriptableDb(dbhan); const old = await db.command({ profile: -1 }); await db.command({ profile: 2 }); const cursor = await db.collection('system.profile').find({ @@ -230,12 +231,12 @@ const driver = { old, }; }, - async stopProfiler(pool, { cursor, old }) { + async stopProfiler(dbhan, { cursor, old }) { cursor.close(); - const db = await getScriptableDb(pool); + const db = await getScriptableDb(dbhan); await db.command({ profile: old.was, slowms: old.slowms }); }, - async readQuery(pool, sql, structure) { + async readQuery(dbhan, sql, structure) { try { const json = JSON.parse(sql); if (json && json.pureName) { @@ -251,7 +252,7 @@ const driver = { // }); func = eval(`(db,ObjectId) => ${sql}`); - const db = await getScriptableDb(pool); + const db = await getScriptableDb(dbhan); exprValue = func(db, ObjectId.createFromHexString); const pass = new stream.PassThrough({ @@ -278,27 +279,27 @@ const driver = { // return pass; }, - async writeTable(pool, name, options) { - return createBulkInsertStream(this, stream, pool, name, options); + async writeTable(dbhan, name, options) { + return createBulkInsertStream(this, stream, dbhan, name, options); }, - async getVersion(pool) { - const status = await pool.__getDatabase().admin().serverInfo(); + async getVersion(dbhan) { + const status = await dbhan.getDatabase().admin().serverInfo(); return { ...status, versionText: `MongoDB ${status.version}`, }; }, - async listDatabases(pool) { - const res = await pool.__getDatabase().admin().listDatabases(); + async listDatabases(dbhan) { + const res = await dbhan.getDatabase().admin().listDatabases(); return res.databases; }, - async readCollection(pool, options) { + async readCollection(dbhan, options) { try { const mongoCondition = convertToMongoCondition(options.condition); // console.log('******************* mongoCondition *****************'); // console.log(JSON.stringify(mongoCondition, undefined, 2)); - const collection = 
pool.__getDatabase().collection(options.pureName); + const collection = dbhan.getDatabase().collection(options.pureName); if (options.countDocuments) { const count = await collection.countDocuments(convertObjectId(mongoCondition) || {}); return { count }; @@ -326,7 +327,7 @@ const driver = { return { errorMessage: err.message }; } }, - async updateCollection(pool, changeSet) { + async updateCollection(dbhan, changeSet) { const res = { inserted: [], updated: [], @@ -334,7 +335,7 @@ const driver = { replaced: [], }; try { - const db = pool.__getDatabase(); + const db = dbhan.getDatabase(); for (const insert of changeSet.inserts) { const collection = db.collection(insert.pureName); const document = { @@ -384,19 +385,19 @@ const driver = { } }, - async createDatabase(pool, name) { - const db = pool.db(name); + async createDatabase(dbhan, name) { + const db = dbhan.client.db(name); await db.createCollection('collection1'); }, - async dropDatabase(pool, name) { - const db = pool.db(name); + async dropDatabase(dbhan, name) { + const db = dbhan.client.db(name); await db.dropDatabase(); }, - async loadFieldValues(pool, name, field, search) { + async loadFieldValues(dbhan, name, field, search) { try { - const collection = pool.__getDatabase().collection(name.pureName); + const collection = dbhan.getDatabase().collection(name.pureName); // console.log('options.condition', JSON.stringify(options.condition, undefined, 2)); const pipelineMatch = []; @@ -442,10 +443,10 @@ const driver = { } }, - readJsonQuery(pool, select, structure) { + readJsonQuery(dbhan, select, structure) { const { collection, condition, sort } = select; - const db = pool.__getDatabase(); + const db = dbhan.getDatabase(); const res = db .collection(collection) .find(condition || {}) @@ -455,23 +456,23 @@ const driver = { return res; }, - async summaryCommand(pool, command, row) { + async summaryCommand(dbhan, command, row) { switch (command) { case 'profileOff': - await pool.db(row.name).command({ profile: 0 
}); + await dbhan.client.db(row.name).command({ profile: 0 }); return; case 'profileFiltered': - await pool.db(row.name).command({ profile: 1, slowms: 100 }); + await dbhan.client.db(row.name).command({ profile: 1, slowms: 100 }); return; case 'profileAll': - await pool.db(row.name).command({ profile: 2 }); + await dbhan.client.db(row.name).command({ profile: 2 }); return; } }, - async serverSummary(pool) { - const res = await pool.__getDatabase().admin().listDatabases(); - const profiling = await Promise.all(res.databases.map((x) => pool.db(x.name).command({ profile: -1 }))); + async serverSummary(dbhan) { + const res = await dbhan.getDatabase().admin().listDatabases(); + const profiling = await Promise.all(res.databases.map((x) => dbhan.client.db(x.name).command({ profile: -1 }))); function formatProfiling(info) { switch (info.was) { diff --git a/plugins/dbgate-plugin-mssql/src/backend/MsSqlAnalyser.js b/plugins/dbgate-plugin-mssql/src/backend/MsSqlAnalyser.js index 757933915..3ecf458aa 100644 --- a/plugins/dbgate-plugin-mssql/src/backend/MsSqlAnalyser.js +++ b/plugins/dbgate-plugin-mssql/src/backend/MsSqlAnalyser.js @@ -63,8 +63,8 @@ function getColumnInfo({ } class MsSqlAnalyser extends DatabaseAnalyser { - constructor(pool, driver, version) { - super(pool, driver, version); + constructor(dbhan, driver, version) { + super(dbhan, driver, version); } createQuery(resFileName, typeFields) { @@ -75,16 +75,10 @@ class MsSqlAnalyser extends DatabaseAnalyser { async _computeSingleObjectId() { const { schemaName, pureName, typeField } = this.singleObjectFilter; const fullName = schemaName ? 
`[${schemaName}].[${pureName}]` : pureName; - const resId = await this.driver.query(this.pool, `SELECT OBJECT_ID('${fullName}') AS id`); + const resId = await this.driver.query(this.dbhan, `SELECT OBJECT_ID('${fullName}') AS id`); this.singleObjectId = resId.rows[0].id; } - async readSchemaList() { - const schemaRows = await this.analyserQuery('getSchemas'); - const schemas = schemaRows.rows; - return schemas; - } - async _runAnalysis() { this.feedback({ analysingMessage: 'Loading tables' }); const tablesRows = await this.analyserQuery('tables', ['tables']); @@ -98,8 +92,6 @@ class MsSqlAnalyser extends DatabaseAnalyser { const indexesRows = await this.analyserQuery('indexes', ['tables']); this.feedback({ analysingMessage: 'Loading index columns' }); const indexcolsRows = await this.analyserQuery('indexcols', ['tables']); - this.feedback({ analysingMessage: 'Loading default schema' }); - const defaultSchemaRows = await this.driver.query(this.pool, 'SELECT SCHEMA_NAME() as name'); this.feedback({ analysingMessage: 'Loading table sizes' }); const tableSizes = await this.analyserQuery('tableSizes'); @@ -179,7 +171,6 @@ class MsSqlAnalyser extends DatabaseAnalyser { views, procedures, functions, - defaultSchema: defaultSchemaRows.rows[0] ? 
defaultSchemaRows.rows[0].name : undefined, }; } diff --git a/plugins/dbgate-plugin-mssql/src/backend/createNativeBulkInsertStream.js b/plugins/dbgate-plugin-mssql/src/backend/createNativeBulkInsertStream.js index 093c05881..5a60f0ae3 100644 --- a/plugins/dbgate-plugin-mssql/src/backend/createNativeBulkInsertStream.js +++ b/plugins/dbgate-plugin-mssql/src/backend/createNativeBulkInsertStream.js @@ -1,8 +1,8 @@ const { createBulkInsertStreamBase } = global.DBGATE_PACKAGES['dbgate-tools']; -function runBulkInsertBatch(pool, tableName, writable, rows) { +function runBulkInsertBatch(dbhan, tableName, writable, rows) { return new Promise((resolve, reject) => { - const tableMgr = pool.tableMgr(); + const tableMgr = dbhan.client.tableMgr(); tableMgr.bind(tableName, bulkMgr => { bulkMgr.insertRows(rows, err => { if (err) reject(err); @@ -16,8 +16,8 @@ function runBulkInsertBatch(pool, tableName, writable, rows) { * * @param {import('dbgate-types').EngineDriver} driver */ -function createNativeBulkInsertStream(driver, stream, pool, name, options) { - const writable = createBulkInsertStreamBase(driver, stream, pool, name, options); +function createNativeBulkInsertStream(driver, stream, dbhan, name, options) { + const writable = createBulkInsertStreamBase(driver, stream, dbhan, name, options); const fullName = name.schemaName ? 
`[${name.schemaName}].[${name.pureName}]` : name.pureName; @@ -25,7 +25,7 @@ function createNativeBulkInsertStream(driver, stream, pool, name, options) { const rows = writable.buffer; writable.buffer = []; - await runBulkInsertBatch(pool, fullName, writable, rows); + await runBulkInsertBatch(dbhan, fullName, writable, rows); }; return writable; diff --git a/plugins/dbgate-plugin-mssql/src/backend/createTediousBulkInsertStream.js b/plugins/dbgate-plugin-mssql/src/backend/createTediousBulkInsertStream.js index 2c2c057b0..878579262 100644 --- a/plugins/dbgate-plugin-mssql/src/backend/createTediousBulkInsertStream.js +++ b/plugins/dbgate-plugin-mssql/src/backend/createTediousBulkInsertStream.js @@ -3,12 +3,12 @@ const tedious = require('tedious'); const getConcreteType = require('./getConcreteType'); const _ = require('lodash'); -function runBulkInsertBatch(pool, tableName, writable, rows) { +function runBulkInsertBatch(dbhan, tableName, writable, rows) { return new Promise((resolve, reject) => { var options = { keepNulls: true }; // instantiate - provide the table where you'll be inserting to, options and a callback - var bulkLoad = pool.newBulkLoad(tableName, options, (error, rowCount) => { + var bulkLoad = dbhan.client.newBulkLoad(tableName, options, (error, rowCount) => { if (error) reject(error); else resolve(); }); @@ -40,7 +40,7 @@ function runBulkInsertBatch(pool, tableName, writable, rows) { ); // console.log('IMPORT ROWS', rowsMapped); - pool.execBulkLoad(bulkLoad, rowsMapped); + dbhan.client.execBulkLoad(bulkLoad, rowsMapped); }); } @@ -48,8 +48,8 @@ function runBulkInsertBatch(pool, tableName, writable, rows) { * * @param {import('dbgate-types').EngineDriver} driver */ -function createTediousBulkInsertStream(driver, stream, pool, name, options) { - const writable = createBulkInsertStreamBase(driver, stream, pool, name, options); +function createTediousBulkInsertStream(driver, stream, dbhan, name, options) { + const writable = 
createBulkInsertStreamBase(driver, stream, dbhan, name, options); const fullName = name.schemaName ? `[${name.schemaName}].[${name.pureName}]` : name.pureName; @@ -59,7 +59,7 @@ function createTediousBulkInsertStream(driver, stream, pool, name, options) { ? `${driver.dialect.quoteIdentifier(name.schemaName)}.${driver.dialect.quoteIdentifier(name.pureName)}` : driver.dialect.quoteIdentifier(name.pureName); - const respTemplate = await driver.query(pool, `SELECT * FROM ${fullNameQuoted} WHERE 1=0`, { + const respTemplate = await driver.query(dbhan, `SELECT * FROM ${fullNameQuoted} WHERE 1=0`, { addDriverNativeColumn: true, }); writable.templateColumns = respTemplate.columns; @@ -68,7 +68,7 @@ function createTediousBulkInsertStream(driver, stream, pool, name, options) { const rows = writable.buffer; writable.buffer = []; - await runBulkInsertBatch(pool, fullName, writable, rows); + await runBulkInsertBatch(dbhan, fullName, writable, rows); }; return writable; diff --git a/plugins/dbgate-plugin-mssql/src/backend/driver.js b/plugins/dbgate-plugin-mssql/src/backend/driver.js index fc385ad2a..5404a0d6d 100644 --- a/plugins/dbgate-plugin-mssql/src/backend/driver.js +++ b/plugins/dbgate-plugin-mssql/src/backend/driver.js @@ -79,50 +79,53 @@ const driver = { async connect(conn) { const { authType } = conn; - if (requireMsnodesqlv8 && (authType == 'sspi' || authType == 'sql')) { - return nativeConnect(conn); - } + const connectionType = requireMsnodesqlv8 && (authType == 'sspi' || authType == 'sql') ? 'msnodesqlv8' : 'tedious'; + const client = connectionType == 'msnodesqlv8' ? 
await nativeConnect(conn) : await tediousConnect(conn); - return tediousConnect(conn); + return { + client, + connectionType, + database: conn.database, + }; }, - async close(pool) { - return pool.close(); + async close(dbhan) { + return dbhan.client.close(); }, - async queryCore(pool, sql, options) { - if (pool._connectionType == 'msnodesqlv8') { - return nativeQueryCore(pool, sql, options); + async queryCore(dbhan, sql, options) { + if (dbhan.connectionType == 'msnodesqlv8') { + return nativeQueryCore(dbhan, sql, options); } else { - return tediousQueryCore(pool, sql, options); + return tediousQueryCore(dbhan, sql, options); } }, - async query(pool, sql, options) { + async query(dbhan, sql, options) { return lock.acquire('connection', async () => { - return this.queryCore(pool, sql, options); + return this.queryCore(dbhan, sql, options); }); }, - async stream(pool, sql, options) { - if (pool._connectionType == 'msnodesqlv8') { - return nativeStream(pool, sql, options); + async stream(dbhan, sql, options) { + if (dbhan.connectionType == 'msnodesqlv8') { + return nativeStream(dbhan, sql, options); } else { - return tediousStream(pool, sql, options); + return tediousStream(dbhan, sql, options); } }, - async readQuery(pool, sql, structure) { - if (pool._connectionType == 'msnodesqlv8') { - return nativeReadQuery(pool, sql, structure); + async readQuery(dbhan, sql, structure) { + if (dbhan.connectionType == 'msnodesqlv8') { + return nativeReadQuery(dbhan, sql, structure); } else { - return tediousReadQuery(pool, sql, structure); + return tediousReadQuery(dbhan, sql, structure); } }, - async writeTable(pool, name, options) { - if (pool._connectionType == 'msnodesqlv8') { - return createNativeBulkInsertStream(this, stream, pool, name, options); + async writeTable(dbhan, name, options) { + if (dbhan.connectionType == 'msnodesqlv8') { + return createNativeBulkInsertStream(this, stream, dbhan, name, options); } else { - return createTediousBulkInsertStream(this, stream, 
pool, name, options); + return createTediousBulkInsertStream(this, stream, dbhan, name, options); } }, - async getVersion(pool) { - const res = (await this.query(pool, versionQuery)).rows[0]; + async getVersion(dbhan) { + const res = (await this.query(dbhan, versionQuery)).rows[0]; if (res.productVersion) { const splitted = res.productVersion.split('.'); @@ -133,8 +136,8 @@ const driver = { } return res; }, - async listDatabases(pool) { - const { rows } = await this.query(pool, 'SELECT name FROM sys.databases order by name'); + async listDatabases(dbhan) { + const { rows } = await this.query(dbhan, 'SELECT name FROM sys.databases order by name'); return rows; }, getRedirectAuthUrl(connection, options) { @@ -150,6 +153,19 @@ const driver = { getAccessTokenFromAuth: (connection, req) => { return req?.user?.msentraToken; }, + async listSchemas(dbhan) { + const { rows } = await this.query(dbhan, 'select schema_id as objectId, name as schemaName from sys.schemas'); + + const defaultSchemaRows = await this.query(dbhan, 'SELECT SCHEMA_NAME() as name'); + const defaultSchema = defaultSchemaRows.rows[0]?.name; + + logger.debug(`Loaded ${rows.length} mssql schemas`); + + return rows.map(x => ({ + ...x, + isDefault: x.schemaName == defaultSchema, + })); + }, }; driver.initialize = dbgateEnv => { diff --git a/plugins/dbgate-plugin-mssql/src/backend/nativeDriver.js b/plugins/dbgate-plugin-mssql/src/backend/nativeDriver.js index 5c2de8d23..6ba14f634 100644 --- a/plugins/dbgate-plugin-mssql/src/backend/nativeDriver.js +++ b/plugins/dbgate-plugin-mssql/src/backend/nativeDriver.js @@ -2,6 +2,7 @@ const _ = require('lodash'); const stream = require('stream'); const makeUniqueColumnNames = require('./makeUniqueColumnNames'); let requireMsnodesqlv8; +const { extractDbNameFromComposite } = global.DBGATE_PACKAGES['dbgate-tools']; // async function nativeQueryCore(pool, sql, options) { // if (sql == null) { @@ -57,13 +58,12 @@ async function connectWithDriver({ server, port, user, 
password, database, authT connectionString += `;Driver={${driver}}`; if (authType == 'sspi') connectionString += ';Trusted_Connection=Yes'; else connectionString += `;UID=${user};PWD=${password}`; - if (database) connectionString += `;Database=${database}`; + if (database) connectionString += `;Database=${extractDbNameFromComposite(database)}`; return new Promise((resolve, reject) => { getMsnodesqlv8().open(connectionString, (err, conn) => { if (err) { reject(err); } else { - conn._connectionType = 'msnodesqlv8'; resolve(conn); } }); @@ -88,7 +88,7 @@ async function nativeConnect(connection) { } } -async function nativeQueryCore(pool, sql, options) { +async function nativeQueryCore(dbhan, sql, options) { if (sql == null) { return Promise.resolve({ rows: [], @@ -98,7 +98,7 @@ async function nativeQueryCore(pool, sql, options) { return new Promise((resolve, reject) => { let columns = null; let currentRow = null; - const q = pool.query(sql); + const q = dbhan.client.query(sql); const rows = []; q.on('meta', meta => { @@ -128,7 +128,7 @@ async function nativeQueryCore(pool, sql, options) { }); } -async function nativeReadQuery(pool, sql, structure) { +async function nativeReadQuery(dbhan, sql, structure) { const pass = new stream.PassThrough({ objectMode: true, highWaterMark: 100, @@ -136,7 +136,7 @@ async function nativeReadQuery(pool, sql, structure) { let columns = null; let currentRow = null; - const q = pool.query(sql); + const q = dbhan.client.query(sql); q.on('meta', meta => { columns = extractNativeColumns(meta); @@ -168,7 +168,7 @@ async function nativeReadQuery(pool, sql, structure) { return pass; } -async function nativeStream(pool, sql, options) { +async function nativeStream(dbhan, sql, options) { const handleInfo = info => { const { message, lineNumber, procName } = info; options.info({ @@ -192,7 +192,7 @@ async function nativeStream(pool, sql, options) { let columns = null; let currentRow = null; - const q = pool.query(sql); + const q = 
dbhan.client.query(sql); q.on('meta', meta => { if (currentRow) options.row(currentRow); diff --git a/plugins/dbgate-plugin-mssql/src/backend/sql/columns.js b/plugins/dbgate-plugin-mssql/src/backend/sql/columns.js index 9e61b2626..7a6ee8cf2 100644 --- a/plugins/dbgate-plugin-mssql/src/backend/sql/columns.js +++ b/plugins/dbgate-plugin-mssql/src/backend/sql/columns.js @@ -15,6 +15,6 @@ INNER JOIN sys.schemas u ON u.schema_id=o.schema_id INNER JOIN INFORMATION_SCHEMA.COLUMNS col ON col.TABLE_NAME = o.name AND col.TABLE_SCHEMA = u.name and col.COLUMN_NAME = c.name left join sys.default_constraints d on c.default_object_id = d.object_id left join sys.computed_columns m on m.object_id = c.object_id and m.column_id = c.column_id -where o.type = 'U' and o.object_id =OBJECT_ID_CONDITION +where o.type = 'U' and o.object_id =OBJECT_ID_CONDITION and u.name =SCHEMA_NAME_CONDITION order by c.column_id `; diff --git a/plugins/dbgate-plugin-mssql/src/backend/sql/foreignKeys.js b/plugins/dbgate-plugin-mssql/src/backend/sql/foreignKeys.js index a00ed5717..234ba20ce 100644 --- a/plugins/dbgate-plugin-mssql/src/backend/sql/foreignKeys.js +++ b/plugins/dbgate-plugin-mssql/src/backend/sql/foreignKeys.js @@ -36,5 +36,5 @@ LEFT JOIN sys.schemas IXS ON IXT.schema_id = IXS.schema_id inner join sys.objects o on FK.TABLE_NAME = o.name inner join sys.schemas s on o.schema_id = s.schema_id and FK.TABLE_SCHEMA = s.name -where o.object_id =OBJECT_ID_CONDITION +where o.object_id =OBJECT_ID_CONDITION and s.name =SCHEMA_NAME_CONDITION `; diff --git a/plugins/dbgate-plugin-mssql/src/backend/sql/getSchemas.js b/plugins/dbgate-plugin-mssql/src/backend/sql/getSchemas.js deleted file mode 100644 index a7cb0a4de..000000000 --- a/plugins/dbgate-plugin-mssql/src/backend/sql/getSchemas.js +++ /dev/null @@ -1 +0,0 @@ -module.exports = `select schema_id as objectId, name as schemaName from sys.schemas`; diff --git a/plugins/dbgate-plugin-mssql/src/backend/sql/index.js 
b/plugins/dbgate-plugin-mssql/src/backend/sql/index.js index 17d63db49..563b8d080 100644 --- a/plugins/dbgate-plugin-mssql/src/backend/sql/index.js +++ b/plugins/dbgate-plugin-mssql/src/backend/sql/index.js @@ -8,7 +8,6 @@ const loadSqlCode = require('./loadSqlCode'); const views = require('./views'); const programmables = require('./programmables'); const viewColumns = require('./viewColumns'); -const getSchemas = require('./getSchemas'); const indexes = require('./indexes'); const indexcols = require('./indexcols'); @@ -22,7 +21,6 @@ module.exports = { views, programmables, viewColumns, - getSchemas, indexes, indexcols, tableSizes, diff --git a/plugins/dbgate-plugin-mssql/src/backend/sql/indexcols.js b/plugins/dbgate-plugin-mssql/src/backend/sql/indexcols.js index 7dbb8a64b..fb1e37036 100644 --- a/plugins/dbgate-plugin-mssql/src/backend/sql/indexcols.js +++ b/plugins/dbgate-plugin-mssql/src/backend/sql/indexcols.js @@ -5,6 +5,8 @@ select c.is_descending_key as isDescending, c.is_included_column as isIncludedColumn from sys.index_columns c inner join sys.columns col on c.object_id = col.object_id and c.column_id = col.column_id -where c.object_id =OBJECT_ID_CONDITION +inner join sys.objects o on c.object_id = o.object_id +INNER JOIN sys.schemas u ON u.schema_id=o.schema_id +where c.object_id =OBJECT_ID_CONDITION and u.name =SCHEMA_NAME_CONDITION order by c.key_ordinal `; diff --git a/plugins/dbgate-plugin-mssql/src/backend/sql/indexes.js b/plugins/dbgate-plugin-mssql/src/backend/sql/indexes.js index 4ba26f1bc..a685472d1 100644 --- a/plugins/dbgate-plugin-mssql/src/backend/sql/indexes.js +++ b/plugins/dbgate-plugin-mssql/src/backend/sql/indexes.js @@ -1,5 +1,7 @@ module.exports = ` select i.object_id, i.name as constraintName, i.type_desc as indexType, i.is_unique as isUnique,i.index_id, i.is_unique_constraint from sys.indexes i +inner join sys.objects o on i.object_id = o.object_id +INNER JOIN sys.schemas u ON u.schema_id=o.schema_id where i.is_primary_key=0 and 
i.is_hypothetical=0 and indexproperty(i.object_id, i.name, 'IsStatistics') = 0 and objectproperty(i.object_id, 'IsUserTable') = 1 @@ -10,5 +12,5 @@ and i.index_id between 1 and 254 -- where o.parent_obj = i.object_id -- and objectproperty(o.id, N'isConstraint') = 1.0) - and i.object_id =OBJECT_ID_CONDITION + and i.object_id =OBJECT_ID_CONDITION and u.name =SCHEMA_NAME_CONDITION `; diff --git a/plugins/dbgate-plugin-mssql/src/backend/sql/loadSqlCode.js b/plugins/dbgate-plugin-mssql/src/backend/sql/loadSqlCode.js index ea242b0e8..c9abd34f8 100644 --- a/plugins/dbgate-plugin-mssql/src/backend/sql/loadSqlCode.js +++ b/plugins/dbgate-plugin-mssql/src/backend/sql/loadSqlCode.js @@ -3,6 +3,6 @@ select s.name as pureName, u.name as schemaName, c.text AS codeText from sys.objects s inner join sys.syscomments c on s.object_id = c.id inner join sys.schemas u on u.schema_id = s.schema_id -where (s.object_id =OBJECT_ID_CONDITION) +where (s.object_id =OBJECT_ID_CONDITION) and u.name =SCHEMA_NAME_CONDITION order by u.name, s.name, c.colid `; diff --git a/plugins/dbgate-plugin-mssql/src/backend/sql/modifications.js b/plugins/dbgate-plugin-mssql/src/backend/sql/modifications.js index 4389fd6ca..b1a2fec1d 100644 --- a/plugins/dbgate-plugin-mssql/src/backend/sql/modifications.js +++ b/plugins/dbgate-plugin-mssql/src/backend/sql/modifications.js @@ -3,4 +3,5 @@ select o.object_id as objectId, o.modify_date as modifyDate, o.type, o.name as p from sys.objects o inner join sys.schemas s on o.schema_id = s.schema_id where o.type in ('U', 'V', 'P', 'IF', 'FN', 'TF') -- , 'TR' - triggers disabled + and s.name =SCHEMA_NAME_CONDITION `; diff --git a/plugins/dbgate-plugin-mssql/src/backend/sql/primaryKeys.js b/plugins/dbgate-plugin-mssql/src/backend/sql/primaryKeys.js index 24b9fc3e0..2d6ffc311 100644 --- a/plugins/dbgate-plugin-mssql/src/backend/sql/primaryKeys.js +++ b/plugins/dbgate-plugin-mssql/src/backend/sql/primaryKeys.js @@ -11,4 +11,5 @@ where and c.Table_Name = t.Table_Name and 
Constraint_Type = 'PRIMARY KEY' and o.object_id =OBJECT_ID_CONDITION + and s.name =SCHEMA_NAME_CONDITION `; diff --git a/plugins/dbgate-plugin-mssql/src/backend/sql/programmables.js b/plugins/dbgate-plugin-mssql/src/backend/sql/programmables.js index 565bb776c..2f0489540 100644 --- a/plugins/dbgate-plugin-mssql/src/backend/sql/programmables.js +++ b/plugins/dbgate-plugin-mssql/src/backend/sql/programmables.js @@ -2,5 +2,5 @@ module.exports = ` select o.name as pureName, s.name as schemaName, o.object_id as objectId, o.create_date as createDate, o.modify_date as modifyDate, o.type as sqlObjectType from sys.objects o inner join sys.schemas s on o.schema_id = s.schema_id -where o.type in ('P', 'IF', 'FN', 'TF') and o.object_id =OBJECT_ID_CONDITION +where o.type in ('P', 'IF', 'FN', 'TF') and o.object_id =OBJECT_ID_CONDITION and s.name =SCHEMA_NAME_CONDITION `; diff --git a/plugins/dbgate-plugin-mssql/src/backend/sql/tableSizes.js b/plugins/dbgate-plugin-mssql/src/backend/sql/tableSizes.js index 896ef0cab..70a1ddd4b 100644 --- a/plugins/dbgate-plugin-mssql/src/backend/sql/tableSizes.js +++ b/plugins/dbgate-plugin-mssql/src/backend/sql/tableSizes.js @@ -8,8 +8,11 @@ INNER JOIN sys.indexes i ON t.OBJECT_ID = i.object_id INNER JOIN sys.partitions p ON i.object_id = p.OBJECT_ID AND i.index_id = p.index_id +INNER JOIN + sys.schemas s ON t.schema_id = s.schema_id WHERE t.NAME NOT LIKE 'dt%' AND t.is_ms_shipped = 0 AND i.OBJECT_ID > 255 + AND s.name =SCHEMA_NAME_CONDITION `; diff --git a/plugins/dbgate-plugin-mssql/src/backend/sql/tables.js b/plugins/dbgate-plugin-mssql/src/backend/sql/tables.js index b4886d4a2..32dace94d 100644 --- a/plugins/dbgate-plugin-mssql/src/backend/sql/tables.js +++ b/plugins/dbgate-plugin-mssql/src/backend/sql/tables.js @@ -4,5 +4,5 @@ select o.create_date as createDate, o.modify_date as modifyDate from sys.tables o inner join sys.schemas s on o.schema_id = s.schema_id -where o.object_id =OBJECT_ID_CONDITION +where o.object_id =OBJECT_ID_CONDITION 
and s.name =SCHEMA_NAME_CONDITION `; diff --git a/plugins/dbgate-plugin-mssql/src/backend/sql/viewColumns.js b/plugins/dbgate-plugin-mssql/src/backend/sql/viewColumns.js index 08caca7b7..9ad4a7bf0 100644 --- a/plugins/dbgate-plugin-mssql/src/backend/sql/viewColumns.js +++ b/plugins/dbgate-plugin-mssql/src/backend/sql/viewColumns.js @@ -13,6 +13,6 @@ select FROM sys.objects o INNER JOIN sys.schemas u ON u.schema_id=o.schema_id INNER JOIN INFORMATION_SCHEMA.COLUMNS col ON col.TABLE_NAME = o.name AND col.TABLE_SCHEMA = u.name -WHERE o.type in ('V') and o.object_id =OBJECT_ID_CONDITION +WHERE o.type in ('V') and o.object_id =OBJECT_ID_CONDITION and u.name =SCHEMA_NAME_CONDITION order by col.ORDINAL_POSITION `; diff --git a/plugins/dbgate-plugin-mssql/src/backend/sql/views.js b/plugins/dbgate-plugin-mssql/src/backend/sql/views.js index e1436d712..3f746ed64 100644 --- a/plugins/dbgate-plugin-mssql/src/backend/sql/views.js +++ b/plugins/dbgate-plugin-mssql/src/backend/sql/views.js @@ -6,5 +6,5 @@ SELECT o.create_date as createDate, o.modify_date as modifyDate FROM sys.objects o INNER JOIN sys.schemas u ON u.schema_id=o.schema_id -WHERE type in ('V') and o.object_id =OBJECT_ID_CONDITION +WHERE type in ('V') and o.object_id =OBJECT_ID_CONDITION and u.name =SCHEMA_NAME_CONDITION `; diff --git a/plugins/dbgate-plugin-mssql/src/backend/tediousDriver.js b/plugins/dbgate-plugin-mssql/src/backend/tediousDriver.js index aff9291fb..f93234bf1 100644 --- a/plugins/dbgate-plugin-mssql/src/backend/tediousDriver.js +++ b/plugins/dbgate-plugin-mssql/src/backend/tediousDriver.js @@ -2,6 +2,7 @@ const _ = require('lodash'); const stream = require('stream'); const tedious = require('tedious'); const makeUniqueColumnNames = require('./makeUniqueColumnNames'); +const { extractDbNameFromComposite } = global.DBGATE_PACKAGES['dbgate-tools']; function extractTediousColumns(columns, addDriverNativeColumn = false) { const res = columns.map(col => { @@ -39,7 +40,7 @@ async function 
tediousConnect(storedConnection) { }; if (database) { - connectionOptions.database = database; + connectionOptions.database = extractDbNameFromComposite(database); } const authentication = @@ -68,14 +69,13 @@ async function tediousConnect(storedConnection) { if (err) { reject(err); } - connection._connectionType = 'tedious'; resolve(connection); }); connection.connect(); }); } -async function tediousQueryCore(pool, sql, options) { +async function tediousQueryCore(dbhan, sql, options) { if (sql == null) { return Promise.resolve({ rows: [], @@ -103,12 +103,12 @@ async function tediousQueryCore(pool, sql, options) { ) ); }); - if (discardResult) pool.execSqlBatch(request); - else pool.execSql(request); + if (discardResult) dbhan.client.execSqlBatch(request); + else dbhan.client.execSql(request); }); } -async function tediousReadQuery(pool, sql, structure) { +async function tediousReadQuery(dbhan, sql, structure) { const pass = new stream.PassThrough({ objectMode: true, highWaterMark: 100, @@ -133,12 +133,12 @@ async function tediousReadQuery(pool, sql, structure) { ); pass.write(row); }); - pool.execSql(request); + dbhan.client.execSql(request); return pass; } -async function tediousStream(pool, sql, options) { +async function tediousStream(dbhan, sql, options) { let currentColumns = []; const handleInfo = info => { @@ -162,14 +162,14 @@ async function tediousStream(pool, sql, options) { }); }; - pool.on('infoMessage', handleInfo); - pool.on('errorMessage', handleError); + dbhan.client.on('infoMessage', handleInfo); + dbhan.client.on('errorMessage', handleError); const request = new tedious.Request(sql, (err, rowCount) => { // if (err) reject(err); // else resolve(result); options.done(); - pool.off('infoMessage', handleInfo); - pool.off('errorMessage', handleError); + dbhan.client.off('infoMessage', handleInfo); + dbhan.client.off('errorMessage', handleError); options.info({ message: `${rowCount} rows affected`, @@ -188,7 +188,7 @@ async function tediousStream(pool, 
sql, options) { ); options.row(row); }); - pool.execSqlBatch(request); + dbhan.client.execSqlBatch(request); } module.exports = { diff --git a/plugins/dbgate-plugin-mssql/src/frontend/driver.js b/plugins/dbgate-plugin-mssql/src/frontend/driver.js index 9d58bb041..f9f1e3faa 100644 --- a/plugins/dbgate-plugin-mssql/src/frontend/driver.js +++ b/plugins/dbgate-plugin-mssql/src/frontend/driver.js @@ -127,9 +127,17 @@ const driver = { return dialect; }, showConnectionField: (field, values) => - ['authType', 'server', 'port', 'user', 'password', 'defaultDatabase', 'singleDatabase', 'isReadOnly'].includes( - field - ) || + [ + 'authType', + 'server', + 'port', + 'user', + 'password', + 'defaultDatabase', + 'singleDatabase', + 'isReadOnly', + 'useSeparateSchemas', + ].includes(field) || (field == 'trustServerCertificate' && values.authType != 'sql' && values.authType != 'sspi') || (field == 'windowsDomain' && values.authType != 'sql' && values.authType != 'sspi' && values.authType != 'msentra'), // (field == 'useDatabaseUrl' && values.authType != 'sql' && values.authType != 'sspi') diff --git a/plugins/dbgate-plugin-mysql/src/backend/Analyser.js b/plugins/dbgate-plugin-mysql/src/backend/Analyser.js index 118b55efc..fb18cc683 100644 --- a/plugins/dbgate-plugin-mysql/src/backend/Analyser.js +++ b/plugins/dbgate-plugin-mysql/src/backend/Analyser.js @@ -62,13 +62,13 @@ function getColumnInfo( } class Analyser extends DatabaseAnalyser { - constructor(pool, driver, version) { - super(pool, driver, version); + constructor(dbhan, driver, version) { + super(dbhan, driver, version); } createQuery(resFileName, typeFields, replacements = {}) { let res = sql[resFileName]; - res = res.replace('#DATABASE#', this.pool._database_name); + res = res.replace('#DATABASE#', this.dbhan.database); return super.createQuery(res, typeFields, replacements); } diff --git a/plugins/dbgate-plugin-mysql/src/backend/drivers.js b/plugins/dbgate-plugin-mysql/src/backend/drivers.js index fafb327b9..7aee2984d 
100644 --- a/plugins/dbgate-plugin-mysql/src/backend/drivers.js +++ b/plugins/dbgate-plugin-mysql/src/backend/drivers.js @@ -48,17 +48,19 @@ const drivers = driverBases.map(driverBase => ({ // multipleStatements: true, }; - const connection = mysql2.createConnection(options); - connection._database_name = database; + const client = mysql2.createConnection(options); if (isReadOnly) { - await this.query(connection, 'SET SESSION TRANSACTION READ ONLY'); + await this.query(client, 'SET SESSION TRANSACTION READ ONLY'); } - return connection; + return { + client, + database, + }; }, - async close(pool) { - return pool.close(); + async close(dbhan) { + return dbhan.client.close(); }, - query(connection, sql) { + query(dbhan, sql) { if (sql == null) { return { rows: [], @@ -67,15 +69,15 @@ const drivers = driverBases.map(driverBase => ({ } return new Promise((resolve, reject) => { - connection.query(sql, function (error, results, fields) { + dbhan.client.query(sql, function (error, results, fields) { if (error) reject(error); const columns = extractColumns(fields); resolve({ rows: results && columns && results.map && results.map(row => zipDataRow(row, columns)), columns }); }); }); }, - async stream(connection, sql, options) { - const query = connection.query(sql); + async stream(dbhan, sql, options) { + const query = dbhan.client.query(sql); let columns = []; // const handleInfo = (info) => { @@ -125,8 +127,8 @@ const drivers = driverBases.map(driverBase => ({ query.on('error', handleError).on('fields', handleFields).on('result', handleRow).on('end', handleEnd); }, - async readQuery(connection, sql, structure) { - const query = connection.query(sql); + async readQuery(dbhan, sql, structure) { + const query = dbhan.client.query(sql); const pass = new stream.PassThrough({ objectMode: true, @@ -151,8 +153,8 @@ const drivers = driverBases.map(driverBase => ({ return pass; }, - async getVersion(connection) { - const { rows } = await this.query(connection, "show variables like 
'version'"); + async getVersion(dbhan) { + const { rows } = await this.query(dbhan, "show variables like 'version'"); const version = rows[0].Value; if (version) { const m = version.match(/(.*)-MariaDB-/); @@ -169,18 +171,18 @@ const drivers = driverBases.map(driverBase => ({ versionText: `MySQL ${version}`, }; }, - async listDatabases(connection) { - const { rows } = await this.query(connection, 'show databases'); + async listDatabases(dbhan) { + const { rows } = await this.query(dbhan, 'show databases'); return rows.map(x => ({ name: x.Database })); }, - async writeTable(pool, name, options) { + async writeTable(dbhan, name, options) { // @ts-ignore - return createBulkInsertStreamBase(this, stream, pool, name, options); + return createBulkInsertStreamBase(this, stream, dbhan, name, options); }, - async createBackupDumper(pool, options) { + async createBackupDumper(dbhan, options) { const { outputFile, databaseName, schemaName } = options; const res = new MySqlDumper({ - connection: pool, + connection: dbhan.client, schema: databaseName || schemaName, outputFile, }); diff --git a/plugins/dbgate-plugin-oracle/src/backend/Analyser.js b/plugins/dbgate-plugin-oracle/src/backend/Analyser.js index c8c314bbd..5b4b845c8 100644 --- a/plugins/dbgate-plugin-oracle/src/backend/Analyser.js +++ b/plugins/dbgate-plugin-oracle/src/backend/Analyser.js @@ -30,8 +30,8 @@ function getColumnInfo( } class Analyser extends DatabaseAnalyser { - constructor(pool, driver, version) { - super(pool, driver, version); + constructor(dbhan, driver, version) { + super(dbhan, driver, version); } createQuery(resFileName, typeFields, replacements = {}) { @@ -47,32 +47,32 @@ class Analyser extends DatabaseAnalyser { async _runAnalysis() { this.feedback({ analysingMessage: 'Loading tables' }); - const tables = await this.analyserQuery('tableList', ['tables'], { $owner: this.pool._schema_name }); + const tables = await this.analyserQuery('tableList', ['tables'], { $owner: this.dbhan.database }); 
this.feedback({ analysingMessage: 'Loading columns' }); - const columns = await this.analyserQuery('columns', ['tables', 'views'], { $owner: this.pool._schema_name }); + const columns = await this.analyserQuery('columns', ['tables', 'views'], { $owner: this.dbhan.database }); this.feedback({ analysingMessage: 'Loading primary keys' }); - const pkColumns = await this.analyserQuery('primaryKeys', ['tables'], { $owner: this.pool._schema_name }); + const pkColumns = await this.analyserQuery('primaryKeys', ['tables'], { $owner: this.dbhan.database }); //let fkColumns = null; this.feedback({ analysingMessage: 'Loading foreign keys' }); - const fkColumns = await this.analyserQuery('foreignKeys', ['tables'], { $owner: this.pool._schema_name }); + const fkColumns = await this.analyserQuery('foreignKeys', ['tables'], { $owner: this.dbhan.database }); this.feedback({ analysingMessage: 'Loading views' }); - const views = await this.analyserQuery('views', ['views'], { $owner: this.pool._schema_name }); + const views = await this.analyserQuery('views', ['views'], { $owner: this.dbhan.database }); this.feedback({ analysingMessage: 'Loading materialized views' }); const matviews = this.driver.dialect.materializedViews - ? await this.analyserQuery('matviews', ['matviews'], { $owner: this.pool._schema_name }) + ? 
await this.analyserQuery('matviews', ['matviews'], { $owner: this.dbhan.database }) : null; this.feedback({ analysingMessage: 'Loading routines' }); const routines = await this.analyserQuery('routines', ['procedures', 'functions'], { - $owner: this.pool._schema_name, + $owner: this.dbhan.database, }); this.feedback({ analysingMessage: 'Loading indexes' }); - const indexes = await this.analyserQuery('indexes', ['tables'], { $owner: this.pool._schema_name }); + const indexes = await this.analyserQuery('indexes', ['tables'], { $owner: this.dbhan.database }); this.feedback({ analysingMessage: 'Loading unique names' }); - const uniqueNames = await this.analyserQuery('uniqueNames', ['tables'], { $owner: this.pool._schema_name }); + const uniqueNames = await this.analyserQuery('uniqueNames', ['tables'], { $owner: this.dbhan.database }); this.feedback({ analysingMessage: 'Finalizing DB structure' }); const fkColumnsMapped = fkColumns.rows.map(x => ({ diff --git a/plugins/dbgate-plugin-oracle/src/backend/createOracleBulkInsertStream.js b/plugins/dbgate-plugin-oracle/src/backend/createOracleBulkInsertStream.js index dac1cf56d..b7eedcc6a 100644 --- a/plugins/dbgate-plugin-oracle/src/backend/createOracleBulkInsertStream.js +++ b/plugins/dbgate-plugin-oracle/src/backend/createOracleBulkInsertStream.js @@ -5,12 +5,12 @@ const _ = require('lodash'); * * @param {import('dbgate-types').EngineDriver} driver */ -function createOracleBulkInsertStream(driver, stream, pool, name, options) { +function createOracleBulkInsertStream(driver, stream, dbhan, name, options) { const fullNameQuoted = name.schemaName ? 
`${driver.dialect.quoteIdentifier(name.schemaName)}.${driver.dialect.quoteIdentifier(name.pureName)}` : driver.dialect.quoteIdentifier(name.pureName); - const writable = createBulkInsertStreamBase(driver, stream, pool, name, { + const writable = createBulkInsertStreamBase(driver, stream, dbhan, name, { ...options, // this is really not used, send method below is used instead commitAfterInsert: true, @@ -28,7 +28,7 @@ function createOracleBulkInsertStream(driver, stream, pool, name, options) { dmp.putRaw(')'); const rows = writable.buffer.map(row => _.mapKeys(row, (v, k) => `c${writable.columnNames.indexOf(k)}`)); - await pool.executeMany(dmp.s, rows, { autoCommit: true }); + await dbhan.client.executeMany(dmp.s, rows, { autoCommit: true }); writable.buffer = []; }; diff --git a/plugins/dbgate-plugin-oracle/src/backend/driver.js b/plugins/dbgate-plugin-oracle/src/backend/driver.js index 64c9078fc..b1c3f39c2 100644 --- a/plugins/dbgate-plugin-oracle/src/backend/driver.js +++ b/plugins/dbgate-plugin-oracle/src/backend/driver.js @@ -88,13 +88,15 @@ const driver = { if (database) { await client.execute(`ALTER SESSION SET CURRENT_SCHEMA = ${database}`); } - client._schema_name = database; - return client; + return { + client, + database, + }; }, - async close(pool) { - return pool.end(); + async close(dbhan) { + return dbhan.client.end(); }, - async query(client, sql) { + async query(dbhan, sql) { if (sql == null || sql.trim() == '') { return { rows: [], @@ -107,7 +109,7 @@ const driver = { sql = mtrim[1]; } - const res = await client.execute(sql); + const res = await dbhan.client.execute(sql); try { const columns = extractOracleColumns(res.metaData); return { rows: (res.rows || []).map(row => zipDataRow(row, columns)), columns }; @@ -118,7 +120,7 @@ const driver = { }; } }, - stream(client, sql, options) { + stream(dbhan, sql, options) { /* const query = new pg.Query({ text: sql, @@ -128,7 +130,7 @@ const driver = { // console.log('queryStream', sql); if 
(sql.trim().toLowerCase().startsWith('select')) { - const query = client.queryStream(sql); + const query = dbhan.client.queryStream(sql); // const consumeStream = new Promise((resolve, reject) => { let rowcount = 0; let wasHeader = false; @@ -202,7 +204,7 @@ const driver = { }); //}); } else { - client.execute(sql, (err, res) => { + dbhan.client.execute(sql, (err, res) => { if (err) { console.log('Error query', err, sql); const lineNumber = (sql.substring(0, err.offset).match(/\n/g) || []).length; @@ -237,23 +239,23 @@ const driver = { //console.log('Rows selected: ' + numrows); //client.query(query); }, - async getVersionCore(client) { + async getVersionCore(dbhan) { try { const { rows } = await this.query( - client, + dbhan, "SELECT product || ' ' || version_full as \"version\" FROM product_component_version WHERE product LIKE 'Oracle%Database%'" ); return rows[0].version.replace(' ', ' '); } catch (e) { - const { rows } = await this.query(client, 'SELECT banner as "version" FROM v$version'); + const { rows } = await this.query(dbhan, 'SELECT banner as "version" FROM v$version'); return rows[0].version; } }, - async getVersion(client) { + async getVersion(dbhan) { try { //const { rows } = await this.query(client, "SELECT banner as version FROM v$version WHERE banner LIKE 'Oracle%'"); // const { rows } = await this.query(client, 'SELECT version as "version" FROM v$instance'); - const version = await this.getVersionCore(client); + const version = await this.getVersionCore(dbhan); const m = version.match(/(\d+[a-z]+)\s+(\w+).*?(\d+)\.(\d+)/); //console.log('M', m); @@ -281,7 +283,7 @@ const driver = { }; } }, - async readQuery(client, sql, structure) { + async readQuery(dbhan, sql, structure) { /* const query = new pg.Query({ text: sql, @@ -289,7 +291,7 @@ const driver = { }); */ // console.log('readQuery', sql, structure); - const query = await client.queryStream(sql); + const query = await dbhan.client.queryStream(sql); let wasHeader = false; let columns = null; 
@@ -333,11 +335,11 @@ const driver = { return pass; }, - async writeTable(pool, name, options) { - return createOracleBulkInsertStream(this, stream, pool, name, options); + async writeTable(dbhan, name, options) { + return createOracleBulkInsertStream(this, stream, dbhan, name, options); }, - async listDatabases(client) { - const { rows } = await this.query(client, 'SELECT username as "name" from all_users order by username'); + async listDatabases(dbhan) { + const { rows } = await this.query(dbhan, 'SELECT username as "name" from all_users order by username'); return rows; }, diff --git a/plugins/dbgate-plugin-postgres/src/backend/Analyser.js b/plugins/dbgate-plugin-postgres/src/backend/Analyser.js index bca81994e..8eea10464 100644 --- a/plugins/dbgate-plugin-postgres/src/backend/Analyser.js +++ b/plugins/dbgate-plugin-postgres/src/backend/Analyser.js @@ -2,7 +2,8 @@ const fp = require('lodash/fp'); const _ = require('lodash'); const sql = require('./sql'); -const { DatabaseAnalyser, isTypeString, isTypeNumeric } = global.DBGATE_PACKAGES['dbgate-tools']; +const { DatabaseAnalyser, isTypeString, isTypeNumeric, isCompositeDbName, splitCompositeDbName } = + global.DBGATE_PACKAGES['dbgate-tools']; function normalizeTypeName(dataType) { if (dataType == 'character varying') return 'varchar'; @@ -50,8 +51,8 @@ function getColumnInfo( } class Analyser extends DatabaseAnalyser { - constructor(pool, driver, version) { - super(pool, driver, version); + constructor(dbhan, driver, version) { + super(dbhan, driver, version); } createQuery(resFileName, typeFields, replacements = {}) { @@ -312,17 +313,6 @@ class Analyser extends DatabaseAnalyser { return res; } - async readSchemaList() { - const schemaRows = await this.analyserQuery('getSchemas'); - - const schemas = schemaRows.rows.map(x => ({ - schemaName: x.schema_name, - objectId: `schemas:${x.schema_name}`, - })); - - return schemas; - } - async _getFastSnapshot() { const tableModificationsQueryData = 
this.driver.dialect.stringAgg ? await this.analyserQuery('tableModifications') @@ -374,6 +364,10 @@ class Analyser extends DatabaseAnalyser { })), }; } + + getDefaultSchemaNameCondition() { + return `not in ('pg_catalog', 'pg_toast', 'information_schema')`; + } } module.exports = Analyser; diff --git a/plugins/dbgate-plugin-postgres/src/backend/drivers.js b/plugins/dbgate-plugin-postgres/src/backend/drivers.js index 9fdc49f9b..9e14f08e1 100644 --- a/plugins/dbgate-plugin-postgres/src/backend/drivers.js +++ b/plugins/dbgate-plugin-postgres/src/backend/drivers.js @@ -4,7 +4,8 @@ const stream = require('stream'); const driverBases = require('../frontend/drivers'); const Analyser = require('./Analyser'); const pg = require('pg'); -const { getLogger, createBulkInsertStreamBase, makeUniqueColumnNames } = global.DBGATE_PACKAGES['dbgate-tools'];; +const { getLogger, createBulkInsertStreamBase, makeUniqueColumnNames, extractDbNameFromComposite } = + global.DBGATE_PACKAGES['dbgate-tools']; const logger = getLogger('postreDriver'); @@ -76,7 +77,7 @@ const drivers = driverBases.map(driverBase => ({ port: authType == 'socket' ? 
null : port, user, password, - database: database || 'postgres', + database: extractDbNameFromComposite(database) || 'postgres', ssl, application_name: 'DbGate', }; @@ -89,23 +90,26 @@ const drivers = driverBases.map(driverBase => ({ await this.query(client, 'SET SESSION CHARACTERISTICS AS TRANSACTION READ ONLY'); } - return client; + return { + client, + database, + }; }, - async close(pool) { - return pool.end(); + async close(dbhan) { + return dbhan.client.end(); }, - async query(client, sql) { + async query(dbhan, sql) { if (sql == null) { return { rows: [], columns: [], }; } - const res = await client.query({ text: sql, rowMode: 'array' }); + const res = await dbhan.client.query({ text: sql, rowMode: 'array' }); const columns = extractPostgresColumns(res); return { rows: (res.rows || []).map(row => zipDataRow(row, columns)), columns }; }, - stream(client, sql, options) { + stream(dbhan, sql, options) { const query = new pg.Query({ text: sql, rowMode: 'array', @@ -164,10 +168,10 @@ const drivers = driverBases.map(driverBase => ({ options.done(); }); - client.query(query); + dbhan.client.query(query); }, - async getVersion(client) { - const { rows } = await this.query(client, 'SELECT version()'); + async getVersion(dbhan) { + const { rows } = await this.query(dbhan, 'SELECT version()'); const { version } = rows[0]; const isCockroach = version.toLowerCase().includes('cockroachdb'); @@ -197,7 +201,7 @@ const drivers = driverBases.map(driverBase => ({ versionMinor, }; }, - async readQuery(client, sql, structure) { + async readQuery(dbhan, sql, structure) { const query = new pg.Query({ text: sql, rowMode: 'array', @@ -242,16 +246,16 @@ const drivers = driverBases.map(driverBase => ({ pass.end(); }); - client.query(query); + dbhan.client.query(query); return pass; }, - async writeTable(pool, name, options) { + async writeTable(dbhan, name, options) { // @ts-ignore - return createBulkInsertStreamBase(this, stream, pool, name, options); + return 
createBulkInsertStreamBase(this, stream, dbhan, name, options); }, - async listDatabases(client) { - const { rows } = await this.query(client, 'SELECT datname AS name FROM pg_database WHERE datistemplate = false'); + async listDatabases(dbhan) { + const { rows } = await this.query(dbhan, 'SELECT datname AS name FROM pg_database WHERE datistemplate = false'); return rows; }, @@ -267,6 +271,25 @@ const drivers = driverBases.map(driverBase => ({ }, ]; }, + + async listSchemas(dbhan) { + const schemaRows = await this.query( + dbhan, + 'select oid as "object_id", nspname as "schema_name" from pg_catalog.pg_namespace' + ); + const defaultSchemaRows = await this.query(dbhan, 'SHOW SEARCH_PATH;'); + const searchPath = defaultSchemaRows.rows[0]?.search_path?.replace('"$user",', '')?.trim(); + + logger.debug(`Loaded ${schemaRows.rows.length} postgres schemas`); + + const schemas = schemaRows.rows.map(x => ({ + schemaName: x.schema_name, + objectId: x.object_id, + isDefault: x.schema_name == searchPath, + })); + + return schemas; + }, })); module.exports = drivers; diff --git a/plugins/dbgate-plugin-postgres/src/backend/sql/columns.js b/plugins/dbgate-plugin-postgres/src/backend/sql/columns.js index 32b0b16f5..55b761ede 100644 --- a/plugins/dbgate-plugin-postgres/src/backend/sql/columns.js +++ b/plugins/dbgate-plugin-postgres/src/backend/sql/columns.js @@ -11,14 +11,12 @@ select column_default as "default_value" from information_schema.columns where - table_schema <> 'information_schema' - and table_schema <> 'pg_catalog' - and table_schema !~ '^pg_toast' - and table_schema !~ '^_timescaledb_' + table_schema !~ '^_timescaledb_' and ( ('tables:' || table_schema || '.' || table_name) =OBJECT_ID_CONDITION or ('views:' || table_schema || '.' 
|| table_name) =OBJECT_ID_CONDITION ) + and table_schema =SCHEMA_NAME_CONDITION order by ordinal_position `; \ No newline at end of file diff --git a/plugins/dbgate-plugin-postgres/src/backend/sql/fk_key_column_usage.js b/plugins/dbgate-plugin-postgres/src/backend/sql/fk_key_column_usage.js index f7f6e300a..3667cad78 100644 --- a/plugins/dbgate-plugin-postgres/src/backend/sql/fk_key_column_usage.js +++ b/plugins/dbgate-plugin-postgres/src/backend/sql/fk_key_column_usage.js @@ -7,5 +7,5 @@ select basecol.table_name, basecol.ordinal_position from information_schema.key_column_usage basecol -where ('tables:' || basecol.table_schema || '.' || basecol.table_name) =OBJECT_ID_CONDITION +where ('tables:' || basecol.table_schema || '.' || basecol.table_name) =OBJECT_ID_CONDITION and basecol.table_schema =SCHEMA_NAME_CONDITION `; diff --git a/plugins/dbgate-plugin-postgres/src/backend/sql/fk_referential_constraints.js b/plugins/dbgate-plugin-postgres/src/backend/sql/fk_referential_constraints.js index a93584d63..243272efb 100644 --- a/plugins/dbgate-plugin-postgres/src/backend/sql/fk_referential_constraints.js +++ b/plugins/dbgate-plugin-postgres/src/backend/sql/fk_referential_constraints.js @@ -7,4 +7,5 @@ select fk.unique_constraint_name as "unique_constraint_name", fk.unique_constraint_schema as "unique_constraint_schema" from information_schema.referential_constraints fk +where fk.constraint_schema =SCHEMA_NAME_CONDITION `; diff --git a/plugins/dbgate-plugin-postgres/src/backend/sql/fk_table_constraints.js b/plugins/dbgate-plugin-postgres/src/backend/sql/fk_table_constraints.js index faca2c5f3..f381ff0e9 100644 --- a/plugins/dbgate-plugin-postgres/src/backend/sql/fk_table_constraints.js +++ b/plugins/dbgate-plugin-postgres/src/backend/sql/fk_table_constraints.js @@ -5,5 +5,5 @@ select base.constraint_name as "constraint_name", base.constraint_schema as "constraint_schema" from information_schema.table_constraints base -where ('tables:' || base.table_schema || '.' 
|| base.table_name) =OBJECT_ID_CONDITION +where ('tables:' || base.table_schema || '.' || base.table_name) =OBJECT_ID_CONDITION and base.table_schema =SCHEMA_NAME_CONDITION `; diff --git a/plugins/dbgate-plugin-postgres/src/backend/sql/geographyColumns.js b/plugins/dbgate-plugin-postgres/src/backend/sql/geographyColumns.js index 343d9c5d6..0dac1a0fb 100644 --- a/plugins/dbgate-plugin-postgres/src/backend/sql/geographyColumns.js +++ b/plugins/dbgate-plugin-postgres/src/backend/sql/geographyColumns.js @@ -4,5 +4,5 @@ select f_table_name as "pure_name", f_geography_column as "column_name" from public.geography_columns -where ('tables:' || f_table_schema || '.' || f_table_name) =OBJECT_ID_CONDITION +where ('tables:' || f_table_schema || '.' || f_table_name) =OBJECT_ID_CONDITION and f_table_schema =SCHEMA_NAME_CONDITION `; \ No newline at end of file diff --git a/plugins/dbgate-plugin-postgres/src/backend/sql/geometryColumns.js b/plugins/dbgate-plugin-postgres/src/backend/sql/geometryColumns.js index 94b8d5516..386ea112e 100644 --- a/plugins/dbgate-plugin-postgres/src/backend/sql/geometryColumns.js +++ b/plugins/dbgate-plugin-postgres/src/backend/sql/geometryColumns.js @@ -4,5 +4,5 @@ select f_table_name as "pure_name", f_geometry_column as "column_name" from public.geometry_columns -where ('tables:' || f_table_schema || '.' || f_table_name) =OBJECT_ID_CONDITION +where ('tables:' || f_table_schema || '.' 
|| f_table_name) =OBJECT_ID_CONDITION and f_table_schema =SCHEMA_NAME_CONDITION `; \ No newline at end of file diff --git a/plugins/dbgate-plugin-postgres/src/backend/sql/getSchemas.js b/plugins/dbgate-plugin-postgres/src/backend/sql/getSchemas.js deleted file mode 100644 index 88f8972db..000000000 --- a/plugins/dbgate-plugin-postgres/src/backend/sql/getSchemas.js +++ /dev/null @@ -1 +0,0 @@ -module.exports = `select oid as "object_id", nspname as "schema_name" from pg_catalog.pg_namespace`; diff --git a/plugins/dbgate-plugin-postgres/src/backend/sql/index.js b/plugins/dbgate-plugin-postgres/src/backend/sql/index.js index 9fb45a93b..53a858ab5 100644 --- a/plugins/dbgate-plugin-postgres/src/backend/sql/index.js +++ b/plugins/dbgate-plugin-postgres/src/backend/sql/index.js @@ -12,7 +12,6 @@ const matviewColumns = require('./matviewColumns'); const indexes = require('./indexes'); const indexcols = require('./indexcols'); const uniqueNames = require('./uniqueNames'); -const getSchemas = require('./getSchemas'); const geometryColumns = require('./geometryColumns'); const geographyColumns = require('./geographyColumns'); @@ -40,5 +39,4 @@ module.exports = { uniqueNames, geometryColumns, geographyColumns, - getSchemas, }; diff --git a/plugins/dbgate-plugin-postgres/src/backend/sql/indexcols.js b/plugins/dbgate-plugin-postgres/src/backend/sql/indexcols.js index 4acf4fbd2..e14c8a568 100644 --- a/plugins/dbgate-plugin-postgres/src/backend/sql/indexcols.js +++ b/plugins/dbgate-plugin-postgres/src/backend/sql/indexcols.js @@ -19,6 +19,7 @@ module.exports = ` and t.relnamespace = c.oid and c.nspname != 'pg_catalog' and ('tables:' || c.nspname || '.' 
|| t.relname) =OBJECT_ID_CONDITION + and c.nspname =SCHEMA_NAME_CONDITION order by t.relname `; diff --git a/plugins/dbgate-plugin-postgres/src/backend/sql/indexes.js b/plugins/dbgate-plugin-postgres/src/backend/sql/indexes.js index 8cb8de247..53bd65053 100644 --- a/plugins/dbgate-plugin-postgres/src/backend/sql/indexes.js +++ b/plugins/dbgate-plugin-postgres/src/backend/sql/indexes.js @@ -21,6 +21,7 @@ module.exports = ` and t.relnamespace = c.oid and c.nspname != 'pg_catalog' and ('tables:' || c.nspname || '.' || t.relname) =OBJECT_ID_CONDITION + and c.nspname =SCHEMA_NAME_CONDITION order by t.relname `; diff --git a/plugins/dbgate-plugin-postgres/src/backend/sql/matviewColumns.js b/plugins/dbgate-plugin-postgres/src/backend/sql/matviewColumns.js index 292d90c41..6330bba90 100644 --- a/plugins/dbgate-plugin-postgres/src/backend/sql/matviewColumns.js +++ b/plugins/dbgate-plugin-postgres/src/backend/sql/matviewColumns.js @@ -12,6 +12,7 @@ FROM pg_catalog.pg_class WHERE pg_class.relkind = 'm' AND pg_attribute.attnum >= 1 AND ('matviews:' || pg_namespace.nspname || '.' 
|| pg_class.relname) =OBJECT_ID_CONDITION + AND pg_namespace.nspname =SCHEMA_NAME_CONDITION ORDER BY pg_attribute.attnum `; diff --git a/plugins/dbgate-plugin-postgres/src/backend/sql/matviewModifications.js b/plugins/dbgate-plugin-postgres/src/backend/sql/matviewModifications.js index f8ad85ad4..abade80fd 100644 --- a/plugins/dbgate-plugin-postgres/src/backend/sql/matviewModifications.js +++ b/plugins/dbgate-plugin-postgres/src/backend/sql/matviewModifications.js @@ -4,5 +4,5 @@ select schemaname as "schema_name", md5(definition) as "hash_code" from - pg_catalog.pg_matviews WHERE schemaname NOT LIKE 'pg_%' + pg_catalog.pg_matviews WHERE schemaname NOT LIKE 'pg_%' AND schemaname =SCHEMA_NAME_CONDITION `; diff --git a/plugins/dbgate-plugin-postgres/src/backend/sql/matviews.js b/plugins/dbgate-plugin-postgres/src/backend/sql/matviews.js index de1105d8f..9210e4322 100644 --- a/plugins/dbgate-plugin-postgres/src/backend/sql/matviews.js +++ b/plugins/dbgate-plugin-postgres/src/backend/sql/matviews.js @@ -7,4 +7,5 @@ select from pg_catalog.pg_matviews WHERE schemaname NOT LIKE 'pg_%' and ('matviews:' || schemaname || '.' 
|| matviewname) =OBJECT_ID_CONDITION + and schemaname =SCHEMA_NAME_CONDITION `; diff --git a/plugins/dbgate-plugin-postgres/src/backend/sql/primaryKeys.js b/plugins/dbgate-plugin-postgres/src/backend/sql/primaryKeys.js index 6df4ca5c0..bd0fb1b51 100644 --- a/plugins/dbgate-plugin-postgres/src/backend/sql/primaryKeys.js +++ b/plugins/dbgate-plugin-postgres/src/backend/sql/primaryKeys.js @@ -8,11 +8,9 @@ select from information_schema.table_constraints inner join information_schema.key_column_usage on table_constraints.table_name = key_column_usage.table_name and table_constraints.constraint_name = key_column_usage.constraint_name where - table_constraints.table_schema <> 'information_schema' - and table_constraints.table_schema <> 'pg_catalog' - and table_constraints.table_schema !~ '^pg_toast' - and table_constraints.table_schema !~ '^_timescaledb_' + table_constraints.table_schema !~ '^_timescaledb_' and table_constraints.constraint_type = 'PRIMARY KEY' and ('tables:' || table_constraints.table_schema || '.' 
|| table_constraints.table_name) =OBJECT_ID_CONDITION + and table_constraints.table_schema =SCHEMA_NAME_CONDITION order by key_column_usage.ordinal_position `; diff --git a/plugins/dbgate-plugin-postgres/src/backend/sql/routineModifications.js b/plugins/dbgate-plugin-postgres/src/backend/sql/routineModifications.js index 1eae4ca88..2b3433634 100644 --- a/plugins/dbgate-plugin-postgres/src/backend/sql/routineModifications.js +++ b/plugins/dbgate-plugin-postgres/src/backend/sql/routineModifications.js @@ -5,6 +5,6 @@ select md5(routine_definition) as "hash_code", routine_type as "object_type" from - information_schema.routines where routine_schema != 'information_schema' and routine_schema != 'pg_catalog' and routine_schema !~ '^_timescaledb_' - and routine_type in ('PROCEDURE', 'FUNCTION') + information_schema.routines where routine_schema !~ '^_timescaledb_' + and routine_type in ('PROCEDURE', 'FUNCTION') and routine_schema =SCHEMA_NAME_CONDITION `; diff --git a/plugins/dbgate-plugin-postgres/src/backend/sql/routines.js b/plugins/dbgate-plugin-postgres/src/backend/sql/routines.js index d864780c7..bf7015297 100644 --- a/plugins/dbgate-plugin-postgres/src/backend/sql/routines.js +++ b/plugins/dbgate-plugin-postgres/src/backend/sql/routines.js @@ -8,7 +8,8 @@ select $typeAggFunc(data_type $typeAggParam) as "data_type", max(external_language) as "language" from - information_schema.routines where routine_schema != 'information_schema' and routine_schema != 'pg_catalog' and routine_schema !~ '^_timescaledb_' + information_schema.routines where routine_schema !~ '^_timescaledb_' + and routine_schema =SCHEMA_NAME_CONDITION and ( (routine_type = 'PROCEDURE' and ('procedures:' || routine_schema || '.' 
|| routine_name) =OBJECT_ID_CONDITION) or diff --git a/plugins/dbgate-plugin-postgres/src/backend/sql/tableList.js b/plugins/dbgate-plugin-postgres/src/backend/sql/tableList.js index 1e9f44db2..9e9f2b735 100644 --- a/plugins/dbgate-plugin-postgres/src/backend/sql/tableList.js +++ b/plugins/dbgate-plugin-postgres/src/backend/sql/tableList.js @@ -3,9 +3,7 @@ select infoTables.table_schema as "schema_name", infoTables.table_name as "pure_ from information_schema.tables infoTables where infoTables.table_type not like '%VIEW%' and ('tables:' || infoTables.table_schema || '.' || infoTables.table_name) =OBJECT_ID_CONDITION -and infoTables.table_schema <> 'pg_catalog' -and infoTables.table_schema <> 'information_schema' and infoTables.table_schema <> 'pg_internal' -and infoTables.table_schema !~ '^pg_toast' and infoTables.table_schema !~ '^_timescaledb_' +and infoTables.table_schema =SCHEMA_NAME_CONDITION `; diff --git a/plugins/dbgate-plugin-postgres/src/backend/sql/tableModifications.js b/plugins/dbgate-plugin-postgres/src/backend/sql/tableModifications.js index 1e074769b..e26388b05 100644 --- a/plugins/dbgate-plugin-postgres/src/backend/sql/tableModifications.js +++ b/plugins/dbgate-plugin-postgres/src/backend/sql/tableModifications.js @@ -21,9 +21,7 @@ select infoTables.table_schema as "schema_name", infoTables.table_name as "pure_ from information_schema.tables infoTables where infoTables.table_type not like '%VIEW%' and ('tables:' || infoTables.table_schema || '.' 
|| infoTables.table_name) =OBJECT_ID_CONDITION -and infoTables.table_schema <> 'pg_catalog' -and infoTables.table_schema <> 'information_schema' and infoTables.table_schema <> 'pg_internal' -and infoTables.table_schema !~ '^pg_toast' and infoTables.table_schema !~ '^_timescaledb_' +and infoTables.table_schema =SCHEMA_NAME_CONDITION `; diff --git a/plugins/dbgate-plugin-postgres/src/backend/sql/uniqueNames.js b/plugins/dbgate-plugin-postgres/src/backend/sql/uniqueNames.js index a6eec71b4..f22b1c94f 100644 --- a/plugins/dbgate-plugin-postgres/src/backend/sql/uniqueNames.js +++ b/plugins/dbgate-plugin-postgres/src/backend/sql/uniqueNames.js @@ -1,3 +1,3 @@ module.exports = ` - select conname as "constraint_name" from pg_constraint where contype = 'u' + select conname as "constraint_name" from pg_constraint where contype = 'u' and connamespace =SCHEMA_NAME_CONDITION `; diff --git a/plugins/dbgate-plugin-postgres/src/backend/sql/viewModifications.js b/plugins/dbgate-plugin-postgres/src/backend/sql/viewModifications.js index e344bbddc..c23633923 100644 --- a/plugins/dbgate-plugin-postgres/src/backend/sql/viewModifications.js +++ b/plugins/dbgate-plugin-postgres/src/backend/sql/viewModifications.js @@ -4,5 +4,5 @@ select table_schema as "schema_name", md5(view_definition) as "hash_code" from - information_schema.views where table_schema != 'information_schema' and table_schema != 'pg_catalog' and table_schema !~ '^_timescaledb_' + information_schema.views where table_schema != 'information_schema' and table_schema != 'pg_catalog' and table_schema !~ '^_timescaledb_' and table_schema =SCHEMA_NAME_CONDITION `; diff --git a/plugins/dbgate-plugin-postgres/src/backend/sql/views.js b/plugins/dbgate-plugin-postgres/src/backend/sql/views.js index a009f3ac5..77e502ad4 100644 --- a/plugins/dbgate-plugin-postgres/src/backend/sql/views.js +++ b/plugins/dbgate-plugin-postgres/src/backend/sql/views.js @@ -6,6 +6,6 @@ select md5(view_definition) as "hash_code" from 
information_schema.views -where table_schema != 'information_schema' and table_schema != 'pg_catalog' and table_schema !~ '^_timescaledb_' +where table_schema !~ '^_timescaledb_' and table_schema =SCHEMA_NAME_CONDITION and ('views:' || table_schema || '.' || table_name) =OBJECT_ID_CONDITION `; diff --git a/plugins/dbgate-plugin-postgres/src/frontend/drivers.js b/plugins/dbgate-plugin-postgres/src/frontend/drivers.js index af425036e..8b366ecda 100644 --- a/plugins/dbgate-plugin-postgres/src/frontend/drivers.js +++ b/plugins/dbgate-plugin-postgres/src/frontend/drivers.js @@ -136,7 +136,9 @@ const postgresDriverBase = { } return ( - ['authType', 'user', 'password', 'defaultDatabase', 'singleDatabase', 'isReadOnly'].includes(field) || + ['authType', 'user', 'password', 'defaultDatabase', 'singleDatabase', 'isReadOnly', 'useSeparateSchemas'].includes( + field + ) || (values.authType == 'socket' && ['socketPath'].includes(field)) || (values.authType != 'socket' && ['server', 'port'].includes(field)) ); @@ -242,7 +244,8 @@ const redshiftDriver = { title: 'Amazon Redshift', defaultPort: 5439, databaseUrlPlaceholder: 'e.g. 
redshift-cluster-1.xxxx.redshift.amazonaws.com:5439/dev', - showConnectionField: (field, values) => ['databaseUrl', 'user', 'password', 'isReadOnly'].includes(field), + showConnectionField: (field, values) => + ['databaseUrl', 'user', 'password', 'isReadOnly', 'useSeparateSchemas'].includes(field), beforeConnectionSave: connection => { const { databaseUrl } = connection; if (databaseUrl) { diff --git a/plugins/dbgate-plugin-redis/src/backend/Analyser.js b/plugins/dbgate-plugin-redis/src/backend/Analyser.js index 37749ff72..6000ff063 100644 --- a/plugins/dbgate-plugin-redis/src/backend/Analyser.js +++ b/plugins/dbgate-plugin-redis/src/backend/Analyser.js @@ -1,8 +1,8 @@ const { DatabaseAnalyser } = global.DBGATE_PACKAGES['dbgate-tools'];; class Analyser extends DatabaseAnalyser { - constructor(pool, driver) { - super(pool, driver); + constructor(dbhan, driver) { + super(dbhan, driver); } } diff --git a/plugins/dbgate-plugin-redis/src/backend/driver.js b/plugins/dbgate-plugin-redis/src/backend/driver.js index 576d3c213..db2d097aa 100644 --- a/plugins/dbgate-plugin-redis/src/backend/driver.js +++ b/plugins/dbgate-plugin-redis/src/backend/driver.js @@ -83,32 +83,34 @@ const driver = { analyserClass: Analyser, async connect({ server, port, user, password, database, useDatabaseUrl, databaseUrl, treeKeySeparator }) { let db = 0; - let pool; + let client; if (useDatabaseUrl) { - pool = new Redis(databaseUrl); + client = new Redis(databaseUrl); } else { if (_.isString(database) && database.startsWith('db')) db = parseInt(database.substring(2)); if (_.isNumber(database)) db = database; - pool = new Redis({ + client = new Redis({ host: server, port, username: user, password, db, }); - pool.__treeKeySeparator = treeKeySeparator || ':'; } - return pool; + return { + client, + treeKeySeparator: treeKeySeparator || ':', + }; }, // @ts-ignore - async query(pool, sql) { + async query(dbhan, sql) { return { rows: [], columns: [], }; }, - async stream(pool, sql, options) { + async 
stream(dbhan, sql, options) { const parts = splitCommandLine(sql); if (parts.length < 1) { options.done(); @@ -116,7 +118,7 @@ const driver = { } const command = parts[0].toLowerCase(); const args = parts.slice(1); - const res = await pool.call(command, ...args); + const res = await dbhan.client.call(command, ...args); options.info({ message: JSON.stringify(res), @@ -126,7 +128,7 @@ const driver = { options.done(); }, - async readQuery(pool, sql, structure) { + async readQuery(dbhan, sql, structure) { const pass = new stream.PassThrough({ objectMode: true, highWaterMark: 100, @@ -139,11 +141,11 @@ const driver = { return pass; }, - async writeTable(pool, name, options) { - return createBulkInsertStreamBase(this, stream, pool, name, options); + async writeTable(dbhan, name, options) { + return createBulkInsertStreamBase(this, stream, dbhan, name, options); }, - async info(pool) { - const info = await pool.info(); + async info(dbhan) { + const info = await dbhan.client.info(); return _.fromPairs( info .split('\n') @@ -151,30 +153,30 @@ const driver = { .map((x) => x.split(':')) ); }, - async getVersion(pool) { - const info = await this.info(pool); + async getVersion(dbhan) { + const info = await this.info(dbhan); return { version: info.redis_version, versionText: `Redis ${info.redis_version}`, }; }, - async listDatabases(pool) { - const info = await this.info(pool); + async listDatabases(dbhan) { + const info = await this.info(dbhan); return _.range(16).map((index) => ({ name: `db${index}`, extInfo: info[`db${index}`], sortOrder: index })); }, - async loadKeys(pool, root = '', filter = null) { - const keys = await this.getKeys(pool, root ? `${root}${pool.__treeKeySeparator}*` : '*'); + async loadKeys(dbhan, root = '', filter = null) { + const keys = await this.getKeys(dbhan, root ? 
`${root}${dbhan.__treeKeySeparator}*` : '*'); const keysFiltered = keys.filter((x) => filterName(filter, x)); - const res = this.extractKeysFromLevel(pool, root, keysFiltered); - await this.enrichKeyInfo(pool, res); + const res = this.extractKeysFromLevel(dbhan, root, keysFiltered); + await this.enrichKeyInfo(dbhan, res); return res; }, - async exportKeys(pool, options) { - const dump = new RedisDump({ client: pool }); + async exportKeys(dbhan, options) { + const dump = new RedisDump({ client: dbhan.client }); return new Promise((resolve, reject) => { dump.export({ type: 'redis', @@ -187,24 +189,24 @@ const driver = { }); }, - async getKeys(pool, keyQuery = '*') { + async getKeys(dbhan, keyQuery = '*') { const res = []; let cursor = 0; do { - const [strCursor, keys] = await pool.scan(cursor, 'MATCH', keyQuery, 'COUNT', 100); + const [strCursor, keys] = await dbhan.client.scan(cursor, 'MATCH', keyQuery, 'COUNT', 100); res.push(...keys); cursor = parseInt(strCursor); } while (cursor > 0); return res; }, - extractKeysFromLevel(pool, root, keys) { - const prefix = root ? `${root}${pool.__treeKeySeparator}` : ''; - const rootSplit = _.compact(root.split(pool.__treeKeySeparator)); + extractKeysFromLevel(dbhan, root, keys) { + const prefix = root ? 
`${root}${dbhan.treeKeySeparator}` : ''; + const rootSplit = _.compact(root.split(dbhan.treeKeySeparator)); const res = {}; for (const key of keys) { if (!key.startsWith(prefix)) continue; - const keySplit = key.split(pool.__treeKeySeparator); + const keySplit = key.split(dbhan.treeKeySeparator); if (keySplit.length > rootSplit.length) { const text = keySplit[rootSplit.length]; if (keySplit.length == rootSplit.length + 1) { @@ -218,9 +220,9 @@ const driver = { res[dctKey].count++; } else { res[dctKey] = { - text: text + pool.__treeKeySeparator + '*', + text: text + dbhan.treeKeySeparator + '*', type: 'dir', - root: keySplit.slice(0, rootSplit.length + 1).join(pool.__treeKeySeparator), + root: keySplit.slice(0, rootSplit.length + 1).join(dbhan.treeKeySeparator), count: 1, }; } @@ -230,46 +232,46 @@ const driver = { return Object.values(res); }, - async getKeyCardinality(pool, key, type) { + async getKeyCardinality(dbhan, key, type) { switch (type) { case 'list': - return pool.llen(key); + return dbhan.client.llen(key); case 'set': - return pool.scard(key); + return dbhan.client.scard(key); case 'zset': - return pool.zcard(key); + return dbhan.client.zcard(key); case 'stream': - return pool.xlen(key); + return dbhan.client.xlen(key); case 'hash': - return pool.hlen(key); + return dbhan.client.hlen(key); } }, - async enrichOneKeyInfo(pool, item) { - item.type = await pool.type(item.key); - item.count = await this.getKeyCardinality(pool, item.key, item.type); + async enrichOneKeyInfo(dbhan, item) { + item.type = await dbhan.client.type(item.key); + item.count = await this.getKeyCardinality(dbhan, item.key, item.type); }, - async enrichKeyInfo(pool, levelInfo) { + async enrichKeyInfo(dbhan, levelInfo) { await async.eachLimit( levelInfo.filter((x) => x.key), 10, - async (item) => await this.enrichOneKeyInfo(pool, item) + async (item) => await this.enrichOneKeyInfo(dbhan, item) ); }, - async loadKeyInfo(pool, key) { + async loadKeyInfo(dbhan, key) { const res = {}; - 
const type = await pool.type(key); + const type = await dbhan.client.type(key); res.key = key; res.type = type; - res.ttl = await pool.ttl(key); - res.count = await this.getKeyCardinality(pool, key, type); + res.ttl = await dbhan.client.ttl(key); + res.count = await this.getKeyCardinality(dbhan, key, type); switch (type) { case 'string': - res.value = await pool.get(key); + res.value = await dbhan.client.get(key); break; // case 'list': // res.tableColumns = [{ name: 'value' }]; @@ -297,16 +299,16 @@ const driver = { return res; }, - async deleteBranch(pool, keyQuery) { - const keys = await this.getKeys(pool, keyQuery); + async deleteBranch(dbhan, keyQuery) { + const keys = await this.getKeys(dbhan, keyQuery); const keysChunked = _.chunk(keys, 10); - await async.eachLimit(keysChunked, 10, async (keysChunk) => await pool.del(...keysChunk)); + await async.eachLimit(keysChunked, 10, async (keysChunk) => await dbhan.client.del(...keysChunk)); }, - async callMethod(pool, method, args) { + async callMethod(dbhan, method, args) { switch (method) { case 'mdel': - return await this.deleteBranch(pool, args[0]); + return await this.deleteBranch(dbhan, args[0]); case 'xaddjson': let json; try { @@ -314,44 +316,44 @@ const driver = { } catch (e) { throw new Error('Value must be valid JSON. ' + e.message); } - return await pool.xadd(args[0], args[1] || '*', ..._.flatten(_.toPairs(json))); + return await dbhan.client.xadd(args[0], args[1] || '*', ..._.flatten(_.toPairs(json))); } - return await pool[method](...args); + return await dbhan.client[method](...args); }, - async loadKeyTableRange(pool, key, cursor, count) { - const type = await pool.type(key); + async loadKeyTableRange(dbhan, key, cursor, count) { + const type = await dbhan.client.type(key); switch (type) { case 'list': { - const res = await pool.lrange(key, cursor, cursor + count); + const res = await dbhan.client.lrange(key, cursor, cursor + count); return { cursor: res.length > count ? 
cursor + count : 0, items: res.map((value) => ({ value })).slice(0, count), }; } case 'set': { - const res = await pool.sscan(key, cursor, 'COUNT', count); + const res = await dbhan.client.sscan(key, cursor, 'COUNT', count); return { cursor: parseInt(res[0]), items: res[1].map((value) => ({ value })), }; } case 'zset': { - const res = await pool.zscan(key, cursor, 'COUNT', count); + const res = await dbhan.client.zscan(key, cursor, 'COUNT', count); return { cursor: parseInt(res[0]), items: _.chunk(res[1], 2).map((item) => ({ value: item[0], score: item[1] })), }; } case 'hash': { - const res = await pool.hscan(key, cursor, 'COUNT', count); + const res = await dbhan.client.hscan(key, cursor, 'COUNT', count); return { cursor: parseInt(res[0]), items: _.chunk(res[1], 2).map((item) => ({ key: item[0], value: item[1] })), }; } case 'stream': { - const res = await pool.xrange(key, cursor == 0 ? '-' : cursor, '+', 'COUNT', count); + const res = await dbhan.client.xrange(key, cursor == 0 ? '-' : cursor, '+', 'COUNT', count); let newCursor = 0; if (res.length > 0) { const id = res[res.length - 1][0]; diff --git a/plugins/dbgate-plugin-sqlite/src/backend/Analyser.js b/plugins/dbgate-plugin-sqlite/src/backend/Analyser.js index d699b497f..e61bea08e 100644 --- a/plugins/dbgate-plugin-sqlite/src/backend/Analyser.js +++ b/plugins/dbgate-plugin-sqlite/src/backend/Analyser.js @@ -16,8 +16,8 @@ SELECT `; class Analyser extends DatabaseAnalyser { - constructor(pool, driver, version) { - super(pool, driver, version); + constructor(dbhan, driver, version) { + super(dbhan, driver, version); } async _computeSingleObjectId() { @@ -26,8 +26,8 @@ class Analyser extends DatabaseAnalyser { } async _getFastSnapshot() { - const objects = await this.driver.query(this.pool, "select * from sqlite_master where type='table' or type='view'"); - const indexcols = await this.driver.query(this.pool, indexcolsQuery); + const objects = await this.driver.query(this.dbhan, "select * from sqlite_master where 
type='table' or type='view'"); + const indexcols = await this.driver.query(this.dbhan, indexcolsQuery); return { tables: objects.rows @@ -79,7 +79,7 @@ class Analyser extends DatabaseAnalyser { createSql: x.sql, })); - const indexcols = await this.driver.query(this.pool, indexcolsQuery); + const indexcols = await this.driver.query(this.dbhan, indexcolsQuery); for (const tableName of this.getRequestedObjectPureNames( 'tables', @@ -88,7 +88,7 @@ class Analyser extends DatabaseAnalyser { const tableObj = tableList.find((x) => x.pureName == tableName); if (!tableObj) continue; - const info = await this.driver.query(this.pool, `pragma table_info('${tableName}')`); + const info = await this.driver.query(this.dbhan, `pragma table_info('${tableName}')`); tableObj.columns = info.rows.map((col) => ({ columnName: col.name, dataType: col.type, @@ -132,7 +132,7 @@ class Analyser extends DatabaseAnalyser { }; } - const fklist = await this.driver.query(this.pool, `pragma foreign_key_list('${tableName}')`); + const fklist = await this.driver.query(this.dbhan, `pragma foreign_key_list('${tableName}')`); tableObj.foreignKeys = _.values(_.groupBy(fklist.rows, 'id')).map((fkcols) => { const fkcol = fkcols[0]; const fk = { @@ -157,7 +157,7 @@ class Analyser extends DatabaseAnalyser { const viewObj = viewList.find((x) => x.pureName == viewName); if (!viewObj) continue; - const info = await this.driver.query(this.pool, `pragma table_info('${viewName}')`); + const info = await this.driver.query(this.dbhan, `pragma table_info('${viewName}')`); viewObj.columns = info.rows.map((col) => ({ columnName: col.name, dataType: col.type, diff --git a/plugins/dbgate-plugin-sqlite/src/backend/driver.js b/plugins/dbgate-plugin-sqlite/src/backend/driver.js index 378bd2fdf..4de36347e 100644 --- a/plugins/dbgate-plugin-sqlite/src/backend/driver.js +++ b/plugins/dbgate-plugin-sqlite/src/backend/driver.js @@ -26,8 +26,8 @@ async function waitForDrain(stream) { }); } -function runStreamItem(client, sql, 
options, rowCounter) { - const stmt = client.prepare(sql); +function runStreamItem(dbhan, sql, options, rowCounter) { + const stmt = dbhan.client.prepare(sql); if (stmt.reader) { const columns = stmt.columns(); // const rows = stmt.all(); @@ -64,15 +64,17 @@ const driver = { analyserClass: Analyser, async connect({ databaseFile, isReadOnly }) { const Database = getBetterSqlite(); - const pool = new Database(databaseFile, { readonly: !!isReadOnly }); - return pool; + const client = new Database(databaseFile, { readonly: !!isReadOnly }); + return { + client, + }; }, - async close(pool) { - return pool.close(); + async close(dbhan) { + return dbhan.client.close(); }, // @ts-ignore - async query(pool, sql) { - const stmt = pool.prepare(sql); + async query(dbhan, sql) { + const stmt = dbhan.client.prepare(sql); // stmt.raw(); if (stmt.reader) { const columns = stmt.columns(); @@ -92,14 +94,14 @@ const driver = { }; } }, - async stream(client, sql, options) { + async stream(dbhan, sql, options) { const sqlSplitted = splitQuery(sql, sqliteSplitterOptions); const rowCounter = { count: 0, date: null }; - const inTransaction = client.transaction(() => { + const inTransaction = dbhan.client.transaction(() => { for (const sqlItem of sqlSplitted) { - runStreamItem(client, sqlItem, options, rowCounter); + runStreamItem(dbhan, sqlItem, options, rowCounter); } if (rowCounter.date) { @@ -128,10 +130,10 @@ const driver = { options.done(); // return stream; }, - async script(client, sql) { - const inTransaction = client.transaction(() => { + async script(dbhan, sql) { + const inTransaction = dbhan.client.transaction(() => { for (const sqlItem of splitQuery(sql, this.getQuerySplitterOptions('script'))) { - const stmt = client.prepare(sqlItem); + const stmt = dbhan.client.prepare(sqlItem); stmt.run(); } }); @@ -149,13 +151,13 @@ const driver = { } pass.end(); }, - async readQuery(pool, sql, structure) { + async readQuery(dbhan, sql, structure) { const pass = new stream.PassThrough({ 
objectMode: true, highWaterMark: 100, }); - const stmt = pool.prepare(sql); + const stmt = dbhan.client.prepare(sql); const columns = stmt.columns(); pass.write({ @@ -171,11 +173,11 @@ const driver = { return pass; }, - async writeTable(pool, name, options) { - return createBulkInsertStreamBase(this, stream, pool, name, options); + async writeTable(dbhan, name, options) { + return createBulkInsertStreamBase(this, stream, dbhan, name, options); }, - async getVersion(pool) { - const { rows } = await this.query(pool, 'select sqlite_version() as version'); + async getVersion(dbhan) { + const { rows } = await this.query(dbhan, 'select sqlite_version() as version'); const { version } = rows[0]; return {