This commit is contained in:
SPRINX0\prochazka
2025-06-12 16:55:58 +02:00
44 changed files with 1437 additions and 22 deletions

View File

@@ -102,3 +102,14 @@ jobs:
image: ghcr.io/tursodatabase/libsql-server:latest
ports:
- '8080:8080'
firebird:
image: firebirdsql/firebird:latest
env:
FIREBIRD_DATABASE: mydatabase.fdb
FIREBIRD_USER: dbuser
FIREBIRD_PASSWORD: dbpassword
ISC_PASSWORD: masterkey
FIREBIRD_TRACE: false
FIREBIRD_USE_LEGACY_AUTH: true
ports:
- '3050:3050'

View File

@@ -29,7 +29,7 @@ async function testDatabaseDiff(conn, driver, mangle, createObject = null) {
driver,
`create table ~t2 (
~id int not null primary key,
~t1_id int null references ~t1(~id)
~t1_id int ${driver.dialect.implicitNullDeclaration ? '' : 'null'} references ~t1(~id)
)`
);

View File

@@ -60,7 +60,9 @@ async function testTableDiff(engine, conn, driver, mangle) {
if (!engine.skipReferences) {
const query = formatQueryWithoutParams(
driver,
`create table ~t2 (~id int not null primary key, ~fkval int null references ~t1(~col_ref))`
`create table ~t2 (~id int not null primary key, ~fkval int ${
driver.dialect.implicitNullDeclaration ? '' : 'null'
} references ~t1(~col_ref))`
);
await driver.query(conn, transformSqlForEngine(engine, query));

View File

@@ -106,7 +106,9 @@ async function testDatabaseDeploy(engine, conn, driver, dbModelsYaml, options) {
for (const loadedDbModel of dbModelsYaml) {
if (_.isString(loadedDbModel)) {
await driver.script(conn, formatQueryWithoutParams(driver, loadedDbModel));
await driver.script(conn, formatQueryWithoutParams(driver, loadedDbModel), {
useTransaction: engine.runDeployInTransaction,
});
} else {
const { sql, isEmpty } = await generateDeploySql({
systemConnection: conn.isPreparedOnly ? undefined : conn,
@@ -131,6 +133,7 @@ async function testDatabaseDeploy(engine, conn, driver, dbModelsYaml, options) {
driver,
loadedDbModel: convertModelToEngine(loadedDbModel, driver),
dbdiffOptionsExtra,
useTransaction: engine.runDeployInTransaction,
});
}
@@ -606,7 +609,7 @@ describe('Deploy database', () => {
})
);
test.each(engines.filter(i => !i.skipDeploy).map(engine => [engine.label, engine]))(
test.each(engines.filter(i => !i.skipDeploy && !i.skipRenameTable).map(engine => [engine.label, engine]))(
'Mark table removed - %s',
testWrapper(async (conn, driver, engine) => {
await testDatabaseDeploy(engine, conn, driver, [[T1], [], []], {
@@ -822,7 +825,7 @@ describe('Deploy database', () => {
})
);
test.each(engines.filter(i => !i.skipDeploy).map(engine => [engine.label, engine]))(
test.each(engines.filter(i => !i.skipDeploy && !i.skipRenameTable).map(engine => [engine.label, engine]))(
'Mark table removed, one remains - %s',
testWrapper(async (conn, driver, engine) => {
await testDatabaseDeploy(engine, conn, driver, [[T1, T2], [T2], [T2]], {

View File

@@ -94,7 +94,7 @@ describe('Table analyse', () => {
})
);
test.each(engines.map(engine => [engine.label, engine]))(
test.each(engines.filter(x => !x.skipIncrementalAnalysis).map(engine => [engine.label, engine]))(
'Table add - incremental analysis - %s',
testWrapper(async (conn, driver, engine) => {
await runCommandOnDriver(conn, driver, dmp => dmp.put(t2Sql(engine)));
@@ -112,7 +112,7 @@ describe('Table analyse', () => {
})
);
test.each(engines.map(engine => [engine.label, engine]))(
test.each(engines.filter(x => !x.skipIncrementalAnalysis).map(engine => [engine.label, engine]))(
'Table remove - incremental analysis - %s',
testWrapper(async (conn, driver, engine) => {
await runCommandOnDriver(conn, driver, dmp => dmp.put(t1Sql(engine)));
@@ -130,7 +130,7 @@ describe('Table analyse', () => {
})
);
test.each(engines.map(engine => [engine.label, engine]))(
test.each(engines.filter(x => !x.skipIncrementalAnalysis).map(engine => [engine.label, engine]))(
'Table change - incremental analysis - %s',
testWrapper(async (conn, driver, engine) => {
await runCommandOnDriver(conn, driver, dmp => dmp.put(t1Sql(engine)));

View File

@@ -100,3 +100,28 @@ services:
# - '5002:5001'
# volumes:
# - ./data/libsql:/var/lib/sqld
firebird:
image: firebirdsql/firebird:latest
container_name: firebird-db
environment:
- FIREBIRD_DATABASE=mydatabase.fdb
- FIREBIRD_USER=dbuser
- FIREBIRD_PASSWORD=dbpassword
- ISC_PASSWORD=masterkey
- FIREBIRD_TRACE=false
- FIREBIRD_USE_LEGACY_AUTH=true
ports:
- '3050:3050'
volumes:
- firebird-data:/firebird/data
- ./firebird.conf:/firebird/firebird.conf # Mount custom config file
healthcheck:
test: ['CMD', 'nc', '-z', 'localhost', '3050']
interval: 30s
timeout: 10s
retries: 3
start_period: 40s
volumes:
firebird-data:

View File

@@ -680,6 +680,56 @@ const duckdbEngine = {
skipDropReferences: true,
};
/** @type {import('dbgate-types').TestEngineInfo} */
const firebirdEngine = {
label: 'Firebird',
generateDbFile: true,
databaseFileLocationOnServer: '/var/lib/firebird/data/',
defaultSchemaName: 'main',
connection: {
engine: 'firebird@dbgate-plugin-firebird',
server: 'localhost',
port: 3050,
// databaseUrl: '/var/lib/firebird/data/mydatabase.fdb',
// databaseFile: '/var/lib/firebird/data/mydatabase.fdb',
user: 'SYSDBA',
password: 'masterkey',
},
objects: [],
triggers: [
{
testName: 'triggers after each row',
create: `CREATE OR ALTER TRIGGER ~obj1 AFTER INSERT ON ~t1 AS BEGIN END;`,
drop: 'DROP TRIGGER ~obj1;',
objectTypeField: 'triggers',
expected: {
pureName: 'obj1',
tableName: 't1',
eventType: 'INSERT',
triggerTiming: 'AFTER',
},
},
],
skipOnCI: false,
runDeployInTransaction: true,
skipDataModifications: true,
skipChangeColumn: true,
// skipIndexes: true,
// skipStringLength: true,
// skipTriggers: true,
skipDataReplicator: true,
skipAutoIncrement: true,
// skipDropColumn: true,
skipRenameColumn: true,
// skipChangeNullability: true,
// skipDeploy: true,
// supportRenameSqlObject: true,
skipIncrementalAnalysis: true,
skipRenameTable: true,
// skipDefaultValue: true,
skipDropReferences: true,
};
const enginesOnCi = [
// all engines, which would be run on GitHub actions
mysqlEngine,
@@ -694,6 +744,7 @@ const enginesOnCi = [
oracleEngine,
cassandraEngine,
duckdbEngine,
firebirdEngine,
];
const enginesOnLocal = [
@@ -709,7 +760,8 @@ const enginesOnLocal = [
// libsqlFileEngine,
// libsqlWsEngine,
// oracleEngine,
duckdbEngine,
// duckdbEngine,
firebirdEngine,
];
/** @type {import('dbgate-types').TestEngineInfo[] & Record<string, import('dbgate-types').TestEngineInfo>} */
@@ -727,3 +779,4 @@ module.exports.cassandraEngine = cassandraEngine;
module.exports.libsqlFileEngine = libsqlFileEngine;
module.exports.libsqlWsEngine = libsqlWsEngine;
module.exports.duckdbEngine = duckdbEngine;
module.exports.firebirdEngine = firebirdEngine;

View File

@@ -0,0 +1,45 @@
# Custom Firebird Configuration
# Wire encryption settings
# Options: Enabled, Required, Disabled
WireCrypt = Disabled
# Authentication settings
# Add Legacy_Auth to support older clients
AuthServer = Legacy_Auth
# User manager plugin
UserManager = Legacy_UserManager
# Default character set
DefaultCharSet = UTF8
# Buffer settings for better performance
DefaultDbCachePages = 2048
TempCacheLimit = 512M
# Connection settings
ConnectionTimeout = 180
DatabaseGrowthIncrement = 128M
# TCP Protocol settings
TcpRemoteBufferSize = 8192
TcpNoNagle = 1
# Security settings
RemoteServiceName = gds_db
RemoteServicePort = 3050
RemoteAuxPort = 0
RemotePipeName = firebird
# Lock settings
LockMemSize = 1M
LockHashSlots = 8191
LockAcquireSpins = 0
# Log settings
FileSystemCacheThreshold = 65536
FileSystemCacheSize = 0
# Compatibility settings for older clients
CompatibilityDialect = 3

View File

@@ -5,7 +5,12 @@ const crypto = require('crypto');
function randomDbName(dialect) {
const generatedKey = crypto.randomBytes(6);
const newKey = generatedKey.toString('hex');
const res = `db${newKey}`;
let res = `db${newKey}`;
if (dialect.dbFileExtension) {
res += dialect.dbFileExtension;
}
if (dialect.upperCaseAllDbObjectNames) return res.toUpperCase();
return res;
}
@@ -17,7 +22,7 @@ async function connect(engine, database) {
if (engine.generateDbFile) {
const conn = await driver.connect({
...connection,
databaseFile: `dbtemp/${database}`,
databaseFile: (engine.databaseFileLocationOnServer ?? 'dbtemp/') + database,
});
return conn;
} else {
@@ -42,7 +47,7 @@ async function prepareConnection(engine, database) {
if (engine.generateDbFile) {
return {
...connection,
databaseFile: `dbtemp/${database}`,
databaseFile: (engine.databaseFileLocationOnServer ?? 'dbtemp/') + database,
isPreparedOnly: true,
};
} else {

View File

@@ -20,6 +20,7 @@ const crypto = require('crypto');
* @param {string} options.ignoreNameRegex - regex for ignoring objects by name
* @param {string} options.targetSchema - target schema for deployment
* @param {number} options.maxMissingTablesRatio - maximum ratio of missing tables in database. Safety check, if missing ratio is highe, deploy is stopped (preventing accidental drop of all tables)
* @param {boolean} options.useTransaction - run deploy in transaction. If not provided, it will be set to true if driver supports transactions
*/
async function deployDb({
connection,
@@ -33,6 +34,7 @@ async function deployDb({
ignoreNameRegex = '',
targetSchema = null,
maxMissingTablesRatio = undefined,
useTransaction,
}) {
if (!driver) driver = requireEngineDriver(connection);
const dbhan = systemConnection || (await connectUtility(driver, connection, 'read'));
@@ -60,7 +62,14 @@ async function deployDb({
maxMissingTablesRatio,
});
// console.log('RUNNING DEPLOY SCRIPT:', sql);
await executeQuery({ connection, systemConnection: dbhan, driver, sql, logScriptItems: true });
await executeQuery({
connection,
systemConnection: dbhan,
driver,
sql,
logScriptItems: true,
useTransaction,
});
await scriptDeployer.runPost();
} finally {

View File

@@ -14,6 +14,7 @@ const logger = getLogger('execQuery');
* @param {string} [options.sql] - SQL query
* @param {string} [options.sqlFile] - SQL file
* @param {boolean} [options.logScriptItems] - whether to log script items instead of whole script
* @param {boolean} [options.useTransaction] - run query in transaction
* @param {boolean} [options.skipLogging] - whether to skip logging
*/
async function executeQuery({
@@ -24,6 +25,7 @@ async function executeQuery({
sqlFile = undefined,
logScriptItems = false,
skipLogging = false,
useTransaction,
}) {
if (!logScriptItems && !skipLogging) {
logger.info({ sql: getLimitedQuery(sql) }, `Execute query`);
@@ -42,7 +44,7 @@ async function executeQuery({
logger.debug(`Running SQL query, length: ${sql.length}`);
}
await driver.script(dbhan, sql, { logScriptItems });
await driver.script(dbhan, sql, { logScriptItems, useTransaction });
} finally {
if (!systemConnection) {
await driver.close(dbhan);

View File

@@ -41,13 +41,14 @@ program
'regex, which table data will be loaded and stored in model (in load command)'
)
.option('-e, --engine <engine>', 'engine name, eg. mysql@dbgate-plugin-mysql')
.option('--commonjs', 'Creates CommonJS module');
.option('--commonjs', 'Creates CommonJS module')
.option('--transaction', 'Run deploy query in transaction');
program
.command('deploy <modelFolder>')
.description('Deploys model to database')
.action(modelFolder => {
const { engine, server, user, password, database } = program.opts();
const { engine, server, user, password, database, transaction } = program.opts();
// const hooks = [];
// if (program.autoIndexForeignKeys) hooks.push(dbmodel.hooks.autoIndexForeignKeys);
@@ -61,6 +62,7 @@ program
database,
},
modelFolder,
useTransaction: transaction,
})
);
});

View File

@@ -12,6 +12,9 @@ export function dumpSqlSelect(dmp: SqlDumper, cmd: Select) {
if (cmd.topRecords) {
if (!dmp.dialect.rangeSelect || dmp.dialect.offsetFetchRangeSyntax) dmp.put('^top %s ', cmd.topRecords);
}
if (cmd.range && dmp.dialect.offsetFirstSkipSyntax) {
dmp.put('^first %s ^skip %s ', cmd.range.limit, cmd.range.offset);
}
if (cmd.selectAll) {
dmp.put('* ');
}
@@ -52,6 +55,8 @@ export function dumpSqlSelect(dmp: SqlDumper, cmd: Select) {
if (cmd.range) {
if (dmp.dialect.offsetFetchRangeSyntax) {
dmp.put('^offset %s ^rows ^fetch ^next %s ^rows ^only', cmd.range.offset, cmd.range.limit);
} else if (dmp.dialect.offsetFirstSkipSyntax) {
//
} else if (dmp.dialect.offsetNotSupported) {
dmp.put('^limit %s', cmd.range.limit + cmd.range.offset);
} else {

View File

@@ -266,11 +266,11 @@ export class SqlDumper implements AlterProcessor {
this.columnDefault(column);
}
if (includeNullable && !this.dialect?.specificNullabilityImplementation) {
this.put(column.notNull ? '^not ^null' : '^null');
this.put(column.notNull ? '^not ^null' : this.dialect.implicitNullDeclaration ? '' : '^null');
}
} else {
if (includeNullable && !this.dialect?.specificNullabilityImplementation) {
this.put(column.notNull ? '^not ^null' : '^null');
this.put(column.notNull ? '^not ^null' : this.dialect.implicitNullDeclaration ? '' : '^null');
}
if (includeDefault && column.defaultValue?.toString()?.trim()) {
this.columnDefault(column);

View File

@@ -8,6 +8,7 @@ export interface SqlDialect {
topRecords?: boolean;
stringEscapeChar: string;
offsetFetchRangeSyntax?: boolean;
offsetFirstSkipSyntax?: boolean;
offsetNotSupported?: boolean;
quoteIdentifier(s: string): string;
fallbackDataType?: string;
@@ -47,6 +48,7 @@ export interface SqlDialect {
namedDefaultConstraint?: boolean;
specificNullabilityImplementation?: boolean;
implicitNullDeclaration?: boolean;
omitForeignKeys?: boolean;
omitUniqueConstraints?: boolean;
omitIndexes?: boolean;
@@ -66,6 +68,7 @@ export interface SqlDialect {
requireFromDual?: boolean;
userDatabaseNamePrefix?: string; // c## in Oracle
upperCaseAllDbObjectNames?: boolean;
dbFileExtension?: string;
defaultValueBeforeNullability?: boolean;
predefinedDataTypes: string[];

View File

@@ -45,12 +45,14 @@ export type TestEngineInfo = {
skipChangeNullability?: boolean;
skipRenameColumn?: boolean;
skipDropReferences?: boolean;
skipRenameTable?: boolean;
forceSortResults?: boolean;
forceSortStructureColumns?: boolean;
alterTableAddColumnSyntax?: boolean;
dbSnapshotBySeconds?: boolean;
setNullDefaultInsteadOfDrop?: boolean;
runDeployInTransaction?: boolean;
useTextTypeForStrings?: boolean;
@@ -60,6 +62,8 @@ export type TestEngineInfo = {
defaultSchemaName?: string;
generateDbFile?: boolean;
generateDbFileOnServer?: boolean;
databaseFileLocationOnServer?: string;
dbSnapshotBySeconds?: boolean;
dumpFile?: string;
dumpChecks?: Array<{ sql: string; res: string }>;

View File

@@ -0,0 +1,6 @@
[![styled with prettier](https://img.shields.io/badge/styled_with-prettier-ff69b4.svg)](https://github.com/prettier/prettier)
[![NPM version](https://img.shields.io/npm/v/dbgate-plugin-firebird.svg)](https://www.npmjs.com/package/dbgate-plugin-firebird)
# dbgate-plugin-firebird
Firebird/Interbase plugin for DbGate

View File

@@ -0,0 +1,46 @@
{
"name": "dbgate-plugin-firebird",
"main": "dist/backend.js",
"version": "6.0.0-alpha.1",
"license": "GPL-3.0",
"description": "firebirdQL connector plugin for DbGate",
"homepage": "https://dbgate.org",
"repository": {
"type": "git",
"url": "https://github.com/dbgate/dbgate"
},
"author": "Jan Prochazka",
"keywords": [
"dbgate",
"firebird",
"dbgatebuiltin"
],
"files": [
"dist",
"icon.svg"
],
"scripts": {
"build:frontend": "webpack --config webpack-frontend.config",
"build:frontend:watch": "webpack --watch --config webpack-frontend.config",
"build:backend": "webpack --config webpack-backend.config.js",
"build": "yarn build:frontend && yarn build:backend",
"plugin": "yarn build && yarn pack && dbgate-plugin dbgate-plugin-firebird",
"copydist": "yarn build && yarn pack && dbgate-copydist ../dist/dbgate-plugin-firebird",
"plugout": "dbgate-plugout dbgate-plugin-firebird",
"prepublishOnly": "yarn build"
},
"devDependencies": {
"dbgate-plugin-tools": "^1.0.7",
"webpack": "^5.91.0",
"webpack-cli": "^5.1.4"
},
"dependencies": {
"wkx": "^0.5.0",
"pg-copy-streams": "^6.0.6",
"node-firebird": "^1.1.9",
"dbgate-query-splitter": "^4.11.3",
"dbgate-tools": "^6.0.0-alpha.1",
"lodash": "^4.17.21",
"pg": "^8.11.5"
}
}

View File

@@ -0,0 +1,9 @@
module.exports = {
trailingComma: 'es5',
tabWidth: 2,
semi: true,
singleQuote: true,
arrowParen: 'avoid',
arrowParens: 'avoid',
printWidth: 120,
};

View File

@@ -0,0 +1,147 @@
const _ = require('lodash');
const sql = require('./sql');
const {
getDataTypeString,
getTriggerTiming,
getTriggerEventType,
getFormattedDefaultValue,
getTriggerCreateSql,
} = require('./helpers');
const { DatabaseAnalyser } = require('dbgate-tools');
/**
 * Firebird database structure analyser.
 * Reads the RDB$ system tables via the SQL templates in ./sql and assembles
 * tables, views, triggers, functions and procedures into the dbgate
 * structure-model shape expected by DatabaseAnalyser.
 */
class Analyser extends DatabaseAnalyser {
  constructor(dbhan, driver, version) {
    super(dbhan, driver, version);
  }

  async _runAnalysis() {
    const tablesResult = await this.analyserQuery(sql.tables, ['tables']);
    const columnsResult = await this.analyserQuery(sql.columns, ['tables', 'views']);
    const triggersResult = await this.analyserQuery(sql.triggers, ['triggers']);
    const primaryKeysResult = await this.analyserQuery(sql.primaryKeys, ['primaryKeys']);
    const foreignKeysResult = await this.analyserQuery(sql.foreignKeys, ['foreignKeys']);
    const functionsResults = await this.analyserQuery(sql.functions, ['functions']);
    const functionParametersResults = await this.analyserQuery(sql.functionParameters, ['functions']);
    const proceduresResults = await this.analyserQuery(sql.procedures, ['procedures']);
    const procedureParametersResults = await this.analyserQuery(sql.procedureParameters, ['procedures']);
    const viewsResults = await this.analyserQuery(sql.views, ['views']);
    const uniquesResults = await this.analyserQuery(sql.uniques, ['tables']);
    const indexesResults = await this.analyserQuery(sql.indexes, ['tables']);

    const columns =
      columnsResult.rows?.map(column => ({
        ...column,
        // NOTE(review): objectId is keyed by columnName rather than table name —
        // verify this matches what incremental analysis expects.
        objectId: `tables:${column.columnName}`,
        dataType: getDataTypeString(column),
        defaultValue: getFormattedDefaultValue(column.defaultValue),
      })) ?? [];

    const triggers =
      triggersResult.rows?.map(i => ({
        ...i,
        objectId: `triggers:${i.pureName}`,
        // TRIGGERTYPE is Firebird's numeric RDB$TRIGGER_TYPE code
        eventType: getTriggerEventType(i.TRIGGERTYPE),
        triggerTiming: getTriggerTiming(i.TRIGGERTYPE),
        createSql: getTriggerCreateSql(i),
      })) ?? [];

    const primaryKeys =
      primaryKeysResult.rows?.map(primaryKey => ({
        ...primaryKey,
        objectId: `tables:${primaryKey.pureName}`,
      })) ?? [];

    const foreignKeys =
      foreignKeysResult.rows?.map(foreignKey => ({
        ...foreignKey,
        objectId: `tables:${foreignKey.pureName}`,
      })) ?? [];

    const functions =
      functionsResults.rows?.map(func => {
        const params =
          functionParametersResults.rows?.filter(param => param.owningObjectName === func.pureName) ?? [];
        // BUGFIX: the return type must come from the single RETURN-mode parameter.
        // The original read `.dataType` on the *array* returned by filter(),
        // which always produced undefined.
        const returnParam = params.find(param => param.parameterMode === 'RETURN');
        return {
          ...func,
          objectId: `functions:${func.pureName}`,
          returnType: returnParam ? getDataTypeString(returnParam) : undefined,
          parameters: params.map(param => ({
            ...param,
            dataType: getDataTypeString(param),
          })),
        };
      }) ?? [];

    const uniques =
      uniquesResults.rows?.map(unique => ({
        pureName: unique.pureName,
        constraintName: unique.constraintName,
        constraintType: unique.constraintType,
        columns: [
          {
            columnName: unique.columnName,
            isDescending: unique.isDescending,
          },
        ],
      })) ?? [];

    // One index row per (index, column); group rows of the same constraint
    // into a single index entry with its ordered column list.
    const indexesGrouped = _.groupBy(indexesResults.rows, 'constraintName');
    const indexes = Object.values(indexesGrouped).map(indexGroup => ({
      pureName: indexGroup[0].pureName,
      constraintName: indexGroup[0].constraintName,
      constraintType: indexGroup[0].constraintType,
      columns: indexGroup.map(index => ({
        columnName: index.columnName,
        isDescending: index.isDescending,
      })),
    }));

    const procedures =
      proceduresResults.rows?.map(proc => ({
        ...proc,
        objectId: `procedures:${proc.pureName}`,
        parameters: procedureParametersResults.rows
          ?.filter(param => param.owningObjectName === proc.pureName)
          .map(param => ({
            ...param,
            dataType: getDataTypeString(param),
          })),
      })) ?? [];

    const tables =
      tablesResult.rows?.map(table => ({
        ...table,
        objectId: `tables:${table.pureName}`,
        columns: columns.filter(column => column.tableName === table.pureName),
        primaryKey: DatabaseAnalyser.extractPrimaryKeys(table, primaryKeys),
        foreignKeys: DatabaseAnalyser.extractForeignKeys(table, foreignKeys),
        uniques: uniques.filter(unique => unique.pureName === table.pureName),
        indexes: indexes.filter(index => index.pureName === table.pureName),
      })) ?? [];

    const views =
      viewsResults.rows?.map(view => ({
        ...view,
        objectId: `views:${view.pureName}`,
        columns: columns.filter(column => column.tableName === view.pureName),
      })) ?? [];

    return {
      views,
      tables,
      triggers,
      functions,
      procedures,
    };
  }

  // Single-object analysis identifies objects as "<typeField>:<pureName>",
  // matching the objectId scheme used above.
  async _computeSingleObjectId() {
    const { typeField, pureName } = this.singleObjectFilter;
    this.singleObjectId = `${typeField}:${pureName}`;
  }
}

module.exports = Analyser;

View File

@@ -0,0 +1,239 @@
const _ = require('lodash');
const { splitQuery } = require('dbgate-query-splitter');
const stream = require('stream');
const driverBase = require('../frontend/driver');
const Analyser = require('./Analyser');
const Firebird = require('node-firebird');
const { normalizeRow, createFirebirdInsertStream } = require('./helpers');
const { getLogger, extractErrorLogData, createBulkInsertStreamBase } = require('dbgate-tools');
const sql = require('./sql');
const logger = getLogger('firebird');
/** @type {import('dbgate-types').EngineDriver<Firebird.Database>} */
const driver = {
  ...driverBase,
  analyserClass: Analyser,

  // Opens (or creates) a Firebird database over the wire protocol.
  // Returns a handle object whose `client` is the attached Firebird.Database.
  async connect({ port, user, password, server, databaseFile }) {
    const options = {
      host: server,
      port,
      database: databaseFile,
      user,
      password,
    };
    /**@type {Firebird.Database} */
    const db = await new Promise((resolve, reject) => {
      // attachOrCreate: attaches when the database file exists, creates it otherwise
      Firebird.attachOrCreate(options, (err, db) => {
        if (err) {
          reject(err);
          return;
        }
        resolve(db);
      });
    });

    return {
      client: db,
    };
  },

  // Runs a single SQL statement and returns { rows, columns }.
  // Blob values in result rows are resolved to strings via normalizeRow.
  async query(dbhan, sql, { discardResult } = {}) {
    const res = await new Promise((resolve, reject) => {
      dbhan.client.query(sql, (err, result) => {
        if (err) {
          reject(err);
          // NOTE(review): logged after reject; prefer the shared `logger` over console
          console.error(err);
          console.error('Executing query:', sql);
          return;
        }
        resolve(result);
      });
    });

    if (discardResult) {
      return {
        rows: [],
        columns: [],
      };
    }

    // Column list is inferred from the first row's keys (empty result => no columns)
    const columns = res?.[0] ? Object.keys(res[0]).map(i => ({ columnName: i })) : [];

    return {
      rows: res ? await Promise.all(res.map(normalizeRow)) : [],
      columns,
    };
  },

  // Streams query results row-by-row into the dbgate stream callbacks
  // (recordset / row / info / done).
  async stream(dbhan, sql, options) {
    try {
      await new Promise((resolve, reject) => {
        let hasSentColumns = false;
        dbhan.client.sequentially(
          sql,
          [],
          (row, index) => {
            // Emit the column list exactly once, inferred from the first row
            if (!hasSentColumns) {
              hasSentColumns = true;
              const columns = Object.keys(row).map(i => ({ columnName: i }));
              options.recordset(columns);
            }
            // NOTE(review): rows are not passed through normalizeRow here, so
            // blob columns arrive as fetcher functions — TODO confirm intended
            options.row(row);
          },
          err => {
            if (err) {
              reject(err);
              return;
            }
            resolve();
          }
        );
      });
      options.done();
    } catch (err) {
      // Report the failure through the info channel and still finish the stream
      logger.error(extractErrorLogData(err), 'Stream error');
      options.info({
        message: err.message,
        line: err.line,
        // procedure: procName,
        time: new Date(),
        severity: 'error',
      });
      options.done();
    }
  },

  // Executes a multi-statement script, split with dbgate-query-splitter.
  // With useTransaction all statements run in a single transaction
  // (all-or-nothing); otherwise each statement runs independently.
  async script(dbhan, sql, { useTransaction } = {}) {
    if (useTransaction) return this.runSqlInTransaction(dbhan, sql);

    const sqlItems = splitQuery(sql, driver.sqlSplitterOptions);
    for (const sqlItem of sqlItems) {
      await this.query(dbhan, sqlItem, { discardResult: true });
    }
  },

  // Returns an object-mode PassThrough that first emits a __isStreamHeader
  // record (given `structure`, or inferred columns) followed by raw data rows.
  async readQuery(dbhan, sql, structure) {
    const pass = new stream.PassThrough({
      objectMode: true,
      highWaterMark: 100,
    });

    let hasSentColumns = false;
    dbhan.client.sequentially(
      sql,
      [],
      (row, index) => {
        if (!hasSentColumns) {
          hasSentColumns = true;
          const columns = Object.keys(row).map(i => ({ columnName: i }));
          pass.write({
            __isStreamHeader: true,
            ...(structure || { columns }),
          });
        }
        pass.write(row);
      },
      err => {
        // NOTE(review): a query error is silently discarded here — the stream
        // just ends as if successful. Consider surfacing it (pass.destroy(err)).
        pass.end();
      }
    );

    return pass;
  },

  // Bulk-insert writable stream used by import / data replication.
  async writeTable(dbhan, name, options) {
    return createFirebirdInsertStream(this, stream, dbhan, name, options);
  },

  // Reads the server version string (see sql.version query).
  async getVersion(dbhan) {
    const res = await this.query(dbhan, sql.version);
    const version = res.rows?.[0]?.VERSION;
    return {
      version,
      versionText: `Firebird ${version}`,
    };
  },

  // Detaches from the database, releasing the server-side connection.
  async close(dbhan) {
    return new Promise((resolve, reject) => {
      dbhan.client.detach(err => {
        if (err) {
          reject(err);
          return;
        }
        resolve();
      });
    });
  },

  /**
   * Runs a multi-statement script inside one SNAPSHOT transaction, committing
   * on success and rolling back on the first failing statement.
   * NOTE(review): after a successful rollback the original error is NOT
   * rethrown, so callers observe a normal completion — TODO confirm intended.
   * NOTE(review): `transactionPromise` actually holds the Transaction object,
   * not a promise.
   * @param {import('dbgate-types').DatabaseHandle<Firebird.Database>} dbhan
   * @param {string} sql
   */
  async runSqlInTransaction(dbhan, sql) {
    /** @type {Firebird.Transaction} */
    let transactionPromise;
    const sqlItems = splitQuery(sql, driver.sqlSplitterOptions);
    try {
      transactionPromise = await new Promise((resolve, reject) => {
        dbhan.client.transaction(Firebird.ISOLATION_SNAPSHOT, function (err, currentTransaction) {
          if (err) return reject(err);
          resolve(currentTransaction);
        });
      });

      // Statements run sequentially; the first error aborts the loop
      for (let i = 0; i < sqlItems.length; i++) {
        const currentSql = sqlItems[i];
        await new Promise((resolve, reject) => {
          transactionPromise.query(currentSql, function (err, result) {
            if (err) {
              logger.error(extractErrorLogData(err), 'Error executing SQL in transaction');
              logger.error({ sql: currentSql }, 'SQL that caused the error');
              return reject(err);
            }
            resolve(result);
          });
        });
      }

      await new Promise((resolve, reject) => {
        transactionPromise.commit(function (err) {
          if (err) {
            logger.error(extractErrorLogData(err), 'Error committing transaction');
            return reject(err);
          }
          resolve();
        });
      });
    } catch (error) {
      logger.error(extractErrorLogData(error), 'Transaction error');
      // Roll back only if the transaction was actually started
      if (transactionPromise) {
        await new Promise((resolve, reject) => {
          transactionPromise.rollback(function (rollbackErr) {
            if (rollbackErr) {
              logger.error(extractErrorLogData(rollbackErr), 'Error rolling back transaction');
              return reject(rollbackErr); // Re-reject the rollback error
            }
            resolve();
          });
        });
      }
    }

    return transactionPromise;
  },
};

module.exports = driver;

View File

@@ -0,0 +1,169 @@
const { createBulkInsertStreamBase } = require('dbgate-tools');
/**
 * Maps a Firebird RDB$FIELD_TYPE code (plus scale/length/precision metadata
 * from RDB$FIELDS) to a SQL data type name.
 * @param {object} field
 * @param {number} [field.dataTypeCode] - RDB$FIELD_TYPE value
 * @param {number} [field.scale]
 * @param {number} [field.length]
 * @param {number} [field.precision]
 * @returns {string} type name, or 'UNKNOWN' / 'unknown (code)' when unmapped
 */
function getDataTypeString({ dataTypeCode, scale, length, precision }) {
  switch (dataTypeCode) {
    case 7:
      return 'smallint';
    case 8:
      return 'integer';
    case 9:
      return 'bigint';
    case 10:
      return 'float';
    case 11:
      // legacy D_FLOAT; reported as double precision (lowercased for consistency)
      return 'double precision';
    case 12:
      return 'date';
    case 13:
      return 'time';
    case 14:
      return `char(${length})`;
    case 16:
      return `decimal(${precision}, ${scale})`;
    case 27:
      return 'double precision';
    case 35:
      // BUGFIX: RDB$FIELD_TYPE 35 is TIMESTAMP (was mislabeled as blob)
      return 'timestamp';
    case 37:
      return `varchar(${length})`;
    case 40:
      // RDB$FIELD_TYPE 40 is CSTRING (null-terminated, used by UDFs)
      return 'cstring';
    case 261:
      // BUGFIX: RDB$FIELD_TYPE 261 is BLOB (was mislabeled as cstring)
      return 'blob';
    default:
      if (dataTypeCode === null || dataTypeCode === undefined) return 'UNKNOWN';
      return `unknown (${dataTypeCode})`;
  }
}
// Firebird packs both the firing time and the DML/DB event into the numeric
// RDB$TRIGGER_TYPE column; this table decodes the known codes. Compound
// DML types (e.g. INSERT OR UPDATE) report only their first event.
const eventMap = {
  1: { triggerTiming: 'BEFORE', eventType: 'INSERT' },
  2: { triggerTiming: 'AFTER', eventType: 'INSERT' },
  3: { triggerTiming: 'BEFORE', eventType: 'UPDATE' },
  4: { triggerTiming: 'AFTER', eventType: 'UPDATE' },
  5: { triggerTiming: 'BEFORE', eventType: 'DELETE' },
  6: { triggerTiming: 'AFTER', eventType: 'DELETE' },
  17: { triggerTiming: 'BEFORE', eventType: 'INSERT' }, // OR UPDATE
  18: { triggerTiming: 'AFTER', eventType: 'INSERT' }, // OR UPDATE
  25: { triggerTiming: 'BEFORE', eventType: 'INSERT' }, // OR DELETE
  26: { triggerTiming: 'AFTER', eventType: 'INSERT' }, // OR DELETE
  27: { triggerTiming: 'BEFORE', eventType: 'UPDATE' }, // OR DELETE
  28: { triggerTiming: 'AFTER', eventType: 'UPDATE' }, // OR DELETE
  113: { triggerTiming: 'BEFORE', eventType: 'INSERT' }, // OR UPDATE OR DELETE
  114: { triggerTiming: 'AFTER', eventType: 'INSERT' }, // OR UPDATE OR DELETE
  8192: { triggerTiming: 'BEFORE EVENT', eventType: null }, // ON CONNECT
  8193: { triggerTiming: 'AFTER EVENT', eventType: null }, // ON DISCONNECT
  8194: { triggerTiming: 'BEFORE STATEMENT', eventType: null }, // ON TRANSACTION START
  8195: { triggerTiming: 'AFTER STATEMENT', eventType: null }, // ON TRANSACTION COMMIT
  8196: { triggerTiming: 'AFTER STATEMENT', eventType: null }, // ON TRANSACTION ROLLBACK
};

/** Returns the DML event ('INSERT' | 'UPDATE' | 'DELETE') for a trigger type code, or null. */
function getTriggerEventType(triggerType) {
  const entry = eventMap[triggerType];
  return entry ? entry.eventType : null;
}

/** Returns the firing time ('BEFORE', 'AFTER', …) for a trigger type code, or null. */
function getTriggerTiming(triggerType) {
  const entry = eventMap[triggerType];
  return entry ? entry.triggerTiming : null;
}

/**
 * Reconstructs a CREATE OR ALTER TRIGGER statement from an analyser row.
 * @param {object} triggerResult - row with pureName, tableName, TRIGGERTYPE, TRIGGER_BODY_SQL
 * @returns {string} full DDL statement
 */
function getTriggerCreateSql(triggerResult) {
  const { pureName, tableName, TRIGGERTYPE, TRIGGER_BODY_SQL } = triggerResult;
  const timing = getTriggerTiming(TRIGGERTYPE);
  const event = getTriggerEventType(TRIGGERTYPE);
  return `CREATE OR ALTER TRIGGER "${pureName}" ${timing} ${event} ON "${tableName}" ${TRIGGER_BODY_SQL};`;
}
/**
 * Strips the leading DEFAULT keyword from a RDB$DEFAULT_SOURCE expression
 * (e.g. "DEFAULT 0" -> "0").
 * @param {string|null|undefined} defaultValue
 * @returns {string|null} cleaned default expression, or null when absent
 */
function getFormattedDefaultValue(defaultValue) {
  // BUGFIX: `== null` also covers undefined (column missing from the row);
  // the original `=== null` check let undefined through and crashed on .replace
  if (defaultValue == null) return null;
  return defaultValue.replace(/^default\s*/i, '');
}
/**
 * Drains a readable (blob) stream and resolves with its full contents
 * decoded as a string.
 * @param {import('stream').Readable} stream
 * @param {BufferEncoding} [encoding] - defaults to utf8
 * @returns {Promise<string>}
 */
function blobStreamToString(stream, encoding = 'utf8') {
  return new Promise((resolve, reject) => {
    const parts = [];
    stream
      .on('data', part => parts.push(part))
      .on('end', () => resolve(Buffer.concat(parts).toString(encoding)))
      .on('error', reject);
  });
}
/**
 * Resolves a raw node-firebird result row into plain values.
 * Blob columns arrive as fetcher callbacks; each is invoked and its stream
 * is read into a utf8 string. null/undefined become null; everything else
 * passes through unchanged. Key order is preserved.
 * @param {Record<string, any>} row
 * @returns {Promise<Record<string, any>>}
 */
async function normalizeRow(row) {
  const resolveValue = async value => {
    if (value === null || value === undefined) return null;
    if (typeof value !== 'function') return value;
    // Blob fetcher: the callback hands us (err, name, eventEmitter)
    return new Promise((resolve, reject) => {
      value(async (_err, _name, eventEmitter) => {
        try {
          resolve(await blobStreamToString(eventEmitter, 'utf8'));
        } catch (error) {
          reject(error);
        }
      });
    });
  };

  const keys = Object.keys(row);
  const values = await Promise.all(keys.map(key => resolveValue(row[key])));
  return Object.fromEntries(keys.map((key, idx) => [key, values[idx]]));
}
/**
 * Rewrites values serialized as 'YYYY-MM-DDTHH:MM:SS' into the
 * 'YYYY-MM-DD HH:MM:SS' form; all other values are left untouched.
 * @param {Record<string, any>} row
 * @returns {Record<string, any>} new row object (input is not mutated)
 */
function transformRow(row) {
  const result = {};
  for (const [key, value] of Object.entries(row)) {
    // Non-string values are coerced by .test() and never match the pattern
    result[key] = /^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}$/.test(value) ? value.replace('T', ' ') : value;
  }
  return result;
}
// Builds the writable stream used by driver.writeTable for bulk inserts.
// NOTE(review): relies on createBulkInsertStreamBase from dbgate-tools; the
// first row written appears to act as the structure header and triggers
// checkStructure() — presumably it carries __isStreamHeader metadata.
// TODO confirm against the base implementation.
function createFirebirdInsertStream(driver, stream, dbhan, name, options) {
  const writable = createBulkInsertStreamBase(driver, stream, dbhan, name, options);
  // Override addRow so ISO date-time strings are converted to the
  // 'YYYY-MM-DD HH:MM:SS' form before buffering (see transformRow).
  writable.addRow = async row => {
    const transformedRow = transformRow(row);
    if (writable.structure) {
      // Structure already known — buffer the data row for the next flush
      writable.buffer.push(transformedRow);
    } else {
      // First row establishes the structure
      writable.structure = transformedRow;
      await writable.checkStructure();
    }
  };
  return writable;
}

module.exports = {
  getDataTypeString,
  getTriggerEventType,
  getTriggerTiming,
  getFormattedDefaultValue,
  getTriggerCreateSql,
  blobStreamToString,
  normalizeRow,
  createFirebirdInsertStream,
};

View File

@@ -0,0 +1,7 @@
// Plugin entry point: registers the Firebird driver with the DbGate host.
const driver = require('./driver');

module.exports = {
  packageName: 'dbgate-plugin-firebird',
  drivers: [driver],
  // Called by the plugin host on load; no environment hooks are needed yet.
  initialize(dbgateEnv) {},
};

View File

@@ -0,0 +1,44 @@
// Column metadata query for the Firebird analyser: one row per table column,
// with the raw RDB$FIELD_TYPE code ("dataTypeCode") decoded later by
// getDataTypeString. OBJECT_ID_CONDITION is a placeholder substituted by
// DatabaseAnalyser.analyserQuery for incremental filtering.
// NOTE(review): "length" divides RDB$FIELD_LENGTH by 4 — presumably to turn
// UTF8 byte length into character length; verify for non-UTF8 charsets.
module.exports = `
SELECT DISTINCT
CAST(TRIM(rf.rdb$relation_name) AS VARCHAR(255)) AS "tableName",
CAST(TRIM(rf.rdb$field_name) AS VARCHAR(255)) AS "columnName",
CASE rf.rdb$null_flag WHEN 1 THEN TRUE ELSE FALSE END AS "notNull",
CASE
WHEN EXISTS (
SELECT 1
FROM rdb$relation_constraints rc
JOIN rdb$index_segments idx ON rc.rdb$index_name = idx.rdb$index_name
WHERE rc.rdb$relation_name = rf.rdb$relation_name
AND idx.rdb$field_name = rf.rdb$field_name
AND rc.rdb$constraint_type = 'PRIMARY KEY'
) THEN TRUE
ELSE FALSE
END AS "isPrimaryKey",
f.rdb$field_type AS "dataTypeCode",
f.rdb$field_precision AS "precision",
f.rdb$field_scale AS "scale",
f.rdb$field_length / 4 AS "length",
CAST(TRIM(rf.RDB$DEFAULT_SOURCE) AS VARCHAR(255)) AS "defaultValue",
CAST(TRIM(rf.rdb$description) AS VARCHAR(255)) AS "columnComment",
CASE
WHEN f.rdb$field_type IN (8, 9, 16) AND f.rdb$field_scale < 0 THEN TRUE
ELSE FALSE
END AS "isUnsigned",
CAST(TRIM(rf.rdb$relation_name) AS VARCHAR(255)) AS "pureName"
FROM
rdb$relation_fields rf
JOIN
rdb$relations r ON rf.rdb$relation_name = r.rdb$relation_name
LEFT JOIN
rdb$fields f ON rf.rdb$field_source = f.rdb$field_name
LEFT JOIN
rdb$character_sets cs ON f.rdb$character_set_id = cs.rdb$character_set_id
LEFT JOIN
rdb$collations co ON f.rdb$collation_id = co.rdb$collation_id
WHERE
r.rdb$system_flag = 0
AND
('tables:' || CAST(TRIM(rf.rdb$relation_name) AS VARCHAR(255))) =OBJECT_ID_CONDITION
ORDER BY
"tableName", rf.rdb$field_position;
`;

View File

@@ -0,0 +1,36 @@
// Foreign-key constraints with their column pairs, resolved by joining the
// FK's backing index segments to the referenced (PK/UQ) index segments at
// matching field positions. OBJECT_ID_CONDITION is substituted by the
// analyser ('tables:<name>' ids).
module.exports = `
SELECT
TRIM(rc_fk.RDB$RELATION_NAME) AS "pureName",
TRIM(rc_fk.RDB$CONSTRAINT_NAME) AS "constraintName",
TRIM(iseg_fk.RDB$FIELD_NAME) AS "columnName",
TRIM(iseg_pk.RDB$FIELD_NAME) AS "refColumnName",
TRIM(rc_pk.RDB$RELATION_NAME) AS "refTableName",
FALSE AS "isIncludedColumn",
CASE COALESCE(idx_fk.RDB$INDEX_TYPE, 0)
WHEN 1 THEN TRUE -- For the FK's own index, 1 = Descending (modern Firebird)
ELSE FALSE -- 0 or NULL = Ascending for the FK's own index
END AS "isDescending" -- Refers to the sort order of the index on the FK column(s)
FROM
RDB$RELATION_CONSTRAINTS rc_fk
JOIN
RDB$RELATIONS rel ON rc_fk.RDB$RELATION_NAME = rel.RDB$RELATION_NAME
JOIN
RDB$INDEX_SEGMENTS iseg_fk ON rc_fk.RDB$INDEX_NAME = iseg_fk.RDB$INDEX_NAME
JOIN
RDB$INDICES idx_fk ON rc_fk.RDB$INDEX_NAME = idx_fk.RDB$INDEX_NAME
JOIN
RDB$REF_CONSTRAINTS refc ON rc_fk.RDB$CONSTRAINT_NAME = refc.RDB$CONSTRAINT_NAME
JOIN
RDB$RELATION_CONSTRAINTS rc_pk ON refc.RDB$CONST_NAME_UQ = rc_pk.RDB$CONSTRAINT_NAME
JOIN
RDB$INDEX_SEGMENTS iseg_pk ON rc_pk.RDB$INDEX_NAME = iseg_pk.RDB$INDEX_NAME
AND iseg_fk.RDB$FIELD_POSITION = iseg_pk.RDB$FIELD_POSITION -- Critical for matching columns in composite keys
WHERE
rc_fk.RDB$CONSTRAINT_TYPE = 'FOREIGN KEY'
AND
('tables:' || TRIM(rc_fk.RDB$RELATION_NAME)) =OBJECT_ID_CONDITION
ORDER BY
"pureName",
"constraintName",
iseg_fk.RDB$FIELD_POSITION;
`;

View File

@@ -0,0 +1,29 @@
// Parameters of user-defined functions (RDB$FUNCTION_ARGUMENTS), including
// the return argument (reported with parameterMode 'RETURN'). The join to
// RDB$FIELDS supplies the argument's base data type.
module.exports = `
SELECT
TRIM(FA.RDB$FUNCTION_NAME) AS "owningObjectName", -- Name of the function this parameter belongs to
TRIM(FA.RDB$ARGUMENT_NAME) AS "parameterName",
FFLDS.RDB$FIELD_TYPE AS "dataTypeCode", -- SQL data type code from RDB$FIELDS
FFLDS.rdb$field_precision AS "precision",
FFLDS.rdb$field_scale AS "scale",
FFLDS.rdb$field_length AS "length",
TRIM(CASE
WHEN FA.RDB$ARGUMENT_POSITION = F.RDB$RETURN_ARGUMENT THEN 'RETURN'
ELSE 'IN' -- For PSQL functions, non-return arguments are IN.
END) AS "parameterMode",
FA.RDB$ARGUMENT_POSITION AS "position", -- 0-based index for arguments
-- Fields for ParameterInfo.NamedObjectInfo
TRIM(FA.RDB$FUNCTION_NAME) AS "pureName" -- NamedObjectInfo.pureName for the parameter
FROM
RDB$FUNCTION_ARGUMENTS FA
JOIN
RDB$FUNCTIONS F ON FA.RDB$FUNCTION_NAME = F.RDB$FUNCTION_NAME
JOIN
RDB$FIELDS FFLDS ON FA.RDB$FIELD_SOURCE = FFLDS.RDB$FIELD_NAME -- Crucial join to get RDB$FIELDS.RDB$TYPE
WHERE
COALESCE(F.RDB$SYSTEM_FLAG, 0) = 0 -- Filter for user-defined functions
ORDER BY
"owningObjectName", "position";
`;

View File

@@ -0,0 +1,16 @@
module.exports = `
SELECT
TRIM(F.RDB$FUNCTION_NAME) AS "pureName",
TRIM(F.RDB$FUNCTION_NAME) AS "objectId",
TRIM('FUNCTION') AS "objectTypeField",
TRIM(F.RDB$DESCRIPTION) AS "objectComment",
F.RDB$FUNCTION_SOURCE AS "createSql", -- This is the PSQL body or definition for UDRs
FALSE AS "requiresFormat" -- Assuming PSQL source is generally readable
FROM
RDB$FUNCTIONS F
WHERE
COALESCE(F.RDB$SYSTEM_FLAG, 0) = 0 -- User-defined functions
AND ('funcitons:' || TRIM(F.RDB$FUNCTION_NAME)) =OBJECT_ID_CONDITION
ORDER BY
"pureName";
`;

View File

@@ -0,0 +1,29 @@
const version = require('./version');
const tables = require('./tables');
const columns = require('./columns');
const triggers = require('./triggers');
const primaryKeys = require('./primaryKeys');
const foreignKeys = require('./foreignKeys');
const functions = require('./functions');
const functionParameters = require('./functionParameters');
const procedures = require('./procedures');
const procedureParameters = require('./procedureParameters');
const views = require('./views');
const uniques = require('./uniques');
const indexes = require('./indexes');
module.exports = {
version,
columns,
views,
tables,
triggers,
primaryKeys,
foreignKeys,
functions,
functionParameters,
procedures,
procedureParameters,
uniques,
indexes,
};

View File

@@ -0,0 +1,42 @@
// User-defined secondary indexes with one row per (index, column) segment.
// Excludes indexes that back FK, PK or UNIQUE constraints (those are listed
// by the dedicated queries). OBJECT_ID_CONDITION is substituted by the
// analyser ('tables:<name>' ids).
module.exports = `
SELECT -- Index name, maps to pureName
TRIM(I.RDB$INDEX_NAME) AS "constraintName", -- Index name, maps to constraintName
TRIM('index') AS "constraintType", -- ConstraintType for IndexInfo
TRIM(I.RDB$RELATION_NAME) AS "pureName", -- Context: Table the index is on
CASE COALESCE(I.RDB$UNIQUE_FLAG, 0) -- isUnique: 1 for unique, 0 or NULL for non-unique [cite: 46, 838]
WHEN 1 THEN TRUE
ELSE FALSE
END AS "isUnique",
CASE
WHEN I.RDB$EXPRESSION_SOURCE IS NOT NULL THEN TRIM('expression') -- indexType: if an expression index [cite: 46, 262]
ELSE TRIM('normal')
END AS "indexType",
I.RDB$CONDITION_SOURCE AS "idx_filterDefinition", -- filterDefinition: for partial indexes [cite: 46, 261, 838]
COALESCE(I.RDB$INDEX_INACTIVE, 0) AS "idx_isInactive", -- 0 for active, 1 for inactive [cite: 46, 838]
I.RDB$DESCRIPTION AS "idx_description", -- Index description/comment [cite: 46, 838]
-- Column specific fields from RDB$INDEX_SEGMENTS
TRIM(S.RDB$FIELD_NAME) AS "columnName", -- columnName for ColumnReference [cite: 46, 837]
CASE COALESCE(I.RDB$INDEX_TYPE, 0) -- isDescending: 0 for ASC (default), 1 for DESC for the whole index [cite: 46, 838]
WHEN 1 THEN TRUE
ELSE FALSE
END AS "isDescending",
S.RDB$FIELD_POSITION AS "col_fieldPosition" -- 0-based position of the column in the index [cite: 46, 837]
FROM
RDB$INDICES I
JOIN
RDB$INDEX_SEGMENTS S ON I.RDB$INDEX_NAME = S.RDB$INDEX_NAME
WHERE
COALESCE(I.RDB$SYSTEM_FLAG, 0) = 0 -- Filter for user-defined indexes [cite: 46, 838]
AND I.RDB$FOREIGN_KEY IS NULL -- Exclude indexes backing foreign keys [cite: 46, 838]
-- (RDB$FOREIGN_KEY is not null if the index is for an FK)
AND NOT EXISTS ( -- Exclude indexes that are the chosen supporting index for a PK or UQ constraint
SELECT 1
FROM RDB$RELATION_CONSTRAINTS rc
WHERE rc.RDB$INDEX_NAME = I.RDB$INDEX_NAME
AND rc.RDB$CONSTRAINT_TYPE IN ('PRIMARY KEY', 'UNIQUE')
)
AND
('tables:' || TRIM(i.RDB$RELATION_NAME)) =OBJECT_ID_CONDITION
`;

View File

@@ -0,0 +1,28 @@
// Primary-key constraints with one row per key column, ordered by segment
// position. OBJECT_ID_CONDITION is substituted by the analyser
// ('tables:<name>' ids).
module.exports = `
SELECT
TRIM(rc.RDB$RELATION_NAME) AS "pureName",
TRIM(rc.RDB$CONSTRAINT_NAME) AS "constraintName",
TRIM(iseg.RDB$FIELD_NAME) AS "columnName",
CAST(NULL AS VARCHAR(63)) AS "refColumnName",
FALSE AS "isIncludedColumn",
CASE COALESCE(idx.RDB$INDEX_TYPE, 0) -- Treat NULL as 0 (ascending)
WHEN 1 THEN TRUE -- Assuming 1 means DESCENDING for regular (non-expression) indexes
ELSE FALSE -- Assuming 0 (or NULL) means ASCENDING for regular indexes
END AS "isDescending"
FROM
RDB$RELATION_CONSTRAINTS rc
JOIN
RDB$RELATIONS rel ON rc.RDB$RELATION_NAME = rel.RDB$RELATION_NAME
JOIN
RDB$INDICES idx ON rc.RDB$INDEX_NAME = idx.RDB$INDEX_NAME
JOIN
RDB$INDEX_SEGMENTS iseg ON idx.RDB$INDEX_NAME = iseg.RDB$INDEX_NAME
WHERE
rc.RDB$CONSTRAINT_TYPE = 'PRIMARY KEY'
AND COALESCE(rel.RDB$SYSTEM_FLAG, 0) = 0 -- Typically, you only want user-defined tables
AND ('tables:' || TRIM(rc.RDB$RELATION_NAME)) =OBJECT_ID_CONDITION
ORDER BY
"pureName",
"constraintName",
iseg.RDB$FIELD_POSITION;
`;

View File

@@ -0,0 +1,30 @@
// IN/OUT parameters of user-defined stored procedures
// (RDB$PROCEDURE_PARAMETERS), with base types resolved via RDB$FIELDS.
module.exports = `
SELECT
TRIM(PP.RDB$PROCEDURE_NAME) AS "owningObjectName", -- Name of the procedure this parameter belongs to
TRIM(PP.RDB$PARAMETER_NAME) AS "parameterName", -- ParameterInfo.parameterName
FFLDS.RDB$FIELD_TYPE AS "dataTypeCode", -- SQL data type code from RDB$FIELDS
FFLDS.rdb$field_precision AS "precision",
FFLDS.rdb$field_scale AS "scale",
FFLDS.rdb$field_length AS "length",
CASE PP.RDB$PARAMETER_TYPE
WHEN 0 THEN 'IN'
WHEN 1 THEN 'OUT'
ELSE CAST(PP.RDB$PARAMETER_TYPE AS VARCHAR(10)) -- Should ideally not happen for valid params
END AS "parameterMode",
PP.RDB$PARAMETER_NUMBER AS "position", -- 0-based for IN params, then 0-based for OUT params
-- Fields for ParameterInfo.NamedObjectInfo
TRIM(PP.RDB$PARAMETER_NAME) AS "pureName" -- NamedObjectInfo.pureName for the parameter
FROM
RDB$PROCEDURE_PARAMETERS PP
JOIN
RDB$PROCEDURES P ON PP.RDB$PROCEDURE_NAME = P.RDB$PROCEDURE_NAME
JOIN
RDB$FIELDS FFLDS ON PP.RDB$FIELD_SOURCE = FFLDS.RDB$FIELD_NAME -- Links parameter to its base field type
WHERE
COALESCE(P.RDB$SYSTEM_FLAG, 0) = 0 -- Filter for user-defined procedures
ORDER BY
"owningObjectName", PP.RDB$PARAMETER_TYPE, "position"; -- Order by IN(0)/OUT(1) then by position
`;

View File

@@ -0,0 +1,16 @@
// Lists user-defined stored procedures with their PSQL source.
// OBJECT_ID_CONDITION is substituted by the analyser
// ('procedures:<name>' ids).
module.exports = `
SELECT
TRIM(P.RDB$PROCEDURE_NAME) AS "pureName",
TRIM('PROCEDURE') AS "objectTypeField",
TRIM(P.RDB$DESCRIPTION) AS "objectComment",
P.RDB$PROCEDURE_SOURCE AS "createSql", -- Contains the PSQL body
FALSE AS "requiresFormat"
FROM
RDB$PROCEDURES P
WHERE
COALESCE(P.RDB$SYSTEM_FLAG, 0) = 0 -- Filter for user-defined procedures
AND P.RDB$PROCEDURE_TYPE IS NOT NULL -- Ensure it's a valid procedure type (0, 1, or 2)
AND ('procedures:' || TRIM(P.RDB$PROCEDURE_NAME)) =OBJECT_ID_CONDITION
ORDER BY
"pureName";
`;

View File

@@ -0,0 +1,14 @@
// Lists user-defined tables (RDB$RELATION_TYPE = 0 excludes views, GTTs,
// external tables etc.). OBJECT_ID_CONDITION is substituted by the analyser
// ('tables:<name>' ids).
module.exports = `SELECT
TRIM(RDB$RELATION_NAME) AS "pureName",
RDB$DESCRIPTION AS "objectComment",
RDB$FORMAT AS "objectTypeField"
FROM
RDB$RELATIONS
WHERE
RDB$SYSTEM_FLAG = 0 -- only user-defined tables
AND
RDB$RELATION_TYPE = 0 -- only tables (not views, etc.)
AND
('tables:' || TRIM(RDB$RELATION_NAME)) =OBJECT_ID_CONDITION
ORDER BY
"pureName";`;

View File

@@ -0,0 +1,13 @@
// User-defined triggers with their type code (decoded elsewhere into timing
// and event) and PSQL body. OBJECT_ID_CONDITION is substituted by the
// analyser ('triggers:<name>' ids).
module.exports = `
SELECT
TRIM(rtr.RDB$TRIGGER_NAME) as "pureName",
TRIM(rtr.RDB$RELATION_NAME) as "tableName",
rtr.RDB$TRIGGER_TYPE as TRIGGERTYPE,
CAST(rtr.RDB$TRIGGER_SOURCE AS VARCHAR(8191)) AS TRIGGER_BODY_SQL
FROM
RDB$TRIGGERS rtr
JOIN RDB$RELATIONS rel ON rtr.RDB$RELATION_NAME = rel.RDB$RELATION_NAME
WHERE rtr.RDB$SYSTEM_FLAG = 0
AND ('triggers:' || TRIM(rtr.RDB$TRIGGER_NAME)) =OBJECT_ID_CONDITION
ORDER BY rtr.RDB$TRIGGER_NAME
`;

View File

@@ -0,0 +1,25 @@
// UNIQUE constraints with one row per key column, read via the constraint's
// backing index. OBJECT_ID_CONDITION is substituted by the analyser
// ('tables:<name>' ids).
module.exports = `
SELECT
TRIM(rc.RDB$CONSTRAINT_NAME) AS "constraintName", -- Name of the constraint
TRIM('unique') AS "constraintType", -- Type of the constraint
TRIM(rc.RDB$RELATION_NAME) AS "pureName", -- Context: Table the constraint is on
-- Column specific fields from RDB$INDEX_SEGMENTS for the backing index
TRIM(s.RDB$FIELD_NAME) AS "columnName", -- Name of the column in the unique key
CASE COALESCE(i.RDB$INDEX_TYPE, 0) -- isDescending: 0 for ASC (default), 1 for DESC for the backing index
WHEN 1 THEN TRUE
ELSE FALSE
END AS "isDescending"
FROM
RDB$RELATION_CONSTRAINTS rc
JOIN
-- RDB$INDEX_NAME in RDB$RELATION_CONSTRAINTS is the name of the index that enforces the UNIQUE constraint
RDB$INDICES i ON rc.RDB$INDEX_NAME = i.RDB$INDEX_NAME
JOIN
RDB$INDEX_SEGMENTS s ON i.RDB$INDEX_NAME = s.RDB$INDEX_NAME
WHERE
rc.RDB$CONSTRAINT_TYPE = 'UNIQUE' -- Filter for UNIQUE constraints
AND COALESCE(i.RDB$SYSTEM_FLAG, 0) = 0 -- Typically, backing indexes for user UQ constraints are user-related.
AND
('tables:' || TRIM(rc.RDB$RELATION_NAME)) =OBJECT_ID_CONDITION
`;

View File

@@ -0,0 +1 @@
// Returns the Firebird engine version string (e.g. '4.0.2') via the built-in
// SYSTEM context namespace.
module.exports = `SELECT rdb$get_context('SYSTEM', 'ENGINE_VERSION') as version from rdb$database;`;

View File

@@ -0,0 +1,14 @@
// Lists user-defined views (RDB$RELATION_TYPE = 1).
// NOTE(review): the object-id filter uses the 'tables:' prefix even though
// this query selects views — verify against the analyser whether 'views:'
// is expected here.
module.exports = `SELECT
TRIM(RDB$RELATION_NAME) AS "pureName",
RDB$DESCRIPTION AS "objectComment",
RDB$FORMAT AS "objectTypeField"
FROM
RDB$RELATIONS
WHERE
RDB$SYSTEM_FLAG = 0 -- only user-defined tables
AND
RDB$RELATION_TYPE = 1 -- only views (not tables, etc.)
AND
('tables:' || TRIM(RDB$RELATION_NAME)) =OBJECT_ID_CONDITION
ORDER BY
"pureName";`;

View File

@@ -0,0 +1,69 @@
const { SqlDumper } = global.DBGATE_PACKAGES['dbgate-tools'];
// Firebird DDL dumper. Uses SqlDumper's format mini-language:
// ^word = case-normalized keyword, %f = qualified object name, %i = quoted
// identifier, %s = raw string, %k = keyword parameter, &n = newline,
// &> / &< = indent / unindent.
class Dumper extends SqlDumper {
// Identity columns: GENERATED BY DEFAULT AS IDENTITY (Firebird 3+ syntax).
autoIncrement() {
this.put(' ^generated ^by ^default ^as ^identity');
}
// ALTER TABLE ... DROP <column>
dropColumn(column) {
this.putCmd('^alter ^table %f ^drop %i', column, column.columnName);
}
// Firebird renames columns with ALTER COLUMN ... TO (not RENAME COLUMN).
renameColumn(column, newName) {
this.putCmd('^alter ^table %f ^alter ^column %i ^to %i', column, column.columnName, newName);
}
// Emits the minimal sequence of ALTER COLUMN statements to migrate oldcol
// into newcol: rename, nullability change, default change.
changeColumn(oldcol, newcol, constraints) {
if (oldcol.columnName != newcol.columnName) {
this.putCmd('^alter ^table %f ^alter ^column %i ^to %i', oldcol, oldcol.columnName, newcol.columnName);
}
if (oldcol.notNull != newcol.notNull) {
if (newcol.notNull) {
this.putCmd('^alter ^table %f ^alter ^column %i ^set ^not ^null', newcol, newcol.columnName);
} else {
this.putCmd('^alter ^table %f ^alter ^column %i ^drop ^not ^null', newcol, newcol.columnName);
}
}
if (oldcol.defaultValue != newcol.defaultValue) {
// NOTE(review): truthiness check — a new default of '' or 0 (as a raw
// SQL expression string this is unlikely, but confirm) would take the
// DROP DEFAULT branch.
if (newcol.defaultValue) {
this.putCmd(
'^alter ^table %f ^alter ^column %i ^set ^default %s',
newcol,
newcol.columnName,
newcol.defaultValue
);
} else {
this.putCmd('^alter ^table %f ^alter ^column %i ^drop ^default', newcol, newcol.columnName);
}
}
}
// Firebird starts an explicit transaction with SET TRANSACTION (no BEGIN).
beginTransaction() {
this.putCmd('^set ^transaction');
}
// CREATE [UNIQUE] [ASCENDING|DESCENDING] INDEX ... ON ... (cols).
// The direction keyword applies to the whole index; it is taken from the
// first column's isDescending flag.
createIndex(ix) {
const firstCol = ix.columns[0];
this.put('^create');
if (ix.isUnique) this.put(' ^unique');
this.put(
' %k ^index %i &n^on %f (&>&n',
firstCol.isDescending == true ? 'DESCENDING' : 'ASCENDING',
ix.constraintName,
ix
);
this.putCollection(',&n', ix.columns, col => {
this.put('%i', col.columnName);
});
this.put('&<&n)');
// Partial index support (CREATE INDEX ... WHERE ...), Firebird 5+.
if (ix.filterDefinition && this.dialect.filteredIndexes) {
this.put('&n^where %s', ix.filterDefinition);
}
this.endCommand();
}
}
module.exports = Dumper;

View File

@@ -0,0 +1,105 @@
const { driverBase } = global.DBGATE_PACKAGES['dbgate-tools'];
const Dumper = require('./Dumper');
/** @type {import('dbgate-types').SqlDialect} */
// Capability flags describing Firebird SQL for the generic DbGate tooling
// (DDL generation, query building, alter-plan dependency ordering).
const dialect = {
rangeSelect: true,
ilike: true,
// Firebird has no schemas below the database level.
multipleSchema: false,
stringEscapeChar: "'",
fallbackDataType: 'varchar(256)',
anonymousPrimaryKey: false,
enableConstraintsPerTable: true,
stringAgg: true,
// Paging uses Firebird's FIRST n SKIP m syntax instead of LIMIT/OFFSET.
offsetFirstSkipSyntax: true,
dropColumnDependencies: ['dependencies', 'primaryKeys', 'foreignKeys', 'indexes', 'uniques'],
changeColumnDependencies: ['dependencies', 'primaryKeys', 'indexes', 'uniques'],
renameColumnDependencies: ['dependencies', 'foreignKeys', 'uniques'],
defaultValueBeforeNullability: true,
quoteIdentifier(s) {
return `"${s}"`;
},
dbFileExtension: '.fdb',
// Columns are nullable by default; an explicit NULL keyword is not emitted.
implicitNullDeclaration: true,
createColumn: true,
dropColumn: true,
changeColumn: true,
createIndex: true,
dropIndex: true,
createForeignKey: true,
dropForeignKey: true,
createPrimaryKey: true,
dropPrimaryKey: true,
createUnique: true,
dropUnique: true,
createCheck: true,
dropCheck: true,
allowMultipleValuesInsert: false,
renameSqlObject: true,
// Partial indexes (CREATE INDEX ... WHERE), Firebird 5+.
filteredIndexes: true,
// Firebird has no native table rename statement.
disableRenameTable: true,
};
// Options for the shared SQL script splitter: plain semicolon-terminated
// statements, quote-doubling escapes, no GO/slash/dollar-quoted delimiters.
const firebirdSplitterOptions = {
stringsBegins: ["'", '"'],
stringsEnds: {
"'": "'",
'"': '"',
},
stringEscapes: {
"'": "'", // Single quote is escaped by another single quote
'"': '"', // Double quote is escaped by another double quote
},
allowSemicolon: true,
allowCustomDelimiter: false,
allowCustomSqlTerminator: false,
allowGoDelimiter: false,
allowSlashDelimiter: false,
allowDollarDollarString: false,
noSplit: false,
doubleDashComments: true,
multilineComments: true,
javaScriptComments: false,
skipSeparatorBeginEnd: false,
ignoreComments: false,
preventSingleLineSplit: false,
adaptiveGoSplit: false,
returnRichInfo: false,
splitByLines: false,
splitByEmptyLine: false,
copyFromStdin: false,
queryParameterStyle: ':', // Firebird uses colon-prefixed parameters (:param_name)
};
/** @type {import('dbgate-types').EngineDriver} */
// Shared (frontend + backend) part of the Firebird driver: connection form
// behavior, dialect, splitter options and dumper wiring on top of driverBase.
const firebirdDriverBase = {
  ...driverBase,
  defaultPort: 3050,
  // Only show the connection-form fields Firebird actually uses.
  showConnectionField: field => ['port', 'user', 'password', 'server', 'databaseFile'].includes(field),
  getQuerySplitterOptions: () => firebirdSplitterOptions,
  // Firebird connects to a single database file; mark the saved connection as
  // single-database and use the file path as the default database.
  beforeConnectionSave: connection => {
    const { databaseFile } = connection;
    return {
      ...connection,
      singleDatabase: true,
      defaultDatabase: databaseFile,
    };
  },
  // Maps generic type names coming from other engines to Firebird equivalents;
  // unknown types pass through unchanged.
  adaptDataType(dataType) {
    const lower = dataType?.toLowerCase();
    if (lower === 'datetime') return 'TIMESTAMP';
    if (lower === 'text') return 'BLOB SUB_TYPE 1 CHARACTER SET UTF8';
    return dataType;
  },
  engine: 'firebird@dbgate-plugin-firebird',
  title: 'Firebird',
  supportsTransactions: true,
  dumperClass: Dumper,
  dialect,
};

module.exports = firebirdDriverBase;

View File

@@ -0,0 +1,6 @@
import driver from './driver';

// Frontend plugin descriptor consumed by the DbGate plugin loader.
const plugin = {
  packageName: 'dbgate-plugin-firebird',
  drivers: [driver],
};

export default plugin;

View File

@@ -0,0 +1,46 @@
const webpack = require('webpack');
const path = require('path');

const packageJson = require('./package.json');
const buildPluginExternals = require('../../common/buildPluginExternals');

// Externalize every dependency declared in package.json so the backend bundle
// contains only the plugin's own code.
const externals = buildPluginExternals(packageJson);

// Webpack config for the plugin backend: single CommonJS bundle targeting
// Node, emitted to dist/backend.js.
const config = {
  context: __dirname + '/src/backend',
  entry: {
    app: './index.js',
  },
  target: 'node',
  output: {
    path: path.resolve(__dirname, 'dist'),
    filename: 'backend.js',
    libraryTarget: 'commonjs2',
  },

  // uncomment for disable minimalization
  // optimization: {
  //   minimize: false,
  // },

  plugins: [
    // Ignore optional lazy imports that are not installed; resources that
    // resolve locally are kept in the bundle.
    new webpack.IgnorePlugin({
      checkResource(resource) {
        const lazyImports = ['pg-native', 'uws'];
        if (!lazyImports.includes(resource)) {
          return false;
        }
        try {
          require.resolve(resource);
        } catch (err) {
          return true; // not installed -> drop the import
        }
        return false;
      },
    }),
  ],
  externals,
};

module.exports = config;

View File

@@ -0,0 +1,30 @@
const webpack = require('webpack');
const path = require('path');

// Webpack config for the plugin frontend: browser bundle exposed as the
// global `plugin` variable, emitted to dist/frontend.js.
const config = {
  context: __dirname + '/src/frontend',
  entry: {
    app: './index.js',
  },
  target: 'web',
  output: {
    path: path.resolve(__dirname, 'dist'),
    filename: 'frontend.js',
    libraryTarget: 'var',
    library: 'plugin',
  },
  plugins: [
    // In the browser, shared DbGate packages live on window.DBGATE_PACKAGES.
    new webpack.DefinePlugin({
      'global.DBGATE_PACKAGES': 'window.DBGATE_PACKAGES',
    }),
  ],

  // uncomment for disable minimalization
  // optimization: {
  //   minimize: false,
  // },
};

module.exports = config;

View File

@@ -81,13 +81,13 @@ jobs:
--health-timeout 5s
--health-retries 5
ports:
- 15000:5432
- 15000:5432
mysql-integr:
image: mysql:8.0.18
env:
MYSQL_ROOT_PASSWORD: Pwd2020Db
ports:
ports:
- 15001:3306
mssql-integr:
@@ -96,14 +96,14 @@ jobs:
ACCEPT_EULA: Y
SA_PASSWORD: Pwd2020Db
MSSQL_PID: Express
ports:
ports:
- 15002:1433
clickhouse-integr:
image: bitnami/clickhouse:24.8.4
env:
CLICKHOUSE_ADMIN_PASSWORD: Pwd2020Db
ports:
ports:
- 15005:8123
oracle-integr:
@@ -122,3 +122,15 @@ jobs:
image: ghcr.io/tursodatabase/libsql-server:latest
ports:
- '8080:8080'
firebird:
image: firebirdsql/firebird:latest
env:
FIREBIRD_DATABASE: mydatabase.fdb
FIREBIRD_USER: dbuser
FIREBIRD_PASSWORD: dbpassword
ISC_PASSWORD: masterkey
FIREBIRD_TRACE: false
FIREBIRD_USE_LEGACY_AUTH: true
ports:
- '3050:3050'

View File

@@ -3405,6 +3405,11 @@ better-sqlite3@11.8.1:
bindings "^1.5.0"
prebuild-install "^7.1.1"
big-integer@^1.6.51:
version "1.6.52"
resolved "https://registry.yarnpkg.com/big-integer/-/big-integer-1.6.52.tgz#60a887f3047614a8e1bffe5d7173490a97dc8c85"
integrity sha512-QxD8cf2eVqJOOz63z6JIN9BzvVs/dlySa5HGSBH5xtR8dPteIRQnBxxKqkNTiT6jbDTF6jAfrd4oMcND9RGbQg==
big.js@^5.2.2:
version "5.2.2"
resolved "https://registry.yarnpkg.com/big.js/-/big.js-5.2.2.tgz#65f0af382f578bcdc742bd9c281e9cb2d7768328"
@@ -7973,6 +7978,11 @@ long@*, long@^5.2.1, long@~5.2.3:
resolved "https://registry.yarnpkg.com/long/-/long-5.2.3.tgz#a3ba97f3877cf1d778eccbcb048525ebb77499e1"
integrity sha512-lcHwpNoggQTObv5apGNCTdJrO69eHOZMi4BNC+rTLER8iHAqGrUVeLh/irVIM7zTw2bOXA8T6uNPeujwOLg/2Q==
long@^5.2.3:
version "5.3.2"
resolved "https://registry.yarnpkg.com/long/-/long-5.3.2.tgz#1d84463095999262d7d7b7f8bfd4a8cc55167f83"
integrity sha512-mNAgZ1GmyNhD7AuqnTG3/VQ26o760+ZYBPKjPvugO8+nLbYfX6TVpJPseBvopbdY+qpZ/lKUnmEc1LeZYS3QAA==
lru-cache@^10.2.0:
version "10.4.3"
resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-10.4.3.tgz#410fc8a17b70e598013df257c2446b7f3383f119"
@@ -8537,6 +8547,14 @@ node-cron@^2.0.3:
opencollective-postinstall "^2.0.0"
tz-offset "0.0.1"
node-firebird@^1.1.9:
version "1.1.9"
resolved "https://registry.yarnpkg.com/node-firebird/-/node-firebird-1.1.9.tgz#0e6815b4e209812a4c85b71227e40e268bedeb8b"
integrity sha512-6Ol+Koide1WbfUp4BJ1dSA4wm091jAgCwwSoihxO/RRdcfR+dMVDE9jd2Z2ixjk7q/vSNJUYORXv7jmRfvwdrw==
dependencies:
big-integer "^1.6.51"
long "^5.2.3"
node-gyp@^7.1.0:
version "7.1.2"
resolved "https://registry.yarnpkg.com/node-gyp/-/node-gyp-7.1.2.tgz#21a810aebb187120251c3bcec979af1587b188ae"