removed bindings to engines

This commit is contained in:
Jan Prochazka
2020-11-26 14:25:51 +01:00
parent 06e98cff9f
commit d7ceb297e9
48 changed files with 1 additions and 1913 deletions

View File

@@ -25,7 +25,6 @@
"byline": "^5.0.0",
"cors": "^2.8.5",
"cross-env": "^6.0.3",
"dbgate-engines": "^1.0.0",
"dbgate-sqltree": "^1.0.0",
"dbgate-tools": "^1.0.0",
"eslint": "^6.8.0",
@@ -37,13 +36,9 @@
"http": "^0.0.0",
"line-reader": "^0.4.0",
"lodash": "^4.17.15",
"mssql": "^6.0.1",
"mysql": "^2.17.1",
"nedb-promises": "^4.0.1",
"node-fetch": "^2.6.1",
"pacote": "^11.1.13",
"pg": "^7.17.0",
"pg-query-stream": "^3.1.1"
"pacote": "^11.1.13"
},
"scripts": {
"start": "nodemon src/index.js",

View File

@@ -1,21 +0,0 @@
// Native node modules handed over to database drivers, so that the driver
// packages themselves stay free of binary dependencies.
// NOTE: require order matches the original module (mssql, mysql, pg,
// pg-query-stream, fs, stream).
const nativeModules = {
  mssql: require('mssql'),
  mysql: require('mysql'),
  pg: require('pg'),
  pgQueryStream: require('pg-query-stream'),
  fs: require('fs'),
  stream: require('stream'),
};

/**
 * Opens a database connection through the given engine driver.
 * @param driver - EngineDriver instance (mssql/mysql/postgres)
 * @param connection - connection parameters forwarded to driver.connect
 * @returns whatever the driver's connect() returns (a pool/connection)
 */
function driverConnect(driver, connection) {
  return driver.connect(nativeModules, connection);
}

module.exports = driverConnect;

View File

@@ -1,52 +0,0 @@
# dbgate-engines
JavaScript library implementing MySQL, MS SQL and PostgreSQL operations. Serves as an abstraction layer for other DbGate packages, which can thus stay database-engine independent. It can be used both on the frontend (in a browser) and on the backend (in Node.js), but connecting to a real database is allowed only on the backend.
## Installation
yarn add dbgate-engines
## Usage
```javascript
const engines = require('dbgate-engines');
// driver supports operations of EngineDriver listed below
const driver = engines('mysql');
```
In most cases, you don't use driver methods directly, but you pass driver instance into other dbgate packages.
## Driver definition
```typescript
export interface EngineDriver {
// works on both frontend and backend
engine: string;
dialect: SqlDialect;
createDumper(): SqlDumper;
// works only on backend
connect(nativeModules, { server, port, user, password, database }): any;
query(pool: any, sql: string): Promise<QueryResult>;
stream(pool: any, sql: string, options: StreamOptions);
readQuery(pool: any, sql: string, structure?: TableInfo): Promise<stream.Readable>;
writeTable(pool: any, name: NamedObjectInfo, options: WriteTableOptions): Promise<stream.Writable>;
analyseSingleObject(
pool: any,
name: NamedObjectInfo,
objectTypeField: keyof DatabaseInfo
): Promise<TableInfo | ViewInfo | ProcedureInfo | FunctionInfo | TriggerInfo>;
analyseSingleTable(pool: any, name: NamedObjectInfo): Promise<TableInfo>;
getVersion(pool: any): Promise<{ version: string }>;
listDatabases(
pool: any
): Promise<
{
name: string;
}[]
>;
analyseFull(pool: any): Promise<DatabaseInfo>;
analyseIncremental(pool: any, structure: DatabaseInfo): Promise<DatabaseInfo>;
}
```

View File

@@ -1,7 +0,0 @@
import types from "dbgate-types";
/**
 * Resolves an engine driver from either an engine name (e.g. 'mysql')
 * or a connection object carrying an `engine` field.
 */
declare function getDriver(
connection: string | { engine: string }
): types.EngineDriver;
export = getDriver;

View File

@@ -1,24 +0,0 @@
const _ = require("lodash");
const mssql = require("./mssql");
const mysql = require("./mysql");
const postgres = require("./postgres");

// Registry of available engine drivers, keyed by engine name.
const drivers = {
  mssql,
  mysql,
  postgres
};

/**
 * Resolves an engine driver from an engine name or a connection object.
 * @param {string | { engine?: string }} connection - engine name
 *   ('mssql' | 'mysql' | 'postgres') or a connection object with an
 *   `engine` property.
 * @returns the matching driver, or undefined for an unknown engine name.
 * @throws {Error} when no engine can be extracted from the argument.
 */
function getDriver(connection) {
  if (_.isString(connection)) {
    return drivers[connection];
  }
  if (_.isPlainObject(connection)) {
    const { engine } = connection;
    if (engine) {
      return drivers[engine];
    }
  }
  // BUG FIX: interpolating an object directly would render the useless
  // "[object Object]"; serialize it so the error is actionable.
  throw new Error(`Cannot extract engine from ${JSON.stringify(connection)}`);
}

module.exports = getDriver;

View File

@@ -1,212 +0,0 @@
const fp = require('lodash/fp');
const _ = require('lodash');
const sql = require('./sql');
const { DatabaseAnalyser } = require('dbgate-tools');
const { filter } = require('lodash');
const { isTypeString, isTypeNumeric } = require('dbgate-tools');
/**
 * Translates a sys.objects type code into the DatabaseInfo collection name
 * it belongs to. The code is trimmed first because SQL Server pads the
 * type column with spaces. Returns null for unsupported object types.
 */
function objectTypeToField(type) {
  const typeToCollection = {
    U: 'tables',
    V: 'views',
    P: 'procedures',
    IF: 'functions',
    FN: 'functions',
    TF: 'functions',
    TR: 'triggers',
  };
  const field = typeToCollection[type.trim()];
  return field === undefined ? null : field;
}
/**
 * Builds a dbgate column descriptor from a raw analyser row.
 * Appends length/precision information to the type name when applicable.
 */
function getColumnInfo({
  isNullable,
  isIdentity,
  columnName,
  dataType,
  charMaxLength,
  numericPrecision,
  numericScale,
}) {
  // Expand the bare type name with size information when known.
  // Note: the checks are intentionally independent (not else-if),
  // preserving the original precedence.
  const formatDataType = () => {
    let result = dataType;
    if (charMaxLength && isTypeString(dataType)) result = `${dataType}(${charMaxLength})`;
    if (numericPrecision && numericScale && isTypeNumeric(dataType))
      result = `${dataType}(${numericPrecision},${numericScale})`;
    return result;
  };
  return {
    columnName,
    dataType: formatDataType(),
    notNull: !isNullable,
    autoIncrement: !!isIdentity,
  };
}
// Analyser reading SQL Server database structure (tables, views, procedures,
// functions, triggers) via the SQL templates in ./sql.
// Supports full, incremental (modification-driven) and single-object analysis,
// controlled by state inherited from DatabaseAnalyser
// (singleObjectFilter, modifications, structure).
class MsSqlAnalyser extends DatabaseAnalyser {
constructor(pool, driver) {
super(pool, driver);
// OBJECT_ID of the analysed object when running in single-object mode.
this.singleObjectId = null;
}
// Loads the SQL template named resFileName and specializes its literal
// '=[OBJECT_ID_CONDITION]' marker according to the current analysis mode.
// Returns null when the template is not relevant for the requested typeFields.
createQuery(resFileName, typeFields) {
let res = sql[resFileName];
if (this.singleObjectFilter) {
const { typeField } = this.singleObjectFilter;
if (!this.singleObjectId) return null;
if (!typeFields || !typeFields.includes(typeField)) return null;
return res.replace('=[OBJECT_ID_CONDITION]', ` = ${this.singleObjectId}`);
}
if (!this.modifications || !typeFields || this.modifications.length == 0) {
// full analysis - no object filtering
res = res.replace('=[OBJECT_ID_CONDITION]', ' is not null');
} else {
// incremental analysis - only added/changed objects of the requested types
const filterIds = this.modifications
.filter((x) => typeFields.includes(x.objectTypeField) && (x.action == 'add' || x.action == 'change'))
.map((x) => x.objectId);
if (filterIds.length == 0) {
// '= 0' matches nothing, keeping the query valid while returning no rows
res = res.replace('=[OBJECT_ID_CONDITION]', ' = 0');
} else {
res = res.replace('=[OBJECT_ID_CONDITION]', ` in (${filterIds.join(',')})`);
}
}
return res;
}
// Resolves singleObjectFilter's name into an OBJECT_ID, stored on the instance.
async getSingleObjectId() {
if (this.singleObjectFilter) {
const { schemaName, pureName, typeField } = this.singleObjectFilter;
const fullName = schemaName ? `[${schemaName}].[${pureName}]` : pureName;
const resId = await this.driver.query(this.pool, `SELECT OBJECT_ID('${fullName}') AS id`);
this.singleObjectId = resId.rows[0].id;
}
}
// Runs all metadata queries and merges the results into a DatabaseInfo-like
// structure keyed by objectId.
async _runAnalysis() {
await this.getSingleObjectId();
const tablesRows = await this.driver.query(this.pool, this.createQuery('tables', ['tables']));
const columnsRows = await this.driver.query(this.pool, this.createQuery('columns', ['tables']));
const pkColumnsRows = await this.driver.query(this.pool, this.createQuery('primaryKeys', ['tables']));
const fkColumnsRows = await this.driver.query(this.pool, this.createQuery('foreignKeys', ['tables']));
const schemaRows = await this.driver.query(this.pool, this.createQuery('getSchemas'));
const schemas = schemaRows.rows;
const sqlCodeRows = await this.driver.query(
this.pool,
this.createQuery('loadSqlCode', ['views', 'procedures', 'functions', 'triggers'])
);
// An object's source may span several syscomments rows (ordered by colid
// in the template) - concatenate them back into one string.
const getCreateSql = (row) =>
sqlCodeRows.rows
.filter((x) => x.pureName == row.pureName && x.schemaName == row.schemaName)
.map((x) => x.codeText)
.join('');
const viewsRows = await this.driver.query(this.pool, this.createQuery('views', ['views']));
const programmableRows = await this.driver.query(
this.pool,
this.createQuery('programmables', ['procedures', 'functions'])
);
const viewColumnRows = await this.driver.query(this.pool, this.createQuery('viewColumns', ['views']));
const tables = tablesRows.rows.map((row) => ({
...row,
columns: columnsRows.rows.filter((col) => col.objectId == row.objectId).map(getColumnInfo),
primaryKey: DatabaseAnalyser.extractPrimaryKeys(row, pkColumnsRows.rows),
foreignKeys: DatabaseAnalyser.extractForeignKeys(row, fkColumnsRows.rows),
}));
const views = viewsRows.rows.map((row) => ({
...row,
createSql: getCreateSql(row),
columns: viewColumnRows.rows.filter((col) => col.objectId == row.objectId).map(getColumnInfo),
}));
// 'P' = stored procedure, 'FN'/'IF'/'TF' = function variants
// (same codes as in objectTypeToField above)
const procedures = programmableRows.rows
.filter((x) => x.sqlObjectType.trim() == 'P')
.map((row) => ({
...row,
createSql: getCreateSql(row),
}));
const functions = programmableRows.rows
.filter((x) => ['FN', 'IF', 'TF'].includes(x.sqlObjectType.trim()))
.map((row) => ({
...row,
createSql: getCreateSql(row),
}));
return this.mergeAnalyseResult(
{
tables,
views,
procedures,
functions,
schemas,
},
(x) => x.objectId
);
}
// Builds 'remove' modifications for cached objects of one collection whose
// objectId no longer appears in idArray (i.e. they were dropped).
getDeletedObjectsForField(idArray, objectTypeField) {
return this.structure[objectTypeField]
.filter((x) => !idArray.includes(x.objectId))
.map((x) => ({
oldName: _.pick(x, ['schemaName', 'pureName']),
objectId: x.objectId,
action: 'remove',
objectTypeField,
}));
}
// Collects 'remove' modifications across all object collections.
getDeletedObjects(idArray) {
return [
...this.getDeletedObjectsForField(idArray, 'tables'),
...this.getDeletedObjectsForField(idArray, 'views'),
...this.getDeletedObjectsForField(idArray, 'procedures'),
...this.getDeletedObjectsForField(idArray, 'functions'),
...this.getDeletedObjectsForField(idArray, 'triggers'),
];
}
// Compares current modify dates against the cached structure and returns the
// add/change/remove modification list driving incremental analysis.
async getModifications() {
const modificationsQueryData = await this.driver.query(this.pool, this.createQuery('modifications'));
// console.log('MOD - SRC', modifications);
// console.log(
// 'MODs',
// this.structure.tables.map((x) => x.modifyDate)
// );
const modifications = modificationsQueryData.rows.map((x) => {
const { type, objectId, modifyDate, schemaName, pureName } = x;
const field = objectTypeToField(type);
if (!this.structure[field]) return null;
// @ts-ignore
const obj = this.structure[field].find((x) => x.objectId == objectId);
// object not modified
// (timestamps within 1s are treated as equal, presumably to absorb
// datetime rounding - confirm against DatabaseAnalyser expectations)
if (obj && Math.abs(new Date(modifyDate).getTime() - new Date(obj.modifyDate).getTime()) < 1000) return null;
/** @type {import('dbgate-types').DatabaseModification} */
const action = obj
? {
newName: { schemaName, pureName },
oldName: _.pick(obj, ['schemaName', 'pureName']),
action: 'change',
objectTypeField: field,
objectId,
}
: {
newName: { schemaName, pureName },
action: 'add',
objectTypeField: field,
objectId,
};
return action;
});
return [..._.compact(modifications), ...this.getDeletedObjects(modificationsQueryData.rows.map((x) => x.objectId))];
}
}
module.exports = MsSqlAnalyser;

View File

@@ -1,54 +0,0 @@
const { SqlDumper } = require('dbgate-tools');
// Dumper producing SQL Server (T-SQL) flavoured SQL.
// NOTE(review): the put()/putCmd() format placeholders (^keyword, %f full
// name, %k keyword argument, %c column expression, &n newline) are
// interpreted by SqlDumper in dbgate-tools - verify semantics there.
class MsSqlDumper extends SqlDumper {
// SQL Server spells auto-increment as IDENTITY.
autoIncrement() {
this.put(' ^identity');
}
// Prefix string literals containing non-latin-1 characters with N so they
// are treated as Unicode (NVARCHAR) literals.
putStringValue(value) {
if (/[^\u0000-\u00ff]/.test(value)) {
this.putRaw('N');
}
super.putStringValue(value);
}
// Emits SET IDENTITY_INSERT <table> ON/OFF.
allowIdentityInsert(table, allow) {
this.putCmd('^set ^identity_insert %f %k;&n', table, allow ? 'on' : 'off');
}
// Renders date-part extraction/grouping expressions using DATEPART/CONVERT.
/** @param type {import('dbgate-types').TransformType} */
transform(type, dumpExpr) {
switch (type) {
case 'GROUP:YEAR':
case 'YEAR':
this.put('^datepart(^year, %c)', dumpExpr);
break;
case 'MONTH':
this.put('^datepart(^month, %c)', dumpExpr);
break;
case 'DAY':
this.put('^datepart(^day, %c)', dumpExpr);
break;
case 'GROUP:MONTH':
// "<year>-<month>" string, so rows group by calendar month
this.put(
"^convert(^varchar(100), ^datepart(^year, %c)) + '-' + ^convert(^varchar(100), ^datepart(^month, %c))",
dumpExpr,
dumpExpr
);
break;
case 'GROUP:DAY':
// "<year>-<month>-<day>" string, so rows group by calendar day
this.put(
"^convert(^varchar(100), ^datepart(^year, %c)) + '-' + ^convert(^varchar(100), ^datepart(^month, %c))+'-' + ^convert(^varchar(100), ^datepart(^day, %c))",
dumpExpr,
dumpExpr,
dumpExpr
);
break;
default:
// unknown transform - emit the bare expression unchanged
dumpExpr();
break;
}
}
}
module.exports = MsSqlDumper;

View File

@@ -1,48 +0,0 @@
const { createBulkInsertStreamBase } = require('dbgate-tools');
/**
 * Creates a writable stream performing SQL Server bulk inserts into a table.
 * Rows written to the stream are buffered by createBulkInsertStreamBase and
 * flushed via the mssql driver's bulk API whenever send() is invoked.
 * @param {import('dbgate-types').EngineDriver} driver
 * @param mssql - mssql native module (taken from pool._nativeModules)
 * @param stream - node 'stream' module
 * @param pool - connected mssql ConnectionPool
 * @param name - target table name as { schemaName, pureName }
 * @param options - forwarded to createBulkInsertStreamBase
 */
function createBulkInsertStream(driver, mssql, stream, pool, name, options) {
  const writable = createBulkInsertStreamBase(driver, stream, pool, name, options);
  const fullName = name.schemaName ? `[${name.schemaName}].[${name.pureName}]` : name.pureName;

  writable.send = async () => {
    // Lazily load target column metadata from an empty result set.
    if (!writable.templateColumns) {
      const fullNameQuoted = name.schemaName
        ? `${driver.dialect.quoteIdentifier(name.schemaName)}.${driver.dialect.quoteIdentifier(name.pureName)}`
        : driver.dialect.quoteIdentifier(name.pureName);
      const respTemplate = await pool.request().query(`SELECT * FROM ${fullNameQuoted} WHERE 1=0`);
      writable.templateColumns = respTemplate.recordset.toTable().columns;
    }
    const rows = writable.buffer;
    writable.buffer = [];
    const table = new mssql.Table(fullName);
    // table.create = options.createIfNotExists;
    // BUG FIX: the original read this.columnNames inside an arrow function,
    // where `this` is the module context (always undefined.columnNames);
    // the column list lives on the stream itself.
    // (assumes createBulkInsertStreamBase sets columnNames on the stream -
    // TODO confirm in dbgate-tools)
    for (const column of writable.columnNames) {
      const tcol = writable.templateColumns.find((x) => x.name == column);
      // BUG FIX: guard the whole options object, not just the type - the
      // original dereferenced tcol.nullable even when tcol was undefined.
      if (tcol) {
        table.columns.add(column, tcol.type, {
          nullable: tcol.nullable,
          length: tcol.length,
          precision: tcol.precision,
          scale: tcol.scale,
        });
      } else {
        // column unknown in the target table - fall back to NVARCHAR(MAX)
        table.columns.add(column, mssql.NVarChar(mssql.MAX), { nullable: true });
      }
    }
    for (const row of rows) {
      table.rows.add(...writable.columnNames.map((col) => row[col]));
    }
    const request = pool.request();
    await request.bulk(table);
  };

  return writable;
}
module.exports = createBulkInsertStream;

View File

@@ -1,210 +0,0 @@
const _ = require('lodash');
const { driverBase } = require('dbgate-tools');
const MsSqlAnalyser = require('./MsSqlAnalyser');
const MsSqlDumper = require('./MsSqlDumper');
const createBulkInsertStream = require('./createBulkInsertStream');
// SQL Server dialect description consumed by other dbgate packages
// (query builders / dumpers).
/** @type {import('dbgate-types').SqlDialect} */
const dialect = {
limitSelect: true,
rangeSelect: true,
offsetFetchRangeSyntax: true,
stringEscapeChar: "'",
fallbackDataType: 'nvarchar(max)',
// SQL Server quotes identifiers with square brackets.
quoteIdentifier(s) {
return `[${s}]`;
},
};
/**
 * Converts the mssql driver's column map into dbgate column descriptors.
 * Columns without a name receive a generated one (col1, col2, ...), and each
 * renaming is recorded in a mapper (original name -> generated name) so rows
 * can later be rewritten to match.
 * @returns [columnDescriptors, mapper]
 */
function extractColumns(columns) {
  const mapper = {};
  const ordered = _.sortBy(_.values(columns), 'index');
  const res = ordered.map((col) => ({
    ...col,
    columnName: col.name,
    notNull: !col.nullable,
  }));

  // Finds the first unused name of the form colN.
  const generateName = () => {
    let suffix = 1;
    while (res.find((x) => x.columnName == `col${suffix}`)) suffix += 1;
    return `col${suffix}`;
  };

  for (const col of res) {
    if (col.columnName) continue;
    const generated = generateName();
    mapper[col.columnName] = generated;
    col.columnName = generated;
  }
  return [res, mapper];
}
// SQL Server engine driver: connection management, plain queries, streamed
// queries (UI + ETL variants) and bulk table writes.
/** @type {import('dbgate-types').EngineDriver} */
const driver = {
...driverBase,
analyserClass: MsSqlAnalyser,
dumperClass: MsSqlDumper,
// Opens a ConnectionPool; native modules are stashed on the pool so later
// calls (readQuery/writeTable) can reach them without importing anything.
async connect(nativeModules, { server, port, user, password, database }) {
const pool = new nativeModules.mssql.ConnectionPool({
server,
port,
user,
password,
database,
// 1 hour - long-running analysis/ETL queries must not time out
requestTimeout: 1000 * 3600,
options: {
enableArithAbort: true,
},
});
await pool.connect();
pool._nativeModules = nativeModules;
return pool;
},
// Executes sql and returns { rows, columns, rowsAffected? }.
// A null sql is a no-op returning an empty result (createQuery may return null).
// @ts-ignore
async query(pool, sql) {
if (sql == null) {
return {
rows: [],
columns: [],
};
}
const resp = await pool.request().query(sql);
// console.log(Object.keys(resp.recordset));
// console.log(resp);
const res = {};
if (resp.recordset) {
const [columns] = extractColumns(resp.recordset.columns);
res.columns = columns;
res.rows = resp.recordset;
}
if (resp.rowsAffected) {
res.rowsAffected = _.sum(resp.rowsAffected);
}
return res;
},
// Streams query execution, reporting recordsets/rows/messages through the
// options callbacks (used by the query UI).
async stream(pool, sql, options) {
const request = await pool.request();
// mapper of renamed (originally unnamed) columns for the current recordset
let currentMapper = null;
const handleInfo = (info) => {
const { message, lineNumber, procName } = info;
options.info({
message,
line: lineNumber,
procedure: procName,
time: new Date(),
severity: 'info',
});
};
const handleDone = (result) => {
// console.log('RESULT', result);
options.done(result);
};
const handleRow = (row) => {
// if (currentMapper) {
// for (const colname of _.keys(currentMapper)) {
// let index = 0;
// for (const newcolname of currentMapper[colname]) {
// row[newcolname] = row[colname][index];
// index += 1;
// }
// delete row[colname];
// }
// }
// Rewrite keys of renamed columns; duplicate-name values arrive as arrays
// and are flattened into a comma-separated string.
if (currentMapper) {
row = { ...row };
for (const colname of _.keys(currentMapper)) {
const newcolname = currentMapper[colname];
row[newcolname] = row[colname];
if (_.isArray(row[newcolname])) row[newcolname] = row[newcolname].join(',');
delete row[colname];
}
}
options.row(row);
};
const handleRecordset = (columns) => {
const [extractedColumns, mapper] = extractColumns(columns);
currentMapper = mapper;
options.recordset(extractedColumns);
};
const handleError = (error) => {
const { message, lineNumber, procName } = error;
options.info({
message,
line: lineNumber,
procedure: procName,
time: new Date(),
severity: 'error',
});
};
request.stream = true;
request.on('recordset', handleRecordset);
request.on('row', handleRow);
request.on('error', handleError);
request.on('done', handleDone);
request.on('info', handleInfo);
request.query(sql);
return request;
},
// Streams a query into an object-mode PassThrough: first a structure chunk
// ({ columns } or the provided structure), then one chunk per row. Used by ETL.
async readQuery(pool, sql, structure) {
const request = await pool.request();
const { stream } = pool._nativeModules;
const pass = new stream.PassThrough({
objectMode: true,
highWaterMark: 100,
});
request.stream = true;
request.on('recordset', (driverColumns) => {
const [columns, mapper] = extractColumns(driverColumns);
pass.write(structure || { columns });
});
request.on('row', (row) => pass.write(row));
request.on('error', (err) => {
console.error(err);
pass.end();
});
request.on('done', () => pass.end());
request.query(sql);
return pass;
},
// Writable stream bulk-inserting rows into the named table.
async writeTable(pool, name, options) {
const { stream, mssql } = pool._nativeModules;
return createBulkInsertStream(this, mssql, stream, pool, name, options);
},
async getVersion(pool) {
const { version } = (await this.query(pool, 'SELECT @@VERSION AS version')).rows[0];
return { version };
},
async listDatabases(pool) {
const { rows } = await this.query(pool, 'SELECT name FROM sys.databases order by name');
return rows;
},
dialect,
engine: 'mssql',
};
module.exports = driver;

View File

@@ -1,20 +0,0 @@
// Column metadata of user tables ('U'). The literal '=[OBJECT_ID_CONDITION]'
// marker is rewritten by MsSqlAnalyser.createQuery into a concrete filter.
module.exports = `
select c.name as columnName, t.name as dataType, c.object_id as objectId, c.is_identity as isIdentity,
c.max_length as maxLength, c.precision, c.scale, c.is_nullable as isNullable,
col.CHARACTER_MAXIMUM_LENGTH as charMaxLength,
d.definition as defaultValue, d.name as defaultConstraint,
m.definition as computedExpression, m.is_persisted as isPersisted, c.column_id as columnId,
col.NUMERIC_PRECISION as numericPrecision,
col.NUMERIC_SCALE as numericScale,
-- TODO only if version >= 2008
c.is_sparse as isSparse
from sys.columns c
inner join sys.types t on c.system_type_id = t.system_type_id and c.user_type_id = t.user_type_id
inner join sys.objects o on c.object_id = o.object_id
INNER JOIN sys.schemas u ON u.schema_id=o.schema_id
INNER JOIN INFORMATION_SCHEMA.COLUMNS col ON col.TABLE_NAME = o.name AND col.TABLE_SCHEMA = u.name and col.COLUMN_NAME = c.name
left join sys.default_constraints d on c.default_object_id = d.object_id
left join sys.computed_columns m on m.object_id = c.object_id and m.column_id = c.column_id
where o.type = 'U' and o.object_id =[OBJECT_ID_CONDITION]
order by c.column_id
`;

View File

@@ -1,40 +0,0 @@
// Foreign key columns including referenced table/column and update/delete
// rules. '=[OBJECT_ID_CONDITION]' is rewritten by MsSqlAnalyser.createQuery.
module.exports = `
SELECT
schemaName = FK.TABLE_SCHEMA,
pureName = FK.TABLE_NAME,
columnName = CU.COLUMN_NAME,
refSchemaName = ISNULL(IXS.name, PK.TABLE_SCHEMA),
refTableName = ISNULL(IXT.name, PK.TABLE_NAME),
refColumnName = IXCC.name,
constraintName = C.CONSTRAINT_NAME,
updateAction = rc.UPDATE_RULE,
deleteAction = rc.DELETE_RULE,
objectId = o.object_id
FROM INFORMATION_SCHEMA.REFERENTIAL_CONSTRAINTS C
INNER JOIN INFORMATION_SCHEMA.TABLE_CONSTRAINTS FK ON C.CONSTRAINT_NAME = FK.CONSTRAINT_NAME
LEFT JOIN INFORMATION_SCHEMA.TABLE_CONSTRAINTS PK ON C.UNIQUE_CONSTRAINT_NAME = PK.CONSTRAINT_NAME
LEFT JOIN INFORMATION_SCHEMA.KEY_COLUMN_USAGE CU ON C.CONSTRAINT_NAME = CU.CONSTRAINT_NAME
--LEFT JOIN (
--SELECT i1.TABLE_NAME, i2.COLUMN_NAME
--FROM INFORMATION_SCHEMA.TABLE_CONSTRAINTS i1
--INNER JOIN INFORMATION_SCHEMA.KEY_COLUMN_USAGE i2 ON i1.CONSTRAINT_NAME = i2.CONSTRAINT_NAME
--WHERE i1.CONSTRAINT_TYPE = 'PRIMARY KEY'
--) PT ON PT.TABLE_NAME = PK.TABLE_NAME
INNER JOIN INFORMATION_SCHEMA.REFERENTIAL_CONSTRAINTS rc ON FK.CONSTRAINT_NAME = rc.CONSTRAINT_NAME
LEFT JOIN sys.indexes IX ON IX.name = C.UNIQUE_CONSTRAINT_NAME
LEFT JOIN sys.objects IXT ON IXT.object_id = IX.object_id
LEFT JOIN sys.index_columns IXC ON IX.index_id = IXC.index_id and IX.object_id = IXC.object_id
LEFT JOIN sys.columns IXCC ON IXCC.column_id = IXC.column_id AND IXCC.object_id = IXC.object_id
LEFT JOIN sys.schemas IXS ON IXT.schema_id = IXS.schema_id
inner join sys.objects o on FK.TABLE_NAME = o.name
inner join sys.schemas s on o.schema_id = s.schema_id and FK.TABLE_SCHEMA = s.name
where o.object_id =[OBJECT_ID_CONDITION]
`;

View File

@@ -1 +0,0 @@
// Lists all schemas of the current database.
module.exports = `select schema_id as objectId, name as schemaName from sys.schemas`;

View File

@@ -1,23 +0,0 @@
// Aggregates all SQL templates used by MsSqlAnalyser, keyed by the
// resFileName values passed to createQuery().
const columns = require('./columns');
const foreignKeys = require('./foreignKeys');
const primaryKeys = require('./primaryKeys');
const tables = require('./tables');
const modifications = require('./modifications');
const loadSqlCode = require('./loadSqlCode');
const views = require('./views');
const programmables = require('./programmables');
const viewColumns = require('./viewColumns');
const getSchemas = require('./getSchemas');
module.exports = {
columns,
tables,
foreignKeys,
primaryKeys,
modifications,
loadSqlCode,
views,
programmables,
viewColumns,
getSchemas,
};

View File

@@ -1,8 +0,0 @@
// Source text of views/procedures/functions/triggers from sys.syscomments.
// A definition may span multiple rows; ordered by colid so callers can
// concatenate them (see MsSqlAnalyser getCreateSql).
module.exports = `
select s.name as pureName, u.name as schemaName, c.text AS codeText
from sys.objects s
inner join sys.syscomments c on s.object_id = c.id
inner join sys.schemas u on u.schema_id = s.schema_id
where (s.object_id =[OBJECT_ID_CONDITION])
order by u.name, s.name, c.colid
`;

View File

@@ -1,6 +0,0 @@
// All tracked objects with their modify_date; MsSqlAnalyser.getModifications
// compares these against the cached structure to detect changes.
module.exports = `
select o.object_id as objectId, o.modify_date as modifyDate, o.type, o.name as pureName, s.name as schemaName
from sys.objects o
inner join sys.schemas s on o.schema_id = s.schema_id
where o.type in ('U', 'V', 'P', 'IF', 'FN', 'TF') -- , 'TR' - triggers disabled
`;

View File

@@ -1,14 +0,0 @@
// Primary key columns of user tables.
// NOTE(review): uses legacy comma-join syntax; joins are expressed in the
// WHERE clause below.
module.exports = `
select o.object_id, pureName = t.Table_Name, schemaName = t.Table_Schema, columnName = c.Column_Name, constraintName=t.constraint_name from
INFORMATION_SCHEMA.TABLE_CONSTRAINTS t,
sys.objects o,
sys.schemas s,
INFORMATION_SCHEMA.CONSTRAINT_COLUMN_USAGE c
where
c.Constraint_Name = t.Constraint_Name
and t.table_name = o.name
and o.schema_id = s.schema_id and t.Table_Schema = s.name
and c.Table_Name = t.Table_Name
and Constraint_Type = 'PRIMARY KEY'
and o.object_id =[OBJECT_ID_CONDITION]
`;

View File

@@ -1,6 +0,0 @@
// Stored procedures and functions ('P', 'IF', 'FN', 'TF'); the caller splits
// them by sqlObjectType (see MsSqlAnalyser._runAnalysis).
module.exports = `
select o.name as pureName, s.name as schemaName, o.object_id as objectId, o.create_date as createDate, o.modify_date as modifyDate, o.type as sqlObjectType
from sys.objects o
inner join sys.schemas s on o.schema_id = s.schema_id
where o.type in ('P', 'IF', 'FN', 'TF') and o.object_id =[OBJECT_ID_CONDITION]
`;

View File

@@ -1,8 +0,0 @@
// Base list of user tables with create/modify timestamps.
module.exports = `
select
o.name as pureName, s.name as schemaName, o.object_id as objectId,
o.create_date as createDate, o.modify_date as modifyDate
from sys.tables o
inner join sys.schemas s on o.schema_id = s.schema_id
where o.object_id =[OBJECT_ID_CONDITION]
`;

View File

@@ -1,18 +0,0 @@
// Column metadata of views ('V'), read from INFORMATION_SCHEMA.COLUMNS.
module.exports = `
select
o.object_id AS objectId,
col.TABLE_SCHEMA as schemaName,
col.TABLE_NAME as pureName,
col.COLUMN_NAME as columnName,
col.IS_NULLABLE as isNullable,
col.DATA_TYPE as dataType,
col.CHARACTER_MAXIMUM_LENGTH as charMaxLength,
col.NUMERIC_PRECISION as precision,
col.NUMERIC_SCALE as scale,
col.COLUMN_DEFAULT
FROM sys.objects o
INNER JOIN sys.schemas u ON u.schema_id=o.schema_id
INNER JOIN INFORMATION_SCHEMA.COLUMNS col ON col.TABLE_NAME = o.name AND col.TABLE_SCHEMA = u.name
WHERE o.type in ('V') and o.object_id =[OBJECT_ID_CONDITION]
order by col.ORDINAL_POSITION
`;

View File

@@ -1,10 +0,0 @@
// Base list of views ('V') with create/modify timestamps.
module.exports = `
SELECT
o.name as pureName,
u.name as schemaName,
o.object_id as objectId,
o.create_date as createDate,
o.modify_date as modifyDate
FROM sys.objects o INNER JOIN sys.schemas u ON u.schema_id=o.schema_id
WHERE type in ('V') and o.object_id =[OBJECT_ID_CONDITION]
`;

View File

@@ -1,214 +0,0 @@
const fp = require('lodash/fp');
const _ = require('lodash');
const sql = require('./sql');
const { DatabaseAnalyser } = require('dbgate-tools');
const { isTypeString, isTypeNumeric } = require('dbgate-tools');
const { rangeStep } = require('lodash/fp');
/**
 * Builds a dbgate column descriptor from a raw information_schema row.
 * Appends length/precision to the type name where applicable.
 */
function getColumnInfo({
  isNullable,
  extra,
  columnName,
  dataType,
  charMaxLength,
  numericPrecision,
  numericScale,
  defaultValue,
}) {
  // information_schema reports nullability as the strings 'YES' / 'NO'.
  const notNull = !isNullable || isNullable == 'NO' || isNullable == 'no';
  // MySQL flags identity columns via the EXTRA column ('auto_increment').
  const autoIncrement = extra && extra.toLowerCase().includes('auto_increment');
  // Independent checks on purpose (not else-if) - preserves precedence.
  let fullDataType = dataType;
  if (charMaxLength && isTypeString(dataType)) fullDataType = `${dataType}(${charMaxLength})`;
  if (numericPrecision && numericScale && isTypeNumeric(dataType))
    fullDataType = `${dataType}(${numericPrecision},${numericScale})`;
  return {
    notNull,
    autoIncrement,
    columnName,
    dataType: fullDataType,
    defaultValue,
  };
}
/**
 * Maps an information_schema TABLE_TYPE value to the DatabaseInfo collection
 * it belongs to; returns null for unsupported types.
 */
function objectTypeToField(type) {
  switch (type) {
    case 'VIEW':
      return 'views';
    case 'BASE TABLE':
      return 'tables';
    default:
      return null;
  }
}
// Analyser reading MySQL database structure (tables, views, procedures,
// functions) via the SQL templates in ./sql.
// Objects are identified by pureName (MySQL has no stable object id here);
// supports full, incremental and single-object analysis driven by state
// inherited from DatabaseAnalyser.
class MySqlAnalyser extends DatabaseAnalyser {
constructor(pool, driver) {
super(pool, driver);
}
// Loads the SQL template named resFileName and specializes its
// '=[OBJECT_NAME_CONDITION]' and '#DATABASE#' markers for the current
// analysis mode. Returns null when the template is irrelevant.
createQuery(resFileName, typeFields) {
let res = sql[resFileName];
if (this.singleObjectFilter) {
const { typeField, pureName } = this.singleObjectFilter;
if (!typeFields || !typeFields.includes(typeField)) return null;
res = res.replace('=[OBJECT_NAME_CONDITION]', ` = '${pureName}'`).replace('#DATABASE#', this.pool._database_name);
return res;
}
if (!this.modifications || !typeFields || this.modifications.length == 0) {
// full analysis - no name filtering
res = res.replace('=[OBJECT_NAME_CONDITION]', ' is not null');
} else {
// incremental analysis - only added/changed objects of the requested types
const filterNames = this.modifications
.filter((x) => typeFields.includes(x.objectTypeField) && (x.action == 'add' || x.action == 'change'))
.map((x) => x.newName && x.newName.pureName)
.filter(Boolean);
if (filterNames.length == 0) {
// 'IS NULL' matches nothing, keeping the query valid but empty
res = res.replace('=[OBJECT_NAME_CONDITION]', ' IS NULL');
} else {
res = res.replace('=[OBJECT_NAME_CONDITION]', ` in (${filterNames.map((x) => `'${x}'`).join(',')})`);
}
}
res = res.replace('#DATABASE#', this.pool._database_name);
return res;
}
// Narrows the view list to those relevant for the current analysis mode.
getRequestedViewNames(allViewNames) {
if (this.singleObjectFilter) {
const { typeField, pureName } = this.singleObjectFilter;
if (typeField == 'views') return [pureName];
}
if (this.modifications) {
return this.modifications.filter((x) => x.objectTypeField == 'views').map((x) => x.newName.pureName);
}
return allViewNames;
}
// Fetches CREATE VIEW statements via SHOW CREATE VIEW, keyed by view name.
async getViewTexts(allViewNames) {
const res = {};
for (const viewName of this.getRequestedViewNames(allViewNames)) {
const resp = await this.driver.query(this.pool, `SHOW CREATE VIEW \`${viewName}\``);
res[viewName] = resp.rows[0]['Create View'];
}
return res;
}
// Runs all metadata queries and merges them into a DatabaseInfo-like
// structure keyed by pureName.
async _runAnalysis() {
const tables = await this.driver.query(this.pool, this.createQuery('tables', ['tables']));
const columns = await this.driver.query(this.pool, this.createQuery('columns', ['tables', 'views']));
const pkColumns = await this.driver.query(this.pool, this.createQuery('primaryKeys', ['tables']));
const fkColumns = await this.driver.query(this.pool, this.createQuery('foreignKeys', ['tables']));
const views = await this.driver.query(this.pool, this.createQuery('views', ['views']));
const programmables = await this.driver.query(
this.pool,
this.createQuery('programmables', ['procedures', 'functions'])
);
const viewTexts = await this.getViewTexts(views.rows.map((x) => x.pureName));
return this.mergeAnalyseResult(
{
tables: tables.rows.map((table) => ({
...table,
columns: columns.rows.filter((col) => col.pureName == table.pureName).map(getColumnInfo),
primaryKey: DatabaseAnalyser.extractPrimaryKeys(table, pkColumns.rows),
foreignKeys: DatabaseAnalyser.extractForeignKeys(table, fkColumns.rows),
})),
views: views.rows.map((view) => ({
...view,
columns: columns.rows.filter((col) => col.pureName == view.pureName).map(getColumnInfo),
createSql: viewTexts[view.pureName],
// SHOW CREATE VIEW output is single-line; ask the UI to reformat it
requiresFormat: true,
})),
procedures: programmables.rows.filter((x) => x.objectType == 'PROCEDURE').map(fp.omit(['objectType'])),
functions: programmables.rows.filter((x) => x.objectType == 'FUNCTION').map(fp.omit(['objectType'])),
},
(x) => x.pureName
);
}
// Builds 'remove' modifications for cached objects of one collection whose
// name no longer appears in nameArray (i.e. they were dropped).
getDeletedObjectsForField(nameArray, objectTypeField) {
return this.structure[objectTypeField]
.filter((x) => !nameArray.includes(x.pureName))
.map((x) => ({
oldName: _.pick(x, ['pureName']),
action: 'remove',
objectTypeField,
}));
}
// Collects 'remove' modifications across all object collections.
getDeletedObjects(nameArray) {
return [
...this.getDeletedObjectsForField(nameArray, 'tables'),
...this.getDeletedObjectsForField(nameArray, 'views'),
...this.getDeletedObjectsForField(nameArray, 'procedures'),
...this.getDeletedObjectsForField(nameArray, 'functions'),
...this.getDeletedObjectsForField(nameArray, 'triggers'),
];
}
// Compares current modify dates against the cached structure and returns the
// add/change/remove modification list driving incremental analysis.
async getModifications() {
const tableModificationsQueryData = await this.driver.query(this.pool, this.createQuery('tableModifications'));
const procedureModificationsQueryData = await this.driver.query(
this.pool,
this.createQuery('procedureModifications')
);
const functionModificationsQueryData = await this.driver.query(
this.pool,
this.createQuery('functionModifications')
);
// Normalize all three sources into { objectTypeField, modifyDate, pureName }.
const allModifications = _.compact([
...tableModificationsQueryData.rows.map((x) => {
if (x.objectType == 'BASE TABLE') return { ...x, objectTypeField: 'tables' };
if (x.objectType == 'VIEW') return { ...x, objectTypeField: 'views' };
return null;
}),
...procedureModificationsQueryData.rows.map((x) => ({
objectTypeField: 'procedures',
modifyDate: x.Modified,
pureName: x.Name,
})),
...functionModificationsQueryData.rows.map((x) => ({
objectTypeField: 'functions',
modifyDate: x.Modified,
pureName: x.Name,
})),
]);
// console.log('allModifications', allModifications);
// console.log(
// 'DATES',
// this.structure.procedures.map((x) => x.modifyDate)
// );
// console.log('MOD - SRC', modifications);
// console.log(
// 'MODs',
// this.structure.tables.map((x) => x.modifyDate)
// );
const modifications = allModifications.map((x) => {
const { objectType, modifyDate, pureName } = x;
const field = objectTypeToField(objectType);
if (!field || !this.structure[field]) return null;
// @ts-ignore
const obj = this.structure[field].find((x) => x.pureName == pureName);
// object not modified
// (timestamps within 1s are treated as equal, presumably to absorb
// datetime rounding - confirm against DatabaseAnalyser expectations)
if (obj && Math.abs(new Date(modifyDate).getTime() - new Date(obj.modifyDate).getTime()) < 1000) return null;
// console.log('MODIFICATION OF ', field, pureName, modifyDate, obj.modifyDate);
/** @type {import('dbgate-types').DatabaseModification} */
const action = obj
? {
newName: { pureName },
oldName: _.pick(obj, ['pureName']),
action: 'change',
objectTypeField: field,
}
: {
newName: { pureName },
action: 'add',
objectTypeField: field,
};
return action;
});
return [..._.compact(modifications), ...this.getDeletedObjects([...allModifications.map((x) => x.pureName)])];
}
}
module.exports = MySqlAnalyser;

View File

@@ -1,30 +0,0 @@
const { SqlDumper } = require('dbgate-tools');
// Dumper producing MySQL-flavoured SQL.
// NOTE(review): the put() format placeholders (^keyword, %c column
// expression, %s string argument) are interpreted by SqlDumper in
// dbgate-tools - verify semantics there.
class MySqlDumper extends SqlDumper {
// Renders date-part extraction/grouping expressions using MySQL functions.
/** @param type {import('dbgate-types').TransformType} */
transform(type, dumpExpr) {
switch (type) {
case 'GROUP:YEAR':
case 'YEAR':
this.put('^year(%c)', dumpExpr);
break;
case 'MONTH':
this.put('^month(%c)', dumpExpr);
break;
case 'DAY':
this.put('^day(%c)', dumpExpr);
break;
case 'GROUP:MONTH':
// '%Y-%m' is a MySQL DATE_FORMAT pattern - groups by calendar month
this.put("^date_format(%c, '%s')", dumpExpr, '%Y-%m');
break;
case 'GROUP:DAY':
// '%Y-%m-%d' groups by calendar day
this.put("^date_format(%c, '%s')", dumpExpr, '%Y-%m-%d');
break;
default:
// unknown transform - emit the bare expression unchanged
dumpExpr();
break;
}
}
}
module.exports = MySqlDumper;

View File

@@ -1,159 +0,0 @@
const { driverBase } = require('dbgate-tools');
const MySqlAnalyser = require('./MySqlAnalyser');
const MySqlDumper = require('./MySqlDumper');
// MySQL dialect description consumed by other dbgate packages
// (query builders / dumpers).
/** @type {import('dbgate-types').SqlDialect} */
const dialect = {
rangeSelect: true,
stringEscapeChar: '\\',
fallbackDataType: 'longtext',
// MySQL quotes identifiers with backticks.
quoteIdentifier(s) {
return '`' + s + '`';
},
};
/**
 * Maps mysql driver field descriptors to dbgate column descriptors.
 * @param fields - driver-provided field list (may be undefined)
 * @returns array of { columnName } objects, or null when no fields are given
 */
function extractColumns(fields) {
  if (!fields) return null;
  return fields.map(({ name }) => ({ columnName: name }));
}
/** @type {import('dbgate-types').EngineDriver} */
const driver = {
  ...driverBase,
  analyserClass: MySqlAnalyser,
  dumperClass: MySqlDumper,

  // Opens a single (non-pooled) MySQL connection. The database name and the
  // injected native-module bag are stashed on the connection so that later
  // calls (readQuery, writeTable) can reach them.
  async connect(nativeModules, { server, port, user, password, database }) {
    const connection = nativeModules.mysql.createConnection({
      host: server,
      port,
      user,
      password,
      database,
    });
    connection._database_name = database;
    connection._nativeModules = nativeModules;
    return connection;
  },

  // Executes SQL and resolves with { rows, columns }; a null/undefined SQL
  // string yields an empty result set instead of an error.
  async query(connection, sql) {
    if (sql == null) {
      return {
        rows: [],
        columns: [],
      };
    }
    return new Promise((resolve, reject) => {
      connection.query(sql, function (error, results, fields) {
        // BUGFIX: the original called resolve() even after reject(); on the
        // error path results/fields are undefined, so settle only once.
        if (error) reject(error);
        else resolve({ rows: results, columns: extractColumns(fields) });
      });
    });
  },

  // Streams a query, forwarding the recordset header, rows, errors and the
  // final summary to the caller-provided options callbacks.
  async stream(connection, sql, options) {
    const query = connection.query(sql);
    const handleEnd = (result) => {
      options.done(result);
    };
    const handleRow = (row) => {
      options.row(row);
    };
    const handleFields = (columns) => {
      // BUGFIX: removed leftover debug console.log that dereferenced
      // columns[0] and crashed on result sets without columns.
      options.recordset(extractColumns(columns));
    };
    // Errors are forwarded via options.info (debug logging removed).
    const handleError = (error) => {
      const { message, lineNumber, procName } = error;
      options.info({
        message,
        line: lineNumber,
        procedure: procName,
        time: new Date(),
        severity: 'error',
      });
    };
    query.on('error', handleError).on('fields', handleFields).on('result', handleRow).on('end', handleEnd);
    return query;
  },

  // Returns an object-mode PassThrough producing one header object
  // (structure, or columns derived from field metadata) followed by rows.
  async readQuery(connection, sql, structure) {
    const query = connection.query(sql);
    const { stream } = connection._nativeModules;
    const pass = new stream.PassThrough({
      objectMode: true,
      highWaterMark: 100,
    });
    query
      .on('error', (err) => {
        console.error(err);
        pass.end();
      })
      .on('fields', (fields) => pass.write(structure || { columns: extractColumns(fields) }))
      .on('result', (row) => pass.write(row))
      .on('end', () => pass.end());
    return pass;
  },

  // Reads the server version from the 'version' server variable.
  async getVersion(connection) {
    const { rows } = await this.query(connection, "show variables like 'version'");
    const version = rows[0].Value;
    return { version };
  },

  // Lists databases visible to the current user.
  async listDatabases(connection) {
    const { rows } = await this.query(connection, 'show databases');
    return rows.map((x) => ({ name: x.Database }));
  },

  // Creates a bulk-insert writable stream for the given table.
  // NOTE(review): createBulkInsertStreamBase is not defined in this module —
  // ensure it is required from dbgate-tools at the top of the file.
  async writeTable(pool, name, options) {
    const { stream } = pool._nativeModules;
    // @ts-ignore
    return createBulkInsertStreamBase(this, stream, pool, name, options);
  },

  dialect,
  engine: 'mysql',
};
module.exports = driver;

View File

@@ -1,15 +0,0 @@
// MySQL: column metadata for tables/views; '#DATABASE#' and
// [OBJECT_NAME_CONDITION] are placeholders substituted by the analyser.
module.exports = `
select
TABLE_NAME as pureName,
COLUMN_NAME as columnName,
IS_NULLABLE as isNullable,
DATA_TYPE as dataType,
CHARACTER_MAXIMUM_LENGTH as charMaxLength,
NUMERIC_PRECISION as numericPrecision,
NUMERIC_SCALE as numericScale,
COLUMN_DEFAULT as defaultValue,
EXTRA as extra
from INFORMATION_SCHEMA.COLUMNS
where TABLE_SCHEMA = '#DATABASE#' and TABLE_NAME =[OBJECT_NAME_CONDITION]
order by ORDINAL_POSITION
`;

View File

@@ -1,17 +0,0 @@
// MySQL: foreign-key constraints with their column pairs, one row per
// (constraint, column); placeholders substituted by the analyser.
module.exports = `
select
REFERENTIAL_CONSTRAINTS.CONSTRAINT_NAME as constraintName,
REFERENTIAL_CONSTRAINTS.TABLE_NAME as pureName,
REFERENTIAL_CONSTRAINTS.UPDATE_RULE as updateAction,
REFERENTIAL_CONSTRAINTS.DELETE_RULE as deleteAction,
REFERENTIAL_CONSTRAINTS.REFERENCED_TABLE_NAME as refTableName,
KEY_COLUMN_USAGE.COLUMN_NAME as columnName,
KEY_COLUMN_USAGE.REFERENCED_COLUMN_NAME as refColumnName
from INFORMATION_SCHEMA.REFERENTIAL_CONSTRAINTS
inner join INFORMATION_SCHEMA.KEY_COLUMN_USAGE
on REFERENTIAL_CONSTRAINTS.TABLE_NAME = KEY_COLUMN_USAGE.TABLE_NAME
and REFERENTIAL_CONSTRAINTS.CONSTRAINT_NAME = KEY_COLUMN_USAGE.CONSTRAINT_NAME
and REFERENTIAL_CONSTRAINTS.CONSTRAINT_SCHEMA = KEY_COLUMN_USAGE.CONSTRAINT_SCHEMA
where REFERENTIAL_CONSTRAINTS.CONSTRAINT_SCHEMA = '#DATABASE#' and REFERENTIAL_CONSTRAINTS.TABLE_NAME =[OBJECT_NAME_CONDITION]
order by KEY_COLUMN_USAGE.ORDINAL_POSITION
`;

View File

@@ -1,3 +0,0 @@
// MySQL: function list with status info, used to detect modifications.
module.exports = `
SHOW FUNCTION STATUS WHERE Db = '#DATABASE#'
`;

View File

@@ -1,21 +0,0 @@
const columns = require('./columns');
const tables = require('./tables');
const primaryKeys = require('./primaryKeys');
const foreignKeys = require('./foreignKeys');
const tableModifications = require('./tableModifications');
const views = require('./views');
const programmables = require('./programmables');
const procedureModifications = require('./procedureModifications');
const functionModifications = require('./functionModifications');
module.exports = {
columns,
tables,
primaryKeys,
foreignKeys,
tableModifications,
views,
programmables,
procedureModifications,
functionModifications,
};

View File

@@ -1,12 +0,0 @@
// MySQL: primary-key columns per table, one row per key column;
// placeholders substituted by the analyser.
module.exports = `select
TABLE_CONSTRAINTS.CONSTRAINT_NAME as constraintName,
TABLE_CONSTRAINTS.TABLE_NAME as pureName,
KEY_COLUMN_USAGE.COLUMN_NAME as columnName
from INFORMATION_SCHEMA.TABLE_CONSTRAINTS
inner join INFORMATION_SCHEMA.KEY_COLUMN_USAGE
on TABLE_CONSTRAINTS.TABLE_NAME = KEY_COLUMN_USAGE.TABLE_NAME
and TABLE_CONSTRAINTS.CONSTRAINT_NAME = KEY_COLUMN_USAGE.CONSTRAINT_NAME
and TABLE_CONSTRAINTS.CONSTRAINT_SCHEMA = KEY_COLUMN_USAGE.CONSTRAINT_SCHEMA
where TABLE_CONSTRAINTS.CONSTRAINT_SCHEMA = '#DATABASE#' and TABLE_CONSTRAINTS.TABLE_NAME =[OBJECT_NAME_CONDITION] AND TABLE_CONSTRAINTS.CONSTRAINT_TYPE = 'PRIMARY KEY'
order by KEY_COLUMN_USAGE.ORDINAL_POSITION
`;

View File

@@ -1,3 +0,0 @@
// MySQL: procedure list with status info, used to detect modifications.
module.exports = `
SHOW PROCEDURE STATUS WHERE Db = '#DATABASE#'
`;

View File

@@ -1,9 +0,0 @@
// MySQL: full definitions of stored routines (procedures and functions);
// placeholders substituted by the analyser.
module.exports = `
select
ROUTINE_NAME as pureName,
ROUTINE_TYPE as objectType,
COALESCE(LAST_ALTERED, CREATED) as modifyDate,
ROUTINE_DEFINITION as createSql
from information_schema.routines
where ROUTINE_SCHEMA = '#DATABASE#' and ROUTINE_NAME =[OBJECT_NAME_CONDITION]
`;

View File

@@ -1,8 +0,0 @@
// MySQL: modification timestamps of all tables/views in the database.
// InnoDB keeps UPDATE_TIME unreliable, hence the engine-specific case.
module.exports = `
select
TABLE_NAME as pureName,
TABLE_TYPE as objectType,
case when ENGINE='InnoDB' then CREATE_TIME else coalesce(UPDATE_TIME, CREATE_TIME) end as modifyDate
from information_schema.tables
where TABLE_SCHEMA = '#DATABASE#'
`;

View File

@@ -1,7 +0,0 @@
// MySQL: base tables matched by the object-name condition, with modification
// timestamps (engine-specific handling as in tableModifications).
module.exports = `
select
TABLE_NAME as pureName,
case when ENGINE='InnoDB' then CREATE_TIME else coalesce(UPDATE_TIME, CREATE_TIME) end as modifyDate
from information_schema.tables
where TABLE_SCHEMA = '#DATABASE#' and TABLE_TYPE='BASE TABLE' and TABLE_NAME =[OBJECT_NAME_CONDITION];
`;

View File

@@ -1,7 +0,0 @@
// MySQL: views matched by the object-name condition, with modification
// timestamps.
module.exports = `
select
TABLE_NAME as pureName,
coalesce(UPDATE_TIME, CREATE_TIME) as modifyDate
from information_schema.tables
where TABLE_SCHEMA = '#DATABASE#' and TABLE_NAME =[OBJECT_NAME_CONDITION] and TABLE_TYPE = 'VIEW';
`;

View File

@@ -1,32 +0,0 @@
{
"name": "dbgate-engines",
"version": "1.0.3",
"main": "index.js",
"typings": "./index.d.ts",
"homepage": "https://dbgate.org/",
"repository": {
"type": "git",
"url": "https://github.com/dbshell/dbgate.git"
},
"funding": "https://www.paypal.com/paypalme/JanProchazkaCz/30eur",
"author": "Jan Prochazka",
"license": "GPL",
"keywords": [
"sql",
"mssql",
"mysql",
"postgresql",
"dbgate"
],
"devDependencies": {
"@types/lodash": "^4.14.149",
"dbgate-types": "^1.0.0",
"nodemon": "^2.0.2",
"typescript": "^3.7.5"
},
"dependencies": {
"dbgate-tools": "^1.0.0",
"lodash": "^4.17.15",
"moment": "^2.24.0"
}
}

View File

@@ -1,173 +0,0 @@
const fp = require('lodash/fp');
const _ = require('lodash');
const sql = require('./sql');
const { DatabaseAnalyser } = require('dbgate-tools');
const { isTypeString, isTypeNumeric } = require('dbgate-tools');
// Maps verbose PostgreSQL type names to their common short aliases;
// every other type name passes through unchanged.
function normalizeTypeName(dataType) {
  switch (dataType) {
    case 'character varying':
      return 'varchar';
    case 'timestamp without time zone':
      return 'timestamp';
    default:
      return dataType;
  }
}
// Builds a column-info object from an information_schema row, composing the
// full data type (e.g. varchar(50), numeric(10,2)) from the base type plus
// length/precision metadata.
function getColumnInfo({
  isNullable,
  isIdentity,
  columnName,
  dataType,
  charMaxLength,
  numericPrecision,
  numericScale,
  defaultValue,
}) {
  const normDataType = normalizeTypeName(dataType);
  let fullDataType = normDataType;
  if (charMaxLength && isTypeString(normDataType)) fullDataType = `${normDataType}(${charMaxLength})`;
  // BUGFIX: a scale of 0 (e.g. numeric(10,0)) is valid but falsy, so the
  // original truthiness test silently dropped the precision/scale suffix.
  if (numericPrecision != null && numericScale != null && isTypeNumeric(normDataType))
    fullDataType = `${normDataType}(${numericPrecision},${numericScale})`;
  return {
    columnName,
    dataType: fullDataType,
    // information_schema reports IS_NULLABLE as 'YES'/'NO' strings
    notNull: !isNullable || isNullable == 'NO' || isNullable == 'no',
    autoIncrement: !!isIdentity,
    defaultValue,
  };
}
// Analyses a PostgreSQL database: loads tables, views, procedures and
// functions (with columns and key constraints) and computes incremental
// modifications by comparing md5 hash codes of object definitions.
class PostgreAnalyser extends DatabaseAnalyser {
  constructor(pool, driver) {
    super(pool, driver);
  }
  // Returns the SQL snippet `resFileName` with its =OBJECT_ID_CONDITION
  // placeholder substituted for the current analysis mode:
  // - single-object: match exactly one "type:schema.name" id
  // - full analysis (no modifications): match everything
  // - incremental: match only added/changed objects of the given type fields
  // Returns null when a single-object analysis asks for a type this query
  // does not serve.
  createQuery(resFileName, typeFields) {
    let res = sql[resFileName];
    if (this.singleObjectFilter) {
      const { typeField, schemaName, pureName } = this.singleObjectFilter;
      if (!typeFields || !typeFields.includes(typeField)) return null;
      // schema defaults to 'public' when the filter does not specify one
      res = res.replace(/=OBJECT_ID_CONDITION/g, ` = '${typeField}:${schemaName || 'public'}.${pureName}'`);
      return res;
    }
    if (!this.modifications || !typeFields || this.modifications.length == 0) {
      // full analysis: condition matches every object
      res = res.replace(/=OBJECT_ID_CONDITION/g, ' is not null');
    } else {
      // incremental analysis: restrict to added/changed objects of the
      // requested types
      const filterNames = this.modifications
        .filter((x) => typeFields.includes(x.objectTypeField) && (x.action == 'add' || x.action == 'change'))
        .filter((x) => x.newName)
        .map((x) => `${x.objectTypeField}:${x.newName.schemaName}.${x.newName.pureName}`);
      if (filterNames.length == 0) {
        // nothing of interest changed: match nothing
        res = res.replace(/=OBJECT_ID_CONDITION/g, ' IS NULL');
      } else {
        res = res.replace(/=OBJECT_ID_CONDITION/g, ` in (${filterNames.map((x) => `'${x}'`).join(',')})`);
      }
    }
    return res;
    // let res = sql[resFileName];
    // res = res.replace('=[OBJECT_ID_CONDITION]', ' is not null');
    // return res;
  }
  // Runs all structure queries and merges results into the analyser output.
  async _runAnalysis() {
    const tables = await this.driver.query(this.pool, this.createQuery('tableModifications', ['tables']));
    // NOTE(review): the columns query is created with typeFields ['tables']
    // but its rows are also matched against views below — confirm that view
    // columns are actually produced by sql/columns.
    const columns = await this.driver.query(this.pool, this.createQuery('columns', ['tables']));
    const pkColumns = await this.driver.query(this.pool, this.createQuery('primaryKeys', ['tables']));
    const fkColumns = await this.driver.query(this.pool, this.createQuery('foreignKeys', ['tables']));
    const views = await this.driver.query(this.pool, this.createQuery('views', ['views']));
    const routines = await this.driver.query(this.pool, this.createQuery('routines', ['procedures', 'functions']));
    // console.log('PG fkColumns', fkColumns.rows);
    return this.mergeAnalyseResult(
      {
        tables: tables.rows.map((table) => ({
          ...table,
          columns: columns.rows
            .filter((col) => col.pureName == table.pureName && col.schemaName == table.schemaName)
            .map(getColumnInfo),
          primaryKey: DatabaseAnalyser.extractPrimaryKeys(table, pkColumns.rows),
          foreignKeys: DatabaseAnalyser.extractForeignKeys(table, fkColumns.rows),
        })),
        views: views.rows.map((view) => ({
          ...view,
          columns: columns.rows
            .filter((col) => col.pureName == view.pureName && col.schemaName == view.schemaName)
            .map(getColumnInfo),
        })),
        procedures: routines.rows.filter((x) => x.objectType == 'PROCEDURE'),
        functions: routines.rows.filter((x) => x.objectType == 'FUNCTION'),
      },
      (x) => `${x.objectTypeField}:${x.schemaName}.${x.pureName}`
    );
  }
  // Compares current hash codes against this.structure and returns a list of
  // add/change/remove modification records.
  async getModifications() {
    const tableModificationsQueryData = await this.driver.query(this.pool, this.createQuery('tableModifications'));
    const viewModificationsQueryData = await this.driver.query(this.pool, this.createQuery('viewModifications'));
    const routineModificationsQueryData = await this.driver.query(this.pool, this.createQuery('routineModifications'));
    const allModifications = _.compact([
      ...tableModificationsQueryData.rows.map((x) => ({ ...x, objectTypeField: 'tables' })),
      ...viewModificationsQueryData.rows.map((x) => ({ ...x, objectTypeField: 'views' })),
      ...routineModificationsQueryData.rows
        .filter((x) => x.objectType == 'PROCEDURE')
        .map((x) => ({ ...x, objectTypeField: 'procedures' })),
      ...routineModificationsQueryData.rows
        .filter((x) => x.objectType == 'FUNCTION')
        .map((x) => ({ ...x, objectTypeField: 'functions' })),
    ]);
    const modifications = allModifications.map((x) => {
      const { objectTypeField, hashCode, pureName, schemaName } = x;
      if (!objectTypeField || !this.structure[objectTypeField]) return null;
      const obj = this.structure[objectTypeField].find((x) => x.pureName == pureName && x.schemaName == schemaName);
      // object not modified
      if (obj && obj.hashCode == hashCode) return null;
      // console.log('MODIFICATION OF ', objectTypeField, schemaName, pureName);
      /** @type {import('dbgate-types').DatabaseModification} */
      const action = obj
        ? {
            newName: { schemaName, pureName },
            oldName: _.pick(obj, ['schemaName', 'pureName']),
            action: 'change',
            objectTypeField,
          }
        : {
            newName: { schemaName, pureName },
            action: 'add',
            objectTypeField,
          };
      return action;
    });
    // objects present in structure but missing from the database are removals
    return [
      ..._.compact(modifications),
      ...this.getDeletedObjects([...allModifications.map((x) => `${x.schemaName}.${x.pureName}`)]),
    ];
  }
  // Remove-records for objects of one type field that no longer exist;
  // nameArray holds "schema.name" ids of all objects still present.
  getDeletedObjectsForField(nameArray, objectTypeField) {
    return this.structure[objectTypeField]
      .filter((x) => !nameArray.includes(`${x.schemaName}.${x.pureName}`))
      .map((x) => ({
        oldName: _.pick(x, ['schemaName', 'pureName']),
        action: 'remove',
        objectTypeField,
      }));
  }
  // NOTE(review): assumes this.structure always carries all five arrays
  // (including 'triggers'); a missing field would throw — confirm upstream.
  getDeletedObjects(nameArray) {
    return [
      ...this.getDeletedObjectsForField(nameArray, 'tables'),
      ...this.getDeletedObjectsForField(nameArray, 'views'),
      ...this.getDeletedObjectsForField(nameArray, 'procedures'),
      ...this.getDeletedObjectsForField(nameArray, 'functions'),
      ...this.getDeletedObjectsForField(nameArray, 'triggers'),
    ];
  }
}
module.exports = PostgreAnalyser;

View File

@@ -1,30 +0,0 @@
const { SqlDumper } = require('dbgate-tools');
// SQL dumper with PostgreSQL-specific renderings of date/grouping transforms.
class PostgreDumper extends SqlDumper {
  /** @param type {import('dbgate-types').TransformType} */
  transform(type, dumpExpr) {
    if (type == 'GROUP:YEAR' || type == 'YEAR') {
      this.put('^extract(^year ^from %c)', dumpExpr);
    } else if (type == 'MONTH') {
      this.put('^extract(^month ^from %c)', dumpExpr);
    } else if (type == 'DAY') {
      this.put('^extract(^day ^from %c)', dumpExpr);
    } else if (type == 'GROUP:MONTH') {
      this.put("^to_char(%c, '%s')", dumpExpr, 'YYYY-MM');
    } else if (type == 'GROUP:DAY') {
      this.put("^to_char(%c, '%s')", dumpExpr, 'YYYY-MM-DD');
    } else {
      // unknown transform: emit the wrapped expression unchanged
      dumpExpr();
    }
  }
}
module.exports = PostgreDumper;

View File

@@ -1,190 +0,0 @@
const _ = require('lodash');
const { driverBase, createBulkInsertStreamBase } = require('dbgate-tools');
const PostgreAnalyser = require('./PostgreAnalyser');
const PostgreDumper = require('./PostgreDumper');
/** @type {import('dbgate-types').SqlDialect} */
const dialect = {
  // Double quotes delimit PostgreSQL identifiers.
  quoteIdentifier(s) {
    return `"${s}"`;
  },
  // LIMIT/OFFSET range selects are supported.
  rangeSelect: true,
  // Standard SQL escaping: quotes are doubled inside string literals.
  stringEscapeChar: "'",
  fallbackDataType: 'varchar',
};
/** @type {import('dbgate-types').EngineDriver} */
const driver = {
  ...driverBase,
  analyserClass: PostgreAnalyser,
  dumperClass: PostgreDumper,
  // Opens a pg Client; falls back to the maintenance database 'postgres'
  // when no database is given. The injected native-module bag is stashed on
  // the client for later use by stream/readQuery/writeTable.
  async connect(nativeModules, { server, port, user, password, database }) {
    const client = new nativeModules.pg.Client({
      host: server,
      port,
      user,
      password,
      database: database || 'postgres',
    });
    await client.connect();
    client._nativeModules = nativeModules;
    return client;
  },
  // Executes SQL and returns { rows, columns }; null SQL yields an empty
  // result set instead of an error.
  async query(client, sql) {
    if (sql == null) {
      return {
        rows: [],
        columns: [],
      };
    }
    const res = await client.query(sql);
    return { rows: res.rows, columns: res.fields };
  },
  // Streams a query via pg-query-stream, emitting one recordset header
  // (column names taken from the first row's keys — so a query with zero
  // rows emits no recordset) followed by the rows.
  async stream(client, sql, options) {
    const query = new client._nativeModules.pgQueryStream(sql);
    const stream = client.query(query);
    // const handleInfo = (info) => {
    //   const { message, lineNumber, procName } = info;
    //   options.info({
    //     message,
    //     line: lineNumber,
    //     procedure: procName,
    //     time: new Date(),
    //     severity: 'info',
    //   });
    // };
    let wasHeader = false;
    const handleEnd = (result) => {
      // console.log('RESULT', result);
      options.done(result);
    };
    // Drains all currently readable rows from the stream; the header is
    // emitted lazily before the first row.
    const handleReadable = () => {
      let row = stream.read();
      if (!wasHeader && row) {
        options.recordset(_.keys(row).map((columnName) => ({ columnName })));
        wasHeader = true;
      }
      while (row) {
        options.row(row);
        row = stream.read();
      }
    };
    // const handleFields = (columns) => {
    //   // console.log('FIELDS', columns[0].name);
    //   options.recordset(columns);
    //   // options.recordset(extractColumns(columns));
    // };
    // Errors are forwarded to the caller via options.info with severity
    // 'error'.
    const handleError = (error) => {
      console.log('ERROR', error);
      const { message, lineNumber, procName } = error;
      options.info({
        message,
        line: lineNumber,
        procedure: procName,
        time: new Date(),
        severity: 'error',
      });
    };
    stream.on('error', handleError);
    stream.on('readable', handleReadable);
    // stream.on('result', handleRow)
    // stream.on('data', handleRow)
    stream.on('end', handleEnd);
    return stream;
  },
  // async analyseSingleObject(pool, name, typeField = 'tables') {
  //   const analyser = new PostgreAnalyser(pool, this);
  //   analyser.singleObjectFilter = { ...name, typeField };
  //   const res = await analyser.fullAnalysis();
  //   return res.tables[0];
  // },
  // // @ts-ignore
  // analyseSingleTable(pool, name) {
  //   return this.analyseSingleObject(pool, name, 'tables');
  // },
  // Reads the server version string via SELECT version().
  async getVersion(client) {
    const { rows } = await this.query(client, 'SELECT version()');
    const { version } = rows[0];
    return { version };
  },
  // async analyseFull(pool) {
  //   const analyser = new PostgreAnalyser(pool, this);
  //   return analyser.fullAnalysis();
  // },
  // async analyseIncremental(pool, structure) {
  //   const analyser = new PostgreAnalyser(pool, this);
  //   return analyser.incrementalAnalysis(structure);
  // },
  // Returns an object-mode PassThrough producing one header object
  // (structure, or columns derived from the first row's keys) followed by
  // the row objects.
  async readQuery(client, sql, structure) {
    const query = new client._nativeModules.pgQueryStream(sql);
    const { stream } = client._nativeModules;
    const queryStream = client.query(query);
    let wasHeader = false;
    const pass = new stream.PassThrough({
      objectMode: true,
      highWaterMark: 100,
    });
    const handleEnd = (result) => {
      pass.end();
    };
    const handleReadable = () => {
      let row = queryStream.read();
      if (!wasHeader && row) {
        pass.write(
          structure || {
            columns: _.keys(row).map((columnName) => ({ columnName })),
          }
        );
        wasHeader = true;
      }
      while (row) {
        pass.write(row);
        row = queryStream.read();
      }
    };
    const handleError = (error) => {
      console.error(error);
      pass.end();
    };
    queryStream.on('error', handleError);
    queryStream.on('readable', handleReadable);
    queryStream.on('end', handleEnd);
    return pass;
  },
  // createDumper() {
  //   return new PostgreDumper(this);
  // },
  // Creates a bulk-insert writable stream for the given table.
  // NOTE(review): createBulkInsertStreamBase is not defined in this module —
  // ensure it is required from dbgate-tools at the top of the file.
  async writeTable(pool, name, options) {
    const { stream } = pool._nativeModules;
    // @ts-ignore
    return createBulkInsertStreamBase(this, stream, pool, name, options);
  },
  // Lists non-template databases on the server.
  async listDatabases(client) {
    const { rows } = await this.query(client, 'SELECT datname AS name FROM pg_database WHERE datistemplate = false');
    return rows;
  },
  dialect,
  engine: 'postgres',
};
module.exports = driver;

View File

@@ -1,19 +0,0 @@
// PostgreSQL: column metadata for tables; the =OBJECT_ID_CONDITION
// placeholder is substituted by PostgreAnalyser.createQuery against
// 'tables:schema.name' ids.
module.exports = `
select
table_schema as "schemaName",
table_name as "pureName",
column_name as "columnName",
is_nullable as "isNullable",
data_type as "dataType",
character_maximum_length as "charMaxLength",
numeric_precision as "numericPrecision",
numeric_scale as "numericScale",
column_default as "defaultValue"
from information_schema.columns
where
table_schema <> 'information_schema'
and table_schema <> 'pg_catalog'
and table_schema !~ '^pg_toast'
and 'tables:' || table_schema || '.' || table_name =OBJECT_ID_CONDITION
order by ordinal_position
`;

View File

@@ -1,24 +0,0 @@
// PostgreSQL: foreign-key constraints with column pairs, one row per
// (constraint, column position); placeholder substituted by the analyser.
module.exports = `
select
fk.constraint_name as "constraintName",
fk.constraint_schema as "constraintSchema",
base.table_name as "pureName",
base.table_schema as "schemaName",
fk.update_rule as "updateAction",
fk.delete_rule as "deleteAction",
ref.table_name as "refTableName",
ref.table_schema as "refSchemaName",
basecol.column_name as "columnName",
refcol.column_name as "refColumnName"
from information_schema.referential_constraints fk
inner join information_schema.table_constraints base on fk.constraint_name = base.constraint_name and fk.constraint_schema = base.constraint_schema
inner join information_schema.table_constraints ref on fk.unique_constraint_name = ref.constraint_name and fk.unique_constraint_schema = ref.constraint_schema
inner join information_schema.key_column_usage basecol on base.table_name = basecol.table_name and base.constraint_name = basecol.constraint_name
inner join information_schema.key_column_usage refcol on ref.table_name = refcol.table_name and ref.constraint_name = refcol.constraint_name and basecol.ordinal_position = refcol.ordinal_position
where
base.table_schema <> 'information_schema'
and base.table_schema <> 'pg_catalog'
and base.table_schema !~ '^pg_toast'
and 'tables:' || base.table_schema || '.' || base.table_name =OBJECT_ID_CONDITION
order by basecol.ordinal_position
`;

View File

@@ -1,19 +0,0 @@
const columns = require('./columns');
const tableModifications = require('./tableModifications');
const viewModifications = require('./viewModifications');
const primaryKeys = require('./primaryKeys');
const foreignKeys = require('./foreignKeys');
const views = require('./views');
const routines = require('./routines');
const routineModifications = require('./routineModifications');
module.exports = {
columns,
tableModifications,
viewModifications,
primaryKeys,
foreignKeys,
views,
routines,
routineModifications,
};

View File

@@ -1,17 +0,0 @@
// PostgreSQL: primary-key columns per table, one row per key column;
// placeholder substituted by the analyser.
module.exports = `
select
table_constraints.constraint_schema as "constraintSchema",
table_constraints.constraint_name as "constraintName",
table_constraints.table_schema as "schemaName",
table_constraints.table_name as "pureName",
key_column_usage.column_name as "columnName"
from information_schema.table_constraints
inner join information_schema.key_column_usage on table_constraints.table_name = key_column_usage.table_name and table_constraints.constraint_name = key_column_usage.constraint_name
where
table_constraints.table_schema <> 'information_schema'
and table_constraints.table_schema <> 'pg_catalog'
and table_constraints.table_schema !~ '^pg_toast'
and table_constraints.constraint_type = 'PRIMARY KEY'
and 'tables:' || table_constraints.table_schema || '.' || table_constraints.table_name =OBJECT_ID_CONDITION
order by key_column_usage.ordinal_position
`;

View File

@@ -1,10 +0,0 @@
// PostgreSQL: hash codes of all routine definitions, used by the analyser to
// detect changed procedures/functions.
module.exports = `
select
routine_name as "pureName",
routine_schema as "schemaName",
md5(routine_definition) as "hashCode",
routine_type as "objectType"
from
information_schema.routines where routine_schema != 'information_schema' and routine_schema != 'pg_catalog'
and routine_type in ('PROCEDURE', 'FUNCTION')
`;

View File

@@ -1,15 +0,0 @@
// PostgreSQL: full definitions of procedures/functions matched by the
// =OBJECT_ID_CONDITION placeholder (substituted by PostgreAnalyser with
// 'type:schema.name' ids).
// BUGFIX: the object id was built as routine_schema || '.' || routine_schema
// (schema concatenated with itself), so the condition could never match the
// analyser's 'schema.name' ids; it must use routine_name.
const routinesSql = `
select
routine_name as "pureName",
routine_schema as "schemaName",
routine_definition as "createSql",
md5(routine_definition) as "hashCode",
routine_type as "objectType"
from
information_schema.routines where routine_schema != 'information_schema' and routine_schema != 'pg_catalog'
and (
(routine_type = 'PROCEDURE' and ('procedures:' || routine_schema || '.' || routine_name) =OBJECT_ID_CONDITION)
or
(routine_type = 'FUNCTION' and ('functions:' || routine_schema || '.' || routine_name) =OBJECT_ID_CONDITION)
)
`;
module.exports = routinesSql;

View File

@@ -1,52 +0,0 @@
// PostgreSQL: per-table definition hash codes. Reconstructs a canonical
// CREATE TABLE text (columns + primary key) and md5-hashes it so the
// analyser can detect structural changes without timestamps.
module.exports = `
with pkey as
(
select cc.conrelid, format(E'create constraint %I primary key(%s);\\n', cc.conname,
string_agg(a.attname, ', '
order by array_position(cc.conkey, a.attnum))) pkey
from pg_catalog.pg_constraint cc
join pg_catalog.pg_class c on c.oid = cc.conrelid
join pg_catalog.pg_attribute a on a.attrelid = cc.conrelid
and a.attnum = any(cc.conkey)
where cc.contype = 'p'
group by cc.conrelid, cc.conname
)
SELECT oid as "objectId", nspname as "schemaName", relname as "pureName",
md5('CREATE TABLE ' || nspname || '.' || relname || E'\\n(\\n' ||
array_to_string(
array_agg(
' ' || column_name || ' ' || type || ' '|| not_null
)
, E',\\n'
) || E'\\n);\\n' || coalesce((select pkey from pkey where pkey.conrelid = oid),'NO_PK')) as "hashCode"
from
(
SELECT
c.relname, a.attname AS column_name, c.oid,
n.nspname,
pg_catalog.format_type(a.atttypid, a.atttypmod) as type,
case
when a.attnotnull
then 'NOT NULL'
else 'NULL'
END as not_null
FROM pg_class c,
pg_namespace n,
pg_attribute a,
pg_type t
WHERE c.relkind = 'r'
AND a.attnum > 0
AND a.attrelid = c.oid
AND a.atttypid = t.oid
AND n.oid = c.relnamespace
AND n.nspname <> 'pg_catalog'
AND n.nspname <> 'information_schema'
AND n.nspname !~ '^pg_toast'
ORDER BY a.attnum
) as tabledefinition
where ('tables:' || nspname || '.' || relname) =OBJECT_ID_CONDITION
group by relname, nspname, oid
`;

View File

@@ -1,8 +0,0 @@
// PostgreSQL: hash codes of all view definitions, used by the analyser to
// detect changed views.
module.exports = `
select
table_name as "pureName",
table_schema as "schemaName",
md5(view_definition) as "hashCode"
from
information_schema.views where table_schema != 'information_schema' and table_schema != 'pg_catalog'
`;

View File

@@ -1,11 +0,0 @@
// PostgreSQL: full definitions of views matched by the =OBJECT_ID_CONDITION
// placeholder (substituted by the analyser with 'views:schema.name' ids).
module.exports = `
select
table_name as "pureName",
table_schema as "schemaName",
view_definition as "createSql",
md5(view_definition) as "hashCode"
from
information_schema.views
where table_schema != 'information_schema' and table_schema != 'pg_catalog'
and ('views:' || table_schema || '.' || table_name) =OBJECT_ID_CONDITION
`;

View File

@@ -1,19 +0,0 @@
{
"compilerOptions": {
"module": "commonjs",
"allowJs": true,
"checkJs": true,
"noEmit": true,
"allowSyntheticDefaultImports": true,
"esModuleInterop": true,
"moduleResolution": "node",
"lib": [
"dom",
"dom.iterable",
"esnext"
]
},
"include": [
"."
]
}

View File

@@ -12,7 +12,6 @@
"axios": "^0.19.0",
"cross-env": "^6.0.3",
"dbgate-datalib": "^1.0.0",
"dbgate-engines": "^1.0.0",
"dbgate-sqltree": "^1.0.0",
"dbgate-tools": "^1.0.0",
"eslint": "^6.8.0",