PostgreSQL analyser - works also for Redshift

This commit is contained in:
Jan Prochazka
2021-05-16 08:56:56 +02:00
parent 640b53e45f
commit acc49273c1
12 changed files with 152 additions and 94 deletions

View File

@@ -12,26 +12,24 @@ function normalizeTypeName(dataType) {
}
function getColumnInfo({
isNullable,
isIdentity,
columnName,
dataType,
charMaxLength,
numericPrecision,
numericScale,
defaultValue,
is_nullable,
column_name,
data_type,
char_max_length,
numeric_precision,
numeric_scale,
default_value,
}) {
const normDataType = normalizeTypeName(dataType);
const normDataType = normalizeTypeName(data_type);
let fullDataType = normDataType;
if (charMaxLength && isTypeString(normDataType)) fullDataType = `${normDataType}(${charMaxLength})`;
if (numericPrecision && numericScale && isTypeNumeric(normDataType))
fullDataType = `${normDataType}(${numericPrecision},${numericScale})`;
if (char_max_length && isTypeString(normDataType)) fullDataType = `${normDataType}(${char_max_length})`;
if (numeric_precision && numeric_scale && isTypeNumeric(normDataType))
fullDataType = `${normDataType}(${numeric_precision},${numeric_scale})`;
return {
columnName,
columnName: column_name,
dataType: fullDataType,
notNull: !isNullable || isNullable == 'NO' || isNullable == 'no',
autoIncrement: !!isIdentity,
defaultValue,
notNull: !is_nullable || is_nullable == 'NO' || is_nullable == 'no',
defaultValue: default_value,
};
}
@@ -50,7 +48,10 @@ class Analyser extends DatabaseAnalyser {
}
async _runAnalysis() {
const tables = await this.driver.query(this.pool, this.createQuery('tableModifications', ['tables']));
const tables = await this.driver.query(
this.pool,
this.createQuery(this.driver.dialect.stringAgg ? 'tableModifications' : 'tableList', ['tables'])
);
const columns = await this.driver.query(this.pool, this.createQuery('columns', ['tables']));
const pkColumns = await this.driver.query(this.pool, this.createQuery('primaryKeys', ['tables']));
const fkColumns = await this.driver.query(this.pool, this.createQuery('foreignKeys', ['tables']));
@@ -59,70 +60,110 @@ class Analyser extends DatabaseAnalyser {
// console.log('PG fkColumns', fkColumns.rows);
return {
tables: tables.rows.map(table => ({
...table,
objectId: `tables:${table.schemaName}.${table.pureName}`,
contentHash: `${table.hashCodeColumns}-${table.hashCodeConstraints}`,
columns: columns.rows
.filter(col => col.pureName == table.pureName && col.schemaName == table.schemaName)
.map(getColumnInfo),
primaryKey: DatabaseAnalyser.extractPrimaryKeys(table, pkColumns.rows),
foreignKeys: DatabaseAnalyser.extractForeignKeys(table, fkColumns.rows),
})),
tables: tables.rows.map(table => {
const newTable = {
pureName: table.pure_name,
schemaName: table.schema_name,
objectId: `tables:${table.schema_name}.${table.pure_name}`,
contentHash: table.hash_code_columns ? `${table.hash_code_columns}-${table.hash_code_constraints}` : null,
};
return {
...newTable,
columns: columns.rows
.filter(col => col.pure_name == table.pure_name && col.schema_name == table.schema_name)
.map(getColumnInfo),
primaryKey: DatabaseAnalyser.extractPrimaryKeys(
newTable,
pkColumns.rows.map(x => ({
pureName: x.pure_name,
schemaName: x.schema_name,
constraintSchema: x.constraint_schema,
constraintName: x.constraint_name,
columnName: x.column_name,
}))
),
foreignKeys: DatabaseAnalyser.extractForeignKeys(
newTable,
fkColumns.rows.map(x => ({
pureName: x.pure_name,
schemaName: x.schema_name,
constraintSchema: x.constraint_schema,
constraintName: x.constraint_name,
columnName: x.column_name,
refColumnName: x.ref_column_name,
updateAction: x.update_action,
deleteAction: x.delete_action,
refTableName: x.ref_table_name,
refSchemaName: x.ref_schema_name,
}))
),
};
}),
views: views.rows.map(view => ({
...view,
objectId: `views:${view.schemaName}.${view.pureName}`,
contentHash: view.hashCode,
objectId: `views:${view.schema_name}.${view.pure_name}`,
pureName: view.pure_name,
schemaName: view.schema_name,
contentHash: view.hash_code,
columns: columns.rows
.filter(col => col.pureName == view.pureName && col.schemaName == view.schemaName)
.filter(col => col.pure_name == view.pure_name && col.schema_name == view.schema_name)
.map(getColumnInfo),
})),
procedures: routines.rows
.filter(x => x.objectType == 'PROCEDURE')
.map(proc => ({
objectId: `procedures:${proc.schemaName}.${proc.pureName}`,
contentHash: proc.hashCode,
...proc,
objectId: `procedures:${proc.schema_name}.${proc.pure_name}`,
pureName: proc.pure_name,
schemaName: proc.schema_name,
contentHash: proc.hash_code,
})),
functions: routines.rows
.filter(x => x.objectType == 'FUNCTION')
.map(func => ({
objectId: `functions:${func.schemaName}.${func.pureName}`,
contentHash: func.hashCode,
...func,
objectId: `functions:${func.schema_name}.${func.pure_name}`,
pureName: func.pure_name,
schemaName: func.schema_name,
contentHash: func.hash_code,
})),
};
}
async _getFastSnapshot() {
const tableModificationsQueryData = await this.driver.query(this.pool, this.createQuery('tableModifications'));
const tableModificationsQueryData = this.driver.dialect.stringAgg
? await this.driver.query(this.pool, this.createQuery('tableModifications'))
: null;
const viewModificationsQueryData = await this.driver.query(this.pool, this.createQuery('viewModifications'));
const routineModificationsQueryData = await this.driver.query(this.pool, this.createQuery('routineModifications'));
return {
tables: tableModificationsQueryData.rows.map(x => ({
...x,
objectId: `tables:${x.schemaName}.${x.pureName}`,
contentHash: `${x.hashCodeColumns}-${x.hashCodeConstraints}`,
})),
tables: tableModificationsQueryData
? tableModificationsQueryData.rows.map(x => ({
objectId: `tables:${x.schema_name}.${x.pure_name}`,
pureName: x.pure_name,
schemaName: x.schema_name,
contentHash: `${x.hash_code_columns}-${x.hash_code_constraints}`,
}))
: null,
views: viewModificationsQueryData.rows.map(x => ({
...x,
objectId: `views:${x.schemaName}.${x.pureName}`,
contentHash: x.hashCode,
objectId: `views:${x.schema_name}.${x.pure_name}`,
pureName: x.pure_name,
schemaName: x.schema_name,
contentHash: x.hash_code,
})),
procedures: routineModificationsQueryData.rows
.filter(x => x.objectType == 'PROCEDURE')
.map(x => ({
...x,
objectId: `procedures:${x.schemaName}.${x.pureName}`,
contentHash: x.hashCode,
objectId: `procedures:${x.schema_name}.${x.pure_name}`,
pureName: x.pure_name,
schemaName: x.schema_name,
contentHash: x.hash_code,
})),
functions: routineModificationsQueryData.rows
.filter(x => x.objectType == 'FUNCTION')
.map(x => ({
...x,
objectId: `functions:${x.schemaName}.${x.pureName}`,
contentHash: x.hashCode,
objectId: `functions:${x.schema_name}.${x.pure_name}`,
pureName: x.pure_name,
schemaName: x.schema_name,
contentHash: x.hash_code,
})),
};
}

View File

@@ -1,19 +1,19 @@
module.exports = `
select
table_schema as "schemaName",
table_name as "pureName",
column_name as "columnName",
is_nullable as "isNullable",
data_type as "dataType",
character_maximum_length as "charMaxLength",
numeric_precision as "numericPrecision",
numeric_scale as "numericScale",
column_default as "defaultValue"
table_schema as "schema_name",
table_name as "pure_name",
column_name as "column_name",
is_nullable as "is_nullable",
data_type as "data_type",
character_maximum_length as "char_max_length",
numeric_precision as "numeric_precision",
numeric_scale as "numeric_scale",
column_default as "default_value"
from information_schema.columns
where
table_schema <> 'information_schema'
and table_schema <> 'pg_catalog'
and table_schema !~ '^pg_toast'
and 'tables:' || table_schema || '.' || table_name =OBJECT_ID_CONDITION
and ('tables:' || table_schema || '.' || table_name) =OBJECT_ID_CONDITION
order by ordinal_position
`;

View File

@@ -1,15 +1,15 @@
module.exports = `
select
fk.constraint_name as "constraintName",
fk.constraint_schema as "constraintSchema",
base.table_name as "pureName",
base.table_schema as "schemaName",
fk.update_rule as "updateAction",
fk.delete_rule as "deleteAction",
ref.table_name as "refTableName",
ref.table_schema as "refSchemaName",
basecol.column_name as "columnName",
refcol.column_name as "refColumnName"
fk.constraint_name as "constraint_name",
fk.constraint_schema as "constraint_schema",
base.table_name as "pure_name",
base.table_schema as "schema_name",
fk.update_rule as "update_action",
fk.delete_rule as "delete_action",
ref.table_name as "ref_table_name",
ref.table_schema as "ref_schema_name",
basecol.column_name as "column_name",
refcol.column_name as "ref_column_name"
from information_schema.referential_constraints fk
inner join information_schema.table_constraints base on fk.constraint_name = base.constraint_name and fk.constraint_schema = base.constraint_schema
inner join information_schema.table_constraints ref on fk.unique_constraint_name = ref.constraint_name and fk.unique_constraint_schema = ref.constraint_schema
@@ -19,6 +19,6 @@ where
base.table_schema <> 'information_schema'
and base.table_schema <> 'pg_catalog'
and base.table_schema !~ '^pg_toast'
and 'tables:' || base.table_schema || '.' || base.table_name =OBJECT_ID_CONDITION
and ('tables:' || base.table_schema || '.' || base.table_name) =OBJECT_ID_CONDITION
order by basecol.ordinal_position
`;

View File

@@ -1,5 +1,6 @@
const columns = require('./columns');
const tableModifications = require('./tableModifications');
const tableList = require('./tableList');
const viewModifications = require('./viewModifications');
const primaryKeys = require('./primaryKeys');
const foreignKeys = require('./foreignKeys');
@@ -10,6 +11,7 @@ const routineModifications = require('./routineModifications');
module.exports = {
columns,
tableModifications,
tableList,
viewModifications,
primaryKeys,
foreignKeys,

View File

@@ -1,10 +1,10 @@
module.exports = `
select
table_constraints.constraint_schema as "constraintSchema",
table_constraints.constraint_name as "constraintName",
table_constraints.table_schema as "schemaName",
table_constraints.table_name as "pureName",
key_column_usage.column_name as "columnName"
table_constraints.constraint_schema as "constraint_schema",
table_constraints.constraint_name as "constraint_name",
table_constraints.table_schema as "schema_name",
table_constraints.table_name as "pure_name",
key_column_usage.column_name as "column_name"
from information_schema.table_constraints
inner join information_schema.key_column_usage on table_constraints.table_name = key_column_usage.table_name and table_constraints.constraint_name = key_column_usage.constraint_name
where
@@ -12,6 +12,6 @@ where
and table_constraints.table_schema <> 'pg_catalog'
and table_constraints.table_schema !~ '^pg_toast'
and table_constraints.constraint_type = 'PRIMARY KEY'
and 'tables:' || table_constraints.table_schema || '.' || table_constraints.table_name =OBJECT_ID_CONDITION
and ('tables:' || table_constraints.table_schema || '.' || table_constraints.table_name) =OBJECT_ID_CONDITION
order by key_column_usage.ordinal_position
`;

View File

@@ -1,9 +1,9 @@
module.exports = `
select
routine_name as "pureName",
routine_schema as "schemaName",
md5(routine_definition) as "hashCode",
routine_type as "objectType"
routine_name as "pure_name",
routine_schema as "schema_name",
md5(routine_definition) as "hash_code",
routine_type as "object_type"
from
information_schema.routines where routine_schema != 'information_schema' and routine_schema != 'pg_catalog'
and routine_type in ('PROCEDURE', 'FUNCTION')

View File

@@ -0,0 +1,10 @@
module.exports = `
select infoTables.table_schema as "schema_name", infoTables.table_name as "pure_name"
from information_schema.tables infoTables
where infoTables.table_type not like '%VIEW%'
and ('tables:' || infoTables.table_schema || '.' || infoTables.table_name) =OBJECT_ID_CONDITION
and infoTables.table_schema <> 'pg_catalog'
and infoTables.table_schema <> 'information_schema'
and infoTables.table_schema <> 'pg_internal'
and infoTables.table_schema !~ '^pg_toast'
`;

View File

@@ -1,11 +1,11 @@
module.exports = `
select infoTables.table_schema as "schemaName", infoTables.table_name as "pureName",
select infoTables.table_schema as "schema_name", infoTables.table_name as "pure_name",
(
select md5(string_agg(
infoColumns.column_name || '|' || infoColumns.data_type || '|' || infoColumns.is_nullable || '|' || coalesce(infoColumns.character_maximum_length, -1)
|| '|' || coalesce(infoColumns.numeric_precision, -1),
',' order by infoColumns.ordinal_position
)) as "hashCodeColumns"
)) as "hash_code_columns"
from information_schema.columns infoColumns
where infoColumns.table_schema = infoTables.table_schema and infoColumns.table_name = infoTables.table_name
),
@@ -13,7 +13,7 @@ select infoTables.table_schema as "schemaName", infoTables.table_name as "pureNa
select md5(string_agg(
infoConstraints.constraint_name || '|' || infoConstraints.constraint_type ,
',' order by infoConstraints.constraint_name
)) as "hashCodeConstraints"
)) as "hash_code_constraints"
from information_schema.table_constraints infoConstraints
where infoConstraints.table_schema = infoTables.table_schema and infoConstraints.table_name = infoTables.table_name
)

View File

@@ -1,8 +1,8 @@
module.exports = `
select
table_name as "pureName",
table_schema as "schemaName",
md5(view_definition) as "hashCode"
table_name as "pure_name",
table_schema as "schema_name",
md5(view_definition) as "hash_code"
from
information_schema.views where table_schema != 'information_schema' and table_schema != 'pg_catalog'
`;

View File

@@ -1,9 +1,9 @@
module.exports = `
select
table_name as "pureName",
table_schema as "schemaName",
view_definition as "createSql",
md5(view_definition) as "hashCode"
table_name as "pure_name",
table_schema as "schema_name",
view_definition as "create_sql",
md5(view_definition) as "hash_code"
from
information_schema.views
where table_schema != 'information_schema' and table_schema != 'pg_catalog'

View File

@@ -12,6 +12,7 @@ const dialect = {
quoteIdentifier(s) {
return '"' + s + '"';
},
stringAgg: true,
};
/** @type {import('dbgate-types').EngineDriver} */
@@ -38,7 +39,10 @@ const cockroachDriver = {
const redshiftDriver = {
...driverBase,
dumperClass: Dumper,
dialect,
dialect: {
...dialect,
stringAgg: false,
},
engine: 'red@dbgate-plugin-postgres',
title: 'Amazon Redshift',
defaultPort: 5439,