Merge branch 'master' into feature/firebird

This commit is contained in:
Pavel
2025-06-10 14:57:26 +02:00
145 changed files with 5117 additions and 1626 deletions

View File

@@ -37,7 +37,7 @@
"dependencies": {
"dbgate-tools": "^6.0.0-alpha.1",
"lodash": "^4.17.21",
"dbgate-query-splitter": "^4.11.3"
"dbgate-query-splitter": "^4.11.5"
},
"optionalDependencies": {
"@duckdb/node-api": "^1.2.1-alpha.16"

View File

@@ -9,7 +9,7 @@ const sql = require('./sql');
const { mapSchemaRowToSchemaInfo } = require('./Analyser.helpers');
const { zipObject } = require('lodash');
const logger = getLogger('sqliteDriver');
const logger = getLogger('duckdbDriver');
/**
* @type {import('@duckdb/node-api')}

View File

@@ -31,7 +31,14 @@ function _normalizeValue(value) {
}
if (typeof value === 'bigint') {
return parseInt(value);
const parsed = parseInt(value);
if (Number.isSafeInteger(parsed)) {
return parsed;
} else {
return {
$bigint: value.toString(),
};
}
}
if (value instanceof DuckDBTimestampValue) {
@@ -44,17 +51,15 @@ function _normalizeValue(value) {
}
if (value instanceof DuckDBDateValue) {
const year = value.year;
const month = String(value.month).padStart(2, '0');
const day = String(value.day).padStart(2, '0');
return `${year}-${month}-${day}`;
return value.toString();
}
if (value instanceof DuckDBTimeValue) {
const hour = String(value.hour).padStart(2, '0');
const minute = String(value.min).padStart(2, '0');
const second = String(value.sec).padStart(2, '0');
const micros = String(value.micros).padStart(6, '0').substring(0, 3);
const parts = value.toParts();
const hour = String(parts.hour).padStart(2, '0');
const minute = String(parts.min).padStart(2, '0');
const second = String(parts.sec).padStart(2, '0');
const micros = String(parts.micros).padStart(6, '0').substring(0, 3);
return `${hour}:${minute}:${second}.${micros}`;
}

View File

@@ -37,7 +37,7 @@
},
"dependencies": {
"bson": "^6.8.0",
"dbgate-query-splitter": "^4.11.4",
"dbgate-query-splitter": "^4.11.5",
"dbgate-tools": "^6.0.0-alpha.1",
"is-promise": "^4.0.0",
"lodash": "^4.17.21",

View File

@@ -3,8 +3,9 @@ const stream = require('stream');
const isPromise = require('is-promise');
const driverBase = require('../frontend/driver');
const Analyser = require('./Analyser');
const { MongoClient, ObjectId, AbstractCursor } = require('mongodb');
const { MongoClient, ObjectId, AbstractCursor, Long } = require('mongodb');
const { EJSON } = require('bson');
const { serializeJsTypesForJsonStringify, deserializeJsTypesFromJsonParse } = require('dbgate-tools');
const createBulkInsertStream = require('./createBulkInsertStream');
const {
convertToMongoCondition,
@@ -12,19 +13,30 @@ const {
convertToMongoSort,
} = require('../frontend/convertToMongoCondition');
function transformMongoData(row) {
return EJSON.serialize(row);
function serializeMongoData(row) {
  // Normalize JS-side values via the shared serializer, then EJSON-encode the
  // remaining BSON types for transport.
  const normalized = serializeJsTypesForJsonStringify(row, (value) => {
    if (!(value instanceof Long)) return undefined;
    const asNumber = value.toNumber();
    // 64-bit ints that fit exactly in a JS number pass through;
    // larger ones are tagged so precision is not silently lost.
    return Number.isSafeInteger(asNumber) ? asNumber : { $bigint: value.toString() };
  });
  return EJSON.serialize(normalized);
}
async function readCursor(cursor, options) {
  // Mongo collections have no fixed schema, so announce a dynamic structure
  // before streaming the serialized rows to the consumer.
  options.recordset({ __isDynamicStructure: true });
  await cursor.forEach((row) => options.row(serializeMongoData(row)));
}
function convertObjectId(condition) {
return EJSON.deserialize(condition);
function deserializeMongoData(value) {
  // First restore BSON types (e.g. {$oid: ...} -> ObjectId) via EJSON,
  // then restore JS types tagged by the shared serializer.
  const bsonRestored = EJSON.deserialize(value);
  return deserializeJsTypesFromJsonParse(bsonRestored);
}
function findArrayResult(resValue) {
@@ -263,7 +275,7 @@ const driver = {
const cursorStream = exprValue.stream();
cursorStream.on('data', (row) => {
pass.write(transformMongoData(row));
pass.write(serializeMongoData(row));
});
// propagate error
@@ -317,26 +329,26 @@ const driver = {
const collection = dbhan.getDatabase().collection(options.pureName);
if (options.countDocuments) {
const count = await collection.countDocuments(convertObjectId(mongoCondition) || {});
const count = await collection.countDocuments(deserializeMongoData(mongoCondition) || {});
return { count };
} else if (options.aggregate) {
let cursor = await collection.aggregate(convertObjectId(convertToMongoAggregate(options.aggregate)));
let cursor = await collection.aggregate(deserializeMongoData(convertToMongoAggregate(options.aggregate)));
const rows = await cursor.toArray();
return {
rows: rows.map(transformMongoData).map((x) => ({
rows: rows.map(serializeMongoData).map((x) => ({
...x._id,
..._.omit(x, ['_id']),
})),
};
} else {
// console.log('options.condition', JSON.stringify(options.condition, undefined, 2));
let cursor = await collection.find(convertObjectId(mongoCondition) || {});
let cursor = await collection.find(deserializeMongoData(mongoCondition) || {});
if (options.sort) cursor = cursor.sort(convertToMongoSort(options.sort));
if (options.skip) cursor = cursor.skip(options.skip);
if (options.limit) cursor = cursor.limit(options.limit);
const rows = await cursor.toArray();
return {
rows: rows.map(transformMongoData),
rows: rows.map(serializeMongoData),
};
}
} catch (err) {
@@ -358,7 +370,7 @@ const driver = {
...insert.document,
...insert.fields,
};
const resdoc = await collection.insertOne(convertObjectId(document));
const resdoc = await collection.insertOne(deserializeMongoData(document));
res.inserted.push(resdoc._id);
}
for (const update of changeSet.updates) {
@@ -368,16 +380,16 @@ const driver = {
...update.document,
...update.fields,
};
const doc = await collection.findOne(convertObjectId(update.condition));
const doc = await collection.findOne(deserializeMongoData(update.condition));
if (doc) {
const resdoc = await collection.replaceOne(convertObjectId(update.condition), {
...convertObjectId(document),
const resdoc = await collection.replaceOne(deserializeMongoData(update.condition), {
...deserializeMongoData(document),
_id: doc._id,
});
res.replaced.push(resdoc._id);
}
} else {
const set = convertObjectId(_.pickBy(update.fields, (v, k) => !v?.$$undefined$$));
const set = deserializeMongoData(_.pickBy(update.fields, (v, k) => !v?.$$undefined$$));
const unset = _.fromPairs(
Object.keys(update.fields)
.filter((k) => update.fields[k]?.$$undefined$$)
@@ -387,13 +399,13 @@ const driver = {
if (!_.isEmpty(set)) updates.$set = set;
if (!_.isEmpty(unset)) updates.$unset = unset;
const resdoc = await collection.updateOne(convertObjectId(update.condition), updates);
const resdoc = await collection.updateOne(deserializeMongoData(update.condition), updates);
res.updated.push(resdoc._id);
}
}
for (const del of changeSet.deletes) {
const collection = db.collection(del.pureName);
const resdoc = await collection.deleteOne(convertObjectId(del.condition));
const resdoc = await collection.deleteOne(deserializeMongoData(del.condition));
res.deleted.push(resdoc._id);
}
return res;
@@ -449,7 +461,7 @@ const driver = {
]);
const rows = await cursor.toArray();
return _.uniqBy(
rows.map(transformMongoData).map(({ _id }) => {
rows.map(serializeMongoData).map(({ _id }) => {
if (_.isArray(_id) || _.isPlainObject(_id)) return { value: null };
return { value: _id };
}),

View File

@@ -11,6 +11,21 @@ function convertRightOperandToMongoValue(right) {
throw new Error(`Unknown right operand type ${right.exprType}`);
}
function convertRightEqualOperandToMongoCondition(right) {
  // Only literal values are supported on the right-hand side of '='.
  if (right.exprType != 'value') {
    throw new Error(`Unknown right operand type ${right.exprType}`);
  }
  const { value } = right;
  // A 24-char hex string may be stored either as a plain string or as an
  // ObjectId, so match both representations with $in.
  const looksLikeObjectId = /^[0-9a-fA-F]{24}$/.test(value);
  return looksLikeObjectId ? { $in: [value, { $oid: value }] } : { $eq: value };
}
function convertToMongoCondition(filter) {
if (!filter) {
return null;
@@ -28,9 +43,7 @@ function convertToMongoCondition(filter) {
switch (filter.operator) {
case '=':
return {
[convertLeftOperandToMongoColumn(filter.left)]: {
$eq: convertRightOperandToMongoValue(filter.right),
},
[convertLeftOperandToMongoColumn(filter.left)]: convertRightEqualOperandToMongoCondition(filter.right),
};
case '!=':
case '<>':

View File

@@ -5,11 +5,17 @@ const { mongoSplitterOptions } = require('dbgate-query-splitter/lib/options');
const _pickBy = require('lodash/pickBy');
const _fromPairs = require('lodash/fromPairs');
function mongoReplacer(key, value) {
  // JSON.stringify throws on BigInt; encode it as a tagged object instead.
  return typeof value === 'bigint' ? { $bigint: value.toString() } : value;
}
function jsonStringifyWithObjectId(obj) {
  // Pretty-print with BigInt support (via mongoReplacer), then rewrite the
  // tagged forms into Mongo-shell syntax:
  //   {"$oid": "..."}    -> ObjectId("...")
  //   {"$bigint": "..."} -> ...n  (BigInt literal)
  // The $bigint pattern allows an optional leading '-' so negative BigInt
  // values round-trip too (previously they were left as tagged objects).
  return JSON.stringify(obj, mongoReplacer, 2)
    .replace(/\{\s*\"\$oid\"\s*\:\s*\"([0-9a-f]+)\"\s*\}/g, (m, id) => `ObjectId("${id}")`)
    .replace(/\{\s*\"\$bigint\"\s*\:\s*\"(-?[0-9]+)\"\s*\}/g, (m, num) => `${num}n`);
}
/** @type {import('dbgate-types').SqlDialect} */
@@ -36,7 +42,8 @@ const driver = {
defaultPort: 27017,
supportsDatabaseUrl: true,
supportsServerSummary: true,
supportsDatabaseProfiler: true,
// temporarily disable MongoDB profiler support
supportsDatabaseProfiler: false,
profilerFormatterFunction: 'formatProfilerEntry@dbgate-plugin-mongo',
profilerTimestampFunction: 'extractProfileTimestamp@dbgate-plugin-mongo',
profilerChartAggregateFunction: 'aggregateProfileChartEntry@dbgate-plugin-mongo',

View File

@@ -38,7 +38,7 @@
"dependencies": {
"@azure/identity": "^4.6.0",
"async-lock": "^1.2.6",
"dbgate-query-splitter": "^4.11.4",
"dbgate-query-splitter": "^4.11.5",
"dbgate-tools": "^6.0.0-alpha.1",
"lodash": "^4.17.21",
"tedious": "^18.6.1"

View File

@@ -36,7 +36,7 @@
"webpack-cli": "^5.1.4"
},
"dependencies": {
"dbgate-query-splitter": "^4.11.4",
"dbgate-query-splitter": "^4.11.5",
"dbgate-tools": "^6.0.0-alpha.1",
"lodash": "^4.17.21",
"mysql2": "^3.11.3"

View File

@@ -245,6 +245,7 @@ class Analyser extends DatabaseAnalyser {
parameters: functionNameToParameters[x.pureName],
})),
triggers: triggers.rows.map(row => ({
objectId: 'triggers:' + row.triggerName,
contentHash: row.modifyDate,
pureName: row.triggerName,
eventType: row.eventType,
@@ -276,7 +277,8 @@ class Analyser extends DatabaseAnalyser {
const tableModificationsQueryData = await this.analyserQuery('tableModifications');
const procedureModificationsQueryData = await this.analyserQuery('procedureModifications');
const functionModificationsQueryData = await this.analyserQuery('functionModifications');
const schedulerEvents = await this.analyserQuery('schedulerEvents');
const schedulerEvents = await this.analyserQuery('schedulerEventsModifications');
const triggers = await this.analyserQuery('triggersModifications');
return {
tables: tableModificationsQueryData.rows
@@ -307,17 +309,13 @@ class Analyser extends DatabaseAnalyser {
schedulerEvents: schedulerEvents.rows.map(row => ({
contentHash: _.isDate(row.LAST_ALTERED) ? row.LAST_ALTERED.toISOString() : row.LAST_ALTERED,
pureName: row.EVENT_NAME,
createSql: row.CREATE_SQL,
objectId: row.EVENT_NAME,
intervalValue: row.INTERVAL_VALUE,
intervalField: row.INTERVAL_FIELD,
starts: row.STARTS,
status: row.STATUS,
executeAt: row.EXECUTE_AT,
lastExecuted: row.LAST_EXECUTED,
eventType: row.EVENT_TYPE,
definer: row.DEFINER,
objectTypeField: 'schedulerEvents',
})),
triggers: triggers.rows.map(row => ({
contentHash: row.modifyDate,
objectId: 'triggers:' + row.triggerName,
pureName: row.triggerName,
tableName: row.tableName,
})),
};
}

View File

@@ -12,7 +12,9 @@ const uniqueNames = require('./uniqueNames');
const viewTexts = require('./viewTexts');
const parameters = require('./parameters');
const triggers = require('./triggers');
const triggersModifications = require('./triggersModifications');
const schedulerEvents = require('./schedulerEvents.js');
const schedulerEventsModifications = require('./schedulerEventsModifications.js');
module.exports = {
columns,
@@ -29,5 +31,7 @@ module.exports = {
uniqueNames,
viewTexts,
triggers,
triggersModifications,
schedulerEvents,
schedulerEventsModifications,
};

View File

@@ -0,0 +1,7 @@
// Cheap change-detection probe for MySQL scheduler events: fetches only the
// event name and its last-modified timestamp (used as contentHash by the
// analyser), avoiding the cost of loading full event definitions.
// NOTE(review): '#DATABASE#' appears to be a placeholder substituted with the
// analysed schema name by the analyser query machinery — confirm against
// analyserQuery implementation.
module.exports = `
SELECT
EVENT_NAME,
LAST_ALTERED
FROM INFORMATION_SCHEMA.EVENTS
WHERE EVENT_SCHEMA = '#DATABASE#'
`;

View File

@@ -0,0 +1,9 @@
module.exports = `
SELECT
TRIGGER_NAME AS triggerName,
EVENT_OBJECT_TABLE AS tableName,
CREATED as modifyDate
FROM
INFORMATION_SCHEMA.TRIGGERS
WHERE EVENT_OBJECT_SCHEMA = '#DATABASE#'
`;

View File

@@ -253,7 +253,14 @@ const mysqlDriverBase = {
const customArgs = options.customArgs.split(/\s+/).filter(arg => arg.trim() != '');
args.push(...customArgs);
}
args.push(database);
if (options.createDatabase) {
args.push('--databases', database);
if (options.dropDatabase) {
args.push('--add-drop-database');
}
} else {
args.push(database);
}
return { command, args };
},
restoreDatabaseCommand(connection, settings, externalTools) {
@@ -346,6 +353,19 @@ const mysqlDriverBase = {
default: false,
disabledFn: values => values.lockTables || values.skipLockTables,
},
{
type: 'checkbox',
label: 'Create database',
name: 'createDatabase',
default: false,
},
{
type: 'checkbox',
label: 'Drop database before import',
name: 'dropDatabase',
default: false,
disabledFn: values => !values.createDatabase,
},
{
type: 'text',
label: 'Custom arguments',

View File

@@ -35,7 +35,7 @@
"webpack-cli": "^5.1.4"
},
"dependencies": {
"dbgate-query-splitter": "^4.11.4",
"dbgate-query-splitter": "^4.11.5",
"dbgate-tools": "^6.0.0-alpha.1",
"lodash": "^4.17.21"
},

View File

@@ -37,7 +37,7 @@
"dependencies": {
"wkx": "^0.5.0",
"pg-copy-streams": "^6.0.6",
"dbgate-query-splitter": "^4.11.4",
"dbgate-query-splitter": "^4.11.5",
"dbgate-tools": "^6.0.0-alpha.1",
"lodash": "^4.17.21",
"pg": "^8.11.5"

View File

@@ -21,6 +21,11 @@ const logger = getLogger('postreDriver');
pg.types.setTypeParser(1082, 'text', val => val); // date
pg.types.setTypeParser(1114, 'text', val => val); // timestamp without timezone
pg.types.setTypeParser(1184, 'text', val => val); // timestamp
// OID 20 = int8 / BIGINT: keep the value as a JS number when it fits exactly,
// otherwise fall back to BigInt to avoid silent precision loss.
pg.types.setTypeParser(20, 'text', val => {
  const parsed = parseInt(val, 10); // always pass radix
  if (Number.isSafeInteger(parsed)) return parsed;
  return BigInt(val);
}); // int8 (bigint) — previous "// timestamp" comment was a copy-paste error
function extractGeographyDate(value) {
try {
@@ -159,6 +164,17 @@ const drivers = driverBases.map(driverBase => ({
return { rows: (res.rows || []).map(row => zipDataRow(row, columns)), columns };
},
stream(dbhan, sql, options) {
const handleNotice = notice => {
const { message, where } = notice;
options.info({
message,
procedure: where,
time: new Date(),
severity: 'info',
detail: notice,
});
};
const query = new pg.Query({
text: sql,
rowMode: 'array',
@@ -166,6 +182,7 @@ const drivers = driverBases.map(driverBase => ({
let wasHeader = false;
let columnsToTransform = null;
dbhan.client.on('notice', handleNotice);
query.on('row', row => {
if (!wasHeader) {
@@ -206,6 +223,7 @@ const drivers = driverBases.map(driverBase => ({
wasHeader = true;
}
dbhan.client.off('notice', handleNotice);
options.done();
});
@@ -223,6 +241,7 @@ const drivers = driverBases.map(driverBase => ({
time: new Date(),
severity: 'error',
});
dbhan.client.off('notice', handleNotice);
options.done();
});

View File

@@ -34,7 +34,7 @@
"webpack-cli": "^5.1.4"
},
"dependencies": {
"dbgate-query-splitter": "^4.11.4",
"dbgate-query-splitter": "^4.11.5",
"dbgate-tools": "^6.0.0-alpha.1",
"lodash": "^4.17.21",
"async": "^3.2.3",

View File

@@ -201,6 +201,21 @@ const driver = {
return _.range(16).map((index) => ({ name: `db${index}`, extInfo: info[`db${index}`], sortOrder: index }));
},
async scanKeys(dbhan, pattern, cursor = 0, count) {
const match = pattern?.match(/[\?\[\{]/) ? pattern : pattern ? `*${pattern}*` : '*';
const [nextCursor, keys] = await dbhan.client.scan(cursor, 'MATCH', match, 'COUNT', count);
const dbsize = await dbhan.client.dbsize();
const keysMapped = keys.map((key) => ({
key,
}));
await this.enrichKeyInfo(dbhan, keysMapped);
return {
nextCursor,
keys: keysMapped,
dbsize,
};
},
async loadKeys(dbhan, root = '', filter = null, limit = null) {
const keys = await this.getKeys(dbhan, root ? `${root}${dbhan.treeKeySeparator}*` : '*');
const keysFiltered = keys.filter((x) => filterName(filter, x));
@@ -260,7 +275,7 @@ const driver = {
extractKeysFromLevel(dbhan, root, keys) {
const prefix = root ? `${root}${dbhan.treeKeySeparator}` : '';
const rootSplit = _.compact(root.split(dbhan.treeKeySeparator));
const rootSplit = root == '' ? [] : root.split(dbhan.treeKeySeparator);
const res = {};
for (const key of keys) {
if (!key.startsWith(prefix)) continue;
@@ -305,17 +320,56 @@ const driver = {
}
},
async enrichOneKeyInfo(dbhan, item) {
item.type = await dbhan.client.type(item.key);
item.count = await this.getKeyCardinality(dbhan, item.key, item.type);
},
// async enrichOneKeyInfo(dbhan, item) {
// item.type = await dbhan.client.type(item.key);
// item.count = await this.getKeyCardinality(dbhan, item.key, item.type);
// },
async enrichKeyInfo(dbhan, levelInfo) {
await async.eachLimit(
levelInfo.filter((x) => x.key),
10,
async (item) => await this.enrichOneKeyInfo(dbhan, item)
);
async enrichKeyInfo(dbhan, keyObjects) {
// 1. get type
const typePipeline = dbhan.client.pipeline();
for (const item of keyObjects) {
typePipeline.type(item.key);
}
const resultType = await typePipeline.exec();
for (let i = 0; i < resultType.length; i++) {
if (resultType[i][0] == null) {
keyObjects[i].type = resultType[i][1];
}
}
// 2. get cardinality
const cardinalityPipeline = dbhan.client.pipeline();
for (const item of keyObjects) {
switch (item.type) {
case 'list':
cardinalityPipeline.llen(item.key);
case 'set':
cardinalityPipeline.scard(item.key);
case 'zset':
cardinalityPipeline.zcard(item.key);
case 'stream':
cardinalityPipeline.xlen(item.key);
case 'hash':
cardinalityPipeline.hlen(item.key);
}
}
const resultCardinality = await cardinalityPipeline.exec();
let resIndex = 0;
for (const item of keyObjects) {
if (
item.type == 'list' ||
item.type == 'set' ||
item.type == 'zset' ||
item.type == 'stream' ||
item.type == 'hash'
) {
if (resultCardinality[resIndex][0] == null) {
item.count = resultCardinality[resIndex][1];
resIndex++;
}
}
}
},
async loadKeyInfo(dbhan, key) {

View File

@@ -37,7 +37,7 @@
"dependencies": {
"dbgate-tools": "^6.0.0-alpha.1",
"lodash": "^4.17.21",
"dbgate-query-splitter": "^4.11.4"
"dbgate-query-splitter": "^4.11.5"
},
"optionalDependencies": {
"libsql": "0.5.0-pre.6",