Merge pull request #1010 from dbgate/feature/cassandra

Feature/cassandra
This commit is contained in:
Jan Prochazka
2025-02-11 10:40:03 +01:00
committed by GitHub
52 changed files with 1515 additions and 306 deletions

View File

@@ -89,3 +89,7 @@ jobs:
ORACLE_PASSWORD: Pwd2020Db
ports:
- '15006:1521'
cassandradb:
image: 'cassandra:5.0.2'
ports:
- '15942:9042'

View File

@@ -1,6 +1,6 @@
# this compose file is for testing purposes only
# use it for testing docker containsers built on local machine
version: "3"
version: '3'
services:
dbgate:
build: docker
@@ -15,31 +15,31 @@ services:
volumes:
- dbgate-data:/root/.dbgate
# environment:
# WEB_ROOT: /dbgate
# CONNECTIONS: mssql
# LABEL_mssql: MS Sql
# SERVER_mssql: mssql
# USER_mssql: sa
# PORT_mssql: 1433
# PASSWORD_mssql: Pwd2020Db
# ENGINE_mssql: mssql@dbgate-plugin-mssql
# proxy:
# # image: nginx
# build: test/nginx
# ports:
# - 8082:80
# CONNECTIONS: mssql
# LABEL_mssql: MS Sql
# SERVER_mssql: mssql
# USER_mssql: sa
# PORT_mssql: 1433
# PASSWORD_mssql: Pwd2020Db
# ENGINE_mssql: mssql@dbgate-plugin-mssql
proxy:
# image: nginx
build: test/nginx
ports:
- 8082:80
# volumes:
# - /home/jena/test/chinook:/mnt/sqt
# environment:
# CONNECTIONS: sqlite
volumes:
- /home/jena/test/chinook:/mnt/sqt
environment:
CONNECTIONS: sqlite
# LABEL_sqlite: sqt
# FILE_sqlite: /mnt/sqt/Chinook.db
# ENGINE_sqlite: sqlite@dbgate-plugin-sqlite
LABEL_sqlite: sqt
FILE_sqlite: /mnt/sqt/Chinook.db
ENGINE_sqlite: sqlite@dbgate-plugin-sqlite
# mssql:
# image: mcr.microsoft.com/mssql/server
@@ -51,4 +51,5 @@ services:
volumes:
dbgate-data:
driver: local
driver: local

View File

@@ -76,26 +76,23 @@ describe('Alter database', () => {
})
);
const objectsSupportingRename = flatSource(x => x.supportRenameSqlObject);
if (objectsSupportingRename.length > 0) {
test.each(objectsSupportingRename)(
'Rename object - %s - %s',
testWrapper(async (conn, driver, type, object, engine) => {
for (const sql of initSql) await runCommandOnDriver(conn, driver, sql);
test.each(flatSource(x => x.supportRenameSqlObject))(
'Rename object - %s - %s',
testWrapper(async (conn, driver, type, object, engine) => {
for (const sql of initSql) await runCommandOnDriver(conn, driver, sql);
await runCommandOnDriver(conn, driver, object.create1);
await runCommandOnDriver(conn, driver, object.create1);
const structure = extendDatabaseInfo(await driver.analyseFull(conn));
const structure = extendDatabaseInfo(await driver.analyseFull(conn));
const dmp = driver.createDumper();
dmp.renameSqlObject(structure[type][0], 'renamed1');
const dmp = driver.createDumper();
dmp.renameSqlObject(structure[type][0], 'renamed1');
await driver.query(conn, dmp.s);
await driver.query(conn, dmp.s);
const structure2 = await driver.analyseFull(conn);
expect(structure2[type].length).toEqual(1);
expect(structure2[type][0].pureName).toEqual('renamed1');
})
);
}
const structure2 = await driver.analyseFull(conn);
expect(structure2[type].length).toEqual(1);
expect(structure2[type][0].pureName).toEqual('renamed1');
})
);
});

View File

@@ -1,7 +1,7 @@
const stableStringify = require('json-stable-stringify');
const _ = require('lodash');
const fp = require('lodash/fp');
const { testWrapper } = require('../tools');
const { testWrapper, removeNotNull, transformSqlForEngine } = require('../tools');
const engines = require('../engines');
const crypto = require('crypto');
const {
@@ -19,6 +19,7 @@ function pickImportantTableInfo(engine, table) {
pureName: table.pureName,
columns: table.columns
.filter(x => x.columnName != 'rowid')
.sort((a, b) => a.columnName.localeCompare(b.columnName))
.map(fp.pick(props))
.map(props => _.omitBy(props, x => x == null))
.map(props =>
@@ -33,36 +34,36 @@ function checkTableStructure(engine, t1, t2) {
}
async function testTableDiff(engine, conn, driver, mangle) {
await driver.query(conn, formatQueryWithoutParams(driver, `create table ~t0 (~id int not null primary key)`));
const initQuery = formatQueryWithoutParams(driver, `create table ~t0 (~id int not null primary key)`);
await driver.query(conn, transformSqlForEngine(engine, initQuery));
await driver.query(
conn,
formatQueryWithoutParams(
driver,
`create table ~t1 (
const query = formatQueryWithoutParams(
driver,
`create table ~t1 (
~col_pk int not null primary key,
~col_std int,
~col_def int default 12,
~col_def int ${engine.skipDefaultValue ? '' : 'default 12'},
${engine.skipReferences ? '' : '~col_fk int references ~t0(~id),'}
~col_idx int,
~col_uq int ${engine.skipUnique ? '' : 'unique'} ,
~col_ref int ${engine.skipUnique ? '' : 'unique'}
)`
)
);
await driver.query(conn, transformSqlForEngine(engine, query));
if (!engine.skipIndexes) {
await driver.query(conn, formatQueryWithoutParams(driver, `create index ~idx1 on ~t1(~col_idx)`));
const query = formatQueryWithoutParams(driver, `create index ~idx1 on ~t1(~col_idx)`);
await driver.query(conn, transformSqlForEngine(engine, query));
}
if (!engine.skipReferences) {
await driver.query(
conn,
formatQueryWithoutParams(
driver,
`create table ~t2 (~id int not null primary key, ~fkval int null references ~t1(~col_ref))`
)
const query = formatQueryWithoutParams(
driver,
`create table ~t2 (~id int not null primary key, ~fkval int null references ~t1(~col_ref))`
);
await driver.query(conn, transformSqlForEngine(engine, query));
}
const tget = x => x.tables.find(y => y.pureName == 't1');
@@ -89,14 +90,12 @@ const TESTED_COLUMNS = ['col_pk', 'col_std', 'col_def', 'col_fk', 'col_ref', 'co
// const TESTED_COLUMNS = ['col_std'];
// const TESTED_COLUMNS = ['col_ref'];
function engines_columns_source() {
function create_engines_columns_source(engines) {
return _.flatten(
engines.map(engine =>
TESTED_COLUMNS.filter(col => !col.endsWith('_pk') || !engine.skipPkColumnTesting).map(column => [
engine.label,
column,
engine,
])
TESTED_COLUMNS.filter(col => col.endsWith('_pk') || !engine.skipNonPkRename)
.filter(col => !col.endsWith('_pk') || !engine.skipPkColumnTesting)
.map(column => [engine.label, column, engine])
)
);
}
@@ -117,26 +116,45 @@ describe('Alter table', () => {
})
);
test.each(engines_columns_source())(
'Drop column - %s - %s',
testWrapper(async (conn, driver, column, engine) => {
await testTableDiff(engine, conn, driver, tbl => (tbl.columns = tbl.columns.filter(x => x.columnName != column)));
})
const columnsSource = create_engines_columns_source(engines);
const dropableColumnsSrouce = columnsSource.filter(
([_label, col, engine]) => !engine.skipPkDrop || !col.endsWith('_pk')
);
const hasDropableColumns = dropableColumnsSrouce.length > 0;
test.each(engines_columns_source())(
'Change nullability - %s - %s',
testWrapper(async (conn, driver, column, engine) => {
await testTableDiff(
engine,
conn,
driver,
tbl => (tbl.columns = tbl.columns.map(x => (x.columnName == column ? { ...x, notNull: true } : x)))
);
})
);
if (hasDropableColumns) {
test.each(dropableColumnsSrouce)(
'Drop column - %s - %s',
testWrapper(async (conn, driver, column, engine) => {
await testTableDiff(
engine,
conn,
driver,
tbl => (tbl.columns = tbl.columns.filter(x => x.columnName != column))
);
})
);
}
test.each(engines_columns_source())(
const hasEnginesWithNullable = engines.filter(x => !x.skipNullable).length > 0;
if (hasEnginesWithNullable) {
const source = create_engines_columns_source(engines.filter(x => !x.skipNullable));
test.each(source)(
'Change nullability - %s - %s',
testWrapper(async (conn, driver, column, engine) => {
await testTableDiff(
engine,
conn,
driver,
tbl => (tbl.columns = tbl.columns.map(x => (x.columnName == column ? { ...x, notNull: true } : x)))
);
})
);
}
test.each(columnsSource)(
'Rename column - %s - %s',
testWrapper(async (conn, driver, column, engine) => {
await testTableDiff(
@@ -157,32 +175,37 @@ describe('Alter table', () => {
})
);
test.each(engines.map(engine => [engine.label, engine]))(
'Add default value - %s',
testWrapper(async (conn, driver, engine) => {
await testTableDiff(engine, conn, driver, tbl => {
tbl.columns.find(x => x.columnName == 'col_std').defaultValue = '123';
});
})
);
const enginesWithDefault = engines.filter(x => !x.skipDefaultValue);
const hasEnginesWithDefault = enginesWithDefault.length > 0;
test.each(engines.map(engine => [engine.label, engine]))(
'Unset default value - %s',
testWrapper(async (conn, driver, engine) => {
await testTableDiff(engine, conn, driver, tbl => {
tbl.columns.find(x => x.columnName == 'col_def').defaultValue = undefined;
});
})
);
if (hasEnginesWithDefault) {
test.each(enginesWithDefault.map(engine => [engine.label, engine]))(
'Add default value - %s',
testWrapper(async (conn, driver, engine) => {
await testTableDiff(engine, conn, driver, tbl => {
tbl.columns.find(x => x.columnName == 'col_std').defaultValue = '123';
});
})
);
test.each(engines.map(engine => [engine.label, engine]))(
'Change default value - %s',
testWrapper(async (conn, driver, engine) => {
await testTableDiff(engine, conn, driver, tbl => {
tbl.columns.find(x => x.columnName == 'col_def').defaultValue = '567';
});
})
);
test.each(enginesWithDefault.map(engine => [engine.label, engine]))(
'Unset default value - %s',
testWrapper(async (conn, driver, engine) => {
await testTableDiff(engine, conn, driver, tbl => {
tbl.columns.find(x => x.columnName == 'col_def').defaultValue = undefined;
});
})
);
test.each(enginesWithDefault.map(engine => [engine.label, engine]))(
'Change default value - %s',
testWrapper(async (conn, driver, engine) => {
await testTableDiff(engine, conn, driver, tbl => {
tbl.columns.find(x => x.columnName == 'col_def').defaultValue = '567';
});
})
);
}
// test.each(engines.map(engine => [engine.label, engine]))(
// 'Change autoincrement - %s',

View File

@@ -54,6 +54,30 @@ describe('DB Import/export', () => {
})
);
test.each(engines.map(engine => [engine.label, engine]))(
`Import to existing table - %s`,
testWrapper(async (conn, driver, engine) => {
await runQueryOnDriver(conn, driver, dmp =>
dmp.put(
`create table ~t1 (~id int primary key, ~country %s)`,
engine.useTextTypeForStrings ? 'text' : 'varchar(50)'
)
);
const reader = createImportStream();
const writer = await tableWriter({
systemConnection: conn,
driver,
pureName: 't1',
createIfNotExists: true,
});
await copyStream(reader, writer);
const res = await runQueryOnDriver(conn, driver, dmp => dmp.put(`select count(*) as ~cnt from ~t1`));
expect(res.rows[0].cnt.toString()).toEqual('6');
})
);
test.each(engines.map(engine => [engine.label, engine]))(
'Import two tables - %s',
testWrapper(async (conn, driver, engine) => {
@@ -85,38 +109,48 @@ describe('DB Import/export', () => {
})
);
test.each(engines.filter(x => x.dumpFile).map(engine => [engine.label, engine]))(
'Import SQL dump - %s',
testWrapper(async (conn, driver, engine) => {
// const reader = await fakeObjectReader({ delay: 10 });
// const reader = await fakeObjectReader();
await importDatabase({
systemConnection: conn,
driver,
inputFile: engine.dumpFile,
});
const enginesWithDumpFile = engines.filter(x => x.dumpFile);
const hasEnginesWithDumpFile = enginesWithDumpFile.length > 0;
const structure = await driver.analyseFull(conn);
if (hasEnginesWithDumpFile) {
test.each(enginesWithDumpFile.filter(x => x.dumpFile).map(engine => [engine.label, engine]))(
'Import SQL dump - %s',
testWrapper(async (conn, driver, engine) => {
// const reader = await fakeObjectReader({ delay: 10 });
// const reader = await fakeObjectReader();
await importDatabase({
systemConnection: conn,
driver,
inputFile: engine.dumpFile,
});
for (const check of engine.dumpChecks || []) {
const res = await driver.query(conn, check.sql);
expect(res.rows[0].res.toString()).toEqual(check.res);
}
const structure = await driver.analyseFull(conn);
// const res1 = await driver.query(conn, `select count(*) as cnt from t1`);
// expect(res1.rows[0].cnt.toString()).toEqual('6');
for (const check of engine.dumpChecks || []) {
const res = await driver.query(conn, check.sql);
expect(res.rows[0].res.toString()).toEqual(check.res);
}
// const res2 = await driver.query(conn, `select count(*) as cnt from t2`);
// expect(res2.rows[0].cnt.toString()).toEqual('6');
})
);
// const res1 = await driver.query(conn, `select count(*) as cnt from t1`);
// expect(res1.rows[0].cnt.toString()).toEqual('6');
// const res2 = await driver.query(conn, `select count(*) as cnt from t2`);
// expect(res2.rows[0].cnt.toString()).toEqual('6');
})
);
}
test.each(engines.map(engine => [engine.label, engine]))(
'Export one table - %s',
testWrapper(async (conn, driver, engine) => {
// const reader = await fakeObjectReader({ delay: 10 });
// const reader = await fakeObjectReader();
await runCommandOnDriver(conn, driver, 'create table ~t1 (~id int primary key, ~country varchar(100))');
await runCommandOnDriver(
conn,
driver,
`create table ~t1 (~id int primary key, ~country ${engine.useTextTypeForStrings ? 'text' : 'varchar(100)'})`
);
const data = [
[1, 'Czechia'],
[2, 'Austria'],
@@ -138,7 +172,13 @@ describe('DB Import/export', () => {
const writer = createExportStream();
await copyStream(reader, writer);
expect(writer.resultArray.filter(x => !x.__isStreamHeader).map(row => [row.id, row.country])).toEqual(data);
const result = writer.resultArray.filter(x => !x.__isStreamHeader).map(row => [row.id, row.country]);
if (engine.forceSortResults) {
result.sort((a, b) => a[0] - b[0]);
}
expect(result).toEqual(data);
})
);
});

View File

@@ -149,7 +149,7 @@ async function testDatabaseDeploy(engine, conn, driver, dbModelsYaml, options) {
}
describe('Deploy database', () => {
test.each(engines.map(engine => [engine.label, engine]))(
test.each(engines.filter(i => !i.skipDeploy).map(engine => [engine.label, engine]))(
'Deploy database simple - %s',
testWrapper(async (conn, driver, engine) => {
await testDatabaseDeploy(engine, conn, driver, [
@@ -167,7 +167,7 @@ describe('Deploy database', () => {
})
);
test.each(engines.map(engine => [engine.label, engine]))(
test.each(engines.filter(i => !i.skipDeploy).map(engine => [engine.label, engine]))(
'Deploy database simple - %s - not connected',
testWrapperPrepareOnly(async (conn, driver, engine) => {
await testDatabaseDeploy(engine, conn, driver, [
@@ -185,7 +185,7 @@ describe('Deploy database', () => {
})
);
test.each(engines.map(engine => [engine.label, engine]))(
test.each(engines.filter(i => !i.skipDeploy).map(engine => [engine.label, engine]))(
'Deploy database simple twice - %s',
testWrapper(async (conn, driver, engine) => {
await testDatabaseDeploy(
@@ -219,7 +219,7 @@ describe('Deploy database', () => {
})
);
test.each(engines.map(engine => [engine.label, engine]))(
test.each(engines.filter(i => !i.skipDeploy).map(engine => [engine.label, engine]))(
'Add column - %s',
testWrapper(async (conn, driver, engine) => {
await testDatabaseDeploy(engine, conn, driver, [
@@ -250,7 +250,7 @@ describe('Deploy database', () => {
})
);
test.each(engines.map(engine => [engine.label, engine]))(
test.each(engines.filter(i => !i.skipDeploy).map(engine => [engine.label, engine]))(
'Dont drop column - %s',
testWrapper(async (conn, driver, engine) => {
await testDatabaseDeploy(
@@ -287,7 +287,12 @@ describe('Deploy database', () => {
})
);
test.each(engines.filter(x => !x.skipReferences).map(engine => [engine.label, engine]))(
test.each(
engines
.filter(i => !i.skipDeploy)
.filter(x => !x.skipReferences)
.map(engine => [engine.label, engine])
)(
'Foreign keys - %s',
testWrapper(async (conn, driver, engine) => {
await testDatabaseDeploy(
@@ -343,7 +348,12 @@ describe('Deploy database', () => {
})
);
test.each(engines.filter(x => !x.skipDataModifications).map(engine => [engine.label, engine]))(
test.each(
engines
.filter(i => !i.skipDeploy)
.filter(x => !x.skipDataModifications)
.map(engine => [engine.label, engine])
)(
'Deploy preloaded data - %s',
testWrapper(async (conn, driver, engine) => {
await testDatabaseDeploy(engine, conn, driver, [
@@ -372,7 +382,12 @@ describe('Deploy database', () => {
})
);
test.each(engines.filter(x => !x.skipDataModifications).map(engine => [engine.label, engine]))(
test.each(
engines
.filter(i => !i.skipDeploy)
.filter(x => !x.skipDataModifications)
.map(engine => [engine.label, engine])
)(
'Deploy preloaded data - update - %s',
testWrapper(async (conn, driver, engine) => {
await testDatabaseDeploy(engine, conn, driver, [
@@ -448,7 +463,12 @@ describe('Deploy database', () => {
})
);
test.each(engines.filter(x => !x.skipChangeColumn).map(engine => [engine.label, engine]))(
test.each(
engines
.filter(i => !i.skipDeploy)
.filter(x => !x.skipChangeColumn && !x.skipNullability)
.map(engine => [engine.label, engine])
)(
'Change column to NOT NULL column with default - %s',
testWrapper(async (conn, driver, engine) => {
await testDatabaseDeploy(engine, conn, driver, [
@@ -566,7 +586,7 @@ describe('Deploy database', () => {
text: 'create view ~_deleted_v1 as select * from ~t1',
};
test.each(engines.map(engine => [engine.label, engine]))(
test.each(engines.filter(i => !i.skipDeploy).map(engine => [engine.label, engine]))(
'Dont remove column - %s',
testWrapper(async (conn, driver, engine) => {
await testDatabaseDeploy(engine, conn, driver, [[T1], [T1_NO_VAL]], {
@@ -576,7 +596,7 @@ describe('Deploy database', () => {
})
);
test.each(engines.map(engine => [engine.label, engine]))(
test.each(engines.filter(i => !i.skipDeploy).map(engine => [engine.label, engine]))(
'Dont remove table - %s',
testWrapper(async (conn, driver, engine) => {
await testDatabaseDeploy(engine, conn, driver, [[T1], []], {
@@ -586,7 +606,7 @@ describe('Deploy database', () => {
})
);
test.each(engines.map(engine => [engine.label, engine]))(
test.each(engines.filter(i => !i.skipDeploy).map(engine => [engine.label, engine]))(
'Mark table removed - %s',
testWrapper(async (conn, driver, engine) => {
await testDatabaseDeploy(engine, conn, driver, [[T1], [], []], {
@@ -597,7 +617,12 @@ describe('Deploy database', () => {
})
);
test.each(engines.filter(engine => engine.supportRenameSqlObject).map(engine => [engine.label, engine]))(
test.each(
engines
.filter(i => !i.skipDeploy)
.filter(engine => engine.supportRenameSqlObject)
.map(engine => [engine.label, engine])
)(
'Mark view removed - %s',
testWrapper(async (conn, driver, engine) => {
await testDatabaseDeploy(engine, conn, driver, [[T1, V1], [T1], [T1]], {
@@ -608,7 +633,7 @@ describe('Deploy database', () => {
})
);
test.each(engines.map(engine => [engine.label, engine]))(
test.each(engines.filter(i => !i.skipDeploy).map(engine => [engine.label, engine]))(
'Mark column removed - %s',
testWrapper(async (conn, driver, engine) => {
await testDatabaseDeploy(engine, conn, driver, [[T1], [T1_NO_VAL]], {
@@ -619,7 +644,7 @@ describe('Deploy database', () => {
})
);
test.each(engines.map(engine => [engine.label, engine]))(
test.each(engines.filter(i => !i.skipDeploy).map(engine => [engine.label, engine]))(
'Undelete table - %s',
testWrapper(async (conn, driver, engine) => {
await testDatabaseDeploy(
@@ -641,7 +666,12 @@ describe('Deploy database', () => {
})
);
test.each(engines.filter(engine => engine.supportRenameSqlObject).map(engine => [engine.label, engine]))(
test.each(
engines
.filter(i => !i.skipDeploy)
.filter(engine => engine.supportRenameSqlObject)
.map(engine => [engine.label, engine])
)(
'Undelete view - %s',
testWrapper(async (conn, driver, engine) => {
await testDatabaseDeploy(engine, conn, driver, [[T1, V1], [T1], [T1, V1]], {
@@ -652,7 +682,7 @@ describe('Deploy database', () => {
})
);
test.each(engines.map(engine => [engine.label, engine]))(
test.each(engines.filter(i => !i.skipDeploy).map(engine => [engine.label, engine]))(
'Undelete column - %s',
testWrapper(async (conn, driver, engine) => {
await testDatabaseDeploy(engine, conn, driver, [[T1], [T1_NO_VAL], [T1]], {
@@ -662,7 +692,7 @@ describe('Deploy database', () => {
})
);
test.each(engines.map(engine => [engine.label, engine]))(
test.each(engines.filter(i => !i.skipDeploy).map(engine => [engine.label, engine]))(
'View redeploy - %s',
testWrapper(async (conn, driver, engine) => {
await testDatabaseDeploy(
@@ -683,7 +713,7 @@ describe('Deploy database', () => {
})
);
test.each(engines.map(engine => [engine.label, engine]))(
test.each(engines.filter(i => !i.skipDeploy).map(engine => [engine.label, engine]))(
'Change view - %s',
testWrapper(async (conn, driver, engine) => {
await testDatabaseDeploy(
@@ -703,7 +733,12 @@ describe('Deploy database', () => {
})
);
test.each(engines.filter(x => !x.skipDataModifications).map(engine => [engine.label, engine]))(
test.each(
engines
.filter(i => !i.skipDeploy)
.filter(x => !x.skipDataModifications)
.map(engine => [engine.label, engine])
)(
'Script drived deploy - basic predeploy - %s',
testWrapper(async (conn, driver, engine) => {
await testDatabaseDeploy(engine, conn, driver, [
@@ -723,7 +758,12 @@ describe('Deploy database', () => {
})
);
test.each(engines.filter(x => !x.skipDataModifications).map(engine => [engine.label, engine]))(
test.each(
engines
.filter(i => !i.skipDeploy)
.filter(x => !x.skipDataModifications)
.map(engine => [engine.label, engine])
)(
'Script drived deploy - install+uninstall - %s',
testWrapper(async (conn, driver, engine) => {
await testDatabaseDeploy(engine, conn, driver, [
@@ -782,7 +822,7 @@ describe('Deploy database', () => {
})
);
test.each(engines.map(engine => [engine.label, engine]))(
test.each(engines.filter(i => !i.skipDeploy).map(engine => [engine.label, engine]))(
'Mark table removed, one remains - %s',
testWrapper(async (conn, driver, engine) => {
await testDatabaseDeploy(engine, conn, driver, [[T1, T2], [T2], [T2]], {

View File

@@ -73,7 +73,9 @@ describe('Query', () => {
await runCommandOnDriver(conn, driver, dmp => dmp.put(sql));
}
const res = await runQueryOnDriver(conn, driver, dmp => dmp.put('SELECT ~id FROM ~t1 ORDER BY ~id'));
const res = await runQueryOnDriver(conn, driver, dmp =>
dmp.put(`SELECT ~id FROM ~t1 ${engine.skipOrderBy ? '' : 'ORDER BY ~id'}`)
);
expect(res.columns).toEqual([
expect.objectContaining({
columnName: 'id',
@@ -98,7 +100,11 @@ describe('Query', () => {
await runCommandOnDriver(conn, driver, dmp => dmp.put(sql));
}
const results = await executeStream(driver, conn, 'SELECT ~id FROM ~t1 ORDER BY ~id');
const results = await executeStream(
driver,
conn,
`SELECT ~id FROM ~t1 ${engine.skipOrderBy ? '' : 'ORDER BY ~id'}`
);
expect(results.length).toEqual(1);
const res = results[0];
@@ -107,7 +113,7 @@ describe('Query', () => {
})
);
test.each(engines.map(engine => [engine.label, engine]))(
test.each(engines.filter(i => !i.skipOrderBy).map(engine => [engine.label, engine]))(
'More queries - %s',
testWrapper(async (conn, driver, engine) => {
for (const sql of initSql) {
@@ -137,7 +143,9 @@ describe('Query', () => {
const results = await executeStream(
driver,
conn,
'CREATE TABLE ~t1 (~id int primary key); INSERT INTO ~t1 (~id) VALUES (1); INSERT INTO ~t1 (~id) VALUES (2); SELECT ~id FROM ~t1 ORDER BY ~id; '
`CREATE TABLE ~t1 (~id int primary key); INSERT INTO ~t1 (~id) VALUES (1); INSERT INTO ~t1 (~id) VALUES (2); SELECT ~id FROM ~t1 ${
engine.skipOrderBy ? '' : 'ORDER BY ~id'
}; `
);
expect(results.length).toEqual(1);
@@ -188,7 +196,7 @@ describe('Query', () => {
pureName: 't1',
columns: [
{ columnName: 'id', dataType: 'int', notNull: true, autoIncrement: true },
{ columnName: 'val', dataType: 'varchar(50)' },
{ columnName: 'val', dataType: engine.useTextTypeForStrings ? 'text' : 'varchar(50)' },
],
primaryKey: {
columns: [{ columnName: 'id' }],

View File

@@ -2,12 +2,37 @@ const { runCommandOnDriver } = require('dbgate-tools');
const engines = require('../engines');
const { testWrapper } = require('../tools');
const t1Sql = 'CREATE TABLE ~t1 (~id int not null primary key, ~val1 varchar(50))';
/**
* @param {import('dbgate-types').TestEngineInfo} engine
*/
const t1Sql = engine =>
`CREATE TABLE ~t1 (~id int ${engine.skipNullability ? '' : 'not null'} primary key, ~val1 ${
engine.useTextTypeForStrings ? 'text' : 'varchar(50)'
})`;
const ix1Sql = 'CREATE index ~ix1 ON ~t1(~val1, ~id)';
/**
* @param {import('dbgate-types').TestEngineInfo} engine
*/
const t2Sql = engine =>
`CREATE TABLE ~t2 (~id int not null primary key, ~val2 varchar(50) ${engine.skipUnique ? '' : 'unique'})`;
const t3Sql = 'CREATE TABLE ~t3 (~id int not null primary key, ~valfk int, foreign key (~valfk) references ~t2(~id))';
const t4Sql = 'CREATE TABLE ~t4 (~id int not null primary key, ~valdef int default 12 not null)';
`CREATE TABLE ~t2 (~id int ${engine.skipNullability ? '' : 'not null'} primary key, ~val2 ${
engine.useTextTypeForStrings ? 'text' : 'varchar(50)'
} ${engine.skipUnique ? '' : 'unique'})`;
/**
* @param {import('dbgate-types').TestEngineInfo} engine
*/
const t3Sql = engine =>
`CREATE TABLE ~t3 (~id int ${
engine.skipNullability ? '' : 'not null'
} primary key, ~valfk int, foreign key (~valfk) references ~t2(~id))`;
/**
* @param {import('dbgate-types').TestEngineInfo} engine
*/
const t4Sql = engine =>
`CREATE TABLE ~t4 (~id int ${engine.skipNullability ? '' : 'not null'} primary key, ~valdef int default 12 ${
engine.skipNullability ? '' : 'not null'
})`;
// const fkSql = 'ALTER TABLE t3 ADD FOREIGN KEY (valfk) REFERENCES t2(id)'
const txMatch = (engine, tname, vcolname, nextcol, defaultValue) =>
@@ -22,12 +47,12 @@ const txMatch = (engine, tname, vcolname, nextcol, defaultValue) =>
expect.objectContaining({
columnName: vcolname,
...(engine.skipNullability ? {} : { notNull: !!defaultValue }),
...(defaultValue
...(defaultValue && !engine.skipDefaultValue
? { defaultValue }
: {
dataType: engine.skipStringLength
? expect.stringMatching(/.*string|char.*/i)
: expect.stringMatching(/.*char.*\(50\)/i),
? expect.stringMatching(/.*string|char.*|text/i)
: expect.stringMatching(/.*char.*\(50\)|text/i),
}),
}),
...(nextcol
@@ -36,8 +61,8 @@ const txMatch = (engine, tname, vcolname, nextcol, defaultValue) =>
columnName: 'nextcol',
...(engine.skipNullability ? {} : { notNull: false }),
dataType: engine.skipStringLength
? expect.stringMatching(/.*string.*|char.*/i)
: expect.stringMatching(/.*char.*\(50\).*/i),
? expect.stringMatching(/.*string.*|char.*|text/i)
: expect.stringMatching(/.*char.*\(50\).*|text/i),
}),
]
: []),
@@ -60,10 +85,9 @@ describe('Table analyse', () => {
test.each(engines.map(engine => [engine.label, engine]))(
'Table structure - full analysis - %s',
testWrapper(async (conn, driver, engine) => {
await runCommandOnDriver(conn, driver, dmp => dmp.put(t1Sql));
await runCommandOnDriver(conn, driver, dmp => dmp.put(t1Sql(engine)));
const structure = await driver.analyseFull(conn);
console.log(JSON.stringify(structure, null, 2));
expect(structure.tables.length).toEqual(1);
expect(structure.tables[0]).toEqual(t1Match(engine));
@@ -79,7 +103,7 @@ describe('Table analyse', () => {
expect(structure1.tables.length).toEqual(1);
expect(structure1.tables[0]).toEqual(t2Match(engine));
await runCommandOnDriver(conn, driver, dmp => dmp.put(t1Sql));
await runCommandOnDriver(conn, driver, dmp => dmp.put(t1Sql(engine)));
const structure2 = await driver.analyseIncremental(conn, structure1);
expect(structure2.tables.length).toEqual(2);
@@ -91,7 +115,7 @@ describe('Table analyse', () => {
test.each(engines.map(engine => [engine.label, engine]))(
'Table remove - incremental analysis - %s',
testWrapper(async (conn, driver, engine) => {
await runCommandOnDriver(conn, driver, dmp => dmp.put(t1Sql));
await runCommandOnDriver(conn, driver, dmp => dmp.put(t1Sql(engine)));
await runCommandOnDriver(conn, driver, dmp => dmp.put(t2Sql(engine)));
const structure1 = await driver.analyseFull(conn);
expect(structure1.tables.length).toEqual(2);
@@ -109,14 +133,18 @@ describe('Table analyse', () => {
test.each(engines.map(engine => [engine.label, engine]))(
'Table change - incremental analysis - %s',
testWrapper(async (conn, driver, engine) => {
await runCommandOnDriver(conn, driver, dmp => dmp.put(t1Sql));
await runCommandOnDriver(conn, driver, dmp => dmp.put(t1Sql(engine)));
await runCommandOnDriver(conn, driver, dmp => dmp.put(t2Sql(engine)));
const structure1 = await driver.analyseFull(conn);
if (engine.dbSnapshotBySeconds) await new Promise(resolve => setTimeout(resolve, 1100));
await runCommandOnDriver(conn, driver, dmp =>
dmp.put(`ALTER TABLE ~t2 ADD ${engine.alterTableAddColumnSyntax ? 'COLUMN' : ''} ~nextcol varchar(50)`)
dmp.put(
`ALTER TABLE ~t2 ADD ${engine.alterTableAddColumnSyntax ? 'COLUMN' : ''} ~nextcol ${
engine.useTextTypeForStrings ? 'text' : 'varchar(50)'
}`
)
);
const structure2 = await driver.analyseIncremental(conn, structure1);
@@ -124,14 +152,25 @@ describe('Table analyse', () => {
expect(structure2.tables.length).toEqual(2);
expect(structure2.tables.find(x => x.pureName == 't1')).toEqual(t1Match(engine));
expect(structure2.tables.find(x => x.pureName == 't2')).toEqual(t2NextColMatch(engine));
const t2 = structure2.tables.find(x => x.pureName == 't2');
const t2ColumnsOrder = ['id', 'val2', 'nextcol'];
const t2Enchanted = engine.forceSortStructureColumns
? {
...t2,
columns: t2.columns.sort(
(a, b) => t2ColumnsOrder.indexOf(a.columnName) - t2ColumnsOrder.indexOf(b.columnName)
),
}
: t2;
expect(t2Enchanted).toEqual(t2NextColMatch(engine));
})
);
test.each(engines.filter(x => !x.skipIndexes).map(engine => [engine.label, engine]))(
'Index - full analysis - %s',
testWrapper(async (conn, driver, engine) => {
await runCommandOnDriver(conn, driver, dmp => dmp.put(t1Sql));
await runCommandOnDriver(conn, driver, dmp => dmp.put(t1Sql(engine)));
await runCommandOnDriver(conn, driver, dmp => dmp.put(ix1Sql));
const structure = await driver.analyseFull(conn);
@@ -161,7 +200,7 @@ describe('Table analyse', () => {
'Foreign key - full analysis - %s',
testWrapper(async (conn, driver, engine) => {
await runCommandOnDriver(conn, driver, dmp => dmp.put(t2Sql(engine)));
await runCommandOnDriver(conn, driver, dmp => dmp.put(t3Sql));
await runCommandOnDriver(conn, driver, dmp => dmp.put(t3Sql(engine)));
// await driver.query(conn, fkSql);
const structure = await driver.analyseFull(conn);
@@ -177,10 +216,10 @@ describe('Table analyse', () => {
})
);
test.each(engines.map(engine => [engine.label, engine]))(
test.each(engines.filter(engine => !engine.skipDefaultValue).map(engine => [engine.label, engine]))(
'Table structure - default value - %s',
testWrapper(async (conn, driver, engine) => {
await runCommandOnDriver(conn, driver, dmp => dmp.put(t4Sql));
await runCommandOnDriver(conn, driver, dmp => dmp.put(t4Sql(engine)));
const structure = await driver.analyseFull(conn);

View File

@@ -24,8 +24,10 @@ function checkTableStructure2(t1, t2) {
expect(t2).toEqual(createExpector(omitTableSpecificInfo(t1)));
}
async function testTableCreate(conn, driver, table) {
await runCommandOnDriver(conn, driver, dmp => dmp.put('create table ~t0 (~id int not null primary key)'));
async function testTableCreate(engine, conn, driver, table) {
await runCommandOnDriver(conn, driver, dmp =>
dmp.put(`create table ~t0 (~id int ${engine.skipNullability ? '' : 'not null'} primary key)`)
);
const dmp = driver.createDumper();
const table1 = {
@@ -47,12 +49,12 @@ describe('Table create', () => {
test.each(engines.map(engine => [engine.label, engine]))(
'Simple table - %s',
testWrapper(async (conn, driver, engine) => {
await testTableCreate(conn, driver, {
await testTableCreate(engine, conn, driver, {
columns: [
{
columnName: 'col1',
dataType: 'int',
notNull: true,
...(engine.skipNullability ? {} : { notNull: true }),
},
],
primaryKey: {
@@ -65,7 +67,7 @@ describe('Table create', () => {
test.each(engines.filter(x => !x.skipIndexes).map(engine => [engine.label, engine]))(
'Table with index - %s',
testWrapper(async (conn, driver, engine) => {
await testTableCreate(conn, driver, {
await testTableCreate(engine, conn, driver, {
columns: [
{
columnName: 'col1',
@@ -95,7 +97,7 @@ describe('Table create', () => {
test.each(engines.filter(x => !x.skipReferences).map(engine => [engine.label, engine]))(
'Table with foreign key - %s',
testWrapper(async (conn, driver, engine) => {
await testTableCreate(conn, driver, {
await testTableCreate(engine, conn, driver, {
columns: [
{
columnName: 'col1',
@@ -125,7 +127,7 @@ describe('Table create', () => {
test.each(engines.filter(x => !x.skipUnique).map(engine => [engine.label, engine]))(
'Table with unique - %s',
testWrapper(async (conn, driver, engine) => {
await testTableCreate(conn, driver, {
await testTableCreate(engine, conn, driver, {
columns: [
{
columnName: 'col1',

View File

@@ -1,35 +1,41 @@
version: '3'
services:
postgres:
image: postgres
restart: always
environment:
POSTGRES_PASSWORD: Pwd2020Db
ports:
- 15000:5432
mariadb:
image: mariadb
command: --default-authentication-plugin=mysql_native_password
restart: always
ports:
- 15004:3306
environment:
- MYSQL_ROOT_PASSWORD=Pwd2020Db
# postgres:
# image: postgres
# restart: always
# environment:
# POSTGRES_PASSWORD: Pwd2020Db
# ports:
# - 15000:5432
#
# mariadb:
# image: mariadb
# command: --default-authentication-plugin=mysql_native_password
# restart: always
# ports:
# - 15004:3306
# environment:
# - MYSQL_ROOT_PASSWORD=Pwd2020Db
# mysql:
# image: mysql:8.0.18
# command: --default-authentication-plugin=mysql_native_password
# restart: always
# ports:
# ports:
# - 15001:3306
# environment:
# - MYSQL_ROOT_PASSWORD=Pwd2020Db
#
cassandradb:
image: cassandra:5.0.2
ports:
- 15942:9042
# clickhouse:
# image: bitnami/clickhouse:24.8.4
# restart: always
# ports:
# ports:
# - 15005:8123
# environment:
# - CLICKHOUSE_ADMIN_PASSWORD=Pwd2020Db
@@ -37,19 +43,18 @@ services:
# mssql:
# image: mcr.microsoft.com/mssql/server
# restart: always
# ports:
# ports:
# - 15002:1433
# environment:
# - ACCEPT_EULA=Y
# - SA_PASSWORD=Pwd2020Db
# - MSSQL_PID=Express
# cockroachdb:
# image: cockroachdb/cockroach
# ports:
# - 15003:26257
# command: start-single-node --insecure
# mongodb:
# image: mongo:4.0.12
# restart: always
@@ -59,20 +64,19 @@ services:
# ports:
# - 27017:27017
# cockroachdb-init:
# image: cockroachdb/cockroach
# # build: cockroach
# # entrypoint: /cockroach/init.sh
# # entrypoint: /cockroach/init.sh
# entrypoint: ./cockroach sql --insecure --host="cockroachdb" --execute="CREATE DATABASE IF NOT EXISTS test;"
# depends_on:
# - cockroachdb
# restart: on-failure
oracle:
image: gvenzl/oracle-xe:21-slim
environment:
ORACLE_PASSWORD: Pwd2020Db
ports:
- 15006:1521
# oracle:
# image: gvenzl/oracle-xe:21-slim
# environment:
# ORACLE_PASSWORD: Pwd2020Db
# ports:
# - 15006:1521

View File

@@ -1,3 +1,4 @@
// @ts-check
const views = {
type: 'views',
create1: 'CREATE VIEW ~obj1 AS SELECT ~id FROM ~t1',
@@ -13,6 +14,7 @@ const matviews = {
drop2: 'DROP MATERIALIZED VIEW obj2',
};
/** @type {import('dbgate-types').TestEngineInfo} */
const mysqlEngine = {
label: 'MySQL',
connection: {
@@ -160,6 +162,7 @@ const mysqlEngine = {
],
};
/** @type {import('dbgate-types').TestEngineInfo} */
const mariaDbEngine = {
label: 'MariaDB',
connection: {
@@ -180,6 +183,7 @@ const mariaDbEngine = {
],
};
/** @type {import('dbgate-types').TestEngineInfo} */
const postgreSqlEngine = {
label: 'PostgreSQL',
connection: {
@@ -352,6 +356,7 @@ $$ LANGUAGE plpgsql;`,
],
};
/** @type {import('dbgate-types').TestEngineInfo} */
const sqlServerEngine = {
label: 'SQL Server',
connection: {
@@ -465,6 +470,7 @@ const sqlServerEngine = {
],
};
/** @type {import('dbgate-types').TestEngineInfo} */
const sqliteEngine = {
label: 'SQLite',
generateDbFile: true,
@@ -500,6 +506,7 @@ const sqliteEngine = {
],
};
/** @type {import('dbgate-types').TestEngineInfo} */
const cockroachDbEngine = {
label: 'CockroachDB',
connection: {
@@ -511,6 +518,7 @@ const cockroachDbEngine = {
objects: [views, matviews],
};
/** @type {import('dbgate-types').TestEngineInfo} */
const clickhouseEngine = {
label: 'ClickHouse',
connection: {
@@ -533,6 +541,7 @@ const clickhouseEngine = {
skipChangeColumn: true,
};
/** @type {import('dbgate-types').TestEngineInfo} */
const oracleEngine = {
label: 'Oracle',
connection: {
@@ -592,6 +601,40 @@ const oracleEngine = {
],
};
/** @type {import('dbgate-types').TestEngineInfo} */
const cassandraEngine = {
label: 'Cassandra',
connection: {
server: 'localhost:15942',
engine: 'cassandra@dbgate-plugin-cassandra',
},
removeNotNull: true,
alterTableAddColumnSyntax: false,
skipOnCI: false,
skipReferences: true,
// dbSnapshotBySeconds: true,
// setNullDefaultInsteadOfDrop: true,
skipIncrementalAnalysis: true,
skipNonPkRename: true,
skipPkDrop: true,
skipDefaultValue: true,
skipNullability: true,
skipUnique: true,
skipIndexes: true,
skipOrderBy: true,
skipAutoIncrement: true,
skipDataModifications: true,
skipDataDuplicator: true,
skipDeploy: true,
forceSortResults: true,
forceSortStructureColumns: true,
useTextTypeForStrings: true,
objects: [],
};
const enginesOnCi = [
// all engines, which would be run on GitHub actions
mysqlEngine,
@@ -602,20 +645,23 @@ const enginesOnCi = [
// cockroachDbEngine,
clickhouseEngine,
oracleEngine,
cassandraEngine,
];
const enginesOnLocal = [
// all engines, which would be run on local test
mysqlEngine,
cassandraEngine,
// mysqlEngine,
// mariaDbEngine,
// postgreSqlEngine,
// sqlServerEngine,
sqliteEngine,
// sqliteEngine,
// cockroachDbEngine,
// clickhouseEngine,
// oracleEngine,
];
/** @type {import('dbgate-types').TestEngineInfo[] & Record<string, import('dbgate-types').TestEngineInfo>} */
module.exports = process.env.CITEST ? enginesOnCi : enginesOnLocal;
module.exports.mysqlEngine = mysqlEngine;
@@ -626,3 +672,4 @@ module.exports.sqliteEngine = sqliteEngine;
module.exports.cockroachDbEngine = cockroachDbEngine;
module.exports.clickhouseEngine = clickhouseEngine;
module.exports.oracleEngine = oracleEngine;
module.exports.cassandraEngine = cassandraEngine;

View File

@@ -1,3 +1,4 @@
// @ts-check
const requireEngineDriver = require('dbgate-api/src/utility/requireEngineDriver');
const crypto = require('crypto');
@@ -81,9 +82,27 @@ const testWrapperPrepareOnly =
await body(conn, driver, ...other);
};
/** @param {string} sql
* @returns {string} */
const removeNotNull = sql => sql.replace(/not null/gi, '');
/** @param {import('dbgate-types').TestEngineInfo} engine
* @param {string} sql
* @returns {string} */
const transformSqlForEngine = (engine, sql) => {
let result = sql;
if (engine.removeNotNull) {
result = removeNotNull(result);
}
return result;
};
module.exports = {
randomDbName,
connect,
testWrapper,
testWrapperPrepareOnly,
transformSqlForEngine,
};

View File

@@ -213,13 +213,12 @@ async function handleRunOperation({ msgid, operation, useTransaction }, skipRead
}
}
async function handleQueryData({ msgid, sql }, skipReadonlyCheck = false) {
async function handleQueryData({ msgid, sql, range }, skipReadonlyCheck = false) {
await waitConnected();
const driver = requireEngineDriver(storedConnection);
try {
if (!skipReadonlyCheck) ensureExecuteCustomScript(driver);
// console.log(sql);
const res = await driver.query(dbhan, sql);
const res = await driver.query(dbhan, sql, { range });
process.send({ msgtype: 'response', msgid, ...res });
} catch (err) {
process.send({
@@ -234,7 +233,7 @@ async function handleSqlSelect({ msgid, select }) {
const driver = requireEngineDriver(storedConnection);
const dmp = driver.createDumper();
dumpSqlSelect(dmp, select);
return handleQueryData({ msgid, sql: dmp.s }, true);
return handleQueryData({ msgid, sql: dmp.s, range: select.range }, true);
}
async function handleDriverDataCore(msgid, callMethod, { logName }) {
@@ -340,6 +339,7 @@ async function handleSqlPreview({ msgid, objects, options }) {
}, 500);
}
} catch (err) {
console.error(err);
process.send({
msgtype: 'response',
msgid,

View File

@@ -9,7 +9,7 @@ import {
AllowIdentityInsert,
Expression,
} from 'dbgate-sqltree';
import type { NamedObjectInfo, DatabaseInfo, TableInfo } from 'dbgate-types';
import type { NamedObjectInfo, DatabaseInfo, TableInfo, SqlDialect } from 'dbgate-types';
import { JsonDataObjectUpdateCommand } from 'dbgate-tools';
export interface ChangeSetItem {
@@ -230,41 +230,79 @@ export function batchUpdateChangeSet(
return changeSet;
}
function extractFields(item: ChangeSetItem, allowNulls = true, allowedDocumentColumns: string[] = []): UpdateField[] {
function extractFields(
item: ChangeSetItem,
allowNulls = true,
allowedDocumentColumns: string[] = [],
table?: TableInfo,
dialect?: SqlDialect
): UpdateField[] {
const allFields = {
...item.fields,
};
function isUuidColumn(columnName: string): boolean {
return table?.columns.find(x => x.columnName == columnName)?.dataType.toLowerCase() == 'uuid';
}
function createUpdateField(targetColumn: string): UpdateField {
const shouldGenerateDefaultValue =
isUuidColumn(targetColumn) && allFields[targetColumn] == null && dialect?.generateDefaultValueForUuid;
if (shouldGenerateDefaultValue) {
return {
targetColumn,
sql: dialect?.generateDefaultValueForUuid,
exprType: 'raw',
};
}
return {
targetColumn,
exprType: 'value',
value: allFields[targetColumn],
dataType: table?.columns?.find(x => x.columnName == targetColumn)?.dataType,
};
}
for (const docField in item.document || {}) {
if (allowedDocumentColumns.includes(docField)) {
allFields[docField] = item.document[docField];
}
}
return _.keys(allFields)
.filter(targetColumn => allowNulls || allFields[targetColumn] != null)
.map(targetColumn => ({
targetColumn,
exprType: 'value',
value: allFields[targetColumn],
}));
const columnNames = Object.keys(allFields);
if (dialect?.generateDefaultValueForUuid && table) {
columnNames.push(...table.columns.map(i => i.columnName));
}
return _.uniq(columnNames)
.filter(
targetColumn =>
allowNulls ||
allFields[targetColumn] != null ||
(isUuidColumn(targetColumn) && dialect?.generateDefaultValueForUuid)
)
.map(targetColumn => createUpdateField(targetColumn));
}
function changeSetInsertToSql(
item: ChangeSetItem,
dbinfo: DatabaseInfo = null
dbinfo: DatabaseInfo = null,
dialect: SqlDialect = null
): [AllowIdentityInsert, Insert, AllowIdentityInsert] {
const table = dbinfo?.tables?.find(x => x.schemaName == item.schemaName && x.pureName == item.pureName);
const fields = extractFields(
item,
false,
table?.columns?.map(x => x.columnName)
table?.columns?.map(x => x.columnName),
table,
dialect
);
if (fields.length == 0) return null;
let autoInc = false;
if (table) {
const autoIncCol = table.columns.find(x => x.autoIncrement);
// console.log('autoIncCol', autoIncCol);
if (autoIncCol && fields.find(x => x.targetColumn == autoIncCol.columnName)) {
autoInc = true;
}
@@ -299,19 +337,28 @@ function changeSetInsertToSql(
];
}
export function extractChangeSetCondition(item: ChangeSetItem, alias?: string): Condition {
export function extractChangeSetCondition(
item: ChangeSetItem,
alias?: string,
table?: TableInfo,
dialect?: SqlDialect
): Condition {
function getColumnCondition(columnName: string): Condition {
const dataType = table?.columns?.find(x => x.columnName == columnName)?.dataType;
const value = item.condition[columnName];
const expr: Expression = {
exprType: 'column',
columnName,
source: {
name: {
pureName: item.pureName,
schemaName: item.schemaName,
},
alias,
},
source: dialect?.omitTableBeforeColumn
? undefined
: {
name: {
pureName: item.pureName,
schemaName: item.schemaName,
},
alias,
},
};
if (value == null) {
return {
@@ -325,6 +372,7 @@ export function extractChangeSetCondition(item: ChangeSetItem, alias?: string):
left: expr,
right: {
exprType: 'value',
dataType,
value,
},
};
@@ -366,7 +414,7 @@ function compileSimpleChangeSetCondition(fields: { [column: string]: string }):
};
}
function changeSetUpdateToSql(item: ChangeSetItem, dbinfo: DatabaseInfo = null): Update {
function changeSetUpdateToSql(item: ChangeSetItem, dbinfo: DatabaseInfo = null, dialect: SqlDialect = null): Update {
const table = dbinfo?.tables?.find(x => x.schemaName == item.schemaName && x.pureName == item.pureName);
const autoIncCol = table?.columns?.find(x => x.autoIncrement);
@@ -382,13 +430,16 @@ function changeSetUpdateToSql(item: ChangeSetItem, dbinfo: DatabaseInfo = null):
fields: extractFields(
item,
true,
table?.columns?.map(x => x.columnName).filter(x => x != autoIncCol?.columnName)
table?.columns?.map(x => x.columnName).filter(x => x != autoIncCol?.columnName),
table
),
where: extractChangeSetCondition(item),
where: extractChangeSetCondition(item, undefined, table, dialect),
};
}
function changeSetDeleteToSql(item: ChangeSetItem): Delete {
function changeSetDeleteToSql(item: ChangeSetItem, dbinfo: DatabaseInfo = null, dialect: SqlDialect = null): Delete {
const table = dbinfo?.tables?.find(x => x.schemaName == item.schemaName && x.pureName == item.pureName);
return {
from: {
name: {
@@ -397,16 +448,16 @@ function changeSetDeleteToSql(item: ChangeSetItem): Delete {
},
},
commandType: 'delete',
where: extractChangeSetCondition(item),
where: extractChangeSetCondition(item, undefined, table, dialect),
};
}
export function changeSetToSql(changeSet: ChangeSet, dbinfo: DatabaseInfo): Command[] {
export function changeSetToSql(changeSet: ChangeSet, dbinfo: DatabaseInfo, dialect: SqlDialect): Command[] {
return _.compact(
_.flatten([
...(changeSet.inserts.map(item => changeSetInsertToSql(item, dbinfo)) as any),
...changeSet.updates.map(item => changeSetUpdateToSql(item, dbinfo)),
...changeSet.deletes.map(changeSetDeleteToSql),
...(changeSet.inserts.map(item => changeSetInsertToSql(item, dbinfo, dialect)) as any),
...changeSet.updates.map(item => changeSetUpdateToSql(item, dbinfo, dialect)),
...changeSet.deletes.map(item => changeSetDeleteToSql(item, dbinfo, dialect)),
])
);
}

View File

@@ -253,7 +253,12 @@ export abstract class GridDisplay {
orCondition.conditions.push(
_.cloneDeepWith(condition, (expr: Expression) => {
if (expr.exprType == 'placeholder') {
return this.createColumnExpression(column, { alias: 'basetbl' }, undefined, 'filter');
return this.createColumnExpression(
column,
!this.dialect.omitTableAliases ? { alias: 'basetbl' } : undefined,
undefined,
'filter'
);
}
})
);
@@ -584,7 +589,7 @@ export abstract class GridDisplay {
}
return {
exprType: 'column',
alias: alias || col.columnName,
...(!this.dialect.omitTableAliases && { alias: alias || col.columnName }),
source,
...col,
};
@@ -597,9 +602,16 @@ export abstract class GridDisplay {
commandType: 'select',
from: {
name: _.pick(name, ['schemaName', 'pureName']),
alias: 'basetbl',
...(!this.dialect.omitTableAliases && { alias: 'basetbl' }),
},
columns: columns.map(col => this.createColumnExpression(col, { alias: 'basetbl' }, undefined, 'view')),
columns: columns.map(col =>
this.createColumnExpression(
col,
!this.dialect.omitTableAliases ? { alias: 'basetbl' } : undefined,
undefined,
'view'
)
),
orderBy: this.driver?.requiresDefaultSortCriteria
? [
{
@@ -611,7 +623,10 @@ export abstract class GridDisplay {
: null,
};
const displayedColumnInfo = _.keyBy(
this.columns.map(col => ({ ...col, sourceAlias: 'basetbl' })),
this.columns.map(col => ({
...col,
...(!this.dialect.omitTableAliases && { sourceAlias: 'basetbl' }),
})),
'uniqueName'
);
this.processReferences(select, displayedColumnInfo, options);
@@ -639,7 +654,7 @@ export abstract class GridDisplay {
? x
: {
...x,
source: { alias: 'basetbl' },
...(!this.dialect.omitTableAliases && { source: { alias: 'basetbl' } }),
}
)
: [

View File

@@ -52,6 +52,8 @@ export function dumpSqlSelect(dmp: SqlDumper, cmd: Select) {
if (cmd.range) {
if (dmp.dialect.offsetFetchRangeSyntax) {
dmp.put('^offset %s ^rows ^fetch ^next %s ^rows ^only', cmd.range.offset, cmd.range.limit);
} else if (dmp.dialect.offsetNotSupported) {
dmp.put('^limit %s', cmd.range.limit + cmd.range.offset);
} else {
dmp.put('^limit %s ^offset %s ', cmd.range.limit, cmd.range.offset);
}

View File

@@ -21,7 +21,14 @@ export function dumpSqlExpression(dmp: SqlDumper, expr: Expression) {
break;
case 'value':
dmp.put('%v', expr.value);
if (expr.dataType) {
dmp.put('%V', {
value: expr.value,
dataType: expr.dataType,
});
} else {
dmp.put('%v', expr.value);
}
break;
case 'raw':

View File

@@ -182,6 +182,7 @@ export interface ColumnRefExpression {
export interface ValueExpression {
exprType: 'value';
value: any;
dataType?: string;
}
export interface PlaceholderExpression {

View File

@@ -70,7 +70,7 @@ export class SqlDumper implements AlterProcessor {
putByteArrayValue(value) {
this.put('^null');
}
putValue(value) {
putValue(value, dataType = null) {
if (value === null) this.put('^null');
else if (value === true) this.putRaw('1');
else if (value === false) this.putRaw('0');
@@ -117,6 +117,9 @@ export class SqlDumper implements AlterProcessor {
case 'v':
this.putValue(value);
break;
case 'V':
this.putValue(value.value, value.dataType);
break;
case 'c':
value(this);
break;
@@ -250,7 +253,7 @@ export class SqlDumper implements AlterProcessor {
this.columnType(column.dataType);
if (column.autoIncrement) {
if (column.autoIncrement && !this.dialect?.disableAutoIncrement) {
this.autoIncrement();
}

View File

@@ -14,6 +14,7 @@ export function createBulkInsertStreamBase(driver: EngineDriver, stream, dbhan,
objectMode: true,
});
writable.fullNameQuoted = fullNameQuoted;
writable.buffer = [];
writable.structure = null;
writable.columnNames = null;
@@ -30,7 +31,9 @@ export function createBulkInsertStreamBase(driver: EngineDriver, stream, dbhan,
writable.checkStructure = async () => {
let structure = await driver.analyseSingleTable(dbhan, name);
// console.log('ANALYSING', name, structure);
if (structure) {
writable.structure = structure;
}
if (structure && options.dropIfExists) {
logger.info(`Dropping table ${fullNameQuoted}`);
await driver.script(dbhan, `DROP TABLE ${fullNameQuoted}`);

View File

@@ -163,7 +163,7 @@ export interface SchedulerEventInfo extends SqlObjectInfo {
onCompletion: 'PRESERVE' | 'NOT PRESERVE';
status: 'ENABLED' | 'DISABLED';
lastExecuted?: string;
intervalValue: number;
intervalValue: string;
intervalField: string;
starts: string;
executeAt: string;

View File

@@ -1,3 +1,5 @@
import { ColumnInfo } from './dbinfo';
export interface SqlDialect {
rangeSelect?: boolean;
limitSelect?: boolean;
@@ -6,6 +8,7 @@ export interface SqlDialect {
topRecords?: boolean;
stringEscapeChar: string;
offsetFetchRangeSyntax?: boolean;
offsetNotSupported?: boolean;
quoteIdentifier(s: string): string;
fallbackDataType?: string;
explicitDropConstraint?: boolean;
@@ -44,7 +47,14 @@ export interface SqlDialect {
omitForeignKeys?: boolean;
omitUniqueConstraints?: boolean;
omitIndexes?: boolean;
omitTableAliases?: boolean;
omitTableBeforeColumn?: boolean;
disableAutoIncrement?: boolean;
disableNonPrimaryKeyRename?: boolean;
disableRenameTable?: boolean;
defaultNewTableColumns?: ColumnInfo[];
sortingKeys?: boolean;
generateDefaultValueForUuid?: string;
// syntax for create column: ALTER TABLE table ADD COLUMN column
createColumnWithColumnKeyword?: boolean;

View File

@@ -11,7 +11,7 @@ export interface SqlDumper extends AlterProcessor {
putRaw(s: string);
put(format: string, ...args);
putCmd(format: string, ...args);
putValue(value: string | number | Date);
putValue(value: string | number | Date, dataType?: string);
putCollection<T>(delimiter: string, collection: T[], lambda: (item: T) => void);
transform(type: TransformType, dumpExpr: () => void);
createDatabase(name: string);

View File

@@ -32,6 +32,7 @@ export interface RunScriptOptions {
export interface QueryOptions {
discardResult?: boolean;
importSqlDump?: boolean;
range?: { offset: number; limit: number };
}
export interface WriteTableOptions {
@@ -133,8 +134,8 @@ export interface FilterBehaviourProvider {
getFilterBehaviour(dataType: string, standardFilterBehaviours: { [id: string]: FilterBehaviour }): FilterBehaviour;
}
export interface DatabaseHandle {
client: any;
export interface DatabaseHandle<TClient = any> {
client: TClient;
database?: string;
feedback?: (message: any) => void;
getDatabase?: () => any;
@@ -142,7 +143,7 @@ export interface DatabaseHandle {
treeKeySeparator?: string;
}
export interface EngineDriver extends FilterBehaviourProvider {
export interface EngineDriver<TClient = any> extends FilterBehaviourProvider {
engine: string;
title: string;
defaultPort?: number;
@@ -180,63 +181,68 @@ export interface EngineDriver extends FilterBehaviourProvider {
beforeConnectionSave?: (values: any) => any;
databaseUrlPlaceholder?: string;
defaultAuthTypeName?: string;
defaultLocalDataCenter?: string;
defaultSocketPath?: string;
authTypeLabel?: string;
importExportArgs?: any[];
connect({ server, port, user, password, database }): Promise<DatabaseHandle>;
close(dbhan: DatabaseHandle): Promise<any>;
query(dbhan: DatabaseHandle, sql: string, options?: QueryOptions): Promise<QueryResult>;
stream(dbhan: DatabaseHandle, sql: string, options: StreamOptions);
readQuery(dbhan: DatabaseHandle, sql: string, structure?: TableInfo): Promise<stream.Readable>;
readJsonQuery(dbhan: DatabaseHandle, query: any, structure?: TableInfo): Promise<stream.Readable>;
connect({ server, port, user, password, database }): Promise<DatabaseHandle<TClient>>;
close(dbhan: DatabaseHandle<TClient>): Promise<any>;
query(dbhan: DatabaseHandle<TClient>, sql: string, options?: QueryOptions): Promise<QueryResult>;
stream(dbhan: DatabaseHandle<TClient>, sql: string, options: StreamOptions);
readQuery(dbhan: DatabaseHandle<TClient>, sql: string, structure?: TableInfo): Promise<stream.Readable>;
readJsonQuery(dbhan: DatabaseHandle<TClient>, query: any, structure?: TableInfo): Promise<stream.Readable>;
// eg. PostgreSQL COPY FROM stdin
writeQueryFromStream(dbhan: DatabaseHandle, sql: string): Promise<stream.Writable>;
writeTable(dbhan: DatabaseHandle, name: NamedObjectInfo, options: WriteTableOptions): Promise<stream.Writable>;
writeQueryFromStream(dbhan: DatabaseHandle<TClient>, sql: string): Promise<stream.Writable>;
writeTable(
dbhan: DatabaseHandle<TClient>,
name: NamedObjectInfo,
options: WriteTableOptions
): Promise<stream.Writable>;
analyseSingleObject(
dbhan: DatabaseHandle,
dbhan: DatabaseHandle<TClient>,
name: NamedObjectInfo,
objectTypeField: keyof DatabaseInfo
): Promise<TableInfo | ViewInfo | ProcedureInfo | FunctionInfo | TriggerInfo>;
analyseSingleTable(dbhan: DatabaseHandle, name: NamedObjectInfo): Promise<TableInfo>;
getVersion(dbhan: DatabaseHandle): Promise<{ version: string; versionText?: string }>;
listDatabases(dbhan: DatabaseHandle): Promise<
analyseSingleTable(dbhan: DatabaseHandle<TClient>, name: NamedObjectInfo): Promise<TableInfo>;
getVersion(dbhan: DatabaseHandle<TClient>): Promise<{ version: string; versionText?: string }>;
listDatabases(dbhan: DatabaseHandle<TClient>): Promise<
{
name: string;
}[]
>;
loadKeys(dbhan: DatabaseHandle, root: string, filter?: string): Promise;
exportKeys(dbhan: DatabaseHandle, options: {}): Promise;
loadKeyInfo(dbhan: DatabaseHandle, key): Promise;
loadKeyTableRange(dbhan: DatabaseHandle, key, cursor, count): Promise;
loadKeys(dbhan: DatabaseHandle<TClient>, root: string, filter?: string): Promise;
exportKeys(dbhan: DatabaseHandle<TClient>, options: {}): Promise;
loadKeyInfo(dbhan: DatabaseHandle<TClient>, key): Promise;
loadKeyTableRange(dbhan: DatabaseHandle<TClient>, key, cursor, count): Promise;
loadFieldValues(
dbhan: DatabaseHandle,
dbhan: DatabaseHandle<TClient>,
name: NamedObjectInfo,
field: string,
search: string,
dataType: string
): Promise;
analyseFull(dbhan: DatabaseHandle, serverVersion): Promise<DatabaseInfo>;
analyseIncremental(dbhan: DatabaseHandle, structure: DatabaseInfo, serverVersion): Promise<DatabaseInfo>;
analyseFull(dbhan: DatabaseHandle<TClient>, serverVersion): Promise<DatabaseInfo>;
analyseIncremental(dbhan: DatabaseHandle<TClient>, structure: DatabaseInfo, serverVersion): Promise<DatabaseInfo>;
dialect: SqlDialect;
dialectByVersion(version): SqlDialect;
createDumper(options = null): SqlDumper;
createBackupDumper(dbhan: DatabaseHandle, options): Promise<SqlBackupDumper>;
createBackupDumper(dbhan: DatabaseHandle<TClient>, options): Promise<SqlBackupDumper>;
getAuthTypes(): EngineAuthType[];
readCollection(dbhan: DatabaseHandle, options: ReadCollectionOptions): Promise<any>;
updateCollection(dbhan: DatabaseHandle, changeSet: any): Promise<any>;
readCollection(dbhan: DatabaseHandle<TClient>, options: ReadCollectionOptions): Promise<any>;
updateCollection(dbhan: DatabaseHandle<TClient>, changeSet: any): Promise<any>;
getCollectionUpdateScript(changeSet: any, collectionInfo: CollectionInfo): string;
createDatabase(dbhan: DatabaseHandle, name: string): Promise;
dropDatabase(dbhan: DatabaseHandle, name: string): Promise;
createDatabase(dbhan: DatabaseHandle<TClient>, name: string): Promise;
dropDatabase(dbhan: DatabaseHandle<TClient>, name: string): Promise;
getQuerySplitterOptions(usage: 'stream' | 'script' | 'editor' | 'import'): any;
script(dbhan: DatabaseHandle, sql: string, options?: RunScriptOptions): Promise;
operation(dbhan: DatabaseHandle, operation: {}, options?: RunScriptOptions): Promise;
script(dbhan: DatabaseHandle<TClient>, sql: string, options?: RunScriptOptions): Promise;
operation(dbhan: DatabaseHandle<TClient>, operation: {}, options?: RunScriptOptions): Promise;
getNewObjectTemplates(): NewObjectTemplate[];
// direct call of dbhan.client method, only some methods could be supported, on only some drivers
callMethod(dbhan: DatabaseHandle, method, args);
serverSummary(dbhan: DatabaseHandle): Promise<ServerSummary>;
summaryCommand(dbhan: DatabaseHandle, command, row): Promise<void>;
startProfiler(dbhan: DatabaseHandle, options): Promise<any>;
stopProfiler(dbhan: DatabaseHandle, profiler): Promise<void>;
callMethod(dbhan: DatabaseHandle<TClient>, method, args);
serverSummary(dbhan: DatabaseHandle<TClient>): Promise<ServerSummary>;
summaryCommand(dbhan: DatabaseHandle<TClient>, command, row): Promise<void>;
startProfiler(dbhan: DatabaseHandle<TClient>, options): Promise<any>;
stopProfiler(dbhan: DatabaseHandle<TClient>, profiler): Promise<void>;
getRedirectAuthUrl(connection, options): Promise<{ url: string; sid: string }>;
getAuthTokenFromCode(connection, options): Promise<string>;
getAccessTokenFromAuth(connection, req): Promise<string | null>;
@@ -247,11 +253,11 @@ export interface EngineDriver extends FilterBehaviourProvider {
createSaveChangeSetScript(
changeSet: any,
dbinfo: DatabaseInfo,
defaultCreator: (changeSet: any, dbinfo: DatabaseInfo) => any
defaultCreator: (changeSet: any, dbinfo: DatabaseInfo, dialect: SqlDialect) => any
): any[];
// adapts table info from different source (import, other database) to be suitable for this database
adaptTableInfo(table: TableInfo): TableInfo;
listSchemas(dbhan: DatabaseHandle): SchemaInfo[];
listSchemas(dbhan: DatabaseHandle<TClient>): SchemaInfo[];
analyserClass?: any;
dumperClass?: any;

View File

@@ -48,3 +48,4 @@ export * from './extensions';
export * from './alter-processor';
export * from './appdefs';
export * from './filter-type';
export * from './test-engines';

86
packages/types/test-engines.d.ts vendored Normal file
View File

@@ -0,0 +1,86 @@
import { ParameterInfo, SchedulerEventInfo, TriggerInfo } from './dbinfo';
export type TestObjectInfo = {
type: string;
create1: string;
create2: string;
drop1: string;
drop2: string;
};
export type TestEngineInfo = {
label: string;
connection: {
engine: string;
server?: string;
databaseUrl?: string;
serviceName?: string;
password?: string;
user?: string;
port?: number;
};
removeNotNull?: boolean;
skipOnCI?: boolean;
skipIncrementalAnalysis?: boolean;
skipDataModifications?: boolean;
skipReferences?: boolean;
skipIndexes?: boolean;
skipNullability?: boolean;
skipUnique?: boolean;
skipAutoIncrement?: boolean;
skipPkColumnTesting?: boolean;
skipDataDuplicator?: boolean;
skipDeploy?: boolean;
skipStringLength?: boolean;
skipChangeColumn?: boolean;
skipDefaultValue?: boolean;
skipNonPkRename?: boolean;
skipPkDrop?: boolean;
skipOrderBy?: boolean;
forceSortResults?: boolean;
forceSortStructureColumns?: boolean;
alterTableAddColumnSyntax?: boolean;
dbSnapshotBySeconds?: boolean;
setNullDefaultInsteadOfDrop?: boolean;
useTextTypeForStrings?: boolean;
supportRenameSqlObject?: boolean;
supportSchemas?: boolean;
defaultSchemaName?: string;
generateDbFile?: boolean;
dbSnapshotBySeconds?: boolean;
dumpFile?: string;
dumpChecks?: Array<{ sql: string; res: string }>;
parametersOtherSql?: string[];
parameters?: Array<{
testName: string;
create: string;
drop: string;
objectTypeField: string;
list: Array<Partial<ParameterInfo>>;
}>;
triggers?: Array<{
testName: string;
create: string;
drop: string;
triggerOtherCreateSql?: string;
triggerOtherDropSql?: string;
objectTypeField: string;
expected: Partial<TriggerInfo>;
}>;
schedulerEvents?: Array<{
create: string;
drop: string;
objectTypeField: string;
expected: Partial<SchedulerEventInfo>;
}>;
objects?: Array<TestObjectInfo>;
};

View File

@@ -21,11 +21,15 @@
import { renameDatabaseObjectDialog, alterDatabaseDialog } from '../utility/alterDatabaseTools';
import AppObjectCore from './AppObjectCore.svelte';
import { DEFAULT_OBJECT_SEARCH_SETTINGS } from '../stores';
import { filterName } from 'dbgate-tools';
import { DEFAULT_OBJECT_SEARCH_SETTINGS, extensions } from '../stores';
import { filterName, findEngineDriver } from 'dbgate-tools';
import { useConnectionInfo } from '../utility/metadataLoaders';
export let data;
$: connection = useConnectionInfo({ conid: data.conid });
$: driver = findEngineDriver($connection, $extensions);
function handleRenameColumn() {
renameDatabaseObjectDialog(data.conid, data.database, data.columnName, (db, newName) => {
const tbl = db.tables.find(x => x.schemaName == data.schemaName && x.pureName == data.pureName);
@@ -42,11 +46,20 @@
}
function createMenu() {
return [
{ text: 'Rename column', onClick: handleRenameColumn },
const isPrimaryKey = !!data.primaryKey?.columns?.some(i => i.columnName == data.columnName);
const menu = [];
if (!driver.dialect.disableNonPrimaryKeyRename || isPrimaryKey) {
menu.push({ text: 'Rename column', onClick: handleRenameColumn });
}
menu.push(
{ text: 'Drop column', onClick: handleDropColumn },
{ text: 'Copy name', onClick: () => navigator.clipboard.writeText(data.columnName) },
];
{ text: 'Copy name', onClick: () => navigator.clipboard.writeText(data.columnName) }
);
return menu;
}
function getExtInfo(data) {

View File

@@ -147,11 +147,12 @@
isDrop: true,
requiresWriteAccess: true,
},
hasPermission('dbops/table/rename') && {
label: 'Rename table',
isRename: true,
requiresWriteAccess: true,
},
hasPermission('dbops/table/rename') &&
!driver?.dialect.disableRenameTable && {
label: 'Rename table',
isRename: true,
requiresWriteAccess: true,
},
hasPermission('dbops/table/truncate') && {
label: 'Truncate table',
isTruncate: true,

View File

@@ -135,6 +135,16 @@
/>
{/if}
{#if driver?.showConnectionField('localDataCenter', $values, showConnectionFieldArgs)}
<FormTextField
label="Local DataCenter"
name="localDataCenter"
data-testid="ConnectionDriverFields_localDataCenter"
placeholder={driver?.defaultLocalDataCenter}
disabled={isConnected || disabledFields.includes('localDataCenter')}
/>
{/if}
{#if $authTypes && driver?.showConnectionField('authType', $values, showConnectionFieldArgs)}
{#key $authTypes}
<FormSelectField

View File

@@ -36,7 +36,9 @@
<FormCheckboxField name="notNull" label="NOT NULL" disabled={isReadOnly} />
{/if}
<FormCheckboxField name="isPrimaryKey" label="Is Primary Key" disabled={isReadOnly} />
<FormCheckboxField name="autoIncrement" label="Is Autoincrement" disabled={isReadOnly} />
{#if !driver?.dialect?.disableAutoIncrement}
<FormCheckboxField name="autoIncrement" label="Is Autoincrement" disabled={isReadOnly} />
{/if}
<FormTextField
name="defaultValue"
label="Default value. Please use valid SQL expression, eg. 'Hello World' for string value, '' for empty string"

View File

@@ -22,7 +22,7 @@ export default function newTable(connection, database) {
current: {
pureName: 'new_table',
schemaName: getAppliedCurrentSchema() ?? driver?.dialect?.defaultSchemaName,
columns: [
columns: driver.dialect?.defaultNewTableColumns ?? [
{
columnName: 'id',
dataType: 'int',

View File

@@ -110,6 +110,7 @@
'port',
'user',
'password',
'localDataCenter',
'defaultDatabase',
'singleDatabase',
'socketPath',

View File

@@ -160,7 +160,7 @@
const driver = findEngineDriver($connection, $extensions);
const script = driver.createSaveChangeSetScript($changeSetStore?.value, $dbinfo, () =>
changeSetToSql($changeSetStore?.value, $dbinfo)
changeSetToSql($changeSetStore?.value, $dbinfo, driver.dialect)
);
const deleteCascades = getDeleteCascades($changeSetStore?.value, $dbinfo);

View File

@@ -0,0 +1,6 @@
[![styled with prettier](https://img.shields.io/badge/styled_with-prettier-ff69b4.svg)](https://github.com/prettier/prettier)
[![NPM version](https://img.shields.io/npm/v/dbgate-plugin-cassandra.svg)](https://www.npmjs.com/package/dbgate-plugin-cassandra)
# dbgate-plugin-cassandra
Use DbGate to install this plugin.

View File

@@ -0,0 +1,35 @@
<?xml version="1.0" encoding="iso-8859-1"?>
<!-- Generator: Adobe Illustrator 19.0.0, SVG Export Plug-In . SVG Version: 6.00 Build 0) -->
<svg version="1.1" id="Capa_1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px"
viewBox="0 0 384 384" style="enable-background:new 0 0 384 384;" xml:space="preserve">
<polygon style="fill:#EFEEEE;" points="64,0 64,384 288,384 384,288 384,0 "/>
<polygon style="fill:#ABABAB;" points="288,288 288,384 384,288 "/>
<polygon style="fill:#DEDEDD;" points="192,384 288,384 288,288 "/>
<path style="fill:#448E47;" d="M0,96v112h256V96L0,96L0,96z"/>
<g>
<path style="fill:#FFFFFF;" d="M64.32,130.112c-1.184-2.288-3.344-3.424-6.48-3.424c-1.728,0-3.152,0.464-4.272,1.408
c-1.12,0.928-2,2.416-2.64,4.496s-1.088,4.8-1.344,8.176c-0.272,3.36-0.384,7.472-0.384,12.336c0,5.184,0.176,9.376,0.528,12.576
c0.336,3.2,0.896,5.664,1.632,7.44s1.664,2.96,2.784,3.552c1.12,0.608,2.416,0.928,3.888,0.928c1.216,0,2.352-0.208,3.408-0.624
s1.968-1.248,2.736-2.496c0.784-1.248,1.392-3.008,1.824-5.28c0.448-2.272,0.672-5.264,0.672-8.976H80.48
c0,3.696-0.288,7.232-0.864,10.56s-1.664,6.24-3.216,8.736c-1.584,2.48-3.776,4.432-6.624,5.84
c-2.848,1.408-6.544,2.128-11.088,2.128c-5.168,0-9.312-0.848-12.368-2.496c-3.072-1.664-5.424-4.064-7.056-7.2
s-2.688-6.88-3.168-11.232c-0.464-4.336-0.72-9.152-0.72-14.384c0-5.184,0.256-9.968,0.72-14.352
c0.48-4.368,1.552-8.144,3.168-11.28c1.648-3.12,3.984-5.584,7.056-7.344c3.056-1.744,7.2-2.64,12.368-2.64
c4.944,0,8.816,0.8,11.664,2.4c2.848,1.6,4.976,3.632,6.368,6.096s2.304,5.12,2.64,7.968c0.352,2.848,0.528,5.52,0.528,8.016H66.08
C66.08,136,65.488,132.368,64.32,130.112z"/>
<path style="fill:#FFFFFF;" d="M109.072,167.008c0,1.6,0.144,3.056,0.384,4.352c0.272,1.312,0.736,2.416,1.44,3.312
c0.704,0.912,1.664,1.616,2.848,2.128c1.168,0.496,2.672,0.768,4.448,0.768c2.128,0,4.016-0.688,5.712-2.064
c1.68-1.376,2.544-3.52,2.544-6.384c0-1.536-0.224-2.864-0.624-3.984c-0.416-1.12-1.104-2.128-2.064-3.008
c-0.976-0.912-2.24-1.712-3.792-2.448s-3.504-1.488-5.808-2.256c-3.056-1.024-5.712-2.16-7.968-3.376
c-2.24-1.2-4.112-2.624-5.616-4.272c-1.504-1.632-2.608-3.52-3.312-5.664c-0.704-2.16-1.056-4.624-1.056-7.456
c0-6.784,1.888-11.824,5.664-15.152c3.76-3.328,8.96-4.992,15.552-4.992c3.072,0,5.904,0.336,8.496,1.008s4.832,1.744,6.72,3.264
c1.888,1.504,3.36,3.424,4.416,5.744c1.04,2.336,1.584,5.136,1.584,8.4v1.92h-13.232c0-3.264-0.576-5.776-1.712-7.552
c-1.152-1.744-3.072-2.64-5.76-2.64c-1.536,0-2.816,0.24-3.84,0.672c-1.008,0.448-1.84,1.04-2.448,1.776s-1.04,1.616-1.264,2.576
c-0.24,0.96-0.336,1.952-0.336,2.976c0,2.128,0.448,3.888,1.344,5.328c0.896,1.456,2.816,2.784,5.76,3.984l10.656,4.608
c2.624,1.152,4.768,2.352,6.416,3.616c1.664,1.248,3.008,2.592,3.984,4.032c0.992,1.44,1.68,3.008,2.064,4.752
c0.384,1.712,0.576,3.648,0.576,5.744c0,7.232-2.096,12.496-6.288,15.792c-4.192,3.296-10.032,4.96-17.52,4.96
c-7.808,0-13.392-1.696-16.768-5.088c-3.36-3.392-5.024-8.256-5.024-14.592v-2.784h13.824L109.072,167.008L109.072,167.008z"/>
<path style="fill:#FFFFFF;" d="M177.344,168.544h0.304l10.176-50.688h14.32L186.4,186.4h-17.76l-15.728-68.544h14.784
L177.344,168.544z"/>
</g>
</svg>

After

Width:  |  Height:  |  Size: 3.1 KiB

View File

@@ -0,0 +1,38 @@
{
"name": "dbgate-plugin-cassandra",
"main": "dist/backend.js",
"version": "6.0.0-alpha.1",
"license": "GPL-3.0",
"author": "Jan Prochazka",
"description": "cassandra connector for DbGate",
"keywords": [
"dbgate",
"cassandra",
"dbgatebuiltin"
],
"files": [
"dist",
"icon.svg"
],
"scripts": {
"build:frontend": "webpack --config webpack-frontend.config",
"build:frontend:watch": "webpack --watch --config webpack-frontend.config",
"build:backend": "webpack --config webpack-backend.config.js",
"build": "yarn build:frontend && yarn build:backend",
"plugin": "yarn build && yarn pack && dbgate-plugin dbgate-plugin-cassandra",
"plugout": "dbgate-plugout dbgate-plugin-cassandra",
"copydist": "yarn build && yarn pack && dbgate-copydist ../dist/dbgate-plugin-cassandra",
"prepublishOnly": "yarn build"
},
"devDependencies": {
"dbgate-plugin-tools": "^1.0.8",
"webpack": "^5.91.0",
"webpack-cli": "^5.1.4"
},
"dependencies": {
"dbgate-tools": "^6.0.0-alpha.1",
"json-stable-stringify": "^1.0.1",
"lodash": "^4.17.21",
"cassandra-driver": "^4.7.2"
}
}

View File

@@ -0,0 +1,8 @@
module.exports = {
  trailingComma: 'es5',
  tabWidth: 2,
  semi: true,
  singleQuote: true,
  // Prettier's option is named `arrowParens`; the previous `arrowParen`
  // key was silently ignored.
  arrowParens: 'avoid',
  printWidth: 120,
};

View File

@@ -0,0 +1,60 @@
const { DatabaseAnalyser } = global.DBGATE_PACKAGES['dbgate-tools'];
const sql = require('./sql');
// Database analyser for Cassandra: reads table/column metadata from the
// system_schema keyspace and maps it to DbGate's structure model.
class Analyser extends DatabaseAnalyser {
  constructor(connection, driver) {
    super(connection, driver);
  }

  // Loads a query template from ./sql and substitutes the current keyspace
  // name for the #DATABASE# placeholder before delegating to the base class.
  createQuery(resFileName, typeFields, replacements = {}) {
    let res = sql[resFileName];
    res = res.replace('#DATABASE#', this.dbhan.database);
    return super.createQuery(res, typeFields, replacements);
  }

  // Full keyspace analysis: loads tables and columns from system_schema and
  // joins them client-side. Views/functions/triggers are not analysed yet
  // (the views query is commented out below).
  async _runAnalysis() {
    this.feedback({ analysingMessage: 'Loading tables' });
    const tables = await this.analyserQuery('tables', ['tables']);
    this.feedback({ analysingMessage: 'Loading columns' });
    const columns = await this.analyserQuery('columns', ['tables']);
    // this.feedback({ analysingMessage: 'Loading views' });
    // const views = await this.analyserQuery('views', ['views']);
    const res = {
      tables: tables.rows.map((table) => {
        const tableColumns = columns.rows.filter((col) => col.pureName == table.pureName);
        // Partition-key and clustering columns together form the primary key.
        const pkColumns = tableColumns.filter((i) => i.kind === 'partition_key' || i.kind === 'clustering');
        return {
          ...table,
          primaryKeyColumns: pkColumns,
          columns: tableColumns,
          primaryKey: pkColumns.length ? { columns: pkColumns } : null,
          foreignKeys: [],
        };
      }),
      views: [],
      functions: [],
      triggers: [],
    };
    this.feedback({ analysingMessage: null });
    return res;
  }

  // Analyses a single object by re-running the whole analysis and picking
  // out the matching item.
  // NOTE(review): _runAnalysis ignores its arguments, and pureName is read
  // from dbhan here — confirm callers really pass the object name on the
  // handle rather than in a separate filter.
  async singleObjectAnalysis(dbhan, typeField) {
    const structure = await this._runAnalysis(dbhan, typeField);
    const item = structure[typeField]?.find((i) => i.pureName === dbhan.pureName);
    return item;
  }

  // async _computeSingleObjectId() {
  //   const { pureName } = this.singleObjectFilter;
  //   const resId = await this.driver.query(
  //     this.dbhan,
  //     `SELECT uuid as id FROM system.tables WHERE database = '${this.dbhan.database}' AND name='${pureName}'`
  //   );
  //   this.singleObjectId = resId.rows[0]?.id;
  // }
}

module.exports = Analyser;

View File

@@ -0,0 +1,95 @@
const { createBulkInsertStreamBase } = global.DBGATE_PACKAGES['dbgate-tools'];
/**
 * Looks up a column by name in the analysed table structure.
 *
 * @param {import('dbgate-types').TableInfo} tableInfo
 * @param {string} columnName
 * @returns {{columnName: string, dataType: string} | null} name/type pair, or null when the column is unknown
 */
function getColumnInfo(tableInfo, columnName) {
  const found = tableInfo.columns.find((candidate) => candidate.columnName == columnName);
  if (!found) return null;
  const { dataType } = found;
  return { columnName, dataType };
}
/**
 * Decides whether a generated uuid() primary-key value must be added to
 * rows inserted into the given table (used by the bulk-insert stream).
 *
 * @param {import('dbgate-types').TableInfo} tableInfo
 * @returns {{ shouldAddUuidPk: true, pkColumnName: string } | { shouldAddUuidPk: false }}
 */
function getShouldAddUuidPkInfo(tableInfo) {
  const hasIdColumn = tableInfo.columns.some((x) => x.columnName == 'id');
  if (hasIdColumn && !tableInfo.primaryKey) return { shouldAddUuidPk: false };
  const pkColumnName = tableInfo.primaryKey?.columns[0]?.columnName;
  // No declared primary key at all -> synthesize a generated "id" column.
  if (!pkColumnName) return { shouldAddUuidPk: true, pkColumnName: 'id' };
  const columnInfo = getColumnInfo(tableInfo, pkColumnName);
  if (!columnInfo) return { shouldAddUuidPk: false };
  // NOTE(review): columnInfo was just found in tableInfo.columns, so this
  // every() is necessarily false and the final return below is unreachable.
  // The intended check may have been the inverse (pk column missing from
  // the column list) — confirm against callers before changing.
  const shouldAddUuidPk = tableInfo.columns.every((i) => i.columnName !== columnInfo.columnName);
  if (!shouldAddUuidPk) return { shouldAddUuidPk };
  return { shouldAddUuidPk, pkColumnName };
}
/**
 * Creates a writable stream that buffers incoming rows and flushes them to
 * Cassandra as individual INSERT statements (one statement per row). When
 * the analysed table structure calls for it, a generated uuid() primary-key
 * value is prepended to every inserted row.
 *
 * @param {import('dbgate-types').EngineDriver<import('cassandra-driver').Client>} driver
 * @param {import('stream')} stream - the Node.js stream module
 * @param {import('dbgate-types').DatabaseHandle<import('cassandra-driver').Client>} dbhan
 * @param {import('dbgate-types').NamedObjectInfo} name
 * @param {import('dbgate-types').WriteTableOptions} options
 */
function createCassandraBulkInsertStream(driver, stream, dbhan, name, options) {
  const writable = createBulkInsertStreamBase(driver, stream, dbhan, name, options);

  // Overrides the base flush implementation: emits one INSERT per buffered row.
  writable.send = async () => {
    const { shouldAddUuidPk, pkColumnName } = getShouldAddUuidPkInfo(writable.structure);
    const rows = writable.buffer;
    const fullNameQuoted = writable.fullNameQuoted;
    writable.buffer = [];
    for (const row of rows) {
      const dmp = driver.createDumper();
      dmp.putRaw(`INSERT INTO ${fullNameQuoted} (`);
      if (shouldAddUuidPk) {
        dmp.putRaw(driver.dialect.quoteIdentifier(pkColumnName));
        dmp.putRaw(', ');
      }
      dmp.putCollection(',', writable.columnNames, (col) => dmp.putRaw(driver.dialect.quoteIdentifier(col)));
      dmp.putRaw(')\n VALUES\n');
      dmp.putRaw('(');
      if (shouldAddUuidPk) {
        // Generated primary key value for the synthesized column.
        dmp.putRaw('uuid()');
        dmp.putRaw(', ');
      }
      dmp.putCollection(',', writable.columnNames, (col) => {
        const existingColumn = getColumnInfo(writable.structure, col);
        const dataType = existingColumn?.dataType;
        if (dataType) {
          // Known column: let the dumper format the value for its CQL type.
          dmp.putValue(row[col], dataType);
        } else {
          // Unknown column: fall back to a stringified value.
          dmp.putValue(row[col]?.toString());
        }
      });
      dmp.putRaw(')');
      await driver.query(dbhan, dmp.s, { discardResult: true });
    }
  };
  return writable;
}

module.exports = createCassandraBulkInsertStream;

View File

@@ -0,0 +1,195 @@
const _ = require('lodash');
const stream = require('stream');
const driverBase = require('../frontend/driver');
const Analyser = require('./Analyser');
const cassandra = require('cassandra-driver');
const createCassandraBulkInsertStream = require('./createBulkInsertStream.js');
const { makeUniqueColumnNames } = require('dbgate-tools');
// Maps a cassandra-driver numeric type code back to its symbolic name;
// yields undefined for codes absent from cassandra.types.dataTypes.
function getTypeName(code) {
  const match = Object.entries(cassandra.types.dataTypes).find(([, typeCode]) => typeCode === code);
  return match ? match[0] : undefined;
}
// Parses a "line X:Y" position out of a CQL error message.
// Returns { line, col } with a zero-based line, or {} when no position is present.
function extractLineFromError(err) {
  const found = /line (\d+):(\d+)/.exec(err.message);
  if (found === null) return {};
  const [, lineText, colText] = found;
  return {
    line: Number.parseInt(lineText, 10) - 1,
    col: Number.parseInt(colText, 10),
  };
}
// Converts a cassandra-driver Row into a plain object keyed by the
// (already deduplicated) column names taken from the header.
function zipDataRow(row, header) {
  const result = {};
  header.forEach(({ columnName }, index) => {
    result[columnName] = row.get(index);
  });
  return result;
}
// Derives the result-set header from the first streamed row; duplicate
// column names are made unique so the grid can address each one.
function extractCassandraColumns(row) {
  if (!row) return [];
  const columns = [];
  for (const { name } of row.__columns) {
    columns.push({ columnName: name });
  }
  makeUniqueColumnNames(columns);
  return columns;
}
/** @type {import('dbgate-types').EngineDriver<cassandra.Client>} */
const driver = {
  ...driverBase,
  analyserClass: Analyser,

  // creating connection
  // `server` may hold a comma-separated list of contact points; `database`
  // selects the keyspace. NOTE(review): user/password are accepted but not
  // forwarded to the client (no authProvider is configured yet) — confirm
  // before relying on the auth fields in the connection form.
  async connect({ server, user, password, database, localDataCenter, useDatabaseUrl, databaseUrl }) {
    const client = new cassandra.Client({
      // user,
      // password,
      contactPoints: server.split(','),
      localDataCenter: localDataCenter ?? this.defaultLocalDataCenter,
      keyspace: database,
    });
    // Await the connection so bad contact points / keyspaces fail here
    // instead of leaving a floating promise with an unhandled rejection.
    await client.connect();
    return {
      client,
      database,
    };
  },

  // called for retrieve data (eg. browse in data grid) and for update database
  async query(dbhan, query, options) {
    // CQL has no OFFSET clause; emulate it by slicing the fetched rows.
    const offset = options?.range?.offset;
    if (options?.discardResult) {
      await dbhan.client.execute(query);
      return {
        rows: [],
        columns: [],
      };
    }
    const result = await dbhan.client.execute(query);
    if (!result.rows?.[0]) {
      return {
        rows: [],
        columns: [],
      };
    }
    const columns = result.columns.map(({ name, type: { code } }) => ({
      columnName: name,
      dataType: getTypeName(code),
    }));
    return {
      rows: offset ? result.rows.slice(offset) : result.rows,
      columns,
    };
  },

  // called in query console
  async stream(dbhan, query, options) {
    try {
      // Non-SELECT statements produce no result set; just execute them.
      if (!query.match(/^\s*SELECT/i)) {
        await dbhan.client.execute(query);
        options.done();
        return;
      }
      const strm = dbhan.client.stream(query);
      let header;
      strm.on('readable', () => {
        let row;
        while ((row = strm.read())) {
          if (!header) {
            // Header is derived lazily from the first row.
            header = extractCassandraColumns(row);
            options.recordset(header);
          }
          options.row(zipDataRow(row, header));
        }
      });
      strm.on('end', () => {
        options.done();
      });
      strm.on('error', (err) => {
        const { line } = extractLineFromError(err);
        options.info({
          message: err.toString(),
          time: new Date(),
          severity: 'error',
          line,
        });
        options.done();
      });
    } catch (err) {
      const { line } = extractLineFromError(err);
      options.info({
        message: err.message,
        time: new Date(),
        severity: 'error',
        line,
      });
      options.done();
    }
  },

  // called when exporting table or view
  async readQuery(dbhan, query, structure) {
    const pass = new stream.PassThrough({
      objectMode: true,
      highWaterMark: 100,
    });
    const strm = dbhan.client.stream(query);
    strm.on('readable', () => {
      let row;
      while ((row = strm.read())) {
        pass.write(row);
      }
    });
    strm.on('end', () => {
      pass.end();
    });
    strm.on('error', (err) => {
      // PassThrough has no info() channel (the previous code called
      // pass.info(), which does not exist and would itself throw inside
      // the error handler); log the failure and close the stream.
      console.error(err);
      pass.end();
    });
    return pass;
  },

  // Bulk-insert rows into a table; see createBulkInsertStream.js.
  async writeTable(dbhan, name, options) {
    return createCassandraBulkInsertStream(this, stream, dbhan, name, options);
  },

  // detect server version
  async getVersion(dbhan) {
    const result = await dbhan.client.execute('SELECT release_version from system.local');
    return { version: result.rows[0].release_version };
  },

  // list databases (keyspaces) on server
  async listDatabases(dbhan) {
    const result = await dbhan.client.execute('SELECT keyspace_name FROM system_schema.keyspaces');
    return result.rows.map((row) => ({ name: row.keyspace_name }));
  },

  async close(dbhan) {
    return dbhan.client.shutdown();
  },
};

module.exports = driver;

View File

@@ -0,0 +1,6 @@
const driver = require('./driver');

// Backend plugin entry point consumed by the DbGate plugin loader.
module.exports = {
  packageName: 'dbgate-plugin-cassandra',
  drivers: [driver],
};

View File

@@ -0,0 +1,9 @@
// Lists all columns of the current keyspace; `kind` distinguishes
// partition_key / clustering / regular columns. #DATABASE# is replaced
// by the Analyser with the keyspace name.
module.exports = `
SELECT
  table_name as "pureName",
  column_name as "columnName",
  type as "dataType",
  kind as "kind"
FROM system_schema.columns
WHERE keyspace_name = '#DATABASE#'
`;

View File

@@ -0,0 +1,9 @@
const columns = require('./columns');
const tables = require('./tables');
const views = require('./views');

// Query templates used by the Analyser, keyed by template name.
module.exports = {
  columns,
  tables,
  views,
};

View File

@@ -0,0 +1,5 @@
// Lists tables of the current keyspace; #DATABASE# is replaced by the
// Analyser with the keyspace name.
module.exports = `
SELECT table_name as "pureName"
FROM system_schema.tables
WHERE keyspace_name='#DATABASE#';
`;

View File

@@ -0,0 +1,10 @@
module.exports = `
select
tables.name as "pureName",
tables.uuid as "objectId",
views.view_definition as "viewDefinition",
tables.metadata_modification_time as "contentHash"
from information_schema.views
inner join system.tables on views.table_name = tables.name and views.table_schema = tables.database
where views.table_schema='#DATABASE#' and tables.uuid =OBJECT_ID_CONDITION
`;

View File

@@ -0,0 +1,75 @@
/**
* @type {{ SqlDumper: import('dbgate-types').SqlDumper}}
*/
const { SqlDumper } = global.DBGATE_PACKAGES['dbgate-tools'];
const numericDataTypes = ['tinyint', 'smallint', 'int', 'bigint', 'varint', 'float', 'double', 'decimal'];
// CQL statement dumper: adapts the generic SqlDumper to Cassandra syntax
// (keyspaces instead of databases, unquoted uuid/numeric literals).
class Dumper extends SqlDumper {
  /**
   * Emits ALTER TABLE ... RENAME <old> TO <new>.
   *
   * @param {import('dbgate-types').ColumnInfo} column
   * @param {string} newName
   * @returns {void}
   */
  renameColumn(column, newName) {
    this.putCmd('^alter ^table %f ^rename %i ^to %i', column, column.columnName, newName);
  }

  /**
   * Emits ALTER TABLE ... DROP <column>.
   *
   * @param {import('dbgate-types').ColumnInfo} column
   * @returns {void}
   */
  dropColumn(column) {
    this.putCmd('^alter ^table %f ^drop %i', column, column.columnName);
  }

  /**
   * Emits CREATE KEYSPACE with the given replication settings.
   *
   * @param {string} name
   * @param {string} [replicationClass] replication strategy class
   * @param {number} [replicationFactor]
   * @returns {void}
   */
  createDatabase(name, replicationClass = 'SimpleStrategy', replicationFactor = 1) {
    this.putCmd(
      "^create ^keyspace %s ^with replication = {'class': '%s','replication_factor': %s}",
      name,
      replicationClass,
      replicationFactor
    );
  }

  /**
   * Emits DROP KEYSPACE.
   *
   * @param {string} name
   * @returns {void}
   */
  dropDatabase(name) {
    this.putCmd('^drop ^keyspace %s', name);
  }

  /**
   * Writes a literal value: uuid strings and numeric values are emitted
   * unquoted (CQL rejects quoted literals for these types); everything else
   * falls through to the default quoted/escaped handling.
   *
   * @param {string|number|null|undefined} value
   * @param {string} [dataType]
   * @returns {void}
   */
  putValue(value, dataType) {
    const type = dataType?.toLowerCase();
    // typeof guard: the previous code called value.match() unconditionally
    // and crashed when a non-string (number, null, ...) arrived for a uuid
    // column.
    if (
      type === 'uuid' &&
      typeof value === 'string' &&
      value.match(/^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$/)
    ) {
      this.putRaw(value);
      return;
    }
    if (numericDataTypes.includes(type) && !Number.isNaN(parseFloat(value))) {
      this.putRaw(value);
      return;
    }
    super.putValue(value);
  }
}

module.exports = Dumper;

View File

@@ -0,0 +1,124 @@
const { driverBase } = global.DBGATE_PACKAGES['dbgate-tools'];
const Dumper = require('./Dumper');
const { mysqlSplitterOptions } = require('dbgate-query-splitter/lib/options');
const _cloneDeepWith = require('lodash/cloneDeepWith');
/**
 * SQL dialect description for Cassandra (CQL).
 * Consumed by DbGate's designers and change-set generators.
 *
 * @type {import('dbgate-types').SqlDialect}
 */
const dialect = {
  limitSelect: true,
  rangeSelect: true,
  stringEscapeChar: "'",
  fallbackDataType: 'varchar',
  // CQL has no OFFSET clause; the driver emulates it by slicing rows.
  offsetNotSupported: true,
  allowMultipleValuesInsert: false,
  createColumn: true,
  dropColumn: true,
  changeColumn: true,
  changeAutoIncrement: true,
  createIndex: true,
  dropIndex: true,
  anonymousPrimaryKey: true,
  createColumnWithColumnKeyword: false,
  specificNullabilityImplementation: true,
  disableRenameTable: true,
  generateDefaultValueForUuid: 'uuid()',
  // Foreign keys, unique constraints and generic index editing are hidden
  // in the designers for this engine.
  omitForeignKeys: true,
  omitUniqueConstraints: true,
  omitIndexes: true,
  omitTableAliases: true,
  omitTableBeforeColumn: true,
  sortingKeys: true,
  predefinedDataTypes: [
    'custom',
    'ascii',
    'bigint',
    'blob',
    'boolean',
    'counter',
    'decimal',
    'double',
    'float',
    'int',
    'text',
    'timestamp',
    'uuid',
    'varchar',
    'varint',
    'timeuuid',
    'inet',
    'date',
    'time',
    'smallint',
    'tinyint',
    'duration',
    'list',
    'map',
    'set',
    'udt',
    'tuple',
  ],
  disableAutoIncrement: true,
  disableNonPrimaryKeyRename: true,
  // New tables start with a uuid primary-key column instead of an int identity.
  defaultNewTableColumns: [
    {
      columnName: 'id',
      dataType: 'uuid',
      notNull: true,
    },
  ],
  columnProperties: {
    columnComment: true,
  },
  // Quotes an identifier for CQL. Embedded double quotes are escaped by
  // doubling them (the previous version emitted them unescaped, producing
  // invalid CQL for such identifiers).
  quoteIdentifier(s) {
    return `"${s.replace(/"/g, '""')}"`;
  },
};
/** @type {import('dbgate-types').EngineDriver} */
const driver = {
  ...driverBase,
  // This driver does not implement transactions.
  supportsTransactions: false,
  defaultPort: 9042,
  // Used by the backend when the connection does not specify a data center.
  defaultLocalDataCenter: 'datacenter1',
  dumperClass: Dumper,
  dialect,
  engine: 'cassandra@dbgate-plugin-cassandra',
  title: 'Cassandra',
  // Controls which fields the connection form shows for this engine.
  showConnectionField: (field, values) =>
    ['server', 'singleDatabase', 'localDataCenter', 'isReadOnly', 'user', 'password'].includes(field),
  // Reuses the MySQL statement splitter; in the editor, single-line
  // splitting is suppressed and comments are ignored.
  getQuerySplitterOptions: (usage) =>
    usage == 'editor'
      ? { ...mysqlSplitterOptions, ignoreComments: true, preventSingleLineSplit: true }
      : mysqlSplitterOptions,
  // When a designed table has neither a primary key nor a sorting key,
  // inject an "id" uuid primary key, adding the column too unless one named
  // "id" already exists.
  adaptTableInfo(table) {
    if (!table.primaryKey && !table.sortingKey) {
      const hasIdColumn = table.columns.some((x) => x.columnName == 'id');
      return {
        ...table,
        primaryKey: {
          columns: [
            {
              columnName: 'id',
            },
          ],
        },
        columns: [
          ...(!hasIdColumn
            ? [
                {
                  columnName: 'id',
                  dataType: 'uuid',
                },
              ]
            : []),
          ...table.columns,
        ],
      };
    }
    return table;
  },
};

module.exports = driver;

View File

@@ -0,0 +1,6 @@
import driver from './driver';

// Frontend plugin entry point consumed by the DbGate plugin loader.
export default {
  packageName: 'dbgate-plugin-cassandra',
  drivers: [driver],
};

View File

@@ -0,0 +1,29 @@
var webpack = require('webpack');
var path = require('path');
const packageJson = require('./package.json');
const buildPluginExternals = require('../../common/buildPluginExternals');
const externals = buildPluginExternals(packageJson);
// Webpack configuration building the Node (backend) part of the plugin
// into dist/backend.js as a CommonJS module.
const config = {
  context: `${__dirname}/src/backend`,
  entry: {
    app: './index.js',
  },
  target: 'node',
  output: {
    path: path.resolve(__dirname, 'dist'),
    filename: 'backend.js',
    libraryTarget: 'commonjs2',
  },

  // uncomment to disable minimization
  // optimization: {
  //   minimize: false,
  // },

  externals,
};

module.exports = config;

View File

@@ -0,0 +1,24 @@
var webpack = require("webpack");
var path = require("path");
// Webpack configuration building the browser (frontend) part of the plugin
// into dist/frontend.js, exposed as the global `plugin` variable.
const config = {
  context: `${__dirname}/src/frontend`,
  entry: {
    app: './index.js',
  },
  target: 'web',
  output: {
    path: path.resolve(__dirname, 'dist'),
    filename: 'frontend.js',
    libraryTarget: 'var',
    library: 'plugin',
  },

  // uncomment to disable minimization
  // optimization: {
  //   minimize: false,
  // },
};

module.exports = config;

View File

@@ -106,3 +106,8 @@ jobs:
ORACLE_PASSWORD: Pwd2020Db
ports:
- 15006:1521
cassandradb:
image: cassandra:5.0.2
ports:
- 15942:9042

View File

@@ -2444,6 +2444,13 @@
resolved "https://registry.yarnpkg.com/@types/lodash/-/lodash-4.17.1.tgz#0fabfcf2f2127ef73b119d98452bd317c4a17eb8"
integrity sha512-X+2qazGS3jxLAIz5JDXDzglAF3KpijdhFxlf/V1+hEsOUc+HnWi81L/uv/EvGuV90WY+7mPGFCUDGfQC3Gj95Q==
"@types/long@~5.0.0":
version "5.0.0"
resolved "https://registry.yarnpkg.com/@types/long/-/long-5.0.0.tgz#daaa7b7f74c919c946ff74889d5ca2afe363b2cd"
integrity sha512-eQs9RsucA/LNjnMoJvWG/nXa7Pot/RbBzilF/QRIU/xRl+0ApxrSUFsV5lmf01SvSlqMzJ7Zwxe440wmz2SJGA==
dependencies:
long "*"
"@types/markdown-it@^14.1.1":
version "14.1.2"
resolved "https://registry.yarnpkg.com/@types/markdown-it/-/markdown-it-14.1.2.tgz#57f2532a0800067d9b934f3521429a2e8bfb4c61"
@@ -2479,6 +2486,13 @@
dependencies:
undici-types "~5.26.4"
"@types/node@>=8":
version "22.10.5"
resolved "https://registry.yarnpkg.com/@types/node/-/node-22.10.5.tgz#95af89a3fb74a2bb41ef9927f206e6472026e48b"
integrity sha512-F8Q+SeGimwOo86fiovQh8qiXfFEh2/ocYv7tU5pJ3EXMSSxk1Joj5wefpFK2fHTf/N6HKGSxIDBT9f3gCxXPkQ==
dependencies:
undici-types "~6.20.0"
"@types/node@^13.7.0":
version "13.13.52"
resolved "https://registry.yarnpkg.com/@types/node/-/node-13.13.52.tgz#03c13be70b9031baaed79481c0c0cfb0045e53f7"
@@ -2846,6 +2860,11 @@ adler-32@~1.3.0:
resolved "https://registry.yarnpkg.com/adler-32/-/adler-32-1.3.1.tgz#1dbf0b36dda0012189a32b3679061932df1821e2"
integrity sha512-ynZ4w/nUUv5rrsR8UUGoe1VC9hZj6V5hU9Qw1HlMDJGEJw5S7TfTErWTjMys6M7vr0YWcPqs3qAr4ss0nDfP+A==
adm-zip@~0.5.10:
version "0.5.16"
resolved "https://registry.yarnpkg.com/adm-zip/-/adm-zip-0.5.16.tgz#0b5e4c779f07dedea5805cdccb1147071d94a909"
integrity sha512-TGw5yVi4saajsSEgz25grObGHEUaDrniwvA2qwSC060KfqGPdglhvPMA2lPIoxs3PQIItj2iag35fONcQqgUaQ==
agent-base@6, agent-base@^6.0.2:
version "6.0.2"
resolved "https://registry.yarnpkg.com/agent-base/-/agent-base-6.0.2.tgz#49fff58577cfee3f37176feab4c22e00f86d7f77"
@@ -3683,6 +3702,16 @@ caseless@~0.12.0:
resolved "https://registry.yarnpkg.com/caseless/-/caseless-0.12.0.tgz#1b681c21ff84033c826543090689420d187151dc"
integrity sha512-4tYFyifaFfGacoiObjJegolkwSU4xQNGbVgUiNYVUxbQ2x2lUsFvY4hVgVzGiIe6WLOPqycWXA40l+PWsxthUw==
cassandra-driver@^4.7.2:
version "4.7.2"
resolved "https://registry.yarnpkg.com/cassandra-driver/-/cassandra-driver-4.7.2.tgz#87f120b6d73d64f0ff3e91cdd4e56bec416fca48"
integrity sha512-gwl1DeYvL8Wy3i1GDMzFtpUg5G473fU7EnHFZj7BUtdLB7loAfgZgB3zBhROc9fbaDSUDs6YwOPPojS5E1kbSA==
dependencies:
"@types/long" "~5.0.0"
"@types/node" ">=8"
adm-zip "~0.5.10"
long "~5.2.3"
catharsis@^0.9.0:
version "0.9.0"
resolved "https://registry.yarnpkg.com/catharsis/-/catharsis-0.9.0.tgz#40382a168be0e6da308c277d3a2b3eb40c7d2121"
@@ -8350,7 +8379,7 @@ lodash@^4.17.14, lodash@^4.17.15, lodash@^4.17.19, lodash@^4.17.21, lodash@^4.7.
resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.21.tgz#679591c564c3bffaae8454cf0b3df370c3d6911c"
integrity sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==
long@^5.2.1:
long@*, long@^5.2.1, long@~5.2.3:
version "5.2.3"
resolved "https://registry.yarnpkg.com/long/-/long-5.2.3.tgz#a3ba97f3877cf1d778eccbcb048525ebb77499e1"
integrity sha512-lcHwpNoggQTObv5apGNCTdJrO69eHOZMi4BNC+rTLER8iHAqGrUVeLh/irVIM7zTw2bOXA8T6uNPeujwOLg/2Q==
@@ -11823,6 +11852,11 @@ undici-types@~5.26.4:
resolved "https://registry.yarnpkg.com/undici-types/-/undici-types-5.26.5.tgz#bcd539893d00b56e964fd2657a4866b221a65617"
integrity sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==
undici-types@~6.20.0:
version "6.20.0"
resolved "https://registry.yarnpkg.com/undici-types/-/undici-types-6.20.0.tgz#8171bf22c1f588d1554d55bf204bc624af388433"
integrity sha512-Ny6QZ2Nju20vw1SRHe3d9jVu6gJ+4e3+MMpqu7pqE5HT6WsTSlce++GQmK5UXS8mzV8DSYHrQH+Xrf2jVcuKNg==
union-value@^1.0.0:
version "1.0.1"
resolved "https://registry.yarnpkg.com/union-value/-/union-value-1.0.1.tgz#0b6fe7b835aecda61c6ea4d4f02c14221e109847"