Merge pull request #1091 from dbgate/feature/duckdb-2

Feature/duckdb 2
This commit is contained in:
Jan Prochazka
2025-04-28 15:44:26 +02:00
committed by GitHub
61 changed files with 2051 additions and 415 deletions

View File

@@ -4,6 +4,7 @@ module.exports = ({ editMenu, isMac }) => [
submenu: [
{ command: 'new.connection', hideDisabled: true },
{ command: 'new.sqliteDatabase', hideDisabled: true },
{ command: 'new.duckdbDatabase', hideDisabled: true },
{ divider: true },
{ command: 'new.query', hideDisabled: true },
{ command: 'new.queryDesign', hideDisabled: true },

View File

@@ -21,6 +21,7 @@ const volatilePackages = [
'axios',
'ssh2',
'wkx',
'@duckdb/node-api',
];
module.exports = volatilePackages;

View File

@@ -52,7 +52,7 @@ async function testDatabaseDiff(conn, driver, mangle, createObject = null) {
}
describe('Alter database', () => {
test.each(engines.filter(x => !x.skipReferences).map(engine => [engine.label, engine]))(
test.each(engines.filter(x => !x.skipReferences && !x.skipDropReferences).map(engine => [engine.label, engine]))(
'Drop referenced table - %s',
testWrapper(async (conn, driver, engine) => {
await testDatabaseDiff(conn, driver, db => {

View File

@@ -90,7 +90,7 @@ const TESTED_COLUMNS = ['col_pk', 'col_std', 'col_def', 'col_fk', 'col_ref', 'co
// const TESTED_COLUMNS = ['col_std'];
// const TESTED_COLUMNS = ['col_ref'];
function create_engines_columns_source(engines) {
function createEnginesColumnsSource(engines) {
return _.flatten(
engines.map(engine =>
TESTED_COLUMNS.filter(col => col.endsWith('_pk') || !engine.skipNonPkRename)
@@ -116,45 +116,30 @@ describe('Alter table', () => {
})
);
const columnsSource = create_engines_columns_source(engines);
const dropableColumnsSrouce = columnsSource.filter(
([_label, col, engine]) => !engine.skipPkDrop || !col.endsWith('_pk')
test.each(
createEnginesColumnsSource(engines.filter(x => !x.skipDropColumn)).filter(
([_label, col, engine]) => !engine.skipPkDrop || !col.endsWith('_pk')
)
)(
'Drop column - %s - %s',
testWrapper(async (conn, driver, column, engine) => {
await testTableDiff(engine, conn, driver, tbl => (tbl.columns = tbl.columns.filter(x => x.columnName != column)));
})
);
const hasDropableColumns = dropableColumnsSrouce.length > 0;
if (hasDropableColumns) {
test.each(dropableColumnsSrouce)(
'Drop column - %s - %s',
testWrapper(async (conn, driver, column, engine) => {
await testTableDiff(
engine,
conn,
driver,
tbl => (tbl.columns = tbl.columns.filter(x => x.columnName != column))
);
})
);
}
test.each(createEnginesColumnsSource(engines.filter(x => !x.skipNullable && !x.skipChangeNullability)))(
'Change nullability - %s - %s',
testWrapper(async (conn, driver, column, engine) => {
await testTableDiff(
engine,
conn,
driver,
tbl => (tbl.columns = tbl.columns.map(x => (x.columnName == column ? { ...x, notNull: true } : x)))
);
})
);
const hasEnginesWithNullable = engines.filter(x => !x.skipNullable).length > 0;
if (hasEnginesWithNullable) {
const source = create_engines_columns_source(engines.filter(x => !x.skipNullable));
test.each(source)(
'Change nullability - %s - %s',
testWrapper(async (conn, driver, column, engine) => {
await testTableDiff(
engine,
conn,
driver,
tbl => (tbl.columns = tbl.columns.map(x => (x.columnName == column ? { ...x, notNull: true } : x)))
);
})
);
}
test.each(columnsSource)(
test.each(createEnginesColumnsSource(engines.filter(x => !x.skipRenameColumn)))(
'Rename column - %s - %s',
testWrapper(async (conn, driver, column, engine) => {
await testTableDiff(
@@ -175,37 +160,32 @@ describe('Alter table', () => {
})
);
const enginesWithDefault = engines.filter(x => !x.skipDefaultValue);
const hasEnginesWithDefault = enginesWithDefault.length > 0;
test.each(engines.filter(x => !x.skipDefaultValue).map(engine => [engine.label, engine]))(
'Add default value - %s',
testWrapper(async (conn, driver, engine) => {
await testTableDiff(engine, conn, driver, tbl => {
tbl.columns.find(x => x.columnName == 'col_std').defaultValue = '123';
});
})
);
if (hasEnginesWithDefault) {
test.each(enginesWithDefault.map(engine => [engine.label, engine]))(
'Add default value - %s',
testWrapper(async (conn, driver, engine) => {
await testTableDiff(engine, conn, driver, tbl => {
tbl.columns.find(x => x.columnName == 'col_std').defaultValue = '123';
});
})
);
test.each(engines.filter(x => !x.skipDefaultValue).map(engine => [engine.label, engine]))(
'Unset default value - %s',
testWrapper(async (conn, driver, engine) => {
await testTableDiff(engine, conn, driver, tbl => {
tbl.columns.find(x => x.columnName == 'col_def').defaultValue = undefined;
});
})
);
test.each(enginesWithDefault.map(engine => [engine.label, engine]))(
'Unset default value - %s',
testWrapper(async (conn, driver, engine) => {
await testTableDiff(engine, conn, driver, tbl => {
tbl.columns.find(x => x.columnName == 'col_def').defaultValue = undefined;
});
})
);
test.each(enginesWithDefault.map(engine => [engine.label, engine]))(
'Change default value - %s',
testWrapper(async (conn, driver, engine) => {
await testTableDiff(engine, conn, driver, tbl => {
tbl.columns.find(x => x.columnName == 'col_def').defaultValue = '567';
});
})
);
}
test.each(engines.filter(x => !x.skipDefaultValue).map(engine => [engine.label, engine]))(
'Change default value - %s',
testWrapper(async (conn, driver, engine) => {
await testTableDiff(engine, conn, driver, tbl => {
tbl.columns.find(x => x.columnName == 'col_def').defaultValue = '567';
});
})
);
// test.each(engines.map(engine => [engine.label, engine]))(
// 'Change autoincrement - %s',

View File

@@ -51,7 +51,8 @@ describe('DB Import/export', () => {
await copyStream(reader, writer);
const res = await runQueryOnDriver(conn, driver, dmp => dmp.put(`select count(*) as ~cnt from ~t1`));
expect(res.rows[0].cnt.toString()).toEqual('6');
const cnt = parseInt(res.rows[0].cnt.toString());
expect(cnt).toEqual(6);
})
);
@@ -75,7 +76,8 @@ describe('DB Import/export', () => {
await copyStream(reader, writer);
const res = await runQueryOnDriver(conn, driver, dmp => dmp.put(`select count(*) as ~cnt from ~t1`));
expect(res.rows[0].cnt.toString()).toEqual('6');
const cnt = parseInt(res.rows[0].cnt.toString());
expect(cnt).toEqual(6);
})
);
@@ -103,10 +105,12 @@ describe('DB Import/export', () => {
await copyStream(reader2, writer2);
const res1 = await runQueryOnDriver(conn, driver, dmp => dmp.put(`select count(*) as ~cnt from ~t1`));
expect(res1.rows[0].cnt.toString()).toEqual('6');
const cnt = parseInt(res1.rows[0].cnt.toString());
expect(cnt).toEqual(6);
const res2 = await runQueryOnDriver(conn, driver, dmp => dmp.put(`select count(*) as ~cnt from ~t2`));
expect(res2.rows[0].cnt.toString()).toEqual('6');
const cnt2 = parseInt(res2.rows[0].cnt.toString());
expect(cnt2).toEqual(6);
})
);
const enginesWithDumpFile = engines.filter(x => x.dumpFile);
@@ -192,7 +196,8 @@ describe('DB Import/export', () => {
});
const res1 = await runQueryOnDriver(conn, driver, dmp => dmp.put(`select count(*) as ~cnt from ~categories`));
expect(res1.rows[0].cnt.toString()).toEqual('4');
const cnt1 = parseInt(res1.rows[0].cnt.toString());
expect(cnt1).toEqual(4);
})
);
});

View File

@@ -20,7 +20,11 @@ function flatSourceParameters() {
}
function flatSourceTriggers() {
return _.flatten(engines.map(engine => (engine.triggers || []).map(trigger => [engine.label, trigger, engine])));
return _.flatten(
engines
.filter(engine => !engine.skipTriggers)
.map(engine => (engine.triggers || []).map(trigger => [engine.label, trigger, engine]))
);
}
function flatSourceSchedulerEvents() {

View File

@@ -183,8 +183,8 @@ describe('Query', () => {
{ discardResult: true }
);
const res = await runQueryOnDriver(conn, driver, dmp => dmp.put('SELECT COUNT(*) AS ~cnt FROM ~t1'));
// console.log(res);
expect(res.rows[0].cnt == 3).toBeTruthy();
const cnt = parseInt(res.rows[0].cnt);
expect(cnt).toEqual(3);
})
);

View File

@@ -654,6 +654,32 @@ const cassandraEngine = {
objects: [],
};
/** @type {import('dbgate-types').TestEngineInfo} */
const duckdbEngine = {
label: 'DuckDB',
generateDbFile: true,
defaultSchemaName: 'main',
connection: {
engine: 'duckdb@dbgate-plugin-duckdb',
},
objects: [views],
skipOnCI: false,
skipChangeColumn: true,
// skipIndexes: true,
skipStringLength: true,
skipTriggers: true,
skipDataReplicator: true,
skipAutoIncrement: true,
skipDropColumn: true,
skipRenameColumn: true,
skipChangeNullability: true,
skipDeploy: true,
supportRenameSqlObject: true,
skipIncrementalAnalysis: true,
skipDefaultValue: true,
skipDropReferences: true,
};
const enginesOnCi = [
// all engines, which would be run on GitHub actions
mysqlEngine,
@@ -667,13 +693,14 @@ const enginesOnCi = [
clickhouseEngine,
oracleEngine,
cassandraEngine,
duckdbEngine,
];
const enginesOnLocal = [
// all engines, which would be run on local test
// cassandraEngine,
// mysqlEngine,
mariaDbEngine,
// mariaDbEngine,
// postgreSqlEngine,
// sqlServerEngine,
// sqliteEngine,
@@ -682,6 +709,7 @@ const enginesOnLocal = [
// libsqlFileEngine,
// libsqlWsEngine,
// oracleEngine,
duckdbEngine,
];
/** @type {import('dbgate-types').TestEngineInfo[] & Record<string, import('dbgate-types').TestEngineInfo>} */
@@ -696,3 +724,6 @@ module.exports.cockroachDbEngine = cockroachDbEngine;
module.exports.clickhouseEngine = clickhouseEngine;
module.exports.oracleEngine = oracleEngine;
module.exports.cassandraEngine = cassandraEngine;
module.exports.libsqlFileEngine = libsqlFileEngine;
module.exports.libsqlWsEngine = libsqlWsEngine;
module.exports.duckdbEngine = duckdbEngine;

View File

@@ -12,7 +12,7 @@
"wait:local": "cross-env DEVMODE=1 LOCALTEST=1 node wait.js",
"wait:ci": "cross-env DEVMODE=1 CITEST=1 node wait.js",
"test:local": "cross-env DEVMODE=1 LOCALTEST=1 jest --testTimeout=5000",
"test:local:path": "cross-env DEVMODE=1 LOCALTEST=1 jest --runTestsByPath __tests__/data-replicator.spec.js",
"test:local:path": "cross-env DEVMODE=1 LOCALTEST=1 jest --runTestsByPath __tests__/alter-database.spec.js",
"test:ci": "cross-env DEVMODE=1 CITEST=1 jest --runInBand --json --outputFile=result.json --testLocationInResults --detectOpenHandles --forceExit --testTimeout=10000",
"run:local": "docker-compose down && docker-compose up -d && yarn wait:local && yarn test:local"
},

View File

@@ -38,6 +38,11 @@ function getNamedArgs() {
res.databaseFile = name;
res.engine = 'sqlite@dbgate-plugin-sqlite';
}
if (name.endsWith('.duckdb')) {
res.databaseFile = name;
res.engine = 'duckdb@dbgate-plugin-duckdb';
}
}
}
return res;
@@ -447,6 +452,22 @@ module.exports = {
return res;
},
newDuckdbDatabase_meta: true,
async newDuckdbDatabase({ file }) {
const duckdbDir = path.join(filesdir(), 'duckdb');
if (!(await fs.exists(duckdbDir))) {
await fs.mkdir(duckdbDir);
}
const databaseFile = path.join(duckdbDir, `${file}.duckdb`);
const res = await this.save({
engine: 'duckdb@dbgate-plugin-duckdb',
databaseFile,
singleDatabase: true,
defaultDatabase: `${file}.duckdb`,
});
return res;
},
dbloginWeb_meta: {
raw: true,
method: 'get',

View File

@@ -39,6 +39,8 @@ const axios = require('axios');
const { callTextToSqlApi, callCompleteOnCursorApi, callRefactorSqlQueryApi } = require('../utility/authProxy');
const { decryptConnection } = require('../utility/crypting');
const { getSshTunnel } = require('../utility/sshTunnel');
const sessions = require('./sessions');
const jsldata = require('./jsldata');
const logger = getLogger('databaseConnections');
@@ -96,6 +98,52 @@ module.exports = {
handle_ping() {},
// session event handlers
handle_info(conid, database, props) {
const { sesid, info } = props;
sessions.dispatchMessage(sesid, info);
},
handle_done(conid, database, props) {
const { sesid } = props;
socket.emit(`session-done-${sesid}`);
sessions.dispatchMessage(sesid, 'Query execution finished');
},
handle_recordset(conid, database, props) {
const { jslid, resultIndex } = props;
socket.emit(`session-recordset-${props.sesid}`, { jslid, resultIndex });
},
handle_stats(conid, database, stats) {
jsldata.notifyChangedStats(stats);
},
handle_initializeFile(conid, database, props) {
const { jslid } = props;
socket.emit(`session-initialize-file-${jslid}`);
},
// eval event handler
handle_runnerDone(conid, database, props) {
const { runid } = props;
socket.emit(`runner-done-${runid}`);
},
handle_progress(conid, database, progressData) {
const { progressName } = progressData;
const { name, runid } = progressName;
socket.emit(`runner-progress-${runid}`, { ...progressData, progressName: name });
},
handle_copyStreamError(conid, database, { copyStreamError }) {
const { progressName } = copyStreamError;
const { runid } = progressName;
logger.error(`Error in database connection ${conid}, database ${database}: ${copyStreamError}`);
socket.emit(`runner-done-${runid}`);
},
async ensureOpened(conid, database) {
const existing = this.opened.find(x => x.conid == conid && x.database == database);
if (existing) return existing;
@@ -136,7 +184,13 @@ module.exports = {
const { msgtype } = message;
if (handleProcessCommunication(message, subprocess)) return;
if (newOpened.disconnected) return;
this[`handle_${msgtype}`](conid, database, message);
const funcName = `handle_${msgtype}`;
if (!this[funcName]) {
logger.error(`Unknown message type ${msgtype} from subprocess databaseConnectionProcess`);
return;
}
this[funcName](conid, database, message);
});
subprocess.on('exit', () => {
if (newOpened.disconnected) return;
@@ -763,4 +817,25 @@ module.exports = {
commandLine: this.commandArgsToCommandLine(commandArgs),
};
},
executeSessionQuery_meta: true,
async executeSessionQuery({ sesid, conid, database, sql }, req) {
testConnectionPermission(conid, req);
logger.info({ sesid, sql }, 'Processing query');
sessions.dispatchMessage(sesid, 'Query execution started');
const opened = await this.ensureOpened(conid, database);
opened.subprocess.send({ msgtype: 'executeSessionQuery', sql, sesid });
return { state: 'ok' };
},
evalJsonScript_meta: true,
async evalJsonScript({ conid, database, script, runid }, req) {
testConnectionPermission(conid, req);
const opened = await this.ensureOpened(conid, database);
opened.subprocess.send({ msgtype: 'evalJsonScript', script, runid });
return { state: 'ok' };
},
};

View File

@@ -8,7 +8,7 @@ const { fork, spawn } = require('child_process');
const { rundir, uploadsdir, pluginsdir, getPluginBackendPath, packagedPluginList } = require('../utility/directories');
const {
extractShellApiPlugins,
extractShellApiFunctionName,
compileShellApiFunctionName,
jsonScriptToJavascript,
getLogger,
safeJsonParse,
@@ -58,7 +58,7 @@ dbgateApi.initializeApiEnvironment();
${requirePluginsTemplate(extractShellApiPlugins(functionName, props))}
require=null;
async function run() {
const reader=await ${extractShellApiFunctionName(functionName)}(${JSON.stringify(props)});
const reader=await ${compileShellApiFunctionName(functionName)}(${JSON.stringify(props)});
const writer=await dbgateApi.collectorWriter({runid: '${runid}'});
await dbgateApi.copyStream(reader, writer);
}
@@ -273,7 +273,7 @@ module.exports = {
const runid = crypto.randomUUID();
if (script.type == 'json') {
const js = jsonScriptToJavascript(script);
const js = await jsonScriptToJavascript(script);
return this.startCore(runid, scriptTemplate(js, false));
}
@@ -335,7 +335,7 @@ module.exports = {
return { errorMessage: 'Only JSON scripts are allowed' };
}
const promise = new Promise((resolve, reject) => {
const promise = new Promise(async (resolve, reject) => {
const runid = crypto.randomUUID();
this.requests[runid] = { resolve, reject, exitOnStreamError: true };
const cloned = _.cloneDeepWith(script, node => {
@@ -343,7 +343,7 @@ module.exports = {
return runid;
}
});
const js = jsonScriptToJavascript(cloned);
const js = await jsonScriptToJavascript(cloned);
this.startCore(runid, scriptTemplate(js, false));
});
return promise;

View File

@@ -54,6 +54,9 @@ module.exports = {
if (!connection) {
throw new Error(`Connection with conid="${conid}" not found`);
}
if (connection.singleDatabase) {
return null;
}
if (connection.passwordMode == 'askPassword' || connection.passwordMode == 'askUser') {
throw new MissingCredentialsError({ conid, passwordMode: connection.passwordMode });
}
@@ -142,14 +145,14 @@ module.exports = {
if (conid == '__model') return [];
testConnectionPermission(conid, req);
const opened = await this.ensureOpened(conid);
return opened.databases;
return opened?.databases ?? [];
},
version_meta: true,
async version({ conid }, req) {
testConnectionPermission(conid, req);
const opened = await this.ensureOpened(conid);
return opened.version;
return opened?.version ?? null;
},
serverStatus_meta: true,
@@ -170,6 +173,9 @@ module.exports = {
}
this.lastPinged[conid] = new Date().getTime();
const opened = await this.ensureOpened(conid);
if (!opened) {
return Promise.resolve();
}
try {
opened.subprocess.send({ msgtype: 'ping' });
} catch (err) {
@@ -194,6 +200,9 @@ module.exports = {
async sendDatabaseOp({ conid, msgtype, name }, req) {
testConnectionPermission(conid, req);
const opened = await this.ensureOpened(conid);
if (!opened) {
return null;
}
if (opened.connection.isReadOnly) return false;
const res = await this.sendRequest(opened, { msgtype, name });
if (res.errorMessage) {
@@ -233,6 +242,9 @@ module.exports = {
async loadDataCore(msgtype, { conid, ...args }, req) {
testConnectionPermission(conid, req);
const opened = await this.ensureOpened(conid);
if (!opened) {
return null;
}
const res = await this.sendRequest(opened, { msgtype, ...args });
if (res.errorMessage) {
console.error(res.errorMessage);
@@ -254,6 +266,9 @@ module.exports = {
async summaryCommand({ conid, command, row }, req) {
testConnectionPermission(conid, req);
const opened = await this.ensureOpened(conid);
if (!opened) {
return null;
}
if (opened.connection.isReadOnly) return false;
return this.loadDataCore('summaryCommand', { conid, command, row });
},

View File

@@ -9,13 +9,21 @@ const {
dbNameLogCategory,
extractErrorMessage,
extractErrorLogData,
ScriptWriterEval,
SqlGenerator,
playJsonScriptWriter,
} = require('dbgate-tools');
const requireEngineDriver = require('../utility/requireEngineDriver');
const { connectUtility } = require('../utility/connectUtility');
const { handleProcessCommunication } = require('../utility/processComm');
const { SqlGenerator } = require('dbgate-tools');
const generateDeploySql = require('../shell/generateDeploySql');
const { dumpSqlSelect } = require('dbgate-sqltree');
const { allowExecuteCustomScript, handleQueryStream } = require('../utility/handleQueryStream');
const dbgateApi = require('../shell');
const requirePlugin = require('../shell/requirePlugin');
const path = require('path');
const { rundir } = require('../utility/directories');
const fs = require('fs-extra');
const logger = getLogger('dbconnProcess');
@@ -375,6 +383,52 @@ async function handleGenerateDeploySql({ msgid, modelFolder }) {
}
}
async function handleExecuteSessionQuery({ sesid, sql }) {
await waitConnected();
const driver = requireEngineDriver(storedConnection);
if (!allowExecuteCustomScript(storedConnection, driver)) {
process.send({
msgtype: 'info',
info: {
message: 'Connection without read-only sessions is read only',
severity: 'error',
},
sesid,
});
process.send({ msgtype: 'done', sesid, skipFinishedMessage: true });
return;
//process.send({ msgtype: 'error', error: e.message });
}
const resultIndexHolder = {
value: 0,
};
for (const sqlItem of splitQuery(sql, {
...driver.getQuerySplitterOptions('stream'),
returnRichInfo: true,
})) {
await handleQueryStream(dbhan, driver, resultIndexHolder, sqlItem, sesid);
}
process.send({ msgtype: 'done', sesid });
}
async function handleEvalJsonScript({ script, runid }) {
const directory = path.join(rundir(), runid);
fs.mkdirSync(directory);
const originalCwd = process.cwd();
try {
process.chdir(directory);
const evalWriter = new ScriptWriterEval(dbgateApi, requirePlugin, dbhan, runid);
await playJsonScriptWriter(script, evalWriter);
process.send({ msgtype: 'runnerDone', runid });
} finally {
process.chdir(originalCwd);
}
}
// async function handleRunCommand({ msgid, sql }) {
// await waitConnected();
// const driver = engines(storedConnection);
@@ -405,6 +459,8 @@ const messageHandlers = {
sqlSelect: handleSqlSelect,
exportKeys: handleExportKeys,
schemaList: handleSchemaList,
executeSessionQuery: handleExecuteSessionQuery,
evalJsonScript: handleEvalJsonScript,
// runCommand: handleRunCommand,
};

View File

@@ -11,6 +11,7 @@ const { decryptConnection } = require('../utility/crypting');
const { connectUtility } = require('../utility/connectUtility');
const { handleProcessCommunication } = require('../utility/processComm');
const { getLogger, extractIntSettingsValue, extractBoolSettingsValue } = require('dbgate-tools');
const { handleQueryStream, QueryStreamTableWriter, allowExecuteCustomScript } = require('../utility/handleQueryStream');
const logger = getLogger('sessionProcess');
@@ -23,175 +24,6 @@ let lastActivity = null;
let currentProfiler = null;
let executingScripts = 0;
class TableWriter {
constructor() {
this.currentRowCount = 0;
this.currentChangeIndex = 1;
this.initializedFile = false;
}
initializeFromQuery(structure, resultIndex) {
this.jslid = crypto.randomUUID();
this.currentFile = path.join(jsldir(), `${this.jslid}.jsonl`);
fs.writeFileSync(
this.currentFile,
JSON.stringify({
...structure,
__isStreamHeader: true,
}) + '\n'
);
this.currentStream = fs.createWriteStream(this.currentFile, { flags: 'a' });
this.writeCurrentStats(false, false);
this.resultIndex = resultIndex;
this.initializedFile = true;
process.send({ msgtype: 'recordset', jslid: this.jslid, resultIndex });
}
initializeFromReader(jslid) {
this.jslid = jslid;
this.currentFile = path.join(jsldir(), `${this.jslid}.jsonl`);
this.writeCurrentStats(false, false);
}
row(row) {
// console.log('ACCEPT ROW', row);
this.currentStream.write(JSON.stringify(row) + '\n');
this.currentRowCount += 1;
if (!this.plannedStats) {
this.plannedStats = true;
process.nextTick(() => {
if (this.currentStream) this.currentStream.uncork();
process.nextTick(() => this.writeCurrentStats(false, true));
this.plannedStats = false;
});
}
}
rowFromReader(row) {
if (!this.initializedFile) {
process.send({ msgtype: 'initializeFile', jslid: this.jslid });
this.initializedFile = true;
fs.writeFileSync(this.currentFile, JSON.stringify(row) + '\n');
this.currentStream = fs.createWriteStream(this.currentFile, { flags: 'a' });
this.writeCurrentStats(false, false);
this.initializedFile = true;
return;
}
this.row(row);
}
writeCurrentStats(isFinished = false, emitEvent = false) {
const stats = {
rowCount: this.currentRowCount,
changeIndex: this.currentChangeIndex,
isFinished,
jslid: this.jslid,
};
fs.writeFileSync(`${this.currentFile}.stats`, JSON.stringify(stats));
this.currentChangeIndex += 1;
if (emitEvent) {
process.send({ msgtype: 'stats', ...stats });
}
}
close(afterClose) {
if (this.currentStream) {
this.currentStream.end(() => {
this.writeCurrentStats(true, true);
if (afterClose) afterClose();
});
}
}
}
class StreamHandler {
constructor(resultIndexHolder, resolve, startLine) {
this.recordset = this.recordset.bind(this);
this.startLine = startLine;
this.row = this.row.bind(this);
// this.error = this.error.bind(this);
this.done = this.done.bind(this);
this.info = this.info.bind(this);
// use this for cancelling - not implemented
// this.stream = null;
this.plannedStats = false;
this.resultIndexHolder = resultIndexHolder;
this.resolve = resolve;
// currentHandlers = [...currentHandlers, this];
}
closeCurrentWriter() {
if (this.currentWriter) {
this.currentWriter.close();
this.currentWriter = null;
}
}
recordset(columns) {
this.closeCurrentWriter();
this.currentWriter = new TableWriter();
this.currentWriter.initializeFromQuery(
Array.isArray(columns) ? { columns } : columns,
this.resultIndexHolder.value
);
this.resultIndexHolder.value += 1;
// this.writeCurrentStats();
// this.onRow = _.throttle((jslid) => {
// if (jslid == this.jslid) {
// this.writeCurrentStats(false, true);
// }
// }, 500);
}
row(row) {
if (this.currentWriter) this.currentWriter.row(row);
else if (row.message) process.send({ msgtype: 'info', info: { message: row.message } });
// this.onRow(this.jslid);
}
// error(error) {
// process.send({ msgtype: 'error', error });
// }
done(result) {
this.closeCurrentWriter();
// currentHandlers = currentHandlers.filter((x) => x != this);
this.resolve();
}
info(info) {
if (info && info.line != null) {
info = {
...info,
line: this.startLine + info.line,
};
}
process.send({ msgtype: 'info', info });
}
}
function handleStream(driver, resultIndexHolder, sqlItem) {
return new Promise((resolve, reject) => {
const start = sqlItem.trimStart || sqlItem.start;
const handler = new StreamHandler(resultIndexHolder, resolve, start && start.line);
driver.stream(dbhan, sqlItem.text, handler);
});
}
function allowExecuteCustomScript(driver) {
if (driver.readOnlySessions) {
return true;
}
if (storedConnection.isReadOnly) {
return false;
// throw new Error('Connection is read only');
}
return true;
}
async function handleConnect(connection) {
storedConnection = connection;
@@ -222,12 +54,12 @@ async function handleStartProfiler({ jslid }) {
await waitConnected();
const driver = requireEngineDriver(storedConnection);
if (!allowExecuteCustomScript(driver)) {
if (!allowExecuteCustomScript(storedConnection, driver)) {
process.send({ msgtype: 'done' });
return;
}
const writer = new TableWriter();
const writer = new QueryStreamTableWriter();
writer.initializeFromReader(jslid);
currentProfiler = await driver.startProfiler(dbhan, {
@@ -251,7 +83,7 @@ async function handleExecuteControlCommand({ command }) {
await waitConnected();
const driver = requireEngineDriver(storedConnection);
if (command == 'commitTransaction' && !allowExecuteCustomScript(driver)) {
if (command == 'commitTransaction' && !allowExecuteCustomScript(storedConnection, driver)) {
process.send({
msgtype: 'info',
info: {
@@ -291,7 +123,7 @@ async function handleExecuteQuery({ sql, autoCommit }) {
await waitConnected();
const driver = requireEngineDriver(storedConnection);
if (!allowExecuteCustomScript(driver)) {
if (!allowExecuteCustomScript(storedConnection, driver)) {
process.send({
msgtype: 'info',
info: {
@@ -313,7 +145,7 @@ async function handleExecuteQuery({ sql, autoCommit }) {
...driver.getQuerySplitterOptions('stream'),
returnRichInfo: true,
})) {
await handleStream(driver, resultIndexHolder, sqlItem);
await handleQueryStream(dbhan, driver, resultIndexHolder, sqlItem);
// const handler = new StreamHandler(resultIndex);
// const stream = await driver.stream(systemConnection, sqlItem, handler);
// handler.stream = stream;
@@ -335,13 +167,13 @@ async function handleExecuteReader({ jslid, sql, fileName }) {
if (fileName) {
sql = fs.readFileSync(fileName, 'utf-8');
} else {
if (!allowExecuteCustomScript(driver)) {
if (!allowExecuteCustomScript(storedConnection, driver)) {
process.send({ msgtype: 'done' });
return;
}
}
const writer = new TableWriter();
const writer = new QueryStreamTableWriter();
writer.initializeFromReader(jslid);
const reader = await driver.readQuery(dbhan, sql);

View File

@@ -69,6 +69,7 @@ async function copyStream(input, output, options) {
msgtype: 'copyStreamError',
copyStreamError: {
message: extractErrorMessage(err),
progressName,
...err,
},
});

View File

@@ -0,0 +1,183 @@
const crypto = require('crypto');
const path = require('path');
const fs = require('fs');
const _ = require('lodash');
const { jsldir } = require('../utility/directories');
class QueryStreamTableWriter {
constructor(sesid = undefined) {
this.currentRowCount = 0;
this.currentChangeIndex = 1;
this.initializedFile = false;
this.sesid = sesid;
}
initializeFromQuery(structure, resultIndex) {
this.jslid = crypto.randomUUID();
this.currentFile = path.join(jsldir(), `${this.jslid}.jsonl`);
fs.writeFileSync(
this.currentFile,
JSON.stringify({
...structure,
__isStreamHeader: true,
}) + '\n'
);
this.currentStream = fs.createWriteStream(this.currentFile, { flags: 'a' });
this.writeCurrentStats(false, false);
this.resultIndex = resultIndex;
this.initializedFile = true;
process.send({ msgtype: 'recordset', jslid: this.jslid, resultIndex, sesid: this.sesid });
}
initializeFromReader(jslid) {
this.jslid = jslid;
this.currentFile = path.join(jsldir(), `${this.jslid}.jsonl`);
this.writeCurrentStats(false, false);
}
row(row) {
// console.log('ACCEPT ROW', row);
this.currentStream.write(JSON.stringify(row) + '\n');
this.currentRowCount += 1;
if (!this.plannedStats) {
this.plannedStats = true;
process.nextTick(() => {
if (this.currentStream) this.currentStream.uncork();
process.nextTick(() => this.writeCurrentStats(false, true));
this.plannedStats = false;
});
}
}
rowFromReader(row) {
if (!this.initializedFile) {
process.send({ msgtype: 'initializeFile', jslid: this.jslid, sesid: this.sesid });
this.initializedFile = true;
fs.writeFileSync(this.currentFile, JSON.stringify(row) + '\n');
this.currentStream = fs.createWriteStream(this.currentFile, { flags: 'a' });
this.writeCurrentStats(false, false);
this.initializedFile = true;
return;
}
this.row(row);
}
writeCurrentStats(isFinished = false, emitEvent = false) {
const stats = {
rowCount: this.currentRowCount,
changeIndex: this.currentChangeIndex,
isFinished,
jslid: this.jslid,
};
fs.writeFileSync(`${this.currentFile}.stats`, JSON.stringify(stats));
this.currentChangeIndex += 1;
if (emitEvent) {
process.send({ msgtype: 'stats', sesid: this.sesid, ...stats });
}
}
close(afterClose) {
if (this.currentStream) {
this.currentStream.end(() => {
this.writeCurrentStats(true, true);
if (afterClose) afterClose();
});
}
}
}
class StreamHandler {
constructor(resultIndexHolder, resolve, startLine, sesid = undefined) {
this.recordset = this.recordset.bind(this);
this.startLine = startLine;
this.sesid = sesid;
this.row = this.row.bind(this);
// this.error = this.error.bind(this);
this.done = this.done.bind(this);
this.info = this.info.bind(this);
// use this for cancelling - not implemented
// this.stream = null;
this.plannedStats = false;
this.resultIndexHolder = resultIndexHolder;
this.resolve = resolve;
// currentHandlers = [...currentHandlers, this];
}
closeCurrentWriter() {
if (this.currentWriter) {
this.currentWriter.close();
this.currentWriter = null;
}
}
recordset(columns) {
this.closeCurrentWriter();
this.currentWriter = new QueryStreamTableWriter(this.sesid);
this.currentWriter.initializeFromQuery(
Array.isArray(columns) ? { columns } : columns,
this.resultIndexHolder.value
);
this.resultIndexHolder.value += 1;
// this.writeCurrentStats();
// this.onRow = _.throttle((jslid) => {
// if (jslid == this.jslid) {
// this.writeCurrentStats(false, true);
// }
// }, 500);
}
row(row) {
if (this.currentWriter) this.currentWriter.row(row);
else if (row.message) process.send({ msgtype: 'info', info: { message: row.message }, sesid: this.sesid });
// this.onRow(this.jslid);
}
// error(error) {
// process.send({ msgtype: 'error', error });
// }
done(result) {
this.closeCurrentWriter();
// currentHandlers = currentHandlers.filter((x) => x != this);
this.resolve();
}
info(info) {
if (info && info.line != null) {
info = {
...info,
line: this.startLine + info.line,
};
}
process.send({ msgtype: 'info', info, sesid: this.sesid });
}
}
function handleQueryStream(dbhan, driver, resultIndexHolder, sqlItem, sesid = undefined) {
return new Promise((resolve, reject) => {
const start = sqlItem.trimStart || sqlItem.start;
const handler = new StreamHandler(resultIndexHolder, resolve, start && start.line, sesid);
driver.stream(dbhan, sqlItem.text, handler);
});
}
function allowExecuteCustomScript(storedConnection, driver) {
if (driver.readOnlySessions) {
return true;
}
if (storedConnection.isReadOnly) {
return false;
// throw new Error('Connection is read only');
}
return true;
}
module.exports = {
handleQueryStream,
QueryStreamTableWriter,
allowExecuteCustomScript,
};

View File

@@ -92,10 +92,10 @@ export class DatabaseAnalyser {
this.singleObjectFilter = { ...name, typeField };
await this._computeSingleObjectId();
const res = this.addEngineField(await this._runAnalysis());
// console.log('SINGLE OBJECT RES', res);
// console.log('SINGLE OBJECT RES', JSON.stringify(res, null, 2));
const obj =
res[typeField]?.length == 1
? res[typeField][0]
? res[typeField]?.find(x => x.pureName.toLowerCase() == name.pureName.toLowerCase())
: res[typeField]?.find(x => x.pureName == name.pureName && x.schemaName == name.schemaName);
// console.log('SINGLE OBJECT', obj);
return obj;

View File

@@ -1,7 +1,22 @@
import _uniq from 'lodash/uniq';
import { extractShellApiFunctionName, extractShellApiPlugins } from './packageTools';
import _cloneDeepWith from 'lodash/cloneDeepWith';
import { evalShellApiFunctionName, compileShellApiFunctionName, extractShellApiPlugins } from './packageTools';
export class ScriptWriter {
export interface ScriptWriterGeneric {
allocVariable(prefix?: string);
endLine();
assign(variableName: string, functionName: string, props: any);
assignValue(variableName: string, jsonValue: any);
requirePackage(packageName: string);
copyStream(sourceVar: string, targetVar: string, colmapVar?: string, progressName?: string);
importDatabase(options: any);
dataReplicator(options: any);
comment(s: string);
zipDirectory(inputDirectory: string, outputFile: string);
getScript(schedule?: any): any;
}
export class ScriptWriterJavaScript implements ScriptWriterGeneric {
s = '';
packageNames: string[] = [];
varCount = 0;
@@ -29,7 +44,7 @@ export class ScriptWriter {
}
assign(variableName, functionName, props) {
this.assignCore(variableName, extractShellApiFunctionName(functionName), props);
this.assignCore(variableName, compileShellApiFunctionName(functionName), props);
this.packageNames.push(...extractShellApiPlugins(functionName, props));
}
@@ -41,10 +56,10 @@ export class ScriptWriter {
this.packageNames.push(packageName);
}
copyStream(sourceVar, targetVar, colmapVar = null, progressName?: string) {
copyStream(sourceVar, targetVar, colmapVar = null, progressName?: string | { name: string; runid: string }) {
let opts = '{';
if (colmapVar) opts += `columns: ${colmapVar}, `;
if (progressName) opts += `progressName: "${progressName}", `;
if (progressName) opts += `progressName: ${JSON.stringify(progressName)}, `;
opts += '}';
this._put(`await dbgateApi.copyStream(${sourceVar}, ${targetVar}, ${opts});`);
@@ -78,7 +93,7 @@ export class ScriptWriter {
}
}
export class ScriptWriterJson {
export class ScriptWriterJson implements ScriptWriterGeneric {
s = '';
packageNames: string[] = [];
varCount = 0;
@@ -103,13 +118,17 @@ export class ScriptWriterJson {
this.commands.push({
type: 'assign',
variableName,
functionName: extractShellApiFunctionName(functionName),
functionName,
props,
});
this.packageNames.push(...extractShellApiPlugins(functionName, props));
}
requirePackage(packageName) {
this.packageNames.push(packageName);
}
assignValue(variableName, jsonValue) {
this.commands.push({
type: 'assignValue',
@@ -118,7 +137,7 @@ export class ScriptWriterJson {
});
}
copyStream(sourceVar, targetVar, colmapVar = null, progressName?: string) {
copyStream(sourceVar, targetVar, colmapVar = null, progressName?: string | { name: string; runid: string }) {
this.commands.push({
type: 'copyStream',
sourceVar,
@@ -167,9 +186,119 @@ export class ScriptWriterJson {
}
}
export function jsonScriptToJavascript(json) {
const { schedule, commands, packageNames } = json;
const script = new ScriptWriter();
export class ScriptWriterEval implements ScriptWriterGeneric {
s = '';
varCount = 0;
commands = [];
dbgateApi: any;
requirePlugin: (name: string) => any;
variables: { [name: string]: any } = {};
hostConnection: any;
runid: string;
constructor(dbgateApi, requirePlugin, hostConnection, runid, varCount = '0') {
this.varCount = parseInt(varCount) || 0;
this.dbgateApi = dbgateApi;
this.requirePlugin = requirePlugin;
this.hostConnection = hostConnection;
this.runid = runid;
}
allocVariable(prefix = 'var') {
this.varCount += 1;
return `${prefix}${this.varCount}`;
}
endLine() {}
requirePackage(packageName) {}
async assign(variableName, functionName, props) {
const func = evalShellApiFunctionName(functionName, this.dbgateApi, this.requirePlugin);
this.variables[variableName] = await func(
_cloneDeepWith(props, node => {
if (node?.$hostConnection) {
return this.hostConnection;
}
})
);
}
assignValue(variableName, jsonValue) {
this.variables[variableName] = jsonValue;
}
async copyStream(sourceVar, targetVar, colmapVar = null, progressName?: string | { name: string; runid: string }) {
await this.dbgateApi.copyStream(this.variables[sourceVar], this.variables[targetVar], {
progressName: _cloneDeepWith(progressName, node => {
if (node?.$runid) {
if (node?.$runid) {
return this.runid;
}
}
}),
columns: colmapVar ? this.variables[colmapVar] : null,
});
}
comment(text) {}
async importDatabase(options) {
await this.dbgateApi.importDatabase(options);
}
async dataReplicator(options) {
await this.dbgateApi.dataReplicator(options);
}
async zipDirectory(inputDirectory, outputFile) {
await this.dbgateApi.zipDirectory(inputDirectory, outputFile);
}
getScript(schedule?: any) {
throw new Error('Not implemented');
}
}
async function playJsonCommand(cmd, script: ScriptWriterGeneric) {
switch (cmd.type) {
case 'assign':
await script.assign(cmd.variableName, cmd.functionName, cmd.props);
break;
case 'assignValue':
await script.assignValue(cmd.variableName, cmd.jsonValue);
break;
case 'copyStream':
await script.copyStream(cmd.sourceVar, cmd.targetVar, cmd.colmapVar, cmd.progressName);
break;
case 'endLine':
await script.endLine();
break;
case 'comment':
await script.comment(cmd.text);
break;
case 'importDatabase':
await script.importDatabase(cmd.options);
break;
case 'dataReplicator':
await script.dataReplicator(cmd.options);
break;
case 'zipDirectory':
await script.zipDirectory(cmd.inputDirectory, cmd.outputFile);
break;
}
}
export async function playJsonScriptWriter(json, script: ScriptWriterGeneric) {
for (const cmd of json.commands) {
await playJsonCommand(cmd, script);
}
}
export async function jsonScriptToJavascript(json) {
const { schedule, packageNames } = json;
const script = new ScriptWriterJavaScript();
for (const packageName of packageNames) {
if (!/^dbgate-plugin-.*$/.test(packageName)) {
throw new Error('Unallowed package name:' + packageName);
@@ -177,34 +306,7 @@ export function jsonScriptToJavascript(json) {
script.packageNames.push(packageName);
}
for (const cmd of commands) {
switch (cmd.type) {
case 'assign':
script.assignCore(cmd.variableName, cmd.functionName, cmd.props);
break;
case 'assignValue':
script.assignValue(cmd.variableName, cmd.jsonValue);
break;
case 'copyStream':
script.copyStream(cmd.sourceVar, cmd.targetVar, cmd.colmapVar, cmd.progressName);
break;
case 'endLine':
script.endLine();
break;
case 'comment':
script.comment(cmd.text);
break;
case 'importDatabase':
script.importDatabase(cmd.options);
break;
case 'dataReplicator':
script.dataReplicator(cmd.options);
break;
case 'zipDirectory':
script.zipDirectory(cmd.inputDirectory, cmd.outputFile);
break;
}
}
await playJsonScriptWriter(json, script);
return script.getScript(schedule);
}

View File

@@ -27,7 +27,7 @@ export function extractPackageName(name): string {
return null;
}
export function extractShellApiFunctionName(functionName) {
export function compileShellApiFunctionName(functionName) {
const nsMatch = functionName.match(/^([^@]+)@([^@]+)/);
if (nsMatch) {
return `${_camelCase(nsMatch[2])}.shellApi.${nsMatch[1]}`;
@@ -35,6 +35,14 @@ export function extractShellApiFunctionName(functionName) {
return `dbgateApi.${functionName}`;
}
export function evalShellApiFunctionName(functionName, dbgateApi, requirePlugin) {
const nsMatch = functionName.match(/^([^@]+)@([^@]+)/);
if (nsMatch) {
return requirePlugin(nsMatch[2]).shellApi[nsMatch[1]];
}
return dbgateApi[functionName];
}
export function findEngineDriver(connection, extensions: ExtensionsDirectory): EngineDriver {
if (!extensions) {
return null;

View File

@@ -33,6 +33,7 @@ export interface QueryOptions {
discardResult?: boolean;
importSqlDump?: boolean;
range?: { offset: number; limit: number };
readonly?: boolean;
}
export interface WriteTableOptions {
@@ -286,7 +287,7 @@ export interface EngineDriver<TClient = any> extends FilterBehaviourProvider {
adaptTableInfo(table: TableInfo): TableInfo;
// simple data type adapter
adaptDataType(dataType: string): string;
listSchemas(dbhan: DatabaseHandle<TClient>): SchemaInfo[];
listSchemas(dbhan: DatabaseHandle<TClient>): Promise<SchemaInfo[] | null>;
backupDatabaseCommand(
connection: any,
settings: BackupDatabaseSettings,
@@ -309,6 +310,7 @@ export interface EngineDriver<TClient = any> extends FilterBehaviourProvider {
analyserClass?: any;
dumperClass?: any;
singleConnectionOnly?: boolean;
}
export interface DatabaseModification {

View File

@@ -7,6 +7,7 @@ export interface QueryResultColumn {
columnName: string;
notNull: boolean;
autoIncrement?: boolean;
dataType?: string;
}
export interface QueryResult {

View File

@@ -40,6 +40,11 @@ export type TestEngineInfo = {
skipPkDrop?: boolean;
skipOrderBy?: boolean;
skipImportModel?: boolean;
skipTriggers?: boolean;
skipDropColumn?: boolean;
skipChangeNullability?: boolean;
skipRenameColumn?: boolean;
skipDropReferences?: boolean;
forceSortResults?: boolean;
forceSortStructureColumns?: boolean;

View File

@@ -893,9 +893,10 @@
{
functionName: menu.functionName,
props: {
connection: extractShellConnection(coninfo, data.database),
...extractShellConnectionHostable(coninfo, data.database),
..._.pick(data, ['pureName', 'schemaName']),
},
hostConnection: extractShellHostConnection(coninfo, data.database),
},
fmt
);
@@ -1031,7 +1032,7 @@
import { alterDatabaseDialog, renameDatabaseObjectDialog } from '../utility/alterDatabaseTools';
import ConfirmModal from '../modals/ConfirmModal.svelte';
import InputTextModal from '../modals/InputTextModal.svelte';
import { extractShellConnection } from '../impexp/createImpExpScript';
import { extractShellConnectionHostable, extractShellHostConnection } from '../impexp/createImpExpScript';
import { format as dateFormat } from 'date-fns';
import { getDefaultFileFormat } from '../plugins/fileformats';
import hasPermission from '../utility/hasPermission';

View File

@@ -46,6 +46,7 @@ import { openImportExportTab } from '../utility/importExportTools';
import newTable from '../tableeditor/newTable';
import { isProApp } from '../utility/proTools';
import { openWebLink } from '../utility/simpleTools';
import { _t } from '../translations';
import ExportImportConnectionsModal from '../modals/ExportImportConnectionsModal.svelte';
// function themeCommand(theme: ThemeDefinition) {
@@ -390,12 +391,12 @@ registerCommand({
category: 'New',
icon: 'img sqlite-database',
name: 'SQLite database',
menuName: 'New SQLite database',
menuName: _t('command.new.sqliteDatabase', { defaultMessage: 'New SQLite database' }),
onClick: () => {
showModal(InputTextModal, {
value: 'newdb',
label: 'New database name',
header: 'Create SQLite database',
label: _t('app.databaseName', { defaultMessage: 'Database name' }),
header: _t('command.new.sqliteDatabase', { defaultMessage: 'New SQLite database' }),
onConfirm: async file => {
const resp = await apiCall('connections/new-sqlite-database', { file });
const connection = resp;
@@ -405,6 +406,26 @@ registerCommand({
},
});
registerCommand({
id: 'new.duckdbDatabase',
category: 'New',
icon: 'img sqlite-database',
name: 'DuckDB database',
menuName: _t('command.new.duckdbDatabase', { defaultMessage: 'New DuckDB database' }),
onClick: () => {
showModal(InputTextModal, {
value: 'newdb',
label: _t('app.databaseName', { defaultMessage: 'Database name' }),
header: _t('command.new.duckdbDatabase', { defaultMessage: 'New DuckDB database' }),
onConfirm: async file => {
const resp = await apiCall('connections/new-duckdb-database', { file });
const connection = resp;
switchCurrentDatabase({ connection, name: `${file}.duckdb` });
},
});
},
});
registerCommand({
id: 'tabs.changelog',
category: 'Tabs',

View File

@@ -121,7 +121,7 @@
import _ from 'lodash';
import { registerQuickExportHandler } from '../buttons/ToolStripExportButton.svelte';
import registerCommand from '../commands/registerCommand';
import { extractShellConnection } from '../impexp/createImpExpScript';
import { extractShellConnection, extractShellConnectionHostable, extractShellHostConnection } from '../impexp/createImpExpScript';
import { apiCall } from '../utility/api';
import { registerMenu } from '../utility/contextMenu';
@@ -235,10 +235,11 @@
{
functionName: 'queryReader',
props: {
connection: extractShellConnection(coninfo, database),
...extractShellConnectionHostable(coninfo, database),
queryType: coninfo.isReadOnly ? 'json' : 'native',
query: coninfo.isReadOnly ? getExportQueryJson() : getExportQuery(),
},
hostConnection: extractShellHostConnection(coninfo, database),
},
fmt,
display.getExportColumnMap()

View File

@@ -68,7 +68,11 @@
import { registerQuickExportHandler } from '../buttons/ToolStripExportButton.svelte';
import registerCommand from '../commands/registerCommand';
import { extractShellConnection } from '../impexp/createImpExpScript';
import {
extractShellConnection,
extractShellConnectionHostable,
extractShellHostConnection,
} from '../impexp/createImpExpScript';
import { apiCall } from '../utility/api';
import { registerMenu } from '../utility/contextMenu';
@@ -215,10 +219,11 @@
{
functionName: 'queryReader',
props: {
connection: extractShellConnection(coninfo, database),
...extractShellConnectionHostable(coninfo, database),
queryType: coninfo.isReadOnly ? 'json' : 'native',
query: coninfo.isReadOnly ? display.getExportQueryJson() : display.getExportQuery(),
},
hostConnection: extractShellHostConnection(coninfo, database),
},
fmt,
display.getExportColumnMap()

View File

@@ -1,11 +1,11 @@
import _ from 'lodash';
import moment from 'moment';
import { ScriptWriter, ScriptWriterJson } from 'dbgate-tools';
import { ScriptWriterGeneric, ScriptWriterJavaScript, ScriptWriterJson } from 'dbgate-tools';
import getAsArray from '../utility/getAsArray';
import { getConnectionInfo } from '../utility/metadataLoaders';
import { findEngineDriver, findObjectLike } from 'dbgate-tools';
import { findFileFormat } from '../plugins/fileformats';
import { getCurrentConfig } from '../stores';
import { getCurrentConfig, getExtensions } from '../stores';
export function getTargetName(extensions, source, values) {
const key = `targetName_${source}`;
@@ -53,6 +53,32 @@ export function extractShellConnection(connection, database) {
};
}
export function extractShellConnectionHostable(connection, database) {
const driver = findEngineDriver(connection, getExtensions());
if (driver?.singleConnectionOnly) {
return {
systemConnection: { $hostConnection: true },
connection: driver.engine,
};
}
return {
connection: extractShellConnection(connection, database),
};
}
export function extractShellHostConnection(connection, database) {
const driver = findEngineDriver(connection, getExtensions());
if (driver?.singleConnectionOnly) {
return {
conid: connection._id,
database,
};
}
return undefined;
}
async function getConnection(extensions, storageType, conid, database) {
if (storageType == 'database' || storageType == 'query') {
const conn = await getConnectionInfo({ conid });
@@ -63,14 +89,23 @@ async function getConnection(extensions, storageType, conid, database) {
return [null, null];
}
function getSourceExpr(extensions, sourceName, values, sourceConnection, sourceDriver) {
function getSourceExpr(extensions, sourceName, values, sourceConnection, sourceDriver, hostConnection) {
const { sourceStorageType } = values;
const connectionParams =
sourceDriver?.singleConnectionOnly && hostConnection
? {
systemConnection: { $hostConnection: true },
connection: sourceDriver?.engine,
}
: {
connection: sourceConnection,
};
if (sourceStorageType == 'database') {
const fullName = { schemaName: values.sourceSchemaName, pureName: sourceName };
return [
'tableReader',
{
connection: sourceConnection,
...connectionParams,
...extractDriverApiParameters(values, 'source', sourceDriver),
...fullName,
},
@@ -80,7 +115,7 @@ function getSourceExpr(extensions, sourceName, values, sourceConnection, sourceD
return [
'queryReader',
{
connection: sourceConnection,
...connectionParams,
...extractDriverApiParameters(values, 'source', sourceDriver),
queryType: values.sourceQueryType,
query: values.sourceQueryType == 'json' ? JSON.parse(values.sourceQuery) : values.sourceQuery,
@@ -145,8 +180,17 @@ function getFlagsFroAction(action) {
};
}
function getTargetExpr(extensions, sourceName, values, targetConnection, targetDriver) {
function getTargetExpr(extensions, sourceName, values, targetConnection, targetDriver, hostConnection) {
const { targetStorageType } = values;
const connectionParams =
targetDriver?.singleConnectionOnly && hostConnection
? {
systemConnection: { $hostConnection: true },
connection: targetDriver?.engine,
}
: {
connection: targetConnection,
};
const format = findFileFormat(extensions, targetStorageType);
if (format && format.writerFunc) {
const outputParams = format.getOutputParams && format.getOutputParams(sourceName, values);
@@ -166,7 +210,7 @@ function getTargetExpr(extensions, sourceName, values, targetConnection, targetD
return [
'tableWriter',
{
connection: targetConnection,
...connectionParams,
schemaName: values.targetSchemaName,
pureName: getTargetName(extensions, sourceName, values),
...extractDriverApiParameters(values, 'target', targetDriver),
@@ -203,12 +247,12 @@ export function normalizeExportColumnMap(colmap) {
return null;
}
export default async function createImpExpScript(extensions, values, forceScript = false) {
export default async function createImpExpScript(extensions, values, format = undefined, detectHostConnection = false) {
const config = getCurrentConfig();
const script =
config.allowShellScripting || forceScript
? new ScriptWriter(values.startVariableIndex || 0)
: new ScriptWriterJson(values.startVariableIndex || 0);
let script: ScriptWriterGeneric = new ScriptWriterJson(values.startVariableIndex || 0);
if (format == 'script' && config.allowShellScripting) {
script = new ScriptWriterJavaScript(values.startVariableIndex || 0);
}
const [sourceConnection, sourceDriver] = await getConnection(
extensions,
@@ -223,15 +267,39 @@ export default async function createImpExpScript(extensions, values, forceScript
values.targetDatabaseName
);
let hostConnection = null;
if (detectHostConnection) {
// @ts-ignore
if (sourceDriver?.singleConnectionOnly) {
hostConnection = { conid: values.sourceConnectionId, database: values.sourceDatabaseName };
}
// @ts-ignore
if (targetDriver?.singleConnectionOnly) {
if (
hostConnection &&
(hostConnection.conid != values.targetConnectionId || hostConnection.database != values.targetDatabaseName)
) {
throw new Error('Cannot use two different single-connections in the same script');
}
hostConnection = { conid: values.targetConnectionId, database: values.targetDatabaseName };
}
}
const sourceList = getAsArray(values.sourceList);
for (const sourceName of sourceList) {
const sourceVar = script.allocVariable();
// @ts-ignore
script.assign(sourceVar, ...getSourceExpr(extensions, sourceName, values, sourceConnection, sourceDriver));
script.assign(
sourceVar,
// @ts-ignore
...getSourceExpr(extensions, sourceName, values, sourceConnection, sourceDriver, hostConnection)
);
const targetVar = script.allocVariable();
// @ts-ignore
script.assign(targetVar, ...getTargetExpr(extensions, sourceName, values, targetConnection, targetDriver));
script.assign(
targetVar,
// @ts-ignore
...getTargetExpr(extensions, sourceName, values, targetConnection, targetDriver, hostConnection)
);
const colmap = normalizeExportColumnMap(values[`columns_${sourceName}`]);
@@ -241,7 +309,12 @@ export default async function createImpExpScript(extensions, values, forceScript
script.assignValue(colmapVar, colmap);
}
script.copyStream(sourceVar, targetVar, colmapVar, sourceName);
script.copyStream(
sourceVar,
targetVar,
colmapVar,
hostConnection ? { name: sourceName, runid: { $runid: true } } : sourceName
);
script.endLine();
}
@@ -251,7 +324,11 @@ export default async function createImpExpScript(extensions, values, forceScript
script.zipDirectory('.', values.createZipFileInArchive ? 'archive:' + zipFileName : zipFileName);
}
return script.getScript(values.schedule);
const res = script.getScript(values.schedule);
if (format == 'json') {
res.hostConnection = hostConnection;
}
return res;
}
export function getActionOptions(extensions, source, values, targetDbinfo) {
@@ -289,7 +366,7 @@ export async function createPreviewReader(extensions, values, sourceName) {
values.sourceConnectionId,
values.sourceDatabaseName
);
const [functionName, props] = getSourceExpr(extensions, sourceName, values, sourceConnection, sourceDriver);
const [functionName, props] = getSourceExpr(extensions, sourceName, values, sourceConnection, sourceDriver, null);
return {
functionName,
props: {

View File

@@ -50,6 +50,8 @@
import { registerFileCommands } from '../commands/stdCommands';
import ToolStripCommandButton from '../buttons/ToolStripCommandButton.svelte';
import ToolStripSaveButton from '../buttons/ToolStripSaveButton.svelte';
import uuidv1 from 'uuid/v1';
import { tick } from 'svelte';
let busy = false;
let executeNumber = 0;
@@ -167,7 +169,7 @@
const handleGenerateScript = async e => {
const values = $formValues as any;
const code = await createImpExpScript($extensions, values, true);
const code = await createImpExpScript($extensions, values, 'script', false);
openNewTab(
{
title: 'Shell #',
@@ -183,12 +185,24 @@
progressHolder = {};
const values = $formValues as any;
busy = true;
const script = await createImpExpScript($extensions, values);
const script = await createImpExpScript($extensions, values, 'json', true);
executeNumber += 1;
let runid = runnerId;
const resp = await apiCall('runners/start', { script });
runid = resp.runid;
runnerId = runid;
if (script.hostConnection) {
runnerId = uuidv1();
await tick();
await apiCall('database-connections/eval-json-script', {
runid: runnerId,
conid: script.hostConnection.conid,
database: script.hostConnection.database,
script,
});
} else {
let runid = runnerId;
const resp = await apiCall('runners/start', { script });
runid = resp.runid;
runnerId = runid;
}
if (values.targetStorageType == 'archive') {
refreshArchiveFolderRef.set(values.targetArchiveFolder);

View File

@@ -107,7 +107,7 @@
</script>
<script lang="ts">
import { getContext, onDestroy, onMount } from 'svelte';
import { getContext, onDestroy, onMount, tick } from 'svelte';
import sqlFormatter from 'sql-formatter';
import VerticalSplitter from '../elements/VerticalSplitter.svelte';
@@ -143,6 +143,7 @@
import { isProApp } from '../utility/proTools';
import HorizontalSplitter from '../elements/HorizontalSplitter.svelte';
import QueryAiAssistant from '../query/QueryAiAssistant.svelte';
import uuidv1 from 'uuid/v1';
export let tabid;
export let conid;
@@ -198,7 +199,7 @@
onMount(() => {
intervalId = setInterval(() => {
if (sessionId) {
if (!driver?.singleConnectionOnly && sessionId) {
apiCall('sessions/ping', {
sesid: sessionId,
});
@@ -330,23 +331,34 @@
busy = true;
timerLabel.start();
let sesid = sessionId;
if (!sesid) {
const resp = await apiCall('sessions/create', {
if (driver?.singleConnectionOnly) {
sessionId = uuidv1();
await tick();
await apiCall('database-connections/execute-session-query', {
sesid: sessionId,
conid,
database,
sql,
});
} else {
let sesid = sessionId;
if (!sesid) {
const resp = await apiCall('sessions/create', {
conid,
database,
});
sesid = resp.sesid;
sessionId = sesid;
}
if (driver?.implicitTransactions) {
isInTransaction = true;
}
await apiCall('sessions/execute-query', {
sesid,
sql,
autoCommit: driver?.implicitTransactions && isAutocommit,
});
sesid = resp.sesid;
sessionId = sesid;
}
if (driver?.implicitTransactions) {
isInTransaction = true;
}
await apiCall('sessions/execute-query', {
sesid,
sql,
autoCommit: driver?.implicitTransactions && isAutocommit,
});
await apiCall('query-history/write', {
data: {
sql,

View File

@@ -1,4 +1,4 @@
import { ScriptWriter, ScriptWriterJson } from 'dbgate-tools';
import { ScriptWriterJson } from 'dbgate-tools';
import getElectron from './getElectron';
import {
showSnackbar,
@@ -10,58 +10,75 @@ import {
import resolveApi, { resolveApiHeaders } from './resolveApi';
import { apiCall, apiOff, apiOn } from './api';
import { normalizeExportColumnMap } from '../impexp/createImpExpScript';
import { getCurrentConfig } from '../stores';
import { showModal } from '../modals/modalTools';
import RunScriptModal from '../modals/RunScriptModal.svelte';
import { QuickExportDefinition } from 'dbgate-types';
import uuidv1 from 'uuid/v1';
export async function importSqlDump(inputFile, connection) {
const script = getCurrentConfig().allowShellScripting ? new ScriptWriter() : new ScriptWriterJson();
// export async function importSqlDump(inputFile, connection) {
// const script = getCurrentConfig().allowShellScripting ? new ScriptWriterJavaScript() : new ScriptWriterJson();
script.importDatabase({
inputFile,
connection,
});
// script.importDatabase({
// inputFile,
// connection,
// });
showModal(RunScriptModal, { script: script.getScript(), header: 'Importing database' });
// showModal(RunScriptModal, { script: script.getScript(), header: 'Importing database' });
// await runImportExportScript({
// script: script.getScript(),
// runningMessage: 'Importing database',
// canceledMessage: 'Database import canceled',
// finishedMessage: 'Database import finished',
// });
}
// // await runImportExportScript({
// // script: script.getScript(),
// // runningMessage: 'Importing database',
// // canceledMessage: 'Database import canceled',
// // finishedMessage: 'Database import finished',
// // });
// }
export async function exportSqlDump(outputFile, connection, databaseName, pureFileName) {
const script = getCurrentConfig().allowShellScripting ? new ScriptWriter() : new ScriptWriterJson();
// export async function exportSqlDump(outputFile, connection, databaseName, pureFileName) {
// const script = getCurrentConfig().allowShellScripting ? new ScriptWriterJavaScript() : new ScriptWriterJson();
script.dumpDatabase({
connection,
databaseName,
outputFile,
});
// script.dumpDatabase({
// connection,
// databaseName,
// outputFile,
// });
showModal(RunScriptModal, {
script: script.getScript(),
header: 'Exporting database',
onOpenResult:
pureFileName && !getElectron()
? () => {
downloadFromApi(`uploads/get?file=${pureFileName}`, 'file.sql');
}
: null,
openResultLabel: 'Download SQL file',
});
}
// showModal(RunScriptModal, {
// script: script.getScript(),
// header: 'Exporting database',
// onOpenResult:
// pureFileName && !getElectron()
// ? () => {
// downloadFromApi(`uploads/get?file=${pureFileName}`, 'file.sql');
// }
// : null,
// openResultLabel: 'Download SQL file',
// });
// }
async function runImportExportScript({ script, runningMessage, canceledMessage, finishedMessage, afterFinish = null }) {
async function runImportExportScript({
script,
runningMessage,
canceledMessage,
finishedMessage,
afterFinish = null,
hostConnection = null,
}) {
const electron = getElectron();
const resp = await apiCall('runners/start', { script });
const runid = resp.runid;
let runid;
let isCanceled = false;
if (hostConnection) {
runid = uuidv1();
await apiCall('database-connections/eval-json-script', {
runid,
conid: hostConnection.conid,
database: hostConnection.database,
script,
});
} else {
const resp = await apiCall('runners/start', { script });
runid = resp.runid;
}
const snackId = showSnackbar({
message: runningMessage,
icon: 'icon loading',
@@ -99,7 +116,14 @@ async function runImportExportScript({ script, runningMessage, canceledMessage,
apiOn(`runner-progress-${runid}`, handleRunnerProgress);
}
export async function saveExportedFile(filters, defaultPath, extension, dataName, getScript: (filaPath: string) => {}) {
export async function saveExportedFile(
filters,
defaultPath,
extension,
dataName,
getScript: (filaPath: string) => {},
hostConnection = null
) {
const electron = getElectron();
let filePath;
@@ -130,6 +154,7 @@ export async function saveExportedFile(filters, defaultPath, extension, dataName
downloadFromApi(`uploads/get?file=${pureFileName}`, defaultPath);
}
},
hostConnection,
});
}
@@ -140,7 +165,7 @@ function generateQuickExportScript(
dataName: string,
columnMap
) {
const script = getCurrentConfig().allowShellScripting ? new ScriptWriter() : new ScriptWriterJson();
const script = new ScriptWriterJson();
const sourceVar = script.allocVariable();
script.assign(sourceVar, reader.functionName, reader.props);
@@ -170,6 +195,7 @@ export async function exportQuickExportFile(dataName, reader, format: QuickExpor
runningMessage: `Exporting ${dataName}`,
canceledMessage: `Export ${dataName} canceled`,
finishedMessage: `Export ${dataName} finished`,
hostConnection: reader.hostConnection,
});
} else {
await saveExportedFile(
@@ -177,7 +203,8 @@ export async function exportQuickExportFile(dataName, reader, format: QuickExpor
`${dataName}.${format.extension}`,
format.extension,
dataName,
filePath => generateQuickExportScript(reader, format, filePath, dataName, columnMap)
filePath => generateQuickExportScript(reader, format, filePath, dataName, columnMap),
reader.hostConnection
);
}
}

View File

@@ -2,7 +2,7 @@ import { showModal } from '../modals/modalTools';
import { get } from 'svelte/store';
import newQuery from '../query/newQuery';
import getElectron from './getElectron';
import { currentDatabase, extensions, getCurrentDatabase } from '../stores';
import { extensions, getCurrentDatabase } from '../stores';
import { getUploadListener } from './uploadFiles';
import { getConnectionLabel, getDatabaseFileLabel } from 'dbgate-tools';
import { apiCall } from './api';
@@ -13,6 +13,7 @@ import _ from 'lodash';
import ErrorMessageModal from '../modals/ErrorMessageModal.svelte';
import { openImportExportTab } from './importExportTools';
import { switchCurrentDatabase } from './common';
import { _t } from '../translations';
export function canOpenByElectron(file, extensions) {
if (!file) return false;
@@ -22,7 +23,13 @@ export function canOpenByElectron(file, extensions) {
if (nameLower.endsWith('.qdesign')) return true;
if (nameLower.endsWith('.perspective')) return true;
if (nameLower.endsWith('.json')) return true;
if (nameLower.endsWith('.db') || nameLower.endsWith('.sqlite') || nameLower.endsWith('.sqlite3')) return true;
if (
nameLower.endsWith('.db') ||
nameLower.endsWith('.sqlite') ||
nameLower.endsWith('.sqlite3') ||
nameLower.endsWith('.duckdb')
)
return true;
for (const format of extensions.fileFormats) {
if (nameLower.endsWith(`.${format.extension}`)) return true;
if (format.extensions?.find(ext => nameLower.endsWith(`.${ext}`))) return true;
@@ -30,12 +37,12 @@ export function canOpenByElectron(file, extensions) {
return false;
}
export async function openSqliteFile(filePath) {
export async function openDatabaseFile(filePath, engine) {
const defaultDatabase = getDatabaseFileLabel(filePath);
const resp = await apiCall('connections/save', {
_id: undefined,
databaseFile: filePath,
engine: 'sqlite@dbgate-plugin-sqlite',
engine,
singleDatabase: true,
defaultDatabase,
});
@@ -136,7 +143,11 @@ export function openElectronFileCore(filePath, extensions) {
return;
}
if (nameLower.endsWith('.db') || nameLower.endsWith('.sqlite') || nameLower.endsWith('.sqlite')) {
openSqliteFile(filePath);
openDatabaseFile(filePath, 'sqlite@dbgate-plugin-sqlite');
return;
}
if (nameLower.endsWith('.duckdb')) {
openDatabaseFile(filePath, 'duckdb@dbgate-plugin-duckdb');
return;
}
if (nameLower.endsWith('.jsonl') || nameLower.endsWith('.ndjson')) {
@@ -225,7 +236,7 @@ export async function openElectronFile() {
const filePaths = await electron.showOpenDialog({
filters: [
{
name: `All supported files`,
name: _t('file.allSupported', { defaultMessage: 'All supported files' }),
extensions: [
'sql',
'sqlite',
@@ -235,15 +246,20 @@ export async function openElectronFile() {
'qdesign',
'perspective',
'json',
'duckdb',
...getFileFormatExtensions(ext),
],
},
{ name: `SQL files`, extensions: ['sql'] },
{ name: `JSON files`, extensions: ['json'] },
{ name: `Diagram files`, extensions: ['diagram'] },
{ name: `Query designer files`, extensions: ['qdesign'] },
{ name: `Perspective files`, extensions: ['perspective'] },
{ name: `SQLite database`, extensions: ['sqlite', 'db', 'sqlite3'] },
{ name: _t('file.sqlFiles', { defaultMessage: 'SQL files' }), extensions: ['sql'] },
{ name: _t('file.jsonFiles', { defaultMessage: 'JSON files' }), extensions: ['json'] },
{ name: _t('file.diagramFiles', { defaultMessage: 'Diagram files' }), extensions: ['diagram'] },
{ name: _t('file.queryDesignerFiles', { defaultMessage: 'Query designer files' }), extensions: ['qdesign'] },
{ name: _t('file.perspectiveFiles', { defaultMessage: 'Perspective files' }), extensions: ['perspective'] },
{
name: _t('file.sqliteDatabase', { defaultMessage: 'SQLite database' }),
extensions: ['sqlite', 'db', 'sqlite3'],
},
{ name: _t('file.duckdb', { defaultMessage: 'DuckDB database' }), extensions: ['duckdb', 'db'] },
...getFileFormatFilters(ext),
],
properties: ['showHiddenFiles', 'openFile'],

View File

@@ -41,9 +41,9 @@ class Analyser extends DatabaseAnalyser {
return res;
}
async singleObjectAnalysis(dbhan, typeField) {
const structure = await this._runAnalysis(dbhan, typeField);
const item = structure[typeField]?.find((i) => i.pureName === dbhan.pureName);
async singleObjectAnalysis(name, typeField) {
const structure = await this._runAnalysis(name, typeField);
const item = structure[typeField]?.find((i) => i.pureName === name.pureName);
return item;
}

25
plugins/dbgate-plugin-duckdb/.gitignore vendored Normal file
View File

@@ -0,0 +1,25 @@
# See https://help.github.com/articles/ignoring-files/ for more about ignoring files.
# dependencies
node_modules
/.pnp
.pnp.js
# testing
/coverage
# production
build
dist
lib
# misc
.DS_Store
.env.local
.env.development.local
.env.test.local
.env.production.local
npm-debug.log*
yarn-debug.log*
yarn-error.log*

View File

@@ -0,0 +1,6 @@
[![styled with prettier](https://img.shields.io/badge/styled_with-prettier-ff69b4.svg)](https://github.com/prettier/prettier)
[![NPM version](https://img.shields.io/npm/v/dbgate-plugin-duckdb.svg)](https://www.npmjs.com/package/dbgate-plugin-duckdb)
# dbgate-plugin-duckdb
Use DbGate to install this plugin

View File

@@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" width="64" height="64" viewBox="0 0 6.554 6.555" preserveAspectRatio="xMidYMid"><defs><linearGradient x1="2.983" y1=".53" x2="2.983" y2="4.744" id="A" gradientUnits="userSpaceOnUse"><stop stop-color="#97d9f6" offset="0%"/><stop stop-color="#0f80cc" offset="92.024%"/><stop stop-color="#0f80cc" offset="100%"/></linearGradient></defs><path d="M4.96.29H.847c-.276 0-.5.226-.5.5v4.536c0 .276.226.5.5.5h2.71c-.03-1.348.43-3.964 1.404-5.54z" fill="#0f80cc"/><path d="M4.81.437H.847c-.196 0-.355.16-.355.355v4.205c.898-.345 2.245-.642 3.177-.628A28.93 28.93 0 0 1 4.811.437z" fill="url(#A)"/><path d="M5.92.142c-.282-.25-.623-.15-.96.148l-.15.146c-.576.61-1.1 1.742-1.276 2.607a2.38 2.38 0 0 1 .148.426l.022.1.022.102s-.005-.02-.026-.08l-.014-.04a.461.461 0 0 0-.009-.022c-.038-.087-.14-.272-.187-.352a8.789 8.789 0 0 0-.103.321c.132.242.212.656.212.656s-.007-.027-.04-.12c-.03-.083-.176-.34-.21-.4-.06.22-.083.368-.062.404.04.07.08.2.115.324a7.52 7.52 0 0 1 .132.666l.005.062a6.11 6.11 0 0 0 .015.75c.026.313.075.582.137.726l.042-.023c-.09-.284-.128-.655-.112-1.084.025-.655.175-1.445.454-2.268C4.548 1.938 5.2.94 5.798.464c-.545.492-1.282 2.084-1.502 2.673-.247.66-.422 1.28-.528 1.873.182-.556.77-.796.77-.796s.29-.356.626-.865l-.645.172-.208.092s.53-.323.987-.47c.627-.987 1.31-2.39.622-3.002" fill="#003b57"/></svg>

After

Width:  |  Height:  |  Size: 1.3 KiB

View File

@@ -0,0 +1,45 @@
{
"name": "dbgate-plugin-duckdb",
"main": "dist/backend.js",
"version": "6.0.0-alpha.1",
"homepage": "https://dbgate.org",
"description": "DuckDB connect plugin for DbGate",
"repository": {
"type": "git",
"url": "https://github.com/dbgate/dbgate"
},
"author": "Jan Prochazka",
"license": "GPL-3.0",
"keywords": [
"dbgate",
"duckdb",
"dbgatebuiltin"
],
"files": [
"dist",
"icon.svg"
],
"scripts": {
"build:frontend": "webpack --config webpack-frontend.config",
"build:frontend:watch": "webpack --watch --config webpack-frontend.config",
"build:backend": "webpack --config webpack-backend.config.js",
"build": "yarn build:frontend && yarn build:backend",
"plugin": "yarn build && yarn pack && dbgate-plugin dbgate-plugin-duckdb",
"copydist": "yarn build && yarn pack && dbgate-copydist ../dist/dbgate-plugin-duckdb",
"plugout": "dbgate-plugout dbgate-plugin-duckdb",
"prepublishOnly": "yarn build"
},
"devDependencies": {
"dbgate-plugin-tools": "^1.0.4",
"webpack": "^5.91.0",
"webpack-cli": "^5.1.4"
},
"dependencies": {
"dbgate-tools": "^6.0.0-alpha.1",
"lodash": "^4.17.21",
"dbgate-query-splitter": "^4.11.3"
},
"optionalDependencies": {
"@duckdb/node-api": "^1.2.1-alpha.16"
}
}

View File

@@ -0,0 +1,8 @@
module.exports = {
trailingComma: 'es5',
tabWidth: 2,
semi: true,
singleQuote: true,
arrowParen: 'avoid',
printWidth: 120,
};

View File

@@ -0,0 +1,402 @@
/**
* @typedef {object} DuckDbStringList
* @property {string[]} items
*/
const extractIndexColumns = require('./extractIndexColumns');
/**
* @typedef {object} DuckDbColumnRow
* @property {number | null} numeric_scale
* @property {number | null} numeric_precision_radix
* @property {number | null} numeric_precision
* @property {number | null} character_maximum_length
* @property {string | null} data_type_id
* @property {string} data_type
* @property {boolean} is_nullable
* @property {string | null} column_default
* @property {boolean} internal
* @property {string | null} comment
* @property {number} column_index
* @property {string} column_name
* @property {string} table_oid
* @property {string} table_name
* @property {string} schema_oid
* @property {string} schema_name
* @property {string} database_oid
* @property {string} database_name
*/
/**
* @typedef {object} DuckDbConstraintRow
* @property {DuckDbStringList} referenced_column_names
* @property {string | null} referenced_table
* @property {string | null} constraint_name
* @property {DuckDbStringList} constraint_column_names
* @property {DuckDbStringList} constraint_column_indexes
* @property {string | null} expression
* @property {string | null} constraint_text
* @property {string} constraint_type
* @property {string} constraint_index
* @property {string} table_oid
* @property {string} table_name
* @property {string} schema_oid
* @property {string} schema_name
* @property {string} database_oid
* @property {string} database_name
*/
/**
* @typedef {object} DuckDbTableRow
* @property {string | null} sql
* @property {string} check_constraint_count
* @property {string} index_count
* @property {string} column_count
* @property {string} estimated_size
* @property {boolean} has_primary_key
* @property {boolean} temporary
* @property {boolean} internal
* @property {{ entries: Array<any> }} tags
* @property {string | null} comment
* @property {string} table_oid
* @property {string} table_name
* @property {string} schema_oid
* @property {string} schema_name
* @property {string} database_oid
* @property {string} database_name
*/
/**
* Represents a single row returned from the duckdb_views() function.
* Note: Assumes OIDs and counts are represented as strings based on previous examples.
*
* @typedef {object} DuckDbViewRow
* @property {string} database_name
* @property {string} database_oid
* @property {string} schema_name
* @property {string} schema_oid
* @property {string} view_name
* @property {string} view_oid
* @property {string | null} comment
* @property {{ [key: string]: string } | null} tags
* @property {boolean} internal
* @property {boolean} temporary
* @property {string} column_count
* @property {string | null} sql
*/
/**
 * Converts one duckdb_views() row into dbgate's ViewInfo metadata shape.
 * Comment and source SQL are only attached when present in the catalog row.
 * @param {DuckDbViewRow} duckDbViewRow
 * @returns {import("dbgate-types").ViewInfo}
 */
function mapViewRowToViewInfo(duckDbViewRow) {
  const { view_name, schema_name, view_oid, comment, sql } = duckDbViewRow;
  const result = {
    pureName: view_name,
    schemaName: schema_name,
    objectId: view_oid,
    objectTypeField: 'views',
    columns: [],
  };
  if (comment != null) result.objectComment = comment;
  if (sql != null) result.createSql = sql;
  return /** @type {import("dbgate-types").ViewInfo} */ (result);
}
/**
 * Converts one duckdb_tables() row into dbgate's table metadata header
 * (child collections such as columns/keys are attached by the analyser).
 * @param {DuckDbTableRow} rawTableData
 * @returns {{ pureName: string, schemaName: string, objectId: string, objectTypeField: string, objectComment: string | null }}
 */
function mapRawTableToTableInfo(rawTableData) {
  return {
    pureName: rawTableData.table_name,
    schemaName: rawTableData.schema_name,
    objectId: rawTableData.table_oid,
    objectTypeField: 'tables',
    objectComment: rawTableData.comment,
  };
}
/**
* @typedef {object} DuckDbColumnDataTypeInfo
* @property {string} data_type
* @property {number | null} numeric_precision
* @property {number | null} numeric_scale
* @property {number | null} character_maximum_length
*/
/**
 * Builds a display data-type string from a DuckDB column catalog row:
 * DECIMAL/NUMERIC get "(precision, scale)", VARCHAR/CHAR get "(length)",
 * everything else is just the upper-cased base type.
 * @param {DuckDbColumnDataTypeInfo | null | undefined} columnInfo
 * @returns {string} empty string when no usable type info is available
 */
function extractDataType(columnInfo) {
  // The declared contract allows null/undefined; previously this dereferenced
  // columnInfo.data_type unconditionally and would throw.
  if (columnInfo?.data_type == null) return '';
  const baseType = columnInfo.data_type.toUpperCase();
  const precision = columnInfo.numeric_precision;
  const scale = columnInfo.numeric_scale;
  const maxLength = columnInfo.character_maximum_length;
  switch (baseType) {
    case 'DECIMAL':
    case 'NUMERIC':
      if (typeof precision === 'number' && precision > 0 && typeof scale === 'number' && scale >= 0) {
        return `${baseType}(${precision}, ${scale})`;
      }
      return baseType;
    case 'VARCHAR':
    case 'CHAR':
      if (typeof maxLength === 'number' && maxLength > 0) {
        return `${baseType}(${maxLength})`;
      }
      return baseType;
    default:
      return baseType;
  }
}
/**
 * Converts one duckdb_columns() row into dbgate's ColumnInfo shape.
 * Optional attributes (default, comment, precision, scale, length) are only
 * set when the catalog row provides a non-null value.
 * @param {DuckDbColumnRow} duckDbColumnData
 */
function mapRawColumnToColumnInfo(duckDbColumnData) {
  const {
    table_name,
    schema_name,
    column_name,
    is_nullable,
    column_default,
    comment,
    numeric_precision,
    numeric_scale,
    character_maximum_length,
  } = duckDbColumnData;
  const result = {
    pureName: table_name,
    schemaName: schema_name,
    columnName: column_name,
    dataType: extractDataType(duckDbColumnData),
    notNull: !is_nullable,
  };
  if (column_default != null) result.defaultValue = column_default;
  if (comment != null) result.columnComment = comment;
  if (numeric_precision != null) result.precision = numeric_precision;
  if (numeric_scale != null) result.scale = numeric_scale;
  if (character_maximum_length != null) result.length = character_maximum_length;
  return result;
}
/**
 * Converts a duckdb_constraints() row of type FOREIGN KEY into dbgate's
 * ForeignKeyInfo shape; pairs constrained columns with referenced columns
 * by position.
 * @param {DuckDbConstraintRow} duckDbConstraintData
 * @returns {import("dbgate-types").ForeignKeyInfo | null} null when the row is
 *   not a usable foreign-key constraint
 */
function mapConstraintRowToForeignKeyInfo(duckDbConstraintData) {
  if (
    !duckDbConstraintData ||
    duckDbConstraintData.constraint_type !== 'FOREIGN KEY' ||
    duckDbConstraintData.referenced_table == null
  ) {
    return null;
  }
  const constraintColumns = duckDbConstraintData.constraint_column_names?.items;
  const referencedColumns = duckDbConstraintData.referenced_column_names?.items;
  // Guard against missing column lists (previously this crashed on .length;
  // now consistent with mapConstraintRowToUniqueInfo).
  if (!Array.isArray(constraintColumns) || !Array.isArray(referencedColumns)) {
    return null;
  }
  const columns = constraintColumns.map((columnName, i) => ({
    columnName,
    refColumnName: referencedColumns[i],
  }));
  const foreignKeyInfo = {
    pureName: duckDbConstraintData.table_name,
    schemaName: duckDbConstraintData.schema_name,
    constraintType: 'foreignKey',
    columns: columns,
    refTableName: duckDbConstraintData.referenced_table,
    // NOTE(review): the catalog row does not expose a referenced schema here;
    // assumes the referenced table lives in the same schema — confirm.
    refSchemaName: duckDbConstraintData.schema_name,
  };
  if (duckDbConstraintData.constraint_name != null) {
    foreignKeyInfo.constraintName = duckDbConstraintData.constraint_name;
  }
  return /** @type {import("dbgate-types").ForeignKeyInfo} */ (foreignKeyInfo);
}
/**
 * Converts a duckdb_constraints() row of type PRIMARY KEY into dbgate's
 * PrimaryKeyInfo shape. Rows are normally pre-filtered by the analyser SQL
 * (constraint_type = 'PRIMARY KEY').
 * @param {DuckDbConstraintRow} duckDbConstraintData
 * @returns {import("dbgate-types").PrimaryKeyInfo | null} null when the row has
 *   no usable column list
 */
function mapConstraintRowToPrimaryKeyInfo(duckDbConstraintData) {
  const constraintColumns = duckDbConstraintData?.constraint_column_names?.items;
  // Guard against missing column lists (previously this crashed on .length;
  // now consistent with mapConstraintRowToUniqueInfo).
  if (!Array.isArray(constraintColumns)) {
    return null;
  }
  const columns = constraintColumns.map((columnName) => ({ columnName }));
  const primaryKeyInfo = {
    pureName: duckDbConstraintData.table_name,
    schemaName: duckDbConstraintData.schema_name,
    constraintType: 'primaryKey',
    columns: columns,
  };
  if (duckDbConstraintData.constraint_name != null) {
    primaryKeyInfo.constraintName = duckDbConstraintData.constraint_name;
  }
  return /** @type {import("dbgate-types").PrimaryKeyInfo} */ (primaryKeyInfo);
}
/**
* @typedef {object} DuckDbConstraintRow
* @property {DuckDbStringList} referenced_column_names
* @property {string | null} referenced_table
* @property {string | null} constraint_name
* @property {DuckDbStringList} constraint_column_names
* @property {DuckDbStringList} constraint_column_indexes
* @property {string | null} expression
* @property {string | null} constraint_text
* @property {string} constraint_type
* @property {string} constraint_index
* @property {string} table_oid
* @property {string} table_name
* @property {string} schema_oid
* @property {string} schema_name
* @property {string} database_oid
* @property {string} database_name
*/
/**
 * Maps a single duckdb_constraints() row to a UniqueInfo object when it
 * represents a UNIQUE constraint with a non-empty column list.
 * @param {DuckDbConstraintRow} duckDbConstraintData - A single object conforming to DuckDbConstraintRow.
 * @returns {import("dbgate-types").UniqueInfo | null} UniqueInfo, or null for
 *   anything that is not a valid UNIQUE constraint.
 */
function mapConstraintRowToUniqueInfo(duckDbConstraintData) {
  if (duckDbConstraintData?.constraint_type !== 'UNIQUE') {
    return null;
  }
  const names = duckDbConstraintData.constraint_column_names?.items;
  if (!Array.isArray(names) || names.length === 0) {
    return null;
  }
  const result = {
    pureName: duckDbConstraintData.table_name,
    schemaName: duckDbConstraintData.schema_name,
    constraintType: 'unique',
    columns: names.map((columnName) => ({ columnName })),
  };
  if (duckDbConstraintData.constraint_name != null) {
    result.constraintName = duckDbConstraintData.constraint_name;
  }
  return /** @type {import("dbgate-types").UniqueInfo} */ (result);
}
/**
* @typedef {object} DuckDbIndexRow
* @property {string} database_name
* @property {string} database_oid
* @property {string} schema_name
* @property {string} schema_oid
* @property {string} index_name
* @property {string} index_oid
* @property {string} table_name
* @property {string} table_oid
* @property {string | null} comment
* @property {{ [key: string]: string } | null} tags
* @property {boolean} is_unique
* @property {boolean} is_primary
* @property {string | null} expressions
* @property {string | null} sql
*/
/**
 * Converts one duckdb_indexes() row into dbgate's IndexInfo shape.
 * duckdb_indexes() does not expose a column list directly, so the columns are
 * parsed out of the stored CREATE INDEX statement (`sql` field).
 * @param {DuckDbIndexRow} duckDbIndexRow
 * @returns {import("dbgate-types").IndexInfo}
 */
function mapIndexRowToIndexInfo(duckDbIndexRow) {
  const parsedColumns = extractIndexColumns(duckDbIndexRow.sql);
  const result = {
    pureName: duckDbIndexRow.table_name,
    schemaName: duckDbIndexRow.schema_name,
    constraintType: 'index',
    columns: parsedColumns.map((columnName) => ({ columnName })),
    isUnique: duckDbIndexRow.is_unique,
  };
  if (duckDbIndexRow.index_name != null) {
    result.constraintName = duckDbIndexRow.index_name;
  }
  return /** @type {import("dbgate-types").IndexInfo} */ (result);
}
/**
* @typedef {object} DuckDbSchemaRow
* @property {string} oid
* @property {string} database_name
* @property {string} database_oid
* @property {string} schema_name
* @property {string | null} comment
* @property {{ [key: string]: string } | null} tags
* @property {boolean} internal
* @property {string | null} sql
*/
/**
 * Converts one duckdb_schemas() row into dbgate's SchemaInfo shape.
 * @param {DuckDbSchemaRow} duckDbSchemaRow
 * @returns {import("dbgate-types").SchemaInfo}
 */
function mapSchemaRowToSchemaInfo(duckDbSchemaRow) {
  return /** @type {import("dbgate-types").SchemaInfo} */ ({
    schemaName: duckDbSchemaRow.schema_name,
    objectId: duckDbSchemaRow.oid,
  });
}
// Row-mapping helpers that convert duckdb_*() catalog rows into dbgate
// metadata shapes; consumed by Analyser.js and the backend driver.
module.exports = {
  mapRawTableToTableInfo,
  mapRawColumnToColumnInfo,
  mapConstraintRowToForeignKeyInfo,
  mapConstraintRowToPrimaryKeyInfo,
  mapConstraintRowToUniqueInfo,
  mapViewRowToViewInfo,
  mapIndexRowToIndexInfo,
  mapSchemaRowToSchemaInfo,
};

View File

@@ -0,0 +1,82 @@
const { DatabaseAnalyser } = require('dbgate-tools');
const sql = require('./sql');
const {
mapRawTableToTableInfo,
mapRawColumnToColumnInfo,
mapConstraintRowToForeignKeyInfo: mapDuckDbFkConstraintToForeignKeyInfo,
mapConstraintRowToPrimaryKeyInfo,
mapIndexRowToIndexInfo,
mapConstraintRowToUniqueInfo,
mapViewRowToViewInfo,
} = require('./Analyser.helpers');
/**
 * Database structure analyser for DuckDB: reads the duckdb_*() catalog
 * functions and assembles dbgate's tables/views metadata tree.
 */
class Analyser extends DatabaseAnalyser {
  constructor(dbhan, driver, version) {
    super(dbhan, driver, version);
  }

  async _computeSingleObjectId() {
    // Single-object analysis addresses objects as "<schema>.<name>";
    // DuckDB's default schema is 'main'.
    const { schemaName, pureName } = this.singleObjectFilter;
    this.singleObjectId = `${schemaName || 'main'}.${pureName}`;
  }

  createQuery(resFileName, typeFields) {
    const queryTemplate = sql[resFileName];
    if (!queryTemplate) throw new Error(`Missing analyse file ${resFileName}`);
    return super.createQuery(queryTemplate, typeFields);
  }

  async _runAnalysis() {
    // Catalog queries are issued sequentially on the single DuckDB connection.
    const tablesResult = await this.analyserQuery('tables', ['tables']);
    const columnsResult = await this.analyserQuery('columns', ['tables']);
    const foreignKeysResult = await this.analyserQuery('foreignKeys', ['tables']);
    const primaryKeysResult = await this.analyserQuery('primaryKeys', ['tables']);
    const uniquesResult = await this.analyserQuery('uniques', ['tables']);
    const indexesResult = await this.analyserQuery('indexes', ['tables']);
    const viewsResult = await this.analyserQuery('views', ['views']);

    /** @type {import('dbgate-types').ForeignKeyInfo[]} */
    const foreignKeys = foreignKeysResult.rows?.map(mapDuckDbFkConstraintToForeignKeyInfo).filter(Boolean);
    /** @type {import('dbgate-types').PrimaryKeyInfo[]} */
    const primaryKeys = primaryKeysResult.rows?.map(mapConstraintRowToPrimaryKeyInfo).filter(Boolean);
    /** @type {import('dbgate-types').UniqueInfo[]} */
    const uniques = uniquesResult.rows?.map(mapConstraintRowToUniqueInfo).filter(Boolean);
    /** @type {import('dbgate-types').IndexInfo[]} */
    const indexes = indexesResult.rows?.map(mapIndexRowToIndexInfo).filter(Boolean);
    const views = viewsResult.rows?.map(mapViewRowToViewInfo);
    const columns = columnsResult.rows?.map(mapRawColumnToColumnInfo);
    const tables = tablesResult.rows?.map(mapRawTableToTableInfo);

    // Predicate factory matching child items (columns, keys, ...) that belong
    // to the given table/view (loose equality kept from the original code).
    const belongsTo = (owner) => (x) => x.pureName == owner.pureName && x.schemaName == owner.schemaName;

    return {
      tables: tables.map((table) => ({
        ...table,
        columns: columns.filter(belongsTo(table)),
        foreignKeys: foreignKeys.filter(belongsTo(table)),
        primaryKey: primaryKeys.find(belongsTo(table)),
        indexes: indexes.filter(belongsTo(table)),
        uniques: uniques.filter(belongsTo(table)),
      })),
      views: views.map((view) => ({
        ...view,
        columns: columns.filter(belongsTo(view)),
      })),
    };
  }
}
module.exports = Analyser;

View File

@@ -0,0 +1,196 @@
// @ts-check
//
const stream = require('stream');
const Analyser = require('./Analyser');
const driverBase = require('../frontend/driver');
const { getLogger, extractErrorLogData, createBulkInsertStreamBase } = require('dbgate-tools');
const { getColumnsInfo, normalizeRow } = require('./helpers');
const sql = require('./sql');
const { mapSchemaRowToSchemaInfo } = require('./Analyser.helpers');
// Fixed: logger was named 'sqliteDriver' (copied from the SQLite plugin);
// this is the DuckDB driver.
const logger = getLogger('duckdbDriver');

/**
 * Lazily-loaded @duckdb/node-api module (optional native dependency; loading
 * it eagerly would fail on platforms without the binding installed).
 * @type {import('@duckdb/node-api')}
 */
let duckDb;

// Loads and caches the DuckDB native bindings on first use.
function getDuckDb() {
  if (!duckDb) {
    duckDb = require('@duckdb/node-api');
  }
  return duckDb;
}
/**
 * DbGate engine driver for DuckDB, extending the shared frontend driver
 * definition (dialect, engine id, connection fields). The database handle
 * (`dbhan`) wraps a single DuckDBConnection in `client`.
 * @type {import('dbgate-types').EngineDriver<import('@duckdb/node-api').DuckDBConnection>}
 */
const driver = {
  ...driverBase,
  analyserClass: Analyser,

  // Opens (or creates) the database file and returns the connection handle.
  // NOTE(review): isReadOnly is accepted but not applied — confirm whether
  // DuckDBInstance.create supports a read-only open option.
  async connect({ databaseFile, isReadOnly }) {
    const instance = await getDuckDb().DuckDBInstance.create(databaseFile);
    const connection = await instance.connect();
    return {
      client: connection,
    };
  },

  async close(dbhan) {
    dbhan.client.disconnect();
    dbhan.client.close();
  },

  // Runs a single SQL text and materializes all rows. `readonly` is currently
  // unused. normalizeRow converts DuckDB values (e.g. BigInt) to plain JS.
  async query(dbhan, sql, { readonly } = {}) {
    const res = await dbhan.client.runAndReadAll(sql);
    const rowsObjects = res.getRowObjects();
    const columnNames = res.columnNames();
    const columnTypes = res.columnTypes();
    const columns = getColumnsInfo(columnNames, columnTypes).map(normalizeRow);
    const rows = rowsObjects.map(normalizeRow);
    return {
      rows,
      columns,
    };
  },

  // Executes a multi-statement script, emitting recordset/row events for
  // statements that produce a result set; errors are reported via options.info.
  async stream(dbhan, sql, options) {
    const duckdb = getDuckDb();
    // Statement types that return rows; everything else (DDL/DML) is executed
    // but produces no recordset events.
    const returningStatementTypes = [
      duckdb.StatementType.SELECT,
      duckdb.StatementType.EXPLAIN,
      duckdb.StatementType.EXECUTE,
      duckdb.StatementType.RELATION,
      duckdb.StatementType.LOGICAL_PLAN,
    ];
    try {
      const statements = await dbhan.client.extractStatements(sql);
      const count = statements.count;
      for (let i = 0; i < count; i++) {
        let hasSentColumns = false;
        const stmt = await statements.prepare(i);
        const res = await stmt.runAndReadAll();
        if (!returningStatementTypes.includes(stmt.statementType)) {
          continue;
        }
        if (!hasSentColumns) {
          const columnNames = res.columnNames();
          const columnTypes = res.columnTypes();
          const columns = getColumnsInfo(columnNames, columnTypes);
          options.recordset(columns);
          hasSentColumns = true;
        }
        const rows = res.getRowObjects();
        for (const row of rows) {
          options.row(normalizeRow(row));
        }
      }
      options.done();
    } catch (error) {
      logger.error(extractErrorLogData(error), 'Stream error');
      const { message, procName } = error;
      options.info({
        message,
        line: 0,
        procedure: procName,
        time: new Date(),
        severity: 'error',
      });
      options.done();
    }
  },

  // Executes a script statement-by-statement, optionally wrapped in a
  // transaction (BEGIN/COMMIT generated by the dumper).
  async script(dbhan, sql, { useTransaction } = { useTransaction: false }) {
    if (useTransaction) {
      const dmp1 = driver.createDumper();
      dmp1.beginTransaction();
      await dbhan.client.run(dmp1.s);
    }
    const statements = await dbhan.client.extractStatements(sql);
    const count = statements.count;
    for (let i = 0; i < count; i++) {
      const stmt = await statements.prepare(i);
      await stmt.run();
    }
    if (useTransaction) {
      const dmp2 = driver.createDumper();
      dmp2.commitTransaction();
      await dbhan.client.run(dmp2.s);
    }
  },

  // Runs a query and returns its rows as an object-mode stream prefixed with
  // a structure header record.
  async readQuery(dbhan, sql, structure) {
    const pass = new stream.PassThrough({
      objectMode: true,
      highWaterMark: 100,
    });
    const res = await dbhan.client.runAndReadAll(sql);
    const rowsObjects = res.getRowObjects();
    const columnNames = res.columnNames();
    const columnTypes = res.columnTypes();
    const columns = getColumnsInfo(columnNames, columnTypes).map(normalizeRow);
    const rows = rowsObjects.map(normalizeRow);
    pass.write({
      __isStreamHeader: true,
      ...(structure || {
        // Fixed: getColumnsInfo emits { columnName, dataType }; the previous
        // col.name/col.type reads were always undefined in the header.
        columns: columns.map((col) => ({
          columnName: col.columnName,
          dataType: col.dataType,
        })),
      }),
    });
    for (const row of rows) {
      pass.write(row);
    }
    pass.end();
    return pass;
  },

  async writeTable(dbhan, name, options) {
    return createBulkInsertStreamBase(this, stream, dbhan, name, options);
  },

  // Reads the engine version via `SELECT version()`; assumes one row back.
  async getVersion(dbhan) {
    const { rows } = await this.query(dbhan, 'SELECT version() AS version;');
    const { version } = rows?.[0];
    return {
      version,
      versionText: `DuckDB ${version}`,
    };
  },

  async listSchemas(dbhan) {
    const schemasResult = await this.query(dbhan, sql.schemas);
    const schemas = schemasResult.rows?.map(mapSchemaRowToSchemaInfo);
    return schemas ?? null;
  },
};
module.exports = driver;

View File

@@ -0,0 +1,70 @@
// This algorithm was originally implemented by ChatGPT o3.
/**
 * Returns the raw text between the parentheses that follow the "ON <table>"
 * part of a CREATE INDEX statement, or '' when no such list is found.
 * Handles quoted and schema-qualified table names and nested parentheses.
 */
function sliceColumnList(ddl) {
  // Find the first "(" that follows the ON <table> qualifier.
  const onMatch = /ON\s+(?:"[^"]+"|\w+)(?:\s*\.\s*(?:"[^"]+"|\w+))*\s*\(/i.exec(ddl);
  if (!onMatch) return '';
  const openParen = onMatch.index + onMatch[0].length - 1; // points at the "("
  // Walk forward until the matching closing parenthesis.
  let nesting = 0;
  for (let pos = openParen; pos < ddl.length; pos++) {
    const ch = ddl[pos];
    if (ch === '(') {
      nesting++;
    } else if (ch === ')') {
      nesting--;
      if (nesting === 0) {
        return ddl.slice(openParen + 1, pos); // inside the ()
      }
    }
  }
  return '';
}
/**
 * Splits a comma-separated list on top-level commas only (commas inside
 * parentheses are kept); empty segments are dropped.
 */
function splitTopLevel(list) {
  const segments = [];
  let nesting = 0;
  let segmentStart = 0;
  for (let pos = 0; pos <= list.length; pos++) {
    // Treat the end of the string as a virtual trailing comma.
    const ch = pos < list.length ? list[pos] : ',';
    if (ch === '(') {
      nesting++;
    } else if (ch === ')') {
      nesting--;
    }
    if (ch === ',' && nesting === 0) {
      const segment = list.slice(segmentStart, pos).trim();
      if (segment) segments.push(segment);
      segmentStart = pos + 1;
    }
  }
  return segments;
}
/**
 * Normalizes one index-column segment to a bare column name:
 * removes ASC/DESC and NULLS FIRST/LAST, un-wraps a single function call
 * (LOWER(col) -> col), drops any schema/table prefix, and strips quotes.
 */
function clean(segment) {
  let result = segment.replace(/\bASC\b|\bDESC\b|\bNULLS\s+(FIRST|LAST)\b/gi, '');
  result = result.replace(/^[A-Za-z_][\w$]*\(\s*([^()]+?)\s*\)$/i, '$1');
  result = result.replace(/^.*\.(?=[^.)]+$)/, '');
  result = result.replace(/"/g, '');
  return result.trim();
}
/**
 * Extracts the indexed column names from a stored CREATE INDEX statement.
 * Returns [] when the SQL is empty or has no recognizable column list.
 */
function extractIndexColumns(sql) {
  if (!sql) return [];
  const sqlText = sql
    // Strip line comments BEFORE collapsing whitespace: the previous order
    // collapsed newlines first, so a single "--" comment swallowed the rest
    // of the statement.
    .replace(/--.*?$/gm, '')
    .replace(/\s+/g, ' ') // collapse whitespace
    .trim();
  const list = sliceColumnList(sqlText);
  if (!list) return [];
  return splitTopLevel(list).map(clean);
}
module.exports = extractIndexColumns;

View File

@@ -0,0 +1,65 @@
/**
 * Builds dbgate column descriptors from DuckDB's parallel name/type arrays.
 * @param {string[]} columnNames
 * @param {import('@duckdb/node-api').DuckDBType[]} columnTypes
 * @returns {{ columnName: string, dataType: string }[]} columns in result-set order
 */
function getColumnsInfo(columnNames, columnTypes) {
  const columns = [];
  // Iterate forward so the output preserves the result-set column order
  // (the previous reverse loop emitted the columns backwards).
  for (let i = 0; i < columnNames.length; i++) {
    columns.push({
      columnName: columnNames[i],
      dataType: columnTypes[i].toString(),
    });
  }
  return columns;
}
/**
 * Recursively converts a DuckDB result value into plain JSON-friendly JS:
 * BigInt -> number (may lose precision beyond Number.MAX_SAFE_INTEGER),
 * arrays and plain objects are normalized element-by-element.
 */
function _normalizeValue(value) {
  if (value === null) {
    return null;
  }
  if (typeof value === 'bigint') {
    return parseInt(value);
  }
  if (Array.isArray(value)) {
    return value.map((item) => _normalizeValue(item));
  }
  if (typeof value === 'object') {
    const result = {};
    for (const [key, item] of Object.entries(value)) {
      result[key] = _normalizeValue(item);
    }
    return result;
  }
  return value;
}
/**
 * Normalizes one result row: converts every DuckDB value in the row object to
 * plain JS via _normalizeValue; non-object inputs are normalized directly.
 * @param {Record<string, import('@duckdb/node-api').DuckDBValue>} obj
 */
function normalizeRow(obj) {
  const isPlainObject = typeof obj === 'object' && obj !== null && !Array.isArray(obj);
  if (!isPlainObject) {
    return _normalizeValue(obj);
  }
  const result = {};
  for (const [key, value] of Object.entries(obj)) {
    result[key] = _normalizeValue(value);
  }
  return result;
}
// Shared row/column normalization helpers used by the backend driver.
module.exports = {
  normalizeRow,
  getColumnsInfo,
};

View File

@@ -0,0 +1,6 @@
const driver = require('./driver');

// Plugin entry point consumed by the DbGate backend plugin loader.
module.exports = {
  packageName: 'dbgate-plugin-duckdb',
  drivers: [driver],
};

View File

@@ -0,0 +1 @@
// Analyser query: all non-internal columns. The `=OBJECT_ID_CONDITION` token
// (including the leading '=') is presumably substituted by the analyser's
// createQuery — confirm against dbgate-tools DatabaseAnalyser.
module.exports = `SELECT * from duckdb_columns() WHERE internal = false and (schema_name || '.' || table_name) =OBJECT_ID_CONDITION`;

View File

@@ -0,0 +1 @@
// Analyser query: all FOREIGN KEY constraints from the DuckDB catalog.
module.exports = `SELECT * FROM duckdb_constraints() WHERE constraint_type = 'FOREIGN KEY'`;

View File

@@ -0,0 +1,19 @@
// Aggregates the analyser SQL snippets into one lookup object, keyed by the
// names used in Analyser.createQuery()/analyserQuery().
const tables = require('./tables.js');
const columns = require('./columns.js');
const foreignKeys = require('./foreignKeys.js');
const primaryKeys = require('./primaryKeys.js');
const indexes = require('./indexes.js');
const uniques = require('./uniques.js');
const views = require('./views.js');
const schemas = require('./schemas.js');

module.exports = {
  tables,
  columns,
  foreignKeys,
  primaryKeys,
  indexes,
  uniques,
  views,
  schemas,
};

View File

@@ -0,0 +1 @@
// Analyser query: all indexes (column lists are later parsed from the `sql` field).
module.exports = `SELECT * FROM duckdb_indexes()`;

View File

@@ -0,0 +1 @@
// Analyser query: all PRIMARY KEY constraints from the DuckDB catalog.
module.exports = `SELECT * FROM duckdb_constraints() WHERE constraint_type = 'PRIMARY KEY'`;

View File

@@ -0,0 +1 @@
// Analyser query: all non-internal schemas.
module.exports = `SELECT * FROM duckdb_schemas() WHERE internal = false`;

View File

@@ -0,0 +1 @@
// Analyser query: all non-internal tables. The `=OBJECT_ID_CONDITION` token
// (including the leading '=') is presumably substituted by the analyser's
// createQuery — confirm against dbgate-tools DatabaseAnalyser.
module.exports = `SELECT * from duckdb_tables() WHERE internal = false and (schema_name || '.' || table_name) =OBJECT_ID_CONDITION`;

View File

@@ -0,0 +1 @@
// Analyser query: all UNIQUE constraints from the DuckDB catalog.
module.exports = `SELECT * FROM duckdb_constraints() WHERE constraint_type = 'UNIQUE'`;

View File

@@ -0,0 +1 @@
// Analyser query: all non-internal views, filtered by the substituted
// `=OBJECT_ID_CONDITION` token (see columns.js note).
module.exports = `SELECT * FROM duckdb_views() WHERE internal = false and (schema_name || '.' || view_name) =OBJECT_ID_CONDITION`;

View File

@@ -0,0 +1,19 @@
const { SqlDumper, arrayToHexString } = require('dbgate-tools');

// DDL generator for DuckDB. The putCmd format codes are interpreted by the
// SqlDumper base class; from usage here: ^word emits a keyword, %f a full
// object name, %i an identifier, %k a raw keyword argument.
// NOTE(review): format-code meanings inferred from usage — confirm against
// dbgate-tools SqlDumper.
class Dumper extends SqlDumper {
  // Intentionally emits nothing: no auto-increment column suffix is rendered
  // for DuckDB. NOTE(review): confirm identity/sequence handling elsewhere.
  autoIncrement() {}

  // ALTER <object-kind> <name> RENAME TO <newname>
  renameSqlObject(obj, newname) {
    this.putCmd('^alter %k %f ^rename ^to %i', this.getSqlObjectSqlName(obj.objectTypeField), obj, newname);
  }

  // ALTER TABLE <name> RENAME TO <newname>
  renameTable(obj, newname) {
    this.putCmd('^alter ^table %f ^rename ^to %i', obj, newname);
  }

  // ALTER TABLE <table> RENAME <oldcolumn> TO <newcolumn>
  renameColumn(column, newcol) {
    this.putCmd('^alter ^table %f ^rename %i ^to %i', column, column.columnName, newcol);
  }
}
module.exports = Dumper;

View File

@@ -0,0 +1,78 @@
// @ts-check
const { driverBase } = global.DBGATE_PACKAGES['dbgate-tools'];
const Dumper = require('./Dumper');
const { sqliteSplitterOptions, noSplitSplitterOptions } = require('dbgate-query-splitter/lib/options');
/**
 * Returns the file name (last path segment) of a database file path, used as
 * the default database label; falls back to the unchanged input.
 * Handles both '/' and '\' separators (the previous regex ignored Windows
 * backslash paths and returned the whole path).
 * @param {string} databaseFile
 * @returns {string}
 */
function getDatabaseFileLabel(databaseFile) {
  if (!databaseFile) return databaseFile;
  const m = databaseFile.match(/[\/\\]([^\/\\]+)$/);
  if (m) return m[1];
  return databaseFile;
}
/** @type {import('dbgate-types').SqlDialect} */
const dialect = {
limitSelect: true,
rangeSelect: true,
defaultSchemaName: 'main',
offsetFetchRangeSyntax: false,
explicitDropConstraint: true,
stringEscapeChar: "'",
fallbackDataType: 'nvarchar',
allowMultipleValuesInsert: true,
dropColumnDependencies: ['indexes', 'primaryKey', 'uniques'],
quoteIdentifier(s) {
return `"${s}"`;
},
anonymousPrimaryKey: true,
requireStandaloneSelectForScopeIdentity: true,
createColumn: true,
dropColumn: true,
multipleSchema: true,
createIndex: true,
dropIndex: true,
createForeignKey: false,
enableForeignKeyChecks: false,
// dropCheck: true,
// dropUnique: true,
// dropForeignKey: true,
createPrimaryKey: false,
// dropPrimaryKey: true,
dropReferencesWhenDropTable: true,
// dropIndexContainsTableSpec: true,
// filteredIndexes: true,
anonymousForeignKey: true,
};
/** @type {import('dbgate-types').EngineDriver} */
const driver = {
...driverBase,
dumperClass: Dumper,
dialect,
engine: 'duckdb@dbgate-plugin-duckdb',
title: 'DuckDB',
readOnlySessions: true,
supportsTransactions: true,
singleConnectionOnly: true,
getQuerySplitterOptions: (usage) =>
usage == 'editor'
? { ...sqliteSplitterOptions, ignoreComments: true, preventSingleLineSplit: true }
: usage == 'stream'
? noSplitSplitterOptions
: sqliteSplitterOptions,
showConnectionTab: (field) => false,
showConnectionField: (field) => ['databaseFile'].includes(field),
beforeConnectionSave: (connection) => ({
...connection,
singleDatabase: true,
defaultDatabase: getDatabaseFileLabel(connection.databaseFile),
}),
};
module.exports = driver;

View File

@@ -0,0 +1,6 @@
import driver from './driver';

// Plugin entry point consumed by the DbGate web frontend plugin loader.
export default {
  packageName: 'dbgate-plugin-duckdb',
  drivers: [driver],
};

View File

@@ -0,0 +1,28 @@
var webpack = require('webpack');
var path = require('path');
const packageJson = require('./package.json');
const buildPluginExternals = require('../../common/buildPluginExternals');
const externals = buildPluginExternals(packageJson);
var config = {
context: __dirname + '/src/backend',
entry: {
app: './index.js',
},
target: 'node',
output: {
path: path.resolve(__dirname, 'dist'),
filename: 'backend.js',
libraryTarget: 'commonjs2',
},
// uncomment for disable minimalization
// optimization: {
// minimize: false,
// },
externals,
};
module.exports = config;

View File

@@ -0,0 +1,30 @@
var webpack = require('webpack');
var path = require('path');
var config = {
context: __dirname + '/src/frontend',
entry: {
app: './index.js',
},
target: 'web',
output: {
path: path.resolve(__dirname, 'dist'),
filename: 'frontend.js',
libraryTarget: 'var',
library: 'plugin',
},
plugins: [
new webpack.DefinePlugin({
'global.DBGATE_PACKAGES': 'window.DBGATE_PACKAGES',
}),
],
// uncomment for disable minimalization
// optimization: {
// minimize: false,
// },
};
module.exports = config;

View File

@@ -1,7 +1,6 @@
// @ts-check
const _ = require('lodash');
const stream = require('stream');
const sqliteDriver = require('./driver.sqlite');
const driverBases = require('../frontend/drivers');
const Analyser = require('./Analyser');
const { splitQuery, sqliteSplitterOptions } = require('dbgate-query-splitter');
@@ -30,7 +29,6 @@ const libsqlDriver = {
...driverBases[1],
analyserClass: Analyser,
async connect({ databaseFile, isReadOnly, authToken, databaseUrl, ...rest }) {
console.log('connect', databaseFile, isReadOnly, authToken, databaseUrl, rest);
const Database = getLibsql();
const client = databaseFile
? new Database(databaseFile, { readonly: !!isReadOnly })

View File

@@ -26,6 +26,10 @@
"command.datagrid.undo": "Undo",
"command.datagrid.viewJsonDocument": "View row as JSON document",
"command.datagrid.viewJsonValue": "View cell as JSON document",
"command.new.duckdbDatabase": "New DuckDB database",
"command.new.duckdbDatabase.header": "Create DuckDB database",
"command.new.sqliteDatabase": "New SQLite database",
"command.new.sqliteDatabase.header": "Create SQLite database",
"command.tabs.addToFavorites": "Add current tab to favorites",
"command.tabs.closeAll": "Close all tabs",
"command.tabs.closeTab": "Close tab",
@@ -51,7 +55,6 @@
"connection.newQuery": "New Query (server)",
"connection.refresh": "Refresh",
"connection.serverSummary": "Server summary",
"connection.sqlRestore": "Restore/import SQL dump",
"connection.viewDetails": "View details",
"error.driverNotFound": "Invalid database connection, driver not found",
"importExport.sourceFiles": "Source files",
@@ -61,6 +64,7 @@
"schema.delete": "Delete schema",
"schema.resetToDefault": "Reset to default",
"schema.schemaName": "Schema name",
"settings.behaviour.jsonPreviewWrap": "Wrap JSON in preview",
"settings.localization": "Localization",
"tab.administration": "Administration",
"widget.databaseContent": "Database content",

View File

@@ -1169,6 +1169,49 @@
resolved "https://registry.yarnpkg.com/@discoveryjs/json-ext/-/json-ext-0.5.7.tgz#1d572bfbbe14b7704e0ba0f39b74815b84870d70"
integrity sha512-dBVuXR082gk3jsFp7Rd/JI4kytwGHecnCoTtXFb7DB6CNHp4rg5k1bhg0nWdLGLnOV71lmDzGQaLMy8iPLY0pw==
"@duckdb/node-api@^1.2.1-alpha.16":
version "1.2.1-alpha.16"
resolved "https://registry.yarnpkg.com/@duckdb/node-api/-/node-api-1.2.1-alpha.16.tgz#4e0d8a17f227eed336ab4d50922bfc7be0b1024d"
integrity sha512-r2wkrqcDl3IsmFffpTj7xgSUEkjVaqi7wV0uypQWw4wTM5bIYk5ABc/Hxn2xlwV9UBRnxhk0Ls0EJypYS7g8ZQ==
dependencies:
"@duckdb/node-bindings" "1.2.1-alpha.16"
"@duckdb/node-bindings-darwin-arm64@1.2.1-alpha.16":
version "1.2.1-alpha.16"
resolved "https://registry.yarnpkg.com/@duckdb/node-bindings-darwin-arm64/-/node-bindings-darwin-arm64-1.2.1-alpha.16.tgz#a7061c8fc8d968bf9657211ccacd1139bfad96af"
integrity sha512-anfLXcxjo6S0Kx8Z+e6/ca7WayprJ8iI4cpTvzWQc9NT/vKFHcGjvhGAiosHvtjWGOvAYo+O/eyAcmzMzazlMg==
"@duckdb/node-bindings-darwin-x64@1.2.1-alpha.16":
version "1.2.1-alpha.16"
resolved "https://registry.yarnpkg.com/@duckdb/node-bindings-darwin-x64/-/node-bindings-darwin-x64-1.2.1-alpha.16.tgz#7d21558a50384115ba8eb8c41a84d689e2c797e9"
integrity sha512-IA2bQ/f0qFYb7Sd+leSjNg/JMBpWVVBoCmqp/1zzlw6fwhtT0BMSAT3FL4306t5StA8biOznlHz3rN3jovdVxg==
"@duckdb/node-bindings-linux-arm64@1.2.1-alpha.16":
version "1.2.1-alpha.16"
resolved "https://registry.yarnpkg.com/@duckdb/node-bindings-linux-arm64/-/node-bindings-linux-arm64-1.2.1-alpha.16.tgz#5915a71f3520b8a2cfbf63ad89d63198aec6db57"
integrity sha512-zy9jTrrhTXJAOrYRTbT/HtBLClAoyo8vNRAqojFHVBxXL1nr4o+5Je9AJwb9IfS1/e38zdykDWeGnY/gB3NpfA==
"@duckdb/node-bindings-linux-x64@1.2.1-alpha.16":
version "1.2.1-alpha.16"
resolved "https://registry.yarnpkg.com/@duckdb/node-bindings-linux-x64/-/node-bindings-linux-x64-1.2.1-alpha.16.tgz#8cff0e412d6201b57069c311775c375268cd5707"
integrity sha512-tdDAhUKenBhUQiTN+qvKj6nBshoooBLPbxVuLas8v64KmphjxOHd9zQ2KMzw1tN+fXhV9dqUTbCqiUN9A6ZFSQ==
"@duckdb/node-bindings-win32-x64@1.2.1-alpha.16":
version "1.2.1-alpha.16"
resolved "https://registry.yarnpkg.com/@duckdb/node-bindings-win32-x64/-/node-bindings-win32-x64-1.2.1-alpha.16.tgz#9fb46578f55d5b24524ea9a2d791f8fa6982e613"
integrity sha512-FE9bZV8+LIiy5jQIsLoEmwLFIyPPJD4SXjKNCaU48DNVf1q81ZkhnoT7PVgeyNJmkR6rG2+mq6LzTSmdCBX0ig==
"@duckdb/node-bindings@1.2.1-alpha.16":
version "1.2.1-alpha.16"
resolved "https://registry.yarnpkg.com/@duckdb/node-bindings/-/node-bindings-1.2.1-alpha.16.tgz#700ce66c74772be7a870ae68dd91a968d736ed7b"
integrity sha512-6ITHy26o99zxUhCGOxwkQbfmi5I8VXNGanhnrOe3pqUYRDXvGAe6T2MmBymYwU+fMZB341UE8krw7hUkPLfIeA==
optionalDependencies:
"@duckdb/node-bindings-darwin-arm64" "1.2.1-alpha.16"
"@duckdb/node-bindings-darwin-x64" "1.2.1-alpha.16"
"@duckdb/node-bindings-linux-arm64" "1.2.1-alpha.16"
"@duckdb/node-bindings-linux-x64" "1.2.1-alpha.16"
"@duckdb/node-bindings-win32-x64" "1.2.1-alpha.16"
"@gar/promisify@^1.0.1":
version "1.1.3"
resolved "https://registry.yarnpkg.com/@gar/promisify/-/promisify-1.1.3.tgz#555193ab2e3bb3b6adc3d551c9c030d9e860daf6"