Merge branch 'master' into feature/1137-mssql-column-desc

This commit is contained in:
Pavel
2025-08-07 12:53:09 +02:00
102 changed files with 1333 additions and 341 deletions

View File

@@ -39,7 +39,7 @@ jobs:
repository: dbgate/dbgate-pro repository: dbgate/dbgate-pro
token: ${{ secrets.GH_TOKEN }} token: ${{ secrets.GH_TOKEN }}
path: dbgate-pro path: dbgate-pro
ref: 36b6ce878c3c0a0c9623163c8a8b3bdeefc7da53 ref: 4b28757ade169ac0a1696351519bbaa4bbba5db9
- name: Merge dbgate/dbgate-pro - name: Merge dbgate/dbgate-pro
run: | run: |
mkdir ../dbgate-pro mkdir ../dbgate-pro

View File

@@ -39,7 +39,7 @@ jobs:
repository: dbgate/dbgate-pro repository: dbgate/dbgate-pro
token: ${{ secrets.GH_TOKEN }} token: ${{ secrets.GH_TOKEN }}
path: dbgate-pro path: dbgate-pro
ref: 36b6ce878c3c0a0c9623163c8a8b3bdeefc7da53 ref: 4b28757ade169ac0a1696351519bbaa4bbba5db9
- name: Merge dbgate/dbgate-pro - name: Merge dbgate/dbgate-pro
run: | run: |
mkdir ../dbgate-pro mkdir ../dbgate-pro

View File

@@ -39,7 +39,7 @@ jobs:
repository: dbgate/dbgate-pro repository: dbgate/dbgate-pro
token: ${{ secrets.GH_TOKEN }} token: ${{ secrets.GH_TOKEN }}
path: dbgate-pro path: dbgate-pro
ref: 36b6ce878c3c0a0c9623163c8a8b3bdeefc7da53 ref: 4b28757ade169ac0a1696351519bbaa4bbba5db9
- name: Merge dbgate/dbgate-pro - name: Merge dbgate/dbgate-pro
run: | run: |
mkdir ../dbgate-pro mkdir ../dbgate-pro

View File

@@ -44,7 +44,7 @@ jobs:
repository: dbgate/dbgate-pro repository: dbgate/dbgate-pro
token: ${{ secrets.GH_TOKEN }} token: ${{ secrets.GH_TOKEN }}
path: dbgate-pro path: dbgate-pro
ref: 36b6ce878c3c0a0c9623163c8a8b3bdeefc7da53 ref: 4b28757ade169ac0a1696351519bbaa4bbba5db9
- name: Merge dbgate/dbgate-pro - name: Merge dbgate/dbgate-pro
run: | run: |
mkdir ../dbgate-pro mkdir ../dbgate-pro

View File

@@ -32,7 +32,7 @@ jobs:
repository: dbgate/dbgate-pro repository: dbgate/dbgate-pro
token: ${{ secrets.GH_TOKEN }} token: ${{ secrets.GH_TOKEN }}
path: dbgate-pro path: dbgate-pro
ref: 36b6ce878c3c0a0c9623163c8a8b3bdeefc7da53 ref: 4b28757ade169ac0a1696351519bbaa4bbba5db9
- name: Merge dbgate/dbgate-pro - name: Merge dbgate/dbgate-pro
run: | run: |
mkdir ../dbgate-pro mkdir ../dbgate-pro

View File

@@ -26,7 +26,7 @@ jobs:
repository: dbgate/dbgate-pro repository: dbgate/dbgate-pro
token: ${{ secrets.GH_TOKEN }} token: ${{ secrets.GH_TOKEN }}
path: dbgate-pro path: dbgate-pro
ref: 36b6ce878c3c0a0c9623163c8a8b3bdeefc7da53 ref: 4b28757ade169ac0a1696351519bbaa4bbba5db9
- name: Merge dbgate/dbgate-pro - name: Merge dbgate/dbgate-pro
run: | run: |
mkdir ../dbgate-pro mkdir ../dbgate-pro
@@ -107,7 +107,7 @@ jobs:
ports: ports:
- '16009:5556' - '16009:5556'
mongo: mongo:
image: mongo:6.0.25 image: mongo:4.4.29
env: env:
MONGO_INITDB_ROOT_USERNAME: root MONGO_INITDB_ROOT_USERNAME: root
MONGO_INITDB_ROOT_PASSWORD: Pwd2020Db MONGO_INITDB_ROOT_PASSWORD: Pwd2020Db

View File

@@ -0,0 +1,129 @@
#!/usr/bin/env node
// assign-dbgm-codes.mjs
//
// Scans a repository for the literal placeholder 'DBGM-00000' in JS/TS
// source files and replaces each occurrence with the lowest unused
// DBGM-xxxxx error code found anywhere in the tree.
//
// Usage: assign-dbgm-codes.mjs [--dry] [rootDir]
//   --dry    report what would change without writing any files
//   rootDir  directory to scan (defaults to the current working directory)
import fs from 'fs/promises';
import path from 'path';
// The placeholder authors write by hand; this script assigns the real code.
const PLACEHOLDER = 'DBGM-00000';
// Matches already-assigned codes; group 1 captures the 5-digit numeric part.
const CODE_RE = /DBGM-(\d{5})/g;
// File extensions treated as scannable source files.
const JS_TS_RE = /\.(mjs|cjs|js|ts|jsx|tsx)$/i;
// Directory names that are never descended into.
const IGNORE_DIRS = new Set([
  'node_modules',
  '.git',
  '.hg',
  '.svn',
  'dist',
  'build',
  'out',
  '.next',
  '.turbo',
  '.cache',
]);
// Specific file names excluded even when they match JS_TS_RE.
const IGNORE_FILES = ['assign-dbgm-codes.mjs', 'package.json', 'README.md'];
// --- CLI ---
const args = process.argv.slice(2);
const dryRun = args.includes('--dry');
// First non-flag argument is the scan root; fall back to the cwd.
const rootArg = args.find(a => a !== '--dry') || process.cwd();
const root = path.resolve(rootArg);
// --- helpers ---
// Recursively yield paths of JS/TS source files under `dir`, skipping
// ignored directory names and explicitly excluded file names.
async function* walk(dir) {
  for (const entry of await fs.readdir(dir, { withFileTypes: true })) {
    const fullPath = path.join(dir, entry.name);
    if (entry.isDirectory()) {
      if (!IGNORE_DIRS.has(entry.name)) yield* walk(fullPath);
      continue;
    }
    if (!entry.isFile()) continue;
    if (!JS_TS_RE.test(entry.name)) continue;
    if (IGNORE_FILES.includes(entry.name)) continue;
    yield fullPath;
  }
}
// Format a numeric id as a zero-padded DBGM code, e.g. 7 -> 'DBGM-00007'.
function formatCode(n) {
  const digits = String(n).padStart(5, '0');
  return `DBGM-${digits}`;
}

// Build an allocator that hands out the lowest free code number on each
// call, recording it in `taken` so it is never reused.
function makeNextCodeFn(taken) {
  let candidate = 1;
  // Position on the first free number.
  while (taken.has(candidate)) candidate += 1;
  return () => {
    const assigned = candidate;
    taken.add(assigned);
    // Advance to the next free number for the following call.
    candidate += 1;
    while (taken.has(candidate)) candidate += 1;
    return formatCode(assigned);
  };
}
// --- main ---
// Two passes: (1) collect every code number already assigned anywhere in
// the tree, (2) replace each placeholder with the next free code.
(async () => {
  console.log(`Scanning: ${root} ${dryRun ? '(dry run)' : ''}`);
  // 1) Collect all taken codes across the repo so new codes never collide.
  const taken = new Set(); // numeric parts only
  const files = [];
  for await (const file of walk(root)) files.push(file);
  await Promise.all(
    files.map(async file => {
      try {
        const text = await fs.readFile(file, 'utf8');
        for (const m of text.matchAll(CODE_RE)) {
          const num = Number(m[1]);
          // Skip the all-zero placeholder itself (num === 0).
          if (Number.isInteger(num) && num > 0) taken.add(num);
        }
      } catch (err) {
        console.warn(`! Failed to read ${file}: ${err.message}`);
      }
    })
  );
  console.log(`Found ${taken.size} occupied code(s).`);
  // 2) Replace placeholders with next available unique code
  const nextCode = makeNextCodeFn(taken);
  let filesChanged = 0;
  let placeholdersReplaced = 0;
  for (const file of files) {
    let text;
    try {
      text = await fs.readFile(file, 'utf8');
    } catch (err) {
      console.warn(`! Failed to read ${file}: ${err.message}`);
      continue;
    }
    if (!text.includes(PLACEHOLDER)) continue;
    let countInFile = 0;
    const updated = text.replaceAll(PLACEHOLDER, () => {
      countInFile++;
      return nextCode();
    });
    if (countInFile > 0) {
      placeholdersReplaced += countInFile;
      filesChanged++;
      // Fix: separate the file path from the count (was `${file}${countInFile}`,
      // which printed e.g. "/a/b.js3 replacement(s)").
      console.log(`${dryRun ? '[dry]' : '[write]'} ${file}: ${countInFile} replacement(s)`);
      if (!dryRun) {
        try {
          await fs.writeFile(file, updated, 'utf8');
        } catch (err) {
          console.warn(`! Failed to write ${file}: ${err.message}`);
        }
      }
    }
  }
  console.log(`Done. Files changed: ${filesChanged}, placeholders replaced: ${placeholdersReplaced}.`);
})().catch(err => {
  console.error(err);
  process.exit(1);
});

View File

@@ -37,7 +37,7 @@ services:
- "16009:5556" - "16009:5556"
mongo: mongo:
image: mongo:4.0.12 image: mongo:4.4.29
restart: always restart: always
environment: environment:
MONGO_INITDB_ROOT_USERNAME: root MONGO_INITDB_ROOT_USERNAME: root

View File

@@ -1,6 +1,6 @@
{ {
"private": true, "private": true,
"version": "6.6.1-beta.7", "version": "6.6.1-premium-beta.14",
"name": "dbgate-all", "name": "dbgate-all",
"workspaces": [ "workspaces": [
"packages/*", "packages/*",
@@ -72,7 +72,8 @@
"translations:extract": "node common/translations-cli/index.js extract", "translations:extract": "node common/translations-cli/index.js extract",
"translations:add-missing": "node common/translations-cli/index.js add-missing", "translations:add-missing": "node common/translations-cli/index.js add-missing",
"translations:remove-unused": "node common/translations-cli/index.js remove-unused", "translations:remove-unused": "node common/translations-cli/index.js remove-unused",
"translations:check": "node common/translations-cli/index.js check" "translations:check": "node common/translations-cli/index.js check",
"errors": "node common/assign-dbgm-codes.mjs ."
}, },
"dependencies": { "dependencies": {
"concurrently": "^5.1.0", "concurrently": "^5.1.0",

View File

@@ -56,7 +56,7 @@
"ncp": "^2.0.0", "ncp": "^2.0.0",
"node-cron": "^2.0.3", "node-cron": "^2.0.3",
"on-finished": "^2.4.1", "on-finished": "^2.4.1",
"pinomin": "^1.0.4", "pinomin": "^1.0.5",
"portfinder": "^1.0.28", "portfinder": "^1.0.28",
"rimraf": "^3.0.0", "rimraf": "^3.0.0",
"semver": "^7.6.3", "semver": "^7.6.3",

View File

@@ -94,7 +94,7 @@ class OAuthProvider extends AuthProviderBase {
payload = jwt.decode(id_token); payload = jwt.decode(id_token);
} }
logger.info({ payload }, 'User payload returned from OAUTH'); logger.info({ payload }, 'DBGM-00002 User payload returned from OAUTH');
const login = const login =
process.env.OAUTH_LOGIN_FIELD && payload && payload[process.env.OAUTH_LOGIN_FIELD] process.env.OAUTH_LOGIN_FIELD && payload && payload[process.env.OAUTH_LOGIN_FIELD]

View File

@@ -102,7 +102,7 @@ module.exports = {
...fileType('.matview.sql', 'matview.sql'), ...fileType('.matview.sql', 'matview.sql'),
]; ];
} catch (err) { } catch (err) {
logger.error(extractErrorLogData(err), 'Error reading archive files'); logger.error(extractErrorLogData(err), 'DBGM-00001 Error reading archive files');
return []; return [];
} }
}, },

View File

@@ -99,7 +99,7 @@ function authMiddleware(req, res, next) {
return next(); return next();
} }
logger.error(extractErrorLogData(err), 'Sending invalid token error'); logger.error(extractErrorLogData(err), 'DBGM-00098 Sending invalid token error');
return unauthorizedResponse(req, res, 'invalid token'); return unauthorizedResponse(req, res, 'invalid token');
} }

View File

@@ -45,7 +45,7 @@ module.exports = {
const resp = await callCloudApiGet('content-list'); const resp = await callCloudApiGet('content-list');
return resp; return resp;
} catch (err) { } catch (err) {
logger.error(extractErrorLogData(err), 'Error getting cloud content list'); logger.error(extractErrorLogData(err), 'DBGM-00099 Error getting cloud content list');
return []; return [];
} }

View File

@@ -116,12 +116,12 @@ function getPortalCollections() {
} }
} }
logger.info({ connections: connections.map(pickSafeConnectionInfo) }, 'Using connections from ENV variables'); logger.info({ connections: connections.map(pickSafeConnectionInfo) }, 'DBGM-00005 Using connections from ENV variables');
const noengine = connections.filter(x => !x.engine); const noengine = connections.filter(x => !x.engine);
if (noengine.length > 0) { if (noengine.length > 0) {
logger.warn( logger.warn(
{ connections: noengine.map(x => x._id) }, { connections: noengine.map(x => x._id) },
'Invalid CONNECTIONS configuration, missing ENGINE for connection ID' 'DBGM-00006 Invalid CONNECTIONS configuration, missing ENGINE for connection ID'
); );
} }
return connections; return connections;
@@ -530,7 +530,7 @@ module.exports = {
socket.emit('got-volatile-token', { strmid, savedConId: conid, volatileConId: volatile._id }); socket.emit('got-volatile-token', { strmid, savedConId: conid, volatileConId: volatile._id });
return { success: true }; return { success: true };
} catch (err) { } catch (err) {
logger.error(extractErrorLogData(err), 'Error getting DB token'); logger.error(extractErrorLogData(err), 'DBGM-00100 Error getting DB token');
return { error: err.message }; return { error: err.message };
} }
}, },
@@ -546,7 +546,7 @@ module.exports = {
const resp = await authProvider.login(null, null, { conid: volatile._id }, req); const resp = await authProvider.login(null, null, { conid: volatile._id }, req);
return resp; return resp;
} catch (err) { } catch (err) {
logger.error(extractErrorLogData(err), 'Error getting DB token'); logger.error(extractErrorLogData(err), 'DBGM-00101 Error getting DB token');
return { error: err.message }; return { error: err.message };
} }
}, },

View File

@@ -76,7 +76,7 @@ module.exports = {
handle_error(conid, database, props) { handle_error(conid, database, props) {
const { error } = props; const { error } = props;
logger.error(`Error in database connection ${conid}, database ${database}: ${error}`); logger.error(`DBGM-00102 Error in database connection ${conid}, database ${database}: ${error}`);
if (props?.msgid) { if (props?.msgid) {
const [resolve, reject] = this.requests[props?.msgid]; const [resolve, reject] = this.requests[props?.msgid];
reject(error); reject(error);
@@ -144,7 +144,7 @@ module.exports = {
handle_copyStreamError(conid, database, { copyStreamError }) { handle_copyStreamError(conid, database, { copyStreamError }) {
const { progressName } = copyStreamError; const { progressName } = copyStreamError;
const { runid } = progressName; const { runid } = progressName;
logger.error(`Error in database connection ${conid}, database ${database}: ${copyStreamError}`); logger.error(`DBGM-00103 Error in database connection ${conid}, database ${database}: ${copyStreamError}`);
socket.emit(`runner-done-${runid}`); socket.emit(`runner-done-${runid}`);
}, },
@@ -193,7 +193,7 @@ module.exports = {
if (newOpened.disconnected) return; if (newOpened.disconnected) return;
const funcName = `handle_${msgtype}`; const funcName = `handle_${msgtype}`;
if (!this[funcName]) { if (!this[funcName]) {
logger.error(`Unknown message type ${msgtype} from subprocess databaseConnectionProcess`); logger.error(`DBGM-00104 Unknown message type ${msgtype} from subprocess databaseConnectionProcess`);
return; return;
} }
@@ -204,7 +204,7 @@ module.exports = {
this.close(conid, database, false); this.close(conid, database, false);
}); });
subprocess.on('error', err => { subprocess.on('error', err => {
logger.error(extractErrorLogData(err), 'Error in database connection subprocess'); logger.error(extractErrorLogData(err), 'DBGM-00114 Error in database connection subprocess');
if (newOpened.disconnected) return; if (newOpened.disconnected) return;
this.close(conid, database, false); this.close(conid, database, false);
}); });
@@ -226,7 +226,7 @@ module.exports = {
try { try {
conn.subprocess.send({ msgid, ...message }); conn.subprocess.send({ msgid, ...message });
} catch (err) { } catch (err) {
logger.error(extractErrorLogData(err), 'Error sending request do process'); logger.error(extractErrorLogData(err), 'DBGM-00115 Error sending request do process');
this.close(conn.conid, conn.database); this.close(conn.conid, conn.database);
} }
}); });
@@ -236,7 +236,7 @@ module.exports = {
queryData_meta: true, queryData_meta: true,
async queryData({ conid, database, sql }, req) { async queryData({ conid, database, sql }, req) {
testConnectionPermission(conid, req); testConnectionPermission(conid, req);
logger.info({ conid, database, sql }, 'Processing query'); logger.info({ conid, database, sql }, 'DBGM-00007 Processing query');
const opened = await this.ensureOpened(conid, database); const opened = await this.ensureOpened(conid, database);
// if (opened && opened.status && opened.status.name == 'error') { // if (opened && opened.status && opened.status.name == 'error') {
// return opened.status; // return opened.status;
@@ -283,7 +283,7 @@ module.exports = {
runScript_meta: true, runScript_meta: true,
async runScript({ conid, database, sql, useTransaction, logMessage }, req) { async runScript({ conid, database, sql, useTransaction, logMessage }, req) {
testConnectionPermission(conid, req); testConnectionPermission(conid, req);
logger.info({ conid, database, sql }, 'Processing script'); logger.info({ conid, database, sql }, 'DBGM-00008 Processing script');
const opened = await this.ensureOpened(conid, database); const opened = await this.ensureOpened(conid, database);
sendToAuditLog(req, { sendToAuditLog(req, {
category: 'dbop', category: 'dbop',
@@ -304,7 +304,7 @@ module.exports = {
runOperation_meta: true, runOperation_meta: true,
async runOperation({ conid, database, operation, useTransaction }, req) { async runOperation({ conid, database, operation, useTransaction }, req) {
testConnectionPermission(conid, req); testConnectionPermission(conid, req);
logger.info({ conid, database, operation }, 'Processing operation'); logger.info({ conid, database, operation }, 'DBGM-00009 Processing operation');
sendToAuditLog(req, { sendToAuditLog(req, {
category: 'dbop', category: 'dbop',
@@ -481,7 +481,7 @@ module.exports = {
try { try {
existing.subprocess.send({ msgtype: 'ping' }); existing.subprocess.send({ msgtype: 'ping' });
} catch (err) { } catch (err) {
logger.error(extractErrorLogData(err), 'Error pinging DB connection'); logger.error(extractErrorLogData(err), 'DBGM-00116 Error pinging DB connection');
this.close(conid, database); this.close(conid, database);
return { return {
@@ -530,7 +530,7 @@ module.exports = {
try { try {
existing.subprocess.kill(); existing.subprocess.kill();
} catch (err) { } catch (err) {
logger.error(extractErrorLogData(err), 'Error killing subprocess'); logger.error(extractErrorLogData(err), 'DBGM-00117 Error killing subprocess');
} }
} }
this.opened = this.opened.filter(x => x.conid != conid || x.database != database); this.opened = this.opened.filter(x => x.conid != conid || x.database != database);
@@ -924,7 +924,7 @@ module.exports = {
executeSessionQuery_meta: true, executeSessionQuery_meta: true,
async executeSessionQuery({ sesid, conid, database, sql }, req) { async executeSessionQuery({ sesid, conid, database, sql }, req) {
testConnectionPermission(conid, req); testConnectionPermission(conid, req);
logger.info({ sesid, sql }, 'Processing query'); logger.info({ sesid, sql }, 'DBGM-00010 Processing query');
sessions.dispatchMessage(sesid, 'Query execution started'); sessions.dispatchMessage(sesid, 'Query execution started');
const opened = await this.ensureOpened(conid, database); const opened = await this.ensureOpened(conid, database);

View File

@@ -1,7 +1,7 @@
const fs = require('fs-extra'); const fs = require('fs-extra');
const path = require('path'); const path = require('path');
const crypto = require('crypto'); const crypto = require('crypto');
const { filesdir, archivedir, resolveArchiveFolder, uploadsdir, appdir } = require('../utility/directories'); const { filesdir, archivedir, resolveArchiveFolder, uploadsdir, appdir, jsldir } = require('../utility/directories');
const getChartExport = require('../utility/getChartExport'); const getChartExport = require('../utility/getChartExport');
const { hasPermission } = require('../utility/hasPermission'); const { hasPermission } = require('../utility/hasPermission');
const socket = require('../utility/socket'); const socket = require('../utility/socket');
@@ -13,6 +13,7 @@ const dbgateApi = require('../shell');
const { getLogger } = require('dbgate-tools'); const { getLogger } = require('dbgate-tools');
const platformInfo = require('../utility/platformInfo'); const platformInfo = require('../utility/platformInfo');
const { checkSecureFilePathsWithoutDirectory, checkSecureDirectories } = require('../utility/security'); const { checkSecureFilePathsWithoutDirectory, checkSecureDirectories } = require('../utility/security');
const { copyAppLogsIntoFile, getRecentAppLogRecords } = require('../utility/appLogStore');
const logger = getLogger('files'); const logger = getLogger('files');
function serialize(format, data) { function serialize(format, data) {
@@ -253,7 +254,7 @@ module.exports = {
createZipFromJsons_meta: true, createZipFromJsons_meta: true,
async createZipFromJsons({ db, filePath }) { async createZipFromJsons({ db, filePath }) {
logger.info(`Creating zip file from JSONS ${filePath}`); logger.info(`DBGM-00011 Creating zip file from JSONS ${filePath}`);
await dbgateApi.zipJsonLinesData(db, filePath); await dbgateApi.zipJsonLinesData(db, filePath);
return true; return true;
}, },
@@ -279,7 +280,7 @@ module.exports = {
const FOLDERS = ['sql', 'sqlite']; const FOLDERS = ['sql', 'sqlite'];
for (const folder of FOLDERS) { for (const folder of FOLDERS) {
if (fileName.toLowerCase().endsWith('.' + folder)) { if (fileName.toLowerCase().endsWith('.' + folder)) {
logger.info(`Saving ${folder} file ${fileName}`); logger.info(`DBGM-00012 Saving ${folder} file ${fileName}`);
await fs.copyFile(filePath, path.join(filesdir(), folder, fileName)); await fs.copyFile(filePath, path.join(filesdir(), folder, fileName));
socket.emitChanged(`files-changed`, { folder: folder }); socket.emitChanged(`files-changed`, { folder: folder });
@@ -291,7 +292,7 @@ module.exports = {
} }
} }
throw new Error(`${fileName} doesn't have one of supported extensions: ${FOLDERS.join(', ')}`); throw new Error(`DBGM-00013 ${fileName} doesn't have one of supported extensions: ${FOLDERS.join(', ')}`);
}, },
exportFile_meta: true, exportFile_meta: true,
@@ -311,4 +312,23 @@ module.exports = {
await fs.copyFile(sourceFilePath, targetFilePath); await fs.copyFile(sourceFilePath, targetFilePath);
return true; return true;
}, },
fillAppLogs_meta: true,
async fillAppLogs({ dateFrom = 0, dateTo = new Date().getTime(), prepareForExport = false }) {
const jslid = crypto.randomUUID();
const outputFile = path.join(jsldir(), `${jslid}.jsonl`);
await copyAppLogsIntoFile(dateFrom, dateTo, outputFile, prepareForExport);
return {
jslid,
};
},
getRecentAppLog_meta: true,
getRecentAppLog({ limit }) {
const res = getRecentAppLogRecords();
if (limit) {
return res.slice(-limit);
}
return res;
},
}; };

View File

@@ -48,7 +48,7 @@ require=null;
async function run() { async function run() {
${script} ${script}
await dbgateApi.finalizer.run(); await dbgateApi.finalizer.run();
logger.info('Finished job script'); logger.info('DBGM-00014 Finished job script');
} }
dbgateApi.runScript(run); dbgateApi.runScript(run);
`; `;
@@ -74,7 +74,8 @@ module.exports = {
dispatchMessage(runid, message) { dispatchMessage(runid, message) {
if (message) { if (message) {
if (_.isPlainObject(message)) logger.log(message); if (_.isPlainObject(message))
logger.log({ ...message, msg: message.msg || message.message || '', message: undefined });
else logger.info(message); else logger.info(message);
const toEmit = _.isPlainObject(message) const toEmit = _.isPlainObject(message)
@@ -132,7 +133,7 @@ module.exports = {
const pluginNames = extractPlugins(scriptText); const pluginNames = extractPlugins(scriptText);
// console.log('********************** SCRIPT TEXT **********************'); // console.log('********************** SCRIPT TEXT **********************');
// console.log(scriptText); // console.log(scriptText);
logger.info({ scriptFile }, 'Running script'); logger.info({ scriptFile }, 'DBGM-00015 Running script');
// const subprocess = fork(scriptFile, ['--checkParent', '--max-old-space-size=8192'], { // const subprocess = fork(scriptFile, ['--checkParent', '--max-old-space-size=8192'], {
const subprocess = fork( const subprocess = fork(
scriptFile, scriptFile,
@@ -171,7 +172,7 @@ module.exports = {
subprocess.on('exit', code => { subprocess.on('exit', code => {
// console.log('... EXITED', code); // console.log('... EXITED', code);
this.rejectRequest(runid, { message: 'No data returned, maybe input data source is too big' }); this.rejectRequest(runid, { message: 'No data returned, maybe input data source is too big' });
logger.info({ code, pid: subprocess.pid }, 'Exited process'); logger.info({ code, pid: subprocess.pid }, 'DBGM-00016 Exited process');
socket.emit(`runner-done-${runid}`, code); socket.emit(`runner-done-${runid}`, code);
this.opened = this.opened.filter(x => x.runid != runid); this.opened = this.opened.filter(x => x.runid != runid);
}); });
@@ -222,7 +223,7 @@ module.exports = {
subprocess.on('exit', code => { subprocess.on('exit', code => {
console.log('... EXITED', code); console.log('... EXITED', code);
logger.info({ code, pid: subprocess.pid }, 'Exited process'); logger.info({ code, pid: subprocess.pid }, 'DBGM-00017 Exited process');
this.dispatchMessage(runid, `Finished external process with code ${code}`); this.dispatchMessage(runid, `Finished external process with code ${code}`);
socket.emit(`runner-done-${runid}`, code); socket.emit(`runner-done-${runid}`, code);
if (onFinished) { if (onFinished) {
@@ -258,7 +259,7 @@ module.exports = {
severity: 'error', severity: 'error',
message: extractErrorMessage(err), message: extractErrorMessage(err),
}); });
logger.error(extractErrorLogData(err), 'Caught error on stdin'); logger.error(extractErrorLogData(err), 'DBGM-00118 Caught error on stdin');
}); });
} }

View File

@@ -24,7 +24,7 @@ module.exports = {
if (!match) return; if (!match) return;
const pattern = match[1]; const pattern = match[1];
if (!cron.validate(pattern)) return; if (!cron.validate(pattern)) return;
logger.info(`Schedule script ${file} with pattern ${pattern}`); logger.info(`DBGM-00018 Schedule script ${file} with pattern ${pattern}`);
const task = cron.schedule(pattern, () => runners.start({ script: text })); const task = cron.schedule(pattern, () => runners.start({ script: text }));
this.tasks.push(task); this.tasks.push(task);
}, },

View File

@@ -103,7 +103,7 @@ module.exports = {
this.close(conid, false); this.close(conid, false);
}); });
subprocess.on('error', err => { subprocess.on('error', err => {
logger.error(extractErrorLogData(err), 'Error in server connection subprocess'); logger.error(extractErrorLogData(err), 'DBGM-00119 Error in server connection subprocess');
if (newOpened.disconnected) return; if (newOpened.disconnected) return;
this.close(conid, false); this.close(conid, false);
}); });
@@ -121,7 +121,7 @@ module.exports = {
try { try {
existing.subprocess.kill(); existing.subprocess.kill();
} catch (err) { } catch (err) {
logger.error(extractErrorLogData(err), 'Error killing subprocess'); logger.error(extractErrorLogData(err), 'DBGM-00120 Error killing subprocess');
} }
} }
this.opened = this.opened.filter(x => x.conid != conid); this.opened = this.opened.filter(x => x.conid != conid);
@@ -191,7 +191,7 @@ module.exports = {
try { try {
opened.subprocess.send({ msgtype: 'ping' }); opened.subprocess.send({ msgtype: 'ping' });
} catch (err) { } catch (err) {
logger.error(extractErrorLogData(err), 'Error pinging server connection'); logger.error(extractErrorLogData(err), 'DBGM-00121 Error pinging server connection');
this.close(conid); this.close(conid);
} }
}) })
@@ -244,7 +244,7 @@ module.exports = {
try { try {
conn.subprocess.send({ msgid, ...message }); conn.subprocess.send({ msgid, ...message });
} catch (err) { } catch (err) {
logger.error(extractErrorLogData(err), 'Error sending request'); logger.error(extractErrorLogData(err), 'DBGM-00122 Error sending request');
this.close(conn.conid); this.close(conn.conid);
} }
}); });

View File

@@ -165,7 +165,7 @@ module.exports = {
message: 'Executing query', message: 'Executing query',
}); });
logger.info({ sesid, sql }, 'Processing query'); logger.info({ sesid, sql }, 'DBGM-00019 Processing query');
this.dispatchMessage(sesid, 'Query execution started'); this.dispatchMessage(sesid, 'Query execution started');
session.subprocess.send({ session.subprocess.send({
msgtype: 'executeQuery', msgtype: 'executeQuery',
@@ -186,7 +186,7 @@ module.exports = {
throw new Error('Invalid session'); throw new Error('Invalid session');
} }
logger.info({ sesid, command }, 'Processing control command'); logger.info({ sesid, command }, 'DBGM-00020 Processing control command');
this.dispatchMessage(sesid, `${_.startCase(command)} started`); this.dispatchMessage(sesid, `${_.startCase(command)} started`);
session.subprocess.send({ msgtype: 'executeControlCommand', command }); session.subprocess.send({ msgtype: 'executeControlCommand', command });
@@ -224,7 +224,7 @@ module.exports = {
throw new Error('Invalid session'); throw new Error('Invalid session');
} }
logger.info({ sesid }, 'Starting profiler'); logger.info({ sesid }, 'DBGM-00021 Starting profiler');
session.loadingReader_jslid = jslid; session.loadingReader_jslid = jslid;
session.subprocess.send({ msgtype: 'startProfiler', jslid }); session.subprocess.send({ msgtype: 'startProfiler', jslid });
@@ -271,7 +271,7 @@ module.exports = {
try { try {
session.subprocess.send({ msgtype: 'ping' }); session.subprocess.send({ msgtype: 'ping' });
} catch (err) { } catch (err) {
logger.error(extractErrorLogData(err), 'Error pinging session'); logger.error(extractErrorLogData(err), 'DBGM-00145 Error pinging session');
return { return {
status: 'error', status: 'error',

View File

@@ -28,7 +28,7 @@ module.exports = {
} }
const uploadName = crypto.randomUUID(); const uploadName = crypto.randomUUID();
const filePath = path.join(uploadsdir(), uploadName); const filePath = path.join(uploadsdir(), uploadName);
logger.info(`Uploading file ${data.name}, size=${data.size}`); logger.info(`DBGM-00025 Uploading file ${data.name}, size=${data.size}`);
data.mv(filePath, () => { data.mv(filePath, () => {
res.json({ res.json({
@@ -115,7 +115,7 @@ module.exports = {
return response.data; return response.data;
} catch (err) { } catch (err) {
logger.error(extractErrorLogData(err), 'Error uploading gist'); logger.error(extractErrorLogData(err), 'DBGM-00148 Error uploading gist');
return { return {
apiErrorMessage: err.message, apiErrorMessage: err.message,

View File

@@ -9,7 +9,7 @@ const currentVersion = require('./currentVersion');
const logger = getLogger('apiIndex'); const logger = getLogger('apiIndex');
process.on('uncaughtException', err => { process.on('uncaughtException', err => {
logger.fatal(extractErrorLogData(err), 'Uncaught exception, exiting process'); logger.fatal(extractErrorLogData(err), 'DBGM-00259 Uncaught exception, exiting process');
process.exit(1); process.exit(1);
}); });
@@ -33,6 +33,9 @@ if (processArgs.processDisplayName) {
// } // }
function configureLogger() { function configureLogger() {
const { initializeRecentLogProvider, pushToRecentLogs } = require('./utility/appLogStore');
initializeRecentLogProvider();
const logsFilePath = path.join(logsdir(), `${moment().format('YYYY-MM-DD-HH-mm')}-${process.pid}.ndjson`); const logsFilePath = path.join(logsdir(), `${moment().format('YYYY-MM-DD-HH-mm')}-${process.pid}.ndjson`);
setLogsFilePath(logsFilePath); setLogsFilePath(logsFilePath);
setLoggerName('main'); setLoggerName('main');
@@ -40,6 +43,8 @@ function configureLogger() {
const consoleLogLevel = process.env.CONSOLE_LOG_LEVEL || process.env.LOG_LEVEL || 'info'; const consoleLogLevel = process.env.CONSOLE_LOG_LEVEL || process.env.LOG_LEVEL || 'info';
const fileLogLevel = process.env.FILE_LOG_LEVEL || process.env.LOG_LEVEL || 'debug'; const fileLogLevel = process.env.FILE_LOG_LEVEL || process.env.LOG_LEVEL || 'debug';
const streamsByDatePart = {};
const logConfig = { const logConfig = {
base: { pid: process.pid }, base: { pid: process.pid },
targets: [ targets: [
@@ -49,10 +54,35 @@ function configureLogger() {
level: consoleLogLevel, level: consoleLogLevel,
}, },
{ {
type: 'stream', type: 'objstream',
// @ts-ignore // @ts-ignore
level: fileLogLevel, level: fileLogLevel,
stream: fs.createWriteStream(logsFilePath, { flags: 'a' }), objstream: {
send(msg) {
const datePart = moment(msg.time).format('YYYY-MM-DD');
if (!streamsByDatePart[datePart]) {
streamsByDatePart[datePart] = fs.createWriteStream(
path.join(logsdir(), `${moment().format('YYYY-MM-DD-HH-mm')}-${process.pid}.ndjson`),
{ flags: 'a' }
);
}
const additionals = {};
const finalMsg =
msg.msg && msg.msg.match(/^DBGM-\d\d\d\d\d/)
? {
...msg,
msg: msg.msg.substring(10).trimStart(),
msgcode: msg.msg.substring(0, 10),
...additionals,
}
: {
...msg,
...additionals,
};
streamsByDatePart[datePart].write(`${JSON.stringify(finalMsg)}\n`);
pushToRecentLogs(finalMsg);
},
},
}, },
], ],
}; };
@@ -101,10 +131,10 @@ function configureLogger() {
if (processArgs.listenApi) { if (processArgs.listenApi) {
configureLogger(); configureLogger();
logger.info(`Starting API process version ${currentVersion.version}`); logger.info(`DBGM-00026 Starting API process version ${currentVersion.version}`);
if (process.env.DEBUG_PRINT_ENV_VARIABLES) { if (process.env.DEBUG_PRINT_ENV_VARIABLES) {
logger.info('Debug print environment variables:'); logger.info('DBGM-00027 Debug print environment variables:');
for (const key of Object.keys(process.env)) { for (const key of Object.keys(process.env)) {
logger.info(` ${key}: ${JSON.stringify(process.env[key])}`); logger.info(` ${key}: ${JSON.stringify(process.env[key])}`);
} }

View File

@@ -6,6 +6,7 @@ const http = require('http');
const cors = require('cors'); const cors = require('cors');
const getPort = require('get-port'); const getPort = require('get-port');
const path = require('path'); const path = require('path');
const fs = require('fs/promises');
const useController = require('./utility/useController'); const useController = require('./utility/useController');
const socket = require('./utility/socket'); const socket = require('./utility/socket');
@@ -44,6 +45,48 @@ const { startCloudFiles } = require('./utility/cloudIntf');
const logger = getLogger('main'); const logger = getLogger('main');
function registerExpressStatic(app, publicDir) {
app.get([getExpressPath('/'), getExpressPath('/*.html')], async (req, res, next) => {
try {
const relPath = req.path === getExpressPath('/') ? '/index.html' : req.path;
const filePath = path.join(publicDir, relPath);
let html = await fs.readFile(filePath, 'utf8');
if (process.env.DBGATE_GTM_ID) {
html = html.replace(
/<!--HEAD_SCRIPT-->/g,
`<!-- Google Tag Manager -->
<script>(function(w,d,s,l,i){w[l]=w[l]||[];w[l].push({'gtm.start':
new Date().getTime(),event:'gtm.js'});var f=d.getElementsByTagName(s)[0],
j=d.createElement(s),dl=l!='dataLayer'?'&l='+l:'';j.async=true;j.src=
'https://www.googletagmanager.com/gtm.js?id='+i+dl;f.parentNode.insertBefore(j,f);
})(window,document,'script','dataLayer','${process.env.DBGATE_GTM_ID}');</script>
<!-- End Google Tag Manager -->`
);
html = html.replace(
/<!--BODY_SCRIPT-->/g,
process.env.PAGE_BODY_SCRIPT ??
`<!-- Google Tag Manager (noscript) -->
<noscript><iframe src="https://www.googletagmanager.com/ns.html?id=${process.env.DBGATE_GTM_ID}" height="0" width="0" style="display:none;visibility:hidden"></iframe></noscript>
<!-- End Google Tag Manager (noscript) -->`
);
} else {
html = html.replace(/<!--HEAD_SCRIPT-->/g, process.env.PAGE_HEAD_SCRIPT ?? '');
html = html.replace(/<!--BODY_SCRIPT-->/g, process.env.PAGE_BODY_SCRIPT ?? '');
}
res.type('html').send(html);
} catch (err) {
if (err.code === 'ENOENT') return next();
next(err);
}
});
// 2) Static assets for everything else (css/js/images/etc.)
app.use(getExpressPath('/'), express.static(publicDir));
}
function start() { function start() {
// console.log('process.argv', process.argv); // console.log('process.argv', process.argv);
@@ -78,22 +121,18 @@ function start() {
if (platformInfo.isDocker) { if (platformInfo.isDocker) {
// server static files inside docker container // server static files inside docker container
app.use(getExpressPath('/'), express.static('/home/dbgate-docker/public')); registerExpressStatic(app, '/home/dbgate-docker/public');
} else if (platformInfo.isAwsUbuntuLayout) { } else if (platformInfo.isAwsUbuntuLayout) {
app.use(getExpressPath('/'), express.static('/home/ubuntu/build/public')); registerExpressStatic(app, '/home/dbgate-docker/public');
registerExpressStatic(app, '/home/ubuntu/build/public');
} else if (platformInfo.isAzureUbuntuLayout) { } else if (platformInfo.isAzureUbuntuLayout) {
app.use(getExpressPath('/'), express.static('/home/azureuser/build/public')); registerExpressStatic(app, '/home/azureuser/build/public');
} else if (processArgs.runE2eTests) { } else if (processArgs.runE2eTests) {
app.use(getExpressPath('/'), express.static(path.resolve('packer/build/public'))); registerExpressStatic(app, path.resolve('packer/build/public'));
} else if (platformInfo.isNpmDist) { } else if (platformInfo.isNpmDist) {
app.use( registerExpressStatic(app, path.join(__dirname, isProApp() ? '../../dbgate-web-premium/public' : '../../dbgate-web/public'));
getExpressPath('/'),
express.static(path.join(__dirname, isProApp() ? '../../dbgate-web-premium/public' : '../../dbgate-web/public'))
);
} else if (process.env.DEVWEB) { } else if (process.env.DEVWEB) {
// console.log('__dirname', __dirname); registerExpressStatic(app, path.join(__dirname, '../../web/public'));
// console.log(path.join(__dirname, '../../web/public/build'));
app.use(getExpressPath('/'), express.static(path.join(__dirname, '../../web/public')));
} else { } else {
app.get(getExpressPath('/'), (req, res) => { app.get(getExpressPath('/'), (req, res) => {
res.send('DbGate API'); res.send('DbGate API');
@@ -152,15 +191,15 @@ function start() {
if (platformInfo.isDocker) { if (platformInfo.isDocker) {
const port = process.env.PORT || 3000; const port = process.env.PORT || 3000;
logger.info(`DbGate API listening on port ${port} (docker build)`); logger.info(`DBGM-00028 DbGate API listening on port ${port} (docker build)`);
server.listen(port); server.listen(port);
} else if (platformInfo.isAwsUbuntuLayout) { } else if (platformInfo.isAwsUbuntuLayout) {
const port = process.env.PORT || 3000; const port = process.env.PORT || 3000;
logger.info(`DbGate API listening on port ${port} (AWS AMI build)`); logger.info(`DBGM-00029 DbGate API listening on port ${port} (AWS AMI build)`);
server.listen(port); server.listen(port);
} else if (platformInfo.isAzureUbuntuLayout) { } else if (platformInfo.isAzureUbuntuLayout) {
const port = process.env.PORT || 3000; const port = process.env.PORT || 3000;
logger.info(`DbGate API listening on port ${port} (Azure VM build)`); logger.info(`DBGM-00030 DbGate API listening on port ${port} (Azure VM build)`);
server.listen(port); server.listen(port);
} else if (platformInfo.isNpmDist) { } else if (platformInfo.isNpmDist) {
getPort({ getPort({
@@ -170,27 +209,27 @@ function start() {
), ),
}).then(port => { }).then(port => {
server.listen(port, () => { server.listen(port, () => {
logger.info(`DbGate API listening on port ${port} (NPM build)`); logger.info(`DBGM-00031 DbGate API listening on port ${port} (NPM build)`);
}); });
}); });
} else if (process.env.DEVWEB) { } else if (process.env.DEVWEB) {
const port = process.env.PORT || 3000; const port = process.env.PORT || 3000;
logger.info(`DbGate API & web listening on port ${port} (dev web build)`); logger.info(`DBGM-00032 DbGate API & web listening on port ${port} (dev web build)`);
server.listen(port); server.listen(port);
} else { } else {
const port = process.env.PORT || 3000; const port = process.env.PORT || 3000;
logger.info(`DbGate API listening on port ${port} (dev API build)`); logger.info(`DBGM-00033 DbGate API listening on port ${port} (dev API build)`);
server.listen(port); server.listen(port);
} }
function shutdown() { function shutdown() {
logger.info('\nShutting down DbGate API server'); logger.info('DBGM-00034 Shutting down DbGate API server');
server.close(() => { server.close(() => {
logger.info('Server shut down, terminating'); logger.info('DBGM-00035 Server shut down, terminating');
process.exit(0); process.exit(0);
}); });
setTimeout(() => { setTimeout(() => {
logger.info('Server close timeout, terminating'); logger.info('DBGM-00036 Server close timeout, terminating');
process.exit(0); process.exit(0);
}, 1000); }, 1000);
} }

View File

@@ -6,7 +6,6 @@ const {
extractIntSettingsValue, extractIntSettingsValue,
getLogger, getLogger,
isCompositeDbName, isCompositeDbName,
dbNameLogCategory,
extractErrorMessage, extractErrorMessage,
extractErrorLogData, extractErrorLogData,
ScriptWriterEval, ScriptWriterEval,
@@ -45,6 +44,14 @@ function getStatusCounter() {
return statusCounter; return statusCounter;
} }
function getLogInfo() {
return {
database: dbhan ? dbhan.database : undefined,
conid: dbhan ? dbhan.conid : undefined,
engine: storedConnection ? storedConnection.engine : undefined,
};
}
async function checkedAsyncCall(promise) { async function checkedAsyncCall(promise) {
try { try {
const res = await promise; const res = await promise;
@@ -131,10 +138,10 @@ async function readVersion() {
const driver = requireEngineDriver(storedConnection); const driver = requireEngineDriver(storedConnection);
try { try {
const version = await driver.getVersion(dbhan); const version = await driver.getVersion(dbhan);
logger.debug(`Got server version: ${version.version}`); logger.debug(getLogInfo(), `DBGM-00037 Got server version: ${version.version}`);
serverVersion = version; serverVersion = version;
} catch (err) { } catch (err) {
logger.error(extractErrorLogData(err), 'Error getting DB server version'); logger.error(extractErrorLogData(err, getLogInfo()), 'DBGM-00149 Error getting DB server version');
serverVersion = { version: 'Unknown' }; serverVersion = { version: 'Unknown' };
} }
process.send({ msgtype: 'version', version: serverVersion }); process.send({ msgtype: 'version', version: serverVersion });
@@ -148,9 +155,8 @@ async function handleConnect({ connection, structure, globalSettings }) {
const driver = requireEngineDriver(storedConnection); const driver = requireEngineDriver(storedConnection);
dbhan = await checkedAsyncCall(connectUtility(driver, storedConnection, 'app')); dbhan = await checkedAsyncCall(connectUtility(driver, storedConnection, 'app'));
logger.debug( logger.debug(
`Connected to database, driver: ${storedConnection.engine}, separate schemas: ${ getLogInfo(),
storedConnection.useSeparateSchemas ? 'YES' : 'NO' `DBGM-00038 Connected to database, separate schemas: ${storedConnection.useSeparateSchemas ? 'YES' : 'NO'}`
}, 'DB: ${dbNameLogCategory(dbhan.database)} }`
); );
dbhan.feedback = feedback => setStatus({ feedback }); dbhan.feedback = feedback => setStatus({ feedback });
await checkedAsyncCall(readVersion()); await checkedAsyncCall(readVersion());
@@ -257,13 +263,16 @@ async function handleDriverDataCore(msgid, callMethod, { logName }) {
const result = await callMethod(driver); const result = await callMethod(driver);
process.send({ msgtype: 'response', msgid, result: serializeJsTypesForJsonStringify(result) }); process.send({ msgtype: 'response', msgid, result: serializeJsTypesForJsonStringify(result) });
} catch (err) { } catch (err) {
logger.error(extractErrorLogData(err, { logName }), `Error when handling message ${logName}`); logger.error(
extractErrorLogData(err, { logName, ...getLogInfo() }),
`DBGM-00150 Error when handling message ${logName}`
);
process.send({ msgtype: 'response', msgid, errorMessage: extractErrorMessage(err, 'Error executing DB data') }); process.send({ msgtype: 'response', msgid, errorMessage: extractErrorMessage(err, 'Error executing DB data') });
} }
} }
async function handleSchemaList({ msgid }) { async function handleSchemaList({ msgid }) {
logger.debug('Loading schema list'); logger.debug(getLogInfo(), 'DBGM-00039 Loading schema list');
return handleDriverDataCore(msgid, driver => driver.listSchemas(dbhan), { logName: 'listSchemas' }); return handleDriverDataCore(msgid, driver => driver.listSchemas(dbhan), { logName: 'listSchemas' });
} }
@@ -351,7 +360,7 @@ async function handleSqlPreview({ msgid, objects, options }) {
process.send({ msgtype: 'response', msgid, sql: dmp.s, isTruncated: generator.isTruncated }); process.send({ msgtype: 'response', msgid, sql: dmp.s, isTruncated: generator.isTruncated });
if (generator.isUnhandledException) { if (generator.isUnhandledException) {
setTimeout(async () => { setTimeout(async () => {
logger.error('Exiting because of unhandled exception'); logger.error(getLogInfo(), 'DBGM-00151 Exiting because of unhandled exception');
await driver.close(dbhan); await driver.close(dbhan);
process.exit(0); process.exit(0);
}, 500); }, 500);
@@ -485,7 +494,7 @@ function start() {
setInterval(async () => { setInterval(async () => {
const time = new Date().getTime(); const time = new Date().getTime();
if (time - lastPing > 40 * 1000) { if (time - lastPing > 40 * 1000) {
logger.info('Database connection not alive, exiting'); logger.info(getLogInfo(), 'DBGM-00040 Database connection not alive, exiting');
const driver = requireEngineDriver(storedConnection); const driver = requireEngineDriver(storedConnection);
await driver.close(dbhan); await driver.close(dbhan);
process.exit(0); process.exit(0);
@@ -497,10 +506,10 @@ function start() {
try { try {
await handleMessage(message); await handleMessage(message);
} catch (err) { } catch (err) {
logger.error(extractErrorLogData(err), 'Error in DB connection'); logger.error(extractErrorLogData(err, getLogInfo()), 'DBGM-00041 Error in DB connection');
process.send({ process.send({
msgtype: 'error', msgtype: 'error',
error: extractErrorMessage(err, 'Error processing message'), error: extractErrorMessage(err, 'DBGM-00042 Error processing message'),
msgid: message?.msgid, msgid: message?.msgid,
}); });
} }

View File

@@ -39,7 +39,7 @@ async function handleRefresh() {
name: 'error', name: 'error',
message: err.message, message: err.message,
}); });
logger.error(extractErrorLogData(err), 'Error refreshing server databases'); logger.error(extractErrorLogData(err), 'DBGM-00152 Error refreshing server databases');
setTimeout(() => process.exit(1), 1000); setTimeout(() => process.exit(1), 1000);
} }
} }
@@ -50,7 +50,7 @@ async function readVersion() {
try { try {
version = await driver.getVersion(dbhan); version = await driver.getVersion(dbhan);
} catch (err) { } catch (err) {
logger.error(extractErrorLogData(err), 'Error getting DB server version'); logger.error(extractErrorLogData(err), 'DBGM-00153 Error getting DB server version');
version = { version: 'Unknown' }; version = { version: 'Unknown' };
} }
process.send({ msgtype: 'version', version }); process.send({ msgtype: 'version', version });
@@ -90,7 +90,7 @@ async function handleConnect(connection) {
name: 'error', name: 'error',
message: err.message, message: err.message,
}); });
logger.error(extractErrorLogData(err), 'Error connecting to server'); logger.error(extractErrorLogData(err), 'DBGM-00154 Error connecting to server');
setTimeout(() => process.exit(1), 1000); setTimeout(() => process.exit(1), 1000);
} }
@@ -120,7 +120,7 @@ async function handleDatabaseOp(op, { msgid, name }) {
} else { } else {
const dmp = driver.createDumper(); const dmp = driver.createDumper();
dmp[op](name); dmp[op](name);
logger.info({ sql: dmp.s }, 'Running script'); logger.info({ sql: dmp.s }, 'DBGM-00043 Running script');
await driver.query(dbhan, dmp.s, { discardResult: true }); await driver.query(dbhan, dmp.s, { discardResult: true });
} }
await handleRefresh(); await handleRefresh();
@@ -170,7 +170,7 @@ function start() {
setInterval(async () => { setInterval(async () => {
const time = new Date().getTime(); const time = new Date().getTime();
if (time - lastPing > 40 * 1000) { if (time - lastPing > 40 * 1000) {
logger.info('Server connection not alive, exiting'); logger.info('DBGM-00044 Server connection not alive, exiting');
const driver = requireEngineDriver(storedConnection); const driver = requireEngineDriver(storedConnection);
if (dbhan) { if (dbhan) {
await driver.close(dbhan); await driver.close(dbhan);
@@ -188,7 +188,7 @@ function start() {
name: 'error', name: 'error',
message: err.message, message: err.message,
}); });
logger.error(extractErrorLogData(err), `Error processing message ${message?.['msgtype']}`); logger.error(extractErrorLogData(err), `DBGM-00155 Error processing message ${message?.['msgtype']}`);
} }
}); });
} }

View File

@@ -230,7 +230,7 @@ function start() {
setInterval(async () => { setInterval(async () => {
const time = new Date().getTime(); const time = new Date().getTime();
if (time - lastPing > 25 * 1000) { if (time - lastPing > 25 * 1000) {
logger.info('Session not alive, exiting'); logger.info('DBGM-00045 Session not alive, exiting');
const driver = requireEngineDriver(storedConnection); const driver = requireEngineDriver(storedConnection);
await driver.close(dbhan); await driver.close(dbhan);
process.exit(0); process.exit(0);
@@ -250,7 +250,7 @@ function start() {
!currentProfiler && !currentProfiler &&
executingScripts == 0 executingScripts == 0
) { ) {
logger.info('Session not active, exiting'); logger.info('DBGM-00046 Session not active, exiting');
const driver = requireEngineDriver(storedConnection); const driver = requireEngineDriver(storedConnection);
await driver.close(dbhan); await driver.close(dbhan);
process.exit(0); process.exit(0);

View File

@@ -41,7 +41,7 @@ async function handleStart({ connection, tunnelConfig }) {
tunnelConfig, tunnelConfig,
}); });
} catch (err) { } catch (err) {
logger.error(extractErrorLogData(err), 'Error creating SSH tunnel connection:'); logger.error(extractErrorLogData(err), 'DBGM-00156 Error creating SSH tunnel connection:');
process.send({ process.send({
msgtype: 'error', msgtype: 'error',

View File

@@ -10,7 +10,7 @@ const logger = getLogger();
function archiveWriter({ folderName, fileName }) { function archiveWriter({ folderName, fileName }) {
const dir = resolveArchiveFolder(folderName); const dir = resolveArchiveFolder(folderName);
if (!fs.existsSync(dir)) { if (!fs.existsSync(dir)) {
logger.info(`Creating directory ${dir}`); logger.info(`DBGM-00047 Creating directory ${dir}`);
fs.mkdirSync(dir); fs.mkdirSync(dir);
} }
const jsonlFile = path.join(dir, `${fileName}.jsonl`); const jsonlFile = path.join(dir, `${fileName}.jsonl`);

View File

@@ -83,7 +83,7 @@ async function copyStream(input, output, options) {
}); });
} }
logger.error(extractErrorLogData(err, { progressName }), 'Import/export job failed'); logger.error(extractErrorLogData(err, { progressName }), 'DBGM-00157 Import/export job failed');
// throw err; // throw err;
} }
} }

View File

@@ -28,20 +28,20 @@ async function executeQuery({
useTransaction, useTransaction,
}) { }) {
if (!logScriptItems && !skipLogging) { if (!logScriptItems && !skipLogging) {
logger.info({ sql: getLimitedQuery(sql) }, `Execute query`); logger.info({ sql: getLimitedQuery(sql) }, `DBGM-00048 Execute query`);
} }
if (!driver) driver = requireEngineDriver(connection); if (!driver) driver = requireEngineDriver(connection);
const dbhan = systemConnection || (await connectUtility(driver, connection, 'script')); const dbhan = systemConnection || (await connectUtility(driver, connection, 'script'));
if (sqlFile) { if (sqlFile) {
logger.debug(`Loading SQL file ${sqlFile}`); logger.debug(`DBGM-00049 Loading SQL file ${sqlFile}`);
sql = await fs.readFile(sqlFile, { encoding: 'utf-8' }); sql = await fs.readFile(sqlFile, { encoding: 'utf-8' });
} }
try { try {
if (!skipLogging) { if (!skipLogging) {
logger.debug(`Running SQL query, length: ${sql.length}`); logger.debug(`DBGM-00050 Running SQL query, length: ${sql.length}`);
} }
await driver.script(dbhan, sql, { logScriptItems, useTransaction }); await driver.script(dbhan, sql, { logScriptItems, useTransaction });

View File

@@ -45,14 +45,14 @@ class ImportStream extends stream.Transform {
} }
async function importDatabase({ connection = undefined, systemConnection = undefined, driver = undefined, inputFile }) { async function importDatabase({ connection = undefined, systemConnection = undefined, driver = undefined, inputFile }) {
logger.info(`Importing database`); logger.info(`DBGM-00051 Importing database`);
if (!driver) driver = requireEngineDriver(connection); if (!driver) driver = requireEngineDriver(connection);
const dbhan = systemConnection || (await connectUtility(driver, connection, 'write')); const dbhan = systemConnection || (await connectUtility(driver, connection, 'write'));
try { try {
logger.info(`Input file: ${inputFile}`); logger.info(`DBGM-00052 Input file: ${inputFile}`);
const downloadedFile = await download(inputFile); const downloadedFile = await download(inputFile);
logger.info(`Downloaded file: ${downloadedFile}`); logger.info(`DBGM-00053 Downloaded file: ${downloadedFile}`);
const fileStream = fs.createReadStream(downloadedFile, 'utf-8'); const fileStream = fs.createReadStream(downloadedFile, 'utf-8');
const splittedStream = splitQueryStream(fileStream, { const splittedStream = splitQueryStream(fileStream, {

View File

@@ -42,7 +42,7 @@ class ParseStream extends stream.Transform {
* @returns {Promise<readerType>} - reader object * @returns {Promise<readerType>} - reader object
*/ */
async function jsonLinesReader({ fileName, encoding = 'utf-8', limitRows = undefined }) { async function jsonLinesReader({ fileName, encoding = 'utf-8', limitRows = undefined }) {
logger.info(`Reading file ${fileName}`); logger.info(`DBGM-00054 Reading file ${fileName}`);
const downloadedFile = await download(fileName); const downloadedFile = await download(fileName);

View File

@@ -33,7 +33,7 @@ class StringifyStream extends stream.Transform {
* @returns {Promise<writerType>} - writer object * @returns {Promise<writerType>} - writer object
*/ */
async function jsonLinesWriter({ fileName, encoding = 'utf-8', header = true }) { async function jsonLinesWriter({ fileName, encoding = 'utf-8', header = true }) {
logger.info(`Writing file ${fileName}`); logger.info(`DBGM-00055 Writing file ${fileName}`);
const stringify = new StringifyStream({ header }); const stringify = new StringifyStream({ header });
const fileStream = fs.createWriteStream(fileName, encoding); const fileStream = fs.createWriteStream(fileName, encoding);
return [stringify, fileStream]; return [stringify, fileStream];

View File

@@ -63,7 +63,7 @@ async function jsonReader({
encoding = 'utf-8', encoding = 'utf-8',
limitRows = undefined, limitRows = undefined,
}) { }) {
logger.info(`Reading file ${fileName}`); logger.info(`DBGM-00056 Reading file ${fileName}`);
const downloadedFile = await download(fileName); const downloadedFile = await download(fileName);
const fileStream = fs.createReadStream( const fileStream = fs.createReadStream(

View File

@@ -96,7 +96,7 @@ class StringifyStream extends stream.Transform {
* @returns {Promise<writerType>} - writer object * @returns {Promise<writerType>} - writer object
*/ */
async function jsonWriter({ fileName, jsonStyle, keyField = '_key', rootField, encoding = 'utf-8' }) { async function jsonWriter({ fileName, jsonStyle, keyField = '_key', rootField, encoding = 'utf-8' }) {
logger.info(`Writing file ${fileName}`); logger.info(`DBGM-00057 Writing file ${fileName}`);
const stringify = new StringifyStream({ jsonStyle, keyField, rootField }); const stringify = new StringifyStream({ jsonStyle, keyField, rootField });
const fileStream = fs.createWriteStream(fileName, encoding); const fileStream = fs.createWriteStream(fileName, encoding);
return [stringify, fileStream]; return [stringify, fileStream];

View File

@@ -6,13 +6,13 @@ const exportDbModel = require('../utility/exportDbModel');
const logger = getLogger('analyseDb'); const logger = getLogger('analyseDb');
async function loadDatabase({ connection = undefined, systemConnection = undefined, driver = undefined, outputDir }) { async function loadDatabase({ connection = undefined, systemConnection = undefined, driver = undefined, outputDir }) {
logger.debug(`Analysing database`); logger.debug(`DBGM-00058 Analysing database`);
if (!driver) driver = requireEngineDriver(connection); if (!driver) driver = requireEngineDriver(connection);
const dbhan = systemConnection || (await connectUtility(driver, connection, 'read', { forceRowsAsObjects: true })); const dbhan = systemConnection || (await connectUtility(driver, connection, 'read', { forceRowsAsObjects: true }));
try { try {
const dbInfo = await driver.analyseFull(dbhan); const dbInfo = await driver.analyseFull(dbhan);
logger.debug(`Analyse finished`); logger.debug(`DBGM-00059 Analyse finished`);
await exportDbModel(dbInfo, outputDir); await exportDbModel(dbInfo, outputDir);
} finally { } finally {

View File

@@ -132,7 +132,7 @@ async function modifyJsonLinesReader({
mergeKey = null, mergeKey = null,
mergeMode = 'merge', mergeMode = 'merge',
}) { }) {
logger.info(`Reading file ${fileName} with change set`); logger.info(`DBGM-00060 Reading file ${fileName} with change set`);
const fileStream = fs.createReadStream( const fileStream = fs.createReadStream(
fileName, fileName,

View File

@@ -29,7 +29,7 @@ async function queryReader({
// if (!sql && !json) { // if (!sql && !json) {
// throw new Error('One of sql or json must be set'); // throw new Error('One of sql or json must be set');
// } // }
logger.info({ sql: query || sql }, `Reading query`); logger.info({ sql: query || sql }, `DBGM-00061 Reading query`);
// else console.log(`Reading query ${JSON.stringify(json)}`); // else console.log(`Reading query ${JSON.stringify(json)}`);
if (!driver) { if (!driver) {

View File

@@ -4,7 +4,7 @@ const { pluginsdir, packagedPluginsDir, getPluginBackendPath } = require('../uti
const platformInfo = require('../utility/platformInfo'); const platformInfo = require('../utility/platformInfo');
const authProxy = require('../utility/authProxy'); const authProxy = require('../utility/authProxy');
const { getLogger } = require('dbgate-tools'); const { getLogger } = require('dbgate-tools');
const { isProApp } = require('../utility/checkLicense'); //
const logger = getLogger('requirePlugin'); const logger = getLogger('requirePlugin');
const loadedPlugins = {}; const loadedPlugins = {};
@@ -13,7 +13,10 @@ const dbgateEnv = {
dbgateApi: null, dbgateApi: null,
platformInfo, platformInfo,
authProxy, authProxy,
isProApp: isProApp() isProApp: () =>{
const { isProApp } = require('../utility/checkLicense');
return isProApp();
}
}; };
function requirePlugin(packageName, requiredPlugin = null) { function requirePlugin(packageName, requiredPlugin = null) {
if (!packageName) throw new Error('Missing packageName in plugin'); if (!packageName) throw new Error('Missing packageName in plugin');
@@ -22,7 +25,7 @@ function requirePlugin(packageName, requiredPlugin = null) {
if (requiredPlugin == null) { if (requiredPlugin == null) {
let module; let module;
const modulePath = getPluginBackendPath(packageName); const modulePath = getPluginBackendPath(packageName);
logger.info(`Loading module ${packageName} from ${modulePath}`); logger.info(`DBGM-00062 Loading module ${packageName} from ${modulePath}`);
try { try {
// @ts-ignore // @ts-ignore
module = __non_webpack_require__(modulePath); module = __non_webpack_require__(modulePath);

View File

@@ -11,7 +11,7 @@ async function runScript(func) {
await func(); await func();
process.exit(0); process.exit(0);
} catch (err) { } catch (err) {
logger.error(extractErrorLogData(err), `Error running script`); logger.error(extractErrorLogData(err), `DBGM-00158 Error running script`);
process.exit(1); process.exit(1);
} }
} }

View File

@@ -41,7 +41,7 @@ class SqlizeStream extends stream.Transform {
} }
async function sqlDataWriter({ fileName, dataName, driver, encoding = 'utf-8' }) { async function sqlDataWriter({ fileName, dataName, driver, encoding = 'utf-8' }) {
logger.info(`Writing file ${fileName}`); logger.info(`DBGM-00063 Writing file ${fileName}`);
const stringify = new SqlizeStream({ fileName, dataName }); const stringify = new SqlizeStream({ fileName, dataName });
const fileStream = fs.createWriteStream(fileName, encoding); const fileStream = fs.createWriteStream(fileName, encoding);
return [stringify, fileStream]; return [stringify, fileStream];

View File

@@ -23,7 +23,7 @@ async function tableReader({ connection, systemConnection, pureName, schemaName,
if (driver.databaseEngineTypes.includes('document')) { if (driver.databaseEngineTypes.includes('document')) {
// @ts-ignore // @ts-ignore
logger.info(`Reading collection ${fullNameToString(fullName)}`); logger.info(`DBGM-00064 Reading collection ${fullNameToString(fullName)}`);
// @ts-ignore // @ts-ignore
return await driver.readQuery(dbhan, JSON.stringify(fullName)); return await driver.readQuery(dbhan, JSON.stringify(fullName));
} }
@@ -32,14 +32,14 @@ async function tableReader({ connection, systemConnection, pureName, schemaName,
const query = `select * from ${quoteFullName(driver.dialect, fullName)}`; const query = `select * from ${quoteFullName(driver.dialect, fullName)}`;
if (table) { if (table) {
// @ts-ignore // @ts-ignore
logger.info(`Reading table ${fullNameToString(table)}`); logger.info(`DBGM-00065 Reading table ${fullNameToString(table)}`);
// @ts-ignore // @ts-ignore
return await driver.readQuery(dbhan, query, table); return await driver.readQuery(dbhan, query, table);
} }
const view = await driver.analyseSingleObject(dbhan, fullName, 'views'); const view = await driver.analyseSingleObject(dbhan, fullName, 'views');
if (view) { if (view) {
// @ts-ignore // @ts-ignore
logger.info(`Reading view ${fullNameToString(view)}`); logger.info(`DBGM-00066 Reading view ${fullNameToString(view)}`);
// @ts-ignore // @ts-ignore
return await driver.readQuery(dbhan, query, view); return await driver.readQuery(dbhan, query, view);
} }

View File

@@ -20,7 +20,7 @@ const logger = getLogger('tableWriter');
* @returns {Promise<writerType>} - writer object * @returns {Promise<writerType>} - writer object
*/ */
async function tableWriter({ connection, schemaName, pureName, driver, systemConnection, ...options }) { async function tableWriter({ connection, schemaName, pureName, driver, systemConnection, ...options }) {
logger.info(`Writing table ${fullNameToString({ schemaName, pureName })}`); logger.info(`DBGM-00067 Writing table ${fullNameToString({ schemaName, pureName })}`);
if (!driver) { if (!driver) {
driver = requireEngineDriver(connection); driver = requireEngineDriver(connection);

View File

@@ -52,14 +52,14 @@ function unzipDirectory(zipPath, outputDirectory) {
readStream.on('end', () => zipFile.readEntry()); readStream.on('end', () => zipFile.readEntry());
writeStream.on('finish', () => { writeStream.on('finish', () => {
logger.info(`Extracted "${entry.fileName}" → "${destPath}".`); logger.info(`DBGM-00068 Extracted "${entry.fileName}" → "${destPath}".`);
res(); res();
}); });
writeStream.on('error', writeErr => { writeStream.on('error', writeErr => {
logger.error( logger.error(
extractErrorLogData(writeErr), extractErrorLogData(writeErr),
`Error extracting "${entry.fileName}" from "${zipPath}".` `DBGM-00069 Error extracting "${entry.fileName}" from "${zipPath}".`
); );
rej(writeErr); rej(writeErr);
}); });
@@ -74,14 +74,14 @@ function unzipDirectory(zipPath, outputDirectory) {
zipFile.on('end', () => { zipFile.on('end', () => {
Promise.all(pending) Promise.all(pending)
.then(() => { .then(() => {
logger.info(`Archive "${zipPath}" fully extracted to "${outputDirectory}".`); logger.info(`DBGM-00070 Archive "${zipPath}" fully extracted to "${outputDirectory}".`);
resolve(true); resolve(true);
}) })
.catch(reject); .catch(reject);
}); });
zipFile.on('error', err => { zipFile.on('error', err => {
logger.error(extractErrorLogData(err), `ZIP file error in ${zipPath}.`); logger.error(extractErrorLogData(err), `DBGM-00071 ZIP file error in ${zipPath}.`);
reject(err); reject(err);
}); });
}); });

View File

@@ -16,16 +16,16 @@ function zipDirectory(inputDirectory, outputFile) {
// Listen for all archive data to be written // Listen for all archive data to be written
output.on('close', () => { output.on('close', () => {
logger.info(`ZIP file created (${archive.pointer()} total bytes)`); logger.info(`DBGM-00072 ZIP file created (${archive.pointer()} total bytes)`);
resolve(); resolve();
}); });
archive.on('warning', err => { archive.on('warning', err => {
logger.warn(extractErrorLogData(err), `Warning while creating ZIP: ${err.message}`); logger.warn(extractErrorLogData(err), `DBGM-00073 Warning while creating ZIP: ${err.message}`);
}); });
archive.on('error', err => { archive.on('error', err => {
logger.error(extractErrorLogData(err), `Error while creating ZIP: ${err.message}`); logger.error(extractErrorLogData(err), `DBGM-00074 Error while creating ZIP: ${err.message}`);
reject(err); reject(err);
}); });

View File

@@ -17,16 +17,16 @@ function zipDirectory(jsonDb, outputFile) {
// Listen for all archive data to be written // Listen for all archive data to be written
output.on('close', () => { output.on('close', () => {
logger.info(`ZIP file created (${archive.pointer()} total bytes)`); logger.info(`DBGM-00075 ZIP file created (${archive.pointer()} total bytes)`);
resolve(); resolve();
}); });
archive.on('warning', err => { archive.on('warning', err => {
logger.warn(extractErrorLogData(err), `Warning while creating ZIP: ${err.message}`); logger.warn(extractErrorLogData(err), `DBGM-00076 Warning while creating ZIP: ${err.message}`);
}); });
archive.on('error', err => { archive.on('error', err => {
logger.error(extractErrorLogData(err), `Error while creating ZIP: ${err.message}`); logger.error(extractErrorLogData(err), `DBGM-00077 Error while creating ZIP: ${err.message}`);
reject(err); reject(err);
}); });

View File

@@ -61,7 +61,7 @@ class DatastoreProxy {
this.subprocess = null; this.subprocess = null;
}); });
this.subprocess.on('error', err => { this.subprocess.on('error', err => {
logger.error(extractErrorLogData(err), 'Error in data store subprocess'); logger.error(extractErrorLogData(err), 'DBGM-00167 Error in data store subprocess');
this.subprocess = null; this.subprocess = null;
}); });
this.subprocess.send({ msgtype: 'open', file: this.file }); this.subprocess.send({ msgtype: 'open', file: this.file });
@@ -77,7 +77,7 @@ class DatastoreProxy {
try { try {
this.subprocess.send({ msgtype: 'read', msgid, offset, limit }); this.subprocess.send({ msgtype: 'read', msgid, offset, limit });
} catch (err) { } catch (err) {
logger.error(extractErrorLogData(err), 'Error getting rows'); logger.error(extractErrorLogData(err), 'DBGM-00168 Error getting rows');
this.subprocess = null; this.subprocess = null;
} }
}); });
@@ -91,7 +91,7 @@ class DatastoreProxy {
try { try {
this.subprocess.send({ msgtype: 'notify', msgid }); this.subprocess.send({ msgtype: 'notify', msgid });
} catch (err) { } catch (err) {
logger.error(extractErrorLogData(err), 'Error notifying subprocess'); logger.error(extractErrorLogData(err), 'DBGM-00169 Error notifying subprocess');
this.subprocess = null; this.subprocess = null;
} }
}); });

View File

@@ -7,7 +7,6 @@ const AsyncLock = require('async-lock');
const lock = new AsyncLock(); const lock = new AsyncLock();
const stableStringify = require('json-stable-stringify'); const stableStringify = require('json-stable-stringify');
const { evaluateCondition } = require('dbgate-sqltree'); const { evaluateCondition } = require('dbgate-sqltree');
const requirePluginFunction = require('./requirePluginFunction');
const esort = require('external-sorting'); const esort = require('external-sorting');
const { jsldir } = require('./directories'); const { jsldir } = require('./directories');
const LineReader = require('./LineReader'); const LineReader = require('./LineReader');
@@ -23,7 +22,10 @@ class JsonLinesDatastore {
this.notifyChangedCallback = null; this.notifyChangedCallback = null;
this.currentFilter = null; this.currentFilter = null;
this.currentSort = null; this.currentSort = null;
if (formatterFunction) {
const requirePluginFunction = require('./requirePluginFunction');
this.rowFormatter = requirePluginFunction(formatterFunction); this.rowFormatter = requirePluginFunction(formatterFunction);
}
this.sortedFiles = {}; this.sortedFiles = {};
} }

View File

@@ -0,0 +1,119 @@
const fs = require('fs-extra');
const path = require('path');
const { logsdir } = require('./directories');
const { format, addDays, startOfDay } = require('date-fns');
const LineReader = require('./LineReader');
const socket = require('./socket');
const _ = require('lodash');
/**
 * Lists the NDJSON log files in the logs directory whose date-prefixed
 * names fall inside [timeFrom, timeTo], sorted ascending.
 *
 * File names start with a 'yyyy-MM-dd' prefix, so a lexicographic
 * comparison against the formatted bounds selects the right days.
 * The upper bound uses timeTo + 1 day (exclusive) so the whole last
 * day is included.
 *
 * @param {Date|number} timeFrom - start of the time range
 * @param {Date|number} timeTo - end of the time range
 * @returns {Promise<string[]>} absolute paths of matching log files
 */
async function getLogFiles(timeFrom, timeTo) {
  const dir = logsdir();
  const lowerBound = format(timeFrom, 'yyyy-MM-dd');
  const upperBound = format(addDays(timeTo, 1), 'yyyy-MM-dd');
  const entries = await fs.readdir(dir);
  const selected = [];
  for (const name of entries) {
    if (!name.endsWith('.ndjson')) continue;
    if (name < lowerBound || name >= upperBound) continue;
    selected.push(name);
  }
  selected.sort();
  return selected.map(name => path.join(dir, name));
}
const RECENT_LOG_LIMIT = 1000;
let recentLogs = null;
const beforeRecentLogs = [];
/**
 * Trims the in-memory recent-log buffer so it never holds more than
 * RECENT_LOG_LIMIT entries, discarding the oldest ones from the front.
 */
function adjustRecentLogs() {
  const excess = recentLogs.length - RECENT_LOG_LIMIT;
  if (excess > 0) {
    recentLogs.splice(0, excess);
  }
}
/**
 * Flattens a raw log entry into an export-friendly record.
 *
 * Produces formatted date/time columns, the delta in milliseconds since
 * the previous exported entry (0 for the first one), and normalizes
 * msgcode/message/conid/database/engine to empty strings when absent.
 * All remaining fields of the entry are carried over unchanged; the raw
 * timestamp is preserved in `ts`.
 *
 * @param {object} entry - parsed log entry (must have a numeric `time`)
 * @param {object|null} lastEntry - previously exported entry, or null
 * @returns {object} record suitable for export
 */
function prepareEntryForExport(entry, lastEntry) {
  const when = new Date(entry.time);
  const passthrough = _.omit(entry, ['time', 'msg', 'msgcode']);
  return {
    date: format(when, 'yyyy-MM-dd'),
    time: format(when, 'HH:mm:ss'),
    dtime: lastEntry ? entry.time - lastEntry.time : 0,
    msgcode: entry.msgcode || '',
    message: entry.msg || '',
    ...passthrough,
    conid: entry.conid || '',
    database: entry.database || '',
    engine: entry.engine || '',
    ts: entry.time,
  };
}
/**
 * Copies all log entries in [timeFrom, timeTo] from the on-disk NDJSON
 * log files into a single output file, one JSON object per line.
 *
 * Lines that fail to parse as JSON are skipped silently (log files may
 * contain partial/corrupt lines). When `prepareForExport` is truthy,
 * each entry is run through prepareEntryForExport before being written.
 *
 * @param {Date|number} timeFrom - inclusive lower bound on entry.time
 * @param {Date|number} timeTo - inclusive upper bound on entry.time
 * @param {string} fileName - path of the output file to create
 * @param {boolean} prepareForExport - transform entries for export
 * @returns {Promise<void>} resolves once the output file is fully flushed
 */
async function copyAppLogsIntoFile(timeFrom, timeTo, fileName, prepareForExport) {
  const writeStream = fs.createWriteStream(fileName);
  let lastEntry = null;
  try {
    for (const file of await getLogFiles(timeFrom, timeTo)) {
      const readStream = fs.createReadStream(file);
      const reader = new LineReader(readStream);
      do {
        const line = await reader.readLine();
        if (line == null) break;
        try {
          const logEntry = JSON.parse(line);
          if (logEntry.time >= timeFrom && logEntry.time <= timeTo) {
            writeStream.write(
              JSON.stringify(prepareForExport ? prepareEntryForExport(logEntry, lastEntry) : logEntry) + '\n'
            );
            lastEntry = logEntry;
          }
        } catch (e) {
          // malformed log line - skip it
          continue;
        }
      } while (true);
    }
  } finally {
    // BUG FIX: the stream was never closed, so the function could resolve
    // before buffered data reached disk (truncated export) and the file
    // descriptor leaked. End the stream and wait for the flush to finish.
    await new Promise((resolve, reject) => {
      writeStream.end(err => (err ? reject(err) : resolve()));
    });
  }
}
/**
 * Builds the in-memory recent-log buffer from today's log files.
 *
 * Reads every log file covering today (start of day .. now), keeping at
 * most RECENT_LOG_LIMIT of the newest parsed entries, then publishes the
 * result as `recentLogs` and appends any entries that were logged while
 * initialization was still in progress (buffered in beforeRecentLogs).
 * Unparseable lines are ignored.
 */
async function initializeRecentLogProvider() {
  const collected = [];
  for (const file of await getLogFiles(startOfDay(new Date()), new Date())) {
    const stream = fs.createReadStream(file);
    const reader = new LineReader(stream);
    for (;;) {
      const line = await reader.readLine();
      if (line == null) break;
      let parsed;
      try {
        parsed = JSON.parse(line);
      } catch (e) {
        // malformed log line - skip it
        continue;
      }
      collected.push(parsed);
      if (collected.length > RECENT_LOG_LIMIT) {
        collected.shift();
      }
    }
  }
  recentLogs = collected;
  recentLogs.push(...beforeRecentLogs);
}
// Monotonic sequence number stamped onto every in-memory log record.
let counter = 0;

/**
 * Appends a log message to the recent-log buffer and broadcasts it.
 *
 * Each message gets a unique `counter` value. Before the provider is
 * initialized (recentLogs is still null), messages are parked in
 * beforeRecentLogs so they are not lost; afterwards they go into the
 * capped recentLogs buffer and are emitted as an 'applog-event'.
 *
 * @param {object} msg - log record to store/broadcast
 */
function pushToRecentLogs(msg) {
  const stamped = { ...msg, counter };
  counter += 1;
  if (!recentLogs) {
    beforeRecentLogs.push(stamped);
    return;
  }
  recentLogs.push(stamped);
  adjustRecentLogs();
  socket.emit('applog-event', stamped);
}
/**
 * Returns the recent in-memory log records: the initialized buffer when
 * available, otherwise the pre-initialization holding array.
 *
 * @returns {object[]} recent log records
 */
function getRecentAppLogRecords() {
  if (recentLogs != null) {
    return recentLogs;
  }
  return beforeRecentLogs;
}
// Public API of the application-log provider module: lifecycle
// (initializeRecentLogProvider), in-memory access (getRecentAppLogRecords,
// pushToRecentLogs) and file export (copyAppLogsIntoFile).
module.exports = {
  initializeRecentLogProvider,
  getRecentAppLogRecords,
  pushToRecentLogs,
  copyAppLogsIntoFile,
};

View File

@@ -12,7 +12,7 @@ function childProcessChecker() {
// This will come once parent dies. // This will come once parent dies.
// One way can be to check for error code ERR_IPC_CHANNEL_CLOSED // One way can be to check for error code ERR_IPC_CHANNEL_CLOSED
// and call process.exit() // and call process.exit()
logger.error(extractErrorLogData(err), 'parent died'); logger.error(extractErrorLogData(err), 'DBGM-00163 parent died');
process.exit(1); process.exit(1);
} }
}, 1000); }, 1000);

View File

@@ -77,7 +77,7 @@ function startCloudTokenChecking(sid, callback) {
callback(resp.data); callback(resp.data);
} }
} catch (err) { } catch (err) {
logger.error(extractErrorLogData(err), 'Error checking cloud token'); logger.error(extractErrorLogData(err), 'DBGM-00164 Error checking cloud token');
} }
}, 500); }, 500);
} }
@@ -125,7 +125,7 @@ async function getCloudUsedEngines() {
const resp = await callCloudApiGet('content-engines'); const resp = await callCloudApiGet('content-engines');
return resp || []; return resp || [];
} catch (err) { } catch (err) {
logger.error(extractErrorLogData(err), 'Error getting cloud content list'); logger.error(extractErrorLogData(err), 'DBGM-00165 Error getting cloud content list');
return []; return [];
} }
} }
@@ -208,7 +208,7 @@ async function updateCloudFiles(isRefresh) {
lastCheckedTm = _.max(cloudFiles.map(x => parseInt(x.modifiedTm))); lastCheckedTm = _.max(cloudFiles.map(x => parseInt(x.modifiedTm)));
} }
logger.info({ tags, lastCheckedTm }, 'Downloading cloud files'); logger.info({ tags, lastCheckedTm }, 'DBGM-00082 Downloading cloud files');
const resp = await axios.default.get( const resp = await axios.default.get(
`${DBGATE_CLOUD_URL}/public-cloud-updates?lastCheckedTm=${lastCheckedTm}&tags=${tags}&isRefresh=${ `${DBGATE_CLOUD_URL}/public-cloud-updates?lastCheckedTm=${lastCheckedTm}&tags=${tags}&isRefresh=${
@@ -223,7 +223,7 @@ async function updateCloudFiles(isRefresh) {
} }
); );
logger.info(`Downloaded ${resp.data.length} cloud files`); logger.info(`DBGM-00083 Downloaded ${resp.data.length} cloud files`);
const filesByPath = lastCheckedTm == 0 ? {} : _.keyBy(cloudFiles, 'path'); const filesByPath = lastCheckedTm == 0 ? {} : _.keyBy(cloudFiles, 'path');
for (const file of resp.data) { for (const file of resp.data) {
@@ -269,7 +269,7 @@ async function refreshPublicFiles(isRefresh) {
try { try {
await updateCloudFiles(isRefresh); await updateCloudFiles(isRefresh);
} catch (err) { } catch (err) {
logger.error(extractErrorLogData(err), 'Error updating cloud files'); logger.error(extractErrorLogData(err), 'DBGM-00166 Error updating cloud files');
} }
} }

View File

@@ -132,7 +132,7 @@ async function connectUtility(driver, storedConnection, connectionMode, addition
connection.ssl = await extractConnectionSslParams(connection); connection.ssl = await extractConnectionSslParams(connection);
const conn = await driver.connect({ ...connection, ...additionalOptions }); const conn = await driver.connect({ conid: connectionLoaded?._id, ...connection, ...additionalOptions });
return conn; return conn;
} }

View File

@@ -14,11 +14,11 @@ const createDirectories = {};
const ensureDirectory = (dir, clean) => { const ensureDirectory = (dir, clean) => {
if (!createDirectories[dir]) { if (!createDirectories[dir]) {
if (clean && fs.existsSync(dir) && !platformInfo.isForkedApi) { if (clean && fs.existsSync(dir) && !platformInfo.isForkedApi) {
getLogger('directories').info(`Cleaning directory ${dir}`); getLogger('directories').info(`DBGM-00170 Cleaning directory ${dir}`);
cleanDirectory(dir, _.isNumber(clean) ? clean : null); cleanDirectory(dir, _.isNumber(clean) ? clean : null);
} }
if (!fs.existsSync(dir)) { if (!fs.existsSync(dir)) {
getLogger('directories').info(`Creating directory ${dir}`); getLogger('directories').info(`DBGM-00171 Creating directory ${dir}`);
fs.mkdirSync(dir); fs.mkdirSync(dir);
} }
createDirectories[dir] = true; createDirectories[dir] = true;

View File

@@ -42,13 +42,13 @@ function extractSingleFileFromZip(zipPath, fileInZip, outputPath) {
// When the file is finished writing, resolve // When the file is finished writing, resolve
writeStream.on('finish', () => { writeStream.on('finish', () => {
logger.info(`File "${fileInZip}" extracted to "${outputPath}".`); logger.info(`DBGM-00088 File "${fileInZip}" extracted to "${outputPath}".`);
resolve(true); resolve(true);
}); });
// Handle write errors // Handle write errors
writeStream.on('error', writeErr => { writeStream.on('error', writeErr => {
logger.error(extractErrorLogData(writeErr), `Error extracting "${fileInZip}" from "${zipPath}".`); logger.error(extractErrorLogData(writeErr), `DBGM-00089 Error extracting "${fileInZip}" from "${zipPath}".`);
reject(writeErr); reject(writeErr);
}); });
}); });
@@ -67,7 +67,7 @@ function extractSingleFileFromZip(zipPath, fileInZip, outputPath) {
// Handle general errors // Handle general errors
zipFile.on('error', err => { zipFile.on('error', err => {
logger.error(extractErrorLogData(err), `ZIP file error in ${zipPath}.`); logger.error(extractErrorLogData(err), `DBGM-00172 ZIP file error in ${zipPath}.`);
reject(err); reject(err);
}); });
}); });

View File

@@ -28,7 +28,7 @@ async function loadModelTransform(file) {
} }
return null; return null;
} catch (err) { } catch (err) {
logger.error(extractErrorLogData(err), `Error loading model transform ${file}`); logger.error(extractErrorLogData(err), `DBGM-00173 Error loading model transform ${file}`);
return null; return null;
} }
} }

View File

@@ -40,7 +40,7 @@ function callForwardProcess(connection, tunnelConfig, tunnelCacheKey) {
tunnelConfig, tunnelConfig,
}); });
} catch (err) { } catch (err) {
logger.error(extractErrorLogData(err), 'Error connecting SSH'); logger.error(extractErrorLogData(err), 'DBGM-00174 Error connecting SSH');
} }
return new Promise((resolve, reject) => { return new Promise((resolve, reject) => {
let promiseHandled = false; let promiseHandled = false;
@@ -57,18 +57,18 @@ function callForwardProcess(connection, tunnelConfig, tunnelCacheKey) {
} }
}); });
subprocess.on('exit', code => { subprocess.on('exit', code => {
logger.info(`SSH forward process exited with code ${code}`); logger.info(`DBGM-00090 SSH forward process exited with code ${code}`);
delete sshTunnelCache[tunnelCacheKey]; delete sshTunnelCache[tunnelCacheKey];
if (!promiseHandled) { if (!promiseHandled) {
reject( reject(
new Error( new Error(
'SSH forward process exited, try to change "Local host address for SSH connections" in Settings/Connections' 'DBGM-00091 SSH forward process exited, try to change "Local host address for SSH connections" in Settings/Connections'
) )
); );
} }
}); });
subprocess.on('error', error => { subprocess.on('error', error => {
logger.error(extractErrorLogData(error), 'SSH forward process error'); logger.error(extractErrorLogData(error), 'DBGM-00092 SSH forward process error');
delete sshTunnelCache[tunnelCacheKey]; delete sshTunnelCache[tunnelCacheKey];
if (!promiseHandled) { if (!promiseHandled) {
reject(error); reject(error);
@@ -97,13 +97,13 @@ async function getSshTunnel(connection) {
}; };
try { try {
logger.info( logger.info(
`Creating SSH tunnel to ${connection.sshHost}-${connection.server}:${connection.port}, using local port ${localPort}` `DBGM-00093 Creating SSH tunnel to ${connection.sshHost}-${connection.server}:${connection.port}, using local port ${localPort}`
); );
const subprocess = await callForwardProcess(connection, tunnelConfig, tunnelCacheKey); const subprocess = await callForwardProcess(connection, tunnelConfig, tunnelCacheKey);
logger.info( logger.info(
`Created SSH tunnel to ${connection.sshHost}-${connection.server}:${connection.port}, using local port ${localPort}` `DBGM-00094 Created SSH tunnel to ${connection.sshHost}-${connection.server}:${connection.port}, using local port ${localPort}`
); );
sshTunnelCache[tunnelCacheKey] = { sshTunnelCache[tunnelCacheKey] = {
@@ -114,7 +114,7 @@ async function getSshTunnel(connection) {
}; };
return sshTunnelCache[tunnelCacheKey]; return sshTunnelCache[tunnelCacheKey];
} catch (err) { } catch (err) {
logger.error(extractErrorLogData(err), 'Error creating SSH tunnel:'); logger.error(extractErrorLogData(err), 'DBGM-00095 Error creating SSH tunnel:');
// error is not cached // error is not cached
return { return {
state: 'error', state: 'error',

View File

@@ -10,7 +10,7 @@ async function handleGetSshTunnelRequest({ msgid, connection }, subprocess) {
try { try {
subprocess.send({ msgtype: 'getsshtunnel-response', msgid, response }); subprocess.send({ msgtype: 'getsshtunnel-response', msgid, response });
} catch (err) { } catch (err) {
logger.error(extractErrorLogData(err), 'Error sending to SSH tunnel'); logger.error(extractErrorLogData(err), 'DBGM-00175 Error sending to SSH tunnel');
} }
} }

View File

@@ -12,11 +12,11 @@ module.exports = function useController(app, electron, route, controller) {
const router = express.Router(); const router = express.Router();
if (controller._init) { if (controller._init) {
logger.info(`Calling init controller for controller ${route}`); logger.info(`DBGM-00096 Calling init controller for controller ${route}`);
try { try {
controller._init(); controller._init();
} catch (err) { } catch (err) {
logger.error(extractErrorLogData(err), `Error initializing controller, exiting application`); logger.error(extractErrorLogData(err), `DBGM-00097 Error initializing controller, exiting application`);
process.exit(1); process.exit(1);
} }
} }
@@ -78,7 +78,7 @@ module.exports = function useController(app, electron, route, controller) {
const data = await controller[key]({ ...req.body, ...req.query }, req); const data = await controller[key]({ ...req.body, ...req.query }, req);
res.json(data); res.json(data);
} catch (err) { } catch (err) {
logger.error(extractErrorLogData(err), `Error when processing route ${route}/${key}`); logger.error(extractErrorLogData(err), `DBGM-00176 Error when processing route ${route}/${key}`);
if (err instanceof MissingCredentialsError) { if (err instanceof MissingCredentialsError) {
res.json({ res.json({
missingCredentials: true, missingCredentials: true,

View File

@@ -330,7 +330,7 @@ class ReplicatorItemHolder {
if (new Date().getTime() - lastLogged.getTime() > 5000) { if (new Date().getTime() - lastLogged.getTime() > 5000) {
logger.info( logger.info(
`Replicating ${this.item.name} in progress, inserted ${inserted} rows, mapped ${mapped} rows, missing ${missing} rows, skipped ${skipped} rows, updated ${updated} rows` `DBGM-00105 Replicating ${this.item.name} in progress, inserted ${inserted} rows, mapped ${mapped} rows, missing ${missing} rows, skipped ${skipped} rows, updated ${updated} rows`
); );
lastLogged = new Date(); lastLogged = new Date();
} }
@@ -489,19 +489,19 @@ export class DataReplicator {
for (const item of this.itemPlan) { for (const item of this.itemPlan) {
const stats = await item.runImport(); const stats = await item.runImport();
logger.info( logger.info(
`Replicated ${item.name}, inserted ${stats.inserted} rows, mapped ${stats.mapped} rows, missing ${stats.missing} rows, skipped ${stats.skipped} rows, updated ${stats.updated} rows, deleted ${stats.deleted} rows` `DBGM-00106 Replicated ${item.name}, inserted ${stats.inserted} rows, mapped ${stats.mapped} rows, missing ${stats.missing} rows, skipped ${stats.skipped} rows, updated ${stats.updated} rows, deleted ${stats.deleted} rows`
); );
} }
} catch (err) { } catch (err) {
logger.error(extractErrorLogData(err), `Failed replicator job, rollbacking. ${err.message}`); logger.error(extractErrorLogData(err), `DBGM-00179 Failed replicator job, rollbacking. ${err.message}`);
await this.runDumperCommand(dmp => dmp.rollbackTransaction()); await this.runDumperCommand(dmp => dmp.rollbackTransaction());
return; return;
} }
if (this.options.rollbackAfterFinish) { if (this.options.rollbackAfterFinish) {
logger.info('Rollbacking transaction, nothing was changed'); logger.info('DBGM-00107 Rollbacking transaction, nothing was changed');
await this.runDumperCommand(dmp => dmp.rollbackTransaction()); await this.runDumperCommand(dmp => dmp.rollbackTransaction());
} else { } else {
logger.info('Committing replicator transaction'); logger.info('DBGM-00108 Committing replicator transaction');
await this.runDumperCommand(dmp => dmp.commitTransaction()); await this.runDumperCommand(dmp => dmp.commitTransaction());
} }

View File

@@ -43,11 +43,11 @@ export class ScriptDrivedDeployer {
dmp.put('select * from ~dbgate_deploy_journal') dmp.put('select * from ~dbgate_deploy_journal')
); );
this.journalItems = rows; this.journalItems = rows;
logger.debug(`Loaded ${rows.length} items from DbGate deploy journal`); logger.debug(`DBGM-00109 Loaded ${rows.length} items from DbGate deploy journal`);
} catch (err) { } catch (err) {
logger.warn( logger.warn(
extractErrorLogData(err), extractErrorLogData(err),
'Error loading DbGate deploy journal, creating table dbgate_deploy_journal' 'DBGM-00110 Error loading DbGate deploy journal, creating table dbgate_deploy_journal'
); );
const dmp = this.driver.createDumper(); const dmp = this.driver.createDumper();
dmp.createTable({ dmp.createTable({
@@ -126,12 +126,12 @@ export class ScriptDrivedDeployer {
runCommandOnDriver(this.dbhan, this.driver, dmp => dmp.beginTransaction()); runCommandOnDriver(this.dbhan, this.driver, dmp => dmp.beginTransaction());
} }
logger.debug(`Running ${category} script ${file.name}`); logger.debug(`DBGM-00111 Running ${category} script ${file.name}`);
try { try {
await this.driver.script(this.dbhan, file.text, { useTransaction: false }); await this.driver.script(this.dbhan, file.text, { useTransaction: false });
await this.saveToJournal(file, category, hash); await this.saveToJournal(file, category, hash);
} catch (err) { } catch (err) {
logger.error(extractErrorLogData(err), `Error running ${category} script ${file.name}`); logger.error(extractErrorLogData(err), `DBGM-00180 Error running ${category} script ${file.name}`);
if (this.driver.supportsTransactions) { if (this.driver.supportsTransactions) {
runCommandOnDriver(this.dbhan, this.driver, dmp => dmp.rollbackTransaction()); runCommandOnDriver(this.dbhan, this.driver, dmp => dmp.rollbackTransaction());
return; return;

View File

@@ -20,10 +20,10 @@ const logger = createLogger('dbmodel');
async function runAndExit(promise) { async function runAndExit(promise) {
try { try {
await promise; await promise;
logger.info('Success'); logger.info('DBGM-00112 Success');
process.exit(); process.exit();
} catch (err) { } catch (err) {
logger.error(extractErrorLogData(err), 'Processing failed'); logger.error(extractErrorLogData(err), 'DBGM-00113 Processing failed');
process.exit(1); process.exit(1);
} }
} }

View File

@@ -41,6 +41,6 @@
"dbgate-plugin-oracle": "^6.0.0-alpha.1", "dbgate-plugin-oracle": "^6.0.0-alpha.1",
"dbgate-web": "^6.0.0-alpha.1", "dbgate-web": "^6.0.0-alpha.1",
"dotenv": "^16.0.0", "dotenv": "^16.0.0",
"pinomin": "^1.0.4" "pinomin": "^1.0.5"
} }
} }

View File

@@ -78,5 +78,12 @@ export function evaluateCondition(condition: Condition, values) {
}); });
return evaluateCondition(replaced, values); return evaluateCondition(replaced, values);
}); });
case 'in':
const value = extractRawValue(evaluateExpression(condition.expr, values));
const list = condition.values;
if (Array.isArray(list)) {
return list.some(item => item == value);
}
return false;
} }
} }

View File

@@ -83,3 +83,44 @@ export function selectKeysFromTable(options: {
}; };
return res; return res;
} }
/**
 * Builds a compound AND condition for querying log records.
 *
 * Always includes a time-window constraint (timeFrom <= timeColumn <= timeTo).
 * For every entry in fieldFilters it adds one condition: a single-element
 * list containing only null becomes an IS NULL test, anything else becomes
 * an IN-list test against the given values.
 *
 * (Name kept as-is, including the historical "Compoud" spelling, because
 * callers import it under this identifier.)
 */
export function createLogCompoudCondition(
  fieldFilters: { [field: string]: string[] },
  timeColumn: string,
  timeFrom: number,
  timeTo: number
): Condition {
  // Time-window bounds on the given time column.
  const parts: Condition[] = [
    {
      conditionType: 'binary',
      operator: '>=',
      left: { exprType: 'column', columnName: timeColumn },
      right: { exprType: 'value', value: timeFrom },
    },
    {
      conditionType: 'binary',
      operator: '<=',
      left: { exprType: 'column', columnName: timeColumn },
      right: { exprType: 'value', value: timeTo },
    },
  ];

  for (const [field, allowedValues] of Object.entries(fieldFilters)) {
    if (allowedValues.length == 1 && allowedValues[0] == null) {
      // A lone null means "match records where this field is absent".
      parts.push({
        conditionType: 'isNull',
        expr: { exprType: 'column', columnName: field },
      });
    } else {
      parts.push({
        conditionType: 'in',
        expr: { exprType: 'column', columnName: field },
        values: allowedValues,
      });
    }
  }

  return {
    conditionType: 'and',
    conditions: parts,
  };
}

View File

@@ -37,7 +37,7 @@
"debug": "^4.3.4", "debug": "^4.3.4",
"json-stable-stringify": "^1.0.1", "json-stable-stringify": "^1.0.1",
"lodash": "^4.17.21", "lodash": "^4.17.21",
"pinomin": "^1.0.4", "pinomin": "^1.0.5",
"toposort": "^2.0.2", "toposort": "^2.0.2",
"uuid": "^3.4.0" "uuid": "^3.4.0"
} }

View File

@@ -5,7 +5,7 @@ import _pick from 'lodash/pick';
import _compact from 'lodash/compact'; import _compact from 'lodash/compact';
import { getLogger } from './getLogger'; import { getLogger } from './getLogger';
import { type Logger } from 'pinomin'; import { type Logger } from 'pinomin';
import { dbNameLogCategory, isCompositeDbName, splitCompositeDbName } from './schemaInfoTools'; import { isCompositeDbName, splitCompositeDbName } from './schemaInfoTools';
import { extractErrorLogData } from './stringTools'; import { extractErrorLogData } from './stringTools';
const logger = getLogger('dbAnalyser'); const logger = getLogger('dbAnalyser');
@@ -77,10 +77,12 @@ export class DatabaseAnalyser<TClient = any> {
return db; return db;
} }
getLogDbInfo() {
return this.driver.getLogDbInfo(this.dbhan);
}
async fullAnalysis() { async fullAnalysis() {
logger.debug( logger.debug(this.getLogDbInfo(), 'DBGM-00126 Performing full analysis');
`Performing full analysis, DB=${dbNameLogCategory(this.dbhan.database)}, engine=${this.driver.engine}`
);
const res = this.addEngineField(await this._runAnalysis()); const res = this.addEngineField(await this._runAnalysis());
// console.log('FULL ANALYSIS', res); // console.log('FULL ANALYSIS', res);
return res; return res;
@@ -101,9 +103,7 @@ export class DatabaseAnalyser<TClient = any> {
} }
async incrementalAnalysis(structure) { async incrementalAnalysis(structure) {
logger.info( logger.info(this.getLogDbInfo(), 'DBGM-00127 Performing incremental analysis');
`Performing incremental analysis, DB=${dbNameLogCategory(this.dbhan.database)}, engine=${this.driver.engine}`
);
this.structure = structure; this.structure = structure;
const modifications = await this.getModifications(); const modifications = await this.getModifications();
@@ -129,7 +129,7 @@ export class DatabaseAnalyser<TClient = any> {
this.modifications = structureModifications; this.modifications = structureModifications;
if (structureWithRowCounts) this.structure = structureWithRowCounts; if (structureWithRowCounts) this.structure = structureWithRowCounts;
logger.info({ modifications: this.modifications }, 'DB modifications detected:'); logger.info({ ...this.getLogDbInfo(), modifications: this.modifications }, 'DBGM-00128 DB modifications detected');
return this.addEngineField(this.mergeAnalyseResult(await this._runAnalysis())); return this.addEngineField(this.mergeAnalyseResult(await this._runAnalysis()));
} }
@@ -274,7 +274,7 @@ export class DatabaseAnalyser<TClient = any> {
this.dbhan.feedback(obj); this.dbhan.feedback(obj);
} }
if (obj && obj.analysingMessage) { if (obj && obj.analysingMessage) {
logger.debug(obj.analysingMessage); logger.debug(this.getLogDbInfo(), obj.analysingMessage);
} }
} }
@@ -347,10 +347,16 @@ export class DatabaseAnalyser<TClient = any> {
} }
try { try {
const res = await this.driver.query(this.dbhan, sql); const res = await this.driver.query(this.dbhan, sql);
this.logger.debug({ rows: res.rows.length, template }, `Loaded analyser query`); this.logger.debug(
{ ...this.getLogDbInfo(), rows: res.rows.length, template },
`DBGM-00129 Loaded analyser query`
);
return res; return res;
} catch (err) { } catch (err) {
logger.error(extractErrorLogData(err, { template }), 'Error running analyser query'); logger.error(
extractErrorLogData(err, { template, ...this.getLogDbInfo() }),
'DBGM-00130 Error running analyser query'
);
return { return {
rows: [], rows: [],
isError: true, isError: true,

View File

@@ -93,7 +93,7 @@ export class SqlGenerator {
} }
private handleException = error => { private handleException = error => {
logger.error(extractErrorLogData(error), 'Unhandled error'); logger.error(extractErrorLogData(error), 'DBGM-00186 Unhandled error');
this.isUnhandledException = true; this.isUnhandledException = true;
}; };

View File

@@ -41,20 +41,20 @@ export function createBulkInsertStreamBase(driver: EngineDriver, stream, dbhan,
writable.structure = structure; writable.structure = structure;
} }
if (structure && options.dropIfExists) { if (structure && options.dropIfExists) {
logger.info(`Dropping table ${fullNameQuoted}`); logger.info(`DBGM-00123 Dropping table ${fullNameQuoted}`);
await driver.script(dbhan, `DROP TABLE ${fullNameQuoted}`); await driver.script(dbhan, `DROP TABLE ${fullNameQuoted}`);
} }
if (options.createIfNotExists && (!structure || options.dropIfExists)) { if (options.createIfNotExists && (!structure || options.dropIfExists)) {
const dmp = driver.createDumper(); const dmp = driver.createDumper();
const createdTableInfo = driver.adaptTableInfo(prepareTableForImport({ ...writable.structure, ...name })); const createdTableInfo = driver.adaptTableInfo(prepareTableForImport({ ...writable.structure, ...name }));
dmp.createTable(createdTableInfo); dmp.createTable(createdTableInfo);
logger.info({ sql: dmp.s }, `Creating table ${fullNameQuoted}`); logger.info({ sql: dmp.s }, `DBGM-00124 Creating table ${fullNameQuoted}`);
await driver.script(dbhan, dmp.s); await driver.script(dbhan, dmp.s);
structure = await driver.analyseSingleTable(dbhan, name); structure = await driver.analyseSingleTable(dbhan, name);
writable.structure = structure; writable.structure = structure;
} }
if (!writable.structure) { if (!writable.structure) {
throw new Error(`Error importing table - ${fullNameQuoted} not found`); throw new Error(`DBGM-00125 Error importing table - ${fullNameQuoted} not found`);
} }
if (options.truncate) { if (options.truncate) {
await driver.script(dbhan, `TRUNCATE TABLE ${fullNameQuoted}`); await driver.script(dbhan, `TRUNCATE TABLE ${fullNameQuoted}`);
@@ -71,7 +71,7 @@ export function createBulkInsertStreamBase(driver: EngineDriver, stream, dbhan,
]) ])
); );
} catch (err) { } catch (err) {
logger.error(extractErrorLogData(err), 'Error during preparing bulk insert table, stopped'); logger.error(extractErrorLogData(err), 'DBGM-00184 Error during preparing bulk insert table, stopped');
writable.destroy(err); writable.destroy(err);
} }
}; };
@@ -129,7 +129,7 @@ export function createBulkInsertStreamBase(driver: EngineDriver, stream, dbhan,
await driver.query(dbhan, dmp.s, { discardResult: true }); await driver.query(dbhan, dmp.s, { discardResult: true });
} }
} catch (err) { } catch (err) {
logger.error(extractErrorLogData(err), 'Error during base bulk insert, insert stopped'); logger.error(extractErrorLogData(err), 'DBGM-00185 Error during base bulk insert, insert stopped');
writable.destroy(err); writable.destroy(err);
} }
}; };

View File

@@ -101,7 +101,7 @@ export const driverBase = {
for (const sqlItem of splitQuery(sql, this.getQuerySplitterOptions('script'))) { for (const sqlItem of splitQuery(sql, this.getQuerySplitterOptions('script'))) {
try { try {
if (options?.logScriptItems) { if (options?.logScriptItems) {
logger.info({ sql: getLimitedQuery(sqlItem as string) }, 'Execute script item'); logger.info({ sql: getLimitedQuery(sqlItem as string) }, 'DBGM-00131 Execute script item');
} }
await this.query(pool, sqlItem, { discardResult: true, ...options?.queryOptions }); await this.query(pool, sqlItem, { discardResult: true, ...options?.queryOptions });
} catch (err) { } catch (err) {
@@ -254,4 +254,12 @@ export const driverBase = {
async writeQueryFromStream(dbhan, sql) { async writeQueryFromStream(dbhan, sql) {
return null; return null;
}, },
getLogDbInfo(dbhan) {
return {
database: dbhan ? dbhan.database : undefined,
engine: this.engine,
conid: dbhan ? dbhan.conid : undefined,
};
},
}; };

View File

@@ -37,15 +37,15 @@ export function extractSchemaNameFromComposite(name: string) {
return splitCompositeDbName(name)?.schema; return splitCompositeDbName(name)?.schema;
} }
export function dbNameLogCategory(database: string): string { // export function getDbNameLogFace(database: string): string {
if (isCompositeDbName(database)) { // if (isCompositeDbName(database)) {
return '~composite'; // return '~composite';
} // }
if (database) { // if (database) {
return '~simple'; // return '~simple';
} // }
return '~nodb'; // return '~nodb';
} // }
export function compositeDbNameIfNeeded( export function compositeDbNameIfNeeded(
connnection: { useSeparateSchemas: boolean }, connnection: { useSeparateSchemas: boolean },

View File

@@ -164,6 +164,7 @@ export interface FilterBehaviourProvider {
export interface DatabaseHandle<TClient = any> { export interface DatabaseHandle<TClient = any> {
client: TClient; client: TClient;
database?: string; database?: string;
conid?: string;
feedback?: (message: any) => void; feedback?: (message: any) => void;
getDatabase?: () => any; getDatabase?: () => any;
connectionType?: string; connectionType?: string;
@@ -336,6 +337,11 @@ export interface EngineDriver<TClient = any> extends FilterBehaviourProvider {
analyserClass?: any; analyserClass?: any;
dumperClass?: any; dumperClass?: any;
singleConnectionOnly?: boolean; singleConnectionOnly?: boolean;
getLogDbInfo(dbhan: DatabaseHandle<TClient>): {
database?: string;
engine: string;
conid?: string;
};
} }
export interface DatabaseModification { export interface DatabaseModification {

View File

@@ -1,6 +1,7 @@
<!DOCTYPE html> <!DOCTYPE html>
<html lang="en"> <html lang="en">
<head> <head>
<!--HEAD_SCRIPT-->
<meta charset="utf-8" /> <meta charset="utf-8" />
<meta name="viewport" content="width=device-width,initial-scale=1" /> <meta name="viewport" content="width=device-width,initial-scale=1" />
@@ -107,6 +108,7 @@
</head> </head>
<body> <body>
<!--BODY_SCRIPT-->
<div id="starting_dbgate_zero"> <div id="starting_dbgate_zero">
<div class="inner-flex"> <div class="inner-flex">
<div class="lds-ellipsis"> <div class="lds-ellipsis">

View File

@@ -184,6 +184,10 @@ select[disabled] {
background-color: var(--theme-bg-1); background-color: var(--theme-bg-1);
} }
.classicform select {
padding: 5px 5px 4px;
}
textarea { textarea {
background-color: var(--theme-bg-0); background-color: var(--theme-bg-0);
color: var(--theme-font-1); color: var(--theme-font-1);

View File

@@ -311,6 +311,7 @@
'img sort-asc': 'mdi mdi-sort-alphabetical-ascending color-icon-green', 'img sort-asc': 'mdi mdi-sort-alphabetical-ascending color-icon-green',
'img sort-desc': 'mdi mdi-sort-alphabetical-descending color-icon-green', 'img sort-desc': 'mdi mdi-sort-alphabetical-descending color-icon-green',
'img map': 'mdi mdi-map color-icon-blue', 'img map': 'mdi mdi-map color-icon-blue',
'img applog': 'mdi mdi-desktop-classic color-icon-green',
'img reference': 'mdi mdi-link-box', 'img reference': 'mdi mdi-link-box',
'img link': 'mdi mdi-link', 'img link': 'mdi mdi-link',

View File

@@ -0,0 +1,526 @@
<script lang="ts" context="module">
export const matchingProps = [];
</script>
<script lang="ts">
import _ from 'lodash';
import ToolStripContainer from '../buttons/ToolStripContainer.svelte';
import ToolStripButton from '../buttons/ToolStripButton.svelte';
import { apiCall, apiOff, apiOn } from '../utility/api';
import { format, startOfDay, endOfDay } from 'date-fns';
import { getIntSettingsValue } from '../settings/settingsTools';
import DateRangeSelector from '../elements/DateRangeSelector.svelte';
import Chip from '../elements/Chip.svelte';
import TabControl from '../elements/TabControl.svelte';
import Link from '../elements/Link.svelte';
import SelectField from '../forms/SelectField.svelte';
import { onDestroy, onMount, tick } from 'svelte';
import DropDownButton from '../buttons/DropDownButton.svelte';
import { showModal } from '../modals/modalTools';
import ValueLookupModal from '../modals/ValueLookupModal.svelte';
import { createLogCompoudCondition } from 'dbgate-sqltree';
import { exportQuickExportFile } from '../utility/exportFileTools';
import ToolStripExportButton, {
createQuickExportHandlerRef,
registerQuickExportHandler,
} from '../buttons/ToolStripExportButton.svelte';
// --- Component state ---
let loadedRows = []; // rows currently rendered in the table (both modes)
let loadedAll = false; // 'date' mode: set once the final page was fetched
let domLoadNext; // sentinel <td> observed for infinite scroll
let observer; // IntersectionObserver driving lazy page loading
let dateFilter = [new Date(), new Date()]; // [from, to] dates for 'date' mode
let selectedLogIndex = null; // index of the expanded detail row, or null
let filters = {}; // field -> accepted values; applied server-side ('date' mode)
let mode = 'recent'; // 'recent' = live tail, 'date' = historical query
let autoScroll = true; // keep the table scrolled to bottom in 'recent' mode
let domTable; // scrollable wrapper element around the table
let jslid; // server-side result-set id used by 'date' mode paging
const quickExportHandlerRef = createQuickExportHandlerRef();
// Compacts UUID-looking strings to their first 8 characters for display.
// null/undefined render as 'N/A'; any other value passes through unchanged.
function formatPossibleUuid(value) {
  if (value == null) {
    return 'N/A';
  }
  const looksLikeUuid =
    _.isString(value) &&
    /^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$/i.test(value);
  return looksLikeUuid ? value.slice(0, 8) : value;
}
// Fetches the next page of rows for 'date' mode and appends it to loadedRows.
// Page size comes from settings (dataGrid.pageSize, clamped to 5..1000); the
// active field filters and the selected day range are applied server-side.
async function loadNextRows() {
  const pageSize = getIntSettingsValue('dataGrid.pageSize', 100, 5, 1000);
  const rows = await apiCall('jsldata/get-rows', {
    jslid,
    offset: loadedRows.length,
    limit: pageSize,
    filters: createLogCompoudCondition(
      filters,
      'time',
      startOfDay(dateFilter[0]).getTime(),
      endOfDay(dateFilter[1]).getTime()
    ),
  });
  loadedRows = [...loadedRows, ...rows];
  // A page shorter than requested means the result set is exhausted.
  // (Was `rows.length < 10`, a leftover constant that forced one extra
  // empty-page request whenever 10..pageSize-1 rows remained.)
  if (rows.length < pageSize) {
    loadedAll = true;
  }
}
// (Re)binds the infinite-scroll IntersectionObserver to the given sentinel
// element. Calling with a falsy dom only tears down the previous observer.
function startObserver(dom) {
  observer?.disconnect();
  observer = null;
  if (!dom) {
    return;
  }
  observer = new IntersectionObserver(entries => {
    const sentinelVisible = entries.some(x => x.isIntersecting);
    if (sentinelVisible) {
      loadNextRows();
    }
  });
  observer.observe(dom);
}
// Svelte reactive statement: whenever 'date' mode is active (re-evaluated when
// mode or domLoadNext changes), attach the infinite-scroll observer to the
// "Loading next rows..." sentinel row.
$: if (mode == 'date') {
  startObserver(domLoadNext);
}
// Reloads table content for the current mode.
// 'recent': fetches the last 100 log rows, then scrolls to the bottom.
// 'date': optionally materializes a new server-side result set (jslid) for
// the selected date range, then clears the table so the observer starts
// paging again from offset 0.
async function reloadData(createNewJslId = true) {
  if (mode == 'recent') {
    loadedRows = await apiCall('files/get-recent-app-log', { limit: 100 });
    await tick();
    scrollToRecent();
  } else if (mode == 'date') {
    if (createNewJslId) {
      const resp = await apiCall('files/fill-app-logs', {
        dateFrom: startOfDay(dateFilter[0]).getTime(),
        dateTo: endOfDay(dateFilter[1]).getTime(),
      });
      jslid = resp.jslid;
    }
    loadedRows = [];
    loadedAll = false;
  }
}
// Replaces the accepted-value list for one filter field and re-queries the
// existing result set (no new jslid is created — reloadData(false)).
function doSetFilter(field, values) {
  filters = { ...filters, [field]: values };
  reloadData(false);
}
// Friendly display names for filter fields shown in the filter chips;
// fields missing here fall back to their raw field name.
const ColumnNamesMap = {
  msgcode: 'Code',
};
// Handles a live 'applog-event' push: in 'recent' mode appends the message
// to the table, skipping rows already loaded (matched by their counter).
function handleLogMessage(msg) {
  if (mode !== 'recent') {
    return;
  }
  const alreadyLoaded = loadedRows.some(x => x.counter == msg.counter);
  if (alreadyLoaded) {
    return;
  }
  loadedRows = [...loadedRows, msg];
  scrollToRecent();
}
// Scrolls the table wrapper to its bottom when auto-scroll is enabled.
function scrollToRecent() {
  if (!autoScroll || !domTable) {
    return;
  }
  domTable.scrollTop = domTable.scrollHeight;
}
// Opens the value-lookup modal for the given field and applies the values
// the user picks as a filter ('date' mode — operates on the current jslid).
function filterBy(field) {
  const onConfirm = values => doSetFilter(field, values);
  showModal(ValueLookupModal, {
    jslid,
    field,
    multiselect: true,
    onConfirm,
  });
}
// Subscribe to live log pushes for the lifetime of the tab and load the
// initial data; the listener is removed on destroy to avoid leaks.
onMount(() => {
  apiOn('applog-event', handleLogMessage);
  reloadData();
});
onDestroy(() => {
  apiOff('applog-event', handleLogMessage);
});
// Quick-export handler factory: materializes the relevant log range on the
// server (today for 'recent' mode, the selected range for 'date' mode) and
// hands the resulting jslid to the export pipeline in the chosen format.
// The two previous call sites differed only in the date pair, so the
// request is built once from a computed range.
const quickExportHandler = fmt => async () => {
  const [dateFrom, dateTo] = mode == 'recent' ? [new Date(), new Date()] : dateFilter;
  const resp = await apiCall('files/fill-app-logs', {
    dateFrom: startOfDay(dateFrom).getTime(),
    dateTo: endOfDay(dateTo).getTime(),
    prepareForExport: true,
  });
  exportQuickExportFile(
    'Log',
    {
      functionName: 'jslDataReader',
      props: {
        jslid: resp.jslid,
      },
    },
    fmt
  );
};
registerQuickExportHandler(quickExportHandler);
</script>
<ToolStripContainer>
<div class="wrapper classicform">
<div class="filters">
<div class="filter-label">Mode:</div>
<SelectField
isNative
options={[
{ label: 'Recent logs', value: 'recent' },
{ label: 'Choose date', value: 'date' },
]}
value={mode}
on:change={e => {
mode = e.detail;
reloadData();
}}
/>
{#if mode === 'recent'}
<div class="filter-label ml-2">Auto-scroll</div>
<input
type="checkbox"
checked={autoScroll}
on:change={e => {
autoScroll = e.target['checked'];
}}
/>
{/if}
{#if mode === 'date'}
<div class="filter-label">Date:</div>
<DateRangeSelector
onChange={value => {
dateFilter = value;
reloadData();
}}
/>
<div class="ml-2">
  <!-- Filter-field picker. data-testid was "AdminAuditLogTab_addFilter",
       a copy-paste leftover from the audit-log tab; renamed for this tab. -->
  <DropDownButton
    data-testid="AppLogTab_addFilter"
    icon="icon filter"
    menu={[
      { text: 'Connection ID', onClick: () => filterBy('conid') },
      { text: 'Database', onClick: () => filterBy('database') },
      { text: 'Engine', onClick: () => filterBy('engine') },
      { text: 'Message code', onClick: () => filterBy('msgcode') },
      { text: 'Caller', onClick: () => filterBy('caller') },
      { text: 'Name', onClick: () => filterBy('name') },
    ]}
  />
</div>
{#each Object.keys(filters) as filterKey}
<div class="ml-2">
<span class="filter-label">{ColumnNamesMap[filterKey] || filterKey}:</span>
{#each filters[filterKey] as value}
<Chip
onClose={() => {
filters = { ...filters, [filterKey]: filters[filterKey].filter(x => x !== value) };
if (!filters[filterKey].length) {
filters = _.omit(filters, filterKey);
}
reloadData(false);
}}
>
{formatPossibleUuid(value)}
</Chip>
{/each}
</div>
{/each}
{/if}
</div>
<div class="tablewrap" bind:this={domTable}>
<table>
<thead>
<tr>
<th style="width:80px">Date</th>
<th>Time</th>
<th>Code</th>
<th>Message</th>
<th>Connection</th>
<th>Database</th>
<th>Engine</th>
<th>Caller</th>
<th>Name</th>
</tr>
</thead>
<tbody>
{#each loadedRows as row, index}
<tr
class="clickable"
on:click={() => {
if (selectedLogIndex === index) {
selectedLogIndex = null;
} else {
selectedLogIndex = index;
}
}}
>
<td>{format(new Date(parseInt(row.time)), 'yyyy-MM-dd')}</td>
<td>{format(new Date(parseInt(row.time)), 'HH:mm:ss')}</td>
<td>{row.msgcode || ''}</td>
<td>{row.msg}</td>
<td>{formatPossibleUuid(row.conid) || ''}</td>
<td>{row.database || ''}</td>
<td>{row.engine?.includes('@') ? row.engine.split('@')[0] : row.engine || ''}</td>
<td>{row.caller || ''}</td>
<td>{row.name || ''}</td>
</tr>
{#if index === selectedLogIndex}
<tr>
<td colspan="9">
<TabControl
isInline
tabs={_.compact([
{ label: 'Details', slot: 1 },
{ label: 'JSON', slot: 2 },
])}
>
<svelte:fragment slot="1">
<div class="details-wrap">
<div class="row">
<div>Message code:</div>
{#if mode == 'date'}
<Link onClick={() => doSetFilter('msgcode', [row.msgcode])}>{row.msgcode || 'N/A'}</Link>
{:else}
{row.msgcode || 'N/A'}
{/if}
</div>
<div class="row">
<div>Message:</div>
{row.msg}
</div>
<div class="row">
<div>Time:</div>
<b>{format(new Date(parseInt(row.time)), 'yyyy-MM-dd HH:mm:ss')}</b>
</div>
<div class="row">
<div>Caller:</div>
{#if mode == 'date'}
<Link onClick={() => doSetFilter('caller', [row.caller])}>{row.caller || 'N/A'}</Link>
{:else}
{row.caller || 'N/A'}
{/if}
</div>
<div class="row">
<div>Name:</div>
{#if mode == 'date'}
<Link onClick={() => doSetFilter('name', [row.name])}>{row.name || 'N/A'}</Link>
{:else}
{row.name || 'N/A'}
{/if}
</div>
{#if row.conid}
<div class="row">
<div>Connection ID:</div>
{#if mode == 'date'}
<Link onClick={() => doSetFilter('conid', [row.conid])}
>{formatPossibleUuid(row.conid)}</Link
>
{:else}
{formatPossibleUuid(row.conid)}
{/if}
</div>
{/if}
{#if row.database}
<div class="row">
<div>Database:</div>
{#if mode == 'date'}
<Link onClick={() => doSetFilter('database', [row.database])}>{row.database}</Link>
{:else}
{row.database}
{/if}
</div>
{/if}
{#if row.engine}
<div class="row">
<div>Engine:</div>
{#if mode == 'date'}
<Link onClick={() => doSetFilter('engine', [row.engine])}>{row.engine}</Link>
{:else}
{row.engine}
{/if}
</div>
{/if}
</div></svelte:fragment
>
<svelte:fragment slot="2">
<pre>{JSON.stringify(row, null, 2)}</pre>
</svelte:fragment>
</TabControl>
</td>
</tr>
{/if}
{/each}
{#if !loadedRows?.length && mode === 'date'}
  <tr>
    <!-- colspan must match the 9 header columns; was 6 -->
    <td colspan="9">No data for selected date</td>
  </tr>
{/if}
{#if !loadedAll && mode === 'date'}
  {#key loadedRows}
    <tr>
      <td colspan="9" bind:this={domLoadNext}>Loading next rows... </td>
    </tr>
  {/key}
{/if}
</tbody>
</table>
</div>
</div>
<svelte:fragment slot="toolstrip">
  <!-- Refresh button. data-testid was "AdminAuditLogTab_refreshButton",
       a copy-paste leftover from the audit-log tab; renamed for this tab. -->
  <ToolStripButton
    icon="icon refresh"
    data-testid="AppLogTab_refreshButton"
    on:click={() => {
      reloadData();
    }}>Refresh</ToolStripButton
  >
  <ToolStripExportButton {quickExportHandlerRef} />
</svelte:fragment>
</ToolStripContainer>
<style>
.editor-wrap {
height: 200px;
}
.tablewrap {
overflow: auto;
flex: 1;
}
.wrapper {
flex: 1;
display: flex;
flex-direction: column;
}
table.disableFocusOutline:focus {
outline: none;
}
table {
border-collapse: collapse;
width: 100%;
}
table.selectable {
user-select: none;
}
tbody tr {
background: var(--theme-bg-0);
}
tbody tr.selected {
background: var(--theme-bg-3);
}
table:focus tbody tr.selected {
background: var(--theme-bg-selected);
}
tbody tr.clickable:hover {
background: var(--theme-bg-hover);
}
thead th {
border: 1px solid var(--theme-border);
background-color: var(--theme-bg-1);
padding: 5px;
}
tbody td {
border: 1px solid var(--theme-border);
}
tbody td {
padding: 5px;
}
td.isHighlighted {
background-color: var(--theme-bg-1);
}
td.clickable {
cursor: pointer;
}
thead {
position: sticky;
top: 0;
z-index: 1;
border-top: 1px solid var(--theme-border);
}
table th {
border-left: none;
}
thead :global(tr:first-child) :global(th) {
border-top: 1px solid var(--theme-border);
}
table td {
border: 0px;
border-bottom: 1px solid var(--theme-border);
border-right: 1px solid var(--theme-border);
}
table {
border-spacing: 0;
border-collapse: separate;
border-left: 1px solid var(--theme-border);
}
.empty-cell {
background-color: var(--theme-bg-1);
}
.filters {
display: flex;
align-items: center;
flex-wrap: wrap;
}
.filter-label {
margin-right: 5px;
color: var(--theme-font-2);
}
.details-wrap {
padding: 10px;
display: flex;
flex-direction: column;
}
.details-wrap .row {
display: flex;
}
.details-wrap .row div:first-child {
width: 150px;
}
pre {
overflow: auto;
max-width: 50vw;
}
</style>

View File

@@ -70,7 +70,7 @@
function handleTest(requestDbList = false) { function handleTest(requestDbList = false) {
const connection = getCurrentConnection(); const connection = getCurrentConnection();
return new Promise((resolve, reject) => { return new Promise(async (resolve, reject) => {
if (connection.passwordMode == 'askPassword' || connection.passwordMode == 'askUser') { if (connection.passwordMode == 'askPassword' || connection.passwordMode == 'askUser') {
showModal(DatabaseLoginModal, { showModal(DatabaseLoginModal, {
testedConnection: connection, testedConnection: connection,
@@ -78,7 +78,8 @@
onCancel: () => resolve(null), onCancel: () => resolve(null),
}); });
} else { } else {
return handleTestCore(connection, requestDbList); const res = await handleTestCore(connection, requestDbList);
resolve(res);
} }
}); });
} }

View File

@@ -24,6 +24,7 @@ import * as MapTab from './MapTab.svelte';
import * as ServerSummaryTab from './ServerSummaryTab.svelte'; import * as ServerSummaryTab from './ServerSummaryTab.svelte';
import * as ImportExportTab from './ImportExportTab.svelte'; import * as ImportExportTab from './ImportExportTab.svelte';
import * as SqlObjectTab from './SqlObjectTab.svelte'; import * as SqlObjectTab from './SqlObjectTab.svelte';
import * as AppLogTab from './AppLogTab.svelte';
import protabs from './index-pro'; import protabs from './index-pro';
@@ -54,5 +55,6 @@ export default {
ServerSummaryTab, ServerSummaryTab,
ImportExportTab, ImportExportTab,
SqlObjectTab, SqlObjectTab,
AppLogTab,
...protabs, ...protabs,
}; };

View File

@@ -19,6 +19,7 @@
import getElectron from '../utility/getElectron'; import getElectron from '../utility/getElectron';
import { showModal } from '../modals/modalTools'; import { showModal } from '../modals/modalTools';
import NewObjectModal from '../modals/NewObjectModal.svelte'; import NewObjectModal from '../modals/NewObjectModal.svelte';
import openNewTab from '../utility/openNewTab';
let domSettings; let domSettings;
let domCloudAccount; let domCloudAccount;
@@ -123,6 +124,16 @@
$visibleWidgetSideBar = true; $visibleWidgetSideBar = true;
}, },
}, },
{
text: 'View application logs',
onClick: () => {
openNewTab({
title: 'Application log',
icon: 'img applog',
tabComponent: 'AppLogTab',
});
},
},
]; ];
currentDropDownMenu.set({ left, top, items }); currentDropDownMenu.set({ left, top, items });
} }

View File

@@ -13,9 +13,9 @@ class Analyser extends DatabaseAnalyser {
} }
async _runAnalysis() { async _runAnalysis() {
this.feedback({ analysingMessage: 'Loading tables' }); this.feedback({ analysingMessage: 'DBGM-00177 Loading tables' });
const tables = await this.analyserQuery('tables', ['tables']); const tables = await this.analyserQuery('tables', ['tables']);
this.feedback({ analysingMessage: 'Loading columns' }); this.feedback({ analysingMessage: 'DBGM-00178 Loading columns' });
const columns = await this.analyserQuery('columns', ['tables']); const columns = await this.analyserQuery('columns', ['tables']);
// this.feedback({ analysingMessage: 'Loading views' }); // this.feedback({ analysingMessage: 'Loading views' });
// const views = await this.analyserQuery('views', ['views']); // const views = await this.analyserQuery('views', ['views']);

View File

@@ -29,11 +29,11 @@ class Analyser extends DatabaseAnalyser {
} }
async _runAnalysis() { async _runAnalysis() {
this.feedback({ analysingMessage: 'Loading tables' }); this.feedback({ analysingMessage: 'DBGM-00181 Loading tables' });
const tables = await this.analyserQuery('tables', ['tables']); const tables = await this.analyserQuery('tables', ['tables']);
this.feedback({ analysingMessage: 'Loading columns' }); this.feedback({ analysingMessage: 'DBGM-00182 Loading columns' });
const columns = await this.analyserQuery('columns', ['tables', 'views']); const columns = await this.analyserQuery('columns', ['tables', 'views']);
this.feedback({ analysingMessage: 'Loading views' }); this.feedback({ analysingMessage: 'DBGM-00183 Loading views' });
let views = await this.analyserQuery('views', ['views']); let views = await this.analyserQuery('views', ['views']);
if (views?.isError) { if (views?.isError) {
views = await this.analyserQuery('viewsNoDefinition', ['views']); views = await this.analyserQuery('viewsNoDefinition', ['views']);

View File

@@ -27,7 +27,7 @@ class CsvPrepareStream extends stream.Transform {
} }
async function writer({ fileName, encoding = 'utf-8', header = true, delimiter, quoted }) { async function writer({ fileName, encoding = 'utf-8', header = true, delimiter, quoted }) {
logger.info(`Writing file ${fileName}`); logger.info(`DBGM-00133 Writing file ${fileName}`);
const csvPrepare = new CsvPrepareStream({ header }); const csvPrepare = new CsvPrepareStream({ header });
const csvStream = csv.stringify({ delimiter, quoted }); const csvStream = csv.stringify({ delimiter, quoted });
const fileStream = fs.createWriteStream(fileName, encoding); const fileStream = fs.createWriteStream(fileName, encoding);

View File

@@ -75,7 +75,7 @@ async function reader({ fileName, encoding = 'ISO-8859-1', includeDeletedRecords
pass.end(); pass.end();
} catch (error) { } catch (error) {
// If any error occurs, destroy the stream with the error // If any error occurs, destroy the stream with the error
logger.error(extractErrorLogData(error), 'Error reading DBF file'); logger.error(extractErrorLogData(error), 'DBGM-00187 Error reading DBF file');
pass.end(); pass.end();
} }
})(); })();

View File

@@ -114,7 +114,7 @@ const driver = {
options.done(); options.done();
} catch (error) { } catch (error) {
logger.error(extractErrorLogData(error), 'Stream error'); logger.error(extractErrorLogData(error), 'DBGM-00188 Stream error');
const { message, procName } = error; const { message, procName } = error;
options.info({ options.info({
message, message,
@@ -206,7 +206,7 @@ const driver = {
pass.end(); pass.end();
return pass; return pass;
} catch (error) { } catch (error) {
logger.error(extractErrorLogData(error), 'ReadQuery error'); logger.error(extractErrorLogData(error), 'DBGM-00189 ReadQuery error');
const { message, procName } = error; const { message, procName } = error;
pass.write({ pass.write({
__isStreamInfo: true, __isStreamInfo: true,

View File

@@ -97,7 +97,7 @@ const driver = {
options.done(); options.done();
} catch (err) { } catch (err) {
logger.error(extractErrorLogData(err), 'Stream error'); logger.error(extractErrorLogData(err), 'DBGM-00190 Stream error');
options.info({ options.info({
message: err.message, message: err.message,
line: err.line, line: err.line,
@@ -199,8 +199,8 @@ const driver = {
await new Promise((resolve, reject) => { await new Promise((resolve, reject) => {
transactionPromise.query(currentSql, function (err, result) { transactionPromise.query(currentSql, function (err, result) {
if (err) { if (err) {
logger.error(extractErrorLogData(err), 'Error executing SQL in transaction'); logger.error(extractErrorLogData(err), 'DBGM-00191 Error executing SQL in transaction');
logger.error({ sql: currentSql }, 'SQL that caused the error'); logger.error({ sql: currentSql }, 'DBGM-00192 SQL that caused the error');
return reject(err); return reject(err);
} }
resolve(result); resolve(result);
@@ -211,19 +211,19 @@ const driver = {
await new Promise((resolve, reject) => { await new Promise((resolve, reject) => {
transactionPromise.commit(function (err) { transactionPromise.commit(function (err) {
if (err) { if (err) {
logger.error(extractErrorLogData(err), 'Error committing transaction'); logger.error(extractErrorLogData(err), 'DBGM-00193 Error committing transaction');
return reject(err); return reject(err);
} }
resolve(); resolve();
}); });
}); });
} catch (error) { } catch (error) {
logger.error(extractErrorLogData(error), 'Transaction error'); logger.error(extractErrorLogData(error), 'DBGM-00194 Transaction error');
if (transactionPromise) { if (transactionPromise) {
await new Promise((resolve, reject) => { await new Promise((resolve, reject) => {
transactionPromise.rollback(function (rollbackErr) { transactionPromise.rollback(function (rollbackErr) {
if (rollbackErr) { if (rollbackErr) {
logger.error(extractErrorLogData(rollbackErr), 'Error rolling back transaction'); logger.error(extractErrorLogData(rollbackErr), 'DBGM-00195 Error rolling back transaction');
return reject(rollbackErr); // Re-reject the rollback error return reject(rollbackErr); // Re-reject the rollback error
} }
resolve(); resolve();

View File

@@ -32,15 +32,15 @@ function createBulkInsertStream(driver, stream, dbhan, name, options) {
writable.checkStructure = async () => { writable.checkStructure = async () => {
try { try {
if (options.dropIfExists) { if (options.dropIfExists) {
logger.info(`Dropping collection ${collectionName}`); logger.info(`DBGM-00137 Dropping collection ${collectionName}`);
await db.collection(collectionName).drop(); await db.collection(collectionName).drop();
} }
if (options.truncate) { if (options.truncate) {
logger.info(`Truncating collection ${collectionName}`); logger.info(`DBGM-00138 Truncating collection ${collectionName}`);
await db.collection(collectionName).deleteMany({}); await db.collection(collectionName).deleteMany({});
} }
} catch (err) { } catch (err) {
logger.error(extractErrorLogData(err), 'Error during preparing mongo bulk insert collection, stopped'); logger.error(extractErrorLogData(err), 'DBGM-00139 Error during preparing mongo bulk insert collection, stopped');
writable.destroy(err); writable.destroy(err);
} }
}; };
@@ -52,7 +52,7 @@ function createBulkInsertStream(driver, stream, dbhan, name, options) {
await db.collection(collectionName).insertMany(rows); await db.collection(collectionName).insertMany(rows);
} catch (err) { } catch (err) {
logger.error(extractErrorLogData(err), 'Error bulk insert collection, stopped'); logger.error(extractErrorLogData(err), 'DBGM-00197 Error bulk insert collection, stopped');
writable.destroy(err); writable.destroy(err);
} }
}; };

View File

@@ -1,4 +1,5 @@
const _ = require('lodash'); const _ = require('lodash');
const { EventEmitter } = require('events');
const stream = require('stream'); const stream = require('stream');
const driverBase = require('../frontend/driver'); const driverBase = require('../frontend/driver');
const Analyser = require('./Analyser'); const Analyser = require('./Analyser');
@@ -58,27 +59,26 @@ async function getScriptableDb(dbhan) {
return db; return db;
} }
/** // /**
* @param {string} uri // * @param {string} uri
* @param {string} dbName // * @param {string} dbName
* @returns {string} // * @returns {string}
*/ // */
function ensureDatabaseInMongoURI(uri, dbName) { // function ensureDatabaseInMongoURI(uri, dbName) {
if (!dbName) return uri; // if (!dbName) return uri;
try { // try {
const url = new URL(uri); // const url = new URL(uri);
const hasDatabase = url.pathname && url.pathname !== '/' && url.pathname.length > 1; // const hasDatabase = url.pathname && url.pathname !== '/' && url.pathname.length > 1;
if (hasDatabase) return uri; // if (hasDatabase) return uri;
// url.pathname = `/${dbName}`;
url.pathname = `/${dbName}`; // return url.toString();
return url.toString(); // } catch (error) {
} catch (error) { // logger.error('DBGM-00198 Invalid URI format:', error.message);
logger.error('Invalid URI format:', error.message); // return uri;
return uri; // }
} // }
}
/** @type {import('dbgate-types').EngineDriver<MongoClient>} */ /** @type {import('dbgate-types').EngineDriver<MongoClient>} */
const driver = { const driver = {
@@ -119,6 +119,7 @@ const driver = {
return { return {
client, client,
database, database,
// mongoUrl,
getDatabase: database ? () => client.db(database) : () => client.db(), getDatabase: database ? () => client.db(database) : () => client.db(),
}; };
}, },
@@ -130,33 +131,33 @@ const driver = {
}; };
}, },
async script(dbhan, sql) { async script(dbhan, sql) {
if (isProApp) { // MongoSH should be used only in stream method
const { NodeDriverServiceProvider } = require('@mongosh/service-provider-node-driver'); // if (isProApp) {
const { ElectronRuntime } = require('@mongosh/browser-runtime-electron'); // const { NodeDriverServiceProvider } = require('@mongosh/service-provider-node-driver');
// const { ElectronRuntime } = require('@mongosh/browser-runtime-electron');
const connectionString = ensureDatabaseInMongoURI(dbhan.client.s.url, dbhan.database); // const connectionString = ensureDatabaseInMongoURI(dbhan.client.s.url, dbhan.database);
const serviceProvider = await NodeDriverServiceProvider.connect(connectionString); // const serviceProvider = await NodeDriverServiceProvider.connect(connectionString);
const runtime = new ElectronRuntime(serviceProvider); // const runtime = new ElectronRuntime(serviceProvider);
const exprValue = await runtime.evaluate(sql); // const exprValue = await runtime.evaluate(sql);
const { printable } = exprValue; // const { printable } = exprValue;
if (Array.isArray(printable)) { // if (Array.isArray(printable)) {
return printable; // return printable;
} else if ('documents' in printable) { // } else if ('documents' in printable) {
return printable.documents; // return printable.documents;
} else if ('cursor' in printable && 'firstBatch' in printable.cursor) { // } else if ('cursor' in printable && 'firstBatch' in printable.cursor) {
return printable.cursor.firstBatch; // return printable.cursor.firstBatch;
} // }
return printable; // return printable;
} else { // }
let func; let func;
func = eval(`(db,ObjectId) => ${sql}`); func = eval(`(db,ObjectId) => ${sql}`);
const db = await getScriptableDb(dbhan); const db = await getScriptableDb(dbhan);
const res = func(db, ObjectId.createFromHexString); const res = func(db, ObjectId.createFromHexString);
if (isPromise(res)) await res; if (isPromise(res)) await res;
}
}, },
async operation(dbhan, operation, options) { async operation(dbhan, operation, options) {
const { type } = operation; const { type } = operation;
@@ -185,16 +186,16 @@ const driver = {
// saveScriptToDatabase({ conid: connection._id, database: name }, `db.createCollection('${newCollection}')`); // saveScriptToDatabase({ conid: connection._id, database: name }, `db.createCollection('${newCollection}')`);
}, },
async stream(dbhan, sql, options) { async stream(dbhan, sql, options) {
if (isProApp) { if (isProApp()) {
const { NodeDriverServiceProvider } = require('@mongosh/service-provider-node-driver'); const { NodeDriverServiceProvider } = require('@mongosh/service-provider-node-driver');
const { ElectronRuntime } = require('@mongosh/browser-runtime-electron'); const { ElectronRuntime } = require('@mongosh/browser-runtime-electron');
let exprValue; let exprValue;
try { try {
const connectionString = ensureDatabaseInMongoURI(dbhan.client.s.url, dbhan.database); const serviceProvider = new NodeDriverServiceProvider(dbhan.client, new EventEmitter(), { productDocsLink: '', productName: 'DbGate' });
const serviceProvider = await NodeDriverServiceProvider.connect(connectionString);
const runtime = new ElectronRuntime(serviceProvider); const runtime = new ElectronRuntime(serviceProvider);
await runtime.evaluate(`use ${dbhan.database}`);
exprValue = await runtime.evaluate(sql); exprValue = await runtime.evaluate(sql);
} catch (err) { } catch (err) {
options.info({ options.info({
@@ -208,6 +209,25 @@ const driver = {
const { printable, type } = exprValue; const { printable, type } = exprValue;
if (typeof printable === 'string') {
options.info({
time: new Date(),
severity: 'info',
message: printable,
});
options.done();
return;
} else if (typeof printable !== 'object' || printable === null) {
options.info({
printable: printable,
time: new Date(),
severity: 'info',
message: 'Query returned not supported value.',
});
options.done();
return;
}
if (type === 'Document') { if (type === 'Document') {
options.recordset({ __isDynamicStructure: true }); options.recordset({ __isDynamicStructure: true });
options.row(printable); options.row(printable);

View File

@@ -130,26 +130,25 @@ class MsSqlAnalyser extends DatabaseAnalyser {
} }
async _runAnalysis() { async _runAnalysis() {
this.feedback({ analysingMessage: 'Loading tables' }); this.feedback({ analysingMessage: 'DBGM-00205 Loading tables' });
const tablesRows = await this.analyserQuery('tables', ['tables']); const tablesRows = await this.analyserQuery('tables', ['tables']);
this.feedback({ analysingMessage: 'Loading columns' }); this.feedback({ analysingMessage: 'DBGM-00206 Loading columns' });
const columnsRows = await this.analyserQuery('columns', ['tables']); const columnsRows = await this.analyserQuery('columns', ['tables']);
const columns = columnsRows.rows.map(getColumnInfo); const columns = columnsRows.rows.map(getColumnInfo);
this.feedback({ analysingMessage: 'DBGM-00207 Loading primary keys' });
this.feedback({ analysingMessage: 'Loading primary keys' });
const pkColumnsRows = await this.analyserQuery('primaryKeys', ['tables']); const pkColumnsRows = await this.analyserQuery('primaryKeys', ['tables']);
this.feedback({ analysingMessage: 'Loading foreign keys' }); this.feedback({ analysingMessage: 'DBGM-00208 Loading foreign keys' });
const fkColumnsRows = await this.analyserQuery('foreignKeys', ['tables']); const fkColumnsRows = await this.analyserQuery('foreignKeys', ['tables']);
this.feedback({ analysingMessage: 'Loading indexes' }); this.feedback({ analysingMessage: 'DBGM-00209 Loading indexes' });
const indexesRows = await this.analyserQuery('indexes', ['tables']); const indexesRows = await this.analyserQuery('indexes', ['tables']);
this.feedback({ analysingMessage: 'Loading index columns' }); this.feedback({ analysingMessage: 'DBGM-00210 Loading index columns' });
const indexcolsRows = await this.analyserQuery('indexcols', ['tables']); const indexcolsRows = await this.analyserQuery('indexcols', ['tables']);
this.feedback({ analysingMessage: 'Loading table sizes' }); this.feedback({ analysingMessage: 'DBGM-00211 Loading table sizes' });
const tableSizes = await this.analyserQuery('tableSizes'); const tableSizes = await this.analyserQuery('tableSizes');
const tableSizesDict = _.mapValues(_.keyBy(tableSizes.rows, 'objectId'), 'tableRowCount'); const tableSizesDict = _.mapValues(_.keyBy(tableSizes.rows, 'objectId'), 'tableRowCount');
this.feedback({ analysingMessage: 'Loading SQL code' }); this.feedback({ analysingMessage: 'DBGM-00212 Loading SQL code' });
const sqlCodeRows = await this.analyserQuery('loadSqlCode', ['views', 'procedures', 'functions', 'triggers']); const sqlCodeRows = await this.analyserQuery('loadSqlCode', ['views', 'procedures', 'functions', 'triggers']);
const getCreateSql = row => const getCreateSql = row =>
sqlCodeRows.rows sqlCodeRows.rows
@@ -157,21 +156,21 @@ class MsSqlAnalyser extends DatabaseAnalyser {
.map(x => x.codeText) .map(x => x.codeText)
.join(''); .join('');
this.feedback({ analysingMessage: 'Loading views' }); this.feedback({ analysingMessage: 'DBGM-00213 Loading views' });
const viewsRows = await this.analyserQuery('views', ['views']); const viewsRows = await this.analyserQuery('views', ['views']);
this.feedback({ analysingMessage: 'Loading procedures & functions' }); this.feedback({ analysingMessage: 'DBGM-00214 Loading procedures & functions' });
const programmableRows = await this.analyserQuery('programmables', ['procedures', 'functions']); const programmableRows = await this.analyserQuery('programmables', ['procedures', 'functions']);
const procedureParameterRows = await this.analyserQuery('proceduresParameters'); const procedureParameterRows = await this.analyserQuery('proceduresParameters');
const functionParameterRows = await this.analyserQuery('functionParameters'); const functionParameterRows = await this.analyserQuery('functionParameters');
this.feedback({ analysingMessage: 'Loading triggers' }); this.feedback({ analysingMessage: 'DBGM-00215 Loading triggers' });
const triggerRows = await this.analyserQuery('triggers'); const triggerRows = await this.analyserQuery('triggers');
this.feedback({ analysingMessage: 'Loading view columns' }); this.feedback({ analysingMessage: 'DBGM-00216 Loading view columns' });
const viewColumnRows = await this.analyserQuery('viewColumns', ['views']); const viewColumnRows = await this.analyserQuery('viewColumns', ['views']);
this.feedback({ analysingMessage: 'Finalizing DB structure' }); this.feedback({ analysingMessage: 'DBGM-00217 Finalizing DB structure' });
const tables = tablesRows.rows.map(row => ({ const tables = tablesRows.rows.map(row => ({
...row, ...row,
contentHash: createObjectContentHash('tables', row, columns), contentHash: createObjectContentHash('tables', row, columns),
@@ -273,8 +272,8 @@ class MsSqlAnalyser extends DatabaseAnalyser {
async _getFastSnapshot() { async _getFastSnapshot() {
const modificationsQueryData = await this.analyserQuery('modifications'); const modificationsQueryData = await this.analyserQuery('modifications');
const baseColumnsRows = await this.analyserQuery('columns', ['tables']); const baseColumnsRows = await this.analyserQuery('baseColumns', ['tables']);
const baseColumns = baseColumnsRows.rows.map(getColumnInfo); const baseColumns = baseColumnsRows.rows;
const tableSizes = await this.analyserQuery('tableSizes'); const tableSizes = await this.analyserQuery('tableSizes');
const res = DatabaseAnalyser.createEmptyStructure(); const res = DatabaseAnalyser.createEmptyStructure();

View File

@@ -72,7 +72,7 @@ function createTediousBulkInsertStream(driver, stream, dbhan, name, options) {
try { try {
await runBulkInsertBatch(dbhan, fullName, writable, rows); await runBulkInsertBatch(dbhan, fullName, writable, rows);
} catch (err) { } catch (err) {
logger.error(extractErrorLogData(err), 'Error during bulk insert, insert stopped'); logger.error(extractErrorLogData(err), 'DBGM-00199 Error during bulk insert, insert stopped');
// writable.emit('error', err); // writable.emit('error', err);
writable.destroy(err); writable.destroy(err);
} }

View File

@@ -96,6 +96,7 @@ const driver = {
client, client,
connectionType, connectionType,
database: conn.database, database: conn.database,
conid: conn.conid,
}; };
}, },
async close(dbhan) { async close(dbhan) {
@@ -169,7 +170,7 @@ const driver = {
const defaultSchemaRows = await this.query(dbhan, 'SELECT SCHEMA_NAME() as name'); const defaultSchemaRows = await this.query(dbhan, 'SELECT SCHEMA_NAME() as name');
const defaultSchema = defaultSchemaRows.rows[0]?.name; const defaultSchema = defaultSchemaRows.rows[0]?.name;
logger.debug(`Loaded ${rows.length} mssql schemas`); logger.debug(`DBGM-00140 Loaded ${rows.length} mssql schemas`);
return rows.map(x => ({ return rows.map(x => ({
...x, ...x,

View File

@@ -118,17 +118,17 @@ class Analyser extends DatabaseAnalyser {
} }
async _runAnalysis() { async _runAnalysis() {
this.feedback({ analysingMessage: 'Loading tables' }); this.feedback({ analysingMessage: 'DBGM-00218 Loading tables' });
const tables = await this.analyserQuery('tables', ['tables']); const tables = await this.analyserQuery('tables', ['tables']);
this.feedback({ analysingMessage: 'Loading columns' }); this.feedback({ analysingMessage: 'DBGM-00219 Loading columns' });
const columns = await this.analyserQuery('columns', ['tables', 'views']); const columns = await this.analyserQuery('columns', ['tables', 'views']);
this.feedback({ analysingMessage: 'Loading primary keys' }); this.feedback({ analysingMessage: 'DBGM-00220 Loading primary keys' });
const pkColumns = await this.analyserQuery('primaryKeys', ['tables']); const pkColumns = await this.analyserQuery('primaryKeys', ['tables']);
this.feedback({ analysingMessage: 'Loading foreign keys' }); this.feedback({ analysingMessage: 'DBGM-00221 Loading foreign keys' });
const fkColumns = await this.analyserQuery('foreignKeys', ['tables']); const fkColumns = await this.analyserQuery('foreignKeys', ['tables']);
this.feedback({ analysingMessage: 'Loading views' }); this.feedback({ analysingMessage: 'DBGM-00222 Loading views' });
const views = await this.analyserQuery('views', ['views']); const views = await this.analyserQuery('views', ['views']);
this.feedback({ analysingMessage: 'Loading programmables' }); this.feedback({ analysingMessage: 'DBGM-00223 Loading programmables' });
const programmables = await this.analyserQuery('programmables', ['procedures', 'functions']); const programmables = await this.analyserQuery('programmables', ['procedures', 'functions']);
const parameters = await this.analyserQuery('parameters', ['procedures', 'functions']); const parameters = await this.analyserQuery('parameters', ['procedures', 'functions']);
@@ -155,20 +155,20 @@ class Analyser extends DatabaseAnalyser {
return acc; return acc;
}, {}); }, {});
this.feedback({ analysingMessage: 'Loading view texts' }); this.feedback({ analysingMessage: 'DBGM-00224 Loading view texts' });
const viewTexts = await this.getViewTexts(views.rows.map(x => x.pureName)); const viewTexts = await this.getViewTexts(views.rows.map(x => x.pureName));
this.feedback({ analysingMessage: 'Loading indexes' }); this.feedback({ analysingMessage: 'DBGM-00225 Loading indexes' });
const indexes = await this.analyserQuery('indexes', ['tables']); const indexes = await this.analyserQuery('indexes', ['tables']);
this.feedback({ analysingMessage: 'Loading uniques' }); this.feedback({ analysingMessage: 'DBGM-00226 Loading uniques' });
this.feedback({ analysingMessage: 'Loading triggers' }); this.feedback({ analysingMessage: 'DBGM-00227 Loading triggers' });
const triggers = await this.analyserQuery('triggers'); const triggers = await this.analyserQuery('triggers');
this.feedback({ analysingMessage: 'Loading scheduler events' }); this.feedback({ analysingMessage: 'DBGM-00228 Loading scheduler events' });
const schedulerEvents = await this.analyserQuery('schedulerEvents'); const schedulerEvents = await this.analyserQuery('schedulerEvents');
const uniqueNames = await this.analyserQuery('uniqueNames', ['tables']); const uniqueNames = await this.analyserQuery('uniqueNames', ['tables']);
this.feedback({ analysingMessage: 'Finalizing DB structure' }); this.feedback({ analysingMessage: 'DBGM-00229 Finalizing DB structure' });
const res = { const res = {
tables: tables.rows.map(table => ({ tables: tables.rows.map(table => ({

View File

@@ -34,7 +34,8 @@ const drivers = driverBases.map(driverBase => ({
analyserClass: Analyser, analyserClass: Analyser,
async connect(props) { async connect(props) {
const { server, port, user, password, database, ssl, isReadOnly, forceRowsAsObjects, socketPath, authType } = props; const { conid, server, port, user, password, database, ssl, isReadOnly, forceRowsAsObjects, socketPath, authType } =
props;
let awsIamToken = null; let awsIamToken = null;
if (authType == 'awsIam') { if (authType == 'awsIam') {
awsIamToken = await authProxy.getAwsIamToken(props); awsIamToken = await authProxy.getAwsIamToken(props);
@@ -60,6 +61,7 @@ const drivers = driverBases.map(driverBase => ({
const dbhan = { const dbhan = {
client, client,
database, database,
conid,
}; };
if (isReadOnly) { if (isReadOnly) {
await this.query(dbhan, 'SET SESSION TRANSACTION READ ONLY'); await this.query(dbhan, 'SET SESSION TRANSACTION READ ONLY');
@@ -138,7 +140,7 @@ const drivers = driverBases.map(driverBase => ({
}; };
const handleError = error => { const handleError = error => {
logger.error(extractErrorLogData(error), 'Stream error'); logger.error(extractErrorLogData(error, this.getLogDbInfo(dbhan)), 'DBGM-00200 Stream error');
const { message } = error; const { message } = error;
options.info({ options.info({
message, message,

View File

@@ -45,26 +45,26 @@ class Analyser extends DatabaseAnalyser {
} }
async _runAnalysis() { async _runAnalysis() {
this.feedback({ analysingMessage: 'Loading tables' }); this.feedback({ analysingMessage: 'DBGM-00230 Loading tables' });
const tables = await this.analyserQuery('tableList', ['tables'], { $owner: this.dbhan.database }); const tables = await this.analyserQuery('tableList', ['tables'], { $owner: this.dbhan.database });
this.feedback({ analysingMessage: 'Loading columns' }); this.feedback({ analysingMessage: 'DBGM-00231 Loading columns' });
const columns = await this.analyserQuery('columns', ['tables', 'views'], { $owner: this.dbhan.database }); const columns = await this.analyserQuery('columns', ['tables', 'views'], { $owner: this.dbhan.database });
this.feedback({ analysingMessage: 'Loading primary keys' }); this.feedback({ analysingMessage: 'DBGM-00232 Loading primary keys' });
const pkColumns = await this.analyserQuery('primaryKeys', ['tables'], { $owner: this.dbhan.database }); const pkColumns = await this.analyserQuery('primaryKeys', ['tables'], { $owner: this.dbhan.database });
//let fkColumns = null; //let fkColumns = null;
this.feedback({ analysingMessage: 'Loading foreign keys' }); this.feedback({ analysingMessage: 'DBGM-00233 Loading foreign keys' });
const fkColumns = await this.analyserQuery('foreignKeys', ['tables'], { $owner: this.dbhan.database }); const fkColumns = await this.analyserQuery('foreignKeys', ['tables'], { $owner: this.dbhan.database });
this.feedback({ analysingMessage: 'Loading views' }); this.feedback({ analysingMessage: 'DBGM-00234 Loading views' });
const views = await this.analyserQuery('views', ['views'], { $owner: this.dbhan.database }); const views = await this.analyserQuery('views', ['views'], { $owner: this.dbhan.database });
this.feedback({ analysingMessage: 'Loading materialized views' }); this.feedback({ analysingMessage: 'DBGM-00235 Loading materialized views' });
const matviews = this.driver.dialect.materializedViews const matviews = this.driver.dialect.materializedViews
? await this.analyserQuery('matviews', ['matviews'], { $owner: this.dbhan.database }) ? await this.analyserQuery('matviews', ['matviews'], { $owner: this.dbhan.database })
: null; : null;
this.feedback({ analysingMessage: 'Loading routines' }); this.feedback({ analysingMessage: 'DBGM-00236 Loading routines' });
const routines = await this.analyserQuery('routines', ['procedures', 'functions'], { const routines = await this.analyserQuery('routines', ['procedures', 'functions'], {
$owner: this.dbhan.database, $owner: this.dbhan.database,
}); });
@@ -91,15 +91,15 @@ class Analyser extends DatabaseAnalyser {
return acc; return acc;
}, {}); }, {});
this.feedback({ analysingMessage: 'Loading indexes' }); this.feedback({ analysingMessage: 'DBGM-00237 Loading indexes' });
const indexes = await this.analyserQuery('indexes', ['tables'], { $owner: this.dbhan.database }); const indexes = await this.analyserQuery('indexes', ['tables'], { $owner: this.dbhan.database });
this.feedback({ analysingMessage: 'Loading unique names' }); this.feedback({ analysingMessage: 'DBGM-00238 Loading unique names' });
const triggers = await this.analyserQuery('triggers', undefined, { $owner: this.dbhan.database }); const triggers = await this.analyserQuery('triggers', undefined, { $owner: this.dbhan.database });
this.feedback({ analysingMessage: 'Loading triggers' }); this.feedback({ analysingMessage: 'DBGM-00239 Loading triggers' });
const uniqueNames = await this.analyserQuery('uniqueNames', ['tables'], { $owner: this.dbhan.database }); const uniqueNames = await this.analyserQuery('uniqueNames', ['tables'], { $owner: this.dbhan.database });
this.feedback({ analysingMessage: 'Finalizing DB structure' }); this.feedback({ analysingMessage: 'DBGM-00240 Finalizing DB structure' });
const fkColumnsMapped = fkColumns.rows.map(x => ({ const fkColumnsMapped = fkColumns.rows.map(x => ({
pureName: x.pure_name, pureName: x.pure_name,

View File

@@ -83,26 +83,26 @@ class Analyser extends DatabaseAnalyser {
} }
async _runAnalysis() { async _runAnalysis() {
this.feedback({ analysingMessage: 'Loading tables' }); this.feedback({ analysingMessage: 'DBGM-00241 Loading tables' });
const tables = await this.analyserQuery(this.driver.dialect.stringAgg ? 'tableModifications' : 'tableList', [ const tables = await this.analyserQuery(this.driver.dialect.stringAgg ? 'tableModifications' : 'tableList', [
'tables', 'tables',
]); ]);
this.feedback({ analysingMessage: 'Loading columns' }); this.feedback({ analysingMessage: 'DBGM-00242 Loading columns' });
const columns = await this.analyserQuery('columns', ['tables', 'views']); const columns = await this.analyserQuery('columns', ['tables', 'views']);
this.feedback({ analysingMessage: 'Loading primary keys' }); this.feedback({ analysingMessage: 'DBGM-00243 Loading primary keys' });
const pkColumns = await this.analyserQuery('primaryKeys', ['tables']); const pkColumns = await this.analyserQuery('primaryKeys', ['tables']);
let fkColumns = null; let fkColumns = null;
this.feedback({ analysingMessage: 'Loading foreign key constraints' }); this.feedback({ analysingMessage: 'DBGM-00244 Loading foreign key constraints' });
// const fk_tableConstraints = await this.analyserQuery('fk_tableConstraints', ['tables']); // const fk_tableConstraints = await this.analyserQuery('fk_tableConstraints', ['tables']);
this.feedback({ analysingMessage: 'Loading foreign key refs' }); this.feedback({ analysingMessage: 'DBGM-00245 Loading foreign key refs' });
const foreignKeys = await this.analyserQuery('foreignKeys', ['tables']); const foreignKeys = await this.analyserQuery('foreignKeys', ['tables']);
this.feedback({ analysingMessage: 'Loading foreign key columns' }); this.feedback({ analysingMessage: 'DBGM-00246 Loading foreign key columns' });
const fk_keyColumnUsage = await this.analyserQuery('fk_keyColumnUsage', ['tables']); const fk_keyColumnUsage = await this.analyserQuery('fk_keyColumnUsage', ['tables']);
// const cntKey = x => `${x.constraint_name}|${x.constraint_schema}`; // const cntKey = x => `${x.constraint_name}|${x.constraint_schema}`;
@@ -149,52 +149,52 @@ class Analyser extends DatabaseAnalyser {
} }
fkColumns = { rows: fkRows }; fkColumns = { rows: fkRows };
this.feedback({ analysingMessage: 'Loading views' }); this.feedback({ analysingMessage: 'DBGM-00247 Loading views' });
const views = await this.analyserQuery('views', ['views']); const views = await this.analyserQuery('views', ['views']);
this.feedback({ analysingMessage: 'Loading materialized views' }); this.feedback({ analysingMessage: 'DBGM-00248 Loading materialized views' });
const matviews = this.driver.dialect.materializedViews ? await this.analyserQuery('matviews', ['matviews']) : null; const matviews = this.driver.dialect.materializedViews ? await this.analyserQuery('matviews', ['matviews']) : null;
this.feedback({ analysingMessage: 'Loading materialized view columns' }); this.feedback({ analysingMessage: 'DBGM-00249 Loading materialized view columns' });
const matviewColumns = this.driver.dialect.materializedViews const matviewColumns = this.driver.dialect.materializedViews
? await this.analyserQuery('matviewColumns', ['matviews']) ? await this.analyserQuery('matviewColumns', ['matviews'])
: null; : null;
this.feedback({ analysingMessage: 'Loading routines' }); this.feedback({ analysingMessage: 'DBGM-00250 Loading routines' });
const routines = await this.analyserQuery('routines', ['procedures', 'functions']); const routines = await this.analyserQuery('routines', ['procedures', 'functions']);
this.feedback({ analysingMessage: 'Loading routine parameters' }); this.feedback({ analysingMessage: 'DBGM-00251 Loading routine parameters' });
const routineParametersRows = await this.analyserQuery('proceduresParameters'); const routineParametersRows = await this.analyserQuery('proceduresParameters');
this.feedback({ analysingMessage: 'Loading indexes' }); this.feedback({ analysingMessage: 'DBGM-00252 Loading indexes' });
const indexes = this.driver.__analyserInternals.skipIndexes const indexes = this.driver.__analyserInternals.skipIndexes
? { rows: [] } ? { rows: [] }
: await this.analyserQuery('indexes', ['tables']); : await this.analyserQuery('indexes', ['tables']);
this.feedback({ analysingMessage: 'Loading index columns' }); this.feedback({ analysingMessage: 'DBGM-00253 Loading index columns' });
const indexcols = this.driver.__analyserInternals.skipIndexes const indexcols = this.driver.__analyserInternals.skipIndexes
? { rows: [] } ? { rows: [] }
: await this.analyserQuery('indexcols', ['tables']); : await this.analyserQuery('indexcols', ['tables']);
this.feedback({ analysingMessage: 'Loading unique names' }); this.feedback({ analysingMessage: 'DBGM-00254 Loading unique names' });
const uniqueNames = await this.analyserQuery('uniqueNames', ['tables']); const uniqueNames = await this.analyserQuery('uniqueNames', ['tables']);
let geometryColumns = { rows: [] }; let geometryColumns = { rows: [] };
if (views.rows.find(x => x.pure_name == 'geometry_columns' && x.schema_name == 'public')) { if (views.rows.find(x => x.pure_name == 'geometry_columns' && x.schema_name == 'public')) {
this.feedback({ analysingMessage: 'Loading geometry columns' }); this.feedback({ analysingMessage: 'DBGM-00255 Loading geometry columns' });
geometryColumns = await this.analyserQuery('geometryColumns', ['tables']); geometryColumns = await this.analyserQuery('geometryColumns', ['tables']);
} }
let geographyColumns = { rows: [] }; let geographyColumns = { rows: [] };
if (views.rows.find(x => x.pure_name == 'geography_columns' && x.schema_name == 'public')) { if (views.rows.find(x => x.pure_name == 'geography_columns' && x.schema_name == 'public')) {
this.feedback({ analysingMessage: 'Loading geography columns' }); this.feedback({ analysingMessage: 'DBGM-00256 Loading geography columns' });
geographyColumns = await this.analyserQuery('geographyColumns', ['tables']); geographyColumns = await this.analyserQuery('geographyColumns', ['tables']);
} }
this.feedback({ analysingMessage: 'Loading triggers' }); this.feedback({ analysingMessage: 'DBGM-00257 Loading triggers' });
const triggers = await this.analyserQuery('triggers'); const triggers = await this.analyserQuery('triggers');
this.feedback({ analysingMessage: 'Finalizing DB structure' }); this.feedback({ analysingMessage: 'DBGM-00258 Finalizing DB structure' });
const columnColumnsMapped = fkColumns.rows.map(x => ({ const columnColumnsMapped = fkColumns.rows.map(x => ({
pureName: x.pure_name, pureName: x.pure_name,
@@ -387,7 +387,7 @@ class Analyser extends DatabaseAnalyser {
procedures: res.procedures?.length, procedures: res.procedures?.length,
functions: res.functions?.length, functions: res.functions?.length,
}, },
'Database structured finalized' 'DBGM-00141 Database structured finalized'
); );
return res; return res;

View File

@@ -78,6 +78,7 @@ const drivers = driverBases.map(driverBase => ({
async connect(props) { async connect(props) {
const { const {
conid,
engine, engine,
server, server,
port, port,
@@ -137,6 +138,7 @@ const drivers = driverBases.map(driverBase => ({
const dbhan = { const dbhan = {
client, client,
database, database,
conid,
}; };
const datatypes = await this.query(dbhan, `SELECT oid, typname FROM pg_type WHERE typname in ('geography')`); const datatypes = await this.query(dbhan, `SELECT oid, typname FROM pg_type WHERE typname in ('geography')`);
@@ -228,7 +230,7 @@ const drivers = driverBases.map(driverBase => ({
}); });
query.on('error', error => { query.on('error', error => {
logger.error(extractErrorLogData(error), 'Stream error'); logger.error(extractErrorLogData(error, this.getLogDbInfo(dbhan)), 'DBGM-00201 Stream error');
const { message, position, procName } = error; const { message, position, procName } = error;
let line = null; let line = null;
if (position) { if (position) {
@@ -382,7 +384,7 @@ const drivers = driverBases.map(driverBase => ({
const defaultSchemaRows = await this.query(dbhan, 'SELECT current_schema'); const defaultSchemaRows = await this.query(dbhan, 'SELECT current_schema');
const defaultSchema = defaultSchemaRows.rows[0]?.current_schema?.trim(); const defaultSchema = defaultSchemaRows.rows[0]?.current_schema?.trim();
logger.debug(`Loaded ${schemaRows.rows.length} postgres schemas`); logger.debug(this.getLogDbInfo(dbhan), `DBGM-00142 Loaded ${schemaRows.rows.length} postgres schemas`);
const schemas = schemaRows.rows.map(x => ({ const schemas = schemaRows.rows.map(x => ({
schemaName: x.schema_name, schemaName: x.schema_name,

View File

@@ -81,7 +81,7 @@ const libsqlDriver = {
try { try {
inTransaction(); inTransaction();
} catch (error) { } catch (error) {
logger.error(extractErrorLogData(error), 'Stream error'); logger.error(extractErrorLogData(error), 'DBGM-00202 Stream error');
const { message, procName } = error; const { message, procName } = error;
options.info({ options.info({
message, message,

View File

@@ -76,7 +76,7 @@ const driver = {
try { try {
inTransaction(); inTransaction();
} catch (error) { } catch (error) {
logger.error(extractErrorLogData(error), 'Stream error'); logger.error(extractErrorLogData(error), 'DBGM-00203 Stream error');
const { message, procName } = error; const { message, procName } = error;
options.info({ options.info({
message, message,

View File

@@ -59,7 +59,7 @@ class ParseStream extends stream.Transform {
} }
async function reader({ fileName, encoding = 'utf-8', itemElementName }) { async function reader({ fileName, encoding = 'utf-8', itemElementName }) {
logger.info(`Reading file ${fileName}`); logger.info(`DBGM-00143 Reading file ${fileName}`);
const fileStream = fs.createReadStream(fileName, encoding); const fileStream = fs.createReadStream(fileName, encoding);
const parser = new ParseStream({ itemElementName }); const parser = new ParseStream({ itemElementName });

View File

@@ -70,7 +70,7 @@ class StringifyStream extends stream.Transform {
} }
async function writer({ fileName, encoding = 'utf-8', itemElementName, rootElementName }) { async function writer({ fileName, encoding = 'utf-8', itemElementName, rootElementName }) {
logger.info(`Writing file ${fileName}`); logger.info(`DBGM-00144 Writing file ${fileName}`);
const stringify = new StringifyStream({ itemElementName, rootElementName }); const stringify = new StringifyStream({ itemElementName, rootElementName });
const fileStream = fs.createWriteStream(fileName, encoding); const fileStream = fs.createWriteStream(fileName, encoding);
return [stringify, fileStream]; return [stringify, fileStream];

View File

@@ -110,7 +110,7 @@ jobs:
- 16009:5556 - 16009:5556
mongo: mongo:
image: mongo:6.0.25 image: mongo:4.4.29
env: env:
MONGO_INITDB_ROOT_USERNAME: root MONGO_INITDB_ROOT_USERNAME: root
MONGO_INITDB_ROOT_PASSWORD: Pwd2020Db MONGO_INITDB_ROOT_PASSWORD: Pwd2020Db

Some files were not shown because too many files have changed in this diff Show More