mirror of
https://github.com/DeNNiiInc/dbgate.git
synced 2026-04-17 21:26:00 +00:00
SYNC: Merge pull request #7 from dbgate/feature/applog
This commit is contained in:
129
common/assign-dbgm-codes.mjs
Normal file
129
common/assign-dbgm-codes.mjs
Normal file
@@ -0,0 +1,129 @@
|
||||
#!/usr/bin/env node
// assign-dbgm-codes.mjs
//
// Scans a source tree for the literal placeholder DBGM-00000 and replaces
// each occurrence with the next unused DBGM-NNNNN code, so every log/error
// message in the repo carries a unique, stable identifier.
import fs from 'fs/promises';
import path from 'path';

// Literal text authors write in messages; replaced with a unique code on each run.
const PLACEHOLDER = 'DBGM-00000';
// Matches any already-assigned code; capture group 1 is the 5-digit numeric part.
const CODE_RE = /DBGM-(\d{5})/g;
// Only JS/TS-family source files are scanned and rewritten.
const JS_TS_RE = /\.(mjs|cjs|js|ts|jsx|tsx)$/i;

// Directory names skipped entirely during the walk (VCS metadata, build output, caches).
const IGNORE_DIRS = new Set([
  'node_modules',
  '.git',
  '.hg',
  '.svn',
  'dist',
  'build',
  'out',
  '.next',
  '.turbo',
  '.cache',
]);
// File names never rewritten (this script itself, plus manifests/docs).
// NOTE(review): walk() only yields names matching JS_TS_RE, so the
// 'package.json' / 'README.md' entries are effectively inert here.
const IGNORE_FILES = ['assign-dbgm-codes.mjs', 'package.json', 'README.md'];

// --- CLI ---
// Usage: assign-dbgm-codes.mjs [--dry] [rootDir]
const args = process.argv.slice(2);
const dryRun = args.includes('--dry'); // report replacements without writing files
const rootArg = args.find(a => a !== '--dry') || process.cwd(); // first non-flag arg, else cwd
const root = path.resolve(rootArg);

// --- helpers ---
|
||||
/**
 * Recursively yields the full paths of JS/TS-family source files under `dir`.
 * Directories listed in IGNORE_DIRS are pruned entirely; file names listed in
 * IGNORE_FILES are skipped even when their extension matches JS_TS_RE.
 * @param {string} dir - directory to walk
 * @yields {string} full path of each candidate source file
 */
async function* walk(dir) {
  for (const entry of await fs.readdir(dir, { withFileTypes: true })) {
    const fullPath = path.join(dir, entry.name);
    if (entry.isDirectory()) {
      if (!IGNORE_DIRS.has(entry.name)) yield* walk(fullPath);
    } else if (entry.isFile() && JS_TS_RE.test(entry.name) && !IGNORE_FILES.includes(entry.name)) {
      yield fullPath;
    }
  }
}
|
||||
|
||||
/**
 * Renders a numeric code in its canonical `DBGM-` + zero-padded form.
 * Numbers longer than five digits pass through unpadded.
 * @param {number} n - positive integer code
 * @returns {string} e.g. 42 -> 'DBGM-00042'
 */
function formatCode(n) {
  const digits = String(n).padStart(5, '0');
  return `DBGM-${digits}`;
}
|
||||
|
||||
/**
 * Builds an allocator that hands out the lowest free DBGM code on each call.
 * `taken` holds the numeric parts already in use across the repo; it is
 * mutated as codes are assigned, so subsequent calls never repeat a code.
 * @param {Set<number>} taken - occupied numeric codes (mutated)
 * @returns {() => string} returns the next free code formatted via formatCode
 */
function makeNextCodeFn(taken) {
  // Invariant: `cursor` always points at the smallest number not in `taken`.
  let cursor = 1;
  while (taken.has(cursor)) cursor += 1;
  return function nextCode() {
    const assigned = cursor;
    taken.add(assigned);
    // Re-establish the invariant for the following call.
    do {
      cursor += 1;
    } while (taken.has(cursor));
    return formatCode(assigned);
  };
}
|
||||
|
||||
// --- main ---
// Two-pass driver: pass 1 records every code already in use anywhere in the
// tree; pass 2 rewrites placeholders sequentially so assignment is
// deterministic and can never collide with an existing code.
(async () => {
  console.log(`Scanning: ${root} ${dryRun ? '(dry run)' : ''}`);

  // Enumerate candidate files once; both passes below reuse the list.
  const sourceFiles = [];
  for await (const f of walk(root)) sourceFiles.push(f);

  // 1) Collect all taken codes across the repo (reads run in parallel).
  const taken = new Set(); // numeric parts only
  await Promise.all(
    sourceFiles.map(async f => {
      let text;
      try {
        text = await fs.readFile(f, 'utf8');
      } catch (err) {
        console.warn(`! Failed to read ${f}: ${err.message}`);
        return;
      }
      for (const match of text.matchAll(CODE_RE)) {
        const num = Number(match[1]);
        if (Number.isInteger(num) && num > 0) taken.add(num);
      }
    })
  );

  console.log(`Found ${taken.size} occupied code(s).`);

  // 2) Replace placeholders with the next available unique code.
  const nextCode = makeNextCodeFn(taken);
  let filesChanged = 0;
  let placeholdersReplaced = 0;

  for (const f of sourceFiles) {
    let text;
    try {
      text = await fs.readFile(f, 'utf8');
    } catch (err) {
      console.warn(`! Failed to read ${f}: ${err.message}`);
      continue;
    }

    // Cheap pre-check avoids running the replace machinery on clean files.
    if (!text.includes(PLACEHOLDER)) continue;

    let hits = 0;
    const updated = text.replaceAll(PLACEHOLDER, () => {
      hits += 1;
      return nextCode();
    });

    if (hits === 0) continue;
    placeholdersReplaced += hits;
    filesChanged += 1;
    console.log(`${dryRun ? '[dry]' : '[write]'} ${f} — ${hits} replacement(s)`);
    if (!dryRun) {
      try {
        await fs.writeFile(f, updated, 'utf8');
      } catch (err) {
        console.warn(`! Failed to write ${f}: ${err.message}`);
      }
    }
  }

  console.log(`Done. Files changed: ${filesChanged}, placeholders replaced: ${placeholdersReplaced}.`);
})().catch(err => {
  console.error(err);
  process.exit(1);
});
|
||||
@@ -72,7 +72,8 @@
|
||||
"translations:extract": "node common/translations-cli/index.js extract",
|
||||
"translations:add-missing": "node common/translations-cli/index.js add-missing",
|
||||
"translations:remove-unused": "node common/translations-cli/index.js remove-unused",
|
||||
"translations:check": "node common/translations-cli/index.js check"
|
||||
"translations:check": "node common/translations-cli/index.js check",
|
||||
"errors": "node common/assign-dbgm-codes.mjs ."
|
||||
},
|
||||
"dependencies": {
|
||||
"concurrently": "^5.1.0",
|
||||
|
||||
@@ -94,7 +94,7 @@ class OAuthProvider extends AuthProviderBase {
|
||||
payload = jwt.decode(id_token);
|
||||
}
|
||||
|
||||
logger.info({ payload }, 'User payload returned from OAUTH');
|
||||
logger.info({ payload }, 'DBGM-00002 User payload returned from OAUTH');
|
||||
|
||||
const login =
|
||||
process.env.OAUTH_LOGIN_FIELD && payload && payload[process.env.OAUTH_LOGIN_FIELD]
|
||||
|
||||
@@ -102,7 +102,7 @@ module.exports = {
|
||||
...fileType('.matview.sql', 'matview.sql'),
|
||||
];
|
||||
} catch (err) {
|
||||
logger.error(extractErrorLogData(err), 'Error reading archive files');
|
||||
logger.error(extractErrorLogData(err), 'DBGM-00001 Error reading archive files');
|
||||
return [];
|
||||
}
|
||||
},
|
||||
|
||||
@@ -99,7 +99,7 @@ function authMiddleware(req, res, next) {
|
||||
return next();
|
||||
}
|
||||
|
||||
logger.error(extractErrorLogData(err), 'Sending invalid token error');
|
||||
logger.error(extractErrorLogData(err), 'DBGM-00098 Sending invalid token error');
|
||||
|
||||
return unauthorizedResponse(req, res, 'invalid token');
|
||||
}
|
||||
|
||||
@@ -45,7 +45,7 @@ module.exports = {
|
||||
const resp = await callCloudApiGet('content-list');
|
||||
return resp;
|
||||
} catch (err) {
|
||||
logger.error(extractErrorLogData(err), 'Error getting cloud content list');
|
||||
logger.error(extractErrorLogData(err), 'DBGM-00099 Error getting cloud content list');
|
||||
|
||||
return [];
|
||||
}
|
||||
|
||||
@@ -116,12 +116,12 @@ function getPortalCollections() {
|
||||
}
|
||||
}
|
||||
|
||||
logger.info({ connections: connections.map(pickSafeConnectionInfo) }, 'Using connections from ENV variables');
|
||||
logger.info({ connections: connections.map(pickSafeConnectionInfo) }, 'DBGM-00005 Using connections from ENV variables');
|
||||
const noengine = connections.filter(x => !x.engine);
|
||||
if (noengine.length > 0) {
|
||||
logger.warn(
|
||||
{ connections: noengine.map(x => x._id) },
|
||||
'Invalid CONNECTIONS configuration, missing ENGINE for connection ID'
|
||||
'DBGM-00006 Invalid CONNECTIONS configuration, missing ENGINE for connection ID'
|
||||
);
|
||||
}
|
||||
return connections;
|
||||
@@ -530,7 +530,7 @@ module.exports = {
|
||||
socket.emit('got-volatile-token', { strmid, savedConId: conid, volatileConId: volatile._id });
|
||||
return { success: true };
|
||||
} catch (err) {
|
||||
logger.error(extractErrorLogData(err), 'Error getting DB token');
|
||||
logger.error(extractErrorLogData(err), 'DBGM-00100 Error getting DB token');
|
||||
return { error: err.message };
|
||||
}
|
||||
},
|
||||
@@ -546,7 +546,7 @@ module.exports = {
|
||||
const resp = await authProvider.login(null, null, { conid: volatile._id }, req);
|
||||
return resp;
|
||||
} catch (err) {
|
||||
logger.error(extractErrorLogData(err), 'Error getting DB token');
|
||||
logger.error(extractErrorLogData(err), 'DBGM-00101 Error getting DB token');
|
||||
return { error: err.message };
|
||||
}
|
||||
},
|
||||
|
||||
@@ -76,7 +76,7 @@ module.exports = {
|
||||
|
||||
handle_error(conid, database, props) {
|
||||
const { error } = props;
|
||||
logger.error(`Error in database connection ${conid}, database ${database}: ${error}`);
|
||||
logger.error(`DBGM-00102 Error in database connection ${conid}, database ${database}: ${error}`);
|
||||
if (props?.msgid) {
|
||||
const [resolve, reject] = this.requests[props?.msgid];
|
||||
reject(error);
|
||||
@@ -144,7 +144,7 @@ module.exports = {
|
||||
handle_copyStreamError(conid, database, { copyStreamError }) {
|
||||
const { progressName } = copyStreamError;
|
||||
const { runid } = progressName;
|
||||
logger.error(`Error in database connection ${conid}, database ${database}: ${copyStreamError}`);
|
||||
logger.error(`DBGM-00103 Error in database connection ${conid}, database ${database}: ${copyStreamError}`);
|
||||
socket.emit(`runner-done-${runid}`);
|
||||
},
|
||||
|
||||
@@ -193,7 +193,7 @@ module.exports = {
|
||||
if (newOpened.disconnected) return;
|
||||
const funcName = `handle_${msgtype}`;
|
||||
if (!this[funcName]) {
|
||||
logger.error(`Unknown message type ${msgtype} from subprocess databaseConnectionProcess`);
|
||||
logger.error(`DBGM-00104 Unknown message type ${msgtype} from subprocess databaseConnectionProcess`);
|
||||
return;
|
||||
}
|
||||
|
||||
@@ -204,7 +204,7 @@ module.exports = {
|
||||
this.close(conid, database, false);
|
||||
});
|
||||
subprocess.on('error', err => {
|
||||
logger.error(extractErrorLogData(err), 'Error in database connection subprocess');
|
||||
logger.error(extractErrorLogData(err), 'DBGM-00114 Error in database connection subprocess');
|
||||
if (newOpened.disconnected) return;
|
||||
this.close(conid, database, false);
|
||||
});
|
||||
@@ -226,7 +226,7 @@ module.exports = {
|
||||
try {
|
||||
conn.subprocess.send({ msgid, ...message });
|
||||
} catch (err) {
|
||||
logger.error(extractErrorLogData(err), 'Error sending request do process');
|
||||
logger.error(extractErrorLogData(err), 'DBGM-00115 Error sending request do process');
|
||||
this.close(conn.conid, conn.database);
|
||||
}
|
||||
});
|
||||
@@ -236,7 +236,7 @@ module.exports = {
|
||||
queryData_meta: true,
|
||||
async queryData({ conid, database, sql }, req) {
|
||||
testConnectionPermission(conid, req);
|
||||
logger.info({ conid, database, sql }, 'Processing query');
|
||||
logger.info({ conid, database, sql }, 'DBGM-00007 Processing query');
|
||||
const opened = await this.ensureOpened(conid, database);
|
||||
// if (opened && opened.status && opened.status.name == 'error') {
|
||||
// return opened.status;
|
||||
@@ -283,7 +283,7 @@ module.exports = {
|
||||
runScript_meta: true,
|
||||
async runScript({ conid, database, sql, useTransaction, logMessage }, req) {
|
||||
testConnectionPermission(conid, req);
|
||||
logger.info({ conid, database, sql }, 'Processing script');
|
||||
logger.info({ conid, database, sql }, 'DBGM-00008 Processing script');
|
||||
const opened = await this.ensureOpened(conid, database);
|
||||
sendToAuditLog(req, {
|
||||
category: 'dbop',
|
||||
@@ -304,7 +304,7 @@ module.exports = {
|
||||
runOperation_meta: true,
|
||||
async runOperation({ conid, database, operation, useTransaction }, req) {
|
||||
testConnectionPermission(conid, req);
|
||||
logger.info({ conid, database, operation }, 'Processing operation');
|
||||
logger.info({ conid, database, operation }, 'DBGM-00009 Processing operation');
|
||||
|
||||
sendToAuditLog(req, {
|
||||
category: 'dbop',
|
||||
@@ -481,7 +481,7 @@ module.exports = {
|
||||
try {
|
||||
existing.subprocess.send({ msgtype: 'ping' });
|
||||
} catch (err) {
|
||||
logger.error(extractErrorLogData(err), 'Error pinging DB connection');
|
||||
logger.error(extractErrorLogData(err), 'DBGM-00116 Error pinging DB connection');
|
||||
this.close(conid, database);
|
||||
|
||||
return {
|
||||
@@ -530,7 +530,7 @@ module.exports = {
|
||||
try {
|
||||
existing.subprocess.kill();
|
||||
} catch (err) {
|
||||
logger.error(extractErrorLogData(err), 'Error killing subprocess');
|
||||
logger.error(extractErrorLogData(err), 'DBGM-00117 Error killing subprocess');
|
||||
}
|
||||
}
|
||||
this.opened = this.opened.filter(x => x.conid != conid || x.database != database);
|
||||
@@ -924,7 +924,7 @@ module.exports = {
|
||||
executeSessionQuery_meta: true,
|
||||
async executeSessionQuery({ sesid, conid, database, sql }, req) {
|
||||
testConnectionPermission(conid, req);
|
||||
logger.info({ sesid, sql }, 'Processing query');
|
||||
logger.info({ sesid, sql }, 'DBGM-00010 Processing query');
|
||||
sessions.dispatchMessage(sesid, 'Query execution started');
|
||||
|
||||
const opened = await this.ensureOpened(conid, database);
|
||||
|
||||
@@ -13,6 +13,7 @@ const dbgateApi = require('../shell');
|
||||
const { getLogger } = require('dbgate-tools');
|
||||
const platformInfo = require('../utility/platformInfo');
|
||||
const { checkSecureFilePathsWithoutDirectory, checkSecureDirectories } = require('../utility/security');
|
||||
const { AppLogDatastore, getRecentAppLogRecords } = require('../utility/AppLogDatastore');
|
||||
const logger = getLogger('files');
|
||||
|
||||
function serialize(format, data) {
|
||||
@@ -28,6 +29,9 @@ function deserialize(format, text) {
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
currentLogReader: null,
|
||||
currentLogParamsKey: null,
|
||||
|
||||
list_meta: true,
|
||||
async list({ folder }, req) {
|
||||
if (!hasPermission(`files/${folder}/read`, req)) return [];
|
||||
@@ -253,7 +257,7 @@ module.exports = {
|
||||
|
||||
createZipFromJsons_meta: true,
|
||||
async createZipFromJsons({ db, filePath }) {
|
||||
logger.info(`Creating zip file from JSONS ${filePath}`);
|
||||
logger.info(`DBGM-00011 Creating zip file from JSONS ${filePath}`);
|
||||
await dbgateApi.zipJsonLinesData(db, filePath);
|
||||
return true;
|
||||
},
|
||||
@@ -279,7 +283,7 @@ module.exports = {
|
||||
const FOLDERS = ['sql', 'sqlite'];
|
||||
for (const folder of FOLDERS) {
|
||||
if (fileName.toLowerCase().endsWith('.' + folder)) {
|
||||
logger.info(`Saving ${folder} file ${fileName}`);
|
||||
logger.info(`DBGM-00012 Saving ${folder} file ${fileName}`);
|
||||
await fs.copyFile(filePath, path.join(filesdir(), folder, fileName));
|
||||
|
||||
socket.emitChanged(`files-changed`, { folder: folder });
|
||||
@@ -291,7 +295,7 @@ module.exports = {
|
||||
}
|
||||
}
|
||||
|
||||
throw new Error(`${fileName} doesn't have one of supported extensions: ${FOLDERS.join(', ')}`);
|
||||
throw new Error(`DBGM-00013 ${fileName} doesn't have one of supported extensions: ${FOLDERS.join(', ')}`);
|
||||
},
|
||||
|
||||
exportFile_meta: true,
|
||||
@@ -311,4 +315,28 @@ module.exports = {
|
||||
await fs.copyFile(sourceFilePath, targetFilePath);
|
||||
return true;
|
||||
},
|
||||
|
||||
getAppLog_meta: true,
|
||||
async getAppLog({ offset = 0, limit = 100, dateFrom = 0, dateTo = new Date().getTime(), filters = {} }) {
|
||||
const paramsKey = `${dateFrom}-${dateTo}`;
|
||||
if (paramsKey != this.currentLogParamsKey) {
|
||||
if (this.currentLogReader) {
|
||||
this.currentLogReader._closeReader();
|
||||
this.currentLogReader = null;
|
||||
}
|
||||
this.currentLogReader = new AppLogDatastore({ timeFrom: dateFrom, timeTo: dateTo });
|
||||
this.currentLogParamsKey = paramsKey;
|
||||
}
|
||||
|
||||
return this.currentLogReader.getRows(offset, limit, filters);
|
||||
},
|
||||
|
||||
getRecentAppLog_meta: true,
|
||||
getRecentAppLog({ limit }) {
|
||||
const res = getRecentAppLogRecords();
|
||||
if (limit) {
|
||||
return res.slice(-limit);
|
||||
}
|
||||
return res;
|
||||
},
|
||||
};
|
||||
|
||||
@@ -48,7 +48,7 @@ require=null;
|
||||
async function run() {
|
||||
${script}
|
||||
await dbgateApi.finalizer.run();
|
||||
logger.info('Finished job script');
|
||||
logger.info('DBGM-00014 Finished job script');
|
||||
}
|
||||
dbgateApi.runScript(run);
|
||||
`;
|
||||
@@ -132,7 +132,7 @@ module.exports = {
|
||||
const pluginNames = extractPlugins(scriptText);
|
||||
// console.log('********************** SCRIPT TEXT **********************');
|
||||
// console.log(scriptText);
|
||||
logger.info({ scriptFile }, 'Running script');
|
||||
logger.info({ scriptFile }, 'DBGM-00015 Running script');
|
||||
// const subprocess = fork(scriptFile, ['--checkParent', '--max-old-space-size=8192'], {
|
||||
const subprocess = fork(
|
||||
scriptFile,
|
||||
@@ -171,7 +171,7 @@ module.exports = {
|
||||
subprocess.on('exit', code => {
|
||||
// console.log('... EXITED', code);
|
||||
this.rejectRequest(runid, { message: 'No data returned, maybe input data source is too big' });
|
||||
logger.info({ code, pid: subprocess.pid }, 'Exited process');
|
||||
logger.info({ code, pid: subprocess.pid }, 'DBGM-00016 Exited process');
|
||||
socket.emit(`runner-done-${runid}`, code);
|
||||
this.opened = this.opened.filter(x => x.runid != runid);
|
||||
});
|
||||
@@ -222,7 +222,7 @@ module.exports = {
|
||||
|
||||
subprocess.on('exit', code => {
|
||||
console.log('... EXITED', code);
|
||||
logger.info({ code, pid: subprocess.pid }, 'Exited process');
|
||||
logger.info({ code, pid: subprocess.pid }, 'DBGM-00017 Exited process');
|
||||
this.dispatchMessage(runid, `Finished external process with code ${code}`);
|
||||
socket.emit(`runner-done-${runid}`, code);
|
||||
if (onFinished) {
|
||||
@@ -258,7 +258,7 @@ module.exports = {
|
||||
severity: 'error',
|
||||
message: extractErrorMessage(err),
|
||||
});
|
||||
logger.error(extractErrorLogData(err), 'Caught error on stdin');
|
||||
logger.error(extractErrorLogData(err), 'DBGM-00118 Caught error on stdin');
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
@@ -24,7 +24,7 @@ module.exports = {
|
||||
if (!match) return;
|
||||
const pattern = match[1];
|
||||
if (!cron.validate(pattern)) return;
|
||||
logger.info(`Schedule script ${file} with pattern ${pattern}`);
|
||||
logger.info(`DBGM-00018 Schedule script ${file} with pattern ${pattern}`);
|
||||
const task = cron.schedule(pattern, () => runners.start({ script: text }));
|
||||
this.tasks.push(task);
|
||||
},
|
||||
|
||||
@@ -103,7 +103,7 @@ module.exports = {
|
||||
this.close(conid, false);
|
||||
});
|
||||
subprocess.on('error', err => {
|
||||
logger.error(extractErrorLogData(err), 'Error in server connection subprocess');
|
||||
logger.error(extractErrorLogData(err), 'DBGM-00119 Error in server connection subprocess');
|
||||
if (newOpened.disconnected) return;
|
||||
this.close(conid, false);
|
||||
});
|
||||
@@ -121,7 +121,7 @@ module.exports = {
|
||||
try {
|
||||
existing.subprocess.kill();
|
||||
} catch (err) {
|
||||
logger.error(extractErrorLogData(err), 'Error killing subprocess');
|
||||
logger.error(extractErrorLogData(err), 'DBGM-00120 Error killing subprocess');
|
||||
}
|
||||
}
|
||||
this.opened = this.opened.filter(x => x.conid != conid);
|
||||
@@ -191,7 +191,7 @@ module.exports = {
|
||||
try {
|
||||
opened.subprocess.send({ msgtype: 'ping' });
|
||||
} catch (err) {
|
||||
logger.error(extractErrorLogData(err), 'Error pinging server connection');
|
||||
logger.error(extractErrorLogData(err), 'DBGM-00121 Error pinging server connection');
|
||||
this.close(conid);
|
||||
}
|
||||
})
|
||||
@@ -244,7 +244,7 @@ module.exports = {
|
||||
try {
|
||||
conn.subprocess.send({ msgid, ...message });
|
||||
} catch (err) {
|
||||
logger.error(extractErrorLogData(err), 'Error sending request');
|
||||
logger.error(extractErrorLogData(err), 'DBGM-00122 Error sending request');
|
||||
this.close(conn.conid);
|
||||
}
|
||||
});
|
||||
|
||||
@@ -165,7 +165,7 @@ module.exports = {
|
||||
message: 'Executing query',
|
||||
});
|
||||
|
||||
logger.info({ sesid, sql }, 'Processing query');
|
||||
logger.info({ sesid, sql }, 'DBGM-00019 Processing query');
|
||||
this.dispatchMessage(sesid, 'Query execution started');
|
||||
session.subprocess.send({
|
||||
msgtype: 'executeQuery',
|
||||
@@ -186,7 +186,7 @@ module.exports = {
|
||||
throw new Error('Invalid session');
|
||||
}
|
||||
|
||||
logger.info({ sesid, command }, 'Processing control command');
|
||||
logger.info({ sesid, command }, 'DBGM-00020 Processing control command');
|
||||
this.dispatchMessage(sesid, `${_.startCase(command)} started`);
|
||||
session.subprocess.send({ msgtype: 'executeControlCommand', command });
|
||||
|
||||
@@ -224,7 +224,7 @@ module.exports = {
|
||||
throw new Error('Invalid session');
|
||||
}
|
||||
|
||||
logger.info({ sesid }, 'Starting profiler');
|
||||
logger.info({ sesid }, 'DBGM-00021 Starting profiler');
|
||||
session.loadingReader_jslid = jslid;
|
||||
session.subprocess.send({ msgtype: 'startProfiler', jslid });
|
||||
|
||||
@@ -271,7 +271,7 @@ module.exports = {
|
||||
try {
|
||||
session.subprocess.send({ msgtype: 'ping' });
|
||||
} catch (err) {
|
||||
logger.error(extractErrorLogData(err), 'Error pinging session');
|
||||
logger.error(extractErrorLogData(err), 'DBGM-00145 Error pinging session');
|
||||
|
||||
return {
|
||||
status: 'error',
|
||||
|
||||
@@ -28,7 +28,7 @@ module.exports = {
|
||||
}
|
||||
const uploadName = crypto.randomUUID();
|
||||
const filePath = path.join(uploadsdir(), uploadName);
|
||||
logger.info(`Uploading file ${data.name}, size=${data.size}`);
|
||||
logger.info(`DBGM-00025 Uploading file ${data.name}, size=${data.size}`);
|
||||
|
||||
data.mv(filePath, () => {
|
||||
res.json({
|
||||
@@ -115,7 +115,7 @@ module.exports = {
|
||||
|
||||
return response.data;
|
||||
} catch (err) {
|
||||
logger.error(extractErrorLogData(err), 'Error uploading gist');
|
||||
logger.error(extractErrorLogData(err), 'DBGM-00148 Error uploading gist');
|
||||
|
||||
return {
|
||||
apiErrorMessage: err.message,
|
||||
|
||||
@@ -9,7 +9,7 @@ const currentVersion = require('./currentVersion');
|
||||
const logger = getLogger('apiIndex');
|
||||
|
||||
process.on('uncaughtException', err => {
|
||||
logger.fatal(extractErrorLogData(err), 'Uncaught exception, exiting process');
|
||||
logger.fatal(extractErrorLogData(err), 'DBGM-00259 Uncaught exception, exiting process');
|
||||
process.exit(1);
|
||||
});
|
||||
|
||||
@@ -33,6 +33,9 @@ if (processArgs.processDisplayName) {
|
||||
// }
|
||||
|
||||
function configureLogger() {
|
||||
const { initializeRecentLogProvider, pushToRecentLogs } = require('./utility/AppLogDatastore');
|
||||
initializeRecentLogProvider();
|
||||
|
||||
const logsFilePath = path.join(logsdir(), `${moment().format('YYYY-MM-DD-HH-mm')}-${process.pid}.ndjson`);
|
||||
setLogsFilePath(logsFilePath);
|
||||
setLoggerName('main');
|
||||
@@ -63,7 +66,21 @@ function configureLogger() {
|
||||
{ flags: 'a' }
|
||||
);
|
||||
}
|
||||
streamsByDatePart[datePart].write(`${JSON.stringify(msg)}\n`);
|
||||
const additionals = {};
|
||||
const finalMsg =
|
||||
msg.msg && msg.msg.match(/^DBGM-\d\d\d\d\d/)
|
||||
? {
|
||||
...msg,
|
||||
msg: msg.msg.substring(10).trimStart(),
|
||||
msgcode: msg.msg.substring(0, 10),
|
||||
...additionals,
|
||||
}
|
||||
: {
|
||||
...msg,
|
||||
...additionals,
|
||||
};
|
||||
streamsByDatePart[datePart].write(`${JSON.stringify(finalMsg)}\n`);
|
||||
pushToRecentLogs(finalMsg);
|
||||
},
|
||||
},
|
||||
},
|
||||
@@ -114,10 +131,10 @@ function configureLogger() {
|
||||
|
||||
if (processArgs.listenApi) {
|
||||
configureLogger();
|
||||
logger.info(`Starting API process version ${currentVersion.version}`);
|
||||
logger.info(`DBGM-00026 Starting API process version ${currentVersion.version}`);
|
||||
|
||||
if (process.env.DEBUG_PRINT_ENV_VARIABLES) {
|
||||
logger.info('Debug print environment variables:');
|
||||
logger.info('DBGM-00027 Debug print environment variables:');
|
||||
for (const key of Object.keys(process.env)) {
|
||||
logger.info(` ${key}: ${JSON.stringify(process.env[key])}`);
|
||||
}
|
||||
|
||||
@@ -191,15 +191,15 @@ function start() {
|
||||
|
||||
if (platformInfo.isDocker) {
|
||||
const port = process.env.PORT || 3000;
|
||||
logger.info(`DbGate API listening on port ${port} (docker build)`);
|
||||
logger.info(`DBGM-00028 DbGate API listening on port ${port} (docker build)`);
|
||||
server.listen(port);
|
||||
} else if (platformInfo.isAwsUbuntuLayout) {
|
||||
const port = process.env.PORT || 3000;
|
||||
logger.info(`DbGate API listening on port ${port} (AWS AMI build)`);
|
||||
logger.info(`DBGM-00029 DbGate API listening on port ${port} (AWS AMI build)`);
|
||||
server.listen(port);
|
||||
} else if (platformInfo.isAzureUbuntuLayout) {
|
||||
const port = process.env.PORT || 3000;
|
||||
logger.info(`DbGate API listening on port ${port} (Azure VM build)`);
|
||||
logger.info(`DBGM-00030 DbGate API listening on port ${port} (Azure VM build)`);
|
||||
server.listen(port);
|
||||
} else if (platformInfo.isNpmDist) {
|
||||
getPort({
|
||||
@@ -209,27 +209,27 @@ function start() {
|
||||
),
|
||||
}).then(port => {
|
||||
server.listen(port, () => {
|
||||
logger.info(`DbGate API listening on port ${port} (NPM build)`);
|
||||
logger.info(`DBGM-00031 DbGate API listening on port ${port} (NPM build)`);
|
||||
});
|
||||
});
|
||||
} else if (process.env.DEVWEB) {
|
||||
const port = process.env.PORT || 3000;
|
||||
logger.info(`DbGate API & web listening on port ${port} (dev web build)`);
|
||||
logger.info(`DBGM-00032 DbGate API & web listening on port ${port} (dev web build)`);
|
||||
server.listen(port);
|
||||
} else {
|
||||
const port = process.env.PORT || 3000;
|
||||
logger.info(`DbGate API listening on port ${port} (dev API build)`);
|
||||
logger.info(`DBGM-00033 DbGate API listening on port ${port} (dev API build)`);
|
||||
server.listen(port);
|
||||
}
|
||||
|
||||
function shutdown() {
|
||||
logger.info('\nShutting down DbGate API server');
|
||||
logger.info('DBGM-00034 Shutting down DbGate API server');
|
||||
server.close(() => {
|
||||
logger.info('Server shut down, terminating');
|
||||
logger.info('DBGM-00035 Server shut down, terminating');
|
||||
process.exit(0);
|
||||
});
|
||||
setTimeout(() => {
|
||||
logger.info('Server close timeout, terminating');
|
||||
logger.info('DBGM-00036 Server close timeout, terminating');
|
||||
process.exit(0);
|
||||
}, 1000);
|
||||
}
|
||||
|
||||
@@ -131,10 +131,10 @@ async function readVersion() {
|
||||
const driver = requireEngineDriver(storedConnection);
|
||||
try {
|
||||
const version = await driver.getVersion(dbhan);
|
||||
logger.debug(`Got server version: ${version.version}`);
|
||||
logger.debug(`DBGM-00037 Got server version: ${version.version}`);
|
||||
serverVersion = version;
|
||||
} catch (err) {
|
||||
logger.error(extractErrorLogData(err), 'Error getting DB server version');
|
||||
logger.error(extractErrorLogData(err), 'DBGM-00149 Error getting DB server version');
|
||||
serverVersion = { version: 'Unknown' };
|
||||
}
|
||||
process.send({ msgtype: 'version', version: serverVersion });
|
||||
@@ -148,9 +148,9 @@ async function handleConnect({ connection, structure, globalSettings }) {
|
||||
const driver = requireEngineDriver(storedConnection);
|
||||
dbhan = await checkedAsyncCall(connectUtility(driver, storedConnection, 'app'));
|
||||
logger.debug(
|
||||
`Connected to database, driver: ${storedConnection.engine}, separate schemas: ${
|
||||
`DBGM-00038 Connected to database, driver: ${storedConnection.engine}, separate schemas: ${
|
||||
storedConnection.useSeparateSchemas ? 'YES' : 'NO'
|
||||
}, 'DB: ${dbNameLogCategory(dbhan.database)} }`
|
||||
}, 'DB: ${dbNameLogCategory(dbhan.database)}`
|
||||
);
|
||||
dbhan.feedback = feedback => setStatus({ feedback });
|
||||
await checkedAsyncCall(readVersion());
|
||||
@@ -257,13 +257,13 @@ async function handleDriverDataCore(msgid, callMethod, { logName }) {
|
||||
const result = await callMethod(driver);
|
||||
process.send({ msgtype: 'response', msgid, result: serializeJsTypesForJsonStringify(result) });
|
||||
} catch (err) {
|
||||
logger.error(extractErrorLogData(err, { logName }), `Error when handling message ${logName}`);
|
||||
logger.error(extractErrorLogData(err, { logName }), `DBGM-00150 Error when handling message ${logName}`);
|
||||
process.send({ msgtype: 'response', msgid, errorMessage: extractErrorMessage(err, 'Error executing DB data') });
|
||||
}
|
||||
}
|
||||
|
||||
async function handleSchemaList({ msgid }) {
|
||||
logger.debug('Loading schema list');
|
||||
logger.debug('DBGM-00039 Loading schema list');
|
||||
return handleDriverDataCore(msgid, driver => driver.listSchemas(dbhan), { logName: 'listSchemas' });
|
||||
}
|
||||
|
||||
@@ -351,7 +351,7 @@ async function handleSqlPreview({ msgid, objects, options }) {
|
||||
process.send({ msgtype: 'response', msgid, sql: dmp.s, isTruncated: generator.isTruncated });
|
||||
if (generator.isUnhandledException) {
|
||||
setTimeout(async () => {
|
||||
logger.error('Exiting because of unhandled exception');
|
||||
logger.error('DBGM-00151 Exiting because of unhandled exception');
|
||||
await driver.close(dbhan);
|
||||
process.exit(0);
|
||||
}, 500);
|
||||
@@ -485,7 +485,7 @@ function start() {
|
||||
setInterval(async () => {
|
||||
const time = new Date().getTime();
|
||||
if (time - lastPing > 40 * 1000) {
|
||||
logger.info('Database connection not alive, exiting');
|
||||
logger.info('DBGM-00040 Database connection not alive, exiting');
|
||||
const driver = requireEngineDriver(storedConnection);
|
||||
await driver.close(dbhan);
|
||||
process.exit(0);
|
||||
@@ -497,10 +497,10 @@ function start() {
|
||||
try {
|
||||
await handleMessage(message);
|
||||
} catch (err) {
|
||||
logger.error(extractErrorLogData(err), 'Error in DB connection');
|
||||
logger.error(extractErrorLogData(err), 'DBGM-00041 Error in DB connection');
|
||||
process.send({
|
||||
msgtype: 'error',
|
||||
error: extractErrorMessage(err, 'Error processing message'),
|
||||
error: extractErrorMessage(err, 'DBGM-00042 Error processing message'),
|
||||
msgid: message?.msgid,
|
||||
});
|
||||
}
|
||||
|
||||
@@ -39,7 +39,7 @@ async function handleRefresh() {
|
||||
name: 'error',
|
||||
message: err.message,
|
||||
});
|
||||
logger.error(extractErrorLogData(err), 'Error refreshing server databases');
|
||||
logger.error(extractErrorLogData(err), 'DBGM-00152 Error refreshing server databases');
|
||||
setTimeout(() => process.exit(1), 1000);
|
||||
}
|
||||
}
|
||||
@@ -50,7 +50,7 @@ async function readVersion() {
|
||||
try {
|
||||
version = await driver.getVersion(dbhan);
|
||||
} catch (err) {
|
||||
logger.error(extractErrorLogData(err), 'Error getting DB server version');
|
||||
logger.error(extractErrorLogData(err), 'DBGM-00153 Error getting DB server version');
|
||||
version = { version: 'Unknown' };
|
||||
}
|
||||
process.send({ msgtype: 'version', version });
|
||||
@@ -90,7 +90,7 @@ async function handleConnect(connection) {
|
||||
name: 'error',
|
||||
message: err.message,
|
||||
});
|
||||
logger.error(extractErrorLogData(err), 'Error connecting to server');
|
||||
logger.error(extractErrorLogData(err), 'DBGM-00154 Error connecting to server');
|
||||
setTimeout(() => process.exit(1), 1000);
|
||||
}
|
||||
|
||||
@@ -120,7 +120,7 @@ async function handleDatabaseOp(op, { msgid, name }) {
|
||||
} else {
|
||||
const dmp = driver.createDumper();
|
||||
dmp[op](name);
|
||||
logger.info({ sql: dmp.s }, 'Running script');
|
||||
logger.info({ sql: dmp.s }, 'DBGM-00043 Running script');
|
||||
await driver.query(dbhan, dmp.s, { discardResult: true });
|
||||
}
|
||||
await handleRefresh();
|
||||
@@ -170,7 +170,7 @@ function start() {
|
||||
setInterval(async () => {
|
||||
const time = new Date().getTime();
|
||||
if (time - lastPing > 40 * 1000) {
|
||||
logger.info('Server connection not alive, exiting');
|
||||
logger.info('DBGM-00044 Server connection not alive, exiting');
|
||||
const driver = requireEngineDriver(storedConnection);
|
||||
if (dbhan) {
|
||||
await driver.close(dbhan);
|
||||
@@ -188,7 +188,7 @@ function start() {
|
||||
name: 'error',
|
||||
message: err.message,
|
||||
});
|
||||
logger.error(extractErrorLogData(err), `Error processing message ${message?.['msgtype']}`);
|
||||
logger.error(extractErrorLogData(err), `DBGM-00155 Error processing message ${message?.['msgtype']}`);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
@@ -230,7 +230,7 @@ function start() {
|
||||
setInterval(async () => {
|
||||
const time = new Date().getTime();
|
||||
if (time - lastPing > 25 * 1000) {
|
||||
logger.info('Session not alive, exiting');
|
||||
logger.info('DBGM-00045 Session not alive, exiting');
|
||||
const driver = requireEngineDriver(storedConnection);
|
||||
await driver.close(dbhan);
|
||||
process.exit(0);
|
||||
@@ -250,7 +250,7 @@ function start() {
|
||||
!currentProfiler &&
|
||||
executingScripts == 0
|
||||
) {
|
||||
logger.info('Session not active, exiting');
|
||||
logger.info('DBGM-00046 Session not active, exiting');
|
||||
const driver = requireEngineDriver(storedConnection);
|
||||
await driver.close(dbhan);
|
||||
process.exit(0);
|
||||
|
||||
@@ -41,7 +41,7 @@ async function handleStart({ connection, tunnelConfig }) {
|
||||
tunnelConfig,
|
||||
});
|
||||
} catch (err) {
|
||||
logger.error(extractErrorLogData(err), 'Error creating SSH tunnel connection:');
|
||||
logger.error(extractErrorLogData(err), 'DBGM-00156 Error creating SSH tunnel connection:');
|
||||
|
||||
process.send({
|
||||
msgtype: 'error',
|
||||
|
||||
@@ -10,7 +10,7 @@ const logger = getLogger();
|
||||
function archiveWriter({ folderName, fileName }) {
|
||||
const dir = resolveArchiveFolder(folderName);
|
||||
if (!fs.existsSync(dir)) {
|
||||
logger.info(`Creating directory ${dir}`);
|
||||
logger.info(`DBGM-00047 Creating directory ${dir}`);
|
||||
fs.mkdirSync(dir);
|
||||
}
|
||||
const jsonlFile = path.join(dir, `${fileName}.jsonl`);
|
||||
|
||||
@@ -83,7 +83,7 @@ async function copyStream(input, output, options) {
|
||||
});
|
||||
}
|
||||
|
||||
logger.error(extractErrorLogData(err, { progressName }), 'Import/export job failed');
|
||||
logger.error(extractErrorLogData(err, { progressName }), 'DBGM-00157 Import/export job failed');
|
||||
// throw err;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -28,20 +28,20 @@ async function executeQuery({
|
||||
useTransaction,
|
||||
}) {
|
||||
if (!logScriptItems && !skipLogging) {
|
||||
logger.info({ sql: getLimitedQuery(sql) }, `Execute query`);
|
||||
logger.info({ sql: getLimitedQuery(sql) }, `DBGM-00048 Execute query`);
|
||||
}
|
||||
|
||||
if (!driver) driver = requireEngineDriver(connection);
|
||||
const dbhan = systemConnection || (await connectUtility(driver, connection, 'script'));
|
||||
|
||||
if (sqlFile) {
|
||||
logger.debug(`Loading SQL file ${sqlFile}`);
|
||||
logger.debug(`DBGM-00049 Loading SQL file ${sqlFile}`);
|
||||
sql = await fs.readFile(sqlFile, { encoding: 'utf-8' });
|
||||
}
|
||||
|
||||
try {
|
||||
if (!skipLogging) {
|
||||
logger.debug(`Running SQL query, length: ${sql.length}`);
|
||||
logger.debug(`DBGM-00050 Running SQL query, length: ${sql.length}`);
|
||||
}
|
||||
|
||||
await driver.script(dbhan, sql, { logScriptItems, useTransaction });
|
||||
|
||||
@@ -45,14 +45,14 @@ class ImportStream extends stream.Transform {
|
||||
}
|
||||
|
||||
async function importDatabase({ connection = undefined, systemConnection = undefined, driver = undefined, inputFile }) {
|
||||
logger.info(`Importing database`);
|
||||
logger.info(`DBGM-00051 Importing database`);
|
||||
|
||||
if (!driver) driver = requireEngineDriver(connection);
|
||||
const dbhan = systemConnection || (await connectUtility(driver, connection, 'write'));
|
||||
try {
|
||||
logger.info(`Input file: ${inputFile}`);
|
||||
logger.info(`DBGM-00052 Input file: ${inputFile}`);
|
||||
const downloadedFile = await download(inputFile);
|
||||
logger.info(`Downloaded file: ${downloadedFile}`);
|
||||
logger.info(`DBGM-00053 Downloaded file: ${downloadedFile}`);
|
||||
|
||||
const fileStream = fs.createReadStream(downloadedFile, 'utf-8');
|
||||
const splittedStream = splitQueryStream(fileStream, {
|
||||
|
||||
@@ -42,7 +42,7 @@ class ParseStream extends stream.Transform {
|
||||
* @returns {Promise<readerType>} - reader object
|
||||
*/
|
||||
async function jsonLinesReader({ fileName, encoding = 'utf-8', limitRows = undefined }) {
|
||||
logger.info(`Reading file ${fileName}`);
|
||||
logger.info(`DBGM-00054 Reading file ${fileName}`);
|
||||
|
||||
const downloadedFile = await download(fileName);
|
||||
|
||||
|
||||
@@ -33,7 +33,7 @@ class StringifyStream extends stream.Transform {
|
||||
* @returns {Promise<writerType>} - writer object
|
||||
*/
|
||||
async function jsonLinesWriter({ fileName, encoding = 'utf-8', header = true }) {
|
||||
logger.info(`Writing file ${fileName}`);
|
||||
logger.info(`DBGM-00055 Writing file ${fileName}`);
|
||||
const stringify = new StringifyStream({ header });
|
||||
const fileStream = fs.createWriteStream(fileName, encoding);
|
||||
return [stringify, fileStream];
|
||||
|
||||
@@ -63,7 +63,7 @@ async function jsonReader({
|
||||
encoding = 'utf-8',
|
||||
limitRows = undefined,
|
||||
}) {
|
||||
logger.info(`Reading file ${fileName}`);
|
||||
logger.info(`DBGM-00056 Reading file ${fileName}`);
|
||||
|
||||
const downloadedFile = await download(fileName);
|
||||
const fileStream = fs.createReadStream(
|
||||
|
||||
@@ -96,7 +96,7 @@ class StringifyStream extends stream.Transform {
|
||||
* @returns {Promise<writerType>} - writer object
|
||||
*/
|
||||
async function jsonWriter({ fileName, jsonStyle, keyField = '_key', rootField, encoding = 'utf-8' }) {
|
||||
logger.info(`Writing file ${fileName}`);
|
||||
logger.info(`DBGM-00057 Writing file ${fileName}`);
|
||||
const stringify = new StringifyStream({ jsonStyle, keyField, rootField });
|
||||
const fileStream = fs.createWriteStream(fileName, encoding);
|
||||
return [stringify, fileStream];
|
||||
|
||||
@@ -6,13 +6,13 @@ const exportDbModel = require('../utility/exportDbModel');
|
||||
const logger = getLogger('analyseDb');
|
||||
|
||||
async function loadDatabase({ connection = undefined, systemConnection = undefined, driver = undefined, outputDir }) {
|
||||
logger.debug(`Analysing database`);
|
||||
logger.debug(`DBGM-00058 Analysing database`);
|
||||
|
||||
if (!driver) driver = requireEngineDriver(connection);
|
||||
const dbhan = systemConnection || (await connectUtility(driver, connection, 'read', { forceRowsAsObjects: true }));
|
||||
try {
|
||||
const dbInfo = await driver.analyseFull(dbhan);
|
||||
logger.debug(`Analyse finished`);
|
||||
logger.debug(`DBGM-00059 Analyse finished`);
|
||||
|
||||
await exportDbModel(dbInfo, outputDir);
|
||||
} finally {
|
||||
|
||||
@@ -132,7 +132,7 @@ async function modifyJsonLinesReader({
|
||||
mergeKey = null,
|
||||
mergeMode = 'merge',
|
||||
}) {
|
||||
logger.info(`Reading file ${fileName} with change set`);
|
||||
logger.info(`DBGM-00060 Reading file ${fileName} with change set`);
|
||||
|
||||
const fileStream = fs.createReadStream(
|
||||
fileName,
|
||||
|
||||
@@ -29,7 +29,7 @@ async function queryReader({
|
||||
// if (!sql && !json) {
|
||||
// throw new Error('One of sql or json must be set');
|
||||
// }
|
||||
logger.info({ sql: query || sql }, `Reading query`);
|
||||
logger.info({ sql: query || sql }, `DBGM-00061 Reading query`);
|
||||
// else console.log(`Reading query ${JSON.stringify(json)}`);
|
||||
|
||||
if (!driver) {
|
||||
|
||||
@@ -22,7 +22,7 @@ function requirePlugin(packageName, requiredPlugin = null) {
|
||||
if (requiredPlugin == null) {
|
||||
let module;
|
||||
const modulePath = getPluginBackendPath(packageName);
|
||||
logger.info(`Loading module ${packageName} from ${modulePath}`);
|
||||
logger.info(`DBGM-00062 Loading module ${packageName} from ${modulePath}`);
|
||||
try {
|
||||
// @ts-ignore
|
||||
module = __non_webpack_require__(modulePath);
|
||||
|
||||
@@ -11,7 +11,7 @@ async function runScript(func) {
|
||||
await func();
|
||||
process.exit(0);
|
||||
} catch (err) {
|
||||
logger.error(extractErrorLogData(err), `Error running script`);
|
||||
logger.error(extractErrorLogData(err), `DBGM-00158 Error running script`);
|
||||
process.exit(1);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -41,7 +41,7 @@ class SqlizeStream extends stream.Transform {
|
||||
}
|
||||
|
||||
async function sqlDataWriter({ fileName, dataName, driver, encoding = 'utf-8' }) {
|
||||
logger.info(`Writing file ${fileName}`);
|
||||
logger.info(`DBGM-00063 Writing file ${fileName}`);
|
||||
const stringify = new SqlizeStream({ fileName, dataName });
|
||||
const fileStream = fs.createWriteStream(fileName, encoding);
|
||||
return [stringify, fileStream];
|
||||
|
||||
@@ -23,7 +23,7 @@ async function tableReader({ connection, systemConnection, pureName, schemaName,
|
||||
|
||||
if (driver.databaseEngineTypes.includes('document')) {
|
||||
// @ts-ignore
|
||||
logger.info(`Reading collection ${fullNameToString(fullName)}`);
|
||||
logger.info(`DBGM-00064 Reading collection ${fullNameToString(fullName)}`);
|
||||
// @ts-ignore
|
||||
return await driver.readQuery(dbhan, JSON.stringify(fullName));
|
||||
}
|
||||
@@ -32,14 +32,14 @@ async function tableReader({ connection, systemConnection, pureName, schemaName,
|
||||
const query = `select * from ${quoteFullName(driver.dialect, fullName)}`;
|
||||
if (table) {
|
||||
// @ts-ignore
|
||||
logger.info(`Reading table ${fullNameToString(table)}`);
|
||||
logger.info(`DBGM-00065 Reading table ${fullNameToString(table)}`);
|
||||
// @ts-ignore
|
||||
return await driver.readQuery(dbhan, query, table);
|
||||
}
|
||||
const view = await driver.analyseSingleObject(dbhan, fullName, 'views');
|
||||
if (view) {
|
||||
// @ts-ignore
|
||||
logger.info(`Reading view ${fullNameToString(view)}`);
|
||||
logger.info(`DBGM-00066 Reading view ${fullNameToString(view)}`);
|
||||
// @ts-ignore
|
||||
return await driver.readQuery(dbhan, query, view);
|
||||
}
|
||||
|
||||
@@ -20,7 +20,7 @@ const logger = getLogger('tableWriter');
|
||||
* @returns {Promise<writerType>} - writer object
|
||||
*/
|
||||
async function tableWriter({ connection, schemaName, pureName, driver, systemConnection, ...options }) {
|
||||
logger.info(`Writing table ${fullNameToString({ schemaName, pureName })}`);
|
||||
logger.info(`DBGM-00067 Writing table ${fullNameToString({ schemaName, pureName })}`);
|
||||
|
||||
if (!driver) {
|
||||
driver = requireEngineDriver(connection);
|
||||
|
||||
@@ -52,14 +52,14 @@ function unzipDirectory(zipPath, outputDirectory) {
|
||||
readStream.on('end', () => zipFile.readEntry());
|
||||
|
||||
writeStream.on('finish', () => {
|
||||
logger.info(`Extracted "${entry.fileName}" → "${destPath}".`);
|
||||
logger.info(`DBGM-00068 Extracted "${entry.fileName}" → "${destPath}".`);
|
||||
res();
|
||||
});
|
||||
|
||||
writeStream.on('error', writeErr => {
|
||||
logger.error(
|
||||
extractErrorLogData(writeErr),
|
||||
`Error extracting "${entry.fileName}" from "${zipPath}".`
|
||||
`DBGM-00069 Error extracting "${entry.fileName}" from "${zipPath}".`
|
||||
);
|
||||
rej(writeErr);
|
||||
});
|
||||
@@ -74,14 +74,14 @@ function unzipDirectory(zipPath, outputDirectory) {
|
||||
zipFile.on('end', () => {
|
||||
Promise.all(pending)
|
||||
.then(() => {
|
||||
logger.info(`Archive "${zipPath}" fully extracted to "${outputDirectory}".`);
|
||||
logger.info(`DBGM-00070 Archive "${zipPath}" fully extracted to "${outputDirectory}".`);
|
||||
resolve(true);
|
||||
})
|
||||
.catch(reject);
|
||||
});
|
||||
|
||||
zipFile.on('error', err => {
|
||||
logger.error(extractErrorLogData(err), `ZIP file error in ${zipPath}.`);
|
||||
logger.error(extractErrorLogData(err), `DBGM-00071 ZIP file error in ${zipPath}.`);
|
||||
reject(err);
|
||||
});
|
||||
});
|
||||
|
||||
@@ -16,16 +16,16 @@ function zipDirectory(inputDirectory, outputFile) {
|
||||
|
||||
// Listen for all archive data to be written
|
||||
output.on('close', () => {
|
||||
logger.info(`ZIP file created (${archive.pointer()} total bytes)`);
|
||||
logger.info(`DBGM-00072 ZIP file created (${archive.pointer()} total bytes)`);
|
||||
resolve();
|
||||
});
|
||||
|
||||
archive.on('warning', err => {
|
||||
logger.warn(extractErrorLogData(err), `Warning while creating ZIP: ${err.message}`);
|
||||
logger.warn(extractErrorLogData(err), `DBGM-00073 Warning while creating ZIP: ${err.message}`);
|
||||
});
|
||||
|
||||
archive.on('error', err => {
|
||||
logger.error(extractErrorLogData(err), `Error while creating ZIP: ${err.message}`);
|
||||
logger.error(extractErrorLogData(err), `DBGM-00074 Error while creating ZIP: ${err.message}`);
|
||||
reject(err);
|
||||
});
|
||||
|
||||
|
||||
@@ -17,16 +17,16 @@ function zipDirectory(jsonDb, outputFile) {
|
||||
|
||||
// Listen for all archive data to be written
|
||||
output.on('close', () => {
|
||||
logger.info(`ZIP file created (${archive.pointer()} total bytes)`);
|
||||
logger.info(`DBGM-00075 ZIP file created (${archive.pointer()} total bytes)`);
|
||||
resolve();
|
||||
});
|
||||
|
||||
archive.on('warning', err => {
|
||||
logger.warn(extractErrorLogData(err), `Warning while creating ZIP: ${err.message}`);
|
||||
logger.warn(extractErrorLogData(err), `DBGM-00076 Warning while creating ZIP: ${err.message}`);
|
||||
});
|
||||
|
||||
archive.on('error', err => {
|
||||
logger.error(extractErrorLogData(err), `Error while creating ZIP: ${err.message}`);
|
||||
logger.error(extractErrorLogData(err), `DBGM-00077 Error while creating ZIP: ${err.message}`);
|
||||
reject(err);
|
||||
});
|
||||
|
||||
|
||||
148
packages/api/src/utility/AppLogDatastore.js
Normal file
148
packages/api/src/utility/AppLogDatastore.js
Normal file
@@ -0,0 +1,148 @@
|
||||
const fs = require('fs-extra');
|
||||
const path = require('path');
|
||||
const { logsdir } = require('./directories');
|
||||
const { format, addDays, startOfDay } = require('date-fns');
|
||||
const JsonLinesDatastore = require('./JsonLinesDatastore');
|
||||
const LineReader = require('./LineReader');
|
||||
const socket = require('./socket');
|
||||
|
||||
async function getLogFiles(timeFrom, timeTo) {
|
||||
const dir = logsdir();
|
||||
const files = await fs.readdir(dir);
|
||||
const startPrefix = format(timeFrom, 'yyyy-MM-dd');
|
||||
const endPrefix = format(addDays(timeTo, 1), 'yyyy-MM-dd');
|
||||
const logFiles = files
|
||||
.filter(file => file.endsWith('.ndjson'))
|
||||
.filter(file => file >= startPrefix && file < endPrefix);
|
||||
return logFiles.sort().map(x => path.join(dir, x));
|
||||
}
|
||||
|
||||
class AppLogDatastore {
|
||||
constructor({ timeFrom, timeTo }) {
|
||||
this.timeFrom = timeFrom;
|
||||
this.timeTo = timeTo;
|
||||
}
|
||||
|
||||
async resolveNextFile(file) {
|
||||
const files = await getLogFiles(this.timeFrom, this.timeTo);
|
||||
const index = files.indexOf(file);
|
||||
if (index < 0 || index >= files.length - 1) return null;
|
||||
return files[index + 1];
|
||||
}
|
||||
|
||||
async getRows(offset = 0, limit = 100, filters = {}) {
|
||||
if (!this.linesReader) {
|
||||
const files = await getLogFiles(this.timeFrom, this.timeTo);
|
||||
this.linesReader = new JsonLinesDatastore(files[0], null, file => this.resolveNextFile(file));
|
||||
}
|
||||
|
||||
const conditions = [
|
||||
{
|
||||
conditionType: 'binary',
|
||||
operator: '>=',
|
||||
left: { exprType: 'column', columnName: 'time' },
|
||||
right: { exprType: 'value', value: this.timeFrom },
|
||||
},
|
||||
{
|
||||
conditionType: 'binary',
|
||||
operator: '<=',
|
||||
left: { exprType: 'column', columnName: 'time' },
|
||||
right: { exprType: 'value', value: this.timeTo },
|
||||
},
|
||||
];
|
||||
for (const [key, values] of Object.entries(filters)) {
|
||||
if (values.length == 1 && values[0] == null) {
|
||||
// @ts-ignore
|
||||
conditions.push({
|
||||
conditionType: 'isNull',
|
||||
expr: { exprType: 'column', columnName: key },
|
||||
});
|
||||
continue;
|
||||
}
|
||||
// @ts-ignore
|
||||
conditions.push({
|
||||
conditionType: 'in',
|
||||
expr: { exprType: 'column', columnName: key },
|
||||
values,
|
||||
});
|
||||
}
|
||||
|
||||
return this.linesReader.getRows(
|
||||
offset,
|
||||
limit,
|
||||
{
|
||||
conditionType: 'and',
|
||||
conditions,
|
||||
},
|
||||
null
|
||||
);
|
||||
}
|
||||
|
||||
_closeReader() {
|
||||
if (this.linesReader) {
|
||||
this.linesReader._closeReader();
|
||||
this.linesReader = null;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const RECENT_LOG_LIMIT = 1000;
|
||||
|
||||
let recentLogs = null;
|
||||
const beforeRecentLogs = [];
|
||||
|
||||
function adjustRecentLogs() {
|
||||
if (recentLogs.length > RECENT_LOG_LIMIT) {
|
||||
recentLogs.splice(0, recentLogs.length - RECENT_LOG_LIMIT);
|
||||
}
|
||||
}
|
||||
|
||||
async function initializeRecentLogProvider() {
|
||||
const logs = [];
|
||||
for (const file of await getLogFiles(startOfDay(new Date()), new Date())) {
|
||||
const fileStream = fs.createReadStream(file);
|
||||
const reader = new LineReader(fileStream);
|
||||
do {
|
||||
const line = await reader.readLine();
|
||||
if (line == null) break;
|
||||
try {
|
||||
const logEntry = JSON.parse(line);
|
||||
logs.push(logEntry);
|
||||
if (logs.length > RECENT_LOG_LIMIT) {
|
||||
logs.shift();
|
||||
}
|
||||
} catch (e) {
|
||||
continue;
|
||||
}
|
||||
} while (true);
|
||||
}
|
||||
recentLogs = logs;
|
||||
recentLogs.push(...beforeRecentLogs);
|
||||
}
|
||||
|
||||
let counter = 0;
|
||||
function pushToRecentLogs(msg) {
|
||||
const finalMsg = {
|
||||
...msg,
|
||||
counter,
|
||||
};
|
||||
counter += 1;
|
||||
if (recentLogs) {
|
||||
recentLogs.push(finalMsg);
|
||||
adjustRecentLogs();
|
||||
socket.emit('applog-event', finalMsg);
|
||||
} else {
|
||||
beforeRecentLogs.push(finalMsg);
|
||||
}
|
||||
}
|
||||
|
||||
function getRecentAppLogRecords() {
|
||||
return recentLogs ?? beforeRecentLogs;
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
AppLogDatastore,
|
||||
initializeRecentLogProvider,
|
||||
getRecentAppLogRecords,
|
||||
pushToRecentLogs,
|
||||
};
|
||||
@@ -61,7 +61,7 @@ class DatastoreProxy {
|
||||
this.subprocess = null;
|
||||
});
|
||||
this.subprocess.on('error', err => {
|
||||
logger.error(extractErrorLogData(err), 'Error in data store subprocess');
|
||||
logger.error(extractErrorLogData(err), 'DBGM-00167 Error in data store subprocess');
|
||||
this.subprocess = null;
|
||||
});
|
||||
this.subprocess.send({ msgtype: 'open', file: this.file });
|
||||
@@ -77,7 +77,7 @@ class DatastoreProxy {
|
||||
try {
|
||||
this.subprocess.send({ msgtype: 'read', msgid, offset, limit });
|
||||
} catch (err) {
|
||||
logger.error(extractErrorLogData(err), 'Error getting rows');
|
||||
logger.error(extractErrorLogData(err), 'DBGM-00168 Error getting rows');
|
||||
this.subprocess = null;
|
||||
}
|
||||
});
|
||||
@@ -91,7 +91,7 @@ class DatastoreProxy {
|
||||
try {
|
||||
this.subprocess.send({ msgtype: 'notify', msgid });
|
||||
} catch (err) {
|
||||
logger.error(extractErrorLogData(err), 'Error notifying subprocess');
|
||||
logger.error(extractErrorLogData(err), 'DBGM-00169 Error notifying subprocess');
|
||||
this.subprocess = null;
|
||||
}
|
||||
});
|
||||
|
||||
@@ -7,15 +7,15 @@ const AsyncLock = require('async-lock');
|
||||
const lock = new AsyncLock();
|
||||
const stableStringify = require('json-stable-stringify');
|
||||
const { evaluateCondition } = require('dbgate-sqltree');
|
||||
const requirePluginFunction = require('./requirePluginFunction');
|
||||
const esort = require('external-sorting');
|
||||
const { jsldir } = require('./directories');
|
||||
const LineReader = require('./LineReader');
|
||||
|
||||
class JsonLinesDatastore {
|
||||
constructor(file, formatterFunction) {
|
||||
constructor(file, formatterFunction, resolveNextFile = null) {
|
||||
this.file = file;
|
||||
this.formatterFunction = formatterFunction;
|
||||
this.resolveNextFile = resolveNextFile;
|
||||
this.reader = null;
|
||||
this.readedDataRowCount = 0;
|
||||
this.readedSchemaRow = false;
|
||||
@@ -23,7 +23,12 @@ class JsonLinesDatastore {
|
||||
this.notifyChangedCallback = null;
|
||||
this.currentFilter = null;
|
||||
this.currentSort = null;
|
||||
this.rowFormatter = requirePluginFunction(formatterFunction);
|
||||
this.currentFileName = null;
|
||||
if (formatterFunction) {
|
||||
const requirePluginFunction = require('./requirePluginFunction');
|
||||
this.rowFormatter = requirePluginFunction(formatterFunction);
|
||||
}
|
||||
|
||||
this.sortedFiles = {};
|
||||
}
|
||||
|
||||
@@ -67,6 +72,7 @@ class JsonLinesDatastore {
|
||||
// this.firstRowToBeReturned = null;
|
||||
this.currentFilter = null;
|
||||
this.currentSort = null;
|
||||
this.currentFileName = null;
|
||||
await reader.close();
|
||||
}
|
||||
|
||||
@@ -100,8 +106,18 @@ class JsonLinesDatastore {
|
||||
// return res;
|
||||
// }
|
||||
for (;;) {
|
||||
const line = await this.reader.readLine();
|
||||
if (!line) {
|
||||
let line = await this.reader.readLine();
|
||||
while (!line) {
|
||||
if (!this.currentSort && this.resolveNextFile) {
|
||||
const nextFile = await this.resolveNextFile(this.currentFileName);
|
||||
if (nextFile) {
|
||||
await this.reader.close();
|
||||
this.reader = await this._openReader(nextFile);
|
||||
this.currentFileName = nextFile;
|
||||
line = await this.reader.readLine();
|
||||
continue;
|
||||
}
|
||||
}
|
||||
// EOF
|
||||
return null;
|
||||
}
|
||||
@@ -173,6 +189,7 @@ class JsonLinesDatastore {
|
||||
}
|
||||
if (!this.reader) {
|
||||
const reader = await this._openReader(sort ? this.sortedFiles[stableStringify(sort)] : this.file);
|
||||
this.currentFileName = this.file;
|
||||
this.reader = reader;
|
||||
this.currentFilter = filter;
|
||||
this.currentSort = sort;
|
||||
|
||||
@@ -12,7 +12,7 @@ function childProcessChecker() {
|
||||
// This will come once parent dies.
|
||||
// One way can be to check for error code ERR_IPC_CHANNEL_CLOSED
|
||||
// and call process.exit()
|
||||
logger.error(extractErrorLogData(err), 'parent died');
|
||||
logger.error(extractErrorLogData(err), 'DBGM-00163 parent died');
|
||||
process.exit(1);
|
||||
}
|
||||
}, 1000);
|
||||
|
||||
@@ -77,7 +77,7 @@ function startCloudTokenChecking(sid, callback) {
|
||||
callback(resp.data);
|
||||
}
|
||||
} catch (err) {
|
||||
logger.error(extractErrorLogData(err), 'Error checking cloud token');
|
||||
logger.error(extractErrorLogData(err), 'DBGM-00164 Error checking cloud token');
|
||||
}
|
||||
}, 500);
|
||||
}
|
||||
@@ -125,7 +125,7 @@ async function getCloudUsedEngines() {
|
||||
const resp = await callCloudApiGet('content-engines');
|
||||
return resp || [];
|
||||
} catch (err) {
|
||||
logger.error(extractErrorLogData(err), 'Error getting cloud content list');
|
||||
logger.error(extractErrorLogData(err), 'DBGM-00165 Error getting cloud content list');
|
||||
return [];
|
||||
}
|
||||
}
|
||||
@@ -208,7 +208,7 @@ async function updateCloudFiles(isRefresh) {
|
||||
lastCheckedTm = _.max(cloudFiles.map(x => parseInt(x.modifiedTm)));
|
||||
}
|
||||
|
||||
logger.info({ tags, lastCheckedTm }, 'Downloading cloud files');
|
||||
logger.info({ tags, lastCheckedTm }, 'DBGM-00082 Downloading cloud files');
|
||||
|
||||
const resp = await axios.default.get(
|
||||
`${DBGATE_CLOUD_URL}/public-cloud-updates?lastCheckedTm=${lastCheckedTm}&tags=${tags}&isRefresh=${
|
||||
@@ -223,7 +223,7 @@ async function updateCloudFiles(isRefresh) {
|
||||
}
|
||||
);
|
||||
|
||||
logger.info(`Downloaded ${resp.data.length} cloud files`);
|
||||
logger.info(`DBGM-00083 Downloaded ${resp.data.length} cloud files`);
|
||||
|
||||
const filesByPath = lastCheckedTm == 0 ? {} : _.keyBy(cloudFiles, 'path');
|
||||
for (const file of resp.data) {
|
||||
@@ -269,7 +269,7 @@ async function refreshPublicFiles(isRefresh) {
|
||||
try {
|
||||
await updateCloudFiles(isRefresh);
|
||||
} catch (err) {
|
||||
logger.error(extractErrorLogData(err), 'Error updating cloud files');
|
||||
logger.error(extractErrorLogData(err), 'DBGM-00166 Error updating cloud files');
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -14,11 +14,11 @@ const createDirectories = {};
|
||||
const ensureDirectory = (dir, clean) => {
|
||||
if (!createDirectories[dir]) {
|
||||
if (clean && fs.existsSync(dir) && !platformInfo.isForkedApi) {
|
||||
getLogger('directories').info(`Cleaning directory ${dir}`);
|
||||
getLogger('directories').info(`DBGM-00170 Cleaning directory ${dir}`);
|
||||
cleanDirectory(dir, _.isNumber(clean) ? clean : null);
|
||||
}
|
||||
if (!fs.existsSync(dir)) {
|
||||
getLogger('directories').info(`Creating directory ${dir}`);
|
||||
getLogger('directories').info(`DBGM-00171 Creating directory ${dir}`);
|
||||
fs.mkdirSync(dir);
|
||||
}
|
||||
createDirectories[dir] = true;
|
||||
|
||||
@@ -42,13 +42,13 @@ function extractSingleFileFromZip(zipPath, fileInZip, outputPath) {
|
||||
|
||||
// When the file is finished writing, resolve
|
||||
writeStream.on('finish', () => {
|
||||
logger.info(`File "${fileInZip}" extracted to "${outputPath}".`);
|
||||
logger.info(`DBGM-00088 File "${fileInZip}" extracted to "${outputPath}".`);
|
||||
resolve(true);
|
||||
});
|
||||
|
||||
// Handle write errors
|
||||
writeStream.on('error', writeErr => {
|
||||
logger.error(extractErrorLogData(writeErr), `Error extracting "${fileInZip}" from "${zipPath}".`);
|
||||
logger.error(extractErrorLogData(writeErr), `DBGM-00089 Error extracting "${fileInZip}" from "${zipPath}".`);
|
||||
reject(writeErr);
|
||||
});
|
||||
});
|
||||
@@ -67,7 +67,7 @@ function extractSingleFileFromZip(zipPath, fileInZip, outputPath) {
|
||||
|
||||
// Handle general errors
|
||||
zipFile.on('error', err => {
|
||||
logger.error(extractErrorLogData(err), `ZIP file error in ${zipPath}.`);
|
||||
logger.error(extractErrorLogData(err), `DBGM-00172 ZIP file error in ${zipPath}.`);
|
||||
reject(err);
|
||||
});
|
||||
});
|
||||
|
||||
@@ -28,7 +28,7 @@ async function loadModelTransform(file) {
|
||||
}
|
||||
return null;
|
||||
} catch (err) {
|
||||
logger.error(extractErrorLogData(err), `Error loading model transform ${file}`);
|
||||
logger.error(extractErrorLogData(err), `DBGM-00173 Error loading model transform ${file}`);
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -40,7 +40,7 @@ function callForwardProcess(connection, tunnelConfig, tunnelCacheKey) {
|
||||
tunnelConfig,
|
||||
});
|
||||
} catch (err) {
|
||||
logger.error(extractErrorLogData(err), 'Error connecting SSH');
|
||||
logger.error(extractErrorLogData(err), 'DBGM-00174 Error connecting SSH');
|
||||
}
|
||||
return new Promise((resolve, reject) => {
|
||||
let promiseHandled = false;
|
||||
@@ -57,18 +57,18 @@ function callForwardProcess(connection, tunnelConfig, tunnelCacheKey) {
|
||||
}
|
||||
});
|
||||
subprocess.on('exit', code => {
|
||||
logger.info(`SSH forward process exited with code ${code}`);
|
||||
logger.info(`DBGM-00090 SSH forward process exited with code ${code}`);
|
||||
delete sshTunnelCache[tunnelCacheKey];
|
||||
if (!promiseHandled) {
|
||||
reject(
|
||||
new Error(
|
||||
'SSH forward process exited, try to change "Local host address for SSH connections" in Settings/Connections'
|
||||
'DBGM-00091 SSH forward process exited, try to change "Local host address for SSH connections" in Settings/Connections'
|
||||
)
|
||||
);
|
||||
}
|
||||
});
|
||||
subprocess.on('error', error => {
|
||||
logger.error(extractErrorLogData(error), 'SSH forward process error');
|
||||
logger.error(extractErrorLogData(error), 'DBGM-00092 SSH forward process error');
|
||||
delete sshTunnelCache[tunnelCacheKey];
|
||||
if (!promiseHandled) {
|
||||
reject(error);
|
||||
@@ -97,13 +97,13 @@ async function getSshTunnel(connection) {
|
||||
};
|
||||
try {
|
||||
logger.info(
|
||||
`Creating SSH tunnel to ${connection.sshHost}-${connection.server}:${connection.port}, using local port ${localPort}`
|
||||
`DBGM-00093 Creating SSH tunnel to ${connection.sshHost}-${connection.server}:${connection.port}, using local port ${localPort}`
|
||||
);
|
||||
|
||||
const subprocess = await callForwardProcess(connection, tunnelConfig, tunnelCacheKey);
|
||||
|
||||
logger.info(
|
||||
`Created SSH tunnel to ${connection.sshHost}-${connection.server}:${connection.port}, using local port ${localPort}`
|
||||
`DBGM-00094 Created SSH tunnel to ${connection.sshHost}-${connection.server}:${connection.port}, using local port ${localPort}`
|
||||
);
|
||||
|
||||
sshTunnelCache[tunnelCacheKey] = {
|
||||
@@ -114,7 +114,7 @@ async function getSshTunnel(connection) {
|
||||
};
|
||||
return sshTunnelCache[tunnelCacheKey];
|
||||
} catch (err) {
|
||||
logger.error(extractErrorLogData(err), 'Error creating SSH tunnel:');
|
||||
logger.error(extractErrorLogData(err), 'DBGM-00095 Error creating SSH tunnel:');
|
||||
// error is not cached
|
||||
return {
|
||||
state: 'error',
|
||||
|
||||
@@ -10,7 +10,7 @@ async function handleGetSshTunnelRequest({ msgid, connection }, subprocess) {
|
||||
try {
|
||||
subprocess.send({ msgtype: 'getsshtunnel-response', msgid, response });
|
||||
} catch (err) {
|
||||
logger.error(extractErrorLogData(err), 'Error sending to SSH tunnel');
|
||||
logger.error(extractErrorLogData(err), 'DBGM-00175 Error sending to SSH tunnel');
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -12,11 +12,11 @@ module.exports = function useController(app, electron, route, controller) {
|
||||
const router = express.Router();
|
||||
|
||||
if (controller._init) {
|
||||
logger.info(`Calling init controller for controller ${route}`);
|
||||
logger.info(`DBGM-00096 Calling init controller for controller ${route}`);
|
||||
try {
|
||||
controller._init();
|
||||
} catch (err) {
|
||||
logger.error(extractErrorLogData(err), `Error initializing controller, exiting application`);
|
||||
logger.error(extractErrorLogData(err), `DBGM-00097 Error initializing controller, exiting application`);
|
||||
process.exit(1);
|
||||
}
|
||||
}
|
||||
@@ -78,7 +78,7 @@ module.exports = function useController(app, electron, route, controller) {
|
||||
const data = await controller[key]({ ...req.body, ...req.query }, req);
|
||||
res.json(data);
|
||||
} catch (err) {
|
||||
logger.error(extractErrorLogData(err), `Error when processing route ${route}/${key}`);
|
||||
logger.error(extractErrorLogData(err), `DBGM-00176 Error when processing route ${route}/${key}`);
|
||||
if (err instanceof MissingCredentialsError) {
|
||||
res.json({
|
||||
missingCredentials: true,
|
||||
|
||||
@@ -330,7 +330,7 @@ class ReplicatorItemHolder {
|
||||
|
||||
if (new Date().getTime() - lastLogged.getTime() > 5000) {
|
||||
logger.info(
|
||||
`Replicating ${this.item.name} in progress, inserted ${inserted} rows, mapped ${mapped} rows, missing ${missing} rows, skipped ${skipped} rows, updated ${updated} rows`
|
||||
`DBGM-00105 Replicating ${this.item.name} in progress, inserted ${inserted} rows, mapped ${mapped} rows, missing ${missing} rows, skipped ${skipped} rows, updated ${updated} rows`
|
||||
);
|
||||
lastLogged = new Date();
|
||||
}
|
||||
@@ -489,19 +489,19 @@ export class DataReplicator {
|
||||
for (const item of this.itemPlan) {
|
||||
const stats = await item.runImport();
|
||||
logger.info(
|
||||
`Replicated ${item.name}, inserted ${stats.inserted} rows, mapped ${stats.mapped} rows, missing ${stats.missing} rows, skipped ${stats.skipped} rows, updated ${stats.updated} rows, deleted ${stats.deleted} rows`
|
||||
`DBGM-00106 Replicated ${item.name}, inserted ${stats.inserted} rows, mapped ${stats.mapped} rows, missing ${stats.missing} rows, skipped ${stats.skipped} rows, updated ${stats.updated} rows, deleted ${stats.deleted} rows`
|
||||
);
|
||||
}
|
||||
} catch (err) {
|
||||
logger.error(extractErrorLogData(err), `Failed replicator job, rollbacking. ${err.message}`);
|
||||
logger.error(extractErrorLogData(err), `DBGM-00179 Failed replicator job, rollbacking. ${err.message}`);
|
||||
await this.runDumperCommand(dmp => dmp.rollbackTransaction());
|
||||
return;
|
||||
}
|
||||
if (this.options.rollbackAfterFinish) {
|
||||
logger.info('Rollbacking transaction, nothing was changed');
|
||||
logger.info('DBGM-00107 Rollbacking transaction, nothing was changed');
|
||||
await this.runDumperCommand(dmp => dmp.rollbackTransaction());
|
||||
} else {
|
||||
logger.info('Committing replicator transaction');
|
||||
logger.info('DBGM-00108 Committing replicator transaction');
|
||||
await this.runDumperCommand(dmp => dmp.commitTransaction());
|
||||
}
|
||||
|
||||
|
||||
@@ -43,11 +43,11 @@ export class ScriptDrivedDeployer {
|
||||
dmp.put('select * from ~dbgate_deploy_journal')
|
||||
);
|
||||
this.journalItems = rows;
|
||||
logger.debug(`Loaded ${rows.length} items from DbGate deploy journal`);
|
||||
logger.debug(`DBGM-00109 Loaded ${rows.length} items from DbGate deploy journal`);
|
||||
} catch (err) {
|
||||
logger.warn(
|
||||
extractErrorLogData(err),
|
||||
'Error loading DbGate deploy journal, creating table dbgate_deploy_journal'
|
||||
'DBGM-00110 Error loading DbGate deploy journal, creating table dbgate_deploy_journal'
|
||||
);
|
||||
const dmp = this.driver.createDumper();
|
||||
dmp.createTable({
|
||||
@@ -126,12 +126,12 @@ export class ScriptDrivedDeployer {
|
||||
runCommandOnDriver(this.dbhan, this.driver, dmp => dmp.beginTransaction());
|
||||
}
|
||||
|
||||
logger.debug(`Running ${category} script ${file.name}`);
|
||||
logger.debug(`DBGM-00111 Running ${category} script ${file.name}`);
|
||||
try {
|
||||
await this.driver.script(this.dbhan, file.text, { useTransaction: false });
|
||||
await this.saveToJournal(file, category, hash);
|
||||
} catch (err) {
|
||||
logger.error(extractErrorLogData(err), `Error running ${category} script ${file.name}`);
|
||||
logger.error(extractErrorLogData(err), `DBGM-00180 Error running ${category} script ${file.name}`);
|
||||
if (this.driver.supportsTransactions) {
|
||||
runCommandOnDriver(this.dbhan, this.driver, dmp => dmp.rollbackTransaction());
|
||||
return;
|
||||
|
||||
@@ -20,10 +20,10 @@ const logger = createLogger('dbmodel');
|
||||
async function runAndExit(promise) {
|
||||
try {
|
||||
await promise;
|
||||
logger.info('Success');
|
||||
logger.info('DBGM-00112 Success');
|
||||
process.exit();
|
||||
} catch (err) {
|
||||
logger.error(extractErrorLogData(err), 'Processing failed');
|
||||
logger.error(extractErrorLogData(err), 'DBGM-00113 Processing failed');
|
||||
process.exit(1);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -78,5 +78,12 @@ export function evaluateCondition(condition: Condition, values) {
|
||||
});
|
||||
return evaluateCondition(replaced, values);
|
||||
});
|
||||
case 'in':
|
||||
const value = extractRawValue(evaluateExpression(condition.expr, values));
|
||||
const list = condition.values;
|
||||
if (Array.isArray(list)) {
|
||||
return list.some(item => item == value);
|
||||
}
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -79,7 +79,7 @@ export class DatabaseAnalyser<TClient = any> {
|
||||
|
||||
async fullAnalysis() {
|
||||
logger.debug(
|
||||
`Performing full analysis, DB=${dbNameLogCategory(this.dbhan.database)}, engine=${this.driver.engine}`
|
||||
`DBGM-00126 Performing full analysis, DB=${dbNameLogCategory(this.dbhan.database)}, engine=${this.driver.engine}`
|
||||
);
|
||||
const res = this.addEngineField(await this._runAnalysis());
|
||||
// console.log('FULL ANALYSIS', res);
|
||||
@@ -102,7 +102,7 @@ export class DatabaseAnalyser<TClient = any> {
|
||||
|
||||
async incrementalAnalysis(structure) {
|
||||
logger.info(
|
||||
`Performing incremental analysis, DB=${dbNameLogCategory(this.dbhan.database)}, engine=${this.driver.engine}`
|
||||
`DBGM-00127 Performing incremental analysis, DB=${dbNameLogCategory(this.dbhan.database)}, engine=${this.driver.engine}`
|
||||
);
|
||||
this.structure = structure;
|
||||
|
||||
@@ -129,7 +129,7 @@ export class DatabaseAnalyser<TClient = any> {
|
||||
|
||||
this.modifications = structureModifications;
|
||||
if (structureWithRowCounts) this.structure = structureWithRowCounts;
|
||||
logger.info({ modifications: this.modifications }, 'DB modifications detected:');
|
||||
logger.info({ modifications: this.modifications }, 'DBGM-00128 DB modifications detected:');
|
||||
return this.addEngineField(this.mergeAnalyseResult(await this._runAnalysis()));
|
||||
}
|
||||
|
||||
@@ -347,10 +347,10 @@ export class DatabaseAnalyser<TClient = any> {
|
||||
}
|
||||
try {
|
||||
const res = await this.driver.query(this.dbhan, sql);
|
||||
this.logger.debug({ rows: res.rows.length, template }, `Loaded analyser query`);
|
||||
this.logger.debug({ rows: res.rows.length, template }, `DBGM-00129 Loaded analyser query`);
|
||||
return res;
|
||||
} catch (err) {
|
||||
logger.error(extractErrorLogData(err, { template }), 'Error running analyser query');
|
||||
logger.error(extractErrorLogData(err, { template }), 'DBGM-00130 Error running analyser query');
|
||||
return {
|
||||
rows: [],
|
||||
isError: true,
|
||||
|
||||
@@ -93,7 +93,7 @@ export class SqlGenerator {
|
||||
}
|
||||
|
||||
private handleException = error => {
|
||||
logger.error(extractErrorLogData(error), 'Unhandled error');
|
||||
logger.error(extractErrorLogData(error), 'DBGM-00186 Unhandled error');
|
||||
this.isUnhandledException = true;
|
||||
};
|
||||
|
||||
|
||||
@@ -41,20 +41,20 @@ export function createBulkInsertStreamBase(driver: EngineDriver, stream, dbhan,
|
||||
writable.structure = structure;
|
||||
}
|
||||
if (structure && options.dropIfExists) {
|
||||
logger.info(`Dropping table ${fullNameQuoted}`);
|
||||
logger.info(`DBGM-00123 Dropping table ${fullNameQuoted}`);
|
||||
await driver.script(dbhan, `DROP TABLE ${fullNameQuoted}`);
|
||||
}
|
||||
if (options.createIfNotExists && (!structure || options.dropIfExists)) {
|
||||
const dmp = driver.createDumper();
|
||||
const createdTableInfo = driver.adaptTableInfo(prepareTableForImport({ ...writable.structure, ...name }));
|
||||
dmp.createTable(createdTableInfo);
|
||||
logger.info({ sql: dmp.s }, `Creating table ${fullNameQuoted}`);
|
||||
logger.info({ sql: dmp.s }, `DBGM-00124 Creating table ${fullNameQuoted}`);
|
||||
await driver.script(dbhan, dmp.s);
|
||||
structure = await driver.analyseSingleTable(dbhan, name);
|
||||
writable.structure = structure;
|
||||
}
|
||||
if (!writable.structure) {
|
||||
throw new Error(`Error importing table - ${fullNameQuoted} not found`);
|
||||
throw new Error(`DBGM-00125 Error importing table - ${fullNameQuoted} not found`);
|
||||
}
|
||||
if (options.truncate) {
|
||||
await driver.script(dbhan, `TRUNCATE TABLE ${fullNameQuoted}`);
|
||||
@@ -71,7 +71,7 @@ export function createBulkInsertStreamBase(driver: EngineDriver, stream, dbhan,
|
||||
])
|
||||
);
|
||||
} catch (err) {
|
||||
logger.error(extractErrorLogData(err), 'Error during preparing bulk insert table, stopped');
|
||||
logger.error(extractErrorLogData(err), 'DBGM-00184 Error during preparing bulk insert table, stopped');
|
||||
writable.destroy(err);
|
||||
}
|
||||
};
|
||||
@@ -129,7 +129,7 @@ export function createBulkInsertStreamBase(driver: EngineDriver, stream, dbhan,
|
||||
await driver.query(dbhan, dmp.s, { discardResult: true });
|
||||
}
|
||||
} catch (err) {
|
||||
logger.error(extractErrorLogData(err), 'Error during base bulk insert, insert stopped');
|
||||
logger.error(extractErrorLogData(err), 'DBGM-00185 Error during base bulk insert, insert stopped');
|
||||
writable.destroy(err);
|
||||
}
|
||||
};
|
||||
|
||||
@@ -101,7 +101,7 @@ export const driverBase = {
|
||||
for (const sqlItem of splitQuery(sql, this.getQuerySplitterOptions('script'))) {
|
||||
try {
|
||||
if (options?.logScriptItems) {
|
||||
logger.info({ sql: getLimitedQuery(sqlItem as string) }, 'Execute script item');
|
||||
logger.info({ sql: getLimitedQuery(sqlItem as string) }, 'DBGM-00131 Execute script item');
|
||||
}
|
||||
await this.query(pool, sqlItem, { discardResult: true, ...options?.queryOptions });
|
||||
} catch (err) {
|
||||
|
||||
@@ -184,6 +184,10 @@ select[disabled] {
|
||||
background-color: var(--theme-bg-1);
|
||||
}
|
||||
|
||||
.classicform select {
|
||||
padding: 5px 5px 4px;
|
||||
}
|
||||
|
||||
textarea {
|
||||
background-color: var(--theme-bg-0);
|
||||
color: var(--theme-font-1);
|
||||
|
||||
@@ -311,6 +311,7 @@
|
||||
'img sort-asc': 'mdi mdi-sort-alphabetical-ascending color-icon-green',
|
||||
'img sort-desc': 'mdi mdi-sort-alphabetical-descending color-icon-green',
|
||||
'img map': 'mdi mdi-map color-icon-blue',
|
||||
'img applog': 'mdi mdi-desktop-classic color-icon-green',
|
||||
|
||||
'img reference': 'mdi mdi-link-box',
|
||||
'img link': 'mdi mdi-link',
|
||||
|
||||
407
packages/web/src/tabs/AppLogTab.svelte
Normal file
407
packages/web/src/tabs/AppLogTab.svelte
Normal file
@@ -0,0 +1,407 @@
|
||||
<script lang="ts" context="module">
|
||||
export const matchingProps = [];
|
||||
</script>
|
||||
|
||||
<script lang="ts">
|
||||
import _ from 'lodash';
|
||||
import ToolStripContainer from '../buttons/ToolStripContainer.svelte';
|
||||
import ToolStripButton from '../buttons/ToolStripButton.svelte';
|
||||
import { apiCall, apiOff, apiOn } from '../utility/api';
|
||||
import { format, startOfDay, endOfDay } from 'date-fns';
|
||||
import { getIntSettingsValue } from '../settings/settingsTools';
|
||||
import DateRangeSelector from '../elements/DateRangeSelector.svelte';
|
||||
import Chip from '../elements/Chip.svelte';
|
||||
import TabControl from '../elements/TabControl.svelte';
|
||||
import Link from '../elements/Link.svelte';
|
||||
import SelectField from '../forms/SelectField.svelte';
|
||||
import { onDestroy, onMount, tick } from 'svelte';
|
||||
|
||||
let loadedRows = [];
|
||||
let loadedAll = false;
|
||||
let domLoadNext;
|
||||
let observer;
|
||||
let dateFilter = [new Date(), new Date()];
|
||||
let selectedLogIndex = null;
|
||||
let filters = {};
|
||||
let mode = 'recent';
|
||||
let autoScroll = true;
|
||||
let domTable;
|
||||
|
||||
function formatValue(value) {
|
||||
if (value == null) {
|
||||
return 'N/A';
|
||||
}
|
||||
return value;
|
||||
}
|
||||
|
||||
async function loadNextRows() {
|
||||
const pageSize = getIntSettingsValue('dataGrid.pageSize', 100, 5, 1000);
|
||||
const rows = await apiCall('files/get-app-log', {
|
||||
offset: loadedRows.length,
|
||||
limit: pageSize,
|
||||
dateFrom: startOfDay(dateFilter[0]).getTime(),
|
||||
dateTo: endOfDay(dateFilter[1]).getTime(),
|
||||
filters,
|
||||
});
|
||||
loadedRows = [...loadedRows, ...rows];
|
||||
if (rows.length < 10) {
|
||||
loadedAll = true;
|
||||
}
|
||||
}
|
||||
|
||||
function startObserver(dom) {
|
||||
if (observer) {
|
||||
observer.disconnect();
|
||||
observer = null;
|
||||
}
|
||||
if (dom) {
|
||||
observer = new IntersectionObserver(entries => {
|
||||
if (entries.find(x => x.isIntersecting)) {
|
||||
loadNextRows();
|
||||
}
|
||||
});
|
||||
observer.observe(dom);
|
||||
}
|
||||
}
|
||||
|
||||
$: if (mode == 'date') {
|
||||
startObserver(domLoadNext);
|
||||
}
|
||||
|
||||
async function reloadData() {
|
||||
switch (mode) {
|
||||
case 'recent':
|
||||
loadedRows = await apiCall('files/get-recent-app-log', { limit: 100 });
|
||||
await tick();
|
||||
scrollToRecent();
|
||||
break;
|
||||
case 'date':
|
||||
loadedRows = [];
|
||||
loadedAll = false;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
function doSetFilter(field, values) {
|
||||
filters = {
|
||||
...filters,
|
||||
[field]: values,
|
||||
};
|
||||
reloadData();
|
||||
}
|
||||
|
||||
const ColumnNamesMap = {
|
||||
msgcode: 'Code',
|
||||
};
|
||||
|
||||
function handleLogMessage(msg) {
|
||||
// console.log('AppLogTab: handleLogMessage', msg);
|
||||
if (mode !== 'recent') return;
|
||||
if (loadedRows.find(x => x.counter == msg.counter)) {
|
||||
return; // Already loaded
|
||||
}
|
||||
loadedRows = [...loadedRows, msg];
|
||||
scrollToRecent();
|
||||
}
|
||||
|
||||
function scrollToRecent() {
|
||||
if (autoScroll && domTable) {
|
||||
domTable.scrollTop = domTable.scrollHeight;
|
||||
}
|
||||
}
|
||||
|
||||
onMount(() => {
|
||||
apiOn('applog-event', handleLogMessage);
|
||||
reloadData();
|
||||
});
|
||||
onDestroy(() => {
|
||||
apiOff('applog-event', handleLogMessage);
|
||||
});
|
||||
</script>
|
||||
|
||||
<ToolStripContainer>
|
||||
<div class="wrapper classicform">
|
||||
<div class="filters">
|
||||
<div class="filter-label">Mode:</div>
|
||||
<SelectField
|
||||
isNative
|
||||
options={[
|
||||
{ label: 'Recent logs', value: 'recent' },
|
||||
{ label: 'Choose date', value: 'date' },
|
||||
]}
|
||||
value={mode}
|
||||
on:change={e => {
|
||||
mode = e.detail;
|
||||
reloadData();
|
||||
}}
|
||||
/>
|
||||
|
||||
{#if mode === 'recent'}
|
||||
<div class="filter-label ml-2">Auto-scroll</div>
|
||||
<input
|
||||
type="checkbox"
|
||||
checked={autoScroll}
|
||||
on:change={e => {
|
||||
autoScroll = e.target['checked'];
|
||||
}}
|
||||
/>
|
||||
{/if}
|
||||
|
||||
{#if mode === 'date'}
|
||||
<div class="filter-label">Date:</div>
|
||||
<DateRangeSelector
|
||||
onChange={value => {
|
||||
dateFilter = value;
|
||||
reloadData();
|
||||
}}
|
||||
/>
|
||||
{#each Object.keys(filters) as filterKey}
|
||||
<div class="ml-2">
|
||||
<span class="filter-label">{ColumnNamesMap[filterKey] || filterKey}:</span>
|
||||
{#each filters[filterKey] as value}
|
||||
<Chip
|
||||
onClose={() => {
|
||||
filters = { ...filters, [filterKey]: filters[filterKey].filter(x => x !== value) };
|
||||
if (!filters[filterKey].length) {
|
||||
filters = _.omit(filters, filterKey);
|
||||
}
|
||||
reloadData();
|
||||
}}
|
||||
>
|
||||
{formatValue(value)}
|
||||
</Chip>
|
||||
{/each}
|
||||
</div>
|
||||
{/each}
|
||||
{/if}
|
||||
</div>
|
||||
<div class="tablewrap" bind:this={domTable}>
|
||||
<table>
|
||||
<thead>
|
||||
<tr>
|
||||
<th style="width:80px">Date</th>
|
||||
<th>Time</th>
|
||||
<th>Code</th>
|
||||
<th>Message</th>
|
||||
<th>Caller</th>
|
||||
<th>Name</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
{#each loadedRows as row, index}
|
||||
<tr
|
||||
class="clickable"
|
||||
on:click={() => {
|
||||
if (selectedLogIndex === index) {
|
||||
selectedLogIndex = null;
|
||||
} else {
|
||||
selectedLogIndex = index;
|
||||
}
|
||||
}}
|
||||
>
|
||||
<td>{format(new Date(parseInt(row.time)), 'yyyy-MM-dd')}</td>
|
||||
<td>{format(new Date(parseInt(row.time)), 'HH:mm:ss')}</td>
|
||||
<td>{row.msgcode || ''}</td>
|
||||
<td>{row.msg}</td>
|
||||
<td>{row.caller || ''}</td>
|
||||
<td>{row.name || ''}</td>
|
||||
</tr>
|
||||
|
||||
{#if index === selectedLogIndex}
|
||||
<tr>
|
||||
<td colspan="6">
|
||||
<TabControl
|
||||
isInline
|
||||
tabs={_.compact([
|
||||
{ label: 'Details', slot: 1 },
|
||||
{ label: 'JSON', slot: 2 },
|
||||
])}
|
||||
>
|
||||
<svelte:fragment slot="1">
|
||||
<div class="details-wrap">
|
||||
<div class="row">
|
||||
<div>Message code:</div>
|
||||
{#if mode == 'date'}
|
||||
<Link onClick={() => doSetFilter('msgcode', [row.msgcode])}>{row.msgcode || 'N/A'}</Link>
|
||||
{:else}
|
||||
{row.msgcode || 'N/A'}
|
||||
{/if}
|
||||
</div>
|
||||
<div class="row">
|
||||
<div>Message:</div>
|
||||
{row.msg}
|
||||
</div>
|
||||
<div class="row">
|
||||
<div>Time:</div>
|
||||
<b>{format(new Date(parseInt(row.time)), 'yyyy-MM-dd HH:mm:ss')}</b>
|
||||
</div>
|
||||
<div class="row">
|
||||
<div>Caller:</div>
|
||||
{#if mode == 'date'}
|
||||
<Link onClick={() => doSetFilter('caller', [row.caller])}>{row.caller || 'N/A'}</Link>
|
||||
{:else}
|
||||
{row.caller || 'N/A'}
|
||||
{/if}
|
||||
</div>
|
||||
<div class="row">
|
||||
<div>Name:</div>
|
||||
{#if mode == 'date'}
|
||||
<Link onClick={() => doSetFilter('name', [row.name])}>{row.name || 'N/A'}</Link>
|
||||
{:else}
|
||||
{row.name || 'N/A'}
|
||||
{/if}
|
||||
</div>
|
||||
</div></svelte:fragment
|
||||
>
|
||||
<svelte:fragment slot="2">
|
||||
<pre>{JSON.stringify(row, null, 2)}</pre>
|
||||
</svelte:fragment>
|
||||
</TabControl>
|
||||
</td>
|
||||
</tr>
|
||||
{/if}
|
||||
{/each}
|
||||
{#if !loadedRows?.length && mode === 'date'}
|
||||
<tr>
|
||||
<td colspan="6">No data for selected date</td>
|
||||
</tr>
|
||||
{/if}
|
||||
{#if !loadedAll && mode === 'date'}
|
||||
{#key loadedRows}
|
||||
<tr>
|
||||
<td colspan="6" bind:this={domLoadNext}>Loading next rows... </td>
|
||||
</tr>
|
||||
{/key}
|
||||
{/if}
|
||||
</tbody>
|
||||
</table>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<svelte:fragment slot="toolstrip">
|
||||
<ToolStripButton
|
||||
icon="icon refresh"
|
||||
data-testid="AdminAuditLogTab_refreshButton"
|
||||
on:click={() => {
|
||||
reloadData();
|
||||
}}>Refresh</ToolStripButton
|
||||
>
|
||||
</svelte:fragment>
|
||||
</ToolStripContainer>
|
||||
|
||||
<style>
|
||||
.editor-wrap {
|
||||
height: 200px;
|
||||
}
|
||||
.tablewrap {
|
||||
overflow: auto;
|
||||
flex: 1;
|
||||
}
|
||||
.wrapper {
|
||||
flex: 1;
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
}
|
||||
table.disableFocusOutline:focus {
|
||||
outline: none;
|
||||
}
|
||||
table {
|
||||
border-collapse: collapse;
|
||||
width: 100%;
|
||||
}
|
||||
table.selectable {
|
||||
user-select: none;
|
||||
}
|
||||
tbody tr {
|
||||
background: var(--theme-bg-0);
|
||||
}
|
||||
tbody tr.selected {
|
||||
background: var(--theme-bg-3);
|
||||
}
|
||||
table:focus tbody tr.selected {
|
||||
background: var(--theme-bg-selected);
|
||||
}
|
||||
tbody tr.clickable:hover {
|
||||
background: var(--theme-bg-hover);
|
||||
}
|
||||
|
||||
thead th {
|
||||
border: 1px solid var(--theme-border);
|
||||
background-color: var(--theme-bg-1);
|
||||
padding: 5px;
|
||||
}
|
||||
tbody td {
|
||||
border: 1px solid var(--theme-border);
|
||||
}
|
||||
tbody td {
|
||||
padding: 5px;
|
||||
}
|
||||
td.isHighlighted {
|
||||
background-color: var(--theme-bg-1);
|
||||
}
|
||||
|
||||
td.clickable {
|
||||
cursor: pointer;
|
||||
}
|
||||
|
||||
thead {
|
||||
position: sticky;
|
||||
top: 0;
|
||||
z-index: 1;
|
||||
border-top: 1px solid var(--theme-border);
|
||||
}
|
||||
|
||||
table th {
|
||||
border-left: none;
|
||||
}
|
||||
|
||||
thead :global(tr:first-child) :global(th) {
|
||||
border-top: 1px solid var(--theme-border);
|
||||
}
|
||||
|
||||
table td {
|
||||
border: 0px;
|
||||
border-bottom: 1px solid var(--theme-border);
|
||||
border-right: 1px solid var(--theme-border);
|
||||
}
|
||||
|
||||
table {
|
||||
border-spacing: 0;
|
||||
border-collapse: separate;
|
||||
border-left: 1px solid var(--theme-border);
|
||||
}
|
||||
|
||||
.empty-cell {
|
||||
background-color: var(--theme-bg-1);
|
||||
}
|
||||
|
||||
.filters {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
flex-wrap: wrap;
|
||||
}
|
||||
|
||||
.filter-label {
|
||||
margin-right: 5px;
|
||||
color: var(--theme-font-2);
|
||||
}
|
||||
|
||||
.details-wrap {
|
||||
padding: 10px;
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
}
|
||||
|
||||
.details-wrap .row {
|
||||
display: flex;
|
||||
}
|
||||
|
||||
.details-wrap .row div:first-child {
|
||||
width: 150px;
|
||||
}
|
||||
|
||||
pre {
|
||||
overflow: auto;
|
||||
max-width: 50vw;
|
||||
}
|
||||
</style>
|
||||
@@ -24,6 +24,7 @@ import * as MapTab from './MapTab.svelte';
|
||||
import * as ServerSummaryTab from './ServerSummaryTab.svelte';
|
||||
import * as ImportExportTab from './ImportExportTab.svelte';
|
||||
import * as SqlObjectTab from './SqlObjectTab.svelte';
|
||||
import * as AppLogTab from './AppLogTab.svelte';
|
||||
|
||||
import protabs from './index-pro';
|
||||
|
||||
@@ -54,5 +55,6 @@ export default {
|
||||
ServerSummaryTab,
|
||||
ImportExportTab,
|
||||
SqlObjectTab,
|
||||
AppLogTab,
|
||||
...protabs,
|
||||
};
|
||||
|
||||
@@ -19,6 +19,7 @@
|
||||
import getElectron from '../utility/getElectron';
|
||||
import { showModal } from '../modals/modalTools';
|
||||
import NewObjectModal from '../modals/NewObjectModal.svelte';
|
||||
import openNewTab from '../utility/openNewTab';
|
||||
|
||||
let domSettings;
|
||||
let domCloudAccount;
|
||||
@@ -123,6 +124,16 @@
|
||||
$visibleWidgetSideBar = true;
|
||||
},
|
||||
},
|
||||
{
|
||||
text: 'View application logs',
|
||||
onClick: () => {
|
||||
openNewTab({
|
||||
title: 'Application log',
|
||||
icon: 'img applog',
|
||||
tabComponent: 'AppLogTab',
|
||||
});
|
||||
},
|
||||
},
|
||||
];
|
||||
currentDropDownMenu.set({ left, top, items });
|
||||
}
|
||||
|
||||
@@ -13,9 +13,9 @@ class Analyser extends DatabaseAnalyser {
|
||||
}
|
||||
|
||||
async _runAnalysis() {
|
||||
this.feedback({ analysingMessage: 'Loading tables' });
|
||||
this.feedback({ analysingMessage: 'DBGM-00177 Loading tables' });
|
||||
const tables = await this.analyserQuery('tables', ['tables']);
|
||||
this.feedback({ analysingMessage: 'Loading columns' });
|
||||
this.feedback({ analysingMessage: 'DBGM-00178 Loading columns' });
|
||||
const columns = await this.analyserQuery('columns', ['tables']);
|
||||
// this.feedback({ analysingMessage: 'Loading views' });
|
||||
// const views = await this.analyserQuery('views', ['views']);
|
||||
|
||||
@@ -29,11 +29,11 @@ class Analyser extends DatabaseAnalyser {
|
||||
}
|
||||
|
||||
async _runAnalysis() {
|
||||
this.feedback({ analysingMessage: 'Loading tables' });
|
||||
this.feedback({ analysingMessage: 'DBGM-00181 Loading tables' });
|
||||
const tables = await this.analyserQuery('tables', ['tables']);
|
||||
this.feedback({ analysingMessage: 'Loading columns' });
|
||||
this.feedback({ analysingMessage: 'DBGM-00182 Loading columns' });
|
||||
const columns = await this.analyserQuery('columns', ['tables', 'views']);
|
||||
this.feedback({ analysingMessage: 'Loading views' });
|
||||
this.feedback({ analysingMessage: 'DBGM-00183 Loading views' });
|
||||
let views = await this.analyserQuery('views', ['views']);
|
||||
if (views?.isError) {
|
||||
views = await this.analyserQuery('viewsNoDefinition', ['views']);
|
||||
|
||||
@@ -27,7 +27,7 @@ class CsvPrepareStream extends stream.Transform {
|
||||
}
|
||||
|
||||
async function writer({ fileName, encoding = 'utf-8', header = true, delimiter, quoted }) {
|
||||
logger.info(`Writing file ${fileName}`);
|
||||
logger.info(`DBGM-00133 Writing file ${fileName}`);
|
||||
const csvPrepare = new CsvPrepareStream({ header });
|
||||
const csvStream = csv.stringify({ delimiter, quoted });
|
||||
const fileStream = fs.createWriteStream(fileName, encoding);
|
||||
|
||||
@@ -75,7 +75,7 @@ async function reader({ fileName, encoding = 'ISO-8859-1', includeDeletedRecords
|
||||
pass.end();
|
||||
} catch (error) {
|
||||
// If any error occurs, destroy the stream with the error
|
||||
logger.error(extractErrorLogData(error), 'Error reading DBF file');
|
||||
logger.error(extractErrorLogData(error), 'DBGM-00187 Error reading DBF file');
|
||||
pass.end();
|
||||
}
|
||||
})();
|
||||
|
||||
@@ -114,7 +114,7 @@ const driver = {
|
||||
|
||||
options.done();
|
||||
} catch (error) {
|
||||
logger.error(extractErrorLogData(error), 'Stream error');
|
||||
logger.error(extractErrorLogData(error), 'DBGM-00188 Stream error');
|
||||
const { message, procName } = error;
|
||||
options.info({
|
||||
message,
|
||||
@@ -206,7 +206,7 @@ const driver = {
|
||||
pass.end();
|
||||
return pass;
|
||||
} catch (error) {
|
||||
logger.error(extractErrorLogData(error), 'ReadQuery error');
|
||||
logger.error(extractErrorLogData(error), 'DBGM-00189 ReadQuery error');
|
||||
const { message, procName } = error;
|
||||
pass.write({
|
||||
__isStreamInfo: true,
|
||||
|
||||
@@ -97,7 +97,7 @@ const driver = {
|
||||
|
||||
options.done();
|
||||
} catch (err) {
|
||||
logger.error(extractErrorLogData(err), 'Stream error');
|
||||
logger.error(extractErrorLogData(err), 'DBGM-00190 Stream error');
|
||||
options.info({
|
||||
message: err.message,
|
||||
line: err.line,
|
||||
@@ -199,8 +199,8 @@ const driver = {
|
||||
await new Promise((resolve, reject) => {
|
||||
transactionPromise.query(currentSql, function (err, result) {
|
||||
if (err) {
|
||||
logger.error(extractErrorLogData(err), 'Error executing SQL in transaction');
|
||||
logger.error({ sql: currentSql }, 'SQL that caused the error');
|
||||
logger.error(extractErrorLogData(err), 'DBGM-00191 Error executing SQL in transaction');
|
||||
logger.error({ sql: currentSql }, 'DBGM-00192 SQL that caused the error');
|
||||
return reject(err);
|
||||
}
|
||||
resolve(result);
|
||||
@@ -211,19 +211,19 @@ const driver = {
|
||||
await new Promise((resolve, reject) => {
|
||||
transactionPromise.commit(function (err) {
|
||||
if (err) {
|
||||
logger.error(extractErrorLogData(err), 'Error committing transaction');
|
||||
logger.error(extractErrorLogData(err), 'DBGM-00193 Error committing transaction');
|
||||
return reject(err);
|
||||
}
|
||||
resolve();
|
||||
});
|
||||
});
|
||||
} catch (error) {
|
||||
logger.error(extractErrorLogData(error), 'Transaction error');
|
||||
logger.error(extractErrorLogData(error), 'DBGM-00194 Transaction error');
|
||||
if (transactionPromise) {
|
||||
await new Promise((resolve, reject) => {
|
||||
transactionPromise.rollback(function (rollbackErr) {
|
||||
if (rollbackErr) {
|
||||
logger.error(extractErrorLogData(rollbackErr), 'Error rolling back transaction');
|
||||
logger.error(extractErrorLogData(rollbackErr), 'DBGM-00195 Error rolling back transaction');
|
||||
return reject(rollbackErr); // Re-reject the rollback error
|
||||
}
|
||||
resolve();
|
||||
|
||||
@@ -32,15 +32,15 @@ function createBulkInsertStream(driver, stream, dbhan, name, options) {
|
||||
writable.checkStructure = async () => {
|
||||
try {
|
||||
if (options.dropIfExists) {
|
||||
logger.info(`Dropping collection ${collectionName}`);
|
||||
logger.info(`DBGM-00137 Dropping collection ${collectionName}`);
|
||||
await db.collection(collectionName).drop();
|
||||
}
|
||||
if (options.truncate) {
|
||||
logger.info(`Truncating collection ${collectionName}`);
|
||||
logger.info(`DBGM-00138 Truncating collection ${collectionName}`);
|
||||
await db.collection(collectionName).deleteMany({});
|
||||
}
|
||||
} catch (err) {
|
||||
logger.error(extractErrorLogData(err), 'Error during preparing mongo bulk insert collection, stopped');
|
||||
logger.error(extractErrorLogData(err), 'DBGM-00139 Error during preparing mongo bulk insert collection, stopped');
|
||||
writable.destroy(err);
|
||||
}
|
||||
};
|
||||
@@ -52,7 +52,7 @@ function createBulkInsertStream(driver, stream, dbhan, name, options) {
|
||||
|
||||
await db.collection(collectionName).insertMany(rows);
|
||||
} catch (err) {
|
||||
logger.error(extractErrorLogData(err), 'Error bulk insert collection, stopped');
|
||||
logger.error(extractErrorLogData(err), 'DBGM-00197 Error bulk insert collection, stopped');
|
||||
writable.destroy(err);
|
||||
}
|
||||
};
|
||||
|
||||
@@ -75,7 +75,7 @@ function ensureDatabaseInMongoURI(uri, dbName) {
|
||||
url.pathname = `/${dbName}`;
|
||||
return url.toString();
|
||||
} catch (error) {
|
||||
logger.error('Invalid URI format:', error.message);
|
||||
logger.error('DBGM-00198 Invalid URI format:', error.message);
|
||||
return uri;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -100,24 +100,24 @@ class MsSqlAnalyser extends DatabaseAnalyser {
|
||||
}
|
||||
|
||||
async _runAnalysis() {
|
||||
this.feedback({ analysingMessage: 'Loading tables' });
|
||||
this.feedback({ analysingMessage: 'DBGM-00205 Loading tables' });
|
||||
const tablesRows = await this.analyserQuery('tables', ['tables']);
|
||||
this.feedback({ analysingMessage: 'Loading columns' });
|
||||
this.feedback({ analysingMessage: 'DBGM-00206 Loading columns' });
|
||||
const columnsRows = await this.analyserQuery('columns', ['tables']);
|
||||
this.feedback({ analysingMessage: 'Loading primary keys' });
|
||||
this.feedback({ analysingMessage: 'DBGM-00207 Loading primary keys' });
|
||||
const pkColumnsRows = await this.analyserQuery('primaryKeys', ['tables']);
|
||||
this.feedback({ analysingMessage: 'Loading foreign keys' });
|
||||
this.feedback({ analysingMessage: 'DBGM-00208 Loading foreign keys' });
|
||||
const fkColumnsRows = await this.analyserQuery('foreignKeys', ['tables']);
|
||||
this.feedback({ analysingMessage: 'Loading indexes' });
|
||||
this.feedback({ analysingMessage: 'DBGM-00209 Loading indexes' });
|
||||
const indexesRows = await this.analyserQuery('indexes', ['tables']);
|
||||
this.feedback({ analysingMessage: 'Loading index columns' });
|
||||
this.feedback({ analysingMessage: 'DBGM-00210 Loading index columns' });
|
||||
const indexcolsRows = await this.analyserQuery('indexcols', ['tables']);
|
||||
this.feedback({ analysingMessage: 'Loading table sizes' });
|
||||
this.feedback({ analysingMessage: 'DBGM-00211 Loading table sizes' });
|
||||
const tableSizes = await this.analyserQuery('tableSizes');
|
||||
|
||||
const tableSizesDict = _.mapValues(_.keyBy(tableSizes.rows, 'objectId'), 'tableRowCount');
|
||||
|
||||
this.feedback({ analysingMessage: 'Loading SQL code' });
|
||||
this.feedback({ analysingMessage: 'DBGM-00212 Loading SQL code' });
|
||||
const sqlCodeRows = await this.analyserQuery('loadSqlCode', ['views', 'procedures', 'functions', 'triggers']);
|
||||
const getCreateSql = row =>
|
||||
sqlCodeRows.rows
|
||||
@@ -125,21 +125,21 @@ class MsSqlAnalyser extends DatabaseAnalyser {
|
||||
.map(x => x.codeText)
|
||||
.join('');
|
||||
|
||||
this.feedback({ analysingMessage: 'Loading views' });
|
||||
this.feedback({ analysingMessage: 'DBGM-00213 Loading views' });
|
||||
const viewsRows = await this.analyserQuery('views', ['views']);
|
||||
this.feedback({ analysingMessage: 'Loading procedures & functions' });
|
||||
this.feedback({ analysingMessage: 'DBGM-00214 Loading procedures & functions' });
|
||||
|
||||
const programmableRows = await this.analyserQuery('programmables', ['procedures', 'functions']);
|
||||
const procedureParameterRows = await this.analyserQuery('proceduresParameters');
|
||||
const functionParameterRows = await this.analyserQuery('functionParameters');
|
||||
|
||||
this.feedback({ analysingMessage: 'Loading triggers' });
|
||||
this.feedback({ analysingMessage: 'DBGM-00215 Loading triggers' });
|
||||
const triggerRows = await this.analyserQuery('triggers');
|
||||
|
||||
this.feedback({ analysingMessage: 'Loading view columns' });
|
||||
this.feedback({ analysingMessage: 'DBGM-00216 Loading view columns' });
|
||||
const viewColumnRows = await this.analyserQuery('viewColumns', ['views']);
|
||||
|
||||
this.feedback({ analysingMessage: 'Finalizing DB structure' });
|
||||
this.feedback({ analysingMessage: 'DBGM-00217 Finalizing DB structure' });
|
||||
const tables = tablesRows.rows.map(row => ({
|
||||
...row,
|
||||
contentHash: row.modifyDate && row.modifyDate.toISOString(),
|
||||
|
||||
@@ -72,7 +72,7 @@ function createTediousBulkInsertStream(driver, stream, dbhan, name, options) {
|
||||
try {
|
||||
await runBulkInsertBatch(dbhan, fullName, writable, rows);
|
||||
} catch (err) {
|
||||
logger.error(extractErrorLogData(err), 'Error during bulk insert, insert stopped');
|
||||
logger.error(extractErrorLogData(err), 'DBGM-00199 Error during bulk insert, insert stopped');
|
||||
// writable.emit('error', err);
|
||||
writable.destroy(err);
|
||||
}
|
||||
|
||||
@@ -169,7 +169,7 @@ const driver = {
|
||||
const defaultSchemaRows = await this.query(dbhan, 'SELECT SCHEMA_NAME() as name');
|
||||
const defaultSchema = defaultSchemaRows.rows[0]?.name;
|
||||
|
||||
logger.debug(`Loaded ${rows.length} mssql schemas`);
|
||||
logger.debug(`DBGM-00140 Loaded ${rows.length} mssql schemas`);
|
||||
|
||||
return rows.map(x => ({
|
||||
...x,
|
||||
|
||||
@@ -118,17 +118,17 @@ class Analyser extends DatabaseAnalyser {
|
||||
}
|
||||
|
||||
async _runAnalysis() {
|
||||
this.feedback({ analysingMessage: 'Loading tables' });
|
||||
this.feedback({ analysingMessage: 'DBGM-00218 Loading tables' });
|
||||
const tables = await this.analyserQuery('tables', ['tables']);
|
||||
this.feedback({ analysingMessage: 'Loading columns' });
|
||||
this.feedback({ analysingMessage: 'DBGM-00219 Loading columns' });
|
||||
const columns = await this.analyserQuery('columns', ['tables', 'views']);
|
||||
this.feedback({ analysingMessage: 'Loading primary keys' });
|
||||
this.feedback({ analysingMessage: 'DBGM-00220 Loading primary keys' });
|
||||
const pkColumns = await this.analyserQuery('primaryKeys', ['tables']);
|
||||
this.feedback({ analysingMessage: 'Loading foreign keys' });
|
||||
this.feedback({ analysingMessage: 'DBGM-00221 Loading foreign keys' });
|
||||
const fkColumns = await this.analyserQuery('foreignKeys', ['tables']);
|
||||
this.feedback({ analysingMessage: 'Loading views' });
|
||||
this.feedback({ analysingMessage: 'DBGM-00222 Loading views' });
|
||||
const views = await this.analyserQuery('views', ['views']);
|
||||
this.feedback({ analysingMessage: 'Loading programmables' });
|
||||
this.feedback({ analysingMessage: 'DBGM-00223 Loading programmables' });
|
||||
const programmables = await this.analyserQuery('programmables', ['procedures', 'functions']);
|
||||
|
||||
const parameters = await this.analyserQuery('parameters', ['procedures', 'functions']);
|
||||
@@ -155,20 +155,20 @@ class Analyser extends DatabaseAnalyser {
|
||||
return acc;
|
||||
}, {});
|
||||
|
||||
this.feedback({ analysingMessage: 'Loading view texts' });
|
||||
this.feedback({ analysingMessage: 'DBGM-00224 Loading view texts' });
|
||||
const viewTexts = await this.getViewTexts(views.rows.map(x => x.pureName));
|
||||
this.feedback({ analysingMessage: 'Loading indexes' });
|
||||
this.feedback({ analysingMessage: 'DBGM-00225 Loading indexes' });
|
||||
const indexes = await this.analyserQuery('indexes', ['tables']);
|
||||
this.feedback({ analysingMessage: 'Loading uniques' });
|
||||
this.feedback({ analysingMessage: 'DBGM-00226 Loading uniques' });
|
||||
|
||||
this.feedback({ analysingMessage: 'Loading triggers' });
|
||||
this.feedback({ analysingMessage: 'DBGM-00227 Loading triggers' });
|
||||
const triggers = await this.analyserQuery('triggers');
|
||||
|
||||
this.feedback({ analysingMessage: 'Loading scheduler events' });
|
||||
this.feedback({ analysingMessage: 'DBGM-00228 Loading scheduler events' });
|
||||
const schedulerEvents = await this.analyserQuery('schedulerEvents');
|
||||
|
||||
const uniqueNames = await this.analyserQuery('uniqueNames', ['tables']);
|
||||
this.feedback({ analysingMessage: 'Finalizing DB structure' });
|
||||
this.feedback({ analysingMessage: 'DBGM-00229 Finalizing DB structure' });
|
||||
|
||||
const res = {
|
||||
tables: tables.rows.map(table => ({
|
||||
|
||||
@@ -138,7 +138,7 @@ const drivers = driverBases.map(driverBase => ({
|
||||
};
|
||||
|
||||
const handleError = error => {
|
||||
logger.error(extractErrorLogData(error), 'Stream error');
|
||||
logger.error(extractErrorLogData(error), 'DBGM-00200 Stream error');
|
||||
const { message } = error;
|
||||
options.info({
|
||||
message,
|
||||
|
||||
@@ -45,26 +45,26 @@ class Analyser extends DatabaseAnalyser {
|
||||
}
|
||||
|
||||
async _runAnalysis() {
|
||||
this.feedback({ analysingMessage: 'Loading tables' });
|
||||
this.feedback({ analysingMessage: 'DBGM-00230 Loading tables' });
|
||||
const tables = await this.analyserQuery('tableList', ['tables'], { $owner: this.dbhan.database });
|
||||
this.feedback({ analysingMessage: 'Loading columns' });
|
||||
this.feedback({ analysingMessage: 'DBGM-00231 Loading columns' });
|
||||
const columns = await this.analyserQuery('columns', ['tables', 'views'], { $owner: this.dbhan.database });
|
||||
|
||||
this.feedback({ analysingMessage: 'Loading primary keys' });
|
||||
this.feedback({ analysingMessage: 'DBGM-00232 Loading primary keys' });
|
||||
const pkColumns = await this.analyserQuery('primaryKeys', ['tables'], { $owner: this.dbhan.database });
|
||||
|
||||
//let fkColumns = null;
|
||||
|
||||
this.feedback({ analysingMessage: 'Loading foreign keys' });
|
||||
this.feedback({ analysingMessage: 'DBGM-00233 Loading foreign keys' });
|
||||
const fkColumns = await this.analyserQuery('foreignKeys', ['tables'], { $owner: this.dbhan.database });
|
||||
this.feedback({ analysingMessage: 'Loading views' });
|
||||
this.feedback({ analysingMessage: 'DBGM-00234 Loading views' });
|
||||
const views = await this.analyserQuery('views', ['views'], { $owner: this.dbhan.database });
|
||||
|
||||
this.feedback({ analysingMessage: 'Loading materialized views' });
|
||||
this.feedback({ analysingMessage: 'DBGM-00235 Loading materialized views' });
|
||||
const matviews = this.driver.dialect.materializedViews
|
||||
? await this.analyserQuery('matviews', ['matviews'], { $owner: this.dbhan.database })
|
||||
: null;
|
||||
this.feedback({ analysingMessage: 'Loading routines' });
|
||||
this.feedback({ analysingMessage: 'DBGM-00236 Loading routines' });
|
||||
const routines = await this.analyserQuery('routines', ['procedures', 'functions'], {
|
||||
$owner: this.dbhan.database,
|
||||
});
|
||||
@@ -91,15 +91,15 @@ class Analyser extends DatabaseAnalyser {
|
||||
return acc;
|
||||
}, {});
|
||||
|
||||
this.feedback({ analysingMessage: 'Loading indexes' });
|
||||
this.feedback({ analysingMessage: 'DBGM-00237 Loading indexes' });
|
||||
const indexes = await this.analyserQuery('indexes', ['tables'], { $owner: this.dbhan.database });
|
||||
this.feedback({ analysingMessage: 'Loading unique names' });
|
||||
this.feedback({ analysingMessage: 'DBGM-00238 Loading unique names' });
|
||||
|
||||
const triggers = await this.analyserQuery('triggers', undefined, { $owner: this.dbhan.database });
|
||||
this.feedback({ analysingMessage: 'Loading triggers' });
|
||||
this.feedback({ analysingMessage: 'DBGM-00239 Loading triggers' });
|
||||
|
||||
const uniqueNames = await this.analyserQuery('uniqueNames', ['tables'], { $owner: this.dbhan.database });
|
||||
this.feedback({ analysingMessage: 'Finalizing DB structure' });
|
||||
this.feedback({ analysingMessage: 'DBGM-00240 Finalizing DB structure' });
|
||||
|
||||
const fkColumnsMapped = fkColumns.rows.map(x => ({
|
||||
pureName: x.pure_name,
|
||||
|
||||
@@ -83,26 +83,26 @@ class Analyser extends DatabaseAnalyser {
|
||||
}
|
||||
|
||||
async _runAnalysis() {
|
||||
this.feedback({ analysingMessage: 'Loading tables' });
|
||||
this.feedback({ analysingMessage: 'DBGM-00241 Loading tables' });
|
||||
const tables = await this.analyserQuery(this.driver.dialect.stringAgg ? 'tableModifications' : 'tableList', [
|
||||
'tables',
|
||||
]);
|
||||
|
||||
this.feedback({ analysingMessage: 'Loading columns' });
|
||||
this.feedback({ analysingMessage: 'DBGM-00242 Loading columns' });
|
||||
const columns = await this.analyserQuery('columns', ['tables', 'views']);
|
||||
|
||||
this.feedback({ analysingMessage: 'Loading primary keys' });
|
||||
this.feedback({ analysingMessage: 'DBGM-00243 Loading primary keys' });
|
||||
const pkColumns = await this.analyserQuery('primaryKeys', ['tables']);
|
||||
|
||||
let fkColumns = null;
|
||||
|
||||
this.feedback({ analysingMessage: 'Loading foreign key constraints' });
|
||||
this.feedback({ analysingMessage: 'DBGM-00244 Loading foreign key constraints' });
|
||||
// const fk_tableConstraints = await this.analyserQuery('fk_tableConstraints', ['tables']);
|
||||
|
||||
this.feedback({ analysingMessage: 'Loading foreign key refs' });
|
||||
this.feedback({ analysingMessage: 'DBGM-00245 Loading foreign key refs' });
|
||||
const foreignKeys = await this.analyserQuery('foreignKeys', ['tables']);
|
||||
|
||||
this.feedback({ analysingMessage: 'Loading foreign key columns' });
|
||||
this.feedback({ analysingMessage: 'DBGM-00246 Loading foreign key columns' });
|
||||
const fk_keyColumnUsage = await this.analyserQuery('fk_keyColumnUsage', ['tables']);
|
||||
|
||||
// const cntKey = x => `${x.constraint_name}|${x.constraint_schema}`;
|
||||
@@ -149,52 +149,52 @@ class Analyser extends DatabaseAnalyser {
|
||||
}
|
||||
fkColumns = { rows: fkRows };
|
||||
|
||||
this.feedback({ analysingMessage: 'Loading views' });
|
||||
this.feedback({ analysingMessage: 'DBGM-00247 Loading views' });
|
||||
const views = await this.analyserQuery('views', ['views']);
|
||||
|
||||
this.feedback({ analysingMessage: 'Loading materialized views' });
|
||||
this.feedback({ analysingMessage: 'DBGM-00248 Loading materialized views' });
|
||||
const matviews = this.driver.dialect.materializedViews ? await this.analyserQuery('matviews', ['matviews']) : null;
|
||||
|
||||
this.feedback({ analysingMessage: 'Loading materialized view columns' });
|
||||
this.feedback({ analysingMessage: 'DBGM-00249 Loading materialized view columns' });
|
||||
const matviewColumns = this.driver.dialect.materializedViews
|
||||
? await this.analyserQuery('matviewColumns', ['matviews'])
|
||||
: null;
|
||||
|
||||
this.feedback({ analysingMessage: 'Loading routines' });
|
||||
this.feedback({ analysingMessage: 'DBGM-00250 Loading routines' });
|
||||
const routines = await this.analyserQuery('routines', ['procedures', 'functions']);
|
||||
|
||||
this.feedback({ analysingMessage: 'Loading routine parameters' });
|
||||
this.feedback({ analysingMessage: 'DBGM-00251 Loading routine parameters' });
|
||||
const routineParametersRows = await this.analyserQuery('proceduresParameters');
|
||||
|
||||
this.feedback({ analysingMessage: 'Loading indexes' });
|
||||
this.feedback({ analysingMessage: 'DBGM-00252 Loading indexes' });
|
||||
const indexes = this.driver.__analyserInternals.skipIndexes
|
||||
? { rows: [] }
|
||||
: await this.analyserQuery('indexes', ['tables']);
|
||||
|
||||
this.feedback({ analysingMessage: 'Loading index columns' });
|
||||
this.feedback({ analysingMessage: 'DBGM-00253 Loading index columns' });
|
||||
const indexcols = this.driver.__analyserInternals.skipIndexes
|
||||
? { rows: [] }
|
||||
: await this.analyserQuery('indexcols', ['tables']);
|
||||
|
||||
this.feedback({ analysingMessage: 'Loading unique names' });
|
||||
this.feedback({ analysingMessage: 'DBGM-00254 Loading unique names' });
|
||||
const uniqueNames = await this.analyserQuery('uniqueNames', ['tables']);
|
||||
|
||||
let geometryColumns = { rows: [] };
|
||||
if (views.rows.find(x => x.pure_name == 'geometry_columns' && x.schema_name == 'public')) {
|
||||
this.feedback({ analysingMessage: 'Loading geometry columns' });
|
||||
this.feedback({ analysingMessage: 'DBGM-00255 Loading geometry columns' });
|
||||
geometryColumns = await this.analyserQuery('geometryColumns', ['tables']);
|
||||
}
|
||||
|
||||
let geographyColumns = { rows: [] };
|
||||
if (views.rows.find(x => x.pure_name == 'geography_columns' && x.schema_name == 'public')) {
|
||||
this.feedback({ analysingMessage: 'Loading geography columns' });
|
||||
this.feedback({ analysingMessage: 'DBGM-00256 Loading geography columns' });
|
||||
geographyColumns = await this.analyserQuery('geographyColumns', ['tables']);
|
||||
}
|
||||
|
||||
this.feedback({ analysingMessage: 'Loading triggers' });
|
||||
this.feedback({ analysingMessage: 'DBGM-00257 Loading triggers' });
|
||||
const triggers = await this.analyserQuery('triggers');
|
||||
|
||||
this.feedback({ analysingMessage: 'Finalizing DB structure' });
|
||||
this.feedback({ analysingMessage: 'DBGM-00258 Finalizing DB structure' });
|
||||
|
||||
const columnColumnsMapped = fkColumns.rows.map(x => ({
|
||||
pureName: x.pure_name,
|
||||
@@ -387,7 +387,7 @@ class Analyser extends DatabaseAnalyser {
|
||||
procedures: res.procedures?.length,
|
||||
functions: res.functions?.length,
|
||||
},
|
||||
'Database structured finalized'
|
||||
'DBGM-00141 Database structured finalized'
|
||||
);
|
||||
|
||||
return res;
|
||||
|
||||
@@ -228,7 +228,7 @@ const drivers = driverBases.map(driverBase => ({
|
||||
});
|
||||
|
||||
query.on('error', error => {
|
||||
logger.error(extractErrorLogData(error), 'Stream error');
|
||||
logger.error(extractErrorLogData(error), 'DBGM-00201 Stream error');
|
||||
const { message, position, procName } = error;
|
||||
let line = null;
|
||||
if (position) {
|
||||
@@ -382,7 +382,7 @@ const drivers = driverBases.map(driverBase => ({
|
||||
const defaultSchemaRows = await this.query(dbhan, 'SELECT current_schema');
|
||||
const defaultSchema = defaultSchemaRows.rows[0]?.current_schema?.trim();
|
||||
|
||||
logger.debug(`Loaded ${schemaRows.rows.length} postgres schemas`);
|
||||
logger.debug(`DBGM-00142 Loaded ${schemaRows.rows.length} postgres schemas`);
|
||||
|
||||
const schemas = schemaRows.rows.map(x => ({
|
||||
schemaName: x.schema_name,
|
||||
|
||||
@@ -81,7 +81,7 @@ const libsqlDriver = {
|
||||
try {
|
||||
inTransaction();
|
||||
} catch (error) {
|
||||
logger.error(extractErrorLogData(error), 'Stream error');
|
||||
logger.error(extractErrorLogData(error), 'DBGM-00202 Stream error');
|
||||
const { message, procName } = error;
|
||||
options.info({
|
||||
message,
|
||||
|
||||
@@ -76,7 +76,7 @@ const driver = {
|
||||
try {
|
||||
inTransaction();
|
||||
} catch (error) {
|
||||
logger.error(extractErrorLogData(error), 'Stream error');
|
||||
logger.error(extractErrorLogData(error), 'DBGM-00203 Stream error');
|
||||
const { message, procName } = error;
|
||||
options.info({
|
||||
message,
|
||||
|
||||
@@ -59,7 +59,7 @@ class ParseStream extends stream.Transform {
|
||||
}
|
||||
|
||||
async function reader({ fileName, encoding = 'utf-8', itemElementName }) {
|
||||
logger.info(`Reading file ${fileName}`);
|
||||
logger.info(`DBGM-00143 Reading file ${fileName}`);
|
||||
|
||||
const fileStream = fs.createReadStream(fileName, encoding);
|
||||
const parser = new ParseStream({ itemElementName });
|
||||
|
||||
@@ -70,7 +70,7 @@ class StringifyStream extends stream.Transform {
|
||||
}
|
||||
|
||||
async function writer({ fileName, encoding = 'utf-8', itemElementName, rootElementName }) {
|
||||
logger.info(`Writing file ${fileName}`);
|
||||
logger.info(`DBGM-00144 Writing file ${fileName}`);
|
||||
const stringify = new StringifyStream({ itemElementName, rootElementName });
|
||||
const fileStream = fs.createWriteStream(fileName, encoding);
|
||||
return [stringify, fileStream];
|
||||
|
||||
Reference in New Issue
Block a user