SYNC: Merge pull request #7 from dbgate/feature/applog

This commit is contained in:
Jan Prochazka
2025-08-05 17:13:33 +02:00
committed by Diflow
parent ac0aebd751
commit a96f1d0b49
83 changed files with 1014 additions and 242 deletions

View File

@@ -94,7 +94,7 @@ class OAuthProvider extends AuthProviderBase {
payload = jwt.decode(id_token);
}
logger.info({ payload }, 'User payload returned from OAUTH');
logger.info({ payload }, 'DBGM-00002 User payload returned from OAUTH');
const login =
process.env.OAUTH_LOGIN_FIELD && payload && payload[process.env.OAUTH_LOGIN_FIELD]

View File

@@ -102,7 +102,7 @@ module.exports = {
...fileType('.matview.sql', 'matview.sql'),
];
} catch (err) {
logger.error(extractErrorLogData(err), 'Error reading archive files');
logger.error(extractErrorLogData(err), 'DBGM-00001 Error reading archive files');
return [];
}
},

View File

@@ -99,7 +99,7 @@ function authMiddleware(req, res, next) {
return next();
}
logger.error(extractErrorLogData(err), 'Sending invalid token error');
logger.error(extractErrorLogData(err), 'DBGM-00098 Sending invalid token error');
return unauthorizedResponse(req, res, 'invalid token');
}

View File

@@ -45,7 +45,7 @@ module.exports = {
const resp = await callCloudApiGet('content-list');
return resp;
} catch (err) {
logger.error(extractErrorLogData(err), 'Error getting cloud content list');
logger.error(extractErrorLogData(err), 'DBGM-00099 Error getting cloud content list');
return [];
}

View File

@@ -116,12 +116,12 @@ function getPortalCollections() {
}
}
logger.info({ connections: connections.map(pickSafeConnectionInfo) }, 'Using connections from ENV variables');
logger.info({ connections: connections.map(pickSafeConnectionInfo) }, 'DBGM-00005 Using connections from ENV variables');
const noengine = connections.filter(x => !x.engine);
if (noengine.length > 0) {
logger.warn(
{ connections: noengine.map(x => x._id) },
'Invalid CONNECTIONS configuration, missing ENGINE for connection ID'
'DBGM-00006 Invalid CONNECTIONS configuration, missing ENGINE for connection ID'
);
}
return connections;
@@ -530,7 +530,7 @@ module.exports = {
socket.emit('got-volatile-token', { strmid, savedConId: conid, volatileConId: volatile._id });
return { success: true };
} catch (err) {
logger.error(extractErrorLogData(err), 'Error getting DB token');
logger.error(extractErrorLogData(err), 'DBGM-00100 Error getting DB token');
return { error: err.message };
}
},
@@ -546,7 +546,7 @@ module.exports = {
const resp = await authProvider.login(null, null, { conid: volatile._id }, req);
return resp;
} catch (err) {
logger.error(extractErrorLogData(err), 'Error getting DB token');
logger.error(extractErrorLogData(err), 'DBGM-00101 Error getting DB token');
return { error: err.message };
}
},

View File

@@ -76,7 +76,7 @@ module.exports = {
handle_error(conid, database, props) {
const { error } = props;
logger.error(`Error in database connection ${conid}, database ${database}: ${error}`);
logger.error(`DBGM-00102 Error in database connection ${conid}, database ${database}: ${error}`);
if (props?.msgid) {
const [resolve, reject] = this.requests[props?.msgid];
reject(error);
@@ -144,7 +144,7 @@ module.exports = {
handle_copyStreamError(conid, database, { copyStreamError }) {
const { progressName } = copyStreamError;
const { runid } = progressName;
logger.error(`Error in database connection ${conid}, database ${database}: ${copyStreamError}`);
logger.error(`DBGM-00103 Error in database connection ${conid}, database ${database}: ${copyStreamError}`);
socket.emit(`runner-done-${runid}`);
},
@@ -193,7 +193,7 @@ module.exports = {
if (newOpened.disconnected) return;
const funcName = `handle_${msgtype}`;
if (!this[funcName]) {
logger.error(`Unknown message type ${msgtype} from subprocess databaseConnectionProcess`);
logger.error(`DBGM-00104 Unknown message type ${msgtype} from subprocess databaseConnectionProcess`);
return;
}
@@ -204,7 +204,7 @@ module.exports = {
this.close(conid, database, false);
});
subprocess.on('error', err => {
logger.error(extractErrorLogData(err), 'Error in database connection subprocess');
logger.error(extractErrorLogData(err), 'DBGM-00114 Error in database connection subprocess');
if (newOpened.disconnected) return;
this.close(conid, database, false);
});
@@ -226,7 +226,7 @@ module.exports = {
try {
conn.subprocess.send({ msgid, ...message });
} catch (err) {
logger.error(extractErrorLogData(err), 'Error sending request do process');
logger.error(extractErrorLogData(err), 'DBGM-00115 Error sending request do process');
this.close(conn.conid, conn.database);
}
});
@@ -236,7 +236,7 @@ module.exports = {
queryData_meta: true,
async queryData({ conid, database, sql }, req) {
testConnectionPermission(conid, req);
logger.info({ conid, database, sql }, 'Processing query');
logger.info({ conid, database, sql }, 'DBGM-00007 Processing query');
const opened = await this.ensureOpened(conid, database);
// if (opened && opened.status && opened.status.name == 'error') {
// return opened.status;
@@ -283,7 +283,7 @@ module.exports = {
runScript_meta: true,
async runScript({ conid, database, sql, useTransaction, logMessage }, req) {
testConnectionPermission(conid, req);
logger.info({ conid, database, sql }, 'Processing script');
logger.info({ conid, database, sql }, 'DBGM-00008 Processing script');
const opened = await this.ensureOpened(conid, database);
sendToAuditLog(req, {
category: 'dbop',
@@ -304,7 +304,7 @@ module.exports = {
runOperation_meta: true,
async runOperation({ conid, database, operation, useTransaction }, req) {
testConnectionPermission(conid, req);
logger.info({ conid, database, operation }, 'Processing operation');
logger.info({ conid, database, operation }, 'DBGM-00009 Processing operation');
sendToAuditLog(req, {
category: 'dbop',
@@ -481,7 +481,7 @@ module.exports = {
try {
existing.subprocess.send({ msgtype: 'ping' });
} catch (err) {
logger.error(extractErrorLogData(err), 'Error pinging DB connection');
logger.error(extractErrorLogData(err), 'DBGM-00116 Error pinging DB connection');
this.close(conid, database);
return {
@@ -530,7 +530,7 @@ module.exports = {
try {
existing.subprocess.kill();
} catch (err) {
logger.error(extractErrorLogData(err), 'Error killing subprocess');
logger.error(extractErrorLogData(err), 'DBGM-00117 Error killing subprocess');
}
}
this.opened = this.opened.filter(x => x.conid != conid || x.database != database);
@@ -924,7 +924,7 @@ module.exports = {
executeSessionQuery_meta: true,
async executeSessionQuery({ sesid, conid, database, sql }, req) {
testConnectionPermission(conid, req);
logger.info({ sesid, sql }, 'Processing query');
logger.info({ sesid, sql }, 'DBGM-00010 Processing query');
sessions.dispatchMessage(sesid, 'Query execution started');
const opened = await this.ensureOpened(conid, database);

View File

@@ -13,6 +13,7 @@ const dbgateApi = require('../shell');
const { getLogger } = require('dbgate-tools');
const platformInfo = require('../utility/platformInfo');
const { checkSecureFilePathsWithoutDirectory, checkSecureDirectories } = require('../utility/security');
const { AppLogDatastore, getRecentAppLogRecords } = require('../utility/AppLogDatastore');
const logger = getLogger('files');
function serialize(format, data) {
@@ -28,6 +29,9 @@ function deserialize(format, text) {
}
module.exports = {
currentLogReader: null,
currentLogParamsKey: null,
list_meta: true,
async list({ folder }, req) {
if (!hasPermission(`files/${folder}/read`, req)) return [];
@@ -253,7 +257,7 @@ module.exports = {
createZipFromJsons_meta: true,
async createZipFromJsons({ db, filePath }) {
logger.info(`Creating zip file from JSONS ${filePath}`);
logger.info(`DBGM-00011 Creating zip file from JSONS ${filePath}`);
await dbgateApi.zipJsonLinesData(db, filePath);
return true;
},
@@ -279,7 +283,7 @@ module.exports = {
const FOLDERS = ['sql', 'sqlite'];
for (const folder of FOLDERS) {
if (fileName.toLowerCase().endsWith('.' + folder)) {
logger.info(`Saving ${folder} file ${fileName}`);
logger.info(`DBGM-00012 Saving ${folder} file ${fileName}`);
await fs.copyFile(filePath, path.join(filesdir(), folder, fileName));
socket.emitChanged(`files-changed`, { folder: folder });
@@ -291,7 +295,7 @@ module.exports = {
}
}
throw new Error(`${fileName} doesn't have one of supported extensions: ${FOLDERS.join(', ')}`);
throw new Error(`DBGM-00013 ${fileName} doesn't have one of supported extensions: ${FOLDERS.join(', ')}`);
},
exportFile_meta: true,
@@ -311,4 +315,28 @@ module.exports = {
await fs.copyFile(sourceFilePath, targetFilePath);
return true;
},
getAppLog_meta: true,
async getAppLog({ offset = 0, limit = 100, dateFrom = 0, dateTo = new Date().getTime(), filters = {} }) {
const paramsKey = `${dateFrom}-${dateTo}`;
if (paramsKey != this.currentLogParamsKey) {
if (this.currentLogReader) {
this.currentLogReader._closeReader();
this.currentLogReader = null;
}
this.currentLogReader = new AppLogDatastore({ timeFrom: dateFrom, timeTo: dateTo });
this.currentLogParamsKey = paramsKey;
}
return this.currentLogReader.getRows(offset, limit, filters);
},
getRecentAppLog_meta: true,
getRecentAppLog({ limit }) {
const res = getRecentAppLogRecords();
if (limit) {
return res.slice(-limit);
}
return res;
},
};

View File

@@ -48,7 +48,7 @@ require=null;
async function run() {
${script}
await dbgateApi.finalizer.run();
logger.info('Finished job script');
logger.info('DBGM-00014 Finished job script');
}
dbgateApi.runScript(run);
`;
@@ -132,7 +132,7 @@ module.exports = {
const pluginNames = extractPlugins(scriptText);
// console.log('********************** SCRIPT TEXT **********************');
// console.log(scriptText);
logger.info({ scriptFile }, 'Running script');
logger.info({ scriptFile }, 'DBGM-00015 Running script');
// const subprocess = fork(scriptFile, ['--checkParent', '--max-old-space-size=8192'], {
const subprocess = fork(
scriptFile,
@@ -171,7 +171,7 @@ module.exports = {
subprocess.on('exit', code => {
// console.log('... EXITED', code);
this.rejectRequest(runid, { message: 'No data returned, maybe input data source is too big' });
logger.info({ code, pid: subprocess.pid }, 'Exited process');
logger.info({ code, pid: subprocess.pid }, 'DBGM-00016 Exited process');
socket.emit(`runner-done-${runid}`, code);
this.opened = this.opened.filter(x => x.runid != runid);
});
@@ -222,7 +222,7 @@ module.exports = {
subprocess.on('exit', code => {
console.log('... EXITED', code);
logger.info({ code, pid: subprocess.pid }, 'Exited process');
logger.info({ code, pid: subprocess.pid }, 'DBGM-00017 Exited process');
this.dispatchMessage(runid, `Finished external process with code ${code}`);
socket.emit(`runner-done-${runid}`, code);
if (onFinished) {
@@ -258,7 +258,7 @@ module.exports = {
severity: 'error',
message: extractErrorMessage(err),
});
logger.error(extractErrorLogData(err), 'Caught error on stdin');
logger.error(extractErrorLogData(err), 'DBGM-00118 Caught error on stdin');
});
}

View File

@@ -24,7 +24,7 @@ module.exports = {
if (!match) return;
const pattern = match[1];
if (!cron.validate(pattern)) return;
logger.info(`Schedule script ${file} with pattern ${pattern}`);
logger.info(`DBGM-00018 Schedule script ${file} with pattern ${pattern}`);
const task = cron.schedule(pattern, () => runners.start({ script: text }));
this.tasks.push(task);
},

View File

@@ -103,7 +103,7 @@ module.exports = {
this.close(conid, false);
});
subprocess.on('error', err => {
logger.error(extractErrorLogData(err), 'Error in server connection subprocess');
logger.error(extractErrorLogData(err), 'DBGM-00119 Error in server connection subprocess');
if (newOpened.disconnected) return;
this.close(conid, false);
});
@@ -121,7 +121,7 @@ module.exports = {
try {
existing.subprocess.kill();
} catch (err) {
logger.error(extractErrorLogData(err), 'Error killing subprocess');
logger.error(extractErrorLogData(err), 'DBGM-00120 Error killing subprocess');
}
}
this.opened = this.opened.filter(x => x.conid != conid);
@@ -191,7 +191,7 @@ module.exports = {
try {
opened.subprocess.send({ msgtype: 'ping' });
} catch (err) {
logger.error(extractErrorLogData(err), 'Error pinging server connection');
logger.error(extractErrorLogData(err), 'DBGM-00121 Error pinging server connection');
this.close(conid);
}
})
@@ -244,7 +244,7 @@ module.exports = {
try {
conn.subprocess.send({ msgid, ...message });
} catch (err) {
logger.error(extractErrorLogData(err), 'Error sending request');
logger.error(extractErrorLogData(err), 'DBGM-00122 Error sending request');
this.close(conn.conid);
}
});

View File

@@ -165,7 +165,7 @@ module.exports = {
message: 'Executing query',
});
logger.info({ sesid, sql }, 'Processing query');
logger.info({ sesid, sql }, 'DBGM-00019 Processing query');
this.dispatchMessage(sesid, 'Query execution started');
session.subprocess.send({
msgtype: 'executeQuery',
@@ -186,7 +186,7 @@ module.exports = {
throw new Error('Invalid session');
}
logger.info({ sesid, command }, 'Processing control command');
logger.info({ sesid, command }, 'DBGM-00020 Processing control command');
this.dispatchMessage(sesid, `${_.startCase(command)} started`);
session.subprocess.send({ msgtype: 'executeControlCommand', command });
@@ -224,7 +224,7 @@ module.exports = {
throw new Error('Invalid session');
}
logger.info({ sesid }, 'Starting profiler');
logger.info({ sesid }, 'DBGM-00021 Starting profiler');
session.loadingReader_jslid = jslid;
session.subprocess.send({ msgtype: 'startProfiler', jslid });
@@ -271,7 +271,7 @@ module.exports = {
try {
session.subprocess.send({ msgtype: 'ping' });
} catch (err) {
logger.error(extractErrorLogData(err), 'Error pinging session');
logger.error(extractErrorLogData(err), 'DBGM-00145 Error pinging session');
return {
status: 'error',

View File

@@ -28,7 +28,7 @@ module.exports = {
}
const uploadName = crypto.randomUUID();
const filePath = path.join(uploadsdir(), uploadName);
logger.info(`Uploading file ${data.name}, size=${data.size}`);
logger.info(`DBGM-00025 Uploading file ${data.name}, size=${data.size}`);
data.mv(filePath, () => {
res.json({
@@ -115,7 +115,7 @@ module.exports = {
return response.data;
} catch (err) {
logger.error(extractErrorLogData(err), 'Error uploading gist');
logger.error(extractErrorLogData(err), 'DBGM-00148 Error uploading gist');
return {
apiErrorMessage: err.message,

View File

@@ -9,7 +9,7 @@ const currentVersion = require('./currentVersion');
const logger = getLogger('apiIndex');
process.on('uncaughtException', err => {
logger.fatal(extractErrorLogData(err), 'Uncaught exception, exiting process');
logger.fatal(extractErrorLogData(err), 'DBGM-00259 Uncaught exception, exiting process');
process.exit(1);
});
@@ -33,6 +33,9 @@ if (processArgs.processDisplayName) {
// }
function configureLogger() {
const { initializeRecentLogProvider, pushToRecentLogs } = require('./utility/AppLogDatastore');
initializeRecentLogProvider();
const logsFilePath = path.join(logsdir(), `${moment().format('YYYY-MM-DD-HH-mm')}-${process.pid}.ndjson`);
setLogsFilePath(logsFilePath);
setLoggerName('main');
@@ -63,7 +66,21 @@ function configureLogger() {
{ flags: 'a' }
);
}
streamsByDatePart[datePart].write(`${JSON.stringify(msg)}\n`);
const additionals = {};
const finalMsg =
msg.msg && msg.msg.match(/^DBGM-\d\d\d\d\d/)
? {
...msg,
msg: msg.msg.substring(10).trimStart(),
msgcode: msg.msg.substring(0, 10),
...additionals,
}
: {
...msg,
...additionals,
};
streamsByDatePart[datePart].write(`${JSON.stringify(finalMsg)}\n`);
pushToRecentLogs(finalMsg);
},
},
},
@@ -114,10 +131,10 @@ function configureLogger() {
if (processArgs.listenApi) {
configureLogger();
logger.info(`Starting API process version ${currentVersion.version}`);
logger.info(`DBGM-00026 Starting API process version ${currentVersion.version}`);
if (process.env.DEBUG_PRINT_ENV_VARIABLES) {
logger.info('Debug print environment variables:');
logger.info('DBGM-00027 Debug print environment variables:');
for (const key of Object.keys(process.env)) {
logger.info(` ${key}: ${JSON.stringify(process.env[key])}`);
}

View File

@@ -191,15 +191,15 @@ function start() {
if (platformInfo.isDocker) {
const port = process.env.PORT || 3000;
logger.info(`DbGate API listening on port ${port} (docker build)`);
logger.info(`DBGM-00028 DbGate API listening on port ${port} (docker build)`);
server.listen(port);
} else if (platformInfo.isAwsUbuntuLayout) {
const port = process.env.PORT || 3000;
logger.info(`DbGate API listening on port ${port} (AWS AMI build)`);
logger.info(`DBGM-00029 DbGate API listening on port ${port} (AWS AMI build)`);
server.listen(port);
} else if (platformInfo.isAzureUbuntuLayout) {
const port = process.env.PORT || 3000;
logger.info(`DbGate API listening on port ${port} (Azure VM build)`);
logger.info(`DBGM-00030 DbGate API listening on port ${port} (Azure VM build)`);
server.listen(port);
} else if (platformInfo.isNpmDist) {
getPort({
@@ -209,27 +209,27 @@ function start() {
),
}).then(port => {
server.listen(port, () => {
logger.info(`DbGate API listening on port ${port} (NPM build)`);
logger.info(`DBGM-00031 DbGate API listening on port ${port} (NPM build)`);
});
});
} else if (process.env.DEVWEB) {
const port = process.env.PORT || 3000;
logger.info(`DbGate API & web listening on port ${port} (dev web build)`);
logger.info(`DBGM-00032 DbGate API & web listening on port ${port} (dev web build)`);
server.listen(port);
} else {
const port = process.env.PORT || 3000;
logger.info(`DbGate API listening on port ${port} (dev API build)`);
logger.info(`DBGM-00033 DbGate API listening on port ${port} (dev API build)`);
server.listen(port);
}
function shutdown() {
logger.info('\nShutting down DbGate API server');
logger.info('DBGM-00034 Shutting down DbGate API server');
server.close(() => {
logger.info('Server shut down, terminating');
logger.info('DBGM-00035 Server shut down, terminating');
process.exit(0);
});
setTimeout(() => {
logger.info('Server close timeout, terminating');
logger.info('DBGM-00036 Server close timeout, terminating');
process.exit(0);
}, 1000);
}

View File

@@ -131,10 +131,10 @@ async function readVersion() {
const driver = requireEngineDriver(storedConnection);
try {
const version = await driver.getVersion(dbhan);
logger.debug(`Got server version: ${version.version}`);
logger.debug(`DBGM-00037 Got server version: ${version.version}`);
serverVersion = version;
} catch (err) {
logger.error(extractErrorLogData(err), 'Error getting DB server version');
logger.error(extractErrorLogData(err), 'DBGM-00149 Error getting DB server version');
serverVersion = { version: 'Unknown' };
}
process.send({ msgtype: 'version', version: serverVersion });
@@ -148,9 +148,9 @@ async function handleConnect({ connection, structure, globalSettings }) {
const driver = requireEngineDriver(storedConnection);
dbhan = await checkedAsyncCall(connectUtility(driver, storedConnection, 'app'));
logger.debug(
`Connected to database, driver: ${storedConnection.engine}, separate schemas: ${
`DBGM-00038 Connected to database, driver: ${storedConnection.engine}, separate schemas: ${
storedConnection.useSeparateSchemas ? 'YES' : 'NO'
}, 'DB: ${dbNameLogCategory(dbhan.database)} }`
}, 'DB: ${dbNameLogCategory(dbhan.database)}`
);
dbhan.feedback = feedback => setStatus({ feedback });
await checkedAsyncCall(readVersion());
@@ -257,13 +257,13 @@ async function handleDriverDataCore(msgid, callMethod, { logName }) {
const result = await callMethod(driver);
process.send({ msgtype: 'response', msgid, result: serializeJsTypesForJsonStringify(result) });
} catch (err) {
logger.error(extractErrorLogData(err, { logName }), `Error when handling message ${logName}`);
logger.error(extractErrorLogData(err, { logName }), `DBGM-00150 Error when handling message ${logName}`);
process.send({ msgtype: 'response', msgid, errorMessage: extractErrorMessage(err, 'Error executing DB data') });
}
}
async function handleSchemaList({ msgid }) {
logger.debug('Loading schema list');
logger.debug('DBGM-00039 Loading schema list');
return handleDriverDataCore(msgid, driver => driver.listSchemas(dbhan), { logName: 'listSchemas' });
}
@@ -351,7 +351,7 @@ async function handleSqlPreview({ msgid, objects, options }) {
process.send({ msgtype: 'response', msgid, sql: dmp.s, isTruncated: generator.isTruncated });
if (generator.isUnhandledException) {
setTimeout(async () => {
logger.error('Exiting because of unhandled exception');
logger.error('DBGM-00151 Exiting because of unhandled exception');
await driver.close(dbhan);
process.exit(0);
}, 500);
@@ -485,7 +485,7 @@ function start() {
setInterval(async () => {
const time = new Date().getTime();
if (time - lastPing > 40 * 1000) {
logger.info('Database connection not alive, exiting');
logger.info('DBGM-00040 Database connection not alive, exiting');
const driver = requireEngineDriver(storedConnection);
await driver.close(dbhan);
process.exit(0);
@@ -497,10 +497,10 @@ function start() {
try {
await handleMessage(message);
} catch (err) {
logger.error(extractErrorLogData(err), 'Error in DB connection');
logger.error(extractErrorLogData(err), 'DBGM-00041 Error in DB connection');
process.send({
msgtype: 'error',
error: extractErrorMessage(err, 'Error processing message'),
error: extractErrorMessage(err, 'DBGM-00042 Error processing message'),
msgid: message?.msgid,
});
}

View File

@@ -39,7 +39,7 @@ async function handleRefresh() {
name: 'error',
message: err.message,
});
logger.error(extractErrorLogData(err), 'Error refreshing server databases');
logger.error(extractErrorLogData(err), 'DBGM-00152 Error refreshing server databases');
setTimeout(() => process.exit(1), 1000);
}
}
@@ -50,7 +50,7 @@ async function readVersion() {
try {
version = await driver.getVersion(dbhan);
} catch (err) {
logger.error(extractErrorLogData(err), 'Error getting DB server version');
logger.error(extractErrorLogData(err), 'DBGM-00153 Error getting DB server version');
version = { version: 'Unknown' };
}
process.send({ msgtype: 'version', version });
@@ -90,7 +90,7 @@ async function handleConnect(connection) {
name: 'error',
message: err.message,
});
logger.error(extractErrorLogData(err), 'Error connecting to server');
logger.error(extractErrorLogData(err), 'DBGM-00154 Error connecting to server');
setTimeout(() => process.exit(1), 1000);
}
@@ -120,7 +120,7 @@ async function handleDatabaseOp(op, { msgid, name }) {
} else {
const dmp = driver.createDumper();
dmp[op](name);
logger.info({ sql: dmp.s }, 'Running script');
logger.info({ sql: dmp.s }, 'DBGM-00043 Running script');
await driver.query(dbhan, dmp.s, { discardResult: true });
}
await handleRefresh();
@@ -170,7 +170,7 @@ function start() {
setInterval(async () => {
const time = new Date().getTime();
if (time - lastPing > 40 * 1000) {
logger.info('Server connection not alive, exiting');
logger.info('DBGM-00044 Server connection not alive, exiting');
const driver = requireEngineDriver(storedConnection);
if (dbhan) {
await driver.close(dbhan);
@@ -188,7 +188,7 @@ function start() {
name: 'error',
message: err.message,
});
logger.error(extractErrorLogData(err), `Error processing message ${message?.['msgtype']}`);
logger.error(extractErrorLogData(err), `DBGM-00155 Error processing message ${message?.['msgtype']}`);
}
});
}

View File

@@ -230,7 +230,7 @@ function start() {
setInterval(async () => {
const time = new Date().getTime();
if (time - lastPing > 25 * 1000) {
logger.info('Session not alive, exiting');
logger.info('DBGM-00045 Session not alive, exiting');
const driver = requireEngineDriver(storedConnection);
await driver.close(dbhan);
process.exit(0);
@@ -250,7 +250,7 @@ function start() {
!currentProfiler &&
executingScripts == 0
) {
logger.info('Session not active, exiting');
logger.info('DBGM-00046 Session not active, exiting');
const driver = requireEngineDriver(storedConnection);
await driver.close(dbhan);
process.exit(0);

View File

@@ -41,7 +41,7 @@ async function handleStart({ connection, tunnelConfig }) {
tunnelConfig,
});
} catch (err) {
logger.error(extractErrorLogData(err), 'Error creating SSH tunnel connection:');
logger.error(extractErrorLogData(err), 'DBGM-00156 Error creating SSH tunnel connection:');
process.send({
msgtype: 'error',

View File

@@ -10,7 +10,7 @@ const logger = getLogger();
function archiveWriter({ folderName, fileName }) {
const dir = resolveArchiveFolder(folderName);
if (!fs.existsSync(dir)) {
logger.info(`Creating directory ${dir}`);
logger.info(`DBGM-00047 Creating directory ${dir}`);
fs.mkdirSync(dir);
}
const jsonlFile = path.join(dir, `${fileName}.jsonl`);

View File

@@ -83,7 +83,7 @@ async function copyStream(input, output, options) {
});
}
logger.error(extractErrorLogData(err, { progressName }), 'Import/export job failed');
logger.error(extractErrorLogData(err, { progressName }), 'DBGM-00157 Import/export job failed');
// throw err;
}
}

View File

@@ -28,20 +28,20 @@ async function executeQuery({
useTransaction,
}) {
if (!logScriptItems && !skipLogging) {
logger.info({ sql: getLimitedQuery(sql) }, `Execute query`);
logger.info({ sql: getLimitedQuery(sql) }, `DBGM-00048 Execute query`);
}
if (!driver) driver = requireEngineDriver(connection);
const dbhan = systemConnection || (await connectUtility(driver, connection, 'script'));
if (sqlFile) {
logger.debug(`Loading SQL file ${sqlFile}`);
logger.debug(`DBGM-00049 Loading SQL file ${sqlFile}`);
sql = await fs.readFile(sqlFile, { encoding: 'utf-8' });
}
try {
if (!skipLogging) {
logger.debug(`Running SQL query, length: ${sql.length}`);
logger.debug(`DBGM-00050 Running SQL query, length: ${sql.length}`);
}
await driver.script(dbhan, sql, { logScriptItems, useTransaction });

View File

@@ -45,14 +45,14 @@ class ImportStream extends stream.Transform {
}
async function importDatabase({ connection = undefined, systemConnection = undefined, driver = undefined, inputFile }) {
logger.info(`Importing database`);
logger.info(`DBGM-00051 Importing database`);
if (!driver) driver = requireEngineDriver(connection);
const dbhan = systemConnection || (await connectUtility(driver, connection, 'write'));
try {
logger.info(`Input file: ${inputFile}`);
logger.info(`DBGM-00052 Input file: ${inputFile}`);
const downloadedFile = await download(inputFile);
logger.info(`Downloaded file: ${downloadedFile}`);
logger.info(`DBGM-00053 Downloaded file: ${downloadedFile}`);
const fileStream = fs.createReadStream(downloadedFile, 'utf-8');
const splittedStream = splitQueryStream(fileStream, {

View File

@@ -42,7 +42,7 @@ class ParseStream extends stream.Transform {
* @returns {Promise<readerType>} - reader object
*/
async function jsonLinesReader({ fileName, encoding = 'utf-8', limitRows = undefined }) {
logger.info(`Reading file ${fileName}`);
logger.info(`DBGM-00054 Reading file ${fileName}`);
const downloadedFile = await download(fileName);

View File

@@ -33,7 +33,7 @@ class StringifyStream extends stream.Transform {
* @returns {Promise<writerType>} - writer object
*/
async function jsonLinesWriter({ fileName, encoding = 'utf-8', header = true }) {
logger.info(`Writing file ${fileName}`);
logger.info(`DBGM-00055 Writing file ${fileName}`);
const stringify = new StringifyStream({ header });
const fileStream = fs.createWriteStream(fileName, encoding);
return [stringify, fileStream];

View File

@@ -63,7 +63,7 @@ async function jsonReader({
encoding = 'utf-8',
limitRows = undefined,
}) {
logger.info(`Reading file ${fileName}`);
logger.info(`DBGM-00056 Reading file ${fileName}`);
const downloadedFile = await download(fileName);
const fileStream = fs.createReadStream(

View File

@@ -96,7 +96,7 @@ class StringifyStream extends stream.Transform {
* @returns {Promise<writerType>} - writer object
*/
async function jsonWriter({ fileName, jsonStyle, keyField = '_key', rootField, encoding = 'utf-8' }) {
logger.info(`Writing file ${fileName}`);
logger.info(`DBGM-00057 Writing file ${fileName}`);
const stringify = new StringifyStream({ jsonStyle, keyField, rootField });
const fileStream = fs.createWriteStream(fileName, encoding);
return [stringify, fileStream];

View File

@@ -6,13 +6,13 @@ const exportDbModel = require('../utility/exportDbModel');
const logger = getLogger('analyseDb');
async function loadDatabase({ connection = undefined, systemConnection = undefined, driver = undefined, outputDir }) {
logger.debug(`Analysing database`);
logger.debug(`DBGM-00058 Analysing database`);
if (!driver) driver = requireEngineDriver(connection);
const dbhan = systemConnection || (await connectUtility(driver, connection, 'read', { forceRowsAsObjects: true }));
try {
const dbInfo = await driver.analyseFull(dbhan);
logger.debug(`Analyse finished`);
logger.debug(`DBGM-00059 Analyse finished`);
await exportDbModel(dbInfo, outputDir);
} finally {

View File

@@ -132,7 +132,7 @@ async function modifyJsonLinesReader({
mergeKey = null,
mergeMode = 'merge',
}) {
logger.info(`Reading file ${fileName} with change set`);
logger.info(`DBGM-00060 Reading file ${fileName} with change set`);
const fileStream = fs.createReadStream(
fileName,

View File

@@ -29,7 +29,7 @@ async function queryReader({
// if (!sql && !json) {
// throw new Error('One of sql or json must be set');
// }
logger.info({ sql: query || sql }, `Reading query`);
logger.info({ sql: query || sql }, `DBGM-00061 Reading query`);
// else console.log(`Reading query ${JSON.stringify(json)}`);
if (!driver) {

View File

@@ -22,7 +22,7 @@ function requirePlugin(packageName, requiredPlugin = null) {
if (requiredPlugin == null) {
let module;
const modulePath = getPluginBackendPath(packageName);
logger.info(`Loading module ${packageName} from ${modulePath}`);
logger.info(`DBGM-00062 Loading module ${packageName} from ${modulePath}`);
try {
// @ts-ignore
module = __non_webpack_require__(modulePath);

View File

@@ -11,7 +11,7 @@ async function runScript(func) {
await func();
process.exit(0);
} catch (err) {
logger.error(extractErrorLogData(err), `Error running script`);
logger.error(extractErrorLogData(err), `DBGM-00158 Error running script`);
process.exit(1);
}
}

View File

@@ -41,7 +41,7 @@ class SqlizeStream extends stream.Transform {
}
async function sqlDataWriter({ fileName, dataName, driver, encoding = 'utf-8' }) {
logger.info(`Writing file ${fileName}`);
logger.info(`DBGM-00063 Writing file ${fileName}`);
const stringify = new SqlizeStream({ fileName, dataName });
const fileStream = fs.createWriteStream(fileName, encoding);
return [stringify, fileStream];

View File

@@ -23,7 +23,7 @@ async function tableReader({ connection, systemConnection, pureName, schemaName,
if (driver.databaseEngineTypes.includes('document')) {
// @ts-ignore
logger.info(`Reading collection ${fullNameToString(fullName)}`);
logger.info(`DBGM-00064 Reading collection ${fullNameToString(fullName)}`);
// @ts-ignore
return await driver.readQuery(dbhan, JSON.stringify(fullName));
}
@@ -32,14 +32,14 @@ async function tableReader({ connection, systemConnection, pureName, schemaName,
const query = `select * from ${quoteFullName(driver.dialect, fullName)}`;
if (table) {
// @ts-ignore
logger.info(`Reading table ${fullNameToString(table)}`);
logger.info(`DBGM-00065 Reading table ${fullNameToString(table)}`);
// @ts-ignore
return await driver.readQuery(dbhan, query, table);
}
const view = await driver.analyseSingleObject(dbhan, fullName, 'views');
if (view) {
// @ts-ignore
logger.info(`Reading view ${fullNameToString(view)}`);
logger.info(`DBGM-00066 Reading view ${fullNameToString(view)}`);
// @ts-ignore
return await driver.readQuery(dbhan, query, view);
}

View File

@@ -20,7 +20,7 @@ const logger = getLogger('tableWriter');
* @returns {Promise<writerType>} - writer object
*/
async function tableWriter({ connection, schemaName, pureName, driver, systemConnection, ...options }) {
logger.info(`Writing table ${fullNameToString({ schemaName, pureName })}`);
logger.info(`DBGM-00067 Writing table ${fullNameToString({ schemaName, pureName })}`);
if (!driver) {
driver = requireEngineDriver(connection);

View File

@@ -52,14 +52,14 @@ function unzipDirectory(zipPath, outputDirectory) {
readStream.on('end', () => zipFile.readEntry());
writeStream.on('finish', () => {
logger.info(`Extracted "${entry.fileName}" → "${destPath}".`);
logger.info(`DBGM-00068 Extracted "${entry.fileName}" → "${destPath}".`);
res();
});
writeStream.on('error', writeErr => {
logger.error(
extractErrorLogData(writeErr),
`Error extracting "${entry.fileName}" from "${zipPath}".`
`DBGM-00069 Error extracting "${entry.fileName}" from "${zipPath}".`
);
rej(writeErr);
});
@@ -74,14 +74,14 @@ function unzipDirectory(zipPath, outputDirectory) {
zipFile.on('end', () => {
Promise.all(pending)
.then(() => {
logger.info(`Archive "${zipPath}" fully extracted to "${outputDirectory}".`);
logger.info(`DBGM-00070 Archive "${zipPath}" fully extracted to "${outputDirectory}".`);
resolve(true);
})
.catch(reject);
});
zipFile.on('error', err => {
logger.error(extractErrorLogData(err), `ZIP file error in ${zipPath}.`);
logger.error(extractErrorLogData(err), `DBGM-00071 ZIP file error in ${zipPath}.`);
reject(err);
});
});

View File

@@ -16,16 +16,16 @@ function zipDirectory(inputDirectory, outputFile) {
// Listen for all archive data to be written
output.on('close', () => {
logger.info(`ZIP file created (${archive.pointer()} total bytes)`);
logger.info(`DBGM-00072 ZIP file created (${archive.pointer()} total bytes)`);
resolve();
});
archive.on('warning', err => {
logger.warn(extractErrorLogData(err), `Warning while creating ZIP: ${err.message}`);
logger.warn(extractErrorLogData(err), `DBGM-00073 Warning while creating ZIP: ${err.message}`);
});
archive.on('error', err => {
logger.error(extractErrorLogData(err), `Error while creating ZIP: ${err.message}`);
logger.error(extractErrorLogData(err), `DBGM-00074 Error while creating ZIP: ${err.message}`);
reject(err);
});

View File

@@ -17,16 +17,16 @@ function zipDirectory(jsonDb, outputFile) {
// Listen for all archive data to be written
output.on('close', () => {
logger.info(`ZIP file created (${archive.pointer()} total bytes)`);
logger.info(`DBGM-00075 ZIP file created (${archive.pointer()} total bytes)`);
resolve();
});
archive.on('warning', err => {
logger.warn(extractErrorLogData(err), `Warning while creating ZIP: ${err.message}`);
logger.warn(extractErrorLogData(err), `DBGM-00076 Warning while creating ZIP: ${err.message}`);
});
archive.on('error', err => {
logger.error(extractErrorLogData(err), `Error while creating ZIP: ${err.message}`);
logger.error(extractErrorLogData(err), `DBGM-00077 Error while creating ZIP: ${err.message}`);
reject(err);
});

View File

@@ -0,0 +1,148 @@
const fs = require('fs-extra');
const path = require('path');
const { logsdir } = require('./directories');
const { format, addDays, startOfDay } = require('date-fns');
const JsonLinesDatastore = require('./JsonLinesDatastore');
const LineReader = require('./LineReader');
const socket = require('./socket');
/**
 * Lists absolute paths of NDJSON log files covering the given time window.
 * Log files are named by day (yyyy-MM-dd*.ndjson), so a lexicographic
 * comparison on the file name is equivalent to a comparison on the date.
 * @param {Date|number} timeFrom - inclusive lower bound of the window
 * @param {Date|number} timeTo - inclusive upper bound of the window
 * @returns {Promise<string[]>} sorted absolute file paths
 */
async function getLogFiles(timeFrom, timeTo) {
  const dir = logsdir();
  const lowerBound = format(timeFrom, 'yyyy-MM-dd');
  // Exclusive upper bound: first day AFTER the window.
  const upperBound = format(addDays(timeTo, 1), 'yyyy-MM-dd');
  const allFiles = await fs.readdir(dir);
  const matching = allFiles.filter(
    name => name.endsWith('.ndjson') && name >= lowerBound && name < upperBound
  );
  matching.sort();
  return matching.map(name => path.join(dir, name));
}
/**
 * Read-only datastore over the application's NDJSON log files restricted
 * to a time window. Rows are served lazily through a JsonLinesDatastore
 * that transparently advances to the next log file in the window.
 */
class AppLogDatastore {
  /**
   * @param {{timeFrom: Date|number, timeTo: Date|number}} range - inclusive window
   */
  constructor({ timeFrom, timeTo }) {
    this.timeFrom = timeFrom;
    this.timeTo = timeTo;
  }

  /**
   * Returns the log file that follows `file` inside the window,
   * or null when `file` is unknown or already the last one.
   */
  async resolveNextFile(file) {
    const fileList = await getLogFiles(this.timeFrom, this.timeTo);
    const position = fileList.indexOf(file);
    const hasSuccessor = position >= 0 && position < fileList.length - 1;
    return hasSuccessor ? fileList[position + 1] : null;
  }

  /**
   * Builds the sqltree filter: time-window bounds ANDed with per-column
   * filters. A filter value list of exactly [null] means "column is null".
   */
  _buildCondition(filters) {
    const conditions = [
      {
        conditionType: 'binary',
        operator: '>=',
        left: { exprType: 'column', columnName: 'time' },
        right: { exprType: 'value', value: this.timeFrom },
      },
      {
        conditionType: 'binary',
        operator: '<=',
        left: { exprType: 'column', columnName: 'time' },
        right: { exprType: 'value', value: this.timeTo },
      },
    ];
    for (const [columnName, values] of Object.entries(filters)) {
      // Loose == intentionally matches both null and undefined.
      const isNullFilter = values.length === 1 && values[0] == null;
      // @ts-ignore
      conditions.push(
        isNullFilter
          ? { conditionType: 'isNull', expr: { exprType: 'column', columnName } }
          : { conditionType: 'in', expr: { exprType: 'column', columnName }, values }
      );
    }
    return { conditionType: 'and', conditions };
  }

  /**
   * Reads a page of log records matching the window and filters.
   * The underlying reader is created lazily on first call.
   */
  async getRows(offset = 0, limit = 100, filters = {}) {
    if (!this.linesReader) {
      const fileList = await getLogFiles(this.timeFrom, this.timeTo);
      this.linesReader = new JsonLinesDatastore(fileList[0], null, file => this.resolveNextFile(file));
    }
    return this.linesReader.getRows(offset, limit, this._buildCondition(filters), null);
  }

  /** Closes and discards the underlying reader, if any. */
  _closeReader() {
    if (!this.linesReader) return;
    this.linesReader._closeReader();
    this.linesReader = null;
  }
}
// Maximum number of log records kept in memory for the "recent logs" view.
const RECENT_LOG_LIMIT = 1000;
// In-memory buffer of the most recent log records; null until
// initializeRecentLogProvider() has finished loading today's files.
let recentLogs = null;
// Records pushed before initialization completed; merged into recentLogs
// once initialization finishes.
const beforeRecentLogs = [];
/** Drops the oldest records so recentLogs holds at most RECENT_LOG_LIMIT entries. */
function adjustRecentLogs() {
  const overflow = recentLogs.length - RECENT_LOG_LIMIT;
  if (overflow > 0) {
    recentLogs.splice(0, overflow);
  }
}
/**
 * Loads today's log files into the in-memory recent-log buffer.
 * Lines that are not valid JSON (e.g. partially written) are skipped.
 * Records that arrived through pushToRecentLogs() before initialization
 * finished are appended afterwards, and the buffer is trimmed back to
 * RECENT_LOG_LIMIT (the original merge could exceed the limit and the
 * pre-init queue was never released).
 */
async function initializeRecentLogProvider() {
  const logs = [];
  for (const file of await getLogFiles(startOfDay(new Date()), new Date())) {
    const fileStream = fs.createReadStream(file);
    const reader = new LineReader(fileStream);
    for (;;) {
      const line = await reader.readLine();
      if (line == null) break; // EOF of this file
      try {
        logs.push(JSON.parse(line));
        // Keep only the newest RECENT_LOG_LIMIT records while streaming.
        if (logs.length > RECENT_LOG_LIMIT) {
          logs.shift();
        }
      } catch (e) {
        // Malformed line - skip it and keep reading.
      }
    }
  }
  recentLogs = logs;
  // Merge records captured before initialization, then enforce the size
  // limit which the merge may have exceeded.
  recentLogs.push(...beforeRecentLogs);
  adjustRecentLogs();
  // The pre-init queue is no longer read once recentLogs is set; release it.
  beforeRecentLogs.length = 0;
}
// Monotonically increasing sequence number stamped onto every pushed record.
let counter = 0;

/**
 * Appends a log record to the recent-log buffer and broadcasts it to
 * connected clients. Until initializeRecentLogProvider() finishes, records
 * are queued in beforeRecentLogs instead (and not broadcast); that queue is
 * now capped at RECENT_LOG_LIMIT so it cannot grow without bound if
 * initialization is delayed or fails.
 * @param {object} msg - structured log record
 */
function pushToRecentLogs(msg) {
  const finalMsg = {
    ...msg,
    counter,
  };
  counter += 1;
  if (recentLogs) {
    recentLogs.push(finalMsg);
    adjustRecentLogs();
    socket.emit('applog-event', finalMsg);
  } else {
    beforeRecentLogs.push(finalMsg);
    // Cap the pre-init queue the same way as the main buffer.
    if (beforeRecentLogs.length > RECENT_LOG_LIMIT) {
      beforeRecentLogs.shift();
    }
  }
}
/**
 * Returns the current recent-log buffer: the merged buffer after
 * initialization, or the pre-init queue before it.
 */
function getRecentAppLogRecords() {
  if (recentLogs) {
    return recentLogs;
  }
  return beforeRecentLogs;
}
// Public API of the application-log provider.
module.exports = {
  AppLogDatastore,
  initializeRecentLogProvider,
  getRecentAppLogRecords,
  pushToRecentLogs,
};

View File

@@ -61,7 +61,7 @@ class DatastoreProxy {
this.subprocess = null;
});
this.subprocess.on('error', err => {
logger.error(extractErrorLogData(err), 'Error in data store subprocess');
logger.error(extractErrorLogData(err), 'DBGM-00167 Error in data store subprocess');
this.subprocess = null;
});
this.subprocess.send({ msgtype: 'open', file: this.file });
@@ -77,7 +77,7 @@ class DatastoreProxy {
try {
this.subprocess.send({ msgtype: 'read', msgid, offset, limit });
} catch (err) {
logger.error(extractErrorLogData(err), 'Error getting rows');
logger.error(extractErrorLogData(err), 'DBGM-00168 Error getting rows');
this.subprocess = null;
}
});
@@ -91,7 +91,7 @@ class DatastoreProxy {
try {
this.subprocess.send({ msgtype: 'notify', msgid });
} catch (err) {
logger.error(extractErrorLogData(err), 'Error notifying subprocess');
logger.error(extractErrorLogData(err), 'DBGM-00169 Error notifying subprocess');
this.subprocess = null;
}
});

View File

@@ -7,15 +7,15 @@ const AsyncLock = require('async-lock');
const lock = new AsyncLock();
const stableStringify = require('json-stable-stringify');
const { evaluateCondition } = require('dbgate-sqltree');
const requirePluginFunction = require('./requirePluginFunction');
const esort = require('external-sorting');
const { jsldir } = require('./directories');
const LineReader = require('./LineReader');
class JsonLinesDatastore {
constructor(file, formatterFunction) {
constructor(file, formatterFunction, resolveNextFile = null) {
this.file = file;
this.formatterFunction = formatterFunction;
this.resolveNextFile = resolveNextFile;
this.reader = null;
this.readedDataRowCount = 0;
this.readedSchemaRow = false;
@@ -23,7 +23,12 @@ class JsonLinesDatastore {
this.notifyChangedCallback = null;
this.currentFilter = null;
this.currentSort = null;
this.rowFormatter = requirePluginFunction(formatterFunction);
this.currentFileName = null;
if (formatterFunction) {
const requirePluginFunction = require('./requirePluginFunction');
this.rowFormatter = requirePluginFunction(formatterFunction);
}
this.sortedFiles = {};
}
@@ -67,6 +72,7 @@ class JsonLinesDatastore {
// this.firstRowToBeReturned = null;
this.currentFilter = null;
this.currentSort = null;
this.currentFileName = null;
await reader.close();
}
@@ -100,8 +106,18 @@ class JsonLinesDatastore {
// return res;
// }
for (;;) {
const line = await this.reader.readLine();
if (!line) {
let line = await this.reader.readLine();
while (!line) {
if (!this.currentSort && this.resolveNextFile) {
const nextFile = await this.resolveNextFile(this.currentFileName);
if (nextFile) {
await this.reader.close();
this.reader = await this._openReader(nextFile);
this.currentFileName = nextFile;
line = await this.reader.readLine();
continue;
}
}
// EOF
return null;
}
@@ -173,6 +189,7 @@ class JsonLinesDatastore {
}
if (!this.reader) {
const reader = await this._openReader(sort ? this.sortedFiles[stableStringify(sort)] : this.file);
this.currentFileName = this.file;
this.reader = reader;
this.currentFilter = filter;
this.currentSort = sort;

View File

@@ -12,7 +12,7 @@ function childProcessChecker() {
// This will come once parent dies.
// One way can be to check for error code ERR_IPC_CHANNEL_CLOSED
// and call process.exit()
logger.error(extractErrorLogData(err), 'parent died');
logger.error(extractErrorLogData(err), 'DBGM-00163 parent died');
process.exit(1);
}
}, 1000);

View File

@@ -77,7 +77,7 @@ function startCloudTokenChecking(sid, callback) {
callback(resp.data);
}
} catch (err) {
logger.error(extractErrorLogData(err), 'Error checking cloud token');
logger.error(extractErrorLogData(err), 'DBGM-00164 Error checking cloud token');
}
}, 500);
}
@@ -125,7 +125,7 @@ async function getCloudUsedEngines() {
const resp = await callCloudApiGet('content-engines');
return resp || [];
} catch (err) {
logger.error(extractErrorLogData(err), 'Error getting cloud content list');
logger.error(extractErrorLogData(err), 'DBGM-00165 Error getting cloud content list');
return [];
}
}
@@ -208,7 +208,7 @@ async function updateCloudFiles(isRefresh) {
lastCheckedTm = _.max(cloudFiles.map(x => parseInt(x.modifiedTm)));
}
logger.info({ tags, lastCheckedTm }, 'Downloading cloud files');
logger.info({ tags, lastCheckedTm }, 'DBGM-00082 Downloading cloud files');
const resp = await axios.default.get(
`${DBGATE_CLOUD_URL}/public-cloud-updates?lastCheckedTm=${lastCheckedTm}&tags=${tags}&isRefresh=${
@@ -223,7 +223,7 @@ async function updateCloudFiles(isRefresh) {
}
);
logger.info(`Downloaded ${resp.data.length} cloud files`);
logger.info(`DBGM-00083 Downloaded ${resp.data.length} cloud files`);
const filesByPath = lastCheckedTm == 0 ? {} : _.keyBy(cloudFiles, 'path');
for (const file of resp.data) {
@@ -269,7 +269,7 @@ async function refreshPublicFiles(isRefresh) {
try {
await updateCloudFiles(isRefresh);
} catch (err) {
logger.error(extractErrorLogData(err), 'Error updating cloud files');
logger.error(extractErrorLogData(err), 'DBGM-00166 Error updating cloud files');
}
}

View File

@@ -14,11 +14,11 @@ const createDirectories = {};
const ensureDirectory = (dir, clean) => {
if (!createDirectories[dir]) {
if (clean && fs.existsSync(dir) && !platformInfo.isForkedApi) {
getLogger('directories').info(`Cleaning directory ${dir}`);
getLogger('directories').info(`DBGM-00170 Cleaning directory ${dir}`);
cleanDirectory(dir, _.isNumber(clean) ? clean : null);
}
if (!fs.existsSync(dir)) {
getLogger('directories').info(`Creating directory ${dir}`);
getLogger('directories').info(`DBGM-00171 Creating directory ${dir}`);
fs.mkdirSync(dir);
}
createDirectories[dir] = true;

View File

@@ -42,13 +42,13 @@ function extractSingleFileFromZip(zipPath, fileInZip, outputPath) {
// When the file is finished writing, resolve
writeStream.on('finish', () => {
logger.info(`File "${fileInZip}" extracted to "${outputPath}".`);
logger.info(`DBGM-00088 File "${fileInZip}" extracted to "${outputPath}".`);
resolve(true);
});
// Handle write errors
writeStream.on('error', writeErr => {
logger.error(extractErrorLogData(writeErr), `Error extracting "${fileInZip}" from "${zipPath}".`);
logger.error(extractErrorLogData(writeErr), `DBGM-00089 Error extracting "${fileInZip}" from "${zipPath}".`);
reject(writeErr);
});
});
@@ -67,7 +67,7 @@ function extractSingleFileFromZip(zipPath, fileInZip, outputPath) {
// Handle general errors
zipFile.on('error', err => {
logger.error(extractErrorLogData(err), `ZIP file error in ${zipPath}.`);
logger.error(extractErrorLogData(err), `DBGM-00172 ZIP file error in ${zipPath}.`);
reject(err);
});
});

View File

@@ -28,7 +28,7 @@ async function loadModelTransform(file) {
}
return null;
} catch (err) {
logger.error(extractErrorLogData(err), `Error loading model transform ${file}`);
logger.error(extractErrorLogData(err), `DBGM-00173 Error loading model transform ${file}`);
return null;
}
}

View File

@@ -40,7 +40,7 @@ function callForwardProcess(connection, tunnelConfig, tunnelCacheKey) {
tunnelConfig,
});
} catch (err) {
logger.error(extractErrorLogData(err), 'Error connecting SSH');
logger.error(extractErrorLogData(err), 'DBGM-00174 Error connecting SSH');
}
return new Promise((resolve, reject) => {
let promiseHandled = false;
@@ -57,18 +57,18 @@ function callForwardProcess(connection, tunnelConfig, tunnelCacheKey) {
}
});
subprocess.on('exit', code => {
logger.info(`SSH forward process exited with code ${code}`);
logger.info(`DBGM-00090 SSH forward process exited with code ${code}`);
delete sshTunnelCache[tunnelCacheKey];
if (!promiseHandled) {
reject(
new Error(
'SSH forward process exited, try to change "Local host address for SSH connections" in Settings/Connections'
'DBGM-00091 SSH forward process exited, try to change "Local host address for SSH connections" in Settings/Connections'
)
);
}
});
subprocess.on('error', error => {
logger.error(extractErrorLogData(error), 'SSH forward process error');
logger.error(extractErrorLogData(error), 'DBGM-00092 SSH forward process error');
delete sshTunnelCache[tunnelCacheKey];
if (!promiseHandled) {
reject(error);
@@ -97,13 +97,13 @@ async function getSshTunnel(connection) {
};
try {
logger.info(
`Creating SSH tunnel to ${connection.sshHost}-${connection.server}:${connection.port}, using local port ${localPort}`
`DBGM-00093 Creating SSH tunnel to ${connection.sshHost}-${connection.server}:${connection.port}, using local port ${localPort}`
);
const subprocess = await callForwardProcess(connection, tunnelConfig, tunnelCacheKey);
logger.info(
`Created SSH tunnel to ${connection.sshHost}-${connection.server}:${connection.port}, using local port ${localPort}`
`DBGM-00094 Created SSH tunnel to ${connection.sshHost}-${connection.server}:${connection.port}, using local port ${localPort}`
);
sshTunnelCache[tunnelCacheKey] = {
@@ -114,7 +114,7 @@ async function getSshTunnel(connection) {
};
return sshTunnelCache[tunnelCacheKey];
} catch (err) {
logger.error(extractErrorLogData(err), 'Error creating SSH tunnel:');
logger.error(extractErrorLogData(err), 'DBGM-00095 Error creating SSH tunnel:');
// error is not cached
return {
state: 'error',

View File

@@ -10,7 +10,7 @@ async function handleGetSshTunnelRequest({ msgid, connection }, subprocess) {
try {
subprocess.send({ msgtype: 'getsshtunnel-response', msgid, response });
} catch (err) {
logger.error(extractErrorLogData(err), 'Error sending to SSH tunnel');
logger.error(extractErrorLogData(err), 'DBGM-00175 Error sending to SSH tunnel');
}
}

View File

@@ -12,11 +12,11 @@ module.exports = function useController(app, electron, route, controller) {
const router = express.Router();
if (controller._init) {
logger.info(`Calling init controller for controller ${route}`);
logger.info(`DBGM-00096 Calling init controller for controller ${route}`);
try {
controller._init();
} catch (err) {
logger.error(extractErrorLogData(err), `Error initializing controller, exiting application`);
logger.error(extractErrorLogData(err), `DBGM-00097 Error initializing controller, exiting application`);
process.exit(1);
}
}
@@ -78,7 +78,7 @@ module.exports = function useController(app, electron, route, controller) {
const data = await controller[key]({ ...req.body, ...req.query }, req);
res.json(data);
} catch (err) {
logger.error(extractErrorLogData(err), `Error when processing route ${route}/${key}`);
logger.error(extractErrorLogData(err), `DBGM-00176 Error when processing route ${route}/${key}`);
if (err instanceof MissingCredentialsError) {
res.json({
missingCredentials: true,