mirror of
https://github.com/DeNNiiInc/dbgate.git
synced 2026-04-19 22:26:01 +00:00
SYNC: Merge pull request #7 from dbgate/feature/applog
This commit is contained in:
148
packages/api/src/utility/AppLogDatastore.js
Normal file
148
packages/api/src/utility/AppLogDatastore.js
Normal file
@@ -0,0 +1,148 @@
|
||||
const fs = require('fs-extra');
|
||||
const path = require('path');
|
||||
const { logsdir } = require('./directories');
|
||||
const { format, addDays, startOfDay } = require('date-fns');
|
||||
const JsonLinesDatastore = require('./JsonLinesDatastore');
|
||||
const LineReader = require('./LineReader');
|
||||
const socket = require('./socket');
|
||||
|
||||
/**
 * Lists NDJSON application log files whose date-prefixed names fall within
 * the [timeFrom, timeTo] interval, sorted ascending, as absolute paths.
 *
 * File names start with a `yyyy-MM-dd` prefix, so lexicographic comparison
 * against the formatted interval bounds selects the wanted date range.
 *
 * @param {Date|number} timeFrom - inclusive lower bound of the interval
 * @param {Date|number} timeTo - inclusive upper bound of the interval
 * @returns {Promise<string[]>} sorted full paths of matching log files
 */
async function getLogFiles(timeFrom, timeTo) {
  const directory = logsdir();
  const entries = await fs.readdir(directory);
  const lowerBound = format(timeFrom, 'yyyy-MM-dd');
  // The upper bound is exclusive, hence one day past timeTo.
  const upperBound = format(addDays(timeTo, 1), 'yyyy-MM-dd');
  const selected = [];
  for (const entry of entries) {
    if (!entry.endsWith('.ndjson')) continue;
    if (entry >= lowerBound && entry < upperBound) {
      selected.push(entry);
    }
  }
  selected.sort();
  return selected.map(name => path.join(directory, name));
}
|
||||
|
||||
/**
 * Read-only datastore over application log files (NDJSON) restricted to a
 * [timeFrom, timeTo] interval. Rows are streamed lazily through a
 * JsonLinesDatastore that chains consecutive daily log files together.
 */
class AppLogDatastore {
  /**
   * @param {{timeFrom: any, timeTo: any}} interval - bounds compared against
   *   the `time` column of each log record.
   */
  constructor({ timeFrom, timeTo }) {
    this.timeFrom = timeFrom;
    this.timeTo = timeTo;
  }

  /**
   * Returns the log file that follows `file` within the interval, or null
   * when `file` is unknown or already the last one.
   * @param {string} file - full path of the current log file
   * @returns {Promise<string|null>}
   */
  async resolveNextFile(file) {
    const allFiles = await getLogFiles(this.timeFrom, this.timeTo);
    const position = allFiles.indexOf(file);
    const hasNext = position >= 0 && position < allFiles.length - 1;
    return hasNext ? allFiles[position + 1] : null;
  }

  /**
   * Reads one page of log rows matching the interval and the given filters.
   * @param {number} offset - number of matching rows to skip
   * @param {number} limit - maximum number of rows to return
   * @param {Object<string, Array>} filters - column name -> accepted values;
   *   a single null value selects rows where that column is null
   */
  async getRows(offset = 0, limit = 100, filters = {}) {
    if (!this.linesReader) {
      // Lazily open the reader on the first file; subsequent files are
      // resolved on demand via resolveNextFile.
      const files = await getLogFiles(this.timeFrom, this.timeTo);
      this.linesReader = new JsonLinesDatastore(files[0], null, file => this.resolveNextFile(file));
    }

    // Always constrain rows to the configured time interval.
    const conditions = [
      {
        conditionType: 'binary',
        operator: '>=',
        left: { exprType: 'column', columnName: 'time' },
        right: { exprType: 'value', value: this.timeFrom },
      },
      {
        conditionType: 'binary',
        operator: '<=',
        left: { exprType: 'column', columnName: 'time' },
        right: { exprType: 'value', value: this.timeTo },
      },
    ];
    for (const [columnName, values] of Object.entries(filters)) {
      if (values.length === 1 && values[0] == null) {
        // @ts-ignore
        conditions.push({
          conditionType: 'isNull',
          expr: { exprType: 'column', columnName },
        });
      } else {
        // @ts-ignore
        conditions.push({
          conditionType: 'in',
          expr: { exprType: 'column', columnName },
          values,
        });
      }
    }

    return this.linesReader.getRows(offset, limit, { conditionType: 'and', conditions }, null);
  }

  /** Releases the underlying reader so the next getRows() starts fresh. */
  _closeReader() {
    if (!this.linesReader) return;
    this.linesReader._closeReader();
    this.linesReader = null;
  }
}
|
||||
|
||||
// Maximum number of log entries kept in the in-memory "recent logs" buffer.
const RECENT_LOG_LIMIT = 1000;

// Buffer of recent log entries; null until initializeRecentLogProvider() has run.
let recentLogs = null;
// Entries logged before initialization finished; merged into recentLogs afterwards.
const beforeRecentLogs = [];
|
||||
|
||||
/**
 * Trims the in-memory recent log buffer so it never holds more than
 * RECENT_LOG_LIMIT entries, dropping the oldest ones.
 */
function adjustRecentLogs() {
  const overflow = recentLogs.length - RECENT_LOG_LIMIT;
  if (overflow > 0) {
    // Remove the oldest entries in place so existing references stay valid.
    recentLogs.splice(0, overflow);
  }
}
|
||||
|
||||
/**
 * Builds the initial in-memory recent log buffer by replaying today's log
 * files, then appends any entries that were logged while initialization was
 * still running. Keeps at most RECENT_LOG_LIMIT entries (the most recent).
 */
async function initializeRecentLogProvider() {
  const logs = [];
  for (const file of await getLogFiles(startOfDay(new Date()), new Date())) {
    const fileStream = fs.createReadStream(file);
    const reader = new LineReader(fileStream);
    for (;;) {
      const line = await reader.readLine();
      if (line == null) break; // EOF
      try {
        logs.push(JSON.parse(line));
        if (logs.length > RECENT_LOG_LIMIT) {
          // Drop the oldest entry; only the tail of the log is interesting.
          logs.shift();
        }
      } catch (e) {
        // Malformed line (e.g. a partial write) - skip it.
      }
    }
  }
  recentLogs = logs;
  // Merge the pre-initialization backlog, then re-apply the size limit:
  // the merged buffer may exceed RECENT_LOG_LIMIT (the original code left
  // it untrimmed). Also clear the backlog so a repeated initialization
  // cannot duplicate entries and the memory is released.
  recentLogs.push(...beforeRecentLogs);
  beforeRecentLogs.length = 0;
  adjustRecentLogs();
}
|
||||
|
||||
// Monotonic counter stamped onto every message so consumers can order events.
let counter = 0;

/**
 * Records one log message into the recent-log buffer and broadcasts it to
 * connected clients via socket. Before initializeRecentLogProvider() has
 * finished, messages are parked in beforeRecentLogs instead; that backlog is
 * now capped at RECENT_LOG_LIMIT so it cannot grow without bound (the
 * original code let it grow indefinitely while initialization was pending).
 *
 * @param {object} msg - structured log record to store and broadcast
 */
function pushToRecentLogs(msg) {
  const finalMsg = {
    ...msg,
    counter,
  };
  counter += 1;
  if (recentLogs) {
    recentLogs.push(finalMsg);
    adjustRecentLogs();
    socket.emit('applog-event', finalMsg);
  } else {
    beforeRecentLogs.push(finalMsg);
    // Keep only the newest RECENT_LOG_LIMIT entries while waiting for init.
    if (beforeRecentLogs.length > RECENT_LOG_LIMIT) {
      beforeRecentLogs.shift();
    }
  }
}
|
||||
|
||||
/**
 * Returns the current recent-log buffer: the initialized buffer when
 * available, otherwise the pre-initialization backlog.
 * @returns {object[]} log records, oldest first
 */
function getRecentAppLogRecords() {
  if (recentLogs) {
    return recentLogs;
  }
  return beforeRecentLogs;
}
|
||||
|
||||
// Public API of the application-log datastore module.
module.exports = {
  AppLogDatastore,
  initializeRecentLogProvider,
  getRecentAppLogRecords,
  pushToRecentLogs,
};
|
||||
@@ -61,7 +61,7 @@ class DatastoreProxy {
|
||||
this.subprocess = null;
|
||||
});
|
||||
this.subprocess.on('error', err => {
|
||||
logger.error(extractErrorLogData(err), 'Error in data store subprocess');
|
||||
logger.error(extractErrorLogData(err), 'DBGM-00167 Error in data store subprocess');
|
||||
this.subprocess = null;
|
||||
});
|
||||
this.subprocess.send({ msgtype: 'open', file: this.file });
|
||||
@@ -77,7 +77,7 @@ class DatastoreProxy {
|
||||
try {
|
||||
this.subprocess.send({ msgtype: 'read', msgid, offset, limit });
|
||||
} catch (err) {
|
||||
logger.error(extractErrorLogData(err), 'Error getting rows');
|
||||
logger.error(extractErrorLogData(err), 'DBGM-00168 Error getting rows');
|
||||
this.subprocess = null;
|
||||
}
|
||||
});
|
||||
@@ -91,7 +91,7 @@ class DatastoreProxy {
|
||||
try {
|
||||
this.subprocess.send({ msgtype: 'notify', msgid });
|
||||
} catch (err) {
|
||||
logger.error(extractErrorLogData(err), 'Error notifying subprocess');
|
||||
logger.error(extractErrorLogData(err), 'DBGM-00169 Error notifying subprocess');
|
||||
this.subprocess = null;
|
||||
}
|
||||
});
|
||||
|
||||
@@ -7,15 +7,15 @@ const AsyncLock = require('async-lock');
|
||||
const lock = new AsyncLock();
|
||||
const stableStringify = require('json-stable-stringify');
|
||||
const { evaluateCondition } = require('dbgate-sqltree');
|
||||
const requirePluginFunction = require('./requirePluginFunction');
|
||||
const esort = require('external-sorting');
|
||||
const { jsldir } = require('./directories');
|
||||
const LineReader = require('./LineReader');
|
||||
|
||||
class JsonLinesDatastore {
|
||||
constructor(file, formatterFunction) {
|
||||
constructor(file, formatterFunction, resolveNextFile = null) {
|
||||
this.file = file;
|
||||
this.formatterFunction = formatterFunction;
|
||||
this.resolveNextFile = resolveNextFile;
|
||||
this.reader = null;
|
||||
this.readedDataRowCount = 0;
|
||||
this.readedSchemaRow = false;
|
||||
@@ -23,7 +23,12 @@ class JsonLinesDatastore {
|
||||
this.notifyChangedCallback = null;
|
||||
this.currentFilter = null;
|
||||
this.currentSort = null;
|
||||
this.rowFormatter = requirePluginFunction(formatterFunction);
|
||||
this.currentFileName = null;
|
||||
if (formatterFunction) {
|
||||
const requirePluginFunction = require('./requirePluginFunction');
|
||||
this.rowFormatter = requirePluginFunction(formatterFunction);
|
||||
}
|
||||
|
||||
this.sortedFiles = {};
|
||||
}
|
||||
|
||||
@@ -67,6 +72,7 @@ class JsonLinesDatastore {
|
||||
// this.firstRowToBeReturned = null;
|
||||
this.currentFilter = null;
|
||||
this.currentSort = null;
|
||||
this.currentFileName = null;
|
||||
await reader.close();
|
||||
}
|
||||
|
||||
@@ -100,8 +106,18 @@ class JsonLinesDatastore {
|
||||
// return res;
|
||||
// }
|
||||
for (;;) {
|
||||
const line = await this.reader.readLine();
|
||||
if (!line) {
|
||||
let line = await this.reader.readLine();
|
||||
while (!line) {
|
||||
if (!this.currentSort && this.resolveNextFile) {
|
||||
const nextFile = await this.resolveNextFile(this.currentFileName);
|
||||
if (nextFile) {
|
||||
await this.reader.close();
|
||||
this.reader = await this._openReader(nextFile);
|
||||
this.currentFileName = nextFile;
|
||||
line = await this.reader.readLine();
|
||||
continue;
|
||||
}
|
||||
}
|
||||
// EOF
|
||||
return null;
|
||||
}
|
||||
@@ -173,6 +189,7 @@ class JsonLinesDatastore {
|
||||
}
|
||||
if (!this.reader) {
|
||||
const reader = await this._openReader(sort ? this.sortedFiles[stableStringify(sort)] : this.file);
|
||||
this.currentFileName = this.file;
|
||||
this.reader = reader;
|
||||
this.currentFilter = filter;
|
||||
this.currentSort = sort;
|
||||
|
||||
@@ -12,7 +12,7 @@ function childProcessChecker() {
|
||||
// This will come once parent dies.
|
||||
// One way can be to check for error code ERR_IPC_CHANNEL_CLOSED
|
||||
// and call process.exit()
|
||||
logger.error(extractErrorLogData(err), 'parent died');
|
||||
logger.error(extractErrorLogData(err), 'DBGM-00163 parent died');
|
||||
process.exit(1);
|
||||
}
|
||||
}, 1000);
|
||||
|
||||
@@ -77,7 +77,7 @@ function startCloudTokenChecking(sid, callback) {
|
||||
callback(resp.data);
|
||||
}
|
||||
} catch (err) {
|
||||
logger.error(extractErrorLogData(err), 'Error checking cloud token');
|
||||
logger.error(extractErrorLogData(err), 'DBGM-00164 Error checking cloud token');
|
||||
}
|
||||
}, 500);
|
||||
}
|
||||
@@ -125,7 +125,7 @@ async function getCloudUsedEngines() {
|
||||
const resp = await callCloudApiGet('content-engines');
|
||||
return resp || [];
|
||||
} catch (err) {
|
||||
logger.error(extractErrorLogData(err), 'Error getting cloud content list');
|
||||
logger.error(extractErrorLogData(err), 'DBGM-00165 Error getting cloud content list');
|
||||
return [];
|
||||
}
|
||||
}
|
||||
@@ -208,7 +208,7 @@ async function updateCloudFiles(isRefresh) {
|
||||
lastCheckedTm = _.max(cloudFiles.map(x => parseInt(x.modifiedTm)));
|
||||
}
|
||||
|
||||
logger.info({ tags, lastCheckedTm }, 'Downloading cloud files');
|
||||
logger.info({ tags, lastCheckedTm }, 'DBGM-00082 Downloading cloud files');
|
||||
|
||||
const resp = await axios.default.get(
|
||||
`${DBGATE_CLOUD_URL}/public-cloud-updates?lastCheckedTm=${lastCheckedTm}&tags=${tags}&isRefresh=${
|
||||
@@ -223,7 +223,7 @@ async function updateCloudFiles(isRefresh) {
|
||||
}
|
||||
);
|
||||
|
||||
logger.info(`Downloaded ${resp.data.length} cloud files`);
|
||||
logger.info(`DBGM-00083 Downloaded ${resp.data.length} cloud files`);
|
||||
|
||||
const filesByPath = lastCheckedTm == 0 ? {} : _.keyBy(cloudFiles, 'path');
|
||||
for (const file of resp.data) {
|
||||
@@ -269,7 +269,7 @@ async function refreshPublicFiles(isRefresh) {
|
||||
try {
|
||||
await updateCloudFiles(isRefresh);
|
||||
} catch (err) {
|
||||
logger.error(extractErrorLogData(err), 'Error updating cloud files');
|
||||
logger.error(extractErrorLogData(err), 'DBGM-00166 Error updating cloud files');
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -14,11 +14,11 @@ const createDirectories = {};
|
||||
const ensureDirectory = (dir, clean) => {
|
||||
if (!createDirectories[dir]) {
|
||||
if (clean && fs.existsSync(dir) && !platformInfo.isForkedApi) {
|
||||
getLogger('directories').info(`Cleaning directory ${dir}`);
|
||||
getLogger('directories').info(`DBGM-00170 Cleaning directory ${dir}`);
|
||||
cleanDirectory(dir, _.isNumber(clean) ? clean : null);
|
||||
}
|
||||
if (!fs.existsSync(dir)) {
|
||||
getLogger('directories').info(`Creating directory ${dir}`);
|
||||
getLogger('directories').info(`DBGM-00171 Creating directory ${dir}`);
|
||||
fs.mkdirSync(dir);
|
||||
}
|
||||
createDirectories[dir] = true;
|
||||
|
||||
@@ -42,13 +42,13 @@ function extractSingleFileFromZip(zipPath, fileInZip, outputPath) {
|
||||
|
||||
// When the file is finished writing, resolve
|
||||
writeStream.on('finish', () => {
|
||||
logger.info(`File "${fileInZip}" extracted to "${outputPath}".`);
|
||||
logger.info(`DBGM-00088 File "${fileInZip}" extracted to "${outputPath}".`);
|
||||
resolve(true);
|
||||
});
|
||||
|
||||
// Handle write errors
|
||||
writeStream.on('error', writeErr => {
|
||||
logger.error(extractErrorLogData(writeErr), `Error extracting "${fileInZip}" from "${zipPath}".`);
|
||||
logger.error(extractErrorLogData(writeErr), `DBGM-00089 Error extracting "${fileInZip}" from "${zipPath}".`);
|
||||
reject(writeErr);
|
||||
});
|
||||
});
|
||||
@@ -67,7 +67,7 @@ function extractSingleFileFromZip(zipPath, fileInZip, outputPath) {
|
||||
|
||||
// Handle general errors
|
||||
zipFile.on('error', err => {
|
||||
logger.error(extractErrorLogData(err), `ZIP file error in ${zipPath}.`);
|
||||
logger.error(extractErrorLogData(err), `DBGM-00172 ZIP file error in ${zipPath}.`);
|
||||
reject(err);
|
||||
});
|
||||
});
|
||||
|
||||
@@ -28,7 +28,7 @@ async function loadModelTransform(file) {
|
||||
}
|
||||
return null;
|
||||
} catch (err) {
|
||||
logger.error(extractErrorLogData(err), `Error loading model transform ${file}`);
|
||||
logger.error(extractErrorLogData(err), `DBGM-00173 Error loading model transform ${file}`);
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -40,7 +40,7 @@ function callForwardProcess(connection, tunnelConfig, tunnelCacheKey) {
|
||||
tunnelConfig,
|
||||
});
|
||||
} catch (err) {
|
||||
logger.error(extractErrorLogData(err), 'Error connecting SSH');
|
||||
logger.error(extractErrorLogData(err), 'DBGM-00174 Error connecting SSH');
|
||||
}
|
||||
return new Promise((resolve, reject) => {
|
||||
let promiseHandled = false;
|
||||
@@ -57,18 +57,18 @@ function callForwardProcess(connection, tunnelConfig, tunnelCacheKey) {
|
||||
}
|
||||
});
|
||||
subprocess.on('exit', code => {
|
||||
logger.info(`SSH forward process exited with code ${code}`);
|
||||
logger.info(`DBGM-00090 SSH forward process exited with code ${code}`);
|
||||
delete sshTunnelCache[tunnelCacheKey];
|
||||
if (!promiseHandled) {
|
||||
reject(
|
||||
new Error(
|
||||
'SSH forward process exited, try to change "Local host address for SSH connections" in Settings/Connections'
|
||||
'DBGM-00091 SSH forward process exited, try to change "Local host address for SSH connections" in Settings/Connections'
|
||||
)
|
||||
);
|
||||
}
|
||||
});
|
||||
subprocess.on('error', error => {
|
||||
logger.error(extractErrorLogData(error), 'SSH forward process error');
|
||||
logger.error(extractErrorLogData(error), 'DBGM-00092 SSH forward process error');
|
||||
delete sshTunnelCache[tunnelCacheKey];
|
||||
if (!promiseHandled) {
|
||||
reject(error);
|
||||
@@ -97,13 +97,13 @@ async function getSshTunnel(connection) {
|
||||
};
|
||||
try {
|
||||
logger.info(
|
||||
`Creating SSH tunnel to ${connection.sshHost}-${connection.server}:${connection.port}, using local port ${localPort}`
|
||||
`DBGM-00093 Creating SSH tunnel to ${connection.sshHost}-${connection.server}:${connection.port}, using local port ${localPort}`
|
||||
);
|
||||
|
||||
const subprocess = await callForwardProcess(connection, tunnelConfig, tunnelCacheKey);
|
||||
|
||||
logger.info(
|
||||
`Created SSH tunnel to ${connection.sshHost}-${connection.server}:${connection.port}, using local port ${localPort}`
|
||||
`DBGM-00094 Created SSH tunnel to ${connection.sshHost}-${connection.server}:${connection.port}, using local port ${localPort}`
|
||||
);
|
||||
|
||||
sshTunnelCache[tunnelCacheKey] = {
|
||||
@@ -114,7 +114,7 @@ async function getSshTunnel(connection) {
|
||||
};
|
||||
return sshTunnelCache[tunnelCacheKey];
|
||||
} catch (err) {
|
||||
logger.error(extractErrorLogData(err), 'Error creating SSH tunnel:');
|
||||
logger.error(extractErrorLogData(err), 'DBGM-00095 Error creating SSH tunnel:');
|
||||
// error is not cached
|
||||
return {
|
||||
state: 'error',
|
||||
|
||||
@@ -10,7 +10,7 @@ async function handleGetSshTunnelRequest({ msgid, connection }, subprocess) {
|
||||
try {
|
||||
subprocess.send({ msgtype: 'getsshtunnel-response', msgid, response });
|
||||
} catch (err) {
|
||||
logger.error(extractErrorLogData(err), 'Error sending to SSH tunnel');
|
||||
logger.error(extractErrorLogData(err), 'DBGM-00175 Error sending to SSH tunnel');
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -12,11 +12,11 @@ module.exports = function useController(app, electron, route, controller) {
|
||||
const router = express.Router();
|
||||
|
||||
if (controller._init) {
|
||||
logger.info(`Calling init controller for controller ${route}`);
|
||||
logger.info(`DBGM-00096 Calling init controller for controller ${route}`);
|
||||
try {
|
||||
controller._init();
|
||||
} catch (err) {
|
||||
logger.error(extractErrorLogData(err), `Error initializing controller, exiting application`);
|
||||
logger.error(extractErrorLogData(err), `DBGM-00097 Error initializing controller, exiting application`);
|
||||
process.exit(1);
|
||||
}
|
||||
}
|
||||
@@ -78,7 +78,7 @@ module.exports = function useController(app, electron, route, controller) {
|
||||
const data = await controller[key]({ ...req.body, ...req.query }, req);
|
||||
res.json(data);
|
||||
} catch (err) {
|
||||
logger.error(extractErrorLogData(err), `Error when processing route ${route}/${key}`);
|
||||
logger.error(extractErrorLogData(err), `DBGM-00176 Error when processing route ${route}/${key}`);
|
||||
if (err instanceof MissingCredentialsError) {
|
||||
res.json({
|
||||
missingCredentials: true,
|
||||
|
||||
Reference in New Issue
Block a user