SYNC: Merge branch 'feature/dblogs'

This commit is contained in:
SPRINX0\prochazka
2025-08-06 12:34:25 +02:00
committed by Diflow
parent 4ed437fd4e
commit ed7605eccd
15 changed files with 241 additions and 153 deletions

View File

@@ -1,7 +1,7 @@
const fs = require('fs-extra'); const fs = require('fs-extra');
const path = require('path'); const path = require('path');
const crypto = require('crypto'); const crypto = require('crypto');
const { filesdir, archivedir, resolveArchiveFolder, uploadsdir, appdir } = require('../utility/directories'); const { filesdir, archivedir, resolveArchiveFolder, uploadsdir, appdir, jsldir } = require('../utility/directories');
const getChartExport = require('../utility/getChartExport'); const getChartExport = require('../utility/getChartExport');
const { hasPermission } = require('../utility/hasPermission'); const { hasPermission } = require('../utility/hasPermission');
const socket = require('../utility/socket'); const socket = require('../utility/socket');
@@ -13,7 +13,7 @@ const dbgateApi = require('../shell');
const { getLogger } = require('dbgate-tools'); const { getLogger } = require('dbgate-tools');
const platformInfo = require('../utility/platformInfo'); const platformInfo = require('../utility/platformInfo');
const { checkSecureFilePathsWithoutDirectory, checkSecureDirectories } = require('../utility/security'); const { checkSecureFilePathsWithoutDirectory, checkSecureDirectories } = require('../utility/security');
const { AppLogDatastore, getRecentAppLogRecords } = require('../utility/AppLogDatastore'); const { copyAppLogsIntoFile, getRecentAppLogRecords } = require('../utility/appLogStore');
const logger = getLogger('files'); const logger = getLogger('files');
function serialize(format, data) { function serialize(format, data) {
@@ -29,9 +29,6 @@ function deserialize(format, text) {
} }
module.exports = { module.exports = {
currentLogReader: null,
currentLogParamsKey: null,
list_meta: true, list_meta: true,
async list({ folder }, req) { async list({ folder }, req) {
if (!hasPermission(`files/${folder}/read`, req)) return []; if (!hasPermission(`files/${folder}/read`, req)) return [];
@@ -316,19 +313,14 @@ module.exports = {
return true; return true;
}, },
getAppLog_meta: true, fillAppLogs_meta: true,
async getAppLog({ offset = 0, limit = 100, dateFrom = 0, dateTo = new Date().getTime(), filters = {} }) { async fillAppLogs({ dateFrom = 0, dateTo = new Date().getTime() }) {
const paramsKey = `${dateFrom}-${dateTo}`; const jslid = crypto.randomUUID();
if (paramsKey != this.currentLogParamsKey) { const outputFile = path.join(jsldir(), `${jslid}.jsonl`);
if (this.currentLogReader) { await copyAppLogsIntoFile(dateFrom, dateTo, outputFile);
this.currentLogReader._closeReader(); return {
this.currentLogReader = null; jslid,
} };
this.currentLogReader = new AppLogDatastore({ timeFrom: dateFrom, timeTo: dateTo });
this.currentLogParamsKey = paramsKey;
}
return this.currentLogReader.getRows(offset, limit, filters);
}, },
getRecentAppLog_meta: true, getRecentAppLog_meta: true,

View File

@@ -33,7 +33,7 @@ if (processArgs.processDisplayName) {
// } // }
function configureLogger() { function configureLogger() {
const { initializeRecentLogProvider, pushToRecentLogs } = require('./utility/AppLogDatastore'); const { initializeRecentLogProvider, pushToRecentLogs } = require('./utility/appLogStore');
initializeRecentLogProvider(); initializeRecentLogProvider();
const logsFilePath = path.join(logsdir(), `${moment().format('YYYY-MM-DD-HH-mm')}-${process.pid}.ndjson`); const logsFilePath = path.join(logsdir(), `${moment().format('YYYY-MM-DD-HH-mm')}-${process.pid}.ndjson`);

View File

@@ -6,7 +6,6 @@ const {
extractIntSettingsValue, extractIntSettingsValue,
getLogger, getLogger,
isCompositeDbName, isCompositeDbName,
dbNameLogCategory,
extractErrorMessage, extractErrorMessage,
extractErrorLogData, extractErrorLogData,
ScriptWriterEval, ScriptWriterEval,
@@ -45,6 +44,14 @@ function getStatusCounter() {
return statusCounter; return statusCounter;
} }
// Builds the context object attached to log records emitted by this DB
// process: current database name, connection id and engine. Each field is
// undefined until the corresponding handle (dbhan / storedConnection,
// module-level state) has been initialized.
function getLogInfo() {
  const database = dbhan ? dbhan.database : undefined;
  const conid = dbhan ? dbhan.conid : undefined;
  const engine = storedConnection ? storedConnection.engine : undefined;
  return { database, conid, engine };
}
async function checkedAsyncCall(promise) { async function checkedAsyncCall(promise) {
try { try {
const res = await promise; const res = await promise;
@@ -131,10 +138,10 @@ async function readVersion() {
const driver = requireEngineDriver(storedConnection); const driver = requireEngineDriver(storedConnection);
try { try {
const version = await driver.getVersion(dbhan); const version = await driver.getVersion(dbhan);
logger.debug(`DBGM-00037 Got server version: ${version.version}`); logger.debug(getLogInfo(), `DBGM-00037 Got server version: ${version.version}`);
serverVersion = version; serverVersion = version;
} catch (err) { } catch (err) {
logger.error(extractErrorLogData(err), 'DBGM-00149 Error getting DB server version'); logger.error(extractErrorLogData(err, getLogInfo()), 'DBGM-00149 Error getting DB server version');
serverVersion = { version: 'Unknown' }; serverVersion = { version: 'Unknown' };
} }
process.send({ msgtype: 'version', version: serverVersion }); process.send({ msgtype: 'version', version: serverVersion });
@@ -148,9 +155,8 @@ async function handleConnect({ connection, structure, globalSettings }) {
const driver = requireEngineDriver(storedConnection); const driver = requireEngineDriver(storedConnection);
dbhan = await checkedAsyncCall(connectUtility(driver, storedConnection, 'app')); dbhan = await checkedAsyncCall(connectUtility(driver, storedConnection, 'app'));
logger.debug( logger.debug(
`DBGM-00038 Connected to database, driver: ${storedConnection.engine}, separate schemas: ${ getLogInfo(),
storedConnection.useSeparateSchemas ? 'YES' : 'NO' `DBGM-00038 Connected to database, separate schemas: ${storedConnection.useSeparateSchemas ? 'YES' : 'NO'}`
}, 'DB: ${dbNameLogCategory(dbhan.database)}`
); );
dbhan.feedback = feedback => setStatus({ feedback }); dbhan.feedback = feedback => setStatus({ feedback });
await checkedAsyncCall(readVersion()); await checkedAsyncCall(readVersion());
@@ -257,13 +263,16 @@ async function handleDriverDataCore(msgid, callMethod, { logName }) {
const result = await callMethod(driver); const result = await callMethod(driver);
process.send({ msgtype: 'response', msgid, result: serializeJsTypesForJsonStringify(result) }); process.send({ msgtype: 'response', msgid, result: serializeJsTypesForJsonStringify(result) });
} catch (err) { } catch (err) {
logger.error(extractErrorLogData(err, { logName }), `DBGM-00150 Error when handling message ${logName}`); logger.error(
extractErrorLogData(err, { logName, ...getLogInfo() }),
`DBGM-00150 Error when handling message ${logName}`
);
process.send({ msgtype: 'response', msgid, errorMessage: extractErrorMessage(err, 'Error executing DB data') }); process.send({ msgtype: 'response', msgid, errorMessage: extractErrorMessage(err, 'Error executing DB data') });
} }
} }
async function handleSchemaList({ msgid }) { async function handleSchemaList({ msgid }) {
logger.debug('DBGM-00039 Loading schema list'); logger.debug(getLogInfo(), 'DBGM-00039 Loading schema list');
return handleDriverDataCore(msgid, driver => driver.listSchemas(dbhan), { logName: 'listSchemas' }); return handleDriverDataCore(msgid, driver => driver.listSchemas(dbhan), { logName: 'listSchemas' });
} }
@@ -351,7 +360,7 @@ async function handleSqlPreview({ msgid, objects, options }) {
process.send({ msgtype: 'response', msgid, sql: dmp.s, isTruncated: generator.isTruncated }); process.send({ msgtype: 'response', msgid, sql: dmp.s, isTruncated: generator.isTruncated });
if (generator.isUnhandledException) { if (generator.isUnhandledException) {
setTimeout(async () => { setTimeout(async () => {
logger.error('DBGM-00151 Exiting because of unhandled exception'); logger.error(getLogInfo(), 'DBGM-00151 Exiting because of unhandled exception');
await driver.close(dbhan); await driver.close(dbhan);
process.exit(0); process.exit(0);
}, 500); }, 500);
@@ -485,7 +494,7 @@ function start() {
setInterval(async () => { setInterval(async () => {
const time = new Date().getTime(); const time = new Date().getTime();
if (time - lastPing > 40 * 1000) { if (time - lastPing > 40 * 1000) {
logger.info('DBGM-00040 Database connection not alive, exiting'); logger.info(getLogInfo(), 'DBGM-00040 Database connection not alive, exiting');
const driver = requireEngineDriver(storedConnection); const driver = requireEngineDriver(storedConnection);
await driver.close(dbhan); await driver.close(dbhan);
process.exit(0); process.exit(0);
@@ -497,7 +506,7 @@ function start() {
try { try {
await handleMessage(message); await handleMessage(message);
} catch (err) { } catch (err) {
logger.error(extractErrorLogData(err), 'DBGM-00041 Error in DB connection'); logger.error(extractErrorLogData(err, getLogInfo()), 'DBGM-00041 Error in DB connection');
process.send({ process.send({
msgtype: 'error', msgtype: 'error',
error: extractErrorMessage(err, 'DBGM-00042 Error processing message'), error: extractErrorMessage(err, 'DBGM-00042 Error processing message'),

View File

@@ -12,10 +12,9 @@ const { jsldir } = require('./directories');
const LineReader = require('./LineReader'); const LineReader = require('./LineReader');
class JsonLinesDatastore { class JsonLinesDatastore {
constructor(file, formatterFunction, resolveNextFile = null) { constructor(file, formatterFunction) {
this.file = file; this.file = file;
this.formatterFunction = formatterFunction; this.formatterFunction = formatterFunction;
this.resolveNextFile = resolveNextFile;
this.reader = null; this.reader = null;
this.readedDataRowCount = 0; this.readedDataRowCount = 0;
this.readedSchemaRow = false; this.readedSchemaRow = false;
@@ -23,12 +22,10 @@ class JsonLinesDatastore {
this.notifyChangedCallback = null; this.notifyChangedCallback = null;
this.currentFilter = null; this.currentFilter = null;
this.currentSort = null; this.currentSort = null;
this.currentFileName = null;
if (formatterFunction) { if (formatterFunction) {
const requirePluginFunction = require('./requirePluginFunction'); const requirePluginFunction = require('./requirePluginFunction');
this.rowFormatter = requirePluginFunction(formatterFunction); this.rowFormatter = requirePluginFunction(formatterFunction);
} }
this.sortedFiles = {}; this.sortedFiles = {};
} }
@@ -72,7 +69,6 @@ class JsonLinesDatastore {
// this.firstRowToBeReturned = null; // this.firstRowToBeReturned = null;
this.currentFilter = null; this.currentFilter = null;
this.currentSort = null; this.currentSort = null;
this.currentFileName = null;
await reader.close(); await reader.close();
} }
@@ -106,18 +102,8 @@ class JsonLinesDatastore {
// return res; // return res;
// } // }
for (;;) { for (;;) {
let line = await this.reader.readLine(); const line = await this.reader.readLine();
while (!line) { if (!line) {
if (!this.currentSort && this.resolveNextFile) {
const nextFile = await this.resolveNextFile(this.currentFileName);
if (nextFile) {
await this.reader.close();
this.reader = await this._openReader(nextFile);
this.currentFileName = nextFile;
line = await this.reader.readLine();
continue;
}
}
// EOF // EOF
return null; return null;
} }
@@ -189,7 +175,6 @@ class JsonLinesDatastore {
} }
if (!this.reader) { if (!this.reader) {
const reader = await this._openReader(sort ? this.sortedFiles[stableStringify(sort)] : this.file); const reader = await this._openReader(sort ? this.sortedFiles[stableStringify(sort)] : this.file);
this.currentFileName = this.file;
this.reader = reader; this.reader = reader;
this.currentFilter = filter; this.currentFilter = filter;
this.currentSort = sort; this.currentSort = sort;

View File

@@ -17,75 +17,6 @@ async function getLogFiles(timeFrom, timeTo) {
return logFiles.sort().map(x => path.join(dir, x)); return logFiles.sort().map(x => path.join(dir, x));
} }
// Streams application log records for a given time window across the rotated
// per-process log files, exposing paged, filtered access via a single
// JsonLinesDatastore reader that chains from one file to the next.
// NOTE(review): stateful — callers must invoke _closeReader() before
// discarding an instance or changing the window parameters.
class AppLogDatastore {
  // timeFrom/timeTo: epoch-millisecond bounds of the log window.
  constructor({ timeFrom, timeTo }) {
    this.timeFrom = timeFrom;
    this.timeTo = timeTo;
  }
  // Returns the log file following `file` in the sorted file list for this
  // window, or null when `file` is unknown or already the last one.
  async resolveNextFile(file) {
    const files = await getLogFiles(this.timeFrom, this.timeTo);
    const index = files.indexOf(file);
    if (index < 0 || index >= files.length - 1) return null;
    return files[index + 1];
  }
  // Returns up to `limit` rows starting at `offset`, restricted to the
  // [timeFrom, timeTo] interval and to `filters`, which maps column name to
  // a list of accepted values; a single-element [null] list means "is null".
  async getRows(offset = 0, limit = 100, filters = {}) {
    // Lazily open a reader on the first file; subsequent files are reached
    // through the resolveNextFile callback.
    // NOTE(review): assumes at least one log file exists for the window;
    // files[0] is undefined otherwise — verify against getLogFiles.
    if (!this.linesReader) {
      const files = await getLogFiles(this.timeFrom, this.timeTo);
      this.linesReader = new JsonLinesDatastore(files[0], null, file => this.resolveNextFile(file));
    }
    // Always constrain rows to the requested time interval.
    const conditions = [
      {
        conditionType: 'binary',
        operator: '>=',
        left: { exprType: 'column', columnName: 'time' },
        right: { exprType: 'value', value: this.timeFrom },
      },
      {
        conditionType: 'binary',
        operator: '<=',
        left: { exprType: 'column', columnName: 'time' },
        right: { exprType: 'value', value: this.timeTo },
      },
    ];
    // Translate each column filter into either an isNull or an IN condition.
    for (const [key, values] of Object.entries(filters)) {
      if (values.length == 1 && values[0] == null) {
        // @ts-ignore
        conditions.push({
          conditionType: 'isNull',
          expr: { exprType: 'column', columnName: key },
        });
        continue;
      }
      // @ts-ignore
      conditions.push({
        conditionType: 'in',
        expr: { exprType: 'column', columnName: key },
        values,
      });
    }
    return this.linesReader.getRows(
      offset,
      limit,
      {
        conditionType: 'and',
        conditions,
      },
      null
    );
  }
  // Releases the underlying file reader; safe to call when nothing is open.
  _closeReader() {
    if (this.linesReader) {
      this.linesReader._closeReader();
      this.linesReader = null;
    }
  }
}
const RECENT_LOG_LIMIT = 1000; const RECENT_LOG_LIMIT = 1000;
let recentLogs = null; let recentLogs = null;
@@ -97,6 +28,27 @@ function adjustRecentLogs() {
} }
} }
// Copies all application log entries whose `time` falls within
// [timeFrom, timeTo] (epoch ms) from the rotated log files into a single
// JSONL file at `fileName`. Malformed lines are skipped silently.
//
// Fix: the write stream is now explicitly end()ed and awaited — previously
// the function resolved without closing the stream, so buffered data could
// still be unflushed when the caller (fillAppLogs) handed the file to the
// reader, and the file descriptor leaked.
async function copyAppLogsIntoFile(timeFrom, timeTo, fileName) {
  const writeStream = fs.createWriteStream(fileName);
  try {
    for (const file of await getLogFiles(timeFrom, timeTo)) {
      const readStream = fs.createReadStream(file);
      const reader = new LineReader(readStream);
      for (;;) {
        const line = await reader.readLine();
        if (line == null) break;
        try {
          const logEntry = JSON.parse(line);
          if (logEntry.time >= timeFrom && logEntry.time <= timeTo) {
            writeStream.write(JSON.stringify(logEntry) + '\n');
          }
        } catch (e) {
          // Not valid JSON (truncated/corrupted line) — skip it.
          continue;
        }
      }
    }
  } finally {
    // Flush buffered data and close the file before callers read it.
    await new Promise((resolve, reject) => {
      writeStream.once('error', reject);
      writeStream.end(resolve);
    });
  }
}
async function initializeRecentLogProvider() { async function initializeRecentLogProvider() {
const logs = []; const logs = [];
for (const file of await getLogFiles(startOfDay(new Date()), new Date())) { for (const file of await getLogFiles(startOfDay(new Date()), new Date())) {
@@ -141,8 +93,8 @@ function getRecentAppLogRecords() {
} }
module.exports = { module.exports = {
AppLogDatastore,
initializeRecentLogProvider, initializeRecentLogProvider,
getRecentAppLogRecords, getRecentAppLogRecords,
pushToRecentLogs, pushToRecentLogs,
copyAppLogsIntoFile,
}; };

View File

@@ -132,7 +132,7 @@ async function connectUtility(driver, storedConnection, connectionMode, addition
connection.ssl = await extractConnectionSslParams(connection); connection.ssl = await extractConnectionSslParams(connection);
const conn = await driver.connect({ ...connection, ...additionalOptions }); const conn = await driver.connect({ conid: connectionLoaded?._id, ...connection, ...additionalOptions });
return conn; return conn;
} }

View File

@@ -83,3 +83,44 @@ export function selectKeysFromTable(options: {
}; };
return res; return res;
} }
/**
 * Builds the compound filter condition used by the app-log viewer:
 * a lower and an upper bound on `timeColumn` plus one condition per entry
 * in `fieldFilters`. A single-element `[null]` value list is treated as
 * "column is null"; any other list becomes an IN condition.
 */
export function createLogCompoudCondition(
  fieldFilters: { [field: string]: string[] },
  timeColumn: string,
  timeFrom: number,
  timeTo: number
): Condition {
  // Time-range bounds come first, in >= / <= order.
  const allConditions: Condition[] = [
    {
      conditionType: 'binary',
      operator: '>=',
      left: { exprType: 'column', columnName: timeColumn },
      right: { exprType: 'value', value: timeFrom },
    },
    {
      conditionType: 'binary',
      operator: '<=',
      left: { exprType: 'column', columnName: timeColumn },
      right: { exprType: 'value', value: timeTo },
    },
  ];
  for (const [columnName, values] of Object.entries(fieldFilters)) {
    const wantsNull = values.length == 1 && values[0] == null;
    if (wantsNull) {
      allConditions.push({
        conditionType: 'isNull',
        expr: { exprType: 'column', columnName },
      });
    } else {
      allConditions.push({
        conditionType: 'in',
        expr: { exprType: 'column', columnName },
        values,
      });
    }
  }
  return {
    conditionType: 'and',
    conditions: allConditions,
  };
}

View File

@@ -5,7 +5,7 @@ import _pick from 'lodash/pick';
import _compact from 'lodash/compact'; import _compact from 'lodash/compact';
import { getLogger } from './getLogger'; import { getLogger } from './getLogger';
import { type Logger } from 'pinomin'; import { type Logger } from 'pinomin';
import { dbNameLogCategory, isCompositeDbName, splitCompositeDbName } from './schemaInfoTools'; import { isCompositeDbName, splitCompositeDbName } from './schemaInfoTools';
import { extractErrorLogData } from './stringTools'; import { extractErrorLogData } from './stringTools';
const logger = getLogger('dbAnalyser'); const logger = getLogger('dbAnalyser');
@@ -77,10 +77,12 @@ export class DatabaseAnalyser<TClient = any> {
return db; return db;
} }
getLogDbInfo() {
return this.driver.getLogDbInfo(this.dbhan);
}
async fullAnalysis() { async fullAnalysis() {
logger.debug( logger.debug(this.getLogDbInfo(), 'DBGM-00126 Performing full analysis');
`DBGM-00126 Performing full analysis, DB=${dbNameLogCategory(this.dbhan.database)}, engine=${this.driver.engine}`
);
const res = this.addEngineField(await this._runAnalysis()); const res = this.addEngineField(await this._runAnalysis());
// console.log('FULL ANALYSIS', res); // console.log('FULL ANALYSIS', res);
return res; return res;
@@ -101,9 +103,7 @@ export class DatabaseAnalyser<TClient = any> {
} }
async incrementalAnalysis(structure) { async incrementalAnalysis(structure) {
logger.info( logger.info(this.getLogDbInfo(), 'DBGM-00127 Performing incremental analysis');
`DBGM-00127 Performing incremental analysis, DB=${dbNameLogCategory(this.dbhan.database)}, engine=${this.driver.engine}`
);
this.structure = structure; this.structure = structure;
const modifications = await this.getModifications(); const modifications = await this.getModifications();
@@ -129,7 +129,7 @@ export class DatabaseAnalyser<TClient = any> {
this.modifications = structureModifications; this.modifications = structureModifications;
if (structureWithRowCounts) this.structure = structureWithRowCounts; if (structureWithRowCounts) this.structure = structureWithRowCounts;
logger.info({ modifications: this.modifications }, 'DBGM-00128 DB modifications detected:'); logger.info({ ...this.getLogDbInfo(), modifications: this.modifications }, 'DBGM-00128 DB modifications detected');
return this.addEngineField(this.mergeAnalyseResult(await this._runAnalysis())); return this.addEngineField(this.mergeAnalyseResult(await this._runAnalysis()));
} }
@@ -274,7 +274,7 @@ export class DatabaseAnalyser<TClient = any> {
this.dbhan.feedback(obj); this.dbhan.feedback(obj);
} }
if (obj && obj.analysingMessage) { if (obj && obj.analysingMessage) {
logger.debug(obj.analysingMessage); logger.debug(this.getLogDbInfo(), obj.analysingMessage);
} }
} }
@@ -347,10 +347,16 @@ export class DatabaseAnalyser<TClient = any> {
} }
try { try {
const res = await this.driver.query(this.dbhan, sql); const res = await this.driver.query(this.dbhan, sql);
this.logger.debug({ rows: res.rows.length, template }, `DBGM-00129 Loaded analyser query`); this.logger.debug(
{ ...this.getLogDbInfo(), rows: res.rows.length, template },
`DBGM-00129 Loaded analyser query`
);
return res; return res;
} catch (err) { } catch (err) {
logger.error(extractErrorLogData(err, { template }), 'DBGM-00130 Error running analyser query'); logger.error(
extractErrorLogData(err, { template, ...this.getLogDbInfo() }),
'DBGM-00130 Error running analyser query'
);
return { return {
rows: [], rows: [],
isError: true, isError: true,

View File

@@ -254,4 +254,12 @@ export const driverBase = {
async writeQueryFromStream(dbhan, sql) { async writeQueryFromStream(dbhan, sql) {
return null; return null;
}, },
getLogDbInfo(dbhan) {
return {
database: dbhan ? dbhan.database : undefined,
engine: this.engine,
conid: dbhan ? dbhan.conid : undefined,
};
},
}; };

View File

@@ -37,15 +37,15 @@ export function extractSchemaNameFromComposite(name: string) {
return splitCompositeDbName(name)?.schema; return splitCompositeDbName(name)?.schema;
} }
export function dbNameLogCategory(database: string): string { // export function getDbNameLogFace(database: string): string {
if (isCompositeDbName(database)) { // if (isCompositeDbName(database)) {
return '~composite'; // return '~composite';
} // }
if (database) { // if (database) {
return '~simple'; // return '~simple';
} // }
return '~nodb'; // return '~nodb';
} // }
export function compositeDbNameIfNeeded( export function compositeDbNameIfNeeded(
connnection: { useSeparateSchemas: boolean }, connnection: { useSeparateSchemas: boolean },

View File

@@ -164,6 +164,7 @@ export interface FilterBehaviourProvider {
export interface DatabaseHandle<TClient = any> { export interface DatabaseHandle<TClient = any> {
client: TClient; client: TClient;
database?: string; database?: string;
conid?: string;
feedback?: (message: any) => void; feedback?: (message: any) => void;
getDatabase?: () => any; getDatabase?: () => any;
connectionType?: string; connectionType?: string;
@@ -336,6 +337,11 @@ export interface EngineDriver<TClient = any> extends FilterBehaviourProvider {
analyserClass?: any; analyserClass?: any;
dumperClass?: any; dumperClass?: any;
singleConnectionOnly?: boolean; singleConnectionOnly?: boolean;
getLogDbInfo(dbhan: DatabaseHandle<TClient>): {
database?: string;
engine: string;
conid?: string;
};
} }
export interface DatabaseModification { export interface DatabaseModification {

View File

@@ -15,6 +15,10 @@
import Link from '../elements/Link.svelte'; import Link from '../elements/Link.svelte';
import SelectField from '../forms/SelectField.svelte'; import SelectField from '../forms/SelectField.svelte';
import { onDestroy, onMount, tick } from 'svelte'; import { onDestroy, onMount, tick } from 'svelte';
import DropDownButton from '../buttons/DropDownButton.svelte';
import { showModal } from '../modals/modalTools';
import ValueLookupModal from '../modals/ValueLookupModal.svelte';
import { createLogCompoudCondition } from 'dbgate-sqltree';
let loadedRows = []; let loadedRows = [];
let loadedAll = false; let loadedAll = false;
@@ -26,8 +30,13 @@
let mode = 'recent'; let mode = 'recent';
let autoScroll = true; let autoScroll = true;
let domTable; let domTable;
let jslid;
function formatValue(value) { function formatPossibleUuid(value) {
const uuidRegex = /^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$/i;
if (_.isString(value) && value.match(uuidRegex)) {
return value.slice(0, 8);
}
if (value == null) { if (value == null) {
return 'N/A'; return 'N/A';
} }
@@ -36,12 +45,16 @@
async function loadNextRows() { async function loadNextRows() {
const pageSize = getIntSettingsValue('dataGrid.pageSize', 100, 5, 1000); const pageSize = getIntSettingsValue('dataGrid.pageSize', 100, 5, 1000);
const rows = await apiCall('files/get-app-log', { const rows = await apiCall('jsldata/get-rows', {
jslid,
offset: loadedRows.length, offset: loadedRows.length,
limit: pageSize, limit: pageSize,
dateFrom: startOfDay(dateFilter[0]).getTime(), filters: createLogCompoudCondition(
dateTo: endOfDay(dateFilter[1]).getTime(),
filters, filters,
'time',
startOfDay(dateFilter[0]).getTime(),
endOfDay(dateFilter[1]).getTime()
),
}); });
loadedRows = [...loadedRows, ...rows]; loadedRows = [...loadedRows, ...rows];
if (rows.length < 10) { if (rows.length < 10) {
@@ -68,7 +81,7 @@
startObserver(domLoadNext); startObserver(domLoadNext);
} }
async function reloadData() { async function reloadData(createNewJslId = true) {
switch (mode) { switch (mode) {
case 'recent': case 'recent':
loadedRows = await apiCall('files/get-recent-app-log', { limit: 100 }); loadedRows = await apiCall('files/get-recent-app-log', { limit: 100 });
@@ -76,6 +89,13 @@
scrollToRecent(); scrollToRecent();
break; break;
case 'date': case 'date':
if (createNewJslId) {
const resp = await apiCall('files/fill-app-logs', {
dateFrom: startOfDay(dateFilter[0]).getTime(),
dateTo: endOfDay(dateFilter[1]).getTime(),
});
jslid = resp.jslid;
}
loadedRows = []; loadedRows = [];
loadedAll = false; loadedAll = false;
break; break;
@@ -87,7 +107,7 @@
...filters, ...filters,
[field]: values, [field]: values,
}; };
reloadData(); reloadData(false);
} }
const ColumnNamesMap = { const ColumnNamesMap = {
@@ -110,6 +130,17 @@
} }
} }
// Opens the value-lookup dialog for the given log column (backed by the
// current jslid snapshot) and applies the chosen values as a filter when
// the user confirms.
function filterBy(field) {
  const modalProps = {
    jslid,
    field,
    multiselect: true,
    onConfirm: values => {
      doSetFilter(field, values);
    },
  };
  showModal(ValueLookupModal, modalProps);
}
onMount(() => { onMount(() => {
apiOn('applog-event', handleLogMessage); apiOn('applog-event', handleLogMessage);
reloadData(); reloadData();
@@ -155,6 +186,21 @@
reloadData(); reloadData();
}} }}
/> />
<div class="ml-2">
<DropDownButton
data-testid="AdminAuditLogTab_addFilter"
icon="icon filter"
menu={[
{ text: 'Connection ID', onClick: () => filterBy('conid') },
{ text: 'Database', onClick: () => filterBy('database') },
{ text: 'Engine', onClick: () => filterBy('engine') },
{ text: 'Message code', onClick: () => filterBy('msgcode') },
{ text: 'Caller', onClick: () => filterBy('caller') },
{ text: 'Name', onClick: () => filterBy('name') },
]}
/>
</div>
{#each Object.keys(filters) as filterKey} {#each Object.keys(filters) as filterKey}
<div class="ml-2"> <div class="ml-2">
<span class="filter-label">{ColumnNamesMap[filterKey] || filterKey}:</span> <span class="filter-label">{ColumnNamesMap[filterKey] || filterKey}:</span>
@@ -165,10 +211,10 @@
if (!filters[filterKey].length) { if (!filters[filterKey].length) {
filters = _.omit(filters, filterKey); filters = _.omit(filters, filterKey);
} }
reloadData(); reloadData(false);
}} }}
> >
{formatValue(value)} {formatPossibleUuid(value)}
</Chip> </Chip>
{/each} {/each}
</div> </div>
@@ -183,6 +229,9 @@
<th>Time</th> <th>Time</th>
<th>Code</th> <th>Code</th>
<th>Message</th> <th>Message</th>
<th>Connection</th>
<th>Database</th>
<th>Engine</th>
<th>Caller</th> <th>Caller</th>
<th>Name</th> <th>Name</th>
</tr> </tr>
@@ -203,13 +252,16 @@
<td>{format(new Date(parseInt(row.time)), 'HH:mm:ss')}</td> <td>{format(new Date(parseInt(row.time)), 'HH:mm:ss')}</td>
<td>{row.msgcode || ''}</td> <td>{row.msgcode || ''}</td>
<td>{row.msg}</td> <td>{row.msg}</td>
<td>{formatPossibleUuid(row.conid) || ''}</td>
<td>{row.database || ''}</td>
<td>{row.engine?.includes('@') ? row.engine.split('@')[0] : row.engine || ''}</td>
<td>{row.caller || ''}</td> <td>{row.caller || ''}</td>
<td>{row.name || ''}</td> <td>{row.name || ''}</td>
</tr> </tr>
{#if index === selectedLogIndex} {#if index === selectedLogIndex}
<tr> <tr>
<td colspan="6"> <td colspan="9">
<TabControl <TabControl
isInline isInline
tabs={_.compact([ tabs={_.compact([
@@ -251,6 +303,38 @@
{row.name || 'N/A'} {row.name || 'N/A'}
{/if} {/if}
</div> </div>
{#if row.conid}
<div class="row">
<div>Connection ID:</div>
{#if mode == 'date'}
<Link onClick={() => doSetFilter('conid', [row.conid])}
>{formatPossibleUuid(row.conid)}</Link
>
{:else}
{formatPossibleUuid(row.conid)}
{/if}
</div>
{/if}
{#if row.database}
<div class="row">
<div>Database:</div>
{#if mode == 'date'}
<Link onClick={() => doSetFilter('database', [row.database])}>{row.database}</Link>
{:else}
{row.database}
{/if}
</div>
{/if}
{#if row.engine}
<div class="row">
<div>Engine:</div>
{#if mode == 'date'}
<Link onClick={() => doSetFilter('engine', [row.engine])}>{row.engine}</Link>
{:else}
{row.engine}
{/if}
</div>
{/if}
</div></svelte:fragment </div></svelte:fragment
> >
<svelte:fragment slot="2"> <svelte:fragment slot="2">

View File

@@ -96,6 +96,7 @@ const driver = {
client, client,
connectionType, connectionType,
database: conn.database, database: conn.database,
conid: conn.conid,
}; };
}, },
async close(dbhan) { async close(dbhan) {

View File

@@ -34,7 +34,8 @@ const drivers = driverBases.map(driverBase => ({
analyserClass: Analyser, analyserClass: Analyser,
async connect(props) { async connect(props) {
const { server, port, user, password, database, ssl, isReadOnly, forceRowsAsObjects, socketPath, authType } = props; const { conid, server, port, user, password, database, ssl, isReadOnly, forceRowsAsObjects, socketPath, authType } =
props;
let awsIamToken = null; let awsIamToken = null;
if (authType == 'awsIam') { if (authType == 'awsIam') {
awsIamToken = await authProxy.getAwsIamToken(props); awsIamToken = await authProxy.getAwsIamToken(props);
@@ -60,6 +61,7 @@ const drivers = driverBases.map(driverBase => ({
const dbhan = { const dbhan = {
client, client,
database, database,
conid,
}; };
if (isReadOnly) { if (isReadOnly) {
await this.query(dbhan, 'SET SESSION TRANSACTION READ ONLY'); await this.query(dbhan, 'SET SESSION TRANSACTION READ ONLY');
@@ -138,7 +140,7 @@ const drivers = driverBases.map(driverBase => ({
}; };
const handleError = error => { const handleError = error => {
logger.error(extractErrorLogData(error), 'DBGM-00200 Stream error'); logger.error(extractErrorLogData(error, this.getLogDbInfo(dbhan)), 'DBGM-00200 Stream error');
const { message } = error; const { message } = error;
options.info({ options.info({
message, message,

View File

@@ -78,6 +78,7 @@ const drivers = driverBases.map(driverBase => ({
async connect(props) { async connect(props) {
const { const {
conid,
engine, engine,
server, server,
port, port,
@@ -137,6 +138,7 @@ const drivers = driverBases.map(driverBase => ({
const dbhan = { const dbhan = {
client, client,
database, database,
conid,
}; };
const datatypes = await this.query(dbhan, `SELECT oid, typname FROM pg_type WHERE typname in ('geography')`); const datatypes = await this.query(dbhan, `SELECT oid, typname FROM pg_type WHERE typname in ('geography')`);
@@ -228,7 +230,7 @@ const drivers = driverBases.map(driverBase => ({
}); });
query.on('error', error => { query.on('error', error => {
logger.error(extractErrorLogData(error), 'DBGM-00201 Stream error'); logger.error(extractErrorLogData(error, this.getLogDbInfo(dbhan)), 'DBGM-00201 Stream error');
const { message, position, procName } = error; const { message, position, procName } = error;
let line = null; let line = null;
if (position) { if (position) {
@@ -382,7 +384,7 @@ const drivers = driverBases.map(driverBase => ({
const defaultSchemaRows = await this.query(dbhan, 'SELECT current_schema'); const defaultSchemaRows = await this.query(dbhan, 'SELECT current_schema');
const defaultSchema = defaultSchemaRows.rows[0]?.current_schema?.trim(); const defaultSchema = defaultSchemaRows.rows[0]?.current_schema?.trim();
logger.debug(`DBGM-00142 Loaded ${schemaRows.rows.length} postgres schemas`); logger.debug(this.getLogDbInfo(dbhan), `DBGM-00142 Loaded ${schemaRows.rows.length} postgres schemas`);
const schemas = schemaRows.rows.map(x => ({ const schemas = schemaRows.rows.map(x => ({
schemaName: x.schema_name, schemaName: x.schema_name,