PINO JSON logging

This commit is contained in:
Jan Prochazka
2023-01-21 17:32:28 +01:00
parent dd230b008f
commit 4d93be61b5
47 changed files with 429 additions and 113 deletions

View File

@@ -3,6 +3,9 @@ import _sortBy from 'lodash/sortBy';
import _groupBy from 'lodash/groupBy';
import _pick from 'lodash/pick';
import _compact from 'lodash/compact';
import { getLogger } from './getLogger';
const logger = getLogger();
const STRUCTURE_FIELDS = ['tables', 'collections', 'views', 'matviews', 'functions', 'procedures', 'triggers'];
@@ -107,7 +110,7 @@ export class DatabaseAnalyser {
this.modifications = structureModifications;
if (structureWithRowCounts) this.structure = structureWithRowCounts;
console.log('DB modifications detected:', this.modifications);
logger.info({ modifications: this.modifications }, 'DB modifications detected:');
return this.addEngineField(this.mergeAnalyseResult(await this._runAnalysis()));
}
@@ -304,7 +307,7 @@ export class DatabaseAnalyser {
try {
return await this.driver.query(this.pool, sql);
} catch (err) {
console.log('Error running analyser query', err.message);
logger.error('Error running analyser query', err);
return {
rows: [],
};

View File

@@ -8,10 +8,13 @@ import type {
ViewInfo,
} from 'dbgate-types';
import _flatten from 'lodash/flatten';
import _uniqBy from 'lodash/uniqBy'
import _uniqBy from 'lodash/uniqBy';
import { getLogger } from './getLogger';
import { SqlDumper } from './SqlDumper';
import { extendDatabaseInfo } from './structureTools';
const logger = getLogger();
interface SqlGeneratorOptions {
dropTables: boolean;
checkIfTableExists: boolean;
@@ -82,7 +85,7 @@ export class SqlGenerator {
}
private handleException = error => {
console.log('Unhandled error', error);
logger.error('Unhandled error', error);
this.isUnhandledException = true;
};

View File

@@ -1,6 +1,9 @@
import _intersection from 'lodash/intersection';
import { getLogger } from './getLogger';
import { prepareTableForImport } from './tableTransforms';
const logger = getLogger();
export function createBulkInsertStreamBase(driver, stream, pool, name, options): any {
const fullNameQuoted = name.schemaName
? `${driver.dialect.quoteIdentifier(name.schemaName)}.${driver.dialect.quoteIdentifier(name.pureName)}`
@@ -28,14 +31,13 @@ export function createBulkInsertStreamBase(driver, stream, pool, name, options):
let structure = await driver.analyseSingleTable(pool, name);
// console.log('ANALYSING', name, structure);
if (structure && options.dropIfExists) {
console.log(`Dropping table ${fullNameQuoted}`);
logger.info(`Dropping table ${fullNameQuoted}`);
await driver.script(pool, `DROP TABLE ${fullNameQuoted}`);
}
if (options.createIfNotExists && (!structure || options.dropIfExists)) {
console.log(`Creating table ${fullNameQuoted}`);
const dmp = driver.createDumper();
dmp.createTable(prepareTableForImport({ ...writable.structure, ...name }));
console.log(dmp.s);
logger.info({ sql: dmp.s }, `Creating table ${fullNameQuoted}`);
await driver.script(pool, dmp.s);
structure = await driver.analyseSingleTable(pool, name);
}

View File

@@ -0,0 +1,12 @@
import pino, { Logger } from 'pino';
// Optional override registered via setLogger(); remains undefined until the
// host application installs its own logger, so the type must admit undefined
// under strictNullChecks instead of claiming a Logger is always present.
let _logger: Logger | undefined;

// Fallback instance using pino's default (JSON-to-stdout) configuration.
const defaultLogger: Logger = pino();
/**
 * Installs the logger that getLogger() hands out to shared code.
 *
 * @param value - pino Logger instance to use module-wide from now on.
 */
export function setLogger(value: Logger): void {
  _logger = value;
}
/**
 * Returns the logger shared code should use: the instance registered via
 * setLogger(), or a default pino logger when none has been installed.
 *
 * @returns The active pino Logger.
 */
export function getLogger(): Logger {
  // `??` falls back only on null/undefined — a Logger object is always
  // truthy, so behavior is unchanged, but nullish coalescing states the
  // "not yet set" intent precisely.
  return _logger ?? defaultLogger;
}

View File

@@ -18,3 +18,4 @@ export * from './stringTools';
export * from './computeDiffRows';
export * from './preloadedRowsTools';
export * from './ScriptWriter';
export * from './getLogger';