pino JSON logging

This commit is contained in:
Jan Prochazka
2023-01-21 17:32:28 +01:00
parent dd230b008f
commit 4d93be61b5
47 changed files with 429 additions and 113 deletions

View File

@@ -2,6 +2,10 @@ const csv = require('csv');
const fs = require('fs');
const stream = require('stream');
const { getLogger } = global.DBGATE_TOOLS;
const logger = getLogger();
class CsvPrepareStream extends stream.Transform {
constructor({ header }) {
super({ objectMode: true });
@@ -23,7 +27,7 @@ class CsvPrepareStream extends stream.Transform {
}
async function writer({ fileName, encoding = 'utf-8', header = true, delimiter, quoted }) {
console.log(`Writing file ${fileName}`);
logger.info(`Writing file ${fileName}`);
const csvPrepare = new CsvPrepareStream({ header });
const csvStream = csv.stringify({ delimiter, quoted });
const fileStream = fs.createWriteStream(fileName, encoding);

View File

@@ -1,4 +1,8 @@
const ObjectId = require('mongodb').ObjectId;
const { getLogger } = global.DBGATE_TOOLS;
const logger = getLogger();
function createBulkInsertStream(driver, stream, pool, name, options) {
const collectionName = name.pureName;
@@ -27,11 +31,11 @@ function createBulkInsertStream(driver, stream, pool, name, options) {
writable.checkStructure = async () => {
if (options.dropIfExists) {
console.log(`Dropping collection ${collectionName}`);
logger.info(`Dropping collection ${collectionName}`);
await db.collection(collectionName).drop();
}
if (options.truncate) {
console.log(`Truncating collection ${collectionName}`);
logger.info(`Truncating collection ${collectionName}`);
await db.collection(collectionName).deleteMany({});
}
};

View File

@@ -5,6 +5,9 @@ const Analyser = require('./Analyser');
const mysql2 = require('mysql2');
const { createBulkInsertStreamBase, makeUniqueColumnNames } = require('dbgate-tools');
const { MySqlDumper } = require('antares-mysql-dumper');
const { getLogger } = global.DBGATE_TOOLS;
const logger = getLogger();
function extractColumns(fields) {
if (fields) {
@@ -111,7 +114,7 @@ const drivers = driverBases.map(driverBase => ({
};
const handleError = error => {
console.log('ERROR', error);
logger.error('Stream error', error);
const { message } = error;
options.info({
message,

View File

@@ -5,6 +5,9 @@ const driverBases = require('../frontend/drivers');
const Analyser = require('./Analyser');
const pg = require('pg');
const { createBulkInsertStreamBase, makeUniqueColumnNames } = require('dbgate-tools');
const { getLogger } = global.DBGATE_TOOLS;
const logger = getLogger();
pg.types.setTypeParser(1082, 'text', val => val); // date
pg.types.setTypeParser(1114, 'text', val => val); // timestamp without timezone
@@ -144,7 +147,7 @@ const drivers = driverBases.map(driverBase => ({
});
query.on('error', error => {
console.log('ERROR', error);
logger.error('Stream error', error);
const { message, position, procName } = error;
let line = null;
if (position) {

View File

@@ -4,6 +4,9 @@ const driverBase = require('../frontend/driver');
const Analyser = require('./Analyser');
const { splitQuery, sqliteSplitterOptions } = require('dbgate-query-splitter');
const { createBulkInsertStreamBase, makeUniqueColumnNames } = require('dbgate-tools');
const { getLogger } = global.DBGATE_TOOLS;
const logger = getLogger();
let Database;
@@ -103,7 +106,7 @@ const driver = {
try {
inTransaction();
} catch (error) {
console.log('ERROR', error);
logger.error('Stream error', error);
const { message, procName } = error;
options.info({
message,

View File

@@ -1,6 +1,9 @@
const fs = require('fs');
const stream = require('stream');
const NodeXmlStream = require('node-xml-stream-parser');
const { getLogger } = global.DBGATE_TOOLS;
const logger = getLogger();
class ParseStream extends stream.Transform {
constructor({ itemElementName }) {
@@ -56,7 +59,7 @@ class ParseStream extends stream.Transform {
}
async function reader({ fileName, encoding = 'utf-8', itemElementName }) {
console.log(`Reading file ${fileName}`);
logger.info(`Reading file ${fileName}`);
const fileStream = fs.createReadStream(fileName, encoding);
const parser = new ParseStream({ itemElementName });

View File

@@ -1,5 +1,8 @@
const fs = require('fs');
const stream = require('stream');
const { getLogger } = global.DBGATE_TOOLS;
const logger = getLogger();
function escapeXml(value) {
return value.replace(/[<>&'"]/g, function (c) {
@@ -67,7 +70,7 @@ class StringifyStream extends stream.Transform {
}
async function writer({ fileName, encoding = 'utf-8', itemElementName, rootElementName }) {
console.log(`Writing file ${fileName}`);
logger.info(`Writing file ${fileName}`);
const stringify = new StringifyStream({ itemElementName, rootElementName });
const fileStream = fs.createWriteStream(fileName, encoding);
stringify.pipe(fileStream);