mirror of https://github.com/DeNNiiInc/dbgate.git
synced 2026-04-20 17:06:01 +00:00
SYNC: Merge pull request #7 from dbgate/feature/applog
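The change is mechanical throughout: every logger call gains a stable DBGM-xxxxx message code, so log lines can be filtered and documented independently of their wording. A minimal sketch of that convention follows, assuming a pino-style logger; pino and the codedInfo helper are illustrative assumptions here, not dbgate's actual getLogger implementation:

const pino = require('pino');
const logger = pino();

// Hypothetical helper: prefixes a stable code to the message while keeping
// the (mergingObject, message) call style used throughout this diff.
function codedInfo(code, dataOrMsg, msg) {
  if (typeof dataOrMsg === 'string') {
    // e.g. codedInfo('DBGM-00047', `Creating directory ${dir}`)
    logger.info(`${code} ${dataOrMsg}`);
  } else {
    // e.g. codedInfo('DBGM-00048', { sql }, 'Execute query')
    logger.info(dataOrMsg, `${code} ${msg}`);
  }
}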
@@ -10,7 +10,7 @@ const logger = getLogger();
 function archiveWriter({ folderName, fileName }) {
   const dir = resolveArchiveFolder(folderName);
   if (!fs.existsSync(dir)) {
-    logger.info(`Creating directory ${dir}`);
+    logger.info(`DBGM-00047 Creating directory ${dir}`);
     fs.mkdirSync(dir);
   }
   const jsonlFile = path.join(dir, `${fileName}.jsonl`);
@@ -83,7 +83,7 @@ async function copyStream(input, output, options) {
   });
 }
 
-    logger.error(extractErrorLogData(err, { progressName }), 'Import/export job failed');
+    logger.error(extractErrorLogData(err, { progressName }), 'DBGM-00157 Import/export job failed');
     // throw err;
   }
 }
@@ -28,20 +28,20 @@ async function executeQuery({
   useTransaction,
 }) {
   if (!logScriptItems && !skipLogging) {
-    logger.info({ sql: getLimitedQuery(sql) }, `Execute query`);
+    logger.info({ sql: getLimitedQuery(sql) }, `DBGM-00048 Execute query`);
   }
 
   if (!driver) driver = requireEngineDriver(connection);
   const dbhan = systemConnection || (await connectUtility(driver, connection, 'script'));
 
   if (sqlFile) {
-    logger.debug(`Loading SQL file ${sqlFile}`);
+    logger.debug(`DBGM-00049 Loading SQL file ${sqlFile}`);
     sql = await fs.readFile(sqlFile, { encoding: 'utf-8' });
   }
 
   try {
     if (!skipLogging) {
-      logger.debug(`Running SQL query, length: ${sql.length}`);
+      logger.debug(`DBGM-00050 Running SQL query, length: ${sql.length}`);
     }
 
     await driver.script(dbhan, sql, { logScriptItems, useTransaction });
@@ -45,14 +45,14 @@ class ImportStream extends stream.Transform {
 }
 
 async function importDatabase({ connection = undefined, systemConnection = undefined, driver = undefined, inputFile }) {
-  logger.info(`Importing database`);
+  logger.info(`DBGM-00051 Importing database`);
 
   if (!driver) driver = requireEngineDriver(connection);
   const dbhan = systemConnection || (await connectUtility(driver, connection, 'write'));
   try {
-    logger.info(`Input file: ${inputFile}`);
+    logger.info(`DBGM-00052 Input file: ${inputFile}`);
     const downloadedFile = await download(inputFile);
-    logger.info(`Downloaded file: ${downloadedFile}`);
+    logger.info(`DBGM-00053 Downloaded file: ${downloadedFile}`);
 
     const fileStream = fs.createReadStream(downloadedFile, 'utf-8');
     const splittedStream = splitQueryStream(fileStream, {
@@ -42,7 +42,7 @@ class ParseStream extends stream.Transform {
  * @returns {Promise<readerType>} - reader object
  */
 async function jsonLinesReader({ fileName, encoding = 'utf-8', limitRows = undefined }) {
-  logger.info(`Reading file ${fileName}`);
+  logger.info(`DBGM-00054 Reading file ${fileName}`);
 
   const downloadedFile = await download(fileName);
 
@@ -33,7 +33,7 @@ class StringifyStream extends stream.Transform {
  * @returns {Promise<writerType>} - writer object
  */
 async function jsonLinesWriter({ fileName, encoding = 'utf-8', header = true }) {
-  logger.info(`Writing file ${fileName}`);
+  logger.info(`DBGM-00055 Writing file ${fileName}`);
   const stringify = new StringifyStream({ header });
   const fileStream = fs.createWriteStream(fileName, encoding);
   return [stringify, fileStream];
@@ -63,7 +63,7 @@ async function jsonReader({
   encoding = 'utf-8',
   limitRows = undefined,
 }) {
-  logger.info(`Reading file ${fileName}`);
+  logger.info(`DBGM-00056 Reading file ${fileName}`);
 
   const downloadedFile = await download(fileName);
   const fileStream = fs.createReadStream(
@@ -96,7 +96,7 @@ class StringifyStream extends stream.Transform {
  * @returns {Promise<writerType>} - writer object
  */
 async function jsonWriter({ fileName, jsonStyle, keyField = '_key', rootField, encoding = 'utf-8' }) {
-  logger.info(`Writing file ${fileName}`);
+  logger.info(`DBGM-00057 Writing file ${fileName}`);
   const stringify = new StringifyStream({ jsonStyle, keyField, rootField });
   const fileStream = fs.createWriteStream(fileName, encoding);
   return [stringify, fileStream];
@@ -6,13 +6,13 @@ const exportDbModel = require('../utility/exportDbModel');
 const logger = getLogger('analyseDb');
 
 async function loadDatabase({ connection = undefined, systemConnection = undefined, driver = undefined, outputDir }) {
-  logger.debug(`Analysing database`);
+  logger.debug(`DBGM-00058 Analysing database`);
 
   if (!driver) driver = requireEngineDriver(connection);
   const dbhan = systemConnection || (await connectUtility(driver, connection, 'read', { forceRowsAsObjects: true }));
   try {
     const dbInfo = await driver.analyseFull(dbhan);
-    logger.debug(`Analyse finished`);
+    logger.debug(`DBGM-00059 Analyse finished`);
 
     await exportDbModel(dbInfo, outputDir);
   } finally {
@@ -132,7 +132,7 @@ async function modifyJsonLinesReader({
   mergeKey = null,
   mergeMode = 'merge',
 }) {
-  logger.info(`Reading file ${fileName} with change set`);
+  logger.info(`DBGM-00060 Reading file ${fileName} with change set`);
 
   const fileStream = fs.createReadStream(
     fileName,
@@ -29,7 +29,7 @@ async function queryReader({
   // if (!sql && !json) {
   //   throw new Error('One of sql or json must be set');
   // }
-  logger.info({ sql: query || sql }, `Reading query`);
+  logger.info({ sql: query || sql }, `DBGM-00061 Reading query`);
   // else console.log(`Reading query ${JSON.stringify(json)}`);
 
   if (!driver) {
@@ -22,7 +22,7 @@ function requirePlugin(packageName, requiredPlugin = null) {
   if (requiredPlugin == null) {
     let module;
     const modulePath = getPluginBackendPath(packageName);
-    logger.info(`Loading module ${packageName} from ${modulePath}`);
+    logger.info(`DBGM-00062 Loading module ${packageName} from ${modulePath}`);
     try {
       // @ts-ignore
       module = __non_webpack_require__(modulePath);
@@ -11,7 +11,7 @@ async function runScript(func) {
     await func();
     process.exit(0);
   } catch (err) {
-    logger.error(extractErrorLogData(err), `Error running script`);
+    logger.error(extractErrorLogData(err), `DBGM-00158 Error running script`);
    process.exit(1);
   }
 }
@@ -41,7 +41,7 @@ class SqlizeStream extends stream.Transform {
 }
 
 async function sqlDataWriter({ fileName, dataName, driver, encoding = 'utf-8' }) {
-  logger.info(`Writing file ${fileName}`);
+  logger.info(`DBGM-00063 Writing file ${fileName}`);
   const stringify = new SqlizeStream({ fileName, dataName });
   const fileStream = fs.createWriteStream(fileName, encoding);
   return [stringify, fileStream];
@@ -23,7 +23,7 @@ async function tableReader({ connection, systemConnection, pureName, schemaName,
 
   if (driver.databaseEngineTypes.includes('document')) {
     // @ts-ignore
-    logger.info(`Reading collection ${fullNameToString(fullName)}`);
+    logger.info(`DBGM-00064 Reading collection ${fullNameToString(fullName)}`);
     // @ts-ignore
     return await driver.readQuery(dbhan, JSON.stringify(fullName));
   }
@@ -32,14 +32,14 @@ async function tableReader({ connection, systemConnection, pureName, schemaName,
   const query = `select * from ${quoteFullName(driver.dialect, fullName)}`;
   if (table) {
     // @ts-ignore
-    logger.info(`Reading table ${fullNameToString(table)}`);
+    logger.info(`DBGM-00065 Reading table ${fullNameToString(table)}`);
     // @ts-ignore
     return await driver.readQuery(dbhan, query, table);
   }
   const view = await driver.analyseSingleObject(dbhan, fullName, 'views');
   if (view) {
     // @ts-ignore
-    logger.info(`Reading view ${fullNameToString(view)}`);
+    logger.info(`DBGM-00066 Reading view ${fullNameToString(view)}`);
     // @ts-ignore
     return await driver.readQuery(dbhan, query, view);
   }
@@ -20,7 +20,7 @@ const logger = getLogger('tableWriter');
  * @returns {Promise<writerType>} - writer object
  */
 async function tableWriter({ connection, schemaName, pureName, driver, systemConnection, ...options }) {
-  logger.info(`Writing table ${fullNameToString({ schemaName, pureName })}`);
+  logger.info(`DBGM-00067 Writing table ${fullNameToString({ schemaName, pureName })}`);
 
   if (!driver) {
     driver = requireEngineDriver(connection);
@@ -52,14 +52,14 @@ function unzipDirectory(zipPath, outputDirectory) {
       readStream.on('end', () => zipFile.readEntry());
 
       writeStream.on('finish', () => {
-        logger.info(`Extracted "${entry.fileName}" → "${destPath}".`);
+        logger.info(`DBGM-00068 Extracted "${entry.fileName}" → "${destPath}".`);
         res();
       });
 
       writeStream.on('error', writeErr => {
         logger.error(
           extractErrorLogData(writeErr),
-          `Error extracting "${entry.fileName}" from "${zipPath}".`
+          `DBGM-00069 Error extracting "${entry.fileName}" from "${zipPath}".`
         );
         rej(writeErr);
       });
@@ -74,14 +74,14 @@ function unzipDirectory(zipPath, outputDirectory) {
     zipFile.on('end', () => {
       Promise.all(pending)
         .then(() => {
-          logger.info(`Archive "${zipPath}" fully extracted to "${outputDirectory}".`);
+          logger.info(`DBGM-00070 Archive "${zipPath}" fully extracted to "${outputDirectory}".`);
           resolve(true);
         })
         .catch(reject);
     });
 
     zipFile.on('error', err => {
-      logger.error(extractErrorLogData(err), `ZIP file error in ${zipPath}.`);
+      logger.error(extractErrorLogData(err), `DBGM-00071 ZIP file error in ${zipPath}.`);
       reject(err);
     });
   });
@@ -16,16 +16,16 @@ function zipDirectory(inputDirectory, outputFile) {
 
   // Listen for all archive data to be written
   output.on('close', () => {
-    logger.info(`ZIP file created (${archive.pointer()} total bytes)`);
+    logger.info(`DBGM-00072 ZIP file created (${archive.pointer()} total bytes)`);
     resolve();
   });
 
   archive.on('warning', err => {
-    logger.warn(extractErrorLogData(err), `Warning while creating ZIP: ${err.message}`);
+    logger.warn(extractErrorLogData(err), `DBGM-00073 Warning while creating ZIP: ${err.message}`);
   });
 
   archive.on('error', err => {
-    logger.error(extractErrorLogData(err), `Error while creating ZIP: ${err.message}`);
+    logger.error(extractErrorLogData(err), `DBGM-00074 Error while creating ZIP: ${err.message}`);
     reject(err);
   });
 
@@ -17,16 +17,16 @@ function zipDirectory(jsonDb, outputFile) {
 
   // Listen for all archive data to be written
   output.on('close', () => {
-    logger.info(`ZIP file created (${archive.pointer()} total bytes)`);
+    logger.info(`DBGM-00075 ZIP file created (${archive.pointer()} total bytes)`);
     resolve();
   });
 
   archive.on('warning', err => {
-    logger.warn(extractErrorLogData(err), `Warning while creating ZIP: ${err.message}`);
+    logger.warn(extractErrorLogData(err), `DBGM-00076 Warning while creating ZIP: ${err.message}`);
   });
 
   archive.on('error', err => {
-    logger.error(extractErrorLogData(err), `Error while creating ZIP: ${err.message}`);
+    logger.error(extractErrorLogData(err), `DBGM-00077 Error while creating ZIP: ${err.message}`);
     reject(err);
   });
 