SYNC: mongodb - correctly handle stream errors

This commit is contained in:
SPRINX0\prochazka
2025-03-05 18:09:17 +01:00
committed by Diflow
parent 2b7f4281c2
commit ba0eba7132
3 changed files with 28 additions and 19 deletions

View File

@@ -1,8 +1,8 @@
module.exports = { module.exports = {
// mysql: true, mysql: true,
// postgres: true, postgres: true,
// mssql: true, mssql: true,
// oracle: true, oracle: true,
// sqlite: true, sqlite: true,
mongo: true mongo: true
}; };

View File

@@ -184,7 +184,7 @@ describe('Import CSV - source error', () => {
}); });
describe('Import CSV - target error', () => { describe('Import CSV - target error', () => {
multiTest({ skipMongo: true }, (connectionName, databaseName, engine, options = {}) => { multiTest({}, (connectionName, databaseName, engine, options = {}) => {
cy.contains(connectionName).click(); cy.contains(connectionName).click();
if (databaseName) cy.contains(databaseName).click(); if (databaseName) cy.contains(databaseName).click();
cy.testid('ConnectionList_container') cy.testid('ConnectionList_container')
@@ -194,7 +194,7 @@ describe('Import CSV - target error', () => {
cy.get('input[type=file]').selectFile('cypress/fixtures/customers-20.csv', { force: true }); cy.get('input[type=file]').selectFile('cypress/fixtures/customers-20.csv', { force: true });
cy.contains('customers-20'); cy.contains('customers-20');
cy.testid('ImportExportConfigurator_targetName_customers-20').clear().type('"]`'); cy.testid('ImportExportConfigurator_targetName_customers-20').clear().type('system."]`');
cy.testid('ImportExportTab_executeButton').click(); cy.testid('ImportExportTab_executeButton').click();
cy.testid('ImportExportConfigurator_errorInfoIcon_customers-20').click(); cy.testid('ImportExportConfigurator_errorInfoIcon_customers-20').click();
cy.testid('ErrorMessageModal_message').should('be.visible'); cy.testid('ErrorMessageModal_message').should('be.visible');

View File

@@ -1,10 +1,9 @@
const ObjectId = require('mongodb').ObjectId; const ObjectId = require('mongodb').ObjectId;
const { getLogger } = global.DBGATE_PACKAGES['dbgate-tools']; const { getLogger, extractErrorLogData } = global.DBGATE_PACKAGES['dbgate-tools'];
const { EJSON } = require('bson'); const { EJSON } = require('bson');
const logger = getLogger('mongoBulkInsert'); const logger = getLogger('mongoBulkInsert');
function createBulkInsertStream(driver, stream, dbhan, name, options) { function createBulkInsertStream(driver, stream, dbhan, name, options) {
const collectionName = name.pureName; const collectionName = name.pureName;
const db = dbhan.getDatabase(); const db = dbhan.getDatabase();
@@ -31,21 +30,31 @@ function createBulkInsertStream(driver, stream, dbhan, name, options) {
}; };
writable.checkStructure = async () => { writable.checkStructure = async () => {
if (options.dropIfExists) { try {
logger.info(`Dropping collection ${collectionName}`); if (options.dropIfExists) {
await db.collection(collectionName).drop(); logger.info(`Dropping collection ${collectionName}`);
} await db.collection(collectionName).drop();
if (options.truncate) { }
logger.info(`Truncating collection ${collectionName}`); if (options.truncate) {
await db.collection(collectionName).deleteMany({}); logger.info(`Truncating collection ${collectionName}`);
await db.collection(collectionName).deleteMany({});
}
} catch (err) {
logger.error(extractErrorLogData(err), 'Error during preparing mongo bulk insert collection, stopped');
writable.destroy(err);
} }
}; };
writable.send = async () => { writable.send = async () => {
const rows = writable.buffer; try {
writable.buffer = []; const rows = writable.buffer;
writable.buffer = [];
await db.collection(collectionName).insertMany(rows); await db.collection(collectionName).insertMany(rows);
} catch (err) {
logger.error(extractErrorLogData(err), 'Error bulk insert collection, stopped');
writable.destroy(err);
}
}; };
writable.sendIfFull = async () => { writable.sendIfFull = async () => {