Enhance binary data handling in Oracle driver and adjust dumper for byte array values

This commit is contained in:
Stela Augustinova
2025-11-06 13:08:50 +01:00
parent dca9ea24d7
commit 98f2b5dd08
2 changed files with 20 additions and 10 deletions

View File

@@ -37,6 +37,15 @@ function zipDataRow(rowArray, columns) {
return obj; return obj;
} }
/**
 * Replaces Buffer values in a result row with a JSON-serializable wrapper.
 * Mutates the given row in place and returns it.
 * @param {object} row - data row keyed by column name
 * @param {Array<{columnName: string}>} columns - column descriptors to inspect
 * @returns {object} the same row, with each Buffer value replaced by
 *   `{ $binary: { base64: <base64-encoded bytes> } }`
 */
function modifyRow(row, columns) {
  for (const { columnName } of columns) {
    const value = row[columnName];
    if (Buffer.isBuffer(value)) {
      row[columnName] = { $binary: { base64: value.toString('base64') } };
    }
  }
  return row;
}
let oracleClientInitialized = false; let oracleClientInitialized = false;
/** @type {import('dbgate-types').EngineDriver} */ /** @type {import('dbgate-types').EngineDriver} */
@@ -106,7 +115,7 @@ const driver = {
const res = await dbhan.client.execute(sql); const res = await dbhan.client.execute(sql);
try { try {
const columns = extractOracleColumns(res.metaData); const columns = extractOracleColumns(res.metaData);
return { rows: (res.rows || []).map(row => zipDataRow(row, columns)), columns }; return { rows: (res.rows || []).map(row => modifyRow(zipDataRow(row, columns), columns)), columns };
} catch (err) { } catch (err) {
return { return {
rows: [], rows: [],
@@ -134,7 +143,7 @@ const driver = {
if (!wasHeader) { if (!wasHeader) {
columns = extractOracleColumns(row); columns = extractOracleColumns(row);
if (columns && columns.length > 0) { if (columns && columns.length > 0) {
options.recordset(columns); options.recordset(columns, { engine: driverBase.engine });
} }
wasHeader = true; wasHeader = true;
} }
@@ -147,11 +156,11 @@ const driver = {
if (!wasHeader) { if (!wasHeader) {
columns = extractOracleColumns(row); columns = extractOracleColumns(row);
if (columns && columns.length > 0) { if (columns && columns.length > 0) {
options.recordset(columns); options.recordset(columns, { engine: driverBase.engine });
} }
wasHeader = true; wasHeader = true;
} }
options.row(zipDataRow(row, columns)); options.row(modifyRow(zipDataRow(row, columns), columns));
}); });
query.on('end', () => { query.on('end', () => {
@@ -214,9 +223,9 @@ const driver = {
if (rows && metaData) { if (rows && metaData) {
const columns = extractOracleColumns(metaData); const columns = extractOracleColumns(metaData);
options.recordset(columns); options.recordset(columns, { engine: driverBase.engine });
for (const row of rows) { for (const row of rows) {
options.row(zipDataRow(row, columns)); options.row(modifyRow(zipDataRow(row, columns), columns));
} }
} else if (rowsAffected) { } else if (rowsAffected) {
options.info({ options.info({
@@ -302,6 +311,7 @@ const driver = {
if (columns && columns.length > 0) { if (columns && columns.length > 0) {
pass.write({ pass.write({
__isStreamHeader: true, __isStreamHeader: true,
engine: driverBase.engine,
...(structure || { columns }), ...(structure || { columns }),
}); });
} }
@@ -310,7 +320,7 @@ const driver = {
}); });
query.on('data', row => { query.on('data', row => {
pass.write(zipDataRow(row, columns)); pass.write(modifyRow(zipDataRow(row, columns), columns));
}); });
query.on('end', () => { query.on('end', () => {

View File

@@ -136,9 +136,9 @@ class Dumper extends SqlDumper {
// else super.putValue(value); // else super.putValue(value);
// } // }
// putByteArrayValue(value) { putByteArrayValue(value) {
// this.putRaw(`e'\\\\x${arrayToHexString(value)}'`); this.putRaw(`HEXTORAW('${arrayToHexString(value)}')`);
// } }
putValue(value, dataType) { putValue(value, dataType) {
if (dataType?.toLowerCase() == 'timestamp') { if (dataType?.toLowerCase() == 'timestamp') {