Merge pull request #1258 from dbgate/feature/postgresql-export-bytea

Feature/postgresql export bytea
This commit is contained in:
Jan Prochazka
2025-11-13 15:27:28 +01:00
committed by GitHub
24 changed files with 277 additions and 74 deletions

View File

@@ -49,6 +49,32 @@ class StreamHandler {
} }
} }
// Stream handler used by the binary round-trip tests: collects every streamed
// row, optionally asserts each row against an expected value (via jest's
// `expect`), and settles the wrapping Promise when the stream finishes.
class BinaryTestStreamHandler {
  constructor(resolve, reject, expectedValue) {
    this.resolve = resolve;
    this.reject = reject;
    this.expectedValue = expectedValue;
    this.rowsReceived = [];
  }
  // Called once per streamed row; a failed expectation rejects the Promise.
  row(row) {
    this.rowsReceived.push(row);
    if (!this.expectedValue) return;
    try {
      expect(row).toEqual(this.expectedValue);
    } catch (error) {
      this.reject(error);
    }
  }
  // Column metadata is not needed by these tests.
  recordset(columns) {}
  // End of stream: resolve with everything received so far.
  done(result) {
    this.resolve(this.rowsReceived);
  }
  // Informational driver messages are ignored.
  info(msg) {}
}
function executeStreamItem(driver, conn, sql) { function executeStreamItem(driver, conn, sql) {
return new Promise(resolve => { return new Promise(resolve => {
const handler = new StreamHandler(resolve); const handler = new StreamHandler(resolve);
@@ -223,4 +249,51 @@ describe('Query', () => {
expect(row[keys[0]] == 1).toBeTruthy(); expect(row[keys[0]] == 1).toBeTruthy();
}) })
); );
// Round-trips a binary value through each engine that declares a binaryDataType:
// 1) create a table with a binary column, 2) insert a {$binary:{base64}} value via
// the SQL dumper's %v placeholder, 3) read it back through query(), readQuery()
// and stream(), expecting the extended-JSON $binary shape every time.
test.each(engines.filter(x => x.binaryDataType).map(engine => [engine.label, engine]))(
'Binary - %s',
testWrapper(async (dbhan, driver, engine) => {
await runCommandOnDriver(dbhan, driver, dmp =>
dmp.createTable({
pureName: 't1',
columns: [
{ columnName: 'id', dataType: 'int', notNull: true, autoIncrement: true },
// engine-specific binary type, e.g. 'bytea' (postgres), 'blob' (mysql/sqlite/oracle)
{ columnName: 'val', dataType: engine.binaryDataType },
],
primaryKey: {
columns: [{ columnName: 'id' }],
},
})
);
const structure = await driver.analyseFull(dbhan);
// NOTE(review): `table` is never used below — presumably a leftover sanity
// lookup; consider asserting on it or removing the analyseFull/find pair.
const table = structure.tables.find(x => x.pureName == 't1');
const dmp = driver.createDumper();
// %v serializes the extended-JSON binary object into engine-specific SQL.
dmp.putCmd("INSERT INTO ~t1 (~val) VALUES (%v)", {
$binary: { base64: 'iVBORw0KWgo=' },
});
await driver.query(dbhan, dmp.s, {discardResult: true});
const dmp2 = driver.createDumper();
dmp2.put('SELECT ~val FROM ~t1');
// Path 1: plain query() must return the $binary shape.
const res = await driver.query(dbhan, dmp2.s);
const row = res.rows[0];
const keys = Object.keys(row);
expect(keys.length).toEqual(1);
expect(row[keys[0]]).toEqual({$binary: {base64: 'iVBORw0KWgo='}});
// Path 2: readQuery() (async-iterable stream) must return the same shape.
const res2 = await driver.readQuery(dbhan, dmp2.s);
const rows = await Array.fromAsync(res2);
const rowsVal = rows.filter(r => r.val != null);
expect(rowsVal.length).toEqual(1);
expect(rowsVal[0].val).toEqual({$binary: {base64: 'iVBORw0KWgo='}});
// Path 3: stream() with a handler; BinaryTestStreamHandler asserts each row.
const res3 = await new Promise((resolve, reject) => {
const handler = new BinaryTestStreamHandler(resolve, reject, {val: {$binary: {base64: 'iVBORw0KWgo='}}});
driver.stream(dbhan, dmp2.s, handler);
});
})
);
}); });

View File

@@ -44,6 +44,7 @@ const mysqlEngine = {
supportRenameSqlObject: false, supportRenameSqlObject: false,
dbSnapshotBySeconds: true, dbSnapshotBySeconds: true,
dumpFile: 'data/chinook-mysql.sql', dumpFile: 'data/chinook-mysql.sql',
binaryDataType: 'blob',
dumpChecks: [ dumpChecks: [
{ {
sql: 'select count(*) as res from genre', sql: 'select count(*) as res from genre',
@@ -216,6 +217,7 @@ const postgreSqlEngine = {
supportSchemas: true, supportSchemas: true,
supportRenameSqlObject: true, supportRenameSqlObject: true,
defaultSchemaName: 'public', defaultSchemaName: 'public',
binaryDataType: 'bytea',
dumpFile: 'data/chinook-postgre.sql', dumpFile: 'data/chinook-postgre.sql',
dumpChecks: [ dumpChecks: [
{ {
@@ -446,6 +448,7 @@ const sqlServerEngine = {
supportTableComments: true, supportTableComments: true,
supportColumnComments: true, supportColumnComments: true,
// skipSeparateSchemas: true, // skipSeparateSchemas: true,
binaryDataType: 'varbinary(100)',
triggers: [ triggers: [
{ {
testName: 'triggers before each row', testName: 'triggers before each row',
@@ -506,6 +509,7 @@ const sqliteEngine = {
}, },
}, },
], ],
binaryDataType: 'blob',
}; };
const libsqlFileEngine = { const libsqlFileEngine = {
@@ -619,6 +623,7 @@ const oracleEngine = {
}, },
}, },
], ],
binaryDataType: 'blob',
}; };
/** @type {import('dbgate-types').TestEngineInfo} */ /** @type {import('dbgate-types').TestEngineInfo} */
@@ -756,14 +761,14 @@ const enginesOnLocal = [
// mariaDbEngine, // mariaDbEngine,
//postgreSqlEngine, //postgreSqlEngine,
//sqlServerEngine, //sqlServerEngine,
// sqliteEngine, sqliteEngine,
// cockroachDbEngine, // cockroachDbEngine,
// clickhouseEngine, // clickhouseEngine,
// libsqlFileEngine, // libsqlFileEngine,
// libsqlWsEngine, // libsqlWsEngine,
//oracleEngine, //oracleEngine,
// duckdbEngine, // duckdbEngine,
firebirdEngine, //firebirdEngine,
]; ];
/** @type {import('dbgate-types').TestEngineInfo[] & Record<string, import('dbgate-types').TestEngineInfo>} */ /** @type {import('dbgate-types').TestEngineInfo[] & Record<string, import('dbgate-types').TestEngineInfo>} */

View File

@@ -17,7 +17,7 @@ class QueryStreamTableWriter {
this.started = new Date().getTime(); this.started = new Date().getTime();
} }
initializeFromQuery(structure, resultIndex, chartDefinition, autoDetectCharts = false) { initializeFromQuery(structure, resultIndex, chartDefinition, autoDetectCharts = false, options = {}) {
this.jslid = crypto.randomUUID(); this.jslid = crypto.randomUUID();
this.currentFile = path.join(jsldir(), `${this.jslid}.jsonl`); this.currentFile = path.join(jsldir(), `${this.jslid}.jsonl`);
fs.writeFileSync( fs.writeFileSync(
@@ -25,6 +25,7 @@ class QueryStreamTableWriter {
JSON.stringify({ JSON.stringify({
...structure, ...structure,
__isStreamHeader: true, __isStreamHeader: true,
...options
}) + '\n' }) + '\n'
); );
this.currentStream = fs.createWriteStream(this.currentFile, { flags: 'a' }); this.currentStream = fs.createWriteStream(this.currentFile, { flags: 'a' });
@@ -179,7 +180,7 @@ class StreamHandler {
process.send({ msgtype: 'changedCurrentDatabase', database, sesid: this.sesid }); process.send({ msgtype: 'changedCurrentDatabase', database, sesid: this.sesid });
} }
recordset(columns) { recordset(columns, options) {
if (this.rowsLimitOverflow) { if (this.rowsLimitOverflow) {
return; return;
} }
@@ -189,7 +190,8 @@ class StreamHandler {
Array.isArray(columns) ? { columns } : columns, Array.isArray(columns) ? { columns } : columns,
this.queryStreamInfoHolder.resultIndex, this.queryStreamInfoHolder.resultIndex,
this.frontMatter?.[`chart-${this.queryStreamInfoHolder.resultIndex + 1}`], this.frontMatter?.[`chart-${this.queryStreamInfoHolder.resultIndex + 1}`],
this.autoDetectCharts this.autoDetectCharts,
options
); );
this.queryStreamInfoHolder.resultIndex += 1; this.queryStreamInfoHolder.resultIndex += 1;
this.rowCounter = 0; this.rowCounter = 0;

View File

@@ -1,4 +1,4 @@
import { arrayToHexString, evalFilterBehaviour, isTypeDateTime } from 'dbgate-tools'; import { arrayToHexString, base64ToHex, evalFilterBehaviour, isTypeDateTime } from 'dbgate-tools';
import { format, toDate } from 'date-fns'; import { format, toDate } from 'date-fns';
import _isString from 'lodash/isString'; import _isString from 'lodash/isString';
import _cloneDeepWith from 'lodash/cloneDeepWith'; import _cloneDeepWith from 'lodash/cloneDeepWith';
@@ -24,7 +24,9 @@ export function getFilterValueExpression(value, dataType?) {
if (value.type == 'Buffer' && Array.isArray(value.data)) { if (value.type == 'Buffer' && Array.isArray(value.data)) {
return '0x' + arrayToHexString(value.data); return '0x' + arrayToHexString(value.data);
} }
if (value?.$binary?.base64) {
return base64ToHex(value.$binary.base64);
}
return `="${value}"`; return `="${value}"`;
} }

View File

@@ -2,7 +2,7 @@ import P from 'parsimmon';
import moment from 'moment'; import moment from 'moment';
import { Condition } from 'dbgate-sqltree'; import { Condition } from 'dbgate-sqltree';
import { interpretEscapes, token, word, whitespace } from './common'; import { interpretEscapes, token, word, whitespace } from './common';
import { hexStringToArray, parseNumberSafe } from 'dbgate-tools'; import { hexToBase64, parseNumberSafe } from 'dbgate-tools';
import { FilterBehaviour, TransformType } from 'dbgate-types'; import { FilterBehaviour, TransformType } from 'dbgate-types';
const binaryCondition = const binaryCondition =
@@ -385,10 +385,7 @@ const createParser = (filterBehaviour: FilterBehaviour) => {
hexstring: () => hexstring: () =>
token(P.regexp(/0x(([0-9a-fA-F][0-9a-fA-F])+)/, 1)) token(P.regexp(/0x(([0-9a-fA-F][0-9a-fA-F])+)/, 1))
.map(x => ({ .map(x => ({ $binary: { base64: hexToBase64(x) } }))
type: 'Buffer',
data: hexStringToArray(x),
}))
.desc('hex string'), .desc('hex string'),
noQuotedString: () => P.regexp(/[^\s^,^'^"]+/).desc('string unquoted'), noQuotedString: () => P.regexp(/[^\s^,^'^"]+/).desc('string unquoted'),

View File

@@ -78,6 +78,14 @@ export class SqlDumper implements AlterProcessor {
else if (_isNumber(value)) this.putRaw(value.toString()); else if (_isNumber(value)) this.putRaw(value.toString());
else if (_isDate(value)) this.putStringValue(new Date(value).toISOString()); else if (_isDate(value)) this.putStringValue(new Date(value).toISOString());
else if (value?.type == 'Buffer' && _isArray(value?.data)) this.putByteArrayValue(value?.data); else if (value?.type == 'Buffer' && _isArray(value?.data)) this.putByteArrayValue(value?.data);
else if (value?.$binary?.base64) {
const binary = atob(value.$binary.base64);
const bytes = new Array(binary.length);
for (let i = 0; i < binary.length; i++) {
bytes[i] = binary.charCodeAt(i);
}
this.putByteArrayValue(bytes);
}
else if (value?.$bigint) this.putRaw(value?.$bigint); else if (value?.$bigint) this.putRaw(value?.$bigint);
else if (_isPlainObject(value) || _isArray(value)) this.putStringValue(JSON.stringify(value)); else if (_isPlainObject(value) || _isArray(value)) this.putStringValue(JSON.stringify(value));
else this.put('^null'); else this.put('^null');

View File

@@ -47,6 +47,7 @@ export const mongoFilterBehaviour: FilterBehaviour = {
allowStringToken: true, allowStringToken: true,
allowNumberDualTesting: true, allowNumberDualTesting: true,
allowObjectIdTesting: true, allowObjectIdTesting: true,
allowHexString: true,
}; };
export const evalFilterBehaviour: FilterBehaviour = { export const evalFilterBehaviour: FilterBehaviour = {

View File

@@ -43,6 +43,19 @@ export function hexStringToArray(inputString) {
return res; return res;
} }
/**
 * Converts a base64 string to an uppercase hex literal prefixed with '0x'.
 * Works in both browser and Node (>=16) via the global atob().
 * @param {string} base64String - base64-encoded bytes
 * @returns {string} e.g. 'QUI=' -> '0x4142'; '' -> '0x'
 */
export function base64ToHex(base64String) {
  const decoded = atob(base64String);
  let hex = '';
  for (let i = 0; i < decoded.length; i++) {
    hex += decoded.charCodeAt(i).toString(16).padStart(2, '0');
  }
  return '0x' + hex.toUpperCase();
}
/**
 * Converts a hex string (without '0x' prefix) to base64.
 * Fixed: an empty input previously threw a TypeError because
 * ''.match(/.{1,2}/g) returns null; it now yields ''.
 * @param {string} hexString - hex digit pairs, e.g. '4142'
 * @returns {string} base64 encoding of the decoded bytes
 */
export function hexToBase64(hexString) {
  const pairs = hexString.match(/.{1,2}/g) ?? [];
  const binaryString = pairs.map(byte => String.fromCharCode(parseInt(byte, 16))).join('');
  return btoa(binaryString);
}
export function parseCellValue(value, editorTypes?: DataEditorTypesBehaviour) { export function parseCellValue(value, editorTypes?: DataEditorTypesBehaviour) {
if (!_isString(value)) return value; if (!_isString(value)) return value;
@@ -54,9 +67,10 @@ export function parseCellValue(value, editorTypes?: DataEditorTypesBehaviour) {
const mHex = value.match(/^0x([0-9a-fA-F][0-9a-fA-F])+$/); const mHex = value.match(/^0x([0-9a-fA-F][0-9a-fA-F])+$/);
if (mHex) { if (mHex) {
return { return {
type: 'Buffer', $binary: {
data: hexStringToArray(value.substring(2)), base64: hexToBase64(value.substring(2))
}; }
}
} }
} }
@@ -230,11 +244,19 @@ export function stringifyCellValue(
if (value === true) return { value: 'true', gridStyle: 'valueCellStyle' }; if (value === true) return { value: 'true', gridStyle: 'valueCellStyle' };
if (value === false) return { value: 'false', gridStyle: 'valueCellStyle' }; if (value === false) return { value: 'false', gridStyle: 'valueCellStyle' };
if (value?.$binary?.base64) {
return {
value: base64ToHex(value.$binary.base64),
gridStyle: 'valueCellStyle',
};
}
if (editorTypes?.parseHexAsBuffer) { if (editorTypes?.parseHexAsBuffer) {
if (value?.type == 'Buffer' && _isArray(value.data)) { // if (value?.type == 'Buffer' && _isArray(value.data)) {
return { value: '0x' + arrayToHexString(value.data), gridStyle: 'valueCellStyle' }; // return { value: '0x' + arrayToHexString(value.data), gridStyle: 'valueCellStyle' };
} // }
} }
if (editorTypes?.parseObjectIdAsDollar) { if (editorTypes?.parseObjectIdAsDollar) {
if (value?.$oid) { if (value?.$oid) {
switch (intent) { switch (intent) {
@@ -482,6 +504,9 @@ export function getAsImageSrc(obj) {
if (obj?.type == 'Buffer' && _isArray(obj?.data)) { if (obj?.type == 'Buffer' && _isArray(obj?.data)) {
return `data:image/png;base64, ${arrayBufferToBase64(obj?.data)}`; return `data:image/png;base64, ${arrayBufferToBase64(obj?.data)}`;
} }
if (obj?.$binary?.base64) {
return `data:image/png;base64, ${obj.$binary.base64}`;
}
if (_isString(obj) && (obj.startsWith('http://') || obj.startsWith('https://'))) { if (_isString(obj) && (obj.startsWith('http://') || obj.startsWith('https://'))) {
return obj; return obj;

View File

@@ -96,4 +96,6 @@ export type TestEngineInfo = {
}>; }>;
objects?: Array<TestObjectInfo>; objects?: Array<TestObjectInfo>;
binaryDataType?: string;
}; };

View File

@@ -10,6 +10,9 @@
if (value?.type == 'Buffer' && _.isArray(value?.data)) { if (value?.type == 'Buffer' && _.isArray(value?.data)) {
return 'data:image/png;base64, ' + btoa(String.fromCharCode.apply(null, value?.data)); return 'data:image/png;base64, ' + btoa(String.fromCharCode.apply(null, value?.data));
} }
if (value?.$binary?.base64) {
return 'data:image/png;base64, ' + value.$binary.base64;
}
return null; return null;
} catch (err) { } catch (err) {
console.log('Error showing picture', err); console.log('Error showing picture', err);

View File

@@ -361,6 +361,7 @@
detectSqlFilterBehaviour, detectSqlFilterBehaviour,
stringifyCellValue, stringifyCellValue,
shouldOpenMultilineDialog, shouldOpenMultilineDialog,
base64ToHex,
} from 'dbgate-tools'; } from 'dbgate-tools';
import { getContext, onDestroy } from 'svelte'; import { getContext, onDestroy } from 'svelte';
import _, { map } from 'lodash'; import _, { map } from 'lodash';
@@ -758,7 +759,7 @@
export function saveCellToFileEnabled() { export function saveCellToFileEnabled() {
const value = getSelectedExportableCell(); const value = getSelectedExportableCell();
return _.isString(value) || (value?.type == 'Buffer' && _.isArray(value?.data)); return _.isString(value) || (value?.type == 'Buffer' && _.isArray(value?.data)) || (value?.$binary?.base64);
} }
export async function saveCellToFile() { export async function saveCellToFile() {
@@ -771,6 +772,8 @@
fs.promises.writeFile(file, value); fs.promises.writeFile(file, value);
} else if (value?.type == 'Buffer' && _.isArray(value?.data)) { } else if (value?.type == 'Buffer' && _.isArray(value?.data)) {
fs.promises.writeFile(file, window['Buffer'].from(value.data)); fs.promises.writeFile(file, window['Buffer'].from(value.data));
} else if (value?.$binary?.base64) {
fs.promises.writeFile(file, window['Buffer'].from(value.$binary.base64, 'base64'));
} }
} }
} }
@@ -796,8 +799,9 @@
isText isText
? data ? data
: { : {
type: 'Buffer', $binary: {
data: [...data], base64: data.toString('base64'),
},
} }
); );
} }

View File

@@ -15,6 +15,7 @@
import _ from 'lodash'; import _ from 'lodash';
import { apiCall } from '../utility/api'; import { apiCall } from '../utility/api';
import ErrorInfo from '../elements/ErrorInfo.svelte'; import ErrorInfo from '../elements/ErrorInfo.svelte';
import { base64ToHex } from 'dbgate-tools';
import { _t } from '../translations'; import { _t } from '../translations';
export let onConfirm; export let onConfirm;
@@ -113,7 +114,7 @@
{ {
fieldName: 'value', fieldName: 'value',
header: _t('dataGrid.value', { defaultMessage: 'Value' }), header: _t('dataGrid.value', { defaultMessage: 'Value' }),
formatter: row => (row.value == null ? '(NULL)' : row.value), formatter: row => (row.value == null ? '(NULL)' : row.value?.$binary?.base64 ? base64ToHex(row.value.$binary.base64) : row.value),
}, },
]} ]}
> >

View File

@@ -15,6 +15,9 @@
if (force && value?.type == 'Buffer' && _.isArray(value.data)) { if (force && value?.type == 'Buffer' && _.isArray(value.data)) {
return String.fromCharCode.apply(String, value.data); return String.fromCharCode.apply(String, value.data);
} }
else if (force && value?.$binary?.base64) {
return atob(value.$binary.base64);
}
return stringifyCellValue(value, 'gridCellIntent').value; return stringifyCellValue(value, 'gridCellIntent').value;
} }
</script> </script>

View File

@@ -51,6 +51,10 @@ function findArrayResult(resValue) {
return null; return null;
} }
// Shim for the Mongo shell's BinData(subType, base64) constructor so user
// scripts evaluated by this driver can build binary values. The subtype is
// accepted for shell compatibility but ignored; a Node Buffer is returned.
function BinData(_subType, base64) {
  const bytes = Buffer.from(base64, 'base64');
  return bytes;
}
async function getScriptableDb(dbhan) { async function getScriptableDb(dbhan) {
const db = dbhan.getDatabase(); const db = dbhan.getDatabase();
db.getCollection = (name) => db.collection(name); db.getCollection = (name) => db.collection(name);
@@ -156,9 +160,9 @@ const driver = {
// return printable; // return printable;
// } // }
let func; let func;
func = eval(`(db,ObjectId) => ${sql}`); func = eval(`(db,ObjectId,BinData) => ${sql}`);
const db = await getScriptableDb(dbhan); const db = await getScriptableDb(dbhan);
const res = func(db, ObjectId.createFromHexString); const res = func(db, ObjectId.createFromHexString, BinData);
if (isPromise(res)) await res; if (isPromise(res)) await res;
}, },
async operation(dbhan, operation, options) { async operation(dbhan, operation, options) {
@@ -285,7 +289,7 @@ const driver = {
} else { } else {
let func; let func;
try { try {
func = eval(`(db,ObjectId) => ${sql}`); func = eval(`(db,ObjectId,BinData) => ${sql}`);
} catch (err) { } catch (err) {
options.info({ options.info({
message: 'Error compiling expression: ' + err.message, message: 'Error compiling expression: ' + err.message,
@@ -299,7 +303,7 @@ const driver = {
let exprValue; let exprValue;
try { try {
exprValue = func(db, ObjectId.createFromHexString); exprValue = func(db, ObjectId.createFromHexString, BinData);
} catch (err) { } catch (err) {
options.info({ options.info({
message: 'Error evaluating expression: ' + err.message, message: 'Error evaluating expression: ' + err.message,
@@ -411,9 +415,9 @@ const driver = {
// highWaterMark: 100, // highWaterMark: 100,
// }); // });
func = eval(`(db,ObjectId) => ${sql}`); func = eval(`(db,ObjectId,BinData) => ${sql}`);
const db = await getScriptableDb(dbhan); const db = await getScriptableDb(dbhan);
exprValue = func(db, ObjectId.createFromHexString); exprValue = func(db, ObjectId.createFromHexString, BinData);
const pass = new stream.PassThrough({ const pass = new stream.PassThrough({
objectMode: true, objectMode: true,

View File

@@ -15,7 +15,10 @@ function mongoReplacer(key, value) {
function jsonStringifyWithObjectId(obj) { function jsonStringifyWithObjectId(obj) {
return JSON.stringify(obj, mongoReplacer, 2) return JSON.stringify(obj, mongoReplacer, 2)
.replace(/\{\s*\"\$oid\"\s*\:\s*\"([0-9a-f]+)\"\s*\}/g, (m, id) => `ObjectId("${id}")`) .replace(/\{\s*\"\$oid\"\s*\:\s*\"([0-9a-f]+)\"\s*\}/g, (m, id) => `ObjectId("${id}")`)
.replace(/\{\s*\"\$bigint\"\s*\:\s*\"([0-9]+)\"\s*\}/g, (m, num) => `${num}n`); .replace(/\{\s*\"\$bigint\"\s*\:\s*\"([0-9]+)\"\s*\}/g, (m, num) => `${num}n`)
.replace(/\{\s*"\$binary"\s*:\s*\{\s*"base64"\s*:\s*"([^"]+)"(?:\s*,\s*"subType"\s*:\s*"([0-9a-fA-F]{2})")?\s*\}\s*\}/g, (m, base64, subType) => {
return `BinData(${parseInt(subType || "00", 16)}, "${base64}")`;
});
} }
/** @type {import('dbgate-types').SqlDialect} */ /** @type {import('dbgate-types').SqlDialect} */
@@ -129,7 +132,7 @@ const driver = {
getCollectionExportQueryScript(collection, condition, sort) { getCollectionExportQueryScript(collection, condition, sort) {
return `db.getCollection('${collection}') return `db.getCollection('${collection}')
.find(${JSON.stringify(convertToMongoCondition(condition) || {})}) .find(${jsonStringifyWithObjectId(convertToMongoCondition(condition) || {})})
.sort(${JSON.stringify(convertToMongoSort(sort) || {})})`; .sort(${JSON.stringify(convertToMongoSort(sort) || {})})`;
}, },
getCollectionExportQueryJson(collection, condition, sort) { getCollectionExportQueryJson(collection, condition, sort) {
@@ -148,6 +151,7 @@ const driver = {
parseJsonObject: true, parseJsonObject: true,
parseObjectIdAsDollar: true, parseObjectIdAsDollar: true,
parseDateAsDollar: true, parseDateAsDollar: true,
parseHexAsBuffer: true,
explicitDataType: true, explicitDataType: true,
supportNumberType: true, supportNumberType: true,

View File

@@ -24,6 +24,15 @@ function extractTediousColumns(columns, addDriverNativeColumn = false) {
return res; return res;
} }
/**
 * Rewrites Buffer-valued cells of a result row into the extended-JSON binary
 * shape ({ $binary: { base64 } }) used across dbgate drivers.
 * Mutates and returns the same row object.
 * Fixed: dropped the redundant Buffer.from() copy — the value is already
 * known to be a Buffer (Buffer.isBuffer guard), matching the oracle driver's
 * variant of this helper.
 * @param {Record<string, any>} row - row keyed by column name
 * @param {Array<{columnName: string}>} columns - result-set column descriptors
 * @returns {Record<string, any>} the mutated input row
 */
function modifyRow(row, columns) {
  columns.forEach((col) => {
    const value = row[col.columnName];
    if (Buffer.isBuffer(value)) {
      row[col.columnName] = { $binary: { base64: value.toString('base64') } };
    }
  });
  return row;
}
async function getDefaultAzureSqlToken() { async function getDefaultAzureSqlToken() {
const credential = new ManagedIdentityCredential(); const credential = new ManagedIdentityCredential();
const tokenResponse = await credential.getToken('https://database.windows.net/.default'); const tokenResponse = await credential.getToken('https://database.windows.net/.default');
@@ -125,9 +134,12 @@ async function tediousQueryCore(dbhan, sql, options) {
}); });
request.on('row', function (columns) { request.on('row', function (columns) {
result.rows.push( result.rows.push(
modifyRow(
_.zipObject( _.zipObject(
result.columns.map(x => x.columnName), result.columns.map(x => x.columnName),
columns.map(x => x.value) columns.map(x => x.value)
),
result.columns
) )
); );
}); });
@@ -152,13 +164,17 @@ async function tediousReadQuery(dbhan, sql, structure) {
currentColumns = extractTediousColumns(columns); currentColumns = extractTediousColumns(columns);
pass.write({ pass.write({
__isStreamHeader: true, __isStreamHeader: true,
engine: 'mssql@dbgate-plugin-mssql',
...(structure || { columns: currentColumns }), ...(structure || { columns: currentColumns }),
}); });
}); });
request.on('row', function (columns) { request.on('row', function (columns) {
const row = _.zipObject( const row = modifyRow(
_.zipObject(
currentColumns.map(x => x.columnName), currentColumns.map(x => x.columnName),
columns.map(x => x.value) columns.map(x => x.value)
),
currentColumns
); );
pass.write(row); pass.write(row);
}); });
@@ -216,12 +232,15 @@ async function tediousStream(dbhan, sql, options) {
}); });
request.on('columnMetadata', function (columns) { request.on('columnMetadata', function (columns) {
currentColumns = extractTediousColumns(columns); currentColumns = extractTediousColumns(columns);
options.recordset(currentColumns); options.recordset(currentColumns, { engine: 'mssql@dbgate-plugin-mssql' });
}); });
request.on('row', function (columns) { request.on('row', function (columns) {
const row = _.zipObject( const row = modifyRow(
_.zipObject(
currentColumns.map(x => x.columnName), currentColumns.map(x => x.columnName),
columns.map(x => x.value) columns.map(x => x.value)
),
currentColumns
); );
options.row(row); options.row(row);
skipAffectedMessage = true; skipAffectedMessage = true;

View File

@@ -23,6 +23,15 @@ function extractColumns(fields) {
return null; return null;
} }
/**
 * Rewrites Buffer-valued cells of a result row into the extended-JSON binary
 * shape ({ $binary: { base64 } }) used across dbgate drivers.
 * Mutates and returns the same row object.
 * Fixed: dropped the redundant Buffer.from() copy — the value is already
 * known to be a Buffer (Buffer.isBuffer guard), matching the oracle driver's
 * variant of this helper.
 * @param {Record<string, any>} row - row keyed by column name
 * @param {Array<{columnName: string}>} columns - result-set column descriptors
 * @returns {Record<string, any>} the mutated input row
 */
function modifyRow(row, columns) {
  columns.forEach((col) => {
    const value = row[col.columnName];
    if (Buffer.isBuffer(value)) {
      row[col.columnName] = { $binary: { base64: value.toString('base64') } };
    }
  });
  return row;
}
function zipDataRow(rowArray, columns) { function zipDataRow(rowArray, columns) {
return _.zipObject( return _.zipObject(
columns.map(x => x.columnName), columns.map(x => x.columnName),
@@ -100,7 +109,7 @@ const drivers = driverBases.map(driverBase => ({
dbhan.client.query(sql, function (error, results, fields) { dbhan.client.query(sql, function (error, results, fields) {
if (error) reject(error); if (error) reject(error);
const columns = extractColumns(fields); const columns = extractColumns(fields);
resolve({ rows: results && columns && results.map && results.map(row => zipDataRow(row, columns)), columns }); resolve({ rows: results && columns && results.map && results.map(row => modifyRow(zipDataRow(row, columns), columns)), columns });
}); });
}); });
}, },
@@ -136,14 +145,14 @@ const drivers = driverBases.map(driverBase => ({
} }
} else { } else {
if (columns) { if (columns) {
options.row(zipDataRow(row, columns)); options.row(modifyRow(zipDataRow(row, columns), columns));
} }
} }
}; };
const handleFields = fields => { const handleFields = fields => {
columns = extractColumns(fields); columns = extractColumns(fields);
if (columns) options.recordset(columns); if (columns) options.recordset(columns, { engine: driverBase.engine });
}; };
const handleError = error => { const handleError = error => {
@@ -177,10 +186,11 @@ const drivers = driverBases.map(driverBase => ({
columns = extractColumns(fields); columns = extractColumns(fields);
pass.write({ pass.write({
__isStreamHeader: true, __isStreamHeader: true,
engine: driverBase.engine,
...(structure || { columns }), ...(structure || { columns }),
}); });
}) })
.on('result', row => pass.write(zipDataRow(row, columns))) .on('result', row => pass.write(modifyRow(zipDataRow(row, columns), columns)))
.on('end', () => pass.end()); .on('end', () => pass.end());
return pass; return pass;

View File

@@ -37,6 +37,15 @@ function zipDataRow(rowArray, columns) {
return obj; return obj;
} }
/**
 * Rewrites Buffer-valued cells of a result row into the extended-JSON binary
 * shape ({ $binary: { base64 } }) used across dbgate drivers.
 * Mutates and returns the same row object.
 * @param {Record<string, any>} row - row keyed by column name
 * @param {Array<{columnName: string}>} columns - result-set column descriptors
 * @returns {Record<string, any>} the mutated input row
 */
function modifyRow(row, columns) {
  for (const { columnName } of columns) {
    const value = row[columnName];
    if (Buffer.isBuffer(value)) {
      row[columnName] = { $binary: { base64: value.toString('base64') } };
    }
  }
  return row;
}
let oracleClientInitialized = false; let oracleClientInitialized = false;
/** @type {import('dbgate-types').EngineDriver} */ /** @type {import('dbgate-types').EngineDriver} */
@@ -106,7 +115,7 @@ const driver = {
const res = await dbhan.client.execute(sql); const res = await dbhan.client.execute(sql);
try { try {
const columns = extractOracleColumns(res.metaData); const columns = extractOracleColumns(res.metaData);
return { rows: (res.rows || []).map(row => zipDataRow(row, columns)), columns }; return { rows: (res.rows || []).map(row => modifyRow(zipDataRow(row, columns), columns)), columns };
} catch (err) { } catch (err) {
return { return {
rows: [], rows: [],
@@ -134,7 +143,7 @@ const driver = {
if (!wasHeader) { if (!wasHeader) {
columns = extractOracleColumns(row); columns = extractOracleColumns(row);
if (columns && columns.length > 0) { if (columns && columns.length > 0) {
options.recordset(columns); options.recordset(columns, { engine: driverBase.engine });
} }
wasHeader = true; wasHeader = true;
} }
@@ -147,11 +156,11 @@ const driver = {
if (!wasHeader) { if (!wasHeader) {
columns = extractOracleColumns(row); columns = extractOracleColumns(row);
if (columns && columns.length > 0) { if (columns && columns.length > 0) {
options.recordset(columns); options.recordset(columns, { engine: driverBase.engine });
} }
wasHeader = true; wasHeader = true;
} }
options.row(zipDataRow(row, columns)); options.row(modifyRow(zipDataRow(row, columns), columns));
}); });
query.on('end', () => { query.on('end', () => {
@@ -214,9 +223,9 @@ const driver = {
if (rows && metaData) { if (rows && metaData) {
const columns = extractOracleColumns(metaData); const columns = extractOracleColumns(metaData);
options.recordset(columns); options.recordset(columns, { engine: driverBase.engine });
for (const row of rows) { for (const row of rows) {
options.row(zipDataRow(row, columns)); options.row(modifyRow(zipDataRow(row, columns), columns));
} }
} else if (rowsAffected) { } else if (rowsAffected) {
options.info({ options.info({
@@ -303,6 +312,7 @@ const driver = {
if (columns && columns.length > 0) { if (columns && columns.length > 0) {
pass.write({ pass.write({
__isStreamHeader: true, __isStreamHeader: true,
engine: driverBase.engine,
...(structure || { columns }), ...(structure || { columns }),
}); });
} }
@@ -311,7 +321,7 @@ const driver = {
}); });
query.on('data', row => { query.on('data', row => {
pass.write(zipDataRow(row, columns)); pass.write(modifyRow(zipDataRow(row, columns), columns));
}); });
query.on('end', () => { query.on('end', () => {

View File

@@ -136,9 +136,9 @@ class Dumper extends SqlDumper {
// else super.putValue(value); // else super.putValue(value);
// } // }
// putByteArrayValue(value) { putByteArrayValue(value) {
// this.putRaw(`e'\\\\x${arrayToHexString(value)}'`); this.putRaw(`HEXTORAW('${arrayToHexString(value)}')`);
// } }
putValue(value, dataType) { putValue(value, dataType) {
if (dataType?.toLowerCase() == 'timestamp') { if (dataType?.toLowerCase() == 'timestamp') {

View File

@@ -48,6 +48,9 @@ function transformRow(row, columnsToTransform) {
if (dataTypeName == 'geography') { if (dataTypeName == 'geography') {
row[columnName] = extractGeographyDate(row[columnName]); row[columnName] = extractGeographyDate(row[columnName]);
} }
else if (dataTypeName == 'bytea' && row[columnName]) {
row[columnName] = { $binary: { base64: Buffer.from(row[columnName]).toString('base64') } };
}
} }
return row; return row;
@@ -159,7 +162,7 @@ const drivers = driverBases.map(driverBase => ({
conid, conid,
}; };
const datatypes = await this.query(dbhan, `SELECT oid, typname FROM pg_type WHERE typname in ('geography')`); const datatypes = await this.query(dbhan, `SELECT oid, typname FROM pg_type WHERE typname in ('geography', 'bytea')`);
const typeIdToName = _.fromPairs(datatypes.rows.map(cur => [cur.oid, cur.typname])); const typeIdToName = _.fromPairs(datatypes.rows.map(cur => [cur.oid, cur.typname]));
dbhan['typeIdToName'] = typeIdToName; dbhan['typeIdToName'] = typeIdToName;
@@ -181,7 +184,14 @@ const drivers = driverBases.map(driverBase => ({
} }
const res = await dbhan.client.query({ text: sql, rowMode: 'array' }); const res = await dbhan.client.query({ text: sql, rowMode: 'array' });
const columns = extractPostgresColumns(res, dbhan); const columns = extractPostgresColumns(res, dbhan);
return { rows: (res.rows || []).map(row => zipDataRow(row, columns)), columns };
const transormableTypeNames = Object.values(dbhan.typeIdToName ?? {});
const columnsToTransform = columns.filter(x => transormableTypeNames.includes(x.dataTypeName));
const zippedRows = (res.rows || []).map(row => zipDataRow(row, columns));
const transformedRows = zippedRows.map(row => transformRow(row, columnsToTransform));
return { rows: transformedRows, columns };
}, },
stream(dbhan, sql, options) { stream(dbhan, sql, options) {
const handleNotice = notice => { const handleNotice = notice => {
@@ -208,7 +218,7 @@ const drivers = driverBases.map(driverBase => ({
if (!wasHeader) { if (!wasHeader) {
columns = extractPostgresColumns(query._result, dbhan); columns = extractPostgresColumns(query._result, dbhan);
if (columns && columns.length > 0) { if (columns && columns.length > 0) {
options.recordset(columns); options.recordset(columns, { engine: driverBase.engine });
} }
wasHeader = true; wasHeader = true;
} }
@@ -328,6 +338,7 @@ const drivers = driverBases.map(driverBase => ({
columns = extractPostgresColumns(query._result, dbhan); columns = extractPostgresColumns(query._result, dbhan);
pass.write({ pass.write({
__isStreamHeader: true, __isStreamHeader: true,
engine: driverBase.engine,
...(structure || { columns }), ...(structure || { columns }),
}); });
wasHeader = true; wasHeader = true;

View File

@@ -4,7 +4,7 @@ const stream = require('stream');
const driverBases = require('../frontend/drivers'); const driverBases = require('../frontend/drivers');
const Analyser = require('./Analyser'); const Analyser = require('./Analyser');
const { splitQuery, sqliteSplitterOptions } = require('dbgate-query-splitter'); const { splitQuery, sqliteSplitterOptions } = require('dbgate-query-splitter');
const { runStreamItem, waitForDrain } = require('./helpers'); const { runStreamItem, waitForDrain, modifyRow } = require('./helpers');
const { getLogger, createBulkInsertStreamBase, extractErrorLogData } = global.DBGATE_PACKAGES['dbgate-tools']; const { getLogger, createBulkInsertStreamBase, extractErrorLogData } = global.DBGATE_PACKAGES['dbgate-tools'];
const logger = getLogger('sqliteDriver'); const logger = getLogger('sqliteDriver');
@@ -51,7 +51,7 @@ const libsqlDriver = {
const columns = stmtColumns.length > 0 ? stmtColumns : extractColumns(rows[0]); const columns = stmtColumns.length > 0 ? stmtColumns : extractColumns(rows[0]);
return { return {
rows, rows: rows.map((row) => modifyRow(row, columns)),
columns: columns.map((col) => ({ columns: columns.map((col) => ({
columnName: col.name, columnName: col.name,
dataType: col.type, dataType: col.type,
@@ -66,7 +66,7 @@ const libsqlDriver = {
console.log('#stream', sql); console.log('#stream', sql);
const inTransaction = dbhan.client.transaction(() => { const inTransaction = dbhan.client.transaction(() => {
for (const sqlItem of sqlSplitted) { for (const sqlItem of sqlSplitted) {
runStreamItem(dbhan, sqlItem, options, rowCounter); runStreamItem(dbhan, sqlItem, options, rowCounter, driverBases[1].engine);
} }
if (rowCounter.date) { if (rowCounter.date) {
@@ -115,9 +115,10 @@ const libsqlDriver = {
async readQueryTask(stmt, pass) { async readQueryTask(stmt, pass) {
// let sent = 0; // let sent = 0;
const columns = stmt.columns();
for (const row of stmt.iterate()) { for (const row of stmt.iterate()) {
// sent++; // sent++;
if (!pass.write(row)) { if (!pass.write(modifyRow(row, columns))) {
// console.log('WAIT DRAIN', sent); // console.log('WAIT DRAIN', sent);
await waitForDrain(pass); await waitForDrain(pass);
} }
@@ -135,6 +136,7 @@ const libsqlDriver = {
pass.write({ pass.write({
__isStreamHeader: true, __isStreamHeader: true,
engine: driverBases[1].engine,
...(structure || { ...(structure || {
columns: columns.map((col) => ({ columns: columns.map((col) => ({
columnName: col.name, columnName: col.name,

View File

@@ -5,7 +5,7 @@ const Analyser = require('./Analyser');
const driverBases = require('../frontend/drivers'); const driverBases = require('../frontend/drivers');
const { splitQuery, sqliteSplitterOptions } = require('dbgate-query-splitter'); const { splitQuery, sqliteSplitterOptions } = require('dbgate-query-splitter');
const { getLogger, createBulkInsertStreamBase, extractErrorLogData } = global.DBGATE_PACKAGES['dbgate-tools']; const { getLogger, createBulkInsertStreamBase, extractErrorLogData } = global.DBGATE_PACKAGES['dbgate-tools'];
const { runStreamItem, waitForDrain } = require('./helpers'); const { runStreamItem, waitForDrain, modifyRow } = require('./helpers');
const logger = getLogger('sqliteDriver'); const logger = getLogger('sqliteDriver');
@@ -40,7 +40,7 @@ const driver = {
const columns = stmt.columns(); const columns = stmt.columns();
const rows = stmt.all(); const rows = stmt.all();
return { return {
rows, rows: rows.map((row) => modifyRow(row, columns)),
columns: columns.map((col) => ({ columns: columns.map((col) => ({
columnName: col.name, columnName: col.name,
dataType: col.type, dataType: col.type,
@@ -61,7 +61,7 @@ const driver = {
const inTransaction = dbhan.client.transaction(() => { const inTransaction = dbhan.client.transaction(() => {
for (const sqlItem of sqlSplitted) { for (const sqlItem of sqlSplitted) {
runStreamItem(dbhan, sqlItem, options, rowCounter); runStreamItem(dbhan, sqlItem, options, rowCounter, driverBases[0].engine);
} }
if (rowCounter.date) { if (rowCounter.date) {
@@ -103,9 +103,10 @@ const driver = {
async readQueryTask(stmt, pass) { async readQueryTask(stmt, pass) {
// let sent = 0; // let sent = 0;
const columns = stmt.columns();
for (const row of stmt.iterate()) { for (const row of stmt.iterate()) {
// sent++; // sent++;
if (!pass.write(row)) { if (!pass.write(modifyRow(row, columns))) {
// console.log('WAIT DRAIN', sent); // console.log('WAIT DRAIN', sent);
await waitForDrain(pass); await waitForDrain(pass);
} }
@@ -123,6 +124,7 @@ const driver = {
pass.write({ pass.write({
__isStreamHeader: true, __isStreamHeader: true,
engine: driverBases[0].engine,
...(structure || { ...(structure || {
columns: columns.map((col) => ({ columns: columns.map((col) => ({
columnName: col.name, columnName: col.name,

View File

@@ -1,6 +1,6 @@
// @ts-check // @ts-check
function runStreamItem(dbhan, sql, options, rowCounter) { function runStreamItem(dbhan, sql, options, rowCounter, engine) {
const stmt = dbhan.client.prepare(sql); const stmt = dbhan.client.prepare(sql);
console.log(stmt); console.log(stmt);
console.log(stmt.reader); console.log(stmt.reader);
@@ -12,11 +12,12 @@ function runStreamItem(dbhan, sql, options, rowCounter) {
columns.map((col) => ({ columns.map((col) => ({
columnName: col.name, columnName: col.name,
dataType: col.type, dataType: col.type,
})) })),
{ engine }
); );
for (const row of stmt.iterate()) { for (const row of stmt.iterate()) {
options.row(row); options.row(modifyRow(row, columns));
} }
} else { } else {
const info = stmt.run(); const info = stmt.run();
@@ -44,7 +45,17 @@ async function waitForDrain(stream) {
}); });
} }
function modifyRow(row, columns) {
columns.forEach((col) => {
if (row[col.name] instanceof Uint8Array || row[col.name] instanceof ArrayBuffer) {
row[col.name] = { $binary: { base64: Buffer.from(row[col.name]).toString('base64') } };
}
});
return row;
}
module.exports = { module.exports = {
runStreamItem, runStreamItem,
waitForDrain, waitForDrain,
modifyRow,
}; };

View File

@@ -45,6 +45,10 @@ class StringifyStream extends stream.Transform {
elementValue(element, value) { elementValue(element, value) {
this.startElement(element); this.startElement(element);
if (value?.$binary?.base64) {
const buffer = Buffer.from(value.$binary.base64, 'base64');
value = '0x' +buffer.toString('hex').toUpperCase();
}
this.push(escapeXml(`${value}`)); this.push(escapeXml(`${value}`));
this.endElement(element); this.endElement(element);
} }