DBF reader plugin

This commit is contained in:
SPRINX0\prochazka
2025-01-07 17:24:56 +01:00
parent 79bf9016a3
commit 2521f05526
11 changed files with 322 additions and 0 deletions

View File

@@ -0,0 +1,12 @@
const reader = require('./reader');
// const writer = require('./writer');
// Plugin descriptor consumed by the dbgate plugin loader: exposes the
// DBF shell-api reader and forwards initialization to it.
const plugin = {
  packageName: 'dbgate-plugin-dbf',
  shellApi: {
    reader,
  },
  // Pass the shared environment down so reader() can use dbgateApi.
  initialize: (dbgateEnv) => reader.initialize(dbgateEnv),
};
module.exports = plugin;

View File

@@ -0,0 +1,85 @@
const _ = require('lodash');
const csv = require('csv');
const fs = require('fs');
const stream = require('stream');
const { DBFFile } = require('dbffile');
let dbgateApi;
/**
 * Maps a DBF field descriptor to a SQL-ish data-type string used in the
 * stream header's column metadata.
 *
 * @param {{ type: string, size: number }} field - DBF field descriptor from dbffile
 * @returns {string} data type name (e.g. 'varchar(10)', 'numeric', 'date')
 */
function getFieldType(field) {
  const { type, size } = field;
  switch (type) {
    case 'C':
      return `varchar(${size})`;
    case 'N':
      return 'numeric';
    case 'F':
      return 'float';
    case 'Y':
      return 'money';
    case 'I':
      return 'int';
    case 'L':
      return 'boolean';
    case 'D':
      return 'date';
    case 'T':
      return 'datetime';
    case 'B':
      // BUG FIX: was misspelled 'duouble'; DBF type 'B' is a double.
      return 'double';
    case 'M':
      return 'varchar(max)';
    default:
      // Unknown DBF type codes fall back to a generic string type.
      return 'string';
  }
}
/**
 * Opens a DBF file and returns an object-mode stream that emits a header
 * chunk ({ __isStreamHeader: true, columns }) followed by one chunk per record.
 *
 * @param {object} options
 * @param {string} options.fileName - file name/URL; resolved via dbgateApi.download
 * @param {string} [options.encoding='ISO-8859-1'] - character encoding of the DBF file
 * @param {boolean} [options.includeDeletedRecords=false] - also emit records flagged as deleted
 * @param {number} [options.limitRows] - stop after this many records (all when undefined)
 * @returns {Promise<stream.PassThrough>} object-mode stream of header + record chunks
 */
async function reader({ fileName, encoding = 'ISO-8859-1', includeDeletedRecords = false, limitRows = undefined }) {
  console.log(`Reading file ${fileName}`);
  const downloadedFile = await dbgateApi.download(fileName);
  const pass = new stream.PassThrough({
    objectMode: true,
  });
  // Pump records asynchronously; the stream is returned immediately so the
  // consumer can start piping while rows are still being read.
  (async () => {
    try {
      // Open the DBF file
      const dbf = await DBFFile.open(downloadedFile, { encoding, includeDeletedRecords });
      const columns = dbf.fields.map((field) => ({
        columnName: field.name,
        dataType: getFieldType(field),
      }));
      // Header chunk tells downstream consumers the column layout.
      pass.write({
        __isStreamHeader: true,
        columns,
      });
      let rowsRead = 0;
      // Read each record and push it into the stream
      for await (const record of dbf) {
        // Emit the record as a chunk
        pass.write(record);
        rowsRead++;
        if (limitRows && rowsRead >= limitRows) {
          break;
        }
      }
      pass.end();
    } catch (error) {
      // BUG FIX: previously called pass.end(), silently swallowing the error
      // and making a failed read look like a successful, complete stream.
      // destroy(error) propagates the failure to stream consumers.
      pass.destroy(error);
    }
  })();
  return pass;
}
/**
 * Called by dbgate during plugin startup; captures the shared dbgateApi
 * instance that reader() uses to resolve/download file names.
 */
reader.initialize = function (dbgateEnv) {
  ({ dbgateApi } = dbgateEnv);
};

module.exports = reader;