Revert "feat: transform rows support for json lines reader"

This reverts commit b74b6b3284.
This commit is contained in:
Pavel
2025-06-12 13:29:48 +02:00
parent 1e2474921b
commit 17711bc5c9
2 changed files with 7 additions and 14 deletions

View File

@@ -17,9 +17,8 @@ const copyStream = require('./copyStream');
* @param {object} options.driver - driver object. If not provided, it will be loaded from connection
* @param {string} options.folder - folder with model files (YAML files for tables, SQL files for views, procedures, ...)
* @param {function[]} options.modelTransforms - array of functions for transforming model
* @param {((row: Record<string, any>) => Record<string, any>) | undefined} options.transformRow - function to transform each row
*/
async function importDbFromFolder({ connection, systemConnection, driver, folder, modelTransforms, transformRow }) {
async function importDbFromFolder({ connection, systemConnection, driver, folder, modelTransforms }) {
if (!driver) driver = requireEngineDriver(connection);
const dbhan = systemConnection || (await connectUtility(driver, connection, 'read'));
@@ -78,7 +77,7 @@ async function importDbFromFolder({ connection, systemConnection, driver, folder
for (const table of modelAdapted.tables) {
const fileName = path.join(folder, `${table.pureName}.jsonl`);
if (await fs.exists(fileName)) {
const src = await jsonLinesReader({ fileName, transformRow });
const src = await jsonLinesReader({ fileName });
const dst = await tableWriter({
systemConnection: dbhan,
pureName: table.pureName,
@@ -106,7 +105,7 @@ async function importDbFromFolder({ connection, systemConnection, driver, folder
for (const file of fs.readdirSync(folder)) {
if (!file.endsWith('.jsonl')) continue;
const pureName = path.parse(file).name;
const src = await jsonLinesReader({ fileName: path.join(folder, file), transformRow });
const src = await jsonLinesReader({ fileName: path.join(folder, file) });
const dst = await tableWriter({
systemConnection: dbhan,
pureName,

View File

@@ -6,11 +6,10 @@ const download = require('./download');
const logger = getLogger('jsonLinesReader');
class ParseStream extends stream.Transform {
constructor({ limitRows, transformRow }) {
constructor({ limitRows }) {
super({ objectMode: true });
this.wasHeader = false;
this.limitRows = limitRows;
this.transformRow = transformRow;
this.rowsWritten = 0;
}
_transform(chunk, encoding, done) {
@@ -27,11 +26,7 @@ class ParseStream extends stream.Transform {
this.wasHeader = true;
}
if (!this.limitRows || this.rowsWritten < this.limitRows) {
if (this.transformRow) {
this.push(this.transformRow(obj));
} else {
this.push(obj);
}
this.push(obj);
this.rowsWritten += 1;
}
done();
@@ -44,10 +39,9 @@ class ParseStream extends stream.Transform {
* @param {string} options.fileName - file name or URL
* @param {string} options.encoding - encoding of the file
* @param {number} options.limitRows - maximum number of rows to read
* @param {((row: Record<string, any>) => Record<string, any>) | undefined} options.transformRow - function to transform each row
* @returns {Promise<readerType>} - reader object
*/
async function jsonLinesReader({ fileName, encoding = 'utf-8', limitRows = undefined, transformRow }) {
async function jsonLinesReader({ fileName, encoding = 'utf-8', limitRows = undefined }) {
logger.info(`Reading file ${fileName}`);
const downloadedFile = await download(fileName);
@@ -58,7 +52,7 @@ async function jsonLinesReader({ fileName, encoding = 'utf-8', limitRows = undef
encoding
);
const liner = byline(fileStream);
const parser = new ParseStream({ limitRows, transformRow });
const parser = new ParseStream({ limitRows });
return [liner, parser];
}