diff --git a/packages/api/src/shell/index.js b/packages/api/src/shell/index.js
index 373d8bbf5..acd99c2d7 100644
--- a/packages/api/src/shell/index.js
+++ b/packages/api/src/shell/index.js
@@ -7,6 +7,8 @@ const copyStream = require('./copyStream');
 const fakeObjectReader = require('./fakeObjectReader');
 const consoleObjectWriter = require('./consoleObjectWriter');
 const excelSheetReader = require('./excelSheetReader');
+const jsonLinesWriter = require('./jsonLinesWriter');
+const jsonLinesReader = require('./jsonLinesReader');
 
 module.exports = {
   queryReader,
@@ -16,6 +18,8 @@ module.exports = {
   tableWriter,
   copyStream,
   excelSheetReader,
+  jsonLinesWriter,
+  jsonLinesReader,
   fakeObjectReader,
   consoleObjectWriter,
 };
diff --git a/packages/api/src/shell/jsonLinesReader.js b/packages/api/src/shell/jsonLinesReader.js
new file mode 100644
index 000000000..8b3a25243
--- /dev/null
+++ b/packages/api/src/shell/jsonLinesReader.js
@@ -0,0 +1,32 @@
+const fs = require('fs');
+const stream = require('stream');
+const byline = require('byline');
+
+class ParseStream extends stream.Transform {
+  constructor({ header }) {
+    super({ objectMode: true });
+    this.header = header;
+    this.wasHeader = false;
+  }
+  _transform(chunk, encoding, done) {
+    const obj = JSON.parse(chunk);
+    if (!this.wasHeader) {
+      if (!this.header) this.push({ columns: Object.keys(obj).map((columnName) => ({ columnName })) });
+      this.wasHeader = true;
+    }
+    this.push(obj);
+    done();
+  }
+}
+
+async function jsonLinesReader({ fileName, encoding = 'utf-8', header = true }) {
+  console.log(`Reading file ${fileName}`);
+
+  const fileStream = fs.createReadStream(fileName, encoding);
+  const liner = byline(fileStream);
+  const parser = new ParseStream({ header });
+  liner.pipe(parser);
+  return parser;
+}
+
+module.exports = jsonLinesReader;
diff --git a/packages/api/src/shell/jsonLinesWriter.js b/packages/api/src/shell/jsonLinesWriter.js
new file mode 100644
index 000000000..df31b5fed
--- /dev/null
+++ b/packages/api/src/shell/jsonLinesWriter.js
@@ -0,0 +1,30 @@
+const fs = require('fs');
+const stream = require('stream');
+
+class StringifyStream extends stream.Transform {
+  constructor({ header }) {
+    super({ objectMode: true });
+    this.header = header;
+    this.wasHeader = false;
+  }
+  _transform(chunk, encoding, done) {
+    if (!this.wasHeader) {
+      if (this.header) this.push(JSON.stringify(chunk) + '\n');
+      this.wasHeader = true;
+    } else {
+      this.push(JSON.stringify(chunk) + '\n');
+    }
+    done();
+  }
+}
+
+async function jsonLinesWriter({ fileName, encoding = 'utf-8', header = true }) {
+  console.log(`Writing file ${fileName}`);
+  const stringify = new StringifyStream({ header });
+  const fileStream = fs.createWriteStream(fileName, encoding);
+  stringify.pipe(fileStream);
+  stringify['finisher'] = fileStream;
+  return stringify;
+}
+
+module.exports = jsonLinesWriter;
diff --git a/test/exportTable.js b/test/exportTable.js
index 4fb9b4121..a687949f3 100644
--- a/test/exportTable.js
+++ b/test/exportTable.js
@@ -31,9 +31,15 @@ async function run() {
     // header: false,
   });
 
+  const jsonWriter = await dbgateApi.jsonLinesWriter({
+    fileName: 'test.jsonl',
+    header: false,
+  });
+
   const consoleWriter = await dbgateApi.consoleObjectWriter();
 
-  await dbgateApi.copyStream(queryReader, csvWriter);
+  // await dbgateApi.copyStream(queryReader, csvWriter);
+  await dbgateApi.copyStream(queryReader, jsonWriter);
   // await dbgateApi.copyStream(queryReader, consoleWriter);
 }
 
diff --git a/test/importTable.js b/test/importTable.js
index 96036f62b..52772b374 100644
--- a/test/importTable.js
+++ b/test/importTable.js
@@ -11,6 +11,11 @@ async function run() {
     sheetName: 'Events',
   });
 
+  const jsonReader = await dbgateApi.jsonLinesReader({
+    fileName: 'test.jsonl',
+    header: false,
+  });
+
   const tableWriter = await dbgateApi.tableWriter({
     connection: {
       server: 'localhost',
@@ -20,7 +25,7 @@
       database: 'Chinook',
     },
     schemaName: 'dbo',
-    pureName: 'Events',
+    pureName: 'Genre3',
     createIfNotExists: true,
     truncate: true,
   });
@@ -40,7 +45,7 @@ async function run() {
   const consoleWriter = await dbgateApi.consoleObjectWriter();
 
   // await dbgateApi.copyStream(excelReader, consoleWriter);
-  await dbgateApi.copyStream(excelReader, tableWriter);
+  await dbgateApi.copyStream(jsonReader, tableWriter);
   // await dbgateApi.copyStream(csvReader, consoleWriter);
   // await dbgateApi.copyStream(csvReader, tableWriter);
 }
diff --git a/test/test.csv b/test/test.csv
index a129fcf4c..e69de29bb 100644
--- a/test/test.csv
+++ b/test/test.csv
@@ -1,26 +0,0 @@
-GenreId,Name
-1,Rock
-2,Jazz
-3,Metal
-4,Alternative & Punk
-5,Rock And Roll
-6,Blues
-7,Latin
-8,Reggae
-9,Pop
-10,Soundtrack
-11,Bossa Nova
-12,Easy Listening
-13,Heavy Metal
-14,R&B/Soul
-15,Electronica/Dance
-16,World
-17,Hip Hop/Rap
-18,Science Fiction
-19,TV Shows
-20,Sci Fi & Fantasy
-21,Drama
-22,Comedy
-23,Alternative
-24,Classical
-25,Opera
diff --git a/test/test.jsonl b/test/test.jsonl
new file mode 100644
index 000000000..9fd1d85b0
--- /dev/null
+++ b/test/test.jsonl
@@ -0,0 +1,25 @@
+{"GenreId":1,"Name":"Rock"}
+{"GenreId":2,"Name":"Jazz"}
+{"GenreId":3,"Name":"Metal"}
+{"GenreId":4,"Name":"Alternative & Punk"}
+{"GenreId":5,"Name":"Rock And Roll"}
+{"GenreId":6,"Name":"Blues"}
+{"GenreId":7,"Name":"Latin"}
+{"GenreId":8,"Name":"Reggae"}
+{"GenreId":9,"Name":"Pop"}
+{"GenreId":10,"Name":"Soundtrack"}
+{"GenreId":11,"Name":"Bossa Nova"}
+{"GenreId":12,"Name":"Easy Listening"}
+{"GenreId":13,"Name":"Heavy Metal"}
+{"GenreId":14,"Name":"R&B/Soul"}
+{"GenreId":15,"Name":"Electronica/Dance"}
+{"GenreId":16,"Name":"World"}
+{"GenreId":17,"Name":"Hip Hop/Rap"}
+{"GenreId":18,"Name":"Science Fiction"}
+{"GenreId":19,"Name":"TV Shows"}
+{"GenreId":20,"Name":"Sci Fi & Fantasy"}
+{"GenreId":21,"Name":"Drama"}
+{"GenreId":22,"Name":"Comedy"}
+{"GenreId":23,"Name":"Alternative"}
+{"GenreId":24,"Name":"Classical"}
+{"GenreId":25,"Name":"Opera"}