CSV support completely removed; logic moved to a plugin

This commit is contained in:
Jan Prochazka
2020-11-22 09:17:51 +01:00
parent 3cdba4339f
commit e23e749cc5
6 changed files with 1 addition and 159 deletions

View File

@@ -2,21 +2,6 @@ const path = require('path');
const { uploadsdir } = require('../utility/directories');
const uuidv1 = require('uuid/v1');
// const extensions = [
// {
// ext: '.xlsx',
// type: 'excel',
// },
// {
// ext: '.jsonl',
// type: 'jsonl',
// },
// {
// ext: '.csv',
// type: 'csv',
// },
// ];
module.exports = {
upload_meta: {
method: 'post',
@@ -31,19 +16,10 @@ module.exports = {
const uploadName = uuidv1();
const filePath = path.join(uploadsdir(), uploadName);
console.log(`Uploading file ${data.name}, size=${data.size}`);
// let storageType = null;
// let shortName = data.name;
// for (const { ext, type } of extensions) {
// if (data.name.endsWith(ext)) {
// storageType = type;
// shortName = data.name.slice(0, -ext.length);
// }
// }
data.mv(filePath, () => {
res.json({
originalName: data.name,
// shortName,
// storageType,
uploadName,
filePath,
});

View File

@@ -1,55 +0,0 @@
const _ = require('lodash');
const csv = require('csv');
const fs = require('fs');
const stream = require('stream');
class CsvPrepareStream extends stream.Transform {
  /**
   * Object-mode transform that turns raw parsed CSV rows (arrays of cell
   * values) into a stream of row objects, preceded by one structure record
   * of the shape `{ columns: [{ columnName }] }`.
   *
   * The first incoming chunk fixes the columns: when `header` is true it is
   * consumed as the header row and only the structure is emitted; otherwise
   * synthetic names `col1..colN` are generated and the first chunk is also
   * re-emitted as a data row.
   *
   * @param {{ header: boolean }} options - whether the first row is a header
   */
  constructor({ header }) {
    super({ objectMode: true });
    this.structure = null; // filled in from the first chunk
    this.header = header;
  }

  // Pair the cells of one raw row with the detected column names.
  // Stdlib replacement for lodash's _.zipObject: extra cells are dropped,
  // missing cells become `undefined` — same semantics.
  rowToObject(row) {
    return Object.fromEntries(this.structure.columns.map((col, index) => [col.columnName, row[index]]));
  }

  _transform(chunk, encoding, done) {
    if (this.structure) {
      // Normal case: structure already known, emit a plain row object.
      this.push(this.rowToObject(chunk));
      done();
      return;
    }
    if (this.header) {
      // Header row: derive column names from it, emit only the structure.
      this.structure = { columns: chunk.map((columnName) => ({ columnName })) };
      this.push(this.structure);
    } else {
      // No header: synthesize col1..colN names, then re-emit the first
      // chunk as the first data row so no data is lost.
      this.structure = { columns: chunk.map((value, index) => ({ columnName: `col${index + 1}` })) };
      this.push(this.structure);
      this.push(this.rowToObject(chunk));
    }
    done();
  }
}
/**
 * Opens a CSV file for reading and returns an object-mode stream that emits
 * one structure record ({ columns: [...] }) followed by row objects.
 *
 * @param {object} options
 * @param {string} options.fileName - path of the CSV file to read
 * @param {string} [options.encoding='utf-8'] - file encoding
 * @param {boolean} [options.header=true] - whether the first row holds column names
 * @param {string} [options.delimiter] - field delimiter passed to the parser
 * @param {number} [options.limitRows] - maximum number of data rows to read
 * @returns {Promise<stream.Transform>} stream of structure + row objects
 */
async function csvReader({ fileName, encoding = 'utf-8', header = true, delimiter, limitRows = undefined }) {
  console.log(`Reading file ${fileName}`);
  // Parse one extra line when limiting so the header row is not counted
  // against the requested number of data rows.
  const parser = csv.parse({
    // @ts-ignore
    delimiter,
    skip_lines_with_error: true,
    to_line: limitRows ? limitRows + 1 : -1,
  });
  const prepare = new CsvPrepareStream({ header });
  fs.createReadStream(fileName, encoding).pipe(parser).pipe(prepare);
  return prepare;
}
module.exports = csvReader;

View File

@@ -1,36 +0,0 @@
const csv = require('csv');
const fs = require('fs');
const stream = require('stream');
class CsvPrepareStream extends stream.Transform {
  /**
   * Object-mode transform that converts row objects into arrays of cell
   * values ordered by the column list, ready for CSV stringification.
   *
   * The first incoming chunk must be the structure record
   * ({ columns: [{ columnName }] }); when `header` is true its column names
   * are emitted as the header row.
   *
   * @param {{ header: boolean }} options - whether to emit a header row
   */
  constructor({ header }) {
    super({ objectMode: true });
    this.structure = null; // captured from the first chunk
    this.header = header;
  }

  _transform(row, encoding, callback) {
    if (!this.structure) {
      // First chunk carries the structure, not data.
      this.structure = row;
      if (this.header) {
        const names = row.columns.map(({ columnName }) => columnName);
        this.push(names);
      }
      callback();
      return;
    }
    // Project the row object onto the column order fixed by the structure.
    const cells = this.structure.columns.map(({ columnName }) => row[columnName]);
    this.push(cells);
    callback();
  }
}
/**
 * Opens a CSV file for writing and returns an object-mode stream; callers
 * write one structure record ({ columns: [...] }) followed by row objects.
 *
 * @param {object} options
 * @param {string} options.fileName - path of the CSV file to create
 * @param {string} [options.encoding='utf-8'] - file encoding
 * @param {boolean} [options.header=true] - whether to emit a header row
 * @param {string} [options.delimiter] - field delimiter passed to the stringifier
 * @param {boolean} [options.quoted] - quote all fields
 * @returns {Promise<stream.Transform>} writable object stream
 */
async function csvWriter({ fileName, encoding = 'utf-8', header = true, delimiter, quoted }) {
  console.log(`Writing file ${fileName}`);
  const target = fs.createWriteStream(fileName, encoding);
  const stringifier = csv.stringify({ delimiter, quoted });
  const prepare = new CsvPrepareStream({ header });
  prepare.pipe(stringifier).pipe(target);
  // NOTE(review): presumably consumers wait on this stream to know the file
  // is fully flushed — verify against the caller.
  prepare['finisher'] = target;
  return prepare;
}
module.exports = csvWriter;

View File

@@ -1,6 +1,4 @@
const queryReader = require('./queryReader');
const csvWriter = require('./csvWriter');
const csvReader = require('./csvReader');
const runScript = require('./runScript');
const tableWriter = require('./tableWriter');
const tableReader = require('./tableReader');
@@ -19,8 +17,6 @@ const finalizer = require('./finalizer');
module.exports = {
queryReader,
csvWriter,
csvReader,
runScript,
tableWriter,
tableReader,