diff --git a/packages/api/src/shell/archiveReader.js b/packages/api/src/shell/archiveReader.js index 090597f7b..0c8eff5d6 100644 --- a/packages/api/src/shell/archiveReader.js +++ b/packages/api/src/shell/archiveReader.js @@ -2,9 +2,9 @@ const path = require('path'); const { archivedir } = require('../utility/directories'); const jsonLinesReader = require('./jsonLinesReader'); -function archiveReader({ folderName, fileName }) { +function archiveReader({ folderName, fileName, ...other }) { const jsonlFile = path.join(archivedir(), folderName, `${fileName}.jsonl`); - const res = jsonLinesReader({ fileName: jsonlFile }); + const res = jsonLinesReader({ fileName: jsonlFile, ...other }); return res; } diff --git a/packages/api/src/shell/csvReader.js b/packages/api/src/shell/csvReader.js index 68e9bd9b9..d4016376a 100644 --- a/packages/api/src/shell/csvReader.js +++ b/packages/api/src/shell/csvReader.js @@ -37,12 +37,13 @@ class CsvPrepareStream extends stream.Transform { } } -async function csvReader({ fileName, encoding = 'utf-8', header = true, delimiter, quoted }) { +async function csvReader({ fileName, encoding = 'utf-8', header = true, delimiter, quoted, limitRows = undefined }) { console.log(`Reading file ${fileName}`); const csvStream = csv.parse({ // @ts-ignore delimiter, quoted, + to_line: limitRows ? 
limitRows + 1 : -1, }); const fileStream = fs.createReadStream(fileName, encoding); const csvPrepare = new CsvPrepareStream({ header }); diff --git a/packages/api/src/shell/excelSheetReader.js b/packages/api/src/shell/excelSheetReader.js index c8787618c..763a95f29 100644 --- a/packages/api/src/shell/excelSheetReader.js +++ b/packages/api/src/shell/excelSheetReader.js @@ -14,7 +14,7 @@ async function loadWorkbook(fileName) { return workbook; } -async function excelSheetReader({ fileName, sheetName }) { +async function excelSheetReader({ fileName, sheetName, limitRows = undefined }) { const workbook = await loadWorkbook(fileName); const sheet = workbook.getWorksheet(sheetName); @@ -27,6 +27,7 @@ async function excelSheetReader({ fileName, sheetName }) { }; pass.write(structure); for (let rowIndex = 2; rowIndex <= sheet.rowCount; rowIndex++) { + if (limitRows && rowIndex > limitRows + 1) break; const row = sheet.getRow(rowIndex); const rowData = _.fromPairs(structure.columns.map((col, index) => [col.columnName, row.getCell(index + 1).value])); if (_.isEmpty(_.omitBy(rowData, (v) => v == null || v.toString().trim().length == 0))) continue; diff --git a/packages/api/src/shell/jslDataReader.js b/packages/api/src/shell/jslDataReader.js index 590a9946e..085cbe0a1 100644 --- a/packages/api/src/shell/jslDataReader.js +++ b/packages/api/src/shell/jslDataReader.js @@ -1,9 +1,9 @@ const getJslFileName = require('../utility/getJslFileName'); const jsonLinesReader = require('./jsonLinesReader'); -function jslDataReader({ jslid }) { +function jslDataReader({ jslid, ...other }) { const fileName = getJslFileName(jslid); - return jsonLinesReader({ fileName }); + return jsonLinesReader({ fileName, ...other }); } module.exports = jslDataReader; diff --git a/packages/api/src/shell/jsonLinesReader.js b/packages/api/src/shell/jsonLinesReader.js index 8b3a25243..e36db95fc 100644 --- a/packages/api/src/shell/jsonLinesReader.js +++ b/packages/api/src/shell/jsonLinesReader.js @@ -3,10 +3,12 
@@ const stream = require('stream'); const byline = require('byline'); class ParseStream extends stream.Transform { - constructor({ header }) { + constructor({ header, limitRows }) { super({ objectMode: true }); this.header = header; this.wasHeader = false; + this.limitRows = limitRows; + this.rowsWritten = 0; } _transform(chunk, encoding, done) { const obj = JSON.parse(chunk); @@ -14,17 +16,20 @@ class ParseStream extends stream.Transform { if (!this.header) this.push({ columns: Object.keys(obj).map((columnName) => ({ columnName })) }); this.wasHeader = true; } - this.push(obj); + if (!this.limitRows || this.rowsWritten < this.limitRows) { + this.rowsWritten += 1; + this.push(obj); + } done(); } } -async function jsonLinesReader({ fileName, encoding = 'utf-8', header = true }) { +async function jsonLinesReader({ fileName, encoding = 'utf-8', header = true, limitRows = undefined }) { console.log(`Reading file ${fileName}`); const fileStream = fs.createReadStream(fileName, encoding); const liner = byline(fileStream); - const parser = new ParseStream({ header }); + const parser = new ParseStream({ header, limitRows }); liner.pipe(parser); return parser; } diff --git a/packages/api/src/shell/tableReader.js b/packages/api/src/shell/tableReader.js index 6396675aa..023819d7c 100644 --- a/packages/api/src/shell/tableReader.js +++ b/packages/api/src/shell/tableReader.js @@ -3,7 +3,7 @@ const driverConnect = require('../utility/driverConnect'); const engines = require('@dbgate/engines'); -async function queryReader({ connection, pureName, schemaName }) { +async function tableReader({ connection, pureName, schemaName }) { const driver = engines(connection); const pool = await driverConnect(driver, connection); console.log(`Connected.`); @@ -25,4 +25,4 @@ async function queryReader({ connection, pureName, schemaName }) { return await driver.readQuery(pool, query); } -module.exports = queryReader; +module.exports = tableReader; diff --git a/packages/web/src/impexp/ImportExportConfigurator.js 
b/packages/web/src/impexp/ImportExportConfigurator.js index 00d645008..84594b526 100644 --- a/packages/web/src/impexp/ImportExportConfigurator.js +++ b/packages/web/src/impexp/ImportExportConfigurator.js @@ -14,8 +14,8 @@ import { } from '../utility/forms'; import { useArchiveFiles, useConnectionInfo, useDatabaseInfo } from '../utility/metadataLoaders'; import TableControl, { TableColumn } from '../utility/TableControl'; -import { TextField, SelectField } from '../utility/inputs'; -import { getActionOptions, getTargetName, isFileStorage } from './createImpExpScript'; +import { TextField, SelectField, CheckboxField } from '../utility/inputs'; +import { createPreviewReader, getActionOptions, getTargetName, isFileStorage } from './createImpExpScript'; import getElectron from '../utility/getElectron'; import ErrorInfo from '../widgets/ErrorInfo'; import getAsArray from '../utility/getAsArray'; @@ -86,7 +86,7 @@ function getFileFilters(storageType) { return res; } -async function addFilesToSourceList(files, values, setFieldValue, preferedStorageType) { +async function addFilesToSourceList(files, values, setFieldValue, preferedStorageType, setPreviewSource) { const newSources = []; const storage = preferedStorageType || values.sourceStorageType; for (const file of getAsArray(files)) { @@ -116,6 +116,9 @@ async function addFilesToSourceList(files, values, setFieldValue, preferedStorag if (preferedStorageType && preferedStorageType != values.sourceStorageType) { setFieldValue('sourceStorageType', preferedStorageType); } + if (setPreviewSource && newSources.length == 1) { + setPreviewSource(newSources[0]); + } } function ElectronFilesInput() { @@ -308,7 +311,7 @@ function SourceName({ name }) { ); } -export default function ImportExportConfigurator({ uploadedFile = undefined }) { +export default function ImportExportConfigurator({ uploadedFile = undefined, onChangePreview = undefined }) { const { values, setFieldValue } = useFormikContext(); const targetDbinfo = 
useDatabaseInfo({ conid: values.targetConnectionId, database: values.targetDatabaseName }); const sourceConnectionInfo = useConnectionInfo({ conid: values.sourceConnectionId }); @@ -316,6 +319,7 @@ export default function ImportExportConfigurator({ uploadedFile = undefined }) { const { sourceList } = values; const { setUploadListener } = useUploadsProvider(); const theme = useTheme(); + const [previewSource, setPreviewSource] = React.useState(null); const handleUpload = React.useCallback( (file) => { @@ -328,7 +332,8 @@ export default function ImportExportConfigurator({ uploadedFile = undefined }) { ], values, setFieldValue, - !sourceList || sourceList.length == 0 ? file.storageType : null + !sourceList || sourceList.length == 0 ? file.storageType : null, + setPreviewSource ); // setFieldValue('sourceList', [...(sourceList || []), file.originalName]); }, @@ -348,6 +353,21 @@ export default function ImportExportConfigurator({ uploadedFile = undefined }) { } }, []); + const supportsPreview = ['csv', 'jsonl', 'excel'].includes(values.sourceStorageType); + + const handleChangePreviewSource = async () => { + if (previewSource && supportsPreview) { + const reader = await createPreviewReader(values, previewSource); + if (onChangePreview) onChangePreview(reader); + } else { + if (onChangePreview) onChangePreview(null); + } + }; + + React.useEffect(() => { + handleChangePreviewSource(); + }, [previewSource, supportsPreview]); + return ( @@ -396,6 +416,21 @@ export default function ImportExportConfigurator({ uploadedFile = undefined }) { /> )} /> + + supportsPreview ? 
( + { + if (e.target.checked) setPreviewSource(row); + else setPreviewSource(null); + }} + /> + ) : null + } + /> ); diff --git a/packages/web/src/impexp/PreviewDataGrid.js b/packages/web/src/impexp/PreviewDataGrid.js new file mode 100644 index 000000000..34df29f11 --- /dev/null +++ b/packages/web/src/impexp/PreviewDataGrid.js @@ -0,0 +1,59 @@ +import { createGridCache, createGridConfig, FreeTableGridDisplay } from '@dbgate/datalib'; +import React from 'react'; +import DataGridCore from '../datagrid/DataGridCore'; +import RowsArrayGrider from '../datagrid/RowsArrayGrider'; +import axios from '../utility/axios'; +import ErrorInfo from '../widgets/ErrorInfo'; +import LoadingInfo from '../widgets/LoadingInfo'; + +export default function PreviewDataGrid({ reader, ...other }) { + const [isLoading, setIsLoading] = React.useState(false); + const [errorMessage, setErrorMessage] = React.useState(null); + const [model, setModel] = React.useState(null); + const [config, setConfig] = React.useState(createGridConfig()); + const [cache, setCache] = React.useState(createGridCache()); + const [grider, setGrider] = React.useState(null); + + const handleLoadInitialData = async () => { + try { + if (!reader) { + setModel(null); + setGrider(null); + return; + } + setIsLoading(true); + const resp = await axios.post('runners/load-reader', reader); + // @ts-ignore + setModel(resp.data); + setGrider(new RowsArrayGrider(resp.data.rows)); + setIsLoading(false); + } catch (err) { + setIsLoading(false); + const errorMessage = (err && err.response && err.response.data && err.response.data.error) || 'Loading failed'; + setErrorMessage(errorMessage); + console.error(err.response); + } + }; + + React.useEffect(() => { + handleLoadInitialData(); + }, [reader]); + + const display = React.useMemo(() => new FreeTableGridDisplay(model, config, setConfig, cache, setCache), [ + model, + config, + cache, + grider, + ]); + + if (isLoading) { + return ; + } + if (errorMessage) { + return ; + } + + if 
(!grider) return null; + + return ; +} diff --git a/packages/web/src/impexp/createImpExpScript.js b/packages/web/src/impexp/createImpExpScript.js index 5cbacd522..ce03dbf4b 100644 --- a/packages/web/src/impexp/createImpExpScript.js +++ b/packages/web/src/impexp/createImpExpScript.js @@ -211,3 +211,19 @@ export function getActionOptions(source, values, targetDbinfo) { } return res; } + +export async function createPreviewReader(values, sourceName) { const [sourceConnection, sourceDriver] = await getConnection( values.sourceStorageType, values.sourceConnectionId, values.sourceDatabaseName ); const [functionName, props] = getSourceExpr(sourceName, values, sourceConnection, sourceDriver); return { functionName, props: { ...props, limitRows: 100, }, }; } diff --git a/packages/web/src/modals/ImportExportModal.js b/packages/web/src/modals/ImportExportModal.js index 040917222..aead0d510 100644 --- a/packages/web/src/modals/ImportExportModal.js +++ b/packages/web/src/modals/ImportExportModal.js @@ -1,4 +1,4 @@ -import React from 'react'; +import React, { useState } from 'react'; import ModalBase from './ModalBase'; import FormStyledButton from '../widgets/FormStyledButton'; import { Formik, Form, useFormikContext } from 'formik'; @@ -16,6 +16,7 @@ import WidgetColumnBar, { WidgetColumnBarItem } from '../widgets/WidgetColumnBar import SocketMessagesView from '../query/SocketMessagesView'; import RunnerOutputFiles from '../query/RunnerOuputFiles'; import useTheme from '../theme/useTheme'; +import PreviewDataGrid from '../impexp/PreviewDataGrid'; const headerHeight = '60px'; const footerHeight = '60px'; @@ -106,6 +107,7 @@ export default function ImportExportModal({ modalState, initialValues, uploadedF const [runnerId, setRunnerId] = React.useState(null); const archive = useCurrentArchive(); const theme = useTheme(); + const [previewReader, setPreviewReader] = useState(null); const handleExecute = async (values) => { const script = await 
createImpExpScript(values); @@ -134,13 +136,10 @@ export default function ImportExportModal({ modalState, initialValues, uploadedF Import/Export - + - {/* - Preview - */} @@ -150,6 +149,11 @@ export default function ImportExportModal({ modalState, initialValues, uploadedF executeNumber={executeNumber} /> + {previewReader && ( + + + + )}