Merge branch 'feature/impexp'

This commit is contained in:
SPRINX0\prochazka
2025-03-04 15:55:10 +01:00
36 changed files with 415 additions and 196 deletions

View File

@@ -94,14 +94,26 @@ module.exports = {
handle_ping() {},
handle_freeData(runid, { freeData }) {
const [resolve, reject] = this.requests[runid];
const { resolve } = this.requests[runid];
resolve(freeData);
delete this.requests[runid];
},
handle_copyStreamError(runid, { copyStreamError }) {
const { reject, exitOnStreamError } = this.requests[runid] || {};
if (exitOnStreamError) {
reject(copyStreamError);
delete this.requests[runid];
}
},
handle_progress(runid, progressData) {
socket.emit(`runner-progress-${runid}`, progressData);
},
rejectRequest(runid, error) {
if (this.requests[runid]) {
const [resolve, reject] = this.requests[runid];
const { reject } = this.requests[runid];
reject(error);
delete this.requests[runid];
}
@@ -113,6 +125,8 @@ module.exports = {
fs.writeFileSync(`${scriptFile}`, scriptText);
fs.mkdirSync(directory);
const pluginNames = extractPlugins(scriptText);
// console.log('********************** SCRIPT TEXT **********************');
// console.log(scriptText);
logger.info({ scriptFile }, 'Running script');
// const subprocess = fork(scriptFile, ['--checkParent', '--max-old-space-size=8192'], {
const subprocess = fork(
@@ -150,11 +164,13 @@ module.exports = {
byline(subprocess.stdout).on('data', pipeDispatcher('info'));
byline(subprocess.stderr).on('data', pipeDispatcher('error'));
subprocess.on('exit', code => {
// console.log('... EXITED', code);
this.rejectRequest(runid, { message: 'No data returned, maybe input data source is too big' });
logger.info({ code, pid: subprocess.pid }, 'Exited process');
socket.emit(`runner-done-${runid}`, code);
});
subprocess.on('error', error => {
// console.log('... ERROR subprocess', error);
this.rejectRequest(runid, { message: error && (error.message || error.toString()) });
console.error('... ERROR subprocess', error);
this.dispatchMessage({
@@ -231,7 +247,7 @@ module.exports = {
const promise = new Promise((resolve, reject) => {
const runid = crypto.randomUUID();
this.requests[runid] = [resolve, reject];
this.requests[runid] = { resolve, reject, exitOnStreamError: true };
this.startCore(runid, loaderScriptTemplate(prefix, functionName, props, runid));
});
return promise;

View File

@@ -1,6 +1,25 @@
const EnsureStreamHeaderStream = require('../utility/EnsureStreamHeaderStream');
const Stream = require('stream');
const ColumnMapTransformStream = require('../utility/ColumnMapTransformStream');
const streamPipeline = require('../utility/streamPipeline');
const { getLogger, extractErrorLogData, RowProgressReporter } = require('dbgate-tools');
const logger = getLogger('copyStream');
const stream = require('stream');
// Pass-through object-mode transform that counts rows for progress reporting.
// Every chunk flowing through increments the attached reporter by one; when
// the stream flushes, the reporter is finalized so the last count is sent.
class ReportingTransform extends stream.Transform {
  constructor(reporter, options = {}) {
    // objectMode is forced on — rows are objects, never byte chunks.
    super({ ...options, objectMode: true });
    this.reporter = reporter;
  }

  _transform(chunk, encoding, callback) {
    this.reporter.add(1);
    // Hand the row through untouched (equivalent to push + empty callback).
    callback(null, chunk);
  }

  _flush(callback) {
    this.reporter.finish();
    callback();
  }
}
/**
* Copies reader to writer. Used for import, export tables and transfer data between tables
@@ -9,10 +28,23 @@ const ColumnMapTransformStream = require('../utility/ColumnMapTransformStream');
* @param {object} options - options
* @returns {Promise}
*/
function copyStream(input, output, options) {
const { columns } = options || {};
async function copyStream(input, output, options) {
const { columns, progressName } = options || {};
if (progressName) {
process.send({
msgtype: 'progress',
progressName,
status: 'running',
});
}
const transforms = [];
if (progressName) {
const reporter = new RowProgressReporter(progressName, 'readRowCount');
transforms.push(new ReportingTransform(reporter));
}
if (columns) {
transforms.push(new ColumnMapTransformStream(columns));
}
@@ -20,36 +52,37 @@ function copyStream(input, output, options) {
transforms.push(new EnsureStreamHeaderStream());
}
// return new Promise((resolve, reject) => {
// Stream.pipeline(input, ...transforms, output, err => {
// if (err) {
// reject(err);
// } else {
// resolve();
// }
// });
// });
try {
await streamPipeline(input, transforms, output);
return new Promise((resolve, reject) => {
const finisher = output['finisher'] || output;
finisher.on('finish', resolve);
finisher.on('error', reject);
let lastStream = input;
for (const tran of transforms) {
lastStream.pipe(tran);
lastStream = tran;
if (progressName) {
process.send({
msgtype: 'progress',
progressName,
status: 'done',
});
}
lastStream.pipe(output);
} catch (err) {
process.send({
msgtype: 'copyStreamError',
copyStreamError: {
message: err.message,
...err,
},
});
// if (output.requireFixedStructure) {
// const ensureHeader = new EnsureStreamHeaderStream();
// input.pipe(ensureHeader);
// ensureHeader.pipe(output);
// } else {
// input.pipe(output);
// }
});
if (progressName) {
process.send({
msgtype: 'progress',
progressName,
status: 'error',
errorMessage: err.message,
});
}
logger.error(extractErrorLogData(err, { progressName }), 'Import/export job failed');
// throw err;
}
}
module.exports = copyStream;

View File

@@ -24,8 +24,6 @@ async function dataDuplicator({
const dbhan = systemConnection || (await connectUtility(driver, connection, 'write'));
try {
logger.info(`Connected.`);
if (!analysedStructure) {
analysedStructure = await driver.analyseFull(dbhan);
}

View File

@@ -19,8 +19,6 @@ async function dropAllDbObjects({ connection, systemConnection, driver, analysed
const dbhan = systemConnection || (await connectUtility(driver, connection, 'write'));
logger.info(`Connected.`);
if (!analysedStructure) {
analysedStructure = await driver.analyseFull(dbhan);
}

View File

@@ -31,8 +31,6 @@ async function dumpDatabase({
const dbhan = systemConnection || (await connectUtility(driver, connection, 'read', { forceRowsAsObjects: true }));
try {
logger.info(`Connected.`);
const dumper = await driver.createBackupDumper(dbhan, {
outputFile,
databaseName,

View File

@@ -36,7 +36,7 @@ async function executeQuery({
}
try {
logger.info(`Connected.`);
logger.debug(`Running SQL query, length: ${sql.length}`);
await driver.script(dbhan, sql, { logScriptItems });
} finally {

View File

@@ -5,6 +5,7 @@ const { splitQueryStream } = require('dbgate-query-splitter/lib/splitQueryStream
const download = require('./download');
const stream = require('stream');
const { getLogger } = require('dbgate-tools');
const streamPipeline = require('../utility/streamPipeline');
const logger = getLogger('importDb');
@@ -43,25 +44,12 @@ class ImportStream extends stream.Transform {
}
}
/**
 * Waits for a readable stream to finish.
 *
 * @param {import('stream').Readable} stream - stream to observe
 * @returns {Promise<boolean>} resolves with true on 'end', rejects with the
 *   first 'error' the stream emits
 */
function awaitStreamEnd(stream) {
  return new Promise((resolve, reject) => {
    stream.once('end', () => resolve(true));
    stream.once('error', reject);
  });
}
async function importDatabase({ connection = undefined, systemConnection = undefined, driver = undefined, inputFile }) {
logger.info(`Importing database`);
if (!driver) driver = requireEngineDriver(connection);
const dbhan = systemConnection || (await connectUtility(driver, connection, 'write'));
try {
logger.info(`Connected.`);
logger.info(`Input file: ${inputFile}`);
const downloadedFile = await download(inputFile);
logger.info(`Downloaded file: ${downloadedFile}`);
@@ -72,9 +60,8 @@ async function importDatabase({ connection = undefined, systemConnection = undef
returnRichInfo: true,
});
const importStream = new ImportStream(dbhan, driver);
// @ts-ignore
splittedStream.pipe(importStream);
await awaitStreamEnd(importStream);
await streamPipeline(splittedStream, importStream);
} finally {
if (!systemConnection) {
await driver.close(dbhan);

View File

@@ -53,8 +53,7 @@ async function jsonLinesReader({ fileName, encoding = 'utf-8', limitRows = undef
);
const liner = byline(fileStream);
const parser = new ParseStream({ limitRows });
liner.pipe(parser);
return parser;
return [liner, parser];
}
module.exports = jsonLinesReader;

View File

@@ -10,7 +10,6 @@ const download = require('./download');
const logger = getLogger('jsonReader');
class ParseStream extends stream.Transform {
constructor({ limitRows, jsonStyle, keyField }) {
super({ objectMode: true });
@@ -72,8 +71,12 @@ async function jsonReader({
// @ts-ignore
encoding
);
const parseJsonStream = parser();
fileStream.pipe(parseJsonStream);
const resultPipe = [fileStream, parseJsonStream];
// fileStream.pipe(parseJsonStream);
const parseStream = new ParseStream({ limitRows, jsonStyle, keyField });
@@ -81,15 +84,20 @@ async function jsonReader({
if (rootField) {
const filterStream = pick({ filter: rootField });
parseJsonStream.pipe(filterStream);
filterStream.pipe(tramsformer);
} else {
parseJsonStream.pipe(tramsformer);
resultPipe.push(filterStream);
// parseJsonStream.pipe(filterStream);
// filterStream.pipe(tramsformer);
}
// else {
// parseJsonStream.pipe(tramsformer);
// }
tramsformer.pipe(parseStream);
resultPipe.push(tramsformer);
resultPipe.push(parseStream);
return parseStream;
// tramsformer.pipe(parseStream);
return resultPipe;
}
module.exports = jsonReader;

View File

@@ -99,9 +99,10 @@ async function jsonWriter({ fileName, jsonStyle, keyField = '_key', rootField, e
logger.info(`Writing file ${fileName}`);
const stringify = new StringifyStream({ jsonStyle, keyField, rootField });
const fileStream = fs.createWriteStream(fileName, encoding);
stringify.pipe(fileStream);
stringify['finisher'] = fileStream;
return stringify;
return [stringify, fileStream];
// stringify.pipe(fileStream);
// stringify['finisher'] = fileStream;
// return stringify;
}
module.exports = jsonWriter;

View File

@@ -6,15 +6,13 @@ const exportDbModel = require('../utility/exportDbModel');
const logger = getLogger('analyseDb');
async function loadDatabase({ connection = undefined, systemConnection = undefined, driver = undefined, outputDir }) {
logger.info(`Analysing database`);
logger.debug(`Analysing database`);
if (!driver) driver = requireEngineDriver(connection);
const dbhan = systemConnection || (await connectUtility(driver, connection, 'read', { forceRowsAsObjects: true }));
try {
logger.info(`Connected.`);
const dbInfo = await driver.analyseFull(dbhan);
logger.info(`Analyse finished`);
logger.debug(`Analyse finished`);
await exportDbModel(dbInfo, outputDir);
} finally {

View File

@@ -141,8 +141,9 @@ async function modifyJsonLinesReader({
);
const liner = byline(fileStream);
const parser = new ParseStream({ limitRows, changeSet, mergedRows, mergeKey, mergeMode });
liner.pipe(parser);
return parser;
return [liner, parser];
// liner.pipe(parser);
// return parser;
}
module.exports = modifyJsonLinesReader;

View File

@@ -30,7 +30,6 @@ async function queryReader({
const driver = requireEngineDriver(connection);
const pool = await connectUtility(driver, connection, queryType == 'json' ? 'read' : 'script');
logger.info(`Connected.`);
const reader =
queryType == 'json' ? await driver.readJsonQuery(pool, query) : await driver.readQuery(pool, query || sql);
return reader;

View File

@@ -44,9 +44,10 @@ async function sqlDataWriter({ fileName, dataName, driver, encoding = 'utf-8' })
logger.info(`Writing file ${fileName}`);
const stringify = new SqlizeStream({ fileName, dataName });
const fileStream = fs.createWriteStream(fileName, encoding);
stringify.pipe(fileStream);
stringify['finisher'] = fileStream;
return stringify;
return [stringify, fileStream];
// stringify.pipe(fileStream);
// stringify['finisher'] = fileStream;
// return stringify;
}
module.exports = sqlDataWriter;

View File

@@ -18,7 +18,6 @@ async function tableReader({ connection, systemConnection, pureName, schemaName,
driver = requireEngineDriver(connection);
}
const dbhan = systemConnection || (await connectUtility(driver, connection, 'read'));
logger.info(`Connected.`);
const fullName = { pureName, schemaName };

View File

@@ -26,7 +26,6 @@ async function tableWriter({ connection, schemaName, pureName, driver, systemCon
}
const dbhan = systemConnection || (await connectUtility(driver, connection, 'write'));
logger.info(`Connected.`);
return await driver.writeTable(dbhan, { schemaName, pureName }, options);
}

View File

@@ -0,0 +1,18 @@
const stream = require('stream');
const _ = require('lodash');
/**
 * Runs a set of streams as one pipeline and resolves when it completes.
 *
 * Arguments may be individual streams or arbitrarily nested arrays of streams
 * (readers/writers in this codebase return either form), ordered
 * source → transforms → destination. Everything is deep-flattened before
 * being handed to stream.pipeline, which wires the pipes together and
 * propagates errors and teardown across the chain.
 *
 * @param {...any} processedStreams - streams or nested arrays of streams
 * @returns {Promise<void>} resolves on success, rejects with the first
 *   pipeline error
 */
function streamPipeline(...processedStreams) {
  // Native deep flatten — equivalent to the previous lodash _.flattenDeep,
  // without the third-party dependency.
  const streams = processedStreams.flat(Infinity);
  return new Promise((resolve, reject) => {
    // @ts-ignore
    stream.pipeline(...streams, err => {
      if (err) {
        reject(err);
      } else {
        resolve();
      }
    });
  });
}
module.exports = streamPipeline;

View File

@@ -41,12 +41,13 @@ export class ScriptWriter {
this.packageNames.push(packageName);
}
copyStream(sourceVar, targetVar, colmapVar = null) {
if (colmapVar) {
this._put(`await dbgateApi.copyStream(${sourceVar}, ${targetVar}, {columns: ${colmapVar}});`);
} else {
this._put(`await dbgateApi.copyStream(${sourceVar}, ${targetVar});`);
}
copyStream(sourceVar, targetVar, colmapVar = null, progressName?: string) {
let opts = '{';
if (colmapVar) opts += `columns: ${colmapVar}, `;
if (progressName) opts += `progressName: "${progressName}", `;
opts += '}';
this._put(`await dbgateApi.copyStream(${sourceVar}, ${targetVar}, ${opts});`);
}
dumpDatabase(options) {
@@ -117,12 +118,13 @@ export class ScriptWriterJson {
});
}
copyStream(sourceVar, targetVar, colmapVar = null) {
copyStream(sourceVar, targetVar, colmapVar = null, progressName?: string) {
this.commands.push({
type: 'copyStream',
sourceVar,
targetVar,
colmapVar,
progressName,
});
}
@@ -183,7 +185,7 @@ export function jsonScriptToJavascript(json) {
script.assignValue(cmd.variableName, cmd.jsonValue);
break;
case 'copyStream':
script.copyStream(cmd.sourceVar, cmd.targetVar, cmd.colmapVar);
script.copyStream(cmd.sourceVar, cmd.targetVar, cmd.colmapVar, cmd.progressName);
break;
case 'endLine':
script.endLine();

View File

@@ -3,6 +3,7 @@ import _intersection from 'lodash/intersection';
import _fromPairs from 'lodash/fromPairs';
import { getLogger } from './getLogger';
import { prepareTableForImport } from './tableTransforms';
import { RowProgressReporter } from './rowProgressReporter';
const logger = getLogger('bulkStreamBase');
@@ -21,6 +22,7 @@ export function createBulkInsertStreamBase(driver: EngineDriver, stream, dbhan,
writable.columnNames = null;
writable.columnDataTypes = null;
writable.requireFixedStructure = driver.databaseEngineTypes.includes('sql');
writable.rowsReporter = new RowProgressReporter(options.progressName);
writable.addRow = async row => {
if (writable.structure) {
@@ -92,6 +94,7 @@ export function createBulkInsertStreamBase(driver: EngineDriver, stream, dbhan,
// require('fs').writeFileSync('/home/jena/test.sql', dmp.s);
// console.log(dmp.s);
await driver.query(dbhan, dmp.s, { discardResult: true });
writable.rowsReporter.add(rows.length);
} else {
for (const row of rows) {
const dmp = driver.createDumper();
@@ -106,6 +109,7 @@ export function createBulkInsertStreamBase(driver: EngineDriver, stream, dbhan,
dmp.putRaw(')');
// console.log(dmp.s);
await driver.query(dbhan, dmp.s, { discardResult: true });
writable.rowsReporter.add(1);
}
}
if (options.commitAfterInsert) {
@@ -129,6 +133,7 @@ export function createBulkInsertStreamBase(driver: EngineDriver, stream, dbhan,
writable._final = async callback => {
await writable.send();
writable.rowsReporter.finish();
callback();
};

View File

@@ -25,3 +25,4 @@ export * from './detectSqlFilterBehaviour';
export * from './filterBehaviours';
export * from './schemaInfoTools';
export * from './dbKeysLoader';
export * from './rowProgressReporter';

View File

@@ -0,0 +1,45 @@
// Throttled row-count progress reporter. Accumulates a counter and posts it
// to the parent process over IPC (process.send) at most once per second,
// plus a final flush on finish().
// NOTE(review): assumes it runs inside a forked child process where
// process.send is defined — calling send() elsewhere would throw; confirm
// against the runner that forks these scripts.
export class RowProgressReporter {
  // Total rows reported so far (accumulates even when reporting is disabled).
  counter = 0;
  // Pending throttle timer; non-null while a send is already scheduled.
  timeoutHandle = null;

  // progressName identifies this job in 'progress' messages; a falsy value
  // disables all IPC reporting. field is the message key carrying the count
  // (default 'writtenRowCount' for writers; readers pass 'readRowCount').
  constructor(public progressName, public field = 'writtenRowCount') {}

  // Add `count` rows and schedule a throttled send (at most one per second).
  add(count: number) {
    this.counter += count;
    if (!this.progressName) {
      return;
    }
    // A send is already scheduled — the pending timer will pick up the
    // updated counter, so don't schedule another.
    if (this.timeoutHandle) {
      return;
    }
    this.timeoutHandle = setTimeout(() => {
      this.timeoutHandle = null;
      this.send();
    }, 1000);
  }

  // Cancel any pending throttle timer and send the final count immediately.
  finish() {
    if (!this.progressName) {
      return;
    }
    if (this.timeoutHandle) {
      clearTimeout(this.timeoutHandle);
      this.timeoutHandle = null;
    }
    this.send();
  }

  // Emit the current counter to the parent process as a 'progress' message.
  send() {
    if (!this.progressName) {
      return;
    }
    process.send({
      msgtype: 'progress',
      progressName: this.progressName,
      [this.field]: this.counter,
    });
  }
}

View File

@@ -41,6 +41,7 @@ export interface WriteTableOptions {
createIfNotExists?: boolean;
commitAfterInsert?: boolean;
targetTableStructure?: TableInfo;
progressName?: string;
}
export interface EngineAuthType {
@@ -144,6 +145,8 @@ export interface DatabaseHandle<TClient = any> {
treeKeySeparator?: string;
}
export type StreamResult = stream.Readable | (stream.Readable | stream.Writable)[];
export interface EngineDriver<TClient = any> extends FilterBehaviourProvider {
engine: string;
title: string;
@@ -191,15 +194,11 @@ export interface EngineDriver<TClient = any> extends FilterBehaviourProvider {
close(dbhan: DatabaseHandle<TClient>): Promise<any>;
query(dbhan: DatabaseHandle<TClient>, sql: string, options?: QueryOptions): Promise<QueryResult>;
stream(dbhan: DatabaseHandle<TClient>, sql: string, options: StreamOptions);
readQuery(dbhan: DatabaseHandle<TClient>, sql: string, structure?: TableInfo): Promise<stream.Readable>;
readJsonQuery(dbhan: DatabaseHandle<TClient>, query: any, structure?: TableInfo): Promise<stream.Readable>;
readQuery(dbhan: DatabaseHandle<TClient>, sql: string, structure?: TableInfo): Promise<StreamResult>;
readJsonQuery(dbhan: DatabaseHandle<TClient>, query: any, structure?: TableInfo): Promise<StreamResult>;
// eg. PostgreSQL COPY FROM stdin
writeQueryFromStream(dbhan: DatabaseHandle<TClient>, sql: string): Promise<stream.Writable>;
writeTable(
dbhan: DatabaseHandle<TClient>,
name: NamedObjectInfo,
options: WriteTableOptions
): Promise<stream.Writable>;
writeQueryFromStream(dbhan: DatabaseHandle<TClient>, sql: string): Promise<StreamResult>;
writeTable(dbhan: DatabaseHandle<TClient>, name: NamedObjectInfo, options: WriteTableOptions): Promise<StreamResult>;
analyseSingleObject(
dbhan: DatabaseHandle<TClient>,
name: NamedObjectInfo,

View File

@@ -20,7 +20,7 @@
import { createEventDispatcher } from 'svelte';
import FontIcon from '../icons/FontIcon.svelte';
export let columns: TableControlColumn[];
export let columns: (TableControlColumn | false)[];
export let rows;
export let focusOnCreate = false;
export let selectable = false;

View File

@@ -149,6 +149,7 @@
'icon download': 'mdi mdi-download',
'icon text': 'mdi mdi-text',
'icon ai': 'mdi mdi-head-lightbulb',
'icon wait': 'mdi mdi-timer-sand',
'icon run': 'mdi mdi-play',
'icon chevron-down': 'mdi mdi-chevron-down',

View File

@@ -76,6 +76,7 @@
import { compositeDbNameIfNeeded } from 'dbgate-tools';
import createRef from '../utility/createRef';
import DropDownButton from '../buttons/DropDownButton.svelte';
import ErrorMessageModal from '../modals/ErrorMessageModal.svelte';
// export let uploadedFile = undefined;
// export let openedFile = undefined;
@@ -104,6 +105,7 @@
$: sourceList = $values.sourceList;
let targetEditKey = 0;
export let progressHolder = null;
const previewSource = writable(null);
@@ -211,92 +213,132 @@
<div class="title"><FontIcon icon="icon tables" /> Map source tables/files</div>
{#key targetEditKey}
<TableControl
rows={$values.sourceList || []}
columns={[
{
fieldName: 'source',
header: 'Source',
component: SourceName,
getProps: row => ({ name: row }),
},
{
fieldName: 'action',
header: 'Action',
component: SourceAction,
getProps: row => ({ name: row, targetDbinfo }),
},
{
fieldName: 'target',
header: 'Target',
slot: 1,
},
{
fieldName: 'preview',
header: 'Preview',
slot: 0,
},
{
fieldName: 'columns',
header: 'Columns',
slot: 2,
},
]}
>
<svelte:fragment slot="0" let:row>
{#if supportsPreview}
<CheckboxField
checked={$previewSource == row}
on:change={e => {
// @ts-ignore
if (e.target.checked) $previewSource = row;
else $previewSource = null;
}}
/>
{/if}
</svelte:fragment>
<svelte:fragment slot="1" let:row>
<div class="flex">
<TextField
value={getTargetName($extensions, row, $values)}
on:input={e =>
setFieldValue(
`targetName_${row}`,
{#key progressHolder}
<TableControl
rows={$values.sourceList || []}
columns={[
{
fieldName: 'source',
header: 'Source',
component: SourceName,
getProps: row => ({ name: row }),
},
{
fieldName: 'action',
header: 'Action',
component: SourceAction,
getProps: row => ({ name: row, targetDbinfo }),
},
{
fieldName: 'target',
header: 'Target',
slot: 1,
},
supportsPreview && {
fieldName: 'preview',
header: 'Preview',
slot: 0,
},
!!progressHolder && {
fieldName: 'status',
header: 'Status',
slot: 3,
},
{
fieldName: 'columns',
header: 'Columns',
slot: 2,
},
]}
>
<svelte:fragment slot="0" let:row>
{#if supportsPreview}
<CheckboxField
checked={$previewSource == row}
on:change={e => {
// @ts-ignore
e.target.value
)}
/>
{#if $targetDbinfo}
<DropDownButton
menu={() => {
return $targetDbinfo.tables.map(opt => ({
text: opt.pureName,
onClick: () => {
setFieldValue(`targetName_${row}`, opt.pureName);
targetEditKey += 1;
},
}));
if (e.target.checked) $previewSource = row;
else $previewSource = null;
}}
/>
{/if}
</div>
</svelte:fragment>
<svelte:fragment slot="2" let:row>
{@const columnCount = ($values[`columns_${row}`] || []).filter(x => !x.skip).length}
<Link
onClick={() => {
const targetNameLower = ($values[`targetName_${row}`] || row)?.toLowerCase();
showModal(ColumnMapModal, {
initialValue: $values[`columns_${row}`],
sourceTableInfo: $sourceDbinfo?.tables?.find(x => x.pureName?.toLowerCase() == row?.toLowerCase()),
targetTableInfo: $targetDbinfo?.tables?.find(x => x.pureName?.toLowerCase() == targetNameLower),
onConfirm: value => setFieldValue(`columns_${row}`, value),
});
}}
>{columnCount > 0 ? `(${columnCount} columns)` : '(copy from source)'}
</Link>
</svelte:fragment>
</TableControl>
</svelte:fragment>
<svelte:fragment slot="1" let:row>
<div class="flex">
<TextField
value={getTargetName($extensions, row, $values)}
on:input={e =>
setFieldValue(
`targetName_${row}`,
// @ts-ignore
e.target.value
)}
/>
{#if $targetDbinfo}
<DropDownButton
menu={() => {
return $targetDbinfo.tables.map(opt => ({
text: opt.pureName,
onClick: () => {
setFieldValue(`targetName_${row}`, opt.pureName);
targetEditKey += 1;
},
}));
}}
/>
{/if}
</div>
</svelte:fragment>
<svelte:fragment slot="2" let:row>
{@const columnCount = ($values[`columns_${row}`] || []).filter(x => !x.skip).length}
<Link
onClick={() => {
const targetNameLower = ($values[`targetName_${row}`] || row)?.toLowerCase();
showModal(ColumnMapModal, {
initialValue: $values[`columns_${row}`],
sourceTableInfo: $sourceDbinfo?.tables?.find(x => x.pureName?.toLowerCase() == row?.toLowerCase()),
targetTableInfo: $targetDbinfo?.tables?.find(x => x.pureName?.toLowerCase() == targetNameLower),
onConfirm: value => setFieldValue(`columns_${row}`, value),
});
}}
>{columnCount > 0 ? `(${columnCount} columns)` : '(copy from source)'}
</Link>
</svelte:fragment>
<svelte:fragment slot="3" let:row>
{#if progressHolder[row]?.status == 'running'}
<FontIcon icon="icon loading" />
{#if progressHolder[row]?.writtenRowCount}
{progressHolder[row]?.writtenRowCount} rows written
{:else if progressHolder[row]?.readRowCount}
{progressHolder[row]?.readRowCount} rows read
{:else}
Running
{/if}
{:else if progressHolder[row]?.status == 'error'}
<FontIcon icon="img error" /> Error
{#if progressHolder[row]?.errorMessage}
<FontIcon
icon="img info"
title={progressHolder[row]?.errorMessage}
on:click={() => showModal(ErrorMessageModal, { message: progressHolder[row]?.errorMessage })}
style="cursor: pointer"
/>
{/if}
{:else if progressHolder[row]?.status == 'done'}
<FontIcon icon="img ok" />
{#if progressHolder[row]?.writtenRowCount}
{progressHolder[row]?.writtenRowCount} rows written
{:else if progressHolder[row]?.readRowCount}
{progressHolder[row]?.readRowCount} rows read
{:else}
Done
{/if}
{:else}
<FontIcon icon="icon wait" /> Queued
{/if}
</svelte:fragment>
</TableControl>
{/key}
{/key}
</div>
</div>

View File

@@ -164,6 +164,7 @@ function getTargetExpr(extensions, sourceName, values, targetConnection, targetD
pureName: getTargetName(extensions, sourceName, values),
...extractDriverApiParameters(values, 'target', targetDriver),
...getFlagsFroAction(values[`actionType_${sourceName}`]),
progressName: sourceName,
},
];
}
@@ -233,7 +234,7 @@ export default async function createImpExpScript(extensions, values, forceScript
script.assignValue(colmapVar, colmap);
}
script.copyStream(sourceVar, targetVar, colmapVar);
script.copyStream(sourceVar, targetVar, colmapVar, sourceName);
script.endLine();
}
return script.getScript(values.schedule);

View File

@@ -18,7 +18,7 @@
// $: console.log('MESSAGE ROWS', items);
const values = writable({
hideDebug: false,
hideDebug: true,
hideInfo: false,
hideError: false,
});

View File

@@ -65,6 +65,7 @@
export let savedFile;
export let savedFilePath;
let progressHolder = null;
const refreshArchiveFolderRef = createRef(null);
const formValues = writable({});
@@ -179,6 +180,7 @@
const handleExecute = async e => {
if (busy) return;
progressHolder = {};
const values = $formValues as any;
busy = true;
const script = await createImpExpScript($extensions, values);
@@ -228,6 +230,29 @@
title: `${getSourceTargetTitle('source', values)}->${getSourceTargetTitle('target', values)}(${values.sourceList?.length || 0})`,
}));
}
const handleProgress = progress => {
progressHolder = {
...progressHolder,
[progress.progressName]: {
...progressHolder[progress.progressName],
...progress,
},
};
};
$: progressEffect = useEffect(() => {
if (runnerId) {
const eventName = `runner-progress-${runnerId}`;
apiOn(eventName, handleProgress);
return () => {
apiOff(eventName, handleProgress);
};
}
return () => {};
});
$progressEffect;
</script>
<ToolStripContainer>
@@ -237,6 +262,7 @@
<ImportExportConfigurator
bind:this={domConfigurator}
{previewReaderStore}
{progressHolder}
isTabActive={tabid == $activeTabId}
/>

View File

@@ -1,6 +1,12 @@
import { ScriptWriter, ScriptWriterJson } from 'dbgate-tools';
import getElectron from './getElectron';
import { showSnackbar, showSnackbarInfo, showSnackbarError, closeSnackbar } from '../utility/snackbar';
import {
showSnackbar,
showSnackbarInfo,
showSnackbarError,
closeSnackbar,
updateSnackbarProgressMessage,
} from '../utility/snackbar';
import resolveApi, { resolveApiHeaders } from './resolveApi';
import { apiCall, apiOff, apiOn } from './api';
import { normalizeExportColumnMap } from '../impexp/createImpExpScript';
@@ -70,9 +76,17 @@ async function runImportExportScript({ script, runningMessage, canceledMessage,
],
});
// Updates the snackbar with a row-count message for each runner progress
// event. Progress payloads carry the RowProgressReporter field names:
// 'writtenRowCount' (writer side) or 'readRowCount' (reader side).
// BUG FIX: previously read data.writtenRowsCount (extra 's'), which never
// matched the reporter's 'writtenRowCount' field, so written-row progress
// was silently dropped.
function handleRunnerProgress(data) {
  const rows = data.writtenRowCount || data.readRowCount;
  if (rows) {
    updateSnackbarProgressMessage(snackId, `${rows} rows processed`);
  }
}
function handleRunnerDone() {
closeSnackbar(snackId);
apiOff(`runner-done-${runid}`, handleRunnerDone);
apiOff(`runner-progress-${runid}`, handleRunnerProgress);
if (isCanceled) {
showSnackbarError(canceledMessage);
} else {
@@ -82,6 +96,7 @@ async function runImportExportScript({ script, runningMessage, canceledMessage,
}
apiOn(`runner-done-${runid}`, handleRunnerDone);
apiOn(`runner-progress-${runid}`, handleRunnerProgress);
}
export async function saveExportedFile(filters, defaultPath, extension, dataName, getScript: (filaPath: string) => {}) {
@@ -141,7 +156,7 @@ function generateQuickExportScript(
script.assignValue(colmapVar, colmap);
}
script.copyStream(sourceVar, targetVar, colmapVar);
script.copyStream(sourceVar, targetVar, colmapVar, 'data');
script.endLine();
return script.getScript();

View File

@@ -8,6 +8,7 @@ export interface SnackbarButton {
export interface SnackbarInfo {
message: string;
progressMessage?: string;
icon?: string;
autoClose?: boolean;
allowClose?: boolean;
@@ -59,6 +60,11 @@ export function showSnackbarError(message: string) {
export function closeSnackbar(snackId: string) {
openedSnackbars.update(x => x.filter(x => x.id != snackId));
}
export function updateSnackbarProgressMessage(snackId: string, progressMessage: string) {
openedSnackbars.update(x => x.map(x => (x.id === snackId ? { ...x, progressMessage } : x)));
}
// showSnackbar({
// icon: 'img ok',
// message: 'Test snackbar',

View File

@@ -10,6 +10,7 @@
export let autoClose = false;
export let allowClose = false;
export let buttons = [];
export let progressMessage = null;
function handleClose() {
openedSnackbars.update(x => x.filter(x => x.id != id));
@@ -25,6 +26,11 @@
<FontIcon {icon} />
{message}
</div>
{#if progressMessage}
<div class="progress-message">
{progressMessage}
</div>
{/if}
{#if allowClose}
<div class="close" on:click={handleClose}>
@@ -83,4 +89,10 @@
.button {
margin: 5px;
}
.progress-message {
color: var(--theme-font-3);
margin: 10px;
margin-left: 30px;
}
</style>

View File

@@ -95,9 +95,10 @@ async function reader({ fileName, encoding = 'utf-8', header = true, delimiter,
});
const fileStream = fs.createReadStream(downloadedFile, encoding);
const csvPrepare = new CsvPrepareStream({ header });
fileStream.pipe(csvStream);
csvStream.pipe(csvPrepare);
return csvPrepare;
return [fileStream, csvStream, csvPrepare];
// fileStream.pipe(csvStream);
// csvStream.pipe(csvPrepare);
// return csvPrepare;
}
reader.initialize = (dbgateEnv) => {

View File

@@ -31,11 +31,13 @@ async function writer({ fileName, encoding = 'utf-8', header = true, delimiter,
const csvPrepare = new CsvPrepareStream({ header });
const csvStream = csv.stringify({ delimiter, quoted });
const fileStream = fs.createWriteStream(fileName, encoding);
csvPrepare.pipe(csvStream);
csvStream.pipe(fileStream);
csvPrepare['finisher'] = fileStream;
// csvPrepare.pipe(csvStream);
// csvStream.pipe(fileStream);
// csvPrepare['finisher'] = fileStream;
csvPrepare.requireFixedStructure = true;
return csvPrepare;
return [csvPrepare, csvStream, fileStream];
// return csvPrepare;
}
module.exports = writer;

View File

@@ -266,6 +266,11 @@ const driver = {
pass.write(transformMongoData(row));
});
// propagate error
cursorStream.on('error', (err) => {
pass.emit('error', err);
});
// Called once the cursor is fully read
cursorStream.on('end', () => {
pass.emit('end');

View File

@@ -63,8 +63,10 @@ async function reader({ fileName, encoding = 'utf-8', itemElementName }) {
const fileStream = fs.createReadStream(fileName, encoding);
const parser = new ParseStream({ itemElementName });
fileStream.pipe(parser);
return parser;
return [fileStream, parser];
// fileStream.pipe(parser);
// return parser;
}
module.exports = reader;

View File

@@ -73,9 +73,10 @@ async function writer({ fileName, encoding = 'utf-8', itemElementName, rootEleme
logger.info(`Writing file ${fileName}`);
const stringify = new StringifyStream({ itemElementName, rootElementName });
const fileStream = fs.createWriteStream(fileName, encoding);
stringify.pipe(fileStream);
stringify['finisher'] = fileStream;
return stringify;
return [stringify, fileStream];
// stringify.pipe(fileStream);
// stringify['finisher'] = fileStream;
// return stringify;
}
module.exports = writer;