mirror of
https://github.com/DeNNiiInc/dbgate.git
synced 2026-04-17 23:45:59 +00:00
Merge branch 'master' into feature/firebird
This commit is contained in:
116
.github/workflows/build-app-check.yaml
vendored
Normal file
116
.github/workflows/build-app-check.yaml
vendored
Normal file
@@ -0,0 +1,116 @@
|
||||
# --------------------------------------------------------------------------------------------
|
||||
# This file is generated. Do not edit manually
|
||||
# --------------------------------------------------------------------------------------------
|
||||
name: Electron app check build
|
||||
'on':
|
||||
push:
|
||||
tags:
|
||||
- check-[0-9]+-[0-9]+-[0-9]+.[0-9]+
|
||||
jobs:
|
||||
build:
|
||||
runs-on: ${{ matrix.os }}
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
os:
|
||||
- macos-14
|
||||
- windows-2022
|
||||
- ubuntu-22.04
|
||||
steps:
|
||||
- name: Install python 3.11 (MacOS)
|
||||
if: matrix.os == 'macos-14'
|
||||
run: |
|
||||
brew install python@3.11
|
||||
echo "PYTHON=/opt/homebrew/bin/python3.11" >> $GITHUB_ENV
|
||||
- name: Context
|
||||
env:
|
||||
GITHUB_CONTEXT: ${{ toJson(github) }}
|
||||
run: echo "$GITHUB_CONTEXT"
|
||||
- uses: actions/checkout@v2
|
||||
with:
|
||||
fetch-depth: 1
|
||||
- name: Use Node.js 22.x
|
||||
uses: actions/setup-node@v1
|
||||
with:
|
||||
node-version: 22.x
|
||||
- name: adjustPackageJson
|
||||
run: |
|
||||
|
||||
node adjustPackageJson --community
|
||||
- name: yarn set timeout
|
||||
run: |
|
||||
|
||||
yarn config set network-timeout 100000
|
||||
- name: yarn install
|
||||
run: |
|
||||
|
||||
yarn install
|
||||
- name: setCurrentVersion
|
||||
run: |
|
||||
|
||||
yarn setCurrentVersion
|
||||
- name: printSecrets
|
||||
run: |
|
||||
|
||||
yarn printSecrets
|
||||
env:
|
||||
GIST_UPLOAD_SECRET: ${{secrets.GIST_UPLOAD_SECRET}}
|
||||
- name: fillPackagedPlugins
|
||||
run: |
|
||||
|
||||
yarn fillPackagedPlugins
|
||||
- name: Install Snapcraft
|
||||
if: matrix.os == 'ubuntu-22.04'
|
||||
uses: samuelmeuli/action-snapcraft@v1
|
||||
- name: Publish
|
||||
run: |
|
||||
|
||||
yarn run build:app
|
||||
env:
|
||||
GH_TOKEN: ${{ secrets.GH_TOKEN }}
|
||||
WIN_CSC_LINK: ${{ secrets.WINCERT_2025 }}
|
||||
WIN_CSC_KEY_PASSWORD: ${{ secrets.WINCERT_2025_PASSWORD }}
|
||||
CSC_LINK: ${{ secrets.APPLECERT_CERTIFICATE }}
|
||||
CSC_KEY_PASSWORD: ${{ secrets.APPLECERT_PASSWORD }}
|
||||
APPLE_ID: ${{ secrets.APPLE_ID }}
|
||||
APPLE_TEAM_ID: ${{ secrets.APPLE_TEAM_ID }}
|
||||
APPLE_ID_PASSWORD: ${{ secrets.APPLE_ID_PASSWORD }}
|
||||
SNAPCRAFT_STORE_CREDENTIALS: ${{secrets.SNAPCRAFT_LOGIN}}
|
||||
APPLE_APP_SPECIFIC_PASSWORD: ${{secrets.APPLE_APP_SPECIFIC_PASSWORD}}
|
||||
- name: Copy artifacts
|
||||
run: |
|
||||
mkdir artifacts
|
||||
|
||||
cp app/dist/*.deb artifacts/dbgate-check.deb || true
|
||||
cp app/dist/*x86*.AppImage artifacts/dbgate-check.AppImage || true
|
||||
cp app/dist/*arm64*.AppImage artifacts/dbgate-check-arm64.AppImage || true
|
||||
cp app/dist/*armv7l*.AppImage artifacts/dbgate-check-armv7l.AppImage || true
|
||||
cp app/dist/*win*.exe artifacts/dbgate-check.exe || true
|
||||
cp app/dist/*win_x64.zip artifacts/dbgate-windows-check.zip || true
|
||||
cp app/dist/*win_arm64.zip artifacts/dbgate-windows-check-arm64.zip || true
|
||||
cp app/dist/*-mac_universal.dmg artifacts/dbgate-check.dmg || true
|
||||
cp app/dist/*-mac_x64.dmg artifacts/dbgate-check-x64.dmg || true
|
||||
cp app/dist/*-mac_arm64.dmg artifacts/dbgate-check-arm64.dmg || true
|
||||
mv app/dist/*.snap artifacts/dbgate-check.snap || true
|
||||
|
||||
mv app/dist/*.exe artifacts/ || true
|
||||
mv app/dist/*.zip artifacts/ || true
|
||||
mv app/dist/*.tar.gz artifacts/ || true
|
||||
mv app/dist/*.AppImage artifacts/ || true
|
||||
mv app/dist/*.deb artifacts/ || true
|
||||
mv app/dist/*.snap artifacts/ || true
|
||||
mv app/dist/*.dmg artifacts/ || true
|
||||
mv app/dist/*.blockmap artifacts/ || true
|
||||
|
||||
mv app/dist/*.yml artifacts/ || true
|
||||
rm artifacts/builder-debug.yml
|
||||
- name: Upload artifacts
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: ${{ matrix.os }}
|
||||
path: artifacts
|
||||
- name: Print content of notarization-error.log
|
||||
if: failure() && matrix.os == 'macos-14'
|
||||
run: |
|
||||
|
||||
find . -type f -name "notarization-error.log" -exec echo "=== Start of {} ===" \; -exec cat {} \; -exec echo "=== End of {} ===" \;
|
||||
2
.github/workflows/build-app-pro-beta.yaml
vendored
2
.github/workflows/build-app-pro-beta.yaml
vendored
@@ -39,7 +39,7 @@ jobs:
|
||||
repository: dbgate/dbgate-pro
|
||||
token: '${{ secrets.GH_TOKEN }}'
|
||||
path: dbgate-pro
|
||||
ref: 55cf42d58b843c4f1ffd6ab9b808f5f971bc3c8b
|
||||
ref: ecea1eef17c69c56b0633317e24a68c5220a4810
|
||||
- name: Merge dbgate/dbgate-pro
|
||||
run: |
|
||||
mkdir ../dbgate-pro
|
||||
|
||||
2
.github/workflows/build-app-pro.yaml
vendored
2
.github/workflows/build-app-pro.yaml
vendored
@@ -39,7 +39,7 @@ jobs:
|
||||
repository: dbgate/dbgate-pro
|
||||
token: '${{ secrets.GH_TOKEN }}'
|
||||
path: dbgate-pro
|
||||
ref: 55cf42d58b843c4f1ffd6ab9b808f5f971bc3c8b
|
||||
ref: ecea1eef17c69c56b0633317e24a68c5220a4810
|
||||
- name: Merge dbgate/dbgate-pro
|
||||
run: |
|
||||
mkdir ../dbgate-pro
|
||||
|
||||
2
.github/workflows/build-cloud-pro.yaml
vendored
2
.github/workflows/build-cloud-pro.yaml
vendored
@@ -39,7 +39,7 @@ jobs:
|
||||
repository: dbgate/dbgate-pro
|
||||
token: '${{ secrets.GH_TOKEN }}'
|
||||
path: dbgate-pro
|
||||
ref: 55cf42d58b843c4f1ffd6ab9b808f5f971bc3c8b
|
||||
ref: ecea1eef17c69c56b0633317e24a68c5220a4810
|
||||
- name: Merge dbgate/dbgate-pro
|
||||
run: |
|
||||
mkdir ../dbgate-pro
|
||||
|
||||
2
.github/workflows/build-docker-pro.yaml
vendored
2
.github/workflows/build-docker-pro.yaml
vendored
@@ -44,7 +44,7 @@ jobs:
|
||||
repository: dbgate/dbgate-pro
|
||||
token: '${{ secrets.GH_TOKEN }}'
|
||||
path: dbgate-pro
|
||||
ref: 55cf42d58b843c4f1ffd6ab9b808f5f971bc3c8b
|
||||
ref: ecea1eef17c69c56b0633317e24a68c5220a4810
|
||||
- name: Merge dbgate/dbgate-pro
|
||||
run: |
|
||||
mkdir ../dbgate-pro
|
||||
|
||||
2
.github/workflows/build-npm-pro.yaml
vendored
2
.github/workflows/build-npm-pro.yaml
vendored
@@ -32,7 +32,7 @@ jobs:
|
||||
repository: dbgate/dbgate-pro
|
||||
token: '${{ secrets.GH_TOKEN }}'
|
||||
path: dbgate-pro
|
||||
ref: 55cf42d58b843c4f1ffd6ab9b808f5f971bc3c8b
|
||||
ref: ecea1eef17c69c56b0633317e24a68c5220a4810
|
||||
- name: Merge dbgate/dbgate-pro
|
||||
run: |
|
||||
mkdir ../dbgate-pro
|
||||
|
||||
2
.github/workflows/e2e-pro.yaml
vendored
2
.github/workflows/e2e-pro.yaml
vendored
@@ -26,7 +26,7 @@ jobs:
|
||||
repository: dbgate/dbgate-pro
|
||||
token: '${{ secrets.GH_TOKEN }}'
|
||||
path: dbgate-pro
|
||||
ref: 55cf42d58b843c4f1ffd6ab9b808f5f971bc3c8b
|
||||
ref: ecea1eef17c69c56b0633317e24a68c5220a4810
|
||||
- name: Merge dbgate/dbgate-pro
|
||||
run: |
|
||||
mkdir ../dbgate-pro
|
||||
|
||||
23
CHANGELOG.md
23
CHANGELOG.md
@@ -8,7 +8,19 @@ Builds:
|
||||
- linux - application for linux
|
||||
- win - application for Windows
|
||||
|
||||
## 6.4.0 - not released yet
|
||||
## 6.4.2
|
||||
|
||||
- ADDED: Source label to docker container #1105
|
||||
- FIXED: DbGate restart needed to take effect after trigger is created/deleted on mariadb #1112
|
||||
- ADDED: View PostgreSQL query console output #1108
|
||||
- FIXED: Single quote generete MySql error #1107
|
||||
- ADDED: Ability to limit query result count #1098
|
||||
- CHANGED: Correct processing of bigint columns #1087 #1055 #583
|
||||
- CHANGED: Improved and optimalized algorithm of loading redis keys #1062, #1034
|
||||
- FIXED: Fixed loading Redis keys with :: in key name
|
||||
|
||||
## 6.4.0
|
||||
- ADDED: DuckDB support
|
||||
- ADDED: Data deployer (Premium)
|
||||
- ADDED: Compare data between JSON lines file in archive and database table
|
||||
- CHANGED: Data Duplicator => Data Replicator (suitable for update, create and delete data, much more customizable)
|
||||
@@ -18,6 +30,15 @@ Builds:
|
||||
- ADDED: Upload SQLite files
|
||||
- ADDED: Upload archive as ZIP folder (Premium)
|
||||
- ADDED: Compress, uncompress archive folder (Premium)
|
||||
- ADDED: Export connections and settings #357
|
||||
- ADDED: Filtering by MongoDB ObjectId works now also without ObjectId(...) wrapper
|
||||
- ADDED: Split queries using blank lines #1089
|
||||
- FIXED: JSON-to-Grid only works if there is no newline #1085
|
||||
- CHANGED: When running multiple commands in script, stop execution after first error #1070
|
||||
- FIXED: Selection rectangle remains visible after closing JSONB edit cell value form #1031
|
||||
- FIXED: Diplaying numeric FK column with right alignement #1021
|
||||
- ADDED: Additional arguments for MySQL and PostgreSQL backup #1092
|
||||
- CHANGED: Amazon and Azure instalations are not auto-upgraded by default
|
||||
|
||||
## 6.3.3
|
||||
- CHANGED: New administration UI, redesigned administration of users, connections and roles
|
||||
|
||||
@@ -20,6 +20,7 @@ DbGate is licensed under GPL-3.0 license and is free to use for any purpose.
|
||||
* Run web version as [NPM package](https://www.npmjs.com/package/dbgate-serve) or as [docker image](https://hub.docker.com/r/dbgate/dbgate)
|
||||
* Use nodeJs [scripting interface](https://docs.dbgate.io/scripting) ([API documentation](https://docs.dbgate.io/apidoc))
|
||||
* [Recommend DbGate](https://testimonial.to/dbgate) | [Rate on G2](https://www.g2.com/products/dbgate/reviews)
|
||||
* [Give us feedback](https://dbgate.org/feedback) - it will help us to decide, how to improve DbGate in future
|
||||
|
||||
## Supported databases
|
||||
* MySQL
|
||||
@@ -35,6 +36,8 @@ DbGate is licensed under GPL-3.0 license and is free to use for any purpose.
|
||||
* CosmosDB (Premium)
|
||||
* ClickHouse
|
||||
* Apache Cassandra
|
||||
* libSQL/Turso (Premium)
|
||||
* DuckDB
|
||||
|
||||
|
||||
<a href="https://raw.githubusercontent.com/dbgate/dbgate/master/img/screenshot1.png">
|
||||
|
||||
@@ -108,6 +108,7 @@ module.exports = ({ editMenu, isMac }) => [
|
||||
{ command: 'app.openWeb', hideDisabled: true },
|
||||
{ command: 'app.openIssue', hideDisabled: true },
|
||||
{ command: 'app.openSponsoring', hideDisabled: true },
|
||||
{ command: 'app.giveFeedback', hideDisabled: true },
|
||||
{ divider: true },
|
||||
{ command: 'settings.commands', hideDisabled: true },
|
||||
{ command: 'tabs.changelog', hideDisabled: true },
|
||||
|
||||
@@ -1,5 +1,7 @@
|
||||
FROM node:22
|
||||
|
||||
LABEL org.opencontainers.image.source="https://github.com/dbgate/dbgate"
|
||||
|
||||
RUN apt-get update && apt-get install -y \
|
||||
iputils-ping \
|
||||
iproute2 \
|
||||
|
||||
@@ -1,5 +1,7 @@
|
||||
FROM node:18-alpine
|
||||
|
||||
LABEL org.opencontainers.image.source="https://github.com/dbgate/dbgate"
|
||||
|
||||
WORKDIR /home/dbgate-docker
|
||||
|
||||
RUN apk --no-cache upgrade \
|
||||
|
||||
@@ -112,4 +112,11 @@ describe('Add connection', () => {
|
||||
|
||||
cy.contains('performance_schema');
|
||||
});
|
||||
|
||||
it('export connections', () => {
|
||||
cy.testid('WidgetIconPanel_menu').click();
|
||||
cy.contains('Tools').click();
|
||||
cy.contains('Export connections').click();
|
||||
cy.themeshot('export-connections');
|
||||
});
|
||||
});
|
||||
|
||||
@@ -248,14 +248,14 @@ describe('Data browser data', () => {
|
||||
cy.themeshot('database-diagram');
|
||||
});
|
||||
|
||||
it('Charts', () => {
|
||||
cy.testid('WidgetIconPanel_file').click();
|
||||
cy.contains('pie-chart').click();
|
||||
cy.contains('line-chart').click();
|
||||
cy.testid('TabsPanel_buttonSplit').click();
|
||||
cy.testid('WidgetIconPanel_file').click();
|
||||
cy.themeshot('view-split-charts');
|
||||
});
|
||||
// it('Charts', () => {
|
||||
// cy.testid('WidgetIconPanel_file').click();
|
||||
// cy.contains('pie-chart').click();
|
||||
// cy.contains('line-chart').click();
|
||||
// cy.testid('TabsPanel_buttonSplit').click();
|
||||
// cy.testid('WidgetIconPanel_file').click();
|
||||
// cy.themeshot('view-split-charts');
|
||||
// });
|
||||
|
||||
it('Keyboard configuration', () => {
|
||||
cy.testid('WidgetIconPanel_settings').click();
|
||||
|
||||
@@ -17,6 +17,17 @@ services:
|
||||
environment:
|
||||
- MYSQL_ROOT_PASSWORD=Pwd2020Db
|
||||
|
||||
db2:
|
||||
image: icr.io/db2_community/db2:11.5.8.0
|
||||
privileged: true
|
||||
ports:
|
||||
- "15055:50000"
|
||||
environment:
|
||||
LICENSE: accept
|
||||
DB2INST1_PASSWORD: Pwd2020Db
|
||||
DBNAME: testdb
|
||||
DB2INSTANCE: db2inst1
|
||||
|
||||
# mysql:
|
||||
# image: mysql:8.0.18
|
||||
# command: --default-authentication-plugin=mysql_native_password
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"private": true,
|
||||
"version": "6.3.4-premium-beta.3",
|
||||
"version": "6.4.3-premium-beta.4",
|
||||
"name": "dbgate-all",
|
||||
"workspaces": [
|
||||
"packages/*",
|
||||
@@ -9,6 +9,7 @@
|
||||
],
|
||||
"scripts": {
|
||||
"start:api": "yarn workspace dbgate-api start | pino-pretty",
|
||||
"start:api:watch": "nodemon --watch 'src/**' --ext 'ts,json,js' --exec yarn start:api",
|
||||
"start:api:json": "yarn workspace dbgate-api start",
|
||||
"start:app": "cd app && yarn start | pino-pretty",
|
||||
"start:app:singledb": "CONNECTIONS=con1 SERVER_con1=localhost ENGINE_con1=mysql@dbgate-plugin-mysql USER_con1=root PASSWORD_con1=Pwd2020Db SINGLE_CONNECTION=con1 SINGLE_DATABASE=Chinook yarn start:app",
|
||||
|
||||
@@ -1,5 +1,7 @@
|
||||
DEVMODE=1
|
||||
SHELL_SCRIPTING=1
|
||||
# LOCAL_DBGATE_CLOUD=1
|
||||
# LOCAL_DBGATE_IDENTITY=1
|
||||
|
||||
# CLOUD_UPGRADE_FILE=c:\test\upg\upgrade.zip
|
||||
|
||||
|
||||
@@ -31,7 +31,7 @@
|
||||
"cors": "^2.8.5",
|
||||
"cross-env": "^6.0.3",
|
||||
"dbgate-datalib": "^6.0.0-alpha.1",
|
||||
"dbgate-query-splitter": "^4.11.4",
|
||||
"dbgate-query-splitter": "^4.11.5",
|
||||
"dbgate-sqltree": "^6.0.0-alpha.1",
|
||||
"dbgate-tools": "^6.0.0-alpha.1",
|
||||
"debug": "^4.3.4",
|
||||
|
||||
@@ -13,6 +13,8 @@ const {
|
||||
} = require('../auth/authProvider');
|
||||
const storage = require('./storage');
|
||||
const { decryptPasswordString } = require('../utility/crypting');
|
||||
const { createDbGateIdentitySession, startCloudTokenChecking } = require('../utility/cloudIntf');
|
||||
const socket = require('../utility/socket');
|
||||
|
||||
const logger = getLogger('auth');
|
||||
|
||||
@@ -135,5 +137,14 @@ module.exports = {
|
||||
return getAuthProviderById(amoid).redirect(params);
|
||||
},
|
||||
|
||||
createCloudLoginSession_meta: true,
|
||||
async createCloudLoginSession({ client }) {
|
||||
const res = await createDbGateIdentitySession(client);
|
||||
startCloudTokenChecking(res.sid, tokenHolder => {
|
||||
socket.emit('got-cloud-token', tokenHolder);
|
||||
});
|
||||
return res;
|
||||
},
|
||||
|
||||
authMiddleware,
|
||||
};
|
||||
|
||||
250
packages/api/src/controllers/cloud.js
Normal file
250
packages/api/src/controllers/cloud.js
Normal file
@@ -0,0 +1,250 @@
|
||||
const {
|
||||
getPublicCloudFiles,
|
||||
getPublicFileData,
|
||||
refreshPublicFiles,
|
||||
callCloudApiGet,
|
||||
callCloudApiPost,
|
||||
getCloudFolderEncryptor,
|
||||
getCloudContent,
|
||||
putCloudContent,
|
||||
removeCloudCachedConnection,
|
||||
} = require('../utility/cloudIntf');
|
||||
const connections = require('./connections');
|
||||
const socket = require('../utility/socket');
|
||||
const { recryptConnection, getInternalEncryptor, encryptConnection } = require('../utility/crypting');
|
||||
const { getConnectionLabel, getLogger, extractErrorLogData } = require('dbgate-tools');
|
||||
const logger = getLogger('cloud');
|
||||
const _ = require('lodash');
|
||||
const fs = require('fs-extra');
|
||||
|
||||
module.exports = {
|
||||
publicFiles_meta: true,
|
||||
async publicFiles() {
|
||||
const res = await getPublicCloudFiles();
|
||||
return res;
|
||||
},
|
||||
|
||||
publicFileData_meta: true,
|
||||
async publicFileData({ path }) {
|
||||
const res = getPublicFileData(path);
|
||||
return res;
|
||||
},
|
||||
|
||||
refreshPublicFiles_meta: true,
|
||||
async refreshPublicFiles({ isRefresh }) {
|
||||
await refreshPublicFiles(isRefresh);
|
||||
return {
|
||||
status: 'ok',
|
||||
};
|
||||
},
|
||||
|
||||
contentList_meta: true,
|
||||
async contentList() {
|
||||
try {
|
||||
const resp = await callCloudApiGet('content-list');
|
||||
return resp;
|
||||
} catch (err) {
|
||||
logger.error(extractErrorLogData(err), 'Error getting cloud content list');
|
||||
|
||||
return [];
|
||||
}
|
||||
},
|
||||
|
||||
getContent_meta: true,
|
||||
async getContent({ folid, cntid }) {
|
||||
const resp = await getCloudContent(folid, cntid);
|
||||
return resp;
|
||||
},
|
||||
|
||||
putContent_meta: true,
|
||||
async putContent({ folid, cntid, content, name, type }) {
|
||||
const resp = await putCloudContent(folid, cntid, content, name, type);
|
||||
socket.emitChanged('cloud-content-changed');
|
||||
socket.emit('cloud-content-updated');
|
||||
return resp;
|
||||
},
|
||||
|
||||
createFolder_meta: true,
|
||||
async createFolder({ name }) {
|
||||
const resp = await callCloudApiPost(`folders/create`, { name });
|
||||
socket.emitChanged('cloud-content-changed');
|
||||
socket.emit('cloud-content-updated');
|
||||
return resp;
|
||||
},
|
||||
|
||||
grantFolder_meta: true,
|
||||
async grantFolder({ inviteLink }) {
|
||||
const m = inviteLink.match(/^dbgate\:\/\/folder\/v1\/([a-zA-Z0-9]+)\?mode=(read|write|admin)$/);
|
||||
if (!m) {
|
||||
throw new Error('Invalid invite link format');
|
||||
}
|
||||
const invite = m[1];
|
||||
const mode = m[2];
|
||||
|
||||
const resp = await callCloudApiPost(`folders/grant/${mode}`, { invite });
|
||||
socket.emitChanged('cloud-content-changed');
|
||||
socket.emit('cloud-content-updated');
|
||||
return resp;
|
||||
},
|
||||
|
||||
renameFolder_meta: true,
|
||||
async renameFolder({ folid, name }) {
|
||||
const resp = await callCloudApiPost(`folders/rename`, { folid, name });
|
||||
socket.emitChanged('cloud-content-changed');
|
||||
socket.emit('cloud-content-updated');
|
||||
return resp;
|
||||
},
|
||||
|
||||
deleteFolder_meta: true,
|
||||
async deleteFolder({ folid }) {
|
||||
const resp = await callCloudApiPost(`folders/delete`, { folid });
|
||||
socket.emitChanged('cloud-content-changed');
|
||||
socket.emit('cloud-content-updated');
|
||||
return resp;
|
||||
},
|
||||
|
||||
getInviteToken_meta: true,
|
||||
async getInviteToken({ folid, role }) {
|
||||
const resp = await callCloudApiGet(`invite-token/${folid}/${role}`);
|
||||
return resp;
|
||||
},
|
||||
|
||||
refreshContent_meta: true,
|
||||
async refreshContent() {
|
||||
socket.emitChanged('cloud-content-changed');
|
||||
socket.emit('cloud-content-updated');
|
||||
return {
|
||||
status: 'ok',
|
||||
};
|
||||
},
|
||||
|
||||
copyConnectionCloud_meta: true,
|
||||
async copyConnectionCloud({ conid, folid }) {
|
||||
const conn = await connections.getCore({ conid });
|
||||
const folderEncryptor = await getCloudFolderEncryptor(folid);
|
||||
const recryptedConn = recryptConnection(conn, getInternalEncryptor(), folderEncryptor);
|
||||
const connToSend = _.omit(recryptedConn, ['_id']);
|
||||
const resp = await putCloudContent(
|
||||
folid,
|
||||
undefined,
|
||||
JSON.stringify(connToSend),
|
||||
getConnectionLabel(conn),
|
||||
'connection'
|
||||
);
|
||||
return resp;
|
||||
},
|
||||
|
||||
saveConnection_meta: true,
|
||||
async saveConnection({ folid, connection }) {
|
||||
let cntid = undefined;
|
||||
if (connection._id) {
|
||||
const m = connection._id.match(/^cloud\:\/\/(.+)\/(.+)$/);
|
||||
if (!m) {
|
||||
throw new Error('Invalid cloud connection ID format');
|
||||
}
|
||||
folid = m[1];
|
||||
cntid = m[2];
|
||||
}
|
||||
|
||||
if (!folid) {
|
||||
throw new Error('Missing cloud folder ID');
|
||||
}
|
||||
|
||||
const folderEncryptor = await getCloudFolderEncryptor(folid);
|
||||
const recryptedConn = encryptConnection(connection, folderEncryptor);
|
||||
const resp = await putCloudContent(
|
||||
folid,
|
||||
cntid,
|
||||
JSON.stringify(recryptedConn),
|
||||
getConnectionLabel(recryptedConn),
|
||||
'connection'
|
||||
);
|
||||
|
||||
if (resp.apiErrorMessage) {
|
||||
return resp;
|
||||
}
|
||||
|
||||
removeCloudCachedConnection(folid, resp.cntid);
|
||||
cntid = resp.cntid;
|
||||
socket.emitChanged('cloud-content-changed');
|
||||
socket.emit('cloud-content-updated');
|
||||
return {
|
||||
...recryptedConn,
|
||||
_id: `cloud://${folid}/${cntid}`,
|
||||
};
|
||||
},
|
||||
|
||||
duplicateConnection_meta: true,
|
||||
async duplicateConnection({ conid }) {
|
||||
const m = conid.match(/^cloud\:\/\/(.+)\/(.+)$/);
|
||||
if (!m) {
|
||||
throw new Error('Invalid cloud connection ID format');
|
||||
}
|
||||
const folid = m[1];
|
||||
const cntid = m[2];
|
||||
const respGet = await getCloudContent(folid, cntid);
|
||||
const conn = JSON.parse(respGet.content);
|
||||
const conn2 = {
|
||||
...conn,
|
||||
displayName: getConnectionLabel(conn) + ' - copy',
|
||||
};
|
||||
const respPut = await putCloudContent(folid, undefined, JSON.stringify(conn2), conn2.displayName, 'connection');
|
||||
return respPut;
|
||||
},
|
||||
|
||||
deleteConnection_meta: true,
|
||||
async deleteConnection({ conid }) {
|
||||
const m = conid.match(/^cloud\:\/\/(.+)\/(.+)$/);
|
||||
if (!m) {
|
||||
throw new Error('Invalid cloud connection ID format');
|
||||
}
|
||||
const folid = m[1];
|
||||
const cntid = m[2];
|
||||
const resp = await callCloudApiPost(`content/delete/${folid}/${cntid}`);
|
||||
socket.emitChanged('cloud-content-changed');
|
||||
socket.emit('cloud-content-updated');
|
||||
return resp;
|
||||
},
|
||||
|
||||
deleteContent_meta: true,
|
||||
async deleteContent({ folid, cntid }) {
|
||||
const resp = await callCloudApiPost(`content/delete/${folid}/${cntid}`);
|
||||
socket.emitChanged('cloud-content-changed');
|
||||
socket.emit('cloud-content-updated');
|
||||
return resp;
|
||||
},
|
||||
|
||||
renameContent_meta: true,
|
||||
async renameContent({ folid, cntid, name }) {
|
||||
const resp = await callCloudApiPost(`content/rename/${folid}/${cntid}`, { name });
|
||||
socket.emitChanged('cloud-content-changed');
|
||||
socket.emit('cloud-content-updated');
|
||||
return resp;
|
||||
},
|
||||
|
||||
saveFile_meta: true,
|
||||
async saveFile({ folid, cntid, fileName, data, contentFolder, format }) {
|
||||
const resp = await putCloudContent(folid, cntid, data, fileName, 'file', contentFolder, format);
|
||||
socket.emitChanged('cloud-content-changed');
|
||||
socket.emit('cloud-content-updated');
|
||||
return resp;
|
||||
},
|
||||
|
||||
copyFile_meta: true,
|
||||
async copyFile({ folid, cntid, name }) {
|
||||
const resp = await callCloudApiPost(`content/duplicate/${folid}/${cntid}`, { name });
|
||||
socket.emitChanged('cloud-content-changed');
|
||||
socket.emit('cloud-content-updated');
|
||||
return resp;
|
||||
},
|
||||
|
||||
exportFile_meta: true,
|
||||
async exportFile({ folid, cntid, filePath }, req) {
|
||||
const { content } = await getCloudContent(folid, cntid);
|
||||
if (!content) {
|
||||
throw new Error('File not found');
|
||||
}
|
||||
await fs.writeFile(filePath, content);
|
||||
return true;
|
||||
},
|
||||
};
|
||||
@@ -298,8 +298,12 @@ module.exports = {
|
||||
|
||||
changelog_meta: true,
|
||||
async changelog() {
|
||||
try {
|
||||
const resp = await axios.default.get('https://raw.githubusercontent.com/dbgate/dbgate/master/CHANGELOG.md');
|
||||
return resp.data;
|
||||
} catch (err) {
|
||||
return ''
|
||||
}
|
||||
},
|
||||
|
||||
checkLicense_meta: true,
|
||||
|
||||
@@ -239,6 +239,19 @@ module.exports = {
|
||||
return (await this.datastore.find()).filter(x => connectionHasPermission(x, req));
|
||||
},
|
||||
|
||||
async getUsedEngines() {
|
||||
const storage = require('./storage');
|
||||
|
||||
const storageEngines = await storage.getUsedEngines();
|
||||
if (storageEngines) {
|
||||
return storageEngines;
|
||||
}
|
||||
if (portalConnections) {
|
||||
return _.uniq(_.compact(portalConnections.map(x => x.engine)));
|
||||
}
|
||||
return _.uniq((await this.datastore.find()).map(x => x.engine));
|
||||
},
|
||||
|
||||
test_meta: true,
|
||||
test({ connection, requestDbList = false }) {
|
||||
const subprocess = fork(
|
||||
@@ -410,6 +423,13 @@ module.exports = {
|
||||
return volatile;
|
||||
}
|
||||
|
||||
const cloudMatch = conid.match(/^cloud\:\/\/(.+)\/(.+)$/);
|
||||
if (cloudMatch) {
|
||||
const { loadCachedCloudConnection } = require('../utility/cloudIntf');
|
||||
const conn = await loadCachedCloudConnection(cloudMatch[1], cloudMatch[2]);
|
||||
return conn;
|
||||
}
|
||||
|
||||
const storage = require('./storage');
|
||||
|
||||
const storageConnection = await storage.getConnection({ conid });
|
||||
|
||||
@@ -148,6 +148,9 @@ module.exports = {
|
||||
const existing = this.opened.find(x => x.conid == conid && x.database == database);
|
||||
if (existing) return existing;
|
||||
const connection = await connections.getCore({ conid });
|
||||
if (!connection) {
|
||||
throw new Error(`databaseConnections: Connection with conid="${conid}" not found`);
|
||||
}
|
||||
if (connection.passwordMode == 'askPassword' || connection.passwordMode == 'askUser') {
|
||||
throw new MissingCredentialsError({ conid, passwordMode: connection.passwordMode });
|
||||
}
|
||||
@@ -304,6 +307,12 @@ module.exports = {
|
||||
return this.loadDataCore('loadKeys', { conid, database, root, filter, limit });
|
||||
},
|
||||
|
||||
scanKeys_meta: true,
|
||||
async scanKeys({ conid, database, root, pattern, cursor, count }, req) {
|
||||
testConnectionPermission(conid, req);
|
||||
return this.loadDataCore('scanKeys', { conid, database, root, pattern, cursor, count });
|
||||
},
|
||||
|
||||
exportKeys_meta: true,
|
||||
async exportKeys({ conid, database, options }, req) {
|
||||
testConnectionPermission(conid, req);
|
||||
|
||||
@@ -10,6 +10,7 @@ const requirePluginFunction = require('../utility/requirePluginFunction');
|
||||
const socket = require('../utility/socket');
|
||||
const crypto = require('crypto');
|
||||
const dbgateApi = require('../shell');
|
||||
const { ChartProcessor } = require('dbgate-datalib');
|
||||
|
||||
function readFirstLine(file) {
|
||||
return new Promise((resolve, reject) => {
|
||||
@@ -302,4 +303,29 @@ module.exports = {
|
||||
await dbgateApi.download(uri, { targetFile: getJslFileName(jslid) });
|
||||
return { jslid };
|
||||
},
|
||||
|
||||
buildChart_meta: true,
|
||||
async buildChart({ jslid, definition }) {
|
||||
const datastore = new JsonLinesDatastore(getJslFileName(jslid));
|
||||
const processor = new ChartProcessor(definition ? [definition] : undefined);
|
||||
await datastore.enumRows(row => {
|
||||
processor.addRow(row);
|
||||
return true;
|
||||
});
|
||||
processor.finalize();
|
||||
return processor.charts;
|
||||
},
|
||||
|
||||
detectChartColumns_meta: true,
|
||||
async detectChartColumns({ jslid }) {
|
||||
const datastore = new JsonLinesDatastore(getJslFileName(jslid));
|
||||
const processor = new ChartProcessor();
|
||||
processor.autoDetectCharts = false;
|
||||
await datastore.enumRows(row => {
|
||||
processor.addRow(row);
|
||||
return true;
|
||||
});
|
||||
processor.finalize();
|
||||
return processor.availableColumns;
|
||||
},
|
||||
};
|
||||
|
||||
@@ -52,7 +52,7 @@ module.exports = {
|
||||
if (existing) return existing;
|
||||
const connection = await connections.getCore({ conid });
|
||||
if (!connection) {
|
||||
throw new Error(`Connection with conid="${conid}" not found`);
|
||||
throw new Error(`serverConnections: Connection with conid="${conid}" not found`);
|
||||
}
|
||||
if (connection.singleDatabase) {
|
||||
return null;
|
||||
|
||||
@@ -83,6 +83,11 @@ module.exports = {
|
||||
jsldata.notifyChangedStats(stats);
|
||||
},
|
||||
|
||||
handle_charts(sesid, props) {
|
||||
const { jslid, charts, resultIndex } = props;
|
||||
socket.emit(`session-charts-${sesid}`, { jslid, resultIndex, charts });
|
||||
},
|
||||
|
||||
handle_initializeFile(sesid, props) {
|
||||
const { jslid } = props;
|
||||
socket.emit(`session-initialize-file-${jslid}`);
|
||||
@@ -141,7 +146,7 @@ module.exports = {
|
||||
},
|
||||
|
||||
executeQuery_meta: true,
|
||||
async executeQuery({ sesid, sql, autoCommit }) {
|
||||
async executeQuery({ sesid, sql, autoCommit, limitRows, frontMatter }) {
|
||||
const session = this.opened.find(x => x.sesid == sesid);
|
||||
if (!session) {
|
||||
throw new Error('Invalid session');
|
||||
@@ -149,7 +154,7 @@ module.exports = {
|
||||
|
||||
logger.info({ sesid, sql }, 'Processing query');
|
||||
this.dispatchMessage(sesid, 'Query execution started');
|
||||
session.subprocess.send({ msgtype: 'executeQuery', sql, autoCommit });
|
||||
session.subprocess.send({ msgtype: 'executeQuery', sql, autoCommit, limitRows, frontMatter });
|
||||
|
||||
return { state: 'ok' };
|
||||
},
|
||||
|
||||
@@ -32,4 +32,8 @@ module.exports = {
|
||||
},
|
||||
|
||||
startRefreshLicense() {},
|
||||
|
||||
async getUsedEngines() {
|
||||
return null;
|
||||
},
|
||||
};
|
||||
|
||||
@@ -27,6 +27,7 @@ const plugins = require('./controllers/plugins');
|
||||
const files = require('./controllers/files');
|
||||
const scheduler = require('./controllers/scheduler');
|
||||
const queryHistory = require('./controllers/queryHistory');
|
||||
const cloud = require('./controllers/cloud');
|
||||
const onFinished = require('on-finished');
|
||||
const processArgs = require('./utility/processArgs');
|
||||
|
||||
@@ -39,6 +40,7 @@ const { getDefaultAuthProvider } = require('./auth/authProvider');
|
||||
const startCloudUpgradeTimer = require('./utility/cloudUpgrade');
|
||||
const { isProApp } = require('./utility/checkLicense');
|
||||
const { getHealthStatus, getHealthStatusSprinx } = require('./utility/healthStatus');
|
||||
const { startCloudFiles } = require('./utility/cloudIntf');
|
||||
|
||||
const logger = getLogger('main');
|
||||
|
||||
@@ -200,6 +202,8 @@ function start() {
|
||||
if (process.env.CLOUD_UPGRADE_FILE) {
|
||||
startCloudUpgradeTimer();
|
||||
}
|
||||
|
||||
startCloudFiles();
|
||||
}
|
||||
|
||||
function useAllControllers(app, electron) {
|
||||
@@ -220,6 +224,7 @@ function useAllControllers(app, electron) {
|
||||
useController(app, electron, '/query-history', queryHistory);
|
||||
useController(app, electron, '/apps', apps);
|
||||
useController(app, electron, '/auth', auth);
|
||||
useController(app, electron, '/cloud', cloud);
|
||||
}
|
||||
|
||||
function setElectronSender(electronSender) {
|
||||
|
||||
@@ -28,14 +28,7 @@ function start() {
|
||||
let version = {
|
||||
version: 'Unknown',
|
||||
};
|
||||
try {
|
||||
version = await driver.getVersion(dbhan);
|
||||
} catch (err) {
|
||||
logger.error(extractErrorLogData(err), 'Error getting DB server version');
|
||||
version = {
|
||||
version: 'Unknown',
|
||||
};
|
||||
}
|
||||
let databases = undefined;
|
||||
if (requestDbList) {
|
||||
databases = await driver.listDatabases(dbhan);
|
||||
|
||||
@@ -12,6 +12,7 @@ const {
|
||||
ScriptWriterEval,
|
||||
SqlGenerator,
|
||||
playJsonScriptWriter,
|
||||
serializeJsTypesForJsonStringify,
|
||||
} = require('dbgate-tools');
|
||||
const requireEngineDriver = require('../utility/requireEngineDriver');
|
||||
const { connectUtility } = require('../utility/connectUtility');
|
||||
@@ -232,7 +233,7 @@ async function handleQueryData({ msgid, sql, range }, skipReadonlyCheck = false)
|
||||
try {
|
||||
if (!skipReadonlyCheck) ensureExecuteCustomScript(driver);
|
||||
const res = await driver.query(dbhan, sql, { range });
|
||||
process.send({ msgtype: 'response', msgid, ...res });
|
||||
process.send({ msgtype: 'response', msgid, ...serializeJsTypesForJsonStringify(res) });
|
||||
} catch (err) {
|
||||
process.send({
|
||||
msgtype: 'response',
|
||||
@@ -254,7 +255,7 @@ async function handleDriverDataCore(msgid, callMethod, { logName }) {
|
||||
const driver = requireEngineDriver(storedConnection);
|
||||
try {
|
||||
const result = await callMethod(driver);
|
||||
process.send({ msgtype: 'response', msgid, result });
|
||||
process.send({ msgtype: 'response', msgid, result: serializeJsTypesForJsonStringify(result) });
|
||||
} catch (err) {
|
||||
logger.error(extractErrorLogData(err, { logName }), `Error when handling message ${logName}`);
|
||||
process.send({ msgtype: 'response', msgid, errorMessage: extractErrorMessage(err, 'Error executing DB data') });
|
||||
@@ -274,6 +275,10 @@ async function handleLoadKeys({ msgid, root, filter, limit }) {
|
||||
return handleDriverDataCore(msgid, driver => driver.loadKeys(dbhan, root, filter, limit), { logName: 'loadKeys' });
|
||||
}
|
||||
|
||||
async function handleScanKeys({ msgid, pattern, cursor, count }) {
|
||||
return handleDriverDataCore(msgid, driver => driver.scanKeys(dbhan, pattern, cursor, count), { logName: 'scanKeys' });
|
||||
}
|
||||
|
||||
async function handleExportKeys({ msgid, options }) {
|
||||
return handleDriverDataCore(msgid, driver => driver.exportKeys(dbhan, options), { logName: 'exportKeys' });
|
||||
}
|
||||
@@ -452,6 +457,7 @@ const messageHandlers = {
|
||||
updateCollection: handleUpdateCollection,
|
||||
collectionData: handleCollectionData,
|
||||
loadKeys: handleLoadKeys,
|
||||
scanKeys: handleScanKeys,
|
||||
loadKeyInfo: handleLoadKeyInfo,
|
||||
callMethod: handleCallMethod,
|
||||
loadKeyTableRange: handleLoadKeyTableRange,
|
||||
|
||||
@@ -117,7 +117,7 @@ async function handleExecuteControlCommand({ command }) {
|
||||
}
|
||||
}
|
||||
|
||||
async function handleExecuteQuery({ sql, autoCommit }) {
|
||||
async function handleExecuteQuery({ sql, autoCommit, limitRows, frontMatter }) {
|
||||
lastActivity = new Date().getTime();
|
||||
|
||||
await waitConnected();
|
||||
@@ -146,7 +146,7 @@ async function handleExecuteQuery({ sql, autoCommit }) {
|
||||
...driver.getQuerySplitterOptions('stream'),
|
||||
returnRichInfo: true,
|
||||
})) {
|
||||
await handleQueryStream(dbhan, driver, queryStreamInfoHolder, sqlItem);
|
||||
await handleQueryStream(dbhan, driver, queryStreamInfoHolder, sqlItem, undefined, limitRows, frontMatter);
|
||||
// const handler = new StreamHandler(resultIndex);
|
||||
// const stream = await driver.stream(systemConnection, sqlItem, handler);
|
||||
// handler.stream = stream;
|
||||
|
||||
@@ -15,6 +15,7 @@ const logger = getLogger('execQuery');
|
||||
* @param {string} [options.sqlFile] - SQL file
|
||||
* @param {boolean} [options.logScriptItems] - whether to log script items instead of whole script
|
||||
* @param {boolean} [options.useTransaction] - run query in transaction
|
||||
* @param {boolean} [options.skipLogging] - whether to skip logging
|
||||
*/
|
||||
async function executeQuery({
|
||||
connection = undefined,
|
||||
@@ -23,9 +24,10 @@ async function executeQuery({
|
||||
sql,
|
||||
sqlFile = undefined,
|
||||
logScriptItems = false,
|
||||
skipLogging = false,
|
||||
useTransaction,
|
||||
}) {
|
||||
if (!logScriptItems) {
|
||||
if (!logScriptItems && !skipLogging) {
|
||||
logger.info({ sql: getLimitedQuery(sql) }, `Execute query`);
|
||||
}
|
||||
|
||||
@@ -38,7 +40,9 @@ async function executeQuery({
|
||||
}
|
||||
|
||||
try {
|
||||
if (!skipLogging) {
|
||||
logger.debug(`Running SQL query, length: ${sql.length}`);
|
||||
}
|
||||
|
||||
await driver.script(dbhan, sql, { logScriptItems, useTransaction });
|
||||
} finally {
|
||||
|
||||
@@ -52,8 +52,11 @@ async function generateDeploySql({
|
||||
dbdiffOptionsExtra?.['schemaMode'] !== 'ignore' &&
|
||||
dbdiffOptionsExtra?.['schemaMode'] !== 'ignoreImplicit'
|
||||
) {
|
||||
if (!driver?.dialect?.defaultSchemaName) {
|
||||
throw new Error('targetSchema is required for databases with multiple schemas');
|
||||
}
|
||||
targetSchema = driver.dialect.defaultSchemaName;
|
||||
}
|
||||
|
||||
try {
|
||||
if (!analysedStructure) {
|
||||
|
||||
@@ -7,6 +7,8 @@ const logger = getLogger('queryReader');
|
||||
* Returns reader object for {@link copyStream} function. This reader object reads data from query.
|
||||
* @param {object} options
|
||||
* @param {connectionType} options.connection - connection object
|
||||
* @param {object} options.systemConnection - system connection (result of driver.connect). If not provided, new connection will be created
|
||||
* @param {object} options.driver - driver object. If not provided, it will be loaded from connection
|
||||
* @param {string} options.query - SQL query
|
||||
* @param {string} [options.queryType] - query type
|
||||
* @param {string} [options.sql] - SQL query. obsolete; use query instead
|
||||
@@ -16,6 +18,8 @@ async function queryReader({
|
||||
connection,
|
||||
query,
|
||||
queryType,
|
||||
systemConnection,
|
||||
driver,
|
||||
// obsolete; use query instead
|
||||
sql,
|
||||
}) {
|
||||
@@ -28,10 +32,13 @@ async function queryReader({
|
||||
logger.info({ sql: query || sql }, `Reading query`);
|
||||
// else console.log(`Reading query ${JSON.stringify(json)}`);
|
||||
|
||||
const driver = requireEngineDriver(connection);
|
||||
const pool = await connectUtility(driver, connection, queryType == 'json' ? 'read' : 'script');
|
||||
if (!driver) {
|
||||
driver = requireEngineDriver(connection);
|
||||
}
|
||||
const dbhan = systemConnection || (await connectUtility(driver, connection, queryType == 'json' ? 'read' : 'script'));
|
||||
|
||||
const reader =
|
||||
queryType == 'json' ? await driver.readJsonQuery(pool, query) : await driver.readQuery(pool, query || sql);
|
||||
queryType == 'json' ? await driver.readJsonQuery(dbhan, query) : await driver.readQuery(dbhan, query || sql);
|
||||
return reader;
|
||||
}
|
||||
|
||||
|
||||
@@ -36,6 +36,10 @@ async function callRefactorSqlQueryApi(query, task, structure, dialect) {
|
||||
return null;
|
||||
}
|
||||
|
||||
function getLicenseHttpHeaders() {
|
||||
return {};
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
isAuthProxySupported,
|
||||
authProxyGetRedirectUrl,
|
||||
@@ -47,4 +51,5 @@ module.exports = {
|
||||
callTextToSqlApi,
|
||||
callCompleteOnCursorApi,
|
||||
callRefactorSqlQueryApi,
|
||||
getLicenseHttpHeaders,
|
||||
};
|
||||
|
||||
380
packages/api/src/utility/cloudIntf.js
Normal file
380
packages/api/src/utility/cloudIntf.js
Normal file
@@ -0,0 +1,380 @@
|
||||
const axios = require('axios');
|
||||
const fs = require('fs-extra');
|
||||
const _ = require('lodash');
|
||||
const path = require('path');
|
||||
const { getLicenseHttpHeaders } = require('./authProxy');
|
||||
const { getLogger, extractErrorLogData, jsonLinesParse } = require('dbgate-tools');
|
||||
const { datadir } = require('./directories');
|
||||
const platformInfo = require('./platformInfo');
|
||||
const connections = require('../controllers/connections');
|
||||
const { isProApp } = require('./checkLicense');
|
||||
const socket = require('./socket');
|
||||
const config = require('../controllers/config');
|
||||
const simpleEncryptor = require('simple-encryptor');
|
||||
const currentVersion = require('../currentVersion');
|
||||
const { getPublicIpInfo } = require('./hardwareFingerprint');
|
||||
|
||||
const logger = getLogger('cloudIntf');
|
||||
|
||||
let cloudFiles = null;
|
||||
|
||||
const DBGATE_IDENTITY_URL = process.env.LOCAL_DBGATE_IDENTITY
|
||||
? 'http://localhost:3103'
|
||||
: process.env.DEVWEB || process.env.DEVMODE
|
||||
? 'https://identity.dbgate.udolni.net'
|
||||
: 'https://identity.dbgate.io';
|
||||
|
||||
const DBGATE_CLOUD_URL = process.env.LOCAL_DBGATE_CLOUD
|
||||
? 'http://localhost:3110'
|
||||
: process.env.DEVWEB || process.env.DEVMODE
|
||||
? 'https://cloud.dbgate.udolni.net'
|
||||
: 'https://cloud.dbgate.io';
|
||||
|
||||
async function createDbGateIdentitySession(client) {
|
||||
const resp = await axios.default.post(
|
||||
`${DBGATE_IDENTITY_URL}/api/create-session`,
|
||||
{
|
||||
client,
|
||||
},
|
||||
{
|
||||
headers: {
|
||||
...getLicenseHttpHeaders(),
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
}
|
||||
);
|
||||
return {
|
||||
sid: resp.data.sid,
|
||||
url: `${DBGATE_IDENTITY_URL}/api/signin/${resp.data.sid}`,
|
||||
};
|
||||
}
|
||||
|
||||
function startCloudTokenChecking(sid, callback) {
|
||||
const started = Date.now();
|
||||
const interval = setInterval(async () => {
|
||||
if (Date.now() - started > 60 * 1000) {
|
||||
clearInterval(interval);
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
// console.log(`Checking cloud token for session: ${DBGATE_IDENTITY_URL}/api/get-token/${sid}`);
|
||||
const resp = await axios.default.get(`${DBGATE_IDENTITY_URL}/api/get-token/${sid}`, {
|
||||
headers: {
|
||||
...getLicenseHttpHeaders(),
|
||||
},
|
||||
});
|
||||
// console.log('CHECK RESP:', resp.data);
|
||||
|
||||
if (resp.data.email) {
|
||||
clearInterval(interval);
|
||||
callback(resp.data);
|
||||
}
|
||||
} catch (err) {
|
||||
logger.error(extractErrorLogData(err), 'Error checking cloud token');
|
||||
}
|
||||
}, 500);
|
||||
}
|
||||
|
||||
async function loadCloudFiles() {
|
||||
try {
|
||||
const fileContent = await fs.readFile(path.join(datadir(), 'cloud-files.jsonl'), 'utf-8');
|
||||
const parsedJson = jsonLinesParse(fileContent);
|
||||
cloudFiles = _.sortBy(parsedJson, x => `${x.folder}/${x.title}`);
|
||||
} catch (err) {
|
||||
cloudFiles = [];
|
||||
}
|
||||
}
|
||||
|
||||
async function collectCloudFilesSearchTags() {
|
||||
const res = [];
|
||||
if (platformInfo.isElectron) {
|
||||
res.push('app');
|
||||
} else {
|
||||
res.push('web');
|
||||
}
|
||||
if (platformInfo.isWindows) {
|
||||
res.push('windows');
|
||||
}
|
||||
if (platformInfo.isMac) {
|
||||
res.push('mac');
|
||||
}
|
||||
if (platformInfo.isLinux) {
|
||||
res.push('linux');
|
||||
}
|
||||
if (platformInfo.isAwsUbuntuLayout) {
|
||||
res.push('aws');
|
||||
}
|
||||
if (platformInfo.isAzureUbuntuLayout) {
|
||||
res.push('azure');
|
||||
}
|
||||
if (platformInfo.isSnap) {
|
||||
res.push('snap');
|
||||
}
|
||||
if (platformInfo.isDocker) {
|
||||
res.push('docker');
|
||||
}
|
||||
if (platformInfo.isNpmDist) {
|
||||
res.push('npm');
|
||||
}
|
||||
const engines = await connections.getUsedEngines();
|
||||
const engineTags = engines.map(engine => engine.split('@')[0]);
|
||||
res.push(...engineTags);
|
||||
|
||||
// team-premium and trials will return the same cloud files as premium - no need to check
|
||||
res.push(isProApp() ? 'premium' : 'community');
|
||||
|
||||
return res;
|
||||
}
|
||||
|
||||
async function getCloudSigninHolder() {
|
||||
const settingsValue = await config.getSettings();
|
||||
const holder = settingsValue['cloudSigninTokenHolder'];
|
||||
return holder;
|
||||
}
|
||||
|
||||
async function getCloudSigninHeaders(holder = null) {
|
||||
if (!holder) {
|
||||
holder = await getCloudSigninHolder();
|
||||
}
|
||||
if (holder) {
|
||||
return {
|
||||
'x-cloud-login': holder.token,
|
||||
};
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
async function updateCloudFiles(isRefresh) {
|
||||
let lastCloudFilesTags;
|
||||
try {
|
||||
lastCloudFilesTags = await fs.readFile(path.join(datadir(), 'cloud-files-tags.txt'), 'utf-8');
|
||||
} catch (err) {
|
||||
lastCloudFilesTags = '';
|
||||
}
|
||||
|
||||
const ipInfo = await getPublicIpInfo();
|
||||
|
||||
const tags = (await collectCloudFilesSearchTags()).join(',');
|
||||
let lastCheckedTm = 0;
|
||||
if (tags == lastCloudFilesTags && cloudFiles.length > 0) {
|
||||
lastCheckedTm = _.max(cloudFiles.map(x => parseInt(x.modifiedTm)));
|
||||
}
|
||||
|
||||
logger.info({ tags, lastCheckedTm }, 'Downloading cloud files');
|
||||
|
||||
const resp = await axios.default.get(
|
||||
`${DBGATE_CLOUD_URL}/public-cloud-updates?lastCheckedTm=${lastCheckedTm}&tags=${tags}&isRefresh=${
|
||||
isRefresh ? 1 : 0
|
||||
}&country=${ipInfo?.country || ''}`,
|
||||
{
|
||||
headers: {
|
||||
...getLicenseHttpHeaders(),
|
||||
...(await getCloudSigninHeaders()),
|
||||
'x-app-version': currentVersion.version,
|
||||
},
|
||||
}
|
||||
);
|
||||
|
||||
logger.info(`Downloaded ${resp.data.length} cloud files`);
|
||||
|
||||
const filesByPath = lastCheckedTm == 0 ? {} : _.keyBy(cloudFiles, 'path');
|
||||
for (const file of resp.data) {
|
||||
if (file.isDeleted) {
|
||||
delete filesByPath[file.path];
|
||||
} else {
|
||||
filesByPath[file.path] = file;
|
||||
}
|
||||
}
|
||||
|
||||
cloudFiles = Object.values(filesByPath);
|
||||
|
||||
await fs.writeFile(path.join(datadir(), 'cloud-files.jsonl'), cloudFiles.map(x => JSON.stringify(x)).join('\n'));
|
||||
await fs.writeFile(path.join(datadir(), 'cloud-files-tags.txt'), tags);
|
||||
|
||||
socket.emitChanged(`public-cloud-changed`);
|
||||
}
|
||||
|
||||
async function startCloudFiles() {
|
||||
loadCloudFiles();
|
||||
}
|
||||
|
||||
async function getPublicCloudFiles() {
|
||||
if (!loadCloudFiles) {
|
||||
await loadCloudFiles();
|
||||
}
|
||||
return cloudFiles;
|
||||
}
|
||||
|
||||
async function getPublicFileData(path) {
|
||||
const resp = await axios.default.get(`${DBGATE_CLOUD_URL}/public/${path}`, {
|
||||
headers: {
|
||||
...getLicenseHttpHeaders(),
|
||||
},
|
||||
});
|
||||
return resp.data;
|
||||
}
|
||||
|
||||
async function refreshPublicFiles(isRefresh) {
|
||||
if (!cloudFiles) {
|
||||
await loadCloudFiles();
|
||||
}
|
||||
try {
|
||||
await updateCloudFiles(isRefresh);
|
||||
} catch (err) {
|
||||
logger.error(extractErrorLogData(err), 'Error updating cloud files');
|
||||
}
|
||||
}
|
||||
|
||||
async function callCloudApiGet(endpoint, signinHolder = null, additionalHeaders = {}) {
|
||||
if (!signinHolder) {
|
||||
signinHolder = await getCloudSigninHolder();
|
||||
}
|
||||
if (!signinHolder) {
|
||||
return null;
|
||||
}
|
||||
const signinHeaders = await getCloudSigninHeaders(signinHolder);
|
||||
|
||||
const resp = await axios.default.get(`${DBGATE_CLOUD_URL}/${endpoint}`, {
|
||||
headers: {
|
||||
...getLicenseHttpHeaders(),
|
||||
...signinHeaders,
|
||||
...additionalHeaders,
|
||||
},
|
||||
validateStatus: status => status < 500,
|
||||
});
|
||||
const { errorMessage } = resp.data;
|
||||
if (errorMessage) {
|
||||
return { apiErrorMessage: errorMessage };
|
||||
}
|
||||
return resp.data;
|
||||
}
|
||||
|
||||
async function callCloudApiPost(endpoint, body, signinHolder = null) {
|
||||
if (!signinHolder) {
|
||||
signinHolder = await getCloudSigninHolder();
|
||||
}
|
||||
if (!signinHolder) {
|
||||
return null;
|
||||
}
|
||||
const signinHeaders = await getCloudSigninHeaders(signinHolder);
|
||||
|
||||
const resp = await axios.default.post(`${DBGATE_CLOUD_URL}/${endpoint}`, body, {
|
||||
headers: {
|
||||
...getLicenseHttpHeaders(),
|
||||
...signinHeaders,
|
||||
},
|
||||
validateStatus: status => status < 500,
|
||||
});
|
||||
const { errorMessage, isLicenseLimit, limitedLicenseLimits } = resp.data;
|
||||
if (errorMessage) {
|
||||
return {
|
||||
apiErrorMessage: errorMessage,
|
||||
apiErrorIsLicenseLimit: isLicenseLimit,
|
||||
apiErrorLimitedLicenseLimits: limitedLicenseLimits,
|
||||
};
|
||||
}
|
||||
return resp.data;
|
||||
}
|
||||
|
||||
async function getCloudFolderEncryptor(folid) {
|
||||
const { encryptionKey } = await callCloudApiGet(`folder-key/${folid}`);
|
||||
if (!encryptionKey) {
|
||||
throw new Error('No encryption key for folder: ' + folid);
|
||||
}
|
||||
return simpleEncryptor.createEncryptor(encryptionKey);
|
||||
}
|
||||
|
||||
async function getCloudContent(folid, cntid) {
|
||||
const signinHolder = await getCloudSigninHolder();
|
||||
if (!signinHolder) {
|
||||
throw new Error('No signed in');
|
||||
}
|
||||
|
||||
const encryptor = simpleEncryptor.createEncryptor(signinHolder.encryptionKey);
|
||||
|
||||
const { content, name, type, contentFolder, contentType, apiErrorMessage } = await callCloudApiGet(
|
||||
`content/${folid}/${cntid}`,
|
||||
signinHolder,
|
||||
{
|
||||
'x-kehid': signinHolder.kehid,
|
||||
}
|
||||
);
|
||||
|
||||
if (apiErrorMessage) {
|
||||
return { apiErrorMessage };
|
||||
}
|
||||
|
||||
return {
|
||||
content: encryptor.decrypt(content),
|
||||
name,
|
||||
type,
|
||||
contentFolder,
|
||||
contentType,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
*
|
||||
* @returns Promise<{ cntid: string } | { apiErrorMessage: string }>
|
||||
*/
|
||||
async function putCloudContent(folid, cntid, content, name, type, contentFolder = null, contentType = null) {
|
||||
const signinHolder = await getCloudSigninHolder();
|
||||
if (!signinHolder) {
|
||||
throw new Error('No signed in');
|
||||
}
|
||||
|
||||
const encryptor = simpleEncryptor.createEncryptor(signinHolder.encryptionKey);
|
||||
|
||||
const resp = await callCloudApiPost(
|
||||
`put-content`,
|
||||
{
|
||||
folid,
|
||||
cntid,
|
||||
name,
|
||||
type,
|
||||
kehid: signinHolder.kehid,
|
||||
content: encryptor.encrypt(content),
|
||||
contentFolder,
|
||||
contentType,
|
||||
},
|
||||
signinHolder
|
||||
);
|
||||
socket.emitChanged('cloud-content-changed');
|
||||
socket.emit('cloud-content-updated');
|
||||
return resp;
|
||||
}
|
||||
|
||||
const cloudConnectionCache = {};
|
||||
async function loadCachedCloudConnection(folid, cntid) {
|
||||
const cacheKey = `${folid}|${cntid}`;
|
||||
if (!cloudConnectionCache[cacheKey]) {
|
||||
const { content } = await getCloudContent(folid, cntid);
|
||||
cloudConnectionCache[cacheKey] = {
|
||||
...JSON.parse(content),
|
||||
_id: `cloud://${folid}/${cntid}`,
|
||||
};
|
||||
}
|
||||
return cloudConnectionCache[cacheKey];
|
||||
}
|
||||
|
||||
function removeCloudCachedConnection(folid, cntid) {
|
||||
const cacheKey = `${folid}|${cntid}`;
|
||||
delete cloudConnectionCache[cacheKey];
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
createDbGateIdentitySession,
|
||||
startCloudTokenChecking,
|
||||
startCloudFiles,
|
||||
getPublicCloudFiles,
|
||||
getPublicFileData,
|
||||
refreshPublicFiles,
|
||||
callCloudApiGet,
|
||||
callCloudApiPost,
|
||||
getCloudFolderEncryptor,
|
||||
getCloudContent,
|
||||
loadCachedCloudConnection,
|
||||
putCloudContent,
|
||||
removeCloudCachedConnection,
|
||||
};
|
||||
@@ -81,11 +81,11 @@ function decryptPasswordString(password) {
|
||||
return password;
|
||||
}
|
||||
|
||||
function encryptObjectPasswordField(obj, field) {
|
||||
function encryptObjectPasswordField(obj, field, encryptor = null) {
|
||||
if (obj && obj[field] && !obj[field].startsWith('crypt:')) {
|
||||
return {
|
||||
...obj,
|
||||
[field]: 'crypt:' + getInternalEncryptor().encrypt(obj[field]),
|
||||
[field]: 'crypt:' + (encryptor || getInternalEncryptor()).encrypt(obj[field]),
|
||||
};
|
||||
}
|
||||
return obj;
|
||||
@@ -101,11 +101,11 @@ function decryptObjectPasswordField(obj, field) {
|
||||
return obj;
|
||||
}
|
||||
|
||||
function encryptConnection(connection) {
|
||||
function encryptConnection(connection, encryptor = null) {
|
||||
if (connection.passwordMode != 'saveRaw') {
|
||||
connection = encryptObjectPasswordField(connection, 'password');
|
||||
connection = encryptObjectPasswordField(connection, 'sshPassword');
|
||||
connection = encryptObjectPasswordField(connection, 'sshKeyfilePassword');
|
||||
connection = encryptObjectPasswordField(connection, 'password', encryptor);
|
||||
connection = encryptObjectPasswordField(connection, 'sshPassword', encryptor);
|
||||
connection = encryptObjectPasswordField(connection, 'sshKeyfilePassword', encryptor);
|
||||
}
|
||||
return connection;
|
||||
}
|
||||
|
||||
@@ -4,6 +4,9 @@ const fs = require('fs');
|
||||
const _ = require('lodash');
|
||||
|
||||
const { jsldir } = require('../utility/directories');
|
||||
const { serializeJsTypesReplacer } = require('dbgate-tools');
|
||||
const { ChartProcessor } = require('dbgate-datalib');
|
||||
const { isProApp } = require('./checkLicense');
|
||||
|
||||
class QueryStreamTableWriter {
|
||||
constructor(sesid = undefined) {
|
||||
@@ -11,9 +14,12 @@ class QueryStreamTableWriter {
|
||||
this.currentChangeIndex = 1;
|
||||
this.initializedFile = false;
|
||||
this.sesid = sesid;
|
||||
if (isProApp()) {
|
||||
this.chartProcessor = new ChartProcessor();
|
||||
}
|
||||
}
|
||||
|
||||
initializeFromQuery(structure, resultIndex) {
|
||||
initializeFromQuery(structure, resultIndex, chartDefinition) {
|
||||
this.jslid = crypto.randomUUID();
|
||||
this.currentFile = path.join(jsldir(), `${this.jslid}.jsonl`);
|
||||
fs.writeFileSync(
|
||||
@@ -27,6 +33,9 @@ class QueryStreamTableWriter {
|
||||
this.writeCurrentStats(false, false);
|
||||
this.resultIndex = resultIndex;
|
||||
this.initializedFile = true;
|
||||
if (isProApp() && chartDefinition) {
|
||||
this.chartProcessor = new ChartProcessor([chartDefinition]);
|
||||
}
|
||||
process.send({ msgtype: 'recordset', jslid: this.jslid, resultIndex, sesid: this.sesid });
|
||||
}
|
||||
|
||||
@@ -38,7 +47,16 @@ class QueryStreamTableWriter {
|
||||
|
||||
row(row) {
|
||||
// console.log('ACCEPT ROW', row);
|
||||
this.currentStream.write(JSON.stringify(row) + '\n');
|
||||
this.currentStream.write(JSON.stringify(row, serializeJsTypesReplacer) + '\n');
|
||||
try {
|
||||
if (this.chartProcessor) {
|
||||
this.chartProcessor.addRow(row);
|
||||
}
|
||||
} catch (e) {
|
||||
console.error('Error processing chart row', e);
|
||||
this.chartProcessor = null;
|
||||
}
|
||||
|
||||
this.currentRowCount += 1;
|
||||
|
||||
if (!this.plannedStats) {
|
||||
@@ -81,20 +99,52 @@ class QueryStreamTableWriter {
|
||||
}
|
||||
|
||||
close(afterClose) {
|
||||
return new Promise(resolve => {
|
||||
if (this.currentStream) {
|
||||
this.currentStream.end(() => {
|
||||
this.writeCurrentStats(true, true);
|
||||
if (afterClose) afterClose();
|
||||
if (this.chartProcessor) {
|
||||
try {
|
||||
this.chartProcessor.finalize();
|
||||
if (this.chartProcessor.charts.length > 0) {
|
||||
process.send({
|
||||
msgtype: 'charts',
|
||||
sesid: this.sesid,
|
||||
jslid: this.jslid,
|
||||
charts: this.chartProcessor.charts,
|
||||
resultIndex: this.resultIndex,
|
||||
});
|
||||
}
|
||||
} catch (e) {
|
||||
console.error('Error finalizing chart processor', e);
|
||||
this.chartProcessor = null;
|
||||
}
|
||||
}
|
||||
resolve();
|
||||
});
|
||||
} else {
|
||||
resolve();
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
class StreamHandler {
|
||||
constructor(queryStreamInfoHolder, resolve, startLine, sesid = undefined) {
|
||||
constructor(
|
||||
queryStreamInfoHolder,
|
||||
resolve,
|
||||
startLine,
|
||||
sesid = undefined,
|
||||
limitRows = undefined,
|
||||
frontMatter = undefined
|
||||
) {
|
||||
this.recordset = this.recordset.bind(this);
|
||||
this.startLine = startLine;
|
||||
this.sesid = sesid;
|
||||
this.frontMatter = frontMatter;
|
||||
this.limitRows = limitRows;
|
||||
this.rowsLimitOverflow = false;
|
||||
this.row = this.row.bind(this);
|
||||
// this.error = this.error.bind(this);
|
||||
this.done = this.done.bind(this);
|
||||
@@ -106,6 +156,7 @@ class StreamHandler {
|
||||
this.plannedStats = false;
|
||||
this.queryStreamInfoHolder = queryStreamInfoHolder;
|
||||
this.resolve = resolve;
|
||||
this.rowCounter = 0;
|
||||
// currentHandlers = [...currentHandlers, this];
|
||||
}
|
||||
|
||||
@@ -117,13 +168,18 @@ class StreamHandler {
|
||||
}
|
||||
|
||||
recordset(columns) {
|
||||
if (this.rowsLimitOverflow) {
|
||||
return;
|
||||
}
|
||||
this.closeCurrentWriter();
|
||||
this.currentWriter = new QueryStreamTableWriter(this.sesid);
|
||||
this.currentWriter.initializeFromQuery(
|
||||
Array.isArray(columns) ? { columns } : columns,
|
||||
this.queryStreamInfoHolder.resultIndex
|
||||
this.queryStreamInfoHolder.resultIndex,
|
||||
this.frontMatter?.[`chart-${this.queryStreamInfoHolder.resultIndex + 1}`]
|
||||
);
|
||||
this.queryStreamInfoHolder.resultIndex += 1;
|
||||
this.rowCounter = 0;
|
||||
|
||||
// this.writeCurrentStats();
|
||||
|
||||
@@ -134,8 +190,36 @@ class StreamHandler {
|
||||
// }, 500);
|
||||
}
|
||||
row(row) {
|
||||
if (this.currentWriter) this.currentWriter.row(row);
|
||||
else if (row.message) process.send({ msgtype: 'info', info: { message: row.message }, sesid: this.sesid });
|
||||
if (this.rowsLimitOverflow) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (this.limitRows && this.rowCounter >= this.limitRows) {
|
||||
process.send({
|
||||
msgtype: 'info',
|
||||
info: { message: `Rows limit overflow, loaded ${this.rowCounter} rows, canceling query`, severity: 'error' },
|
||||
sesid: this.sesid,
|
||||
});
|
||||
this.rowsLimitOverflow = true;
|
||||
|
||||
this.queryStreamInfoHolder.canceled = true;
|
||||
if (this.currentWriter) {
|
||||
this.currentWriter.close().then(() => {
|
||||
process.exit(0);
|
||||
});
|
||||
} else {
|
||||
process.exit(0);
|
||||
}
|
||||
|
||||
return;
|
||||
}
|
||||
|
||||
if (this.currentWriter) {
|
||||
this.currentWriter.row(row);
|
||||
this.rowCounter += 1;
|
||||
} else if (row.message) {
|
||||
process.send({ msgtype: 'info', info: { message: row.message }, sesid: this.sesid });
|
||||
}
|
||||
// this.onRow(this.jslid);
|
||||
}
|
||||
// error(error) {
|
||||
@@ -160,10 +244,25 @@ class StreamHandler {
|
||||
}
|
||||
}
|
||||
|
||||
function handleQueryStream(dbhan, driver, queryStreamInfoHolder, sqlItem, sesid = undefined) {
|
||||
function handleQueryStream(
|
||||
dbhan,
|
||||
driver,
|
||||
queryStreamInfoHolder,
|
||||
sqlItem,
|
||||
sesid = undefined,
|
||||
limitRows = undefined,
|
||||
frontMatter = undefined
|
||||
) {
|
||||
return new Promise((resolve, reject) => {
|
||||
const start = sqlItem.trimStart || sqlItem.start;
|
||||
const handler = new StreamHandler(queryStreamInfoHolder, resolve, start && start.line, sesid);
|
||||
const handler = new StreamHandler(
|
||||
queryStreamInfoHolder,
|
||||
resolve,
|
||||
start && start.line,
|
||||
sesid,
|
||||
limitRows,
|
||||
frontMatter
|
||||
);
|
||||
driver.stream(dbhan, sqlItem.text, handler);
|
||||
});
|
||||
}
|
||||
|
||||
@@ -87,4 +87,5 @@ module.exports = {
|
||||
getHardwareFingerprint,
|
||||
getHardwareFingerprintHash,
|
||||
getPublicHardwareFingerprint,
|
||||
getPublicIpInfo,
|
||||
};
|
||||
|
||||
@@ -6,6 +6,7 @@
|
||||
"scripts": {
|
||||
"build": "tsc",
|
||||
"test": "jest",
|
||||
"test:charts": "jest -t \"Chart processor\"",
|
||||
"test:ci": "jest --json --outputFile=result.json --testLocationInResults",
|
||||
"start": "tsc --watch"
|
||||
},
|
||||
@@ -13,14 +14,15 @@
|
||||
"lib"
|
||||
],
|
||||
"dependencies": {
|
||||
"date-fns": "^4.1.0",
|
||||
"dbgate-filterparser": "^6.0.0-alpha.1",
|
||||
"dbgate-sqltree": "^6.0.0-alpha.1",
|
||||
"dbgate-tools": "^6.0.0-alpha.1",
|
||||
"dbgate-filterparser": "^6.0.0-alpha.1",
|
||||
"uuid": "^3.4.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"dbgate-types": "^6.0.0-alpha.1",
|
||||
"@types/node": "^13.7.0",
|
||||
"dbgate-types": "^6.0.0-alpha.1",
|
||||
"jest": "^28.1.3",
|
||||
"ts-jest": "^28.0.7",
|
||||
"typescript": "^4.4.3"
|
||||
|
||||
88
packages/datalib/src/chartDefinitions.ts
Normal file
88
packages/datalib/src/chartDefinitions.ts
Normal file
@@ -0,0 +1,88 @@
|
||||
export type ChartTypeEnum = 'bar' | 'line' | 'pie' | 'polarArea';
|
||||
export type ChartXTransformFunction =
|
||||
| 'identity'
|
||||
| 'date:minute'
|
||||
| 'date:hour'
|
||||
| 'date:day'
|
||||
| 'date:month'
|
||||
| 'date:year';
|
||||
export type ChartYAggregateFunction = 'sum' | 'first' | 'last' | 'min' | 'max' | 'count' | 'avg';
|
||||
export type ChartDataLabelFormatter = 'number' | 'size:bytes' | 'size:kb' | 'size:mb' | 'size:gb';
|
||||
|
||||
export const ChartConstDefaults = {
|
||||
sortOrder: ' asc',
|
||||
windowAlign: 'end',
|
||||
windowSize: 100,
|
||||
parentAggregateLimit: 200,
|
||||
};
|
||||
|
||||
export const ChartLimits = {
|
||||
AUTODETECT_CHART_LIMIT: 10, // limit for auto-detecting charts, to avoid too many charts
|
||||
AUTODETECT_MEASURES_LIMIT: 10, // limit for auto-detecting measures, to avoid too many measures
|
||||
APPLY_LIMIT_AFTER_ROWS: 100,
|
||||
MAX_DISTINCT_VALUES: 10, // max number of distinct values to keep in topDistinctValues
|
||||
VALID_VALUE_RATIO_LIMIT: 0.5, // limit for valid value ratio, y defs below this will not be used in auto-detect
|
||||
PIE_RATIO_LIMIT: 0.05, // limit for other values in pie chart, if the value is below this, it will be grouped into "Other"
|
||||
PIE_COUNT_LIMIT: 10, // limit for number of pie chart slices, if the number of slices is above this, it will be grouped into "Other"
|
||||
};
|
||||
|
||||
export interface ChartXFieldDefinition {
|
||||
field: string;
|
||||
title?: string;
|
||||
transformFunction: ChartXTransformFunction;
|
||||
sortOrder?: 'natural' | 'ascKeys' | 'descKeys' | 'ascValues' | 'descValues';
|
||||
windowAlign?: 'start' | 'end';
|
||||
windowSize?: number;
|
||||
parentAggregateLimit?: number;
|
||||
}
|
||||
|
||||
export interface ChartYFieldDefinition {
|
||||
field: string;
|
||||
title?: string;
|
||||
aggregateFunction: ChartYAggregateFunction;
|
||||
}
|
||||
|
||||
export interface ChartDefinition {
|
||||
chartType: ChartTypeEnum;
|
||||
title?: string;
|
||||
pieRatioLimit?: number; // limit for pie chart, if the value is below this, it will be grouped into "Other"
|
||||
pieCountLimit?: number; // limit for number of pie chart slices, if the number of slices is above this, it will be grouped into "Other"
|
||||
|
||||
xdef: ChartXFieldDefinition;
|
||||
ydefs: ChartYFieldDefinition[];
|
||||
|
||||
useDataLabels?: boolean;
|
||||
dataLabelFormatter?: ChartDataLabelFormatter;
|
||||
}
|
||||
|
||||
export interface ChartDateParsed {
|
||||
year: number;
|
||||
month?: number;
|
||||
day?: number;
|
||||
hour?: number;
|
||||
minute?: number;
|
||||
second?: number;
|
||||
fraction?: string;
|
||||
}
|
||||
|
||||
export interface ChartAvailableColumn {
|
||||
field: string;
|
||||
}
|
||||
|
||||
export interface ProcessedChart {
|
||||
minX?: string;
|
||||
maxX?: string;
|
||||
rowsAdded: number;
|
||||
buckets: { [key: string]: any }; // key is the bucket key, value is aggregated data
|
||||
bucketKeysOrdered: string[];
|
||||
bucketKeyDateParsed: { [key: string]: ChartDateParsed }; // key is the bucket key, value is parsed date
|
||||
isGivenDefinition: boolean; // true if the chart was created with a given definition, false if it was created from raw data
|
||||
invalidXRows: number;
|
||||
invalidYRows: { [key: string]: number }; // key is the y field, value is the count of invalid rows
|
||||
validYRows: { [key: string]: number }; // key is the field, value is the count of valid rows
|
||||
|
||||
topDistinctValues: { [key: string]: Set<any> }; // key is the field, value is the set of distinct values
|
||||
availableColumns: ChartAvailableColumn[];
|
||||
|
||||
definition: ChartDefinition;
|
||||
}
|
||||
374
packages/datalib/src/chartProcessor.ts
Normal file
374
packages/datalib/src/chartProcessor.ts
Normal file
@@ -0,0 +1,374 @@
|
||||
import {
|
||||
ChartAvailableColumn,
|
||||
ChartDateParsed,
|
||||
ChartDefinition,
|
||||
ChartLimits,
|
||||
ProcessedChart,
|
||||
} from './chartDefinitions';
|
||||
import _sortBy from 'lodash/sortBy';
|
||||
import _sum from 'lodash/sum';
|
||||
import {
|
||||
aggregateChartNumericValuesFromSource,
|
||||
autoAggregateCompactTimelineChart,
|
||||
computeChartBucketCardinality,
|
||||
computeChartBucketKey,
|
||||
fillChartTimelineBuckets,
|
||||
tryParseChartDate,
|
||||
} from './chartTools';
|
||||
import { getChartScore, getChartYFieldScore } from './chartScoring';
|
||||
|
||||
/**
 * Streaming chart builder: rows are fed in one at a time via `addRow`/`addRows`,
 * aggregated into per-chart buckets, and `finalize()` produces the ordered,
 * score-sorted list of charts in `this.charts`.
 *
 * Works in two modes: with `givenDefinitions` it only fills those charts;
 * with none it auto-detects candidate timeline charts from date-like columns.
 */
export class ChartProcessor {
  // charts being filled while rows stream in
  chartsProcessing: ProcessedChart[] = [];
  // final output, populated by finalize()
  charts: ProcessedChart[] = [];
  availableColumnsDict: { [field: string]: ChartAvailableColumn } = {};
  availableColumns: ChartAvailableColumn[] = [];
  // true when no definitions were given and charts are detected from data
  autoDetectCharts = false;
  rowsAdded = 0;

  constructor(public givenDefinitions: ChartDefinition[] = []) {
    // seed one empty processing state per given definition
    for (const definition of givenDefinitions) {
      this.chartsProcessing.push({
        definition,
        rowsAdded: 0,
        bucketKeysOrdered: [],
        buckets: {},
        bucketKeyDateParsed: {},
        isGivenDefinition: true,
        invalidXRows: 0,
        invalidYRows: {},
        availableColumns: [],
        validYRows: {},
        topDistinctValues: {},
      });
    }
    this.autoDetectCharts = this.givenDefinitions.length == 0;
  }

  /**
   * Classifies every column of `row` (date / numeric / short string),
   * creates auto-detected charts when applicable, and folds the row into
   * every processing chart's buckets.
   *
   * NOTE(review): `stringColumns` is collected and passed to `applyRawData`,
   * but `applyRawData` does not use it yet — presumably reserved for future
   * category support; confirm.
   */
  addRow(row: any) {
    const dateColumns: { [key: string]: ChartDateParsed } = {};
    const numericColumns: { [key: string]: number } = {};
    const numericColumnsForAutodetect: { [key: string]: number } = {};
    const stringColumns: { [key: string]: string } = {};

    for (const [key, value] of Object.entries(row)) {
      // numeric strings count as numbers; anything else becomes NaN
      const number: number = typeof value == 'string' ? Number(value) : typeof value == 'number' ? value : NaN;
      this.availableColumnsDict[key] = {
        field: key,
      };

      // id-like columns are excluded from auto-detected measures
      const keyLower = key.toLowerCase();
      const keyIsId = keyLower.endsWith('_id') || keyLower == 'id' || key.endsWith('Id');

      const parsedDate = tryParseChartDate(value);
      if (parsedDate) {
        dateColumns[key] = parsedDate;
        continue;
      }

      if (!isNaN(number) && isFinite(number)) {
        numericColumns[key] = number;
        if (!keyIsId) {
          numericColumnsForAutodetect[key] = number; // for auto-detecting charts
        }
        continue;
      }

      // short strings only — long text is not useful as a category
      if (typeof value === 'string' && isNaN(number) && value.length < 100) {
        stringColumns[key] = value;
      }
    }

    if (this.autoDetectCharts) {
      // create charts from data, if there are no given definitions
      for (const datecol in dateColumns) {
        // reuse an auto-detected timeline chart keyed on this date column
        let usedChart = this.chartsProcessing.find(
          chart =>
            !chart.isGivenDefinition &&
            chart.definition.xdef.field === datecol &&
            chart.definition.xdef.transformFunction?.startsWith('date:')
        );

        // after APPLY_LIMIT_AFTER_ROWS rows, only add charts while under the limit
        if (
          !usedChart &&
          (this.rowsAdded < ChartLimits.APPLY_LIMIT_AFTER_ROWS ||
            this.chartsProcessing.length < ChartLimits.AUTODETECT_CHART_LIMIT)
        ) {
          usedChart = {
            definition: {
              chartType: 'line',
              xdef: {
                field: datecol,
                transformFunction: 'date:day',
              },
              ydefs: [],
            },
            rowsAdded: 0,
            bucketKeysOrdered: [],
            buckets: {},
            bucketKeyDateParsed: {},
            isGivenDefinition: false,
            invalidXRows: 0,
            invalidYRows: {},
            availableColumns: [],
            validYRows: {},
            topDistinctValues: {},
          };
          this.chartsProcessing.push(usedChart);
        }

        // register every non-date column of the row as a candidate measure
        for (const [key, value] of Object.entries(row)) {
          if (value == null) continue;
          if (key == datecol) continue; // skip date column itself
          let existingYDef = usedChart.definition.ydefs.find(y => y.field === key);
          if (
            !existingYDef &&
            (this.rowsAdded < ChartLimits.APPLY_LIMIT_AFTER_ROWS ||
              usedChart.definition.ydefs.length < ChartLimits.AUTODETECT_MEASURES_LIMIT)
          ) {
            existingYDef = {
              field: key,
              aggregateFunction: 'sum',
            };
            usedChart.definition.ydefs.push(existingYDef);
          }
        }
      }
    }

    // apply on all charts with this date column
    for (const chart of this.chartsProcessing) {
      this.applyRawData(
        chart,
        row,
        dateColumns[chart.definition.xdef.field],
        chart.isGivenDefinition ? numericColumns : numericColumnsForAutodetect,
        stringColumns
      );
    }

    // roll timeline charts up to coarser granularity when they get too wide
    for (let i = 0; i < this.chartsProcessing.length; i++) {
      this.chartsProcessing[i] = autoAggregateCompactTimelineChart(this.chartsProcessing[i]);
    }

    this.rowsAdded += 1;
    if (this.rowsAdded == ChartLimits.APPLY_LIMIT_AFTER_ROWS) {
      this.applyLimitsOnCharts();
    }
  }

  /**
   * Trims the auto-detected charts and their measures to the configured
   * limits, keeping given-definition charts untouched.
   *
   * NOTE(review): `newAutodetectProcessingCharts` sorts
   * `this.chartsProcessing.slice(0, AUTODETECT_CHART_LIMIT)` — i.e. it slices
   * BEFORE sorting by score, and slices from the full list (which may include
   * given definitions, duplicating them in the concat below). Sorting
   * `autodetectProcessingCharts` by score and then slicing the top N looks
   * intended — confirm before changing.
   */
  applyLimitsOnCharts() {
    const autodetectProcessingCharts = this.chartsProcessing.filter(chart => !chart.isGivenDefinition);
    if (autodetectProcessingCharts.length > ChartLimits.AUTODETECT_CHART_LIMIT) {
      const newAutodetectProcessingCharts = _sortBy(
        this.chartsProcessing.slice(0, ChartLimits.AUTODETECT_CHART_LIMIT),
        chart => -getChartScore(chart)
      );

      // keep only the best-scoring measures of each auto-detected chart
      for (const chart of autodetectProcessingCharts) {
        chart.definition.ydefs = _sortBy(chart.definition.ydefs, yfield => -getChartYFieldScore(chart, yfield)).slice(
          0,
          ChartLimits.AUTODETECT_MEASURES_LIMIT
        );
      }

      this.chartsProcessing = [
        ...this.chartsProcessing.filter(chart => chart.isGivenDefinition),
        ...newAutodetectProcessingCharts,
      ];
    }
  }

  /** Convenience: feeds each argument to addRow in order. */
  addRows(...rows: any[]) {
    for (const row of rows) {
      this.addRow(row);
    }
  }

  /**
   * Finishes processing: applies limits, orders bucket keys per each chart's
   * sortOrder, drops low-quality measures of auto-detected charts, groups
   * small pie slices into "Other", and sorts the final chart list by score
   * (given definitions first, in their original order).
   */
  finalize() {
    this.applyLimitsOnCharts();
    this.availableColumns = Object.values(this.availableColumnsDict);
    for (const chart of this.chartsProcessing) {
      let addedChart: ProcessedChart = chart;
      if (chart.rowsAdded == 0) {
        continue; // skip empty charts
      }
      const sortOrder = chart.definition.xdef.sortOrder ?? 'ascKeys';
      if (sortOrder != 'natural') {
        if (sortOrder == 'ascKeys' || sortOrder == 'descKeys') {
          // NOTE(review): transformFunction is dereferenced without `?.` here,
          // unlike the optional-chained check in addRow — confirm it is
          // always defined for charts reaching finalize.
          if (chart.definition.xdef.transformFunction.startsWith('date:')) {
            addedChart = autoAggregateCompactTimelineChart(addedChart);
            fillChartTimelineBuckets(addedChart);
          }

          addedChart.bucketKeysOrdered = _sortBy(Object.keys(addedChart.buckets));
          if (sortOrder == 'descKeys') {
            addedChart.bucketKeysOrdered.reverse();
          }
        }

        if (sortOrder == 'ascValues' || sortOrder == 'descValues') {
          addedChart.bucketKeysOrdered = _sortBy(Object.keys(addedChart.buckets), key =>
            computeChartBucketCardinality(addedChart.buckets[key])
          );
          if (sortOrder == 'descValues') {
            addedChart.bucketKeysOrdered.reverse();
          }
        }
      }

      // auto-detected charts: keep only measures with no invalid rows and a
      // sufficient ratio of valid values
      if (!addedChart.isGivenDefinition) {
        addedChart = {
          ...addedChart,
          definition: {
            ...addedChart.definition,
            ydefs: addedChart.definition.ydefs.filter(
              y =>
                !addedChart.invalidYRows[y.field] &&
                addedChart.validYRows[y.field] / addedChart.rowsAdded >= ChartLimits.VALID_VALUE_RATIO_LIMIT
            ),
          },
        };
      }

      if (addedChart) {
        addedChart.availableColumns = this.availableColumns;
        this.charts.push(addedChart);
      }

      this.groupPieOtherBuckets(addedChart);
    }

    // given charts keep their order; auto-detected charts sorted by score
    this.charts = [
      ...this.charts.filter(x => x.isGivenDefinition),
      ..._sortBy(
        this.charts.filter(x => !x.isGivenDefinition),
        chart => -getChartScore(chart)
      ),
    ];
  }

  /**
   * Pie charts only: merges slices below `pieRatioLimit` of the total
   * cardinality, and slices beyond the `pieCountLimit` largest, into a
   * single "Other" bucket. Mutates `chart` in place.
   */
  groupPieOtherBuckets(chart: ProcessedChart) {
    if (chart.definition.chartType !== 'pie') {
      return; // only for pie charts
    }
    const ratioLimit = chart.definition.pieRatioLimit ?? ChartLimits.PIE_RATIO_LIMIT;
    const countLimit = chart.definition.pieCountLimit ?? ChartLimits.PIE_COUNT_LIMIT;
    if (ratioLimit == 0 && countLimit == 0) {
      return; // no grouping if limit is 0
    }
    const otherBucket: any = {};
    let newBuckets: any = {};
    const cardSum = _sum(Object.values(chart.buckets).map(bucket => computeChartBucketCardinality(bucket)));

    if (cardSum == 0) {
      return; // no buckets to process
    }

    // first pass: fold too-small slices into "Other"
    for (const [bucketKey, bucket] of Object.entries(chart.buckets)) {
      if (computeChartBucketCardinality(bucket) / cardSum < ratioLimit) {
        for (const field in bucket) {
          otherBucket[field] = (otherBucket[field] ?? 0) + bucket[field];
        }
      } else {
        newBuckets[bucketKey] = bucket;
      }
    }

    // second pass: keep only the countLimit largest remaining slices
    if (Object.keys(newBuckets).length > countLimit) {
      const sortedBucketKeys = _sortBy(
        Object.entries(newBuckets),
        ([, bucket]) => -computeChartBucketCardinality(bucket)
      ).map(([key]) => key);
      const newBuckets2 = {};
      sortedBucketKeys.forEach((key, index) => {
        if (index < countLimit) {
          newBuckets2[key] = newBuckets[key];
        } else {
          for (const field in newBuckets[key]) {
            otherBucket[field] = (otherBucket[field] ?? 0) + newBuckets[key][field];
          }
        }
      });
      newBuckets = newBuckets2;
    }

    if (Object.keys(otherBucket).length > 0) {
      newBuckets['Other'] = otherBucket;
    }
    chart.buckets = newBuckets;
    // keep existing key order, with "Other" appended when present
    chart.bucketKeysOrdered = [...chart.bucketKeysOrdered, 'Other'].filter(key => key in newBuckets);
  }

  /**
   * Folds one source row into one chart: computes the bucket key from the
   * x definition, tracks the min/max key range and counts, then delegates
   * the numeric aggregation to aggregateChartNumericValuesFromSource.
   */
  applyRawData(
    chart: ProcessedChart,
    row: any,
    dateParsed: ChartDateParsed,
    numericColumns: { [key: string]: number },
    stringColumns: { [key: string]: string }
  ) {
    if (chart.definition.xdef == null) {
      return;
    }

    if (row[chart.definition.xdef.field] == null) {
      return;
    }

    if (dateParsed == null && chart.definition.xdef.transformFunction.startsWith('date:')) {
      chart.invalidXRows += 1;
      return; // skip if date is invalid
    }

    const [bucketKey, bucketKeyParsed] = computeChartBucketKey(dateParsed, chart, row);

    if (!bucketKey) {
      return; // skip if no bucket key
    }

    if (bucketKeyParsed) {
      chart.bucketKeyDateParsed[bucketKey] = bucketKeyParsed;
    }

    // track the lexical key range (used by timeline filling/aggregation)
    if (chart.minX == null || bucketKey < chart.minX) {
      chart.minX = bucketKey;
    }
    if (chart.maxX == null || bucketKey > chart.maxX) {
      chart.maxX = bucketKey;
    }

    if (!chart.buckets[bucketKey]) {
      chart.buckets[bucketKey] = {};
      if (chart.definition.xdef.sortOrder == 'natural') {
        chart.bucketKeysOrdered.push(bucketKey);
      }
    }

    aggregateChartNumericValuesFromSource(chart, bucketKey, numericColumns, row);
    chart.rowsAdded += 1;
  }
}
|
||||
23
packages/datalib/src/chartScoring.ts
Normal file
23
packages/datalib/src/chartScoring.ts
Normal file
@@ -0,0 +1,23 @@
|
||||
import _sortBy from 'lodash/sortBy';
|
||||
import _sum from 'lodash/sum';
|
||||
import { ChartLimits, ChartYFieldDefinition, ProcessedChart } from './chartDefinitions';
|
||||
|
||||
export function getChartScore(chart: ProcessedChart): number {
|
||||
let res = 0;
|
||||
res += chart.rowsAdded * 5;
|
||||
|
||||
const ydefScores = chart.definition.ydefs.map(yField => getChartYFieldScore(chart, yField));
|
||||
const sorted = _sortBy(ydefScores).reverse();
|
||||
res += _sum(sorted.slice(0, ChartLimits.AUTODETECT_MEASURES_LIMIT));
|
||||
return res;
|
||||
}
|
||||
|
||||
export function getChartYFieldScore(chart: ProcessedChart, yField: ChartYFieldDefinition): number {
|
||||
let res = 0;
|
||||
res += chart.validYRows[yField.field] * 5; // score for valid Y rows
|
||||
res += (chart.topDistinctValues[yField.field]?.size ?? 0) * 20; // score for distinct values in Y field
|
||||
res += chart.rowsAdded * 2; // base score for rows added
|
||||
res -= (chart.invalidYRows[yField.field] ?? 0) * 50; // penalty for invalid Y rows
|
||||
|
||||
return res;
|
||||
}
|
||||
542
packages/datalib/src/chartTools.ts
Normal file
542
packages/datalib/src/chartTools.ts
Normal file
@@ -0,0 +1,542 @@
|
||||
import _toPairs from 'lodash/toPairs';
|
||||
import _sumBy from 'lodash/sumBy';
|
||||
import {
|
||||
ChartConstDefaults,
|
||||
ChartDateParsed,
|
||||
ChartLimits,
|
||||
ChartXTransformFunction,
|
||||
ProcessedChart,
|
||||
} from './chartDefinitions';
|
||||
import { addMinutes, addHours, addDays, addMonths, addYears } from 'date-fns';
|
||||
|
||||
export function getChartDebugPrint(chart: ProcessedChart) {
|
||||
let res = '';
|
||||
res += `Chart: ${chart.definition.chartType} (${chart.definition.xdef.transformFunction})\n`;
|
||||
for (const key of chart.bucketKeysOrdered) {
|
||||
res += `${key}: ${_toPairs(chart.buckets[key])
|
||||
.map(([k, v]) => `${k}=${v}`)
|
||||
.join(', ')}\n`;
|
||||
}
|
||||
return res;
|
||||
}
|
||||
|
||||
export function tryParseChartDate(dateInput: any): ChartDateParsed | null {
|
||||
if (dateInput instanceof Date) {
|
||||
return {
|
||||
year: dateInput.getFullYear(),
|
||||
month: dateInput.getMonth() + 1,
|
||||
day: dateInput.getDate(),
|
||||
hour: dateInput.getHours(),
|
||||
minute: dateInput.getMinutes(),
|
||||
second: dateInput.getSeconds(),
|
||||
fraction: undefined, // Date object does not have fraction
|
||||
};
|
||||
}
|
||||
|
||||
if (typeof dateInput !== 'string') return null;
|
||||
const m = dateInput.match(
|
||||
/^(\d{4})-(\d{2})-(\d{2})(?:[ T](\d{2}):(\d{2}):(\d{2})(?:\.(\d+))?(Z|[+-]\d{2}:\d{2})?)?$/
|
||||
);
|
||||
if (!m) return null;
|
||||
|
||||
const [_notUsed, year, month, day, hour, minute, second, fraction] = m;
|
||||
|
||||
return {
|
||||
year: parseInt(year, 10),
|
||||
month: parseInt(month, 10),
|
||||
day: parseInt(day, 10),
|
||||
hour: parseInt(hour, 10) || 0,
|
||||
minute: parseInt(minute, 10) || 0,
|
||||
second: parseInt(second, 10) || 0,
|
||||
fraction: fraction || undefined,
|
||||
};
|
||||
}
|
||||
|
||||
function pad2Digits(number) {
|
||||
return ('00' + number).slice(-2);
|
||||
}
|
||||
|
||||
export function stringifyChartDate(value: ChartDateParsed, transform: ChartXTransformFunction): string {
|
||||
switch (transform) {
|
||||
case 'date:year':
|
||||
return `${value.year}`;
|
||||
case 'date:month':
|
||||
return `${value.year}-${pad2Digits(value.month)}`;
|
||||
case 'date:day':
|
||||
return `${value.year}-${pad2Digits(value.month)}-${pad2Digits(value.day)}`;
|
||||
case 'date:hour':
|
||||
return `${value.year}-${pad2Digits(value.month)}-${pad2Digits(value.day)} ${pad2Digits(value.hour)}`;
|
||||
case 'date:minute':
|
||||
return `${value.year}-${pad2Digits(value.month)}-${pad2Digits(value.day)} ${pad2Digits(value.hour)}:${pad2Digits(
|
||||
value.minute
|
||||
)}`;
|
||||
default:
|
||||
return '';
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Returns the date one step after `value` at the granularity of `transform`,
 * carrying only the components relevant to that granularity. A real Date is
 * used for the arithmetic so month/year rollover is handled by date-fns.
 *
 * NOTE(review): for a non-`date:` transform neither switch matches, so the
 * function returns undefined despite its declared return type — callers in
 * this file only pass date transforms; confirm before widening usage.
 */
export function incrementChartDate(value: ChartDateParsed, transform: ChartXTransformFunction): ChartDateParsed {
  // Date months are 0-based; missing components default to the range start.
  const dateRepresentation = new Date(
    value.year,
    (value.month ?? 1) - 1,
    value.day ?? 1,
    value.hour ?? 0,
    value.minute ?? 0
  );
  let newDateRepresentation: Date;
  switch (transform) {
    case 'date:year':
      newDateRepresentation = addYears(dateRepresentation, 1);
      break;
    case 'date:month':
      newDateRepresentation = addMonths(dateRepresentation, 1);
      break;
    case 'date:day':
      newDateRepresentation = addDays(dateRepresentation, 1);
      break;
    case 'date:hour':
      newDateRepresentation = addHours(dateRepresentation, 1);
      break;
    case 'date:minute':
      newDateRepresentation = addMinutes(dateRepresentation, 1);
      break;
  }
  // Project the incremented Date back onto only the components the
  // granularity needs (month converted back to 1-based).
  switch (transform) {
    case 'date:year':
      return { year: newDateRepresentation.getFullYear() };
    case 'date:month':
      return {
        year: newDateRepresentation.getFullYear(),
        month: newDateRepresentation.getMonth() + 1,
      };
    case 'date:day':
      return {
        year: newDateRepresentation.getFullYear(),
        month: newDateRepresentation.getMonth() + 1,
        day: newDateRepresentation.getDate(),
      };
    case 'date:hour':
      return {
        year: newDateRepresentation.getFullYear(),
        month: newDateRepresentation.getMonth() + 1,
        day: newDateRepresentation.getDate(),
        hour: newDateRepresentation.getHours(),
      };
    case 'date:minute':
      return {
        year: newDateRepresentation.getFullYear(),
        month: newDateRepresentation.getMonth() + 1,
        day: newDateRepresentation.getDate(),
        hour: newDateRepresentation.getHours(),
        minute: newDateRepresentation.getMinutes(),
      };
  }
}
|
||||
|
||||
export function computeChartBucketKey(
|
||||
dateParsed: ChartDateParsed,
|
||||
chart: ProcessedChart,
|
||||
row: any
|
||||
): [string, ChartDateParsed] {
|
||||
switch (chart.definition.xdef.transformFunction) {
|
||||
case 'date:year':
|
||||
return [dateParsed ? `${dateParsed.year}` : null, { year: dateParsed.year }];
|
||||
case 'date:month':
|
||||
return [
|
||||
dateParsed ? `${dateParsed.year}-${pad2Digits(dateParsed.month)}` : null,
|
||||
{
|
||||
year: dateParsed.year,
|
||||
month: dateParsed.month,
|
||||
},
|
||||
];
|
||||
case 'date:day':
|
||||
return [
|
||||
dateParsed ? `${dateParsed.year}-${pad2Digits(dateParsed.month)}-${pad2Digits(dateParsed.day)}` : null,
|
||||
{
|
||||
year: dateParsed.year,
|
||||
month: dateParsed.month,
|
||||
day: dateParsed.day,
|
||||
},
|
||||
];
|
||||
case 'date:hour':
|
||||
return [
|
||||
dateParsed
|
||||
? `${dateParsed.year}-${pad2Digits(dateParsed.month)}-${pad2Digits(dateParsed.day)} ${pad2Digits(
|
||||
dateParsed.hour
|
||||
)}`
|
||||
: null,
|
||||
{
|
||||
year: dateParsed.year,
|
||||
month: dateParsed.month,
|
||||
day: dateParsed.day,
|
||||
hour: dateParsed.hour,
|
||||
},
|
||||
];
|
||||
case 'date:minute':
|
||||
return [
|
||||
dateParsed
|
||||
? `${dateParsed.year}-${pad2Digits(dateParsed.month)}-${pad2Digits(dateParsed.day)} ${pad2Digits(
|
||||
dateParsed.hour
|
||||
)}:${pad2Digits(dateParsed.minute)}`
|
||||
: null,
|
||||
{
|
||||
year: dateParsed.year,
|
||||
month: dateParsed.month,
|
||||
day: dateParsed.day,
|
||||
hour: dateParsed.hour,
|
||||
minute: dateParsed.minute,
|
||||
},
|
||||
];
|
||||
case 'identity':
|
||||
default:
|
||||
return [row[chart.definition.xdef.field], null];
|
||||
}
|
||||
}
|
||||
|
||||
export function computeDateBucketDistance(
|
||||
begin: ChartDateParsed,
|
||||
end: ChartDateParsed,
|
||||
transform: ChartXTransformFunction
|
||||
): number {
|
||||
switch (transform) {
|
||||
case 'date:year':
|
||||
return end.year - begin.year;
|
||||
case 'date:month':
|
||||
return (end.year - begin.year) * 12 + (end.month - begin.month);
|
||||
case 'date:day':
|
||||
return (
|
||||
(end.year - begin.year) * 365 +
|
||||
(end.month - begin.month) * 30 + // rough approximation
|
||||
(end.day - begin.day)
|
||||
);
|
||||
case 'date:hour':
|
||||
return (
|
||||
(end.year - begin.year) * 365 * 24 +
|
||||
(end.month - begin.month) * 30 * 24 + // rough approximation
|
||||
(end.day - begin.day) * 24 +
|
||||
(end.hour - begin.hour)
|
||||
);
|
||||
case 'date:minute':
|
||||
return (
|
||||
(end.year - begin.year) * 365 * 24 * 60 +
|
||||
(end.month - begin.month) * 30 * 24 * 60 + // rough approximation
|
||||
(end.day - begin.day) * 24 * 60 +
|
||||
(end.hour - begin.hour) * 60 +
|
||||
(end.minute - begin.minute)
|
||||
);
|
||||
case 'identity':
|
||||
default:
|
||||
return NaN;
|
||||
}
|
||||
}
|
||||
|
||||
export function compareChartDatesParsed(
|
||||
a: ChartDateParsed,
|
||||
b: ChartDateParsed,
|
||||
transform: ChartXTransformFunction
|
||||
): number {
|
||||
switch (transform) {
|
||||
case 'date:year':
|
||||
return a.year - b.year;
|
||||
case 'date:month':
|
||||
return a.year === b.year ? a.month - b.month : a.year - b.year;
|
||||
case 'date:day':
|
||||
return a.year === b.year && a.month === b.month
|
||||
? a.day - b.day
|
||||
: a.year === b.year
|
||||
? a.month - b.month
|
||||
: a.year - b.year;
|
||||
case 'date:hour':
|
||||
return a.year === b.year && a.month === b.month && a.day === b.day
|
||||
? a.hour - b.hour
|
||||
: a.year === b.year && a.month === b.month
|
||||
? a.day - b.day
|
||||
: a.year === b.year
|
||||
? a.month - b.month
|
||||
: a.year - b.year;
|
||||
|
||||
case 'date:minute':
|
||||
return a.year === b.year && a.month === b.month && a.day === b.day && a.hour === b.hour
|
||||
? a.minute - b.minute
|
||||
: a.year === b.year && a.month === b.month && a.day === b.day
|
||||
? a.hour - b.hour
|
||||
: a.year === b.year && a.month === b.month
|
||||
? a.day - b.day
|
||||
: a.year === b.year
|
||||
? a.month - b.month
|
||||
: a.year - b.year;
|
||||
}
|
||||
}
|
||||
|
||||
function getParentDateBucketKey(bucketKey: string, transform: ChartXTransformFunction): string | null {
|
||||
switch (transform) {
|
||||
case 'date:year':
|
||||
return null; // no parent for year
|
||||
case 'date:month':
|
||||
return bucketKey.slice(0, 4);
|
||||
case 'date:day':
|
||||
return bucketKey.slice(0, 7);
|
||||
case 'date:hour':
|
||||
return bucketKey.slice(0, 10);
|
||||
case 'date:minute':
|
||||
return bucketKey.slice(0, 13);
|
||||
}
|
||||
}
|
||||
|
||||
function getParentDateBucketTransform(transform: ChartXTransformFunction): ChartXTransformFunction | null {
|
||||
switch (transform) {
|
||||
case 'date:year':
|
||||
return null; // no parent for year
|
||||
case 'date:month':
|
||||
return 'date:year';
|
||||
case 'date:day':
|
||||
return 'date:month';
|
||||
case 'date:hour':
|
||||
return 'date:day';
|
||||
case 'date:minute':
|
||||
return 'date:hour';
|
||||
default:
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
function getParentKeyParsed(date: ChartDateParsed, transform: ChartXTransformFunction): ChartDateParsed | null {
|
||||
switch (transform) {
|
||||
case 'date:year':
|
||||
return null; // no parent for year
|
||||
case 'date:month':
|
||||
return { year: date.year };
|
||||
case 'date:day':
|
||||
return { year: date.year, month: date.month };
|
||||
case 'date:hour':
|
||||
return { year: date.year, month: date.month, day: date.day };
|
||||
case 'date:minute':
|
||||
return { year: date.year, month: date.month, day: date.day, hour: date.hour };
|
||||
default:
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Builds a coarser-granularity copy of an auto-detected chart by re-keying
 * every bucket to its parent date bucket (e.g. minutes -> hours) and
 * re-aggregating the values. Returns null when aggregation is not possible
 * (given-definition charts, or already at year granularity).
 */
function createParentChartAggregation(chart: ProcessedChart): ProcessedChart | null {
  if (chart.isGivenDefinition) {
    // if the chart is created with a given definition, we cannot create a parent aggregation
    return null;
  }
  const parentTransform = getParentDateBucketTransform(chart.definition.xdef.transformFunction);
  if (!parentTransform) {
    return null;
  }

  // fresh chart with the parent transform; row/validity counters carry over
  const res: ProcessedChart = {
    definition: {
      ...chart.definition,
      xdef: {
        ...chart.definition.xdef,
        transformFunction: parentTransform,
      },
    },
    rowsAdded: chart.rowsAdded,
    bucketKeysOrdered: [],
    buckets: {},
    bucketKeyDateParsed: {},
    isGivenDefinition: false,
    invalidXRows: chart.invalidXRows,
    invalidYRows: { ...chart.invalidYRows }, // copy invalid Y rows
    validYRows: { ...chart.validYRows }, // copy valid Y rows
    topDistinctValues: { ...chart.topDistinctValues }, // copy top distinct values
    availableColumns: chart.availableColumns,
  };

  // merge each child bucket into its parent bucket
  for (const [bucketKey, bucketValues] of Object.entries(chart.buckets)) {
    const parentKey = getParentDateBucketKey(bucketKey, chart.definition.xdef.transformFunction);
    if (!parentKey) {
      // skip if the bucket is already a parent
      continue;
    }
    res.bucketKeyDateParsed[parentKey] = getParentKeyParsed(
      chart.bucketKeyDateParsed[bucketKey],
      chart.definition.xdef.transformFunction
    );
    aggregateChartNumericValuesFromChild(res, parentKey, bucketValues);
  }

  // recompute the key range — parent keys still sort lexically
  const bucketKeys = Object.keys(res.buckets).sort();
  res.minX = bucketKeys.length > 0 ? bucketKeys[0] : null;
  res.maxX = bucketKeys.length > 0 ? bucketKeys[bucketKeys.length - 1] : null;

  return res;
}
|
||||
|
||||
/**
 * Repeatedly rolls an auto-detected timeline chart up to its parent
 * granularity until the span between minX and maxX fits within
 * parentAggregateLimit buckets (or no further aggregation is possible).
 * Returns the (possibly replaced) chart; the input is not mutated once
 * aggregation starts.
 */
export function autoAggregateCompactTimelineChart(chart: ProcessedChart) {
  while (true) {
    const fromParsed = chart.bucketKeyDateParsed[chart.minX];
    const toParsed = chart.bucketKeyDateParsed[chart.maxX];

    if (!fromParsed || !toParsed) {
      return chart; // cannot fill timeline buckets without valid date range
    }
    const transform = chart.definition.xdef.transformFunction;
    if (!transform.startsWith('date:')) {
      return chart; // cannot aggregate non-date charts
    }
    const dateDistance = computeDateBucketDistance(fromParsed, toParsed, transform);
    if (dateDistance < (chart.definition.xdef.parentAggregateLimit ?? ChartConstDefaults.parentAggregateLimit)) {
      return chart; // no need to aggregate further, the distance is less than the limit
    }

    const parentChart = createParentChartAggregation(chart);
    if (!parentChart) {
      return chart; // cannot create parent aggregation
    }

    // loop again — the parent may still be too wide
    chart = parentChart;
  }
}
|
||||
|
||||
/**
 * Folds one source row's numeric values into the given bucket of a chart,
 * applying each Y definition's aggregate function, and updates the per-field
 * valid/invalid counters and the capped distinct-value sets.
 *
 * NOTE(review): 'avg' stores a [sum, count] pair in the bucket — consumers
 * that add bucket values directly (e.g. computeChartBucketCardinality)
 * would add an array; confirm how avg buckets are finalized downstream.
 */
export function aggregateChartNumericValuesFromSource(
  chart: ProcessedChart,
  bucketKey: string,
  numericColumns: { [key: string]: number },
  row: any
) {
  for (const ydef of chart.definition.ydefs) {
    if (numericColumns[ydef.field] == null) {
      // the row has a truthy but non-numeric value for this field
      if (row[ydef.field]) {
        chart.invalidYRows[ydef.field] = (chart.invalidYRows[ydef.field] || 0) + 1; // increment invalid row count if the field is not numeric
      }
      continue;
    }
    chart.validYRows[ydef.field] = (chart.validYRows[ydef.field] || 0) + 1; // increment valid row count

    // track distinct values, capped at MAX_DISTINCT_VALUES per field
    let distinctValues = chart.topDistinctValues[ydef.field];
    if (!distinctValues) {
      distinctValues = new Set();
      chart.topDistinctValues[ydef.field] = distinctValues;
    }
    if (distinctValues.size < ChartLimits.MAX_DISTINCT_VALUES) {
      chart.topDistinctValues[ydef.field].add(numericColumns[ydef.field]);
    }

    switch (ydef.aggregateFunction) {
      case 'sum':
        chart.buckets[bucketKey][ydef.field] =
          (chart.buckets[bucketKey][ydef.field] || 0) + (numericColumns[ydef.field] || 0);
        break;
      case 'first':
        // keep the first value seen for this bucket
        if (chart.buckets[bucketKey][ydef.field] === undefined) {
          chart.buckets[bucketKey][ydef.field] = numericColumns[ydef.field];
        }
        break;
      case 'last':
        // always overwrite — the last value seen wins
        chart.buckets[bucketKey][ydef.field] = numericColumns[ydef.field];
        break;
      case 'min':
        if (chart.buckets[bucketKey][ydef.field] === undefined) {
          chart.buckets[bucketKey][ydef.field] = numericColumns[ydef.field];
        } else {
          chart.buckets[bucketKey][ydef.field] = Math.min(
            chart.buckets[bucketKey][ydef.field],
            numericColumns[ydef.field]
          );
        }
        break;
      case 'max':
        if (chart.buckets[bucketKey][ydef.field] === undefined) {
          chart.buckets[bucketKey][ydef.field] = numericColumns[ydef.field];
        } else {
          chart.buckets[bucketKey][ydef.field] = Math.max(
            chart.buckets[bucketKey][ydef.field],
            numericColumns[ydef.field]
          );
        }
        break;
      case 'count':
        chart.buckets[bucketKey][ydef.field] = (chart.buckets[bucketKey][ydef.field] || 0) + 1;
        break;
      case 'avg':
        if (chart.buckets[bucketKey][ydef.field] === undefined) {
          chart.buckets[bucketKey][ydef.field] = [numericColumns[ydef.field], 1]; // [sum, count]
        } else {
          chart.buckets[bucketKey][ydef.field][0] += numericColumns[ydef.field];
          chart.buckets[bucketKey][ydef.field][1] += 1;
        }
        break;
    }
  }
}
|
||||
|
||||
export function aggregateChartNumericValuesFromChild(
|
||||
chart: ProcessedChart,
|
||||
bucketKey: string,
|
||||
childBucketValues: { [key: string]: any }
|
||||
) {
|
||||
for (const ydef of chart.definition.ydefs) {
|
||||
if (childBucketValues[ydef.field] == undefined) {
|
||||
continue; // skip if the field is not present in the child bucket
|
||||
}
|
||||
if (!chart.buckets[bucketKey]) {
|
||||
chart.buckets[bucketKey] = {};
|
||||
}
|
||||
switch (ydef.aggregateFunction) {
|
||||
case 'sum':
|
||||
case 'count':
|
||||
chart.buckets[bucketKey][ydef.field] =
|
||||
(chart.buckets[bucketKey][ydef.field] || 0) + (childBucketValues[ydef.field] || 0);
|
||||
break;
|
||||
case 'min':
|
||||
if (chart.buckets[bucketKey][ydef.field] === undefined) {
|
||||
chart.buckets[bucketKey][ydef.field] = childBucketValues[ydef.field];
|
||||
} else {
|
||||
chart.buckets[bucketKey][ydef.field] = Math.min(
|
||||
chart.buckets[bucketKey][ydef.field],
|
||||
childBucketValues[ydef.field]
|
||||
);
|
||||
}
|
||||
break;
|
||||
case 'max':
|
||||
if (chart.buckets[bucketKey][ydef.field] === undefined) {
|
||||
chart.buckets[bucketKey][ydef.field] = childBucketValues[ydef.field];
|
||||
} else {
|
||||
chart.buckets[bucketKey][ydef.field] = Math.max(
|
||||
chart.buckets[bucketKey][ydef.field],
|
||||
childBucketValues[ydef.field]
|
||||
);
|
||||
}
|
||||
break;
|
||||
case 'avg':
|
||||
if (chart.buckets[bucketKey][ydef.field] === undefined) {
|
||||
chart.buckets[bucketKey][ydef.field] = childBucketValues[ydef.field];
|
||||
} else {
|
||||
chart.buckets[bucketKey][ydef.field][0] += childBucketValues[ydef.field][0];
|
||||
chart.buckets[bucketKey][ydef.field][1] += childBucketValues[ydef.field][1];
|
||||
}
|
||||
break;
|
||||
case 'first':
|
||||
case 'last':
|
||||
throw new Error(`Cannot aggregate ${ydef.aggregateFunction} for ${ydef.field} in child bucket`);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Inserts empty buckets for every missing date step between the chart's
 * minX and maxX so the rendered timeline has no gaps. Mutates the chart
 * in place; does nothing when the key range has no parsed dates.
 */
export function fillChartTimelineBuckets(chart: ProcessedChart) {
  const fromParsed = chart.bucketKeyDateParsed[chart.minX];
  const toParsed = chart.bucketKeyDateParsed[chart.maxX];
  if (!fromParsed || !toParsed) {
    return; // cannot fill timeline buckets without valid date range
  }
  const transform = chart.definition.xdef.transformFunction;

  // walk from the first to the last bucket, one granularity step at a time
  let currentParsed = fromParsed;
  while (compareChartDatesParsed(currentParsed, toParsed, transform) <= 0) {
    const bucketKey = stringifyChartDate(currentParsed, transform);
    if (!chart.buckets[bucketKey]) {
      chart.buckets[bucketKey] = {};
      chart.bucketKeyDateParsed[bucketKey] = currentParsed;
    }
    currentParsed = incrementChartDate(currentParsed, transform);
  }
}
|
||||
|
||||
export function computeChartBucketCardinality(bucket: { [key: string]: any }): number {
|
||||
return _sumBy(Object.keys(bucket), field => bucket[field]);
|
||||
}
|
||||
@@ -23,3 +23,5 @@ export * from './FreeTableGridDisplay';
|
||||
export * from './FreeTableModel';
|
||||
export * from './CustomGridDisplay';
|
||||
export * from './ScriptDrivedDeployer';
|
||||
export * from './chartDefinitions';
|
||||
export * from './chartProcessor';
|
||||
|
||||
376
packages/datalib/src/tests/chartProcessor.test.ts
Normal file
376
packages/datalib/src/tests/chartProcessor.test.ts
Normal file
@@ -0,0 +1,376 @@
|
||||
import exp from 'constants';
|
||||
import { ChartProcessor } from '../chartProcessor';
|
||||
import { getChartDebugPrint } from '../chartTools';
|
||||
|
||||
const DS1 = [
|
||||
{
|
||||
timestamp: '2023-10-01T12:00:00Z',
|
||||
value: 42.5,
|
||||
category: 'B',
|
||||
related_id: 12,
|
||||
},
|
||||
{
|
||||
timestamp: '2023-10-02T10:05:00Z',
|
||||
value: 12,
|
||||
category: 'A',
|
||||
related_id: 13,
|
||||
},
|
||||
{
|
||||
timestamp: '2023-10-03T07:10:00Z',
|
||||
value: 57,
|
||||
category: 'A',
|
||||
related_id: 5,
|
||||
},
|
||||
{
|
||||
timestamp: '2024-08-03T07:10:00Z',
|
||||
value: 33,
|
||||
category: 'B',
|
||||
related_id: 22,
|
||||
},
|
||||
];
|
||||
|
||||
const DS2 = [
|
||||
{
|
||||
ts1: '2023-10-01T12:00:00Z',
|
||||
ts2: '2024-10-01T12:00:00Z',
|
||||
dummy1: 1,
|
||||
dummy2: 1,
|
||||
dummy3: 1,
|
||||
dummy4: 1,
|
||||
dummy5: 1,
|
||||
dummy6: 1,
|
||||
dummy7: 1,
|
||||
dummy8: 1,
|
||||
dummy9: 1,
|
||||
dummy10: 1,
|
||||
price1: '11',
|
||||
price2: '22',
|
||||
},
|
||||
{
|
||||
ts1: '2023-10-02T10:05:00Z',
|
||||
ts2: '2024-10-02T10:05:00Z',
|
||||
price1: '12',
|
||||
price2: '23',
|
||||
},
|
||||
{
|
||||
ts1: '2023-10-03T07:10:00Z',
|
||||
ts2: '2024-10-03T07:10:00Z',
|
||||
price1: '13',
|
||||
price2: '24',
|
||||
},
|
||||
{
|
||||
ts1: '2023-11-04T12:00:00Z',
|
||||
ts2: '2024-11-04T12:00:00Z',
|
||||
price1: 1,
|
||||
price2: 2,
|
||||
},
|
||||
];
|
||||
|
||||
const DS3 = [
|
||||
{
|
||||
timestamp: '2023-10-01T12:00:00Z',
|
||||
value: 42.5,
|
||||
bitval: true,
|
||||
},
|
||||
{
|
||||
timestamp: '2023-10-02T10:05:00Z',
|
||||
value: 12,
|
||||
bitval: false,
|
||||
},
|
||||
{
|
||||
timestamp: '2023-10-03T07:10:00Z',
|
||||
value: 57,
|
||||
bitval: null,
|
||||
},
|
||||
];
|
||||
|
||||
const DS4 = [
|
||||
{
|
||||
object_id: 710293590,
|
||||
ObjectName: 'Journal',
|
||||
Total_Reserved_kb: '68696',
|
||||
RowsCount: '405452',
|
||||
},
|
||||
{
|
||||
object_id: 182291709,
|
||||
ObjectName: 'Employee',
|
||||
Total_Reserved_kb: '732008',
|
||||
RowsCount: '1980067',
|
||||
},
|
||||
{
|
||||
object_id: 23432525,
|
||||
ObjectName: 'User',
|
||||
Total_Reserved_kb: '325352',
|
||||
RowsCount: '2233',
|
||||
},
|
||||
{
|
||||
object_id: 4985159,
|
||||
ObjectName: 'Project',
|
||||
Total_Reserved_kb: '293523',
|
||||
RowsCount: '1122',
|
||||
},
|
||||
];
|
||||
|
||||
describe('Chart processor', () => {
|
||||
test('Simple by day test, autodetected', () => {
|
||||
const processor = new ChartProcessor();
|
||||
processor.addRows(...DS1.slice(0, 3));
|
||||
processor.finalize();
|
||||
expect(processor.charts.length).toEqual(1);
|
||||
const chart = processor.charts[0];
|
||||
expect(chart.definition.xdef.transformFunction).toEqual('date:day');
|
||||
expect(chart.definition.ydefs).toEqual([
|
||||
expect.objectContaining({
|
||||
field: 'value',
|
||||
}),
|
||||
]);
|
||||
expect(chart.bucketKeysOrdered).toEqual(['2023-10-01', '2023-10-02', '2023-10-03']);
|
||||
});
|
||||
test('By month grouped, autedetected', () => {
|
||||
const processor = new ChartProcessor();
|
||||
processor.addRows(...DS1.slice(0, 4));
|
||||
processor.finalize();
|
||||
expect(processor.charts.length).toEqual(1);
|
||||
const chart = processor.charts[0];
|
||||
expect(chart.definition.xdef.transformFunction).toEqual('date:month');
|
||||
expect(chart.bucketKeysOrdered).toEqual([
|
||||
'2023-10',
|
||||
'2023-11',
|
||||
'2023-12',
|
||||
'2024-01',
|
||||
'2024-02',
|
||||
'2024-03',
|
||||
'2024-04',
|
||||
'2024-05',
|
||||
'2024-06',
|
||||
'2024-07',
|
||||
'2024-08',
|
||||
]);
|
||||
});
|
||||
test('Detect columns', () => {
|
||||
const processor = new ChartProcessor();
|
||||
processor.autoDetectCharts = false;
|
||||
processor.addRows(...DS1);
|
||||
processor.finalize();
|
||||
expect(processor.charts.length).toEqual(0);
|
||||
expect(processor.availableColumns).toEqual([
|
||||
expect.objectContaining({
|
||||
field: 'timestamp',
|
||||
}),
|
||||
expect.objectContaining({
|
||||
field: 'value',
|
||||
}),
|
||||
expect.objectContaining({
|
||||
field: 'category',
|
||||
}),
|
||||
expect.objectContaining({
|
||||
field: 'related_id',
|
||||
}),
|
||||
]);
|
||||
});
|
||||
test('Explicit definition', () => {
|
||||
const processor = new ChartProcessor([
|
||||
{
|
||||
chartType: 'pie',
|
||||
xdef: {
|
||||
field: 'category',
|
||||
transformFunction: 'identity',
|
||||
sortOrder: 'natural',
|
||||
},
|
||||
ydefs: [
|
||||
{
|
||||
field: 'related_id',
|
||||
aggregateFunction: 'sum',
|
||||
},
|
||||
],
|
||||
},
|
||||
]);
|
||||
processor.addRows(...DS1);
|
||||
processor.finalize();
|
||||
expect(processor.charts.length).toEqual(1);
|
||||
const chart = processor.charts[0];
|
||||
expect(chart.definition.xdef.transformFunction).toEqual('identity');
|
||||
expect(chart.bucketKeysOrdered).toEqual(['B', 'A']);
|
||||
expect(chart.buckets).toEqual({
|
||||
B: { related_id: 34 },
|
||||
A: { related_id: 18 },
|
||||
});
|
||||
});
|
||||
|
||||
test('Two data sets with different date columns', () => {
|
||||
const processor = new ChartProcessor();
|
||||
processor.addRows(...DS2);
|
||||
processor.finalize();
|
||||
expect(processor.charts.length).toEqual(2);
|
||||
expect(processor.charts[0].definition).toEqual(
|
||||
expect.objectContaining({
|
||||
xdef: expect.objectContaining({
|
||||
field: 'ts1',
|
||||
transformFunction: 'date:day',
|
||||
}),
|
||||
ydefs: [
|
||||
expect.objectContaining({
|
||||
field: 'price1',
|
||||
aggregateFunction: 'sum',
|
||||
}),
|
||||
expect.objectContaining({
|
||||
field: 'price2',
|
||||
aggregateFunction: 'sum',
|
||||
}),
|
||||
],
|
||||
})
|
||||
);
|
||||
expect(processor.charts[1].definition).toEqual(
|
||||
expect.objectContaining({
|
||||
xdef: expect.objectContaining({
|
||||
field: 'ts2',
|
||||
transformFunction: 'date:day',
|
||||
}),
|
||||
ydefs: [
|
||||
expect.objectContaining({
|
||||
field: 'price1',
|
||||
aggregateFunction: 'sum',
|
||||
}),
|
||||
expect.objectContaining({
|
||||
field: 'price2',
|
||||
aggregateFunction: 'sum',
|
||||
}),
|
||||
],
|
||||
})
|
||||
);
|
||||
});
|
||||
|
||||
test('Exclude boolean fields in autodetected', () => {
|
||||
const processor = new ChartProcessor();
|
||||
processor.addRows(...DS3);
|
||||
processor.finalize();
|
||||
expect(processor.charts.length).toEqual(1);
|
||||
const chart = processor.charts[0];
|
||||
expect(chart.definition.xdef.transformFunction).toEqual('date:day');
|
||||
expect(chart.definition.ydefs).toEqual([
|
||||
expect.objectContaining({
|
||||
field: 'value',
|
||||
}),
|
||||
]);
|
||||
});
|
||||
|
||||
test('Added field manual from GUI', () => {
|
||||
const processor = new ChartProcessor([
|
||||
{
|
||||
chartType: 'bar',
|
||||
xdef: {
|
||||
field: 'object_id',
|
||||
transformFunction: 'identity',
|
||||
},
|
||||
ydefs: [
|
||||
{
|
||||
field: 'object_id',
|
||||
aggregateFunction: 'sum',
|
||||
},
|
||||
],
|
||||
},
|
||||
]);
|
||||
processor.addRows(...DS4);
|
||||
processor.finalize();
|
||||
expect(processor.charts.length).toEqual(1);
|
||||
const chart = processor.charts[0];
|
||||
expect(chart.definition.xdef.transformFunction).toEqual('identity');
|
||||
expect(chart.definition.ydefs).toEqual([
|
||||
expect.objectContaining({
|
||||
field: 'object_id',
|
||||
aggregateFunction: 'sum',
|
||||
}),
|
||||
]);
|
||||
});
|
||||
|
||||
const PieMainTestData = [
|
||||
['natural', ['Journal', 'Employee', 'User', 'Project']],
|
||||
['ascKeys', ['Employee', 'Journal', 'Project', 'User']],
|
||||
['descKeys', ['User', 'Project', 'Journal', 'Employee']],
|
||||
['ascValues', ['Project', 'User', 'Journal', 'Employee']],
|
||||
['descValues', ['Employee', 'Journal', 'User', 'Project']],
|
||||
];
|
||||
|
||||
test.each(PieMainTestData)('Pie chart - used space for DB objects (%s)', (sortOrder, expectedOrder) => {
|
||||
const processor = new ChartProcessor([
|
||||
{
|
||||
chartType: 'bar',
|
||||
xdef: {
|
||||
field: 'ObjectName',
|
||||
transformFunction: 'identity',
|
||||
sortOrder: sortOrder as any,
|
||||
},
|
||||
ydefs: [
|
||||
{
|
||||
field: 'RowsCount',
|
||||
aggregateFunction: 'sum',
|
||||
},
|
||||
],
|
||||
},
|
||||
]);
|
||||
processor.addRows(...DS4);
|
||||
processor.finalize();
|
||||
expect(processor.charts.length).toEqual(1);
|
||||
const chart = processor.charts[0];
|
||||
expect(chart.bucketKeysOrdered).toEqual(expectedOrder);
|
||||
expect(chart.buckets).toEqual({
|
||||
Employee: { RowsCount: 1980067 },
|
||||
Journal: { RowsCount: 405452 },
|
||||
Project: { RowsCount: 1122 },
|
||||
User: { RowsCount: 2233 },
|
||||
});
|
||||
});
|
||||
|
||||
const PieOtherTestData = [
|
||||
[
|
||||
'ratio',
|
||||
0.1,
|
||||
5,
|
||||
['Employee', 'Journal', 'Other'],
|
||||
{
|
||||
Employee: { RowsCount: 1980067 },
|
||||
Journal: { RowsCount: 405452 },
|
||||
Other: { RowsCount: 3355 },
|
||||
},
|
||||
],
|
||||
[
|
||||
'count',
|
||||
0,
|
||||
1,
|
||||
['Employee', 'Other'],
|
||||
{
|
||||
Employee: { RowsCount: 1980067 },
|
||||
Other: { RowsCount: 408807 },
|
||||
},
|
||||
],
|
||||
];
|
||||
|
||||
test.each(PieOtherTestData)(
|
||||
'Pie limit test - %s',
|
||||
(_description, pieRatioLimit, pieCountLimit, expectedOrder, expectedBuckets) => {
|
||||
const processor = new ChartProcessor([
|
||||
{
|
||||
chartType: 'pie',
|
||||
pieRatioLimit: pieRatioLimit as number,
|
||||
pieCountLimit: pieCountLimit as number,
|
||||
xdef: {
|
||||
field: 'ObjectName',
|
||||
transformFunction: 'identity',
|
||||
},
|
||||
ydefs: [
|
||||
{
|
||||
field: 'RowsCount',
|
||||
aggregateFunction: 'sum',
|
||||
},
|
||||
],
|
||||
},
|
||||
]);
|
||||
processor.addRows(...DS4);
|
||||
processor.finalize();
|
||||
expect(processor.charts.length).toEqual(1);
|
||||
const chart = processor.charts[0];
|
||||
expect(chart.bucketKeysOrdered).toEqual(expectedOrder);
|
||||
expect(chart.buckets).toEqual(expectedBuckets);
|
||||
}
|
||||
);
|
||||
});
|
||||
@@ -20,6 +20,7 @@ export function getFilterValueExpression(value, dataType?) {
|
||||
if (value === true) return 'TRUE';
|
||||
if (value === false) return 'FALSE';
|
||||
if (value.$oid) return `ObjectId("${value.$oid}")`;
|
||||
if (value.$bigint) return value.$bigint;
|
||||
if (value.type == 'Buffer' && Array.isArray(value.data)) {
|
||||
return '0x' + arrayToHexString(value.data);
|
||||
}
|
||||
|
||||
@@ -2,14 +2,18 @@ import P from 'parsimmon';
|
||||
import moment from 'moment';
|
||||
import { Condition } from 'dbgate-sqltree';
|
||||
import { interpretEscapes, token, word, whitespace } from './common';
|
||||
import { hexStringToArray } from 'dbgate-tools';
|
||||
import { hexStringToArray, parseNumberSafe } from 'dbgate-tools';
|
||||
import { FilterBehaviour, TransformType } from 'dbgate-types';
|
||||
|
||||
const binaryCondition =
|
||||
(operator, numberDualTesting = false) =>
|
||||
value => {
|
||||
const numValue = parseFloat(value);
|
||||
if (numberDualTesting && !isNaN(numValue)) {
|
||||
const numValue = parseNumberSafe(value);
|
||||
if (
|
||||
numberDualTesting &&
|
||||
// @ts-ignore
|
||||
!isNaN(numValue)
|
||||
) {
|
||||
return {
|
||||
conditionType: 'or',
|
||||
conditions: [
|
||||
@@ -52,6 +56,18 @@ const binaryCondition =
|
||||
};
|
||||
};
|
||||
|
||||
const simpleEqualCondition = () => value => ({
|
||||
conditionType: 'binary',
|
||||
operator: '=',
|
||||
left: {
|
||||
exprType: 'placeholder',
|
||||
},
|
||||
right: {
|
||||
exprType: 'value',
|
||||
value,
|
||||
},
|
||||
});
|
||||
|
||||
const likeCondition = (conditionType, likeString) => value => ({
|
||||
conditionType,
|
||||
left: {
|
||||
@@ -333,21 +349,23 @@ const createParser = (filterBehaviour: FilterBehaviour) => {
|
||||
|
||||
string1Num: () =>
|
||||
token(P.regexp(/"-?(0|[1-9][0-9]*)([.][0-9]+)?([eE][+-]?[0-9]+)?"/, 1))
|
||||
.map(Number)
|
||||
.map(parseNumberSafe)
|
||||
.desc('numer quoted'),
|
||||
|
||||
string2Num: () =>
|
||||
token(P.regexp(/'-?(0|[1-9][0-9]*)([.][0-9]+)?([eE][+-]?[0-9]+)?'/, 1))
|
||||
.map(Number)
|
||||
.map(parseNumberSafe)
|
||||
.desc('numer quoted'),
|
||||
|
||||
number: () =>
|
||||
token(P.regexp(/-?(0|[1-9][0-9]*)([.][0-9]+)?([eE][+-]?[0-9]+)?/))
|
||||
.map(Number)
|
||||
.map(parseNumberSafe)
|
||||
.desc('number'),
|
||||
|
||||
objectid: () => token(P.regexp(/ObjectId\(['"]?[0-9a-f]{24}['"]?\)/)).desc('ObjectId'),
|
||||
|
||||
objectidstr: () => token(P.regexp(/[0-9a-f]{24}/)).desc('ObjectId string'),
|
||||
|
||||
hexstring: () =>
|
||||
token(P.regexp(/0x(([0-9a-fA-F][0-9a-fA-F])+)/, 1))
|
||||
.map(x => ({
|
||||
@@ -366,6 +384,7 @@ const createParser = (filterBehaviour: FilterBehaviour) => {
|
||||
value: r => P.alt(...allowedValues.map(x => r[x])),
|
||||
valueTestEq: r => r.value.map(binaryCondition('=')),
|
||||
hexTestEq: r => r.hexstring.map(binaryCondition('=')),
|
||||
valueTestObjectIdStr: r => r.objectidstr.map(simpleEqualCondition()),
|
||||
valueTestStr: r => r.value.map(likeCondition('like', '%#VALUE#%')),
|
||||
valueTestNum: r => r.number.map(numberTestCondition()),
|
||||
valueTestObjectId: r => r.objectid.map(objectIdTestCondition()),
|
||||
@@ -546,12 +565,13 @@ const createParser = (filterBehaviour: FilterBehaviour) => {
|
||||
}
|
||||
}
|
||||
|
||||
if (filterBehaviour.allowNumberDualTesting) {
|
||||
allowedElements.push('valueTestNum');
|
||||
if (filterBehaviour.allowObjectIdTesting) {
|
||||
allowedElements.push('valueTestObjectIdStr');
|
||||
allowedElements.push('valueTestObjectId');
|
||||
}
|
||||
|
||||
if (filterBehaviour.allowObjectIdTesting) {
|
||||
allowedElements.push('valueTestObjectId');
|
||||
if (filterBehaviour.allowNumberDualTesting) {
|
||||
allowedElements.push('valueTestNum');
|
||||
}
|
||||
|
||||
// must be last
|
||||
|
||||
@@ -16,11 +16,17 @@ function isLike(value, test) {
|
||||
return res;
|
||||
}
|
||||
|
||||
function extractRawValue(value) {
|
||||
if (value?.$bigint) return value.$bigint;
|
||||
if (value?.$oid) return value.$oid;
|
||||
return value;
|
||||
}
|
||||
|
||||
export function evaluateCondition(condition: Condition, values) {
|
||||
switch (condition.conditionType) {
|
||||
case 'binary':
|
||||
const left = evaluateExpression(condition.left, values);
|
||||
const right = evaluateExpression(condition.right, values);
|
||||
const left = extractRawValue(evaluateExpression(condition.left, values));
|
||||
const right = extractRawValue(evaluateExpression(condition.right, values));
|
||||
switch (condition.operator) {
|
||||
case '=':
|
||||
return left == right;
|
||||
@@ -50,10 +56,15 @@ export function evaluateCondition(condition: Condition, values) {
|
||||
case 'or':
|
||||
return condition.conditions.some(cond => evaluateCondition(cond, values));
|
||||
case 'like':
|
||||
return isLike(evaluateExpression(condition.left, values), evaluateExpression(condition.right, values));
|
||||
break;
|
||||
return isLike(
|
||||
extractRawValue(evaluateExpression(condition.left, values)),
|
||||
extractRawValue(evaluateExpression(condition.right, values))
|
||||
);
|
||||
case 'notLike':
|
||||
return !isLike(evaluateExpression(condition.left, values), evaluateExpression(condition.right, values));
|
||||
return !isLike(
|
||||
extractRawValue(evaluateExpression(condition.left, values)),
|
||||
extractRawValue(evaluateExpression(condition.right, values))
|
||||
);
|
||||
case 'not':
|
||||
return !evaluateCondition(condition.condition, values);
|
||||
case 'anyColumnPass':
|
||||
|
||||
@@ -32,7 +32,7 @@
|
||||
"typescript": "^4.4.3"
|
||||
},
|
||||
"dependencies": {
|
||||
"dbgate-query-splitter": "^4.11.4",
|
||||
"dbgate-query-splitter": "^4.11.5",
|
||||
"dbgate-sqltree": "^6.0.0-alpha.1",
|
||||
"debug": "^4.3.4",
|
||||
"json-stable-stringify": "^1.0.1",
|
||||
|
||||
@@ -78,6 +78,7 @@ export class SqlDumper implements AlterProcessor {
|
||||
else if (_isNumber(value)) this.putRaw(value.toString());
|
||||
else if (_isDate(value)) this.putStringValue(new Date(value).toISOString());
|
||||
else if (value?.type == 'Buffer' && _isArray(value?.data)) this.putByteArrayValue(value?.data);
|
||||
else if (value?.$bigint) this.putRaw(value?.$bigint);
|
||||
else if (_isPlainObject(value) || _isArray(value)) this.putStringValue(JSON.stringify(value));
|
||||
else this.put('^null');
|
||||
}
|
||||
|
||||
@@ -1,40 +1,71 @@
|
||||
import _omit from 'lodash/omit';
|
||||
import _sortBy from 'lodash/sortBy';
|
||||
|
||||
const SHOW_INCREMENT = 100;
|
||||
export const DB_KEYS_SHOW_INCREMENT = 100;
|
||||
|
||||
export interface DbKeysNodeModelBase {
|
||||
text?: string;
|
||||
sortKey: string;
|
||||
key: string;
|
||||
count?: number;
|
||||
level: number;
|
||||
keyPath: string[];
|
||||
parentKey: string;
|
||||
}
|
||||
|
||||
export interface DbKeysLeafNodeModel extends DbKeysNodeModelBase {
|
||||
key: string;
|
||||
|
||||
type: 'string' | 'hash' | 'set' | 'list' | 'zset' | 'stream' | 'binary' | 'ReJSON-RL';
|
||||
}
|
||||
|
||||
export interface DbKeysFolderNodeModel extends DbKeysNodeModelBase {
|
||||
root: string;
|
||||
// root: string;
|
||||
type: 'dir';
|
||||
maxShowCount?: number;
|
||||
// visibleCount?: number;
|
||||
// isExpanded?: boolean;
|
||||
}
|
||||
|
||||
export interface DbKeysFolderStateMode {
|
||||
key: string;
|
||||
visibleCount?: number;
|
||||
isExpanded?: boolean;
|
||||
shouldLoadNext?: boolean;
|
||||
hasNext?: boolean;
|
||||
}
|
||||
|
||||
export interface DbKeysTreeModel {
|
||||
treeKeySeparator: string;
|
||||
root: DbKeysFolderNodeModel;
|
||||
dirsByKey: { [key: string]: DbKeysFolderNodeModel };
|
||||
dirStateByKey: { [key: string]: DbKeysFolderStateMode };
|
||||
childrenByKey: { [key: string]: DbKeysNodeModel[] };
|
||||
refreshAll?: boolean;
|
||||
keyObjectsByKey: { [key: string]: DbKeysNodeModel };
|
||||
scannedKeys: number;
|
||||
loadCount: number;
|
||||
dbsize: number;
|
||||
cursor: string;
|
||||
loadedAll: boolean;
|
||||
// refreshAll?: boolean;
|
||||
}
|
||||
|
||||
export type DbKeysNodeModel = DbKeysLeafNodeModel | DbKeysFolderNodeModel;
|
||||
|
||||
export type DbKeysLoadFunction = (root: string, limit: number) => Promise<DbKeysNodeModel[]>;
|
||||
export interface DbKeyLoadedModel {
|
||||
key: string;
|
||||
|
||||
export type DbKeysChangeModelFunction = (func: (model: DbKeysTreeModel) => DbKeysTreeModel) => void;
|
||||
type: 'string' | 'hash' | 'set' | 'list' | 'zset' | 'stream' | 'binary' | 'ReJSON-RL';
|
||||
count?: number;
|
||||
}
|
||||
|
||||
export interface DbKeysLoadResult {
|
||||
nextCursor: string;
|
||||
keys: DbKeyLoadedModel[];
|
||||
dbsize: number;
|
||||
}
|
||||
|
||||
// export type DbKeysLoadFunction = (root: string, limit: number) => Promise<DbKeysLoadResult>;
|
||||
|
||||
export type DbKeysChangeModelFunction = (
|
||||
func: (model: DbKeysTreeModel) => DbKeysTreeModel,
|
||||
loadNextPage: boolean
|
||||
) => void;
|
||||
|
||||
// function dbKeys_findFolderNode(node: DbKeysNodeModel, root: string) {
|
||||
// if (node.type != 'dir') {
|
||||
@@ -73,119 +104,242 @@ export type DbKeysChangeModelFunction = (func: (model: DbKeysTreeModel) => DbKey
|
||||
// };
|
||||
// }
|
||||
|
||||
export async function dbKeys_loadMissing(tree: DbKeysTreeModel, loader: DbKeysLoadFunction): Promise<DbKeysTreeModel> {
|
||||
const childrenByKey = { ...tree.childrenByKey };
|
||||
const dirsByKey = { ...tree.dirsByKey };
|
||||
// export async function dbKeys_loadMissing(tree: DbKeysTreeModel, loader: DbKeysLoadFunction): Promise<DbKeysTreeModel> {
|
||||
// const childrenByKey = { ...tree.childrenByKey };
|
||||
// const dirsByKey = { ...tree.dirsByKey };
|
||||
|
||||
for (const root in tree.dirsByKey) {
|
||||
const dir = tree.dirsByKey[root];
|
||||
// for (const root in tree.dirsByKey) {
|
||||
// const dir = tree.dirsByKey[root];
|
||||
|
||||
if (dir.isExpanded && dir.shouldLoadNext) {
|
||||
if (!tree.childrenByKey[root] || dir.hasNext) {
|
||||
const loadCount = dir.maxShowCount && dir.shouldLoadNext ? dir.maxShowCount + SHOW_INCREMENT : SHOW_INCREMENT;
|
||||
const items = await loader(root, loadCount + 1);
|
||||
// if (dir.isExpanded && dir.shouldLoadNext) {
|
||||
// if (!tree.childrenByKey[root] || dir.hasNext) {
|
||||
// const loadCount = dir.maxShowCount && dir.shouldLoadNext ? dir.maxShowCount + SHOW_INCREMENT : SHOW_INCREMENT;
|
||||
// const items = await loader(root, loadCount + 1);
|
||||
|
||||
childrenByKey[root] = items.slice(0, loadCount);
|
||||
dirsByKey[root] = {
|
||||
...dir,
|
||||
shouldLoadNext: false,
|
||||
maxShowCount: loadCount,
|
||||
hasNext: items.length > loadCount,
|
||||
// childrenByKey[root] = items.slice(0, loadCount);
|
||||
// dirsByKey[root] = {
|
||||
// ...dir,
|
||||
// shouldLoadNext: false,
|
||||
// maxShowCount: loadCount,
|
||||
// hasNext: items.length > loadCount,
|
||||
// };
|
||||
|
||||
// for (const child of items.slice(0, loadCount)) {
|
||||
// if (child.type == 'dir' && !dirsByKey[child.root]) {
|
||||
// dirsByKey[child.root] = {
|
||||
// shouldLoadNext: false,
|
||||
// maxShowCount: null,
|
||||
// hasNext: false,
|
||||
// isExpanded: false,
|
||||
// type: 'dir',
|
||||
// level: dir.level + 1,
|
||||
// root: child.root,
|
||||
// text: child.text,
|
||||
// };
|
||||
// }
|
||||
// }
|
||||
// } else {
|
||||
// dirsByKey[root] = {
|
||||
// ...dir,
|
||||
// shouldLoadNext: false,
|
||||
// };
|
||||
// }
|
||||
// }
|
||||
// }
|
||||
|
||||
// return {
|
||||
// ...tree,
|
||||
// dirsByKey,
|
||||
// childrenByKey,
|
||||
// refreshAll: false,
|
||||
// };
|
||||
// }
|
||||
|
||||
export function dbKeys_mergeNextPage(tree: DbKeysTreeModel, nextPage: DbKeysLoadResult): DbKeysTreeModel {
|
||||
const keyObjectsByKey = { ...tree.keyObjectsByKey };
|
||||
|
||||
for (const keyObj of nextPage.keys) {
|
||||
const keyPath = keyObj.key.split(tree.treeKeySeparator);
|
||||
keyObjectsByKey[keyObj.key] = {
|
||||
...keyObj,
|
||||
level: keyPath.length,
|
||||
text: keyPath[keyPath.length - 1],
|
||||
sortKey: keyPath[keyPath.length - 1],
|
||||
keyPath,
|
||||
parentKey: keyPath.slice(0, -1).join(tree.treeKeySeparator),
|
||||
};
|
||||
}
|
||||
|
||||
for (const child of items.slice(0, loadCount)) {
|
||||
if (child.type == 'dir' && !dirsByKey[child.root]) {
|
||||
dirsByKey[child.root] = {
|
||||
shouldLoadNext: false,
|
||||
maxShowCount: null,
|
||||
hasNext: false,
|
||||
isExpanded: false,
|
||||
const dirsByKey: { [key: string]: DbKeysFolderNodeModel } = {};
|
||||
const childrenByKey: { [key: string]: DbKeysNodeModel[] } = {};
|
||||
|
||||
dirsByKey[''] = tree.root;
|
||||
|
||||
for (const keyObj of Object.values(keyObjectsByKey)) {
|
||||
const dirPath = keyObj.keyPath.slice(0, -1);
|
||||
const dirKey = dirPath.join(tree.treeKeySeparator);
|
||||
|
||||
let dirDepth = keyObj.keyPath.length - 1;
|
||||
|
||||
while (dirDepth > 0) {
|
||||
const newDirPath = keyObj.keyPath.slice(0, dirDepth);
|
||||
const newDirKey = newDirPath.join(tree.treeKeySeparator);
|
||||
if (!dirsByKey[newDirKey]) {
|
||||
dirsByKey[newDirKey] = {
|
||||
level: keyObj.level - 1,
|
||||
keyPath: newDirPath,
|
||||
parentKey: newDirPath.slice(0, -1).join(tree.treeKeySeparator),
|
||||
type: 'dir',
|
||||
level: dir.level + 1,
|
||||
root: child.root,
|
||||
text: child.text,
|
||||
key: newDirKey,
|
||||
text: `${newDirPath[newDirPath.length - 1]}${tree.treeKeySeparator}*`,
|
||||
sortKey: newDirPath[newDirPath.length - 1],
|
||||
};
|
||||
}
|
||||
|
||||
dirDepth -= 1;
|
||||
}
|
||||
} else {
|
||||
dirsByKey[root] = {
|
||||
...dir,
|
||||
shouldLoadNext: false,
|
||||
};
|
||||
|
||||
if (!childrenByKey[dirKey]) {
|
||||
childrenByKey[dirKey] = [];
|
||||
}
|
||||
|
||||
childrenByKey[dirKey].push(keyObj);
|
||||
}
|
||||
|
||||
for (const dirObj of Object.values(dirsByKey)) {
|
||||
if (dirObj.key == '') {
|
||||
continue;
|
||||
}
|
||||
|
||||
if (!childrenByKey[dirObj.parentKey]) {
|
||||
childrenByKey[dirObj.parentKey] = [];
|
||||
}
|
||||
childrenByKey[dirObj.parentKey].push(dirObj);
|
||||
|
||||
// set key count
|
||||
dirsByKey[dirObj.key].count = childrenByKey[dirObj.key].length;
|
||||
}
|
||||
|
||||
for (const key in childrenByKey) {
|
||||
childrenByKey[key] = _sortBy(childrenByKey[key], 'sortKey');
|
||||
}
|
||||
|
||||
return {
|
||||
...tree,
|
||||
cursor: nextPage.nextCursor,
|
||||
dirsByKey,
|
||||
childrenByKey,
|
||||
refreshAll: false,
|
||||
keyObjectsByKey,
|
||||
scannedKeys: tree.scannedKeys + tree.loadCount,
|
||||
loadedAll: nextPage.nextCursor == '0',
|
||||
dbsize: nextPage.dbsize,
|
||||
};
|
||||
}
|
||||
|
||||
export function dbKeys_markNodeExpanded(tree: DbKeysTreeModel, root: string, isExpanded: boolean): DbKeysTreeModel {
|
||||
const node = tree.dirsByKey[root];
|
||||
if (!node) {
|
||||
return tree;
|
||||
}
|
||||
const node = tree.dirStateByKey[root];
|
||||
return {
|
||||
...tree,
|
||||
dirsByKey: {
|
||||
...tree.dirsByKey,
|
||||
dirStateByKey: {
|
||||
...tree.dirStateByKey,
|
||||
[root]: {
|
||||
...node,
|
||||
isExpanded,
|
||||
shouldLoadNext: isExpanded,
|
||||
},
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
export function dbKeys_refreshAll(tree?: DbKeysTreeModel): DbKeysTreeModel {
|
||||
const root: DbKeysFolderNodeModel = {
|
||||
isExpanded: true,
|
||||
level: 0,
|
||||
root: '',
|
||||
type: 'dir',
|
||||
shouldLoadNext: true,
|
||||
};
|
||||
export function dbKeys_showNextItems(tree: DbKeysTreeModel, root: string): DbKeysTreeModel {
|
||||
const node = tree.dirStateByKey[root];
|
||||
return {
|
||||
...tree,
|
||||
dirStateByKey: {
|
||||
...tree.dirStateByKey,
|
||||
[root]: {
|
||||
...node,
|
||||
visibleCount: (node?.visibleCount ?? DB_KEYS_SHOW_INCREMENT) + DB_KEYS_SHOW_INCREMENT,
|
||||
},
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
export function dbKeys_createNewModel(treeKeySeparator: string): DbKeysTreeModel {
|
||||
const root: DbKeysFolderNodeModel = {
|
||||
level: 0,
|
||||
type: 'dir',
|
||||
keyPath: [],
|
||||
parentKey: '',
|
||||
key: '',
|
||||
sortKey: '',
|
||||
};
|
||||
return {
|
||||
treeKeySeparator,
|
||||
childrenByKey: {},
|
||||
keyObjectsByKey: {},
|
||||
dirsByKey: {
|
||||
'': root,
|
||||
},
|
||||
refreshAll: true,
|
||||
dirStateByKey: {
|
||||
'': {
|
||||
key: '',
|
||||
visibleCount: DB_KEYS_SHOW_INCREMENT,
|
||||
isExpanded: true,
|
||||
},
|
||||
},
|
||||
scannedKeys: 0,
|
||||
dbsize: 0,
|
||||
loadCount: 2000,
|
||||
cursor: '0',
|
||||
root,
|
||||
loadedAll: false,
|
||||
};
|
||||
}
|
||||
|
||||
export function dbKeys_reloadFolder(tree: DbKeysTreeModel, root: string): DbKeysTreeModel {
|
||||
export function dbKeys_clearLoadedData(tree: DbKeysTreeModel): DbKeysTreeModel {
|
||||
return {
|
||||
...tree,
|
||||
childrenByKey: _omit(tree.childrenByKey, root),
|
||||
childrenByKey: {},
|
||||
keyObjectsByKey: {},
|
||||
dirsByKey: {
|
||||
...tree.dirsByKey,
|
||||
[root]: {
|
||||
...tree.dirsByKey[root],
|
||||
shouldLoadNext: true,
|
||||
hasNext: undefined,
|
||||
},
|
||||
'': tree.root,
|
||||
},
|
||||
scannedKeys: 0,
|
||||
dbsize: 0,
|
||||
cursor: '0',
|
||||
loadedAll: false,
|
||||
};
|
||||
}
|
||||
|
||||
function addFlatItems(tree: DbKeysTreeModel, root: string, res: DbKeysNodeModel[]) {
|
||||
const item = tree.dirsByKey[root];
|
||||
if (!item.isExpanded) {
|
||||
// export function dbKeys_reloadFolder(tree: DbKeysTreeModel, root: string): DbKeysTreeModel {
|
||||
// return {
|
||||
// ...tree,
|
||||
// childrenByKey: _omit(tree.childrenByKey, root),
|
||||
// dirsByKey: {
|
||||
// ...tree.dirsByKey,
|
||||
// [root]: {
|
||||
// ...tree.dirsByKey[root],
|
||||
// shouldLoadNext: true,
|
||||
// hasNext: undefined,
|
||||
// },
|
||||
// },
|
||||
// };
|
||||
// }
|
||||
|
||||
function addFlatItems(tree: DbKeysTreeModel, root: string, res: DbKeysNodeModel[], visitedRoots: string[] = []) {
|
||||
const item = tree.dirStateByKey[root];
|
||||
if (!item?.isExpanded) {
|
||||
return false;
|
||||
}
|
||||
const children = tree.childrenByKey[root] || [];
|
||||
for (const child of children) {
|
||||
res.push(child);
|
||||
if (child.type == 'dir') {
|
||||
addFlatItems(tree, child.root, res);
|
||||
if (visitedRoots.includes(child.key)) {
|
||||
console.warn('Redis: preventing infinite loop for root', child.key);
|
||||
return false;
|
||||
}
|
||||
addFlatItems(tree, child.key, res, [...visitedRoots, root]);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -4,7 +4,11 @@ import _isDate from 'lodash/isDate';
|
||||
import _isNumber from 'lodash/isNumber';
|
||||
import _isPlainObject from 'lodash/isPlainObject';
|
||||
import _pad from 'lodash/pad';
|
||||
import _cloneDeepWith from 'lodash/cloneDeepWith';
|
||||
import _isEmpty from 'lodash/isEmpty';
|
||||
import _omitBy from 'lodash/omitBy';
|
||||
import { DataEditorTypesBehaviour } from 'dbgate-types';
|
||||
import isPlainObject from 'lodash/isPlainObject';
|
||||
|
||||
export type EditorDataType =
|
||||
| 'null'
|
||||
@@ -80,7 +84,7 @@ export function parseCellValue(value, editorTypes?: DataEditorTypesBehaviour) {
|
||||
|
||||
if (editorTypes?.parseNumber) {
|
||||
if (/^-?[0-9]+(?:\.[0-9]+)?$/.test(value)) {
|
||||
return parseFloat(value);
|
||||
return parseNumberSafe(value);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -208,6 +212,18 @@ export function stringifyCellValue(
|
||||
}
|
||||
}
|
||||
}
|
||||
if (value?.$bigint) {
|
||||
return {
|
||||
value: value.$bigint,
|
||||
gridStyle: 'valueCellStyle',
|
||||
};
|
||||
}
|
||||
if (typeof value === 'bigint') {
|
||||
return {
|
||||
value: value.toString(),
|
||||
gridStyle: 'valueCellStyle',
|
||||
};
|
||||
}
|
||||
|
||||
if (editorTypes?.parseDateAsDollar) {
|
||||
if (value?.$date) {
|
||||
@@ -343,6 +359,9 @@ export function shouldOpenMultilineDialog(value) {
|
||||
if (value?.$date) {
|
||||
return false;
|
||||
}
|
||||
if (value?.$bigint) {
|
||||
return false;
|
||||
}
|
||||
if (_isPlainObject(value) || _isArray(value)) {
|
||||
return true;
|
||||
}
|
||||
@@ -573,3 +592,82 @@ export function jsonLinesParse(jsonLines: string): any[] {
|
||||
})
|
||||
.filter(x => x);
|
||||
}
|
||||
|
||||
export function serializeJsTypesForJsonStringify(obj, replacer = null) {
|
||||
return _cloneDeepWith(obj, value => {
|
||||
if (typeof value === 'bigint') {
|
||||
return { $bigint: value.toString() };
|
||||
}
|
||||
if (replacer) {
|
||||
return replacer(value);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
export function deserializeJsTypesFromJsonParse(obj) {
|
||||
return _cloneDeepWith(obj, value => {
|
||||
if (value?.$bigint) {
|
||||
return BigInt(value.$bigint);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
export function serializeJsTypesReplacer(key, value) {
|
||||
if (typeof value === 'bigint') {
|
||||
return { $bigint: value.toString() };
|
||||
}
|
||||
return value;
|
||||
}
|
||||
|
||||
export function deserializeJsTypesReviver(key, value) {
|
||||
if (value?.$bigint) {
|
||||
return BigInt(value.$bigint);
|
||||
}
|
||||
return value;
|
||||
}
|
||||
|
||||
export function parseNumberSafe(value) {
|
||||
if (/^-?[0-9]+$/.test(value)) {
|
||||
const parsed = parseInt(value);
|
||||
if (Number.isSafeInteger(parsed)) {
|
||||
return parsed;
|
||||
}
|
||||
return BigInt(value);
|
||||
}
|
||||
return parseFloat(value);
|
||||
}
|
||||
|
||||
const frontMatterRe = /^--\ >>>[ \t\r]*\n(.*)\n-- <<<[ \t\r]*\n/s;
|
||||
|
||||
export function getSqlFrontMatter(text: string, yamlModule) {
|
||||
const match = text.match(frontMatterRe);
|
||||
if (!match) return null;
|
||||
const yamlContentMapped = match[1].replace(/^--[ ]?/gm, '');
|
||||
return yamlModule.load(yamlContentMapped);
|
||||
}
|
||||
|
||||
export function removeSqlFrontMatter(text: string) {
|
||||
return text.replace(frontMatterRe, '');
|
||||
}
|
||||
|
||||
export function setSqlFrontMatter(text: string, data: { [key: string]: any }, yamlModule) {
|
||||
const textClean = removeSqlFrontMatter(text);
|
||||
|
||||
if (!isPlainObject(data)) {
|
||||
return textClean;
|
||||
}
|
||||
|
||||
const dataClean = _omitBy(data, v => v === undefined);
|
||||
|
||||
if (_isEmpty(dataClean)) {
|
||||
return textClean;
|
||||
}
|
||||
const yamlContent = yamlModule.dump(dataClean);
|
||||
const yamlContentMapped = yamlContent
|
||||
.trimRight()
|
||||
.split('\n')
|
||||
.map(line => '-- ' + line)
|
||||
.join('\n');
|
||||
const frontMatterContent = `-- >>>\n${yamlContentMapped}\n-- <<<\n`;
|
||||
return frontMatterContent + textClean;
|
||||
}
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"compilerOptions": {
|
||||
"target": "ES2015",
|
||||
"target": "ES2018",
|
||||
"module": "commonjs",
|
||||
"declaration": true,
|
||||
"skipLibCheck": true,
|
||||
|
||||
1
packages/types/engines.d.ts
vendored
1
packages/types/engines.d.ts
vendored
@@ -239,6 +239,7 @@ export interface EngineDriver<TClient = any> extends FilterBehaviourProvider {
|
||||
}[]
|
||||
>;
|
||||
loadKeys(dbhan: DatabaseHandle<TClient>, root: string, filter?: string): Promise;
|
||||
scanKeys(dbhan: DatabaseHandle<TClient>, root: string, pattern: string, cursor: string, count: number): Promise;
|
||||
exportKeys(dbhan: DatabaseHandle<TClient>, options: {}): Promise;
|
||||
loadKeyInfo(dbhan: DatabaseHandle<TClient>, key): Promise;
|
||||
loadKeyTableRange(dbhan: DatabaseHandle<TClient>, key, cursor, count): Promise;
|
||||
|
||||
@@ -24,9 +24,10 @@
|
||||
"ace-builds": "^1.36.5",
|
||||
"chart.js": "^4.4.2",
|
||||
"chartjs-adapter-moment": "^1.0.0",
|
||||
"chartjs-plugin-datalabels": "^2.2.0",
|
||||
"cross-env": "^7.0.3",
|
||||
"dbgate-datalib": "^6.0.0-alpha.1",
|
||||
"dbgate-query-splitter": "^4.11.4",
|
||||
"dbgate-query-splitter": "^4.11.5",
|
||||
"dbgate-sqltree": "^6.0.0-alpha.1",
|
||||
"dbgate-tools": "^6.0.0-alpha.1",
|
||||
"dbgate-types": "^6.0.0-alpha.1",
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
:root {
|
||||
--dim-widget-icon-size: 60px;
|
||||
--dim-widget-icon-size: 50px;
|
||||
--dim-statusbar-height: 22px;
|
||||
--dim-left-panel-width: 300px;
|
||||
--dim-tabs-height: 33px;
|
||||
|
||||
@@ -36,6 +36,9 @@ body {
|
||||
display: flex;
|
||||
justify-content: space-between;
|
||||
}
|
||||
.align-items-center {
|
||||
align-items: center;
|
||||
}
|
||||
.flex {
|
||||
display: flex;
|
||||
}
|
||||
|
||||
@@ -14,7 +14,12 @@
|
||||
// import { shouldWaitForElectronInitialize } from './utility/getElectron';
|
||||
import { subscribeConnectionPingers } from './utility/connectionsPinger';
|
||||
import { subscribePermissionCompiler } from './utility/hasPermission';
|
||||
import { apiCall, installNewVolatileConnectionListener } from './utility/api';
|
||||
import {
|
||||
apiCall,
|
||||
installNewCloudTokenListener,
|
||||
installNewVolatileConnectionListener,
|
||||
refreshPublicCloudFiles,
|
||||
} from './utility/api';
|
||||
import { getConfig, getSettings, getUsedApps } from './utility/metadataLoaders';
|
||||
import AppTitleProvider from './utility/AppTitleProvider.svelte';
|
||||
import getElectron from './utility/getElectron';
|
||||
@@ -23,6 +28,7 @@
|
||||
import { handleAuthOnStartup } from './clientAuth';
|
||||
import { initializeAppUpdates } from './utility/appUpdate';
|
||||
import { _t } from './translations';
|
||||
import { installCloudListeners } from './utility/cloudListeners';
|
||||
|
||||
export let isAdminPage = false;
|
||||
|
||||
@@ -51,9 +57,13 @@
|
||||
subscribeConnectionPingers();
|
||||
subscribePermissionCompiler();
|
||||
installNewVolatileConnectionListener();
|
||||
installNewCloudTokenListener();
|
||||
initializeAppUpdates();
|
||||
installCloudListeners();
|
||||
}
|
||||
|
||||
refreshPublicCloudFiles();
|
||||
|
||||
loadedApi = loadedApiValue;
|
||||
|
||||
if (!loadedApi) {
|
||||
|
||||
@@ -12,6 +12,7 @@
|
||||
export let groupFunc;
|
||||
export let items;
|
||||
export let groupIconFunc = plusExpandIcon;
|
||||
export let mapGroupTitle = undefined;
|
||||
export let module;
|
||||
export let checkedObjectsStore = null;
|
||||
export let disableContextMenu = false;
|
||||
@@ -63,7 +64,7 @@
|
||||
<FontIcon icon={groupIconFunc(isExpanded)} />
|
||||
</span>
|
||||
|
||||
{group}
|
||||
{mapGroupTitle ? mapGroupTitle(group) : group}
|
||||
{items && `(${countText})`}
|
||||
</div>
|
||||
|
||||
|
||||
@@ -26,6 +26,7 @@
|
||||
|
||||
export let groupIconFunc = plusExpandIcon;
|
||||
export let groupFunc = undefined;
|
||||
export let mapGroupTitle = undefined;
|
||||
export let onDropOnGroup = undefined;
|
||||
export let emptyGroupNames = [];
|
||||
export let isExpandedBySearch = false;
|
||||
@@ -127,6 +128,7 @@
|
||||
{subItemsComponent}
|
||||
{checkedObjectsStore}
|
||||
{groupFunc}
|
||||
{mapGroupTitle}
|
||||
{disableContextMenu}
|
||||
{filter}
|
||||
{passProps}
|
||||
|
||||
@@ -81,6 +81,7 @@
|
||||
import ConfirmModal from '../modals/ConfirmModal.svelte';
|
||||
import { apiCall } from '../utility/api';
|
||||
import { openImportExportTab } from '../utility/importExportTools';
|
||||
import { isProApp } from '../utility/proTools';
|
||||
|
||||
export let data;
|
||||
$: isZipped = data.folderName?.endsWith('.zip');
|
||||
@@ -187,6 +188,7 @@
|
||||
data.fileType.endsWith('.sql') && { text: 'Open SQL', onClick: handleOpenSqlFile },
|
||||
data.fileType.endsWith('.yaml') && { text: 'Open YAML', onClick: handleOpenYamlFile },
|
||||
!isZipped &&
|
||||
isProApp() &&
|
||||
data.fileType == 'jsonl' && {
|
||||
text: 'Open in profiler',
|
||||
submenu: getExtensions()
|
||||
|
||||
139
packages/web/src/appobj/CloudContentAppObject.svelte
Normal file
139
packages/web/src/appobj/CloudContentAppObject.svelte
Normal file
@@ -0,0 +1,139 @@
|
||||
<script lang="ts" context="module">
|
||||
import { cloudConnectionsStore } from '../stores';
|
||||
import { apiCall } from '../utility/api';
|
||||
import AppObjectCore from './AppObjectCore.svelte';
|
||||
|
||||
export const extractKey = data => data.cntid;
|
||||
export const createMatcher =
|
||||
filter =>
|
||||
({ name }) =>
|
||||
filterName(filter, name);
|
||||
</script>
|
||||
|
||||
<script lang="ts">
|
||||
import { filterName, getConnectionLabel } from 'dbgate-tools';
|
||||
import ConnectionAppObject, { openConnection } from './ConnectionAppObject.svelte';
|
||||
import { _t } from '../translations';
|
||||
import openNewTab from '../utility/openNewTab';
|
||||
import { showModal } from '../modals/modalTools';
|
||||
import ConfirmModal from '../modals/ConfirmModal.svelte';
|
||||
import SavedFileAppObject from './SavedFileAppObject.svelte';
|
||||
|
||||
export let data;
|
||||
export let passProps;
|
||||
|
||||
function createMenu() {
|
||||
const res = [];
|
||||
switch (data.type) {
|
||||
case 'connection':
|
||||
res.push({
|
||||
text: _t('connection.connect', { defaultMessage: 'Connect' }),
|
||||
onClick: handleConnect,
|
||||
isBold: true,
|
||||
});
|
||||
res.push({ divider: true });
|
||||
res.push({
|
||||
text: _t('connection.edit', { defaultMessage: 'Edit' }),
|
||||
onClick: handleEditConnection,
|
||||
});
|
||||
res.push({
|
||||
text: _t('connection.delete', { defaultMessage: 'Delete' }),
|
||||
onClick: handleDeleteConnection,
|
||||
});
|
||||
res.push({
|
||||
text: _t('connection.duplicate', { defaultMessage: 'Duplicate' }),
|
||||
onClick: handleDuplicateConnection,
|
||||
});
|
||||
break;
|
||||
}
|
||||
|
||||
return res;
|
||||
}
|
||||
|
||||
function handleEditConnection() {
|
||||
openNewTab({
|
||||
title: data.name,
|
||||
icon: 'img cloud-connection',
|
||||
tabComponent: 'ConnectionTab',
|
||||
props: {
|
||||
conid: data.conid,
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
async function handleDeleteConnection() {
|
||||
showModal(ConfirmModal, {
|
||||
message: `Really delete connection ${data.name}?`,
|
||||
onConfirm: () => {
|
||||
apiCall('cloud/delete-connection', { conid: data.conid });
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
async function handleDuplicateConnection() {
|
||||
await apiCall('cloud/duplicate-connection', { conid: data.conid });
|
||||
}
|
||||
|
||||
async function handleConnect() {
|
||||
const conn = await apiCall('connections/get', { conid: data.conid });
|
||||
$cloudConnectionsStore = {
|
||||
...$cloudConnectionsStore,
|
||||
[data.conid]: conn,
|
||||
};
|
||||
openConnection(conn);
|
||||
}
|
||||
|
||||
async function handleOpenContent() {
|
||||
switch (data.type) {
|
||||
case 'connection':
|
||||
await handleConnect();
|
||||
break;
|
||||
}
|
||||
}
|
||||
</script>
|
||||
|
||||
{#if data.conid && $cloudConnectionsStore[data.conid]}
|
||||
<ConnectionAppObject
|
||||
{...$$restProps}
|
||||
{passProps}
|
||||
data={{
|
||||
...$cloudConnectionsStore[data.conid],
|
||||
status: data.status,
|
||||
}}
|
||||
on:dblclick
|
||||
on:expand
|
||||
/>
|
||||
{:else if data.type == 'file'}
|
||||
<SavedFileAppObject
|
||||
{...$$restProps}
|
||||
{passProps}
|
||||
data={{
|
||||
file: data.name,
|
||||
folder: data.contentFolder,
|
||||
folid: data.folid,
|
||||
cntid: data.cntid,
|
||||
}}
|
||||
on:dblclick
|
||||
on:expand
|
||||
/>
|
||||
{:else}
|
||||
<AppObjectCore
|
||||
{...$$restProps}
|
||||
{data}
|
||||
icon={'img cloud-connection'}
|
||||
title={data.name}
|
||||
menu={createMenu}
|
||||
on:click={handleOpenContent}
|
||||
on:dblclick
|
||||
on:expand
|
||||
></AppObjectCore>
|
||||
{/if}
|
||||
|
||||
<style>
|
||||
.info {
|
||||
margin-left: 30px;
|
||||
margin-right: 5px;
|
||||
color: var(--theme-font-3);
|
||||
white-space: nowrap;
|
||||
}
|
||||
</style>
|
||||
@@ -108,6 +108,7 @@
|
||||
import _ from 'lodash';
|
||||
import AppObjectCore from './AppObjectCore.svelte';
|
||||
import {
|
||||
cloudSigninTokenHolder,
|
||||
currentDatabase,
|
||||
DEFAULT_CONNECTION_SEARCH_SETTINGS,
|
||||
expandedConnections,
|
||||
@@ -160,7 +161,7 @@
|
||||
const handleOpenConnectionTab = () => {
|
||||
openNewTab({
|
||||
title: getConnectionLabel(data),
|
||||
icon: 'img connection',
|
||||
icon: data._id.startsWith('cloud://') ? 'img cloud-connection' : 'img connection',
|
||||
tabComponent: 'ConnectionTab',
|
||||
props: {
|
||||
conid: data._id,
|
||||
@@ -261,11 +262,15 @@
|
||||
});
|
||||
};
|
||||
const handleDuplicate = () => {
|
||||
if (data._id.startsWith('cloud://')) {
|
||||
apiCall('cloud/duplicate-connection', { conid: data._id });
|
||||
} else {
|
||||
apiCall('connections/save', {
|
||||
...data,
|
||||
_id: undefined,
|
||||
displayName: `${getConnectionLabel(data)} - copy`,
|
||||
});
|
||||
}
|
||||
};
|
||||
const handleCreateDatabase = () => {
|
||||
showModal(InputTextModal, {
|
||||
@@ -332,6 +337,19 @@
|
||||
text: _t('connection.duplicate', { defaultMessage: 'Duplicate' }),
|
||||
onClick: handleDuplicate,
|
||||
},
|
||||
!$openedConnections.includes(data._id) &&
|
||||
$cloudSigninTokenHolder &&
|
||||
passProps?.cloudContentList?.length > 0 && {
|
||||
text: _t('connection.copyToCloudFolder', { defaultMessage: 'Copy to cloud folder' }),
|
||||
submenu: passProps?.cloudContentList
|
||||
?.filter(x => x.role == 'write' || x.role == 'admin')
|
||||
?.map(fld => ({
|
||||
text: fld.name,
|
||||
onClick: () => {
|
||||
apiCall('cloud/copy-connection-cloud', { conid: data._id, folid: fld.folid });
|
||||
},
|
||||
})),
|
||||
},
|
||||
],
|
||||
{ divider: true },
|
||||
!data.singleDatabase && [
|
||||
@@ -416,7 +434,7 @@
|
||||
{...$$restProps}
|
||||
{data}
|
||||
title={getConnectionLabel(data, { showUnsaved: true })}
|
||||
icon={data.singleDatabase ? 'img database' : 'img server'}
|
||||
icon={data._id.startsWith('cloud://') ? 'img cloud-connection' : data.singleDatabase ? 'img database' : 'img server'}
|
||||
isBold={data.singleDatabase
|
||||
? $currentDatabase?.connection?._id == data._id && $currentDatabase?.name == data.defaultDatabase
|
||||
: $currentDatabase?.connection?._id == data._id}
|
||||
|
||||
@@ -330,6 +330,29 @@ await dbgateApi.dropAllDbObjects(${JSON.stringify(
|
||||
});
|
||||
};
|
||||
|
||||
const handleGenerateRunScript = () => {
|
||||
openNewTab(
|
||||
{
|
||||
title: 'Shell #',
|
||||
icon: 'img shell',
|
||||
tabComponent: 'ShellTab',
|
||||
},
|
||||
{
|
||||
editor: `// @require ${extractPackageName(connection.engine)}
|
||||
|
||||
await dbgateApi.executeQuery(${JSON.stringify(
|
||||
{
|
||||
connection: extractShellConnection(connection, name),
|
||||
sql: 'your script here',
|
||||
},
|
||||
undefined,
|
||||
2
|
||||
)});
|
||||
`,
|
||||
}
|
||||
);
|
||||
};
|
||||
|
||||
const handleShowDataDeployer = () => {
|
||||
showModal(ChooseArchiveFolderModal, {
|
||||
message: 'Choose archive folder for data deployer',
|
||||
@@ -338,7 +361,7 @@ await dbgateApi.dropAllDbObjects(${JSON.stringify(
|
||||
{
|
||||
title: archiveFolder,
|
||||
icon: 'img replicator',
|
||||
tabComponent: 'DataDeployerTab',
|
||||
tabComponent: 'DataDeployTab',
|
||||
props: {
|
||||
conid: connection?._id,
|
||||
database: name,
|
||||
@@ -347,6 +370,8 @@ await dbgateApi.dropAllDbObjects(${JSON.stringify(
|
||||
{
|
||||
editor: {
|
||||
archiveFolder,
|
||||
conid: connection?._id,
|
||||
database: name,
|
||||
},
|
||||
}
|
||||
);
|
||||
@@ -405,6 +430,7 @@ await dbgateApi.dropAllDbObjects(${JSON.stringify(
|
||||
driver?.databaseEngineTypes?.includes('sql') &&
|
||||
hasPermission(`dbops/sql-generator`) && { onClick: handleSqlGenerator, text: 'SQL Generator' },
|
||||
driver?.supportsDatabaseProfiler &&
|
||||
isProApp() &&
|
||||
hasPermission(`dbops/profiler`) && { onClick: handleDatabaseProfiler, text: 'Database profiler' },
|
||||
// isSqlOrDoc &&
|
||||
// isSqlOrDoc &&
|
||||
@@ -437,6 +463,11 @@ await dbgateApi.dropAllDbObjects(${JSON.stringify(
|
||||
text: 'Shell: Drop all objects',
|
||||
},
|
||||
|
||||
{
|
||||
onClick: handleGenerateRunScript,
|
||||
text: 'Shell: Run script',
|
||||
},
|
||||
|
||||
driver?.databaseEngineTypes?.includes('sql') &&
|
||||
hasPermission(`dbops/import`) && {
|
||||
onClick: handleShowDataDeployer,
|
||||
|
||||
@@ -185,10 +185,6 @@
|
||||
isImport: true,
|
||||
requiresWriteAccess: true,
|
||||
},
|
||||
hasPermission('dbops/charts') && {
|
||||
label: 'Open active chart',
|
||||
isActiveChart: true,
|
||||
},
|
||||
];
|
||||
case 'views':
|
||||
return [
|
||||
@@ -245,10 +241,6 @@
|
||||
isExport: true,
|
||||
functionName: 'tableReader',
|
||||
},
|
||||
{
|
||||
label: 'Open active chart',
|
||||
isActiveChart: true,
|
||||
},
|
||||
];
|
||||
case 'matviews':
|
||||
return [
|
||||
@@ -299,10 +291,6 @@
|
||||
isExport: true,
|
||||
functionName: 'tableReader',
|
||||
},
|
||||
{
|
||||
label: 'Open active chart',
|
||||
isActiveChart: true,
|
||||
},
|
||||
];
|
||||
case 'queries':
|
||||
return [
|
||||
@@ -472,28 +460,7 @@
|
||||
return driver;
|
||||
};
|
||||
|
||||
if (menu.isActiveChart) {
|
||||
const driver = await getDriver();
|
||||
const dmp = driver.createDumper();
|
||||
dmp.put('^select * from %f', data);
|
||||
openNewTab(
|
||||
{
|
||||
title: data.pureName,
|
||||
icon: 'img chart',
|
||||
tabComponent: 'ChartTab',
|
||||
props: {
|
||||
conid: data.conid,
|
||||
database: data.database,
|
||||
},
|
||||
},
|
||||
{
|
||||
editor: {
|
||||
config: { chartType: 'bar' },
|
||||
sql: dmp.s,
|
||||
},
|
||||
}
|
||||
);
|
||||
} else if (menu.isQueryDesigner) {
|
||||
if (menu.isQueryDesigner) {
|
||||
openNewTab(
|
||||
{
|
||||
title: 'Query #',
|
||||
|
||||
52
packages/web/src/appobj/PublicCloudFileAppObject.svelte
Normal file
52
packages/web/src/appobj/PublicCloudFileAppObject.svelte
Normal file
@@ -0,0 +1,52 @@
|
||||
<script lang="ts" context="module">
|
||||
import AppObjectCore from './AppObjectCore.svelte';
|
||||
|
||||
export const extractKey = data => data.path;
|
||||
export const createMatcher =
|
||||
filter =>
|
||||
({ title, description }) =>
|
||||
filterName(filter, title, description);
|
||||
</script>
|
||||
|
||||
<script lang="ts">
|
||||
import { apiCall } from '../utility/api';
|
||||
import newQuery from '../query/newQuery';
|
||||
import { filterName } from 'dbgate-tools';
|
||||
|
||||
export let data;
|
||||
|
||||
async function handleOpenSqlFile() {
|
||||
const fileData = await apiCall('cloud/public-file-data', { path: data.path });
|
||||
newQuery({
|
||||
initialData: fileData.text,
|
||||
});
|
||||
}
|
||||
|
||||
function createMenu() {
|
||||
return [{ text: 'Open', onClick: handleOpenSqlFile }];
|
||||
}
|
||||
</script>
|
||||
|
||||
<AppObjectCore
|
||||
{...$$restProps}
|
||||
{data}
|
||||
icon={'img sql-file'}
|
||||
title={data.title}
|
||||
menu={createMenu}
|
||||
on:click={handleOpenSqlFile}
|
||||
>
|
||||
{#if data.description}
|
||||
<div class="info">
|
||||
{data.description}
|
||||
</div>
|
||||
{/if}
|
||||
</AppObjectCore>
|
||||
|
||||
<style>
|
||||
.info {
|
||||
margin-left: 30px;
|
||||
margin-right: 5px;
|
||||
color: var(--theme-font-3);
|
||||
white-space: nowrap;
|
||||
}
|
||||
</style>
|
||||
@@ -41,16 +41,6 @@
|
||||
label: 'Markdown file',
|
||||
};
|
||||
|
||||
const charts: FileTypeHandler = {
|
||||
icon: 'img chart',
|
||||
format: 'json',
|
||||
tabComponent: 'ChartTab',
|
||||
folder: 'charts',
|
||||
currentConnection: true,
|
||||
extension: 'json',
|
||||
label: 'Chart file',
|
||||
};
|
||||
|
||||
const query: FileTypeHandler = {
|
||||
icon: 'img query-design',
|
||||
format: 'json',
|
||||
@@ -139,7 +129,6 @@
|
||||
sql,
|
||||
shell,
|
||||
markdown,
|
||||
charts,
|
||||
query,
|
||||
sqlite,
|
||||
diagrams,
|
||||
@@ -206,7 +195,14 @@
|
||||
showModal(ConfirmModal, {
|
||||
message: `Really delete file ${data.file}?`,
|
||||
onConfirm: () => {
|
||||
if (data.folid && data.cntid) {
|
||||
apiCall('cloud/delete-content', {
|
||||
folid: data.folid,
|
||||
cntid: data.cntid,
|
||||
});
|
||||
} else {
|
||||
apiCall('files/delete', data);
|
||||
}
|
||||
},
|
||||
});
|
||||
};
|
||||
@@ -217,7 +213,15 @@
|
||||
label: 'New file name',
|
||||
header: 'Rename file',
|
||||
onConfirm: newFile => {
|
||||
if (data.folid && data.cntid) {
|
||||
apiCall('cloud/rename-content', {
|
||||
folid: data.folid,
|
||||
cntid: data.cntid,
|
||||
name: newFile,
|
||||
});
|
||||
} else {
|
||||
apiCall('files/rename', { ...data, newFile });
|
||||
}
|
||||
},
|
||||
});
|
||||
};
|
||||
@@ -226,9 +230,17 @@
|
||||
showModal(InputTextModal, {
|
||||
value: data.file,
|
||||
label: 'New file name',
|
||||
header: 'Rename file',
|
||||
header: 'Copy file',
|
||||
onConfirm: newFile => {
|
||||
if (data.folid && data.cntid) {
|
||||
apiCall('cloud/copy-file', {
|
||||
folid: data.folid,
|
||||
cntid: data.cntid,
|
||||
name: newFile,
|
||||
});
|
||||
} else {
|
||||
apiCall('files/copy', { ...data, newFile });
|
||||
}
|
||||
},
|
||||
});
|
||||
};
|
||||
@@ -236,21 +248,38 @@
|
||||
const handleDownload = () => {
|
||||
saveFileToDisk(
|
||||
async filePath => {
|
||||
if (data.folid && data.cntid) {
|
||||
await apiCall('cloud/export-file', {
|
||||
folid: data.folid,
|
||||
cntid: data.cntid,
|
||||
filePath,
|
||||
});
|
||||
} else {
|
||||
await apiCall('files/export-file', {
|
||||
folder,
|
||||
file: data.file,
|
||||
filePath,
|
||||
});
|
||||
}
|
||||
},
|
||||
{ formatLabel: handler.label, formatExtension: handler.format, defaultFileName: data.file }
|
||||
);
|
||||
};
|
||||
|
||||
async function openTab() {
|
||||
const resp = await apiCall('files/load', { folder, file: data.file, format: handler.format });
|
||||
let dataContent;
|
||||
if (data.folid && data.cntid) {
|
||||
const resp = await apiCall('cloud/get-content', {
|
||||
folid: data.folid,
|
||||
cntid: data.cntid,
|
||||
});
|
||||
dataContent = resp.content;
|
||||
} else {
|
||||
dataContent = await apiCall('files/load', { folder, file: data.file, format: handler.format });
|
||||
}
|
||||
|
||||
const connProps: any = {};
|
||||
let tooltip = undefined;
|
||||
const connProps: any = {};
|
||||
|
||||
if (handler.currentConnection) {
|
||||
const connection = _.get($currentDatabase, 'connection') || {};
|
||||
@@ -270,10 +299,12 @@
|
||||
savedFile: data.file,
|
||||
savedFolder: handler.folder,
|
||||
savedFormat: handler.format,
|
||||
savedCloudFolderId: data.folid,
|
||||
savedCloudContentId: data.cntid,
|
||||
...connProps,
|
||||
},
|
||||
},
|
||||
{ editor: resp }
|
||||
{ editor: dataContent }
|
||||
);
|
||||
}
|
||||
</script>
|
||||
|
||||
10
packages/web/src/appobj/SubCloudItemsList.svelte
Normal file
10
packages/web/src/appobj/SubCloudItemsList.svelte
Normal file
@@ -0,0 +1,10 @@
|
||||
<script lang="ts">
|
||||
import { cloudConnectionsStore } from '../stores';
|
||||
import SubDatabaseList from './SubDatabaseList.svelte';
|
||||
|
||||
export let data;
|
||||
</script>
|
||||
|
||||
{#if data.conid && $cloudConnectionsStore[data.conid]}
|
||||
<SubDatabaseList {...$$props} data={$cloudConnectionsStore[data.conid]} />
|
||||
{/if}
|
||||
@@ -1,87 +0,0 @@
|
||||
<script lang="ts" context="module">
|
||||
import Chart from 'chart.js/auto';
|
||||
import 'chartjs-adapter-moment';
|
||||
import zoomPlugin from 'chartjs-plugin-zoom';
|
||||
|
||||
const getCurrentEditor = () => getActiveComponent('ChartCore');
|
||||
|
||||
registerCommand({
|
||||
id: 'chart.export',
|
||||
category: 'Chart',
|
||||
toolbarName: 'Export',
|
||||
name: 'Export chart',
|
||||
icon: 'icon report',
|
||||
toolbar: true,
|
||||
isRelatedToTab: true,
|
||||
onClick: () => getCurrentEditor().exportChart(),
|
||||
testEnabled: () => getCurrentEditor() != null,
|
||||
});
|
||||
|
||||
Chart.register(zoomPlugin);
|
||||
</script>
|
||||
|
||||
<script lang="ts">
|
||||
import { onMount, afterUpdate, onDestroy } from 'svelte';
|
||||
import _ from 'lodash';
|
||||
import registerCommand from '../commands/registerCommand';
|
||||
import { apiCall } from '../utility/api';
|
||||
|
||||
import contextMenu, { getContextMenu, registerMenu } from '../utility/contextMenu';
|
||||
import createActivator, { getActiveComponent } from '../utility/createActivator';
|
||||
import { saveFileToDisk } from '../utility/exportFileTools';
|
||||
|
||||
export let data;
|
||||
export let title;
|
||||
export let type = 'line';
|
||||
export let options = {};
|
||||
// export let plugins = {};
|
||||
// export let menu;
|
||||
|
||||
export const activator = createActivator('ChartCore', true);
|
||||
|
||||
let chart = null;
|
||||
let domChart;
|
||||
|
||||
onMount(() => {
|
||||
chart = new Chart(domChart, {
|
||||
type,
|
||||
data: data,
|
||||
// options must be cloned, because chartjs modifies options object, without cloning fails passing options to electron invoke
|
||||
options: _.cloneDeep(options),
|
||||
});
|
||||
});
|
||||
|
||||
afterUpdate(() => {
|
||||
if (!chart) return;
|
||||
chart.data = data;
|
||||
chart.type = type;
|
||||
chart.options = _.cloneDeep(options);
|
||||
// chart.plugins = plugins;
|
||||
chart.update();
|
||||
});
|
||||
|
||||
onDestroy(() => {
|
||||
chart = null;
|
||||
});
|
||||
|
||||
export async function exportChart() {
|
||||
saveFileToDisk(async filePath => {
|
||||
await apiCall('files/export-chart', {
|
||||
title,
|
||||
filePath,
|
||||
config: {
|
||||
type,
|
||||
data,
|
||||
options,
|
||||
},
|
||||
image: domChart.toDataURL(),
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
registerMenu({ command: 'chart.export', tag: 'export' });
|
||||
|
||||
const menu = getContextMenu();
|
||||
</script>
|
||||
|
||||
<canvas bind:this={domChart} {...$$restProps} use:contextMenu={menu} />
|
||||
@@ -1,170 +0,0 @@
|
||||
<script lang="ts">
|
||||
import FormProviderCore from '../forms/FormProviderCore.svelte';
|
||||
import HorizontalSplitter from '../elements/HorizontalSplitter.svelte';
|
||||
import WidgetColumnBar from '../widgets/WidgetColumnBar.svelte';
|
||||
import WidgetColumnBarItem from '../widgets/WidgetColumnBarItem.svelte';
|
||||
import ManagerInnerContainer from '../elements/ManagerInnerContainer.svelte';
|
||||
import FormSelectField from '../forms/FormSelectField.svelte';
|
||||
import FormTextField from '../forms/FormTextField.svelte';
|
||||
import FormCheckboxField from '../forms/FormCheckboxField.svelte';
|
||||
import FormFieldTemplateTiny from '../forms/FormFieldTemplateTiny.svelte';
|
||||
import { getConnectionInfo } from '../utility/metadataLoaders';
|
||||
import { findEngineDriver } from 'dbgate-tools';
|
||||
import { extensions } from '../stores';
|
||||
import { loadChartData, loadChartStructure } from './chartDataLoader';
|
||||
import DataChart from './DataChart.svelte';
|
||||
import _ from 'lodash';
|
||||
import ErrorInfo from '../elements/ErrorInfo.svelte';
|
||||
import FormColorField from '../forms/FormColorField.svelte';
|
||||
|
||||
export let data;
|
||||
export let configStore;
|
||||
export let conid;
|
||||
export let database;
|
||||
export let sql;
|
||||
// export let menu;
|
||||
|
||||
let availableColumnNames = [];
|
||||
let errorLoadingColumns = null;
|
||||
let errorLoadingData = null;
|
||||
let loadedData = null;
|
||||
|
||||
$: config = $configStore;
|
||||
|
||||
const getDriver = async () => {
|
||||
const conn = await getConnectionInfo({ conid });
|
||||
if (!conn) return;
|
||||
const driver = findEngineDriver(conn, $extensions);
|
||||
return driver;
|
||||
};
|
||||
|
||||
const handleLoadColumns = async () => {
|
||||
const driver = await getDriver();
|
||||
if (!driver) return;
|
||||
try {
|
||||
errorLoadingColumns = null;
|
||||
const columns = await loadChartStructure(driver, conid, database, sql);
|
||||
availableColumnNames = columns;
|
||||
// configStore.update(x => ({ ...x, labelColumn: availableColumnNames[0] }));
|
||||
} catch (err) {
|
||||
console.error(err);
|
||||
errorLoadingColumns = err.message;
|
||||
}
|
||||
};
|
||||
|
||||
const handleLoadData = async () => {
|
||||
const driver = await getDriver();
|
||||
if (!driver) return;
|
||||
try {
|
||||
errorLoadingData = null;
|
||||
const loaded = await loadChartData(driver, conid, database, sql, config);
|
||||
if (!loaded) return;
|
||||
const { columns, rows } = loaded;
|
||||
loadedData = {
|
||||
structure: columns,
|
||||
rows,
|
||||
};
|
||||
} catch (err) {
|
||||
console.error(err);
|
||||
errorLoadingData = err.message;
|
||||
}
|
||||
};
|
||||
|
||||
$: {
|
||||
$extensions;
|
||||
if (sql && conid && database) {
|
||||
handleLoadColumns();
|
||||
}
|
||||
}
|
||||
$: {
|
||||
if (data) {
|
||||
availableColumnNames = data.structure.columns.map(x => x.columnName);
|
||||
}
|
||||
}
|
||||
$: {
|
||||
$extensions;
|
||||
if (config.labelColumn && sql && conid && database) {
|
||||
handleLoadData();
|
||||
}
|
||||
}
|
||||
|
||||
let managerSize;
|
||||
</script>
|
||||
|
||||
<FormProviderCore values={configStore} template={FormFieldTemplateTiny}>
|
||||
<HorizontalSplitter initialValue="300px" bind:size={managerSize}>
|
||||
<div class="left" slot="1">
|
||||
<WidgetColumnBar>
|
||||
<WidgetColumnBarItem title="Style" name="style" height="40%">
|
||||
<ManagerInnerContainer width={managerSize}>
|
||||
<FormSelectField
|
||||
label="Chart type"
|
||||
name="chartType"
|
||||
isNative
|
||||
options={[
|
||||
{ value: 'bar', label: 'Bar' },
|
||||
{ value: 'line', label: 'Line' },
|
||||
{ value: 'pie', label: 'Pie' },
|
||||
{ value: 'polarArea', label: 'Polar area' },
|
||||
]}
|
||||
/>
|
||||
<FormTextField label="Chart title" name="chartTitle" />
|
||||
<FormSelectField
|
||||
label="Truncate from"
|
||||
name="truncateFrom"
|
||||
isNative
|
||||
options={[
|
||||
{ value: 'begin', label: 'Begin' },
|
||||
{ value: 'end', label: 'End (most recent data for datetime)' },
|
||||
]}
|
||||
/>
|
||||
<FormTextField label="Truncate limit" name="truncateLimit" />
|
||||
<FormCheckboxField label="Show relative values" name="showRelativeValues" />
|
||||
{#if $configStore.chartType == 'line'}
|
||||
<FormCheckboxField label="Fill" name="fillLineChart" defaultValue={true} />
|
||||
{/if}
|
||||
<FormTextField label="Color set" name="colorSeed" />
|
||||
</ManagerInnerContainer>
|
||||
</WidgetColumnBarItem>
|
||||
<WidgetColumnBarItem title="Data" name="data">
|
||||
<ManagerInnerContainer width={managerSize}>
|
||||
{#if availableColumnNames.length > 0}
|
||||
<FormSelectField
|
||||
label="Label column"
|
||||
name="labelColumn"
|
||||
isNative
|
||||
options={availableColumnNames.map(col => ({ value: col, label: col }))}
|
||||
/>
|
||||
{/if}
|
||||
|
||||
{#each availableColumnNames as col (col)}
|
||||
<FormCheckboxField label={col} name={`dataColumn_${col}`} />
|
||||
{#if config[`dataColumn_${col}`]}
|
||||
<FormColorField label="Color" name={`dataColumnColor_${col}`} emptyLabel="Random" />
|
||||
<FormTextField label="Label" name={`dataColumnLabel_${col}`} />
|
||||
{/if}
|
||||
{/each}
|
||||
</ManagerInnerContainer>
|
||||
</WidgetColumnBarItem>
|
||||
</WidgetColumnBar>
|
||||
</div>
|
||||
|
||||
<svelte:fragment slot="2">
|
||||
{#if errorLoadingColumns}
|
||||
<ErrorInfo message={errorLoadingColumns} alignTop />
|
||||
{:else if errorLoadingData}
|
||||
<ErrorInfo message={errorLoadingData} alignTop />
|
||||
{:else}
|
||||
<DataChart data={data || loadedData} />
|
||||
{/if}
|
||||
</svelte:fragment>
|
||||
</HorizontalSplitter>
|
||||
</FormProviderCore>
|
||||
|
||||
<style>
|
||||
.left {
|
||||
background-color: var(--theme-bg-0);
|
||||
display: flex;
|
||||
flex: 1;
|
||||
}
|
||||
</style>
|
||||
@@ -1,198 +0,0 @@
|
||||
<script lang="ts" context="module">
|
||||
function getTimeAxis(labels) {
|
||||
const res = [];
|
||||
for (const label of labels) {
|
||||
const parsed = moment(label);
|
||||
if (!parsed.isValid()) return null;
|
||||
const iso = parsed.toISOString();
|
||||
if (iso < '1850-01-01T00:00:00' || iso > '2150-01-01T00:00:00') return null;
|
||||
res.push(parsed);
|
||||
}
|
||||
return res;
|
||||
}
|
||||
|
||||
function getLabels(labelValues, timeAxis, chartType) {
|
||||
if (!timeAxis) return labelValues;
|
||||
if (chartType === 'line') return timeAxis.map(x => x.toDate());
|
||||
return timeAxis.map(x => x.format('D. M. YYYY'));
|
||||
}
|
||||
|
||||
function getOptions(timeAxis, chartType) {
|
||||
const res = {
|
||||
scales: {},
|
||||
};
|
||||
if (timeAxis && chartType === 'line') {
|
||||
res.scales = {
|
||||
x: {
|
||||
type: 'time',
|
||||
distribution: 'linear',
|
||||
|
||||
time: {
|
||||
tooltipFormat: 'D. M. YYYY HH:mm',
|
||||
displayFormats: {
|
||||
millisecond: 'HH:mm:ss.SSS',
|
||||
second: 'HH:mm:ss',
|
||||
minute: 'HH:mm',
|
||||
hour: 'D.M hA',
|
||||
day: 'D. M.',
|
||||
week: 'D. M. YYYY',
|
||||
month: 'MM-YYYY',
|
||||
quarter: '[Q]Q - YYYY',
|
||||
year: 'YYYY',
|
||||
},
|
||||
},
|
||||
},
|
||||
};
|
||||
}
|
||||
return res;
|
||||
}
|
||||
|
||||
function getPlugins(chartTitle) {
|
||||
const res = {};
|
||||
if (chartTitle) {
|
||||
res['title'] = {
|
||||
display: true,
|
||||
text: chartTitle,
|
||||
};
|
||||
}
|
||||
res['zoom'] = {
|
||||
zoom: {
|
||||
wheel: {
|
||||
enabled: true,
|
||||
},
|
||||
pinch: {
|
||||
enabled: true,
|
||||
},
|
||||
drag: {
|
||||
enabled: true,
|
||||
modifierKey: 'shift',
|
||||
},
|
||||
mode: 'x',
|
||||
},
|
||||
pan: {
|
||||
enabled: true,
|
||||
mode: 'x',
|
||||
},
|
||||
};
|
||||
|
||||
return res;
|
||||
}
|
||||
|
||||
function createChartData(
|
||||
freeData,
|
||||
labelColumn,
|
||||
dataColumns,
|
||||
colorSeed,
|
||||
chartType,
|
||||
chartTitle,
|
||||
fillLineChart,
|
||||
dataColumnColors,
|
||||
dataColumnLabels,
|
||||
themeDef
|
||||
) {
|
||||
if (!freeData || !labelColumn || !dataColumns || !freeData.rows || dataColumns.length == 0) return null;
|
||||
const palettes = themeDef?.themeType == 'dark' ? presetDarkPalettes : presetPalettes;
|
||||
const colors = randomcolor({
|
||||
count: _.max([freeData.rows.length, dataColumns.length, 1]),
|
||||
seed: colorSeed,
|
||||
});
|
||||
let backgroundColor = null;
|
||||
let borderColor = null;
|
||||
const labelValues = freeData.rows.map(x => x[labelColumn]);
|
||||
const timeAxis = getTimeAxis(labelValues);
|
||||
const labels = getLabels(labelValues, timeAxis, chartType);
|
||||
const res = {
|
||||
labels,
|
||||
datasets: dataColumns.map((dataColumn, columnIndex) => {
|
||||
const label = dataColumnLabels[dataColumn];
|
||||
if (chartType == 'line' || chartType == 'bar') {
|
||||
const color = dataColumnColors[dataColumn];
|
||||
if (color) {
|
||||
backgroundColor = palettes[color][4] + '80';
|
||||
borderColor = palettes[color][7];
|
||||
} else {
|
||||
backgroundColor = colors[columnIndex] + '80';
|
||||
borderColor = colors[columnIndex];
|
||||
}
|
||||
} else {
|
||||
backgroundColor = colors;
|
||||
}
|
||||
|
||||
return {
|
||||
label: label || dataColumn,
|
||||
data: freeData.rows.map(row => row[dataColumn]),
|
||||
backgroundColor,
|
||||
borderColor,
|
||||
borderWidth: 1,
|
||||
fill: fillLineChart == false ? false : true,
|
||||
};
|
||||
}),
|
||||
};
|
||||
|
||||
const options = getOptions(timeAxis, chartType);
|
||||
const plugins = getPlugins(chartTitle);
|
||||
|
||||
// console.log('RES', res);
|
||||
// console.log('OPTIONS', options);
|
||||
|
||||
return [res, options, plugins];
|
||||
}
|
||||
</script>
|
||||
|
||||
<script lang="ts">
|
||||
import _ from 'lodash';
|
||||
import randomcolor from 'randomcolor';
|
||||
import moment from 'moment';
|
||||
import ChartCore from './ChartCore.svelte';
|
||||
import { getFormContext } from '../forms/FormProviderCore.svelte';
|
||||
import { generate, presetPalettes, presetDarkPalettes, presetPrimaryColors } from '@ant-design/colors';
|
||||
import { extractDataColumnColors, extractDataColumnLabels, extractDataColumns } from './chartDataLoader';
|
||||
import { currentThemeDefinition } from '../stores';
|
||||
|
||||
export let data;
|
||||
// export let menu;
|
||||
|
||||
const { values } = getFormContext();
|
||||
|
||||
let clientWidth;
|
||||
let clientHeight;
|
||||
|
||||
$: dataColumns = extractDataColumns($values);
|
||||
$: dataColumnColors = extractDataColumnColors($values, dataColumns);
|
||||
$: dataColumnLabels = extractDataColumnLabels($values, dataColumns);
|
||||
|
||||
$: chartData = createChartData(
|
||||
data,
|
||||
$values.labelColumn,
|
||||
dataColumns,
|
||||
$values.colorSeed || '5',
|
||||
$values.chartType,
|
||||
$values.chartTitle,
|
||||
$values.fillLineChart,
|
||||
dataColumnColors,
|
||||
dataColumnLabels,
|
||||
$currentThemeDefinition
|
||||
);
|
||||
</script>
|
||||
|
||||
<div class="wrapper" bind:clientWidth bind:clientHeight>
|
||||
{#if chartData}
|
||||
{#key `${$values.chartType}|${clientWidth}|${clientHeight}`}
|
||||
<ChartCore
|
||||
width={clientWidth}
|
||||
height={clientHeight}
|
||||
data={chartData[0]}
|
||||
type={$values.chartType}
|
||||
title={$values.chartTitle}
|
||||
options={{ ...chartData[1], plugins: chartData[2] }}
|
||||
/>
|
||||
{/key}
|
||||
{/if}
|
||||
</div>
|
||||
|
||||
<style>
|
||||
.wrapper {
|
||||
flex: 1;
|
||||
overflow: hidden;
|
||||
}
|
||||
</style>
|
||||
@@ -1,133 +0,0 @@
|
||||
import type { Select } from 'dbgate-sqltree';
|
||||
import type { EngineDriver } from 'dbgate-types';
|
||||
import _ from 'lodash';
|
||||
import { apiCall } from '../utility/api';
|
||||
|
||||
export async function loadChartStructure(driver: EngineDriver, conid, database, sql) {
|
||||
const select: Select = {
|
||||
commandType: 'select',
|
||||
selectAll: true,
|
||||
topRecords: 1,
|
||||
from: {
|
||||
subQueryString: sql,
|
||||
alias: 'subq',
|
||||
},
|
||||
};
|
||||
|
||||
const resp = await apiCall('database-connections/sql-select', { conid, database, select });
|
||||
if (resp.errorMessage) throw new Error(resp.errorMessage);
|
||||
return resp.columns.map(x => x.columnName);
|
||||
}
|
||||
|
||||
export async function loadChartData(driver: EngineDriver, conid, database, sql, config) {
|
||||
const dataColumns = extractDataColumns(config);
|
||||
const { labelColumn, truncateFrom, truncateLimit, showRelativeValues } = config;
|
||||
if (!labelColumn || !dataColumns || dataColumns.length == 0) return null;
|
||||
|
||||
const select: Select = {
|
||||
commandType: 'select',
|
||||
|
||||
columns: [
|
||||
{
|
||||
exprType: 'column',
|
||||
source: { alias: 'subq' },
|
||||
columnName: labelColumn,
|
||||
alias: labelColumn,
|
||||
},
|
||||
// @ts-ignore
|
||||
...dataColumns.map(columnName => ({
|
||||
exprType: 'call',
|
||||
func: 'SUM',
|
||||
args: [
|
||||
{
|
||||
exprType: 'column',
|
||||
columnName,
|
||||
source: { alias: 'subq' },
|
||||
},
|
||||
],
|
||||
alias: columnName,
|
||||
})),
|
||||
],
|
||||
topRecords: truncateLimit || 100,
|
||||
from: {
|
||||
subQueryString: sql,
|
||||
alias: 'subq',
|
||||
},
|
||||
groupBy: [
|
||||
{
|
||||
exprType: 'column',
|
||||
source: { alias: 'subq' },
|
||||
columnName: labelColumn,
|
||||
},
|
||||
],
|
||||
orderBy: [
|
||||
{
|
||||
exprType: 'column',
|
||||
source: { alias: 'subq' },
|
||||
columnName: labelColumn,
|
||||
direction: truncateFrom == 'end' ? 'DESC' : 'ASC',
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
const resp = await apiCall('database-connections/sql-select', { conid, database, select });
|
||||
let { rows, columns, errorMessage } = resp;
|
||||
if (errorMessage) {
|
||||
throw new Error(errorMessage);
|
||||
}
|
||||
if (truncateFrom == 'end' && rows) {
|
||||
rows = _.reverse([...rows]);
|
||||
}
|
||||
if (showRelativeValues) {
|
||||
const maxValues = dataColumns.map(col => _.max(rows.map(row => row[col])));
|
||||
for (const [col, max] of _.zip(dataColumns, maxValues)) {
|
||||
if (!max) continue;
|
||||
if (!_.isNumber(max)) continue;
|
||||
if (!(max > 0)) continue;
|
||||
rows = rows.map(row => ({
|
||||
...row,
|
||||
[col]: (row[col] / max) * 100,
|
||||
}));
|
||||
// columns = columns.map((x) => {
|
||||
// if (x.columnName == col) {
|
||||
// return { columnName: `${col} %` };
|
||||
// }
|
||||
// return x;
|
||||
// });
|
||||
}
|
||||
}
|
||||
|
||||
console.log('Loaded chart data', { columns, rows });
|
||||
|
||||
return {
|
||||
columns,
|
||||
rows,
|
||||
};
|
||||
}
|
||||
|
||||
export function extractDataColumns(values) {
|
||||
const dataColumns = [];
|
||||
for (const key in values) {
|
||||
if (key.startsWith('dataColumn_') && values[key]) {
|
||||
dataColumns.push(key.substring('dataColumn_'.length));
|
||||
}
|
||||
}
|
||||
return dataColumns;
|
||||
}
|
||||
export function extractDataColumnColors(values, dataColumns) {
|
||||
const res = {};
|
||||
for (const column of dataColumns) {
|
||||
const color = values[`dataColumnColor_${column}`];
|
||||
if (color) res[column] = color;
|
||||
}
|
||||
return res;
|
||||
}
|
||||
|
||||
export function extractDataColumnLabels(values, dataColumns) {
|
||||
const res = {};
|
||||
for (const column of dataColumns) {
|
||||
const label = values[`dataColumnLabel_${column}`];
|
||||
if (label) res[column] = label;
|
||||
}
|
||||
return res;
|
||||
}
|
||||
@@ -164,6 +164,13 @@
|
||||
}}
|
||||
data-testid='CommandPalette_main'
|
||||
>
|
||||
<div
|
||||
class="overlay"
|
||||
on:click={() => {
|
||||
$visibleCommandPalette = null;
|
||||
}}
|
||||
/>
|
||||
<div class="palette">
|
||||
<div class="pages">
|
||||
<div
|
||||
class="page"
|
||||
@@ -218,55 +225,114 @@
|
||||
{/each}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<style>
|
||||
.main {
|
||||
width: 500px;
|
||||
position: fixed;
|
||||
top: 0;
|
||||
left: 0;
|
||||
right: 0;
|
||||
bottom: 0;
|
||||
display: flex;
|
||||
justify-content: center;
|
||||
align-items: flex-start;
|
||||
padding-top: 100px;
|
||||
}
|
||||
|
||||
.overlay {
|
||||
position: fixed;
|
||||
top: 0;
|
||||
left: 0;
|
||||
right: 0;
|
||||
bottom: 0;
|
||||
background-color: rgba(0, 0, 0, 0.4);
|
||||
z-index: 1;
|
||||
}
|
||||
|
||||
.palette {
|
||||
position: relative;
|
||||
z-index: 2;
|
||||
width: 600px;
|
||||
background: var(--theme-bg-2);
|
||||
border-radius: 4px;
|
||||
box-shadow: 0 4px 8px rgba(0, 0, 0, 0.2);
|
||||
}
|
||||
|
||||
.mainInner {
|
||||
padding: 5px;
|
||||
padding: 8px;
|
||||
}
|
||||
|
||||
.content {
|
||||
max-height: 400px;
|
||||
overflow-y: scroll;
|
||||
max-height: 500px;
|
||||
overflow-y: auto;
|
||||
margin-top: 8px;
|
||||
}
|
||||
|
||||
.search {
|
||||
display: flex;
|
||||
margin-bottom: 8px;
|
||||
}
|
||||
|
||||
input {
|
||||
width: 100%;
|
||||
padding: 8px;
|
||||
background: var(--theme-bg-1);
|
||||
border: 1px solid var(--theme-border);
|
||||
border-radius: 4px;
|
||||
color: var(--theme-font);
|
||||
font-size: 14px;
|
||||
}
|
||||
|
||||
input:focus {
|
||||
outline: none;
|
||||
border-color: var(--theme-accent);
|
||||
}
|
||||
|
||||
.command {
|
||||
padding: 5px;
|
||||
padding: 8px 12px;
|
||||
display: flex;
|
||||
justify-content: space-between;
|
||||
align-items: center;
|
||||
cursor: pointer;
|
||||
border-radius: 4px;
|
||||
}
|
||||
|
||||
.command:hover {
|
||||
background: var(--theme-bg-3);
|
||||
}
|
||||
|
||||
.command.selected {
|
||||
background: var(--theme-bg-selected);
|
||||
}
|
||||
|
||||
.shortcut {
|
||||
background: var(--theme-bg-3);
|
||||
padding: 2px 6px;
|
||||
border-radius: 3px;
|
||||
font-size: 12px;
|
||||
color: var(--theme-font-dimmed);
|
||||
}
|
||||
|
||||
.pages {
|
||||
display: flex;
|
||||
border-bottom: 1px solid var(--theme-border);
|
||||
}
|
||||
|
||||
.page {
|
||||
padding: 5px;
|
||||
border: 1px solid var(--theme-border);
|
||||
padding: 8px 16px;
|
||||
cursor: pointer;
|
||||
color: var(--theme-font-dimmed);
|
||||
border-bottom: 2px solid transparent;
|
||||
}
|
||||
|
||||
.page:hover {
|
||||
color: var(--theme-font-hover);
|
||||
color: var(--theme-font);
|
||||
}
|
||||
|
||||
.page.selected {
|
||||
background: var(--theme-bg-1);
|
||||
color: var(--theme-font);
|
||||
border-bottom-color: var(--theme-accent);
|
||||
}
|
||||
</style>
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
import {
|
||||
cloudSigninTokenHolder,
|
||||
currentDatabase,
|
||||
currentTheme,
|
||||
emptyConnectionGroupNames,
|
||||
@@ -123,6 +124,27 @@ registerCommand({
|
||||
},
|
||||
});
|
||||
|
||||
registerCommand({
|
||||
id: 'new.connectionOnCloud',
|
||||
toolbar: true,
|
||||
icon: 'img cloud-connection',
|
||||
toolbarName: 'Add connection on cloud',
|
||||
category: 'New',
|
||||
toolbarOrder: 1,
|
||||
name: 'Connection on Cloud',
|
||||
testEnabled: () => !getCurrentConfig()?.runAsPortal && !getCurrentConfig()?.storageDatabase && isProApp(),
|
||||
onClick: () => {
|
||||
openNewTab({
|
||||
title: 'New Connection on Cloud',
|
||||
icon: 'img cloud-connection',
|
||||
tabComponent: 'ConnectionTab',
|
||||
props: {
|
||||
saveOnCloud: true,
|
||||
},
|
||||
});
|
||||
},
|
||||
});
|
||||
|
||||
registerCommand({
|
||||
id: 'new.connection.folder',
|
||||
toolbar: true,
|
||||
@@ -535,7 +557,7 @@ registerCommand({
|
||||
id: 'app.exportConnections',
|
||||
category: 'Settings',
|
||||
name: 'Export connections',
|
||||
testEnabled: () => getElectron() != null,
|
||||
testEnabled: () => !getCurrentConfig()?.runAsPortal && !getCurrentConfig()?.storageDatabase,
|
||||
onClick: () => {
|
||||
showModal(ExportImportConnectionsModal, {
|
||||
mode: 'export',
|
||||
@@ -547,7 +569,7 @@ registerCommand({
|
||||
id: 'app.importConnections',
|
||||
category: 'Settings',
|
||||
name: 'Import connections',
|
||||
testEnabled: () => getElectron() != null,
|
||||
testEnabled: () => !getCurrentConfig()?.runAsPortal && !getCurrentConfig()?.storageDatabase,
|
||||
onClick: async () => {
|
||||
const files = await electron.showOpenDialog({
|
||||
properties: ['showHiddenFiles', 'openFile'],
|
||||
@@ -662,6 +684,15 @@ if (hasPermission('settings/change')) {
|
||||
});
|
||||
}
|
||||
|
||||
registerCommand({
|
||||
id: 'cloud.logout',
|
||||
category: 'Cloud',
|
||||
name: 'Logout',
|
||||
onClick: () => {
|
||||
cloudSigninTokenHolder.set(null);
|
||||
},
|
||||
});
|
||||
|
||||
registerCommand({
|
||||
id: 'file.exit',
|
||||
category: 'File',
|
||||
@@ -929,9 +960,17 @@ registerCommand({
|
||||
id: 'app.openSponsoring',
|
||||
category: 'Application',
|
||||
name: 'Become sponsor',
|
||||
testEnabled: () => !isProApp(),
|
||||
onClick: () => openWebLink('https://opencollective.com/dbgate'),
|
||||
});
|
||||
|
||||
registerCommand({
|
||||
id: 'app.giveFeedback',
|
||||
category: 'Application',
|
||||
name: 'Give us feedback',
|
||||
onClick: () => openWebLink('https://dbgate.org/feedback'),
|
||||
});
|
||||
|
||||
registerCommand({
|
||||
id: 'app.zoomIn',
|
||||
category: 'Application',
|
||||
|
||||
@@ -16,6 +16,8 @@
|
||||
{ useThousandsSeparator: getBoolSettingsValue('dataGrid.thousandsSeparator', false) },
|
||||
jsonParsedValue
|
||||
);
|
||||
|
||||
// $: console.log('CellValue', value, stringified);
|
||||
</script>
|
||||
|
||||
{#if rowData == null}
|
||||
|
||||
@@ -54,7 +54,8 @@
|
||||
|
||||
$: style = computeStyle(maxWidth, col);
|
||||
|
||||
$: isJson = _.isPlainObject(value) && !(value?.type == 'Buffer' && _.isArray(value.data)) && !value.$oid;
|
||||
$: isJson =
|
||||
_.isPlainObject(value) && !(value?.type == 'Buffer' && _.isArray(value.data)) && !value.$oid && !value.$bigint;
|
||||
|
||||
// don't parse JSON for explicit data types
|
||||
$: jsonParsedValue = !editorTypes?.explicitDataType && isJsonLikeLongString(value) ? safeJsonParse(value) : null;
|
||||
|
||||
@@ -261,13 +261,6 @@
|
||||
testEnabled: () => getCurrentDataGrid() != null,
|
||||
onClick: () => getCurrentDataGrid().openFreeTable(),
|
||||
});
|
||||
registerCommand({
|
||||
id: 'dataGrid.openChartFromSelection',
|
||||
category: 'Data grid',
|
||||
name: 'Open chart from selection',
|
||||
testEnabled: () => getCurrentDataGrid() != null,
|
||||
onClick: () => getCurrentDataGrid().openChartFromSelection(),
|
||||
});
|
||||
registerCommand({
|
||||
id: 'dataGrid.newJson',
|
||||
category: 'Data grid',
|
||||
@@ -469,6 +462,7 @@
|
||||
export let hideGridLeftColumn = false;
|
||||
export let overlayDefinition = null;
|
||||
export let onGetSelectionMenu = null;
|
||||
export let onOpenChart = null;
|
||||
|
||||
export const activator = createActivator('DataGridCore', false);
|
||||
|
||||
@@ -715,23 +709,6 @@
|
||||
openJsonLinesData(getSelectedFreeDataRows());
|
||||
}
|
||||
|
||||
export function openChartFromSelection() {
|
||||
openNewTab(
|
||||
{
|
||||
title: 'Chart #',
|
||||
icon: 'img chart',
|
||||
tabComponent: 'ChartTab',
|
||||
props: {},
|
||||
},
|
||||
{
|
||||
editor: {
|
||||
data: getSelectedFreeData(),
|
||||
config: { chartType: 'bar' },
|
||||
},
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
export function viewJsonDocumentEnabled() {
|
||||
return isDynamicStructure && _.uniq(selectedCells.map(x => x[0])).length == 1;
|
||||
}
|
||||
@@ -1869,9 +1846,13 @@
|
||||
// ],
|
||||
// },
|
||||
isProApp() && { command: 'dataGrid.sendToDataDeploy' },
|
||||
isProApp() &&
|
||||
onOpenChart && {
|
||||
text: 'Open chart',
|
||||
onClick: () => onOpenChart(),
|
||||
},
|
||||
{ command: 'dataGrid.generateSqlFromData' },
|
||||
{ command: 'dataGrid.openFreeTable' },
|
||||
{ command: 'dataGrid.openChartFromSelection' },
|
||||
{ command: 'dataGrid.openSelectionInMap', hideDisabled: true },
|
||||
{ placeTag: 'chart' }
|
||||
);
|
||||
|
||||
27
packages/web/src/datagrid/FreeTableDataGrid.svelte
Normal file
27
packages/web/src/datagrid/FreeTableDataGrid.svelte
Normal file
@@ -0,0 +1,27 @@
|
||||
<script lang="ts">
|
||||
import { createGridCache, createGridConfig, FreeTableGridDisplay } from 'dbgate-datalib';
|
||||
import { writable } from 'svelte/store';
|
||||
|
||||
import DataGridCore from './DataGridCore.svelte';
|
||||
import RowsArrayGrider from './RowsArrayGrider';
|
||||
import ErrorInfo from '../elements/ErrorInfo.svelte';
|
||||
import LoadingInfo from '../elements/LoadingInfo.svelte';
|
||||
|
||||
export let model;
|
||||
|
||||
let errorMessage = null;
|
||||
|
||||
const config = writable(createGridConfig());
|
||||
const cache = writable(createGridCache());
|
||||
|
||||
$: grider = new RowsArrayGrider(model.rows);
|
||||
$: display = new FreeTableGridDisplay(model, $config, config.update, $cache, cache.update);
|
||||
</script>
|
||||
|
||||
{#if !model}
|
||||
<LoadingInfo wrapper message="Loading data" />
|
||||
{:else if errorMessage}
|
||||
<ErrorInfo message={errorMessage} />
|
||||
{:else if grider}
|
||||
<DataGridCore {...$$props} {grider} {display} />
|
||||
{/if}
|
||||
@@ -1,14 +1,6 @@
|
||||
<script context="module" lang="ts">
|
||||
const getCurrentEditor = () => getActiveComponent('SqlDataGridCore');
|
||||
|
||||
registerCommand({
|
||||
id: 'sqlDataGrid.openActiveChart',
|
||||
category: 'Data grid',
|
||||
name: 'Open active chart',
|
||||
testEnabled: () => getCurrentEditor() != null && hasPermission('dbops/charts'),
|
||||
onClick: () => getCurrentEditor().openActiveChart(),
|
||||
});
|
||||
|
||||
registerCommand({
|
||||
id: 'sqlDataGrid.openQuery',
|
||||
category: 'Data grid',
|
||||
@@ -190,28 +182,6 @@
|
||||
openQuery(display.getPageQueryText(0, getIntSettingsValue('dataGrid.pageSize', 100, 5, 1000)));
|
||||
}
|
||||
|
||||
export function openActiveChart() {
|
||||
openNewTab(
|
||||
{
|
||||
title: 'Chart #',
|
||||
icon: 'img chart',
|
||||
tabComponent: 'ChartTab',
|
||||
props: {
|
||||
conid,
|
||||
database,
|
||||
},
|
||||
},
|
||||
{
|
||||
editor: {
|
||||
config: { chartType: 'bar' },
|
||||
sql: display.getExportQuery(select => {
|
||||
select.orderBy = null;
|
||||
}),
|
||||
},
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
const quickExportHandler = fmt => async () => {
|
||||
const coninfo = await getConnectionInfo({ conid });
|
||||
exportQuickExportFile(
|
||||
|
||||
@@ -72,6 +72,7 @@ export function countColumnSizes(grider: Grider, columns, containerWidth, displa
|
||||
let text = value;
|
||||
if (_.isArray(value)) text = `[${value.length} items]`;
|
||||
else if (value?.$oid) text = `ObjectId("${value.$oid}")`;
|
||||
else if (value?.$bigint) text = value.$bigint;
|
||||
else if (isJsonLikeLongString(value) && safeJsonParse(value)) text = '(JSON)';
|
||||
const width = context.measureText(text).width + 8;
|
||||
// console.log('colName', colName, text, width);
|
||||
|
||||
@@ -39,7 +39,7 @@
|
||||
|
||||
$: size = computeSplitterSize(initialValue, clientWidth, customRatio, initialSizeRight);
|
||||
|
||||
$: if (onChangeSize) onChangeSize(size);
|
||||
$: if (onChangeSize) onChangeSize(size, clientWidth - size);
|
||||
</script>
|
||||
|
||||
<div class="container" bind:clientWidth>
|
||||
|
||||
@@ -1,4 +1,8 @@
|
||||
<div><slot /></div>
|
||||
<script lang="ts">
|
||||
export let noMargin = false;
|
||||
</script>
|
||||
|
||||
<div class:noMargin><slot /></div>
|
||||
|
||||
<style>
|
||||
div {
|
||||
@@ -6,4 +10,8 @@
|
||||
border-bottom: 1px solid var(--theme-border);
|
||||
margin-bottom: 5px;
|
||||
}
|
||||
|
||||
div.noMargin {
|
||||
margin-bottom: 0;
|
||||
}
|
||||
</style>
|
||||
|
||||
@@ -18,6 +18,7 @@
|
||||
export let flex1 = true;
|
||||
export let contentTestId = undefined;
|
||||
export let inlineTabs = false;
|
||||
export let onUserChange = null;
|
||||
|
||||
export function setValue(index) {
|
||||
value = index;
|
||||
@@ -30,8 +31,16 @@
|
||||
<div class="main" class:flex1>
|
||||
<div class="tabs" class:inlineTabs>
|
||||
{#each _.compact(tabs) as tab, index}
|
||||
<div class="tab-item" class:selected={value == index} on:click={() => (value = index)} data-testid={tab.testid}>
|
||||
<span class="ml-2">
|
||||
<div
|
||||
class="tab-item"
|
||||
class:selected={value == index}
|
||||
on:click={() => {
|
||||
value = index;
|
||||
onUserChange?.(index);
|
||||
}}
|
||||
data-testid={tab.testid}
|
||||
>
|
||||
<span class="ml-2 noselect">
|
||||
{tab.label}
|
||||
</span>
|
||||
</div>
|
||||
@@ -139,5 +148,4 @@
|
||||
.container.isInline:not(.tabVisible) {
|
||||
display: none;
|
||||
}
|
||||
|
||||
</style>
|
||||
|
||||
27
packages/web/src/forms/FormCloudFolderSelect.svelte
Normal file
27
packages/web/src/forms/FormCloudFolderSelect.svelte
Normal file
@@ -0,0 +1,27 @@
|
||||
<script lang="ts">
|
||||
import { useCloudContentList } from '../utility/metadataLoaders';
|
||||
|
||||
import FormSelectField from './FormSelectField.svelte';
|
||||
|
||||
export let name;
|
||||
export let requiredRoleVariants = ['read', 'write', 'admin'];
|
||||
|
||||
export let prependFolders = [];
|
||||
|
||||
const cloudContentList = useCloudContentList();
|
||||
|
||||
$: folderOptions = [
|
||||
...prependFolders.map(folder => ({
|
||||
value: folder.folid,
|
||||
label: folder.name,
|
||||
})),
|
||||
...($cloudContentList || [])
|
||||
.filter(folder => requiredRoleVariants.find(role => folder.role == role))
|
||||
.map(folder => ({
|
||||
value: folder.folid,
|
||||
label: folder.name,
|
||||
})),
|
||||
];
|
||||
</script>
|
||||
|
||||
<FormSelectField {...$$props} options={folderOptions} />
|
||||
@@ -39,6 +39,9 @@
|
||||
'icon minus-thick': 'mdi mdi-minus-thick',
|
||||
'icon invisible-box': 'mdi mdi-minus-box-outline icon-invisible',
|
||||
'icon cloud-upload': 'mdi mdi-cloud-upload',
|
||||
'icon cloud': 'mdi mdi-cloud',
|
||||
'icon cloud-public': 'mdi mdi-cloud-search',
|
||||
'icon cloud-private': 'mdi mdi-cloud-key',
|
||||
'icon import': 'mdi mdi-application-import',
|
||||
'icon export': 'mdi mdi-application-export',
|
||||
'icon new-connection': 'mdi mdi-database-plus',
|
||||
@@ -68,6 +71,7 @@
|
||||
'icon trigger': 'mdi mdi-lightning-bolt',
|
||||
'icon scheduler-event': 'mdi mdi-calendar-blank',
|
||||
'icon arrow-link': 'mdi mdi-arrow-top-right-thick',
|
||||
'icon reset': 'mdi mdi-cancel',
|
||||
|
||||
'icon window-restore': 'mdi mdi-window-restore',
|
||||
'icon window-maximize': 'mdi mdi-window-maximize',
|
||||
@@ -112,6 +116,9 @@
|
||||
'icon square': 'mdi mdi-square',
|
||||
'icon data-deploy': 'mdi mdi-database-settings',
|
||||
|
||||
'icon cloud-account': 'mdi mdi-account-remove-outline',
|
||||
'icon cloud-account-connected': 'mdi mdi-account-check-outline',
|
||||
|
||||
'icon edit': 'mdi mdi-pencil',
|
||||
'icon delete': 'mdi mdi-delete',
|
||||
'icon arrow-up': 'mdi mdi-arrow-up',
|
||||
@@ -151,6 +158,7 @@
|
||||
'icon text': 'mdi mdi-text',
|
||||
'icon ai': 'mdi mdi-head-lightbulb',
|
||||
'icon wait': 'mdi mdi-timer-sand',
|
||||
'icon more': 'mdi mdi-more',
|
||||
|
||||
'icon run': 'mdi mdi-play',
|
||||
'icon chevron-down': 'mdi mdi-chevron-down',
|
||||
@@ -222,6 +230,7 @@
|
||||
|
||||
'icon premium': 'mdi mdi-star',
|
||||
'icon upload': 'mdi mdi-upload',
|
||||
'icon limit': 'mdi mdi-car-speed-limiter',
|
||||
|
||||
'img ok': 'mdi mdi-check-circle color-icon-green',
|
||||
'img ok-inv': 'mdi mdi-check-circle color-icon-inv-green',
|
||||
@@ -262,6 +271,7 @@
|
||||
'img role': 'mdi mdi-account-group color-icon-blue',
|
||||
'img admin': 'mdi mdi-security color-icon-blue',
|
||||
'img auth': 'mdi mdi-account-key color-icon-blue',
|
||||
'img cloud-connection': 'mdi mdi-cloud-lock color-icon-blue',
|
||||
|
||||
'img add': 'mdi mdi-plus-circle color-icon-green',
|
||||
'img minus': 'mdi mdi-minus-circle color-icon-red',
|
||||
|
||||
40
packages/web/src/modals/ChooseCloudFolderModal.svelte
Normal file
40
packages/web/src/modals/ChooseCloudFolderModal.svelte
Normal file
@@ -0,0 +1,40 @@
|
||||
<script lang="ts">
|
||||
import FormStyledButton from '../buttons/FormStyledButton.svelte';
|
||||
import FormCloudFolderSelect from '../forms/FormCloudFolderSelect.svelte';
|
||||
|
||||
import FormProvider from '../forms/FormProvider.svelte';
|
||||
import FormSubmit from '../forms/FormSubmit.svelte';
|
||||
import { useCloudContentList } from '../utility/metadataLoaders';
|
||||
import ModalBase from './ModalBase.svelte';
|
||||
import { closeCurrentModal } from './modalTools';
|
||||
|
||||
export let message = '';
|
||||
export let onConfirm;
|
||||
export let requiredRoleVariants;
|
||||
|
||||
const cloudContentList = useCloudContentList();
|
||||
</script>
|
||||
|
||||
{#if $cloudContentList}
|
||||
<FormProvider initialValues={{ cloudFolder: $cloudContentList?.find(x => x.isPrivate)?.folid }}>
|
||||
<ModalBase {...$$restProps}>
|
||||
<svelte:fragment slot="header">Choose cloud folder</svelte:fragment>
|
||||
|
||||
<div>{message}</div>
|
||||
|
||||
<FormCloudFolderSelect label="Cloud folder" name="cloudFolder" isNative {requiredRoleVariants} />
|
||||
|
||||
<svelte:fragment slot="footer">
|
||||
<FormSubmit
|
||||
value="OK"
|
||||
on:click={e => {
|
||||
closeCurrentModal();
|
||||
console.log('onConfirm', e.detail);
|
||||
onConfirm(e.detail.cloudFolder);
|
||||
}}
|
||||
/>
|
||||
<FormStyledButton type="button" value="Close" on:click={closeCurrentModal} />
|
||||
</svelte:fragment>
|
||||
</ModalBase>
|
||||
</FormProvider>
|
||||
{/if}
|
||||
71
packages/web/src/modals/LicenseLimitMessageModal.svelte
Normal file
71
packages/web/src/modals/LicenseLimitMessageModal.svelte
Normal file
@@ -0,0 +1,71 @@
|
||||
<script>
|
||||
import FormStyledButton from '../buttons/FormStyledButton.svelte';
|
||||
import FormProvider from '../forms/FormProvider.svelte';
|
||||
import FormSubmit from '../forms/FormSubmit.svelte';
|
||||
import FontIcon from '../icons/FontIcon.svelte';
|
||||
import { isProApp } from '../utility/proTools';
|
||||
import { openWebLink } from '../utility/simpleTools';
|
||||
|
||||
import ModalBase from './ModalBase.svelte';
|
||||
import { closeCurrentModal } from './modalTools';
|
||||
|
||||
export let message;
|
||||
export let licenseLimits;
|
||||
</script>
|
||||
|
||||
<FormProvider>
|
||||
<ModalBase {...$$restProps}>
|
||||
<div slot="header">License limit error</div>
|
||||
|
||||
<div class="wrapper">
|
||||
<div class="icon">
|
||||
<FontIcon icon="img error" />
|
||||
</div>
|
||||
<div data-testid="LicenseLimitMessageModal_message">
|
||||
<p>
|
||||
Cloud operation ended with error:<br />
|
||||
{message}
|
||||
</p>
|
||||
|
||||
<p>
|
||||
This is a limitation of the free version of DbGate. To continue using cloud operations, please {#if !isProApp()}download
|
||||
and{/if} purchase DbGate Premium.
|
||||
</p>
|
||||
<p>Free version limit:</p>
|
||||
<ul>
|
||||
{#each licenseLimits || [] as limit}
|
||||
<li>{limit}</li>
|
||||
{/each}
|
||||
</ul>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div slot="footer">
|
||||
<FormSubmit value="Close" on:click={closeCurrentModal} data-testid="LicenseLimitMessageModal_closeButton" />
|
||||
{#if !isProApp()}
|
||||
<FormStyledButton
|
||||
value="Download DbGate Premium"
|
||||
on:click={() => openWebLink('https://dbgate.io/download/')}
|
||||
skipWidth
|
||||
/>
|
||||
{/if}
|
||||
<FormStyledButton
|
||||
value="Purchase DbGate Premium"
|
||||
on:click={() => openWebLink('https://dbgate.io/purchase/premium/')}
|
||||
skipWidth
|
||||
/>
|
||||
</div>
|
||||
</ModalBase>
|
||||
</FormProvider>
|
||||
|
||||
<style>
|
||||
.wrapper {
|
||||
display: flex;
|
||||
}
|
||||
|
||||
.icon {
|
||||
margin-right: 10px;
|
||||
font-size: 20pt;
|
||||
padding-top: 30px;
|
||||
}
|
||||
</style>
|
||||
41
packages/web/src/modals/RowsLimitModal.svelte
Normal file
41
packages/web/src/modals/RowsLimitModal.svelte
Normal file
@@ -0,0 +1,41 @@
|
||||
<script lang="ts">
|
||||
import FormStyledButton from '../buttons/FormStyledButton.svelte';
|
||||
|
||||
import FormProvider from '../forms/FormProvider.svelte';
|
||||
import FormSubmit from '../forms/FormSubmit.svelte';
|
||||
import FormTextField from '../forms/FormTextField.svelte';
|
||||
import ModalBase from './ModalBase.svelte';
|
||||
import { closeCurrentModal } from './modalTools';
|
||||
|
||||
export let value;
|
||||
export let onConfirm;
|
||||
|
||||
const handleSubmit = async value => {
|
||||
closeCurrentModal();
|
||||
onConfirm(value);
|
||||
};
|
||||
</script>
|
||||
|
||||
<FormProvider initialValues={{ value }}>
|
||||
<ModalBase {...$$restProps}>
|
||||
<svelte:fragment slot="header">Rows limit</svelte:fragment>
|
||||
|
||||
<FormTextField
|
||||
label="Return only N rows from query"
|
||||
name="value"
|
||||
focused
|
||||
data-testid="RowsLimitModal_value"
|
||||
placeholder="(No rows limit)"
|
||||
/>
|
||||
|
||||
<svelte:fragment slot="footer">
|
||||
<FormSubmit
|
||||
value="OK"
|
||||
on:click={e => handleSubmit(parseInt(e.detail.value) || null)}
|
||||
data-testid="RowsLimitModal_setLimit"
|
||||
/>
|
||||
<FormStyledButton value="Set no limit" on:click={e => handleSubmit(null)} data-testid="RowsLimitModal_setNoLimit" />
|
||||
<FormStyledButton type="button" value="Cancel" on:click={closeCurrentModal} data-testid="RowsLimitModal_cancel" />
|
||||
</svelte:fragment>
|
||||
</ModalBase>
|
||||
</FormProvider>
|
||||
@@ -1,15 +1,18 @@
|
||||
<script lang="ts">
|
||||
import FormStyledButton from '../buttons/FormStyledButton.svelte';
|
||||
|
||||
import FormProvider from '../forms/FormProvider.svelte';
|
||||
import FormProviderCore from '../forms/FormProviderCore.svelte';
|
||||
import FormSubmit from '../forms/FormSubmit.svelte';
|
||||
import FormTextField from '../forms/FormTextField.svelte';
|
||||
import { cloudSigninTokenHolder } from '../stores';
|
||||
import { _t } from '../translations';
|
||||
import { apiCall } from '../utility/api';
|
||||
import { writable } from 'svelte/store';
|
||||
|
||||
import getElectron from '../utility/getElectron';
|
||||
import ModalBase from './ModalBase.svelte';
|
||||
import { closeCurrentModal } from './modalTools';
|
||||
import { closeCurrentModal, showModal } from './modalTools';
|
||||
import FormCloudFolderSelect from '../forms/FormCloudFolderSelect.svelte';
|
||||
|
||||
export let data;
|
||||
export let name;
|
||||
@@ -18,11 +21,16 @@
|
||||
export let fileExtension;
|
||||
export let filePath;
|
||||
export let onSave = undefined;
|
||||
export let folid;
|
||||
// export let cntid;
|
||||
|
||||
const values = writable({ name, cloudFolder: folid ?? '__local' });
|
||||
|
||||
const electron = getElectron();
|
||||
|
||||
const handleSubmit = async e => {
|
||||
const { name } = e.detail;
|
||||
const { name, cloudFolder } = e.detail;
|
||||
if (cloudFolder === '__local') {
|
||||
await apiCall('files/save', { folder, file: name, data, format });
|
||||
closeCurrentModal();
|
||||
if (onSave) {
|
||||
@@ -30,8 +38,32 @@
|
||||
savedFile: name,
|
||||
savedFolder: folder,
|
||||
savedFilePath: null,
|
||||
savedCloudFolderId: null,
|
||||
savedCloudContentId: null,
|
||||
});
|
||||
}
|
||||
} else {
|
||||
const resp = await apiCall('cloud/save-file', {
|
||||
folid: cloudFolder,
|
||||
fileName: name,
|
||||
data,
|
||||
contentFolder: folder,
|
||||
format,
|
||||
// cntid,
|
||||
});
|
||||
if (resp.cntid) {
|
||||
closeCurrentModal();
|
||||
if (onSave) {
|
||||
onSave(name, {
|
||||
savedFile: name,
|
||||
savedFolder: folder,
|
||||
savedFilePath: null,
|
||||
savedCloudFolderId: cloudFolder,
|
||||
// savedCloudContentId: resp.cntid,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
const handleSaveToDisk = async filePath => {
|
||||
@@ -47,15 +79,32 @@
|
||||
savedFile: null,
|
||||
savedFolder: null,
|
||||
savedFilePath: filePath,
|
||||
savedCloudFolderId: null,
|
||||
savedCloudContentId: null,
|
||||
});
|
||||
}
|
||||
};
|
||||
</script>
|
||||
|
||||
<FormProvider initialValues={{ name }}>
|
||||
<FormProviderCore {values}>
|
||||
<ModalBase {...$$restProps}>
|
||||
<svelte:fragment slot="header">Save file</svelte:fragment>
|
||||
<FormTextField label="File name" name="name" focused />
|
||||
{#if $cloudSigninTokenHolder}
|
||||
<FormCloudFolderSelect
|
||||
label="Choose cloud folder"
|
||||
name="cloudFolder"
|
||||
isNative
|
||||
requiredRoleVariants={['write', 'admin']}
|
||||
prependFolders={[
|
||||
{
|
||||
folid: '__local',
|
||||
name: "Local folder (don't store on cloud)",
|
||||
},
|
||||
]}
|
||||
/>
|
||||
{/if}
|
||||
|
||||
<svelte:fragment slot="footer">
|
||||
<FormSubmit value={_t('common.save', { defaultMessage: 'Save' })} on:click={handleSubmit} />
|
||||
{#if electron}
|
||||
@@ -79,4 +128,4 @@
|
||||
{/if}
|
||||
</svelte:fragment>
|
||||
</ModalBase>
|
||||
</FormProvider>
|
||||
</FormProviderCore>
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
<script lang="ts">
|
||||
import _ from 'lodash';
|
||||
import _, { result } from 'lodash';
|
||||
|
||||
import { onMount, tick } from 'svelte';
|
||||
|
||||
@@ -9,6 +9,7 @@
|
||||
import { apiOff, apiOn } from '../utility/api';
|
||||
import useEffect from '../utility/useEffect';
|
||||
import AllResultsTab from './AllResultsTab.svelte';
|
||||
import JslChart from '../charts/JslChart.svelte';
|
||||
|
||||
export let tabs = [];
|
||||
export let sessionId;
|
||||
@@ -16,6 +17,8 @@
|
||||
export let driver;
|
||||
|
||||
export let resultCount;
|
||||
export let onSetFrontMatterField;
|
||||
export let onGetFrontMatter;
|
||||
|
||||
onMount(() => {
|
||||
allResultsInOneTab = $allResultsInOneTabDefault;
|
||||
@@ -23,6 +26,7 @@
|
||||
|
||||
let allResultsInOneTab = null;
|
||||
let resultInfos = [];
|
||||
let charts = [];
|
||||
let domTabs;
|
||||
|
||||
$: resultCount = resultInfos.length;
|
||||
@@ -35,6 +39,23 @@
|
||||
if (!currentTab?.isResult) domTabs.setValue(_.findIndex(allTabs, x => x.isResult));
|
||||
};
|
||||
|
||||
const handleCharts = async props => {
|
||||
charts = [
|
||||
...charts,
|
||||
{
|
||||
jslid: props.jslid,
|
||||
charts: props.charts,
|
||||
resultIndex: props.resultIndex,
|
||||
},
|
||||
];
|
||||
const selectedChart = onGetFrontMatter?.()?.['selected-chart'];
|
||||
await tick();
|
||||
if (selectedChart && props.resultIndex == selectedChart - 1) {
|
||||
domTabs.setValue(_.findIndex(allTabs, x => x.isChart && x.resultIndex === props.resultIndex));
|
||||
}
|
||||
// console.log('Charts received for jslid:', props.jslid, 'Charts:', props.charts);
|
||||
};
|
||||
|
||||
$: oneTab = allResultsInOneTab ?? $allResultsInOneTabDefault;
|
||||
|
||||
$: allTabs = [
|
||||
@@ -55,13 +76,27 @@
|
||||
label: `Result ${index + 1}`,
|
||||
isResult: true,
|
||||
component: JslDataGrid,
|
||||
props: { jslid: info.jslid, driver },
|
||||
props: { jslid: info.jslid, driver, onOpenChart: () => handleOpenChart(info.resultIndex) },
|
||||
}))),
|
||||
...charts.map((info, index) => ({
|
||||
label: `Chart ${info.resultIndex + 1}`,
|
||||
isChart: true,
|
||||
resultIndex: info.resultIndex,
|
||||
component: JslChart,
|
||||
props: {
|
||||
jslid: info.jslid,
|
||||
initialCharts: info.charts,
|
||||
onEditDefinition: definition => {
|
||||
onSetFrontMatterField?.(`chart-${info.resultIndex + 1}`, definition ?? undefined);
|
||||
},
|
||||
},
|
||||
})),
|
||||
];
|
||||
|
||||
$: {
|
||||
if (executeNumber >= 0) {
|
||||
resultInfos = [];
|
||||
charts = [];
|
||||
if (domTabs) domTabs.setValue(0);
|
||||
}
|
||||
}
|
||||
@@ -72,8 +107,10 @@
|
||||
function onSession(sid) {
|
||||
if (sid) {
|
||||
apiOn(`session-recordset-${sid}`, handleResultSet);
|
||||
apiOn(`session-charts-${sid}`, handleCharts);
|
||||
return () => {
|
||||
apiOff(`session-recordset-${sid}`, handleResultSet);
|
||||
apiOff(`session-charts-${sid}`, handleCharts);
|
||||
};
|
||||
}
|
||||
return () => {};
|
||||
@@ -84,6 +121,25 @@
|
||||
allResultsInOneTab = value;
|
||||
$allResultsInOneTabDefault = value;
|
||||
}
|
||||
|
||||
async function handleOpenChart(resultIndex) {
|
||||
const chartTab = _.find(allTabs, x => x.isChart && x.resultIndex === resultIndex);
|
||||
if (chartTab) {
|
||||
domTabs.setValue(_.findIndex(allTabs, x => x.isChart && x.resultIndex === resultIndex));
|
||||
} else {
|
||||
charts = [
|
||||
...charts,
|
||||
{
|
||||
jslid: resultInfos[resultIndex].jslid,
|
||||
charts: [],
|
||||
resultIndex,
|
||||
},
|
||||
];
|
||||
await tick();
|
||||
domTabs.setValue(_.findIndex(allTabs, x => x.isChart && x.resultIndex === resultIndex));
|
||||
}
|
||||
onSetFrontMatterField?.('selected-chart', resultIndex + 1);
|
||||
}
|
||||
</script>
|
||||
|
||||
<TabControl
|
||||
@@ -94,6 +150,13 @@
|
||||
? { text: 'Every result in single tab', onClick: () => setOneTabValue(false) }
|
||||
: { text: 'All results in one tab', onClick: () => setOneTabValue(true) },
|
||||
]}
|
||||
onUserChange={value => {
|
||||
if (allTabs[value].isChart) {
|
||||
onSetFrontMatterField?.(`selected-chart`, allTabs[value].resultIndex + 1);
|
||||
} else {
|
||||
onSetFrontMatterField?.(`selected-chart`, undefined);
|
||||
}
|
||||
}}
|
||||
>
|
||||
<slot name="0" slot="0" />
|
||||
<slot name="1" slot="1" />
|
||||
|
||||
@@ -227,6 +227,12 @@ ORDER BY
|
||||
</FormFieldTemplateLarge>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<FormTextField
|
||||
name="sqlEditor.limitRows"
|
||||
label="Return only N rows from query"
|
||||
placeholder="(No rows limit)"
|
||||
/>
|
||||
</svelte:fragment>
|
||||
<svelte:fragment slot="2">
|
||||
<div class="heading">Connection</div>
|
||||
|
||||
@@ -182,6 +182,10 @@ export const focusedConnectionOrDatabase = writable<{ conid: string; database?:
|
||||
|
||||
export const focusedTreeDbKey = writable<{ key: string; root: string; type: string; text: string }>(null);
|
||||
|
||||
export const cloudSigninTokenHolder = writableSettingsValue(null, 'cloudSigninTokenHolder');
|
||||
|
||||
export const cloudConnectionsStore = writable({});
|
||||
|
||||
export const DEFAULT_OBJECT_SEARCH_SETTINGS = {
|
||||
pureName: true,
|
||||
schemaName: false,
|
||||
@@ -453,4 +457,10 @@ focusedTreeDbKey.subscribe(value => {
|
||||
});
|
||||
export const getFocusedTreeDbKey = () => focusedTreeDbKeyValue;
|
||||
|
||||
let cloudConnectionsStoreValue = {};
|
||||
cloudConnectionsStore.subscribe(value => {
|
||||
cloudConnectionsStoreValue = value;
|
||||
});
|
||||
export const getCloudConnectionsStore = () => cloudConnectionsStoreValue;
|
||||
|
||||
window['__changeCurrentTheme'] = theme => currentTheme.set(theme);
|
||||
|
||||
@@ -7,9 +7,10 @@
|
||||
import LoadingInfo from '../elements/LoadingInfo.svelte';
|
||||
import Markdown from '../elements/Markdown.svelte';
|
||||
import { apiCall } from '../utility/api';
|
||||
import _ from 'lodash';
|
||||
|
||||
let isLoading = false;
|
||||
let text = null;
|
||||
let text = '';
|
||||
|
||||
const handleLoad = async () => {
|
||||
isLoading = true;
|
||||
@@ -27,7 +28,7 @@
|
||||
<LoadingInfo message="Loading changelog" />
|
||||
{:else}
|
||||
<div>
|
||||
<Markdown source={text || ''} />
|
||||
<Markdown source={_.isString(text) ? text: ''} />
|
||||
</div>
|
||||
{/if}
|
||||
|
||||
|
||||
@@ -1,118 +0,0 @@
|
||||
<script lang="ts" context="module">
|
||||
const getCurrentEditor = () => getActiveComponent('ChartTab');
|
||||
|
||||
registerFileCommands({
|
||||
idPrefix: 'chart',
|
||||
category: 'Chart',
|
||||
getCurrentEditor,
|
||||
folder: 'charts',
|
||||
format: 'json',
|
||||
fileExtension: 'chart',
|
||||
|
||||
undoRedo: true,
|
||||
});
|
||||
</script>
|
||||
|
||||
<script lang="ts">
|
||||
import _ from 'lodash';
|
||||
import { derived } from 'svelte/store';
|
||||
import ChartEditor from '../charts/ChartEditor.svelte';
|
||||
import invalidateCommands from '../commands/invalidateCommands';
|
||||
import registerCommand from '../commands/registerCommand';
|
||||
import { registerFileCommands } from '../commands/stdCommands';
|
||||
|
||||
import ErrorInfo from '../elements/ErrorInfo.svelte';
|
||||
|
||||
import LoadingInfo from '../elements/LoadingInfo.svelte';
|
||||
|
||||
import useEditorData from '../query/useEditorData';
|
||||
import { getContextMenu, registerMenu } from '../utility/contextMenu';
|
||||
import createActivator, { getActiveComponent } from '../utility/createActivator';
|
||||
import createUndoReducer from '../utility/createUndoReducer';
|
||||
import resolveApi from '../utility/resolveApi';
|
||||
|
||||
export let tabid;
|
||||
export let conid;
|
||||
export let database;
|
||||
|
||||
export const activator = createActivator('ChartTab', true);
|
||||
|
||||
export function getData() {
|
||||
return $editorState.value || '';
|
||||
}
|
||||
|
||||
const { editorState, editorValue, setEditorData } = useEditorData({
|
||||
tabid,
|
||||
onInitialData: value => {
|
||||
dispatchModel({ type: 'reset', value });
|
||||
},
|
||||
});
|
||||
|
||||
const [modelState, dispatchModel] = createUndoReducer({
|
||||
tables: [],
|
||||
references: [],
|
||||
columns: [],
|
||||
});
|
||||
|
||||
$: setEditorData($modelState.value);
|
||||
|
||||
$: {
|
||||
$modelState;
|
||||
invalidateCommands();
|
||||
}
|
||||
|
||||
const setConfig = config =>
|
||||
// @ts-ignore
|
||||
dispatchModel({
|
||||
type: 'compute',
|
||||
compute: v => ({ ...v, config: _.isFunction(config) ? config(v.config) : config }),
|
||||
});
|
||||
|
||||
const configDerivedStore = derived(modelState, ($modelState: any) =>
|
||||
$modelState.value ? $modelState.value.config || {} : {}
|
||||
);
|
||||
const configStore = {
|
||||
...configDerivedStore,
|
||||
update: setConfig,
|
||||
set: setConfig,
|
||||
};
|
||||
|
||||
export function canUndo() {
|
||||
return $modelState.canUndo;
|
||||
}
|
||||
|
||||
export function undo() {
|
||||
dispatchModel({ type: 'undo' });
|
||||
}
|
||||
|
||||
export function canRedo() {
|
||||
return $modelState.canRedo;
|
||||
}
|
||||
|
||||
export function redo() {
|
||||
dispatchModel({ type: 'redo' });
|
||||
}
|
||||
|
||||
registerMenu(
|
||||
{ command: 'chart.save' },
|
||||
{ command: 'chart.saveAs' },
|
||||
{ placeTag: 'export' },
|
||||
{ divider: true },
|
||||
{ command: 'chart.undo' },
|
||||
{ command: 'chart.redo' }
|
||||
);
|
||||
</script>
|
||||
|
||||
{#if $editorState.isLoading}
|
||||
<LoadingInfo wrapper message="Loading data" />
|
||||
{:else if $editorState.errorMessage}
|
||||
<ErrorInfo message={$editorState.errorMessage} />
|
||||
{:else}
|
||||
<ChartEditor
|
||||
data={$modelState.value && $modelState.value.data}
|
||||
{configStore}
|
||||
sql={$modelState.value && $modelState.value.sql}
|
||||
{conid}
|
||||
{database}
|
||||
/>
|
||||
{/if}
|
||||
@@ -36,6 +36,7 @@
|
||||
import ConnectionAdvancedDriverFields from '../settings/ConnectionAdvancedDriverFields.svelte';
|
||||
import DatabaseLoginModal from '../modals/DatabaseLoginModal.svelte';
|
||||
import { _t } from '../translations';
|
||||
import ChooseCloudFolderModal from '../modals/ChooseCloudFolderModal.svelte';
|
||||
|
||||
export let connection;
|
||||
export let tabid;
|
||||
@@ -44,6 +45,7 @@
|
||||
export let inlineTabs = false;
|
||||
|
||||
export let onlyTestButton;
|
||||
export let saveOnCloud = false;
|
||||
|
||||
let isTesting;
|
||||
let sqlConnectResult;
|
||||
@@ -157,6 +159,45 @@
|
||||
$: currentConnection = getCurrentConnectionCore($values, driver);
|
||||
|
||||
async function handleSave() {
|
||||
if (saveOnCloud && !getCurrentConnection()?._id) {
|
||||
showModal(ChooseCloudFolderModal, {
|
||||
requiredRoleVariants: ['write', 'admin'],
|
||||
message: 'Choose cloud folder to saved connection',
|
||||
onConfirm: async folid => {
|
||||
let connection = getCurrentConnection();
|
||||
const saved = await apiCall('cloud/save-connection', { folid, connection });
|
||||
if (saved?._id) {
|
||||
$values = {
|
||||
...$values,
|
||||
_id: saved._id,
|
||||
unsaved: false,
|
||||
};
|
||||
changeTab(tabid, tab => ({
|
||||
...tab,
|
||||
title: getConnectionLabel(saved),
|
||||
props: {
|
||||
...tab.props,
|
||||
conid: saved._id,
|
||||
},
|
||||
}));
|
||||
showSnackbarSuccess('Connection saved');
|
||||
}
|
||||
},
|
||||
});
|
||||
} else if (
|
||||
// @ts-ignore
|
||||
getCurrentConnection()?._id?.startsWith('cloud://')
|
||||
) {
|
||||
let connection = getCurrentConnection();
|
||||
const resp = await apiCall('cloud/save-connection', { connection });
|
||||
if (resp?._id) {
|
||||
showSnackbarSuccess('Connection saved');
|
||||
changeTab(tabid, tab => ({
|
||||
...tab,
|
||||
title: getConnectionLabel(connection),
|
||||
}));
|
||||
}
|
||||
} else {
|
||||
let connection = getCurrentConnection();
|
||||
connection = {
|
||||
...connection,
|
||||
@@ -178,9 +219,22 @@
|
||||
}));
|
||||
showSnackbarSuccess('Connection saved');
|
||||
}
|
||||
}
|
||||
|
||||
async function handleConnect() {
|
||||
let connection = getCurrentConnection();
|
||||
|
||||
if (
|
||||
// @ts-ignore
|
||||
connection?._id?.startsWith('cloud://')
|
||||
) {
|
||||
const saved = await apiCall('cloud/save-connection', { connection });
|
||||
changeTab(tabid, tab => ({
|
||||
...tab,
|
||||
title: getConnectionLabel(connection),
|
||||
}));
|
||||
openConnection(saved);
|
||||
} else {
|
||||
if (!connection._id) {
|
||||
connection = {
|
||||
...connection,
|
||||
@@ -194,6 +248,7 @@
|
||||
_id: saved._id,
|
||||
};
|
||||
openConnection(saved);
|
||||
}
|
||||
// closeMultipleTabs(x => x.tabid == tabid, true);
|
||||
}
|
||||
|
||||
@@ -287,7 +342,9 @@
|
||||
{:else if isConnected}
|
||||
<FormButton value="Disconnect" on:click={handleDisconnect} data-testid="ConnectionTab_buttonDisconnect" />
|
||||
{:else}
|
||||
{#if $values._id || !saveOnCloud}
|
||||
<FormButton value="Connect" on:click={handleConnect} data-testid="ConnectionTab_buttonConnect" />
|
||||
{/if}
|
||||
{#if isTesting}
|
||||
<FormButton value="Cancel test" on:click={handleCancelTest} />
|
||||
{:else}
|
||||
|
||||
@@ -1,250 +0,0 @@
|
||||
<script lang="ts" context="module">
|
||||
const getCurrentEditor = () => getActiveComponent('ProfilerTab');
|
||||
|
||||
registerCommand({
|
||||
id: 'profiler.start',
|
||||
category: 'Profiler',
|
||||
name: 'Start profiling',
|
||||
icon: 'icon play',
|
||||
testEnabled: () => getCurrentEditor()?.startProfilingEnabled(),
|
||||
onClick: () => getCurrentEditor().startProfiling(),
|
||||
});
|
||||
|
||||
registerCommand({
|
||||
id: 'profiler.stop',
|
||||
category: 'Profiler',
|
||||
name: 'Stop profiling',
|
||||
icon: 'icon play-stop',
|
||||
testEnabled: () => getCurrentEditor()?.stopProfilingEnabled(),
|
||||
onClick: () => getCurrentEditor().stopProfiling(),
|
||||
});
|
||||
|
||||
registerCommand({
|
||||
id: 'profiler.save',
|
||||
category: 'Profiler',
|
||||
name: 'Save',
|
||||
icon: 'icon save',
|
||||
testEnabled: () => getCurrentEditor()?.saveEnabled(),
|
||||
onClick: () => getCurrentEditor().save(),
|
||||
});
|
||||
</script>
|
||||
|
||||
<script>
|
||||
import { findEngineDriver } from 'dbgate-tools';
|
||||
|
||||
import { onDestroy, onMount } from 'svelte';
|
||||
|
||||
import ToolStripCommandButton from '../buttons/ToolStripCommandButton.svelte';
|
||||
import ToolStripContainer from '../buttons/ToolStripContainer.svelte';
|
||||
import invalidateCommands from '../commands/invalidateCommands';
|
||||
import registerCommand from '../commands/registerCommand';
|
||||
import JslDataGrid from '../datagrid/JslDataGrid.svelte';
|
||||
import ErrorInfo from '../elements/ErrorInfo.svelte';
|
||||
import VerticalSplitter from '../elements/VerticalSplitter.svelte';
|
||||
import { showModal } from '../modals/modalTools';
|
||||
import SaveArchiveModal from '../modals/SaveArchiveModal.svelte';
|
||||
import { currentArchive, selectedWidget } from '../stores';
|
||||
import { apiCall } from '../utility/api';
|
||||
import createActivator, { getActiveComponent } from '../utility/createActivator';
|
||||
import { useConnectionInfo } from '../utility/metadataLoaders';
|
||||
import { extensions } from '../stores';
|
||||
import ChartCore from '../charts/ChartCore.svelte';
|
||||
import LoadingInfo from '../elements/LoadingInfo.svelte';
|
||||
import randomcolor from 'randomcolor';
|
||||
|
||||
export const activator = createActivator('ProfilerTab', true);
|
||||
|
||||
export let conid;
|
||||
export let database;
|
||||
export let engine;
|
||||
export let jslidLoad;
|
||||
|
||||
let jslidSession;
|
||||
|
||||
let isProfiling = false;
|
||||
let sessionId;
|
||||
let isLoadingChart = false;
|
||||
|
||||
let intervalId;
|
||||
let chartData;
|
||||
|
||||
$: connection = useConnectionInfo({ conid });
|
||||
$: driver = findEngineDriver(engine || $connection, $extensions);
|
||||
$: jslid = jslidSession || jslidLoad;
|
||||
|
||||
onMount(() => {
|
||||
intervalId = setInterval(() => {
|
||||
if (sessionId) {
|
||||
apiCall('sessions/ping', {
|
||||
sesid: sessionId,
|
||||
});
|
||||
}
|
||||
}, 15 * 1000);
|
||||
});
|
||||
|
||||
$: {
|
||||
if (jslidLoad && driver) {
|
||||
loadChart();
|
||||
}
|
||||
}
|
||||
|
||||
onDestroy(() => {
|
||||
clearInterval(intervalId);
|
||||
});
|
||||
|
||||
export async function startProfiling() {
|
||||
isProfiling = true;
|
||||
|
||||
let sesid = sessionId;
|
||||
if (!sesid) {
|
||||
const resp = await apiCall('sessions/create', {
|
||||
conid,
|
||||
database,
|
||||
});
|
||||
sesid = resp.sesid;
|
||||
sessionId = sesid;
|
||||
}
|
||||
|
||||
const resp = await apiCall('sessions/start-profiler', {
|
||||
sesid,
|
||||
});
|
||||
jslidSession = resp.jslid;
|
||||
|
||||
invalidateCommands();
|
||||
}
|
||||
|
||||
export function startProfilingEnabled() {
|
||||
return conid && database && !isProfiling;
|
||||
}
|
||||
|
||||
async function loadChart() {
|
||||
isLoadingChart = true;
|
||||
|
||||
const colors = randomcolor({
|
||||
count: driver.profilerChartMeasures.length,
|
||||
seed: 5,
|
||||
});
|
||||
|
||||
const data = await apiCall('jsldata/extract-timeline-chart', {
|
||||
jslid,
|
||||
timestampFunction: driver.profilerTimestampFunction,
|
||||
aggregateFunction: driver.profilerChartAggregateFunction,
|
||||
measures: driver.profilerChartMeasures,
|
||||
});
|
||||
chartData = {
|
||||
...data,
|
||||
labels: data.labels.map(x => new Date(x)),
|
||||
datasets: data.datasets.map((x, i) => ({
|
||||
...x,
|
||||
borderColor: colors[i],
|
||||
})),
|
||||
};
|
||||
isLoadingChart = false;
|
||||
}
|
||||
|
||||
export async function stopProfiling() {
|
||||
isProfiling = false;
|
||||
await apiCall('sessions/stop-profiler', { sesid: sessionId });
|
||||
await apiCall('sessions/kill', { sesid: sessionId });
|
||||
sessionId = null;
|
||||
|
||||
invalidateCommands();
|
||||
|
||||
loadChart();
|
||||
}
|
||||
|
||||
export function stopProfilingEnabled() {
|
||||
return conid && database && isProfiling;
|
||||
}
|
||||
|
||||
export function saveEnabled() {
|
||||
return !!jslidSession;
|
||||
}
|
||||
|
||||
async function doSave(folder, file) {
|
||||
await apiCall('archive/save-jsl-data', { folder, file, jslid });
|
||||
currentArchive.set(folder);
|
||||
selectedWidget.set('archive');
|
||||
}
|
||||
|
||||
export function save() {
|
||||
showModal(SaveArchiveModal, {
|
||||
// folder: archiveFolder,
|
||||
// file: archiveFile,
|
||||
onSave: doSave,
|
||||
});
|
||||
}
|
||||
|
||||
// const data = [
|
||||
// { year: 2010, count: 10 },
|
||||
// { year: 2011, count: 20 },
|
||||
// { year: 2012, count: 15 },
|
||||
// { year: 2013, count: 25 },
|
||||
// { year: 2014, count: 22 },
|
||||
// { year: 2015, count: 30 },
|
||||
// { year: 2016, count: 28 },
|
||||
// ];
|
||||
// {
|
||||
// labels: data.map(row => row.year),
|
||||
// datasets: [
|
||||
// {
|
||||
// label: 'Acquisitions by year',
|
||||
// data: data.map(row => row.count),
|
||||
// },
|
||||
// ],
|
||||
// }
|
||||
</script>
|
||||
|
||||
<ToolStripContainer>
|
||||
{#if jslid}
|
||||
<VerticalSplitter allowCollapseChild1 allowCollapseChild2>
|
||||
<svelte:fragment slot="1">
|
||||
{#key jslid}
|
||||
<JslDataGrid {jslid} listenInitializeFile formatterFunction={driver?.profilerFormatterFunction} />
|
||||
{/key}
|
||||
</svelte:fragment>
|
||||
<svelte:fragment slot="2">
|
||||
{#if isLoadingChart}
|
||||
<LoadingInfo wrapper message="Loading chart" />
|
||||
{:else}
|
||||
<ChartCore
|
||||
title="Profile data"
|
||||
data={chartData}
|
||||
options={{
|
||||
maintainAspectRatio: false,
|
||||
scales: {
|
||||
x: {
|
||||
type: 'time',
|
||||
distribution: 'linear',
|
||||
|
||||
time: {
|
||||
tooltipFormat: 'D. M. YYYY HH:mm',
|
||||
displayFormats: {
|
||||
millisecond: 'HH:mm:ss.SSS',
|
||||
second: 'HH:mm:ss',
|
||||
minute: 'HH:mm',
|
||||
hour: 'D.M hA',
|
||||
day: 'D. M.',
|
||||
week: 'D. M. YYYY',
|
||||
month: 'MM-YYYY',
|
||||
quarter: '[Q]Q - YYYY',
|
||||
year: 'YYYY',
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}}
|
||||
/>
|
||||
{/if}
|
||||
</svelte:fragment>
|
||||
</VerticalSplitter>
|
||||
{:else}
|
||||
<ErrorInfo message="Profiler not yet started" alignTop />
|
||||
{/if}
|
||||
|
||||
<svelte:fragment slot="toolstrip">
|
||||
<ToolStripCommandButton command="profiler.start" />
|
||||
<ToolStripCommandButton command="profiler.stop" />
|
||||
<ToolStripCommandButton command="profiler.save" />
|
||||
</svelte:fragment>
|
||||
</ToolStripContainer>
|
||||
@@ -1,6 +1,7 @@
|
||||
<script lang="ts" context="module">
|
||||
import registerCommand from '../commands/registerCommand';
|
||||
import { copyTextToClipboard } from '../utility/clipboard';
|
||||
import yaml from 'js-yaml';
|
||||
|
||||
const getCurrentEditor = () => getActiveComponent('QueryTab');
|
||||
|
||||
@@ -60,6 +61,13 @@
|
||||
getCurrentEditor() != null && !getCurrentEditor()?.isBusy() && getCurrentEditor()?.hasConnection(),
|
||||
onClick: () => getCurrentEditor().executeCurrent(),
|
||||
});
|
||||
registerCommand({
|
||||
id: 'query.toggleAutoExecute',
|
||||
category: 'Query',
|
||||
name: 'Toggle auto execute',
|
||||
testEnabled: () => getCurrentEditor() != null,
|
||||
onClick: () => getCurrentEditor().toggleAutoExecute(),
|
||||
});
|
||||
registerCommand({
|
||||
id: 'query.beginTransaction',
|
||||
category: 'Query',
|
||||
@@ -126,7 +134,7 @@
|
||||
import InsertJoinModal from '../modals/InsertJoinModal.svelte';
|
||||
import useTimerLabel from '../utility/useTimerLabel';
|
||||
import createActivator, { getActiveComponent } from '../utility/createActivator';
|
||||
import { findEngineDriver, safeJsonParse } from 'dbgate-tools';
|
||||
import { findEngineDriver, getSqlFrontMatter, safeJsonParse, setSqlFrontMatter } from 'dbgate-tools';
|
||||
import AceEditor from '../query/AceEditor.svelte';
|
||||
import StatusBarTabItem from '../widgets/StatusBarTabItem.svelte';
|
||||
import { showSnackbarError } from '../utility/snackbar';
|
||||
@@ -144,6 +152,10 @@
|
||||
import HorizontalSplitter from '../elements/HorizontalSplitter.svelte';
|
||||
import QueryAiAssistant from '../query/QueryAiAssistant.svelte';
|
||||
import uuidv1 from 'uuid/v1';
|
||||
import ToolStripButton from '../buttons/ToolStripButton.svelte';
|
||||
import { getIntSettingsValue } from '../settings/settingsTools';
|
||||
import RowsLimitModal from '../modals/RowsLimitModal.svelte';
|
||||
import _ from 'lodash';
|
||||
|
||||
export let tabid;
|
||||
export let conid;
|
||||
@@ -196,6 +208,22 @@
|
||||
let domAiAssistant;
|
||||
let isInTransaction = false;
|
||||
let isAutocommit = false;
|
||||
let splitterInitialValue = undefined;
|
||||
|
||||
const queryRowsLimitLocalStorageKey = `tabdata_limitRows_${tabid}`;
|
||||
function getInitialRowsLimit() {
|
||||
const storageValue = localStorage.getItem(queryRowsLimitLocalStorageKey);
|
||||
if (storageValue == 'nolimit') {
|
||||
return null;
|
||||
}
|
||||
if (storageValue) {
|
||||
return parseInt(storageValue) ?? null;
|
||||
}
|
||||
return getIntSettingsValue('sqlEditor.limitRows', null, 1);
|
||||
}
|
||||
|
||||
let queryRowsLimit = getInitialRowsLimit();
|
||||
$: localStorage.setItem(queryRowsLimitLocalStorageKey, queryRowsLimit ? queryRowsLimit.toString() : 'nolimit');
|
||||
|
||||
onMount(() => {
|
||||
intervalId = setInterval(() => {
|
||||
@@ -332,6 +360,7 @@
|
||||
executeStartLine = startLine;
|
||||
executeNumber++;
|
||||
visibleResultTabs = true;
|
||||
const frontMatter = getSqlFrontMatter($editorValue, yaml);
|
||||
|
||||
busy = true;
|
||||
timerLabel.start();
|
||||
@@ -362,6 +391,8 @@
|
||||
sesid,
|
||||
sql,
|
||||
autoCommit: driver?.implicitTransactions && isAutocommit,
|
||||
limitRows: queryRowsLimit ? queryRowsLimit : undefined,
|
||||
frontMatter,
|
||||
});
|
||||
}
|
||||
await apiCall('query-history/write', {
|
||||
@@ -531,12 +562,47 @@
|
||||
initialArgs && initialArgs.scriptTemplate
|
||||
? () => applyScriptTemplate(initialArgs.scriptTemplate, $extensions, $$props)
|
||||
: null,
|
||||
|
||||
onInitialData: value => {
|
||||
const frontMatter = getSqlFrontMatter(value, yaml);
|
||||
if (frontMatter?.autoExecute) {
|
||||
executeCore(value, 0);
|
||||
}
|
||||
if (frontMatter?.splitterInitialValue) {
|
||||
splitterInitialValue = frontMatter.splitterInitialValue;
|
||||
}
|
||||
},
|
||||
});
|
||||
|
||||
function handleChangeErrors(errors) {
|
||||
errorMessages = errors;
|
||||
}
|
||||
|
||||
function handleSetFrontMatterField(field, value) {
|
||||
const text = $editorValue;
|
||||
setEditorData(
|
||||
setSqlFrontMatter(
|
||||
text,
|
||||
{
|
||||
...getSqlFrontMatter(text, yaml),
|
||||
[field]: value,
|
||||
},
|
||||
yaml
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
export function toggleAutoExecute() {
|
||||
const frontMatter = getSqlFrontMatter($editorValue, yaml);
|
||||
setEditorData(
|
||||
setSqlFrontMatter(
|
||||
$editorValue,
|
||||
{ ...frontMatter, autoExecute: frontMatter?.autoExecute ? undefined : true },
|
||||
yaml
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
async function handleKeyDown(event) {
|
||||
if (isProApp()) {
|
||||
if (event.code == 'Space' && event.shiftKey && event.ctrlKey && !isAiAssistantVisible) {
|
||||
@@ -565,6 +631,7 @@
|
||||
{ command: 'query.execute' },
|
||||
{ command: 'query.executeCurrent' },
|
||||
{ command: 'query.kill' },
|
||||
{ command: 'query.toggleAutoExecute' },
|
||||
{ divider: true },
|
||||
{ command: 'query.toggleComment' },
|
||||
{ command: 'query.formatCode' },
|
||||
@@ -606,7 +673,7 @@
|
||||
<ToolStripContainer bind:this={domToolStrip}>
|
||||
<HorizontalSplitter isSplitter={isAiAssistantVisible} initialSizeRight={300}>
|
||||
<svelte:fragment slot="1">
|
||||
<VerticalSplitter isSplitter={visibleResultTabs}>
|
||||
<VerticalSplitter isSplitter={visibleResultTabs} initialValue={splitterInitialValue}>
|
||||
<svelte:fragment slot="1">
|
||||
{#if driver?.databaseEngineTypes?.includes('sql')}
|
||||
<SqlEditor
|
||||
@@ -659,7 +726,15 @@
|
||||
{/if}
|
||||
</svelte:fragment>
|
||||
<svelte:fragment slot="2">
|
||||
<ResultTabs tabs={[{ label: 'Messages', slot: 0 }]} {sessionId} {executeNumber} bind:resultCount {driver}>
|
||||
<ResultTabs
|
||||
tabs={[{ label: 'Messages', slot: 0 }]}
|
||||
{sessionId}
|
||||
{executeNumber}
|
||||
bind:resultCount
|
||||
{driver}
|
||||
onSetFrontMatterField={handleSetFrontMatterField}
|
||||
onGetFrontMatter={() => getSqlFrontMatter($editorValue, yaml)}
|
||||
>
|
||||
<svelte:fragment slot="0">
|
||||
<SocketMessageView
|
||||
eventName={sessionId ? `session-info-${sessionId}` : null}
|
||||
@@ -713,6 +788,20 @@
|
||||
<ToolStripCommandButton command="query.kill" data-testid="QueryTab_killButton" />
|
||||
<ToolStripSaveButton idPrefix="query" />
|
||||
<ToolStripCommandButton command="query.formatCode" />
|
||||
{#if !driver?.singleConnectionOnly}
|
||||
<ToolStripButton
|
||||
icon="icon limit"
|
||||
on:click={() =>
|
||||
showModal(RowsLimitModal, {
|
||||
value: queryRowsLimit,
|
||||
onConfirm: value => {
|
||||
queryRowsLimit = value;
|
||||
},
|
||||
})}
|
||||
>
|
||||
{queryRowsLimit ? `Limit ${queryRowsLimit} rows` : 'Unlimited rows'}</ToolStripButton
|
||||
>
|
||||
{/if}
|
||||
{#if resultCount == 1}
|
||||
<ToolStripExportButton command="jslTableGrid.export" {quickExportHandlerRef} label="Export result" />
|
||||
{/if}
|
||||
|
||||
@@ -6,7 +6,6 @@ import * as QueryTab from './QueryTab.svelte';
|
||||
import * as ShellTab from './ShellTab.svelte';
|
||||
import * as ArchiveFileTab from './ArchiveFileTab.svelte';
|
||||
import * as PluginTab from './PluginTab.svelte';
|
||||
import * as ChartTab from './ChartTab.svelte';
|
||||
import * as MarkdownEditorTab from './MarkdownEditorTab.svelte';
|
||||
import * as MarkdownViewTab from './MarkdownViewTab.svelte';
|
||||
import * as MarkdownPreviewTab from './MarkdownPreviewTab.svelte';
|
||||
@@ -23,7 +22,6 @@ import * as QueryDataTab from './QueryDataTab.svelte';
|
||||
import * as ConnectionTab from './ConnectionTab.svelte';
|
||||
import * as MapTab from './MapTab.svelte';
|
||||
import * as ServerSummaryTab from './ServerSummaryTab.svelte';
|
||||
import * as ProfilerTab from './ProfilerTab.svelte';
|
||||
import * as ImportExportTab from './ImportExportTab.svelte';
|
||||
import * as SqlObjectTab from './SqlObjectTab.svelte';
|
||||
|
||||
@@ -38,7 +36,6 @@ export default {
|
||||
ShellTab,
|
||||
ArchiveFileTab,
|
||||
PluginTab,
|
||||
ChartTab,
|
||||
MarkdownEditorTab,
|
||||
MarkdownViewTab,
|
||||
MarkdownPreviewTab,
|
||||
@@ -55,7 +52,6 @@ export default {
|
||||
ConnectionTab,
|
||||
MapTab,
|
||||
ServerSummaryTab,
|
||||
ProfilerTab,
|
||||
ImportExportTab,
|
||||
SqlObjectTab,
|
||||
...protabs,
|
||||
|
||||
@@ -13,6 +13,9 @@ import { callServerPing } from './connectionsPinger';
|
||||
import { batchDispatchCacheTriggers, dispatchCacheChange } from './cache';
|
||||
import { isAdminPage, isOneOfPage } from './pageDefs';
|
||||
import { openWebLink } from './simpleTools';
|
||||
import { serializeJsTypesReplacer } from 'dbgate-tools';
|
||||
import { cloudSigninTokenHolder } from '../stores';
|
||||
import LicenseLimitMessageModal from '../modals/LicenseLimitMessageModal.svelte';
|
||||
|
||||
export const strmid = uuidv1();
|
||||
|
||||
@@ -119,7 +122,14 @@ async function processApiResponse(route, args, resp) {
|
||||
// missingCredentials: true,
|
||||
// };
|
||||
} else if (resp?.apiErrorMessage) {
|
||||
if (resp?.apiErrorIsLicenseLimit) {
|
||||
showModal(LicenseLimitMessageModal, {
|
||||
message: resp.apiErrorMessage,
|
||||
licenseLimits: resp.apiErrorLimitedLicenseLimits,
|
||||
});
|
||||
} else {
|
||||
showSnackbarError('API error:' + resp?.apiErrorMessage);
|
||||
}
|
||||
return {
|
||||
errorMessage: resp.apiErrorMessage,
|
||||
};
|
||||
@@ -177,7 +187,7 @@ export async function apiCall(
|
||||
'Content-Type': 'application/json',
|
||||
...resolveApiHeaders(),
|
||||
},
|
||||
body: JSON.stringify(args),
|
||||
body: JSON.stringify(args, serializeJsTypesReplacer),
|
||||
});
|
||||
|
||||
if (resp.status == 401 && !apiDisabled) {
|
||||
@@ -278,6 +288,13 @@ export function installNewVolatileConnectionListener() {
|
||||
});
|
||||
}
|
||||
|
||||
export function installNewCloudTokenListener() {
|
||||
apiOn('got-cloud-token', async tokenHolder => {
|
||||
console.log('HOLDER', tokenHolder);
|
||||
cloudSigninTokenHolder.set(tokenHolder);
|
||||
});
|
||||
}
|
||||
|
||||
export function getAuthCategory(config) {
|
||||
if (config.isBasicAuth) {
|
||||
return 'basic';
|
||||
@@ -291,6 +308,15 @@ export function getAuthCategory(config) {
|
||||
return 'token';
|
||||
}
|
||||
|
||||
export function refreshPublicCloudFiles() {
|
||||
if (sessionStorage.getItem('publicCloudFilesLoaded')) {
|
||||
return;
|
||||
}
|
||||
|
||||
apiCall('cloud/refresh-public-files');
|
||||
sessionStorage.setItem('publicCloudFilesLoaded', 'true');
|
||||
}
|
||||
|
||||
function enableApiLog() {
|
||||
apiLogging = true;
|
||||
console.log('API loggin enabled');
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user