Merge branch 'feature/mongosh'

This commit is contained in:
SPRINX0\prochazka
2025-07-30 10:45:02 +02:00
21 changed files with 2179 additions and 282 deletions

View File

@@ -22,10 +22,10 @@ jobs:
- uses: actions/checkout@v2
with:
fetch-depth: 1
- name: Use Node.js 18.x
- name: Use Node.js 22.x
uses: actions/setup-node@v1
with:
node-version: 18.x
node-version: 22.x
- name: Install jq
run: |
sudo apt-get install jq -y

View File

@@ -22,10 +22,10 @@ jobs:
- uses: actions/checkout@v2
with:
fetch-depth: 1
- name: Use Node.js 18.x
- name: Use Node.js 22.x
uses: actions/setup-node@v1
with:
node-version: 18.x
node-version: 22.x
- name: Checkout dbgate/dbgate-pro
uses: actions/checkout@v2
with:

View File

@@ -22,10 +22,10 @@ jobs:
- uses: actions/checkout@v2
with:
fetch-depth: 1
- name: Use Node.js 18.x
- name: Use Node.js 22.x
uses: actions/setup-node@v1
with:
node-version: 18.x
node-version: 22.x
- name: Configure NPM token
env:
NPM_TOKEN: ${{ secrets.NPM_TOKEN }}

View File

@@ -13,10 +13,10 @@ jobs:
e2e-tests:
runs-on: ubuntu-latest
steps:
- name: Use Node.js 18.x
- name: Use Node.js 22.x
uses: actions/setup-node@v1
with:
node-version: 18.x
node-version: 22.x
- uses: actions/checkout@v3
with:
fetch-depth: 1

View File

@@ -13,10 +13,10 @@ jobs:
all-tests:
runs-on: ubuntu-latest
steps:
- name: Use Node.js 18.x
- name: Use Node.js 22.x
uses: actions/setup-node@v1
with:
node-version: 18.x
node-version: 22.x
- uses: actions/checkout@v3
with:
fetch-depth: 1

2
.nvmrc
View File

@@ -1 +1 @@
v21.7.3
v24.4.1

View File

@@ -43,6 +43,8 @@ function adjustFile(file, isApp = false) {
if (process.argv.includes('--community')) {
delete json.optionalDependencies['mongodb-client-encryption'];
delete json.dependencies['@mongosh/service-provider-node-driver'];
delete json.dependencies['@mongosh/browser-runtime-electron'];
}
if (isApp && process.argv.includes('--premium')) {

View File

@@ -22,6 +22,8 @@ const volatilePackages = [
'ssh2',
'wkx',
'@duckdb/node-api',
'@mongosh/browser-runtime-electron',
'@mongosh/service-provider-node-driver',
];
module.exports = volatilePackages;

View File

@@ -1,6 +1,6 @@
{
"private": true,
"version": "6.6.0",
"version": "6.6.1-premium-beta.5",
"name": "dbgate-all",
"workspaces": [
"packages/*",

View File

@@ -3,6 +3,7 @@ const os = require('os');
const path = require('path');
const processArgs = require('./processArgs');
const isElectron = require('is-electron');
const { isProApp } = require('./checkLicense');
const platform = process.env.OS_OVERRIDE ? process.env.OS_OVERRIDE : process.platform;
const isWindows = platform === 'win32';
@@ -59,6 +60,7 @@ const platformInfo = {
defaultKeyfile: path.join(os.homedir(), '.ssh/id_rsa'),
isAwsUbuntuLayout,
isAzureUbuntuLayout,
isProApp: isProApp()
};
module.exports = platformInfo;

View File

@@ -41,7 +41,9 @@
"dbgate-tools": "^6.0.0-alpha.1",
"is-promise": "^4.0.0",
"lodash": "^4.17.21",
"mongodb": "^6.3.0"
"mongodb": "^6.3.0",
"@mongosh/browser-runtime-electron": "^3.16.4",
"@mongosh/service-provider-node-driver": "^3.10.2"
},
"optionalDependencies": {
"mongodb-client-encryption": "^6.1.1"

View File

@@ -1,8 +1,8 @@
const _ = require('lodash');
const stream = require('stream');
const isPromise = require('is-promise');
const driverBase = require('../frontend/driver');
const Analyser = require('./Analyser');
const isPromise = require('is-promise');
const { MongoClient, ObjectId, AbstractCursor, Long } = require('mongodb');
const { EJSON } = require('bson');
const { serializeJsTypesForJsonStringify, deserializeJsTypesFromJsonParse } = require('dbgate-tools');
@@ -13,6 +13,8 @@ const {
convertToMongoSort,
} = require('../frontend/convertToMongoCondition');
let platformInfo;
function serializeMongoData(row) {
return EJSON.serialize(
serializeJsTypesForJsonStringify(row, (value) => {
@@ -48,6 +50,7 @@ function findArrayResult(resValue) {
async function getScriptableDb(dbhan) {
const db = dbhan.getDatabase();
db.getCollection = (name) => db.collection(name);
const collections = await db.listCollections().toArray();
for (const collection of collections) {
_.set(db, collection.name, db.collection(collection.name));
@@ -55,7 +58,29 @@ async function getScriptableDb(dbhan) {
return db;
}
/** @type {import('dbgate-types').EngineDriver} */
/**
* @param {string} uri
* @param {string} dbName
* @returns {string}
*/
/**
 * Ensures a MongoDB connection URI carries a database path segment.
 * If `uri` already names a database (non-empty pathname beyond "/"), it is
 * returned untouched; otherwise `dbName` is injected as the path.
 *
 * @param {string} uri - MongoDB connection string to inspect
 * @param {string} dbName - database name to inject when the URI has none
 * @returns {string} the original URI, or a copy with `/dbName` appended;
 *   on an unparseable URI the input is returned unchanged (best effort)
 */
function ensureDatabaseInMongoURI(uri, dbName) {
  if (!dbName) {
    return uri;
  }
  try {
    const parsed = new URL(uri);
    const currentPath = parsed.pathname;
    // A database is present when the pathname is more than just "/"
    const alreadyHasDatabase = currentPath && currentPath !== '/' && currentPath.length > 1;
    if (alreadyHasDatabase) {
      return uri;
    }
    parsed.pathname = `/${dbName}`;
    return parsed.toString();
  } catch (error) {
    // `logger` is provided by the enclosing module scope
    logger.error('Invalid URI format:', error.message);
    return uri;
  }
}
/** @type {import('dbgate-types').EngineDriver<MongoClient>} */
const driver = {
...driverBase,
analyserClass: Analyser,
@@ -105,11 +130,33 @@ const driver = {
};
},
async script(dbhan, sql) {
let func;
func = eval(`(db,ObjectId) => ${sql}`);
const db = await getScriptableDb(dbhan);
const res = func(db, ObjectId.createFromHexString);
if (isPromise(res)) await res;
if (platformInfo.isProApp) {
const { NodeDriverServiceProvider } = require('@mongosh/service-provider-node-driver');
const { ElectronRuntime } = require('@mongosh/browser-runtime-electron');
const connectionString = ensureDatabaseInMongoURI(dbhan.client.s.url, dbhan.database);
const serviceProvider = await NodeDriverServiceProvider.connect(connectionString);
const runtime = new ElectronRuntime(serviceProvider);
const exprValue = await runtime.evaluate(sql);
const { printable } = exprValue;
if (Array.isArray(printable)) {
return printable;
} else if ('documents' in printable) {
return printable.documents;
} else if ('cursor' in printable && 'firstBatch' in printable.cursor) {
return printable.cursor.firstBatch;
}
return printable;
} else {
let func;
func = eval(`(db,ObjectId) => ${sql}`);
const db = await getScriptableDb(dbhan);
const res = func(db, ObjectId.createFromHexString);
if (isPromise(res)) await res;
}
},
async operation(dbhan, operation, options) {
const { type } = operation;
@@ -118,15 +165,18 @@ const driver = {
await this.script(dbhan, `db.createCollection('${operation.collection.name}')`);
break;
case 'dropCollection':
await this.script(dbhan, `db.dropCollection('${operation.collection}')`);
await this.script(dbhan, `db.getCollection('${operation.collection}').drop()`);
break;
case 'renameCollection':
await this.script(dbhan, `db.renameCollection('${operation.collection}', '${operation.newName}')`);
await this.script(
dbhan,
`db.getCollection('${operation.collection}').renameCollection('${operation.newName}')`
);
break;
case 'cloneCollection':
await this.script(
dbhan,
`db.collection('${operation.collection}').aggregate([{$out: '${operation.newName}'}]).toArray()`
`db.getCollection('${operation.collection}').aggregate([{$out: '${operation.newName}'}]).toArray()`
);
break;
default:
@@ -135,75 +185,134 @@ const driver = {
// saveScriptToDatabase({ conid: connection._id, database: name }, `db.createCollection('${newCollection}')`);
},
async stream(dbhan, sql, options) {
let func;
try {
func = eval(`(db,ObjectId) => ${sql}`);
} catch (err) {
options.info({
message: 'Error compiling expression: ' + err.message,
time: new Date(),
severity: 'error',
});
options.done();
return;
}
const db = await getScriptableDb(dbhan);
if (platformInfo.isProApp) {
const { NodeDriverServiceProvider } = require('@mongosh/service-provider-node-driver');
const { ElectronRuntime } = require('@mongosh/browser-runtime-electron');
let exprValue;
try {
exprValue = func(db, ObjectId.createFromHexString);
} catch (err) {
options.info({
message: 'Error evaluating expression: ' + err.message,
time: new Date(),
severity: 'error',
});
options.done();
return;
}
let exprValue;
if (exprValue instanceof AbstractCursor) {
await readCursor(exprValue, options);
} else if (isPromise(exprValue)) {
try {
const resValue = await exprValue;
options.info({
message: 'Command succesfully executed',
time: new Date(),
severity: 'info',
});
try {
options.info({
message: `Result: ${JSON.stringify(resValue)}`,
time: new Date(),
severity: 'info',
});
} catch (err) {
options.info({
message: `Result: ${resValue}`,
time: new Date(),
severity: 'info',
});
}
const arrayRes = findArrayResult(resValue);
if (arrayRes) {
options.recordset({ __isDynamicStructure: true });
for (const row of arrayRes) {
options.row(row);
}
}
const connectionString = ensureDatabaseInMongoURI(dbhan.client.s.url, dbhan.database);
const serviceProvider = await NodeDriverServiceProvider.connect(connectionString);
const runtime = new ElectronRuntime(serviceProvider);
exprValue = await runtime.evaluate(sql);
} catch (err) {
options.info({
message: 'Error when running command: ' + err.message,
message: 'Error evaluating expression: ' + err.message,
time: new Date(),
severity: 'error',
});
options.done();
return;
}
}
options.done();
const { printable, type } = exprValue;
if (type === 'Document') {
options.recordset({ __isDynamicStructure: true });
options.row(printable);
} else if (type === 'Cursor' || exprValue.type === 'AggregationCursor') {
options.recordset({ __isDynamicStructure: true });
for (const doc of printable.documents) {
options.row(doc);
}
} else {
if (Array.isArray(printable)) {
options.recordset({ __isDynamicStructure: true });
for (const row of printable) {
options.row(row);
}
} else if ('documents' in printable) {
options.recordset({ __isDynamicStructure: true });
for (const row of printable.documents) {
options.row(row);
}
} else if ('cursor' in printable && 'firstBatch' in printable.cursor) {
options.recordset({ __isDynamicStructure: true });
for (const row of printable.cursor.firstBatch) {
options.row(row);
}
} else {
options.info({
printable: printable,
time: new Date(),
severity: 'info',
message: 'Query returned not supported value.',
});
}
}
options.done();
} else {
let func;
try {
func = eval(`(db,ObjectId) => ${sql}`);
} catch (err) {
options.info({
message: 'Error compiling expression: ' + err.message,
time: new Date(),
severity: 'error',
});
options.done();
return;
}
const db = await getScriptableDb(dbhan);
let exprValue;
try {
exprValue = func(db, ObjectId.createFromHexString);
} catch (err) {
options.info({
message: 'Error evaluating expression: ' + err.message,
time: new Date(),
severity: 'error',
});
options.done();
return;
}
if (exprValue instanceof AbstractCursor) {
await readCursor(exprValue, options);
} else if (isPromise(exprValue)) {
try {
const resValue = await exprValue;
options.info({
message: 'Command succesfully executed',
time: new Date(),
severity: 'info',
});
try {
options.info({
message: `Result: ${JSON.stringify(resValue)}`,
time: new Date(),
severity: 'info',
});
} catch (err) {
options.info({
message: `Result: ${resValue}`,
time: new Date(),
severity: 'info',
});
}
const arrayRes = findArrayResult(resValue);
if (arrayRes) {
options.recordset({ __isDynamicStructure: true });
for (const row of arrayRes) {
options.row(row);
}
}
} catch (err) {
options.info({
message: 'Error when running command: ' + err.message,
time: new Date(),
severity: 'error',
});
}
}
options.done();
}
},
async startProfiler(dbhan, options) {
const db = await getScriptableDb(dbhan);
@@ -582,4 +691,8 @@ const driver = {
},
};
driver.initialize = (dbgateEnv) => {
platformInfo = dbgateEnv.platformInfo;
};
module.exports = driver;

View File

@@ -13,4 +13,7 @@ module.exports = {
extractProfileTimestamp,
aggregateProfileChartEntry,
},
initialize(dbgateEnv) {
driver.initialize(dbgateEnv);
},
};

View File

@@ -88,14 +88,14 @@ const driver = {
getCollectionUpdateScript(changeSet, collectionInfo) {
let res = '';
for (const insert of changeSet.inserts) {
res += `db.${insert.pureName}.insertOne(${jsonStringifyWithObjectId({
res += `db.getCollection('${insert.pureName}').insertOne(${jsonStringifyWithObjectId({
...insert.document,
...insert.fields,
})});\n`;
}
for (const update of changeSet.updates) {
if (update.document) {
res += `db.${update.pureName}.replaceOne(${jsonStringifyWithObjectId(
res += `db.getCollection('${update.pureName}').replaceOne(${jsonStringifyWithObjectId(
update.condition
)}, ${jsonStringifyWithObjectId({
...update.document,
@@ -112,13 +112,13 @@ const driver = {
if (!_.isEmpty(set)) updates.$set = set;
if (!_.isEmpty(unset)) updates.$unset = unset;
res += `db.${update.pureName}.updateOne(${jsonStringifyWithObjectId(
res += `db.getCollection('${update.pureName}').updateOne(${jsonStringifyWithObjectId(
update.condition
)}, ${jsonStringifyWithObjectId(updates)});\n`;
}
}
for (const del of changeSet.deletes) {
res += `db.${del.pureName}.deleteOne(${jsonStringifyWithObjectId(del.condition)});\n`;
res += `db.getCollection('${del.pureName}').deleteOne(${jsonStringifyWithObjectId(del.condition)});\n`;
}
return res;
},
@@ -128,7 +128,7 @@ const driver = {
},
getCollectionExportQueryScript(collection, condition, sort) {
return `db.collection('${collection}')
return `db.getCollection('${collection}')
.find(${JSON.stringify(convertToMongoCondition(condition) || {})})
.sort(${JSON.stringify(convertToMongoSort(sort) || {})})`;
},
@@ -182,9 +182,9 @@ const driver = {
async getScriptTemplateContent(scriptTemplate, props) {
switch (scriptTemplate) {
case 'dropCollection':
return `db.${props.pureName}.drop();`;
return `db.getCollection('${props.pureName}').drop();`;
case 'findCollection':
return `db.${props.pureName}.find();`;
return `db.getCollection('${props.pureName}').find();`;
}
},
};

View File

@@ -189,6 +189,7 @@ jobs:
cp <<artifact-root>>/*win*.exe artifacts/dbgate-<<suffix>>.exe || true
cp <<artifact-root>>/*win_x64.zip artifacts/dbgate-windows-<<suffix>>.zip || true
cp <<artifact-root>>/*win_arm64.zip artifacts/dbgate-windows-<<suffix>>-arm64.zip || true
cp <<artifact-root>>/*win_arm64.exe artifacts/dbgate-windows-<<suffix>>-arm64.exe || true
cp <<artifact-root>>/*-mac_universal.dmg artifacts/dbgate-<<suffix>>.dmg || true
cp <<artifact-root>>/*-mac_x64.dmg artifacts/dbgate-<<suffix>>-x64.dmg || true
cp <<artifact-root>>/*-mac_arm64.dmg artifacts/dbgate-<<suffix>>-arm64.dmg || true

View File

@@ -25,10 +25,10 @@ jobs:
with:
fetch-depth: 1
- name: Use Node.js 18.x
- name: Use Node.js 22.x
uses: actions/setup-node@v1
with:
node-version: 18.x
node-version: 22.x
- name: Install jq
run: |

View File

@@ -30,10 +30,10 @@ jobs:
- uses: actions/checkout@v2
with:
fetch-depth: 1
- name: Use Node.js 18.x
- name: Use Node.js 22.x
uses: actions/setup-node@v1
with:
node-version: 18.x
node-version: 22.x
- _include: checkout-and-merge-pro

View File

@@ -30,10 +30,10 @@ jobs:
- uses: actions/checkout@v2
with:
fetch-depth: 1
- name: Use Node.js 18.x
- name: Use Node.js 22.x
uses: actions/setup-node@v1
with:
node-version: 18.x
node-version: 22.x
- name: Configure NPM token
env:

View File

@@ -13,10 +13,10 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Use Node.js 18.x
- name: Use Node.js 22.x
uses: actions/setup-node@v1
with:
node-version: 18.x
node-version: 22.x
- uses: actions/checkout@v3
with:

View File

@@ -13,10 +13,10 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Use Node.js 18.x
- name: Use Node.js 22.x
uses: actions/setup-node@v1
with:
node-version: 18.x
node-version: 22.x
- uses: actions/checkout@v3
with:

2138
yarn.lock

File diff suppressed because it is too large Load Diff