added plugins

This commit is contained in:
Jan Prochazka
2021-04-13 16:17:53 +02:00
parent 446e7c139f
commit 4d5cc119f2
115 changed files with 5519 additions and 24 deletions

View File

@@ -0,0 +1,21 @@
MIT License
Copyright (c) 2021 DbGate
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

View File

@@ -0,0 +1,6 @@
[![styled with prettier](https://img.shields.io/badge/styled_with-prettier-ff69b4.svg)](https://github.com/prettier/prettier)
[![NPM version](https://img.shields.io/npm/v/dbgate-plugin-mongo.svg)](https://www.npmjs.com/package/dbgate-plugin-mongo)
# dbgate-plugin-mongo
Use DbGate to install this plugin.

View File

@@ -0,0 +1,6 @@
<svg height="2500" viewBox="8.738 -5.03622834 17.45992422 39.40619484" width="2500"
xmlns="http://www.w3.org/2000/svg">
<path d="m15.9.087.854 1.604c.192.296.4.558.645.802a22.406 22.406 0 0 1 2.004 2.266c1.447 1.9 2.423 4.01 3.12 6.292.418 1.394.645 2.824.662 4.27.07 4.323-1.412 8.035-4.4 11.12a12.7 12.7 0 0 1 -1.57 1.342c-.296 0-.436-.227-.558-.436a3.589 3.589 0 0 1 -.436-1.255c-.105-.523-.174-1.046-.14-1.586v-.244c-.024-.052-.285-24.052-.181-24.175z" fill="#599636"/>
<path d="m15.9.034c-.035-.07-.07-.017-.105.017.017.35-.105.662-.296.96-.21.296-.488.523-.767.767-1.55 1.342-2.77 2.963-3.747 4.776-1.3 2.44-1.97 5.055-2.16 7.808-.087.993.314 4.497.627 5.508.854 2.684 2.388 4.933 4.375 6.885.488.47 1.01.906 1.55 1.325.157 0 .174-.14.21-.244a4.78 4.78 0 0 0 .157-.68l.35-2.614z" fill="#6cac48"/>
<path d="m16.754 28.845c.035-.4.227-.732.436-1.063-.21-.087-.366-.26-.488-.453a3.235 3.235 0 0 1 -.26-.575c-.244-.732-.296-1.5-.366-2.248v-.453c-.087.07-.105.662-.105.75a17.37 17.37 0 0 1 -.314 2.353c-.052.314-.087.627-.28.906 0 .035 0 .07.017.122.314.924.4 1.865.453 2.824v.35c0 .418-.017.33.33.47.14.052.296.07.436.174.105 0 .122-.087.122-.157l-.052-.575v-1.604c-.017-.28.035-.558.07-.82z" fill="#c2bfbf"/>
</svg>

After

Width:  |  Height:  |  Size: 1.2 KiB

View File

@@ -0,0 +1,36 @@
{
"name": "dbgate-plugin-mongo",
"main": "dist/backend.js",
"version": "1.0.1",
"license": "MIT",
"author": "Jan Prochazka",
"homepage": "https://github.com/dbgate/dbgate-plugin-mongo",
"description": "MongoDB connect plugin for DbGate",
"funding": "https://www.paypal.com/paypalme/JanProchazkaCz/30eur",
"keywords": [
"dbgate",
"dbgateplugin",
"mongo",
"mongodb"
],
"files": [
"dist"
],
"scripts": {
"build:frontend": "webpack --config webpack-frontend.config.js",
"build:backend": "webpack --config webpack-backend.config.js",
"build": "yarn build:frontend && yarn build:backend",
"plugin": "yarn build && yarn pack && dbgate-plugin dbgate-plugin-mongo",
"plugout": "dbgate-plugout dbgate-plugin-mongo",
"prepublishOnly": "yarn build"
},
"devDependencies": {
"byline": "^5.0.0",
"dbgate-plugin-tools": "^1.0.4",
"dbgate-tools": "^4.1.0-rc.1",
"is-promise": "^4.0.0",
"mongodb": "^3.6.5",
"webpack": "^4.42.0",
"webpack-cli": "^3.3.11"
}
}

View File

@@ -0,0 +1,8 @@
module.exports = {
trailingComma: 'es5',
tabWidth: 2,
semi: true,
singleQuote: true,
arrowParen: 'avoid',
printWidth: 120,
};

View File

@@ -0,0 +1,24 @@
const { DatabaseAnalyser } = require('dbgate-tools');
/**
 * Schema analyser for MongoDB: lists the collections of the connected database
 * and merges them into the structure format expected by dbgate-tools.
 *
 * NOTE: no constructor is declared - the original one only forwarded
 * (pool, driver) to super, so the inherited DatabaseAnalyser constructor
 * is used directly (no-useless-constructor).
 */
class Analyser extends DatabaseAnalyser {
  async _runAnalysis() {
    // Every MongoDB collection is reported as a "collection" keyed by pureName.
    const collections = await this.pool.__getDatabase().listCollections().toArray();
    const res = this.mergeAnalyseResult(
      {
        collections: collections.map((x) => ({
          pureName: x.name,
        })),
      },
      (x) => x.pureName
    );
    return res;
  }
}

module.exports = Analyser;

View File

@@ -0,0 +1,58 @@
/**
 * Creates an object-mode writable stream that bulk-inserts incoming rows into
 * a MongoDB collection in batches.
 *
 * @param driver - owning driver (unused here, kept for interface parity)
 * @param stream - the Node.js stream module (injected by the caller)
 * @param pool - MongoClient augmented with __getDatabase()
 * @param name - { pureName } target collection reference
 * @param options - { dropIfExists, truncate } preparation flags
 * @returns writable stream accepting row objects (first row may be a header)
 */
export function createBulkInsertStream(driver, stream, pool, name, options) {
  const collectionName = name.pureName;
  const db = pool.__getDatabase();

  const writable = new stream.Writable({
    objectMode: true,
  });

  // Rows buffered since the last insertMany() round trip.
  writable.buffer = [];
  writable.wasHeader = false;

  // Buffers one row; the very first row may be a structure header, which is skipped.
  writable.addRow = (row) => {
    if (!writable.wasHeader) {
      writable.wasHeader = true;
      if (
        row.__isStreamHeader ||
        // TODO remove isArray test
        Array.isArray(row.columns)
      )
        return;
    }
    writable.buffer.push(row);
  };

  // Prepares the target collection before the first insert, according to options.
  writable.checkStructure = async () => {
    // BUG FIX: the original dropped the collection when EITHER dropIfExists or
    // truncate was set, which destroyed indexes on truncate and made the
    // subsequent deleteMany a no-op. Drop only for dropIfExists; truncate
    // merely deletes all documents.
    if (options.dropIfExists) {
      console.log(`Dropping collection ${collectionName}`);
      try {
        await db.collection(collectionName).drop();
      } catch (err) {
        // drop() rejects when the collection does not exist - fine for "if exists"
      }
    }
    if (options.truncate) {
      console.log(`Truncating collection ${collectionName}`);
      await db.collection(collectionName).deleteMany({});
    }
  };

  // Flushes the buffered rows in a single insertMany() call.
  writable.send = async () => {
    const rows = writable.buffer;
    writable.buffer = [];
    // insertMany() rejects an empty batch (e.g. header-only stream), so skip it.
    if (rows.length === 0) return;
    await db.collection(collectionName).insertMany(rows);
  };

  writable.sendIfFull = async () => {
    if (writable.buffer.length > 100) {
      await writable.send();
    }
  };

  writable._write = async (chunk, encoding, callback) => {
    writable.addRow(chunk);
    await writable.sendIfFull();
    callback();
  };

  writable._final = async (callback) => {
    await writable.send();
    callback();
  };

  return writable;
}

View File

@@ -0,0 +1,260 @@
const _ = require('lodash');
const stream = require('stream');
const isPromise = require('is-promise');
const driverBase = require('../frontend/driver');
const Analyser = require('./Analyser');
const MongoClient = require('mongodb').MongoClient;
const ObjectId = require('mongodb').ObjectId;
const Cursor = require('mongodb').Cursor;
const { createBulkInsertStream } = require('./createBulkInsertStream');
// Streams every row of a MongoDB cursor into the options callbacks and
// resolves once the cursor signals 'end'. The recordset structure is always
// reported as dynamic (documents have no fixed columns).
function readCursor(cursor, options) {
  return new Promise((resolve) => {
    options.recordset({ __isDynamicStructure: true });
    cursor.on('data', (row) => {
      options.row(row);
    });
    cursor.on('end', () => {
      resolve();
    });
  });
}
// 24 hex characters - the canonical string form of a MongoDB ObjectId.
const mongoIdRegex = /^[0-9a-f]{24}$/;

// If the condition is an _id lookup by an ObjectId-shaped string, convert the
// string into a real ObjectId; every other condition passes through unchanged.
function convertCondition(condition) {
  if (!condition) return condition;
  if (!_.isString(condition._id)) return condition;
  if (!condition._id.match(mongoIdRegex)) return condition;
  return {
    _id: ObjectId(condition._id),
  };
}
// If a command result object contains exactly one array-valued property,
// return that array (so it can be presented as a recordset); otherwise null.
function findArrayResult(resValue) {
  if (!_.isPlainObject(resValue)) {
    return null;
  }
  const arrayValues = _.values(resValue).filter((value) => _.isArray(value));
  return arrayValues.length == 1 ? arrayValues[0] : null;
}
// Returns the db handle extended with one property per collection
// (db.myCollection === db.collection('myCollection')), so user scripts can use
// the familiar mongo-shell style "db.myCollection.find()".
async function getScriptableDb(pool) {
  const db = pool.__getDatabase();
  const collections = await db.listCollections().toArray();
  collections.forEach(({ name }) => {
    db[name] = db.collection(name);
  });
  return db;
}
/** @type {import('dbgate-types').EngineDriver} */
const driver = {
  ...driverBase,
  analyserClass: Analyser,

  /**
   * Opens a MongoDB connection and returns the MongoClient instance ("pool"),
   * augmented with __getDatabase() / __databaseName helpers used by the
   * other driver methods.
   */
  async connect({ server, port, user, password, database, useDatabaseUrl, databaseUrl, ssl }) {
    const mongoUrl = useDatabaseUrl
      ? databaseUrl
      : user
      ? `mongodb://${user}:${password}@${server}:${port}`
      : `mongodb://${server}:${port}`;

    const options = {};
    if (ssl) {
      options.tls = true;
      options.tlsCAFile = ssl.ca;
      // The node driver accepts the certificate and key in one combined file.
      options.tlsCertificateKeyFile = ssl.cert || ssl.key;
      options.tlsCertificateKeyFilePassword = ssl.password;
      options.tlsAllowInvalidCertificates = !ssl.rejectUnauthorized;
    }

    const pool = new MongoClient(mongoUrl, options);
    await pool.connect();
    // When no explicit database is configured, use the database from the URL.
    pool.__getDatabase = database ? () => pool.db(database) : () => pool.db();
    pool.__databaseName = database;
    return pool;
  },

  // SQL queries are not applicable to MongoDB; always resolves to an empty result.
  // @ts-ignore
  async query(pool, sql) {
    return {
      rows: [],
      columns: [],
    };
  },

  /**
   * Executes a user-authored script (e.g. "db.users.find()") and streams the
   * result through the options callbacks (recordset/row/info/done).
   */
  async stream(pool, sql, options) {
    let func;
    try {
      // NOTE(security): eval of user input is intentional here - this powers the
      // interactive query console, which runs arbitrary JS against the database.
      func = eval(`(db,ObjectId) => ${sql}`);
    } catch (err) {
      options.info({
        message: 'Error compiling expression: ' + err.message,
        time: new Date(),
        severity: 'error',
      });
      options.done();
      return;
    }

    const db = await getScriptableDb(pool);

    let exprValue;
    try {
      exprValue = func(db, ObjectId);
    } catch (err) {
      options.info({
        message: 'Error evaluating expression: ' + err.message,
        time: new Date(),
        severity: 'error',
      });
      options.done();
      return;
    }

    if (exprValue instanceof Cursor) {
      // find()-like expressions: stream rows directly from the cursor.
      await readCursor(exprValue, options);
    } else if (isPromise(exprValue)) {
      // Command-like expressions: await the result, report it, and emit a
      // recordset when the result contains exactly one array.
      try {
        const resValue = await exprValue;

        options.info({
          // BUG FIX: message typo ("succesfully")
          message: 'Command successfully executed',
          time: new Date(),
          severity: 'info',
        });
        options.info({
          message: JSON.stringify(resValue),
          time: new Date(),
          severity: 'info',
        });

        const arrayRes = findArrayResult(resValue);
        if (arrayRes) {
          options.recordset({ __isDynamicStructure: true });
          for (const row of arrayRes) {
            options.row(row);
          }
        }
      } catch (err) {
        options.info({
          message: 'Error when running command: ' + err.message,
          time: new Date(),
          severity: 'error',
        });
      }
    }
    options.done();
  },

  /**
   * Returns a readable object stream for the given script or JSON-serialized
   * collection reference ({ pureName }); used by data export.
   */
  async readQuery(pool, sql, structure) {
    try {
      const json = JSON.parse(sql);
      if (json && json.pureName) {
        sql = `db.${json.pureName}.find()`;
      }
    } catch (err) {
      // sql is not a JSON-serialized collection name - treat it as a script
    }

    // BUG FIX: func and exprValue were assigned without declaration, creating
    // implicit globals (and a ReferenceError in strict-mode bundles).
    const func = eval(`(db,ObjectId) => ${sql}`);
    const db = await getScriptableDb(pool);
    const exprValue = func(db, ObjectId);
    // Return the cursor directly as a stream, without a header row.
    return exprValue;
  },

  // Bulk-insert writable stream for data import; see createBulkInsertStream.
  async writeTable(pool, name, options) {
    return createBulkInsertStream(this, stream, pool, name, options);
  },

  async getVersion(pool) {
    const status = await pool.__getDatabase().admin().serverInfo();
    return status;
  },

  async listDatabases(pool) {
    const res = await pool.__getDatabase().admin().listDatabases();
    return res.databases;
  },

  /**
   * Reads documents (or a document count) from one collection.
   * options: { pureName, condition?, sort?, skip?, limit?, countDocuments? }
   * Returns { rows } or { count }, or { errorMessage } on failure.
   */
  async readCollection(pool, options) {
    try {
      const collection = pool.__getDatabase().collection(options.pureName);
      if (options.countDocuments) {
        const count = await collection.countDocuments(options.condition || {});
        return { count };
      } else {
        let cursor = await collection.find(options.condition || {});
        if (options.sort) cursor = cursor.sort(options.sort);
        if (options.skip) cursor = cursor.skip(options.skip);
        if (options.limit) cursor = cursor.limit(options.limit);
        const rows = await cursor.toArray();
        return { rows };
      }
    } catch (err) {
      return { errorMessage: err.message };
    }
  },

  /**
   * Applies a change set (inserts/updates/deletes) to the database.
   * Returns per-operation result lists, or { errorMessage } on failure.
   */
  async updateCollection(pool, changeSet) {
    const res = {
      inserted: [],
      updated: [],
      deleted: [],
      replaced: [],
    };
    try {
      const db = pool.__getDatabase();
      for (const insert of changeSet.inserts) {
        const collection = db.collection(insert.pureName);
        const document = {
          ...insert.document,
          ...insert.fields,
        };
        // NOTE(review): collection.insert is deprecated in the 3.x driver, and
        // its result object does not expose _id directly - verify what callers
        // expect in res.inserted before changing this.
        const resdoc = await collection.insert(document);
        res.inserted.push(resdoc._id);
      }
      for (const update of changeSet.updates) {
        const collection = db.collection(update.pureName);
        if (update.document) {
          // Whole-document edit: replace the matched document, preserving _id.
          const document = {
            ...update.document,
            ...update.fields,
          };
          const doc = await collection.findOne(convertCondition(update.condition));
          if (doc) {
            const resdoc = await collection.replaceOne(convertCondition(update.condition), {
              ...document,
              _id: doc._id,
            });
            res.replaced.push(resdoc._id);
          }
        } else {
          // Field-level edit: set only the changed fields.
          const resdoc = await collection.updateOne(convertCondition(update.condition), { $set: update.fields });
          res.updated.push(resdoc._id);
        }
      }
      for (const del of changeSet.deletes) {
        const collection = db.collection(del.pureName);
        const resdoc = await collection.deleteOne(convertCondition(del.condition));
        res.deleted.push(resdoc._id);
      }
      return res;
    } catch (err) {
      return { errorMessage: err.message };
    }
  },

  /**
   * Creates a database by creating an initial collection in it (MongoDB only
   * materializes a database once it contains data).
   */
  async createDatabase(pool, name) {
    const db = pool.db(name);
    await db.createCollection('collection1');
  },
};

module.exports = driver;

View File

@@ -0,0 +1,6 @@
// Backend plugin entry point - exposes the MongoDB driver under the package name.
const driver = require('./driver');

const plugin = {
  packageName: 'dbgate-plugin-mongo',
  driver,
};

module.exports = plugin;

View File

@@ -0,0 +1,6 @@
const { SqlDumper } = require('dbgate-tools');
// SQL dumper for this driver. No MongoDB-specific dumping is implemented here;
// the dbgate-tools SqlDumper base behavior is used unchanged.
class Dumper extends SqlDumper {
}
module.exports = Dumper;

View File

@@ -0,0 +1,76 @@
const { driverBase } = require('dbgate-tools');
const Dumper = require('./Dumper');
// 24 hex characters - the canonical string form of a MongoDB ObjectId.
const mongoIdRegex = /^[0-9a-f]{24}$/;

/**
 * Renders a change-set condition as mongo-shell source text.
 * An _id lookup by an ObjectId-shaped string is shown as ObjectId('...');
 * every other condition is rendered as plain JSON.
 *
 * BUG FIX: the original called _.isString, but lodash is never imported in
 * this file, so any non-null condition threw a ReferenceError. Use typeof.
 */
function getConditionPreview(condition) {
  if (condition && typeof condition._id === 'string' && condition._id.match(mongoIdRegex)) {
    return `{ _id: ObjectId('${condition._id}') }`;
  }
  return JSON.stringify(condition);
}
/** @type {import('dbgate-types').SqlDialect} */
// Dialect descriptor consumed by dbgate-tools. The key flag for MongoDB is
// `nosql: true`; the SQL-oriented flags below look like boilerplate carried
// over from SQL drivers - NOTE(review): presumably unused for Mongo, verify.
const dialect = {
  limitSelect: true,
  rangeSelect: true,
  offsetFetchRangeSyntax: true,
  stringEscapeChar: "'",
  fallbackDataType: 'nvarchar(max)',
  // marks this driver as NoSQL (collections instead of tables)
  nosql: true,
  quoteIdentifier(s) {
    return `[${s}]`;
  },
};
/** @type {import('dbgate-types').EngineDriver} */
const driver = {
  ...driverBase,
  dumperClass: Dumper,
  dialect,
  engine: 'mongo@dbgate-plugin-mongo',
  title: 'MongoDB',
  defaultPort: 27017,
  supportsDatabaseUrl: true,
  databaseUrlPlaceholder: 'e.g. mongodb://username:password@mongodb.mydomain.net/dbname',

  /**
   * Renders a change set as a mongo-shell script (insert / replaceOne /
   * updateOne / deleteOne statements) so pending changes can be previewed.
   */
  getCollectionUpdateScript(changeSet) {
    // Pretty-print documents with a 2-space indent, exactly like the preview expects.
    const toJson = (doc) => JSON.stringify(doc, undefined, 2);
    const commands = [];

    for (const insert of changeSet.inserts) {
      commands.push(`db.${insert.pureName}.insert(${toJson({ ...insert.document, ...insert.fields })});\n`);
    }

    for (const update of changeSet.updates) {
      const condition = getConditionPreview(update.condition);
      if (update.document) {
        // Whole-document edit: replace the matched document.
        commands.push(
          `db.${update.pureName}.replaceOne(${condition}, ${toJson({ ...update.document, ...update.fields })});\n`
        );
      } else {
        // Field-level edit: update only the changed fields.
        commands.push(`db.${update.pureName}.updateOne(${condition}, ${toJson({ $set: update.fields })});\n`);
      }
    }

    for (const del of changeSet.deletes) {
      commands.push(`db.${del.pureName}.deleteOne(${getConditionPreview(del.condition)});\n`);
    }

    return commands.join('');
  },
};

module.exports = driver;

View File

@@ -0,0 +1,6 @@
// Frontend plugin entry point - exposes the MongoDB driver definition under
// the package name.
import driver from './driver';

const plugin = {
  packageName: 'dbgate-plugin-mongo',
  driver,
};

export default plugin;

View File

@@ -0,0 +1,23 @@
// Webpack configuration for the backend (Node.js) bundle of the plugin.
// `var` replaced with `const` and the context path built with path.join.
const webpack = require('webpack');
const path = require('path');

const config = {
  context: path.join(__dirname, 'src', 'backend'),
  entry: {
    app: './index.js',
  },
  target: 'node',
  output: {
    path: path.resolve(__dirname, 'dist'),
    filename: 'backend.js',
    libraryTarget: 'commonjs2',
  },
  // uncomment to disable minification
  // optimization: {
  //   minimize: false,
  // },
};

module.exports = config;

View File

@@ -0,0 +1,24 @@
// Webpack configuration for the frontend (browser) bundle of the plugin.
// Quoting unified to single quotes for consistency with
// webpack-backend.config.js and the prettier config (singleQuote: true);
// `var` replaced with `const`.
const webpack = require('webpack');
const path = require('path');

const config = {
  context: path.join(__dirname, 'src', 'frontend'),
  entry: {
    app: './index.js',
  },
  target: 'web',
  output: {
    path: path.resolve(__dirname, 'dist'),
    filename: 'frontend.js',
    libraryTarget: 'var',
    // NOTE(review): presumably the bundle is read from the global `plugin`
    // variable by the plugin loader - verify against the host application.
    library: 'plugin',
  },
  // uncomment to disable minification
  // optimization: {
  //   minimize: false,
  // },
};

module.exports = config;