Clean up files and improve the file manager.

This commit is contained in:
LukeGus
2025-09-18 00:32:56 -05:00
parent cb7bb3c864
commit 8afd84d96d
53 changed files with 6354 additions and 4736 deletions

View File

@@ -34,13 +34,13 @@ app.use(
// Configure multer for file uploads
const storage = multer.diskStorage({
destination: (req, file, cb) => {
cb(null, 'uploads/');
cb(null, "uploads/");
},
filename: (req, file, cb) => {
// Preserve original filename with timestamp prefix to avoid conflicts
const timestamp = Date.now();
cb(null, `${timestamp}-${file.originalname}`);
}
},
});
const upload = multer({
@@ -50,12 +50,15 @@ const upload = multer({
},
fileFilter: (req, file, cb) => {
// Allow SQLite files
if (file.originalname.endsWith('.termix-export.sqlite') || file.originalname.endsWith('.sqlite')) {
if (
file.originalname.endsWith(".termix-export.sqlite") ||
file.originalname.endsWith(".sqlite")
) {
cb(null, true);
} else {
cb(new Error('Only .termix-export.sqlite files are allowed'));
cb(new Error("Only .termix-export.sqlite files are allowed"));
}
}
},
});
interface CacheEntry {
@@ -295,11 +298,11 @@ app.get("/encryption/status", async (req, res) => {
res.json({
encryption: detailedStatus,
migration: migrationStatus
migration: migrationStatus,
});
} catch (error) {
apiLogger.error("Failed to get encryption status", error, {
operation: "encryption_status"
operation: "encryption_status",
});
res.status(500).json({ error: "Failed to get encryption status" });
}
@@ -307,24 +310,26 @@ app.get("/encryption/status", async (req, res) => {
app.post("/encryption/initialize", async (req, res) => {
try {
const { EncryptionKeyManager } = await import("../utils/encryption-key-manager.js");
const { EncryptionKeyManager } = await import(
"../utils/encryption-key-manager.js"
);
const keyManager = EncryptionKeyManager.getInstance();
const newKey = await keyManager.generateNewKey();
await DatabaseEncryption.initialize({ masterPassword: newKey });
apiLogger.info("Encryption initialized via API", {
operation: "encryption_init_api"
operation: "encryption_init_api",
});
res.json({
success: true,
message: "Encryption initialized successfully",
keyPreview: newKey.substring(0, 8) + "..."
keyPreview: newKey.substring(0, 8) + "...",
});
} catch (error) {
apiLogger.error("Failed to initialize encryption", error, {
operation: "encryption_init_api_failed"
operation: "encryption_init_api_failed",
});
res.status(500).json({ error: "Failed to initialize encryption" });
}
@@ -336,38 +341,38 @@ app.post("/encryption/migrate", async (req, res) => {
const migration = new EncryptionMigration({
dryRun,
backupEnabled: true
backupEnabled: true,
});
if (dryRun) {
apiLogger.info("Starting encryption migration (dry run)", {
operation: "encryption_migrate_dry_run"
operation: "encryption_migrate_dry_run",
});
res.json({
success: true,
message: "Dry run mode - no changes made",
dryRun: true
dryRun: true,
});
} else {
apiLogger.info("Starting encryption migration", {
operation: "encryption_migrate"
operation: "encryption_migrate",
});
await migration.runMigration();
res.json({
success: true,
message: "Migration completed successfully"
message: "Migration completed successfully",
});
}
} catch (error) {
apiLogger.error("Migration failed", error, {
operation: "encryption_migrate_failed"
operation: "encryption_migrate_failed",
});
res.status(500).json({
error: "Migration failed",
details: error instanceof Error ? error.message : "Unknown error"
details: error instanceof Error ? error.message : "Unknown error",
});
}
});
@@ -377,17 +382,17 @@ app.post("/encryption/regenerate", async (req, res) => {
await DatabaseEncryption.reinitializeWithNewKey();
apiLogger.warn("Encryption key regenerated via API", {
operation: "encryption_regenerate_api"
operation: "encryption_regenerate_api",
});
res.json({
success: true,
message: "New encryption key generated",
warning: "All encrypted data must be re-encrypted"
warning: "All encrypted data must be re-encrypted",
});
} catch (error) {
apiLogger.error("Failed to regenerate encryption key", error, {
operation: "encryption_regenerate_failed"
operation: "encryption_regenerate_failed",
});
res.status(500).json({ error: "Failed to regenerate encryption key" });
}
@@ -400,7 +405,7 @@ app.post("/database/export", async (req, res) => {
apiLogger.info("Starting SQLite database export via API", {
operation: "database_sqlite_export_api",
customPath: !!customPath
customPath: !!customPath,
});
const exportPath = await DatabaseSQLiteExport.exportDatabase(customPath);
@@ -410,20 +415,20 @@ app.post("/database/export", async (req, res) => {
message: "Database exported successfully as SQLite",
exportPath,
size: fs.statSync(exportPath).size,
format: "sqlite"
format: "sqlite",
});
} catch (error) {
apiLogger.error("SQLite database export failed", error, {
operation: "database_sqlite_export_api_failed"
operation: "database_sqlite_export_api_failed",
});
res.status(500).json({
error: "SQLite database export failed",
details: error instanceof Error ? error.message : "Unknown error"
details: error instanceof Error ? error.message : "Unknown error",
});
}
});
app.post("/database/import", upload.single('file'), async (req, res) => {
app.post("/database/import", upload.single("file"), async (req, res) => {
try {
if (!req.file) {
return res.status(400).json({ error: "No file uploaded" });
@@ -439,17 +444,17 @@ app.post("/database/import", upload.single('file'), async (req, res) => {
originalName: req.file.originalname,
fileSize: req.file.size,
mode: "additive",
backupCurrent: backupCurrentBool
backupCurrent: backupCurrentBool,
});
// Validate export file first
// Check file extension using original filename
if (!req.file.originalname.endsWith('.termix-export.sqlite')) {
if (!req.file.originalname.endsWith(".termix-export.sqlite")) {
// Clean up uploaded file
fs.unlinkSync(importPath);
return res.status(400).json({
error: "Invalid SQLite export file",
details: ["File must have .termix-export.sqlite extension"]
details: ["File must have .termix-export.sqlite extension"],
});
}
@@ -459,13 +464,13 @@ app.post("/database/import", upload.single('file'), async (req, res) => {
fs.unlinkSync(importPath);
return res.status(400).json({
error: "Invalid SQLite export file",
details: validation.errors
details: validation.errors,
});
}
const result = await DatabaseSQLiteExport.importDatabase(importPath, {
replaceExisting: false, // Always use additive mode
backupCurrent: backupCurrentBool
backupCurrent: backupCurrentBool,
});
// Clean up uploaded file
@@ -473,11 +478,13 @@ app.post("/database/import", upload.single('file'), async (req, res) => {
res.json({
success: result.success,
message: result.success ? "SQLite database imported successfully" : "SQLite database import completed with errors",
message: result.success
? "SQLite database imported successfully"
: "SQLite database import completed with errors",
imported: result.imported,
errors: result.errors,
warnings: result.warnings,
format: "sqlite"
format: "sqlite",
});
} catch (error) {
// Clean up uploaded file if it exists
@@ -488,17 +495,20 @@ app.post("/database/import", upload.single('file'), async (req, res) => {
apiLogger.warn("Failed to clean up uploaded file", {
operation: "file_cleanup_failed",
filePath: req.file.path,
error: cleanupError instanceof Error ? cleanupError.message : 'Unknown error'
error:
cleanupError instanceof Error
? cleanupError.message
: "Unknown error",
});
}
}
apiLogger.error("SQLite database import failed", error, {
operation: "database_sqlite_import_api_failed"
operation: "database_sqlite_import_api_failed",
});
res.status(500).json({
error: "SQLite database import failed",
details: error instanceof Error ? error.message : "Unknown error"
details: error instanceof Error ? error.message : "Unknown error",
});
}
});
@@ -512,18 +522,18 @@ app.get("/database/export/:exportPath/info", async (req, res) => {
if (!validation.valid) {
return res.status(400).json({
error: "Invalid SQLite export file",
details: validation.errors
details: validation.errors,
});
}
res.json({
valid: true,
metadata: validation.metadata,
format: "sqlite"
format: "sqlite",
});
} catch (error) {
apiLogger.error("Failed to get SQLite export info", error, {
operation: "sqlite_export_info_failed"
operation: "sqlite_export_info_failed",
});
res.status(500).json({ error: "Failed to get SQLite export information" });
}
@@ -534,23 +544,26 @@ app.post("/database/backup", async (req, res) => {
const { customPath } = req.body;
apiLogger.info("Creating encrypted database backup via API", {
operation: "database_backup_api"
operation: "database_backup_api",
});
// Import required modules
const { databasePaths, getMemoryDatabaseBuffer } = await import("./db/index.js");
const { databasePaths, getMemoryDatabaseBuffer } = await import(
"./db/index.js"
);
// Get current in-memory database as buffer
const dbBuffer = getMemoryDatabaseBuffer();
// Create backup directory
const backupDir = customPath || path.join(databasePaths.directory, 'backups');
const backupDir =
customPath || path.join(databasePaths.directory, "backups");
if (!fs.existsSync(backupDir)) {
fs.mkdirSync(backupDir, { recursive: true });
}
// Generate backup filename with timestamp
const timestamp = new Date().toISOString().replace(/[:.]/g, '-');
const timestamp = new Date().toISOString().replace(/[:.]/g, "-");
const backupFileName = `database-backup-${timestamp}.sqlite.encrypted`;
const backupPath = path.join(backupDir, backupFileName);
@@ -561,15 +574,15 @@ app.post("/database/backup", async (req, res) => {
success: true,
message: "Encrypted backup created successfully",
backupPath,
size: fs.statSync(backupPath).size
size: fs.statSync(backupPath).size,
});
} catch (error) {
apiLogger.error("Database backup failed", error, {
operation: "database_backup_api_failed"
operation: "database_backup_api_failed",
});
res.status(500).json({
error: "Database backup failed",
details: error instanceof Error ? error.message : "Unknown error"
details: error instanceof Error ? error.message : "Unknown error",
});
}
});
@@ -584,7 +597,7 @@ app.post("/database/restore", async (req, res) => {
apiLogger.info("Restoring database from backup via API", {
operation: "database_restore_api",
backupPath
backupPath,
});
// Validate backup file
@@ -596,24 +609,28 @@ app.post("/database/restore", async (req, res) => {
if (!DatabaseFileEncryption.validateHardwareCompatibility(backupPath)) {
return res.status(400).json({
error: "Hardware fingerprint mismatch",
message: "This backup was created on different hardware and cannot be restored"
message:
"This backup was created on different hardware and cannot be restored",
});
}
const restoredPath = DatabaseFileEncryption.restoreFromEncryptedBackup(backupPath, targetPath);
const restoredPath = DatabaseFileEncryption.restoreFromEncryptedBackup(
backupPath,
targetPath,
);
res.json({
success: true,
message: "Database restored successfully",
restoredPath
restoredPath,
});
} catch (error) {
apiLogger.error("Database restore failed", error, {
operation: "database_restore_api_failed"
operation: "database_restore_api_failed",
});
res.status(500).json({
error: "Database restore failed",
details: error instanceof Error ? error.message : "Unknown error"
details: error instanceof Error ? error.message : "Unknown error",
});
}
});
@@ -645,13 +662,13 @@ const PORT = 8081;
async function initializeEncryption() {
try {
databaseLogger.info("Initializing database encryption...", {
operation: "encryption_init"
operation: "encryption_init",
});
await DatabaseEncryption.initialize({
encryptionEnabled: process.env.ENCRYPTION_ENABLED !== 'false',
forceEncryption: process.env.FORCE_ENCRYPTION === 'true',
migrateOnAccess: process.env.MIGRATE_ON_ACCESS !== 'false'
encryptionEnabled: process.env.ENCRYPTION_ENABLED !== "false",
forceEncryption: process.env.FORCE_ENCRYPTION === "true",
migrateOnAccess: process.env.MIGRATE_ON_ACCESS !== "false",
});
const status = await DatabaseEncryption.getDetailedStatus();
@@ -660,24 +677,28 @@ async function initializeEncryption() {
operation: "encryption_init_complete",
enabled: status.enabled,
keyId: status.key.keyId,
hasStoredKey: status.key.hasKey
hasStoredKey: status.key.hasKey,
});
} else {
databaseLogger.error("Database encryption configuration invalid", undefined, {
operation: "encryption_init_failed",
status
});
databaseLogger.error(
"Database encryption configuration invalid",
undefined,
{
operation: "encryption_init_failed",
status,
},
);
}
} catch (error) {
databaseLogger.error("Failed to initialize database encryption", error, {
operation: "encryption_init_error"
operation: "encryption_init_error",
});
}
}
app.listen(PORT, async () => {
// Ensure uploads directory exists
const uploadsDir = path.join(process.cwd(), 'uploads');
const uploadsDir = path.join(process.cwd(), "uploads");
if (!fs.existsSync(uploadsDir)) {
fs.mkdirSync(uploadsDir, { recursive: true });
}

View File

@@ -17,12 +17,12 @@ if (!fs.existsSync(dbDir)) {
}
// Database file encryption configuration
const enableFileEncryption = process.env.DB_FILE_ENCRYPTION !== 'false';
const enableFileEncryption = process.env.DB_FILE_ENCRYPTION !== "false";
const dbPath = path.join(dataDir, "db.sqlite");
const encryptedDbPath = `${dbPath}.encrypted`;
// Initialize database with file encryption support
let actualDbPath = ':memory:'; // Always use memory database
let actualDbPath = ":memory:"; // Always use memory database
let memoryDatabase: Database.Database;
let isNewDatabase = false;
@@ -30,55 +30,54 @@ if (enableFileEncryption) {
try {
// Check if encrypted database exists
if (DatabaseFileEncryption.isEncryptedDatabaseFile(encryptedDbPath)) {
databaseLogger.info('Found encrypted database file, loading into memory...', {
operation: 'db_memory_load',
encryptedPath: encryptedDbPath
});
databaseLogger.info(
"Found encrypted database file, loading into memory...",
{
operation: "db_memory_load",
encryptedPath: encryptedDbPath,
},
);
// Validate hardware compatibility
if (!DatabaseFileEncryption.validateHardwareCompatibility(encryptedDbPath)) {
databaseLogger.error('Hardware fingerprint mismatch for encrypted database', {
operation: 'db_decrypt_failed',
reason: 'hardware_mismatch'
});
throw new Error('Cannot decrypt database: hardware fingerprint mismatch');
if (
!DatabaseFileEncryption.validateHardwareCompatibility(encryptedDbPath)
) {
databaseLogger.error(
"Hardware fingerprint mismatch for encrypted database",
{
operation: "db_decrypt_failed",
reason: "hardware_mismatch",
},
);
throw new Error(
"Cannot decrypt database: hardware fingerprint mismatch",
);
}
// Decrypt database content to memory buffer
const decryptedBuffer = DatabaseFileEncryption.decryptDatabaseToBuffer(encryptedDbPath);
const decryptedBuffer =
DatabaseFileEncryption.decryptDatabaseToBuffer(encryptedDbPath);
// Create in-memory database from decrypted buffer
memoryDatabase = new Database(decryptedBuffer);
databaseLogger.success('Existing database loaded into memory successfully', {
operation: 'db_memory_load_success',
bufferSize: decryptedBuffer.length,
inMemory: true
});
} else {
// No encrypted database exists - create new in-memory database
databaseLogger.info('No encrypted database found, creating new in-memory database', {
operation: 'db_memory_create_new'
});
memoryDatabase = new Database(':memory:');
memoryDatabase = new Database(":memory:");
isNewDatabase = true;
// Check if there's an old unencrypted database to migrate
if (fs.existsSync(dbPath)) {
databaseLogger.info('Found existing unencrypted database, will migrate to memory', {
operation: 'db_migrate_to_memory',
oldPath: dbPath
});
// Load old database and copy its content to memory database
const oldDb = new Database(dbPath, { readonly: true });
// Get all table schemas and data from old database
const tables = oldDb.prepare(`
const tables = oldDb
.prepare(
`
SELECT name, sql FROM sqlite_master
WHERE type='table' AND name NOT LIKE 'sqlite_%'
`).all() as { name: string; sql: string }[];
`,
)
.all() as { name: string; sql: string }[];
// Create tables in memory database
for (const table of tables) {
@@ -90,13 +89,13 @@ if (enableFileEncryption) {
const rows = oldDb.prepare(`SELECT * FROM ${table.name}`).all();
if (rows.length > 0) {
const columns = Object.keys(rows[0]);
const placeholders = columns.map(() => '?').join(', ');
const placeholders = columns.map(() => "?").join(", ");
const insertStmt = memoryDatabase.prepare(
`INSERT INTO ${table.name} (${columns.join(', ')}) VALUES (${placeholders})`
`INSERT INTO ${table.name} (${columns.join(", ")}) VALUES (${placeholders})`,
);
for (const row of rows) {
const values = columns.map(col => (row as any)[col]);
const values = columns.map((col) => (row as any)[col]);
insertStmt.run(values);
}
}
@@ -104,48 +103,36 @@ if (enableFileEncryption) {
oldDb.close();
databaseLogger.success('Migrated existing database to memory', {
operation: 'db_migrate_to_memory_success'
});
isNewDatabase = false;
} else {
databaseLogger.success('Created new in-memory database', {
operation: 'db_memory_create_success'
});
}
}
} catch (error) {
databaseLogger.error('Failed to initialize memory database', error, {
operation: 'db_memory_init_failed'
databaseLogger.error("Failed to initialize memory database", error, {
operation: "db_memory_init_failed",
});
// If file encryption is critical, fail fast
if (process.env.DB_FILE_ENCRYPTION_REQUIRED === 'true') {
if (process.env.DB_FILE_ENCRYPTION_REQUIRED === "true") {
throw error;
}
// Create fallback in-memory database
databaseLogger.warn('Creating fallback in-memory database', {
operation: 'db_memory_fallback'
});
memoryDatabase = new Database(':memory:');
memoryDatabase = new Database(":memory:");
isNewDatabase = true;
}
} else {
// File encryption disabled - still use memory for consistency
databaseLogger.info('File encryption disabled, using in-memory database', {
operation: 'db_memory_no_encryption'
});
memoryDatabase = new Database(':memory:');
memoryDatabase = new Database(":memory:");
isNewDatabase = true;
}
databaseLogger.info(`Initializing SQLite database`, {
operation: "db_init",
path: actualDbPath,
encrypted: enableFileEncryption && DatabaseFileEncryption.isEncryptedDatabaseFile(encryptedDbPath),
encrypted:
enableFileEncryption &&
DatabaseFileEncryption.isEncryptedDatabaseFile(encryptedDbPath),
inMemory: true,
isNewDatabase
isNewDatabase,
});
const sqlite = memoryDatabase;
@@ -415,13 +402,7 @@ const initializeDatabase = async (): Promise<void> => {
"INSERT INTO settings (key, value) VALUES ('allow_registration', 'true')",
)
.run();
databaseLogger.success("Default settings initialized", {
operation: "db_init",
});
} else {
databaseLogger.debug("Default settings already exist", {
operation: "db_init",
});
}
} catch (e) {
databaseLogger.warn("Could not initialize default settings", {
@@ -442,14 +423,14 @@ async function saveMemoryDatabaseToFile() {
// Encrypt and save to file
DatabaseFileEncryption.encryptDatabaseFromBuffer(buffer, encryptedDbPath);
databaseLogger.debug('In-memory database saved to encrypted file', {
operation: 'memory_db_save',
databaseLogger.debug("In-memory database saved to encrypted file", {
operation: "memory_db_save",
bufferSize: buffer.length,
encryptedPath: encryptedDbPath
encryptedPath: encryptedDbPath,
});
} catch (error) {
databaseLogger.error('Failed to save in-memory database', error, {
operation: 'memory_db_save_failed'
databaseLogger.error("Failed to save in-memory database", error, {
operation: "memory_db_save_failed",
});
}
}
@@ -461,39 +442,55 @@ async function handlePostInitFileEncryption() {
try {
// Clean up any existing unencrypted database files
if (fs.existsSync(dbPath)) {
databaseLogger.warn('Found unencrypted database file, removing for security', {
operation: 'db_security_cleanup_existing',
removingPath: dbPath
});
databaseLogger.warn(
"Found unencrypted database file, removing for security",
{
operation: "db_security_cleanup_existing",
removingPath: dbPath,
},
);
try {
fs.unlinkSync(dbPath);
databaseLogger.success('Unencrypted database file removed for security', {
operation: 'db_security_cleanup_complete',
removedPath: dbPath
});
databaseLogger.success(
"Unencrypted database file removed for security",
{
operation: "db_security_cleanup_complete",
removedPath: dbPath,
},
);
} catch (error) {
databaseLogger.warn('Could not remove unencrypted database file (may be locked)', {
operation: 'db_security_cleanup_deferred',
path: dbPath,
error: error instanceof Error ? error.message : 'Unknown error'
});
databaseLogger.warn(
"Could not remove unencrypted database file (may be locked)",
{
operation: "db_security_cleanup_deferred",
path: dbPath,
error: error instanceof Error ? error.message : "Unknown error",
},
);
// Try again after a short delay
setTimeout(() => {
try {
if (fs.existsSync(dbPath)) {
fs.unlinkSync(dbPath);
databaseLogger.success('Delayed cleanup: unencrypted database file removed', {
operation: 'db_security_cleanup_delayed_success',
removedPath: dbPath
});
databaseLogger.success(
"Delayed cleanup: unencrypted database file removed",
{
operation: "db_security_cleanup_delayed_success",
removedPath: dbPath,
},
);
}
} catch (delayedError) {
databaseLogger.error('Failed to remove unencrypted database file even after delay', delayedError, {
operation: 'db_security_cleanup_delayed_failed',
path: dbPath
});
databaseLogger.error(
"Failed to remove unencrypted database file even after delay",
delayedError,
{
operation: "db_security_cleanup_delayed_failed",
path: dbPath,
},
);
}
}, 2000);
}
@@ -506,16 +503,15 @@ async function handlePostInitFileEncryption() {
// Set up periodic saves every 5 minutes
setInterval(saveMemoryDatabaseToFile, 5 * 60 * 1000);
databaseLogger.info('Periodic in-memory database saves configured', {
operation: 'memory_db_autosave_setup',
intervalMinutes: 5
});
}
} catch (error) {
databaseLogger.error('Failed to handle database file encryption/cleanup', error, {
operation: 'db_encrypt_cleanup_failed'
});
databaseLogger.error(
"Failed to handle database file encryption/cleanup",
error,
{
operation: "db_encrypt_cleanup_failed",
},
);
// Don't fail the entire initialization for this
}
@@ -533,7 +529,9 @@ initializeDatabase()
databaseLogger.success("Database connection established", {
operation: "db_init",
path: actualDbPath,
hasEncryptedBackup: enableFileEncryption && DatabaseFileEncryption.isEncryptedDatabaseFile(encryptedDbPath)
hasEncryptedBackup:
enableFileEncryption &&
DatabaseFileEncryption.isEncryptedDatabaseFile(encryptedDbPath),
});
// Cleanup function for database and temporary files
@@ -542,13 +540,14 @@ async function cleanupDatabase() {
if (memoryDatabase) {
try {
await saveMemoryDatabaseToFile();
databaseLogger.info('In-memory database saved before shutdown', {
operation: 'shutdown_save'
});
} catch (error) {
databaseLogger.error('Failed to save in-memory database before shutdown', error, {
operation: 'shutdown_save_failed'
});
databaseLogger.error(
"Failed to save in-memory database before shutdown",
error,
{
operation: "shutdown_save_failed",
},
);
}
}
@@ -556,20 +555,20 @@ async function cleanupDatabase() {
try {
if (sqlite) {
sqlite.close();
databaseLogger.debug('Database connection closed', {
operation: 'db_close'
databaseLogger.debug("Database connection closed", {
operation: "db_close",
});
}
} catch (error) {
databaseLogger.warn('Error closing database connection', {
operation: 'db_close_error',
error: error instanceof Error ? error.message : 'Unknown error'
databaseLogger.warn("Error closing database connection", {
operation: "db_close_error",
error: error instanceof Error ? error.message : "Unknown error",
});
}
// Clean up temp directory
try {
const tempDir = path.join(dataDir, '.temp');
const tempDir = path.join(dataDir, ".temp");
if (fs.existsSync(tempDir)) {
const files = fs.readdirSync(tempDir);
for (const file of files) {
@@ -582,8 +581,8 @@ async function cleanupDatabase() {
try {
fs.rmdirSync(tempDir);
databaseLogger.debug('Temp directory cleaned up', {
operation: 'temp_dir_cleanup'
databaseLogger.debug("Temp directory cleaned up", {
operation: "temp_dir_cleanup",
});
} catch {
// Ignore directory removal errors
@@ -595,7 +594,7 @@ async function cleanupDatabase() {
}
// Register cleanup handlers
process.on('exit', () => {
process.on("exit", () => {
// Synchronous cleanup only for exit event
if (sqlite) {
try {
@@ -604,17 +603,17 @@ process.on('exit', () => {
}
});
process.on('SIGINT', async () => {
databaseLogger.info('Received SIGINT, cleaning up...', {
operation: 'shutdown'
process.on("SIGINT", async () => {
databaseLogger.info("Received SIGINT, cleaning up...", {
operation: "shutdown",
});
await cleanupDatabase();
process.exit(0);
});
process.on('SIGTERM', async () => {
databaseLogger.info('Received SIGTERM, cleaning up...', {
operation: 'shutdown'
process.on("SIGTERM", async () => {
databaseLogger.info("Received SIGTERM, cleaning up...", {
operation: "shutdown",
});
await cleanupDatabase();
process.exit(0);
@@ -628,29 +627,33 @@ export const databasePaths = {
main: actualDbPath,
encrypted: encryptedDbPath,
directory: dbDir,
inMemory: true
inMemory: true,
};
// Memory database buffer function
function getMemoryDatabaseBuffer(): Buffer {
if (!memoryDatabase) {
throw new Error('Memory database not initialized');
throw new Error("Memory database not initialized");
}
try {
// Export in-memory database to buffer
const buffer = memoryDatabase.serialize();
databaseLogger.debug('Memory database serialized to buffer', {
operation: 'memory_db_serialize',
bufferSize: buffer.length
databaseLogger.debug("Memory database serialized to buffer", {
operation: "memory_db_serialize",
bufferSize: buffer.length,
});
return buffer;
} catch (error) {
databaseLogger.error('Failed to serialize memory database to buffer', error, {
operation: 'memory_db_serialize_failed'
});
databaseLogger.error(
"Failed to serialize memory database to buffer",
error,
{
operation: "memory_db_serialize_failed",
},
);
throw error;
}
}

File diff suppressed because it is too large Load Diff

View File

@@ -65,7 +65,7 @@ router.get("/db/host/internal", async (req: Request, res: Response) => {
try {
const data = await EncryptedDBOperations.select(
db.select().from(sshData),
'ssh_data'
"ssh_data",
);
const result = data.map((row: any) => {
return {
@@ -210,7 +210,11 @@ router.post(
}
try {
const result = await EncryptedDBOperations.insert(sshData, 'ssh_data', sshDataObj);
const result = await EncryptedDBOperations.insert(
sshData,
"ssh_data",
sshDataObj,
);
if (!result) {
sshLogger.warn("No host returned after creation", {
@@ -403,14 +407,19 @@ router.put(
try {
await EncryptedDBOperations.update(
sshData,
'ssh_data',
"ssh_data",
and(eq(sshData.id, Number(hostId)), eq(sshData.userId, userId)),
sshDataObj
sshDataObj,
);
const updatedHosts = await EncryptedDBOperations.select(
db.select().from(sshData).where(and(eq(sshData.id, Number(hostId)), eq(sshData.userId, userId))),
'ssh_data'
db
.select()
.from(sshData)
.where(
and(eq(sshData.id, Number(hostId)), eq(sshData.userId, userId)),
),
"ssh_data",
);
if (updatedHosts.length === 0) {
@@ -486,7 +495,7 @@ router.get("/db/host", authenticateJWT, async (req: Request, res: Response) => {
try {
const data = await EncryptedDBOperations.select(
db.select().from(sshData).where(eq(sshData.userId, userId)),
'ssh_data'
"ssh_data",
);
const result = await Promise.all(
@@ -1106,12 +1115,12 @@ router.put(
try {
const updatedHosts = await EncryptedDBOperations.update(
sshData,
'ssh_data',
"ssh_data",
and(eq(sshData.userId, userId), eq(sshData.folder, oldName)),
{
folder: newName,
updatedAt: new Date().toISOString(),
}
},
);
const updatedCredentials = await db
@@ -1252,7 +1261,7 @@ router.post(
updatedAt: new Date().toISOString(),
};
await EncryptedDBOperations.insert(sshData, 'ssh_data', sshDataObj);
await EncryptedDBOperations.insert(sshData, "ssh_data", sshDataObj);
results.success++;
} catch (error) {
results.failed++;

View File

@@ -10,20 +10,38 @@ import { EncryptedDBOperations } from "../utils/encrypted-db-operations.js";
// 可执行文件检测工具函数
function isExecutableFile(permissions: string, fileName: string): boolean {
// 检查执行权限位 (user, group, other)
const hasExecutePermission = permissions[3] === 'x' || permissions[6] === 'x' || permissions[9] === 'x';
const hasExecutePermission =
permissions[3] === "x" || permissions[6] === "x" || permissions[9] === "x";
// 常见的脚本文件扩展名
const scriptExtensions = ['.sh', '.py', '.pl', '.rb', '.js', '.php', '.bash', '.zsh', '.fish'];
const hasScriptExtension = scriptExtensions.some(ext => fileName.toLowerCase().endsWith(ext));
const scriptExtensions = [
".sh",
".py",
".pl",
".rb",
".js",
".php",
".bash",
".zsh",
".fish",
];
const hasScriptExtension = scriptExtensions.some((ext) =>
fileName.toLowerCase().endsWith(ext),
);
// 常见的编译可执行文件(无扩展名或特定扩展名)
const executableExtensions = ['.bin', '.exe', '.out'];
const hasExecutableExtension = executableExtensions.some(ext => fileName.toLowerCase().endsWith(ext));
const executableExtensions = [".bin", ".exe", ".out"];
const hasExecutableExtension = executableExtensions.some((ext) =>
fileName.toLowerCase().endsWith(ext),
);
// 无扩展名且有执行权限的文件通常是可执行文件
const hasNoExtension = !fileName.includes('.') && hasExecutePermission;
const hasNoExtension = !fileName.includes(".") && hasExecutePermission;
return hasExecutePermission && (hasScriptExtension || hasExecutableExtension || hasNoExtension);
return (
hasExecutePermission &&
(hasScriptExtension || hasExecutableExtension || hasNoExtension)
);
}
const app = express();
@@ -106,13 +124,16 @@ app.post("/ssh/file_manager/ssh/connect", async (req, res) => {
if (credentialId && hostId && userId) {
try {
const credentials = await EncryptedDBOperations.select(
db.select().from(sshCredentials).where(
and(
eq(sshCredentials.id, credentialId),
eq(sshCredentials.userId, userId),
db
.select()
.from(sshCredentials)
.where(
and(
eq(sshCredentials.id, credentialId),
eq(sshCredentials.userId, userId),
),
),
),
'ssh_credentials'
"ssh_credentials",
);
if (credentials.length > 0) {
@@ -140,12 +161,15 @@ app.post("/ssh/file_manager/ssh/connect", async (req, res) => {
});
}
} else if (credentialId && hostId) {
fileLogger.warn("Missing userId for credential resolution in file manager", {
operation: "ssh_credentials",
hostId,
credentialId,
hasUserId: !!userId,
});
fileLogger.warn(
"Missing userId for credential resolution in file manager",
{
operation: "ssh_credentials",
hostId,
credentialId,
hasUserId: !!userId,
},
);
}
const config: any = {
@@ -360,8 +384,11 @@ app.get("/ssh/file_manager/ssh/listFiles", (req, res) => {
owner,
group,
linkTarget, // 符号链接的目标
path: `${sshPath.endsWith('/') ? sshPath : sshPath + '/'}${actualName}`, // 添加完整路径
executable: !isDirectory && !isLink ? isExecutableFile(permissions, actualName) : false // 检测可执行文件
path: `${sshPath.endsWith("/") ? sshPath : sshPath + "/"}${actualName}`, // 添加完整路径
executable:
!isDirectory && !isLink
? isExecutableFile(permissions, actualName)
: false, // 检测可执行文件
});
}
}
@@ -419,11 +446,13 @@ app.get("/ssh/file_manager/ssh/identifySymlink", (req, res) => {
}
const [fileType, target] = data.trim().split("\n");
res.json({
path: linkPath,
target: target,
type: fileType.toLowerCase().includes("directory") ? "directory" : "file"
type: fileType.toLowerCase().includes("directory")
? "directory"
: "file",
});
});
@@ -460,84 +489,91 @@ app.get("/ssh/file_manager/ssh/readFile", (req, res) => {
const escapedPath = filePath.replace(/'/g, "'\"'\"'");
// Get file size first
sshConn.client.exec(`stat -c%s '${escapedPath}' 2>/dev/null || wc -c < '${escapedPath}'`, (sizeErr, sizeStream) => {
if (sizeErr) {
fileLogger.error("SSH file size check error:", sizeErr);
return res.status(500).json({ error: sizeErr.message });
}
let sizeData = "";
let sizeErrorData = "";
sizeStream.on("data", (chunk: Buffer) => {
sizeData += chunk.toString();
});
sizeStream.stderr.on("data", (chunk: Buffer) => {
sizeErrorData += chunk.toString();
});
sizeStream.on("close", (sizeCode) => {
if (sizeCode !== 0) {
fileLogger.error(`File size check failed: ${sizeErrorData}`);
return res.status(500).json({ error: `Cannot check file size: ${sizeErrorData}` });
sshConn.client.exec(
`stat -c%s '${escapedPath}' 2>/dev/null || wc -c < '${escapedPath}'`,
(sizeErr, sizeStream) => {
if (sizeErr) {
fileLogger.error("SSH file size check error:", sizeErr);
return res.status(500).json({ error: sizeErr.message });
}
const fileSize = parseInt(sizeData.trim(), 10);
let sizeData = "";
let sizeErrorData = "";
if (isNaN(fileSize)) {
fileLogger.error("Invalid file size response:", sizeData);
return res.status(500).json({ error: "Cannot determine file size" });
}
sizeStream.on("data", (chunk: Buffer) => {
sizeData += chunk.toString();
});
// Check if file is too large
if (fileSize > MAX_READ_SIZE) {
fileLogger.warn("File too large for reading", {
operation: "file_read",
sessionId,
filePath,
fileSize,
maxSize: MAX_READ_SIZE,
});
return res.status(400).json({
error: `File too large to open in editor. Maximum size is ${MAX_READ_SIZE / 1024 / 1024}MB, file is ${(fileSize / 1024 / 1024).toFixed(2)}MB. Use download instead.`,
fileSize,
maxSize: MAX_READ_SIZE,
tooLarge: true
});
}
sizeStream.stderr.on("data", (chunk: Buffer) => {
sizeErrorData += chunk.toString();
});
// File size is acceptable, proceed with reading
sshConn.client.exec(`cat '${escapedPath}'`, (err, stream) => {
if (err) {
fileLogger.error("SSH readFile error:", err);
return res.status(500).json({ error: err.message });
sizeStream.on("close", (sizeCode) => {
if (sizeCode !== 0) {
fileLogger.error(`File size check failed: ${sizeErrorData}`);
return res
.status(500)
.json({ error: `Cannot check file size: ${sizeErrorData}` });
}
let data = "";
let errorData = "";
const fileSize = parseInt(sizeData.trim(), 10);
stream.on("data", (chunk: Buffer) => {
data += chunk.toString();
});
if (isNaN(fileSize)) {
fileLogger.error("Invalid file size response:", sizeData);
return res.status(500).json({ error: "Cannot determine file size" });
}
stream.stderr.on("data", (chunk: Buffer) => {
errorData += chunk.toString();
});
// Check if file is too large
if (fileSize > MAX_READ_SIZE) {
fileLogger.warn("File too large for reading", {
operation: "file_read",
sessionId,
filePath,
fileSize,
maxSize: MAX_READ_SIZE,
});
return res.status(400).json({
error: `File too large to open in editor. Maximum size is ${MAX_READ_SIZE / 1024 / 1024}MB, file is ${(fileSize / 1024 / 1024).toFixed(2)}MB. Use download instead.`,
fileSize,
maxSize: MAX_READ_SIZE,
tooLarge: true,
});
}
stream.on("close", (code) => {
if (code !== 0) {
fileLogger.error(
`SSH readFile command failed with code ${code}: ${errorData.replace(/\n/g, " ").trim()}`,
);
return res.status(500).json({ error: `Command failed: ${errorData}` });
// File size is acceptable, proceed with reading
sshConn.client.exec(`cat '${escapedPath}'`, (err, stream) => {
if (err) {
fileLogger.error("SSH readFile error:", err);
return res.status(500).json({ error: err.message });
}
res.json({ content: data, path: filePath });
let data = "";
let errorData = "";
stream.on("data", (chunk: Buffer) => {
data += chunk.toString();
});
stream.stderr.on("data", (chunk: Buffer) => {
errorData += chunk.toString();
});
stream.on("close", (code) => {
if (code !== 0) {
fileLogger.error(
`SSH readFile command failed with code ${code}: ${errorData.replace(/\n/g, " ").trim()}`,
);
return res
.status(500)
.json({ error: `Command failed: ${errorData}` });
}
res.json({ content: data, path: filePath });
});
});
});
});
});
},
);
});
app.post("/ssh/file_manager/ssh/writeFile", async (req, res) => {
@@ -1542,12 +1578,7 @@ app.put("/ssh/file_manager/ssh/moveItem", async (req, res) => {
});
app.post("/ssh/file_manager/ssh/downloadFile", async (req, res) => {
const {
sessionId,
path: filePath,
hostId,
userId,
} = req.body;
const { sessionId, path: filePath, hostId, userId } = req.body;
if (!sessionId || !filePath) {
fileLogger.warn("Missing download parameters", {
@@ -1565,7 +1596,9 @@ app.post("/ssh/file_manager/ssh/downloadFile", async (req, res) => {
sessionId,
isConnected: sshConn?.isConnected,
});
return res.status(400).json({ error: "SSH session not found or not connected" });
return res
.status(400)
.json({ error: "SSH session not found or not connected" });
}
sshConn.lastActive = Date.now();
@@ -1582,7 +1615,9 @@ app.post("/ssh/file_manager/ssh/downloadFile", async (req, res) => {
sftp.stat(filePath, (statErr, stats) => {
if (statErr) {
fileLogger.error("File stat failed for download:", statErr);
return res.status(500).json({ error: `Cannot access file: ${statErr.message}` });
return res
.status(500)
.json({ error: `Cannot access file: ${statErr.message}` });
}
if (!stats.isFile()) {
@@ -1593,7 +1628,9 @@ app.post("/ssh/file_manager/ssh/downloadFile", async (req, res) => {
isFile: stats.isFile(),
isDirectory: stats.isDirectory(),
});
return res.status(400).json({ error: "Cannot download directories or special files" });
return res
.status(400)
.json({ error: "Cannot download directories or special files" });
}
// Check file size (limit to 100MB for safety)
@@ -1607,7 +1644,7 @@ app.post("/ssh/file_manager/ssh/downloadFile", async (req, res) => {
maxSize: MAX_FILE_SIZE,
});
return res.status(400).json({
error: `File too large. Maximum size is ${MAX_FILE_SIZE / 1024 / 1024}MB, file is ${(stats.size / 1024 / 1024).toFixed(2)}MB`
error: `File too large. Maximum size is ${MAX_FILE_SIZE / 1024 / 1024}MB, file is ${(stats.size / 1024 / 1024).toFixed(2)}MB`,
});
}
@@ -1615,12 +1652,14 @@ app.post("/ssh/file_manager/ssh/downloadFile", async (req, res) => {
sftp.readFile(filePath, (readErr, data) => {
if (readErr) {
fileLogger.error("File read failed for download:", readErr);
return res.status(500).json({ error: `Failed to read file: ${readErr.message}` });
return res
.status(500)
.json({ error: `Failed to read file: ${readErr.message}` });
}
// Convert to base64 for safe transport
const base64Content = data.toString('base64');
const fileName = filePath.split('/').pop() || 'download';
const base64Content = data.toString("base64");
const fileName = filePath.split("/").pop() || "download";
fileLogger.success("File downloaded successfully", {
operation: "file_download",
@@ -1654,7 +1693,9 @@ app.post("/ssh/file_manager/ssh/copyItem", async (req, res) => {
const sshConn = sshSessions[sessionId];
if (!sshConn || !sshConn.isConnected) {
return res.status(400).json({ error: "SSH session not found or not connected" });
return res
.status(400)
.json({ error: "SSH session not found or not connected" });
}
sshConn.lastActive = Date.now();
@@ -1662,7 +1703,7 @@ app.post("/ssh/file_manager/ssh/copyItem", async (req, res) => {
try {
// Extract source name
const sourceName = sourcePath.split('/').pop() || 'copied_item';
const sourceName = sourcePath.split("/").pop() || "copied_item";
// First check if source file exists
const escapedSourceForCheck = sourcePath.replace(/'/g, "'\"'\"'");
@@ -1676,7 +1717,10 @@ app.post("/ssh/file_manager/ssh/copyItem", async (req, res) => {
}
stream.on("close", (code) => {
fileLogger.info("File existence check completed", { sourcePath, exists: code === 0 });
fileLogger.info("File existence check completed", {
sourcePath,
exists: code === 0,
});
resolve(code === 0);
});
@@ -1687,23 +1731,29 @@ app.post("/ssh/file_manager/ssh/copyItem", async (req, res) => {
if (!checkExists) {
return res.status(404).json({
error: `Source file not found: ${sourcePath}`,
toast: { type: "error", message: `Source file not found: ${sourceName}` }
toast: {
type: "error",
message: `Source file not found: ${sourceName}`,
},
});
}
// Use timestamp for uniqueness
const timestamp = Date.now().toString().slice(-8);
const nameWithoutExt = sourceName.includes('.')
? sourceName.substring(0, sourceName.lastIndexOf('.'))
const nameWithoutExt = sourceName.includes(".")
? sourceName.substring(0, sourceName.lastIndexOf("."))
: sourceName;
const extension = sourceName.includes('.')
? sourceName.substring(sourceName.lastIndexOf('.'))
: '';
const extension = sourceName.includes(".")
? sourceName.substring(sourceName.lastIndexOf("."))
: "";
// Always use timestamp suffix to ensure uniqueness without SSH calls
const uniqueName = `${nameWithoutExt}_copy_${timestamp}${extension}`;
fileLogger.info("Using timestamp-based unique name", { originalName: sourceName, uniqueName });
fileLogger.info("Using timestamp-based unique name", {
originalName: sourceName,
uniqueName,
});
const targetPath = `${targetDir}/${uniqueName}`;
// Escape paths for shell commands
@@ -1722,7 +1772,7 @@ app.post("/ssh/file_manager/ssh/copyItem", async (req, res) => {
sourcePath,
targetPath,
uniqueName,
command: copyCommand.substring(0, 200) + "..." // Log truncated command
command: copyCommand.substring(0, 200) + "...", // Log truncated command
});
// Add timeout to prevent hanging
@@ -1730,12 +1780,16 @@ app.post("/ssh/file_manager/ssh/copyItem", async (req, res) => {
fileLogger.error("Copy command timed out after 20 seconds", {
sourcePath,
targetPath,
command: copyCommand
command: copyCommand,
});
if (!res.headersSent) {
res.status(500).json({
error: "Copy operation timed out",
toast: { type: "error", message: "Copy operation timed out. SSH connection may be unstable." }
toast: {
type: "error",
message:
"Copy operation timed out. SSH connection may be unstable.",
},
});
}
}, 20000); // 20 second timeout for better responsiveness
@@ -1757,21 +1811,30 @@ app.post("/ssh/file_manager/ssh/copyItem", async (req, res) => {
stream.on("data", (data: Buffer) => {
const output = data.toString();
stdoutData += output;
fileLogger.info("Copy command stdout", { output: output.substring(0, 200) });
fileLogger.info("Copy command stdout", {
output: output.substring(0, 200),
});
});
stream.stderr.on("data", (data: Buffer) => {
const output = data.toString();
errorData += output;
fileLogger.info("Copy command stderr", { output: output.substring(0, 200) });
fileLogger.info("Copy command stderr", {
output: output.substring(0, 200),
});
});
stream.on("close", (code) => {
clearTimeout(commandTimeout);
fileLogger.info("Copy command completed", { code, errorData, hasError: errorData.length > 0 });
fileLogger.info("Copy command completed", {
code,
errorData,
hasError: errorData.length > 0,
});
if (code !== 0) {
const fullErrorInfo = errorData || stdoutData || 'No error message available';
const fullErrorInfo =
errorData || stdoutData || "No error message available";
fileLogger.error(`SSH copyItem command failed with code ${code}`, {
operation: "file_copy_failed",
sessionId,
@@ -1781,18 +1844,21 @@ app.post("/ssh/file_manager/ssh/copyItem", async (req, res) => {
exitCode: code,
errorData,
stdoutData,
fullErrorInfo
fullErrorInfo,
});
if (!res.headersSent) {
return res.status(500).json({
error: `Copy failed: ${fullErrorInfo}`,
toast: { type: "error", message: `Copy failed: ${fullErrorInfo}` },
toast: {
type: "error",
message: `Copy failed: ${fullErrorInfo}`,
},
debug: {
sourcePath,
targetPath,
exitCode: code,
command: copyCommand
}
command: copyCommand,
},
});
}
return;
@@ -1830,7 +1896,6 @@ app.post("/ssh/file_manager/ssh/copyItem", async (req, res) => {
}
});
});
} catch (error: any) {
fileLogger.error("Copy operation error:", error);
res.status(500).json({ error: error.message });
@@ -1839,23 +1904,23 @@ app.post("/ssh/file_manager/ssh/copyItem", async (req, res) => {
// Helper function to determine MIME type based on file extension
function getMimeType(fileName: string): string {
const ext = fileName.split('.').pop()?.toLowerCase();
const ext = fileName.split(".").pop()?.toLowerCase();
const mimeTypes: Record<string, string> = {
'txt': 'text/plain',
'json': 'application/json',
'js': 'text/javascript',
'html': 'text/html',
'css': 'text/css',
'png': 'image/png',
'jpg': 'image/jpeg',
'jpeg': 'image/jpeg',
'gif': 'image/gif',
'pdf': 'application/pdf',
'zip': 'application/zip',
'tar': 'application/x-tar',
'gz': 'application/gzip',
txt: "text/plain",
json: "application/json",
js: "text/javascript",
html: "text/html",
css: "text/css",
png: "image/png",
jpg: "image/jpeg",
jpeg: "image/jpeg",
gif: "image/gif",
pdf: "application/pdf",
zip: "application/zip",
tar: "application/x-tar",
gz: "application/gzip",
};
return mimeTypes[ext || ''] || 'application/octet-stream';
return mimeTypes[ext || ""] || "application/octet-stream";
}
process.on("SIGINT", () => {
@@ -1874,12 +1939,15 @@ app.post("/ssh/file_manager/ssh/executeFile", async (req, res) => {
const sshConn = sshSessions[sessionId];
if (!sshConn || !sshConn.isConnected) {
fileLogger.error("SSH connection not found or not connected for executeFile", {
operation: "execute_file",
sessionId,
hasConnection: !!sshConn,
isConnected: sshConn?.isConnected
});
fileLogger.error(
"SSH connection not found or not connected for executeFile",
{
operation: "execute_file",
sessionId,
hasConnection: !!sshConn,
isConnected: sshConn?.isConnected,
},
);
return res.status(400).json({ error: "SSH connection not available" });
}
@@ -1895,10 +1963,12 @@ app.post("/ssh/file_manager/ssh/executeFile", async (req, res) => {
sshConn.client.exec(checkCommand, (checkErr, checkStream) => {
if (checkErr) {
fileLogger.error("SSH executeFile check error:", checkErr);
return res.status(500).json({ error: "Failed to check file executability" });
return res
.status(500)
.json({ error: "Failed to check file executability" });
}
let checkResult = '';
let checkResult = "";
checkStream.on("data", (data) => {
checkResult += data.toString();
});
@@ -1915,7 +1985,7 @@ app.post("/ssh/file_manager/ssh/executeFile", async (req, res) => {
operation: "execute_file",
sessionId,
filePath,
command: executeCommand.substring(0, 100) + "..."
command: executeCommand.substring(0, 100) + "...",
});
sshConn.client.exec(executeCommand, (err, stream) => {
@@ -1924,8 +1994,8 @@ app.post("/ssh/file_manager/ssh/executeFile", async (req, res) => {
return res.status(500).json({ error: "Failed to execute file" });
}
let output = '';
let errorOutput = '';
let output = "";
let errorOutput = "";
stream.on("data", (data) => {
output += data.toString();
@@ -1938,8 +2008,10 @@ app.post("/ssh/file_manager/ssh/executeFile", async (req, res) => {
stream.on("close", (code) => {
// 从输出中提取退出代码
const exitCodeMatch = output.match(/EXIT_CODE:(\d+)$/);
const actualExitCode = exitCodeMatch ? parseInt(exitCodeMatch[1]) : code;
const cleanOutput = output.replace(/EXIT_CODE:\d+$/, '').trim();
const actualExitCode = exitCodeMatch
? parseInt(exitCodeMatch[1])
: code;
const cleanOutput = output.replace(/EXIT_CODE:\d+$/, "").trim();
fileLogger.info("File execution completed", {
operation: "execute_file",
@@ -1947,7 +2019,7 @@ app.post("/ssh/file_manager/ssh/executeFile", async (req, res) => {
filePath,
exitCode: actualExitCode,
outputLength: cleanOutput.length,
errorLength: errorOutput.length
errorLength: errorOutput.length,
});
res.json({
@@ -1955,7 +2027,7 @@ app.post("/ssh/file_manager/ssh/executeFile", async (req, res) => {
exitCode: actualExitCode,
output: cleanOutput,
error: errorOutput,
timestamp: new Date().toISOString()
timestamp: new Date().toISOString(),
});
});

View File

@@ -309,7 +309,7 @@ async function fetchAllHosts(): Promise<SSHHostWithCredentials[]> {
try {
const hosts = await EncryptedDBOperations.select(
db.select().from(sshData),
'ssh_data'
"ssh_data",
);
const hostsWithCredentials: SSHHostWithCredentials[] = [];
@@ -339,7 +339,7 @@ async function fetchHostById(
try {
const hosts = await EncryptedDBOperations.select(
db.select().from(sshData).where(eq(sshData.id, id)),
'ssh_data'
"ssh_data",
);
if (hosts.length === 0) {
@@ -358,17 +358,6 @@ async function resolveHostCredentials(
host: any,
): Promise<SSHHostWithCredentials | undefined> {
try {
statsLogger.debug(`Resolving credentials for host ${host.id}`, {
operation: 'credential_resolve',
hostId: host.id,
authType: host.authType,
credentialId: host.credentialId,
hasPassword: !!host.password,
hasKey: !!host.key,
passwordLength: host.password?.length || 0,
keyLength: host.key?.length || 0
});
const baseHost: any = {
id: host.id,
name: host.name,
@@ -399,24 +388,32 @@ async function resolveHostCredentials(
if (host.credentialId) {
try {
const credentials = await EncryptedDBOperations.select(
db.select().from(sshCredentials).where(and(
eq(sshCredentials.id, host.credentialId),
eq(sshCredentials.userId, host.userId),
)),
'ssh_credentials'
db
.select()
.from(sshCredentials)
.where(
and(
eq(sshCredentials.id, host.credentialId),
eq(sshCredentials.userId, host.userId),
),
),
"ssh_credentials",
);
if (credentials.length > 0) {
const credential = credentials[0];
statsLogger.debug(`Using credential ${credential.id} for host ${host.id}`, {
operation: 'credential_resolve',
credentialId: credential.id,
authType: credential.authType,
hasPassword: !!credential.password,
hasKey: !!credential.key,
passwordLength: credential.password?.length || 0,
keyLength: credential.key?.length || 0
});
statsLogger.debug(
`Using credential ${credential.id} for host ${host.id}`,
{
operation: "credential_resolve",
credentialId: credential.id,
authType: credential.authType,
hasPassword: !!credential.password,
hasKey: !!credential.key,
passwordLength: credential.password?.length || 0,
keyLength: credential.key?.length || 0,
},
);
baseHost.credentialId = credential.id;
baseHost.username = credential.username;
@@ -435,9 +432,6 @@ async function resolveHostCredentials(
baseHost.keyType = credential.keyType;
}
} else {
statsLogger.warn(
`Credential ${host.credentialId} not found for host ${host.id}, using legacy data`,
);
addLegacyCredentials(baseHost, host);
}
} catch (error) {
@@ -447,25 +441,9 @@ async function resolveHostCredentials(
addLegacyCredentials(baseHost, host);
}
} else {
statsLogger.debug(`Using legacy credentials for host ${host.id}`, {
operation: 'credential_resolve',
hasPassword: !!host.password,
hasKey: !!host.key,
passwordLength: host.password?.length || 0,
keyLength: host.key?.length || 0
});
addLegacyCredentials(baseHost, host);
}
statsLogger.debug(`Final resolved host ${host.id}`, {
operation: 'credential_resolve',
authType: baseHost.authType,
hasPassword: !!baseHost.password,
hasKey: !!baseHost.key,
passwordLength: baseHost.password?.length || 0,
keyLength: baseHost.key?.length || 0
});
return baseHost;
} catch (error) {
statsLogger.error(
@@ -484,7 +462,7 @@ function addLegacyCredentials(baseHost: any, host: any): void {
function buildSshConfig(host: SSHHostWithCredentials): ConnectConfig {
statsLogger.debug(`Building SSH config for host ${host.ip}`, {
operation: 'ssh_config',
operation: "ssh_config",
authType: host.authType,
hasPassword: !!host.password,
hasKey: !!host.key,
@@ -492,7 +470,9 @@ function buildSshConfig(host: SSHHostWithCredentials): ConnectConfig {
passwordLength: host.password?.length || 0,
keyLength: host.key?.length || 0,
passwordType: typeof host.password,
passwordRaw: host.password ? JSON.stringify(host.password.substring(0, 20)) : null
passwordRaw: host.password
? JSON.stringify(host.password.substring(0, 20))
: null,
});
const base: ConnectConfig = {
@@ -508,12 +488,12 @@ function buildSshConfig(host: SSHHostWithCredentials): ConnectConfig {
throw new Error(`No password available for host ${host.ip}`);
}
statsLogger.debug(`Using password auth for ${host.ip}`, {
operation: 'ssh_config',
operation: "ssh_config",
passwordLength: host.password.length,
passwordFirst3: host.password.substring(0, 3),
passwordLast3: host.password.substring(host.password.length - 3),
passwordType: typeof host.password,
passwordIsString: typeof host.password === 'string'
passwordIsString: typeof host.password === "string",
});
(base as any).password = host.password;
} else if (host.authType === "key") {
@@ -522,9 +502,9 @@ function buildSshConfig(host: SSHHostWithCredentials): ConnectConfig {
}
statsLogger.debug(`Using key auth for ${host.ip}`, {
operation: 'ssh_config',
keyPreview: host.key.substring(0, Math.min(50, host.key.length)) + '...',
hasPassphrase: !!host.keyPassword
operation: "ssh_config",
keyPreview: host.key.substring(0, Math.min(50, host.key.length)) + "...",
hasPassphrase: !!host.keyPassword,
});
try {

View File

@@ -178,22 +178,22 @@ wss.on("connection", (ws: WebSocket) => {
}, 60000);
sshLogger.debug(`Terminal SSH setup`, {
operation: 'terminal_ssh',
operation: "terminal_ssh",
hostId: id,
ip,
authType,
hasPassword: !!password,
passwordLength: password?.length || 0,
hasCredentialId: !!credentialId
hasCredentialId: !!credentialId,
});
if (password) {
sshLogger.debug(`Password preview: "${password.substring(0, 15)}..."`, {
operation: 'terminal_ssh_password'
operation: "terminal_ssh_password",
});
} else {
sshLogger.debug(`No password provided`, {
operation: 'terminal_ssh_password'
operation: "terminal_ssh_password",
});
}
@@ -201,13 +201,16 @@ wss.on("connection", (ws: WebSocket) => {
if (credentialId && id && hostConfig.userId) {
try {
const credentials = await EncryptedDBOperations.select(
db.select().from(sshCredentials).where(
and(
eq(sshCredentials.id, credentialId),
eq(sshCredentials.userId, hostConfig.userId),
db
.select()
.from(sshCredentials)
.where(
and(
eq(sshCredentials.id, credentialId),
eq(sshCredentials.userId, hostConfig.userId),
),
),
),
'ssh_credentials'
"ssh_credentials",
);
if (credentials.length > 0) {

View File

@@ -32,7 +32,14 @@ import "dotenv/config";
systemLogger.success("All backend services initialized successfully", {
operation: "startup_complete",
services: ["database", "encryption", "terminal", "tunnel", "file_manager", "stats"],
services: [
"database",
"encryption",
"terminal",
"tunnel",
"file_manager",
"stats",
],
version: version,
});

View File

@@ -1,6 +1,6 @@
import { FieldEncryption } from './encryption.js';
import { EncryptionKeyManager } from './encryption-key-manager.js';
import { databaseLogger } from './logger.js';
import { FieldEncryption } from "./encryption.js";
import { EncryptionKeyManager } from "./encryption-key-manager.js";
import { databaseLogger } from "./logger.js";
interface EncryptionContext {
masterPassword: string;
@@ -14,26 +14,29 @@ class DatabaseEncryption {
static async initialize(config: Partial<EncryptionContext> = {}) {
const keyManager = EncryptionKeyManager.getInstance();
const masterPassword = config.masterPassword || await keyManager.initializeKey();
const masterPassword =
config.masterPassword || (await keyManager.initializeKey());
this.context = {
masterPassword,
encryptionEnabled: config.encryptionEnabled ?? true,
forceEncryption: config.forceEncryption ?? false,
migrateOnAccess: config.migrateOnAccess ?? true
migrateOnAccess: config.migrateOnAccess ?? true,
};
databaseLogger.info('Database encryption initialized', {
operation: 'encryption_init',
databaseLogger.info("Database encryption initialized", {
operation: "encryption_init",
enabled: this.context.encryptionEnabled,
forceEncryption: this.context.forceEncryption,
dynamicKey: !config.masterPassword
dynamicKey: !config.masterPassword,
});
}
static getContext(): EncryptionContext {
if (!this.context) {
throw new Error('DatabaseEncryption not initialized. Call initialize() first.');
throw new Error(
"DatabaseEncryption not initialized. Call initialize() first.",
);
}
return this.context;
}
@@ -48,15 +51,25 @@ class DatabaseEncryption {
for (const [fieldName, value] of Object.entries(record)) {
if (FieldEncryption.shouldEncryptField(tableName, fieldName) && value) {
try {
const fieldKey = FieldEncryption.getFieldKey(context.masterPassword, `${tableName}.${fieldName}`);
encryptedRecord[fieldName] = FieldEncryption.encryptField(value as string, fieldKey);
const fieldKey = FieldEncryption.getFieldKey(
context.masterPassword,
`${tableName}.${fieldName}`,
);
encryptedRecord[fieldName] = FieldEncryption.encryptField(
value as string,
fieldKey,
);
hasEncryption = true;
} catch (error) {
databaseLogger.error(`Failed to encrypt field ${tableName}.${fieldName}`, error, {
operation: 'field_encryption',
table: tableName,
field: fieldName
});
databaseLogger.error(
`Failed to encrypt field ${tableName}.${fieldName}`,
error,
{
operation: "field_encryption",
table: tableName,
field: fieldName,
},
);
throw error;
}
}
@@ -64,8 +77,8 @@ class DatabaseEncryption {
if (hasEncryption) {
databaseLogger.debug(`Encrypted sensitive fields for ${tableName}`, {
operation: 'record_encryption',
table: tableName
operation: "record_encryption",
table: tableName,
});
}
@@ -83,28 +96,41 @@ class DatabaseEncryption {
for (const [fieldName, value] of Object.entries(record)) {
if (FieldEncryption.shouldEncryptField(tableName, fieldName) && value) {
try {
const fieldKey = FieldEncryption.getFieldKey(context.masterPassword, `${tableName}.${fieldName}`);
const fieldKey = FieldEncryption.getFieldKey(
context.masterPassword,
`${tableName}.${fieldName}`,
);
if (FieldEncryption.isEncrypted(value as string)) {
decryptedRecord[fieldName] = FieldEncryption.decryptField(value as string, fieldKey);
decryptedRecord[fieldName] = FieldEncryption.decryptField(
value as string,
fieldKey,
);
hasDecryption = true;
} else if (context.encryptionEnabled && !context.forceEncryption) {
decryptedRecord[fieldName] = value;
needsMigration = context.migrateOnAccess;
} else if (context.forceEncryption) {
databaseLogger.warn(`Unencrypted field detected in force encryption mode`, {
operation: 'decryption_warning',
table: tableName,
field: fieldName
});
databaseLogger.warn(
`Unencrypted field detected in force encryption mode`,
{
operation: "decryption_warning",
table: tableName,
field: fieldName,
},
);
decryptedRecord[fieldName] = value;
}
} catch (error) {
databaseLogger.error(`Failed to decrypt field ${tableName}.${fieldName}`, error, {
operation: 'field_decryption',
table: tableName,
field: fieldName
});
databaseLogger.error(
`Failed to decrypt field ${tableName}.${fieldName}`,
error,
{
operation: "field_decryption",
table: tableName,
field: fieldName,
},
);
if (context.forceEncryption) {
throw error;
@@ -115,13 +141,6 @@ class DatabaseEncryption {
}
}
if (hasDecryption) {
databaseLogger.debug(`Decrypted sensitive fields for ${tableName}`, {
operation: 'record_decryption',
table: tableName
});
}
if (needsMigration) {
this.scheduleFieldMigration(tableName, record);
}
@@ -131,7 +150,7 @@ class DatabaseEncryption {
static decryptRecords(tableName: string, records: any[]): any[] {
if (!Array.isArray(records)) return records;
return records.map(record => this.decryptRecord(tableName, record));
return records.map((record) => this.decryptRecord(tableName, record));
}
private static scheduleFieldMigration(tableName: string, record: any) {
@@ -139,11 +158,15 @@ class DatabaseEncryption {
try {
await this.migrateRecord(tableName, record);
} catch (error) {
databaseLogger.error(`Failed to migrate record ${tableName}:${record.id}`, error, {
operation: 'migration_failed',
table: tableName,
recordId: record.id
});
databaseLogger.error(
`Failed to migrate record ${tableName}:${record.id}`,
error,
{
operation: "migration_failed",
table: tableName,
recordId: record.id,
},
);
}
}, 1000);
}
@@ -156,49 +179,61 @@ class DatabaseEncryption {
const updatedRecord = { ...record };
for (const [fieldName, value] of Object.entries(record)) {
if (FieldEncryption.shouldEncryptField(tableName, fieldName) &&
value && !FieldEncryption.isEncrypted(value as string)) {
if (
FieldEncryption.shouldEncryptField(tableName, fieldName) &&
value &&
!FieldEncryption.isEncrypted(value as string)
) {
try {
const fieldKey = FieldEncryption.getFieldKey(context.masterPassword, `${tableName}.${fieldName}`);
updatedRecord[fieldName] = FieldEncryption.encryptField(value as string, fieldKey);
const fieldKey = FieldEncryption.getFieldKey(
context.masterPassword,
`${tableName}.${fieldName}`,
);
updatedRecord[fieldName] = FieldEncryption.encryptField(
value as string,
fieldKey,
);
needsUpdate = true;
} catch (error) {
databaseLogger.error(`Failed to migrate field ${tableName}.${fieldName}`, error, {
operation: 'field_migration',
table: tableName,
field: fieldName,
recordId: record.id
});
databaseLogger.error(
`Failed to migrate field ${tableName}.${fieldName}`,
error,
{
operation: "field_migration",
table: tableName,
field: fieldName,
recordId: record.id,
},
);
throw error;
}
}
}
if (needsUpdate) {
databaseLogger.info(`Migrated record to encrypted format`, {
operation: 'record_migration',
table: tableName,
recordId: record.id
});
}
return updatedRecord;
}
static validateConfiguration(): boolean {
try {
const context = this.getContext();
const testData = 'test-encryption-data';
const testKey = FieldEncryption.getFieldKey(context.masterPassword, 'test');
const testData = "test-encryption-data";
const testKey = FieldEncryption.getFieldKey(
context.masterPassword,
"test",
);
const encrypted = FieldEncryption.encryptField(testData, testKey);
const decrypted = FieldEncryption.decryptField(encrypted, testKey);
return decrypted === testData;
} catch (error) {
databaseLogger.error('Encryption configuration validation failed', error, {
operation: 'config_validation'
});
databaseLogger.error(
"Encryption configuration validation failed",
error,
{
operation: "config_validation",
},
);
return false;
}
}
@@ -210,14 +245,14 @@ class DatabaseEncryption {
enabled: context.encryptionEnabled,
forceEncryption: context.forceEncryption,
migrateOnAccess: context.migrateOnAccess,
configValid: this.validateConfiguration()
configValid: this.validateConfiguration(),
};
} catch {
return {
enabled: false,
forceEncryption: false,
migrateOnAccess: false,
configValid: false
configValid: false,
};
}
}
@@ -230,7 +265,7 @@ class DatabaseEncryption {
return {
...encryptionStatus,
key: keyStatus,
initialized: this.context !== null
initialized: this.context !== null,
};
}
@@ -241,12 +276,12 @@ class DatabaseEncryption {
this.context = null;
await this.initialize({ masterPassword: newKey });
databaseLogger.warn('Database encryption reinitialized with new key', {
operation: 'encryption_reinit',
requiresMigration: true
databaseLogger.warn("Database encryption reinitialized with new key", {
operation: "encryption_reinit",
requiresMigration: true,
});
}
}
export { DatabaseEncryption };
export type { EncryptionContext };
export type { EncryptionContext };

View File

@@ -1,8 +1,8 @@
import crypto from 'crypto';
import fs from 'fs';
import path from 'path';
import { HardwareFingerprint } from './hardware-fingerprint.js';
import { databaseLogger } from './logger.js';
import crypto from "crypto";
import fs from "fs";
import path from "path";
import { HardwareFingerprint } from "./hardware-fingerprint.js";
import { databaseLogger } from "./logger.js";
interface EncryptedFileMetadata {
iv: string;
@@ -18,11 +18,11 @@ interface EncryptedFileMetadata {
* This provides an additional security layer on top of field-level encryption
*/
class DatabaseFileEncryption {
private static readonly VERSION = 'v1';
private static readonly ALGORITHM = 'aes-256-gcm';
private static readonly VERSION = "v1";
private static readonly ALGORITHM = "aes-256-gcm";
private static readonly KEY_ITERATIONS = 100000;
private static readonly ENCRYPTED_FILE_SUFFIX = '.encrypted';
private static readonly METADATA_FILE_SUFFIX = '.meta';
private static readonly ENCRYPTED_FILE_SUFFIX = ".encrypted";
private static readonly METADATA_FILE_SUFFIX = ".meta";
/**
* Generate file encryption key from hardware fingerprint
@@ -35,15 +35,9 @@ class DatabaseFileEncryption {
salt,
this.KEY_ITERATIONS,
32, // 256 bits for AES-256
'sha256'
"sha256",
);
databaseLogger.debug('Generated file encryption key from hardware fingerprint', {
operation: 'file_key_generation',
iterations: this.KEY_ITERATIONS,
keyLength: key.length
});
return key;
}
@@ -59,20 +53,17 @@ class DatabaseFileEncryption {
// Encrypt the buffer
const cipher = crypto.createCipheriv(this.ALGORITHM, key, iv) as any;
const encrypted = Buffer.concat([
cipher.update(buffer),
cipher.final()
]);
const encrypted = Buffer.concat([cipher.update(buffer), cipher.final()]);
const tag = cipher.getAuthTag();
// Create metadata
const metadata: EncryptedFileMetadata = {
iv: iv.toString('hex'),
tag: tag.toString('hex'),
iv: iv.toString("hex"),
tag: tag.toString("hex"),
version: this.VERSION,
fingerprint: HardwareFingerprint.generate().substring(0, 16),
salt: salt.toString('hex'),
algorithm: this.ALGORITHM
salt: salt.toString("hex"),
algorithm: this.ALGORITHM,
};
// Write encrypted file and metadata
@@ -80,21 +71,15 @@ class DatabaseFileEncryption {
fs.writeFileSync(targetPath, encrypted);
fs.writeFileSync(metadataPath, JSON.stringify(metadata, null, 2));
databaseLogger.info('Database buffer encrypted successfully', {
operation: 'database_buffer_encryption',
targetPath,
bufferSize: buffer.length,
encryptedSize: encrypted.length,
fingerprintPrefix: metadata.fingerprint
});
return targetPath;
} catch (error) {
databaseLogger.error('Failed to encrypt database buffer', error, {
operation: 'database_buffer_encryption_failed',
targetPath
databaseLogger.error("Failed to encrypt database buffer", error, {
operation: "database_buffer_encryption_failed",
targetPath,
});
throw new Error(`Database buffer encryption failed: ${error instanceof Error ? error.message : 'Unknown error'}`);
throw new Error(
`Database buffer encryption failed: ${error instanceof Error ? error.message : "Unknown error"}`,
);
}
}
@@ -106,7 +91,8 @@ class DatabaseFileEncryption {
throw new Error(`Source database file does not exist: ${sourcePath}`);
}
const encryptedPath = targetPath || `${sourcePath}${this.ENCRYPTED_FILE_SUFFIX}`;
const encryptedPath =
targetPath || `${sourcePath}${this.ENCRYPTED_FILE_SUFFIX}`;
const metadataPath = `${encryptedPath}${this.METADATA_FILE_SUFFIX}`;
try {
@@ -122,41 +108,43 @@ class DatabaseFileEncryption {
const cipher = crypto.createCipheriv(this.ALGORITHM, key, iv) as any;
const encrypted = Buffer.concat([
cipher.update(sourceData),
cipher.final()
cipher.final(),
]);
const tag = cipher.getAuthTag();
// Create metadata
const metadata: EncryptedFileMetadata = {
iv: iv.toString('hex'),
tag: tag.toString('hex'),
iv: iv.toString("hex"),
tag: tag.toString("hex"),
version: this.VERSION,
fingerprint: HardwareFingerprint.generate().substring(0, 16),
salt: salt.toString('hex'),
algorithm: this.ALGORITHM
salt: salt.toString("hex"),
algorithm: this.ALGORITHM,
};
// Write encrypted file and metadata
fs.writeFileSync(encryptedPath, encrypted);
fs.writeFileSync(metadataPath, JSON.stringify(metadata, null, 2));
databaseLogger.info('Database file encrypted successfully', {
operation: 'database_file_encryption',
databaseLogger.info("Database file encrypted successfully", {
operation: "database_file_encryption",
sourcePath,
encryptedPath,
fileSize: sourceData.length,
encryptedSize: encrypted.length,
fingerprintPrefix: metadata.fingerprint
fingerprintPrefix: metadata.fingerprint,
});
return encryptedPath;
} catch (error) {
databaseLogger.error('Failed to encrypt database file', error, {
operation: 'database_file_encryption_failed',
databaseLogger.error("Failed to encrypt database file", error, {
operation: "database_file_encryption_failed",
sourcePath,
targetPath: encryptedPath
targetPath: encryptedPath,
});
throw new Error(`Database file encryption failed: ${error instanceof Error ? error.message : 'Unknown error'}`);
throw new Error(
`Database file encryption failed: ${error instanceof Error ? error.message : "Unknown error"}`,
);
}
}
@@ -165,7 +153,9 @@ class DatabaseFileEncryption {
*/
static decryptDatabaseToBuffer(encryptedPath: string): Buffer {
if (!fs.existsSync(encryptedPath)) {
throw new Error(`Encrypted database file does not exist: ${encryptedPath}`);
throw new Error(
`Encrypted database file does not exist: ${encryptedPath}`,
);
}
const metadataPath = `${encryptedPath}${this.METADATA_FILE_SUFFIX}`;
@@ -175,7 +165,7 @@ class DatabaseFileEncryption {
try {
// Read metadata
const metadataContent = fs.readFileSync(metadataPath, 'utf8');
const metadataContent = fs.readFileSync(metadataPath, "utf8");
const metadata: EncryptedFileMetadata = JSON.parse(metadataContent);
// Validate metadata version
@@ -184,60 +174,59 @@ class DatabaseFileEncryption {
}
// Validate hardware fingerprint
const currentFingerprint = HardwareFingerprint.generate().substring(0, 16);
const currentFingerprint = HardwareFingerprint.generate().substring(
0,
16,
);
if (metadata.fingerprint !== currentFingerprint) {
databaseLogger.warn('Hardware fingerprint mismatch for database buffer decryption', {
operation: 'database_buffer_decryption',
expected: metadata.fingerprint,
current: currentFingerprint
});
throw new Error('Hardware fingerprint mismatch - database was encrypted on different hardware');
throw new Error(
"Hardware fingerprint mismatch - database was encrypted on different hardware",
);
}
// Read encrypted data
const encryptedData = fs.readFileSync(encryptedPath);
// Generate decryption key
const salt = Buffer.from(metadata.salt, 'hex');
const salt = Buffer.from(metadata.salt, "hex");
const key = this.generateFileEncryptionKey(salt);
// Decrypt to buffer
const decipher = crypto.createDecipheriv(
metadata.algorithm,
key,
Buffer.from(metadata.iv, 'hex')
Buffer.from(metadata.iv, "hex"),
) as any;
decipher.setAuthTag(Buffer.from(metadata.tag, 'hex'));
decipher.setAuthTag(Buffer.from(metadata.tag, "hex"));
const decryptedBuffer = Buffer.concat([
decipher.update(encryptedData),
decipher.final()
decipher.final(),
]);
databaseLogger.info('Database decrypted to memory buffer', {
operation: 'database_buffer_decryption',
encryptedPath,
encryptedSize: encryptedData.length,
decryptedSize: decryptedBuffer.length,
fingerprintPrefix: metadata.fingerprint
});
return decryptedBuffer;
} catch (error) {
databaseLogger.error('Failed to decrypt database to buffer', error, {
operation: 'database_buffer_decryption_failed',
encryptedPath
databaseLogger.error("Failed to decrypt database to buffer", error, {
operation: "database_buffer_decryption_failed",
encryptedPath,
});
throw new Error(`Database buffer decryption failed: ${error instanceof Error ? error.message : 'Unknown error'}`);
throw new Error(
`Database buffer decryption failed: ${error instanceof Error ? error.message : "Unknown error"}`,
);
}
}
/**
* Decrypt database file
*/
static decryptDatabaseFile(encryptedPath: string, targetPath?: string): string {
static decryptDatabaseFile(
encryptedPath: string,
targetPath?: string,
): string {
if (!fs.existsSync(encryptedPath)) {
throw new Error(`Encrypted database file does not exist: ${encryptedPath}`);
throw new Error(
`Encrypted database file does not exist: ${encryptedPath}`,
);
}
const metadataPath = `${encryptedPath}${this.METADATA_FILE_SUFFIX}`;
@@ -245,11 +234,12 @@ class DatabaseFileEncryption {
throw new Error(`Metadata file does not exist: ${metadataPath}`);
}
const decryptedPath = targetPath || encryptedPath.replace(this.ENCRYPTED_FILE_SUFFIX, '');
const decryptedPath =
targetPath || encryptedPath.replace(this.ENCRYPTED_FILE_SUFFIX, "");
try {
// Read metadata
const metadataContent = fs.readFileSync(metadataPath, 'utf8');
const metadataContent = fs.readFileSync(metadataPath, "utf8");
const metadata: EncryptedFileMetadata = JSON.parse(metadataContent);
// Validate metadata version
@@ -258,56 +248,63 @@ class DatabaseFileEncryption {
}
// Validate hardware fingerprint
const currentFingerprint = HardwareFingerprint.generate().substring(0, 16);
const currentFingerprint = HardwareFingerprint.generate().substring(
0,
16,
);
if (metadata.fingerprint !== currentFingerprint) {
databaseLogger.warn('Hardware fingerprint mismatch for database file', {
operation: 'database_file_decryption',
databaseLogger.warn("Hardware fingerprint mismatch for database file", {
operation: "database_file_decryption",
expected: metadata.fingerprint,
current: currentFingerprint
current: currentFingerprint,
});
throw new Error('Hardware fingerprint mismatch - database was encrypted on different hardware');
throw new Error(
"Hardware fingerprint mismatch - database was encrypted on different hardware",
);
}
// Read encrypted data
const encryptedData = fs.readFileSync(encryptedPath);
// Generate decryption key
const salt = Buffer.from(metadata.salt, 'hex');
const salt = Buffer.from(metadata.salt, "hex");
const key = this.generateFileEncryptionKey(salt);
// Decrypt the file
const decipher = crypto.createDecipheriv(
metadata.algorithm,
key,
Buffer.from(metadata.iv, 'hex')
Buffer.from(metadata.iv, "hex"),
) as any;
decipher.setAuthTag(Buffer.from(metadata.tag, 'hex'));
decipher.setAuthTag(Buffer.from(metadata.tag, "hex"));
const decrypted = Buffer.concat([
decipher.update(encryptedData),
decipher.final()
decipher.final(),
]);
// Write decrypted file
fs.writeFileSync(decryptedPath, decrypted);
databaseLogger.info('Database file decrypted successfully', {
operation: 'database_file_decryption',
databaseLogger.info("Database file decrypted successfully", {
operation: "database_file_decryption",
encryptedPath,
decryptedPath,
encryptedSize: encryptedData.length,
decryptedSize: decrypted.length,
fingerprintPrefix: metadata.fingerprint
fingerprintPrefix: metadata.fingerprint,
});
return decryptedPath;
} catch (error) {
databaseLogger.error('Failed to decrypt database file', error, {
operation: 'database_file_decryption_failed',
databaseLogger.error("Failed to decrypt database file", error, {
operation: "database_file_decryption_failed",
encryptedPath,
targetPath: decryptedPath
targetPath: decryptedPath,
});
throw new Error(`Database file decryption failed: ${error instanceof Error ? error.message : 'Unknown error'}`);
throw new Error(
`Database file decryption failed: ${error instanceof Error ? error.message : "Unknown error"}`,
);
}
}
@@ -322,9 +319,12 @@ class DatabaseFileEncryption {
}
try {
const metadataContent = fs.readFileSync(metadataPath, 'utf8');
const metadataContent = fs.readFileSync(metadataPath, "utf8");
const metadata: EncryptedFileMetadata = JSON.parse(metadataContent);
return metadata.version === this.VERSION && metadata.algorithm === this.ALGORITHM;
return (
metadata.version === this.VERSION &&
metadata.algorithm === this.ALGORITHM
);
} catch {
return false;
}
@@ -346,18 +346,21 @@ class DatabaseFileEncryption {
try {
const metadataPath = `${encryptedPath}${this.METADATA_FILE_SUFFIX}`;
const metadataContent = fs.readFileSync(metadataPath, 'utf8');
const metadataContent = fs.readFileSync(metadataPath, "utf8");
const metadata: EncryptedFileMetadata = JSON.parse(metadataContent);
const fileStats = fs.statSync(encryptedPath);
const currentFingerprint = HardwareFingerprint.generate().substring(0, 16);
const currentFingerprint = HardwareFingerprint.generate().substring(
0,
16,
);
return {
version: metadata.version,
algorithm: metadata.algorithm,
fingerprint: metadata.fingerprint,
isCurrentHardware: metadata.fingerprint === currentFingerprint,
fileSize: fileStats.size
fileSize: fileStats.size,
};
} catch {
return null;
@@ -367,7 +370,10 @@ class DatabaseFileEncryption {
/**
* Securely backup database by creating encrypted copy
*/
static createEncryptedBackup(databasePath: string, backupDir: string): string {
static createEncryptedBackup(
databasePath: string,
backupDir: string,
): string {
if (!fs.existsSync(databasePath)) {
throw new Error(`Database file does not exist: ${databasePath}`);
}
@@ -378,26 +384,26 @@ class DatabaseFileEncryption {
}
// Generate backup filename with timestamp
const timestamp = new Date().toISOString().replace(/[:.]/g, '-');
const timestamp = new Date().toISOString().replace(/[:.]/g, "-");
const backupFileName = `database-backup-${timestamp}.sqlite.encrypted`;
const backupPath = path.join(backupDir, backupFileName);
try {
const encryptedPath = this.encryptDatabaseFile(databasePath, backupPath);
databaseLogger.info('Encrypted database backup created', {
operation: 'database_backup',
databaseLogger.info("Encrypted database backup created", {
operation: "database_backup",
sourcePath: databasePath,
backupPath: encryptedPath,
timestamp
timestamp,
});
return encryptedPath;
} catch (error) {
databaseLogger.error('Failed to create encrypted backup', error, {
operation: 'database_backup_failed',
databaseLogger.error("Failed to create encrypted backup", error, {
operation: "database_backup_failed",
sourcePath: databasePath,
backupDir
backupDir,
});
throw error;
}
@@ -406,26 +412,29 @@ class DatabaseFileEncryption {
/**
* Restore database from encrypted backup
*/
static restoreFromEncryptedBackup(backupPath: string, targetPath: string): string {
static restoreFromEncryptedBackup(
backupPath: string,
targetPath: string,
): string {
if (!this.isEncryptedDatabaseFile(backupPath)) {
throw new Error('Invalid encrypted backup file');
throw new Error("Invalid encrypted backup file");
}
try {
const restoredPath = this.decryptDatabaseFile(backupPath, targetPath);
databaseLogger.info('Database restored from encrypted backup', {
operation: 'database_restore',
databaseLogger.info("Database restored from encrypted backup", {
operation: "database_restore",
backupPath,
restoredPath
restoredPath,
});
return restoredPath;
} catch (error) {
databaseLogger.error('Failed to restore from encrypted backup', error, {
operation: 'database_restore_failed',
databaseLogger.error("Failed to restore from encrypted backup", error, {
operation: "database_restore_failed",
backupPath,
targetPath
targetPath,
});
throw error;
}
@@ -451,27 +460,27 @@ class DatabaseFileEncryption {
const tempFiles = [
`${basePath}.tmp`,
`${basePath}${this.ENCRYPTED_FILE_SUFFIX}`,
`${basePath}${this.ENCRYPTED_FILE_SUFFIX}${this.METADATA_FILE_SUFFIX}`
`${basePath}${this.ENCRYPTED_FILE_SUFFIX}${this.METADATA_FILE_SUFFIX}`,
];
for (const tempFile of tempFiles) {
if (fs.existsSync(tempFile)) {
fs.unlinkSync(tempFile);
databaseLogger.debug('Cleaned up temporary file', {
operation: 'temp_cleanup',
file: tempFile
databaseLogger.debug("Cleaned up temporary file", {
operation: "temp_cleanup",
file: tempFile,
});
}
}
} catch (error) {
databaseLogger.warn('Failed to clean up temporary files', {
operation: 'temp_cleanup_failed',
databaseLogger.warn("Failed to clean up temporary files", {
operation: "temp_cleanup_failed",
basePath,
error: error instanceof Error ? error.message : 'Unknown error'
error: error instanceof Error ? error.message : "Unknown error",
});
}
}
}
export { DatabaseFileEncryption };
export type { EncryptedFileMetadata };
export type { EncryptedFileMetadata };

View File

@@ -1,13 +1,23 @@
import fs from 'fs';
import path from 'path';
import crypto from 'crypto';
import { DatabaseFileEncryption } from './database-file-encryption.js';
import { DatabaseEncryption } from './database-encryption.js';
import { FieldEncryption } from './encryption.js';
import { HardwareFingerprint } from './hardware-fingerprint.js';
import { databaseLogger } from './logger.js';
import { db, databasePaths } from '../database/db/index.js';
import { users, sshData, sshCredentials, settings, fileManagerRecent, fileManagerPinned, fileManagerShortcuts, dismissedAlerts, sshCredentialUsage } from '../database/db/schema.js';
import fs from "fs";
import path from "path";
import crypto from "crypto";
import { DatabaseFileEncryption } from "./database-file-encryption.js";
import { DatabaseEncryption } from "./database-encryption.js";
import { FieldEncryption } from "./encryption.js";
import { HardwareFingerprint } from "./hardware-fingerprint.js";
import { databaseLogger } from "./logger.js";
import { db, databasePaths } from "../database/db/index.js";
import {
users,
sshData,
sshCredentials,
settings,
fileManagerRecent,
fileManagerPinned,
fileManagerShortcuts,
dismissedAlerts,
sshCredentialUsage,
} from "../database/db/schema.js";
interface ExportMetadata {
version: string;
@@ -41,8 +51,8 @@ interface ImportResult {
* Handles both field-level and file-level encryption/decryption during migration
*/
class DatabaseMigration {
private static readonly VERSION = 'v1';
private static readonly EXPORT_FILE_EXTENSION = '.termix-export.json';
private static readonly VERSION = "v1";
private static readonly EXPORT_FILE_EXTENSION = ".termix-export.json";
/**
* Export database for migration
@@ -53,28 +63,48 @@ class DatabaseMigration {
const timestamp = new Date().toISOString();
const defaultExportPath = path.join(
databasePaths.directory,
`termix-export-${timestamp.replace(/[:.]/g, '-')}${this.EXPORT_FILE_EXTENSION}`
`termix-export-${timestamp.replace(/[:.]/g, "-")}${this.EXPORT_FILE_EXTENSION}`,
);
const actualExportPath = exportPath || defaultExportPath;
try {
databaseLogger.info('Starting database export for migration', {
operation: 'database_export',
databaseLogger.info("Starting database export for migration", {
operation: "database_export",
exportId,
exportPath: actualExportPath
exportPath: actualExportPath,
});
// Define tables to export and their encryption status
const tablesToExport = [
{ name: 'users', table: users, hasEncryption: true },
{ name: 'ssh_data', table: sshData, hasEncryption: true },
{ name: 'ssh_credentials', table: sshCredentials, hasEncryption: true },
{ name: 'settings', table: settings, hasEncryption: false },
{ name: 'file_manager_recent', table: fileManagerRecent, hasEncryption: false },
{ name: 'file_manager_pinned', table: fileManagerPinned, hasEncryption: false },
{ name: 'file_manager_shortcuts', table: fileManagerShortcuts, hasEncryption: false },
{ name: 'dismissed_alerts', table: dismissedAlerts, hasEncryption: false },
{ name: 'ssh_credential_usage', table: sshCredentialUsage, hasEncryption: false }
{ name: "users", table: users, hasEncryption: true },
{ name: "ssh_data", table: sshData, hasEncryption: true },
{ name: "ssh_credentials", table: sshCredentials, hasEncryption: true },
{ name: "settings", table: settings, hasEncryption: false },
{
name: "file_manager_recent",
table: fileManagerRecent,
hasEncryption: false,
},
{
name: "file_manager_pinned",
table: fileManagerPinned,
hasEncryption: false,
},
{
name: "file_manager_shortcuts",
table: fileManagerShortcuts,
hasEncryption: false,
},
{
name: "dismissed_alerts",
table: dismissedAlerts,
hasEncryption: false,
},
{
name: "ssh_credential_usage",
table: sshCredentialUsage,
hasEncryption: false,
},
];
const exportData: MigrationExport = {
@@ -82,12 +112,15 @@ class DatabaseMigration {
version: this.VERSION,
exportedAt: timestamp,
exportId,
sourceHardwareFingerprint: HardwareFingerprint.generate().substring(0, 16),
sourceHardwareFingerprint: HardwareFingerprint.generate().substring(
0,
16,
),
tableCount: 0,
recordCount: 0,
encryptedFields: []
encryptedFields: [],
},
data: {}
data: {},
};
let totalRecords = 0;
@@ -96,9 +129,9 @@ class DatabaseMigration {
for (const tableInfo of tablesToExport) {
try {
databaseLogger.debug(`Exporting table: ${tableInfo.name}`, {
operation: 'table_export',
operation: "table_export",
table: tableInfo.name,
hasEncryption: tableInfo.hasEncryption
hasEncryption: tableInfo.hasEncryption,
});
// Query all records from the table
@@ -107,16 +140,20 @@ class DatabaseMigration {
// Decrypt encrypted fields if necessary
let processedRecords = records;
if (tableInfo.hasEncryption && records.length > 0) {
processedRecords = records.map(record => {
processedRecords = records.map((record) => {
try {
return DatabaseEncryption.decryptRecord(tableInfo.name, record);
} catch (error) {
databaseLogger.warn(`Failed to decrypt record in ${tableInfo.name}`, {
operation: 'export_decrypt_warning',
table: tableInfo.name,
recordId: (record as any).id,
error: error instanceof Error ? error.message : 'Unknown error'
});
databaseLogger.warn(
`Failed to decrypt record in ${tableInfo.name}`,
{
operation: "export_decrypt_warning",
table: tableInfo.name,
recordId: (record as any).id,
error:
error instanceof Error ? error.message : "Unknown error",
},
);
// Return original record if decryption fails
return record;
}
@@ -126,7 +163,9 @@ class DatabaseMigration {
if (records.length > 0) {
const sampleRecord = records[0];
for (const fieldName of Object.keys(sampleRecord)) {
if (FieldEncryption.shouldEncryptField(tableInfo.name, fieldName)) {
if (
FieldEncryption.shouldEncryptField(tableInfo.name, fieldName)
) {
const fieldKey = `${tableInfo.name}.${fieldName}`;
if (!exportData.metadata.encryptedFields.includes(fieldKey)) {
exportData.metadata.encryptedFields.push(fieldKey);
@@ -140,15 +179,19 @@ class DatabaseMigration {
totalRecords += processedRecords.length;
databaseLogger.debug(`Table ${tableInfo.name} exported`, {
operation: 'table_export_complete',
operation: "table_export_complete",
table: tableInfo.name,
recordCount: processedRecords.length
recordCount: processedRecords.length,
});
} catch (error) {
databaseLogger.error(`Failed to export table ${tableInfo.name}`, error, {
operation: 'table_export_failed',
table: tableInfo.name
});
databaseLogger.error(
`Failed to export table ${tableInfo.name}`,
error,
{
operation: "table_export_failed",
table: tableInfo.name,
},
);
throw error;
}
}
@@ -159,25 +202,27 @@ class DatabaseMigration {
// Write export file
const exportContent = JSON.stringify(exportData, null, 2);
fs.writeFileSync(actualExportPath, exportContent, 'utf8');
fs.writeFileSync(actualExportPath, exportContent, "utf8");
databaseLogger.success('Database export completed successfully', {
operation: 'database_export_complete',
databaseLogger.success("Database export completed successfully", {
operation: "database_export_complete",
exportId,
exportPath: actualExportPath,
tableCount: exportData.metadata.tableCount,
recordCount: exportData.metadata.recordCount,
fileSize: exportContent.length
fileSize: exportContent.length,
});
return actualExportPath;
} catch (error) {
databaseLogger.error('Database export failed', error, {
operation: 'database_export_failed',
databaseLogger.error("Database export failed", error, {
operation: "database_export_failed",
exportId,
exportPath: actualExportPath
exportPath: actualExportPath,
});
throw new Error(`Database export failed: ${error instanceof Error ? error.message : 'Unknown error'}`);
throw new Error(
`Database export failed: ${error instanceof Error ? error.message : "Unknown error"}`,
);
}
}
@@ -185,10 +230,13 @@ class DatabaseMigration {
* Import database from migration export
* Re-encrypts fields for the current hardware
*/
static async importDatabase(importPath: string, options: {
replaceExisting?: boolean;
backupCurrent?: boolean;
} = {}): Promise<ImportResult> {
static async importDatabase(
importPath: string,
options: {
replaceExisting?: boolean;
backupCurrent?: boolean;
} = {},
): Promise<ImportResult> {
const { replaceExisting = false, backupCurrent = true } = options;
if (!fs.existsSync(importPath)) {
@@ -196,43 +244,45 @@ class DatabaseMigration {
}
try {
databaseLogger.info('Starting database import from migration export', {
operation: 'database_import',
databaseLogger.info("Starting database import from migration export", {
operation: "database_import",
importPath,
replaceExisting,
backupCurrent
backupCurrent,
});
// Read and validate export file
const exportContent = fs.readFileSync(importPath, 'utf8');
const exportContent = fs.readFileSync(importPath, "utf8");
const exportData: MigrationExport = JSON.parse(exportContent);
// Validate export format
if (exportData.metadata.version !== this.VERSION) {
throw new Error(`Unsupported export version: ${exportData.metadata.version}`);
throw new Error(
`Unsupported export version: ${exportData.metadata.version}`,
);
}
const result: ImportResult = {
success: false,
imported: { tables: 0, records: 0 },
errors: [],
warnings: []
warnings: [],
};
// Create backup if requested
if (backupCurrent) {
try {
const backupPath = await this.createCurrentDatabaseBackup();
databaseLogger.info('Current database backed up before import', {
operation: 'import_backup',
backupPath
databaseLogger.info("Current database backed up before import", {
operation: "import_backup",
backupPath,
});
} catch (error) {
const warningMsg = `Failed to create backup: ${error instanceof Error ? error.message : 'Unknown error'}`;
const warningMsg = `Failed to create backup: ${error instanceof Error ? error.message : "Unknown error"}`;
result.warnings.push(warningMsg);
databaseLogger.warn('Failed to create pre-import backup', {
operation: 'import_backup_failed',
error: warningMsg
databaseLogger.warn("Failed to create pre-import backup", {
operation: "import_backup_failed",
error: warningMsg,
});
}
}
@@ -241,9 +291,9 @@ class DatabaseMigration {
for (const [tableName, tableData] of Object.entries(exportData.data)) {
try {
databaseLogger.debug(`Importing table: ${tableName}`, {
operation: 'table_import',
operation: "table_import",
table: tableName,
recordCount: tableData.length
recordCount: tableData.length,
});
if (replaceExisting) {
@@ -252,8 +302,8 @@ class DatabaseMigration {
if (tableSchema) {
await db.delete(tableSchema);
databaseLogger.debug(`Cleared existing data from ${tableName}`, {
operation: 'table_clear',
table: tableName
operation: "table_clear",
table: tableName,
});
}
}
@@ -262,7 +312,10 @@ class DatabaseMigration {
for (const record of tableData) {
try {
// Re-encrypt sensitive fields for current hardware
const processedRecord = DatabaseEncryption.encryptRecord(tableName, record);
const processedRecord = DatabaseEncryption.encryptRecord(
tableName,
record,
);
// Insert record
const tableSchema = this.getTableSchema(tableName);
@@ -270,12 +323,12 @@ class DatabaseMigration {
await db.insert(tableSchema).values(processedRecord);
}
} catch (error) {
const errorMsg = `Failed to import record in ${tableName}: ${error instanceof Error ? error.message : 'Unknown error'}`;
const errorMsg = `Failed to import record in ${tableName}: ${error instanceof Error ? error.message : "Unknown error"}`;
result.errors.push(errorMsg);
databaseLogger.error('Failed to import record', error, {
operation: 'record_import_failed',
databaseLogger.error("Failed to import record", error, {
operation: "record_import_failed",
table: tableName,
recordId: record.id
recordId: record.id,
});
}
}
@@ -284,16 +337,16 @@ class DatabaseMigration {
result.imported.records += tableData.length;
databaseLogger.debug(`Table ${tableName} imported`, {
operation: 'table_import_complete',
operation: "table_import_complete",
table: tableName,
recordCount: tableData.length
recordCount: tableData.length,
});
} catch (error) {
const errorMsg = `Failed to import table ${tableName}: ${error instanceof Error ? error.message : 'Unknown error'}`;
const errorMsg = `Failed to import table ${tableName}: ${error instanceof Error ? error.message : "Unknown error"}`;
result.errors.push(errorMsg);
databaseLogger.error('Failed to import table', error, {
operation: 'table_import_failed',
table: tableName
databaseLogger.error("Failed to import table", error, {
operation: "table_import_failed",
table: tableName,
});
}
}
@@ -302,31 +355,37 @@ class DatabaseMigration {
result.success = result.errors.length === 0;
if (result.success) {
databaseLogger.success('Database import completed successfully', {
operation: 'database_import_complete',
databaseLogger.success("Database import completed successfully", {
operation: "database_import_complete",
importPath,
tablesImported: result.imported.tables,
recordsImported: result.imported.records,
warnings: result.warnings.length
warnings: result.warnings.length,
});
} else {
databaseLogger.error('Database import completed with errors', undefined, {
operation: 'database_import_partial',
importPath,
tablesImported: result.imported.tables,
recordsImported: result.imported.records,
errorCount: result.errors.length,
warningCount: result.warnings.length
});
databaseLogger.error(
"Database import completed with errors",
undefined,
{
operation: "database_import_partial",
importPath,
tablesImported: result.imported.tables,
recordsImported: result.imported.records,
errorCount: result.errors.length,
warningCount: result.warnings.length,
},
);
}
return result;
} catch (error) {
databaseLogger.error('Database import failed', error, {
operation: 'database_import_failed',
importPath
databaseLogger.error("Database import failed", error, {
operation: "database_import_failed",
importPath,
});
throw new Error(`Database import failed: ${error instanceof Error ? error.message : 'Unknown error'}`);
throw new Error(
`Database import failed: ${error instanceof Error ? error.message : "Unknown error"}`,
);
}
}
@@ -341,32 +400,38 @@ class DatabaseMigration {
const result = {
valid: false,
metadata: undefined as ExportMetadata | undefined,
errors: [] as string[]
errors: [] as string[],
};
try {
if (!fs.existsSync(exportPath)) {
result.errors.push('Export file does not exist');
result.errors.push("Export file does not exist");
return result;
}
const exportContent = fs.readFileSync(exportPath, 'utf8');
const exportContent = fs.readFileSync(exportPath, "utf8");
const exportData: MigrationExport = JSON.parse(exportContent);
// Validate structure
if (!exportData.metadata || !exportData.data) {
result.errors.push('Invalid export file structure');
result.errors.push("Invalid export file structure");
return result;
}
// Validate version
if (exportData.metadata.version !== this.VERSION) {
result.errors.push(`Unsupported export version: ${exportData.metadata.version}`);
result.errors.push(
`Unsupported export version: ${exportData.metadata.version}`,
);
return result;
}
// Validate required metadata fields
const requiredFields = ['exportedAt', 'exportId', 'sourceHardwareFingerprint'];
const requiredFields = [
"exportedAt",
"exportId",
"sourceHardwareFingerprint",
];
for (const field of requiredFields) {
if (!exportData.metadata[field as keyof ExportMetadata]) {
result.errors.push(`Missing required metadata field: ${field}`);
@@ -380,7 +445,9 @@ class DatabaseMigration {
return result;
} catch (error) {
result.errors.push(`Failed to parse export file: ${error instanceof Error ? error.message : 'Unknown error'}`);
result.errors.push(
`Failed to parse export file: ${error instanceof Error ? error.message : "Unknown error"}`,
);
return result;
}
}
@@ -389,8 +456,8 @@ class DatabaseMigration {
* Create backup of current database
*/
private static async createCurrentDatabaseBackup(): Promise<string> {
const timestamp = new Date().toISOString().replace(/[:.]/g, '-');
const backupDir = path.join(databasePaths.directory, 'backups');
const timestamp = new Date().toISOString().replace(/[:.]/g, "-");
const backupDir = path.join(databasePaths.directory, "backups");
if (!fs.existsSync(backupDir)) {
fs.mkdirSync(backupDir, { recursive: true });
@@ -399,7 +466,7 @@ class DatabaseMigration {
// Create encrypted backup
const backupPath = DatabaseFileEncryption.createEncryptedBackup(
databasePaths.main,
backupDir
backupDir,
);
return backupPath;
@@ -410,15 +477,15 @@ class DatabaseMigration {
*/
private static getTableSchema(tableName: string) {
const tableMap: { [key: string]: any } = {
'users': users,
'ssh_data': sshData,
'ssh_credentials': sshCredentials,
'settings': settings,
'file_manager_recent': fileManagerRecent,
'file_manager_pinned': fileManagerPinned,
'file_manager_shortcuts': fileManagerShortcuts,
'dismissed_alerts': dismissedAlerts,
'ssh_credential_usage': sshCredentialUsage
users: users,
ssh_data: sshData,
ssh_credentials: sshCredentials,
settings: settings,
file_manager_recent: fileManagerRecent,
file_manager_pinned: fileManagerPinned,
file_manager_shortcuts: fileManagerShortcuts,
dismissed_alerts: dismissedAlerts,
ssh_credential_usage: sshCredentialUsage,
};
return tableMap[tableName];
@@ -434,4 +501,4 @@ class DatabaseMigration {
}
export { DatabaseMigration };
export type { ExportMetadata, MigrationExport, ImportResult };
export type { ExportMetadata, MigrationExport, ImportResult };

View File

@@ -1,15 +1,15 @@
import fs from 'fs';
import path from 'path';
import crypto from 'crypto';
import Database from 'better-sqlite3';
import { sql, eq } from 'drizzle-orm';
import { drizzle } from 'drizzle-orm/better-sqlite3';
import { DatabaseEncryption } from './database-encryption.js';
import { FieldEncryption } from './encryption.js';
import { HardwareFingerprint } from './hardware-fingerprint.js';
import { databaseLogger } from './logger.js';
import { databasePaths, db, sqliteInstance } from '../database/db/index.js';
import { sshData, sshCredentials, users } from '../database/db/schema.js';
import fs from "fs";
import path from "path";
import crypto from "crypto";
import Database from "better-sqlite3";
import { sql, eq } from "drizzle-orm";
import { drizzle } from "drizzle-orm/better-sqlite3";
import { DatabaseEncryption } from "./database-encryption.js";
import { FieldEncryption } from "./encryption.js";
import { HardwareFingerprint } from "./hardware-fingerprint.js";
import { databaseLogger } from "./logger.js";
import { databasePaths, db, sqliteInstance } from "../database/db/index.js";
import { sshData, sshCredentials, users } from "../database/db/schema.js";
interface ExportMetadata {
version: string;
@@ -36,9 +36,9 @@ interface ImportResult {
* Exports decrypted data to a new SQLite database file for hardware transfer
*/
class DatabaseSQLiteExport {
private static readonly VERSION = 'v1';
private static readonly EXPORT_FILE_EXTENSION = '.termix-export.sqlite';
private static readonly METADATA_TABLE = '_termix_export_metadata';
private static readonly VERSION = "v1";
private static readonly EXPORT_FILE_EXTENSION = ".termix-export.sqlite";
private static readonly METADATA_TABLE = "_termix_export_metadata";
/**
* Export database as SQLite file for migration
@@ -49,15 +49,15 @@ class DatabaseSQLiteExport {
const timestamp = new Date().toISOString();
const defaultExportPath = path.join(
databasePaths.directory,
`termix-export-${timestamp.replace(/[:.]/g, '-')}${this.EXPORT_FILE_EXTENSION}`
`termix-export-${timestamp.replace(/[:.]/g, "-")}${this.EXPORT_FILE_EXTENSION}`,
);
const actualExportPath = exportPath || defaultExportPath;
try {
databaseLogger.info('Starting SQLite database export for migration', {
operation: 'database_sqlite_export',
databaseLogger.info("Starting SQLite database export for migration", {
operation: "database_sqlite_export",
exportId,
exportPath: actualExportPath
exportPath: actualExportPath,
});
// Create new SQLite database for export
@@ -65,18 +65,21 @@ class DatabaseSQLiteExport {
// Define tables to export - only SSH-related data
const tablesToExport = [
{ name: 'ssh_data', hasEncryption: true },
{ name: 'ssh_credentials', hasEncryption: true }
{ name: "ssh_data", hasEncryption: true },
{ name: "ssh_credentials", hasEncryption: true },
];
const exportMetadata: ExportMetadata = {
version: this.VERSION,
exportedAt: timestamp,
exportId,
sourceHardwareFingerprint: HardwareFingerprint.generate().substring(0, 16),
sourceHardwareFingerprint: HardwareFingerprint.generate().substring(
0,
16,
),
tableCount: 0,
recordCount: 0,
encryptedFields: []
encryptedFields: [],
};
let totalRecords = 0;
@@ -86,9 +89,9 @@ class DatabaseSQLiteExport {
const totalSshCredentials = await db.select().from(sshCredentials);
databaseLogger.info(`Export preparation: found SSH data`, {
operation: 'export_data_check',
operation: "export_data_check",
totalSshData: totalSshData.length,
totalSshCredentials: totalSshCredentials.length
totalSshCredentials: totalSshCredentials.length,
});
// Create metadata table
@@ -103,13 +106,13 @@ class DatabaseSQLiteExport {
for (const tableInfo of tablesToExport) {
try {
databaseLogger.debug(`Exporting SQLite table: ${tableInfo.name}`, {
operation: 'table_sqlite_export',
operation: "table_sqlite_export",
table: tableInfo.name,
hasEncryption: tableInfo.hasEncryption
hasEncryption: tableInfo.hasEncryption,
});
// Create table in export database using consistent schema
if (tableInfo.name === 'ssh_data') {
if (tableInfo.name === "ssh_data") {
// Create ssh_data table using exact schema matching Drizzle definition
const createTableSql = `CREATE TABLE ssh_data (
id INTEGER PRIMARY KEY AUTOINCREMENT,
@@ -137,7 +140,7 @@ class DatabaseSQLiteExport {
updated_at TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP
)`;
exportDb.exec(createTableSql);
} else if (tableInfo.name === 'ssh_credentials') {
} else if (tableInfo.name === "ssh_credentials") {
// Create ssh_credentials table using exact schema matching Drizzle definition
const createTableSql = `CREATE TABLE ssh_credentials (
id INTEGER PRIMARY KEY AUTOINCREMENT,
@@ -153,41 +156,48 @@ class DatabaseSQLiteExport {
exportDb.exec(createTableSql);
} else {
databaseLogger.warn(`Unknown table ${tableInfo.name}, skipping`, {
operation: 'table_sqlite_export_skip',
table: tableInfo.name
operation: "table_sqlite_export_skip",
table: tableInfo.name,
});
continue;
}
// Query all records from tables using Drizzle
let records: any[];
if (tableInfo.name === 'ssh_data') {
if (tableInfo.name === "ssh_data") {
records = await db.select().from(sshData);
} else if (tableInfo.name === 'ssh_credentials') {
} else if (tableInfo.name === "ssh_credentials") {
records = await db.select().from(sshCredentials);
} else {
records = [];
}
databaseLogger.info(`Found ${records.length} records in ${tableInfo.name} for export`, {
operation: 'table_record_count',
table: tableInfo.name,
recordCount: records.length
});
databaseLogger.info(
`Found ${records.length} records in ${tableInfo.name} for export`,
{
operation: "table_record_count",
table: tableInfo.name,
recordCount: records.length,
},
);
// Decrypt encrypted fields if necessary
let processedRecords = records;
if (tableInfo.hasEncryption && records.length > 0) {
processedRecords = records.map(record => {
processedRecords = records.map((record) => {
try {
return DatabaseEncryption.decryptRecord(tableInfo.name, record);
} catch (error) {
databaseLogger.warn(`Failed to decrypt record in ${tableInfo.name}`, {
operation: 'export_decrypt_warning',
table: tableInfo.name,
recordId: (record as any).id,
error: error instanceof Error ? error.message : 'Unknown error'
});
databaseLogger.warn(
`Failed to decrypt record in ${tableInfo.name}`,
{
operation: "export_decrypt_warning",
table: tableInfo.name,
recordId: (record as any).id,
error:
error instanceof Error ? error.message : "Unknown error",
},
);
return record;
}
});
@@ -210,40 +220,44 @@ class DatabaseSQLiteExport {
const tsFieldNames = Object.keys(sampleRecord);
// Map TypeScript field names to database column names
const dbColumnNames = tsFieldNames.map(fieldName => {
const dbColumnNames = tsFieldNames.map((fieldName) => {
// Map TypeScript field names to database column names
const fieldMappings: Record<string, string> = {
'userId': 'user_id',
'authType': 'auth_type',
'requirePassword': 'require_password',
'keyPassword': 'key_password',
'keyType': 'key_type',
'credentialId': 'credential_id',
'enableTerminal': 'enable_terminal',
'enableTunnel': 'enable_tunnel',
'tunnelConnections': 'tunnel_connections',
'enableFileManager': 'enable_file_manager',
'defaultPath': 'default_path',
'createdAt': 'created_at',
'updatedAt': 'updated_at',
'keyContent': 'key_content'
userId: "user_id",
authType: "auth_type",
requirePassword: "require_password",
keyPassword: "key_password",
keyType: "key_type",
credentialId: "credential_id",
enableTerminal: "enable_terminal",
enableTunnel: "enable_tunnel",
tunnelConnections: "tunnel_connections",
enableFileManager: "enable_file_manager",
defaultPath: "default_path",
createdAt: "created_at",
updatedAt: "updated_at",
keyContent: "key_content",
};
return fieldMappings[fieldName] || fieldName;
});
const placeholders = dbColumnNames.map(() => '?').join(', ');
const insertSql = `INSERT INTO ${tableInfo.name} (${dbColumnNames.join(', ')}) VALUES (${placeholders})`;
const placeholders = dbColumnNames.map(() => "?").join(", ");
const insertSql = `INSERT INTO ${tableInfo.name} (${dbColumnNames.join(", ")}) VALUES (${placeholders})`;
const insertStmt = exportDb.prepare(insertSql);
for (const record of processedRecords) {
const values = tsFieldNames.map(fieldName => {
const values = tsFieldNames.map((fieldName) => {
const value: any = record[fieldName as keyof typeof record];
// Convert values to SQLite-compatible types
if (value === null || value === undefined) {
return null;
}
if (typeof value === 'string' || typeof value === 'number' || typeof value === 'bigint') {
if (
typeof value === "string" ||
typeof value === "number" ||
typeof value === "bigint"
) {
return value;
}
if (Buffer.isBuffer(value)) {
@@ -252,11 +266,11 @@ class DatabaseSQLiteExport {
if (value instanceof Date) {
return value.toISOString();
}
if (typeof value === 'boolean') {
if (typeof value === "boolean") {
return value ? 1 : 0;
}
// Convert objects and arrays to JSON strings
if (typeof value === 'object') {
if (typeof value === "object") {
return JSON.stringify(value);
}
// Fallback: convert to string
@@ -269,15 +283,19 @@ class DatabaseSQLiteExport {
totalRecords += processedRecords.length;
databaseLogger.debug(`SQLite table ${tableInfo.name} exported`, {
operation: 'table_sqlite_export_complete',
operation: "table_sqlite_export_complete",
table: tableInfo.name,
recordCount: processedRecords.length
recordCount: processedRecords.length,
});
} catch (error) {
databaseLogger.error(`Failed to export SQLite table ${tableInfo.name}`, error, {
operation: 'table_sqlite_export_failed',
table: tableInfo.name
});
databaseLogger.error(
`Failed to export SQLite table ${tableInfo.name}`,
error,
{
operation: "table_sqlite_export_failed",
table: tableInfo.name,
},
);
throw error;
}
}
@@ -286,29 +304,33 @@ class DatabaseSQLiteExport {
exportMetadata.tableCount = tablesToExport.length;
exportMetadata.recordCount = totalRecords;
const insertMetadata = exportDb.prepare(`INSERT INTO ${this.METADATA_TABLE} (key, value) VALUES (?, ?)`);
insertMetadata.run('metadata', JSON.stringify(exportMetadata));
const insertMetadata = exportDb.prepare(
`INSERT INTO ${this.METADATA_TABLE} (key, value) VALUES (?, ?)`,
);
insertMetadata.run("metadata", JSON.stringify(exportMetadata));
// Close export database
exportDb.close();
databaseLogger.success('SQLite database export completed successfully', {
operation: 'database_sqlite_export_complete',
databaseLogger.success("SQLite database export completed successfully", {
operation: "database_sqlite_export_complete",
exportId,
exportPath: actualExportPath,
tableCount: exportMetadata.tableCount,
recordCount: exportMetadata.recordCount,
fileSize: fs.statSync(actualExportPath).size
fileSize: fs.statSync(actualExportPath).size,
});
return actualExportPath;
} catch (error) {
databaseLogger.error('SQLite database export failed', error, {
operation: 'database_sqlite_export_failed',
databaseLogger.error("SQLite database export failed", error, {
operation: "database_sqlite_export_failed",
exportId,
exportPath: actualExportPath
exportPath: actualExportPath,
});
throw new Error(`SQLite database export failed: ${error instanceof Error ? error.message : 'Unknown error'}`);
throw new Error(
`SQLite database export failed: ${error instanceof Error ? error.message : "Unknown error"}`,
);
}
}
@@ -316,10 +338,13 @@ class DatabaseSQLiteExport {
* Import database from SQLite export
* Re-encrypts fields for the current hardware
*/
static async importDatabase(importPath: string, options: {
replaceExisting?: boolean;
backupCurrent?: boolean;
} = {}): Promise<ImportResult> {
static async importDatabase(
importPath: string,
options: {
replaceExisting?: boolean;
backupCurrent?: boolean;
} = {},
): Promise<ImportResult> {
const { replaceExisting = false, backupCurrent = true } = options;
if (!fs.existsSync(importPath)) {
@@ -327,23 +352,27 @@ class DatabaseSQLiteExport {
}
try {
databaseLogger.info('Starting SQLite database import from export', {
operation: 'database_sqlite_import',
databaseLogger.info("Starting SQLite database import from export", {
operation: "database_sqlite_import",
importPath,
replaceExisting,
backupCurrent
backupCurrent,
});
// Open import database
const importDb = new Database(importPath, { readonly: true });
// Validate export format
const metadataResult = importDb.prepare(`
const metadataResult = importDb
.prepare(
`
SELECT value FROM ${this.METADATA_TABLE} WHERE key = 'metadata'
`).get() as { value: string } | undefined;
`,
)
.get() as { value: string } | undefined;
if (!metadataResult) {
throw new Error('Invalid export file: missing metadata');
throw new Error("Invalid export file: missing metadata");
}
const metadata: ExportMetadata = JSON.parse(metadataResult.value);
@@ -355,44 +384,55 @@ class DatabaseSQLiteExport {
success: false,
imported: { tables: 0, records: 0 },
errors: [],
warnings: []
warnings: [],
};
// Get current admin user to assign imported SSH records
const adminUser = await db.select().from(users).where(eq(users.is_admin, true)).limit(1);
const adminUser = await db
.select()
.from(users)
.where(eq(users.is_admin, true))
.limit(1);
if (adminUser.length === 0) {
throw new Error('No admin user found in current database');
throw new Error("No admin user found in current database");
}
const currentAdminUserId = adminUser[0].id;
databaseLogger.debug(`Starting SSH data import - assigning to admin user ${currentAdminUserId}`, {
operation: 'ssh_data_import_start',
adminUserId: currentAdminUserId
});
databaseLogger.debug(
`Starting SSH data import - assigning to admin user ${currentAdminUserId}`,
{
operation: "ssh_data_import_start",
adminUserId: currentAdminUserId,
},
);
// Create backup if requested
if (backupCurrent) {
try {
const backupPath = await this.createCurrentDatabaseBackup();
databaseLogger.info('Current database backed up before import', {
operation: 'import_backup',
backupPath
databaseLogger.info("Current database backed up before import", {
operation: "import_backup",
backupPath,
});
} catch (error) {
const warningMsg = `Failed to create backup: ${error instanceof Error ? error.message : 'Unknown error'}`;
const warningMsg = `Failed to create backup: ${error instanceof Error ? error.message : "Unknown error"}`;
result.warnings.push(warningMsg);
databaseLogger.warn('Failed to create pre-import backup', {
operation: 'import_backup_failed',
error: warningMsg
databaseLogger.warn("Failed to create pre-import backup", {
operation: "import_backup_failed",
error: warningMsg,
});
}
}
// Get list of tables to import (excluding metadata table)
const tables = importDb.prepare(`
const tables = importDb
.prepare(
`
SELECT name FROM sqlite_master
WHERE type='table' AND name != '${this.METADATA_TABLE}'
`).all() as { name: string }[];
`,
)
.all() as { name: string }[];
// Import data table by table
for (const tableRow of tables) {
@@ -400,15 +440,15 @@ class DatabaseSQLiteExport {
try {
databaseLogger.debug(`Importing SQLite table: ${tableName}`, {
operation: 'table_sqlite_import',
table: tableName
operation: "table_sqlite_import",
table: tableName,
});
// Use additive import - don't clear existing data
// This preserves all current data including admin SSH connections
databaseLogger.debug(`Using additive import for ${tableName}`, {
operation: 'table_additive_import',
table: tableName
operation: "table_additive_import",
table: tableName,
});
// Get all records from import table
@@ -422,20 +462,20 @@ class DatabaseSQLiteExport {
// Map database column names to TypeScript field names
const mappedRecord: any = {};
const columnToFieldMappings: Record<string, string> = {
'user_id': 'userId',
'auth_type': 'authType',
'require_password': 'requirePassword',
'key_password': 'keyPassword',
'key_type': 'keyType',
'credential_id': 'credentialId',
'enable_terminal': 'enableTerminal',
'enable_tunnel': 'enableTunnel',
'tunnel_connections': 'tunnelConnections',
'enable_file_manager': 'enableFileManager',
'default_path': 'defaultPath',
'created_at': 'createdAt',
'updated_at': 'updatedAt',
'key_content': 'keyContent'
user_id: "userId",
auth_type: "authType",
require_password: "requirePassword",
key_password: "keyPassword",
key_type: "keyType",
credential_id: "credentialId",
enable_terminal: "enableTerminal",
enable_tunnel: "enableTunnel",
tunnel_connections: "tunnelConnections",
enable_file_manager: "enableFileManager",
default_path: "defaultPath",
created_at: "createdAt",
updated_at: "updatedAt",
key_content: "keyContent",
};
// Convert database column names to TypeScript field names
@@ -445,44 +485,62 @@ class DatabaseSQLiteExport {
}
// Assign imported SSH records to current admin user to avoid foreign key constraint
if (tableName === 'ssh_data' && mappedRecord.userId) {
if (tableName === "ssh_data" && mappedRecord.userId) {
const originalUserId = mappedRecord.userId;
mappedRecord.userId = currentAdminUserId;
databaseLogger.debug(`Reassigned SSH record from user ${originalUserId} to admin ${currentAdminUserId}`, {
operation: 'user_reassignment',
originalUserId,
newUserId: currentAdminUserId
});
databaseLogger.debug(
`Reassigned SSH record from user ${originalUserId} to admin ${currentAdminUserId}`,
{
operation: "user_reassignment",
originalUserId,
newUserId: currentAdminUserId,
},
);
}
// Re-encrypt sensitive fields for current hardware
const processedRecord = DatabaseEncryption.encryptRecord(tableName, mappedRecord);
const processedRecord = DatabaseEncryption.encryptRecord(
tableName,
mappedRecord,
);
// Insert record using Drizzle
try {
if (tableName === 'ssh_data') {
await db.insert(sshData).values(processedRecord).onConflictDoNothing();
} else if (tableName === 'ssh_credentials') {
await db.insert(sshCredentials).values(processedRecord).onConflictDoNothing();
if (tableName === "ssh_data") {
await db
.insert(sshData)
.values(processedRecord)
.onConflictDoNothing();
} else if (tableName === "ssh_credentials") {
await db
.insert(sshCredentials)
.values(processedRecord)
.onConflictDoNothing();
}
} catch (error) {
// Handle any SQL errors gracefully
if (error instanceof Error && error.message.includes('UNIQUE constraint failed')) {
databaseLogger.debug(`Skipping duplicate record in ${tableName}`, {
operation: 'duplicate_record_skip',
table: tableName
});
if (
error instanceof Error &&
error.message.includes("UNIQUE constraint failed")
) {
databaseLogger.debug(
`Skipping duplicate record in ${tableName}`,
{
operation: "duplicate_record_skip",
table: tableName,
},
);
continue;
}
throw error;
}
} catch (error) {
const errorMsg = `Failed to import record in ${tableName}: ${error instanceof Error ? error.message : 'Unknown error'}`;
const errorMsg = `Failed to import record in ${tableName}: ${error instanceof Error ? error.message : "Unknown error"}`;
result.errors.push(errorMsg);
databaseLogger.error('Failed to import record', error, {
operation: 'record_sqlite_import_failed',
databaseLogger.error("Failed to import record", error, {
operation: "record_sqlite_import_failed",
table: tableName,
recordId: (record as any).id
recordId: (record as any).id,
});
}
}
@@ -491,16 +549,16 @@ class DatabaseSQLiteExport {
result.imported.records += records.length;
databaseLogger.debug(`SQLite table ${tableName} imported`, {
operation: 'table_sqlite_import_complete',
operation: "table_sqlite_import_complete",
table: tableName,
recordCount: records.length
recordCount: records.length,
});
} catch (error) {
const errorMsg = `Failed to import table ${tableName}: ${error instanceof Error ? error.message : 'Unknown error'}`;
const errorMsg = `Failed to import table ${tableName}: ${error instanceof Error ? error.message : "Unknown error"}`;
result.errors.push(errorMsg);
databaseLogger.error('Failed to import SQLite table', error, {
operation: 'table_sqlite_import_failed',
table: tableName
databaseLogger.error("Failed to import SQLite table", error, {
operation: "table_sqlite_import_failed",
table: tableName,
});
}
}
@@ -512,31 +570,40 @@ class DatabaseSQLiteExport {
result.success = result.errors.length === 0;
if (result.success) {
databaseLogger.success('SQLite database import completed successfully', {
operation: 'database_sqlite_import_complete',
importPath,
tablesImported: result.imported.tables,
recordsImported: result.imported.records,
warnings: result.warnings.length
});
databaseLogger.success(
"SQLite database import completed successfully",
{
operation: "database_sqlite_import_complete",
importPath,
tablesImported: result.imported.tables,
recordsImported: result.imported.records,
warnings: result.warnings.length,
},
);
} else {
databaseLogger.error('SQLite database import completed with errors', undefined, {
operation: 'database_sqlite_import_partial',
importPath,
tablesImported: result.imported.tables,
recordsImported: result.imported.records,
errorCount: result.errors.length,
warningCount: result.warnings.length
});
databaseLogger.error(
"SQLite database import completed with errors",
undefined,
{
operation: "database_sqlite_import_partial",
importPath,
tablesImported: result.imported.tables,
recordsImported: result.imported.records,
errorCount: result.errors.length,
warningCount: result.warnings.length,
},
);
}
return result;
} catch (error) {
databaseLogger.error('SQLite database import failed', error, {
operation: 'database_sqlite_import_failed',
importPath
databaseLogger.error("SQLite database import failed", error, {
operation: "database_sqlite_import_failed",
importPath,
});
throw new Error(`SQLite database import failed: ${error instanceof Error ? error.message : 'Unknown error'}`);
throw new Error(
`SQLite database import failed: ${error instanceof Error ? error.message : "Unknown error"}`,
);
}
}
@@ -551,29 +618,33 @@ class DatabaseSQLiteExport {
const result = {
valid: false,
metadata: undefined as ExportMetadata | undefined,
errors: [] as string[]
errors: [] as string[],
};
try {
if (!fs.existsSync(exportPath)) {
result.errors.push('Export file does not exist');
result.errors.push("Export file does not exist");
return result;
}
if (!exportPath.endsWith(this.EXPORT_FILE_EXTENSION)) {
result.errors.push('Invalid export file extension');
result.errors.push("Invalid export file extension");
return result;
}
const exportDb = new Database(exportPath, { readonly: true });
try {
const metadataResult = exportDb.prepare(`
const metadataResult = exportDb
.prepare(
`
SELECT value FROM ${this.METADATA_TABLE} WHERE key = 'metadata'
`).get() as { value: string } | undefined;
`,
)
.get() as { value: string } | undefined;
if (!metadataResult) {
result.errors.push('Missing export metadata');
result.errors.push("Missing export metadata");
return result;
}
@@ -592,7 +663,9 @@ class DatabaseSQLiteExport {
return result;
} catch (error) {
result.errors.push(`Failed to validate export file: ${error instanceof Error ? error.message : 'Unknown error'}`);
result.errors.push(
`Failed to validate export file: ${error instanceof Error ? error.message : "Unknown error"}`,
);
return result;
}
}
@@ -609,15 +682,18 @@ class DatabaseSQLiteExport {
* Create backup of current database
*/
private static async createCurrentDatabaseBackup(): Promise<string> {
const timestamp = new Date().toISOString().replace(/[:.]/g, '-');
const backupDir = path.join(databasePaths.directory, 'backups');
const timestamp = new Date().toISOString().replace(/[:.]/g, "-");
const backupDir = path.join(databasePaths.directory, "backups");
if (!fs.existsSync(backupDir)) {
fs.mkdirSync(backupDir, { recursive: true });
}
// Create SQLite backup
const backupPath = path.join(backupDir, `database-backup-${timestamp}.sqlite`);
const backupPath = path.join(
backupDir,
`database-backup-${timestamp}.sqlite`,
);
// Copy current database file
fs.copyFileSync(databasePaths.main, backupPath);
@@ -636,7 +712,10 @@ class DatabaseSQLiteExport {
/**
* Check if a field should be tracked as encrypted
*/
private static shouldTrackEncryptedField(tableName: string, fieldName: string): boolean {
private static shouldTrackEncryptedField(
tableName: string,
fieldName: string,
): boolean {
try {
return FieldEncryption.shouldEncryptField(tableName, fieldName);
} catch {
@@ -646,4 +725,4 @@ class DatabaseSQLiteExport {
}
export { DatabaseSQLiteExport };
export type { ExportMetadata, ImportResult };
export type { ExportMetadata, ImportResult };

View File

@@ -1,83 +1,93 @@
import { db } from '../database/db/index.js';
import { DatabaseEncryption } from './database-encryption.js';
import { databaseLogger } from './logger.js';
import type { SQLiteTable } from 'drizzle-orm/sqlite-core';
import { db } from "../database/db/index.js";
import { DatabaseEncryption } from "./database-encryption.js";
import { databaseLogger } from "./logger.js";
import type { SQLiteTable } from "drizzle-orm/sqlite-core";
type TableName = 'users' | 'ssh_data' | 'ssh_credentials';
type TableName = "users" | "ssh_data" | "ssh_credentials";
class EncryptedDBOperations {
static async insert<T extends Record<string, any>>(
table: SQLiteTable<any>,
tableName: TableName,
data: T
data: T,
): Promise<T> {
try {
const encryptedData = DatabaseEncryption.encryptRecord(tableName, data);
const result = await db.insert(table).values(encryptedData).returning();
// Decrypt the returned data to ensure consistency
const decryptedResult = DatabaseEncryption.decryptRecord(tableName, result[0]);
const decryptedResult = DatabaseEncryption.decryptRecord(
tableName,
result[0],
);
databaseLogger.debug(`Inserted encrypted record into ${tableName}`, {
operation: 'encrypted_insert',
table: tableName
operation: "encrypted_insert",
table: tableName,
});
return decryptedResult as T;
} catch (error) {
databaseLogger.error(`Failed to insert encrypted record into ${tableName}`, error, {
operation: 'encrypted_insert_failed',
table: tableName
});
databaseLogger.error(
`Failed to insert encrypted record into ${tableName}`,
error,
{
operation: "encrypted_insert_failed",
table: tableName,
},
);
throw error;
}
}
static async select<T extends Record<string, any>>(
query: any,
tableName: TableName
tableName: TableName,
): Promise<T[]> {
try {
const results = await query;
const decryptedResults = DatabaseEncryption.decryptRecords(tableName, results);
databaseLogger.debug(`Selected and decrypted ${decryptedResults.length} records from ${tableName}`, {
operation: 'encrypted_select',
table: tableName,
count: decryptedResults.length
});
const decryptedResults = DatabaseEncryption.decryptRecords(
tableName,
results,
);
return decryptedResults;
} catch (error) {
databaseLogger.error(`Failed to select/decrypt records from ${tableName}`, error, {
operation: 'encrypted_select_failed',
table: tableName
});
databaseLogger.error(
`Failed to select/decrypt records from ${tableName}`,
error,
{
operation: "encrypted_select_failed",
table: tableName,
},
);
throw error;
}
}
static async selectOne<T extends Record<string, any>>(
query: any,
tableName: TableName
tableName: TableName,
): Promise<T | undefined> {
try {
const result = await query;
if (!result) return undefined;
const decryptedResult = DatabaseEncryption.decryptRecord(tableName, result);
databaseLogger.debug(`Selected and decrypted single record from ${tableName}`, {
operation: 'encrypted_select_one',
table: tableName
});
const decryptedResult = DatabaseEncryption.decryptRecord(
tableName,
result,
);
return decryptedResult;
} catch (error) {
databaseLogger.error(`Failed to select/decrypt single record from ${tableName}`, error, {
operation: 'encrypted_select_one_failed',
table: tableName
});
databaseLogger.error(
`Failed to select/decrypt single record from ${tableName}`,
error,
{
operation: "encrypted_select_one_failed",
table: tableName,
},
);
throw error;
}
}
@@ -86,23 +96,31 @@ class EncryptedDBOperations {
table: SQLiteTable<any>,
tableName: TableName,
where: any,
data: Partial<T>
data: Partial<T>,
): Promise<T[]> {
try {
const encryptedData = DatabaseEncryption.encryptRecord(tableName, data);
const result = await db.update(table).set(encryptedData).where(where).returning();
const result = await db
.update(table)
.set(encryptedData)
.where(where)
.returning();
databaseLogger.debug(`Updated encrypted record in ${tableName}`, {
operation: 'encrypted_update',
table: tableName
operation: "encrypted_update",
table: tableName,
});
return result as T[];
} catch (error) {
databaseLogger.error(`Failed to update encrypted record in ${tableName}`, error, {
operation: 'encrypted_update_failed',
table: tableName
});
databaseLogger.error(
`Failed to update encrypted record in ${tableName}`,
error,
{
operation: "encrypted_update_failed",
table: tableName,
},
);
throw error;
}
}
@@ -110,21 +128,21 @@ class EncryptedDBOperations {
static async delete(
table: SQLiteTable<any>,
tableName: TableName,
where: any
where: any,
): Promise<any[]> {
try {
const result = await db.delete(table).where(where).returning();
databaseLogger.debug(`Deleted record from ${tableName}`, {
operation: 'encrypted_delete',
table: tableName
operation: "encrypted_delete",
table: tableName,
});
return result;
} catch (error) {
databaseLogger.error(`Failed to delete record from ${tableName}`, error, {
operation: 'encrypted_delete_failed',
table: tableName
operation: "encrypted_delete_failed",
table: tableName,
});
throw error;
}
@@ -135,26 +153,26 @@ class EncryptedDBOperations {
try {
databaseLogger.info(`Starting encryption migration for ${tableName}`, {
operation: 'migration_start',
table: tableName
operation: "migration_start",
table: tableName,
});
let table: SQLiteTable<any>;
let records: any[];
switch (tableName) {
case 'users':
const { users } = await import('../database/db/schema.js');
case "users":
const { users } = await import("../database/db/schema.js");
table = users;
records = await db.select().from(users);
break;
case 'ssh_data':
const { sshData } = await import('../database/db/schema.js');
case "ssh_data":
const { sshData } = await import("../database/db/schema.js");
table = sshData;
records = await db.select().from(sshData);
break;
case 'ssh_credentials':
const { sshCredentials } = await import('../database/db/schema.js');
case "ssh_credentials":
const { sshCredentials } = await import("../database/db/schema.js");
table = sshCredentials;
records = await db.select().from(sshCredentials);
break;
@@ -164,34 +182,44 @@ class EncryptedDBOperations {
for (const record of records) {
try {
const migratedRecord = await DatabaseEncryption.migrateRecord(tableName, record);
const migratedRecord = await DatabaseEncryption.migrateRecord(
tableName,
record,
);
if (JSON.stringify(migratedRecord) !== JSON.stringify(record)) {
const { eq } = await import('drizzle-orm');
await db.update(table).set(migratedRecord).where(eq((table as any).id, record.id));
const { eq } = await import("drizzle-orm");
await db
.update(table)
.set(migratedRecord)
.where(eq((table as any).id, record.id));
migratedCount++;
}
} catch (error) {
databaseLogger.error(`Failed to migrate record ${record.id} in ${tableName}`, error, {
operation: 'migration_record_failed',
table: tableName,
recordId: record.id
});
databaseLogger.error(
`Failed to migrate record ${record.id} in ${tableName}`,
error,
{
operation: "migration_record_failed",
table: tableName,
recordId: record.id,
},
);
}
}
databaseLogger.success(`Migration completed for ${tableName}`, {
operation: 'migration_complete',
operation: "migration_complete",
table: tableName,
migratedCount,
totalRecords: records.length
totalRecords: records.length,
});
return migratedCount;
} catch (error) {
databaseLogger.error(`Migration failed for ${tableName}`, error, {
operation: 'migration_failed',
table: tableName
operation: "migration_failed",
table: tableName,
});
throw error;
}
@@ -202,8 +230,8 @@ class EncryptedDBOperations {
const status = DatabaseEncryption.getEncryptionStatus();
return status.configValid && status.enabled;
} catch (error) {
databaseLogger.error('Encryption health check failed', error, {
operation: 'health_check_failed'
databaseLogger.error("Encryption health check failed", error, {
operation: "health_check_failed",
});
return false;
}
@@ -211,4 +239,4 @@ class EncryptedDBOperations {
}
export { EncryptedDBOperations };
export type { TableName };
export type { TableName };

View File

@@ -1,9 +1,9 @@
import crypto from 'crypto';
import { db } from '../database/db/index.js';
import { settings } from '../database/db/schema.js';
import { eq } from 'drizzle-orm';
import { databaseLogger } from './logger.js';
import { MasterKeyProtection } from './master-key-protection.js';
import crypto from "crypto";
import { db } from "../database/db/index.js";
import { settings } from "../database/db/schema.js";
import { eq } from "drizzle-orm";
import { databaseLogger } from "./logger.js";
import { MasterKeyProtection } from "./master-key-protection.js";
interface EncryptionKeyInfo {
hasKey: boolean;
@@ -35,44 +35,49 @@ class EncryptionKeyManager {
return MasterKeyProtection.decryptMasterKey(encodedKey);
}
databaseLogger.warn('Found legacy base64-encoded key, migrating to KEK protection', {
operation: 'key_migration_legacy'
});
const buffer = Buffer.from(encodedKey, 'base64');
return buffer.toString('hex');
databaseLogger.warn(
"Found legacy base64-encoded key, migrating to KEK protection",
{
operation: "key_migration_legacy",
},
);
const buffer = Buffer.from(encodedKey, "base64");
return buffer.toString("hex");
}
async initializeKey(): Promise<string> {
databaseLogger.info('Initializing encryption key system...', {
operation: 'key_init'
});
try {
let existingKey = await this.getStoredKey();
if (existingKey) {
databaseLogger.success('Found existing encryption key', {
operation: 'key_init',
hasKey: true
databaseLogger.success("Found existing encryption key", {
operation: "key_init",
hasKey: true,
});
this.currentKey = existingKey;
return existingKey;
}
const environmentKey = process.env.DB_ENCRYPTION_KEY;
if (environmentKey && environmentKey !== 'default-key-change-me') {
if (environmentKey && environmentKey !== "default-key-change-me") {
if (!this.validateKeyStrength(environmentKey)) {
databaseLogger.error('Environment encryption key is too weak', undefined, {
operation: 'key_init',
source: 'environment',
keyLength: environmentKey.length
});
throw new Error('DB_ENCRYPTION_KEY is too weak. Must be at least 32 characters with good entropy.');
databaseLogger.error(
"Environment encryption key is too weak",
undefined,
{
operation: "key_init",
source: "environment",
keyLength: environmentKey.length,
},
);
throw new Error(
"DB_ENCRYPTION_KEY is too weak. Must be at least 32 characters with good entropy.",
);
}
databaseLogger.info('Using encryption key from environment variable', {
operation: 'key_init',
source: 'environment'
databaseLogger.info("Using encryption key from environment variable", {
operation: "key_init",
source: "environment",
});
await this.storeKey(environmentKey);
@@ -81,33 +86,35 @@ class EncryptionKeyManager {
}
const newKey = await this.generateNewKey();
databaseLogger.warn('Generated new encryption key - PLEASE BACKUP THIS KEY', {
operation: 'key_init',
generated: true,
keyPreview: newKey.substring(0, 8) + '...'
});
databaseLogger.warn(
"Generated new encryption key - PLEASE BACKUP THIS KEY",
{
operation: "key_init",
generated: true,
keyPreview: newKey.substring(0, 8) + "...",
},
);
return newKey;
} catch (error) {
databaseLogger.error('Failed to initialize encryption key', error, {
operation: 'key_init_failed'
databaseLogger.error("Failed to initialize encryption key", error, {
operation: "key_init_failed",
});
throw error;
}
}
async generateNewKey(): Promise<string> {
const newKey = crypto.randomBytes(32).toString('hex');
const keyId = crypto.randomBytes(8).toString('hex');
const newKey = crypto.randomBytes(32).toString("hex");
const keyId = crypto.randomBytes(8).toString("hex");
await this.storeKey(newKey, keyId);
this.currentKey = newKey;
databaseLogger.success('Generated new encryption key', {
operation: 'key_generated',
databaseLogger.success("Generated new encryption key", {
operation: "key_generated",
keyId,
keyLength: newKey.length
keyLength: newKey.length,
});
return newKey;
@@ -115,41 +122,49 @@ class EncryptionKeyManager {
private async storeKey(key: string, keyId?: string): Promise<void> {
const now = new Date().toISOString();
const id = keyId || crypto.randomBytes(8).toString('hex');
const id = keyId || crypto.randomBytes(8).toString("hex");
const keyData = {
key: this.encodeKey(key),
keyId: id,
createdAt: now,
algorithm: 'aes-256-gcm'
algorithm: "aes-256-gcm",
};
const encodedData = JSON.stringify(keyData);
try {
const existing = await db.select().from(settings).where(eq(settings.key, 'db_encryption_key'));
const existing = await db
.select()
.from(settings)
.where(eq(settings.key, "db_encryption_key"));
if (existing.length > 0) {
await db.update(settings)
await db
.update(settings)
.set({ value: encodedData })
.where(eq(settings.key, 'db_encryption_key'));
.where(eq(settings.key, "db_encryption_key"));
} else {
await db.insert(settings).values({
key: 'db_encryption_key',
value: encodedData
key: "db_encryption_key",
value: encodedData,
});
}
const existingCreated = await db.select().from(settings).where(eq(settings.key, 'encryption_key_created'));
const existingCreated = await db
.select()
.from(settings)
.where(eq(settings.key, "encryption_key_created"));
if (existingCreated.length > 0) {
await db.update(settings)
await db
.update(settings)
.set({ value: now })
.where(eq(settings.key, 'encryption_key_created'));
.where(eq(settings.key, "encryption_key_created"));
} else {
await db.insert(settings).values({
key: 'encryption_key_created',
value: now
key: "encryption_key_created",
value: now,
});
}
@@ -157,12 +172,11 @@ class EncryptionKeyManager {
hasKey: true,
keyId: id,
createdAt: now,
algorithm: 'aes-256-gcm'
algorithm: "aes-256-gcm",
};
} catch (error) {
databaseLogger.error('Failed to store encryption key', error, {
operation: 'key_store_failed'
databaseLogger.error("Failed to store encryption key", error, {
operation: "key_store_failed",
});
throw error;
}
@@ -170,7 +184,10 @@ class EncryptionKeyManager {
private async getStoredKey(): Promise<string | null> {
try {
const result = await db.select().from(settings).where(eq(settings.key, 'db_encryption_key'));
const result = await db
.select()
.from(settings)
.where(eq(settings.key, "db_encryption_key"));
if (result.length === 0) {
return null;
@@ -182,34 +199,33 @@ class EncryptionKeyManager {
try {
keyData = JSON.parse(encodedData);
} catch {
databaseLogger.warn('Found legacy base64-encoded key data, migrating', {
operation: 'key_data_migration_legacy'
databaseLogger.warn("Found legacy base64-encoded key data, migrating", {
operation: "key_data_migration_legacy",
});
keyData = JSON.parse(Buffer.from(encodedData, 'base64').toString());
keyData = JSON.parse(Buffer.from(encodedData, "base64").toString());
}
this.keyInfo = {
hasKey: true,
keyId: keyData.keyId,
createdAt: keyData.createdAt,
algorithm: keyData.algorithm
algorithm: keyData.algorithm,
};
const decodedKey = this.decodeKey(keyData.key);
if (!MasterKeyProtection.isProtectedKey(keyData.key)) {
databaseLogger.info('Auto-migrating legacy key to KEK protection', {
operation: 'key_auto_migration',
keyId: keyData.keyId
databaseLogger.info("Auto-migrating legacy key to KEK protection", {
operation: "key_auto_migration",
keyId: keyData.keyId,
});
await this.storeKey(decodedKey, keyData.keyId);
}
return decodedKey;
} catch (error) {
databaseLogger.error('Failed to retrieve stored encryption key', error, {
operation: 'key_retrieve_failed'
databaseLogger.error("Failed to retrieve stored encryption key", error, {
operation: "key_retrieve_failed",
});
return null;
}
@@ -221,28 +237,31 @@ class EncryptionKeyManager {
async getKeyInfo(): Promise<EncryptionKeyInfo> {
if (!this.keyInfo) {
const hasKey = await this.getStoredKey() !== null;
const hasKey = (await this.getStoredKey()) !== null;
return {
hasKey,
algorithm: 'aes-256-gcm'
algorithm: "aes-256-gcm",
};
}
return this.keyInfo;
}
async regenerateKey(): Promise<string> {
databaseLogger.info('Regenerating encryption key', {
operation: 'key_regenerate'
databaseLogger.info("Regenerating encryption key", {
operation: "key_regenerate",
});
const oldKeyInfo = await this.getKeyInfo();
const newKey = await this.generateNewKey();
databaseLogger.warn('Encryption key regenerated - ALL DATA MUST BE RE-ENCRYPTED', {
operation: 'key_regenerated',
oldKeyId: oldKeyInfo.keyId,
newKeyId: this.keyInfo?.keyId
});
databaseLogger.warn(
"Encryption key regenerated - ALL DATA MUST BE RE-ENCRYPTED",
{
operation: "key_regenerated",
oldKeyId: oldKeyInfo.keyId,
newKeyId: this.keyInfo?.keyId,
},
);
return newKey;
}
@@ -257,7 +276,11 @@ class EncryptionKeyManager {
const entropyTest = new Set(key).size / key.length;
const complexity = Number(hasLower) + Number(hasUpper) + Number(hasDigit) + Number(hasSpecial);
const complexity =
Number(hasLower) +
Number(hasUpper) +
Number(hasDigit) +
Number(hasSpecial);
return complexity >= 3 && entropyTest > 0.4;
}
@@ -266,16 +289,20 @@ class EncryptionKeyManager {
if (!testKey) return false;
try {
const testData = 'validation-test-' + Date.now();
const testBuffer = Buffer.from(testKey, 'hex');
const testData = "validation-test-" + Date.now();
const testBuffer = Buffer.from(testKey, "hex");
if (testBuffer.length !== 32) {
return false;
}
const iv = crypto.randomBytes(16);
const cipher = crypto.createCipheriv('aes-256-gcm', testBuffer, iv) as any;
cipher.update(testData, 'utf8');
const cipher = crypto.createCipheriv(
"aes-256-gcm",
testBuffer,
iv,
) as any;
cipher.update(testData, "utf8");
cipher.final();
cipher.getAuthTag();
@@ -302,13 +329,16 @@ class EncryptionKeyManager {
algorithm: keyInfo.algorithm,
initialized: this.isInitialized(),
kekProtected,
kekValid: kekProtected ? MasterKeyProtection.validateProtection() : false
kekValid: kekProtected ? MasterKeyProtection.validateProtection() : false,
};
}
private async isKEKProtected(): Promise<boolean> {
try {
const result = await db.select().from(settings).where(eq(settings.key, 'db_encryption_key'));
const result = await db
.select()
.from(settings)
.where(eq(settings.key, "db_encryption_key"));
if (result.length === 0) return false;
const keyData = JSON.parse(result[0].value);
@@ -320,4 +350,4 @@ class EncryptionKeyManager {
}
export { EncryptionKeyManager };
export type { EncryptionKeyInfo };
export type { EncryptionKeyInfo };

View File

@@ -1,11 +1,11 @@
#!/usr/bin/env node
import { DatabaseEncryption } from './database-encryption.js';
import { EncryptedDBOperations } from './encrypted-db-operations.js';
import { EncryptionKeyManager } from './encryption-key-manager.js';
import { databaseLogger } from './logger.js';
import { db } from '../database/db/index.js';
import { settings } from '../database/db/schema.js';
import { eq, sql } from 'drizzle-orm';
import { DatabaseEncryption } from "./database-encryption.js";
import { EncryptedDBOperations } from "./encrypted-db-operations.js";
import { EncryptionKeyManager } from "./encryption-key-manager.js";
import { databaseLogger } from "./logger.js";
import { db } from "../database/db/index.js";
import { settings } from "../database/db/schema.js";
import { eq, sql } from "drizzle-orm";
interface MigrationConfig {
masterPassword?: string;
@@ -22,15 +22,15 @@ class EncryptionMigration {
masterPassword: config.masterPassword,
forceEncryption: config.forceEncryption ?? false,
backupEnabled: config.backupEnabled ?? true,
dryRun: config.dryRun ?? false
dryRun: config.dryRun ?? false,
};
}
async runMigration(): Promise<void> {
databaseLogger.info('Starting database encryption migration', {
operation: 'migration_start',
databaseLogger.info("Starting database encryption migration", {
operation: "migration_start",
dryRun: this.config.dryRun,
forceEncryption: this.config.forceEncryption
forceEncryption: this.config.forceEncryption,
});
try {
@@ -45,21 +45,23 @@ class EncryptionMigration {
await this.updateSettings();
await this.verifyMigration();
databaseLogger.success('Database encryption migration completed successfully', {
operation: 'migration_complete'
});
databaseLogger.success(
"Database encryption migration completed successfully",
{
operation: "migration_complete",
},
);
} catch (error) {
databaseLogger.error('Migration failed', error, {
operation: 'migration_failed'
databaseLogger.error("Migration failed", error, {
operation: "migration_failed",
});
throw error;
}
}
private async validatePrerequisites(): Promise<void> {
databaseLogger.info('Validating migration prerequisites', {
operation: 'validation'
databaseLogger.info("Validating migration prerequisites", {
operation: "validation",
});
// Check if KEK-managed encryption key exists
@@ -77,187 +79,200 @@ class EncryptionMigration {
this.config.masterPassword = currentKey;
}
} catch (error) {
throw new Error('Failed to retrieve encryption key from KEK manager. Please ensure encryption is properly initialized.');
throw new Error(
"Failed to retrieve encryption key from KEK manager. Please ensure encryption is properly initialized.",
);
}
}
// Validate key strength
if (this.config.masterPassword.length < 16) {
throw new Error('Master password must be at least 16 characters long');
throw new Error("Master password must be at least 16 characters long");
}
// Test database connection
try {
await db.select().from(settings).limit(1);
} catch (error) {
throw new Error('Database connection failed');
throw new Error("Database connection failed");
}
databaseLogger.success('Prerequisites validation passed', {
operation: 'validation_complete',
keySource: 'kek_manager'
databaseLogger.success("Prerequisites validation passed", {
operation: "validation_complete",
keySource: "kek_manager",
});
}
private async createBackup(): Promise<void> {
databaseLogger.info('Creating database backup before migration', {
operation: 'backup_start'
databaseLogger.info("Creating database backup before migration", {
operation: "backup_start",
});
try {
const fs = await import('fs');
const path = await import('path');
const dataDir = process.env.DATA_DIR || './db/data';
const dbPath = path.join(dataDir, 'db.sqlite');
const fs = await import("fs");
const path = await import("path");
const dataDir = process.env.DATA_DIR || "./db/data";
const dbPath = path.join(dataDir, "db.sqlite");
const backupPath = path.join(dataDir, `db-backup-${Date.now()}.sqlite`);
if (fs.existsSync(dbPath)) {
fs.copyFileSync(dbPath, backupPath);
databaseLogger.success(`Database backup created: ${backupPath}`, {
operation: 'backup_complete',
backupPath
operation: "backup_complete",
backupPath,
});
}
} catch (error) {
databaseLogger.error('Failed to create backup', error, {
operation: 'backup_failed'
databaseLogger.error("Failed to create backup", error, {
operation: "backup_failed",
});
throw error;
}
}
private async initializeEncryption(): Promise<void> {
databaseLogger.info('Initializing encryption system', {
operation: 'encryption_init'
databaseLogger.info("Initializing encryption system", {
operation: "encryption_init",
});
DatabaseEncryption.initialize({
masterPassword: this.config.masterPassword!,
encryptionEnabled: true,
forceEncryption: this.config.forceEncryption,
migrateOnAccess: true
migrateOnAccess: true,
});
const isHealthy = await EncryptedDBOperations.healthCheck();
if (!isHealthy) {
throw new Error('Encryption system health check failed');
throw new Error("Encryption system health check failed");
}
databaseLogger.success('Encryption system initialized successfully', {
operation: 'encryption_init_complete'
databaseLogger.success("Encryption system initialized successfully", {
operation: "encryption_init_complete",
});
}
private async migrateTables(): Promise<void> {
const tables: Array<'users' | 'ssh_data' | 'ssh_credentials'> = [
'users',
'ssh_data',
'ssh_credentials'
const tables: Array<"users" | "ssh_data" | "ssh_credentials"> = [
"users",
"ssh_data",
"ssh_credentials",
];
let totalMigrated = 0;
for (const tableName of tables) {
databaseLogger.info(`Starting migration for table: ${tableName}`, {
operation: 'table_migration_start',
table: tableName
operation: "table_migration_start",
table: tableName,
});
try {
if (this.config.dryRun) {
databaseLogger.info(`[DRY RUN] Would migrate table: ${tableName}`, {
operation: 'dry_run_table',
table: tableName
operation: "dry_run_table",
table: tableName,
});
continue;
}
const migratedCount = await EncryptedDBOperations.migrateExistingRecords(tableName);
const migratedCount =
await EncryptedDBOperations.migrateExistingRecords(tableName);
totalMigrated += migratedCount;
databaseLogger.success(`Migration completed for table: ${tableName}`, {
operation: 'table_migration_complete',
operation: "table_migration_complete",
table: tableName,
migratedCount
migratedCount,
});
} catch (error) {
databaseLogger.error(`Migration failed for table: ${tableName}`, error, {
operation: 'table_migration_failed',
table: tableName
});
databaseLogger.error(
`Migration failed for table: ${tableName}`,
error,
{
operation: "table_migration_failed",
table: tableName,
},
);
throw error;
}
}
databaseLogger.success(`All tables migrated successfully`, {
operation: 'all_tables_migrated',
totalMigrated
operation: "all_tables_migrated",
totalMigrated,
});
}
private async updateSettings(): Promise<void> {
if (this.config.dryRun) {
databaseLogger.info('[DRY RUN] Would update encryption settings', {
operation: 'dry_run_settings'
databaseLogger.info("[DRY RUN] Would update encryption settings", {
operation: "dry_run_settings",
});
return;
}
try {
const encryptionSettings = [
{ key: 'encryption_enabled', value: 'true' },
{ key: 'encryption_migration_completed', value: new Date().toISOString() },
{ key: 'encryption_version', value: '1.0' }
{ key: "encryption_enabled", value: "true" },
{
key: "encryption_migration_completed",
value: new Date().toISOString(),
},
{ key: "encryption_version", value: "1.0" },
];
for (const setting of encryptionSettings) {
const existing = await db.select().from(settings).where(eq(settings.key, setting.key));
const existing = await db
.select()
.from(settings)
.where(eq(settings.key, setting.key));
if (existing.length > 0) {
await db.update(settings).set({ value: setting.value }).where(eq(settings.key, setting.key));
await db
.update(settings)
.set({ value: setting.value })
.where(eq(settings.key, setting.key));
} else {
await db.insert(settings).values(setting);
}
}
databaseLogger.success('Encryption settings updated', {
operation: 'settings_updated'
databaseLogger.success("Encryption settings updated", {
operation: "settings_updated",
});
} catch (error) {
databaseLogger.error('Failed to update settings', error, {
operation: 'settings_update_failed'
databaseLogger.error("Failed to update settings", error, {
operation: "settings_update_failed",
});
throw error;
}
}
private async verifyMigration(): Promise<void> {
databaseLogger.info('Verifying migration integrity', {
operation: 'verification_start'
databaseLogger.info("Verifying migration integrity", {
operation: "verification_start",
});
try {
const status = DatabaseEncryption.getEncryptionStatus();
if (!status.enabled || !status.configValid) {
throw new Error('Encryption system verification failed');
throw new Error("Encryption system verification failed");
}
const testResult = await this.performTestEncryption();
if (!testResult) {
throw new Error('Test encryption/decryption failed');
throw new Error("Test encryption/decryption failed");
}
databaseLogger.success('Migration verification completed successfully', {
operation: 'verification_complete',
status
databaseLogger.success("Migration verification completed successfully", {
operation: "verification_complete",
status,
});
} catch (error) {
databaseLogger.error('Migration verification failed', error, {
operation: 'verification_failed'
databaseLogger.error("Migration verification failed", error, {
operation: "verification_failed",
});
throw error;
}
@@ -265,9 +280,12 @@ class EncryptionMigration {
private async performTestEncryption(): Promise<boolean> {
try {
const { FieldEncryption } = await import('./encryption.js');
const { FieldEncryption } = await import("./encryption.js");
const testData = `test-data-${Date.now()}`;
const testKey = FieldEncryption.getFieldKey(this.config.masterPassword!, 'test');
const testKey = FieldEncryption.getFieldKey(
this.config.masterPassword!,
"test",
);
const encrypted = FieldEncryption.encryptField(testData, testKey);
const decrypted = FieldEncryption.decryptField(encrypted, testKey);
@@ -285,10 +303,17 @@ class EncryptionMigration {
migrationDate?: string;
}> {
try {
const encryptionEnabled = await db.select().from(settings).where(eq(settings.key, 'encryption_enabled'));
const migrationCompleted = await db.select().from(settings).where(eq(settings.key, 'encryption_migration_completed'));
const encryptionEnabled = await db
.select()
.from(settings)
.where(eq(settings.key, "encryption_enabled"));
const migrationCompleted = await db
.select()
.from(settings)
.where(eq(settings.key, "encryption_migration_completed"));
const isEncryptionEnabled = encryptionEnabled.length > 0 && encryptionEnabled[0].value === 'true';
const isEncryptionEnabled =
encryptionEnabled.length > 0 && encryptionEnabled[0].value === "true";
const isMigrationCompleted = migrationCompleted.length > 0;
// Check if migration is actually required by looking for unencrypted sensitive data
@@ -298,11 +323,13 @@ class EncryptionMigration {
isEncryptionEnabled,
migrationCompleted: isMigrationCompleted,
migrationRequired,
migrationDate: isMigrationCompleted ? migrationCompleted[0].value : undefined
migrationDate: isMigrationCompleted
? migrationCompleted[0].value
: undefined,
};
} catch (error) {
databaseLogger.error('Failed to check migration status', error, {
operation: 'status_check_failed'
databaseLogger.error("Failed to check migration status", error, {
operation: "status_check_failed",
});
throw error;
}
@@ -311,10 +338,14 @@ class EncryptionMigration {
static async checkIfMigrationRequired(): Promise<boolean> {
try {
// Import table schemas
const { sshData, sshCredentials } = await import('../database/db/schema.js');
const { sshData, sshCredentials } = await import(
"../database/db/schema.js"
);
// Check if there's any unencrypted sensitive data in ssh_data
const sshDataCount = await db.select({ count: sql<number>`count(*)` }).from(sshData);
const sshDataCount = await db
.select({ count: sql<number>`count(*)` })
.from(sshData);
if (sshDataCount[0].count > 0) {
// Sample a few records to check if they contain unencrypted data
const sampleData = await db.select().from(sshData).limit(5);
@@ -329,9 +360,14 @@ class EncryptionMigration {
}
// Check if there's any unencrypted sensitive data in ssh_credentials
const credentialsCount = await db.select({ count: sql<number>`count(*)` }).from(sshCredentials);
const credentialsCount = await db
.select({ count: sql<number>`count(*)` })
.from(sshCredentials);
if (credentialsCount[0].count > 0) {
const sampleCredentials = await db.select().from(sshCredentials).limit(5);
const sampleCredentials = await db
.select()
.from(sshCredentials)
.limit(5);
for (const record of sampleCredentials) {
if (record.password && !this.looksEncrypted(record.password)) {
return true; // Found unencrypted password
@@ -347,10 +383,13 @@ class EncryptionMigration {
return false; // No unencrypted sensitive data found
} catch (error) {
databaseLogger.warn('Failed to check if migration required, assuming required', {
operation: 'migration_check_failed',
error: error instanceof Error ? error.message : 'Unknown error'
});
databaseLogger.warn(
"Failed to check if migration required, assuming required",
{
operation: "migration_check_failed",
error: error instanceof Error ? error.message : "Unknown error",
},
);
return true; // If we can't check, assume migration is required for safety
}
}
@@ -365,7 +404,7 @@ class EncryptionMigration {
} catch {
// If it's not JSON, check if it's a reasonable length for encrypted data
// Encrypted data is typically much longer than plaintext
return data.length > 100 && data.includes('='); // Base64-like characteristics
return data.length > 100 && data.includes("="); // Base64-like characteristics
}
}
}
@@ -373,23 +412,24 @@ class EncryptionMigration {
if (import.meta.url === `file://${process.argv[1]}`) {
const config: MigrationConfig = {
masterPassword: process.env.DB_ENCRYPTION_KEY,
forceEncryption: process.env.FORCE_ENCRYPTION === 'true',
backupEnabled: process.env.BACKUP_ENABLED !== 'false',
dryRun: process.env.DRY_RUN === 'true'
forceEncryption: process.env.FORCE_ENCRYPTION === "true",
backupEnabled: process.env.BACKUP_ENABLED !== "false",
dryRun: process.env.DRY_RUN === "true",
};
const migration = new EncryptionMigration(config);
migration.runMigration()
migration
.runMigration()
.then(() => {
console.log('Migration completed successfully');
console.log("Migration completed successfully");
process.exit(0);
})
.catch((error) => {
console.error('Migration failed:', error.message);
console.error("Migration failed:", error.message);
process.exit(1);
});
}
export { EncryptionMigration };
export type { MigrationConfig };
export type { MigrationConfig };

View File

@@ -1,24 +1,39 @@
#!/usr/bin/env node
import { FieldEncryption } from './encryption.js';
import { DatabaseEncryption } from './database-encryption.js';
import { EncryptedDBOperations } from './encrypted-db-operations.js';
import { databaseLogger } from './logger.js';
import { FieldEncryption } from "./encryption.js";
import { DatabaseEncryption } from "./database-encryption.js";
import { EncryptedDBOperations } from "./encrypted-db-operations.js";
import { databaseLogger } from "./logger.js";
class EncryptionTest {
private testPassword = 'test-master-password-for-validation';
private testPassword = "test-master-password-for-validation";
async runAllTests(): Promise<boolean> {
console.log('🔐 Starting Termix Database Encryption Tests...\n');
console.log("🔐 Starting Termix Database Encryption Tests...\n");
const tests = [
{ name: 'Basic Encryption/Decryption', test: () => this.testBasicEncryption() },
{ name: 'Field Encryption Detection', test: () => this.testFieldDetection() },
{ name: 'Key Derivation', test: () => this.testKeyDerivation() },
{ name: 'Database Encryption Context', test: () => this.testDatabaseContext() },
{ name: 'Record Encryption/Decryption', test: () => this.testRecordOperations() },
{ name: 'Backward Compatibility', test: () => this.testBackwardCompatibility() },
{ name: 'Error Handling', test: () => this.testErrorHandling() },
{ name: 'Performance Test', test: () => this.testPerformance() }
{
name: "Basic Encryption/Decryption",
test: () => this.testBasicEncryption(),
},
{
name: "Field Encryption Detection",
test: () => this.testFieldDetection(),
},
{ name: "Key Derivation", test: () => this.testKeyDerivation() },
{
name: "Database Encryption Context",
test: () => this.testDatabaseContext(),
},
{
name: "Record Encryption/Decryption",
test: () => this.testRecordOperations(),
},
{
name: "Backward Compatibility",
test: () => this.testBackwardCompatibility(),
},
{ name: "Error Handling", test: () => this.testErrorHandling() },
{ name: "Performance Test", test: () => this.testPerformance() },
];
let passedTests = 0;
@@ -32,7 +47,9 @@ class EncryptionTest {
passedTests++;
} catch (error) {
console.log(`❌ FAILED: ${test.name}`);
console.log(` Error: ${error instanceof Error ? error.message : 'Unknown error'}\n`);
console.log(
` Error: ${error instanceof Error ? error.message : "Unknown error"}\n`,
);
}
}
@@ -40,75 +57,85 @@ class EncryptionTest {
console.log(`\n🎯 Test Results: ${passedTests}/${totalTests} tests passed`);
if (success) {
console.log('🎉 All encryption tests PASSED! System is ready for production.');
console.log(
"🎉 All encryption tests PASSED! System is ready for production.",
);
} else {
console.log('⚠️ Some tests FAILED! Please review the implementation.');
console.log("⚠️ Some tests FAILED! Please review the implementation.");
}
return success;
}
private async testBasicEncryption(): Promise<void> {
const testData = 'Hello, World! This is sensitive data.';
const key = FieldEncryption.getFieldKey(this.testPassword, 'test-field');
const testData = "Hello, World! This is sensitive data.";
const key = FieldEncryption.getFieldKey(this.testPassword, "test-field");
const encrypted = FieldEncryption.encryptField(testData, key);
const decrypted = FieldEncryption.decryptField(encrypted, key);
if (decrypted !== testData) {
throw new Error(`Decryption mismatch: expected "${testData}", got "${decrypted}"`);
throw new Error(
`Decryption mismatch: expected "${testData}", got "${decrypted}"`,
);
}
if (!FieldEncryption.isEncrypted(encrypted)) {
throw new Error('Encrypted data not detected as encrypted');
throw new Error("Encrypted data not detected as encrypted");
}
if (FieldEncryption.isEncrypted(testData)) {
throw new Error('Plain text incorrectly detected as encrypted');
throw new Error("Plain text incorrectly detected as encrypted");
}
}
private async testFieldDetection(): Promise<void> {
const testCases = [
{ table: 'users', field: 'password_hash', shouldEncrypt: true },
{ table: 'users', field: 'username', shouldEncrypt: false },
{ table: 'ssh_data', field: 'password', shouldEncrypt: true },
{ table: 'ssh_data', field: 'ip', shouldEncrypt: false },
{ table: 'ssh_credentials', field: 'privateKey', shouldEncrypt: true },
{ table: 'unknown_table', field: 'any_field', shouldEncrypt: false }
{ table: "users", field: "password_hash", shouldEncrypt: true },
{ table: "users", field: "username", shouldEncrypt: false },
{ table: "ssh_data", field: "password", shouldEncrypt: true },
{ table: "ssh_data", field: "ip", shouldEncrypt: false },
{ table: "ssh_credentials", field: "privateKey", shouldEncrypt: true },
{ table: "unknown_table", field: "any_field", shouldEncrypt: false },
];
for (const testCase of testCases) {
const result = FieldEncryption.shouldEncryptField(testCase.table, testCase.field);
const result = FieldEncryption.shouldEncryptField(
testCase.table,
testCase.field,
);
if (result !== testCase.shouldEncrypt) {
throw new Error(
`Field detection failed for ${testCase.table}.${testCase.field}: ` +
`expected ${testCase.shouldEncrypt}, got ${result}`
`expected ${testCase.shouldEncrypt}, got ${result}`,
);
}
}
}
private async testKeyDerivation(): Promise<void> {
const password = 'test-password';
const fieldType1 = 'users.password_hash';
const fieldType2 = 'ssh_data.password';
const password = "test-password";
const fieldType1 = "users.password_hash";
const fieldType2 = "ssh_data.password";
const key1a = FieldEncryption.getFieldKey(password, fieldType1);
const key1b = FieldEncryption.getFieldKey(password, fieldType1);
const key2 = FieldEncryption.getFieldKey(password, fieldType2);
if (!key1a.equals(key1b)) {
throw new Error('Same field type should produce identical keys');
throw new Error("Same field type should produce identical keys");
}
if (key1a.equals(key2)) {
throw new Error('Different field types should produce different keys');
throw new Error("Different field types should produce different keys");
}
const differentPasswordKey = FieldEncryption.getFieldKey('different-password', fieldType1);
const differentPasswordKey = FieldEncryption.getFieldKey(
"different-password",
fieldType1,
);
if (key1a.equals(differentPasswordKey)) {
throw new Error('Different passwords should produce different keys');
throw new Error("Different passwords should produce different keys");
}
}
@@ -117,88 +144,101 @@ class EncryptionTest {
masterPassword: this.testPassword,
encryptionEnabled: true,
forceEncryption: false,
migrateOnAccess: true
migrateOnAccess: true,
});
const status = DatabaseEncryption.getEncryptionStatus();
if (!status.enabled) {
throw new Error('Encryption should be enabled');
throw new Error("Encryption should be enabled");
}
if (!status.configValid) {
throw new Error('Configuration should be valid');
throw new Error("Configuration should be valid");
}
}
private async testRecordOperations(): Promise<void> {
const testRecord = {
id: 'test-id-123',
username: 'testuser',
password_hash: 'sensitive-password-hash',
is_admin: false
id: "test-id-123",
username: "testuser",
password_hash: "sensitive-password-hash",
is_admin: false,
};
const encrypted = DatabaseEncryption.encryptRecord('users', testRecord);
const decrypted = DatabaseEncryption.decryptRecord('users', encrypted);
const encrypted = DatabaseEncryption.encryptRecord("users", testRecord);
const decrypted = DatabaseEncryption.decryptRecord("users", encrypted);
if (decrypted.username !== testRecord.username) {
throw new Error('Non-sensitive field should remain unchanged');
throw new Error("Non-sensitive field should remain unchanged");
}
if (decrypted.password_hash !== testRecord.password_hash) {
throw new Error('Sensitive field should be properly decrypted');
throw new Error("Sensitive field should be properly decrypted");
}
if (!FieldEncryption.isEncrypted(encrypted.password_hash)) {
throw new Error('Sensitive field should be encrypted in stored record');
throw new Error("Sensitive field should be encrypted in stored record");
}
}
private async testBackwardCompatibility(): Promise<void> {
const plaintextRecord = {
id: 'legacy-id-456',
username: 'legacyuser',
password_hash: 'plain-text-password-hash',
is_admin: false
id: "legacy-id-456",
username: "legacyuser",
password_hash: "plain-text-password-hash",
is_admin: false,
};
const decrypted = DatabaseEncryption.decryptRecord('users', plaintextRecord);
const decrypted = DatabaseEncryption.decryptRecord(
"users",
plaintextRecord,
);
if (decrypted.password_hash !== plaintextRecord.password_hash) {
throw new Error('Plain text fields should be returned as-is for backward compatibility');
throw new Error(
"Plain text fields should be returned as-is for backward compatibility",
);
}
if (decrypted.username !== plaintextRecord.username) {
throw new Error('Non-sensitive fields should be unchanged');
throw new Error("Non-sensitive fields should be unchanged");
}
}
private async testErrorHandling(): Promise<void> {
const key = FieldEncryption.getFieldKey(this.testPassword, 'test');
const key = FieldEncryption.getFieldKey(this.testPassword, "test");
try {
FieldEncryption.decryptField('invalid-json-data', key);
throw new Error('Should have thrown error for invalid JSON');
FieldEncryption.decryptField("invalid-json-data", key);
throw new Error("Should have thrown error for invalid JSON");
} catch (error) {
if (!error || !(error as Error).message.includes('decryption failed')) {
throw new Error('Should throw appropriate decryption error');
if (!error || !(error as Error).message.includes("decryption failed")) {
throw new Error("Should throw appropriate decryption error");
}
}
try {
const fakeEncrypted = JSON.stringify({ data: 'fake', iv: 'fake', tag: 'fake' });
const fakeEncrypted = JSON.stringify({
data: "fake",
iv: "fake",
tag: "fake",
});
FieldEncryption.decryptField(fakeEncrypted, key);
throw new Error('Should have thrown error for invalid encrypted data');
throw new Error("Should have thrown error for invalid encrypted data");
} catch (error) {
if (!error || !(error as Error).message.includes('Decryption failed')) {
throw new Error('Should throw appropriate error for corrupted data');
if (!error || !(error as Error).message.includes("Decryption failed")) {
throw new Error("Should throw appropriate error for corrupted data");
}
}
}
private async testPerformance(): Promise<void> {
const testData = 'Performance test data that is reasonably long to simulate real SSH keys and passwords.';
const key = FieldEncryption.getFieldKey(this.testPassword, 'performance-test');
const testData =
"Performance test data that is reasonably long to simulate real SSH keys and passwords.";
const key = FieldEncryption.getFieldKey(
this.testPassword,
"performance-test",
);
const iterations = 100;
const startTime = Date.now();
@@ -216,50 +256,57 @@ class EncryptionTest {
const totalTime = endTime - startTime;
const avgTime = totalTime / iterations;
console.log(` ⚡ Performance: ${iterations} encrypt/decrypt cycles in ${totalTime}ms (${avgTime.toFixed(2)}ms avg)`);
console.log(
` ⚡ Performance: ${iterations} encrypt/decrypt cycles in ${totalTime}ms (${avgTime.toFixed(2)}ms avg)`,
);
if (avgTime > 50) {
console.log(' ⚠️ Warning: Encryption operations are slower than expected');
console.log(
" ⚠️ Warning: Encryption operations are slower than expected",
);
}
}
static async validateProduction(): Promise<boolean> {
console.log('🔒 Validating production encryption setup...\n');
console.log("🔒 Validating production encryption setup...\n");
try {
const encryptionKey = process.env.DB_ENCRYPTION_KEY;
if (!encryptionKey) {
console.log('❌ DB_ENCRYPTION_KEY environment variable not set');
console.log("❌ DB_ENCRYPTION_KEY environment variable not set");
return false;
}
if (encryptionKey === 'default-key-change-me') {
console.log('❌ DB_ENCRYPTION_KEY is using default value (INSECURE)');
if (encryptionKey === "default-key-change-me") {
console.log("❌ DB_ENCRYPTION_KEY is using default value (INSECURE)");
return false;
}
if (encryptionKey.length < 16) {
console.log('❌ DB_ENCRYPTION_KEY is too short (minimum 16 characters)');
console.log(
"❌ DB_ENCRYPTION_KEY is too short (minimum 16 characters)",
);
return false;
}
DatabaseEncryption.initialize({
masterPassword: encryptionKey,
encryptionEnabled: true
encryptionEnabled: true,
});
const status = DatabaseEncryption.getEncryptionStatus();
if (!status.configValid) {
console.log('❌ Encryption configuration validation failed');
console.log("❌ Encryption configuration validation failed");
return false;
}
console.log('✅ Production encryption setup is valid');
console.log("✅ Production encryption setup is valid");
return true;
} catch (error) {
console.log(`❌ Production validation failed: ${error instanceof Error ? error.message : 'Unknown error'}`);
console.log(
`❌ Production validation failed: ${error instanceof Error ? error.message : "Unknown error"}`,
);
return false;
}
}
@@ -268,26 +315,27 @@ class EncryptionTest {
if (import.meta.url === `file://${process.argv[1]}`) {
const testMode = process.argv[2];
if (testMode === 'production') {
if (testMode === "production") {
EncryptionTest.validateProduction()
.then((success) => {
process.exit(success ? 0 : 1);
})
.catch((error) => {
console.error('Test execution failed:', error);
console.error("Test execution failed:", error);
process.exit(1);
});
} else {
const test = new EncryptionTest();
test.runAllTests()
test
.runAllTests()
.then((success) => {
process.exit(success ? 0 : 1);
})
.catch((error) => {
console.error('Test execution failed:', error);
console.error("Test execution failed:", error);
process.exit(1);
});
}
}
export { EncryptionTest };
export { EncryptionTest };

View File

@@ -1,4 +1,4 @@
import crypto from 'crypto';
import crypto from "crypto";
interface EncryptedData {
data: string;
@@ -17,7 +17,7 @@ interface EncryptionConfig {
class FieldEncryption {
private static readonly CONFIG: EncryptionConfig = {
algorithm: 'aes-256-gcm',
algorithm: "aes-256-gcm",
keyLength: 32,
ivLength: 16,
saltLength: 32,
@@ -25,9 +25,21 @@ class FieldEncryption {
};
private static readonly ENCRYPTED_FIELDS = {
users: ['password_hash', 'client_secret', 'totp_secret', 'totp_backup_codes', 'oidc_identifier'],
ssh_data: ['password', 'key', 'keyPassword'],
ssh_credentials: ['password', 'privateKey', 'keyPassword', 'key', 'publicKey']
users: [
"password_hash",
"client_secret",
"totp_secret",
"totp_backup_codes",
"oidc_identifier",
],
ssh_data: ["password", "key", "keyPassword"],
ssh_credentials: [
"password",
"privateKey",
"keyPassword",
"key",
"publicKey",
],
};
static isEncrypted(value: string | null): boolean {
@@ -46,56 +58,64 @@ class FieldEncryption {
salt,
this.CONFIG.iterations,
this.CONFIG.keyLength,
'sha256'
"sha256",
);
return Buffer.from(crypto.hkdfSync(
'sha256',
masterKey,
salt,
keyType,
this.CONFIG.keyLength
));
return Buffer.from(
crypto.hkdfSync(
"sha256",
masterKey,
salt,
keyType,
this.CONFIG.keyLength,
),
);
}
static encrypt(plaintext: string, key: Buffer): EncryptedData {
if (!plaintext) return { data: '', iv: '', tag: '' };
if (!plaintext) return { data: "", iv: "", tag: "" };
const iv = crypto.randomBytes(this.CONFIG.ivLength);
const cipher = crypto.createCipheriv(this.CONFIG.algorithm, key, iv) as any;
cipher.setAAD(Buffer.from('termix-field-encryption'));
cipher.setAAD(Buffer.from("termix-field-encryption"));
let encrypted = cipher.update(plaintext, 'utf8', 'hex');
encrypted += cipher.final('hex');
let encrypted = cipher.update(plaintext, "utf8", "hex");
encrypted += cipher.final("hex");
const tag = cipher.getAuthTag();
return {
data: encrypted,
iv: iv.toString('hex'),
tag: tag.toString('hex')
iv: iv.toString("hex"),
tag: tag.toString("hex"),
};
}
static decrypt(encryptedData: EncryptedData, key: Buffer): string {
if (!encryptedData.data) return '';
if (!encryptedData.data) return "";
try {
const decipher = crypto.createDecipheriv(this.CONFIG.algorithm, key, Buffer.from(encryptedData.iv, 'hex')) as any;
decipher.setAAD(Buffer.from('termix-field-encryption'));
decipher.setAuthTag(Buffer.from(encryptedData.tag, 'hex'));
const decipher = crypto.createDecipheriv(
this.CONFIG.algorithm,
key,
Buffer.from(encryptedData.iv, "hex"),
) as any;
decipher.setAAD(Buffer.from("termix-field-encryption"));
decipher.setAuthTag(Buffer.from(encryptedData.tag, "hex"));
let decrypted = decipher.update(encryptedData.data, 'hex', 'utf8');
decrypted += decipher.final('utf8');
let decrypted = decipher.update(encryptedData.data, "hex", "utf8");
decrypted += decipher.final("utf8");
return decrypted;
} catch (error) {
throw new Error(`Decryption failed: ${error instanceof Error ? error.message : 'Unknown error'}`);
throw new Error(
`Decryption failed: ${error instanceof Error ? error.message : "Unknown error"}`,
);
}
}
static encryptField(value: string, fieldKey: Buffer): string {
if (!value) return '';
if (!value) return "";
if (this.isEncrypted(value)) return value;
const encrypted = this.encrypt(value, fieldKey);
@@ -103,36 +123,45 @@ class FieldEncryption {
}
static decryptField(value: string, fieldKey: Buffer): string {
if (!value) return '';
if (!value) return "";
if (!this.isEncrypted(value)) return value;
try {
const encrypted: EncryptedData = JSON.parse(value);
return this.decrypt(encrypted, fieldKey);
} catch (error) {
throw new Error(`Field decryption failed: ${error instanceof Error ? error.message : 'Unknown error'}`);
throw new Error(
`Field decryption failed: ${error instanceof Error ? error.message : "Unknown error"}`,
);
}
}
static getFieldKey(masterPassword: string, fieldType: string): Buffer {
const salt = crypto.createHash('sha256').update(`termix-${fieldType}`).digest();
const salt = crypto
.createHash("sha256")
.update(`termix-${fieldType}`)
.digest();
return this.deriveKey(masterPassword, salt, fieldType);
}
static shouldEncryptField(tableName: string, fieldName: string): boolean {
const tableFields = this.ENCRYPTED_FIELDS[tableName as keyof typeof this.ENCRYPTED_FIELDS];
const tableFields =
this.ENCRYPTED_FIELDS[tableName as keyof typeof this.ENCRYPTED_FIELDS];
return tableFields ? tableFields.includes(fieldName) : false;
}
static generateSalt(): string {
return crypto.randomBytes(this.CONFIG.saltLength).toString('hex');
return crypto.randomBytes(this.CONFIG.saltLength).toString("hex");
}
static validateEncryptionHealth(encryptedValue: string, key: Buffer): boolean {
static validateEncryptionHealth(
encryptedValue: string,
key: Buffer,
): boolean {
try {
if (!this.isEncrypted(encryptedValue)) return false;
const decrypted = this.decryptField(encryptedValue, key);
return decrypted !== '';
return decrypted !== "";
} catch {
return false;
}
@@ -140,4 +169,4 @@ class FieldEncryption {
}
export { FieldEncryption };
export type { EncryptedData, EncryptionConfig };
export type { EncryptedData, EncryptionConfig };

View File

@@ -1,8 +1,8 @@
import crypto from 'crypto';
import os from 'os';
import { execSync } from 'child_process';
import fs from 'fs';
import { databaseLogger } from './logger.js';
import crypto from "crypto";
import os from "os";
import { execSync } from "child_process";
import fs from "fs";
import { databaseLogger } from "./logger.js";
interface HardwareInfo {
cpuId?: string;
@@ -18,7 +18,7 @@ interface HardwareInfo {
* 相比软件环境指纹,硬件指纹在虚拟化和容器环境中更加稳定
*/
class HardwareFingerprint {
private static readonly CACHE_KEY = 'cached_hardware_fingerprint';
private static readonly CACHE_KEY = "cached_hardware_fingerprint";
private static cachedFingerprint: string | null = null;
/**
@@ -27,40 +27,30 @@ class HardwareFingerprint {
*/
static generate(): string {
try {
// 1. 检查缓存
if (this.cachedFingerprint) {
return this.cachedFingerprint;
}
// 2. 检查环境变量覆盖
const envFingerprint = process.env.TERMIX_HARDWARE_SEED;
if (envFingerprint && envFingerprint.length >= 32) {
databaseLogger.info('Using hardware seed from environment variable', {
operation: 'hardware_fingerprint_env'
databaseLogger.info("Using hardware seed from environment variable", {
operation: "hardware_fingerprint_env",
});
this.cachedFingerprint = this.hashFingerprint(envFingerprint);
return this.cachedFingerprint;
}
// 3. 检测真实硬件信息
const hwInfo = this.detectHardwareInfo();
const fingerprint = this.generateFromHardware(hwInfo);
this.cachedFingerprint = fingerprint;
databaseLogger.info('Generated hardware fingerprint', {
operation: 'hardware_fingerprint_generation',
fingerprintPrefix: fingerprint.substring(0, 8),
detectedComponents: Object.keys(hwInfo).filter(key => hwInfo[key as keyof HardwareInfo])
});
return fingerprint;
} catch (error) {
databaseLogger.error('Hardware fingerprint generation failed', error, {
operation: 'hardware_fingerprint_failed'
databaseLogger.error("Hardware fingerprint generation failed", error, {
operation: "hardware_fingerprint_failed",
});
// 回退到基本的环境指纹
return this.generateFallbackFingerprint();
}
}
@@ -74,21 +64,21 @@ class HardwareFingerprint {
try {
switch (platform) {
case 'linux':
case "linux":
hwInfo.cpuId = this.getLinuxCpuId();
hwInfo.motherboardUuid = this.getLinuxMotherboardUuid();
hwInfo.diskSerial = this.getLinuxDiskSerial();
hwInfo.biosSerial = this.getLinuxBiosSerial();
break;
case 'win32':
case "win32":
hwInfo.cpuId = this.getWindowsCpuId();
hwInfo.motherboardUuid = this.getWindowsMotherboardUuid();
hwInfo.diskSerial = this.getWindowsDiskSerial();
hwInfo.biosSerial = this.getWindowsBiosSerial();
break;
case 'darwin':
case "darwin":
hwInfo.cpuId = this.getMacOSCpuId();
hwInfo.motherboardUuid = this.getMacOSMotherboardUuid();
hwInfo.diskSerial = this.getMacOSDiskSerial();
@@ -98,11 +88,10 @@ class HardwareFingerprint {
// 所有平台都尝试获取MAC地址
hwInfo.macAddresses = this.getStableMacAddresses();
} catch (error) {
databaseLogger.error('Some hardware detection failed', error, {
operation: 'hardware_detection_partial_failure',
platform
databaseLogger.error("Some hardware detection failed", error, {
operation: "hardware_detection_partial_failure",
platform,
});
}
@@ -116,18 +105,32 @@ class HardwareFingerprint {
try {
// 尝试多种方法获取CPU信息
const methods = [
() => fs.readFileSync('/proc/cpuinfo', 'utf8').match(/processor\s*:\s*(\d+)/)?.[1],
() => execSync('dmidecode -t processor | grep "ID:" | head -1', { encoding: 'utf8' }).trim(),
() => execSync('cat /proc/cpuinfo | grep "cpu family\\|model\\|stepping" | md5sum', { encoding: 'utf8' }).split(' ')[0]
() =>
fs
.readFileSync("/proc/cpuinfo", "utf8")
.match(/processor\s*:\s*(\d+)/)?.[1],
() =>
execSync('dmidecode -t processor | grep "ID:" | head -1', {
encoding: "utf8",
}).trim(),
() =>
execSync(
'cat /proc/cpuinfo | grep "cpu family\\|model\\|stepping" | md5sum',
{ encoding: "utf8" },
).split(" ")[0],
];
for (const method of methods) {
try {
const result = method();
if (result && result.length > 0) return result;
} catch { /* 继续尝试下一种方法 */ }
} catch {
/* 继续尝试下一种方法 */
}
}
} catch { /* 忽略错误 */ }
} catch {
/* 忽略错误 */
}
return undefined;
}
@@ -135,47 +138,68 @@ class HardwareFingerprint {
try {
// 尝试多种方法获取主板UUID
const methods = [
() => fs.readFileSync('/sys/class/dmi/id/product_uuid', 'utf8').trim(),
() => fs.readFileSync('/proc/sys/kernel/random/boot_id', 'utf8').trim(),
() => execSync('dmidecode -s system-uuid', { encoding: 'utf8' }).trim()
() => fs.readFileSync("/sys/class/dmi/id/product_uuid", "utf8").trim(),
() => fs.readFileSync("/proc/sys/kernel/random/boot_id", "utf8").trim(),
() => execSync("dmidecode -s system-uuid", { encoding: "utf8" }).trim(),
];
for (const method of methods) {
try {
const result = method();
if (result && result.length > 0 && result !== 'Not Settable') return result;
} catch { /* 继续尝试下一种方法 */ }
if (result && result.length > 0 && result !== "Not Settable")
return result;
} catch {
/* 继续尝试下一种方法 */
}
}
} catch { /* 忽略错误 */ }
} catch {
/* 忽略错误 */
}
return undefined;
}
private static getLinuxDiskSerial(): string | undefined {
try {
// 获取根分区所在磁盘的序列号
const rootDisk = execSync("df / | tail -1 | awk '{print $1}' | sed 's/[0-9]*$//'", { encoding: 'utf8' }).trim();
const rootDisk = execSync(
"df / | tail -1 | awk '{print $1}' | sed 's/[0-9]*$//'",
{ encoding: "utf8" },
).trim();
if (rootDisk) {
const serial = execSync(`udevadm info --name=${rootDisk} | grep ID_SERIAL= | cut -d= -f2`, { encoding: 'utf8' }).trim();
const serial = execSync(
`udevadm info --name=${rootDisk} | grep ID_SERIAL= | cut -d= -f2`,
{ encoding: "utf8" },
).trim();
if (serial && serial.length > 0) return serial;
}
} catch { /* 忽略错误 */ }
} catch {
/* 忽略错误 */
}
return undefined;
}
private static getLinuxBiosSerial(): string | undefined {
try {
const methods = [
() => fs.readFileSync('/sys/class/dmi/id/board_serial', 'utf8').trim(),
() => execSync('dmidecode -s baseboard-serial-number', { encoding: 'utf8' }).trim()
() => fs.readFileSync("/sys/class/dmi/id/board_serial", "utf8").trim(),
() =>
execSync("dmidecode -s baseboard-serial-number", {
encoding: "utf8",
}).trim(),
];
for (const method of methods) {
try {
const result = method();
if (result && result.length > 0 && result !== 'Not Specified') return result;
} catch { /* 继续尝试下一种方法 */ }
if (result && result.length > 0 && result !== "Not Specified")
return result;
} catch {
/* 继续尝试下一种方法 */
}
}
} catch { /* 忽略错误 */ }
} catch {
/* 忽略错误 */
}
return undefined;
}
@@ -184,37 +208,53 @@ class HardwareFingerprint {
*/
private static getWindowsCpuId(): string | undefined {
try {
const result = execSync('wmic cpu get ProcessorId /value', { encoding: 'utf8' });
const result = execSync("wmic cpu get ProcessorId /value", {
encoding: "utf8",
});
const match = result.match(/ProcessorId=(.+)/);
return match?.[1]?.trim();
} catch { /* 忽略错误 */ }
} catch {
/* 忽略错误 */
}
return undefined;
}
private static getWindowsMotherboardUuid(): string | undefined {
try {
const result = execSync('wmic csproduct get UUID /value', { encoding: 'utf8' });
const result = execSync("wmic csproduct get UUID /value", {
encoding: "utf8",
});
const match = result.match(/UUID=(.+)/);
return match?.[1]?.trim();
} catch { /* 忽略错误 */ }
} catch {
/* 忽略错误 */
}
return undefined;
}
private static getWindowsDiskSerial(): string | undefined {
try {
const result = execSync('wmic diskdrive get SerialNumber /value', { encoding: 'utf8' });
const result = execSync("wmic diskdrive get SerialNumber /value", {
encoding: "utf8",
});
const match = result.match(/SerialNumber=(.+)/);
return match?.[1]?.trim();
} catch { /* 忽略错误 */ }
} catch {
/* 忽略错误 */
}
return undefined;
}
private static getWindowsBiosSerial(): string | undefined {
try {
const result = execSync('wmic baseboard get SerialNumber /value', { encoding: 'utf8' });
const result = execSync("wmic baseboard get SerialNumber /value", {
encoding: "utf8",
});
const match = result.match(/SerialNumber=(.+)/);
return match?.[1]?.trim();
} catch { /* 忽略错误 */ }
} catch {
/* 忽略错误 */
}
return undefined;
}
@@ -223,36 +263,55 @@ class HardwareFingerprint {
*/
private static getMacOSCpuId(): string | undefined {
try {
const result = execSync('sysctl -n machdep.cpu.brand_string', { encoding: 'utf8' });
const result = execSync("sysctl -n machdep.cpu.brand_string", {
encoding: "utf8",
});
return result.trim();
} catch { /* 忽略错误 */ }
} catch {
/* 忽略错误 */
}
return undefined;
}
private static getMacOSMotherboardUuid(): string | undefined {
try {
const result = execSync('system_profiler SPHardwareDataType | grep "Hardware UUID"', { encoding: 'utf8' });
const result = execSync(
'system_profiler SPHardwareDataType | grep "Hardware UUID"',
{ encoding: "utf8" },
);
const match = result.match(/Hardware UUID:\s*(.+)/);
return match?.[1]?.trim();
} catch { /* 忽略错误 */ }
} catch {
/* 忽略错误 */
}
return undefined;
}
private static getMacOSDiskSerial(): string | undefined {
try {
const result = execSync('system_profiler SPStorageDataType | grep "Serial Number"', { encoding: 'utf8' });
const result = execSync(
'system_profiler SPStorageDataType | grep "Serial Number"',
{ encoding: "utf8" },
);
const match = result.match(/Serial Number:\s*(.+)/);
return match?.[1]?.trim();
} catch { /* 忽略错误 */ }
} catch {
/* 忽略错误 */
}
return undefined;
}
private static getMacOSBiosSerial(): string | undefined {
try {
const result = execSync('system_profiler SPHardwareDataType | grep "Serial Number"', { encoding: 'utf8' });
const result = execSync(
'system_profiler SPHardwareDataType | grep "Serial Number"',
{ encoding: "utf8" },
);
const match = result.match(/Serial Number \(system\):\s*(.+)/);
return match?.[1]?.trim();
} catch { /* 忽略错误 */ }
} catch {
/* 忽略错误 */
}
return undefined;
}
@@ -265,17 +324,22 @@ class HardwareFingerprint {
const networkInterfaces = os.networkInterfaces();
const macAddresses: string[] = [];
for (const [interfaceName, interfaces] of Object.entries(networkInterfaces)) {
for (const [interfaceName, interfaces] of Object.entries(
networkInterfaces,
)) {
if (!interfaces) continue;
// 排除虚拟接口和Docker接口
if (interfaceName.match(/^(lo|docker|veth|br-|virbr)/)) continue;
for (const iface of interfaces) {
if (!iface.internal &&
iface.mac &&
iface.mac !== '00:00:00:00:00:00' &&
!iface.mac.startsWith('02:42:')) { // Docker接口特征
if (
!iface.internal &&
iface.mac &&
iface.mac !== "00:00:00:00:00:00" &&
!iface.mac.startsWith("02:42:")
) {
// Docker接口特征
macAddresses.push(iface.mac);
}
}
@@ -292,20 +356,20 @@ class HardwareFingerprint {
*/
private static generateFromHardware(hwInfo: HardwareInfo): string {
const components = [
hwInfo.motherboardUuid, // 最稳定的标识符
hwInfo.motherboardUuid, // 最稳定的标识符
hwInfo.cpuId,
hwInfo.biosSerial,
hwInfo.diskSerial,
hwInfo.macAddresses?.join(','),
os.platform(), // 操作系统平台
os.arch() // CPU架构
hwInfo.macAddresses?.join(","),
os.platform(), // 操作系统平台
os.arch(), // CPU架构
].filter(Boolean); // 过滤空值
if (components.length === 0) {
throw new Error('No hardware identifiers found');
throw new Error("No hardware identifiers found");
}
return this.hashFingerprint(components.join('|'));
return this.hashFingerprint(components.join("|"));
}
/**
@@ -317,21 +381,24 @@ class HardwareFingerprint {
os.platform(),
os.arch(),
process.cwd(),
'fallback-mode'
"fallback-mode",
];
databaseLogger.warn('Using fallback fingerprint due to hardware detection failure', {
operation: 'hardware_fingerprint_fallback'
});
databaseLogger.warn(
"Using fallback fingerprint due to hardware detection failure",
{
operation: "hardware_fingerprint_fallback",
},
);
return this.hashFingerprint(fallbackComponents.join('|'));
return this.hashFingerprint(fallbackComponents.join("|"));
}
/**
* 标准化指纹哈希
*/
private static hashFingerprint(data: string): string {
return crypto.createHash('sha256').update(data).digest('hex');
return crypto.createHash("sha256").update(data).digest("hex");
}
/**
@@ -341,7 +408,7 @@ class HardwareFingerprint {
const hwInfo = this.detectHardwareInfo();
return {
...hwInfo,
fingerprint: this.generate().substring(0, 16)
fingerprint: this.generate().substring(0, 16),
};
}
@@ -366,4 +433,4 @@ class HardwareFingerprint {
}
export { HardwareFingerprint };
export type { HardwareInfo };
export type { HardwareInfo };

View File

@@ -1,6 +1,6 @@
import crypto from 'crypto';
import { databaseLogger } from './logger.js';
import { HardwareFingerprint } from './hardware-fingerprint.js';
import crypto from "crypto";
import { databaseLogger } from "./logger.js";
import { HardwareFingerprint } from "./hardware-fingerprint.js";
interface ProtectedKeyData {
data: string;
@@ -11,30 +11,23 @@ interface ProtectedKeyData {
}
class MasterKeyProtection {
private static readonly VERSION = 'v1';
private static readonly KEK_SALT = 'termix-kek-salt-v1';
private static readonly VERSION = "v1";
private static readonly KEK_SALT = "termix-kek-salt-v1";
private static readonly KEK_ITERATIONS = 50000;
private static generateDeviceFingerprint(): string {
try {
const fingerprint = HardwareFingerprint.generate();
databaseLogger.debug('Generated hardware fingerprint', {
operation: 'hardware_fingerprint_generation',
fingerprintPrefix: fingerprint.substring(0, 8)
});
return fingerprint;
} catch (error) {
databaseLogger.error('Failed to generate hardware fingerprint', error, {
operation: 'hardware_fingerprint_generation_failed'
databaseLogger.error("Failed to generate hardware fingerprint", error, {
operation: "hardware_fingerprint_generation_failed",
});
throw new Error('Hardware fingerprint generation failed');
throw new Error("Hardware fingerprint generation failed");
}
}
private static deriveKEK(): Buffer {
const fingerprint = this.generateDeviceFingerprint();
const salt = Buffer.from(this.KEK_SALT);
@@ -44,103 +37,112 @@ class MasterKeyProtection {
salt,
this.KEK_ITERATIONS,
32,
'sha256'
"sha256",
);
databaseLogger.debug('Derived KEK from hardware fingerprint', {
operation: 'kek_derivation',
iterations: this.KEK_ITERATIONS
});
return kek;
}
static encryptMasterKey(masterKey: string): string {
if (!masterKey) {
throw new Error('Master key cannot be empty');
throw new Error("Master key cannot be empty");
}
try {
const kek = this.deriveKEK();
const iv = crypto.randomBytes(16);
const cipher = crypto.createCipheriv('aes-256-gcm', kek, iv) as any;
const cipher = crypto.createCipheriv("aes-256-gcm", kek, iv) as any;
let encrypted = cipher.update(masterKey, 'hex', 'hex');
encrypted += cipher.final('hex');
let encrypted = cipher.update(masterKey, "hex", "hex");
encrypted += cipher.final("hex");
const tag = cipher.getAuthTag();
const protectedData: ProtectedKeyData = {
data: encrypted,
iv: iv.toString('hex'),
tag: tag.toString('hex'),
iv: iv.toString("hex"),
tag: tag.toString("hex"),
version: this.VERSION,
fingerprint: this.generateDeviceFingerprint().substring(0, 16)
fingerprint: this.generateDeviceFingerprint().substring(0, 16),
};
const result = JSON.stringify(protectedData);
databaseLogger.info('Master key encrypted with hardware KEK', {
operation: 'master_key_encryption',
databaseLogger.info("Master key encrypted with hardware KEK", {
operation: "master_key_encryption",
version: this.VERSION,
fingerprintPrefix: protectedData.fingerprint
fingerprintPrefix: protectedData.fingerprint,
});
return result;
} catch (error) {
databaseLogger.error('Failed to encrypt master key', error, {
operation: 'master_key_encryption_failed'
databaseLogger.error("Failed to encrypt master key", error, {
operation: "master_key_encryption_failed",
});
throw new Error('Master key encryption failed');
throw new Error("Master key encryption failed");
}
}
static decryptMasterKey(encryptedKey: string): string {
if (!encryptedKey) {
throw new Error('Encrypted key cannot be empty');
throw new Error("Encrypted key cannot be empty");
}
try {
const protectedData: ProtectedKeyData = JSON.parse(encryptedKey);
if (protectedData.version !== this.VERSION) {
throw new Error(`Unsupported protection version: ${protectedData.version}`);
throw new Error(
`Unsupported protection version: ${protectedData.version}`,
);
}
const currentFingerprint = this.generateDeviceFingerprint().substring(0, 16);
const currentFingerprint = this.generateDeviceFingerprint().substring(
0,
16,
);
if (protectedData.fingerprint !== currentFingerprint) {
databaseLogger.warn('Hardware fingerprint mismatch detected', {
operation: 'master_key_decryption',
databaseLogger.warn("Hardware fingerprint mismatch detected", {
operation: "master_key_decryption",
expected: protectedData.fingerprint,
current: currentFingerprint
current: currentFingerprint,
});
throw new Error('Hardware fingerprint mismatch - key was encrypted on different hardware');
throw new Error(
"Hardware fingerprint mismatch - key was encrypted on different hardware",
);
}
const kek = this.deriveKEK();
const decipher = crypto.createDecipheriv('aes-256-gcm', kek, Buffer.from(protectedData.iv, 'hex')) as any;
decipher.setAuthTag(Buffer.from(protectedData.tag, 'hex'));
const decipher = crypto.createDecipheriv(
"aes-256-gcm",
kek,
Buffer.from(protectedData.iv, "hex"),
) as any;
decipher.setAuthTag(Buffer.from(protectedData.tag, "hex"));
let decrypted = decipher.update(protectedData.data, 'hex', 'hex');
decrypted += decipher.final('hex');
databaseLogger.debug('Master key decrypted successfully', {
operation: 'master_key_decryption',
version: protectedData.version
});
let decrypted = decipher.update(protectedData.data, "hex", "hex");
decrypted += decipher.final("hex");
return decrypted;
} catch (error) {
databaseLogger.error('Failed to decrypt master key', error, {
operation: 'master_key_decryption_failed'
databaseLogger.error("Failed to decrypt master key", error, {
operation: "master_key_decryption_failed",
});
throw new Error(`Master key decryption failed: ${error instanceof Error ? error.message : 'Unknown error'}`);
throw new Error(
`Master key decryption failed: ${error instanceof Error ? error.message : "Unknown error"}`,
);
}
}
static isProtectedKey(data: string): boolean {
try {
const parsed = JSON.parse(data);
return !!(parsed.data && parsed.iv && parsed.tag && parsed.version && parsed.fingerprint);
return !!(
parsed.data &&
parsed.iv &&
parsed.tag &&
parsed.version &&
parsed.fingerprint
);
} catch {
return false;
}
@@ -148,21 +150,21 @@ class MasterKeyProtection {
static validateProtection(): boolean {
try {
const testKey = crypto.randomBytes(32).toString('hex');
const testKey = crypto.randomBytes(32).toString("hex");
const encrypted = this.encryptMasterKey(testKey);
const decrypted = this.decryptMasterKey(encrypted);
const isValid = decrypted === testKey;
databaseLogger.info('Master key protection validation completed', {
operation: 'protection_validation',
result: isValid ? 'passed' : 'failed'
databaseLogger.info("Master key protection validation completed", {
operation: "protection_validation",
result: isValid ? "passed" : "failed",
});
return isValid;
} catch (error) {
databaseLogger.error('Master key protection validation failed', error, {
operation: 'protection_validation_failed'
databaseLogger.error("Master key protection validation failed", error, {
operation: "protection_validation_failed",
});
return false;
}
@@ -179,12 +181,15 @@ class MasterKeyProtection {
}
const protectedData: ProtectedKeyData = JSON.parse(encryptedKey);
const currentFingerprint = this.generateDeviceFingerprint().substring(0, 16);
const currentFingerprint = this.generateDeviceFingerprint().substring(
0,
16,
);
return {
version: protectedData.version,
fingerprint: protectedData.fingerprint,
isCurrentDevice: protectedData.fingerprint === currentFingerprint
isCurrentDevice: protectedData.fingerprint === currentFingerprint,
};
} catch {
return null;
@@ -193,4 +198,4 @@ class MasterKeyProtection {
}
export { MasterKeyProtection };
export type { ProtectedKeyData };
export type { ProtectedKeyData };

View File

@@ -1,5 +1,5 @@
// Import SSH2 using ES modules
import ssh2Pkg from 'ssh2';
import ssh2Pkg from "ssh2";
const ssh2Utils = ssh2Pkg.utils;
// Simple fallback SSH key type detection
@@ -7,117 +7,120 @@ function detectKeyTypeFromContent(keyContent: string): string {
const content = keyContent.trim();
// Check for OpenSSH format headers
if (content.includes('-----BEGIN OPENSSH PRIVATE KEY-----')) {
if (content.includes("-----BEGIN OPENSSH PRIVATE KEY-----")) {
// Look for key type indicators in the content
if (content.includes('ssh-ed25519') || content.includes('AAAAC3NzaC1lZDI1NTE5')) {
return 'ssh-ed25519';
if (
content.includes("ssh-ed25519") ||
content.includes("AAAAC3NzaC1lZDI1NTE5")
) {
return "ssh-ed25519";
}
if (content.includes('ssh-rsa') || content.includes('AAAAB3NzaC1yc2E')) {
return 'ssh-rsa';
if (content.includes("ssh-rsa") || content.includes("AAAAB3NzaC1yc2E")) {
return "ssh-rsa";
}
if (content.includes('ecdsa-sha2-nistp256')) {
return 'ecdsa-sha2-nistp256';
if (content.includes("ecdsa-sha2-nistp256")) {
return "ecdsa-sha2-nistp256";
}
if (content.includes('ecdsa-sha2-nistp384')) {
return 'ecdsa-sha2-nistp384';
if (content.includes("ecdsa-sha2-nistp384")) {
return "ecdsa-sha2-nistp384";
}
if (content.includes('ecdsa-sha2-nistp521')) {
return 'ecdsa-sha2-nistp521';
if (content.includes("ecdsa-sha2-nistp521")) {
return "ecdsa-sha2-nistp521";
}
// For OpenSSH format, try to detect by analyzing the base64 content structure
try {
const base64Content = content
.replace('-----BEGIN OPENSSH PRIVATE KEY-----', '')
.replace('-----END OPENSSH PRIVATE KEY-----', '')
.replace(/\s/g, '');
.replace("-----BEGIN OPENSSH PRIVATE KEY-----", "")
.replace("-----END OPENSSH PRIVATE KEY-----", "")
.replace(/\s/g, "");
// OpenSSH format starts with "openssh-key-v1" followed by key type
const decoded = Buffer.from(base64Content, 'base64').toString('binary');
const decoded = Buffer.from(base64Content, "base64").toString("binary");
if (decoded.includes('ssh-rsa')) {
return 'ssh-rsa';
if (decoded.includes("ssh-rsa")) {
return "ssh-rsa";
}
if (decoded.includes('ssh-ed25519')) {
return 'ssh-ed25519';
if (decoded.includes("ssh-ed25519")) {
return "ssh-ed25519";
}
if (decoded.includes('ecdsa-sha2-nistp256')) {
return 'ecdsa-sha2-nistp256';
if (decoded.includes("ecdsa-sha2-nistp256")) {
return "ecdsa-sha2-nistp256";
}
if (decoded.includes('ecdsa-sha2-nistp384')) {
return 'ecdsa-sha2-nistp384';
if (decoded.includes("ecdsa-sha2-nistp384")) {
return "ecdsa-sha2-nistp384";
}
if (decoded.includes('ecdsa-sha2-nistp521')) {
return 'ecdsa-sha2-nistp521';
if (decoded.includes("ecdsa-sha2-nistp521")) {
return "ecdsa-sha2-nistp521";
}
// Default to RSA for OpenSSH format if we can't detect specifically
return 'ssh-rsa';
return "ssh-rsa";
} catch (error) {
console.warn('Failed to decode OpenSSH key content:', error);
console.warn("Failed to decode OpenSSH key content:", error);
// If decoding fails, default to RSA as it's most common for OpenSSH format
return 'ssh-rsa';
return "ssh-rsa";
}
}
// Check for traditional PEM headers
if (content.includes('-----BEGIN RSA PRIVATE KEY-----')) {
return 'ssh-rsa';
if (content.includes("-----BEGIN RSA PRIVATE KEY-----")) {
return "ssh-rsa";
}
if (content.includes('-----BEGIN DSA PRIVATE KEY-----')) {
return 'ssh-dss';
if (content.includes("-----BEGIN DSA PRIVATE KEY-----")) {
return "ssh-dss";
}
if (content.includes('-----BEGIN EC PRIVATE KEY-----')) {
return 'ecdsa-sha2-nistp256'; // Default ECDSA type
if (content.includes("-----BEGIN EC PRIVATE KEY-----")) {
return "ecdsa-sha2-nistp256"; // Default ECDSA type
}
// Check for PKCS#8 format (modern format)
if (content.includes('-----BEGIN PRIVATE KEY-----')) {
if (content.includes("-----BEGIN PRIVATE KEY-----")) {
// Try to decode and analyze the DER structure for better detection
try {
const base64Content = content
.replace('-----BEGIN PRIVATE KEY-----', '')
.replace('-----END PRIVATE KEY-----', '')
.replace(/\s/g, '');
.replace("-----BEGIN PRIVATE KEY-----", "")
.replace("-----END PRIVATE KEY-----", "")
.replace(/\s/g, "");
const decoded = Buffer.from(base64Content, 'base64');
const decodedString = decoded.toString('binary');
const decoded = Buffer.from(base64Content, "base64");
const decodedString = decoded.toString("binary");
// Check for algorithm identifiers in the DER structure
if (decodedString.includes('1.2.840.113549.1.1.1')) {
if (decodedString.includes("1.2.840.113549.1.1.1")) {
// RSA OID
return 'ssh-rsa';
} else if (decodedString.includes('1.2.840.10045.2.1')) {
return "ssh-rsa";
} else if (decodedString.includes("1.2.840.10045.2.1")) {
// EC Private Key OID - this indicates ECDSA
if (decodedString.includes('1.2.840.10045.3.1.7')) {
if (decodedString.includes("1.2.840.10045.3.1.7")) {
// prime256v1 curve OID
return 'ecdsa-sha2-nistp256';
return "ecdsa-sha2-nistp256";
}
return 'ecdsa-sha2-nistp256'; // Default to P-256
} else if (decodedString.includes('1.3.101.112')) {
return "ecdsa-sha2-nistp256"; // Default to P-256
} else if (decodedString.includes("1.3.101.112")) {
// Ed25519 OID
return 'ssh-ed25519';
return "ssh-ed25519";
}
} catch (error) {
// If decoding fails, fall back to length-based detection
console.warn('Failed to decode private key for type detection:', error);
console.warn("Failed to decode private key for type detection:", error);
}
// Fallback: Try to detect key type from the content structure
// This is a fallback for PKCS#8 format keys
if (content.length < 800) {
// Ed25519 keys are typically shorter
return 'ssh-ed25519';
return "ssh-ed25519";
} else if (content.length > 1600) {
// RSA keys are typically longer
return 'ssh-rsa';
return "ssh-rsa";
} else {
// ECDSA keys are typically medium length
return 'ecdsa-sha2-nistp256';
return "ecdsa-sha2-nistp256";
}
}
return 'unknown';
return "unknown";
}
// Detect public key type from public key content
@@ -125,92 +128,92 @@ function detectPublicKeyTypeFromContent(publicKeyContent: string): string {
const content = publicKeyContent.trim();
// SSH public keys start with the key type
if (content.startsWith('ssh-rsa ')) {
return 'ssh-rsa';
if (content.startsWith("ssh-rsa ")) {
return "ssh-rsa";
}
if (content.startsWith('ssh-ed25519 ')) {
return 'ssh-ed25519';
if (content.startsWith("ssh-ed25519 ")) {
return "ssh-ed25519";
}
if (content.startsWith('ecdsa-sha2-nistp256 ')) {
return 'ecdsa-sha2-nistp256';
if (content.startsWith("ecdsa-sha2-nistp256 ")) {
return "ecdsa-sha2-nistp256";
}
if (content.startsWith('ecdsa-sha2-nistp384 ')) {
return 'ecdsa-sha2-nistp384';
if (content.startsWith("ecdsa-sha2-nistp384 ")) {
return "ecdsa-sha2-nistp384";
}
if (content.startsWith('ecdsa-sha2-nistp521 ')) {
return 'ecdsa-sha2-nistp521';
if (content.startsWith("ecdsa-sha2-nistp521 ")) {
return "ecdsa-sha2-nistp521";
}
if (content.startsWith('ssh-dss ')) {
return 'ssh-dss';
if (content.startsWith("ssh-dss ")) {
return "ssh-dss";
}
// Check for PEM format public keys
if (content.includes('-----BEGIN PUBLIC KEY-----')) {
if (content.includes("-----BEGIN PUBLIC KEY-----")) {
// Try to decode the base64 content to detect key type
try {
const base64Content = content
.replace('-----BEGIN PUBLIC KEY-----', '')
.replace('-----END PUBLIC KEY-----', '')
.replace(/\s/g, '');
.replace("-----BEGIN PUBLIC KEY-----", "")
.replace("-----END PUBLIC KEY-----", "")
.replace(/\s/g, "");
const decoded = Buffer.from(base64Content, 'base64');
const decodedString = decoded.toString('binary');
const decoded = Buffer.from(base64Content, "base64");
const decodedString = decoded.toString("binary");
// Check for algorithm identifiers in the DER structure
if (decodedString.includes('1.2.840.113549.1.1.1')) {
if (decodedString.includes("1.2.840.113549.1.1.1")) {
// RSA OID
return 'ssh-rsa';
} else if (decodedString.includes('1.2.840.10045.2.1')) {
return "ssh-rsa";
} else if (decodedString.includes("1.2.840.10045.2.1")) {
// EC Public Key OID - this indicates ECDSA
if (decodedString.includes('1.2.840.10045.3.1.7')) {
if (decodedString.includes("1.2.840.10045.3.1.7")) {
// prime256v1 curve OID
return 'ecdsa-sha2-nistp256';
return "ecdsa-sha2-nistp256";
}
return 'ecdsa-sha2-nistp256'; // Default to P-256
} else if (decodedString.includes('1.3.101.112')) {
return "ecdsa-sha2-nistp256"; // Default to P-256
} else if (decodedString.includes("1.3.101.112")) {
// Ed25519 OID
return 'ssh-ed25519';
return "ssh-ed25519";
}
} catch (error) {
// If decoding fails, fall back to length-based detection
console.warn('Failed to decode public key for type detection:', error);
console.warn("Failed to decode public key for type detection:", error);
}
// Fallback: Try to guess based on key length
if (content.length < 400) {
return 'ssh-ed25519';
return "ssh-ed25519";
} else if (content.length > 600) {
return 'ssh-rsa';
return "ssh-rsa";
} else {
return 'ecdsa-sha2-nistp256';
return "ecdsa-sha2-nistp256";
}
}
if (content.includes('-----BEGIN RSA PUBLIC KEY-----')) {
return 'ssh-rsa';
if (content.includes("-----BEGIN RSA PUBLIC KEY-----")) {
return "ssh-rsa";
}
// Check for base64 encoded key data patterns
if (content.includes('AAAAB3NzaC1yc2E')) {
return 'ssh-rsa';
if (content.includes("AAAAB3NzaC1yc2E")) {
return "ssh-rsa";
}
if (content.includes('AAAAC3NzaC1lZDI1NTE5')) {
return 'ssh-ed25519';
if (content.includes("AAAAC3NzaC1lZDI1NTE5")) {
return "ssh-ed25519";
}
if (content.includes('AAAAE2VjZHNhLXNoYTItbmlzdHAyNTY')) {
return 'ecdsa-sha2-nistp256';
if (content.includes("AAAAE2VjZHNhLXNoYTItbmlzdHAyNTY")) {
return "ecdsa-sha2-nistp256";
}
if (content.includes('AAAAE2VjZHNhLXNoYTItbmlzdHAzODQ')) {
return 'ecdsa-sha2-nistp384';
if (content.includes("AAAAE2VjZHNhLXNoYTItbmlzdHAzODQ")) {
return "ecdsa-sha2-nistp384";
}
if (content.includes('AAAAE2VjZHNhLXNoYTItbmlzdHA1MjE')) {
return 'ecdsa-sha2-nistp521';
if (content.includes("AAAAE2VjZHNhLXNoYTItbmlzdHA1MjE")) {
return "ecdsa-sha2-nistp521";
}
if (content.includes('AAAAB3NzaC1kc3M')) {
return 'ssh-dss';
if (content.includes("AAAAB3NzaC1kc3M")) {
return "ssh-dss";
}
return 'unknown';
return "unknown";
}
export interface KeyInfo {
@@ -239,90 +242,114 @@ export interface KeyPairValidationResult {
/**
* Parse SSH private key and extract public key and type information
*/
export function parseSSHKey(privateKeyData: string, passphrase?: string): KeyInfo {
console.log('=== SSH Key Parsing Debug ===');
console.log('Key length:', privateKeyData?.length || 'undefined');
console.log('First 100 chars:', privateKeyData?.substring(0, 100) || 'undefined');
console.log('ssh2Utils available:', typeof ssh2Utils);
console.log('parseKey function available:', typeof ssh2Utils?.parseKey);
export function parseSSHKey(
privateKeyData: string,
passphrase?: string,
): KeyInfo {
console.log("=== SSH Key Parsing Debug ===");
console.log("Key length:", privateKeyData?.length || "undefined");
console.log(
"First 100 chars:",
privateKeyData?.substring(0, 100) || "undefined",
);
console.log("ssh2Utils available:", typeof ssh2Utils);
console.log("parseKey function available:", typeof ssh2Utils?.parseKey);
try {
let keyType = 'unknown';
let publicKey = '';
let keyType = "unknown";
let publicKey = "";
let useSSH2 = false;
// Try SSH2 first if available
if (ssh2Utils && typeof ssh2Utils.parseKey === 'function') {
if (ssh2Utils && typeof ssh2Utils.parseKey === "function") {
try {
console.log('Calling ssh2Utils.parseKey...');
console.log("Calling ssh2Utils.parseKey...");
const parsedKey = ssh2Utils.parseKey(privateKeyData, passphrase);
console.log('parseKey returned:', typeof parsedKey, parsedKey instanceof Error ? parsedKey.message : 'success');
console.log(
"parseKey returned:",
typeof parsedKey,
parsedKey instanceof Error ? parsedKey.message : "success",
);
if (!(parsedKey instanceof Error)) {
// Extract key type
if (parsedKey.type) {
keyType = parsedKey.type;
}
console.log('Extracted key type:', keyType);
console.log("Extracted key type:", keyType);
// Generate public key in SSH format
try {
console.log('Attempting to generate public key...');
console.log("Attempting to generate public key...");
const publicKeyBuffer = parsedKey.getPublicSSH();
console.log('Public key buffer type:', typeof publicKeyBuffer);
console.log('Public key buffer is Buffer:', Buffer.isBuffer(publicKeyBuffer));
console.log("Public key buffer type:", typeof publicKeyBuffer);
console.log(
"Public key buffer is Buffer:",
Buffer.isBuffer(publicKeyBuffer),
);
// ssh2's getPublicSSH() returns binary SSH protocol data, not text
// We need to convert this to proper SSH public key format
if (Buffer.isBuffer(publicKeyBuffer)) {
// Convert binary SSH data to base64 and create proper SSH key format
const base64Data = publicKeyBuffer.toString('base64');
const base64Data = publicKeyBuffer.toString("base64");
// Create proper SSH public key format: "keytype base64data"
if (keyType === 'ssh-rsa') {
if (keyType === "ssh-rsa") {
publicKey = `ssh-rsa ${base64Data}`;
} else if (keyType === 'ssh-ed25519') {
} else if (keyType === "ssh-ed25519") {
publicKey = `ssh-ed25519 ${base64Data}`;
} else if (keyType.startsWith('ecdsa-')) {
} else if (keyType.startsWith("ecdsa-")) {
publicKey = `${keyType} ${base64Data}`;
} else {
publicKey = `${keyType} ${base64Data}`;
}
console.log('Generated SSH public key format, length:', publicKey.length);
console.log('Public key starts with:', publicKey.substring(0, 50));
console.log(
"Generated SSH public key format, length:",
publicKey.length,
);
console.log(
"Public key starts with:",
publicKey.substring(0, 50),
);
} else {
console.warn('Unexpected public key buffer type');
publicKey = '';
console.warn("Unexpected public key buffer type");
publicKey = "";
}
} catch (error) {
console.warn('Failed to generate public key:', error);
publicKey = '';
console.warn("Failed to generate public key:", error);
publicKey = "";
}
useSSH2 = true;
console.log(`SSH key parsed successfully with SSH2: ${keyType}`);
} else {
console.warn('SSH2 parsing failed:', parsedKey.message);
console.warn("SSH2 parsing failed:", parsedKey.message);
}
} catch (error) {
console.warn('SSH2 parsing exception:', error instanceof Error ? error.message : error);
console.warn(
"SSH2 parsing exception:",
error instanceof Error ? error.message : error,
);
}
} else {
console.warn('SSH2 parseKey function not available');
console.warn("SSH2 parseKey function not available");
}
// Fallback to content-based detection
if (!useSSH2) {
console.log('Using fallback key type detection...');
console.log("Using fallback key type detection...");
keyType = detectKeyTypeFromContent(privateKeyData);
console.log(`Fallback detected key type: ${keyType}`);
// For fallback, we can't generate public key but the detection is still useful
publicKey = '';
publicKey = "";
if (keyType !== 'unknown') {
console.log(`SSH key type detected successfully with fallback: ${keyType}`);
if (keyType !== "unknown") {
console.log(
`SSH key type detected successfully with fallback: ${keyType}`,
);
}
}
@@ -330,34 +357,38 @@ export function parseSSHKey(privateKeyData: string, passphrase?: string): KeyInf
privateKey: privateKeyData,
publicKey,
keyType,
success: keyType !== 'unknown'
success: keyType !== "unknown",
};
} catch (error) {
console.error('Exception during SSH key parsing:', error);
console.error('Error stack:', error instanceof Error ? error.stack : 'No stack');
console.error("Exception during SSH key parsing:", error);
console.error(
"Error stack:",
error instanceof Error ? error.stack : "No stack",
);
// Final fallback - try content detection
try {
const fallbackKeyType = detectKeyTypeFromContent(privateKeyData);
if (fallbackKeyType !== 'unknown') {
if (fallbackKeyType !== "unknown") {
console.log(`Final fallback detection successful: ${fallbackKeyType}`);
return {
privateKey: privateKeyData,
publicKey: '',
publicKey: "",
keyType: fallbackKeyType,
success: true
success: true,
};
}
} catch (fallbackError) {
console.error('Even fallback detection failed:', fallbackError);
console.error("Even fallback detection failed:", fallbackError);
}
return {
privateKey: privateKeyData,
publicKey: '',
keyType: 'unknown',
publicKey: "",
keyType: "unknown",
success: false,
error: error instanceof Error ? error.message : 'Unknown error parsing key'
error:
error instanceof Error ? error.message : "Unknown error parsing key",
};
}
}
@@ -366,9 +397,12 @@ export function parseSSHKey(privateKeyData: string, passphrase?: string): KeyInf
* Parse SSH public key and extract type information
*/
export function parsePublicKey(publicKeyData: string): PublicKeyInfo {
console.log('=== SSH Public Key Parsing Debug ===');
console.log('Public key length:', publicKeyData?.length || 'undefined');
console.log('First 100 chars:', publicKeyData?.substring(0, 100) || 'undefined');
console.log("=== SSH Public Key Parsing Debug ===");
console.log("Public key length:", publicKeyData?.length || "undefined");
console.log(
"First 100 chars:",
publicKeyData?.substring(0, 100) || "undefined",
);
try {
const keyType = detectPublicKeyTypeFromContent(publicKeyData);
@@ -377,15 +411,18 @@ export function parsePublicKey(publicKeyData: string): PublicKeyInfo {
return {
publicKey: publicKeyData,
keyType,
success: keyType !== 'unknown'
success: keyType !== "unknown",
};
} catch (error) {
console.error('Exception during SSH public key parsing:', error);
console.error("Exception during SSH public key parsing:", error);
return {
publicKey: publicKeyData,
keyType: 'unknown',
keyType: "unknown",
success: false,
error: error instanceof Error ? error.message : 'Unknown error parsing public key'
error:
error instanceof Error
? error.message
: "Unknown error parsing public key",
};
}
}
@@ -397,11 +434,11 @@ export function detectKeyType(privateKeyData: string): string {
try {
const parsedKey = ssh2Utils.parseKey(privateKeyData);
if (parsedKey instanceof Error) {
return 'unknown';
return "unknown";
}
return parsedKey.type || 'unknown';
return parsedKey.type || "unknown";
} catch (error) {
return 'unknown';
return "unknown";
}
}
@@ -410,15 +447,15 @@ export function detectKeyType(privateKeyData: string): string {
*/
export function getFriendlyKeyTypeName(keyType: string): string {
const keyTypeMap: Record<string, string> = {
'ssh-rsa': 'RSA',
'ssh-ed25519': 'Ed25519',
'ecdsa-sha2-nistp256': 'ECDSA P-256',
'ecdsa-sha2-nistp384': 'ECDSA P-384',
'ecdsa-sha2-nistp521': 'ECDSA P-521',
'ssh-dss': 'DSA',
'rsa-sha2-256': 'RSA-SHA2-256',
'rsa-sha2-512': 'RSA-SHA2-512',
'unknown': 'Unknown'
"ssh-rsa": "RSA",
"ssh-ed25519": "Ed25519",
"ecdsa-sha2-nistp256": "ECDSA P-256",
"ecdsa-sha2-nistp384": "ECDSA P-384",
"ecdsa-sha2-nistp521": "ECDSA P-521",
"ssh-dss": "DSA",
"rsa-sha2-256": "RSA-SHA2-256",
"rsa-sha2-512": "RSA-SHA2-512",
unknown: "Unknown",
};
return keyTypeMap[keyType] || keyType;
@@ -427,25 +464,37 @@ export function getFriendlyKeyTypeName(keyType: string): string {
/**
* Validate if a private key and public key form a valid key pair
*/
export function validateKeyPair(privateKeyData: string, publicKeyData: string, passphrase?: string): KeyPairValidationResult {
console.log('=== Key Pair Validation Debug ===');
console.log('Private key length:', privateKeyData?.length || 'undefined');
console.log('Public key length:', publicKeyData?.length || 'undefined');
export function validateKeyPair(
privateKeyData: string,
publicKeyData: string,
passphrase?: string,
): KeyPairValidationResult {
console.log("=== Key Pair Validation Debug ===");
console.log("Private key length:", privateKeyData?.length || "undefined");
console.log("Public key length:", publicKeyData?.length || "undefined");
try {
// First parse the private key and try to generate public key
const privateKeyInfo = parseSSHKey(privateKeyData, passphrase);
const publicKeyInfo = parsePublicKey(publicKeyData);
console.log('Private key parsing result:', privateKeyInfo.success, privateKeyInfo.keyType);
console.log('Public key parsing result:', publicKeyInfo.success, publicKeyInfo.keyType);
console.log(
"Private key parsing result:",
privateKeyInfo.success,
privateKeyInfo.keyType,
);
console.log(
"Public key parsing result:",
publicKeyInfo.success,
publicKeyInfo.keyType,
);
if (!privateKeyInfo.success) {
return {
isValid: false,
privateKeyType: privateKeyInfo.keyType,
publicKeyType: publicKeyInfo.keyType,
error: `Invalid private key: ${privateKeyInfo.error}`
error: `Invalid private key: ${privateKeyInfo.error}`,
};
}
@@ -454,7 +503,7 @@ export function validateKeyPair(privateKeyData: string, publicKeyData: string, p
isValid: false,
privateKeyType: privateKeyInfo.keyType,
publicKeyType: publicKeyInfo.keyType,
error: `Invalid public key: ${publicKeyInfo.error}`
error: `Invalid public key: ${publicKeyInfo.error}`,
};
}
@@ -464,7 +513,7 @@ export function validateKeyPair(privateKeyData: string, publicKeyData: string, p
isValid: false,
privateKeyType: privateKeyInfo.keyType,
publicKeyType: publicKeyInfo.keyType,
error: `Key type mismatch: private key is ${privateKeyInfo.keyType}, public key is ${publicKeyInfo.keyType}`
error: `Key type mismatch: private key is ${privateKeyInfo.keyType}, public key is ${publicKeyInfo.keyType}`,
};
}
@@ -473,27 +522,34 @@ export function validateKeyPair(privateKeyData: string, publicKeyData: string, p
const generatedPublicKey = privateKeyInfo.publicKey.trim();
const providedPublicKey = publicKeyData.trim();
console.log('Generated public key length:', generatedPublicKey.length);
console.log('Provided public key length:', providedPublicKey.length);
console.log("Generated public key length:", generatedPublicKey.length);
console.log("Provided public key length:", providedPublicKey.length);
// Compare the key data part (excluding comments)
const generatedKeyParts = generatedPublicKey.split(' ');
const providedKeyParts = providedPublicKey.split(' ');
const generatedKeyParts = generatedPublicKey.split(" ");
const providedKeyParts = providedPublicKey.split(" ");
if (generatedKeyParts.length >= 2 && providedKeyParts.length >= 2) {
// Compare key type and key data (first two parts)
const generatedKeyData = generatedKeyParts[0] + ' ' + generatedKeyParts[1];
const providedKeyData = providedKeyParts[0] + ' ' + providedKeyParts[1];
const generatedKeyData =
generatedKeyParts[0] + " " + generatedKeyParts[1];
const providedKeyData = providedKeyParts[0] + " " + providedKeyParts[1];
console.log('Generated key data:', generatedKeyData.substring(0, 50) + '...');
console.log('Provided key data:', providedKeyData.substring(0, 50) + '...');
console.log(
"Generated key data:",
generatedKeyData.substring(0, 50) + "...",
);
console.log(
"Provided key data:",
providedKeyData.substring(0, 50) + "...",
);
if (generatedKeyData === providedKeyData) {
return {
isValid: true,
privateKeyType: privateKeyInfo.keyType,
publicKeyType: publicKeyInfo.keyType,
generatedPublicKey: generatedPublicKey
generatedPublicKey: generatedPublicKey,
};
} else {
return {
@@ -501,7 +557,7 @@ export function validateKeyPair(privateKeyData: string, publicKeyData: string, p
privateKeyType: privateKeyInfo.keyType,
publicKeyType: publicKeyInfo.keyType,
generatedPublicKey: generatedPublicKey,
error: 'Public key does not match the private key'
error: "Public key does not match the private key",
};
}
}
@@ -512,16 +568,18 @@ export function validateKeyPair(privateKeyData: string, publicKeyData: string, p
isValid: true, // Assume valid if types match and no errors
privateKeyType: privateKeyInfo.keyType,
publicKeyType: publicKeyInfo.keyType,
error: 'Unable to verify key pair match, but key types are compatible'
error: "Unable to verify key pair match, but key types are compatible",
};
} catch (error) {
console.error('Exception during key pair validation:', error);
console.error("Exception during key pair validation:", error);
return {
isValid: false,
privateKeyType: 'unknown',
publicKeyType: 'unknown',
error: error instanceof Error ? error.message : 'Unknown error during validation'
privateKeyType: "unknown",
publicKeyType: "unknown",
error:
error instanceof Error
? error.message
: "Unknown error during validation",
};
}
}
}