Implement database export/import functionality for hardware migration

Added comprehensive database export/import system to safely migrate SSH connection data between different server environments.

Key Features:
- SQLite export format with encrypted data migration
- Hardware fingerprint protection and re-encryption
- Field mapping between TypeScript and database schemas
- Foreign key constraint handling for cross-environment imports
- Admin user assignment for imported SSH records
- Additive import strategy preserving existing data
- File upload support for import operations

Technical Implementation:
- Complete Drizzle ORM schema consistency
- Bidirectional field name mapping (userId ↔ user_id)
- Proper encryption/decryption workflow
- Multer file upload middleware integration
- Error handling and logging throughout

Security:
- Only exports SSH-related tables (ssh_data, ssh_credentials)
- Protects admin user data from migration conflicts
- Re-encrypts sensitive fields for target hardware
- Validates export file format and version compatibility
This commit is contained in:
ZacharyZcR
2025-09-17 16:44:20 +08:00
parent 5ec9451ef2
commit fc14389e59
10 changed files with 3000 additions and 252 deletions

View File

@@ -1,5 +1,6 @@
import express from "express";
import bodyParser from "body-parser";
import multer from "multer";
import userRoutes from "./routes/users.js";
import sshRoutes from "./routes/ssh.js";
import alertRoutes from "./routes/alerts.js";
@@ -12,6 +13,9 @@ import "dotenv/config";
import { databaseLogger, apiLogger } from "../utils/logger.js";
import { DatabaseEncryption } from "../utils/database-encryption.js";
import { EncryptionMigration } from "../utils/encryption-migration.js";
import { DatabaseMigration } from "../utils/database-migration.js";
import { DatabaseSQLiteExport } from "../utils/database-sqlite-export.js";
import { DatabaseFileEncryption } from "../utils/database-file-encryption.js";
const app = express();
app.use(
@@ -27,6 +31,33 @@ app.use(
}),
);
// Configure multer for file uploads (database import endpoint).
const storage = multer.diskStorage({
  destination: (req, file, cb) => {
    cb(null, 'uploads/');
  },
  filename: (req, file, cb) => {
    // file.originalname is attacker-controlled; strip any directory
    // components (e.g. "../../etc/cron.d/x") before using it in a path.
    const safeName = path.basename(file.originalname);
    // Preserve original filename with timestamp prefix to avoid conflicts
    const timestamp = Date.now();
    cb(null, `${timestamp}-${safeName}`);
  }
});

const upload = multer({
  storage: storage,
  limits: {
    fileSize: 100 * 1024 * 1024, // 100MB limit
  },
  fileFilter: (req, file, cb) => {
    // Allow SQLite files only; everything else is rejected before it is
    // written to disk. (.termix-export.sqlite also ends with .sqlite, so the
    // second test is the effective gate.)
    if (file.originalname.endsWith('.termix-export.sqlite') || file.originalname.endsWith('.sqlite')) {
      cb(null, true);
    } else {
      cb(new Error('Only .termix-export.sqlite files are allowed'));
    }
  }
});
interface CacheEntry {
data: any;
timestamp: number;
@@ -362,6 +393,231 @@ app.post("/encryption/regenerate", async (req, res) => {
}
});
// Database migration and backup endpoints
// POST /database/export — write the current database out as a portable
// SQLite export file and report its location, size, and format.
app.post("/database/export", async (req, res) => {
  try {
    const { customPath } = req.body;

    apiLogger.info("Starting SQLite database export via API", {
      operation: "database_sqlite_export_api",
      customPath: !!customPath
    });

    const outputFile = await DatabaseSQLiteExport.exportDatabase(customPath);
    const { size } = fs.statSync(outputFile);

    res.json({
      success: true,
      message: "Database exported successfully as SQLite",
      exportPath: outputFile,
      size,
      format: "sqlite"
    });
  } catch (error) {
    apiLogger.error("SQLite database export failed", error, {
      operation: "database_sqlite_export_api_failed"
    });
    const details = error instanceof Error ? error.message : "Unknown error";
    res.status(500).json({
      error: "SQLite database export failed",
      details
    });
  }
});
// POST /database/import — upload a .termix-export.sqlite file and merge its
// contents into the current database (additive mode: existing rows are kept).
// The uploaded temp file is removed on success and on every error path.
app.post("/database/import", upload.single('file'), async (req, res) => {
  try {
    if (!req.file) {
      return res.status(400).json({ error: "No file uploaded" });
    }

    // backupCurrent arrives as a multipart form field, i.e. a string.
    const { backupCurrent = "true" } = req.body;
    const backupCurrentBool = backupCurrent === "true";
    const importPath = req.file.path;

    apiLogger.info("Starting SQLite database import via API (additive mode)", {
      operation: "database_sqlite_import_api",
      importPath,
      originalName: req.file.originalname,
      fileSize: req.file.size,
      mode: "additive",
      backupCurrent: backupCurrentBool
    });

    // Validate export file first
    // Check file extension using original filename (multer stores the upload
    // under a timestamp-prefixed name, so the client's name is the one to test).
    if (!req.file.originalname.endsWith('.termix-export.sqlite')) {
      // Clean up uploaded file
      fs.unlinkSync(importPath);
      return res.status(400).json({
        error: "Invalid SQLite export file",
        details: ["File must have .termix-export.sqlite extension"]
      });
    }

    // Structural/metadata validation of the export file contents.
    const validation = DatabaseSQLiteExport.validateExportFile(importPath);
    if (!validation.valid) {
      // Clean up uploaded file
      fs.unlinkSync(importPath);
      return res.status(400).json({
        error: "Invalid SQLite export file",
        details: validation.errors
      });
    }

    const result = await DatabaseSQLiteExport.importDatabase(importPath, {
      replaceExisting: false, // Always use additive mode
      backupCurrent: backupCurrentBool
    });

    // Clean up uploaded file
    fs.unlinkSync(importPath);

    // NOTE: a partially-failed import still responds 200; callers must
    // inspect the `success` flag and `errors` array in the body.
    res.json({
      success: result.success,
      message: result.success ? "SQLite database imported successfully" : "SQLite database import completed with errors",
      imported: result.imported,
      errors: result.errors,
      warnings: result.warnings,
      format: "sqlite"
    });
  } catch (error) {
    // Clean up uploaded file if it exists (best-effort; failure is logged,
    // not rethrown, so the 500 response below is still sent).
    if (req.file?.path) {
      try {
        fs.unlinkSync(req.file.path);
      } catch (cleanupError) {
        apiLogger.warn("Failed to clean up uploaded file", {
          operation: "file_cleanup_failed",
          filePath: req.file.path,
          error: cleanupError instanceof Error ? cleanupError.message : 'Unknown error'
        });
      }
    }
    apiLogger.error("SQLite database import failed", error, {
      operation: "database_sqlite_import_api_failed"
    });
    res.status(500).json({
      error: "SQLite database import failed",
      details: error instanceof Error ? error.message : "Unknown error"
    });
  }
});
// GET /database/export/:exportPath/info — validate an export file on disk
// and return its metadata. The path arrives URL-encoded in the route param.
app.get("/database/export/:exportPath/info", async (req, res) => {
  try {
    const { exportPath } = req.params;
    const decodedPath = decodeURIComponent(exportPath);

    // The decoded value is used directly as a filesystem path, so reject
    // traversal sequences and NUL bytes to stop callers probing arbitrary
    // files on the host (e.g. "..%2F..%2Fetc%2Fpasswd").
    if (decodedPath.includes('..') || decodedPath.includes('\0')) {
      return res.status(400).json({
        error: "Invalid SQLite export file",
        details: ["Export path must not contain traversal sequences"]
      });
    }

    const validation = DatabaseSQLiteExport.validateExportFile(decodedPath);
    if (!validation.valid) {
      return res.status(400).json({
        error: "Invalid SQLite export file",
        details: validation.errors
      });
    }

    res.json({
      valid: true,
      metadata: validation.metadata,
      format: "sqlite"
    });
  } catch (error) {
    apiLogger.error("Failed to get SQLite export info", error, {
      operation: "sqlite_export_info_failed"
    });
    res.status(500).json({ error: "Failed to get SQLite export information" });
  }
});
// POST /database/backup — serialize the live in-memory database and write it
// to disk as a hardware-bound encrypted backup file.
app.post("/database/backup", async (req, res) => {
  try {
    // Optional custom backup directory; defaults to <db dir>/backups.
    // NOTE(review): customPath comes straight from the request body and is
    // used as a filesystem path — confirm this endpoint is admin-only.
    const { customPath } = req.body;
    apiLogger.info("Creating encrypted database backup via API", {
      operation: "database_backup_api"
    });

    // Import required modules (dynamic import — presumably to avoid a
    // circular dependency at module load time; TODO confirm)
    const { databasePaths, getMemoryDatabaseBuffer } = await import("./db/index.js");

    // Get current in-memory database as buffer
    const dbBuffer = getMemoryDatabaseBuffer();

    // Create backup directory
    const backupDir = customPath || path.join(databasePaths.directory, 'backups');
    if (!fs.existsSync(backupDir)) {
      fs.mkdirSync(backupDir, { recursive: true });
    }

    // Generate backup filename with timestamp (':' and '.' are not
    // filename-safe on all platforms, so they are replaced with '-')
    const timestamp = new Date().toISOString().replace(/[:.]/g, '-');
    const backupFileName = `database-backup-${timestamp}.sqlite.encrypted`;
    const backupPath = path.join(backupDir, backupFileName);

    // Create encrypted backup directly from memory buffer
    DatabaseFileEncryption.encryptDatabaseFromBuffer(dbBuffer, backupPath);

    res.json({
      success: true,
      message: "Encrypted backup created successfully",
      backupPath,
      size: fs.statSync(backupPath).size
    });
  } catch (error) {
    apiLogger.error("Database backup failed", error, {
      operation: "database_backup_api_failed"
    });
    res.status(500).json({
      error: "Database backup failed",
      details: error instanceof Error ? error.message : "Unknown error"
    });
  }
});
// POST /database/restore — decrypt an encrypted backup file back onto disk.
// Restore only succeeds on the hardware that created the backup (the
// encryption key is derived from the machine fingerprint).
app.post("/database/restore", async (req, res) => {
  try {
    // NOTE(review): backupPath/targetPath come from the request body and are
    // used directly as filesystem paths — confirm this endpoint is admin-only.
    const { backupPath, targetPath } = req.body;
    if (!backupPath) {
      return res.status(400).json({ error: "Backup path is required" });
    }
    apiLogger.info("Restoring database from backup via API", {
      operation: "database_restore_api",
      backupPath
    });

    // Validate backup file (ciphertext + metadata sidecar must both exist)
    if (!DatabaseFileEncryption.isEncryptedDatabaseFile(backupPath)) {
      return res.status(400).json({ error: "Invalid encrypted backup file" });
    }

    // Check hardware compatibility before attempting decryption
    if (!DatabaseFileEncryption.validateHardwareCompatibility(backupPath)) {
      return res.status(400).json({
        error: "Hardware fingerprint mismatch",
        message: "This backup was created on different hardware and cannot be restored"
      });
    }

    // NOTE(review): the running app keeps its database in memory, so the
    // restored file presumably takes effect on next startup — confirm.
    const restoredPath = DatabaseFileEncryption.restoreFromEncryptedBackup(backupPath, targetPath);

    res.json({
      success: true,
      message: "Database restored successfully",
      restoredPath
    });
  } catch (error) {
    apiLogger.error("Database restore failed", error, {
      operation: "database_restore_api_failed"
    });
    res.status(500).json({
      error: "Database restore failed",
      details: error instanceof Error ? error.message : "Unknown error"
    });
  }
});
app.use("/users", userRoutes);
app.use("/ssh", sshRoutes);
app.use("/alerts", alertRoutes);
@@ -420,6 +676,12 @@ async function initializeEncryption() {
}
app.listen(PORT, async () => {
// Ensure uploads directory exists
const uploadsDir = path.join(process.cwd(), 'uploads');
if (!fs.existsSync(uploadsDir)) {
fs.mkdirSync(uploadsDir, { recursive: true });
}
await initializeEncryption();
databaseLogger.success(`Database API server started on port ${PORT}`, {
@@ -437,6 +699,11 @@ app.listen(PORT, async () => {
"/encryption/initialize",
"/encryption/migrate",
"/encryption/regenerate",
"/database/export",
"/database/import",
"/database/export/:exportPath/info",
"/database/backup",
"/database/restore",
],
});
});

View File

@@ -4,6 +4,7 @@ import * as schema from "./schema.js";
import fs from "fs";
import path from "path";
import { databaseLogger } from "../../utils/logger.js";
import { DatabaseFileEncryption } from "../../utils/database-file-encryption.js";
const dataDir = process.env.DATA_DIR || "./db/data";
const dbDir = path.resolve(dataDir);
@@ -15,12 +16,139 @@ if (!fs.existsSync(dbDir)) {
fs.mkdirSync(dbDir, { recursive: true });
}
// Database file encryption configuration
const enableFileEncryption = process.env.DB_FILE_ENCRYPTION !== 'false';
const dbPath = path.join(dataDir, "db.sqlite");
const encryptedDbPath = `${dbPath}.encrypted`;
// Initialize database with file encryption support
let actualDbPath = ':memory:'; // Always use memory database
let memoryDatabase: Database.Database;
let isNewDatabase = false;
if (enableFileEncryption) {
try {
// Check if encrypted database exists
if (DatabaseFileEncryption.isEncryptedDatabaseFile(encryptedDbPath)) {
databaseLogger.info('Found encrypted database file, loading into memory...', {
operation: 'db_memory_load',
encryptedPath: encryptedDbPath
});
// Validate hardware compatibility
if (!DatabaseFileEncryption.validateHardwareCompatibility(encryptedDbPath)) {
databaseLogger.error('Hardware fingerprint mismatch for encrypted database', {
operation: 'db_decrypt_failed',
reason: 'hardware_mismatch'
});
throw new Error('Cannot decrypt database: hardware fingerprint mismatch');
}
// Decrypt database content to memory buffer
const decryptedBuffer = DatabaseFileEncryption.decryptDatabaseToBuffer(encryptedDbPath);
// Create in-memory database from decrypted buffer
memoryDatabase = new Database(decryptedBuffer);
databaseLogger.success('Existing database loaded into memory successfully', {
operation: 'db_memory_load_success',
bufferSize: decryptedBuffer.length,
inMemory: true
});
} else {
// No encrypted database exists - create new in-memory database
databaseLogger.info('No encrypted database found, creating new in-memory database', {
operation: 'db_memory_create_new'
});
memoryDatabase = new Database(':memory:');
isNewDatabase = true;
// Check if there's an old unencrypted database to migrate
if (fs.existsSync(dbPath)) {
databaseLogger.info('Found existing unencrypted database, will migrate to memory', {
operation: 'db_migrate_to_memory',
oldPath: dbPath
});
// Load old database and copy its content to memory database
const oldDb = new Database(dbPath, { readonly: true });
// Get all table schemas and data from old database
const tables = oldDb.prepare(`
SELECT name, sql FROM sqlite_master
WHERE type='table' AND name NOT LIKE 'sqlite_%'
`).all() as { name: string; sql: string }[];
// Create tables in memory database
for (const table of tables) {
memoryDatabase.exec(table.sql);
}
// Copy data for each table
for (const table of tables) {
const rows = oldDb.prepare(`SELECT * FROM ${table.name}`).all();
if (rows.length > 0) {
const columns = Object.keys(rows[0]);
const placeholders = columns.map(() => '?').join(', ');
const insertStmt = memoryDatabase.prepare(
`INSERT INTO ${table.name} (${columns.join(', ')}) VALUES (${placeholders})`
);
for (const row of rows) {
const values = columns.map(col => (row as any)[col]);
insertStmt.run(values);
}
}
}
oldDb.close();
databaseLogger.success('Migrated existing database to memory', {
operation: 'db_migrate_to_memory_success'
});
isNewDatabase = false;
} else {
databaseLogger.success('Created new in-memory database', {
operation: 'db_memory_create_success'
});
}
}
} catch (error) {
databaseLogger.error('Failed to initialize memory database', error, {
operation: 'db_memory_init_failed'
});
// If file encryption is critical, fail fast
if (process.env.DB_FILE_ENCRYPTION_REQUIRED === 'true') {
throw error;
}
// Create fallback in-memory database
databaseLogger.warn('Creating fallback in-memory database', {
operation: 'db_memory_fallback'
});
memoryDatabase = new Database(':memory:');
isNewDatabase = true;
}
} else {
// File encryption disabled - still use memory for consistency
databaseLogger.info('File encryption disabled, using in-memory database', {
operation: 'db_memory_no_encryption'
});
memoryDatabase = new Database(':memory:');
isNewDatabase = true;
}
databaseLogger.info(`Initializing SQLite database`, {
operation: "db_init",
path: dbPath,
path: actualDbPath,
encrypted: enableFileEncryption && DatabaseFileEncryption.isEncryptedDatabaseFile(encryptedDbPath),
inMemory: true,
isNewDatabase
});
const sqlite = new Database(dbPath);
const sqlite = memoryDatabase;
sqlite.exec(`
CREATE TABLE IF NOT EXISTS users (
@@ -270,7 +398,7 @@ const migrateSchema = () => {
});
};
const initializeDatabase = async () => {
const initializeDatabase = async (): Promise<void> => {
migrateSchema();
try {
@@ -303,15 +431,229 @@ const initializeDatabase = async () => {
}
};
initializeDatabase().catch((error) => {
databaseLogger.error("Failed to initialize database", error, {
operation: "db_init",
// Persist the in-memory database to its encrypted on-disk file.
// No-op when the memory database is absent or file encryption is disabled;
// failures are logged but never thrown (this is a best-effort save used by
// the autosave timer and the shutdown path).
async function saveMemoryDatabaseToFile() {
  const canSave = Boolean(memoryDatabase) && enableFileEncryption;
  if (!canSave) {
    return;
  }

  try {
    // Snapshot the whole in-memory database, then encrypt it to disk.
    const snapshot = memoryDatabase.serialize();
    DatabaseFileEncryption.encryptDatabaseFromBuffer(snapshot, encryptedDbPath);

    databaseLogger.debug('In-memory database saved to encrypted file', {
      operation: 'memory_db_save',
      bufferSize: snapshot.length,
      encryptedPath: encryptedDbPath
    });
  } catch (error) {
    databaseLogger.error('Failed to save in-memory database', error, {
      operation: 'memory_db_save_failed'
    });
  }
}
// Function to handle post-initialization file encryption and cleanup.
// Runs once after schema init: removes any plaintext db.sqlite left on disk,
// writes the first encrypted snapshot, and starts the periodic autosave.
async function handlePostInitFileEncryption() {
  if (!enableFileEncryption) return;
  try {
    // Clean up any existing unencrypted database files (the plaintext file
    // was already migrated into memory during startup, so it is redundant
    // and a security liability at rest).
    if (fs.existsSync(dbPath)) {
      databaseLogger.warn('Found unencrypted database file, removing for security', {
        operation: 'db_security_cleanup_existing',
        removingPath: dbPath
      });
      try {
        fs.unlinkSync(dbPath);
        databaseLogger.success('Unencrypted database file removed for security', {
          operation: 'db_security_cleanup_complete',
          removedPath: dbPath
        });
      } catch (error) {
        // The file may still be locked (e.g. by the handle used for the
        // startup migration), so defer and retry once.
        databaseLogger.warn('Could not remove unencrypted database file (may be locked)', {
          operation: 'db_security_cleanup_deferred',
          path: dbPath,
          error: error instanceof Error ? error.message : 'Unknown error'
        });
        // Try again after a short delay
        setTimeout(() => {
          try {
            if (fs.existsSync(dbPath)) {
              fs.unlinkSync(dbPath);
              databaseLogger.success('Delayed cleanup: unencrypted database file removed', {
                operation: 'db_security_cleanup_delayed_success',
                removedPath: dbPath
              });
            }
          } catch (delayedError) {
            databaseLogger.error('Failed to remove unencrypted database file even after delay', delayedError, {
              operation: 'db_security_cleanup_delayed_failed',
              path: dbPath
            });
          }
        }, 2000);
      }
    }

    // Always save the in-memory database (whether new or existing) so an
    // encrypted on-disk copy exists from the first run.
    if (memoryDatabase) {
      // Save immediately after initialization
      await saveMemoryDatabaseToFile();

      // Set up periodic saves every 5 minutes
      setInterval(saveMemoryDatabaseToFile, 5 * 60 * 1000);

      databaseLogger.info('Periodic in-memory database saves configured', {
        operation: 'memory_db_autosave_setup',
        intervalMinutes: 5
      });
    }
  } catch (error) {
    databaseLogger.error('Failed to handle database file encryption/cleanup', error, {
      operation: 'db_encrypt_cleanup_failed'
    });
    // Don't fail the entire initialization for this
  }
}
initializeDatabase()
.then(() => handlePostInitFileEncryption())
.catch((error) => {
databaseLogger.error("Failed to initialize database", error, {
operation: "db_init",
});
process.exit(1);
});
process.exit(1);
});
databaseLogger.success("Database connection established", {
operation: "db_init",
path: dbPath,
path: actualDbPath,
hasEncryptedBackup: enableFileEncryption && DatabaseFileEncryption.isEncryptedDatabaseFile(encryptedDbPath)
});
// Graceful-shutdown helper: flush the in-memory database to its encrypted
// file, close the SQLite handle, then sweep the .temp working directory.
// Every failure is logged (or deliberately ignored for temp files) rather
// than thrown, so shutdown always proceeds.
async function cleanupDatabase() {
  // Step 1: persist the in-memory database before anything is torn down.
  if (memoryDatabase) {
    try {
      await saveMemoryDatabaseToFile();
      databaseLogger.info('In-memory database saved before shutdown', {
        operation: 'shutdown_save'
      });
    } catch (error) {
      databaseLogger.error('Failed to save in-memory database before shutdown', error, {
        operation: 'shutdown_save_failed'
      });
    }
  }

  // Step 2: close the database handle.
  try {
    if (sqlite) {
      sqlite.close();
      databaseLogger.debug('Database connection closed', {
        operation: 'db_close'
      });
    }
  } catch (error) {
    databaseLogger.warn('Error closing database connection', {
      operation: 'db_close_error',
      error: error instanceof Error ? error.message : 'Unknown error'
    });
  }

  // Step 3: best-effort removal of the temp working directory.
  try {
    const tempDir = path.join(dataDir, '.temp');
    if (fs.existsSync(tempDir)) {
      for (const entry of fs.readdirSync(tempDir)) {
        try {
          fs.unlinkSync(path.join(tempDir, entry));
        } catch {
          // Ignore individual file cleanup errors
        }
      }
      try {
        fs.rmdirSync(tempDir);
        databaseLogger.debug('Temp directory cleaned up', {
          operation: 'temp_dir_cleanup'
        });
      } catch {
        // Ignore directory removal errors
      }
    }
  } catch (error) {
    // Ignore temp directory cleanup errors
  }
}
// Register cleanup handlers.
// The 'exit' event cannot await async work, so only the synchronous close
// runs there; the full async cleanup happens in the signal handlers below.
process.on('exit', () => {
  // Synchronous cleanup only for exit event
  if (sqlite) {
    try {
      sqlite.close();
    } catch {}
  }
});

process.on('SIGINT', async () => {
  databaseLogger.info('Received SIGINT, cleaning up...', {
    operation: 'shutdown'
  });
  await cleanupDatabase();
  process.exit(0);
});

process.on('SIGTERM', async () => {
  databaseLogger.info('Received SIGTERM, cleaning up...', {
    operation: 'shutdown'
  });
  await cleanupDatabase();
  process.exit(0);
});

// Export database connection and file encryption utilities
export const db = drizzle(sqlite, { schema });
export const sqliteInstance = sqlite; // Export underlying SQLite instance for schema queries
export { DatabaseFileEncryption };
export const databasePaths = {
  main: actualDbPath, // set to ':memory:' at initialization — the live DB is in RAM
  encrypted: encryptedDbPath,
  directory: dbDir,
  inMemory: true
};
// Serialize the in-memory SQLite database into a Buffer (used by the
// backup endpoint and anywhere a raw snapshot of the live DB is needed).
// Throws when the memory database was never initialized, or rethrows the
// serialization error after logging it.
function getMemoryDatabaseBuffer(): Buffer {
  if (!memoryDatabase) {
    throw new Error('Memory database not initialized');
  }

  try {
    const snapshot = memoryDatabase.serialize();

    databaseLogger.debug('Memory database serialized to buffer', {
      operation: 'memory_db_serialize',
      bufferSize: snapshot.length
    });

    return snapshot;
  } catch (error) {
    databaseLogger.error('Failed to serialize memory database to buffer', error, {
      operation: 'memory_db_serialize_failed'
    });
    throw error;
  }
}
// Export save function for manual saves and buffer access
export { saveMemoryDatabaseToFile, getMemoryDatabaseBuffer };

View File

@@ -0,0 +1,477 @@
import crypto from 'crypto';
import fs from 'fs';
import path from 'path';
import { HardwareFingerprint } from './hardware-fingerprint.js';
import { databaseLogger } from './logger.js';
interface EncryptedFileMetadata {
iv: string;
tag: string;
version: string;
fingerprint: string;
salt: string;
algorithm: string;
}
/**
* Database file encryption - encrypts the entire SQLite database file at rest
* This provides an additional security layer on top of field-level encryption
*/
class DatabaseFileEncryption {
private static readonly VERSION = 'v1';
private static readonly ALGORITHM = 'aes-256-gcm';
private static readonly KEY_ITERATIONS = 100000;
private static readonly ENCRYPTED_FILE_SUFFIX = '.encrypted';
private static readonly METADATA_FILE_SUFFIX = '.meta';
/**
* Generate file encryption key from hardware fingerprint
*/
private static generateFileEncryptionKey(salt: Buffer): Buffer {
const hardwareFingerprint = HardwareFingerprint.generate();
const key = crypto.pbkdf2Sync(
hardwareFingerprint,
salt,
this.KEY_ITERATIONS,
32, // 256 bits for AES-256
'sha256'
);
databaseLogger.debug('Generated file encryption key from hardware fingerprint', {
operation: 'file_key_generation',
iterations: this.KEY_ITERATIONS,
keyLength: key.length
});
return key;
}
/**
* Encrypt database from buffer (for in-memory databases)
*/
static encryptDatabaseFromBuffer(buffer: Buffer, targetPath: string): string {
try {
// Generate encryption components
const salt = crypto.randomBytes(32);
const iv = crypto.randomBytes(16);
const key = this.generateFileEncryptionKey(salt);
// Encrypt the buffer
const cipher = crypto.createCipheriv(this.ALGORITHM, key, iv) as any;
const encrypted = Buffer.concat([
cipher.update(buffer),
cipher.final()
]);
const tag = cipher.getAuthTag();
// Create metadata
const metadata: EncryptedFileMetadata = {
iv: iv.toString('hex'),
tag: tag.toString('hex'),
version: this.VERSION,
fingerprint: HardwareFingerprint.generate().substring(0, 16),
salt: salt.toString('hex'),
algorithm: this.ALGORITHM
};
// Write encrypted file and metadata
const metadataPath = `${targetPath}${this.METADATA_FILE_SUFFIX}`;
fs.writeFileSync(targetPath, encrypted);
fs.writeFileSync(metadataPath, JSON.stringify(metadata, null, 2));
databaseLogger.info('Database buffer encrypted successfully', {
operation: 'database_buffer_encryption',
targetPath,
bufferSize: buffer.length,
encryptedSize: encrypted.length,
fingerprintPrefix: metadata.fingerprint
});
return targetPath;
} catch (error) {
databaseLogger.error('Failed to encrypt database buffer', error, {
operation: 'database_buffer_encryption_failed',
targetPath
});
throw new Error(`Database buffer encryption failed: ${error instanceof Error ? error.message : 'Unknown error'}`);
}
}
/**
* Encrypt database file
*/
static encryptDatabaseFile(sourcePath: string, targetPath?: string): string {
if (!fs.existsSync(sourcePath)) {
throw new Error(`Source database file does not exist: ${sourcePath}`);
}
const encryptedPath = targetPath || `${sourcePath}${this.ENCRYPTED_FILE_SUFFIX}`;
const metadataPath = `${encryptedPath}${this.METADATA_FILE_SUFFIX}`;
try {
// Read source file
const sourceData = fs.readFileSync(sourcePath);
// Generate encryption components
const salt = crypto.randomBytes(32);
const iv = crypto.randomBytes(16);
const key = this.generateFileEncryptionKey(salt);
// Encrypt the file
const cipher = crypto.createCipheriv(this.ALGORITHM, key, iv) as any;
const encrypted = Buffer.concat([
cipher.update(sourceData),
cipher.final()
]);
const tag = cipher.getAuthTag();
// Create metadata
const metadata: EncryptedFileMetadata = {
iv: iv.toString('hex'),
tag: tag.toString('hex'),
version: this.VERSION,
fingerprint: HardwareFingerprint.generate().substring(0, 16),
salt: salt.toString('hex'),
algorithm: this.ALGORITHM
};
// Write encrypted file and metadata
fs.writeFileSync(encryptedPath, encrypted);
fs.writeFileSync(metadataPath, JSON.stringify(metadata, null, 2));
databaseLogger.info('Database file encrypted successfully', {
operation: 'database_file_encryption',
sourcePath,
encryptedPath,
fileSize: sourceData.length,
encryptedSize: encrypted.length,
fingerprintPrefix: metadata.fingerprint
});
return encryptedPath;
} catch (error) {
databaseLogger.error('Failed to encrypt database file', error, {
operation: 'database_file_encryption_failed',
sourcePath,
targetPath: encryptedPath
});
throw new Error(`Database file encryption failed: ${error instanceof Error ? error.message : 'Unknown error'}`);
}
}
/**
* Decrypt database file to buffer (for in-memory usage)
*/
static decryptDatabaseToBuffer(encryptedPath: string): Buffer {
if (!fs.existsSync(encryptedPath)) {
throw new Error(`Encrypted database file does not exist: ${encryptedPath}`);
}
const metadataPath = `${encryptedPath}${this.METADATA_FILE_SUFFIX}`;
if (!fs.existsSync(metadataPath)) {
throw new Error(`Metadata file does not exist: ${metadataPath}`);
}
try {
// Read metadata
const metadataContent = fs.readFileSync(metadataPath, 'utf8');
const metadata: EncryptedFileMetadata = JSON.parse(metadataContent);
// Validate metadata version
if (metadata.version !== this.VERSION) {
throw new Error(`Unsupported encryption version: ${metadata.version}`);
}
// Validate hardware fingerprint
const currentFingerprint = HardwareFingerprint.generate().substring(0, 16);
if (metadata.fingerprint !== currentFingerprint) {
databaseLogger.warn('Hardware fingerprint mismatch for database buffer decryption', {
operation: 'database_buffer_decryption',
expected: metadata.fingerprint,
current: currentFingerprint
});
throw new Error('Hardware fingerprint mismatch - database was encrypted on different hardware');
}
// Read encrypted data
const encryptedData = fs.readFileSync(encryptedPath);
// Generate decryption key
const salt = Buffer.from(metadata.salt, 'hex');
const key = this.generateFileEncryptionKey(salt);
// Decrypt to buffer
const decipher = crypto.createDecipheriv(
metadata.algorithm,
key,
Buffer.from(metadata.iv, 'hex')
) as any;
decipher.setAuthTag(Buffer.from(metadata.tag, 'hex'));
const decryptedBuffer = Buffer.concat([
decipher.update(encryptedData),
decipher.final()
]);
databaseLogger.info('Database decrypted to memory buffer', {
operation: 'database_buffer_decryption',
encryptedPath,
encryptedSize: encryptedData.length,
decryptedSize: decryptedBuffer.length,
fingerprintPrefix: metadata.fingerprint
});
return decryptedBuffer;
} catch (error) {
databaseLogger.error('Failed to decrypt database to buffer', error, {
operation: 'database_buffer_decryption_failed',
encryptedPath
});
throw new Error(`Database buffer decryption failed: ${error instanceof Error ? error.message : 'Unknown error'}`);
}
}
/**
* Decrypt database file
*/
static decryptDatabaseFile(encryptedPath: string, targetPath?: string): string {
if (!fs.existsSync(encryptedPath)) {
throw new Error(`Encrypted database file does not exist: ${encryptedPath}`);
}
const metadataPath = `${encryptedPath}${this.METADATA_FILE_SUFFIX}`;
if (!fs.existsSync(metadataPath)) {
throw new Error(`Metadata file does not exist: ${metadataPath}`);
}
const decryptedPath = targetPath || encryptedPath.replace(this.ENCRYPTED_FILE_SUFFIX, '');
try {
// Read metadata
const metadataContent = fs.readFileSync(metadataPath, 'utf8');
const metadata: EncryptedFileMetadata = JSON.parse(metadataContent);
// Validate metadata version
if (metadata.version !== this.VERSION) {
throw new Error(`Unsupported encryption version: ${metadata.version}`);
}
// Validate hardware fingerprint
const currentFingerprint = HardwareFingerprint.generate().substring(0, 16);
if (metadata.fingerprint !== currentFingerprint) {
databaseLogger.warn('Hardware fingerprint mismatch for database file', {
operation: 'database_file_decryption',
expected: metadata.fingerprint,
current: currentFingerprint
});
throw new Error('Hardware fingerprint mismatch - database was encrypted on different hardware');
}
// Read encrypted data
const encryptedData = fs.readFileSync(encryptedPath);
// Generate decryption key
const salt = Buffer.from(metadata.salt, 'hex');
const key = this.generateFileEncryptionKey(salt);
// Decrypt the file
const decipher = crypto.createDecipheriv(
metadata.algorithm,
key,
Buffer.from(metadata.iv, 'hex')
) as any;
decipher.setAuthTag(Buffer.from(metadata.tag, 'hex'));
const decrypted = Buffer.concat([
decipher.update(encryptedData),
decipher.final()
]);
// Write decrypted file
fs.writeFileSync(decryptedPath, decrypted);
databaseLogger.info('Database file decrypted successfully', {
operation: 'database_file_decryption',
encryptedPath,
decryptedPath,
encryptedSize: encryptedData.length,
decryptedSize: decrypted.length,
fingerprintPrefix: metadata.fingerprint
});
return decryptedPath;
} catch (error) {
databaseLogger.error('Failed to decrypt database file', error, {
operation: 'database_file_decryption_failed',
encryptedPath,
targetPath: decryptedPath
});
throw new Error(`Database file decryption failed: ${error instanceof Error ? error.message : 'Unknown error'}`);
}
}
/**
* Check if a file is an encrypted database file
*/
static isEncryptedDatabaseFile(filePath: string): boolean {
const metadataPath = `${filePath}${this.METADATA_FILE_SUFFIX}`;
if (!fs.existsSync(filePath) || !fs.existsSync(metadataPath)) {
return false;
}
try {
const metadataContent = fs.readFileSync(metadataPath, 'utf8');
const metadata: EncryptedFileMetadata = JSON.parse(metadataContent);
return metadata.version === this.VERSION && metadata.algorithm === this.ALGORITHM;
} catch {
return false;
}
}
/**
* Get information about an encrypted database file
*/
static getEncryptedFileInfo(encryptedPath: string): {
version: string;
algorithm: string;
fingerprint: string;
isCurrentHardware: boolean;
fileSize: number;
} | null {
if (!this.isEncryptedDatabaseFile(encryptedPath)) {
return null;
}
try {
const metadataPath = `${encryptedPath}${this.METADATA_FILE_SUFFIX}`;
const metadataContent = fs.readFileSync(metadataPath, 'utf8');
const metadata: EncryptedFileMetadata = JSON.parse(metadataContent);
const fileStats = fs.statSync(encryptedPath);
const currentFingerprint = HardwareFingerprint.generate().substring(0, 16);
return {
version: metadata.version,
algorithm: metadata.algorithm,
fingerprint: metadata.fingerprint,
isCurrentHardware: metadata.fingerprint === currentFingerprint,
fileSize: fileStats.size
};
} catch {
return null;
}
}
/**
* Securely backup database by creating encrypted copy
*/
static createEncryptedBackup(databasePath: string, backupDir: string): string {
if (!fs.existsSync(databasePath)) {
throw new Error(`Database file does not exist: ${databasePath}`);
}
// Ensure backup directory exists
if (!fs.existsSync(backupDir)) {
fs.mkdirSync(backupDir, { recursive: true });
}
// Generate backup filename with timestamp
const timestamp = new Date().toISOString().replace(/[:.]/g, '-');
const backupFileName = `database-backup-${timestamp}.sqlite.encrypted`;
const backupPath = path.join(backupDir, backupFileName);
try {
const encryptedPath = this.encryptDatabaseFile(databasePath, backupPath);
databaseLogger.info('Encrypted database backup created', {
operation: 'database_backup',
sourcePath: databasePath,
backupPath: encryptedPath,
timestamp
});
return encryptedPath;
} catch (error) {
databaseLogger.error('Failed to create encrypted backup', error, {
operation: 'database_backup_failed',
sourcePath: databasePath,
backupDir
});
throw error;
}
}
/**
 * Restore a database file from an encrypted backup produced by
 * createEncryptedBackup().
 *
 * @param backupPath - path of the encrypted backup file
 * @param targetPath - destination path for the decrypted database
 * @returns the path of the restored (decrypted) database file
 * @throws Error if the file is not a recognized encrypted backup or decryption fails
 */
static restoreFromEncryptedBackup(backupPath: string, targetPath: string): string {
  // Guard clause: refuse anything that does not look like our encrypted format.
  if (!this.isEncryptedDatabaseFile(backupPath)) {
    throw new Error('Invalid encrypted backup file');
  }
  try {
    const restored = this.decryptDatabaseFile(backupPath, targetPath);
    databaseLogger.info('Database restored from encrypted backup', {
      operation: 'database_restore',
      backupPath,
      restoredPath: restored
    });
    return restored;
  } catch (error) {
    databaseLogger.error('Failed to restore from encrypted backup', error, {
      operation: 'database_restore_failed',
      backupPath,
      targetPath
    });
    throw error;
  }
}
/**
 * Check whether an encrypted file was produced on the current hardware.
 *
 * @param encryptedPath - path of the encrypted database file
 * @returns true only when the stored fingerprint matches this machine;
 *          false on any error or when metadata is unavailable
 */
static validateHardwareCompatibility(encryptedPath: string): boolean {
  try {
    // Missing/unreadable metadata yields undefined, which compares as false.
    return this.getEncryptedFileInfo(encryptedPath)?.isCurrentHardware === true;
  } catch {
    return false;
  }
}
/**
 * Remove temporary artifacts left behind by encryption operations.
 *
 * Deletes, when present: `<base>.tmp`, the encrypted file, and its metadata
 * sidecar. Failures are logged as warnings and never thrown.
 *
 * @param basePath - base path whose derived temp files should be removed
 */
static cleanupTempFiles(basePath: string): void {
  const candidates = [
    `${basePath}.tmp`,
    `${basePath}${this.ENCRYPTED_FILE_SUFFIX}`,
    `${basePath}${this.ENCRYPTED_FILE_SUFFIX}${this.METADATA_FILE_SUFFIX}`
  ];
  try {
    for (const candidate of candidates) {
      if (!fs.existsSync(candidate)) {
        continue;
      }
      fs.unlinkSync(candidate);
      databaseLogger.debug('Cleaned up temporary file', {
        operation: 'temp_cleanup',
        file: candidate
      });
    }
  } catch (error) {
    databaseLogger.warn('Failed to clean up temporary files', {
      operation: 'temp_cleanup_failed',
      basePath,
      error: error instanceof Error ? error.message : 'Unknown error'
    });
  }
}
}
export { DatabaseFileEncryption };
export type { EncryptedFileMetadata };

View File

@@ -0,0 +1,437 @@
import fs from 'fs';
import path from 'path';
import crypto from 'crypto';
import { DatabaseFileEncryption } from './database-file-encryption.js';
import { DatabaseEncryption } from './database-encryption.js';
import { FieldEncryption } from './encryption.js';
import { HardwareFingerprint } from './hardware-fingerprint.js';
import { databaseLogger } from './logger.js';
import { db, databasePaths } from '../database/db/index.js';
import { users, sshData, sshCredentials, settings, fileManagerRecent, fileManagerPinned, fileManagerShortcuts, dismissedAlerts, sshCredentialUsage } from '../database/db/schema.js';
/** Metadata embedded in every JSON migration export, describing its origin and contents. */
interface ExportMetadata {
  version: string;                    // export format version (compared against DatabaseMigration.VERSION)
  exportedAt: string;                 // ISO timestamp of the export
  exportId: string;                   // random UUID identifying this export run
  sourceHardwareFingerprint: string;  // first 16 chars of the source machine's hardware fingerprint
  tableCount: number;                 // number of tables included in the export
  recordCount: number;                // total number of records across all tables
  encryptedFields: string[];          // "table.field" names that were encrypted in the source DB
}
/** Full JSON export payload: metadata plus per-table record arrays keyed by table name. */
interface MigrationExport {
  metadata: ExportMetadata;
  data: {
    [tableName: string]: any[];
  };
}
/** Outcome of an import: success flag, counters, and accumulated errors/warnings. */
interface ImportResult {
  success: boolean;                   // true only when no per-record/per-table errors occurred
  imported: {
    tables: number;                   // tables processed
    records: number;                  // records read from the export (including ones that failed to insert)
  };
  errors: string[];
  warnings: string[];
}
/**
 * Database migration utility for exporting/importing data between different hardware
 * Handles both field-level and file-level encryption/decryption during migration.
 *
 * Export writes a plaintext JSON file (encrypted fields are decrypted for
 * transport); import re-encrypts those fields for the current hardware before
 * inserting. NOTE(review): the export file therefore contains decrypted
 * secrets and must be handled/transferred carefully.
 */
class DatabaseMigration {
  // Export format version; imports of any other version are rejected.
  private static readonly VERSION = 'v1';
  private static readonly EXPORT_FILE_EXTENSION = '.termix-export.json';

  /**
   * Export database for migration
   * Decrypts all encrypted fields for transport to new hardware
   *
   * @param exportPath - optional destination path; defaults to a timestamped
   *                     file in the database directory
   * @returns path of the written export file
   * @throws Error when any table export fails or the file cannot be written
   */
  static async exportDatabase(exportPath?: string): Promise<string> {
    const exportId = crypto.randomUUID();
    const timestamp = new Date().toISOString();
    const defaultExportPath = path.join(
      databasePaths.directory,
      `termix-export-${timestamp.replace(/[:.]/g, '-')}${this.EXPORT_FILE_EXTENSION}`
    );
    const actualExportPath = exportPath || defaultExportPath;
    try {
      databaseLogger.info('Starting database export for migration', {
        operation: 'database_export',
        exportId,
        exportPath: actualExportPath
      });
      // Define tables to export and their encryption status.
      // hasEncryption=true means records pass through DatabaseEncryption.decryptRecord.
      const tablesToExport = [
        { name: 'users', table: users, hasEncryption: true },
        { name: 'ssh_data', table: sshData, hasEncryption: true },
        { name: 'ssh_credentials', table: sshCredentials, hasEncryption: true },
        { name: 'settings', table: settings, hasEncryption: false },
        { name: 'file_manager_recent', table: fileManagerRecent, hasEncryption: false },
        { name: 'file_manager_pinned', table: fileManagerPinned, hasEncryption: false },
        { name: 'file_manager_shortcuts', table: fileManagerShortcuts, hasEncryption: false },
        { name: 'dismissed_alerts', table: dismissedAlerts, hasEncryption: false },
        { name: 'ssh_credential_usage', table: sshCredentialUsage, hasEncryption: false }
      ];
      const exportData: MigrationExport = {
        metadata: {
          version: this.VERSION,
          exportedAt: timestamp,
          exportId,
          sourceHardwareFingerprint: HardwareFingerprint.generate().substring(0, 16),
          tableCount: 0,
          recordCount: 0,
          encryptedFields: []
        },
        data: {}
      };
      let totalRecords = 0;
      // Export each table
      for (const tableInfo of tablesToExport) {
        try {
          databaseLogger.debug(`Exporting table: ${tableInfo.name}`, {
            operation: 'table_export',
            table: tableInfo.name,
            hasEncryption: tableInfo.hasEncryption
          });
          // Query all records from the table
          const records = await db.select().from(tableInfo.table);
          // Decrypt encrypted fields if necessary
          let processedRecords = records;
          if (tableInfo.hasEncryption && records.length > 0) {
            processedRecords = records.map(record => {
              try {
                return DatabaseEncryption.decryptRecord(tableInfo.name, record);
              } catch (error) {
                databaseLogger.warn(`Failed to decrypt record in ${tableInfo.name}`, {
                  operation: 'export_decrypt_warning',
                  table: tableInfo.name,
                  recordId: (record as any).id,
                  error: error instanceof Error ? error.message : 'Unknown error'
                });
                // Return original record if decryption fails
                // (record is exported still-encrypted; it will be unreadable on the target).
                return record;
              }
            });
            // Track which fields were encrypted, using the first record as a
            // representative sample of the table's column set.
            if (records.length > 0) {
              const sampleRecord = records[0];
              for (const fieldName of Object.keys(sampleRecord)) {
                if (FieldEncryption.shouldEncryptField(tableInfo.name, fieldName)) {
                  const fieldKey = `${tableInfo.name}.${fieldName}`;
                  if (!exportData.metadata.encryptedFields.includes(fieldKey)) {
                    exportData.metadata.encryptedFields.push(fieldKey);
                  }
                }
              }
            }
          }
          exportData.data[tableInfo.name] = processedRecords;
          totalRecords += processedRecords.length;
          databaseLogger.debug(`Table ${tableInfo.name} exported`, {
            operation: 'table_export_complete',
            table: tableInfo.name,
            recordCount: processedRecords.length
          });
        } catch (error) {
          databaseLogger.error(`Failed to export table ${tableInfo.name}`, error, {
            operation: 'table_export_failed',
            table: tableInfo.name
          });
          // A table-level failure aborts the whole export.
          throw error;
        }
      }
      // Update metadata
      exportData.metadata.tableCount = tablesToExport.length;
      exportData.metadata.recordCount = totalRecords;
      // Write export file
      const exportContent = JSON.stringify(exportData, null, 2);
      fs.writeFileSync(actualExportPath, exportContent, 'utf8');
      databaseLogger.success('Database export completed successfully', {
        operation: 'database_export_complete',
        exportId,
        exportPath: actualExportPath,
        tableCount: exportData.metadata.tableCount,
        recordCount: exportData.metadata.recordCount,
        fileSize: exportContent.length
      });
      return actualExportPath;
    } catch (error) {
      databaseLogger.error('Database export failed', error, {
        operation: 'database_export_failed',
        exportId,
        exportPath: actualExportPath
      });
      throw new Error(`Database export failed: ${error instanceof Error ? error.message : 'Unknown error'}`);
    }
  }

  /**
   * Import database from migration export
   * Re-encrypts fields for the current hardware
   *
   * @param importPath - path to a .termix-export.json file
   * @param options.replaceExisting - when true, each table is cleared before
   *                                  inserting its records (default false)
   * @param options.backupCurrent - when true, an encrypted backup of the
   *                                current DB is attempted first (default true)
   * @returns per-table/record counters plus collected errors and warnings;
   *          success is true only when no errors were recorded
   * @throws Error if the file is missing, unparsable, or has an unsupported version
   */
  static async importDatabase(importPath: string, options: {
    replaceExisting?: boolean;
    backupCurrent?: boolean;
  } = {}): Promise<ImportResult> {
    const { replaceExisting = false, backupCurrent = true } = options;
    if (!fs.existsSync(importPath)) {
      throw new Error(`Import file does not exist: ${importPath}`);
    }
    try {
      databaseLogger.info('Starting database import from migration export', {
        operation: 'database_import',
        importPath,
        replaceExisting,
        backupCurrent
      });
      // Read and validate export file
      const exportContent = fs.readFileSync(importPath, 'utf8');
      const exportData: MigrationExport = JSON.parse(exportContent);
      // Validate export format
      if (exportData.metadata.version !== this.VERSION) {
        throw new Error(`Unsupported export version: ${exportData.metadata.version}`);
      }
      const result: ImportResult = {
        success: false,
        imported: { tables: 0, records: 0 },
        errors: [],
        warnings: []
      };
      // Create backup if requested; backup failure downgrades to a warning
      // rather than aborting the import.
      if (backupCurrent) {
        try {
          const backupPath = await this.createCurrentDatabaseBackup();
          databaseLogger.info('Current database backed up before import', {
            operation: 'import_backup',
            backupPath
          });
        } catch (error) {
          const warningMsg = `Failed to create backup: ${error instanceof Error ? error.message : 'Unknown error'}`;
          result.warnings.push(warningMsg);
          databaseLogger.warn('Failed to create pre-import backup', {
            operation: 'import_backup_failed',
            error: warningMsg
          });
        }
      }
      // Import data table by table, in the key order of the export file.
      for (const [tableName, tableData] of Object.entries(exportData.data)) {
        try {
          databaseLogger.debug(`Importing table: ${tableName}`, {
            operation: 'table_import',
            table: tableName,
            recordCount: tableData.length
          });
          if (replaceExisting) {
            // Clear existing data
            const tableSchema = this.getTableSchema(tableName);
            if (tableSchema) {
              await db.delete(tableSchema);
              databaseLogger.debug(`Cleared existing data from ${tableName}`, {
                operation: 'table_clear',
                table: tableName
              });
            }
          }
          // Process and encrypt records; per-record failures are collected
          // into result.errors and do not stop the rest of the table.
          for (const record of tableData) {
            try {
              // Re-encrypt sensitive fields for current hardware
              const processedRecord = DatabaseEncryption.encryptRecord(tableName, record);
              // Insert record
              const tableSchema = this.getTableSchema(tableName);
              if (tableSchema) {
                await db.insert(tableSchema).values(processedRecord);
              }
            } catch (error) {
              const errorMsg = `Failed to import record in ${tableName}: ${error instanceof Error ? error.message : 'Unknown error'}`;
              result.errors.push(errorMsg);
              databaseLogger.error('Failed to import record', error, {
                operation: 'record_import_failed',
                table: tableName,
                recordId: record.id
              });
            }
          }
          result.imported.tables++;
          // NOTE(review): counts records read from the export, including any
          // that failed to insert above.
          result.imported.records += tableData.length;
          databaseLogger.debug(`Table ${tableName} imported`, {
            operation: 'table_import_complete',
            table: tableName,
            recordCount: tableData.length
          });
        } catch (error) {
          const errorMsg = `Failed to import table ${tableName}: ${error instanceof Error ? error.message : 'Unknown error'}`;
          result.errors.push(errorMsg);
          databaseLogger.error('Failed to import table', error, {
            operation: 'table_import_failed',
            table: tableName
          });
        }
      }
      // Check if import was successful
      result.success = result.errors.length === 0;
      if (result.success) {
        databaseLogger.success('Database import completed successfully', {
          operation: 'database_import_complete',
          importPath,
          tablesImported: result.imported.tables,
          recordsImported: result.imported.records,
          warnings: result.warnings.length
        });
      } else {
        databaseLogger.error('Database import completed with errors', undefined, {
          operation: 'database_import_partial',
          importPath,
          tablesImported: result.imported.tables,
          recordsImported: result.imported.records,
          errorCount: result.errors.length,
          warningCount: result.warnings.length
        });
      }
      return result;
    } catch (error) {
      databaseLogger.error('Database import failed', error, {
        operation: 'database_import_failed',
        importPath
      });
      throw new Error(`Database import failed: ${error instanceof Error ? error.message : 'Unknown error'}`);
    }
  }

  /**
   * Validate export file format and compatibility
   *
   * Checks existence, JSON structure, version, and required metadata fields.
   * Never throws; failures are reported through the returned errors array.
   *
   * @param exportPath - path of the export file to validate
   * @returns validity flag, parsed metadata (when valid), and error messages
   */
  static validateExportFile(exportPath: string): {
    valid: boolean;
    metadata?: ExportMetadata;
    errors: string[];
  } {
    const result = {
      valid: false,
      metadata: undefined as ExportMetadata | undefined,
      errors: [] as string[]
    };
    try {
      if (!fs.existsSync(exportPath)) {
        result.errors.push('Export file does not exist');
        return result;
      }
      const exportContent = fs.readFileSync(exportPath, 'utf8');
      const exportData: MigrationExport = JSON.parse(exportContent);
      // Validate structure
      if (!exportData.metadata || !exportData.data) {
        result.errors.push('Invalid export file structure');
        return result;
      }
      // Validate version
      if (exportData.metadata.version !== this.VERSION) {
        result.errors.push(`Unsupported export version: ${exportData.metadata.version}`);
        return result;
      }
      // Validate required metadata fields
      const requiredFields = ['exportedAt', 'exportId', 'sourceHardwareFingerprint'];
      for (const field of requiredFields) {
        if (!exportData.metadata[field as keyof ExportMetadata]) {
          result.errors.push(`Missing required metadata field: ${field}`);
        }
      }
      if (result.errors.length === 0) {
        result.valid = true;
        result.metadata = exportData.metadata;
      }
      return result;
    } catch (error) {
      result.errors.push(`Failed to parse export file: ${error instanceof Error ? error.message : 'Unknown error'}`);
      return result;
    }
  }

  /**
   * Create backup of current database
   *
   * Writes an encrypted copy of the main database file into
   * `<database dir>/backups` via DatabaseFileEncryption.
   *
   * @returns path of the encrypted backup file
   */
  private static async createCurrentDatabaseBackup(): Promise<string> {
    const timestamp = new Date().toISOString().replace(/[:.]/g, '-');
    const backupDir = path.join(databasePaths.directory, 'backups');
    if (!fs.existsSync(backupDir)) {
      fs.mkdirSync(backupDir, { recursive: true });
    }
    // Create encrypted backup
    const backupPath = DatabaseFileEncryption.createEncryptedBackup(
      databasePaths.main,
      backupDir
    );
    return backupPath;
  }

  /**
   * Get table schema for database operations
   *
   * Maps an export table name to its Drizzle table object; returns undefined
   * for unknown table names (callers treat that as "skip").
   */
  private static getTableSchema(tableName: string) {
    const tableMap: { [key: string]: any } = {
      'users': users,
      'ssh_data': sshData,
      'ssh_credentials': sshCredentials,
      'settings': settings,
      'file_manager_recent': fileManagerRecent,
      'file_manager_pinned': fileManagerPinned,
      'file_manager_shortcuts': fileManagerShortcuts,
      'dismissed_alerts': dismissedAlerts,
      'ssh_credential_usage': sshCredentialUsage
    };
    return tableMap[tableName];
  }

  /**
   * Get export file info without importing
   *
   * @returns the export's metadata when the file validates, otherwise null
   */
  static getExportInfo(exportPath: string): ExportMetadata | null {
    const validation = this.validateExportFile(exportPath);
    return validation.valid ? validation.metadata! : null;
  }
}
export { DatabaseMigration };
export type { ExportMetadata, MigrationExport, ImportResult };

View File

@@ -0,0 +1,649 @@
import fs from 'fs';
import path from 'path';
import crypto from 'crypto';
import Database from 'better-sqlite3';
import { sql, eq } from 'drizzle-orm';
import { drizzle } from 'drizzle-orm/better-sqlite3';
import { DatabaseEncryption } from './database-encryption.js';
import { FieldEncryption } from './encryption.js';
import { HardwareFingerprint } from './hardware-fingerprint.js';
import { databaseLogger } from './logger.js';
import { databasePaths, db, sqliteInstance } from '../database/db/index.js';
import { sshData, sshCredentials, users } from '../database/db/schema.js';
/** Metadata stored inside the SQLite export (in the `_termix_export_metadata` table). */
interface ExportMetadata {
  version: string;                    // export format version (compared against DatabaseSQLiteExport.VERSION)
  exportedAt: string;                 // ISO timestamp of the export
  exportId: string;                   // random UUID identifying this export run
  sourceHardwareFingerprint: string;  // first 16 chars of the source machine's hardware fingerprint
  tableCount: number;                 // number of tables included in the export
  recordCount: number;                // total number of records across all tables
  encryptedFields: string[];          // "table.field" names that were encrypted in the source DB
}
/** Outcome of an import: success flag, counters, and accumulated errors/warnings. */
interface ImportResult {
  success: boolean;                   // true only when no per-record/per-table errors occurred
  imported: {
    tables: number;                   // tables processed
    records: number;                  // records read from the import file (including failed inserts)
  };
  errors: string[];
  warnings: string[];
}
/**
 * SQLite database export/import utility for hardware migration
 * Exports decrypted data to a new SQLite database file for hardware transfer.
 *
 * Only SSH-related tables (ssh_data, ssh_credentials) are included. The
 * export file holds decrypted field values, so it must be handled carefully;
 * import re-encrypts those fields for the current hardware.
 */
class DatabaseSQLiteExport {
  // Export format version; imports of any other version are rejected.
  private static readonly VERSION = 'v1';
  private static readonly EXPORT_FILE_EXTENSION = '.termix-export.sqlite';
  // Single-row key/value table inside the export DB that carries ExportMetadata as JSON.
  private static readonly METADATA_TABLE = '_termix_export_metadata';

  /**
   * Export database as SQLite file for migration
   * Creates a new SQLite database with decrypted data
   *
   * @param exportPath - optional destination; defaults to a timestamped
   *                     `.termix-export.sqlite` file in the database directory
   * @returns path of the written export database
   * @throws Error when any table export fails or the file cannot be created
   */
  static async exportDatabase(exportPath?: string): Promise<string> {
    const exportId = crypto.randomUUID();
    const timestamp = new Date().toISOString();
    const defaultExportPath = path.join(
      databasePaths.directory,
      `termix-export-${timestamp.replace(/[:.]/g, '-')}${this.EXPORT_FILE_EXTENSION}`
    );
    const actualExportPath = exportPath || defaultExportPath;
    try {
      databaseLogger.info('Starting SQLite database export for migration', {
        operation: 'database_sqlite_export',
        exportId,
        exportPath: actualExportPath
      });
      // Create new SQLite database for export
      const exportDb = new Database(actualExportPath);
      // Define tables to export - only SSH-related data
      const tablesToExport = [
        { name: 'ssh_data', hasEncryption: true },
        { name: 'ssh_credentials', hasEncryption: true }
      ];
      const exportMetadata: ExportMetadata = {
        version: this.VERSION,
        exportedAt: timestamp,
        exportId,
        sourceHardwareFingerprint: HardwareFingerprint.generate().substring(0, 16),
        tableCount: 0,
        recordCount: 0,
        encryptedFields: []
      };
      let totalRecords = 0;
      // Check total records in SSH tables for debugging
      const totalSshData = await db.select().from(sshData);
      const totalSshCredentials = await db.select().from(sshCredentials);
      databaseLogger.info(`Export preparation: found SSH data`, {
        operation: 'export_data_check',
        totalSshData: totalSshData.length,
        totalSshCredentials: totalSshCredentials.length
      });
      // Create metadata table
      exportDb.exec(`
        CREATE TABLE ${this.METADATA_TABLE} (
          key TEXT PRIMARY KEY,
          value TEXT NOT NULL
        )
      `);
      // Copy schema and data for each table
      for (const tableInfo of tablesToExport) {
        try {
          databaseLogger.debug(`Exporting SQLite table: ${tableInfo.name}`, {
            operation: 'table_sqlite_export',
            table: tableInfo.name,
            hasEncryption: tableInfo.hasEncryption
          });
          // Create table in export database using consistent schema.
          // Column names use snake_case to match the live DB columns that the
          // Drizzle schema maps onto; records below are remapped accordingly.
          if (tableInfo.name === 'ssh_data') {
            // Create ssh_data table using exact schema matching Drizzle definition
            const createTableSql = `CREATE TABLE ssh_data (
              id INTEGER PRIMARY KEY AUTOINCREMENT,
              user_id TEXT NOT NULL,
              name TEXT,
              ip TEXT NOT NULL,
              port INTEGER NOT NULL,
              username TEXT NOT NULL,
              folder TEXT,
              tags TEXT,
              pin INTEGER NOT NULL DEFAULT 0,
              auth_type TEXT NOT NULL,
              password TEXT,
              require_password INTEGER NOT NULL DEFAULT 1,
              key TEXT,
              key_password TEXT,
              key_type TEXT,
              credential_id INTEGER,
              enable_terminal INTEGER NOT NULL DEFAULT 1,
              enable_tunnel INTEGER NOT NULL DEFAULT 1,
              tunnel_connections TEXT,
              enable_file_manager INTEGER NOT NULL DEFAULT 1,
              default_path TEXT,
              created_at TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP,
              updated_at TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP
            )`;
            exportDb.exec(createTableSql);
          } else if (tableInfo.name === 'ssh_credentials') {
            // Create ssh_credentials table using exact schema matching Drizzle definition
            const createTableSql = `CREATE TABLE ssh_credentials (
              id INTEGER PRIMARY KEY AUTOINCREMENT,
              name TEXT NOT NULL,
              username TEXT,
              password TEXT,
              key_content TEXT,
              key_password TEXT,
              key_type TEXT,
              created_at TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP,
              updated_at TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP
            )`;
            exportDb.exec(createTableSql);
          } else {
            databaseLogger.warn(`Unknown table ${tableInfo.name}, skipping`, {
              operation: 'table_sqlite_export_skip',
              table: tableInfo.name
            });
            continue;
          }
          // Query all records from tables using Drizzle
          let records: any[];
          if (tableInfo.name === 'ssh_data') {
            records = await db.select().from(sshData);
          } else if (tableInfo.name === 'ssh_credentials') {
            records = await db.select().from(sshCredentials);
          } else {
            records = [];
          }
          databaseLogger.info(`Found ${records.length} records in ${tableInfo.name} for export`, {
            operation: 'table_record_count',
            table: tableInfo.name,
            recordCount: records.length
          });
          // Decrypt encrypted fields if necessary
          let processedRecords = records;
          if (tableInfo.hasEncryption && records.length > 0) {
            processedRecords = records.map(record => {
              try {
                return DatabaseEncryption.decryptRecord(tableInfo.name, record);
              } catch (error) {
                databaseLogger.warn(`Failed to decrypt record in ${tableInfo.name}`, {
                  operation: 'export_decrypt_warning',
                  table: tableInfo.name,
                  recordId: (record as any).id,
                  error: error instanceof Error ? error.message : 'Unknown error'
                });
                // Keep the still-encrypted record rather than dropping it.
                return record;
              }
            });
            // Track encrypted fields (first record is used as the column sample)
            const sampleRecord = records[0];
            for (const fieldName of Object.keys(sampleRecord)) {
              if (this.shouldTrackEncryptedField(tableInfo.name, fieldName)) {
                const fieldKey = `${tableInfo.name}.${fieldName}`;
                if (!exportMetadata.encryptedFields.includes(fieldKey)) {
                  exportMetadata.encryptedFields.push(fieldKey);
                }
              }
            }
          }
          // Insert records into export database
          if (processedRecords.length > 0) {
            const sampleRecord = processedRecords[0];
            const tsFieldNames = Object.keys(sampleRecord);
            // Map TypeScript field names to database column names
            const dbColumnNames = tsFieldNames.map(fieldName => {
              // Map TypeScript field names to database column names
              const fieldMappings: Record<string, string> = {
                'userId': 'user_id',
                'authType': 'auth_type',
                'requirePassword': 'require_password',
                'keyPassword': 'key_password',
                'keyType': 'key_type',
                'credentialId': 'credential_id',
                'enableTerminal': 'enable_terminal',
                'enableTunnel': 'enable_tunnel',
                'tunnelConnections': 'tunnel_connections',
                'enableFileManager': 'enable_file_manager',
                'defaultPath': 'default_path',
                'createdAt': 'created_at',
                'updatedAt': 'updated_at',
                'keyContent': 'key_content'
              };
              return fieldMappings[fieldName] || fieldName;
            });
            const placeholders = dbColumnNames.map(() => '?').join(', ');
            const insertSql = `INSERT INTO ${tableInfo.name} (${dbColumnNames.join(', ')}) VALUES (${placeholders})`;
            const insertStmt = exportDb.prepare(insertSql);
            for (const record of processedRecords) {
              const values = tsFieldNames.map(fieldName => {
                const value: any = record[fieldName as keyof typeof record];
                // Convert values to SQLite-compatible types
                if (value === null || value === undefined) {
                  return null;
                }
                if (typeof value === 'string' || typeof value === 'number' || typeof value === 'bigint') {
                  return value;
                }
                if (Buffer.isBuffer(value)) {
                  return value;
                }
                if (value instanceof Date) {
                  return value.toISOString();
                }
                if (typeof value === 'boolean') {
                  return value ? 1 : 0;
                }
                // Convert objects and arrays to JSON strings
                if (typeof value === 'object') {
                  return JSON.stringify(value);
                }
                // Fallback: convert to string
                return String(value);
              });
              insertStmt.run(values);
            }
          }
          totalRecords += processedRecords.length;
          databaseLogger.debug(`SQLite table ${tableInfo.name} exported`, {
            operation: 'table_sqlite_export_complete',
            table: tableInfo.name,
            recordCount: processedRecords.length
          });
        } catch (error) {
          databaseLogger.error(`Failed to export SQLite table ${tableInfo.name}`, error, {
            operation: 'table_sqlite_export_failed',
            table: tableInfo.name
          });
          throw error;
        }
      }
      // Update and store metadata
      exportMetadata.tableCount = tablesToExport.length;
      exportMetadata.recordCount = totalRecords;
      const insertMetadata = exportDb.prepare(`INSERT INTO ${this.METADATA_TABLE} (key, value) VALUES (?, ?)`);
      insertMetadata.run('metadata', JSON.stringify(exportMetadata));
      // Close export database
      exportDb.close();
      databaseLogger.success('SQLite database export completed successfully', {
        operation: 'database_sqlite_export_complete',
        exportId,
        exportPath: actualExportPath,
        tableCount: exportMetadata.tableCount,
        recordCount: exportMetadata.recordCount,
        fileSize: fs.statSync(actualExportPath).size
      });
      return actualExportPath;
    } catch (error) {
      databaseLogger.error('SQLite database export failed', error, {
        operation: 'database_sqlite_export_failed',
        exportId,
        exportPath: actualExportPath
      });
      throw new Error(`SQLite database export failed: ${error instanceof Error ? error.message : 'Unknown error'}`);
    }
  }

  /**
   * Import database from SQLite export
   * Re-encrypts fields for the current hardware
   *
   * Import is additive: existing rows are preserved and imported ssh_data
   * rows are reassigned to the current admin user so foreign keys resolve.
   *
   * @param importPath - path to a `.termix-export.sqlite` file
   * @param options.replaceExisting - accepted but currently not used to clear
   *                                  tables (additive strategy is always used)
   * @param options.backupCurrent - when true, copies the current DB file into
   *                                a backups directory first (default true)
   * @returns counters plus collected errors/warnings; success means no errors
   * @throws Error if the file is missing, lacks metadata, has an unsupported
   *         version, or no admin user exists in the current database
   */
  static async importDatabase(importPath: string, options: {
    replaceExisting?: boolean;
    backupCurrent?: boolean;
  } = {}): Promise<ImportResult> {
    const { replaceExisting = false, backupCurrent = true } = options;
    if (!fs.existsSync(importPath)) {
      throw new Error(`Import file does not exist: ${importPath}`);
    }
    try {
      databaseLogger.info('Starting SQLite database import from export', {
        operation: 'database_sqlite_import',
        importPath,
        replaceExisting,
        backupCurrent
      });
      // Open import database
      const importDb = new Database(importPath, { readonly: true });
      // Validate export format
      const metadataResult = importDb.prepare(`
        SELECT value FROM ${this.METADATA_TABLE} WHERE key = 'metadata'
      `).get() as { value: string } | undefined;
      if (!metadataResult) {
        throw new Error('Invalid export file: missing metadata');
      }
      const metadata: ExportMetadata = JSON.parse(metadataResult.value);
      if (metadata.version !== this.VERSION) {
        throw new Error(`Unsupported export version: ${metadata.version}`);
      }
      const result: ImportResult = {
        success: false,
        imported: { tables: 0, records: 0 },
        errors: [],
        warnings: []
      };
      // Get current admin user to assign imported SSH records
      const adminUser = await db.select().from(users).where(eq(users.is_admin, true)).limit(1);
      if (adminUser.length === 0) {
        throw new Error('No admin user found in current database');
      }
      const currentAdminUserId = adminUser[0].id;
      databaseLogger.debug(`Starting SSH data import - assigning to admin user ${currentAdminUserId}`, {
        operation: 'ssh_data_import_start',
        adminUserId: currentAdminUserId
      });
      // Create backup if requested; a backup failure is only a warning.
      if (backupCurrent) {
        try {
          const backupPath = await this.createCurrentDatabaseBackup();
          databaseLogger.info('Current database backed up before import', {
            operation: 'import_backup',
            backupPath
          });
        } catch (error) {
          const warningMsg = `Failed to create backup: ${error instanceof Error ? error.message : 'Unknown error'}`;
          result.warnings.push(warningMsg);
          databaseLogger.warn('Failed to create pre-import backup', {
            operation: 'import_backup_failed',
            error: warningMsg
          });
        }
      }
      // Get list of tables to import (excluding metadata table).
      // NOTE(review): table names come from the uploaded file and are
      // interpolated into SQL below; only known tables are ever inserted into,
      // but the SELECT uses the name as-is.
      const tables = importDb.prepare(`
        SELECT name FROM sqlite_master
        WHERE type='table' AND name != '${this.METADATA_TABLE}'
      `).all() as { name: string }[];
      // Import data table by table
      for (const tableRow of tables) {
        const tableName = tableRow.name;
        try {
          databaseLogger.debug(`Importing SQLite table: ${tableName}`, {
            operation: 'table_sqlite_import',
            table: tableName
          });
          // Use additive import - don't clear existing data
          // This preserves all current data including admin SSH connections
          databaseLogger.debug(`Using additive import for ${tableName}`, {
            operation: 'table_additive_import',
            table: tableName
          });
          // Get all records from import table
          const records = importDb.prepare(`SELECT * FROM ${tableName}`).all();
          // Process and encrypt records; per-record failures accumulate in
          // result.errors without stopping the table.
          for (const record of records) {
            try {
              // Import all SSH data without user filtering
              // Map database column names to TypeScript field names
              const mappedRecord: any = {};
              const columnToFieldMappings: Record<string, string> = {
                'user_id': 'userId',
                'auth_type': 'authType',
                'require_password': 'requirePassword',
                'key_password': 'keyPassword',
                'key_type': 'keyType',
                'credential_id': 'credentialId',
                'enable_terminal': 'enableTerminal',
                'enable_tunnel': 'enableTunnel',
                'tunnel_connections': 'tunnelConnections',
                'enable_file_manager': 'enableFileManager',
                'default_path': 'defaultPath',
                'created_at': 'createdAt',
                'updated_at': 'updatedAt',
                'key_content': 'keyContent'
              };
              // Convert database column names to TypeScript field names
              for (const [dbColumn, value] of Object.entries(record)) {
                const tsField = columnToFieldMappings[dbColumn] || dbColumn;
                mappedRecord[tsField] = value;
              }
              // Assign imported SSH records to current admin user to avoid foreign key constraint
              if (tableName === 'ssh_data' && mappedRecord.userId) {
                const originalUserId = mappedRecord.userId;
                mappedRecord.userId = currentAdminUserId;
                databaseLogger.debug(`Reassigned SSH record from user ${originalUserId} to admin ${currentAdminUserId}`, {
                  operation: 'user_reassignment',
                  originalUserId,
                  newUserId: currentAdminUserId
                });
              }
              // Re-encrypt sensitive fields for current hardware
              const processedRecord = DatabaseEncryption.encryptRecord(tableName, mappedRecord);
              // Insert record using Drizzle; unknown table names fall through
              // without an insert (only ssh_data/ssh_credentials are handled).
              try {
                if (tableName === 'ssh_data') {
                  await db.insert(sshData).values(processedRecord).onConflictDoNothing();
                } else if (tableName === 'ssh_credentials') {
                  await db.insert(sshCredentials).values(processedRecord).onConflictDoNothing();
                }
              } catch (error) {
                // Handle any SQL errors gracefully
                if (error instanceof Error && error.message.includes('UNIQUE constraint failed')) {
                  databaseLogger.debug(`Skipping duplicate record in ${tableName}`, {
                    operation: 'duplicate_record_skip',
                    table: tableName
                  });
                  continue;
                }
                throw error;
              }
            } catch (error) {
              const errorMsg = `Failed to import record in ${tableName}: ${error instanceof Error ? error.message : 'Unknown error'}`;
              result.errors.push(errorMsg);
              databaseLogger.error('Failed to import record', error, {
                operation: 'record_sqlite_import_failed',
                table: tableName,
                recordId: (record as any).id
              });
            }
          }
          result.imported.tables++;
          // NOTE(review): counts records read from the export, including any
          // that failed to insert above.
          result.imported.records += records.length;
          databaseLogger.debug(`SQLite table ${tableName} imported`, {
            operation: 'table_sqlite_import_complete',
            table: tableName,
            recordCount: records.length
          });
        } catch (error) {
          const errorMsg = `Failed to import table ${tableName}: ${error instanceof Error ? error.message : 'Unknown error'}`;
          result.errors.push(errorMsg);
          databaseLogger.error('Failed to import SQLite table', error, {
            operation: 'table_sqlite_import_failed',
            table: tableName
          });
        }
      }
      // Close import database
      importDb.close();
      // Check if import was successful
      result.success = result.errors.length === 0;
      if (result.success) {
        databaseLogger.success('SQLite database import completed successfully', {
          operation: 'database_sqlite_import_complete',
          importPath,
          tablesImported: result.imported.tables,
          recordsImported: result.imported.records,
          warnings: result.warnings.length
        });
      } else {
        databaseLogger.error('SQLite database import completed with errors', undefined, {
          operation: 'database_sqlite_import_partial',
          importPath,
          tablesImported: result.imported.tables,
          recordsImported: result.imported.records,
          errorCount: result.errors.length,
          warningCount: result.warnings.length
        });
      }
      return result;
    } catch (error) {
      databaseLogger.error('SQLite database import failed', error, {
        operation: 'database_sqlite_import_failed',
        importPath
      });
      throw new Error(`SQLite database import failed: ${error instanceof Error ? error.message : 'Unknown error'}`);
    }
  }

  /**
   * Validate SQLite export file
   *
   * Checks existence, extension, readable metadata, and version. Never
   * throws; problems are reported via the returned errors array.
   *
   * @param exportPath - path of the export file to validate
   * @returns validity flag, parsed metadata (when valid), and error messages
   */
  static validateExportFile(exportPath: string): {
    valid: boolean;
    metadata?: ExportMetadata;
    errors: string[];
  } {
    const result = {
      valid: false,
      metadata: undefined as ExportMetadata | undefined,
      errors: [] as string[]
    };
    try {
      if (!fs.existsSync(exportPath)) {
        result.errors.push('Export file does not exist');
        return result;
      }
      if (!exportPath.endsWith(this.EXPORT_FILE_EXTENSION)) {
        result.errors.push('Invalid export file extension');
        return result;
      }
      const exportDb = new Database(exportPath, { readonly: true });
      try {
        const metadataResult = exportDb.prepare(`
          SELECT value FROM ${this.METADATA_TABLE} WHERE key = 'metadata'
        `).get() as { value: string } | undefined;
        if (!metadataResult) {
          result.errors.push('Missing export metadata');
          return result;
        }
        const metadata: ExportMetadata = JSON.parse(metadataResult.value);
        if (metadata.version !== this.VERSION) {
          result.errors.push(`Unsupported export version: ${metadata.version}`);
          return result;
        }
        result.valid = true;
        result.metadata = metadata;
      } finally {
        // Always release the SQLite handle, including on early returns.
        exportDb.close();
      }
      return result;
    } catch (error) {
      result.errors.push(`Failed to validate export file: ${error instanceof Error ? error.message : 'Unknown error'}`);
      return result;
    }
  }

  /**
   * Get export file info without importing
   *
   * @returns the export's metadata when the file validates, otherwise null
   */
  static getExportInfo(exportPath: string): ExportMetadata | null {
    const validation = this.validateExportFile(exportPath);
    return validation.valid ? validation.metadata! : null;
  }

  /**
   * Create backup of current database
   *
   * Plain (unencrypted) file copy of the main database into
   * `<database dir>/backups`, named with a timestamp.
   *
   * @returns path of the backup file
   */
  private static async createCurrentDatabaseBackup(): Promise<string> {
    const timestamp = new Date().toISOString().replace(/[:.]/g, '-');
    const backupDir = path.join(databasePaths.directory, 'backups');
    if (!fs.existsSync(backupDir)) {
      fs.mkdirSync(backupDir, { recursive: true });
    }
    // Create SQLite backup
    const backupPath = path.join(backupDir, `database-backup-${timestamp}.sqlite`);
    // Copy current database file
    fs.copyFileSync(databasePaths.main, backupPath);
    return backupPath;
  }

  /**
   * Get table schema for database operations
   * NOTE: This method is deprecated - we now use raw SQL to avoid FK issues
   */
  private static getTableSchema(tableName: string) {
    return null; // No longer used
  }

  /**
   * Check if a field should be tracked as encrypted
   *
   * Thin wrapper around FieldEncryption.shouldEncryptField that converts any
   * thrown error into false.
   */
  private static shouldTrackEncryptedField(tableName: string, fieldName: string): boolean {
    try {
      return FieldEncryption.shouldEncryptField(tableName, fieldName);
    } catch {
      return false;
    }
  }
}
export { DatabaseSQLiteExport };
export type { ExportMetadata, ImportResult };

View File

@@ -0,0 +1,369 @@
import crypto from 'crypto';
import os from 'os';
import { execSync } from 'child_process';
import fs from 'fs';
import { databaseLogger } from './logger.js';
// Best-effort hardware identifiers gathered per platform; any field may be
// undefined when detection of that component fails or is unsupported.
interface HardwareInfo {
  cpuId?: string;           // CPU identifier (source varies by OS)
  motherboardUuid?: string; // DMI / system product UUID
  diskSerial?: string;      // serial of the root/primary disk (platform dependent)
  biosSerial?: string;      // baseboard/BIOS serial number
  tpmInfo?: string;         // NOTE(review): never populated in this file — confirm intent
  macAddresses?: string[];  // MACs of non-virtual NICs, sorted for determinism
}
/**
 * Hardware fingerprint generator — derives a stable device fingerprint from
 * real hardware characteristics (motherboard UUID, CPU, BIOS/disk serials,
 * MAC addresses).
 * Compared with a software-environment fingerprint, a hardware-based one is
 * more stable in virtualized and containerized environments.
 */
class HardwareFingerprint {
  // NOTE(review): CACHE_KEY is never read in this class — confirm whether it
  // is dead code or reserved for a future persistent cache.
  private static readonly CACHE_KEY = 'cached_hardware_fingerprint';
  // Per-process memoized fingerprint; reset only via clearCache().
  private static cachedFingerprint: string | null = null;
  /**
   * Generate the hardware fingerprint.
   * Resolution priority: in-process cache > TERMIX_HARDWARE_SEED environment
   * variable > live hardware detection. If detection throws, falls back to a
   * software-environment fingerprint.
   */
  static generate(): string {
    try {
      // 1. Reuse the memoized value when available.
      if (this.cachedFingerprint) {
        return this.cachedFingerprint;
      }
      // 2. Environment-variable override (must be at least 32 characters).
      const envFingerprint = process.env.TERMIX_HARDWARE_SEED;
      if (envFingerprint && envFingerprint.length >= 32) {
        databaseLogger.info('Using hardware seed from environment variable', {
          operation: 'hardware_fingerprint_env'
        });
        this.cachedFingerprint = this.hashFingerprint(envFingerprint);
        return this.cachedFingerprint;
      }
      // 3. Detect real hardware identifiers and hash them together.
      const hwInfo = this.detectHardwareInfo();
      const fingerprint = this.generateFromHardware(hwInfo);
      this.cachedFingerprint = fingerprint;
      databaseLogger.info('Generated hardware fingerprint', {
        operation: 'hardware_fingerprint_generation',
        fingerprintPrefix: fingerprint.substring(0, 8),
        detectedComponents: Object.keys(hwInfo).filter(key => hwInfo[key as keyof HardwareInfo])
      });
      return fingerprint;
    } catch (error) {
      databaseLogger.error('Hardware fingerprint generation failed', error, {
        operation: 'hardware_fingerprint_failed'
      });
      // Fall back to a basic environment fingerprint.
      return this.generateFallbackFingerprint();
    }
  }
  /**
   * Detect hardware information for the current platform.
   * Every component is best-effort: failures leave the field undefined.
   */
  private static detectHardwareInfo(): HardwareInfo {
    const platform = os.platform();
    const hwInfo: HardwareInfo = {};
    try {
      switch (platform) {
        case 'linux':
          hwInfo.cpuId = this.getLinuxCpuId();
          hwInfo.motherboardUuid = this.getLinuxMotherboardUuid();
          hwInfo.diskSerial = this.getLinuxDiskSerial();
          hwInfo.biosSerial = this.getLinuxBiosSerial();
          break;
        case 'win32':
          hwInfo.cpuId = this.getWindowsCpuId();
          hwInfo.motherboardUuid = this.getWindowsMotherboardUuid();
          hwInfo.diskSerial = this.getWindowsDiskSerial();
          hwInfo.biosSerial = this.getWindowsBiosSerial();
          break;
        case 'darwin':
          hwInfo.cpuId = this.getMacOSCpuId();
          hwInfo.motherboardUuid = this.getMacOSMotherboardUuid();
          hwInfo.diskSerial = this.getMacOSDiskSerial();
          hwInfo.biosSerial = this.getMacOSBiosSerial();
          break;
      }
      // MAC addresses are collected on every platform.
      hwInfo.macAddresses = this.getStableMacAddresses();
    } catch (error) {
      databaseLogger.error('Some hardware detection failed', error, {
        operation: 'hardware_detection_partial_failure',
        platform
      });
    }
    return hwInfo;
  }
  /**
   * Linux platform hardware lookups.
   */
  private static getLinuxCpuId(): string | undefined {
    try {
      // Try several sources for a CPU identifier; first non-empty hit wins.
      // NOTE(review): the /proc/cpuinfo regex captures the "processor" index
      // (almost always "0"), which is a weak identifier — confirm intent.
      const methods = [
        () => fs.readFileSync('/proc/cpuinfo', 'utf8').match(/processor\s*:\s*(\d+)/)?.[1],
        () => execSync('dmidecode -t processor | grep "ID:" | head -1', { encoding: 'utf8' }).trim(),
        () => execSync('cat /proc/cpuinfo | grep "cpu family\\|model\\|stepping" | md5sum', { encoding: 'utf8' }).split(' ')[0]
      ];
      for (const method of methods) {
        try {
          const result = method();
          if (result && result.length > 0) return result;
        } catch { /* try the next method */ }
      }
    } catch { /* ignore errors */ }
    return undefined;
  }
  private static getLinuxMotherboardUuid(): string | undefined {
    try {
      // Preferred: DMI product UUID.
      // NOTE(review): the /proc/sys/kernel/random/boot_id fallback changes on
      // every reboot, which would make the fingerprint unstable — confirm.
      const methods = [
        () => fs.readFileSync('/sys/class/dmi/id/product_uuid', 'utf8').trim(),
        () => fs.readFileSync('/proc/sys/kernel/random/boot_id', 'utf8').trim(),
        () => execSync('dmidecode -s system-uuid', { encoding: 'utf8' }).trim()
      ];
      for (const method of methods) {
        try {
          const result = method();
          if (result && result.length > 0 && result !== 'Not Settable') return result;
        } catch { /* try the next method */ }
      }
    } catch { /* ignore errors */ }
    return undefined;
  }
  private static getLinuxDiskSerial(): string | undefined {
    try {
      // Serial number of the disk that backs the root filesystem
      // (strip the trailing partition number from the df output).
      const rootDisk = execSync("df / | tail -1 | awk '{print $1}' | sed 's/[0-9]*$//'", { encoding: 'utf8' }).trim();
      if (rootDisk) {
        const serial = execSync(`udevadm info --name=${rootDisk} | grep ID_SERIAL= | cut -d= -f2`, { encoding: 'utf8' }).trim();
        if (serial && serial.length > 0) return serial;
      }
    } catch { /* ignore errors */ }
    return undefined;
  }
  private static getLinuxBiosSerial(): string | undefined {
    try {
      const methods = [
        () => fs.readFileSync('/sys/class/dmi/id/board_serial', 'utf8').trim(),
        () => execSync('dmidecode -s baseboard-serial-number', { encoding: 'utf8' }).trim()
      ];
      for (const method of methods) {
        try {
          const result = method();
          if (result && result.length > 0 && result !== 'Not Specified') return result;
        } catch { /* try the next method */ }
      }
    } catch { /* ignore errors */ }
    return undefined;
  }
  /**
   * Windows platform hardware lookups (via wmic).
   */
  private static getWindowsCpuId(): string | undefined {
    try {
      const result = execSync('wmic cpu get ProcessorId /value', { encoding: 'utf8' });
      const match = result.match(/ProcessorId=(.+)/);
      return match?.[1]?.trim();
    } catch { /* ignore errors */ }
    return undefined;
  }
  private static getWindowsMotherboardUuid(): string | undefined {
    try {
      const result = execSync('wmic csproduct get UUID /value', { encoding: 'utf8' });
      const match = result.match(/UUID=(.+)/);
      return match?.[1]?.trim();
    } catch { /* ignore errors */ }
    return undefined;
  }
  private static getWindowsDiskSerial(): string | undefined {
    try {
      const result = execSync('wmic diskdrive get SerialNumber /value', { encoding: 'utf8' });
      const match = result.match(/SerialNumber=(.+)/);
      return match?.[1]?.trim();
    } catch { /* ignore errors */ }
    return undefined;
  }
  private static getWindowsBiosSerial(): string | undefined {
    try {
      const result = execSync('wmic baseboard get SerialNumber /value', { encoding: 'utf8' });
      const match = result.match(/SerialNumber=(.+)/);
      return match?.[1]?.trim();
    } catch { /* ignore errors */ }
    return undefined;
  }
  /**
   * macOS platform hardware lookups (sysctl / system_profiler).
   */
  private static getMacOSCpuId(): string | undefined {
    try {
      const result = execSync('sysctl -n machdep.cpu.brand_string', { encoding: 'utf8' });
      return result.trim();
    } catch { /* ignore errors */ }
    return undefined;
  }
  private static getMacOSMotherboardUuid(): string | undefined {
    try {
      const result = execSync('system_profiler SPHardwareDataType | grep "Hardware UUID"', { encoding: 'utf8' });
      const match = result.match(/Hardware UUID:\s*(.+)/);
      return match?.[1]?.trim();
    } catch { /* ignore errors */ }
    return undefined;
  }
  private static getMacOSDiskSerial(): string | undefined {
    try {
      const result = execSync('system_profiler SPStorageDataType | grep "Serial Number"', { encoding: 'utf8' });
      const match = result.match(/Serial Number:\s*(.+)/);
      return match?.[1]?.trim();
    } catch { /* ignore errors */ }
    return undefined;
  }
  private static getMacOSBiosSerial(): string | undefined {
    try {
      const result = execSync('system_profiler SPHardwareDataType | grep "Serial Number"', { encoding: 'utf8' });
      const match = result.match(/Serial Number \(system\):\s*(.+)/);
      return match?.[1]?.trim();
    } catch { /* ignore errors */ }
    return undefined;
  }
  /**
   * Collect stable MAC addresses.
   * Excludes loopback, virtual, and ephemeral (Docker/bridge) interfaces.
   */
  private static getStableMacAddresses(): string[] {
    try {
      const networkInterfaces = os.networkInterfaces();
      const macAddresses: string[] = [];
      for (const [interfaceName, interfaces] of Object.entries(networkInterfaces)) {
        if (!interfaces) continue;
        // Skip loopback, Docker, veth, and bridge interfaces by name.
        if (interfaceName.match(/^(lo|docker|veth|br-|virbr)/)) continue;
        for (const iface of interfaces) {
          if (!iface.internal &&
              iface.mac &&
              iface.mac !== '00:00:00:00:00:00' &&
              !iface.mac.startsWith('02:42:')) { // Docker-assigned MAC prefix
            macAddresses.push(iface.mac);
          }
        }
      }
      return macAddresses.sort(); // sorted for deterministic ordering
    } catch {
      return [];
    }
  }
  /**
   * Combine the detected hardware identifiers into one fingerprint hash.
   * Component order is significant: changing it changes the fingerprint.
   * @throws when no identifier at all survives filtering
   */
  private static generateFromHardware(hwInfo: HardwareInfo): string {
    const components = [
      hwInfo.motherboardUuid, // most stable identifier
      hwInfo.cpuId,
      hwInfo.biosSerial,
      hwInfo.diskSerial,
      hwInfo.macAddresses?.join(','),
      os.platform(), // operating system platform
      os.arch() // CPU architecture
    ].filter(Boolean); // drop undefined/empty components
    // NOTE(review): os.platform()/os.arch() always yield truthy strings, so
    // this branch looks unreachable — confirm whether it is intentional.
    if (components.length === 0) {
      throw new Error('No hardware identifiers found');
    }
    return this.hashFingerprint(components.join('|'));
  }
  /**
   * Generate a fallback fingerprint (used when hardware detection fails):
   * hashes hostname/platform/arch/cwd plus a 'fallback-mode' marker.
   */
  private static generateFallbackFingerprint(): string {
    const fallbackComponents = [
      os.hostname(),
      os.platform(),
      os.arch(),
      process.cwd(),
      'fallback-mode'
    ];
    databaseLogger.warn('Using fallback fingerprint due to hardware detection failure', {
      operation: 'hardware_fingerprint_fallback'
    });
    return this.hashFingerprint(fallbackComponents.join('|'));
  }
  /**
   * Normalize any seed material into a hex-encoded SHA-256 digest.
   */
  private static hashFingerprint(data: string): string {
    return crypto.createHash('sha256').update(data).digest('hex');
  }
  /**
   * Get hardware fingerprint info (for debugging and display).
   * Returns the raw detected components plus a truncated 16-char fingerprint.
   */
  static getHardwareInfo(): HardwareInfo & { fingerprint: string } {
    const hwInfo = this.detectHardwareInfo();
    return {
      ...hwInfo,
      fingerprint: this.generate().substring(0, 16)
    };
  }
  /**
   * Check whether this machine reproduces the expected fingerprint.
   * Any generation error is reported as a mismatch (false).
   */
  static validateFingerprint(expectedFingerprint: string): boolean {
    try {
      const currentFingerprint = this.generate();
      return currentFingerprint === expectedFingerprint;
    } catch {
      return false;
    }
  }
  /**
   * Clear the memoized fingerprint (test helper).
   */
  static clearCache(): void {
    this.cachedFingerprint = null;
  }
}
export { HardwareFingerprint };
export type { HardwareInfo };

View File

@@ -1,8 +1,6 @@
import crypto from 'crypto';
import os from 'os';
import fs from 'fs';
import path from 'path';
import { databaseLogger } from './logger.js';
import { HardwareFingerprint } from './hardware-fingerprint.js';
interface ProtectedKeyData {
data: string;
@@ -19,64 +17,22 @@ class MasterKeyProtection {
private static generateDeviceFingerprint(): string {
try {
const features = [
os.hostname(),
os.platform(),
os.arch(),
process.cwd(),
this.getFileSystemFingerprint(),
this.getNetworkFingerprint()
];
const fingerprint = HardwareFingerprint.generate();
const fingerprint = crypto.createHash('sha256')
.update(features.join('|'))
.digest('hex');
databaseLogger.debug('Generated device fingerprint', {
operation: 'fingerprint_generation',
databaseLogger.debug('Generated hardware fingerprint', {
operation: 'hardware_fingerprint_generation',
fingerprintPrefix: fingerprint.substring(0, 8)
});
return fingerprint;
} catch (error) {
databaseLogger.error('Failed to generate device fingerprint', error, {
operation: 'fingerprint_generation_failed'
databaseLogger.error('Failed to generate hardware fingerprint', error, {
operation: 'hardware_fingerprint_generation_failed'
});
throw new Error('Device fingerprint generation failed');
throw new Error('Hardware fingerprint generation failed');
}
}
private static getFileSystemFingerprint(): string {
try {
const stat = fs.statSync(process.cwd());
return `${stat.ino}-${stat.dev}`;
} catch {
return 'fs-unknown';
}
}
private static getNetworkFingerprint(): string {
try {
const networkInterfaces = os.networkInterfaces();
const macAddresses = [];
for (const interfaceName in networkInterfaces) {
const interfaces = networkInterfaces[interfaceName];
if (interfaces) {
for (const iface of interfaces) {
if (!iface.internal && iface.mac && iface.mac !== '00:00:00:00:00:00') {
macAddresses.push(iface.mac);
}
}
}
}
// 使用第一个有效的MAC地址如果没有则使用fallback
return macAddresses.length > 0 ? macAddresses.sort()[0] : 'no-mac-found';
} catch {
return 'network-unknown';
}
}
private static deriveKEK(): Buffer {
@@ -91,7 +47,7 @@ class MasterKeyProtection {
'sha256'
);
databaseLogger.debug('Derived KEK from device fingerprint', {
databaseLogger.debug('Derived KEK from hardware fingerprint', {
operation: 'kek_derivation',
iterations: this.KEK_ITERATIONS
});
@@ -123,7 +79,7 @@ class MasterKeyProtection {
const result = JSON.stringify(protectedData);
databaseLogger.info('Master key encrypted with device KEK', {
databaseLogger.info('Master key encrypted with hardware KEK', {
operation: 'master_key_encryption',
version: this.VERSION,
fingerprintPrefix: protectedData.fingerprint
@@ -152,12 +108,12 @@ class MasterKeyProtection {
const currentFingerprint = this.generateDeviceFingerprint().substring(0, 16);
if (protectedData.fingerprint !== currentFingerprint) {
databaseLogger.warn('Device fingerprint mismatch detected', {
databaseLogger.warn('Hardware fingerprint mismatch detected', {
operation: 'master_key_decryption',
expected: protectedData.fingerprint,
current: currentFingerprint
});
throw new Error('Device fingerprint mismatch - key was encrypted on different machine');
throw new Error('Hardware fingerprint mismatch - key was encrypted on different hardware');
}
const kek = this.deriveKEK();