fix: Resolve database encryption atomicity issues and enhance debugging
This commit addresses critical data corruption issues caused by non-atomic file writes during database encryption, and adds comprehensive diagnostic logging to help debug encryption-related failures. **Problem:** Users reported "Unsupported state or unable to authenticate data" errors when starting the application after system crashes or Docker container restarts. The root cause was non-atomic writes of encrypted database files: 1. Encrypted data file written (step 1) 2. Metadata file written (step 2) → If process crashes between steps 1 and 2, files become inconsistent → New IV/tag in data file, old IV/tag in metadata → GCM authentication fails on next startup → User data permanently inaccessible **Solution - Atomic Writes:** 1. Write-to-temp + atomic-rename pattern: - Write to temporary files (*.tmp-timestamp-pid) - Perform atomic rename operations - Clean up temp files on failure 2. Data integrity validation: - Add dataSize field to metadata - Verify file size before decryption - Early detection of corrupted writes 3. 
Enhanced error diagnostics: - Key fingerprints (SHA256 prefix) for verification - File modification timestamps - Detailed GCM auth failure messages - Automatic diagnostic info generation **Changes:** database-file-encryption.ts: - Implement atomic write pattern in encryptDatabaseFromBuffer - Implement atomic write pattern in encryptDatabaseFile - Add dataSize field to EncryptedFileMetadata interface - Validate file size before decryption in decryptDatabaseToBuffer - Enhanced error messages for GCM auth failures - Add getDiagnosticInfo() function for comprehensive debugging - Add debug logging for all encryption/decryption operations system-crypto.ts: - Add detailed logging for DATABASE_KEY initialization - Log key source (env var vs .env file) - Add key fingerprints to all log messages - Better error messages when key loading fails db/index.ts: - Automatically generate diagnostic info on decryption failure - Log detailed debugging information to help users troubleshoot **Debugging Info Added:** - Key initialization: source, fingerprint, length, path - Encryption: original size, encrypted size, IV/tag prefixes, temp paths - Decryption: file timestamps, metadata content, key fingerprint matching - Auth failures: .env file status, key availability, file consistency - File diagnostics: existence, readability, size validation, mtime comparison **Backward Compatibility:** - dataSize field is optional (metadata.dataSize?: number) - Old encrypted files without dataSize continue to work - No migration required **Testing:** - Compiled successfully - No breaking changes to existing APIs - Graceful handling of legacy v1 encrypted files Fixes data loss issues reported by users experiencing container restarts and system crashes during database saves.
This commit is contained in:
@@ -95,6 +95,32 @@ async function initializeDatabaseAsync(): Promise<void> {
|
||||
databaseKeyLength: process.env.DATABASE_KEY?.length || 0,
|
||||
});
|
||||
|
||||
try {
|
||||
databaseLogger.info(
|
||||
"Generating diagnostic information for database encryption failure",
|
||||
{
|
||||
operation: "db_encryption_diagnostic",
|
||||
},
|
||||
);
|
||||
const diagnosticInfo =
|
||||
DatabaseFileEncryption.getDiagnosticInfo(encryptedDbPath);
|
||||
databaseLogger.error(
|
||||
"Database encryption diagnostic completed - check logs above for details",
|
||||
null,
|
||||
{
|
||||
operation: "db_encryption_diagnostic_completed",
|
||||
filesConsistent: diagnosticInfo.validation.filesConsistent,
|
||||
sizeMismatch: diagnosticInfo.validation.sizeMismatch,
|
||||
},
|
||||
);
|
||||
} catch (diagError) {
|
||||
databaseLogger.warn("Failed to generate diagnostic information", {
|
||||
operation: "db_diagnostic_failed",
|
||||
error:
|
||||
diagError instanceof Error ? diagError.message : "Unknown error",
|
||||
});
|
||||
}
|
||||
|
||||
throw new Error(
|
||||
`Database decryption failed: ${error instanceof Error ? error.message : "Unknown error"}. This prevents data loss.`,
|
||||
);
|
||||
|
||||
@@ -12,6 +12,7 @@ interface EncryptedFileMetadata {
|
||||
algorithm: string;
|
||||
keySource?: string;
|
||||
salt?: string;
|
||||
dataSize?: number;
|
||||
}
|
||||
|
||||
class DatabaseFileEncryption {
|
||||
@@ -25,6 +26,10 @@ class DatabaseFileEncryption {
|
||||
buffer: Buffer,
|
||||
targetPath: string,
|
||||
): Promise<string> {
|
||||
const tmpPath = `${targetPath}.tmp-${Date.now()}-${process.pid}`;
|
||||
const tmpMetadataPath = `${tmpPath}${this.METADATA_FILE_SUFFIX}`;
|
||||
const metadataPath = `${targetPath}${this.METADATA_FILE_SUFFIX}`;
|
||||
|
||||
try {
|
||||
const key = await this.systemCrypto.getDatabaseKey();
|
||||
|
||||
@@ -38,6 +43,12 @@ class DatabaseFileEncryption {
|
||||
const encrypted = Buffer.concat([cipher.update(buffer), cipher.final()]);
|
||||
const tag = cipher.getAuthTag();
|
||||
|
||||
const keyFingerprint = crypto
|
||||
.createHash("sha256")
|
||||
.update(key)
|
||||
.digest("hex")
|
||||
.substring(0, 16);
|
||||
|
||||
const metadata: EncryptedFileMetadata = {
|
||||
iv: iv.toString("hex"),
|
||||
tag: tag.toString("hex"),
|
||||
@@ -45,14 +56,69 @@ class DatabaseFileEncryption {
|
||||
fingerprint: "termix-v2-systemcrypto",
|
||||
algorithm: this.ALGORITHM,
|
||||
keySource: "SystemCrypto",
|
||||
dataSize: encrypted.length,
|
||||
};
|
||||
|
||||
const metadataPath = `${targetPath}${this.METADATA_FILE_SUFFIX}`;
|
||||
fs.writeFileSync(targetPath, encrypted);
|
||||
fs.writeFileSync(metadataPath, JSON.stringify(metadata, null, 2));
|
||||
databaseLogger.debug("Starting atomic encryption write", {
|
||||
operation: "database_buffer_encryption_start",
|
||||
targetPath,
|
||||
tmpPath,
|
||||
originalSize: buffer.length,
|
||||
encryptedSize: encrypted.length,
|
||||
keyFingerprint,
|
||||
ivPrefix: metadata.iv.substring(0, 8),
|
||||
tagPrefix: metadata.tag.substring(0, 8),
|
||||
});
|
||||
|
||||
fs.writeFileSync(tmpPath, encrypted);
|
||||
fs.writeFileSync(tmpMetadataPath, JSON.stringify(metadata, null, 2));
|
||||
|
||||
databaseLogger.debug(
|
||||
"Temporary files written, performing atomic rename",
|
||||
{
|
||||
operation: "database_buffer_encryption_rename",
|
||||
tmpPath,
|
||||
targetPath,
|
||||
},
|
||||
);
|
||||
|
||||
if (fs.existsSync(targetPath)) {
|
||||
fs.unlinkSync(targetPath);
|
||||
}
|
||||
fs.renameSync(tmpPath, targetPath);
|
||||
|
||||
if (fs.existsSync(metadataPath)) {
|
||||
fs.unlinkSync(metadataPath);
|
||||
}
|
||||
fs.renameSync(tmpMetadataPath, metadataPath);
|
||||
|
||||
databaseLogger.debug("Database buffer encrypted with atomic write", {
|
||||
operation: "database_buffer_encryption_atomic",
|
||||
targetPath,
|
||||
encryptedSize: encrypted.length,
|
||||
keyFingerprint,
|
||||
});
|
||||
|
||||
return targetPath;
|
||||
} catch (error) {
|
||||
try {
|
||||
if (fs.existsSync(tmpPath)) {
|
||||
fs.unlinkSync(tmpPath);
|
||||
}
|
||||
if (fs.existsSync(tmpMetadataPath)) {
|
||||
fs.unlinkSync(tmpMetadataPath);
|
||||
}
|
||||
} catch (cleanupError) {
|
||||
databaseLogger.warn("Failed to cleanup temporary files", {
|
||||
operation: "temp_file_cleanup_failed",
|
||||
tmpPath,
|
||||
error:
|
||||
cleanupError instanceof Error
|
||||
? cleanupError.message
|
||||
: "Unknown error",
|
||||
});
|
||||
}
|
||||
|
||||
databaseLogger.error("Failed to encrypt database buffer", error, {
|
||||
operation: "database_buffer_encryption_failed",
|
||||
targetPath,
|
||||
@@ -74,6 +140,8 @@ class DatabaseFileEncryption {
|
||||
const encryptedPath =
|
||||
targetPath || `${sourcePath}${this.ENCRYPTED_FILE_SUFFIX}`;
|
||||
const metadataPath = `${encryptedPath}${this.METADATA_FILE_SUFFIX}`;
|
||||
const tmpPath = `${encryptedPath}.tmp-${Date.now()}-${process.pid}`;
|
||||
const tmpMetadataPath = `${tmpPath}${this.METADATA_FILE_SUFFIX}`;
|
||||
|
||||
try {
|
||||
const sourceData = fs.readFileSync(sourcePath);
|
||||
@@ -93,6 +161,12 @@ class DatabaseFileEncryption {
|
||||
]);
|
||||
const tag = cipher.getAuthTag();
|
||||
|
||||
const keyFingerprint = crypto
|
||||
.createHash("sha256")
|
||||
.update(key)
|
||||
.digest("hex")
|
||||
.substring(0, 16);
|
||||
|
||||
const metadata: EncryptedFileMetadata = {
|
||||
iv: iv.toString("hex"),
|
||||
tag: tag.toString("hex"),
|
||||
@@ -100,10 +174,41 @@ class DatabaseFileEncryption {
|
||||
fingerprint: "termix-v2-systemcrypto",
|
||||
algorithm: this.ALGORITHM,
|
||||
keySource: "SystemCrypto",
|
||||
dataSize: encrypted.length,
|
||||
};
|
||||
|
||||
fs.writeFileSync(encryptedPath, encrypted);
|
||||
fs.writeFileSync(metadataPath, JSON.stringify(metadata, null, 2));
|
||||
databaseLogger.debug("Starting atomic file encryption", {
|
||||
operation: "database_file_encryption_start",
|
||||
sourcePath,
|
||||
encryptedPath,
|
||||
tmpPath,
|
||||
originalSize: sourceData.length,
|
||||
encryptedSize: encrypted.length,
|
||||
keyFingerprint,
|
||||
ivPrefix: metadata.iv.substring(0, 8),
|
||||
});
|
||||
|
||||
fs.writeFileSync(tmpPath, encrypted);
|
||||
fs.writeFileSync(tmpMetadataPath, JSON.stringify(metadata, null, 2));
|
||||
|
||||
databaseLogger.debug(
|
||||
"Temporary files written, performing atomic rename",
|
||||
{
|
||||
operation: "database_file_encryption_rename",
|
||||
tmpPath,
|
||||
encryptedPath,
|
||||
},
|
||||
);
|
||||
|
||||
if (fs.existsSync(encryptedPath)) {
|
||||
fs.unlinkSync(encryptedPath);
|
||||
}
|
||||
fs.renameSync(tmpPath, encryptedPath);
|
||||
|
||||
if (fs.existsSync(metadataPath)) {
|
||||
fs.unlinkSync(metadataPath);
|
||||
}
|
||||
fs.renameSync(tmpMetadataPath, metadataPath);
|
||||
|
||||
databaseLogger.info("Database file encrypted successfully", {
|
||||
operation: "database_file_encryption",
|
||||
@@ -111,11 +216,30 @@ class DatabaseFileEncryption {
|
||||
encryptedPath,
|
||||
fileSize: sourceData.length,
|
||||
encryptedSize: encrypted.length,
|
||||
keyFingerprint,
|
||||
fingerprintPrefix: metadata.fingerprint,
|
||||
});
|
||||
|
||||
return encryptedPath;
|
||||
} catch (error) {
|
||||
try {
|
||||
if (fs.existsSync(tmpPath)) {
|
||||
fs.unlinkSync(tmpPath);
|
||||
}
|
||||
if (fs.existsSync(tmpMetadataPath)) {
|
||||
fs.unlinkSync(tmpMetadataPath);
|
||||
}
|
||||
} catch (cleanupError) {
|
||||
databaseLogger.warn("Failed to cleanup temporary files", {
|
||||
operation: "temp_file_cleanup_failed",
|
||||
tmpPath,
|
||||
error:
|
||||
cleanupError instanceof Error
|
||||
? cleanupError.message
|
||||
: "Unknown error",
|
||||
});
|
||||
}
|
||||
|
||||
databaseLogger.error("Failed to encrypt database file", error, {
|
||||
operation: "database_file_encryption_failed",
|
||||
sourcePath,
|
||||
@@ -140,11 +264,56 @@ class DatabaseFileEncryption {
|
||||
}
|
||||
|
||||
try {
|
||||
const dataFileStats = fs.statSync(encryptedPath);
|
||||
const metaFileStats = fs.statSync(metadataPath);
|
||||
|
||||
databaseLogger.debug("Starting database decryption", {
|
||||
operation: "database_buffer_decryption_start",
|
||||
encryptedPath,
|
||||
metadataPath,
|
||||
dataFileSize: dataFileStats.size,
|
||||
dataFileMtime: dataFileStats.mtime.toISOString(),
|
||||
metaFileMtime: metaFileStats.mtime.toISOString(),
|
||||
dataDir: process.env.DATA_DIR || "./db/data",
|
||||
});
|
||||
|
||||
const metadataContent = fs.readFileSync(metadataPath, "utf8");
|
||||
const metadata: EncryptedFileMetadata = JSON.parse(metadataContent);
|
||||
|
||||
databaseLogger.debug("Metadata loaded", {
|
||||
operation: "database_metadata_loaded",
|
||||
version: metadata.version,
|
||||
algorithm: metadata.algorithm,
|
||||
keySource: metadata.keySource,
|
||||
fingerprint: metadata.fingerprint,
|
||||
hasDataSize: !!metadata.dataSize,
|
||||
expectedDataSize: metadata.dataSize,
|
||||
ivPrefix: metadata.iv?.substring(0, 8),
|
||||
tagPrefix: metadata.tag?.substring(0, 8),
|
||||
});
|
||||
|
||||
const encryptedData = fs.readFileSync(encryptedPath);
|
||||
|
||||
if (metadata.dataSize && encryptedData.length !== metadata.dataSize) {
|
||||
databaseLogger.error(
|
||||
"Encrypted file size mismatch - possible corrupted write or mismatched metadata",
|
||||
null,
|
||||
{
|
||||
operation: "database_file_size_mismatch",
|
||||
encryptedPath,
|
||||
actualSize: encryptedData.length,
|
||||
expectedSize: metadata.dataSize,
|
||||
difference: encryptedData.length - metadata.dataSize,
|
||||
dataFileMtime: dataFileStats.mtime.toISOString(),
|
||||
metaFileMtime: metaFileStats.mtime.toISOString(),
|
||||
},
|
||||
);
|
||||
throw new Error(
|
||||
`Encrypted file size mismatch: expected ${metadata.dataSize} bytes but got ${encryptedData.length} bytes. ` +
|
||||
`This indicates corrupted files or interrupted write operation.`,
|
||||
);
|
||||
}
|
||||
|
||||
let key: Buffer;
|
||||
if (metadata.version === "v2") {
|
||||
key = await this.systemCrypto.getDatabaseKey();
|
||||
@@ -167,6 +336,21 @@ class DatabaseFileEncryption {
|
||||
throw new Error(`Unsupported encryption version: ${metadata.version}`);
|
||||
}
|
||||
|
||||
const keyFingerprint = crypto
|
||||
.createHash("sha256")
|
||||
.update(key)
|
||||
.digest("hex")
|
||||
.substring(0, 16);
|
||||
|
||||
databaseLogger.debug("Starting decryption with loaded key", {
|
||||
operation: "database_decryption_attempt",
|
||||
keyFingerprint,
|
||||
algorithm: metadata.algorithm,
|
||||
ivPrefix: metadata.iv.substring(0, 8),
|
||||
tagPrefix: metadata.tag.substring(0, 8),
|
||||
dataSize: encryptedData.length,
|
||||
});
|
||||
|
||||
const decipher = crypto.createDecipheriv(
|
||||
metadata.algorithm,
|
||||
key,
|
||||
@@ -179,15 +363,74 @@ class DatabaseFileEncryption {
|
||||
decipher.final(),
|
||||
]);
|
||||
|
||||
databaseLogger.debug("Database decryption successful", {
|
||||
operation: "database_buffer_decryption_success",
|
||||
encryptedPath,
|
||||
encryptedSize: encryptedData.length,
|
||||
decryptedSize: decryptedBuffer.length,
|
||||
keyFingerprint,
|
||||
});
|
||||
|
||||
return decryptedBuffer;
|
||||
} catch (error) {
|
||||
const errorMessage =
|
||||
error instanceof Error ? error.message : "Unknown error";
|
||||
const isAuthError =
|
||||
errorMessage.includes("Unsupported state") ||
|
||||
errorMessage.includes("authenticate data") ||
|
||||
errorMessage.includes("auth");
|
||||
|
||||
if (isAuthError) {
|
||||
const dataDir = process.env.DATA_DIR || "./db/data";
|
||||
const envPath = path.join(dataDir, ".env");
|
||||
|
||||
let envFileExists = false;
|
||||
let envFileReadable = false;
|
||||
try {
|
||||
envFileExists = fs.existsSync(envPath);
|
||||
if (envFileExists) {
|
||||
fs.accessSync(envPath, fs.constants.R_OK);
|
||||
envFileReadable = true;
|
||||
}
|
||||
} catch {}
|
||||
|
||||
databaseLogger.error(
|
||||
"Database decryption authentication failed - possible causes: wrong DATABASE_KEY, corrupted files, or interrupted write",
|
||||
error,
|
||||
{
|
||||
operation: "database_buffer_decryption_auth_failed",
|
||||
encryptedPath,
|
||||
metadataPath,
|
||||
dataDir,
|
||||
envPath,
|
||||
envFileExists,
|
||||
envFileReadable,
|
||||
hasEnvKey: !!process.env.DATABASE_KEY,
|
||||
envKeyLength: process.env.DATABASE_KEY?.length || 0,
|
||||
suggestion:
|
||||
"Check if DATABASE_KEY in .env matches the key used for encryption",
|
||||
},
|
||||
);
|
||||
throw new Error(
|
||||
`Database decryption authentication failed. This usually means:\n` +
|
||||
`1. DATABASE_KEY has changed or is missing from ${dataDir}/.env\n` +
|
||||
`2. Encrypted file was corrupted during write (system crash/restart)\n` +
|
||||
`3. Metadata file does not match encrypted data\n` +
|
||||
`\nDebug info:\n` +
|
||||
`- DATA_DIR: ${dataDir}\n` +
|
||||
`- .env file exists: ${envFileExists}\n` +
|
||||
`- .env file readable: ${envFileReadable}\n` +
|
||||
`- DATABASE_KEY in environment: ${!!process.env.DATABASE_KEY}\n` +
|
||||
`Original error: ${errorMessage}`,
|
||||
);
|
||||
}
|
||||
|
||||
databaseLogger.error("Failed to decrypt database to buffer", error, {
|
||||
operation: "database_buffer_decryption_failed",
|
||||
encryptedPath,
|
||||
errorMessage,
|
||||
});
|
||||
throw new Error(
|
||||
`Database buffer decryption failed: ${error instanceof Error ? error.message : "Unknown error"}`,
|
||||
);
|
||||
throw new Error(`Database buffer decryption failed: ${errorMessage}`);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -215,6 +458,23 @@ class DatabaseFileEncryption {
|
||||
|
||||
const encryptedData = fs.readFileSync(encryptedPath);
|
||||
|
||||
if (metadata.dataSize && encryptedData.length !== metadata.dataSize) {
|
||||
databaseLogger.error(
|
||||
"Encrypted file size mismatch - possible corrupted write or mismatched metadata",
|
||||
null,
|
||||
{
|
||||
operation: "database_file_size_mismatch",
|
||||
encryptedPath,
|
||||
actualSize: encryptedData.length,
|
||||
expectedSize: metadata.dataSize,
|
||||
},
|
||||
);
|
||||
throw new Error(
|
||||
`Encrypted file size mismatch: expected ${metadata.dataSize} bytes but got ${encryptedData.length} bytes. ` +
|
||||
`This indicates corrupted files or interrupted write operation.`,
|
||||
);
|
||||
}
|
||||
|
||||
let key: Buffer;
|
||||
if (metadata.version === "v2") {
|
||||
key = await this.systemCrypto.getDatabaseKey();
|
||||
@@ -322,6 +582,125 @@ class DatabaseFileEncryption {
|
||||
}
|
||||
}
|
||||
|
||||
static getDiagnosticInfo(encryptedPath: string): {
  dataFile: {
    exists: boolean;
    size?: number;
    mtime?: string;
    readable?: boolean;
  };
  metadataFile: {
    exists: boolean;
    size?: number;
    mtime?: string;
    readable?: boolean;
    content?: EncryptedFileMetadata;
  };
  environment: {
    dataDir: string;
    envPath: string;
    envFileExists: boolean;
    envFileReadable: boolean;
    hasEnvKey: boolean;
    envKeyLength: number;
  };
  validation: {
    filesConsistent: boolean;
    sizeMismatch?: boolean;
    expectedSize?: number;
    actualSize?: number;
  };
} {
  // Builds a point-in-time snapshot of the encrypted data file, its metadata
  // sidecar, and the key-related environment, so decryption/auth failures can
  // be debugged from logs alone. Never throws: probe errors are logged and
  // the partially-filled report is still returned.
  const metadataPath = `${encryptedPath}${this.METADATA_FILE_SUFFIX}`;
  const dataDir = process.env.DATA_DIR || "./db/data";
  const envPath = path.join(dataDir, ".env");
  const envKey = process.env.DATABASE_KEY;

  const report: ReturnType<typeof this.getDiagnosticInfo> = {
    dataFile: { exists: false },
    metadataFile: { exists: false },
    environment: {
      dataDir,
      envPath,
      envFileExists: false,
      envFileReadable: false,
      hasEnvKey: Boolean(envKey),
      envKeyLength: envKey?.length ?? 0,
    },
    validation: {
      filesConsistent: false,
    },
  };

  try {
    // Probe the encrypted data file: existence, read permission, size, mtime.
    report.dataFile.exists = fs.existsSync(encryptedPath);
    if (report.dataFile.exists) {
      try {
        fs.accessSync(encryptedPath, fs.constants.R_OK);
        report.dataFile.readable = true;
        const dataStats = fs.statSync(encryptedPath);
        report.dataFile.size = dataStats.size;
        report.dataFile.mtime = dataStats.mtime.toISOString();
      } catch {
        report.dataFile.readable = false;
      }
    }

    // Probe the metadata sidecar and attempt to parse its JSON payload.
    // NOTE: a parse/read failure after a successful stat deliberately flips
    // `readable` back to false while size/mtime stay populated — callers use
    // `readable === false` to mean "could not fully consume the file".
    report.metadataFile.exists = fs.existsSync(metadataPath);
    if (report.metadataFile.exists) {
      try {
        fs.accessSync(metadataPath, fs.constants.R_OK);
        report.metadataFile.readable = true;
        const metaStats = fs.statSync(metadataPath);
        report.metadataFile.size = metaStats.size;
        report.metadataFile.mtime = metaStats.mtime.toISOString();

        const rawMetadata = fs.readFileSync(metadataPath, "utf8");
        report.metadataFile.content = JSON.parse(rawMetadata);
      } catch {
        report.metadataFile.readable = false;
      }
    }

    // Check whether the .env file that may hold DATABASE_KEY is present and
    // readable by this process.
    report.environment.envFileExists = fs.existsSync(envPath);
    if (report.environment.envFileExists) {
      try {
        fs.accessSync(envPath, fs.constants.R_OK);
        report.environment.envFileReadable = true;
      } catch {}
    }

    // Cross-check: both files present with parseable metadata counts as
    // consistent, unless the recorded dataSize disagrees with the actual
    // on-disk size (the signature of an interrupted/corrupted write).
    const meta = report.metadataFile.content;
    if (report.dataFile.exists && report.metadataFile.exists && meta) {
      report.validation.filesConsistent = true;

      if (meta.dataSize) {
        report.validation.expectedSize = meta.dataSize;
        report.validation.actualSize = report.dataFile.size;
        report.validation.sizeMismatch = meta.dataSize !== report.dataFile.size;
        if (report.validation.sizeMismatch) {
          report.validation.filesConsistent = false;
        }
      }
    }
  } catch (probeError) {
    databaseLogger.error("Failed to generate diagnostic info", probeError, {
      operation: "diagnostic_info_failed",
      encryptedPath,
    });
  }

  databaseLogger.info("Database encryption diagnostic info", {
    operation: "diagnostic_info_generated",
    ...report,
  });

  return report;
}
|
||||
|
||||
static async createEncryptedBackup(
|
||||
databasePath: string,
|
||||
backupDir: string,
|
||||
|
||||
@@ -80,14 +80,34 @@ class SystemCrypto {
|
||||
|
||||
async initializeDatabaseKey(): Promise<void> {
|
||||
try {
|
||||
const dataDir = process.env.DATA_DIR || "./db/data";
|
||||
const envPath = path.join(dataDir, ".env");
|
||||
|
||||
const envKey = process.env.DATABASE_KEY;
|
||||
if (envKey && envKey.length >= 64) {
|
||||
this.databaseKey = Buffer.from(envKey, "hex");
|
||||
const keyFingerprint = crypto
|
||||
.createHash("sha256")
|
||||
.update(this.databaseKey)
|
||||
.digest("hex")
|
||||
.substring(0, 16);
|
||||
|
||||
databaseLogger.info("DATABASE_KEY loaded from environment variable", {
|
||||
operation: "db_key_loaded_from_env",
|
||||
keyFingerprint,
|
||||
keyLength: envKey.length,
|
||||
dataDir,
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
const dataDir = process.env.DATA_DIR || "./db/data";
|
||||
const envPath = path.join(dataDir, ".env");
|
||||
databaseLogger.debug(
|
||||
"DATABASE_KEY not found in environment, checking .env file",
|
||||
{
|
||||
operation: "db_key_checking_file",
|
||||
envPath,
|
||||
},
|
||||
);
|
||||
|
||||
try {
|
||||
const envContent = await fs.readFile(envPath, "utf8");
|
||||
@@ -95,14 +115,47 @@ class SystemCrypto {
|
||||
if (dbKeyMatch && dbKeyMatch[1] && dbKeyMatch[1].length >= 64) {
|
||||
this.databaseKey = Buffer.from(dbKeyMatch[1], "hex");
|
||||
process.env.DATABASE_KEY = dbKeyMatch[1];
|
||||
|
||||
const keyFingerprint = crypto
|
||||
.createHash("sha256")
|
||||
.update(this.databaseKey)
|
||||
.digest("hex")
|
||||
.substring(0, 16);
|
||||
|
||||
databaseLogger.info("DATABASE_KEY loaded from .env file", {
|
||||
operation: "db_key_loaded_from_file",
|
||||
keyFingerprint,
|
||||
keyLength: dbKeyMatch[1].length,
|
||||
envPath,
|
||||
});
|
||||
return;
|
||||
} else {
|
||||
databaseLogger.warn(
|
||||
"DATABASE_KEY found in .env but invalid or too short",
|
||||
{
|
||||
operation: "db_key_invalid_in_file",
|
||||
envPath,
|
||||
hasMatch: !!dbKeyMatch,
|
||||
keyLength: dbKeyMatch?.[1]?.length || 0,
|
||||
requiredLength: 64,
|
||||
},
|
||||
);
|
||||
}
|
||||
} catch {}
|
||||
} catch (fileError) {
|
||||
databaseLogger.warn("Failed to read .env file for DATABASE_KEY", {
|
||||
operation: "db_key_file_read_failed",
|
||||
envPath,
|
||||
error:
|
||||
fileError instanceof Error ? fileError.message : "Unknown error",
|
||||
willGenerateNew: true,
|
||||
});
|
||||
}
|
||||
|
||||
await this.generateAndGuideDatabaseKey();
|
||||
} catch (error) {
|
||||
databaseLogger.error("Failed to initialize database key", error, {
|
||||
operation: "db_key_init_failed",
|
||||
dataDir: process.env.DATA_DIR || "./db/data",
|
||||
});
|
||||
throw new Error("Database key initialization failed");
|
||||
}
|
||||
|
||||
Reference in New Issue
Block a user