fix: Resolve database encryption atomicity issues and enhance debugging (#430)

* fix: Resolve database encryption atomicity issues and enhance debugging

This commit addresses critical data corruption issues caused by non-atomic
file writes during database encryption, and adds comprehensive diagnostic
logging to help debug encryption-related failures.

**Problem:**
Users reported "Unsupported state or unable to authenticate data" errors
when starting the application after system crashes or Docker container
restarts. The root cause was non-atomic writes of encrypted database files:

1. Encrypted data file written (step 1)
2. Metadata file written (step 2)
→ If process crashes between steps 1 and 2, files become inconsistent
→ New IV/tag in data file, old IV/tag in metadata
→ GCM authentication fails on next startup
→ User data permanently inaccessible

**Solution - Atomic Writes:**

1. Write-to-temp + atomic-rename pattern:
   - Write to temporary files (*.tmp-timestamp-pid)
   - Perform atomic rename operations
   - Clean up temp files on failure

2. Data integrity validation:
   - Add dataSize field to metadata
   - Verify file size before decryption
   - Early detection of corrupted writes

3. Enhanced error diagnostics:
   - Key fingerprints (SHA256 prefix) for verification
   - File modification timestamps
   - Detailed GCM auth failure messages
   - Automatic diagnostic info generation

**Changes:**

database-file-encryption.ts:
- Implement atomic write pattern in encryptDatabaseFromBuffer
- Implement atomic write pattern in encryptDatabaseFile
- Add dataSize field to EncryptedFileMetadata interface
- Validate file size before decryption in decryptDatabaseToBuffer
- Enhanced error messages for GCM auth failures
- Add getDiagnosticInfo() function for comprehensive debugging
- Add debug logging for all encryption/decryption operations

system-crypto.ts:
- Add detailed logging for DATABASE_KEY initialization
- Log key source (env var vs .env file)
- Add key fingerprints to all log messages
- Better error messages when key loading fails

db/index.ts:
- Automatically generate diagnostic info on decryption failure
- Log detailed debugging information to help users troubleshoot

**Debugging Info Added:**

- Key initialization: source, fingerprint, length, path
- Encryption: original size, encrypted size, IV/tag prefixes, temp paths
- Decryption: file timestamps, metadata content, key fingerprint matching
- Auth failures: .env file status, key availability, file consistency
- File diagnostics: existence, readability, size validation, mtime comparison

**Backward Compatibility:**
- dataSize field is optional (metadata.dataSize?: number)
- Old encrypted files without dataSize continue to work
- No migration required

**Testing:**
- Compiled successfully
- No breaking changes to existing APIs
- Graceful handling of legacy v1 encrypted files

Fixes data loss issues reported by users experiencing container restarts
and system crashes during database saves.

* fix: Cleanup PR

* Update src/backend/utils/database-file-encryption.ts

Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>

* Update src/backend/utils/database-file-encryption.ts

Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>

* Update src/backend/utils/database-file-encryption.ts

Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>

* Update src/backend/utils/database-file-encryption.ts

Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>

* Update src/backend/utils/database-file-encryption.ts

Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>

---------

Co-authored-by: LukeGus <bugattiguy527@gmail.com>
Co-authored-by: Luke Gustafson <88517757+LukeGus@users.noreply.github.com>
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
This commit was merged in pull request #430.
This commit is contained in:
ZacharyZcR
2025-11-06 11:21:21 +08:00
committed by GitHub
parent 9ca7df6542
commit bccfd596b8
5 changed files with 366 additions and 36 deletions

View File

@@ -746,7 +746,6 @@ jobs:
mkdir -p homebrew-submission/Casks/t
cp homebrew/termix.rb homebrew-submission/Casks/t/termix.rb
cp homebrew/README.md homebrew-submission/
sed -i '' "s/VERSION_PLACEHOLDER/$VERSION/g" homebrew-submission/Casks/t/termix.rb
sed -i '' "s/CHECKSUM_PLACEHOLDER/$CHECKSUM/g" homebrew-submission/Casks/t/termix.rb

View File

@@ -95,6 +95,32 @@ async function initializeDatabaseAsync(): Promise<void> {
databaseKeyLength: process.env.DATABASE_KEY?.length || 0,
});
try {
databaseLogger.info(
"Generating diagnostic information for database encryption failure",
{
operation: "db_encryption_diagnostic",
},
);
const diagnosticInfo =
DatabaseFileEncryption.getDiagnosticInfo(encryptedDbPath);
databaseLogger.error(
"Database encryption diagnostic completed - check logs above for details",
null,
{
operation: "db_encryption_diagnostic_completed",
filesConsistent: diagnosticInfo.validation.filesConsistent,
sizeMismatch: diagnosticInfo.validation.sizeMismatch,
},
);
} catch (diagError) {
databaseLogger.warn("Failed to generate diagnostic information", {
operation: "db_diagnostic_failed",
error:
diagError instanceof Error ? diagError.message : "Unknown error",
});
}
throw new Error(
`Database decryption failed: ${error instanceof Error ? error.message : "Unknown error"}. This prevents data loss.`,
);

View File

@@ -12,6 +12,7 @@ interface EncryptedFileMetadata {
algorithm: string;
keySource?: string;
salt?: string;
dataSize?: number;
}
class DatabaseFileEncryption {
@@ -25,6 +26,10 @@ class DatabaseFileEncryption {
buffer: Buffer,
targetPath: string,
): Promise<string> {
const tmpPath = `${targetPath}.tmp-${Date.now()}-${process.pid}`;
const tmpMetadataPath = `${tmpPath}${this.METADATA_FILE_SUFFIX}`;
const metadataPath = `${targetPath}${this.METADATA_FILE_SUFFIX}`;
try {
const key = await this.systemCrypto.getDatabaseKey();
@@ -38,6 +43,12 @@ class DatabaseFileEncryption {
const encrypted = Buffer.concat([cipher.update(buffer), cipher.final()]);
const tag = cipher.getAuthTag();
const keyFingerprint = crypto
.createHash("sha256")
.update(key)
.digest("hex")
.substring(0, 16);
const metadata: EncryptedFileMetadata = {
iv: iv.toString("hex"),
tag: tag.toString("hex"),
@@ -45,14 +56,35 @@ class DatabaseFileEncryption {
fingerprint: "termix-v2-systemcrypto",
algorithm: this.ALGORITHM,
keySource: "SystemCrypto",
dataSize: encrypted.length,
};
const metadataPath = `${targetPath}${this.METADATA_FILE_SUFFIX}`;
fs.writeFileSync(targetPath, encrypted);
fs.writeFileSync(metadataPath, JSON.stringify(metadata, null, 2));
fs.writeFileSync(tmpPath, encrypted);
fs.writeFileSync(tmpMetadataPath, JSON.stringify(metadata, null, 2));
fs.renameSync(tmpPath, targetPath);
fs.renameSync(tmpMetadataPath, metadataPath);
return targetPath;
} catch (error) {
try {
if (fs.existsSync(tmpPath)) {
fs.unlinkSync(tmpPath);
}
if (fs.existsSync(tmpMetadataPath)) {
fs.unlinkSync(tmpMetadataPath);
}
} catch (cleanupError) {
databaseLogger.warn("Failed to cleanup temporary files", {
operation: "temp_file_cleanup_failed",
tmpPath,
error:
cleanupError instanceof Error
? cleanupError.message
: "Unknown error",
});
}
databaseLogger.error("Failed to encrypt database buffer", error, {
operation: "database_buffer_encryption_failed",
targetPath,
@@ -74,6 +106,8 @@ class DatabaseFileEncryption {
const encryptedPath =
targetPath || `${sourcePath}${this.ENCRYPTED_FILE_SUFFIX}`;
const metadataPath = `${encryptedPath}${this.METADATA_FILE_SUFFIX}`;
const tmpPath = `${encryptedPath}.tmp-${Date.now()}-${process.pid}`;
const tmpMetadataPath = `${tmpPath}${this.METADATA_FILE_SUFFIX}`;
try {
const sourceData = fs.readFileSync(sourcePath);
@@ -93,6 +127,12 @@ class DatabaseFileEncryption {
]);
const tag = cipher.getAuthTag();
const keyFingerprint = crypto
.createHash("sha256")
.update(key)
.digest("hex")
.substring(0, 16);
const metadata: EncryptedFileMetadata = {
iv: iv.toString("hex"),
tag: tag.toString("hex"),
@@ -100,10 +140,14 @@ class DatabaseFileEncryption {
fingerprint: "termix-v2-systemcrypto",
algorithm: this.ALGORITHM,
keySource: "SystemCrypto",
dataSize: encrypted.length,
};
fs.writeFileSync(encryptedPath, encrypted);
fs.writeFileSync(metadataPath, JSON.stringify(metadata, null, 2));
fs.writeFileSync(tmpPath, encrypted);
fs.writeFileSync(tmpMetadataPath, JSON.stringify(metadata, null, 2));
fs.renameSync(tmpPath, encryptedPath);
fs.renameSync(tmpMetadataPath, metadataPath);
databaseLogger.info("Database file encrypted successfully", {
operation: "database_file_encryption",
@@ -111,11 +155,30 @@ class DatabaseFileEncryption {
encryptedPath,
fileSize: sourceData.length,
encryptedSize: encrypted.length,
keyFingerprint,
fingerprintPrefix: metadata.fingerprint,
});
return encryptedPath;
} catch (error) {
try {
if (fs.existsSync(tmpPath)) {
fs.unlinkSync(tmpPath);
}
if (fs.existsSync(tmpMetadataPath)) {
fs.unlinkSync(tmpMetadataPath);
}
} catch (cleanupError) {
databaseLogger.warn("Failed to cleanup temporary files", {
operation: "temp_file_cleanup_failed",
tmpPath,
error:
cleanupError instanceof Error
? cleanupError.message
: "Unknown error",
});
}
databaseLogger.error("Failed to encrypt database file", error, {
operation: "database_file_encryption_failed",
sourcePath,
@@ -140,11 +203,34 @@ class DatabaseFileEncryption {
}
try {
const dataFileStats = fs.statSync(encryptedPath);
const metaFileStats = fs.statSync(metadataPath);
const metadataContent = fs.readFileSync(metadataPath, "utf8");
const metadata: EncryptedFileMetadata = JSON.parse(metadataContent);
const encryptedData = fs.readFileSync(encryptedPath);
if (metadata.dataSize !== undefined && encryptedData.length !== metadata.dataSize) {
databaseLogger.error(
"Encrypted file size mismatch - possible corrupted write or mismatched metadata",
null,
{
operation: "database_file_size_mismatch",
encryptedPath,
actualSize: encryptedData.length,
expectedSize: metadata.dataSize,
difference: encryptedData.length - metadata.dataSize,
dataFileMtime: dataFileStats.mtime.toISOString(),
metaFileMtime: metaFileStats.mtime.toISOString(),
},
);
throw new Error(
`Encrypted file size mismatch: expected ${metadata.dataSize} bytes but got ${encryptedData.length} bytes. ` +
`This indicates corrupted files or interrupted write operation.`,
);
}
let key: Buffer;
if (metadata.version === "v2") {
key = await this.systemCrypto.getDatabaseKey();
@@ -167,6 +253,12 @@ class DatabaseFileEncryption {
throw new Error(`Unsupported encryption version: ${metadata.version}`);
}
const keyFingerprint = crypto
.createHash("sha256")
.update(key)
.digest("hex")
.substring(0, 16);
const decipher = crypto.createDecipheriv(
metadata.algorithm,
key,
@@ -181,13 +273,64 @@ class DatabaseFileEncryption {
return decryptedBuffer;
} catch (error) {
const errorMessage =
error instanceof Error ? error.message : "Unknown error";
const isAuthError =
errorMessage.includes("Unsupported state") ||
errorMessage.includes("authenticate data") ||
errorMessage.includes("auth");
if (isAuthError) {
const dataDir = process.env.DATA_DIR || "./db/data";
const envPath = path.join(dataDir, ".env");
let envFileExists = false;
let envFileReadable = false;
try {
envFileExists = fs.existsSync(envPath);
if (envFileExists) {
fs.accessSync(envPath, fs.constants.R_OK);
envFileReadable = true;
}
} catch {}
databaseLogger.error(
"Database decryption authentication failed - possible causes: wrong DATABASE_KEY, corrupted files, or interrupted write",
error,
{
operation: "database_buffer_decryption_auth_failed",
encryptedPath,
metadataPath,
dataDir,
envPath,
envFileExists,
envFileReadable,
hasEnvKey: !!process.env.DATABASE_KEY,
envKeyLength: process.env.DATABASE_KEY?.length || 0,
suggestion:
"Check if DATABASE_KEY in .env matches the key used for encryption",
},
);
throw new Error(
`Database decryption authentication failed. This usually means:\n` +
`1. DATABASE_KEY has changed or is missing from ${dataDir}/.env\n` +
`2. Encrypted file was corrupted during write (system crash/restart)\n` +
`3. Metadata file does not match encrypted data\n` +
`\nDebug info:\n` +
`- DATA_DIR: ${dataDir}\n` +
`- .env file exists: ${envFileExists}\n` +
`- .env file readable: ${envFileReadable}\n` +
`- DATABASE_KEY in environment: ${!!process.env.DATABASE_KEY}\n` +
`Original error: ${errorMessage}`,
);
}
databaseLogger.error("Failed to decrypt database to buffer", error, {
operation: "database_buffer_decryption_failed",
encryptedPath,
errorMessage,
});
throw new Error(
`Database buffer decryption failed: ${error instanceof Error ? error.message : "Unknown error"}`,
);
throw new Error(`Database buffer decryption failed: ${errorMessage}`);
}
}
@@ -215,6 +358,23 @@ class DatabaseFileEncryption {
const encryptedData = fs.readFileSync(encryptedPath);
if (metadata.dataSize !== undefined && encryptedData.length !== metadata.dataSize) {
databaseLogger.error(
"Encrypted file size mismatch - possible corrupted write or mismatched metadata",
null,
{
operation: "database_file_size_mismatch",
encryptedPath,
actualSize: encryptedData.length,
expectedSize: metadata.dataSize,
},
);
throw new Error(
`Encrypted file size mismatch: expected ${metadata.dataSize} bytes but got ${encryptedData.length} bytes. ` +
`This indicates corrupted files or interrupted write operation.`,
);
}
let key: Buffer;
if (metadata.version === "v2") {
key = await this.systemCrypto.getDatabaseKey();
@@ -322,6 +482,125 @@ class DatabaseFileEncryption {
}
}
static getDiagnosticInfo(encryptedPath: string): {
dataFile: {
exists: boolean;
size?: number;
mtime?: string;
readable?: boolean;
};
metadataFile: {
exists: boolean;
size?: number;
mtime?: string;
readable?: boolean;
content?: EncryptedFileMetadata;
};
environment: {
dataDir: string;
envPath: string;
envFileExists: boolean;
envFileReadable: boolean;
hasEnvKey: boolean;
envKeyLength: number;
};
validation: {
filesConsistent: boolean;
sizeMismatch?: boolean;
expectedSize?: number;
actualSize?: number;
};
} {
const metadataPath = `${encryptedPath}${this.METADATA_FILE_SUFFIX}`;
const dataDir = process.env.DATA_DIR || "./db/data";
const envPath = path.join(dataDir, ".env");
const result: ReturnType<typeof this.getDiagnosticInfo> = {
dataFile: { exists: false },
metadataFile: { exists: false },
environment: {
dataDir,
envPath,
envFileExists: false,
envFileReadable: false,
hasEnvKey: !!process.env.DATABASE_KEY,
envKeyLength: process.env.DATABASE_KEY?.length || 0,
},
validation: {
filesConsistent: false,
},
};
try {
result.dataFile.exists = fs.existsSync(encryptedPath);
if (result.dataFile.exists) {
try {
fs.accessSync(encryptedPath, fs.constants.R_OK);
result.dataFile.readable = true;
const stats = fs.statSync(encryptedPath);
result.dataFile.size = stats.size;
result.dataFile.mtime = stats.mtime.toISOString();
} catch {
result.dataFile.readable = false;
}
}
result.metadataFile.exists = fs.existsSync(metadataPath);
if (result.metadataFile.exists) {
try {
fs.accessSync(metadataPath, fs.constants.R_OK);
result.metadataFile.readable = true;
const stats = fs.statSync(metadataPath);
result.metadataFile.size = stats.size;
result.metadataFile.mtime = stats.mtime.toISOString();
const content = fs.readFileSync(metadataPath, "utf8");
result.metadataFile.content = JSON.parse(content);
} catch {
result.metadataFile.readable = false;
}
}
result.environment.envFileExists = fs.existsSync(envPath);
if (result.environment.envFileExists) {
try {
fs.accessSync(envPath, fs.constants.R_OK);
result.environment.envFileReadable = true;
} catch {}
}
if (
result.dataFile.exists &&
result.metadataFile.exists &&
result.metadataFile.content
) {
result.validation.filesConsistent = true;
if (result.metadataFile.content.dataSize !== undefined) {
result.validation.expectedSize = result.metadataFile.content.dataSize;
result.validation.actualSize = result.dataFile.size;
result.validation.sizeMismatch =
result.metadataFile.content.dataSize !== result.dataFile.size;
if (result.validation.sizeMismatch) {
result.validation.filesConsistent = false;
}
}
}
} catch (error) {
databaseLogger.error("Failed to generate diagnostic info", error, {
operation: "diagnostic_info_failed",
encryptedPath,
});
}
databaseLogger.info("Database encryption diagnostic info", {
operation: "diagnostic_info_generated",
...result,
});
return result;
}
static async createEncryptedBackup(
databasePath: string,
backupDir: string,

View File

@@ -51,17 +51,8 @@ class SystemCrypto {
},
);
}
} catch (fileError) {
databaseLogger.warn("Failed to read .env file for JWT secret", {
operation: "jwt_init_file_read_failed",
error:
fileError instanceof Error ? fileError.message : "Unknown error",
});
}
} catch (fileError) {}
databaseLogger.warn("Generating new JWT secret", {
operation: "jwt_generating_new_secret",
});
await this.generateAndGuideUser();
} catch (error) {
databaseLogger.error("Failed to initialize JWT secret", error, {
@@ -80,29 +71,44 @@ class SystemCrypto {
async initializeDatabaseKey(): Promise<void> {
try {
const dataDir = process.env.DATA_DIR || "./db/data";
const envPath = path.join(dataDir, ".env");
const envKey = process.env.DATABASE_KEY;
if (envKey && envKey.length >= 64) {
this.databaseKey = Buffer.from(envKey, "hex");
const keyFingerprint = crypto
.createHash("sha256")
.update(this.databaseKey)
.digest("hex")
.substring(0, 16);
return;
}
const dataDir = process.env.DATA_DIR || "./db/data";
const envPath = path.join(dataDir, ".env");
try {
const envContent = await fs.readFile(envPath, "utf8");
const dbKeyMatch = envContent.match(/^DATABASE_KEY=(.+)$/m);
if (dbKeyMatch && dbKeyMatch[1] && dbKeyMatch[1].length >= 64) {
this.databaseKey = Buffer.from(dbKeyMatch[1], "hex");
process.env.DATABASE_KEY = dbKeyMatch[1];
const keyFingerprint = crypto
.createHash("sha256")
.update(this.databaseKey)
.digest("hex")
.substring(0, 16);
return;
} else {
}
} catch {}
} catch (fileError) {}
await this.generateAndGuideDatabaseKey();
} catch (error) {
databaseLogger.error("Failed to initialize database key", error, {
operation: "db_key_init_failed",
dataDir: process.env.DATA_DIR || "./db/data",
});
throw new Error("Database key initialization failed");
}

View File

@@ -24,11 +24,20 @@ import {
verifyTOTPLogin,
getServerConfig,
isElectron,
logoutUser,
} from "../../main-axios.ts";
import { ElectronServerConfig as ServerConfigComponent } from "@/ui/desktop/authentication/ElectronServerConfig.tsx";
import { ElectronLoginForm } from "@/ui/desktop/authentication/ElectronLoginForm.tsx";
function getCookie(name: string): string | undefined {
const value = `; ${document.cookie}`;
const parts = value.split(`; ${name}=`);
if (parts.length === 2) return parts.pop()?.split(";").shift();
}
interface ExtendedWindow extends Window {
IS_ELECTRON_WEBVIEW?: boolean;
}
interface AuthProps extends React.ComponentProps<"div"> {
setLoggedIn: (loggedIn: boolean) => void;
setIsAdmin: (isAdmin: boolean) => void;
@@ -37,7 +46,6 @@ interface AuthProps extends React.ComponentProps<"div"> {
loggedIn: boolean;
authLoading: boolean;
setDbError: (error: string | null) => void;
dbError?: string | null;
onAuthSuccess: (authData: {
isAdmin: boolean;
username: string | null;
@@ -54,21 +62,20 @@ export function Auth({
loggedIn,
authLoading,
setDbError,
dbError,
onAuthSuccess,
...props
}: AuthProps) {
const { t } = useTranslation();
const isInElectronWebView = () => {
if ((window as any).IS_ELECTRON_WEBVIEW) {
if ((window as ExtendedWindow).IS_ELECTRON_WEBVIEW) {
return true;
}
try {
if (window.self !== window.top) {
return true;
}
} catch (e) {
} catch (_e) {
return false;
}
return false;
@@ -126,7 +133,7 @@ export function Auth({
userId: meRes.userId || null,
});
toast.success(t("messages.loginSuccess"));
} catch (err) {
} catch (_err) {
toast.error(t("errors.failedUserInfo"));
}
}, [
@@ -206,7 +213,7 @@ export function Auth({
.finally(() => {
setDbHealthChecking(false);
});
}, [setDbError, firstUserToastShown, showServerConfig]);
}, [setDbError, firstUserToastShown, showServerConfig, t]);
useEffect(() => {
if (!registrationAllowed && !internalLoggedIn) {
@@ -282,9 +289,9 @@ export function Auth({
);
setWebviewAuthSuccess(true);
setTimeout(() => window.location.reload(), 100);
setLoading(false);
return;
} catch (e) {}
} catch (e) {
console.error("Error posting auth success message:", e);
}
}
const [meRes] = await Promise.all([getUserInfo()]);
@@ -461,7 +468,9 @@ export function Auth({
setTimeout(() => window.location.reload(), 100);
setTotpLoading(false);
return;
} catch (e) {}
} catch (e) {
console.error("Error posting auth success message:", e);
}
}
setInternalLoggedIn(true);
@@ -569,7 +578,9 @@ export function Auth({
setTimeout(() => window.location.reload(), 100);
setOidcLoading(false);
return;
} catch (e) {}
} catch (e) {
console.error("Error posting auth success message:", e);
}
}
}
@@ -607,7 +618,16 @@ export function Auth({
setOidcLoading(false);
});
}
}, []);
}, [
onAuthSuccess,
setDbError,
setIsAdmin,
setLoggedIn,
setUserId,
setUsername,
t,
isInElectronWebView,
]);
const Spinner = (
<svg