dev-1.7.0 #294
107
.dockerignore
Normal file
@@ -0,0 +1,107 @@
|
|||||||
|
# Git and version control
|
||||||
|
.git
|
||||||
|
.gitignore
|
||||||
|
|
||||||
|
# Node.js
|
||||||
|
node_modules
|
||||||
|
npm-debug.log*
|
||||||
|
yarn-debug.log*
|
||||||
|
yarn-error.log*
|
||||||
|
|
||||||
|
# Environment and configuration
|
||||||
|
.env
|
||||||
|
.env.*
|
||||||
|
!.env.example
|
||||||
|
|
||||||
|
# IDE and editor files
|
||||||
|
.vscode
|
||||||
|
.idea
|
||||||
|
*.swp
|
||||||
|
*.swo
|
||||||
|
*~
|
||||||
|
|
||||||
|
# OS generated files
|
||||||
|
.DS_Store
|
||||||
|
.DS_Store?
|
||||||
|
._*
|
||||||
|
.Spotlight-V100
|
||||||
|
.Trashes
|
||||||
|
ehthumbs.db
|
||||||
|
Thumbs.db
|
||||||
|
|
||||||
|
# Logs
|
||||||
|
logs
|
||||||
|
*.log
|
||||||
|
|
||||||
|
# Runtime data
|
||||||
|
pids
|
||||||
|
*.pid
|
||||||
|
*.seed
|
||||||
|
*.pid.lock
|
||||||
|
|
||||||
|
# Coverage directory used by tools like istanbul
|
||||||
|
coverage
|
||||||
|
|
||||||
|
# Build directories (we build inside Docker)
|
||||||
|
dist/
|
||||||
|
build/
|
||||||
|
|
||||||
|
# Temporary files
|
||||||
|
tmp/
|
||||||
|
temp/
|
||||||
|
|
||||||
|
# SSL certificates (generated at runtime)
|
||||||
|
ssl/
|
||||||
|
*.crt
|
||||||
|
*.key
|
||||||
|
*.pem
|
||||||
|
|
||||||
|
# Database files (use volumes)
|
||||||
|
*.sqlite
|
||||||
|
*.db
|
||||||
|
|
||||||
|
# Docker files (avoid recursion)
|
||||||
|
Dockerfile*
|
||||||
|
docker-compose*.yml
|
||||||
|
.dockerignore
|
||||||
|
|
|||||||
|
|
||||||
|
# Documentation
|
||||||
|
README*.md
|
||||||
|
CONTRIBUTING.md
|
||||||
|
LICENSE
|
||||||
|
*.md
|
||||||
|
|
||||||
|
# Repository images and assets (not needed in container)
|
||||||
|
repo-images/
|
||||||
|
|
||||||
|
# Testing
|
||||||
|
test/
|
||||||
|
tests/
|
||||||
|
*.test.js
|
||||||
|
*.spec.js
|
||||||
|
|
||||||
|
# Uploads directory (use volumes)
|
||||||
|
uploads/
|
||||||
|
|
||||||
|
# Backup files
|
||||||
|
*.bak
|
||||||
|
*.backup
|
||||||
|
*.old
|
||||||
|
|
||||||
|
# Cache directories
|
||||||
|
.cache/
|
||||||
|
.npm/
|
||||||
|
.yarn/
|
||||||
|
|
||||||
|
# TypeScript build info
|
||||||
|
*.tsbuildinfo
|
||||||
|
|
||||||
|
# ESLint cache
|
||||||
|
.eslintcache
|
||||||
|
|
||||||
|
# Prettier
|
||||||
|
.prettierignore
|
||||||
|
.prettierrc*
|
||||||
|
|
||||||
|
# Local configuration
|
||||||
|
.claude/
|
||||||
2
.gitignore
vendored
@@ -25,3 +25,5 @@ dist-ssr
|
|||||||
/db/
|
/db/
|
||||||
/release/
|
/release/
|
||||||
/.claude/
|
/.claude/
|
||||||
|
/ssl/
|
||||||
|
.env
|
||||||
|
|||||||
323
MIGRATION-TESTING.md
Normal file
@@ -0,0 +1,323 @@
|
|||||||
|
# Database Migration Testing Guide
|
||||||
|
|
||||||
|
## Overview
|
||||||
|
|
||||||
|
This document outlines the testing procedures for the automatic database migration system that migrates unencrypted SQLite databases to encrypted format during Docker deployment updates.
|
||||||
|
|
||||||
|
## Migration System Features
|
||||||
|
|
||||||
|
✅ **Automatic Detection**: Detects unencrypted databases on startup
|
||||||
|
✅ **Safe Backup**: Creates timestamped backups before migration
|
||||||
|
✅ **Integrity Verification**: Validates migration completeness
|
||||||
|
✅ **Non-destructive**: Original files are renamed, not deleted
|
||||||
|
✅ **Cleanup**: Removes old backup files (keeps latest 3)
|
||||||
|
✅ **Admin API**: Migration status and history endpoints
|
||||||
|
✅ **Detailed Logging**: Comprehensive migration logs
|
||||||
|
|
||||||
|
## Test Scenarios
|
||||||
|
|
||||||
|
### Scenario 1: Fresh Installation (No Migration Needed)
|
||||||
|
**Setup**: Clean Docker container with no existing database files
|
||||||
|
**Expected**:
|
||||||
|
- New encrypted database created
|
||||||
|
- No migration messages in logs
|
||||||
|
- Status API shows "Fresh installation detected"
|
||||||
|
|
||||||
|
**Test Commands**:
|
||||||
|
```bash
|
||||||
|
# Clean start
|
||||||
|
docker run --rm termix:latest
|
||||||
|
# Check logs for "fresh installation"
|
||||||
|
# GET /database/migration/status should show needsMigration: false
|
||||||
|
```
|
||||||
|
|
||||||
|
### Scenario 2: Standard Migration (Unencrypted → Encrypted)
|
||||||
|
**Setup**: Existing unencrypted `db.sqlite` file with user data
|
||||||
|
**Expected**:
|
||||||
|
- Automatic migration on startup
|
||||||
|
- Backup file created (`.migration-backup-{timestamp}`)
|
||||||
|
- Original file renamed (`.migrated-{timestamp}`)
|
||||||
|
- Encrypted database created successfully
|
||||||
|
- All data preserved and accessible
|
||||||
|
|
||||||
|
**Test Commands**:
|
||||||
|
```bash
|
||||||
|
# 1. Create test data in unencrypted format
|
||||||
|
docker run -v /host/data:/app/data termix:old-version
|
||||||
|
# Add some SSH hosts and credentials via UI
|
||||||
|
|
||||||
|
# 2. Stop container and update to new version
|
||||||
|
docker stop container_id
|
||||||
|
docker run -v /host/data:/app/data termix:latest
|
||||||
|
|
||||||
|
# 3. Check migration logs
|
||||||
|
docker logs container_id | grep -i migration
|
||||||
|
|
||||||
|
# 4. Verify data integrity via API
|
||||||
|
curl -H "Authorization: Bearer $TOKEN" http://localhost:8081/database/migration/status
|
||||||
|
```
|
||||||
|
|
||||||
|
### Scenario 3: Already Encrypted (No Migration Needed)
|
||||||
|
**Setup**: Only encrypted database file exists
|
||||||
|
**Expected**:
|
||||||
|
- No migration performed
|
||||||
|
- Database loads normally
|
||||||
|
- Status API shows "Only encrypted database exists"
|
||||||
|
|
||||||
|
**Test Commands**:
|
||||||
|
```bash
|
||||||
|
# Start with existing encrypted database
|
||||||
|
docker run -v /host/encrypted-data:/app/data termix:latest
|
||||||
|
# Verify no migration messages in logs
|
||||||
|
```
|
||||||
|
|
||||||
|
### Scenario 4: Both Files Exist (Safety Mode)
|
||||||
|
**Setup**: Both encrypted and unencrypted databases present
|
||||||
|
**Expected**:
|
||||||
|
- Migration skipped for safety
|
||||||
|
- Warning logged about manual intervention
|
||||||
|
- Both files preserved
|
||||||
|
- Uses encrypted database
|
||||||
|
|
||||||
|
**Test Commands**:
|
||||||
|
```bash
|
||||||
|
# Manually create both files
|
||||||
|
touch /host/data/db.sqlite
|
||||||
|
touch /host/data/db.sqlite.encrypted
|
||||||
|
docker run -v /host/data:/app/data termix:latest
|
||||||
|
# Check for safety warning in logs
|
||||||
|
```
|
||||||
|
|
||||||
|
### Scenario 5: Migration Failure Recovery
|
||||||
|
**Setup**: Simulate migration failure (corrupted source file)
|
||||||
|
**Expected**:
|
||||||
|
- Migration fails safely
|
||||||
|
- Backup file preserved
|
||||||
|
- Original unencrypted file untouched
|
||||||
|
- Clear error message with recovery instructions
|
||||||
|
|
||||||
|
**Test Commands**:
|
||||||
|
```bash
|
||||||
|
# Create corrupted database file
|
||||||
|
echo "corrupted" > /host/data/db.sqlite
|
||||||
|
docker run -v /host/data:/app/data termix:latest
|
||||||
|
# Verify error handling and backup preservation
|
||||||
|
```
|
||||||
|
|
||||||
|
### Scenario 6: Large Database Migration
|
||||||
|
**Setup**: Large unencrypted database (>100MB with many records)
|
||||||
|
**Expected**:
|
||||||
|
- Migration completes successfully
|
||||||
|
- Performance is acceptable (under 30 seconds)
|
||||||
|
- Memory usage stays reasonable
|
||||||
|
- All data integrity checks pass
|
||||||
|
|
||||||
|
**Test Commands**:
|
||||||
|
```bash
|
||||||
|
# Create large dataset first
|
||||||
|
# Monitor migration duration and memory usage
|
||||||
|
docker stats container_id
|
||||||
|
```
|
||||||
|
|
||||||
|
## API Testing
|
||||||
|
|
||||||
|
### Migration Status Endpoint
|
||||||
|
```bash
|
||||||
|
# Admin access required
|
||||||
|
curl -H "Authorization: Bearer $ADMIN_TOKEN" \
|
||||||
|
http://localhost:8081/database/migration/status
|
||||||
|
|
||||||
|
# Expected response:
|
||||||
|
{
|
||||||
|
"migrationStatus": {
|
||||||
|
"needsMigration": false,
|
||||||
|
"hasUnencryptedDb": false,
|
||||||
|
"hasEncryptedDb": true,
|
||||||
|
"unencryptedDbSize": 0,
|
||||||
|
"reason": "Only encrypted database exists. No migration needed."
|
||||||
|
},
|
||||||
|
"files": {
|
||||||
|
"unencryptedDbSize": 0,
|
||||||
|
"encryptedDbSize": 524288,
|
||||||
|
"backupFiles": 2,
|
||||||
|
"migratedFiles": 1
|
||||||
|
},
|
||||||
|
"recommendations": [
|
||||||
|
"Database is properly encrypted",
|
||||||
|
"No action required"
|
||||||
|
]
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### Migration History Endpoint
|
||||||
|
```bash
|
||||||
|
curl -H "Authorization: Bearer $ADMIN_TOKEN" \
|
||||||
|
http://localhost:8081/database/migration/history
|
||||||
|
|
||||||
|
# Expected response:
|
||||||
|
{
|
||||||
|
"files": [
|
||||||
|
{
|
||||||
|
"name": "db.sqlite.migration-backup-2024-09-24T10-30-00-000Z",
|
||||||
|
"size": 262144,
|
||||||
|
"created": "2024-09-24T10:30:00.000Z",
|
||||||
|
"modified": "2024-09-24T10:30:00.000Z",
|
||||||
|
"type": "backup"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"summary": {
|
||||||
|
"totalBackups": 1,
|
||||||
|
"totalMigrated": 1,
|
||||||
|
"oldestBackup": "2024-09-24T10:30:00.000Z",
|
||||||
|
"newestBackup": "2024-09-24T10:30:00.000Z"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
## Log Analysis
|
||||||
|
|
||||||
|
### Successful Migration Logs
|
||||||
|
Look for these log entries:
|
||||||
|
```
|
||||||
|
[INFO] Migration status check completed - needsMigration: true
|
||||||
|
[INFO] Starting automatic database migration
|
||||||
|
[INFO] Creating migration backup
|
||||||
|
[SUCCESS] Migration backup created successfully
|
||||||
|
[INFO] Found tables to migrate - tableCount: 8
|
||||||
|
[SUCCESS] Migration integrity verification completed
|
||||||
|
[INFO] Creating encrypted database file
|
||||||
|
[SUCCESS] Database migration completed successfully
|
||||||
|
```
|
||||||
|
|
||||||
|
### Migration Skipped (Safety) Logs
|
||||||
|
```
|
||||||
|
[INFO] Migration status check completed - needsMigration: false
|
||||||
|
[INFO] Both encrypted and unencrypted databases exist. Skipping migration for safety
|
||||||
|
[WARN] Manual intervention may be required
|
||||||
|
```
|
||||||
|
|
||||||
|
### Migration Failure Logs
|
||||||
|
```
|
||||||
|
[ERROR] Database migration failed
|
||||||
|
[ERROR] Backup available at: /app/data/db.sqlite.migration-backup-{timestamp}
|
||||||
|
[ERROR] Manual intervention required to recover data
|
||||||
|
```
|
||||||
|
|
||||||
|
## Manual Recovery Procedures
|
||||||
|
|
||||||
|
### If Migration Fails:
|
||||||
|
1. **Locate backup file**: `db.sqlite.migration-backup-{timestamp}`
|
||||||
|
2. **Restore original**: `cp backup-file db.sqlite`
|
||||||
|
3. **Check logs**: Look for specific error details
|
||||||
|
4. **Fix issue**: Address the root cause (permissions, disk space, etc.)
|
||||||
|
5. **Retry**: Restart container to trigger migration again
|
||||||
|
|
||||||
|
### If Both Databases Exist:
|
||||||
|
1. **Check dates**: Determine which file is newer
|
||||||
|
2. **Backup both**: Make copies before proceeding
|
||||||
|
3. **Remove older**: Delete the outdated database file
|
||||||
|
4. **Restart**: Container will detect single database
|
||||||
|
|
||||||
|
### Emergency Data Recovery:
|
||||||
|
1. **Backup files are SQLite**: Can be opened with any SQLite client
|
||||||
|
2. **Manual export**: Use SQLite tools to export data
|
||||||
|
3. **Re-import**: Use Termix import functionality
|
||||||
|
|
||||||
|
## Performance Expectations
|
||||||
|
|
||||||
|
| Database Size | Expected Migration Time | Memory Usage |
|
||||||
|
|---------------|------------------------|--------------|
|
||||||
|
| < 10MB | < 5 seconds | < 50MB |
|
||||||
|
| 10-50MB | 5-15 seconds | < 100MB |
|
||||||
|
| 50-200MB | 15-45 seconds | < 200MB |
|
||||||
|
| 200MB+ | 45+ seconds | < 500MB |
|
||||||
|
|
||||||
|
## Validation Checklist
|
||||||
|
|
||||||
|
After migration, verify:
|
||||||
|
- [ ] All SSH hosts are accessible
|
||||||
|
- [ ] SSH credentials work correctly
|
||||||
|
- [ ] File manager recent/pinned items preserved
|
||||||
|
- [ ] User settings maintained
|
||||||
|
- [ ] OIDC configuration intact
|
||||||
|
- [ ] Admin users still have admin privileges
|
||||||
|
- [ ] Backup file exists and is valid SQLite
|
||||||
|
- [ ] Original file renamed (not deleted)
|
||||||
|
- [ ] Encrypted file is properly encrypted
|
||||||
|
- [ ] Migration APIs respond correctly
|
||||||
|
|
||||||
|
## Monitoring Commands
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Watch migration in real-time
|
||||||
|
docker logs -f container_id | grep -i migration
|
||||||
|
|
||||||
|
# Check file sizes before/after
|
||||||
|
ls -la /host/data/db.sqlite*
|
||||||
|
|
||||||
|
# Verify encrypted file
|
||||||
|
file /host/data/db.sqlite.encrypted
|
||||||
|
|
||||||
|
# Monitor system resources during migration
|
||||||
|
docker stats container_id
|
||||||
|
|
||||||
|
# Test database connectivity after migration
|
||||||
|
curl -H "Authorization: Bearer $TOKEN" \
|
||||||
|
http://localhost:8081/hosts/list
|
||||||
|
```
|
||||||
|
|
||||||
|
## Common Issues & Solutions
|
||||||
|
|
||||||
|
### Issue: "Permission denied" during backup creation
|
||||||
|
**Solution**: Check container file permissions and volume mounts
|
||||||
|
|
||||||
|
### Issue: "Insufficient disk space" during migration
|
||||||
|
**Solution**: Free up space, migration requires 2x database size temporarily
|
||||||
|
|
||||||
|
### Issue: "Database locked" error
|
||||||
|
**Solution**: Ensure no other processes are accessing the database file
|
||||||
|
|
||||||
|
### Issue: Migration hangs indefinitely
|
||||||
|
**Solution**: Check for very large BLOB data, increase timeout or migrate manually
|
||||||
|
|
||||||
|
### Issue: Encrypted file fails validation
|
||||||
|
**Solution**: Check DATABASE_KEY environment variable, ensure it's stable
|
||||||
|
|
||||||
|
## Security Considerations
|
||||||
|
|
||||||
|
- **Backup files contain unencrypted data**: Secure backup file access
|
||||||
|
- **Migration logs may contain sensitive info**: Review log retention policies
|
||||||
|
- **Temporary files during migration**: Ensure secure temp directory
|
||||||
|
- **Original files are preserved**: Plan for secure cleanup of old files
|
||||||
|
- **Admin API access**: Ensure proper authentication and authorization
|
||||||
|
|
||||||
|
## Integration with CI/CD
|
||||||
|
|
||||||
|
For automated testing in CI/CD pipelines:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
#!/bin/bash
|
||||||
|
# Migration integration test
|
||||||
|
set -e
|
||||||
|
|
||||||
|
# Start with unencrypted test data
|
||||||
|
docker run -d --name test-migration \
|
||||||
|
-v ./test-data:/app/data \
|
||||||
|
termix:latest
|
||||||
|
|
||||||
|
# Wait for startup
|
||||||
|
sleep 30
|
||||||
|
|
||||||
|
# Check migration status
|
||||||
|
RESPONSE=$(curl -s -H "Authorization: Bearer $TEST_TOKEN" \
|
||||||
|
http://localhost:8081/database/migration/status)
|
||||||
|
|
||||||
|
# Validate migration success
|
||||||
|
echo "$RESPONSE" | jq '.migrationStatus.needsMigration == false'
|
||||||
|
|
||||||
|
# Cleanup
|
||||||
|
docker stop test-migration
|
||||||
|
docker rm test-migration
|
||||||
|
```
|
||||||
|
|
||||||
|
This comprehensive testing approach ensures the migration system handles all edge cases safely and provides administrators with full visibility into the migration process.
|
||||||
267
SECURITY.md
@@ -1,267 +0,0 @@
|
|||||||
# Security Guide for Termix
|
|
||||||
|
|
||||||
## Database Encryption
|
|
||||||
|
|
||||||
Termix implements AES-256-GCM encryption for sensitive data stored in the database. This protects SSH credentials, passwords, and authentication tokens from unauthorized access.
|
|
||||||
|
|
||||||
### Encrypted Fields
|
|
||||||
|
|
||||||
The following database fields are automatically encrypted:
|
|
||||||
|
|
||||||
**Users Table:**
|
|
||||||
|
|
||||||
- `password_hash` - User password hashes
|
|
||||||
- `client_secret` - OIDC client secrets
|
|
||||||
- `totp_secret` - 2FA authentication seeds
|
|
||||||
- `totp_backup_codes` - 2FA backup codes
|
|
||||||
|
|
||||||
**SSH Data Table:**
|
|
||||||
|
|
||||||
- `password` - SSH connection passwords
|
|
||||||
- `key` - SSH private keys
|
|
||||||
- `keyPassword` - SSH private key passphrases
|
|
||||||
|
|
||||||
**SSH Credentials Table:**
|
|
||||||
|
|
||||||
- `password` - Stored SSH passwords
|
|
||||||
- `privateKey` - SSH private keys
|
|
||||||
- `keyPassword` - SSH private key passphrases
|
|
||||||
|
|
||||||
### Configuration
|
|
||||||
|
|
||||||
#### Required Environment Variables
|
|
||||||
|
|
||||||
```bash
|
|
||||||
# Encryption master key (REQUIRED)
|
|
||||||
DB_ENCRYPTION_KEY=your-very-strong-encryption-key-32-chars-minimum
|
|
||||||
```
|
|
||||||
|
|
||||||
**⚠️ CRITICAL:** The encryption key must be:
|
|
||||||
|
|
||||||
- At least 16 characters long (32+ recommended)
|
|
||||||
- Cryptographically random
|
|
||||||
- Unique per installation
|
|
||||||
- Safely backed up
|
|
||||||
|
|
||||||
#### Optional Settings
|
|
||||||
|
|
||||||
```bash
|
|
||||||
# Enable/disable encryption (default: true)
|
|
||||||
ENCRYPTION_ENABLED=true
|
|
||||||
|
|
||||||
# Reject unencrypted data (default: false)
|
|
||||||
FORCE_ENCRYPTION=false
|
|
||||||
|
|
||||||
# Auto-encrypt legacy data (default: true)
|
|
||||||
MIGRATE_ON_ACCESS=true
|
|
||||||
```
|
|
||||||
|
|
||||||
### Initial Setup
|
|
||||||
|
|
||||||
#### 1. Generate Encryption Key
|
|
||||||
|
|
||||||
```bash
|
|
||||||
# Generate a secure random key (Linux/macOS)
|
|
||||||
openssl rand -hex 32
|
|
||||||
|
|
||||||
# Or using Node.js
|
|
||||||
node -e "console.log(require('crypto').randomBytes(32).toString('hex'))"
|
|
||||||
```
|
|
||||||
|
|
||||||
#### 2. Set Environment Variable
|
|
||||||
|
|
||||||
```bash
|
|
||||||
# Add to your .env file
|
|
||||||
echo "DB_ENCRYPTION_KEY=$(openssl rand -hex 32)" >> .env
|
|
||||||
```
|
|
||||||
|
|
||||||
#### 3. Validate Configuration
|
|
||||||
|
|
||||||
```bash
|
|
||||||
# Test encryption setup
|
|
||||||
npm run test:encryption
|
|
||||||
```
|
|
||||||
|
|
||||||
### Migration from Unencrypted Database
|
|
||||||
|
|
||||||
If you have an existing Termix installation with unencrypted data:
|
|
||||||
|
|
||||||
#### 1. Backup Your Database
|
|
||||||
|
|
||||||
```bash
|
|
||||||
# Create backup before migration
|
|
||||||
cp ./db/data/db.sqlite ./db/data/db-backup-$(date +%Y%m%d-%H%M%S).sqlite
|
|
||||||
```
|
|
||||||
|
|
||||||
#### 2. Run Migration
|
|
||||||
|
|
||||||
```bash
|
|
||||||
# Set encryption key
|
|
||||||
export DB_ENCRYPTION_KEY="your-secure-key-here"
|
|
||||||
|
|
||||||
# Test migration (dry run)
|
|
||||||
npm run migrate:encryption -- --dry-run
|
|
||||||
|
|
||||||
# Run actual migration
|
|
||||||
npm run migrate:encryption
|
|
||||||
```
|
|
||||||
|
|
||||||
#### 3. Verify Migration
|
|
||||||
|
|
||||||
```bash
|
|
||||||
# Check encryption status
|
|
||||||
curl http://localhost:8081/encryption/status
|
|
||||||
|
|
||||||
# Test application functionality
|
|
||||||
npm run test:encryption production
|
|
||||||
```
|
|
||||||
|
|
||||||
### Security Best Practices
|
|
||||||
|
|
||||||
#### Key Management
|
|
||||||
|
|
||||||
1. **Generate unique keys** for each installation
|
|
||||||
2. **Store keys securely** (use environment variables, not config files)
|
|
||||||
3. **Backup keys safely** (encrypted backups in secure locations)
|
|
||||||
4. **Rotate keys periodically** (implement key rotation schedule)
|
|
||||||
|
|
||||||
#### Deployment Security
|
|
||||||
|
|
||||||
```bash
|
|
||||||
# Production Docker example
|
|
||||||
docker run -d \
|
|
||||||
-e DB_ENCRYPTION_KEY="$(cat /secure/location/encryption.key)" \
|
|
||||||
-e ENCRYPTION_ENABLED=true \
|
|
||||||
-e FORCE_ENCRYPTION=true \
|
|
||||||
-v termix-data:/app/data \
|
|
||||||
ghcr.io/lukegus/termix:latest
|
|
||||||
```
|
|
||||||
|
|
||||||
#### File System Protection
|
|
||||||
|
|
||||||
```bash
|
|
||||||
# Secure database directory permissions
|
|
||||||
chmod 700 ./db/data/
|
|
||||||
chmod 600 ./db/data/db.sqlite
|
|
||||||
|
|
||||||
# Use encrypted storage if possible
|
|
||||||
# Consider full disk encryption for production
|
|
||||||
```
|
|
||||||
|
|
||||||
### Monitoring and Alerting
|
|
||||||
|
|
||||||
#### Health Checks
|
|
||||||
|
|
||||||
The encryption system provides health check endpoints:
|
|
||||||
|
|
||||||
```bash
|
|
||||||
# Check encryption status
|
|
||||||
GET /encryption/status
|
|
||||||
|
|
||||||
# Response format:
|
|
||||||
{
|
|
||||||
"encryption": {
|
|
||||||
"enabled": true,
|
|
||||||
"configValid": true,
|
|
||||||
"forceEncryption": false,
|
|
||||||
"migrateOnAccess": true
|
|
||||||
},
|
|
||||||
"migration": {
|
|
||||||
"isEncryptionEnabled": true,
|
|
||||||
"migrationCompleted": true,
|
|
||||||
"migrationDate": "2024-01-15T10:30:00Z"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
#### Log Monitoring
|
|
||||||
|
|
||||||
Monitor logs for encryption-related events:
|
|
||||||
|
|
||||||
```bash
|
|
||||||
# Encryption initialization
|
|
||||||
"Database encryption initialized successfully"
|
|
||||||
|
|
||||||
# Migration events
|
|
||||||
"Migration completed for table: users"
|
|
||||||
|
|
||||||
# Security warnings
|
|
||||||
"DB_ENCRYPTION_KEY not set, using default (INSECURE)"
|
|
||||||
```
|
|
||||||
|
|
||||||
### Troubleshooting
|
|
||||||
|
|
||||||
#### Common Issues
|
|
||||||
|
|
||||||
**1. "Decryption failed" errors**
|
|
||||||
|
|
||||||
- Verify `DB_ENCRYPTION_KEY` is correct
|
|
||||||
- Check if database was corrupted
|
|
||||||
- Restore from backup if necessary
|
|
||||||
|
|
||||||
**2. Performance issues**
|
|
||||||
|
|
||||||
- Encryption adds ~1ms per operation
|
|
||||||
- Consider disabling `MIGRATE_ON_ACCESS` after migration
|
|
||||||
- Monitor CPU usage during large migrations
|
|
||||||
|
|
||||||
**3. Key rotation**
|
|
||||||
|
|
||||||
```bash
|
|
||||||
# Generate new key
|
|
||||||
NEW_KEY=$(openssl rand -hex 32)
|
|
||||||
|
|
||||||
# Update configuration
|
|
||||||
# Note: Requires re-encryption of all data
|
|
||||||
```
|
|
||||||
|
|
||||||
### Compliance Notes
|
|
||||||
|
|
||||||
This encryption implementation helps meet requirements for:
|
|
||||||
|
|
||||||
- **GDPR** - Personal data protection
|
|
||||||
- **SOC 2** - Data security controls
|
|
||||||
- **PCI DSS** - Sensitive data protection
|
|
||||||
- **HIPAA** - Healthcare data encryption (if applicable)
|
|
||||||
|
|
||||||
### Security Limitations
|
|
||||||
|
|
||||||
**What this protects against:**
|
|
||||||
|
|
||||||
- Database file theft
|
|
||||||
- Disk access by unauthorized users
|
|
||||||
- Data breaches from file system access
|
|
||||||
|
|
||||||
**What this does NOT protect against:**
|
|
||||||
|
|
||||||
- Application-level vulnerabilities
|
|
||||||
- Memory dumps while application is running
|
|
||||||
- Attacks against the running application
|
|
||||||
- Social engineering attacks
|
|
||||||
|
|
||||||
### Emergency Procedures
|
|
||||||
|
|
||||||
#### Lost Encryption Key
|
|
||||||
|
|
||||||
⚠️ **Data is unrecoverable without the encryption key**
|
|
||||||
|
|
||||||
1. Check all backup locations
|
|
||||||
2. Restore from unencrypted backup if available
|
|
||||||
3. Contact system administrators
|
|
||||||
|
|
||||||
#### Suspected Key Compromise
|
|
||||||
|
|
||||||
1. **Immediately** generate new encryption key
|
|
||||||
2. Take application offline
|
|
||||||
3. Re-encrypt all sensitive data with new key
|
|
||||||
4. Investigate compromise source
|
|
||||||
5. Update security procedures
|
|
||||||
|
|
||||||
### Support
|
|
||||||
|
|
||||||
For security-related questions:
|
|
||||||
|
|
||||||
- Open issue: [GitHub Issues](https://github.com/LukeGus/Termix/issues)
|
|
||||||
- Discord: [Termix Community](https://discord.gg/jVQGdvHDrf)
|
|
||||||
|
|
||||||
**Do not share encryption keys or sensitive debugging information in public channels.**
|
|
||||||
52
docker/.env.example
Normal file
@@ -0,0 +1,52 @@
|
|||||||
|
# Termix Docker Environment Configuration Example
|
||||||
|
#
|
||||||
|
# IMPORTANT: This file shows available environment variables.
|
||||||
|
# For most users, you DON'T need to create a .env file.
|
||||||
|
# Termix will auto-generate secure keys on first startup.
|
||||||
|
#
|
||||||
|
# Copy this file to .env ONLY if you need custom configuration:
|
||||||
|
# cp docker/.env.example docker/.env
|
||||||
|
|
||||||
|
# ===== BASIC CONFIGURATION =====
|
||||||
|
PORT=8080
|
||||||
|
NODE_ENV=production
|
||||||
|
|
||||||
|
# ===== SSL/HTTPS CONFIGURATION =====
|
||||||
|
ENABLE_SSL=false
|
||||||
|
SSL_PORT=8443
|
||||||
|
SSL_DOMAIN=localhost
|
||||||
|
SSL_CERT_PATH=/app/ssl/termix.crt
|
||||||
|
SSL_KEY_PATH=/app/ssl/termix.key
|
||||||
|
|
||||||
|
# ===== SECURITY KEYS =====
|
||||||
|
# WARNING: Only set these if you need specific keys for multi-instance deployment
|
||||||
|
# For single instance deployment, leave these EMPTY - Termix will auto-generate
|
||||||
|
# secure random keys and persist them in Docker volumes.
|
||||||
|
#
|
||||||
|
# If you DO set these, generate them with: openssl rand -hex 32
|
||||||
|
JWT_SECRET=
|
||||||
|
DATABASE_KEY=
|
||||||
|
INTERNAL_AUTH_TOKEN=
|
||||||
|
|
||||||
|
# ===== DATABASE CONFIGURATION =====
|
||||||
|
DATABASE_ENCRYPTION=true
|
||||||
|
|
||||||
|
# ===== CORS CONFIGURATION =====
|
||||||
|
ALLOWED_ORIGINS=*
|
||||||
|
Using `*` for `ALLOWED_ORIGINS` is insecure and should be avoided in production as it allows any website to make requests to your Termix instance. This can lead to security vulnerabilities like Cross-Site Request Forgery (CSRF). It would be better to provide a more secure default example, such as `ALLOWED_ORIGINS=http://localhost:5173,https://your-termix-domain.com`, and add a strong warning in the comments about the risks of using a wildcard.
|
|||||||
|
|
||||||
|
# ===== DEPLOYMENT NOTES =====
|
||||||
|
#
|
||||||
|
# Single Instance (Recommended):
|
||||||
|
# - Don't create .env file - let Termix auto-generate keys
|
||||||
|
# - Keys are automatically persisted in Docker volumes
|
||||||
|
# - Secure and maintenance-free
|
||||||
|
#
|
||||||
|
# Multi-Instance Cluster:
|
||||||
|
# - Set identical JWT_SECRET, DATABASE_KEY, INTERNAL_AUTH_TOKEN across all instances
|
||||||
|
# - Use shared storage for /app/data and /app/config volumes
|
||||||
|
# - Ensure all instances can access the same encryption keys
|
||||||
|
#
|
||||||
|
# Security Best Practices:
|
||||||
|
# - Never commit .env files to version control
|
||||||
|
# - Use Docker secrets in production environments
|
||||||
|
# - Regularly rotate keys (requires data migration)
|
||||||
@@ -35,23 +35,10 @@ RUN npm rebuild better-sqlite3 --force
|
|||||||
|
|
||||||
RUN npm run build:backend
|
RUN npm run build:backend
|
||||||
|
|
||||||
# Stage 4: Production dependencies
|
# Stage 4: Production dependencies with native modules
|
||||||
FROM node:24-alpine AS production-deps
|
FROM node:24-alpine AS production-deps
|
||||||
WORKDIR /app
|
WORKDIR /app
|
||||||
|
|
||||||
COPY package*.json ./
|
|
||||||
|
|
||||||
ENV npm_config_target_platform=linux
|
|
||||||
ENV npm_config_target_arch=x64
|
|
||||||
ENV npm_config_target_libc=glibc
|
|
||||||
|
|
||||||
RUN npm ci --only=production --ignore-scripts --force && \
|
|
||||||
npm cache clean --force
|
|
||||||
|
|
||||||
# Stage 5: Build native modules
|
|
||||||
FROM node:24-alpine AS native-builder
|
|
||||||
WORKDIR /app
|
|
||||||
|
|
||||||
RUN apk add --no-cache python3 make g++
|
RUN apk add --no-cache python3 make g++
|
||||||
|
|
||||||
COPY package*.json ./
|
COPY package*.json ./
|
||||||
@@ -60,10 +47,11 @@ ENV npm_config_target_platform=linux
|
|||||||
ENV npm_config_target_arch=x64
|
ENV npm_config_target_arch=x64
|
||||||
ENV npm_config_target_libc=glibc
|
ENV npm_config_target_libc=glibc
|
||||||
|
|
||||||
# Install native modules and compile them properly
|
# Install production dependencies and rebuild native modules in one stage
|
||||||
RUN npm ci --only=production --force && \
|
RUN npm ci --omit=dev --ignore-scripts --force && \
|
||||||
npm rebuild better-sqlite3 bcryptjs --force && \
|
npm rebuild better-sqlite3 bcryptjs --force && \
|
||||||
npm cache clean --force
|
npm cache clean --force && \
|
||||||
|
rm -rf ~/.npm /tmp/* /var/cache/apk/*
|
||||||
|
|
||||||
# Stage 6: Final image
|
# Stage 6: Final image
|
||||||
FROM node:24-alpine
|
FROM node:24-alpine
|
||||||
@@ -71,23 +59,26 @@ ENV DATA_DIR=/app/data \
|
|||||||
PORT=8080 \
|
PORT=8080 \
|
||||||
NODE_ENV=production
|
NODE_ENV=production
|
||||||
|
|
||||||
RUN apk add --no-cache nginx gettext su-exec && \
|
RUN apk add --no-cache nginx gettext su-exec openssl && \
|
||||||
mkdir -p /app/data && \
|
mkdir -p /app/data /app/config /app/ssl && \
|
||||||
chown -R node:node /app/data
|
chown -R node:node /app/data /app/config /app/ssl
|
||||||
|
|
||||||
COPY docker/nginx.conf /etc/nginx/nginx.conf
|
COPY docker/nginx.conf /etc/nginx/nginx.conf
|
||||||
|
COPY docker/nginx-https.conf /etc/nginx/nginx-https.conf
|
||||||
COPY --from=frontend-builder /app/dist /usr/share/nginx/html
|
COPY --from=frontend-builder /app/dist /usr/share/nginx/html
|
||||||
COPY --from=frontend-builder /app/src/locales /usr/share/nginx/html/locales
|
COPY --from=frontend-builder /app/src/locales /usr/share/nginx/html/locales
|
||||||
RUN chown -R nginx:nginx /usr/share/nginx/html
|
RUN chown -R nginx:nginx /usr/share/nginx/html
|
||||||
|
|
||||||
WORKDIR /app
|
WORKDIR /app
|
||||||
|
|
||||||
COPY --from=native-builder /app/node_modules /app/node_modules
|
COPY --from=production-deps /app/node_modules /app/node_modules
|
||||||
COPY --from=backend-builder /app/dist/backend ./dist/backend
|
COPY --from=backend-builder /app/dist/backend ./dist/backend
|
||||||
|
|
||||||
COPY package.json ./
|
COPY package.json ./
|
||||||
COPY .env ./.env
|
RUN chown -R node:node /app && \
|
||||||
RUN chown -R node:node /app
|
chmod 755 /app/config && \
|
||||||
|
chmod 755 /app/ssl && \
|
||||||
|
chmod 755 /app/data
|
||||||
|
|
||||||
VOLUME ["/app/data"]
|
VOLUME ["/app/data"]
|
||||||
|
|
||||||
|
|||||||
@@ -1,15 +1,67 @@
|
|||||||
|
# Termix Docker Compose Configuration
|
||||||
|
#
|
||||||
|
# QUICK START: Just run "docker-compose up -d"
|
||||||
|
# - Security keys are auto-generated on first startup
|
||||||
|
# - Keys are persisted in Docker volumes (survive container restarts)
|
||||||
|
# - No manual .env file needed for single-instance deployment
|
||||||
|
#
|
||||||
|
# See docker/.env.example for advanced configuration options
|
||||||
|
|
||||||
services:
|
services:
|
||||||
termix:
|
termix:
|
||||||
image: ghcr.io/lukegus/termix:latest
|
build:
|
||||||
|
context: ..
|
||||||
|
dockerfile: docker/Dockerfile
|
||||||
container_name: termix
|
container_name: termix
|
||||||
restart: unless-stopped
|
restart: unless-stopped
|
||||||
ports:
|
ports:
|
||||||
- "8080:8080"
|
# HTTP port (redirects to HTTPS if SSL enabled)
|
||||||
|
- "${PORT:-8080}:8080"
|
||||||
|
# HTTPS port (when SSL is enabled)
|
||||||
|
- "${SSL_PORT:-8443}:8443"
|
||||||
volumes:
|
volumes:
|
||||||
- termix-data:/app/data
|
- termix-data:/app/data
|
||||||
|
- termix-config:/app/config # Auto-generated .env keys are persisted here
|
||||||
|
# Optional: Mount custom SSL certificates
|
||||||
|
# - ./ssl:/app/ssl:ro
|
||||||
environment:
|
environment:
|
||||||
PORT: "8080"
|
# Basic configuration
|
||||||
|
- PORT=${PORT:-8080}
|
||||||
|
- NODE_ENV=${NODE_ENV:-production}
|
||||||
|
|
||||||
|
# SSL/TLS Configuration
|
||||||
|
- ENABLE_SSL=${ENABLE_SSL:-false}
|
||||||
|
- SSL_PORT=${SSL_PORT:-8443}
|
||||||
|
- SSL_DOMAIN=${SSL_DOMAIN:-localhost}
|
||||||
|
- SSL_CERT_PATH=${SSL_CERT_PATH:-/app/ssl/termix.crt}
|
||||||
|
- SSL_KEY_PATH=${SSL_KEY_PATH:-/app/ssl/termix.key}
|
||||||
|
|
||||||
|
# Security keys (auto-generated if not provided)
|
||||||
|
# Leave empty to auto-generate secure random keys on first startup
|
||||||
|
# Set values only if you need specific keys for multi-instance deployment
|
||||||
|
- JWT_SECRET=${JWT_SECRET:-}
|
||||||
|
- DATABASE_KEY=${DATABASE_KEY:-}
|
||||||
|
- INTERNAL_AUTH_TOKEN=${INTERNAL_AUTH_TOKEN:-}
|
||||||
|
|
||||||
|
# Database configuration
|
||||||
|
- DATABASE_ENCRYPTION=${DATABASE_ENCRYPTION:-true}
|
||||||
|
|
||||||
|
# CORS configuration
|
||||||
|
- ALLOWED_ORIGINS=${ALLOWED_ORIGINS:-*}
|
||||||
|
|
||||||
|
# Health check for both HTTP and HTTPS
|
||||||
|
healthcheck:
|
||||||
|
Setting `ALLOWED_ORIGINS` to `*` by default poses a significant security risk in production environments, as it permits cross-origin requests from any domain. For a more secure default configuration, consider changing this to a more restrictive value, for example, an empty string or a specific development URL like `http://localhost:8080`. The accompanying documentation should strongly advise users to configure this with their specific domain for production deployments.
|
|||||||
|
test: |
|
||||||
|
curl -f -k https://localhost:8443/health 2>/dev/null ||
|
||||||
|
curl -f http://localhost:8080/health 2>/dev/null ||
|
||||||
|
exit 1
|
||||||
|
interval: 30s
|
||||||
|
timeout: 10s
|
||||||
|
retries: 3
|
||||||
|
start_period: 40s
|
||||||
|
|
||||||
volumes:
|
volumes:
|
||||||
termix-data:
|
termix-data:
|
||||||
driver: local
|
driver: local
|
||||||
|
termix-config:
|
||||||
|
driver: local
|
||||||
|
|||||||
@@ -2,9 +2,25 @@
|
|||||||
set -e
|
set -e
|
||||||
|
|
||||||
export PORT=${PORT:-8080}
|
export PORT=${PORT:-8080}
|
||||||
|
export ENABLE_SSL=${ENABLE_SSL:-false}
|
||||||
|
export SSL_PORT=${SSL_PORT:-8443}
|
||||||
|
export SSL_CERT_PATH=${SSL_CERT_PATH:-/app/ssl/termix.crt}
|
||||||
|
export SSL_KEY_PATH=${SSL_KEY_PATH:-/app/ssl/termix.key}
|
||||||
|
|
||||||
echo "Configuring web UI to run on port: $PORT"
|
echo "Configuring web UI to run on port: $PORT"
|
||||||
|
|
||||||
envsubst '${PORT}' < /etc/nginx/nginx.conf > /etc/nginx/nginx.conf.tmp
|
# Choose nginx configuration based on SSL setting
|
||||||
|
# Default: HTTP-only for easy setup
|
||||||
|
# Set ENABLE_SSL=true to use HTTPS with automatic redirect
|
||||||
|
if [ "$ENABLE_SSL" = "true" ]; then
|
||||||
|
echo "SSL enabled - using HTTPS configuration with redirect"
|
||||||
|
NGINX_CONF_SOURCE="/etc/nginx/nginx-https.conf"
|
||||||
|
else
|
||||||
|
echo "SSL disabled - using HTTP-only configuration (default)"
|
||||||
|
NGINX_CONF_SOURCE="/etc/nginx/nginx.conf"
|
||||||
|
fi
|
||||||
|
|
||||||
|
envsubst '${PORT} ${SSL_PORT} ${SSL_CERT_PATH} ${SSL_KEY_PATH}' < $NGINX_CONF_SOURCE > /etc/nginx/nginx.conf.tmp
|
||||||
mv /etc/nginx/nginx.conf.tmp /etc/nginx/nginx.conf
|
mv /etc/nginx/nginx.conf.tmp /etc/nginx/nginx.conf
|
||||||
|
|
||||||
mkdir -p /app/data
|
mkdir -p /app/data
|
||||||
|
|||||||
211
docker/nginx-https.conf
Normal file
@@ -0,0 +1,211 @@
|
|||||||
|
events {
|
||||||
|
worker_connections 1024;
|
||||||
|
}
|
||||||
|
|
||||||
|
http {
|
||||||
|
include mime.types;
|
||||||
|
default_type application/octet-stream;
|
||||||
|
|
||||||
|
sendfile on;
|
||||||
|
keepalive_timeout 65;
|
||||||
|
|
||||||
|
# SSL Configuration
|
||||||
|
ssl_protocols TLSv1.2 TLSv1.3;
|
||||||
|
ssl_ciphers ECDHE-RSA-AES128-GCM-SHA256:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-RSA-AES128-SHA256:ECDHE-RSA-AES256-SHA384;
|
||||||
|
ssl_prefer_server_ciphers off;
|
||||||
|
ssl_session_cache shared:SSL:10m;
|
||||||
|
ssl_session_timeout 10m;
|
||||||
|
|
||||||
|
# HTTP Server - Redirect to HTTPS
|
||||||
|
server {
|
||||||
|
listen ${PORT};
|
||||||
|
server_name localhost;
|
||||||
|
|
||||||
|
# Redirect all HTTP traffic to HTTPS
|
||||||
|
return 301 https://$server_name:${SSL_PORT}$request_uri;
|
||||||
|
}
|
||||||
|
|
||||||
|
# HTTPS Server
|
||||||
|
server {
|
||||||
|
listen ${SSL_PORT} ssl;
|
||||||
|
server_name localhost;
|
||||||
|
|
||||||
|
# SSL Certificate paths
|
||||||
|
ssl_certificate ${SSL_CERT_PATH};
|
||||||
|
ssl_certificate_key ${SSL_KEY_PATH};
|
||||||
|
|
||||||
|
# Security headers for HTTPS
|
||||||
|
add_header Strict-Transport-Security "max-age=31536000; includeSubDomains" always;
|
||||||
|
add_header X-Frame-Options DENY always;
|
||||||
|
add_header X-Content-Type-Options nosniff always;
|
||||||
|
add_header X-XSS-Protection "1; mode=block" always;
|
||||||
|
|
||||||
|
location / {
|
||||||
|
root /usr/share/nginx/html;
|
||||||
|
index index.html index.htm;
|
||||||
|
}
|
||||||
|
|
||||||
|
location ~ ^/users(/.*)?$ {
|
||||||
|
proxy_pass http://127.0.0.1:8081;
|
||||||
|
proxy_http_version 1.1;
|
||||||
|
proxy_set_header Host $host;
|
||||||
|
proxy_set_header X-Real-IP $remote_addr;
|
||||||
|
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||||
|
proxy_set_header X-Forwarded-Proto $scheme;
|
||||||
|
}
|
||||||
|
|
||||||
|
location ~ ^/version(/.*)?$ {
|
||||||
|
proxy_pass http://127.0.0.1:8081;
|
||||||
|
proxy_http_version 1.1;
|
||||||
|
proxy_set_header Host $host;
|
||||||
|
proxy_set_header X-Real-IP $remote_addr;
|
||||||
|
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||||
|
proxy_set_header X-Forwarded-Proto $scheme;
|
||||||
|
}
|
||||||
|
|
||||||
|
location ~ ^/releases(/.*)?$ {
|
||||||
|
proxy_pass http://127.0.0.1:8081;
|
||||||
|
proxy_http_version 1.1;
|
||||||
|
proxy_set_header Host $host;
|
||||||
|
proxy_set_header X-Real-IP $remote_addr;
|
||||||
|
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||||
|
proxy_set_header X-Forwarded-Proto $scheme;
|
||||||
|
}
|
||||||
|
|
||||||
|
location ~ ^/alerts(/.*)?$ {
|
||||||
|
proxy_pass http://127.0.0.1:8081;
|
||||||
|
proxy_http_version 1.1;
|
||||||
|
proxy_set_header Host $host;
|
||||||
|
proxy_set_header X-Real-IP $remote_addr;
|
||||||
|
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||||
|
proxy_set_header X-Forwarded-Proto $scheme;
|
||||||
|
}
|
||||||
|
|
||||||
|
location ~ ^/credentials(/.*)?$ {
|
||||||
|
proxy_pass http://127.0.0.1:8081;
|
||||||
|
proxy_http_version 1.1;
|
||||||
|
proxy_set_header Host $host;
|
||||||
|
proxy_set_header X-Real-IP $remote_addr;
|
||||||
|
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||||
|
proxy_set_header X-Forwarded-Proto $scheme;
|
||||||
|
}
|
||||||
|
|
||||||
|
location /ssh/ {
|
||||||
|
proxy_pass http://127.0.0.1:8081;
|
||||||
|
proxy_http_version 1.1;
|
||||||
|
proxy_set_header Host $host;
|
||||||
|
proxy_set_header X-Real-IP $remote_addr;
|
||||||
|
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||||
|
proxy_set_header X-Forwarded-Proto $scheme;
|
||||||
|
}
|
||||||
|
|
||||||
|
# WebSocket proxy for authenticated terminal connections
|
||||||
|
location /ssh/websocket/ {
|
||||||
|
# Pass to WebSocket server with authentication support
|
||||||
|
proxy_pass http://127.0.0.1:8082/;
|
||||||
|
proxy_http_version 1.1;
|
||||||
|
|
||||||
|
# WebSocket upgrade headers
|
||||||
|
proxy_set_header Upgrade $http_upgrade;
|
||||||
|
proxy_set_header Connection "upgrade";
|
||||||
|
proxy_set_header Host $host;
|
||||||
|
proxy_cache_bypass $http_upgrade;
|
||||||
|
|
||||||
|
# Pass client information for authentication logging
|
||||||
|
proxy_set_header X-Real-IP $remote_addr;
|
||||||
|
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||||
|
proxy_set_header X-Forwarded-Proto $scheme;
|
||||||
|
|
||||||
|
# Query parameters are passed by default with proxy_pass
|
||||||
|
|
||||||
|
# WebSocket timeouts (longer for terminal sessions)
|
||||||
|
proxy_read_timeout 86400s; # 24 hours
|
||||||
|
proxy_send_timeout 86400s; # 24 hours
|
||||||
|
proxy_connect_timeout 10s; # Quick auth check
|
||||||
|
|
||||||
|
# Disable buffering for real-time terminal
|
||||||
|
proxy_buffering off;
|
||||||
|
proxy_request_buffering off;
|
||||||
|
|
||||||
|
# Handle connection errors gracefully
|
||||||
|
proxy_next_upstream error timeout invalid_header http_500 http_502 http_503;
|
||||||
|
}
|
||||||
|
|
||||||
|
location /ssh/tunnel/ {
|
||||||
|
proxy_pass http://127.0.0.1:8083;
|
||||||
|
proxy_http_version 1.1;
|
||||||
|
proxy_set_header Host $host;
|
||||||
|
proxy_set_header X-Real-IP $remote_addr;
|
||||||
|
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||||
|
proxy_set_header X-Forwarded-Proto $scheme;
|
||||||
|
}
|
||||||
|
|
||||||
|
location /ssh/file_manager/recent {
|
||||||
|
proxy_pass http://127.0.0.1:8081;
|
||||||
|
proxy_http_version 1.1;
|
||||||
|
proxy_set_header Host $host;
|
||||||
|
proxy_set_header X-Real-IP $remote_addr;
|
||||||
|
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||||
|
proxy_set_header X-Forwarded-Proto $scheme;
|
||||||
|
}
|
||||||
|
|
||||||
|
location /ssh/file_manager/pinned {
|
||||||
|
proxy_pass http://127.0.0.1:8081;
|
||||||
|
proxy_http_version 1.1;
|
||||||
|
proxy_set_header Host $host;
|
||||||
|
proxy_set_header X-Real-IP $remote_addr;
|
||||||
|
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||||
|
proxy_set_header X-Forwarded-Proto $scheme;
|
||||||
|
}
|
||||||
|
|
||||||
|
location /ssh/file_manager/shortcuts {
|
||||||
|
proxy_pass http://127.0.0.1:8081;
|
||||||
|
proxy_http_version 1.1;
|
||||||
|
proxy_set_header Host $host;
|
||||||
|
proxy_set_header X-Real-IP $remote_addr;
|
||||||
|
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||||
|
proxy_set_header X-Forwarded-Proto $scheme;
|
||||||
|
}
|
||||||
|
|
||||||
|
location /ssh/file_manager/ssh/ {
|
||||||
|
proxy_pass http://127.0.0.1:8084;
|
||||||
|
proxy_http_version 1.1;
|
||||||
|
proxy_set_header Host $host;
|
||||||
|
proxy_set_header X-Real-IP $remote_addr;
|
||||||
|
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||||
|
proxy_set_header X-Forwarded-Proto $scheme;
|
||||||
|
}
|
||||||
|
|
||||||
|
location /health {
|
||||||
|
proxy_pass http://127.0.0.1:8081;
|
||||||
|
proxy_http_version 1.1;
|
||||||
|
proxy_set_header Host $host;
|
||||||
|
proxy_set_header X-Real-IP $remote_addr;
|
||||||
|
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||||
|
proxy_set_header X-Forwarded-Proto $scheme;
|
||||||
|
}
|
||||||
|
|
||||||
|
location ~ ^/status(/.*)?$ {
|
||||||
|
proxy_pass http://127.0.0.1:8085;
|
||||||
|
proxy_http_version 1.1;
|
||||||
|
proxy_set_header Host $host;
|
||||||
|
proxy_set_header X-Real-IP $remote_addr;
|
||||||
|
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||||
|
proxy_set_header X-Forwarded-Proto $scheme;
|
||||||
|
}
|
||||||
|
|
||||||
|
location ~ ^/metrics(/.*)?$ {
|
||||||
|
proxy_pass http://127.0.0.1:8085;
|
||||||
|
proxy_http_version 1.1;
|
||||||
|
proxy_set_header Host $host;
|
||||||
|
proxy_set_header X-Real-IP $remote_addr;
|
||||||
|
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||||
|
proxy_set_header X-Forwarded-Proto $scheme;
|
||||||
|
}
|
||||||
|
|
||||||
|
error_page 500 502 503 504 /50x.html;
|
||||||
|
location = /50x.html {
|
||||||
|
root /usr/share/nginx/html;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -9,10 +9,23 @@ http {
|
|||||||
sendfile on;
|
sendfile on;
|
||||||
keepalive_timeout 65;
|
keepalive_timeout 65;
|
||||||
|
|
||||||
|
# SSL Configuration
|
||||||
|
ssl_protocols TLSv1.2 TLSv1.3;
|
||||||
|
ssl_ciphers ECDHE-RSA-AES128-GCM-SHA256:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-RSA-AES128-SHA256:ECDHE-RSA-AES256-SHA384;
|
||||||
|
ssl_prefer_server_ciphers off;
|
||||||
|
ssl_session_cache shared:SSL:10m;
|
||||||
|
ssl_session_timeout 10m;
|
||||||
|
|
||||||
|
# HTTP Server - Redirect to HTTPS when SSL enabled
|
||||||
server {
|
server {
|
||||||
listen ${PORT};
|
listen ${PORT};
|
||||||
server_name localhost;
|
server_name localhost;
|
||||||
|
|
||||||
|
# Security headers
|
||||||
|
add_header X-Frame-Options DENY always;
|
||||||
|
add_header X-Content-Type-Options nosniff always;
|
||||||
|
add_header X-XSS-Protection "1; mode=block" always;
|
||||||
|
|
||||||
location / {
|
location / {
|
||||||
root /usr/share/nginx/html;
|
root /usr/share/nginx/html;
|
||||||
index index.html index.htm;
|
index index.html index.htm;
|
||||||
@@ -72,25 +85,36 @@ http {
|
|||||||
proxy_set_header X-Forwarded-Proto $scheme;
|
proxy_set_header X-Forwarded-Proto $scheme;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
# WebSocket proxy for authenticated terminal connections
|
||||||
location /ssh/websocket/ {
|
location /ssh/websocket/ {
|
||||||
|
# Pass to WebSocket server with authentication support
|
||||||
proxy_pass http://127.0.0.1:8082/;
|
proxy_pass http://127.0.0.1:8082/;
|
||||||
proxy_http_version 1.1;
|
proxy_http_version 1.1;
|
||||||
|
|
||||||
|
# WebSocket upgrade headers
|
||||||
proxy_set_header Upgrade $http_upgrade;
|
proxy_set_header Upgrade $http_upgrade;
|
||||||
proxy_set_header Connection "Upgrade";
|
proxy_set_header Connection "upgrade";
|
||||||
proxy_set_header Host $host;
|
proxy_set_header Host $host;
|
||||||
proxy_cache_bypass $http_upgrade;
|
proxy_cache_bypass $http_upgrade;
|
||||||
|
|
||||||
proxy_read_timeout 300s;
|
# Pass client information for authentication logging
|
||||||
proxy_send_timeout 300s;
|
|
||||||
proxy_connect_timeout 75s;
|
|
||||||
proxy_set_header Connection "";
|
|
||||||
|
|
||||||
proxy_buffering off;
|
|
||||||
proxy_request_buffering off;
|
|
||||||
|
|
||||||
proxy_set_header X-Real-IP $remote_addr;
|
proxy_set_header X-Real-IP $remote_addr;
|
||||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||||
proxy_set_header X-Forwarded-Proto $scheme;
|
proxy_set_header X-Forwarded-Proto $scheme;
|
||||||
|
|
||||||
|
# Query parameters are passed by default with proxy_pass
|
||||||
|
|
||||||
|
# WebSocket timeouts (longer for terminal sessions)
|
||||||
|
proxy_read_timeout 86400s; # 24 hours
|
||||||
|
proxy_send_timeout 86400s; # 24 hours
|
||||||
|
proxy_connect_timeout 10s; # Quick auth check
|
||||||
|
|
||||||
|
# Disable buffering for real-time terminal
|
||||||
|
proxy_buffering off;
|
||||||
|
proxy_request_buffering off;
|
||||||
|
|
||||||
|
# Handle connection errors gracefully
|
||||||
|
proxy_next_upstream error timeout invalid_header http_500 http_502 http_503;
|
||||||
}
|
}
|
||||||
|
|
||||||
location /ssh/tunnel/ {
|
location /ssh/tunnel/ {
|
||||||
|
|||||||
@@ -23,21 +23,21 @@ contextBridge.exposeInMainWorld("electronAPI", {
|
|||||||
|
|
||||||
invoke: (channel, ...args) => ipcRenderer.invoke(channel, ...args),
|
invoke: (channel, ...args) => ipcRenderer.invoke(channel, ...args),
|
||||||
|
|
||||||
// ================== 拖拽API ==================
|
// ================== Drag & Drop API ==================
|
||||||
|
|
||||||
// 创建临时文件用于拖拽
|
// Create temporary file for dragging
|
||||||
createTempFile: (fileData) =>
|
createTempFile: (fileData) =>
|
||||||
ipcRenderer.invoke("create-temp-file", fileData),
|
ipcRenderer.invoke("create-temp-file", fileData),
|
||||||
|
|
||||||
// 创建临时文件夹用于拖拽
|
// Create temporary folder for dragging
|
||||||
createTempFolder: (folderData) =>
|
createTempFolder: (folderData) =>
|
||||||
ipcRenderer.invoke("create-temp-folder", folderData),
|
ipcRenderer.invoke("create-temp-folder", folderData),
|
||||||
|
|
||||||
// 开始拖拽到桌面
|
// Start dragging to desktop
|
||||||
startDragToDesktop: (dragData) =>
|
startDragToDesktop: (dragData) =>
|
||||||
ipcRenderer.invoke("start-drag-to-desktop", dragData),
|
ipcRenderer.invoke("start-drag-to-desktop", dragData),
|
||||||
|
|
||||||
// 清理临时文件
|
// Cleanup temporary files
|
||||||
cleanupTempFile: (tempId) => ipcRenderer.invoke("cleanup-temp-file", tempId),
|
cleanupTempFile: (tempId) => ipcRenderer.invoke("cleanup-temp-file", tempId),
|
||||||
});
|
});
|
||||||
|
|
||||||
|
|||||||
2696
package-lock.json
generated
15
package.json
@@ -9,9 +9,12 @@
|
|||||||
"scripts": {
|
"scripts": {
|
||||||
"clean": "npx prettier . --write",
|
"clean": "npx prettier . --write",
|
||||||
"dev": "vite",
|
"dev": "vite",
|
||||||
|
"dev:https": "cross-env VITE_HTTPS=true vite",
|
||||||
"build": "vite build && tsc -p tsconfig.node.json",
|
"build": "vite build && tsc -p tsconfig.node.json",
|
||||||
"build:backend": "tsc -p tsconfig.node.json",
|
"build:backend": "tsc -p tsconfig.node.json",
|
||||||
"dev:backend": "tsc -p tsconfig.node.json && node ./dist/backend/backend/starter.js",
|
"dev:backend": "tsc -p tsconfig.node.json && node ./dist/backend/backend/starter.js",
|
||||||
|
"start": "npm run build:backend && node ./dist/backend/backend/starter.js",
|
||||||
|
"start:ssl": "npm run start",
|
||||||
"lint": "eslint .",
|
"lint": "eslint .",
|
||||||
"preview": "vite preview",
|
"preview": "vite preview",
|
||||||
"electron": "electron .",
|
"electron": "electron .",
|
||||||
@@ -23,6 +26,9 @@
|
|||||||
"migrate:encryption": "tsc -p tsconfig.node.json && node ./dist/backend/backend/utils/encryption-migration.js"
|
"migrate:encryption": "tsc -p tsconfig.node.json && node ./dist/backend/backend/utils/encryption-migration.js"
|
||||||
},
|
},
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
|
"@codemirror/autocomplete": "^6.18.7",
|
||||||
|
"@codemirror/comment": "^0.19.1",
|
||||||
|
"@codemirror/search": "^6.5.11",
|
||||||
"@hookform/resolvers": "^5.1.1",
|
"@hookform/resolvers": "^5.1.1",
|
||||||
"@monaco-editor/react": "^4.7.0",
|
"@monaco-editor/react": "^4.7.0",
|
||||||
"@radix-ui/react-accordion": "^1.2.11",
|
"@radix-ui/react-accordion": "^1.2.11",
|
||||||
@@ -81,15 +87,23 @@
|
|||||||
"nanoid": "^5.1.5",
|
"nanoid": "^5.1.5",
|
||||||
"next-themes": "^0.4.6",
|
"next-themes": "^0.4.6",
|
||||||
"node-fetch": "^3.3.2",
|
"node-fetch": "^3.3.2",
|
||||||
|
"pdfjs-dist": "^5.4.149",
|
||||||
"qrcode": "^1.5.4",
|
"qrcode": "^1.5.4",
|
||||||
"react": "^19.1.0",
|
"react": "^19.1.0",
|
||||||
"react-dom": "^19.1.0",
|
"react-dom": "^19.1.0",
|
||||||
|
"react-h5-audio-player": "^3.10.1",
|
||||||
"react-hook-form": "^7.60.0",
|
"react-hook-form": "^7.60.0",
|
||||||
"react-i18next": "^15.7.3",
|
"react-i18next": "^15.7.3",
|
||||||
"react-icons": "^5.5.0",
|
"react-icons": "^5.5.0",
|
||||||
|
"react-markdown": "^10.1.0",
|
||||||
|
"react-pdf": "^10.1.0",
|
||||||
|
"react-photo-view": "^1.2.7",
|
||||||
|
"react-player": "^3.3.3",
|
||||||
"react-resizable-panels": "^3.0.3",
|
"react-resizable-panels": "^3.0.3",
|
||||||
"react-simple-keyboard": "^3.8.120",
|
"react-simple-keyboard": "^3.8.120",
|
||||||
|
"react-syntax-highlighter": "^15.6.6",
|
||||||
"react-xtermjs": "^1.0.10",
|
"react-xtermjs": "^1.0.10",
|
||||||
|
"remark-gfm": "^4.0.1",
|
||||||
"sonner": "^2.0.7",
|
"sonner": "^2.0.7",
|
||||||
"speakeasy": "^2.0.0",
|
"speakeasy": "^2.0.0",
|
||||||
"ssh2": "^1.16.0",
|
"ssh2": "^1.16.0",
|
||||||
@@ -113,6 +127,7 @@
|
|||||||
"@vitejs/plugin-react-swc": "^3.10.2",
|
"@vitejs/plugin-react-swc": "^3.10.2",
|
||||||
"autoprefixer": "^10.4.21",
|
"autoprefixer": "^10.4.21",
|
||||||
"concurrently": "^9.2.1",
|
"concurrently": "^9.2.1",
|
||||||
|
"cross-env": "^10.0.0",
|
||||||
"electron": "^38.0.0",
|
"electron": "^38.0.0",
|
||||||
"electron-builder": "^26.0.12",
|
"electron-builder": "^26.0.12",
|
||||||
"electron-icon-builder": "^2.0.1",
|
"electron-icon-builder": "^2.0.1",
|
||||||
|
|||||||
29
public/pdf.worker.min.js
vendored
Normal file
176
scripts/enable-ssl.sh
Normal file
@@ -0,0 +1,176 @@
|
|||||||
|
#!/bin/bash
|
||||||
|
|
||||||
|
# Termix SSL Quick Setup Script
|
||||||
|
# Enables HTTPS/WSS with one command
|
||||||
|
|
||||||
|
set -e
|
||||||
|
|
||||||
|
# Colors for output
|
||||||
|
RED='\033[0;31m'
|
||||||
|
GREEN='\033[0;32m'
|
||||||
|
YELLOW='\033[1;33m'
|
||||||
|
BLUE='\033[0;34m'
|
||||||
|
CYAN='\033[0;36m'
|
||||||
|
NC='\033[0m' # No Color
|
||||||
|
|
||||||
|
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
|
||||||
|
PROJECT_ROOT="$(dirname "$SCRIPT_DIR")"
|
||||||
|
ENV_FILE="$PROJECT_ROOT/.env"
|
||||||
|
|
||||||
|
log_info() {
|
||||||
|
echo -e "${BLUE}[SSL Setup]${NC} $1"
|
||||||
|
}
|
||||||
|
|
||||||
|
log_success() {
|
||||||
|
echo -e "${GREEN}[SSL Setup]${NC} $1"
|
||||||
|
}
|
||||||
|
|
||||||
|
log_warn() {
|
||||||
|
echo -e "${YELLOW}[SSL Setup]${NC} $1"
|
||||||
|
}
|
||||||
|
|
||||||
|
log_error() {
|
||||||
|
echo -e "${RED}[SSL Setup]${NC} $1"
|
||||||
|
}
|
||||||
|
|
||||||
|
log_header() {
|
||||||
|
echo -e "${CYAN}$1${NC}"
|
||||||
|
}
|
||||||
|
|
||||||
|
print_banner() {
|
||||||
|
echo ""
|
||||||
|
echo "=============================================="
|
||||||
|
log_header "🔒 Termix SSL Quick Setup"
|
||||||
|
echo "=============================================="
|
||||||
|
echo ""
|
||||||
|
log_info "This script will:"
|
||||||
|
echo " ✅ Generate SSL certificates automatically"
|
||||||
|
echo " ✅ Create/update .env configuration"
|
||||||
|
echo " ✅ Enable HTTPS/WSS support"
|
||||||
|
echo " ✅ Generate security keys"
|
||||||
|
echo ""
|
||||||
|
}
|
||||||
|
|
||||||
|
generate_keys() {
|
||||||
|
log_info "🔑 Generating security keys..."
|
||||||
|
|
||||||
|
# Generate JWT secret
|
||||||
|
JWT_SECRET=$(openssl rand -hex 32)
|
||||||
|
log_success "Generated JWT secret"
|
||||||
|
|
||||||
|
# Generate database key
|
||||||
|
DATABASE_KEY=$(openssl rand -hex 32)
|
||||||
|
log_success "Generated database encryption key"
|
||||||
|
|
||||||
|
echo "JWT_SECRET=$JWT_SECRET" >> "$ENV_FILE"
|
||||||
|
echo "DATABASE_KEY=$DATABASE_KEY" >> "$ENV_FILE"
|
||||||
|
|
||||||
|
log_success "Security keys added to .env file"
|
||||||
|
}
|
||||||
|
|
||||||
|
setup_env_file() {
|
||||||
|
log_info "📝 Setting up environment configuration..."
|
||||||
|
|
||||||
|
if [[ -f "$ENV_FILE" ]]; then
|
||||||
|
log_warn "⚠️ .env file already exists, creating backup..."
|
||||||
|
cp "$ENV_FILE" "$ENV_FILE.backup.$(date +%s)"
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Create or update .env file
|
||||||
|
cat > "$ENV_FILE" << EOF
|
||||||
|
# Termix SSL Configuration - Auto-generated $(date)
|
||||||
|
|
||||||
|
# SSL/TLS Configuration
|
||||||
|
ENABLE_SSL=true
|
||||||
|
SSL_PORT=8443
|
||||||
|
SSL_DOMAIN=localhost
|
||||||
|
PORT=8080
|
||||||
|
|
||||||
|
# Node environment
|
||||||
|
NODE_ENV=production
|
||||||
|
|
||||||
|
# CORS configuration
|
||||||
|
ALLOWED_ORIGINS=*
|
||||||
|
|
||||||
|
# Database encryption
|
||||||
|
DATABASE_ENCRYPTION=true
|
||||||
|
|
||||||
|
EOF
|
||||||
|
|
||||||
|
# Add security keys
|
||||||
|
generate_keys
|
||||||
|
|
||||||
|
log_success "Environment configuration created at $ENV_FILE"
|
||||||
|
}
|
||||||
|
|
||||||
|
setup_ssl_certificates() {
|
||||||
|
log_info "🔐 Setting up SSL certificates..."
|
||||||
|
|
||||||
|
# Run SSL setup script
|
||||||
|
if [[ -f "$SCRIPT_DIR/setup-ssl.sh" ]]; then
|
||||||
|
bash "$SCRIPT_DIR/setup-ssl.sh"
|
||||||
|
else
|
||||||
|
log_error "❌ SSL setup script not found at $SCRIPT_DIR/setup-ssl.sh"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
}
|
||||||
|
|
||||||
|
show_next_steps() {
|
||||||
|
echo ""
|
||||||
|
log_header "🚀 SSL Setup Complete!"
|
||||||
|
echo ""
|
||||||
|
log_success "Your Termix instance is now configured for HTTPS/WSS!"
|
||||||
|
echo ""
|
||||||
|
echo "Next steps:"
|
||||||
|
echo ""
|
||||||
|
echo "1. 🐳 Using Docker:"
|
||||||
|
echo " docker-compose -f docker-compose.ssl.yml up"
|
||||||
|
echo ""
|
||||||
|
echo "2. 📦 Using npm:"
|
||||||
|
echo " npm start"
|
||||||
|
echo ""
|
||||||
|
echo "3. 🌐 Access your application:"
|
||||||
|
echo " • HTTPS: https://localhost:8443"
|
||||||
|
echo " • HTTP: http://localhost:8080 (redirects to HTTPS)"
|
||||||
|
echo ""
|
||||||
|
echo "4. 📱 WebSocket connections will automatically use WSS"
|
||||||
|
echo ""
|
||||||
|
log_warn "⚠️ Browser Warning: Self-signed certificates will show security warnings"
|
||||||
|
echo ""
|
||||||
|
echo "For production deployment:"
|
||||||
|
echo "• Replace self-signed certificates with CA-signed certificates"
|
||||||
|
echo "• Update SSL_DOMAIN in .env to your actual domain"
|
||||||
|
echo "• Set proper ALLOWED_ORIGINS for CORS"
|
||||||
|
echo ""
|
||||||
|
|
||||||
|
# Show generated keys
|
||||||
|
if [[ -f "$ENV_FILE" ]]; then
|
||||||
|
echo "Generated security keys (keep these secure!):"
|
||||||
|
echo "• JWT_SECRET: $(grep JWT_SECRET "$ENV_FILE" | cut -d= -f2)"
|
||||||
|
echo "• DATABASE_KEY: $(grep DATABASE_KEY "$ENV_FILE" | cut -d= -f2)"
|
||||||
|
echo ""
|
||||||
|
fi
|
||||||
|
}
|
||||||
|
|
||||||
|
# Main execution
|
||||||
|
main() {
|
||||||
|
print_banner
|
||||||
|
|
||||||
|
# Check prerequisites
|
||||||
|
if ! command -v openssl &> /dev/null; then
|
||||||
|
log_error "❌ OpenSSL is not installed. Please install OpenSSL first."
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Setup environment
|
||||||
|
setup_env_file
|
||||||
|
|
||||||
|
# Setup SSL certificates
|
||||||
|
setup_ssl_certificates
|
||||||
|
|
||||||
|
# Show completion message
|
||||||
|
show_next_steps
|
||||||
|
}
|
||||||
|
|
||||||
|
# Run main function
|
||||||
|
main "$@"
|
||||||
195
scripts/setup-ssl.sh
Normal file
@@ -0,0 +1,195 @@
|
|||||||
|
#!/bin/bash
|
||||||
|
|
||||||
|
# Termix SSL Certificate Auto-Setup Script
|
||||||
|
# Linus principle: Simple, automatic, works everywhere
|
||||||
|
|
||||||
|
set -e
|
||||||
|
|
||||||
|
# Configuration
|
||||||
|
SSL_DIR="$(dirname "$0")/../ssl"
|
||||||
|
CERT_FILE="$SSL_DIR/termix.crt"
|
||||||
|
KEY_FILE="$SSL_DIR/termix.key"
|
||||||
|
DAYS_VALID=365
|
||||||
|
|
||||||
|
# Default domain - can be overridden by environment variable
|
||||||
|
DOMAIN=${SSL_DOMAIN:-"localhost"}
|
||||||
|
ALT_NAMES=${SSL_ALT_NAMES:-"DNS:localhost,DNS:127.0.0.1,DNS:*.localhost,IP:127.0.0.1"}
|
||||||
|
|
||||||
|
# Colors for output
|
||||||
|
RED='\033[0;31m'
|
||||||
|
GREEN='\033[0;32m'
|
||||||
|
YELLOW='\033[1;33m'
|
||||||
|
BLUE='\033[0;34m'
|
||||||
|
NC='\033[0m' # No Color
|
||||||
|
|
||||||
|
log_info() {
|
||||||
|
echo -e "${BLUE}[SSL Setup]${NC} $1"
|
||||||
|
}
|
||||||
|
|
||||||
|
log_success() {
|
||||||
|
echo -e "${GREEN}[SSL Setup]${NC} $1"
|
||||||
|
}
|
||||||
|
|
||||||
|
log_warn() {
|
||||||
|
echo -e "${YELLOW}[SSL Setup]${NC} $1"
|
||||||
|
}
|
||||||
|
|
||||||
|
log_error() {
|
||||||
|
echo -e "${RED}[SSL Setup]${NC} $1"
|
||||||
|
}
|
||||||
|
|
||||||
|
# Check if certificate exists and is still valid
|
||||||
|
check_existing_cert() {
|
||||||
|
if [[ -f "$CERT_FILE" && -f "$KEY_FILE" ]]; then
|
||||||
|
# Check if certificate is still valid for at least 30 days
|
||||||
|
if openssl x509 -in "$CERT_FILE" -checkend 2592000 -noout 2>/dev/null; then
|
||||||
|
log_success "✅ Valid SSL certificate already exists"
|
||||||
|
log_info "Certificate: $CERT_FILE"
|
||||||
|
log_info "Private Key: $KEY_FILE"
|
||||||
|
|
||||||
|
# Show certificate info
|
||||||
|
local expiry=$(openssl x509 -in "$CERT_FILE" -noout -enddate 2>/dev/null | cut -d= -f2)
|
||||||
|
log_info "Expires: $expiry"
|
||||||
|
return 0
|
||||||
|
else
|
||||||
|
log_warn "⚠️ Existing certificate is expired or expiring soon"
|
||||||
|
fi
|
||||||
|
fi
|
||||||
|
return 1
|
||||||
|
}
|
||||||
|
|
||||||
|
# Generate self-signed certificate
|
||||||
|
generate_certificate() {
|
||||||
|
log_info "🔐 Generating new SSL certificate for domain: $DOMAIN"
|
||||||
|
|
||||||
|
# Create SSL directory if it doesn't exist
|
||||||
|
mkdir -p "$SSL_DIR"
|
||||||
|
|
||||||
|
# Create OpenSSL config for SAN (Subject Alternative Names)
|
||||||
|
local config_file="$SSL_DIR/openssl.conf"
|
||||||
|
cat > "$config_file" << EOF
|
||||||
|
[req]
|
||||||
|
default_bits = 2048
|
||||||
|
prompt = no
|
||||||
|
default_md = sha256
|
||||||
|
distinguished_name = dn
|
||||||
|
req_extensions = v3_req
|
||||||
|
|
||||||
|
[dn]
|
||||||
|
C=US
|
||||||
|
ST=State
|
||||||
|
L=City
|
||||||
|
O=Termix
|
||||||
|
OU=IT Department
|
||||||
|
CN=$DOMAIN
|
||||||
|
|
||||||
|
[v3_req]
|
||||||
|
basicConstraints = CA:FALSE
|
||||||
|
keyUsage = nonRepudiation, digitalSignature, keyEncipherment
|
||||||
|
subjectAltName = @alt_names
|
||||||
|
|
||||||
|
[alt_names]
|
||||||
|
DNS.1 = localhost
|
||||||
|
DNS.2 = 127.0.0.1
|
||||||
|
DNS.3 = *.localhost
|
||||||
|
IP.1 = 127.0.0.1
|
||||||
|
EOF
|
||||||
|
|
||||||
|
# Add custom alt names if provided
|
||||||
|
if [[ -n "$SSL_ALT_NAMES" ]]; then
|
||||||
|
local counter=2
|
||||||
|
IFS=',' read -ra NAMES <<< "$SSL_ALT_NAMES"
|
||||||
|
for name in "${NAMES[@]}"; do
|
||||||
|
name=$(echo "$name" | xargs) # trim whitespace
|
||||||
|
if [[ "$name" == DNS:* ]]; then
|
||||||
|
echo "DNS.$((counter++)) = ${name#DNS:}" >> "$config_file"
|
||||||
|
elif [[ "$name" == IP:* ]]; then
|
||||||
|
echo "IP.$((counter++)) = ${name#IP:}" >> "$config_file"
|
||||||
|
fi
|
||||||
|
done
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Generate private key
|
||||||
|
log_info "📝 Generating private key..."
|
||||||
|
openssl genrsa -out "$KEY_FILE" 2048
|
||||||
|
|
||||||
|
# Generate certificate
|
||||||
|
log_info "📄 Generating certificate..."
|
||||||
|
openssl req -new -x509 -key "$KEY_FILE" -out "$CERT_FILE" -days $DAYS_VALID -config "$config_file" -extensions v3_req
|
||||||
|
|
||||||
|
# Set proper permissions
|
||||||
|
chmod 600 "$KEY_FILE"
|
||||||
|
chmod 644 "$CERT_FILE"
|
||||||
|
|
||||||
|
# Clean up temp config
|
||||||
|
rm -f "$config_file"
|
||||||
|
|
||||||
|
log_success "✅ SSL certificate generated successfully"
|
||||||
|
log_info "Certificate: $CERT_FILE"
|
||||||
|
log_info "Private Key: $KEY_FILE"
|
||||||
|
log_info "Valid for: $DAYS_VALID days"
|
||||||
|
}
|
||||||
|
|
||||||
|
# Show certificate information
|
||||||
|
show_certificate_info() {
|
||||||
|
if [[ -f "$CERT_FILE" ]]; then
|
||||||
|
echo ""
|
||||||
|
log_info "📋 Certificate Information:"
|
||||||
|
openssl x509 -in "$CERT_FILE" -noout -subject -issuer -dates
|
||||||
|
|
||||||
|
echo ""
|
||||||
|
log_info "🌐 Subject Alternative Names:"
|
||||||
|
openssl x509 -in "$CERT_FILE" -noout -text | grep -A1 "Subject Alternative Name" | tail -1 | sed 's/^[[:space:]]*//'
|
||||||
|
fi
|
||||||
|
}
|
||||||
|
|
||||||
|
# Main execution
|
||||||
|
main() {
|
||||||
|
echo ""
|
||||||
|
echo "=============================================="
|
||||||
|
echo "🔒 Termix SSL Certificate Auto-Setup"
|
||||||
|
echo "=============================================="
|
||||||
|
echo ""
|
||||||
|
|
||||||
|
log_info "Target domain: $DOMAIN"
|
||||||
|
log_info "SSL directory: $SSL_DIR"
|
||||||
|
|
||||||
|
# Check if OpenSSL is available
|
||||||
|
if ! command -v openssl &> /dev/null; then
|
||||||
|
log_error "❌ OpenSSL is not installed. Please install OpenSSL first."
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Check existing certificate
|
||||||
|
if check_existing_cert; then
|
||||||
|
show_certificate_info
|
||||||
|
echo ""
|
||||||
|
log_info "🚀 SSL setup complete - ready for HTTPS/WSS!"
|
||||||
|
echo ""
|
||||||
|
echo "To use the certificate:"
|
||||||
|
echo " - Nginx SSL cert: $CERT_FILE"
|
||||||
|
echo " - Nginx SSL key: $KEY_FILE"
|
||||||
|
echo ""
|
||||||
|
return 0
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Generate new certificate
|
||||||
|
generate_certificate
|
||||||
|
show_certificate_info
|
||||||
|
|
||||||
|
echo ""
|
||||||
|
log_success "🚀 SSL setup complete - ready for HTTPS/WSS!"
|
||||||
|
echo ""
|
||||||
|
echo "Next steps:"
|
||||||
|
echo " 1. Update your Nginx configuration to use these certificates"
|
||||||
|
echo " 2. Restart Nginx to enable HTTPS/WSS"
|
||||||
|
echo " 3. Access your application via https://localhost"
|
||||||
|
echo ""
|
||||||
|
|
||||||
|
# Security note for self-signed certificates
|
||||||
|
log_warn "⚠️ Note: Self-signed certificates will show browser warnings"
|
||||||
|
log_info "💡 For production, consider using Let's Encrypt or a commercial CA"
|
||||||
|
}
|
||||||
|
|
||||||
|
# Run main function
|
||||||
|
main "$@"
|
||||||
@@ -5,6 +5,9 @@ import fs from "fs";
|
|||||||
import path from "path";
|
import path from "path";
|
||||||
import { databaseLogger } from "../../utils/logger.js";
|
import { databaseLogger } from "../../utils/logger.js";
|
||||||
import { DatabaseFileEncryption } from "../../utils/database-file-encryption.js";
|
import { DatabaseFileEncryption } from "../../utils/database-file-encryption.js";
|
||||||
|
import { SystemCrypto } from "../../utils/system-crypto.js";
|
||||||
|
import { DatabaseMigration } from "../../utils/database-migration.js";
|
||||||
|
import { DatabaseSaveTrigger } from "../../utils/database-save-trigger.js";
|
||||||
|
|
||||||
const dataDir = process.env.DATA_DIR || "./db/data";
|
const dataDir = process.env.DATA_DIR || "./db/data";
|
||||||
const dbDir = path.resolve(dataDir);
|
const dbDir = path.resolve(dataDir);
|
||||||
@@ -25,8 +28,29 @@ const encryptedDbPath = `${dbPath}.encrypted`;
|
|||||||
let actualDbPath = ":memory:"; // Always use memory database
|
let actualDbPath = ":memory:"; // Always use memory database
|
||||||
let memoryDatabase: Database.Database;
|
let memoryDatabase: Database.Database;
|
||||||
let isNewDatabase = false;
|
let isNewDatabase = false;
|
||||||
|
let sqlite: Database.Database; // Module-level sqlite instance
|
||||||
|
|
||||||
if (enableFileEncryption) {
|
// Async initialization function to handle SystemCrypto and DatabaseFileEncryption
|
||||||
|
async function initializeDatabaseAsync(): Promise<void> {
|
||||||
|
// Initialize SystemCrypto database key first
|
||||||
|
databaseLogger.info("Initializing SystemCrypto database key...", {
|
||||||
|
operation: "db_init_systemcrypto",
|
||||||
|
envKeyAvailable: !!process.env.DATABASE_KEY,
|
||||||
|
envKeyLength: process.env.DATABASE_KEY?.length || 0,
|
||||||
|
});
|
||||||
|
|
||||||
|
const systemCrypto = SystemCrypto.getInstance();
|
||||||
|
await systemCrypto.initializeDatabaseKey();
|
||||||
|
|
||||||
|
// Verify key is available after initialization
|
||||||
|
const dbKey = await systemCrypto.getDatabaseKey();
|
||||||
|
databaseLogger.info("SystemCrypto database key initialized", {
|
||||||
|
operation: "db_init_systemcrypto_complete",
|
||||||
|
keyLength: dbKey.length,
|
||||||
|
keyAvailable: !!dbKey,
|
||||||
|
});
|
||||||
|
|
||||||
|
if (enableFileEncryption) {
|
||||||
try {
|
try {
|
||||||
// Check if encrypted database exists
|
// Check if encrypted database exists
|
||||||
if (DatabaseFileEncryption.isEncryptedDatabaseFile(encryptedDbPath)) {
|
if (DatabaseFileEncryption.isEncryptedDatabaseFile(encryptedDbPath)) {
|
||||||
@@ -35,97 +59,146 @@ if (enableFileEncryption) {
|
|||||||
{
|
{
|
||||||
operation: "db_memory_load",
|
operation: "db_memory_load",
|
||||||
encryptedPath: encryptedDbPath,
|
encryptedPath: encryptedDbPath,
|
||||||
|
fileSize: fs.statSync(encryptedDbPath).size,
|
||||||
},
|
},
|
||||||
);
|
);
|
||||||
|
|
||||||
// Validate hardware compatibility
|
// Decrypt database content to memory buffer (now async)
|
||||||
if (
|
databaseLogger.info("Starting database decryption...", {
|
||||||
!DatabaseFileEncryption.validateHardwareCompatibility(encryptedDbPath)
|
operation: "db_decrypt_start",
|
||||||
) {
|
encryptedPath: encryptedDbPath,
|
||||||
databaseLogger.error(
|
});
|
||||||
"Hardware fingerprint mismatch for encrypted database",
|
|
||||||
{
|
|
||||||
operation: "db_decrypt_failed",
|
|
||||||
reason: "hardware_mismatch",
|
|
||||||
},
|
|
||||||
);
|
|
||||||
throw new Error(
|
|
||||||
"Cannot decrypt database: hardware fingerprint mismatch",
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
// Decrypt database content to memory buffer
|
|
||||||
const decryptedBuffer =
|
const decryptedBuffer =
|
||||||
DatabaseFileEncryption.decryptDatabaseToBuffer(encryptedDbPath);
|
await DatabaseFileEncryption.decryptDatabaseToBuffer(encryptedDbPath);
|
||||||
|
|
||||||
|
databaseLogger.info("Database decryption successful", {
|
||||||
|
operation: "db_decrypt_success",
|
||||||
|
decryptedSize: decryptedBuffer.length,
|
||||||
|
isSqlite: decryptedBuffer.slice(0, 16).toString().startsWith('SQLite format 3'),
|
||||||
|
});
|
||||||
|
|
||||||
// Create in-memory database from decrypted buffer
|
// Create in-memory database from decrypted buffer
|
||||||
memoryDatabase = new Database(decryptedBuffer);
|
memoryDatabase = new Database(decryptedBuffer);
|
||||||
|
|
||||||
|
databaseLogger.info("In-memory database created from decrypted buffer", {
|
||||||
|
operation: "db_memory_create_success",
|
||||||
|
});
|
||||||
} else {
|
} else {
|
||||||
|
// No encrypted database exists - check if we need to migrate
|
||||||
|
const migration = new DatabaseMigration(dataDir);
|
||||||
|
const migrationStatus = migration.checkMigrationStatus();
|
||||||
|
|
||||||
|
databaseLogger.info("Migration status check completed", {
|
||||||
|
operation: "migration_status",
|
||||||
|
needsMigration: migrationStatus.needsMigration,
|
||||||
|
hasUnencryptedDb: migrationStatus.hasUnencryptedDb,
|
||||||
|
hasEncryptedDb: migrationStatus.hasEncryptedDb,
|
||||||
|
unencryptedDbSize: migrationStatus.unencryptedDbSize,
|
||||||
|
reason: migrationStatus.reason,
|
||||||
|
});
|
||||||
|
|
||||||
|
if (migrationStatus.needsMigration) {
|
||||||
|
// Perform automatic migration
|
||||||
|
databaseLogger.info("Starting automatic database migration", {
|
||||||
|
operation: "auto_migration_start",
|
||||||
|
unencryptedDbSize: migrationStatus.unencryptedDbSize,
|
||||||
|
});
|
||||||
|
|
||||||
|
const migrationResult = await migration.migrateDatabase();
|
||||||
|
|
||||||
|
if (migrationResult.success) {
|
||||||
|
databaseLogger.success("Automatic database migration completed successfully", {
|
||||||
|
operation: "auto_migration_success",
|
||||||
|
migratedTables: migrationResult.migratedTables,
|
||||||
|
migratedRows: migrationResult.migratedRows,
|
||||||
|
duration: migrationResult.duration,
|
||||||
|
backupPath: migrationResult.backupPath,
|
||||||
|
});
|
||||||
|
|
||||||
|
// Clean up old backup files
|
||||||
|
migration.cleanupOldBackups();
|
||||||
|
|
||||||
|
// Load the newly created encrypted database
|
||||||
|
if (DatabaseFileEncryption.isEncryptedDatabaseFile(encryptedDbPath)) {
|
||||||
|
databaseLogger.info("Loading migrated encrypted database into memory", {
|
||||||
|
operation: "load_migrated_db",
|
||||||
|
encryptedPath: encryptedDbPath,
|
||||||
|
});
|
||||||
|
|
||||||
|
const decryptedBuffer = await DatabaseFileEncryption.decryptDatabaseToBuffer(encryptedDbPath);
|
||||||
|
memoryDatabase = new Database(decryptedBuffer);
|
||||||
|
isNewDatabase = false; // We have migrated data
|
||||||
|
|
||||||
|
databaseLogger.success("Migrated encrypted database loaded successfully", {
|
||||||
|
operation: "load_migrated_db_success",
|
||||||
|
decryptedSize: decryptedBuffer.length,
|
||||||
|
});
|
||||||
|
} else {
|
||||||
|
throw new Error("Migration completed but encrypted database file not found");
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
// Migration failed - this is critical
|
||||||
|
databaseLogger.error("Automatic database migration failed", null, {
|
||||||
|
operation: "auto_migration_failed",
|
||||||
|
error: migrationResult.error,
|
||||||
|
migratedTables: migrationResult.migratedTables,
|
||||||
|
migratedRows: migrationResult.migratedRows,
|
||||||
|
duration: migrationResult.duration,
|
||||||
|
backupPath: migrationResult.backupPath,
|
||||||
|
});
|
||||||
|
|
||||||
|
// 🔥 CRITICAL: Migration failure with existing data
|
||||||
|
console.error("🚨 DATABASE MIGRATION FAILED - THIS IS CRITICAL!");
|
||||||
|
console.error("Migration error:", migrationResult.error);
|
||||||
|
console.error("Backup available at:", migrationResult.backupPath);
|
||||||
|
console.error("Manual intervention required to recover data.");
|
||||||
|
|
||||||
|
throw new Error(`Database migration failed: ${migrationResult.error}. Backup available at: ${migrationResult.backupPath}`);
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
// No migration needed - create fresh database
|
||||||
memoryDatabase = new Database(":memory:");
|
memoryDatabase = new Database(":memory:");
|
||||||
isNewDatabase = true;
|
isNewDatabase = true;
|
||||||
|
|
||||||
// Check if there's an old unencrypted database to migrate
|
databaseLogger.info("Creating fresh in-memory database", {
|
||||||
if (fs.existsSync(dbPath)) {
|
operation: "fresh_db_create",
|
||||||
// Load old database and copy its content to memory database
|
reason: migrationStatus.reason,
|
||||||
const oldDb = new Database(dbPath, { readonly: true });
|
});
|
||||||
|
|
||||||
// Get all table schemas and data from old database
|
|
||||||
const tables = oldDb
|
|
||||||
.prepare(
|
|
||||||
`
|
|
||||||
SELECT name, sql FROM sqlite_master
|
|
||||||
WHERE type='table' AND name NOT LIKE 'sqlite_%'
|
|
||||||
`,
|
|
||||||
)
|
|
||||||
.all() as { name: string; sql: string }[];
|
|
||||||
|
|
||||||
// Create tables in memory database
|
|
||||||
for (const table of tables) {
|
|
||||||
memoryDatabase.exec(table.sql);
|
|
||||||
}
|
|
||||||
|
|
||||||
// Copy data for each table
|
|
||||||
for (const table of tables) {
|
|
||||||
const rows = oldDb.prepare(`SELECT * FROM ${table.name}`).all();
|
|
||||||
if (rows.length > 0) {
|
|
||||||
const columns = Object.keys(rows[0]);
|
|
||||||
const placeholders = columns.map(() => "?").join(", ");
|
|
||||||
const insertStmt = memoryDatabase.prepare(
|
|
||||||
`INSERT INTO ${table.name} (${columns.join(", ")}) VALUES (${placeholders})`,
|
|
||||||
);
|
|
||||||
|
|
||||||
for (const row of rows) {
|
|
||||||
const values = columns.map((col) => (row as any)[col]);
|
|
||||||
insertStmt.run(values);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
oldDb.close();
|
|
||||||
|
|
||||||
isNewDatabase = false;
|
|
||||||
} else {
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
databaseLogger.error("Failed to initialize memory database", error, {
|
databaseLogger.error("Failed to initialize memory database", error, {
|
||||||
operation: "db_memory_init_failed",
|
operation: "db_memory_init_failed",
|
||||||
|
errorMessage: error instanceof Error ? error.message : "Unknown error",
|
||||||
|
errorStack: error instanceof Error ? error.stack : undefined,
|
||||||
|
encryptedDbExists: DatabaseFileEncryption.isEncryptedDatabaseFile(encryptedDbPath),
|
||||||
|
databaseKeyAvailable: !!process.env.DATABASE_KEY,
|
||||||
|
databaseKeyLength: process.env.DATABASE_KEY?.length || 0,
|
||||||
});
|
});
|
||||||
|
|
||||||
// If file encryption is critical, fail fast
|
// 🔥 CRITICAL: Never silently ignore database decryption failures!
|
||||||
if (process.env.DB_FILE_ENCRYPTION_REQUIRED === "true") {
|
// This causes complete data loss for users
|
||||||
throw error;
|
console.error("🚨 DATABASE DECRYPTION FAILED - THIS IS CRITICAL!");
|
||||||
}
|
console.error("Error details:", error instanceof Error ? error.message : error);
|
||||||
|
console.error("Encrypted file exists:", DatabaseFileEncryption.isEncryptedDatabaseFile(encryptedDbPath));
|
||||||
|
console.error("DATABASE_KEY available:", !!process.env.DATABASE_KEY);
|
||||||
|
|
||||||
|
// Always fail fast on decryption errors - data integrity is critical
|
||||||
|
throw new Error(`Database decryption failed: ${error instanceof Error ? error.message : "Unknown error"}. This prevents data loss.`);
|
||||||
|
}
|
||||||
|
} else {
|
||||||
memoryDatabase = new Database(":memory:");
|
memoryDatabase = new Database(":memory:");
|
||||||
isNewDatabase = true;
|
isNewDatabase = true;
|
||||||
}
|
}
|
||||||
} else {
|
|
||||||
memoryDatabase = new Database(":memory:");
|
|
||||||
isNewDatabase = true;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
databaseLogger.info(`Initializing SQLite database`, {
|
// Main async initialization function that combines database setup with schema creation
|
||||||
|
async function initializeCompleteDatabase(): Promise<void> {
|
||||||
|
// First initialize the database and SystemCrypto
|
||||||
|
await initializeDatabaseAsync();
|
||||||
|
|
||||||
|
databaseLogger.info(`Initializing SQLite database`, {
|
||||||
operation: "db_init",
|
operation: "db_init",
|
||||||
path: actualDbPath,
|
path: actualDbPath,
|
||||||
encrypted:
|
encrypted:
|
||||||
@@ -133,11 +206,20 @@ databaseLogger.info(`Initializing SQLite database`, {
|
|||||||
DatabaseFileEncryption.isEncryptedDatabaseFile(encryptedDbPath),
|
DatabaseFileEncryption.isEncryptedDatabaseFile(encryptedDbPath),
|
||||||
inMemory: true,
|
inMemory: true,
|
||||||
isNewDatabase,
|
isNewDatabase,
|
||||||
});
|
});
|
||||||
|
|
||||||
const sqlite = memoryDatabase;
|
// Create module-level sqlite instance after database is initialized
|
||||||
|
sqlite = memoryDatabase;
|
||||||
|
|
||||||
sqlite.exec(`
|
// Initialize drizzle ORM with the configured database
|
||||||
|
db = drizzle(sqlite, { schema });
|
||||||
|
|
||||||
|
databaseLogger.info("Database ORM initialized", {
|
||||||
|
operation: "drizzle_init",
|
||||||
|
tablesConfigured: Object.keys(schema).length
|
||||||
|
});
|
||||||
|
|
||||||
|
sqlite.exec(`
|
||||||
CREATE TABLE IF NOT EXISTS users (
|
CREATE TABLE IF NOT EXISTS users (
|
||||||
id TEXT PRIMARY KEY,
|
id TEXT PRIMARY KEY,
|
||||||
username TEXT NOT NULL,
|
username TEXT NOT NULL,
|
||||||
@@ -256,8 +338,36 @@ sqlite.exec(`
|
|||||||
FOREIGN KEY (host_id) REFERENCES ssh_data (id),
|
FOREIGN KEY (host_id) REFERENCES ssh_data (id),
|
||||||
FOREIGN KEY (user_id) REFERENCES users (id)
|
FOREIGN KEY (user_id) REFERENCES users (id)
|
||||||
);
|
);
|
||||||
|
|
||||||
`);
|
`);
|
||||||
|
|
||||||
|
// Run schema migrations
|
||||||
|
migrateSchema();
|
||||||
|
|
||||||
|
// Initialize default settings
|
||||||
|
try {
|
||||||
|
const row = sqlite
|
||||||
|
.prepare("SELECT value FROM settings WHERE key = 'allow_registration'")
|
||||||
|
.get();
|
||||||
|
if (!row) {
|
||||||
|
databaseLogger.info("Initializing default settings", {
|
||||||
|
operation: "db_init",
|
||||||
|
setting: "allow_registration",
|
||||||
|
});
|
||||||
|
sqlite
|
||||||
|
.prepare(
|
||||||
|
"INSERT INTO settings (key, value) VALUES ('allow_registration', 'true')",
|
||||||
|
)
|
||||||
|
.run();
|
||||||
|
}
|
||||||
|
} catch (e) {
|
||||||
|
databaseLogger.warn("Could not initialize default settings", {
|
||||||
|
operation: "db_init",
|
||||||
|
error: e,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
const addColumnIfNotExists = (
|
const addColumnIfNotExists = (
|
||||||
table: string,
|
table: string,
|
||||||
column: string,
|
column: string,
|
||||||
@@ -365,11 +475,11 @@ const migrateSchema = () => {
|
|||||||
"INTEGER REFERENCES ssh_credentials(id)",
|
"INTEGER REFERENCES ssh_credentials(id)",
|
||||||
);
|
);
|
||||||
|
|
||||||
addColumnIfNotExists(
|
// AutoStart plaintext columns
|
||||||
"ssh_data",
|
addColumnIfNotExists("ssh_data", "autostart_password", "TEXT");
|
||||||
"require_password",
|
addColumnIfNotExists("ssh_data", "autostart_key", "TEXT");
|
||||||
"INTEGER NOT NULL DEFAULT 1",
|
addColumnIfNotExists("ssh_data", "autostart_key_password", "TEXT");
|
||||||
);
|
|
||||||
|
|
||||||
// SSH credentials table migrations for encryption support
|
// SSH credentials table migrations for encryption support
|
||||||
addColumnIfNotExists("ssh_credentials", "private_key", "TEXT");
|
addColumnIfNotExists("ssh_credentials", "private_key", "TEXT");
|
||||||
@@ -385,115 +495,70 @@ const migrateSchema = () => {
|
|||||||
});
|
});
|
||||||
};
|
};
|
||||||
|
|
||||||
const initializeDatabase = async (): Promise<void> => {
|
// Function to save in-memory database to file (encrypted or unencrypted fallback)
|
||||||
migrateSchema();
|
|
||||||
|
|
||||||
try {
|
|
||||||
const row = sqlite
|
|
||||||
.prepare("SELECT value FROM settings WHERE key = 'allow_registration'")
|
|
||||||
.get();
|
|
||||||
if (!row) {
|
|
||||||
databaseLogger.info("Initializing default settings", {
|
|
||||||
operation: "db_init",
|
|
||||||
setting: "allow_registration",
|
|
||||||
});
|
|
||||||
sqlite
|
|
||||||
.prepare(
|
|
||||||
"INSERT INTO settings (key, value) VALUES ('allow_registration', 'true')",
|
|
||||||
)
|
|
||||||
.run();
|
|
||||||
} else {
|
|
||||||
}
|
|
||||||
} catch (e) {
|
|
||||||
databaseLogger.warn("Could not initialize default settings", {
|
|
||||||
operation: "db_init",
|
|
||||||
error: e,
|
|
||||||
});
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
// Function to save in-memory database to encrypted file
|
|
||||||
async function saveMemoryDatabaseToFile() {
|
async function saveMemoryDatabaseToFile() {
|
||||||
if (!memoryDatabase || !enableFileEncryption) return;
|
if (!memoryDatabase) return;
|
||||||
|
|
||||||
try {
|
try {
|
||||||
// Export in-memory database to buffer
|
// Export in-memory database to buffer
|
||||||
const buffer = memoryDatabase.serialize();
|
const buffer = memoryDatabase.serialize();
|
||||||
|
|
||||||
// Encrypt and save to file
|
// Ensure data directory exists
|
||||||
DatabaseFileEncryption.encryptDatabaseFromBuffer(buffer, encryptedDbPath);
|
if (!fs.existsSync(dataDir)) {
|
||||||
|
fs.mkdirSync(dataDir, { recursive: true });
|
||||||
|
databaseLogger.info("Created data directory", {
|
||||||
|
operation: "data_dir_create",
|
||||||
|
path: dataDir,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
if (enableFileEncryption) {
|
||||||
|
// Save as encrypted file
|
||||||
|
await DatabaseFileEncryption.encryptDatabaseFromBuffer(buffer, encryptedDbPath);
|
||||||
|
|
||||||
databaseLogger.debug("In-memory database saved to encrypted file", {
|
databaseLogger.debug("In-memory database saved to encrypted file", {
|
||||||
operation: "memory_db_save",
|
operation: "memory_db_save_encrypted",
|
||||||
bufferSize: buffer.length,
|
bufferSize: buffer.length,
|
||||||
encryptedPath: encryptedDbPath,
|
encryptedPath: encryptedDbPath,
|
||||||
});
|
});
|
||||||
|
} else {
|
||||||
|
// Fallback: save as unencrypted SQLite file to prevent data loss
|
||||||
|
fs.writeFileSync(dbPath, buffer);
|
||||||
|
|
||||||
|
databaseLogger.debug("In-memory database saved to unencrypted file", {
|
||||||
|
operation: "memory_db_save_unencrypted",
|
||||||
|
bufferSize: buffer.length,
|
||||||
|
unencryptedPath: dbPath,
|
||||||
|
warning: "File encryption disabled - data saved unencrypted",
|
||||||
|
});
|
||||||
|
}
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
databaseLogger.error("Failed to save in-memory database", error, {
|
databaseLogger.error("Failed to save in-memory database", error, {
|
||||||
operation: "memory_db_save_failed",
|
operation: "memory_db_save_failed",
|
||||||
|
enableFileEncryption,
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Function to handle post-initialization file encryption and cleanup
|
// Function to handle post-initialization file encryption and periodic saves
|
||||||
async function handlePostInitFileEncryption() {
|
async function handlePostInitFileEncryption() {
|
||||||
if (!enableFileEncryption) return;
|
if (!enableFileEncryption) return;
|
||||||
|
|
||||||
try {
|
try {
|
||||||
// Clean up any existing unencrypted database files
|
// Check for any remaining unencrypted database files that may need attention
|
||||||
if (fs.existsSync(dbPath)) {
|
if (fs.existsSync(dbPath)) {
|
||||||
|
// This could happen if migration was skipped or if there are multiple database files
|
||||||
databaseLogger.warn(
|
databaseLogger.warn(
|
||||||
"Found unencrypted database file, removing for security",
|
"Unencrypted database file still exists after initialization",
|
||||||
{
|
{
|
||||||
operation: "db_security_cleanup_existing",
|
operation: "db_security_check",
|
||||||
removingPath: dbPath,
|
path: dbPath,
|
||||||
|
note: "This may be normal if migration was skipped for safety reasons",
|
||||||
},
|
},
|
||||||
);
|
);
|
||||||
|
|
||||||
try {
|
// Don't automatically delete - let migration logic handle this
|
||||||
fs.unlinkSync(dbPath);
|
// This provides better safety and transparency
|
||||||
databaseLogger.success(
|
|
||||||
"Unencrypted database file removed for security",
|
|
||||||
{
|
|
||||||
operation: "db_security_cleanup_complete",
|
|
||||||
removedPath: dbPath,
|
|
||||||
},
|
|
||||||
);
|
|
||||||
} catch (error) {
|
|
||||||
databaseLogger.warn(
|
|
||||||
"Could not remove unencrypted database file (may be locked)",
|
|
||||||
{
|
|
||||||
operation: "db_security_cleanup_deferred",
|
|
||||||
path: dbPath,
|
|
||||||
error: error instanceof Error ? error.message : "Unknown error",
|
|
||||||
},
|
|
||||||
);
|
|
||||||
|
|
||||||
// Try again after a short delay
|
|
||||||
setTimeout(() => {
|
|
||||||
try {
|
|
||||||
if (fs.existsSync(dbPath)) {
|
|
||||||
fs.unlinkSync(dbPath);
|
|
||||||
databaseLogger.success(
|
|
||||||
"Delayed cleanup: unencrypted database file removed",
|
|
||||||
{
|
|
||||||
operation: "db_security_cleanup_delayed_success",
|
|
||||||
removedPath: dbPath,
|
|
||||||
},
|
|
||||||
);
|
|
||||||
}
|
|
||||||
} catch (delayedError) {
|
|
||||||
databaseLogger.error(
|
|
||||||
"Failed to remove unencrypted database file even after delay",
|
|
||||||
delayedError,
|
|
||||||
{
|
|
||||||
operation: "db_security_cleanup_delayed_failed",
|
|
||||||
path: dbPath,
|
|
||||||
},
|
|
||||||
);
|
|
||||||
}
|
|
||||||
}, 2000);
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// Always save the in-memory database (whether new or existing)
|
// Always save the in-memory database (whether new or existing)
|
||||||
@@ -501,15 +566,35 @@ async function handlePostInitFileEncryption() {
|
|||||||
// Save immediately after initialization
|
// Save immediately after initialization
|
||||||
await saveMemoryDatabaseToFile();
|
await saveMemoryDatabaseToFile();
|
||||||
|
|
||||||
// Set up periodic saves every 5 minutes
|
databaseLogger.info("Setting up periodic database saves", {
|
||||||
setInterval(saveMemoryDatabaseToFile, 5 * 60 * 1000);
|
operation: "db_periodic_save_setup",
|
||||||
|
interval: "15 seconds",
|
||||||
|
});
|
||||||
|
|
||||||
|
// Set up periodic saves every 15 seconds for real-time persistence
|
||||||
|
setInterval(saveMemoryDatabaseToFile, 15 * 1000);
|
||||||
|
|
||||||
|
// Initialize database save trigger for real-time saves
|
||||||
|
DatabaseSaveTrigger.initialize(saveMemoryDatabaseToFile);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Perform migration cleanup on startup (remove old backup files)
|
||||||
|
try {
|
||||||
|
const migration = new DatabaseMigration(dataDir);
|
||||||
|
migration.cleanupOldBackups();
|
||||||
|
} catch (cleanupError) {
|
||||||
|
databaseLogger.warn("Failed to cleanup old migration files", {
|
||||||
|
operation: "migration_cleanup_startup_failed",
|
||||||
|
error: cleanupError instanceof Error ? cleanupError.message : "Unknown error",
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
databaseLogger.error(
|
databaseLogger.error(
|
||||||
"Failed to handle database file encryption/cleanup",
|
"Failed to handle database file encryption setup",
|
||||||
error,
|
error,
|
||||||
{
|
{
|
||||||
operation: "db_encrypt_cleanup_failed",
|
operation: "db_encrypt_setup_failed",
|
||||||
},
|
},
|
||||||
);
|
);
|
||||||
|
|
||||||
@@ -517,8 +602,19 @@ async function handlePostInitFileEncryption() {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
initializeDatabase()
|
// Export a promise that resolves when database is fully initialized
|
||||||
.then(() => handlePostInitFileEncryption())
|
export const databaseReady = initializeCompleteDatabase()
|
||||||
|
.then(async () => {
|
||||||
|
await handlePostInitFileEncryption();
|
||||||
|
|
||||||
|
databaseLogger.success("Database connection established", {
|
||||||
|
operation: "db_init",
|
||||||
|
path: actualDbPath,
|
||||||
|
hasEncryptedBackup:
|
||||||
|
enableFileEncryption &&
|
||||||
|
DatabaseFileEncryption.isEncryptedDatabaseFile(encryptedDbPath),
|
||||||
|
});
|
||||||
|
})
|
||||||
.catch((error) => {
|
.catch((error) => {
|
||||||
databaseLogger.error("Failed to initialize database", error, {
|
databaseLogger.error("Failed to initialize database", error, {
|
||||||
operation: "db_init",
|
operation: "db_init",
|
||||||
@@ -526,14 +622,6 @@ initializeDatabase()
|
|||||||
process.exit(1);
|
process.exit(1);
|
||||||
});
|
});
|
||||||
|
|
||||||
databaseLogger.success("Database connection established", {
|
|
||||||
operation: "db_init",
|
|
||||||
path: actualDbPath,
|
|
||||||
hasEncryptedBackup:
|
|
||||||
enableFileEncryption &&
|
|
||||||
DatabaseFileEncryption.isEncryptedDatabaseFile(encryptedDbPath),
|
|
||||||
});
|
|
||||||
|
|
||||||
// Cleanup function for database and temporary files
|
// Cleanup function for database and temporary files
|
||||||
async function cleanupDatabase() {
|
async function cleanupDatabase() {
|
||||||
// Save in-memory database before closing
|
// Save in-memory database before closing
|
||||||
@@ -619,9 +707,27 @@ process.on("SIGTERM", async () => {
|
|||||||
process.exit(0);
|
process.exit(0);
|
||||||
});
|
});
|
||||||
|
|
||||||
// Export database connection and file encryption utilities
|
// Database connection - will be initialized after database setup
|
||||||
export const db = drizzle(sqlite, { schema });
|
let db: ReturnType<typeof drizzle<typeof schema>>;
|
||||||
export const sqliteInstance = sqlite; // Export underlying SQLite instance for schema queries
|
|
||||||
|
// Export database connection getter function to avoid undefined access
|
||||||
|
export function getDb(): ReturnType<typeof drizzle<typeof schema>> {
|
||||||
|
if (!db) {
|
||||||
|
throw new Error("Database not initialized. Ensure databaseReady promise is awaited before accessing db.");
|
||||||
|
}
|
||||||
|
return db;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Export raw SQLite instance for migrations
|
||||||
|
export function getSqlite(): Database.Database {
|
||||||
|
if (!sqlite) {
|
||||||
|
throw new Error("SQLite not initialized. Ensure databaseReady promise is awaited before accessing sqlite.");
|
||||||
|
}
|
||||||
|
return sqlite;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Legacy export for compatibility - will throw if accessed before initialization
|
||||||
|
export { db };
|
||||||
export { DatabaseFileEncryption };
|
export { DatabaseFileEncryption };
|
||||||
export const databasePaths = {
|
export const databasePaths = {
|
||||||
main: actualDbPath,
|
main: actualDbPath,
|
||||||
@@ -660,3 +766,6 @@ function getMemoryDatabaseBuffer(): Buffer {
|
|||||||
|
|
||||||
// Export save function for manual saves and buffer access
|
// Export save function for manual saves and buffer access
|
||||||
export { saveMemoryDatabaseToFile, getMemoryDatabaseBuffer };
|
export { saveMemoryDatabaseToFile, getMemoryDatabaseBuffer };
|
||||||
|
|
||||||
|
// Export database save trigger for real-time saves
|
||||||
|
export { DatabaseSaveTrigger };
|
||||||
|
|||||||
600
src/backend/database/db/old-index.ts.bak
Normal file
@@ -0,0 +1,600 @@
|
|||||||
|
import { drizzle } from "drizzle-orm/better-sqlite3";
|
||||||
|
This backup file appears to have been accidentally committed to the repository. It should be removed to keep the codebase clean and avoid potential confusion for future developers. 
This backup file appears to have been accidentally committed to the repository. It should be removed to keep the codebase clean and avoid potential confusion for future developers.
|
|||||||
|
import Database from "better-sqlite3";
|
||||||
|
import * as schema from "./schema.js";
|
||||||
|
import { databaseLogger } from "../../utils/logger.js";
|
||||||
|
import { UserDatabaseManager } from "../../utils/user-database-manager.js";
|
||||||
|
|
||||||
|
// Global database manager instance
|
||||||
|
const databaseManager = UserDatabaseManager.getInstance();
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Initialize database system - simplified for user-based architecture
|
||||||
|
*/
|
||||||
|
async function initializeDatabase(): Promise<void> {
|
||||||
|
try {
|
||||||
|
databaseLogger.info("Initializing database system (user-based architecture)", {
|
||||||
|
operation: "db_init_v3",
|
||||||
|
});
|
||||||
|
|
||||||
|
// Initialize system database (unencrypted)
|
||||||
|
await databaseManager.initializeSystem();
|
||||||
|
|
||||||
|
databaseLogger.success("Database system initialized successfully", {
|
||||||
|
operation: "db_init_v3_success",
|
||||||
|
});
|
||||||
|
|
||||||
|
} catch (error) {
|
||||||
|
databaseLogger.error("Failed to initialize database system", error, {
|
||||||
|
operation: "db_init_v3_failed",
|
||||||
|
});
|
||||||
|
throw error;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Export a promise that resolves when database is fully initialized
|
||||||
|
export const databaseReady = initializeDatabase()
|
||||||
|
.then(() => {
|
||||||
|
databaseLogger.success("Database system ready", {
|
||||||
|
operation: "db_ready",
|
||||||
|
architecture: "v3-user-based",
|
||||||
|
});
|
||||||
|
})
|
||||||
|
.catch((error) => {
|
||||||
|
databaseLogger.error("Failed to initialize database system", error, {
|
||||||
|
operation: "db_ready_failed",
|
||||||
|
});
|
||||||
|
process.exit(1);
|
||||||
|
});
|
||||||
|
|
||||||
|
databaseLogger.info(`Initializing SQLite database`, {
|
||||||
|
operation: "db_init",
|
||||||
|
path: actualDbPath,
|
||||||
|
encrypted:
|
||||||
|
enableFileEncryption &&
|
||||||
|
DatabaseFileEncryption.isEncryptedDatabaseFile(encryptedDbPath),
|
||||||
|
inMemory: true,
|
||||||
|
isNewDatabase,
|
||||||
|
});
|
||||||
|
|
||||||
|
// Create module-level sqlite instance after database is initialized
|
||||||
|
sqlite = memoryDatabase;
|
||||||
|
|
||||||
|
// Initialize drizzle ORM with the configured database
|
||||||
|
db = drizzle(sqlite, { schema });
|
||||||
|
|
||||||
|
databaseLogger.info("Database ORM initialized", {
|
||||||
|
operation: "drizzle_init",
|
||||||
|
tablesConfigured: Object.keys(schema).length
|
||||||
|
});
|
||||||
|
|
||||||
|
sqlite.exec(`
|
||||||
|
CREATE TABLE IF NOT EXISTS users (
|
||||||
|
id TEXT PRIMARY KEY,
|
||||||
|
username TEXT NOT NULL,
|
||||||
|
password_hash TEXT NOT NULL,
|
||||||
|
is_admin INTEGER NOT NULL DEFAULT 0,
|
||||||
|
is_oidc INTEGER NOT NULL DEFAULT 0,
|
||||||
|
client_id TEXT NOT NULL,
|
||||||
|
client_secret TEXT NOT NULL,
|
||||||
|
issuer_url TEXT NOT NULL,
|
||||||
|
authorization_url TEXT NOT NULL,
|
||||||
|
token_url TEXT NOT NULL,
|
||||||
|
redirect_uri TEXT,
|
||||||
|
identifier_path TEXT NOT NULL,
|
||||||
|
name_path TEXT NOT NULL,
|
||||||
|
scopes TEXT NOT NULL
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE TABLE IF NOT EXISTS settings (
|
||||||
|
key TEXT PRIMARY KEY,
|
||||||
|
value TEXT NOT NULL
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE TABLE IF NOT EXISTS ssh_data (
|
||||||
|
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||||
|
user_id TEXT NOT NULL,
|
||||||
|
name TEXT,
|
||||||
|
ip TEXT NOT NULL,
|
||||||
|
port INTEGER NOT NULL,
|
||||||
|
username TEXT NOT NULL,
|
||||||
|
folder TEXT,
|
||||||
|
tags TEXT,
|
||||||
|
pin INTEGER NOT NULL DEFAULT 0,
|
||||||
|
auth_type TEXT NOT NULL,
|
||||||
|
password TEXT,
|
||||||
|
key TEXT,
|
||||||
|
key_password TEXT,
|
||||||
|
key_type TEXT,
|
||||||
|
enable_terminal INTEGER NOT NULL DEFAULT 1,
|
||||||
|
enable_tunnel INTEGER NOT NULL DEFAULT 1,
|
||||||
|
tunnel_connections TEXT,
|
||||||
|
enable_file_manager INTEGER NOT NULL DEFAULT 1,
|
||||||
|
default_path TEXT,
|
||||||
|
created_at TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||||
|
updated_at TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||||
|
FOREIGN KEY (user_id) REFERENCES users (id)
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE TABLE IF NOT EXISTS file_manager_recent (
|
||||||
|
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||||
|
user_id TEXT NOT NULL,
|
||||||
|
host_id INTEGER NOT NULL,
|
||||||
|
name TEXT NOT NULL,
|
||||||
|
path TEXT NOT NULL,
|
||||||
|
last_opened TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||||
|
FOREIGN KEY (user_id) REFERENCES users (id),
|
||||||
|
FOREIGN KEY (host_id) REFERENCES ssh_data (id)
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE TABLE IF NOT EXISTS file_manager_pinned (
|
||||||
|
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||||
|
user_id TEXT NOT NULL,
|
||||||
|
host_id INTEGER NOT NULL,
|
||||||
|
name TEXT NOT NULL,
|
||||||
|
path TEXT NOT NULL,
|
||||||
|
pinned_at TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||||
|
FOREIGN KEY (user_id) REFERENCES users (id),
|
||||||
|
FOREIGN KEY (host_id) REFERENCES ssh_data (id)
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE TABLE IF NOT EXISTS file_manager_shortcuts (
|
||||||
|
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||||
|
user_id TEXT NOT NULL,
|
||||||
|
host_id INTEGER NOT NULL,
|
||||||
|
name TEXT NOT NULL,
|
||||||
|
path TEXT NOT NULL,
|
||||||
|
created_at TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||||
|
FOREIGN KEY (user_id) REFERENCES users (id),
|
||||||
|
FOREIGN KEY (host_id) REFERENCES ssh_data (id)
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE TABLE IF NOT EXISTS dismissed_alerts (
|
||||||
|
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||||
|
user_id TEXT NOT NULL,
|
||||||
|
alert_id TEXT NOT NULL,
|
||||||
|
dismissed_at TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||||
|
FOREIGN KEY (user_id) REFERENCES users (id)
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE TABLE IF NOT EXISTS ssh_credentials (
|
||||||
|
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||||
|
user_id TEXT NOT NULL,
|
||||||
|
name TEXT NOT NULL,
|
||||||
|
description TEXT,
|
||||||
|
folder TEXT,
|
||||||
|
tags TEXT,
|
||||||
|
auth_type TEXT NOT NULL,
|
||||||
|
username TEXT NOT NULL,
|
||||||
|
password TEXT,
|
||||||
|
key TEXT,
|
||||||
|
key_password TEXT,
|
||||||
|
key_type TEXT,
|
||||||
|
usage_count INTEGER NOT NULL DEFAULT 0,
|
||||||
|
last_used TEXT,
|
||||||
|
created_at TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||||
|
updated_at TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||||
|
FOREIGN KEY (user_id) REFERENCES users (id)
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE TABLE IF NOT EXISTS ssh_credential_usage (
|
||||||
|
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||||
|
credential_id INTEGER NOT NULL,
|
||||||
|
host_id INTEGER NOT NULL,
|
||||||
|
user_id TEXT NOT NULL,
|
||||||
|
used_at TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||||
|
FOREIGN KEY (credential_id) REFERENCES ssh_credentials (id),
|
||||||
|
FOREIGN KEY (host_id) REFERENCES ssh_data (id),
|
||||||
|
FOREIGN KEY (user_id) REFERENCES users (id)
|
||||||
|
);
|
||||||
|
`);
|
||||||
|
|
||||||
|
// Run schema migrations
|
||||||
|
migrateSchema();
|
||||||
|
|
||||||
|
// Initialize default settings
|
||||||
|
try {
|
||||||
|
const row = sqlite
|
||||||
|
.prepare("SELECT value FROM settings WHERE key = 'allow_registration'")
|
||||||
|
.get();
|
||||||
|
if (!row) {
|
||||||
|
databaseLogger.info("Initializing default settings", {
|
||||||
|
operation: "db_init",
|
||||||
|
setting: "allow_registration",
|
||||||
|
});
|
||||||
|
sqlite
|
||||||
|
.prepare(
|
||||||
|
"INSERT INTO settings (key, value) VALUES ('allow_registration', 'true')",
|
||||||
|
)
|
||||||
|
.run();
|
||||||
|
}
|
||||||
|
} catch (e) {
|
||||||
|
databaseLogger.warn("Could not initialize default settings", {
|
||||||
|
operation: "db_init",
|
||||||
|
error: e,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const addColumnIfNotExists = (
|
||||||
|
table: string,
|
||||||
|
column: string,
|
||||||
|
definition: string,
|
||||||
|
) => {
|
||||||
|
try {
|
||||||
|
sqlite
|
||||||
|
.prepare(
|
||||||
|
`SELECT ${column}
|
||||||
|
FROM ${table} LIMIT 1`,
|
||||||
|
)
|
||||||
|
.get();
|
||||||
|
} catch (e) {
|
||||||
|
try {
|
||||||
|
databaseLogger.debug(`Adding column ${column} to ${table}`, {
|
||||||
|
operation: "schema_migration",
|
||||||
|
table,
|
||||||
|
column,
|
||||||
|
});
|
||||||
|
sqlite.exec(`ALTER TABLE ${table}
|
||||||
|
ADD COLUMN ${column} ${definition};`);
|
||||||
|
databaseLogger.success(`Column ${column} added to ${table}`, {
|
||||||
|
operation: "schema_migration",
|
||||||
|
table,
|
||||||
|
column,
|
||||||
|
});
|
||||||
|
} catch (alterError) {
|
||||||
|
databaseLogger.warn(`Failed to add column ${column} to ${table}`, {
|
||||||
|
operation: "schema_migration",
|
||||||
|
table,
|
||||||
|
column,
|
||||||
|
error: alterError,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
const migrateSchema = () => {
|
||||||
|
databaseLogger.info("Checking for schema updates...", {
|
||||||
|
operation: "schema_migration",
|
||||||
|
});
|
||||||
|
|
||||||
|
addColumnIfNotExists("users", "is_admin", "INTEGER NOT NULL DEFAULT 0");
|
||||||
|
|
||||||
|
addColumnIfNotExists("users", "is_oidc", "INTEGER NOT NULL DEFAULT 0");
|
||||||
|
addColumnIfNotExists("users", "oidc_identifier", "TEXT");
|
||||||
|
addColumnIfNotExists("users", "client_id", "TEXT");
|
||||||
|
addColumnIfNotExists("users", "client_secret", "TEXT");
|
||||||
|
addColumnIfNotExists("users", "issuer_url", "TEXT");
|
||||||
|
addColumnIfNotExists("users", "authorization_url", "TEXT");
|
||||||
|
addColumnIfNotExists("users", "token_url", "TEXT");
|
||||||
|
|
||||||
|
addColumnIfNotExists("users", "identifier_path", "TEXT");
|
||||||
|
addColumnIfNotExists("users", "name_path", "TEXT");
|
||||||
|
addColumnIfNotExists("users", "scopes", "TEXT");
|
||||||
|
|
||||||
|
addColumnIfNotExists("users", "totp_secret", "TEXT");
|
||||||
|
addColumnIfNotExists("users", "totp_enabled", "INTEGER NOT NULL DEFAULT 0");
|
||||||
|
addColumnIfNotExists("users", "totp_backup_codes", "TEXT");
|
||||||
|
|
||||||
|
addColumnIfNotExists("ssh_data", "name", "TEXT");
|
||||||
|
addColumnIfNotExists("ssh_data", "folder", "TEXT");
|
||||||
|
addColumnIfNotExists("ssh_data", "tags", "TEXT");
|
||||||
|
addColumnIfNotExists("ssh_data", "pin", "INTEGER NOT NULL DEFAULT 0");
|
||||||
|
addColumnIfNotExists(
|
||||||
|
"ssh_data",
|
||||||
|
"auth_type",
|
||||||
|
'TEXT NOT NULL DEFAULT "password"',
|
||||||
|
);
|
||||||
|
addColumnIfNotExists("ssh_data", "password", "TEXT");
|
||||||
|
addColumnIfNotExists("ssh_data", "key", "TEXT");
|
||||||
|
addColumnIfNotExists("ssh_data", "key_password", "TEXT");
|
||||||
|
addColumnIfNotExists("ssh_data", "key_type", "TEXT");
|
||||||
|
addColumnIfNotExists(
|
||||||
|
"ssh_data",
|
||||||
|
"enable_terminal",
|
||||||
|
"INTEGER NOT NULL DEFAULT 1",
|
||||||
|
);
|
||||||
|
addColumnIfNotExists(
|
||||||
|
"ssh_data",
|
||||||
|
"enable_tunnel",
|
||||||
|
"INTEGER NOT NULL DEFAULT 1",
|
||||||
|
);
|
||||||
|
addColumnIfNotExists("ssh_data", "tunnel_connections", "TEXT");
|
||||||
|
addColumnIfNotExists(
|
||||||
|
"ssh_data",
|
||||||
|
"enable_file_manager",
|
||||||
|
"INTEGER NOT NULL DEFAULT 1",
|
||||||
|
);
|
||||||
|
addColumnIfNotExists("ssh_data", "default_path", "TEXT");
|
||||||
|
addColumnIfNotExists(
|
||||||
|
"ssh_data",
|
||||||
|
"created_at",
|
||||||
|
"TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP",
|
||||||
|
);
|
||||||
|
addColumnIfNotExists(
|
||||||
|
"ssh_data",
|
||||||
|
"updated_at",
|
||||||
|
"TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP",
|
||||||
|
);
|
||||||
|
|
||||||
|
addColumnIfNotExists(
|
||||||
|
"ssh_data",
|
||||||
|
"credential_id",
|
||||||
|
"INTEGER REFERENCES ssh_credentials(id)",
|
||||||
|
);
|
||||||
|
|
||||||
|
|
||||||
|
// SSH credentials table migrations for encryption support
|
||||||
|
addColumnIfNotExists("ssh_credentials", "private_key", "TEXT");
|
||||||
|
addColumnIfNotExists("ssh_credentials", "public_key", "TEXT");
|
||||||
|
addColumnIfNotExists("ssh_credentials", "detected_key_type", "TEXT");
|
||||||
|
|
||||||
|
addColumnIfNotExists("file_manager_recent", "host_id", "INTEGER NOT NULL");
|
||||||
|
addColumnIfNotExists("file_manager_pinned", "host_id", "INTEGER NOT NULL");
|
||||||
|
addColumnIfNotExists("file_manager_shortcuts", "host_id", "INTEGER NOT NULL");
|
||||||
|
|
||||||
|
databaseLogger.success("Schema migration completed", {
|
||||||
|
operation: "schema_migration",
|
||||||
|
});
|
||||||
|
};
|
||||||
|
|
||||||
|
// Function to save in-memory database to encrypted file
|
||||||
|
async function saveMemoryDatabaseToFile() {
|
||||||
|
if (!memoryDatabase || !enableFileEncryption) return;
|
||||||
|
|
||||||
|
try {
|
||||||
|
// Export in-memory database to buffer
|
||||||
|
const buffer = memoryDatabase.serialize();
|
||||||
|
|
||||||
|
// Encrypt and save to file (now async)
|
||||||
|
await DatabaseFileEncryption.encryptDatabaseFromBuffer(buffer, encryptedDbPath);
|
||||||
|
|
||||||
|
databaseLogger.debug("In-memory database saved to encrypted file", {
|
||||||
|
operation: "memory_db_save",
|
||||||
|
bufferSize: buffer.length,
|
||||||
|
encryptedPath: encryptedDbPath,
|
||||||
|
});
|
||||||
|
} catch (error) {
|
||||||
|
databaseLogger.error("Failed to save in-memory database", error, {
|
||||||
|
operation: "memory_db_save_failed",
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Function to handle post-initialization file encryption and cleanup
|
||||||
|
async function handlePostInitFileEncryption() {
|
||||||
|
if (!enableFileEncryption) return;
|
||||||
|
|
||||||
|
try {
|
||||||
|
// Clean up any existing unencrypted database files
|
||||||
|
if (fs.existsSync(dbPath)) {
|
||||||
|
databaseLogger.warn(
|
||||||
|
"Found unencrypted database file, removing for security",
|
||||||
|
{
|
||||||
|
operation: "db_security_cleanup_existing",
|
||||||
|
removingPath: dbPath,
|
||||||
|
},
|
||||||
|
);
|
||||||
|
|
||||||
|
try {
|
||||||
|
fs.unlinkSync(dbPath);
|
||||||
|
databaseLogger.success(
|
||||||
|
"Unencrypted database file removed for security",
|
||||||
|
{
|
||||||
|
operation: "db_security_cleanup_complete",
|
||||||
|
removedPath: dbPath,
|
||||||
|
},
|
||||||
|
);
|
||||||
|
} catch (error) {
|
||||||
|
databaseLogger.warn(
|
||||||
|
"Could not remove unencrypted database file (may be locked)",
|
||||||
|
{
|
||||||
|
operation: "db_security_cleanup_deferred",
|
||||||
|
path: dbPath,
|
||||||
|
error: error instanceof Error ? error.message : "Unknown error",
|
||||||
|
},
|
||||||
|
);
|
||||||
|
|
||||||
|
// Try again after a short delay
|
||||||
|
setTimeout(() => {
|
||||||
|
try {
|
||||||
|
if (fs.existsSync(dbPath)) {
|
||||||
|
fs.unlinkSync(dbPath);
|
||||||
|
databaseLogger.success(
|
||||||
|
"Delayed cleanup: unencrypted database file removed",
|
||||||
|
{
|
||||||
|
operation: "db_security_cleanup_delayed_success",
|
||||||
|
removedPath: dbPath,
|
||||||
|
},
|
||||||
|
);
|
||||||
|
}
|
||||||
|
} catch (delayedError) {
|
||||||
|
databaseLogger.error(
|
||||||
|
"Failed to remove unencrypted database file even after delay",
|
||||||
|
delayedError,
|
||||||
|
{
|
||||||
|
operation: "db_security_cleanup_delayed_failed",
|
||||||
|
path: dbPath,
|
||||||
|
},
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}, 2000);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Always save the in-memory database (whether new or existing)
|
||||||
|
if (memoryDatabase) {
|
||||||
|
// Save immediately after initialization
|
||||||
|
await saveMemoryDatabaseToFile();
|
||||||
|
|
||||||
|
// Set up periodic saves every 5 minutes
|
||||||
|
setInterval(saveMemoryDatabaseToFile, 5 * 60 * 1000);
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
databaseLogger.error(
|
||||||
|
"Failed to handle database file encryption/cleanup",
|
||||||
|
error,
|
||||||
|
{
|
||||||
|
operation: "db_encrypt_cleanup_failed",
|
||||||
|
},
|
||||||
|
);
|
||||||
|
|
||||||
|
// Don't fail the entire initialization for this
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Export a promise that resolves when database is fully initialized
|
||||||
|
export const databaseReady = initializeCompleteDatabase()
|
||||||
|
.then(async () => {
|
||||||
|
await handlePostInitFileEncryption();
|
||||||
|
|
||||||
|
databaseLogger.success("Database connection established", {
|
||||||
|
operation: "db_init",
|
||||||
|
path: actualDbPath,
|
||||||
|
hasEncryptedBackup:
|
||||||
|
enableFileEncryption &&
|
||||||
|
DatabaseFileEncryption.isEncryptedDatabaseFile(encryptedDbPath),
|
||||||
|
});
|
||||||
|
})
|
||||||
|
.catch((error) => {
|
||||||
|
databaseLogger.error("Failed to initialize database", error, {
|
||||||
|
operation: "db_init",
|
||||||
|
});
|
||||||
|
process.exit(1);
|
||||||
|
});
|
||||||
|
|
||||||
|
// Cleanup function for database and temporary files
|
||||||
|
async function cleanupDatabase() {
|
||||||
|
// Save in-memory database before closing
|
||||||
|
if (memoryDatabase) {
|
||||||
|
try {
|
||||||
|
await saveMemoryDatabaseToFile();
|
||||||
|
} catch (error) {
|
||||||
|
databaseLogger.error(
|
||||||
|
"Failed to save in-memory database before shutdown",
|
||||||
|
error,
|
||||||
|
{
|
||||||
|
operation: "shutdown_save_failed",
|
||||||
|
},
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Close database connection
|
||||||
|
try {
|
||||||
|
if (sqlite) {
|
||||||
|
sqlite.close();
|
||||||
|
databaseLogger.debug("Database connection closed", {
|
||||||
|
operation: "db_close",
|
||||||
|
});
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
databaseLogger.warn("Error closing database connection", {
|
||||||
|
operation: "db_close_error",
|
||||||
|
error: error instanceof Error ? error.message : "Unknown error",
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
// Clean up temp directory
|
||||||
|
try {
|
||||||
|
const tempDir = path.join(dataDir, ".temp");
|
||||||
|
if (fs.existsSync(tempDir)) {
|
||||||
|
const files = fs.readdirSync(tempDir);
|
||||||
|
for (const file of files) {
|
||||||
|
try {
|
||||||
|
fs.unlinkSync(path.join(tempDir, file));
|
||||||
|
} catch {
|
||||||
|
// Ignore individual file cleanup errors
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
fs.rmdirSync(tempDir);
|
||||||
|
databaseLogger.debug("Temp directory cleaned up", {
|
||||||
|
operation: "temp_dir_cleanup",
|
||||||
|
});
|
||||||
|
} catch {
|
||||||
|
// Ignore directory removal errors
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
// Ignore temp directory cleanup errors
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Register cleanup handlers
|
||||||
|
process.on("exit", () => {
|
||||||
|
// Synchronous cleanup only for exit event
|
||||||
|
if (sqlite) {
|
||||||
|
try {
|
||||||
|
sqlite.close();
|
||||||
|
} catch {}
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
process.on("SIGINT", async () => {
|
||||||
|
databaseLogger.info("Received SIGINT, cleaning up...", {
|
||||||
|
operation: "shutdown",
|
||||||
|
});
|
||||||
|
await cleanupDatabase();
|
||||||
|
process.exit(0);
|
||||||
|
});
|
||||||
|
|
||||||
|
process.on("SIGTERM", async () => {
|
||||||
|
databaseLogger.info("Received SIGTERM, cleaning up...", {
|
||||||
|
operation: "shutdown",
|
||||||
|
});
|
||||||
|
await cleanupDatabase();
|
||||||
|
process.exit(0);
|
||||||
|
});
|
||||||
|
|
||||||
|
// Database connection - will be initialized after database setup
|
||||||
|
let db: ReturnType<typeof drizzle<typeof schema>>;
|
||||||
|
|
||||||
|
// Export database connection getter function to avoid undefined access
|
||||||
|
export function getDb(): ReturnType<typeof drizzle<typeof schema>> {
|
||||||
|
if (!db) {
|
||||||
|
throw new Error("Database not initialized. Ensure databaseReady promise is awaited before accessing db.");
|
||||||
|
}
|
||||||
|
return db;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Legacy export for compatibility - will throw if accessed before initialization
|
||||||
|
export { db };
|
||||||
|
export { DatabaseFileEncryption };
|
||||||
|
export const databasePaths = {
|
||||||
|
main: actualDbPath,
|
||||||
|
encrypted: encryptedDbPath,
|
||||||
|
directory: dbDir,
|
||||||
|
inMemory: true,
|
||||||
|
};
|
||||||
|
|
||||||
|
// Memory database buffer function
|
||||||
|
function getMemoryDatabaseBuffer(): Buffer {
|
||||||
|
if (!memoryDatabase) {
|
||||||
|
throw new Error("Memory database not initialized");
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
// Export in-memory database to buffer
|
||||||
|
const buffer = memoryDatabase.serialize();
|
||||||
|
|
||||||
|
databaseLogger.debug("Memory database serialized to buffer", {
|
||||||
|
operation: "memory_db_serialize",
|
||||||
|
bufferSize: buffer.length,
|
||||||
|
});
|
||||||
|
|
||||||
|
return buffer;
|
||||||
|
} catch (error) {
|
||||||
|
databaseLogger.error(
|
||||||
|
"Failed to serialize memory database to buffer",
|
||||||
|
error,
|
||||||
|
{
|
||||||
|
operation: "memory_db_serialize_failed",
|
||||||
|
},
|
||||||
|
);
|
||||||
|
throw error;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Export save function for manual saves and buffer access
|
||||||
|
export { saveMemoryDatabaseToFile, getMemoryDatabaseBuffer };
|
||||||
@@ -45,13 +45,15 @@ export const sshData = sqliteTable("ssh_data", {
|
|||||||
authType: text("auth_type").notNull(),
|
authType: text("auth_type").notNull(),
|
||||||
|
|
||||||
password: text("password"),
|
password: text("password"),
|
||||||
requirePassword: integer("require_password", { mode: "boolean" })
|
|
||||||
.notNull()
|
|
||||||
.default(true),
|
|
||||||
key: text("key", { length: 8192 }),
|
key: text("key", { length: 8192 }),
|
||||||
keyPassword: text("key_password"),
|
keyPassword: text("key_password"),
|
||||||
keyType: text("key_type"),
|
keyType: text("key_type"),
|
||||||
|
|
||||||
|
// AutoStart plaintext fields (populated only when autoStart is enabled)
|
||||||
|
autostartPassword: text("autostart_password"),
|
||||||
|
autostartKey: text("autostart_key", { length: 8192 }),
|
||||||
|
autostartKeyPassword: text("autostart_key_password"),
|
||||||
|
|
||||||
credentialId: integer("credential_id").references(() => sshCredentials.id),
|
credentialId: integer("credential_id").references(() => sshCredentials.id),
|
||||||
enableTerminal: integer("enable_terminal", { mode: "boolean" })
|
enableTerminal: integer("enable_terminal", { mode: "boolean" })
|
||||||
.notNull()
|
.notNull()
|
||||||
@@ -171,3 +173,4 @@ export const sshCredentialUsage = sqliteTable("ssh_credential_usage", {
|
|||||||
.notNull()
|
.notNull()
|
||||||
.default(sql`CURRENT_TIMESTAMP`),
|
.default(sql`CURRENT_TIMESTAMP`),
|
||||||
});
|
});
|
||||||
|
|
||||||
|
|||||||
@@ -4,6 +4,7 @@ import { dismissedAlerts } from "../db/schema.js";
|
|||||||
import { eq, and } from "drizzle-orm";
|
import { eq, and } from "drizzle-orm";
|
||||||
import fetch from "node-fetch";
|
import fetch from "node-fetch";
|
||||||
import { authLogger } from "../../utils/logger.js";
|
import { authLogger } from "../../utils/logger.js";
|
||||||
|
import { AuthManager } from "../../utils/auth-manager.js";
|
||||||
|
|
||||||
interface CacheEntry {
|
interface CacheEntry {
|
||||||
data: any;
|
data: any;
|
||||||
@@ -107,31 +108,15 @@ async function fetchAlertsFromGitHub(): Promise<TermixAlert[]> {
|
|||||||
|
|
||||||
const router = express.Router();
|
const router = express.Router();
|
||||||
|
|
||||||
// Route: Get all active alerts
|
// Initialize auth middleware
|
||||||
|
const authManager = AuthManager.getInstance();
|
||||||
|
const authenticateJWT = authManager.createAuthMiddleware();
|
||||||
|
|
||||||
|
// Route: Get alerts for the authenticated user (excluding dismissed ones)
|
||||||
// GET /alerts
|
// GET /alerts
|
||||||
router.get("/", async (req, res) => {
|
router.get("/", authenticateJWT, async (req, res) => {
|
||||||
try {
|
try {
|
||||||
const alerts = await fetchAlertsFromGitHub();
|
const userId = (req as any).userId;
|
||||||
res.json({
|
|
||||||
alerts,
|
|
||||||
cached: alertCache.get("termix_alerts") !== null,
|
|
||||||
total_count: alerts.length,
|
|
||||||
});
|
|
||||||
} catch (error) {
|
|
||||||
authLogger.error("Failed to get alerts", error);
|
|
||||||
res.status(500).json({ error: "Failed to fetch alerts" });
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
// Route: Get alerts for a specific user (excluding dismissed ones)
|
|
||||||
// GET /alerts/user/:userId
|
|
||||||
router.get("/user/:userId", async (req, res) => {
|
|
||||||
try {
|
|
||||||
const { userId } = req.params;
|
|
||||||
|
|
||||||
if (!userId) {
|
|
||||||
return res.status(400).json({ error: "User ID is required" });
|
|
||||||
}
|
|
||||||
|
|
||||||
const allAlerts = await fetchAlertsFromGitHub();
|
const allAlerts = await fetchAlertsFromGitHub();
|
||||||
|
|
||||||
@@ -144,32 +129,33 @@ router.get("/user/:userId", async (req, res) => {
|
|||||||
dismissedAlertRecords.map((record) => record.alertId),
|
dismissedAlertRecords.map((record) => record.alertId),
|
||||||
);
|
);
|
||||||
|
|
||||||
const userAlerts = allAlerts.filter(
|
const activeAlertsForUser = allAlerts.filter(
|
||||||
(alert) => !dismissedAlertIds.has(alert.id),
|
(alert) => !dismissedAlertIds.has(alert.id),
|
||||||
);
|
);
|
||||||
|
|
||||||
res.json({
|
res.json({
|
||||||
alerts: userAlerts,
|
alerts: activeAlertsForUser,
|
||||||
total_count: userAlerts.length,
|
cached: alertCache.get("termix_alerts") !== null,
|
||||||
dismissed_count: dismissedAlertIds.size,
|
total_count: activeAlertsForUser.length,
|
||||||
});
|
});
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
authLogger.error("Failed to get user alerts", error);
|
authLogger.error("Failed to get user alerts", error);
|
||||||
res.status(500).json({ error: "Failed to fetch user alerts" });
|
res.status(500).json({ error: "Failed to fetch alerts" });
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
// Route: Dismiss an alert for a user
|
// Deprecated endpoint - use GET /alerts instead
|
||||||
// POST /alerts/dismiss
|
|
||||||
router.post("/dismiss", async (req, res) => {
|
|
||||||
try {
|
|
||||||
const { userId, alertId } = req.body;
|
|
||||||
|
|
||||||
if (!userId || !alertId) {
|
// Route: Dismiss an alert for the authenticated user
|
||||||
authLogger.warn("Missing userId or alertId in dismiss request");
|
// POST /alerts/dismiss
|
||||||
return res
|
router.post("/dismiss", authenticateJWT, async (req, res) => {
|
||||||
.status(400)
|
try {
|
||||||
.json({ error: "User ID and Alert ID are required" });
|
const { alertId } = req.body;
|
||||||
|
const userId = (req as any).userId;
|
||||||
|
|
||||||
|
if (!alertId) {
|
||||||
|
authLogger.warn("Missing alertId in dismiss request", { userId });
|
||||||
|
return res.status(400).json({ error: "Alert ID is required" });
|
||||||
}
|
}
|
||||||
|
|
||||||
const existingDismissal = await db
|
const existingDismissal = await db
|
||||||
@@ -201,13 +187,9 @@ router.post("/dismiss", async (req, res) => {
|
|||||||
|
|
||||||
// Route: Get dismissed alerts for a user
|
// Route: Get dismissed alerts for a user
|
||||||
// GET /alerts/dismissed/:userId
|
// GET /alerts/dismissed/:userId
|
||||||
router.get("/dismissed/:userId", async (req, res) => {
|
router.get("/dismissed", authenticateJWT, async (req, res) => {
|
||||||
try {
|
try {
|
||||||
const { userId } = req.params;
|
const userId = (req as any).userId;
|
||||||
|
|
||||||
if (!userId) {
|
|
||||||
return res.status(400).json({ error: "User ID is required" });
|
|
||||||
}
|
|
||||||
|
|
||||||
const dismissedAlertRecords = await db
|
const dismissedAlertRecords = await db
|
||||||
.select({
|
.select({
|
||||||
@@ -227,16 +209,15 @@ router.get("/dismissed/:userId", async (req, res) => {
|
|||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
// Route: Undismiss an alert for a user (remove from dismissed list)
|
// Route: Undismiss an alert for the authenticated user (remove from dismissed list)
|
||||||
// DELETE /alerts/dismiss
|
// DELETE /alerts/dismiss
|
||||||
router.delete("/dismiss", async (req, res) => {
|
router.delete("/dismiss", authenticateJWT, async (req, res) => {
|
||||||
try {
|
try {
|
||||||
const { userId, alertId } = req.body;
|
const { alertId } = req.body;
|
||||||
|
const userId = (req as any).userId;
|
||||||
|
|
||||||
if (!userId || !alertId) {
|
if (!alertId) {
|
||||||
return res
|
return res.status(400).json({ error: "Alert ID is required" });
|
||||||
.status(400)
|
|
||||||
.json({ error: "User ID and Alert ID are required" });
|
|
||||||
}
|
}
|
||||||
|
|
||||||
const result = await db
|
const result = await db
|
||||||
|
|||||||
@@ -5,7 +5,8 @@ import { eq, and, desc, sql } from "drizzle-orm";
|
|||||||
import type { Request, Response, NextFunction } from "express";
|
import type { Request, Response, NextFunction } from "express";
|
||||||
import jwt from "jsonwebtoken";
|
import jwt from "jsonwebtoken";
|
||||||
import { authLogger } from "../../utils/logger.js";
|
import { authLogger } from "../../utils/logger.js";
|
||||||
import { EncryptedDBOperations } from "../../utils/encrypted-db-operations.js";
|
import { SimpleDBOps } from "../../utils/simple-db-ops.js";
|
||||||
|
import { AuthManager } from "../../utils/auth-manager.js";
|
||||||
import {
|
import {
|
||||||
parseSSHKey,
|
parseSSHKey,
|
||||||
parsePublicKey,
|
parsePublicKey,
|
||||||
@@ -84,29 +85,14 @@ function isNonEmptyString(val: any): val is string {
|
|||||||
return typeof val === "string" && val.trim().length > 0;
|
return typeof val === "string" && val.trim().length > 0;
|
||||||
}
|
}
|
||||||
|
|
||||||
function authenticateJWT(req: Request, res: Response, next: NextFunction) {
|
// Use AuthManager middleware for authentication
|
||||||
const authHeader = req.headers["authorization"];
|
const authManager = AuthManager.getInstance();
|
||||||
if (!authHeader || !authHeader.startsWith("Bearer ")) {
|
const authenticateJWT = authManager.createAuthMiddleware();
|
||||||
authLogger.warn("Missing or invalid Authorization header");
|
const requireDataAccess = authManager.createDataAccessMiddleware();
|
||||||
return res
|
|
||||||
.status(401)
|
|
||||||
.json({ error: "Missing or invalid Authorization header" });
|
|
||||||
}
|
|
||||||
const token = authHeader.split(" ")[1];
|
|
||||||
const jwtSecret = process.env.JWT_SECRET || "secret";
|
|
||||||
try {
|
|
||||||
const payload = jwt.verify(token, jwtSecret) as JWTPayload;
|
|
||||||
(req as any).userId = payload.userId;
|
|
||||||
next();
|
|
||||||
} catch (err) {
|
|
||||||
authLogger.warn("Invalid or expired token");
|
|
||||||
return res.status(401).json({ error: "Invalid or expired token" });
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Create a new credential
|
// Create a new credential
|
||||||
// POST /credentials
|
// POST /credentials
|
||||||
router.post("/", authenticateJWT, async (req: Request, res: Response) => {
|
router.post("/", authenticateJWT, requireDataAccess, async (req: Request, res: Response) => {
|
||||||
const userId = (req as any).userId;
|
const userId = (req as any).userId;
|
||||||
const {
|
const {
|
||||||
name,
|
name,
|
||||||
@@ -210,10 +196,11 @@ router.post("/", authenticateJWT, async (req: Request, res: Response) => {
|
|||||||
lastUsed: null,
|
lastUsed: null,
|
||||||
};
|
};
|
||||||
|
|
||||||
const created = (await EncryptedDBOperations.insert(
|
const created = (await SimpleDBOps.insert(
|
||||||
sshCredentials,
|
sshCredentials,
|
||||||
"ssh_credentials",
|
"ssh_credentials",
|
||||||
credentialData,
|
credentialData,
|
||||||
|
userId,
|
||||||
)) as typeof credentialData & { id: number };
|
)) as typeof credentialData & { id: number };
|
||||||
|
|
||||||
authLogger.success(
|
authLogger.success(
|
||||||
@@ -245,7 +232,7 @@ router.post("/", authenticateJWT, async (req: Request, res: Response) => {
|
|||||||
|
|
||||||
// Get all credentials for the authenticated user
|
// Get all credentials for the authenticated user
|
||||||
// GET /credentials
|
// GET /credentials
|
||||||
router.get("/", authenticateJWT, async (req: Request, res: Response) => {
|
router.get("/", authenticateJWT, requireDataAccess, async (req: Request, res: Response) => {
|
||||||
const userId = (req as any).userId;
|
const userId = (req as any).userId;
|
||||||
|
|
||||||
if (!isNonEmptyString(userId)) {
|
if (!isNonEmptyString(userId)) {
|
||||||
@@ -254,13 +241,14 @@ router.get("/", authenticateJWT, async (req: Request, res: Response) => {
|
|||||||
}
|
}
|
||||||
|
|
||||||
try {
|
try {
|
||||||
const credentials = await EncryptedDBOperations.select(
|
const credentials = await SimpleDBOps.select(
|
||||||
db
|
db
|
||||||
.select()
|
.select()
|
||||||
.from(sshCredentials)
|
.from(sshCredentials)
|
||||||
.where(eq(sshCredentials.userId, userId))
|
.where(eq(sshCredentials.userId, userId))
|
||||||
.orderBy(desc(sshCredentials.updatedAt)),
|
.orderBy(desc(sshCredentials.updatedAt)),
|
||||||
"ssh_credentials",
|
"ssh_credentials",
|
||||||
|
userId,
|
||||||
);
|
);
|
||||||
|
|
||||||
res.json(credentials.map((cred) => formatCredentialOutput(cred)));
|
res.json(credentials.map((cred) => formatCredentialOutput(cred)));
|
||||||
@@ -272,7 +260,7 @@ router.get("/", authenticateJWT, async (req: Request, res: Response) => {
|
|||||||
|
|
||||||
// Get all unique credential folders for the authenticated user
|
// Get all unique credential folders for the authenticated user
|
||||||
// GET /credentials/folders
|
// GET /credentials/folders
|
||||||
router.get("/folders", authenticateJWT, async (req: Request, res: Response) => {
|
router.get("/folders", authenticateJWT, requireDataAccess, async (req: Request, res: Response) => {
|
||||||
const userId = (req as any).userId;
|
const userId = (req as any).userId;
|
||||||
|
|
||||||
if (!isNonEmptyString(userId)) {
|
if (!isNonEmptyString(userId)) {
|
||||||
@@ -305,7 +293,7 @@ router.get("/folders", authenticateJWT, async (req: Request, res: Response) => {
|
|||||||
|
|
||||||
// Get a specific credential by ID (with plain text secrets)
|
// Get a specific credential by ID (with plain text secrets)
|
||||||
// GET /credentials/:id
|
// GET /credentials/:id
|
||||||
router.get("/:id", authenticateJWT, async (req: Request, res: Response) => {
|
router.get("/:id", authenticateJWT, requireDataAccess, async (req: Request, res: Response) => {
|
||||||
const userId = (req as any).userId;
|
const userId = (req as any).userId;
|
||||||
const { id } = req.params;
|
const { id } = req.params;
|
||||||
|
|
||||||
@@ -315,7 +303,7 @@ router.get("/:id", authenticateJWT, async (req: Request, res: Response) => {
|
|||||||
}
|
}
|
||||||
|
|
||||||
try {
|
try {
|
||||||
const credentials = await EncryptedDBOperations.select(
|
const credentials = await SimpleDBOps.select(
|
||||||
db
|
db
|
||||||
.select()
|
.select()
|
||||||
.from(sshCredentials)
|
.from(sshCredentials)
|
||||||
@@ -326,6 +314,7 @@ router.get("/:id", authenticateJWT, async (req: Request, res: Response) => {
|
|||||||
),
|
),
|
||||||
),
|
),
|
||||||
"ssh_credentials",
|
"ssh_credentials",
|
||||||
|
userId,
|
||||||
);
|
);
|
||||||
|
|
||||||
if (credentials.length === 0) {
|
if (credentials.length === 0) {
|
||||||
@@ -362,7 +351,7 @@ router.get("/:id", authenticateJWT, async (req: Request, res: Response) => {
|
|||||||
|
|
||||||
// Update a credential
|
// Update a credential
|
||||||
// PUT /credentials/:id
|
// PUT /credentials/:id
|
||||||
router.put("/:id", authenticateJWT, async (req: Request, res: Response) => {
|
router.put("/:id", authenticateJWT, requireDataAccess, async (req: Request, res: Response) => {
|
||||||
const userId = (req as any).userId;
|
const userId = (req as any).userId;
|
||||||
const { id } = req.params;
|
const { id } = req.params;
|
||||||
const updateData = req.body;
|
const updateData = req.body;
|
||||||
@@ -437,18 +426,19 @@ router.put("/:id", authenticateJWT, async (req: Request, res: Response) => {
|
|||||||
}
|
}
|
||||||
|
|
||||||
if (Object.keys(updateFields).length === 0) {
|
if (Object.keys(updateFields).length === 0) {
|
||||||
const existing = await EncryptedDBOperations.select(
|
const existing = await SimpleDBOps.select(
|
||||||
db
|
db
|
||||||
.select()
|
.select()
|
||||||
.from(sshCredentials)
|
.from(sshCredentials)
|
||||||
.where(eq(sshCredentials.id, parseInt(id))),
|
.where(eq(sshCredentials.id, parseInt(id))),
|
||||||
"ssh_credentials",
|
"ssh_credentials",
|
||||||
|
userId,
|
||||||
);
|
);
|
||||||
|
|
||||||
return res.json(formatCredentialOutput(existing[0]));
|
return res.json(formatCredentialOutput(existing[0]));
|
||||||
}
|
}
|
||||||
|
|
||||||
await EncryptedDBOperations.update(
|
await SimpleDBOps.update(
|
||||||
sshCredentials,
|
sshCredentials,
|
||||||
"ssh_credentials",
|
"ssh_credentials",
|
||||||
and(
|
and(
|
||||||
@@ -456,14 +446,16 @@ router.put("/:id", authenticateJWT, async (req: Request, res: Response) => {
|
|||||||
eq(sshCredentials.userId, userId),
|
eq(sshCredentials.userId, userId),
|
||||||
),
|
),
|
||||||
updateFields,
|
updateFields,
|
||||||
|
userId,
|
||||||
);
|
);
|
||||||
|
|
||||||
const updated = await EncryptedDBOperations.select(
|
const updated = await SimpleDBOps.select(
|
||||||
db
|
db
|
||||||
.select()
|
.select()
|
||||||
.from(sshCredentials)
|
.from(sshCredentials)
|
||||||
.where(eq(sshCredentials.id, parseInt(id))),
|
.where(eq(sshCredentials.id, parseInt(id))),
|
||||||
"ssh_credentials",
|
"ssh_credentials",
|
||||||
|
userId,
|
||||||
);
|
);
|
||||||
|
|
||||||
const credential = updated[0];
|
const credential = updated[0];
|
||||||
@@ -490,7 +482,7 @@ router.put("/:id", authenticateJWT, async (req: Request, res: Response) => {
|
|||||||
|
|
||||||
// Delete a credential
|
// Delete a credential
|
||||||
// DELETE /credentials/:id
|
// DELETE /credentials/:id
|
||||||
router.delete("/:id", authenticateJWT, async (req: Request, res: Response) => {
|
router.delete("/:id", authenticateJWT, requireDataAccess, async (req: Request, res: Response) => {
|
||||||
const userId = (req as any).userId;
|
const userId = (req as any).userId;
|
||||||
const { id } = req.params;
|
const { id } = req.params;
|
||||||
|
|
||||||
|
|||||||
@@ -8,12 +8,16 @@ import {
|
|||||||
fileManagerPinned,
|
fileManagerPinned,
|
||||||
fileManagerShortcuts,
|
fileManagerShortcuts,
|
||||||
} from "../db/schema.js";
|
} from "../db/schema.js";
|
||||||
import { eq, and, desc } from "drizzle-orm";
|
import { eq, and, desc, isNotNull, or } from "drizzle-orm";
|
||||||
import type { Request, Response, NextFunction } from "express";
|
import type { Request, Response, NextFunction } from "express";
|
||||||
import jwt from "jsonwebtoken";
|
import jwt from "jsonwebtoken";
|
||||||
import multer from "multer";
|
import multer from "multer";
|
||||||
import { sshLogger } from "../../utils/logger.js";
|
import { sshLogger } from "../../utils/logger.js";
|
||||||
import { EncryptedDBOperations } from "../../utils/encrypted-db-operations.js";
|
import { SimpleDBOps } from "../../utils/simple-db-ops.js";
|
||||||
|
import { AuthManager } from "../../utils/auth-manager.js";
|
||||||
|
import { DataCrypto } from "../../utils/data-crypto.js";
|
||||||
|
import { SystemCrypto } from "../../utils/system-crypto.js";
|
||||||
|
import { DatabaseSaveTrigger } from "../db/index.js";
|
||||||
|
|
||||||
const router = express.Router();
|
const router = express.Router();
|
||||||
|
|
||||||
@@ -31,65 +35,198 @@ function isValidPort(port: any): port is number {
|
|||||||
return typeof port === "number" && port > 0 && port <= 65535;
|
return typeof port === "number" && port > 0 && port <= 65535;
|
||||||
}
|
}
|
||||||
|
|
||||||
function authenticateJWT(req: Request, res: Response, next: NextFunction) {
|
// Use AuthManager middleware for authentication
|
||||||
const authHeader = req.headers.authorization;
|
const authManager = AuthManager.getInstance();
|
||||||
if (!authHeader || !authHeader.startsWith("Bearer ")) {
|
const authenticateJWT = authManager.createAuthMiddleware();
|
||||||
sshLogger.warn("Missing or invalid Authorization header");
|
const requireDataAccess = authManager.createDataAccessMiddleware();
|
||||||
return res
|
|
||||||
.status(401)
|
|
||||||
.json({ error: "Missing or invalid Authorization header" });
|
|
||||||
}
|
|
||||||
const token = authHeader.split(" ")[1];
|
|
||||||
const jwtSecret = process.env.JWT_SECRET || "secret";
|
|
||||||
try {
|
|
||||||
const payload = jwt.verify(token, jwtSecret) as JWTPayload;
|
|
||||||
(req as any).userId = payload.userId;
|
|
||||||
next();
|
|
||||||
} catch (err) {
|
|
||||||
sshLogger.warn("Invalid or expired token");
|
|
||||||
return res.status(401).json({ error: "Invalid or expired token" });
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
function isLocalhost(req: Request) {
|
|
||||||
const ip = req.ip || req.connection?.remoteAddress;
|
|
||||||
return ip === "127.0.0.1" || ip === "::1" || ip === "::ffff:127.0.0.1";
|
|
||||||
}
|
|
||||||
|
|
||||||
// Internal-only endpoint for autostart (no JWT)
|
// Internal-only endpoint for autostart - requires internal auth token
|
||||||
router.get("/db/host/internal", async (req: Request, res: Response) => {
|
router.get("/db/host/internal", async (req: Request, res: Response) => {
|
||||||
if (!isLocalhost(req) && req.headers["x-internal-request"] !== "1") {
|
try {
|
||||||
sshLogger.warn("Unauthorized attempt to access internal SSH host endpoint");
|
// Check for internal authentication token using SystemCrypto
|
||||||
|
const internalToken = req.headers["x-internal-auth-token"];
|
||||||
|
const systemCrypto = SystemCrypto.getInstance();
|
||||||
|
const expectedToken = await systemCrypto.getInternalAuthToken();
|
||||||
|
|
||||||
|
if (internalToken !== expectedToken) {
|
||||||
|
sshLogger.warn("Unauthorized attempt to access internal SSH host endpoint", {
|
||||||
|
source: req.ip,
|
||||||
|
userAgent: req.headers["user-agent"],
|
||||||
|
providedToken: internalToken ? "present" : "missing"
|
||||||
|
});
|
||||||
return res.status(403).json({ error: "Forbidden" });
|
return res.status(403).json({ error: "Forbidden" });
|
||||||
}
|
}
|
||||||
|
} catch (error) {
|
||||||
|
sshLogger.error("Failed to validate internal auth token", error);
|
||||||
|
return res.status(500).json({ error: "Internal server error" });
|
||||||
|
}
|
||||||
|
|
||||||
try {
|
try {
|
||||||
const data = await EncryptedDBOperations.select(
|
// Query sshData directly for hosts that have autostart plaintext fields populated
|
||||||
db.select().from(sshData),
|
const autostartHosts = await db.select()
|
||||||
"ssh_data",
|
.from(sshData)
|
||||||
|
.where(
|
||||||
|
// Check if any autostart fields are populated (meaning autostart is enabled)
|
||||||
|
or(
|
||||||
|
isNotNull(sshData.autostartPassword),
|
||||||
|
isNotNull(sshData.autostartKey)
|
||||||
|
)
|
||||||
);
|
);
|
||||||
const result = data.map((row: any) => {
|
|
||||||
|
console.log("=== AUTOSTART QUERY DEBUG ===");
|
||||||
|
console.log("Found autostart hosts count:", autostartHosts.length);
|
||||||
|
autostartHosts.forEach((host, index) => {
|
||||||
|
console.log(`Host ${index + 1}:`, {
|
||||||
|
id: host.id,
|
||||||
|
ip: host.ip,
|
||||||
|
username: host.username,
|
||||||
|
hasAutostartPassword: !!host.autostartPassword,
|
||||||
|
hasAutostartKey: !!host.autostartKey,
|
||||||
|
autostartPasswordLength: host.autostartPassword?.length || 0,
|
||||||
|
autostartKeyLength: host.autostartKey?.length || 0
|
||||||
|
});
|
||||||
|
});
|
||||||
|
console.log("=== END AUTOSTART QUERY DEBUG ===");
|
||||||
|
|
||||||
|
sshLogger.info("Internal autostart endpoint accessed", {
|
||||||
|
operation: "autostart_internal_access",
|
||||||
|
configCount: autostartHosts.length,
|
||||||
|
source: req.ip,
|
||||||
|
userAgent: req.headers["user-agent"]
|
||||||
|
});
|
||||||
|
|
||||||
|
// Transform to expected format for tunnel service
|
||||||
|
const result = autostartHosts.map((host) => {
|
||||||
|
const tunnelConnections = host.tunnelConnections
|
||||||
|
? JSON.parse(host.tunnelConnections)
|
||||||
|
: [];
|
||||||
|
|
||||||
|
// Debug: Log what we're reading from database
|
||||||
|
sshLogger.info(`Autostart host from DB:`, {
|
||||||
|
hostId: host.id,
|
||||||
|
ip: host.ip,
|
||||||
|
username: host.username,
|
||||||
|
hasAutostartPassword: !!host.autostartPassword,
|
||||||
|
hasAutostartKey: !!host.autostartKey,
|
||||||
|
hasEncryptedPassword: !!host.password,
|
||||||
|
hasEncryptedKey: !!host.key,
|
||||||
|
authType: host.authType,
|
||||||
|
autostartPasswordLength: host.autostartPassword?.length || 0,
|
||||||
|
autostartKeyLength: host.autostartKey?.length || 0,
|
||||||
|
});
|
||||||
|
|
||||||
return {
|
return {
|
||||||
...row,
|
id: host.id,
|
||||||
tags:
|
userId: host.userId,
|
||||||
typeof row.tags === "string"
|
name: host.name || `autostart-${host.id}`,
|
||||||
? row.tags
|
ip: host.ip,
|
||||||
? row.tags.split(",").filter(Boolean)
|
port: host.port,
|
||||||
: []
|
username: host.username,
|
||||||
: [],
|
password: host.autostartPassword,
|
||||||
pin: !!row.pin,
|
key: host.autostartKey,
|
||||||
requirePassword: !!row.requirePassword,
|
keyPassword: host.autostartKeyPassword,
|
||||||
enableTerminal: !!row.enableTerminal,
|
// Include explicit autostart fields for tunnel service
|
||||||
enableTunnel: !!row.enableTunnel,
|
autostartPassword: host.autostartPassword,
|
||||||
tunnelConnections: row.tunnelConnections
|
autostartKey: host.autostartKey,
|
||||||
? JSON.parse(row.tunnelConnections)
|
autostartKeyPassword: host.autostartKeyPassword,
|
||||||
: [],
|
authType: host.authType,
|
||||||
enableFileManager: !!row.enableFileManager,
|
enableTunnel: true,
|
||||||
|
tunnelConnections: tunnelConnections.filter((tunnel: any) => tunnel.autoStart),
|
||||||
|
pin: false,
|
||||||
|
enableTerminal: false,
|
||||||
|
enableFileManager: false,
|
||||||
|
tags: ["autostart"],
|
||||||
};
|
};
|
||||||
});
|
});
|
||||||
|
|
||||||
res.json(result);
|
res.json(result);
|
||||||
} catch (err) {
|
} catch (err) {
|
||||||
sshLogger.error("Failed to fetch SSH data (internal)", err);
|
sshLogger.error("Failed to fetch autostart SSH data", err);
|
||||||
res.status(500).json({ error: "Failed to fetch SSH data" });
|
res.status(500).json({ error: "Failed to fetch autostart SSH data" });
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
// Internal-only endpoint for all hosts - requires internal auth token (for tunnel endpointHost resolution)
|
||||||
|
router.get("/db/host/internal/all", async (req: Request, res: Response) => {
|
||||||
|
try {
|
||||||
|
// Check for internal authentication token using SystemCrypto
|
||||||
|
const internalToken = req.headers["x-internal-auth-token"];
|
||||||
|
if (!internalToken) {
|
||||||
|
return res.status(401).json({ error: "Internal authentication token required" });
|
||||||
|
}
|
||||||
|
|
||||||
|
const systemCrypto = SystemCrypto.getInstance();
|
||||||
|
const expectedToken = await systemCrypto.getInternalAuthToken();
|
||||||
|
|
||||||
|
if (internalToken !== expectedToken) {
|
||||||
|
return res.status(401).json({ error: "Invalid internal authentication token" });
|
||||||
|
}
|
||||||
|
|
||||||
|
// Query all hosts for endpointHost resolution
|
||||||
|
const allHosts = await db.select().from(sshData);
|
||||||
|
|
||||||
|
sshLogger.info("Internal all hosts endpoint accessed", {
|
||||||
|
operation: "all_hosts_internal_access",
|
||||||
|
hostCount: allHosts.length,
|
||||||
|
source: req.ip,
|
||||||
|
userAgent: req.headers["user-agent"]
|
||||||
|
});
|
||||||
|
|
||||||
|
// Transform to expected format for tunnel service
|
||||||
|
const result = allHosts.map((host) => {
|
||||||
|
const tunnelConnections = host.tunnelConnections
|
||||||
|
? JSON.parse(host.tunnelConnections)
|
||||||
|
: [];
|
||||||
|
|
||||||
|
// Debug: Log what we're reading from database for all hosts
|
||||||
|
sshLogger.info(`All hosts endpoint - host from DB:`, {
|
||||||
|
hostId: host.id,
|
||||||
|
ip: host.ip,
|
||||||
|
username: host.username,
|
||||||
|
hasAutostartPassword: !!host.autostartPassword,
|
||||||
|
hasAutostartKey: !!host.autostartKey,
|
||||||
|
hasEncryptedPassword: !!host.password,
|
||||||
|
hasEncryptedKey: !!host.key,
|
||||||
|
authType: host.authType,
|
||||||
|
autostartPasswordLength: host.autostartPassword?.length || 0,
|
||||||
|
autostartKeyLength: host.autostartKey?.length || 0,
|
||||||
|
encryptedPasswordLength: host.password?.length || 0,
|
||||||
|
encryptedKeyLength: host.key?.length || 0,
|
||||||
|
});
|
||||||
|
|
||||||
|
return {
|
||||||
|
id: host.id,
|
||||||
|
userId: host.userId,
|
||||||
|
name: host.name || `${host.username}@${host.ip}`,
|
||||||
|
ip: host.ip,
|
||||||
|
port: host.port,
|
||||||
|
username: host.username,
|
||||||
|
password: host.autostartPassword || host.password,
|
||||||
|
key: host.autostartKey || host.key,
|
||||||
|
keyPassword: host.autostartKeyPassword || host.keyPassword,
|
||||||
|
// Include autostart fields for fallback
|
||||||
|
autostartPassword: host.autostartPassword,
|
||||||
|
autostartKey: host.autostartKey,
|
||||||
|
autostartKeyPassword: host.autostartKeyPassword,
|
||||||
|
authType: host.authType,
|
||||||
|
keyType: host.keyType,
|
||||||
|
credentialId: host.credentialId,
|
||||||
|
enableTunnel: !!host.enableTunnel,
|
||||||
|
tunnelConnections: tunnelConnections,
|
||||||
|
pin: !!host.pin,
|
||||||
|
enableTerminal: !!host.enableTerminal,
|
||||||
|
enableFileManager: !!host.enableFileManager,
|
||||||
|
defaultPath: host.defaultPath,
|
||||||
|
createdAt: host.createdAt,
|
||||||
|
updatedAt: host.updatedAt,
|
||||||
|
};
|
||||||
|
});
|
||||||
|
|
||||||
|
res.json(result);
|
||||||
|
} catch (err) {
|
||||||
|
sshLogger.error("Failed to fetch all hosts for internal use", err);
|
||||||
|
res.status(500).json({ error: "Failed to fetch all hosts" });
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
@@ -98,6 +235,7 @@ router.get("/db/host/internal", async (req: Request, res: Response) => {
|
|||||||
router.post(
|
router.post(
|
||||||
"/db/host",
|
"/db/host",
|
||||||
authenticateJWT,
|
authenticateJWT,
|
||||||
|
requireDataAccess,
|
||||||
upload.single("key"),
|
upload.single("key"),
|
||||||
async (req: Request, res: Response) => {
|
async (req: Request, res: Response) => {
|
||||||
const userId = (req as any).userId;
|
const userId = (req as any).userId;
|
||||||
@@ -138,7 +276,6 @@ router.post(
|
|||||||
port,
|
port,
|
||||||
username,
|
username,
|
||||||
password,
|
password,
|
||||||
requirePassword,
|
|
||||||
authMethod,
|
authMethod,
|
||||||
authType,
|
authType,
|
||||||
credentialId,
|
credentialId,
|
||||||
@@ -190,7 +327,6 @@ router.post(
|
|||||||
|
|
||||||
if (effectiveAuthType === "password") {
|
if (effectiveAuthType === "password") {
|
||||||
sshDataObj.password = password || null;
|
sshDataObj.password = password || null;
|
||||||
sshDataObj.requirePassword = requirePassword !== false ? 1 : 0;
|
|
||||||
sshDataObj.key = null;
|
sshDataObj.key = null;
|
||||||
sshDataObj.keyPassword = null;
|
sshDataObj.keyPassword = null;
|
||||||
sshDataObj.keyType = null;
|
sshDataObj.keyType = null;
|
||||||
@@ -199,21 +335,20 @@ router.post(
|
|||||||
sshDataObj.keyPassword = keyPassword || null;
|
sshDataObj.keyPassword = keyPassword || null;
|
||||||
sshDataObj.keyType = keyType;
|
sshDataObj.keyType = keyType;
|
||||||
sshDataObj.password = null;
|
sshDataObj.password = null;
|
||||||
sshDataObj.requirePassword = 1; // Default to true for non-password auth
|
|
||||||
} else {
|
} else {
|
||||||
// For credential auth
|
// For credential auth
|
||||||
sshDataObj.password = null;
|
sshDataObj.password = null;
|
||||||
sshDataObj.key = null;
|
sshDataObj.key = null;
|
||||||
sshDataObj.keyPassword = null;
|
sshDataObj.keyPassword = null;
|
||||||
sshDataObj.keyType = null;
|
sshDataObj.keyType = null;
|
||||||
sshDataObj.requirePassword = 1; // Default to true for non-password auth
|
|
||||||
}
|
}
|
||||||
|
|
||||||
try {
|
try {
|
||||||
const result = await EncryptedDBOperations.insert(
|
const result = await SimpleDBOps.insert(
|
||||||
sshData,
|
sshData,
|
||||||
"ssh_data",
|
"ssh_data",
|
||||||
sshDataObj,
|
sshDataObj,
|
||||||
|
userId,
|
||||||
);
|
);
|
||||||
|
|
||||||
if (!result) {
|
if (!result) {
|
||||||
@@ -237,7 +372,6 @@ router.post(
|
|||||||
: []
|
: []
|
||||||
: [],
|
: [],
|
||||||
pin: !!createdHost.pin,
|
pin: !!createdHost.pin,
|
||||||
requirePassword: !!createdHost.requirePassword,
|
|
||||||
enableTerminal: !!createdHost.enableTerminal,
|
enableTerminal: !!createdHost.enableTerminal,
|
||||||
enableTunnel: !!createdHost.enableTunnel,
|
enableTunnel: !!createdHost.enableTunnel,
|
||||||
tunnelConnections: createdHost.tunnelConnections
|
tunnelConnections: createdHost.tunnelConnections
|
||||||
@@ -324,7 +458,6 @@ router.put(
|
|||||||
port,
|
port,
|
||||||
username,
|
username,
|
||||||
password,
|
password,
|
||||||
requirePassword,
|
|
||||||
authMethod,
|
authMethod,
|
||||||
authType,
|
authType,
|
||||||
credentialId,
|
credentialId,
|
||||||
@@ -379,7 +512,6 @@ router.put(
|
|||||||
if (password) {
|
if (password) {
|
||||||
sshDataObj.password = password;
|
sshDataObj.password = password;
|
||||||
}
|
}
|
||||||
sshDataObj.requirePassword = requirePassword !== false ? 1 : 0;
|
|
||||||
sshDataObj.key = null;
|
sshDataObj.key = null;
|
||||||
sshDataObj.keyPassword = null;
|
sshDataObj.keyPassword = null;
|
||||||
sshDataObj.keyType = null;
|
sshDataObj.keyType = null;
|
||||||
@@ -394,25 +526,24 @@ router.put(
|
|||||||
sshDataObj.keyType = keyType;
|
sshDataObj.keyType = keyType;
|
||||||
}
|
}
|
||||||
sshDataObj.password = null;
|
sshDataObj.password = null;
|
||||||
sshDataObj.requirePassword = 1; // Default to true for non-password auth
|
|
||||||
} else {
|
} else {
|
||||||
// For credential auth
|
// For credential auth
|
||||||
sshDataObj.password = null;
|
sshDataObj.password = null;
|
||||||
sshDataObj.key = null;
|
sshDataObj.key = null;
|
||||||
sshDataObj.keyPassword = null;
|
sshDataObj.keyPassword = null;
|
||||||
sshDataObj.keyType = null;
|
sshDataObj.keyType = null;
|
||||||
sshDataObj.requirePassword = 1; // Default to true for non-password auth
|
|
||||||
}
|
}
|
||||||
|
|
||||||
try {
|
try {
|
||||||
await EncryptedDBOperations.update(
|
await SimpleDBOps.update(
|
||||||
sshData,
|
sshData,
|
||||||
"ssh_data",
|
"ssh_data",
|
||||||
and(eq(sshData.id, Number(hostId)), eq(sshData.userId, userId)),
|
and(eq(sshData.id, Number(hostId)), eq(sshData.userId, userId)),
|
||||||
sshDataObj,
|
sshDataObj,
|
||||||
|
userId,
|
||||||
);
|
);
|
||||||
|
|
||||||
const updatedHosts = await EncryptedDBOperations.select(
|
const updatedHosts = await SimpleDBOps.select(
|
||||||
db
|
db
|
||||||
.select()
|
.select()
|
||||||
.from(sshData)
|
.from(sshData)
|
||||||
@@ -420,6 +551,7 @@ router.put(
|
|||||||
and(eq(sshData.id, Number(hostId)), eq(sshData.userId, userId)),
|
and(eq(sshData.id, Number(hostId)), eq(sshData.userId, userId)),
|
||||||
),
|
),
|
||||||
"ssh_data",
|
"ssh_data",
|
||||||
|
userId,
|
||||||
);
|
);
|
||||||
|
|
||||||
if (updatedHosts.length === 0) {
|
if (updatedHosts.length === 0) {
|
||||||
@@ -441,7 +573,6 @@ router.put(
|
|||||||
: []
|
: []
|
||||||
: [],
|
: [],
|
||||||
pin: !!updatedHost.pin,
|
pin: !!updatedHost.pin,
|
||||||
requirePassword: !!updatedHost.requirePassword,
|
|
||||||
enableTerminal: !!updatedHost.enableTerminal,
|
enableTerminal: !!updatedHost.enableTerminal,
|
||||||
enableTunnel: !!updatedHost.enableTunnel,
|
enableTunnel: !!updatedHost.enableTunnel,
|
||||||
tunnelConnections: updatedHost.tunnelConnections
|
tunnelConnections: updatedHost.tunnelConnections
|
||||||
@@ -493,9 +624,10 @@ router.get("/db/host", authenticateJWT, async (req: Request, res: Response) => {
|
|||||||
return res.status(400).json({ error: "Invalid userId" });
|
return res.status(400).json({ error: "Invalid userId" });
|
||||||
}
|
}
|
||||||
try {
|
try {
|
||||||
const data = await EncryptedDBOperations.select(
|
const data = await SimpleDBOps.select(
|
||||||
db.select().from(sshData).where(eq(sshData.userId, userId)),
|
db.select().from(sshData).where(eq(sshData.userId, userId)),
|
||||||
"ssh_data",
|
"ssh_data",
|
||||||
|
userId,
|
||||||
);
|
);
|
||||||
|
|
||||||
const result = await Promise.all(
|
const result = await Promise.all(
|
||||||
@@ -509,7 +641,6 @@ router.get("/db/host", authenticateJWT, async (req: Request, res: Response) => {
|
|||||||
: []
|
: []
|
||||||
: [],
|
: [],
|
||||||
pin: !!row.pin,
|
pin: !!row.pin,
|
||||||
requirePassword: !!row.requirePassword,
|
|
||||||
enableTerminal: !!row.enableTerminal,
|
enableTerminal: !!row.enableTerminal,
|
||||||
enableTunnel: !!row.enableTunnel,
|
enableTunnel: !!row.enableTunnel,
|
||||||
tunnelConnections: row.tunnelConnections
|
tunnelConnections: row.tunnelConnections
|
||||||
@@ -1113,7 +1244,7 @@ router.put(
|
|||||||
}
|
}
|
||||||
|
|
||||||
try {
|
try {
|
||||||
const updatedHosts = await EncryptedDBOperations.update(
|
const updatedHosts = await SimpleDBOps.update(
|
||||||
sshData,
|
sshData,
|
||||||
"ssh_data",
|
"ssh_data",
|
||||||
and(eq(sshData.userId, userId), eq(sshData.folder, oldName)),
|
and(eq(sshData.userId, userId), eq(sshData.folder, oldName)),
|
||||||
@@ -1121,6 +1252,7 @@ router.put(
|
|||||||
folder: newName,
|
folder: newName,
|
||||||
updatedAt: new Date().toISOString(),
|
updatedAt: new Date().toISOString(),
|
||||||
},
|
},
|
||||||
|
userId,
|
||||||
);
|
);
|
||||||
|
|
||||||
const updatedCredentials = await db
|
const updatedCredentials = await db
|
||||||
@@ -1137,6 +1269,9 @@ router.put(
|
|||||||
)
|
)
|
||||||
.returning();
|
.returning();
|
||||||
|
|
||||||
|
// Trigger database save after folder rename
|
||||||
|
DatabaseSaveTrigger.triggerSave("folder_rename");
|
||||||
|
|
||||||
res.json({
|
res.json({
|
||||||
message: "Folder renamed successfully",
|
message: "Folder renamed successfully",
|
||||||
updatedHosts: updatedHosts.length,
|
updatedHosts: updatedHosts.length,
|
||||||
@@ -1261,7 +1396,7 @@ router.post(
|
|||||||
updatedAt: new Date().toISOString(),
|
updatedAt: new Date().toISOString(),
|
||||||
};
|
};
|
||||||
|
|
||||||
await EncryptedDBOperations.insert(sshData, "ssh_data", sshDataObj);
|
await SimpleDBOps.insert(sshData, "ssh_data", sshDataObj, userId);
|
||||||
results.success++;
|
results.success++;
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
results.failed++;
|
results.failed++;
|
||||||
@@ -1280,4 +1415,295 @@ router.post(
|
|||||||
},
|
},
|
||||||
);
|
);
|
||||||
|
|
||||||
|
// Route: Enable autostart for SSH configuration (requires JWT)
|
||||||
|
// POST /ssh/autostart/enable
|
||||||
|
router.post(
|
||||||
|
"/autostart/enable",
|
||||||
|
authenticateJWT,
|
||||||
|
requireDataAccess,
|
||||||
|
async (req: Request, res: Response) => {
|
||||||
|
const userId = (req as any).userId;
|
||||||
|
const { sshConfigId } = req.body;
|
||||||
|
|
||||||
|
if (!sshConfigId || typeof sshConfigId !== "number") {
|
||||||
|
sshLogger.warn("Missing or invalid sshConfigId in autostart enable request", {
|
||||||
|
operation: "autostart_enable",
|
||||||
|
userId,
|
||||||
|
sshConfigId
|
||||||
|
});
|
||||||
|
return res.status(400).json({ error: "Valid sshConfigId is required" });
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
// Validate user has access to decrypt the data
|
||||||
|
const userDataKey = DataCrypto.getUserDataKey(userId);
|
||||||
|
if (!userDataKey) {
|
||||||
|
sshLogger.warn("User attempted to enable autostart without unlocked data", {
|
||||||
|
operation: "autostart_enable_failed",
|
||||||
|
userId,
|
||||||
|
sshConfigId,
|
||||||
|
reason: "data_locked"
|
||||||
|
});
|
||||||
|
return res.status(400).json({
|
||||||
|
error: "Failed to enable autostart. Ensure user data is unlocked."
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get and decrypt SSH configuration
|
||||||
|
const sshConfig = await db.select()
|
||||||
|
.from(sshData)
|
||||||
|
.where(and(
|
||||||
|
eq(sshData.id, sshConfigId),
|
||||||
|
eq(sshData.userId, userId)
|
||||||
|
));
|
||||||
|
|
||||||
|
if (sshConfig.length === 0) {
|
||||||
|
sshLogger.warn("SSH config not found for autostart enable", {
|
||||||
|
operation: "autostart_enable_failed",
|
||||||
|
userId,
|
||||||
|
sshConfigId,
|
||||||
|
reason: "config_not_found"
|
||||||
|
});
|
||||||
|
return res.status(404).json({
|
||||||
|
error: "SSH configuration not found"
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
const config = sshConfig[0];
|
||||||
|
|
||||||
|
// Decrypt sensitive fields
|
||||||
|
const decryptedConfig = DataCrypto.decryptRecord("ssh_data", config, userId, userDataKey);
|
||||||
|
|
||||||
|
// Debug: Log what we're about to save
|
||||||
|
console.log("=== AUTOSTART DEBUG: Decrypted credentials ===");
|
||||||
|
console.log("sshConfigId:", sshConfigId);
|
||||||
|
console.log("authType:", config.authType);
|
||||||
|
console.log("hasPassword:", !!decryptedConfig.password);
|
||||||
|
console.log("hasKey:", !!decryptedConfig.key);
|
||||||
|
console.log("hasKeyPassword:", !!decryptedConfig.keyPassword);
|
||||||
|
console.log("passwordLength:", decryptedConfig.password?.length || 0);
|
||||||
|
console.log("keyLength:", decryptedConfig.key?.length || 0);
|
||||||
|
console.log("=== END AUTOSTART DEBUG ===");
|
||||||
|
|
||||||
|
// Also handle tunnel connections - populate endpoint credentials
|
||||||
|
let updatedTunnelConnections = config.tunnelConnections;
|
||||||
|
if (config.tunnelConnections) {
|
||||||
|
try {
|
||||||
|
const tunnelConnections = JSON.parse(config.tunnelConnections);
|
||||||
|
|
||||||
|
// For each tunnel connection, try to resolve endpoint credentials
|
||||||
|
const resolvedConnections = await Promise.all(
|
||||||
|
tunnelConnections.map(async (tunnel: any) => {
|
||||||
|
if (tunnel.autoStart && tunnel.endpointHost && !tunnel.endpointPassword && !tunnel.endpointKey) {
|
||||||
|
console.log("=== RESOLVING ENDPOINT CREDENTIALS ===");
|
||||||
|
console.log("endpointHost:", tunnel.endpointHost);
|
||||||
|
|
||||||
|
// Find endpoint host by name or username@ip
|
||||||
|
const endpointHosts = await db.select()
|
||||||
|
.from(sshData)
|
||||||
|
.where(eq(sshData.userId, userId));
|
||||||
|
|
||||||
|
const endpointHost = endpointHosts.find(h =>
|
||||||
|
h.name === tunnel.endpointHost ||
|
||||||
|
`${h.username}@${h.ip}` === tunnel.endpointHost
|
||||||
|
);
|
||||||
|
|
||||||
|
if (endpointHost) {
|
||||||
|
console.log("Found endpoint host:", endpointHost.id, endpointHost.ip);
|
||||||
|
|
||||||
|
// Decrypt endpoint host credentials
|
||||||
|
const decryptedEndpoint = DataCrypto.decryptRecord("ssh_data", endpointHost, userId, userDataKey);
|
||||||
|
|
||||||
|
console.log("Endpoint credentials:", {
|
||||||
|
hasPassword: !!decryptedEndpoint.password,
|
||||||
|
hasKey: !!decryptedEndpoint.key,
|
||||||
|
passwordLength: decryptedEndpoint.password?.length || 0
|
||||||
|
});
|
||||||
|
|
||||||
|
// Add endpoint credentials to tunnel connection
|
||||||
|
return {
|
||||||
|
...tunnel,
|
||||||
|
endpointPassword: decryptedEndpoint.password || null,
|
||||||
|
endpointKey: decryptedEndpoint.key || null,
|
||||||
|
endpointKeyPassword: decryptedEndpoint.keyPassword || null,
|
||||||
|
endpointAuthType: endpointHost.authType
|
||||||
|
};
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return tunnel;
|
||||||
|
})
|
||||||
|
);
|
||||||
|
|
||||||
|
updatedTunnelConnections = JSON.stringify(resolvedConnections);
|
||||||
|
console.log("=== UPDATED TUNNEL CONNECTIONS ===");
|
||||||
|
} catch (error) {
|
||||||
|
console.log("=== TUNNEL CONNECTION UPDATE FAILED ===", error);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Update the SSH config with plaintext autostart fields and resolved tunnel connections
|
||||||
|
const updateResult = await db.update(sshData)
|
||||||
|
.set({
|
||||||
|
autostartPassword: decryptedConfig.password || null,
|
||||||
|
autostartKey: decryptedConfig.key || null,
|
||||||
|
autostartKeyPassword: decryptedConfig.keyPassword || null,
|
||||||
|
tunnelConnections: updatedTunnelConnections,
|
||||||
|
})
|
||||||
|
.where(eq(sshData.id, sshConfigId));
|
||||||
|
|
||||||
|
// Debug: Log update result
|
||||||
|
console.log("=== AUTOSTART DEBUG: Update result ===");
|
||||||
|
console.log("updateResult:", updateResult);
|
||||||
|
console.log("update completed for sshConfigId:", sshConfigId);
|
||||||
|
console.log("=== END UPDATE DEBUG ===");
|
||||||
|
|
||||||
|
// Force database save after autostart update
|
||||||
|
try {
|
||||||
|
await DatabaseSaveTrigger.triggerSave();
|
||||||
|
console.log("=== DATABASE SAVE TRIGGERED AFTER AUTOSTART ===");
|
||||||
|
} catch (saveError) {
|
||||||
|
console.log("=== DATABASE SAVE FAILED ===", saveError);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Verify the data was actually saved
|
||||||
|
try {
|
||||||
|
const verifyQuery = await db.select()
|
||||||
|
.from(sshData)
|
||||||
|
.where(eq(sshData.id, sshConfigId));
|
||||||
|
|
||||||
|
if (verifyQuery.length > 0) {
|
||||||
|
const saved = verifyQuery[0];
|
||||||
|
console.log("=== VERIFICATION: Data actually saved ===");
|
||||||
|
console.log("autostartPassword exists:", !!saved.autostartPassword);
|
||||||
|
console.log("autostartKey exists:", !!saved.autostartKey);
|
||||||
|
console.log("autostartPassword length:", saved.autostartPassword?.length || 0);
|
||||||
|
console.log("=== END VERIFICATION ===");
|
||||||
|
}
|
||||||
|
} catch (verifyError) {
|
||||||
|
console.log("=== VERIFICATION FAILED ===", verifyError);
|
||||||
|
}
|
||||||
|
|
||||||
|
sshLogger.success("AutoStart enabled successfully", {
|
||||||
|
operation: "autostart_enabled",
|
||||||
|
userId,
|
||||||
|
sshConfigId,
|
||||||
|
host: config.ip
|
||||||
|
});
|
||||||
|
|
||||||
|
res.json({
|
||||||
|
message: "AutoStart enabled successfully",
|
||||||
|
sshConfigId
|
||||||
|
});
|
||||||
|
} catch (error) {
|
||||||
|
sshLogger.error("Error enabling autostart", error, {
|
||||||
|
operation: "autostart_enable_error",
|
||||||
|
userId,
|
||||||
|
sshConfigId
|
||||||
|
});
|
||||||
|
res.status(500).json({ error: "Internal server error" });
|
||||||
|
}
|
||||||
|
}
|
||||||
|
);
|
||||||
|
|
||||||
|
// Route: Disable autostart for SSH configuration (requires JWT)
|
||||||
|
// DELETE /ssh/autostart/disable
|
||||||
|
router.delete(
|
||||||
|
"/autostart/disable",
|
||||||
|
authenticateJWT,
|
||||||
|
async (req: Request, res: Response) => {
|
||||||
|
const userId = (req as any).userId;
|
||||||
|
const { sshConfigId } = req.body;
|
||||||
|
|
||||||
|
if (!sshConfigId || typeof sshConfigId !== "number") {
|
||||||
|
sshLogger.warn("Missing or invalid sshConfigId in autostart disable request", {
|
||||||
|
operation: "autostart_disable",
|
||||||
|
userId,
|
||||||
|
sshConfigId
|
||||||
|
});
|
||||||
|
return res.status(400).json({ error: "Valid sshConfigId is required" });
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
// Clear the autostart plaintext fields for this SSH config
|
||||||
|
const result = await db.update(sshData)
|
||||||
|
.set({
|
||||||
|
autostartPassword: null,
|
||||||
|
autostartKey: null,
|
||||||
|
autostartKeyPassword: null,
|
||||||
|
})
|
||||||
|
.where(and(
|
||||||
|
eq(sshData.id, sshConfigId),
|
||||||
|
eq(sshData.userId, userId)
|
||||||
|
));
|
||||||
|
|
||||||
|
sshLogger.info("AutoStart disabled successfully", {
|
||||||
|
operation: "autostart_disabled",
|
||||||
|
userId,
|
||||||
|
sshConfigId
|
||||||
|
});
|
||||||
|
|
||||||
|
res.json({
|
||||||
|
message: "AutoStart disabled successfully",
|
||||||
|
sshConfigId
|
||||||
|
});
|
||||||
|
} catch (error) {
|
||||||
|
sshLogger.error("Error disabling autostart", error, {
|
||||||
|
operation: "autostart_disable_error",
|
||||||
|
userId,
|
||||||
|
sshConfigId
|
||||||
|
});
|
||||||
|
res.status(500).json({ error: "Internal server error" });
|
||||||
|
}
|
||||||
|
}
|
||||||
|
);
|
||||||
|
|
||||||
|
// Route: Get autostart status for user's SSH configurations (requires JWT)
|
||||||
|
// GET /ssh/autostart/status
|
||||||
|
router.get(
|
||||||
|
"/autostart/status",
|
||||||
|
authenticateJWT,
|
||||||
|
async (req: Request, res: Response) => {
|
||||||
|
const userId = (req as any).userId;
|
||||||
|
|
||||||
|
try {
|
||||||
|
// Query user's SSH configs that have autostart enabled
|
||||||
|
const autostartConfigs = await db.select()
|
||||||
|
.from(sshData)
|
||||||
|
.where(and(
|
||||||
|
eq(sshData.userId, userId),
|
||||||
|
or(
|
||||||
|
isNotNull(sshData.autostartPassword),
|
||||||
|
isNotNull(sshData.autostartKey)
|
||||||
|
)
|
||||||
|
));
|
||||||
|
|
||||||
|
// Map to just the basic info needed for status
|
||||||
|
const statusList = autostartConfigs.map(config => ({
|
||||||
|
sshConfigId: config.id,
|
||||||
|
host: config.ip,
|
||||||
|
port: config.port,
|
||||||
|
username: config.username,
|
||||||
|
authType: config.authType
|
||||||
|
}));
|
||||||
|
|
||||||
|
sshLogger.info("AutoStart status retrieved", {
|
||||||
|
operation: "autostart_status",
|
||||||
|
userId,
|
||||||
|
configCount: statusList.length
|
||||||
|
});
|
||||||
|
|
||||||
|
res.json({
|
||||||
|
autostart_configs: statusList,
|
||||||
|
total_count: statusList.length
|
||||||
|
});
|
||||||
|
} catch (error) {
|
||||||
|
sshLogger.error("Error getting autostart status", error, {
|
||||||
|
operation: "autostart_status_error",
|
||||||
|
userId
|
||||||
|
});
|
||||||
|
res.status(500).json({ error: "Internal server error" });
|
||||||
|
}
|
||||||
|
}
|
||||||
|
);
|
||||||
|
|
||||||
export default router;
|
export default router;
|
||||||
|
|||||||
@@ -7,6 +7,7 @@ import {
|
|||||||
fileManagerPinned,
|
fileManagerPinned,
|
||||||
fileManagerShortcuts,
|
fileManagerShortcuts,
|
||||||
dismissedAlerts,
|
dismissedAlerts,
|
||||||
|
settings,
|
||||||
} from "../db/schema.js";
|
} from "../db/schema.js";
|
||||||
import { eq, and } from "drizzle-orm";
|
import { eq, and } from "drizzle-orm";
|
||||||
import bcrypt from "bcryptjs";
|
import bcrypt from "bcryptjs";
|
||||||
@@ -16,6 +17,12 @@ import speakeasy from "speakeasy";
|
|||||||
import QRCode from "qrcode";
|
import QRCode from "qrcode";
|
||||||
import type { Request, Response, NextFunction } from "express";
|
import type { Request, Response, NextFunction } from "express";
|
||||||
import { authLogger, apiLogger } from "../../utils/logger.js";
|
import { authLogger, apiLogger } from "../../utils/logger.js";
|
||||||
|
import { AuthManager } from "../../utils/auth-manager.js";
|
||||||
|
import { UserCrypto } from "../../utils/user-crypto.js";
|
||||||
|
import { DataCrypto } from "../../utils/data-crypto.js";
|
||||||
|
|
||||||
|
// Get auth manager instance
|
||||||
|
const authManager = AuthManager.getInstance();
|
||||||
|
|
||||||
async function verifyOIDCToken(
|
async function verifyOIDCToken(
|
||||||
idToken: string,
|
idToken: string,
|
||||||
@@ -129,35 +136,12 @@ interface JWTPayload {
|
|||||||
exp?: number;
|
exp?: number;
|
||||||
}
|
}
|
||||||
|
|
||||||
// JWT authentication middleware
|
// JWT authentication middleware - only verify JWT, no data unlock required
|
||||||
function authenticateJWT(req: Request, res: Response, next: NextFunction) {
|
const authenticateJWT = authManager.createAuthMiddleware();
|
||||||
const authHeader = req.headers["authorization"];
|
const requireAdmin = authManager.createAdminMiddleware();
|
||||||
if (!authHeader || !authHeader.startsWith("Bearer ")) {
|
|
||||||
authLogger.warn("Missing or invalid Authorization header", {
|
// Data access middleware - requires user to have unlocked data keys
|
||||||
operation: "auth",
|
const requireDataAccess = authManager.createDataAccessMiddleware();
|
||||||
method: req.method,
|
|
||||||
url: req.url,
|
|
||||||
});
|
|
||||||
return res
|
|
||||||
.status(401)
|
|
||||||
.json({ error: "Missing or invalid Authorization header" });
|
|
||||||
}
|
|
||||||
const token = authHeader.split(" ")[1];
|
|
||||||
const jwtSecret = process.env.JWT_SECRET || "secret";
|
|
||||||
try {
|
|
||||||
const payload = jwt.verify(token, jwtSecret) as JWTPayload;
|
|
||||||
(req as any).userId = payload.userId;
|
|
||||||
next();
|
|
||||||
} catch (err) {
|
|
||||||
authLogger.warn("Invalid or expired token", {
|
|
||||||
operation: "auth",
|
|
||||||
method: req.method,
|
|
||||||
url: req.url,
|
|
||||||
error: err,
|
|
||||||
});
|
|
||||||
return res.status(401).json({ error: "Invalid or expired token" });
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Route: Create traditional user (username/password)
|
// Route: Create traditional user (username/password)
|
||||||
// POST /users/create
|
// POST /users/create
|
||||||
@@ -208,19 +192,10 @@ router.post("/create", async (req, res) => {
|
|||||||
}
|
}
|
||||||
|
|
||||||
let isFirstUser = false;
|
let isFirstUser = false;
|
||||||
try {
|
|
||||||
const countResult = db.$client
|
const countResult = db.$client
|
||||||
.prepare("SELECT COUNT(*) as count FROM users")
|
.prepare("SELECT COUNT(*) as count FROM users")
|
||||||
.get();
|
.get();
|
||||||
isFirstUser = ((countResult as any)?.count || 0) === 0;
|
isFirstUser = ((countResult as any)?.count || 0) === 0;
|
||||||
} catch (e) {
|
|
||||||
isFirstUser = true;
|
|
||||||
authLogger.warn("Failed to check user count, assuming first user", {
|
|
||||||
operation: "user_create",
|
|
||||||
username,
|
|
||||||
error: e,
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
const saltRounds = parseInt(process.env.SALT || "10", 10);
|
const saltRounds = parseInt(process.env.SALT || "10", 10);
|
||||||
const password_hash = await bcrypt.hash(password, saltRounds);
|
const password_hash = await bcrypt.hash(password, saltRounds);
|
||||||
@@ -244,6 +219,25 @@ router.post("/create", async (req, res) => {
|
|||||||
totp_backup_codes: null,
|
totp_backup_codes: null,
|
||||||
});
|
});
|
||||||
|
|
||||||
|
// Set up user data encryption (KEK-DEK architecture)
|
||||||
|
try {
|
||||||
|
await authManager.registerUser(id, password);
|
||||||
|
authLogger.success("User encryption setup completed", {
|
||||||
|
operation: "user_encryption_setup",
|
||||||
|
userId: id,
|
||||||
|
});
|
||||||
|
} catch (encryptionError) {
|
||||||
|
// If encryption setup fails, delete user record
|
||||||
|
await db.delete(users).where(eq(users.id, id));
|
||||||
|
authLogger.error("Failed to setup user encryption, user creation rolled back", encryptionError, {
|
||||||
|
operation: "user_create_encryption_failed",
|
||||||
|
userId: id,
|
||||||
|
});
|
||||||
|
return res.status(500).json({
|
||||||
|
error: "Failed to setup user security - user creation cancelled"
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
authLogger.success(
|
authLogger.success(
|
||||||
`Traditional user created: ${username} (is_admin: ${isFirstUser})`,
|
`Traditional user created: ${username} (is_admin: ${isFirstUser})`,
|
||||||
{
|
{
|
||||||
@@ -343,11 +337,46 @@ router.post("/oidc-config", authenticateJWT, async (req, res) => {
|
|||||||
scopes: scopes || "openid email profile",
|
scopes: scopes || "openid email profile",
|
||||||
};
|
};
|
||||||
|
|
||||||
|
// Encrypt sensitive configuration for storage
|
||||||
|
let encryptedConfig;
|
||||||
|
try {
|
||||||
|
// Use admin's data key to encrypt OIDC configuration
|
||||||
|
const adminDataKey = DataCrypto.getUserDataKey(userId);
|
||||||
|
if (adminDataKey) {
|
||||||
|
// Provide stable recordId for settings objects
|
||||||
|
const configWithId = { ...config, id: `oidc-config-${userId}` };
|
||||||
|
encryptedConfig = DataCrypto.encryptRecord("settings", configWithId, userId, adminDataKey);
|
||||||
|
authLogger.info("OIDC configuration encrypted with admin data key", {
|
||||||
|
operation: "oidc_config_encrypt",
|
||||||
|
userId,
|
||||||
|
});
|
||||||
|
} else {
|
||||||
|
// If admin data not unlocked, only encrypt client_secret
|
||||||
|
encryptedConfig = {
|
||||||
|
...config,
|
||||||
|
client_secret: `encrypted:${Buffer.from(client_secret).toString('base64')}`, // Simple base64 encoding
|
||||||
|
};
|
||||||
|
authLogger.warn("OIDC configuration stored with basic encoding - admin should re-save with password", {
|
||||||
|
operation: "oidc_config_basic_encoding",
|
||||||
|
userId,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
} catch (encryptError) {
|
||||||
|
authLogger.error("Failed to encrypt OIDC configuration, storing with basic encoding", encryptError, {
|
||||||
|
operation: "oidc_config_encrypt_failed",
|
||||||
|
userId,
|
||||||
|
});
|
||||||
|
encryptedConfig = {
|
||||||
|
...config,
|
||||||
|
client_secret: `encoded:${Buffer.from(client_secret).toString('base64')}`,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
db.$client
|
db.$client
|
||||||
.prepare(
|
.prepare(
|
||||||
"INSERT OR REPLACE INTO settings (key, value) VALUES ('oidc_config', ?)",
|
"INSERT OR REPLACE INTO settings (key, value) VALUES ('oidc_config', ?)",
|
||||||
)
|
)
|
||||||
.run(JSON.stringify(config));
|
.run(JSON.stringify(encryptedConfig));
|
||||||
authLogger.info("OIDC configuration updated", {
|
authLogger.info("OIDC configuration updated", {
|
||||||
operation: "oidc_update",
|
operation: "oidc_update",
|
||||||
userId,
|
userId,
|
||||||
@@ -383,7 +412,7 @@ router.delete("/oidc-config", authenticateJWT, async (req, res) => {
|
|||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
// Route: Get OIDC configuration
|
// Route: Get OIDC configuration (public - needed for login page)
|
||||||
// GET /users/oidc-config
|
// GET /users/oidc-config
|
||||||
router.get("/oidc-config", async (req, res) => {
|
router.get("/oidc-config", async (req, res) => {
|
||||||
try {
|
try {
|
||||||
@@ -393,7 +422,62 @@ router.get("/oidc-config", async (req, res) => {
|
|||||||
if (!row) {
|
if (!row) {
|
||||||
return res.json(null);
|
return res.json(null);
|
||||||
}
|
}
|
||||||
res.json(JSON.parse((row as any).value));
|
|
||||||
|
let config = JSON.parse((row as any).value);
|
||||||
|
|
||||||
|
// Decrypt or decode client_secret for display
|
||||||
|
if (config.client_secret) {
|
||||||
|
if (config.client_secret.startsWith('encrypted:')) {
|
||||||
|
// Requires admin permission to decrypt
|
||||||
|
const authHeader = req.headers["authorization"];
|
||||||
|
if (authHeader?.startsWith("Bearer ")) {
|
||||||
|
const token = authHeader.split(" ")[1];
|
||||||
|
const authManager = AuthManager.getInstance();
|
||||||
|
const payload = await authManager.verifyJWTToken(token);
|
||||||
|
|
||||||
|
if (payload) {
|
||||||
|
const userId = payload.userId;
|
||||||
|
const user = await db.select().from(users).where(eq(users.id, userId));
|
||||||
|
|
||||||
|
if (user && user.length > 0 && user[0].is_admin) {
|
||||||
|
try {
|
||||||
|
const adminDataKey = DataCrypto.getUserDataKey(userId);
|
||||||
|
if (adminDataKey) {
|
||||||
|
// Use same stable recordId for decryption - note: FieldCrypto will use stored recordId
|
||||||
|
config = DataCrypto.decryptRecord("settings", config, userId, adminDataKey);
|
||||||
|
} else {
|
||||||
|
// Admin data not unlocked, hide client_secret
|
||||||
|
config.client_secret = "[ENCRYPTED - PASSWORD REQUIRED]";
|
||||||
|
}
|
||||||
|
} catch (decryptError) {
|
||||||
|
authLogger.warn("Failed to decrypt OIDC config for admin", {
|
||||||
|
operation: "oidc_config_decrypt_failed",
|
||||||
|
userId,
|
||||||
|
});
|
||||||
|
config.client_secret = "[ENCRYPTED - DECRYPTION FAILED]";
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
config.client_secret = "[ENCRYPTED - ADMIN ONLY]";
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
config.client_secret = "[ENCRYPTED - AUTH REQUIRED]";
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
config.client_secret = "[ENCRYPTED - AUTH REQUIRED]";
|
||||||
|
}
|
||||||
|
} else if (config.client_secret.startsWith('encoded:')) {
|
||||||
|
// base64 decode
|
||||||
|
try {
|
||||||
|
const decoded = Buffer.from(config.client_secret.substring(8), 'base64').toString('utf8');
|
||||||
|
config.client_secret = decoded;
|
||||||
|
} catch {
|
||||||
|
config.client_secret = "[ENCODING ERROR]";
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// Otherwise plaintext, return directly
|
||||||
|
}
|
||||||
|
|
||||||
|
res.json(config);
|
||||||
} catch (err) {
|
} catch (err) {
|
||||||
authLogger.error("Failed to get OIDC config", err);
|
authLogger.error("Failed to get OIDC config", err);
|
||||||
res.status(500).json({ error: "Failed to get OIDC config" });
|
res.status(500).json({ error: "Failed to get OIDC config" });
|
||||||
@@ -654,14 +738,10 @@ router.get("/oidc/callback", async (req, res) => {
|
|||||||
|
|
||||||
let isFirstUser = false;
|
let isFirstUser = false;
|
||||||
if (!user || user.length === 0) {
|
if (!user || user.length === 0) {
|
||||||
try {
|
|
||||||
const countResult = db.$client
|
const countResult = db.$client
|
||||||
.prepare("SELECT COUNT(*) as count FROM users")
|
.prepare("SELECT COUNT(*) as count FROM users")
|
||||||
.get();
|
.get();
|
||||||
isFirstUser = ((countResult as any)?.count || 0) === 0;
|
isFirstUser = ((countResult as any)?.count || 0) === 0;
|
||||||
} catch (e) {
|
|
||||||
isFirstUser = true;
|
|
||||||
}
|
|
||||||
|
|
||||||
const id = nanoid();
|
const id = nanoid();
|
||||||
await db.insert(users).values({
|
await db.insert(users).values({
|
||||||
@@ -693,8 +773,7 @@ router.get("/oidc/callback", async (req, res) => {
|
|||||||
|
|
||||||
const userRecord = user[0];
|
const userRecord = user[0];
|
||||||
|
|
||||||
const jwtSecret = process.env.JWT_SECRET || "secret";
|
const token = await authManager.generateJWTToken(userRecord.id, {
|
||||||
const token = jwt.sign({ userId: userRecord.id }, jwtSecret, {
|
|
||||||
expiresIn: "50d",
|
expiresIn: "50d",
|
||||||
});
|
});
|
||||||
|
|
||||||
@@ -775,22 +854,69 @@ router.post("/login", async (req, res) => {
|
|||||||
});
|
});
|
||||||
return res.status(401).json({ error: "Incorrect password" });
|
return res.status(401).json({ error: "Incorrect password" });
|
||||||
}
|
}
|
||||||
const jwtSecret = process.env.JWT_SECRET || "secret";
|
|
||||||
const token = jwt.sign({ userId: userRecord.id }, jwtSecret, {
|
|
||||||
expiresIn: "50d",
|
|
||||||
});
|
|
||||||
|
|
||||||
|
// Check if legacy user needs encryption setup
|
||||||
|
try {
|
||||||
|
const kekSalt = await db
|
||||||
|
.select()
|
||||||
|
.from(settings)
|
||||||
|
.where(eq(settings.key, `user_kek_salt_${userRecord.id}`));
|
||||||
|
|
||||||
|
if (kekSalt.length === 0) {
|
||||||
|
// Legacy user first login - set up new encryption
|
||||||
|
await authManager.registerUser(userRecord.id, password);
|
||||||
|
authLogger.success("Legacy user encryption initialized", {
|
||||||
|
operation: "legacy_user_setup",
|
||||||
|
username,
|
||||||
|
userId: userRecord.id,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
} catch (setupError) {
|
||||||
|
authLogger.error("Failed to initialize user encryption", setupError, {
|
||||||
|
operation: "user_encryption_setup_failed",
|
||||||
|
username,
|
||||||
|
userId: userRecord.id,
|
||||||
|
});
|
||||||
|
// Encryption setup failure should not block login for existing users
|
||||||
|
}
|
||||||
|
|
||||||
|
// Unlock user data keys
|
||||||
|
const dataUnlocked = await authManager.authenticateUser(userRecord.id, password);
|
||||||
|
if (!dataUnlocked) {
|
||||||
|
authLogger.error("Failed to unlock user data during login", undefined, {
|
||||||
|
operation: "user_login_data_unlock_failed",
|
||||||
|
username,
|
||||||
|
userId: userRecord.id,
|
||||||
|
});
|
||||||
|
return res.status(500).json({
|
||||||
|
error: "Failed to unlock user data - please contact administrator"
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
// TOTP handling
|
||||||
if (userRecord.totp_enabled) {
|
if (userRecord.totp_enabled) {
|
||||||
const tempToken = jwt.sign(
|
const tempToken = await authManager.generateJWTToken(userRecord.id, {
|
||||||
{ userId: userRecord.id, pending_totp: true },
|
pendingTOTP: true,
|
||||||
jwtSecret,
|
expiresIn: "10m",
|
||||||
{ expiresIn: "10m" },
|
});
|
||||||
);
|
|
||||||
return res.json({
|
return res.json({
|
||||||
requires_totp: true,
|
requires_totp: true,
|
||||||
temp_token: tempToken,
|
temp_token: tempToken,
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Generate normal JWT token
|
||||||
|
const token = await authManager.generateJWTToken(userRecord.id, {
|
||||||
|
expiresIn: "24h",
|
||||||
|
});
|
||||||
|
|
||||||
|
authLogger.success(`User logged in successfully: ${username}`, {
|
||||||
|
operation: "user_login_success",
|
||||||
|
username,
|
||||||
|
userId: userRecord.id,
|
||||||
|
dataUnlocked: true,
|
||||||
|
});
|
||||||
|
|
||||||
return res.json({
|
return res.json({
|
||||||
token,
|
token,
|
||||||
is_admin: !!userRecord.is_admin,
|
is_admin: !!userRecord.is_admin,
|
||||||
@@ -829,10 +955,36 @@ router.get("/me", authenticateJWT, async (req: Request, res: Response) => {
|
|||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
// Route: Count users
|
// Route: Check if system requires initial setup (public - for first-time setup detection)
|
||||||
// GET /users/count
|
// GET /users/setup-required
|
||||||
router.get("/count", async (req, res) => {
|
router.get("/setup-required", async (req, res) => {
|
||||||
try {
|
try {
|
||||||
|
const countResult = db.$client
|
||||||
|
.prepare("SELECT COUNT(*) as count FROM users")
|
||||||
|
.get();
|
||||||
|
const count = (countResult as any)?.count || 0;
|
||||||
|
|
||||||
|
res.json({
|
||||||
|
setup_required: count === 0,
|
||||||
|
// 不暴露具体用户数量,只返回是否需要初始化
|
||||||
|
});
|
||||||
|
} catch (err) {
|
||||||
|
authLogger.error("Failed to check setup status", err);
|
||||||
|
res.status(500).json({ error: "Failed to check setup status" });
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
// Route: Count users (admin only - for dashboard statistics)
|
||||||
|
// GET /users/count
|
||||||
|
router.get("/count", authenticateJWT, async (req, res) => {
|
||||||
|
const userId = (req as any).userId;
|
||||||
|
try {
|
||||||
|
// 只有管理员可以查看用户统计
|
||||||
|
const user = await db.select().from(users).where(eq(users.id, userId));
|
||||||
|
if (!user[0] || !user[0].is_admin) {
|
||||||
|
return res.status(403).json({ error: "Admin access required" });
|
||||||
|
}
|
||||||
|
|
||||||
const countResult = db.$client
|
const countResult = db.$client
|
||||||
.prepare("SELECT COUNT(*) as count FROM users")
|
.prepare("SELECT COUNT(*) as count FROM users")
|
||||||
.get();
|
.get();
|
||||||
@@ -846,7 +998,7 @@ router.get("/count", async (req, res) => {
|
|||||||
|
|
||||||
// Route: DB health check (actually queries DB)
|
// Route: DB health check (actually queries DB)
|
||||||
// GET /users/db-health
|
// GET /users/db-health
|
||||||
router.get("/db-health", async (req, res) => {
|
router.get("/db-health", requireAdmin, async (req, res) => {
|
||||||
try {
|
try {
|
||||||
db.$client.prepare("SELECT 1").get();
|
db.$client.prepare("SELECT 1").get();
|
||||||
res.json({ status: "ok" });
|
res.json({ status: "ok" });
|
||||||
@@ -856,7 +1008,7 @@ router.get("/db-health", async (req, res) => {
|
|||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
// Route: Get registration allowed status
|
// Route: Get registration allowed status (public - needed for login page)
|
||||||
// GET /users/registration-allowed
|
// GET /users/registration-allowed
|
||||||
router.get("/registration-allowed", async (req, res) => {
|
router.get("/registration-allowed", async (req, res) => {
|
||||||
try {
|
try {
|
||||||
@@ -1245,11 +1397,9 @@ router.post("/totp/verify-login", async (req, res) => {
|
|||||||
return res.status(400).json({ error: "Token and TOTP code are required" });
|
return res.status(400).json({ error: "Token and TOTP code are required" });
|
||||||
}
|
}
|
||||||
|
|
||||||
const jwtSecret = process.env.JWT_SECRET || "secret";
|
|
||||||
|
|
||||||
try {
|
try {
|
||||||
const decoded = jwt.verify(temp_token, jwtSecret) as any;
|
const decoded = await authManager.verifyJWTToken(temp_token);
|
||||||
if (!decoded.pending_totp) {
|
if (!decoded || !decoded.pendingTOTP) {
|
||||||
return res.status(401).json({ error: "Invalid temporary token" });
|
return res.status(401).json({ error: "Invalid temporary token" });
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -1291,7 +1441,7 @@ router.post("/totp/verify-login", async (req, res) => {
|
|||||||
.where(eq(users.id, userRecord.id));
|
.where(eq(users.id, userRecord.id));
|
||||||
}
|
}
|
||||||
|
|
||||||
const token = jwt.sign({ userId: userRecord.id }, jwtSecret, {
|
const token = await authManager.generateJWTToken(userRecord.id, {
|
||||||
expiresIn: "50d",
|
expiresIn: "50d",
|
||||||
});
|
});
|
||||||
|
|
||||||
@@ -1606,4 +1756,175 @@ router.delete("/delete-user", authenticateJWT, async (req, res) => {
|
|||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
|
// ===== New security API endpoints =====
|
||||||
|
|
||||||
|
// Route: User data unlock - used when session expires
|
||||||
|
// POST /users/unlock-data
|
||||||
|
router.post("/unlock-data", authenticateJWT, async (req, res) => {
|
||||||
|
const userId = (req as any).userId;
|
||||||
|
const { password } = req.body;
|
||||||
|
|
||||||
|
if (!password) {
|
||||||
|
return res.status(400).json({ error: "Password is required" });
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
const unlocked = await authManager.authenticateUser(userId, password);
|
||||||
|
if (unlocked) {
|
||||||
|
authLogger.success("User data unlocked", {
|
||||||
|
operation: "user_data_unlock",
|
||||||
|
userId,
|
||||||
|
});
|
||||||
|
res.json({
|
||||||
|
success: true,
|
||||||
|
message: "Data unlocked successfully"
|
||||||
|
});
|
||||||
|
} else {
|
||||||
|
authLogger.warn("Failed to unlock user data - invalid password", {
|
||||||
|
operation: "user_data_unlock_failed",
|
||||||
|
userId,
|
||||||
|
});
|
||||||
|
res.status(401).json({ error: "Invalid password" });
|
||||||
|
}
|
||||||
|
} catch (err) {
|
||||||
|
authLogger.error("Data unlock failed", err, {
|
||||||
|
operation: "user_data_unlock_error",
|
||||||
|
userId,
|
||||||
|
});
|
||||||
|
res.status(500).json({ error: "Failed to unlock data" });
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
// Route: Check user data unlock status
|
||||||
|
// GET /users/data-status
|
||||||
|
router.get("/data-status", authenticateJWT, async (req, res) => {
|
||||||
|
const userId = (req as any).userId;
|
||||||
|
|
||||||
|
try {
|
||||||
|
const isUnlocked = authManager.isUserUnlocked(userId);
|
||||||
|
const userCrypto = UserCrypto.getInstance();
|
||||||
|
const sessionStatus = { unlocked: isUnlocked };
|
||||||
|
|
||||||
|
res.json({
|
||||||
|
isUnlocked,
|
||||||
|
session: sessionStatus,
|
||||||
|
});
|
||||||
|
} catch (err) {
|
||||||
|
authLogger.error("Failed to get data status", err, {
|
||||||
|
operation: "data_status_error",
|
||||||
|
userId,
|
||||||
|
});
|
||||||
|
res.status(500).json({ error: "Failed to get data status" });
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
// Route: User logout (clear data session)
|
||||||
|
// POST /users/logout
|
||||||
|
router.post("/logout", authenticateJWT, async (req, res) => {
|
||||||
|
const userId = (req as any).userId;
|
||||||
|
|
||||||
|
try {
|
||||||
|
authManager.logoutUser(userId);
|
||||||
|
authLogger.info("User logged out", {
|
||||||
|
operation: "user_logout",
|
||||||
|
userId,
|
||||||
|
});
|
||||||
|
res.json({ message: "Logged out successfully" });
|
||||||
|
} catch (err) {
|
||||||
|
authLogger.error("Logout failed", err, {
|
||||||
|
operation: "logout_error",
|
||||||
|
userId,
|
||||||
|
});
|
||||||
|
res.status(500).json({ error: "Logout failed" });
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
// Route: Change user password (re-encrypt data keys)
|
||||||
|
// POST /users/change-password
|
||||||
|
router.post("/change-password", authenticateJWT, async (req, res) => {
|
||||||
|
const userId = (req as any).userId;
|
||||||
|
const { currentPassword, newPassword } = req.body;
|
||||||
|
|
||||||
|
if (!currentPassword || !newPassword) {
|
||||||
|
return res.status(400).json({
|
||||||
|
error: "Current password and new password are required"
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
if (newPassword.length < 8) {
|
||||||
|
return res.status(400).json({
|
||||||
|
error: "New password must be at least 8 characters long"
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
// Verify current password and change
|
||||||
|
const success = await authManager.changeUserPassword(
|
||||||
|
userId,
|
||||||
|
currentPassword,
|
||||||
|
newPassword
|
||||||
|
);
|
||||||
|
|
||||||
|
if (success) {
|
||||||
|
// Also update password hash in database
|
||||||
|
const saltRounds = parseInt(process.env.SALT || "10", 10);
|
||||||
|
const newPasswordHash = await bcrypt.hash(newPassword, saltRounds);
|
||||||
|
await db
|
||||||
|
.update(users)
|
||||||
|
.set({ password_hash: newPasswordHash })
|
||||||
|
.where(eq(users.id, userId));
|
||||||
|
|
||||||
|
authLogger.success("User password changed successfully", {
|
||||||
|
operation: "password_change_success",
|
||||||
|
userId,
|
||||||
|
});
|
||||||
|
|
||||||
|
res.json({
|
||||||
|
success: true,
|
||||||
|
message: "Password changed successfully"
|
||||||
|
});
|
||||||
|
} else {
|
||||||
|
authLogger.warn("Password change failed - invalid current password", {
|
||||||
|
operation: "password_change_failed",
|
||||||
|
userId,
|
||||||
|
});
|
||||||
|
res.status(401).json({ error: "Current password is incorrect" });
|
||||||
|
}
|
||||||
|
} catch (err) {
|
||||||
|
authLogger.error("Password change failed", err, {
|
||||||
|
operation: "password_change_error",
|
||||||
|
userId,
|
||||||
|
});
|
||||||
|
res.status(500).json({ error: "Failed to change password" });
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
// Route: Get security status (admin)
|
||||||
|
// GET /users/security-status
|
||||||
|
router.get("/security-status", authenticateJWT, async (req, res) => {
|
||||||
|
const userId = (req as any).userId;
|
||||||
|
|
||||||
|
try {
|
||||||
|
const user = await db.select().from(users).where(eq(users.id, userId));
|
||||||
|
if (!user || user.length === 0 || !user[0].is_admin) {
|
||||||
|
return res.status(403).json({ error: "Not authorized" });
|
||||||
|
}
|
||||||
|
|
||||||
|
// Simplified security status for new architecture
|
||||||
|
const securityStatus = {
|
||||||
|
initialized: true,
|
||||||
|
system: { hasSecret: true, isValid: true },
|
||||||
|
activeSessions: {},
|
||||||
|
activeSessionCount: 0
|
||||||
|
};
|
||||||
|
res.json(securityStatus);
|
||||||
|
} catch (err) {
|
||||||
|
authLogger.error("Failed to get security status", err, {
|
||||||
|
operation: "security_status_error",
|
||||||
|
userId,
|
||||||
|
});
|
||||||
|
res.status(500).json({ error: "Failed to get security status" });
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
export default router;
|
export default router;
|
||||||
|
|||||||
@@ -1,19 +1,20 @@
|
|||||||
import express from "express";
|
import express from "express";
|
||||||
import cors from "cors";
|
import cors from "cors";
|
||||||
import { Client as SSHClient } from "ssh2";
|
import { Client as SSHClient } from "ssh2";
|
||||||
import { db } from "../database/db/index.js";
|
import { getDb } from "../database/db/index.js";
|
||||||
import { sshCredentials } from "../database/db/schema.js";
|
import { sshCredentials } from "../database/db/schema.js";
|
||||||
import { eq, and } from "drizzle-orm";
|
import { eq, and } from "drizzle-orm";
|
||||||
import { fileLogger } from "../utils/logger.js";
|
import { fileLogger } from "../utils/logger.js";
|
||||||
import { EncryptedDBOperations } from "../utils/encrypted-db-operations.js";
|
import { SimpleDBOps } from "../utils/simple-db-ops.js";
|
||||||
|
import { AuthManager } from "../utils/auth-manager.js";
|
||||||
|
|
||||||
// 可执行文件检测工具函数
|
// Executable file detection utility function
|
||||||
function isExecutableFile(permissions: string, fileName: string): boolean {
|
function isExecutableFile(permissions: string, fileName: string): boolean {
|
||||||
// 检查执行权限位 (user, group, other)
|
// Check execute permission bits (user, group, other)
|
||||||
const hasExecutePermission =
|
const hasExecutePermission =
|
||||||
permissions[3] === "x" || permissions[6] === "x" || permissions[9] === "x";
|
permissions[3] === "x" || permissions[6] === "x" || permissions[9] === "x";
|
||||||
|
|
||||||
// 常见的脚本文件扩展名
|
// Common script file extensions
|
||||||
const scriptExtensions = [
|
const scriptExtensions = [
|
||||||
".sh",
|
".sh",
|
||||||
".py",
|
".py",
|
||||||
@@ -29,13 +30,13 @@ function isExecutableFile(permissions: string, fileName: string): boolean {
|
|||||||
fileName.toLowerCase().endsWith(ext),
|
fileName.toLowerCase().endsWith(ext),
|
||||||
);
|
);
|
||||||
|
|
||||||
// 常见的编译可执行文件(无扩展名或特定扩展名)
|
// Common compiled executable files (no extension or specific extensions)
|
||||||
const executableExtensions = [".bin", ".exe", ".out"];
|
const executableExtensions = [".bin", ".exe", ".out"];
|
||||||
const hasExecutableExtension = executableExtensions.some((ext) =>
|
const hasExecutableExtension = executableExtensions.some((ext) =>
|
||||||
fileName.toLowerCase().endsWith(ext),
|
fileName.toLowerCase().endsWith(ext),
|
||||||
);
|
);
|
||||||
|
|
||||||
// 无扩展名且有执行权限的文件通常是可执行文件
|
// Files with no extension and execute permission are usually executable files
|
||||||
const hasNoExtension = !fileName.includes(".") && hasExecutePermission;
|
const hasNoExtension = !fileName.includes(".") && hasExecutePermission;
|
||||||
|
|
||||||
return (
|
return (
|
||||||
@@ -58,9 +59,13 @@ app.use(
|
|||||||
],
|
],
|
||||||
}),
|
}),
|
||||||
);
|
);
|
||||||
app.use(express.json({ limit: "100mb" }));
|
app.use(express.json({ limit: "1gb" }));
|
||||||
app.use(express.urlencoded({ limit: "100mb", extended: true }));
|
app.use(express.urlencoded({ limit: "1gb", extended: true }));
|
||||||
app.use(express.raw({ limit: "200mb", type: "application/octet-stream" }));
|
app.use(express.raw({ limit: "5gb", type: "application/octet-stream" }));
|
||||||
|
|
||||||
|
// Initialize AuthManager and add authentication middleware
|
||||||
|
const authManager = AuthManager.getInstance();
|
||||||
|
app.use(authManager.createAuthMiddleware());
|
||||||
|
|
||||||
interface SSHSession {
|
interface SSHSession {
|
||||||
client: SSHClient;
|
client: SSHClient;
|
||||||
@@ -85,7 +90,14 @@ function cleanupSession(sessionId: string) {
|
|||||||
function scheduleSessionCleanup(sessionId: string) {
|
function scheduleSessionCleanup(sessionId: string) {
|
||||||
const session = sshSessions[sessionId];
|
const session = sshSessions[sessionId];
|
||||||
if (session) {
|
if (session) {
|
||||||
|
// Clear existing timeout
|
||||||
if (session.timeout) clearTimeout(session.timeout);
|
if (session.timeout) clearTimeout(session.timeout);
|
||||||
|
|
||||||
|
// Increase timeout to 30 minutes of inactivity
|
||||||
|
session.timeout = setTimeout(() => {
|
||||||
|
fileLogger.info(`Cleaning up inactive SSH session: ${sessionId}`);
|
||||||
|
cleanupSession(sessionId);
|
||||||
|
}, 30 * 60 * 1000); // 30 minutes - increased from 10 minutes
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -101,9 +113,19 @@ app.post("/ssh/file_manager/ssh/connect", async (req, res) => {
|
|||||||
keyPassword,
|
keyPassword,
|
||||||
authType,
|
authType,
|
||||||
credentialId,
|
credentialId,
|
||||||
userId,
|
|
||||||
} = req.body;
|
} = req.body;
|
||||||
|
|
||||||
|
// Use authenticated user ID from middleware
|
||||||
|
const userId = (req as any).userId;
|
||||||
|
|
||||||
|
if (!userId) {
|
||||||
|
fileLogger.error("SSH connection rejected: no authenticated user", {
|
||||||
|
operation: "file_connect_auth",
|
||||||
|
sessionId,
|
||||||
|
});
|
||||||
|
return res.status(401).json({ error: "Authentication required" });
|
||||||
|
}
|
||||||
|
|
||||||
if (!sessionId || !ip || !username || !port) {
|
if (!sessionId || !ip || !username || !port) {
|
||||||
fileLogger.warn("Missing SSH connection parameters for file manager", {
|
fileLogger.warn("Missing SSH connection parameters for file manager", {
|
||||||
operation: "file_connect",
|
operation: "file_connect",
|
||||||
@@ -123,8 +145,8 @@ app.post("/ssh/file_manager/ssh/connect", async (req, res) => {
|
|||||||
let resolvedCredentials = { password, sshKey, keyPassword, authType };
|
let resolvedCredentials = { password, sshKey, keyPassword, authType };
|
||||||
if (credentialId && hostId && userId) {
|
if (credentialId && hostId && userId) {
|
||||||
try {
|
try {
|
||||||
const credentials = await EncryptedDBOperations.select(
|
const credentials = await SimpleDBOps.select(
|
||||||
db
|
getDb()
|
||||||
.select()
|
.select()
|
||||||
.from(sshCredentials)
|
.from(sshCredentials)
|
||||||
.where(
|
.where(
|
||||||
@@ -134,6 +156,7 @@ app.post("/ssh/file_manager/ssh/connect", async (req, res) => {
|
|||||||
),
|
),
|
||||||
),
|
),
|
||||||
"ssh_credentials",
|
"ssh_credentials",
|
||||||
|
userId,
|
||||||
);
|
);
|
||||||
|
|
||||||
if (credentials.length > 0) {
|
if (credentials.length > 0) {
|
||||||
@@ -176,9 +199,9 @@ app.post("/ssh/file_manager/ssh/connect", async (req, res) => {
|
|||||||
host: ip,
|
host: ip,
|
||||||
port: port || 22,
|
port: port || 22,
|
||||||
username,
|
username,
|
||||||
readyTimeout: 0,
|
readyTimeout: 60000,
|
||||||
keepaliveInterval: 30000,
|
keepaliveInterval: 30000,
|
||||||
keepaliveCountMax: 0,
|
keepaliveCountMax: 3,
|
||||||
algorithms: {
|
algorithms: {
|
||||||
kex: [
|
kex: [
|
||||||
"diffie-hellman-group14-sha256",
|
"diffie-hellman-group14-sha256",
|
||||||
@@ -201,7 +224,7 @@ app.post("/ssh/file_manager/ssh/connect", async (req, res) => {
|
|||||||
"aes256-cbc",
|
"aes256-cbc",
|
||||||
"3des-cbc",
|
"3des-cbc",
|
||||||
],
|
],
|
||||||
hmac: ["hmac-sha2-256", "hmac-sha2-512", "hmac-sha1", "hmac-md5"],
|
hmac: ["hmac-sha2-256-etm@openssh.com", "hmac-sha2-512-etm@openssh.com", "hmac-sha2-256", "hmac-sha2-512", "hmac-sha1", "hmac-md5"],
|
||||||
compress: ["none", "zlib@openssh.com", "zlib"],
|
compress: ["none", "zlib@openssh.com", "zlib"],
|
||||||
},
|
},
|
||||||
};
|
};
|
||||||
@@ -259,6 +282,7 @@ app.post("/ssh/file_manager/ssh/connect", async (req, res) => {
|
|||||||
isConnected: true,
|
isConnected: true,
|
||||||
lastActive: Date.now(),
|
lastActive: Date.now(),
|
||||||
};
|
};
|
||||||
|
scheduleSessionCleanup(sessionId);
|
||||||
res.json({ status: "success", message: "SSH connection established" });
|
res.json({ status: "success", message: "SSH connection established" });
|
||||||
});
|
});
|
||||||
|
|
||||||
@@ -297,6 +321,41 @@ app.get("/ssh/file_manager/ssh/status", (req, res) => {
|
|||||||
res.json({ status: "success", connected: isConnected });
|
res.json({ status: "success", connected: isConnected });
|
||||||
});
|
});
|
||||||
|
|
||||||
|
// SSH keepalive endpoint - extends session timeout and verifies connection
|
||||||
|
app.post("/ssh/file_manager/ssh/keepalive", (req, res) => {
|
||||||
|
const { sessionId } = req.body;
|
||||||
|
|
||||||
|
if (!sessionId) {
|
||||||
|
return res.status(400).json({ error: "Session ID is required" });
|
||||||
|
}
|
||||||
|
|
||||||
|
const session = sshSessions[sessionId];
|
||||||
|
|
||||||
|
if (!session || !session.isConnected) {
|
||||||
|
return res.status(400).json({
|
||||||
|
error: "SSH session not found or not connected",
|
||||||
|
connected: false
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
// Update last active time and reschedule cleanup
|
||||||
|
session.lastActive = Date.now();
|
||||||
|
scheduleSessionCleanup(sessionId);
|
||||||
|
|
||||||
|
fileLogger.debug(`SSH session keepalive: ${sessionId}`, {
|
||||||
|
operation: "ssh_keepalive",
|
||||||
|
sessionId,
|
||||||
|
lastActive: session.lastActive,
|
||||||
|
});
|
||||||
|
|
||||||
|
res.json({
|
||||||
|
status: "success",
|
||||||
|
connected: true,
|
||||||
|
message: "Session keepalive successful",
|
||||||
|
lastActive: session.lastActive
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
app.get("/ssh/file_manager/ssh/listFiles", (req, res) => {
|
app.get("/ssh/file_manager/ssh/listFiles", (req, res) => {
|
||||||
const sessionId = req.query.sessionId as string;
|
const sessionId = req.query.sessionId as string;
|
||||||
const sshConn = sshSessions[sessionId];
|
const sshConn = sshSessions[sessionId];
|
||||||
@@ -351,12 +410,12 @@ app.get("/ssh/file_manager/ssh/listFiles", (req, res) => {
|
|||||||
const group = parts[3];
|
const group = parts[3];
|
||||||
const size = parseInt(parts[4], 10);
|
const size = parseInt(parts[4], 10);
|
||||||
|
|
||||||
// 日期可能占夨3个部分(月 日 时间)或者是(月 日 年)
|
// Date may occupy 3 parts (month day time) or (month day year)
|
||||||
let dateStr = "";
|
let dateStr = "";
|
||||||
let nameStartIndex = 8;
|
let nameStartIndex = 8;
|
||||||
|
|
||||||
if (parts[5] && parts[6] && parts[7]) {
|
if (parts[5] && parts[6] && parts[7]) {
|
||||||
// 常规格式: 月 日 时间/年
|
// Regular format: month day time/year
|
||||||
dateStr = `${parts[5]} ${parts[6]} ${parts[7]}`;
|
dateStr = `${parts[5]} ${parts[6]} ${parts[7]}`;
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -366,7 +425,7 @@ app.get("/ssh/file_manager/ssh/listFiles", (req, res) => {
|
|||||||
|
|
||||||
if (name === "." || name === "..") continue;
|
if (name === "." || name === "..") continue;
|
||||||
|
|
||||||
// 解析符号链接目标
|
// Parse symbolic link target
|
||||||
let actualName = name;
|
let actualName = name;
|
||||||
let linkTarget = undefined;
|
let linkTarget = undefined;
|
||||||
if (isLink && name.includes(" -> ")) {
|
if (isLink && name.includes(" -> ")) {
|
||||||
@@ -378,17 +437,17 @@ app.get("/ssh/file_manager/ssh/listFiles", (req, res) => {
|
|||||||
files.push({
|
files.push({
|
||||||
name: actualName,
|
name: actualName,
|
||||||
type: isDirectory ? "directory" : isLink ? "link" : "file",
|
type: isDirectory ? "directory" : isLink ? "link" : "file",
|
||||||
size: isDirectory ? undefined : size, // 目录不显示大小
|
size: isDirectory ? undefined : size, // Directories don't show size
|
||||||
modified: dateStr,
|
modified: dateStr,
|
||||||
permissions,
|
permissions,
|
||||||
owner,
|
owner,
|
||||||
group,
|
group,
|
||||||
linkTarget, // 符号链接的目标
|
linkTarget, // Symbolic link target
|
||||||
path: `${sshPath.endsWith("/") ? sshPath : sshPath + "/"}${actualName}`, // 添加完整路径
|
path: `${sshPath.endsWith("/") ? sshPath : sshPath + "/"}${actualName}`, // Add full path
|
||||||
executable:
|
executable:
|
||||||
!isDirectory && !isLink
|
!isDirectory && !isLink
|
||||||
? isExecutableFile(permissions, actualName)
|
? isExecutableFile(permissions, actualName)
|
||||||
: false, // 检测可执行文件
|
: false, // Detect executable files
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -484,8 +543,8 @@ app.get("/ssh/file_manager/ssh/readFile", (req, res) => {
|
|||||||
|
|
||||||
sshConn.lastActive = Date.now();
|
sshConn.lastActive = Date.now();
|
||||||
|
|
||||||
// First check file size to prevent loading huge files
|
// Support large file reading - increased limit for better compatibility
|
||||||
const MAX_READ_SIZE = 10 * 1024 * 1024; // 10MB - same as frontend limit
|
const MAX_READ_SIZE = 500 * 1024 * 1024; // 500MB - much more reasonable limit
|
||||||
const escapedPath = filePath.replace(/'/g, "'\"'\"'");
|
const escapedPath = filePath.replace(/'/g, "'\"'\"'");
|
||||||
|
|
||||||
// Get file size first
|
// Get file size first
|
||||||
@@ -510,10 +569,20 @@ app.get("/ssh/file_manager/ssh/readFile", (req, res) => {
|
|||||||
|
|
||||||
sizeStream.on("close", (sizeCode) => {
|
sizeStream.on("close", (sizeCode) => {
|
||||||
if (sizeCode !== 0) {
|
if (sizeCode !== 0) {
|
||||||
|
// Check if it's a file not found error (case-insensitive)
|
||||||
|
const errorLower = sizeErrorData.toLowerCase();
|
||||||
|
const isFileNotFound = errorLower.includes("no such file or directory") ||
|
||||||
|
errorLower.includes("cannot access") ||
|
||||||
|
errorLower.includes("not found") ||
|
||||||
|
errorLower.includes("resource not found");
|
||||||
|
|
||||||
fileLogger.error(`File size check failed: ${sizeErrorData}`);
|
fileLogger.error(`File size check failed: ${sizeErrorData}`);
|
||||||
return res
|
return res
|
||||||
.status(500)
|
.status(isFileNotFound ? 404 : 500)
|
||||||
.json({ error: `Cannot check file size: ${sizeErrorData}` });
|
.json({
|
||||||
|
error: `Cannot check file size: ${sizeErrorData}`,
|
||||||
|
fileNotFound: isFileNotFound
|
||||||
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
const fileSize = parseInt(sizeData.trim(), 10);
|
const fileSize = parseInt(sizeData.trim(), 10);
|
||||||
@@ -563,9 +632,19 @@ app.get("/ssh/file_manager/ssh/readFile", (req, res) => {
|
|||||||
fileLogger.error(
|
fileLogger.error(
|
||||||
`SSH readFile command failed with code ${code}: ${errorData.replace(/\n/g, " ").trim()}`,
|
`SSH readFile command failed with code ${code}: ${errorData.replace(/\n/g, " ").trim()}`,
|
||||||
);
|
);
|
||||||
|
|
||||||
|
// Check if it's a "file not found" error
|
||||||
|
const isFileNotFound =
|
||||||
|
errorData.includes("No such file or directory") ||
|
||||||
|
errorData.includes("cannot access") ||
|
||||||
|
errorData.includes("not found");
|
||||||
|
|
||||||
return res
|
return res
|
||||||
.status(500)
|
.status(isFileNotFound ? 404 : 500)
|
||||||
.json({ error: `Command failed: ${errorData}` });
|
.json({
|
||||||
|
error: `Command failed: ${errorData}`,
|
||||||
|
fileNotFound: isFileNotFound
|
||||||
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
res.json({ content: data, path: filePath });
|
res.json({ content: data, path: filePath });
|
||||||
@@ -1492,8 +1571,22 @@ app.put("/ssh/file_manager/ssh/moveItem", async (req, res) => {
|
|||||||
|
|
||||||
const moveCommand = `mv '${escapedOldPath}' '${escapedNewPath}' && echo "SUCCESS" && exit 0`;
|
const moveCommand = `mv '${escapedOldPath}' '${escapedNewPath}' && echo "SUCCESS" && exit 0`;
|
||||||
|
|
||||||
|
// Add timeout for move operation
|
||||||
|
const commandTimeout = setTimeout(() => {
|
||||||
|
if (!res.headersSent) {
|
||||||
|
res.status(408).json({
|
||||||
|
error: "Move operation timed out. SSH connection may be unstable.",
|
||||||
|
toast: {
|
||||||
|
type: "error",
|
||||||
|
message: "Move operation timed out. SSH connection may be unstable.",
|
||||||
|
},
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}, 60000); // 60 second timeout for move operations
|
||||||
|
|
||||||
sshConn.client.exec(moveCommand, (err, stream) => {
|
sshConn.client.exec(moveCommand, (err, stream) => {
|
||||||
if (err) {
|
if (err) {
|
||||||
|
clearTimeout(commandTimeout);
|
||||||
fileLogger.error("SSH moveItem error:", err);
|
fileLogger.error("SSH moveItem error:", err);
|
||||||
if (!res.headersSent) {
|
if (!res.headersSent) {
|
||||||
return res.status(500).json({ error: err.message });
|
return res.status(500).json({ error: err.message });
|
||||||
@@ -1527,6 +1620,7 @@ app.put("/ssh/file_manager/ssh/moveItem", async (req, res) => {
|
|||||||
});
|
});
|
||||||
|
|
||||||
stream.on("close", (code) => {
|
stream.on("close", (code) => {
|
||||||
|
clearTimeout(commandTimeout);
|
||||||
if (outputData.includes("SUCCESS")) {
|
if (outputData.includes("SUCCESS")) {
|
||||||
if (!res.headersSent) {
|
if (!res.headersSent) {
|
||||||
res.json({
|
res.json({
|
||||||
@@ -1569,6 +1663,7 @@ app.put("/ssh/file_manager/ssh/moveItem", async (req, res) => {
|
|||||||
});
|
});
|
||||||
|
|
||||||
stream.on("error", (streamErr) => {
|
stream.on("error", (streamErr) => {
|
||||||
|
clearTimeout(commandTimeout);
|
||||||
fileLogger.error("SSH moveItem stream error:", streamErr);
|
fileLogger.error("SSH moveItem stream error:", streamErr);
|
||||||
if (!res.headersSent) {
|
if (!res.headersSent) {
|
||||||
res.status(500).json({ error: `Stream error: ${streamErr.message}` });
|
res.status(500).json({ error: `Stream error: ${streamErr.message}` });
|
||||||
@@ -1633,8 +1728,8 @@ app.post("/ssh/file_manager/ssh/downloadFile", async (req, res) => {
|
|||||||
.json({ error: "Cannot download directories or special files" });
|
.json({ error: "Cannot download directories or special files" });
|
||||||
}
|
}
|
||||||
|
|
||||||
// Check file size (limit to 100MB for safety)
|
// Support large file downloads - increased limit for better compatibility
|
||||||
const MAX_FILE_SIZE = 100 * 1024 * 1024; // 100MB
|
const MAX_FILE_SIZE = 5 * 1024 * 1024 * 1024; // 5GB - reasonable for SSH file operations
|
||||||
if (stats.size > MAX_FILE_SIZE) {
|
if (stats.size > MAX_FILE_SIZE) {
|
||||||
fileLogger.warn("File too large for download", {
|
fileLogger.warn("File too large for download", {
|
||||||
operation: "file_download",
|
operation: "file_download",
|
||||||
@@ -1705,66 +1800,26 @@ app.post("/ssh/file_manager/ssh/copyItem", async (req, res) => {
|
|||||||
// Extract source name
|
// Extract source name
|
||||||
const sourceName = sourcePath.split("/").pop() || "copied_item";
|
const sourceName = sourcePath.split("/").pop() || "copied_item";
|
||||||
|
|
||||||
// First check if source file exists
|
// Linus principle: simplify - generate unique name directly without complex checks
|
||||||
const escapedSourceForCheck = sourcePath.replace(/'/g, "'\"'\"'");
|
|
||||||
const checkExistsCommand = `test -e '${escapedSourceForCheck}'`;
|
|
||||||
const checkExists = await new Promise<boolean>((resolve) => {
|
|
||||||
sshConn.client.exec(checkExistsCommand, (err, stream) => {
|
|
||||||
if (err) {
|
|
||||||
fileLogger.error("File existence check error:", err);
|
|
||||||
resolve(false);
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
stream.on("close", (code) => {
|
|
||||||
fileLogger.info("File existence check completed", {
|
|
||||||
sourcePath,
|
|
||||||
exists: code === 0,
|
|
||||||
});
|
|
||||||
resolve(code === 0);
|
|
||||||
});
|
|
||||||
|
|
||||||
stream.on("error", () => resolve(false));
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
if (!checkExists) {
|
|
||||||
return res.status(404).json({
|
|
||||||
error: `Source file not found: ${sourcePath}`,
|
|
||||||
toast: {
|
|
||||||
type: "error",
|
|
||||||
message: `Source file not found: ${sourceName}`,
|
|
||||||
},
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
// Use timestamp for uniqueness
|
|
||||||
const timestamp = Date.now().toString().slice(-8);
|
const timestamp = Date.now().toString().slice(-8);
|
||||||
const nameWithoutExt = sourceName.includes(".")
|
const uniqueName = `${sourceName}_copy_${timestamp}`;
|
||||||
? sourceName.substring(0, sourceName.lastIndexOf("."))
|
const targetPath = `${targetDir}/${uniqueName}`;
|
||||||
: sourceName;
|
|
||||||
const extension = sourceName.includes(".")
|
|
||||||
? sourceName.substring(sourceName.lastIndexOf("."))
|
|
||||||
: "";
|
|
||||||
|
|
||||||
// Always use timestamp suffix to ensure uniqueness without SSH calls
|
fileLogger.info("Starting copy operation", {
|
||||||
const uniqueName = `${nameWithoutExt}_copy_${timestamp}${extension}`;
|
|
||||||
|
|
||||||
fileLogger.info("Using timestamp-based unique name", {
|
|
||||||
originalName: sourceName,
|
originalName: sourceName,
|
||||||
uniqueName,
|
uniqueName,
|
||||||
|
sourcePath,
|
||||||
|
targetPath,
|
||||||
|
sessionId,
|
||||||
});
|
});
|
||||||
const targetPath = `${targetDir}/${uniqueName}`;
|
|
||||||
|
|
||||||
// Escape paths for shell commands
|
// Escape paths for shell commands
|
||||||
const escapedSource = sourcePath.replace(/'/g, "'\"'\"'");
|
const escapedSource = sourcePath.replace(/'/g, "'\"'\"'");
|
||||||
const escapedTarget = targetPath.replace(/'/g, "'\"'\"'");
|
const escapedTarget = targetPath.replace(/'/g, "'\"'\"'");
|
||||||
|
|
||||||
// Use cp with explicit flags to avoid hanging on prompts
|
// Linus principle: simplify - use basic cp command for reliability
|
||||||
// -f: force overwrite without prompting
|
// Just copy the file without complex flags that might cause issues
|
||||||
// -r: recursive for directories
|
const copyCommand = `cp '${escapedSource}' '${escapedTarget}' && echo "COPY_SUCCESS"`;
|
||||||
// -p: preserve timestamps, permissions
|
|
||||||
const copyCommand = `cp -fpr '${escapedSource}' '${escapedTarget}' 2>&1`;
|
|
||||||
|
|
||||||
fileLogger.info("Starting file copy operation", {
|
fileLogger.info("Starting file copy operation", {
|
||||||
operation: "file_copy_start",
|
operation: "file_copy_start",
|
||||||
@@ -1777,7 +1832,7 @@ app.post("/ssh/file_manager/ssh/copyItem", async (req, res) => {
|
|||||||
|
|
||||||
// Add timeout to prevent hanging
|
// Add timeout to prevent hanging
|
||||||
const commandTimeout = setTimeout(() => {
|
const commandTimeout = setTimeout(() => {
|
||||||
fileLogger.error("Copy command timed out after 20 seconds", {
|
fileLogger.error("Copy command timed out after 60 seconds", {
|
||||||
sourcePath,
|
sourcePath,
|
||||||
targetPath,
|
targetPath,
|
||||||
command: copyCommand,
|
command: copyCommand,
|
||||||
@@ -1792,7 +1847,7 @@ app.post("/ssh/file_manager/ssh/copyItem", async (req, res) => {
|
|||||||
},
|
},
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
}, 20000); // 20 second timeout for better responsiveness
|
}, 60000); // 60 second timeout for large files
|
||||||
|
|
||||||
sshConn.client.exec(copyCommand, (err, stream) => {
|
sshConn.client.exec(copyCommand, (err, stream) => {
|
||||||
if (err) {
|
if (err) {
|
||||||
@@ -1864,6 +1919,10 @@ app.post("/ssh/file_manager/ssh/copyItem", async (req, res) => {
|
|||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Verify copy completion with COPY_SUCCESS marker or exit code 0
|
||||||
|
const copySuccessful = stdoutData.includes("COPY_SUCCESS") || code === 0;
|
||||||
|
|
||||||
|
if (copySuccessful) {
|
||||||
fileLogger.success("Item copied successfully", {
|
fileLogger.success("Item copied successfully", {
|
||||||
operation: "file_copy",
|
operation: "file_copy",
|
||||||
sessionId,
|
sessionId,
|
||||||
@@ -1886,6 +1945,29 @@ app.post("/ssh/file_manager/ssh/copyItem", async (req, res) => {
|
|||||||
},
|
},
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
} else {
|
||||||
|
fileLogger.warn("Copy completed but without success confirmation", {
|
||||||
|
operation: "file_copy_uncertain",
|
||||||
|
sessionId,
|
||||||
|
sourcePath,
|
||||||
|
targetPath,
|
||||||
|
code,
|
||||||
|
stdoutData: stdoutData.substring(0, 200),
|
||||||
|
});
|
||||||
|
|
||||||
|
if (!res.headersSent) {
|
||||||
|
res.json({
|
||||||
|
message: "Copy may have completed",
|
||||||
|
sourcePath,
|
||||||
|
targetPath,
|
||||||
|
uniqueName,
|
||||||
|
toast: {
|
||||||
|
type: "warning",
|
||||||
|
message: `Copy completed but verification uncertain for: ${uniqueName}`,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
stream.on("error", (streamErr) => {
|
stream.on("error", (streamErr) => {
|
||||||
@@ -1933,7 +2015,7 @@ process.on("SIGTERM", () => {
|
|||||||
process.exit(0);
|
process.exit(0);
|
||||||
});
|
});
|
||||||
|
|
||||||
// 执行可执行文件
|
// Execute executable file
|
||||||
app.post("/ssh/file_manager/ssh/executeFile", async (req, res) => {
|
app.post("/ssh/file_manager/ssh/executeFile", async (req, res) => {
|
||||||
const { sessionId, filePath, hostId, userId } = req.body;
|
const { sessionId, filePath, hostId, userId } = req.body;
|
||||||
const sshConn = sshSessions[sessionId];
|
const sshConn = sshSessions[sessionId];
|
||||||
@@ -1957,7 +2039,7 @@ app.post("/ssh/file_manager/ssh/executeFile", async (req, res) => {
|
|||||||
|
|
||||||
const escapedPath = filePath.replace(/'/g, "'\"'\"'");
|
const escapedPath = filePath.replace(/'/g, "'\"'\"'");
|
||||||
|
|
||||||
// 检查文件是否存在且可执行
|
// Check if file exists and is executable
|
||||||
const checkCommand = `test -x '${escapedPath}' && echo "EXECUTABLE" || echo "NOT_EXECUTABLE"`;
|
const checkCommand = `test -x '${escapedPath}' && echo "EXECUTABLE" || echo "NOT_EXECUTABLE"`;
|
||||||
|
|
||||||
sshConn.client.exec(checkCommand, (checkErr, checkStream) => {
|
sshConn.client.exec(checkCommand, (checkErr, checkStream) => {
|
||||||
@@ -1978,7 +2060,7 @@ app.post("/ssh/file_manager/ssh/executeFile", async (req, res) => {
|
|||||||
return res.status(400).json({ error: "File is not executable" });
|
return res.status(400).json({ error: "File is not executable" });
|
||||||
}
|
}
|
||||||
|
|
||||||
// 执行文件
|
// Execute file
|
||||||
const executeCommand = `cd "$(dirname '${escapedPath}')" && '${escapedPath}' 2>&1; echo "EXIT_CODE:$?"`;
|
const executeCommand = `cd "$(dirname '${escapedPath}')" && '${escapedPath}' 2>&1; echo "EXIT_CODE:$?"`;
|
||||||
|
|
||||||
fileLogger.info("Executing file", {
|
fileLogger.info("Executing file", {
|
||||||
@@ -2006,7 +2088,7 @@ app.post("/ssh/file_manager/ssh/executeFile", async (req, res) => {
|
|||||||
});
|
});
|
||||||
|
|
||||||
stream.on("close", (code) => {
|
stream.on("close", (code) => {
|
||||||
// 从输出中提取退出代码
|
// Extract exit code from output
|
||||||
const exitCodeMatch = output.match(/EXIT_CODE:(\d+)$/);
|
const exitCodeMatch = output.match(/EXIT_CODE:(\d+)$/);
|
||||||
const actualExitCode = exitCodeMatch
|
const actualExitCode = exitCodeMatch
|
||||||
? parseInt(exitCodeMatch[1])
|
? parseInt(exitCodeMatch[1])
|
||||||
@@ -2043,9 +2125,21 @@ app.post("/ssh/file_manager/ssh/executeFile", async (req, res) => {
|
|||||||
});
|
});
|
||||||
|
|
||||||
const PORT = 8084;
|
const PORT = 8084;
|
||||||
app.listen(PORT, () => {
|
app.listen(PORT, async () => {
|
||||||
fileLogger.success("File Manager API server started", {
|
fileLogger.success("File Manager API server started", {
|
||||||
operation: "server_start",
|
operation: "server_start",
|
||||||
port: PORT,
|
port: PORT,
|
||||||
});
|
});
|
||||||
|
|
||||||
|
// Initialize AuthManager for JWT verification
|
||||||
|
try {
|
||||||
|
await authManager.initialize();
|
||||||
|
fileLogger.info("AuthManager initialized for file manager", {
|
||||||
|
operation: "auth_init",
|
||||||
|
});
|
||||||
|
} catch (err) {
|
||||||
|
fileLogger.error("Failed to initialize AuthManager", err, {
|
||||||
|
operation: "auth_init_error",
|
||||||
|
});
|
||||||
|
}
|
||||||
});
|
});
|
||||||
|
|||||||
@@ -2,11 +2,12 @@ import express from "express";
|
|||||||
import net from "net";
|
import net from "net";
|
||||||
import cors from "cors";
|
import cors from "cors";
|
||||||
import { Client, type ConnectConfig } from "ssh2";
|
import { Client, type ConnectConfig } from "ssh2";
|
||||||
import { db } from "../database/db/index.js";
|
import { getDb } from "../database/db/index.js";
|
||||||
import { sshData, sshCredentials } from "../database/db/schema.js";
|
import { sshData, sshCredentials } from "../database/db/schema.js";
|
||||||
import { eq, and } from "drizzle-orm";
|
import { eq, and } from "drizzle-orm";
|
||||||
import { statsLogger } from "../utils/logger.js";
|
import { statsLogger } from "../utils/logger.js";
|
||||||
import { EncryptedDBOperations } from "../utils/encrypted-db-operations.js";
|
import { SimpleDBOps } from "../utils/simple-db-ops.js";
|
||||||
|
import { AuthManager } from "../utils/auth-manager.js";
|
||||||
|
|
||||||
interface PooledConnection {
|
interface PooledConnection {
|
||||||
client: Client;
|
client: Client;
|
||||||
@@ -228,6 +229,7 @@ class MetricsCache {
|
|||||||
const connectionPool = new SSHConnectionPool();
|
const connectionPool = new SSHConnectionPool();
|
||||||
const requestQueue = new RequestQueue();
|
const requestQueue = new RequestQueue();
|
||||||
const metricsCache = new MetricsCache();
|
const metricsCache = new MetricsCache();
|
||||||
|
const authManager = AuthManager.getInstance();
|
||||||
|
|
||||||
type HostStatus = "online" | "offline";
|
type HostStatus = "online" | "offline";
|
||||||
|
|
||||||
@@ -303,19 +305,23 @@ app.use((req, res, next) => {
|
|||||||
});
|
});
|
||||||
app.use(express.json({ limit: "1mb" }));
|
app.use(express.json({ limit: "1mb" }));
|
||||||
|
|
||||||
|
// Add authentication middleware - Linus principle: eliminate special cases
|
||||||
|
app.use(authManager.createAuthMiddleware());
|
||||||
|
|
||||||
const hostStatuses: Map<number, StatusEntry> = new Map();
|
const hostStatuses: Map<number, StatusEntry> = new Map();
|
||||||
|
|
||||||
async function fetchAllHosts(): Promise<SSHHostWithCredentials[]> {
|
async function fetchAllHosts(userId: string): Promise<SSHHostWithCredentials[]> {
|
||||||
try {
|
try {
|
||||||
const hosts = await EncryptedDBOperations.select(
|
const hosts = await SimpleDBOps.select(
|
||||||
db.select().from(sshData),
|
getDb().select().from(sshData).where(eq(sshData.userId, userId)),
|
||||||
"ssh_data",
|
"ssh_data",
|
||||||
|
userId,
|
||||||
);
|
);
|
||||||
|
|
||||||
const hostsWithCredentials: SSHHostWithCredentials[] = [];
|
const hostsWithCredentials: SSHHostWithCredentials[] = [];
|
||||||
for (const host of hosts) {
|
for (const host of hosts) {
|
||||||
try {
|
try {
|
||||||
const hostWithCreds = await resolveHostCredentials(host);
|
const hostWithCreds = await resolveHostCredentials(host, userId);
|
||||||
if (hostWithCreds) {
|
if (hostWithCreds) {
|
||||||
hostsWithCredentials.push(hostWithCreds);
|
hostsWithCredentials.push(hostWithCreds);
|
||||||
}
|
}
|
||||||
@@ -335,11 +341,13 @@ async function fetchAllHosts(): Promise<SSHHostWithCredentials[]> {
|
|||||||
|
|
||||||
async function fetchHostById(
|
async function fetchHostById(
|
||||||
id: number,
|
id: number,
|
||||||
|
userId: string,
|
||||||
): Promise<SSHHostWithCredentials | undefined> {
|
): Promise<SSHHostWithCredentials | undefined> {
|
||||||
try {
|
try {
|
||||||
const hosts = await EncryptedDBOperations.select(
|
const hosts = await SimpleDBOps.select(
|
||||||
db.select().from(sshData).where(eq(sshData.id, id)),
|
getDb().select().from(sshData).where(and(eq(sshData.id, id), eq(sshData.userId, userId))),
|
||||||
"ssh_data",
|
"ssh_data",
|
||||||
|
userId,
|
||||||
);
|
);
|
||||||
|
|
||||||
if (hosts.length === 0) {
|
if (hosts.length === 0) {
|
||||||
@@ -347,7 +355,7 @@ async function fetchHostById(
|
|||||||
}
|
}
|
||||||
|
|
||||||
const host = hosts[0];
|
const host = hosts[0];
|
||||||
return await resolveHostCredentials(host);
|
return await resolveHostCredentials(host, userId);
|
||||||
} catch (err) {
|
} catch (err) {
|
||||||
statsLogger.error(`Failed to fetch host ${id}`, err);
|
statsLogger.error(`Failed to fetch host ${id}`, err);
|
||||||
return undefined;
|
return undefined;
|
||||||
@@ -356,6 +364,7 @@ async function fetchHostById(
|
|||||||
|
|
||||||
async function resolveHostCredentials(
|
async function resolveHostCredentials(
|
||||||
host: any,
|
host: any,
|
||||||
|
userId: string,
|
||||||
): Promise<SSHHostWithCredentials | undefined> {
|
): Promise<SSHHostWithCredentials | undefined> {
|
||||||
try {
|
try {
|
||||||
const baseHost: any = {
|
const baseHost: any = {
|
||||||
@@ -387,17 +396,18 @@ async function resolveHostCredentials(
|
|||||||
|
|
||||||
if (host.credentialId) {
|
if (host.credentialId) {
|
||||||
try {
|
try {
|
||||||
const credentials = await EncryptedDBOperations.select(
|
const credentials = await SimpleDBOps.select(
|
||||||
db
|
getDb()
|
||||||
.select()
|
.select()
|
||||||
.from(sshCredentials)
|
.from(sshCredentials)
|
||||||
.where(
|
.where(
|
||||||
and(
|
and(
|
||||||
eq(sshCredentials.id, host.credentialId),
|
eq(sshCredentials.id, host.credentialId),
|
||||||
eq(sshCredentials.userId, host.userId),
|
eq(sshCredentials.userId, userId),
|
||||||
),
|
),
|
||||||
),
|
),
|
||||||
"ssh_credentials",
|
"ssh_credentials",
|
||||||
|
userId,
|
||||||
);
|
);
|
||||||
|
|
||||||
if (credentials.length > 0) {
|
if (credentials.length > 0) {
|
||||||
@@ -480,7 +490,31 @@ function buildSshConfig(host: SSHHostWithCredentials): ConnectConfig {
|
|||||||
port: host.port || 22,
|
port: host.port || 22,
|
||||||
username: host.username || "root",
|
username: host.username || "root",
|
||||||
readyTimeout: 10_000,
|
readyTimeout: 10_000,
|
||||||
algorithms: {},
|
algorithms: {
|
||||||
|
kex: [
|
||||||
|
"diffie-hellman-group14-sha256",
|
||||||
|
"diffie-hellman-group14-sha1",
|
||||||
|
"diffie-hellman-group1-sha1",
|
||||||
|
"diffie-hellman-group-exchange-sha256",
|
||||||
|
"diffie-hellman-group-exchange-sha1",
|
||||||
|
"ecdh-sha2-nistp256",
|
||||||
|
"ecdh-sha2-nistp384",
|
||||||
|
"ecdh-sha2-nistp521",
|
||||||
|
],
|
||||||
|
cipher: [
|
||||||
|
"aes128-ctr",
|
||||||
|
"aes192-ctr",
|
||||||
|
"aes256-ctr",
|
||||||
|
"aes128-gcm@openssh.com",
|
||||||
|
"aes256-gcm@openssh.com",
|
||||||
|
"aes128-cbc",
|
||||||
|
"aes192-cbc",
|
||||||
|
"aes256-cbc",
|
||||||
|
"3des-cbc",
|
||||||
|
],
|
||||||
|
hmac: ["hmac-sha2-256-etm@openssh.com", "hmac-sha2-512-etm@openssh.com", "hmac-sha2-256", "hmac-sha2-512", "hmac-sha1", "hmac-md5"],
|
||||||
|
compress: ["none", "zlib@openssh.com", "zlib"],
|
||||||
|
},
|
||||||
} as ConnectConfig;
|
} as ConnectConfig;
|
||||||
|
|
||||||
if (host.authType === "password") {
|
if (host.authType === "password") {
|
||||||
@@ -809,11 +843,19 @@ function tcpPing(
|
|||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
async function pollStatusesOnce(): Promise<void> {
|
async function pollStatusesOnce(userId?: string): Promise<void> {
|
||||||
const hosts = await fetchAllHosts();
|
if (!userId) {
|
||||||
|
statsLogger.warn("Skipping status poll - no authenticated user", {
|
||||||
|
operation: "status_poll",
|
||||||
|
});
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
const hosts = await fetchAllHosts(userId);
|
||||||
if (hosts.length === 0) {
|
if (hosts.length === 0) {
|
||||||
statsLogger.warn("No hosts retrieved for status polling", {
|
statsLogger.warn("No hosts retrieved for status polling", {
|
||||||
operation: "status_poll",
|
operation: "status_poll",
|
||||||
|
userId,
|
||||||
});
|
});
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
@@ -845,8 +887,10 @@ async function pollStatusesOnce(): Promise<void> {
|
|||||||
}
|
}
|
||||||
|
|
||||||
app.get("/status", async (req, res) => {
|
app.get("/status", async (req, res) => {
|
||||||
|
const userId = (req as any).userId;
|
||||||
|
|
||||||
if (hostStatuses.size === 0) {
|
if (hostStatuses.size === 0) {
|
||||||
await pollStatusesOnce();
|
await pollStatusesOnce(userId);
|
||||||
}
|
}
|
||||||
const result: Record<number, StatusEntry> = {};
|
const result: Record<number, StatusEntry> = {};
|
||||||
for (const [id, entry] of hostStatuses.entries()) {
|
for (const [id, entry] of hostStatuses.entries()) {
|
||||||
@@ -857,9 +901,10 @@ app.get("/status", async (req, res) => {
|
|||||||
|
|
||||||
app.get("/status/:id", validateHostId, async (req, res) => {
|
app.get("/status/:id", validateHostId, async (req, res) => {
|
||||||
const id = Number(req.params.id);
|
const id = Number(req.params.id);
|
||||||
|
const userId = (req as any).userId;
|
||||||
|
|
||||||
try {
|
try {
|
||||||
const host = await fetchHostById(id);
|
const host = await fetchHostById(id, userId);
|
||||||
if (!host) {
|
if (!host) {
|
||||||
return res.status(404).json({ error: "Host not found" });
|
return res.status(404).json({ error: "Host not found" });
|
||||||
}
|
}
|
||||||
@@ -880,15 +925,17 @@ app.get("/status/:id", validateHostId, async (req, res) => {
|
|||||||
});
|
});
|
||||||
|
|
||||||
app.post("/refresh", async (req, res) => {
|
app.post("/refresh", async (req, res) => {
|
||||||
await pollStatusesOnce();
|
const userId = (req as any).userId;
|
||||||
|
await pollStatusesOnce(userId);
|
||||||
res.json({ message: "Refreshed" });
|
res.json({ message: "Refreshed" });
|
||||||
});
|
});
|
||||||
|
|
||||||
app.get("/metrics/:id", validateHostId, async (req, res) => {
|
app.get("/metrics/:id", validateHostId, async (req, res) => {
|
||||||
const id = Number(req.params.id);
|
const id = Number(req.params.id);
|
||||||
|
const userId = (req as any).userId;
|
||||||
|
|
||||||
try {
|
try {
|
||||||
const host = await fetchHostById(id);
|
const host = await fetchHostById(id, userId);
|
||||||
if (!host) {
|
if (!host) {
|
||||||
return res.status(404).json({ error: "Host not found" });
|
return res.status(404).json({ error: "Host not found" });
|
||||||
}
|
}
|
||||||
@@ -947,11 +994,21 @@ app.listen(PORT, async () => {
|
|||||||
operation: "server_start",
|
operation: "server_start",
|
||||||
port: PORT,
|
port: PORT,
|
||||||
});
|
});
|
||||||
|
|
||||||
|
// Initialize AuthManager for JWT verification
|
||||||
try {
|
try {
|
||||||
await pollStatusesOnce();
|
await authManager.initialize();
|
||||||
|
statsLogger.info("AuthManager initialized for metrics collection", {
|
||||||
|
operation: "auth_init",
|
||||||
|
});
|
||||||
} catch (err) {
|
} catch (err) {
|
||||||
statsLogger.error("Initial poll failed", err, {
|
statsLogger.error("Failed to initialize AuthManager", err, {
|
||||||
operation: "initial_poll",
|
operation: "auth_init_error",
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Skip initial poll - requires user authentication
|
||||||
|
statsLogger.info("Server ready - status polling will begin with first authenticated request", {
|
||||||
|
operation: "server_ready",
|
||||||
|
});
|
||||||
});
|
});
|
||||||
|
|||||||
@@ -1,34 +1,220 @@
|
|||||||
import { WebSocketServer, WebSocket, type RawData } from "ws";
|
import { WebSocketServer, WebSocket, type RawData } from "ws";
|
||||||
import { Client, type ClientChannel, type PseudoTtyOptions } from "ssh2";
|
import { Client, type ClientChannel, type PseudoTtyOptions } from "ssh2";
|
||||||
import { db } from "../database/db/index.js";
|
import { parse as parseUrl } from "url";
|
||||||
|
import { getDb } from "../database/db/index.js";
|
||||||
import { sshCredentials } from "../database/db/schema.js";
|
import { sshCredentials } from "../database/db/schema.js";
|
||||||
import { eq, and } from "drizzle-orm";
|
import { eq, and } from "drizzle-orm";
|
||||||
import { sshLogger } from "../utils/logger.js";
|
import { sshLogger } from "../utils/logger.js";
|
||||||
import { EncryptedDBOperations } from "../utils/encrypted-db-operations.js";
|
import { SimpleDBOps } from "../utils/simple-db-ops.js";
|
||||||
|
import { AuthManager } from "../utils/auth-manager.js";
|
||||||
|
import { UserCrypto } from "../utils/user-crypto.js";
|
||||||
|
|
||||||
const wss = new WebSocketServer({ port: 8082 });
|
// Get auth instances
|
||||||
|
const authManager = AuthManager.getInstance();
|
||||||
|
const userCrypto = UserCrypto.getInstance();
|
||||||
|
|
||||||
sshLogger.success("SSH Terminal WebSocket server started", {
|
// Track user connections for rate limiting
|
||||||
operation: "server_start",
|
const userConnections = new Map<string, Set<WebSocket>>();
|
||||||
|
|
||||||
|
const wss = new WebSocketServer({
|
||||||
port: 8082,
|
port: 8082,
|
||||||
|
// WebSocket authentication during handshake
|
||||||
|
verifyClient: async (info) => {
|
||||||
|
try {
|
||||||
|
const url = parseUrl(info.req.url!, true);
|
||||||
|
const token = url.query.token as string;
|
||||||
|
|
||||||
|
if (!token) {
|
||||||
|
sshLogger.warn("WebSocket connection rejected: missing token", {
|
||||||
|
operation: "websocket_auth_reject",
|
||||||
|
reason: "missing_token",
|
||||||
|
ip: info.req.socket.remoteAddress
|
||||||
|
});
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
const payload = await authManager.verifyJWTToken(token);
|
||||||
|
|
||||||
|
if (!payload) {
|
||||||
|
sshLogger.warn("WebSocket connection rejected: invalid token", {
|
||||||
|
operation: "websocket_auth_reject",
|
||||||
|
reason: "invalid_token",
|
||||||
|
ip: info.req.socket.remoteAddress
|
||||||
|
});
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check for TOTP pending (should not allow terminal access during TOTP)
|
||||||
|
if (payload.pendingTOTP) {
|
||||||
|
sshLogger.warn("WebSocket connection rejected: TOTP verification pending", {
|
||||||
|
operation: "websocket_auth_reject",
|
||||||
|
reason: "totp_pending",
|
||||||
|
userId: payload.userId,
|
||||||
|
ip: info.req.socket.remoteAddress
|
||||||
|
});
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check connection limits per user (max 3 concurrent connections)
|
||||||
|
const existingConnections = userConnections.get(payload.userId);
|
||||||
|
if (existingConnections && existingConnections.size >= 3) {
|
||||||
|
sshLogger.warn("WebSocket connection rejected: too many connections", {
|
||||||
|
operation: "websocket_auth_reject",
|
||||||
|
reason: "connection_limit",
|
||||||
|
userId: payload.userId,
|
||||||
|
currentConnections: existingConnections.size,
|
||||||
|
ip: info.req.socket.remoteAddress
|
||||||
|
});
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Note: We don't need to attach user info to request anymore
|
||||||
|
// Connection handler will re-verify JWT directly from URL
|
||||||
|
|
||||||
|
The WebSocket connection limit is hardcoded to 3. While this is a reasonable default, it might be beneficial to make this configurable via an environment variable, for example 
The WebSocket connection limit is hardcoded to 3. While this is a reasonable default, it might be beneficial to make this configurable via an environment variable, for example `WEBSOCKET_CONNECTION_LIMIT`. This would provide more flexibility for administrators who might need to adjust this limit based on their use case and server resources.
|
|||||||
|
sshLogger.info("WebSocket connection authenticated", {
|
||||||
|
operation: "websocket_auth_success",
|
||||||
|
userId: payload.userId,
|
||||||
|
ip: info.req.socket.remoteAddress
|
||||||
|
});
|
||||||
|
|
||||||
|
return true;
|
||||||
|
} catch (error) {
|
||||||
|
sshLogger.error("WebSocket authentication error", error, {
|
||||||
|
operation: "websocket_auth_error",
|
||||||
|
ip: info.req.socket.remoteAddress
|
||||||
|
});
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
wss.on("connection", (ws: WebSocket) => {
|
sshLogger.success("SSH Terminal WebSocket server started with authentication", {
|
||||||
|
operation: "server_start",
|
||||||
|
port: 8082,
|
||||||
|
features: ["JWT_auth", "connection_limits", "data_access_control"]
|
||||||
|
});
|
||||||
|
|
||||||
|
wss.on("connection", async (ws: WebSocket, req) => {
|
||||||
|
// Linus principle: eliminate complexity - always parse JWT from URL directly
|
||||||
|
let userId: string | undefined;
|
||||||
|
let userPayload: any;
|
||||||
|
|
||||||
|
try {
|
||||||
|
const url = parseUrl(req.url!, true);
|
||||||
|
const token = url.query.token as string;
|
||||||
|
|
||||||
|
if (!token) {
|
||||||
|
sshLogger.warn("WebSocket connection rejected: missing token in connection", {
|
||||||
|
operation: "websocket_connection_reject",
|
||||||
|
reason: "missing_token",
|
||||||
|
ip: req.socket.remoteAddress
|
||||||
|
});
|
||||||
|
ws.close(1008, "Authentication required");
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
const payload = await authManager.verifyJWTToken(token);
|
||||||
|
if (!payload) {
|
||||||
|
sshLogger.warn("WebSocket connection rejected: invalid token in connection", {
|
||||||
|
operation: "websocket_connection_reject",
|
||||||
|
reason: "invalid_token",
|
||||||
|
ip: req.socket.remoteAddress
|
||||||
|
});
|
||||||
|
ws.close(1008, "Authentication required");
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
userId = payload.userId;
|
||||||
|
userPayload = payload;
|
||||||
|
|
||||||
|
} catch (error) {
|
||||||
|
sshLogger.error("WebSocket JWT verification failed during connection", error, {
|
||||||
|
operation: "websocket_connection_auth_error",
|
||||||
|
ip: req.socket.remoteAddress
|
||||||
|
});
|
||||||
|
ws.close(1008, "Authentication required");
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check data access permissions
|
||||||
|
const dataKey = userCrypto.getUserDataKey(userId);
|
||||||
|
if (!dataKey) {
|
||||||
|
sshLogger.warn("WebSocket connection rejected: data locked", {
|
||||||
|
operation: "websocket_data_locked",
|
||||||
|
userId,
|
||||||
|
ip: req.socket.remoteAddress
|
||||||
|
});
|
||||||
|
ws.send(JSON.stringify({
|
||||||
|
type: "error",
|
||||||
|
message: "Data locked - re-authenticate with password",
|
||||||
|
code: "DATA_LOCKED"
|
||||||
|
}));
|
||||||
|
ws.close(1008, "Data access required");
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Track user connections for limits
|
||||||
|
if (!userConnections.has(userId)) {
|
||||||
|
userConnections.set(userId, new Set());
|
||||||
|
}
|
||||||
|
const userWs = userConnections.get(userId)!;
|
||||||
|
userWs.add(ws);
|
||||||
|
|
||||||
|
sshLogger.info("WebSocket connection established", {
|
||||||
|
operation: "websocket_connection_established",
|
||||||
|
userId,
|
||||||
|
userConnections: userWs.size,
|
||||||
|
ip: req.socket.remoteAddress
|
||||||
|
});
|
||||||
|
|
||||||
let sshConn: Client | null = null;
|
let sshConn: Client | null = null;
|
||||||
let sshStream: ClientChannel | null = null;
|
let sshStream: ClientChannel | null = null;
|
||||||
let pingInterval: NodeJS.Timeout | null = null;
|
let pingInterval: NodeJS.Timeout | null = null;
|
||||||
|
|
||||||
ws.on("close", () => {
|
ws.on("close", () => {
|
||||||
|
// Clean up user connection tracking
|
||||||
|
const userWs = userConnections.get(userId);
|
||||||
|
if (userWs) {
|
||||||
|
userWs.delete(ws);
|
||||||
|
if (userWs.size === 0) {
|
||||||
|
userConnections.delete(userId);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
sshLogger.info("WebSocket connection closed", {
|
||||||
|
operation: "websocket_connection_closed",
|
||||||
|
userId,
|
||||||
|
remainingConnections: userWs?.size || 0
|
||||||
|
});
|
||||||
|
|
||||||
cleanupSSH();
|
cleanupSSH();
|
||||||
});
|
});
|
||||||
|
|
||||||
ws.on("message", (msg: RawData) => {
|
ws.on("message", (msg: RawData) => {
|
||||||
|
// Verify user still has data access before processing any messages
|
||||||
|
const currentDataKey = userCrypto.getUserDataKey(userId);
|
||||||
|
if (!currentDataKey) {
|
||||||
|
sshLogger.warn("WebSocket message rejected: data access expired", {
|
||||||
|
operation: "websocket_message_rejected",
|
||||||
|
userId,
|
||||||
|
reason: "data_access_expired"
|
||||||
|
});
|
||||||
|
ws.send(JSON.stringify({
|
||||||
|
type: "error",
|
||||||
|
message: "Data access expired - please re-authenticate",
|
||||||
|
code: "DATA_EXPIRED"
|
||||||
|
}));
|
||||||
|
ws.close(1008, "Data access expired");
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
let parsed: any;
|
let parsed: any;
|
||||||
try {
|
try {
|
||||||
parsed = JSON.parse(msg.toString());
|
parsed = JSON.parse(msg.toString());
|
||||||
} catch (e) {
|
} catch (e) {
|
||||||
sshLogger.error("Invalid JSON received", e, {
|
sshLogger.error("Invalid JSON received", e, {
|
||||||
operation: "websocket_message",
|
operation: "websocket_message_invalid_json",
|
||||||
|
userId,
|
||||||
messageLength: msg.toString().length,
|
messageLength: msg.toString().length,
|
||||||
});
|
});
|
||||||
ws.send(JSON.stringify({ type: "error", message: "Invalid JSON" }));
|
ws.send(JSON.stringify({ type: "error", message: "Invalid JSON" }));
|
||||||
@@ -39,9 +225,14 @@ wss.on("connection", (ws: WebSocket) => {
|
|||||||
|
|
||||||
switch (type) {
|
switch (type) {
|
||||||
case "connectToHost":
|
case "connectToHost":
|
||||||
|
// Ensure userId is attached to hostConfig for secure credential resolution
|
||||||
|
if (data.hostConfig) {
|
||||||
|
data.hostConfig.userId = userId;
|
||||||
|
}
|
||||||
handleConnectToHost(data).catch((error) => {
|
handleConnectToHost(data).catch((error) => {
|
||||||
sshLogger.error("Failed to connect to host", error, {
|
sshLogger.error("Failed to connect to host", error, {
|
||||||
operation: "ssh_connect",
|
operation: "ssh_connect",
|
||||||
|
userId,
|
||||||
hostId: data.hostConfig?.id,
|
hostId: data.hostConfig?.id,
|
||||||
ip: data.hostConfig?.ip,
|
ip: data.hostConfig?.ip,
|
||||||
});
|
});
|
||||||
@@ -82,7 +273,8 @@ wss.on("connection", (ws: WebSocket) => {
|
|||||||
|
|
||||||
default:
|
default:
|
||||||
sshLogger.warn("Unknown message type received", {
|
sshLogger.warn("Unknown message type received", {
|
||||||
operation: "websocket_message",
|
operation: "websocket_message_unknown_type",
|
||||||
|
userId,
|
||||||
messageType: type,
|
messageType: type,
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
@@ -187,21 +379,21 @@ wss.on("connection", (ws: WebSocket) => {
|
|||||||
hasCredentialId: !!credentialId,
|
hasCredentialId: !!credentialId,
|
||||||
});
|
});
|
||||||
|
|
||||||
if (password) {
|
// SECURITY: Never log password information - removed password preview logging
|
||||||
sshLogger.debug(`Password preview: "${password.substring(0, 15)}..."`, {
|
sshLogger.debug(`SSH authentication setup`, {
|
||||||
operation: "terminal_ssh_password",
|
operation: "terminal_ssh_auth_setup",
|
||||||
|
userId,
|
||||||
|
hostId: id,
|
||||||
|
authType,
|
||||||
|
hasPassword: !!password,
|
||||||
|
hasCredentialId: !!credentialId,
|
||||||
});
|
});
|
||||||
} else {
|
|
||||||
sshLogger.debug(`No password provided`, {
|
|
||||||
operation: "terminal_ssh_password",
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
let resolvedCredentials = { password, key, keyPassword, keyType, authType };
|
let resolvedCredentials = { password, key, keyPassword, keyType, authType };
|
||||||
if (credentialId && id && hostConfig.userId) {
|
if (credentialId && id && hostConfig.userId) {
|
||||||
try {
|
try {
|
||||||
const credentials = await EncryptedDBOperations.select(
|
const credentials = await SimpleDBOps.select(
|
||||||
db
|
getDb()
|
||||||
.select()
|
.select()
|
||||||
.from(sshCredentials)
|
.from(sshCredentials)
|
||||||
.where(
|
.where(
|
||||||
@@ -211,6 +403,7 @@ wss.on("connection", (ws: WebSocket) => {
|
|||||||
),
|
),
|
||||||
),
|
),
|
||||||
"ssh_credentials",
|
"ssh_credentials",
|
||||||
|
hostConfig.userId,
|
||||||
);
|
);
|
||||||
|
|
||||||
if (credentials.length > 0) {
|
if (credentials.length > 0) {
|
||||||
@@ -443,7 +636,7 @@ wss.on("connection", (ws: WebSocket) => {
|
|||||||
"aes256-cbc",
|
"aes256-cbc",
|
||||||
"3des-cbc",
|
"3des-cbc",
|
||||||
],
|
],
|
||||||
hmac: ["hmac-sha2-256", "hmac-sha2-512", "hmac-sha1", "hmac-md5"],
|
hmac: ["hmac-sha2-256-etm@openssh.com", "hmac-sha2-512-etm@openssh.com", "hmac-sha2-256", "hmac-sha2-512", "hmac-sha1", "hmac-md5"],
|
||||||
compress: ["none", "zlib@openssh.com", "zlib"],
|
compress: ["none", "zlib@openssh.com", "zlib"],
|
||||||
},
|
},
|
||||||
};
|
};
|
||||||
|
|||||||
@@ -3,7 +3,7 @@ import cors from "cors";
|
|||||||
import { Client } from "ssh2";
|
import { Client } from "ssh2";
|
||||||
import { ChildProcess } from "child_process";
|
import { ChildProcess } from "child_process";
|
||||||
import axios from "axios";
|
import axios from "axios";
|
||||||
import { db } from "../database/db/index.js";
|
import { getDb } from "../database/db/index.js";
|
||||||
import { sshCredentials } from "../database/db/schema.js";
|
import { sshCredentials } from "../database/db/schema.js";
|
||||||
import { eq, and } from "drizzle-orm";
|
import { eq, and } from "drizzle-orm";
|
||||||
import type {
|
import type {
|
||||||
@@ -15,6 +15,7 @@ import type {
|
|||||||
} from "../../types/index.js";
|
} from "../../types/index.js";
|
||||||
import { CONNECTION_STATES } from "../../types/index.js";
|
import { CONNECTION_STATES } from "../../types/index.js";
|
||||||
import { tunnelLogger } from "../utils/logger.js";
|
import { tunnelLogger } from "../utils/logger.js";
|
||||||
|
import { SystemCrypto } from "../utils/system-crypto.js";
|
||||||
|
|
||||||
const app = express();
|
const app = express();
|
||||||
app.use(
|
app.use(
|
||||||
@@ -43,6 +44,8 @@ const verificationTimers = new Map<string, NodeJS.Timeout>();
|
|||||||
const activeRetryTimers = new Map<string, NodeJS.Timeout>();
|
const activeRetryTimers = new Map<string, NodeJS.Timeout>();
|
||||||
const countdownIntervals = new Map<string, NodeJS.Timeout>();
|
const countdownIntervals = new Map<string, NodeJS.Timeout>();
|
||||||
const retryExhaustedTunnels = new Set<string>();
|
const retryExhaustedTunnels = new Set<string>();
|
||||||
|
const cleanupInProgress = new Set<string>();
|
||||||
|
const tunnelConnecting = new Set<string>();
|
||||||
|
|
||||||
const tunnelConfigs = new Map<string, TunnelConfig>();
|
const tunnelConfigs = new Map<string, TunnelConfig>();
|
||||||
const activeTunnelProcesses = new Map<string, ChildProcess>();
|
const activeTunnelProcesses = new Map<string, ChildProcess>();
|
||||||
@@ -123,16 +126,37 @@ function getTunnelMarker(tunnelName: string) {
|
|||||||
return `TUNNEL_MARKER_${tunnelName.replace(/[^a-zA-Z0-9]/g, "_")}`;
|
return `TUNNEL_MARKER_${tunnelName.replace(/[^a-zA-Z0-9]/g, "_")}`;
|
||||||
}
|
}
|
||||||
|
|
||||||
function cleanupTunnelResources(tunnelName: string): void {
|
function cleanupTunnelResources(tunnelName: string, forceCleanup = false): void {
|
||||||
|
tunnelLogger.info(`Cleaning up resources for tunnel '${tunnelName}' (force=${forceCleanup})`);
|
||||||
|
|
||||||
|
// Prevent concurrent cleanup operations
|
||||||
|
if (cleanupInProgress.has(tunnelName)) {
|
||||||
|
tunnelLogger.info(`Cleanup already in progress for '${tunnelName}', skipping`);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Protect connecting tunnels unless forced
|
||||||
|
if (!forceCleanup && tunnelConnecting.has(tunnelName)) {
|
||||||
|
tunnelLogger.info(`Tunnel '${tunnelName}' is connecting, skipping cleanup (use force=true to override)`);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
cleanupInProgress.add(tunnelName);
|
||||||
|
|
||||||
const tunnelConfig = tunnelConfigs.get(tunnelName);
|
const tunnelConfig = tunnelConfigs.get(tunnelName);
|
||||||
if (tunnelConfig) {
|
if (tunnelConfig) {
|
||||||
killRemoteTunnelByMarker(tunnelConfig, tunnelName, (err) => {
|
killRemoteTunnelByMarker(tunnelConfig, tunnelName, (err) => {
|
||||||
|
cleanupInProgress.delete(tunnelName);
|
||||||
if (err) {
|
if (err) {
|
||||||
tunnelLogger.error(
|
tunnelLogger.error(
|
||||||
`Failed to kill remote tunnel for '${tunnelName}': ${err.message}`,
|
`Failed to kill remote tunnel for '${tunnelName}': ${err.message}`,
|
||||||
);
|
);
|
||||||
|
} else {
|
||||||
|
tunnelLogger.info(`Successfully cleaned up remote tunnel processes for '${tunnelName}'`);
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
} else {
|
||||||
|
cleanupInProgress.delete(tunnelName);
|
||||||
}
|
}
|
||||||
|
|
||||||
if (activeTunnelProcesses.has(tunnelName)) {
|
if (activeTunnelProcesses.has(tunnelName)) {
|
||||||
@@ -154,6 +178,7 @@ function cleanupTunnelResources(tunnelName: string): void {
|
|||||||
try {
|
try {
|
||||||
const conn = activeTunnels.get(tunnelName);
|
const conn = activeTunnels.get(tunnelName);
|
||||||
if (conn) {
|
if (conn) {
|
||||||
|
tunnelLogger.info(`Closing SSH2 connection for tunnel '${tunnelName}'`);
|
||||||
conn.end();
|
conn.end();
|
||||||
}
|
}
|
||||||
} catch (e) {
|
} catch (e) {
|
||||||
@@ -163,6 +188,7 @@ function cleanupTunnelResources(tunnelName: string): void {
|
|||||||
);
|
);
|
||||||
}
|
}
|
||||||
activeTunnels.delete(tunnelName);
|
activeTunnels.delete(tunnelName);
|
||||||
|
tunnelLogger.info(`Removed tunnel '${tunnelName}' from activeTunnels`);
|
||||||
}
|
}
|
||||||
|
|
||||||
if (tunnelVerifications.has(tunnelName)) {
|
if (tunnelVerifications.has(tunnelName)) {
|
||||||
@@ -203,6 +229,8 @@ function cleanupTunnelResources(tunnelName: string): void {
|
|||||||
function resetRetryState(tunnelName: string): void {
|
function resetRetryState(tunnelName: string): void {
|
||||||
retryCounters.delete(tunnelName);
|
retryCounters.delete(tunnelName);
|
||||||
retryExhaustedTunnels.delete(tunnelName);
|
retryExhaustedTunnels.delete(tunnelName);
|
||||||
|
cleanupInProgress.delete(tunnelName);
|
||||||
|
tunnelConnecting.delete(tunnelName);
|
||||||
|
|
||||||
if (activeRetryTimers.has(tunnelName)) {
|
if (activeRetryTimers.has(tunnelName)) {
|
||||||
clearTimeout(activeRetryTimers.get(tunnelName)!);
|
clearTimeout(activeRetryTimers.get(tunnelName)!);
|
||||||
@@ -394,7 +422,11 @@ async function connectSSHTunnel(
|
|||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
cleanupTunnelResources(tunnelName);
|
// Mark tunnel as connecting to protect from cleanup
|
||||||
|
tunnelConnecting.add(tunnelName);
|
||||||
|
|
||||||
|
// Force cleanup any existing resources before new connection
|
||||||
|
cleanupTunnelResources(tunnelName, true);
|
||||||
|
|
||||||
if (retryAttempt === 0) {
|
if (retryAttempt === 0) {
|
||||||
retryExhaustedTunnels.delete(tunnelName);
|
retryExhaustedTunnels.delete(tunnelName);
|
||||||
@@ -441,7 +473,7 @@ async function connectSSHTunnel(
|
|||||||
|
|
||||||
if (tunnelConfig.sourceCredentialId && tunnelConfig.sourceUserId) {
|
if (tunnelConfig.sourceCredentialId && tunnelConfig.sourceUserId) {
|
||||||
try {
|
try {
|
||||||
const credentials = await db
|
const credentials = await getDb()
|
||||||
.select()
|
.select()
|
||||||
.from(sshCredentials)
|
.from(sshCredentials)
|
||||||
.where(
|
.where(
|
||||||
@@ -485,9 +517,35 @@ async function connectSSHTunnel(
|
|||||||
authMethod: tunnelConfig.endpointAuthMethod,
|
authMethod: tunnelConfig.endpointAuthMethod,
|
||||||
};
|
};
|
||||||
|
|
||||||
|
tunnelLogger.info(`Source credentials for '${tunnelName}': authMethod=${resolvedSourceCredentials.authMethod}, hasPassword=${!!resolvedSourceCredentials.password}, hasSSHKey=${!!resolvedSourceCredentials.sshKey}`);
|
||||||
|
tunnelLogger.info(`Final endpoint credentials for '${tunnelName}': authMethod=${resolvedEndpointCredentials.authMethod}, hasPassword=${!!resolvedEndpointCredentials.password}, hasSSHKey=${!!resolvedEndpointCredentials.sshKey}, credentialId=${tunnelConfig.endpointCredentialId}`);
|
||||||
|
|
||||||
|
// Validate that we have usable endpoint credentials
|
||||||
|
if (resolvedEndpointCredentials.authMethod === "password" && !resolvedEndpointCredentials.password) {
|
||||||
|
const errorMessage = `Cannot connect tunnel '${tunnelName}': endpoint host requires password authentication but no plaintext password available. Enable autostart for endpoint host or configure credentials in tunnel connection.`;
|
||||||
|
tunnelLogger.error(errorMessage);
|
||||||
|
broadcastTunnelStatus(tunnelName, {
|
||||||
|
connected: false,
|
||||||
|
status: CONNECTION_STATES.FAILED,
|
||||||
|
reason: errorMessage,
|
||||||
|
});
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (resolvedEndpointCredentials.authMethod === "key" && !resolvedEndpointCredentials.sshKey) {
|
||||||
|
const errorMessage = `Cannot connect tunnel '${tunnelName}': endpoint host requires key authentication but no plaintext key available. Enable autostart for endpoint host or configure credentials in tunnel connection.`;
|
||||||
|
tunnelLogger.error(errorMessage);
|
||||||
|
broadcastTunnelStatus(tunnelName, {
|
||||||
|
connected: false,
|
||||||
|
status: CONNECTION_STATES.FAILED,
|
||||||
|
reason: errorMessage,
|
||||||
|
});
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
if (tunnelConfig.endpointCredentialId && tunnelConfig.endpointUserId) {
|
if (tunnelConfig.endpointCredentialId && tunnelConfig.endpointUserId) {
|
||||||
try {
|
try {
|
||||||
const credentials = await db
|
const credentials = await getDb()
|
||||||
.select()
|
.select()
|
||||||
.from(sshCredentials)
|
.from(sshCredentials)
|
||||||
.where(
|
.where(
|
||||||
@@ -506,6 +564,7 @@ async function connectSSHTunnel(
|
|||||||
keyType: credential.keyType,
|
keyType: credential.keyType,
|
||||||
authMethod: credential.authType,
|
authMethod: credential.authType,
|
||||||
};
|
};
|
||||||
|
tunnelLogger.info(`Resolved endpoint credentials from DB for '${tunnelName}': authMethod=${resolvedEndpointCredentials.authMethod}, hasPassword=${!!resolvedEndpointCredentials.password}, hasSSHKey=${!!resolvedEndpointCredentials.sshKey}`);
|
||||||
} else {
|
} else {
|
||||||
tunnelLogger.warn("No endpoint credentials found in database", {
|
tunnelLogger.warn("No endpoint credentials found in database", {
|
||||||
operation: "tunnel_connect",
|
operation: "tunnel_connect",
|
||||||
@@ -555,6 +614,9 @@ async function connectSSHTunnel(
|
|||||||
clearTimeout(connectionTimeout);
|
clearTimeout(connectionTimeout);
|
||||||
tunnelLogger.error(`SSH error for '${tunnelName}': ${err.message}`);
|
tunnelLogger.error(`SSH error for '${tunnelName}': ${err.message}`);
|
||||||
|
|
||||||
|
// Clear connecting state on error
|
||||||
|
tunnelConnecting.delete(tunnelName);
|
||||||
|
|
||||||
if (activeRetryTimers.has(tunnelName)) {
|
if (activeRetryTimers.has(tunnelName)) {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
@@ -583,6 +645,9 @@ async function connectSSHTunnel(
|
|||||||
conn.on("close", () => {
|
conn.on("close", () => {
|
||||||
clearTimeout(connectionTimeout);
|
clearTimeout(connectionTimeout);
|
||||||
|
|
||||||
|
// Clear connecting state on close
|
||||||
|
tunnelConnecting.delete(tunnelName);
|
||||||
|
|
||||||
if (activeRetryTimers.has(tunnelName)) {
|
if (activeRetryTimers.has(tunnelName)) {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
@@ -620,11 +685,13 @@ async function connectSSHTunnel(
|
|||||||
resolvedEndpointCredentials.sshKey
|
resolvedEndpointCredentials.sshKey
|
||||||
) {
|
) {
|
||||||
const keyFilePath = `/tmp/tunnel_key_${tunnelName.replace(/[^a-zA-Z0-9]/g, "_")}`;
|
const keyFilePath = `/tmp/tunnel_key_${tunnelName.replace(/[^a-zA-Z0-9]/g, "_")}`;
|
||||||
tunnelCmd = `echo '${resolvedEndpointCredentials.sshKey}' > ${keyFilePath} && chmod 600 ${keyFilePath} && ssh -i ${keyFilePath} -N -o StrictHostKeyChecking=no -o ExitOnForwardFailure=yes -o ServerAliveInterval=30 -o ServerAliveCountMax=3 -R ${tunnelConfig.endpointPort}:localhost:${tunnelConfig.sourcePort} ${tunnelConfig.endpointUsername}@${tunnelConfig.endpointIP} ${tunnelMarker} && rm -f ${keyFilePath}`;
|
tunnelCmd = `echo '${resolvedEndpointCredentials.sshKey}' > ${keyFilePath} && chmod 600 ${keyFilePath} && exec -a "${tunnelMarker}" ssh -i ${keyFilePath} -v -N -o StrictHostKeyChecking=no -o ExitOnForwardFailure=yes -o ServerAliveInterval=30 -o ServerAliveCountMax=3 -o GatewayPorts=yes -R ${tunnelConfig.endpointPort}:localhost:${tunnelConfig.sourcePort} ${tunnelConfig.endpointUsername}@${tunnelConfig.endpointIP} && rm -f ${keyFilePath}`;
|
||||||
} else {
|
} else {
|
||||||
tunnelCmd = `sshpass -p '${resolvedEndpointCredentials.password || ""}' ssh -N -o StrictHostKeyChecking=no -o ExitOnForwardFailure=yes -o ServerAliveInterval=30 -o ServerAliveCountMax=3 -R ${tunnelConfig.endpointPort}:localhost:${tunnelConfig.sourcePort} ${tunnelConfig.endpointUsername}@${tunnelConfig.endpointIP} ${tunnelMarker}`;
|
tunnelCmd = `exec -a "${tunnelMarker}" sshpass -p '${resolvedEndpointCredentials.password || ""}' ssh -v -N -o StrictHostKeyChecking=no -o ExitOnForwardFailure=yes -o ServerAliveInterval=30 -o ServerAliveCountMax=3 -o GatewayPorts=yes -R ${tunnelConfig.endpointPort}:localhost:${tunnelConfig.sourcePort} ${tunnelConfig.endpointUsername}@${tunnelConfig.endpointIP}`;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
tunnelLogger.info(`Executing tunnel command for '${tunnelName}': ${tunnelCmd.replace(/sshpass -p '[^']*'/g, 'sshpass -p [HIDDEN]').replace(/echo '[^']*'/g, 'echo [HIDDEN]')}`);
|
||||||
|
|
||||||
conn.exec(tunnelCmd, (err, stream) => {
|
conn.exec(tunnelCmd, (err, stream) => {
|
||||||
if (err) {
|
if (err) {
|
||||||
tunnelLogger.error(
|
tunnelLogger.error(
|
||||||
@@ -651,6 +718,9 @@ async function connectSSHTunnel(
|
|||||||
!manualDisconnects.has(tunnelName) &&
|
!manualDisconnects.has(tunnelName) &&
|
||||||
activeTunnels.has(tunnelName)
|
activeTunnels.has(tunnelName)
|
||||||
) {
|
) {
|
||||||
|
// Clear connecting state on successful connection
|
||||||
|
tunnelConnecting.delete(tunnelName);
|
||||||
|
|
||||||
broadcastTunnelStatus(tunnelName, {
|
broadcastTunnelStatus(tunnelName, {
|
||||||
connected: true,
|
connected: true,
|
||||||
status: CONNECTION_STATES.CONNECTED,
|
status: CONNECTION_STATES.CONNECTED,
|
||||||
@@ -722,12 +792,52 @@ async function connectSSHTunnel(
|
|||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
stream.stdout?.on("data", (data: Buffer) => {});
|
stream.stdout?.on("data", (data: Buffer) => {
|
||||||
|
const output = data.toString().trim();
|
||||||
|
if (output) {
|
||||||
|
tunnelLogger.info(`SSH stdout for '${tunnelName}': ${output}`);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
stream.on("error", (err: Error) => {});
|
stream.on("error", (err: Error) => {});
|
||||||
|
|
||||||
stream.stderr.on("data", (data) => {
|
stream.stderr.on("data", (data) => {
|
||||||
const errorMsg = data.toString().trim();
|
const errorMsg = data.toString().trim();
|
||||||
|
if (errorMsg) {
|
||||||
|
tunnelLogger.error(`SSH stderr for '${tunnelName}': ${errorMsg}`);
|
||||||
|
|
||||||
|
// Check for specific SSH errors
|
||||||
|
if (errorMsg.includes("sshpass: command not found") || errorMsg.includes("sshpass not found")) {
|
||||||
|
broadcastTunnelStatus(tunnelName, {
|
||||||
|
connected: false,
|
||||||
|
status: CONNECTION_STATES.FAILED,
|
||||||
|
reason: "sshpass tool not found on source host. Please install sshpass or use SSH key authentication.",
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check for port forwarding errors
|
||||||
|
if (errorMsg.includes("remote port forwarding failed") || errorMsg.includes("Error: remote port forwarding failed")) {
|
||||||
|
const portMatch = errorMsg.match(/listen port (\d+)/);
|
||||||
|
const port = portMatch ? portMatch[1] : tunnelConfig.endpointPort;
|
||||||
|
|
||||||
|
tunnelLogger.error(`Port forwarding failed for tunnel '${tunnelName}' on port ${port}. This prevents tunnel establishment.`);
|
||||||
|
|
||||||
|
// Close the connection immediately to prevent retries
|
||||||
|
if (activeTunnels.has(tunnelName)) {
|
||||||
|
const conn = activeTunnels.get(tunnelName);
|
||||||
|
if (conn) {
|
||||||
|
conn.end();
|
||||||
|
}
|
||||||
|
activeTunnels.delete(tunnelName);
|
||||||
|
}
|
||||||
|
|
||||||
|
broadcastTunnelStatus(tunnelName, {
|
||||||
|
connected: false,
|
||||||
|
status: CONNECTION_STATES.FAILED,
|
||||||
|
reason: `Remote port forwarding failed for port ${port}. Port may be in use, requires root privileges, or SSH server doesn't allow port forwarding. Try a different port.`,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
@@ -763,7 +873,7 @@ async function connectSSHTunnel(
|
|||||||
"aes256-cbc",
|
"aes256-cbc",
|
||||||
"3des-cbc",
|
"3des-cbc",
|
||||||
],
|
],
|
||||||
hmac: ["hmac-sha2-256", "hmac-sha2-512", "hmac-sha1", "hmac-md5"],
|
hmac: ["hmac-sha2-256-etm@openssh.com", "hmac-sha2-512-etm@openssh.com", "hmac-sha2-256", "hmac-sha2-512", "hmac-sha1", "hmac-md5"],
|
||||||
compress: ["none", "zlib@openssh.com", "zlib"],
|
compress: ["none", "zlib@openssh.com", "zlib"],
|
||||||
},
|
},
|
||||||
};
|
};
|
||||||
@@ -827,12 +937,54 @@ async function connectSSHTunnel(
|
|||||||
conn.connect(connOptions);
|
conn.connect(connOptions);
|
||||||
}
|
}
|
||||||
|
|
||||||
function killRemoteTunnelByMarker(
|
async function killRemoteTunnelByMarker(
|
||||||
tunnelConfig: TunnelConfig,
|
tunnelConfig: TunnelConfig,
|
||||||
tunnelName: string,
|
tunnelName: string,
|
||||||
callback: (err?: Error) => void,
|
callback: (err?: Error) => void,
|
||||||
) {
|
) {
|
||||||
const tunnelMarker = getTunnelMarker(tunnelName);
|
const tunnelMarker = getTunnelMarker(tunnelName);
|
||||||
|
tunnelLogger.info(`Attempting to kill remote tunnel processes with marker '${tunnelMarker}' on source host ${tunnelConfig.sourceIP}`);
|
||||||
|
|
||||||
|
// Resolve source credentials using same logic as main tunnel connection
|
||||||
|
let resolvedSourceCredentials = {
|
||||||
|
password: tunnelConfig.sourcePassword,
|
||||||
|
sshKey: tunnelConfig.sourceSSHKey,
|
||||||
|
keyPassword: tunnelConfig.sourceKeyPassword,
|
||||||
|
keyType: tunnelConfig.sourceKeyType,
|
||||||
|
authMethod: tunnelConfig.sourceAuthMethod,
|
||||||
|
};
|
||||||
|
|
||||||
|
if (tunnelConfig.sourceCredentialId && tunnelConfig.sourceUserId) {
|
||||||
|
try {
|
||||||
|
const credentials = await getDb()
|
||||||
|
.select()
|
||||||
|
.from(sshCredentials)
|
||||||
|
.where(
|
||||||
|
and(
|
||||||
|
eq(sshCredentials.id, tunnelConfig.sourceCredentialId),
|
||||||
|
eq(sshCredentials.userId, tunnelConfig.sourceUserId),
|
||||||
|
),
|
||||||
|
);
|
||||||
|
|
||||||
|
if (credentials.length > 0) {
|
||||||
|
const credential = credentials[0];
|
||||||
|
resolvedSourceCredentials = {
|
||||||
|
password: credential.password,
|
||||||
|
sshKey: credential.privateKey || credential.key,
|
||||||
|
keyPassword: credential.keyPassword,
|
||||||
|
keyType: credential.keyType,
|
||||||
|
authMethod: credential.authType,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
tunnelLogger.warn("Failed to resolve source credentials for cleanup", {
|
||||||
|
tunnelName,
|
||||||
|
credentialId: tunnelConfig.sourceCredentialId,
|
||||||
|
error: error instanceof Error ? error.message : "Unknown error",
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
const conn = new Client();
|
const conn = new Client();
|
||||||
const connOptions: any = {
|
const connOptions: any = {
|
||||||
host: tunnelConfig.sourceIP,
|
host: tunnelConfig.sourceIP,
|
||||||
@@ -865,52 +1017,142 @@ function killRemoteTunnelByMarker(
|
|||||||
"aes256-cbc",
|
"aes256-cbc",
|
||||||
"3des-cbc",
|
"3des-cbc",
|
||||||
],
|
],
|
||||||
hmac: ["hmac-sha2-256", "hmac-sha2-512", "hmac-sha1", "hmac-md5"],
|
hmac: ["hmac-sha2-256-etm@openssh.com", "hmac-sha2-512-etm@openssh.com", "hmac-sha2-256", "hmac-sha2-512", "hmac-sha1", "hmac-md5"],
|
||||||
compress: ["none", "zlib@openssh.com", "zlib"],
|
compress: ["none", "zlib@openssh.com", "zlib"],
|
||||||
},
|
},
|
||||||
};
|
};
|
||||||
if (tunnelConfig.sourceAuthMethod === "key" && tunnelConfig.sourceSSHKey) {
|
|
||||||
if (
|
if (
|
||||||
!tunnelConfig.sourceSSHKey.includes("-----BEGIN") ||
|
resolvedSourceCredentials.authMethod === "key" &&
|
||||||
!tunnelConfig.sourceSSHKey.includes("-----END")
|
resolvedSourceCredentials.sshKey
|
||||||
|
) {
|
||||||
|
if (
|
||||||
|
!resolvedSourceCredentials.sshKey.includes("-----BEGIN") ||
|
||||||
|
!resolvedSourceCredentials.sshKey.includes("-----END")
|
||||||
) {
|
) {
|
||||||
callback(new Error("Invalid SSH key format"));
|
callback(new Error("Invalid SSH key format"));
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
const cleanKey = tunnelConfig.sourceSSHKey
|
const cleanKey = resolvedSourceCredentials.sshKey
|
||||||
.trim()
|
.trim()
|
||||||
.replace(/\r\n/g, "\n")
|
.replace(/\r\n/g, "\n")
|
||||||
.replace(/\r/g, "\n");
|
.replace(/\r/g, "\n");
|
||||||
connOptions.privateKey = Buffer.from(cleanKey, "utf8");
|
connOptions.privateKey = Buffer.from(cleanKey, "utf8");
|
||||||
if (tunnelConfig.sourceKeyPassword) {
|
if (resolvedSourceCredentials.keyPassword) {
|
||||||
connOptions.passphrase = tunnelConfig.sourceKeyPassword;
|
connOptions.passphrase = resolvedSourceCredentials.keyPassword;
|
||||||
}
|
}
|
||||||
if (tunnelConfig.sourceKeyType && tunnelConfig.sourceKeyType !== "auto") {
|
if (
|
||||||
connOptions.privateKeyType = tunnelConfig.sourceKeyType;
|
resolvedSourceCredentials.keyType &&
|
||||||
|
resolvedSourceCredentials.keyType !== "auto"
|
||||||
|
) {
|
||||||
|
connOptions.privateKeyType = resolvedSourceCredentials.keyType;
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
connOptions.password = tunnelConfig.sourcePassword;
|
connOptions.password = resolvedSourceCredentials.password;
|
||||||
}
|
}
|
||||||
|
|
||||||
conn.on("ready", () => {
|
conn.on("ready", () => {
|
||||||
const killCmd = `pkill -f '${tunnelMarker}'`;
|
// First, check for existing processes and get their PIDs
|
||||||
conn.exec(killCmd, (err, stream) => {
|
const checkCmd = `ps aux | grep -E '(${tunnelMarker}|ssh.*-R.*${tunnelConfig.endpointPort}:localhost:${tunnelConfig.sourcePort}.*${tunnelConfig.endpointUsername}@${tunnelConfig.endpointIP}|sshpass.*ssh.*-R.*${tunnelConfig.endpointPort})' | grep -v grep`;
|
||||||
if (err) {
|
|
||||||
|
conn.exec(checkCmd, (err, stream) => {
|
||||||
|
let foundProcesses = false;
|
||||||
|
|
||||||
|
stream.on("data", (data) => {
|
||||||
|
const output = data.toString().trim();
|
||||||
|
if (output) {
|
||||||
|
foundProcesses = true;
|
||||||
|
tunnelLogger.info(`Found running tunnel processes for '${tunnelName}': ${output}`);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
stream.on("close", () => {
|
||||||
|
if (!foundProcesses) {
|
||||||
|
tunnelLogger.info(`No running tunnel processes found for '${tunnelName}', cleanup not needed`);
|
||||||
conn.end();
|
conn.end();
|
||||||
callback(err);
|
callback();
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
stream.on("close", () => {
|
|
||||||
|
// Execute kill commands sequentially for better control
|
||||||
|
const killCmds = [
|
||||||
|
`pkill -TERM -f '${tunnelMarker}'`,
|
||||||
|
`sleep 1 && pkill -f 'ssh.*-R.*${tunnelConfig.endpointPort}:localhost:${tunnelConfig.sourcePort}.*${tunnelConfig.endpointUsername}@${tunnelConfig.endpointIP}'`,
|
||||||
|
`sleep 1 && pkill -f 'sshpass.*ssh.*-R.*${tunnelConfig.endpointPort}'`,
|
||||||
|
`sleep 2 && pkill -9 -f '${tunnelMarker}'`, // Force kill after delay
|
||||||
|
];
|
||||||
|
|
||||||
|
let commandIndex = 0;
|
||||||
|
|
||||||
|
function executeNextKillCommand() {
|
||||||
|
if (commandIndex >= killCmds.length) {
|
||||||
|
// Final verification
|
||||||
|
conn.exec(checkCmd, (err, verifyStream) => {
|
||||||
|
let stillRunning = false;
|
||||||
|
|
||||||
|
verifyStream.on("data", (data) => {
|
||||||
|
const output = data.toString().trim();
|
||||||
|
if (output) {
|
||||||
|
stillRunning = true;
|
||||||
|
tunnelLogger.warn(`Processes still running after cleanup for '${tunnelName}': ${output}`);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
verifyStream.on("close", () => {
|
||||||
|
if (!stillRunning) {
|
||||||
|
tunnelLogger.info(`All tunnel processes successfully terminated for '${tunnelName}'`);
|
||||||
|
} else {
|
||||||
|
tunnelLogger.warn(`Some tunnel processes may still be running for '${tunnelName}'`);
|
||||||
|
}
|
||||||
conn.end();
|
conn.end();
|
||||||
callback();
|
callback();
|
||||||
});
|
});
|
||||||
stream.on("data", () => {});
|
});
|
||||||
stream.stderr.on("data", () => {});
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
const killCmd = killCmds[commandIndex];
|
||||||
|
|
||||||
|
conn.exec(killCmd, (err, stream) => {
|
||||||
|
if (err) {
|
||||||
|
tunnelLogger.warn(`Kill command ${commandIndex + 1} failed for '${tunnelName}': ${err.message}`);
|
||||||
|
} else {
|
||||||
|
tunnelLogger.info(`Executed kill command ${commandIndex + 1} for '${tunnelName}': ${killCmd.replace(/sleep \d+ && /, '')}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
stream.on("close", (code) => {
|
||||||
|
tunnelLogger.info(`Kill command ${commandIndex + 1} completed with code ${code} for '${tunnelName}'`);
|
||||||
|
commandIndex++;
|
||||||
|
executeNextKillCommand();
|
||||||
|
});
|
||||||
|
|
||||||
|
stream.on("data", (data) => {
|
||||||
|
const output = data.toString().trim();
|
||||||
|
if (output) {
|
||||||
|
tunnelLogger.info(`Kill command ${commandIndex + 1} output for '${tunnelName}': ${output}`);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
stream.stderr.on("data", (data) => {
|
||||||
|
const output = data.toString().trim();
|
||||||
|
if (output && !output.includes("debug1")) {
|
||||||
|
tunnelLogger.warn(`Kill command ${commandIndex + 1} stderr for '${tunnelName}': ${output}`);
|
||||||
|
}
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
executeNextKillCommand();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
conn.on("error", (err) => {
|
conn.on("error", (err) => {
|
||||||
|
tunnelLogger.error(`Failed to connect to source host for killing tunnel '${tunnelName}': ${err.message}`);
|
||||||
callback(err);
|
callback(err);
|
||||||
});
|
});
|
||||||
|
|
||||||
conn.connect(connOptions);
|
conn.connect(connOptions);
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -938,6 +1180,10 @@ app.post("/ssh/tunnel/connect", (req, res) => {
|
|||||||
|
|
||||||
const tunnelName = tunnelConfig.name;
|
const tunnelName = tunnelConfig.name;
|
||||||
|
|
||||||
|
// Clean up any existing resources before starting new connection
|
||||||
|
tunnelLogger.info(`Starting new connection for '${tunnelName}', cleaning up any existing resources`);
|
||||||
|
cleanupTunnelResources(tunnelName);
|
||||||
|
|
||||||
manualDisconnects.delete(tunnelName);
|
manualDisconnects.delete(tunnelName);
|
||||||
retryCounters.delete(tunnelName);
|
retryCounters.delete(tunnelName);
|
||||||
retryExhaustedTunnels.delete(tunnelName);
|
retryExhaustedTunnels.delete(tunnelName);
|
||||||
@@ -969,6 +1215,10 @@ app.post("/ssh/tunnel/disconnect", (req, res) => {
|
|||||||
activeRetryTimers.delete(tunnelName);
|
activeRetryTimers.delete(tunnelName);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Immediately clean up active connections (force cleanup)
|
||||||
|
tunnelLogger.info(`Manual disconnect requested for '${tunnelName}', cleaning up resources`);
|
||||||
|
cleanupTunnelResources(tunnelName, true);
|
||||||
|
|
||||||
broadcastTunnelStatus(tunnelName, {
|
broadcastTunnelStatus(tunnelName, {
|
||||||
connected: false,
|
connected: false,
|
||||||
status: CONNECTION_STATES.DISCONNECTED,
|
status: CONNECTION_STATES.DISCONNECTED,
|
||||||
@@ -1005,6 +1255,10 @@ app.post("/ssh/tunnel/cancel", (req, res) => {
|
|||||||
countdownIntervals.delete(tunnelName);
|
countdownIntervals.delete(tunnelName);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Immediately clean up active connections for cancel operation too (force cleanup)
|
||||||
|
tunnelLogger.info(`Cancel requested for '${tunnelName}', cleaning up resources`);
|
||||||
|
cleanupTunnelResources(tunnelName, true);
|
||||||
|
|
||||||
broadcastTunnelStatus(tunnelName, {
|
broadcastTunnelStatus(tunnelName, {
|
||||||
connected: false,
|
connected: false,
|
||||||
status: CONNECTION_STATES.DISCONNECTED,
|
status: CONNECTION_STATES.DISCONNECTED,
|
||||||
@@ -1023,49 +1277,95 @@ app.post("/ssh/tunnel/cancel", (req, res) => {
|
|||||||
|
|
||||||
async function initializeAutoStartTunnels(): Promise<void> {
|
async function initializeAutoStartTunnels(): Promise<void> {
|
||||||
try {
|
try {
|
||||||
const response = await axios.get(
|
// Get internal auth token from SystemCrypto
|
||||||
|
const systemCrypto = SystemCrypto.getInstance();
|
||||||
|
const internalAuthToken = await systemCrypto.getInternalAuthToken();
|
||||||
|
|
||||||
|
// Get autostart hosts for tunnel configs
|
||||||
|
const autostartResponse = await axios.get(
|
||||||
"http://localhost:8081/ssh/db/host/internal",
|
"http://localhost:8081/ssh/db/host/internal",
|
||||||
{
|
{
|
||||||
headers: {
|
headers: {
|
||||||
"Content-Type": "application/json",
|
"Content-Type": "application/json",
|
||||||
"X-Internal-Request": "1",
|
"X-Internal-Auth-Token": internalAuthToken,
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
);
|
);
|
||||||
|
|
||||||
const hosts: SSHHost[] = response.data || [];
|
// Get all hosts for endpointHost resolution
|
||||||
|
const allHostsResponse = await axios.get(
|
||||||
|
"http://localhost:8081/ssh/db/host/internal/all",
|
||||||
|
{
|
||||||
|
headers: {
|
||||||
|
"Content-Type": "application/json",
|
||||||
|
"X-Internal-Auth-Token": internalAuthToken,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
);
|
||||||
|
|
||||||
|
const autostartHosts: SSHHost[] = autostartResponse.data || [];
|
||||||
|
const allHosts: SSHHost[] = allHostsResponse.data || [];
|
||||||
const autoStartTunnels: TunnelConfig[] = [];
|
const autoStartTunnels: TunnelConfig[] = [];
|
||||||
|
|
||||||
for (const host of hosts) {
|
tunnelLogger.info(`Found ${autostartHosts.length} autostart hosts and ${allHosts.length} total hosts for endpointHost resolution`);
|
||||||
|
|
||||||
|
for (const host of autostartHosts) {
|
||||||
if (host.enableTunnel && host.tunnelConnections) {
|
if (host.enableTunnel && host.tunnelConnections) {
|
||||||
for (const tunnelConnection of host.tunnelConnections) {
|
for (const tunnelConnection of host.tunnelConnections) {
|
||||||
if (tunnelConnection.autoStart) {
|
if (tunnelConnection.autoStart) {
|
||||||
const endpointHost = hosts.find(
|
const endpointHost = allHosts.find(
|
||||||
(h) =>
|
(h) =>
|
||||||
h.name === tunnelConnection.endpointHost ||
|
h.name === tunnelConnection.endpointHost ||
|
||||||
`${h.username}@${h.ip}` === tunnelConnection.endpointHost,
|
`${h.username}@${h.ip}` === tunnelConnection.endpointHost,
|
||||||
);
|
);
|
||||||
|
|
||||||
if (endpointHost) {
|
if (endpointHost) {
|
||||||
|
tunnelLogger.info(`Setting up tunnel credentials for '${host.name || `${host.username}@${host.ip}`}' -> '${endpointHost.name || `${endpointHost.username}@${endpointHost.ip}`}': sourceAutostart=${!!host.autostartPassword}, endpointAutostart=${!!endpointHost.autostartPassword}, endpointEncrypted=${!!endpointHost.password}`);
|
||||||
|
|
||||||
|
// Debug: Log actual credential availability
|
||||||
|
tunnelLogger.info(`Source host credentials debug:`, {
|
||||||
|
hostId: host.id,
|
||||||
|
hasAutostartPassword: !!host.autostartPassword,
|
||||||
|
hasAutostartKey: !!host.autostartKey,
|
||||||
|
hasEncryptedPassword: !!host.password,
|
||||||
|
hasEncryptedKey: !!host.key,
|
||||||
|
authType: host.authType
|
||||||
|
});
|
||||||
|
|
||||||
|
tunnelLogger.info(`Endpoint host credentials debug:`, {
|
||||||
|
hostId: endpointHost.id,
|
||||||
|
hasAutostartPassword: !!endpointHost.autostartPassword,
|
||||||
|
hasAutostartKey: !!endpointHost.autostartKey,
|
||||||
|
hasEncryptedPassword: !!endpointHost.password,
|
||||||
|
hasEncryptedKey: !!endpointHost.key,
|
||||||
|
authType: endpointHost.authType
|
||||||
|
});
|
||||||
|
|
||||||
const tunnelConfig: TunnelConfig = {
|
const tunnelConfig: TunnelConfig = {
|
||||||
name: `${host.name || `${host.username}@${host.ip}`}_${tunnelConnection.sourcePort}_${tunnelConnection.endpointPort}`,
|
name: `${host.name || `${host.username}@${host.ip}`}_${tunnelConnection.sourcePort}_${tunnelConnection.endpointPort}`,
|
||||||
hostName: host.name || `${host.username}@${host.ip}`,
|
hostName: host.name || `${host.username}@${host.ip}`,
|
||||||
sourceIP: host.ip,
|
sourceIP: host.ip,
|
||||||
sourceSSHPort: host.port,
|
sourceSSHPort: host.port,
|
||||||
sourceUsername: host.username,
|
sourceUsername: host.username,
|
||||||
sourcePassword: host.password,
|
// Prefer autostart credentials for source host, fallback to encrypted credentials
|
||||||
|
sourcePassword: host.autostartPassword || host.password,
|
||||||
sourceAuthMethod: host.authType,
|
sourceAuthMethod: host.authType,
|
||||||
sourceSSHKey: host.key,
|
sourceSSHKey: host.autostartKey || host.key,
|
||||||
sourceKeyPassword: host.keyPassword,
|
sourceKeyPassword: host.autostartKeyPassword || host.keyPassword,
|
||||||
sourceKeyType: host.keyType,
|
sourceKeyType: host.keyType,
|
||||||
|
sourceCredentialId: host.credentialId,
|
||||||
|
sourceUserId: host.userId,
|
||||||
endpointIP: endpointHost.ip,
|
endpointIP: endpointHost.ip,
|
||||||
endpointSSHPort: endpointHost.port,
|
endpointSSHPort: endpointHost.port,
|
||||||
endpointUsername: endpointHost.username,
|
endpointUsername: endpointHost.username,
|
||||||
endpointPassword: endpointHost.password,
|
// Prefer TunnelConnection credentials, then autostart credentials, fallback to encrypted credentials
|
||||||
endpointAuthMethod: endpointHost.authType,
|
endpointPassword: tunnelConnection.endpointPassword || endpointHost.autostartPassword || endpointHost.password,
|
||||||
endpointSSHKey: endpointHost.key,
|
endpointAuthMethod: tunnelConnection.endpointAuthType || endpointHost.authType,
|
||||||
endpointKeyPassword: endpointHost.keyPassword,
|
endpointSSHKey: tunnelConnection.endpointKey || endpointHost.autostartKey || endpointHost.key,
|
||||||
endpointKeyType: endpointHost.keyType,
|
endpointKeyPassword: tunnelConnection.endpointKeyPassword || endpointHost.autostartKeyPassword || endpointHost.keyPassword,
|
||||||
|
endpointKeyType: tunnelConnection.endpointKeyType || endpointHost.keyType,
|
||||||
|
endpointCredentialId: endpointHost.credentialId,
|
||||||
|
endpointUserId: endpointHost.userId,
|
||||||
sourcePort: tunnelConnection.sourcePort,
|
sourcePort: tunnelConnection.sourcePort,
|
||||||
endpointPort: tunnelConnection.endpointPort,
|
endpointPort: tunnelConnection.endpointPort,
|
||||||
maxRetries: tunnelConnection.maxRetries,
|
maxRetries: tunnelConnection.maxRetries,
|
||||||
@@ -1074,7 +1374,25 @@ async function initializeAutoStartTunnels(): Promise<void> {
|
|||||||
isPinned: host.pin,
|
isPinned: host.pin,
|
||||||
};
|
};
|
||||||
|
|
||||||
|
// Validate source and endpoint credentials availability
|
||||||
|
const hasSourcePassword = host.autostartPassword;
|
||||||
|
const hasSourceKey = host.autostartKey;
|
||||||
|
const hasEndpointPassword = tunnelConnection.endpointPassword || endpointHost.autostartPassword;
|
||||||
|
const hasEndpointKey = tunnelConnection.endpointKey || endpointHost.autostartKey;
|
||||||
|
|
||||||
|
if (!hasSourcePassword && !hasSourceKey) {
|
||||||
|
tunnelLogger.warn(`Tunnel '${tunnelConfig.name}' may fail: source host '${host.name || `${host.username}@${host.ip}`}' has no plaintext credentials. Enable autostart for this host to use unattended tunneling.`);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!hasEndpointPassword && !hasEndpointKey) {
|
||||||
|
tunnelLogger.warn(`Tunnel '${tunnelConfig.name}' may fail: endpoint host '${endpointHost.name || `${endpointHost.username}@${endpointHost.ip}`}' has no plaintext credentials. Consider enabling autostart for this host or configuring credentials in tunnel connection.`);
|
||||||
|
}
|
||||||
|
|
||||||
autoStartTunnels.push(tunnelConfig);
|
autoStartTunnels.push(tunnelConfig);
|
||||||
|
} else {
|
||||||
|
tunnelLogger.error(
|
||||||
|
`Failed to find endpointHost '${tunnelConnection.endpointHost}' for tunnel from ${host.name || `${host.username}@${host.ip}`}. Available hosts: ${allHosts.map(h => h.name || `${h.username}@${h.ip}`).join(', ')}`,
|
||||||
|
);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,30 +1,150 @@
|
|||||||
// npx tsc -p tsconfig.node.json
|
// npx tsc -p tsconfig.node.json
|
||||||
// node ./dist/backend/starter.js
|
// node ./dist/backend/starter.js
|
||||||
|
|
||||||
import "./database/database.js";
|
|
||||||
import { DatabaseEncryption } from "./utils/database-encryption.js";
|
|
||||||
import { systemLogger, versionLogger } from "./utils/logger.js";
|
|
||||||
import "dotenv/config";
|
import "dotenv/config";
|
||||||
|
import dotenv from "dotenv";
|
||||||
|
import { promises as fs } from "fs";
|
||||||
|
import path from "path";
|
||||||
|
import { AutoSSLSetup } from "./utils/auto-ssl-setup.js";
|
||||||
|
import { AuthManager } from "./utils/auth-manager.js";
|
||||||
|
import { DataCrypto } from "./utils/data-crypto.js";
|
||||||
|
import { SystemCrypto } from "./utils/system-crypto.js";
|
||||||
|
import { systemLogger, versionLogger } from "./utils/logger.js";
|
||||||
|
|
||||||
(async () => {
|
(async () => {
|
||||||
try {
|
try {
|
||||||
|
// Load persistent .env file from config directory if available (Docker)
|
||||||
|
if (process.env.NODE_ENV === 'production') {
|
||||||
|
try {
|
||||||
|
await fs.access('/app/config/.env');
|
||||||
|
dotenv.config({ path: '/app/config/.env' });
|
||||||
|
systemLogger.info("Loaded persistent configuration from /app/config/.env", {
|
||||||
|
operation: "config_load"
|
||||||
|
});
|
||||||
|
} catch {
|
||||||
|
// Config file doesn't exist yet, will be created on first run
|
||||||
|
systemLogger.info("No persistent config found, will create on first run", {
|
||||||
|
operation: "config_init"
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
const version = process.env.VERSION || "unknown";
|
const version = process.env.VERSION || "unknown";
|
||||||
versionLogger.info(`Termix Backend starting - Version: ${version}`, {
|
versionLogger.info(`Termix Backend starting - Version: ${version}`, {
|
||||||
operation: "startup",
|
operation: "startup",
|
||||||
version: version,
|
version: version,
|
||||||
});
|
});
|
||||||
|
|
||||||
|
// Auto-initialize SSL/TLS configuration
|
||||||
|
await AutoSSLSetup.initialize();
|
||||||
|
|
||||||
|
// Initialize database first - required before other services
|
||||||
|
systemLogger.info("Initializing database...", {
|
||||||
|
operation: "database_init"
|
||||||
|
});
|
||||||
|
const dbModule = await import("./database/db/index.js");
|
||||||
|
await dbModule.databaseReady;
|
||||||
|
systemLogger.success("Database initialized successfully", {
|
||||||
|
operation: "database_init_complete"
|
||||||
|
});
|
||||||
|
|
||||||
|
// Production environment security checks
|
||||||
|
if (process.env.NODE_ENV === 'production') {
|
||||||
|
systemLogger.info("Running production environment security checks...", {
|
||||||
|
operation: "security_checks",
|
||||||
|
});
|
||||||
|
|
||||||
|
const securityIssues: string[] = [];
|
||||||
|
|
||||||
|
// Check JWT and database keys (auto-generated if missing - warnings only)
|
||||||
|
if (!process.env.JWT_SECRET) {
|
||||||
|
systemLogger.warn("JWT_SECRET not set - using auto-generated keys (consider setting for production)", {
|
||||||
|
operation: "security_warning",
|
||||||
|
note: "Auto-generated keys are secure but not persistent across deployments"
|
||||||
|
});
|
||||||
|
} else if (process.env.JWT_SECRET.length < 64) {
|
||||||
|
securityIssues.push("JWT_SECRET should be at least 64 characters in production");
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!process.env.DATABASE_KEY) {
|
||||||
|
systemLogger.warn("DATABASE_KEY not set - using auto-generated keys (consider setting for production)", {
|
||||||
|
operation: "security_warning",
|
||||||
|
note: "Auto-generated keys are secure but not persistent across deployments"
|
||||||
|
});
|
||||||
|
} else if (process.env.DATABASE_KEY.length < 64) {
|
||||||
|
securityIssues.push("DATABASE_KEY should be at least 64 characters in production");
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!process.env.INTERNAL_AUTH_TOKEN) {
|
||||||
|
systemLogger.warn("INTERNAL_AUTH_TOKEN not set - using auto-generated token (consider setting for production)", {
|
||||||
|
operation: "security_warning",
|
||||||
|
note: "Auto-generated tokens are secure but not persistent across deployments"
|
||||||
|
});
|
||||||
|
} else if (process.env.INTERNAL_AUTH_TOKEN.length < 32) {
|
||||||
|
securityIssues.push("INTERNAL_AUTH_TOKEN should be at least 32 characters in production");
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check database file encryption
|
||||||
|
if (process.env.DB_FILE_ENCRYPTION === 'false') {
|
||||||
|
securityIssues.push("Database file encryption should be enabled in production");
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
// Check CORS configuration warning
|
||||||
|
systemLogger.warn("Production deployment detected - ensure CORS is properly configured", {
|
||||||
|
operation: "security_checks",
|
||||||
|
warning: "Verify frontend domain whitelist"
|
||||||
|
});
|
||||||
|
|
||||||
|
if (securityIssues.length > 0) {
|
||||||
|
systemLogger.error("SECURITY ISSUES DETECTED IN PRODUCTION:", {
|
||||||
|
operation: "security_checks_failed",
|
||||||
|
issues: securityIssues,
|
||||||
|
});
|
||||||
|
for (const issue of securityIssues) {
|
||||||
|
systemLogger.error(`- ${issue}`, { operation: "security_issue" });
|
||||||
|
}
|
||||||
|
systemLogger.error("Fix these issues before running in production!", {
|
||||||
|
operation: "security_checks_failed",
|
||||||
|
});
|
||||||
|
process.exit(1);
|
||||||
|
}
|
||||||
|
|
||||||
|
systemLogger.success("Production security checks passed", {
|
||||||
|
operation: "security_checks_complete",
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
systemLogger.info("Initializing backend services...", {
|
systemLogger.info("Initializing backend services...", {
|
||||||
operation: "startup",
|
operation: "startup",
|
||||||
|
environment: process.env.NODE_ENV || "development",
|
||||||
});
|
});
|
||||||
|
|
||||||
// Initialize database encryption before other services
|
// Initialize simplified authentication system
|
||||||
await DatabaseEncryption.initialize();
|
const authManager = AuthManager.getInstance();
|
||||||
systemLogger.info("Database encryption initialized", {
|
await authManager.initialize();
|
||||||
operation: "encryption_init",
|
DataCrypto.initialize();
|
||||||
|
|
||||||
|
// Initialize system crypto keys (JWT, Database, Internal Auth)
|
||||||
|
const systemCrypto = SystemCrypto.getInstance();
|
||||||
|
await systemCrypto.initializeJWTSecret();
|
||||||
|
await systemCrypto.initializeDatabaseKey();
|
||||||
|
await systemCrypto.initializeInternalAuthToken();
|
||||||
|
|
||||||
|
systemLogger.info("Security system initialized (KEK-DEK architecture + SystemCrypto)", {
|
||||||
|
operation: "security_init",
|
||||||
});
|
});
|
||||||
|
|
||||||
// Load modules that depend on encryption after initialization
|
// Load database-dependent modules after database initialization
|
||||||
|
systemLogger.info("Starting database API server...", {
|
||||||
|
operation: "api_server_init"
|
||||||
|
});
|
||||||
|
await import("./database/database.js");
|
||||||
|
|
||||||
|
// Load modules that depend on database and encryption
|
||||||
|
systemLogger.info("Starting SSH services...", {
|
||||||
|
operation: "ssh_services_init"
|
||||||
|
});
|
||||||
await import("./ssh/terminal.js");
|
await import("./ssh/terminal.js");
|
||||||
await import("./ssh/tunnel.js");
|
await import("./ssh/tunnel.js");
|
||||||
await import("./ssh/file-manager.js");
|
await import("./ssh/file-manager.js");
|
||||||
@@ -43,6 +163,9 @@ import "dotenv/config";
|
|||||||
version: version,
|
version: version,
|
||||||
});
|
});
|
||||||
|
|
||||||
|
// Display SSL configuration info
|
||||||
|
AutoSSLSetup.logSSLInfo();
|
||||||
|
|
||||||
process.on("SIGINT", () => {
|
process.on("SIGINT", () => {
|
||||||
systemLogger.info(
|
systemLogger.info(
|
||||||
"Received SIGINT signal, initiating graceful shutdown...",
|
"Received SIGINT signal, initiating graceful shutdown...",
|
||||||
|
|||||||
298
src/backend/utils/auth-manager.ts
Normal file
@@ -0,0 +1,298 @@
|
|||||||
|
import jwt from "jsonwebtoken";
|
||||||
|
import { UserCrypto } from "./user-crypto.js";
|
||||||
|
import { SystemCrypto } from "./system-crypto.js";
|
||||||
|
import { DataCrypto } from "./data-crypto.js";
|
||||||
|
import { databaseLogger } from "./logger.js";
|
||||||
|
import type { Request, Response, NextFunction } from "express";
|
||||||
|
|
||||||
|
interface AuthenticationResult {
|
||||||
|
success: boolean;
|
||||||
|
token?: string;
|
||||||
|
userId?: string;
|
||||||
|
isAdmin?: boolean;
|
||||||
|
username?: string;
|
||||||
|
requiresTOTP?: boolean;
|
||||||
|
tempToken?: string;
|
||||||
|
error?: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
interface JWTPayload {
|
||||||
|
userId: string;
|
||||||
|
pendingTOTP?: boolean;
|
||||||
|
iat?: number;
|
||||||
|
exp?: number;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* AuthManager - Simplified authentication manager
|
||||||
|
*
|
||||||
|
* Responsibilities:
|
||||||
|
* - JWT generation and validation
|
||||||
|
* - Authentication middleware
|
||||||
|
* - User login/logout
|
||||||
|
*
|
||||||
|
* No more two-layer sessions - use UserKeyManager directly
|
||||||
|
*/
|
||||||
|
class AuthManager {
|
||||||
|
private static instance: AuthManager;
|
||||||
|
private systemCrypto: SystemCrypto;
|
||||||
|
private userCrypto: UserCrypto;
|
||||||
|
|
||||||
|
private constructor() {
|
||||||
|
this.systemCrypto = SystemCrypto.getInstance();
|
||||||
|
this.userCrypto = UserCrypto.getInstance();
|
||||||
|
}
|
||||||
|
|
||||||
|
static getInstance(): AuthManager {
|
||||||
|
if (!this.instance) {
|
||||||
|
this.instance = new AuthManager();
|
||||||
|
}
|
||||||
|
return this.instance;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Initialize authentication system
|
||||||
|
*/
|
||||||
|
async initialize(): Promise<void> {
|
||||||
|
await this.systemCrypto.initializeJWTSecret();
|
||||||
|
databaseLogger.info("AuthManager initialized", {
|
||||||
|
operation: "auth_init"
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* User registration
|
||||||
|
*/
|
||||||
|
async registerUser(userId: string, password: string): Promise<void> {
|
||||||
|
await this.userCrypto.setupUserEncryption(userId, password);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* User login with lazy encryption migration
|
||||||
|
*/
|
||||||
|
async authenticateUser(userId: string, password: string): Promise<boolean> {
|
||||||
|
const authenticated = await this.userCrypto.authenticateUser(userId, password);
|
||||||
|
|
||||||
|
if (authenticated) {
|
||||||
|
// Trigger lazy encryption migration for user's sensitive fields
|
||||||
|
await this.performLazyEncryptionMigration(userId);
|
||||||
|
}
|
||||||
|
|
||||||
|
return authenticated;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Perform lazy encryption migration for user's sensitive data
|
||||||
|
* This runs asynchronously after successful login
|
||||||
|
*/
|
||||||
|
private async performLazyEncryptionMigration(userId: string): Promise<void> {
|
||||||
|
try {
|
||||||
|
const userDataKey = this.getUserDataKey(userId);
|
||||||
|
if (!userDataKey) {
|
||||||
|
databaseLogger.warn("Cannot perform lazy encryption migration - user data key not available", {
|
||||||
|
operation: "lazy_encryption_migration_no_key",
|
||||||
|
userId,
|
||||||
|
});
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Import database connection - need to access raw SQLite for migration
|
||||||
|
const { getSqlite, saveMemoryDatabaseToFile, databaseReady } = await import("../database/db/index.js");
|
||||||
|
|
||||||
|
// Ensure database is fully initialized before accessing SQLite
|
||||||
|
await databaseReady;
|
||||||
|
const sqlite = getSqlite();
|
||||||
|
|
||||||
|
// Perform the migration
|
||||||
|
const migrationResult = await DataCrypto.migrateUserSensitiveFields(
|
||||||
|
userId,
|
||||||
|
userDataKey,
|
||||||
|
sqlite
|
||||||
|
);
|
||||||
|
|
||||||
|
if (migrationResult.migrated) {
|
||||||
|
// Save the in-memory database to disk to persist the migration
|
||||||
|
await saveMemoryDatabaseToFile();
|
||||||
|
|
||||||
|
databaseLogger.success("Lazy encryption migration completed for user", {
|
||||||
|
operation: "lazy_encryption_migration_success",
|
||||||
|
userId,
|
||||||
|
migratedTables: migrationResult.migratedTables,
|
||||||
|
migratedFieldsCount: migrationResult.migratedFieldsCount,
|
||||||
|
});
|
||||||
|
} else {
|
||||||
|
databaseLogger.debug("No lazy encryption migration needed for user", {
|
||||||
|
operation: "lazy_encryption_migration_not_needed",
|
||||||
|
userId,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
} catch (error) {
|
||||||
|
// Log error but don't fail the login process
|
||||||
|
databaseLogger.error("Lazy encryption migration failed", error, {
|
||||||
|
operation: "lazy_encryption_migration_error",
|
||||||
|
userId,
|
||||||
|
error: error instanceof Error ? error.message : "Unknown error",
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Generate JWT Token
|
||||||
|
*/
|
||||||
|
async generateJWTToken(
|
||||||
|
userId: string,
|
||||||
|
options: { expiresIn?: string; pendingTOTP?: boolean } = {}
|
||||||
|
): Promise<string> {
|
||||||
|
const jwtSecret = await this.systemCrypto.getJWTSecret();
|
||||||
|
|
||||||
|
const payload: JWTPayload = { userId };
|
||||||
|
if (options.pendingTOTP) {
|
||||||
|
payload.pendingTOTP = true;
|
||||||
|
}
|
||||||
|
|
||||||
|
return jwt.sign(payload, jwtSecret, {
|
||||||
|
expiresIn: options.expiresIn || "24h"
|
||||||
|
} as jwt.SignOptions);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Verify JWT Token
|
||||||
|
*/
|
||||||
|
async verifyJWTToken(token: string): Promise<JWTPayload | null> {
|
||||||
|
try {
|
||||||
|
const jwtSecret = await this.systemCrypto.getJWTSecret();
|
||||||
|
const payload = jwt.verify(token, jwtSecret) as JWTPayload;
|
||||||
|
return payload;
|
||||||
|
} catch (error) {
|
||||||
|
databaseLogger.warn("JWT verification failed", {
|
||||||
|
operation: "jwt_verify_failed",
|
||||||
|
error: error instanceof Error ? error.message : 'Unknown error',
|
||||||
|
});
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Authentication middleware
|
||||||
|
*/
|
||||||
|
createAuthMiddleware() {
|
||||||
|
return async (req: Request, res: Response, next: NextFunction) => {
|
||||||
|
const authHeader = req.headers["authorization"];
|
||||||
|
if (!authHeader?.startsWith("Bearer ")) {
|
||||||
|
return res.status(401).json({ error: "Missing Authorization header" });
|
||||||
|
}
|
||||||
|
|
||||||
|
const token = authHeader.split(" ")[1];
|
||||||
|
const payload = await this.verifyJWTToken(token);
|
||||||
|
|
||||||
|
if (!payload) {
|
||||||
|
return res.status(401).json({ error: "Invalid token" });
|
||||||
|
}
|
||||||
|
|
||||||
|
(req as any).userId = payload.userId;
|
||||||
|
(req as any).pendingTOTP = payload.pendingTOTP;
|
||||||
|
next();
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Data access middleware - requires user to have unlocked data
|
||||||
|
*/
|
||||||
|
createDataAccessMiddleware() {
|
||||||
|
return async (req: Request, res: Response, next: NextFunction) => {
|
||||||
|
const userId = (req as any).userId;
|
||||||
|
if (!userId) {
|
||||||
|
return res.status(401).json({ error: "Authentication required" });
|
||||||
|
}
|
||||||
|
|
||||||
|
const dataKey = this.userCrypto.getUserDataKey(userId);
|
||||||
|
if (!dataKey) {
|
||||||
|
return res.status(423).json({
|
||||||
|
error: "Data locked - re-authenticate with password",
|
||||||
|
code: "DATA_LOCKED"
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
(req as any).dataKey = dataKey;
|
||||||
|
next();
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Admin middleware - requires user to be authenticated and have admin privileges
|
||||||
|
*/
|
||||||
|
createAdminMiddleware() {
|
||||||
|
return async (req: Request, res: Response, next: NextFunction) => {
|
||||||
|
const authHeader = req.headers["authorization"];
|
||||||
|
if (!authHeader?.startsWith("Bearer ")) {
|
||||||
|
return res.status(401).json({ error: "Missing Authorization header" });
|
||||||
|
}
|
||||||
|
|
||||||
|
const token = authHeader.split(" ")[1];
|
||||||
|
const payload = await this.verifyJWTToken(token);
|
||||||
|
|
||||||
|
if (!payload) {
|
||||||
|
return res.status(401).json({ error: "Invalid token" });
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check if user is admin
|
||||||
|
try {
|
||||||
|
const { db } = await import("../database/db/index.js");
|
||||||
|
const { users } = await import("../database/db/schema.js");
|
||||||
|
const { eq } = await import("drizzle-orm");
|
||||||
|
|
||||||
|
const user = await db.select().from(users).where(eq(users.id, payload.userId));
|
||||||
|
|
||||||
|
if (!user || user.length === 0 || !user[0].is_admin) {
|
||||||
|
databaseLogger.warn("Non-admin user attempted to access admin endpoint", {
|
||||||
|
operation: "admin_access_denied",
|
||||||
|
userId: payload.userId,
|
||||||
|
endpoint: req.path,
|
||||||
|
});
|
||||||
|
return res.status(403).json({ error: "Admin access required" });
|
||||||
|
}
|
||||||
|
|
||||||
|
(req as any).userId = payload.userId;
|
||||||
|
(req as any).pendingTOTP = payload.pendingTOTP;
|
||||||
|
next();
|
||||||
|
} catch (error) {
|
||||||
|
databaseLogger.error("Failed to verify admin privileges", error, {
|
||||||
|
operation: "admin_check_failed",
|
||||||
|
userId: payload.userId,
|
||||||
|
});
|
||||||
|
return res.status(500).json({ error: "Failed to verify admin privileges" });
|
||||||
|
}
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
  /**
   * Log a user out by delegating to UserCrypto, which drops the user's
   * in-memory crypto state for this session.
   *
   * @param userId id of the user to log out
   */
  logoutUser(userId: string): void {
    this.userCrypto.logoutUser(userId);
  }
|
||||||
|
|
||||||
|
  /**
   * Fetch the user's data encryption key from UserCrypto.
   *
   * @param userId id of the user whose key is requested
   * @returns the in-memory data key, or null when the user is not unlocked
   */
  getUserDataKey(userId: string): Buffer | null {
    return this.userCrypto.getUserDataKey(userId);
  }
|
||||||
|
|
||||||
|
  /**
   * Whether the user's data is currently unlocked (per UserCrypto).
   *
   * @param userId id of the user to check
   * @returns true when a data key is available for this user
   */
  isUserUnlocked(userId: string): boolean {
    return this.userCrypto.isUserUnlocked(userId);
  }
|
||||||
|
|
||||||
|
  /**
   * Change the user's password by delegating to UserCrypto.
   *
   * @param userId id of the user changing their password
   * @param oldPassword current password (verified by UserCrypto)
   * @param newPassword replacement password
   * @returns true when the change succeeded
   */
  async changeUserPassword(userId: string, oldPassword: string, newPassword: string): Promise<boolean> {
    return await this.userCrypto.changeUserPassword(userId, oldPassword, newPassword);
  }
|
||||||
|
}
|
||||||
|
|
||||||
|
export { AuthManager, type AuthenticationResult, type JWTPayload };
|
||||||
261
src/backend/utils/auto-ssl-setup.ts
Normal file
@@ -0,0 +1,261 @@
|
|||||||
|
import { execSync } from "child_process";
|
||||||
|
import { promises as fs } from "fs";
|
||||||
|
import path from "path";
|
||||||
|
import crypto from "crypto";
|
||||||
|
import { systemLogger } from "./logger.js";
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Auto SSL Setup - Optional SSL certificate generation for Termix
|
||||||
|
*
|
||||||
|
* Linus principle: Simple defaults, optional security features
|
||||||
|
* - SSL disabled by default to avoid setup complexity
|
||||||
|
* - Auto-generates SSL certificates when enabled
|
||||||
|
* - Uses container-appropriate paths
|
||||||
|
* - Users can enable SSL by setting ENABLE_SSL=true
|
||||||
|
*/
|
||||||
|
export class AutoSSLSetup {
|
||||||
|
private static readonly SSL_DIR = path.join(process.cwd(), "ssl");
|
||||||
|
private static readonly CERT_FILE = path.join(AutoSSLSetup.SSL_DIR, "termix.crt");
|
||||||
|
private static readonly KEY_FILE = path.join(AutoSSLSetup.SSL_DIR, "termix.key");
|
||||||
|
private static readonly ENV_FILE = path.join(process.cwd(), ".env");
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Initialize SSL setup automatically during system startup
|
||||||
|
*/
|
||||||
|
static async initialize(): Promise<void> {
|
||||||
|
try {
|
||||||
|
systemLogger.info("🔐 Initializing SSL/TLS configuration...", {
|
||||||
|
operation: "ssl_auto_init"
|
||||||
|
});
|
||||||
|
|
||||||
|
// Check if SSL is already properly configured
|
||||||
|
if (await this.isSSLConfigured()) {
|
||||||
|
systemLogger.info("✅ SSL configuration already exists and is valid", {
|
||||||
|
operation: "ssl_already_configured"
|
||||||
|
});
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Auto-generate SSL certificates
|
||||||
|
await this.generateSSLCertificates();
|
||||||
|
|
||||||
|
// Setup environment variables for SSL
|
||||||
|
await this.setupEnvironmentVariables();
|
||||||
|
|
||||||
|
systemLogger.success("🚀 SSL/TLS configuration completed successfully", {
|
||||||
|
operation: "ssl_auto_init_complete",
|
||||||
|
https_port: process.env.SSL_PORT || "8443",
|
||||||
|
note: "HTTPS/WSS is now enabled by default"
|
||||||
|
});
|
||||||
|
|
||||||
|
} catch (error) {
|
||||||
|
systemLogger.error("❌ Failed to initialize SSL configuration", error, {
|
||||||
|
operation: "ssl_auto_init_failed"
|
||||||
|
});
|
||||||
|
|
||||||
|
// Don't crash the application - fallback to HTTP
|
||||||
|
systemLogger.warn("⚠️ Falling back to HTTP-only mode", {
|
||||||
|
operation: "ssl_fallback_http"
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check if SSL is already properly configured
|
||||||
|
*/
|
||||||
|
private static async isSSLConfigured(): Promise<boolean> {
|
||||||
|
try {
|
||||||
|
// Check if certificate files exist
|
||||||
|
await fs.access(this.CERT_FILE);
|
||||||
|
await fs.access(this.KEY_FILE);
|
||||||
|
|
||||||
|
// Check if certificate is still valid (at least 30 days)
|
||||||
|
const result = execSync(`openssl x509 -in "${this.CERT_FILE}" -checkend 2592000 -noout`, {
|
||||||
|
stdio: 'pipe'
|
||||||
|
});
|
||||||
|
|
||||||
|
return true;
|
||||||
|
} catch {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Generate SSL certificates automatically
|
||||||
|
*/
|
||||||
|
private static async generateSSLCertificates(): Promise<void> {
|
||||||
|
systemLogger.info("🔑 Generating SSL certificates for local development...", {
|
||||||
|
operation: "ssl_cert_generation"
|
||||||
|
});
|
||||||
|
|
||||||
|
try {
|
||||||
|
// Create SSL directory
|
||||||
|
await fs.mkdir(this.SSL_DIR, { recursive: true });
|
||||||
|
|
||||||
|
// Create OpenSSL config for comprehensive certificate
|
||||||
|
const configFile = path.join(this.SSL_DIR, "openssl.conf");
|
||||||
|
const opensslConfig = `
|
||||||
|
[req]
|
||||||
|
default_bits = 2048
|
||||||
|
prompt = no
|
||||||
|
default_md = sha256
|
||||||
|
distinguished_name = dn
|
||||||
|
req_extensions = v3_req
|
||||||
|
|
||||||
|
[dn]
|
||||||
|
C=US
|
||||||
|
ST=State
|
||||||
|
L=City
|
||||||
|
O=Termix
|
||||||
|
OU=IT Department
|
||||||
|
CN=localhost
|
||||||
|
|
||||||
|
[v3_req]
|
||||||
|
basicConstraints = CA:FALSE
|
||||||
|
keyUsage = nonRepudiation, digitalSignature, keyEncipherment
|
||||||
|
subjectAltName = @alt_names
|
||||||
|
|
||||||
|
[alt_names]
|
||||||
|
DNS.1 = localhost
|
||||||
|
DNS.2 = 127.0.0.1
|
||||||
|
DNS.3 = *.localhost
|
||||||
|
DNS.4 = termix.local
|
||||||
|
DNS.5 = *.termix.local
|
||||||
|
IP.1 = 127.0.0.1
|
||||||
|
IP.2 = ::1
|
||||||
|
`.trim();
|
||||||
|
|
||||||
|
await fs.writeFile(configFile, opensslConfig);
|
||||||
|
|
||||||
|
// Generate private key
|
||||||
|
execSync(`openssl genrsa -out "${this.KEY_FILE}" 2048`, { stdio: 'pipe' });
|
||||||
|
|
||||||
|
// Generate certificate
|
||||||
|
execSync(`openssl req -new -x509 -key "${this.KEY_FILE}" -out "${this.CERT_FILE}" -days 365 -config "${configFile}" -extensions v3_req`, {
|
||||||
|
stdio: 'pipe'
|
||||||
|
});
|
||||||
|
|
||||||
|
// Set proper permissions
|
||||||
|
await fs.chmod(this.KEY_FILE, 0o600);
|
||||||
|
await fs.chmod(this.CERT_FILE, 0o644);
|
||||||
|
|
||||||
|
// Clean up temp config
|
||||||
|
await fs.unlink(configFile);
|
||||||
|
|
||||||
|
systemLogger.success("✅ SSL certificates generated successfully", {
|
||||||
|
operation: "ssl_cert_generated",
|
||||||
|
cert_path: this.CERT_FILE,
|
||||||
|
key_path: this.KEY_FILE,
|
||||||
|
valid_days: 365
|
||||||
|
});
|
||||||
|
|
||||||
|
} catch (error) {
|
||||||
|
throw new Error(`SSL certificate generation failed: ${error instanceof Error ? error.message : 'Unknown error'}`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Setup environment variables for SSL configuration
|
||||||
|
*/
|
||||||
|
private static async setupEnvironmentVariables(): Promise<void> {
|
||||||
|
systemLogger.info("⚙️ Configuring SSL environment variables...", {
|
||||||
|
operation: "ssl_env_setup"
|
||||||
|
});
|
||||||
|
|
||||||
|
// Use container paths in production, local paths in development
|
||||||
|
const isProduction = process.env.NODE_ENV === "production";
|
||||||
|
const certPath = isProduction ? "/app/ssl/termix.crt" : this.CERT_FILE;
|
||||||
|
const keyPath = isProduction ? "/app/ssl/termix.key" : this.KEY_FILE;
|
||||||
|
|
||||||
|
const sslEnvVars = {
|
||||||
|
ENABLE_SSL: "false", // Disable SSL by default to avoid setup issues
|
||||||
|
SSL_PORT: process.env.SSL_PORT || "8443",
|
||||||
|
SSL_CERT_PATH: certPath,
|
||||||
|
SSL_KEY_PATH: keyPath,
|
||||||
|
SSL_DOMAIN: "localhost"
|
||||||
|
};
|
||||||
|
|
||||||
|
// Check if .env file exists
|
||||||
|
let envContent = "";
|
||||||
|
try {
|
||||||
|
envContent = await fs.readFile(this.ENV_FILE, 'utf8');
|
||||||
|
} catch {
|
||||||
|
// .env doesn't exist, will create new one
|
||||||
|
}
|
||||||
|
|
||||||
|
// Update or add SSL variables
|
||||||
|
let updatedContent = envContent;
|
||||||
|
let hasChanges = false;
|
||||||
|
|
||||||
|
for (const [key, value] of Object.entries(sslEnvVars)) {
|
||||||
|
const regex = new RegExp(`^${key}=.*$`, 'm');
|
||||||
|
|
||||||
|
if (regex.test(updatedContent)) {
|
||||||
|
// Update existing variable
|
||||||
|
updatedContent = updatedContent.replace(regex, `${key}=${value}`);
|
||||||
|
} else {
|
||||||
|
// Add new variable
|
||||||
|
if (!updatedContent.includes(`# SSL Configuration`)) {
|
||||||
|
updatedContent += `\n# SSL Configuration (Auto-generated)\n`;
|
||||||
|
}
|
||||||
|
updatedContent += `${key}=${value}\n`;
|
||||||
|
hasChanges = true;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Write updated .env file if there are changes
|
||||||
|
if (hasChanges || !envContent) {
|
||||||
|
await fs.writeFile(this.ENV_FILE, updatedContent.trim() + '\n');
|
||||||
|
|
||||||
|
systemLogger.info("✅ SSL environment variables configured", {
|
||||||
|
operation: "ssl_env_configured",
|
||||||
|
file: this.ENV_FILE,
|
||||||
|
variables: Object.keys(sslEnvVars)
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
// Update process.env for current session
|
||||||
|
for (const [key, value] of Object.entries(sslEnvVars)) {
|
||||||
|
process.env[key] = value;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get SSL configuration for nginx/server
|
||||||
|
*/
|
||||||
|
static getSSLConfig() {
|
||||||
|
return {
|
||||||
|
enabled: process.env.ENABLE_SSL === "true",
|
||||||
|
port: parseInt(process.env.SSL_PORT || "8443"),
|
||||||
|
certPath: process.env.SSL_CERT_PATH || this.CERT_FILE,
|
||||||
|
keyPath: process.env.SSL_KEY_PATH || this.KEY_FILE,
|
||||||
|
domain: process.env.SSL_DOMAIN || "localhost"
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Display SSL setup information
|
||||||
|
*/
|
||||||
|
static logSSLInfo(): void {
|
||||||
|
const config = this.getSSLConfig();
|
||||||
|
|
||||||
|
if (config.enabled) {
|
||||||
|
console.log(`
|
||||||
|
╔══════════════════════════════════════════════════════════════╗
|
||||||
|
║ 🔒 Termix SSL/TLS Enabled ║
|
||||||
|
╠══════════════════════════════════════════════════════════════╣
|
||||||
|
║ HTTPS Port: ${config.port.toString().padEnd(47)} ║
|
||||||
|
║ HTTP Port: ${(process.env.PORT || "8080").padEnd(47)} ║
|
||||||
|
║ Domain: ${config.domain.padEnd(47)} ║
|
||||||
|
║ ║
|
||||||
|
║ 🌐 Access URLs: ║
|
||||||
|
║ • HTTPS: https://localhost:${config.port.toString().padEnd(31)} ║
|
||||||
|
║ • HTTP: http://localhost:${(process.env.PORT || "8080").padEnd(32)} ║
|
||||||
|
║ ║
|
||||||
|
║ 🔐 WebSocket connections automatically use WSS over HTTPS ║
|
||||||
|
║ ⚠️ Self-signed certificate will show browser warnings ║
|
||||||
|
╚══════════════════════════════════════════════════════════════╝
|
||||||
|
`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
313
src/backend/utils/data-crypto.ts
Normal file
@@ -0,0 +1,313 @@
|
|||||||
|
import { FieldCrypto } from "./field-crypto.js";
|
||||||
|
import { LazyFieldEncryption } from "./lazy-field-encryption.js";
|
||||||
|
import { UserCrypto } from "./user-crypto.js";
|
||||||
|
import { databaseLogger } from "./logger.js";
|
||||||
|
|
||||||
|
/**
|
||||||
|
* DataCrypto - Simplified database encryption
|
||||||
|
*
|
||||||
|
* Linus principles:
|
||||||
|
* - Remove all "backward compatibility" garbage
|
||||||
|
* - Remove all special case handling
|
||||||
|
* - Data is either properly encrypted or operation fails
|
||||||
|
* - No legacy data concept
|
||||||
|
*/
|
||||||
|
class DataCrypto {
|
||||||
|
private static userCrypto: UserCrypto;
|
||||||
|
|
||||||
|
static initialize() {
|
||||||
|
this.userCrypto = UserCrypto.getInstance();
|
||||||
|
databaseLogger.info("DataCrypto initialized - no legacy compatibility", {
|
||||||
|
operation: "data_crypto_init",
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Encrypt record - simple and direct
|
||||||
|
*/
|
||||||
|
static encryptRecord(tableName: string, record: any, userId: string, userDataKey: Buffer): any {
|
||||||
|
const encryptedRecord = { ...record };
|
||||||
|
const recordId = record.id || 'temp-' + Date.now();
|
||||||
|
|
||||||
|
for (const [fieldName, value] of Object.entries(record)) {
|
||||||
|
if (FieldCrypto.shouldEncryptField(tableName, fieldName) && value) {
|
||||||
|
encryptedRecord[fieldName] = FieldCrypto.encryptField(
|
||||||
|
value as string,
|
||||||
|
userDataKey,
|
||||||
|
recordId,
|
||||||
|
fieldName
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return encryptedRecord;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Decrypt record with lazy encryption support
|
||||||
|
* Handles both encrypted and plaintext fields (from migration)
|
||||||
|
*/
|
||||||
|
static decryptRecord(tableName: string, record: any, userId: string, userDataKey: Buffer): any {
|
||||||
|
if (!record) return record;
|
||||||
|
|
||||||
|
const decryptedRecord = { ...record };
|
||||||
|
const recordId = record.id;
|
||||||
|
|
||||||
|
for (const [fieldName, value] of Object.entries(record)) {
|
||||||
|
if (FieldCrypto.shouldEncryptField(tableName, fieldName) && value) {
|
||||||
|
// Use lazy encryption to handle both plaintext and encrypted data
|
||||||
|
decryptedRecord[fieldName] = LazyFieldEncryption.safeGetFieldValue(
|
||||||
|
value as string,
|
||||||
|
userDataKey,
|
||||||
|
recordId,
|
||||||
|
fieldName
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return decryptedRecord;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Batch decrypt
|
||||||
|
*/
|
||||||
|
static decryptRecords(tableName: string, records: any[], userId: string, userDataKey: Buffer): any[] {
|
||||||
|
if (!Array.isArray(records)) return records;
|
||||||
|
return records.map((record) => this.decryptRecord(tableName, record, userId, userDataKey));
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Migrate user's plaintext sensitive fields to encrypted format
|
||||||
|
* Called during user login to gradually encrypt legacy data
|
||||||
|
*/
|
||||||
|
static async migrateUserSensitiveFields(
|
||||||
|
userId: string,
|
||||||
|
userDataKey: Buffer,
|
||||||
|
db: any
|
||||||
|
): Promise<{
|
||||||
|
migrated: boolean;
|
||||||
|
migratedTables: string[];
|
||||||
|
migratedFieldsCount: number;
|
||||||
|
}> {
|
||||||
|
let migrated = false;
|
||||||
|
const migratedTables: string[] = [];
|
||||||
|
let migratedFieldsCount = 0;
|
||||||
|
|
||||||
|
try {
|
||||||
|
databaseLogger.info("Starting user sensitive fields migration", {
|
||||||
|
operation: "user_sensitive_migration_start",
|
||||||
|
userId,
|
||||||
|
});
|
||||||
|
|
||||||
|
// Check if migration is needed
|
||||||
|
const { needsMigration, plaintextFields } = await LazyFieldEncryption.checkUserNeedsMigration(
|
||||||
|
userId,
|
||||||
|
userDataKey,
|
||||||
|
db
|
||||||
|
);
|
||||||
|
|
||||||
|
if (!needsMigration) {
|
||||||
|
databaseLogger.info("No migration needed for user", {
|
||||||
|
operation: "user_sensitive_migration_not_needed",
|
||||||
|
userId,
|
||||||
|
});
|
||||||
|
return { migrated: false, migratedTables: [], migratedFieldsCount: 0 };
|
||||||
|
}
|
||||||
|
|
||||||
|
databaseLogger.info("User requires sensitive field migration", {
|
||||||
|
operation: "user_sensitive_migration_required",
|
||||||
|
userId,
|
||||||
|
plaintextFieldsCount: plaintextFields.length,
|
||||||
|
});
|
||||||
|
|
||||||
|
// Process ssh_data table
|
||||||
|
const sshDataRecords = db.prepare("SELECT * FROM ssh_data WHERE user_id = ?").all(userId);
|
||||||
|
for (const record of sshDataRecords) {
|
||||||
|
const sensitiveFields = LazyFieldEncryption.getSensitiveFieldsForTable('ssh_data');
|
||||||
|
const { updatedRecord, migratedFields, needsUpdate } = LazyFieldEncryption.migrateRecordSensitiveFields(
|
||||||
|
record,
|
||||||
|
sensitiveFields,
|
||||||
|
userDataKey,
|
||||||
|
record.id.toString()
|
||||||
|
);
|
||||||
|
|
||||||
|
if (needsUpdate) {
|
||||||
|
// Update the record in database
|
||||||
|
const updateQuery = `
|
||||||
|
UPDATE ssh_data
|
||||||
|
SET password = ?, key = ?, key_password = ?, updated_at = CURRENT_TIMESTAMP
|
||||||
|
WHERE id = ?
|
||||||
|
`;
|
||||||
|
db.prepare(updateQuery).run(
|
||||||
|
updatedRecord.password || null,
|
||||||
|
updatedRecord.key || null,
|
||||||
|
updatedRecord.key_password || null,
|
||||||
|
record.id
|
||||||
|
);
|
||||||
|
|
||||||
|
migratedFieldsCount += migratedFields.length;
|
||||||
|
if (!migratedTables.includes('ssh_data')) {
|
||||||
|
migratedTables.push('ssh_data');
|
||||||
|
}
|
||||||
|
migrated = true;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Process ssh_credentials table
|
||||||
|
const sshCredentialsRecords = db.prepare("SELECT * FROM ssh_credentials WHERE user_id = ?").all(userId);
|
||||||
|
for (const record of sshCredentialsRecords) {
|
||||||
|
const sensitiveFields = LazyFieldEncryption.getSensitiveFieldsForTable('ssh_credentials');
|
||||||
|
const { updatedRecord, migratedFields, needsUpdate } = LazyFieldEncryption.migrateRecordSensitiveFields(
|
||||||
|
record,
|
||||||
|
sensitiveFields,
|
||||||
|
userDataKey,
|
||||||
|
record.id.toString()
|
||||||
|
);
|
||||||
|
|
||||||
|
if (needsUpdate) {
|
||||||
|
// Update the record in database
|
||||||
|
const updateQuery = `
|
||||||
|
UPDATE ssh_credentials
|
||||||
|
SET password = ?, key = ?, key_password = ?, private_key = ?, updated_at = CURRENT_TIMESTAMP
|
||||||
|
WHERE id = ?
|
||||||
|
`;
|
||||||
|
db.prepare(updateQuery).run(
|
||||||
|
updatedRecord.password || null,
|
||||||
|
updatedRecord.key || null,
|
||||||
|
updatedRecord.key_password || null,
|
||||||
|
updatedRecord.private_key || null,
|
||||||
|
record.id
|
||||||
|
);
|
||||||
|
|
||||||
|
migratedFieldsCount += migratedFields.length;
|
||||||
|
if (!migratedTables.includes('ssh_credentials')) {
|
||||||
|
migratedTables.push('ssh_credentials');
|
||||||
|
}
|
||||||
|
migrated = true;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Process users table
|
||||||
|
const userRecord = db.prepare("SELECT * FROM users WHERE id = ?").get(userId);
|
||||||
|
if (userRecord) {
|
||||||
|
const sensitiveFields = LazyFieldEncryption.getSensitiveFieldsForTable('users');
|
||||||
|
const { updatedRecord, migratedFields, needsUpdate } = LazyFieldEncryption.migrateRecordSensitiveFields(
|
||||||
|
userRecord,
|
||||||
|
sensitiveFields,
|
||||||
|
userDataKey,
|
||||||
|
userId
|
||||||
|
);
|
||||||
|
|
||||||
|
if (needsUpdate) {
|
||||||
|
// Update the record in database
|
||||||
|
const updateQuery = `
|
||||||
|
UPDATE users
|
||||||
|
SET totp_secret = ?, totp_backup_codes = ?
|
||||||
|
WHERE id = ?
|
||||||
|
`;
|
||||||
|
db.prepare(updateQuery).run(
|
||||||
|
updatedRecord.totp_secret || null,
|
||||||
|
updatedRecord.totp_backup_codes || null,
|
||||||
|
userId
|
||||||
|
);
|
||||||
|
|
||||||
|
migratedFieldsCount += migratedFields.length;
|
||||||
|
if (!migratedTables.includes('users')) {
|
||||||
|
migratedTables.push('users');
|
||||||
|
}
|
||||||
|
migrated = true;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (migrated) {
|
||||||
|
databaseLogger.success("User sensitive fields migration completed", {
|
||||||
|
operation: "user_sensitive_migration_success",
|
||||||
|
userId,
|
||||||
|
migratedTables,
|
||||||
|
migratedFieldsCount,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
return { migrated, migratedTables, migratedFieldsCount };
|
||||||
|
|
||||||
|
} catch (error) {
|
||||||
|
databaseLogger.error("User sensitive fields migration failed", error, {
|
||||||
|
operation: "user_sensitive_migration_failed",
|
||||||
|
userId,
|
||||||
|
error: error instanceof Error ? error.message : "Unknown error",
|
||||||
|
});
|
||||||
|
|
||||||
|
// Don't throw error to avoid breaking user login
|
||||||
|
return { migrated: false, migratedTables: [], migratedFieldsCount: 0 };
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get user data key
|
||||||
|
*/
|
||||||
|
static getUserDataKey(userId: string): Buffer | null {
|
||||||
|
return this.userCrypto.getUserDataKey(userId);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Verify user access permissions - simple and direct
|
||||||
|
*/
|
||||||
|
static validateUserAccess(userId: string): Buffer {
|
||||||
|
const userDataKey = this.getUserDataKey(userId);
|
||||||
|
if (!userDataKey) {
|
||||||
|
throw new Error(`User ${userId} data not unlocked`);
|
||||||
|
}
|
||||||
|
return userDataKey;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Convenience method: automatically get user key and encrypt
|
||||||
|
*/
|
||||||
|
static encryptRecordForUser(tableName: string, record: any, userId: string): any {
|
||||||
|
const userDataKey = this.validateUserAccess(userId);
|
||||||
|
return this.encryptRecord(tableName, record, userId, userDataKey);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Convenience method: automatically get user key and decrypt
|
||||||
|
*/
|
||||||
|
static decryptRecordForUser(tableName: string, record: any, userId: string): any {
|
||||||
|
const userDataKey = this.validateUserAccess(userId);
|
||||||
|
return this.decryptRecord(tableName, record, userId, userDataKey);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Convenience method: batch decrypt
|
||||||
|
*/
|
||||||
|
static decryptRecordsForUser(tableName: string, records: any[], userId: string): any[] {
|
||||||
|
const userDataKey = this.validateUserAccess(userId);
|
||||||
|
return this.decryptRecords(tableName, records, userId, userDataKey);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check if user can access data
|
||||||
|
*/
|
||||||
|
static canUserAccessData(userId: string): boolean {
|
||||||
|
return this.userCrypto.isUserUnlocked(userId);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Test encryption functionality
|
||||||
|
*/
|
||||||
|
static testUserEncryption(userId: string): boolean {
|
||||||
|
try {
|
||||||
|
const userDataKey = this.getUserDataKey(userId);
|
||||||
|
if (!userDataKey) return false;
|
||||||
|
|
||||||
|
const testData = "test-" + Date.now();
|
||||||
|
const encrypted = FieldCrypto.encryptField(testData, userDataKey, "test-record", "test-field");
|
||||||
|
const decrypted = FieldCrypto.decryptField(encrypted, userDataKey, "test-record", "test-field");
|
||||||
|
|
||||||
|
return decrypted === testData;
|
||||||
|
} catch (error) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export { DataCrypto };
|
||||||
@@ -1,287 +0,0 @@
|
|||||||
import { FieldEncryption } from "./encryption.js";
|
|
||||||
import { EncryptionKeyManager } from "./encryption-key-manager.js";
|
|
||||||
import { databaseLogger } from "./logger.js";
|
|
||||||
|
|
||||||
// Runtime configuration for the legacy DatabaseEncryption module.
interface EncryptionContext {
  // Master secret from which per-field keys are derived
  // (see FieldEncryption.getFieldKey usage below).
  masterPassword: string;
  // When false, encryptRecord passes records through unchanged.
  encryptionEnabled: boolean;
  // When true, plaintext stored values are treated as errors: decryption
  // failures are rethrown and unencrypted fields are logged as warnings.
  forceEncryption: boolean;
  // When true, plaintext fields found during decryption are scheduled for
  // background re-encryption via scheduleFieldMigration.
  migrateOnAccess: boolean;
}
|
|
||||||
|
|
||||||
// Legacy whole-database field encryption keyed by a single master password.
// Superseded by the per-user DataCrypto module.
class DatabaseEncryption {
  // Lazily-populated configuration; null until initialize() succeeds.
  private static context: EncryptionContext | null = null;

  /**
   * Initialize the module. The master password comes from the config when
   * provided, otherwise from EncryptionKeyManager.initializeKey(); remaining
   * options default to enabled/non-forcing/migrate-on-access.
   */
  static async initialize(config: Partial<EncryptionContext> = {}) {
    const keyManager = EncryptionKeyManager.getInstance();
    const masterPassword =
      config.masterPassword || (await keyManager.initializeKey());

    this.context = {
      masterPassword,
      encryptionEnabled: config.encryptionEnabled ?? true,
      forceEncryption: config.forceEncryption ?? false,
      migrateOnAccess: config.migrateOnAccess ?? true,
    };

    databaseLogger.info("Database encryption initialized", {
      operation: "encryption_init",
      enabled: this.context.encryptionEnabled,
      forceEncryption: this.context.forceEncryption,
      dynamicKey: !config.masterPassword,
    });
  }

  /**
   * Return the active context.
   * @throws Error when initialize() has not been called yet
   */
  static getContext(): EncryptionContext {
    if (!this.context) {
      throw new Error(
        "DatabaseEncryption not initialized. Call initialize() first.",
      );
    }
    return this.context;
  }

  /**
   * Encrypt each sensitive field of a record (per FieldEncryption's
   * table/field policy) with a key derived from the master password and the
   * "table.field" label. Returns a copy; the input is not mutated.
   * Rethrows on any per-field encryption failure.
   */
  static encryptRecord(tableName: string, record: any): any {
    const context = this.getContext();
    // Pass-through when encryption is globally disabled.
    if (!context.encryptionEnabled) return record;

    const encryptedRecord = { ...record };
    let hasEncryption = false;

    for (const [fieldName, value] of Object.entries(record)) {
      if (FieldEncryption.shouldEncryptField(tableName, fieldName) && value) {
        try {
          const fieldKey = FieldEncryption.getFieldKey(
            context.masterPassword,
            `${tableName}.${fieldName}`,
          );
          encryptedRecord[fieldName] = FieldEncryption.encryptField(
            value as string,
            fieldKey,
          );
          hasEncryption = true;
        } catch (error) {
          databaseLogger.error(
            `Failed to encrypt field ${tableName}.${fieldName}`,
            error,
            {
              operation: "field_encryption",
              table: tableName,
              field: fieldName,
            },
          );
          throw error;
        }
      }
    }

    if (hasEncryption) {
      databaseLogger.debug(`Encrypted sensitive fields for ${tableName}`, {
        operation: "record_encryption",
        table: tableName,
      });
    }

    return encryptedRecord;
  }

  /**
   * Decrypt a record's sensitive fields. Plaintext values are tolerated
   * unless forceEncryption is set: they pass through and (with
   * migrateOnAccess) schedule a background migration. Decryption errors are
   * rethrown only in forceEncryption mode; otherwise the raw value is kept.
   */
  static decryptRecord(tableName: string, record: any): any {
    const context = this.getContext();
    if (!record) return record;

    const decryptedRecord = { ...record };
    let hasDecryption = false;
    let needsMigration = false;

    for (const [fieldName, value] of Object.entries(record)) {
      if (FieldEncryption.shouldEncryptField(tableName, fieldName) && value) {
        try {
          const fieldKey = FieldEncryption.getFieldKey(
            context.masterPassword,
            `${tableName}.${fieldName}`,
          );

          if (FieldEncryption.isEncrypted(value as string)) {
            decryptedRecord[fieldName] = FieldEncryption.decryptField(
              value as string,
              fieldKey,
            );
            hasDecryption = true;
          } else if (context.encryptionEnabled && !context.forceEncryption) {
            // Plaintext legacy value: keep it and (optionally) migrate later.
            decryptedRecord[fieldName] = value;
            needsMigration = context.migrateOnAccess;
          } else if (context.forceEncryption) {
            databaseLogger.warn(
              `Unencrypted field detected in force encryption mode`,
              {
                operation: "decryption_warning",
                table: tableName,
                field: fieldName,
              },
            );
            decryptedRecord[fieldName] = value;
          }
        } catch (error) {
          databaseLogger.error(
            `Failed to decrypt field ${tableName}.${fieldName}`,
            error,
            {
              operation: "field_decryption",
              table: tableName,
              field: fieldName,
            },
          );

          if (context.forceEncryption) {
            throw error;
          } else {
            // Best-effort mode: keep the raw stored value.
            decryptedRecord[fieldName] = value;
          }
        }
      }
    }

    if (needsMigration) {
      this.scheduleFieldMigration(tableName, record);
    }

    return decryptedRecord;
  }

  /** Batch decrypt; non-array input is returned untouched. */
  static decryptRecords(tableName: string, records: any[]): any[] {
    if (!Array.isArray(records)) return records;
    return records.map((record) => this.decryptRecord(tableName, record));
  }

  /**
   * Schedule a background migration of one record 1 s later; errors are
   * logged, not rethrown.
   *
   * NOTE(review): migrateRecord's return value (the re-encrypted record) is
   * discarded here and migrateRecord itself performs no database writes, so
   * this path never persists the migration — confirm whether a caller was
   * supposed to write the result back.
   */
  private static scheduleFieldMigration(tableName: string, record: any) {
    setTimeout(async () => {
      try {
        await this.migrateRecord(tableName, record);
      } catch (error) {
        databaseLogger.error(
          `Failed to migrate record ${tableName}:${record.id}`,
          error,
          {
            operation: "migration_failed",
            table: tableName,
            recordId: record.id,
          },
        );
      }
    }, 1000);
  }

  /**
   * Return a copy of the record with any plaintext sensitive fields
   * encrypted. Pass-through when encryption or migrate-on-access is off.
   * Does NOT persist the result — callers must write it back themselves.
   * Rethrows on any per-field encryption failure.
   */
  static async migrateRecord(tableName: string, record: any): Promise<any> {
    const context = this.getContext();
    if (!context.encryptionEnabled || !context.migrateOnAccess) return record;

    let needsUpdate = false;
    const updatedRecord = { ...record };

    for (const [fieldName, value] of Object.entries(record)) {
      if (
        FieldEncryption.shouldEncryptField(tableName, fieldName) &&
        value &&
        !FieldEncryption.isEncrypted(value as string)
      ) {
        try {
          const fieldKey = FieldEncryption.getFieldKey(
            context.masterPassword,
            `${tableName}.${fieldName}`,
          );
          updatedRecord[fieldName] = FieldEncryption.encryptField(
            value as string,
            fieldKey,
          );
          // Set but never read after the loop — see NOTE(review) above.
          needsUpdate = true;
        } catch (error) {
          databaseLogger.error(
            `Failed to migrate field ${tableName}.${fieldName}`,
            error,
            {
              operation: "field_migration",
              table: tableName,
              field: fieldName,
              recordId: record.id,
            },
          );
          throw error;
        }
      }
    }

    return updatedRecord;
  }

  /**
   * Round-trip a test value through FieldEncryption with a key derived from
   * the current master password; false on any failure (logged).
   */
  static validateConfiguration(): boolean {
    try {
      const context = this.getContext();
      const testData = "test-encryption-data";
      const testKey = FieldEncryption.getFieldKey(
        context.masterPassword,
        "test",
      );

      const encrypted = FieldEncryption.encryptField(testData, testKey);
      const decrypted = FieldEncryption.decryptField(encrypted, testKey);

      return decrypted === testData;
    } catch (error) {
      databaseLogger.error(
        "Encryption configuration validation failed",
        error,
        {
          operation: "config_validation",
        },
      );
      return false;
    }
  }

  /**
   * Summarize the current configuration; an uninitialized module reports
   * everything disabled/invalid instead of throwing.
   */
  static getEncryptionStatus() {
    try {
      const context = this.getContext();
      return {
        enabled: context.encryptionEnabled,
        forceEncryption: context.forceEncryption,
        migrateOnAccess: context.migrateOnAccess,
        configValid: this.validateConfiguration(),
      };
    } catch {
      return {
        enabled: false,
        forceEncryption: false,
        migrateOnAccess: false,
        configValid: false,
      };
    }
  }

  /**
   * Combine getEncryptionStatus with the key manager's status and an
   * initialization flag.
   */
  static async getDetailedStatus() {
    const keyManager = EncryptionKeyManager.getInstance();
    const keyStatus = await keyManager.getEncryptionStatus();
    const encryptionStatus = this.getEncryptionStatus();

    return {
      ...encryptionStatus,
      key: keyStatus,
      initialized: this.context !== null,
    };
  }

  /**
   * Regenerate the master key and reinitialize. Existing ciphertext becomes
   * undecryptable with the new key, hence the requiresMigration warning.
   */
  static async reinitializeWithNewKey(): Promise<void> {
    const keyManager = EncryptionKeyManager.getInstance();
    const newKey = await keyManager.regenerateKey();

    this.context = null;
    await this.initialize({ masterPassword: newKey });

    databaseLogger.warn("Database encryption reinitialized with new key", {
      operation: "encryption_reinit",
      requiresMigration: true,
    });
  }
}
|
|
||||||
|
|
||||||
export { DatabaseEncryption };
|
|
||||||
export type { EncryptionContext };
|
|
||||||
@@ -1,55 +1,45 @@
|
|||||||
import crypto from "crypto";
|
import crypto from "crypto";
|
||||||
import fs from "fs";
|
import fs from "fs";
|
||||||
import path from "path";
|
import path from "path";
|
||||||
import { HardwareFingerprint } from "./hardware-fingerprint.js";
|
|
||||||
import { databaseLogger } from "./logger.js";
|
import { databaseLogger } from "./logger.js";
|
||||||
|
import { SystemCrypto } from "./system-crypto.js";
|
||||||
|
|
||||||
interface EncryptedFileMetadata {
|
interface EncryptedFileMetadata {
|
||||||
iv: string;
|
iv: string;
|
||||||
tag: string;
|
tag: string;
|
||||||
version: string;
|
version: string;
|
||||||
fingerprint: string;
|
fingerprint: string;
|
||||||
salt: string;
|
|
||||||
algorithm: string;
|
algorithm: string;
|
||||||
|
keySource?: string; // Track where the key comes from (SystemCrypto) - v2 only
|
||||||
|
salt?: string; // Legacy v1 format only
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Database file encryption - encrypts the entire SQLite database file at rest
|
* Database file encryption - encrypts the entire SQLite database file at rest
|
||||||
* This provides an additional security layer on top of field-level encryption
|
* Uses SystemCrypto for key management - no more fixed seed garbage!
|
||||||
|
*
|
||||||
|
* Linus principles applied:
|
||||||
|
* - Remove hardcoded keys security disaster
|
||||||
|
* - Use SystemCrypto instance keys for proper per-instance security
|
||||||
|
* - Simple and direct, no complex key derivation
|
||||||
*/
|
*/
|
||||||
class DatabaseFileEncryption {
|
class DatabaseFileEncryption {
|
||||||
private static readonly VERSION = "v1";
|
private static readonly VERSION = "v2";
|
||||||
private static readonly ALGORITHM = "aes-256-gcm";
|
private static readonly ALGORITHM = "aes-256-gcm";
|
||||||
private static readonly KEY_ITERATIONS = 100000;
|
|
||||||
private static readonly ENCRYPTED_FILE_SUFFIX = ".encrypted";
|
private static readonly ENCRYPTED_FILE_SUFFIX = ".encrypted";
|
||||||
private static readonly METADATA_FILE_SUFFIX = ".meta";
|
private static readonly METADATA_FILE_SUFFIX = ".meta";
|
||||||
|
private static systemCrypto = SystemCrypto.getInstance();
|
||||||
/**
|
|
||||||
* Generate file encryption key from hardware fingerprint
|
|
||||||
*/
|
|
||||||
private static generateFileEncryptionKey(salt: Buffer): Buffer {
|
|
||||||
const hardwareFingerprint = HardwareFingerprint.generate();
|
|
||||||
|
|
||||||
const key = crypto.pbkdf2Sync(
|
|
||||||
hardwareFingerprint,
|
|
||||||
salt,
|
|
||||||
this.KEY_ITERATIONS,
|
|
||||||
32, // 256 bits for AES-256
|
|
||||||
"sha256",
|
|
||||||
);
|
|
||||||
|
|
||||||
return key;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Encrypt database from buffer (for in-memory databases)
|
* Encrypt database from buffer (for in-memory databases)
|
||||||
*/
|
*/
|
||||||
static encryptDatabaseFromBuffer(buffer: Buffer, targetPath: string): string {
|
static async encryptDatabaseFromBuffer(buffer: Buffer, targetPath: string): Promise<string> {
|
||||||
try {
|
try {
|
||||||
|
// Get database key from SystemCrypto (no more fixed seed garbage!)
|
||||||
|
const key = await this.systemCrypto.getDatabaseKey();
|
||||||
|
|
||||||
// Generate encryption components
|
// Generate encryption components
|
||||||
const salt = crypto.randomBytes(32);
|
|
||||||
const iv = crypto.randomBytes(16);
|
const iv = crypto.randomBytes(16);
|
||||||
const key = this.generateFileEncryptionKey(salt);
|
|
||||||
|
|
||||||
// Encrypt the buffer
|
// Encrypt the buffer
|
||||||
const cipher = crypto.createCipheriv(this.ALGORITHM, key, iv) as any;
|
const cipher = crypto.createCipheriv(this.ALGORITHM, key, iv) as any;
|
||||||
@@ -61,9 +51,9 @@ class DatabaseFileEncryption {
|
|||||||
iv: iv.toString("hex"),
|
iv: iv.toString("hex"),
|
||||||
tag: tag.toString("hex"),
|
tag: tag.toString("hex"),
|
||||||
version: this.VERSION,
|
version: this.VERSION,
|
||||||
fingerprint: HardwareFingerprint.generate().substring(0, 16),
|
fingerprint: "termix-v2-systemcrypto", // SystemCrypto managed key
|
||||||
salt: salt.toString("hex"),
|
|
||||||
algorithm: this.ALGORITHM,
|
algorithm: this.ALGORITHM,
|
||||||
|
keySource: "SystemCrypto",
|
||||||
};
|
};
|
||||||
|
|
||||||
// Write encrypted file and metadata
|
// Write encrypted file and metadata
|
||||||
@@ -86,7 +76,7 @@ class DatabaseFileEncryption {
|
|||||||
/**
|
/**
|
||||||
* Encrypt database file
|
* Encrypt database file
|
||||||
*/
|
*/
|
||||||
static encryptDatabaseFile(sourcePath: string, targetPath?: string): string {
|
static async encryptDatabaseFile(sourcePath: string, targetPath?: string): Promise<string> {
|
||||||
if (!fs.existsSync(sourcePath)) {
|
if (!fs.existsSync(sourcePath)) {
|
||||||
throw new Error(`Source database file does not exist: ${sourcePath}`);
|
throw new Error(`Source database file does not exist: ${sourcePath}`);
|
||||||
}
|
}
|
||||||
@@ -99,10 +89,11 @@ class DatabaseFileEncryption {
|
|||||||
// Read source file
|
// Read source file
|
||||||
const sourceData = fs.readFileSync(sourcePath);
|
const sourceData = fs.readFileSync(sourcePath);
|
||||||
|
|
||||||
|
// Get database key from SystemCrypto (no more fixed seed garbage!)
|
||||||
|
const key = await this.systemCrypto.getDatabaseKey();
|
||||||
|
|
||||||
// Generate encryption components
|
// Generate encryption components
|
||||||
const salt = crypto.randomBytes(32);
|
|
||||||
const iv = crypto.randomBytes(16);
|
const iv = crypto.randomBytes(16);
|
||||||
const key = this.generateFileEncryptionKey(salt);
|
|
||||||
|
|
||||||
// Encrypt the file
|
// Encrypt the file
|
||||||
const cipher = crypto.createCipheriv(this.ALGORITHM, key, iv) as any;
|
const cipher = crypto.createCipheriv(this.ALGORITHM, key, iv) as any;
|
||||||
@@ -117,9 +108,9 @@ class DatabaseFileEncryption {
|
|||||||
iv: iv.toString("hex"),
|
iv: iv.toString("hex"),
|
||||||
tag: tag.toString("hex"),
|
tag: tag.toString("hex"),
|
||||||
version: this.VERSION,
|
version: this.VERSION,
|
||||||
fingerprint: HardwareFingerprint.generate().substring(0, 16),
|
fingerprint: "termix-v2-systemcrypto", // SystemCrypto managed key
|
||||||
salt: salt.toString("hex"),
|
|
||||||
algorithm: this.ALGORITHM,
|
algorithm: this.ALGORITHM,
|
||||||
|
keySource: "SystemCrypto",
|
||||||
};
|
};
|
||||||
|
|
||||||
// Write encrypted file and metadata
|
// Write encrypted file and metadata
|
||||||
@@ -151,7 +142,7 @@ class DatabaseFileEncryption {
|
|||||||
/**
|
/**
|
||||||
* Decrypt database file to buffer (for in-memory usage)
|
* Decrypt database file to buffer (for in-memory usage)
|
||||||
*/
|
*/
|
||||||
static decryptDatabaseToBuffer(encryptedPath: string): Buffer {
|
static async decryptDatabaseToBuffer(encryptedPath: string): Promise<Buffer> {
|
||||||
if (!fs.existsSync(encryptedPath)) {
|
if (!fs.existsSync(encryptedPath)) {
|
||||||
throw new Error(
|
throw new Error(
|
||||||
`Encrypted database file does not exist: ${encryptedPath}`,
|
`Encrypted database file does not exist: ${encryptedPath}`,
|
||||||
@@ -168,28 +159,29 @@ class DatabaseFileEncryption {
|
|||||||
const metadataContent = fs.readFileSync(metadataPath, "utf8");
|
const metadataContent = fs.readFileSync(metadataPath, "utf8");
|
||||||
const metadata: EncryptedFileMetadata = JSON.parse(metadataContent);
|
const metadata: EncryptedFileMetadata = JSON.parse(metadataContent);
|
||||||
|
|
||||||
// Validate metadata version
|
|
||||||
if (metadata.version !== this.VERSION) {
|
|
||||||
throw new Error(`Unsupported encryption version: ${metadata.version}`);
|
|
||||||
}
|
|
||||||
|
|
||||||
// Validate hardware fingerprint
|
|
||||||
const currentFingerprint = HardwareFingerprint.generate().substring(
|
|
||||||
0,
|
|
||||||
16,
|
|
||||||
);
|
|
||||||
if (metadata.fingerprint !== currentFingerprint) {
|
|
||||||
throw new Error(
|
|
||||||
"Hardware fingerprint mismatch - database was encrypted on different hardware",
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
// Read encrypted data
|
// Read encrypted data
|
||||||
const encryptedData = fs.readFileSync(encryptedPath);
|
const encryptedData = fs.readFileSync(encryptedPath);
|
||||||
|
|
||||||
// Generate decryption key
|
// Get decryption key based on version
|
||||||
|
let key: Buffer;
|
||||||
|
if (metadata.version === "v2") {
|
||||||
|
// New v2 format: use SystemCrypto key
|
||||||
|
key = await this.systemCrypto.getDatabaseKey();
|
||||||
|
} else if (metadata.version === "v1") {
|
||||||
|
// Legacy v1 format: use deprecated salt-based key derivation
|
||||||
|
databaseLogger.warn("Decrypting legacy v1 encrypted database - consider upgrading", {
|
||||||
|
operation: "decrypt_legacy_v1",
|
||||||
|
path: encryptedPath
|
||||||
|
});
|
||||||
|
if (!metadata.salt) {
|
||||||
|
throw new Error("v1 encrypted file missing required salt field");
|
||||||
|
}
|
||||||
const salt = Buffer.from(metadata.salt, "hex");
|
const salt = Buffer.from(metadata.salt, "hex");
|
||||||
const key = this.generateFileEncryptionKey(salt);
|
const fixedSeed = process.env.DB_FILE_KEY || "termix-database-file-encryption-seed-v1";
|
||||||
|
key = crypto.pbkdf2Sync(fixedSeed, salt, 100000, 32, "sha256");
|
||||||
|
} else {
|
||||||
|
throw new Error(`Unsupported encryption version: ${metadata.version}`);
|
||||||
|
}
|
||||||
|
|
||||||
// Decrypt to buffer
|
// Decrypt to buffer
|
||||||
const decipher = crypto.createDecipheriv(
|
const decipher = crypto.createDecipheriv(
|
||||||
@@ -219,10 +211,10 @@ class DatabaseFileEncryption {
|
|||||||
/**
|
/**
|
||||||
* Decrypt database file
|
* Decrypt database file
|
||||||
*/
|
*/
|
||||||
static decryptDatabaseFile(
|
static async decryptDatabaseFile(
|
||||||
encryptedPath: string,
|
encryptedPath: string,
|
||||||
targetPath?: string,
|
targetPath?: string,
|
||||||
): string {
|
): Promise<string> {
|
||||||
if (!fs.existsSync(encryptedPath)) {
|
if (!fs.existsSync(encryptedPath)) {
|
||||||
throw new Error(
|
throw new Error(
|
||||||
`Encrypted database file does not exist: ${encryptedPath}`,
|
`Encrypted database file does not exist: ${encryptedPath}`,
|
||||||
@@ -242,33 +234,29 @@ class DatabaseFileEncryption {
|
|||||||
const metadataContent = fs.readFileSync(metadataPath, "utf8");
|
const metadataContent = fs.readFileSync(metadataPath, "utf8");
|
||||||
const metadata: EncryptedFileMetadata = JSON.parse(metadataContent);
|
const metadata: EncryptedFileMetadata = JSON.parse(metadataContent);
|
||||||
|
|
||||||
// Validate metadata version
|
|
||||||
if (metadata.version !== this.VERSION) {
|
|
||||||
throw new Error(`Unsupported encryption version: ${metadata.version}`);
|
|
||||||
}
|
|
||||||
|
|
||||||
// Validate hardware fingerprint
|
|
||||||
const currentFingerprint = HardwareFingerprint.generate().substring(
|
|
||||||
0,
|
|
||||||
16,
|
|
||||||
);
|
|
||||||
if (metadata.fingerprint !== currentFingerprint) {
|
|
||||||
databaseLogger.warn("Hardware fingerprint mismatch for database file", {
|
|
||||||
operation: "database_file_decryption",
|
|
||||||
expected: metadata.fingerprint,
|
|
||||||
current: currentFingerprint,
|
|
||||||
});
|
|
||||||
throw new Error(
|
|
||||||
"Hardware fingerprint mismatch - database was encrypted on different hardware",
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
// Read encrypted data
|
// Read encrypted data
|
||||||
const encryptedData = fs.readFileSync(encryptedPath);
|
const encryptedData = fs.readFileSync(encryptedPath);
|
||||||
|
|
||||||
// Generate decryption key
|
// Get decryption key based on version
|
||||||
|
let key: Buffer;
|
||||||
|
if (metadata.version === "v2") {
|
||||||
|
// New v2 format: use SystemCrypto key
|
||||||
|
key = await this.systemCrypto.getDatabaseKey();
|
||||||
|
} else if (metadata.version === "v1") {
|
||||||
|
// Legacy v1 format: use deprecated salt-based key derivation
|
||||||
|
databaseLogger.warn("Decrypting legacy v1 encrypted database - consider upgrading", {
|
||||||
|
operation: "decrypt_legacy_v1",
|
||||||
|
path: encryptedPath
|
||||||
|
});
|
||||||
|
if (!metadata.salt) {
|
||||||
|
throw new Error("v1 encrypted file missing required salt field");
|
||||||
|
}
|
||||||
const salt = Buffer.from(metadata.salt, "hex");
|
const salt = Buffer.from(metadata.salt, "hex");
|
||||||
const key = this.generateFileEncryptionKey(salt);
|
const fixedSeed = process.env.DB_FILE_KEY || "termix-database-file-encryption-seed-v1";
|
||||||
|
key = crypto.pbkdf2Sync(fixedSeed, salt, 100000, 32, "sha256");
|
||||||
|
} else {
|
||||||
|
throw new Error(`Unsupported encryption version: ${metadata.version}`);
|
||||||
|
}
|
||||||
|
|
||||||
// Decrypt the file
|
// Decrypt the file
|
||||||
const decipher = crypto.createDecipheriv(
|
const decipher = crypto.createDecipheriv(
|
||||||
@@ -350,16 +338,13 @@ class DatabaseFileEncryption {
|
|||||||
const metadata: EncryptedFileMetadata = JSON.parse(metadataContent);
|
const metadata: EncryptedFileMetadata = JSON.parse(metadataContent);
|
||||||
|
|
||||||
const fileStats = fs.statSync(encryptedPath);
|
const fileStats = fs.statSync(encryptedPath);
|
||||||
const currentFingerprint = HardwareFingerprint.generate().substring(
|
const currentFingerprint = "termix-v1-file"; // Fixed identifier
|
||||||
0,
|
|
||||||
16,
|
|
||||||
);
|
|
||||||
|
|
||||||
return {
|
return {
|
||||||
version: metadata.version,
|
version: metadata.version,
|
||||||
algorithm: metadata.algorithm,
|
algorithm: metadata.algorithm,
|
||||||
fingerprint: metadata.fingerprint,
|
fingerprint: metadata.fingerprint,
|
||||||
isCurrentHardware: metadata.fingerprint === currentFingerprint,
|
isCurrentHardware: true, // Hardware validation removed
|
||||||
fileSize: fileStats.size,
|
fileSize: fileStats.size,
|
||||||
};
|
};
|
||||||
} catch {
|
} catch {
|
||||||
@@ -370,10 +355,10 @@ class DatabaseFileEncryption {
|
|||||||
/**
|
/**
|
||||||
* Securely backup database by creating encrypted copy
|
* Securely backup database by creating encrypted copy
|
||||||
*/
|
*/
|
||||||
static createEncryptedBackup(
|
static async createEncryptedBackup(
|
||||||
databasePath: string,
|
databasePath: string,
|
||||||
backupDir: string,
|
backupDir: string,
|
||||||
): string {
|
): Promise<string> {
|
||||||
if (!fs.existsSync(databasePath)) {
|
if (!fs.existsSync(databasePath)) {
|
||||||
throw new Error(`Database file does not exist: ${databasePath}`);
|
throw new Error(`Database file does not exist: ${databasePath}`);
|
||||||
}
|
}
|
||||||
@@ -389,7 +374,7 @@ class DatabaseFileEncryption {
|
|||||||
const backupPath = path.join(backupDir, backupFileName);
|
const backupPath = path.join(backupDir, backupFileName);
|
||||||
|
|
||||||
try {
|
try {
|
||||||
const encryptedPath = this.encryptDatabaseFile(databasePath, backupPath);
|
const encryptedPath = await this.encryptDatabaseFile(databasePath, backupPath);
|
||||||
|
|
||||||
databaseLogger.info("Encrypted database backup created", {
|
databaseLogger.info("Encrypted database backup created", {
|
||||||
operation: "database_backup",
|
operation: "database_backup",
|
||||||
@@ -412,16 +397,16 @@ class DatabaseFileEncryption {
|
|||||||
/**
|
/**
|
||||||
* Restore database from encrypted backup
|
* Restore database from encrypted backup
|
||||||
*/
|
*/
|
||||||
static restoreFromEncryptedBackup(
|
static async restoreFromEncryptedBackup(
|
||||||
backupPath: string,
|
backupPath: string,
|
||||||
targetPath: string,
|
targetPath: string,
|
||||||
): string {
|
): Promise<string> {
|
||||||
if (!this.isEncryptedDatabaseFile(backupPath)) {
|
if (!this.isEncryptedDatabaseFile(backupPath)) {
|
||||||
throw new Error("Invalid encrypted backup file");
|
throw new Error("Invalid encrypted backup file");
|
||||||
}
|
}
|
||||||
|
|
||||||
try {
|
try {
|
||||||
const restoredPath = this.decryptDatabaseFile(backupPath, targetPath);
|
const restoredPath = await this.decryptDatabaseFile(backupPath, targetPath);
|
||||||
|
|
||||||
databaseLogger.info("Database restored from encrypted backup", {
|
databaseLogger.info("Database restored from encrypted backup", {
|
||||||
operation: "database_restore",
|
operation: "database_restore",
|
||||||
@@ -440,17 +425,6 @@ class DatabaseFileEncryption {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
|
||||||
* Validate hardware compatibility for encrypted file
|
|
||||||
*/
|
|
||||||
static validateHardwareCompatibility(encryptedPath: string): boolean {
|
|
||||||
try {
|
|
||||||
const info = this.getEncryptedFileInfo(encryptedPath);
|
|
||||||
return info?.isCurrentHardware ?? false;
|
|
||||||
} catch {
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Clean up temporary files
|
* Clean up temporary files
|
||||||
|
|||||||
@@ -1,504 +1,457 @@
|
|||||||
|
import Database from "better-sqlite3";
|
||||||
import fs from "fs";
|
import fs from "fs";
|
||||||
import path from "path";
|
import path from "path";
|
||||||
import crypto from "crypto";
|
|
||||||
import { DatabaseFileEncryption } from "./database-file-encryption.js";
|
|
||||||
import { DatabaseEncryption } from "./database-encryption.js";
|
|
||||||
import { FieldEncryption } from "./encryption.js";
|
|
||||||
import { HardwareFingerprint } from "./hardware-fingerprint.js";
|
|
||||||
import { databaseLogger } from "./logger.js";
|
import { databaseLogger } from "./logger.js";
|
||||||
import { db, databasePaths } from "../database/db/index.js";
|
import { DatabaseFileEncryption } from "./database-file-encryption.js";
|
||||||
import {
|
|
||||||
users,
|
|
||||||
sshData,
|
|
||||||
sshCredentials,
|
|
||||||
settings,
|
|
||||||
fileManagerRecent,
|
|
||||||
fileManagerPinned,
|
|
||||||
fileManagerShortcuts,
|
|
||||||
dismissedAlerts,
|
|
||||||
sshCredentialUsage,
|
|
||||||
} from "../database/db/schema.js";
|
|
||||||
|
|
||||||
interface ExportMetadata {
|
export interface MigrationResult {
|
||||||
version: string;
|
|
||||||
exportedAt: string;
|
|
||||||
exportId: string;
|
|
||||||
sourceHardwareFingerprint: string;
|
|
||||||
tableCount: number;
|
|
||||||
recordCount: number;
|
|
||||||
encryptedFields: string[];
|
|
||||||
}
|
|
||||||
|
|
||||||
interface MigrationExport {
|
|
||||||
metadata: ExportMetadata;
|
|
||||||
data: {
|
|
||||||
[tableName: string]: any[];
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
interface ImportResult {
|
|
||||||
success: boolean;
|
success: boolean;
|
||||||
imported: {
|
error?: string;
|
||||||
tables: number;
|
migratedTables: number;
|
||||||
records: number;
|
migratedRows: number;
|
||||||
};
|
backupPath?: string;
|
||||||
errors: string[];
|
duration: number;
|
||||||
warnings: string[];
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
export interface MigrationStatus {
|
||||||
* Database migration utility for exporting/importing data between different hardware
|
needsMigration: boolean;
|
||||||
* Handles both field-level and file-level encryption/decryption during migration
|
hasUnencryptedDb: boolean;
|
||||||
*/
|
hasEncryptedDb: boolean;
|
||||||
class DatabaseMigration {
|
unencryptedDbSize: number;
|
||||||
private static readonly VERSION = "v1";
|
reason: string;
|
||||||
private static readonly EXPORT_FILE_EXTENSION = ".termix-export.json";
|
}
|
||||||
|
|
||||||
/**
|
export class DatabaseMigration {
|
||||||
* Export database for migration
|
private dataDir: string;
|
||||||
* Decrypts all encrypted fields for transport to new hardware
|
private unencryptedDbPath: string;
|
||||||
*/
|
private encryptedDbPath: string;
|
||||||
static async exportDatabase(exportPath?: string): Promise<string> {
|
|
||||||
const exportId = crypto.randomUUID();
|
|
||||||
const timestamp = new Date().toISOString();
|
|
||||||
const defaultExportPath = path.join(
|
|
||||||
databasePaths.directory,
|
|
||||||
`termix-export-${timestamp.replace(/[:.]/g, "-")}${this.EXPORT_FILE_EXTENSION}`,
|
|
||||||
);
|
|
||||||
const actualExportPath = exportPath || defaultExportPath;
|
|
||||||
|
|
||||||
try {
|
constructor(dataDir: string) {
|
||||||
databaseLogger.info("Starting database export for migration", {
|
this.dataDir = dataDir;
|
||||||
operation: "database_export",
|
this.unencryptedDbPath = path.join(dataDir, "db.sqlite");
|
||||||
exportId,
|
this.encryptedDbPath = `${this.unencryptedDbPath}.encrypted`;
|
||||||
exportPath: actualExportPath,
|
|
||||||
});
|
|
||||||
|
|
||||||
// Define tables to export and their encryption status
|
|
||||||
const tablesToExport = [
|
|
||||||
{ name: "users", table: users, hasEncryption: true },
|
|
||||||
{ name: "ssh_data", table: sshData, hasEncryption: true },
|
|
||||||
{ name: "ssh_credentials", table: sshCredentials, hasEncryption: true },
|
|
||||||
{ name: "settings", table: settings, hasEncryption: false },
|
|
||||||
{
|
|
||||||
name: "file_manager_recent",
|
|
||||||
table: fileManagerRecent,
|
|
||||||
hasEncryption: false,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: "file_manager_pinned",
|
|
||||||
table: fileManagerPinned,
|
|
||||||
hasEncryption: false,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: "file_manager_shortcuts",
|
|
||||||
table: fileManagerShortcuts,
|
|
||||||
hasEncryption: false,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: "dismissed_alerts",
|
|
||||||
table: dismissedAlerts,
|
|
||||||
hasEncryption: false,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: "ssh_credential_usage",
|
|
||||||
table: sshCredentialUsage,
|
|
||||||
hasEncryption: false,
|
|
||||||
},
|
|
||||||
];
|
|
||||||
|
|
||||||
const exportData: MigrationExport = {
|
|
||||||
metadata: {
|
|
||||||
version: this.VERSION,
|
|
||||||
exportedAt: timestamp,
|
|
||||||
exportId,
|
|
||||||
sourceHardwareFingerprint: HardwareFingerprint.generate().substring(
|
|
||||||
0,
|
|
||||||
16,
|
|
||||||
),
|
|
||||||
tableCount: 0,
|
|
||||||
recordCount: 0,
|
|
||||||
encryptedFields: [],
|
|
||||||
},
|
|
||||||
data: {},
|
|
||||||
};
|
|
||||||
|
|
||||||
let totalRecords = 0;
|
|
||||||
|
|
||||||
// Export each table
|
|
||||||
for (const tableInfo of tablesToExport) {
|
|
||||||
try {
|
|
||||||
databaseLogger.debug(`Exporting table: ${tableInfo.name}`, {
|
|
||||||
operation: "table_export",
|
|
||||||
table: tableInfo.name,
|
|
||||||
hasEncryption: tableInfo.hasEncryption,
|
|
||||||
});
|
|
||||||
|
|
||||||
// Query all records from the table
|
|
||||||
const records = await db.select().from(tableInfo.table);
|
|
||||||
|
|
||||||
// Decrypt encrypted fields if necessary
|
|
||||||
let processedRecords = records;
|
|
||||||
if (tableInfo.hasEncryption && records.length > 0) {
|
|
||||||
processedRecords = records.map((record) => {
|
|
||||||
try {
|
|
||||||
return DatabaseEncryption.decryptRecord(tableInfo.name, record);
|
|
||||||
} catch (error) {
|
|
||||||
databaseLogger.warn(
|
|
||||||
`Failed to decrypt record in ${tableInfo.name}`,
|
|
||||||
{
|
|
||||||
operation: "export_decrypt_warning",
|
|
||||||
table: tableInfo.name,
|
|
||||||
recordId: (record as any).id,
|
|
||||||
error:
|
|
||||||
error instanceof Error ? error.message : "Unknown error",
|
|
||||||
},
|
|
||||||
);
|
|
||||||
// Return original record if decryption fails
|
|
||||||
return record;
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
// Track which fields were encrypted
|
|
||||||
if (records.length > 0) {
|
|
||||||
const sampleRecord = records[0];
|
|
||||||
for (const fieldName of Object.keys(sampleRecord)) {
|
|
||||||
if (
|
|
||||||
FieldEncryption.shouldEncryptField(tableInfo.name, fieldName)
|
|
||||||
) {
|
|
||||||
const fieldKey = `${tableInfo.name}.${fieldName}`;
|
|
||||||
if (!exportData.metadata.encryptedFields.includes(fieldKey)) {
|
|
||||||
exportData.metadata.encryptedFields.push(fieldKey);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
exportData.data[tableInfo.name] = processedRecords;
|
|
||||||
totalRecords += processedRecords.length;
|
|
||||||
|
|
||||||
databaseLogger.debug(`Table ${tableInfo.name} exported`, {
|
|
||||||
operation: "table_export_complete",
|
|
||||||
table: tableInfo.name,
|
|
||||||
recordCount: processedRecords.length,
|
|
||||||
});
|
|
||||||
} catch (error) {
|
|
||||||
databaseLogger.error(
|
|
||||||
`Failed to export table ${tableInfo.name}`,
|
|
||||||
error,
|
|
||||||
{
|
|
||||||
operation: "table_export_failed",
|
|
||||||
table: tableInfo.name,
|
|
||||||
},
|
|
||||||
);
|
|
||||||
throw error;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Update metadata
|
|
||||||
exportData.metadata.tableCount = tablesToExport.length;
|
|
||||||
exportData.metadata.recordCount = totalRecords;
|
|
||||||
|
|
||||||
// Write export file
|
|
||||||
const exportContent = JSON.stringify(exportData, null, 2);
|
|
||||||
fs.writeFileSync(actualExportPath, exportContent, "utf8");
|
|
||||||
|
|
||||||
databaseLogger.success("Database export completed successfully", {
|
|
||||||
operation: "database_export_complete",
|
|
||||||
exportId,
|
|
||||||
exportPath: actualExportPath,
|
|
||||||
tableCount: exportData.metadata.tableCount,
|
|
||||||
recordCount: exportData.metadata.recordCount,
|
|
||||||
fileSize: exportContent.length,
|
|
||||||
});
|
|
||||||
|
|
||||||
return actualExportPath;
|
|
||||||
} catch (error) {
|
|
||||||
databaseLogger.error("Database export failed", error, {
|
|
||||||
operation: "database_export_failed",
|
|
||||||
exportId,
|
|
||||||
exportPath: actualExportPath,
|
|
||||||
});
|
|
||||||
throw new Error(
|
|
||||||
`Database export failed: ${error instanceof Error ? error.message : "Unknown error"}`,
|
|
||||||
);
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Import database from migration export
|
* 检查是否需要迁移以及迁移状态
|
||||||
* Re-encrypts fields for the current hardware
|
|
||||||
*/
|
*/
|
||||||
static async importDatabase(
|
checkMigrationStatus(): MigrationStatus {
|
||||||
importPath: string,
|
const hasUnencryptedDb = fs.existsSync(this.unencryptedDbPath);
|
||||||
options: {
|
const hasEncryptedDb = DatabaseFileEncryption.isEncryptedDatabaseFile(this.encryptedDbPath);
|
||||||
replaceExisting?: boolean;
|
|
||||||
backupCurrent?: boolean;
|
|
||||||
} = {},
|
|
||||||
): Promise<ImportResult> {
|
|
||||||
const { replaceExisting = false, backupCurrent = true } = options;
|
|
||||||
|
|
||||||
if (!fs.existsSync(importPath)) {
|
|
||||||
throw new Error(`Import file does not exist: ${importPath}`);
|
|
||||||
}
|
|
||||||
|
|
||||||
|
let unencryptedDbSize = 0;
|
||||||
|
if (hasUnencryptedDb) {
|
||||||
try {
|
try {
|
||||||
databaseLogger.info("Starting database import from migration export", {
|
unencryptedDbSize = fs.statSync(this.unencryptedDbPath).size;
|
||||||
operation: "database_import",
|
|
||||||
importPath,
|
|
||||||
replaceExisting,
|
|
||||||
backupCurrent,
|
|
||||||
});
|
|
||||||
|
|
||||||
// Read and validate export file
|
|
||||||
const exportContent = fs.readFileSync(importPath, "utf8");
|
|
||||||
const exportData: MigrationExport = JSON.parse(exportContent);
|
|
||||||
|
|
||||||
// Validate export format
|
|
||||||
if (exportData.metadata.version !== this.VERSION) {
|
|
||||||
throw new Error(
|
|
||||||
`Unsupported export version: ${exportData.metadata.version}`,
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
const result: ImportResult = {
|
|
||||||
success: false,
|
|
||||||
imported: { tables: 0, records: 0 },
|
|
||||||
errors: [],
|
|
||||||
warnings: [],
|
|
||||||
};
|
|
||||||
|
|
||||||
// Create backup if requested
|
|
||||||
if (backupCurrent) {
|
|
||||||
try {
|
|
||||||
const backupPath = await this.createCurrentDatabaseBackup();
|
|
||||||
databaseLogger.info("Current database backed up before import", {
|
|
||||||
operation: "import_backup",
|
|
||||||
backupPath,
|
|
||||||
});
|
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
const warningMsg = `Failed to create backup: ${error instanceof Error ? error.message : "Unknown error"}`;
|
databaseLogger.warn("Could not get unencrypted database file size", {
|
||||||
result.warnings.push(warningMsg);
|
operation: "migration_status_check",
|
||||||
databaseLogger.warn("Failed to create pre-import backup", {
|
error: error instanceof Error ? error.message : "Unknown error",
|
||||||
operation: "import_backup_failed",
|
|
||||||
error: warningMsg,
|
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Import data table by table
|
// 确定迁移状态
|
||||||
for (const [tableName, tableData] of Object.entries(exportData.data)) {
|
let needsMigration = false;
|
||||||
try {
|
let reason = "";
|
||||||
databaseLogger.debug(`Importing table: ${tableName}`, {
|
|
||||||
operation: "table_import",
|
|
||||||
table: tableName,
|
|
||||||
recordCount: tableData.length,
|
|
||||||
});
|
|
||||||
|
|
||||||
if (replaceExisting) {
|
if (hasEncryptedDb && hasUnencryptedDb) {
|
||||||
// Clear existing data
|
// 两个都存在:可能是之前迁移失败或中断
|
||||||
const tableSchema = this.getTableSchema(tableName);
|
needsMigration = false;
|
||||||
if (tableSchema) {
|
reason = "Both encrypted and unencrypted databases exist. Skipping migration for safety. Manual intervention may be required.";
|
||||||
await db.delete(tableSchema);
|
} else if (hasEncryptedDb && !hasUnencryptedDb) {
|
||||||
databaseLogger.debug(`Cleared existing data from ${tableName}`, {
|
// 只有加密数据库:无需迁移
|
||||||
operation: "table_clear",
|
needsMigration = false;
|
||||||
table: tableName,
|
reason = "Only encrypted database exists. No migration needed.";
|
||||||
});
|
} else if (!hasEncryptedDb && hasUnencryptedDb) {
|
||||||
}
|
// 只有未加密数据库:需要迁移
|
||||||
}
|
needsMigration = true;
|
||||||
|
reason = "Unencrypted database found. Migration to encrypted format required.";
|
||||||
// Process and encrypt records
|
|
||||||
for (const record of tableData) {
|
|
||||||
try {
|
|
||||||
// Re-encrypt sensitive fields for current hardware
|
|
||||||
const processedRecord = DatabaseEncryption.encryptRecord(
|
|
||||||
tableName,
|
|
||||||
record,
|
|
||||||
);
|
|
||||||
|
|
||||||
// Insert record
|
|
||||||
const tableSchema = this.getTableSchema(tableName);
|
|
||||||
if (tableSchema) {
|
|
||||||
await db.insert(tableSchema).values(processedRecord);
|
|
||||||
}
|
|
||||||
} catch (error) {
|
|
||||||
const errorMsg = `Failed to import record in ${tableName}: ${error instanceof Error ? error.message : "Unknown error"}`;
|
|
||||||
result.errors.push(errorMsg);
|
|
||||||
databaseLogger.error("Failed to import record", error, {
|
|
||||||
operation: "record_import_failed",
|
|
||||||
table: tableName,
|
|
||||||
recordId: record.id,
|
|
||||||
});
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
result.imported.tables++;
|
|
||||||
result.imported.records += tableData.length;
|
|
||||||
|
|
||||||
databaseLogger.debug(`Table ${tableName} imported`, {
|
|
||||||
operation: "table_import_complete",
|
|
||||||
table: tableName,
|
|
||||||
recordCount: tableData.length,
|
|
||||||
});
|
|
||||||
} catch (error) {
|
|
||||||
const errorMsg = `Failed to import table ${tableName}: ${error instanceof Error ? error.message : "Unknown error"}`;
|
|
||||||
result.errors.push(errorMsg);
|
|
||||||
databaseLogger.error("Failed to import table", error, {
|
|
||||||
operation: "table_import_failed",
|
|
||||||
table: tableName,
|
|
||||||
});
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Check if import was successful
|
|
||||||
result.success = result.errors.length === 0;
|
|
||||||
|
|
||||||
if (result.success) {
|
|
||||||
databaseLogger.success("Database import completed successfully", {
|
|
||||||
operation: "database_import_complete",
|
|
||||||
importPath,
|
|
||||||
tablesImported: result.imported.tables,
|
|
||||||
recordsImported: result.imported.records,
|
|
||||||
warnings: result.warnings.length,
|
|
||||||
});
|
|
||||||
} else {
|
} else {
|
||||||
databaseLogger.error(
|
// 都不存在:全新安装
|
||||||
"Database import completed with errors",
|
needsMigration = false;
|
||||||
undefined,
|
reason = "No existing database found. This is a fresh installation.";
|
||||||
{
|
|
||||||
operation: "database_import_partial",
|
|
||||||
importPath,
|
|
||||||
tablesImported: result.imported.tables,
|
|
||||||
recordsImported: result.imported.records,
|
|
||||||
errorCount: result.errors.length,
|
|
||||||
warningCount: result.warnings.length,
|
|
||||||
},
|
|
||||||
);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
return result;
|
return {
|
||||||
} catch (error) {
|
needsMigration,
|
||||||
databaseLogger.error("Database import failed", error, {
|
hasUnencryptedDb,
|
||||||
operation: "database_import_failed",
|
hasEncryptedDb,
|
||||||
importPath,
|
unencryptedDbSize,
|
||||||
});
|
reason,
|
||||||
throw new Error(
|
};
|
||||||
`Database import failed: ${error instanceof Error ? error.message : "Unknown error"}`,
|
|
||||||
);
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Validate export file format and compatibility
|
* 创建未加密数据库的安全备份
|
||||||
*/
|
*/
|
||||||
static validateExportFile(exportPath: string): {
|
private createBackup(): string {
|
||||||
valid: boolean;
|
const timestamp = new Date().toISOString().replace(/[:.]/g, '-');
|
||||||
metadata?: ExportMetadata;
|
const backupPath = `${this.unencryptedDbPath}.migration-backup-${timestamp}`;
|
||||||
errors: string[];
|
|
||||||
} {
|
|
||||||
const result = {
|
|
||||||
valid: false,
|
|
||||||
metadata: undefined as ExportMetadata | undefined,
|
|
||||||
errors: [] as string[],
|
|
||||||
};
|
|
||||||
|
|
||||||
try {
|
try {
|
||||||
if (!fs.existsSync(exportPath)) {
|
databaseLogger.info("Creating migration backup", {
|
||||||
result.errors.push("Export file does not exist");
|
operation: "migration_backup_create",
|
||||||
return result;
|
source: this.unencryptedDbPath,
|
||||||
|
backup: backupPath,
|
||||||
|
});
|
||||||
|
|
||||||
|
fs.copyFileSync(this.unencryptedDbPath, backupPath);
|
||||||
|
|
||||||
|
// 验证备份完整性
|
||||||
|
const originalSize = fs.statSync(this.unencryptedDbPath).size;
|
||||||
|
const backupSize = fs.statSync(backupPath).size;
|
||||||
|
|
||||||
|
if (originalSize !== backupSize) {
|
||||||
|
throw new Error(`Backup size mismatch: original=${originalSize}, backup=${backupSize}`);
|
||||||
}
|
}
|
||||||
|
|
||||||
const exportContent = fs.readFileSync(exportPath, "utf8");
|
databaseLogger.success("Migration backup created successfully", {
|
||||||
const exportData: MigrationExport = JSON.parse(exportContent);
|
operation: "migration_backup_created",
|
||||||
|
backupPath,
|
||||||
// Validate structure
|
fileSize: backupSize,
|
||||||
if (!exportData.metadata || !exportData.data) {
|
});
|
||||||
result.errors.push("Invalid export file structure");
|
|
||||||
return result;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Validate version
|
|
||||||
if (exportData.metadata.version !== this.VERSION) {
|
|
||||||
result.errors.push(
|
|
||||||
`Unsupported export version: ${exportData.metadata.version}`,
|
|
||||||
);
|
|
||||||
return result;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Validate required metadata fields
|
|
||||||
const requiredFields = [
|
|
||||||
"exportedAt",
|
|
||||||
"exportId",
|
|
||||||
"sourceHardwareFingerprint",
|
|
||||||
];
|
|
||||||
for (const field of requiredFields) {
|
|
||||||
if (!exportData.metadata[field as keyof ExportMetadata]) {
|
|
||||||
result.errors.push(`Missing required metadata field: ${field}`);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if (result.errors.length === 0) {
|
|
||||||
result.valid = true;
|
|
||||||
result.metadata = exportData.metadata;
|
|
||||||
}
|
|
||||||
|
|
||||||
return result;
|
|
||||||
} catch (error) {
|
|
||||||
result.errors.push(
|
|
||||||
`Failed to parse export file: ${error instanceof Error ? error.message : "Unknown error"}`,
|
|
||||||
);
|
|
||||||
return result;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Create backup of current database
|
|
||||||
*/
|
|
||||||
private static async createCurrentDatabaseBackup(): Promise<string> {
|
|
||||||
const timestamp = new Date().toISOString().replace(/[:.]/g, "-");
|
|
||||||
const backupDir = path.join(databasePaths.directory, "backups");
|
|
||||||
|
|
||||||
if (!fs.existsSync(backupDir)) {
|
|
||||||
fs.mkdirSync(backupDir, { recursive: true });
|
|
||||||
}
|
|
||||||
|
|
||||||
// Create encrypted backup
|
|
||||||
const backupPath = DatabaseFileEncryption.createEncryptedBackup(
|
|
||||||
databasePaths.main,
|
|
||||||
backupDir,
|
|
||||||
);
|
|
||||||
|
|
||||||
return backupPath;
|
return backupPath;
|
||||||
|
} catch (error) {
|
||||||
|
databaseLogger.error("Failed to create migration backup", error, {
|
||||||
|
operation: "migration_backup_failed",
|
||||||
|
source: this.unencryptedDbPath,
|
||||||
|
backup: backupPath,
|
||||||
|
});
|
||||||
|
throw new Error(`Backup creation failed: ${error instanceof Error ? error.message : "Unknown error"}`);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Get table schema for database operations
|
* 验证数据库迁移的完整性
|
||||||
*/
|
*/
|
||||||
private static getTableSchema(tableName: string) {
|
private async verifyMigration(originalDb: Database.Database, memoryDb: Database.Database): Promise<boolean> {
|
||||||
const tableMap: { [key: string]: any } = {
|
try {
|
||||||
users: users,
|
databaseLogger.info("Verifying migration integrity", {
|
||||||
ssh_data: sshData,
|
operation: "migration_verify_start",
|
||||||
ssh_credentials: sshCredentials,
|
});
|
||||||
settings: settings,
|
|
||||||
file_manager_recent: fileManagerRecent,
|
// 临时禁用外键约束以进行验证查询
|
||||||
file_manager_pinned: fileManagerPinned,
|
memoryDb.exec("PRAGMA foreign_keys = OFF");
|
||||||
file_manager_shortcuts: fileManagerShortcuts,
|
|
||||||
dismissed_alerts: dismissedAlerts,
|
// 获取原数据库的表列表
|
||||||
ssh_credential_usage: sshCredentialUsage,
|
const originalTables = originalDb
|
||||||
|
.prepare(`
|
||||||
|
SELECT name FROM sqlite_master
|
||||||
|
WHERE type='table' AND name NOT LIKE 'sqlite_%'
|
||||||
|
ORDER BY name
|
||||||
|
`)
|
||||||
|
.all() as { name: string }[];
|
||||||
|
|
||||||
|
// 获取内存数据库的表列表
|
||||||
|
const memoryTables = memoryDb
|
||||||
|
.prepare(`
|
||||||
|
SELECT name FROM sqlite_master
|
||||||
|
WHERE type='table' AND name NOT LIKE 'sqlite_%'
|
||||||
|
ORDER BY name
|
||||||
|
`)
|
||||||
|
.all() as { name: string }[];
|
||||||
|
|
||||||
|
// 检查表数量是否一致
|
||||||
|
if (originalTables.length !== memoryTables.length) {
|
||||||
|
databaseLogger.error("Table count mismatch during migration verification", null, {
|
||||||
|
operation: "migration_verify_failed",
|
||||||
|
originalCount: originalTables.length,
|
||||||
|
memoryCount: memoryTables.length,
|
||||||
|
});
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
let totalOriginalRows = 0;
|
||||||
|
let totalMemoryRows = 0;
|
||||||
|
|
||||||
|
// 逐表验证行数
|
||||||
|
for (const table of originalTables) {
|
||||||
|
const originalCount = originalDb.prepare(`SELECT COUNT(*) as count FROM ${table.name}`).get() as { count: number };
|
||||||
|
const memoryCount = memoryDb.prepare(`SELECT COUNT(*) as count FROM ${table.name}`).get() as { count: number };
|
||||||
|
|
||||||
|
totalOriginalRows += originalCount.count;
|
||||||
|
totalMemoryRows += memoryCount.count;
|
||||||
|
|
||||||
|
if (originalCount.count !== memoryCount.count) {
|
||||||
|
databaseLogger.error("Row count mismatch for table during migration verification", null, {
|
||||||
|
operation: "migration_verify_table_failed",
|
||||||
|
table: table.name,
|
||||||
|
originalRows: originalCount.count,
|
||||||
|
memoryRows: memoryCount.count,
|
||||||
|
});
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
databaseLogger.debug("Table verification passed", {
|
||||||
|
operation: "migration_verify_table_success",
|
||||||
|
table: table.name,
|
||||||
|
rows: originalCount.count,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
databaseLogger.success("Migration integrity verification completed", {
|
||||||
|
operation: "migration_verify_success",
|
||||||
|
tables: originalTables.length,
|
||||||
|
totalRows: totalOriginalRows,
|
||||||
|
});
|
||||||
|
|
||||||
|
// 重新启用外键约束
|
||||||
|
memoryDb.exec("PRAGMA foreign_keys = ON");
|
||||||
|
|
||||||
|
return true;
|
||||||
|
} catch (error) {
|
||||||
|
databaseLogger.error("Migration verification failed", error, {
|
||||||
|
operation: "migration_verify_error",
|
||||||
|
});
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 执行数据库迁移
|
||||||
|
*/
|
||||||
|
async migrateDatabase(): Promise<MigrationResult> {
|
||||||
|
const startTime = Date.now();
|
||||||
|
let backupPath: string | undefined;
|
||||||
|
let migratedTables = 0;
|
||||||
|
let migratedRows = 0;
|
||||||
|
|
||||||
|
try {
|
||||||
|
databaseLogger.info("Starting database migration from unencrypted to encrypted format", {
|
||||||
|
operation: "migration_start",
|
||||||
|
source: this.unencryptedDbPath,
|
||||||
|
target: this.encryptedDbPath,
|
||||||
|
});
|
||||||
|
|
||||||
|
// 1. 创建安全备份
|
||||||
|
backupPath = this.createBackup();
|
||||||
|
|
||||||
|
// 2. 打开原数据库(只读)
|
||||||
|
const originalDb = new Database(this.unencryptedDbPath, { readonly: true });
|
||||||
|
|
||||||
|
// 3. 创建内存数据库
|
||||||
|
const memoryDb = new Database(":memory:");
|
||||||
|
|
||||||
|
try {
|
||||||
|
// 4. 获取所有表结构
|
||||||
|
const tables = originalDb
|
||||||
|
.prepare(`
|
||||||
|
SELECT name, sql FROM sqlite_master
|
||||||
|
WHERE type='table' AND name NOT LIKE 'sqlite_%'
|
||||||
|
`)
|
||||||
|
.all() as { name: string; sql: string }[];
|
||||||
|
|
||||||
|
databaseLogger.info("Found tables to migrate", {
|
||||||
|
operation: "migration_tables_found",
|
||||||
|
tableCount: tables.length,
|
||||||
|
tables: tables.map(t => t.name),
|
||||||
|
});
|
||||||
|
|
||||||
|
// 5. 在内存数据库中创建表结构
|
||||||
|
for (const table of tables) {
|
||||||
|
memoryDb.exec(table.sql);
|
||||||
|
migratedTables++;
|
||||||
|
|
||||||
|
databaseLogger.debug("Table structure created", {
|
||||||
|
operation: "migration_table_created",
|
||||||
|
table: table.name,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
// 6. 禁用外键约束以避免插入顺序问题
|
||||||
|
databaseLogger.info("Disabling foreign key constraints for migration", {
|
||||||
|
operation: "migration_disable_fk",
|
||||||
|
});
|
||||||
|
memoryDb.exec("PRAGMA foreign_keys = OFF");
|
||||||
|
|
||||||
|
// 7. 复制每个表的数据
|
||||||
|
for (const table of tables) {
|
||||||
|
const rows = originalDb.prepare(`SELECT * FROM ${table.name}`).all();
|
||||||
|
|
||||||
|
if (rows.length > 0) {
|
||||||
|
const columns = Object.keys(rows[0]);
|
||||||
|
const placeholders = columns.map(() => "?").join(", ");
|
||||||
|
const insertStmt = memoryDb.prepare(
|
||||||
|
`INSERT INTO ${table.name} (${columns.join(", ")}) VALUES (${placeholders})`
|
||||||
|
);
|
||||||
|
|
||||||
|
// 使用事务批量插入
|
||||||
|
const insertTransaction = memoryDb.transaction((dataRows: any[]) => {
|
||||||
|
for (const row of dataRows) {
|
||||||
|
const values = columns.map((col) => row[col]);
|
||||||
|
insertStmt.run(values);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
insertTransaction(rows);
|
||||||
|
migratedRows += rows.length;
|
||||||
|
|
||||||
|
databaseLogger.debug("Table data migrated", {
|
||||||
|
operation: "migration_table_data",
|
||||||
|
table: table.name,
|
||||||
|
rows: rows.length,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// 8. 重新启用外键约束
|
||||||
|
databaseLogger.info("Re-enabling foreign key constraints after migration", {
|
||||||
|
operation: "migration_enable_fk",
|
||||||
|
});
|
||||||
|
memoryDb.exec("PRAGMA foreign_keys = ON");
|
||||||
|
|
||||||
|
// 验证外键约束现在是否正常
|
||||||
|
const fkCheckResult = memoryDb.prepare("PRAGMA foreign_key_check").all();
|
||||||
|
if (fkCheckResult.length > 0) {
|
||||||
|
databaseLogger.error("Foreign key constraints violations detected after migration", null, {
|
||||||
|
operation: "migration_fk_check_failed",
|
||||||
|
violations: fkCheckResult,
|
||||||
|
});
|
||||||
|
throw new Error(`Foreign key violations detected: ${JSON.stringify(fkCheckResult)}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
databaseLogger.success("Foreign key constraints verification passed", {
|
||||||
|
operation: "migration_fk_check_success",
|
||||||
|
});
|
||||||
|
|
||||||
|
// 9. 验证迁移完整性
|
||||||
|
const verificationPassed = await this.verifyMigration(originalDb, memoryDb);
|
||||||
|
if (!verificationPassed) {
|
||||||
|
throw new Error("Migration integrity verification failed");
|
||||||
|
}
|
||||||
|
|
||||||
|
// 10. 导出内存数据库到缓冲区
|
||||||
|
const buffer = memoryDb.serialize();
|
||||||
|
|
||||||
|
// 11. 创建加密数据库文件
|
||||||
|
databaseLogger.info("Creating encrypted database file", {
|
||||||
|
operation: "migration_encrypt_start",
|
||||||
|
bufferSize: buffer.length,
|
||||||
|
});
|
||||||
|
|
||||||
|
await DatabaseFileEncryption.encryptDatabaseFromBuffer(buffer, this.encryptedDbPath);
|
||||||
|
|
||||||
|
// 12. 验证加密文件
|
||||||
|
if (!DatabaseFileEncryption.isEncryptedDatabaseFile(this.encryptedDbPath)) {
|
||||||
|
throw new Error("Encrypted database file verification failed");
|
||||||
|
}
|
||||||
|
|
||||||
|
// 13. 清理:重命名原文件而不是删除
|
||||||
|
const timestamp = new Date().toISOString().replace(/[:.]/g, '-');
|
||||||
|
const migratedPath = `${this.unencryptedDbPath}.migrated-${timestamp}`;
|
||||||
|
|
||||||
|
fs.renameSync(this.unencryptedDbPath, migratedPath);
|
||||||
|
|
||||||
|
databaseLogger.success("Database migration completed successfully", {
|
||||||
|
operation: "migration_complete",
|
||||||
|
migratedTables,
|
||||||
|
migratedRows,
|
||||||
|
duration: Date.now() - startTime,
|
||||||
|
backupPath,
|
||||||
|
migratedPath,
|
||||||
|
encryptedDbPath: this.encryptedDbPath,
|
||||||
|
});
|
||||||
|
|
||||||
|
return {
|
||||||
|
success: true,
|
||||||
|
migratedTables,
|
||||||
|
migratedRows,
|
||||||
|
backupPath,
|
||||||
|
duration: Date.now() - startTime,
|
||||||
};
|
};
|
||||||
|
|
||||||
return tableMap[tableName];
|
} finally {
|
||||||
|
// 确保数据库连接关闭
|
||||||
|
originalDb.close();
|
||||||
|
memoryDb.close();
|
||||||
|
}
|
||||||
|
|
||||||
|
} catch (error) {
|
||||||
|
const errorMessage = error instanceof Error ? error.message : "Unknown error";
|
||||||
|
|
||||||
|
databaseLogger.error("Database migration failed", error, {
|
||||||
|
operation: "migration_failed",
|
||||||
|
migratedTables,
|
||||||
|
migratedRows,
|
||||||
|
duration: Date.now() - startTime,
|
||||||
|
backupPath,
|
||||||
|
});
|
||||||
|
|
||||||
|
return {
|
||||||
|
success: false,
|
||||||
|
error: errorMessage,
|
||||||
|
migratedTables,
|
||||||
|
migratedRows,
|
||||||
|
backupPath,
|
||||||
|
duration: Date.now() - startTime,
|
||||||
|
};
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Get export file info without importing
|
* 清理旧的备份文件(保留最近3个)
|
||||||
*/
|
*/
|
||||||
static getExportInfo(exportPath: string): ExportMetadata | null {
|
cleanupOldBackups(): void {
|
||||||
const validation = this.validateExportFile(exportPath);
|
try {
|
||||||
return validation.valid ? validation.metadata! : null;
|
const backupPattern = /\.migration-backup-\d{4}-\d{2}-\d{2}T\d{2}-\d{2}-\d{2}-\d{3}Z$/;
|
||||||
|
const migratedPattern = /\.migrated-\d{4}-\d{2}-\d{2}T\d{2}-\d{2}-\d{2}-\d{3}Z$/;
|
||||||
|
|
||||||
|
const files = fs.readdirSync(this.dataDir);
|
||||||
|
|
||||||
|
// 查找备份文件和已迁移文件
|
||||||
|
const backupFiles = files.filter(f => backupPattern.test(f))
|
||||||
|
.map(f => ({
|
||||||
|
name: f,
|
||||||
|
path: path.join(this.dataDir, f),
|
||||||
|
mtime: fs.statSync(path.join(this.dataDir, f)).mtime,
|
||||||
|
}))
|
||||||
|
.sort((a, b) => b.mtime.getTime() - a.mtime.getTime());
|
||||||
|
|
||||||
|
const migratedFiles = files.filter(f => migratedPattern.test(f))
|
||||||
|
.map(f => ({
|
||||||
|
name: f,
|
||||||
|
path: path.join(this.dataDir, f),
|
||||||
|
mtime: fs.statSync(path.join(this.dataDir, f)).mtime,
|
||||||
|
}))
|
||||||
|
.sort((a, b) => b.mtime.getTime() - a.mtime.getTime());
|
||||||
|
|
||||||
|
// 保留最近3个备份文件
|
||||||
|
const backupsToDelete = backupFiles.slice(3);
|
||||||
|
const migratedToDelete = migratedFiles.slice(3);
|
||||||
|
|
||||||
|
for (const file of [...backupsToDelete, ...migratedToDelete]) {
|
||||||
|
try {
|
||||||
|
fs.unlinkSync(file.path);
|
||||||
|
databaseLogger.debug("Cleaned up old migration file", {
|
||||||
|
operation: "migration_cleanup",
|
||||||
|
file: file.name,
|
||||||
|
});
|
||||||
|
} catch (error) {
|
||||||
|
databaseLogger.warn("Failed to cleanup old migration file", {
|
||||||
|
operation: "migration_cleanup_failed",
|
||||||
|
file: file.name,
|
||||||
|
error: error instanceof Error ? error.message : "Unknown error",
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (backupsToDelete.length > 0 || migratedToDelete.length > 0) {
|
||||||
|
databaseLogger.info("Migration cleanup completed", {
|
||||||
|
operation: "migration_cleanup_complete",
|
||||||
|
deletedBackups: backupsToDelete.length,
|
||||||
|
deletedMigrated: migratedToDelete.length,
|
||||||
|
remainingBackups: Math.min(backupFiles.length, 3),
|
||||||
|
remainingMigrated: Math.min(migratedFiles.length, 3),
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
} catch (error) {
|
||||||
|
databaseLogger.warn("Migration cleanup failed", {
|
||||||
|
operation: "migration_cleanup_error",
|
||||||
|
error: error instanceof Error ? error.message : "Unknown error",
|
||||||
|
});
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
export { DatabaseMigration };
|
|
||||||
export type { ExportMetadata, MigrationExport, ImportResult };
|
|
||||||
|
|||||||
162
src/backend/utils/database-save-trigger.ts
Normal file
@@ -0,0 +1,162 @@
|
|||||||
|
import { databaseLogger } from "./logger.js";
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Database Save Trigger - 自动触发内存数据库保存到磁盘
|
||||||
|
* 确保数据修改后能持久化保存
|
||||||
|
*/
|
||||||
|
export class DatabaseSaveTrigger {
|
||||||
|
private static saveFunction: (() => Promise<void>) | null = null;
|
||||||
|
private static isInitialized = false;
|
||||||
|
private static pendingSave = false;
|
||||||
|
private static saveTimeout: NodeJS.Timeout | null = null;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 初始化保存触发器
|
||||||
|
*/
|
||||||
|
static initialize(saveFunction: () => Promise<void>): void {
|
||||||
|
this.saveFunction = saveFunction;
|
||||||
|
this.isInitialized = true;
|
||||||
|
|
||||||
|
databaseLogger.info("Database save trigger initialized", {
|
||||||
|
operation: "db_save_trigger_init",
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 触发数据库保存 - 防抖处理,避免频繁保存
|
||||||
|
*/
|
||||||
|
static async triggerSave(reason: string = "data_modification"): Promise<void> {
|
||||||
|
if (!this.isInitialized || !this.saveFunction) {
|
||||||
|
databaseLogger.warn("Database save trigger not initialized", {
|
||||||
|
operation: "db_save_trigger_not_init",
|
||||||
|
reason,
|
||||||
|
});
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// 清除之前的定时器
|
||||||
|
if (this.saveTimeout) {
|
||||||
|
clearTimeout(this.saveTimeout);
|
||||||
|
}
|
||||||
|
|
||||||
|
// 防抖:延迟2秒执行,如果2秒内有新的保存请求,则重新计时
|
||||||
|
this.saveTimeout = setTimeout(async () => {
|
||||||
|
if (this.pendingSave) {
|
||||||
|
databaseLogger.debug("Database save already in progress, skipping", {
|
||||||
|
operation: "db_save_trigger_skip",
|
||||||
|
reason,
|
||||||
|
});
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
this.pendingSave = true;
|
||||||
|
|
||||||
|
try {
|
||||||
|
databaseLogger.debug("Triggering database save", {
|
||||||
|
operation: "db_save_trigger_start",
|
||||||
|
reason,
|
||||||
|
});
|
||||||
|
|
||||||
|
await this.saveFunction!();
|
||||||
|
|
||||||
|
databaseLogger.debug("Database save completed", {
|
||||||
|
operation: "db_save_trigger_success",
|
||||||
|
reason,
|
||||||
|
});
|
||||||
|
} catch (error) {
|
||||||
|
databaseLogger.error("Database save failed", error, {
|
||||||
|
operation: "db_save_trigger_failed",
|
||||||
|
reason,
|
||||||
|
error: error instanceof Error ? error.message : "Unknown error",
|
||||||
|
});
|
||||||
|
} finally {
|
||||||
|
this.pendingSave = false;
|
||||||
|
}
|
||||||
|
}, 2000); // 2秒防抖
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 立即保存 - 用于关键操作
|
||||||
|
*/
|
||||||
|
static async forceSave(reason: string = "critical_operation"): Promise<void> {
|
||||||
|
if (!this.isInitialized || !this.saveFunction) {
|
||||||
|
databaseLogger.warn("Database save trigger not initialized for force save", {
|
||||||
|
operation: "db_save_trigger_force_not_init",
|
||||||
|
reason,
|
||||||
|
});
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// 清除防抖定时器
|
||||||
|
if (this.saveTimeout) {
|
||||||
|
clearTimeout(this.saveTimeout);
|
||||||
|
this.saveTimeout = null;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (this.pendingSave) {
|
||||||
|
databaseLogger.debug("Database save already in progress, waiting", {
|
||||||
|
operation: "db_save_trigger_force_wait",
|
||||||
|
reason,
|
||||||
|
});
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
this.pendingSave = true;
|
||||||
|
|
||||||
|
try {
|
||||||
|
databaseLogger.info("Force saving database", {
|
||||||
|
operation: "db_save_trigger_force_start",
|
||||||
|
reason,
|
||||||
|
});
|
||||||
|
|
||||||
|
await this.saveFunction();
|
||||||
|
|
||||||
|
databaseLogger.success("Database force save completed", {
|
||||||
|
operation: "db_save_trigger_force_success",
|
||||||
|
reason,
|
||||||
|
});
|
||||||
|
} catch (error) {
|
||||||
|
databaseLogger.error("Database force save failed", error, {
|
||||||
|
operation: "db_save_trigger_force_failed",
|
||||||
|
reason,
|
||||||
|
error: error instanceof Error ? error.message : "Unknown error",
|
||||||
|
});
|
||||||
|
throw error; // 重新抛出错误,因为这是强制保存
|
||||||
|
} finally {
|
||||||
|
this.pendingSave = false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 获取保存状态
|
||||||
|
*/
|
||||||
|
static getStatus(): {
|
||||||
|
initialized: boolean;
|
||||||
|
pendingSave: boolean;
|
||||||
|
hasPendingTimeout: boolean;
|
||||||
|
} {
|
||||||
|
return {
|
||||||
|
initialized: this.isInitialized,
|
||||||
|
pendingSave: this.pendingSave,
|
||||||
|
hasPendingTimeout: this.saveTimeout !== null,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 清理资源
|
||||||
|
*/
|
||||||
|
static cleanup(): void {
|
||||||
|
if (this.saveTimeout) {
|
||||||
|
clearTimeout(this.saveTimeout);
|
||||||
|
this.saveTimeout = null;
|
||||||
|
}
|
||||||
|
|
||||||
|
this.pendingSave = false;
|
||||||
|
this.isInitialized = false;
|
||||||
|
this.saveFunction = null;
|
||||||
|
|
||||||
|
databaseLogger.info("Database save trigger cleaned up", {
|
||||||
|
operation: "db_save_trigger_cleanup",
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -1,728 +0,0 @@
|
|||||||
import fs from "fs";
|
|
||||||
import path from "path";
|
|
||||||
import crypto from "crypto";
|
|
||||||
import Database from "better-sqlite3";
|
|
||||||
import { sql, eq } from "drizzle-orm";
|
|
||||||
import { drizzle } from "drizzle-orm/better-sqlite3";
|
|
||||||
import { DatabaseEncryption } from "./database-encryption.js";
|
|
||||||
import { FieldEncryption } from "./encryption.js";
|
|
||||||
import { HardwareFingerprint } from "./hardware-fingerprint.js";
|
|
||||||
import { databaseLogger } from "./logger.js";
|
|
||||||
import { databasePaths, db, sqliteInstance } from "../database/db/index.js";
|
|
||||||
import { sshData, sshCredentials, users } from "../database/db/schema.js";
|
|
||||||
|
|
||||||
interface ExportMetadata {
|
|
||||||
version: string;
|
|
||||||
exportedAt: string;
|
|
||||||
exportId: string;
|
|
||||||
sourceHardwareFingerprint: string;
|
|
||||||
tableCount: number;
|
|
||||||
recordCount: number;
|
|
||||||
encryptedFields: string[];
|
|
||||||
}
|
|
||||||
|
|
||||||
interface ImportResult {
|
|
||||||
success: boolean;
|
|
||||||
imported: {
|
|
||||||
tables: number;
|
|
||||||
records: number;
|
|
||||||
};
|
|
||||||
errors: string[];
|
|
||||||
warnings: string[];
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* SQLite database export/import utility for hardware migration
|
|
||||||
* Exports decrypted data to a new SQLite database file for hardware transfer
|
|
||||||
*/
|
|
||||||
class DatabaseSQLiteExport {
|
|
||||||
private static readonly VERSION = "v1";
|
|
||||||
private static readonly EXPORT_FILE_EXTENSION = ".termix-export.sqlite";
|
|
||||||
private static readonly METADATA_TABLE = "_termix_export_metadata";
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Export database as SQLite file for migration
|
|
||||||
* Creates a new SQLite database with decrypted data
|
|
||||||
*/
|
|
||||||
static async exportDatabase(exportPath?: string): Promise<string> {
|
|
||||||
const exportId = crypto.randomUUID();
|
|
||||||
const timestamp = new Date().toISOString();
|
|
||||||
const defaultExportPath = path.join(
|
|
||||||
databasePaths.directory,
|
|
||||||
`termix-export-${timestamp.replace(/[:.]/g, "-")}${this.EXPORT_FILE_EXTENSION}`,
|
|
||||||
);
|
|
||||||
const actualExportPath = exportPath || defaultExportPath;
|
|
||||||
|
|
||||||
try {
|
|
||||||
databaseLogger.info("Starting SQLite database export for migration", {
|
|
||||||
operation: "database_sqlite_export",
|
|
||||||
exportId,
|
|
||||||
exportPath: actualExportPath,
|
|
||||||
});
|
|
||||||
|
|
||||||
// Create new SQLite database for export
|
|
||||||
const exportDb = new Database(actualExportPath);
|
|
||||||
|
|
||||||
// Define tables to export - only SSH-related data
|
|
||||||
const tablesToExport = [
|
|
||||||
{ name: "ssh_data", hasEncryption: true },
|
|
||||||
{ name: "ssh_credentials", hasEncryption: true },
|
|
||||||
];
|
|
||||||
|
|
||||||
const exportMetadata: ExportMetadata = {
|
|
||||||
version: this.VERSION,
|
|
||||||
exportedAt: timestamp,
|
|
||||||
exportId,
|
|
||||||
sourceHardwareFingerprint: HardwareFingerprint.generate().substring(
|
|
||||||
0,
|
|
||||||
16,
|
|
||||||
),
|
|
||||||
tableCount: 0,
|
|
||||||
recordCount: 0,
|
|
||||||
encryptedFields: [],
|
|
||||||
};
|
|
||||||
|
|
||||||
let totalRecords = 0;
|
|
||||||
|
|
||||||
// Check total records in SSH tables for debugging
|
|
||||||
const totalSshData = await db.select().from(sshData);
|
|
||||||
const totalSshCredentials = await db.select().from(sshCredentials);
|
|
||||||
|
|
||||||
databaseLogger.info(`Export preparation: found SSH data`, {
|
|
||||||
operation: "export_data_check",
|
|
||||||
totalSshData: totalSshData.length,
|
|
||||||
totalSshCredentials: totalSshCredentials.length,
|
|
||||||
});
|
|
||||||
|
|
||||||
// Create metadata table
|
|
||||||
exportDb.exec(`
|
|
||||||
CREATE TABLE ${this.METADATA_TABLE} (
|
|
||||||
key TEXT PRIMARY KEY,
|
|
||||||
value TEXT NOT NULL
|
|
||||||
)
|
|
||||||
`);
|
|
||||||
|
|
||||||
// Copy schema and data for each table
|
|
||||||
for (const tableInfo of tablesToExport) {
|
|
||||||
try {
|
|
||||||
databaseLogger.debug(`Exporting SQLite table: ${tableInfo.name}`, {
|
|
||||||
operation: "table_sqlite_export",
|
|
||||||
table: tableInfo.name,
|
|
||||||
hasEncryption: tableInfo.hasEncryption,
|
|
||||||
});
|
|
||||||
|
|
||||||
// Create table in export database using consistent schema
|
|
||||||
if (tableInfo.name === "ssh_data") {
|
|
||||||
// Create ssh_data table using exact schema matching Drizzle definition
|
|
||||||
const createTableSql = `CREATE TABLE ssh_data (
|
|
||||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
|
||||||
user_id TEXT NOT NULL,
|
|
||||||
name TEXT,
|
|
||||||
ip TEXT NOT NULL,
|
|
||||||
port INTEGER NOT NULL,
|
|
||||||
username TEXT NOT NULL,
|
|
||||||
folder TEXT,
|
|
||||||
tags TEXT,
|
|
||||||
pin INTEGER NOT NULL DEFAULT 0,
|
|
||||||
auth_type TEXT NOT NULL,
|
|
||||||
password TEXT,
|
|
||||||
require_password INTEGER NOT NULL DEFAULT 1,
|
|
||||||
key TEXT,
|
|
||||||
key_password TEXT,
|
|
||||||
key_type TEXT,
|
|
||||||
credential_id INTEGER,
|
|
||||||
enable_terminal INTEGER NOT NULL DEFAULT 1,
|
|
||||||
enable_tunnel INTEGER NOT NULL DEFAULT 1,
|
|
||||||
tunnel_connections TEXT,
|
|
||||||
enable_file_manager INTEGER NOT NULL DEFAULT 1,
|
|
||||||
default_path TEXT,
|
|
||||||
created_at TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
|
||||||
updated_at TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP
|
|
||||||
)`;
|
|
||||||
exportDb.exec(createTableSql);
|
|
||||||
} else if (tableInfo.name === "ssh_credentials") {
|
|
||||||
// Create ssh_credentials table using exact schema matching Drizzle definition
|
|
||||||
const createTableSql = `CREATE TABLE ssh_credentials (
|
|
||||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
|
||||||
name TEXT NOT NULL,
|
|
||||||
username TEXT,
|
|
||||||
password TEXT,
|
|
||||||
key_content TEXT,
|
|
||||||
key_password TEXT,
|
|
||||||
key_type TEXT,
|
|
||||||
created_at TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
|
||||||
updated_at TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP
|
|
||||||
)`;
|
|
||||||
exportDb.exec(createTableSql);
|
|
||||||
} else {
|
|
||||||
databaseLogger.warn(`Unknown table ${tableInfo.name}, skipping`, {
|
|
||||||
operation: "table_sqlite_export_skip",
|
|
||||||
table: tableInfo.name,
|
|
||||||
});
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Query all records from tables using Drizzle
|
|
||||||
let records: any[];
|
|
||||||
if (tableInfo.name === "ssh_data") {
|
|
||||||
records = await db.select().from(sshData);
|
|
||||||
} else if (tableInfo.name === "ssh_credentials") {
|
|
||||||
records = await db.select().from(sshCredentials);
|
|
||||||
} else {
|
|
||||||
records = [];
|
|
||||||
}
|
|
||||||
|
|
||||||
databaseLogger.info(
|
|
||||||
`Found ${records.length} records in ${tableInfo.name} for export`,
|
|
||||||
{
|
|
||||||
operation: "table_record_count",
|
|
||||||
table: tableInfo.name,
|
|
||||||
recordCount: records.length,
|
|
||||||
},
|
|
||||||
);
|
|
||||||
|
|
||||||
// Decrypt encrypted fields if necessary
|
|
||||||
let processedRecords = records;
|
|
||||||
if (tableInfo.hasEncryption && records.length > 0) {
|
|
||||||
processedRecords = records.map((record) => {
|
|
||||||
try {
|
|
||||||
return DatabaseEncryption.decryptRecord(tableInfo.name, record);
|
|
||||||
} catch (error) {
|
|
||||||
databaseLogger.warn(
|
|
||||||
`Failed to decrypt record in ${tableInfo.name}`,
|
|
||||||
{
|
|
||||||
operation: "export_decrypt_warning",
|
|
||||||
table: tableInfo.name,
|
|
||||||
recordId: (record as any).id,
|
|
||||||
error:
|
|
||||||
error instanceof Error ? error.message : "Unknown error",
|
|
||||||
},
|
|
||||||
);
|
|
||||||
return record;
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
// Track encrypted fields
|
|
||||||
const sampleRecord = records[0];
|
|
||||||
for (const fieldName of Object.keys(sampleRecord)) {
|
|
||||||
if (this.shouldTrackEncryptedField(tableInfo.name, fieldName)) {
|
|
||||||
const fieldKey = `${tableInfo.name}.${fieldName}`;
|
|
||||||
if (!exportMetadata.encryptedFields.includes(fieldKey)) {
|
|
||||||
exportMetadata.encryptedFields.push(fieldKey);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Insert records into export database
|
|
||||||
if (processedRecords.length > 0) {
|
|
||||||
const sampleRecord = processedRecords[0];
|
|
||||||
const tsFieldNames = Object.keys(sampleRecord);
|
|
||||||
|
|
||||||
// Map TypeScript field names to database column names
|
|
||||||
const dbColumnNames = tsFieldNames.map((fieldName) => {
|
|
||||||
// Map TypeScript field names to database column names
|
|
||||||
const fieldMappings: Record<string, string> = {
|
|
||||||
userId: "user_id",
|
|
||||||
authType: "auth_type",
|
|
||||||
requirePassword: "require_password",
|
|
||||||
keyPassword: "key_password",
|
|
||||||
keyType: "key_type",
|
|
||||||
credentialId: "credential_id",
|
|
||||||
enableTerminal: "enable_terminal",
|
|
||||||
enableTunnel: "enable_tunnel",
|
|
||||||
tunnelConnections: "tunnel_connections",
|
|
||||||
enableFileManager: "enable_file_manager",
|
|
||||||
defaultPath: "default_path",
|
|
||||||
createdAt: "created_at",
|
|
||||||
updatedAt: "updated_at",
|
|
||||||
keyContent: "key_content",
|
|
||||||
};
|
|
||||||
return fieldMappings[fieldName] || fieldName;
|
|
||||||
});
|
|
||||||
|
|
||||||
const placeholders = dbColumnNames.map(() => "?").join(", ");
|
|
||||||
const insertSql = `INSERT INTO ${tableInfo.name} (${dbColumnNames.join(", ")}) VALUES (${placeholders})`;
|
|
||||||
|
|
||||||
const insertStmt = exportDb.prepare(insertSql);
|
|
||||||
|
|
||||||
for (const record of processedRecords) {
|
|
||||||
const values = tsFieldNames.map((fieldName) => {
|
|
||||||
const value: any = record[fieldName as keyof typeof record];
|
|
||||||
// Convert values to SQLite-compatible types
|
|
||||||
if (value === null || value === undefined) {
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
if (
|
|
||||||
typeof value === "string" ||
|
|
||||||
typeof value === "number" ||
|
|
||||||
typeof value === "bigint"
|
|
||||||
) {
|
|
||||||
return value;
|
|
||||||
}
|
|
||||||
if (Buffer.isBuffer(value)) {
|
|
||||||
return value;
|
|
||||||
}
|
|
||||||
if (value instanceof Date) {
|
|
||||||
return value.toISOString();
|
|
||||||
}
|
|
||||||
if (typeof value === "boolean") {
|
|
||||||
return value ? 1 : 0;
|
|
||||||
}
|
|
||||||
// Convert objects and arrays to JSON strings
|
|
||||||
if (typeof value === "object") {
|
|
||||||
return JSON.stringify(value);
|
|
||||||
}
|
|
||||||
// Fallback: convert to string
|
|
||||||
return String(value);
|
|
||||||
});
|
|
||||||
insertStmt.run(values);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
totalRecords += processedRecords.length;
|
|
||||||
|
|
||||||
databaseLogger.debug(`SQLite table ${tableInfo.name} exported`, {
|
|
||||||
operation: "table_sqlite_export_complete",
|
|
||||||
table: tableInfo.name,
|
|
||||||
recordCount: processedRecords.length,
|
|
||||||
});
|
|
||||||
} catch (error) {
|
|
||||||
databaseLogger.error(
|
|
||||||
`Failed to export SQLite table ${tableInfo.name}`,
|
|
||||||
error,
|
|
||||||
{
|
|
||||||
operation: "table_sqlite_export_failed",
|
|
||||||
table: tableInfo.name,
|
|
||||||
},
|
|
||||||
);
|
|
||||||
throw error;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Update and store metadata
|
|
||||||
exportMetadata.tableCount = tablesToExport.length;
|
|
||||||
exportMetadata.recordCount = totalRecords;
|
|
||||||
|
|
||||||
const insertMetadata = exportDb.prepare(
|
|
||||||
`INSERT INTO ${this.METADATA_TABLE} (key, value) VALUES (?, ?)`,
|
|
||||||
);
|
|
||||||
insertMetadata.run("metadata", JSON.stringify(exportMetadata));
|
|
||||||
|
|
||||||
// Close export database
|
|
||||||
exportDb.close();
|
|
||||||
|
|
||||||
databaseLogger.success("SQLite database export completed successfully", {
|
|
||||||
operation: "database_sqlite_export_complete",
|
|
||||||
exportId,
|
|
||||||
exportPath: actualExportPath,
|
|
||||||
tableCount: exportMetadata.tableCount,
|
|
||||||
recordCount: exportMetadata.recordCount,
|
|
||||||
fileSize: fs.statSync(actualExportPath).size,
|
|
||||||
});
|
|
||||||
|
|
||||||
return actualExportPath;
|
|
||||||
} catch (error) {
|
|
||||||
databaseLogger.error("SQLite database export failed", error, {
|
|
||||||
operation: "database_sqlite_export_failed",
|
|
||||||
exportId,
|
|
||||||
exportPath: actualExportPath,
|
|
||||||
});
|
|
||||||
throw new Error(
|
|
||||||
`SQLite database export failed: ${error instanceof Error ? error.message : "Unknown error"}`,
|
|
||||||
);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Import database from SQLite export
|
|
||||||
* Re-encrypts fields for the current hardware
|
|
||||||
*/
|
|
||||||
static async importDatabase(
|
|
||||||
importPath: string,
|
|
||||||
options: {
|
|
||||||
replaceExisting?: boolean;
|
|
||||||
backupCurrent?: boolean;
|
|
||||||
} = {},
|
|
||||||
): Promise<ImportResult> {
|
|
||||||
const { replaceExisting = false, backupCurrent = true } = options;
|
|
||||||
|
|
||||||
if (!fs.existsSync(importPath)) {
|
|
||||||
throw new Error(`Import file does not exist: ${importPath}`);
|
|
||||||
}
|
|
||||||
|
|
||||||
try {
|
|
||||||
databaseLogger.info("Starting SQLite database import from export", {
|
|
||||||
operation: "database_sqlite_import",
|
|
||||||
importPath,
|
|
||||||
replaceExisting,
|
|
||||||
backupCurrent,
|
|
||||||
});
|
|
||||||
|
|
||||||
// Open import database
|
|
||||||
const importDb = new Database(importPath, { readonly: true });
|
|
||||||
|
|
||||||
// Validate export format
|
|
||||||
const metadataResult = importDb
|
|
||||||
.prepare(
|
|
||||||
`
|
|
||||||
SELECT value FROM ${this.METADATA_TABLE} WHERE key = 'metadata'
|
|
||||||
`,
|
|
||||||
)
|
|
||||||
.get() as { value: string } | undefined;
|
|
||||||
|
|
||||||
if (!metadataResult) {
|
|
||||||
throw new Error("Invalid export file: missing metadata");
|
|
||||||
}
|
|
||||||
|
|
||||||
const metadata: ExportMetadata = JSON.parse(metadataResult.value);
|
|
||||||
if (metadata.version !== this.VERSION) {
|
|
||||||
throw new Error(`Unsupported export version: ${metadata.version}`);
|
|
||||||
}
|
|
||||||
|
|
||||||
const result: ImportResult = {
|
|
||||||
success: false,
|
|
||||||
imported: { tables: 0, records: 0 },
|
|
||||||
errors: [],
|
|
||||||
warnings: [],
|
|
||||||
};
|
|
||||||
|
|
||||||
// Get current admin user to assign imported SSH records
|
|
||||||
const adminUser = await db
|
|
||||||
.select()
|
|
||||||
.from(users)
|
|
||||||
.where(eq(users.is_admin, true))
|
|
||||||
.limit(1);
|
|
||||||
if (adminUser.length === 0) {
|
|
||||||
throw new Error("No admin user found in current database");
|
|
||||||
}
|
|
||||||
const currentAdminUserId = adminUser[0].id;
|
|
||||||
|
|
||||||
databaseLogger.debug(
|
|
||||||
`Starting SSH data import - assigning to admin user ${currentAdminUserId}`,
|
|
||||||
{
|
|
||||||
operation: "ssh_data_import_start",
|
|
||||||
adminUserId: currentAdminUserId,
|
|
||||||
},
|
|
||||||
);
|
|
||||||
|
|
||||||
// Create backup if requested
|
|
||||||
if (backupCurrent) {
|
|
||||||
try {
|
|
||||||
const backupPath = await this.createCurrentDatabaseBackup();
|
|
||||||
databaseLogger.info("Current database backed up before import", {
|
|
||||||
operation: "import_backup",
|
|
||||||
backupPath,
|
|
||||||
});
|
|
||||||
} catch (error) {
|
|
||||||
const warningMsg = `Failed to create backup: ${error instanceof Error ? error.message : "Unknown error"}`;
|
|
||||||
result.warnings.push(warningMsg);
|
|
||||||
databaseLogger.warn("Failed to create pre-import backup", {
|
|
||||||
operation: "import_backup_failed",
|
|
||||||
error: warningMsg,
|
|
||||||
});
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Get list of tables to import (excluding metadata table)
|
|
||||||
const tables = importDb
|
|
||||||
.prepare(
|
|
||||||
`
|
|
||||||
SELECT name FROM sqlite_master
|
|
||||||
WHERE type='table' AND name != '${this.METADATA_TABLE}'
|
|
||||||
`,
|
|
||||||
)
|
|
||||||
.all() as { name: string }[];
|
|
||||||
|
|
||||||
// Import data table by table
|
|
||||||
for (const tableRow of tables) {
|
|
||||||
const tableName = tableRow.name;
|
|
||||||
|
|
||||||
try {
|
|
||||||
databaseLogger.debug(`Importing SQLite table: ${tableName}`, {
|
|
||||||
operation: "table_sqlite_import",
|
|
||||||
table: tableName,
|
|
||||||
});
|
|
||||||
|
|
||||||
// Use additive import - don't clear existing data
|
|
||||||
// This preserves all current data including admin SSH connections
|
|
||||||
databaseLogger.debug(`Using additive import for ${tableName}`, {
|
|
||||||
operation: "table_additive_import",
|
|
||||||
table: tableName,
|
|
||||||
});
|
|
||||||
|
|
||||||
// Get all records from import table
|
|
||||||
const records = importDb.prepare(`SELECT * FROM ${tableName}`).all();
|
|
||||||
|
|
||||||
// Process and encrypt records
|
|
||||||
for (const record of records) {
|
|
||||||
try {
|
|
||||||
// Import all SSH data without user filtering
|
|
||||||
|
|
||||||
// Map database column names to TypeScript field names
|
|
||||||
const mappedRecord: any = {};
|
|
||||||
const columnToFieldMappings: Record<string, string> = {
|
|
||||||
user_id: "userId",
|
|
||||||
auth_type: "authType",
|
|
||||||
require_password: "requirePassword",
|
|
||||||
key_password: "keyPassword",
|
|
||||||
key_type: "keyType",
|
|
||||||
credential_id: "credentialId",
|
|
||||||
enable_terminal: "enableTerminal",
|
|
||||||
enable_tunnel: "enableTunnel",
|
|
||||||
tunnel_connections: "tunnelConnections",
|
|
||||||
enable_file_manager: "enableFileManager",
|
|
||||||
default_path: "defaultPath",
|
|
||||||
created_at: "createdAt",
|
|
||||||
updated_at: "updatedAt",
|
|
||||||
key_content: "keyContent",
|
|
||||||
};
|
|
||||||
|
|
||||||
// Convert database column names to TypeScript field names
|
|
||||||
for (const [dbColumn, value] of Object.entries(record)) {
|
|
||||||
const tsField = columnToFieldMappings[dbColumn] || dbColumn;
|
|
||||||
mappedRecord[tsField] = value;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Assign imported SSH records to current admin user to avoid foreign key constraint
|
|
||||||
if (tableName === "ssh_data" && mappedRecord.userId) {
|
|
||||||
const originalUserId = mappedRecord.userId;
|
|
||||||
mappedRecord.userId = currentAdminUserId;
|
|
||||||
databaseLogger.debug(
|
|
||||||
`Reassigned SSH record from user ${originalUserId} to admin ${currentAdminUserId}`,
|
|
||||||
{
|
|
||||||
operation: "user_reassignment",
|
|
||||||
originalUserId,
|
|
||||||
newUserId: currentAdminUserId,
|
|
||||||
},
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
// Re-encrypt sensitive fields for current hardware
|
|
||||||
const processedRecord = DatabaseEncryption.encryptRecord(
|
|
||||||
tableName,
|
|
||||||
mappedRecord,
|
|
||||||
);
|
|
||||||
|
|
||||||
// Insert record using Drizzle
|
|
||||||
try {
|
|
||||||
if (tableName === "ssh_data") {
|
|
||||||
await db
|
|
||||||
.insert(sshData)
|
|
||||||
.values(processedRecord)
|
|
||||||
.onConflictDoNothing();
|
|
||||||
} else if (tableName === "ssh_credentials") {
|
|
||||||
await db
|
|
||||||
.insert(sshCredentials)
|
|
||||||
.values(processedRecord)
|
|
||||||
.onConflictDoNothing();
|
|
||||||
}
|
|
||||||
} catch (error) {
|
|
||||||
// Handle any SQL errors gracefully
|
|
||||||
if (
|
|
||||||
error instanceof Error &&
|
|
||||||
error.message.includes("UNIQUE constraint failed")
|
|
||||||
) {
|
|
||||||
databaseLogger.debug(
|
|
||||||
`Skipping duplicate record in ${tableName}`,
|
|
||||||
{
|
|
||||||
operation: "duplicate_record_skip",
|
|
||||||
table: tableName,
|
|
||||||
},
|
|
||||||
);
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
throw error;
|
|
||||||
}
|
|
||||||
} catch (error) {
|
|
||||||
const errorMsg = `Failed to import record in ${tableName}: ${error instanceof Error ? error.message : "Unknown error"}`;
|
|
||||||
result.errors.push(errorMsg);
|
|
||||||
databaseLogger.error("Failed to import record", error, {
|
|
||||||
operation: "record_sqlite_import_failed",
|
|
||||||
table: tableName,
|
|
||||||
recordId: (record as any).id,
|
|
||||||
});
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
result.imported.tables++;
|
|
||||||
result.imported.records += records.length;
|
|
||||||
|
|
||||||
databaseLogger.debug(`SQLite table ${tableName} imported`, {
|
|
||||||
operation: "table_sqlite_import_complete",
|
|
||||||
table: tableName,
|
|
||||||
recordCount: records.length,
|
|
||||||
});
|
|
||||||
} catch (error) {
|
|
||||||
const errorMsg = `Failed to import table ${tableName}: ${error instanceof Error ? error.message : "Unknown error"}`;
|
|
||||||
result.errors.push(errorMsg);
|
|
||||||
databaseLogger.error("Failed to import SQLite table", error, {
|
|
||||||
operation: "table_sqlite_import_failed",
|
|
||||||
table: tableName,
|
|
||||||
});
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Close import database
|
|
||||||
importDb.close();
|
|
||||||
|
|
||||||
// Check if import was successful
|
|
||||||
result.success = result.errors.length === 0;
|
|
||||||
|
|
||||||
if (result.success) {
|
|
||||||
databaseLogger.success(
|
|
||||||
"SQLite database import completed successfully",
|
|
||||||
{
|
|
||||||
operation: "database_sqlite_import_complete",
|
|
||||||
importPath,
|
|
||||||
tablesImported: result.imported.tables,
|
|
||||||
recordsImported: result.imported.records,
|
|
||||||
warnings: result.warnings.length,
|
|
||||||
},
|
|
||||||
);
|
|
||||||
} else {
|
|
||||||
databaseLogger.error(
|
|
||||||
"SQLite database import completed with errors",
|
|
||||||
undefined,
|
|
||||||
{
|
|
||||||
operation: "database_sqlite_import_partial",
|
|
||||||
importPath,
|
|
||||||
tablesImported: result.imported.tables,
|
|
||||||
recordsImported: result.imported.records,
|
|
||||||
errorCount: result.errors.length,
|
|
||||||
warningCount: result.warnings.length,
|
|
||||||
},
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
return result;
|
|
||||||
} catch (error) {
|
|
||||||
databaseLogger.error("SQLite database import failed", error, {
|
|
||||||
operation: "database_sqlite_import_failed",
|
|
||||||
importPath,
|
|
||||||
});
|
|
||||||
throw new Error(
|
|
||||||
`SQLite database import failed: ${error instanceof Error ? error.message : "Unknown error"}`,
|
|
||||||
);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Validate SQLite export file
|
|
||||||
*/
|
|
||||||
static validateExportFile(exportPath: string): {
|
|
||||||
valid: boolean;
|
|
||||||
metadata?: ExportMetadata;
|
|
||||||
errors: string[];
|
|
||||||
} {
|
|
||||||
const result = {
|
|
||||||
valid: false,
|
|
||||||
metadata: undefined as ExportMetadata | undefined,
|
|
||||||
errors: [] as string[],
|
|
||||||
};
|
|
||||||
|
|
||||||
try {
|
|
||||||
if (!fs.existsSync(exportPath)) {
|
|
||||||
result.errors.push("Export file does not exist");
|
|
||||||
return result;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!exportPath.endsWith(this.EXPORT_FILE_EXTENSION)) {
|
|
||||||
result.errors.push("Invalid export file extension");
|
|
||||||
return result;
|
|
||||||
}
|
|
||||||
|
|
||||||
const exportDb = new Database(exportPath, { readonly: true });
|
|
||||||
|
|
||||||
try {
|
|
||||||
const metadataResult = exportDb
|
|
||||||
.prepare(
|
|
||||||
`
|
|
||||||
SELECT value FROM ${this.METADATA_TABLE} WHERE key = 'metadata'
|
|
||||||
`,
|
|
||||||
)
|
|
||||||
.get() as { value: string } | undefined;
|
|
||||||
|
|
||||||
if (!metadataResult) {
|
|
||||||
result.errors.push("Missing export metadata");
|
|
||||||
return result;
|
|
||||||
}
|
|
||||||
|
|
||||||
const metadata: ExportMetadata = JSON.parse(metadataResult.value);
|
|
||||||
|
|
||||||
if (metadata.version !== this.VERSION) {
|
|
||||||
result.errors.push(`Unsupported export version: ${metadata.version}`);
|
|
||||||
return result;
|
|
||||||
}
|
|
||||||
|
|
||||||
result.valid = true;
|
|
||||||
result.metadata = metadata;
|
|
||||||
} finally {
|
|
||||||
exportDb.close();
|
|
||||||
}
|
|
||||||
|
|
||||||
return result;
|
|
||||||
} catch (error) {
|
|
||||||
result.errors.push(
|
|
||||||
`Failed to validate export file: ${error instanceof Error ? error.message : "Unknown error"}`,
|
|
||||||
);
|
|
||||||
return result;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Get export file info without importing
|
|
||||||
*/
|
|
||||||
static getExportInfo(exportPath: string): ExportMetadata | null {
|
|
||||||
const validation = this.validateExportFile(exportPath);
|
|
||||||
return validation.valid ? validation.metadata! : null;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Create backup of current database
|
|
||||||
*/
|
|
||||||
private static async createCurrentDatabaseBackup(): Promise<string> {
|
|
||||||
const timestamp = new Date().toISOString().replace(/[:.]/g, "-");
|
|
||||||
const backupDir = path.join(databasePaths.directory, "backups");
|
|
||||||
|
|
||||||
if (!fs.existsSync(backupDir)) {
|
|
||||||
fs.mkdirSync(backupDir, { recursive: true });
|
|
||||||
}
|
|
||||||
|
|
||||||
// Create SQLite backup
|
|
||||||
const backupPath = path.join(
|
|
||||||
backupDir,
|
|
||||||
`database-backup-${timestamp}.sqlite`,
|
|
||||||
);
|
|
||||||
|
|
||||||
// Copy current database file
|
|
||||||
fs.copyFileSync(databasePaths.main, backupPath);
|
|
||||||
|
|
||||||
return backupPath;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Get table schema for database operations
|
|
||||||
* NOTE: This method is deprecated - we now use raw SQL to avoid FK issues
|
|
||||||
*/
|
|
||||||
private static getTableSchema(tableName: string) {
|
|
||||||
return null; // No longer used
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Check if a field should be tracked as encrypted
|
|
||||||
*/
|
|
||||||
private static shouldTrackEncryptedField(
|
|
||||||
tableName: string,
|
|
||||||
fieldName: string,
|
|
||||||
): boolean {
|
|
||||||
try {
|
|
||||||
return FieldEncryption.shouldEncryptField(tableName, fieldName);
|
|
||||||
} catch {
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
export { DatabaseSQLiteExport };
|
|
||||||
export type { ExportMetadata, ImportResult };
|
|
||||||
@@ -1,242 +0,0 @@
|
|||||||
import { db } from "../database/db/index.js";
|
|
||||||
import { DatabaseEncryption } from "./database-encryption.js";
|
|
||||||
import { databaseLogger } from "./logger.js";
|
|
||||||
import type { SQLiteTable } from "drizzle-orm/sqlite-core";
|
|
||||||
|
|
||||||
type TableName = "users" | "ssh_data" | "ssh_credentials";
|
|
||||||
|
|
||||||
/**
 * Thin wrappers over Drizzle CRUD operations that transparently encrypt
 * sensitive fields on write and decrypt them on read, via
 * DatabaseEncryption. Which fields are sensitive is decided per table
 * by the encryption layer, keyed on `tableName`.
 */
class EncryptedDBOperations {
  /**
   * Insert a record, encrypting sensitive fields before the write.
   * The row returned by the database is decrypted again so the caller
   * always sees plaintext values.
   *
   * @param table - Drizzle table object to insert into.
   * @param tableName - Logical table name used to select encryption rules.
   * @param data - Plaintext record to store.
   * @returns The inserted record with sensitive fields decrypted.
   * @throws Rethrows any encryption or database error after logging.
   */
  static async insert<T extends Record<string, any>>(
    table: SQLiteTable<any>,
    tableName: TableName,
    data: T,
  ): Promise<T> {
    try {
      const encryptedData = DatabaseEncryption.encryptRecord(tableName, data);
      const result = await db.insert(table).values(encryptedData).returning();

      // Decrypt the returned data to ensure consistency
      const decryptedResult = DatabaseEncryption.decryptRecord(
        tableName,
        result[0],
      );

      databaseLogger.debug(`Inserted encrypted record into ${tableName}`, {
        operation: "encrypted_insert",
        table: tableName,
      });

      return decryptedResult as T;
    } catch (error) {
      databaseLogger.error(
        `Failed to insert encrypted record into ${tableName}`,
        error,
        {
          operation: "encrypted_insert_failed",
          table: tableName,
        },
      );
      throw error;
    }
  }

  /**
   * Await a Drizzle select query and decrypt every returned row.
   *
   * @param query - An awaitable Drizzle query producing an array of rows.
   * @param tableName - Logical table name used to select decryption rules.
   * @returns Rows with sensitive fields decrypted.
   * @throws Rethrows any query or decryption error after logging.
   */
  static async select<T extends Record<string, any>>(
    query: any,
    tableName: TableName,
  ): Promise<T[]> {
    try {
      const results = await query;
      const decryptedResults = DatabaseEncryption.decryptRecords(
        tableName,
        results,
      );

      return decryptedResults;
    } catch (error) {
      databaseLogger.error(
        `Failed to select/decrypt records from ${tableName}`,
        error,
        {
          operation: "encrypted_select_failed",
          table: tableName,
        },
      );
      throw error;
    }
  }

  /**
   * Await a Drizzle query expected to produce at most one row and decrypt it.
   *
   * @returns The decrypted row, or undefined when the query matched nothing.
   * @throws Rethrows any query or decryption error after logging.
   */
  static async selectOne<T extends Record<string, any>>(
    query: any,
    tableName: TableName,
  ): Promise<T | undefined> {
    try {
      const result = await query;
      if (!result) return undefined;

      const decryptedResult = DatabaseEncryption.decryptRecord(
        tableName,
        result,
      );

      return decryptedResult;
    } catch (error) {
      databaseLogger.error(
        `Failed to select/decrypt single record from ${tableName}`,
        error,
        {
          operation: "encrypted_select_one_failed",
          table: tableName,
        },
      );
      throw error;
    }
  }

  /**
   * Update matching rows, encrypting sensitive fields in `data` first.
   *
   * NOTE(review): unlike insert(), the rows returned here are NOT
   * decrypted before being handed back — callers receive the stored
   * (encrypted) values. Confirm whether this asymmetry is intentional.
   *
   * @param table - Drizzle table object to update.
   * @param tableName - Logical table name used to select encryption rules.
   * @param where - Drizzle where-clause selecting the rows to update.
   * @param data - Partial plaintext record of fields to change.
   * @returns The updated rows as returned by the database.
   * @throws Rethrows any encryption or database error after logging.
   */
  static async update<T extends Record<string, any>>(
    table: SQLiteTable<any>,
    tableName: TableName,
    where: any,
    data: Partial<T>,
  ): Promise<T[]> {
    try {
      const encryptedData = DatabaseEncryption.encryptRecord(tableName, data);
      const result = await db
        .update(table)
        .set(encryptedData)
        .where(where)
        .returning();

      databaseLogger.debug(`Updated encrypted record in ${tableName}`, {
        operation: "encrypted_update",
        table: tableName,
      });

      return result as T[];
    } catch (error) {
      databaseLogger.error(
        `Failed to update encrypted record in ${tableName}`,
        error,
        {
          operation: "encrypted_update_failed",
          table: tableName,
        },
      );
      throw error;
    }
  }

  /**
   * Delete matching rows. No encryption handling is needed for deletes;
   * the returned rows are passed through as stored.
   *
   * @returns The deleted rows as returned by the database.
   * @throws Rethrows any database error after logging.
   */
  static async delete(
    table: SQLiteTable<any>,
    tableName: TableName,
    where: any,
  ): Promise<any[]> {
    try {
      const result = await db.delete(table).where(where).returning();

      databaseLogger.debug(`Deleted record from ${tableName}`, {
        operation: "encrypted_delete",
        table: tableName,
      });

      return result;
    } catch (error) {
      databaseLogger.error(`Failed to delete record from ${tableName}`, error, {
        operation: "encrypted_delete_failed",
        table: tableName,
      });
      throw error;
    }
  }

  /**
   * One-off migration: run every row of a table through
   * DatabaseEncryption.migrateRecord and write back only the rows that
   * actually changed. Per-record failures are logged and skipped so one
   * bad row cannot abort the whole migration.
   *
   * @param tableName - One of "users", "ssh_data", "ssh_credentials".
   * @returns Number of rows that were rewritten.
   * @throws If the table name is unknown or the migration fails wholesale.
   */
  static async migrateExistingRecords(tableName: TableName): Promise<number> {
    let migratedCount = 0;

    try {
      databaseLogger.info(`Starting encryption migration for ${tableName}`, {
        operation: "migration_start",
        table: tableName,
      });

      let table: SQLiteTable<any>;
      let records: any[];

      // Schema modules are imported lazily to avoid a circular dependency
      // at module load time (presumably — TODO confirm against imports).
      switch (tableName) {
        case "users":
          const { users } = await import("../database/db/schema.js");
          table = users;
          records = await db.select().from(users);
          break;
        case "ssh_data":
          const { sshData } = await import("../database/db/schema.js");
          table = sshData;
          records = await db.select().from(sshData);
          break;
        case "ssh_credentials":
          const { sshCredentials } = await import("../database/db/schema.js");
          table = sshCredentials;
          records = await db.select().from(sshCredentials);
          break;
        default:
          throw new Error(`Unknown table: ${tableName}`);
      }

      for (const record of records) {
        try {
          const migratedRecord = await DatabaseEncryption.migrateRecord(
            tableName,
            record,
          );

          // JSON comparison detects whether migration changed anything;
          // unchanged rows are not rewritten.
          if (JSON.stringify(migratedRecord) !== JSON.stringify(record)) {
            const { eq } = await import("drizzle-orm");
            await db
              .update(table)
              .set(migratedRecord)
              .where(eq((table as any).id, record.id));
            migratedCount++;
          }
        } catch (error) {
          databaseLogger.error(
            `Failed to migrate record ${record.id} in ${tableName}`,
            error,
            {
              operation: "migration_record_failed",
              table: tableName,
              recordId: record.id,
            },
          );
        }
      }

      databaseLogger.success(`Migration completed for ${tableName}`, {
        operation: "migration_complete",
        table: tableName,
        migratedCount,
        totalRecords: records.length,
      });

      return migratedCount;
    } catch (error) {
      databaseLogger.error(`Migration failed for ${tableName}`, error, {
        operation: "migration_failed",
        table: tableName,
      });
      throw error;
    }
  }

  /**
   * Report whether the encryption subsystem is enabled and its
   * configuration is valid. Never throws; failures return false.
   */
  static async healthCheck(): Promise<boolean> {
    try {
      const status = DatabaseEncryption.getEncryptionStatus();
      return status.configValid && status.enabled;
    } catch (error) {
      databaseLogger.error("Encryption health check failed", error, {
        operation: "health_check_failed",
      });
      return false;
    }
  }
}
|
|
||||||
|
|
||||||
export { EncryptedDBOperations };
|
|
||||||
export type { TableName };
|
|
||||||
@@ -1,353 +0,0 @@
|
|||||||
import crypto from "crypto";
|
|
||||||
import { db } from "../database/db/index.js";
|
|
||||||
import { settings } from "../database/db/schema.js";
|
|
||||||
import { eq } from "drizzle-orm";
|
|
||||||
import { databaseLogger } from "./logger.js";
|
|
||||||
import { MasterKeyProtection } from "./master-key-protection.js";
|
|
||||||
|
|
||||||
/**
 * Summary of the stored database-encryption key, without exposing the
 * key material itself.
 */
interface EncryptionKeyInfo {
  hasKey: boolean; // whether a key is currently stored/available
  keyId?: string; // identifier assigned when the key was stored
  createdAt?: string; // ISO-8601 timestamp of key creation
  algorithm: string; // cipher name, e.g. "aes-256-gcm"
}
|
|
||||||
|
|
||||||
class EncryptionKeyManager {
|
|
||||||
private static instance: EncryptionKeyManager;
|
|
||||||
private currentKey: string | null = null;
|
|
||||||
private keyInfo: EncryptionKeyInfo | null = null;
|
|
||||||
|
|
||||||
private constructor() {}
|
|
||||||
|
|
||||||
static getInstance(): EncryptionKeyManager {
|
|
||||||
if (!this.instance) {
|
|
||||||
this.instance = new EncryptionKeyManager();
|
|
||||||
}
|
|
||||||
return this.instance;
|
|
||||||
}
|
|
||||||
|
|
||||||
private encodeKey(key: string): string {
|
|
||||||
return MasterKeyProtection.encryptMasterKey(key);
|
|
||||||
}
|
|
||||||
|
|
||||||
private decodeKey(encodedKey: string): string {
|
|
||||||
if (MasterKeyProtection.isProtectedKey(encodedKey)) {
|
|
||||||
return MasterKeyProtection.decryptMasterKey(encodedKey);
|
|
||||||
}
|
|
||||||
|
|
||||||
databaseLogger.warn(
|
|
||||||
"Found legacy base64-encoded key, migrating to KEK protection",
|
|
||||||
{
|
|
||||||
operation: "key_migration_legacy",
|
|
||||||
},
|
|
||||||
);
|
|
||||||
const buffer = Buffer.from(encodedKey, "base64");
|
|
||||||
return buffer.toString("hex");
|
|
||||||
}
|
|
||||||
|
|
||||||
async initializeKey(): Promise<string> {
|
|
||||||
try {
|
|
||||||
let existingKey = await this.getStoredKey();
|
|
||||||
|
|
||||||
if (existingKey) {
|
|
||||||
databaseLogger.success("Found existing encryption key", {
|
|
||||||
operation: "key_init",
|
|
||||||
hasKey: true,
|
|
||||||
});
|
|
||||||
this.currentKey = existingKey;
|
|
||||||
return existingKey;
|
|
||||||
}
|
|
||||||
|
|
||||||
const environmentKey = process.env.DB_ENCRYPTION_KEY;
|
|
||||||
if (environmentKey && environmentKey !== "default-key-change-me") {
|
|
||||||
if (!this.validateKeyStrength(environmentKey)) {
|
|
||||||
databaseLogger.error(
|
|
||||||
"Environment encryption key is too weak",
|
|
||||||
undefined,
|
|
||||||
{
|
|
||||||
operation: "key_init",
|
|
||||||
source: "environment",
|
|
||||||
keyLength: environmentKey.length,
|
|
||||||
},
|
|
||||||
);
|
|
||||||
throw new Error(
|
|
||||||
"DB_ENCRYPTION_KEY is too weak. Must be at least 32 characters with good entropy.",
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
databaseLogger.info("Using encryption key from environment variable", {
|
|
||||||
operation: "key_init",
|
|
||||||
source: "environment",
|
|
||||||
});
|
|
||||||
|
|
||||||
await this.storeKey(environmentKey);
|
|
||||||
this.currentKey = environmentKey;
|
|
||||||
return environmentKey;
|
|
||||||
}
|
|
||||||
|
|
||||||
const newKey = await this.generateNewKey();
|
|
||||||
databaseLogger.warn(
|
|
||||||
"Generated new encryption key - PLEASE BACKUP THIS KEY",
|
|
||||||
{
|
|
||||||
operation: "key_init",
|
|
||||||
generated: true,
|
|
||||||
keyPreview: newKey.substring(0, 8) + "...",
|
|
||||||
},
|
|
||||||
);
|
|
||||||
|
|
||||||
return newKey;
|
|
||||||
} catch (error) {
|
|
||||||
databaseLogger.error("Failed to initialize encryption key", error, {
|
|
||||||
operation: "key_init_failed",
|
|
||||||
});
|
|
||||||
throw error;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
async generateNewKey(): Promise<string> {
|
|
||||||
const newKey = crypto.randomBytes(32).toString("hex");
|
|
||||||
const keyId = crypto.randomBytes(8).toString("hex");
|
|
||||||
|
|
||||||
await this.storeKey(newKey, keyId);
|
|
||||||
this.currentKey = newKey;
|
|
||||||
|
|
||||||
databaseLogger.success("Generated new encryption key", {
|
|
||||||
operation: "key_generated",
|
|
||||||
keyId,
|
|
||||||
keyLength: newKey.length,
|
|
||||||
});
|
|
||||||
|
|
||||||
return newKey;
|
|
||||||
}
|
|
||||||
|
|
||||||
private async storeKey(key: string, keyId?: string): Promise<void> {
|
|
||||||
const now = new Date().toISOString();
|
|
||||||
const id = keyId || crypto.randomBytes(8).toString("hex");
|
|
||||||
|
|
||||||
const keyData = {
|
|
||||||
key: this.encodeKey(key),
|
|
||||||
keyId: id,
|
|
||||||
createdAt: now,
|
|
||||||
algorithm: "aes-256-gcm",
|
|
||||||
};
|
|
||||||
|
|
||||||
const encodedData = JSON.stringify(keyData);
|
|
||||||
|
|
||||||
try {
|
|
||||||
const existing = await db
|
|
||||||
.select()
|
|
||||||
.from(settings)
|
|
||||||
.where(eq(settings.key, "db_encryption_key"));
|
|
||||||
|
|
||||||
if (existing.length > 0) {
|
|
||||||
await db
|
|
||||||
.update(settings)
|
|
||||||
.set({ value: encodedData })
|
|
||||||
.where(eq(settings.key, "db_encryption_key"));
|
|
||||||
} else {
|
|
||||||
await db.insert(settings).values({
|
|
||||||
key: "db_encryption_key",
|
|
||||||
value: encodedData,
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
const existingCreated = await db
|
|
||||||
.select()
|
|
||||||
.from(settings)
|
|
||||||
.where(eq(settings.key, "encryption_key_created"));
|
|
||||||
|
|
||||||
if (existingCreated.length > 0) {
|
|
||||||
await db
|
|
||||||
.update(settings)
|
|
||||||
.set({ value: now })
|
|
||||||
.where(eq(settings.key, "encryption_key_created"));
|
|
||||||
} else {
|
|
||||||
await db.insert(settings).values({
|
|
||||||
key: "encryption_key_created",
|
|
||||||
value: now,
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
this.keyInfo = {
|
|
||||||
hasKey: true,
|
|
||||||
keyId: id,
|
|
||||||
createdAt: now,
|
|
||||||
algorithm: "aes-256-gcm",
|
|
||||||
};
|
|
||||||
} catch (error) {
|
|
||||||
databaseLogger.error("Failed to store encryption key", error, {
|
|
||||||
operation: "key_store_failed",
|
|
||||||
});
|
|
||||||
throw error;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
private async getStoredKey(): Promise<string | null> {
|
|
||||||
try {
|
|
||||||
const result = await db
|
|
||||||
.select()
|
|
||||||
.from(settings)
|
|
||||||
.where(eq(settings.key, "db_encryption_key"));
|
|
||||||
|
|
||||||
if (result.length === 0) {
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
|
|
||||||
const encodedData = result[0].value;
|
|
||||||
let keyData;
|
|
||||||
|
|
||||||
try {
|
|
||||||
keyData = JSON.parse(encodedData);
|
|
||||||
} catch {
|
|
||||||
databaseLogger.warn("Found legacy base64-encoded key data, migrating", {
|
|
||||||
operation: "key_data_migration_legacy",
|
|
||||||
});
|
|
||||||
keyData = JSON.parse(Buffer.from(encodedData, "base64").toString());
|
|
||||||
}
|
|
||||||
|
|
||||||
this.keyInfo = {
|
|
||||||
hasKey: true,
|
|
||||||
keyId: keyData.keyId,
|
|
||||||
createdAt: keyData.createdAt,
|
|
||||||
algorithm: keyData.algorithm,
|
|
||||||
};
|
|
||||||
|
|
||||||
const decodedKey = this.decodeKey(keyData.key);
|
|
||||||
|
|
||||||
if (!MasterKeyProtection.isProtectedKey(keyData.key)) {
|
|
||||||
databaseLogger.info("Auto-migrating legacy key to KEK protection", {
|
|
||||||
operation: "key_auto_migration",
|
|
||||||
keyId: keyData.keyId,
|
|
||||||
});
|
|
||||||
await this.storeKey(decodedKey, keyData.keyId);
|
|
||||||
}
|
|
||||||
|
|
||||||
return decodedKey;
|
|
||||||
} catch (error) {
|
|
||||||
databaseLogger.error("Failed to retrieve stored encryption key", error, {
|
|
||||||
operation: "key_retrieve_failed",
|
|
||||||
});
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
getCurrentKey(): string | null {
|
|
||||||
return this.currentKey;
|
|
||||||
}
|
|
||||||
|
|
||||||
async getKeyInfo(): Promise<EncryptionKeyInfo> {
|
|
||||||
if (!this.keyInfo) {
|
|
||||||
const hasKey = (await this.getStoredKey()) !== null;
|
|
||||||
return {
|
|
||||||
hasKey,
|
|
||||||
algorithm: "aes-256-gcm",
|
|
||||||
};
|
|
||||||
}
|
|
||||||
return this.keyInfo;
|
|
||||||
}
|
|
||||||
|
|
||||||
async regenerateKey(): Promise<string> {
|
|
||||||
databaseLogger.info("Regenerating encryption key", {
|
|
||||||
operation: "key_regenerate",
|
|
||||||
});
|
|
||||||
|
|
||||||
const oldKeyInfo = await this.getKeyInfo();
|
|
||||||
const newKey = await this.generateNewKey();
|
|
||||||
|
|
||||||
databaseLogger.warn(
|
|
||||||
"Encryption key regenerated - ALL DATA MUST BE RE-ENCRYPTED",
|
|
||||||
{
|
|
||||||
operation: "key_regenerated",
|
|
||||||
oldKeyId: oldKeyInfo.keyId,
|
|
||||||
newKeyId: this.keyInfo?.keyId,
|
|
||||||
},
|
|
||||||
);
|
|
||||||
|
|
||||||
return newKey;
|
|
||||||
}
|
|
||||||
|
|
||||||
private validateKeyStrength(key: string): boolean {
|
|
||||||
if (key.length < 32) return false;
|
|
||||||
|
|
||||||
const hasLower = /[a-z]/.test(key);
|
|
||||||
const hasUpper = /[A-Z]/.test(key);
|
|
||||||
const hasDigit = /\d/.test(key);
|
|
||||||
const hasSpecial = /[!@#$%^&*()_+\-=\[\]{};':"\\|,.<>\/?]/.test(key);
|
|
||||||
|
|
||||||
const entropyTest = new Set(key).size / key.length;
|
|
||||||
|
|
||||||
const complexity =
|
|
||||||
Number(hasLower) +
|
|
||||||
Number(hasUpper) +
|
|
||||||
Number(hasDigit) +
|
|
||||||
Number(hasSpecial);
|
|
||||||
return complexity >= 3 && entropyTest > 0.4;
|
|
||||||
}
|
|
||||||
|
|
||||||
async validateKey(key?: string): Promise<boolean> {
|
|
||||||
const testKey = key || this.currentKey;
|
|
||||||
if (!testKey) return false;
|
|
||||||
|
|
||||||
try {
|
|
||||||
const testData = "validation-test-" + Date.now();
|
|
||||||
const testBuffer = Buffer.from(testKey, "hex");
|
|
||||||
|
|
||||||
if (testBuffer.length !== 32) {
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
|
|
||||||
const iv = crypto.randomBytes(16);
|
|
||||||
const cipher = crypto.createCipheriv(
|
|
||||||
"aes-256-gcm",
|
|
||||||
testBuffer,
|
|
||||||
iv,
|
|
||||||
) as any;
|
|
||||||
cipher.update(testData, "utf8");
|
|
||||||
cipher.final();
|
|
||||||
cipher.getAuthTag();
|
|
||||||
|
|
||||||
return true;
|
|
||||||
} catch {
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
isInitialized(): boolean {
|
|
||||||
return this.currentKey !== null;
|
|
||||||
}
|
|
||||||
|
|
||||||
async getEncryptionStatus() {
|
|
||||||
const keyInfo = await this.getKeyInfo();
|
|
||||||
const isValid = await this.validateKey();
|
|
||||||
const kekProtected = await this.isKEKProtected();
|
|
||||||
|
|
||||||
return {
|
|
||||||
hasKey: keyInfo.hasKey,
|
|
||||||
keyValid: isValid,
|
|
||||||
keyId: keyInfo.keyId,
|
|
||||||
createdAt: keyInfo.createdAt,
|
|
||||||
algorithm: keyInfo.algorithm,
|
|
||||||
initialized: this.isInitialized(),
|
|
||||||
kekProtected,
|
|
||||||
kekValid: kekProtected ? MasterKeyProtection.validateProtection() : false,
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
private async isKEKProtected(): Promise<boolean> {
|
|
||||||
try {
|
|
||||||
const result = await db
|
|
||||||
.select()
|
|
||||||
.from(settings)
|
|
||||||
.where(eq(settings.key, "db_encryption_key"));
|
|
||||||
if (result.length === 0) return false;
|
|
||||||
|
|
||||||
const keyData = JSON.parse(result[0].value);
|
|
||||||
return MasterKeyProtection.isProtectedKey(keyData.key);
|
|
||||||
} catch {
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
export { EncryptionKeyManager };
|
|
||||||
export type { EncryptionKeyInfo };
|
|
||||||
@@ -1,435 +0,0 @@
|
|||||||
#!/usr/bin/env node
|
|
||||||
import { DatabaseEncryption } from "./database-encryption.js";
|
|
||||||
import { EncryptedDBOperations } from "./encrypted-db-operations.js";
|
|
||||||
import { EncryptionKeyManager } from "./encryption-key-manager.js";
|
|
||||||
import { databaseLogger } from "./logger.js";
|
|
||||||
import { db } from "../database/db/index.js";
|
|
||||||
import { settings } from "../database/db/schema.js";
|
|
||||||
import { eq, sql } from "drizzle-orm";
|
|
||||||
|
|
||||||
interface MigrationConfig {
|
|
||||||
masterPassword?: string;
|
|
||||||
forceEncryption?: boolean;
|
|
||||||
backupEnabled?: boolean;
|
|
||||||
dryRun?: boolean;
|
|
||||||
}
|
|
||||||
|
|
||||||
class EncryptionMigration {
|
|
||||||
private config: MigrationConfig;
|
|
||||||
|
|
||||||
constructor(config: MigrationConfig = {}) {
|
|
||||||
this.config = {
|
|
||||||
masterPassword: config.masterPassword,
|
|
||||||
forceEncryption: config.forceEncryption ?? false,
|
|
||||||
backupEnabled: config.backupEnabled ?? true,
|
|
||||||
dryRun: config.dryRun ?? false,
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
async runMigration(): Promise<void> {
|
|
||||||
databaseLogger.info("Starting database encryption migration", {
|
|
||||||
operation: "migration_start",
|
|
||||||
dryRun: this.config.dryRun,
|
|
||||||
forceEncryption: this.config.forceEncryption,
|
|
||||||
});
|
|
||||||
|
|
||||||
try {
|
|
||||||
await this.validatePrerequisites();
|
|
||||||
|
|
||||||
if (this.config.backupEnabled && !this.config.dryRun) {
|
|
||||||
await this.createBackup();
|
|
||||||
}
|
|
||||||
|
|
||||||
await this.initializeEncryption();
|
|
||||||
await this.migrateTables();
|
|
||||||
await this.updateSettings();
|
|
||||||
await this.verifyMigration();
|
|
||||||
|
|
||||||
databaseLogger.success(
|
|
||||||
"Database encryption migration completed successfully",
|
|
||||||
{
|
|
||||||
operation: "migration_complete",
|
|
||||||
},
|
|
||||||
);
|
|
||||||
} catch (error) {
|
|
||||||
databaseLogger.error("Migration failed", error, {
|
|
||||||
operation: "migration_failed",
|
|
||||||
});
|
|
||||||
throw error;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
private async validatePrerequisites(): Promise<void> {
|
|
||||||
databaseLogger.info("Validating migration prerequisites", {
|
|
||||||
operation: "validation",
|
|
||||||
});
|
|
||||||
|
|
||||||
// Check if KEK-managed encryption key exists
|
|
||||||
const keyManager = EncryptionKeyManager.getInstance();
|
|
||||||
|
|
||||||
if (!this.config.masterPassword) {
|
|
||||||
// Try to get current key from KEK manager
|
|
||||||
try {
|
|
||||||
const currentKey = keyManager.getCurrentKey();
|
|
||||||
if (!currentKey) {
|
|
||||||
// Initialize key if not available
|
|
||||||
const initializedKey = await keyManager.initializeKey();
|
|
||||||
this.config.masterPassword = initializedKey;
|
|
||||||
} else {
|
|
||||||
this.config.masterPassword = currentKey;
|
|
||||||
}
|
|
||||||
} catch (error) {
|
|
||||||
throw new Error(
|
|
||||||
"Failed to retrieve encryption key from KEK manager. Please ensure encryption is properly initialized.",
|
|
||||||
);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Validate key strength
|
|
||||||
if (this.config.masterPassword.length < 16) {
|
|
||||||
throw new Error("Master password must be at least 16 characters long");
|
|
||||||
}
|
|
||||||
|
|
||||||
// Test database connection
|
|
||||||
try {
|
|
||||||
await db.select().from(settings).limit(1);
|
|
||||||
} catch (error) {
|
|
||||||
throw new Error("Database connection failed");
|
|
||||||
}
|
|
||||||
|
|
||||||
databaseLogger.success("Prerequisites validation passed", {
|
|
||||||
operation: "validation_complete",
|
|
||||||
keySource: "kek_manager",
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
private async createBackup(): Promise<void> {
|
|
||||||
databaseLogger.info("Creating database backup before migration", {
|
|
||||||
operation: "backup_start",
|
|
||||||
});
|
|
||||||
|
|
||||||
try {
|
|
||||||
const fs = await import("fs");
|
|
||||||
const path = await import("path");
|
|
||||||
const dataDir = process.env.DATA_DIR || "./db/data";
|
|
||||||
const dbPath = path.join(dataDir, "db.sqlite");
|
|
||||||
const backupPath = path.join(dataDir, `db-backup-${Date.now()}.sqlite`);
|
|
||||||
|
|
||||||
if (fs.existsSync(dbPath)) {
|
|
||||||
fs.copyFileSync(dbPath, backupPath);
|
|
||||||
databaseLogger.success(`Database backup created: ${backupPath}`, {
|
|
||||||
operation: "backup_complete",
|
|
||||||
backupPath,
|
|
||||||
});
|
|
||||||
}
|
|
||||||
} catch (error) {
|
|
||||||
databaseLogger.error("Failed to create backup", error, {
|
|
||||||
operation: "backup_failed",
|
|
||||||
});
|
|
||||||
throw error;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
private async initializeEncryption(): Promise<void> {
|
|
||||||
databaseLogger.info("Initializing encryption system", {
|
|
||||||
operation: "encryption_init",
|
|
||||||
});
|
|
||||||
|
|
||||||
DatabaseEncryption.initialize({
|
|
||||||
masterPassword: this.config.masterPassword!,
|
|
||||||
encryptionEnabled: true,
|
|
||||||
forceEncryption: this.config.forceEncryption,
|
|
||||||
migrateOnAccess: true,
|
|
||||||
});
|
|
||||||
|
|
||||||
const isHealthy = await EncryptedDBOperations.healthCheck();
|
|
||||||
if (!isHealthy) {
|
|
||||||
throw new Error("Encryption system health check failed");
|
|
||||||
}
|
|
||||||
|
|
||||||
databaseLogger.success("Encryption system initialized successfully", {
|
|
||||||
operation: "encryption_init_complete",
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
private async migrateTables(): Promise<void> {
|
|
||||||
const tables: Array<"users" | "ssh_data" | "ssh_credentials"> = [
|
|
||||||
"users",
|
|
||||||
"ssh_data",
|
|
||||||
"ssh_credentials",
|
|
||||||
];
|
|
||||||
|
|
||||||
let totalMigrated = 0;
|
|
||||||
|
|
||||||
for (const tableName of tables) {
|
|
||||||
databaseLogger.info(`Starting migration for table: ${tableName}`, {
|
|
||||||
operation: "table_migration_start",
|
|
||||||
table: tableName,
|
|
||||||
});
|
|
||||||
|
|
||||||
try {
|
|
||||||
if (this.config.dryRun) {
|
|
||||||
databaseLogger.info(`[DRY RUN] Would migrate table: ${tableName}`, {
|
|
||||||
operation: "dry_run_table",
|
|
||||||
table: tableName,
|
|
||||||
});
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
|
|
||||||
const migratedCount =
|
|
||||||
await EncryptedDBOperations.migrateExistingRecords(tableName);
|
|
||||||
totalMigrated += migratedCount;
|
|
||||||
|
|
||||||
databaseLogger.success(`Migration completed for table: ${tableName}`, {
|
|
||||||
operation: "table_migration_complete",
|
|
||||||
table: tableName,
|
|
||||||
migratedCount,
|
|
||||||
});
|
|
||||||
} catch (error) {
|
|
||||||
databaseLogger.error(
|
|
||||||
`Migration failed for table: ${tableName}`,
|
|
||||||
error,
|
|
||||||
{
|
|
||||||
operation: "table_migration_failed",
|
|
||||||
table: tableName,
|
|
||||||
},
|
|
||||||
);
|
|
||||||
throw error;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
databaseLogger.success(`All tables migrated successfully`, {
|
|
||||||
operation: "all_tables_migrated",
|
|
||||||
totalMigrated,
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
private async updateSettings(): Promise<void> {
|
|
||||||
if (this.config.dryRun) {
|
|
||||||
databaseLogger.info("[DRY RUN] Would update encryption settings", {
|
|
||||||
operation: "dry_run_settings",
|
|
||||||
});
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
try {
|
|
||||||
const encryptionSettings = [
|
|
||||||
{ key: "encryption_enabled", value: "true" },
|
|
||||||
{
|
|
||||||
key: "encryption_migration_completed",
|
|
||||||
value: new Date().toISOString(),
|
|
||||||
},
|
|
||||||
{ key: "encryption_version", value: "1.0" },
|
|
||||||
];
|
|
||||||
|
|
||||||
for (const setting of encryptionSettings) {
|
|
||||||
const existing = await db
|
|
||||||
.select()
|
|
||||||
.from(settings)
|
|
||||||
.where(eq(settings.key, setting.key));
|
|
||||||
|
|
||||||
if (existing.length > 0) {
|
|
||||||
await db
|
|
||||||
.update(settings)
|
|
||||||
.set({ value: setting.value })
|
|
||||||
.where(eq(settings.key, setting.key));
|
|
||||||
} else {
|
|
||||||
await db.insert(settings).values(setting);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
databaseLogger.success("Encryption settings updated", {
|
|
||||||
operation: "settings_updated",
|
|
||||||
});
|
|
||||||
} catch (error) {
|
|
||||||
databaseLogger.error("Failed to update settings", error, {
|
|
||||||
operation: "settings_update_failed",
|
|
||||||
});
|
|
||||||
throw error;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
private async verifyMigration(): Promise<void> {
|
|
||||||
databaseLogger.info("Verifying migration integrity", {
|
|
||||||
operation: "verification_start",
|
|
||||||
});
|
|
||||||
|
|
||||||
try {
|
|
||||||
const status = DatabaseEncryption.getEncryptionStatus();
|
|
||||||
|
|
||||||
if (!status.enabled || !status.configValid) {
|
|
||||||
throw new Error("Encryption system verification failed");
|
|
||||||
}
|
|
||||||
|
|
||||||
const testResult = await this.performTestEncryption();
|
|
||||||
if (!testResult) {
|
|
||||||
throw new Error("Test encryption/decryption failed");
|
|
||||||
}
|
|
||||||
|
|
||||||
databaseLogger.success("Migration verification completed successfully", {
|
|
||||||
operation: "verification_complete",
|
|
||||||
status,
|
|
||||||
});
|
|
||||||
} catch (error) {
|
|
||||||
databaseLogger.error("Migration verification failed", error, {
|
|
||||||
operation: "verification_failed",
|
|
||||||
});
|
|
||||||
throw error;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
private async performTestEncryption(): Promise<boolean> {
|
|
||||||
try {
|
|
||||||
const { FieldEncryption } = await import("./encryption.js");
|
|
||||||
const testData = `test-data-${Date.now()}`;
|
|
||||||
const testKey = FieldEncryption.getFieldKey(
|
|
||||||
this.config.masterPassword!,
|
|
||||||
"test",
|
|
||||||
);
|
|
||||||
|
|
||||||
const encrypted = FieldEncryption.encryptField(testData, testKey);
|
|
||||||
const decrypted = FieldEncryption.decryptField(encrypted, testKey);
|
|
||||||
|
|
||||||
return decrypted === testData;
|
|
||||||
} catch {
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
static async checkMigrationStatus(): Promise<{
|
|
||||||
isEncryptionEnabled: boolean;
|
|
||||||
migrationCompleted: boolean;
|
|
||||||
migrationRequired: boolean;
|
|
||||||
migrationDate?: string;
|
|
||||||
}> {
|
|
||||||
try {
|
|
||||||
const encryptionEnabled = await db
|
|
||||||
.select()
|
|
||||||
.from(settings)
|
|
||||||
.where(eq(settings.key, "encryption_enabled"));
|
|
||||||
const migrationCompleted = await db
|
|
||||||
.select()
|
|
||||||
.from(settings)
|
|
||||||
.where(eq(settings.key, "encryption_migration_completed"));
|
|
||||||
|
|
||||||
const isEncryptionEnabled =
|
|
||||||
encryptionEnabled.length > 0 && encryptionEnabled[0].value === "true";
|
|
||||||
const isMigrationCompleted = migrationCompleted.length > 0;
|
|
||||||
|
|
||||||
// Check if migration is actually required by looking for unencrypted sensitive data
|
|
||||||
const migrationRequired = await this.checkIfMigrationRequired();
|
|
||||||
|
|
||||||
return {
|
|
||||||
isEncryptionEnabled,
|
|
||||||
migrationCompleted: isMigrationCompleted,
|
|
||||||
migrationRequired,
|
|
||||||
migrationDate: isMigrationCompleted
|
|
||||||
? migrationCompleted[0].value
|
|
||||||
: undefined,
|
|
||||||
};
|
|
||||||
} catch (error) {
|
|
||||||
databaseLogger.error("Failed to check migration status", error, {
|
|
||||||
operation: "status_check_failed",
|
|
||||||
});
|
|
||||||
throw error;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
static async checkIfMigrationRequired(): Promise<boolean> {
|
|
||||||
try {
|
|
||||||
// Import table schemas
|
|
||||||
const { sshData, sshCredentials } = await import(
|
|
||||||
"../database/db/schema.js"
|
|
||||||
);
|
|
||||||
|
|
||||||
// Check if there's any unencrypted sensitive data in ssh_data
|
|
||||||
const sshDataCount = await db
|
|
||||||
.select({ count: sql<number>`count(*)` })
|
|
||||||
.from(sshData);
|
|
||||||
if (sshDataCount[0].count > 0) {
|
|
||||||
// Sample a few records to check if they contain unencrypted data
|
|
||||||
const sampleData = await db.select().from(sshData).limit(5);
|
|
||||||
for (const record of sampleData) {
|
|
||||||
if (record.password && !this.looksEncrypted(record.password)) {
|
|
||||||
return true; // Found unencrypted password
|
|
||||||
}
|
|
||||||
if (record.key && !this.looksEncrypted(record.key)) {
|
|
||||||
return true; // Found unencrypted key
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Check if there's any unencrypted sensitive data in ssh_credentials
|
|
||||||
const credentialsCount = await db
|
|
||||||
.select({ count: sql<number>`count(*)` })
|
|
||||||
.from(sshCredentials);
|
|
||||||
if (credentialsCount[0].count > 0) {
|
|
||||||
const sampleCredentials = await db
|
|
||||||
.select()
|
|
||||||
.from(sshCredentials)
|
|
||||||
.limit(5);
|
|
||||||
for (const record of sampleCredentials) {
|
|
||||||
if (record.password && !this.looksEncrypted(record.password)) {
|
|
||||||
return true; // Found unencrypted password
|
|
||||||
}
|
|
||||||
if (record.privateKey && !this.looksEncrypted(record.privateKey)) {
|
|
||||||
return true; // Found unencrypted private key
|
|
||||||
}
|
|
||||||
if (record.keyPassword && !this.looksEncrypted(record.keyPassword)) {
|
|
||||||
return true; // Found unencrypted key password
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return false; // No unencrypted sensitive data found
|
|
||||||
} catch (error) {
|
|
||||||
databaseLogger.warn(
|
|
||||||
"Failed to check if migration required, assuming required",
|
|
||||||
{
|
|
||||||
operation: "migration_check_failed",
|
|
||||||
error: error instanceof Error ? error.message : "Unknown error",
|
|
||||||
},
|
|
||||||
);
|
|
||||||
return true; // If we can't check, assume migration is required for safety
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
private static looksEncrypted(data: string): boolean {
|
|
||||||
if (!data) return true; // Empty data doesn't need encryption
|
|
||||||
|
|
||||||
try {
|
|
||||||
// Check if it looks like our encrypted format: {"data":"...","iv":"...","tag":"..."}
|
|
||||||
const parsed = JSON.parse(data);
|
|
||||||
return !!(parsed.data && parsed.iv && parsed.tag);
|
|
||||||
} catch {
|
|
||||||
// If it's not JSON, check if it's a reasonable length for encrypted data
|
|
||||||
// Encrypted data is typically much longer than plaintext
|
|
||||||
return data.length > 100 && data.includes("="); // Base64-like characteristics
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if (import.meta.url === `file://${process.argv[1]}`) {
|
|
||||||
const config: MigrationConfig = {
|
|
||||||
masterPassword: process.env.DB_ENCRYPTION_KEY,
|
|
||||||
forceEncryption: process.env.FORCE_ENCRYPTION === "true",
|
|
||||||
backupEnabled: process.env.BACKUP_ENABLED !== "false",
|
|
||||||
dryRun: process.env.DRY_RUN === "true",
|
|
||||||
};
|
|
||||||
|
|
||||||
const migration = new EncryptionMigration(config);
|
|
||||||
|
|
||||||
migration
|
|
||||||
.runMigration()
|
|
||||||
.then(() => {
|
|
||||||
console.log("Migration completed successfully");
|
|
||||||
process.exit(0);
|
|
||||||
})
|
|
||||||
.catch((error) => {
|
|
||||||
console.error("Migration failed:", error.message);
|
|
||||||
process.exit(1);
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
export { EncryptionMigration };
|
|
||||||
export type { MigrationConfig };
|
|
||||||
@@ -1,341 +0,0 @@
|
|||||||
#!/usr/bin/env node
|
|
||||||
import { FieldEncryption } from "./encryption.js";
|
|
||||||
import { DatabaseEncryption } from "./database-encryption.js";
|
|
||||||
import { EncryptedDBOperations } from "./encrypted-db-operations.js";
|
|
||||||
import { databaseLogger } from "./logger.js";
|
|
||||||
|
|
||||||
class EncryptionTest {
|
|
||||||
private testPassword = "test-master-password-for-validation";
|
|
||||||
|
|
||||||
async runAllTests(): Promise<boolean> {
|
|
||||||
console.log("🔐 Starting Termix Database Encryption Tests...\n");
|
|
||||||
|
|
||||||
const tests = [
|
|
||||||
{
|
|
||||||
name: "Basic Encryption/Decryption",
|
|
||||||
test: () => this.testBasicEncryption(),
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: "Field Encryption Detection",
|
|
||||||
test: () => this.testFieldDetection(),
|
|
||||||
},
|
|
||||||
{ name: "Key Derivation", test: () => this.testKeyDerivation() },
|
|
||||||
{
|
|
||||||
name: "Database Encryption Context",
|
|
||||||
test: () => this.testDatabaseContext(),
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: "Record Encryption/Decryption",
|
|
||||||
test: () => this.testRecordOperations(),
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: "Backward Compatibility",
|
|
||||||
test: () => this.testBackwardCompatibility(),
|
|
||||||
},
|
|
||||||
{ name: "Error Handling", test: () => this.testErrorHandling() },
|
|
||||||
{ name: "Performance Test", test: () => this.testPerformance() },
|
|
||||||
];
|
|
||||||
|
|
||||||
let passedTests = 0;
|
|
||||||
let totalTests = tests.length;
|
|
||||||
|
|
||||||
for (const test of tests) {
|
|
||||||
try {
|
|
||||||
console.log(`⏳ Running: ${test.name}...`);
|
|
||||||
await test.test();
|
|
||||||
console.log(`✅ PASSED: ${test.name}\n`);
|
|
||||||
passedTests++;
|
|
||||||
} catch (error) {
|
|
||||||
console.log(`❌ FAILED: ${test.name}`);
|
|
||||||
console.log(
|
|
||||||
` Error: ${error instanceof Error ? error.message : "Unknown error"}\n`,
|
|
||||||
);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
const success = passedTests === totalTests;
|
|
||||||
console.log(`\n🎯 Test Results: ${passedTests}/${totalTests} tests passed`);
|
|
||||||
|
|
||||||
if (success) {
|
|
||||||
console.log(
|
|
||||||
"🎉 All encryption tests PASSED! System is ready for production.",
|
|
||||||
);
|
|
||||||
} else {
|
|
||||||
console.log("⚠️ Some tests FAILED! Please review the implementation.");
|
|
||||||
}
|
|
||||||
|
|
||||||
return success;
|
|
||||||
}
|
|
||||||
|
|
||||||
private async testBasicEncryption(): Promise<void> {
|
|
||||||
const testData = "Hello, World! This is sensitive data.";
|
|
||||||
const key = FieldEncryption.getFieldKey(this.testPassword, "test-field");
|
|
||||||
|
|
||||||
const encrypted = FieldEncryption.encryptField(testData, key);
|
|
||||||
const decrypted = FieldEncryption.decryptField(encrypted, key);
|
|
||||||
|
|
||||||
if (decrypted !== testData) {
|
|
||||||
throw new Error(
|
|
||||||
`Decryption mismatch: expected "${testData}", got "${decrypted}"`,
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!FieldEncryption.isEncrypted(encrypted)) {
|
|
||||||
throw new Error("Encrypted data not detected as encrypted");
|
|
||||||
}
|
|
||||||
|
|
||||||
if (FieldEncryption.isEncrypted(testData)) {
|
|
||||||
throw new Error("Plain text incorrectly detected as encrypted");
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
private async testFieldDetection(): Promise<void> {
|
|
||||||
const testCases = [
|
|
||||||
{ table: "users", field: "password_hash", shouldEncrypt: true },
|
|
||||||
{ table: "users", field: "username", shouldEncrypt: false },
|
|
||||||
{ table: "ssh_data", field: "password", shouldEncrypt: true },
|
|
||||||
{ table: "ssh_data", field: "ip", shouldEncrypt: false },
|
|
||||||
{ table: "ssh_credentials", field: "privateKey", shouldEncrypt: true },
|
|
||||||
{ table: "unknown_table", field: "any_field", shouldEncrypt: false },
|
|
||||||
];
|
|
||||||
|
|
||||||
for (const testCase of testCases) {
|
|
||||||
const result = FieldEncryption.shouldEncryptField(
|
|
||||||
testCase.table,
|
|
||||||
testCase.field,
|
|
||||||
);
|
|
||||||
if (result !== testCase.shouldEncrypt) {
|
|
||||||
throw new Error(
|
|
||||||
`Field detection failed for ${testCase.table}.${testCase.field}: ` +
|
|
||||||
`expected ${testCase.shouldEncrypt}, got ${result}`,
|
|
||||||
);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
private async testKeyDerivation(): Promise<void> {
|
|
||||||
const password = "test-password";
|
|
||||||
const fieldType1 = "users.password_hash";
|
|
||||||
const fieldType2 = "ssh_data.password";
|
|
||||||
|
|
||||||
const key1a = FieldEncryption.getFieldKey(password, fieldType1);
|
|
||||||
const key1b = FieldEncryption.getFieldKey(password, fieldType1);
|
|
||||||
const key2 = FieldEncryption.getFieldKey(password, fieldType2);
|
|
||||||
|
|
||||||
if (!key1a.equals(key1b)) {
|
|
||||||
throw new Error("Same field type should produce identical keys");
|
|
||||||
}
|
|
||||||
|
|
||||||
if (key1a.equals(key2)) {
|
|
||||||
throw new Error("Different field types should produce different keys");
|
|
||||||
}
|
|
||||||
|
|
||||||
const differentPasswordKey = FieldEncryption.getFieldKey(
|
|
||||||
"different-password",
|
|
||||||
fieldType1,
|
|
||||||
);
|
|
||||||
if (key1a.equals(differentPasswordKey)) {
|
|
||||||
throw new Error("Different passwords should produce different keys");
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
private async testDatabaseContext(): Promise<void> {
|
|
||||||
DatabaseEncryption.initialize({
|
|
||||||
masterPassword: this.testPassword,
|
|
||||||
encryptionEnabled: true,
|
|
||||||
forceEncryption: false,
|
|
||||||
migrateOnAccess: true,
|
|
||||||
});
|
|
||||||
|
|
||||||
const status = DatabaseEncryption.getEncryptionStatus();
|
|
||||||
if (!status.enabled) {
|
|
||||||
throw new Error("Encryption should be enabled");
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!status.configValid) {
|
|
||||||
throw new Error("Configuration should be valid");
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
private async testRecordOperations(): Promise<void> {
|
|
||||||
const testRecord = {
|
|
||||||
id: "test-id-123",
|
|
||||||
username: "testuser",
|
|
||||||
password_hash: "sensitive-password-hash",
|
|
||||||
is_admin: false,
|
|
||||||
};
|
|
||||||
|
|
||||||
const encrypted = DatabaseEncryption.encryptRecord("users", testRecord);
|
|
||||||
const decrypted = DatabaseEncryption.decryptRecord("users", encrypted);
|
|
||||||
|
|
||||||
if (decrypted.username !== testRecord.username) {
|
|
||||||
throw new Error("Non-sensitive field should remain unchanged");
|
|
||||||
}
|
|
||||||
|
|
||||||
if (decrypted.password_hash !== testRecord.password_hash) {
|
|
||||||
throw new Error("Sensitive field should be properly decrypted");
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!FieldEncryption.isEncrypted(encrypted.password_hash)) {
|
|
||||||
throw new Error("Sensitive field should be encrypted in stored record");
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
private async testBackwardCompatibility(): Promise<void> {
|
|
||||||
const plaintextRecord = {
|
|
||||||
id: "legacy-id-456",
|
|
||||||
username: "legacyuser",
|
|
||||||
password_hash: "plain-text-password-hash",
|
|
||||||
is_admin: false,
|
|
||||||
};
|
|
||||||
|
|
||||||
const decrypted = DatabaseEncryption.decryptRecord(
|
|
||||||
"users",
|
|
||||||
plaintextRecord,
|
|
||||||
);
|
|
||||||
|
|
||||||
if (decrypted.password_hash !== plaintextRecord.password_hash) {
|
|
||||||
throw new Error(
|
|
||||||
"Plain text fields should be returned as-is for backward compatibility",
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
if (decrypted.username !== plaintextRecord.username) {
|
|
||||||
throw new Error("Non-sensitive fields should be unchanged");
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
private async testErrorHandling(): Promise<void> {
|
|
||||||
const key = FieldEncryption.getFieldKey(this.testPassword, "test");
|
|
||||||
|
|
||||||
try {
|
|
||||||
FieldEncryption.decryptField("invalid-json-data", key);
|
|
||||||
throw new Error("Should have thrown error for invalid JSON");
|
|
||||||
} catch (error) {
|
|
||||||
if (!error || !(error as Error).message.includes("decryption failed")) {
|
|
||||||
throw new Error("Should throw appropriate decryption error");
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
try {
|
|
||||||
const fakeEncrypted = JSON.stringify({
|
|
||||||
data: "fake",
|
|
||||||
iv: "fake",
|
|
||||||
tag: "fake",
|
|
||||||
});
|
|
||||||
FieldEncryption.decryptField(fakeEncrypted, key);
|
|
||||||
throw new Error("Should have thrown error for invalid encrypted data");
|
|
||||||
} catch (error) {
|
|
||||||
if (!error || !(error as Error).message.includes("Decryption failed")) {
|
|
||||||
throw new Error("Should throw appropriate error for corrupted data");
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
private async testPerformance(): Promise<void> {
|
|
||||||
const testData =
|
|
||||||
"Performance test data that is reasonably long to simulate real SSH keys and passwords.";
|
|
||||||
const key = FieldEncryption.getFieldKey(
|
|
||||||
this.testPassword,
|
|
||||||
"performance-test",
|
|
||||||
);
|
|
||||||
|
|
||||||
const iterations = 100;
|
|
||||||
const startTime = Date.now();
|
|
||||||
|
|
||||||
for (let i = 0; i < iterations; i++) {
|
|
||||||
const encrypted = FieldEncryption.encryptField(testData, key);
|
|
||||||
const decrypted = FieldEncryption.decryptField(encrypted, key);
|
|
||||||
|
|
||||||
if (decrypted !== testData) {
|
|
||||||
throw new Error(`Performance test failed at iteration ${i}`);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
const endTime = Date.now();
|
|
||||||
const totalTime = endTime - startTime;
|
|
||||||
const avgTime = totalTime / iterations;
|
|
||||||
|
|
||||||
console.log(
|
|
||||||
` ⚡ Performance: ${iterations} encrypt/decrypt cycles in ${totalTime}ms (${avgTime.toFixed(2)}ms avg)`,
|
|
||||||
);
|
|
||||||
|
|
||||||
if (avgTime > 50) {
|
|
||||||
console.log(
|
|
||||||
" ⚠️ Warning: Encryption operations are slower than expected",
|
|
||||||
);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
static async validateProduction(): Promise<boolean> {
|
|
||||||
console.log("🔒 Validating production encryption setup...\n");
|
|
||||||
|
|
||||||
try {
|
|
||||||
const encryptionKey = process.env.DB_ENCRYPTION_KEY;
|
|
||||||
|
|
||||||
if (!encryptionKey) {
|
|
||||||
console.log("❌ DB_ENCRYPTION_KEY environment variable not set");
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (encryptionKey === "default-key-change-me") {
|
|
||||||
console.log("❌ DB_ENCRYPTION_KEY is using default value (INSECURE)");
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (encryptionKey.length < 16) {
|
|
||||||
console.log(
|
|
||||||
"❌ DB_ENCRYPTION_KEY is too short (minimum 16 characters)",
|
|
||||||
);
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
|
|
||||||
DatabaseEncryption.initialize({
|
|
||||||
masterPassword: encryptionKey,
|
|
||||||
encryptionEnabled: true,
|
|
||||||
});
|
|
||||||
|
|
||||||
const status = DatabaseEncryption.getEncryptionStatus();
|
|
||||||
if (!status.configValid) {
|
|
||||||
console.log("❌ Encryption configuration validation failed");
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
|
|
||||||
console.log("✅ Production encryption setup is valid");
|
|
||||||
return true;
|
|
||||||
} catch (error) {
|
|
||||||
console.log(
|
|
||||||
`❌ Production validation failed: ${error instanceof Error ? error.message : "Unknown error"}`,
|
|
||||||
);
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if (import.meta.url === `file://${process.argv[1]}`) {
|
|
||||||
const testMode = process.argv[2];
|
|
||||||
|
|
||||||
if (testMode === "production") {
|
|
||||||
EncryptionTest.validateProduction()
|
|
||||||
.then((success) => {
|
|
||||||
process.exit(success ? 0 : 1);
|
|
||||||
})
|
|
||||||
.catch((error) => {
|
|
||||||
console.error("Test execution failed:", error);
|
|
||||||
process.exit(1);
|
|
||||||
});
|
|
||||||
} else {
|
|
||||||
const test = new EncryptionTest();
|
|
||||||
test
|
|
||||||
.runAllTests()
|
|
||||||
.then((success) => {
|
|
||||||
process.exit(success ? 0 : 1);
|
|
||||||
})
|
|
||||||
.catch((error) => {
|
|
||||||
console.error("Test execution failed:", error);
|
|
||||||
process.exit(1);
|
|
||||||
});
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
export { EncryptionTest };
|
|
||||||
@@ -1,172 +0,0 @@
|
|||||||
import crypto from "crypto";
|
|
||||||
|
|
||||||
interface EncryptedData {
|
|
||||||
data: string;
|
|
||||||
iv: string;
|
|
||||||
tag: string;
|
|
||||||
salt?: string;
|
|
||||||
}
|
|
||||||
|
|
||||||
interface EncryptionConfig {
|
|
||||||
algorithm: string;
|
|
||||||
keyLength: number;
|
|
||||||
ivLength: number;
|
|
||||||
saltLength: number;
|
|
||||||
iterations: number;
|
|
||||||
}
|
|
||||||
|
|
||||||
class FieldEncryption {
|
|
||||||
private static readonly CONFIG: EncryptionConfig = {
|
|
||||||
algorithm: "aes-256-gcm",
|
|
||||||
keyLength: 32,
|
|
||||||
ivLength: 16,
|
|
||||||
saltLength: 32,
|
|
||||||
iterations: 100000,
|
|
||||||
};
|
|
||||||
|
|
||||||
private static readonly ENCRYPTED_FIELDS = {
|
|
||||||
users: [
|
|
||||||
"password_hash",
|
|
||||||
"client_secret",
|
|
||||||
"totp_secret",
|
|
||||||
"totp_backup_codes",
|
|
||||||
"oidc_identifier",
|
|
||||||
],
|
|
||||||
ssh_data: ["password", "key", "keyPassword"],
|
|
||||||
ssh_credentials: [
|
|
||||||
"password",
|
|
||||||
"privateKey",
|
|
||||||
"keyPassword",
|
|
||||||
"key",
|
|
||||||
"publicKey",
|
|
||||||
],
|
|
||||||
};
|
|
||||||
|
|
||||||
static isEncrypted(value: string | null): boolean {
|
|
||||||
if (!value) return false;
|
|
||||||
try {
|
|
||||||
const parsed = JSON.parse(value);
|
|
||||||
return !!(parsed.data && parsed.iv && parsed.tag);
|
|
||||||
} catch {
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
static deriveKey(password: string, salt: Buffer, keyType: string): Buffer {
|
|
||||||
const masterKey = crypto.pbkdf2Sync(
|
|
||||||
password,
|
|
||||||
salt,
|
|
||||||
this.CONFIG.iterations,
|
|
||||||
this.CONFIG.keyLength,
|
|
||||||
"sha256",
|
|
||||||
);
|
|
||||||
|
|
||||||
return Buffer.from(
|
|
||||||
crypto.hkdfSync(
|
|
||||||
"sha256",
|
|
||||||
masterKey,
|
|
||||||
salt,
|
|
||||||
keyType,
|
|
||||||
this.CONFIG.keyLength,
|
|
||||||
),
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
static encrypt(plaintext: string, key: Buffer): EncryptedData {
|
|
||||||
if (!plaintext) return { data: "", iv: "", tag: "" };
|
|
||||||
|
|
||||||
const iv = crypto.randomBytes(this.CONFIG.ivLength);
|
|
||||||
const cipher = crypto.createCipheriv(this.CONFIG.algorithm, key, iv) as any;
|
|
||||||
cipher.setAAD(Buffer.from("termix-field-encryption"));
|
|
||||||
|
|
||||||
let encrypted = cipher.update(plaintext, "utf8", "hex");
|
|
||||||
encrypted += cipher.final("hex");
|
|
||||||
|
|
||||||
const tag = cipher.getAuthTag();
|
|
||||||
|
|
||||||
return {
|
|
||||||
data: encrypted,
|
|
||||||
iv: iv.toString("hex"),
|
|
||||||
tag: tag.toString("hex"),
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
static decrypt(encryptedData: EncryptedData, key: Buffer): string {
|
|
||||||
if (!encryptedData.data) return "";
|
|
||||||
|
|
||||||
try {
|
|
||||||
const decipher = crypto.createDecipheriv(
|
|
||||||
this.CONFIG.algorithm,
|
|
||||||
key,
|
|
||||||
Buffer.from(encryptedData.iv, "hex"),
|
|
||||||
) as any;
|
|
||||||
decipher.setAAD(Buffer.from("termix-field-encryption"));
|
|
||||||
decipher.setAuthTag(Buffer.from(encryptedData.tag, "hex"));
|
|
||||||
|
|
||||||
let decrypted = decipher.update(encryptedData.data, "hex", "utf8");
|
|
||||||
decrypted += decipher.final("utf8");
|
|
||||||
|
|
||||||
return decrypted;
|
|
||||||
} catch (error) {
|
|
||||||
throw new Error(
|
|
||||||
`Decryption failed: ${error instanceof Error ? error.message : "Unknown error"}`,
|
|
||||||
);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
static encryptField(value: string, fieldKey: Buffer): string {
|
|
||||||
if (!value) return "";
|
|
||||||
if (this.isEncrypted(value)) return value;
|
|
||||||
|
|
||||||
const encrypted = this.encrypt(value, fieldKey);
|
|
||||||
return JSON.stringify(encrypted);
|
|
||||||
}
|
|
||||||
|
|
||||||
static decryptField(value: string, fieldKey: Buffer): string {
|
|
||||||
if (!value) return "";
|
|
||||||
if (!this.isEncrypted(value)) return value;
|
|
||||||
|
|
||||||
try {
|
|
||||||
const encrypted: EncryptedData = JSON.parse(value);
|
|
||||||
return this.decrypt(encrypted, fieldKey);
|
|
||||||
} catch (error) {
|
|
||||||
throw new Error(
|
|
||||||
`Field decryption failed: ${error instanceof Error ? error.message : "Unknown error"}`,
|
|
||||||
);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
static getFieldKey(masterPassword: string, fieldType: string): Buffer {
|
|
||||||
const salt = crypto
|
|
||||||
.createHash("sha256")
|
|
||||||
.update(`termix-${fieldType}`)
|
|
||||||
.digest();
|
|
||||||
return this.deriveKey(masterPassword, salt, fieldType);
|
|
||||||
}
|
|
||||||
|
|
||||||
static shouldEncryptField(tableName: string, fieldName: string): boolean {
|
|
||||||
const tableFields =
|
|
||||||
this.ENCRYPTED_FIELDS[tableName as keyof typeof this.ENCRYPTED_FIELDS];
|
|
||||||
return tableFields ? tableFields.includes(fieldName) : false;
|
|
||||||
}
|
|
||||||
|
|
||||||
static generateSalt(): string {
|
|
||||||
return crypto.randomBytes(this.CONFIG.saltLength).toString("hex");
|
|
||||||
}
|
|
||||||
|
|
||||||
static validateEncryptionHealth(
|
|
||||||
encryptedValue: string,
|
|
||||||
key: Buffer,
|
|
||||||
): boolean {
|
|
||||||
try {
|
|
||||||
if (!this.isEncrypted(encryptedValue)) return false;
|
|
||||||
const decrypted = this.decryptField(encryptedValue, key);
|
|
||||||
return decrypted !== "";
|
|
||||||
} catch {
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
export { FieldEncryption };
|
|
||||||
export type { EncryptedData, EncryptionConfig };
|
|
||||||
95
src/backend/utils/field-crypto.ts
Normal file
@@ -0,0 +1,95 @@
|
|||||||
|
import crypto from "crypto";
|
||||||
|
|
||||||
|
interface EncryptedData {
|
||||||
|
data: string;
|
||||||
|
iv: string;
|
||||||
|
tag: string;
|
||||||
|
salt: string;
|
||||||
|
recordId: string; // Store the recordId used for encryption context
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* FieldCrypto - Simple direct field encryption
|
||||||
|
*
|
||||||
|
* Linus principles:
|
||||||
|
* - No special cases
|
||||||
|
* - No compatibility checks
|
||||||
|
* - Data is either encrypted or fails
|
||||||
|
* - No "legacy data" concept
|
||||||
|
*/
|
||||||
|
class FieldCrypto {
|
||||||
|
private static readonly ALGORITHM = "aes-256-gcm";
|
||||||
|
private static readonly KEY_LENGTH = 32;
|
||||||
|
private static readonly IV_LENGTH = 16;
|
||||||
|
private static readonly SALT_LENGTH = 32;
|
||||||
|
|
||||||
|
// Fields requiring encryption - simple mapping, no complex logic
|
||||||
|
private static readonly ENCRYPTED_FIELDS = {
|
||||||
|
users: new Set(["password_hash", "client_secret", "totp_secret", "totp_backup_codes", "oidc_identifier"]),
|
||||||
|
ssh_data: new Set(["password", "key", "keyPassword"]),
|
||||||
|
ssh_credentials: new Set(["password", "privateKey", "keyPassword", "key", "publicKey"]),
|
||||||
|
};
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Encrypt field - no special cases
|
||||||
|
*/
|
||||||
|
static encryptField(plaintext: string, masterKey: Buffer, recordId: string, fieldName: string): string {
|
||||||
|
if (!plaintext) return "";
|
||||||
|
|
||||||
|
const salt = crypto.randomBytes(this.SALT_LENGTH);
|
||||||
|
const context = `${recordId}:${fieldName}`;
|
||||||
|
const fieldKey = Buffer.from(crypto.hkdfSync('sha256', masterKey, salt, context, this.KEY_LENGTH));
|
||||||
|
|
||||||
|
const iv = crypto.randomBytes(this.IV_LENGTH);
|
||||||
|
const cipher = crypto.createCipheriv(this.ALGORITHM, fieldKey, iv) as any;
|
||||||
|
|
||||||
|
let encrypted = cipher.update(plaintext, "utf8", "hex");
|
||||||
|
encrypted += cipher.final("hex");
|
||||||
|
const tag = cipher.getAuthTag();
|
||||||
|
|
||||||
|
const encryptedData: EncryptedData = {
|
||||||
|
data: encrypted,
|
||||||
|
iv: iv.toString("hex"),
|
||||||
|
tag: tag.toString("hex"),
|
||||||
|
salt: salt.toString("hex"),
|
||||||
|
recordId: recordId, // Store recordId for consistent decryption context
|
||||||
|
};
|
||||||
|
|
||||||
|
return JSON.stringify(encryptedData);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Decrypt field - either succeeds or fails, no third option
|
||||||
|
*/
|
||||||
|
static decryptField(encryptedValue: string, masterKey: Buffer, recordId: string, fieldName: string): string {
|
||||||
|
if (!encryptedValue) return "";
|
||||||
|
|
||||||
|
const encrypted: EncryptedData = JSON.parse(encryptedValue);
|
||||||
|
const salt = Buffer.from(encrypted.salt, "hex");
|
||||||
|
|
||||||
|
// Use ONLY the recordId that was stored during encryption
|
||||||
|
if (!encrypted.recordId) {
|
||||||
|
throw new Error(`Encrypted field missing recordId context - data corruption or legacy format not supported`);
|
||||||
|
}
|
||||||
|
const context = `${encrypted.recordId}:${fieldName}`;
|
||||||
|
const fieldKey = Buffer.from(crypto.hkdfSync('sha256', masterKey, salt, context, this.KEY_LENGTH));
|
||||||
|
|
||||||
|
const decipher = crypto.createDecipheriv(this.ALGORITHM, fieldKey, Buffer.from(encrypted.iv, "hex")) as any;
|
||||||
|
decipher.setAuthTag(Buffer.from(encrypted.tag, "hex"));
|
||||||
|
|
||||||
|
let decrypted = decipher.update(encrypted.data, "hex", "utf8");
|
||||||
|
decrypted += decipher.final("utf8");
|
||||||
|
|
||||||
|
return decrypted;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check if field needs encryption - simple table lookup, no complex logic
|
||||||
|
*/
|
||||||
|
static shouldEncryptField(tableName: string, fieldName: string): boolean {
|
||||||
|
const fields = this.ENCRYPTED_FIELDS[tableName as keyof typeof this.ENCRYPTED_FIELDS];
|
||||||
|
return fields ? fields.has(fieldName) : false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export { FieldCrypto, type EncryptedData };
|
||||||
@@ -1,436 +0,0 @@
|
|||||||
import crypto from "crypto";
|
|
||||||
import os from "os";
|
|
||||||
import { execSync } from "child_process";
|
|
||||||
import fs from "fs";
|
|
||||||
import { databaseLogger } from "./logger.js";
|
|
||||||
|
|
||||||
interface HardwareInfo {
|
|
||||||
cpuId?: string;
|
|
||||||
motherboardUuid?: string;
|
|
||||||
diskSerial?: string;
|
|
||||||
biosSerial?: string;
|
|
||||||
tpmInfo?: string;
|
|
||||||
macAddresses?: string[];
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* 硬件指纹生成器 - 使用真实硬件特征生成稳定的设备指纹
|
|
||||||
* 相比软件环境指纹,硬件指纹在虚拟化和容器环境中更加稳定
|
|
||||||
*/
|
|
||||||
class HardwareFingerprint {
|
|
||||||
private static readonly CACHE_KEY = "cached_hardware_fingerprint";
|
|
||||||
private static cachedFingerprint: string | null = null;
|
|
||||||
|
|
||||||
/**
|
|
||||||
* 生成硬件指纹
|
|
||||||
* 优先级:缓存 > 环境变量 > 硬件检测
|
|
||||||
*/
|
|
||||||
static generate(): string {
|
|
||||||
try {
|
|
||||||
if (this.cachedFingerprint) {
|
|
||||||
return this.cachedFingerprint;
|
|
||||||
}
|
|
||||||
|
|
||||||
const envFingerprint = process.env.TERMIX_HARDWARE_SEED;
|
|
||||||
if (envFingerprint && envFingerprint.length >= 32) {
|
|
||||||
databaseLogger.info("Using hardware seed from environment variable", {
|
|
||||||
operation: "hardware_fingerprint_env",
|
|
||||||
});
|
|
||||||
this.cachedFingerprint = this.hashFingerprint(envFingerprint);
|
|
||||||
return this.cachedFingerprint;
|
|
||||||
}
|
|
||||||
|
|
||||||
const hwInfo = this.detectHardwareInfo();
|
|
||||||
const fingerprint = this.generateFromHardware(hwInfo);
|
|
||||||
|
|
||||||
this.cachedFingerprint = fingerprint;
|
|
||||||
|
|
||||||
return fingerprint;
|
|
||||||
} catch (error) {
|
|
||||||
databaseLogger.error("Hardware fingerprint generation failed", error, {
|
|
||||||
operation: "hardware_fingerprint_failed",
|
|
||||||
});
|
|
||||||
|
|
||||||
return this.generateFallbackFingerprint();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* 检测硬件信息
|
|
||||||
*/
|
|
||||||
private static detectHardwareInfo(): HardwareInfo {
|
|
||||||
const platform = os.platform();
|
|
||||||
const hwInfo: HardwareInfo = {};
|
|
||||||
|
|
||||||
try {
|
|
||||||
switch (platform) {
|
|
||||||
case "linux":
|
|
||||||
hwInfo.cpuId = this.getLinuxCpuId();
|
|
||||||
hwInfo.motherboardUuid = this.getLinuxMotherboardUuid();
|
|
||||||
hwInfo.diskSerial = this.getLinuxDiskSerial();
|
|
||||||
hwInfo.biosSerial = this.getLinuxBiosSerial();
|
|
||||||
break;
|
|
||||||
|
|
||||||
case "win32":
|
|
||||||
hwInfo.cpuId = this.getWindowsCpuId();
|
|
||||||
hwInfo.motherboardUuid = this.getWindowsMotherboardUuid();
|
|
||||||
hwInfo.diskSerial = this.getWindowsDiskSerial();
|
|
||||||
hwInfo.biosSerial = this.getWindowsBiosSerial();
|
|
||||||
break;
|
|
||||||
|
|
||||||
case "darwin":
|
|
||||||
hwInfo.cpuId = this.getMacOSCpuId();
|
|
||||||
hwInfo.motherboardUuid = this.getMacOSMotherboardUuid();
|
|
||||||
hwInfo.diskSerial = this.getMacOSDiskSerial();
|
|
||||||
hwInfo.biosSerial = this.getMacOSBiosSerial();
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
|
|
||||||
// 所有平台都尝试获取MAC地址
|
|
||||||
hwInfo.macAddresses = this.getStableMacAddresses();
|
|
||||||
} catch (error) {
|
|
||||||
databaseLogger.error("Some hardware detection failed", error, {
|
|
||||||
operation: "hardware_detection_partial_failure",
|
|
||||||
platform,
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
return hwInfo;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Linux平台硬件信息获取
|
|
||||||
*/
|
|
||||||
private static getLinuxCpuId(): string | undefined {
|
|
||||||
try {
|
|
||||||
// 尝试多种方法获取CPU信息
|
|
||||||
const methods = [
|
|
||||||
() =>
|
|
||||||
fs
|
|
||||||
.readFileSync("/proc/cpuinfo", "utf8")
|
|
||||||
.match(/processor\s*:\s*(\d+)/)?.[1],
|
|
||||||
() =>
|
|
||||||
execSync('dmidecode -t processor | grep "ID:" | head -1', {
|
|
||||||
encoding: "utf8",
|
|
||||||
}).trim(),
|
|
||||||
() =>
|
|
||||||
execSync(
|
|
||||||
'cat /proc/cpuinfo | grep "cpu family\\|model\\|stepping" | md5sum',
|
|
||||||
{ encoding: "utf8" },
|
|
||||||
).split(" ")[0],
|
|
||||||
];
|
|
||||||
|
|
||||||
for (const method of methods) {
|
|
||||||
try {
|
|
||||||
const result = method();
|
|
||||||
if (result && result.length > 0) return result;
|
|
||||||
} catch {
|
|
||||||
/* 继续尝试下一种方法 */
|
|
||||||
}
|
|
||||||
}
|
|
||||||
} catch {
|
|
||||||
/* 忽略错误 */
|
|
||||||
}
|
|
||||||
return undefined;
|
|
||||||
}
|
|
||||||
|
|
||||||
private static getLinuxMotherboardUuid(): string | undefined {
|
|
||||||
try {
|
|
||||||
// 尝试多种方法获取主板UUID
|
|
||||||
const methods = [
|
|
||||||
() => fs.readFileSync("/sys/class/dmi/id/product_uuid", "utf8").trim(),
|
|
||||||
() => fs.readFileSync("/proc/sys/kernel/random/boot_id", "utf8").trim(),
|
|
||||||
() => execSync("dmidecode -s system-uuid", { encoding: "utf8" }).trim(),
|
|
||||||
];
|
|
||||||
|
|
||||||
for (const method of methods) {
|
|
||||||
try {
|
|
||||||
const result = method();
|
|
||||||
if (result && result.length > 0 && result !== "Not Settable")
|
|
||||||
return result;
|
|
||||||
} catch {
|
|
||||||
/* 继续尝试下一种方法 */
|
|
||||||
}
|
|
||||||
}
|
|
||||||
} catch {
|
|
||||||
/* 忽略错误 */
|
|
||||||
}
|
|
||||||
return undefined;
|
|
||||||
}
|
|
||||||
|
|
||||||
private static getLinuxDiskSerial(): string | undefined {
|
|
||||||
try {
|
|
||||||
// 获取根分区所在磁盘的序列号
|
|
||||||
const rootDisk = execSync(
|
|
||||||
"df / | tail -1 | awk '{print $1}' | sed 's/[0-9]*$//'",
|
|
||||||
{ encoding: "utf8" },
|
|
||||||
).trim();
|
|
||||||
if (rootDisk) {
|
|
||||||
const serial = execSync(
|
|
||||||
`udevadm info --name=${rootDisk} | grep ID_SERIAL= | cut -d= -f2`,
|
|
||||||
{ encoding: "utf8" },
|
|
||||||
).trim();
|
|
||||||
if (serial && serial.length > 0) return serial;
|
|
||||||
}
|
|
||||||
} catch {
|
|
||||||
/* 忽略错误 */
|
|
||||||
}
|
|
||||||
return undefined;
|
|
||||||
}
|
|
||||||
|
|
||||||
private static getLinuxBiosSerial(): string | undefined {
|
|
||||||
try {
|
|
||||||
const methods = [
|
|
||||||
() => fs.readFileSync("/sys/class/dmi/id/board_serial", "utf8").trim(),
|
|
||||||
() =>
|
|
||||||
execSync("dmidecode -s baseboard-serial-number", {
|
|
||||||
encoding: "utf8",
|
|
||||||
}).trim(),
|
|
||||||
];
|
|
||||||
|
|
||||||
for (const method of methods) {
|
|
||||||
try {
|
|
||||||
const result = method();
|
|
||||||
if (result && result.length > 0 && result !== "Not Specified")
|
|
||||||
return result;
|
|
||||||
} catch {
|
|
||||||
/* 继续尝试下一种方法 */
|
|
||||||
}
|
|
||||||
}
|
|
||||||
} catch {
|
|
||||||
/* 忽略错误 */
|
|
||||||
}
|
|
||||||
return undefined;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Windows平台硬件信息获取
|
|
||||||
*/
|
|
||||||
private static getWindowsCpuId(): string | undefined {
|
|
||||||
try {
|
|
||||||
const result = execSync("wmic cpu get ProcessorId /value", {
|
|
||||||
encoding: "utf8",
|
|
||||||
});
|
|
||||||
const match = result.match(/ProcessorId=(.+)/);
|
|
||||||
return match?.[1]?.trim();
|
|
||||||
} catch {
|
|
||||||
/* 忽略错误 */
|
|
||||||
}
|
|
||||||
return undefined;
|
|
||||||
}
|
|
||||||
|
|
||||||
private static getWindowsMotherboardUuid(): string | undefined {
|
|
||||||
try {
|
|
||||||
const result = execSync("wmic csproduct get UUID /value", {
|
|
||||||
encoding: "utf8",
|
|
||||||
});
|
|
||||||
const match = result.match(/UUID=(.+)/);
|
|
||||||
return match?.[1]?.trim();
|
|
||||||
} catch {
|
|
||||||
/* 忽略错误 */
|
|
||||||
}
|
|
||||||
return undefined;
|
|
||||||
}
|
|
||||||
|
|
||||||
private static getWindowsDiskSerial(): string | undefined {
|
|
||||||
try {
|
|
||||||
const result = execSync("wmic diskdrive get SerialNumber /value", {
|
|
||||||
encoding: "utf8",
|
|
||||||
});
|
|
||||||
const match = result.match(/SerialNumber=(.+)/);
|
|
||||||
return match?.[1]?.trim();
|
|
||||||
} catch {
|
|
||||||
/* 忽略错误 */
|
|
||||||
}
|
|
||||||
return undefined;
|
|
||||||
}
|
|
||||||
|
|
||||||
private static getWindowsBiosSerial(): string | undefined {
|
|
||||||
try {
|
|
||||||
const result = execSync("wmic baseboard get SerialNumber /value", {
|
|
||||||
encoding: "utf8",
|
|
||||||
});
|
|
||||||
const match = result.match(/SerialNumber=(.+)/);
|
|
||||||
return match?.[1]?.trim();
|
|
||||||
} catch {
|
|
||||||
/* 忽略错误 */
|
|
||||||
}
|
|
||||||
return undefined;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* macOS平台硬件信息获取
|
|
||||||
*/
|
|
||||||
private static getMacOSCpuId(): string | undefined {
|
|
||||||
try {
|
|
||||||
const result = execSync("sysctl -n machdep.cpu.brand_string", {
|
|
||||||
encoding: "utf8",
|
|
||||||
});
|
|
||||||
return result.trim();
|
|
||||||
} catch {
|
|
||||||
/* 忽略错误 */
|
|
||||||
}
|
|
||||||
return undefined;
|
|
||||||
}
|
|
||||||
|
|
||||||
private static getMacOSMotherboardUuid(): string | undefined {
|
|
||||||
try {
|
|
||||||
const result = execSync(
|
|
||||||
'system_profiler SPHardwareDataType | grep "Hardware UUID"',
|
|
||||||
{ encoding: "utf8" },
|
|
||||||
);
|
|
||||||
const match = result.match(/Hardware UUID:\s*(.+)/);
|
|
||||||
return match?.[1]?.trim();
|
|
||||||
} catch {
|
|
||||||
/* 忽略错误 */
|
|
||||||
}
|
|
||||||
return undefined;
|
|
||||||
}
|
|
||||||
|
|
||||||
private static getMacOSDiskSerial(): string | undefined {
|
|
||||||
try {
|
|
||||||
const result = execSync(
|
|
||||||
'system_profiler SPStorageDataType | grep "Serial Number"',
|
|
||||||
{ encoding: "utf8" },
|
|
||||||
);
|
|
||||||
const match = result.match(/Serial Number:\s*(.+)/);
|
|
||||||
return match?.[1]?.trim();
|
|
||||||
} catch {
|
|
||||||
/* 忽略错误 */
|
|
||||||
}
|
|
||||||
return undefined;
|
|
||||||
}
|
|
||||||
|
|
||||||
private static getMacOSBiosSerial(): string | undefined {
|
|
||||||
try {
|
|
||||||
const result = execSync(
|
|
||||||
'system_profiler SPHardwareDataType | grep "Serial Number"',
|
|
||||||
{ encoding: "utf8" },
|
|
||||||
);
|
|
||||||
const match = result.match(/Serial Number \(system\):\s*(.+)/);
|
|
||||||
return match?.[1]?.trim();
|
|
||||||
} catch {
|
|
||||||
/* 忽略错误 */
|
|
||||||
}
|
|
||||||
return undefined;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* 获取稳定的MAC地址
|
|
||||||
* 排除虚拟接口和临时接口
|
|
||||||
*/
|
|
||||||
private static getStableMacAddresses(): string[] {
|
|
||||||
try {
|
|
||||||
const networkInterfaces = os.networkInterfaces();
|
|
||||||
const macAddresses: string[] = [];
|
|
||||||
|
|
||||||
for (const [interfaceName, interfaces] of Object.entries(
|
|
||||||
networkInterfaces,
|
|
||||||
)) {
|
|
||||||
if (!interfaces) continue;
|
|
||||||
|
|
||||||
// 排除虚拟接口和Docker接口
|
|
||||||
if (interfaceName.match(/^(lo|docker|veth|br-|virbr)/)) continue;
|
|
||||||
|
|
||||||
for (const iface of interfaces) {
|
|
||||||
if (
|
|
||||||
!iface.internal &&
|
|
||||||
iface.mac &&
|
|
||||||
iface.mac !== "00:00:00:00:00:00" &&
|
|
||||||
!iface.mac.startsWith("02:42:")
|
|
||||||
) {
|
|
||||||
// Docker接口特征
|
|
||||||
macAddresses.push(iface.mac);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return macAddresses.sort(); // 排序确保一致性
|
|
||||||
} catch {
|
|
||||||
return [];
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* 从硬件信息生成指纹
|
|
||||||
*/
|
|
||||||
private static generateFromHardware(hwInfo: HardwareInfo): string {
|
|
||||||
const components = [
|
|
||||||
hwInfo.motherboardUuid, // 最稳定的标识符
|
|
||||||
hwInfo.cpuId,
|
|
||||||
hwInfo.biosSerial,
|
|
||||||
hwInfo.diskSerial,
|
|
||||||
hwInfo.macAddresses?.join(","),
|
|
||||||
os.platform(), // 操作系统平台
|
|
||||||
os.arch(), // CPU架构
|
|
||||||
].filter(Boolean); // 过滤空值
|
|
||||||
|
|
||||||
if (components.length === 0) {
|
|
||||||
throw new Error("No hardware identifiers found");
|
|
||||||
}
|
|
||||||
|
|
||||||
return this.hashFingerprint(components.join("|"));
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* 生成回退指纹(当硬件检测失败时)
|
|
||||||
*/
|
|
||||||
private static generateFallbackFingerprint(): string {
|
|
||||||
const fallbackComponents = [
|
|
||||||
os.hostname(),
|
|
||||||
os.platform(),
|
|
||||||
os.arch(),
|
|
||||||
process.cwd(),
|
|
||||||
"fallback-mode",
|
|
||||||
];
|
|
||||||
|
|
||||||
databaseLogger.warn(
|
|
||||||
"Using fallback fingerprint due to hardware detection failure",
|
|
||||||
{
|
|
||||||
operation: "hardware_fingerprint_fallback",
|
|
||||||
},
|
|
||||||
);
|
|
||||||
|
|
||||||
return this.hashFingerprint(fallbackComponents.join("|"));
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* 标准化指纹哈希
|
|
||||||
*/
|
|
||||||
private static hashFingerprint(data: string): string {
|
|
||||||
return crypto.createHash("sha256").update(data).digest("hex");
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* 获取硬件指纹信息(用于调试和显示)
|
|
||||||
*/
|
|
||||||
static getHardwareInfo(): HardwareInfo & { fingerprint: string } {
|
|
||||||
const hwInfo = this.detectHardwareInfo();
|
|
||||||
return {
|
|
||||||
...hwInfo,
|
|
||||||
fingerprint: this.generate().substring(0, 16),
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* 验证当前硬件指纹
|
|
||||||
*/
|
|
||||||
static validateFingerprint(expectedFingerprint: string): boolean {
|
|
||||||
try {
|
|
||||||
const currentFingerprint = this.generate();
|
|
||||||
return currentFingerprint === expectedFingerprint;
|
|
||||||
} catch {
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* 清除缓存(用于测试)
|
|
||||||
*/
|
|
||||||
static clearCache(): void {
|
|
||||||
this.cachedFingerprint = null;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
export { HardwareFingerprint };
|
|
||||||
export type { HardwareInfo };
|
|
||||||
295
src/backend/utils/lazy-field-encryption.ts
Normal file
@@ -0,0 +1,295 @@
|
|||||||
|
import { FieldCrypto } from "./field-crypto.js";
|
||||||
|
import { databaseLogger } from "./logger.js";
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 延迟字段加密 - 处理从明文到加密的平滑迁移
|
||||||
|
* 用于在用户登录时将明文敏感数据逐步加密
|
||||||
|
*/
|
||||||
|
export class LazyFieldEncryption {
|
||||||
|
/**
|
||||||
|
* 检测字段是否为明文(未加密)
|
||||||
|
*/
|
||||||
|
static isPlaintextField(value: string): boolean {
|
||||||
|
if (!value) return false;
|
||||||
|
|
||||||
|
try {
|
||||||
|
const parsed = JSON.parse(value);
|
||||||
|
// 如果能解析为JSON且包含加密数据结构,则认为已加密
|
||||||
|
if (parsed && typeof parsed === 'object' &&
|
||||||
|
parsed.data && parsed.iv && parsed.tag && parsed.salt && parsed.recordId) {
|
||||||
|
return false; // 已加密
|
||||||
|
}
|
||||||
|
// JSON格式但不是加密结构,视为明文
|
||||||
|
return true;
|
||||||
|
} catch (jsonError) {
|
||||||
|
// 无法解析为JSON,视为明文
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 安全获取字段值 - 自动处理明文和加密数据
|
||||||
|
* 如果是明文,直接返回;如果已加密,则解密
|
||||||
|
*/
|
||||||
|
static safeGetFieldValue(
|
||||||
|
fieldValue: string,
|
||||||
|
userKEK: Buffer,
|
||||||
|
recordId: string,
|
||||||
|
fieldName: string
|
||||||
|
): string {
|
||||||
|
if (!fieldValue) return "";
|
||||||
|
|
||||||
|
if (this.isPlaintextField(fieldValue)) {
|
||||||
|
// 明文数据,直接返回
|
||||||
|
databaseLogger.debug("Field detected as plaintext, returning as-is", {
|
||||||
|
operation: "lazy_encryption_plaintext_detected",
|
||||||
|
recordId,
|
||||||
|
fieldName,
|
||||||
|
valuePreview: fieldValue.substring(0, 10) + "...",
|
||||||
|
});
|
||||||
|
return fieldValue;
|
||||||
|
} else {
|
||||||
|
// 加密数据,需要解密
|
||||||
|
try {
|
||||||
|
const decrypted = FieldCrypto.decryptField(fieldValue, userKEK, recordId, fieldName);
|
||||||
|
databaseLogger.debug("Field decrypted successfully", {
|
||||||
|
operation: "lazy_encryption_decrypt_success",
|
||||||
|
recordId,
|
||||||
|
fieldName,
|
||||||
|
});
|
||||||
|
return decrypted;
|
||||||
|
} catch (error) {
|
||||||
|
databaseLogger.error("Failed to decrypt field", error, {
|
||||||
|
operation: "lazy_encryption_decrypt_failed",
|
||||||
|
recordId,
|
||||||
|
fieldName,
|
||||||
|
error: error instanceof Error ? error.message : "Unknown error",
|
||||||
|
});
|
||||||
|
throw error;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 迁移明文字段到加密状态
|
||||||
|
* 返回加密后的值,如果已经加密则返回原值
|
||||||
|
*/
|
||||||
|
static migrateFieldToEncrypted(
|
||||||
|
fieldValue: string,
|
||||||
|
userKEK: Buffer,
|
||||||
|
recordId: string,
|
||||||
|
fieldName: string
|
||||||
|
): { encrypted: string; wasPlaintext: boolean } {
|
||||||
|
if (!fieldValue) {
|
||||||
|
return { encrypted: "", wasPlaintext: false };
|
||||||
|
}
|
||||||
|
|
||||||
|
if (this.isPlaintextField(fieldValue)) {
|
||||||
|
// 明文数据,需要加密
|
||||||
|
try {
|
||||||
|
const encrypted = FieldCrypto.encryptField(fieldValue, userKEK, recordId, fieldName);
|
||||||
|
|
||||||
|
databaseLogger.info("Field migrated from plaintext to encrypted", {
|
||||||
|
operation: "lazy_encryption_migrate_success",
|
||||||
|
recordId,
|
||||||
|
fieldName,
|
||||||
|
plaintextLength: fieldValue.length,
|
||||||
|
});
|
||||||
|
|
||||||
|
return { encrypted, wasPlaintext: true };
|
||||||
|
} catch (error) {
|
||||||
|
databaseLogger.error("Failed to encrypt plaintext field", error, {
|
||||||
|
operation: "lazy_encryption_migrate_failed",
|
||||||
|
recordId,
|
||||||
|
fieldName,
|
||||||
|
error: error instanceof Error ? error.message : "Unknown error",
|
||||||
|
});
|
||||||
|
throw error;
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
// 已经加密,无需处理
|
||||||
|
databaseLogger.debug("Field already encrypted, no migration needed", {
|
||||||
|
operation: "lazy_encryption_already_encrypted",
|
||||||
|
recordId,
|
||||||
|
fieldName,
|
||||||
|
});
|
||||||
|
return { encrypted: fieldValue, wasPlaintext: false };
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 批量迁移记录中的敏感字段
|
||||||
|
*/
|
||||||
|
static migrateRecordSensitiveFields(
|
||||||
|
record: any,
|
||||||
|
sensitiveFields: string[],
|
||||||
|
userKEK: Buffer,
|
||||||
|
recordId: string
|
||||||
|
): {
|
||||||
|
updatedRecord: any;
|
||||||
|
migratedFields: string[];
|
||||||
|
needsUpdate: boolean
|
||||||
|
} {
|
||||||
|
const updatedRecord = { ...record };
|
||||||
|
const migratedFields: string[] = [];
|
||||||
|
let needsUpdate = false;
|
||||||
|
|
||||||
|
for (const fieldName of sensitiveFields) {
|
||||||
|
const fieldValue = record[fieldName];
|
||||||
|
|
||||||
|
if (fieldValue && this.isPlaintextField(fieldValue)) {
|
||||||
|
try {
|
||||||
|
const { encrypted } = this.migrateFieldToEncrypted(
|
||||||
|
fieldValue,
|
||||||
|
userKEK,
|
||||||
|
recordId,
|
||||||
|
fieldName
|
||||||
|
);
|
||||||
|
|
||||||
|
updatedRecord[fieldName] = encrypted;
|
||||||
|
migratedFields.push(fieldName);
|
||||||
|
needsUpdate = true;
|
||||||
|
|
||||||
|
databaseLogger.debug("Record field migrated to encrypted", {
|
||||||
|
operation: "lazy_encryption_record_field_migrated",
|
||||||
|
recordId,
|
||||||
|
fieldName,
|
||||||
|
});
|
||||||
|
} catch (error) {
|
||||||
|
databaseLogger.error("Failed to migrate record field", error, {
|
||||||
|
operation: "lazy_encryption_record_field_failed",
|
||||||
|
recordId,
|
||||||
|
fieldName,
|
||||||
|
});
|
||||||
|
// 不抛出错误,继续处理其他字段
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (needsUpdate) {
|
||||||
|
databaseLogger.info("Record requires sensitive field migration", {
|
||||||
|
operation: "lazy_encryption_record_migration_needed",
|
||||||
|
recordId,
|
||||||
|
migratedFields,
|
||||||
|
totalMigratedFields: migratedFields.length,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
return { updatedRecord, migratedFields, needsUpdate };
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 获取敏感字段列表 - 定义哪些字段需要延迟加密
|
||||||
|
*/
|
||||||
|
static getSensitiveFieldsForTable(tableName: string): string[] {
|
||||||
|
const sensitiveFieldsMap: Record<string, string[]> = {
|
||||||
|
'ssh_data': ['password', 'key', 'key_password'],
|
||||||
|
'ssh_credentials': ['password', 'key', 'key_password', 'private_key'],
|
||||||
|
'users': ['totp_secret', 'totp_backup_codes'],
|
||||||
|
};
|
||||||
|
|
||||||
|
return sensitiveFieldsMap[tableName] || [];
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 检查用户是否有需要迁移的明文数据
|
||||||
|
*/
|
||||||
|
static async checkUserNeedsMigration(
|
||||||
|
userId: string,
|
||||||
|
userKEK: Buffer,
|
||||||
|
db: any
|
||||||
|
): Promise<{
|
||||||
|
needsMigration: boolean;
|
||||||
|
plaintextFields: Array<{ table: string; recordId: string; fields: string[] }>;
|
||||||
|
}> {
|
||||||
|
const plaintextFields: Array<{ table: string; recordId: string; fields: string[] }> = [];
|
||||||
|
let needsMigration = false;
|
||||||
|
|
||||||
|
try {
|
||||||
|
// 检查 ssh_data 表
|
||||||
|
const sshHosts = db.prepare("SELECT * FROM ssh_data WHERE user_id = ?").all(userId);
|
||||||
|
for (const host of sshHosts) {
|
||||||
|
const sensitiveFields = this.getSensitiveFieldsForTable('ssh_data');
|
||||||
|
const hostPlaintextFields: string[] = [];
|
||||||
|
|
||||||
|
for (const field of sensitiveFields) {
|
||||||
|
if (host[field] && this.isPlaintextField(host[field])) {
|
||||||
|
hostPlaintextFields.push(field);
|
||||||
|
needsMigration = true;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (hostPlaintextFields.length > 0) {
|
||||||
|
plaintextFields.push({
|
||||||
|
table: 'ssh_data',
|
||||||
|
recordId: host.id.toString(),
|
||||||
|
fields: hostPlaintextFields,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// 检查 ssh_credentials 表
|
||||||
|
const sshCredentials = db.prepare("SELECT * FROM ssh_credentials WHERE user_id = ?").all(userId);
|
||||||
|
for (const credential of sshCredentials) {
|
||||||
|
const sensitiveFields = this.getSensitiveFieldsForTable('ssh_credentials');
|
||||||
|
const credentialPlaintextFields: string[] = [];
|
||||||
|
|
||||||
|
for (const field of sensitiveFields) {
|
||||||
|
if (credential[field] && this.isPlaintextField(credential[field])) {
|
||||||
|
credentialPlaintextFields.push(field);
|
||||||
|
needsMigration = true;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (credentialPlaintextFields.length > 0) {
|
||||||
|
plaintextFields.push({
|
||||||
|
table: 'ssh_credentials',
|
||||||
|
recordId: credential.id.toString(),
|
||||||
|
fields: credentialPlaintextFields,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// 检查 users 表中的敏感字段
|
||||||
|
const user = db.prepare("SELECT * FROM users WHERE id = ?").get(userId);
|
||||||
|
if (user) {
|
||||||
|
const sensitiveFields = this.getSensitiveFieldsForTable('users');
|
||||||
|
const userPlaintextFields: string[] = [];
|
||||||
|
|
||||||
|
for (const field of sensitiveFields) {
|
||||||
|
if (user[field] && this.isPlaintextField(user[field])) {
|
||||||
|
userPlaintextFields.push(field);
|
||||||
|
needsMigration = true;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (userPlaintextFields.length > 0) {
|
||||||
|
plaintextFields.push({
|
||||||
|
table: 'users',
|
||||||
|
recordId: userId,
|
||||||
|
fields: userPlaintextFields,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
databaseLogger.info("User migration check completed", {
|
||||||
|
operation: "lazy_encryption_user_check",
|
||||||
|
userId,
|
||||||
|
needsMigration,
|
||||||
|
plaintextFieldsCount: plaintextFields.length,
|
||||||
|
totalPlaintextFields: plaintextFields.reduce((sum, item) => sum + item.fields.length, 0),
|
||||||
|
});
|
||||||
|
|
||||||
|
return { needsMigration, plaintextFields };
|
||||||
|
|
||||||
|
} catch (error) {
|
||||||
|
databaseLogger.error("Failed to check user migration needs", error, {
|
||||||
|
operation: "lazy_encryption_user_check_failed",
|
||||||
|
userId,
|
||||||
|
error: error instanceof Error ? error.message : "Unknown error",
|
||||||
|
});
|
||||||
|
|
||||||
|
return { needsMigration: false, plaintextFields: [] };
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -1,201 +0,0 @@
|
|||||||
import crypto from "crypto";
|
|
||||||
import { databaseLogger } from "./logger.js";
|
|
||||||
import { HardwareFingerprint } from "./hardware-fingerprint.js";
|
|
||||||
|
|
||||||
// Serialized form of a master key protected with the hardware-derived KEK.
interface ProtectedKeyData {
  // Hex-encoded AES-256-GCM ciphertext of the master key.
  data: string;
  // Hex-encoded initialization vector (16 random bytes).
  iv: string;
  // Hex-encoded GCM authentication tag.
  tag: string;
  // Protection format version (currently "v1").
  version: string;
  // First 16 characters of the hardware fingerprint used to derive the KEK;
  // checked on decrypt to detect a different machine.
  fingerprint: string;
}
|
|
||||||
|
|
||||||
/**
 * Binds the master key to the current machine: the key is encrypted with a
 * KEK derived from the hardware fingerprint, so the stored blob is only
 * decryptable on the hardware that produced it.
 */
class MasterKeyProtection {
  // Protection format version stamped into every payload.
  private static readonly VERSION = "v1";
  // Fixed application salt for KEK derivation. NOTE(review): the salt is a
  // compile-time constant, so KEK uniqueness rests entirely on the hardware
  // fingerprint input.
  private static readonly KEK_SALT = "termix-kek-salt-v1";
  private static readonly KEK_ITERATIONS = 50000;

  /**
   * Returns the hardware fingerprint used as KEK input material.
   * @throws Error when HardwareFingerprint.generate() fails.
   */
  private static generateDeviceFingerprint(): string {
    try {
      const fingerprint = HardwareFingerprint.generate();

      return fingerprint;
    } catch (error) {
      databaseLogger.error("Failed to generate hardware fingerprint", error, {
        operation: "hardware_fingerprint_generation_failed",
      });
      throw new Error("Hardware fingerprint generation failed");
    }
  }

  /**
   * Derives the 256-bit key-encryption-key from the hardware fingerprint via
   * PBKDF2-SHA256 (fixed salt, 50000 iterations).
   */
  private static deriveKEK(): Buffer {
    const fingerprint = this.generateDeviceFingerprint();
    const salt = Buffer.from(this.KEK_SALT);

    const kek = crypto.pbkdf2Sync(
      fingerprint,
      salt,
      this.KEK_ITERATIONS,
      32,
      "sha256",
    );

    return kek;
  }

  /**
   * Encrypts the master key with AES-256-GCM under the hardware KEK and
   * returns a JSON-serialized ProtectedKeyData blob.
   * @param masterKey - Master key to protect; treated as a hex string by the
   *                    cipher (input encoding "hex") — assumes callers pass
   *                    hex-encoded keys, non-hex input would be silently
   *                    mangled. TODO confirm against callers.
   * @throws Error on empty input or any encryption failure.
   */
  static encryptMasterKey(masterKey: string): string {
    if (!masterKey) {
      throw new Error("Master key cannot be empty");
    }

    try {
      const kek = this.deriveKEK();
      const iv = crypto.randomBytes(16);
      // `as any`: works around the Node typings not exposing getAuthTag on
      // the generic Cipher type.
      const cipher = crypto.createCipheriv("aes-256-gcm", kek, iv) as any;

      let encrypted = cipher.update(masterKey, "hex", "hex");
      encrypted += cipher.final("hex");
      const tag = cipher.getAuthTag();

      const protectedData: ProtectedKeyData = {
        data: encrypted,
        iv: iv.toString("hex"),
        tag: tag.toString("hex"),
        version: this.VERSION,
        // Only a 16-char prefix is stored — enough to detect hardware change.
        fingerprint: this.generateDeviceFingerprint().substring(0, 16),
      };

      const result = JSON.stringify(protectedData);

      databaseLogger.info("Master key encrypted with hardware KEK", {
        operation: "master_key_encryption",
        version: this.VERSION,
        fingerprintPrefix: protectedData.fingerprint,
      });

      return result;
    } catch (error) {
      databaseLogger.error("Failed to encrypt master key", error, {
        operation: "master_key_encryption_failed",
      });
      throw new Error("Master key encryption failed");
    }
  }

  /**
   * Decrypts a ProtectedKeyData blob produced by encryptMasterKey.
   * Verifies the format version and the stored fingerprint prefix against the
   * current hardware before attempting decryption.
   * @returns the master key (hex string, mirroring the encrypt-side encoding).
   * @throws Error on empty input, version/fingerprint mismatch, or GCM
   *         authentication failure.
   */
  static decryptMasterKey(encryptedKey: string): string {
    if (!encryptedKey) {
      throw new Error("Encrypted key cannot be empty");
    }

    try {
      const protectedData: ProtectedKeyData = JSON.parse(encryptedKey);

      if (protectedData.version !== this.VERSION) {
        throw new Error(
          `Unsupported protection version: ${protectedData.version}`,
        );
      }

      // Fingerprint prefix check: fails fast (with a clearer error than a GCM
      // auth failure) when the blob was created on different hardware.
      const currentFingerprint = this.generateDeviceFingerprint().substring(
        0,
        16,
      );
      if (protectedData.fingerprint !== currentFingerprint) {
        databaseLogger.warn("Hardware fingerprint mismatch detected", {
          operation: "master_key_decryption",
          expected: protectedData.fingerprint,
          current: currentFingerprint,
        });
        throw new Error(
          "Hardware fingerprint mismatch - key was encrypted on different hardware",
        );
      }

      const kek = this.deriveKEK();
      const decipher = crypto.createDecipheriv(
        "aes-256-gcm",
        kek,
        Buffer.from(protectedData.iv, "hex"),
      ) as any;
      decipher.setAuthTag(Buffer.from(protectedData.tag, "hex"));

      let decrypted = decipher.update(protectedData.data, "hex", "hex");
      decrypted += decipher.final("hex");

      return decrypted;
    } catch (error) {
      databaseLogger.error("Failed to decrypt master key", error, {
        operation: "master_key_decryption_failed",
      });
      throw new Error(
        `Master key decryption failed: ${error instanceof Error ? error.message : "Unknown error"}`,
      );
    }
  }

  /**
   * Structural check: does `data` parse as JSON with all ProtectedKeyData
   * fields present (and truthy)? Does not validate the ciphertext itself.
   */
  static isProtectedKey(data: string): boolean {
    try {
      const parsed = JSON.parse(data);
      return !!(
        parsed.data &&
        parsed.iv &&
        parsed.tag &&
        parsed.version &&
        parsed.fingerprint
      );
    } catch {
      return false;
    }
  }

  /**
   * Self-test: encrypts and decrypts a random key, returning whether the
   * round trip is lossless on this machine. Never throws.
   */
  static validateProtection(): boolean {
    try {
      const testKey = crypto.randomBytes(32).toString("hex");
      const encrypted = this.encryptMasterKey(testKey);
      const decrypted = this.decryptMasterKey(encrypted);

      const isValid = decrypted === testKey;

      databaseLogger.info("Master key protection validation completed", {
        operation: "protection_validation",
        result: isValid ? "passed" : "failed",
      });

      return isValid;
    } catch (error) {
      databaseLogger.error("Master key protection validation failed", error, {
        operation: "protection_validation_failed",
      });
      return false;
    }
  }

  /**
   * Reads metadata out of a protected blob without decrypting it.
   * @returns version, stored fingerprint prefix, and whether that prefix
   *          matches the current hardware; null when `encryptedKey` is not a
   *          protected blob or fingerprinting fails.
   */
  static getProtectionInfo(encryptedKey: string): {
    version: string;
    fingerprint: string;
    isCurrentDevice: boolean;
  } | null {
    try {
      if (!this.isProtectedKey(encryptedKey)) {
        return null;
      }

      const protectedData: ProtectedKeyData = JSON.parse(encryptedKey);
      const currentFingerprint = this.generateDeviceFingerprint().substring(
        0,
        16,
      );

      return {
        version: protectedData.version,
        fingerprint: protectedData.fingerprint,
        isCurrentDevice: protectedData.fingerprint === currentFingerprint,
      };
    } catch {
      return null;
    }
  }
}
|
|
||||||
|
|
||||||
export { MasterKeyProtection };
|
|
||||||
export type { ProtectedKeyData };
|
|
||||||
204
src/backend/utils/simple-db-ops.ts
Normal file
@@ -0,0 +1,204 @@
|
|||||||
|
import { getDb, DatabaseSaveTrigger } from "../database/db/index.js";
|
||||||
|
import { DataCrypto } from "./data-crypto.js";
|
||||||
|
import { databaseLogger } from "./logger.js";
|
||||||
|
import type { SQLiteTable } from "drizzle-orm/sqlite-core";
|
||||||
|
|
||||||
|
// Tables whose rows contain encrypted fields handled by SimpleDBOps.
type TableName = "users" | "ssh_data" | "ssh_credentials";
|
||||||
|
|
||||||
|
/**
|
||||||
|
* SimpleDBOps - Simplified encrypted database operations
|
||||||
|
*
|
||||||
|
* Linus-style simplification:
|
||||||
|
* - Remove all complex abstraction layers
|
||||||
|
* - Direct CRUD operations
|
||||||
|
* - Automatic encryption/decryption
|
||||||
|
* - No special case handling
|
||||||
|
*/
|
||||||
|
class SimpleDBOps {
|
||||||
|
/**
|
||||||
|
* Insert encrypted record
|
||||||
|
*/
|
||||||
|
static async insert<T extends Record<string, any>>(
|
||||||
|
table: SQLiteTable<any>,
|
||||||
|
tableName: TableName,
|
||||||
|
data: T,
|
||||||
|
userId: string,
|
||||||
|
): Promise<T> {
|
||||||
|
// Get user data key once and reuse throughout operation
|
||||||
|
const userDataKey = DataCrypto.validateUserAccess(userId);
|
||||||
|
|
||||||
|
// Generate consistent temporary ID for encryption context if record has no ID
|
||||||
|
const tempId = data.id || `temp-${userId}-${Date.now()}`;
|
||||||
|
const dataWithTempId = { ...data, id: tempId };
|
||||||
|
|
||||||
|
// Encrypt data using the locked key - recordId will be stored in encrypted fields
|
||||||
|
const encryptedData = DataCrypto.encryptRecord(tableName, dataWithTempId, userId, userDataKey);
|
||||||
|
|
||||||
|
// Remove temp ID if it was generated, let database assign real ID
|
||||||
|
if (!data.id) {
|
||||||
|
delete encryptedData.id;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Insert into database
|
||||||
|
const result = await getDb().insert(table).values(encryptedData).returning();
|
||||||
|
|
||||||
|
// Trigger database save after insert
|
||||||
|
DatabaseSaveTrigger.triggerSave(`insert_${tableName}`);
|
||||||
|
|
||||||
|
// Decrypt return result using the same key - FieldCrypto will use stored recordId
|
||||||
|
const decryptedResult = DataCrypto.decryptRecord(
|
||||||
|
tableName,
|
||||||
|
result[0],
|
||||||
|
userId,
|
||||||
|
userDataKey
|
||||||
|
);
|
||||||
|
|
||||||
|
databaseLogger.debug(`Inserted encrypted record into ${tableName}`, {
|
||||||
|
operation: "simple_insert",
|
||||||
|
table: tableName,
|
||||||
|
userId,
|
||||||
|
recordId: result[0].id,
|
||||||
|
});
|
||||||
|
|
||||||
|
return decryptedResult as T;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Query multiple records
|
||||||
|
*/
|
||||||
|
static async select<T extends Record<string, any>>(
|
||||||
|
query: any,
|
||||||
|
tableName: TableName,
|
||||||
|
userId: string,
|
||||||
|
): Promise<T[]> {
|
||||||
|
// Get user data key once and reuse throughout operation
|
||||||
|
const userDataKey = DataCrypto.validateUserAccess(userId);
|
||||||
|
|
||||||
|
// Execute query
|
||||||
|
const results = await query;
|
||||||
|
|
||||||
|
// Decrypt results using locked key
|
||||||
|
const decryptedResults = DataCrypto.decryptRecords(
|
||||||
|
tableName,
|
||||||
|
results,
|
||||||
|
userId,
|
||||||
|
userDataKey
|
||||||
|
);
|
||||||
|
|
||||||
|
return decryptedResults;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Query single record
|
||||||
|
*/
|
||||||
|
static async selectOne<T extends Record<string, any>>(
|
||||||
|
query: any,
|
||||||
|
tableName: TableName,
|
||||||
|
userId: string,
|
||||||
|
): Promise<T | undefined> {
|
||||||
|
// Get user data key once and reuse throughout operation
|
||||||
|
const userDataKey = DataCrypto.validateUserAccess(userId);
|
||||||
|
|
||||||
|
// Execute query
|
||||||
|
const result = await query;
|
||||||
|
if (!result) return undefined;
|
||||||
|
|
||||||
|
// Decrypt results using locked key
|
||||||
|
const decryptedResult = DataCrypto.decryptRecord(
|
||||||
|
tableName,
|
||||||
|
result,
|
||||||
|
userId,
|
||||||
|
userDataKey
|
||||||
|
);
|
||||||
|
|
||||||
|
databaseLogger.debug(`Selected single record from ${tableName}`, {
|
||||||
|
operation: "simple_select_one",
|
||||||
|
table: tableName,
|
||||||
|
userId,
|
||||||
|
recordId: result.id,
|
||||||
|
});
|
||||||
|
|
||||||
|
return decryptedResult;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Update record
|
||||||
|
*/
|
||||||
|
static async update<T extends Record<string, any>>(
|
||||||
|
table: SQLiteTable<any>,
|
||||||
|
tableName: TableName,
|
||||||
|
where: any,
|
||||||
|
data: Partial<T>,
|
||||||
|
userId: string,
|
||||||
|
): Promise<T[]> {
|
||||||
|
// Get user data key once and reuse throughout operation
|
||||||
|
const userDataKey = DataCrypto.validateUserAccess(userId);
|
||||||
|
|
||||||
|
// Encrypt update data using the locked key
|
||||||
|
const encryptedData = DataCrypto.encryptRecord(tableName, data, userId, userDataKey);
|
||||||
|
|
||||||
|
// Execute update
|
||||||
|
const result = await getDb()
|
||||||
|
.update(table)
|
||||||
|
.set(encryptedData)
|
||||||
|
.where(where)
|
||||||
|
.returning();
|
||||||
|
|
||||||
|
// Trigger database save after update
|
||||||
|
DatabaseSaveTrigger.triggerSave(`update_${tableName}`);
|
||||||
|
|
||||||
|
// Decrypt return data using the same key
|
||||||
|
const decryptedResults = DataCrypto.decryptRecords(
|
||||||
|
tableName,
|
||||||
|
result,
|
||||||
|
userId,
|
||||||
|
userDataKey
|
||||||
|
);
|
||||||
|
|
||||||
|
databaseLogger.debug(`Updated records in ${tableName}`, {
|
||||||
|
operation: "simple_update",
|
||||||
|
table: tableName,
|
||||||
|
userId,
|
||||||
|
updatedCount: result.length,
|
||||||
|
});
|
||||||
|
|
||||||
|
return decryptedResults as T[];
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Delete record
|
||||||
|
*/
|
||||||
|
static async delete(
|
||||||
|
table: SQLiteTable<any>,
|
||||||
|
tableName: TableName,
|
||||||
|
where: any,
|
||||||
|
userId: string,
|
||||||
|
): Promise<any[]> {
|
||||||
|
const result = await getDb().delete(table).where(where).returning();
|
||||||
|
|
||||||
|
// Trigger database save after delete
|
||||||
|
DatabaseSaveTrigger.triggerSave(`delete_${tableName}`);
|
||||||
|
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Health check
|
||||||
|
*/
|
||||||
|
static async healthCheck(userId: string): Promise<boolean> {
|
||||||
|
return DataCrypto.canUserAccessData(userId);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Special method: return encrypted data (for auto-start scenarios)
|
||||||
|
* No decryption, return data in encrypted state directly
|
||||||
|
*/
|
||||||
|
static async selectEncrypted(query: any, tableName: TableName): Promise<any[]> {
|
||||||
|
// Execute query directly, no decryption
|
||||||
|
const results = await query;
|
||||||
|
|
||||||
|
return results;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export { SimpleDBOps, type TableName };
|
||||||
329
src/backend/utils/system-crypto.ts
Normal file
@@ -0,0 +1,329 @@
|
|||||||
|
import crypto from "crypto";
|
||||||
|
import { promises as fs } from "fs";
|
||||||
|
import path from "path";
|
||||||
|
import { databaseLogger } from "./logger.js";
|
||||||
|
|
||||||
|
/**
|
||||||
|
* SystemCrypto - Open source friendly system key management
|
||||||
|
*
|
||||||
|
* Linus principles:
|
||||||
|
* - Remove complex "system master key" layer - doesn't solve real threats
|
||||||
|
* - Remove hardcoded default keys - security disaster for open source software
|
||||||
|
* - Auto-generate on first startup - each instance independently secure
|
||||||
|
* - Simple and direct, focus on real security boundaries
|
||||||
|
*/
|
||||||
|
class SystemCrypto {
|
||||||
|
private static instance: SystemCrypto;
|
||||||
|
private jwtSecret: string | null = null;
|
||||||
|
private databaseKey: Buffer | null = null;
|
||||||
|
private internalAuthToken: string | null = null;
|
||||||
|
|
||||||
|
|
||||||
|
private constructor() {}
|
||||||
|
|
||||||
|
static getInstance(): SystemCrypto {
|
||||||
|
if (!this.instance) {
|
||||||
|
this.instance = new SystemCrypto();
|
||||||
|
}
|
||||||
|
return this.instance;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Initialize JWT secret - environment variable only
|
||||||
|
*/
|
||||||
|
async initializeJWTSecret(): Promise<void> {
|
||||||
|
try {
|
||||||
|
databaseLogger.info("Initializing JWT secret", {
|
||||||
|
operation: "jwt_init",
|
||||||
|
});
|
||||||
|
|
||||||
|
// Check environment variable
|
||||||
|
const envSecret = process.env.JWT_SECRET;
|
||||||
|
if (envSecret && envSecret.length >= 64) {
|
||||||
|
this.jwtSecret = envSecret;
|
||||||
|
databaseLogger.info("✅ Using JWT secret from environment variable", {
|
||||||
|
operation: "jwt_env_loaded",
|
||||||
|
source: "environment"
|
||||||
|
});
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// No environment variable - generate and guide user
|
||||||
|
await this.generateAndGuideUser();
|
||||||
|
|
||||||
|
} catch (error) {
|
||||||
|
databaseLogger.error("Failed to initialize JWT secret", error, {
|
||||||
|
operation: "jwt_init_failed",
|
||||||
|
});
|
||||||
|
throw new Error("JWT secret initialization failed");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get JWT secret
|
||||||
|
*/
|
||||||
|
async getJWTSecret(): Promise<string> {
|
||||||
|
if (!this.jwtSecret) {
|
||||||
|
await this.initializeJWTSecret();
|
||||||
|
}
|
||||||
|
return this.jwtSecret!;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Initialize database encryption key - environment variable only
|
||||||
|
*/
|
||||||
|
async initializeDatabaseKey(): Promise<void> {
|
||||||
|
try {
|
||||||
|
databaseLogger.info("Initializing database encryption key", {
|
||||||
|
operation: "db_key_init",
|
||||||
|
});
|
||||||
|
|
||||||
|
// Check environment variable
|
||||||
|
const envKey = process.env.DATABASE_KEY;
|
||||||
|
if (envKey && envKey.length >= 64) {
|
||||||
|
this.databaseKey = Buffer.from(envKey, 'hex');
|
||||||
|
databaseLogger.info("✅ Using database key from environment variable", {
|
||||||
|
operation: "db_key_env_loaded",
|
||||||
|
source: "environment"
|
||||||
|
});
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// No environment variable - generate and guide user
|
||||||
|
await this.generateAndGuideDatabaseKey();
|
||||||
|
|
||||||
|
} catch (error) {
|
||||||
|
databaseLogger.error("Failed to initialize database key", error, {
|
||||||
|
operation: "db_key_init_failed",
|
||||||
|
});
|
||||||
|
throw new Error("Database key initialization failed");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get database encryption key
|
||||||
|
*/
|
||||||
|
async getDatabaseKey(): Promise<Buffer> {
|
||||||
|
if (!this.databaseKey) {
|
||||||
|
await this.initializeDatabaseKey();
|
||||||
|
}
|
||||||
|
return this.databaseKey!;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Initialize internal auth token - environment variable only
|
||||||
|
*/
|
||||||
|
async initializeInternalAuthToken(): Promise<void> {
|
||||||
|
try {
|
||||||
|
databaseLogger.info("Initializing internal auth token", {
|
||||||
|
operation: "internal_auth_init",
|
||||||
|
});
|
||||||
|
|
||||||
|
// Check environment variable
|
||||||
|
const envToken = process.env.INTERNAL_AUTH_TOKEN;
|
||||||
|
if (envToken && envToken.length >= 32) {
|
||||||
|
this.internalAuthToken = envToken;
|
||||||
|
databaseLogger.info("✅ Using internal auth token from environment variable", {
|
||||||
|
operation: "internal_auth_env_loaded",
|
||||||
|
source: "environment"
|
||||||
|
});
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// No environment variable - generate and guide user
|
||||||
|
await this.generateAndGuideInternalAuthToken();
|
||||||
|
|
||||||
|
} catch (error) {
|
||||||
|
databaseLogger.error("Failed to initialize internal auth token", error, {
|
||||||
|
operation: "internal_auth_init_failed",
|
||||||
|
});
|
||||||
|
throw new Error("Internal auth token initialization failed");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get internal auth token
|
||||||
|
*/
|
||||||
|
async getInternalAuthToken(): Promise<string> {
|
||||||
|
if (!this.internalAuthToken) {
|
||||||
|
await this.initializeInternalAuthToken();
|
||||||
|
}
|
||||||
|
return this.internalAuthToken!;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Generate and auto-save to .env file
|
||||||
|
*/
|
||||||
|
private async generateAndGuideUser(): Promise<void> {
|
||||||
|
const newSecret = crypto.randomBytes(32).toString('hex');
|
||||||
|
const instanceId = crypto.randomBytes(8).toString('hex');
|
||||||
|
|
||||||
|
// Set in memory for current session
|
||||||
|
this.jwtSecret = newSecret;
|
||||||
|
|
||||||
|
// Auto-save to .env file
|
||||||
|
await this.updateEnvFile("JWT_SECRET", newSecret);
|
||||||
|
|
||||||
|
databaseLogger.success("🔐 JWT secret auto-generated and saved to .env", {
|
||||||
|
operation: "jwt_auto_generated",
|
||||||
|
instanceId,
|
||||||
|
envVarName: "JWT_SECRET",
|
||||||
|
note: "Ready for use - no restart required"
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
// ===== Database key generation and storage methods =====
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Generate and auto-save database key to .env file
|
||||||
|
*/
|
||||||
|
private async generateAndGuideDatabaseKey(): Promise<void> {
|
||||||
|
const newKey = crypto.randomBytes(32); // 256-bit key for AES-256
|
||||||
|
const newKeyHex = newKey.toString('hex');
|
||||||
|
const instanceId = crypto.randomBytes(8).toString('hex');
|
||||||
|
|
||||||
|
// Set in memory for current session
|
||||||
|
this.databaseKey = newKey;
|
||||||
|
|
||||||
|
// Auto-save to .env file
|
||||||
|
await this.updateEnvFile("DATABASE_KEY", newKeyHex);
|
||||||
|
|
||||||
|
databaseLogger.success("🔒 Database key auto-generated and saved to .env", {
|
||||||
|
operation: "db_key_auto_generated",
|
||||||
|
instanceId,
|
||||||
|
envVarName: "DATABASE_KEY",
|
||||||
|
note: "Ready for use - no restart required"
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Generate and auto-save internal auth token to .env file
|
||||||
|
*/
|
||||||
|
private async generateAndGuideInternalAuthToken(): Promise<void> {
|
||||||
|
const newToken = crypto.randomBytes(32).toString('hex'); // 256-bit token for security
|
||||||
|
const instanceId = crypto.randomBytes(8).toString('hex');
|
||||||
|
|
||||||
|
// Set in memory for current session
|
||||||
|
this.internalAuthToken = newToken;
|
||||||
|
|
||||||
|
// Auto-save to .env file
|
||||||
|
await this.updateEnvFile("INTERNAL_AUTH_TOKEN", newToken);
|
||||||
|
|
||||||
|
databaseLogger.success("🔑 Internal auth token auto-generated and saved to .env", {
|
||||||
|
operation: "internal_auth_auto_generated",
|
||||||
|
instanceId,
|
||||||
|
envVarName: "INTERNAL_AUTH_TOKEN",
|
||||||
|
note: "Ready for use - no restart required"
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Validate JWT secret system
|
||||||
|
*/
|
||||||
|
async validateJWTSecret(): Promise<boolean> {
|
||||||
|
try {
|
||||||
|
const secret = await this.getJWTSecret();
|
||||||
|
if (!secret || secret.length < 32) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Test JWT operations
|
||||||
|
const jwt = await import("jsonwebtoken");
|
||||||
|
const testPayload = { test: true, timestamp: Date.now() };
|
||||||
|
const token = jwt.default.sign(testPayload, secret, { expiresIn: "1s" });
|
||||||
|
const decoded = jwt.default.verify(token, secret);
|
||||||
|
|
||||||
|
return !!decoded;
|
||||||
|
} catch (error) {
|
||||||
|
databaseLogger.error("JWT secret validation failed", error, {
|
||||||
|
operation: "jwt_validation_failed",
|
||||||
|
});
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get JWT key status (simplified version)
|
||||||
|
*/
|
||||||
|
async getSystemKeyStatus() {
|
||||||
|
const isValid = await this.validateJWTSecret();
|
||||||
|
const hasSecret = this.jwtSecret !== null;
|
||||||
|
|
||||||
|
|
||||||
|
// Check environment variable
|
||||||
|
const hasEnvVar = !!(process.env.JWT_SECRET && process.env.JWT_SECRET.length >= 64);
|
||||||
|
|
||||||
|
return {
|
||||||
|
hasSecret,
|
||||||
|
isValid,
|
||||||
|
storage: {
|
||||||
|
environment: hasEnvVar
|
||||||
|
},
|
||||||
|
algorithm: "HS256",
|
||||||
|
note: "Using simplified key management without encryption layers"
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Update .env file with new environment variable
|
||||||
|
*/
|
||||||
|
private async updateEnvFile(key: string, value: string): Promise<void> {
|
||||||
|
// Use persistent config directory if available (Docker), otherwise use current directory
|
||||||
|
const configDir = process.env.NODE_ENV === 'production' &&
|
||||||
|
await fs.access('/app/config').then(() => true).catch(() => false)
|
||||||
|
? '/app/config'
|
||||||
|
: process.cwd();
|
||||||
|
const envPath = path.join(configDir, ".env");
|
||||||
|
|
||||||
|
try {
|
||||||
|
let envContent = "";
|
||||||
|
|
||||||
|
// Read existing .env file if it exists
|
||||||
|
try {
|
||||||
|
envContent = await fs.readFile(envPath, "utf8");
|
||||||
|
} catch {
|
||||||
|
// File doesn't exist, will create new one
|
||||||
|
envContent = "# Termix Auto-generated Configuration\n\n";
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check if key already exists
|
||||||
|
const keyRegex = new RegExp(`^${key}=.*$`, "m");
|
||||||
|
|
||||||
|
if (keyRegex.test(envContent)) {
|
||||||
|
// Update existing key
|
||||||
|
envContent = envContent.replace(keyRegex, `${key}=${value}`);
|
||||||
|
} else {
|
||||||
|
// Add new key
|
||||||
|
if (!envContent.includes("# Security Keys")) {
|
||||||
|
envContent += "\n# Security Keys (Auto-generated)\n";
|
||||||
|
}
|
||||||
|
envContent += `${key}=${value}\n`;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Write updated content
|
||||||
|
await fs.writeFile(envPath, envContent);
|
||||||
|
|
||||||
|
// Update process.env for current session
|
||||||
|
process.env[key] = value;
|
||||||
|
|
||||||
|
databaseLogger.info(`Environment variable ${key} updated in .env file`, {
|
||||||
|
operation: "env_file_update",
|
||||||
|
key,
|
||||||
|
path: envPath
|
||||||
|
});
|
||||||
|
|
||||||
|
} catch (error) {
|
||||||
|
databaseLogger.error(`Failed to update .env file with ${key}`, error, {
|
||||||
|
operation: "env_file_update_failed",
|
||||||
|
key
|
||||||
|
});
|
||||||
|
throw error;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export { SystemCrypto };
|
||||||
408
src/backend/utils/user-crypto.ts
Normal file
@@ -0,0 +1,408 @@
|
|||||||
|
import crypto from "crypto";
|
||||||
|
import { getDb } from "../database/db/index.js";
|
||||||
|
import { settings } from "../database/db/schema.js";
|
||||||
|
import { eq } from "drizzle-orm";
|
||||||
|
import { databaseLogger } from "./logger.js";
|
||||||
|
|
||||||
|
// Per-user salt record for deriving the KEK from the user's password.
interface KEKSalt {
  // Salt value as a string (encoding produced by generateKEKSalt — not
  // visible here; presumably hex/base64, TODO confirm).
  salt: string;
  // PBKDF2 iteration count recorded at creation time.
  iterations: number;
  // Key-derivation algorithm identifier.
  algorithm: string;
  // Creation timestamp as a string (format set by generateKEKSalt — confirm).
  createdAt: string;
}
|
||||||
|
|
||||||
|
// A user's data-encryption-key, stored encrypted under their KEK.
interface EncryptedDEK {
  // Ciphertext of the DEK.
  data: string;
  // Initialization vector used for this encryption.
  iv: string;
  // Authentication tag — presence suggests an AEAD cipher; see encryptDEK.
  tag: string;
  // Cipher algorithm identifier.
  algorithm: string;
  // Creation timestamp as a string.
  createdAt: string;
}
|
||||||
|
|
||||||
|
// In-memory session entry holding a user's decrypted data key.
interface UserSession {
  dataKey: Buffer; // Store DEK directly, delete just-in-time fantasy
  lastActivity: number; // epoch ms of last use — presumably checked against MAX_INACTIVITY
  expiresAt: number; // epoch ms when the session hard-expires (SESSION_DURATION)
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* UserCrypto - Simple direct user encryption
|
||||||
|
*
|
||||||
|
* Linus principles:
|
||||||
|
* - Delete just-in-time fantasy, cache DEK directly
|
||||||
|
* - Reasonable 2-hour timeout, not 5-minute user experience disaster
|
||||||
|
* - Simple working implementation, not theoretically perfect garbage
|
||||||
|
* - Server restart invalidates sessions (this is reasonable)
|
||||||
|
*/
|
||||||
|
class UserCrypto {
|
||||||
|
private static instance: UserCrypto;
|
||||||
|
private userSessions: Map<string, UserSession> = new Map();
|
||||||
|
|
||||||
|
// Configuration constants - reasonable timeout settings
|
||||||
|
private static readonly PBKDF2_ITERATIONS = 100000;
|
||||||
|
private static readonly KEK_LENGTH = 32;
|
||||||
|
private static readonly DEK_LENGTH = 32;
|
||||||
|
private static readonly SESSION_DURATION = 2 * 60 * 60 * 1000; // 2 hours, reasonable user experience
|
||||||
|
private static readonly MAX_INACTIVITY = 30 * 60 * 1000; // 30 minutes, not 1-minute disaster
|
||||||
|
|
||||||
|
private constructor() {
|
||||||
|
// Reasonable cleanup interval
|
||||||
|
setInterval(() => {
|
||||||
|
this.cleanupExpiredSessions();
|
||||||
|
}, 5 * 60 * 1000); // Clean every 5 minutes, not 30 seconds
|
||||||
|
}
|
||||||
|
|
||||||
|
static getInstance(): UserCrypto {
|
||||||
|
if (!this.instance) {
|
||||||
|
this.instance = new UserCrypto();
|
||||||
|
}
|
||||||
|
return this.instance;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* User registration: generate KEK salt and DEK
|
||||||
|
*/
|
||||||
|
async setupUserEncryption(userId: string, password: string): Promise<void> {
|
||||||
|
const kekSalt = await this.generateKEKSalt();
|
||||||
|
await this.storeKEKSalt(userId, kekSalt);
|
||||||
|
|
||||||
|
const KEK = this.deriveKEK(password, kekSalt);
|
||||||
|
const DEK = crypto.randomBytes(UserCrypto.DEK_LENGTH);
|
||||||
|
const encryptedDEK = this.encryptDEK(DEK, KEK);
|
||||||
|
await this.storeEncryptedDEK(userId, encryptedDEK);
|
||||||
|
|
||||||
|
// Immediately clean temporary keys
|
||||||
|
KEK.fill(0);
|
||||||
|
DEK.fill(0);
|
||||||
|
|
||||||
|
databaseLogger.success("User encryption setup completed", {
|
||||||
|
operation: "user_crypto_setup",
|
||||||
|
userId,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* User authentication: validate password and cache DEK
|
||||||
|
* Deleted just-in-time fantasy, works directly
|
||||||
|
*/
|
||||||
|
async authenticateUser(userId: string, password: string): Promise<boolean> {
|
||||||
|
try {
|
||||||
|
// Validate password and decrypt DEK
|
||||||
|
const kekSalt = await this.getKEKSalt(userId);
|
||||||
|
if (!kekSalt) return false;
|
||||||
|
|
||||||
|
const KEK = this.deriveKEK(password, kekSalt);
|
||||||
|
const encryptedDEK = await this.getEncryptedDEK(userId);
|
||||||
|
if (!encryptedDEK) {
|
||||||
|
KEK.fill(0);
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
const DEK = this.decryptDEK(encryptedDEK, KEK);
|
||||||
|
KEK.fill(0); // Immediately clean KEK
|
||||||
|
|
||||||
|
// Debug: Check DEK validity
|
||||||
|
if (!DEK || DEK.length === 0) {
|
||||||
|
databaseLogger.error("DEK is empty or invalid after decryption", {
|
||||||
|
operation: "user_crypto_auth_debug",
|
||||||
|
userId,
|
||||||
|
dekLength: DEK ? DEK.length : 0
|
||||||
|
});
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Create user session, cache DEK directly
|
||||||
|
const now = Date.now();
|
||||||
|
|
||||||
|
// Clean old session
|
||||||
|
const oldSession = this.userSessions.get(userId);
|
||||||
|
if (oldSession) {
|
||||||
|
oldSession.dataKey.fill(0);
|
||||||
|
}
|
||||||
|
|
||||||
|
this.userSessions.set(userId, {
|
||||||
|
dataKey: Buffer.from(DEK), // Create proper Buffer copy
|
||||||
|
lastActivity: now,
|
||||||
|
expiresAt: now + UserCrypto.SESSION_DURATION,
|
||||||
|
});
|
||||||
|
|
||||||
|
DEK.fill(0); // Clean temporary DEK
|
||||||
|
|
||||||
|
databaseLogger.success("User authenticated and DEK cached", {
|
||||||
|
operation: "user_crypto_auth",
|
||||||
|
userId,
|
||||||
|
duration: UserCrypto.SESSION_DURATION,
|
||||||
|
});
|
||||||
|
|
||||||
|
return true;
|
||||||
|
} catch (error) {
|
||||||
|
databaseLogger.warn("User authentication failed", {
|
||||||
|
operation: "user_crypto_auth_failed",
|
||||||
|
userId,
|
||||||
|
error: error instanceof Error ? error.message : "Unknown",
|
||||||
|
});
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get user data key - simple direct return from cache
|
||||||
|
* Deleted just-in-time derivation garbage
|
||||||
|
*/
|
||||||
|
getUserDataKey(userId: string): Buffer | null {
|
||||||
|
const session = this.userSessions.get(userId);
|
||||||
|
if (!session) {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
const now = Date.now();
|
||||||
|
|
||||||
|
// Check if session has expired
|
||||||
|
if (now > session.expiresAt) {
|
||||||
|
this.userSessions.delete(userId);
|
||||||
|
session.dataKey.fill(0);
|
||||||
|
databaseLogger.info("User session expired", {
|
||||||
|
operation: "user_session_expired",
|
||||||
|
userId,
|
||||||
|
});
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check if max inactivity time exceeded
|
||||||
|
if (now - session.lastActivity > UserCrypto.MAX_INACTIVITY) {
|
||||||
|
this.userSessions.delete(userId);
|
||||||
|
session.dataKey.fill(0);
|
||||||
|
databaseLogger.info("User session inactive timeout", {
|
||||||
|
operation: "user_session_inactive",
|
||||||
|
userId,
|
||||||
|
});
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Update last activity time
|
||||||
|
session.lastActivity = now;
|
||||||
|
return session.dataKey;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* User logout: clear session
|
||||||
|
*/
|
||||||
|
logoutUser(userId: string): void {
|
||||||
|
const session = this.userSessions.get(userId);
|
||||||
|
if (session) {
|
||||||
|
session.dataKey.fill(0); // Securely clear key
|
||||||
|
this.userSessions.delete(userId);
|
||||||
|
}
|
||||||
|
databaseLogger.info("User logged out", {
|
||||||
|
operation: "user_crypto_logout",
|
||||||
|
userId,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check if user is unlocked
|
||||||
|
*/
|
||||||
|
isUserUnlocked(userId: string): boolean {
|
||||||
|
return this.getUserDataKey(userId) !== null;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Change user password
|
||||||
|
*/
|
||||||
|
async changeUserPassword(userId: string, oldPassword: string, newPassword: string): Promise<boolean> {
|
||||||
|
try {
|
||||||
|
// Validate old password
|
||||||
|
const isValid = await this.validatePassword(userId, oldPassword);
|
||||||
|
if (!isValid) return false;
|
||||||
|
|
||||||
|
// Get current DEK
|
||||||
|
const kekSalt = await this.getKEKSalt(userId);
|
||||||
|
if (!kekSalt) return false;
|
||||||
|
|
||||||
|
const oldKEK = this.deriveKEK(oldPassword, kekSalt);
|
||||||
|
const encryptedDEK = await this.getEncryptedDEK(userId);
|
||||||
|
if (!encryptedDEK) return false;
|
||||||
|
|
||||||
|
const DEK = this.decryptDEK(encryptedDEK, oldKEK);
|
||||||
|
|
||||||
|
// Generate new KEK salt and encrypt DEK
|
||||||
|
const newKekSalt = await this.generateKEKSalt();
|
||||||
|
const newKEK = this.deriveKEK(newPassword, newKekSalt);
|
||||||
|
const newEncryptedDEK = this.encryptDEK(DEK, newKEK);
|
||||||
|
|
||||||
|
// Store new salt and encrypted DEK
|
||||||
|
await this.storeKEKSalt(userId, newKekSalt);
|
||||||
|
await this.storeEncryptedDEK(userId, newEncryptedDEK);
|
||||||
|
|
||||||
|
// Clean all temporary keys
|
||||||
|
oldKEK.fill(0);
|
||||||
|
newKEK.fill(0);
|
||||||
|
DEK.fill(0);
|
||||||
|
|
||||||
|
// Clean user session, require re-login
|
||||||
|
this.logoutUser(userId);
|
||||||
|
|
||||||
|
return true;
|
||||||
|
} catch (error) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ===== Private methods =====
|
||||||
|
|
||||||
|
private async validatePassword(userId: string, password: string): Promise<boolean> {
|
||||||
|
try {
|
||||||
|
const kekSalt = await this.getKEKSalt(userId);
|
||||||
|
if (!kekSalt) return false;
|
||||||
|
|
||||||
|
const KEK = this.deriveKEK(password, kekSalt);
|
||||||
|
const encryptedDEK = await this.getEncryptedDEK(userId);
|
||||||
|
if (!encryptedDEK) return false;
|
||||||
|
|
||||||
|
const DEK = this.decryptDEK(encryptedDEK, KEK);
|
||||||
|
|
||||||
|
// Clean temporary keys
|
||||||
|
KEK.fill(0);
|
||||||
|
DEK.fill(0);
|
||||||
|
|
||||||
|
return true;
|
||||||
|
} catch (error) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private cleanupExpiredSessions(): void {
|
||||||
|
const now = Date.now();
|
||||||
|
const expiredUsers: string[] = [];
|
||||||
|
|
||||||
|
for (const [userId, session] of this.userSessions.entries()) {
|
||||||
|
if (now > session.expiresAt || now - session.lastActivity > UserCrypto.MAX_INACTIVITY) {
|
||||||
|
session.dataKey.fill(0); // Securely clear key
|
||||||
|
expiredUsers.push(userId);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
expiredUsers.forEach(userId => {
|
||||||
|
this.userSessions.delete(userId);
|
||||||
|
});
|
||||||
|
|
||||||
|
if (expiredUsers.length > 0) {
|
||||||
|
databaseLogger.info(`Cleaned up ${expiredUsers.length} expired sessions`, {
|
||||||
|
operation: "session_cleanup",
|
||||||
|
count: expiredUsers.length,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ===== Database operations and encryption methods (simplified version) =====
|
||||||
|
|
||||||
|
private async generateKEKSalt(): Promise<KEKSalt> {
|
||||||
|
return {
|
||||||
|
salt: crypto.randomBytes(32).toString("hex"),
|
||||||
|
iterations: UserCrypto.PBKDF2_ITERATIONS,
|
||||||
|
algorithm: "pbkdf2-sha256",
|
||||||
|
createdAt: new Date().toISOString(),
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
private deriveKEK(password: string, kekSalt: KEKSalt): Buffer {
|
||||||
|
return crypto.pbkdf2Sync(
|
||||||
|
password,
|
||||||
|
Buffer.from(kekSalt.salt, "hex"),
|
||||||
|
kekSalt.iterations,
|
||||||
|
UserCrypto.KEK_LENGTH,
|
||||||
|
"sha256"
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
private encryptDEK(dek: Buffer, kek: Buffer): EncryptedDEK {
|
||||||
|
const iv = crypto.randomBytes(16);
|
||||||
|
const cipher = crypto.createCipheriv("aes-256-gcm", kek, iv);
|
||||||
|
|
||||||
|
let encrypted = cipher.update(dek);
|
||||||
|
encrypted = Buffer.concat([encrypted, cipher.final()]);
|
||||||
|
const tag = cipher.getAuthTag();
|
||||||
|
|
||||||
|
return {
|
||||||
|
data: encrypted.toString("hex"),
|
||||||
|
iv: iv.toString("hex"),
|
||||||
|
tag: tag.toString("hex"),
|
||||||
|
algorithm: "aes-256-gcm",
|
||||||
|
createdAt: new Date().toISOString(),
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
private decryptDEK(encryptedDEK: EncryptedDEK, kek: Buffer): Buffer {
|
||||||
|
const decipher = crypto.createDecipheriv(
|
||||||
|
"aes-256-gcm",
|
||||||
|
kek,
|
||||||
|
Buffer.from(encryptedDEK.iv, "hex")
|
||||||
|
);
|
||||||
|
|
||||||
|
decipher.setAuthTag(Buffer.from(encryptedDEK.tag, "hex"));
|
||||||
|
let decrypted = decipher.update(Buffer.from(encryptedDEK.data, "hex"));
|
||||||
|
decrypted = Buffer.concat([decrypted, decipher.final()]);
|
||||||
|
|
||||||
|
return decrypted;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Database operation methods
|
||||||
|
private async storeKEKSalt(userId: string, kekSalt: KEKSalt): Promise<void> {
|
||||||
|
const key = `user_kek_salt_${userId}`;
|
||||||
|
const value = JSON.stringify(kekSalt);
|
||||||
|
|
||||||
|
const existing = await getDb().select().from(settings).where(eq(settings.key, key));
|
||||||
|
|
||||||
|
if (existing.length > 0) {
|
||||||
|
await getDb().update(settings).set({ value }).where(eq(settings.key, key));
|
||||||
|
} else {
|
||||||
|
await getDb().insert(settings).values({ key, value });
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private async getKEKSalt(userId: string): Promise<KEKSalt | null> {
|
||||||
|
try {
|
||||||
|
const key = `user_kek_salt_${userId}`;
|
||||||
|
const result = await getDb().select().from(settings).where(eq(settings.key, key));
|
||||||
|
|
||||||
|
if (result.length === 0) {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
return JSON.parse(result[0].value);
|
||||||
|
} catch (error) {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private async storeEncryptedDEK(userId: string, encryptedDEK: EncryptedDEK): Promise<void> {
|
||||||
|
const key = `user_encrypted_dek_${userId}`;
|
||||||
|
const value = JSON.stringify(encryptedDEK);
|
||||||
|
|
||||||
|
const existing = await getDb().select().from(settings).where(eq(settings.key, key));
|
||||||
|
|
||||||
|
if (existing.length > 0) {
|
||||||
|
await getDb().update(settings).set({ value }).where(eq(settings.key, key));
|
||||||
|
} else {
|
||||||
|
await getDb().insert(settings).values({ key, value });
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private async getEncryptedDEK(userId: string): Promise<EncryptedDEK | null> {
|
||||||
|
try {
|
||||||
|
const key = `user_encrypted_dek_${userId}`;
|
||||||
|
const result = await getDb().select().from(settings).where(eq(settings.key, key));
|
||||||
|
|
||||||
|
if (result.length === 0) {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
return JSON.parse(result[0].value);
|
||||||
|
} catch (error) {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export { UserCrypto, type KEKSalt, type EncryptedDEK };
|
||||||
250
src/backend/utils/user-data-export.ts
Normal file
@@ -0,0 +1,250 @@
|
|||||||
|
import { getDb } from "../database/db/index.js";
|
||||||
|
import { users, sshData, sshCredentials, fileManagerRecent, fileManagerPinned, fileManagerShortcuts, dismissedAlerts } from "../database/db/schema.js";
|
||||||
|
import { eq } from "drizzle-orm";
|
||||||
|
import { DataCrypto } from "./data-crypto.js";
|
||||||
|
import { databaseLogger } from "./logger.js";
|
||||||
|
import crypto from "crypto";
|
||||||
|
|
||||||
|
interface UserExportData {
|
||||||
|
version: string;
|
||||||
|
exportedAt: string;
|
||||||
|
userId: string;
|
||||||
|
username: string;
|
||||||
|
userData: {
|
||||||
|
sshHosts: any[];
|
||||||
|
sshCredentials: any[];
|
||||||
|
fileManagerData: {
|
||||||
|
recent: any[];
|
||||||
|
pinned: any[];
|
||||||
|
shortcuts: any[];
|
||||||
|
};
|
||||||
|
dismissedAlerts: any[];
|
||||||
|
};
|
||||||
|
metadata: {
|
||||||
|
totalRecords: number;
|
||||||
|
encrypted: boolean;
|
||||||
|
exportType: 'user_data' | 'system_config' | 'all';
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* UserDataExport - User-level data import/export
|
||||||
|
*
|
||||||
|
* Linus principles:
|
||||||
|
* - Users own their data and should be able to export freely
|
||||||
|
* - Simple and direct, no complex permission checks
|
||||||
|
* - Support both encrypted and plaintext formats
|
||||||
|
* - Don't break existing system architecture
|
||||||
|
*/
|
||||||
|
class UserDataExport {
|
||||||
|
private static readonly EXPORT_VERSION = "v2.0";
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Export user data
|
||||||
|
*/
|
||||||
|
static async exportUserData(
|
||||||
|
userId: string,
|
||||||
|
options: {
|
||||||
|
format?: 'encrypted' | 'plaintext';
|
||||||
|
scope?: 'user_data' | 'all';
|
||||||
|
includeCredentials?: boolean;
|
||||||
|
} = {}
|
||||||
|
): Promise<UserExportData> {
|
||||||
|
const { format = 'encrypted', scope = 'user_data', includeCredentials = true } = options;
|
||||||
|
|
||||||
|
try {
|
||||||
|
databaseLogger.info("Starting user data export", {
|
||||||
|
operation: "user_data_export",
|
||||||
|
userId,
|
||||||
|
format,
|
||||||
|
scope,
|
||||||
|
includeCredentials,
|
||||||
|
});
|
||||||
|
|
||||||
|
// Verify user exists
|
||||||
|
const user = await getDb().select().from(users).where(eq(users.id, userId));
|
||||||
|
if (!user || user.length === 0) {
|
||||||
|
throw new Error(`User not found: ${userId}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
const userRecord = user[0];
|
||||||
|
|
||||||
|
// Get user data key (if decryption needed)
|
||||||
|
let userDataKey: Buffer | null = null;
|
||||||
|
if (format === 'plaintext') {
|
||||||
|
userDataKey = DataCrypto.getUserDataKey(userId);
|
||||||
|
if (!userDataKey) {
|
||||||
|
throw new Error("User data not unlocked - password required for plaintext export");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Export SSH host configurations
|
||||||
|
const sshHosts = await getDb().select().from(sshData).where(eq(sshData.userId, userId));
|
||||||
|
const processedSshHosts = format === 'plaintext' && userDataKey
|
||||||
|
? sshHosts.map(host => DataCrypto.decryptRecord("ssh_data", host, userId, userDataKey!))
|
||||||
|
: sshHosts;
|
||||||
|
|
||||||
|
// Export SSH credentials (if included)
|
||||||
|
let sshCredentialsData: any[] = [];
|
||||||
|
if (includeCredentials) {
|
||||||
|
const credentials = await getDb().select().from(sshCredentials).where(eq(sshCredentials.userId, userId));
|
||||||
|
sshCredentialsData = format === 'plaintext' && userDataKey
|
||||||
|
? credentials.map(cred => DataCrypto.decryptRecord("ssh_credentials", cred, userId, userDataKey!))
|
||||||
|
: credentials;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Export file manager data
|
||||||
|
const [recentFiles, pinnedFiles, shortcuts] = await Promise.all([
|
||||||
|
getDb().select().from(fileManagerRecent).where(eq(fileManagerRecent.userId, userId)),
|
||||||
|
getDb().select().from(fileManagerPinned).where(eq(fileManagerPinned.userId, userId)),
|
||||||
|
getDb().select().from(fileManagerShortcuts).where(eq(fileManagerShortcuts.userId, userId)),
|
||||||
|
]);
|
||||||
|
|
||||||
|
// Export dismissed alerts
|
||||||
|
const alerts = await getDb().select().from(dismissedAlerts).where(eq(dismissedAlerts.userId, userId));
|
||||||
|
|
||||||
|
// Build export data
|
||||||
|
const exportData: UserExportData = {
|
||||||
|
version: this.EXPORT_VERSION,
|
||||||
|
exportedAt: new Date().toISOString(),
|
||||||
|
userId: userRecord.id,
|
||||||
|
username: userRecord.username,
|
||||||
|
userData: {
|
||||||
|
sshHosts: processedSshHosts,
|
||||||
|
sshCredentials: sshCredentialsData,
|
||||||
|
fileManagerData: {
|
||||||
|
recent: recentFiles,
|
||||||
|
pinned: pinnedFiles,
|
||||||
|
shortcuts: shortcuts,
|
||||||
|
},
|
||||||
|
dismissedAlerts: alerts,
|
||||||
|
},
|
||||||
|
metadata: {
|
||||||
|
totalRecords: processedSshHosts.length + sshCredentialsData.length + recentFiles.length + pinnedFiles.length + shortcuts.length + alerts.length,
|
||||||
|
encrypted: format === 'encrypted',
|
||||||
|
exportType: scope,
|
||||||
|
},
|
||||||
|
};
|
||||||
|
|
||||||
|
databaseLogger.success("User data export completed", {
|
||||||
|
operation: "user_data_export_complete",
|
||||||
|
userId,
|
||||||
|
totalRecords: exportData.metadata.totalRecords,
|
||||||
|
format,
|
||||||
|
sshHosts: processedSshHosts.length,
|
||||||
|
sshCredentials: sshCredentialsData.length,
|
||||||
|
});
|
||||||
|
|
||||||
|
return exportData;
|
||||||
|
} catch (error) {
|
||||||
|
databaseLogger.error("User data export failed", error, {
|
||||||
|
operation: "user_data_export_failed",
|
||||||
|
userId,
|
||||||
|
format,
|
||||||
|
scope,
|
||||||
|
});
|
||||||
|
throw error;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Export as JSON string
|
||||||
|
*/
|
||||||
|
static async exportUserDataToJSON(
|
||||||
|
userId: string,
|
||||||
|
options: {
|
||||||
|
format?: 'encrypted' | 'plaintext';
|
||||||
|
scope?: 'user_data' | 'all';
|
||||||
|
includeCredentials?: boolean;
|
||||||
|
pretty?: boolean;
|
||||||
|
} = {}
|
||||||
|
): Promise<string> {
|
||||||
|
const { pretty = true } = options;
|
||||||
|
const exportData = await this.exportUserData(userId, options);
|
||||||
|
return JSON.stringify(exportData, null, pretty ? 2 : 0);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Validate export data format
|
||||||
|
*/
|
||||||
|
static validateExportData(data: any): { valid: boolean; errors: string[] } {
|
||||||
|
const errors: string[] = [];
|
||||||
|
|
||||||
|
if (!data || typeof data !== 'object') {
|
||||||
|
errors.push("Export data must be an object");
|
||||||
|
return { valid: false, errors };
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!data.version) {
|
||||||
|
errors.push("Missing version field");
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!data.userId) {
|
||||||
|
errors.push("Missing userId field");
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!data.userData || typeof data.userData !== 'object') {
|
||||||
|
errors.push("Missing or invalid userData field");
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!data.metadata || typeof data.metadata !== 'object') {
|
||||||
|
errors.push("Missing or invalid metadata field");
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check required data fields
|
||||||
|
if (data.userData) {
|
||||||
|
const requiredFields = ['sshHosts', 'sshCredentials', 'fileManagerData', 'dismissedAlerts'];
|
||||||
|
for (const field of requiredFields) {
|
||||||
|
if (!Array.isArray(data.userData[field]) && !(field === 'fileManagerData' && typeof data.userData[field] === 'object')) {
|
||||||
|
errors.push(`Missing or invalid userData.${field} field`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (data.userData.fileManagerData && typeof data.userData.fileManagerData === 'object') {
|
||||||
|
const fmFields = ['recent', 'pinned', 'shortcuts'];
|
||||||
|
for (const field of fmFields) {
|
||||||
|
if (!Array.isArray(data.userData.fileManagerData[field])) {
|
||||||
|
errors.push(`Missing or invalid userData.fileManagerData.${field} field`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return { valid: errors.length === 0, errors };
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get export data statistics
|
||||||
|
*/
|
||||||
|
static getExportStats(data: UserExportData): {
|
||||||
|
version: string;
|
||||||
|
exportedAt: string;
|
||||||
|
username: string;
|
||||||
|
totalRecords: number;
|
||||||
|
breakdown: {
|
||||||
|
sshHosts: number;
|
||||||
|
sshCredentials: number;
|
||||||
|
fileManagerItems: number;
|
||||||
|
dismissedAlerts: number;
|
||||||
|
};
|
||||||
|
encrypted: boolean;
|
||||||
|
} {
|
||||||
|
return {
|
||||||
|
version: data.version,
|
||||||
|
exportedAt: data.exportedAt,
|
||||||
|
username: data.username,
|
||||||
|
totalRecords: data.metadata.totalRecords,
|
||||||
|
breakdown: {
|
||||||
|
sshHosts: data.userData.sshHosts.length,
|
||||||
|
sshCredentials: data.userData.sshCredentials.length,
|
||||||
|
fileManagerItems: data.userData.fileManagerData.recent.length +
|
||||||
|
data.userData.fileManagerData.pinned.length +
|
||||||
|
data.userData.fileManagerData.shortcuts.length,
|
||||||
|
dismissedAlerts: data.userData.dismissedAlerts.length,
|
||||||
|
},
|
||||||
|
encrypted: data.metadata.encrypted,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export { UserDataExport, type UserExportData };
|
||||||
432
src/backend/utils/user-data-import.ts
Normal file
@@ -0,0 +1,432 @@
|
|||||||
|
import { getDb } from "../database/db/index.js";
|
||||||
|
import { users, sshData, sshCredentials, fileManagerRecent, fileManagerPinned, fileManagerShortcuts, dismissedAlerts } from "../database/db/schema.js";
|
||||||
|
import { eq, and } from "drizzle-orm";
|
||||||
|
import { DataCrypto } from "./data-crypto.js";
|
||||||
|
import { UserDataExport, type UserExportData } from "./user-data-export.js";
|
||||||
|
import { databaseLogger } from "./logger.js";
|
||||||
|
import { nanoid } from "nanoid";
|
||||||
|
|
||||||
|
interface ImportOptions {
|
||||||
|
replaceExisting?: boolean;
|
||||||
|
skipCredentials?: boolean;
|
||||||
|
skipFileManagerData?: boolean;
|
||||||
|
dryRun?: boolean;
|
||||||
|
}
|
||||||
|
|
||||||
|
interface ImportResult {
|
||||||
|
success: boolean;
|
||||||
|
summary: {
|
||||||
|
sshHostsImported: number;
|
||||||
|
sshCredentialsImported: number;
|
||||||
|
fileManagerItemsImported: number;
|
||||||
|
dismissedAlertsImported: number;
|
||||||
|
skippedItems: number;
|
||||||
|
errors: string[];
|
||||||
|
};
|
||||||
|
dryRun: boolean;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* UserDataImport - User data import
|
||||||
|
*
|
||||||
|
* Linus principles:
|
||||||
|
* - Import should not break existing data (unless explicitly requested)
|
||||||
|
* - Support dry-run mode for validation
|
||||||
|
* - Simple strategy for ID conflicts: regenerate
|
||||||
|
* - Error handling must be explicit, no silent failures
|
||||||
|
*/
|
||||||
|
class UserDataImport {
|
||||||
|
|
||||||
|
  /**
   * Import a user-data export bundle into the target user's account.
   *
   * Flow: verify the target user exists, structurally validate the bundle
   * (UserDataExport.validateExportData), require the target's unlocked
   * data key when the bundle is encrypted, then import each category in
   * order (SSH hosts, SSH credentials, file-manager data, dismissed
   * alerts), accumulating per-category counts and per-row errors into
   * one summary. Individual row failures do not abort the import;
   * `success` is true only when no row-level errors were collected.
   *
   * @param targetUserId - user receiving the imported records
   * @param exportData - bundle produced by UserDataExport
   * @param options.replaceExisting - forwarded to the per-category
   *   helpers. NOTE(review): the helpers visible in this file do not yet
   *   act on it — confirm intended replace semantics.
   * @param options.skipCredentials - skip the SSH credentials phase
   * @param options.skipFileManagerData - skip the file-manager phase
   * @param options.dryRun - count what would be imported without writing
   * @returns per-category import summary plus the dryRun flag
   * @throws when the target user is missing, the bundle is invalid, or an
   *   encrypted bundle is imported while the target's data is locked
   */
  static async importUserData(
    targetUserId: string,
    exportData: UserExportData,
    options: ImportOptions = {}
  ): Promise<ImportResult> {
    const {
      replaceExisting = false,
      skipCredentials = false,
      skipFileManagerData = false,
      dryRun = false
    } = options;

    try {
      databaseLogger.info("Starting user data import", {
        operation: "user_data_import",
        targetUserId,
        sourceUserId: exportData.userId,
        sourceUsername: exportData.username,
        dryRun,
        replaceExisting,
        skipCredentials,
        skipFileManagerData,
      });

      // Verify target user exists
      const targetUser = await getDb().select().from(users).where(eq(users.id, targetUserId));
      if (!targetUser || targetUser.length === 0) {
        throw new Error(`Target user not found: ${targetUserId}`);
      }

      // Validate export data format
      const validation = UserDataExport.validateExportData(exportData);
      if (!validation.valid) {
        throw new Error(`Invalid export data: ${validation.errors.join(', ')}`);
      }

      // Verify user data is unlocked (if data is encrypted).
      // The key is needed to re-encrypt imported rows for the target user.
      let userDataKey: Buffer | null = null;
      if (exportData.metadata.encrypted) {
        userDataKey = DataCrypto.getUserDataKey(targetUserId);
        if (!userDataKey) {
          throw new Error("Target user data not unlocked - password required for encrypted import");
        }
      }

      // Accumulator for per-category counts and row-level errors.
      const result: ImportResult = {
        success: false,
        summary: {
          sshHostsImported: 0,
          sshCredentialsImported: 0,
          fileManagerItemsImported: 0,
          dismissedAlertsImported: 0,
          skippedItems: 0,
          errors: [],
        },
        dryRun,
      };

      // Import SSH host configurations
      if (exportData.userData.sshHosts && exportData.userData.sshHosts.length > 0) {
        const importStats = await this.importSshHosts(
          targetUserId,
          exportData.userData.sshHosts,
          { replaceExisting, dryRun, userDataKey }
        );
        result.summary.sshHostsImported = importStats.imported;
        result.summary.skippedItems += importStats.skipped;
        result.summary.errors.push(...importStats.errors);
      }

      // Import SSH credentials
      if (!skipCredentials && exportData.userData.sshCredentials && exportData.userData.sshCredentials.length > 0) {
        const importStats = await this.importSshCredentials(
          targetUserId,
          exportData.userData.sshCredentials,
          { replaceExisting, dryRun, userDataKey }
        );
        result.summary.sshCredentialsImported = importStats.imported;
        result.summary.skippedItems += importStats.skipped;
        result.summary.errors.push(...importStats.errors);
      }

      // Import file manager data (recent/pinned/shortcuts; not encrypted,
      // so no userDataKey is passed)
      if (!skipFileManagerData && exportData.userData.fileManagerData) {
        const importStats = await this.importFileManagerData(
          targetUserId,
          exportData.userData.fileManagerData,
          { replaceExisting, dryRun }
        );
        result.summary.fileManagerItemsImported = importStats.imported;
        result.summary.skippedItems += importStats.skipped;
        result.summary.errors.push(...importStats.errors);
      }

      // Import dismissed alerts
      if (exportData.userData.dismissedAlerts && exportData.userData.dismissedAlerts.length > 0) {
        const importStats = await this.importDismissedAlerts(
          targetUserId,
          exportData.userData.dismissedAlerts,
          { replaceExisting, dryRun }
        );
        result.summary.dismissedAlertsImported = importStats.imported;
        result.summary.skippedItems += importStats.skipped;
        result.summary.errors.push(...importStats.errors);
      }

      // Success means every row imported cleanly; partial imports report
      // success=false but still return the counts.
      result.success = result.summary.errors.length === 0;

      databaseLogger.success("User data import completed", {
        operation: "user_data_import_complete",
        targetUserId,
        dryRun,
        ...result.summary,
      });

      return result;
    } catch (error) {
      databaseLogger.error("User data import failed", error, {
        operation: "user_data_import_failed",
        targetUserId,
        dryRun,
      });
      throw error;
    }
  }
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Import SSH host configurations
|
||||||
|
*/
|
||||||
|
private static async importSshHosts(
|
||||||
|
targetUserId: string,
|
||||||
|
sshHosts: any[],
|
||||||
|
options: { replaceExisting: boolean; dryRun: boolean; userDataKey: Buffer | null }
|
||||||
|
) {
|
||||||
|
let imported = 0;
|
||||||
|
let skipped = 0;
|
||||||
|
const errors: string[] = [];
|
||||||
|
|
||||||
|
for (const host of sshHosts) {
|
||||||
|
try {
|
||||||
|
if (options.dryRun) {
|
||||||
|
imported++;
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Generate temporary ID for encryption context, then remove for database insert
|
||||||
|
const tempId = `import-ssh-${targetUserId}-${Date.now()}-${imported}`;
|
||||||
|
const newHostData = {
|
||||||
|
...host,
|
||||||
|
id: tempId, // Temporary ID for encryption context
|
||||||
|
userId: targetUserId,
|
||||||
|
createdAt: new Date().toISOString(),
|
||||||
|
updatedAt: new Date().toISOString(),
|
||||||
|
};
|
||||||
|
|
||||||
|
// If data needs re-encryption
|
||||||
|
let processedHostData = newHostData;
|
||||||
|
if (options.userDataKey) {
|
||||||
|
processedHostData = DataCrypto.encryptRecord("ssh_data", newHostData, targetUserId, options.userDataKey);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Remove temp ID to let database auto-generate real ID
|
||||||
|
delete processedHostData.id;
|
||||||
|
|
||||||
|
await getDb().insert(sshData).values(processedHostData);
|
||||||
|
imported++;
|
||||||
|
} catch (error) {
|
||||||
|
errors.push(`SSH host import failed: ${error instanceof Error ? error.message : 'Unknown error'}`);
|
||||||
|
skipped++;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return { imported, skipped, errors };
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Import SSH credentials
|
||||||
|
*/
|
||||||
|
private static async importSshCredentials(
|
||||||
|
targetUserId: string,
|
||||||
|
credentials: any[],
|
||||||
|
options: { replaceExisting: boolean; dryRun: boolean; userDataKey: Buffer | null }
|
||||||
|
) {
|
||||||
|
let imported = 0;
|
||||||
|
let skipped = 0;
|
||||||
|
const errors: string[] = [];
|
||||||
|
|
||||||
|
for (const credential of credentials) {
|
||||||
|
try {
|
||||||
|
if (options.dryRun) {
|
||||||
|
imported++;
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Generate temporary ID for encryption context, then remove for database insert
|
||||||
|
const tempCredId = `import-cred-${targetUserId}-${Date.now()}-${imported}`;
|
||||||
|
const newCredentialData = {
|
||||||
|
...credential,
|
||||||
|
id: tempCredId, // Temporary ID for encryption context
|
||||||
|
userId: targetUserId,
|
||||||
|
usageCount: 0, // Reset usage count
|
||||||
|
lastUsed: null,
|
||||||
|
createdAt: new Date().toISOString(),
|
||||||
|
updatedAt: new Date().toISOString(),
|
||||||
|
};
|
||||||
|
|
||||||
|
// If data needs re-encryption
|
||||||
|
let processedCredentialData = newCredentialData;
|
||||||
|
if (options.userDataKey) {
|
||||||
|
processedCredentialData = DataCrypto.encryptRecord("ssh_credentials", newCredentialData, targetUserId, options.userDataKey);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Remove temp ID to let database auto-generate real ID
|
||||||
|
delete processedCredentialData.id;
|
||||||
|
|
||||||
|
await getDb().insert(sshCredentials).values(processedCredentialData);
|
||||||
|
imported++;
|
||||||
|
} catch (error) {
|
||||||
|
errors.push(`SSH credential import failed: ${error instanceof Error ? error.message : 'Unknown error'}`);
|
||||||
|
skipped++;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return { imported, skipped, errors };
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Import file manager data
|
||||||
|
*/
|
||||||
|
private static async importFileManagerData(
|
||||||
|
targetUserId: string,
|
||||||
|
fileManagerData: any,
|
||||||
|
options: { replaceExisting: boolean; dryRun: boolean }
|
||||||
|
) {
|
||||||
|
let imported = 0;
|
||||||
|
let skipped = 0;
|
||||||
|
const errors: string[] = [];
|
||||||
|
|
||||||
|
try {
|
||||||
|
// Import recent files
|
||||||
|
if (fileManagerData.recent && Array.isArray(fileManagerData.recent)) {
|
||||||
|
for (const item of fileManagerData.recent) {
|
||||||
|
try {
|
||||||
|
if (!options.dryRun) {
|
||||||
|
const newItem = {
|
||||||
|
...item,
|
||||||
|
id: undefined,
|
||||||
|
userId: targetUserId,
|
||||||
|
lastOpened: new Date().toISOString(),
|
||||||
|
};
|
||||||
|
await getDb().insert(fileManagerRecent).values(newItem);
|
||||||
|
}
|
||||||
|
imported++;
|
||||||
|
} catch (error) {
|
||||||
|
errors.push(`Recent file import failed: ${error instanceof Error ? error.message : 'Unknown error'}`);
|
||||||
|
skipped++;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Import pinned files
|
||||||
|
if (fileManagerData.pinned && Array.isArray(fileManagerData.pinned)) {
|
||||||
|
for (const item of fileManagerData.pinned) {
|
||||||
|
try {
|
||||||
|
if (!options.dryRun) {
|
||||||
|
const newItem = {
|
||||||
|
...item,
|
||||||
|
id: undefined,
|
||||||
|
userId: targetUserId,
|
||||||
|
pinnedAt: new Date().toISOString(),
|
||||||
|
};
|
||||||
|
await getDb().insert(fileManagerPinned).values(newItem);
|
||||||
|
}
|
||||||
|
imported++;
|
||||||
|
} catch (error) {
|
||||||
|
errors.push(`Pinned file import failed: ${error instanceof Error ? error.message : 'Unknown error'}`);
|
||||||
|
skipped++;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Import shortcuts
|
||||||
|
if (fileManagerData.shortcuts && Array.isArray(fileManagerData.shortcuts)) {
|
||||||
|
for (const item of fileManagerData.shortcuts) {
|
||||||
|
try {
|
||||||
|
if (!options.dryRun) {
|
||||||
|
const newItem = {
|
||||||
|
...item,
|
||||||
|
id: undefined,
|
||||||
|
userId: targetUserId,
|
||||||
|
createdAt: new Date().toISOString(),
|
||||||
|
};
|
||||||
|
await getDb().insert(fileManagerShortcuts).values(newItem);
|
||||||
|
}
|
||||||
|
imported++;
|
||||||
|
} catch (error) {
|
||||||
|
errors.push(`Shortcut import failed: ${error instanceof Error ? error.message : 'Unknown error'}`);
|
||||||
|
skipped++;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
errors.push(`File manager data import failed: ${error instanceof Error ? error.message : 'Unknown error'}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
return { imported, skipped, errors };
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Import dismissed alerts
|
||||||
|
*/
|
||||||
|
private static async importDismissedAlerts(
|
||||||
|
targetUserId: string,
|
||||||
|
alerts: any[],
|
||||||
|
options: { replaceExisting: boolean; dryRun: boolean }
|
||||||
|
) {
|
||||||
|
let imported = 0;
|
||||||
|
let skipped = 0;
|
||||||
|
const errors: string[] = [];
|
||||||
|
|
||||||
|
for (const alert of alerts) {
|
||||||
|
try {
|
||||||
|
if (options.dryRun) {
|
||||||
|
imported++;
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check if alert already exists
|
||||||
|
const existing = await getDb()
|
||||||
|
.select()
|
||||||
|
.from(dismissedAlerts)
|
||||||
|
.where(
|
||||||
|
and(
|
||||||
|
eq(dismissedAlerts.userId, targetUserId),
|
||||||
|
eq(dismissedAlerts.alertId, alert.alertId)
|
||||||
|
)
|
||||||
|
);
|
||||||
|
|
||||||
|
if (existing.length > 0 && !options.replaceExisting) {
|
||||||
|
skipped++;
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
const newAlert = {
|
||||||
|
...alert,
|
||||||
|
id: undefined,
|
||||||
|
userId: targetUserId,
|
||||||
|
dismissedAt: new Date().toISOString(),
|
||||||
|
};
|
||||||
|
|
||||||
|
if (existing.length > 0 && options.replaceExisting) {
|
||||||
|
await getDb()
|
||||||
|
.update(dismissedAlerts)
|
||||||
|
.set(newAlert)
|
||||||
|
.where(eq(dismissedAlerts.id, existing[0].id));
|
||||||
|
} else {
|
||||||
|
await getDb().insert(dismissedAlerts).values(newAlert);
|
||||||
|
}
|
||||||
|
|
||||||
|
imported++;
|
||||||
|
} catch (error) {
|
||||||
|
errors.push(`Dismissed alert import failed: ${error instanceof Error ? error.message : 'Unknown error'}`);
|
||||||
|
skipped++;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return { imported, skipped, errors };
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Import from JSON string
|
||||||
|
*/
|
||||||
|
static async importUserDataFromJSON(
|
||||||
|
targetUserId: string,
|
||||||
|
jsonData: string,
|
||||||
|
options: ImportOptions = {}
|
||||||
|
): Promise<ImportResult> {
|
||||||
|
try {
|
||||||
|
const exportData: UserExportData = JSON.parse(jsonData);
|
||||||
|
return await this.importUserData(targetUserId, exportData, options);
|
||||||
|
} catch (error) {
|
||||||
|
if (error instanceof SyntaxError) {
|
||||||
|
throw new Error("Invalid JSON format in import data");
|
||||||
|
}
|
||||||
|
throw error;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export { UserDataImport, type ImportOptions, type ImportResult };
|
||||||
@@ -150,7 +150,10 @@
|
|||||||
"generateRSA": "Generate RSA",
|
"generateRSA": "Generate RSA",
|
||||||
"keyPairGeneratedSuccessfully": "{{keyType}} key pair generated successfully",
|
"keyPairGeneratedSuccessfully": "{{keyType}} key pair generated successfully",
|
||||||
"failedToGenerateKeyPair": "Failed to generate key pair",
|
"failedToGenerateKeyPair": "Failed to generate key pair",
|
||||||
"generateKeyPairNote": "Generate a new SSH key pair directly. This will replace any existing keys in the form."
|
"generateKeyPairNote": "Generate a new SSH key pair directly. This will replace any existing keys in the form.",
|
||||||
|
"invalidKey": "Invalid Key",
|
||||||
|
"detectionError": "Detection Error",
|
||||||
|
"unknown": "Unknown"
|
||||||
},
|
},
|
||||||
"sshTools": {
|
"sshTools": {
|
||||||
"title": "SSH Tools",
|
"title": "SSH Tools",
|
||||||
@@ -191,6 +194,7 @@
|
|||||||
},
|
},
|
||||||
"common": {
|
"common": {
|
||||||
"close": "Close",
|
"close": "Close",
|
||||||
|
"minimize": "Minimize",
|
||||||
"online": "Online",
|
"online": "Online",
|
||||||
"offline": "Offline",
|
"offline": "Offline",
|
||||||
"maintenance": "Maintenance",
|
"maintenance": "Maintenance",
|
||||||
@@ -376,6 +380,7 @@
|
|||||||
"overrideUserInfoUrl": "Override User Info URL (not required)",
|
"overrideUserInfoUrl": "Override User Info URL (not required)",
|
||||||
"databaseSecurity": "Database Security",
|
"databaseSecurity": "Database Security",
|
||||||
"encryptionStatus": "Encryption Status",
|
"encryptionStatus": "Encryption Status",
|
||||||
|
"encryptionEnabled": "Encryption Enabled",
|
||||||
"enabled": "Enabled",
|
"enabled": "Enabled",
|
||||||
"disabled": "Disabled",
|
"disabled": "Disabled",
|
||||||
"keyId": "Key ID",
|
"keyId": "Key ID",
|
||||||
@@ -487,7 +492,11 @@
|
|||||||
"createBackup": "Create Backup",
|
"createBackup": "Create Backup",
|
||||||
"exportImport": "Export/Import",
|
"exportImport": "Export/Import",
|
||||||
"export": "Export",
|
"export": "Export",
|
||||||
"import": "Import"
|
"import": "Import",
|
||||||
|
"passwordRequired": "Password required",
|
||||||
|
"confirmExport": "Confirm Export",
|
||||||
|
"exportDescription": "Export SSH hosts and credentials as SQLite file",
|
||||||
|
"importDescription": "Import SQLite file with incremental merge (skips duplicates)"
|
||||||
},
|
},
|
||||||
"hosts": {
|
"hosts": {
|
||||||
"title": "Host Manager",
|
"title": "Host Manager",
|
||||||
@@ -564,6 +573,8 @@
|
|||||||
"sshpassRequired": "Sshpass Required For Password Authentication",
|
"sshpassRequired": "Sshpass Required For Password Authentication",
|
||||||
"sshpassRequiredDesc": "For password authentication in tunnels, sshpass must be installed on the system.",
|
"sshpassRequiredDesc": "For password authentication in tunnels, sshpass must be installed on the system.",
|
||||||
"otherInstallMethods": "Other installation methods:",
|
"otherInstallMethods": "Other installation methods:",
|
||||||
|
"debianUbuntuEquivalent": "(Debian/Ubuntu) or the equivalent for your OS.",
|
||||||
|
"or": "or",
|
||||||
"centosRhelFedora": "CentOS/RHEL/Fedora",
|
"centosRhelFedora": "CentOS/RHEL/Fedora",
|
||||||
"macos": "macOS",
|
"macos": "macOS",
|
||||||
"windows": "Windows",
|
"windows": "Windows",
|
||||||
@@ -576,8 +587,6 @@
|
|||||||
"upload": "Upload",
|
"upload": "Upload",
|
||||||
"authentication": "Authentication",
|
"authentication": "Authentication",
|
||||||
"password": "Password",
|
"password": "Password",
|
||||||
"requirePassword": "Require Password",
|
|
||||||
"requirePasswordDescription": "When disabled, sessions can be saved without entering a password",
|
|
||||||
"key": "Key",
|
"key": "Key",
|
||||||
"credential": "Credential",
|
"credential": "Credential",
|
||||||
"selectCredential": "Select Credential",
|
"selectCredential": "Select Credential",
|
||||||
@@ -647,7 +656,10 @@
|
|||||||
"reconnecting": "Reconnecting... ({{attempt}}/{{max}})",
|
"reconnecting": "Reconnecting... ({{attempt}}/{{max}})",
|
||||||
"reconnected": "Reconnected successfully",
|
"reconnected": "Reconnected successfully",
|
||||||
"maxReconnectAttemptsReached": "Maximum reconnection attempts reached",
|
"maxReconnectAttemptsReached": "Maximum reconnection attempts reached",
|
||||||
"connectionTimeout": "Connection timeout"
|
"connectionTimeout": "Connection timeout",
|
||||||
|
"terminalTitle": "Terminal - {{host}}",
|
||||||
|
"terminalWithPath": "Terminal - {{host}}:{{path}}",
|
||||||
|
"runTitle": "Running {{command}} - {{host}}"
|
||||||
},
|
},
|
||||||
"fileManager": {
|
"fileManager": {
|
||||||
"title": "File Manager",
|
"title": "File Manager",
|
||||||
@@ -655,7 +667,14 @@
|
|||||||
"folder": "Folder",
|
"folder": "Folder",
|
||||||
"connectToSsh": "Connect to SSH to use file operations",
|
"connectToSsh": "Connect to SSH to use file operations",
|
||||||
"uploadFile": "Upload File",
|
"uploadFile": "Upload File",
|
||||||
"downloadFile": "Download File",
|
"downloadFile": "Download",
|
||||||
|
"edit": "Edit",
|
||||||
|
"preview": "Preview",
|
||||||
|
"previous": "Previous",
|
||||||
|
"next": "Next",
|
||||||
|
"pageXOfY": "Page {{current}} of {{total}}",
|
||||||
|
"zoomOut": "Zoom Out",
|
||||||
|
"zoomIn": "Zoom In",
|
||||||
"newFile": "New File",
|
"newFile": "New File",
|
||||||
"newFolder": "New Folder",
|
"newFolder": "New Folder",
|
||||||
"rename": "Rename",
|
"rename": "Rename",
|
||||||
@@ -663,7 +682,7 @@
|
|||||||
"deleteItem": "Delete Item",
|
"deleteItem": "Delete Item",
|
||||||
"currentPath": "Current Path",
|
"currentPath": "Current Path",
|
||||||
"uploadFileTitle": "Upload File",
|
"uploadFileTitle": "Upload File",
|
||||||
"maxFileSize": "Max: 100MB (JSON) / 200MB (Binary)",
|
"maxFileSize": "Max: 1GB (JSON) / 5GB (Binary) - Large files supported",
|
||||||
"removeFile": "Remove File",
|
"removeFile": "Remove File",
|
||||||
"clickToSelectFile": "Click to select a file",
|
"clickToSelectFile": "Click to select a file",
|
||||||
"chooseFile": "Choose File",
|
"chooseFile": "Choose File",
|
||||||
@@ -722,12 +741,13 @@
|
|||||||
"properties": "Properties",
|
"properties": "Properties",
|
||||||
"preview": "Preview",
|
"preview": "Preview",
|
||||||
"refresh": "Refresh",
|
"refresh": "Refresh",
|
||||||
"downloadFiles": "Download {{count}} files",
|
"downloadFiles": "Download {{count}} files to Browser",
|
||||||
"copyFiles": "Copy {{count}} items",
|
"copyFiles": "Copy {{count}} items",
|
||||||
"cutFiles": "Cut {{count}} items",
|
"cutFiles": "Cut {{count}} items",
|
||||||
"deleteFiles": "Delete {{count}} items",
|
"deleteFiles": "Delete {{count}} items",
|
||||||
"filesCopiedToClipboard": "{{count}} items copied to clipboard",
|
"filesCopiedToClipboard": "{{count}} items copied to clipboard",
|
||||||
"filesCutToClipboard": "{{count}} items cut to clipboard",
|
"filesCutToClipboard": "{{count}} items cut to clipboard",
|
||||||
|
"movedItems": "Moved {{count}} items",
|
||||||
"failedToDeleteItem": "Failed to delete item",
|
"failedToDeleteItem": "Failed to delete item",
|
||||||
"itemRenamedSuccessfully": "{{type}} renamed successfully",
|
"itemRenamedSuccessfully": "{{type}} renamed successfully",
|
||||||
"failedToRenameItem": "Failed to rename item",
|
"failedToRenameItem": "Failed to rename item",
|
||||||
@@ -793,7 +813,7 @@
|
|||||||
"dragFilesToWindowToDownload": "Drag files outside window to download",
|
"dragFilesToWindowToDownload": "Drag files outside window to download",
|
||||||
"openTerminalHere": "Open Terminal Here",
|
"openTerminalHere": "Open Terminal Here",
|
||||||
"run": "Run",
|
"run": "Run",
|
||||||
"saveToSystem": "Save to System",
|
"saveToSystem": "Save as...",
|
||||||
"selectLocationToSave": "Select Location to Save",
|
"selectLocationToSave": "Select Location to Save",
|
||||||
"openTerminalInFolder": "Open Terminal in This Folder",
|
"openTerminalInFolder": "Open Terminal in This Folder",
|
||||||
"openTerminalInFileLocation": "Open Terminal at File Location",
|
"openTerminalInFileLocation": "Open Terminal at File Location",
|
||||||
@@ -816,12 +836,86 @@
|
|||||||
"clearAllRecentFiles": "Clear all recent files",
|
"clearAllRecentFiles": "Clear all recent files",
|
||||||
"unpinFile": "Unpin file",
|
"unpinFile": "Unpin file",
|
||||||
"removeShortcut": "Remove shortcut",
|
"removeShortcut": "Remove shortcut",
|
||||||
"saveFilesToSystem": "Save {{count}} files to system",
|
"saveFilesToSystem": "Save {{count}} files as...",
|
||||||
"saveToSystem": "Save to system",
|
"saveToSystem": "Save as...",
|
||||||
"pinFile": "Pin file",
|
"pinFile": "Pin file",
|
||||||
"addToShortcuts": "Add to shortcuts",
|
"addToShortcuts": "Add to shortcuts",
|
||||||
"selectLocationToSave": "Select location to save",
|
"selectLocationToSave": "Select location to save",
|
||||||
"downloadToDefaultLocation": "Download to default location"
|
"downloadToDefaultLocation": "Download to default location",
|
||||||
|
"pasteFailed": "Paste failed",
|
||||||
|
"noUndoableActions": "No undoable actions",
|
||||||
|
"undoCopySuccess": "Undid copy operation: Deleted {{count}} copied files",
|
||||||
|
"undoCopyFailedDelete": "Undo failed: Could not delete any copied files",
|
||||||
|
"undoCopyFailedNoInfo": "Undo failed: Could not find copied file information",
|
||||||
|
"undoMoveSuccess": "Undid move operation: Moved {{count}} files back to original location",
|
||||||
|
"undoMoveFailedMove": "Undo failed: Could not move any files back",
|
||||||
|
"undoMoveFailedNoInfo": "Undo failed: Could not find moved file information",
|
||||||
|
"undoDeleteNotSupported": "Delete operation cannot be undone: Files have been permanently deleted from server",
|
||||||
|
"undoTypeNotSupported": "Unsupported undo operation type",
|
||||||
|
"undoOperationFailed": "Undo operation failed",
|
||||||
|
"unknownError": "Unknown error",
|
||||||
|
"enterPath": "Enter path...",
|
||||||
|
"editPath": "Edit path",
|
||||||
|
"confirm": "Confirm",
|
||||||
|
"cancel": "Cancel",
|
||||||
|
"folderName": "Folder name",
|
||||||
|
"find": "Find...",
|
||||||
|
"replaceWith": "Replace with...",
|
||||||
|
"replace": "Replace",
|
||||||
|
"replaceAll": "Replace All",
|
||||||
|
"downloadInstead": "Download Instead",
|
||||||
|
"keyboardShortcuts": "Keyboard Shortcuts",
|
||||||
|
"searchAndReplace": "Search & Replace",
|
||||||
|
"editing": "Editing",
|
||||||
|
"navigation": "Navigation",
|
||||||
|
"code": "Code",
|
||||||
|
"search": "Search",
|
||||||
|
"findNext": "Find Next",
|
||||||
|
"findPrevious": "Find Previous",
|
||||||
|
"save": "Save",
|
||||||
|
"selectAll": "Select All",
|
||||||
|
"undo": "Undo",
|
||||||
|
"redo": "Redo",
|
||||||
|
"goToLine": "Go to Line",
|
||||||
|
"moveLineUp": "Move Line Up",
|
||||||
|
"moveLineDown": "Move Line Down",
|
||||||
|
"toggleComment": "Toggle Comment",
|
||||||
|
"indent": "Indent",
|
||||||
|
"outdent": "Outdent",
|
||||||
|
"autoComplete": "Auto Complete",
|
||||||
|
"imageLoadError": "Failed to load image",
|
||||||
|
"zoomIn": "Zoom In",
|
||||||
|
"zoomOut": "Zoom Out",
|
||||||
|
"rotate": "Rotate",
|
||||||
|
"originalSize": "Original Size",
|
||||||
|
"startTyping": "Start typing...",
|
||||||
|
"unknownSize": "Unknown size",
|
||||||
|
"fileIsEmpty": "File is empty",
|
||||||
|
"modified": "Modified",
|
||||||
|
"largeFileWarning": "Large File Warning",
|
||||||
|
"largeFileWarningDesc": "This file is {{size}} in size, which may cause performance issues when opened as text.",
|
||||||
|
"fileNotFoundAndRemoved": "File \"{{name}}\" not found and has been removed from recent/pinned files",
|
||||||
|
"failedToLoadFile": "Failed to load file: {{error}}",
|
||||||
|
"serverErrorOccurred": "Server error occurred. Please try again later.",
|
||||||
|
"fileSavedSuccessfully": "File saved successfully",
|
||||||
|
"autoSaveFailed": "Auto-save failed",
|
||||||
|
"fileAutoSaved": "File auto-saved",
|
||||||
|
"fileDownloadedSuccessfully": "File downloaded successfully",
|
||||||
|
"moveFileFailed": "Failed to move {{name}}",
|
||||||
|
"moveOperationFailed": "Move operation failed",
|
||||||
|
"canOnlyCompareFiles": "Can only compare two files",
|
||||||
|
"comparingFiles": "Comparing files: {{file1}} and {{file2}}",
|
||||||
|
"dragFailed": "Drag operation failed",
|
||||||
|
"filePinnedSuccessfully": "File \"{{name}}\" pinned successfully",
|
||||||
|
"pinFileFailed": "Failed to pin file",
|
||||||
|
"fileUnpinnedSuccessfully": "File \"{{name}}\" unpinned successfully",
|
||||||
|
"unpinFileFailed": "Failed to unpin file",
|
||||||
|
"shortcutAddedSuccessfully": "Folder shortcut \"{{name}}\" added successfully",
|
||||||
|
"addShortcutFailed": "Failed to add shortcut",
|
||||||
|
"operationCompletedSuccessfully": "{{operation}} {{count}} items successfully",
|
||||||
|
"operationCompleted": "{{operation}} {{count}} items",
|
||||||
|
"downloadFileSuccess": "File {{name}} downloaded successfully",
|
||||||
|
"downloadFileFailed": "Download failed"
|
||||||
},
|
},
|
||||||
"tunnels": {
|
"tunnels": {
|
||||||
"title": "SSH Tunnels",
|
"title": "SSH Tunnels",
|
||||||
|
|||||||
@@ -149,7 +149,10 @@
|
|||||||
"generateRSA": "生成 RSA",
|
"generateRSA": "生成 RSA",
|
||||||
"keyPairGeneratedSuccessfully": "{{keyType}} 密钥对生成成功",
|
"keyPairGeneratedSuccessfully": "{{keyType}} 密钥对生成成功",
|
||||||
"failedToGenerateKeyPair": "生成密钥对失败",
|
"failedToGenerateKeyPair": "生成密钥对失败",
|
||||||
"generateKeyPairNote": "直接生成新的SSH密钥对。这将替换表单中的现有密钥。"
|
"generateKeyPairNote": "直接生成新的SSH密钥对。这将替换表单中的现有密钥。",
|
||||||
|
"invalidKey": "无效密钥",
|
||||||
|
"detectionError": "检测错误",
|
||||||
|
"unknown": "未知"
|
||||||
},
|
},
|
||||||
"sshTools": {
|
"sshTools": {
|
||||||
"title": "SSH 工具",
|
"title": "SSH 工具",
|
||||||
@@ -190,6 +193,7 @@
|
|||||||
},
|
},
|
||||||
"common": {
|
"common": {
|
||||||
"close": "关闭",
|
"close": "关闭",
|
||||||
|
"minimize": "最小化",
|
||||||
"online": "在线",
|
"online": "在线",
|
||||||
"offline": "离线",
|
"offline": "离线",
|
||||||
"maintenance": "维护中",
|
"maintenance": "维护中",
|
||||||
@@ -362,6 +366,7 @@
|
|||||||
"overrideUserInfoUrl": "覆盖用户信息 URL(非必填)",
|
"overrideUserInfoUrl": "覆盖用户信息 URL(非必填)",
|
||||||
"databaseSecurity": "数据库安全",
|
"databaseSecurity": "数据库安全",
|
||||||
"encryptionStatus": "加密状态",
|
"encryptionStatus": "加密状态",
|
||||||
|
"encryptionEnabled": "加密已启用",
|
||||||
"enabled": "已启用",
|
"enabled": "已启用",
|
||||||
"disabled": "已禁用",
|
"disabled": "已禁用",
|
||||||
"keyId": "密钥 ID",
|
"keyId": "密钥 ID",
|
||||||
@@ -473,7 +478,11 @@
|
|||||||
"createBackup": "创建备份",
|
"createBackup": "创建备份",
|
||||||
"exportImport": "导出/导入",
|
"exportImport": "导出/导入",
|
||||||
"export": "导出",
|
"export": "导出",
|
||||||
"import": "导入"
|
"import": "导入",
|
||||||
|
"passwordRequired": "密码为必填项",
|
||||||
|
"confirmExport": "确认导出",
|
||||||
|
"exportDescription": "将SSH主机和凭据导出为SQLite文件",
|
||||||
|
"importDescription": "导入SQLite文件并进行增量合并(跳过重复项)"
|
||||||
},
|
},
|
||||||
"hosts": {
|
"hosts": {
|
||||||
"title": "主机管理",
|
"title": "主机管理",
|
||||||
@@ -576,8 +585,6 @@
|
|||||||
"upload": "上传",
|
"upload": "上传",
|
||||||
"authentication": "认证方式",
|
"authentication": "认证方式",
|
||||||
"password": "密码",
|
"password": "密码",
|
||||||
"requirePassword": "要求密码",
|
|
||||||
"requirePasswordDescription": "禁用时,可以在不输入密码的情况下保存会话",
|
|
||||||
"key": "密钥",
|
"key": "密钥",
|
||||||
"credential": "凭证",
|
"credential": "凭证",
|
||||||
"selectCredential": "选择凭证",
|
"selectCredential": "选择凭证",
|
||||||
@@ -588,11 +595,21 @@
|
|||||||
"maxRetriesDescription": "隧道连接的最大重试次数。",
|
"maxRetriesDescription": "隧道连接的最大重试次数。",
|
||||||
"retryIntervalDescription": "重试尝试之间的等待时间。",
|
"retryIntervalDescription": "重试尝试之间的等待时间。",
|
||||||
"otherInstallMethods": "其他安装方法:",
|
"otherInstallMethods": "其他安装方法:",
|
||||||
|
"debianUbuntuEquivalent": "(Debian/Ubuntu) 或您的操作系统的等效命令。",
|
||||||
|
"or": "或",
|
||||||
|
"centosRhelFedora": "CentOS/RHEL/Fedora",
|
||||||
|
"macos": "macOS",
|
||||||
|
"windows": "Windows",
|
||||||
"sshpassOSInstructions": {
|
"sshpassOSInstructions": {
|
||||||
"centos": "CentOS/RHEL/Fedora: sudo yum install sshpass 或 sudo dnf install sshpass",
|
"centos": "CentOS/RHEL/Fedora: sudo yum install sshpass 或 sudo dnf install sshpass",
|
||||||
"macos": "macOS: brew install hudochenkov/sshpass/sshpass",
|
"macos": "macOS: brew install hudochenkov/sshpass/sshpass",
|
||||||
"windows": "Windows: 使用 WSL 或考虑使用 SSH 密钥认证"
|
"windows": "Windows: 使用 WSL 或考虑使用 SSH 密钥认证"
|
||||||
},
|
},
|
||||||
|
"sshServerConfigRequired": "SSH 服务器配置要求",
|
||||||
|
"sshServerConfigDesc": "对于隧道连接,SSH 服务器必须配置允许端口转发:",
|
||||||
|
"gatewayPortsYes": "绑定远程端口到所有接口",
|
||||||
|
"allowTcpForwardingYes": "启用端口转发",
|
||||||
|
"permitRootLoginYes": "如果使用 root 用户进行隧道连接",
|
||||||
"sshServerConfigReverse": "对于反向 SSH 隧道,端点 SSH 服务器必须允许:",
|
"sshServerConfigReverse": "对于反向 SSH 隧道,端点 SSH 服务器必须允许:",
|
||||||
"gatewayPorts": "GatewayPorts yes(绑定远程端口)",
|
"gatewayPorts": "GatewayPorts yes(绑定远程端口)",
|
||||||
"allowTcpForwarding": "AllowTcpForwarding yes(端口转发)",
|
"allowTcpForwarding": "AllowTcpForwarding yes(端口转发)",
|
||||||
@@ -635,6 +652,9 @@
|
|||||||
},
|
},
|
||||||
"terminal": {
|
"terminal": {
|
||||||
"title": "终端",
|
"title": "终端",
|
||||||
|
"terminalTitle": "终端 - {{host}}",
|
||||||
|
"terminalWithPath": "终端 - {{host}}:{{path}}",
|
||||||
|
"runTitle": "运行 {{command}} - {{host}}",
|
||||||
"connect": "连接主机",
|
"connect": "连接主机",
|
||||||
"disconnect": "断开连接",
|
"disconnect": "断开连接",
|
||||||
"clear": "清屏",
|
"clear": "清屏",
|
||||||
@@ -670,7 +690,14 @@
|
|||||||
"folder": "文件夹",
|
"folder": "文件夹",
|
||||||
"connectToSsh": "连接 SSH 以使用文件操作",
|
"connectToSsh": "连接 SSH 以使用文件操作",
|
||||||
"uploadFile": "上传文件",
|
"uploadFile": "上传文件",
|
||||||
"downloadFile": "下载文件",
|
"downloadFile": "下载",
|
||||||
|
"edit": "编辑",
|
||||||
|
"preview": "预览",
|
||||||
|
"previous": "上一页",
|
||||||
|
"next": "下一页",
|
||||||
|
"pageXOfY": "第 {{current}} 页,共 {{total}} 页",
|
||||||
|
"zoomOut": "缩小",
|
||||||
|
"zoomIn": "放大",
|
||||||
"newFile": "新建文件",
|
"newFile": "新建文件",
|
||||||
"newFolder": "新建文件夹",
|
"newFolder": "新建文件夹",
|
||||||
"rename": "重命名",
|
"rename": "重命名",
|
||||||
@@ -678,7 +705,7 @@
|
|||||||
"deleteItem": "删除项目",
|
"deleteItem": "删除项目",
|
||||||
"currentPath": "当前路径",
|
"currentPath": "当前路径",
|
||||||
"uploadFileTitle": "上传文件",
|
"uploadFileTitle": "上传文件",
|
||||||
"maxFileSize": "最大:100MB(JSON)/ 200MB(二进制)",
|
"maxFileSize": "最大:1GB(JSON)/ 5GB(二进制)- 支持大文件",
|
||||||
"removeFile": "移除文件",
|
"removeFile": "移除文件",
|
||||||
"clickToSelectFile": "点击选择文件",
|
"clickToSelectFile": "点击选择文件",
|
||||||
"chooseFile": "选择文件",
|
"chooseFile": "选择文件",
|
||||||
@@ -743,6 +770,15 @@
|
|||||||
"deleteFiles": "删除 {{count}} 个项目",
|
"deleteFiles": "删除 {{count}} 个项目",
|
||||||
"filesCopiedToClipboard": "{{count}} 个项目已复制到剪贴板",
|
"filesCopiedToClipboard": "{{count}} 个项目已复制到剪贴板",
|
||||||
"filesCutToClipboard": "{{count}} 个项目已剪切到剪贴板",
|
"filesCutToClipboard": "{{count}} 个项目已剪切到剪贴板",
|
||||||
|
"movedItems": "已移动 {{count}} 个项目",
|
||||||
|
"unknownSize": "未知大小",
|
||||||
|
"fileIsEmpty": "文件为空",
|
||||||
|
"modified": "修改时间",
|
||||||
|
"largeFileWarning": "大文件警告",
|
||||||
|
"largeFileWarningDesc": "此文件大小为 {{size}},以文本形式打开可能会导致性能问题。",
|
||||||
|
"fileNotFoundAndRemoved": "文件 \"{{name}}\" 未找到,已从最近访问/固定文件中移除",
|
||||||
|
"failedToLoadFile": "加载文件失败:{{error}}",
|
||||||
|
"serverErrorOccurred": "服务器错误,请稍后重试。",
|
||||||
"failedToDeleteItem": "删除项目失败",
|
"failedToDeleteItem": "删除项目失败",
|
||||||
"itemRenamedSuccessfully": "{{type}}重命名成功",
|
"itemRenamedSuccessfully": "{{type}}重命名成功",
|
||||||
"failedToRenameItem": "重命名项目失败",
|
"failedToRenameItem": "重命名项目失败",
|
||||||
@@ -783,7 +819,7 @@
|
|||||||
"dragFilesToWindowToDownload": "拖拽文件到窗口外下载",
|
"dragFilesToWindowToDownload": "拖拽文件到窗口外下载",
|
||||||
"openTerminalHere": "在此处打开终端",
|
"openTerminalHere": "在此处打开终端",
|
||||||
"run": "运行",
|
"run": "运行",
|
||||||
"saveToSystem": "保存到系统",
|
"saveToSystem": "另存为...",
|
||||||
"selectLocationToSave": "选择位置保存",
|
"selectLocationToSave": "选择位置保存",
|
||||||
"openTerminalInFolder": "在此文件夹打开终端",
|
"openTerminalInFolder": "在此文件夹打开终端",
|
||||||
"openTerminalInFileLocation": "在文件位置打开终端",
|
"openTerminalInFileLocation": "在文件位置打开终端",
|
||||||
@@ -823,12 +859,78 @@
|
|||||||
"clearAllRecentFiles": "清除所有最近访问",
|
"clearAllRecentFiles": "清除所有最近访问",
|
||||||
"unpinFile": "取消固定",
|
"unpinFile": "取消固定",
|
||||||
"removeShortcut": "移除快捷方式",
|
"removeShortcut": "移除快捷方式",
|
||||||
"saveFilesToSystem": "保存 {{count}} 个文件到系统",
|
"saveFilesToSystem": "另存 {{count}} 个文件为...",
|
||||||
"saveToSystem": "保存到系统",
|
"saveToSystem": "另存为...",
|
||||||
"pinFile": "固定文件",
|
"pinFile": "固定文件",
|
||||||
"addToShortcuts": "添加到快捷方式",
|
"addToShortcuts": "添加到快捷方式",
|
||||||
"selectLocationToSave": "选择位置保存",
|
"selectLocationToSave": "选择位置保存",
|
||||||
"downloadToDefaultLocation": "下载到默认位置"
|
"downloadToDefaultLocation": "下载到默认位置",
|
||||||
|
"pasteFailed": "粘贴失败",
|
||||||
|
"noUndoableActions": "没有可撤销的操作",
|
||||||
|
"undoCopySuccess": "已撤销复制操作:删除了 {{count}} 个复制的文件",
|
||||||
|
"undoCopyFailedDelete": "撤销失败:无法删除任何复制的文件",
|
||||||
|
"undoCopyFailedNoInfo": "撤销失败:找不到复制的文件信息",
|
||||||
|
"undoMoveSuccess": "已撤销移动操作:移回了 {{count}} 个文件到原位置",
|
||||||
|
"undoMoveFailedMove": "撤销失败:无法移回任何文件",
|
||||||
|
"undoMoveFailedNoInfo": "撤销失败:找不到移动的文件信息",
|
||||||
|
"undoDeleteNotSupported": "删除操作无法撤销:文件已从服务器永久删除",
|
||||||
|
"undoTypeNotSupported": "不支持撤销此类操作",
|
||||||
|
"undoOperationFailed": "撤销操作失败",
|
||||||
|
"unknownError": "未知错误",
|
||||||
|
"enterPath": "输入路径...",
|
||||||
|
"editPath": "编辑路径",
|
||||||
|
"confirm": "确认",
|
||||||
|
"cancel": "取消",
|
||||||
|
"folderName": "文件夹名",
|
||||||
|
"find": "查找...",
|
||||||
|
"replaceWith": "替换为...",
|
||||||
|
"replace": "替换",
|
||||||
|
"replaceAll": "全部替换",
|
||||||
|
"downloadInstead": "下载文件",
|
||||||
|
"keyboardShortcuts": "键盘快捷键",
|
||||||
|
"searchAndReplace": "搜索和替换",
|
||||||
|
"editing": "编辑",
|
||||||
|
"navigation": "导航",
|
||||||
|
"code": "代码",
|
||||||
|
"search": "搜索",
|
||||||
|
"findNext": "查找下一个",
|
||||||
|
"findPrevious": "查找上一个",
|
||||||
|
"save": "保存",
|
||||||
|
"selectAll": "全选",
|
||||||
|
"undo": "撤销",
|
||||||
|
"redo": "重做",
|
||||||
|
"goToLine": "跳转到行",
|
||||||
|
"moveLineUp": "向上移动行",
|
||||||
|
"moveLineDown": "向下移动行",
|
||||||
|
"toggleComment": "切换注释",
|
||||||
|
"indent": "增加缩进",
|
||||||
|
"outdent": "减少缩进",
|
||||||
|
"autoComplete": "自动补全",
|
||||||
|
"imageLoadError": "图片加载失败",
|
||||||
|
"zoomIn": "放大",
|
||||||
|
"zoomOut": "缩小",
|
||||||
|
"rotate": "旋转",
|
||||||
|
"originalSize": "原始大小",
|
||||||
|
"startTyping": "开始输入...",
|
||||||
|
"fileSavedSuccessfully": "文件保存成功",
|
||||||
|
"autoSaveFailed": "自动保存失败",
|
||||||
|
"fileAutoSaved": "文件已自动保存",
|
||||||
|
"fileDownloadedSuccessfully": "文件下载成功",
|
||||||
|
"moveFileFailed": "移动 {{name}} 失败",
|
||||||
|
"moveOperationFailed": "移动操作失败",
|
||||||
|
"canOnlyCompareFiles": "只能对比两个文件",
|
||||||
|
"comparingFiles": "正在对比文件:{{file1}} 与 {{file2}}",
|
||||||
|
"dragFailed": "拖拽失败",
|
||||||
|
"filePinnedSuccessfully": "文件\"{{name}}\"已固定",
|
||||||
|
"pinFileFailed": "固定文件失败",
|
||||||
|
"fileUnpinnedSuccessfully": "文件\"{{name}}\"已取消固定",
|
||||||
|
"unpinFileFailed": "取消固定失败",
|
||||||
|
"shortcutAddedSuccessfully": "文件夹快捷方式\"{{name}}\"已添加",
|
||||||
|
"addShortcutFailed": "添加快捷方式失败",
|
||||||
|
"operationCompletedSuccessfully": "已{{operation}} {{count}} 个项目",
|
||||||
|
"operationCompleted": "已{{operation}} {{count}} 个项目",
|
||||||
|
"downloadFileSuccess": "文件 {{name}} 下载成功",
|
||||||
|
"downloadFileFailed": "下载失败"
|
||||||
},
|
},
|
||||||
"tunnels": {
|
"tunnels": {
|
||||||
"title": "SSH 隧道",
|
"title": "SSH 隧道",
|
||||||
|
|||||||
2
src/types/electron.d.ts
vendored
@@ -18,7 +18,7 @@ export interface ElectronAPI {
|
|||||||
|
|
||||||
invoke: (channel: string, ...args: any[]) => Promise<any>;
|
invoke: (channel: string, ...args: any[]) => Promise<any>;
|
||||||
|
|
||||||
// 拖拽API
|
// Drag and drop API
|
||||||
createTempFile: (fileData: {
|
createTempFile: (fileData: {
|
||||||
fileName: string;
|
fileName: string;
|
||||||
content: string;
|
content: string;
|
||||||
|
|||||||
@@ -24,6 +24,12 @@ export interface SSHHost {
|
|||||||
key?: string;
|
key?: string;
|
||||||
keyPassword?: string;
|
keyPassword?: string;
|
||||||
keyType?: string;
|
keyType?: string;
|
||||||
|
|
||||||
|
// Autostart plaintext credentials
|
||||||
|
autostartPassword?: string;
|
||||||
|
autostartKey?: string;
|
||||||
|
autostartKeyPassword?: string;
|
||||||
|
|
||||||
credentialId?: number;
|
credentialId?: number;
|
||||||
userId?: string;
|
userId?: string;
|
||||||
enableTerminal: boolean;
|
enableTerminal: boolean;
|
||||||
@@ -101,6 +107,14 @@ export interface TunnelConnection {
|
|||||||
sourcePort: number;
|
sourcePort: number;
|
||||||
endpointPort: number;
|
endpointPort: number;
|
||||||
endpointHost: string;
|
endpointHost: string;
|
||||||
|
|
||||||
|
// Endpoint host credentials for tunnel authentication
|
||||||
|
endpointPassword?: string;
|
||||||
|
endpointKey?: string;
|
||||||
|
endpointKeyPassword?: string;
|
||||||
|
endpointAuthType?: string;
|
||||||
|
endpointKeyType?: string;
|
||||||
|
|
||||||
maxRetries: number;
|
maxRetries: number;
|
||||||
retryInterval: number;
|
retryInterval: number;
|
||||||
autoStart: boolean;
|
autoStart: boolean;
|
||||||
|
|||||||
@@ -30,8 +30,6 @@ import {
|
|||||||
Lock,
|
Lock,
|
||||||
Download,
|
Download,
|
||||||
Upload,
|
Upload,
|
||||||
HardDrive,
|
|
||||||
FileArchive,
|
|
||||||
} from "lucide-react";
|
} from "lucide-react";
|
||||||
import { toast } from "sonner";
|
import { toast } from "sonner";
|
||||||
import { useTranslation } from "react-i18next";
|
import { useTranslation } from "react-i18next";
|
||||||
@@ -93,19 +91,16 @@ export function AdminSettings({
|
|||||||
null,
|
null,
|
||||||
);
|
);
|
||||||
|
|
||||||
// Database encryption state
|
// Simplified security state
|
||||||
const [encryptionStatus, setEncryptionStatus] = React.useState<any>(null);
|
const [securityInitialized, setSecurityInitialized] = React.useState(true);
|
||||||
const [encryptionLoading, setEncryptionLoading] = React.useState(false);
|
|
||||||
const [migrationLoading, setMigrationLoading] = React.useState(false);
|
|
||||||
const [migrationProgress, setMigrationProgress] = React.useState<string>("");
|
|
||||||
|
|
||||||
// Database migration state
|
// Database migration state
|
||||||
const [exportLoading, setExportLoading] = React.useState(false);
|
const [exportLoading, setExportLoading] = React.useState(false);
|
||||||
const [importLoading, setImportLoading] = React.useState(false);
|
const [importLoading, setImportLoading] = React.useState(false);
|
||||||
const [backupLoading, setBackupLoading] = React.useState(false);
|
|
||||||
const [importFile, setImportFile] = React.useState<File | null>(null);
|
const [importFile, setImportFile] = React.useState<File | null>(null);
|
||||||
const [exportPath, setExportPath] = React.useState<string>("");
|
const [exportPassword, setExportPassword] = React.useState("");
|
||||||
const [backupPath, setBackupPath] = React.useState<string>("");
|
const [showPasswordInput, setShowPasswordInput] = React.useState(false);
|
||||||
|
const [importPassword, setImportPassword] = React.useState("");
|
||||||
|
|
||||||
React.useEffect(() => {
|
React.useEffect(() => {
|
||||||
const jwt = getCookie("jwt");
|
const jwt = getCookie("jwt");
|
||||||
@@ -128,7 +123,6 @@ export function AdminSettings({
|
|||||||
}
|
}
|
||||||
});
|
});
|
||||||
fetchUsers();
|
fetchUsers();
|
||||||
fetchEncryptionStatus();
|
|
||||||
}, []);
|
}, []);
|
||||||
|
|
||||||
React.useEffect(() => {
|
React.useEffect(() => {
|
||||||
@@ -277,111 +271,25 @@ export function AdminSettings({
|
|||||||
);
|
);
|
||||||
};
|
};
|
||||||
|
|
||||||
const fetchEncryptionStatus = async () => {
|
const checkSecurityStatus = async () => {
|
||||||
if (isElectron()) {
|
// New v2-kek-dek system is always initialized
|
||||||
const serverUrl = (window as any).configuredServerUrl;
|
setSecurityInitialized(true);
|
||||||
if (!serverUrl) return;
|
|
||||||
}
|
|
||||||
|
|
||||||
try {
|
|
||||||
const jwt = getCookie("jwt");
|
|
||||||
const apiUrl = isElectron()
|
|
||||||
? `${(window as any).configuredServerUrl}/encryption/status`
|
|
||||||
: "http://localhost:8081/encryption/status";
|
|
||||||
|
|
||||||
const response = await fetch(apiUrl, {
|
|
||||||
headers: {
|
|
||||||
Authorization: `Bearer ${jwt}`,
|
|
||||||
"Content-Type": "application/json",
|
|
||||||
},
|
|
||||||
});
|
|
||||||
|
|
||||||
if (response.ok) {
|
|
||||||
const data = await response.json();
|
|
||||||
setEncryptionStatus(data);
|
|
||||||
}
|
|
||||||
} catch (err) {
|
|
||||||
console.error("Failed to fetch encryption status:", err);
|
|
||||||
}
|
|
||||||
};
|
};
|
||||||
|
|
||||||
const handleInitializeEncryption = async () => {
|
|
||||||
setEncryptionLoading(true);
|
|
||||||
try {
|
|
||||||
const jwt = getCookie("jwt");
|
|
||||||
const apiUrl = isElectron()
|
|
||||||
? `${(window as any).configuredServerUrl}/encryption/initialize`
|
|
||||||
: "http://localhost:8081/encryption/initialize";
|
|
||||||
|
|
||||||
const response = await fetch(apiUrl, {
|
|
||||||
method: "POST",
|
|
||||||
headers: {
|
|
||||||
Authorization: `Bearer ${jwt}`,
|
|
||||||
"Content-Type": "application/json",
|
|
||||||
},
|
|
||||||
});
|
|
||||||
|
|
||||||
if (response.ok) {
|
|
||||||
const result = await response.json();
|
|
||||||
toast.success("Database encryption initialized successfully!");
|
|
||||||
await fetchEncryptionStatus();
|
|
||||||
} else {
|
|
||||||
throw new Error("Failed to initialize encryption");
|
|
||||||
}
|
|
||||||
} catch (err) {
|
|
||||||
toast.error("Failed to initialize encryption");
|
|
||||||
} finally {
|
|
||||||
setEncryptionLoading(false);
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
const handleMigrateData = async (dryRun: boolean = false) => {
|
|
||||||
setMigrationLoading(true);
|
|
||||||
setMigrationProgress(
|
|
||||||
dryRun ? t("admin.runningVerification") : t("admin.startingMigration"),
|
|
||||||
);
|
|
||||||
|
|
||||||
try {
|
|
||||||
const jwt = getCookie("jwt");
|
|
||||||
const apiUrl = isElectron()
|
|
||||||
? `${(window as any).configuredServerUrl}/encryption/migrate`
|
|
||||||
: "http://localhost:8081/encryption/migrate";
|
|
||||||
|
|
||||||
const response = await fetch(apiUrl, {
|
|
||||||
method: "POST",
|
|
||||||
headers: {
|
|
||||||
Authorization: `Bearer ${jwt}`,
|
|
||||||
"Content-Type": "application/json",
|
|
||||||
},
|
|
||||||
body: JSON.stringify({ dryRun }),
|
|
||||||
});
|
|
||||||
|
|
||||||
if (response.ok) {
|
|
||||||
const result = await response.json();
|
|
||||||
if (dryRun) {
|
|
||||||
toast.success(t("admin.verificationCompleted"));
|
|
||||||
setMigrationProgress(t("admin.verificationInProgress"));
|
|
||||||
} else {
|
|
||||||
toast.success(t("admin.dataMigrationCompleted"));
|
|
||||||
setMigrationProgress(t("admin.migrationCompleted"));
|
|
||||||
await fetchEncryptionStatus();
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
throw new Error("Migration failed");
|
|
||||||
}
|
|
||||||
} catch (err) {
|
|
||||||
toast.error(
|
|
||||||
dryRun ? t("admin.verificationFailed") : t("admin.migrationFailed"),
|
|
||||||
);
|
|
||||||
setMigrationProgress("Failed");
|
|
||||||
} finally {
|
|
||||||
setMigrationLoading(false);
|
|
||||||
setTimeout(() => setMigrationProgress(""), 3000);
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
// Database export/import handlers
|
// Database export/import handlers
|
||||||
const handleExportDatabase = async () => {
|
const handleExportDatabase = async () => {
|
||||||
|
if (!showPasswordInput) {
|
||||||
|
setShowPasswordInput(true);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!exportPassword.trim()) {
|
||||||
|
toast.error(t("admin.passwordRequired"));
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
setExportLoading(true);
|
setExportLoading(true);
|
||||||
try {
|
try {
|
||||||
const jwt = getCookie("jwt");
|
const jwt = getCookie("jwt");
|
||||||
@@ -395,15 +303,34 @@ export function AdminSettings({
|
|||||||
Authorization: `Bearer ${jwt}`,
|
Authorization: `Bearer ${jwt}`,
|
||||||
"Content-Type": "application/json",
|
"Content-Type": "application/json",
|
||||||
},
|
},
|
||||||
body: JSON.stringify({}),
|
body: JSON.stringify({ password: exportPassword }),
|
||||||
});
|
});
|
||||||
|
|
||||||
if (response.ok) {
|
if (response.ok) {
|
||||||
const result = await response.json();
|
// Handle file download
|
||||||
setExportPath(result.exportPath);
|
const blob = await response.blob();
|
||||||
|
const contentDisposition = response.headers.get('content-disposition');
|
||||||
|
const filename = contentDisposition?.match(/filename="([^"]+)"/)?.[1] || 'termix-export.sqlite';
|
||||||
|
|
||||||
|
const url = window.URL.createObjectURL(blob);
|
||||||
|
const a = document.createElement('a');
|
||||||
|
a.href = url;
|
||||||
|
a.download = filename;
|
||||||
|
document.body.appendChild(a);
|
||||||
|
a.click();
|
||||||
|
window.URL.revokeObjectURL(url);
|
||||||
|
document.body.removeChild(a);
|
||||||
|
|
||||||
toast.success(t("admin.databaseExportedSuccessfully"));
|
toast.success(t("admin.databaseExportedSuccessfully"));
|
||||||
|
setExportPassword("");
|
||||||
|
setShowPasswordInput(false);
|
||||||
} else {
|
} else {
|
||||||
throw new Error("Export failed");
|
const error = await response.json();
|
||||||
|
if (error.code === "PASSWORD_REQUIRED") {
|
||||||
|
toast.error(t("admin.passwordRequired"));
|
||||||
|
} else {
|
||||||
|
toast.error(error.error || t("admin.databaseExportFailed"));
|
||||||
|
}
|
||||||
}
|
}
|
||||||
} catch (err) {
|
} catch (err) {
|
||||||
toast.error(t("admin.databaseExportFailed"));
|
toast.error(t("admin.databaseExportFailed"));
|
||||||
@@ -418,6 +345,11 @@ export function AdminSettings({
|
|||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if (!importPassword.trim()) {
|
||||||
|
toast.error(t("admin.passwordRequired"));
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
setImportLoading(true);
|
setImportLoading(true);
|
||||||
try {
|
try {
|
||||||
const jwt = getCookie("jwt");
|
const jwt = getCookie("jwt");
|
||||||
@@ -428,7 +360,7 @@ export function AdminSettings({
|
|||||||
// Create FormData for file upload
|
// Create FormData for file upload
|
||||||
const formData = new FormData();
|
const formData = new FormData();
|
||||||
formData.append("file", importFile);
|
formData.append("file", importFile);
|
||||||
formData.append("backupCurrent", "true");
|
formData.append("password", importPassword);
|
||||||
|
|
||||||
const response = await fetch(apiUrl, {
|
const response = await fetch(apiUrl, {
|
||||||
method: "POST",
|
method: "POST",
|
||||||
@@ -441,16 +373,34 @@ export function AdminSettings({
|
|||||||
if (response.ok) {
|
if (response.ok) {
|
||||||
const result = await response.json();
|
const result = await response.json();
|
||||||
if (result.success) {
|
if (result.success) {
|
||||||
toast.success(t("admin.databaseImportedSuccessfully"));
|
const summary = result.summary;
|
||||||
|
const imported = summary.sshHostsImported + summary.sshCredentialsImported + summary.fileManagerItemsImported + summary.dismissedAlertsImported + (summary.settingsImported || 0);
|
||||||
|
const skipped = summary.skippedItems;
|
||||||
|
|
||||||
|
const details = [];
|
||||||
|
if (summary.sshHostsImported > 0) details.push(`${summary.sshHostsImported} SSH hosts`);
|
||||||
|
if (summary.sshCredentialsImported > 0) details.push(`${summary.sshCredentialsImported} credentials`);
|
||||||
|
if (summary.fileManagerItemsImported > 0) details.push(`${summary.fileManagerItemsImported} file manager items`);
|
||||||
|
if (summary.dismissedAlertsImported > 0) details.push(`${summary.dismissedAlertsImported} alerts`);
|
||||||
|
if (summary.settingsImported > 0) details.push(`${summary.settingsImported} settings`);
|
||||||
|
|
||||||
|
toast.success(
|
||||||
|
`Import completed: ${imported} items imported${details.length > 0 ? ` (${details.join(', ')})` : ''}, ${skipped} items skipped`
|
||||||
|
);
|
||||||
setImportFile(null);
|
setImportFile(null);
|
||||||
await fetchEncryptionStatus(); // Refresh status
|
setImportPassword("");
|
||||||
} else {
|
} else {
|
||||||
toast.error(
|
toast.error(
|
||||||
`${t("admin.databaseImportFailed")}: ${result.errors?.join(", ") || "Unknown error"}`,
|
`${t("admin.databaseImportFailed")}: ${result.summary?.errors?.join(", ") || "Unknown error"}`,
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
throw new Error("Import failed");
|
const error = await response.json();
|
||||||
|
if (error.code === "PASSWORD_REQUIRED") {
|
||||||
|
toast.error(t("admin.passwordRequired"));
|
||||||
|
} else {
|
||||||
|
toast.error(error.error || t("admin.databaseImportFailed"));
|
||||||
|
}
|
||||||
}
|
}
|
||||||
} catch (err) {
|
} catch (err) {
|
||||||
toast.error(t("admin.databaseImportFailed"));
|
toast.error(t("admin.databaseImportFailed"));
|
||||||
@@ -459,36 +409,6 @@ export function AdminSettings({
|
|||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
const handleCreateBackup = async () => {
|
|
||||||
setBackupLoading(true);
|
|
||||||
try {
|
|
||||||
const jwt = getCookie("jwt");
|
|
||||||
const apiUrl = isElectron()
|
|
||||||
? `${(window as any).configuredServerUrl}/database/backup`
|
|
||||||
: "http://localhost:8081/database/backup";
|
|
||||||
|
|
||||||
const response = await fetch(apiUrl, {
|
|
||||||
method: "POST",
|
|
||||||
headers: {
|
|
||||||
Authorization: `Bearer ${jwt}`,
|
|
||||||
"Content-Type": "application/json",
|
|
||||||
},
|
|
||||||
body: JSON.stringify({}),
|
|
||||||
});
|
|
||||||
|
|
||||||
if (response.ok) {
|
|
||||||
const result = await response.json();
|
|
||||||
setBackupPath(result.backupPath);
|
|
||||||
toast.success(t("admin.encryptedBackupCreatedSuccessfully"));
|
|
||||||
} else {
|
|
||||||
throw new Error("Backup failed");
|
|
||||||
}
|
|
||||||
} catch (err) {
|
|
||||||
toast.error(t("admin.backupCreationFailed"));
|
|
||||||
} finally {
|
|
||||||
setBackupLoading(false);
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
const topMarginPx = isTopbarOpen ? 74 : 26;
|
const topMarginPx = isTopbarOpen ? 74 : 26;
|
||||||
const leftMarginPx = sidebarState === "collapsed" ? 26 : 8;
|
const leftMarginPx = sidebarState === "collapsed" ? 26 : 8;
|
||||||
@@ -925,7 +845,7 @@ export function AdminSettings({
|
|||||||
</TabsContent>
|
</TabsContent>
|
||||||
|
|
||||||
<TabsContent value="security" className="space-y-6">
|
<TabsContent value="security" className="space-y-6">
|
||||||
<div className="space-y-6">
|
<div className="space-y-4">
|
||||||
<div className="flex items-center gap-3">
|
<div className="flex items-center gap-3">
|
||||||
<Database className="h-5 w-5" />
|
<Database className="h-5 w-5" />
|
||||||
<h3 className="text-lg font-semibold">
|
<h3 className="text-lg font-semibold">
|
||||||
@@ -933,242 +853,113 @@ export function AdminSettings({
|
|||||||
</h3>
|
</h3>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
{encryptionStatus && (
|
{/* Simple status display - read only */}
|
||||||
<div className="space-y-4">
|
<div className="p-4 border rounded bg-card">
|
||||||
{/* Status Overview */}
|
|
||||||
<div className="grid gap-3 md:grid-cols-3">
|
|
||||||
<div className="p-3 border rounded bg-card">
|
|
||||||
<div className="flex items-center gap-2">
|
<div className="flex items-center gap-2">
|
||||||
{encryptionStatus.encryption?.enabled ? (
|
|
||||||
<Lock className="h-4 w-4 text-green-500" />
|
<Lock className="h-4 w-4 text-green-500" />
|
||||||
) : (
|
|
||||||
<Key className="h-4 w-4 text-yellow-500" />
|
|
||||||
)}
|
|
||||||
<div>
|
<div>
|
||||||
<div className="text-sm font-medium">
|
<div className="text-sm font-medium">{t("admin.encryptionStatus")}</div>
|
||||||
{t("admin.encryptionStatus")}
|
<div className="text-xs text-green-500">{t("admin.encryptionEnabled")}</div>
|
||||||
</div>
|
|
||||||
<div
|
|
||||||
className={`text-xs ${
|
|
||||||
encryptionStatus.encryption?.enabled
|
|
||||||
? "text-green-500"
|
|
||||||
: "text-yellow-500"
|
|
||||||
}`}
|
|
||||||
>
|
|
||||||
{encryptionStatus.encryption?.enabled
|
|
||||||
? t("admin.enabled")
|
|
||||||
: t("admin.disabled")}
|
|
||||||
</div>
|
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
<div className="p-3 border rounded bg-card">
|
{/* Data management functions - export/import */}
|
||||||
<div className="flex items-center gap-2">
|
|
||||||
<Shield className="h-4 w-4 text-blue-500" />
|
|
||||||
<div>
|
|
||||||
<div className="text-sm font-medium">
|
|
||||||
{t("admin.keyProtection")}
|
|
||||||
</div>
|
|
||||||
<div
|
|
||||||
className={`text-xs ${
|
|
||||||
encryptionStatus.encryption?.key?.kekProtected
|
|
||||||
? "text-green-500"
|
|
||||||
: "text-yellow-500"
|
|
||||||
}`}
|
|
||||||
>
|
|
||||||
{encryptionStatus.encryption?.key?.kekProtected
|
|
||||||
? t("admin.active")
|
|
||||||
: t("admin.legacy")}
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<div className="p-3 border rounded bg-card">
|
|
||||||
<div className="flex items-center gap-2">
|
|
||||||
<Database className="h-4 w-4 text-purple-500" />
|
|
||||||
<div>
|
|
||||||
<div className="text-sm font-medium">
|
|
||||||
{t("admin.dataStatus")}
|
|
||||||
</div>
|
|
||||||
<div
|
|
||||||
className={`text-xs ${
|
|
||||||
encryptionStatus.migration?.migrationCompleted
|
|
||||||
? "text-green-500"
|
|
||||||
: encryptionStatus.migration
|
|
||||||
?.migrationRequired
|
|
||||||
? "text-yellow-500"
|
|
||||||
: "text-muted-foreground"
|
|
||||||
}`}
|
|
||||||
>
|
|
||||||
{encryptionStatus.migration?.migrationCompleted
|
|
||||||
? t("admin.encrypted")
|
|
||||||
: encryptionStatus.migration?.migrationRequired
|
|
||||||
? t("admin.needsMigration")
|
|
||||||
: t("admin.ready")}
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
{/* Actions */}
|
|
||||||
<div className="grid gap-3 md:grid-cols-2">
|
<div className="grid gap-3 md:grid-cols-2">
|
||||||
{!encryptionStatus.encryption?.key?.hasKey ? (
|
|
||||||
<div className="p-4 border rounded bg-card">
|
<div className="p-4 border rounded bg-card">
|
||||||
<div className="space-y-3">
|
<div className="space-y-3">
|
||||||
<div className="flex items-center gap-2">
|
<div className="flex items-center gap-2">
|
||||||
<Shield className="h-4 w-4 text-blue-500" />
|
<Download className="h-4 w-4 text-blue-500" />
|
||||||
<h4 className="font-medium">
|
<h4 className="font-medium">{t("admin.export")}</h4>
|
||||||
{t("admin.initializeEncryption")}
|
|
||||||
</h4>
|
|
||||||
</div>
|
</div>
|
||||||
|
<p className="text-xs text-muted-foreground">
|
||||||
|
{t("admin.exportDescription")}
|
||||||
|
</p>
|
||||||
|
{showPasswordInput && (
|
||||||
|
<div className="space-y-2">
|
||||||
|
<Label htmlFor="export-password">Password</Label>
|
||||||
|
<PasswordInput
|
||||||
|
id="export-password"
|
||||||
|
value={exportPassword}
|
||||||
|
onChange={(e) => setExportPassword(e.target.value)}
|
||||||
|
placeholder="Enter your password"
|
||||||
|
onKeyDown={(e) => {
|
||||||
|
if (e.key === 'Enter') {
|
||||||
|
handleExportDatabase();
|
||||||
|
}
|
||||||
|
}}
|
||||||
|
/>
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
<Button
|
<Button
|
||||||
onClick={handleInitializeEncryption}
|
onClick={handleExportDatabase}
|
||||||
disabled={encryptionLoading}
|
disabled={exportLoading}
|
||||||
className="w-full"
|
className="w-full"
|
||||||
>
|
>
|
||||||
{encryptionLoading
|
{exportLoading
|
||||||
? t("admin.initializing")
|
? t("admin.exporting")
|
||||||
: t("admin.initialize")}
|
: showPasswordInput
|
||||||
|
? t("admin.confirmExport")
|
||||||
|
: t("admin.export")
|
||||||
|
}
|
||||||
</Button>
|
</Button>
|
||||||
</div>
|
{showPasswordInput && (
|
||||||
</div>
|
|
||||||
) : (
|
|
||||||
<>
|
|
||||||
{encryptionStatus.migration?.migrationRequired && (
|
|
||||||
<div className="p-4 border rounded bg-card">
|
|
||||||
<div className="space-y-3">
|
|
||||||
<div className="flex items-center gap-2">
|
|
||||||
<Database className="h-4 w-4 text-yellow-500" />
|
|
||||||
<h4 className="font-medium">
|
|
||||||
{t("admin.migrateData")}
|
|
||||||
</h4>
|
|
||||||
</div>
|
|
||||||
{migrationProgress && (
|
|
||||||
<div className="text-sm text-blue-600">
|
|
||||||
{migrationProgress}
|
|
||||||
</div>
|
|
||||||
)}
|
|
||||||
<div className="flex gap-2">
|
|
||||||
<Button
|
<Button
|
||||||
onClick={() => handleMigrateData(true)}
|
|
||||||
disabled={migrationLoading}
|
|
||||||
variant="outline"
|
|
||||||
size="sm"
|
|
||||||
className="flex-1"
|
|
||||||
>
|
|
||||||
{t("admin.test")}
|
|
||||||
</Button>
|
|
||||||
<Button
|
|
||||||
onClick={() => handleMigrateData(false)}
|
|
||||||
disabled={migrationLoading}
|
|
||||||
size="sm"
|
|
||||||
className="flex-1"
|
|
||||||
>
|
|
||||||
{migrationLoading
|
|
||||||
? t("admin.migrating")
|
|
||||||
: t("admin.migrate")}
|
|
||||||
</Button>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
)}
|
|
||||||
|
|
||||||
<div className="p-4 border rounded bg-card">
|
|
||||||
<div className="space-y-3">
|
|
||||||
<div className="flex items-center gap-2">
|
|
||||||
<Database className="h-4 w-4 text-blue-500" />
|
|
||||||
<h4 className="font-medium">
|
|
||||||
{t("admin.backup")}
|
|
||||||
</h4>
|
|
||||||
</div>
|
|
||||||
<Button
|
|
||||||
onClick={handleCreateBackup}
|
|
||||||
disabled={backupLoading}
|
|
||||||
variant="outline"
|
variant="outline"
|
||||||
|
onClick={() => {
|
||||||
|
setShowPasswordInput(false);
|
||||||
|
setExportPassword("");
|
||||||
|
}}
|
||||||
className="w-full"
|
className="w-full"
|
||||||
>
|
>
|
||||||
{backupLoading
|
Cancel
|
||||||
? t("admin.creatingBackup")
|
|
||||||
: t("admin.createBackup")}
|
|
||||||
</Button>
|
</Button>
|
||||||
{backupPath && (
|
|
||||||
<div className="p-2 bg-muted rounded border">
|
|
||||||
<div className="text-xs font-mono break-all">
|
|
||||||
{backupPath}
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
)}
|
)}
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
</>
|
|
||||||
)}
|
|
||||||
|
|
||||||
<div className="p-4 border rounded bg-card">
|
<div className="p-4 border rounded bg-card">
|
||||||
<div className="space-y-3">
|
<div className="space-y-3">
|
||||||
<div className="flex items-center gap-2">
|
<div className="flex items-center gap-2">
|
||||||
<Upload className="h-4 w-4 text-green-500" />
|
<Upload className="h-4 w-4 text-green-500" />
|
||||||
<h4 className="font-medium">
|
<h4 className="font-medium">{t("admin.import")}</h4>
|
||||||
{t("admin.exportImport")}
|
|
||||||
</h4>
|
|
||||||
</div>
|
</div>
|
||||||
<div className="space-y-2">
|
<p className="text-xs text-muted-foreground">
|
||||||
<Button
|
{t("admin.importDescription")}
|
||||||
onClick={handleExportDatabase}
|
</p>
|
||||||
disabled={exportLoading}
|
|
||||||
variant="outline"
|
|
||||||
size="sm"
|
|
||||||
className="w-full"
|
|
||||||
>
|
|
||||||
{exportLoading
|
|
||||||
? t("admin.exporting")
|
|
||||||
: t("admin.export")}
|
|
||||||
</Button>
|
|
||||||
{exportPath && (
|
|
||||||
<div className="p-2 bg-muted rounded border">
|
|
||||||
<div className="text-xs font-mono break-all">
|
|
||||||
{exportPath}
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
)}
|
|
||||||
</div>
|
|
||||||
<div className="space-y-2">
|
|
||||||
<input
|
<input
|
||||||
type="file"
|
type="file"
|
||||||
accept=".sqlite,.termix-export.sqlite,.db"
|
accept=".sqlite,.db"
|
||||||
onChange={(e) =>
|
onChange={(e) => setImportFile(e.target.files?.[0] || null)}
|
||||||
setImportFile(e.target.files?.[0] || null)
|
className="block w-full text-xs file:mr-2 file:py-1 file:px-2 file:rounded file:border-0 file:text-xs file:bg-muted file:text-foreground mb-2"
|
||||||
}
|
|
||||||
className="block w-full text-xs file:mr-2 file:py-1 file:px-2 file:rounded file:border-0 file:text-xs file:bg-muted file:text-foreground"
|
|
||||||
/>
|
/>
|
||||||
|
{importFile && (
|
||||||
|
<div className="space-y-2">
|
||||||
|
<Label htmlFor="import-password">Password</Label>
|
||||||
|
<PasswordInput
|
||||||
|
id="import-password"
|
||||||
|
value={importPassword}
|
||||||
|
onChange={(e) => setImportPassword(e.target.value)}
|
||||||
|
placeholder="Enter your password"
|
||||||
|
onKeyDown={(e) => {
|
||||||
|
if (e.key === 'Enter') {
|
||||||
|
handleImportDatabase();
|
||||||
|
}
|
||||||
|
}}
|
||||||
|
/>
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
<Button
|
<Button
|
||||||
onClick={handleImportDatabase}
|
onClick={handleImportDatabase}
|
||||||
disabled={importLoading || !importFile}
|
disabled={importLoading || !importFile || !importPassword.trim()}
|
||||||
variant="outline"
|
|
||||||
size="sm"
|
|
||||||
className="w-full"
|
className="w-full"
|
||||||
>
|
>
|
||||||
{importLoading
|
{importLoading ? t("admin.importing") : t("admin.import")}
|
||||||
? t("admin.importing")
|
|
||||||
: t("admin.import")}
|
|
||||||
</Button>
|
</Button>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
|
||||||
)}
|
|
||||||
|
|
||||||
{!encryptionStatus && (
|
|
||||||
<div className="text-center py-8">
|
|
||||||
<div className="text-muted-foreground">
|
|
||||||
{t("admin.loadingEncryptionStatus")}
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
)}
|
|
||||||
</div>
|
|
||||||
</TabsContent>
|
</TabsContent>
|
||||||
</Tabs>
|
</Tabs>
|
||||||
</div>
|
</div>
|
||||||
|
|||||||
@@ -28,6 +28,9 @@ import {
|
|||||||
generateKeyPair,
|
generateKeyPair,
|
||||||
} from "@/ui/main-axios";
|
} from "@/ui/main-axios";
|
||||||
import { useTranslation } from "react-i18next";
|
import { useTranslation } from "react-i18next";
|
||||||
|
import CodeMirror from "@uiw/react-codemirror";
|
||||||
|
import { oneDark } from "@codemirror/theme-one-dark";
|
||||||
|
import { EditorView } from "@codemirror/view";
|
||||||
import type {
|
import type {
|
||||||
Credential,
|
Credential,
|
||||||
CredentialEditorProps,
|
CredentialEditorProps,
|
||||||
@@ -312,9 +315,9 @@ export function CredentialEditor({
|
|||||||
"ssh-dss": "DSA (SSH)",
|
"ssh-dss": "DSA (SSH)",
|
||||||
"rsa-sha2-256": "RSA-SHA2-256",
|
"rsa-sha2-256": "RSA-SHA2-256",
|
||||||
"rsa-sha2-512": "RSA-SHA2-512",
|
"rsa-sha2-512": "RSA-SHA2-512",
|
||||||
invalid: "Invalid Key",
|
invalid: t("credentials.invalidKey"),
|
||||||
error: "Detection Error",
|
error: t("credentials.detectionError"),
|
||||||
unknown: "Unknown",
|
unknown: t("credentials.unknown"),
|
||||||
};
|
};
|
||||||
return keyTypeMap[keyType] || keyType;
|
return keyTypeMap[keyType] || keyType;
|
||||||
};
|
};
|
||||||
@@ -908,23 +911,39 @@ export function CredentialEditor({
|
|||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
<FormControl>
|
<FormControl>
|
||||||
<textarea
|
<CodeMirror
|
||||||
placeholder={t(
|
|
||||||
"placeholders.pastePrivateKey",
|
|
||||||
)}
|
|
||||||
className="flex min-h-[120px] w-full rounded-md border border-input bg-background px-3 py-2 text-sm ring-offset-background placeholder:text-muted-foreground focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-ring focus-visible:ring-offset-2 disabled:cursor-not-allowed disabled:opacity-50"
|
|
||||||
value={
|
value={
|
||||||
typeof field.value === "string"
|
typeof field.value === "string"
|
||||||
? field.value
|
? field.value
|
||||||
: ""
|
: ""
|
||||||
}
|
}
|
||||||
onChange={(e) => {
|
onChange={(value) => {
|
||||||
field.onChange(e.target.value);
|
field.onChange(value);
|
||||||
debouncedKeyDetection(
|
debouncedKeyDetection(
|
||||||
e.target.value,
|
value,
|
||||||
form.watch("keyPassword"),
|
form.watch("keyPassword"),
|
||||||
);
|
);
|
||||||
}}
|
}}
|
||||||
|
placeholder={t("placeholders.pastePrivateKey")}
|
||||||
|
theme={oneDark}
|
||||||
|
className="border border-input rounded-md"
|
||||||
|
minHeight="120px"
|
||||||
|
basicSetup={{
|
||||||
|
lineNumbers: true,
|
||||||
|
foldGutter: false,
|
||||||
|
dropCursor: false,
|
||||||
|
allowMultipleSelections: false,
|
||||||
|
highlightSelectionMatches: false,
|
||||||
|
searchKeymap: false,
|
||||||
|
scrollPastEnd: false,
|
||||||
|
}}
|
||||||
|
extensions={[
|
||||||
|
EditorView.theme({
|
||||||
|
".cm-scroller": {
|
||||||
|
overflow: "auto",
|
||||||
|
},
|
||||||
|
}),
|
||||||
|
]}
|
||||||
/>
|
/>
|
||||||
</FormControl>
|
</FormControl>
|
||||||
{detectedKeyType && (
|
{detectedKeyType && (
|
||||||
@@ -1062,14 +1081,32 @@ export function CredentialEditor({
|
|||||||
</Button>
|
</Button>
|
||||||
</div>
|
</div>
|
||||||
<FormControl>
|
<FormControl>
|
||||||
<textarea
|
<CodeMirror
|
||||||
placeholder={t("placeholders.pastePublicKey")}
|
|
||||||
className="flex min-h-[120px] w-full rounded-md border border-input bg-background px-3 py-2 text-sm ring-offset-background placeholder:text-muted-foreground focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-ring focus-visible:ring-offset-2 disabled:cursor-not-allowed disabled:opacity-50"
|
|
||||||
value={field.value || ""}
|
value={field.value || ""}
|
||||||
onChange={(e) => {
|
onChange={(value) => {
|
||||||
field.onChange(e.target.value);
|
field.onChange(value);
|
||||||
debouncedPublicKeyDetection(e.target.value);
|
debouncedPublicKeyDetection(value);
|
||||||
}}
|
}}
|
||||||
|
placeholder={t("placeholders.pastePublicKey")}
|
||||||
|
theme={oneDark}
|
||||||
|
className="border border-input rounded-md"
|
||||||
|
minHeight="120px"
|
||||||
|
basicSetup={{
|
||||||
|
lineNumbers: true,
|
||||||
|
foldGutter: false,
|
||||||
|
dropCursor: false,
|
||||||
|
allowMultipleSelections: false,
|
||||||
|
highlightSelectionMatches: false,
|
||||||
|
searchKeymap: false,
|
||||||
|
scrollPastEnd: false,
|
||||||
|
}}
|
||||||
|
extensions={[
|
||||||
|
EditorView.theme({
|
||||||
|
".cm-scroller": {
|
||||||
|
overflow: "auto",
|
||||||
|
},
|
||||||
|
}),
|
||||||
|
]}
|
||||||
/>
|
/>
|
||||||
</FormControl>
|
</FormControl>
|
||||||
<div className="text-xs text-muted-foreground mt-1">
|
<div className="text-xs text-muted-foreground mt-1">
|
||||||
|
|||||||
@@ -107,7 +107,7 @@ export function FileManagerContextMenu({
|
|||||||
useEffect(() => {
|
useEffect(() => {
|
||||||
if (!isVisible) return;
|
if (!isVisible) return;
|
||||||
|
|
||||||
// 调整菜单位置避免超出屏幕
|
// Adjust menu position to avoid going off screen
|
||||||
const adjustPosition = () => {
|
const adjustPosition = () => {
|
||||||
const menuWidth = 200;
|
const menuWidth = 200;
|
||||||
const menuHeight = 300;
|
const menuHeight = 300;
|
||||||
@@ -130,13 +130,13 @@ export function FileManagerContextMenu({
|
|||||||
|
|
||||||
adjustPosition();
|
adjustPosition();
|
||||||
|
|
||||||
// 延迟添加事件监听器,避免捕获到触发菜单的那次点击
|
// Delay adding event listeners to avoid capturing the click that triggered the menu
|
||||||
let cleanupFn: (() => void) | null = null;
|
let cleanupFn: (() => void) | null = null;
|
||||||
|
|
||||||
const timeoutId = setTimeout(() => {
|
const timeoutId = setTimeout(() => {
|
||||||
// 点击外部关闭菜单
|
// Click outside to close menu
|
||||||
const handleClickOutside = (event: MouseEvent) => {
|
const handleClickOutside = (event: MouseEvent) => {
|
||||||
// 检查点击是否在菜单内部
|
// Check if click is inside menu
|
||||||
const target = event.target as Element;
|
const target = event.target as Element;
|
||||||
const menuElement = document.querySelector("[data-context-menu]");
|
const menuElement = document.querySelector("[data-context-menu]");
|
||||||
|
|
||||||
@@ -145,13 +145,13 @@ export function FileManagerContextMenu({
|
|||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
// 右键点击关闭菜单(Windows行为)
|
// Right-click to close menu (Windows behavior)
|
||||||
const handleRightClick = (event: MouseEvent) => {
|
const handleRightClick = (event: MouseEvent) => {
|
||||||
event.preventDefault();
|
event.preventDefault();
|
||||||
onClose();
|
onClose();
|
||||||
};
|
};
|
||||||
|
|
||||||
// 键盘支持
|
// Keyboard support
|
||||||
const handleKeyDown = (event: KeyboardEvent) => {
|
const handleKeyDown = (event: KeyboardEvent) => {
|
||||||
if (event.key === "Escape") {
|
if (event.key === "Escape") {
|
||||||
event.preventDefault();
|
event.preventDefault();
|
||||||
@@ -159,12 +159,12 @@ export function FileManagerContextMenu({
|
|||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
// 窗口失焦关闭菜单
|
// Close menu on window blur
|
||||||
const handleBlur = () => {
|
const handleBlur = () => {
|
||||||
onClose();
|
onClose();
|
||||||
};
|
};
|
||||||
|
|
||||||
// 滚动时关闭菜单(Windows行为)
|
// Close menu on scroll (Windows behavior)
|
||||||
const handleScroll = () => {
|
const handleScroll = () => {
|
||||||
onClose();
|
onClose();
|
||||||
};
|
};
|
||||||
@@ -175,7 +175,7 @@ export function FileManagerContextMenu({
|
|||||||
window.addEventListener("blur", handleBlur);
|
window.addEventListener("blur", handleBlur);
|
||||||
window.addEventListener("scroll", handleScroll, true);
|
window.addEventListener("scroll", handleScroll, true);
|
||||||
|
|
||||||
// 设置清理函数
|
// Set cleanup function
|
||||||
cleanupFn = () => {
|
cleanupFn = () => {
|
||||||
document.removeEventListener("mousedown", handleClickOutside, true);
|
document.removeEventListener("mousedown", handleClickOutside, true);
|
||||||
document.removeEventListener("contextmenu", handleRightClick);
|
document.removeEventListener("contextmenu", handleRightClick);
|
||||||
@@ -183,7 +183,7 @@ export function FileManagerContextMenu({
|
|||||||
window.removeEventListener("blur", handleBlur);
|
window.removeEventListener("blur", handleBlur);
|
||||||
window.removeEventListener("scroll", handleScroll, true);
|
window.removeEventListener("scroll", handleScroll, true);
|
||||||
};
|
};
|
||||||
}, 50); // 50ms延迟,确保不会捕获到触发菜单的点击
|
}, 50); // 50ms delay to ensure we don't capture the click that triggered the menu
|
||||||
|
|
||||||
return () => {
|
return () => {
|
||||||
clearTimeout(timeoutId);
|
clearTimeout(timeoutId);
|
||||||
@@ -204,13 +204,13 @@ export function FileManagerContextMenu({
|
|||||||
(f) => f.type === "file" && f.executable,
|
(f) => f.type === "file" && f.executable,
|
||||||
);
|
);
|
||||||
|
|
||||||
// 构建菜单项
|
// Build menu items
|
||||||
const menuItems: MenuItem[] = [];
|
const menuItems: MenuItem[] = [];
|
||||||
|
|
||||||
if (isFileContext) {
|
if (isFileContext) {
|
||||||
// 文件/文件夹选中时的菜单
|
// Menu when files/folders are selected
|
||||||
|
|
||||||
// 打开终端功能 - 支持文件和文件夹
|
// Open terminal function - supports files and folders
|
||||||
if (onOpenTerminal) {
|
if (onOpenTerminal) {
|
||||||
const targetPath = isSingleFile
|
const targetPath = isSingleFile
|
||||||
? files[0].type === "directory"
|
? files[0].type === "directory"
|
||||||
@@ -225,11 +225,11 @@ export function FileManagerContextMenu({
|
|||||||
? t("fileManager.openTerminalInFolder")
|
? t("fileManager.openTerminalInFolder")
|
||||||
: t("fileManager.openTerminalInFileLocation"),
|
: t("fileManager.openTerminalInFileLocation"),
|
||||||
action: () => onOpenTerminal(targetPath),
|
action: () => onOpenTerminal(targetPath),
|
||||||
shortcut: "Ctrl+T",
|
shortcut: "Ctrl+Shift+T",
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
// 运行可执行文件功能 - 仅对单个可执行文件显示
|
// Run executable file function - only show for single executable files
|
||||||
if (isSingleFile && hasExecutableFiles && onRunExecutable) {
|
if (isSingleFile && hasExecutableFiles && onRunExecutable) {
|
||||||
menuItems.push({
|
menuItems.push({
|
||||||
icon: <Play className="w-4 h-4" />,
|
icon: <Play className="w-4 h-4" />,
|
||||||
@@ -239,7 +239,7 @@ export function FileManagerContextMenu({
|
|||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
// 添加分隔符(如果有上述功能)
|
// Add separator (if above functions exist)
|
||||||
if (
|
if (
|
||||||
onOpenTerminal ||
|
onOpenTerminal ||
|
||||||
(isSingleFile && hasExecutableFiles && onRunExecutable)
|
(isSingleFile && hasExecutableFiles && onRunExecutable)
|
||||||
@@ -247,7 +247,7 @@ export function FileManagerContextMenu({
|
|||||||
menuItems.push({ separator: true } as MenuItem);
|
menuItems.push({ separator: true } as MenuItem);
|
||||||
}
|
}
|
||||||
|
|
||||||
// 预览功能
|
// Preview function
|
||||||
if (hasFiles && onPreview) {
|
if (hasFiles && onPreview) {
|
||||||
menuItems.push({
|
menuItems.push({
|
||||||
icon: <Eye className="w-4 h-4" />,
|
icon: <Eye className="w-4 h-4" />,
|
||||||
@@ -257,34 +257,19 @@ export function FileManagerContextMenu({
|
|||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
// 下载功能
|
// Download function - unified download that uses best available method
|
||||||
if (hasFiles && onDownload) {
|
if (hasFiles && onDragToDesktop) {
|
||||||
menuItems.push({
|
menuItems.push({
|
||||||
icon: <Download className="w-4 h-4" />,
|
icon: <Download className="w-4 h-4" />,
|
||||||
label: isMultipleFiles
|
label: isMultipleFiles
|
||||||
? t("fileManager.downloadFiles", { count: files.length })
|
? t("fileManager.downloadFiles", { count: files.length })
|
||||||
: t("fileManager.downloadFile"),
|
: t("fileManager.downloadFile"),
|
||||||
action: () => onDownload(files),
|
action: () => onDragToDesktop(),
|
||||||
shortcut: "Ctrl+D",
|
shortcut: "Ctrl+D",
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
// 拖拽到桌面菜单项(支持浏览器和桌面应用)
|
// PIN/UNPIN function - only show for single files
|
||||||
if (hasFiles && onDragToDesktop) {
|
|
||||||
const isModernBrowser = "showSaveFilePicker" in window;
|
|
||||||
menuItems.push({
|
|
||||||
icon: <ExternalLink className="w-4 h-4" />,
|
|
||||||
label: isMultipleFiles
|
|
||||||
? t("fileManager.saveFilesToSystem", { count: files.length })
|
|
||||||
: t("fileManager.saveToSystem"),
|
|
||||||
action: () => onDragToDesktop(),
|
|
||||||
shortcut: isModernBrowser
|
|
||||||
? t("fileManager.selectLocationToSave")
|
|
||||||
: t("fileManager.downloadToDefaultLocation"),
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
// PIN/UNPIN 功能 - 仅对单个文件显示
|
|
||||||
if (isSingleFile && files[0].type === "file") {
|
if (isSingleFile && files[0].type === "file") {
|
||||||
const isCurrentlyPinned = isPinned ? isPinned(files[0]) : false;
|
const isCurrentlyPinned = isPinned ? isPinned(files[0]) : false;
|
||||||
|
|
||||||
@@ -303,7 +288,7 @@ export function FileManagerContextMenu({
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// 添加文件夹快捷方式 - 仅对单个文件夹显示
|
// Add folder shortcut - only show for single folders
|
||||||
if (isSingleFile && files[0].type === "directory" && onAddShortcut) {
|
if (isSingleFile && files[0].type === "directory" && onAddShortcut) {
|
||||||
menuItems.push({
|
menuItems.push({
|
||||||
icon: <Bookmark className="w-4 h-4" />,
|
icon: <Bookmark className="w-4 h-4" />,
|
||||||
@@ -312,9 +297,9 @@ export function FileManagerContextMenu({
|
|||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
// 添加分隔符(如果有上述功能)
|
// Add separator (if above functions exist)
|
||||||
if (
|
if (
|
||||||
(hasFiles && (onPreview || onDownload || onDragToDesktop)) ||
|
(hasFiles && (onPreview || onDragToDesktop)) ||
|
||||||
(isSingleFile &&
|
(isSingleFile &&
|
||||||
files[0].type === "file" &&
|
files[0].type === "file" &&
|
||||||
(onPinFile || onUnpinFile)) ||
|
(onPinFile || onUnpinFile)) ||
|
||||||
@@ -323,17 +308,17 @@ export function FileManagerContextMenu({
|
|||||||
menuItems.push({ separator: true } as MenuItem);
|
menuItems.push({ separator: true } as MenuItem);
|
||||||
}
|
}
|
||||||
|
|
||||||
// 重命名功能
|
// Rename function
|
||||||
if (isSingleFile && onRename) {
|
if (isSingleFile && onRename) {
|
||||||
menuItems.push({
|
menuItems.push({
|
||||||
icon: <Edit3 className="w-4 h-4" />,
|
icon: <Edit3 className="w-4 h-4" />,
|
||||||
label: t("fileManager.rename"),
|
label: t("fileManager.rename"),
|
||||||
action: () => onRename(files[0]),
|
action: () => onRename(files[0]),
|
||||||
shortcut: "F2",
|
shortcut: "F6",
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
// 复制功能
|
// Copy function
|
||||||
if (onCopy) {
|
if (onCopy) {
|
||||||
menuItems.push({
|
menuItems.push({
|
||||||
icon: <Copy className="w-4 h-4" />,
|
icon: <Copy className="w-4 h-4" />,
|
||||||
@@ -345,7 +330,7 @@ export function FileManagerContextMenu({
|
|||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
// 剪切功能
|
// Cut function
|
||||||
if (onCut) {
|
if (onCut) {
|
||||||
menuItems.push({
|
menuItems.push({
|
||||||
icon: <Scissors className="w-4 h-4" />,
|
icon: <Scissors className="w-4 h-4" />,
|
||||||
@@ -357,12 +342,12 @@ export function FileManagerContextMenu({
|
|||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
// 添加分隔符(如果有编辑功能)
|
// Add separator (if edit functions exist)
|
||||||
if ((isSingleFile && onRename) || onCopy || onCut) {
|
if ((isSingleFile && onRename) || onCopy || onCut) {
|
||||||
menuItems.push({ separator: true } as MenuItem);
|
menuItems.push({ separator: true } as MenuItem);
|
||||||
}
|
}
|
||||||
|
|
||||||
// 删除功能
|
// Delete function
|
||||||
if (onDelete) {
|
if (onDelete) {
|
||||||
menuItems.push({
|
menuItems.push({
|
||||||
icon: <Trash2 className="w-4 h-4" />,
|
icon: <Trash2 className="w-4 h-4" />,
|
||||||
@@ -375,12 +360,12 @@ export function FileManagerContextMenu({
|
|||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
// 添加分隔符(如果有删除功能)
|
// Add separator (if delete function exists)
|
||||||
if (onDelete) {
|
if (onDelete) {
|
||||||
menuItems.push({ separator: true } as MenuItem);
|
menuItems.push({ separator: true } as MenuItem);
|
||||||
}
|
}
|
||||||
|
|
||||||
// 属性功能
|
// Properties function
|
||||||
if (isSingleFile && onProperties) {
|
if (isSingleFile && onProperties) {
|
||||||
menuItems.push({
|
menuItems.push({
|
||||||
icon: <Info className="w-4 h-4" />,
|
icon: <Info className="w-4 h-4" />,
|
||||||
@@ -389,19 +374,19 @@ export function FileManagerContextMenu({
|
|||||||
});
|
});
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
// 空白区域右键菜单
|
// Empty area right-click menu
|
||||||
|
|
||||||
// 在当前目录打开终端
|
// Open terminal in current directory
|
||||||
if (onOpenTerminal && currentPath) {
|
if (onOpenTerminal && currentPath) {
|
||||||
menuItems.push({
|
menuItems.push({
|
||||||
icon: <Terminal className="w-4 h-4" />,
|
icon: <Terminal className="w-4 h-4" />,
|
||||||
label: t("fileManager.openTerminalHere"),
|
label: t("fileManager.openTerminalHere"),
|
||||||
action: () => onOpenTerminal(currentPath),
|
action: () => onOpenTerminal(currentPath),
|
||||||
shortcut: "Ctrl+T",
|
shortcut: "Ctrl+Shift+T",
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
// 上传功能
|
// Upload function
|
||||||
if (onUpload) {
|
if (onUpload) {
|
||||||
menuItems.push({
|
menuItems.push({
|
||||||
icon: <Upload className="w-4 h-4" />,
|
icon: <Upload className="w-4 h-4" />,
|
||||||
@@ -411,12 +396,12 @@ export function FileManagerContextMenu({
|
|||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
// 添加分隔符(如果有终端或上传功能)
|
// Add separator (if terminal or upload functions exist)
|
||||||
if ((onOpenTerminal && currentPath) || onUpload) {
|
if ((onOpenTerminal && currentPath) || onUpload) {
|
||||||
menuItems.push({ separator: true } as MenuItem);
|
menuItems.push({ separator: true } as MenuItem);
|
||||||
}
|
}
|
||||||
|
|
||||||
// 新建文件夹
|
// New folder
|
||||||
if (onNewFolder) {
|
if (onNewFolder) {
|
||||||
menuItems.push({
|
menuItems.push({
|
||||||
icon: <FolderPlus className="w-4 h-4" />,
|
icon: <FolderPlus className="w-4 h-4" />,
|
||||||
@@ -426,7 +411,7 @@ export function FileManagerContextMenu({
|
|||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
// 新建文件
|
// New file
|
||||||
if (onNewFile) {
|
if (onNewFile) {
|
||||||
menuItems.push({
|
menuItems.push({
|
||||||
icon: <FilePlus className="w-4 h-4" />,
|
icon: <FilePlus className="w-4 h-4" />,
|
||||||
@@ -436,22 +421,22 @@ export function FileManagerContextMenu({
|
|||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
// 添加分隔符(如果有新建功能)
|
// Add separator (if new functions exist)
|
||||||
if (onNewFolder || onNewFile) {
|
if (onNewFolder || onNewFile) {
|
||||||
menuItems.push({ separator: true } as MenuItem);
|
menuItems.push({ separator: true } as MenuItem);
|
||||||
}
|
}
|
||||||
|
|
||||||
// 刷新功能
|
// Refresh function
|
||||||
if (onRefresh) {
|
if (onRefresh) {
|
||||||
menuItems.push({
|
menuItems.push({
|
||||||
icon: <RefreshCw className="w-4 h-4" />,
|
icon: <RefreshCw className="w-4 h-4" />,
|
||||||
label: t("fileManager.refresh"),
|
label: t("fileManager.refresh"),
|
||||||
action: onRefresh,
|
action: onRefresh,
|
||||||
shortcut: "F5",
|
shortcut: "Ctrl+Y",
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
// 粘贴功能
|
// Paste function
|
||||||
if (hasClipboard && onPaste) {
|
if (hasClipboard && onPaste) {
|
||||||
menuItems.push({
|
menuItems.push({
|
||||||
icon: <Clipboard className="w-4 h-4" />,
|
icon: <Clipboard className="w-4 h-4" />,
|
||||||
@@ -462,15 +447,15 @@ export function FileManagerContextMenu({
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// 过滤掉连续的分隔符
|
// Filter out consecutive separators
|
||||||
const filteredMenuItems = menuItems.filter((item, index) => {
|
const filteredMenuItems = menuItems.filter((item, index) => {
|
||||||
if (!item.separator) return true;
|
if (!item.separator) return true;
|
||||||
|
|
||||||
// 如果是分隔符,检查前一个和后一个是否也是分隔符
|
// If it's a separator, check if previous and next are also separators
|
||||||
const prevItem = index > 0 ? menuItems[index - 1] : null;
|
const prevItem = index > 0 ? menuItems[index - 1] : null;
|
||||||
const nextItem = index < menuItems.length - 1 ? menuItems[index + 1] : null;
|
const nextItem = index < menuItems.length - 1 ? menuItems[index + 1] : null;
|
||||||
|
|
||||||
// 如果前一个或后一个是分隔符,则过滤掉当前分隔符
|
// If previous or next is a separator, filter out current separator
|
||||||
if (prevItem?.separator || nextItem?.separator) {
|
if (prevItem?.separator || nextItem?.separator) {
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
@@ -478,7 +463,7 @@ export function FileManagerContextMenu({
|
|||||||
return true;
|
return true;
|
||||||
});
|
});
|
||||||
|
|
||||||
// 移除开头和结尾的分隔符
|
// Remove separators at beginning and end
|
||||||
const finalMenuItems = filteredMenuItems.filter((item, index) => {
|
const finalMenuItems = filteredMenuItems.filter((item, index) => {
|
||||||
if (!item.separator) return true;
|
if (!item.separator) return true;
|
||||||
return index > 0 && index < filteredMenuItems.length - 1;
|
return index > 0 && index < filteredMenuItems.length - 1;
|
||||||
@@ -486,13 +471,13 @@ export function FileManagerContextMenu({
|
|||||||
|
|
||||||
return (
|
return (
|
||||||
<>
|
<>
|
||||||
{/* 透明遮罩层用于捕获点击事件 */}
|
{/* Transparent overlay to capture click events */}
|
||||||
<div className="fixed inset-0 z-40" />
|
<div className="fixed inset-0 z-[99990]" />
|
||||||
|
|
||||||
{/* 菜单本体 */}
|
{/* Menu body */}
|
||||||
<div
|
<div
|
||||||
data-context-menu
|
data-context-menu
|
||||||
className="fixed bg-dark-bg border border-dark-border rounded-lg shadow-xl min-w-[180px] max-w-[250px] z-50 overflow-hidden"
|
className="fixed bg-dark-bg border border-dark-border rounded-lg shadow-xl min-w-[180px] max-w-[250px] z-[99995] overflow-hidden"
|
||||||
style={{
|
style={{
|
||||||
left: menuPosition.x,
|
left: menuPosition.x,
|
||||||
top: menuPosition.y,
|
top: menuPosition.y,
|
||||||
|
|||||||
@@ -320,16 +320,26 @@ export function FileManagerFileEditor({
|
|||||||
EditorView.theme({
|
EditorView.theme({
|
||||||
"&": {
|
"&": {
|
||||||
backgroundColor: "var(--color-dark-bg-darkest) !important",
|
backgroundColor: "var(--color-dark-bg-darkest) !important",
|
||||||
|
height: "100%",
|
||||||
},
|
},
|
||||||
".cm-gutters": {
|
".cm-gutters": {
|
||||||
backgroundColor: "var(--color-dark-bg) !important",
|
backgroundColor: "var(--color-dark-bg) !important",
|
||||||
},
|
},
|
||||||
|
".cm-scroller": {
|
||||||
|
overflow: "auto",
|
||||||
|
},
|
||||||
|
".cm-editor": {
|
||||||
|
height: "100%",
|
||||||
|
},
|
||||||
}),
|
}),
|
||||||
]}
|
]}
|
||||||
onChange={(value: any) => onContentChange(value)}
|
onChange={(value: any) => onContentChange(value)}
|
||||||
theme={undefined}
|
theme={undefined}
|
||||||
height="100%"
|
height="100%"
|
||||||
basicSetup={{ lineNumbers: true }}
|
basicSetup={{
|
||||||
|
lineNumbers: true,
|
||||||
|
scrollPastEnd: false,
|
||||||
|
}}
|
||||||
className="min-h-full min-w-full flex-1"
|
className="min-h-full min-w-full flex-1"
|
||||||
/>
|
/>
|
||||||
</div>
|
</div>
|
||||||
|
|||||||
@@ -25,12 +25,20 @@ import {
|
|||||||
import { useTranslation } from "react-i18next";
|
import { useTranslation } from "react-i18next";
|
||||||
import type { FileItem } from "../../../types/index.js";
|
import type { FileItem } from "../../../types/index.js";
|
||||||
|
|
||||||
// 格式化文件大小
|
// Linus-style data structure: separate creation intent from actual files
|
||||||
|
interface CreateIntent {
|
||||||
|
id: string;
|
||||||
|
type: 'file' | 'directory';
|
||||||
|
defaultName: string;
|
||||||
|
currentName: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Format file size
|
||||||
function formatFileSize(bytes?: number): string {
|
function formatFileSize(bytes?: number): string {
|
||||||
// 处理未定义或null的情况
|
// Handle undefined or null cases
|
||||||
if (bytes === undefined || bytes === null) return "-";
|
if (bytes === undefined || bytes === null) return "-";
|
||||||
|
|
||||||
// 0字节的文件显示为 "0 B"
|
// Display 0-byte files as "0 B"
|
||||||
if (bytes === 0) return "0 B";
|
if (bytes === 0) return "0 B";
|
||||||
|
|
||||||
const units = ["B", "KB", "MB", "GB", "TB"];
|
const units = ["B", "KB", "MB", "GB", "TB"];
|
||||||
@@ -42,7 +50,7 @@ function formatFileSize(bytes?: number): string {
|
|||||||
unitIndex++;
|
unitIndex++;
|
||||||
}
|
}
|
||||||
|
|
||||||
// 对于小于10的数值显示一位小数,大于10的显示整数
|
// Display one decimal place for values less than 10, integers for values greater than 10
|
||||||
const formattedSize =
|
const formattedSize =
|
||||||
size < 10 && unitIndex > 0 ? size.toFixed(1) : Math.round(size).toString();
|
size < 10 && unitIndex > 0 ? size.toFixed(1) : Math.round(size).toString();
|
||||||
|
|
||||||
@@ -84,6 +92,11 @@ interface FileManagerGridProps {
|
|||||||
onFileDiff?: (file1: FileItem, file2: FileItem) => void;
|
onFileDiff?: (file1: FileItem, file2: FileItem) => void;
|
||||||
onSystemDragStart?: (files: FileItem[]) => void;
|
onSystemDragStart?: (files: FileItem[]) => void;
|
||||||
onSystemDragEnd?: (e: DragEvent) => void;
|
onSystemDragEnd?: (e: DragEvent) => void;
|
||||||
|
hasClipboard?: boolean;
|
||||||
|
// Linus-style creation intent props
|
||||||
|
createIntent?: CreateIntent | null;
|
||||||
|
onConfirmCreate?: (name: string) => void;
|
||||||
|
onCancelCreate?: () => void;
|
||||||
}
|
}
|
||||||
|
|
||||||
const getFileIcon = (file: FileItem, viewMode: "grid" | "list" = "grid") => {
|
const getFileIcon = (file: FileItem, viewMode: "grid" | "list" = "grid") => {
|
||||||
@@ -182,19 +195,25 @@ export function FileManagerGrid({
|
|||||||
onFileDiff,
|
onFileDiff,
|
||||||
onSystemDragStart,
|
onSystemDragStart,
|
||||||
onSystemDragEnd,
|
onSystemDragEnd,
|
||||||
|
hasClipboard,
|
||||||
|
createIntent,
|
||||||
|
onConfirmCreate,
|
||||||
|
onCancelCreate,
|
||||||
}: FileManagerGridProps) {
|
}: FileManagerGridProps) {
|
||||||
const { t } = useTranslation();
|
const { t } = useTranslation();
|
||||||
const gridRef = useRef<HTMLDivElement>(null);
|
const gridRef = useRef<HTMLDivElement>(null);
|
||||||
const [editingName, setEditingName] = useState("");
|
const [editingName, setEditingName] = useState("");
|
||||||
|
|
||||||
// 统一拖拽状态管理
|
|
||||||
|
|
||||||
|
// Unified drag state management
|
||||||
const [dragState, setDragState] = useState<DragState>({
|
const [dragState, setDragState] = useState<DragState>({
|
||||||
type: "none",
|
type: "none",
|
||||||
files: [],
|
files: [],
|
||||||
counter: 0,
|
counter: 0,
|
||||||
});
|
});
|
||||||
|
|
||||||
// 全局鼠标移动监听 - 用于拖拽tooltip跟随
|
// Global mouse move listener - for drag tooltip following
|
||||||
useEffect(() => {
|
useEffect(() => {
|
||||||
const handleGlobalMouseMove = (e: MouseEvent) => {
|
const handleGlobalMouseMove = (e: MouseEvent) => {
|
||||||
if (dragState.type === "internal" && dragState.files.length > 0) {
|
if (dragState.type === "internal" && dragState.files.length > 0) {
|
||||||
@@ -214,11 +233,11 @@ export function FileManagerGrid({
|
|||||||
|
|
||||||
const editInputRef = useRef<HTMLInputElement>(null);
|
const editInputRef = useRef<HTMLInputElement>(null);
|
||||||
|
|
||||||
// 开始编辑时设置初始名称
|
// Set initial name when starting edit
|
||||||
useEffect(() => {
|
useEffect(() => {
|
||||||
if (editingFile) {
|
if (editingFile) {
|
||||||
setEditingName(editingFile.name);
|
setEditingName(editingFile.name);
|
||||||
// 延迟聚焦以确保DOM已更新
|
// Delay focus to ensure DOM is updated
|
||||||
setTimeout(() => {
|
setTimeout(() => {
|
||||||
editInputRef.current?.focus();
|
editInputRef.current?.focus();
|
||||||
editInputRef.current?.select();
|
editInputRef.current?.select();
|
||||||
@@ -226,7 +245,7 @@ export function FileManagerGrid({
|
|||||||
}
|
}
|
||||||
}, [editingFile]);
|
}, [editingFile]);
|
||||||
|
|
||||||
// 处理编辑确认
|
// Handle edit confirmation
|
||||||
const handleEditConfirm = () => {
|
const handleEditConfirm = () => {
|
||||||
if (
|
if (
|
||||||
editingFile &&
|
editingFile &&
|
||||||
@@ -239,13 +258,13 @@ export function FileManagerGrid({
|
|||||||
onCancelEdit?.();
|
onCancelEdit?.();
|
||||||
};
|
};
|
||||||
|
|
||||||
// 处理编辑取消
|
// Handle edit cancellation
|
||||||
const handleEditCancel = () => {
|
const handleEditCancel = () => {
|
||||||
setEditingName("");
|
setEditingName("");
|
||||||
onCancelEdit?.();
|
onCancelEdit?.();
|
||||||
};
|
};
|
||||||
|
|
||||||
// 处理输入框按键
|
// Handle input key events
|
||||||
const handleEditKeyDown = (e: React.KeyboardEvent) => {
|
const handleEditKeyDown = (e: React.KeyboardEvent) => {
|
||||||
if (e.key === "Enter") {
|
if (e.key === "Enter") {
|
||||||
e.preventDefault();
|
e.preventDefault();
|
||||||
@@ -256,9 +275,9 @@ export function FileManagerGrid({
|
|||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
// 文件拖拽处理函数
|
// File drag handling function
|
||||||
const handleFileDragStart = (e: React.DragEvent, file: FileItem) => {
|
const handleFileDragStart = (e: React.DragEvent, file: FileItem) => {
|
||||||
// 如果拖拽的文件已选中,则拖拽所有选中的文件
|
// If dragged file is selected, drag all selected files
|
||||||
const filesToDrag = selectedFiles.includes(file) ? selectedFiles : [file];
|
const filesToDrag = selectedFiles.includes(file) ? selectedFiles : [file];
|
||||||
|
|
||||||
setDragState({
|
setDragState({
|
||||||
@@ -268,14 +287,14 @@ export function FileManagerGrid({
|
|||||||
mousePosition: { x: e.clientX, y: e.clientY },
|
mousePosition: { x: e.clientX, y: e.clientY },
|
||||||
});
|
});
|
||||||
|
|
||||||
// 设置拖拽数据,添加内部拖拽标识
|
// Set drag data, add internal drag identifier
|
||||||
const dragData = {
|
const dragData = {
|
||||||
type: "internal_files",
|
type: "internal_files",
|
||||||
files: filesToDrag.map((f) => f.path),
|
files: filesToDrag.map((f) => f.path),
|
||||||
};
|
};
|
||||||
e.dataTransfer.setData("text/plain", JSON.stringify(dragData));
|
e.dataTransfer.setData("text/plain", JSON.stringify(dragData));
|
||||||
|
|
||||||
// 触发系统级拖拽开始
|
// Trigger system-level drag start
|
||||||
onSystemDragStart?.(filesToDrag);
|
onSystemDragStart?.(filesToDrag);
|
||||||
e.dataTransfer.effectAllowed = "move";
|
e.dataTransfer.effectAllowed = "move";
|
||||||
};
|
};
|
||||||
@@ -284,7 +303,7 @@ export function FileManagerGrid({
|
|||||||
e.preventDefault();
|
e.preventDefault();
|
||||||
e.stopPropagation();
|
e.stopPropagation();
|
||||||
|
|
||||||
// 只有拖拽到不同文件且不是被拖拽的文件时才设置目标
|
// Only set target when dragging to different file and not being dragged file
|
||||||
if (
|
if (
|
||||||
dragState.type === "internal" &&
|
dragState.type === "internal" &&
|
||||||
!dragState.files.some((f) => f.path === targetFile.path)
|
!dragState.files.some((f) => f.path === targetFile.path)
|
||||||
@@ -298,7 +317,7 @@ export function FileManagerGrid({
|
|||||||
e.preventDefault();
|
e.preventDefault();
|
||||||
e.stopPropagation();
|
e.stopPropagation();
|
||||||
|
|
||||||
// 清除拖拽目标高亮
|
// Clear drag target highlight
|
||||||
if (dragState.target?.path === targetFile.path) {
|
if (dragState.target?.path === targetFile.path) {
|
||||||
setDragState((prev) => ({ ...prev, target: undefined }));
|
setDragState((prev) => ({ ...prev, target: undefined }));
|
||||||
}
|
}
|
||||||
@@ -313,7 +332,7 @@ export function FileManagerGrid({
|
|||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
// 检查是否拖拽到自身
|
// Check if dragging to self
|
||||||
const isDroppingOnSelf = dragState.files.some(
|
const isDroppingOnSelf = dragState.files.some(
|
||||||
(f) => f.path === targetFile.path,
|
(f) => f.path === targetFile.path,
|
||||||
);
|
);
|
||||||
@@ -323,13 +342,13 @@ export function FileManagerGrid({
|
|||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
// 判断拖拽行为:
|
// Determine drag behavior:
|
||||||
// 1. 文件/文件夹 拖拽到 文件夹 = 移动操作
|
// 1. File/folder drag to folder = move operation
|
||||||
// 2. 单个文件 拖拽到 单个文件 = diff对比
|
// 2. Single file drag to single file = diff comparison
|
||||||
// 3. 其他情况 = 无效操作
|
// 3. Other cases = invalid operation
|
||||||
|
|
||||||
if (targetFile.type === "directory") {
|
if (targetFile.type === "directory") {
|
||||||
// 移动操作
|
// Move operation
|
||||||
console.log(
|
console.log(
|
||||||
"Moving files to directory:",
|
"Moving files to directory:",
|
||||||
dragState.files.map((f) => f.name),
|
dragState.files.map((f) => f.name),
|
||||||
@@ -342,7 +361,7 @@ export function FileManagerGrid({
|
|||||||
dragState.files.length === 1 &&
|
dragState.files.length === 1 &&
|
||||||
dragState.files[0].type === "file"
|
dragState.files[0].type === "file"
|
||||||
) {
|
) {
|
||||||
// diff对比操作
|
// Diff comparison operation
|
||||||
console.log(
|
console.log(
|
||||||
"Comparing files:",
|
"Comparing files:",
|
||||||
dragState.files[0].name,
|
dragState.files[0].name,
|
||||||
@@ -351,7 +370,7 @@ export function FileManagerGrid({
|
|||||||
);
|
);
|
||||||
onFileDiff?.(dragState.files[0], targetFile);
|
onFileDiff?.(dragState.files[0], targetFile);
|
||||||
} else {
|
} else {
|
||||||
// 无效操作,给用户提示
|
// Invalid operation, notify user
|
||||||
console.log("Invalid drag operation");
|
console.log("Invalid drag operation");
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -361,7 +380,7 @@ export function FileManagerGrid({
|
|||||||
const handleFileDragEnd = (e: React.DragEvent) => {
|
const handleFileDragEnd = (e: React.DragEvent) => {
|
||||||
setDragState({ type: "none", files: [], counter: 0 });
|
setDragState({ type: "none", files: [], counter: 0 });
|
||||||
|
|
||||||
// 触发系统级拖拽结束检测
|
// Trigger system-level drag end detection
|
||||||
onSystemDragEnd?.(e.nativeEvent);
|
onSystemDragEnd?.(e.nativeEvent);
|
||||||
};
|
};
|
||||||
|
|
||||||
@@ -378,17 +397,17 @@ export function FileManagerGrid({
|
|||||||
} | null>(null);
|
} | null>(null);
|
||||||
const [justFinishedSelecting, setJustFinishedSelecting] = useState(false);
|
const [justFinishedSelecting, setJustFinishedSelecting] = useState(false);
|
||||||
|
|
||||||
// 导航历史管理
|
// Navigation history management
|
||||||
const [navigationHistory, setNavigationHistory] = useState<string[]>([
|
const [navigationHistory, setNavigationHistory] = useState<string[]>([
|
||||||
currentPath,
|
currentPath,
|
||||||
]);
|
]);
|
||||||
const [historyIndex, setHistoryIndex] = useState(0);
|
const [historyIndex, setHistoryIndex] = useState(0);
|
||||||
|
|
||||||
// 路径编辑状态
|
// Path editing state
|
||||||
const [isEditingPath, setIsEditingPath] = useState(false);
|
const [isEditingPath, setIsEditingPath] = useState(false);
|
||||||
const [editPathValue, setEditPathValue] = useState(currentPath);
|
const [editPathValue, setEditPathValue] = useState(currentPath);
|
||||||
|
|
||||||
// 更新导航历史
|
// Update navigation history
|
||||||
useEffect(() => {
|
useEffect(() => {
|
||||||
const lastPath = navigationHistory[historyIndex];
|
const lastPath = navigationHistory[historyIndex];
|
||||||
if (currentPath !== lastPath) {
|
if (currentPath !== lastPath) {
|
||||||
@@ -399,7 +418,7 @@ export function FileManagerGrid({
|
|||||||
}
|
}
|
||||||
}, [currentPath]);
|
}, [currentPath]);
|
||||||
|
|
||||||
// 导航函数
|
// Navigation functions
|
||||||
const goBack = () => {
|
const goBack = () => {
|
||||||
if (historyIndex > 0) {
|
if (historyIndex > 0) {
|
||||||
const newIndex = historyIndex - 1;
|
const newIndex = historyIndex - 1;
|
||||||
@@ -427,7 +446,7 @@ export function FileManagerGrid({
|
|||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
// 路径导航
|
// Path navigation
|
||||||
const pathParts = currentPath.split("/").filter(Boolean);
|
const pathParts = currentPath.split("/").filter(Boolean);
|
||||||
const navigateToPath = (index: number) => {
|
const navigateToPath = (index: number) => {
|
||||||
if (index === -1) {
|
if (index === -1) {
|
||||||
@@ -438,7 +457,7 @@ export function FileManagerGrid({
|
|||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
// 路径编辑功能
|
// Path editing functionality
|
||||||
const startEditingPath = () => {
|
const startEditingPath = () => {
|
||||||
setEditPathValue(currentPath);
|
setEditPathValue(currentPath);
|
||||||
setIsEditingPath(true);
|
setIsEditingPath(true);
|
||||||
@@ -452,7 +471,7 @@ export function FileManagerGrid({
|
|||||||
const confirmEditingPath = () => {
|
const confirmEditingPath = () => {
|
||||||
const trimmedPath = editPathValue.trim();
|
const trimmedPath = editPathValue.trim();
|
||||||
if (trimmedPath) {
|
if (trimmedPath) {
|
||||||
// 确保路径以 / 开头
|
// Ensure path starts with /
|
||||||
const normalizedPath = trimmedPath.startsWith("/")
|
const normalizedPath = trimmedPath.startsWith("/")
|
||||||
? trimmedPath
|
? trimmedPath
|
||||||
: "/" + trimmedPath;
|
: "/" + trimmedPath;
|
||||||
@@ -471,24 +490,24 @@ export function FileManagerGrid({
|
|||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
// 同步editPathValue与currentPath
|
// Sync editPathValue with currentPath
|
||||||
useEffect(() => {
|
useEffect(() => {
|
||||||
if (!isEditingPath) {
|
if (!isEditingPath) {
|
||||||
setEditPathValue(currentPath);
|
setEditPathValue(currentPath);
|
||||||
}
|
}
|
||||||
}, [currentPath, isEditingPath]);
|
}, [currentPath, isEditingPath]);
|
||||||
|
|
||||||
// 拖放处理 - 区分内部文件拖拽和外部文件上传
|
// Drag and drop handling - distinguish internal file drag and external file upload
|
||||||
const handleDragEnter = useCallback(
|
const handleDragEnter = useCallback(
|
||||||
(e: React.DragEvent) => {
|
(e: React.DragEvent) => {
|
||||||
e.preventDefault();
|
e.preventDefault();
|
||||||
e.stopPropagation();
|
e.stopPropagation();
|
||||||
|
|
||||||
// 检查是否是内部文件拖拽
|
// Check if it's internal file drag
|
||||||
const isInternalDrag = dragState.type === "internal";
|
const isInternalDrag = dragState.type === "internal";
|
||||||
|
|
||||||
if (!isInternalDrag) {
|
if (!isInternalDrag) {
|
||||||
// 只有外部文件拖拽才显示上传提示
|
// Only show upload prompt for external file drag
|
||||||
setDragState((prev) => ({
|
setDragState((prev) => ({
|
||||||
...prev,
|
...prev,
|
||||||
type: "external",
|
type: "external",
|
||||||
@@ -507,7 +526,7 @@ export function FileManagerGrid({
|
|||||||
e.preventDefault();
|
e.preventDefault();
|
||||||
e.stopPropagation();
|
e.stopPropagation();
|
||||||
|
|
||||||
// 检查是否是内部文件拖拽
|
// Check if it's internal file drag
|
||||||
const isInternalDrag = dragState.type === "internal";
|
const isInternalDrag = dragState.type === "internal";
|
||||||
|
|
||||||
if (!isInternalDrag && dragState.type === "external") {
|
if (!isInternalDrag && dragState.type === "external") {
|
||||||
@@ -529,11 +548,11 @@ export function FileManagerGrid({
|
|||||||
e.preventDefault();
|
e.preventDefault();
|
||||||
e.stopPropagation();
|
e.stopPropagation();
|
||||||
|
|
||||||
// 检查是否是内部文件拖拽
|
// Check if it's internal file drag
|
||||||
const isInternalDrag = dragState.type === "internal";
|
const isInternalDrag = dragState.type === "internal";
|
||||||
|
|
||||||
if (isInternalDrag) {
|
if (isInternalDrag) {
|
||||||
// 更新鼠标位置
|
// Update mouse position
|
||||||
setDragState((prev) => ({
|
setDragState((prev) => ({
|
||||||
...prev,
|
...prev,
|
||||||
mousePosition: { x: e.clientX, y: e.clientY },
|
mousePosition: { x: e.clientX, y: e.clientY },
|
||||||
@@ -546,15 +565,15 @@ export function FileManagerGrid({
|
|||||||
[dragState.type],
|
[dragState.type],
|
||||||
);
|
);
|
||||||
|
|
||||||
// 滚轮事件处理,确保滚动正常工作
|
// Mouse wheel event handling, ensure scrolling works normally
|
||||||
const handleWheel = useCallback((e: React.WheelEvent) => {
|
const handleWheel = useCallback((e: React.WheelEvent) => {
|
||||||
// 不阻止默认滚动行为,让浏览器自己处理滚动
|
// Don't prevent default scroll behavior, let browser handle scrolling
|
||||||
e.stopPropagation();
|
e.stopPropagation();
|
||||||
}, []);
|
}, []);
|
||||||
|
|
||||||
// 框选功能实现
|
// Box selection functionality implementation
|
||||||
const handleMouseDown = useCallback((e: React.MouseEvent) => {
|
const handleMouseDown = useCallback((e: React.MouseEvent) => {
|
||||||
// 只在空白区域开始框选,避免干扰文件点击
|
// Only start box selection in empty area, avoid interfering with file clicks
|
||||||
if (e.target === e.currentTarget && e.button === 0) {
|
if (e.target === e.currentTarget && e.button === 0) {
|
||||||
e.preventDefault();
|
e.preventDefault();
|
||||||
const rect = (e.currentTarget as HTMLElement).getBoundingClientRect();
|
const rect = (e.currentTarget as HTMLElement).getBoundingClientRect();
|
||||||
@@ -565,7 +584,7 @@ export function FileManagerGrid({
|
|||||||
setSelectionStart({ x: startX, y: startY });
|
setSelectionStart({ x: startX, y: startY });
|
||||||
setSelectionRect({ x: startX, y: startY, width: 0, height: 0 });
|
setSelectionRect({ x: startX, y: startY, width: 0, height: 0 });
|
||||||
|
|
||||||
// 重置刚完成框选的标志,准备新的框选
|
// Reset flag for just completed selection, prepare for new selection
|
||||||
setJustFinishedSelecting(false);
|
setJustFinishedSelecting(false);
|
||||||
}
|
}
|
||||||
}, []);
|
}, []);
|
||||||
@@ -584,7 +603,7 @@ export function FileManagerGrid({
|
|||||||
|
|
||||||
setSelectionRect({ x, y, width, height });
|
setSelectionRect({ x, y, width, height });
|
||||||
|
|
||||||
// 检测与文件项的交集,进行实时选择
|
// Detect intersection with file items, perform real-time selection
|
||||||
if (gridRef.current) {
|
if (gridRef.current) {
|
||||||
const fileElements =
|
const fileElements =
|
||||||
gridRef.current.querySelectorAll("[data-file-path]");
|
gridRef.current.querySelectorAll("[data-file-path]");
|
||||||
@@ -594,7 +613,7 @@ export function FileManagerGrid({
|
|||||||
const elementRect = element.getBoundingClientRect();
|
const elementRect = element.getBoundingClientRect();
|
||||||
const containerRect = gridRef.current!.getBoundingClientRect();
|
const containerRect = gridRef.current!.getBoundingClientRect();
|
||||||
|
|
||||||
// 简化坐标计算 - 直接使用相对于容器的坐标
|
// Simplify coordinate calculation - directly use coordinates relative to container
|
||||||
const relativeElementRect = {
|
const relativeElementRect = {
|
||||||
left: elementRect.left - containerRect.left,
|
left: elementRect.left - containerRect.left,
|
||||||
top: elementRect.top - containerRect.top,
|
top: elementRect.top - containerRect.top,
|
||||||
@@ -602,7 +621,7 @@ export function FileManagerGrid({
|
|||||||
bottom: elementRect.bottom - containerRect.top,
|
bottom: elementRect.bottom - containerRect.top,
|
||||||
};
|
};
|
||||||
|
|
||||||
// 选择框坐标
|
// Selection box coordinates
|
||||||
const selectionBox = {
|
const selectionBox = {
|
||||||
left: x,
|
left: x,
|
||||||
top: y,
|
top: y,
|
||||||
@@ -610,7 +629,7 @@ export function FileManagerGrid({
|
|||||||
bottom: y + height,
|
bottom: y + height,
|
||||||
};
|
};
|
||||||
|
|
||||||
// 检查是否相交
|
// Check if intersecting
|
||||||
const intersects = !(
|
const intersects = !(
|
||||||
relativeElementRect.right < selectionBox.left ||
|
relativeElementRect.right < selectionBox.left ||
|
||||||
relativeElementRect.left > selectionBox.right ||
|
relativeElementRect.left > selectionBox.right ||
|
||||||
@@ -629,7 +648,7 @@ export function FileManagerGrid({
|
|||||||
|
|
||||||
console.log("Total selected paths:", selectedPaths.length);
|
console.log("Total selected paths:", selectedPaths.length);
|
||||||
|
|
||||||
// 更新选中的文件
|
// Update selected files
|
||||||
const newSelection = files.filter((file) =>
|
const newSelection = files.filter((file) =>
|
||||||
selectedPaths.includes(file.path),
|
selectedPaths.includes(file.path),
|
||||||
);
|
);
|
||||||
@@ -651,7 +670,7 @@ export function FileManagerGrid({
|
|||||||
setSelectionStart(null);
|
setSelectionStart(null);
|
||||||
setSelectionRect(null);
|
setSelectionRect(null);
|
||||||
|
|
||||||
// 只有当移动距离足够大时才认为是框选,否则是点击
|
// Only consider as box selection when movement distance is large enough, otherwise it's a click
|
||||||
const startPos = selectionStart;
|
const startPos = selectionStart;
|
||||||
if (startPos) {
|
if (startPos) {
|
||||||
const rect = gridRef.current?.getBoundingClientRect();
|
const rect = gridRef.current?.getBoundingClientRect();
|
||||||
@@ -663,13 +682,13 @@ export function FileManagerGrid({
|
|||||||
);
|
);
|
||||||
|
|
||||||
if (distance > 5) {
|
if (distance > 5) {
|
||||||
// 真正的框选,设置标志防止立即清空
|
// Real box selection, set flag to prevent immediate clearing
|
||||||
setJustFinishedSelecting(true);
|
setJustFinishedSelecting(true);
|
||||||
setTimeout(() => {
|
setTimeout(() => {
|
||||||
setJustFinishedSelecting(false);
|
setJustFinishedSelecting(false);
|
||||||
}, 50);
|
}, 50);
|
||||||
} else {
|
} else {
|
||||||
// 只是点击,不设置标志,让handleGridClick正常处理
|
// Just a click, don't set flag, let handleGridClick handle normally
|
||||||
setJustFinishedSelecting(false);
|
setJustFinishedSelecting(false);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -679,7 +698,7 @@ export function FileManagerGrid({
|
|||||||
[isSelecting, selectionStart],
|
[isSelecting, selectionStart],
|
||||||
);
|
);
|
||||||
|
|
||||||
// 全局鼠标事件监听,确保在容器外也能结束框选
|
// Global mouse event listener, ensure box selection can end outside container
|
||||||
useEffect(() => {
|
useEffect(() => {
|
||||||
const handleGlobalMouseUp = (e: MouseEvent) => {
|
const handleGlobalMouseUp = (e: MouseEvent) => {
|
||||||
if (isSelecting) {
|
if (isSelecting) {
|
||||||
@@ -687,7 +706,7 @@ export function FileManagerGrid({
|
|||||||
setSelectionStart(null);
|
setSelectionStart(null);
|
||||||
setSelectionRect(null);
|
setSelectionRect(null);
|
||||||
|
|
||||||
// 全局mouseup说明是拖拽框选,设置标志
|
// Global mouseup indicates drag box selection, set flag
|
||||||
setJustFinishedSelecting(true);
|
setJustFinishedSelecting(true);
|
||||||
setTimeout(() => {
|
setTimeout(() => {
|
||||||
setJustFinishedSelecting(false);
|
setJustFinishedSelecting(false);
|
||||||
@@ -727,31 +746,28 @@ export function FileManagerGrid({
|
|||||||
e.stopPropagation();
|
e.stopPropagation();
|
||||||
|
|
||||||
if (dragState.type === "internal") {
|
if (dragState.type === "internal") {
|
||||||
// 内部拖拽到空白区域:触发下载
|
// Internal drag to empty area: just cancel the drag operation
|
||||||
console.log(
|
console.log("Internal drag to empty area - cancelling drag operation");
|
||||||
"Internal drag to empty area detected, triggering download",
|
// Do not trigger download here - system drag end will handle it if truly outside window
|
||||||
);
|
setDragState({ type: "none", files: [], counter: 0 });
|
||||||
if (onDownload && dragState.files.length > 0) {
|
|
||||||
onDownload(dragState.files);
|
|
||||||
}
|
|
||||||
} else if (dragState.type === "external") {
|
} else if (dragState.type === "external") {
|
||||||
// 外部拖拽:处理文件上传
|
// External drag: handle file upload
|
||||||
if (onUpload && e.dataTransfer.files.length > 0) {
|
if (onUpload && e.dataTransfer.files.length > 0) {
|
||||||
onUpload(e.dataTransfer.files);
|
onUpload(e.dataTransfer.files);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// 重置拖拽状态
|
// Reset drag state
|
||||||
setDragState({ type: "none", files: [], counter: 0 });
|
setDragState({ type: "none", files: [], counter: 0 });
|
||||||
},
|
},
|
||||||
[onUpload, onDownload, dragState],
|
[onUpload, onDownload, dragState],
|
||||||
);
|
);
|
||||||
|
|
||||||
// 文件选择处理
|
// File selection handling
|
||||||
const handleFileClick = (file: FileItem, event: React.MouseEvent) => {
|
const handleFileClick = (file: FileItem, event: React.MouseEvent) => {
|
||||||
event.stopPropagation();
|
event.stopPropagation();
|
||||||
|
|
||||||
// 确保网格获得焦点以支持键盘事件
|
// Ensure grid gets focus to support keyboard events
|
||||||
if (gridRef.current) {
|
if (gridRef.current) {
|
||||||
gridRef.current.focus();
|
gridRef.current.focus();
|
||||||
}
|
}
|
||||||
@@ -764,11 +780,11 @@ export function FileManagerGrid({
|
|||||||
);
|
);
|
||||||
|
|
||||||
if (event.detail === 2) {
|
if (event.detail === 2) {
|
||||||
// 双击打开
|
// Double click to open
|
||||||
console.log("Double click - opening file");
|
console.log("Double click - opening file");
|
||||||
onFileOpen(file);
|
onFileOpen(file);
|
||||||
} else {
|
} else {
|
||||||
// 单击选择
|
// Single click to select
|
||||||
const multiSelect = event.ctrlKey || event.metaKey;
|
const multiSelect = event.ctrlKey || event.metaKey;
|
||||||
const rangeSelect = event.shiftKey;
|
const rangeSelect = event.shiftKey;
|
||||||
|
|
||||||
@@ -780,7 +796,7 @@ export function FileManagerGrid({
|
|||||||
);
|
);
|
||||||
|
|
||||||
if (rangeSelect && selectedFiles.length > 0) {
|
if (rangeSelect && selectedFiles.length > 0) {
|
||||||
// 范围选择 (Shift+点击)
|
// Range selection (Shift+click)
|
||||||
console.log("Range selection");
|
console.log("Range selection");
|
||||||
const lastSelected = selectedFiles[selectedFiles.length - 1];
|
const lastSelected = selectedFiles[selectedFiles.length - 1];
|
||||||
const currentIndex = files.findIndex((f) => f.path === file.path);
|
const currentIndex = files.findIndex((f) => f.path === file.path);
|
||||||
@@ -794,7 +810,7 @@ export function FileManagerGrid({
|
|||||||
onSelectionChange(rangeFiles);
|
onSelectionChange(rangeFiles);
|
||||||
}
|
}
|
||||||
} else if (multiSelect) {
|
} else if (multiSelect) {
|
||||||
// 多选 (Ctrl+点击)
|
// Multi-selection (Ctrl+click)
|
||||||
console.log("Multi selection");
|
console.log("Multi selection");
|
||||||
const isSelected = selectedFiles.some((f) => f.path === file.path);
|
const isSelected = selectedFiles.some((f) => f.path === file.path);
|
||||||
if (isSelected) {
|
if (isSelected) {
|
||||||
@@ -805,21 +821,21 @@ export function FileManagerGrid({
|
|||||||
onSelectionChange([...selectedFiles, file]);
|
onSelectionChange([...selectedFiles, file]);
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
// 单选
|
// Single selection
|
||||||
console.log("Single selection - should select only:", file.name);
|
console.log("Single selection - should select only:", file.name);
|
||||||
onSelectionChange([file]);
|
onSelectionChange([file]);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
// 空白区域点击取消选择
|
// Click empty area to cancel selection
|
||||||
const handleGridClick = (event: React.MouseEvent) => {
|
const handleGridClick = (event: React.MouseEvent) => {
|
||||||
// 确保网格获得焦点以支持键盘事件
|
// Ensure grid gets focus to support keyboard events
|
||||||
if (gridRef.current) {
|
if (gridRef.current) {
|
||||||
gridRef.current.focus();
|
gridRef.current.focus();
|
||||||
}
|
}
|
||||||
|
|
||||||
// 如果刚完成框选,不要清空选择
|
// If just completed box selection, don't clear selection
|
||||||
if (
|
if (
|
||||||
event.target === event.currentTarget &&
|
event.target === event.currentTarget &&
|
||||||
!isSelecting &&
|
!isSelecting &&
|
||||||
@@ -829,10 +845,10 @@ export function FileManagerGrid({
|
|||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
// 键盘支持
|
// Keyboard support
|
||||||
useEffect(() => {
|
useEffect(() => {
|
||||||
const handleKeyDown = (event: KeyboardEvent) => {
|
const handleKeyDown = (event: KeyboardEvent) => {
|
||||||
// 检查是否有输入框或可编辑元素获得焦点,如果有则跳过
|
// Check if input box or editable element has focus, skip if so
|
||||||
const activeElement = document.activeElement;
|
const activeElement = document.activeElement;
|
||||||
if (
|
if (
|
||||||
activeElement &&
|
activeElement &&
|
||||||
@@ -879,7 +895,7 @@ export function FileManagerGrid({
|
|||||||
break;
|
break;
|
||||||
case "v":
|
case "v":
|
||||||
case "V":
|
case "V":
|
||||||
if ((event.ctrlKey || event.metaKey) && onPaste) {
|
if ((event.ctrlKey || event.metaKey) && onPaste && hasClipboard) {
|
||||||
event.preventDefault();
|
event.preventDefault();
|
||||||
onPaste();
|
onPaste();
|
||||||
}
|
}
|
||||||
@@ -893,19 +909,22 @@ export function FileManagerGrid({
|
|||||||
break;
|
break;
|
||||||
case "Delete":
|
case "Delete":
|
||||||
if (selectedFiles.length > 0 && onDelete) {
|
if (selectedFiles.length > 0 && onDelete) {
|
||||||
// 触发删除操作
|
// Trigger delete operation
|
||||||
onDelete(selectedFiles);
|
onDelete(selectedFiles);
|
||||||
}
|
}
|
||||||
break;
|
break;
|
||||||
case "F2":
|
case "F6":
|
||||||
if (selectedFiles.length === 1) {
|
if (selectedFiles.length === 1 && onStartEdit) {
|
||||||
// 触发重命名
|
event.preventDefault();
|
||||||
console.log("Rename file:", selectedFiles[0]);
|
onStartEdit(selectedFiles[0]);
|
||||||
}
|
}
|
||||||
break;
|
break;
|
||||||
case "F5":
|
case "y":
|
||||||
|
case "Y":
|
||||||
|
if ((event.ctrlKey || event.metaKey)) {
|
||||||
event.preventDefault();
|
event.preventDefault();
|
||||||
onRefresh();
|
onRefresh();
|
||||||
|
}
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
@@ -937,9 +956,9 @@ export function FileManagerGrid({
|
|||||||
|
|
||||||
return (
|
return (
|
||||||
<div className="h-full flex flex-col bg-dark-bg overflow-hidden">
|
<div className="h-full flex flex-col bg-dark-bg overflow-hidden">
|
||||||
{/* 工具栏和路径导航 */}
|
{/* Toolbar and path navigation */}
|
||||||
<div className="flex-shrink-0 border-b border-dark-border">
|
<div className="flex-shrink-0 border-b border-dark-border">
|
||||||
{/* 导航按钮 */}
|
{/* Navigation buttons */}
|
||||||
<div className="flex items-center gap-1 p-2 border-b border-dark-border">
|
<div className="flex items-center gap-1 p-2 border-b border-dark-border">
|
||||||
<button
|
<button
|
||||||
onClick={goBack}
|
onClick={goBack}
|
||||||
@@ -984,10 +1003,10 @@ export function FileManagerGrid({
|
|||||||
</button>
|
</button>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
{/* 面包屑导航 */}
|
{/* Breadcrumb navigation */}
|
||||||
<div className="flex items-center px-3 py-2 text-sm">
|
<div className="flex items-center px-3 py-2 text-sm">
|
||||||
{isEditingPath ? (
|
{isEditingPath ? (
|
||||||
// 编辑模式:路径输入框
|
// Edit mode: path input box
|
||||||
<div className="flex-1 flex items-center gap-2">
|
<div className="flex-1 flex items-center gap-2">
|
||||||
<input
|
<input
|
||||||
type="text"
|
type="text"
|
||||||
@@ -1001,24 +1020,24 @@ export function FileManagerGrid({
|
|||||||
}
|
}
|
||||||
}}
|
}}
|
||||||
className="flex-1 px-2 py-1 bg-dark-hover border border-dark-border rounded text-sm focus:outline-none focus:ring-1 focus:ring-primary"
|
className="flex-1 px-2 py-1 bg-dark-hover border border-dark-border rounded text-sm focus:outline-none focus:ring-1 focus:ring-primary"
|
||||||
placeholder="输入路径..."
|
placeholder={t("fileManager.enterPath")}
|
||||||
autoFocus
|
autoFocus
|
||||||
/>
|
/>
|
||||||
<button
|
<button
|
||||||
onClick={confirmEditingPath}
|
onClick={confirmEditingPath}
|
||||||
className="px-2 py-1 bg-primary text-primary-foreground rounded text-xs hover:bg-primary/80"
|
className="px-2 py-1 bg-primary text-primary-foreground rounded text-xs hover:bg-primary/80"
|
||||||
>
|
>
|
||||||
确认
|
{t("fileManager.confirm")}
|
||||||
</button>
|
</button>
|
||||||
<button
|
<button
|
||||||
onClick={cancelEditingPath}
|
onClick={cancelEditingPath}
|
||||||
className="px-2 py-1 bg-secondary text-secondary-foreground rounded text-xs hover:bg-secondary/80"
|
className="px-2 py-1 bg-secondary text-secondary-foreground rounded text-xs hover:bg-secondary/80"
|
||||||
>
|
>
|
||||||
取消
|
{t("fileManager.cancel")}
|
||||||
</button>
|
</button>
|
||||||
</div>
|
</div>
|
||||||
) : (
|
) : (
|
||||||
// 查看模式:面包屑导航
|
// View mode: breadcrumb navigation
|
||||||
<>
|
<>
|
||||||
<button
|
<button
|
||||||
onClick={() => navigateToPath(-1)}
|
onClick={() => navigateToPath(-1)}
|
||||||
@@ -1042,7 +1061,7 @@ export function FileManagerGrid({
|
|||||||
<button
|
<button
|
||||||
onClick={startEditingPath}
|
onClick={startEditingPath}
|
||||||
className="ml-2 p-1 rounded hover:bg-dark-hover opacity-60 hover:opacity-100"
|
className="ml-2 p-1 rounded hover:bg-dark-hover opacity-60 hover:opacity-100"
|
||||||
title="编辑路径"
|
title={t("fileManager.editPath")}
|
||||||
>
|
>
|
||||||
<Edit className="w-3 h-3" />
|
<Edit className="w-3 h-3" />
|
||||||
</button>
|
</button>
|
||||||
@@ -1051,7 +1070,7 @@ export function FileManagerGrid({
|
|||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
{/* 主文件网格 - 滚动区域 */}
|
{/* Main file grid - scroll area */}
|
||||||
<div className="flex-1 relative overflow-hidden">
|
<div className="flex-1 relative overflow-hidden">
|
||||||
<div
|
<div
|
||||||
ref={gridRef}
|
ref={gridRef}
|
||||||
@@ -1072,7 +1091,7 @@ export function FileManagerGrid({
|
|||||||
onContextMenu={(e) => onContextMenu?.(e)}
|
onContextMenu={(e) => onContextMenu?.(e)}
|
||||||
tabIndex={0}
|
tabIndex={0}
|
||||||
>
|
>
|
||||||
{/* 拖拽提示覆盖层 */}
|
{/* Drag hint overlay */}
|
||||||
{dragState.type === "external" && (
|
{dragState.type === "external" && (
|
||||||
<div className="absolute inset-0 flex items-center justify-center bg-background/50 backdrop-blur-sm z-10 pointer-events-none animate-in fade-in-0">
|
<div className="absolute inset-0 flex items-center justify-center bg-background/50 backdrop-blur-sm z-10 pointer-events-none animate-in fade-in-0">
|
||||||
<div className="text-center p-8 bg-background/95 border-2 border-dashed border-primary rounded-lg shadow-lg">
|
<div className="text-center p-8 bg-background/95 border-2 border-dashed border-primary rounded-lg shadow-lg">
|
||||||
@@ -1087,7 +1106,7 @@ export function FileManagerGrid({
|
|||||||
</div>
|
</div>
|
||||||
)}
|
)}
|
||||||
|
|
||||||
{files.length === 0 ? (
|
{files.length === 0 && !createIntent ? (
|
||||||
<div className="h-full flex items-center justify-center p-8">
|
<div className="h-full flex items-center justify-center p-8">
|
||||||
<div className="text-center">
|
<div className="text-center">
|
||||||
<Folder className="w-16 h-16 mx-auto mb-4 text-muted-foreground/50" />
|
<Folder className="w-16 h-16 mx-auto mb-4 text-muted-foreground/50" />
|
||||||
@@ -1108,29 +1127,19 @@ export function FileManagerGrid({
|
|||||||
</div>
|
</div>
|
||||||
) : viewMode === "grid" ? (
|
) : viewMode === "grid" ? (
|
||||||
<div className="grid grid-cols-2 sm:grid-cols-3 md:grid-cols-4 lg:grid-cols-6 xl:grid-cols-8 gap-4">
|
<div className="grid grid-cols-2 sm:grid-cols-3 md:grid-cols-4 lg:grid-cols-6 xl:grid-cols-8 gap-4">
|
||||||
|
{/* Linus-style creation intent UI - pure separation */}
|
||||||
|
{createIntent && (
|
||||||
|
<CreateIntentGridItem
|
||||||
|
intent={createIntent}
|
||||||
|
onConfirm={onConfirmCreate}
|
||||||
|
onCancel={onCancelCreate}
|
||||||
|
/>
|
||||||
|
)}
|
||||||
{files.map((file) => {
|
{files.map((file) => {
|
||||||
const isSelected = selectedFiles.some(
|
const isSelected = selectedFiles.some(
|
||||||
(f) => f.path === file.path,
|
(f) => f.path === file.path,
|
||||||
);
|
);
|
||||||
|
|
||||||
// 详细调试路径比较
|
|
||||||
if (selectedFiles.length > 0) {
|
|
||||||
console.log(`\n=== File: ${file.name} ===`);
|
|
||||||
console.log(`File path: "${file.path}"`);
|
|
||||||
console.log(
|
|
||||||
`Selected files:`,
|
|
||||||
selectedFiles.map((f) => `"${f.path}"`),
|
|
||||||
);
|
|
||||||
console.log(
|
|
||||||
`Path comparison results:`,
|
|
||||||
selectedFiles.map(
|
|
||||||
(f) =>
|
|
||||||
`"${f.path}" === "${file.path}" -> ${f.path === file.path}`,
|
|
||||||
),
|
|
||||||
);
|
|
||||||
console.log(`Final isSelected: ${isSelected}`);
|
|
||||||
}
|
|
||||||
|
|
||||||
return (
|
return (
|
||||||
<div
|
<div
|
||||||
key={file.path}
|
key={file.path}
|
||||||
@@ -1141,7 +1150,7 @@ export function FileManagerGrid({
|
|||||||
"hover:bg-accent hover:text-accent-foreground border-2 border-transparent",
|
"hover:bg-accent hover:text-accent-foreground border-2 border-transparent",
|
||||||
isSelected && "bg-primary/20 border-primary",
|
isSelected && "bg-primary/20 border-primary",
|
||||||
dragState.target?.path === file.path &&
|
dragState.target?.path === file.path &&
|
||||||
"bg-muted border-primary border-dashed",
|
"bg-muted border-primary border-dashed relative z-10",
|
||||||
dragState.files.some((f) => f.path === file.path) &&
|
dragState.files.some((f) => f.path === file.path) &&
|
||||||
"opacity-50",
|
"opacity-50",
|
||||||
)}
|
)}
|
||||||
@@ -1159,10 +1168,10 @@ export function FileManagerGrid({
|
|||||||
onDragEnd={handleFileDragEnd}
|
onDragEnd={handleFileDragEnd}
|
||||||
>
|
>
|
||||||
<div className="flex flex-col items-center text-center">
|
<div className="flex flex-col items-center text-center">
|
||||||
{/* 文件图标 */}
|
{/* File icon */}
|
||||||
<div className="mb-2">{getFileIcon(file, viewMode)}</div>
|
<div className="mb-2">{getFileIcon(file, viewMode)}</div>
|
||||||
|
|
||||||
{/* 文件名 */}
|
{/* File name */}
|
||||||
<div className="w-full flex flex-col items-center">
|
<div className="w-full flex flex-col items-center">
|
||||||
{editingFile?.path === file.path ? (
|
{editingFile?.path === file.path ? (
|
||||||
<input
|
<input
|
||||||
@@ -1181,15 +1190,8 @@ export function FileManagerGrid({
|
|||||||
/>
|
/>
|
||||||
) : (
|
) : (
|
||||||
<p
|
<p
|
||||||
className="text-xs text-foreground truncate cursor-pointer hover:bg-accent px-1 py-0.5 rounded transition-colors duration-150 w-fit max-w-full text-center"
|
className="text-xs text-foreground break-words px-1 py-0.5 rounded text-center leading-tight w-full"
|
||||||
title={`${file.name} (点击重命名)`}
|
title={file.name}
|
||||||
onClick={(e) => {
|
|
||||||
// 阻止文件选择事件
|
|
||||||
if (onStartEdit) {
|
|
||||||
e.stopPropagation();
|
|
||||||
onStartEdit(file);
|
|
||||||
}
|
|
||||||
}}
|
|
||||||
>
|
>
|
||||||
{file.name}
|
{file.name}
|
||||||
</p>
|
</p>
|
||||||
@@ -1203,7 +1205,7 @@ export function FileManagerGrid({
|
|||||||
)}
|
)}
|
||||||
{file.type === "link" && file.linkTarget && (
|
{file.type === "link" && file.linkTarget && (
|
||||||
<p
|
<p
|
||||||
className="text-xs text-primary mt-1 truncate max-w-full"
|
className="text-xs text-primary mt-1 break-words w-full leading-tight"
|
||||||
title={file.linkTarget}
|
title={file.linkTarget}
|
||||||
>
|
>
|
||||||
→ {file.linkTarget}
|
→ {file.linkTarget}
|
||||||
@@ -1216,8 +1218,16 @@ export function FileManagerGrid({
|
|||||||
})}
|
})}
|
||||||
</div>
|
</div>
|
||||||
) : (
|
) : (
|
||||||
/* 列表视图 */
|
/* List view */
|
||||||
<div className="space-y-1">
|
<div className="space-y-1">
|
||||||
|
{/* Linus-style creation intent UI - list view */}
|
||||||
|
{createIntent && (
|
||||||
|
<CreateIntentListItem
|
||||||
|
intent={createIntent}
|
||||||
|
onConfirm={onConfirmCreate}
|
||||||
|
onCancel={onCancelCreate}
|
||||||
|
/>
|
||||||
|
)}
|
||||||
{files.map((file) => {
|
{files.map((file) => {
|
||||||
const isSelected = selectedFiles.some(
|
const isSelected = selectedFiles.some(
|
||||||
(f) => f.path === file.path,
|
(f) => f.path === file.path,
|
||||||
@@ -1233,7 +1243,7 @@ export function FileManagerGrid({
|
|||||||
"hover:bg-accent hover:text-accent-foreground",
|
"hover:bg-accent hover:text-accent-foreground",
|
||||||
isSelected && "bg-primary/20",
|
isSelected && "bg-primary/20",
|
||||||
dragState.target?.path === file.path &&
|
dragState.target?.path === file.path &&
|
||||||
"bg-muted border-primary border-dashed",
|
"bg-muted border-primary border-dashed relative z-10",
|
||||||
dragState.files.some((f) => f.path === file.path) &&
|
dragState.files.some((f) => f.path === file.path) &&
|
||||||
"opacity-50",
|
"opacity-50",
|
||||||
)}
|
)}
|
||||||
@@ -1249,12 +1259,12 @@ export function FileManagerGrid({
|
|||||||
onDrop={(e) => handleFileDrop(e, file)}
|
onDrop={(e) => handleFileDrop(e, file)}
|
||||||
onDragEnd={handleFileDragEnd}
|
onDragEnd={handleFileDragEnd}
|
||||||
>
|
>
|
||||||
{/* 文件图标 */}
|
{/* File icon */}
|
||||||
<div className="flex-shrink-0">
|
<div className="flex-shrink-0">
|
||||||
{getFileIcon(file, viewMode)}
|
{getFileIcon(file, viewMode)}
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
{/* 文件信息 */}
|
{/* File info */}
|
||||||
<div className="flex-1 min-w-0">
|
<div className="flex-1 min-w-0">
|
||||||
{editingFile?.path === file.path ? (
|
{editingFile?.path === file.path ? (
|
||||||
<input
|
<input
|
||||||
@@ -1273,22 +1283,15 @@ export function FileManagerGrid({
|
|||||||
/>
|
/>
|
||||||
) : (
|
) : (
|
||||||
<p
|
<p
|
||||||
className="text-sm text-foreground truncate cursor-pointer hover:bg-accent px-1 py-0.5 rounded transition-colors duration-150 w-fit max-w-full"
|
className="text-sm text-foreground break-words px-1 py-0.5 rounded leading-tight"
|
||||||
title={`${file.name} (点击重命名)`}
|
title={file.name}
|
||||||
onClick={(e) => {
|
|
||||||
// 阻止文件选择事件
|
|
||||||
if (onStartEdit) {
|
|
||||||
e.stopPropagation();
|
|
||||||
onStartEdit(file);
|
|
||||||
}
|
|
||||||
}}
|
|
||||||
>
|
>
|
||||||
{file.name}
|
{file.name}
|
||||||
</p>
|
</p>
|
||||||
)}
|
)}
|
||||||
{file.type === "link" && file.linkTarget && (
|
{file.type === "link" && file.linkTarget && (
|
||||||
<p
|
<p
|
||||||
className="text-xs text-primary truncate"
|
className="text-xs text-primary break-words leading-tight"
|
||||||
title={file.linkTarget}
|
title={file.linkTarget}
|
||||||
>
|
>
|
||||||
→ {file.linkTarget}
|
→ {file.linkTarget}
|
||||||
@@ -1301,7 +1304,7 @@ export function FileManagerGrid({
|
|||||||
)}
|
)}
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
{/* 文件大小 */}
|
{/* File size */}
|
||||||
<div className="flex-shrink-0 text-right">
|
<div className="flex-shrink-0 text-right">
|
||||||
{file.type === "file" &&
|
{file.type === "file" &&
|
||||||
file.size !== undefined &&
|
file.size !== undefined &&
|
||||||
@@ -1312,7 +1315,7 @@ export function FileManagerGrid({
|
|||||||
)}
|
)}
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
{/* 权限信息 */}
|
{/* Permission info */}
|
||||||
<div className="flex-shrink-0 text-right w-20">
|
<div className="flex-shrink-0 text-right w-20">
|
||||||
{file.permissions && (
|
{file.permissions && (
|
||||||
<p className="text-xs text-muted-foreground font-mono">
|
<p className="text-xs text-muted-foreground font-mono">
|
||||||
@@ -1326,7 +1329,7 @@ export function FileManagerGrid({
|
|||||||
</div>
|
</div>
|
||||||
)}
|
)}
|
||||||
|
|
||||||
{/* 框选矩形 */}
|
{/* Selection rectangle */}
|
||||||
{isSelecting && selectionRect && (
|
{isSelecting && selectionRect && (
|
||||||
<div
|
<div
|
||||||
className="absolute pointer-events-none border-2 border-primary bg-primary/10 z-50"
|
className="absolute pointer-events-none border-2 border-primary bg-primary/10 z-50"
|
||||||
@@ -1341,7 +1344,7 @@ export function FileManagerGrid({
|
|||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
{/* 状态栏 */}
|
{/* Status bar */}
|
||||||
<div className="flex-shrink-0 border-t border-dark-border px-4 py-2 text-xs text-muted-foreground">
|
<div className="flex-shrink-0 border-t border-dark-border px-4 py-2 text-xs text-muted-foreground">
|
||||||
<div className="flex justify-between items-center">
|
<div className="flex justify-between items-center">
|
||||||
<span>{t("fileManager.itemCount", { count: files.length })}</span>
|
<span>{t("fileManager.itemCount", { count: files.length })}</span>
|
||||||
@@ -1353,15 +1356,15 @@ export function FileManagerGrid({
|
|||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
{/* 拖拽跟随tooltip */}
|
{/* Drag following tooltip */}
|
||||||
{dragState.type === "internal" &&
|
{dragState.type === "internal" &&
|
||||||
dragState.files.length > 0 &&
|
dragState.files.length > 0 &&
|
||||||
dragState.mousePosition && (
|
dragState.mousePosition && (
|
||||||
<div
|
<div
|
||||||
className="fixed z-50 pointer-events-none"
|
className="fixed z-[99999] pointer-events-none"
|
||||||
style={{
|
style={{
|
||||||
left: dragState.mousePosition.x + 16,
|
left: dragState.mousePosition.x + 24,
|
||||||
top: dragState.mousePosition.y - 8,
|
top: dragState.mousePosition.y - 40,
|
||||||
}}
|
}}
|
||||||
>
|
>
|
||||||
<div className="bg-background border border-border rounded-md shadow-md px-3 py-2 flex items-center gap-2">
|
<div className="bg-background border border-border rounded-md shadow-md px-3 py-2 flex items-center gap-2">
|
||||||
@@ -1370,14 +1373,14 @@ export function FileManagerGrid({
|
|||||||
<>
|
<>
|
||||||
<Move className="w-4 h-4 text-blue-500" />
|
<Move className="w-4 h-4 text-blue-500" />
|
||||||
<span className="text-sm font-medium text-foreground">
|
<span className="text-sm font-medium text-foreground">
|
||||||
移动到 {dragState.target.name}
|
Move to {dragState.target.name}
|
||||||
</span>
|
</span>
|
||||||
</>
|
</>
|
||||||
) : (
|
) : (
|
||||||
<>
|
<>
|
||||||
<GitCompare className="w-4 h-4 text-purple-500" />
|
<GitCompare className="w-4 h-4 text-purple-500" />
|
||||||
<span className="text-sm font-medium text-foreground">
|
<span className="text-sm font-medium text-foreground">
|
||||||
与 {dragState.target.name} 进行diff对比
|
Diff compare with {dragState.target.name}
|
||||||
</span>
|
</span>
|
||||||
</>
|
</>
|
||||||
)
|
)
|
||||||
@@ -1385,7 +1388,7 @@ export function FileManagerGrid({
|
|||||||
<>
|
<>
|
||||||
<Download className="w-4 h-4 text-green-500" />
|
<Download className="w-4 h-4 text-green-500" />
|
||||||
<span className="text-sm font-medium text-foreground">
|
<span className="text-sm font-medium text-foreground">
|
||||||
拖到窗口外下载 ({dragState.files.length} 个文件)
|
Drag outside window to download ({dragState.files.length} files)
|
||||||
</span>
|
</span>
|
||||||
</>
|
</>
|
||||||
)}
|
)}
|
||||||
@@ -1395,3 +1398,109 @@ export function FileManagerGrid({
|
|||||||
</div>
|
</div>
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Linus-style creation intent component: Grid view
|
||||||
|
function CreateIntentGridItem({
|
||||||
|
intent,
|
||||||
|
onConfirm,
|
||||||
|
onCancel,
|
||||||
|
}: {
|
||||||
|
intent: CreateIntent;
|
||||||
|
onConfirm?: (name: string) => void;
|
||||||
|
onCancel?: () => void;
|
||||||
|
}) {
|
||||||
|
const { t } = useTranslation();
|
||||||
|
const [inputName, setInputName] = useState(intent.currentName);
|
||||||
|
const inputRef = useRef<HTMLInputElement>(null);
|
||||||
|
|
||||||
|
useEffect(() => {
|
||||||
|
inputRef.current?.focus();
|
||||||
|
inputRef.current?.select();
|
||||||
|
}, []);
|
||||||
|
|
||||||
|
const handleKeyDown = (e: React.KeyboardEvent) => {
|
||||||
|
if (e.key === "Enter") {
|
||||||
|
e.preventDefault();
|
||||||
|
onConfirm?.(inputName.trim());
|
||||||
|
} else if (e.key === "Escape") {
|
||||||
|
e.preventDefault();
|
||||||
|
onCancel?.();
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
return (
|
||||||
|
<div className="group p-3 rounded-lg border-2 border-dashed border-primary bg-primary/10 transition-all">
|
||||||
|
<div className="flex flex-col items-center text-center">
|
||||||
|
<div className="mb-2">
|
||||||
|
{intent.type === 'directory' ? (
|
||||||
|
<Folder className="w-8 h-8 text-primary" />
|
||||||
|
) : (
|
||||||
|
<File className="w-8 h-8 text-primary" />
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
<input
|
||||||
|
ref={inputRef}
|
||||||
|
type="text"
|
||||||
|
value={inputName}
|
||||||
|
onChange={(e) => setInputName(e.target.value)}
|
||||||
|
onKeyDown={handleKeyDown}
|
||||||
|
onBlur={() => onConfirm?.(inputName.trim())}
|
||||||
|
className="w-full max-w-[120px] rounded-md border border-gray-300 dark:border-gray-600 bg-white dark:bg-gray-800 px-2 py-1 text-xs text-center text-foreground placeholder:text-muted-foreground focus-visible:border-ring focus-visible:ring-ring/50 focus-visible:ring-[2px] outline-none"
|
||||||
|
placeholder={intent.type === 'directory' ? t('fileManager.folderName') : t('fileManager.fileName')}
|
||||||
|
/>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Linus-style creation intent component: List view
|
||||||
|
function CreateIntentListItem({
|
||||||
|
intent,
|
||||||
|
onConfirm,
|
||||||
|
onCancel,
|
||||||
|
}: {
|
||||||
|
intent: CreateIntent;
|
||||||
|
onConfirm?: (name: string) => void;
|
||||||
|
onCancel?: () => void;
|
||||||
|
}) {
|
||||||
|
const { t } = useTranslation();
|
||||||
|
const [inputName, setInputName] = useState(intent.currentName);
|
||||||
|
const inputRef = useRef<HTMLInputElement>(null);
|
||||||
|
|
||||||
|
useEffect(() => {
|
||||||
|
inputRef.current?.focus();
|
||||||
|
inputRef.current?.select();
|
||||||
|
}, []);
|
||||||
|
|
||||||
|
const handleKeyDown = (e: React.KeyboardEvent) => {
|
||||||
|
if (e.key === "Enter") {
|
||||||
|
e.preventDefault();
|
||||||
|
onConfirm?.(inputName.trim());
|
||||||
|
} else if (e.key === "Escape") {
|
||||||
|
e.preventDefault();
|
||||||
|
onCancel?.();
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
return (
|
||||||
|
<div className="flex items-center gap-3 p-2 rounded border-2 border-dashed border-primary bg-primary/10 transition-all">
|
||||||
|
<div className="flex-shrink-0">
|
||||||
|
{intent.type === 'directory' ? (
|
||||||
|
<Folder className="w-6 h-6 text-primary" />
|
||||||
|
) : (
|
||||||
|
<File className="w-6 h-6 text-primary" />
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
<input
|
||||||
|
ref={inputRef}
|
||||||
|
type="text"
|
||||||
|
value={inputName}
|
||||||
|
onChange={(e) => setInputName(e.target.value)}
|
||||||
|
onKeyDown={handleKeyDown}
|
||||||
|
onBlur={() => onConfirm?.(inputName.trim())}
|
||||||
|
className="flex-1 min-w-0 max-w-[200px] rounded-md border border-gray-300 dark:border-gray-600 bg-white dark:bg-gray-800 px-2 py-1 text-sm text-foreground placeholder:text-muted-foreground focus-visible:border-ring focus-visible:ring-ring/50 focus-visible:ring-[2px] outline-none"
|
||||||
|
placeholder={intent.type === 'directory' ? t('fileManager.folderName') : t('fileManager.fileName')}
|
||||||
|
/>
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|||||||
@@ -80,12 +80,12 @@ export function FileManagerOperations({
|
|||||||
);
|
);
|
||||||
|
|
||||||
try {
|
try {
|
||||||
// 读取文件内容 - 支持文本和二进制文件
|
// Read file content - support text and binary files
|
||||||
const content = await new Promise<string>((resolve, reject) => {
|
const content = await new Promise<string>((resolve, reject) => {
|
||||||
const reader = new FileReader();
|
const reader = new FileReader();
|
||||||
reader.onerror = () => reject(reader.error);
|
reader.onerror = () => reject(reader.error);
|
||||||
|
|
||||||
// 检查文件类型,决定读取方式
|
// Check file type to determine reading method
|
||||||
const isTextFile =
|
const isTextFile =
|
||||||
uploadFile.type.startsWith("text/") ||
|
uploadFile.type.startsWith("text/") ||
|
||||||
uploadFile.type === "application/json" ||
|
uploadFile.type === "application/json" ||
|
||||||
|
|||||||
@@ -38,9 +38,9 @@ interface FileManagerSidebarProps {
|
|||||||
currentPath: string;
|
currentPath: string;
|
||||||
onPathChange: (path: string) => void;
|
onPathChange: (path: string) => void;
|
||||||
onLoadDirectory?: (path: string) => void;
|
onLoadDirectory?: (path: string) => void;
|
||||||
onFileOpen?: (file: SidebarItem) => void; // 新增:处理文件打开
|
onFileOpen?: (file: SidebarItem) => void; // Added: handle file opening
|
||||||
sshSessionId?: string;
|
sshSessionId?: string;
|
||||||
refreshTrigger?: number; // 用于触发数据刷新
|
refreshTrigger?: number; // Used to trigger data refresh
|
||||||
}
|
}
|
||||||
|
|
||||||
export function FileManagerSidebar({
|
export function FileManagerSidebar({
|
||||||
@@ -61,7 +61,7 @@ export function FileManagerSidebar({
|
|||||||
new Set(["root"]),
|
new Set(["root"]),
|
||||||
);
|
);
|
||||||
|
|
||||||
// 右键菜单状态
|
// Right-click menu state
|
||||||
const [contextMenu, setContextMenu] = useState<{
|
const [contextMenu, setContextMenu] = useState<{
|
||||||
x: number;
|
x: number;
|
||||||
y: number;
|
y: number;
|
||||||
@@ -74,12 +74,12 @@ export function FileManagerSidebar({
|
|||||||
item: null,
|
item: null,
|
||||||
});
|
});
|
||||||
|
|
||||||
// 加载快捷功能数据
|
// Load quick access data
|
||||||
useEffect(() => {
|
useEffect(() => {
|
||||||
loadQuickAccessData();
|
loadQuickAccessData();
|
||||||
}, [currentHost, refreshTrigger]);
|
}, [currentHost, refreshTrigger]);
|
||||||
|
|
||||||
// 加载目录树(依赖sshSessionId)
|
// Load directory tree (depends on sshSessionId)
|
||||||
useEffect(() => {
|
useEffect(() => {
|
||||||
if (sshSessionId) {
|
if (sshSessionId) {
|
||||||
loadDirectoryTree();
|
loadDirectoryTree();
|
||||||
@@ -90,7 +90,7 @@ export function FileManagerSidebar({
|
|||||||
if (!currentHost?.id) return;
|
if (!currentHost?.id) return;
|
||||||
|
|
||||||
try {
|
try {
|
||||||
// 加载最近访问文件(限制5个)
|
// Load recent files (limit to 5)
|
||||||
const recentData = await getRecentFiles(currentHost.id);
|
const recentData = await getRecentFiles(currentHost.id);
|
||||||
const recentItems = recentData.slice(0, 5).map((item: any) => ({
|
const recentItems = recentData.slice(0, 5).map((item: any) => ({
|
||||||
id: `recent-${item.id}`,
|
id: `recent-${item.id}`,
|
||||||
@@ -101,7 +101,7 @@ export function FileManagerSidebar({
|
|||||||
}));
|
}));
|
||||||
setRecentItems(recentItems);
|
setRecentItems(recentItems);
|
||||||
|
|
||||||
// 加载固定文件
|
// Load pinned files
|
||||||
const pinnedData = await getPinnedFiles(currentHost.id);
|
const pinnedData = await getPinnedFiles(currentHost.id);
|
||||||
const pinnedItems = pinnedData.map((item: any) => ({
|
const pinnedItems = pinnedData.map((item: any) => ({
|
||||||
id: `pinned-${item.id}`,
|
id: `pinned-${item.id}`,
|
||||||
@@ -111,7 +111,7 @@ export function FileManagerSidebar({
|
|||||||
}));
|
}));
|
||||||
setPinnedItems(pinnedItems);
|
setPinnedItems(pinnedItems);
|
||||||
|
|
||||||
// 加载文件夹快捷方式
|
// Load folder shortcuts
|
||||||
const shortcutData = await getFolderShortcuts(currentHost.id);
|
const shortcutData = await getFolderShortcuts(currentHost.id);
|
||||||
const shortcutItems = shortcutData.map((item: any) => ({
|
const shortcutItems = shortcutData.map((item: any) => ({
|
||||||
id: `shortcut-${item.id}`,
|
id: `shortcut-${item.id}`,
|
||||||
@@ -122,20 +122,20 @@ export function FileManagerSidebar({
|
|||||||
setShortcuts(shortcutItems);
|
setShortcuts(shortcutItems);
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
console.error("Failed to load quick access data:", error);
|
console.error("Failed to load quick access data:", error);
|
||||||
// 如果加载失败,保持空数组
|
// If loading fails, keep empty arrays
|
||||||
setRecentItems([]);
|
setRecentItems([]);
|
||||||
setPinnedItems([]);
|
setPinnedItems([]);
|
||||||
setShortcuts([]);
|
setShortcuts([]);
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
// 删除功能实现
|
// Delete functionality implementation
|
||||||
const handleRemoveRecentFile = async (item: SidebarItem) => {
|
const handleRemoveRecentFile = async (item: SidebarItem) => {
|
||||||
if (!currentHost?.id) return;
|
if (!currentHost?.id) return;
|
||||||
|
|
||||||
try {
|
try {
|
||||||
await removeRecentFile(currentHost.id, item.path);
|
await removeRecentFile(currentHost.id, item.path);
|
||||||
loadQuickAccessData(); // 重新加载数据
|
loadQuickAccessData(); // Reload data
|
||||||
toast.success(
|
toast.success(
|
||||||
t("fileManager.removedFromRecentFiles", { name: item.name }),
|
t("fileManager.removedFromRecentFiles", { name: item.name }),
|
||||||
);
|
);
|
||||||
@@ -150,7 +150,7 @@ export function FileManagerSidebar({
|
|||||||
|
|
||||||
try {
|
try {
|
||||||
await removePinnedFile(currentHost.id, item.path);
|
await removePinnedFile(currentHost.id, item.path);
|
||||||
loadQuickAccessData(); // 重新加载数据
|
loadQuickAccessData(); // Reload data
|
||||||
toast.success(t("fileManager.unpinnedSuccessfully", { name: item.name }));
|
toast.success(t("fileManager.unpinnedSuccessfully", { name: item.name }));
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
console.error("Failed to unpin file:", error);
|
console.error("Failed to unpin file:", error);
|
||||||
@@ -163,7 +163,7 @@ export function FileManagerSidebar({
|
|||||||
|
|
||||||
try {
|
try {
|
||||||
await removeFolderShortcut(currentHost.id, item.path);
|
await removeFolderShortcut(currentHost.id, item.path);
|
||||||
loadQuickAccessData(); // 重新加载数据
|
loadQuickAccessData(); // Reload data
|
||||||
toast.success(t("fileManager.removedShortcut", { name: item.name }));
|
toast.success(t("fileManager.removedShortcut", { name: item.name }));
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
console.error("Failed to remove shortcut:", error);
|
console.error("Failed to remove shortcut:", error);
|
||||||
@@ -175,11 +175,11 @@ export function FileManagerSidebar({
|
|||||||
if (!currentHost?.id || recentItems.length === 0) return;
|
if (!currentHost?.id || recentItems.length === 0) return;
|
||||||
|
|
||||||
try {
|
try {
|
||||||
// 批量删除所有recent文件
|
// Batch delete all recent files
|
||||||
await Promise.all(
|
await Promise.all(
|
||||||
recentItems.map((item) => removeRecentFile(currentHost.id, item.path)),
|
recentItems.map((item) => removeRecentFile(currentHost.id, item.path)),
|
||||||
);
|
);
|
||||||
loadQuickAccessData(); // 重新加载数据
|
loadQuickAccessData(); // Reload data
|
||||||
toast.success(t("fileManager.clearedAllRecentFiles"));
|
toast.success(t("fileManager.clearedAllRecentFiles"));
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
console.error("Failed to clear recent files:", error);
|
console.error("Failed to clear recent files:", error);
|
||||||
@@ -187,7 +187,7 @@ export function FileManagerSidebar({
|
|||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
// 右键菜单处理
|
// Right-click menu handling
|
||||||
const handleContextMenu = (e: React.MouseEvent, item: SidebarItem) => {
|
const handleContextMenu = (e: React.MouseEvent, item: SidebarItem) => {
|
||||||
e.preventDefault();
|
e.preventDefault();
|
||||||
e.stopPropagation();
|
e.stopPropagation();
|
||||||
@@ -204,7 +204,7 @@ export function FileManagerSidebar({
|
|||||||
setContextMenu((prev) => ({ ...prev, isVisible: false, item: null }));
|
setContextMenu((prev) => ({ ...prev, isVisible: false, item: null }));
|
||||||
};
|
};
|
||||||
|
|
||||||
// 点击外部关闭菜单
|
// Click outside to close menu
|
||||||
useEffect(() => {
|
useEffect(() => {
|
||||||
if (!contextMenu.isVisible) return;
|
if (!contextMenu.isVisible) return;
|
||||||
|
|
||||||
@@ -223,7 +223,7 @@ export function FileManagerSidebar({
|
|||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
// 延迟添加监听器,避免立即触发
|
// Delay adding listeners to avoid immediate trigger
|
||||||
const timeoutId = setTimeout(() => {
|
const timeoutId = setTimeout(() => {
|
||||||
document.addEventListener("mousedown", handleClickOutside);
|
document.addEventListener("mousedown", handleClickOutside);
|
||||||
document.addEventListener("keydown", handleKeyDown);
|
document.addEventListener("keydown", handleKeyDown);
|
||||||
@@ -240,10 +240,10 @@ export function FileManagerSidebar({
|
|||||||
if (!sshSessionId) return;
|
if (!sshSessionId) return;
|
||||||
|
|
||||||
try {
|
try {
|
||||||
// 加载根目录
|
// Load root directory
|
||||||
const response = await listSSHFiles(sshSessionId, "/");
|
const response = await listSSHFiles(sshSessionId, "/");
|
||||||
|
|
||||||
// listSSHFiles 现在总是返回 {files: Array, path: string} 格式
|
// listSSHFiles now always returns {files: Array, path: string} format
|
||||||
const rootFiles = response.files || [];
|
const rootFiles = response.files || [];
|
||||||
const rootFolders = rootFiles.filter(
|
const rootFolders = rootFiles.filter(
|
||||||
(item: any) => item.type === "directory",
|
(item: any) => item.type === "directory",
|
||||||
@@ -255,7 +255,7 @@ export function FileManagerSidebar({
|
|||||||
path: folder.path,
|
path: folder.path,
|
||||||
type: "folder" as const,
|
type: "folder" as const,
|
||||||
isExpanded: false,
|
isExpanded: false,
|
||||||
children: [], // 子目录将按需加载
|
children: [], // Subdirectories will be loaded on demand
|
||||||
}));
|
}));
|
||||||
|
|
||||||
setDirectoryTree([
|
setDirectoryTree([
|
||||||
@@ -270,7 +270,7 @@ export function FileManagerSidebar({
|
|||||||
]);
|
]);
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
console.error("Failed to load directory tree:", error);
|
console.error("Failed to load directory tree:", error);
|
||||||
// 如果加载失败,显示简单的根目录
|
// If loading fails, show simple root directory
|
||||||
setDirectoryTree([
|
setDirectoryTree([
|
||||||
{
|
{
|
||||||
id: "root",
|
id: "root",
|
||||||
@@ -289,17 +289,17 @@ export function FileManagerSidebar({
|
|||||||
toggleFolder(item.id, item.path);
|
toggleFolder(item.id, item.path);
|
||||||
onPathChange(item.path);
|
onPathChange(item.path);
|
||||||
} else if (item.type === "recent" || item.type === "pinned") {
|
} else if (item.type === "recent" || item.type === "pinned") {
|
||||||
// 对于文件类型,调用文件打开回调
|
// For file types, call file open callback
|
||||||
if (onFileOpen) {
|
if (onFileOpen) {
|
||||||
onFileOpen(item);
|
onFileOpen(item);
|
||||||
} else {
|
} else {
|
||||||
// 如果没有文件打开回调,切换到文件所在目录
|
// If no file open callback, switch to file directory
|
||||||
const directory =
|
const directory =
|
||||||
item.path.substring(0, item.path.lastIndexOf("/")) || "/";
|
item.path.substring(0, item.path.lastIndexOf("/")) || "/";
|
||||||
onPathChange(directory);
|
onPathChange(directory);
|
||||||
}
|
}
|
||||||
} else if (item.type === "shortcut") {
|
} else if (item.type === "shortcut") {
|
||||||
// 文件夹快捷方式直接切换到目录
|
// Folder shortcuts directly switch to directory
|
||||||
onPathChange(item.path);
|
onPathChange(item.path);
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
@@ -312,12 +312,12 @@ export function FileManagerSidebar({
|
|||||||
} else {
|
} else {
|
||||||
newExpanded.add(folderId);
|
newExpanded.add(folderId);
|
||||||
|
|
||||||
// 按需加载子目录
|
// Load subdirectories on demand
|
||||||
if (sshSessionId && folderPath && folderPath !== "/") {
|
if (sshSessionId && folderPath && folderPath !== "/") {
|
||||||
try {
|
try {
|
||||||
const subResponse = await listSSHFiles(sshSessionId, folderPath);
|
const subResponse = await listSSHFiles(sshSessionId, folderPath);
|
||||||
|
|
||||||
// listSSHFiles 现在总是返回 {files: Array, path: string} 格式
|
// listSSHFiles now always returns {files: Array, path: string} format
|
||||||
const subFiles = subResponse.files || [];
|
const subFiles = subResponse.files || [];
|
||||||
const subFolders = subFiles.filter(
|
const subFolders = subFiles.filter(
|
||||||
(item: any) => item.type === "directory",
|
(item: any) => item.type === "directory",
|
||||||
@@ -332,7 +332,7 @@ export function FileManagerSidebar({
|
|||||||
children: [],
|
children: [],
|
||||||
}));
|
}));
|
||||||
|
|
||||||
// 更新目录树,为当前文件夹添加子目录
|
// Update directory tree, add subdirectories for current folder
|
||||||
setDirectoryTree((prevTree) => {
|
setDirectoryTree((prevTree) => {
|
||||||
const updateChildren = (items: SidebarItem[]): SidebarItem[] => {
|
const updateChildren = (items: SidebarItem[]): SidebarItem[] => {
|
||||||
return items.map((item) => {
|
return items.map((item) => {
|
||||||
@@ -370,7 +370,7 @@ export function FileManagerSidebar({
|
|||||||
style={{ paddingLeft: `${12 + level * 16}px`, paddingRight: "12px" }}
|
style={{ paddingLeft: `${12 + level * 16}px`, paddingRight: "12px" }}
|
||||||
onClick={() => handleItemClick(item)}
|
onClick={() => handleItemClick(item)}
|
||||||
onContextMenu={(e) => {
|
onContextMenu={(e) => {
|
||||||
// 只有快捷功能项才需要右键菜单
|
// Only quick access items need right-click menu
|
||||||
if (
|
if (
|
||||||
item.type === "recent" ||
|
item.type === "recent" ||
|
||||||
item.type === "pinned" ||
|
item.type === "pinned" ||
|
||||||
@@ -447,7 +447,7 @@ export function FileManagerSidebar({
|
|||||||
<div className="h-full flex flex-col bg-dark-bg border-r border-dark-border">
|
<div className="h-full flex flex-col bg-dark-bg border-r border-dark-border">
|
||||||
<div className="flex-1 relative overflow-hidden">
|
<div className="flex-1 relative overflow-hidden">
|
||||||
<div className="absolute inset-1.5 overflow-y-auto thin-scrollbar space-y-4">
|
<div className="absolute inset-1.5 overflow-y-auto thin-scrollbar space-y-4">
|
||||||
{/* 快捷功能区域 */}
|
{/* Quick access area */}
|
||||||
{renderSection(
|
{renderSection(
|
||||||
t("fileManager.recent"),
|
t("fileManager.recent"),
|
||||||
<Clock className="w-3 h-3" />,
|
<Clock className="w-3 h-3" />,
|
||||||
@@ -464,7 +464,7 @@ export function FileManagerSidebar({
|
|||||||
shortcuts,
|
shortcuts,
|
||||||
)}
|
)}
|
||||||
|
|
||||||
{/* 目录树 */}
|
{/* Directory tree */}
|
||||||
<div
|
<div
|
||||||
className={cn(
|
className={cn(
|
||||||
hasQuickAccessItems && "pt-4 border-t border-dark-border",
|
hasQuickAccessItems && "pt-4 border-t border-dark-border",
|
||||||
@@ -482,7 +482,7 @@ export function FileManagerSidebar({
|
|||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
{/* 右键菜单 */}
|
{/* Right-click menu */}
|
||||||
{contextMenu.isVisible && contextMenu.item && (
|
{contextMenu.isVisible && contextMenu.item && (
|
||||||
<>
|
<>
|
||||||
<div className="fixed inset-0 z-40" />
|
<div className="fixed inset-0 z-40" />
|
||||||
|
|||||||
@@ -2,6 +2,7 @@ import React, { useState, useEffect } from "react";
|
|||||||
import { DiffEditor } from "@monaco-editor/react";
|
import { DiffEditor } from "@monaco-editor/react";
|
||||||
import { Button } from "@/components/ui/button";
|
import { Button } from "@/components/ui/button";
|
||||||
import { toast } from "sonner";
|
import { toast } from "sonner";
|
||||||
|
import { useTranslation } from "react-i18next";
|
||||||
import {
|
import {
|
||||||
Download,
|
Download,
|
||||||
RefreshCw,
|
RefreshCw,
|
||||||
@@ -35,6 +36,7 @@ export function DiffViewer({
|
|||||||
onDownload1,
|
onDownload1,
|
||||||
onDownload2,
|
onDownload2,
|
||||||
}: DiffViewerProps) {
|
}: DiffViewerProps) {
|
||||||
|
const { t } = useTranslation();
|
||||||
const [content1, setContent1] = useState<string>("");
|
const [content1, setContent1] = useState<string>("");
|
||||||
const [content2, setContent2] = useState<string>("");
|
const [content2, setContent2] = useState<string>("");
|
||||||
const [isLoading, setIsLoading] = useState(false);
|
const [isLoading, setIsLoading] = useState(false);
|
||||||
@@ -44,7 +46,7 @@ export function DiffViewer({
|
|||||||
);
|
);
|
||||||
const [showLineNumbers, setShowLineNumbers] = useState(true);
|
const [showLineNumbers, setShowLineNumbers] = useState(true);
|
||||||
|
|
||||||
// 确保SSH连接有效
|
// Ensure SSH connection is valid
|
||||||
const ensureSSHConnection = async () => {
|
const ensureSSHConnection = async () => {
|
||||||
try {
|
try {
|
||||||
const status = await getSSHStatus(sshSessionId);
|
const status = await getSSHStatus(sshSessionId);
|
||||||
@@ -68,10 +70,10 @@ export function DiffViewer({
|
|||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
// 加载文件内容
|
// Load file contents
|
||||||
const loadFileContents = async () => {
|
const loadFileContents = async () => {
|
||||||
if (file1.type !== "file" || file2.type !== "file") {
|
if (file1.type !== "file" || file2.type !== "file") {
|
||||||
setError("只能对比文件类型的项目");
|
setError(t("fileManager.canOnlyCompareFiles"));
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -79,10 +81,10 @@ export function DiffViewer({
|
|||||||
setIsLoading(true);
|
setIsLoading(true);
|
||||||
setError(null);
|
setError(null);
|
||||||
|
|
||||||
// 确保SSH连接有效
|
// Ensure SSH connection is valid
|
||||||
await ensureSSHConnection();
|
await ensureSSHConnection();
|
||||||
|
|
||||||
// 并行加载两个文件
|
// Load both files in parallel
|
||||||
const [response1, response2] = await Promise.all([
|
const [response1, response2] = await Promise.all([
|
||||||
readSSHFile(sshSessionId, file1.path),
|
readSSHFile(sshSessionId, file1.path),
|
||||||
readSSHFile(sshSessionId, file2.path),
|
readSSHFile(sshSessionId, file2.path),
|
||||||
@@ -95,17 +97,23 @@ export function DiffViewer({
|
|||||||
|
|
||||||
const errorData = error?.response?.data;
|
const errorData = error?.response?.data;
|
||||||
if (errorData?.tooLarge) {
|
if (errorData?.tooLarge) {
|
||||||
setError(`文件过大: ${errorData.error}`);
|
setError(t("fileManager.fileTooLarge", { error: errorData.error }));
|
||||||
} else if (
|
} else if (
|
||||||
error.message?.includes("connection") ||
|
error.message?.includes("connection") ||
|
||||||
error.message?.includes("established")
|
error.message?.includes("established")
|
||||||
) {
|
) {
|
||||||
setError(
|
setError(
|
||||||
`SSH连接失败。请检查与 ${sshHost.name} (${sshHost.ip}:${sshHost.port}) 的连接`,
|
t("fileManager.sshConnectionFailed", {
|
||||||
|
name: sshHost.name,
|
||||||
|
ip: sshHost.ip,
|
||||||
|
port: sshHost.port
|
||||||
|
}),
|
||||||
);
|
);
|
||||||
} else {
|
} else {
|
||||||
setError(
|
setError(
|
||||||
`加载文件失败: ${error.message || errorData?.error || "未知错误"}`,
|
t("fileManager.loadFileFailed", {
|
||||||
|
error: error.message || errorData?.error || t("fileManager.unknownError")
|
||||||
|
}),
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
} finally {
|
} finally {
|
||||||
@@ -113,7 +121,7 @@ export function DiffViewer({
|
|||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
// 下载文件
|
// Download file
|
||||||
const handleDownloadFile = async (file: FileItem) => {
|
const handleDownloadFile = async (file: FileItem) => {
|
||||||
try {
|
try {
|
||||||
await ensureSSHConnection();
|
await ensureSSHConnection();
|
||||||
@@ -139,15 +147,15 @@ export function DiffViewer({
|
|||||||
document.body.removeChild(link);
|
document.body.removeChild(link);
|
||||||
URL.revokeObjectURL(url);
|
URL.revokeObjectURL(url);
|
||||||
|
|
||||||
toast.success(`文件下载成功: ${file.name}`);
|
toast.success(t("fileManager.downloadFileSuccess", { name: file.name }));
|
||||||
}
|
}
|
||||||
} catch (error: any) {
|
} catch (error: any) {
|
||||||
console.error("Failed to download file:", error);
|
console.error("Failed to download file:", error);
|
||||||
toast.error(`下载失败: ${error.message || "未知错误"}`);
|
toast.error(t("fileManager.downloadFileFailed") + ": " + (error.message || t("fileManager.unknownError")));
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
// 获取文件语言类型
|
// Get file language type
|
||||||
const getFileLanguage = (fileName: string): string => {
|
const getFileLanguage = (fileName: string): string => {
|
||||||
const ext = fileName.split(".").pop()?.toLowerCase();
|
const ext = fileName.split(".").pop()?.toLowerCase();
|
||||||
const languageMap: Record<string, string> = {
|
const languageMap: Record<string, string> = {
|
||||||
@@ -182,7 +190,7 @@ export function DiffViewer({
|
|||||||
return languageMap[ext || ""] || "plaintext";
|
return languageMap[ext || ""] || "plaintext";
|
||||||
};
|
};
|
||||||
|
|
||||||
// 初始加载
|
// Initial load
|
||||||
useEffect(() => {
|
useEffect(() => {
|
||||||
loadFileContents();
|
loadFileContents();
|
||||||
}, [file1, file2, sshSessionId]);
|
}, [file1, file2, sshSessionId]);
|
||||||
@@ -192,7 +200,7 @@ export function DiffViewer({
|
|||||||
<div className="h-full flex items-center justify-center bg-dark-bg">
|
<div className="h-full flex items-center justify-center bg-dark-bg">
|
||||||
<div className="text-center">
|
<div className="text-center">
|
||||||
<div className="animate-spin rounded-full h-8 w-8 border-b-2 border-blue-500 mx-auto mb-2"></div>
|
<div className="animate-spin rounded-full h-8 w-8 border-b-2 border-blue-500 mx-auto mb-2"></div>
|
||||||
<p className="text-sm text-muted-foreground">正在加载文件对比...</p>
|
<p className="text-sm text-muted-foreground">{t("fileManager.loadingFileComparison")}</p>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
);
|
);
|
||||||
@@ -206,7 +214,7 @@ export function DiffViewer({
|
|||||||
<p className="text-red-500 mb-4">{error}</p>
|
<p className="text-red-500 mb-4">{error}</p>
|
||||||
<Button onClick={loadFileContents} variant="outline">
|
<Button onClick={loadFileContents} variant="outline">
|
||||||
<RefreshCw className="w-4 h-4 mr-2" />
|
<RefreshCw className="w-4 h-4 mr-2" />
|
||||||
重新加载
|
{t("fileManager.reload")}
|
||||||
</Button>
|
</Button>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
@@ -215,12 +223,12 @@ export function DiffViewer({
|
|||||||
|
|
||||||
return (
|
return (
|
||||||
<div className="h-full flex flex-col bg-dark-bg">
|
<div className="h-full flex flex-col bg-dark-bg">
|
||||||
{/* 工具栏 */}
|
{/* Toolbar */}
|
||||||
<div className="flex-shrink-0 border-b border-dark-border p-3">
|
<div className="flex-shrink-0 border-b border-dark-border p-3">
|
||||||
<div className="flex items-center justify-between">
|
<div className="flex items-center justify-between">
|
||||||
<div className="flex items-center gap-4">
|
<div className="flex items-center gap-4">
|
||||||
<div className="text-sm">
|
<div className="text-sm">
|
||||||
<span className="text-muted-foreground">对比:</span>
|
<span className="text-muted-foreground">{t("fileManager.compare")}:</span>
|
||||||
<span className="font-medium text-green-400 mx-2">
|
<span className="font-medium text-green-400 mx-2">
|
||||||
{file1.name}
|
{file1.name}
|
||||||
</span>
|
</span>
|
||||||
@@ -230,7 +238,7 @@ export function DiffViewer({
|
|||||||
</div>
|
</div>
|
||||||
|
|
||||||
<div className="flex items-center gap-2">
|
<div className="flex items-center gap-2">
|
||||||
{/* 视图切换 */}
|
{/* View toggle */}
|
||||||
<Button
|
<Button
|
||||||
variant="outline"
|
variant="outline"
|
||||||
size="sm"
|
size="sm"
|
||||||
@@ -240,10 +248,10 @@ export function DiffViewer({
|
|||||||
)
|
)
|
||||||
}
|
}
|
||||||
>
|
>
|
||||||
{diffMode === "side-by-side" ? "并排" : "内联"}
|
{diffMode === "side-by-side" ? t("fileManager.sideBySide") : t("fileManager.inline")}
|
||||||
</Button>
|
</Button>
|
||||||
|
|
||||||
{/* 行号切换 */}
|
{/* Line number toggle */}
|
||||||
<Button
|
<Button
|
||||||
variant="outline"
|
variant="outline"
|
||||||
size="sm"
|
size="sm"
|
||||||
@@ -256,12 +264,12 @@ export function DiffViewer({
|
|||||||
)}
|
)}
|
||||||
</Button>
|
</Button>
|
||||||
|
|
||||||
{/* 下载按钮 */}
|
{/* Download buttons */}
|
||||||
<Button
|
<Button
|
||||||
variant="outline"
|
variant="outline"
|
||||||
size="sm"
|
size="sm"
|
||||||
onClick={() => handleDownloadFile(file1)}
|
onClick={() => handleDownloadFile(file1)}
|
||||||
title={`下载 ${file1.name}`}
|
title={t("fileManager.downloadFile", { name: file1.name })}
|
||||||
>
|
>
|
||||||
<Download className="w-4 h-4 mr-1" />
|
<Download className="w-4 h-4 mr-1" />
|
||||||
{file1.name}
|
{file1.name}
|
||||||
@@ -271,13 +279,13 @@ export function DiffViewer({
|
|||||||
variant="outline"
|
variant="outline"
|
||||||
size="sm"
|
size="sm"
|
||||||
onClick={() => handleDownloadFile(file2)}
|
onClick={() => handleDownloadFile(file2)}
|
||||||
title={`下载 ${file2.name}`}
|
title={t("fileManager.downloadFile", { name: file2.name })}
|
||||||
>
|
>
|
||||||
<Download className="w-4 h-4 mr-1" />
|
<Download className="w-4 h-4 mr-1" />
|
||||||
{file2.name}
|
{file2.name}
|
||||||
</Button>
|
</Button>
|
||||||
|
|
||||||
{/* 刷新按钮 */}
|
{/* Refresh button */}
|
||||||
<Button variant="outline" size="sm" onClick={loadFileContents}>
|
<Button variant="outline" size="sm" onClick={loadFileContents}>
|
||||||
<RefreshCw className="w-4 h-4" />
|
<RefreshCw className="w-4 h-4" />
|
||||||
</Button>
|
</Button>
|
||||||
@@ -285,7 +293,7 @@ export function DiffViewer({
|
|||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
{/* Diff编辑器 */}
|
{/* Diff editor */}
|
||||||
<div className="flex-1">
|
<div className="flex-1">
|
||||||
<DiffEditor
|
<DiffEditor
|
||||||
original={content1}
|
original={content1}
|
||||||
@@ -314,7 +322,7 @@ export function DiffViewer({
|
|||||||
<div className="h-full flex items-center justify-center">
|
<div className="h-full flex items-center justify-center">
|
||||||
<div className="text-center">
|
<div className="text-center">
|
||||||
<div className="animate-spin rounded-full h-6 w-6 border-b-2 border-blue-500 mx-auto mb-2"></div>
|
<div className="animate-spin rounded-full h-6 w-6 border-b-2 border-blue-500 mx-auto mb-2"></div>
|
||||||
<p className="text-sm text-muted-foreground">初始化编辑器...</p>
|
<p className="text-sm text-muted-foreground">{t("fileManager.initializingEditor")}</p>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -2,6 +2,7 @@ import React from "react";
|
|||||||
import { DraggableWindow } from "./DraggableWindow";
|
import { DraggableWindow } from "./DraggableWindow";
|
||||||
import { DiffViewer } from "./DiffViewer";
|
import { DiffViewer } from "./DiffViewer";
|
||||||
import { useWindowManager } from "./WindowManager";
|
import { useWindowManager } from "./WindowManager";
|
||||||
|
import { useTranslation } from "react-i18next";
|
||||||
import type { FileItem, SSHHost } from "../../../../types/index.js";
|
import type { FileItem, SSHHost } from "../../../../types/index.js";
|
||||||
|
|
||||||
interface DiffWindowProps {
|
interface DiffWindowProps {
|
||||||
@@ -23,20 +24,17 @@ export function DiffWindow({
|
|||||||
initialX = 150,
|
initialX = 150,
|
||||||
initialY = 100,
|
initialY = 100,
|
||||||
}: DiffWindowProps) {
|
}: DiffWindowProps) {
|
||||||
const { closeWindow, minimizeWindow, maximizeWindow, focusWindow, windows } =
|
const { t } = useTranslation();
|
||||||
|
const { closeWindow, maximizeWindow, focusWindow, windows } =
|
||||||
useWindowManager();
|
useWindowManager();
|
||||||
|
|
||||||
const currentWindow = windows.find((w) => w.id === windowId);
|
const currentWindow = windows.find((w) => w.id === windowId);
|
||||||
|
|
||||||
// 窗口操作处理
|
// Window operation handling
|
||||||
const handleClose = () => {
|
const handleClose = () => {
|
||||||
closeWindow(windowId);
|
closeWindow(windowId);
|
||||||
};
|
};
|
||||||
|
|
||||||
const handleMinimize = () => {
|
|
||||||
minimizeWindow(windowId);
|
|
||||||
};
|
|
||||||
|
|
||||||
const handleMaximize = () => {
|
const handleMaximize = () => {
|
||||||
maximizeWindow(windowId);
|
maximizeWindow(windowId);
|
||||||
};
|
};
|
||||||
@@ -51,7 +49,7 @@ export function DiffWindow({
|
|||||||
|
|
||||||
return (
|
return (
|
||||||
<DraggableWindow
|
<DraggableWindow
|
||||||
title={`文件对比: ${file1.name} ↔ ${file2.name}`}
|
title={t("fileManager.fileComparison", { file1: file1.name, file2: file2.name })}
|
||||||
initialX={initialX}
|
initialX={initialX}
|
||||||
initialY={initialY}
|
initialY={initialY}
|
||||||
initialWidth={1200}
|
initialWidth={1200}
|
||||||
@@ -59,7 +57,6 @@ export function DiffWindow({
|
|||||||
minWidth={800}
|
minWidth={800}
|
||||||
minHeight={500}
|
minHeight={500}
|
||||||
onClose={handleClose}
|
onClose={handleClose}
|
||||||
onMinimize={handleMinimize}
|
|
||||||
onMaximize={handleMaximize}
|
onMaximize={handleMaximize}
|
||||||
onFocus={handleFocus}
|
onFocus={handleFocus}
|
||||||
isMaximized={currentWindow.isMaximized}
|
isMaximized={currentWindow.isMaximized}
|
||||||
|
|||||||
@@ -1,6 +1,7 @@
|
|||||||
import React, { useState, useRef, useCallback, useEffect } from "react";
|
import React, { useState, useRef, useCallback, useEffect } from "react";
|
||||||
import { cn } from "@/lib/utils";
|
import { cn } from "@/lib/utils";
|
||||||
import { Minus, Square, X, Maximize2, Minimize2 } from "lucide-react";
|
import { Minus, Square, X, Maximize2, Minimize2 } from "lucide-react";
|
||||||
|
import { useTranslation } from "react-i18next";
|
||||||
|
|
||||||
interface DraggableWindowProps {
|
interface DraggableWindowProps {
|
||||||
title: string;
|
title: string;
|
||||||
@@ -17,6 +18,7 @@ interface DraggableWindowProps {
|
|||||||
isMaximized?: boolean;
|
isMaximized?: boolean;
|
||||||
zIndex?: number;
|
zIndex?: number;
|
||||||
onFocus?: () => void;
|
onFocus?: () => void;
|
||||||
|
targetSize?: { width: number; height: number };
|
||||||
}
|
}
|
||||||
|
|
||||||
export function DraggableWindow({
|
export function DraggableWindow({
|
||||||
@@ -34,8 +36,10 @@ export function DraggableWindow({
|
|||||||
isMaximized = false,
|
isMaximized = false,
|
||||||
zIndex = 1000,
|
zIndex = 1000,
|
||||||
onFocus,
|
onFocus,
|
||||||
|
targetSize,
|
||||||
}: DraggableWindowProps) {
|
}: DraggableWindowProps) {
|
||||||
// 窗口状态
|
const { t } = useTranslation();
|
||||||
|
// Window state
|
||||||
const [position, setPosition] = useState({ x: initialX, y: initialY });
|
const [position, setPosition] = useState({ x: initialX, y: initialY });
|
||||||
const [size, setSize] = useState({
|
const [size, setSize] = useState({
|
||||||
width: initialWidth,
|
width: initialWidth,
|
||||||
@@ -45,19 +49,54 @@ export function DraggableWindow({
|
|||||||
const [isResizing, setIsResizing] = useState(false);
|
const [isResizing, setIsResizing] = useState(false);
|
||||||
const [resizeDirection, setResizeDirection] = useState<string>("");
|
const [resizeDirection, setResizeDirection] = useState<string>("");
|
||||||
|
|
||||||
// 拖拽开始位置
|
// Drag and resize start positions
|
||||||
const [dragStart, setDragStart] = useState({ x: 0, y: 0 });
|
const [dragStart, setDragStart] = useState({ x: 0, y: 0 });
|
||||||
const [windowStart, setWindowStart] = useState({ x: 0, y: 0 });
|
const [windowStart, setWindowStart] = useState({ x: 0, y: 0 });
|
||||||
|
const [sizeStart, setSizeStart] = useState({ width: 0, height: 0 });
|
||||||
|
|
||||||
const windowRef = useRef<HTMLDivElement>(null);
|
const windowRef = useRef<HTMLDivElement>(null);
|
||||||
const titleBarRef = useRef<HTMLDivElement>(null);
|
const titleBarRef = useRef<HTMLDivElement>(null);
|
||||||
|
|
||||||
// 处理窗口焦点
|
// Handle target size changes for media files
|
||||||
|
useEffect(() => {
|
||||||
|
if (targetSize && !isMaximized) {
|
||||||
|
const maxWidth = Math.min(window.innerWidth * 0.9, 1200);
|
||||||
|
const maxHeight = Math.min(window.innerHeight * 0.8, 800);
|
||||||
|
|
||||||
|
// Calculate appropriate window size maintaining aspect ratio
|
||||||
|
let newWidth = Math.min(targetSize.width + 50, maxWidth); // Add padding for UI
|
||||||
|
let newHeight = Math.min(targetSize.height + 150, maxHeight); // Add padding for header/footer
|
||||||
|
|
||||||
|
// If still too large, scale down maintaining aspect ratio
|
||||||
|
if (newWidth > maxWidth || newHeight > maxHeight) {
|
||||||
|
const widthRatio = maxWidth / newWidth;
|
||||||
|
const heightRatio = maxHeight / newHeight;
|
||||||
|
const scale = Math.min(widthRatio, heightRatio);
|
||||||
|
|
||||||
|
newWidth = Math.floor(newWidth * scale);
|
||||||
|
newHeight = Math.floor(newHeight * scale);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Ensure minimum size
|
||||||
|
newWidth = Math.max(newWidth, minWidth);
|
||||||
|
newHeight = Math.max(newHeight, minHeight);
|
||||||
|
|
||||||
|
setSize({ width: newWidth, height: newHeight });
|
||||||
|
|
||||||
|
// Center the window
|
||||||
|
setPosition({
|
||||||
|
x: Math.max(0, (window.innerWidth - newWidth) / 2),
|
||||||
|
y: Math.max(0, (window.innerHeight - newHeight) / 2)
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}, [targetSize, isMaximized, minWidth, minHeight]);
|
||||||
|
|
||||||
|
// Handle window focus
|
||||||
const handleWindowClick = useCallback(() => {
|
const handleWindowClick = useCallback(() => {
|
||||||
onFocus?.();
|
onFocus?.();
|
||||||
}, [onFocus]);
|
}, [onFocus]);
|
||||||
|
|
||||||
// 拖拽处理
|
// Drag handling
|
||||||
const handleMouseDown = useCallback(
|
const handleMouseDown = useCallback(
|
||||||
(e: React.MouseEvent) => {
|
(e: React.MouseEvent) => {
|
||||||
if (isMaximized) return;
|
if (isMaximized) return;
|
||||||
@@ -85,7 +124,7 @@ export function DraggableWindow({
|
|||||||
y: Math.max(
|
y: Math.max(
|
||||||
0,
|
0,
|
||||||
Math.min(window.innerHeight - 40, windowStart.y + deltaY),
|
Math.min(window.innerHeight - 40, windowStart.y + deltaY),
|
||||||
), // 保持标题栏可见
|
), // Keep title bar visible
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -93,31 +132,44 @@ export function DraggableWindow({
|
|||||||
const deltaX = e.clientX - dragStart.x;
|
const deltaX = e.clientX - dragStart.x;
|
||||||
const deltaY = e.clientY - dragStart.y;
|
const deltaY = e.clientY - dragStart.y;
|
||||||
|
|
||||||
let newWidth = size.width;
|
let newWidth = sizeStart.width;
|
||||||
let newHeight = size.height;
|
let newHeight = sizeStart.height;
|
||||||
let newX = position.x;
|
let newX = windowStart.x;
|
||||||
let newY = position.y;
|
let newY = windowStart.y;
|
||||||
|
|
||||||
|
// Handle horizontal resizing
|
||||||
if (resizeDirection.includes("right")) {
|
if (resizeDirection.includes("right")) {
|
||||||
newWidth = Math.max(minWidth, windowStart.x + deltaX);
|
newWidth = Math.max(minWidth, sizeStart.width + deltaX);
|
||||||
}
|
}
|
||||||
if (resizeDirection.includes("left")) {
|
if (resizeDirection.includes("left")) {
|
||||||
newWidth = Math.max(minWidth, size.width - deltaX);
|
const widthChange = -deltaX;
|
||||||
newX = Math.min(
|
newWidth = Math.max(minWidth, sizeStart.width + widthChange);
|
||||||
windowStart.x + deltaX,
|
// Only move position if we're actually changing size
|
||||||
position.x + size.width - minWidth,
|
if (newWidth > minWidth || widthChange > 0) {
|
||||||
);
|
newX = windowStart.x - (newWidth - sizeStart.width);
|
||||||
|
} else {
|
||||||
|
newX = windowStart.x - (minWidth - sizeStart.width);
|
||||||
}
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Handle vertical resizing
|
||||||
if (resizeDirection.includes("bottom")) {
|
if (resizeDirection.includes("bottom")) {
|
||||||
newHeight = Math.max(minHeight, windowStart.y + deltaY);
|
newHeight = Math.max(minHeight, sizeStart.height + deltaY);
|
||||||
}
|
}
|
||||||
if (resizeDirection.includes("top")) {
|
if (resizeDirection.includes("top")) {
|
||||||
newHeight = Math.max(minHeight, size.height - deltaY);
|
const heightChange = -deltaY;
|
||||||
newY = Math.min(
|
newHeight = Math.max(minHeight, sizeStart.height + heightChange);
|
||||||
windowStart.y + deltaY,
|
// Only move position if we're actually changing size
|
||||||
position.y + size.height - minHeight,
|
if (newHeight > minHeight || heightChange > 0) {
|
||||||
);
|
newY = windowStart.y - (newHeight - sizeStart.height);
|
||||||
|
} else {
|
||||||
|
newY = windowStart.y - (minHeight - sizeStart.height);
|
||||||
}
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Ensure window stays within viewport
|
||||||
|
newX = Math.max(0, Math.min(window.innerWidth - newWidth, newX));
|
||||||
|
newY = Math.max(0, Math.min(window.innerHeight - newHeight, newY));
|
||||||
|
|
||||||
setSize({ width: newWidth, height: newHeight });
|
setSize({ width: newWidth, height: newHeight });
|
||||||
setPosition({ x: newX, y: newY });
|
setPosition({ x: newX, y: newY });
|
||||||
@@ -129,6 +181,7 @@ export function DraggableWindow({
|
|||||||
isMaximized,
|
isMaximized,
|
||||||
dragStart,
|
dragStart,
|
||||||
windowStart,
|
windowStart,
|
||||||
|
sizeStart,
|
||||||
size,
|
size,
|
||||||
position,
|
position,
|
||||||
minWidth,
|
minWidth,
|
||||||
@@ -143,7 +196,7 @@ export function DraggableWindow({
|
|||||||
setResizeDirection("");
|
setResizeDirection("");
|
||||||
}, []);
|
}, []);
|
||||||
|
|
||||||
// 调整大小处理
|
// Resize handling
|
||||||
const handleResizeStart = useCallback(
|
const handleResizeStart = useCallback(
|
||||||
(e: React.MouseEvent, direction: string) => {
|
(e: React.MouseEvent, direction: string) => {
|
||||||
if (isMaximized) return;
|
if (isMaximized) return;
|
||||||
@@ -153,13 +206,14 @@ export function DraggableWindow({
|
|||||||
setIsResizing(true);
|
setIsResizing(true);
|
||||||
setResizeDirection(direction);
|
setResizeDirection(direction);
|
||||||
setDragStart({ x: e.clientX, y: e.clientY });
|
setDragStart({ x: e.clientX, y: e.clientY });
|
||||||
setWindowStart({ x: size.width, y: size.height });
|
setWindowStart({ x: position.x, y: position.y });
|
||||||
|
setSizeStart({ width: size.width, height: size.height });
|
||||||
onFocus?.();
|
onFocus?.();
|
||||||
},
|
},
|
||||||
[isMaximized, size, onFocus],
|
[isMaximized, position, size, onFocus],
|
||||||
);
|
);
|
||||||
|
|
||||||
// 全局事件监听
|
// Global event listeners
|
||||||
useEffect(() => {
|
useEffect(() => {
|
||||||
if (isDragging || isResizing) {
|
if (isDragging || isResizing) {
|
||||||
document.addEventListener("mousemove", handleMouseMove);
|
document.addEventListener("mousemove", handleMouseMove);
|
||||||
@@ -176,7 +230,7 @@ export function DraggableWindow({
|
|||||||
}
|
}
|
||||||
}, [isDragging, isResizing, handleMouseMove, handleMouseUp]);
|
}, [isDragging, isResizing, handleMouseMove, handleMouseUp]);
|
||||||
|
|
||||||
// 双击标题栏最大化/还原
|
// Double-click title bar to maximize/restore
|
||||||
const handleTitleDoubleClick = useCallback(() => {
|
const handleTitleDoubleClick = useCallback(() => {
|
||||||
onMaximize?.();
|
onMaximize?.();
|
||||||
}, [onMaximize]);
|
}, [onMaximize]);
|
||||||
@@ -198,7 +252,7 @@ export function DraggableWindow({
|
|||||||
}}
|
}}
|
||||||
onClick={handleWindowClick}
|
onClick={handleWindowClick}
|
||||||
>
|
>
|
||||||
{/* 标题栏 */}
|
{/* Title bar */}
|
||||||
<div
|
<div
|
||||||
ref={titleBarRef}
|
ref={titleBarRef}
|
||||||
className={cn(
|
className={cn(
|
||||||
@@ -221,7 +275,7 @@ export function DraggableWindow({
|
|||||||
e.stopPropagation();
|
e.stopPropagation();
|
||||||
onMinimize();
|
onMinimize();
|
||||||
}}
|
}}
|
||||||
title="最小化"
|
title={t("common.minimize")}
|
||||||
>
|
>
|
||||||
<Minus className="w-4 h-4" />
|
<Minus className="w-4 h-4" />
|
||||||
</button>
|
</button>
|
||||||
@@ -234,7 +288,7 @@ export function DraggableWindow({
|
|||||||
e.stopPropagation();
|
e.stopPropagation();
|
||||||
onMaximize();
|
onMaximize();
|
||||||
}}
|
}}
|
||||||
title={isMaximized ? "还原" : "最大化"}
|
title={isMaximized ? t("common.restore") : t("common.maximize")}
|
||||||
>
|
>
|
||||||
{isMaximized ? (
|
{isMaximized ? (
|
||||||
<Minimize2 className="w-4 h-4" />
|
<Minimize2 className="w-4 h-4" />
|
||||||
@@ -250,14 +304,14 @@ export function DraggableWindow({
|
|||||||
e.stopPropagation();
|
e.stopPropagation();
|
||||||
onClose();
|
onClose();
|
||||||
}}
|
}}
|
||||||
title="关闭"
|
title={t("common.close")}
|
||||||
>
|
>
|
||||||
<X className="w-4 h-4" />
|
<X className="w-4 h-4" />
|
||||||
</button>
|
</button>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
{/* 窗口内容 */}
|
{/* Window content */}
|
||||||
<div
|
<div
|
||||||
className="flex-1 overflow-auto"
|
className="flex-1 overflow-auto"
|
||||||
style={{ height: "calc(100% - 40px)" }}
|
style={{ height: "calc(100% - 40px)" }}
|
||||||
@@ -265,10 +319,10 @@ export function DraggableWindow({
|
|||||||
{children}
|
{children}
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
{/* 调整大小边框 - 只在非最大化时显示 */}
|
{/* Resize borders - only show when not maximized */}
|
||||||
{!isMaximized && (
|
{!isMaximized && (
|
||||||
<>
|
<>
|
||||||
{/* 边缘调整 */}
|
{/* Edge resize */}
|
||||||
<div
|
<div
|
||||||
className="absolute top-0 left-0 right-0 h-1 cursor-n-resize"
|
className="absolute top-0 left-0 right-0 h-1 cursor-n-resize"
|
||||||
onMouseDown={(e) => handleResizeStart(e, "top")}
|
onMouseDown={(e) => handleResizeStart(e, "top")}
|
||||||
@@ -286,7 +340,7 @@ export function DraggableWindow({
|
|||||||
onMouseDown={(e) => handleResizeStart(e, "right")}
|
onMouseDown={(e) => handleResizeStart(e, "right")}
|
||||||
/>
|
/>
|
||||||
|
|
||||||
{/* 角落调整 */}
|
{/* Corner resize */}
|
||||||
<div
|
<div
|
||||||
className="absolute top-0 left-0 w-2 h-2 cursor-nw-resize"
|
className="absolute top-0 left-0 w-2 h-2 cursor-nw-resize"
|
||||||
onMouseDown={(e) => handleResizeStart(e, "top-left")}
|
onMouseDown={(e) => handleResizeStart(e, "top-left")}
|
||||||
|
|||||||
@@ -10,6 +10,7 @@ import {
|
|||||||
connectSSH,
|
connectSSH,
|
||||||
} from "@/ui/main-axios";
|
} from "@/ui/main-axios";
|
||||||
import { toast } from "sonner";
|
import { toast } from "sonner";
|
||||||
|
import { useTranslation } from "react-i18next";
|
||||||
|
|
||||||
interface FileItem {
|
interface FileItem {
|
||||||
name: string;
|
name: string;
|
||||||
@@ -43,7 +44,8 @@ interface FileWindowProps {
|
|||||||
sshHost: SSHHost;
|
sshHost: SSHHost;
|
||||||
initialX?: number;
|
initialX?: number;
|
||||||
initialY?: number;
|
initialY?: number;
|
||||||
// readOnly参数已移除,由FileViewer内部根据文件类型决定
|
onFileNotFound?: (file: FileItem) => void; // Callback for when file is not found
|
||||||
|
// readOnly parameter removed, determined internally by FileViewer based on file type
|
||||||
}
|
}
|
||||||
|
|
||||||
export function FileWindow({
|
export function FileWindow({
|
||||||
@@ -53,35 +55,38 @@ export function FileWindow({
|
|||||||
sshHost,
|
sshHost,
|
||||||
initialX = 100,
|
initialX = 100,
|
||||||
initialY = 100,
|
initialY = 100,
|
||||||
|
onFileNotFound,
|
||||||
}: FileWindowProps) {
|
}: FileWindowProps) {
|
||||||
const {
|
const {
|
||||||
closeWindow,
|
closeWindow,
|
||||||
minimizeWindow,
|
|
||||||
maximizeWindow,
|
maximizeWindow,
|
||||||
focusWindow,
|
focusWindow,
|
||||||
updateWindow,
|
updateWindow,
|
||||||
windows,
|
windows,
|
||||||
} = useWindowManager();
|
} = useWindowManager();
|
||||||
|
|
||||||
|
const { t } = useTranslation();
|
||||||
|
|
||||||
const [content, setContent] = useState<string>("");
|
const [content, setContent] = useState<string>("");
|
||||||
const [isLoading, setIsLoading] = useState(false);
|
const [isLoading, setIsLoading] = useState(false);
|
||||||
const [isEditable, setIsEditable] = useState(false);
|
const [isEditable, setIsEditable] = useState(false);
|
||||||
const [pendingContent, setPendingContent] = useState<string>("");
|
const [pendingContent, setPendingContent] = useState<string>("");
|
||||||
|
const [mediaDimensions, setMediaDimensions] = useState<{ width: number; height: number } | undefined>();
|
||||||
const autoSaveTimerRef = useRef<NodeJS.Timeout | null>(null);
|
const autoSaveTimerRef = useRef<NodeJS.Timeout | null>(null);
|
||||||
|
|
||||||
const currentWindow = windows.find((w) => w.id === windowId);
|
const currentWindow = windows.find((w) => w.id === windowId);
|
||||||
|
|
||||||
// 确保SSH连接有效
|
// Ensure SSH connection is valid
|
||||||
const ensureSSHConnection = async () => {
|
const ensureSSHConnection = async () => {
|
||||||
try {
|
try {
|
||||||
// 首先检查SSH连接状态
|
// First check SSH connection status
|
||||||
const status = await getSSHStatus(sshSessionId);
|
const status = await getSSHStatus(sshSessionId);
|
||||||
console.log("SSH connection status:", status);
|
console.log("SSH connection status:", status);
|
||||||
|
|
||||||
if (!status.connected) {
|
if (!status.connected) {
|
||||||
console.log("SSH not connected, attempting to reconnect...");
|
console.log("SSH not connected, attempting to reconnect...");
|
||||||
|
|
||||||
// 重新建立连接
|
// Re-establish connection
|
||||||
await connectSSH(sshSessionId, {
|
await connectSSH(sshSessionId, {
|
||||||
hostId: sshHost.id,
|
hostId: sshHost.id,
|
||||||
ip: sshHost.ip,
|
ip: sshHost.ip,
|
||||||
@@ -99,12 +104,12 @@ export function FileWindow({
|
|||||||
}
|
}
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
console.log("SSH connection check/reconnect failed:", error);
|
console.log("SSH connection check/reconnect failed:", error);
|
||||||
// 即使连接失败也尝试继续,让具体的API调用报错
|
// Even if connection fails, try to continue and let specific API calls handle errors
|
||||||
throw error;
|
throw error;
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
// 加载文件内容
|
// Load file content
|
||||||
useEffect(() => {
|
useEffect(() => {
|
||||||
const loadFileContent = async () => {
|
const loadFileContent = async () => {
|
||||||
if (file.type !== "file") return;
|
if (file.type !== "file") return;
|
||||||
@@ -112,23 +117,23 @@ export function FileWindow({
|
|||||||
try {
|
try {
|
||||||
setIsLoading(true);
|
setIsLoading(true);
|
||||||
|
|
||||||
// 确保SSH连接有效
|
// Ensure SSH connection is valid
|
||||||
await ensureSSHConnection();
|
await ensureSSHConnection();
|
||||||
|
|
||||||
const response = await readSSHFile(sshSessionId, file.path);
|
const response = await readSSHFile(sshSessionId, file.path);
|
||||||
const fileContent = response.content || "";
|
const fileContent = response.content || "";
|
||||||
setContent(fileContent);
|
setContent(fileContent);
|
||||||
setPendingContent(fileContent); // 初始化待保存内容
|
setPendingContent(fileContent); // Initialize pending content
|
||||||
|
|
||||||
// 如果文件大小未知,根据内容计算大小
|
// If file size is unknown, calculate size based on content
|
||||||
if (!file.size) {
|
if (!file.size) {
|
||||||
const contentSize = new Blob([fileContent]).size;
|
const contentSize = new Blob([fileContent]).size;
|
||||||
file.size = contentSize;
|
file.size = contentSize;
|
||||||
}
|
}
|
||||||
|
|
||||||
// 根据文件类型决定是否可编辑:除了媒体文件,其他都可编辑
|
// Determine if editable based on file type: all except media files are editable
|
||||||
const mediaExtensions = [
|
const mediaExtensions = [
|
||||||
// 图片文件
|
// Image files
|
||||||
"jpg",
|
"jpg",
|
||||||
"jpeg",
|
"jpeg",
|
||||||
"png",
|
"png",
|
||||||
@@ -138,7 +143,7 @@ export function FileWindow({
|
|||||||
"webp",
|
"webp",
|
||||||
"tiff",
|
"tiff",
|
||||||
"ico",
|
"ico",
|
||||||
// 音频文件
|
// Audio files
|
||||||
"mp3",
|
"mp3",
|
||||||
"wav",
|
"wav",
|
||||||
"ogg",
|
"ogg",
|
||||||
@@ -146,7 +151,7 @@ export function FileWindow({
|
|||||||
"flac",
|
"flac",
|
||||||
"m4a",
|
"m4a",
|
||||||
"wma",
|
"wma",
|
||||||
// 视频文件
|
// Video files
|
||||||
"mp4",
|
"mp4",
|
||||||
"avi",
|
"avi",
|
||||||
"mov",
|
"mov",
|
||||||
@@ -155,7 +160,7 @@ export function FileWindow({
|
|||||||
"mkv",
|
"mkv",
|
||||||
"webm",
|
"webm",
|
||||||
"m4v",
|
"m4v",
|
||||||
// 压缩文件
|
// Archive files
|
||||||
"zip",
|
"zip",
|
||||||
"rar",
|
"rar",
|
||||||
"7z",
|
"7z",
|
||||||
@@ -163,7 +168,7 @@ export function FileWindow({
|
|||||||
"gz",
|
"gz",
|
||||||
"bz2",
|
"bz2",
|
||||||
"xz",
|
"xz",
|
||||||
// 二进制文件
|
// Binary files
|
||||||
"exe",
|
"exe",
|
||||||
"dll",
|
"dll",
|
||||||
"so",
|
"so",
|
||||||
@@ -173,12 +178,12 @@ export function FileWindow({
|
|||||||
];
|
];
|
||||||
|
|
||||||
const extension = file.name.split(".").pop()?.toLowerCase();
|
const extension = file.name.split(".").pop()?.toLowerCase();
|
||||||
// 只有媒体文件和二进制文件不可编辑,其他所有文件都可编辑
|
// Only media files and binary files are not editable, all other files are editable
|
||||||
setIsEditable(!mediaExtensions.includes(extension || ""));
|
setIsEditable(!mediaExtensions.includes(extension || ""));
|
||||||
} catch (error: any) {
|
} catch (error: any) {
|
||||||
console.error("Failed to load file:", error);
|
console.error("Failed to load file:", error);
|
||||||
|
|
||||||
// 检查是否是大文件错误
|
// Check if it's a large file error
|
||||||
const errorData = error?.response?.data;
|
const errorData = error?.response?.data;
|
||||||
if (errorData?.tooLarge) {
|
if (errorData?.tooLarge) {
|
||||||
toast.error(`File too large: ${errorData.error}`, {
|
toast.error(`File too large: ${errorData.error}`, {
|
||||||
@@ -188,14 +193,38 @@ export function FileWindow({
|
|||||||
error.message?.includes("connection") ||
|
error.message?.includes("connection") ||
|
||||||
error.message?.includes("established")
|
error.message?.includes("established")
|
||||||
) {
|
) {
|
||||||
// 如果是连接错误,提供更明确的错误信息
|
// If connection error, provide more specific error message
|
||||||
toast.error(
|
toast.error(
|
||||||
`SSH connection failed. Please check your connection to ${sshHost.name} (${sshHost.ip}:${sshHost.port})`,
|
`SSH connection failed. Please check your connection to ${sshHost.name} (${sshHost.ip}:${sshHost.port})`,
|
||||||
);
|
);
|
||||||
} else {
|
} else {
|
||||||
toast.error(
|
// Check if file not found (common error messages from cat command)
|
||||||
`Failed to load file: ${error.message || errorData?.error || "Unknown error"}`,
|
const errorMessage = errorData?.error || error.message || "Unknown error";
|
||||||
);
|
const isFileNotFound =
|
||||||
|
(error as any).isFileNotFound ||
|
||||||
|
errorData?.fileNotFound ||
|
||||||
|
error.response?.status === 404 ||
|
||||||
|
errorMessage.includes("File not found") ||
|
||||||
|
errorMessage.includes("No such file or directory") ||
|
||||||
|
errorMessage.includes("cannot access") ||
|
||||||
|
errorMessage.includes("not found") ||
|
||||||
|
errorMessage.includes("Resource not found");
|
||||||
|
|
||||||
|
if (isFileNotFound && onFileNotFound) {
|
||||||
|
// Notify parent component about the missing file for cleanup
|
||||||
|
onFileNotFound(file);
|
||||||
|
toast.error(t("fileManager.fileNotFoundAndRemoved", { name: file.name }));
|
||||||
|
|
||||||
|
// Close this window since the file doesn't exist
|
||||||
|
closeWindow(windowId);
|
||||||
|
return; // Exit early to prevent showing empty editor
|
||||||
|
} else {
|
||||||
|
toast.error(t("fileManager.failedToLoadFile", {
|
||||||
|
error: errorMessage.includes("Server error occurred") ?
|
||||||
|
t("fileManager.serverErrorOccurred") :
|
||||||
|
errorMessage
|
||||||
|
}));
|
||||||
|
}
|
||||||
}
|
}
|
||||||
} finally {
|
} finally {
|
||||||
setIsLoading(false);
|
setIsLoading(false);
|
||||||
@@ -205,29 +234,29 @@ export function FileWindow({
|
|||||||
loadFileContent();
|
loadFileContent();
|
||||||
}, [file, sshSessionId, sshHost]);
|
}, [file, sshSessionId, sshHost]);
|
||||||
|
|
||||||
// 保存文件
|
// Save file
|
||||||
const handleSave = async (newContent: string) => {
|
const handleSave = async (newContent: string) => {
|
||||||
try {
|
try {
|
||||||
setIsLoading(true);
|
setIsLoading(true);
|
||||||
|
|
||||||
// 确保SSH连接有效
|
// Ensure SSH connection is valid
|
||||||
await ensureSSHConnection();
|
await ensureSSHConnection();
|
||||||
|
|
||||||
await writeSSHFile(sshSessionId, file.path, newContent);
|
await writeSSHFile(sshSessionId, file.path, newContent);
|
||||||
setContent(newContent);
|
setContent(newContent);
|
||||||
setPendingContent(""); // 清除待保存内容
|
setPendingContent(""); // Clear pending content
|
||||||
|
|
||||||
// 清除自动保存定时器
|
// Clear auto-save timer
|
||||||
if (autoSaveTimerRef.current) {
|
if (autoSaveTimerRef.current) {
|
||||||
clearTimeout(autoSaveTimerRef.current);
|
clearTimeout(autoSaveTimerRef.current);
|
||||||
autoSaveTimerRef.current = null;
|
autoSaveTimerRef.current = null;
|
||||||
}
|
}
|
||||||
|
|
||||||
toast.success("File saved successfully");
|
toast.success(t("fileManager.fileSavedSuccessfully"));
|
||||||
} catch (error: any) {
|
} catch (error: any) {
|
||||||
console.error("Failed to save file:", error);
|
console.error("Failed to save file:", error);
|
||||||
|
|
||||||
// 如果是连接错误,提供更明确的错误信息
|
// If it's a connection error, provide more specific error message
|
||||||
if (
|
if (
|
||||||
error.message?.includes("connection") ||
|
error.message?.includes("connection") ||
|
||||||
error.message?.includes("established")
|
error.message?.includes("established")
|
||||||
@@ -236,36 +265,36 @@ export function FileWindow({
|
|||||||
`SSH connection failed. Please check your connection to ${sshHost.name} (${sshHost.ip}:${sshHost.port})`,
|
`SSH connection failed. Please check your connection to ${sshHost.name} (${sshHost.ip}:${sshHost.port})`,
|
||||||
);
|
);
|
||||||
} else {
|
} else {
|
||||||
toast.error(`Failed to save file: ${error.message || "Unknown error"}`);
|
toast.error(`${t("fileManager.failedToSaveFile")}: ${error.message || t("fileManager.unknownError")}`);
|
||||||
}
|
}
|
||||||
} finally {
|
} finally {
|
||||||
setIsLoading(false);
|
setIsLoading(false);
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
// 处理内容变更 - 设置1分钟自动保存
|
// Handle content changes - set 1-minute auto-save
|
||||||
const handleContentChange = (newContent: string) => {
|
const handleContentChange = (newContent: string) => {
|
||||||
setPendingContent(newContent);
|
setPendingContent(newContent);
|
||||||
|
|
||||||
// 清除之前的定时器
|
// Clear previous timer
|
||||||
if (autoSaveTimerRef.current) {
|
if (autoSaveTimerRef.current) {
|
||||||
clearTimeout(autoSaveTimerRef.current);
|
clearTimeout(autoSaveTimerRef.current);
|
||||||
}
|
}
|
||||||
|
|
||||||
// 设置新的1分钟自动保存定时器
|
// Set new 1-minute auto-save timer
|
||||||
autoSaveTimerRef.current = setTimeout(async () => {
|
autoSaveTimerRef.current = setTimeout(async () => {
|
||||||
try {
|
try {
|
||||||
console.log("Auto-saving file...");
|
console.log("Auto-saving file...");
|
||||||
await handleSave(newContent);
|
await handleSave(newContent);
|
||||||
toast.success("File auto-saved");
|
toast.success(t("fileManager.fileAutoSaved"));
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
console.error("Auto-save failed:", error);
|
console.error("Auto-save failed:", error);
|
||||||
toast.error("Auto-save failed");
|
toast.error(t("fileManager.autoSaveFailed"));
|
||||||
}
|
}
|
||||||
}, 60000); // 1分钟 = 60000毫秒
|
}, 60000); // 1 minute = 60000 milliseconds
|
||||||
};
|
};
|
||||||
|
|
||||||
// 清理定时器
|
// Cleanup timer
|
||||||
useEffect(() => {
|
useEffect(() => {
|
||||||
return () => {
|
return () => {
|
||||||
if (autoSaveTimerRef.current) {
|
if (autoSaveTimerRef.current) {
|
||||||
@@ -274,10 +303,10 @@ export function FileWindow({
|
|||||||
};
|
};
|
||||||
}, []);
|
}, []);
|
||||||
|
|
||||||
// 下载文件
|
// Download file
|
||||||
const handleDownload = async () => {
|
const handleDownload = async () => {
|
||||||
try {
|
try {
|
||||||
// 确保SSH连接有效
|
// Ensure SSH connection is valid
|
||||||
await ensureSSHConnection();
|
await ensureSSHConnection();
|
||||||
|
|
||||||
const response = await downloadSSHFile(sshSessionId, file.path);
|
const response = await downloadSSHFile(sshSessionId, file.path);
|
||||||
@@ -303,12 +332,12 @@ export function FileWindow({
|
|||||||
document.body.removeChild(link);
|
document.body.removeChild(link);
|
||||||
URL.revokeObjectURL(url);
|
URL.revokeObjectURL(url);
|
||||||
|
|
||||||
toast.success("File downloaded successfully");
|
toast.success(t("fileManager.fileDownloadedSuccessfully"));
|
||||||
}
|
}
|
||||||
} catch (error: any) {
|
} catch (error: any) {
|
||||||
console.error("Failed to download file:", error);
|
console.error("Failed to download file:", error);
|
||||||
|
|
||||||
// 如果是连接错误,提供更明确的错误信息
|
// If it's a connection error, provide more specific error message
|
||||||
if (
|
if (
|
||||||
error.message?.includes("connection") ||
|
error.message?.includes("connection") ||
|
||||||
error.message?.includes("established")
|
error.message?.includes("established")
|
||||||
@@ -324,15 +353,11 @@ export function FileWindow({
|
|||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
// 窗口操作处理
|
// Window operation handling
|
||||||
const handleClose = () => {
|
const handleClose = () => {
|
||||||
closeWindow(windowId);
|
closeWindow(windowId);
|
||||||
};
|
};
|
||||||
|
|
||||||
const handleMinimize = () => {
|
|
||||||
minimizeWindow(windowId);
|
|
||||||
};
|
|
||||||
|
|
||||||
const handleMaximize = () => {
|
const handleMaximize = () => {
|
||||||
maximizeWindow(windowId);
|
maximizeWindow(windowId);
|
||||||
};
|
};
|
||||||
@@ -341,6 +366,12 @@ export function FileWindow({
|
|||||||
focusWindow(windowId);
|
focusWindow(windowId);
|
||||||
};
|
};
|
||||||
|
|
||||||
|
// Handle media dimensions change
|
||||||
|
const handleMediaDimensionsChange = (dimensions: { width: number; height: number }) => {
|
||||||
|
console.log('Media dimensions received:', dimensions);
|
||||||
|
setMediaDimensions(dimensions);
|
||||||
|
};
|
||||||
|
|
||||||
if (!currentWindow) {
|
if (!currentWindow) {
|
||||||
return null;
|
return null;
|
||||||
}
|
}
|
||||||
@@ -355,21 +386,22 @@ export function FileWindow({
|
|||||||
minWidth={400}
|
minWidth={400}
|
||||||
minHeight={300}
|
minHeight={300}
|
||||||
onClose={handleClose}
|
onClose={handleClose}
|
||||||
onMinimize={handleMinimize}
|
|
||||||
onMaximize={handleMaximize}
|
onMaximize={handleMaximize}
|
||||||
onFocus={handleFocus}
|
onFocus={handleFocus}
|
||||||
isMaximized={currentWindow.isMaximized}
|
isMaximized={currentWindow.isMaximized}
|
||||||
zIndex={currentWindow.zIndex}
|
zIndex={currentWindow.zIndex}
|
||||||
|
targetSize={mediaDimensions}
|
||||||
>
|
>
|
||||||
<FileViewer
|
<FileViewer
|
||||||
file={file}
|
file={file}
|
||||||
content={pendingContent || content}
|
content={pendingContent || content}
|
||||||
savedContent={content}
|
savedContent={content}
|
||||||
isLoading={isLoading}
|
isLoading={isLoading}
|
||||||
isEditable={isEditable} // 移除强制只读模式,由FileViewer内部控制
|
isEditable={isEditable} // Remove forced read-only mode, controlled internally by FileViewer
|
||||||
onContentChange={handleContentChange}
|
onContentChange={handleContentChange}
|
||||||
onSave={(newContent) => handleSave(newContent)}
|
onSave={(newContent) => handleSave(newContent)}
|
||||||
onDownload={handleDownload}
|
onDownload={handleDownload}
|
||||||
|
onMediaDimensionsChange={handleMediaDimensionsChange}
|
||||||
/>
|
/>
|
||||||
</DraggableWindow>
|
</DraggableWindow>
|
||||||
);
|
);
|
||||||
|
|||||||
@@ -2,6 +2,7 @@ import React from "react";
|
|||||||
import { DraggableWindow } from "./DraggableWindow";
|
import { DraggableWindow } from "./DraggableWindow";
|
||||||
import { Terminal } from "../../Terminal/Terminal";
|
import { Terminal } from "../../Terminal/Terminal";
|
||||||
import { useWindowManager } from "./WindowManager";
|
import { useWindowManager } from "./WindowManager";
|
||||||
|
import { useTranslation } from "react-i18next";
|
||||||
|
|
||||||
interface SSHHost {
|
interface SSHHost {
|
||||||
id: number;
|
id: number;
|
||||||
@@ -34,10 +35,11 @@ export function TerminalWindow({
|
|||||||
initialY = 150,
|
initialY = 150,
|
||||||
executeCommand,
|
executeCommand,
|
||||||
}: TerminalWindowProps) {
|
}: TerminalWindowProps) {
|
||||||
|
const { t } = useTranslation();
|
||||||
const { closeWindow, minimizeWindow, maximizeWindow, focusWindow, windows } =
|
const { closeWindow, minimizeWindow, maximizeWindow, focusWindow, windows } =
|
||||||
useWindowManager();
|
useWindowManager();
|
||||||
|
|
||||||
// 获取当前窗口状态
|
// Get current window state
|
||||||
const currentWindow = windows.find((w) => w.id === windowId);
|
const currentWindow = windows.find((w) => w.id === windowId);
|
||||||
if (!currentWindow) {
|
if (!currentWindow) {
|
||||||
console.warn(`Window with id ${windowId} not found`);
|
console.warn(`Window with id ${windowId} not found`);
|
||||||
@@ -61,10 +63,10 @@ export function TerminalWindow({
|
|||||||
};
|
};
|
||||||
|
|
||||||
const terminalTitle = executeCommand
|
const terminalTitle = executeCommand
|
||||||
? `运行 - ${hostConfig.name}:${executeCommand}`
|
? t("terminal.runTitle", { host: hostConfig.name, command: executeCommand })
|
||||||
: initialPath
|
: initialPath
|
||||||
? `终端 - ${hostConfig.name}:${initialPath}`
|
? t("terminal.terminalWithPath", { host: hostConfig.name, path: initialPath })
|
||||||
: `终端 - ${hostConfig.name}`;
|
: t("terminal.terminalTitle", { host: hostConfig.name });
|
||||||
|
|
||||||
return (
|
return (
|
||||||
<DraggableWindow
|
<DraggableWindow
|
||||||
|
|||||||
@@ -35,13 +35,13 @@ export function WindowManager({ children }: WindowManagerProps) {
|
|||||||
const nextZIndex = useRef(1000);
|
const nextZIndex = useRef(1000);
|
||||||
const windowCounter = useRef(0);
|
const windowCounter = useRef(0);
|
||||||
|
|
||||||
// 打开新窗口
|
// Open new window
|
||||||
const openWindow = useCallback(
|
const openWindow = useCallback(
|
||||||
(windowData: Omit<WindowInstance, "id" | "zIndex">) => {
|
(windowData: Omit<WindowInstance, "id" | "zIndex">) => {
|
||||||
const id = `window-${++windowCounter.current}`;
|
const id = `window-${++windowCounter.current}`;
|
||||||
const zIndex = ++nextZIndex.current;
|
const zIndex = ++nextZIndex.current;
|
||||||
|
|
||||||
// 计算偏移位置,避免窗口完全重叠
|
// Calculate offset position to avoid windows completely overlapping
|
||||||
const offset = (windows.length % 5) * 30;
|
const offset = (windows.length % 5) * 30;
|
||||||
const adjustedX = windowData.x + offset;
|
const adjustedX = windowData.x + offset;
|
||||||
const adjustedY = windowData.y + offset;
|
const adjustedY = windowData.y + offset;
|
||||||
@@ -60,12 +60,12 @@ export function WindowManager({ children }: WindowManagerProps) {
|
|||||||
[windows.length],
|
[windows.length],
|
||||||
);
|
);
|
||||||
|
|
||||||
// 关闭窗口
|
// Close window
|
||||||
const closeWindow = useCallback((id: string) => {
|
const closeWindow = useCallback((id: string) => {
|
||||||
setWindows((prev) => prev.filter((w) => w.id !== id));
|
setWindows((prev) => prev.filter((w) => w.id !== id));
|
||||||
}, []);
|
}, []);
|
||||||
|
|
||||||
// 最小化窗口
|
// Minimize window
|
||||||
const minimizeWindow = useCallback((id: string) => {
|
const minimizeWindow = useCallback((id: string) => {
|
||||||
setWindows((prev) =>
|
setWindows((prev) =>
|
||||||
prev.map((w) =>
|
prev.map((w) =>
|
||||||
@@ -74,7 +74,7 @@ export function WindowManager({ children }: WindowManagerProps) {
|
|||||||
);
|
);
|
||||||
}, []);
|
}, []);
|
||||||
|
|
||||||
// 最大化/还原窗口
|
// Maximize/restore window
|
||||||
const maximizeWindow = useCallback((id: string) => {
|
const maximizeWindow = useCallback((id: string) => {
|
||||||
setWindows((prev) =>
|
setWindows((prev) =>
|
||||||
prev.map((w) =>
|
prev.map((w) =>
|
||||||
@@ -83,7 +83,7 @@ export function WindowManager({ children }: WindowManagerProps) {
|
|||||||
);
|
);
|
||||||
}, []);
|
}, []);
|
||||||
|
|
||||||
// 聚焦窗口 (置于顶层)
|
// Focus window (bring to top)
|
||||||
const focusWindow = useCallback((id: string) => {
|
const focusWindow = useCallback((id: string) => {
|
||||||
setWindows((prev) => {
|
setWindows((prev) => {
|
||||||
const targetWindow = prev.find((w) => w.id === id);
|
const targetWindow = prev.find((w) => w.id === id);
|
||||||
@@ -94,7 +94,7 @@ export function WindowManager({ children }: WindowManagerProps) {
|
|||||||
});
|
});
|
||||||
}, []);
|
}, []);
|
||||||
|
|
||||||
// 更新窗口属性
|
// Update window properties
|
||||||
const updateWindow = useCallback(
|
const updateWindow = useCallback(
|
||||||
(id: string, updates: Partial<WindowInstance>) => {
|
(id: string, updates: Partial<WindowInstance>) => {
|
||||||
setWindows((prev) =>
|
setWindows((prev) =>
|
||||||
@@ -117,7 +117,7 @@ export function WindowManager({ children }: WindowManagerProps) {
|
|||||||
return (
|
return (
|
||||||
<WindowManagerContext.Provider value={contextValue}>
|
<WindowManagerContext.Provider value={contextValue}>
|
||||||
{children}
|
{children}
|
||||||
{/* 渲染所有窗口 */}
|
{/* Render all windows */}
|
||||||
<div className="window-container">
|
<div className="window-container">
|
||||||
{windows.map((window) => (
|
{windows.map((window) => (
|
||||||
<div key={window.id}>
|
<div key={window.id}>
|
||||||
|
|||||||
@@ -16,7 +16,7 @@ interface UseDragAndDropProps {
|
|||||||
export function useDragAndDrop({
|
export function useDragAndDrop({
|
||||||
onFilesDropped,
|
onFilesDropped,
|
||||||
onError,
|
onError,
|
||||||
maxFileSize = 100, // 100MB default
|
maxFileSize = 5120, // 5GB default - much more reasonable
|
||||||
allowedTypes = [], // empty means all types allowed
|
allowedTypes = [], // empty means all types allowed
|
||||||
}: UseDragAndDropProps) {
|
}: UseDragAndDropProps) {
|
||||||
const [state, setState] = useState<DragAndDropState>({
|
const [state, setState] = useState<DragAndDropState>({
|
||||||
|
|||||||
@@ -30,9 +30,14 @@ import {
|
|||||||
getCredentials,
|
getCredentials,
|
||||||
getSSHHosts,
|
getSSHHosts,
|
||||||
updateSSHHost,
|
updateSSHHost,
|
||||||
|
enableAutoStart,
|
||||||
|
disableAutoStart,
|
||||||
} from "@/ui/main-axios.ts";
|
} from "@/ui/main-axios.ts";
|
||||||
import { useTranslation } from "react-i18next";
|
import { useTranslation } from "react-i18next";
|
||||||
import { CredentialSelector } from "@/ui/Desktop/Apps/Credentials/CredentialSelector.tsx";
|
import { CredentialSelector } from "@/ui/Desktop/Apps/Credentials/CredentialSelector.tsx";
|
||||||
|
import CodeMirror from "@uiw/react-codemirror";
|
||||||
|
import { oneDark } from "@codemirror/theme-one-dark";
|
||||||
|
import { EditorView } from "@codemirror/view";
|
||||||
|
|
||||||
interface SSHHost {
|
interface SSHHost {
|
||||||
id: number;
|
id: number;
|
||||||
@@ -45,7 +50,6 @@ interface SSHHost {
|
|||||||
pin: boolean;
|
pin: boolean;
|
||||||
authType: string;
|
authType: string;
|
||||||
password?: string;
|
password?: string;
|
||||||
requirePassword?: boolean;
|
|
||||||
key?: string;
|
key?: string;
|
||||||
keyPassword?: string;
|
keyPassword?: string;
|
||||||
keyType?: string;
|
keyType?: string;
|
||||||
@@ -173,7 +177,6 @@ export function HostManagerEditor({
|
|||||||
authType: z.enum(["password", "key", "credential"]),
|
authType: z.enum(["password", "key", "credential"]),
|
||||||
credentialId: z.number().optional().nullable(),
|
credentialId: z.number().optional().nullable(),
|
||||||
password: z.string().optional(),
|
password: z.string().optional(),
|
||||||
requirePassword: z.boolean().default(true),
|
|
||||||
key: z.any().optional().nullable(),
|
key: z.any().optional().nullable(),
|
||||||
keyPassword: z.string().optional(),
|
keyPassword: z.string().optional(),
|
||||||
keyType: z
|
keyType: z
|
||||||
@@ -207,18 +210,7 @@ export function HostManagerEditor({
|
|||||||
defaultPath: z.string().optional(),
|
defaultPath: z.string().optional(),
|
||||||
})
|
})
|
||||||
.superRefine((data, ctx) => {
|
.superRefine((data, ctx) => {
|
||||||
if (data.authType === "password") {
|
if (data.authType === "key") {
|
||||||
if (
|
|
||||||
data.requirePassword &&
|
|
||||||
(!data.password || data.password.trim() === "")
|
|
||||||
) {
|
|
||||||
ctx.addIssue({
|
|
||||||
code: z.ZodIssueCode.custom,
|
|
||||||
message: t("hosts.passwordRequired"),
|
|
||||||
path: ["password"],
|
|
||||||
});
|
|
||||||
}
|
|
||||||
} else if (data.authType === "key") {
|
|
||||||
if (
|
if (
|
||||||
!data.key ||
|
!data.key ||
|
||||||
(typeof data.key === "string" && data.key.trim() === "")
|
(typeof data.key === "string" && data.key.trim() === "")
|
||||||
@@ -279,7 +271,6 @@ export function HostManagerEditor({
|
|||||||
authType: "password" as const,
|
authType: "password" as const,
|
||||||
credentialId: null,
|
credentialId: null,
|
||||||
password: "",
|
password: "",
|
||||||
requirePassword: true,
|
|
||||||
key: null,
|
key: null,
|
||||||
keyPassword: "",
|
keyPassword: "",
|
||||||
keyType: "auto" as const,
|
keyType: "auto" as const,
|
||||||
@@ -336,7 +327,6 @@ export function HostManagerEditor({
|
|||||||
authType: defaultAuthType as "password" | "key" | "credential",
|
authType: defaultAuthType as "password" | "key" | "credential",
|
||||||
credentialId: null,
|
credentialId: null,
|
||||||
password: "",
|
password: "",
|
||||||
requirePassword: cleanedHost.requirePassword ?? true,
|
|
||||||
key: null,
|
key: null,
|
||||||
keyPassword: "",
|
keyPassword: "",
|
||||||
keyType: "auto" as const,
|
keyType: "auto" as const,
|
||||||
@@ -372,7 +362,6 @@ export function HostManagerEditor({
|
|||||||
authType: "password" as const,
|
authType: "password" as const,
|
||||||
credentialId: null,
|
credentialId: null,
|
||||||
password: "",
|
password: "",
|
||||||
requirePassword: true,
|
|
||||||
key: null,
|
key: null,
|
||||||
keyPassword: "",
|
keyPassword: "",
|
||||||
keyType: "auto" as const,
|
keyType: "auto" as const,
|
||||||
@@ -452,20 +441,45 @@ export function HostManagerEditor({
|
|||||||
submitData.keyType = data.keyType;
|
submitData.keyType = data.keyType;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
let savedHost;
|
||||||
if (editingHost && editingHost.id) {
|
if (editingHost && editingHost.id) {
|
||||||
const updatedHost = await updateSSHHost(editingHost.id, submitData);
|
savedHost = await updateSSHHost(editingHost.id, submitData);
|
||||||
toast.success(t("hosts.hostUpdatedSuccessfully", { name: data.name }));
|
toast.success(t("hosts.hostUpdatedSuccessfully", { name: data.name }));
|
||||||
|
} else {
|
||||||
|
savedHost = await createSSHHost(submitData);
|
||||||
|
toast.success(t("hosts.hostAddedSuccessfully", { name: data.name }));
|
||||||
|
}
|
||||||
|
|
||||||
if (onFormSubmit) {
|
// Handle AutoStart plaintext cache management
|
||||||
onFormSubmit(updatedHost);
|
if (savedHost && savedHost.id && data.tunnelConnections) {
|
||||||
|
const hasAutoStartTunnels = data.tunnelConnections.some(tunnel => tunnel.autoStart);
|
||||||
|
|
||||||
|
if (hasAutoStartTunnels) {
|
||||||
|
// User has enabled autoStart on some tunnels
|
||||||
|
// Need to ensure plaintext cache exists for this host
|
||||||
|
try {
|
||||||
|
await enableAutoStart(savedHost.id);
|
||||||
|
console.log(`AutoStart plaintext cache enabled for SSH host ${savedHost.id}`);
|
||||||
|
} catch (error) {
|
||||||
|
console.warn(`Failed to enable AutoStart plaintext cache for SSH host ${savedHost.id}:`, error);
|
||||||
|
// Don't fail the whole operation if cache setup fails
|
||||||
|
toast.warning(t("hosts.autoStartEnableFailed", { name: data.name }));
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
const newHost = await createSSHHost(submitData);
|
// User has disabled autoStart on all tunnels
|
||||||
toast.success(t("hosts.hostAddedSuccessfully", { name: data.name }));
|
// Clean up plaintext cache for this host
|
||||||
|
try {
|
||||||
|
await disableAutoStart(savedHost.id);
|
||||||
|
console.log(`AutoStart plaintext cache disabled for SSH host ${savedHost.id}`);
|
||||||
|
} catch (error) {
|
||||||
|
console.warn(`Failed to disable AutoStart plaintext cache for SSH host ${savedHost.id}:`, error);
|
||||||
|
// Don't fail the whole operation
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
if (onFormSubmit) {
|
if (onFormSubmit) {
|
||||||
onFormSubmit(newHost);
|
onFormSubmit(savedHost);
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
window.dispatchEvent(new CustomEvent("ssh-hosts:changed"));
|
window.dispatchEvent(new CustomEvent("ssh-hosts:changed"));
|
||||||
@@ -879,24 +893,6 @@ export function HostManagerEditor({
|
|||||||
</TabsTrigger>
|
</TabsTrigger>
|
||||||
</TabsList>
|
</TabsList>
|
||||||
<TabsContent value="password">
|
<TabsContent value="password">
|
||||||
<FormField
|
|
||||||
control={form.control}
|
|
||||||
name="requirePassword"
|
|
||||||
render={({ field }) => (
|
|
||||||
<FormItem className="mb-4">
|
|
||||||
<FormLabel>{t("hosts.requirePassword")}</FormLabel>
|
|
||||||
<FormControl>
|
|
||||||
<Switch
|
|
||||||
checked={field.value}
|
|
||||||
onCheckedChange={field.onChange}
|
|
||||||
/>
|
|
||||||
</FormControl>
|
|
||||||
<FormDescription>
|
|
||||||
{t("hosts.requirePasswordDescription")}
|
|
||||||
</FormDescription>
|
|
||||||
</FormItem>
|
|
||||||
)}
|
|
||||||
/>
|
|
||||||
<FormField
|
<FormField
|
||||||
control={form.control}
|
control={form.control}
|
||||||
name="password"
|
name="password"
|
||||||
@@ -906,7 +902,6 @@ export function HostManagerEditor({
|
|||||||
<FormControl>
|
<FormControl>
|
||||||
<PasswordInput
|
<PasswordInput
|
||||||
placeholder={t("placeholders.password")}
|
placeholder={t("placeholders.password")}
|
||||||
disabled={!form.watch("requirePassword")}
|
|
||||||
{...field}
|
{...field}
|
||||||
/>
|
/>
|
||||||
</FormControl>
|
</FormControl>
|
||||||
@@ -988,19 +983,33 @@ export function HostManagerEditor({
|
|||||||
<FormItem className="mb-4">
|
<FormItem className="mb-4">
|
||||||
<FormLabel>{t("hosts.sshPrivateKey")}</FormLabel>
|
<FormLabel>{t("hosts.sshPrivateKey")}</FormLabel>
|
||||||
<FormControl>
|
<FormControl>
|
||||||
<textarea
|
<CodeMirror
|
||||||
placeholder={t(
|
|
||||||
"placeholders.pastePrivateKey",
|
|
||||||
)}
|
|
||||||
className="flex min-h-[120px] w-full rounded-md border border-input bg-background px-3 py-2 text-sm ring-offset-background placeholder:text-muted-foreground focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-ring focus-visible:ring-offset-2 disabled:cursor-not-allowed disabled:opacity-50"
|
|
||||||
value={
|
value={
|
||||||
typeof field.value === "string"
|
typeof field.value === "string"
|
||||||
? field.value
|
? field.value
|
||||||
: ""
|
: ""
|
||||||
}
|
}
|
||||||
onChange={(e) =>
|
onChange={(value) => field.onChange(value)}
|
||||||
field.onChange(e.target.value)
|
placeholder={t("placeholders.pastePrivateKey")}
|
||||||
}
|
theme={oneDark}
|
||||||
|
className="border border-input rounded-md"
|
||||||
|
minHeight="120px"
|
||||||
|
basicSetup={{
|
||||||
|
lineNumbers: true,
|
||||||
|
foldGutter: false,
|
||||||
|
dropCursor: false,
|
||||||
|
allowMultipleSelections: false,
|
||||||
|
highlightSelectionMatches: false,
|
||||||
|
searchKeymap: false,
|
||||||
|
scrollPastEnd: false,
|
||||||
|
}}
|
||||||
|
extensions={[
|
||||||
|
EditorView.theme({
|
||||||
|
".cm-scroller": {
|
||||||
|
overflow: "auto",
|
||||||
|
},
|
||||||
|
}),
|
||||||
|
]}
|
||||||
/>
|
/>
|
||||||
</FormControl>
|
</FormControl>
|
||||||
</FormItem>
|
</FormItem>
|
||||||
@@ -1149,7 +1158,7 @@ export function HostManagerEditor({
|
|||||||
<code className="bg-muted px-1 rounded inline">
|
<code className="bg-muted px-1 rounded inline">
|
||||||
sudo apt install sshpass
|
sudo apt install sshpass
|
||||||
</code>{" "}
|
</code>{" "}
|
||||||
(Debian/Ubuntu) or the equivalent for your OS.
|
{t("hosts.debianUbuntuEquivalent")}
|
||||||
</div>
|
</div>
|
||||||
<div className="mt-2">
|
<div className="mt-2">
|
||||||
<strong>{t("hosts.otherInstallMethods")}</strong>
|
<strong>{t("hosts.otherInstallMethods")}</strong>
|
||||||
@@ -1158,7 +1167,7 @@ export function HostManagerEditor({
|
|||||||
<code className="bg-muted px-1 rounded inline">
|
<code className="bg-muted px-1 rounded inline">
|
||||||
sudo yum install sshpass
|
sudo yum install sshpass
|
||||||
</code>{" "}
|
</code>{" "}
|
||||||
or{" "}
|
{t("hosts.or")}{" "}
|
||||||
<code className="bg-muted px-1 rounded inline">
|
<code className="bg-muted px-1 rounded inline">
|
||||||
sudo dnf install sshpass
|
sudo dnf install sshpass
|
||||||
</code>
|
</code>
|
||||||
|
|||||||
@@ -36,6 +36,22 @@ export const Terminal = forwardRef<any, SSHTerminalProps>(function SSHTerminal(
|
|||||||
},
|
},
|
||||||
ref,
|
ref,
|
||||||
) {
|
) {
|
||||||
|
// DEBUG: Add global JWT test function (only once)
|
||||||
|
if (typeof window !== 'undefined' && !(window as any).testJWT) {
|
||||||
|
(window as any).testJWT = () => {
|
||||||
|
const jwt = getCookie("jwt");
|
||||||
|
console.log("Manual JWT Test:", {
|
||||||
|
isElectron: isElectron(),
|
||||||
|
rawCookie: document.cookie,
|
||||||
|
localStorage: localStorage.getItem("jwt"),
|
||||||
|
getCookieResult: jwt,
|
||||||
|
jwtLength: jwt?.length || 0,
|
||||||
|
jwtFirst20: jwt?.substring(0, 20) || "empty"
|
||||||
|
});
|
||||||
|
return jwt;
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
const { t } = useTranslation();
|
const { t } = useTranslation();
|
||||||
const { instance: terminal, ref: xtermRef } = useXTerm();
|
const { instance: terminal, ref: xtermRef } = useXTerm();
|
||||||
const fitAddonRef = useRef<FitAddon | null>(null);
|
const fitAddonRef = useRef<FitAddon | null>(null);
|
||||||
@@ -47,6 +63,7 @@ export const Terminal = forwardRef<any, SSHTerminalProps>(function SSHTerminal(
|
|||||||
const [isConnected, setIsConnected] = useState(false);
|
const [isConnected, setIsConnected] = useState(false);
|
||||||
const [isConnecting, setIsConnecting] = useState(false);
|
const [isConnecting, setIsConnecting] = useState(false);
|
||||||
const [connectionError, setConnectionError] = useState<string | null>(null);
|
const [connectionError, setConnectionError] = useState<string | null>(null);
|
||||||
|
const [isAuthenticated, setIsAuthenticated] = useState(false);
|
||||||
const isVisibleRef = useRef<boolean>(false);
|
const isVisibleRef = useRef<boolean>(false);
|
||||||
const reconnectTimeoutRef = useRef<NodeJS.Timeout | null>(null);
|
const reconnectTimeoutRef = useRef<NodeJS.Timeout | null>(null);
|
||||||
const reconnectAttempts = useRef(0);
|
const reconnectAttempts = useRef(0);
|
||||||
@@ -54,6 +71,7 @@ export const Terminal = forwardRef<any, SSHTerminalProps>(function SSHTerminal(
|
|||||||
const isUnmountingRef = useRef(false);
|
const isUnmountingRef = useRef(false);
|
||||||
const shouldNotReconnectRef = useRef(false);
|
const shouldNotReconnectRef = useRef(false);
|
||||||
const isReconnectingRef = useRef(false);
|
const isReconnectingRef = useRef(false);
|
||||||
|
const isConnectingRef = useRef(false);
|
||||||
const connectionTimeoutRef = useRef<NodeJS.Timeout | null>(null);
|
const connectionTimeoutRef = useRef<NodeJS.Timeout | null>(null);
|
||||||
|
|
||||||
const lastSentSizeRef = useRef<{ cols: number; rows: number } | null>(null);
|
const lastSentSizeRef = useRef<{ cols: number; rows: number } | null>(null);
|
||||||
@@ -65,6 +83,36 @@ export const Terminal = forwardRef<any, SSHTerminalProps>(function SSHTerminal(
|
|||||||
isVisibleRef.current = isVisible;
|
isVisibleRef.current = isVisible;
|
||||||
}, [isVisible]);
|
}, [isVisible]);
|
||||||
|
|
||||||
|
// Monitor authentication state - Linus principle: explicit state management
|
||||||
|
useEffect(() => {
|
||||||
|
const checkAuth = () => {
|
||||||
|
const jwtToken = getCookie("jwt");
|
||||||
|
const isAuth = !!(jwtToken && jwtToken.trim() !== "");
|
||||||
|
|
||||||
|
// Only update state if it actually changed - prevent unnecessary re-renders
|
||||||
|
setIsAuthenticated(prev => {
|
||||||
|
if (prev !== isAuth) {
|
||||||
|
console.debug("Auth State Changed:", {
|
||||||
|
from: prev,
|
||||||
|
to: isAuth,
|
||||||
|
jwtPresent: !!jwtToken,
|
||||||
|
timestamp: new Date().toISOString()
|
||||||
|
});
|
||||||
|
return isAuth;
|
||||||
|
}
|
||||||
|
return prev; // No change, don't trigger re-render
|
||||||
|
});
|
||||||
|
};
|
||||||
|
|
||||||
|
// Check immediately
|
||||||
|
checkAuth();
|
||||||
|
|
||||||
|
// Reduced frequency - check every 5 seconds instead of every second
|
||||||
|
const authCheckInterval = setInterval(checkAuth, 5000);
|
||||||
|
|
||||||
|
return () => clearInterval(authCheckInterval);
|
||||||
|
}, []); // No dependencies - prevent infinite loop
|
||||||
|
|
||||||
function hardRefresh() {
|
function hardRefresh() {
|
||||||
try {
|
try {
|
||||||
if (terminal && typeof (terminal as any).refresh === "function") {
|
if (terminal && typeof (terminal as any).refresh === "function") {
|
||||||
@@ -139,10 +187,7 @@ export const Terminal = forwardRef<any, SSHTerminalProps>(function SSHTerminal(
|
|||||||
[terminal],
|
[terminal],
|
||||||
);
|
);
|
||||||
|
|
||||||
useEffect(() => {
|
// Resize handling moved to AppView to avoid conflicts - Linus principle: eliminate duplicate complexity
|
||||||
window.addEventListener("resize", handleWindowResize);
|
|
||||||
return () => window.removeEventListener("resize", handleWindowResize);
|
|
||||||
}, []);
|
|
||||||
|
|
||||||
function handleWindowResize() {
|
function handleWindowResize() {
|
||||||
if (!isVisibleRef.current) return;
|
if (!isVisibleRef.current) return;
|
||||||
@@ -159,8 +204,10 @@ export const Terminal = forwardRef<any, SSHTerminalProps>(function SSHTerminal(
|
|||||||
if (
|
if (
|
||||||
isUnmountingRef.current ||
|
isUnmountingRef.current ||
|
||||||
shouldNotReconnectRef.current ||
|
shouldNotReconnectRef.current ||
|
||||||
isReconnectingRef.current
|
isReconnectingRef.current ||
|
||||||
|
isConnectingRef.current
|
||||||
) {
|
) {
|
||||||
|
console.debug("Skipping reconnection - already in progress or blocked");
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -198,6 +245,15 @@ export const Terminal = forwardRef<any, SSHTerminalProps>(function SSHTerminal(
|
|||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Verify authentication before attempting reconnection
|
||||||
|
const jwtToken = getCookie("jwt");
|
||||||
|
if (!jwtToken || jwtToken.trim() === "") {
|
||||||
|
console.warn("Reconnection cancelled - no authentication token");
|
||||||
|
isReconnectingRef.current = false;
|
||||||
|
setConnectionError("Authentication required for reconnection");
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
if (terminal && hostConfig) {
|
if (terminal && hostConfig) {
|
||||||
terminal.clear();
|
terminal.clear();
|
||||||
const cols = terminal.cols;
|
const cols = terminal.cols;
|
||||||
@@ -210,14 +266,45 @@ export const Terminal = forwardRef<any, SSHTerminalProps>(function SSHTerminal(
|
|||||||
}
|
}
|
||||||
|
|
||||||
function connectToHost(cols: number, rows: number) {
|
function connectToHost(cols: number, rows: number) {
|
||||||
|
// Prevent duplicate connections - Linus principle: fail fast
|
||||||
|
if (isConnectingRef.current) {
|
||||||
|
console.debug("Skipping connection - already connecting");
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
isConnectingRef.current = true;
|
||||||
|
|
||||||
const isDev =
|
const isDev =
|
||||||
process.env.NODE_ENV === "development" &&
|
process.env.NODE_ENV === "development" &&
|
||||||
(window.location.port === "3000" ||
|
(window.location.port === "3000" ||
|
||||||
window.location.port === "5173" ||
|
window.location.port === "5173" ||
|
||||||
window.location.port === "");
|
window.location.port === "");
|
||||||
|
|
||||||
const wsUrl = isDev
|
// Get JWT token for WebSocket authentication (from cookie, not localStorage)
|
||||||
? "ws://localhost:8082"
|
const jwtToken = getCookie("jwt");
|
||||||
|
|
||||||
|
// DEBUG: Log authentication issues only
|
||||||
|
if (!jwtToken || jwtToken.trim() === "") {
|
||||||
|
console.debug("JWT Debug Info:", {
|
||||||
|
isElectron: isElectron(),
|
||||||
|
rawCookie: isElectron() ? localStorage.getItem("jwt") : document.cookie,
|
||||||
|
jwtToken: jwtToken,
|
||||||
|
isEmpty: true
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!jwtToken || jwtToken.trim() === "") {
|
||||||
|
console.error("No JWT token available for WebSocket connection");
|
||||||
|
setIsConnected(false);
|
||||||
|
setIsConnecting(false);
|
||||||
|
setConnectionError("Authentication required");
|
||||||
|
isConnectingRef.current = false; // Reset on auth failure
|
||||||
|
// Don't show toast here - let auth system handle it
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
const baseWsUrl = isDev
|
||||||
|
? `${window.location.protocol === "https:" ? "wss" : "ws"}://localhost:8082`
|
||||||
: isElectron()
|
: isElectron()
|
||||||
? (() => {
|
? (() => {
|
||||||
const baseUrl =
|
const baseUrl =
|
||||||
@@ -226,9 +313,37 @@ export const Terminal = forwardRef<any, SSHTerminalProps>(function SSHTerminal(
|
|||||||
? "wss://"
|
? "wss://"
|
||||||
: "ws://";
|
: "ws://";
|
||||||
const wsHost = baseUrl.replace(/^https?:\/\//, "");
|
const wsHost = baseUrl.replace(/^https?:\/\//, "");
|
||||||
return `${wsProtocol}${wsHost}/ssh/websocket/`;
|
return `${wsProtocol}${wsHost.replace(':8081', ':8082')}/`;
|
||||||
})()
|
})()
|
||||||
: `${window.location.protocol === "https:" ? "wss" : "ws"}://${window.location.host}/ssh/websocket/`;
|
: `${window.location.protocol === "https:" ? "wss" : "ws"}://${window.location.hostname}:8082/`;
|
||||||
|
|
||||||
|
// Clean up existing connection to prevent duplicates - Linus principle: eliminate complexity
|
||||||
|
if (webSocketRef.current && webSocketRef.current.readyState !== WebSocket.CLOSED) {
|
||||||
|
console.log("Closing existing WebSocket connection before creating new one");
|
||||||
|
webSocketRef.current.close();
|
||||||
|
}
|
||||||
|
|
||||||
|
// Clear existing intervals/timeouts
|
||||||
|
if (pingIntervalRef.current) {
|
||||||
|
clearInterval(pingIntervalRef.current);
|
||||||
|
pingIntervalRef.current = null;
|
||||||
|
}
|
||||||
|
if (connectionTimeoutRef.current) {
|
||||||
|
clearTimeout(connectionTimeoutRef.current);
|
||||||
|
connectionTimeoutRef.current = null;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Add JWT token as query parameter for authentication
|
||||||
|
const wsUrl = `${baseWsUrl}?token=${encodeURIComponent(jwtToken)}`;
|
||||||
|
|
||||||
|
// DEBUG: Log WebSocket connection details
|
||||||
|
console.log("Creating WebSocket connection:", {
|
||||||
|
baseWsUrl,
|
||||||
|
jwtTokenLength: jwtToken.length,
|
||||||
|
jwtTokenStart: jwtToken.substring(0, 20),
|
||||||
|
encodedTokenLength: encodeURIComponent(jwtToken).length,
|
||||||
|
wsUrl: wsUrl.length > 100 ? `${wsUrl.substring(0, 100)}...` : wsUrl
|
||||||
|
});
|
||||||
|
|
||||||
const ws = new WebSocket(wsUrl);
|
const ws = new WebSocket(wsUrl);
|
||||||
webSocketRef.current = ws;
|
webSocketRef.current = ws;
|
||||||
@@ -324,6 +439,7 @@ export const Terminal = forwardRef<any, SSHTerminalProps>(function SSHTerminal(
|
|||||||
} else if (msg.type === "connected") {
|
} else if (msg.type === "connected") {
|
||||||
setIsConnected(true);
|
setIsConnected(true);
|
||||||
setIsConnecting(false);
|
setIsConnecting(false);
|
||||||
|
isConnectingRef.current = false; // Clear connecting state
|
||||||
if (connectionTimeoutRef.current) {
|
if (connectionTimeoutRef.current) {
|
||||||
clearTimeout(connectionTimeoutRef.current);
|
clearTimeout(connectionTimeoutRef.current);
|
||||||
connectionTimeoutRef.current = null;
|
connectionTimeoutRef.current = null;
|
||||||
@@ -351,9 +467,28 @@ export const Terminal = forwardRef<any, SSHTerminalProps>(function SSHTerminal(
|
|||||||
|
|
||||||
ws.addEventListener("close", (event) => {
|
ws.addEventListener("close", (event) => {
|
||||||
setIsConnected(false);
|
setIsConnected(false);
|
||||||
|
isConnectingRef.current = false; // Clear connecting state
|
||||||
if (terminal) {
|
if (terminal) {
|
||||||
terminal.clear();
|
terminal.clear();
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Handle authentication errors (code 1008)
|
||||||
|
if (event.code === 1008) {
|
||||||
|
console.error("WebSocket authentication failed:", event.reason);
|
||||||
|
setConnectionError("Authentication failed - please re-login");
|
||||||
|
setIsConnecting(false);
|
||||||
|
shouldNotReconnectRef.current = true;
|
||||||
|
|
||||||
|
// Clear invalid JWT token
|
||||||
|
localStorage.removeItem("jwt");
|
||||||
|
|
||||||
|
// Show authentication error message
|
||||||
|
toast.error("Authentication failed. Please log in again.");
|
||||||
|
|
||||||
|
// Don't attempt to reconnect on auth failure
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
setIsConnecting(true);
|
setIsConnecting(true);
|
||||||
if (
|
if (
|
||||||
!wasDisconnectedBySSH.current &&
|
!wasDisconnectedBySSH.current &&
|
||||||
@@ -366,6 +501,7 @@ export const Terminal = forwardRef<any, SSHTerminalProps>(function SSHTerminal(
|
|||||||
|
|
||||||
ws.addEventListener("error", (event) => {
|
ws.addEventListener("error", (event) => {
|
||||||
setIsConnected(false);
|
setIsConnected(false);
|
||||||
|
isConnectingRef.current = false; // Clear connecting state
|
||||||
setConnectionError(t("terminal.websocketError"));
|
setConnectionError(t("terminal.websocketError"));
|
||||||
if (terminal) {
|
if (terminal) {
|
||||||
terminal.clear();
|
terminal.clear();
|
||||||
@@ -410,6 +546,12 @@ export const Terminal = forwardRef<any, SSHTerminalProps>(function SSHTerminal(
|
|||||||
useEffect(() => {
|
useEffect(() => {
|
||||||
if (!terminal || !xtermRef.current || !hostConfig) return;
|
if (!terminal || !xtermRef.current || !hostConfig) return;
|
||||||
|
|
||||||
|
// Critical auth check - prevent terminal setup without authentication - Linus principle: fail fast
|
||||||
|
if (!isAuthenticated) {
|
||||||
|
console.debug("Terminal setup delayed - waiting for authentication");
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
terminal.options = {
|
terminal.options = {
|
||||||
cursorBlink: true,
|
cursorBlink: true,
|
||||||
cursorStyle: "bar",
|
cursorStyle: "bar",
|
||||||
@@ -515,33 +657,55 @@ export const Terminal = forwardRef<any, SSHTerminalProps>(function SSHTerminal(
|
|||||||
fitAddonRef.current?.fit();
|
fitAddonRef.current?.fit();
|
||||||
if (terminal) scheduleNotify(terminal.cols, terminal.rows);
|
if (terminal) scheduleNotify(terminal.cols, terminal.rows);
|
||||||
hardRefresh();
|
hardRefresh();
|
||||||
}, 100);
|
}, 150); // Increased debounce for better stability
|
||||||
});
|
});
|
||||||
|
|
||||||
resizeObserver.observe(xtermRef.current);
|
resizeObserver.observe(xtermRef.current);
|
||||||
|
|
||||||
|
// Show terminal immediately - better UX, no unnecessary delays
|
||||||
|
setVisible(true);
|
||||||
|
|
||||||
const readyFonts =
|
const readyFonts =
|
||||||
(document as any).fonts?.ready instanceof Promise
|
(document as any).fonts?.ready instanceof Promise
|
||||||
? (document as any).fonts.ready
|
? (document as any).fonts.ready
|
||||||
: Promise.resolve();
|
: Promise.resolve();
|
||||||
|
|
||||||
readyFonts.then(() => {
|
readyFonts.then(() => {
|
||||||
setTimeout(() => {
|
// Fixed delay and authentication check - Linus principle: eliminate race conditions
|
||||||
fitAddon.fit();
|
|
||||||
setTimeout(() => {
|
setTimeout(() => {
|
||||||
fitAddon.fit();
|
fitAddon.fit();
|
||||||
if (terminal) scheduleNotify(terminal.cols, terminal.rows);
|
if (terminal) scheduleNotify(terminal.cols, terminal.rows);
|
||||||
hardRefresh();
|
hardRefresh();
|
||||||
setVisible(true);
|
|
||||||
if (terminal && !splitScreen) {
|
if (terminal && !splitScreen) {
|
||||||
terminal.focus();
|
terminal.focus();
|
||||||
}
|
}
|
||||||
}, 0);
|
|
||||||
|
// Verify authentication before attempting WebSocket connection
|
||||||
|
const jwtToken = getCookie("jwt");
|
||||||
|
|
||||||
|
// DEBUG: Log only authentication failures
|
||||||
|
if (!jwtToken || jwtToken.trim() === "") {
|
||||||
|
console.debug("ReadyFonts Auth Check Failed:", {
|
||||||
|
isAuthenticated: isAuthenticated,
|
||||||
|
jwtPresent: !!jwtToken
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!jwtToken || jwtToken.trim() === "") {
|
||||||
|
console.warn("WebSocket connection delayed - no authentication token");
|
||||||
|
setIsConnected(false);
|
||||||
|
setIsConnecting(false);
|
||||||
|
setConnectionError("Authentication required");
|
||||||
|
// Don't show toast here - let auth system handle it
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
const cols = terminal.cols;
|
const cols = terminal.cols;
|
||||||
const rows = terminal.rows;
|
const rows = terminal.rows;
|
||||||
|
|
||||||
connectToHost(cols, rows);
|
connectToHost(cols, rows);
|
||||||
}, 300);
|
}, 200); // Increased from 100ms to 200ms for auth stability
|
||||||
});
|
});
|
||||||
|
|
||||||
return () => {
|
return () => {
|
||||||
@@ -564,7 +728,7 @@ export const Terminal = forwardRef<any, SSHTerminalProps>(function SSHTerminal(
|
|||||||
}
|
}
|
||||||
webSocketRef.current?.close();
|
webSocketRef.current?.close();
|
||||||
};
|
};
|
||||||
}, [xtermRef, terminal, hostConfig]);
|
}, [xtermRef, terminal, hostConfig]); // Removed isAuthenticated to prevent infinite loop
|
||||||
|
|
||||||
useEffect(() => {
|
useEffect(() => {
|
||||||
if (isVisible && fitAddonRef.current) {
|
if (isVisible && fitAddonRef.current) {
|
||||||
|
|||||||
@@ -13,7 +13,7 @@ import {
|
|||||||
getUserInfo,
|
getUserInfo,
|
||||||
getRegistrationAllowed,
|
getRegistrationAllowed,
|
||||||
getOIDCConfig,
|
getOIDCConfig,
|
||||||
getUserCount,
|
getSetupRequired,
|
||||||
initiatePasswordReset,
|
initiatePasswordReset,
|
||||||
verifyPasswordResetCode,
|
verifyPasswordResetCode,
|
||||||
completePasswordReset,
|
completePasswordReset,
|
||||||
@@ -124,9 +124,9 @@ export function HomepageAuth({
|
|||||||
}, []);
|
}, []);
|
||||||
|
|
||||||
useEffect(() => {
|
useEffect(() => {
|
||||||
getUserCount()
|
getSetupRequired()
|
||||||
.then((res) => {
|
.then((res) => {
|
||||||
if (res.count === 0) {
|
if (res.setup_required) {
|
||||||
setFirstUser(true);
|
setFirstUser(true);
|
||||||
setTab("signup");
|
setTab("signup");
|
||||||
} else {
|
} else {
|
||||||
@@ -182,6 +182,17 @@ export function HomepageAuth({
|
|||||||
}
|
}
|
||||||
|
|
||||||
setCookie("jwt", res.token);
|
setCookie("jwt", res.token);
|
||||||
|
|
||||||
|
// DEBUG: Verify JWT was set correctly
|
||||||
|
const verifyJWT = getCookie("jwt");
|
||||||
|
console.log("JWT Set Debug:", {
|
||||||
|
originalToken: res.token.substring(0, 20) + "...",
|
||||||
|
retrievedToken: verifyJWT ? verifyJWT.substring(0, 20) + "..." : null,
|
||||||
|
match: res.token === verifyJWT,
|
||||||
|
tokenLength: res.token.length,
|
||||||
|
retrievedLength: verifyJWT?.length || 0
|
||||||
|
});
|
||||||
|
|
||||||
[meRes] = await Promise.all([getUserInfo()]);
|
[meRes] = await Promise.all([getUserInfo()]);
|
||||||
|
|
||||||
setInternalLoggedIn(true);
|
setInternalLoggedIn(true);
|
||||||
|
|||||||
@@ -11,7 +11,8 @@ import { ClipboardAddon } from "@xterm/addon-clipboard";
|
|||||||
import { Unicode11Addon } from "@xterm/addon-unicode11";
|
import { Unicode11Addon } from "@xterm/addon-unicode11";
|
||||||
import { WebLinksAddon } from "@xterm/addon-web-links";
|
import { WebLinksAddon } from "@xterm/addon-web-links";
|
||||||
import { useTranslation } from "react-i18next";
|
import { useTranslation } from "react-i18next";
|
||||||
import { isElectron } from "@/ui/main-axios.ts";
|
import { isElectron, getCookie } from "@/ui/main-axios.ts";
|
||||||
|
import { toast } from "sonner";
|
||||||
|
|
||||||
interface SSHTerminalProps {
|
interface SSHTerminalProps {
|
||||||
hostConfig: any;
|
hostConfig: any;
|
||||||
@@ -31,7 +32,12 @@ export const Terminal = forwardRef<any, SSHTerminalProps>(function SSHTerminal(
|
|||||||
const wasDisconnectedBySSH = useRef(false);
|
const wasDisconnectedBySSH = useRef(false);
|
||||||
const pingIntervalRef = useRef<NodeJS.Timeout | null>(null);
|
const pingIntervalRef = useRef<NodeJS.Timeout | null>(null);
|
||||||
const [visible, setVisible] = useState(false);
|
const [visible, setVisible] = useState(false);
|
||||||
|
const [isConnected, setIsConnected] = useState(false);
|
||||||
|
const [isConnecting, setIsConnecting] = useState(false);
|
||||||
|
const [connectionError, setConnectionError] = useState<string | null>(null);
|
||||||
|
const [isAuthenticated, setIsAuthenticated] = useState(false);
|
||||||
const isVisibleRef = useRef<boolean>(false);
|
const isVisibleRef = useRef<boolean>(false);
|
||||||
|
const isConnectingRef = useRef(false);
|
||||||
|
|
||||||
const lastSentSizeRef = useRef<{ cols: number; rows: number } | null>(null);
|
const lastSentSizeRef = useRef<{ cols: number; rows: number } | null>(null);
|
||||||
const pendingSizeRef = useRef<{ cols: number; rows: number } | null>(null);
|
const pendingSizeRef = useRef<{ cols: number; rows: number } | null>(null);
|
||||||
@@ -42,6 +48,36 @@ export const Terminal = forwardRef<any, SSHTerminalProps>(function SSHTerminal(
|
|||||||
isVisibleRef.current = isVisible;
|
isVisibleRef.current = isVisible;
|
||||||
}, [isVisible]);
|
}, [isVisible]);
|
||||||
|
|
||||||
|
// Monitor authentication state - Linus principle: explicit state management
|
||||||
|
useEffect(() => {
|
||||||
|
const checkAuth = () => {
|
||||||
|
const jwtToken = getCookie("jwt");
|
||||||
|
const isAuth = !!(jwtToken && jwtToken.trim() !== "");
|
||||||
|
|
||||||
|
// Only update state if it actually changed - prevent unnecessary re-renders
|
||||||
|
setIsAuthenticated(prev => {
|
||||||
|
if (prev !== isAuth) {
|
||||||
|
console.debug("Mobile Auth State Changed:", {
|
||||||
|
from: prev,
|
||||||
|
to: isAuth,
|
||||||
|
jwtPresent: !!jwtToken,
|
||||||
|
timestamp: new Date().toISOString()
|
||||||
|
});
|
||||||
|
return isAuth;
|
||||||
|
}
|
||||||
|
return prev; // No change, don't trigger re-render
|
||||||
|
});
|
||||||
|
};
|
||||||
|
|
||||||
|
// Check immediately
|
||||||
|
checkAuth();
|
||||||
|
|
||||||
|
// Reduced frequency - check every 5 seconds instead of every second
|
||||||
|
const authCheckInterval = setInterval(checkAuth, 5000);
|
||||||
|
|
||||||
|
return () => clearInterval(authCheckInterval);
|
||||||
|
}, []); // No dependencies - prevent infinite loop
|
||||||
|
|
||||||
function hardRefresh() {
|
function hardRefresh() {
|
||||||
try {
|
try {
|
||||||
if (terminal && typeof (terminal as any).refresh === "function") {
|
if (terminal && typeof (terminal as any).refresh === "function") {
|
||||||
@@ -103,10 +139,7 @@ export const Terminal = forwardRef<any, SSHTerminalProps>(function SSHTerminal(
|
|||||||
[terminal],
|
[terminal],
|
||||||
);
|
);
|
||||||
|
|
||||||
useEffect(() => {
|
// Resize handling optimized to avoid conflicts - Linus principle: eliminate duplicate complexity
|
||||||
window.addEventListener("resize", handleWindowResize);
|
|
||||||
return () => window.removeEventListener("resize", handleWindowResize);
|
|
||||||
}, []);
|
|
||||||
|
|
||||||
function handleWindowResize() {
|
function handleWindowResize() {
|
||||||
if (!isVisibleRef.current) return;
|
if (!isVisibleRef.current) return;
|
||||||
@@ -141,8 +174,10 @@ export const Terminal = forwardRef<any, SSHTerminalProps>(function SSHTerminal(
|
|||||||
else if (msg.type === "error")
|
else if (msg.type === "error")
|
||||||
terminal.writeln(`\r\n[${t("terminal.error")}] ${msg.message}`);
|
terminal.writeln(`\r\n[${t("terminal.error")}] ${msg.message}`);
|
||||||
else if (msg.type === "connected") {
|
else if (msg.type === "connected") {
|
||||||
|
isConnectingRef.current = false; // Clear connecting state
|
||||||
} else if (msg.type === "disconnected") {
|
} else if (msg.type === "disconnected") {
|
||||||
wasDisconnectedBySSH.current = true;
|
wasDisconnectedBySSH.current = true;
|
||||||
|
isConnectingRef.current = false; // Clear connecting state
|
||||||
terminal.writeln(
|
terminal.writeln(
|
||||||
`\r\n[${msg.message || t("terminal.disconnected")}]`,
|
`\r\n[${msg.message || t("terminal.disconnected")}]`,
|
||||||
);
|
);
|
||||||
@@ -150,13 +185,28 @@ export const Terminal = forwardRef<any, SSHTerminalProps>(function SSHTerminal(
|
|||||||
} catch (error) {}
|
} catch (error) {}
|
||||||
});
|
});
|
||||||
|
|
||||||
ws.addEventListener("close", () => {
|
ws.addEventListener("close", (event) => {
|
||||||
|
isConnectingRef.current = false; // Clear connecting state
|
||||||
|
|
||||||
|
// Handle authentication errors (code 1008)
|
||||||
|
if (event.code === 1008) {
|
||||||
|
console.error("WebSocket authentication failed:", event.reason);
|
||||||
|
terminal.writeln(`\r\n[Authentication failed - please re-login]`);
|
||||||
|
|
||||||
|
// Clear invalid JWT token
|
||||||
|
localStorage.removeItem("jwt");
|
||||||
|
|
||||||
|
// Don't attempt to reconnect on auth failure
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
if (!wasDisconnectedBySSH.current) {
|
if (!wasDisconnectedBySSH.current) {
|
||||||
terminal.writeln(`\r\n[${t("terminal.connectionClosed")}]`);
|
terminal.writeln(`\r\n[${t("terminal.connectionClosed")}]`);
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
ws.addEventListener("error", () => {
|
ws.addEventListener("error", () => {
|
||||||
|
isConnectingRef.current = false; // Clear connecting state
|
||||||
terminal.writeln(`\r\n[${t("terminal.connectionError")}]`);
|
terminal.writeln(`\r\n[${t("terminal.connectionError")}]`);
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
@@ -164,6 +214,12 @@ export const Terminal = forwardRef<any, SSHTerminalProps>(function SSHTerminal(
|
|||||||
useEffect(() => {
|
useEffect(() => {
|
||||||
if (!terminal || !xtermRef.current || !hostConfig) return;
|
if (!terminal || !xtermRef.current || !hostConfig) return;
|
||||||
|
|
||||||
|
// Critical auth check - prevent terminal setup without authentication - Linus principle: fail fast
|
||||||
|
if (!isAuthenticated) {
|
||||||
|
console.debug("Terminal setup delayed - waiting for authentication");
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
terminal.options = {
|
terminal.options = {
|
||||||
cursorBlink: false,
|
cursorBlink: false,
|
||||||
cursorStyle: "bar",
|
cursorStyle: "bar",
|
||||||
@@ -215,7 +271,7 @@ export const Terminal = forwardRef<any, SSHTerminalProps>(function SSHTerminal(
|
|||||||
fitAddonRef.current?.fit();
|
fitAddonRef.current?.fit();
|
||||||
if (terminal) scheduleNotify(terminal.cols, terminal.rows);
|
if (terminal) scheduleNotify(terminal.cols, terminal.rows);
|
||||||
hardRefresh();
|
hardRefresh();
|
||||||
}, 100);
|
}, 150); // Increased debounce for better stability
|
||||||
});
|
});
|
||||||
|
|
||||||
resizeObserver.observe(xtermRef.current);
|
resizeObserver.observe(xtermRef.current);
|
||||||
@@ -224,15 +280,26 @@ export const Terminal = forwardRef<any, SSHTerminalProps>(function SSHTerminal(
|
|||||||
(document as any).fonts?.ready instanceof Promise
|
(document as any).fonts?.ready instanceof Promise
|
||||||
? (document as any).fonts.ready
|
? (document as any).fonts.ready
|
||||||
: Promise.resolve();
|
: Promise.resolve();
|
||||||
|
// Show terminal immediately - better UX for mobile
|
||||||
|
setVisible(true);
|
||||||
|
|
||||||
readyFonts.then(() => {
|
readyFonts.then(() => {
|
||||||
setTimeout(() => {
|
// Fixed delay and authentication check - Linus principle: eliminate race conditions
|
||||||
fitAddon.fit();
|
|
||||||
setTimeout(() => {
|
setTimeout(() => {
|
||||||
fitAddon.fit();
|
fitAddon.fit();
|
||||||
if (terminal) scheduleNotify(terminal.cols, terminal.rows);
|
if (terminal) scheduleNotify(terminal.cols, terminal.rows);
|
||||||
hardRefresh();
|
hardRefresh();
|
||||||
setVisible(true);
|
|
||||||
}, 0);
|
// Verify authentication before attempting WebSocket connection
|
||||||
|
const jwtToken = getCookie("jwt");
|
||||||
|
if (!jwtToken || jwtToken.trim() === "") {
|
||||||
|
console.warn("WebSocket connection delayed - no authentication token");
|
||||||
|
setIsConnected(false);
|
||||||
|
setIsConnecting(false);
|
||||||
|
setConnectionError("Authentication required");
|
||||||
|
// Don't show toast here - let auth system handle it
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
const cols = terminal.cols;
|
const cols = terminal.cols;
|
||||||
const rows = terminal.rows;
|
const rows = terminal.rows;
|
||||||
@@ -243,8 +310,8 @@ export const Terminal = forwardRef<any, SSHTerminalProps>(function SSHTerminal(
|
|||||||
window.location.port === "5173" ||
|
window.location.port === "5173" ||
|
||||||
window.location.port === "");
|
window.location.port === "");
|
||||||
|
|
||||||
const wsUrl = isDev
|
const baseWsUrl = isDev
|
||||||
? "ws://localhost:8082"
|
? `${window.location.protocol === "https:" ? "wss" : "ws"}://localhost:8082`
|
||||||
: isElectron()
|
: isElectron()
|
||||||
? (() => {
|
? (() => {
|
||||||
const baseUrl =
|
const baseUrl =
|
||||||
@@ -254,16 +321,42 @@ export const Terminal = forwardRef<any, SSHTerminalProps>(function SSHTerminal(
|
|||||||
? "wss://"
|
? "wss://"
|
||||||
: "ws://";
|
: "ws://";
|
||||||
const wsHost = baseUrl.replace(/^https?:\/\//, "");
|
const wsHost = baseUrl.replace(/^https?:\/\//, "");
|
||||||
return `${wsProtocol}${wsHost}/ssh/websocket/`;
|
return `${wsProtocol}${wsHost.replace(':8081', ':8082')}/ssh/websocket/`;
|
||||||
})()
|
})()
|
||||||
: `${window.location.protocol === "https:" ? "wss" : "ws"}://${window.location.host}/ssh/websocket/`;
|
: `${window.location.protocol === "https:" ? "wss" : "ws"}://${window.location.host}/ssh/websocket/`;
|
||||||
|
|
||||||
|
// Prevent duplicate connections - Linus principle: fail fast
|
||||||
|
if (isConnectingRef.current) {
|
||||||
|
console.debug("Skipping connection - already connecting");
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
isConnectingRef.current = true;
|
||||||
|
|
||||||
|
// Clean up existing connection to prevent duplicates - Linus principle: eliminate complexity
|
||||||
|
if (webSocketRef.current && webSocketRef.current.readyState !== WebSocket.CLOSED) {
|
||||||
|
console.log("Closing existing WebSocket connection before creating new one");
|
||||||
|
webSocketRef.current.close();
|
||||||
|
}
|
||||||
|
|
||||||
|
// Clear existing ping interval
|
||||||
|
if (pingIntervalRef.current) {
|
||||||
|
clearInterval(pingIntervalRef.current);
|
||||||
|
pingIntervalRef.current = null;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Add JWT token as query parameter for authentication
|
||||||
|
const wsUrl = `${baseWsUrl}?token=${encodeURIComponent(jwtToken)}`;
|
||||||
|
|
||||||
|
setIsConnecting(true);
|
||||||
|
setConnectionError(null);
|
||||||
|
|
||||||
const ws = new WebSocket(wsUrl);
|
const ws = new WebSocket(wsUrl);
|
||||||
webSocketRef.current = ws;
|
webSocketRef.current = ws;
|
||||||
wasDisconnectedBySSH.current = false;
|
wasDisconnectedBySSH.current = false;
|
||||||
|
|
||||||
setupWebSocketListeners(ws, cols, rows);
|
setupWebSocketListeners(ws, cols, rows);
|
||||||
}, 300);
|
}, 200); // Increased from 100ms to 200ms for auth stability
|
||||||
});
|
});
|
||||||
|
|
||||||
return () => {
|
return () => {
|
||||||
@@ -276,7 +369,7 @@ export const Terminal = forwardRef<any, SSHTerminalProps>(function SSHTerminal(
|
|||||||
}
|
}
|
||||||
webSocketRef.current?.close();
|
webSocketRef.current?.close();
|
||||||
};
|
};
|
||||||
}, [xtermRef, terminal, hostConfig]);
|
}, [xtermRef, terminal, hostConfig]); // Removed isAuthenticated to prevent infinite loop
|
||||||
|
|
||||||
useEffect(() => {
|
useEffect(() => {
|
||||||
if (isVisible && fitAddonRef.current) {
|
if (isVisible && fitAddonRef.current) {
|
||||||
|
|||||||
@@ -12,7 +12,7 @@ import {
|
|||||||
getUserInfo,
|
getUserInfo,
|
||||||
getRegistrationAllowed,
|
getRegistrationAllowed,
|
||||||
getOIDCConfig,
|
getOIDCConfig,
|
||||||
getUserCount,
|
getSetupRequired,
|
||||||
initiatePasswordReset,
|
initiatePasswordReset,
|
||||||
verifyPasswordResetCode,
|
verifyPasswordResetCode,
|
||||||
completePasswordReset,
|
completePasswordReset,
|
||||||
@@ -111,9 +111,9 @@ export function HomepageAuth({
|
|||||||
}, []);
|
}, []);
|
||||||
|
|
||||||
useEffect(() => {
|
useEffect(() => {
|
||||||
getUserCount()
|
getSetupRequired()
|
||||||
.then((res) => {
|
.then((res) => {
|
||||||
if (res.count === 0) {
|
if (res.setup_required) {
|
||||||
setFirstUser(true);
|
setFirstUser(true);
|
||||||
setTab("signup");
|
setTab("signup");
|
||||||
} else {
|
} else {
|
||||||
|
|||||||
@@ -1,5 +1,6 @@
|
|||||||
import React from "react";
|
import React from "react";
|
||||||
import { cn } from "@/lib/utils";
|
import { cn } from "@/lib/utils";
|
||||||
|
import { useTranslation } from "react-i18next";
|
||||||
import {
|
import {
|
||||||
Download,
|
Download,
|
||||||
FileDown,
|
FileDown,
|
||||||
@@ -30,6 +31,8 @@ export function DragIndicator({
|
|||||||
error,
|
error,
|
||||||
className,
|
className,
|
||||||
}: DragIndicatorProps) {
|
}: DragIndicatorProps) {
|
||||||
|
const { t } = useTranslation();
|
||||||
|
|
||||||
if (!isVisible) return null;
|
if (!isVisible) return null;
|
||||||
|
|
||||||
const getIcon = () => {
|
const getIcon = () => {
|
||||||
@@ -54,18 +57,22 @@ export function DragIndicator({
|
|||||||
|
|
||||||
const getStatusText = () => {
|
const getStatusText = () => {
|
||||||
if (error) {
|
if (error) {
|
||||||
return `错误: ${error}`;
|
return t("dragIndicator.error", { error });
|
||||||
}
|
}
|
||||||
|
|
||||||
if (isDragging) {
|
if (isDragging) {
|
||||||
return `正在拖拽${fileName ? ` ${fileName}` : ""}到桌面...`;
|
return t("dragIndicator.dragging", { fileName: fileName || "" });
|
||||||
}
|
}
|
||||||
|
|
||||||
if (isDownloading) {
|
if (isDownloading) {
|
||||||
return `正在准备拖拽${fileName ? ` ${fileName}` : ""}...`;
|
return t("dragIndicator.preparing", { fileName: fileName || "" });
|
||||||
}
|
}
|
||||||
|
|
||||||
return `准备拖拽${fileCount > 1 ? ` ${fileCount} 个文件` : fileName ? ` ${fileName}` : ""}`;
|
if (fileCount > 1) {
|
||||||
|
return t("dragIndicator.readyMultiple", { count: fileCount });
|
||||||
|
}
|
||||||
|
|
||||||
|
return t("dragIndicator.readySingle", { fileName: fileName || "" });
|
||||||
};
|
};
|
||||||
|
|
||||||
return (
|
return (
|
||||||
@@ -79,17 +86,17 @@ export function DragIndicator({
|
|||||||
)}
|
)}
|
||||||
>
|
>
|
||||||
<div className="flex items-start gap-3">
|
<div className="flex items-start gap-3">
|
||||||
{/* 图标 */}
|
{/* Icon */}
|
||||||
<div className="flex-shrink-0 mt-0.5">{getIcon()}</div>
|
<div className="flex-shrink-0 mt-0.5">{getIcon()}</div>
|
||||||
|
|
||||||
{/* 内容 */}
|
{/* Content */}
|
||||||
<div className="flex-1 min-w-0">
|
<div className="flex-1 min-w-0">
|
||||||
{/* 标题 */}
|
{/* Title */}
|
||||||
<div className="text-sm font-medium text-foreground mb-2">
|
<div className="text-sm font-medium text-foreground mb-2">
|
||||||
{fileCount > 1 ? "批量拖拽到桌面" : "拖拽到桌面"}
|
{fileCount > 1 ? t("dragIndicator.batchDrag") : t("dragIndicator.dragToDesktop")}
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
{/* 状态文字 */}
|
{/* Status text */}
|
||||||
<div
|
<div
|
||||||
className={cn(
|
className={cn(
|
||||||
"text-xs mb-3",
|
"text-xs mb-3",
|
||||||
@@ -103,7 +110,7 @@ export function DragIndicator({
|
|||||||
{getStatusText()}
|
{getStatusText()}
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
{/* 进度条 */}
|
{/* Progress bar */}
|
||||||
{(isDownloading || isDragging) && !error && (
|
{(isDownloading || isDragging) && !error && (
|
||||||
<div className="w-full bg-dark-border rounded-full h-2 mb-2">
|
<div className="w-full bg-dark-border rounded-full h-2 mb-2">
|
||||||
<div
|
<div
|
||||||
@@ -116,24 +123,24 @@ export function DragIndicator({
|
|||||||
</div>
|
</div>
|
||||||
)}
|
)}
|
||||||
|
|
||||||
{/* 进度百分比 */}
|
{/* Progress percentage */}
|
||||||
{(isDownloading || isDragging) && !error && (
|
{(isDownloading || isDragging) && !error && (
|
||||||
<div className="text-xs text-muted-foreground">
|
<div className="text-xs text-muted-foreground">
|
||||||
{progress.toFixed(0)}%
|
{progress.toFixed(0)}%
|
||||||
</div>
|
</div>
|
||||||
)}
|
)}
|
||||||
|
|
||||||
{/* 拖拽提示 */}
|
{/* Drag hint */}
|
||||||
{isDragging && !error && (
|
{isDragging && !error && (
|
||||||
<div className="text-xs text-green-500 mt-2 flex items-center gap-1">
|
<div className="text-xs text-green-500 mt-2 flex items-center gap-1">
|
||||||
<Download className="w-3 h-3" />
|
<Download className="w-3 h-3" />
|
||||||
现在可以拖拽到桌面任意位置
|
{t("dragIndicator.canDragAnywhere")}
|
||||||
</div>
|
</div>
|
||||||
)}
|
)}
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
{/* 动画效果的背景 */}
|
{/* Background with animation effect */}
|
||||||
{isDragging && !error && (
|
{isDragging && !error && (
|
||||||
<div className="absolute inset-0 rounded-lg bg-green-500/5 animate-pulse" />
|
<div className="absolute inset-0 rounded-lg bg-green-500/5 animate-pulse" />
|
||||||
)}
|
)}
|
||||||
|
|||||||
@@ -32,7 +32,7 @@ export function useDragToDesktop({
|
|||||||
error: null,
|
error: null,
|
||||||
});
|
});
|
||||||
|
|
||||||
// 检查是否在Electron环境中
|
// Check if running in Electron environment
|
||||||
const isElectron = () => {
|
const isElectron = () => {
|
||||||
return (
|
return (
|
||||||
typeof window !== "undefined" &&
|
typeof window !== "undefined" &&
|
||||||
@@ -41,20 +41,20 @@ export function useDragToDesktop({
|
|||||||
);
|
);
|
||||||
};
|
};
|
||||||
|
|
||||||
// 拖拽单个文件到桌面
|
// Drag single file to desktop
|
||||||
const dragFileToDesktop = useCallback(
|
const dragFileToDesktop = useCallback(
|
||||||
async (file: FileItem, options: DragToDesktopOptions = {}) => {
|
async (file: FileItem, options: DragToDesktopOptions = {}) => {
|
||||||
const { enableToast = true, onSuccess, onError } = options;
|
const { enableToast = true, onSuccess, onError } = options;
|
||||||
|
|
||||||
if (!isElectron()) {
|
if (!isElectron()) {
|
||||||
const error = "拖拽到桌面功能仅在桌面应用中可用";
|
const error = "Drag to desktop feature is only available in desktop application";
|
||||||
if (enableToast) toast.error(error);
|
if (enableToast) toast.error(error);
|
||||||
onError?.(error);
|
onError?.(error);
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
|
|
||||||
if (file.type !== "file") {
|
if (file.type !== "file") {
|
||||||
const error = "只能拖拽文件到桌面";
|
const error = "Only files can be dragged to desktop";
|
||||||
if (enableToast) toast.error(error);
|
if (enableToast) toast.error(error);
|
||||||
onError?.(error);
|
onError?.(error);
|
||||||
return false;
|
return false;
|
||||||
@@ -68,16 +68,16 @@ export function useDragToDesktop({
|
|||||||
error: null,
|
error: null,
|
||||||
}));
|
}));
|
||||||
|
|
||||||
// 下载文件内容
|
// Download file content
|
||||||
const response = await downloadSSHFile(sshSessionId, file.path);
|
const response = await downloadSSHFile(sshSessionId, file.path);
|
||||||
|
|
||||||
if (!response?.content) {
|
if (!response?.content) {
|
||||||
throw new Error("无法获取文件内容");
|
throw new Error("Unable to get file content");
|
||||||
}
|
}
|
||||||
|
|
||||||
setState((prev) => ({ ...prev, progress: 50 }));
|
setState((prev) => ({ ...prev, progress: 50 }));
|
||||||
|
|
||||||
// 创建临时文件
|
// Create temporary file
|
||||||
const tempResult = await window.electronAPI.createTempFile({
|
const tempResult = await window.electronAPI.createTempFile({
|
||||||
fileName: file.name,
|
fileName: file.name,
|
||||||
content: response.content,
|
content: response.content,
|
||||||
@@ -85,30 +85,30 @@ export function useDragToDesktop({
|
|||||||
});
|
});
|
||||||
|
|
||||||
if (!tempResult.success) {
|
if (!tempResult.success) {
|
||||||
throw new Error(tempResult.error || "创建临时文件失败");
|
throw new Error(tempResult.error || "Failed to create temporary file");
|
||||||
}
|
}
|
||||||
|
|
||||||
setState((prev) => ({ ...prev, progress: 80, isDragging: true }));
|
setState((prev) => ({ ...prev, progress: 80, isDragging: true }));
|
||||||
|
|
||||||
// 开始拖拽
|
// Start dragging
|
||||||
const dragResult = await window.electronAPI.startDragToDesktop({
|
const dragResult = await window.electronAPI.startDragToDesktop({
|
||||||
tempId: tempResult.tempId,
|
tempId: tempResult.tempId,
|
||||||
fileName: file.name,
|
fileName: file.name,
|
||||||
});
|
});
|
||||||
|
|
||||||
if (!dragResult.success) {
|
if (!dragResult.success) {
|
||||||
throw new Error(dragResult.error || "开始拖拽失败");
|
throw new Error(dragResult.error || "Failed to start dragging");
|
||||||
}
|
}
|
||||||
|
|
||||||
setState((prev) => ({ ...prev, progress: 100 }));
|
setState((prev) => ({ ...prev, progress: 100 }));
|
||||||
|
|
||||||
if (enableToast) {
|
if (enableToast) {
|
||||||
toast.success(`正在拖拽 ${file.name} 到桌面`);
|
toast.success(`Dragging ${file.name} to desktop`);
|
||||||
}
|
}
|
||||||
|
|
||||||
onSuccess?.();
|
onSuccess?.();
|
||||||
|
|
||||||
// 延迟清理临时文件(给用户时间完成拖拽)
|
// Delayed cleanup of temporary file (give user time to complete drag)
|
||||||
setTimeout(async () => {
|
setTimeout(async () => {
|
||||||
await window.electronAPI.cleanupTempFile(tempResult.tempId);
|
await window.electronAPI.cleanupTempFile(tempResult.tempId);
|
||||||
setState((prev) => ({
|
setState((prev) => ({
|
||||||
@@ -117,12 +117,12 @@ export function useDragToDesktop({
|
|||||||
isDownloading: false,
|
isDownloading: false,
|
||||||
progress: 0,
|
progress: 0,
|
||||||
}));
|
}));
|
||||||
}, 10000); // 10秒后清理
|
}, 10000); // Cleanup after 10 seconds
|
||||||
|
|
||||||
return true;
|
return true;
|
||||||
} catch (error: any) {
|
} catch (error: any) {
|
||||||
console.error("拖拽到桌面失败:", error);
|
console.error("Failed to drag to desktop:", error);
|
||||||
const errorMessage = error.message || "拖拽失败";
|
const errorMessage = error.message || "Drag failed";
|
||||||
|
|
||||||
setState((prev) => ({
|
setState((prev) => ({
|
||||||
...prev,
|
...prev,
|
||||||
@@ -133,7 +133,7 @@ export function useDragToDesktop({
|
|||||||
}));
|
}));
|
||||||
|
|
||||||
if (enableToast) {
|
if (enableToast) {
|
||||||
toast.error(`拖拽失败: ${errorMessage}`);
|
toast.error(`Drag failed: ${errorMessage}`);
|
||||||
}
|
}
|
||||||
|
|
||||||
onError?.(errorMessage);
|
onError?.(errorMessage);
|
||||||
@@ -143,13 +143,13 @@ export function useDragToDesktop({
|
|||||||
[sshSessionId, sshHost],
|
[sshSessionId, sshHost],
|
||||||
);
|
);
|
||||||
|
|
||||||
// 拖拽多个文件到桌面(批量操作)
|
// Drag multiple files to desktop (batch operation)
|
||||||
const dragFilesToDesktop = useCallback(
|
const dragFilesToDesktop = useCallback(
|
||||||
async (files: FileItem[], options: DragToDesktopOptions = {}) => {
|
async (files: FileItem[], options: DragToDesktopOptions = {}) => {
|
||||||
const { enableToast = true, onSuccess, onError } = options;
|
const { enableToast = true, onSuccess, onError } = options;
|
||||||
|
|
||||||
if (!isElectron()) {
|
if (!isElectron()) {
|
||||||
const error = "拖拽到桌面功能仅在桌面应用中可用";
|
const error = "Drag to desktop feature is only available in desktop application";
|
||||||
if (enableToast) toast.error(error);
|
if (enableToast) toast.error(error);
|
||||||
onError?.(error);
|
onError?.(error);
|
||||||
return false;
|
return false;
|
||||||
@@ -157,7 +157,7 @@ export function useDragToDesktop({
|
|||||||
|
|
||||||
const fileList = files.filter((f) => f.type === "file");
|
const fileList = files.filter((f) => f.type === "file");
|
||||||
if (fileList.length === 0) {
|
if (fileList.length === 0) {
|
||||||
const error = "没有可拖拽的文件";
|
const error = "No files available for dragging";
|
||||||
if (enableToast) toast.error(error);
|
if (enableToast) toast.error(error);
|
||||||
onError?.(error);
|
onError?.(error);
|
||||||
return false;
|
return false;
|
||||||
@@ -175,7 +175,7 @@ export function useDragToDesktop({
|
|||||||
error: null,
|
error: null,
|
||||||
}));
|
}));
|
||||||
|
|
||||||
// 批量下载文件
|
// Batch download files
|
||||||
const downloadPromises = fileList.map((file) =>
|
const downloadPromises = fileList.map((file) =>
|
||||||
downloadSSHFile(sshSessionId, file.path),
|
downloadSSHFile(sshSessionId, file.path),
|
||||||
);
|
);
|
||||||
@@ -183,7 +183,7 @@ export function useDragToDesktop({
|
|||||||
const responses = await Promise.all(downloadPromises);
|
const responses = await Promise.all(downloadPromises);
|
||||||
setState((prev) => ({ ...prev, progress: 40 }));
|
setState((prev) => ({ ...prev, progress: 40 }));
|
||||||
|
|
||||||
// 创建临时文件夹结构
|
// Create temporary folder structure
|
||||||
const folderName = `Files_${Date.now()}`;
|
const folderName = `Files_${Date.now()}`;
|
||||||
const filesData = fileList.map((file, index) => ({
|
const filesData = fileList.map((file, index) => ({
|
||||||
relativePath: file.name,
|
relativePath: file.name,
|
||||||
@@ -197,30 +197,30 @@ export function useDragToDesktop({
|
|||||||
});
|
});
|
||||||
|
|
||||||
if (!tempResult.success) {
|
if (!tempResult.success) {
|
||||||
throw new Error(tempResult.error || "创建临时文件夹失败");
|
throw new Error(tempResult.error || "Failed to create temporary folder");
|
||||||
}
|
}
|
||||||
|
|
||||||
setState((prev) => ({ ...prev, progress: 80, isDragging: true }));
|
setState((prev) => ({ ...prev, progress: 80, isDragging: true }));
|
||||||
|
|
||||||
// 开始拖拽文件夹
|
// Start dragging folder
|
||||||
const dragResult = await window.electronAPI.startDragToDesktop({
|
const dragResult = await window.electronAPI.startDragToDesktop({
|
||||||
tempId: tempResult.tempId,
|
tempId: tempResult.tempId,
|
||||||
fileName: folderName,
|
fileName: folderName,
|
||||||
});
|
});
|
||||||
|
|
||||||
if (!dragResult.success) {
|
if (!dragResult.success) {
|
||||||
throw new Error(dragResult.error || "开始拖拽失败");
|
throw new Error(dragResult.error || "Failed to start dragging");
|
||||||
}
|
}
|
||||||
|
|
||||||
setState((prev) => ({ ...prev, progress: 100 }));
|
setState((prev) => ({ ...prev, progress: 100 }));
|
||||||
|
|
||||||
if (enableToast) {
|
if (enableToast) {
|
||||||
toast.success(`正在拖拽 ${fileList.length} 个文件到桌面`);
|
toast.success(`Dragging ${fileList.length} files to desktop`);
|
||||||
}
|
}
|
||||||
|
|
||||||
onSuccess?.();
|
onSuccess?.();
|
||||||
|
|
||||||
// 延迟清理临时文件夹
|
// Delayed cleanup of temporary folder
|
||||||
setTimeout(async () => {
|
setTimeout(async () => {
|
||||||
await window.electronAPI.cleanupTempFile(tempResult.tempId);
|
await window.electronAPI.cleanupTempFile(tempResult.tempId);
|
||||||
setState((prev) => ({
|
setState((prev) => ({
|
||||||
@@ -229,12 +229,12 @@ export function useDragToDesktop({
|
|||||||
isDownloading: false,
|
isDownloading: false,
|
||||||
progress: 0,
|
progress: 0,
|
||||||
}));
|
}));
|
||||||
}, 15000); // 15秒后清理
|
}, 15000); // Cleanup after 15 seconds
|
||||||
|
|
||||||
return true;
|
return true;
|
||||||
} catch (error: any) {
|
} catch (error: any) {
|
||||||
console.error("批量拖拽到桌面失败:", error);
|
console.error("Failed to batch drag to desktop:", error);
|
||||||
const errorMessage = error.message || "批量拖拽失败";
|
const errorMessage = error.message || "Batch drag failed";
|
||||||
|
|
||||||
setState((prev) => ({
|
setState((prev) => ({
|
||||||
...prev,
|
...prev,
|
||||||
@@ -245,7 +245,7 @@ export function useDragToDesktop({
|
|||||||
}));
|
}));
|
||||||
|
|
||||||
if (enableToast) {
|
if (enableToast) {
|
||||||
toast.error(`批量拖拽失败: ${errorMessage}`);
|
toast.error(`Batch drag failed: ${errorMessage}`);
|
||||||
}
|
}
|
||||||
|
|
||||||
onError?.(errorMessage);
|
onError?.(errorMessage);
|
||||||
@@ -255,31 +255,31 @@ export function useDragToDesktop({
|
|||||||
[sshSessionId, sshHost, dragFileToDesktop],
|
[sshSessionId, sshHost, dragFileToDesktop],
|
||||||
);
|
);
|
||||||
|
|
||||||
// 拖拽文件夹到桌面
|
// Drag folder to desktop
|
||||||
const dragFolderToDesktop = useCallback(
|
const dragFolderToDesktop = useCallback(
|
||||||
async (folder: FileItem, options: DragToDesktopOptions = {}) => {
|
async (folder: FileItem, options: DragToDesktopOptions = {}) => {
|
||||||
const { enableToast = true, onSuccess, onError } = options;
|
const { enableToast = true, onSuccess, onError } = options;
|
||||||
|
|
||||||
if (!isElectron()) {
|
if (!isElectron()) {
|
||||||
const error = "拖拽到桌面功能仅在桌面应用中可用";
|
const error = "Drag to desktop feature is only available in desktop application";
|
||||||
if (enableToast) toast.error(error);
|
if (enableToast) toast.error(error);
|
||||||
onError?.(error);
|
onError?.(error);
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
|
|
||||||
if (folder.type !== "directory") {
|
if (folder.type !== "directory") {
|
||||||
const error = "只能拖拽文件夹类型";
|
const error = "Only folder types can be dragged";
|
||||||
if (enableToast) toast.error(error);
|
if (enableToast) toast.error(error);
|
||||||
onError?.(error);
|
onError?.(error);
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
|
|
||||||
if (enableToast) {
|
if (enableToast) {
|
||||||
toast.info("文件夹拖拽功能开发中...");
|
toast.info("Folder drag functionality is under development...");
|
||||||
}
|
}
|
||||||
|
|
||||||
// TODO: 实现文件夹递归下载和拖拽
|
// TODO: Implement recursive folder download and drag
|
||||||
// 这需要额外的API来递归获取文件夹内容
|
// This requires additional API to recursively get folder contents
|
||||||
|
|
||||||
return false;
|
return false;
|
||||||
},
|
},
|
||||||
|
|||||||
@@ -37,7 +37,7 @@ export function useDragToSystemDesktop({
|
|||||||
options: DragToSystemOptions;
|
options: DragToSystemOptions;
|
||||||
} | null>(null);
|
} | null>(null);
|
||||||
|
|
||||||
// 目录记忆功能
|
// Directory memory functionality
|
||||||
const getLastSaveDirectory = async () => {
|
const getLastSaveDirectory = async () => {
|
||||||
try {
|
try {
|
||||||
if ("indexedDB" in window) {
|
if ("indexedDB" in window) {
|
||||||
@@ -61,7 +61,7 @@ export function useDragToSystemDesktop({
|
|||||||
});
|
});
|
||||||
}
|
}
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
console.log("无法获取上次保存目录:", error);
|
console.log("Unable to get last save directory:", error);
|
||||||
}
|
}
|
||||||
return null;
|
return null;
|
||||||
};
|
};
|
||||||
@@ -79,18 +79,18 @@ export function useDragToSystemDesktop({
|
|||||||
};
|
};
|
||||||
}
|
}
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
console.log("无法保存目录记录:", error);
|
console.log("Unable to save directory record:", error);
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
// 检查File System Access API支持
|
// Check File System Access API support
|
||||||
const isFileSystemAPISupported = () => {
|
const isFileSystemAPISupported = () => {
|
||||||
return "showSaveFilePicker" in window;
|
return "showSaveFilePicker" in window;
|
||||||
};
|
};
|
||||||
|
|
||||||
// 检查拖拽是否离开窗口边界
|
// Check if drag has left window boundaries
|
||||||
const isDraggedOutsideWindow = (e: DragEvent) => {
|
const isDraggedOutsideWindow = (e: DragEvent) => {
|
||||||
const margin = 50; // 增加容差边距
|
const margin = 50; // Increase tolerance margin
|
||||||
return (
|
return (
|
||||||
e.clientX < margin ||
|
e.clientX < margin ||
|
||||||
e.clientX > window.innerWidth - margin ||
|
e.clientX > window.innerWidth - margin ||
|
||||||
@@ -99,14 +99,14 @@ export function useDragToSystemDesktop({
|
|||||||
);
|
);
|
||||||
};
|
};
|
||||||
|
|
||||||
// 创建文件blob
|
// Create file blob
|
||||||
const createFileBlob = async (file: FileItem): Promise<Blob> => {
|
const createFileBlob = async (file: FileItem): Promise<Blob> => {
|
||||||
const response = await downloadSSHFile(sshSessionId, file.path);
|
const response = await downloadSSHFile(sshSessionId, file.path);
|
||||||
if (!response?.content) {
|
if (!response?.content) {
|
||||||
throw new Error(`无法获取文件 ${file.name} 的内容`);
|
throw new Error(`Unable to get content for file ${file.name}`);
|
||||||
}
|
}
|
||||||
|
|
||||||
// base64转换为blob
|
// Convert base64 to blob
|
||||||
const binaryString = atob(response.content);
|
const binaryString = atob(response.content);
|
||||||
const bytes = new Uint8Array(binaryString.length);
|
const bytes = new Uint8Array(binaryString.length);
|
||||||
for (let i = 0; i < binaryString.length; i++) {
|
for (let i = 0; i < binaryString.length; i++) {
|
||||||
@@ -116,9 +116,9 @@ export function useDragToSystemDesktop({
|
|||||||
return new Blob([bytes]);
|
return new Blob([bytes]);
|
||||||
};
|
};
|
||||||
|
|
||||||
// 创建ZIP文件(用于多文件下载)
|
// Create ZIP file (for multi-file download)
|
||||||
const createZipBlob = async (files: FileItem[]): Promise<Blob> => {
|
const createZipBlob = async (files: FileItem[]): Promise<Blob> => {
|
||||||
// 这里需要一个轻量级的zip库,先用简单方案
|
// A lightweight zip library is needed here, using simple approach for now
|
||||||
const JSZip = (await import("jszip")).default;
|
const JSZip = (await import("jszip")).default;
|
||||||
const zip = new JSZip();
|
const zip = new JSZip();
|
||||||
|
|
||||||
@@ -130,42 +130,8 @@ export function useDragToSystemDesktop({
|
|||||||
return await zip.generateAsync({ type: "blob" });
|
return await zip.generateAsync({ type: "blob" });
|
||||||
};
|
};
|
||||||
|
|
||||||
// 使用File System Access API保存文件
|
|
||||||
const saveFileWithSystemAPI = async (blob: Blob, suggestedName: string) => {
|
|
||||||
try {
|
|
||||||
// 获取上次保存的目录句柄
|
|
||||||
const lastDirHandle = await getLastSaveDirectory();
|
|
||||||
|
|
||||||
const fileHandle = await (window as any).showSaveFilePicker({
|
// Fallback solution: traditional download
|
||||||
suggestedName,
|
|
||||||
startIn: lastDirHandle || "desktop", // 优先使用上次目录,否则桌面
|
|
||||||
types: [
|
|
||||||
{
|
|
||||||
description: "文件",
|
|
||||||
accept: {
|
|
||||||
"*/*": [".txt", ".jpg", ".png", ".pdf", ".zip", ".tar", ".gz"],
|
|
||||||
},
|
|
||||||
},
|
|
||||||
],
|
|
||||||
});
|
|
||||||
|
|
||||||
// 保存当前目录句柄以便下次使用
|
|
||||||
await saveLastDirectory(fileHandle);
|
|
||||||
|
|
||||||
const writable = await fileHandle.createWritable();
|
|
||||||
await writable.write(blob);
|
|
||||||
await writable.close();
|
|
||||||
|
|
||||||
return true;
|
|
||||||
} catch (error: any) {
|
|
||||||
if (error.name === "AbortError") {
|
|
||||||
return false; // 用户取消
|
|
||||||
}
|
|
||||||
throw error;
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
// 降级方案:传统下载
|
|
||||||
const fallbackDownload = (blob: Blob, fileName: string) => {
|
const fallbackDownload = (blob: Blob, fileName: string) => {
|
||||||
const url = URL.createObjectURL(blob);
|
const url = URL.createObjectURL(blob);
|
||||||
const a = document.createElement("a");
|
const a = document.createElement("a");
|
||||||
@@ -177,22 +143,22 @@ export function useDragToSystemDesktop({
|
|||||||
URL.revokeObjectURL(url);
|
URL.revokeObjectURL(url);
|
||||||
};
|
};
|
||||||
|
|
||||||
// 处理拖拽到系统桌面
|
// Handle drag to system desktop
|
||||||
const handleDragToSystem = useCallback(
|
const handleDragToSystem = useCallback(
|
||||||
async (files: FileItem[], options: DragToSystemOptions = {}) => {
|
async (files: FileItem[], options: DragToSystemOptions = {}) => {
|
||||||
const { enableToast = true, onSuccess, onError } = options;
|
const { enableToast = true, onSuccess, onError } = options;
|
||||||
|
|
||||||
if (files.length === 0) {
|
if (files.length === 0) {
|
||||||
const error = "没有可拖拽的文件";
|
const error = "No files available for dragging";
|
||||||
if (enableToast) toast.error(error);
|
if (enableToast) toast.error(error);
|
||||||
onError?.(error);
|
onError?.(error);
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
|
|
||||||
// 过滤出文件类型
|
// Filter out file types
|
||||||
const fileList = files.filter((f) => f.type === "file");
|
const fileList = files.filter((f) => f.type === "file");
|
||||||
if (fileList.length === 0) {
|
if (fileList.length === 0) {
|
||||||
const error = "只能拖拽文件到桌面";
|
const error = "Only files can be dragged to desktop";
|
||||||
if (enableToast) toast.error(error);
|
if (enableToast) toast.error(error);
|
||||||
onError?.(error);
|
onError?.(error);
|
||||||
return false;
|
return false;
|
||||||
@@ -206,28 +172,30 @@ export function useDragToSystemDesktop({
|
|||||||
error: null,
|
error: null,
|
||||||
}));
|
}));
|
||||||
|
|
||||||
let blob: Blob;
|
// Determine file name first (synchronously)
|
||||||
let fileName: string;
|
const fileName = fileList.length === 1
|
||||||
|
? fileList[0].name
|
||||||
|
: `files_${Date.now()}.zip`;
|
||||||
|
|
||||||
if (fileList.length === 1) {
|
// For File System Access API, get the file handle FIRST to preserve user gesture
|
||||||
// 单文件
|
let fileHandle: any = null;
|
||||||
blob = await createFileBlob(fileList[0]);
|
|
||||||
fileName = fileList[0].name;
|
|
||||||
setState((prev) => ({ ...prev, progress: 70 }));
|
|
||||||
} else {
|
|
||||||
// 多文件打包成ZIP
|
|
||||||
blob = await createZipBlob(fileList);
|
|
||||||
fileName = `files_${Date.now()}.zip`;
|
|
||||||
setState((prev) => ({ ...prev, progress: 70 }));
|
|
||||||
}
|
|
||||||
|
|
||||||
setState((prev) => ({ ...prev, progress: 90 }));
|
|
||||||
|
|
||||||
// 优先使用File System Access API
|
|
||||||
if (isFileSystemAPISupported()) {
|
if (isFileSystemAPISupported()) {
|
||||||
const saved = await saveFileWithSystemAPI(blob, fileName);
|
try {
|
||||||
if (!saved) {
|
fileHandle = await (window as any).showSaveFilePicker({
|
||||||
// 用户取消了
|
suggestedName: fileName,
|
||||||
|
startIn: "desktop",
|
||||||
|
types: [
|
||||||
|
{
|
||||||
|
description: "Files",
|
||||||
|
accept: {
|
||||||
|
"*/*": [".txt", ".jpg", ".png", ".pdf", ".zip", ".tar", ".gz"],
|
||||||
|
},
|
||||||
|
},
|
||||||
|
],
|
||||||
|
});
|
||||||
|
} catch (error: any) {
|
||||||
|
if (error.name === "AbortError") {
|
||||||
|
// User cancelled
|
||||||
setState((prev) => ({
|
setState((prev) => ({
|
||||||
...prev,
|
...prev,
|
||||||
isDownloading: false,
|
isDownloading: false,
|
||||||
@@ -235,11 +203,36 @@ export function useDragToSystemDesktop({
|
|||||||
}));
|
}));
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
|
throw error;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Now create the blob (after getting file handle)
|
||||||
|
let blob: Blob;
|
||||||
|
if (fileList.length === 1) {
|
||||||
|
// Single file
|
||||||
|
blob = await createFileBlob(fileList[0]);
|
||||||
|
setState((prev) => ({ ...prev, progress: 70 }));
|
||||||
} else {
|
} else {
|
||||||
// 降级到传统下载
|
// Package multiple files into ZIP
|
||||||
|
blob = await createZipBlob(fileList);
|
||||||
|
setState((prev) => ({ ...prev, progress: 70 }));
|
||||||
|
}
|
||||||
|
|
||||||
|
setState((prev) => ({ ...prev, progress: 90 }));
|
||||||
|
|
||||||
|
// Save the file
|
||||||
|
if (fileHandle) {
|
||||||
|
// Use File System Access API with pre-obtained handle
|
||||||
|
await saveLastDirectory(fileHandle);
|
||||||
|
const writable = await fileHandle.createWritable();
|
||||||
|
await writable.write(blob);
|
||||||
|
await writable.close();
|
||||||
|
} else {
|
||||||
|
// Fallback to traditional download
|
||||||
fallbackDownload(blob, fileName);
|
fallbackDownload(blob, fileName);
|
||||||
if (enableToast) {
|
if (enableToast) {
|
||||||
toast.info("由于浏览器限制,文件将下载到默认下载目录");
|
toast.info("Due to browser limitations, file will be downloaded to default download directory");
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -248,22 +241,22 @@ export function useDragToSystemDesktop({
|
|||||||
if (enableToast) {
|
if (enableToast) {
|
||||||
toast.success(
|
toast.success(
|
||||||
fileList.length === 1
|
fileList.length === 1
|
||||||
? `${fileName} 已保存到指定位置`
|
? `${fileName} saved to specified location`
|
||||||
: `${fileList.length} 个文件已打包保存`,
|
: `${fileList.length} files packaged and saved`,
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
onSuccess?.();
|
onSuccess?.();
|
||||||
|
|
||||||
// 重置状态
|
// Reset state
|
||||||
setTimeout(() => {
|
setTimeout(() => {
|
||||||
setState((prev) => ({ ...prev, isDownloading: false, progress: 0 }));
|
setState((prev) => ({ ...prev, isDownloading: false, progress: 0 }));
|
||||||
}, 1000);
|
}, 1000);
|
||||||
|
|
||||||
return true;
|
return true;
|
||||||
} catch (error: any) {
|
} catch (error: any) {
|
||||||
console.error("拖拽到桌面失败:", error);
|
console.error("Failed to drag to desktop:", error);
|
||||||
const errorMessage = error.message || "保存失败";
|
const errorMessage = error.message || "Save failed";
|
||||||
|
|
||||||
setState((prev) => ({
|
setState((prev) => ({
|
||||||
...prev,
|
...prev,
|
||||||
@@ -273,7 +266,7 @@ export function useDragToSystemDesktop({
|
|||||||
}));
|
}));
|
||||||
|
|
||||||
if (enableToast) {
|
if (enableToast) {
|
||||||
toast.error(`保存失败: ${errorMessage}`);
|
toast.error(`Save failed: ${errorMessage}`);
|
||||||
}
|
}
|
||||||
|
|
||||||
onError?.(errorMessage);
|
onError?.(errorMessage);
|
||||||
@@ -283,7 +276,7 @@ export function useDragToSystemDesktop({
|
|||||||
[sshSessionId],
|
[sshSessionId],
|
||||||
);
|
);
|
||||||
|
|
||||||
// 开始拖拽(记录拖拽数据)
|
// Start dragging (record drag data)
|
||||||
const startDragToSystem = useCallback(
|
const startDragToSystem = useCallback(
|
||||||
(files: FileItem[], options: DragToSystemOptions = {}) => {
|
(files: FileItem[], options: DragToSystemOptions = {}) => {
|
||||||
dragDataRef.current = { files, options };
|
dragDataRef.current = { files, options };
|
||||||
@@ -292,29 +285,27 @@ export function useDragToSystemDesktop({
|
|||||||
[],
|
[],
|
||||||
);
|
);
|
||||||
|
|
||||||
// 结束拖拽检测
|
// End drag detection
|
||||||
const handleDragEnd = useCallback(
|
const handleDragEnd = useCallback(
|
||||||
(e: DragEvent) => {
|
(e: DragEvent) => {
|
||||||
if (!dragDataRef.current) return;
|
if (!dragDataRef.current) return;
|
||||||
|
|
||||||
const { files, options } = dragDataRef.current;
|
const { files, options } = dragDataRef.current;
|
||||||
|
|
||||||
// 检查是否拖拽到窗口外
|
// Check if dragged outside window
|
||||||
if (isDraggedOutsideWindow(e)) {
|
if (isDraggedOutsideWindow(e)) {
|
||||||
// 延迟执行,避免与其他拖拽事件冲突
|
// Execute immediately to preserve user gesture context for showSaveFilePicker
|
||||||
setTimeout(() => {
|
|
||||||
handleDragToSystem(files, options);
|
handleDragToSystem(files, options);
|
||||||
}, 100);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// 清理拖拽状态
|
// Clean up drag state
|
||||||
dragDataRef.current = null;
|
dragDataRef.current = null;
|
||||||
setState((prev) => ({ ...prev, isDragging: false }));
|
setState((prev) => ({ ...prev, isDragging: false }));
|
||||||
},
|
},
|
||||||
[handleDragToSystem],
|
[handleDragToSystem],
|
||||||
);
|
);
|
||||||
|
|
||||||
// 取消拖拽
|
// Cancel dragging
|
||||||
const cancelDragToSystem = useCallback(() => {
|
const cancelDragToSystem = useCallback(() => {
|
||||||
dragDataRef.current = null;
|
dragDataRef.current = null;
|
||||||
setState((prev) => ({ ...prev, isDragging: false, error: null }));
|
setState((prev) => ({ ...prev, isDragging: false, error: null }));
|
||||||
@@ -326,6 +317,6 @@ export function useDragToSystemDesktop({
|
|||||||
startDragToSystem,
|
startDragToSystem,
|
||||||
handleDragEnd,
|
handleDragEnd,
|
||||||
cancelDragToSystem,
|
cancelDragToSystem,
|
||||||
handleDragToSystem, // 直接调用版本
|
handleDragToSystem, // Direct call version
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -123,8 +123,10 @@ export function getCookie(name: string): string | undefined {
|
|||||||
} else {
|
} else {
|
||||||
const value = `; ${document.cookie}`;
|
const value = `; ${document.cookie}`;
|
||||||
const parts = value.split(`; ${name}=`);
|
const parts = value.split(`; ${name}=`);
|
||||||
const token =
|
const encodedToken =
|
||||||
parts.length === 2 ? parts.pop()?.split(";").shift() : undefined;
|
parts.length === 2 ? parts.pop()?.split(";").shift() : undefined;
|
||||||
|
// Decode the token since setCookie uses encodeURIComponent
|
||||||
|
const token = encodedToken ? decodeURIComponent(encodedToken) : undefined;
|
||||||
return token;
|
return token;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -278,6 +280,27 @@ function createApiInstance(
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Handle DEK (Data Encryption Key) invalidation
|
||||||
|
if (status === 423) {
|
||||||
|
const errorData = error.response?.data;
|
||||||
|
if (errorData?.error === "DATA_LOCKED" || errorData?.message?.includes("DATA_LOCKED")) {
|
||||||
|
// DEK session has expired (likely due to server restart or timeout)
|
||||||
|
// Force logout to require re-authentication and DEK unlock
|
||||||
|
if (isElectron()) {
|
||||||
|
localStorage.removeItem("jwt");
|
||||||
|
} else {
|
||||||
|
document.cookie =
|
||||||
|
"jwt=; expires=Thu, 01 Jan 1970 00:00:00 UTC; path=/;";
|
||||||
|
localStorage.removeItem("jwt");
|
||||||
|
}
|
||||||
|
|
||||||
|
// Trigger a page reload to redirect to login
|
||||||
|
if (typeof window !== "undefined") {
|
||||||
|
setTimeout(() => window.location.reload(), 100);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
return Promise.reject(error);
|
return Promise.reject(error);
|
||||||
},
|
},
|
||||||
);
|
);
|
||||||
@@ -376,7 +399,10 @@ if (isElectron()) {
|
|||||||
|
|
||||||
function getApiUrl(path: string, defaultPort: number): string {
|
function getApiUrl(path: string, defaultPort: number): string {
|
||||||
if (isDev()) {
|
if (isDev()) {
|
||||||
return `http://${apiHost}:${defaultPort}${path}`;
|
// Auto-detect HTTPS in development
|
||||||
|
const protocol = window.location.protocol === "https:" ? "https" : "http";
|
||||||
|
const sslPort = protocol === "https" ? 8443 : defaultPort;
|
||||||
|
return `${protocol}://${apiHost}:${sslPort}${path}`;
|
||||||
} else if (isElectron()) {
|
} else if (isElectron()) {
|
||||||
if (configuredServerUrl) {
|
if (configuredServerUrl) {
|
||||||
const baseUrl = configuredServerUrl.replace(/\/$/, "");
|
const baseUrl = configuredServerUrl.replace(/\/$/, "");
|
||||||
@@ -737,6 +763,48 @@ export async function getSSHHostById(hostId: number): Promise<SSHHost> {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// SSH AUTOSTART MANAGEMENT
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
export async function enableAutoStart(sshConfigId: number): Promise<any> {
|
||||||
|
try {
|
||||||
|
const response = await sshHostApi.post("/autostart/enable", { sshConfigId });
|
||||||
|
return response.data;
|
||||||
|
} catch (error) {
|
||||||
|
handleApiError(error, "enable autostart");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function disableAutoStart(sshConfigId: number): Promise<any> {
|
||||||
|
try {
|
||||||
|
const response = await sshHostApi.delete("/autostart/disable", {
|
||||||
|
data: { sshConfigId }
|
||||||
|
});
|
||||||
|
return response.data;
|
||||||
|
} catch (error) {
|
||||||
|
handleApiError(error, "disable autostart");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function getAutoStartStatus(): Promise<{
|
||||||
|
autostart_configs: Array<{
|
||||||
|
sshConfigId: number;
|
||||||
|
host: string;
|
||||||
|
port: number;
|
||||||
|
username: string;
|
||||||
|
authType: string;
|
||||||
|
}>;
|
||||||
|
total_count: number;
|
||||||
|
}> {
|
||||||
|
try {
|
||||||
|
const response = await sshHostApi.get("/autostart/status");
|
||||||
|
return response.data;
|
||||||
|
} catch (error) {
|
||||||
|
handleApiError(error, "fetch autostart status");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
// ============================================================================
|
// ============================================================================
|
||||||
// TUNNEL MANAGEMENT
|
// TUNNEL MANAGEMENT
|
||||||
// ============================================================================
|
// ============================================================================
|
||||||
@@ -955,6 +1023,17 @@ export async function getSSHStatus(
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
export async function keepSSHAlive(sessionId: string): Promise<any> {
|
||||||
|
try {
|
||||||
|
const response = await fileManagerApi.post("/ssh/keepalive", {
|
||||||
|
sessionId,
|
||||||
|
});
|
||||||
|
return response.data;
|
||||||
|
} catch (error) {
|
||||||
|
handleApiError(error, "SSH keepalive");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
export async function listSSHFiles(
|
export async function listSSHFiles(
|
||||||
sessionId: string,
|
sessionId: string,
|
||||||
path: string,
|
path: string,
|
||||||
@@ -966,7 +1045,7 @@ export async function listSSHFiles(
|
|||||||
return response.data || { files: [], path };
|
return response.data || { files: [], path };
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
handleApiError(error, "list SSH files");
|
handleApiError(error, "list SSH files");
|
||||||
return { files: [], path }; // 确保总是返回正确格式
|
return { files: [], path }; // Ensure always return correct format
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -993,7 +1072,14 @@ export async function readSSHFile(
|
|||||||
params: { sessionId, path },
|
params: { sessionId, path },
|
||||||
});
|
});
|
||||||
return response.data;
|
return response.data;
|
||||||
} catch (error) {
|
} catch (error: any) {
|
||||||
|
// Preserve fileNotFound information for 404 errors
|
||||||
|
if (error.response?.status === 404) {
|
||||||
|
const customError = new Error("File not found");
|
||||||
|
(customError as any).response = error.response;
|
||||||
|
(customError as any).isFileNotFound = error.response.data?.fileNotFound || true;
|
||||||
|
throw customError;
|
||||||
|
}
|
||||||
handleApiError(error, "read SSH file");
|
handleApiError(error, "read SSH file");
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -1155,7 +1241,7 @@ export async function copySSHItem(
|
|||||||
userId,
|
userId,
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
timeout: 60000, // 60秒超时,因为文件复制可能需要更长时间
|
timeout: 60000, // 60 second timeout as file copying may take longer
|
||||||
},
|
},
|
||||||
);
|
);
|
||||||
return response.data;
|
return response.data;
|
||||||
@@ -1201,6 +1287,8 @@ export async function moveSSHItem(
|
|||||||
newPath,
|
newPath,
|
||||||
hostId,
|
hostId,
|
||||||
userId,
|
userId,
|
||||||
|
}, {
|
||||||
|
timeout: 60000, // 60 second timeout for move operations
|
||||||
});
|
});
|
||||||
return response.data;
|
return response.data;
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
@@ -1446,6 +1534,15 @@ export async function getOIDCConfig(): Promise<any> {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
export async function getSetupRequired(): Promise<{ setup_required: boolean }> {
|
||||||
|
try {
|
||||||
|
const response = await authApi.get("/users/setup-required");
|
||||||
|
return response.data;
|
||||||
|
} catch (error) {
|
||||||
|
handleApiError(error, "check setup status");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
export async function getUserCount(): Promise<UserCount> {
|
export async function getUserCount(): Promise<UserCount> {
|
||||||
try {
|
try {
|
||||||
const response = await authApi.get("/users/count");
|
const response = await authApi.get("/users/count");
|
||||||
|
|||||||
@@ -1,294 +0,0 @@
|
|||||||
<TabsContent value="key">
|
|
||||||
<div className="space-y-6">
|
|
||||||
{/* Private Key Section */}
|
|
||||||
<div className="space-y-4">
|
|
||||||
<FormLabel className="text-sm font-medium">
|
|
||||||
{t("credentials.sshPrivateKey")}
|
|
||||||
</FormLabel>
|
|
||||||
|
|
||||||
<div className="grid grid-cols-2 gap-4">
|
|
||||||
{/* File Upload */}
|
|
||||||
<Controller
|
|
||||||
control={form.control}
|
|
||||||
name="key"
|
|
||||||
render={({ field }) => (
|
|
||||||
<FormItem className="flex flex-col">
|
|
||||||
<FormLabel className="text-xs text-muted-foreground">
|
|
||||||
{t("hosts.uploadFile")}
|
|
||||||
</FormLabel>
|
|
||||||
<FormControl>
|
|
||||||
<div className="relative inline-block w-full">
|
|
||||||
<input
|
|
||||||
id="key-upload"
|
|
||||||
type="file"
|
|
||||||
accept="*,.pem,.key,.txt,.ppk"
|
|
||||||
onChange={async (e) => {
|
|
||||||
const file = e.target.files?.[0];
|
|
||||||
if (file) {
|
|
||||||
field.onChange(file);
|
|
||||||
try {
|
|
||||||
const fileContent = await file.text();
|
|
||||||
debouncedKeyDetection(
|
|
||||||
fileContent,
|
|
||||||
form.watch("keyPassword"),
|
|
||||||
);
|
|
||||||
} catch (error) {
|
|
||||||
console.error("Failed to read uploaded file:", error);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}}
|
|
||||||
className="absolute inset-0 w-full h-full opacity-0 cursor-pointer"
|
|
||||||
/>
|
|
||||||
<Button
|
|
||||||
type="button"
|
|
||||||
variant="outline"
|
|
||||||
className="w-full justify-start text-left"
|
|
||||||
>
|
|
||||||
<span className="truncate">
|
|
||||||
{field.value instanceof File
|
|
||||||
? field.value.name
|
|
||||||
: t("credentials.upload")}
|
|
||||||
</span>
|
|
||||||
</Button>
|
|
||||||
</div>
|
|
||||||
</FormControl>
|
|
||||||
</FormItem>
|
|
||||||
)}
|
|
||||||
/>
|
|
||||||
|
|
||||||
{/* Text Input */}
|
|
||||||
<Controller
|
|
||||||
control={form.control}
|
|
||||||
name="key"
|
|
||||||
render={({ field }) => (
|
|
||||||
<FormItem className="flex flex-col">
|
|
||||||
<FormLabel className="text-xs text-muted-foreground">
|
|
||||||
{t("hosts.pasteKey")}
|
|
||||||
</FormLabel>
|
|
||||||
<FormControl>
|
|
||||||
<textarea
|
|
||||||
placeholder={t("placeholders.pastePrivateKey")}
|
|
||||||
className="flex min-h-[120px] w-full rounded-md border border-input bg-background px-3 py-2 text-sm ring-offset-background placeholder:text-muted-foreground focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-ring focus-visible:ring-offset-2 disabled:cursor-not-allowed disabled:opacity-50"
|
|
||||||
value={typeof field.value === "string" ? field.value : ""}
|
|
||||||
onChange={(e) => {
|
|
||||||
field.onChange(e.target.value);
|
|
||||||
debouncedKeyDetection(
|
|
||||||
e.target.value,
|
|
||||||
form.watch("keyPassword"),
|
|
||||||
);
|
|
||||||
}}
|
|
||||||
/>
|
|
||||||
</FormControl>
|
|
||||||
</FormItem>
|
|
||||||
)}
|
|
||||||
/>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
{/* Key type detection display */}
|
|
||||||
{detectedKeyType && (
|
|
||||||
<div className="text-sm">
|
|
||||||
<span className="text-muted-foreground">
|
|
||||||
{t("credentials.detectedKeyType")}:{" "}
|
|
||||||
</span>
|
|
||||||
<span
|
|
||||||
className={`font-medium ${
|
|
||||||
detectedKeyType === "invalid" || detectedKeyType === "error"
|
|
||||||
? "text-destructive"
|
|
||||||
: "text-green-600"
|
|
||||||
}`}
|
|
||||||
>
|
|
||||||
{getFriendlyKeyTypeName(detectedKeyType)}
|
|
||||||
</span>
|
|
||||||
{keyDetectionLoading && (
|
|
||||||
<span className="ml-2 text-muted-foreground">
|
|
||||||
({t("credentials.detecting")}...)
|
|
||||||
</span>
|
|
||||||
)}
|
|
||||||
</div>
|
|
||||||
)}
|
|
||||||
|
|
||||||
{/* Show existing private key for editing */}
|
|
||||||
{editingCredential && fullCredentialDetails?.key && (
|
|
||||||
<FormItem>
|
|
||||||
<FormLabel>
|
|
||||||
{t("credentials.sshPrivateKey")} ({t("hosts.existingKey")})
|
|
||||||
</FormLabel>
|
|
||||||
<FormControl>
|
|
||||||
<textarea
|
|
||||||
readOnly
|
|
||||||
className="flex min-h-[120px] w-full rounded-md border border-input bg-muted px-3 py-2 text-sm ring-offset-background placeholder:text-muted-foreground focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-ring focus-visible:ring-offset-2 disabled:cursor-not-allowed disabled:opacity-50"
|
|
||||||
value={fullCredentialDetails.key}
|
|
||||||
/>
|
|
||||||
</FormControl>
|
|
||||||
<div className="text-xs text-muted-foreground mt-1">
|
|
||||||
{t("credentials.currentKeyContent")}
|
|
||||||
</div>
|
|
||||||
{fullCredentialDetails?.detectedKeyType && (
|
|
||||||
<div className="text-sm mt-2">
|
|
||||||
<span className="text-muted-foreground">Key type: </span>
|
|
||||||
<span className="font-medium text-green-600">
|
|
||||||
{getFriendlyKeyTypeName(fullCredentialDetails.detectedKeyType)}
|
|
||||||
</span>
|
|
||||||
</div>
|
|
||||||
)}
|
|
||||||
</FormItem>
|
|
||||||
)}
|
|
||||||
</div>
|
|
||||||
|
|
||||||
{/* Public Key Section */}
|
|
||||||
<div className="space-y-4">
|
|
||||||
<FormLabel className="text-sm font-medium">
|
|
||||||
{t("credentials.sshPublicKey")} ({t("credentials.optional")})
|
|
||||||
</FormLabel>
|
|
||||||
|
|
||||||
<div className="grid grid-cols-2 gap-4">
|
|
||||||
{/* File Upload */}
|
|
||||||
<Controller
|
|
||||||
control={form.control}
|
|
||||||
name="publicKey"
|
|
||||||
render={({ field }) => (
|
|
||||||
<FormItem className="flex flex-col">
|
|
||||||
<FormLabel className="text-xs text-muted-foreground">
|
|
||||||
{t("hosts.uploadFile")}
|
|
||||||
</FormLabel>
|
|
||||||
<FormControl>
|
|
||||||
<div className="relative inline-block w-full">
|
|
||||||
<input
|
|
||||||
id="public-key-upload"
|
|
||||||
type="file"
|
|
||||||
accept="*,.pub,.txt"
|
|
||||||
onChange={async (e) => {
|
|
||||||
const file = e.target.files?.[0];
|
|
||||||
if (file) {
|
|
||||||
try {
|
|
||||||
const fileContent = await file.text();
|
|
||||||
field.onChange(fileContent);
|
|
||||||
debouncedPublicKeyDetection(fileContent);
|
|
||||||
} catch (error) {
|
|
||||||
console.error(
|
|
||||||
"Failed to read uploaded public key file:",
|
|
||||||
error,
|
|
||||||
);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}}
|
|
||||||
className="absolute inset-0 w-full h-full opacity-0 cursor-pointer"
|
|
||||||
/>
|
|
||||||
<Button
|
|
||||||
type="button"
|
|
||||||
variant="outline"
|
|
||||||
className="w-full justify-start text-left"
|
|
||||||
>
|
|
||||||
<span className="truncate">
|
|
||||||
{field.value
|
|
||||||
? t("credentials.publicKeyUploaded")
|
|
||||||
: t("credentials.uploadPublicKey")}
|
|
||||||
</span>
|
|
||||||
</Button>
|
|
||||||
</div>
|
|
||||||
</FormControl>
|
|
||||||
</FormItem>
|
|
||||||
)}
|
|
||||||
/>
|
|
||||||
|
|
||||||
{/* Text Input */}
|
|
||||||
<Controller
|
|
||||||
control={form.control}
|
|
||||||
name="publicKey"
|
|
||||||
render={({ field }) => (
|
|
||||||
<FormItem className="flex flex-col">
|
|
||||||
<FormLabel className="text-xs text-muted-foreground">
|
|
||||||
{t("hosts.pasteKey")}
|
|
||||||
</FormLabel>
|
|
||||||
<FormControl>
|
|
||||||
<textarea
|
|
||||||
placeholder={t("placeholders.pastePublicKey")}
|
|
||||||
className="flex min-h-[80px] w-full rounded-md border border-input bg-background px-3 py-2 text-sm ring-offset-background placeholder:text-muted-foreground focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-ring focus-visible:ring-offset-2 disabled:cursor-not-allowed disabled:opacity-50"
|
|
||||||
value={field.value || ""}
|
|
||||||
onChange={(e) => {
|
|
||||||
field.onChange(e.target.value);
|
|
||||||
debouncedPublicKeyDetection(e.target.value);
|
|
||||||
}}
|
|
||||||
/>
|
|
||||||
</FormControl>
|
|
||||||
</FormItem>
|
|
||||||
)}
|
|
||||||
/>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
{/* Public key type detection */}
|
|
||||||
{detectedPublicKeyType && form.watch("publicKey") && (
|
|
||||||
<div className="text-sm">
|
|
||||||
<span className="text-muted-foreground">
|
|
||||||
{t("credentials.detectedKeyType")}:{" "}
|
|
||||||
</span>
|
|
||||||
<span
|
|
||||||
className={`font-medium ${
|
|
||||||
detectedPublicKeyType === "invalid" ||
|
|
||||||
detectedPublicKeyType === "error"
|
|
||||||
? "text-destructive"
|
|
||||||
: "text-green-600"
|
|
||||||
}`}
|
|
||||||
>
|
|
||||||
{getFriendlyKeyTypeName(detectedPublicKeyType)}
|
|
||||||
</span>
|
|
||||||
{publicKeyDetectionLoading && (
|
|
||||||
<span className="ml-2 text-muted-foreground">
|
|
||||||
({t("credentials.detecting")}...)
|
|
||||||
</span>
|
|
||||||
)}
|
|
||||||
</div>
|
|
||||||
)}
|
|
||||||
|
|
||||||
<div className="text-xs text-muted-foreground">
|
|
||||||
{t("credentials.publicKeyNote")}
|
|
||||||
</div>
|
|
||||||
|
|
||||||
{/* Show existing public key for editing */}
|
|
||||||
{editingCredential && fullCredentialDetails?.publicKey && (
|
|
||||||
<FormItem>
|
|
||||||
<FormLabel>
|
|
||||||
{t("credentials.sshPublicKey")} ({t("hosts.existingKey")})
|
|
||||||
</FormLabel>
|
|
||||||
<FormControl>
|
|
||||||
<textarea
|
|
||||||
readOnly
|
|
||||||
className="flex min-h-[80px] w-full rounded-md border border-input bg-muted px-3 py-2 text-sm ring-offset-background placeholder:text-muted-foreground focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-ring focus-visible:ring-offset-2 disabled:cursor-not-allowed disabled:opacity-50"
|
|
||||||
value={fullCredentialDetails.publicKey}
|
|
||||||
/>
|
|
||||||
</FormControl>
|
|
||||||
<div className="text-xs text-muted-foreground mt-1">
|
|
||||||
{t("credentials.currentPublicKeyContent")}
|
|
||||||
</div>
|
|
||||||
</FormItem>
|
|
||||||
)}
|
|
||||||
</div>
|
|
||||||
|
|
||||||
{/* Generate Public Key Button */}
|
|
||||||
{form.watch("key") && (
|
|
||||||
<div className="mt-4">
|
|
||||||
<Button
|
|
||||||
type="button"
|
|
||||||
variant="outline"
|
|
||||||
size="sm"
|
|
||||||
onClick={handleGeneratePublicKey}
|
|
||||||
disabled={generatePublicKeyLoading}
|
|
||||||
className="w-full"
|
|
||||||
>
|
|
||||||
{generatePublicKeyLoading ? (
|
|
||||||
<>
|
|
||||||
<span className="mr-2">{t("credentials.generating")}...</span>
|
|
||||||
</>
|
|
||||||
) : (
|
|
||||||
<>
|
|
||||||
<span>{t("credentials.generatePublicKey")}</span>
|
|
||||||
</>
|
|
||||||
)}
|
|
||||||
</Button>
|
|
||||||
<p className="text-xs text-muted-foreground mt-2 text-center">
|
|
||||||
{t("credentials.generatePublicKeyNote")}
|
|
||||||
</p>
|
|
||||||
</div>
|
|
||||||
)}
|
|
||||||
</div>
|
|
||||||
</TabsContent>;
|
|
||||||
@@ -1,8 +1,17 @@
|
|||||||
import path from "path";
|
import path from "path";
|
||||||
|
import fs from "fs";
|
||||||
import tailwindcss from "@tailwindcss/vite";
|
import tailwindcss from "@tailwindcss/vite";
|
||||||
import { defineConfig } from "vite";
|
import { defineConfig } from "vite";
|
||||||
import react from "@vitejs/plugin-react-swc";
|
import react from "@vitejs/plugin-react-swc";
|
||||||
|
|
||||||
|
// SSL certificate paths
|
||||||
|
const sslCertPath = path.join(process.cwd(), "ssl/termix.crt");
|
||||||
|
const sslKeyPath = path.join(process.cwd(), "ssl/termix.key");
|
||||||
|
|
||||||
|
// Check if SSL certificates exist and HTTPS is requested
|
||||||
|
const hasSSL = fs.existsSync(sslCertPath) && fs.existsSync(sslKeyPath);
|
||||||
|
const useHTTPS = process.env.VITE_HTTPS === "true" && hasSSL;
|
||||||
|
|
||||||
// https://vite.dev/config/
|
// https://vite.dev/config/
|
||||||
export default defineConfig({
|
export default defineConfig({
|
||||||
plugins: [react(), tailwindcss()],
|
plugins: [react(), tailwindcss()],
|
||||||
@@ -12,4 +21,12 @@ export default defineConfig({
|
|||||||
},
|
},
|
||||||
},
|
},
|
||||||
base: "./",
|
base: "./",
|
||||||
|
server: {
|
||||||
|
https: useHTTPS ? {
|
||||||
|
cert: fs.readFileSync(sslCertPath),
|
||||||
|
key: fs.readFileSync(sslKeyPath),
|
||||||
|
} : false,
|
||||||
|
port: 5173,
|
||||||
|
host: "localhost",
|
||||||
|
},
|
||||||
});
|
});
|
||||||
|
|||||||
Ignoring Docker-related files such as `Dockerfile*`, `docker-compose*.yml`, and `.dockerignore` itself is a crucial best practice that prevents issues with recursive builds and avoids including unnecessary context in the Docker build. This is well done and helps maintain a clean and efficient build process.