diff --git a/.github/workflows/docker.yml b/.github/workflows/docker.yml index f8b484b2..eccbe4d9 100644 --- a/.github/workflows/docker.yml +++ b/.github/workflows/docker.yml @@ -42,22 +42,18 @@ jobs: ALL_TAGS=() if [ "$BUILD_TYPE" = "Production" ]; then - # Production build → push release + latest to both GHCR and Docker Hub TAGS+=("release-$VERSION" "latest") for tag in "${TAGS[@]}"; do ALL_TAGS+=("ghcr.io/lukegus/termix:$tag") ALL_TAGS+=("docker.io/bugattiguy527/termix:$tag") done else - # Dev build → push only dev-x.x.x to GHCR TAGS+=("dev-$VERSION") for tag in "${TAGS[@]}"; do ALL_TAGS+=("ghcr.io/lukegus/termix:$tag") done fi - echo "ALL_TAGS=${ALL_TAGS[*]}" >> $GITHUB_ENV - echo "All tags to build:" printf '%s\n' "${ALL_TAGS[@]}" - name: Login to GHCR diff --git a/.github/workflows/electron.yml b/.github/workflows/electron.yml index 73ce4ec5..46892be0 100644 --- a/.github/workflows/electron.yml +++ b/.github/workflows/electron.yml @@ -43,7 +43,6 @@ jobs: - name: Install dependencies run: | - # Retry npm ci up to 3 times on failure $maxAttempts = 3 $attempt = 1 while ($attempt -le $maxAttempts) { @@ -55,7 +54,6 @@ jobs: Write-Error "npm ci failed after $maxAttempts attempts" exit 1 } - Write-Host "npm ci attempt $attempt failed, retrying in 10 seconds..." 
Start-Sleep -Seconds 10 $attempt++ } @@ -66,14 +64,12 @@ jobs: run: | $VERSION = (Get-Content package.json | ConvertFrom-Json).version echo "version=$VERSION" >> $env:GITHUB_OUTPUT - echo "Building version: $VERSION" - name: Build Windows (All Architectures) run: npm run build && npx electron-builder --win --x64 --ia32 - name: List release files run: | - echo "Contents of release directory:" dir release - name: Upload Windows x64 NSIS Installer @@ -154,16 +150,14 @@ jobs: - name: Install dependencies run: | - # Retry npm ci up to 3 times on failure - for i in 1 2 3; do + for i in 1 2 3; + do if npm ci; then break else if [ $i -eq 3 ]; then - echo "npm ci failed after 3 attempts" exit 1 fi - echo "npm ci attempt $i failed, retrying in 10 seconds..." sleep 10 fi done @@ -179,41 +173,30 @@ jobs: VERSION=$(node -p "require('./package.json').version") cd release - # Rename x64 AppImage to use 'x64' if [ -f "termix_linux_x86_64_${VERSION}_appimage.AppImage" ]; then mv "termix_linux_x86_64_${VERSION}_appimage.AppImage" "termix_linux_x64_${VERSION}_appimage.AppImage" - echo "Renamed x64 AppImage to use 'x64' arch" fi - # Rename x64 deb to use 'x64' if [ -f "termix_linux_amd64_${VERSION}_deb.deb" ]; then mv "termix_linux_amd64_${VERSION}_deb.deb" "termix_linux_x64_${VERSION}_deb.deb" - echo "Renamed x64 deb to use 'x64' arch" fi - # Rename x64 tar.gz if it exists if [ -f "termix-${VERSION}.tar.gz" ]; then mv "termix-${VERSION}.tar.gz" "termix_linux_x64_${VERSION}_portable.tar.gz" - echo "Renamed x64 tar.gz" fi - # Rename arm64 tar.gz if it exists if [ -f "termix-${VERSION}-arm64.tar.gz" ]; then mv "termix-${VERSION}-arm64.tar.gz" "termix_linux_arm64_${VERSION}_portable.tar.gz" - echo "Renamed arm64 tar.gz" fi - # Rename armv7l tar.gz if it exists if [ -f "termix-${VERSION}-armv7l.tar.gz" ]; then mv "termix-${VERSION}-armv7l.tar.gz" "termix_linux_armv7l_${VERSION}_portable.tar.gz" - echo "Renamed armv7l tar.gz" fi cd .. 
- name: List release files run: | - echo "Contents of release directory:" ls -la release/ - name: Upload Linux x64 AppImage @@ -299,16 +282,14 @@ jobs: - name: Install dependencies run: | - # Retry npm ci up to 3 times on failure - for i in 1 2 3; do + for i in 1 2 3; + do if npm ci; then break else if [ $i -eq 3 ]; then - echo "npm ci failed after 3 attempts" exit 1 fi - echo "npm ci attempt $i failed, retrying in 10 seconds..." sleep 10 fi done @@ -320,9 +301,6 @@ jobs: run: | if [ -n "${{ secrets.MAC_BUILD_CERTIFICATE_BASE64 }}" ] && [ -n "${{ secrets.MAC_P12_PASSWORD }}" ]; then echo "has_certs=true" >> $GITHUB_OUTPUT - else - echo "has_certs=false" >> $GITHUB_OUTPUT - echo "⚠️ Code signing certificates not configured. MAS build will be unsigned." fi - name: Import Code Signing Certificates @@ -337,36 +315,26 @@ jobs: INSTALLER_CERT_PATH=$RUNNER_TEMP/installer_certificate.p12 KEYCHAIN_PATH=$RUNNER_TEMP/app-signing.keychain-db - # Decode certificates echo -n "$MAC_BUILD_CERTIFICATE_BASE64" | base64 --decode -o $APP_CERT_PATH if [ -n "$MAC_INSTALLER_CERTIFICATE_BASE64" ]; then - echo "Decoding installer certificate..." echo -n "$MAC_INSTALLER_CERTIFICATE_BASE64" | base64 --decode -o $INSTALLER_CERT_PATH else - echo "⚠️ MAC_INSTALLER_CERTIFICATE_BASE64 is empty" fi - # Create and configure keychain security create-keychain -p "$MAC_KEYCHAIN_PASSWORD" $KEYCHAIN_PATH security set-keychain-settings -lut 21600 $KEYCHAIN_PATH security unlock-keychain -p "$MAC_KEYCHAIN_PASSWORD" $KEYCHAIN_PATH - # Import application certificate - echo "Importing application certificate..." security import $APP_CERT_PATH -P "$MAC_P12_PASSWORD" -A -t cert -f pkcs12 -k $KEYCHAIN_PATH - # Import installer certificate if it exists if [ -f "$INSTALLER_CERT_PATH" ]; then - echo "Importing installer certificate..." 
security import $INSTALLER_CERT_PATH -P "$MAC_P12_PASSWORD" -A -t cert -f pkcs12 -k $KEYCHAIN_PATH else - echo "⚠️ Installer certificate file not found, skipping import" fi security list-keychain -d user -s $KEYCHAIN_PATH - echo "Imported certificates:" security find-identity -v -p codesigning $KEYCHAIN_PATH - name: Build macOS App Store Package @@ -374,32 +342,21 @@ jobs: env: ELECTRON_BUILDER_ALLOW_UNRESOLVED_DEPENDENCIES: true run: | - # Get current version for display CURRENT_VERSION=$(node -p "require('./package.json').version") BUILD_VERSION="${{ github.run_number }}" - echo "✅ Package version: $CURRENT_VERSION (unchanged)" - echo "✅ Build number for Apple: $BUILD_VERSION" - - # Build MAS with custom buildVersion npm run build && npx electron-builder --mac mas --universal --config.buildVersion="$BUILD_VERSION" - name: Clean up MAS keychain before DMG build if: steps.check_certs.outputs.has_certs == 'true' run: | security delete-keychain $RUNNER_TEMP/app-signing.keychain-db || true - echo "Cleaned up MAS keychain" - name: Check for Developer ID Certificates id: check_dev_id_certs run: | if [ -n "${{ secrets.DEVELOPER_ID_CERTIFICATE_BASE64 }}" ] && [ -n "${{ secrets.DEVELOPER_ID_P12_PASSWORD }}" ]; then echo "has_dev_id_certs=true" >> $GITHUB_OUTPUT - echo "✅ Developer ID certificates configured for DMG signing" - else - echo "has_dev_id_certs=false" >> $GITHUB_OUTPUT - echo "⚠️ Developer ID certificates not configured. DMG will be unsigned." - echo "Add DEVELOPER_ID_CERTIFICATE_BASE64 and DEVELOPER_ID_P12_PASSWORD secrets to enable DMG signing." fi - name: Import Developer ID Certificates @@ -414,34 +371,25 @@ jobs: DEV_INSTALLER_CERT_PATH=$RUNNER_TEMP/dev_installer_certificate.p12 KEYCHAIN_PATH=$RUNNER_TEMP/dev-signing.keychain-db - # Decode Developer ID certificate echo -n "$DEVELOPER_ID_CERTIFICATE_BASE64" | base64 --decode -o $DEV_CERT_PATH if [ -n "$DEVELOPER_ID_INSTALLER_CERTIFICATE_BASE64" ]; then - echo "Decoding Developer ID installer certificate..." 
echo -n "$DEVELOPER_ID_INSTALLER_CERTIFICATE_BASE64" | base64 --decode -o $DEV_INSTALLER_CERT_PATH else - echo "⚠️ DEVELOPER_ID_INSTALLER_CERTIFICATE_BASE64 is empty (optional)" fi - # Create and configure keychain security create-keychain -p "$MAC_KEYCHAIN_PASSWORD" $KEYCHAIN_PATH security set-keychain-settings -lut 21600 $KEYCHAIN_PATH security unlock-keychain -p "$MAC_KEYCHAIN_PASSWORD" $KEYCHAIN_PATH - # Import Developer ID Application certificate - echo "Importing Developer ID Application certificate..." security import $DEV_CERT_PATH -P "$DEVELOPER_ID_P12_PASSWORD" -A -t cert -f pkcs12 -k $KEYCHAIN_PATH - # Import Developer ID Installer certificate if it exists if [ -f "$DEV_INSTALLER_CERT_PATH" ]; then - echo "Importing Developer ID Installer certificate..." security import $DEV_INSTALLER_CERT_PATH -P "$DEVELOPER_ID_P12_PASSWORD" -A -t cert -f pkcs12 -k $KEYCHAIN_PATH fi security list-keychain -d user -s $KEYCHAIN_PATH - echo "Imported Developer ID certificates:" security find-identity -v -p codesigning $KEYCHAIN_PATH - name: Build macOS DMG @@ -452,19 +400,15 @@ jobs: APPLE_APP_SPECIFIC_PASSWORD: ${{ secrets.APPLE_ID_PASSWORD }} APPLE_TEAM_ID: ${{ secrets.APPLE_TEAM_ID }} run: | - # Build DMG without running npm run build again (already built above or skip if no certs) if [ "${{ steps.check_certs.outputs.has_certs }}" == "true" ]; then - # Frontend already built, just package DMG npx electron-builder --mac dmg --universal --x64 --arm64 --publish never else - # No certs, need to build frontend first npm run build && npx electron-builder --mac dmg --universal --x64 --arm64 --publish never fi - name: List release directory if: steps.check_certs.outputs.has_certs == 'true' run: | - echo "Contents of release directory:" ls -R release/ || echo "Release directory not found" - name: Upload macOS MAS PKG @@ -506,15 +450,6 @@ jobs: run: | if [ -n "${{ secrets.APPLE_KEY_ID }}" ] && [ -n "${{ secrets.APPLE_ISSUER_ID }}" ] && [ -n "${{ secrets.APPLE_KEY_CONTENT }}" ]; 
then echo "has_credentials=true" >> $GITHUB_OUTPUT - if [ "${{ github.event.inputs.artifact_destination }}" == "submit" ]; then - echo "✅ App Store Connect API credentials found. Will deploy to TestFlight." - else - echo "ℹ️ App Store Connect API credentials found, but store submission is disabled." - fi - else - echo "has_credentials=false" >> $GITHUB_OUTPUT - echo "⚠️ App Store Connect API credentials not configured. Skipping deployment." - echo "Add APPLE_KEY_ID, APPLE_ISSUER_ID, and APPLE_KEY_CONTENT secrets to enable automatic deployment." fi - name: Setup Ruby for Fastlane @@ -528,29 +463,22 @@ jobs: if: steps.check_asc_creds.outputs.has_credentials == 'true' && github.event.inputs.artifact_destination == 'submit' run: | gem install fastlane -N - fastlane --version - name: Deploy to App Store Connect (TestFlight) if: steps.check_asc_creds.outputs.has_credentials == 'true' && github.event.inputs.artifact_destination == 'submit' run: | PKG_FILE=$(find release -name "*.pkg" -type f | head -n 1) if [ -z "$PKG_FILE" ]; then - echo "Error: No .pkg file found in release directory" exit 1 fi - echo "Found package: $PKG_FILE" - # Create API key file mkdir -p ~/private_keys echo "${{ secrets.APPLE_KEY_CONTENT }}" | base64 --decode > ~/private_keys/AuthKey_${{ secrets.APPLE_KEY_ID }}.p8 - # Upload to App Store Connect using xcrun altool xcrun altool --upload-app -f "$PKG_FILE" \ --type macos \ --apiKey "${{ secrets.APPLE_KEY_ID }}" \ --apiIssuer "${{ secrets.APPLE_ISSUER_ID }}" - - echo "✅ Upload complete! 
Build will appear in App Store Connect after processing (10-30 minutes)" continue-on-error: true - name: Clean up keychains @@ -577,7 +505,6 @@ jobs: run: | $VERSION = (Get-Content package.json | ConvertFrom-Json).version echo "version=$VERSION" >> $env:GITHUB_OUTPUT - echo "Building Chocolatey package for version: $VERSION" - name: Download Windows x64 MSI artifact uses: actions/download-artifact@v4 @@ -595,8 +522,6 @@ jobs: echo "msi_name=$MSI_NAME" >> $env:GITHUB_OUTPUT echo "checksum=$CHECKSUM" >> $env:GITHUB_OUTPUT - echo "MSI File: $MSI_NAME" - echo "SHA256: $CHECKSUM" - name: Prepare Chocolatey package run: | @@ -604,33 +529,20 @@ jobs: $CHECKSUM = "${{ steps.msi-info.outputs.checksum }}" $MSI_NAME = "${{ steps.msi-info.outputs.msi_name }}" - # Construct the download URL with the actual release tag format $DOWNLOAD_URL = "https://github.com/Termix-SSH/Termix/releases/download/release-$VERSION-tag/$MSI_NAME" - # Copy chocolatey files to build directory New-Item -ItemType Directory -Force -Path "choco-build" Copy-Item -Path "chocolatey\*" -Destination "choco-build" -Recurse -Force - # Update chocolateyinstall.ps1 with actual values $installScript = Get-Content "choco-build\tools\chocolateyinstall.ps1" -Raw -Encoding UTF8 $installScript = $installScript -replace 'DOWNLOAD_URL_PLACEHOLDER', $DOWNLOAD_URL $installScript = $installScript -replace 'CHECKSUM_PLACEHOLDER', $CHECKSUM [System.IO.File]::WriteAllText("$PWD\choco-build\tools\chocolateyinstall.ps1", $installScript, [System.Text.UTF8Encoding]::new($false)) - # Update nuspec with version (preserve UTF-8 encoding without BOM) $nuspec = Get-Content "choco-build\termix-ssh.nuspec" -Raw -Encoding UTF8 $nuspec = $nuspec -replace 'VERSION_PLACEHOLDER', $VERSION [System.IO.File]::WriteAllText("$PWD\choco-build\termix-ssh.nuspec", $nuspec, [System.Text.UTF8Encoding]::new($false)) - echo "Chocolatey package prepared for version $VERSION" - echo "Download URL: $DOWNLOAD_URL" - - # Verify the nuspec is valid - echo "" 
- echo "Verifying nuspec content:" - Get-Content "choco-build\termix-ssh.nuspec" -Head 10 - echo "" - - name: Install Chocolatey run: | Set-ExecutionPolicy Bypass -Scope Process -Force @@ -640,29 +552,17 @@ - name: Pack Chocolatey package run: | cd choco-build - echo "Packing Chocolatey package..." choco pack termix-ssh.nuspec if ($LASTEXITCODE -ne 0) { - echo "❌ Failed to pack Chocolatey package" - exit 1 + throw "Chocolatey pack failed with exit code $LASTEXITCODE" } - echo "" - echo "✅ Package created successfully" - echo "Package contents:" - Get-ChildItem *.nupkg | ForEach-Object { echo $_.Name } - - name: Check for Chocolatey API Key id: check_choco_key run: | if ("${{ secrets.CHOCOLATEY_API_KEY }}" -ne "") { echo "has_key=true" >> $env:GITHUB_OUTPUT - echo "✅ Chocolatey API key found. Will push to Chocolatey." - } else { - echo "has_key=false" >> $env:GITHUB_OUTPUT - echo "⚠️ Chocolatey API key not configured. Package will be created but not pushed." - echo "Add CHOCOLATEY_API_KEY secret to enable automatic submission." } - name: Push to Chocolatey @@ -675,29 +575,10 @@ try { choco push "termix-ssh.$VERSION.nupkg" --source https://push.chocolatey.org/ if ($LASTEXITCODE -eq 0) { - echo "" - echo "✅ Package pushed to Chocolatey successfully!" - echo "View at: https://community.chocolatey.org/packages/termix-ssh/$VERSION" } else { throw "Chocolatey push failed with exit code $LASTEXITCODE" } } catch { - echo "" - echo "❌ Failed to push to Chocolatey" - echo "" - echo "Common reasons:" - echo "1. Package ID 'termix-ssh' is already owned by another user" - echo "2. You need to register/claim the package ID first" - echo "3. API key doesn't have push permissions" - echo "" - echo "Solutions:" - echo "1. Check if package exists: https://community.chocolatey.org/packages/termix-ssh" - echo "2. If it exists and is yours, contact Chocolatey support to claim it" - echo "3. 
Register a new package ID at: https://community.chocolatey.org/" - echo "" - echo "The package artifact has been saved for manual submission." - echo "" - exit 1 } - name: Upload Chocolatey package as artifact @@ -727,7 +608,6 @@ jobs: RELEASE_DATE=$(date +%Y-%m-%d) echo "version=$VERSION" >> $GITHUB_OUTPUT echo "release_date=$RELEASE_DATE" >> $GITHUB_OUTPUT - echo "Building Flatpak submission for version: $VERSION" - name: Download Linux x64 AppImage artifact uses: actions/download-artifact@v4 @@ -746,12 +626,10 @@ jobs: run: | VERSION="${{ steps.package-version.outputs.version }}" - # x64 AppImage APPIMAGE_X64_FILE=$(find artifact-x64 -name "*.AppImage" -type f | head -n 1) APPIMAGE_X64_NAME=$(basename "$APPIMAGE_X64_FILE") CHECKSUM_X64=$(sha256sum "$APPIMAGE_X64_FILE" | awk '{print $1}') - # arm64 AppImage APPIMAGE_ARM64_FILE=$(find artifact-arm64 -name "*.AppImage" -type f | head -n 1) APPIMAGE_ARM64_NAME=$(basename "$APPIMAGE_ARM64_FILE") CHECKSUM_ARM64=$(sha256sum "$APPIMAGE_ARM64_FILE" | awk '{print $1}') @@ -761,11 +639,6 @@ jobs: echo "appimage_arm64_name=$APPIMAGE_ARM64_NAME" >> $GITHUB_OUTPUT echo "checksum_arm64=$CHECKSUM_ARM64" >> $GITHUB_OUTPUT - echo "x64 AppImage: $APPIMAGE_X64_NAME" - echo "x64 SHA256: $CHECKSUM_X64" - echo "arm64 AppImage: $APPIMAGE_ARM64_NAME" - echo "arm64 SHA256: $CHECKSUM_ARM64" - - name: Install ImageMagick for icon generation run: | sudo apt-get update @@ -780,101 +653,26 @@ jobs: APPIMAGE_X64_NAME="${{ steps.appimage-info.outputs.appimage_x64_name }}" APPIMAGE_ARM64_NAME="${{ steps.appimage-info.outputs.appimage_arm64_name }}" - # Create submission directory mkdir -p flatpak-submission - # Copy Flatpak files to submission directory cp flatpak/com.karmaa.termix.yml flatpak-submission/ cp flatpak/com.karmaa.termix.desktop flatpak-submission/ cp flatpak/com.karmaa.termix.metainfo.xml flatpak-submission/ cp flatpak/flathub.json flatpak-submission/ - # Copy and prepare icons cp public/icon.svg 
flatpak-submission/com.karmaa.termix.svg convert public/icon.png -resize 256x256 flatpak-submission/icon-256.png convert public/icon.png -resize 128x128 flatpak-submission/icon-128.png - # Update manifest with version and checksums sed -i "s/VERSION_PLACEHOLDER/$VERSION/g" flatpak-submission/com.karmaa.termix.yml sed -i "s/CHECKSUM_X64_PLACEHOLDER/$CHECKSUM_X64/g" flatpak-submission/com.karmaa.termix.yml sed -i "s/CHECKSUM_ARM64_PLACEHOLDER/$CHECKSUM_ARM64/g" flatpak-submission/com.karmaa.termix.yml - # Update metainfo with version and date sed -i "s/VERSION_PLACEHOLDER/$VERSION/g" flatpak-submission/com.karmaa.termix.metainfo.xml sed -i "s/DATE_PLACEHOLDER/$RELEASE_DATE/g" flatpak-submission/com.karmaa.termix.metainfo.xml - echo "✅ Flatpak submission files prepared for version $VERSION" - echo "x64 Download URL: https://github.com/Termix-SSH/Termix/releases/download/release-$VERSION-tag/$APPIMAGE_X64_NAME" - echo "arm64 Download URL: https://github.com/Termix-SSH/Termix/releases/download/release-$VERSION-tag/$APPIMAGE_ARM64_NAME" - - - name: Create submission instructions - run: | - cat > flatpak-submission/SUBMISSION_INSTRUCTIONS.md << 'EOF' - # Flathub Submission Instructions for Termix - - ## Automatic Submission (Recommended) - - All files needed for Flathub submission are in this artifact. Follow these steps: - - 1. **Fork the Flathub repository**: - - Go to https://github.com/flathub/flathub - - Click "Fork" button - - 2. **Clone your fork**: - ```bash - git clone https://github.com/YOUR-USERNAME/flathub.git - cd flathub - git checkout -b com.karmaa.termix - ``` - - 3. **Copy all files from this artifact** to the root of your flathub fork - - 4. **Commit and push**: - ```bash - git add . - git commit -m "Add Termix ${{ steps.package-version.outputs.version }}" - git push origin com.karmaa.termix - ``` - - 5. 
**Create Pull Request**: - - Go to https://github.com/YOUR-USERNAME/flathub - - Click "Compare & pull request" - - Submit PR to flathub/flathub - - ## Files in this submission: - - - `com.karmaa.termix.yml` - Flatpak manifest - - `com.karmaa.termix.desktop` - Desktop entry - - `com.karmaa.termix.metainfo.xml` - AppStream metadata - - `flathub.json` - Flathub configuration - - `com.karmaa.termix.svg` - SVG icon - - `icon-256.png` - 256x256 icon - - `icon-128.png` - 128x128 icon - - ## Version Information: - - - Version: ${{ steps.package-version.outputs.version }} - - Release Date: ${{ steps.package-version.outputs.release_date }} - - x64 AppImage SHA256: ${{ steps.appimage-info.outputs.checksum_x64 }} - - arm64 AppImage SHA256: ${{ steps.appimage-info.outputs.checksum_arm64 }} - - ## After Submission: - - 1. Flathub maintainers will review your submission (usually 1-5 days) - 2. They may request changes - be responsive to feedback - 3. Once approved, Termix will be available via: `flatpak install flathub com.karmaa.termix` - - ## Resources: - - - [Flathub Submission Guidelines](https://docs.flathub.org/docs/for-app-authors/submission) - - [Flatpak Documentation](https://docs.flatpak.org/) - EOF - - echo "✅ Created submission instructions" - - name: List submission files run: | - echo "Flatpak submission files:" ls -la flatpak-submission/ - name: Upload Flatpak submission as artifact @@ -884,19 +682,6 @@ jobs: path: flatpak-submission/* retention-days: 30 - - name: Display next steps - run: | - echo "" - echo "🎉 Flatpak submission files ready!" - echo "" - echo "📦 Download the 'flatpak-submission' artifact and follow SUBMISSION_INSTRUCTIONS.md" - echo "" - echo "Quick summary:" - echo "1. Fork https://github.com/flathub/flathub" - echo "2. Copy artifact files to your fork" - echo "3. 
Create PR to flathub/flathub" - echo "" - submit-to-homebrew: runs-on: macos-latest if: github.event.inputs.artifact_destination == 'submit' @@ -915,7 +700,6 @@ jobs: run: | VERSION=$(node -p "require('./package.json').version") echo "version=$VERSION" >> $GITHUB_OUTPUT - echo "Building Homebrew Cask for version: $VERSION" - name: Download macOS Universal DMG artifact uses: actions/download-artifact@v4 @@ -933,8 +717,6 @@ jobs: echo "dmg_name=$DMG_NAME" >> $GITHUB_OUTPUT echo "checksum=$CHECKSUM" >> $GITHUB_OUTPUT - echo "DMG File: $DMG_NAME" - echo "SHA256: $CHECKSUM" - name: Prepare Homebrew submission files run: | @@ -942,155 +724,24 @@ jobs: CHECKSUM="${{ steps.dmg-info.outputs.checksum }}" DMG_NAME="${{ steps.dmg-info.outputs.dmg_name }}" - # Create submission directory mkdir -p homebrew-submission/Casks/t - # Copy Homebrew cask file cp homebrew/termix.rb homebrew-submission/Casks/t/termix.rb cp homebrew/README.md homebrew-submission/ - # Update cask with version and checksum sed -i '' "s/VERSION_PLACEHOLDER/$VERSION/g" homebrew-submission/Casks/t/termix.rb sed -i '' "s/CHECKSUM_PLACEHOLDER/$CHECKSUM/g" homebrew-submission/Casks/t/termix.rb - echo "✅ Homebrew Cask prepared for version $VERSION" - echo "Download URL: https://github.com/Termix-SSH/Termix/releases/download/release-$VERSION-tag/$DMG_NAME" - - name: Verify Cask syntax run: | - # Install Homebrew if not present (should be on macos-latest) if ! command -v brew &> /dev/null; then - echo "Installing Homebrew..." 
/bin/bash -c "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/HEAD/install.sh)" fi - # Basic syntax check ruby -c homebrew-submission/Casks/t/termix.rb - echo "✅ Cask syntax is valid" - - - name: Create submission instructions - run: | - cat > homebrew-submission/SUBMISSION_INSTRUCTIONS.md << 'EOF' - # Homebrew Cask Submission Instructions for Termix - - ## Option 1: Submit to Official Homebrew Cask (Recommended) - - ### Prerequisites - - macOS with Homebrew installed - - GitHub account - - ### Steps - - 1. **Fork the Homebrew Cask repository**: - - Go to https://github.com/Homebrew/homebrew-cask - - Click "Fork" button - - 2. **Clone your fork**: - ```bash - git clone https://github.com/YOUR-USERNAME/homebrew-cask.git - cd homebrew-cask - git checkout -b termix - ``` - - 3. **Copy the cask file**: - - Copy `Casks/t/termix.rb` from this artifact to your fork at `Casks/t/termix.rb` - - Note: Casks are organized by first letter in subdirectories - - 4. **Test the cask locally**: - ```bash - brew install --cask ./Casks/t/termix.rb - brew uninstall --cask termix - ``` - - 5. **Run audit checks**: - ```bash - brew audit --cask --online ./Casks/t/termix.rb - brew style ./Casks/t/termix.rb - ``` - - 6. **Commit and push**: - ```bash - git add Casks/t/termix.rb - git commit -m "Add Termix ${{ steps.package-version.outputs.version }}" - git push origin termix - ``` - - 7. **Create Pull Request**: - - Go to https://github.com/YOUR-USERNAME/homebrew-cask - - Click "Compare & pull request" - - Fill in the PR template - - Submit to Homebrew/homebrew-cask - - ### PR Requirements - - Your PR should include: - - Clear commit message: "Add Termix X.Y.Z" or "Update Termix to X.Y.Z" - - All audit checks passing - - Working download URL - - Valid SHA256 checksum - - ## Option 2: Create Your Own Tap (Alternative) - - If you want more control and faster updates: - - 1. 
**Create a tap repository**: - - Create repo: `Termix-SSH/homebrew-termix` - - Add `Casks/termix.rb` to the repo - - 2. **Users install with**: - ```bash - brew tap termix-ssh/termix - brew install --cask termix - ``` - - ### Advantages of Custom Tap - - No approval process - - Instant updates - - Full control - - Can include beta versions - - ### Disadvantages - - Less discoverable - - Users must add tap first - - You maintain it yourself - - ## Files in this submission: - - - `Casks/t/termix.rb` - Homebrew Cask formula - - `README.md` - Detailed documentation - - `SUBMISSION_INSTRUCTIONS.md` - This file - - ## Version Information: - - - Version: ${{ steps.package-version.outputs.version }} - - DMG SHA256: ${{ steps.dmg-info.outputs.checksum }} - - DMG URL: https://github.com/Termix-SSH/Termix/releases/download/release-${{ steps.package-version.outputs.version }}-tag/${{ steps.dmg-info.outputs.dmg_name }} - - ## After Submission: - - ### Official Homebrew Cask: - 1. Maintainers will review (usually 24-48 hours) - 2. May request changes or fixes - 3. Once merged, users can install with: `brew install --cask termix` - 4. Homebrew bot will auto-update for future releases - - ### Custom Tap: - 1. Push to your tap repository - 2. Immediately available to users - 3. Update the cask file for each new release - - ## Resources: - - - [Homebrew Cask Documentation](https://docs.brew.sh/Cask-Cookbook) - - [Acceptable Casks](https://docs.brew.sh/Acceptable-Casks) - - [How to Open a PR](https://docs.brew.sh/How-To-Open-a-Homebrew-Pull-Request) - EOF - - echo "✅ Created submission instructions" - name: List submission files run: | - echo "Homebrew submission files:" find homebrew-submission -type f - name: Upload Homebrew submission as artifact @@ -1100,18 +751,6 @@ jobs: path: homebrew-submission/* retention-days: 30 - - name: Display next steps - run: | - echo "" - echo "🍺 Homebrew Cask ready!" 
- echo "" - echo "📦 Download the 'homebrew-submission' artifact and follow SUBMISSION_INSTRUCTIONS.md" - echo "" - echo "Quick summary:" - echo "Option 1 (Recommended): Fork https://github.com/Homebrew/homebrew-cask and submit PR" - echo "Option 2 (Alternative): Create your own tap at Termix-SSH/homebrew-termix" - echo "" - upload-to-release: runs-on: blacksmith-4vcpu-ubuntu-2404 if: github.event.inputs.artifact_destination == 'release' @@ -1128,49 +767,25 @@ - name: Get latest release id: get_release run: | - echo "Fetching latest release from ${{ github.repository }}..." - LATEST_RELEASE=$(gh release list --repo ${{ github.repository }} --limit 1 --json tagName,name,isLatest -q '.[0]') - - if [ -z "$LATEST_RELEASE" ]; then - echo "ERROR: No releases found in ${{ github.repository }}" - exit 1 - fi - - RELEASE_TAG=$(echo "$LATEST_RELEASE" | jq -r '.tagName') - RELEASE_NAME=$(echo "$LATEST_RELEASE" | jq -r '.name') - - echo "tag=$RELEASE_TAG" >> $GITHUB_OUTPUT - echo "name=$RELEASE_NAME" >> $GITHUB_OUTPUT - echo "Latest release: $RELEASE_NAME ($RELEASE_TAG)" - env: + LATEST_RELEASE=$(gh release list --repo ${{ github.repository }} --limit 1 --json tagName,name,isLatest -q '.[0]'); echo "RELEASE_TAG=$(echo "$LATEST_RELEASE" | jq -r '.tagName')" >> $GITHUB_ENV env: GH_TOKEN: ${{ github.token }} - name: Display artifact structure run: | - echo "Artifact structure:" ls -R artifacts/ - name: Upload artifacts to latest release run: | - RELEASE_TAG="${{ steps.get_release.outputs.tag }}" - echo "Uploading artifacts to release: $RELEASE_TAG" - echo "" - cd artifacts for dir in */; do - echo "Processing directory: $dir" cd "$dir" - for file in *; do + for file in *; + do if [ -f "$file" ]; then - echo "Uploading: $file" gh release upload "$RELEASE_TAG" "$file" --repo ${{ github.repository }} --clobber - echo "✓ $file uploaded successfully" fi done cd .. 
done - - echo "" - echo "All artifacts uploaded to: https://github.com/${{ github.repository }}/releases/tag/$RELEASE_TAG" env: GH_TOKEN: ${{ github.token }} diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 3760e9d9..9db7959d 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -103,4 +103,4 @@ This will start the backend and the frontend Vite server. You can access Termix If you need help or want to request a feature with Termix, visit the [Issues](https://github.com/Termix-SSH/Support/issues) page, log in, and press `New Issue`. Please be as detailed as possible in your issue, preferably written in English. You can also join the [Discord](https://discord.gg/jVQGdvHDrf) server and visit the support -channel, however, response times may be longer. \ No newline at end of file +channel, however, response times may be longer. diff --git a/chocolatey/termix-ssh.nuspec b/chocolatey/termix-ssh.nuspec index cba8f817..b5de0b13 100644 --- a/chocolatey/termix-ssh.nuspec +++ b/chocolatey/termix-ssh.nuspec @@ -16,7 +16,8 @@ https://github.com/Termix-SSH/Support/issues docker ssh self-hosted file-management ssh-tunnel termix server-management terminal Termix is a web-based server management platform with SSH terminal, tunneling, and file editing capabilities. - Termix is an open-source, forever-free, self-hosted all-in-one server management platform. It provides a web-based solution for managing your servers and infrastructure through a single, intuitive interface. + +Termix is an open-source, forever-free, self-hosted all-in-one server management platform. It provides a web-based solution for managing your servers and infrastructure through a single, intuitive interface. Termix offers: - SSH terminal access @@ -24,7 +25,8 @@ Termix offers: - Remote file management - Server monitoring and management -This package installs the desktop application version of Termix. +This package installs the desktop application version of Termix. 
+ https://github.com/Termix-SSH/Termix/releases diff --git a/chocolatey/tools/chocolateyuninstall.ps1 b/chocolatey/tools/chocolateyuninstall.ps1 index 95457b73..48a5e18c 100644 --- a/chocolatey/tools/chocolateyuninstall.ps1 +++ b/chocolatey/tools/chocolateyuninstall.ps1 @@ -29,6 +29,5 @@ if ($key.Count -eq 1) { } elseif ($key.Count -gt 1) { Write-Warning "$($key.Count) matches found!" Write-Warning "To prevent accidental data loss, no programs will be uninstalled." - Write-Warning "Please alert package maintainer the following keys were matched:" $key | % {Write-Warning "- $($_.DisplayName)"} } diff --git a/docker/nginx.conf b/docker/nginx.conf index bae07954..afec21e9 100644 --- a/docker/nginx.conf +++ b/docker/nginx.conf @@ -23,18 +23,14 @@ http { listen ${PORT}; server_name localhost; - # X-Frame-Options removed to allow Electron iframe embedding - # add_header X-Frame-Options DENY always; add_header X-Content-Type-Options nosniff always; add_header X-XSS-Protection "1; mode=block" always; - # CORS headers for Electron iframe - reflect the origin for credentials support add_header Access-Control-Allow-Origin $http_origin always; add_header Access-Control-Allow-Methods "GET, POST, PUT, DELETE, OPTIONS, PATCH" always; add_header Access-Control-Allow-Headers "Origin, X-Requested-With, Content-Type, Accept, Authorization" always; add_header Access-Control-Allow-Credentials "true" always; - # Serve static assets directly location ~* \.(js|css|png|jpg|jpeg|gif|ico|svg|woff|woff2|ttf|eot)$ { root /usr/share/nginx/html; expires 1y; @@ -98,7 +94,7 @@ http { proxy_set_header X-Real-IP $remote_addr; proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; proxy_set_header X-Forwarded-Proto $scheme; - + proxy_connect_timeout 60s; proxy_send_timeout 300s; proxy_read_timeout 300s; @@ -107,18 +103,18 @@ http { location ~ ^/database(/.*)?$ { client_max_body_size 5G; client_body_timeout 300s; - + proxy_pass http://127.0.0.1:30001; proxy_http_version 1.1; proxy_set_header 
Host $host; proxy_set_header X-Real-IP $remote_addr; proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; proxy_set_header X-Forwarded-Proto $scheme; - + proxy_connect_timeout 60s; proxy_send_timeout 300s; proxy_read_timeout 300s; - + proxy_request_buffering off; proxy_buffering off; } @@ -126,18 +122,18 @@ location ~ ^/db(/.*)?$ { client_max_body_size 5G; client_body_timeout 300s; - + proxy_pass http://127.0.0.1:30001; proxy_http_version 1.1; proxy_set_header Host $host; proxy_set_header X-Real-IP $remote_addr; proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; proxy_set_header X-Forwarded-Proto $scheme; - + proxy_connect_timeout 60s; proxy_send_timeout 300s; proxy_read_timeout 300s; - + proxy_request_buffering off; proxy_buffering off; } @@ -222,18 +218,18 @@ location /ssh/file_manager/ssh/ { client_max_body_size 5G; client_body_timeout 300s; - + proxy_pass http://127.0.0.1:30004; proxy_http_version 1.1; proxy_set_header Host $host; proxy_set_header X-Real-IP $remote_addr; proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; proxy_set_header X-Forwarded-Proto $scheme; - + proxy_connect_timeout 60s; proxy_send_timeout 300s; proxy_read_timeout 300s; - + proxy_request_buffering off; proxy_buffering off; } @@ -288,4 +284,4 @@ root /usr/share/nginx/html; } } -} \ No newline at end of file +} diff --git a/electron/main.cjs b/electron/main.cjs index d28a9fc5..0d3a2c6e 100644 --- a/electron/main.cjs +++ b/electron/main.cjs @@ -1,4 +1,11 @@ -const { app, BrowserWindow, shell, ipcMain, dialog, Menu } = require("electron"); +const { + app, + BrowserWindow, + shell, + ipcMain, + dialog, + Menu, +} = require("electron"); const path = require("path"); const fs = require("fs"); const os = require("os"); @@ -57,37 +64,38 @@ function createWindow() { } if (isDev) { - mainWindow.loadURL("http://localhost:5173"); + mainWindow.loadURL("http://localhost:5173"); mainWindow.webContents.openDevTools(); } else { const indexPath = 
path.join(__dirname, "..", "dist", "index.html"); mainWindow.loadFile(indexPath); } - // Allow iframes to load from any origin by removing X-Frame-Options headers mainWindow.webContents.session.webRequest.onHeadersReceived( (details, callback) => { const headers = details.responseHeaders; - // Remove headers that block iframe embedding if (headers) { delete headers["x-frame-options"]; delete headers["X-Frame-Options"]; - // Modify CSP to allow framing if (headers["content-security-policy"]) { - headers["content-security-policy"] = headers["content-security-policy"] - .map(value => value.replace(/frame-ancestors[^;]*/gi, '')) - .filter(value => value.trim().length > 0); + headers["content-security-policy"] = headers[ + "content-security-policy" + ] + .map((value) => value.replace(/frame-ancestors[^;]*/gi, "")) + .filter((value) => value.trim().length > 0); if (headers["content-security-policy"].length === 0) { delete headers["content-security-policy"]; } } if (headers["Content-Security-Policy"]) { - headers["Content-Security-Policy"] = headers["Content-Security-Policy"] - .map(value => value.replace(/frame-ancestors[^;]*/gi, '')) - .filter(value => value.trim().length > 0); + headers["Content-Security-Policy"] = headers[ + "Content-Security-Policy" + ] + .map((value) => value.replace(/frame-ancestors[^;]*/gi, "")) + .filter((value) => value.trim().length > 0); if (headers["Content-Security-Policy"].length === 0) { delete headers["Content-Security-Policy"]; @@ -96,7 +104,7 @@ function createWindow() { } callback({ responseHeaders: headers }); - } + }, ); mainWindow.once("ready-to-show", () => { diff --git a/flatpak/com.karmaa.termix.yml b/flatpak/com.karmaa.termix.yml index ce4091e5..4405a10f 100644 --- a/flatpak/com.karmaa.termix.yml +++ b/flatpak/com.karmaa.termix.yml @@ -1,9 +1,9 @@ app-id: com.karmaa.termix runtime: org.freedesktop.Platform -runtime-version: '23.08' +runtime-version: "23.08" sdk: org.freedesktop.Sdk base: org.electronjs.Electron2.BaseApp 
-base-version: '23.08' +base-version: "23.08" command: termix separate-locales: false diff --git a/flatpak/prepare-flatpak.sh b/flatpak/prepare-flatpak.sh index 43afe818..05162b64 100644 --- a/flatpak/prepare-flatpak.sh +++ b/flatpak/prepare-flatpak.sh @@ -1,9 +1,6 @@ #!/bin/bash set -e -# This script prepares the Flatpak submission files -# It should be run from the repository root - VERSION="$1" CHECKSUM="$2" RELEASE_DATE="$3" @@ -16,39 +13,22 @@ fi echo "Preparing Flatpak submission for version $VERSION" -# Copy icon files cp public/icon.svg flatpak/com.karmaa.termix.svg echo "✓ Copied SVG icon" -# Generate PNG icons if ImageMagick is available if command -v convert &> /dev/null; then convert public/icon.png -resize 256x256 flatpak/icon-256.png convert public/icon.png -resize 128x128 flatpak/icon-128.png echo "✓ Generated PNG icons" else - # Fallback: just copy the original PNG cp public/icon.png flatpak/icon-256.png cp public/icon.png flatpak/icon-128.png echo "⚠ ImageMagick not found, using original icon" fi -# Update manifest with version and checksum sed -i "s/VERSION_PLACEHOLDER/$VERSION/g" flatpak/com.karmaa.termix.yml sed -i "s/CHECKSUM_PLACEHOLDER/$CHECKSUM/g" flatpak/com.karmaa.termix.yml echo "✓ Updated manifest with version $VERSION" -# Update metainfo with version and date sed -i "s/VERSION_PLACEHOLDER/$VERSION/g" flatpak/com.karmaa.termix.metainfo.xml sed -i "s/DATE_PLACEHOLDER/$RELEASE_DATE/g" flatpak/com.karmaa.termix.metainfo.xml -echo "✓ Updated metainfo with version $VERSION and date $RELEASE_DATE" - -echo "" -echo "✅ Flatpak submission files prepared!" -echo "" -echo "Next steps:" -echo "1. Review the files in the flatpak/ directory" -echo "2. Fork https://github.com/flathub/flathub" -echo "3. Create a new branch named 'com.karmaa.termix'" -echo "4. Copy all files from flatpak/ to the root of your fork" -echo "5. Commit and push to your fork" -echo "6. 
Open a PR to flathub/flathub" diff --git a/homebrew/README.md b/homebrew/README.md deleted file mode 100644 index 08b0f68f..00000000 --- a/homebrew/README.md +++ /dev/null @@ -1,154 +0,0 @@ -# Homebrew Cask for Termix - -This directory contains the Homebrew Cask formula for installing Termix on macOS. - -## Files - -- **termix.rb** - Homebrew Cask formula - -## What is a Homebrew Cask? - -Homebrew Casks are used to install GUI macOS applications. Unlike formulae (which are for command-line tools), casks handle: - -- Downloading DMG/PKG installers -- Installing .app bundles to /Applications -- Managing application preferences and cache cleanup - -## Submission Options - -You have two options for distributing Termix via Homebrew: - -### Option 1: Submit to Official Homebrew Cask (Recommended) - -Submit to the official homebrew-cask repository for maximum visibility. - -**Advantages:** - -- Discoverable by all Homebrew users -- Built-in update checking -- Official Homebrew support - -**Process:** - -1. Download the `homebrew-submission` artifact from GitHub Actions (when using "submit" option) -2. Fork https://github.com/Homebrew/homebrew-cask -3. Create a new branch: `git checkout -b termix` -4. Add the cask file: `Casks/t/termix.rb` (note the subdirectory by first letter) -5. Test locally: `brew install --cask ./Casks/t/termix.rb` -6. Run audit: `brew audit --cask --online ./Casks/t/termix.rb` -7. Commit and push to your fork -8. Create a PR to Homebrew/homebrew-cask - -**Requirements for acceptance:** - -- App must be stable (not beta/alpha) -- Source code must be public -- No analytics/tracking without opt-in -- Pass all brew audit checks - -### Option 2: Create Your Own Tap - -Create a custom Homebrew tap for more control and faster updates. - -**Advantages:** - -- Full control over updates -- No approval process -- Can include beta/alpha releases - -**Process:** - -1. Create a new repository: `Termix-SSH/homebrew-termix` -2. 
Add the cask file to: `Casks/termix.rb` -3. Users install with: `brew install --cask termix-ssh/termix/termix` - -## Installation (for users) - -### From Official Homebrew Cask (after approval): - -```bash -brew install --cask termix -``` - -### From Custom Tap: - -```bash -# Add the tap -brew tap termix-ssh/termix - -# Install the cask -brew install --cask termix -``` - -## Updating the Cask - -When you release a new version: - -### For Official Homebrew Cask: - -1. Homebrew bot usually auto-updates within hours -2. Or manually submit a PR with the new version/checksum - -### For Custom Tap: - -1. Update the version and sha256 in termix.rb -2. Commit and push to your tap repository -3. Users run: `brew upgrade --cask termix` - -## Testing Locally - -Before submitting, test the cask: - -```bash -# Install from local file -brew install --cask ./homebrew/termix.rb - -# Verify it works -open /Applications/Termix.app - -# Uninstall -brew uninstall --cask termix - -# Run audit checks -brew audit --cask --online ./homebrew/termix.rb - -# Style check -brew style ./homebrew/termix.rb -``` - -## Automated Submission Preparation - -The GitHub Actions workflow automatically prepares the Homebrew submission when you select "submit": - -1. Builds macOS universal DMG -2. Calculates SHA256 checksum -3. Updates the cask file with version and checksum -4. Creates a `homebrew-submission` artifact - -Download the artifact and follow the submission instructions included. 
- -## Cask File Structure - -The cask file (`termix.rb`) includes: - -- **version** - Automatically set from package.json -- **sha256** - Checksum of the universal DMG for security -- **url** - Download URL from GitHub releases -- **name** - Display name -- **desc** - Short description -- **homepage** - Project homepage -- **livecheck** - Automatic update detection -- **app** - The .app bundle to install -- **zap** - Files to remove on complete uninstall - -## Requirements - -- macOS 10.15 (Catalina) or later -- Homebrew 4.0.0 or later -- Universal DMG must be code-signed and notarized (already handled by your build process) - -## Resources - -- [Homebrew Cask Documentation](https://docs.brew.sh/Cask-Cookbook) -- [Cask Submission Guidelines](https://github.com/Homebrew/homebrew-cask/blob/master/CONTRIBUTING.md) -- [Homebrew Formula Cookbook](https://docs.brew.sh/Formula-Cookbook) diff --git a/src/backend/dashboard.ts b/src/backend/dashboard.ts index 5522b67d..acf95fff 100644 --- a/src/backend/dashboard.ts +++ b/src/backend/dashboard.ts @@ -12,10 +12,8 @@ import type { AuthenticatedRequest } from "../types/index.js"; const app = express(); const authManager = AuthManager.getInstance(); -// Track server start time const serverStartTime = Date.now(); -// In-memory rate limiter for activity logging const activityRateLimiter = new Map(); const RATE_LIMIT_MS = 1000; // 1 second window @@ -60,7 +58,6 @@ app.use(express.json({ limit: "1mb" })); app.use(authManager.createAuthMiddleware()); -// Get server uptime app.get("/uptime", async (req, res) => { try { const uptimeMs = Date.now() - serverStartTime; @@ -80,7 +77,6 @@ app.get("/uptime", async (req, res) => { } }); -// Get recent activity for current user app.get("/activity/recent", async (req, res) => { try { const userId = (req as AuthenticatedRequest).userId; @@ -112,7 +108,6 @@ app.get("/activity/recent", async (req, res) => { } }); -// Log new activity app.post("/activity/log", async (req, res) => { try { const 
userId = (req as AuthenticatedRequest).userId; @@ -138,22 +133,18 @@ app.post("/activity/log", async (req, res) => { }); } - // In-memory rate limiting to prevent duplicate requests const rateLimitKey = `${userId}:${hostId}:${type}`; const now = Date.now(); const lastLogged = activityRateLimiter.get(rateLimitKey); if (lastLogged && now - lastLogged < RATE_LIMIT_MS) { - // Too soon after last request, reject as duplicate return res.json({ message: "Activity already logged recently (rate limited)", }); } - // Update rate limiter activityRateLimiter.set(rateLimitKey, now); - // Clean up old entries from rate limiter (keep it from growing indefinitely) if (activityRateLimiter.size > 10000) { const entriesToDelete: string[] = []; for (const [key, timestamp] of activityRateLimiter.entries()) { @@ -164,7 +155,6 @@ app.post("/activity/log", async (req, res) => { entriesToDelete.forEach((key) => activityRateLimiter.delete(key)); } - // Verify the host belongs to the user const hosts = await SimpleDBOps.select( getDb() .select() @@ -178,7 +168,6 @@ app.post("/activity/log", async (req, res) => { return res.status(404).json({ error: "Host not found" }); } - // Insert new activity const result = (await SimpleDBOps.insert( recentActivity, "recent_activity", @@ -191,7 +180,6 @@ app.post("/activity/log", async (req, res) => { userId, )) as unknown as { id: number }; - // Keep only the last 100 activities per user to prevent bloat const allActivities = await SimpleDBOps.select( getDb() .select() @@ -216,7 +204,6 @@ app.post("/activity/log", async (req, res) => { } }); -// Reset recent activity for current user app.delete("/activity/reset", async (req, res) => { try { const userId = (req as AuthenticatedRequest).userId; @@ -228,7 +215,6 @@ app.delete("/activity/reset", async (req, res) => { }); } - // Delete all activities for the user await SimpleDBOps.delete( recentActivity, "recent_activity", diff --git a/src/backend/database/database.ts b/src/backend/database/database.ts index 
e912cdc9..7bc524cd 100644 --- a/src/backend/database/database.ts +++ b/src/backend/database/database.ts @@ -915,7 +915,6 @@ app.post( const isOidcUser = !!userRecords[0].is_oidc; if (!isOidcUser) { - // Local accounts still prove knowledge of the password so their DEK can be derived again. if (!password) { return res.status(400).json({ error: "Password required for import", @@ -928,7 +927,6 @@ app.post( return res.status(401).json({ error: "Invalid password" }); } } else if (!DataCrypto.getUserDataKey(userId)) { - // OIDC users skip the password prompt; make sure their DEK is unlocked via the OIDC session. const oidcUnlocked = await authManager.authenticateOIDCUser(userId); if (!oidcUnlocked) { return res.status(403).json({ @@ -947,7 +945,6 @@ app.post( let userDataKey = DataCrypto.getUserDataKey(userId); if (!userDataKey && isOidcUser) { - // authenticateOIDCUser lazily provisions the session key; retry the fetch when it succeeds. const oidcUnlocked = await authManager.authenticateOIDCUser(userId); if (oidcUnlocked) { userDataKey = DataCrypto.getUserDataKey(userId); @@ -1425,7 +1422,6 @@ app.use( err: unknown, req: express.Request, res: express.Response, - // eslint-disable-next-line @typescript-eslint/no-unused-vars _next: express.NextFunction, ) => { apiLogger.error("Unhandled error in request", err, { @@ -1482,17 +1478,13 @@ app.get( if (status.hasUnencryptedDb) { try { unencryptedSize = fs.statSync(dbPath).size; - } catch { - // Ignore file access errors - } + } catch {} } if (status.hasEncryptedDb) { try { encryptedSize = fs.statSync(encryptedDbPath).size; - } catch { - // Ignore file access errors - } + } catch {} } res.json({ diff --git a/src/backend/database/db/index.ts b/src/backend/database/db/index.ts index c949dcf0..8ea5d66f 100644 --- a/src/backend/database/db/index.ts +++ b/src/backend/database/db/index.ts @@ -12,10 +12,6 @@ import { DatabaseSaveTrigger } from "../../utils/database-save-trigger.js"; const dataDir = process.env.DATA_DIR || 
"./db/data"; const dbDir = path.resolve(dataDir); if (!fs.existsSync(dbDir)) { - databaseLogger.info(`Creating database directory`, { - operation: "db_init", - path: dbDir, - }); fs.mkdirSync(dbDir, { recursive: true }); } @@ -31,7 +27,6 @@ let sqlite: Database.Database; async function initializeDatabaseAsync(): Promise { const systemCrypto = SystemCrypto.getInstance(); - // Ensure database key is initialized await systemCrypto.getDatabaseKey(); if (enableFileEncryption) { try { @@ -41,18 +36,11 @@ async function initializeDatabaseAsync(): Promise { memoryDatabase = new Database(decryptedBuffer); - // Count sessions after loading try { const sessionCount = memoryDatabase .prepare("SELECT COUNT(*) as count FROM sessions") .get() as { count: number }; - databaseLogger.info("Database loaded from encrypted file", { - operation: "db_load", - sessionCount: sessionCount.count, - bufferSize: decryptedBuffer.length, - }); } catch (countError) { - // Ignore count errors } } else { const migration = new DatabaseMigration(dataDir); @@ -297,9 +285,6 @@ async function initializeCompleteDatabase(): Promise { try { sqlite.prepare("DELETE FROM sessions").run(); - databaseLogger.info("All sessions cleared on startup", { - operation: "db_init_session_cleanup", - }); } catch (e) { databaseLogger.warn("Could not clear sessions on startup", { operation: "db_init_session_cleanup_failed", @@ -453,7 +438,6 @@ const migrateSchema = () => { addColumnIfNotExists("file_manager_pinned", "host_id", "INTEGER NOT NULL"); addColumnIfNotExists("file_manager_shortcuts", "host_id", "INTEGER NOT NULL"); - // Create sessions table if it doesn't exist (for existing databases) try { sqlite .prepare("SELECT id FROM sessions LIMIT 1") @@ -473,9 +457,6 @@ const migrateSchema = () => { FOREIGN KEY (user_id) REFERENCES users (id) ); `); - databaseLogger.info("Sessions table created via migration", { - operation: "schema_migration", - }); } catch (createError) { databaseLogger.warn("Failed to create sessions 
table", { operation: "schema_migration", @@ -499,18 +480,11 @@ async function saveMemoryDatabaseToFile() { fs.mkdirSync(dataDir, { recursive: true }); } - // Count sessions before saving try { const sessionCount = memoryDatabase .prepare("SELECT COUNT(*) as count FROM sessions") .get() as { count: number }; - databaseLogger.info("Saving database to file", { - operation: "db_save", - sessionCount: sessionCount.count, - bufferSize: buffer.length, - }); } catch (countError) { - // Ignore count errors } if (enableFileEncryption) { @@ -605,18 +579,15 @@ async function cleanupDatabase() { try { fs.unlinkSync(path.join(tempDir, file)); } catch { - // Ignore cleanup errors } } try { fs.rmdirSync(tempDir); } catch { - // Ignore cleanup errors } } } catch { - // Ignore cleanup errors } } @@ -625,7 +596,6 @@ process.on("exit", () => { try { sqlite.close(); } catch { - // Ignore close errors on exit } } }); diff --git a/src/backend/database/routes/users.ts b/src/backend/database/routes/users.ts index a30adacd..1e471111 100644 --- a/src/backend/database/routes/users.ts +++ b/src/backend/database/routes/users.ts @@ -336,14 +336,10 @@ router.post("/oidc-config", authenticateJWT, async (req, res) => { userId, adminDataKey, ); - authLogger.info("OIDC configuration encrypted with admin data key", { - operation: "oidc_config_encrypt", - userId, - }); } else { encryptedConfig = { ...config, - client_secret: `encrypted:${Buffer.from(client_secret).toString("base64")}`, // Simple base64 encoding + client_secret: `encrypted:${Buffer.from(client_secret).toString("base64")}`, }; authLogger.warn( "OIDC configuration stored with basic encoding - admin should re-save with password", @@ -421,7 +417,6 @@ router.get("/oidc-config", async (req, res) => { const config = JSON.parse((row as Record).value as string); - // Only return public fields needed for login page const publicConfig = { client_id: config.client_id, issuer_url: config.issuer_url, @@ -661,7 +656,6 @@ router.get("/oidc/callback", 
async (req, res) => { config.client_id, ); } catch { - // Fallback to manual decoding try { const parts = (tokenData.id_token as string).split("."); if (parts.length === 3) { @@ -812,7 +806,6 @@ router.get("/oidc/callback", async (req, res) => { }); } - // Detect platform and device info const deviceInfo = parseUserAgent(req); const token = await authManager.generateJWTToken(userRecord.id, { deviceType: deviceInfo.type, @@ -838,7 +831,6 @@ router.get("/oidc/callback", async (req, res) => { const redirectUrl = new URL(frontendUrl); redirectUrl.searchParams.set("success", "true"); - // Calculate max age based on device type const maxAge = deviceInfo.type === "desktop" || deviceInfo.type === "mobile" ? 30 * 24 * 60 * 60 * 1000 @@ -965,7 +957,6 @@ router.post("/login", async (req, res) => { }); } - // Detect platform and device info const deviceInfo = parseUserAgent(req); const token = await authManager.generateJWTToken(userRecord.id, { deviceType: deviceInfo.type, @@ -995,7 +986,6 @@ router.post("/login", async (req, res) => { response.token = token; } - // Calculate max age based on device type const maxAge = deviceInfo.type === "desktop" || deviceInfo.type === "mobile" ? 
30 * 24 * 60 * 60 * 1000 @@ -1018,7 +1008,6 @@ router.post("/logout", authenticateJWT, async (req, res) => { const userId = authReq.userId; if (userId) { - // Get sessionId from JWT if available const token = req.cookies?.jwt || req.headers["authorization"]?.split(" ")[1]; let sessionId: string | undefined; @@ -1027,9 +1016,7 @@ router.post("/logout", authenticateJWT, async (req, res) => { try { const payload = await authManager.verifyJWTToken(token); sessionId = payload?.sessionId; - } catch (error) { - // Ignore token verification errors during logout - } + } catch (error) {} } await authManager.logoutUser(userId, sessionId); @@ -1435,7 +1422,6 @@ router.post("/complete-reset", async (req, res) => { const saltRounds = parseInt(process.env.SALT || "10", 10); const password_hash = await bcrypt.hash(newPassword, saltRounds); - // Check if user is logged in and data is unlocked let userIdFromJwt: string | null = null; const cookie = req.cookies?.jwt; let header: string | undefined; @@ -1452,7 +1438,6 @@ router.post("/complete-reset", async (req, res) => { } if (userIdFromJwt === userId) { - // Logged-in user: preserve data try { const success = await authManager.resetUserPasswordWithPreservedDEK( userId, @@ -1491,15 +1476,12 @@ router.post("/complete-reset", async (req, res) => { }); } } else { - // Logged-out user: data is lost await db .update(users) .set({ password_hash }) .where(eq(users.username, username)); try { - // Delete all encrypted data since we're creating a new DEK - // The old DEK is lost, so old encrypted data becomes unreadable await db .delete(sshCredentialUsage) .where(eq(sshCredentialUsage.userId, userId)); @@ -1524,11 +1506,9 @@ router.post("/complete-reset", async (req, res) => { .delete(sshCredentials) .where(eq(sshCredentials.userId, userId)); - // Now setup new encryption with new DEK await authManager.registerUser(userId, newPassword); authManager.logoutUser(userId); - // Clear TOTP settings await db .update(users) .set({ @@ -1597,13 
+1577,11 @@ router.post("/change-password", authenticateJWT, async (req, res) => { return res.status(404).json({ error: "User not found" }); } - // Verify old password for login hash const isMatch = await bcrypt.compare(oldPassword, user[0].password_hash); if (!isMatch) { return res.status(401).json({ error: "Incorrect current password" }); } - // Change encryption keys and login hash const success = await authManager.changeUserPassword( userId, oldPassword, @@ -1619,7 +1597,7 @@ router.post("/change-password", authenticateJWT, async (req, res) => { const password_hash = await bcrypt.hash(newPassword, saltRounds); await db.update(users).set({ password_hash }).where(eq(users.id, userId)); - authManager.logoutUser(userId); // Log out user for security + authManager.logoutUser(userId); res.json({ message: "Password changed successfully. Please log in again." }); }); @@ -1836,7 +1814,6 @@ router.post("/totp/verify-login", async (req, res) => { .where(eq(users.id, userRecord.id)); } - // Detect platform and device info const deviceInfo = parseUserAgent(req); const token = await authManager.generateJWTToken(userRecord.id, { deviceType: deviceInfo.type, @@ -1867,7 +1844,6 @@ router.post("/totp/verify-login", async (req, res) => { response.token = token; } - // Calculate max age based on device type const maxAge = deviceInfo.type === "desktop" || deviceInfo.type === "mobile" ? 
30 * 24 * 60 * 60 * 1000 @@ -2230,7 +2206,6 @@ router.get("/data-status", authenticateJWT, async (req, res) => { const userId = (req as AuthenticatedRequest).userId; try { - // Data lock functionality has been removed - always return unlocked for authenticated users res.json({ unlocked: true, message: "Data is unlocked", @@ -2320,10 +2295,8 @@ router.get("/sessions", authenticateJWT, async (req, res) => { let sessionList; if (userRecord.is_admin) { - // Admin: Get all sessions with user info sessionList = await authManager.getAllSessions(); - // Join with users to get usernames const enrichedSessions = await Promise.all( sessionList.map(async (session) => { const sessionUser = await db @@ -2341,7 +2314,6 @@ router.get("/sessions", authenticateJWT, async (req, res) => { return res.json({ sessions: enrichedSessions }); } else { - // Regular user: Get only their own sessions sessionList = await authManager.getUserSessions(userId); return res.json({ sessions: sessionList }); } @@ -2369,7 +2341,6 @@ router.delete("/sessions/:sessionId", authenticateJWT, async (req, res) => { const userRecord = user[0]; - // Check if session exists const sessionRecords = await db .select() .from(sessions) @@ -2382,7 +2353,6 @@ router.delete("/sessions/:sessionId", authenticateJWT, async (req, res) => { const session = sessionRecords[0]; - // Non-admin users can only revoke their own sessions if (!userRecord.is_admin && session.userId !== userId) { return res .status(403) @@ -2421,19 +2391,15 @@ router.post("/sessions/revoke-all", authenticateJWT, async (req, res) => { const userRecord = user[0]; - // Determine which user's sessions to revoke let revokeUserId = userId; if (targetUserId && userRecord.is_admin) { - // Admin can revoke any user's sessions revokeUserId = targetUserId; } else if (targetUserId && targetUserId !== userId) { - // Non-admin can only revoke their own sessions return res.status(403).json({ error: "Not authorized to revoke sessions for other users", }); } - // Get 
current session ID if needed let currentSessionId: string | undefined; if (exceptCurrent) { const token = diff --git a/src/backend/ssh/file-manager.ts b/src/backend/ssh/file-manager.ts index 58e13b3e..349ddbaa 100644 --- a/src/backend/ssh/file-manager.ts +++ b/src/backend/ssh/file-manager.ts @@ -120,9 +120,7 @@ function cleanupSession(sessionId: string) { if (session) { try { session.client.end(); - } catch { - // Ignore connection close errors - } + } catch {} clearTimeout(session.timeout); delete sshSessions[sessionId]; } @@ -352,8 +350,6 @@ app.post("/ssh/file_manager/ssh/connect", async (req, res) => { } config.password = resolvedCredentials.password; } else if (resolvedCredentials.authType === "none") { - // Use authHandler to control authentication flow - // This ensures we only try keyboard-interactive, not password auth config.authHandler = ( methodsLeft: string[] | null, partialSuccess: boolean, @@ -409,7 +405,6 @@ app.post("/ssh/file_manager/ssh/connect", async (req, res) => { scheduleSessionCleanup(sessionId); res.json({ status: "success", message: "SSH connection established" }); - // Log activity to dashboard API if (hostId && userId) { (async () => { try { @@ -458,14 +453,6 @@ app.post("/ssh/file_manager/ssh/connect", async (req, res) => { responseSent = true; if (authMethodNotAvailable && resolvedCredentials.authType === "none") { - fileLogger.info( - "Keyboard-interactive not available, requesting credentials", - { - operation: "file_connect_auth_not_available", - sessionId, - hostId, - }, - ); res.status(200).json({ status: "auth_required", message: @@ -557,51 +544,26 @@ app.post("/ssh/file_manager/ssh/connect", async (req, res) => { prompt: prompts[totpPromptIndex].prompt, }); } else { - // Non-TOTP prompts (password, etc.) 
const hasStoredPassword = resolvedCredentials.password && resolvedCredentials.authType !== "none"; - // Check if this is a password prompt const passwordPromptIndex = prompts.findIndex((p) => /password/i.test(p.prompt), ); - // If no stored password (including authType "none"), prompt the user if (!hasStoredPassword && passwordPromptIndex !== -1) { if (responseSent) { - // Connection is already being handled, don't send duplicate responses - fileLogger.info( - "Skipping duplicate password prompt - response already sent", - { - operation: "keyboard_interactive_skip", - hostId, - sessionId, - }, - ); return; } responseSent = true; if (pendingTOTPSessions[sessionId]) { - // Session already waiting for TOTP, don't override - fileLogger.info("Skipping password prompt - TOTP session pending", { - operation: "keyboard_interactive_skip", - hostId, - sessionId, - }); return; } keyboardInteractiveResponded = true; - fileLogger.info("Requesting password from user (authType: none)", { - operation: "keyboard_interactive_password", - hostId, - sessionId, - prompt: prompts[passwordPromptIndex].prompt, - }); - pendingTOTPSessions[sessionId] = { client, finish, @@ -627,7 +589,6 @@ app.post("/ssh/file_manager/ssh/connect", async (req, res) => { return; } - // Auto-respond with stored credentials if available const responses = prompts.map((p) => { if (/password/i.test(p.prompt) && resolvedCredentials.password) { return resolvedCredentials.password; @@ -679,9 +640,7 @@ app.post("/ssh/file_manager/ssh/connect-totp", async (req, res) => { delete pendingTOTPSessions[sessionId]; try { session.client.end(); - } catch { - // Ignore errors when closing timed out session - } + } catch {} fileLogger.warn("TOTP session timeout before code submission", { operation: "file_totp_verify", sessionId, @@ -693,7 +652,6 @@ app.post("/ssh/file_manager/ssh/connect-totp", async (req, res) => { .json({ error: "TOTP session timeout. Please reconnect." 
}); } - // Build responses for ALL prompts, just like in terminal.ts const responses = (session.prompts || []).map((p, index) => { if (index === session.totpPromptIndex) { return totpCode; @@ -704,22 +662,9 @@ app.post("/ssh/file_manager/ssh/connect-totp", async (req, res) => { return ""; }); - fileLogger.info("Full keyboard-interactive response for file manager", { - operation: "file_totp_full_response", - sessionId, - userId, - totalPrompts: session.prompts?.length || 0, - responsesProvided: responses.filter((r) => r !== "").length, - }); - let responseSent = false; let responseTimeout: NodeJS.Timeout; - // Don't remove event listeners - just add our own 'once' handlers - // The ssh2 library manages multiple listeners correctly - // Removing them can cause the connection to become unstable - - // CRITICAL: Attach event listeners BEFORE calling finish() to avoid race condition session.client.once("ready", () => { if (responseSent) return; responseSent = true; @@ -727,8 +672,6 @@ app.post("/ssh/file_manager/ssh/connect-totp", async (req, res) => { delete pendingTOTPSessions[sessionId]; - // Add a small delay to let SSH2 stabilize the connection after keyboard-interactive - // This prevents "Not connected" errors when immediately trying to exec commands setTimeout(() => { sshSessions[sessionId] = { client: session.client, @@ -742,7 +685,6 @@ app.post("/ssh/file_manager/ssh/connect-totp", async (req, res) => { message: "TOTP verified, SSH connection established", }); - // Log activity to dashboard API after connection is stable if (session.hostId && session.userId) { (async () => { try { @@ -789,7 +731,7 @@ app.post("/ssh/file_manager/ssh/connect-totp", async (req, res) => { } })(); } - }, 200); // Give SSH2 connection 200ms to fully stabilize after keyboard-interactive + }, 200); }); session.client.once("error", (err) => { @@ -822,7 +764,6 @@ app.post("/ssh/file_manager/ssh/connect-totp", async (req, res) => { } }, 60000); - // Now that event listeners are attached, 
submit the TOTP response session.finish(responses); }); @@ -2493,15 +2434,6 @@ app.post("/ssh/file_manager/ssh/executeFile", async (req, res) => { : code; const cleanOutput = output.replace(/EXIT_CODE:\d+$/, "").trim(); - fileLogger.info("File execution completed", { - operation: "execute_file", - sessionId, - filePath, - exitCode: actualExitCode, - outputLength: cleanOutput.length, - errorLength: errorOutput.length, - }); - res.json({ success: true, exitCode: actualExitCode, diff --git a/src/backend/ssh/server-stats.ts b/src/backend/ssh/server-stats.ts index 7728f6a5..23722537 100644 --- a/src/backend/ssh/server-stats.ts +++ b/src/backend/ssh/server-stats.ts @@ -112,8 +112,6 @@ class SSHConnectionPool { ); if (totpPrompt) { - // Record TOTP failure as permanent - never retry - // The recordFailure method will log this once authFailureTracker.recordFailure(host.id, "TOTP", true); client.end(); reject( @@ -158,9 +156,7 @@ class SSHConnectionPool { if (!conn.inUse && now - conn.lastUsed > maxAge) { try { conn.client.end(); - } catch { - // Ignore errors when closing stale connections - } + } catch {} return false; } return true; @@ -180,9 +176,7 @@ class SSHConnectionPool { for (const conn of connections) { try { conn.client.end(); - } catch { - // Ignore errors when closing connections during cleanup - } + } catch {} } } this.connections.clear(); @@ -220,9 +214,7 @@ class RequestQueue { if (request) { try { await request(); - } catch { - // Ignore errors from queued requests - } + } catch {} } } @@ -272,13 +264,13 @@ interface AuthFailureRecord { count: number; lastFailure: number; reason: "TOTP" | "AUTH" | "TIMEOUT"; - permanent: boolean; // If true, don't retry at all + permanent: boolean; } class AuthFailureTracker { private failures = new Map(); private maxRetries = 3; - private backoffBase = 60000; // 1 minute base backoff + private backoffBase = 60000; recordFailure( hostId: number, @@ -305,17 +297,14 @@ class AuthFailureTracker { const record = 
this.failures.get(hostId); if (!record) return false; - // Always skip TOTP hosts if (record.reason === "TOTP" || record.permanent) { return true; } - // Skip if we've exceeded max retries if (record.count >= this.maxRetries) { return true; } - // Calculate exponential backoff const backoffTime = this.backoffBase * Math.pow(2, record.count - 1); const timeSinceFailure = Date.now() - record.lastFailure; @@ -351,11 +340,9 @@ class AuthFailureTracker { reset(hostId: number): void { this.failures.delete(hostId); - // Don't log reset - it's not important } cleanup(): void { - // Clean up old failures (older than 1 hour) const maxAge = 60 * 60 * 1000; const now = Date.now(); @@ -459,7 +446,6 @@ class PollingManager { const statsConfig = this.parseStatsConfig(host.statsConfig); const existingConfig = this.pollingConfigs.get(host.id); - // Clear existing timers if they exist if (existingConfig) { if (existingConfig.statusTimer) { clearInterval(existingConfig.statusTimer); @@ -474,35 +460,27 @@ class PollingManager { statsConfig, }; - // Start status polling if enabled if (statsConfig.statusCheckEnabled) { const intervalMs = statsConfig.statusCheckInterval * 1000; - // Poll immediately (don't await - let it run in background) this.pollHostStatus(host); - // Then set up interval to poll periodically config.statusTimer = setInterval(() => { this.pollHostStatus(host); }, intervalMs); } else { - // Remove status if monitoring is disabled this.statusStore.delete(host.id); } - // Start metrics polling if enabled if (statsConfig.metricsEnabled) { const intervalMs = statsConfig.metricsInterval * 1000; - // Poll immediately (don't await - let it run in background) this.pollHostMetrics(host); - // Then set up interval to poll periodically config.metricsTimer = setInterval(() => { this.pollHostMetrics(host); }, intervalMs); } else { - // Remove metrics if monitoring is disabled this.metricsStore.delete(host.id); } @@ -576,12 +554,10 @@ class PollingManager { } async 
refreshHostPolling(userId: string): Promise { - // Stop all current polling for (const hostId of this.pollingConfigs.keys()) { this.stopPollingForHost(hostId); } - // Reinitialize await this.initializePolling(userId); } @@ -1019,10 +995,8 @@ async function collectMetrics(host: SSHHostWithCredentials): Promise<{ os: string | null; }; }> { - // Check if we should skip this host due to auth failures if (authFailureTracker.shouldSkip(host.id)) { const reason = authFailureTracker.getSkipReason(host.id); - // Don't log - just skip silently to avoid spam throw new Error(reason || "Authentication failed"); } @@ -1166,7 +1140,6 @@ async function collectMetrics(host: SSHHostWithCredentials): Promise<{ availableHuman = null; } - // Collect network interfaces const interfaces: Array<{ name: string; ip: string; @@ -1225,7 +1198,6 @@ async function collectMetrics(host: SSHHostWithCredentials): Promise<{ } } catch (e) {} - // Collect uptime let uptimeSeconds: number | null = null; let uptimeFormatted: string | null = null; try { @@ -1242,7 +1214,6 @@ async function collectMetrics(host: SSHHostWithCredentials): Promise<{ } } catch (e) {} - // Collect process information let totalProcesses: number | null = null; let runningProcesses: number | null = null; const topProcesses: Array<{ @@ -1285,7 +1256,6 @@ async function collectMetrics(host: SSHHostWithCredentials): Promise<{ runningProcesses = Number(runningCount.stdout.trim()); } catch (e) {} - // Collect system information let hostname: string | null = null; let kernel: string | null = null; let os: string | null = null; @@ -1338,25 +1308,20 @@ async function collectMetrics(host: SSHHostWithCredentials): Promise<{ return result; }); } catch (error) { - // Record authentication failures for backoff if (error instanceof Error) { if (error.message.includes("TOTP authentication required")) { - // TOTP failures are already recorded in keyboard-interactive handler throw error; } else if ( error.message.includes("No password available") 
|| error.message.includes("Unsupported authentication type") || error.message.includes("No SSH key available") ) { - // Configuration errors - permanent failures, don't retry - // recordFailure will log once when first detected authFailureTracker.recordFailure(host.id, "AUTH", true); } else if ( error.message.includes("authentication") || error.message.includes("Permission denied") || error.message.includes("All configured authentication methods failed") ) { - // recordFailure will log once when first detected authFailureTracker.recordFailure(host.id, "AUTH"); } else if ( error.message.includes("timeout") || @@ -1384,9 +1349,7 @@ function tcpPing( settled = true; try { socket.destroy(); - } catch { - // Ignore errors when destroying socket - } + } catch {} resolve(result); }; @@ -1409,7 +1372,6 @@ app.get("/status", async (req, res) => { }); } - // Initialize polling if no hosts are being polled yet const statuses = pollingManager.getAllStatuses(); if (statuses.size === 0) { await pollingManager.initializePolling(userId); @@ -1433,7 +1395,6 @@ app.get("/status/:id", validateHostId, async (req, res) => { }); } - // Initialize polling if no hosts are being polled yet const statuses = pollingManager.getAllStatuses(); if (statuses.size === 0) { await pollingManager.initializePolling(userId); @@ -1520,7 +1481,6 @@ app.listen(PORT, async () => { }); } - // Cleanup old auth failures every 10 minutes setInterval( () => { authFailureTracker.cleanup(); diff --git a/src/backend/ssh/terminal.ts b/src/backend/ssh/terminal.ts index f28f5408..f4be377c 100644 --- a/src/backend/ssh/terminal.ts +++ b/src/backend/ssh/terminal.ts @@ -333,15 +333,9 @@ wss.on("connection", async (ws: WebSocket, req) => { } case "password_response": { - const passwordData = data as TOTPResponseData; // Same structure + const passwordData = data as TOTPResponseData; if (keyboardInteractiveFinish && passwordData?.code) { const password = passwordData.code; - sshLogger.info("Password received from user", { 
- operation: "password_response", - userId, - passwordLength: password.length, - }); - keyboardInteractiveFinish([password]); keyboardInteractiveFinish = null; } else { @@ -374,7 +368,6 @@ wss.on("connection", async (ws: WebSocket, req) => { keyPassword?: string; }; - // Update the host config with provided credentials if (credentialsData.password) { credentialsData.hostConfig.password = credentialsData.password; credentialsData.hostConfig.authType = "password"; @@ -384,10 +377,8 @@ wss.on("connection", async (ws: WebSocket, req) => { credentialsData.hostConfig.authType = "key"; } - // Cleanup existing connection if any cleanupSSH(); - // Reconnect with new credentials const reconnectData: ConnectToHostData = { cols: credentialsData.cols, rows: credentialsData.rows, @@ -555,8 +546,6 @@ wss.on("connection", async (ws: WebSocket, req) => { sshConn.on("ready", () => { clearTimeout(connectionTimeout); - // Immediately try to create shell - don't delay as it can cause connection to be cleaned up - // The connection is already ready at this point if (!sshConn) { sshLogger.warn( "SSH connection was cleaned up before shell could be created", @@ -666,11 +655,9 @@ wss.on("connection", async (ws: WebSocket, req) => { JSON.stringify({ type: "connected", message: "SSH connected" }), ); - // Log activity to dashboard API if (id && hostConfig.userId) { (async () => { try { - // Fetch host name from database const hosts = await SimpleDBOps.select( getDb() .select() @@ -790,8 +777,6 @@ wss.on("connection", async (ws: WebSocket, req) => { prompts: Array<{ prompt: string; echo: boolean }>, finish: (responses: string[]) => void, ) => { - // Notify frontend that keyboard-interactive is available (e.g., for Warpgate OIDC) - // This allows the terminal to be displayed immediately so user can see auth prompts if (resolvedCredentials.authType === "none") { ws.send( JSON.stringify({ @@ -846,37 +831,19 @@ wss.on("connection", async (ws: WebSocket, req) => { resolvedCredentials.password && 
resolvedCredentials.authType !== "none"; - // Check if this is a password prompt const passwordPromptIndex = prompts.findIndex((p) => /password/i.test(p.prompt), ); - // If no stored password (including authType "none"), prompt the user if (!hasStoredPassword && passwordPromptIndex !== -1) { - // Don't block duplicate password prompts - some servers (like Warpgate) may ask multiple times if (keyboardInteractiveResponded && totpPromptSent) { - // Only block if we already sent a TOTP prompt - sshLogger.info( - "Skipping duplicate password prompt after TOTP sent", - { - operation: "keyboard_interactive_skip", - hostId: id, - }, - ); return; } keyboardInteractiveResponded = true; - sshLogger.info("Requesting password from user (authType: none)", { - operation: "keyboard_interactive_password", - hostId: id, - prompt: prompts[passwordPromptIndex].prompt, - }); - keyboardInteractiveFinish = (userResponses: string[]) => { const userInput = (userResponses[0] || "").trim(); - // Build responses for all prompts const responses = prompts.map((p, index) => { if (index === passwordPromptIndex) { return userInput; @@ -884,16 +851,6 @@ wss.on("connection", async (ws: WebSocket, req) => { return ""; }); - sshLogger.info( - "User-provided password being sent to SSH server", - { - operation: "interactive_password_verification", - hostId: id, - passwordLength: userInput.length, - totalPrompts: prompts.length, - }, - ); - finish(responses); }; @@ -906,8 +863,6 @@ wss.on("connection", async (ws: WebSocket, req) => { return; } - // Auto-respond with stored credentials if available - // Allow multiple responses - the server might ask multiple times during auth flow const responses = prompts.map((p) => { if (/password/i.test(p.prompt) && resolvedCredentials.password) { return resolvedCredentials.password; @@ -991,28 +946,15 @@ wss.on("connection", async (ws: WebSocket, req) => { }; if (resolvedCredentials.authType === "none") { - // For "none" auth type, allow natural SSH negotiation - // 
The authHandler will try keyboard-interactive if available, otherwise notify frontend - // This allows for Warpgate OIDC and other interactive auth scenarios connectConfig.authHandler = ( methodsLeft: string[] | null, partialSuccess: boolean, callback: (nextMethod: string | false) => void, ) => { if (methodsLeft && methodsLeft.length > 0) { - // Prefer keyboard-interactive if available if (methodsLeft.includes("keyboard-interactive")) { callback("keyboard-interactive"); } else { - // No keyboard-interactive available - notify frontend to show auth dialog - sshLogger.info( - "Server does not support keyboard-interactive auth for 'none' auth type", - { - operation: "ssh_auth_handler_no_keyboard", - hostId: id, - methodsLeft, - }, - ); ws.send( JSON.stringify({ type: "auth_method_not_available", @@ -1024,11 +966,6 @@ wss.on("connection", async (ws: WebSocket, req) => { callback(false); } } else { - // No methods left or empty - try to proceed without auth - sshLogger.info("No auth methods available, proceeding without auth", { - operation: "ssh_auth_no_methods", - hostId: id, - }); callback(false); } }; diff --git a/src/backend/ssh/tunnel.ts b/src/backend/ssh/tunnel.ts index 72629b6d..2fb97069 100644 --- a/src/backend/ssh/tunnel.ts +++ b/src/backend/ssh/tunnel.ts @@ -217,9 +217,7 @@ function cleanupTunnelResources( if (verification?.timeout) clearTimeout(verification.timeout); try { verification?.conn.end(); - } catch { - // Ignore errors - } + } catch {} tunnelVerifications.delete(tunnelName); } @@ -284,9 +282,7 @@ function handleDisconnect( const verification = tunnelVerifications.get(tunnelName); if (verification?.timeout) clearTimeout(verification.timeout); verification?.conn.end(); - } catch { - // Ignore errors - } + } catch {} tunnelVerifications.delete(tunnelName); } @@ -642,9 +638,7 @@ async function connectSSHTunnel( try { conn.end(); - } catch { - // Ignore errors - } + } catch {} activeTunnels.delete(tunnelName); @@ -784,9 +778,7 @@ async function 
connectSSHTunnel( const verification = tunnelVerifications.get(tunnelName); if (verification?.timeout) clearTimeout(verification.timeout); verification?.conn.end(); - } catch { - // Ignore errors - } + } catch {} tunnelVerifications.delete(tunnelName); } @@ -837,13 +829,9 @@ async function connectSSHTunnel( } }); - stream.stdout?.on("data", () => { - // Silently consume stdout data - }); + stream.stdout?.on("data", () => {}); - stream.on("error", () => { - // Silently consume stream errors - }); + stream.on("error", () => {}); stream.stderr.on("data", (data) => { const errorMsg = data.toString().trim(); @@ -1222,9 +1210,7 @@ async function killRemoteTunnelByMarker( executeNextKillCommand(); }); - stream.on("data", () => { - // Silently consume stream data - }); + stream.on("data", () => {}); stream.stderr.on("data", (data) => { const output = data.toString().trim(); diff --git a/src/backend/starter.ts b/src/backend/starter.ts index db78f71e..4ab019a6 100644 --- a/src/backend/starter.ts +++ b/src/backend/starter.ts @@ -21,9 +21,7 @@ import { systemLogger, versionLogger } from "./utils/logger.js"; if (persistentConfig.parsed) { Object.assign(process.env, persistentConfig.parsed); } - } catch { - // Ignore errors if .env file doesn't exist - } + } catch {} let version = "unknown"; diff --git a/src/backend/utils/auth-manager.ts b/src/backend/utils/auth-manager.ts index ec2d2bf0..adeeff77 100644 --- a/src/backend/utils/auth-manager.ts +++ b/src/backend/utils/auth-manager.ts @@ -54,7 +54,6 @@ class AuthManager { this.invalidateUserTokens(userId); }); - // Run session cleanup every 5 minutes setInterval( () => { this.cleanupExpiredSessions().catch((error) => { @@ -162,16 +161,15 @@ class AuthManager { ): Promise { const jwtSecret = await this.systemCrypto.getJWTSecret(); - // Determine expiration based on device type let expiresIn = options.expiresIn; if (!expiresIn && !options.pendingTOTP) { if (options.deviceType === "desktop" || options.deviceType === "mobile") { - 
expiresIn = "30d"; // 30 days for desktop and mobile + expiresIn = "30d"; } else { - expiresIn = "7d"; // 7 days for web + expiresIn = "7d"; } } else if (!expiresIn) { - expiresIn = "7d"; // Default + expiresIn = "7d"; } const payload: JWTPayload = { userId }; @@ -179,23 +177,19 @@ class AuthManager { payload.pendingTOTP = true; } - // Create session in database if not a temporary TOTP token if (!options.pendingTOTP && options.deviceType && options.deviceInfo) { const sessionId = nanoid(); payload.sessionId = sessionId; - // Generate the token first to get it for storage const token = jwt.sign(payload, jwtSecret, { expiresIn, } as jwt.SignOptions); - // Calculate expiration timestamp const expirationMs = this.parseExpiresIn(expiresIn); const now = new Date(); const expiresAt = new Date(now.getTime() + expirationMs).toISOString(); const createdAt = now.toISOString(); - // Store session in database try { await db.insert(sessions).values({ id: sessionId, @@ -208,27 +202,11 @@ class AuthManager { lastActiveAt: createdAt, }); - databaseLogger.info("Session created", { - operation: "session_create", - userId, - sessionId, - deviceType: options.deviceType, - expiresAt, - }); - - // Immediately save database to disk to ensure session persists across restarts try { const { saveMemoryDatabaseToFile } = await import( "../database/db/index.js" ); await saveMemoryDatabaseToFile(); - databaseLogger.info( - "Database saved immediately after session creation", - { - operation: "session_create_db_save", - sessionId, - }, - ); } catch (saveError) { databaseLogger.error( "Failed to save database after session creation", @@ -245,7 +223,6 @@ class AuthManager { userId, sessionId, }); - // Continue anyway - session tracking is non-critical } return token; @@ -259,7 +236,7 @@ class AuthManager { */ private parseExpiresIn(expiresIn: string): number { const match = expiresIn.match(/^(\d+)([smhd])$/); - if (!match) return 7 * 24 * 60 * 60 * 1000; // Default 7 days + if (!match) return 7 * 
24 * 60 * 60 * 1000; const value = parseInt(match[1]); const unit = match[2]; @@ -282,26 +259,8 @@ class AuthManager { try { const jwtSecret = await this.systemCrypto.getJWTSecret(); - databaseLogger.info("Attempting JWT verification", { - operation: "jwt_verify_attempt", - tokenLength: token.length, - secretLength: jwtSecret.length, - }); - const payload = jwt.verify(token, jwtSecret) as JWTPayload; - databaseLogger.info("JWT signature verified successfully", { - operation: "jwt_signature_verified", - userId: payload.userId, - sessionId: payload.sessionId, - hasExpiration: !!payload.exp, - expiresAt: payload.exp - ? new Date(payload.exp * 1000).toISOString() - : "N/A", - }); - - // For tokens with sessionId, verify the session exists in database - // This ensures revoked sessions are rejected even after backend restart if (payload.sessionId) { try { const sessionRecords = await db @@ -322,13 +281,6 @@ class AuthManager { ); return null; } - - databaseLogger.info("Session found in database", { - operation: "jwt_session_found", - sessionId: payload.sessionId, - userId: payload.userId, - sessionExpiresAt: sessionRecords[0].expiresAt, - }); } catch (dbError) { databaseLogger.error( "Failed to check session in database during JWT verification", @@ -338,15 +290,8 @@ class AuthManager { sessionId: payload.sessionId, }, ); - // Continue anyway - database errors shouldn't block valid JWTs } } - - databaseLogger.info("JWT verification successful", { - operation: "jwt_verify_success", - userId: payload.userId, - sessionId: payload.sessionId, - }); return payload; } catch (error) { databaseLogger.warn("JWT verification failed", { @@ -358,35 +303,14 @@ class AuthManager { } } - invalidateJWTToken(token: string): void { - // No-op: Token invalidation is now handled through database session deletion - databaseLogger.info( - "Token invalidation requested (handled via session deletion)", - { - operation: "token_invalidate", - }, - ); - } + invalidateJWTToken(token: string): void 
{} - invalidateUserTokens(userId: string): void { - databaseLogger.info("User tokens invalidation requested due to data lock", { - operation: "user_tokens_invalidate", - userId, - }); - // Session cleanup will happen through revokeAllUserSessions if needed - } + invalidateUserTokens(userId: string): void {} async revokeSession(sessionId: string): Promise { try { - // Delete the session from database - // The JWT will be invalidated because verifyJWTToken checks for session existence await db.delete(sessions).where(eq(sessions.id, sessionId)); - databaseLogger.info("Session deleted", { - operation: "session_delete", - sessionId, - }); - return true; } catch (error) { databaseLogger.error("Failed to delete session", error, { @@ -402,7 +326,6 @@ class AuthManager { exceptSessionId?: string, ): Promise { try { - // Get session count before deletion const userSessions = await db .select() .from(sessions) @@ -412,8 +335,6 @@ class AuthManager { (s) => !exceptSessionId || s.id !== exceptSessionId, ).length; - // Delete sessions from database - // JWTs will be invalidated because verifyJWTToken checks for session existence if (exceptSessionId) { await db .delete(sessions) @@ -427,13 +348,6 @@ class AuthManager { await db.delete(sessions).where(eq(sessions.userId, userId)); } - databaseLogger.info("User sessions deleted", { - operation: "user_sessions_delete", - userId, - exceptSessionId, - deletedCount, - }); - return deletedCount; } catch (error) { databaseLogger.error("Failed to delete user sessions", error, { @@ -446,7 +360,6 @@ class AuthManager { async cleanupExpiredSessions(): Promise { try { - // Get expired sessions count const expiredSessions = await db .select() .from(sessions) @@ -454,19 +367,10 @@ class AuthManager { const expiredCount = expiredSessions.length; - // Delete expired sessions - // JWTs will be invalidated because verifyJWTToken checks for session existence await db .delete(sessions) .where(sql`${sessions.expiresAt} < datetime('now')`); - if 
(expiredCount > 0) { - databaseLogger.info("Expired sessions cleaned up", { - operation: "sessions_cleanup", - count: expiredCount, - }); - } - return expiredCount; } catch (error) { databaseLogger.error("Failed to cleanup expired sessions", error, { @@ -539,7 +443,6 @@ class AuthManager { return res.status(401).json({ error: "Invalid token" }); } - // Check session status if sessionId is present if (payload.sessionId) { try { const sessionRecords = await db @@ -557,9 +460,6 @@ class AuthManager { const session = sessionRecords[0]; - // Session exists, no need to check isRevoked since we delete sessions instead - - // Check if session has expired by comparing timestamps const sessionExpiryTime = new Date(session.expiresAt).getTime(); const currentTime = Date.now(); const isExpired = sessionExpiryTime < currentTime; @@ -579,7 +479,6 @@ class AuthManager { }); } - // Update lastActiveAt timestamp (async, non-blocking) db.update(sessions) .set({ lastActiveAt: new Date().toISOString() }) .where(eq(sessions.id, payload.sessionId)) @@ -596,7 +495,6 @@ class AuthManager { operation: "session_check_failed", sessionId: payload.sessionId, }); - // Continue anyway - session tracking failures shouldn't block auth } } @@ -614,14 +512,8 @@ class AuthManager { return res.status(401).json({ error: "Authentication required" }); } - // Try to get data key if available (may be null after restart) const dataKey = this.userCrypto.getUserDataKey(userId); authReq.dataKey = dataKey || undefined; - - // Note: Data key will be null after backend restart until user performs - // an operation that requires decryption. This is expected behavior. - // Individual routes that need encryption should check dataKey explicitly. 
- next(); }; } @@ -688,15 +580,9 @@ class AuthManager { async logoutUser(userId: string, sessionId?: string): Promise { this.userCrypto.logoutUser(userId); - // Delete the specific session from database if sessionId provided if (sessionId) { try { await db.delete(sessions).where(eq(sessions.id, sessionId)); - databaseLogger.info("Session deleted on logout", { - operation: "session_delete_logout", - userId, - sessionId, - }); } catch (error) { databaseLogger.error("Failed to delete session on logout", error, { operation: "session_delete_logout_failed", @@ -705,13 +591,8 @@ class AuthManager { }); } } else { - // If no sessionId, delete all sessions for this user try { await db.delete(sessions).where(eq(sessions.userId, userId)); - databaseLogger.info("All user sessions deleted on logout", { - operation: "sessions_delete_logout", - userId, - }); } catch (error) { databaseLogger.error( "Failed to delete user sessions on logout", diff --git a/src/backend/utils/auto-ssl-setup.ts b/src/backend/utils/auto-ssl-setup.ts index b5bb01ed..e45ce2ec 100644 --- a/src/backend/utils/auto-ssl-setup.ts +++ b/src/backend/utils/auto-ssl-setup.ts @@ -233,9 +233,7 @@ IP.3 = 0.0.0.0 let envContent = ""; try { envContent = await fs.readFile(this.ENV_FILE, "utf8"); - } catch { - // File doesn't exist yet, will create with SSL config - } + } catch {} let updatedContent = envContent; let hasChanges = false; diff --git a/src/backend/utils/data-crypto.ts b/src/backend/utils/data-crypto.ts index 579e1960..462d2956 100644 --- a/src/backend/utils/data-crypto.ts +++ b/src/backend/utils/data-crypto.ts @@ -393,18 +393,6 @@ class DataCrypto { result.success = result.errors.length === 0; - databaseLogger.info( - "User data re-encryption completed after password reset", - { - operation: "password_reset_reencrypt_completed", - userId, - success: result.success, - reencryptedTables: result.reencryptedTables, - reencryptedFieldsCount: result.reencryptedFieldsCount, - errorsCount: result.errors.length, - }, 
- ); - return result; } catch (error) { databaseLogger.error( diff --git a/src/backend/utils/database-migration.ts b/src/backend/utils/database-migration.ts index 153ef18d..38d75b15 100644 --- a/src/backend/utils/database-migration.ts +++ b/src/backend/utils/database-migration.ts @@ -1,4 +1,4 @@ -import Database from "better-sqlite3"; +import Database from "better-sqlite3"; import fs from "fs"; import path from "path"; import { databaseLogger } from "./logger.js"; @@ -62,10 +62,6 @@ export class DatabaseMigration { "Empty unencrypted database found alongside encrypted database. Removing empty file."; try { fs.unlinkSync(this.unencryptedDbPath); - databaseLogger.info("Removed empty unencrypted database file", { - operation: "migration_cleanup_empty", - path: this.unencryptedDbPath, - }); } catch (error) { databaseLogger.warn("Failed to remove empty unencrypted database", { operation: "migration_cleanup_empty_failed", diff --git a/src/backend/utils/database-save-trigger.ts b/src/backend/utils/database-save-trigger.ts index 15bc05bc..b3c2da21 100644 --- a/src/backend/utils/database-save-trigger.ts +++ b/src/backend/utils/database-save-trigger.ts @@ -71,11 +71,6 @@ export class DatabaseSaveTrigger { this.pendingSave = true; try { - databaseLogger.info("Force saving database", { - operation: "db_save_trigger_force_start", - reason, - }); - await this.saveFunction(); } catch (error) { databaseLogger.error("Database force save failed", error, { @@ -110,9 +105,5 @@ export class DatabaseSaveTrigger { this.pendingSave = false; this.isInitialized = false; this.saveFunction = null; - - databaseLogger.info("Database save trigger cleaned up", { - operation: "db_save_trigger_cleanup", - }); } } diff --git a/src/backend/utils/lazy-field-encryption.ts b/src/backend/utils/lazy-field-encryption.ts index e64db2a3..6be7b44d 100644 --- a/src/backend/utils/lazy-field-encryption.ts +++ b/src/backend/utils/lazy-field-encryption.ts @@ -82,9 +82,7 @@ export class LazyFieldEncryption {
legacyFieldName, ); return decrypted; - } catch { - // Ignore legacy format errors - } + } catch {} } const sensitiveFields = [ @@ -176,9 +174,7 @@ export class LazyFieldEncryption { wasPlaintext: false, wasLegacyEncryption: true, }; - } catch { - // Ignore legacy format errors - } + } catch {} } return { encrypted: fieldValue, diff --git a/src/backend/utils/simple-db-ops.ts b/src/backend/utils/simple-db-ops.ts index e2764dca..6fbd7a63 100644 --- a/src/backend/utils/simple-db-ops.ts +++ b/src/backend/utils/simple-db-ops.ts @@ -6,7 +6,6 @@ type TableName = "users" | "ssh_data" | "ssh_credentials" | "recent_activity"; class SimpleDBOps { static async insert>( - // eslint-disable-next-line @typescript-eslint/no-explicit-any table: SQLiteTable, tableName: TableName, data: T, @@ -91,7 +90,6 @@ class SimpleDBOps { } static async update>( - // eslint-disable-next-line @typescript-eslint/no-explicit-any table: SQLiteTable, tableName: TableName, where: unknown, @@ -110,7 +108,6 @@ class SimpleDBOps { const result = await getDb() .update(table) .set(encryptedData) - // eslint-disable-next-line @typescript-eslint/no-explicit-any .where(where as any) .returning(); @@ -127,14 +124,12 @@ class SimpleDBOps { } static async delete( - // eslint-disable-next-line @typescript-eslint/no-explicit-any table: SQLiteTable, tableName: TableName, where: unknown, ): Promise { const result = await getDb() .delete(table) - // eslint-disable-next-line @typescript-eslint/no-explicit-any .where(where as any) .returning(); diff --git a/src/backend/utils/ssh-key-utils.ts b/src/backend/utils/ssh-key-utils.ts index 2ef8c2e5..8cd3d3d3 100644 --- a/src/backend/utils/ssh-key-utils.ts +++ b/src/backend/utils/ssh-key-utils.ts @@ -84,9 +84,7 @@ function detectKeyTypeFromContent(keyContent: string): string { } else if (decodedString.includes("1.3.101.112")) { return "ssh-ed25519"; } - } catch { - // Cannot decode key, fallback to length-based detection - } + } catch {} if (content.length < 800) { return 
"ssh-ed25519"; @@ -142,9 +140,7 @@ function detectPublicKeyTypeFromContent(publicKeyContent: string): string { } else if (decodedString.includes("1.3.101.112")) { return "ssh-ed25519"; } - } catch { - // Cannot decode key, fallback to length-based detection - } + } catch {} if (content.length < 400) { return "ssh-ed25519"; @@ -246,9 +242,7 @@ export function parseSSHKey( useSSH2 = true; } - } catch { - // SSH2 parsing failed, will use fallback method - } + } catch {} } if (!useSSH2) { @@ -274,9 +268,7 @@ export function parseSSHKey( success: true, }; } - } catch { - // Fallback parsing also failed - } + } catch {} return { privateKey: privateKeyData, diff --git a/src/backend/utils/system-crypto.ts b/src/backend/utils/system-crypto.ts index 45653cce..c5c1ae63 100644 --- a/src/backend/utils/system-crypto.ts +++ b/src/backend/utils/system-crypto.ts @@ -107,9 +107,7 @@ class SystemCrypto { process.env.DATABASE_KEY = dbKeyMatch[1]; return; } - } catch { - // Ignore file read errors, will generate new key - } + } catch {} await this.generateAndGuideDatabaseKey(); } catch (error) { @@ -146,9 +144,7 @@ class SystemCrypto { process.env.INTERNAL_AUTH_TOKEN = tokenMatch[1]; return; } - } catch { - // Ignore file read errors, will generate new token - } + } catch {} await this.generateAndGuideInternalAuthToken(); } catch (error) { diff --git a/src/backend/utils/user-agent-parser.ts b/src/backend/utils/user-agent-parser.ts index ae6205f9..d8d1bd6d 100644 --- a/src/backend/utils/user-agent-parser.ts +++ b/src/backend/utils/user-agent-parser.ts @@ -7,59 +7,43 @@ export interface DeviceInfo { browser: string; version: string; os: string; - deviceInfo: string; // Formatted string like "Chrome 120 on Windows 11" + deviceInfo: string; } -/** - * Detect the platform type based on request headers - */ export function detectPlatform(req: Request): DeviceType { const userAgent = req.headers["user-agent"] || ""; const electronHeader = req.headers["x-electron-app"]; - // Electron app 
detection if (electronHeader === "true") { return "desktop"; } - // Mobile app detection if (userAgent.includes("Termix-Mobile")) { return "mobile"; } - // Default to web return "web"; } -/** - * Parse User-Agent string to extract device information - */ export function parseUserAgent(req: Request): DeviceInfo { const userAgent = req.headers["user-agent"] || "Unknown"; const platform = detectPlatform(req); - // For Electron if (platform === "desktop") { return parseElectronUserAgent(userAgent); } - // For Mobile app if (platform === "mobile") { return parseMobileUserAgent(userAgent); } - // For web browsers return parseWebUserAgent(userAgent); } -/** - * Parse Electron app user agent - */ function parseElectronUserAgent(userAgent: string): DeviceInfo { let os = "Unknown OS"; let version = "Unknown"; - // Detect OS if (userAgent.includes("Windows")) { os = parseWindowsVersion(userAgent); } else if (userAgent.includes("Mac OS X")) { @@ -68,7 +52,6 @@ function parseElectronUserAgent(userAgent: string): DeviceInfo { os = "Linux"; } - // Try to extract Electron version const electronMatch = userAgent.match(/Electron\/([\d.]+)/); if (electronMatch) { version = electronMatch[1]; @@ -83,23 +66,17 @@ function parseElectronUserAgent(userAgent: string): DeviceInfo { }; } -/** - * Parse mobile app user agent - */ function parseMobileUserAgent(userAgent: string): DeviceInfo { let os = "Unknown OS"; let version = "Unknown"; - // Check for Termix-Mobile/Platform format first (e.g., "Termix-Mobile/Android" or "Termix-Mobile/iOS") const termixPlatformMatch = userAgent.match(/Termix-Mobile\/(Android|iOS)/i); if (termixPlatformMatch) { const platform = termixPlatformMatch[1]; if (platform.toLowerCase() === "android") { - // Try to get Android version from full UA string const androidMatch = userAgent.match(/Android ([\d.]+)/); os = androidMatch ? 
`Android ${androidMatch[1]}` : "Android"; } else if (platform.toLowerCase() === "ios") { - // Try to get iOS version from full UA string const iosMatch = userAgent.match(/OS ([\d_]+)/); if (iosMatch) { const iosVersion = iosMatch[1].replace(/_/g, "."); @@ -109,7 +86,6 @@ function parseMobileUserAgent(userAgent: string): DeviceInfo { } } } else { - // Fallback: Check for standard Android/iOS patterns in the user agent if (userAgent.includes("Android")) { const androidMatch = userAgent.match(/Android ([\d.]+)/); os = androidMatch ? `Android ${androidMatch[1]}` : "Android"; @@ -128,8 +104,6 @@ function parseMobileUserAgent(userAgent: string): DeviceInfo { } } - // Try to extract app version (if included in UA) - // Match patterns like "Termix-Mobile/1.0.0" or just "Termix-Mobile" const versionMatch = userAgent.match( /Termix-Mobile\/(?:Android|iOS|)([\d.]+)/i, ); @@ -146,15 +120,11 @@ function parseMobileUserAgent(userAgent: string): DeviceInfo { }; } -/** - * Parse web browser user agent - */ function parseWebUserAgent(userAgent: string): DeviceInfo { let browser = "Unknown Browser"; let version = "Unknown"; let os = "Unknown OS"; - // Detect browser if (userAgent.includes("Edg/")) { const match = userAgent.match(/Edg\/([\d.]+)/); browser = "Edge"; @@ -177,7 +147,6 @@ function parseWebUserAgent(userAgent: string): DeviceInfo { version = match ? 
match[1] : "Unknown"; } - // Detect OS if (userAgent.includes("Windows")) { os = parseWindowsVersion(userAgent); } else if (userAgent.includes("Mac OS X")) { @@ -201,7 +170,6 @@ function parseWebUserAgent(userAgent: string): DeviceInfo { } } - // Shorten version to major.minor if (version !== "Unknown") { const versionParts = version.split("."); version = versionParts.slice(0, 2).join("."); @@ -216,9 +184,6 @@ function parseWebUserAgent(userAgent: string): DeviceInfo { }; } -/** - * Parse Windows version from user agent - */ function parseWindowsVersion(userAgent: string): string { if (userAgent.includes("Windows NT 10.0")) { return "Windows 10/11"; @@ -239,9 +204,6 @@ function parseWindowsVersion(userAgent: string): string { return "Windows"; } -/** - * Parse macOS version from user agent - */ function parseMacVersion(userAgent: string): string { const match = userAgent.match(/Mac OS X ([\d_]+)/); if (match) { @@ -250,7 +212,6 @@ function parseMacVersion(userAgent: string): string { const major = parseInt(parts[0]); const minor = parseInt(parts[1]); - // macOS naming if (major === 10) { if (minor >= 15) return `macOS ${major}.${minor}`; if (minor === 14) return "macOS Mojave"; diff --git a/src/constants/terminal-themes.ts b/src/constants/terminal-themes.ts index 967a574c..0786a952 100644 --- a/src/constants/terminal-themes.ts +++ b/src/constants/terminal-themes.ts @@ -28,7 +28,6 @@ export interface TerminalTheme { } export const TERMINAL_THEMES: Record = { - // Current default theme termix: { name: "Termix Default", category: "dark", @@ -666,14 +665,12 @@ export const TERMINAL_FONTS = [ }, ]; -// Cursor styles export const CURSOR_STYLES = [ { value: "block", label: "Block" }, { value: "underline", label: "Underline" }, { value: "bar", label: "Bar" }, ] as const; -// Bell styles export const BELL_STYLES = [ { value: "none", label: "None" }, { value: "sound", label: "Sound" }, @@ -681,16 +678,13 @@ export const BELL_STYLES = [ { value: "both", label: "Both" }, ] as 
const; -// Fast scroll modifiers export const FAST_SCROLL_MODIFIERS = [ { value: "alt", label: "Alt" }, { value: "ctrl", label: "Ctrl" }, { value: "shift", label: "Shift" }, ] as const; -// Default terminal configuration export const DEFAULT_TERMINAL_CONFIG = { - // Appearance cursorBlink: true, cursorStyle: "bar" as const, fontSize: 14, @@ -699,7 +693,6 @@ export const DEFAULT_TERMINAL_CONFIG = { lineHeight: 1.2, theme: "termix", - // Behavior scrollback: 10000, bellStyle: "none" as const, rightClickSelectsWord: false, @@ -707,7 +700,6 @@ export const DEFAULT_TERMINAL_CONFIG = { fastScrollSensitivity: 5, minimumContrastRatio: 1, - // Advanced backspaceMode: "normal" as const, agentForwarding: false, environmentVariables: [] as Array<{ key: string; value: string }>, diff --git a/src/types/index.ts b/src/types/index.ts index 09446742..868ec4a8 100644 --- a/src/types/index.ts +++ b/src/types/index.ts @@ -1,7 +1,3 @@ -// ============================================================================ -// CENTRAL TYPE DEFINITIONS -// ============================================================================ - import type { Client } from "ssh2"; import type { Request } from "express"; @@ -60,7 +56,7 @@ export interface SSHHostData { enableFileManager?: boolean; defaultPath?: string; tunnelConnections?: TunnelConnection[]; - statsConfig?: string | Record; // Can be string (from backend) or object (from form) + statsConfig?: string | Record; terminalConfig?: TerminalConfig; } @@ -110,7 +106,6 @@ export interface TunnelConnection { endpointPort: number; endpointHost: string; - // Endpoint host credentials for tunnel authentication endpointPassword?: string; endpointKey?: string; endpointKeyPassword?: string; @@ -255,16 +250,14 @@ export interface TermixAlert { // ============================================================================ export interface TerminalConfig { - // Appearance cursorBlink: boolean; cursorStyle: "block" | "underline" | "bar"; fontSize: number; 
fontFamily: string; letterSpacing: number; lineHeight: number; - theme: string; // Theme key from TERMINAL_THEMES + theme: string; - // Behavior scrollback: number; bellStyle: "none" | "sound" | "visual" | "both"; rightClickSelectsWord: boolean; @@ -272,7 +265,6 @@ export interface TerminalConfig { fastScrollSensitivity: number; minimumContrastRatio: number; - // Advanced backspaceMode: "normal" | "control-h"; agentForwarding: boolean; environmentVariables: Array<{ key: string; value: string }>; @@ -298,7 +290,7 @@ export interface TabContextTab { title: string; hostConfig?: SSHHost; terminalRef?: any; - initialTab?: string; // For ssh_manager: "host_viewer" | "add_host" | "credentials" | "add_credential" + initialTab?: string; } // ============================================================================ diff --git a/src/types/stats-widgets.ts b/src/types/stats-widgets.ts index fb3a08eb..eb450aa7 100644 --- a/src/types/stats-widgets.ts +++ b/src/types/stats-widgets.ts @@ -9,12 +9,10 @@ export type WidgetType = export interface StatsConfig { enabledWidgets: WidgetType[]; - // Status monitoring configuration statusCheckEnabled: boolean; - statusCheckInterval: number; // seconds (5-3600) - // Metrics monitoring configuration + statusCheckInterval: number; metricsEnabled: boolean; - metricsInterval: number; // seconds (5-3600) + metricsInterval: number; } export const DEFAULT_STATS_CONFIG: StatsConfig = { diff --git a/src/ui/Desktop/Admin/AdminSettings.tsx b/src/ui/Desktop/Admin/AdminSettings.tsx index ca31724f..45a3e4dc 100644 --- a/src/ui/Desktop/Admin/AdminSettings.tsx +++ b/src/ui/Desktop/Admin/AdminSettings.tsx @@ -153,7 +153,6 @@ export function AdminSettings({ toast.error(t("admin.failedToFetchOidcConfig")); } }); - // Capture the current session so we know whether to ask for a password later. 
getUserInfo() .then((info) => { if (info) { @@ -251,9 +250,7 @@ export function AdminSettings({ }; const handleTogglePasswordLogin = async (checked: boolean) => { - // If disabling password login, warn the user if (!checked) { - // Check if OIDC is configured const hasOIDCConfigured = oidcConfig.client_id && oidcConfig.client_secret && @@ -276,7 +273,6 @@ export function AdminSettings({ await updatePasswordLoginAllowed(checked); setAllowPasswordLogin(checked); - // Auto-disable registration when password login is disabled if (allowRegistration) { await updateRegistrationAllowed(false); setAllowRegistration(false); @@ -295,7 +291,6 @@ export function AdminSettings({ return; } - // Enabling password login - proceed normally setPasswordLoginLoading(true); try { await updatePasswordLoginAllowed(checked); @@ -493,7 +488,6 @@ export function AdminSettings({ const formData = new FormData(); formData.append("file", importFile); if (requiresImportPassword) { - // Preserve the existing password flow for non-OIDC accounts. 
formData.append("password", importPassword); } @@ -607,7 +601,6 @@ export function AdminSettings({ }; const handleRevokeSession = async (sessionId: string) => { - // Check if this is the current session const currentJWT = getCookie("jwt"); const currentSession = sessions.find((s) => s.jwtToken === currentJWT); const isCurrentSession = currentSession?.id === sessionId; @@ -641,7 +634,6 @@ export function AdminSettings({ if (response.ok) { toast.success(t("admin.sessionRevokedSuccessfully")); - // If user revoked their own session, reload the page after a brief delay if (isCurrentSession) { setTimeout(() => { window.location.reload(); @@ -661,7 +653,6 @@ export function AdminSettings({ }; const handleRevokeAllUserSessions = async (userId: string) => { - // Check if revoking sessions for current user const isCurrentUser = currentUser?.id === userId; confirmWithToast( @@ -701,7 +692,6 @@ export function AdminSettings({ data.message || t("admin.sessionsRevokedSuccessfully"), ); - // If revoking sessions for current user, reload the page after a brief delay if (isCurrentUser) { setTimeout(() => { window.location.reload(); @@ -978,7 +968,6 @@ export function AdminSettings({ type="button" variant="outline" onClick={async () => { - // Check if password login is enabled if (!allowPasswordLogin) { confirmWithToast( t("admin.confirmDisableOIDCWarning"), @@ -1469,7 +1458,6 @@ export function AdminSettings({ - {/* Only render the password field when a local account is performing the import. */} {importFile && requiresImportPassword && (
diff --git a/src/ui/Desktop/Apps/Credentials/CredentialEditor.tsx b/src/ui/Desktop/Apps/Credentials/CredentialEditor.tsx index d4e0d7d3..2dd10f58 100644 --- a/src/ui/Desktop/Apps/Credentials/CredentialEditor.tsx +++ b/src/ui/Desktop/Apps/Credentials/CredentialEditor.tsx @@ -80,7 +80,6 @@ export function CredentialEditor({ setFolders(uniqueFolders); } catch { - // Failed to load credentials } finally { setLoading(false); } diff --git a/src/ui/Desktop/Apps/Dashboard/Dashboard.tsx b/src/ui/Desktop/Apps/Dashboard/Dashboard.tsx index bbecf9ae..6b33cd5a 100644 --- a/src/ui/Desktop/Apps/Dashboard/Dashboard.tsx +++ b/src/ui/Desktop/Apps/Dashboard/Dashboard.tsx @@ -66,7 +66,6 @@ export function Dashboard({ const [userId, setUserId] = useState(null); const [dbError, setDbError] = useState(null); - // Dashboard data state const [uptime, setUptime] = useState("0d 0h 0m"); const [versionStatus, setVersionStatus] = useState< "up_to_date" | "requires_update" @@ -141,22 +140,18 @@ export function Dashboard({ } }, [isAuthenticated]); - // Fetch dashboard data useEffect(() => { if (!loggedIn) return; const fetchDashboardData = async () => { try { - // Fetch uptime const uptimeInfo = await getUptime(); setUptime(uptimeInfo.formatted); - // Fetch version info const versionInfo = await getVersionInfo(); setVersionText(`v${versionInfo.localVersion}`); setVersionStatus(versionInfo.status || "up_to_date"); - // Fetch database health try { await getDatabaseHealth(); setDbHealth("healthy"); @@ -164,25 +159,20 @@ export function Dashboard({ setDbHealth("error"); } - // Fetch total counts const hosts = await getSSHHosts(); setTotalServers(hosts.length); - // Count total tunnels across all hosts let totalTunnelsCount = 0; for (const host of hosts) { if (host.tunnelConnections) { try { - // tunnelConnections is already parsed as an array from the backend const tunnelConnections = Array.isArray(host.tunnelConnections) ? 
host.tunnelConnections : JSON.parse(host.tunnelConnections); if (Array.isArray(tunnelConnections)) { totalTunnelsCount += tunnelConnections.length; } - } catch { - // Ignore parse errors - } + } catch {} } } setTotalTunnels(totalTunnelsCount); @@ -190,13 +180,11 @@ export function Dashboard({ const credentials = await getCredentials(); setTotalCredentials(credentials.length); - // Fetch recent activity (35 items) setRecentActivityLoading(true); const activity = await getRecentActivity(35); setRecentActivity(activity); setRecentActivityLoading(false); - // Fetch server stats for first 5 servers setServerStatsLoading(true); const serversWithStats = await Promise.all( hosts.slice(0, 5).map(async (host: { id: number; name: string }) => { @@ -229,12 +217,10 @@ export function Dashboard({ fetchDashboardData(); - // Refresh every 30 seconds const interval = setInterval(fetchDashboardData, 30000); return () => clearInterval(interval); }, [loggedIn]); - // Handler for resetting recent activity const handleResetActivity = async () => { try { await resetRecentActivity(); @@ -244,9 +230,7 @@ export function Dashboard({ } }; - // Handler for opening a recent activity item const handleActivityClick = (item: RecentActivityItem) => { - // Find the host and open appropriate tab getSSHHosts().then((hosts) => { const host = hosts.find((h: { id: number }) => h.id === item.hostId); if (!host) return; @@ -267,7 +251,6 @@ export function Dashboard({ }); }; - // Quick Actions handlers const handleAddHost = () => { const sshManagerTab = tabList.find((t) => t.type === "ssh_manager"); if (sshManagerTab) { diff --git a/src/ui/Desktop/Apps/File Manager/FileManager.tsx b/src/ui/Desktop/Apps/File Manager/FileManager.tsx index 55d3fd4d..832b808c 100644 --- a/src/ui/Desktop/Apps/File Manager/FileManager.tsx +++ b/src/ui/Desktop/Apps/File Manager/FileManager.tsx @@ -226,9 +226,8 @@ function FileManagerContent({ initialHost, onClose }: FileManagerProps) { const currentLoadingPathRef = useRef(""); 
const keepaliveTimerRef = useRef(null); const activityLoggedRef = useRef(false); - const activityLoggingRef = useRef(false); // Prevent concurrent logging calls + const activityLoggingRef = useRef(false); - // Centralized activity logging to prevent duplicates const logFileManagerActivity = useCallback(async () => { if ( !currentHost?.id || @@ -238,7 +237,6 @@ function FileManagerContent({ initialHost, onClose }: FileManagerProps) { return; } - // Set flags IMMEDIATELY to prevent race conditions activityLoggingRef.current = true; activityLoggedRef.current = true; @@ -246,10 +244,8 @@ function FileManagerContent({ initialHost, onClose }: FileManagerProps) { const hostName = currentHost.name || `${currentHost.username}@${currentHost.ip}`; await logActivity("file_manager", currentHost.id, hostName); - // Don't reset activityLoggedRef on success - we want to prevent future calls } catch (err) { console.warn("Failed to log file manager activity:", err); - // Reset on error so it can be retried activityLoggedRef.current = false; } finally { activityLoggingRef.current = false; @@ -350,8 +346,6 @@ function FileManagerContent({ initialHost, onClose }: FileManagerProps) { clearSelection(); initialLoadDoneRef.current = true; - // Log activity for recent connections (after successful directory load) - // Only log if TOTP was not required (if TOTP is required, we'll log after verification) if (!result?.requires_totp) { logFileManagerActivity(); } @@ -1306,7 +1300,6 @@ function FileManagerContent({ initialHost, onClose }: FileManagerProps) { initialLoadDoneRef.current = true; toast.success(t("fileManager.connectedSuccessfully")); - // Log activity for recent connections (after successful directory load) logFileManagerActivity(); } catch (dirError: unknown) { console.error("Failed to load initial directory:", dirError); diff --git a/src/ui/Desktop/Apps/Host Manager/HostManager.tsx b/src/ui/Desktop/Apps/Host Manager/HostManager.tsx index 3aa29e99..36cb8107 100644 --- 
a/src/ui/Desktop/Apps/Host Manager/HostManager.tsx +++ b/src/ui/Desktop/Apps/Host Manager/HostManager.tsx @@ -34,21 +34,16 @@ export function HostManager({ const ignoreNextHostConfigChangeRef = useRef(false); const lastProcessedHostIdRef = useRef(undefined); - // Update editing host when hostConfig prop changes (from sidebar edit button) useEffect(() => { - // Skip if we should ignore this change if (ignoreNextHostConfigChangeRef.current) { ignoreNextHostConfigChangeRef.current = false; return; } - // Only process if this is an external edit request (from sidebar) if (hostConfig && initialTab === "add_host") { const currentHostId = hostConfig.id; - // Open editor if it's a different host OR same host but user is on viewer/credentials tabs if (currentHostId !== lastProcessedHostIdRef.current) { - // Different host - always open setEditingHost(hostConfig); setActiveTab("add_host"); lastProcessedHostIdRef.current = currentHostId; @@ -57,11 +52,9 @@ export function HostManager({ activeTab === "credentials" || activeTab === "add_credential" ) { - // Same host but user manually navigated away - reopen setEditingHost(hostConfig); setActiveTab("add_host"); } - // If same host and already on add_host tab, do nothing (don't block tab changes) } }, [hostConfig, initialTab]); @@ -72,11 +65,9 @@ export function HostManager({ }; const handleFormSubmit = () => { - // Ignore the next hostConfig change (which will come from ssh-hosts:changed event) ignoreNextHostConfigChangeRef.current = true; setEditingHost(null); setActiveTab("host_viewer"); - // Clear after a delay so the same host can be edited again setTimeout(() => { lastProcessedHostIdRef.current = undefined; }, 500); diff --git a/src/ui/Desktop/Apps/Host Manager/HostManagerEditor.tsx b/src/ui/Desktop/Apps/Host Manager/HostManagerEditor.tsx index a6d9186f..f46529db 100644 --- a/src/ui/Desktop/Apps/Host Manager/HostManagerEditor.tsx +++ b/src/ui/Desktop/Apps/Host Manager/HostManagerEditor.tsx @@ -129,7 +129,6 @@ export 
function HostManagerEditor({ ); const isSubmittingRef = useRef(false); - // Monitoring interval states const [statusIntervalUnit, setStatusIntervalUnit] = useState< "seconds" | "minutes" >("seconds"); @@ -168,9 +167,7 @@ export function HostManagerEditor({ setFolders(uniqueFolders); setSshConfigurations(uniqueConfigurations); - } catch { - // Failed to load hosts data - } + } catch {} }; fetchData(); @@ -199,9 +196,7 @@ export function HostManagerEditor({ setFolders(uniqueFolders); setSshConfigurations(uniqueConfigurations); - } catch { - // Failed to reload hosts after credential change - } + } catch {} }; window.addEventListener("credentials:changed", handleCredentialChange); @@ -319,7 +314,6 @@ export function HostManagerEditor({ }) .superRefine((data, ctx) => { if (data.authType === "none") { - // No credentials required for "none" auth type - will use keyboard-interactive return; } @@ -444,7 +438,6 @@ export function HostManagerEditor({ : "none"; setAuthTab(defaultAuthType); - // Parse statsConfig from JSON string if needed let parsedStatsConfig = DEFAULT_STATS_CONFIG; try { if (cleanedHost.statsConfig) { @@ -457,7 +450,6 @@ export function HostManagerEditor({ console.error("Failed to parse statsConfig:", error); } - // Merge with defaults to ensure all new fields are present parsedStatsConfig = { ...DEFAULT_STATS_CONFIG, ...parsedStatsConfig }; const formData = { @@ -552,7 +544,6 @@ export function HostManagerEditor({ data.name = `${data.username}@${data.ip}`; } - // Validate monitoring intervals if (data.statsConfig) { const statusInterval = data.statsConfig.statusCheckInterval || 30; const metricsInterval = data.statsConfig.metricsInterval || 30; @@ -663,7 +654,6 @@ export function HostManagerEditor({ window.dispatchEvent(new CustomEvent("ssh-hosts:changed")); - // Refresh backend polling to pick up new/updated host configuration const { refreshServerPolling } = await import("@/ui/main-axios.ts"); refreshServerPolling(); } catch { @@ -1391,7 +1381,6 @@ 
export function HostManagerEditor({ )} /> - {/* Font Family */} - {/* Font Size */} - {/* Letter Spacing */} - {/* Line Height */} - {/* Cursor Style */} - {/* Cursor Blink */} - {/* Behavior Settings */} Behavior - {/* Scrollback Buffer */} - {/* Bell Style */} - {/* Right Click Selects Word */} - {/* Fast Scroll Modifier */} - {/* Fast Scroll Sensitivity */} - {/* Minimum Contrast Ratio */} - {/* Advanced Settings */} Advanced - {/* Agent Forwarding */} - {/* Backspace Mode */} - {/* Startup Snippet */} - {/* Auto MOSH */} - {/* MOSH Command */} {form.watch("terminalConfig.autoMosh") && ( )} - {/* Environment Variables */}