v1.8.0 #429

Merged
LukeGus merged 198 commits from dev-1.8.0 into main 2025-11-05 16:36:16 +00:00
62 changed files with 121 additions and 1433 deletions
Showing only changes of commit e375878576 - Show all commits

View File

@@ -42,22 +42,18 @@ jobs:
ALL_TAGS=()
if [ "$BUILD_TYPE" = "Production" ]; then
# Production build → push release + latest to both GHCR and Docker Hub
TAGS+=("release-$VERSION" "latest")
for tag in "${TAGS[@]}"; do
ALL_TAGS+=("ghcr.io/lukegus/termix:$tag")
ALL_TAGS+=("docker.io/bugattiguy527/termix:$tag")
done
else
# Dev build → push only dev-x.x.x to GHCR
TAGS+=("dev-$VERSION")
for tag in "${TAGS[@]}"; do
ALL_TAGS+=("ghcr.io/lukegus/termix:$tag")
done
fi
echo "ALL_TAGS=${ALL_TAGS[*]}" >> $GITHUB_ENV
echo "All tags to build:"
printf '%s\n' "${ALL_TAGS[@]}"
- name: Login to GHCR

View File

@@ -43,7 +43,6 @@ jobs:
- name: Install dependencies
run: |
# Retry npm ci up to 3 times on failure
$maxAttempts = 3
$attempt = 1
while ($attempt -le $maxAttempts) {
@@ -55,7 +54,6 @@ jobs:
Write-Error "npm ci failed after $maxAttempts attempts"
exit 1
}
Write-Host "npm ci attempt $attempt failed, retrying in 10 seconds..."
Start-Sleep -Seconds 10
$attempt++
}
@@ -66,14 +64,12 @@ jobs:
run: |
$VERSION = (Get-Content package.json | ConvertFrom-Json).version
echo "version=$VERSION" >> $env:GITHUB_OUTPUT
echo "Building version: $VERSION"
- name: Build Windows (All Architectures)
run: npm run build && npx electron-builder --win --x64 --ia32
- name: List release files
run: |
echo "Contents of release directory:"
dir release
- name: Upload Windows x64 NSIS Installer
@@ -154,16 +150,14 @@ jobs:
- name: Install dependencies
run: |
# Retry npm ci up to 3 times on failure
for i in 1 2 3; do
for i in 1 2 3;
do
if npm ci; then
break
else
if [ $i -eq 3 ]; then
echo "npm ci failed after 3 attempts"
exit 1
fi
echo "npm ci attempt $i failed, retrying in 10 seconds..."
sleep 10
fi
done
@@ -179,41 +173,30 @@ jobs:
VERSION=$(node -p "require('./package.json').version")
cd release
# Rename x64 AppImage to use 'x64'
if [ -f "termix_linux_x86_64_${VERSION}_appimage.AppImage" ]; then
mv "termix_linux_x86_64_${VERSION}_appimage.AppImage" "termix_linux_x64_${VERSION}_appimage.AppImage"
echo "Renamed x64 AppImage to use 'x64' arch"
fi
# Rename x64 deb to use 'x64'
if [ -f "termix_linux_amd64_${VERSION}_deb.deb" ]; then
mv "termix_linux_amd64_${VERSION}_deb.deb" "termix_linux_x64_${VERSION}_deb.deb"
echo "Renamed x64 deb to use 'x64' arch"
fi
# Rename x64 tar.gz if it exists
if [ -f "termix-${VERSION}.tar.gz" ]; then
mv "termix-${VERSION}.tar.gz" "termix_linux_x64_${VERSION}_portable.tar.gz"
echo "Renamed x64 tar.gz"
fi
# Rename arm64 tar.gz if it exists
if [ -f "termix-${VERSION}-arm64.tar.gz" ]; then
mv "termix-${VERSION}-arm64.tar.gz" "termix_linux_arm64_${VERSION}_portable.tar.gz"
echo "Renamed arm64 tar.gz"
fi
# Rename armv7l tar.gz if it exists
if [ -f "termix-${VERSION}-armv7l.tar.gz" ]; then
mv "termix-${VERSION}-armv7l.tar.gz" "termix_linux_armv7l_${VERSION}_portable.tar.gz"
echo "Renamed armv7l tar.gz"
fi
cd ..
- name: List release files
run: |
echo "Contents of release directory:"
ls -la release/
- name: Upload Linux x64 AppImage
@@ -299,16 +282,14 @@ jobs:
- name: Install dependencies
run: |
# Retry npm ci up to 3 times on failure
for i in 1 2 3; do
for i in 1 2 3;
do
if npm ci; then
break
else
if [ $i -eq 3 ]; then
echo "npm ci failed after 3 attempts"
exit 1
fi
echo "npm ci attempt $i failed, retrying in 10 seconds..."
sleep 10
fi
done
@@ -320,9 +301,6 @@ jobs:
run: |
if [ -n "${{ secrets.MAC_BUILD_CERTIFICATE_BASE64 }}" ] && [ -n "${{ secrets.MAC_P12_PASSWORD }}" ]; then
echo "has_certs=true" >> $GITHUB_OUTPUT
else
echo "has_certs=false" >> $GITHUB_OUTPUT
echo "⚠️ Code signing certificates not configured. MAS build will be unsigned."
fi
- name: Import Code Signing Certificates
@@ -337,36 +315,26 @@ jobs:
INSTALLER_CERT_PATH=$RUNNER_TEMP/installer_certificate.p12
KEYCHAIN_PATH=$RUNNER_TEMP/app-signing.keychain-db
# Decode certificates
echo -n "$MAC_BUILD_CERTIFICATE_BASE64" | base64 --decode -o $APP_CERT_PATH
if [ -n "$MAC_INSTALLER_CERTIFICATE_BASE64" ]; then
echo "Decoding installer certificate..."
echo -n "$MAC_INSTALLER_CERTIFICATE_BASE64" | base64 --decode -o $INSTALLER_CERT_PATH
else
echo "⚠️ MAC_INSTALLER_CERTIFICATE_BASE64 is empty"
fi
# Create and configure keychain
security create-keychain -p "$MAC_KEYCHAIN_PASSWORD" $KEYCHAIN_PATH
security set-keychain-settings -lut 21600 $KEYCHAIN_PATH
security unlock-keychain -p "$MAC_KEYCHAIN_PASSWORD" $KEYCHAIN_PATH
# Import application certificate
echo "Importing application certificate..."
security import $APP_CERT_PATH -P "$MAC_P12_PASSWORD" -A -t cert -f pkcs12 -k $KEYCHAIN_PATH
# Import installer certificate if it exists
if [ -f "$INSTALLER_CERT_PATH" ]; then
echo "Importing installer certificate..."
security import $INSTALLER_CERT_PATH -P "$MAC_P12_PASSWORD" -A -t cert -f pkcs12 -k $KEYCHAIN_PATH
else
echo "⚠️ Installer certificate file not found, skipping import"
fi
security list-keychain -d user -s $KEYCHAIN_PATH
echo "Imported certificates:"
security find-identity -v -p codesigning $KEYCHAIN_PATH
- name: Build macOS App Store Package
@@ -374,32 +342,21 @@ jobs:
env:
ELECTRON_BUILDER_ALLOW_UNRESOLVED_DEPENDENCIES: true
run: |
# Get current version for display
CURRENT_VERSION=$(node -p "require('./package.json').version")
BUILD_VERSION="${{ github.run_number }}"
echo "✅ Package version: $CURRENT_VERSION (unchanged)"
echo "✅ Build number for Apple: $BUILD_VERSION"
# Build MAS with custom buildVersion
npm run build && npx electron-builder --mac mas --universal --config.buildVersion="$BUILD_VERSION"
- name: Clean up MAS keychain before DMG build
if: steps.check_certs.outputs.has_certs == 'true'
run: |
security delete-keychain $RUNNER_TEMP/app-signing.keychain-db || true
echo "Cleaned up MAS keychain"
- name: Check for Developer ID Certificates
id: check_dev_id_certs
run: |
if [ -n "${{ secrets.DEVELOPER_ID_CERTIFICATE_BASE64 }}" ] && [ -n "${{ secrets.DEVELOPER_ID_P12_PASSWORD }}" ]; then
echo "has_dev_id_certs=true" >> $GITHUB_OUTPUT
echo "✅ Developer ID certificates configured for DMG signing"
else
echo "has_dev_id_certs=false" >> $GITHUB_OUTPUT
echo "⚠️ Developer ID certificates not configured. DMG will be unsigned."
echo "Add DEVELOPER_ID_CERTIFICATE_BASE64 and DEVELOPER_ID_P12_PASSWORD secrets to enable DMG signing."
fi
- name: Import Developer ID Certificates
@@ -414,34 +371,25 @@ jobs:
DEV_INSTALLER_CERT_PATH=$RUNNER_TEMP/dev_installer_certificate.p12
KEYCHAIN_PATH=$RUNNER_TEMP/dev-signing.keychain-db
# Decode Developer ID certificate
echo -n "$DEVELOPER_ID_CERTIFICATE_BASE64" | base64 --decode -o $DEV_CERT_PATH
if [ -n "$DEVELOPER_ID_INSTALLER_CERTIFICATE_BASE64" ]; then
echo "Decoding Developer ID installer certificate..."
echo -n "$DEVELOPER_ID_INSTALLER_CERTIFICATE_BASE64" | base64 --decode -o $DEV_INSTALLER_CERT_PATH
else
echo "⚠️ DEVELOPER_ID_INSTALLER_CERTIFICATE_BASE64 is empty (optional)"
fi
# Create and configure keychain
security create-keychain -p "$MAC_KEYCHAIN_PASSWORD" $KEYCHAIN_PATH
security set-keychain-settings -lut 21600 $KEYCHAIN_PATH
security unlock-keychain -p "$MAC_KEYCHAIN_PASSWORD" $KEYCHAIN_PATH
# Import Developer ID Application certificate
echo "Importing Developer ID Application certificate..."
security import $DEV_CERT_PATH -P "$DEVELOPER_ID_P12_PASSWORD" -A -t cert -f pkcs12 -k $KEYCHAIN_PATH
# Import Developer ID Installer certificate if it exists
if [ -f "$DEV_INSTALLER_CERT_PATH" ]; then
echo "Importing Developer ID Installer certificate..."
security import $DEV_INSTALLER_CERT_PATH -P "$DEVELOPER_ID_P12_PASSWORD" -A -t cert -f pkcs12 -k $KEYCHAIN_PATH
fi
security list-keychain -d user -s $KEYCHAIN_PATH
echo "Imported Developer ID certificates:"
security find-identity -v -p codesigning $KEYCHAIN_PATH
- name: Build macOS DMG
@@ -452,19 +400,15 @@ jobs:
APPLE_APP_SPECIFIC_PASSWORD: ${{ secrets.APPLE_ID_PASSWORD }}
APPLE_TEAM_ID: ${{ secrets.APPLE_TEAM_ID }}
run: |
# Build DMG without running npm run build again (already built above or skip if no certs)
if [ "${{ steps.check_certs.outputs.has_certs }}" == "true" ]; then
# Frontend already built, just package DMG
npx electron-builder --mac dmg --universal --x64 --arm64 --publish never
else
# No certs, need to build frontend first
npm run build && npx electron-builder --mac dmg --universal --x64 --arm64 --publish never
fi
- name: List release directory
if: steps.check_certs.outputs.has_certs == 'true'
run: |
echo "Contents of release directory:"
ls -R release/ || echo "Release directory not found"
- name: Upload macOS MAS PKG
@@ -506,15 +450,6 @@ jobs:
run: |
if [ -n "${{ secrets.APPLE_KEY_ID }}" ] && [ -n "${{ secrets.APPLE_ISSUER_ID }}" ] && [ -n "${{ secrets.APPLE_KEY_CONTENT }}" ]; then
echo "has_credentials=true" >> $GITHUB_OUTPUT
if [ "${{ github.event.inputs.artifact_destination }}" == "submit" ]; then
echo "✅ App Store Connect API credentials found. Will deploy to TestFlight."
else
echo " App Store Connect API credentials found, but store submission is disabled."
fi
else
echo "has_credentials=false" >> $GITHUB_OUTPUT
echo "⚠️ App Store Connect API credentials not configured. Skipping deployment."
echo "Add APPLE_KEY_ID, APPLE_ISSUER_ID, and APPLE_KEY_CONTENT secrets to enable automatic deployment."
fi
- name: Setup Ruby for Fastlane
@@ -528,29 +463,22 @@ jobs:
if: steps.check_asc_creds.outputs.has_credentials == 'true' && github.event.inputs.artifact_destination == 'submit'
run: |
gem install fastlane -N
fastlane --version
- name: Deploy to App Store Connect (TestFlight)
if: steps.check_asc_creds.outputs.has_credentials == 'true' && github.event.inputs.artifact_destination == 'submit'
run: |
PKG_FILE=$(find release -name "*.pkg" -type f | head -n 1)
if [ -z "$PKG_FILE" ]; then
echo "Error: No .pkg file found in release directory"
exit 1
fi
echo "Found package: $PKG_FILE"
# Create API key file
mkdir -p ~/private_keys
echo "${{ secrets.APPLE_KEY_CONTENT }}" | base64 --decode > ~/private_keys/AuthKey_${{ secrets.APPLE_KEY_ID }}.p8
# Upload to App Store Connect using xcrun altool
xcrun altool --upload-app -f "$PKG_FILE" \
--type macos \
--apiKey "${{ secrets.APPLE_KEY_ID }}" \
--apiIssuer "${{ secrets.APPLE_ISSUER_ID }}"
echo "✅ Upload complete! Build will appear in App Store Connect after processing (10-30 minutes)"
continue-on-error: true
- name: Clean up keychains
@@ -577,7 +505,6 @@ jobs:
run: |
$VERSION = (Get-Content package.json | ConvertFrom-Json).version
echo "version=$VERSION" >> $env:GITHUB_OUTPUT
echo "Building Chocolatey package for version: $VERSION"
- name: Download Windows x64 MSI artifact
uses: actions/download-artifact@v4
@@ -595,8 +522,6 @@ jobs:
echo "msi_name=$MSI_NAME" >> $env:GITHUB_OUTPUT
echo "checksum=$CHECKSUM" >> $env:GITHUB_OUTPUT
echo "MSI File: $MSI_NAME"
echo "SHA256: $CHECKSUM"
- name: Prepare Chocolatey package
run: |
@@ -604,33 +529,20 @@ jobs:
$CHECKSUM = "${{ steps.msi-info.outputs.checksum }}"
$MSI_NAME = "${{ steps.msi-info.outputs.msi_name }}"
# Construct the download URL with the actual release tag format
$DOWNLOAD_URL = "https://github.com/Termix-SSH/Termix/releases/download/release-$VERSION-tag/$MSI_NAME"
# Copy chocolatey files to build directory
New-Item -ItemType Directory -Force -Path "choco-build"
Copy-Item -Path "chocolatey\*" -Destination "choco-build" -Recurse -Force
# Update chocolateyinstall.ps1 with actual values
$installScript = Get-Content "choco-build\tools\chocolateyinstall.ps1" -Raw -Encoding UTF8
$installScript = $installScript -replace 'DOWNLOAD_URL_PLACEHOLDER', $DOWNLOAD_URL
$installScript = $installScript -replace 'CHECKSUM_PLACEHOLDER', $CHECKSUM
[System.IO.File]::WriteAllText("$PWD\choco-build\tools\chocolateyinstall.ps1", $installScript, [System.Text.UTF8Encoding]::new($false))
# Update nuspec with version (preserve UTF-8 encoding without BOM)
$nuspec = Get-Content "choco-build\termix-ssh.nuspec" -Raw -Encoding UTF8
$nuspec = $nuspec -replace 'VERSION_PLACEHOLDER', $VERSION
[System.IO.File]::WriteAllText("$PWD\choco-build\termix-ssh.nuspec", $nuspec, [System.Text.UTF8Encoding]::new($false))
echo "Chocolatey package prepared for version $VERSION"
echo "Download URL: $DOWNLOAD_URL"
# Verify the nuspec is valid
echo ""
echo "Verifying nuspec content:"
Get-Content "choco-build\termix-ssh.nuspec" -Head 10
echo ""
- name: Install Chocolatey
run: |
Set-ExecutionPolicy Bypass -Scope Process -Force
@@ -640,29 +552,17 @@ jobs:
- name: Pack Chocolatey package
run: |
cd choco-build
echo "Packing Chocolatey package..."
choco pack termix-ssh.nuspec
if ($LASTEXITCODE -ne 0) {
echo "❌ Failed to pack Chocolatey package"
exit 1
throw "Chocolatey pack failed with exit code $LASTEXITCODE"
}
echo ""
echo "✅ Package created successfully"
echo "Package contents:"
Get-ChildItem *.nupkg | ForEach-Object { echo $_.Name }
- name: Check for Chocolatey API Key
id: check_choco_key
run: |
if ("${{ secrets.CHOCOLATEY_API_KEY }}" -ne "") {
echo "has_key=true" >> $env:GITHUB_OUTPUT
echo "✅ Chocolatey API key found. Will push to Chocolatey."
} else {
echo "has_key=false" >> $env:GITHUB_OUTPUT
echo "⚠️ Chocolatey API key not configured. Package will be created but not pushed."
echo "Add CHOCOLATEY_API_KEY secret to enable automatic submission."
}
- name: Push to Chocolatey
@@ -675,29 +575,10 @@ jobs:
try {
choco push "termix-ssh.$VERSION.nupkg" --source https://push.chocolatey.org/
if ($LASTEXITCODE -eq 0) {
echo ""
echo "✅ Package pushed to Chocolatey successfully!"
echo "View at: https://community.chocolatey.org/packages/termix-ssh/$VERSION"
} else {
throw "Chocolatey push failed with exit code $LASTEXITCODE"
}
} catch {
echo ""
echo "❌ Failed to push to Chocolatey"
echo ""
echo "Common reasons:"
echo "1. Package ID 'termix-ssh' is already owned by another user"
echo "2. You need to register/claim the package ID first"
echo "3. API key doesn't have push permissions"
echo ""
echo "Solutions:"
echo "1. Check if package exists: https://community.chocolatey.org/packages/termix-ssh"
echo "2. If it exists and is yours, contact Chocolatey support to claim it"
echo "3. Register a new package ID at: https://community.chocolatey.org/"
echo ""
echo "The package artifact has been saved for manual submission."
echo ""
exit 1
}
- name: Upload Chocolatey package as artifact
@@ -727,7 +608,6 @@ jobs:
RELEASE_DATE=$(date +%Y-%m-%d)
echo "version=$VERSION" >> $GITHUB_OUTPUT
echo "release_date=$RELEASE_DATE" >> $GITHUB_OUTPUT
echo "Building Flatpak submission for version: $VERSION"
- name: Download Linux x64 AppImage artifact
uses: actions/download-artifact@v4
@@ -746,12 +626,10 @@ jobs:
run: |
VERSION="${{ steps.package-version.outputs.version }}"
# x64 AppImage
APPIMAGE_X64_FILE=$(find artifact-x64 -name "*.AppImage" -type f | head -n 1)
APPIMAGE_X64_NAME=$(basename "$APPIMAGE_X64_FILE")
CHECKSUM_X64=$(sha256sum "$APPIMAGE_X64_FILE" | awk '{print $1}')
# arm64 AppImage
APPIMAGE_ARM64_FILE=$(find artifact-arm64 -name "*.AppImage" -type f | head -n 1)
APPIMAGE_ARM64_NAME=$(basename "$APPIMAGE_ARM64_FILE")
CHECKSUM_ARM64=$(sha256sum "$APPIMAGE_ARM64_FILE" | awk '{print $1}')
@@ -761,11 +639,6 @@ jobs:
echo "appimage_arm64_name=$APPIMAGE_ARM64_NAME" >> $GITHUB_OUTPUT
echo "checksum_arm64=$CHECKSUM_ARM64" >> $GITHUB_OUTPUT
echo "x64 AppImage: $APPIMAGE_X64_NAME"
echo "x64 SHA256: $CHECKSUM_X64"
echo "arm64 AppImage: $APPIMAGE_ARM64_NAME"
echo "arm64 SHA256: $CHECKSUM_ARM64"
- name: Install ImageMagick for icon generation
run: |
sudo apt-get update
@@ -780,101 +653,26 @@ jobs:
APPIMAGE_X64_NAME="${{ steps.appimage-info.outputs.appimage_x64_name }}"
APPIMAGE_ARM64_NAME="${{ steps.appimage-info.outputs.appimage_arm64_name }}"
# Create submission directory
mkdir -p flatpak-submission
# Copy Flatpak files to submission directory
cp flatpak/com.karmaa.termix.yml flatpak-submission/
cp flatpak/com.karmaa.termix.desktop flatpak-submission/
cp flatpak/com.karmaa.termix.metainfo.xml flatpak-submission/
cp flatpak/flathub.json flatpak-submission/
# Copy and prepare icons
cp public/icon.svg flatpak-submission/com.karmaa.termix.svg
convert public/icon.png -resize 256x256 flatpak-submission/icon-256.png
convert public/icon.png -resize 128x128 flatpak-submission/icon-128.png
# Update manifest with version and checksums
sed -i "s/VERSION_PLACEHOLDER/$VERSION/g" flatpak-submission/com.karmaa.termix.yml
sed -i "s/CHECKSUM_X64_PLACEHOLDER/$CHECKSUM_X64/g" flatpak-submission/com.karmaa.termix.yml
sed -i "s/CHECKSUM_ARM64_PLACEHOLDER/$CHECKSUM_ARM64/g" flatpak-submission/com.karmaa.termix.yml
# Update metainfo with version and date
sed -i "s/VERSION_PLACEHOLDER/$VERSION/g" flatpak-submission/com.karmaa.termix.metainfo.xml
sed -i "s/DATE_PLACEHOLDER/$RELEASE_DATE/g" flatpak-submission/com.karmaa.termix.metainfo.xml
echo "✅ Flatpak submission files prepared for version $VERSION"
echo "x64 Download URL: https://github.com/Termix-SSH/Termix/releases/download/release-$VERSION-tag/$APPIMAGE_X64_NAME"
echo "arm64 Download URL: https://github.com/Termix-SSH/Termix/releases/download/release-$VERSION-tag/$APPIMAGE_ARM64_NAME"
- name: Create submission instructions
run: |
cat > flatpak-submission/SUBMISSION_INSTRUCTIONS.md << 'EOF'
# Flathub Submission Instructions for Termix
## Automatic Submission (Recommended)
All files needed for Flathub submission are in this artifact. Follow these steps:
1. **Fork the Flathub repository**:
- Go to https://github.com/flathub/flathub
- Click "Fork" button
2. **Clone your fork**:
```bash
git clone https://github.com/YOUR-USERNAME/flathub.git
cd flathub
git checkout -b com.karmaa.termix
```
3. **Copy all files from this artifact** to the root of your flathub fork
4. **Commit and push**:
```bash
git add .
git commit -m "Add Termix ${{ steps.package-version.outputs.version }}"
git push origin com.karmaa.termix
```
5. **Create Pull Request**:
- Go to https://github.com/YOUR-USERNAME/flathub
- Click "Compare & pull request"
- Submit PR to flathub/flathub
## Files in this submission:
- `com.karmaa.termix.yml` - Flatpak manifest
- `com.karmaa.termix.desktop` - Desktop entry
- `com.karmaa.termix.metainfo.xml` - AppStream metadata
- `flathub.json` - Flathub configuration
- `com.karmaa.termix.svg` - SVG icon
- `icon-256.png` - 256x256 icon
- `icon-128.png` - 128x128 icon
## Version Information:
- Version: ${{ steps.package-version.outputs.version }}
- Release Date: ${{ steps.package-version.outputs.release_date }}
- x64 AppImage SHA256: ${{ steps.appimage-info.outputs.checksum_x64 }}
- arm64 AppImage SHA256: ${{ steps.appimage-info.outputs.checksum_arm64 }}
## After Submission:
1. Flathub maintainers will review your submission (usually 1-5 days)
2. They may request changes - be responsive to feedback
3. Once approved, Termix will be available via: `flatpak install flathub com.karmaa.termix`
## Resources:
- [Flathub Submission Guidelines](https://docs.flathub.org/docs/for-app-authors/submission)
- [Flatpak Documentation](https://docs.flatpak.org/)
EOF
echo "✅ Created submission instructions"
- name: List submission files
run: |
echo "Flatpak submission files:"
ls -la flatpak-submission/
- name: Upload Flatpak submission as artifact
@@ -884,19 +682,6 @@ jobs:
path: flatpak-submission/*
retention-days: 30
- name: Display next steps
run: |
echo ""
echo "🎉 Flatpak submission files ready!"
echo ""
echo "📦 Download the 'flatpak-submission' artifact and follow SUBMISSION_INSTRUCTIONS.md"
echo ""
echo "Quick summary:"
echo "1. Fork https://github.com/flathub/flathub"
echo "2. Copy artifact files to your fork"
echo "3. Create PR to flathub/flathub"
echo ""
submit-to-homebrew:
runs-on: macos-latest
if: github.event.inputs.artifact_destination == 'submit'
@@ -915,7 +700,6 @@ jobs:
run: |
VERSION=$(node -p "require('./package.json').version")
echo "version=$VERSION" >> $GITHUB_OUTPUT
echo "Building Homebrew Cask for version: $VERSION"
- name: Download macOS Universal DMG artifact
uses: actions/download-artifact@v4
@@ -933,8 +717,6 @@ jobs:
echo "dmg_name=$DMG_NAME" >> $GITHUB_OUTPUT
echo "checksum=$CHECKSUM" >> $GITHUB_OUTPUT
echo "DMG File: $DMG_NAME"
echo "SHA256: $CHECKSUM"
- name: Prepare Homebrew submission files
run: |
@@ -942,155 +724,24 @@ jobs:
CHECKSUM="${{ steps.dmg-info.outputs.checksum }}"
DMG_NAME="${{ steps.dmg-info.outputs.dmg_name }}"
# Create submission directory
mkdir -p homebrew-submission/Casks/t
# Copy Homebrew cask file
cp homebrew/termix.rb homebrew-submission/Casks/t/termix.rb
cp homebrew/README.md homebrew-submission/
# Update cask with version and checksum
sed -i '' "s/VERSION_PLACEHOLDER/$VERSION/g" homebrew-submission/Casks/t/termix.rb
sed -i '' "s/CHECKSUM_PLACEHOLDER/$CHECKSUM/g" homebrew-submission/Casks/t/termix.rb
echo "✅ Homebrew Cask prepared for version $VERSION"
echo "Download URL: https://github.com/Termix-SSH/Termix/releases/download/release-$VERSION-tag/$DMG_NAME"
- name: Verify Cask syntax
run: |
# Install Homebrew if not present (should be on macos-latest)
if ! command -v brew &> /dev/null; then
echo "Installing Homebrew..."
/bin/bash -c "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/HEAD/install.sh)"
fi
# Basic syntax check
ruby -c homebrew-submission/Casks/t/termix.rb
echo "✅ Cask syntax is valid"
- name: Create submission instructions
run: |
cat > homebrew-submission/SUBMISSION_INSTRUCTIONS.md << 'EOF'
# Homebrew Cask Submission Instructions for Termix
## Option 1: Submit to Official Homebrew Cask (Recommended)
### Prerequisites
- macOS with Homebrew installed
- GitHub account
### Steps
1. **Fork the Homebrew Cask repository**:
- Go to https://github.com/Homebrew/homebrew-cask
- Click "Fork" button
2. **Clone your fork**:
```bash
git clone https://github.com/YOUR-USERNAME/homebrew-cask.git
cd homebrew-cask
git checkout -b termix
```
3. **Copy the cask file**:
- Copy `Casks/t/termix.rb` from this artifact to your fork at `Casks/t/termix.rb`
- Note: Casks are organized by first letter in subdirectories
4. **Test the cask locally**:
```bash
brew install --cask ./Casks/t/termix.rb
brew uninstall --cask termix
```
5. **Run audit checks**:
```bash
brew audit --cask --online ./Casks/t/termix.rb
brew style ./Casks/t/termix.rb
```
6. **Commit and push**:
```bash
git add Casks/t/termix.rb
git commit -m "Add Termix ${{ steps.package-version.outputs.version }}"
git push origin termix
```
7. **Create Pull Request**:
- Go to https://github.com/YOUR-USERNAME/homebrew-cask
- Click "Compare & pull request"
- Fill in the PR template
- Submit to Homebrew/homebrew-cask
### PR Requirements
Your PR should include:
- Clear commit message: "Add Termix X.Y.Z" or "Update Termix to X.Y.Z"
- All audit checks passing
- Working download URL
- Valid SHA256 checksum
## Option 2: Create Your Own Tap (Alternative)
If you want more control and faster updates:
1. **Create a tap repository**:
- Create repo: `Termix-SSH/homebrew-termix`
- Add `Casks/termix.rb` to the repo
2. **Users install with**:
```bash
brew tap termix-ssh/termix
brew install --cask termix
```
### Advantages of Custom Tap
- No approval process
- Instant updates
- Full control
- Can include beta versions
### Disadvantages
- Less discoverable
- Users must add tap first
- You maintain it yourself
## Files in this submission:
- `Casks/t/termix.rb` - Homebrew Cask formula
- `README.md` - Detailed documentation
- `SUBMISSION_INSTRUCTIONS.md` - This file
## Version Information:
- Version: ${{ steps.package-version.outputs.version }}
- DMG SHA256: ${{ steps.dmg-info.outputs.checksum }}
- DMG URL: https://github.com/Termix-SSH/Termix/releases/download/release-${{ steps.package-version.outputs.version }}-tag/${{ steps.dmg-info.outputs.dmg_name }}
## After Submission:
### Official Homebrew Cask:
1. Maintainers will review (usually 24-48 hours)
2. May request changes or fixes
3. Once merged, users can install with: `brew install --cask termix`
4. Homebrew bot will auto-update for future releases
### Custom Tap:
1. Push to your tap repository
2. Immediately available to users
3. Update the cask file for each new release
## Resources:
- [Homebrew Cask Documentation](https://docs.brew.sh/Cask-Cookbook)
- [Acceptable Casks](https://docs.brew.sh/Acceptable-Casks)
- [How to Open a PR](https://docs.brew.sh/How-To-Open-a-Homebrew-Pull-Request)
EOF
echo "✅ Created submission instructions"
- name: List submission files
run: |
echo "Homebrew submission files:"
find homebrew-submission -type f
- name: Upload Homebrew submission as artifact
@@ -1100,18 +751,6 @@ jobs:
path: homebrew-submission/*
retention-days: 30
- name: Display next steps
run: |
echo ""
echo "🍺 Homebrew Cask ready!"
echo ""
echo "📦 Download the 'homebrew-submission' artifact and follow SUBMISSION_INSTRUCTIONS.md"
echo ""
echo "Quick summary:"
echo "Option 1 (Recommended): Fork https://github.com/Homebrew/homebrew-cask and submit PR"
echo "Option 2 (Alternative): Create your own tap at Termix-SSH/homebrew-termix"
echo ""
upload-to-release:
runs-on: blacksmith-4vcpu-ubuntu-2404
if: github.event.inputs.artifact_destination == 'release'
@@ -1128,49 +767,25 @@ jobs:
- name: Get latest release
id: get_release
run: |
echo "Fetching latest release from ${{ github.repository }}..."
LATEST_RELEASE=$(gh release list --repo ${{ github.repository }} --limit 1 --json tagName,name,isLatest -q '.[0]')
if [ -z "$LATEST_RELEASE" ]; then
echo "ERROR: No releases found in ${{ github.repository }}"
exit 1
fi
RELEASE_TAG=$(echo "$LATEST_RELEASE" | jq -r '.tagName')
RELEASE_NAME=$(echo "$LATEST_RELEASE" | jq -r '.name')
echo "tag=$RELEASE_TAG" >> $GITHUB_OUTPUT
echo "name=$RELEASE_NAME" >> $GITHUB_OUTPUT
echo "Latest release: $RELEASE_NAME ($RELEASE_TAG)"
env:
GH_TOKEN: ${{ github.token }}
- name: Display artifact structure
run: |
echo "Artifact structure:"
ls -R artifacts/
- name: Upload artifacts to latest release
run: |
RELEASE_TAG="${{ steps.get_release.outputs.tag }}"
echo "Uploading artifacts to release: $RELEASE_TAG"
echo ""
cd artifacts
for dir in */; do
echo "Processing directory: $dir"
cd "$dir"
for file in *; do
for file in *;
do
if [ -f "$file" ]; then
echo "Uploading: $file"
gh release upload "$RELEASE_TAG" "$file" --repo ${{ github.repository }} --clobber
echo "✓ $file uploaded successfully"
fi
done
cd ..
done
echo ""
echo "All artifacts uploaded to: https://github.com/${{ github.repository }}/releases/tag/$RELEASE_TAG"
env:
GH_TOKEN: ${{ github.token }}

View File

@@ -16,7 +16,8 @@
<bugTrackerUrl>https://github.com/Termix-SSH/Support/issues</bugTrackerUrl>
<tags>docker ssh self-hosted file-management ssh-tunnel termix server-management terminal</tags>
<summary>Termix is a web-based server management platform with SSH terminal, tunneling, and file editing capabilities.</summary>
<description>Termix is an open-source, forever-free, self-hosted all-in-one server management platform. It provides a web-based solution for managing your servers and infrastructure through a single, intuitive interface.
<description>
Termix is an open-source, forever-free, self-hosted all-in-one server management platform. It provides a web-based solution for managing your servers and infrastructure through a single, intuitive interface.
Termix offers:
- SSH terminal access
@@ -24,7 +25,8 @@ Termix offers:
- Remote file management
- Server monitoring and management
This package installs the desktop application version of Termix.</description>
This package installs the desktop application version of Termix.
</description>
<releaseNotes>https://github.com/Termix-SSH/Termix/releases</releaseNotes>
</metadata>
<files>

View File

@@ -29,6 +29,5 @@ if ($key.Count -eq 1) {
} elseif ($key.Count -gt 1) {
Write-Warning "$($key.Count) matches found!"
Write-Warning "To prevent accidental data loss, no programs will be uninstalled."
Write-Warning "Please alert package maintainer the following keys were matched:"
$key | % {Write-Warning "- $($_.DisplayName)"}
}

View File

@@ -23,18 +23,14 @@ http {
listen ${PORT};
server_name localhost;
# X-Frame-Options removed to allow Electron iframe embedding
# add_header X-Frame-Options DENY always;
add_header X-Content-Type-Options nosniff always;
add_header X-XSS-Protection "1; mode=block" always;
# CORS headers for Electron iframe - reflect the origin for credentials support
add_header Access-Control-Allow-Origin $http_origin always;
add_header Access-Control-Allow-Methods "GET, POST, PUT, DELETE, OPTIONS, PATCH" always;
add_header Access-Control-Allow-Headers "Origin, X-Requested-With, Content-Type, Accept, Authorization" always;
add_header Access-Control-Allow-Credentials "true" always;
# Serve static assets directly
location ~* \.(js|css|png|jpg|jpeg|gif|ico|svg|woff|woff2|ttf|eot)$ {
root /usr/share/nginx/html;
expires 1y;

View File

@@ -1,4 +1,11 @@
const { app, BrowserWindow, shell, ipcMain, dialog, Menu } = require("electron");
const {
app,
BrowserWindow,
shell,
ipcMain,
dialog,
Menu,
} = require("electron");
const path = require("path");
const fs = require("fs");
const os = require("os");
@@ -57,37 +64,38 @@ function createWindow() {
}
if (isDev) {
mainWindow.loadURL("http://localhost:5173");
mainWindow.loadURL("http://localhost:5173");
mainWindow.webContents.openDevTools();
} else {
const indexPath = path.join(__dirname, "..", "dist", "index.html");
mainWindow.loadFile(indexPath);
}
// Allow iframes to load from any origin by removing X-Frame-Options headers
mainWindow.webContents.session.webRequest.onHeadersReceived(
(details, callback) => {
const headers = details.responseHeaders;
// Remove headers that block iframe embedding
if (headers) {
delete headers["x-frame-options"];
delete headers["X-Frame-Options"];
// Modify CSP to allow framing
if (headers["content-security-policy"]) {
headers["content-security-policy"] = headers["content-security-policy"]
.map(value => value.replace(/frame-ancestors[^;]*/gi, ''))
.filter(value => value.trim().length > 0);
headers["content-security-policy"] = headers[
"content-security-policy"
]
.map((value) => value.replace(/frame-ancestors[^;]*/gi, ""))
.filter((value) => value.trim().length > 0);
if (headers["content-security-policy"].length === 0) {
delete headers["content-security-policy"];
}
}
if (headers["Content-Security-Policy"]) {
headers["Content-Security-Policy"] = headers["Content-Security-Policy"]
.map(value => value.replace(/frame-ancestors[^;]*/gi, ''))
.filter(value => value.trim().length > 0);
headers["Content-Security-Policy"] = headers[
"Content-Security-Policy"
]
.map((value) => value.replace(/frame-ancestors[^;]*/gi, ""))
.filter((value) => value.trim().length > 0);
if (headers["Content-Security-Policy"].length === 0) {
delete headers["Content-Security-Policy"];
@@ -96,7 +104,7 @@ function createWindow() {
}
callback({ responseHeaders: headers });
}
},
);
mainWindow.once("ready-to-show", () => {

View File

@@ -1,9 +1,9 @@
app-id: com.karmaa.termix
runtime: org.freedesktop.Platform
runtime-version: '23.08'
runtime-version: "23.08"
sdk: org.freedesktop.Sdk
base: org.electronjs.Electron2.BaseApp
base-version: '23.08'
base-version: "23.08"
command: termix
separate-locales: false

View File

@@ -1,9 +1,6 @@
#!/bin/bash
set -e
# This script prepares the Flatpak submission files
# It should be run from the repository root
VERSION="$1"
CHECKSUM="$2"
RELEASE_DATE="$3"
@@ -16,39 +13,22 @@ fi
# Report which release we are packaging for Flathub.
printf '%s\n' "Preparing Flatpak submission for version $VERSION"

# Stage the SVG icon next to the Flatpak manifest.
cp public/icon.svg flatpak/com.karmaa.termix.svg
printf '%s\n' "✓ Copied SVG icon"

# Produce the raster icons. With ImageMagick available we resize the
# source PNG to each target size; without it we fall back to copying
# the original PNG under both names.
if command -v convert &> /dev/null; then
  for size in 256 128; do
    convert public/icon.png -resize "${size}x${size}" "flatpak/icon-${size}.png"
  done
  printf '%s\n' "✓ Generated PNG icons"
else
  for size in 256 128; do
    cp public/icon.png "flatpak/icon-${size}.png"
  done
  printf '%s\n' "⚠ ImageMagick not found, using original icon"
fi

# Fill in the version and checksum placeholders in the Flatpak manifest.
sed -i -e "s/VERSION_PLACEHOLDER/$VERSION/g" \
       -e "s/CHECKSUM_PLACEHOLDER/$CHECKSUM/g" flatpak/com.karmaa.termix.yml
printf '%s\n' "✓ Updated manifest with version $VERSION"

# Fill in the version and release-date placeholders in the metainfo.
sed -i -e "s/VERSION_PLACEHOLDER/$VERSION/g" \
       -e "s/DATE_PLACEHOLDER/$RELEASE_DATE/g" flatpak/com.karmaa.termix.metainfo.xml
printf '%s\n' "✓ Updated metainfo with version $VERSION and date $RELEASE_DATE"

# Final summary plus the manual follow-up steps for the Flathub PR.
cat <<'EOF'

✅ Flatpak submission files prepared!

Next steps:
1. Review the files in the flatpak/ directory
2. Fork https://github.com/flathub/flathub
3. Create a new branch named 'com.karmaa.termix'
4. Copy all files from flatpak/ to the root of your fork
5. Commit and push to your fork
6. Open a PR to flathub/flathub
EOF

View File

@@ -1,154 +0,0 @@
# Homebrew Cask for Termix
This directory contains the Homebrew Cask formula for installing Termix on macOS.
## Files
- **termix.rb** - Homebrew Cask formula
## What is a Homebrew Cask?
Homebrew Casks are used to install GUI macOS applications. Unlike formulae (which are for command-line tools), casks handle:
- Downloading DMG/PKG installers
- Installing .app bundles to /Applications
- Managing application preferences and cache cleanup
## Submission Options
You have two options for distributing Termix via Homebrew:
### Option 1: Submit to Official Homebrew Cask (Recommended)
Submit to the official homebrew-cask repository for maximum visibility.
**Advantages:**
- Discoverable by all Homebrew users
- Built-in update checking
- Official Homebrew support
**Process:**
1. Download the `homebrew-submission` artifact from GitHub Actions (when using "submit" option)
2. Fork https://github.com/Homebrew/homebrew-cask
3. Create a new branch: `git checkout -b termix`
4. Add the cask file: `Casks/t/termix.rb` (note the subdirectory by first letter)
5. Test locally: `brew install --cask ./Casks/t/termix.rb`
6. Run audit: `brew audit --cask --online ./Casks/t/termix.rb`
7. Commit and push to your fork
8. Create a PR to Homebrew/homebrew-cask
**Requirements for acceptance:**
- App must be stable (not beta/alpha)
- Source code must be public
- No analytics/tracking without opt-in
- Pass all brew audit checks
### Option 2: Create Your Own Tap
Create a custom Homebrew tap for more control and faster updates.
**Advantages:**
- Full control over updates
- No approval process
- Can include beta/alpha releases
**Process:**
1. Create a new repository: `Termix-SSH/homebrew-termix`
2. Add the cask file to: `Casks/termix.rb`
3. Users install with: `brew install --cask termix-ssh/termix/termix`
## Installation (for users)
### From Official Homebrew Cask (after approval):
```bash
brew install --cask termix
```
### From Custom Tap:
```bash
# Add the tap
brew tap termix-ssh/termix
# Install the cask
brew install --cask termix
```
## Updating the Cask
When you release a new version:
### For Official Homebrew Cask:
1. Homebrew bot usually auto-updates within hours
2. Or manually submit a PR with the new version/checksum
### For Custom Tap:
1. Update the version and sha256 in termix.rb
2. Commit and push to your tap repository
3. Users run: `brew upgrade --cask termix`
## Testing Locally
Before submitting, test the cask:
```bash
# Install from local file
brew install --cask ./homebrew/termix.rb
# Verify it works
open /Applications/Termix.app
# Uninstall
brew uninstall --cask termix
# Run audit checks
brew audit --cask --online ./homebrew/termix.rb
# Style check
brew style ./homebrew/termix.rb
```
## Automated Submission Preparation
The GitHub Actions workflow automatically prepares the Homebrew submission when you select "submit":
1. Builds macOS universal DMG
2. Calculates SHA256 checksum
3. Updates the cask file with version and checksum
4. Creates a `homebrew-submission` artifact
Download the artifact and follow the submission instructions included.
## Cask File Structure
The cask file (`termix.rb`) includes:
- **version** - Automatically set from package.json
- **sha256** - Checksum of the universal DMG for security
- **url** - Download URL from GitHub releases
- **name** - Display name
- **desc** - Short description
- **homepage** - Project homepage
- **livecheck** - Automatic update detection
- **app** - The .app bundle to install
- **zap** - Files to remove on complete uninstall
## Requirements
- macOS 10.15 (Catalina) or later
- Homebrew 4.0.0 or later
- Universal DMG must be code-signed and notarized (already handled by your build process)
## Resources
- [Homebrew Cask Documentation](https://docs.brew.sh/Cask-Cookbook)
- [Cask Submission Guidelines](https://github.com/Homebrew/homebrew-cask/blob/master/CONTRIBUTING.md)
- [Homebrew Formula Cookbook](https://docs.brew.sh/Formula-Cookbook)

View File

@@ -12,10 +12,8 @@ import type { AuthenticatedRequest } from "../types/index.js";
const app = express();
const authManager = AuthManager.getInstance();
// Track server start time
const serverStartTime = Date.now();
// In-memory rate limiter for activity logging
const activityRateLimiter = new Map<string, number>();
const RATE_LIMIT_MS = 1000; // 1 second window
@@ -60,7 +58,6 @@ app.use(express.json({ limit: "1mb" }));
app.use(authManager.createAuthMiddleware());
// Get server uptime
app.get("/uptime", async (req, res) => {
try {
const uptimeMs = Date.now() - serverStartTime;
@@ -80,7 +77,6 @@ app.get("/uptime", async (req, res) => {
}
});
// Get recent activity for current user
app.get("/activity/recent", async (req, res) => {
try {
const userId = (req as AuthenticatedRequest).userId;
@@ -112,7 +108,6 @@ app.get("/activity/recent", async (req, res) => {
}
});
// Log new activity
app.post("/activity/log", async (req, res) => {
try {
const userId = (req as AuthenticatedRequest).userId;
@@ -138,22 +133,18 @@ app.post("/activity/log", async (req, res) => {
});
}
// In-memory rate limiting to prevent duplicate requests
const rateLimitKey = `${userId}:${hostId}:${type}`;
const now = Date.now();
const lastLogged = activityRateLimiter.get(rateLimitKey);
if (lastLogged && now - lastLogged < RATE_LIMIT_MS) {
// Too soon after last request, reject as duplicate
return res.json({
message: "Activity already logged recently (rate limited)",
});
}
// Update rate limiter
activityRateLimiter.set(rateLimitKey, now);
// Clean up old entries from rate limiter (keep it from growing indefinitely)
if (activityRateLimiter.size > 10000) {
const entriesToDelete: string[] = [];
for (const [key, timestamp] of activityRateLimiter.entries()) {
@@ -164,7 +155,6 @@ app.post("/activity/log", async (req, res) => {
entriesToDelete.forEach((key) => activityRateLimiter.delete(key));
}
// Verify the host belongs to the user
const hosts = await SimpleDBOps.select(
getDb()
.select()
@@ -178,7 +168,6 @@ app.post("/activity/log", async (req, res) => {
return res.status(404).json({ error: "Host not found" });
}
// Insert new activity
const result = (await SimpleDBOps.insert(
recentActivity,
"recent_activity",
@@ -191,7 +180,6 @@ app.post("/activity/log", async (req, res) => {
userId,
)) as unknown as { id: number };
// Keep only the last 100 activities per user to prevent bloat
const allActivities = await SimpleDBOps.select(
getDb()
.select()
@@ -216,7 +204,6 @@ app.post("/activity/log", async (req, res) => {
}
});
// Reset recent activity for current user
app.delete("/activity/reset", async (req, res) => {
try {
const userId = (req as AuthenticatedRequest).userId;
@@ -228,7 +215,6 @@ app.delete("/activity/reset", async (req, res) => {
});
}
// Delete all activities for the user
await SimpleDBOps.delete(
recentActivity,
"recent_activity",

View File

@@ -915,7 +915,6 @@ app.post(
const isOidcUser = !!userRecords[0].is_oidc;
if (!isOidcUser) {
// Local accounts still prove knowledge of the password so their DEK can be derived again.
if (!password) {
return res.status(400).json({
error: "Password required for import",
@@ -928,7 +927,6 @@ app.post(
return res.status(401).json({ error: "Invalid password" });
}
} else if (!DataCrypto.getUserDataKey(userId)) {
// OIDC users skip the password prompt; make sure their DEK is unlocked via the OIDC session.
const oidcUnlocked = await authManager.authenticateOIDCUser(userId);
if (!oidcUnlocked) {
return res.status(403).json({
@@ -947,7 +945,6 @@ app.post(
let userDataKey = DataCrypto.getUserDataKey(userId);
if (!userDataKey && isOidcUser) {
// authenticateOIDCUser lazily provisions the session key; retry the fetch when it succeeds.
const oidcUnlocked = await authManager.authenticateOIDCUser(userId);
if (oidcUnlocked) {
userDataKey = DataCrypto.getUserDataKey(userId);
@@ -1425,7 +1422,6 @@ app.use(
err: unknown,
req: express.Request,
res: express.Response,
// eslint-disable-next-line @typescript-eslint/no-unused-vars
_next: express.NextFunction,
) => {
apiLogger.error("Unhandled error in request", err, {
@@ -1482,17 +1478,13 @@ app.get(
if (status.hasUnencryptedDb) {
try {
unencryptedSize = fs.statSync(dbPath).size;
} catch {
// Ignore file access errors
}
} catch {}
}
if (status.hasEncryptedDb) {
try {
encryptedSize = fs.statSync(encryptedDbPath).size;
} catch {
// Ignore file access errors
}
} catch {}
}
res.json({

View File

@@ -12,10 +12,6 @@ import { DatabaseSaveTrigger } from "../../utils/database-save-trigger.js";
const dataDir = process.env.DATA_DIR || "./db/data";
const dbDir = path.resolve(dataDir);
if (!fs.existsSync(dbDir)) {
databaseLogger.info(`Creating database directory`, {
operation: "db_init",
path: dbDir,
});
fs.mkdirSync(dbDir, { recursive: true });
}
@@ -31,7 +27,6 @@ let sqlite: Database.Database;
async function initializeDatabaseAsync(): Promise<void> {
const systemCrypto = SystemCrypto.getInstance();
// Ensure database key is initialized
await systemCrypto.getDatabaseKey();
if (enableFileEncryption) {
try {
@@ -41,18 +36,11 @@ async function initializeDatabaseAsync(): Promise<void> {
memoryDatabase = new Database(decryptedBuffer);
// Count sessions after loading
try {
const sessionCount = memoryDatabase
.prepare("SELECT COUNT(*) as count FROM sessions")
.get() as { count: number };
databaseLogger.info("Database loaded from encrypted file", {
operation: "db_load",
sessionCount: sessionCount.count,
bufferSize: decryptedBuffer.length,
});
} catch (countError) {
// Ignore count errors
}
} else {
const migration = new DatabaseMigration(dataDir);
@@ -297,9 +285,6 @@ async function initializeCompleteDatabase(): Promise<void> {
try {
sqlite.prepare("DELETE FROM sessions").run();
databaseLogger.info("All sessions cleared on startup", {
operation: "db_init_session_cleanup",
});
} catch (e) {
databaseLogger.warn("Could not clear sessions on startup", {
operation: "db_init_session_cleanup_failed",
@@ -453,7 +438,6 @@ const migrateSchema = () => {
addColumnIfNotExists("file_manager_pinned", "host_id", "INTEGER NOT NULL");
addColumnIfNotExists("file_manager_shortcuts", "host_id", "INTEGER NOT NULL");
// Create sessions table if it doesn't exist (for existing databases)
try {
sqlite
.prepare("SELECT id FROM sessions LIMIT 1")
@@ -473,9 +457,6 @@ const migrateSchema = () => {
FOREIGN KEY (user_id) REFERENCES users (id)
);
`);
databaseLogger.info("Sessions table created via migration", {
operation: "schema_migration",
});
} catch (createError) {
databaseLogger.warn("Failed to create sessions table", {
operation: "schema_migration",
@@ -499,18 +480,11 @@ async function saveMemoryDatabaseToFile() {
fs.mkdirSync(dataDir, { recursive: true });
}
// Count sessions before saving
try {
const sessionCount = memoryDatabase
.prepare("SELECT COUNT(*) as count FROM sessions")
.get() as { count: number };
databaseLogger.info("Saving database to file", {
operation: "db_save",
sessionCount: sessionCount.count,
bufferSize: buffer.length,
});
} catch (countError) {
// Ignore count errors
}
if (enableFileEncryption) {
@@ -605,18 +579,15 @@ async function cleanupDatabase() {
try {
fs.unlinkSync(path.join(tempDir, file));
} catch {
// Ignore cleanup errors
}
}
try {
fs.rmdirSync(tempDir);
} catch {
// Ignore cleanup errors
}
}
} catch {
// Ignore cleanup errors
}
}
@@ -625,7 +596,6 @@ process.on("exit", () => {
try {
sqlite.close();
} catch {
// Ignore close errors on exit
}
}
});

View File

@@ -336,14 +336,10 @@ router.post("/oidc-config", authenticateJWT, async (req, res) => {
userId,
adminDataKey,
);
authLogger.info("OIDC configuration encrypted with admin data key", {
operation: "oidc_config_encrypt",
userId,
});
} else {
encryptedConfig = {
...config,
client_secret: `encrypted:${Buffer.from(client_secret).toString("base64")}`, // Simple base64 encoding
client_secret: `encrypted:${Buffer.from(client_secret).toString("base64")}`,
};
authLogger.warn(
"OIDC configuration stored with basic encoding - admin should re-save with password",
@@ -421,7 +417,6 @@ router.get("/oidc-config", async (req, res) => {
const config = JSON.parse((row as Record<string, unknown>).value as string);
// Only return public fields needed for login page
const publicConfig = {
client_id: config.client_id,
issuer_url: config.issuer_url,
@@ -661,7 +656,6 @@ router.get("/oidc/callback", async (req, res) => {
config.client_id,
);
} catch {
// Fallback to manual decoding
try {
const parts = (tokenData.id_token as string).split(".");
if (parts.length === 3) {
@@ -812,7 +806,6 @@ router.get("/oidc/callback", async (req, res) => {
});
}
// Detect platform and device info
const deviceInfo = parseUserAgent(req);
const token = await authManager.generateJWTToken(userRecord.id, {
deviceType: deviceInfo.type,
@@ -838,7 +831,6 @@ router.get("/oidc/callback", async (req, res) => {
const redirectUrl = new URL(frontendUrl);
redirectUrl.searchParams.set("success", "true");
// Calculate max age based on device type
const maxAge =
deviceInfo.type === "desktop" || deviceInfo.type === "mobile"
? 30 * 24 * 60 * 60 * 1000
@@ -965,7 +957,6 @@ router.post("/login", async (req, res) => {
});
}
// Detect platform and device info
const deviceInfo = parseUserAgent(req);
const token = await authManager.generateJWTToken(userRecord.id, {
deviceType: deviceInfo.type,
@@ -995,7 +986,6 @@ router.post("/login", async (req, res) => {
response.token = token;
}
// Calculate max age based on device type
const maxAge =
deviceInfo.type === "desktop" || deviceInfo.type === "mobile"
? 30 * 24 * 60 * 60 * 1000
@@ -1018,7 +1008,6 @@ router.post("/logout", authenticateJWT, async (req, res) => {
const userId = authReq.userId;
if (userId) {
// Get sessionId from JWT if available
const token =
req.cookies?.jwt || req.headers["authorization"]?.split(" ")[1];
let sessionId: string | undefined;
@@ -1027,9 +1016,7 @@ router.post("/logout", authenticateJWT, async (req, res) => {
try {
const payload = await authManager.verifyJWTToken(token);
sessionId = payload?.sessionId;
} catch (error) {
// Ignore token verification errors during logout
}
} catch (error) {}
}
await authManager.logoutUser(userId, sessionId);
@@ -1435,7 +1422,6 @@ router.post("/complete-reset", async (req, res) => {
const saltRounds = parseInt(process.env.SALT || "10", 10);
const password_hash = await bcrypt.hash(newPassword, saltRounds);
// Check if user is logged in and data is unlocked
let userIdFromJwt: string | null = null;
const cookie = req.cookies?.jwt;
let header: string | undefined;
@@ -1452,7 +1438,6 @@ router.post("/complete-reset", async (req, res) => {
}
if (userIdFromJwt === userId) {
// Logged-in user: preserve data
try {
const success = await authManager.resetUserPasswordWithPreservedDEK(
userId,
@@ -1491,15 +1476,12 @@ router.post("/complete-reset", async (req, res) => {
});
}
} else {
// Logged-out user: data is lost
await db
.update(users)
.set({ password_hash })
.where(eq(users.username, username));
try {
// Delete all encrypted data since we're creating a new DEK
// The old DEK is lost, so old encrypted data becomes unreadable
await db
.delete(sshCredentialUsage)
.where(eq(sshCredentialUsage.userId, userId));
@@ -1524,11 +1506,9 @@ router.post("/complete-reset", async (req, res) => {
.delete(sshCredentials)
.where(eq(sshCredentials.userId, userId));
// Now setup new encryption with new DEK
await authManager.registerUser(userId, newPassword);
authManager.logoutUser(userId);
// Clear TOTP settings
await db
.update(users)
.set({
@@ -1597,13 +1577,11 @@ router.post("/change-password", authenticateJWT, async (req, res) => {
return res.status(404).json({ error: "User not found" });
}
// Verify old password for login hash
const isMatch = await bcrypt.compare(oldPassword, user[0].password_hash);
if (!isMatch) {
return res.status(401).json({ error: "Incorrect current password" });
}
// Change encryption keys and login hash
const success = await authManager.changeUserPassword(
userId,
oldPassword,
@@ -1619,7 +1597,7 @@ router.post("/change-password", authenticateJWT, async (req, res) => {
const password_hash = await bcrypt.hash(newPassword, saltRounds);
await db.update(users).set({ password_hash }).where(eq(users.id, userId));
authManager.logoutUser(userId); // Log out user for security
authManager.logoutUser(userId);
res.json({ message: "Password changed successfully. Please log in again." });
});
@@ -1836,7 +1814,6 @@ router.post("/totp/verify-login", async (req, res) => {
.where(eq(users.id, userRecord.id));
}
// Detect platform and device info
const deviceInfo = parseUserAgent(req);
const token = await authManager.generateJWTToken(userRecord.id, {
deviceType: deviceInfo.type,
@@ -1867,7 +1844,6 @@ router.post("/totp/verify-login", async (req, res) => {
response.token = token;
}
// Calculate max age based on device type
const maxAge =
deviceInfo.type === "desktop" || deviceInfo.type === "mobile"
? 30 * 24 * 60 * 60 * 1000
@@ -2230,7 +2206,6 @@ router.get("/data-status", authenticateJWT, async (req, res) => {
const userId = (req as AuthenticatedRequest).userId;
try {
// Data lock functionality has been removed - always return unlocked for authenticated users
res.json({
unlocked: true,
message: "Data is unlocked",
@@ -2320,10 +2295,8 @@ router.get("/sessions", authenticateJWT, async (req, res) => {
let sessionList;
if (userRecord.is_admin) {
// Admin: Get all sessions with user info
sessionList = await authManager.getAllSessions();
// Join with users to get usernames
const enrichedSessions = await Promise.all(
sessionList.map(async (session) => {
const sessionUser = await db
@@ -2341,7 +2314,6 @@ router.get("/sessions", authenticateJWT, async (req, res) => {
return res.json({ sessions: enrichedSessions });
} else {
// Regular user: Get only their own sessions
sessionList = await authManager.getUserSessions(userId);
return res.json({ sessions: sessionList });
}
@@ -2369,7 +2341,6 @@ router.delete("/sessions/:sessionId", authenticateJWT, async (req, res) => {
const userRecord = user[0];
// Check if session exists
const sessionRecords = await db
.select()
.from(sessions)
@@ -2382,7 +2353,6 @@ router.delete("/sessions/:sessionId", authenticateJWT, async (req, res) => {
const session = sessionRecords[0];
// Non-admin users can only revoke their own sessions
if (!userRecord.is_admin && session.userId !== userId) {
return res
.status(403)
@@ -2421,19 +2391,15 @@ router.post("/sessions/revoke-all", authenticateJWT, async (req, res) => {
const userRecord = user[0];
// Determine which user's sessions to revoke
let revokeUserId = userId;
if (targetUserId && userRecord.is_admin) {
// Admin can revoke any user's sessions
revokeUserId = targetUserId;
} else if (targetUserId && targetUserId !== userId) {
// Non-admin can only revoke their own sessions
return res.status(403).json({
error: "Not authorized to revoke sessions for other users",
});
}
// Get current session ID if needed
let currentSessionId: string | undefined;
if (exceptCurrent) {
const token =

View File

@@ -120,9 +120,7 @@ function cleanupSession(sessionId: string) {
if (session) {
try {
session.client.end();
} catch {
// Ignore connection close errors
}
} catch {}
clearTimeout(session.timeout);
delete sshSessions[sessionId];
}
@@ -352,8 +350,6 @@ app.post("/ssh/file_manager/ssh/connect", async (req, res) => {
}
config.password = resolvedCredentials.password;
} else if (resolvedCredentials.authType === "none") {
// Use authHandler to control authentication flow
// This ensures we only try keyboard-interactive, not password auth
config.authHandler = (
methodsLeft: string[] | null,
partialSuccess: boolean,
@@ -409,7 +405,6 @@ app.post("/ssh/file_manager/ssh/connect", async (req, res) => {
scheduleSessionCleanup(sessionId);
res.json({ status: "success", message: "SSH connection established" });
// Log activity to dashboard API
if (hostId && userId) {
(async () => {
try {
@@ -458,14 +453,6 @@ app.post("/ssh/file_manager/ssh/connect", async (req, res) => {
responseSent = true;
if (authMethodNotAvailable && resolvedCredentials.authType === "none") {
fileLogger.info(
"Keyboard-interactive not available, requesting credentials",
{
operation: "file_connect_auth_not_available",
sessionId,
hostId,
},
);
res.status(200).json({
status: "auth_required",
message:
@@ -557,51 +544,26 @@ app.post("/ssh/file_manager/ssh/connect", async (req, res) => {
prompt: prompts[totpPromptIndex].prompt,
});
} else {
// Non-TOTP prompts (password, etc.)
const hasStoredPassword =
resolvedCredentials.password &&
resolvedCredentials.authType !== "none";
// Check if this is a password prompt
const passwordPromptIndex = prompts.findIndex((p) =>
/password/i.test(p.prompt),
);
// If no stored password (including authType "none"), prompt the user
if (!hasStoredPassword && passwordPromptIndex !== -1) {
if (responseSent) {
// Connection is already being handled, don't send duplicate responses
fileLogger.info(
"Skipping duplicate password prompt - response already sent",
{
operation: "keyboard_interactive_skip",
hostId,
sessionId,
},
);
return;
}
responseSent = true;
if (pendingTOTPSessions[sessionId]) {
// Session already waiting for TOTP, don't override
fileLogger.info("Skipping password prompt - TOTP session pending", {
operation: "keyboard_interactive_skip",
hostId,
sessionId,
});
return;
}
keyboardInteractiveResponded = true;
fileLogger.info("Requesting password from user (authType: none)", {
operation: "keyboard_interactive_password",
hostId,
sessionId,
prompt: prompts[passwordPromptIndex].prompt,
});
pendingTOTPSessions[sessionId] = {
client,
finish,
@@ -627,7 +589,6 @@ app.post("/ssh/file_manager/ssh/connect", async (req, res) => {
return;
}
// Auto-respond with stored credentials if available
const responses = prompts.map((p) => {
if (/password/i.test(p.prompt) && resolvedCredentials.password) {
return resolvedCredentials.password;
@@ -679,9 +640,7 @@ app.post("/ssh/file_manager/ssh/connect-totp", async (req, res) => {
delete pendingTOTPSessions[sessionId];
try {
session.client.end();
} catch {
// Ignore errors when closing timed out session
}
} catch {}
fileLogger.warn("TOTP session timeout before code submission", {
operation: "file_totp_verify",
sessionId,
@@ -693,7 +652,6 @@ app.post("/ssh/file_manager/ssh/connect-totp", async (req, res) => {
.json({ error: "TOTP session timeout. Please reconnect." });
}
// Build responses for ALL prompts, just like in terminal.ts
const responses = (session.prompts || []).map((p, index) => {
if (index === session.totpPromptIndex) {
return totpCode;
@@ -704,22 +662,9 @@ app.post("/ssh/file_manager/ssh/connect-totp", async (req, res) => {
return "";
});
fileLogger.info("Full keyboard-interactive response for file manager", {
operation: "file_totp_full_response",
sessionId,
userId,
totalPrompts: session.prompts?.length || 0,
responsesProvided: responses.filter((r) => r !== "").length,
});
let responseSent = false;
let responseTimeout: NodeJS.Timeout;
// Don't remove event listeners - just add our own 'once' handlers
// The ssh2 library manages multiple listeners correctly
// Removing them can cause the connection to become unstable
// CRITICAL: Attach event listeners BEFORE calling finish() to avoid race condition
session.client.once("ready", () => {
if (responseSent) return;
responseSent = true;
@@ -727,8 +672,6 @@ app.post("/ssh/file_manager/ssh/connect-totp", async (req, res) => {
delete pendingTOTPSessions[sessionId];
// Add a small delay to let SSH2 stabilize the connection after keyboard-interactive
// This prevents "Not connected" errors when immediately trying to exec commands
setTimeout(() => {
sshSessions[sessionId] = {
client: session.client,
@@ -742,7 +685,6 @@ app.post("/ssh/file_manager/ssh/connect-totp", async (req, res) => {
message: "TOTP verified, SSH connection established",
});
// Log activity to dashboard API after connection is stable
if (session.hostId && session.userId) {
(async () => {
try {
@@ -789,7 +731,7 @@ app.post("/ssh/file_manager/ssh/connect-totp", async (req, res) => {
}
})();
}
}, 200); // Give SSH2 connection 200ms to fully stabilize after keyboard-interactive
}, 200);
});
session.client.once("error", (err) => {
@@ -822,7 +764,6 @@ app.post("/ssh/file_manager/ssh/connect-totp", async (req, res) => {
}
}, 60000);
// Now that event listeners are attached, submit the TOTP response
session.finish(responses);
});
@@ -2493,15 +2434,6 @@ app.post("/ssh/file_manager/ssh/executeFile", async (req, res) => {
: code;
const cleanOutput = output.replace(/EXIT_CODE:\d+$/, "").trim();
fileLogger.info("File execution completed", {
operation: "execute_file",
sessionId,
filePath,
exitCode: actualExitCode,
outputLength: cleanOutput.length,
errorLength: errorOutput.length,
});
res.json({
success: true,
exitCode: actualExitCode,

View File

@@ -112,8 +112,6 @@ class SSHConnectionPool {
);
if (totpPrompt) {
// Record TOTP failure as permanent - never retry
// The recordFailure method will log this once
authFailureTracker.recordFailure(host.id, "TOTP", true);
client.end();
reject(
@@ -158,9 +156,7 @@ class SSHConnectionPool {
if (!conn.inUse && now - conn.lastUsed > maxAge) {
try {
conn.client.end();
} catch {
// Ignore errors when closing stale connections
}
} catch {}
return false;
}
return true;
@@ -180,9 +176,7 @@ class SSHConnectionPool {
for (const conn of connections) {
try {
conn.client.end();
} catch {
// Ignore errors when closing connections during cleanup
}
} catch {}
}
}
this.connections.clear();
@@ -220,9 +214,7 @@ class RequestQueue {
if (request) {
try {
await request();
} catch {
// Ignore errors from queued requests
}
} catch {}
}
}
@@ -272,13 +264,13 @@ interface AuthFailureRecord {
count: number;
lastFailure: number;
reason: "TOTP" | "AUTH" | "TIMEOUT";
permanent: boolean; // If true, don't retry at all
permanent: boolean;
}
class AuthFailureTracker {
private failures = new Map<number, AuthFailureRecord>();
private maxRetries = 3;
private backoffBase = 60000; // 1 minute base backoff
private backoffBase = 60000;
recordFailure(
hostId: number,
@@ -305,17 +297,14 @@ class AuthFailureTracker {
const record = this.failures.get(hostId);
if (!record) return false;
// Always skip TOTP hosts
if (record.reason === "TOTP" || record.permanent) {
return true;
}
// Skip if we've exceeded max retries
if (record.count >= this.maxRetries) {
return true;
}
// Calculate exponential backoff
const backoffTime = this.backoffBase * Math.pow(2, record.count - 1);
const timeSinceFailure = Date.now() - record.lastFailure;
@@ -351,11 +340,9 @@ class AuthFailureTracker {
reset(hostId: number): void {
this.failures.delete(hostId);
// Don't log reset - it's not important
}
cleanup(): void {
// Clean up old failures (older than 1 hour)
const maxAge = 60 * 60 * 1000;
const now = Date.now();
@@ -459,7 +446,6 @@ class PollingManager {
const statsConfig = this.parseStatsConfig(host.statsConfig);
const existingConfig = this.pollingConfigs.get(host.id);
// Clear existing timers if they exist
if (existingConfig) {
if (existingConfig.statusTimer) {
clearInterval(existingConfig.statusTimer);
@@ -474,35 +460,27 @@ class PollingManager {
statsConfig,
};
// Start status polling if enabled
if (statsConfig.statusCheckEnabled) {
const intervalMs = statsConfig.statusCheckInterval * 1000;
// Poll immediately (don't await - let it run in background)
this.pollHostStatus(host);
// Then set up interval to poll periodically
config.statusTimer = setInterval(() => {
this.pollHostStatus(host);
}, intervalMs);
} else {
// Remove status if monitoring is disabled
this.statusStore.delete(host.id);
}
// Start metrics polling if enabled
if (statsConfig.metricsEnabled) {
const intervalMs = statsConfig.metricsInterval * 1000;
// Poll immediately (don't await - let it run in background)
this.pollHostMetrics(host);
// Then set up interval to poll periodically
config.metricsTimer = setInterval(() => {
this.pollHostMetrics(host);
}, intervalMs);
} else {
// Remove metrics if monitoring is disabled
this.metricsStore.delete(host.id);
}
@@ -576,12 +554,10 @@ class PollingManager {
}
async refreshHostPolling(userId: string): Promise<void> {
// Stop all current polling
for (const hostId of this.pollingConfigs.keys()) {
this.stopPollingForHost(hostId);
}
// Reinitialize
await this.initializePolling(userId);
}
@@ -1019,10 +995,8 @@ async function collectMetrics(host: SSHHostWithCredentials): Promise<{
os: string | null;
};
}> {
// Check if we should skip this host due to auth failures
if (authFailureTracker.shouldSkip(host.id)) {
const reason = authFailureTracker.getSkipReason(host.id);
// Don't log - just skip silently to avoid spam
throw new Error(reason || "Authentication failed");
}
@@ -1166,7 +1140,6 @@ async function collectMetrics(host: SSHHostWithCredentials): Promise<{
availableHuman = null;
}
// Collect network interfaces
const interfaces: Array<{
name: string;
ip: string;
@@ -1225,7 +1198,6 @@ async function collectMetrics(host: SSHHostWithCredentials): Promise<{
}
} catch (e) {}
// Collect uptime
let uptimeSeconds: number | null = null;
let uptimeFormatted: string | null = null;
try {
@@ -1242,7 +1214,6 @@ async function collectMetrics(host: SSHHostWithCredentials): Promise<{
}
} catch (e) {}
// Collect process information
let totalProcesses: number | null = null;
let runningProcesses: number | null = null;
const topProcesses: Array<{
@@ -1285,7 +1256,6 @@ async function collectMetrics(host: SSHHostWithCredentials): Promise<{
runningProcesses = Number(runningCount.stdout.trim());
} catch (e) {}
// Collect system information
let hostname: string | null = null;
let kernel: string | null = null;
let os: string | null = null;
@@ -1338,25 +1308,20 @@ async function collectMetrics(host: SSHHostWithCredentials): Promise<{
return result;
});
} catch (error) {
// Record authentication failures for backoff
if (error instanceof Error) {
if (error.message.includes("TOTP authentication required")) {
// TOTP failures are already recorded in keyboard-interactive handler
throw error;
} else if (
error.message.includes("No password available") ||
error.message.includes("Unsupported authentication type") ||
error.message.includes("No SSH key available")
) {
// Configuration errors - permanent failures, don't retry
// recordFailure will log once when first detected
authFailureTracker.recordFailure(host.id, "AUTH", true);
} else if (
error.message.includes("authentication") ||
error.message.includes("Permission denied") ||
error.message.includes("All configured authentication methods failed")
) {
// recordFailure will log once when first detected
authFailureTracker.recordFailure(host.id, "AUTH");
} else if (
error.message.includes("timeout") ||
@@ -1384,9 +1349,7 @@ function tcpPing(
settled = true;
try {
socket.destroy();
} catch {
// Ignore errors when destroying socket
}
} catch {}
resolve(result);
};
@@ -1409,7 +1372,6 @@ app.get("/status", async (req, res) => {
});
}
// Initialize polling if no hosts are being polled yet
const statuses = pollingManager.getAllStatuses();
if (statuses.size === 0) {
await pollingManager.initializePolling(userId);
@@ -1433,7 +1395,6 @@ app.get("/status/:id", validateHostId, async (req, res) => {
});
}
// Initialize polling if no hosts are being polled yet
const statuses = pollingManager.getAllStatuses();
if (statuses.size === 0) {
await pollingManager.initializePolling(userId);
@@ -1520,7 +1481,6 @@ app.listen(PORT, async () => {
});
}
// Cleanup old auth failures every 10 minutes
setInterval(
() => {
authFailureTracker.cleanup();

View File

@@ -333,15 +333,9 @@ wss.on("connection", async (ws: WebSocket, req) => {
}
case "password_response": {
const passwordData = data as TOTPResponseData; // Same structure
const passwordData = data as TOTPResponseData;
if (keyboardInteractiveFinish && passwordData?.code) {
const password = passwordData.code;
sshLogger.info("Password received from user", {
operation: "password_response",
userId,
passwordLength: password.length,
});
keyboardInteractiveFinish([password]);
keyboardInteractiveFinish = null;
} else {
@@ -374,7 +368,6 @@ wss.on("connection", async (ws: WebSocket, req) => {
keyPassword?: string;
};
// Update the host config with provided credentials
if (credentialsData.password) {
credentialsData.hostConfig.password = credentialsData.password;
credentialsData.hostConfig.authType = "password";
@@ -384,10 +377,8 @@ wss.on("connection", async (ws: WebSocket, req) => {
credentialsData.hostConfig.authType = "key";
}
// Cleanup existing connection if any
cleanupSSH();
// Reconnect with new credentials
const reconnectData: ConnectToHostData = {
cols: credentialsData.cols,
rows: credentialsData.rows,
@@ -555,8 +546,6 @@ wss.on("connection", async (ws: WebSocket, req) => {
sshConn.on("ready", () => {
clearTimeout(connectionTimeout);
// Immediately try to create shell - don't delay as it can cause connection to be cleaned up
// The connection is already ready at this point
if (!sshConn) {
sshLogger.warn(
"SSH connection was cleaned up before shell could be created",
@@ -666,11 +655,9 @@ wss.on("connection", async (ws: WebSocket, req) => {
JSON.stringify({ type: "connected", message: "SSH connected" }),
);
// Log activity to dashboard API
if (id && hostConfig.userId) {
(async () => {
try {
// Fetch host name from database
const hosts = await SimpleDBOps.select(
getDb()
.select()
@@ -790,8 +777,6 @@ wss.on("connection", async (ws: WebSocket, req) => {
prompts: Array<{ prompt: string; echo: boolean }>,
finish: (responses: string[]) => void,
) => {
// Notify frontend that keyboard-interactive is available (e.g., for Warpgate OIDC)
// This allows the terminal to be displayed immediately so user can see auth prompts
if (resolvedCredentials.authType === "none") {
ws.send(
JSON.stringify({
@@ -846,37 +831,19 @@ wss.on("connection", async (ws: WebSocket, req) => {
resolvedCredentials.password &&
resolvedCredentials.authType !== "none";
// Check if this is a password prompt
const passwordPromptIndex = prompts.findIndex((p) =>
/password/i.test(p.prompt),
);
// If no stored password (including authType "none"), prompt the user
if (!hasStoredPassword && passwordPromptIndex !== -1) {
// Don't block duplicate password prompts - some servers (like Warpgate) may ask multiple times
if (keyboardInteractiveResponded && totpPromptSent) {
// Only block if we already sent a TOTP prompt
sshLogger.info(
"Skipping duplicate password prompt after TOTP sent",
{
operation: "keyboard_interactive_skip",
hostId: id,
},
);
return;
}
keyboardInteractiveResponded = true;
sshLogger.info("Requesting password from user (authType: none)", {
operation: "keyboard_interactive_password",
hostId: id,
prompt: prompts[passwordPromptIndex].prompt,
});
keyboardInteractiveFinish = (userResponses: string[]) => {
const userInput = (userResponses[0] || "").trim();
// Build responses for all prompts
const responses = prompts.map((p, index) => {
if (index === passwordPromptIndex) {
return userInput;
@@ -884,16 +851,6 @@ wss.on("connection", async (ws: WebSocket, req) => {
return "";
});
sshLogger.info(
"User-provided password being sent to SSH server",
{
operation: "interactive_password_verification",
hostId: id,
passwordLength: userInput.length,
totalPrompts: prompts.length,
},
);
finish(responses);
};
@@ -906,8 +863,6 @@ wss.on("connection", async (ws: WebSocket, req) => {
return;
}
// Auto-respond with stored credentials if available
// Allow multiple responses - the server might ask multiple times during auth flow
const responses = prompts.map((p) => {
if (/password/i.test(p.prompt) && resolvedCredentials.password) {
return resolvedCredentials.password;
@@ -991,28 +946,15 @@ wss.on("connection", async (ws: WebSocket, req) => {
};
if (resolvedCredentials.authType === "none") {
// For "none" auth type, allow natural SSH negotiation
// The authHandler will try keyboard-interactive if available, otherwise notify frontend
// This allows for Warpgate OIDC and other interactive auth scenarios
connectConfig.authHandler = (
methodsLeft: string[] | null,
partialSuccess: boolean,
callback: (nextMethod: string | false) => void,
) => {
if (methodsLeft && methodsLeft.length > 0) {
// Prefer keyboard-interactive if available
if (methodsLeft.includes("keyboard-interactive")) {
callback("keyboard-interactive");
} else {
// No keyboard-interactive available - notify frontend to show auth dialog
sshLogger.info(
"Server does not support keyboard-interactive auth for 'none' auth type",
{
operation: "ssh_auth_handler_no_keyboard",
hostId: id,
methodsLeft,
},
);
ws.send(
JSON.stringify({
type: "auth_method_not_available",
@@ -1024,11 +966,6 @@ wss.on("connection", async (ws: WebSocket, req) => {
callback(false);
}
} else {
// No methods left or empty - try to proceed without auth
sshLogger.info("No auth methods available, proceeding without auth", {
operation: "ssh_auth_no_methods",
hostId: id,
});
callback(false);
}
};

View File

@@ -217,9 +217,7 @@ function cleanupTunnelResources(
if (verification?.timeout) clearTimeout(verification.timeout);
try {
verification?.conn.end();
} catch {
// Ignore errors
}
} catch {}
tunnelVerifications.delete(tunnelName);
}
@@ -284,9 +282,7 @@ function handleDisconnect(
const verification = tunnelVerifications.get(tunnelName);
if (verification?.timeout) clearTimeout(verification.timeout);
verification?.conn.end();
} catch {
// Ignore errors
}
} catch {}
tunnelVerifications.delete(tunnelName);
}
@@ -642,9 +638,7 @@ async function connectSSHTunnel(
try {
conn.end();
} catch {
// Ignore errors
}
} catch {}
activeTunnels.delete(tunnelName);
@@ -784,9 +778,7 @@ async function connectSSHTunnel(
const verification = tunnelVerifications.get(tunnelName);
if (verification?.timeout) clearTimeout(verification.timeout);
verification?.conn.end();
} catch {
// Ignore errors
}
} catch {}
tunnelVerifications.delete(tunnelName);
}
@@ -837,13 +829,9 @@ async function connectSSHTunnel(
}
});
stream.stdout?.on("data", () => {
// Silently consume stdout data
});
stream.stdout?.on("data", () => {});
stream.on("error", () => {
// Silently consume stream errors
});
stream.on("error", () => {});
stream.stderr.on("data", (data) => {
const errorMsg = data.toString().trim();
@@ -1222,9 +1210,7 @@ async function killRemoteTunnelByMarker(
executeNextKillCommand();
});
stream.on("data", () => {
// Silently consume stream data
});
stream.on("data", () => {});
stream.stderr.on("data", (data) => {
const output = data.toString().trim();

View File

@@ -21,9 +21,7 @@ import { systemLogger, versionLogger } from "./utils/logger.js";
if (persistentConfig.parsed) {
Object.assign(process.env, persistentConfig.parsed);
}
} catch {
// Ignore errors if .env file doesn't exist
}
} catch {}
let version = "unknown";

View File

@@ -54,7 +54,6 @@ class AuthManager {
this.invalidateUserTokens(userId);
});
// Run session cleanup every 5 minutes
setInterval(
() => {
this.cleanupExpiredSessions().catch((error) => {
@@ -162,16 +161,15 @@ class AuthManager {
): Promise<string> {
const jwtSecret = await this.systemCrypto.getJWTSecret();
// Determine expiration based on device type
let expiresIn = options.expiresIn;
if (!expiresIn && !options.pendingTOTP) {
if (options.deviceType === "desktop" || options.deviceType === "mobile") {
expiresIn = "30d"; // 30 days for desktop and mobile
expiresIn = "30d";
} else {
expiresIn = "7d"; // 7 days for web
expiresIn = "7d";
}
} else if (!expiresIn) {
expiresIn = "7d"; // Default
expiresIn = "7d";
}
const payload: JWTPayload = { userId };
@@ -179,23 +177,19 @@ class AuthManager {
payload.pendingTOTP = true;
}
// Create session in database if not a temporary TOTP token
if (!options.pendingTOTP && options.deviceType && options.deviceInfo) {
const sessionId = nanoid();
payload.sessionId = sessionId;
// Generate the token first to get it for storage
const token = jwt.sign(payload, jwtSecret, {
expiresIn,
} as jwt.SignOptions);
// Calculate expiration timestamp
const expirationMs = this.parseExpiresIn(expiresIn);
const now = new Date();
const expiresAt = new Date(now.getTime() + expirationMs).toISOString();
const createdAt = now.toISOString();
// Store session in database
try {
await db.insert(sessions).values({
id: sessionId,
@@ -208,27 +202,11 @@ class AuthManager {
lastActiveAt: createdAt,
});
databaseLogger.info("Session created", {
operation: "session_create",
userId,
sessionId,
deviceType: options.deviceType,
expiresAt,
});
// Immediately save database to disk to ensure session persists across restarts
try {
const { saveMemoryDatabaseToFile } = await import(
"../database/db/index.js"
);
await saveMemoryDatabaseToFile();
databaseLogger.info(
"Database saved immediately after session creation",
{
operation: "session_create_db_save",
sessionId,
},
);
} catch (saveError) {
databaseLogger.error(
"Failed to save database after session creation",
@@ -245,7 +223,6 @@ class AuthManager {
userId,
sessionId,
});
// Continue anyway - session tracking is non-critical
}
return token;
@@ -259,7 +236,7 @@ class AuthManager {
*/
private parseExpiresIn(expiresIn: string): number {
const match = expiresIn.match(/^(\d+)([smhd])$/);
if (!match) return 7 * 24 * 60 * 60 * 1000; // Default 7 days
if (!match) return 7 * 24 * 60 * 60 * 1000;
const value = parseInt(match[1]);
const unit = match[2];
@@ -282,26 +259,8 @@ class AuthManager {
try {
const jwtSecret = await this.systemCrypto.getJWTSecret();
databaseLogger.info("Attempting JWT verification", {
operation: "jwt_verify_attempt",
tokenLength: token.length,
secretLength: jwtSecret.length,
});
const payload = jwt.verify(token, jwtSecret) as JWTPayload;
databaseLogger.info("JWT signature verified successfully", {
operation: "jwt_signature_verified",
userId: payload.userId,
sessionId: payload.sessionId,
hasExpiration: !!payload.exp,
expiresAt: payload.exp
? new Date(payload.exp * 1000).toISOString()
: "N/A",
});
// For tokens with sessionId, verify the session exists in database
// This ensures revoked sessions are rejected even after backend restart
if (payload.sessionId) {
try {
const sessionRecords = await db
@@ -322,13 +281,6 @@ class AuthManager {
);
return null;
}
databaseLogger.info("Session found in database", {
operation: "jwt_session_found",
sessionId: payload.sessionId,
userId: payload.userId,
sessionExpiresAt: sessionRecords[0].expiresAt,
});
} catch (dbError) {
databaseLogger.error(
"Failed to check session in database during JWT verification",
@@ -338,15 +290,8 @@ class AuthManager {
sessionId: payload.sessionId,
},
);
// Continue anyway - database errors shouldn't block valid JWTs
}
}
databaseLogger.info("JWT verification successful", {
operation: "jwt_verify_success",
userId: payload.userId,
sessionId: payload.sessionId,
});
return payload;
} catch (error) {
databaseLogger.warn("JWT verification failed", {
@@ -358,35 +303,14 @@ class AuthManager {
}
}
// Intentional no-op kept for API compatibility: individual tokens are not
// blacklisted; invalidation happens by deleting the backing session row, and
// verifyJWTToken rejects any token whose sessionId no longer exists in the DB.
invalidateJWTToken(token: string): void {
  // No-op: Token invalidation is now handled through database session deletion
  databaseLogger.info(
    "Token invalidation requested (handled via session deletion)",
    {
      operation: "token_invalidate",
    },
  );
}
invalidateJWTToken(token: string): void {}
invalidateUserTokens(userId: string): void {
databaseLogger.info("User tokens invalidation requested due to data lock", {
operation: "user_tokens_invalidate",
userId,
});
// Session cleanup will happen through revokeAllUserSessions if needed
}
invalidateUserTokens(userId: string): void {}
async revokeSession(sessionId: string): Promise<boolean> {
try {
// Delete the session from database
// The JWT will be invalidated because verifyJWTToken checks for session existence
await db.delete(sessions).where(eq(sessions.id, sessionId));
databaseLogger.info("Session deleted", {
operation: "session_delete",
sessionId,
});
return true;
} catch (error) {
databaseLogger.error("Failed to delete session", error, {
@@ -402,7 +326,6 @@ class AuthManager {
exceptSessionId?: string,
): Promise<number> {
try {
// Get session count before deletion
const userSessions = await db
.select()
.from(sessions)
@@ -412,8 +335,6 @@ class AuthManager {
(s) => !exceptSessionId || s.id !== exceptSessionId,
).length;
// Delete sessions from database
// JWTs will be invalidated because verifyJWTToken checks for session existence
if (exceptSessionId) {
await db
.delete(sessions)
@@ -427,13 +348,6 @@ class AuthManager {
await db.delete(sessions).where(eq(sessions.userId, userId));
}
databaseLogger.info("User sessions deleted", {
operation: "user_sessions_delete",
userId,
exceptSessionId,
deletedCount,
});
return deletedCount;
} catch (error) {
databaseLogger.error("Failed to delete user sessions", error, {
@@ -446,7 +360,6 @@ class AuthManager {
async cleanupExpiredSessions(): Promise<number> {
try {
// Get expired sessions count
const expiredSessions = await db
.select()
.from(sessions)
@@ -454,19 +367,10 @@ class AuthManager {
const expiredCount = expiredSessions.length;
// Delete expired sessions
// JWTs will be invalidated because verifyJWTToken checks for session existence
await db
.delete(sessions)
.where(sql`${sessions.expiresAt} < datetime('now')`);
if (expiredCount > 0) {
databaseLogger.info("Expired sessions cleaned up", {
operation: "sessions_cleanup",
count: expiredCount,
});
}
return expiredCount;
} catch (error) {
databaseLogger.error("Failed to cleanup expired sessions", error, {
@@ -539,7 +443,6 @@ class AuthManager {
return res.status(401).json({ error: "Invalid token" });
}
// Check session status if sessionId is present
if (payload.sessionId) {
try {
const sessionRecords = await db
@@ -557,9 +460,6 @@ class AuthManager {
const session = sessionRecords[0];
// Session exists, no need to check isRevoked since we delete sessions instead
// Check if session has expired by comparing timestamps
const sessionExpiryTime = new Date(session.expiresAt).getTime();
const currentTime = Date.now();
const isExpired = sessionExpiryTime < currentTime;
@@ -579,7 +479,6 @@ class AuthManager {
});
}
// Update lastActiveAt timestamp (async, non-blocking)
db.update(sessions)
.set({ lastActiveAt: new Date().toISOString() })
.where(eq(sessions.id, payload.sessionId))
@@ -596,7 +495,6 @@ class AuthManager {
operation: "session_check_failed",
sessionId: payload.sessionId,
});
// Continue anyway - session tracking failures shouldn't block auth
}
}
@@ -614,14 +512,8 @@ class AuthManager {
return res.status(401).json({ error: "Authentication required" });
}
// Try to get data key if available (may be null after restart)
const dataKey = this.userCrypto.getUserDataKey(userId);
authReq.dataKey = dataKey || undefined;
// Note: Data key will be null after backend restart until user performs
// an operation that requires decryption. This is expected behavior.
// Individual routes that need encryption should check dataKey explicitly.
next();
};
}
@@ -688,15 +580,9 @@ class AuthManager {
async logoutUser(userId: string, sessionId?: string): Promise<void> {
this.userCrypto.logoutUser(userId);
// Delete the specific session from database if sessionId provided
if (sessionId) {
try {
await db.delete(sessions).where(eq(sessions.id, sessionId));
databaseLogger.info("Session deleted on logout", {
operation: "session_delete_logout",
userId,
sessionId,
});
} catch (error) {
databaseLogger.error("Failed to delete session on logout", error, {
operation: "session_delete_logout_failed",
@@ -705,13 +591,8 @@ class AuthManager {
});
}
} else {
// If no sessionId, delete all sessions for this user
try {
await db.delete(sessions).where(eq(sessions.userId, userId));
databaseLogger.info("All user sessions deleted on logout", {
operation: "sessions_delete_logout",
userId,
});
} catch (error) {
databaseLogger.error(
"Failed to delete user sessions on logout",

View File

@@ -233,9 +233,7 @@ IP.3 = 0.0.0.0
let envContent = "";
try {
envContent = await fs.readFile(this.ENV_FILE, "utf8");
} catch {
// File doesn't exist yet, will create with SSL config
}
} catch {}
let updatedContent = envContent;
let hasChanges = false;

View File

@@ -393,18 +393,6 @@ class DataCrypto {
result.success = result.errors.length === 0;
databaseLogger.info(
"User data re-encryption completed after password reset",
{
operation: "password_reset_reencrypt_completed",
userId,
success: result.success,
reencryptedTables: result.reencryptedTables,
reencryptedFieldsCount: result.reencryptedFieldsCount,
errorsCount: result.errors.length,
},
);
return result;
} catch (error) {
databaseLogger.error(

View File

@@ -1,4 +1,4 @@
import Database from "better-sqlite3";
import Database from "better-sqlite3";
import fs from "fs";
import path from "path";
import { databaseLogger } from "./logger.js";
@@ -62,10 +62,6 @@ export class DatabaseMigration {
"Empty unencrypted database found alongside encrypted database. Removing empty file.";
try {
fs.unlinkSync(this.unencryptedDbPath);
databaseLogger.info("Removed empty unencrypted database file", {
operation: "migration_cleanup_empty",
path: this.unencryptedDbPath,
});
} catch (error) {
databaseLogger.warn("Failed to remove empty unencrypted database", {
operation: "migration_cleanup_empty_failed",

View File

@@ -71,11 +71,6 @@ export class DatabaseSaveTrigger {
this.pendingSave = true;
try {
databaseLogger.info("Force saving database", {
operation: "db_save_trigger_force_start",
reason,
});
await this.saveFunction();
} catch (error) {
databaseLogger.error("Database force save failed", error, {
@@ -110,9 +105,5 @@ export class DatabaseSaveTrigger {
this.pendingSave = false;
this.isInitialized = false;
this.saveFunction = null;
databaseLogger.info("Database save trigger cleaned up", {
operation: "db_save_trigger_cleanup",
});
}
}

View File

@@ -82,9 +82,7 @@ export class LazyFieldEncryption {
legacyFieldName,
);
return decrypted;
} catch {
// Ignore legacy format errors
}
} catch {}
}
const sensitiveFields = [
@@ -176,9 +174,7 @@ export class LazyFieldEncryption {
wasPlaintext: false,
wasLegacyEncryption: true,
};
} catch {
// Ignore legacy format errors
}
} catch {}
}
return {
encrypted: fieldValue,

View File

@@ -6,7 +6,6 @@ type TableName = "users" | "ssh_data" | "ssh_credentials" | "recent_activity";
class SimpleDBOps {
static async insert<T extends Record<string, unknown>>(
// eslint-disable-next-line @typescript-eslint/no-explicit-any
table: SQLiteTable<any>,
tableName: TableName,
data: T,
@@ -91,7 +90,6 @@ class SimpleDBOps {
}
static async update<T extends Record<string, unknown>>(
// eslint-disable-next-line @typescript-eslint/no-explicit-any
table: SQLiteTable<any>,
tableName: TableName,
where: unknown,
@@ -110,7 +108,6 @@ class SimpleDBOps {
const result = await getDb()
.update(table)
.set(encryptedData)
// eslint-disable-next-line @typescript-eslint/no-explicit-any
.where(where as any)
.returning();
@@ -127,14 +124,12 @@ class SimpleDBOps {
}
static async delete(
// eslint-disable-next-line @typescript-eslint/no-explicit-any
table: SQLiteTable<any>,
tableName: TableName,
where: unknown,
): Promise<unknown[]> {
const result = await getDb()
.delete(table)
// eslint-disable-next-line @typescript-eslint/no-explicit-any
.where(where as any)
.returning();

View File

@@ -84,9 +84,7 @@ function detectKeyTypeFromContent(keyContent: string): string {
} else if (decodedString.includes("1.3.101.112")) {
return "ssh-ed25519";
}
} catch {
// Cannot decode key, fallback to length-based detection
}
} catch {}
if (content.length < 800) {
return "ssh-ed25519";
@@ -142,9 +140,7 @@ function detectPublicKeyTypeFromContent(publicKeyContent: string): string {
} else if (decodedString.includes("1.3.101.112")) {
return "ssh-ed25519";
}
} catch {
// Cannot decode key, fallback to length-based detection
}
} catch {}
if (content.length < 400) {
return "ssh-ed25519";
@@ -246,9 +242,7 @@ export function parseSSHKey(
useSSH2 = true;
}
} catch {
// SSH2 parsing failed, will use fallback method
}
} catch {}
}
if (!useSSH2) {
@@ -274,9 +268,7 @@ export function parseSSHKey(
success: true,
};
}
} catch {
// Fallback parsing also failed
}
} catch {}
return {
privateKey: privateKeyData,

View File

@@ -107,9 +107,7 @@ class SystemCrypto {
process.env.DATABASE_KEY = dbKeyMatch[1];
return;
}
} catch {
// Ignore file read errors, will generate new key
}
} catch {}
await this.generateAndGuideDatabaseKey();
} catch (error) {
@@ -146,9 +144,7 @@ class SystemCrypto {
process.env.INTERNAL_AUTH_TOKEN = tokenMatch[1];
return;
}
} catch {
// Ignore file read errors, will generate new token
}
} catch {}
await this.generateAndGuideInternalAuthToken();
} catch (error) {

View File

@@ -7,59 +7,43 @@ export interface DeviceInfo {
browser: string;
version: string;
os: string;
deviceInfo: string; // Formatted string like "Chrome 120 on Windows 11"
deviceInfo: string;
}
/**
* Detect the platform type based on request headers
*/
/**
 * Classify the client platform for an incoming request.
 *
 * Resolution order: the Electron desktop app is detected via its explicit
 * `x-electron-app: "true"` header, the mobile app via a "Termix-Mobile"
 * marker in the User-Agent, and anything else is treated as a web browser.
 *
 * @param req incoming Express request (only headers are inspected)
 * @returns "desktop" | "mobile" | "web"
 */
export function detectPlatform(req: Request): DeviceType {
  const ua = req.headers["user-agent"] || "";
  // Electron clients identify themselves with a dedicated header.
  if (req.headers["x-electron-app"] === "true") {
    return "desktop";
  }
  // The mobile app embeds its own token in the User-Agent string.
  return ua.includes("Termix-Mobile") ? "mobile" : "web";
}
/**
* Parse User-Agent string to extract device information
*/
/**
 * Extract structured device information from a request's User-Agent.
 *
 * Dispatches to the platform-specific parser chosen by {@link detectPlatform}:
 * Electron desktop, Termix mobile app, or a regular web browser.
 *
 * @param req incoming Express request
 * @returns parsed browser/version/OS details plus a formatted summary string
 */
export function parseUserAgent(req: Request): DeviceInfo {
  const ua = req.headers["user-agent"] || "Unknown";
  switch (detectPlatform(req)) {
    case "desktop":
      return parseElectronUserAgent(ua);
    case "mobile":
      return parseMobileUserAgent(ua);
    default:
      return parseWebUserAgent(ua);
  }
}
/**
* Parse Electron app user agent
*/
function parseElectronUserAgent(userAgent: string): DeviceInfo {
let os = "Unknown OS";
let version = "Unknown";
// Detect OS
if (userAgent.includes("Windows")) {
os = parseWindowsVersion(userAgent);
} else if (userAgent.includes("Mac OS X")) {
@@ -68,7 +52,6 @@ function parseElectronUserAgent(userAgent: string): DeviceInfo {
os = "Linux";
}
// Try to extract Electron version
const electronMatch = userAgent.match(/Electron\/([\d.]+)/);
if (electronMatch) {
version = electronMatch[1];
@@ -83,23 +66,17 @@ function parseElectronUserAgent(userAgent: string): DeviceInfo {
};
}
/**
* Parse mobile app user agent
*/
function parseMobileUserAgent(userAgent: string): DeviceInfo {
let os = "Unknown OS";
let version = "Unknown";
// Check for Termix-Mobile/Platform format first (e.g., "Termix-Mobile/Android" or "Termix-Mobile/iOS")
const termixPlatformMatch = userAgent.match(/Termix-Mobile\/(Android|iOS)/i);
if (termixPlatformMatch) {
const platform = termixPlatformMatch[1];
if (platform.toLowerCase() === "android") {
// Try to get Android version from full UA string
const androidMatch = userAgent.match(/Android ([\d.]+)/);
os = androidMatch ? `Android ${androidMatch[1]}` : "Android";
} else if (platform.toLowerCase() === "ios") {
// Try to get iOS version from full UA string
const iosMatch = userAgent.match(/OS ([\d_]+)/);
if (iosMatch) {
const iosVersion = iosMatch[1].replace(/_/g, ".");
@@ -109,7 +86,6 @@ function parseMobileUserAgent(userAgent: string): DeviceInfo {
}
}
} else {
// Fallback: Check for standard Android/iOS patterns in the user agent
if (userAgent.includes("Android")) {
const androidMatch = userAgent.match(/Android ([\d.]+)/);
os = androidMatch ? `Android ${androidMatch[1]}` : "Android";
@@ -128,8 +104,6 @@ function parseMobileUserAgent(userAgent: string): DeviceInfo {
}
}
// Try to extract app version (if included in UA)
// Match patterns like "Termix-Mobile/1.0.0" or just "Termix-Mobile"
const versionMatch = userAgent.match(
/Termix-Mobile\/(?:Android|iOS|)([\d.]+)/i,
);
@@ -146,15 +120,11 @@ function parseMobileUserAgent(userAgent: string): DeviceInfo {
};
}
/**
* Parse web browser user agent
*/
function parseWebUserAgent(userAgent: string): DeviceInfo {
let browser = "Unknown Browser";
let version = "Unknown";
let os = "Unknown OS";
// Detect browser
if (userAgent.includes("Edg/")) {
const match = userAgent.match(/Edg\/([\d.]+)/);
browser = "Edge";
@@ -177,7 +147,6 @@ function parseWebUserAgent(userAgent: string): DeviceInfo {
version = match ? match[1] : "Unknown";
}
// Detect OS
if (userAgent.includes("Windows")) {
os = parseWindowsVersion(userAgent);
} else if (userAgent.includes("Mac OS X")) {
@@ -201,7 +170,6 @@ function parseWebUserAgent(userAgent: string): DeviceInfo {
}
}
// Shorten version to major.minor
if (version !== "Unknown") {
const versionParts = version.split(".");
version = versionParts.slice(0, 2).join(".");
@@ -216,9 +184,6 @@ function parseWebUserAgent(userAgent: string): DeviceInfo {
};
}
/**
* Parse Windows version from user agent
*/
function parseWindowsVersion(userAgent: string): string {
if (userAgent.includes("Windows NT 10.0")) {
return "Windows 10/11";
@@ -239,9 +204,6 @@ function parseWindowsVersion(userAgent: string): string {
return "Windows";
}
/**
* Parse macOS version from user agent
*/
function parseMacVersion(userAgent: string): string {
const match = userAgent.match(/Mac OS X ([\d_]+)/);
if (match) {
@@ -250,7 +212,6 @@ function parseMacVersion(userAgent: string): string {
const major = parseInt(parts[0]);
const minor = parseInt(parts[1]);
// macOS naming
if (major === 10) {
if (minor >= 15) return `macOS ${major}.${minor}`;
if (minor === 14) return "macOS Mojave";

View File

@@ -28,7 +28,6 @@ export interface TerminalTheme {
}
export const TERMINAL_THEMES: Record<string, TerminalTheme> = {
// Current default theme
termix: {
name: "Termix Default",
category: "dark",
@@ -666,14 +665,12 @@ export const TERMINAL_FONTS = [
},
];
// Cursor styles
export const CURSOR_STYLES = [
{ value: "block", label: "Block" },
{ value: "underline", label: "Underline" },
{ value: "bar", label: "Bar" },
] as const;
// Bell styles
export const BELL_STYLES = [
{ value: "none", label: "None" },
{ value: "sound", label: "Sound" },
@@ -681,16 +678,13 @@ export const BELL_STYLES = [
{ value: "both", label: "Both" },
] as const;
// Fast scroll modifiers
export const FAST_SCROLL_MODIFIERS = [
{ value: "alt", label: "Alt" },
{ value: "ctrl", label: "Ctrl" },
{ value: "shift", label: "Shift" },
] as const;
// Default terminal configuration
export const DEFAULT_TERMINAL_CONFIG = {
// Appearance
cursorBlink: true,
cursorStyle: "bar" as const,
fontSize: 14,
@@ -699,7 +693,6 @@ export const DEFAULT_TERMINAL_CONFIG = {
lineHeight: 1.2,
theme: "termix",
// Behavior
scrollback: 10000,
bellStyle: "none" as const,
rightClickSelectsWord: false,
@@ -707,7 +700,6 @@ export const DEFAULT_TERMINAL_CONFIG = {
fastScrollSensitivity: 5,
minimumContrastRatio: 1,
// Advanced
backspaceMode: "normal" as const,
agentForwarding: false,
environmentVariables: [] as Array<{ key: string; value: string }>,

View File

@@ -1,7 +1,3 @@
// ============================================================================
// CENTRAL TYPE DEFINITIONS
// ============================================================================
import type { Client } from "ssh2";
import type { Request } from "express";
@@ -60,7 +56,7 @@ export interface SSHHostData {
enableFileManager?: boolean;
defaultPath?: string;
tunnelConnections?: TunnelConnection[];
statsConfig?: string | Record<string, unknown>; // Can be string (from backend) or object (from form)
statsConfig?: string | Record<string, unknown>;
terminalConfig?: TerminalConfig;
}
@@ -110,7 +106,6 @@ export interface TunnelConnection {
endpointPort: number;
endpointHost: string;
// Endpoint host credentials for tunnel authentication
endpointPassword?: string;
endpointKey?: string;
endpointKeyPassword?: string;
@@ -255,16 +250,14 @@ export interface TermixAlert {
// ============================================================================
export interface TerminalConfig {
// Appearance
cursorBlink: boolean;
cursorStyle: "block" | "underline" | "bar";
fontSize: number;
fontFamily: string;
letterSpacing: number;
lineHeight: number;
theme: string; // Theme key from TERMINAL_THEMES
theme: string;
// Behavior
scrollback: number;
bellStyle: "none" | "sound" | "visual" | "both";
rightClickSelectsWord: boolean;
@@ -272,7 +265,6 @@ export interface TerminalConfig {
fastScrollSensitivity: number;
minimumContrastRatio: number;
// Advanced
backspaceMode: "normal" | "control-h";
agentForwarding: boolean;
environmentVariables: Array<{ key: string; value: string }>;
@@ -298,7 +290,7 @@ export interface TabContextTab {
title: string;
hostConfig?: SSHHost;
terminalRef?: any;
initialTab?: string; // For ssh_manager: "host_viewer" | "add_host" | "credentials" | "add_credential"
initialTab?: string;
}
// ============================================================================

View File

@@ -9,12 +9,10 @@ export type WidgetType =
export interface StatsConfig {
enabledWidgets: WidgetType[];
// Status monitoring configuration
statusCheckEnabled: boolean;
statusCheckInterval: number; // seconds (5-3600)
// Metrics monitoring configuration
statusCheckInterval: number;
metricsEnabled: boolean;
metricsInterval: number; // seconds (5-3600)
metricsInterval: number;
}
export const DEFAULT_STATS_CONFIG: StatsConfig = {

View File

@@ -153,7 +153,6 @@ export function AdminSettings({
toast.error(t("admin.failedToFetchOidcConfig"));
}
});
// Capture the current session so we know whether to ask for a password later.
getUserInfo()
.then((info) => {
if (info) {
@@ -251,9 +250,7 @@ export function AdminSettings({
};
const handleTogglePasswordLogin = async (checked: boolean) => {
// If disabling password login, warn the user
if (!checked) {
// Check if OIDC is configured
const hasOIDCConfigured =
oidcConfig.client_id &&
oidcConfig.client_secret &&
@@ -276,7 +273,6 @@ export function AdminSettings({
await updatePasswordLoginAllowed(checked);
setAllowPasswordLogin(checked);
// Auto-disable registration when password login is disabled
if (allowRegistration) {
await updateRegistrationAllowed(false);
setAllowRegistration(false);
@@ -295,7 +291,6 @@ export function AdminSettings({
return;
}
// Enabling password login - proceed normally
setPasswordLoginLoading(true);
try {
await updatePasswordLoginAllowed(checked);
@@ -493,7 +488,6 @@ export function AdminSettings({
const formData = new FormData();
formData.append("file", importFile);
if (requiresImportPassword) {
// Preserve the existing password flow for non-OIDC accounts.
formData.append("password", importPassword);
}
@@ -607,7 +601,6 @@ export function AdminSettings({
};
const handleRevokeSession = async (sessionId: string) => {
// Check if this is the current session
const currentJWT = getCookie("jwt");
const currentSession = sessions.find((s) => s.jwtToken === currentJWT);
const isCurrentSession = currentSession?.id === sessionId;
@@ -641,7 +634,6 @@ export function AdminSettings({
if (response.ok) {
toast.success(t("admin.sessionRevokedSuccessfully"));
// If user revoked their own session, reload the page after a brief delay
if (isCurrentSession) {
setTimeout(() => {
window.location.reload();
@@ -661,7 +653,6 @@ export function AdminSettings({
};
const handleRevokeAllUserSessions = async (userId: string) => {
// Check if revoking sessions for current user
const isCurrentUser = currentUser?.id === userId;
confirmWithToast(
@@ -701,7 +692,6 @@ export function AdminSettings({
data.message || t("admin.sessionsRevokedSuccessfully"),
);
// If revoking sessions for current user, reload the page after a brief delay
if (isCurrentUser) {
setTimeout(() => {
window.location.reload();
@@ -978,7 +968,6 @@ export function AdminSettings({
type="button"
variant="outline"
onClick={async () => {
// Check if password login is enabled
if (!allowPasswordLogin) {
confirmWithToast(
t("admin.confirmDisableOIDCWarning"),
@@ -1469,7 +1458,6 @@ export function AdminSettings({
</span>
</Button>
</div>
{/* Only render the password field when a local account is performing the import. */}
{importFile && requiresImportPassword && (
<div className="space-y-2">
<Label htmlFor="import-password">Password</Label>

View File

@@ -80,7 +80,6 @@ export function CredentialEditor({
setFolders(uniqueFolders);
} catch {
// Failed to load credentials
} finally {
setLoading(false);
}

View File

@@ -66,7 +66,6 @@ export function Dashboard({
const [userId, setUserId] = useState<string | null>(null);
const [dbError, setDbError] = useState<string | null>(null);
// Dashboard data state
const [uptime, setUptime] = useState<string>("0d 0h 0m");
const [versionStatus, setVersionStatus] = useState<
"up_to_date" | "requires_update"
@@ -141,22 +140,18 @@ export function Dashboard({
}
}, [isAuthenticated]);
// Fetch dashboard data
useEffect(() => {
if (!loggedIn) return;
const fetchDashboardData = async () => {
try {
// Fetch uptime
const uptimeInfo = await getUptime();
setUptime(uptimeInfo.formatted);
// Fetch version info
const versionInfo = await getVersionInfo();
setVersionText(`v${versionInfo.localVersion}`);
setVersionStatus(versionInfo.status || "up_to_date");
// Fetch database health
try {
await getDatabaseHealth();
setDbHealth("healthy");
@@ -164,25 +159,20 @@ export function Dashboard({
setDbHealth("error");
}
// Fetch total counts
const hosts = await getSSHHosts();
setTotalServers(hosts.length);
// Count total tunnels across all hosts
let totalTunnelsCount = 0;
for (const host of hosts) {
if (host.tunnelConnections) {
try {
// tunnelConnections is already parsed as an array from the backend
const tunnelConnections = Array.isArray(host.tunnelConnections)
? host.tunnelConnections
: JSON.parse(host.tunnelConnections);
if (Array.isArray(tunnelConnections)) {
totalTunnelsCount += tunnelConnections.length;
}
} catch {
// Ignore parse errors
}
} catch {}
}
}
setTotalTunnels(totalTunnelsCount);
@@ -190,13 +180,11 @@ export function Dashboard({
const credentials = await getCredentials();
setTotalCredentials(credentials.length);
// Fetch recent activity (35 items)
setRecentActivityLoading(true);
const activity = await getRecentActivity(35);
setRecentActivity(activity);
setRecentActivityLoading(false);
// Fetch server stats for first 5 servers
setServerStatsLoading(true);
const serversWithStats = await Promise.all(
hosts.slice(0, 5).map(async (host: { id: number; name: string }) => {
@@ -229,12 +217,10 @@ export function Dashboard({
fetchDashboardData();
// Refresh every 30 seconds
const interval = setInterval(fetchDashboardData, 30000);
return () => clearInterval(interval);
}, [loggedIn]);
// Handler for resetting recent activity
const handleResetActivity = async () => {
try {
await resetRecentActivity();
@@ -244,9 +230,7 @@ export function Dashboard({
}
};
// Handler for opening a recent activity item
const handleActivityClick = (item: RecentActivityItem) => {
// Find the host and open appropriate tab
getSSHHosts().then((hosts) => {
const host = hosts.find((h: { id: number }) => h.id === item.hostId);
if (!host) return;
@@ -267,7 +251,6 @@ export function Dashboard({
});
};
// Quick Actions handlers
const handleAddHost = () => {
const sshManagerTab = tabList.find((t) => t.type === "ssh_manager");
if (sshManagerTab) {

View File

@@ -226,9 +226,8 @@ function FileManagerContent({ initialHost, onClose }: FileManagerProps) {
const currentLoadingPathRef = useRef<string>("");
const keepaliveTimerRef = useRef<NodeJS.Timeout | null>(null);
const activityLoggedRef = useRef(false);
const activityLoggingRef = useRef(false); // Prevent concurrent logging calls
const activityLoggingRef = useRef(false);
// Centralized activity logging to prevent duplicates
const logFileManagerActivity = useCallback(async () => {
if (
!currentHost?.id ||
@@ -238,7 +237,6 @@ function FileManagerContent({ initialHost, onClose }: FileManagerProps) {
return;
}
// Set flags IMMEDIATELY to prevent race conditions
activityLoggingRef.current = true;
activityLoggedRef.current = true;
@@ -246,10 +244,8 @@ function FileManagerContent({ initialHost, onClose }: FileManagerProps) {
const hostName =
currentHost.name || `${currentHost.username}@${currentHost.ip}`;
await logActivity("file_manager", currentHost.id, hostName);
// Don't reset activityLoggedRef on success - we want to prevent future calls
} catch (err) {
console.warn("Failed to log file manager activity:", err);
// Reset on error so it can be retried
activityLoggedRef.current = false;
} finally {
activityLoggingRef.current = false;
@@ -350,8 +346,6 @@ function FileManagerContent({ initialHost, onClose }: FileManagerProps) {
clearSelection();
initialLoadDoneRef.current = true;
// Log activity for recent connections (after successful directory load)
// Only log if TOTP was not required (if TOTP is required, we'll log after verification)
if (!result?.requires_totp) {
logFileManagerActivity();
}
@@ -1306,7 +1300,6 @@ function FileManagerContent({ initialHost, onClose }: FileManagerProps) {
initialLoadDoneRef.current = true;
toast.success(t("fileManager.connectedSuccessfully"));
// Log activity for recent connections (after successful directory load)
logFileManagerActivity();
} catch (dirError: unknown) {
console.error("Failed to load initial directory:", dirError);

View File

@@ -34,21 +34,16 @@ export function HostManager({
const ignoreNextHostConfigChangeRef = useRef<boolean>(false);
const lastProcessedHostIdRef = useRef<number | undefined>(undefined);
// Update editing host when hostConfig prop changes (from sidebar edit button)
useEffect(() => {
// Skip if we should ignore this change
if (ignoreNextHostConfigChangeRef.current) {
ignoreNextHostConfigChangeRef.current = false;
return;
}
// Only process if this is an external edit request (from sidebar)
if (hostConfig && initialTab === "add_host") {
const currentHostId = hostConfig.id;
// Open editor if it's a different host OR same host but user is on viewer/credentials tabs
if (currentHostId !== lastProcessedHostIdRef.current) {
// Different host - always open
setEditingHost(hostConfig);
setActiveTab("add_host");
lastProcessedHostIdRef.current = currentHostId;
@@ -57,11 +52,9 @@ export function HostManager({
activeTab === "credentials" ||
activeTab === "add_credential"
) {
// Same host but user manually navigated away - reopen
setEditingHost(hostConfig);
setActiveTab("add_host");
}
// If same host and already on add_host tab, do nothing (don't block tab changes)
}
}, [hostConfig, initialTab]);
@@ -72,11 +65,9 @@ export function HostManager({
};
const handleFormSubmit = () => {
// Ignore the next hostConfig change (which will come from ssh-hosts:changed event)
ignoreNextHostConfigChangeRef.current = true;
setEditingHost(null);
setActiveTab("host_viewer");
// Clear after a delay so the same host can be edited again
setTimeout(() => {
lastProcessedHostIdRef.current = undefined;
}, 500);

View File

@@ -129,7 +129,6 @@ export function HostManagerEditor({
);
const isSubmittingRef = useRef(false);
// Monitoring interval states
const [statusIntervalUnit, setStatusIntervalUnit] = useState<
"seconds" | "minutes"
>("seconds");
@@ -168,9 +167,7 @@ export function HostManagerEditor({
setFolders(uniqueFolders);
setSshConfigurations(uniqueConfigurations);
} catch {
// Failed to load hosts data
}
} catch {}
};
fetchData();
@@ -199,9 +196,7 @@ export function HostManagerEditor({
setFolders(uniqueFolders);
setSshConfigurations(uniqueConfigurations);
} catch {
// Failed to reload hosts after credential change
}
} catch {}
};
window.addEventListener("credentials:changed", handleCredentialChange);
@@ -319,7 +314,6 @@ export function HostManagerEditor({
})
.superRefine((data, ctx) => {
if (data.authType === "none") {
// No credentials required for "none" auth type - will use keyboard-interactive
return;
}
@@ -444,7 +438,6 @@ export function HostManagerEditor({
: "none";
setAuthTab(defaultAuthType);
// Parse statsConfig from JSON string if needed
let parsedStatsConfig = DEFAULT_STATS_CONFIG;
try {
if (cleanedHost.statsConfig) {
@@ -457,7 +450,6 @@ export function HostManagerEditor({
console.error("Failed to parse statsConfig:", error);
}
// Merge with defaults to ensure all new fields are present
parsedStatsConfig = { ...DEFAULT_STATS_CONFIG, ...parsedStatsConfig };
const formData = {
@@ -552,7 +544,6 @@ export function HostManagerEditor({
data.name = `${data.username}@${data.ip}`;
}
// Validate monitoring intervals
if (data.statsConfig) {
const statusInterval = data.statsConfig.statusCheckInterval || 30;
const metricsInterval = data.statsConfig.metricsInterval || 30;
@@ -663,7 +654,6 @@ export function HostManagerEditor({
window.dispatchEvent(new CustomEvent("ssh-hosts:changed"));
// Refresh backend polling to pick up new/updated host configuration
const { refreshServerPolling } = await import("@/ui/main-axios.ts");
refreshServerPolling();
} catch {
@@ -1391,7 +1381,6 @@ export function HostManagerEditor({
)}
/>
{/* Font Family */}
<FormField
control={form.control}
name="terminalConfig.fontFamily"
@@ -1425,7 +1414,6 @@ export function HostManagerEditor({
)}
/>
{/* Font Size */}
<FormField
control={form.control}
name="terminalConfig.fontSize"
@@ -1450,7 +1438,6 @@ export function HostManagerEditor({
)}
/>
{/* Letter Spacing */}
<FormField
control={form.control}
name="terminalConfig.letterSpacing"
@@ -1477,7 +1464,6 @@ export function HostManagerEditor({
)}
/>
{/* Line Height */}
<FormField
control={form.control}
name="terminalConfig.lineHeight"
@@ -1502,7 +1488,6 @@ export function HostManagerEditor({
)}
/>
{/* Cursor Style */}
<FormField
control={form.control}
name="terminalConfig.cursorStyle"
@@ -1533,7 +1518,6 @@ export function HostManagerEditor({
)}
/>
{/* Cursor Blink */}
<FormField
control={form.control}
name="terminalConfig.cursorBlink"
@@ -1557,11 +1541,9 @@ export function HostManagerEditor({
</AccordionContent>
</AccordionItem>
{/* Behavior Settings */}
<AccordionItem value="behavior">
<AccordionTrigger>Behavior</AccordionTrigger>
<AccordionContent className="space-y-4 pt-4">
{/* Scrollback Buffer */}
<FormField
control={form.control}
name="terminalConfig.scrollback"
@@ -1588,7 +1570,6 @@ export function HostManagerEditor({
)}
/>
{/* Bell Style */}
<FormField
control={form.control}
name="terminalConfig.bellStyle"
@@ -1623,7 +1604,6 @@ export function HostManagerEditor({
)}
/>
{/* Right Click Selects Word */}
<FormField
control={form.control}
name="terminalConfig.rightClickSelectsWord"
@@ -1645,7 +1625,6 @@ export function HostManagerEditor({
)}
/>
{/* Fast Scroll Modifier */}
<FormField
control={form.control}
name="terminalConfig.fastScrollModifier"
@@ -1674,7 +1653,6 @@ export function HostManagerEditor({
)}
/>
{/* Fast Scroll Sensitivity */}
<FormField
control={form.control}
name="terminalConfig.fastScrollSensitivity"
@@ -1701,7 +1679,6 @@ export function HostManagerEditor({
)}
/>
{/* Minimum Contrast Ratio */}
<FormField
control={form.control}
name="terminalConfig.minimumContrastRatio"
@@ -1731,11 +1708,9 @@ export function HostManagerEditor({
</AccordionContent>
</AccordionItem>
{/* Advanced Settings */}
<AccordionItem value="advanced">
<AccordionTrigger>Advanced</AccordionTrigger>
<AccordionContent className="space-y-4 pt-4">
{/* Agent Forwarding */}
<FormField
control={form.control}
name="terminalConfig.agentForwarding"
@@ -1758,7 +1733,6 @@ export function HostManagerEditor({
)}
/>
{/* Backspace Mode */}
<FormField
control={form.control}
name="terminalConfig.backspaceMode"
@@ -1790,7 +1764,6 @@ export function HostManagerEditor({
)}
/>
{/* Startup Snippet */}
<FormField
control={form.control}
name="terminalConfig.startupSnippetId"
@@ -1862,7 +1835,6 @@ export function HostManagerEditor({
)}
/>
{/* Auto MOSH */}
<FormField
control={form.control}
name="terminalConfig.autoMosh"
@@ -1884,7 +1856,6 @@ export function HostManagerEditor({
)}
/>
{/* MOSH Command */}
{form.watch("terminalConfig.autoMosh") && (
<FormField
control={form.control}
@@ -1906,7 +1877,6 @@ export function HostManagerEditor({
/>
)}
{/* Environment Variables */}
<div className="space-y-2">
<label className="text-sm font-medium">
Environment Variables
@@ -2395,9 +2365,7 @@ export function HostManagerEditor({
)}
</TabsContent>
<TabsContent value="statistics" className="space-y-6">
{/* Monitoring Configuration Section */}
<div className="space-y-4">
{/* Status Check Monitoring */}
<div className="space-y-3">
<FormField
control={form.control}
@@ -2463,7 +2431,6 @@ export function HostManagerEditor({
value: "seconds" | "minutes",
) => {
setStatusIntervalUnit(value);
// Convert current value to new unit
const currentSeconds = field.value || 30;
if (value === "minutes") {
const minutes = Math.round(
@@ -2496,7 +2463,6 @@ export function HostManagerEditor({
)}
</div>
{/* Metrics Monitoring */}
<div className="space-y-3">
<FormField
control={form.control}
@@ -2560,7 +2526,6 @@ export function HostManagerEditor({
value: "seconds" | "minutes",
) => {
setMetricsIntervalUnit(value);
// Convert current value to new unit
const currentSeconds = field.value || 30;
if (value === "minutes") {
const minutes = Math.round(
@@ -2594,7 +2559,6 @@ export function HostManagerEditor({
</div>
</div>
{/* Only show widget selection if metrics monitoring is enabled */}
{form.watch("statsConfig.metricsEnabled") && (
<>
<FormField

View File

@@ -126,7 +126,6 @@ export function HostManagerViewer({ onEditHost }: SSHManagerHostViewerProps) {
await fetchHosts();
window.dispatchEvent(new CustomEvent("ssh-hosts:changed"));
// Refresh backend polling to remove deleted host
const { refreshServerPolling } = await import("@/ui/main-axios.ts");
refreshServerPolling();
} catch {
@@ -392,7 +391,6 @@ export function HostManagerViewer({ onEditHost }: SSHManagerHostViewerProps) {
}
};
// Helper function to parse stats config and format monitoring status
const getMonitoringStatus = (host: SSHHost) => {
try {
const statsConfig = host.statsConfig

View File

@@ -80,7 +80,6 @@ export function Server({
const [isRefreshing, setIsRefreshing] = React.useState(false);
const [showStatsUI, setShowStatsUI] = React.useState(true);
// Parse stats config for monitoring settings
const statsConfig = React.useMemo((): StatsConfig => {
if (!currentHostConfig?.statsConfig) {
return DEFAULT_STATS_CONFIG;
@@ -181,7 +180,6 @@ export function Server({
window.removeEventListener("ssh-hosts:changed", handleHostsChanged);
}, [hostConfig?.id]);
// Separate effect for status monitoring
React.useEffect(() => {
if (!statusCheckEnabled || !currentHostConfig?.id || !isVisible) {
setServerStatus("offline");
@@ -207,7 +205,6 @@ export function Server({
} else if (err?.response?.status === 504) {
setServerStatus("offline");
} else if (err?.response?.status === 404) {
// Status not available - monitoring disabled
setServerStatus("offline");
} else {
setServerStatus("offline");
@@ -217,7 +214,7 @@ export function Server({
};
fetchStatus();
intervalId = window.setInterval(fetchStatus, 10000); // Poll backend every 10 seconds
intervalId = window.setInterval(fetchStatus, 10000);
return () => {
cancelled = true;
@@ -225,7 +222,6 @@ export function Server({
};
}, [currentHostConfig?.id, isVisible, statusCheckEnabled]);
// Separate effect for metrics monitoring
React.useEffect(() => {
if (!metricsEnabled || !currentHostConfig?.id || !isVisible) {
setShowStatsUI(false);
@@ -244,7 +240,6 @@ export function Server({
setMetrics(data);
setMetricsHistory((prev) => {
const newHistory = [...prev, data];
// Keep last 20 data points for chart
return newHistory.slice(-20);
});
setShowStatsUI(true);
@@ -256,7 +251,6 @@ export function Server({
response?: { status?: number; data?: { error?: string } };
};
if (err?.response?.status === 404) {
// Metrics not available - monitoring disabled
setMetrics(null);
setShowStatsUI(false);
} else if (
@@ -281,7 +275,7 @@ export function Server({
};
fetchMetrics();
intervalId = window.setInterval(fetchMetrics, 10000); // Poll backend every 10 seconds
intervalId = window.setInterval(fetchMetrics, 10000);
return () => {
cancelled = true;

View File

@@ -22,7 +22,6 @@ interface CpuWidgetProps {
export function CpuWidget({ metrics, metricsHistory }: CpuWidgetProps) {
const { t } = useTranslation();
// Prepare chart data
const chartData = React.useMemo(() => {
return metricsHistory.map((m, index) => ({
index,

View File

@@ -15,7 +15,6 @@ interface DiskWidgetProps {
export function DiskWidget({ metrics }: DiskWidgetProps) {
const { t } = useTranslation();
// Prepare radial chart data
const radialData = React.useMemo(() => {
const percent = metrics?.disk?.percent || 0;
return [

View File

@@ -22,7 +22,6 @@ interface MemoryWidgetProps {
export function MemoryWidget({ metrics, metricsHistory }: MemoryWidgetProps) {
const { t } = useTranslation();
// Prepare chart data
const chartData = React.useMemo(() => {
return metricsHistory.map((m, index) => ({
index,

View File

@@ -73,7 +73,6 @@ export function SnippetsSidebar({
try {
setLoading(true);
const data = await getSnippets();
// Defensive: ensure data is an array
setSnippets(Array.isArray(data) ? data : []);
} catch {
toast.error(t("snippets.failedToFetch"));
@@ -118,7 +117,6 @@ export function SnippetsSidebar({
};
const handleSubmit = async () => {
// Validate required fields
const errors = {
name: !formData.name.trim(),
content: !formData.content.trim(),
@@ -159,7 +157,6 @@ export function SnippetsSidebar({
const handleExecute = (snippet: Snippet) => {
if (selectedTabIds.length > 0) {
// Execute on selected terminals
selectedTabIds.forEach((tabId) => {
const tab = tabs.find((t: TabData) => t.id === tabId);
if (tab?.terminalRef?.current?.sendInput) {
@@ -173,7 +170,6 @@ export function SnippetsSidebar({
}),
);
} else {
// Execute on current terminal (legacy behavior)
onExecute(snippet.content);
toast.success(t("snippets.executeSuccess", { name: snippet.name }));
}
@@ -190,7 +186,6 @@ export function SnippetsSidebar({
return (
<>
{/* Overlay and Sidebar */}
<div
className="fixed top-0 left-0 right-0 bottom-0 z-[999999] flex justify-end pointer-events-auto isolate"
style={{
@@ -207,7 +202,6 @@ export function SnippetsSidebar({
}}
onClick={(e) => e.stopPropagation()}
>
{/* Header */}
<div className="flex items-center justify-between p-4 border-b border-dark-border">
<h2 className="text-lg font-semibold text-white">
{t("snippets.title")}
@@ -223,10 +217,8 @@ export function SnippetsSidebar({
</Button>
</div>
{/* Content */}
<div className="flex-1 overflow-y-auto p-4">
<div className="space-y-4">
{/* Terminal Selection */}
{terminalTabs.length > 0 && (
<>
<div className="space-y-2">
@@ -386,7 +378,6 @@ export function SnippetsSidebar({
</div>
</div>
{/* Create/Edit Dialog - centered modal */}
{showDialog && (
<div
className="fixed inset-0 flex items-center justify-center z-[9999999] bg-black/50 backdrop-blur-sm"

View File

@@ -122,14 +122,13 @@ export const Terminal = forwardRef<TerminalHandle, SSHTerminalProps>(
const isConnectingRef = useRef(false);
const connectionTimeoutRef = useRef<NodeJS.Timeout | null>(null);
const activityLoggedRef = useRef(false);
const activityLoggingRef = useRef(false); // Prevent concurrent logging calls
const activityLoggingRef = useRef(false);
const lastSentSizeRef = useRef<{ cols: number; rows: number } | null>(null);
const pendingSizeRef = useRef<{ cols: number; rows: number } | null>(null);
const notifyTimerRef = useRef<NodeJS.Timeout | null>(null);
const DEBOUNCE_MS = 140;
// Centralized activity logging to prevent duplicates
const logTerminalActivity = async () => {
if (
!hostConfig.id ||
@@ -139,7 +138,6 @@ export const Terminal = forwardRef<TerminalHandle, SSHTerminalProps>(
return;
}
// Set flags IMMEDIATELY to prevent race conditions
activityLoggingRef.current = true;
activityLoggedRef.current = true;
@@ -147,10 +145,8 @@ export const Terminal = forwardRef<TerminalHandle, SSHTerminalProps>(
const hostName =
hostConfig.name || `${hostConfig.username}@${hostConfig.ip}`;
await logActivity("terminal", hostConfig.id, hostName);
// Don't reset activityLoggedRef on success - we want to prevent future calls
} catch (err) {
console.warn("Failed to log terminal activity:", err);
// Reset on error so it can be retried
activityLoggedRef.current = false;
} finally {
activityLoggingRef.current = false;
@@ -193,9 +189,7 @@ export const Terminal = forwardRef<TerminalHandle, SSHTerminalProps>(
terminal as { refresh?: (start: number, end: number) => void }
).refresh(0, terminal.rows - 1);
}
} catch {
// Ignore terminal refresh errors
}
} catch {}
}
function performFit() {
@@ -250,7 +244,6 @@ export const Terminal = forwardRef<TerminalHandle, SSHTerminalProps>(
keyPassword?: string;
}) {
if (webSocketRef.current && terminal) {
// Send reconnect message with credentials
webSocketRef.current.send(
JSON.stringify({
type: "reconnect_with_credentials",
@@ -335,9 +328,7 @@ export const Terminal = forwardRef<TerminalHandle, SSHTerminalProps>(
scheduleNotify(cols, rows);
hardRefresh();
}
} catch {
// Ignore resize notification errors
}
} catch {}
},
refresh: () => hardRefresh(),
}),
@@ -587,18 +578,14 @@ export const Terminal = forwardRef<TerminalHandle, SSHTerminalProps>(
reconnectAttempts.current = 0;
isReconnectingRef.current = false;
// Log activity for recent connections
logTerminalActivity();
// Execute post-connection actions
setTimeout(async () => {
// Merge default config with host-specific config
const terminalConfig = {
...DEFAULT_TERMINAL_CONFIG,
...hostConfig.terminalConfig,
};
// Set environment variables
if (
terminalConfig.environmentVariables &&
terminalConfig.environmentVariables.length > 0
@@ -616,7 +603,6 @@ export const Terminal = forwardRef<TerminalHandle, SSHTerminalProps>(
}
}
// Execute startup snippet
if (terminalConfig.startupSnippetId) {
try {
const snippets = await getSnippets();
@@ -638,7 +624,6 @@ export const Terminal = forwardRef<TerminalHandle, SSHTerminalProps>(
}
}
// Execute MOSH command
if (terminalConfig.autoMosh && ws.readyState === 1) {
ws.send(
JSON.stringify({
@@ -675,8 +660,6 @@ export const Terminal = forwardRef<TerminalHandle, SSHTerminalProps>(
connectionTimeoutRef.current = null;
}
} else if (msg.type === "keyboard_interactive_available") {
// Keyboard-interactive auth is available (e.g., Warpgate OIDC)
// Show terminal immediately so user can see auth prompts
setKeyboardInteractiveDetected(true);
setIsConnecting(false);
if (connectionTimeoutRef.current) {
@@ -684,8 +667,6 @@ export const Terminal = forwardRef<TerminalHandle, SSHTerminalProps>(
connectionTimeoutRef.current = null;
}
} else if (msg.type === "auth_method_not_available") {
// Server doesn't support keyboard-interactive for "none" auth
// Show SSHAuthDialog for manual credential entry
setAuthDialogReason("no_keyboard");
setShowAuthDialog(true);
setIsConnecting(false);
@@ -751,9 +732,7 @@ export const Terminal = forwardRef<TerminalHandle, SSHTerminalProps>(
await navigator.clipboard.writeText(text);
return;
}
} catch {
// Clipboard API not available, fallback to textarea method
}
} catch {}
const textarea = document.createElement("textarea");
textarea.value = text;
textarea.style.position = "fixed";
@@ -773,26 +752,21 @@ export const Terminal = forwardRef<TerminalHandle, SSHTerminalProps>(
if (navigator.clipboard && navigator.clipboard.readText) {
return await navigator.clipboard.readText();
}
} catch {
// Clipboard read not available or not permitted
}
} catch {}
return "";
}
useEffect(() => {
if (!terminal || !xtermRef.current) return;
// Merge default config with host-specific config
const config = {
...DEFAULT_TERMINAL_CONFIG,
...hostConfig.terminalConfig,
};
// Get theme colors
const themeColors =
TERMINAL_THEMES[config.theme]?.colors || TERMINAL_THEMES.termix.colors;
// Get font family with fallback
const fontConfig = TERMINAL_FONTS.find(
(f) => f.value === config.fontFamily,
);
@@ -875,9 +849,7 @@ export const Terminal = forwardRef<TerminalHandle, SSHTerminalProps>(
const pasteText = await readTextFromClipboard();
if (pasteText) terminal.paste(pasteText);
}
} catch {
// Ignore clipboard operation errors
}
} catch {}
};
element?.addEventListener("contextmenu", handleContextMenu);
@@ -886,7 +858,6 @@ export const Terminal = forwardRef<TerminalHandle, SSHTerminalProps>(
navigator.platform.toUpperCase().indexOf("MAC") >= 0 ||
navigator.userAgent.toUpperCase().indexOf("MAC") >= 0;
// Handle backspace mode (Control-H)
if (
config.backspaceMode === "control-h" &&
e.key === "Backspace" &&
@@ -943,7 +914,7 @@ export const Terminal = forwardRef<TerminalHandle, SSHTerminalProps>(
resizeTimeout.current = setTimeout(() => {
if (!isVisibleRef.current || !isReady) return;
performFit();
}, 50); // Reduced from 150ms to 50ms for snappier response
}, 50);
});
resizeObserver.observe(xtermRef.current);
@@ -1022,31 +993,21 @@ export const Terminal = forwardRef<TerminalHandle, SSHTerminalProps>(
useEffect(() => {
if (!isVisible || !isReady || !fitAddonRef.current || !terminal) {
// Reset fitted state when becoming invisible
if (!isVisible && isFitted) {
setIsFitted(false);
}
return;
}
// When becoming visible, we need to:
// 1. Mark as not fitted
// 2. Clear any rendering artifacts
// 3. Fit to the container size
// 4. Mark as fitted (happens in performFit)
setIsFitted(false);
// Use double requestAnimationFrame to ensure container has laid out
let rafId1: number;
let rafId2: number;
rafId1 = requestAnimationFrame(() => {
rafId2 = requestAnimationFrame(() => {
// Force a hard refresh to clear any artifacts
hardRefresh();
// Fit the terminal to the new size
performFit();
// Focus will happen after isFitted becomes true
});
});
@@ -1056,7 +1017,6 @@ export const Terminal = forwardRef<TerminalHandle, SSHTerminalProps>(
};
}, [isVisible, isReady, splitScreen, terminal]);
// Focus the terminal after it's been fitted and is visible
useEffect(() => {
if (
isFitted &&
@@ -1066,7 +1026,6 @@ export const Terminal = forwardRef<TerminalHandle, SSHTerminalProps>(
terminal &&
!splitScreen
) {
// Use requestAnimationFrame to ensure the terminal is actually visible in the DOM
const rafId = requestAnimationFrame(() => {
terminal.focus();
});
@@ -1131,7 +1090,6 @@ export const Terminal = forwardRef<TerminalHandle, SSHTerminalProps>(
const style = document.createElement("style");
style.innerHTML = `
/* Import popular terminal fonts from Google Fonts */
@import url('https://fonts.googleapis.com/css2?family=JetBrains+Mono:ital,wght@0,400;0,700;1,400;1,700&display=swap');
@import url('https://fonts.googleapis.com/css2?family=Fira+Code:wght@400;700&display=swap');
@import url('https://fonts.googleapis.com/css2?family=Source+Code+Pro:ital,wght@0,400;0,700;1,400;1,700&display=swap');

View File

@@ -192,7 +192,6 @@ export function Tunnel({ filterHostKey }: SSHTunnelProps): React.ReactElement {
await fetchTunnelStatuses();
} catch {
// Ignore tunnel action errors
} finally {
setTunnelActions((prev) => ({ ...prev, [tunnelName]: false }));
}

View File

@@ -58,16 +58,12 @@ export function Auth({
}: AuthProps) {
const { t } = useTranslation();
// Detect if we're running in Electron's WebView/iframe
const isInElectronWebView = () => {
try {
// Check if we're in an iframe AND the parent is Electron
if (window.self !== window.top) {
// We're in an iframe, likely Electron's ElectronLoginForm
return true;
}
} catch (e) {
// Cross-origin iframe, can't access parent
return false;
}
return false;
@@ -108,7 +104,6 @@ export function Auth({
}, [loggedIn]);
useEffect(() => {
// Skip when in Electron WebView iframe
if (isInElectronWebView()) {
return;
}
@@ -119,7 +114,6 @@ export function Auth({
}, []);
useEffect(() => {
// Skip when in Electron WebView iframe
if (isInElectronWebView()) {
return;
}
@@ -136,7 +130,6 @@ export function Auth({
}, []);
useEffect(() => {
// Skip when in Electron WebView iframe
if (isInElectronWebView()) {
return;
}
@@ -159,8 +152,6 @@ export function Auth({
}, []);
useEffect(() => {
// Skip database health check when in Electron WebView iframe
// The parent Electron window will handle authentication
if (isInElectronWebView()) {
setDbHealthChecking(false);
setDbConnectionFailed(false);
@@ -615,7 +606,6 @@ export function Auth({
);
}
// Show ElectronLoginForm when Electron has a configured server and user is not logged in
if (isElectron() && currentServerUrl && !loggedIn && !authLoading) {
return (
<div
@@ -797,7 +787,6 @@ export function Auth({
{!loggedIn && !authLoading && !totpRequired && (
<>
{(() => {
// Check if any authentication method is available
const hasLogin = passwordLoginAllowed && !firstUser;
const hasSignup =
(passwordLoginAllowed || firstUser) && registrationAllowed;

View File

@@ -25,9 +25,7 @@ export function ElectronLoginForm({
const [currentUrl, setCurrentUrl] = useState(serverUrl);
useEffect(() => {
// Listen for messages from iframe
const handleMessage = async (event: MessageEvent) => {
// Only accept messages from our configured server
try {
const serverOrigin = new URL(serverUrl).origin;
if (event.origin !== serverOrigin) {
@@ -43,25 +41,17 @@ export function ElectronLoginForm({
!hasAuthenticatedRef.current &&
!isAuthenticating
) {
console.log(
"[ElectronLoginForm] Received auth success from iframe",
);
hasAuthenticatedRef.current = true;
setIsAuthenticating(true);
try {
// Save JWT to localStorage (Electron mode)
localStorage.setItem("jwt", data.token);
// Verify it was saved
const savedToken = localStorage.getItem("jwt");
if (!savedToken) {
throw new Error("Failed to save JWT to localStorage");
}
console.log("[ElectronLoginForm] JWT saved successfully");
// Small delay to ensure everything is saved
await new Promise((resolve) => setTimeout(resolve, 200));
onAuthSuccess();
@@ -86,37 +76,29 @@ export function ElectronLoginForm({
}, [serverUrl, isAuthenticating, onAuthSuccess, t]);
useEffect(() => {
// Inject script into iframe when it loads
const iframe = iframeRef.current;
if (!iframe) return;
const handleLoad = () => {
setLoading(false);
// Update current URL when iframe loads
try {
if (iframe.contentWindow) {
setCurrentUrl(iframe.contentWindow.location.href);
}
} catch (e) {
// Cross-origin, can't access - use serverUrl
setCurrentUrl(serverUrl);
}
try {
// Inject JavaScript to detect JWT
const injectedScript = `
(function() {
console.log('[Electron WebView] Script injected');
let hasNotified = false;
function postJWTToParent(token, source) {
if (hasNotified) return;
hasNotified = true;
console.log('[Electron WebView] Posting JWT to parent, source:', source);
try {
window.parent.postMessage({
type: 'AUTH_SUCCESS',
@@ -163,7 +145,6 @@ export function ElectronLoginForm({
return false;
}
// Intercept localStorage.setItem
const originalSetItem = localStorage.setItem;
localStorage.setItem = function(key, value) {
originalSetItem.apply(this, arguments);
@@ -172,7 +153,6 @@ export function ElectronLoginForm({
}
};
// Intercept sessionStorage.setItem
const originalSessionSetItem = sessionStorage.setItem;
sessionStorage.setItem = function(key, value) {
originalSessionSetItem.apply(this, arguments);
@@ -181,7 +161,6 @@ export function ElectronLoginForm({
}
};
// Poll for JWT
const intervalId = setInterval(() => {
if (hasNotified) {
clearInterval(intervalId);
@@ -192,17 +171,14 @@ export function ElectronLoginForm({
}
}, 500);
// Stop after 5 minutes
setTimeout(() => {
clearInterval(intervalId);
}, 300000);
// Initial check
checkAuth();
})();
`;
// Try to inject the script
try {
if (iframe.contentWindow) {
iframe.contentWindow.postMessage(
@@ -210,11 +186,9 @@ export function ElectronLoginForm({
"*",
);
// Also try direct execution if same origin
iframe.contentWindow.eval(injectedScript);
}
} catch (err) {
// Cross-origin restrictions - this is expected for external servers
console.warn(
"[ElectronLoginForm] Cannot inject script due to cross-origin restrictions",
);
@@ -250,12 +224,10 @@ export function ElectronLoginForm({
onChangeServer();
};
// Format URL for display (remove protocol)
const displayUrl = currentUrl.replace(/^https?:\/\//, "");
return (
<div className="fixed inset-0 w-screen h-screen bg-dark-bg flex flex-col">
{/* Navigation Bar */}
<div className="flex items-center justify-between p-4 bg-dark-bg border-b border-dark-border">
<button
onClick={handleBack}

View File

@@ -37,9 +37,7 @@ export function ElectronServerConfig({
if (config?.serverUrl) {
setServerUrl(config.serverUrl);
}
} catch {
// Ignore config loading errors
}
} catch {}
};
const handleSaveConfig = async () => {
@@ -54,7 +52,6 @@ export function ElectronServerConfig({
try {
let normalizedUrl = serverUrl.trim();
// Ensure URL has http:// or https://
if (
!normalizedUrl.startsWith("http://") &&
!normalizedUrl.startsWith("https://")

View File

@@ -60,9 +60,7 @@ function AppContent() {
localStorage.setItem("topNavbarOpen", JSON.stringify(isTopbarOpen));
}, [isTopbarOpen]);
const handleSelectView = () => {
// View switching is now handled by tabs context
};
const handleSelectView = () => {};
const handleAuthSuccess = (authData: {
isAdmin: boolean;

View File

@@ -133,8 +133,6 @@ export function AppView({
prev.splitScreenTabsStr !== allSplitScreenTab.join(",");
const tabIdsChanged = prev.terminalTabIds !== currentTabIds;
// Only trigger hideThenFit if tabs were added/removed (not just reordered)
// or if current tab or split screen changed
const isJustReorder =
!lengthChanged && tabIdsChanged && !currentTabChanged && !splitChanged;
@@ -145,7 +143,6 @@ export function AppView({
hideThenFit();
}
// Update the ref for next comparison
prevStateRef.current = {
terminalTabsLength: terminalTabs.length,
currentTab,
@@ -186,10 +183,8 @@ export function AppView({
const HEADER_H = 28;
// Create a stable map of terminal IDs to preserve component identity
const terminalIdMapRef = useRef<Set<number>>(new Set());
// Track all terminal IDs that have ever existed
useEffect(() => {
terminalTabs.forEach((t) => terminalIdMapRef.current.add(t.id));
}, [terminalTabs]);
@@ -240,8 +235,6 @@ export function AppView({
});
}
// Render in a STABLE order by ID to prevent React from unmounting
// Sort by ID instead of array position
const sortedTerminalTabs = [...terminalTabs].sort((a, b) => a.id - b.id);
return (
@@ -628,7 +621,6 @@ export function AppView({
const isTerminal = currentTabData?.type === "terminal";
const isSplitScreen = allSplitScreenTab.length > 0;
// Get terminal background color for the current tab
const terminalConfig = {
...DEFAULT_TERMINAL_CONFIG,
...(currentTabData?.hostConfig as any)?.terminalConfig,
@@ -642,7 +634,6 @@ export function AppView({
const leftMarginPx = sidebarState === "collapsed" ? 26 : 8;
const bottomMarginPx = 8;
// Determine background color based on current tab type
let containerBackground = "var(--color-dark-bg)";
if (isFileManager && !isSplitScreen) {
containerBackground = "var(--color-dark-bg-darkest)";

View File

@@ -33,12 +33,10 @@ export function Host({ host: initialHost }: HostProps): React.ReactElement {
? host.name
: `${host.username}@${host.ip}:${host.port}`;
// Update host when prop changes
useEffect(() => {
setHost(initialHost);
}, [initialHost]);
// Listen for host changes to immediately update config
useEffect(() => {
const handleHostsChanged = async () => {
const { getSSHHosts } = await import("@/ui/main-axios.ts");
@@ -54,7 +52,6 @@ export function Host({ host: initialHost }: HostProps): React.ReactElement {
window.removeEventListener("ssh-hosts:changed", handleHostsChanged);
}, [host.id]);
// Parse stats config for monitoring settings
const statsConfig = useMemo(() => {
try {
return host.statsConfig
@@ -68,7 +65,6 @@ export function Host({ host: initialHost }: HostProps): React.ReactElement {
const shouldShowStatus = statsConfig.statusCheckEnabled !== false;
useEffect(() => {
// Don't poll if status monitoring is disabled
if (!shouldShowStatus) {
setServerStatus("offline");
return;
@@ -90,7 +86,6 @@ export function Host({ host: initialHost }: HostProps): React.ReactElement {
} else if (err?.response?.status === 504) {
setServerStatus("degraded");
} else if (err?.response?.status === 404) {
// Status not available - monitoring disabled
setServerStatus("offline");
} else {
setServerStatus("offline");
@@ -100,7 +95,7 @@ export function Host({ host: initialHost }: HostProps): React.ReactElement {
};
fetchStatus();
const intervalId = window.setInterval(fetchStatus, 10000); // Poll backend every 10 seconds
const intervalId = window.setInterval(fetchStatus, 10000);
return () => {
cancelled = true;

View File

@@ -47,7 +47,6 @@ export function Tab({
}: TabProps): React.ReactElement {
const { t } = useTranslation();
// Firefox-style tab classes using cn utility
const tabBaseClasses = cn(
"relative flex items-center gap-1.5 px-3 w-full min-w-0",
"rounded-t-lg border-t-2 border-l-2 border-r-2",
@@ -65,7 +64,6 @@ export function Tab({
"bg-background/80 text-muted-foreground border-border hover:bg-background/90",
);
// Helper function to split title into base and suffix
const splitTitle = (fullTitle: string): { base: string; suffix: string } => {
const match = fullTitle.match(/^(.*?)(\s*\(\d+\))$/);
if (match) {

View File

@@ -1,4 +1,3 @@
/* eslint-disable react-refresh/only-export-components */
import React, {
createContext,
useContext,
@@ -97,24 +96,19 @@ export function TabProvider({ children }: TabProviderProps) {
}
const addTab = (tabData: Omit<Tab, "id">): number => {
// Check if an ssh_manager tab already exists
if (tabData.type === "ssh_manager") {
const existingTab = tabs.find((t) => t.type === "ssh_manager");
if (existingTab) {
// Update the existing tab with new data
// Create a new object reference to force React to detect the change
setTabs((prev) =>
prev.map((t) =>
t.id === existingTab.id
? {
...t,
// Keep the original title (Host Manager)
title: existingTab.title,
hostConfig: tabData.hostConfig
? { ...tabData.hostConfig }
: undefined,
initialTab: tabData.initialTab,
// Add a timestamp to force re-render
_updateTimestamp: Date.now(),
}
: t,
@@ -222,7 +216,6 @@ export function TabProvider({ children }: TabProviderProps) {
setTabs((prev) =>
prev.map((tab) => {
if (tab.hostConfig && tab.hostConfig.id === hostId) {
// Don't update the title for ssh_manager tabs - they should stay as "Host Manager"
if (tab.type === "ssh_manager") {
return {
...tab,
@@ -230,7 +223,6 @@ export function TabProvider({ children }: TabProviderProps) {
};
}
// For other tabs (terminal, server, file_manager), update both config and title
return {
...tab,
hostConfig: newHostConfig,

View File

@@ -103,7 +103,7 @@ export function TopNavbar({
React.useEffect(() => {
if (justDroppedTabId !== null) {
const timer = setTimeout(() => setJustDroppedTabId(null), 50); // Clear after a short delay
const timer = setTimeout(() => setJustDroppedTabId(null), 50);
return () => clearTimeout(timer);
}
}, [justDroppedTabId]);
@@ -138,7 +138,6 @@ export function TopNavbar({
const draggedIndex = dragState.draggedIndex;
// Build array of tab boundaries in ORIGINAL order
const tabBoundaries: {
index: number;
start: number;
@@ -158,25 +157,21 @@ export function TopNavbar({
end: accumulatedX + tabWidth,
mid: accumulatedX + tabWidth / 2,
});
accumulatedX += tabWidth + 4; // 4px gap
accumulatedX += tabWidth + 4;
});
if (tabBoundaries.length === 0) return null;
// Calculate the dragged tab's center in container coordinates
const containerRect = containerRef.current.getBoundingClientRect();
const draggedTab = tabBoundaries[draggedIndex];
// Convert absolute positions to container-relative coordinates
const currentX = dragState.currentX - containerRect.left;
const startX = dragState.startX - containerRect.left;
const offset = currentX - startX;
const draggedCenter = draggedTab.mid + offset;
// Determine target index based on where the dragged tab's center is
let newTargetIndex = draggedIndex;
if (offset < 0) {
// Moving left - find the leftmost tab whose midpoint we've passed
for (let i = draggedIndex - 1; i >= 0; i--) {
if (draggedCenter < tabBoundaries[i].mid) {
newTargetIndex = i;
@@ -185,7 +180,6 @@ export function TopNavbar({
}
}
} else if (offset > 0) {
// Moving right - find the rightmost tab whose midpoint we've passed
for (let i = draggedIndex + 1; i < tabBoundaries.length; i++) {
if (draggedCenter > tabBoundaries[i].mid) {
newTargetIndex = i;
@@ -193,18 +187,14 @@ export function TopNavbar({
break;
}
}
// Edge case: if dragged past the last tab, target should be at the very end
const lastTabIndex = tabBoundaries.length - 1;
if (lastTabIndex >= 0) {
// Ensure there's at least one tab
const lastTabEl = tabRefs.current.get(lastTabIndex);
if (lastTabEl) {
const lastTabRect = lastTabEl.getBoundingClientRect();
const containerRect = containerRef.current.getBoundingClientRect();
const lastTabEndInContainer = lastTabRect.right - containerRect.left;
if (currentX > lastTabEndInContainer) {
// When dragging past the last tab, insert at the very end
// Use the last valid index (length - 1) not length itself
newTargetIndex = lastTabIndex;
}
}
@@ -217,13 +207,11 @@ export function TopNavbar({
const handleDragOver = (e: React.DragEvent) => {
e.preventDefault();
// Firefox compatibility - track position via dragover
if (dragState.draggedIndex === null) return;
const containerRect = containerRef.current?.getBoundingClientRect();
if (!containerRect) return;
// Update currentX if we have a valid clientX (Firefox may not provide it in onDrag)
if (e.clientX !== 0) {
setDragState((prev) => ({
...prev,
@@ -253,7 +241,6 @@ export function TopNavbar({
if (fromIndex !== null && toIndex !== null && fromIndex !== toIndex) {
prevTabsRef.current = tabs;
// Set animation flag and clear drag state synchronously
flushSync(() => {
setIsInDropAnimation(true);
setDragState({
@@ -356,14 +343,13 @@ export function TopNavbar({
const isDraggingThisTab = dragState.draggedIndex === index;
const isTheDraggedTab = tab.id === dragState.draggedId;
const isDroppedAndSnapping = tab.id === justDroppedTabId; // New condition
const isDroppedAndSnapping = tab.id === justDroppedTabId;
const dragOffset = isDraggingThisTab
? dragState.currentX - dragState.startX
: 0;
let transform = "";
// Skip all transforms if we just dropped to prevent glitches
if (!isInDropAnimation) {
if (isDraggingThisTab) {
transform = `translateX(${dragOffset}px)`;
@@ -374,13 +360,11 @@ export function TopNavbar({
const draggedOriginalIndex = dragState.draggedIndex;
const currentTargetIndex = dragState.targetIndex;
// Determine if this tab should shift left or right
if (
draggedOriginalIndex < currentTargetIndex && // Dragging rightwards
index > draggedOriginalIndex && // This tab is to the right of the original position
index <= currentTargetIndex // This tab is at or before the target position
draggedOriginalIndex < currentTargetIndex &&
index > draggedOriginalIndex &&
index <= currentTargetIndex
) {
// Shift left to make space
const draggedTabWidth =
tabRefs.current
.get(draggedOriginalIndex)
@@ -388,11 +372,10 @@ export function TopNavbar({
const gap = 4;
transform = `translateX(-${draggedTabWidth + gap}px)`;
} else if (
draggedOriginalIndex > currentTargetIndex && // Dragging leftwards
index >= currentTargetIndex && // This tab is at or after the target position
index < draggedOriginalIndex // This tab is to the left of the original position
draggedOriginalIndex > currentTargetIndex &&
index >= currentTargetIndex &&
index < draggedOriginalIndex
) {
// Shift right to make space
const draggedTabWidth =
tabRefs.current
.get(draggedOriginalIndex)
@@ -424,7 +407,6 @@ export function TopNavbar({
onDragEnd={handleDragEnd}
e
onMouseDown={(e) => {
// Middle mouse button (button === 1)
if (e.button === 1 && !disableClose) {
e.preventDefault();
handleTabClose(tab.id);

View File

@@ -1,4 +1,3 @@
/* eslint-disable react-refresh/only-export-components */
import React, {
createContext,
useContext,

View File

@@ -101,9 +101,7 @@ export const Terminal = forwardRef<TerminalHandle, SSHTerminalProps>(
terminal as { refresh?: (start: number, end: number) => void }
).refresh(0, terminal.rows - 1);
}
} catch {
// Ignore terminal refresh errors
}
} catch {}
}
function performFit() {
@@ -177,9 +175,7 @@ export const Terminal = forwardRef<TerminalHandle, SSHTerminalProps>(
scheduleNotify(cols, rows);
hardRefresh();
}
} catch {
// Ignore resize notification errors
}
} catch {}
},
refresh: () => hardRefresh(),
}),
@@ -229,9 +225,7 @@ export const Terminal = forwardRef<TerminalHandle, SSHTerminalProps>(
`\r\n[${msg.message || t("terminal.disconnected")}]`,
);
}
} catch {
// Ignore message parsing errors
}
} catch {}
});
ws.addEventListener("close", (event) => {

View File

@@ -27,23 +27,16 @@ import {
} from "@/ui/main-axios.ts";
import { PasswordInput } from "@/components/ui/password-input.tsx";
/**
* Detect if we're running inside a React Native WebView
*/
function isReactNativeWebView(): boolean {
return typeof window !== "undefined" && !!(window as any).ReactNativeWebView;
}
/**
* Post JWT token to React Native WebView for mobile app authentication
*/
function postJWTToWebView() {
if (!isReactNativeWebView()) {
return;
}
try {
// Get JWT from localStorage or cookies
const jwt = getCookie("jwt") || localStorage.getItem("jwt");
if (!jwt) {
@@ -51,7 +44,6 @@ function postJWTToWebView() {
return;
}
// Post message to React Native
(window as any).ReactNativeWebView.postMessage(
JSON.stringify({
type: "AUTH_SUCCESS",
@@ -263,7 +255,6 @@ export function Auth({
userId: meRes.userId || null,
});
// Post JWT to React Native WebView if running in mobile app
postJWTToWebView();
setInternalLoggedIn(true);
@@ -431,7 +422,6 @@ export function Auth({
userId: res.userId || null,
});
// Post JWT to React Native WebView if running in mobile app
postJWTToWebView();
}, 100);
@@ -521,7 +511,6 @@ export function Auth({
userId: meRes.userId || null,
});
// Post JWT to React Native WebView if running in mobile app
postJWTToWebView();
setInternalLoggedIn(true);
@@ -670,7 +659,6 @@ export function Auth({
{!internalLoggedIn && !authLoading && !totpRequired && (
<>
{(() => {
// Check if any authentication method is available
const hasLogin = passwordLoginAllowed && !firstUser;
const hasSignup =
(passwordLoginAllowed || firstUser) && registrationAllowed;

View File

@@ -20,7 +20,6 @@ export function Host({ host, onHostConnect }: HostProps): React.ReactElement {
? host.name
: `${host.username}@${host.ip}:${host.port}`;
// Parse stats config for monitoring settings
const statsConfig = useMemo(() => {
try {
return host.statsConfig
@@ -34,7 +33,6 @@ export function Host({ host, onHostConnect }: HostProps): React.ReactElement {
const shouldShowStatus = statsConfig.statusCheckEnabled !== false;
useEffect(() => {
// Don't poll if status monitoring is disabled
if (!shouldShowStatus) {
setServerStatus("offline");
return;
@@ -56,7 +54,6 @@ export function Host({ host, onHostConnect }: HostProps): React.ReactElement {
} else if (err?.response?.status === 504) {
setServerStatus("degraded");
} else if (err?.response?.status === 404) {
// Status not available - monitoring disabled
setServerStatus("offline");
} else {
setServerStatus("offline");
@@ -67,7 +64,7 @@ export function Host({ host, onHostConnect }: HostProps): React.ReactElement {
fetchStatus();
const intervalId = window.setInterval(fetchStatus, 10000); // Poll backend every 10 seconds
const intervalId = window.setInterval(fetchStatus, 10000);
return () => {
cancelled = true;

View File

@@ -1,4 +1,3 @@
/* eslint-disable react-refresh/only-export-components */
import React, {
createContext,
useContext,

View File

@@ -48,9 +48,7 @@ export function useDragToSystemDesktop({ sshSessionId }: UseDragToSystemProps) {
store.put({ handle: dirHandle }, "lastSaveDir");
};
}
} catch {
// Failed to save directory handle
}
} catch {}
};
const isFileSystemAPISupported = () => {

View File

@@ -323,7 +323,6 @@ function createApiInstance(
if (isSessionExpired && typeof window !== "undefined") {
console.warn("Session expired - please log in again");
// Clear the JWT cookie to prevent reload loop
document.cookie =
"jwt=; expires=Thu, 01 Jan 1970 00:00:00 UTC; path=/;";