diff --git a/.github/workflows/electron.yml b/.github/workflows/electron.yml
index bd1a5b4c..2fcd3d55 100644
--- a/.github/workflows/electron.yml
+++ b/.github/workflows/electron.yml
@@ -27,7 +27,7 @@ on:
jobs:
build-windows:
runs-on: windows-latest
- if: github.event.inputs.build_type == 'all' || github.event.inputs.build_type == 'windows' || github.event.inputs.build_type == ''
+ if: (github.event.inputs.build_type == 'all' || github.event.inputs.build_type == 'windows' || github.event.inputs.build_type == '') && github.event.inputs.artifact_destination != 'submit'
permissions:
contents: write
@@ -72,10 +72,6 @@ jobs:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: npm run build && npx electron-builder --win --x64 --ia32
- - name: List release files
- run: |
- dir release
-
- name: Upload Windows x64 NSIS Installer
uses: actions/upload-artifact@v4
if: hashFiles('release/termix_windows_x64_nsis.exe') != '' && github.event.inputs.artifact_destination != 'none'
@@ -136,7 +132,7 @@ jobs:
build-linux:
runs-on: blacksmith-4vcpu-ubuntu-2404
- if: github.event.inputs.build_type == 'all' || github.event.inputs.build_type == 'linux' || github.event.inputs.build_type == ''
+ if: (github.event.inputs.build_type == 'all' || github.event.inputs.build_type == 'linux' || github.event.inputs.build_type == '') && github.event.inputs.artifact_destination != 'submit'
permissions:
contents: write
@@ -199,17 +195,6 @@ jobs:
cd ..
- - name: List release files
- run: |
- ls -la release/
-
- - name: Debug electron-builder output
- if: always()
- run: |
- if [ -f "release/builder-debug.yml" ]; then
- cat release/builder-debug.yml
- fi
-
- name: Upload Linux x64 AppImage
uses: actions/upload-artifact@v4
if: hashFiles('release/termix_linux_x64_appimage.AppImage') != '' && github.event.inputs.artifact_destination != 'none'
@@ -282,6 +267,93 @@ jobs:
path: release/termix_linux_armv7l_portable.tar.gz
retention-days: 30
+ - name: Install Flatpak builder and dependencies
+ run: |
+ sudo apt-get update
+ sudo apt-get install -y flatpak flatpak-builder imagemagick
+
+ - name: Add Flathub repository
+ run: |
+ sudo flatpak remote-add --if-not-exists flathub https://flathub.org/repo/flathub.flatpakrepo
+
+ - name: Install Flatpak runtime and SDK
+ run: |
+ sudo flatpak install -y flathub org.freedesktop.Platform//24.08
+ sudo flatpak install -y flathub org.freedesktop.Sdk//24.08
+ sudo flatpak install -y flathub org.electronjs.Electron2.BaseApp//24.08
+
+ - name: Get version for Flatpak
+ id: flatpak-version
+ run: |
+ VERSION=$(node -p "require('./package.json').version")
+ RELEASE_DATE=$(date +%Y-%m-%d)
+ echo "version=$VERSION" >> $GITHUB_OUTPUT
+ echo "release_date=$RELEASE_DATE" >> $GITHUB_OUTPUT
+
+ - name: Prepare Flatpak files
+ run: |
+ VERSION="${{ steps.flatpak-version.outputs.version }}"
+ RELEASE_DATE="${{ steps.flatpak-version.outputs.release_date }}"
+
+ CHECKSUM_X64=$(sha256sum "release/termix_linux_x64_appimage.AppImage" | awk '{print $1}')
+ CHECKSUM_ARM64=$(sha256sum "release/termix_linux_arm64_appimage.AppImage" | awk '{print $1}')
+
+ mkdir -p flatpak-build
+ cp flatpak/com.karmaa.termix.yml flatpak-build/
+ cp flatpak/com.karmaa.termix.desktop flatpak-build/
+ cp flatpak/com.karmaa.termix.metainfo.xml flatpak-build/
+ cp public/icon.svg flatpak-build/com.karmaa.termix.svg
+ convert public/icon.png -resize 256x256 flatpak-build/icon-256.png
+ convert public/icon.png -resize 128x128 flatpak-build/icon-128.png
+
+ cd flatpak-build
+ sed -i "s|https://github.com/Termix-SSH/Termix/releases/download/release-VERSION_PLACEHOLDER-tag/termix_linux_x64_appimage.AppImage|file://$(realpath ../release/termix_linux_x64_appimage.AppImage)|g" com.karmaa.termix.yml
+ sed -i "s|https://github.com/Termix-SSH/Termix/releases/download/release-VERSION_PLACEHOLDER-tag/termix_linux_arm64_appimage.AppImage|file://$(realpath ../release/termix_linux_arm64_appimage.AppImage)|g" com.karmaa.termix.yml
+ sed -i "s/CHECKSUM_X64_PLACEHOLDER/$CHECKSUM_X64/g" com.karmaa.termix.yml
+ sed -i "s/CHECKSUM_ARM64_PLACEHOLDER/$CHECKSUM_ARM64/g" com.karmaa.termix.yml
+ sed -i "s/VERSION_PLACEHOLDER/$VERSION/g" com.karmaa.termix.metainfo.xml
+ sed -i "s/DATE_PLACEHOLDER/$RELEASE_DATE/g" com.karmaa.termix.metainfo.xml
+
+ - name: Build Flatpak bundle
+ run: |
+ cd flatpak-build
+ flatpak-builder --repo=repo --force-clean --disable-rofiles-fuse build-dir com.karmaa.termix.yml
+
+ # Determine the architecture
+ ARCH=$(uname -m)
+ if [ "$ARCH" = "x86_64" ]; then
+ FLATPAK_ARCH="x86_64"
+ elif [ "$ARCH" = "aarch64" ]; then
+ FLATPAK_ARCH="aarch64"
+ else
+ FLATPAK_ARCH="$ARCH"
+ fi
+
+ # Build bundle for the current architecture
+          flatpak build-bundle --arch="$FLATPAK_ARCH" repo ../release/termix_linux_flatpak.flatpak com.karmaa.termix --runtime-repo=https://flathub.org/repo/flathub.flatpakrepo
+
+ - name: Create flatpakref file
+ run: |
+ VERSION="${{ steps.flatpak-version.outputs.version }}"
+ cp flatpak/com.karmaa.termix.flatpakref release/
+ sed -i "s|VERSION_PLACEHOLDER|release-${VERSION}-tag|g" release/com.karmaa.termix.flatpakref
+
+ - name: Upload Flatpak bundle
+ uses: actions/upload-artifact@v4
+ if: hashFiles('release/termix_linux_flatpak.flatpak') != '' && github.event.inputs.artifact_destination != 'none'
+ with:
+ name: termix_linux_flatpak
+ path: release/termix_linux_flatpak.flatpak
+ retention-days: 30
+
+ - name: Upload Flatpakref
+ uses: actions/upload-artifact@v4
+ if: hashFiles('release/com.karmaa.termix.flatpakref') != '' && github.event.inputs.artifact_destination != 'none'
+ with:
+ name: termix_linux_flatpakref
+ path: release/com.karmaa.termix.flatpakref
+ retention-days: 30
+
build-macos:
runs-on: macos-latest
if: github.event.inputs.build_type == 'macos' || github.event.inputs.build_type == 'all'
@@ -425,11 +497,6 @@ jobs:
export GH_TOKEN="${{ secrets.GITHUB_TOKEN }}"
npx electron-builder --mac dmg --universal --x64 --arm64 --publish never
- - name: List release directory
- if: steps.check_certs.outputs.has_certs == 'true'
- run: |
- ls -R release/ || echo "Release directory not found"
-
- name: Upload macOS MAS PKG
if: steps.check_certs.outputs.has_certs == 'true' && hashFiles('release/termix_macos_universal_mas.pkg') != '' && (github.event.inputs.artifact_destination == 'file' || github.event.inputs.artifact_destination == 'release' || github.event.inputs.artifact_destination == 'submit')
uses: actions/upload-artifact@v4
@@ -463,42 +530,51 @@ jobs:
path: release/termix_macos_arm64_dmg.dmg
retention-days: 30
- - name: Check for App Store Connect API credentials
- if: steps.check_certs.outputs.has_certs == 'true'
- id: check_asc_creds
+ - name: Get version for Homebrew
+ id: homebrew-version
run: |
- if [ -n "${{ secrets.APPLE_KEY_ID }}" ] && [ -n "${{ secrets.APPLE_ISSUER_ID }}" ] && [ -n "${{ secrets.APPLE_KEY_CONTENT }}" ]; then
- echo "has_credentials=true" >> $GITHUB_OUTPUT
- fi
+ VERSION=$(node -p "require('./package.json').version")
+ echo "version=$VERSION" >> $GITHUB_OUTPUT
- - name: Setup Ruby for Fastlane
- if: steps.check_asc_creds.outputs.has_credentials == 'true' && github.event.inputs.artifact_destination == 'submit'
- uses: ruby/setup-ruby@v1
+ - name: Generate Homebrew Cask
+ if: hashFiles('release/termix_macos_universal_dmg.dmg') != '' && (github.event.inputs.artifact_destination == 'file' || github.event.inputs.artifact_destination == 'release')
+ run: |
+ VERSION="${{ steps.homebrew-version.outputs.version }}"
+ DMG_PATH="release/termix_macos_universal_dmg.dmg"
+
+ CHECKSUM=$(shasum -a 256 "$DMG_PATH" | awk '{print $1}')
+
+ mkdir -p homebrew-generated
+ cp Casks/termix.rb homebrew-generated/termix.rb
+
+ sed -i '' "s/VERSION_PLACEHOLDER/$VERSION/g" homebrew-generated/termix.rb
+ sed -i '' "s/CHECKSUM_PLACEHOLDER/$CHECKSUM/g" homebrew-generated/termix.rb
+ sed -i '' "s|version \".*\"|version \"$VERSION\"|g" homebrew-generated/termix.rb
+ sed -i '' "s|sha256 \".*\"|sha256 \"$CHECKSUM\"|g" homebrew-generated/termix.rb
+ sed -i '' "s|release-[0-9.]*-tag|release-$VERSION-tag|g" homebrew-generated/termix.rb
+
+ - name: Upload Homebrew Cask as artifact
+ uses: actions/upload-artifact@v4
+ if: hashFiles('homebrew-generated/termix.rb') != '' && github.event.inputs.artifact_destination == 'file'
with:
- ruby-version: "3.2"
- bundler-cache: false
+ name: termix_macos_homebrew_cask
+ path: homebrew-generated/termix.rb
+ retention-days: 30
- - name: Install Fastlane
- if: steps.check_asc_creds.outputs.has_credentials == 'true' && github.event.inputs.artifact_destination == 'submit'
+ - name: Upload Homebrew Cask to release
+ if: hashFiles('homebrew-generated/termix.rb') != '' && github.event.inputs.artifact_destination == 'release'
+ env:
+ GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
- gem install fastlane -N
+ VERSION="${{ steps.homebrew-version.outputs.version }}"
+ RELEASE_TAG="release-$VERSION-tag"
- - name: Deploy to App Store Connect (TestFlight)
- if: steps.check_asc_creds.outputs.has_credentials == 'true' && github.event.inputs.artifact_destination == 'submit'
- run: |
- PKG_FILE=$(find release -name "*.pkg" -type f | head -n 1)
- if [ -z "$PKG_FILE" ]; then
+ gh release list --repo ${{ github.repository }} --limit 100 | grep -q "$RELEASE_TAG" || {
+ echo "Release $RELEASE_TAG not found"
exit 1
- fi
+ }
- mkdir -p ~/private_keys
- echo "${{ secrets.APPLE_KEY_CONTENT }}" | base64 --decode > ~/private_keys/AuthKey_${{ secrets.APPLE_KEY_ID }}.p8
-
- xcrun altool --upload-app -f "$PKG_FILE" \
- --type macos \
- --apiKey "${{ secrets.APPLE_KEY_ID }}" \
- --apiIssuer "${{ secrets.APPLE_ISSUER_ID }}"
- continue-on-error: true
+ gh release upload "$RELEASE_TAG" homebrew-generated/termix.rb --repo ${{ github.repository }} --clobber
- name: Clean up keychains
if: always()
@@ -509,7 +585,6 @@ jobs:
submit-to-chocolatey:
runs-on: windows-latest
if: github.event.inputs.artifact_destination == 'submit'
- needs: [build-windows]
permissions:
contents: read
@@ -525,20 +600,25 @@ jobs:
$VERSION = (Get-Content package.json | ConvertFrom-Json).version
echo "version=$VERSION" >> $env:GITHUB_OUTPUT
- - name: Download Windows x64 MSI artifact
- uses: actions/download-artifact@v4
- with:
- name: termix_windows_x64_msi
- path: artifact
-
- - name: Get MSI file info
+ - name: Download and prepare MSI info from public release
id: msi-info
run: |
$VERSION = "${{ steps.package-version.outputs.version }}"
- $MSI_FILE = Get-ChildItem -Path artifact -Filter "*.msi" | Select-Object -First 1
- $MSI_NAME = $MSI_FILE.Name
- $CHECKSUM = (Get-FileHash -Path $MSI_FILE.FullName -Algorithm SHA256).Hash
+ $MSI_NAME = "termix_windows_x64_msi.msi"
+ $DOWNLOAD_URL = "https://github.com/Termix-SSH/Termix/releases/download/release-$($VERSION)-tag/$($MSI_NAME)"
+ Write-Host "Downloading from $DOWNLOAD_URL"
+ New-Item -ItemType Directory -Force -Path "release_asset"
+ $DOWNLOAD_PATH = "release_asset\$MSI_NAME"
+
+ try {
+ Invoke-WebRequest -Uri $DOWNLOAD_URL -OutFile $DOWNLOAD_PATH -UseBasicParsing
+ } catch {
+ Write-Error "Failed to download MSI from $DOWNLOAD_URL. Please ensure the release and asset exist."
+ exit 1
+ }
+
+ $CHECKSUM = (Get-FileHash -Path $DOWNLOAD_PATH -Algorithm SHA256).Hash
echo "msi_name=$MSI_NAME" >> $env:GITHUB_OUTPUT
echo "checksum=$CHECKSUM" >> $env:GITHUB_OUTPUT
@@ -610,7 +690,7 @@ jobs:
submit-to-flatpak:
runs-on: ubuntu-latest
if: github.event.inputs.artifact_destination == 'submit'
- needs: [build-linux]
+ needs: []
permissions:
contents: read
@@ -628,30 +708,27 @@ jobs:
echo "version=$VERSION" >> $GITHUB_OUTPUT
echo "release_date=$RELEASE_DATE" >> $GITHUB_OUTPUT
- - name: Download Linux x64 AppImage artifact
- uses: actions/download-artifact@v4
- with:
- name: termix_linux_x64_appimage
- path: artifact-x64
-
- - name: Download Linux arm64 AppImage artifact
- uses: actions/download-artifact@v4
- with:
- name: termix_linux_arm64_appimage
- path: artifact-arm64
-
- - name: Get AppImage file info
+ - name: Download and prepare AppImage info from public release
id: appimage-info
run: |
VERSION="${{ steps.package-version.outputs.version }}"
+ mkdir -p release_assets
- APPIMAGE_X64_FILE=$(find artifact-x64 -name "*.AppImage" -type f | head -n 1)
- APPIMAGE_X64_NAME=$(basename "$APPIMAGE_X64_FILE")
- CHECKSUM_X64=$(sha256sum "$APPIMAGE_X64_FILE" | awk '{print $1}')
+ APPIMAGE_X64_NAME="termix_linux_x64_appimage.AppImage"
+ URL_X64="https://github.com/Termix-SSH/Termix/releases/download/release-$VERSION-tag/$APPIMAGE_X64_NAME"
+ PATH_X64="release_assets/$APPIMAGE_X64_NAME"
+ echo "Downloading x64 AppImage from $URL_X64"
+          curl -fL -o "$PATH_X64" "$URL_X64"
+ chmod +x "$PATH_X64"
+ CHECKSUM_X64=$(sha256sum "$PATH_X64" | awk '{print $1}')
- APPIMAGE_ARM64_FILE=$(find artifact-arm64 -name "*.AppImage" -type f | head -n 1)
- APPIMAGE_ARM64_NAME=$(basename "$APPIMAGE_ARM64_FILE")
- CHECKSUM_ARM64=$(sha256sum "$APPIMAGE_ARM64_FILE" | awk '{print $1}')
+ APPIMAGE_ARM64_NAME="termix_linux_arm64_appimage.AppImage"
+ URL_ARM64="https://github.com/Termix-SSH/Termix/releases/download/release-$VERSION-tag/$APPIMAGE_ARM64_NAME"
+ PATH_ARM64="release_assets/$APPIMAGE_ARM64_NAME"
+ echo "Downloading arm64 AppImage from $URL_ARM64"
+          curl -fL -o "$PATH_ARM64" "$URL_ARM64"
+ chmod +x "$PATH_ARM64"
+ CHECKSUM_ARM64=$(sha256sum "$PATH_ARM64" | awk '{print $1}')
echo "appimage_x64_name=$APPIMAGE_X64_NAME" >> $GITHUB_OUTPUT
echo "checksum_x64=$CHECKSUM_X64" >> $GITHUB_OUTPUT
@@ -690,10 +767,6 @@ jobs:
sed -i "s/VERSION_PLACEHOLDER/$VERSION/g" flatpak-submission/com.karmaa.termix.metainfo.xml
sed -i "s/DATE_PLACEHOLDER/$RELEASE_DATE/g" flatpak-submission/com.karmaa.termix.metainfo.xml
- - name: List submission files
- run: |
- ls -la flatpak-submission/
-
- name: Upload Flatpak submission as artifact
uses: actions/upload-artifact@v4
with:
@@ -704,7 +777,7 @@ jobs:
submit-to-homebrew:
runs-on: macos-latest
if: github.event.inputs.artifact_destination == 'submit'
- needs: [build-macos]
+ needs: []
permissions:
contents: read
@@ -720,19 +793,19 @@ jobs:
VERSION=$(node -p "require('./package.json').version")
echo "version=$VERSION" >> $GITHUB_OUTPUT
- - name: Download macOS Universal DMG artifact
- uses: actions/download-artifact@v4
- with:
- name: termix_macos_universal_dmg
- path: artifact
-
- - name: Get DMG file info
+ - name: Download and prepare DMG info from public release
id: dmg-info
run: |
VERSION="${{ steps.package-version.outputs.version }}"
- DMG_FILE=$(find artifact -name "*.dmg" -type f | head -n 1)
- DMG_NAME=$(basename "$DMG_FILE")
- CHECKSUM=$(shasum -a 256 "$DMG_FILE" | awk '{print $1}')
+ DMG_NAME="termix_macos_universal_dmg.dmg"
+ URL="https://github.com/Termix-SSH/Termix/releases/download/release-$VERSION-tag/$DMG_NAME"
+
+ mkdir -p release_asset
+          DMG_PATH="release_asset/$DMG_NAME"
+          echo "Downloading DMG from $URL"
+          curl -fL -o "$DMG_PATH" "$URL"
+
+          CHECKSUM=$(shasum -a 256 "$DMG_PATH" | awk '{print $1}')
echo "dmg_name=$DMG_NAME" >> $GITHUB_OUTPUT
echo "checksum=$CHECKSUM" >> $GITHUB_OUTPUT
@@ -745,23 +818,15 @@ jobs:
mkdir -p homebrew-submission/Casks/t
- cp homebrew/termix.rb homebrew-submission/Casks/t/termix.rb
+ cp Casks/termix.rb homebrew-submission/Casks/t/termix.rb
sed -i '' "s/VERSION_PLACEHOLDER/$VERSION/g" homebrew-submission/Casks/t/termix.rb
sed -i '' "s/CHECKSUM_PLACEHOLDER/$CHECKSUM/g" homebrew-submission/Casks/t/termix.rb
- name: Verify Cask syntax
run: |
- if ! command -v brew &> /dev/null; then
- /bin/bash -c "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/HEAD/install.sh)"
- fi
-
ruby -c homebrew-submission/Casks/t/termix.rb
- - name: List submission files
- run: |
- find homebrew-submission -type f
-
- name: Upload Homebrew submission as artifact
uses: actions/upload-artifact@v4
with:
@@ -789,10 +854,6 @@ jobs:
env:
GH_TOKEN: ${{ github.token }}
- - name: Display artifact structure
- run: |
- ls -R artifacts/
-
- name: Upload artifacts to latest release
run: |
cd artifacts
@@ -808,3 +869,130 @@ jobs:
done
env:
GH_TOKEN: ${{ github.token }}
+
+ submit-to-testflight:
+ runs-on: macos-latest
+ if: github.event.inputs.artifact_destination == 'submit'
+ needs: []
+ permissions:
+ contents: write
+
+ steps:
+ - name: Checkout repository
+ uses: actions/checkout@v5
+ with:
+ fetch-depth: 1
+
+ - name: Setup Node.js
+ uses: actions/setup-node@v4
+ with:
+ node-version: "20"
+ cache: "npm"
+
+ - name: Install dependencies
+ run: |
+ for i in 1 2 3;
+ do
+ if npm ci; then
+ break
+ else
+ if [ $i -eq 3 ]; then
+ exit 1
+ fi
+ sleep 10
+ fi
+ done
+ npm install --force @rollup/rollup-darwin-arm64
+ npm install dmg-license
+
+ - name: Check for Code Signing Certificates
+ id: check_certs
+ run: |
+ if [ -n "${{ secrets.MAC_BUILD_CERTIFICATE_BASE64 }}" ] && [ -n "${{ secrets.MAC_P12_PASSWORD }}" ]; then
+ echo "has_certs=true" >> $GITHUB_OUTPUT
+ fi
+
+ - name: Import Code Signing Certificates
+ if: steps.check_certs.outputs.has_certs == 'true'
+ env:
+ MAC_BUILD_CERTIFICATE_BASE64: ${{ secrets.MAC_BUILD_CERTIFICATE_BASE64 }}
+ MAC_INSTALLER_CERTIFICATE_BASE64: ${{ secrets.MAC_INSTALLER_CERTIFICATE_BASE64 }}
+ MAC_P12_PASSWORD: ${{ secrets.MAC_P12_PASSWORD }}
+ MAC_KEYCHAIN_PASSWORD: ${{ secrets.MAC_KEYCHAIN_PASSWORD }}
+ run: |
+ APP_CERT_PATH=$RUNNER_TEMP/app_certificate.p12
+ INSTALLER_CERT_PATH=$RUNNER_TEMP/installer_certificate.p12
+ KEYCHAIN_PATH=$RUNNER_TEMP/app-signing.keychain-db
+
+ echo -n "$MAC_BUILD_CERTIFICATE_BASE64" | base64 --decode -o $APP_CERT_PATH
+
+ if [ -n "$MAC_INSTALLER_CERTIFICATE_BASE64" ]; then
+ echo -n "$MAC_INSTALLER_CERTIFICATE_BASE64" | base64 --decode -o $INSTALLER_CERT_PATH
+ fi
+
+ security create-keychain -p "$MAC_KEYCHAIN_PASSWORD" $KEYCHAIN_PATH
+ security set-keychain-settings -lut 21600 $KEYCHAIN_PATH
+ security unlock-keychain -p "$MAC_KEYCHAIN_PASSWORD" $KEYCHAIN_PATH
+
+ security import $APP_CERT_PATH -P "$MAC_P12_PASSWORD" -A -t cert -f pkcs12 -k $KEYCHAIN_PATH
+
+ if [ -f "$INSTALLER_CERT_PATH" ]; then
+ security import $INSTALLER_CERT_PATH -P "$MAC_P12_PASSWORD" -A -t cert -f pkcs12 -k $KEYCHAIN_PATH
+ fi
+
+ security list-keychain -d user -s $KEYCHAIN_PATH
+
+ security find-identity -v -p codesigning $KEYCHAIN_PATH
+
+ - name: Build macOS App Store Package
+ if: steps.check_certs.outputs.has_certs == 'true'
+ env:
+ ELECTRON_BUILDER_ALLOW_UNRESOLVED_DEPENDENCIES: true
+ GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+ run: |
+ CURRENT_VERSION=$(node -p "require('./package.json').version")
+ BUILD_VERSION="${{ github.run_number }}"
+
+ npm run build && npx electron-builder --mac mas --universal --config.buildVersion="$BUILD_VERSION"
+
+ - name: Check for App Store Connect API credentials
+ id: check_asc_creds
+ run: |
+ if [ -n "${{ secrets.APPLE_KEY_ID }}" ] && [ -n "${{ secrets.APPLE_ISSUER_ID }}" ] && [ -n "${{ secrets.APPLE_KEY_CONTENT }}" ]; then
+ echo "has_credentials=true" >> $GITHUB_OUTPUT
+ fi
+
+ - name: Setup Ruby for Fastlane
+ if: steps.check_asc_creds.outputs.has_credentials == 'true'
+ uses: ruby/setup-ruby@v1
+ with:
+ ruby-version: "3.2"
+ bundler-cache: false
+
+ - name: Install Fastlane
+ if: steps.check_asc_creds.outputs.has_credentials == 'true'
+ run: |
+ gem install fastlane -N
+
+ - name: Deploy to App Store Connect (TestFlight)
+ if: steps.check_asc_creds.outputs.has_credentials == 'true'
+ run: |
+          PKG_FILE=$(find release -name "*.pkg" -type f | head -n 1)
+ if [ -z "$PKG_FILE" ]; then
+ echo "PKG file not found, exiting."
+ exit 1
+ fi
+
+ mkdir -p ~/private_keys
+ echo "${{ secrets.APPLE_KEY_CONTENT }}" | base64 --decode > ~/private_keys/AuthKey_${{ secrets.APPLE_KEY_ID }}.p8
+
+ xcrun altool --upload-app -f "$PKG_FILE" \
+ --type macos \
+ --apiKey "${{ secrets.APPLE_KEY_ID }}" \
+ --apiIssuer "${{ secrets.APPLE_ISSUER_ID }}"
+ continue-on-error: true
+
+ - name: Clean up keychains
+ if: always()
+ run: |
+ security delete-keychain $RUNNER_TEMP/app-signing.keychain-db || true
diff --git a/.github/workflows/translate.yml b/.github/workflows/translate.yml
index 416c2e5b..7c9db568 100644
--- a/.github/workflows/translate.yml
+++ b/.github/workflows/translate.yml
@@ -3,26 +3,435 @@ name: Auto Translate
on:
workflow_dispatch:
+permissions:
+ contents: write
+ pull-requests: write
+
jobs:
- translate:
+ translate-zh:
runs-on: ubuntu-latest
-
steps:
- - name: Checkout code
- uses: actions/checkout@v4
-
- - name: Translate with i18n-ai-translate
- uses: taahamahdi/i18n-ai-translate@master
+ - uses: actions/checkout@v4
+ - uses: actions/setup-node@v4
with:
- json-file-path: src/locales/en/translation.json
- api-key: ${{ secrets.GEMINI_API_KEY }}
- engine: gemini
- output-languages: de fr it ko pt-br ru zh
+ node-version: "20"
+ - run: npx i18n-auto-translation -k ${{ secrets.GOOGLE_TRANSLATE_API_KEY }} -d "src/locales" -f en -t zh --maxLinesPerRequest 1
+ - uses: actions/upload-artifact@v4
+ with:
+ name: translations-zh
+ path: src/locales/zh.json
+ continue-on-error: true
- - name: Commit and push translations
+ translate-ru:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v4
+ - uses: actions/setup-node@v4
+ with:
+ node-version: "20"
+ - run: npx i18n-auto-translation -k ${{ secrets.GOOGLE_TRANSLATE_API_KEY }} -d "src/locales" -f en -t ru --maxLinesPerRequest 1
+ - uses: actions/upload-artifact@v4
+ with:
+ name: translations-ru
+ path: src/locales/ru.json
+ continue-on-error: true
+
+ translate-pt:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v4
+ - uses: actions/setup-node@v4
+ with:
+ node-version: "20"
+ - run: npx i18n-auto-translation -k ${{ secrets.GOOGLE_TRANSLATE_API_KEY }} -d "src/locales" -f en -t pt --maxLinesPerRequest 1
+ - uses: actions/upload-artifact@v4
+ with:
+ name: translations-pt
+ path: src/locales/pt.json
+ continue-on-error: true
+
+ translate-fr:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v4
+ - uses: actions/setup-node@v4
+ with:
+ node-version: "20"
+ - run: npx i18n-auto-translation -k ${{ secrets.GOOGLE_TRANSLATE_API_KEY }} -d "src/locales" -f en -t fr --maxLinesPerRequest 1
+ - uses: actions/upload-artifact@v4
+ with:
+ name: translations-fr
+ path: src/locales/fr.json
+ continue-on-error: true
+
+ translate-es:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v4
+ - uses: actions/setup-node@v4
+ with:
+ node-version: "20"
+ - run: npx i18n-auto-translation -k ${{ secrets.GOOGLE_TRANSLATE_API_KEY }} -d "src/locales" -f en -t es --maxLinesPerRequest 1
+ - uses: actions/upload-artifact@v4
+ with:
+ name: translations-es
+ path: src/locales/es.json
+ continue-on-error: true
+
+ translate-de:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v4
+ - uses: actions/setup-node@v4
+ with:
+ node-version: "20"
+ - run: npx i18n-auto-translation -k ${{ secrets.GOOGLE_TRANSLATE_API_KEY }} -d "src/locales" -f en -t de --maxLinesPerRequest 1
+ - uses: actions/upload-artifact@v4
+ with:
+ name: translations-de
+ path: src/locales/de.json
+ continue-on-error: true
+
+ translate-hi:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v4
+ - uses: actions/setup-node@v4
+ with:
+ node-version: "20"
+ - run: npx i18n-auto-translation -k ${{ secrets.GOOGLE_TRANSLATE_API_KEY }} -d "src/locales" -f en -t hi --maxLinesPerRequest 1
+ - uses: actions/upload-artifact@v4
+ with:
+ name: translations-hi
+ path: src/locales/hi.json
+ continue-on-error: true
+
+ translate-bn:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v4
+ - uses: actions/setup-node@v4
+ with:
+ node-version: "20"
+ - run: npx i18n-auto-translation -k ${{ secrets.GOOGLE_TRANSLATE_API_KEY }} -d "src/locales" -f en -t bn --maxLinesPerRequest 1
+ - uses: actions/upload-artifact@v4
+ with:
+ name: translations-bn
+ path: src/locales/bn.json
+ continue-on-error: true
+
+ translate-ja:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v4
+ - uses: actions/setup-node@v4
+ with:
+ node-version: "20"
+ - run: npx i18n-auto-translation -k ${{ secrets.GOOGLE_TRANSLATE_API_KEY }} -d "src/locales" -f en -t ja --maxLinesPerRequest 1
+ - uses: actions/upload-artifact@v4
+ with:
+ name: translations-ja
+ path: src/locales/ja.json
+ continue-on-error: true
+
+ translate-vi:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v4
+ - uses: actions/setup-node@v4
+ with:
+ node-version: "20"
+ - run: npx i18n-auto-translation -k ${{ secrets.GOOGLE_TRANSLATE_API_KEY }} -d "src/locales" -f en -t vi --maxLinesPerRequest 1
+ - uses: actions/upload-artifact@v4
+ with:
+ name: translations-vi
+ path: src/locales/vi.json
+ continue-on-error: true
+
+ translate-tr:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v4
+ - uses: actions/setup-node@v4
+ with:
+ node-version: "20"
+ - run: npx i18n-auto-translation -k ${{ secrets.GOOGLE_TRANSLATE_API_KEY }} -d "src/locales" -f en -t tr --maxLinesPerRequest 1
+ - uses: actions/upload-artifact@v4
+ with:
+ name: translations-tr
+ path: src/locales/tr.json
+ continue-on-error: true
+
+ translate-ko:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v4
+ - uses: actions/setup-node@v4
+ with:
+ node-version: "20"
+ - run: npx i18n-auto-translation -k ${{ secrets.GOOGLE_TRANSLATE_API_KEY }} -d "src/locales" -f en -t ko --maxLinesPerRequest 1
+ - uses: actions/upload-artifact@v4
+ with:
+ name: translations-ko
+ path: src/locales/ko.json
+ continue-on-error: true
+
+ translate-it:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v4
+ - uses: actions/setup-node@v4
+ with:
+ node-version: "20"
+ - run: npx i18n-auto-translation -k ${{ secrets.GOOGLE_TRANSLATE_API_KEY }} -d "src/locales" -f en -t it --maxLinesPerRequest 1
+ - uses: actions/upload-artifact@v4
+ with:
+ name: translations-it
+ path: src/locales/it.json
+ continue-on-error: true
+
+ translate-he:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v4
+ - uses: actions/setup-node@v4
+ with:
+ node-version: "20"
+ - run: npx i18n-auto-translation -k ${{ secrets.GOOGLE_TRANSLATE_API_KEY }} -d "src/locales" -f en -t he --maxLinesPerRequest 1
+ - uses: actions/upload-artifact@v4
+ with:
+ name: translations-he
+ path: src/locales/he.json
+ continue-on-error: true
+
+ translate-ar:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v4
+ - uses: actions/setup-node@v4
+ with:
+ node-version: "20"
+ - run: npx i18n-auto-translation -k ${{ secrets.GOOGLE_TRANSLATE_API_KEY }} -d "src/locales" -f en -t ar --maxLinesPerRequest 1
+ - uses: actions/upload-artifact@v4
+ with:
+ name: translations-ar
+ path: src/locales/ar.json
+ continue-on-error: true
+
+ translate-pl:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v4
+ - uses: actions/setup-node@v4
+ with:
+ node-version: "20"
+ - run: npx i18n-auto-translation -k ${{ secrets.GOOGLE_TRANSLATE_API_KEY }} -d "src/locales" -f en -t pl --maxLinesPerRequest 1
+ - uses: actions/upload-artifact@v4
+ with:
+ name: translations-pl
+ path: src/locales/pl.json
+ continue-on-error: true
+
+ translate-nl:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v4
+ - uses: actions/setup-node@v4
+ with:
+ node-version: "20"
+ - run: npx i18n-auto-translation -k ${{ secrets.GOOGLE_TRANSLATE_API_KEY }} -d "src/locales" -f en -t nl --maxLinesPerRequest 1
+ - uses: actions/upload-artifact@v4
+ with:
+ name: translations-nl
+ path: src/locales/nl.json
+ continue-on-error: true
+
+ translate-sv:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v4
+ - uses: actions/setup-node@v4
+ with:
+ node-version: "20"
+ - run: npx i18n-auto-translation -k ${{ secrets.GOOGLE_TRANSLATE_API_KEY }} -d "src/locales" -f en -t sv --maxLinesPerRequest 1
+ - uses: actions/upload-artifact@v4
+ with:
+ name: translations-sv
+ path: src/locales/sv.json
+ continue-on-error: true
+
+ translate-id:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v4
+ - uses: actions/setup-node@v4
+ with:
+ node-version: "20"
+ - run: npx i18n-auto-translation -k ${{ secrets.GOOGLE_TRANSLATE_API_KEY }} -d "src/locales" -f en -t id --maxLinesPerRequest 1
+ - uses: actions/upload-artifact@v4
+ with:
+ name: translations-id
+ path: src/locales/id.json
+ continue-on-error: true
+
+ translate-th:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v4
+ - uses: actions/setup-node@v4
+ with:
+ node-version: "20"
+ - run: npx i18n-auto-translation -k ${{ secrets.GOOGLE_TRANSLATE_API_KEY }} -d "src/locales" -f en -t th --maxLinesPerRequest 1
+ - uses: actions/upload-artifact@v4
+ with:
+ name: translations-th
+ path: src/locales/th.json
+ continue-on-error: true
+
+ translate-uk:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v4
+ - uses: actions/setup-node@v4
+ with:
+ node-version: "20"
+ - run: npx i18n-auto-translation -k ${{ secrets.GOOGLE_TRANSLATE_API_KEY }} -d "src/locales" -f en -t uk --maxLinesPerRequest 1
+ - uses: actions/upload-artifact@v4
+ with:
+ name: translations-uk
+ path: src/locales/uk.json
+ continue-on-error: true
+
+ translate-cs:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v4
+ - uses: actions/setup-node@v4
+ with:
+ node-version: "20"
+ - run: npx i18n-auto-translation -k ${{ secrets.GOOGLE_TRANSLATE_API_KEY }} -d "src/locales" -f en -t cs --maxLinesPerRequest 1
+ - uses: actions/upload-artifact@v4
+ with:
+ name: translations-cs
+ path: src/locales/cs.json
+ continue-on-error: true
+
+ translate-ro:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v4
+ - uses: actions/setup-node@v4
+ with:
+ node-version: "20"
+ - run: npx i18n-auto-translation -k ${{ secrets.GOOGLE_TRANSLATE_API_KEY }} -d "src/locales" -f en -t ro --maxLinesPerRequest 1
+ - uses: actions/upload-artifact@v4
+ with:
+ name: translations-ro
+ path: src/locales/ro.json
+ continue-on-error: true
+
+ translate-el:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v4
+ - uses: actions/setup-node@v4
+ with:
+ node-version: "20"
+ - run: npx i18n-auto-translation -k ${{ secrets.GOOGLE_TRANSLATE_API_KEY }} -d "src/locales" -f en -t el --maxLinesPerRequest 1
+ - uses: actions/upload-artifact@v4
+ with:
+ name: translations-el
+ path: src/locales/el.json
+ continue-on-error: true
+
+ translate-nb:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v4
+ - uses: actions/setup-node@v4
+ with:
+ node-version: "20"
+ - run: npx i18n-auto-translation -k ${{ secrets.GOOGLE_TRANSLATE_API_KEY }} -d "src/locales" -f en -t nb --maxLinesPerRequest 1
+ - uses: actions/upload-artifact@v4
+ with:
+ name: translations-nb
+ path: src/locales/nb.json
+ continue-on-error: true
+
+ create-pr:
+ needs:
+ [
+ translate-zh,
+ translate-ru,
+ translate-pt,
+ translate-fr,
+ translate-es,
+ translate-de,
+ translate-hi,
+ translate-bn,
+ translate-ja,
+ translate-vi,
+ translate-tr,
+ translate-ko,
+ translate-it,
+ translate-he,
+ translate-ar,
+ translate-pl,
+ translate-nl,
+ translate-sv,
+ translate-id,
+ translate-th,
+ translate-uk,
+ translate-cs,
+ translate-ro,
+ translate-el,
+ translate-nb,
+ ]
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v4
+ with:
+ token: ${{ secrets.GHCR_TOKEN }}
+
+ - name: Download all artifacts
+ uses: actions/download-artifact@v4
+ with:
+ path: translations-temp
+
+ - name: Move translations to src/locales
run: |
- git config --local user.email "github-actions[bot]@users.noreply.github.com"
- git config --local user.name "github-actions[bot]"
- git add src/locales/
- git diff --staged --quiet || git commit -m "chore: auto-translate to multiple languages [skip ci]"
- git push
+ cp translations-temp/translations-zh/zh.json src/locales/ 2>/dev/null || true
+ cp translations-temp/translations-ru/ru.json src/locales/ 2>/dev/null || true
+ cp translations-temp/translations-pt/pt.json src/locales/ 2>/dev/null || true
+ cp translations-temp/translations-fr/fr.json src/locales/ 2>/dev/null || true
+ cp translations-temp/translations-es/es.json src/locales/ 2>/dev/null || true
+ cp translations-temp/translations-de/de.json src/locales/ 2>/dev/null || true
+ cp translations-temp/translations-hi/hi.json src/locales/ 2>/dev/null || true
+ cp translations-temp/translations-bn/bn.json src/locales/ 2>/dev/null || true
+ cp translations-temp/translations-ja/ja.json src/locales/ 2>/dev/null || true
+ cp translations-temp/translations-vi/vi.json src/locales/ 2>/dev/null || true
+ cp translations-temp/translations-tr/tr.json src/locales/ 2>/dev/null || true
+ cp translations-temp/translations-ko/ko.json src/locales/ 2>/dev/null || true
+ cp translations-temp/translations-it/it.json src/locales/ 2>/dev/null || true
+ cp translations-temp/translations-he/he.json src/locales/ 2>/dev/null || true
+ cp translations-temp/translations-ar/ar.json src/locales/ 2>/dev/null || true
+ cp translations-temp/translations-pl/pl.json src/locales/ 2>/dev/null || true
+ cp translations-temp/translations-nl/nl.json src/locales/ 2>/dev/null || true
+ cp translations-temp/translations-sv/sv.json src/locales/ 2>/dev/null || true
+ cp translations-temp/translations-id/id.json src/locales/ 2>/dev/null || true
+ cp translations-temp/translations-th/th.json src/locales/ 2>/dev/null || true
+ cp translations-temp/translations-uk/uk.json src/locales/ 2>/dev/null || true
+ cp translations-temp/translations-cs/cs.json src/locales/ 2>/dev/null || true
+ cp translations-temp/translations-ro/ro.json src/locales/ 2>/dev/null || true
+ cp translations-temp/translations-el/el.json src/locales/ 2>/dev/null || true
+ cp translations-temp/translations-nb/nb.json src/locales/ 2>/dev/null || true
+ rm -rf translations-temp
+
+ - name: Create Pull Request
+ uses: peter-evans/create-pull-request@v6
+ with:
+ token: ${{ secrets.GHCR_TOKEN }}
+ commit-message: "chore: auto-translate to multiple languages"
+ branch: translations-update
+ delete-branch: true
+ title: "chore: Update translations for all languages"
diff --git a/.gitignore b/.gitignore
index af4f217b..c3d02880 100644
--- a/.gitignore
+++ b/.gitignore
@@ -28,3 +28,4 @@ dist-ssr
/.mcp.json
/nul
/.vscode/
+/CLAUDE.md
diff --git a/README-CN.md b/README-CN.md
index a882666c..593c6a23 100644
--- a/README-CN.md
+++ b/README-CN.md
@@ -51,20 +51,22 @@ Termix 是一个开源、永久免费、自托管的一体化服务器管理平
- **SSH 终端访问** - 功能齐全的终端,具有分屏支持(最多 4 个面板)和类似浏览器的选项卡系统。包括对自定义终端的支持,包括常见终端主题、字体和其他组件
- **SSH 隧道管理** - 创建和管理 SSH 隧道,具有自动重新连接和健康监控功能
- **远程文件管理器** - 直接在远程服务器上管理文件,支持查看和编辑代码、图像、音频和视频。无缝上传、下载、重命名、删除和移动文件
+- **Docker 管理** - 启动、停止、暂停、删除容器。查看容器统计信息。使用 docker exec 终端控制容器。它不是用来替代 Portainer 或 Dockge,而是用于简单管理你的容器而不是创建它们。
- **SSH 主机管理器** - 保存、组织和管理您的 SSH 连接,支持标签和文件夹,并轻松保存可重用的登录信息,同时能够自动部署 SSH 密钥
- **服务器统计** - 在任何 SSH 服务器上查看 CPU、内存和磁盘使用情况以及网络、正常运行时间和系统信息
- **仪表板** - 在仪表板上一目了然地查看服务器信息
+- **RBAC** - 创建角色并在用户/角色之间共享主机
- **用户认证** - 安全的用户管理,具有管理员控制以及 OIDC 和 2FA (TOTP) 支持。查看所有平台上的活动用户会话并撤销权限。将您的 OIDC/本地帐户链接在一起。
- **数据库加密** - 后端存储为加密的 SQLite 数据库文件。查看[文档](https://docs.termix.site/security)了解更多信息。
- **数据导出/导入** - 导出和导入 SSH 主机、凭据和文件管理器数据
- **自动 SSL 设置** - 内置 SSL 证书生成和管理,支持 HTTPS 重定向
-- **现代用户界面** - 使用 React、Tailwind CSS 和 Shadcn 构建的简洁的桌面/移动设备友好界面
-- **语言** - 内置支持英语、中文、德语和葡萄牙语
+- **现代用户界面** - 使用 React、Tailwind CSS 和 Shadcn 构建的简洁的桌面/移动设备友好界面。可选择基于深色或浅色模式的用户界面。
+- **语言** - 内置支持约 30 种语言(通过 Google 翻译批量翻译,结果可能有所不同)
- **平台支持** - 可作为 Web 应用程序、桌面应用程序(Windows、Linux 和 macOS)以及适用于 iOS 和 Android 的专用移动/平板电脑应用程序。
- **SSH 工具** - 创建可重用的命令片段,单击即可执行。在多个打开的终端上同时运行一个命令。
- **命令历史** - 自动完成并查看以前运行的 SSH 命令
- **命令面板** - 双击左 Shift 键可快速使用键盘访问 SSH 连接
-- **SSH 功能丰富** - 支持跳板机、warpgate、基于 TOTP 的连接等。
+- **SSH 功能丰富** - 支持跳板机、warpgate、基于 TOTP 的连接、SOCKS5、密码自动填充等。
# 计划功能
@@ -140,6 +142,12 @@ volumes:
+
+
+
+
+
+
@@ -147,7 +155,7 @@ volumes:
你的浏览器不支持 video 标签。
-视频和图像可能已过时。
+某些视频和图像可能已过时或可能无法完美展示功能。
# 许可证
diff --git a/README.md b/README.md
index 80fc69e9..9ddf04d6 100644
--- a/README.md
+++ b/README.md
@@ -45,7 +45,7 @@ If you would like, you can support the project here!\
Termix is an open-source, forever-free, self-hosted all-in-one server management platform. It provides a multi-platform
solution for managing your servers and infrastructure through a single, intuitive interface. Termix offers SSH terminal
-access, SSH tunneling capabilities, and remote file management, with many more tools to come. Termix is the perfect
+access, SSH tunneling capabilities, remote file management, and many other tools. Termix is the perfect
free and self-hosted alternative to Termius available for all platforms.
# Features
@@ -53,20 +53,22 @@ free and self-hosted alternative to Termius available for all platforms.
- **SSH Terminal Access** - Full-featured terminal with split-screen support (up to 4 panels) with a browser-like tab system. Includes support for customizing the terminal including common terminal themes, fonts, and other components
- **SSH Tunnel Management** - Create and manage SSH tunnels with automatic reconnection and health monitoring
- **Remote File Manager** - Manage files directly on remote servers with support for viewing and editing code, images, audio, and video. Upload, download, rename, delete, and move files seamlessly
+- **Docker Management** - Start, stop, pause, remove containers. View container stats. Control containers using a docker exec terminal. It was not made to replace Portainer or Dockge, but rather to simply manage your existing containers rather than create them.
- **SSH Host Manager** - Save, organize, and manage your SSH connections with tags and folders, and easily save reusable login info while being able to automate the deployment of SSH keys
- **Server Stats** - View CPU, memory, and disk usage along with network, uptime, and system information on any SSH server
- **Dashboard** - View server information at a glance on your dashboard
+- **RBAC** - Create roles and share hosts across users/roles
- **User Authentication** - Secure user management with admin controls and OIDC and 2FA (TOTP) support. View active user sessions across all platforms and revoke permissions. Link your OIDC/Local accounts together.
- **Database Encryption** - Backend stored as encrypted SQLite database files. View [docs](https://docs.termix.site/security) for more.
- **Data Export/Import** - Export and import SSH hosts, credentials, and file manager data
- **Automatic SSL Setup** - Built-in SSL certificate generation and management with HTTPS redirects
-- **Modern UI** - Clean desktop/mobile-friendly interface built with React, Tailwind CSS, and Shadcn
-- **Languages** - Built-in support for English, Chinese, German, and Portuguese
+- **Modern UI** - Clean desktop/mobile-friendly interface built with React, Tailwind CSS, and Shadcn. Choose between dark or light mode based UI.
+- **Languages** - Built-in support for ~30 languages (bulk translated via Google Translate, so results may vary)
- **Platform Support** - Available as a web app, desktop application (Windows, Linux, and macOS), and dedicated mobile/tablet app for iOS and Android.
- **SSH Tools** - Create reusable command snippets that execute with a single click. Run one command simultaneously across multiple open terminals.
- **Command History** - Auto-complete and view previously ran SSH commands
- **Command Palette** - Double tap left shift to quickly access SSH connections with your keyboard
-- **SSH Feature Rich** - Supports jump hosts, warpgate, TOTP based connections, etc.
+- **SSH Feature Rich** - Supports jump hosts, warpgate, TOTP-based connections, SOCKS5, password autofill, etc.
# Planned Features
@@ -124,7 +126,7 @@ If you need help or want to request a feature with Termix, visit the [Issues](ht
Please be as detailed as possible in your issue, preferably written in English. You can also join the [Discord](https://discord.gg/jVQGdvHDrf) server and visit the support
channel, however, response times may be longer.
-# Show-off
+# Screenshots
@@ -143,6 +145,12 @@ channel, however, response times may be longer.
+
+
+
+
+
+
@@ -150,7 +158,7 @@ channel, however, response times may be longer.
Your browser does not support the video tag.
-Videos and images may be out of date.
+Some videos and images may be out of date or may not perfectly showcase features.
# License
diff --git a/docker/Dockerfile b/docker/Dockerfile
index c67b6686..f29cfb3b 100644
--- a/docker/Dockerfile
+++ b/docker/Dockerfile
@@ -53,16 +53,18 @@ ENV DATA_DIR=/app/data \
RUN apt-get update && apt-get install -y nginx gettext-base openssl && \
rm -rf /var/lib/apt/lists/* && \
- mkdir -p /app/data /app/uploads && \
- chown -R node:node /app/data /app/uploads && \
- useradd -r -s /bin/false nginx
+ mkdir -p /app/data /app/uploads /app/nginx /app/nginx/logs /app/nginx/cache /app/nginx/client_body && \
+ chown -R node:node /app && \
+ chmod 755 /app/data /app/uploads /app/nginx && \
+ touch /app/nginx/nginx.conf && \
+ chown node:node /app/nginx/nginx.conf
-COPY docker/nginx.conf /etc/nginx/nginx.conf
-COPY docker/nginx-https.conf /etc/nginx/nginx-https.conf
+COPY docker/nginx.conf /app/nginx/nginx.conf.template
+COPY docker/nginx-https.conf /app/nginx/nginx-https.conf.template
-COPY --chown=nginx:nginx --from=frontend-builder /app/dist /usr/share/nginx/html
-COPY --chown=nginx:nginx --from=frontend-builder /app/src/locales /usr/share/nginx/html/locales
-COPY --chown=nginx:nginx --from=frontend-builder /app/public/fonts /usr/share/nginx/html/fonts
+COPY --chown=node:node --from=frontend-builder /app/dist /app/html
+COPY --chown=node:node --from=frontend-builder /app/src/locales /app/html/locales
+COPY --chown=node:node --from=frontend-builder /app/public/fonts /app/html/fonts
COPY --chown=node:node --from=production-deps /app/node_modules /app/node_modules
COPY --chown=node:node --from=backend-builder /app/dist/backend ./dist/backend
@@ -74,4 +76,7 @@ EXPOSE ${PORT} 30001 30002 30003 30004 30005 30006
COPY docker/entrypoint.sh /entrypoint.sh
RUN chmod +x /entrypoint.sh
+
+USER node
+
CMD ["/entrypoint.sh"]
diff --git a/docker/entrypoint.sh b/docker/entrypoint.sh
index 67d389c2..165c9ee2 100644
--- a/docker/entrypoint.sh
+++ b/docker/entrypoint.sh
@@ -11,24 +11,21 @@ echo "Configuring web UI to run on port: $PORT"
if [ "$ENABLE_SSL" = "true" ]; then
echo "SSL enabled - using HTTPS configuration with redirect"
- NGINX_CONF_SOURCE="/etc/nginx/nginx-https.conf"
+ NGINX_CONF_SOURCE="/app/nginx/nginx-https.conf.template"
else
echo "SSL disabled - using HTTP-only configuration (default)"
- NGINX_CONF_SOURCE="/etc/nginx/nginx.conf"
+ NGINX_CONF_SOURCE="/app/nginx/nginx.conf.template"
fi
-envsubst '${PORT} ${SSL_PORT} ${SSL_CERT_PATH} ${SSL_KEY_PATH}' < $NGINX_CONF_SOURCE > /etc/nginx/nginx.conf.tmp
-mv /etc/nginx/nginx.conf.tmp /etc/nginx/nginx.conf
+envsubst '${PORT} ${SSL_PORT} ${SSL_CERT_PATH} ${SSL_KEY_PATH}' < $NGINX_CONF_SOURCE > /app/nginx/nginx.conf
mkdir -p /app/data /app/uploads
-chown -R node:node /app/data /app/uploads
-chmod 755 /app/data /app/uploads
+chmod 755 /app/data /app/uploads 2>/dev/null || true
if [ "$ENABLE_SSL" = "true" ]; then
echo "Checking SSL certificate configuration..."
mkdir -p /app/data/ssl
- chown -R node:node /app/data/ssl
- chmod 755 /app/data/ssl
+ chmod 755 /app/data/ssl 2>/dev/null || true
DOMAIN=${SSL_DOMAIN:-localhost}
@@ -84,7 +81,6 @@ EOF
chmod 600 /app/data/ssl/termix.key
chmod 644 /app/data/ssl/termix.crt
- chown node:node /app/data/ssl/termix.key /app/data/ssl/termix.crt
rm -f /app/data/ssl/openssl.conf
@@ -93,7 +89,7 @@ EOF
fi
echo "Starting nginx..."
-nginx
+nginx -c /app/nginx/nginx.conf
echo "Starting backend services..."
cd /app
@@ -110,11 +106,7 @@ else
echo "Warning: package.json not found"
fi
-if command -v su-exec > /dev/null 2>&1; then
- su-exec node node dist/backend/backend/starter.js
-else
- su -s /bin/sh node -c "node dist/backend/backend/starter.js"
-fi
+node dist/backend/backend/starter.js
echo "All services started"
diff --git a/docker/nginx-https.conf b/docker/nginx-https.conf
index 5e6126bf..7788848b 100644
--- a/docker/nginx-https.conf
+++ b/docker/nginx-https.conf
@@ -1,11 +1,22 @@
+pid /app/nginx/nginx.pid;
+error_log /app/nginx/logs/error.log warn;
+
events {
worker_connections 1024;
}
http {
- include mime.types;
+ include /etc/nginx/mime.types;
default_type application/octet-stream;
+ access_log /app/nginx/logs/access.log;
+
+ client_body_temp_path /app/nginx/client_body;
+ proxy_temp_path /app/nginx/proxy_temp;
+ fastcgi_temp_path /app/nginx/fastcgi_temp;
+ uwsgi_temp_path /app/nginx/uwsgi_temp;
+ scgi_temp_path /app/nginx/scgi_temp;
+
sendfile on;
keepalive_timeout 65;
client_header_timeout 300s;
@@ -37,9 +48,17 @@ http {
add_header X-Content-Type-Options nosniff always;
add_header X-XSS-Protection "1; mode=block" always;
+ location ~* \.(js|css|png|jpg|jpeg|gif|ico|svg|woff|woff2|ttf|eot)$ {
+ root /app/html;
+ expires 1y;
+ add_header Cache-Control "public, immutable";
+ try_files $uri =404;
+ }
+
location / {
- root /usr/share/nginx/html;
+ root /app/html;
index index.html index.htm;
+ try_files $uri $uri/ /index.html;
}
location ~* \.map$ {
@@ -93,6 +112,15 @@ http {
proxy_set_header X-Forwarded-Proto $scheme;
}
+ location ~ ^/rbac(/.*)?$ {
+ proxy_pass http://127.0.0.1:30001;
+ proxy_http_version 1.1;
+ proxy_set_header Host $host;
+ proxy_set_header X-Real-IP $remote_addr;
+ proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
+ proxy_set_header X-Forwarded-Proto $scheme;
+ }
+
location ~ ^/credentials(/.*)?$ {
proxy_pass http://127.0.0.1:30001;
proxy_http_version 1.1;
@@ -283,6 +311,10 @@ http {
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme;
+
+ proxy_connect_timeout 60s;
+ proxy_send_timeout 60s;
+ proxy_read_timeout 60s;
}
location ~ ^/uptime(/.*)?$ {
@@ -303,9 +335,45 @@ http {
proxy_set_header X-Forwarded-Proto $scheme;
}
+ location ^~ /docker/console/ {
+ proxy_pass http://127.0.0.1:30008/;
+ proxy_http_version 1.1;
+
+ proxy_set_header Upgrade $http_upgrade;
+ proxy_set_header Connection "upgrade";
+ proxy_set_header Host $host;
+ proxy_cache_bypass $http_upgrade;
+
+ proxy_set_header X-Real-IP $remote_addr;
+ proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
+ proxy_set_header X-Forwarded-Proto $scheme;
+
+ proxy_read_timeout 86400s;
+ proxy_send_timeout 86400s;
+ proxy_connect_timeout 10s;
+
+ proxy_buffering off;
+ proxy_request_buffering off;
+
+ proxy_next_upstream error timeout invalid_header http_500 http_502 http_503;
+ }
+
+ location ~ ^/docker(/.*)?$ {
+ proxy_pass http://127.0.0.1:30007;
+ proxy_http_version 1.1;
+ proxy_set_header Host $host;
+ proxy_set_header X-Real-IP $remote_addr;
+ proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
+ proxy_set_header X-Forwarded-Proto $scheme;
+
+ proxy_connect_timeout 60s;
+ proxy_send_timeout 300s;
+ proxy_read_timeout 300s;
+ }
+
error_page 500 502 503 504 /50x.html;
location = /50x.html {
- root /usr/share/nginx/html;
+ root /app/html;
}
}
}
diff --git a/docker/nginx.conf b/docker/nginx.conf
index db5546f0..ac6b7112 100644
--- a/docker/nginx.conf
+++ b/docker/nginx.conf
@@ -1,11 +1,22 @@
+pid /app/nginx/nginx.pid;
+error_log /app/nginx/logs/error.log warn;
+
events {
worker_connections 1024;
}
http {
- include mime.types;
+ include /etc/nginx/mime.types;
default_type application/octet-stream;
+ access_log /app/nginx/logs/access.log;
+
+ client_body_temp_path /app/nginx/client_body;
+ proxy_temp_path /app/nginx/proxy_temp;
+ fastcgi_temp_path /app/nginx/fastcgi_temp;
+ uwsgi_temp_path /app/nginx/uwsgi_temp;
+ scgi_temp_path /app/nginx/scgi_temp;
+
sendfile on;
keepalive_timeout 65;
client_header_timeout 300s;
@@ -27,14 +38,14 @@ http {
add_header X-XSS-Protection "1; mode=block" always;
location ~* \.(js|css|png|jpg|jpeg|gif|ico|svg|woff|woff2|ttf|eot)$ {
- root /usr/share/nginx/html;
+ root /app/html;
expires 1y;
add_header Cache-Control "public, immutable";
try_files $uri =404;
}
location / {
- root /usr/share/nginx/html;
+ root /app/html;
index index.html index.htm;
try_files $uri $uri/ /index.html;
}
@@ -90,6 +101,15 @@ http {
proxy_set_header X-Forwarded-Proto $scheme;
}
+ location ~ ^/rbac(/.*)?$ {
+ proxy_pass http://127.0.0.1:30001;
+ proxy_http_version 1.1;
+ proxy_set_header Host $host;
+ proxy_set_header X-Real-IP $remote_addr;
+ proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
+ proxy_set_header X-Forwarded-Proto $scheme;
+ }
+
location ~ ^/credentials(/.*)?$ {
proxy_pass http://127.0.0.1:30001;
proxy_http_version 1.1;
@@ -280,6 +300,10 @@ http {
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme;
+
+ proxy_connect_timeout 60s;
+ proxy_send_timeout 60s;
+ proxy_read_timeout 60s;
}
location ~ ^/uptime(/.*)?$ {
@@ -300,9 +324,45 @@ http {
proxy_set_header X-Forwarded-Proto $scheme;
}
+ location ^~ /docker/console/ {
+ proxy_pass http://127.0.0.1:30008/;
+ proxy_http_version 1.1;
+
+ proxy_set_header Upgrade $http_upgrade;
+ proxy_set_header Connection "upgrade";
+ proxy_set_header Host $host;
+ proxy_cache_bypass $http_upgrade;
+
+ proxy_set_header X-Real-IP $remote_addr;
+ proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
+ proxy_set_header X-Forwarded-Proto $scheme;
+
+ proxy_read_timeout 86400s;
+ proxy_send_timeout 86400s;
+ proxy_connect_timeout 10s;
+
+ proxy_buffering off;
+ proxy_request_buffering off;
+
+ proxy_next_upstream error timeout invalid_header http_500 http_502 http_503;
+ }
+
+ location ~ ^/docker(/.*)?$ {
+ proxy_pass http://127.0.0.1:30007;
+ proxy_http_version 1.1;
+ proxy_set_header Host $host;
+ proxy_set_header X-Real-IP $remote_addr;
+ proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
+ proxy_set_header X-Forwarded-Proto $scheme;
+
+ proxy_connect_timeout 60s;
+ proxy_send_timeout 300s;
+ proxy_read_timeout 300s;
+ }
+
error_page 500 502 503 504 /50x.html;
location = /50x.html {
- root /usr/share/nginx/html;
+ root /app/html;
}
}
}
diff --git a/electron-builder.json b/electron-builder.json
index 218153e1..8137f73d 100644
--- a/electron-builder.json
+++ b/electron-builder.json
@@ -124,5 +124,6 @@
"ITSAppUsesNonExemptEncryption": false,
"NSAppleEventsUsageDescription": "Termix needs access to control other applications for terminal operations."
}
- }
+ },
+ "generateUpdatesFilesForAllChannels": true
}
diff --git a/electron/main.cjs b/electron/main.cjs
index 97ced567..06dc9ea2 100644
--- a/electron/main.cjs
+++ b/electron/main.cjs
@@ -11,13 +11,9 @@ const fs = require("fs");
const os = require("os");
if (process.platform === "linux") {
- app.commandLine.appendSwitch("--no-sandbox");
- app.commandLine.appendSwitch("--disable-setuid-sandbox");
- app.commandLine.appendSwitch("--disable-dev-shm-usage");
+ app.commandLine.appendSwitch("--ozone-platform-hint=auto");
- app.disableHardwareAcceleration();
- app.commandLine.appendSwitch("--disable-gpu");
- app.commandLine.appendSwitch("--disable-gpu-compositing");
+ app.commandLine.appendSwitch("--enable-features=VaapiVideoDecoder");
}
app.commandLine.appendSwitch("--ignore-certificate-errors");
diff --git a/electron/preload.js b/electron/preload.js
index 1db1b356..ea1f3458 100644
--- a/electron/preload.js
+++ b/electron/preload.js
@@ -2,21 +2,6 @@ const { contextBridge, ipcRenderer } = require("electron");
contextBridge.exposeInMainWorld("electronAPI", {
getAppVersion: () => ipcRenderer.invoke("get-app-version"),
- getPlatform: () => ipcRenderer.invoke("get-platform"),
- checkElectronUpdate: () => ipcRenderer.invoke("check-electron-update"),
-
- getServerConfig: () => ipcRenderer.invoke("get-server-config"),
- saveServerConfig: (config) =>
- ipcRenderer.invoke("save-server-config", config),
- testServerConnection: (serverUrl) =>
- ipcRenderer.invoke("test-server-connection", serverUrl),
-
- showSaveDialog: (options) => ipcRenderer.invoke("show-save-dialog", options),
- showOpenDialog: (options) => ipcRenderer.invoke("show-open-dialog", options),
-
- onUpdateAvailable: (callback) => ipcRenderer.on("update-available", callback),
- onUpdateDownloaded: (callback) =>
- ipcRenderer.on("update-downloaded", callback),
removeAllListeners: (channel) => ipcRenderer.removeAllListeners(channel),
isElectron: true,
diff --git a/flatpak/com.karmaa.termix.desktop b/flatpak/com.karmaa.termix.desktop
index 3aabfd06..59d27c13 100644
--- a/flatpak/com.karmaa.termix.desktop
+++ b/flatpak/com.karmaa.termix.desktop
@@ -1,7 +1,7 @@
[Desktop Entry]
Name=Termix
Comment=Web-based server management platform with SSH terminal, tunneling, and file editing
-Exec=termix %U
+Exec=run.sh %U
Icon=com.karmaa.termix
Terminal=false
Type=Application
diff --git a/flatpak/com.karmaa.termix.flatpakref b/flatpak/com.karmaa.termix.flatpakref
new file mode 100644
index 00000000..7d2e9892
--- /dev/null
+++ b/flatpak/com.karmaa.termix.flatpakref
@@ -0,0 +1,12 @@
+[Flatpak Ref]
+Name=Termix
+Branch=stable
+Title=Termix - SSH Server Management Platform
+IsRuntime=false
+Url=https://github.com/Termix-SSH/Termix/releases/download/VERSION_PLACEHOLDER/termix_linux_flatpak.flatpak
+GPGKey=
+RuntimeRepo=https://flathub.org/repo/flathub.flatpakrepo
+Comment=Web-based server management platform with SSH terminal, tunneling, and file editing
+Description=Termix is an open-source, forever-free, self-hosted all-in-one server management platform. It provides SSH terminal access, tunneling capabilities, and remote file management.
+Icon=https://raw.githubusercontent.com/Termix-SSH/Termix/main/public/icon.png
+Homepage=https://github.com/Termix-SSH/Termix
diff --git a/flatpak/com.karmaa.termix.metainfo.xml b/flatpak/com.karmaa.termix.metainfo.xml
index 0c3c6895..335d902c 100644
--- a/flatpak/com.karmaa.termix.metainfo.xml
+++ b/flatpak/com.karmaa.termix.metainfo.xml
@@ -5,7 +5,7 @@
Web-based server management platform with SSH terminal, tunneling, and file editing
CC0-1.0
- GPL-3.0-or-later
+ Apache-2.0
bugattiguy527
diff --git a/flatpak/com.karmaa.termix.yml b/flatpak/com.karmaa.termix.yml
index 4405a10f..7b67c0e7 100644
--- a/flatpak/com.karmaa.termix.yml
+++ b/flatpak/com.karmaa.termix.yml
@@ -1,10 +1,10 @@
app-id: com.karmaa.termix
runtime: org.freedesktop.Platform
-runtime-version: "23.08"
+runtime-version: "24.08"
sdk: org.freedesktop.Sdk
base: org.electronjs.Electron2.BaseApp
-base-version: "23.08"
-command: termix
+base-version: "24.08"
+command: run.sh
separate-locales: false
finish-args:
@@ -16,8 +16,11 @@ finish-args:
- --device=dri
- --filesystem=home
- --socket=ssh-auth
- - --talk-name=org.freedesktop.Notifications
+ - --socket=session-bus
- --talk-name=org.freedesktop.secrets
+ - --env=ELECTRON_TRASH=gio
+ - --env=XCURSOR_PATH=/run/host/user-share/icons:/run/host/share/icons
+ - --env=ELECTRON_OZONE_PLATFORM_HINT=auto
modules:
- name: termix
@@ -30,6 +33,21 @@ modules:
- cp -r squashfs-root/resources /app/bin/
- cp -r squashfs-root/locales /app/bin/ || true
+ - cp squashfs-root/*.so /app/bin/ || true
+ - cp squashfs-root/*.pak /app/bin/ || true
+ - cp squashfs-root/*.bin /app/bin/ || true
+ - cp squashfs-root/*.dat /app/bin/ || true
+ - cp squashfs-root/*.json /app/bin/ || true
+
+ - |
+ cat > run.sh << 'EOF'
+ #!/bin/bash
+ export TMPDIR="$XDG_RUNTIME_DIR/app/$FLATPAK_ID"
+ exec zypak-wrapper /app/bin/termix "$@"
+ EOF
+ - chmod +x run.sh
+ - install -Dm755 run.sh /app/bin/run.sh
+
- install -Dm644 com.karmaa.termix.desktop /app/share/applications/com.karmaa.termix.desktop
- install -Dm644 com.karmaa.termix.metainfo.xml /app/share/metainfo/com.karmaa.termix.metainfo.xml
@@ -40,14 +58,14 @@ modules:
sources:
- type: file
- url: https://github.com/Termix-SSH/Termix/releases/download/release-VERSION_PLACEHOLDER-tag/termix_linux_x64_VERSION_PLACEHOLDER_appimage.AppImage
+ url: https://github.com/Termix-SSH/Termix/releases/download/release-VERSION_PLACEHOLDER-tag/termix_linux_x64_appimage.AppImage
sha256: CHECKSUM_X64_PLACEHOLDER
dest-filename: termix.AppImage
only-arches:
- x86_64
- type: file
- url: https://github.com/Termix-SSH/Termix/releases/download/release-VERSION_PLACEHOLDER-tag/termix_linux_arm64_VERSION_PLACEHOLDER_appimage.AppImage
+ url: https://github.com/Termix-SSH/Termix/releases/download/release-VERSION_PLACEHOLDER-tag/termix_linux_arm64_appimage.AppImage
sha256: CHECKSUM_ARM64_PLACEHOLDER
dest-filename: termix.AppImage
only-arches:
diff --git a/flatpak/prepare-flatpak.sh b/flatpak/prepare-flatpak.sh
deleted file mode 100644
index 05162b64..00000000
--- a/flatpak/prepare-flatpak.sh
+++ /dev/null
@@ -1,34 +0,0 @@
-#!/bin/bash
-set -e
-
-VERSION="$1"
-CHECKSUM="$2"
-RELEASE_DATE="$3"
-
-if [ -z "$VERSION" ] || [ -z "$CHECKSUM" ] || [ -z "$RELEASE_DATE" ]; then
- echo "Usage: $0 "
- echo "Example: $0 1.8.0 abc123... 2025-10-26"
- exit 1
-fi
-
-echo "Preparing Flatpak submission for version $VERSION"
-
-cp public/icon.svg flatpak/com.karmaa.termix.svg
-echo "✓ Copied SVG icon"
-
-if command -v convert &> /dev/null; then
- convert public/icon.png -resize 256x256 flatpak/icon-256.png
- convert public/icon.png -resize 128x128 flatpak/icon-128.png
- echo "✓ Generated PNG icons"
-else
- cp public/icon.png flatpak/icon-256.png
- cp public/icon.png flatpak/icon-128.png
- echo "⚠ ImageMagick not found, using original icon"
-fi
-
-sed -i "s/VERSION_PLACEHOLDER/$VERSION/g" flatpak/com.karmaa.termix.yml
-sed -i "s/CHECKSUM_PLACEHOLDER/$CHECKSUM/g" flatpak/com.karmaa.termix.yml
-echo "✓ Updated manifest with version $VERSION"
-
-sed -i "s/VERSION_PLACEHOLDER/$VERSION/g" flatpak/com.karmaa.termix.metainfo.xml
-sed -i "s/DATE_PLACEHOLDER/$RELEASE_DATE/g" flatpak/com.karmaa.termix.metainfo.xml
diff --git a/package-lock.json b/package-lock.json
index 14f680ed..1c9d6013 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -1,12 +1,12 @@
{
"name": "termix",
- "version": "1.8.1",
+ "version": "1.10.0",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "termix",
- "version": "1.8.1",
+ "version": "1.10.0",
"dependencies": {
"@codemirror/autocomplete": "^6.18.7",
"@codemirror/commands": "^6.3.3",
@@ -16,6 +16,7 @@
"@hookform/resolvers": "^5.1.1",
"@monaco-editor/react": "^4.7.0",
"@radix-ui/react-accordion": "^1.2.11",
+ "@radix-ui/react-alert-dialog": "^1.1.15",
"@radix-ui/react-checkbox": "^1.3.2",
"@radix-ui/react-dialog": "^1.1.15",
"@radix-ui/react-dropdown-menu": "^2.1.15",
@@ -26,7 +27,7 @@
"@radix-ui/react-select": "^2.2.5",
"@radix-ui/react-separator": "^1.1.7",
"@radix-ui/react-slider": "^1.3.6",
- "@radix-ui/react-slot": "^1.2.3",
+ "@radix-ui/react-slot": "^1.2.4",
"@radix-ui/react-switch": "^1.2.5",
"@radix-ui/react-tabs": "^1.1.12",
"@radix-ui/react-tooltip": "^1.2.8",
@@ -38,6 +39,7 @@
"@types/qrcode": "^1.5.5",
"@types/speakeasy": "^2.0.10",
"@uiw/codemirror-extensions-langs": "^4.24.1",
+ "@uiw/codemirror-theme-github": "^4.25.4",
"@uiw/react-codemirror": "^4.24.1",
"@xterm/addon-clipboard": "^0.1.0",
"@xterm/addon-fit": "^0.10.0",
@@ -57,6 +59,7 @@
"dotenv": "^17.2.0",
"drizzle-orm": "^0.44.3",
"express": "^5.1.0",
+ "i18n-auto-translation": "^2.2.3",
"i18next": "^25.4.2",
"i18next-browser-languagedetector": "^8.2.0",
"jose": "^5.2.3",
@@ -84,6 +87,7 @@
"react-xtermjs": "^1.0.10",
"recharts": "^3.2.1",
"remark-gfm": "^4.0.1",
+ "socks": "^2.8.7",
"sonner": "^2.0.7",
"speakeasy": "^2.0.0",
"ssh2": "^1.16.0",
@@ -2250,6 +2254,94 @@
"dev": true,
"license": "MIT"
},
+ "node_modules/@google-cloud/common": {
+ "version": "6.0.0",
+ "resolved": "https://registry.npmjs.org/@google-cloud/common/-/common-6.0.0.tgz",
+ "integrity": "sha512-IXh04DlkLMxWgYLIUYuHHKXKOUwPDzDgke1ykkkJPe48cGIS9kkL2U/o0pm4ankHLlvzLF/ma1eO86n/bkumIA==",
+ "dependencies": {
+ "@google-cloud/projectify": "^4.0.0",
+ "@google-cloud/promisify": "^4.0.0",
+ "arrify": "^2.0.0",
+ "duplexify": "^4.1.3",
+ "extend": "^3.0.2",
+ "google-auth-library": "^10.0.0-rc.1",
+ "html-entities": "^2.5.2",
+ "retry-request": "^8.0.0",
+ "teeny-request": "^10.0.0"
+ },
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/@google-cloud/common/node_modules/@google-cloud/promisify": {
+ "version": "4.1.0",
+ "resolved": "https://registry.npmjs.org/@google-cloud/promisify/-/promisify-4.1.0.tgz",
+ "integrity": "sha512-G/FQx5cE/+DqBbOpA5jKsegGwdPniU6PuIEMt+qxWgFxvxuFOzVmp6zYchtYuwAWV5/8Dgs0yAmjvNZv3uXLQg==",
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/@google-cloud/projectify": {
+ "version": "4.0.0",
+ "resolved": "https://registry.npmjs.org/@google-cloud/projectify/-/projectify-4.0.0.tgz",
+ "integrity": "sha512-MmaX6HeSvyPbWGwFq7mXdo0uQZLGBYCwziiLIGq5JVX+/bdI3SAq6bP98trV5eTWfLuvsMcIC1YJOF2vfteLFA==",
+ "engines": {
+ "node": ">=14.0.0"
+ }
+ },
+ "node_modules/@google-cloud/promisify": {
+ "version": "5.0.0",
+ "resolved": "https://registry.npmjs.org/@google-cloud/promisify/-/promisify-5.0.0.tgz",
+ "integrity": "sha512-N8qS6dlORGHwk7WjGXKOSsLjIjNINCPicsOX6gyyLiYk7mq3MtII96NZ9N2ahwA2vnkLmZODOIH9rlNniYWvCQ==",
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/@google-cloud/translate": {
+ "version": "9.2.0",
+ "resolved": "https://registry.npmjs.org/@google-cloud/translate/-/translate-9.2.0.tgz",
+ "integrity": "sha512-LBKoXMXsM6jyqD9RDO74E3Q8uUn9TWy7YwIrF+WS4I9erdI+VZHxmdffi4sFfQ196FeprfwMMAFa8Oy6u7G8xw==",
+ "dependencies": {
+ "@google-cloud/common": "^6.0.0",
+ "@google-cloud/promisify": "^5.0.0",
+ "arrify": "^2.0.0",
+ "extend": "^3.0.2",
+ "google-gax": "^5.0.0",
+ "is-html": "^2.0.0"
+ },
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/@grpc/grpc-js": {
+ "version": "1.14.3",
+ "resolved": "https://registry.npmjs.org/@grpc/grpc-js/-/grpc-js-1.14.3.tgz",
+ "integrity": "sha512-Iq8QQQ/7X3Sac15oB6p0FmUg/klxQvXLeileoqrTRGJYLV+/9tubbr9ipz0GKHjmXVsgFPo/+W+2cA8eNcR+XA==",
+ "dependencies": {
+ "@grpc/proto-loader": "^0.8.0",
+ "@js-sdsl/ordered-map": "^4.4.2"
+ },
+ "engines": {
+ "node": ">=12.10.0"
+ }
+ },
+ "node_modules/@grpc/proto-loader": {
+ "version": "0.8.0",
+ "resolved": "https://registry.npmjs.org/@grpc/proto-loader/-/proto-loader-0.8.0.tgz",
+ "integrity": "sha512-rc1hOQtjIWGxcxpb9aHAfLpIctjEnsDehj0DAiVfBlmT84uvR0uUtN2hEi/ecvWVjXUGf5qPF4qEgiLOx1YIMQ==",
+ "dependencies": {
+ "lodash.camelcase": "^4.3.0",
+ "long": "^5.0.0",
+ "protobufjs": "^7.5.3",
+ "yargs": "^17.7.2"
+ },
+ "bin": {
+ "proto-loader-gen-types": "build/bin/proto-loader-gen-types.js"
+ },
+ "engines": {
+ "node": ">=6"
+ }
+ },
"node_modules/@hapi/address": {
"version": "5.1.1",
"resolved": "https://registry.npmjs.org/@hapi/address/-/address-5.1.1.tgz",
@@ -2387,7 +2479,6 @@
"version": "4.0.1",
"resolved": "https://registry.npmjs.org/@isaacs/balanced-match/-/balanced-match-4.0.1.tgz",
"integrity": "sha512-yzMTt9lEb8Gv7zRioUilSglI0c0smZ9k5D65677DLWLtWJaXIS3CqcGyUFByYKlnUj6TkjLVs54fBl6+TiGQDQ==",
- "dev": true,
"license": "MIT",
"engines": {
"node": "20 || >=22"
@@ -2397,7 +2488,6 @@
"version": "5.0.0",
"resolved": "https://registry.npmjs.org/@isaacs/brace-expansion/-/brace-expansion-5.0.0.tgz",
"integrity": "sha512-ZT55BDLV0yv0RBm2czMiZ+SqCGO7AvmOM3G/w2xhVPH+te0aKgFjmBvGlL1dH+ql2tgGO3MVrbb3jCKyvpgnxA==",
- "dev": true,
"license": "MIT",
"dependencies": {
"@isaacs/balanced-match": "^4.0.1"
@@ -2410,7 +2500,6 @@
"version": "8.0.2",
"resolved": "https://registry.npmjs.org/@isaacs/cliui/-/cliui-8.0.2.tgz",
"integrity": "sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA==",
- "dev": true,
"license": "ISC",
"dependencies": {
"string-width": "^5.1.2",
@@ -2428,7 +2517,6 @@
"version": "6.2.3",
"resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-6.2.3.tgz",
"integrity": "sha512-4Dj6M28JB+oAH8kFkTLUo+a2jwOFkuqb3yucU0CANcRRUbxS0cP0nZYCGjcc3BNXwRIsUVmDGgzawme7zvJHvg==",
- "dev": true,
"license": "MIT",
"engines": {
"node": ">=12"
@@ -2441,14 +2529,12 @@
"version": "9.2.2",
"resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-9.2.2.tgz",
"integrity": "sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==",
- "dev": true,
"license": "MIT"
},
"node_modules/@isaacs/cliui/node_modules/string-width": {
"version": "5.1.2",
"resolved": "https://registry.npmjs.org/string-width/-/string-width-5.1.2.tgz",
"integrity": "sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA==",
- "dev": true,
"license": "MIT",
"dependencies": {
"eastasianwidth": "^0.2.0",
@@ -2466,7 +2552,6 @@
"version": "8.1.0",
"resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-8.1.0.tgz",
"integrity": "sha512-si7QWI6zUMq56bESFvagtmzMdGOtoxfR+Sez11Mobfc7tm+VkUckk9bW2UeffTGVUbOksxmSw0AA2gs8g71NCQ==",
- "dev": true,
"license": "MIT",
"dependencies": {
"ansi-styles": "^6.1.0",
@@ -2525,6 +2610,15 @@
"@jridgewell/sourcemap-codec": "^1.4.14"
}
},
+ "node_modules/@js-sdsl/ordered-map": {
+ "version": "4.4.2",
+ "resolved": "https://registry.npmjs.org/@js-sdsl/ordered-map/-/ordered-map-4.4.2.tgz",
+ "integrity": "sha512-iUKgm52T8HOE/makSxjqoWhe95ZJA1/G1sYsGev2JDKUSS14KAgg1LHb+Ba+IPow0xflbnSkOsZcO08C7w1gYw==",
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/js-sdsl"
+ }
+ },
"node_modules/@lezer/common": {
"version": "1.3.0",
"resolved": "https://registry.npmjs.org/@lezer/common/-/common-1.3.0.tgz",
@@ -3094,13 +3188,66 @@
"version": "0.11.0",
"resolved": "https://registry.npmjs.org/@pkgjs/parseargs/-/parseargs-0.11.0.tgz",
"integrity": "sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg==",
- "dev": true,
"license": "MIT",
"optional": true,
"engines": {
"node": ">=14"
}
},
+ "node_modules/@protobufjs/aspromise": {
+ "version": "1.1.2",
+ "resolved": "https://registry.npmjs.org/@protobufjs/aspromise/-/aspromise-1.1.2.tgz",
+ "integrity": "sha512-j+gKExEuLmKwvz3OgROXtrJ2UG2x8Ch2YZUxahh+s1F2HZ+wAceUNLkvy6zKCPVRkU++ZWQrdxsUeQXmcg4uoQ=="
+ },
+ "node_modules/@protobufjs/base64": {
+ "version": "1.1.2",
+ "resolved": "https://registry.npmjs.org/@protobufjs/base64/-/base64-1.1.2.tgz",
+ "integrity": "sha512-AZkcAA5vnN/v4PDqKyMR5lx7hZttPDgClv83E//FMNhR2TMcLUhfRUBHCmSl0oi9zMgDDqRUJkSxO3wm85+XLg=="
+ },
+ "node_modules/@protobufjs/codegen": {
+ "version": "2.0.4",
+ "resolved": "https://registry.npmjs.org/@protobufjs/codegen/-/codegen-2.0.4.tgz",
+ "integrity": "sha512-YyFaikqM5sH0ziFZCN3xDC7zeGaB/d0IUb9CATugHWbd1FRFwWwt4ld4OYMPWu5a3Xe01mGAULCdqhMlPl29Jg=="
+ },
+ "node_modules/@protobufjs/eventemitter": {
+ "version": "1.1.0",
+ "resolved": "https://registry.npmjs.org/@protobufjs/eventemitter/-/eventemitter-1.1.0.tgz",
+ "integrity": "sha512-j9ednRT81vYJ9OfVuXG6ERSTdEL1xVsNgqpkxMsbIabzSo3goCjDIveeGv5d03om39ML71RdmrGNjG5SReBP/Q=="
+ },
+ "node_modules/@protobufjs/fetch": {
+ "version": "1.1.0",
+ "resolved": "https://registry.npmjs.org/@protobufjs/fetch/-/fetch-1.1.0.tgz",
+ "integrity": "sha512-lljVXpqXebpsijW71PZaCYeIcE5on1w5DlQy5WH6GLbFryLUrBD4932W/E2BSpfRJWseIL4v/KPgBFxDOIdKpQ==",
+ "dependencies": {
+ "@protobufjs/aspromise": "^1.1.1",
+ "@protobufjs/inquire": "^1.1.0"
+ }
+ },
+ "node_modules/@protobufjs/float": {
+ "version": "1.0.2",
+ "resolved": "https://registry.npmjs.org/@protobufjs/float/-/float-1.0.2.tgz",
+ "integrity": "sha512-Ddb+kVXlXst9d+R9PfTIxh1EdNkgoRe5tOX6t01f1lYWOvJnSPDBlG241QLzcyPdoNTsblLUdujGSE4RzrTZGQ=="
+ },
+ "node_modules/@protobufjs/inquire": {
+ "version": "1.1.0",
+ "resolved": "https://registry.npmjs.org/@protobufjs/inquire/-/inquire-1.1.0.tgz",
+ "integrity": "sha512-kdSefcPdruJiFMVSbn801t4vFK7KB/5gd2fYvrxhuJYg8ILrmn9SKSX2tZdV6V+ksulWqS7aXjBcRXl3wHoD9Q=="
+ },
+ "node_modules/@protobufjs/path": {
+ "version": "1.1.2",
+ "resolved": "https://registry.npmjs.org/@protobufjs/path/-/path-1.1.2.tgz",
+ "integrity": "sha512-6JOcJ5Tm08dOHAbdR3GrvP+yUUfkjG5ePsHYczMFLq3ZmMkAD98cDgcT2iA1lJ9NVwFd4tH/iSSoe44YWkltEA=="
+ },
+ "node_modules/@protobufjs/pool": {
+ "version": "1.1.0",
+ "resolved": "https://registry.npmjs.org/@protobufjs/pool/-/pool-1.1.0.tgz",
+ "integrity": "sha512-0kELaGSIDBKvcgS4zkjz1PeddatrjYcmMWOlAuAPwAeccUrPHdUqo/J6LiymHHEiJT5NrF1UVwxY14f+fy4WQw=="
+ },
+ "node_modules/@protobufjs/utf8": {
+ "version": "1.1.0",
+ "resolved": "https://registry.npmjs.org/@protobufjs/utf8/-/utf8-1.1.0.tgz",
+ "integrity": "sha512-Vvn3zZrhQZkkBE8LSuW3em98c0FwgO4nxzv6OdSxPKJIEKY2bGbHn+mhGIPerzI4twdxaP8/0+06HBpwf345Lw=="
+ },
"node_modules/@radix-ui/number": {
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/@radix-ui/number/-/number-1.1.1.tgz",
@@ -3144,6 +3291,52 @@
}
}
},
+ "node_modules/@radix-ui/react-alert-dialog": {
+ "version": "1.1.15",
+ "resolved": "https://registry.npmjs.org/@radix-ui/react-alert-dialog/-/react-alert-dialog-1.1.15.tgz",
+ "integrity": "sha512-oTVLkEw5GpdRe29BqJ0LSDFWI3qu0vR1M0mUkOQWDIUnY/QIkLpgDMWuKxP94c2NAC2LGcgVhG1ImF3jkZ5wXw==",
+ "license": "MIT",
+ "dependencies": {
+ "@radix-ui/primitive": "1.1.3",
+ "@radix-ui/react-compose-refs": "1.1.2",
+ "@radix-ui/react-context": "1.1.2",
+ "@radix-ui/react-dialog": "1.1.15",
+ "@radix-ui/react-primitive": "2.1.3",
+ "@radix-ui/react-slot": "1.2.3"
+ },
+ "peerDependencies": {
+ "@types/react": "*",
+ "@types/react-dom": "*",
+ "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc",
+ "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc"
+ },
+ "peerDependenciesMeta": {
+ "@types/react": {
+ "optional": true
+ },
+ "@types/react-dom": {
+ "optional": true
+ }
+ }
+ },
+ "node_modules/@radix-ui/react-alert-dialog/node_modules/@radix-ui/react-slot": {
+ "version": "1.2.3",
+ "resolved": "https://registry.npmjs.org/@radix-ui/react-slot/-/react-slot-1.2.3.tgz",
+ "integrity": "sha512-aeNmHnBxbi2St0au6VBVC7JXFlhLlOnvIIlePNniyUNAClzmtAUEY8/pBiK3iHjufOlwA+c20/8jngo7xcrg8A==",
+ "license": "MIT",
+ "dependencies": {
+ "@radix-ui/react-compose-refs": "1.1.2"
+ },
+ "peerDependencies": {
+ "@types/react": "*",
+ "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc"
+ },
+ "peerDependenciesMeta": {
+ "@types/react": {
+ "optional": true
+ }
+ }
+ },
"node_modules/@radix-ui/react-arrow": {
"version": "1.1.7",
"resolved": "https://registry.npmjs.org/@radix-ui/react-arrow/-/react-arrow-1.1.7.tgz",
@@ -3253,6 +3446,24 @@
}
}
},
+ "node_modules/@radix-ui/react-collection/node_modules/@radix-ui/react-slot": {
+ "version": "1.2.3",
+ "resolved": "https://registry.npmjs.org/@radix-ui/react-slot/-/react-slot-1.2.3.tgz",
+ "integrity": "sha512-aeNmHnBxbi2St0au6VBVC7JXFlhLlOnvIIlePNniyUNAClzmtAUEY8/pBiK3iHjufOlwA+c20/8jngo7xcrg8A==",
+ "license": "MIT",
+ "dependencies": {
+ "@radix-ui/react-compose-refs": "1.1.2"
+ },
+ "peerDependencies": {
+ "@types/react": "*",
+ "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc"
+ },
+ "peerDependenciesMeta": {
+ "@types/react": {
+ "optional": true
+ }
+ }
+ },
"node_modules/@radix-ui/react-compose-refs": {
"version": "1.1.2",
"resolved": "https://registry.npmjs.org/@radix-ui/react-compose-refs/-/react-compose-refs-1.1.2.tgz",
@@ -3319,6 +3530,24 @@
}
}
},
+ "node_modules/@radix-ui/react-dialog/node_modules/@radix-ui/react-slot": {
+ "version": "1.2.3",
+ "resolved": "https://registry.npmjs.org/@radix-ui/react-slot/-/react-slot-1.2.3.tgz",
+ "integrity": "sha512-aeNmHnBxbi2St0au6VBVC7JXFlhLlOnvIIlePNniyUNAClzmtAUEY8/pBiK3iHjufOlwA+c20/8jngo7xcrg8A==",
+ "license": "MIT",
+ "dependencies": {
+ "@radix-ui/react-compose-refs": "1.1.2"
+ },
+ "peerDependencies": {
+ "@types/react": "*",
+ "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc"
+ },
+ "peerDependenciesMeta": {
+ "@types/react": {
+ "optional": true
+ }
+ }
+ },
"node_modules/@radix-ui/react-direction": {
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/@radix-ui/react-direction/-/react-direction-1.1.1.tgz",
@@ -3511,6 +3740,24 @@
}
}
},
+ "node_modules/@radix-ui/react-menu/node_modules/@radix-ui/react-slot": {
+ "version": "1.2.3",
+ "resolved": "https://registry.npmjs.org/@radix-ui/react-slot/-/react-slot-1.2.3.tgz",
+ "integrity": "sha512-aeNmHnBxbi2St0au6VBVC7JXFlhLlOnvIIlePNniyUNAClzmtAUEY8/pBiK3iHjufOlwA+c20/8jngo7xcrg8A==",
+ "license": "MIT",
+ "dependencies": {
+ "@radix-ui/react-compose-refs": "1.1.2"
+ },
+ "peerDependencies": {
+ "@types/react": "*",
+ "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc"
+ },
+ "peerDependenciesMeta": {
+ "@types/react": {
+ "optional": true
+ }
+ }
+ },
"node_modules/@radix-ui/react-popover": {
"version": "1.1.15",
"resolved": "https://registry.npmjs.org/@radix-ui/react-popover/-/react-popover-1.1.15.tgz",
@@ -3548,6 +3795,24 @@
}
}
},
+ "node_modules/@radix-ui/react-popover/node_modules/@radix-ui/react-slot": {
+ "version": "1.2.3",
+ "resolved": "https://registry.npmjs.org/@radix-ui/react-slot/-/react-slot-1.2.3.tgz",
+ "integrity": "sha512-aeNmHnBxbi2St0au6VBVC7JXFlhLlOnvIIlePNniyUNAClzmtAUEY8/pBiK3iHjufOlwA+c20/8jngo7xcrg8A==",
+ "license": "MIT",
+ "dependencies": {
+ "@radix-ui/react-compose-refs": "1.1.2"
+ },
+ "peerDependencies": {
+ "@types/react": "*",
+ "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc"
+ },
+ "peerDependenciesMeta": {
+ "@types/react": {
+ "optional": true
+ }
+ }
+ },
"node_modules/@radix-ui/react-popper": {
"version": "1.2.8",
"resolved": "https://registry.npmjs.org/@radix-ui/react-popper/-/react-popper-1.2.8.tgz",
@@ -3651,6 +3916,24 @@
}
}
},
+ "node_modules/@radix-ui/react-primitive/node_modules/@radix-ui/react-slot": {
+ "version": "1.2.3",
+ "resolved": "https://registry.npmjs.org/@radix-ui/react-slot/-/react-slot-1.2.3.tgz",
+ "integrity": "sha512-aeNmHnBxbi2St0au6VBVC7JXFlhLlOnvIIlePNniyUNAClzmtAUEY8/pBiK3iHjufOlwA+c20/8jngo7xcrg8A==",
+ "license": "MIT",
+ "dependencies": {
+ "@radix-ui/react-compose-refs": "1.1.2"
+ },
+ "peerDependencies": {
+ "@types/react": "*",
+ "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc"
+ },
+ "peerDependenciesMeta": {
+ "@types/react": {
+ "optional": true
+ }
+ }
+ },
"node_modules/@radix-ui/react-progress": {
"version": "1.1.7",
"resolved": "https://registry.npmjs.org/@radix-ui/react-progress/-/react-progress-1.1.7.tgz",
@@ -3780,6 +4063,24 @@
}
}
},
+ "node_modules/@radix-ui/react-select/node_modules/@radix-ui/react-slot": {
+ "version": "1.2.3",
+ "resolved": "https://registry.npmjs.org/@radix-ui/react-slot/-/react-slot-1.2.3.tgz",
+ "integrity": "sha512-aeNmHnBxbi2St0au6VBVC7JXFlhLlOnvIIlePNniyUNAClzmtAUEY8/pBiK3iHjufOlwA+c20/8jngo7xcrg8A==",
+ "license": "MIT",
+ "dependencies": {
+ "@radix-ui/react-compose-refs": "1.1.2"
+ },
+ "peerDependencies": {
+ "@types/react": "*",
+ "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc"
+ },
+ "peerDependenciesMeta": {
+ "@types/react": {
+ "optional": true
+ }
+ }
+ },
"node_modules/@radix-ui/react-separator": {
"version": "1.1.7",
"resolved": "https://registry.npmjs.org/@radix-ui/react-separator/-/react-separator-1.1.7.tgz",
@@ -3837,9 +4138,9 @@
}
},
"node_modules/@radix-ui/react-slot": {
- "version": "1.2.3",
- "resolved": "https://registry.npmjs.org/@radix-ui/react-slot/-/react-slot-1.2.3.tgz",
- "integrity": "sha512-aeNmHnBxbi2St0au6VBVC7JXFlhLlOnvIIlePNniyUNAClzmtAUEY8/pBiK3iHjufOlwA+c20/8jngo7xcrg8A==",
+ "version": "1.2.4",
+ "resolved": "https://registry.npmjs.org/@radix-ui/react-slot/-/react-slot-1.2.4.tgz",
+ "integrity": "sha512-Jl+bCv8HxKnlTLVrcDE8zTMJ09R9/ukw4qBs/oZClOfoQk/cOTbDn+NceXfV7j09YPVQUryJPHurafcSg6EVKA==",
"license": "MIT",
"dependencies": {
"@radix-ui/react-compose-refs": "1.1.2"
@@ -3947,6 +4248,24 @@
}
}
},
+ "node_modules/@radix-ui/react-tooltip/node_modules/@radix-ui/react-slot": {
+ "version": "1.2.3",
+ "resolved": "https://registry.npmjs.org/@radix-ui/react-slot/-/react-slot-1.2.3.tgz",
+ "integrity": "sha512-aeNmHnBxbi2St0au6VBVC7JXFlhLlOnvIIlePNniyUNAClzmtAUEY8/pBiK3iHjufOlwA+c20/8jngo7xcrg8A==",
+ "license": "MIT",
+ "dependencies": {
+ "@radix-ui/react-compose-refs": "1.1.2"
+ },
+ "peerDependencies": {
+ "@types/react": "*",
+ "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc"
+ },
+ "peerDependenciesMeta": {
+ "@types/react": {
+ "optional": true
+ }
+ }
+ },
"node_modules/@radix-ui/react-use-callback-ref": {
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/@radix-ui/react-use-callback-ref/-/react-use-callback-ref-1.1.1.tgz",
@@ -4782,7 +5101,6 @@
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/@tootallnate/once/-/once-2.0.0.tgz",
"integrity": "sha512-XCuKFP5PS55gnMVu3dty8KPatLqUoy/ZYzDzAGCQ8JNFCkLXzmI7vNHCR+XpbZaMWQK/vQubr7PkYq8g470J/A==",
- "dev": true,
"license": "MIT",
"engines": {
"node": ">= 10"
@@ -5603,6 +5921,37 @@
"@codemirror/language-data": ">=6.0.0"
}
},
+ "node_modules/@uiw/codemirror-theme-github": {
+ "version": "4.25.4",
+ "resolved": "https://registry.npmjs.org/@uiw/codemirror-theme-github/-/codemirror-theme-github-4.25.4.tgz",
+ "integrity": "sha512-M5zRT2vIpNsuKN0Lz+DwLnmhHW8Eddp1M9zC0hm3V+bvffmaSn/pUDey1eqGIv5xNNmjhqvDAz0a90xLYCzvSw==",
+ "license": "MIT",
+ "dependencies": {
+ "@uiw/codemirror-themes": "4.25.4"
+ },
+ "funding": {
+ "url": "https://jaywcjlove.github.io/#/sponsor"
+ }
+ },
+ "node_modules/@uiw/codemirror-themes": {
+ "version": "4.25.4",
+ "resolved": "https://registry.npmjs.org/@uiw/codemirror-themes/-/codemirror-themes-4.25.4.tgz",
+ "integrity": "sha512-2SLktItgcZC4p0+PfFusEbAHwbuAWe3bOOntCevVgHtrWGtGZX3IPv2k8IKZMgOXtAHyGKpJvT9/nspPn/uCQg==",
+ "license": "MIT",
+ "dependencies": {
+ "@codemirror/language": "^6.0.0",
+ "@codemirror/state": "^6.0.0",
+ "@codemirror/view": "^6.0.0"
+ },
+ "funding": {
+ "url": "https://jaywcjlove.github.io/#/sponsor"
+ },
+ "peerDependencies": {
+ "@codemirror/language": ">=6.0.0",
+ "@codemirror/state": ">=6.0.0",
+ "@codemirror/view": ">=6.0.0"
+ }
+ },
"node_modules/@uiw/react-codemirror": {
"version": "4.25.2",
"resolved": "https://registry.npmjs.org/@uiw/react-codemirror/-/react-codemirror-4.25.2.tgz",
@@ -5775,7 +6124,6 @@
"version": "7.1.4",
"resolved": "https://registry.npmjs.org/agent-base/-/agent-base-7.1.4.tgz",
"integrity": "sha512-MnA+YT8fwfJPgBx3m60MNqakm30XOkyIoH1y6huTQvC0PwZG7ki8NacLBcrPbNoo8vEZy7Jpuk7+jMO+CUovTQ==",
- "dev": true,
"license": "MIT",
"engines": {
"node": ">= 14"
@@ -5845,7 +6193,6 @@
"version": "6.2.2",
"resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.2.2.tgz",
"integrity": "sha512-Bq3SmSpyFHaWjPk8If9yc6svM8c56dB5BAtW4Qbw5jHTwwXXcTLoRMkpDJp6VL0XzlWaCHTXrkFURMYmD0sLqg==",
- "dev": true,
"license": "MIT",
"engines": {
"node": ">=12"
@@ -5985,6 +6332,14 @@
"dev": true,
"license": "MIT"
},
+ "node_modules/arrify": {
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/arrify/-/arrify-2.0.1.tgz",
+ "integrity": "sha512-3duEwti880xqi4eAMN8AyR4a0ByT90zoYdLlevfrvU43vb0YZwZVfxOgxWrLXXXpyugL0hNZc9G6BiB5B3nUug==",
+ "engines": {
+ "node": ">=8"
+ }
+ },
"node_modules/asn1": {
"version": "0.2.6",
"resolved": "https://registry.npmjs.org/asn1/-/asn1-0.2.6.tgz",
@@ -6074,7 +6429,6 @@
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz",
"integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==",
- "dev": true,
"license": "MIT"
},
"node_modules/base32.js": {
@@ -6184,6 +6538,14 @@
"node": "20.x || 22.x || 23.x || 24.x"
}
},
+ "node_modules/bignumber.js": {
+ "version": "9.3.1",
+ "resolved": "https://registry.npmjs.org/bignumber.js/-/bignumber.js-9.3.1.tgz",
+ "integrity": "sha512-Ko0uX15oIUS7wJ3Rb30Fs6SkVbLmPBAKdlm7q9+ak9bbIeFf0MwuBsQV6z7+X768/cHsfg+WlysDWJcmthjsjQ==",
+ "engines": {
+ "node": "*"
+ }
+ },
"node_modules/bindings": {
"version": "1.5.0",
"resolved": "https://registry.npmjs.org/bindings/-/bindings-1.5.0.tgz",
@@ -6826,7 +7188,6 @@
"version": "8.0.1",
"resolved": "https://registry.npmjs.org/cliui/-/cliui-8.0.1.tgz",
"integrity": "sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ==",
- "dev": true,
"license": "ISC",
"dependencies": {
"string-width": "^4.2.0",
@@ -6841,7 +7202,6 @@
"version": "5.0.1",
"resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz",
"integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==",
- "dev": true,
"license": "MIT",
"engines": {
"node": ">=8"
@@ -6851,7 +7211,6 @@
"version": "6.0.1",
"resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz",
"integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==",
- "dev": true,
"license": "MIT",
"dependencies": {
"ansi-regex": "^5.0.1"
@@ -6864,7 +7223,6 @@
"version": "7.0.0",
"resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz",
"integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==",
- "dev": true,
"license": "MIT",
"dependencies": {
"ansi-styles": "^4.0.0",
@@ -7396,7 +7754,6 @@
"version": "7.0.6",
"resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz",
"integrity": "sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==",
- "dev": true,
"license": "MIT",
"dependencies": {
"path-key": "^3.1.0",
@@ -7679,6 +8036,11 @@
"dev": true,
"license": "MIT"
},
+ "node_modules/deep-object-diff": {
+ "version": "1.1.9",
+ "resolved": "https://registry.npmjs.org/deep-object-diff/-/deep-object-diff-1.1.9.tgz",
+ "integrity": "sha512-Rn+RuwkmkDwCi2/oXOFS9Gsr5lJZu/yTGpK7wAaAIE75CC+LCGEZHpY6VQJa/RoJcrmaA/docWJZvYohlNkWPA=="
+ },
"node_modules/defaults": {
"version": "1.0.4",
"resolved": "https://registry.npmjs.org/defaults/-/defaults-1.0.4.tgz",
@@ -8142,11 +8504,34 @@
"node": ">= 0.4"
}
},
+ "node_modules/duplexify": {
+ "version": "4.1.3",
+ "resolved": "https://registry.npmjs.org/duplexify/-/duplexify-4.1.3.tgz",
+ "integrity": "sha512-M3BmBhwJRZsSx38lZyhE53Csddgzl5R7xGJNk7CVddZD6CcmwMCH8J+7AprIrQKH7TonKxaCjcv27Qmf+sQ+oA==",
+ "dependencies": {
+ "end-of-stream": "^1.4.1",
+ "inherits": "^2.0.3",
+ "readable-stream": "^3.1.1",
+ "stream-shift": "^1.0.2"
+ }
+ },
+ "node_modules/duplexify/node_modules/readable-stream": {
+ "version": "3.6.2",
+ "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.2.tgz",
+ "integrity": "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==",
+ "dependencies": {
+ "inherits": "^2.0.3",
+ "string_decoder": "^1.1.1",
+ "util-deprecate": "^1.0.1"
+ },
+ "engines": {
+ "node": ">= 6"
+ }
+ },
"node_modules/eastasianwidth": {
"version": "0.2.0",
"resolved": "https://registry.npmjs.org/eastasianwidth/-/eastasianwidth-0.2.0.tgz",
"integrity": "sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==",
- "dev": true,
"license": "MIT"
},
"node_modules/ecdsa-sig-formatter": {
@@ -8579,7 +8964,6 @@
"version": "3.2.0",
"resolved": "https://registry.npmjs.org/escalade/-/escalade-3.2.0.tgz",
"integrity": "sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA==",
- "dev": true,
"license": "MIT",
"engines": {
"node": ">=6"
@@ -9376,7 +9760,6 @@
"version": "3.3.1",
"resolved": "https://registry.npmjs.org/foreground-child/-/foreground-child-3.3.1.tgz",
"integrity": "sha512-gIXjKqtFuWEgzFRJA9WCQeSJLZDjgJUOMCMzxtvFq/37KojM1BFGufqsCy0r4qSQmYLsZYMeyRqzIWOMup03sw==",
- "dev": true,
"license": "ISC",
"dependencies": {
"cross-spawn": "^7.0.6",
@@ -9529,6 +9912,96 @@
"url": "https://github.com/sponsors/ljharb"
}
},
+ "node_modules/gaxios": {
+ "version": "7.1.3",
+ "resolved": "https://registry.npmjs.org/gaxios/-/gaxios-7.1.3.tgz",
+ "integrity": "sha512-YGGyuEdVIjqxkxVH1pUTMY/XtmmsApXrCVv5EU25iX6inEPbV+VakJfLealkBtJN69AQmh1eGOdCl9Sm1UP6XQ==",
+ "dependencies": {
+ "extend": "^3.0.2",
+ "https-proxy-agent": "^7.0.1",
+ "node-fetch": "^3.3.2",
+ "rimraf": "^5.0.1"
+ },
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/gaxios/node_modules/brace-expansion": {
+ "version": "2.0.2",
+ "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.2.tgz",
+ "integrity": "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==",
+ "dependencies": {
+ "balanced-match": "^1.0.0"
+ }
+ },
+ "node_modules/gaxios/node_modules/glob": {
+ "version": "10.5.0",
+ "resolved": "https://registry.npmjs.org/glob/-/glob-10.5.0.tgz",
+ "integrity": "sha512-DfXN8DfhJ7NH3Oe7cFmu3NCu1wKbkReJ8TorzSAFbSKrlNaQSKfIzqYqVY8zlbs2NLBbWpRiU52GX2PbaBVNkg==",
+ "dependencies": {
+ "foreground-child": "^3.1.0",
+ "jackspeak": "^3.1.2",
+ "minimatch": "^9.0.4",
+ "minipass": "^7.1.2",
+ "package-json-from-dist": "^1.0.0",
+ "path-scurry": "^1.11.1"
+ },
+ "bin": {
+ "glob": "dist/esm/bin.mjs"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/isaacs"
+ }
+ },
+ "node_modules/gaxios/node_modules/minimatch": {
+ "version": "9.0.5",
+ "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.5.tgz",
+ "integrity": "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==",
+ "dependencies": {
+ "brace-expansion": "^2.0.1"
+ },
+ "engines": {
+ "node": ">=16 || 14 >=14.17"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/isaacs"
+ }
+ },
+ "node_modules/gaxios/node_modules/minipass": {
+ "version": "7.1.2",
+ "resolved": "https://registry.npmjs.org/minipass/-/minipass-7.1.2.tgz",
+ "integrity": "sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw==",
+ "engines": {
+ "node": ">=16 || 14 >=14.17"
+ }
+ },
+ "node_modules/gaxios/node_modules/rimraf": {
+ "version": "5.0.10",
+ "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-5.0.10.tgz",
+ "integrity": "sha512-l0OE8wL34P4nJH/H2ffoaniAokM2qSmrtXHmlpvYr5AVVX8msAyW0l8NVJFDxlSK4u3Uh/f41cQheDVdnYijwQ==",
+ "dependencies": {
+ "glob": "^10.3.7"
+ },
+ "bin": {
+ "rimraf": "dist/esm/bin.mjs"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/isaacs"
+ }
+ },
+ "node_modules/gcp-metadata": {
+ "version": "8.1.2",
+ "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-8.1.2.tgz",
+ "integrity": "sha512-zV/5HKTfCeKWnxG0Dmrw51hEWFGfcF2xiXqcA3+J90WDuP0SvoiSO5ORvcBsifmx/FoIjgQN3oNOGaQ5PhLFkg==",
+ "dependencies": {
+ "gaxios": "^7.0.0",
+ "google-logging-utils": "^1.0.0",
+ "json-bigint": "^1.0.0"
+ },
+ "engines": {
+ "node": ">=18"
+ }
+ },
"node_modules/gensync": {
"version": "1.0.0-beta.2",
"resolved": "https://registry.npmjs.org/gensync/-/gensync-1.0.0-beta.2.tgz",
@@ -9552,7 +10025,6 @@
"version": "1.4.0",
"resolved": "https://registry.npmjs.org/get-east-asian-width/-/get-east-asian-width-1.4.0.tgz",
"integrity": "sha512-QZjmEOC+IT1uk6Rx0sX22V6uHWVwbdbxf1faPqJ1QhLdGgsRGCZoyaQBm/piRdJy/D2um6hM1UP7ZEeQ4EkP+Q==",
- "dev": true,
"license": "MIT",
"engines": {
"node": ">=18"
@@ -9761,6 +10233,134 @@
"url": "https://github.com/sponsors/ljharb"
}
},
+ "node_modules/google-auth-library": {
+ "version": "10.5.0",
+ "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-10.5.0.tgz",
+ "integrity": "sha512-7ABviyMOlX5hIVD60YOfHw4/CxOfBhyduaYB+wbFWCWoni4N7SLcV46hrVRktuBbZjFC9ONyqamZITN7q3n32w==",
+ "dependencies": {
+ "base64-js": "^1.3.0",
+ "ecdsa-sig-formatter": "^1.0.11",
+ "gaxios": "^7.0.0",
+ "gcp-metadata": "^8.0.0",
+ "google-logging-utils": "^1.0.0",
+ "gtoken": "^8.0.0",
+ "jws": "^4.0.0"
+ },
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/google-auth-library/node_modules/jwa": {
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/jwa/-/jwa-2.0.1.tgz",
+ "integrity": "sha512-hRF04fqJIP8Abbkq5NKGN0Bbr3JxlQ+qhZufXVr0DvujKy93ZCbXZMHDL4EOtodSbCWxOqR8MS1tXA5hwqCXDg==",
+ "dependencies": {
+ "buffer-equal-constant-time": "^1.0.1",
+ "ecdsa-sig-formatter": "1.0.11",
+ "safe-buffer": "^5.0.1"
+ }
+ },
+ "node_modules/google-auth-library/node_modules/jws": {
+ "version": "4.0.1",
+ "resolved": "https://registry.npmjs.org/jws/-/jws-4.0.1.tgz",
+ "integrity": "sha512-EKI/M/yqPncGUUh44xz0PxSidXFr/+r0pA70+gIYhjv+et7yxM+s29Y+VGDkovRofQem0fs7Uvf4+YmAdyRduA==",
+ "dependencies": {
+ "jwa": "^2.0.1",
+ "safe-buffer": "^5.0.1"
+ }
+ },
+ "node_modules/google-gax": {
+ "version": "5.0.6",
+ "resolved": "https://registry.npmjs.org/google-gax/-/google-gax-5.0.6.tgz",
+ "integrity": "sha512-1kGbqVQBZPAAu4+/R1XxPQKP0ydbNYoLAr4l0ZO2bMV0kLyLW4I1gAk++qBLWt7DPORTzmWRMsCZe86gDjShJA==",
+ "dependencies": {
+ "@grpc/grpc-js": "^1.12.6",
+ "@grpc/proto-loader": "^0.8.0",
+ "duplexify": "^4.1.3",
+ "google-auth-library": "^10.1.0",
+ "google-logging-utils": "^1.1.1",
+ "node-fetch": "^3.3.2",
+ "object-hash": "^3.0.0",
+ "proto3-json-serializer": "^3.0.0",
+ "protobufjs": "^7.5.3",
+ "retry-request": "^8.0.0",
+ "rimraf": "^5.0.1"
+ },
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/google-gax/node_modules/brace-expansion": {
+ "version": "2.0.2",
+ "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.2.tgz",
+ "integrity": "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==",
+ "dependencies": {
+ "balanced-match": "^1.0.0"
+ }
+ },
+ "node_modules/google-gax/node_modules/glob": {
+ "version": "10.5.0",
+ "resolved": "https://registry.npmjs.org/glob/-/glob-10.5.0.tgz",
+ "integrity": "sha512-DfXN8DfhJ7NH3Oe7cFmu3NCu1wKbkReJ8TorzSAFbSKrlNaQSKfIzqYqVY8zlbs2NLBbWpRiU52GX2PbaBVNkg==",
+ "dependencies": {
+ "foreground-child": "^3.1.0",
+ "jackspeak": "^3.1.2",
+ "minimatch": "^9.0.4",
+ "minipass": "^7.1.2",
+ "package-json-from-dist": "^1.0.0",
+ "path-scurry": "^1.11.1"
+ },
+ "bin": {
+ "glob": "dist/esm/bin.mjs"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/isaacs"
+ }
+ },
+ "node_modules/google-gax/node_modules/minimatch": {
+ "version": "9.0.5",
+ "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.5.tgz",
+ "integrity": "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==",
+ "dependencies": {
+ "brace-expansion": "^2.0.1"
+ },
+ "engines": {
+ "node": ">=16 || 14 >=14.17"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/isaacs"
+ }
+ },
+ "node_modules/google-gax/node_modules/minipass": {
+ "version": "7.1.2",
+ "resolved": "https://registry.npmjs.org/minipass/-/minipass-7.1.2.tgz",
+ "integrity": "sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw==",
+ "engines": {
+ "node": ">=16 || 14 >=14.17"
+ }
+ },
+ "node_modules/google-gax/node_modules/rimraf": {
+ "version": "5.0.10",
+ "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-5.0.10.tgz",
+ "integrity": "sha512-l0OE8wL34P4nJH/H2ffoaniAokM2qSmrtXHmlpvYr5AVVX8msAyW0l8NVJFDxlSK4u3Uh/f41cQheDVdnYijwQ==",
+ "dependencies": {
+ "glob": "^10.3.7"
+ },
+ "bin": {
+ "rimraf": "dist/esm/bin.mjs"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/isaacs"
+ }
+ },
+ "node_modules/google-logging-utils": {
+ "version": "1.1.3",
+ "resolved": "https://registry.npmjs.org/google-logging-utils/-/google-logging-utils-1.1.3.tgz",
+ "integrity": "sha512-eAmLkjDjAFCVXg7A1unxHsLf961m6y17QFqXqAXGj/gVkKFrEICfStRfwUlGNfeCEjNRa32JEWOUTlYXPyyKvA==",
+ "engines": {
+ "node": ">=14"
+ }
+ },
"node_modules/gopd": {
"version": "1.2.0",
"resolved": "https://registry.npmjs.org/gopd/-/gopd-1.2.0.tgz",
@@ -9812,6 +10412,37 @@
"dev": true,
"license": "MIT"
},
+ "node_modules/gtoken": {
+ "version": "8.0.0",
+ "resolved": "https://registry.npmjs.org/gtoken/-/gtoken-8.0.0.tgz",
+ "integrity": "sha512-+CqsMbHPiSTdtSO14O51eMNlrp9N79gmeqmXeouJOhfucAedHw9noVe/n5uJk3tbKE6a+6ZCQg3RPhVhHByAIw==",
+ "dependencies": {
+ "gaxios": "^7.0.0",
+ "jws": "^4.0.0"
+ },
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/gtoken/node_modules/jwa": {
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/jwa/-/jwa-2.0.1.tgz",
+ "integrity": "sha512-hRF04fqJIP8Abbkq5NKGN0Bbr3JxlQ+qhZufXVr0DvujKy93ZCbXZMHDL4EOtodSbCWxOqR8MS1tXA5hwqCXDg==",
+ "dependencies": {
+ "buffer-equal-constant-time": "^1.0.1",
+ "ecdsa-sig-formatter": "1.0.11",
+ "safe-buffer": "^5.0.1"
+ }
+ },
+ "node_modules/gtoken/node_modules/jws": {
+ "version": "4.0.1",
+ "resolved": "https://registry.npmjs.org/jws/-/jws-4.0.1.tgz",
+ "integrity": "sha512-EKI/M/yqPncGUUh44xz0PxSidXFr/+r0pA70+gIYhjv+et7yxM+s29Y+VGDkovRofQem0fs7Uvf4+YmAdyRduA==",
+ "dependencies": {
+ "jwa": "^2.0.1",
+ "safe-buffer": "^5.0.1"
+ }
+ },
"node_modules/has-flag": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz",
@@ -10079,6 +10710,17 @@
"void-elements": "3.1.0"
}
},
+ "node_modules/html-tags": {
+ "version": "3.3.1",
+ "resolved": "https://registry.npmjs.org/html-tags/-/html-tags-3.3.1.tgz",
+ "integrity": "sha512-ztqyC3kLto0e9WbNp0aeP+M3kTt+nbaIveGmUxAtZa+8iFgKLUOD4YKM5j+f3QD89bra7UeumolZHKuOXnTmeQ==",
+ "engines": {
+ "node": ">=8"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/sindresorhus"
+ }
+ },
"node_modules/html-url-attributes": {
"version": "3.0.1",
"resolved": "https://registry.npmjs.org/html-url-attributes/-/html-url-attributes-3.0.1.tgz",
@@ -10153,7 +10795,6 @@
"version": "7.0.6",
"resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-7.0.6.tgz",
"integrity": "sha512-vK9P5/iUfdl95AI+JVyUuIcVtd4ofvtrOr3HNtM2yxC9bnMbEdp3x01OhQNnjb8IJYi38VlTE3mBXwcfvywuSw==",
- "dev": true,
"license": "MIT",
"dependencies": {
"agent-base": "^7.1.2",
@@ -10189,6 +10830,162 @@
"url": "https://github.com/sponsors/typicode"
}
},
+ "node_modules/i18n-auto-translation": {
+ "version": "2.2.3",
+ "resolved": "https://registry.npmjs.org/i18n-auto-translation/-/i18n-auto-translation-2.2.3.tgz",
+ "integrity": "sha512-Gu3qGOq4mG8qBcEYqfvga5SQGsmfKjTakXdXqFV+FmMk12KGnpOcftzvp/7TcgXM6MvLDPenAf2M61a/R0N9Lw==",
+ "dependencies": {
+ "@google-cloud/translate": "9.2.0",
+ "axios": "1.12.2",
+ "deep-object-diff": "1.1.9",
+ "glob": "11.0.3",
+ "html-entities": "2.6.0",
+ "just-extend": "6.2.0",
+ "yargs": "18.0.0",
+ "yocto-spinner": "1.0.0"
+ },
+ "bin": {
+ "i18n-auto-translation": "dist/src/index.js"
+ },
+ "engines": {
+ "node": ">= 14.17"
+ }
+ },
+ "node_modules/i18n-auto-translation/node_modules/axios": {
+ "version": "1.12.2",
+ "resolved": "https://registry.npmjs.org/axios/-/axios-1.12.2.tgz",
+ "integrity": "sha512-vMJzPewAlRyOgxV2dU0Cuz2O8zzzx9VYtbJOaBgXFeLc4IV/Eg50n4LowmehOOR61S8ZMpc2K5Sa7g6A4jfkUw==",
+ "dependencies": {
+ "follow-redirects": "^1.15.6",
+ "form-data": "^4.0.4",
+ "proxy-from-env": "^1.1.0"
+ }
+ },
+ "node_modules/i18n-auto-translation/node_modules/cliui": {
+ "version": "9.0.1",
+ "resolved": "https://registry.npmjs.org/cliui/-/cliui-9.0.1.tgz",
+ "integrity": "sha512-k7ndgKhwoQveBL+/1tqGJYNz097I7WOvwbmmU2AR5+magtbjPWQTS1C5vzGkBC8Ym8UWRzfKUzUUqFLypY4Q+w==",
+ "dependencies": {
+ "string-width": "^7.2.0",
+ "strip-ansi": "^7.1.0",
+ "wrap-ansi": "^9.0.0"
+ },
+ "engines": {
+ "node": ">=20"
+ }
+ },
+ "node_modules/i18n-auto-translation/node_modules/emoji-regex": {
+ "version": "10.6.0",
+ "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-10.6.0.tgz",
+ "integrity": "sha512-toUI84YS5YmxW219erniWD0CIVOo46xGKColeNQRgOzDorgBi1v4D71/OFzgD9GO2UGKIv1C3Sp8DAn0+j5w7A=="
+ },
+ "node_modules/i18n-auto-translation/node_modules/glob": {
+ "version": "11.0.3",
+ "resolved": "https://registry.npmjs.org/glob/-/glob-11.0.3.tgz",
+ "integrity": "sha512-2Nim7dha1KVkaiF4q6Dj+ngPPMdfvLJEOpZk/jKiUAkqKebpGAWQXAq9z1xu9HKu5lWfqw/FASuccEjyznjPaA==",
+ "dependencies": {
+ "foreground-child": "^3.3.1",
+ "jackspeak": "^4.1.1",
+ "minimatch": "^10.0.3",
+ "minipass": "^7.1.2",
+ "package-json-from-dist": "^1.0.0",
+ "path-scurry": "^2.0.0"
+ },
+ "bin": {
+ "glob": "dist/esm/bin.mjs"
+ },
+ "engines": {
+ "node": "20 || >=22"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/isaacs"
+ }
+ },
+ "node_modules/i18n-auto-translation/node_modules/jackspeak": {
+ "version": "4.1.1",
+ "resolved": "https://registry.npmjs.org/jackspeak/-/jackspeak-4.1.1.tgz",
+ "integrity": "sha512-zptv57P3GpL+O0I7VdMJNBZCu+BPHVQUk55Ft8/QCJjTVxrnJHuVuX/0Bl2A6/+2oyR/ZMEuFKwmzqqZ/U5nPQ==",
+ "dependencies": {
+ "@isaacs/cliui": "^8.0.2"
+ },
+ "engines": {
+ "node": "20 || >=22"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/isaacs"
+ }
+ },
+ "node_modules/i18n-auto-translation/node_modules/lru-cache": {
+ "version": "11.2.4",
+ "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-11.2.4.tgz",
+ "integrity": "sha512-B5Y16Jr9LB9dHVkh6ZevG+vAbOsNOYCX+sXvFWFu7B3Iz5mijW3zdbMyhsh8ANd2mSWBYdJgnqi+mL7/LrOPYg==",
+ "engines": {
+ "node": "20 || >=22"
+ }
+ },
+ "node_modules/i18n-auto-translation/node_modules/minipass": {
+ "version": "7.1.2",
+ "resolved": "https://registry.npmjs.org/minipass/-/minipass-7.1.2.tgz",
+ "integrity": "sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw==",
+ "engines": {
+ "node": ">=16 || 14 >=14.17"
+ }
+ },
+ "node_modules/i18n-auto-translation/node_modules/path-scurry": {
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/path-scurry/-/path-scurry-2.0.1.tgz",
+ "integrity": "sha512-oWyT4gICAu+kaA7QWk/jvCHWarMKNs6pXOGWKDTr7cw4IGcUbW+PeTfbaQiLGheFRpjo6O9J0PmyMfQPjH71oA==",
+ "dependencies": {
+ "lru-cache": "^11.0.0",
+ "minipass": "^7.1.2"
+ },
+ "engines": {
+ "node": "20 || >=22"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/isaacs"
+ }
+ },
+ "node_modules/i18n-auto-translation/node_modules/string-width": {
+ "version": "7.2.0",
+ "resolved": "https://registry.npmjs.org/string-width/-/string-width-7.2.0.tgz",
+ "integrity": "sha512-tsaTIkKW9b4N+AEj+SVA+WhJzV7/zMhcSu78mLKWSk7cXMOSHsBKFWUs0fWwq8QyK3MgJBQRX6Gbi4kYbdvGkQ==",
+ "dependencies": {
+ "emoji-regex": "^10.3.0",
+ "get-east-asian-width": "^1.0.0",
+ "strip-ansi": "^7.1.0"
+ },
+ "engines": {
+ "node": ">=18"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/sindresorhus"
+ }
+ },
+ "node_modules/i18n-auto-translation/node_modules/yargs": {
+ "version": "18.0.0",
+ "resolved": "https://registry.npmjs.org/yargs/-/yargs-18.0.0.tgz",
+ "integrity": "sha512-4UEqdc2RYGHZc7Doyqkrqiln3p9X2DZVxaGbwhn2pi7MrRagKaOcIKe8L3OxYcbhXLgLFUS3zAYuQjKBQgmuNg==",
+ "dependencies": {
+ "cliui": "^9.0.1",
+ "escalade": "^3.1.1",
+ "get-caller-file": "^2.0.5",
+ "string-width": "^7.2.0",
+ "y18n": "^5.0.5",
+ "yargs-parser": "^22.0.0"
+ },
+ "engines": {
+ "node": "^20.19.0 || ^22.12.0 || >=23"
+ }
+ },
+ "node_modules/i18n-auto-translation/node_modules/yargs-parser": {
+ "version": "22.0.0",
+ "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-22.0.0.tgz",
+ "integrity": "sha512-rwu/ClNdSMpkSrUb+d6BRsSkLUq1fmfsY6TOpYzTwvwkg1/NRG85KBy3kq++A8LKQwX6lsu+aWad+2khvuXrqw==",
+ "engines": {
+ "node": "^20.19.0 || ^22.12.0 || >=23"
+ }
+ },
"node_modules/i18next": {
"version": "25.6.0",
"resolved": "https://registry.npmjs.org/i18next/-/i18next-25.6.0.tgz",
@@ -10432,7 +11229,6 @@
"version": "10.0.1",
"resolved": "https://registry.npmjs.org/ip-address/-/ip-address-10.0.1.tgz",
"integrity": "sha512-NWv9YLW4PoW2B7xtzaS3NCot75m6nK7Icdv0o3lfMceJVRfSoQwqD4wEH5rLwoKJwUiZ/rfpiVBhnaF0FK4HoA==",
- "dev": true,
"license": "MIT",
"engines": {
"node": ">= 12"
@@ -10543,6 +11339,17 @@
"url": "https://github.com/sponsors/wooorm"
}
},
+ "node_modules/is-html": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/is-html/-/is-html-2.0.0.tgz",
+ "integrity": "sha512-S+OpgB5i7wzIue/YSE5hg0e5ZYfG3hhpNh9KGl6ayJ38p7ED6wxQLd1TV91xHpcTvw90KMJ9EwN3F/iNflHBVg==",
+ "dependencies": {
+ "html-tags": "^3.0.0"
+ },
+ "engines": {
+ "node": ">=8"
+ }
+ },
"node_modules/is-interactive": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/is-interactive/-/is-interactive-1.0.0.tgz",
@@ -10647,14 +11454,12 @@
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz",
"integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==",
- "dev": true,
"license": "ISC"
},
"node_modules/jackspeak": {
"version": "3.4.3",
"resolved": "https://registry.npmjs.org/jackspeak/-/jackspeak-3.4.3.tgz",
"integrity": "sha512-OGlZQpz2yfahA/Rd1Y8Cd9SIEsqvXkLVoSw/cgwhnhFMDbsQFeZYoJJ7bIZBS9BcamUW96asq/npPWugM+RQBw==",
- "dev": true,
"license": "BlueOak-1.0.0",
"dependencies": {
"@isaacs/cliui": "^8.0.2"
@@ -10758,6 +11563,14 @@
"node": ">=6"
}
},
+ "node_modules/json-bigint": {
+ "version": "1.0.0",
+ "resolved": "https://registry.npmjs.org/json-bigint/-/json-bigint-1.0.0.tgz",
+ "integrity": "sha512-SiPv/8VpZuWbvLSMtTDU8hEfrZWg/mH/nV/b4o0CYbSxu1UIQPLdwKOCIyLQX+VIPO5vrLX3i8qtqFyhdPSUSQ==",
+ "dependencies": {
+ "bignumber.js": "^9.0.0"
+ }
+ },
"node_modules/json-buffer": {
"version": "3.0.1",
"resolved": "https://registry.npmjs.org/json-buffer/-/json-buffer-3.0.1.tgz",
@@ -10881,6 +11694,11 @@
"setimmediate": "^1.0.5"
}
},
+ "node_modules/just-extend": {
+ "version": "6.2.0",
+ "resolved": "https://registry.npmjs.org/just-extend/-/just-extend-6.2.0.tgz",
+ "integrity": "sha512-cYofQu2Xpom82S6qD778jBDpwvvy39s1l/hrYij2u9AMdQcGRpaBu6kY4mVhuno5kJVi1DAz4aiphA2WI1/OAw=="
+ },
"node_modules/jwa": {
"version": "1.4.2",
"resolved": "https://registry.npmjs.org/jwa/-/jwa-1.4.2.tgz",
@@ -11375,7 +12193,6 @@
"version": "4.3.0",
"resolved": "https://registry.npmjs.org/lodash.camelcase/-/lodash.camelcase-4.3.0.tgz",
"integrity": "sha512-TwuEnCnxbc3rAvhf/LbG7tJUDzhqXyFnv3dtzLOPgCG/hODL7WFnsbwktkD7yUV0RrreP/l1PALq/YSg6VvjlA==",
- "dev": true,
"license": "MIT"
},
"node_modules/lodash.includes": {
@@ -11552,6 +12369,11 @@
"url": "https://github.com/chalk/slice-ansi?sponsor=1"
}
},
+ "node_modules/long": {
+ "version": "5.3.2",
+ "resolved": "https://registry.npmjs.org/long/-/long-5.3.2.tgz",
+ "integrity": "sha512-mNAgZ1GmyNhD7AuqnTG3/VQ26o760+ZYBPKjPvugO8+nLbYfX6TVpJPseBvopbdY+qpZ/lKUnmEc1LeZYS3QAA=="
+ },
"node_modules/longest-streak": {
"version": "3.1.0",
"resolved": "https://registry.npmjs.org/longest-streak/-/longest-streak-3.1.0.tgz",
@@ -12786,7 +13608,6 @@
"version": "10.1.1",
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-10.1.1.tgz",
"integrity": "sha512-enIvLvRAFZYXJzkCYG5RKmPfrFArdLv+R+lbQ53BmIMLIry74bjKzX6iHAm8WYamJkhSSEabrWN5D97XnKObjQ==",
- "dev": true,
"license": "BlueOak-1.0.0",
"dependencies": {
"@isaacs/brace-expansion": "^5.0.0"
@@ -13179,6 +14000,14 @@
"node": ">=0.10.0"
}
},
+ "node_modules/object-hash": {
+ "version": "3.0.0",
+ "resolved": "https://registry.npmjs.org/object-hash/-/object-hash-3.0.0.tgz",
+ "integrity": "sha512-RSn9F68PjH9HqtltsSnqYC1XXoWe9Bju5+213R98cNGttag9q9yAOTzdbsqvIa7aNm5WffBZFpWYr2aWrklWAw==",
+ "engines": {
+ "node": ">= 6"
+ }
+ },
"node_modules/object-inspect": {
"version": "1.13.4",
"resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.13.4.tgz",
@@ -13454,7 +14283,6 @@
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/package-json-from-dist/-/package-json-from-dist-1.0.1.tgz",
"integrity": "sha512-UEZIS3/by4OC8vL3P2dTXRETpebLI2NiI5vIrjaD/5UtrkFX/tNbwjTSRAGC/+7CAo2pIcBaRgWmcBBHcsaCIw==",
- "dev": true,
"license": "BlueOak-1.0.0"
},
"node_modules/pako": {
@@ -13559,7 +14387,6 @@
"version": "3.1.1",
"resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz",
"integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==",
- "dev": true,
"license": "MIT",
"engines": {
"node": ">=8"
@@ -13569,7 +14396,6 @@
"version": "1.11.1",
"resolved": "https://registry.npmjs.org/path-scurry/-/path-scurry-1.11.1.tgz",
"integrity": "sha512-Xa4Nw17FS9ApQFJ9umLiJS4orGjm7ZzwUrwamcGQuHSzDyth9boKDaycYdDcZDuqYATXw4HFXgaqWTctW/v1HA==",
- "dev": true,
"license": "BlueOak-1.0.0",
"dependencies": {
"lru-cache": "^10.2.0",
@@ -13586,14 +14412,12 @@
"version": "10.4.3",
"resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-10.4.3.tgz",
"integrity": "sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ==",
- "dev": true,
"license": "ISC"
},
"node_modules/path-scurry/node_modules/minipass": {
"version": "7.1.2",
"resolved": "https://registry.npmjs.org/minipass/-/minipass-7.1.2.tgz",
"integrity": "sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw==",
- "dev": true,
"license": "ISC",
"engines": {
"node": ">=16 || 14 >=14.17"
@@ -13935,6 +14759,40 @@
"url": "https://github.com/sponsors/wooorm"
}
},
+ "node_modules/proto3-json-serializer": {
+ "version": "3.0.4",
+ "resolved": "https://registry.npmjs.org/proto3-json-serializer/-/proto3-json-serializer-3.0.4.tgz",
+ "integrity": "sha512-E1sbAYg3aEbXrq0n1ojJkRHQJGE1kaE/O6GLA94y8rnJBfgvOPTOd1b9hOceQK1FFZI9qMh1vBERCyO2ifubcw==",
+ "dependencies": {
+ "protobufjs": "^7.4.0"
+ },
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/protobufjs": {
+ "version": "7.5.4",
+ "resolved": "https://registry.npmjs.org/protobufjs/-/protobufjs-7.5.4.tgz",
+ "integrity": "sha512-CvexbZtbov6jW2eXAvLukXjXUW1TzFaivC46BpWc/3BpcCysb5Vffu+B3XHMm8lVEuy2Mm4XGex8hBSg1yapPg==",
+ "hasInstallScript": true,
+ "dependencies": {
+ "@protobufjs/aspromise": "^1.1.2",
+ "@protobufjs/base64": "^1.1.2",
+ "@protobufjs/codegen": "^2.0.4",
+ "@protobufjs/eventemitter": "^1.1.0",
+ "@protobufjs/fetch": "^1.1.0",
+ "@protobufjs/float": "^1.0.2",
+ "@protobufjs/inquire": "^1.1.0",
+ "@protobufjs/path": "^1.1.2",
+ "@protobufjs/pool": "^1.1.0",
+ "@protobufjs/utf8": "^1.1.0",
+ "@types/node": ">=13.7.0",
+ "long": "^5.0.0"
+ },
+ "engines": {
+ "node": ">=12.0.0"
+ }
+ },
"node_modules/proxy-addr": {
"version": "2.0.7",
"resolved": "https://registry.npmjs.org/proxy-addr/-/proxy-addr-2.0.7.tgz",
@@ -14936,6 +15794,18 @@
"node": ">= 4"
}
},
+ "node_modules/retry-request": {
+ "version": "8.0.2",
+ "resolved": "https://registry.npmjs.org/retry-request/-/retry-request-8.0.2.tgz",
+ "integrity": "sha512-JzFPAfklk1kjR1w76f0QOIhoDkNkSqW8wYKT08n9yysTmZfB+RQ2QoXoTAeOi1HD9ZipTyTAZg3c4pM/jeqgSw==",
+ "dependencies": {
+ "extend": "^3.0.2",
+ "teeny-request": "^10.0.0"
+ },
+ "engines": {
+ "node": ">=18"
+ }
+ },
"node_modules/reusify": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/reusify/-/reusify-1.1.0.tgz",
@@ -15225,7 +16095,6 @@
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz",
"integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==",
- "dev": true,
"license": "MIT",
"dependencies": {
"shebang-regex": "^3.0.0"
@@ -15238,7 +16107,6 @@
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz",
"integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==",
- "dev": true,
"license": "MIT",
"engines": {
"node": ">=8"
@@ -15333,7 +16201,6 @@
"version": "4.1.0",
"resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-4.1.0.tgz",
"integrity": "sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==",
- "dev": true,
"license": "ISC",
"engines": {
"node": ">=14"
@@ -15420,7 +16287,6 @@
"version": "4.2.0",
"resolved": "https://registry.npmjs.org/smart-buffer/-/smart-buffer-4.2.0.tgz",
"integrity": "sha512-94hK0Hh8rPqQl2xXc3HsaBoOXKV20MToPkcXvwbISWLEs+64sBq5kFgn2kJDHb1Pry9yrP0dxrCI9RRci7RXKg==",
- "dev": true,
"license": "MIT",
"engines": {
"node": ">= 6.0.0",
@@ -15431,7 +16297,6 @@
"version": "2.8.7",
"resolved": "https://registry.npmjs.org/socks/-/socks-2.8.7.tgz",
"integrity": "sha512-HLpt+uLy/pxB+bum/9DzAgiKS8CX1EvbWxI4zlmgGCExImLdiad2iCwXT5Z4c9c3Eq8rP2318mPW2c+QbtjK8A==",
- "dev": true,
"license": "MIT",
"dependencies": {
"ip-address": "^10.0.1",
@@ -15611,6 +16476,19 @@
"node": ">= 0.8"
}
},
+ "node_modules/stream-events": {
+ "version": "1.0.5",
+ "resolved": "https://registry.npmjs.org/stream-events/-/stream-events-1.0.5.tgz",
+ "integrity": "sha512-E1GUzBSgvct8Jsb3v2X15pjzN1tYebtbLaMg+eBOUOAxgbLoSbT2NS91ckc5lJD1KfLjId+jXJRgo0qnV5Nerg==",
+ "dependencies": {
+ "stubs": "^3.0.0"
+ }
+ },
+ "node_modules/stream-shift": {
+ "version": "1.0.3",
+ "resolved": "https://registry.npmjs.org/stream-shift/-/stream-shift-1.0.3.tgz",
+ "integrity": "sha512-76ORR0DO1o1hlKwTbi/DM3EXWGf3ZJYO8cXX5RJwnul2DEg2oyoZyjLNoQM8WsvZiFKCRfC1O0J7iCvie3RZmQ=="
+ },
"node_modules/streamsearch": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/streamsearch/-/streamsearch-1.1.0.tgz",
@@ -15663,7 +16541,6 @@
"version": "4.2.3",
"resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz",
"integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==",
- "dev": true,
"license": "MIT",
"dependencies": {
"emoji-regex": "^8.0.0",
@@ -15678,7 +16555,6 @@
"version": "5.0.1",
"resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz",
"integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==",
- "dev": true,
"license": "MIT",
"engines": {
"node": ">=8"
@@ -15688,7 +16564,6 @@
"version": "6.0.1",
"resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz",
"integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==",
- "dev": true,
"license": "MIT",
"dependencies": {
"ansi-regex": "^5.0.1"
@@ -15736,7 +16611,6 @@
"version": "7.1.2",
"resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.2.tgz",
"integrity": "sha512-gmBGslpoQJtgnMAvOVqGZpEz9dyoKTCzy2nfz/n8aIFhN/jCE/rCmcxabB6jOOHV+0WNnylOxaxBQPSvcWklhA==",
- "dev": true,
"license": "MIT",
"dependencies": {
"ansi-regex": "^6.0.1"
@@ -15753,7 +16627,6 @@
"version": "6.0.1",
"resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz",
"integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==",
- "dev": true,
"license": "MIT",
"dependencies": {
"ansi-regex": "^5.0.1"
@@ -15766,7 +16639,6 @@
"version": "5.0.1",
"resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz",
"integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==",
- "dev": true,
"license": "MIT",
"engines": {
"node": ">=8"
@@ -15785,6 +16657,11 @@
"url": "https://github.com/sponsors/sindresorhus"
}
},
+ "node_modules/stubs": {
+ "version": "3.0.0",
+ "resolved": "https://registry.npmjs.org/stubs/-/stubs-3.0.0.tgz",
+ "integrity": "sha512-PdHt7hHUJKxvTCgbKX9C1V/ftOcjJQgz8BZwNfV5c4B6dcGqlpelTbJ999jBGZ2jYiPAwcX5dP6oBwVlBlUbxw=="
+ },
"node_modules/style-mod": {
"version": "4.1.3",
"resolved": "https://registry.npmjs.org/style-mod/-/style-mod-4.1.3.tgz",
@@ -15952,6 +16829,56 @@
"dev": true,
"license": "ISC"
},
+ "node_modules/teeny-request": {
+ "version": "10.1.0",
+ "resolved": "https://registry.npmjs.org/teeny-request/-/teeny-request-10.1.0.tgz",
+ "integrity": "sha512-3ZnLvgWF29jikg1sAQ1g0o+lr5JX6sVgYvfUJazn7ZjJroDBUTWp44/+cFVX0bULjv4vci+rBD+oGVAkWqhUbw==",
+ "dependencies": {
+ "http-proxy-agent": "^5.0.0",
+ "https-proxy-agent": "^5.0.0",
+ "node-fetch": "^3.3.2",
+ "stream-events": "^1.0.5"
+ },
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/teeny-request/node_modules/agent-base": {
+ "version": "6.0.2",
+ "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-6.0.2.tgz",
+ "integrity": "sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ==",
+ "dependencies": {
+ "debug": "4"
+ },
+ "engines": {
+ "node": ">= 6.0.0"
+ }
+ },
+ "node_modules/teeny-request/node_modules/http-proxy-agent": {
+ "version": "5.0.0",
+ "resolved": "https://registry.npmjs.org/http-proxy-agent/-/http-proxy-agent-5.0.0.tgz",
+ "integrity": "sha512-n2hY8YdoRE1i7r6M0w9DIw5GgZN0G25P8zLCRQ8rjXtTU3vsNFBI/vWK/UIeE6g5MUUz6avwAPXmL6Fy9D/90w==",
+ "dependencies": {
+ "@tootallnate/once": "2",
+ "agent-base": "6",
+ "debug": "4"
+ },
+ "engines": {
+ "node": ">= 6"
+ }
+ },
+ "node_modules/teeny-request/node_modules/https-proxy-agent": {
+ "version": "5.0.1",
+ "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-5.0.1.tgz",
+ "integrity": "sha512-dFcAjpTQFgoLMzC2VwU+C/CbS7uRL0lWmxDITmqm7C+7F0Odmj6s9l6alZc6AELXhrnggM2CeWSXHGOdX2YtwA==",
+ "dependencies": {
+ "agent-base": "6",
+ "debug": "4"
+ },
+ "engines": {
+ "node": ">= 6"
+ }
+ },
"node_modules/temp": {
"version": "0.9.4",
"resolved": "https://registry.npmjs.org/temp/-/temp-0.9.4.tgz",
@@ -16909,7 +17836,6 @@
"version": "2.0.2",
"resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz",
"integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==",
- "dev": true,
"license": "ISC",
"dependencies": {
"isexe": "^2.0.0"
@@ -16950,7 +17876,6 @@
"version": "9.0.2",
"resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-9.0.2.tgz",
"integrity": "sha512-42AtmgqjV+X1VpdOfyTGOYRi0/zsoLqtXQckTmqTeybT+BDIbM/Guxo7x3pE2vtpr1ok6xRqM9OpBe+Jyoqyww==",
- "dev": true,
"license": "MIT",
"dependencies": {
"ansi-styles": "^6.2.1",
@@ -16969,7 +17894,6 @@
"version": "7.0.0",
"resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz",
"integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==",
- "dev": true,
"license": "MIT",
"dependencies": {
"ansi-styles": "^4.0.0",
@@ -16987,7 +17911,6 @@
"version": "5.0.1",
"resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz",
"integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==",
- "dev": true,
"license": "MIT",
"engines": {
"node": ">=8"
@@ -16997,7 +17920,6 @@
"version": "6.0.1",
"resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz",
"integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==",
- "dev": true,
"license": "MIT",
"dependencies": {
"ansi-regex": "^5.0.1"
@@ -17010,7 +17932,6 @@
"version": "6.2.3",
"resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-6.2.3.tgz",
"integrity": "sha512-4Dj6M28JB+oAH8kFkTLUo+a2jwOFkuqb3yucU0CANcRRUbxS0cP0nZYCGjcc3BNXwRIsUVmDGgzawme7zvJHvg==",
- "dev": true,
"license": "MIT",
"engines": {
"node": ">=12"
@@ -17023,14 +17944,12 @@
"version": "10.6.0",
"resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-10.6.0.tgz",
"integrity": "sha512-toUI84YS5YmxW219erniWD0CIVOo46xGKColeNQRgOzDorgBi1v4D71/OFzgD9GO2UGKIv1C3Sp8DAn0+j5w7A==",
- "dev": true,
"license": "MIT"
},
"node_modules/wrap-ansi/node_modules/string-width": {
"version": "7.2.0",
"resolved": "https://registry.npmjs.org/string-width/-/string-width-7.2.0.tgz",
"integrity": "sha512-tsaTIkKW9b4N+AEj+SVA+WhJzV7/zMhcSu78mLKWSk7cXMOSHsBKFWUs0fWwq8QyK3MgJBQRX6Gbi4kYbdvGkQ==",
- "dev": true,
"license": "MIT",
"dependencies": {
"emoji-regex": "^10.3.0",
@@ -17094,7 +18013,6 @@
"version": "5.0.8",
"resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz",
"integrity": "sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==",
- "dev": true,
"license": "ISC",
"engines": {
"node": ">=10"
@@ -17124,7 +18042,6 @@
"version": "17.7.2",
"resolved": "https://registry.npmjs.org/yargs/-/yargs-17.7.2.tgz",
"integrity": "sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w==",
- "dev": true,
"license": "MIT",
"dependencies": {
"cliui": "^8.0.1",
@@ -17143,7 +18060,6 @@
"version": "21.1.1",
"resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-21.1.1.tgz",
"integrity": "sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw==",
- "dev": true,
"license": "ISC",
"engines": {
"node": ">=12"
@@ -17173,6 +18089,31 @@
"url": "https://github.com/sponsors/sindresorhus"
}
},
+ "node_modules/yocto-spinner": {
+ "version": "1.0.0",
+ "resolved": "https://registry.npmjs.org/yocto-spinner/-/yocto-spinner-1.0.0.tgz",
+ "integrity": "sha512-VPX8P/+Z2Fnpx8PC/JELbxp3QRrBxjAekio6yulGtA5gKt9YyRc5ycCb+NHgZCbZ0kx9KxwZp7gC6UlrCcCdSQ==",
+ "dependencies": {
+ "yoctocolors": "^2.1.1"
+ },
+ "engines": {
+ "node": ">=18.19"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/sindresorhus"
+ }
+ },
+ "node_modules/yoctocolors": {
+ "version": "2.1.2",
+ "resolved": "https://registry.npmjs.org/yoctocolors/-/yoctocolors-2.1.2.tgz",
+ "integrity": "sha512-CzhO+pFNo8ajLM2d2IW/R93ipy99LWjtwblvC1RsoSUMZgyLbYFr221TnSNT7GjGdYui6P459mw9JH/g/zW2ug==",
+ "engines": {
+ "node": ">=18"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/sindresorhus"
+ }
+ },
"node_modules/youtube-video-element": {
"version": "1.6.2",
"resolved": "https://registry.npmjs.org/youtube-video-element/-/youtube-video-element-1.6.2.tgz",
diff --git a/package.json b/package.json
index a26dd5f8..412c290c 100644
--- a/package.json
+++ b/package.json
@@ -1,7 +1,7 @@
{
"name": "termix",
"private": true,
- "version": "1.9.0",
+ "version": "1.10.0",
"description": "A web-based server management platform with SSH terminal, tunneling, and file editing capabilities",
"author": "Karmaa",
"main": "electron/main.cjs",
@@ -35,6 +35,7 @@
"@hookform/resolvers": "^5.1.1",
"@monaco-editor/react": "^4.7.0",
"@radix-ui/react-accordion": "^1.2.11",
+ "@radix-ui/react-alert-dialog": "^1.1.15",
"@radix-ui/react-checkbox": "^1.3.2",
"@radix-ui/react-dialog": "^1.1.15",
"@radix-ui/react-dropdown-menu": "^2.1.15",
@@ -45,7 +46,7 @@
"@radix-ui/react-select": "^2.2.5",
"@radix-ui/react-separator": "^1.1.7",
"@radix-ui/react-slider": "^1.3.6",
- "@radix-ui/react-slot": "^1.2.3",
+ "@radix-ui/react-slot": "^1.2.4",
"@radix-ui/react-switch": "^1.2.5",
"@radix-ui/react-tabs": "^1.1.12",
"@radix-ui/react-tooltip": "^1.2.8",
@@ -57,6 +58,7 @@
"@types/qrcode": "^1.5.5",
"@types/speakeasy": "^2.0.10",
"@uiw/codemirror-extensions-langs": "^4.24.1",
+ "@uiw/codemirror-theme-github": "^4.25.4",
"@uiw/react-codemirror": "^4.24.1",
"@xterm/addon-clipboard": "^0.1.0",
"@xterm/addon-fit": "^0.10.0",
@@ -76,6 +78,7 @@
"dotenv": "^17.2.0",
"drizzle-orm": "^0.44.3",
"express": "^5.1.0",
+ "i18n-auto-translation": "^2.2.3",
"i18next": "^25.4.2",
"i18next-browser-languagedetector": "^8.2.0",
"jose": "^5.2.3",
@@ -103,6 +106,7 @@
"react-xtermjs": "^1.0.10",
"recharts": "^3.2.1",
"remark-gfm": "^4.0.1",
+ "socks": "^2.8.7",
"sonner": "^2.0.7",
"speakeasy": "^2.0.0",
"ssh2": "^1.16.0",
diff --git a/repo-images/Image 10.png b/repo-images/Image 10.png
new file mode 100644
index 00000000..efc0c012
Binary files /dev/null and b/repo-images/Image 10.png differ
diff --git a/repo-images/Image 4.png b/repo-images/Image 4.png
index e2dfa99a..a47da14f 100644
Binary files a/repo-images/Image 4.png and b/repo-images/Image 4.png differ
diff --git a/repo-images/Image 8.png b/repo-images/Image 8.png
new file mode 100644
index 00000000..03bf9daf
Binary files /dev/null and b/repo-images/Image 8.png differ
diff --git a/repo-images/Image 9.png b/repo-images/Image 9.png
new file mode 100644
index 00000000..28d9e70f
Binary files /dev/null and b/repo-images/Image 9.png differ
diff --git a/src/backend/dashboard.ts b/src/backend/dashboard.ts
index 465ab9d8..86978dbf 100644
--- a/src/backend/dashboard.ts
+++ b/src/backend/dashboard.ts
@@ -2,8 +2,8 @@ import express from "express";
import cors from "cors";
import cookieParser from "cookie-parser";
import { getDb } from "./database/db/index.js";
-import { recentActivity, sshData } from "./database/db/schema.js";
-import { eq, and, desc } from "drizzle-orm";
+import { recentActivity, sshData, hostAccess } from "./database/db/schema.js";
+import { eq, and, desc, or } from "drizzle-orm";
import { dashboardLogger } from "./utils/logger.js";
import { SimpleDBOps } from "./utils/simple-db-ops.js";
import { AuthManager } from "./utils/auth-manager.js";
@@ -15,7 +15,7 @@ const authManager = AuthManager.getInstance();
const serverStartTime = Date.now();
const activityRateLimiter = new Map();
-const RATE_LIMIT_MS = 1000; // 1 second window
+const RATE_LIMIT_MS = 1000;
app.use(
cors({
@@ -127,9 +127,18 @@ app.post("/activity/log", async (req, res) => {
});
}
- if (type !== "terminal" && type !== "file_manager") {
+ if (
+ ![
+ "terminal",
+ "file_manager",
+ "server_stats",
+ "tunnel",
+ "docker",
+ ].includes(type)
+ ) {
return res.status(400).json({
- error: "Invalid activity type. Must be 'terminal' or 'file_manager'",
+ error:
+ "Invalid activity type. Must be 'terminal', 'file_manager', 'server_stats', 'tunnel', or 'docker'",
});
}
@@ -155,7 +164,7 @@ app.post("/activity/log", async (req, res) => {
entriesToDelete.forEach((key) => activityRateLimiter.delete(key));
}
- const hosts = await SimpleDBOps.select(
+ const ownedHosts = await SimpleDBOps.select(
getDb()
.select()
.from(sshData)
@@ -164,8 +173,19 @@ app.post("/activity/log", async (req, res) => {
userId,
);
- if (hosts.length === 0) {
- return res.status(404).json({ error: "Host not found" });
+ if (ownedHosts.length === 0) {
+ const sharedHosts = await getDb()
+ .select()
+ .from(hostAccess)
+ .where(
+ and(eq(hostAccess.hostId, hostId), eq(hostAccess.userId, userId)),
+ );
+
+ if (sharedHosts.length === 0) {
+ return res
+ .status(404)
+ .json({ error: "Host not found or access denied" });
+ }
}
const result = (await SimpleDBOps.insert(
diff --git a/src/backend/database/database.ts b/src/backend/database/database.ts
index 1eca73d9..744f2889 100644
--- a/src/backend/database/database.ts
+++ b/src/backend/database/database.ts
@@ -8,6 +8,7 @@ import alertRoutes from "./routes/alerts.js";
import credentialsRoutes from "./routes/credentials.js";
import snippetsRoutes from "./routes/snippets.js";
import terminalRoutes from "./routes/terminal.js";
+import rbacRoutes from "./routes/rbac.js";
import cors from "cors";
import fetch from "node-fetch";
import fs from "fs";
@@ -1436,6 +1437,7 @@ app.use("/alerts", alertRoutes);
app.use("/credentials", credentialsRoutes);
app.use("/snippets", snippetsRoutes);
app.use("/terminal", terminalRoutes);
+app.use("/rbac", rbacRoutes);
app.use(
(
diff --git a/src/backend/database/db/index.ts b/src/backend/database/db/index.ts
index f9e1017f..0479b3e9 100644
--- a/src/backend/database/db/index.ts
+++ b/src/backend/database/db/index.ts
@@ -201,13 +201,21 @@ async function initializeCompleteDatabase(): Promise {
enable_tunnel INTEGER NOT NULL DEFAULT 1,
tunnel_connections TEXT,
enable_file_manager INTEGER NOT NULL DEFAULT 1,
+ enable_docker INTEGER NOT NULL DEFAULT 0,
default_path TEXT,
autostart_password TEXT,
autostart_key TEXT,
autostart_key_password TEXT,
force_keyboard_interactive TEXT,
stats_config TEXT,
+ docker_config TEXT,
terminal_config TEXT,
+ notes TEXT,
+ use_socks5 INTEGER,
+ socks5_host TEXT,
+ socks5_port INTEGER,
+ socks5_username TEXT,
+ socks5_password TEXT,
created_at TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP,
updated_at TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP,
FOREIGN KEY (user_id) REFERENCES users (id) ON DELETE CASCADE
@@ -328,6 +336,81 @@ async function initializeCompleteDatabase(): Promise {
FOREIGN KEY (host_id) REFERENCES ssh_data (id) ON DELETE CASCADE
);
+ CREATE TABLE IF NOT EXISTS host_access (
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
+ host_id INTEGER NOT NULL,
+ user_id TEXT,
+ role_id INTEGER,
+ granted_by TEXT NOT NULL,
+ permission_level TEXT NOT NULL DEFAULT 'use',
+ expires_at TEXT,
+ created_at TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP,
+ last_accessed_at TEXT,
+ access_count INTEGER NOT NULL DEFAULT 0,
+ FOREIGN KEY (host_id) REFERENCES ssh_data (id) ON DELETE CASCADE,
+ FOREIGN KEY (user_id) REFERENCES users (id) ON DELETE CASCADE,
+ FOREIGN KEY (role_id) REFERENCES roles (id) ON DELETE CASCADE,
+ FOREIGN KEY (granted_by) REFERENCES users (id) ON DELETE CASCADE
+ );
+
+ CREATE TABLE IF NOT EXISTS roles (
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
+ name TEXT NOT NULL UNIQUE,
+ display_name TEXT NOT NULL,
+ description TEXT,
+ is_system INTEGER NOT NULL DEFAULT 0,
+ permissions TEXT,
+ created_at TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP,
+ updated_at TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP
+ );
+
+ CREATE TABLE IF NOT EXISTS user_roles (
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
+ user_id TEXT NOT NULL,
+ role_id INTEGER NOT NULL,
+ granted_by TEXT,
+ granted_at TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP,
+ UNIQUE(user_id, role_id),
+ FOREIGN KEY (user_id) REFERENCES users (id) ON DELETE CASCADE,
+ FOREIGN KEY (role_id) REFERENCES roles (id) ON DELETE CASCADE,
+ FOREIGN KEY (granted_by) REFERENCES users (id) ON DELETE SET NULL
+ );
+
+ CREATE TABLE IF NOT EXISTS audit_logs (
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
+ user_id TEXT NOT NULL,
+ username TEXT NOT NULL,
+ action TEXT NOT NULL,
+ resource_type TEXT NOT NULL,
+ resource_id TEXT,
+ resource_name TEXT,
+ details TEXT,
+ ip_address TEXT,
+ user_agent TEXT,
+ success INTEGER NOT NULL,
+ error_message TEXT,
+ timestamp TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP,
+ FOREIGN KEY (user_id) REFERENCES users (id) ON DELETE CASCADE
+ );
+
+ CREATE TABLE IF NOT EXISTS session_recordings (
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
+ host_id INTEGER NOT NULL,
+ user_id TEXT NOT NULL,
+ access_id INTEGER,
+ started_at TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP,
+ ended_at TEXT,
+ duration INTEGER,
+ commands TEXT,
+ dangerous_actions TEXT,
+ recording_path TEXT,
+ terminated_by_owner INTEGER DEFAULT 0,
+ termination_reason TEXT,
+ FOREIGN KEY (host_id) REFERENCES ssh_data (id) ON DELETE CASCADE,
+ FOREIGN KEY (user_id) REFERENCES users (id) ON DELETE CASCADE,
+ FOREIGN KEY (access_id) REFERENCES host_access (id) ON DELETE SET NULL
+ );
+
`);
try {
@@ -486,11 +569,30 @@ const migrateSchema = () => {
addColumnIfNotExists("ssh_data", "stats_config", "TEXT");
addColumnIfNotExists("ssh_data", "terminal_config", "TEXT");
addColumnIfNotExists("ssh_data", "quick_actions", "TEXT");
+ addColumnIfNotExists(
+ "ssh_data",
+ "enable_docker",
+ "INTEGER NOT NULL DEFAULT 0",
+ );
+ addColumnIfNotExists("ssh_data", "docker_config", "TEXT");
+
+ addColumnIfNotExists("ssh_data", "notes", "TEXT");
+
+ addColumnIfNotExists("ssh_data", "use_socks5", "INTEGER");
+ addColumnIfNotExists("ssh_data", "socks5_host", "TEXT");
+ addColumnIfNotExists("ssh_data", "socks5_port", "INTEGER");
+ addColumnIfNotExists("ssh_data", "socks5_username", "TEXT");
+ addColumnIfNotExists("ssh_data", "socks5_password", "TEXT");
+ addColumnIfNotExists("ssh_data", "socks5_proxy_chain", "TEXT");
addColumnIfNotExists("ssh_credentials", "private_key", "TEXT");
addColumnIfNotExists("ssh_credentials", "public_key", "TEXT");
addColumnIfNotExists("ssh_credentials", "detected_key_type", "TEXT");
+ addColumnIfNotExists("ssh_credentials", "system_password", "TEXT");
+ addColumnIfNotExists("ssh_credentials", "system_key", "TEXT");
+ addColumnIfNotExists("ssh_credentials", "system_key_password", "TEXT");
+
addColumnIfNotExists("file_manager_recent", "host_id", "INTEGER NOT NULL");
addColumnIfNotExists("file_manager_pinned", "host_id", "INTEGER NOT NULL");
addColumnIfNotExists("file_manager_shortcuts", "host_id", "INTEGER NOT NULL");
@@ -551,6 +653,317 @@ const migrateSchema = () => {
}
}
+ try {
+ sqlite.prepare("SELECT id FROM host_access LIMIT 1").get();
+ } catch {
+ try {
+ sqlite.exec(`
+ CREATE TABLE IF NOT EXISTS host_access (
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
+ host_id INTEGER NOT NULL,
+ user_id TEXT,
+ role_id INTEGER,
+ granted_by TEXT NOT NULL,
+ permission_level TEXT NOT NULL DEFAULT 'use',
+ expires_at TEXT,
+ created_at TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP,
+ last_accessed_at TEXT,
+ access_count INTEGER NOT NULL DEFAULT 0,
+ FOREIGN KEY (host_id) REFERENCES ssh_data (id) ON DELETE CASCADE,
+ FOREIGN KEY (user_id) REFERENCES users (id) ON DELETE CASCADE,
+ FOREIGN KEY (role_id) REFERENCES roles (id) ON DELETE CASCADE,
+ FOREIGN KEY (granted_by) REFERENCES users (id) ON DELETE CASCADE
+ );
+ `);
+ } catch (createError) {
+ databaseLogger.warn("Failed to create host_access table", {
+ operation: "schema_migration",
+ error: createError,
+ });
+ }
+ }
+
+ try {
+ sqlite.prepare("SELECT role_id FROM host_access LIMIT 1").get();
+ } catch {
+ try {
+ sqlite.exec("ALTER TABLE host_access ADD COLUMN role_id INTEGER REFERENCES roles(id) ON DELETE CASCADE");
+ } catch (alterError) {
+ databaseLogger.warn("Failed to add role_id column", {
+ operation: "schema_migration",
+ error: alterError,
+ });
+ }
+ }
+
+ try {
+ sqlite.prepare("SELECT sudo_password FROM ssh_data LIMIT 1").get();
+ } catch {
+ try {
+ sqlite.exec("ALTER TABLE ssh_data ADD COLUMN sudo_password TEXT");
+ } catch (alterError) {
+ databaseLogger.warn("Failed to add sudo_password column", {
+ operation: "schema_migration",
+ error: alterError,
+ });
+ }
+ }
+
+ try {
+ sqlite.prepare("SELECT id FROM roles LIMIT 1").get();
+ } catch {
+ try {
+ sqlite.exec(`
+ CREATE TABLE IF NOT EXISTS roles (
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
+ name TEXT NOT NULL UNIQUE,
+ display_name TEXT NOT NULL,
+ description TEXT,
+ is_system INTEGER NOT NULL DEFAULT 0,
+ permissions TEXT,
+ created_at TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP,
+ updated_at TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP
+ );
+ `);
+ } catch (createError) {
+ databaseLogger.warn("Failed to create roles table", {
+ operation: "schema_migration",
+ error: createError,
+ });
+ }
+ }
+
+ try {
+ sqlite.prepare("SELECT id FROM user_roles LIMIT 1").get();
+ } catch {
+ try {
+ sqlite.exec(`
+ CREATE TABLE IF NOT EXISTS user_roles (
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
+ user_id TEXT NOT NULL,
+ role_id INTEGER NOT NULL,
+ granted_by TEXT,
+ granted_at TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP,
+ UNIQUE(user_id, role_id),
+ FOREIGN KEY (user_id) REFERENCES users (id) ON DELETE CASCADE,
+ FOREIGN KEY (role_id) REFERENCES roles (id) ON DELETE CASCADE,
+ FOREIGN KEY (granted_by) REFERENCES users (id) ON DELETE SET NULL
+ );
+ `);
+ } catch (createError) {
+ databaseLogger.warn("Failed to create user_roles table", {
+ operation: "schema_migration",
+ error: createError,
+ });
+ }
+ }
+
+ try {
+ sqlite.prepare("SELECT id FROM audit_logs LIMIT 1").get();
+ } catch {
+ try {
+ sqlite.exec(`
+ CREATE TABLE IF NOT EXISTS audit_logs (
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
+ user_id TEXT NOT NULL,
+ username TEXT NOT NULL,
+ action TEXT NOT NULL,
+ resource_type TEXT NOT NULL,
+ resource_id TEXT,
+ resource_name TEXT,
+ details TEXT,
+ ip_address TEXT,
+ user_agent TEXT,
+ success INTEGER NOT NULL,
+ error_message TEXT,
+ timestamp TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP,
+ FOREIGN KEY (user_id) REFERENCES users (id) ON DELETE CASCADE
+ );
+ `);
+ } catch (createError) {
+ databaseLogger.warn("Failed to create audit_logs table", {
+ operation: "schema_migration",
+ error: createError,
+ });
+ }
+ }
+
+ try {
+ sqlite.prepare("SELECT id FROM session_recordings LIMIT 1").get();
+ } catch {
+ try {
+ sqlite.exec(`
+ CREATE TABLE IF NOT EXISTS session_recordings (
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
+ host_id INTEGER NOT NULL,
+ user_id TEXT NOT NULL,
+ access_id INTEGER,
+ started_at TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP,
+ ended_at TEXT,
+ duration INTEGER,
+ commands TEXT,
+ dangerous_actions TEXT,
+ recording_path TEXT,
+ terminated_by_owner INTEGER DEFAULT 0,
+ termination_reason TEXT,
+ FOREIGN KEY (host_id) REFERENCES ssh_data (id) ON DELETE CASCADE,
+ FOREIGN KEY (user_id) REFERENCES users (id) ON DELETE CASCADE,
+ FOREIGN KEY (access_id) REFERENCES host_access (id) ON DELETE SET NULL
+ );
+ `);
+ } catch (createError) {
+ databaseLogger.warn("Failed to create session_recordings table", {
+ operation: "schema_migration",
+ error: createError,
+ });
+ }
+ }
+
+ try {
+ sqlite.prepare("SELECT id FROM shared_credentials LIMIT 1").get();
+ } catch {
+ try {
+ sqlite.exec(`
+ CREATE TABLE IF NOT EXISTS shared_credentials (
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
+ host_access_id INTEGER NOT NULL,
+ original_credential_id INTEGER NOT NULL,
+ target_user_id TEXT NOT NULL,
+ encrypted_username TEXT NOT NULL,
+ encrypted_auth_type TEXT NOT NULL,
+ encrypted_password TEXT,
+ encrypted_key TEXT,
+ encrypted_key_password TEXT,
+ encrypted_key_type TEXT,
+ created_at TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP,
+ updated_at TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP,
+ needs_re_encryption INTEGER NOT NULL DEFAULT 0,
+ FOREIGN KEY (host_access_id) REFERENCES host_access (id) ON DELETE CASCADE,
+ FOREIGN KEY (original_credential_id) REFERENCES ssh_credentials (id) ON DELETE CASCADE,
+ FOREIGN KEY (target_user_id) REFERENCES users (id) ON DELETE CASCADE
+ );
+ `);
+ } catch (createError) {
+ databaseLogger.warn("Failed to create shared_credentials table", {
+ operation: "schema_migration",
+ error: createError,
+ });
+ }
+ }
+
+ try {
+ const existingRoles = sqlite.prepare("SELECT name, is_system FROM roles").all() as Array<{ name: string; is_system: number }>;
+
+ try {
+ const validSystemRoles = ['admin', 'user'];
+ const unwantedRoleNames = ['superAdmin', 'powerUser', 'readonly', 'member'];
+ let deletedCount = 0;
+
+ const deleteByName = sqlite.prepare("DELETE FROM roles WHERE name = ?");
+ for (const roleName of unwantedRoleNames) {
+ const result = deleteByName.run(roleName);
+ if (result.changes > 0) {
+ deletedCount += result.changes;
+ }
+ }
+
+ const deleteOldSystemRole = sqlite.prepare("DELETE FROM roles WHERE name = ? AND is_system = 1");
+ for (const role of existingRoles) {
+ if (role.is_system === 1 && !validSystemRoles.includes(role.name) && !unwantedRoleNames.includes(role.name)) {
+ const result = deleteOldSystemRole.run(role.name);
+ if (result.changes > 0) {
+ deletedCount += result.changes;
+ }
+ }
+ }
+ } catch (cleanupError) {
+ databaseLogger.warn("Failed to clean up old system roles", {
+ operation: "schema_migration",
+ error: cleanupError,
+ });
+ }
+
+ const systemRoles = [
+ {
+ name: "admin",
+ displayName: "rbac.roles.admin",
+ description: "Administrator with full access",
+ permissions: null,
+ },
+ {
+ name: "user",
+ displayName: "rbac.roles.user",
+ description: "Regular user",
+ permissions: null,
+ },
+ ];
+
+ for (const role of systemRoles) {
+ const existingRole = sqlite.prepare("SELECT id FROM roles WHERE name = ?").get(role.name);
+ if (!existingRole) {
+ try {
+ sqlite.prepare(`
+ INSERT INTO roles (name, display_name, description, is_system, permissions)
+ VALUES (?, ?, ?, 1, ?)
+ `).run(role.name, role.displayName, role.description, role.permissions);
+ } catch (insertError) {
+ databaseLogger.warn(`Failed to create system role: ${role.name}`, {
+ operation: "schema_migration",
+ error: insertError,
+ });
+ }
+ }
+ }
+
+ try {
+ const adminUsers = sqlite.prepare("SELECT id FROM users WHERE is_admin = 1").all() as { id: string }[];
+ const normalUsers = sqlite.prepare("SELECT id FROM users WHERE is_admin = 0").all() as { id: string }[];
+
+ const adminRole = sqlite.prepare("SELECT id FROM roles WHERE name = 'admin'").get() as { id: number } | undefined;
+ const userRole = sqlite.prepare("SELECT id FROM roles WHERE name = 'user'").get() as { id: number } | undefined;
+
+ if (adminRole) {
+ const insertUserRole = sqlite.prepare(`
+ INSERT OR IGNORE INTO user_roles (user_id, role_id, granted_at)
+ VALUES (?, ?, CURRENT_TIMESTAMP)
+ `);
+
+ for (const admin of adminUsers) {
+ try {
+ insertUserRole.run(admin.id, adminRole.id);
+ } catch (error) {
+ // Ignore duplicate errors
+ }
+ }
+ }
+
+ if (userRole) {
+ const insertUserRole = sqlite.prepare(`
+ INSERT OR IGNORE INTO user_roles (user_id, role_id, granted_at)
+ VALUES (?, ?, CURRENT_TIMESTAMP)
+ `);
+
+ for (const user of normalUsers) {
+ try {
+ insertUserRole.run(user.id, userRole.id);
+ } catch (error) {
+ // Ignore duplicate errors
+ }
+ }
+ }
+ } catch (migrationError) {
+ databaseLogger.warn("Failed to migrate existing users to roles", {
+ operation: "schema_migration",
+ error: migrationError,
+ });
+ }
+ } catch (seedError) {
+ databaseLogger.warn("Failed to seed system roles", {
+ operation: "schema_migration",
+ error: seedError,
+ });
+ }
+
databaseLogger.success("Schema migration completed", {
operation: "schema_migration",
});
diff --git a/src/backend/database/db/schema.ts b/src/backend/database/db/schema.ts
index 074b4103..71c07653 100644
--- a/src/backend/database/db/schema.ts
+++ b/src/backend/database/db/schema.ts
@@ -66,6 +66,7 @@ export const sshData = sqliteTable("ssh_data", {
key: text("key", { length: 8192 }),
key_password: text("key_password"),
keyType: text("key_type"),
+ sudoPassword: text("sudo_password"),
autostartPassword: text("autostart_password"),
autostartKey: text("autostart_key", { length: 8192 }),
@@ -86,10 +87,22 @@ export const sshData = sqliteTable("ssh_data", {
enableFileManager: integer("enable_file_manager", { mode: "boolean" })
.notNull()
.default(true),
+ enableDocker: integer("enable_docker", { mode: "boolean" })
+ .notNull()
+ .default(false),
defaultPath: text("default_path"),
statsConfig: text("stats_config"),
terminalConfig: text("terminal_config"),
quickActions: text("quick_actions"),
+ notes: text("notes"),
+
+ useSocks5: integer("use_socks5", { mode: "boolean" }),
+ socks5Host: text("socks5_host"),
+ socks5Port: integer("socks5_port"),
+ socks5Username: text("socks5_username"),
+ socks5Password: text("socks5_password"),
+ socks5ProxyChain: text("socks5_proxy_chain"),
+
createdAt: text("created_at")
.notNull()
.default(sql`CURRENT_TIMESTAMP`),
@@ -172,6 +185,11 @@ export const sshCredentials = sqliteTable("ssh_credentials", {
key_password: text("key_password"),
keyType: text("key_type"),
detectedKeyType: text("detected_key_type"),
+
+ systemPassword: text("system_password"),
+ systemKey: text("system_key", { length: 16384 }),
+ systemKeyPassword: text("system_key_password"),
+
usageCount: integer("usage_count").notNull().default(0),
lastUsed: text("last_used"),
createdAt: text("created_at")
@@ -276,3 +294,156 @@ export const commandHistory = sqliteTable("command_history", {
.notNull()
.default(sql`CURRENT_TIMESTAMP`),
});
+
+export const hostAccess = sqliteTable("host_access", {
+ id: integer("id").primaryKey({ autoIncrement: true }),
+ hostId: integer("host_id")
+ .notNull()
+ .references(() => sshData.id, { onDelete: "cascade" }),
+
+ userId: text("user_id")
+ .references(() => users.id, { onDelete: "cascade" }),
+ roleId: integer("role_id")
+ .references(() => roles.id, { onDelete: "cascade" }),
+
+ grantedBy: text("granted_by")
+ .notNull()
+ .references(() => users.id, { onDelete: "cascade" }),
+
+ permissionLevel: text("permission_level")
+ .notNull()
+ .default("view"),
+
+ expiresAt: text("expires_at"),
+
+ createdAt: text("created_at")
+ .notNull()
+ .default(sql`CURRENT_TIMESTAMP`),
+ lastAccessedAt: text("last_accessed_at"),
+ accessCount: integer("access_count").notNull().default(0),
+});
+
+export const sharedCredentials = sqliteTable("shared_credentials", {
+ id: integer("id").primaryKey({ autoIncrement: true }),
+
+ hostAccessId: integer("host_access_id")
+ .notNull()
+ .references(() => hostAccess.id, { onDelete: "cascade" }),
+
+ originalCredentialId: integer("original_credential_id")
+ .notNull()
+ .references(() => sshCredentials.id, { onDelete: "cascade" }),
+
+ targetUserId: text("target_user_id")
+ .notNull()
+ .references(() => users.id, { onDelete: "cascade" }),
+
+ encryptedUsername: text("encrypted_username").notNull(),
+ encryptedAuthType: text("encrypted_auth_type").notNull(),
+ encryptedPassword: text("encrypted_password"),
+ encryptedKey: text("encrypted_key", { length: 16384 }),
+ encryptedKeyPassword: text("encrypted_key_password"),
+ encryptedKeyType: text("encrypted_key_type"),
+
+ createdAt: text("created_at")
+ .notNull()
+ .default(sql`CURRENT_TIMESTAMP`),
+ updatedAt: text("updated_at")
+ .notNull()
+ .default(sql`CURRENT_TIMESTAMP`),
+
+ needsReEncryption: integer("needs_re_encryption", { mode: "boolean" })
+ .notNull()
+ .default(false),
+});
+
+export const roles = sqliteTable("roles", {
+ id: integer("id").primaryKey({ autoIncrement: true }),
+ name: text("name").notNull().unique(),
+ displayName: text("display_name").notNull(),
+ description: text("description"),
+
+ isSystem: integer("is_system", { mode: "boolean" })
+ .notNull()
+ .default(false),
+
+ permissions: text("permissions"),
+
+ createdAt: text("created_at")
+ .notNull()
+ .default(sql`CURRENT_TIMESTAMP`),
+ updatedAt: text("updated_at")
+ .notNull()
+ .default(sql`CURRENT_TIMESTAMP`),
+});
+
+export const userRoles = sqliteTable("user_roles", {
+ id: integer("id").primaryKey({ autoIncrement: true }),
+ userId: text("user_id")
+ .notNull()
+ .references(() => users.id, { onDelete: "cascade" }),
+ roleId: integer("role_id")
+ .notNull()
+ .references(() => roles.id, { onDelete: "cascade" }),
+
+ grantedBy: text("granted_by").references(() => users.id, {
+ onDelete: "set null",
+ }),
+ grantedAt: text("granted_at")
+ .notNull()
+ .default(sql`CURRENT_TIMESTAMP`),
+});
+
+export const auditLogs = sqliteTable("audit_logs", {
+ id: integer("id").primaryKey({ autoIncrement: true }),
+
+ userId: text("user_id")
+ .notNull()
+ .references(() => users.id, { onDelete: "cascade" }),
+ username: text("username").notNull(),
+
+ action: text("action").notNull(),
+ resourceType: text("resource_type").notNull(),
+ resourceId: text("resource_id"),
+ resourceName: text("resource_name"),
+
+ details: text("details"),
+ ipAddress: text("ip_address"),
+ userAgent: text("user_agent"),
+
+ success: integer("success", { mode: "boolean" }).notNull(),
+ errorMessage: text("error_message"),
+
+ timestamp: text("timestamp")
+ .notNull()
+ .default(sql`CURRENT_TIMESTAMP`),
+});
+
+export const sessionRecordings = sqliteTable("session_recordings", {
+ id: integer("id").primaryKey({ autoIncrement: true }),
+
+ hostId: integer("host_id")
+ .notNull()
+ .references(() => sshData.id, { onDelete: "cascade" }),
+ userId: text("user_id")
+ .notNull()
+ .references(() => users.id, { onDelete: "cascade" }),
+ accessId: integer("access_id").references(() => hostAccess.id, {
+ onDelete: "set null",
+ }),
+
+ startedAt: text("started_at")
+ .notNull()
+ .default(sql`CURRENT_TIMESTAMP`),
+ endedAt: text("ended_at"),
+ duration: integer("duration"),
+
+ commands: text("commands"),
+ dangerousActions: text("dangerous_actions"),
+
+ recordingPath: text("recording_path"),
+
+ terminatedByOwner: integer("terminated_by_owner", { mode: "boolean" })
+ .default(false),
+ terminationReason: text("termination_reason"),
+});
diff --git a/src/backend/database/routes/credentials.ts b/src/backend/database/routes/credentials.ts
index df9ab936..6d0b0ab5 100644
--- a/src/backend/database/routes/credentials.ts
+++ b/src/backend/database/routes/credentials.ts
@@ -1,7 +1,15 @@
-import type { AuthenticatedRequest } from "../../../types/index.js";
+import type {
+ AuthenticatedRequest,
+ CredentialBackend,
+} from "../../../types/index.js";
import express from "express";
import { db } from "../db/index.js";
-import { sshCredentials, sshCredentialUsage, sshData } from "../db/schema.js";
+import {
+ sshCredentials,
+ sshCredentialUsage,
+ sshData,
+ hostAccess,
+} from "../db/schema.js";
import { eq, and, desc, sql } from "drizzle-orm";
import type { Request, Response } from "express";
import { authLogger } from "../../utils/logger.js";
@@ -470,6 +478,14 @@ router.put(
userId,
);
+ const { SharedCredentialManager } =
+ await import("../../utils/shared-credential-manager.js");
+ const sharedCredManager = SharedCredentialManager.getInstance();
+ await sharedCredManager.updateSharedCredentialsForOriginal(
+ parseInt(id),
+ userId,
+ );
+
const credential = updated[0];
authLogger.success(
`SSH credential updated: ${credential.name} (${credential.authType}) by user ${userId}`,
@@ -524,8 +540,6 @@ router.delete(
return res.status(404).json({ error: "Credential not found" });
}
- // Update hosts using this credential to set credentialId to null
- // This prevents orphaned references before deletion
const hostsUsingCredential = await db
.select()
.from(sshData)
@@ -552,10 +566,32 @@ router.delete(
eq(sshData.userId, userId),
),
);
+
+ for (const host of hostsUsingCredential) {
+ const revokedShares = await db
+ .delete(hostAccess)
+ .where(eq(hostAccess.hostId, host.id))
+ .returning({ id: hostAccess.id });
+
+ if (revokedShares.length > 0) {
+ authLogger.info(
+ "Auto-revoked host shares due to credential deletion",
+ {
+ operation: "auto_revoke_shares",
+ hostId: host.id,
+ credentialId: parseInt(id),
+ revokedCount: revokedShares.length,
+ reason: "credential_deleted",
+ },
+ );
+ }
+ }
}
- // sshCredentialUsage will be automatically deleted by ON DELETE CASCADE
- // No need for manual deletion
+ const { SharedCredentialManager } =
+ await import("../../utils/shared-credential-manager.js");
+ const sharedCredManager = SharedCredentialManager.getInstance();
+ await sharedCredManager.deleteSharedCredentialsForOriginal(parseInt(id));
await db
.delete(sshCredentials)
@@ -1124,10 +1160,9 @@ router.post(
async function deploySSHKeyToHost(
hostConfig: Record,
- publicKey: string,
- // eslint-disable-next-line @typescript-eslint/no-unused-vars
- _credentialData: Record,
+ credData: CredentialBackend,
): Promise<{ success: boolean; message?: string; error?: string }> {
+ const publicKey = credData.public_key as string;
return new Promise((resolve) => {
const conn = new Client();
@@ -1248,7 +1283,7 @@ async function deploySSHKeyToHost(
.replace(/'/g, "'\\''");
conn.exec(
- `printf '%s\\n' '${escapedKey}' >> ~/.ssh/authorized_keys && chmod 600 ~/.ssh/authorized_keys`,
+ `printf '%s\\n' '${escapedKey} ${credData.name}@Termix' >> ~/.ssh/authorized_keys && chmod 600 ~/.ssh/authorized_keys`,
(err, stream) => {
if (err) {
clearTimeout(addTimeout);
@@ -1510,7 +1545,7 @@ router.post(
});
}
- const credData = credential[0];
+ const credData = credential[0] as unknown as CredentialBackend;
if (credData.authType !== "key") {
return res.status(400).json({
@@ -1519,7 +1554,7 @@ router.post(
});
}
- const publicKey = credData.public_key || credData.publicKey;
+ const publicKey = credData.public_key;
if (!publicKey) {
return res.status(400).json({
success: false,
@@ -1599,11 +1634,7 @@ router.post(
}
}
- const deployResult = await deploySSHKeyToHost(
- hostConfig,
- publicKey as string,
- credData,
- );
+ const deployResult = await deploySSHKeyToHost(hostConfig, credData);
if (deployResult.success) {
res.json({
diff --git a/src/backend/database/routes/rbac.ts b/src/backend/database/routes/rbac.ts
new file mode 100644
index 00000000..6e6f0033
--- /dev/null
+++ b/src/backend/database/routes/rbac.ts
@@ -0,0 +1,850 @@
+import type { AuthenticatedRequest } from "../../../types/index.js";
+import express from "express";
+import { db } from "../db/index.js";
+import {
+ hostAccess,
+ sshData,
+ users,
+ roles,
+ userRoles,
+ auditLogs,
+ sharedCredentials,
+} from "../db/schema.js";
+import { eq, and, desc, sql, or, isNull, gte } from "drizzle-orm";
+import type { Request, Response } from "express";
+import { databaseLogger } from "../../utils/logger.js";
+import { AuthManager } from "../../utils/auth-manager.js";
+import { PermissionManager } from "../../utils/permission-manager.js";
+
+const router = express.Router();
+
+const authManager = AuthManager.getInstance();
+const permissionManager = PermissionManager.getInstance();
+
+const authenticateJWT = authManager.createAuthMiddleware();
+
+function isNonEmptyString(value: unknown): value is string {
+ return typeof value === "string" && value.trim().length > 0;
+}
+
+//Share a host with a user or role
+//POST /rbac/host/:id/share
+router.post(
+ "/host/:id/share",
+ authenticateJWT,
+ async (req: AuthenticatedRequest, res: Response) => {
+ const hostId = parseInt(req.params.id, 10);
+ const userId = req.userId!;
+
+ if (isNaN(hostId)) {
+ return res.status(400).json({ error: "Invalid host ID" });
+ }
+
+ try {
+ const {
+ targetType = "user",
+ targetUserId,
+ targetRoleId,
+ durationHours,
+ permissionLevel = "view",
+ } = req.body;
+
+ if (!["user", "role"].includes(targetType)) {
+ return res
+ .status(400)
+ .json({ error: "Invalid target type. Must be 'user' or 'role'" });
+ }
+
+ if (targetType === "user" && !isNonEmptyString(targetUserId)) {
+ return res
+ .status(400)
+ .json({ error: "Target user ID is required when sharing with user" });
+ }
+ if (targetType === "role" && !targetRoleId) {
+ return res
+ .status(400)
+ .json({ error: "Target role ID is required when sharing with role" });
+ }
+
+ const host = await db
+ .select()
+ .from(sshData)
+ .where(and(eq(sshData.id, hostId), eq(sshData.userId, userId)))
+ .limit(1);
+
+ if (host.length === 0) {
+ databaseLogger.warn("Attempt to share host not owned by user", {
+ operation: "share_host",
+ userId,
+ hostId,
+ });
+ return res.status(403).json({ error: "Not host owner" });
+ }
+
+ if (!host[0].credentialId) {
+ return res.status(400).json({
+ error:
+ "Only hosts using credentials can be shared. Please create a credential and assign it to this host before sharing.",
+ code: "CREDENTIAL_REQUIRED_FOR_SHARING",
+ });
+ }
+
+ if (targetType === "user") {
+ const targetUser = await db
+ .select({ id: users.id, username: users.username })
+ .from(users)
+ .where(eq(users.id, targetUserId))
+ .limit(1);
+
+ if (targetUser.length === 0) {
+ return res.status(404).json({ error: "Target user not found" });
+ }
+ } else {
+ const targetRole = await db
+ .select({ id: roles.id, name: roles.name })
+ .from(roles)
+ .where(eq(roles.id, targetRoleId))
+ .limit(1);
+
+ if (targetRole.length === 0) {
+ return res.status(404).json({ error: "Target role not found" });
+ }
+ }
+
+ let expiresAt: string | null = null;
+ if (
+ durationHours &&
+ typeof durationHours === "number" &&
+ durationHours > 0
+ ) {
+ const expiryDate = new Date();
+ expiryDate.setHours(expiryDate.getHours() + durationHours);
+ expiresAt = expiryDate.toISOString();
+ }
+
+ const validLevels = ["view"];
+ if (!validLevels.includes(permissionLevel)) {
+ return res.status(400).json({
+ error: "Invalid permission level. Only 'view' is supported.",
+ validLevels,
+ });
+ }
+
+ const whereConditions = [eq(hostAccess.hostId, hostId)];
+ if (targetType === "user") {
+ whereConditions.push(eq(hostAccess.userId, targetUserId));
+ } else {
+ whereConditions.push(eq(hostAccess.roleId, targetRoleId));
+ }
+
+ const existing = await db
+ .select()
+ .from(hostAccess)
+ .where(and(...whereConditions))
+ .limit(1);
+
+ if (existing.length > 0) {
+ await db
+ .update(hostAccess)
+ .set({
+ permissionLevel,
+ expiresAt,
+ })
+ .where(eq(hostAccess.id, existing[0].id));
+
+ await db
+ .delete(sharedCredentials)
+ .where(eq(sharedCredentials.hostAccessId, existing[0].id));
+
+ const { SharedCredentialManager } =
+ await import("../../utils/shared-credential-manager.js");
+ const sharedCredManager = SharedCredentialManager.getInstance();
+ if (targetType === "user") {
+ await sharedCredManager.createSharedCredentialForUser(
+ existing[0].id,
+ host[0].credentialId,
+ targetUserId!,
+ userId,
+ );
+ } else {
+ await sharedCredManager.createSharedCredentialsForRole(
+ existing[0].id,
+ host[0].credentialId,
+ targetRoleId!,
+ userId,
+ );
+ }
+
+ return res.json({
+ success: true,
+ message: "Host access updated",
+ expiresAt,
+ });
+ }
+
+ const result = await db.insert(hostAccess).values({
+ hostId,
+ userId: targetType === "user" ? targetUserId : null,
+ roleId: targetType === "role" ? targetRoleId : null,
+ grantedBy: userId,
+ permissionLevel,
+ expiresAt,
+ });
+
+ const { SharedCredentialManager } =
+ await import("../../utils/shared-credential-manager.js");
+ const sharedCredManager = SharedCredentialManager.getInstance();
+
+ if (targetType === "user") {
+ await sharedCredManager.createSharedCredentialForUser(
+ result.lastInsertRowid as number,
+ host[0].credentialId,
+ targetUserId!,
+ userId,
+ );
+ } else {
+ await sharedCredManager.createSharedCredentialsForRole(
+ result.lastInsertRowid as number,
+ host[0].credentialId,
+ targetRoleId!,
+ userId,
+ );
+ }
+
+ res.json({
+ success: true,
+ message: `Host shared successfully with ${targetType}`,
+ expiresAt,
+ });
+ } catch (error) {
+ databaseLogger.error("Failed to share host", error, {
+ operation: "share_host",
+ hostId,
+ userId,
+ });
+ res.status(500).json({ error: "Failed to share host" });
+ }
+ },
+);
+
+// Revoke host access
+// DELETE /rbac/host/:id/access/:accessId
+router.delete(
+ "/host/:id/access/:accessId",
+ authenticateJWT,
+ async (req: AuthenticatedRequest, res: Response) => {
+ const hostId = parseInt(req.params.id, 10);
+ const accessId = parseInt(req.params.accessId, 10);
+ const userId = req.userId!;
+
+ if (isNaN(hostId) || isNaN(accessId)) {
+ return res.status(400).json({ error: "Invalid ID" });
+ }
+
+ try {
+ const host = await db
+ .select()
+ .from(sshData)
+ .where(and(eq(sshData.id, hostId), eq(sshData.userId, userId)))
+ .limit(1);
+
+ if (host.length === 0) {
+ return res.status(403).json({ error: "Not host owner" });
+ }
+
+ await db.delete(hostAccess).where(eq(hostAccess.id, accessId));
+
+ res.json({ success: true, message: "Access revoked" });
+ } catch (error) {
+ databaseLogger.error("Failed to revoke host access", error, {
+ operation: "revoke_host_access",
+ hostId,
+ accessId,
+ userId,
+ });
+ res.status(500).json({ error: "Failed to revoke access" });
+ }
+ },
+);
+
+// Get host access list
+// GET /rbac/host/:id/access
+router.get(
+ "/host/:id/access",
+ authenticateJWT,
+ async (req: AuthenticatedRequest, res: Response) => {
+ const hostId = parseInt(req.params.id, 10);
+ const userId = req.userId!;
+
+ if (isNaN(hostId)) {
+ return res.status(400).json({ error: "Invalid host ID" });
+ }
+
+ try {
+ const host = await db
+ .select()
+ .from(sshData)
+ .where(and(eq(sshData.id, hostId), eq(sshData.userId, userId)))
+ .limit(1);
+
+ if (host.length === 0) {
+ return res.status(403).json({ error: "Not host owner" });
+ }
+
+ const rawAccessList = await db
+ .select({
+ id: hostAccess.id,
+ userId: hostAccess.userId,
+ roleId: hostAccess.roleId,
+ username: users.username,
+ roleName: roles.name,
+ roleDisplayName: roles.displayName,
+ grantedBy: hostAccess.grantedBy,
+ grantedByUsername: sql`(SELECT username FROM users WHERE id = ${hostAccess.grantedBy})`,
+ permissionLevel: hostAccess.permissionLevel,
+ expiresAt: hostAccess.expiresAt,
+ createdAt: hostAccess.createdAt,
+ })
+ .from(hostAccess)
+ .leftJoin(users, eq(hostAccess.userId, users.id))
+ .leftJoin(roles, eq(hostAccess.roleId, roles.id))
+ .where(eq(hostAccess.hostId, hostId))
+ .orderBy(desc(hostAccess.createdAt));
+
+ const accessList = rawAccessList.map((access) => ({
+ id: access.id,
+ targetType: access.userId ? "user" : "role",
+ userId: access.userId,
+ roleId: access.roleId,
+ username: access.username,
+ roleName: access.roleName,
+ roleDisplayName: access.roleDisplayName,
+ grantedBy: access.grantedBy,
+ grantedByUsername: access.grantedByUsername,
+ permissionLevel: access.permissionLevel,
+ expiresAt: access.expiresAt,
+ createdAt: access.createdAt,
+ }));
+
+ res.json({ accessList });
+ } catch (error) {
+ databaseLogger.error("Failed to get host access list", error, {
+ operation: "get_host_access_list",
+ hostId,
+ userId,
+ });
+ res.status(500).json({ error: "Failed to get access list" });
+ }
+ },
+);
+
+// Get user's shared hosts (hosts shared WITH this user)
+// GET /rbac/shared-hosts
+router.get(
+ "/shared-hosts",
+ authenticateJWT,
+ async (req: AuthenticatedRequest, res: Response) => {
+ const userId = req.userId!;
+
+ try {
+ const now = new Date().toISOString();
+
+ const sharedHosts = await db
+ .select({
+ id: sshData.id,
+ name: sshData.name,
+ ip: sshData.ip,
+ port: sshData.port,
+ username: sshData.username,
+ folder: sshData.folder,
+ tags: sshData.tags,
+ permissionLevel: hostAccess.permissionLevel,
+ expiresAt: hostAccess.expiresAt,
+ grantedBy: hostAccess.grantedBy,
+ ownerUsername: users.username,
+ })
+ .from(hostAccess)
+ .innerJoin(sshData, eq(hostAccess.hostId, sshData.id))
+ .innerJoin(users, eq(sshData.userId, users.id))
+ .where(
+ and(
+ eq(hostAccess.userId, userId),
+ or(isNull(hostAccess.expiresAt), gte(hostAccess.expiresAt, now)),
+ ),
+ )
+ .orderBy(desc(hostAccess.createdAt));
+
+ res.json({ sharedHosts });
+ } catch (error) {
+ databaseLogger.error("Failed to get shared hosts", error, {
+ operation: "get_shared_hosts",
+ userId,
+ });
+ res.status(500).json({ error: "Failed to get shared hosts" });
+ }
+ },
+);
+
+// Get all roles
+// GET /rbac/roles
+router.get(
+ "/roles",
+ authenticateJWT,
+ permissionManager.requireAdmin(),
+ async (req: AuthenticatedRequest, res: Response) => {
+ try {
+ const allRoles = await db
+ .select()
+ .from(roles)
+ .orderBy(roles.isSystem, roles.name);
+
+ const rolesWithParsedPermissions = allRoles.map((role) => ({
+ ...role,
+ permissions: JSON.parse(role.permissions),
+ }));
+
+ res.json({ roles: rolesWithParsedPermissions });
+ } catch (error) {
+ databaseLogger.error("Failed to get roles", error, {
+ operation: "get_roles",
+ });
+ res.status(500).json({ error: "Failed to get roles" });
+ }
+ },
+);
+
+// Get all roles
+// GET /rbac/roles
+router.get(
+ "/roles",
+ authenticateJWT,
+ async (req: AuthenticatedRequest, res: Response) => {
+ try {
+ const rolesList = await db
+ .select({
+ id: roles.id,
+ name: roles.name,
+ displayName: roles.displayName,
+ description: roles.description,
+ isSystem: roles.isSystem,
+ createdAt: roles.createdAt,
+ updatedAt: roles.updatedAt,
+ })
+ .from(roles)
+ .orderBy(roles.isSystem, roles.name);
+
+ res.json({ roles: rolesList });
+ } catch (error) {
+ databaseLogger.error("Failed to get roles", error, {
+ operation: "get_roles",
+ });
+ res.status(500).json({ error: "Failed to get roles" });
+ }
+ },
+);
+
+// Create new role
+// POST /rbac/roles
+router.post(
+ "/roles",
+ authenticateJWT,
+ permissionManager.requireAdmin(),
+ async (req: AuthenticatedRequest, res: Response) => {
+ const { name, displayName, description } = req.body;
+
+ if (!isNonEmptyString(name) || !isNonEmptyString(displayName)) {
+ return res.status(400).json({
+ error: "Role name and display name are required",
+ });
+ }
+
+ if (!/^[a-z0-9_-]+$/.test(name)) {
+ return res.status(400).json({
+ error:
+ "Role name must contain only lowercase letters, numbers, underscores, and hyphens",
+ });
+ }
+
+ try {
+ const existing = await db
+ .select({ id: roles.id })
+ .from(roles)
+ .where(eq(roles.name, name))
+ .limit(1);
+
+ if (existing.length > 0) {
+ return res.status(409).json({
+ error: "A role with this name already exists",
+ });
+ }
+
+ const result = await db.insert(roles).values({
+ name,
+ displayName,
+ description: description || null,
+ isSystem: false,
+ permissions: null,
+ });
+
+ const newRoleId = result.lastInsertRowid;
+
+ res.status(201).json({
+ success: true,
+ roleId: newRoleId,
+ message: "Role created successfully",
+ });
+ } catch (error) {
+ databaseLogger.error("Failed to create role", error, {
+ operation: "create_role",
+ roleName: name,
+ });
+ res.status(500).json({ error: "Failed to create role" });
+ }
+ },
+);
+
+// Update role
+// PUT /rbac/roles/:id
+router.put(
+ "/roles/:id",
+ authenticateJWT,
+ permissionManager.requireAdmin(),
+ async (req: AuthenticatedRequest, res: Response) => {
+ const roleId = parseInt(req.params.id, 10);
+ const { displayName, description } = req.body;
+
+ if (isNaN(roleId)) {
+ return res.status(400).json({ error: "Invalid role ID" });
+ }
+
+ if (!displayName && description === undefined) {
+ return res.status(400).json({
+ error: "At least one field (displayName or description) is required",
+ });
+ }
+
+ try {
+ const existingRole = await db
+ .select({
+ id: roles.id,
+ name: roles.name,
+ isSystem: roles.isSystem,
+ })
+ .from(roles)
+ .where(eq(roles.id, roleId))
+ .limit(1);
+
+ if (existingRole.length === 0) {
+ return res.status(404).json({ error: "Role not found" });
+ }
+
+ const updates: {
+ displayName?: string;
+ description?: string | null;
+ updatedAt: string;
+ } = {
+ updatedAt: new Date().toISOString(),
+ };
+
+ if (displayName) {
+ updates.displayName = displayName;
+ }
+
+ if (description !== undefined) {
+ updates.description = description || null;
+ }
+
+ await db.update(roles).set(updates).where(eq(roles.id, roleId));
+
+ res.json({
+ success: true,
+ message: "Role updated successfully",
+ });
+ } catch (error) {
+ databaseLogger.error("Failed to update role", error, {
+ operation: "update_role",
+ roleId,
+ });
+ res.status(500).json({ error: "Failed to update role" });
+ }
+ },
+);
+
+// Delete role
+// DELETE /rbac/roles/:id
+router.delete(
+ "/roles/:id",
+ authenticateJWT,
+ permissionManager.requireAdmin(),
+ async (req: AuthenticatedRequest, res: Response) => {
+ const roleId = parseInt(req.params.id, 10);
+
+ if (isNaN(roleId)) {
+ return res.status(400).json({ error: "Invalid role ID" });
+ }
+
+ try {
+ const role = await db
+ .select({
+ id: roles.id,
+ name: roles.name,
+ isSystem: roles.isSystem,
+ })
+ .from(roles)
+ .where(eq(roles.id, roleId))
+ .limit(1);
+
+ if (role.length === 0) {
+ return res.status(404).json({ error: "Role not found" });
+ }
+
+ if (role[0].isSystem) {
+ return res.status(403).json({
+ error: "Cannot delete system roles",
+ });
+ }
+
+ const deletedUserRoles = await db
+ .delete(userRoles)
+ .where(eq(userRoles.roleId, roleId))
+ .returning({ userId: userRoles.userId });
+
+ for (const { userId } of deletedUserRoles) {
+ permissionManager.invalidateUserPermissionCache(userId);
+ }
+
+ const deletedHostAccess = await db
+ .delete(hostAccess)
+ .where(eq(hostAccess.roleId, roleId))
+ .returning({ id: hostAccess.id });
+
+ await db.delete(roles).where(eq(roles.id, roleId));
+
+ res.json({
+ success: true,
+ message: "Role deleted successfully",
+ });
+ } catch (error) {
+ databaseLogger.error("Failed to delete role", error, {
+ operation: "delete_role",
+ roleId,
+ });
+ res.status(500).json({ error: "Failed to delete role" });
+ }
+ },
+);
+
+// Assign role to user
+// POST /rbac/users/:userId/roles
+router.post(
+ "/users/:userId/roles",
+ authenticateJWT,
+ permissionManager.requireAdmin(),
+ async (req: AuthenticatedRequest, res: Response) => {
+ const targetUserId = req.params.userId;
+ const currentUserId = req.userId!;
+
+ try {
+ const { roleId } = req.body;
+
+ if (typeof roleId !== "number") {
+ return res.status(400).json({ error: "Role ID is required" });
+ }
+
+ const targetUser = await db
+ .select()
+ .from(users)
+ .where(eq(users.id, targetUserId))
+ .limit(1);
+
+ if (targetUser.length === 0) {
+ return res.status(404).json({ error: "User not found" });
+ }
+
+ const role = await db
+ .select()
+ .from(roles)
+ .where(eq(roles.id, roleId))
+ .limit(1);
+
+ if (role.length === 0) {
+ return res.status(404).json({ error: "Role not found" });
+ }
+
+ if (role[0].isSystem) {
+ return res.status(403).json({
+ error:
+ "System roles (admin, user) are automatically assigned and cannot be manually assigned",
+ });
+ }
+
+ const existing = await db
+ .select()
+ .from(userRoles)
+ .where(
+ and(eq(userRoles.userId, targetUserId), eq(userRoles.roleId, roleId)),
+ )
+ .limit(1);
+
+ if (existing.length > 0) {
+ return res.status(409).json({ error: "Role already assigned" });
+ }
+
+ await db.insert(userRoles).values({
+ userId: targetUserId,
+ roleId,
+ grantedBy: currentUserId,
+ });
+
+ const hostsSharedWithRole = await db
+ .select()
+ .from(hostAccess)
+ .innerJoin(sshData, eq(hostAccess.hostId, sshData.id))
+ .where(eq(hostAccess.roleId, roleId));
+
+ const { SharedCredentialManager } =
+ await import("../../utils/shared-credential-manager.js");
+ const sharedCredManager = SharedCredentialManager.getInstance();
+
+ for (const { host_access, ssh_data } of hostsSharedWithRole) {
+ if (ssh_data.credentialId) {
+ try {
+ await sharedCredManager.createSharedCredentialForUser(
+ host_access.id,
+ ssh_data.credentialId,
+ targetUserId,
+ ssh_data.userId,
+ );
+ } catch (error) {
+ databaseLogger.error(
+ "Failed to create shared credential for new role member",
+ error,
+ {
+ operation: "assign_role_create_credentials",
+ targetUserId,
+ roleId,
+ hostId: ssh_data.id,
+ },
+ );
+ }
+ }
+ }
+
+ permissionManager.invalidateUserPermissionCache(targetUserId);
+
+ res.json({
+ success: true,
+ message: "Role assigned successfully",
+ });
+ } catch (error) {
+ databaseLogger.error("Failed to assign role", error, {
+ operation: "assign_role",
+ targetUserId,
+ });
+ res.status(500).json({ error: "Failed to assign role" });
+ }
+ },
+);
+
+// Remove role from user
+// DELETE /rbac/users/:userId/roles/:roleId
+router.delete(
+ "/users/:userId/roles/:roleId",
+ authenticateJWT,
+ permissionManager.requireAdmin(),
+ async (req: AuthenticatedRequest, res: Response) => {
+ const targetUserId = req.params.userId;
+ const roleId = parseInt(req.params.roleId, 10);
+
+ if (isNaN(roleId)) {
+ return res.status(400).json({ error: "Invalid role ID" });
+ }
+
+ try {
+ const role = await db
+ .select({
+ id: roles.id,
+ name: roles.name,
+ isSystem: roles.isSystem,
+ })
+ .from(roles)
+ .where(eq(roles.id, roleId))
+ .limit(1);
+
+ if (role.length === 0) {
+ return res.status(404).json({ error: "Role not found" });
+ }
+
+ if (role[0].isSystem) {
+ return res.status(403).json({
+ error:
+ "System roles (admin, user) are automatically assigned and cannot be removed",
+ });
+ }
+
+ await db
+ .delete(userRoles)
+ .where(
+ and(eq(userRoles.userId, targetUserId), eq(userRoles.roleId, roleId)),
+ );
+
+ permissionManager.invalidateUserPermissionCache(targetUserId);
+
+ res.json({
+ success: true,
+ message: "Role removed successfully",
+ });
+ } catch (error) {
+ databaseLogger.error("Failed to remove role", error, {
+ operation: "remove_role",
+ targetUserId,
+ roleId,
+ });
+ res.status(500).json({ error: "Failed to remove role" });
+ }
+ },
+);
+
+// Get user's roles
+// GET /rbac/users/:userId/roles
+router.get(
+ "/users/:userId/roles",
+ authenticateJWT,
+ async (req: AuthenticatedRequest, res: Response) => {
+ const targetUserId = req.params.userId;
+ const currentUserId = req.userId!;
+
+ if (
+ targetUserId !== currentUserId &&
+ !(await permissionManager.isAdmin(currentUserId))
+ ) {
+ return res.status(403).json({ error: "Access denied" });
+ }
+
+ try {
+ const userRolesList = await db
+ .select({
+ id: userRoles.id,
+ roleId: roles.id,
+ roleName: roles.name,
+ roleDisplayName: roles.displayName,
+ description: roles.description,
+ isSystem: roles.isSystem,
+ grantedAt: userRoles.grantedAt,
+ })
+ .from(userRoles)
+ .innerJoin(roles, eq(userRoles.roleId, roles.id))
+ .where(eq(userRoles.userId, targetUserId));
+
+ res.json({ roles: userRolesList });
+ } catch (error) {
+ databaseLogger.error("Failed to get user roles", error, {
+ operation: "get_user_roles",
+ targetUserId,
+ });
+ res.status(500).json({ error: "Failed to get user roles" });
+ }
+ },
+);
+
+export default router;
diff --git a/src/backend/database/routes/ssh.ts b/src/backend/database/routes/ssh.ts
index 955135a4..8e7e9086 100644
--- a/src/backend/database/routes/ssh.ts
+++ b/src/backend/database/routes/ssh.ts
@@ -11,13 +11,27 @@ import {
sshFolders,
commandHistory,
recentActivity,
+ hostAccess,
+ userRoles,
+ sessionRecordings,
} from "../db/schema.js";
-import { eq, and, desc, isNotNull, or } from "drizzle-orm";
+import {
+ eq,
+ and,
+ desc,
+ isNotNull,
+ or,
+ isNull,
+ gte,
+ sql,
+ inArray,
+} from "drizzle-orm";
import type { Request, Response } from "express";
import multer from "multer";
import { sshLogger } from "../../utils/logger.js";
import { SimpleDBOps } from "../../utils/simple-db-ops.js";
import { AuthManager } from "../../utils/auth-manager.js";
+import { PermissionManager } from "../../utils/permission-manager.js";
import { DataCrypto } from "../../utils/data-crypto.js";
import { SystemCrypto } from "../../utils/system-crypto.js";
import { DatabaseSaveTrigger } from "../db/index.js";
@@ -35,6 +49,7 @@ function isValidPort(port: unknown): port is number {
}
const authManager = AuthManager.getInstance();
+const permissionManager = PermissionManager.getInstance();
const authenticateJWT = authManager.createAuthMiddleware();
const requireDataAccess = authManager.createDataAccessMiddleware();
@@ -231,10 +246,12 @@ router.post(
key,
keyPassword,
keyType,
+ sudoPassword,
pin,
enableTerminal,
enableTunnel,
enableFileManager,
+ enableDocker,
defaultPath,
tunnelConnections,
jumpHosts,
@@ -242,7 +259,16 @@ router.post(
statsConfig,
terminalConfig,
forceKeyboardInteractive,
+ notes,
+ useSocks5,
+ socks5Host,
+ socks5Port,
+ socks5Username,
+ socks5Password,
+ socks5ProxyChain,
+ overrideCredentialUsername,
} = hostData;
+
if (
!isNonEmptyString(userId) ||
!isNonEmptyString(ip) ||
@@ -269,6 +295,7 @@ router.post(
username,
authType: effectiveAuthType,
credentialId: credentialId || null,
+ overrideCredentialUsername: overrideCredentialUsername ? 1 : 0,
pin: pin ? 1 : 0,
enableTerminal: enableTerminal ? 1 : 0,
enableTunnel: enableTunnel ? 1 : 0,
@@ -280,10 +307,21 @@ router.post(
? JSON.stringify(quickActions)
: null,
enableFileManager: enableFileManager ? 1 : 0,
+ enableDocker: enableDocker ? 1 : 0,
defaultPath: defaultPath || null,
statsConfig: statsConfig ? JSON.stringify(statsConfig) : null,
terminalConfig: terminalConfig ? JSON.stringify(terminalConfig) : null,
forceKeyboardInteractive: forceKeyboardInteractive ? "true" : "false",
+ notes: notes || null,
+ sudoPassword: sudoPassword || null,
+ useSocks5: useSocks5 ? 1 : 0,
+ socks5Host: socks5Host || null,
+ socks5Port: socks5Port || null,
+ socks5Username: socks5Username || null,
+ socks5Password: socks5Password || null,
+ socks5ProxyChain: socks5ProxyChain
+ ? JSON.stringify(socks5ProxyChain)
+ : null,
};
if (effectiveAuthType === "password") {
@@ -341,12 +379,14 @@ router.post(
? JSON.parse(createdHost.jumpHosts as string)
: [],
enableFileManager: !!createdHost.enableFileManager,
+ enableDocker: !!createdHost.enableDocker,
statsConfig: createdHost.statsConfig
? JSON.parse(createdHost.statsConfig as string)
: undefined,
};
- const resolvedHost = (await resolveHostCredentials(baseHost)) || baseHost;
+ const resolvedHost =
+ (await resolveHostCredentials(baseHost, userId)) || baseHost;
sshLogger.success(
`SSH host created: ${name} (${ip}:${port}) by user ${userId}`,
@@ -453,10 +493,12 @@ router.put(
key,
keyPassword,
keyType,
+ sudoPassword,
pin,
enableTerminal,
enableTunnel,
enableFileManager,
+ enableDocker,
defaultPath,
tunnelConnections,
jumpHosts,
@@ -464,7 +506,16 @@ router.put(
statsConfig,
terminalConfig,
forceKeyboardInteractive,
+ notes,
+ useSocks5,
+ socks5Host,
+ socks5Port,
+ socks5Username,
+ socks5Password,
+ socks5ProxyChain,
+ overrideCredentialUsername,
} = hostData;
+
if (
!isNonEmptyString(userId) ||
!isNonEmptyString(ip) ||
@@ -492,6 +543,7 @@ router.put(
username,
authType: effectiveAuthType,
credentialId: credentialId || null,
+ overrideCredentialUsername: overrideCredentialUsername ? 1 : 0,
pin: pin ? 1 : 0,
enableTerminal: enableTerminal ? 1 : 0,
enableTunnel: enableTunnel ? 1 : 0,
@@ -503,10 +555,21 @@ router.put(
? JSON.stringify(quickActions)
: null,
enableFileManager: enableFileManager ? 1 : 0,
+ enableDocker: enableDocker ? 1 : 0,
defaultPath: defaultPath || null,
statsConfig: statsConfig ? JSON.stringify(statsConfig) : null,
terminalConfig: terminalConfig ? JSON.stringify(terminalConfig) : null,
forceKeyboardInteractive: forceKeyboardInteractive ? "true" : "false",
+ notes: notes || null,
+ sudoPassword: sudoPassword || null,
+ useSocks5: useSocks5 ? 1 : 0,
+ socks5Host: socks5Host || null,
+ socks5Port: socks5Port || null,
+ socks5Username: socks5Username || null,
+ socks5Password: socks5Password || null,
+ socks5ProxyChain: socks5ProxyChain
+ ? JSON.stringify(socks5ProxyChain)
+ : null,
};
if (effectiveAuthType === "password") {
@@ -535,23 +598,100 @@ router.put(
}
try {
+ const accessInfo = await permissionManager.canAccessHost(
+ userId,
+ Number(hostId),
+ "write",
+ );
+
+ if (!accessInfo.hasAccess) {
+ sshLogger.warn("User does not have permission to update host", {
+ operation: "host_update",
+ hostId: parseInt(hostId),
+ userId,
+ });
+ return res.status(403).json({ error: "Access denied" });
+ }
+
+ if (!accessInfo.isOwner) {
+ sshLogger.warn("Shared user attempted to update host (view-only)", {
+ operation: "host_update",
+ hostId: parseInt(hostId),
+ userId,
+ });
+ return res.status(403).json({
+ error: "Only the host owner can modify host configuration",
+ });
+ }
+
+ const hostRecord = await db
+ .select({
+ userId: sshData.userId,
+ credentialId: sshData.credentialId,
+ authType: sshData.authType,
+ })
+ .from(sshData)
+ .where(eq(sshData.id, Number(hostId)))
+ .limit(1);
+
+ if (hostRecord.length === 0) {
+ sshLogger.warn("Host not found for update", {
+ operation: "host_update",
+ hostId: parseInt(hostId),
+ userId,
+ });
+ return res.status(404).json({ error: "Host not found" });
+ }
+
+ const ownerId = hostRecord[0].userId;
+
+ if (
+ !accessInfo.isOwner &&
+ sshDataObj.credentialId !== undefined &&
+ sshDataObj.credentialId !== hostRecord[0].credentialId
+ ) {
+ return res.status(403).json({
+ error: "Only the host owner can change the credential",
+ });
+ }
+
+ if (
+ !accessInfo.isOwner &&
+ sshDataObj.authType !== undefined &&
+ sshDataObj.authType !== hostRecord[0].authType
+ ) {
+ return res.status(403).json({
+ error: "Only the host owner can change the authentication type",
+ });
+ }
+
+ if (sshDataObj.credentialId !== undefined) {
+ if (
+ hostRecord[0].credentialId !== null &&
+ sshDataObj.credentialId === null
+ ) {
+ const revokedShares = await db
+ .delete(hostAccess)
+ .where(eq(hostAccess.hostId, Number(hostId)))
+ .returning({ id: hostAccess.id, userId: hostAccess.userId });
+ }
+ }
+
await SimpleDBOps.update(
sshData,
"ssh_data",
- and(eq(sshData.id, Number(hostId)), eq(sshData.userId, userId)),
+ eq(sshData.id, Number(hostId)),
sshDataObj,
- userId,
+ ownerId,
);
const updatedHosts = await SimpleDBOps.select(
db
.select()
.from(sshData)
- .where(
- and(eq(sshData.id, Number(hostId)), eq(sshData.userId, userId)),
- ),
+ .where(eq(sshData.id, Number(hostId))),
"ssh_data",
- userId,
+ ownerId,
);
if (updatedHosts.length === 0) {
@@ -582,12 +722,17 @@ router.put(
? JSON.parse(updatedHost.jumpHosts as string)
: [],
enableFileManager: !!updatedHost.enableFileManager,
+ enableDocker: !!updatedHost.enableDocker,
statsConfig: updatedHost.statsConfig
? JSON.parse(updatedHost.statsConfig as string)
: undefined,
+ dockerConfig: updatedHost.dockerConfig
+ ? JSON.parse(updatedHost.dockerConfig as string)
+ : undefined,
};
- const resolvedHost = (await resolveHostCredentials(baseHost)) || baseHost;
+ const resolvedHost =
+ (await resolveHostCredentials(baseHost, userId)) || baseHost;
sshLogger.success(
`SSH host updated: ${name} (${ip}:${port}) by user ${userId}`,
@@ -656,11 +801,115 @@ router.get(
return res.status(400).json({ error: "Invalid userId" });
}
try {
- const data = await SimpleDBOps.select(
- db.select().from(sshData).where(eq(sshData.userId, userId)),
- "ssh_data",
- userId,
- );
+ const now = new Date().toISOString();
+
+ const userRoleIds = await db
+ .select({ roleId: userRoles.roleId })
+ .from(userRoles)
+ .where(eq(userRoles.userId, userId));
+ const roleIds = userRoleIds.map((r) => r.roleId);
+
+ const rawData = await db
+ .select({
+ id: sshData.id,
+ userId: sshData.userId,
+ name: sshData.name,
+ ip: sshData.ip,
+ port: sshData.port,
+ username: sshData.username,
+ folder: sshData.folder,
+ tags: sshData.tags,
+ pin: sshData.pin,
+ authType: sshData.authType,
+ password: sshData.password,
+ key: sshData.key,
+ keyPassword: sshData.key_password,
+ keyType: sshData.keyType,
+ enableTerminal: sshData.enableTerminal,
+ enableTunnel: sshData.enableTunnel,
+ tunnelConnections: sshData.tunnelConnections,
+ jumpHosts: sshData.jumpHosts,
+ enableFileManager: sshData.enableFileManager,
+ defaultPath: sshData.defaultPath,
+ autostartPassword: sshData.autostartPassword,
+ autostartKey: sshData.autostartKey,
+ autostartKeyPassword: sshData.autostartKeyPassword,
+ forceKeyboardInteractive: sshData.forceKeyboardInteractive,
+ statsConfig: sshData.statsConfig,
+ terminalConfig: sshData.terminalConfig,
+ createdAt: sshData.createdAt,
+ updatedAt: sshData.updatedAt,
+ credentialId: sshData.credentialId,
+ overrideCredentialUsername: sshData.overrideCredentialUsername,
+ quickActions: sshData.quickActions,
+ notes: sshData.notes,
+ enableDocker: sshData.enableDocker,
+ useSocks5: sshData.useSocks5,
+ socks5Host: sshData.socks5Host,
+ socks5Port: sshData.socks5Port,
+ socks5Username: sshData.socks5Username,
+ socks5Password: sshData.socks5Password,
+ socks5ProxyChain: sshData.socks5ProxyChain,
+
+ ownerId: sshData.userId,
+ isShared: sql`${hostAccess.id} IS NOT NULL`,
+ permissionLevel: hostAccess.permissionLevel,
+ expiresAt: hostAccess.expiresAt,
+ })
+ .from(sshData)
+ .leftJoin(
+ hostAccess,
+ and(
+ eq(hostAccess.hostId, sshData.id),
+ or(
+ eq(hostAccess.userId, userId),
+ roleIds.length > 0
+ ? inArray(hostAccess.roleId, roleIds)
+ : sql`false`,
+ ),
+ or(isNull(hostAccess.expiresAt), gte(hostAccess.expiresAt, now)),
+ ),
+ )
+ .where(
+ or(
+ eq(sshData.userId, userId),
+ and(
+ eq(hostAccess.userId, userId),
+ or(isNull(hostAccess.expiresAt), gte(hostAccess.expiresAt, now)),
+ ),
+ roleIds.length > 0
+ ? and(
+ inArray(hostAccess.roleId, roleIds),
+ or(
+ isNull(hostAccess.expiresAt),
+ gte(hostAccess.expiresAt, now),
+ ),
+ )
+ : sql`false`,
+ ),
+ );
+
+ const ownHosts = rawData.filter((row) => row.userId === userId);
+ const sharedHosts = rawData.filter((row) => row.userId !== userId);
+
+ let decryptedOwnHosts: any[] = [];
+ try {
+ decryptedOwnHosts = await SimpleDBOps.select(
+ Promise.resolve(ownHosts),
+ "ssh_data",
+ userId,
+ );
+ } catch (decryptError) {
+ sshLogger.error("Failed to decrypt own hosts", decryptError, {
+ operation: "host_fetch_own_decrypt_failed",
+ userId,
+ });
+ decryptedOwnHosts = [];
+ }
+
+ const sanitizedSharedHosts = sharedHosts;
+
+ const data = [...decryptedOwnHosts, ...sanitizedSharedHosts];
const result = await Promise.all(
data.map(async (row: Record) => {
@@ -683,6 +932,7 @@ router.get(
? JSON.parse(row.quickActions as string)
: [],
enableFileManager: !!row.enableFileManager,
+ enableDocker: !!row.enableDocker,
statsConfig: row.statsConfig
? JSON.parse(row.statsConfig as string)
: undefined,
@@ -690,9 +940,18 @@ router.get(
? JSON.parse(row.terminalConfig as string)
: undefined,
forceKeyboardInteractive: row.forceKeyboardInteractive === "true",
+ socks5ProxyChain: row.socks5ProxyChain
+ ? JSON.parse(row.socks5ProxyChain as string)
+ : [],
+
+ isShared: !!row.isShared,
+ permissionLevel: row.permissionLevel || undefined,
+ sharedExpiresAt: row.expiresAt || undefined,
};
- return (await resolveHostCredentials(baseHost)) || baseHost;
+ const resolved =
+ (await resolveHostCredentials(baseHost, userId)) || baseHost;
+ return resolved;
}),
);
@@ -765,9 +1024,12 @@ router.get(
? JSON.parse(host.terminalConfig)
: undefined,
forceKeyboardInteractive: host.forceKeyboardInteractive === "true",
+ socks5ProxyChain: host.socks5ProxyChain
+ ? JSON.parse(host.socks5ProxyChain)
+ : [],
};
- res.json((await resolveHostCredentials(result)) || result);
+ res.json((await resolveHostCredentials(result, userId)) || result);
} catch (err) {
sshLogger.error("Failed to fetch SSH host by ID from database", err, {
operation: "host_fetch_by_id",
@@ -811,7 +1073,7 @@ router.get(
const host = hosts[0];
- const resolvedHost = (await resolveHostCredentials(host)) || host;
+ const resolvedHost = (await resolveHostCredentials(host, userId)) || host;
const exportData = {
name: resolvedHost.name,
@@ -836,6 +1098,9 @@ router.get(
tunnelConnections: resolvedHost.tunnelConnections
? JSON.parse(resolvedHost.tunnelConnections as string)
: [],
+ socks5ProxyChain: resolvedHost.socks5ProxyChain
+ ? JSON.parse(resolvedHost.socks5ProxyChain as string)
+ : [],
};
sshLogger.success("Host exported with decrypted credentials", {
@@ -893,57 +1158,33 @@ router.delete(
await db
.delete(fileManagerRecent)
- .where(
- and(
- eq(fileManagerRecent.hostId, numericHostId),
- eq(fileManagerRecent.userId, userId),
- ),
- );
+ .where(eq(fileManagerRecent.hostId, numericHostId));
await db
.delete(fileManagerPinned)
- .where(
- and(
- eq(fileManagerPinned.hostId, numericHostId),
- eq(fileManagerPinned.userId, userId),
- ),
- );
+ .where(eq(fileManagerPinned.hostId, numericHostId));
await db
.delete(fileManagerShortcuts)
- .where(
- and(
- eq(fileManagerShortcuts.hostId, numericHostId),
- eq(fileManagerShortcuts.userId, userId),
- ),
- );
+ .where(eq(fileManagerShortcuts.hostId, numericHostId));
await db
.delete(commandHistory)
- .where(
- and(
- eq(commandHistory.hostId, numericHostId),
- eq(commandHistory.userId, userId),
- ),
- );
+ .where(eq(commandHistory.hostId, numericHostId));
await db
.delete(sshCredentialUsage)
- .where(
- and(
- eq(sshCredentialUsage.hostId, numericHostId),
- eq(sshCredentialUsage.userId, userId),
- ),
- );
+ .where(eq(sshCredentialUsage.hostId, numericHostId));
await db
.delete(recentActivity)
- .where(
- and(
- eq(recentActivity.hostId, numericHostId),
- eq(recentActivity.userId, userId),
- ),
- );
+ .where(eq(recentActivity.hostId, numericHostId));
+
+ await db.delete(hostAccess).where(eq(hostAccess.hostId, numericHostId));
+
+ await db
+ .delete(sessionRecordings)
+ .where(eq(sessionRecordings.hostId, numericHostId));
await db
.delete(sshData)
@@ -1450,11 +1691,54 @@ router.delete(
async function resolveHostCredentials(
host: Record,
+ requestingUserId?: string,
): Promise> {
try {
- if (host.credentialId && host.userId) {
+ if (host.credentialId && (host.userId || host.ownerId)) {
const credentialId = host.credentialId as number;
- const userId = host.userId as string;
+ const ownerId = (host.ownerId || host.userId) as string;
+
+ if (requestingUserId && requestingUserId !== ownerId) {
+ try {
+ const { SharedCredentialManager } =
+ await import("../../utils/shared-credential-manager.js");
+ const sharedCredManager = SharedCredentialManager.getInstance();
+ const sharedCred = await sharedCredManager.getSharedCredentialForUser(
+ host.id as number,
+ requestingUserId,
+ );
+
+ if (sharedCred) {
+ const resolvedHost: Record = {
+ ...host,
+ authType: sharedCred.authType,
+ password: sharedCred.password,
+ key: sharedCred.key,
+ keyPassword: sharedCred.keyPassword,
+ keyType: sharedCred.keyType,
+ };
+
+ if (!host.overrideCredentialUsername) {
+ resolvedHost.username = sharedCred.username;
+ }
+
+ return resolvedHost;
+ }
+ } catch (sharedCredError) {
+ sshLogger.warn(
+ "Failed to get shared credential, falling back to owner credential",
+ {
+ operation: "resolve_shared_credential_fallback",
+ hostId: host.id as number,
+ requestingUserId,
+ error:
+ sharedCredError instanceof Error
+ ? sharedCredError.message
+ : "Unknown error",
+ },
+ );
+ }
+ }
const credentials = await SimpleDBOps.select(
db
@@ -1463,24 +1747,29 @@ async function resolveHostCredentials(
.where(
and(
eq(sshCredentials.id, credentialId),
- eq(sshCredentials.userId, userId),
+ eq(sshCredentials.userId, ownerId),
),
),
"ssh_credentials",
- userId,
+ ownerId,
);
if (credentials.length > 0) {
const credential = credentials[0];
- return {
+ const resolvedHost: Record = {
...host,
- username: credential.username,
authType: credential.auth_type || credential.authType,
password: credential.password,
key: credential.key,
keyPassword: credential.key_password || credential.keyPassword,
keyType: credential.key_type || credential.keyType,
};
+
+ if (!host.overrideCredentialUsername) {
+ resolvedHost.username = credential.username;
+ }
+
+ return resolvedHost;
}
}
@@ -1680,6 +1969,40 @@ router.delete(
});
}
+ const hostIds = hostsToDelete.map((host) => host.id);
+
+ if (hostIds.length > 0) {
+ await db
+ .delete(fileManagerRecent)
+ .where(inArray(fileManagerRecent.hostId, hostIds));
+
+ await db
+ .delete(fileManagerPinned)
+ .where(inArray(fileManagerPinned.hostId, hostIds));
+
+ await db
+ .delete(fileManagerShortcuts)
+ .where(inArray(fileManagerShortcuts.hostId, hostIds));
+
+ await db
+ .delete(commandHistory)
+ .where(inArray(commandHistory.hostId, hostIds));
+
+ await db
+ .delete(sshCredentialUsage)
+ .where(inArray(sshCredentialUsage.hostId, hostIds));
+
+ await db
+ .delete(recentActivity)
+ .where(inArray(recentActivity.hostId, hostIds));
+
+ await db.delete(hostAccess).where(inArray(hostAccess.hostId, hostIds));
+
+ await db
+ .delete(sessionRecordings)
+ .where(inArray(sessionRecordings.hostId, hostIds));
+ }
+
await db
.delete(sshData)
.where(and(eq(sshData.userId, userId), eq(sshData.folder, folderName)));
@@ -1782,10 +2105,12 @@ router.post(
continue;
}
- if (!["password", "key", "credential"].includes(hostData.authType)) {
+ if (
+ !["password", "key", "credential", "none"].includes(hostData.authType)
+ ) {
results.failed++;
results.errors.push(
- `Host ${i + 1}: Invalid authType. Must be 'password', 'key', or 'credential'`,
+ `Host ${i + 1}: Invalid authType. Must be 'password', 'key', 'credential', or 'none'`,
);
continue;
}
@@ -1840,13 +2165,38 @@ router.post(
enableTerminal: hostData.enableTerminal !== false,
enableTunnel: hostData.enableTunnel !== false,
enableFileManager: hostData.enableFileManager !== false,
+ enableDocker: hostData.enableDocker || false,
defaultPath: hostData.defaultPath || "/",
tunnelConnections: hostData.tunnelConnections
? JSON.stringify(hostData.tunnelConnections)
: "[]",
+ jumpHosts: hostData.jumpHosts
+ ? JSON.stringify(hostData.jumpHosts)
+ : null,
+ quickActions: hostData.quickActions
+ ? JSON.stringify(hostData.quickActions)
+ : null,
statsConfig: hostData.statsConfig
? JSON.stringify(hostData.statsConfig)
: null,
+ terminalConfig: hostData.terminalConfig
+ ? JSON.stringify(hostData.terminalConfig)
+ : null,
+ forceKeyboardInteractive: hostData.forceKeyboardInteractive
+ ? "true"
+ : "false",
+ notes: hostData.notes || null,
+ useSocks5: hostData.useSocks5 ? 1 : 0,
+ socks5Host: hostData.socks5Host || null,
+ socks5Port: hostData.socks5Port || null,
+ socks5Username: hostData.socks5Username || null,
+ socks5Password: hostData.socks5Password || null,
+ socks5ProxyChain: hostData.socks5ProxyChain
+ ? JSON.stringify(hostData.socks5ProxyChain)
+ : null,
+ overrideCredentialUsername: hostData.overrideCredentialUsername
+ ? 1
+ : 0,
createdAt: new Date().toISOString(),
updatedAt: new Date().toISOString(),
};
diff --git a/src/backend/database/routes/users.ts b/src/backend/database/routes/users.ts
index 473654d9..7d896ca7 100644
--- a/src/backend/database/routes/users.ts
+++ b/src/backend/database/routes/users.ts
@@ -15,6 +15,11 @@ import {
sshCredentialUsage,
recentActivity,
snippets,
+ snippetFolders,
+ sshFolders,
+ commandHistory,
+ roles,
+ userRoles,
} from "../db/schema.js";
import { eq, and } from "drizzle-orm";
import bcrypt from "bcryptjs";
@@ -134,6 +139,54 @@ function isNonEmptyString(val: unknown): val is string {
const authenticateJWT = authManager.createAuthMiddleware();
const requireAdmin = authManager.createAdminMiddleware();
+async function deleteUserAndRelatedData(userId: string): Promise {
+ try {
+ await db
+ .delete(sshCredentialUsage)
+ .where(eq(sshCredentialUsage.userId, userId));
+
+ await db
+ .delete(fileManagerRecent)
+ .where(eq(fileManagerRecent.userId, userId));
+ await db
+ .delete(fileManagerPinned)
+ .where(eq(fileManagerPinned.userId, userId));
+ await db
+ .delete(fileManagerShortcuts)
+ .where(eq(fileManagerShortcuts.userId, userId));
+
+ await db.delete(recentActivity).where(eq(recentActivity.userId, userId));
+ await db.delete(dismissedAlerts).where(eq(dismissedAlerts.userId, userId));
+
+ await db.delete(snippets).where(eq(snippets.userId, userId));
+ await db.delete(snippetFolders).where(eq(snippetFolders.userId, userId));
+
+ await db.delete(sshFolders).where(eq(sshFolders.userId, userId));
+
+ await db.delete(commandHistory).where(eq(commandHistory.userId, userId));
+
+ await db.delete(sshData).where(eq(sshData.userId, userId));
+ await db.delete(sshCredentials).where(eq(sshCredentials.userId, userId));
+
+ db.$client
+ .prepare("DELETE FROM settings WHERE key LIKE ?")
+ .run(`user_%_${userId}`);
+
+ await db.delete(users).where(eq(users.id, userId));
+
+ authLogger.success("User and all related data deleted successfully", {
+ operation: "delete_user_and_related_data_complete",
+ userId,
+ });
+ } catch (error) {
+ authLogger.error("Failed to delete user and related data", error, {
+ operation: "delete_user_and_related_data_failed",
+ userId,
+ });
+ throw error;
+ }
+}
+
// Route: Create traditional user (username/password)
// POST /users/create
router.post("/create", async (req, res) => {
@@ -210,6 +263,34 @@ router.post("/create", async (req, res) => {
totp_backup_codes: null,
});
+ try {
+ const defaultRoleName = isFirstUser ? "admin" : "user";
+ const defaultRole = await db
+ .select({ id: roles.id })
+ .from(roles)
+ .where(eq(roles.name, defaultRoleName))
+ .limit(1);
+
+ if (defaultRole.length > 0) {
+ await db.insert(userRoles).values({
+ userId: id,
+ roleId: defaultRole[0].id,
+ grantedBy: id,
+ });
+ } else {
+ authLogger.warn("Default role not found during user registration", {
+ operation: "assign_default_role",
+ userId: id,
+ roleName: defaultRoleName,
+ });
+ }
+ } catch (roleError) {
+ authLogger.error("Failed to assign default role", roleError, {
+ operation: "assign_default_role",
+ userId: id,
+ });
+ }
+
try {
await authManager.registerUser(id, password);
} catch (encryptionError) {
@@ -816,6 +897,41 @@ router.get("/oidc/callback", async (req, res) => {
scopes: String(config.scopes),
});
+ try {
+ const defaultRoleName = isFirstUser ? "admin" : "user";
+ const defaultRole = await db
+ .select({ id: roles.id })
+ .from(roles)
+ .where(eq(roles.name, defaultRoleName))
+ .limit(1);
+
+ if (defaultRole.length > 0) {
+ await db.insert(userRoles).values({
+ userId: id,
+ roleId: defaultRole[0].id,
+ grantedBy: id,
+ });
+ } else {
+ authLogger.warn(
+ "Default role not found during OIDC user registration",
+ {
+ operation: "assign_default_role_oidc",
+ userId: id,
+ roleName: defaultRoleName,
+ },
+ );
+ }
+ } catch (roleError) {
+ authLogger.error(
+ "Failed to assign default role to OIDC user",
+ roleError,
+ {
+ operation: "assign_default_role_oidc",
+ userId: id,
+ },
+ );
+ }
+
try {
const sessionDurationMs =
deviceInfo.type === "desktop" || deviceInfo.type === "mobile"
@@ -1055,6 +1171,19 @@ router.post("/login", async (req, res) => {
return res.status(401).json({ error: "Incorrect password" });
}
+ try {
+ const { SharedCredentialManager } =
+ await import("../../utils/shared-credential-manager.js");
+ const sharedCredManager = SharedCredentialManager.getInstance();
+ await sharedCredManager.reEncryptPendingCredentialsForUser(userRecord.id);
+ } catch (error) {
+ authLogger.warn("Failed to re-encrypt pending shared credentials", {
+ operation: "reencrypt_pending_credentials",
+ userId: userRecord.id,
+ error,
+ });
+ }
+
if (userRecord.totp_enabled) {
const tempToken = await authManager.generateJWTToken(userRecord.id, {
pendingTOTP: true,
@@ -1128,15 +1257,7 @@ router.post("/logout", authenticateJWT, async (req, res) => {
try {
const payload = await authManager.verifyJWTToken(token);
sessionId = payload?.sessionId;
- } catch (error) {
- authLogger.debug(
- "Token verification failed during logout (expected if token expired)",
- {
- operation: "logout_token_verify_failed",
- userId,
- },
- );
- }
+ } catch (error) {}
}
await authManager.logoutUser(userId, sessionId);
@@ -2252,36 +2373,8 @@ router.delete("/delete-user", authenticateJWT, async (req, res) => {
const targetUserId = targetUser[0].id;
- try {
- await db
- .delete(sshCredentialUsage)
- .where(eq(sshCredentialUsage.userId, targetUserId));
- await db
- .delete(fileManagerRecent)
- .where(eq(fileManagerRecent.userId, targetUserId));
- await db
- .delete(fileManagerPinned)
- .where(eq(fileManagerPinned.userId, targetUserId));
- await db
- .delete(fileManagerShortcuts)
- .where(eq(fileManagerShortcuts.userId, targetUserId));
- await db
- .delete(recentActivity)
- .where(eq(recentActivity.userId, targetUserId));
- await db
- .delete(dismissedAlerts)
- .where(eq(dismissedAlerts.userId, targetUserId));
- await db.delete(snippets).where(eq(snippets.userId, targetUserId));
- await db.delete(sshData).where(eq(sshData.userId, targetUserId));
- await db
- .delete(sshCredentials)
- .where(eq(sshCredentials.userId, targetUserId));
- } catch (cleanupError) {
- authLogger.error(`Cleanup failed for user ${username}:`, cleanupError);
- throw cleanupError;
- }
-
- await db.delete(users).where(eq(users.id, targetUserId));
+ // Use the comprehensive deletion utility
+ await deleteUserAndRelatedData(targetUserId);
authLogger.success(
`User ${username} deleted by admin ${adminUser[0].username}`,
@@ -2696,15 +2789,7 @@ router.post("/link-oidc-to-password", authenticateJWT, async (req, res) => {
await authManager.revokeAllUserSessions(oidcUserId);
authManager.logoutUser(oidcUserId);
- await db
- .delete(recentActivity)
- .where(eq(recentActivity.userId, oidcUserId));
-
- await db.delete(users).where(eq(users.id, oidcUserId));
-
- db.$client
- .prepare("DELETE FROM settings WHERE key LIKE ?")
- .run(`user_%_${oidcUserId}`);
+ await deleteUserAndRelatedData(oidcUserId);
try {
const { saveMemoryDatabaseToFile } = await import("../db/index.js");
diff --git a/scripts/enable-ssl.sh b/src/backend/scripts/enable-ssl.sh
similarity index 100%
rename from scripts/enable-ssl.sh
rename to src/backend/scripts/enable-ssl.sh
diff --git a/scripts/setup-ssl.sh b/src/backend/scripts/setup-ssl.sh
similarity index 100%
rename from scripts/setup-ssl.sh
rename to src/backend/scripts/setup-ssl.sh
diff --git a/src/backend/ssh/docker-console.ts b/src/backend/ssh/docker-console.ts
new file mode 100644
index 00000000..d6f7a6e1
--- /dev/null
+++ b/src/backend/ssh/docker-console.ts
@@ -0,0 +1,632 @@
+import { Client as SSHClient } from "ssh2";
+import { WebSocketServer, WebSocket } from "ws";
+import { parse as parseUrl } from "url";
+import { AuthManager } from "../utils/auth-manager.js";
+import { sshData, sshCredentials } from "../database/db/schema.js";
+import { and, eq } from "drizzle-orm";
+import { getDb } from "../database/db/index.js";
+import { SimpleDBOps } from "../utils/simple-db-ops.js";
+import { systemLogger } from "../utils/logger.js";
+import type { SSHHost } from "../../types/index.js";
+
+const dockerConsoleLogger = systemLogger;
+
+interface SSHSession {
+ client: SSHClient;
+ stream: any;
+ isConnected: boolean;
+ containerId?: string;
+ shell?: string;
+}
+
+const activeSessions = new Map();
+
+const wss = new WebSocketServer({
+ host: "0.0.0.0",
+ port: 30008,
+ verifyClient: async (info) => {
+ try {
+ const url = parseUrl(info.req.url || "", true);
+ const token = url.query.token as string;
+
+ if (!token) {
+ return false;
+ }
+
+ const authManager = AuthManager.getInstance();
+ const decoded = await authManager.verifyJWTToken(token);
+
+ if (!decoded || !decoded.userId) {
+ return false;
+ }
+
+ return true;
+ } catch (error) {
+ return false;
+ }
+ },
+});
+
+async function detectShell(
+ session: SSHSession,
+ containerId: string,
+): Promise {
+ const shells = ["bash", "sh", "ash"];
+
+ for (const shell of shells) {
+ try {
+ await new Promise((resolve, reject) => {
+ session.client.exec(
+ `docker exec ${containerId} which ${shell}`,
+ (err, stream) => {
+ if (err) return reject(err);
+
+ let output = "";
+ stream.on("data", (data: Buffer) => {
+ output += data.toString();
+ });
+
+ stream.on("close", (code: number) => {
+ if (code === 0 && output.trim()) {
+ resolve();
+ } else {
+ reject(new Error(`Shell ${shell} not found`));
+ }
+ });
+
+ stream.stderr.on("data", () => {
+ // Ignore stderr
+ });
+ },
+ );
+ });
+
+ return shell;
+ } catch {
+ continue;
+ }
+ }
+
+ return "sh";
+}
+
+async function createJumpHostChain(
+ jumpHosts: any[],
+ userId: string,
+): Promise {
+ if (!jumpHosts || jumpHosts.length === 0) {
+ return null;
+ }
+
+ let currentClient: SSHClient | null = null;
+
+ for (let i = 0; i < jumpHosts.length; i++) {
+ const jumpHostId = jumpHosts[i].hostId;
+
+ const jumpHostData = await SimpleDBOps.select(
+ getDb()
+ .select()
+ .from(sshData)
+ .where(and(eq(sshData.id, jumpHostId), eq(sshData.userId, userId))),
+ "ssh_data",
+ userId,
+ );
+
+ if (jumpHostData.length === 0) {
+ throw new Error(`Jump host ${jumpHostId} not found`);
+ }
+
+ const jumpHost = jumpHostData[0] as unknown as SSHHost;
+ if (typeof jumpHost.jumpHosts === "string" && jumpHost.jumpHosts) {
+ try {
+ jumpHost.jumpHosts = JSON.parse(jumpHost.jumpHosts);
+ } catch (e) {
+ dockerConsoleLogger.error("Failed to parse jump hosts", e, {
+ hostId: jumpHost.id,
+ });
+ jumpHost.jumpHosts = [];
+ }
+ }
+
+ let resolvedCredentials: any = {
+ password: jumpHost.password,
+ sshKey: jumpHost.key,
+ keyPassword: jumpHost.keyPassword,
+ authType: jumpHost.authType,
+ };
+
+ if (jumpHost.credentialId) {
+ const credentials = await SimpleDBOps.select(
+ getDb()
+ .select()
+ .from(sshCredentials)
+ .where(
+ and(
+ eq(sshCredentials.id, jumpHost.credentialId as number),
+ eq(sshCredentials.userId, userId),
+ ),
+ ),
+ "ssh_credentials",
+ userId,
+ );
+
+ if (credentials.length > 0) {
+ const credential = credentials[0];
+ resolvedCredentials = {
+ password: credential.password,
+ sshKey:
+ credential.private_key || credential.privateKey || credential.key,
+ keyPassword: credential.key_password || credential.keyPassword,
+ authType: credential.auth_type || credential.authType,
+ };
+ }
+ }
+
+ const client = new SSHClient();
+
+ const config: any = {
+ host: jumpHost.ip,
+ port: jumpHost.port || 22,
+ username: jumpHost.username,
+ tryKeyboard: true,
+ readyTimeout: 60000,
+ keepaliveInterval: 30000,
+ keepaliveCountMax: 120,
+ tcpKeepAlive: true,
+ tcpKeepAliveInitialDelay: 30000,
+ };
+
+ if (
+ resolvedCredentials.authType === "password" &&
+ resolvedCredentials.password
+ ) {
+ config.password = resolvedCredentials.password;
+ } else if (
+ resolvedCredentials.authType === "key" &&
+ resolvedCredentials.sshKey
+ ) {
+ const cleanKey = resolvedCredentials.sshKey
+ .trim()
+ .replace(/\r\n/g, "\n")
+ .replace(/\r/g, "\n");
+ config.privateKey = Buffer.from(cleanKey, "utf8");
+ if (resolvedCredentials.keyPassword) {
+ config.passphrase = resolvedCredentials.keyPassword;
+ }
+ }
+
+ if (currentClient) {
+ await new Promise((resolve, reject) => {
+ currentClient!.forwardOut(
+ "127.0.0.1",
+ 0,
+ jumpHost.ip,
+ jumpHost.port || 22,
+ (err, stream) => {
+ if (err) return reject(err);
+ config.sock = stream;
+ resolve();
+ },
+ );
+ });
+ }
+
+ await new Promise((resolve, reject) => {
+ client.on("ready", () => resolve());
+ client.on("error", reject);
+ client.connect(config);
+ });
+
+ currentClient = client;
+ }
+
+ return currentClient;
+}
+
+wss.on("connection", async (ws: WebSocket, req) => {
+ const userId = (req as any).userId;
+ const sessionId = `docker-console-${Date.now()}-${Math.random()}`;
+
+ let sshSession: SSHSession | null = null;
+
+ ws.on("message", async (data) => {
+ try {
+ const message = JSON.parse(data.toString());
+
+ switch (message.type) {
+ case "connect": {
+ const { hostConfig, containerId, shell, cols, rows } =
+ message.data as {
+ hostConfig: SSHHost;
+ containerId: string;
+ shell?: string;
+ cols?: number;
+ rows?: number;
+ };
+
+ if (
+ typeof hostConfig.jumpHosts === "string" &&
+ hostConfig.jumpHosts
+ ) {
+ try {
+ hostConfig.jumpHosts = JSON.parse(hostConfig.jumpHosts);
+ } catch (e) {
+ dockerConsoleLogger.error("Failed to parse jump hosts", e, {
+ hostId: hostConfig.id,
+ });
+ hostConfig.jumpHosts = [];
+ }
+ }
+
+ if (!hostConfig || !containerId) {
+ ws.send(
+ JSON.stringify({
+ type: "error",
+ message: "Host configuration and container ID are required",
+ }),
+ );
+ return;
+ }
+
+ if (!hostConfig.enableDocker) {
+ ws.send(
+ JSON.stringify({
+ type: "error",
+ message:
+ "Docker is not enabled for this host. Enable it in Host Settings.",
+ }),
+ );
+ return;
+ }
+
+ try {
+ let resolvedCredentials: any = {
+ password: hostConfig.password,
+ sshKey: hostConfig.key,
+ keyPassword: hostConfig.keyPassword,
+ authType: hostConfig.authType,
+ };
+
+ if (hostConfig.credentialId) {
+ const credentials = await SimpleDBOps.select(
+ getDb()
+ .select()
+ .from(sshCredentials)
+ .where(
+ and(
+ eq(sshCredentials.id, hostConfig.credentialId as number),
+ eq(sshCredentials.userId, userId),
+ ),
+ ),
+ "ssh_credentials",
+ userId,
+ );
+
+ if (credentials.length > 0) {
+ const credential = credentials[0];
+ resolvedCredentials = {
+ password: credential.password,
+ sshKey:
+ credential.private_key ||
+ credential.privateKey ||
+ credential.key,
+ keyPassword:
+ credential.key_password || credential.keyPassword,
+ authType: credential.auth_type || credential.authType,
+ };
+ }
+ }
+
+ const client = new SSHClient();
+
+ const config: any = {
+ host: hostConfig.ip,
+ port: hostConfig.port || 22,
+ username: hostConfig.username,
+ tryKeyboard: true,
+ readyTimeout: 60000,
+ keepaliveInterval: 30000,
+ keepaliveCountMax: 120,
+ tcpKeepAlive: true,
+ tcpKeepAliveInitialDelay: 30000,
+ };
+
+ if (
+ resolvedCredentials.authType === "password" &&
+ resolvedCredentials.password
+ ) {
+ config.password = resolvedCredentials.password;
+ } else if (
+ resolvedCredentials.authType === "key" &&
+ resolvedCredentials.sshKey
+ ) {
+ const cleanKey = resolvedCredentials.sshKey
+ .trim()
+ .replace(/\r\n/g, "\n")
+ .replace(/\r/g, "\n");
+ config.privateKey = Buffer.from(cleanKey, "utf8");
+ if (resolvedCredentials.keyPassword) {
+ config.passphrase = resolvedCredentials.keyPassword;
+ }
+ }
+
+ if (hostConfig.jumpHosts && hostConfig.jumpHosts.length > 0) {
+ const jumpClient = await createJumpHostChain(
+ hostConfig.jumpHosts,
+ userId,
+ );
+ if (jumpClient) {
+ const stream = await new Promise((resolve, reject) => {
+ jumpClient.forwardOut(
+ "127.0.0.1",
+ 0,
+ hostConfig.ip,
+ hostConfig.port || 22,
+ (err, stream) => {
+ if (err) return reject(err);
+ resolve(stream);
+ },
+ );
+ });
+ config.sock = stream;
+ }
+ }
+
+ await new Promise((resolve, reject) => {
+ client.on("ready", () => resolve());
+ client.on("error", reject);
+ client.connect(config);
+ });
+
+ sshSession = {
+ client,
+ stream: null,
+ isConnected: true,
+ containerId,
+ };
+
+ activeSessions.set(sessionId, sshSession);
+
+ let shellToUse = shell || "bash";
+
+ if (shell) {
+ try {
+ await new Promise((resolve, reject) => {
+ client.exec(
+ `docker exec ${containerId} which ${shell}`,
+ (err, stream) => {
+ if (err) return reject(err);
+
+ let output = "";
+ stream.on("data", (data: Buffer) => {
+ output += data.toString();
+ });
+
+ stream.on("close", (code: number) => {
+ if (code === 0 && output.trim()) {
+ resolve();
+ } else {
+ reject(new Error(`Shell ${shell} not available`));
+ }
+ });
+
+ stream.stderr.on("data", () => {
+ // Ignore stderr
+ });
+ },
+ );
+ });
+ } catch {
+ dockerConsoleLogger.warn(
+ `Requested shell ${shell} not found, detecting available shell`,
+ {
+ operation: "shell_validation",
+ sessionId,
+ containerId,
+ requestedShell: shell,
+ },
+ );
+ shellToUse = await detectShell(sshSession, containerId);
+ }
+ } else {
+ shellToUse = await detectShell(sshSession, containerId);
+ }
+
+ sshSession.shell = shellToUse;
+
+ const execCommand = `docker exec -it ${containerId} /bin/${shellToUse}`;
+
+ client.exec(
+ execCommand,
+ {
+ pty: {
+ term: "xterm-256color",
+ cols: cols || 80,
+ rows: rows || 24,
+ },
+ },
+ (err, stream) => {
+ if (err) {
+ dockerConsoleLogger.error(
+ "Failed to create docker exec",
+ err,
+ {
+ operation: "docker_exec",
+ sessionId,
+ containerId,
+ },
+ );
+
+ ws.send(
+ JSON.stringify({
+ type: "error",
+ message: `Failed to start console: ${err.message}`,
+ }),
+ );
+ return;
+ }
+
+ sshSession!.stream = stream;
+
+ stream.on("data", (data: Buffer) => {
+ if (ws.readyState === WebSocket.OPEN) {
+ ws.send(
+ JSON.stringify({
+ type: "output",
+ data: data.toString("utf8"),
+ }),
+ );
+ }
+ });
+
+ stream.stderr.on("data", (data: Buffer) => {});
+
+ stream.on("close", () => {
+ if (ws.readyState === WebSocket.OPEN) {
+ ws.send(
+ JSON.stringify({
+ type: "disconnected",
+ message: "Console session ended",
+ }),
+ );
+ }
+
+ if (sshSession) {
+ sshSession.client.end();
+ activeSessions.delete(sessionId);
+ }
+ });
+
+ ws.send(
+ JSON.stringify({
+ type: "connected",
+ data: {
+ shell: shellToUse,
+ requestedShell: shell,
+ shellChanged: shell && shell !== shellToUse,
+ },
+ }),
+ );
+ },
+ );
+ } catch (error) {
+ dockerConsoleLogger.error("Failed to connect to container", error, {
+ operation: "console_connect",
+ sessionId,
+ containerId: message.data.containerId,
+ });
+
+ ws.send(
+ JSON.stringify({
+ type: "error",
+ message:
+ error instanceof Error
+ ? error.message
+ : "Failed to connect to container",
+ }),
+ );
+ }
+ break;
+ }
+
+ case "input": {
+ if (sshSession && sshSession.stream) {
+ sshSession.stream.write(message.data);
+ }
+ break;
+ }
+
+ case "resize": {
+ if (sshSession && sshSession.stream) {
+ const { cols, rows } = message.data;
+ sshSession.stream.setWindow(rows, cols);
+ }
+ break;
+ }
+
+ case "disconnect": {
+ if (sshSession) {
+ if (sshSession.stream) {
+ sshSession.stream.end();
+ }
+ sshSession.client.end();
+ activeSessions.delete(sessionId);
+
+ ws.send(
+ JSON.stringify({
+ type: "disconnected",
+ message: "Disconnected from container",
+ }),
+ );
+ }
+ break;
+ }
+
+ case "ping": {
+ if (ws.readyState === WebSocket.OPEN) {
+ ws.send(JSON.stringify({ type: "pong" }));
+ }
+ break;
+ }
+
+ default:
+ dockerConsoleLogger.warn("Unknown message type", {
+ operation: "ws_message",
+ type: message.type,
+ });
+ }
+ } catch (error) {
+ dockerConsoleLogger.error("WebSocket message error", error, {
+ operation: "ws_message",
+ sessionId,
+ });
+
+ ws.send(
+ JSON.stringify({
+ type: "error",
+ message: error instanceof Error ? error.message : "An error occurred",
+ }),
+ );
+ }
+ });
+
+ ws.on("close", () => {
+ if (sshSession) {
+ if (sshSession.stream) {
+ sshSession.stream.end();
+ }
+ sshSession.client.end();
+ activeSessions.delete(sessionId);
+ }
+ });
+
+ ws.on("error", (error) => {
+ dockerConsoleLogger.error("WebSocket error", error, {
+ operation: "ws_error",
+ sessionId,
+ });
+
+ if (sshSession) {
+ if (sshSession.stream) {
+ sshSession.stream.end();
+ }
+ sshSession.client.end();
+ activeSessions.delete(sessionId);
+ }
+ });
+});
+
+process.on("SIGTERM", () => {
+ activeSessions.forEach((session, sessionId) => {
+ if (session.stream) {
+ session.stream.end();
+ }
+ session.client.end();
+ });
+
+ activeSessions.clear();
+
+ wss.close(() => {
+ process.exit(0);
+ });
+});
diff --git a/src/backend/ssh/docker.ts b/src/backend/ssh/docker.ts
new file mode 100644
index 00000000..ee984be4
--- /dev/null
+++ b/src/backend/ssh/docker.ts
@@ -0,0 +1,1904 @@
+import express from "express";
+import cors from "cors";
+import cookieParser from "cookie-parser";
+import axios from "axios";
+import { Client as SSHClient } from "ssh2";
+import type { ClientChannel } from "ssh2";
+import { getDb } from "../database/db/index.js";
+import { sshData, sshCredentials } from "../database/db/schema.js";
+import { eq, and } from "drizzle-orm";
+import { logger } from "../utils/logger.js";
+import { SimpleDBOps } from "../utils/simple-db-ops.js";
+import { AuthManager } from "../utils/auth-manager.js";
+import { createSocks5Connection } from "../utils/socks5-helper.js";
+import type { AuthenticatedRequest, SSHHost } from "../../types/index.js";
+
+const dockerLogger = logger;
+
+interface SSHSession {
+ client: SSHClient;
+ isConnected: boolean;
+ lastActive: number;
+ timeout?: NodeJS.Timeout;
+ activeOperations: number;
+ hostId?: number;
+}
+
+interface PendingTOTPSession {
+ client: SSHClient;
+ finish: (responses: string[]) => void;
+ config: any;
+ createdAt: number;
+ sessionId: string;
+ hostId?: number;
+ ip?: string;
+ port?: number;
+ username?: string;
+ userId?: string;
+ prompts?: Array<{ prompt: string; echo: boolean }>;
+ totpPromptIndex?: number;
+ resolvedPassword?: string;
+ totpAttempts: number;
+}
+
+const sshSessions: Record = {};
+const pendingTOTPSessions: Record = {};
+
+const SESSION_IDLE_TIMEOUT = 60 * 60 * 1000;
+
+setInterval(() => {
+ const now = Date.now();
+ Object.keys(pendingTOTPSessions).forEach((sessionId) => {
+ const session = pendingTOTPSessions[sessionId];
+ if (now - session.createdAt > 180000) {
+ try {
+ session.client.end();
+ } catch {}
+ delete pendingTOTPSessions[sessionId];
+ }
+ });
+}, 60000);
+
+function cleanupSession(sessionId: string) {
+ const session = sshSessions[sessionId];
+ if (session) {
+ if (session.activeOperations > 0) {
+ dockerLogger.warn(
+ `Deferring session cleanup for ${sessionId} - ${session.activeOperations} active operations`,
+ {
+ operation: "cleanup_deferred",
+ sessionId,
+ activeOperations: session.activeOperations,
+ },
+ );
+ scheduleSessionCleanup(sessionId);
+ return;
+ }
+
+ try {
+ session.client.end();
+ } catch (error) {}
+ clearTimeout(session.timeout);
+ delete sshSessions[sessionId];
+ }
+}
+
+function scheduleSessionCleanup(sessionId: string) {
+ const session = sshSessions[sessionId];
+ if (session) {
+ if (session.timeout) clearTimeout(session.timeout);
+
+ session.timeout = setTimeout(() => {
+ cleanupSession(sessionId);
+ }, SESSION_IDLE_TIMEOUT);
+ }
+}
+
+async function resolveJumpHost(
+ hostId: number,
+ userId: string,
+): Promise {
+ try {
+ const hosts = await SimpleDBOps.select(
+ getDb()
+ .select()
+ .from(sshData)
+ .where(and(eq(sshData.id, hostId), eq(sshData.userId, userId))),
+ "ssh_data",
+ userId,
+ );
+
+ if (hosts.length === 0) {
+ return null;
+ }
+
+ const host = hosts[0];
+
+ if (host.credentialId) {
+ const credentials = await SimpleDBOps.select(
+ getDb()
+ .select()
+ .from(sshCredentials)
+ .where(
+ and(
+ eq(sshCredentials.id, host.credentialId as number),
+ eq(sshCredentials.userId, userId),
+ ),
+ ),
+ "ssh_credentials",
+ userId,
+ );
+
+ if (credentials.length > 0) {
+ const credential = credentials[0];
+ return {
+ ...host,
+ password: credential.password,
+ key:
+ credential.private_key || credential.privateKey || credential.key,
+ keyPassword: credential.key_password || credential.keyPassword,
+ keyType: credential.key_type || credential.keyType,
+ authType: credential.auth_type || credential.authType,
+ };
+ }
+ }
+
+ return host;
+ } catch (error) {
+ dockerLogger.error("Failed to resolve jump host", error, {
+ operation: "resolve_jump_host",
+ hostId,
+ userId,
+ });
+ return null;
+ }
+}
+
+async function createJumpHostChain(
+ jumpHosts: Array<{ hostId: number }>,
+ userId: string,
+): Promise {
+ if (!jumpHosts || jumpHosts.length === 0) {
+ return null;
+ }
+
+ let currentClient: SSHClient | null = null;
+ const clients: SSHClient[] = [];
+
+ try {
+ const jumpHostConfigs = await Promise.all(
+ jumpHosts.map((jh) => resolveJumpHost(jh.hostId, userId)),
+ );
+
+ for (let i = 0; i < jumpHostConfigs.length; i++) {
+ if (!jumpHostConfigs[i]) {
+ dockerLogger.error(`Jump host ${i + 1} not found`, undefined, {
+ operation: "jump_host_chain",
+ hostId: jumpHosts[i].hostId,
+ });
+ clients.forEach((c) => c.end());
+ return null;
+ }
+ }
+
+ for (let i = 0; i < jumpHostConfigs.length; i++) {
+ const jumpHostConfig = jumpHostConfigs[i];
+
+ const jumpClient = new SSHClient();
+ clients.push(jumpClient);
+
+ const connected = await new Promise((resolve) => {
+ const timeout = setTimeout(() => {
+ resolve(false);
+ }, 30000);
+
+ jumpClient.on("ready", () => {
+ clearTimeout(timeout);
+ resolve(true);
+ });
+
+ jumpClient.on("error", (err) => {
+ clearTimeout(timeout);
+ dockerLogger.error(`Jump host ${i + 1} connection failed`, err, {
+ operation: "jump_host_connect",
+ hostId: jumpHostConfig.id,
+ ip: jumpHostConfig.ip,
+ });
+ resolve(false);
+ });
+
+ const connectConfig: any = {
+ host: jumpHostConfig.ip,
+ port: jumpHostConfig.port || 22,
+ username: jumpHostConfig.username,
+ tryKeyboard: true,
+ readyTimeout: 30000,
+ };
+
+ if (jumpHostConfig.authType === "password" && jumpHostConfig.password) {
+ connectConfig.password = jumpHostConfig.password;
+ } else if (jumpHostConfig.authType === "key" && jumpHostConfig.key) {
+ const cleanKey = jumpHostConfig.key
+ .trim()
+ .replace(/\r\n/g, "\n")
+ .replace(/\r/g, "\n");
+ connectConfig.privateKey = Buffer.from(cleanKey, "utf8");
+ if (jumpHostConfig.keyPassword) {
+ connectConfig.passphrase = jumpHostConfig.keyPassword;
+ }
+ }
+
+ if (currentClient) {
+ currentClient.forwardOut(
+ "127.0.0.1",
+ 0,
+ jumpHostConfig.ip,
+ jumpHostConfig.port || 22,
+ (err, stream) => {
+ if (err) {
+ clearTimeout(timeout);
+ resolve(false);
+ return;
+ }
+ connectConfig.sock = stream;
+ jumpClient.connect(connectConfig);
+ },
+ );
+ } else {
+ jumpClient.connect(connectConfig);
+ }
+ });
+
+ if (!connected) {
+ clients.forEach((c) => c.end());
+ return null;
+ }
+
+ currentClient = jumpClient;
+ }
+
+ return currentClient;
+ } catch (error) {
+ dockerLogger.error("Failed to create jump host chain", error, {
+ operation: "jump_host_chain",
+ });
+ clients.forEach((c) => c.end());
+ return null;
+ }
+}
+
+async function executeDockerCommand(
+ session: SSHSession,
+ command: string,
+): Promise {
+ return new Promise((resolve, reject) => {
+ session.client.exec(command, (err, stream) => {
+ if (err) {
+ dockerLogger.error("Docker command execution error", err, {
+ operation: "execute_docker_command",
+ command,
+ });
+ return reject(err);
+ }
+
+ let stdout = "";
+ let stderr = "";
+
+ stream.on("close", (code: number) => {
+ if (code !== 0) {
+ dockerLogger.error("Docker command failed", undefined, {
+ operation: "execute_docker_command",
+ command,
+ exitCode: code,
+ stderr,
+ });
+ reject(new Error(stderr || `Command exited with code ${code}`));
+ } else {
+ resolve(stdout);
+ }
+ });
+
+ stream.on("data", (data: Buffer) => {
+ stdout += data.toString();
+ });
+
+ stream.stderr.on("data", (data: Buffer) => {
+ stderr += data.toString();
+ });
+
+ stream.on("error", (streamErr: Error) => {
+ dockerLogger.error("Docker command stream error", streamErr, {
+ operation: "execute_docker_command",
+ command,
+ });
+ reject(streamErr);
+ });
+ });
+ });
+}
+
+const app = express();
+
+app.use(
+ cors({
+ origin: (origin, callback) => {
+ if (!origin) {
+ return callback(null, true);
+ }
+
+ if (origin.startsWith("https://")) {
+ return callback(null, true);
+ }
+
+ if (origin.startsWith("http://")) {
+ return callback(null, true);
+ }
+
+ const allowedOrigins = [
+ "http://localhost:5173",
+ "http://localhost:3000",
+ "http://127.0.0.1:5173",
+ "http://127.0.0.1:3000",
+ ];
+
+ if (allowedOrigins.includes(origin)) {
+ return callback(null, true);
+ }
+
+ return callback(new Error("Not allowed by CORS"));
+ },
+ credentials: true,
+ methods: ["GET", "POST", "PUT", "DELETE", "OPTIONS"],
+ allowedHeaders: [
+ "Content-Type",
+ "Authorization",
+ "User-Agent",
+ "X-Electron-App",
+ ],
+ }),
+);
+
+app.use(cookieParser());
+app.use(express.json({ limit: "100mb" }));
+app.use(express.urlencoded({ limit: "100mb", extended: true }));
+
+const authManager = AuthManager.getInstance();
+app.use(authManager.createAuthMiddleware());
+
+// POST /docker/ssh/connect - Establish SSH session
+app.post("/docker/ssh/connect", async (req, res) => {
+ const {
+ sessionId,
+ hostId,
+ userProvidedPassword,
+ userProvidedSshKey,
+ userProvidedKeyPassword,
+ forceKeyboardInteractive,
+ useSocks5,
+ socks5Host,
+ socks5Port,
+ socks5Username,
+ socks5Password,
+ socks5ProxyChain,
+ } = req.body;
+ const userId = (req as any).userId;
+
+ if (!userId) {
+ dockerLogger.error(
+ "Docker SSH connection rejected: no authenticated user",
+ {
+ operation: "docker_connect_auth",
+ sessionId,
+ },
+ );
+ return res.status(401).json({ error: "Authentication required" });
+ }
+
+ if (!SimpleDBOps.isUserDataUnlocked(userId)) {
+ return res.status(401).json({
+ error: "Session expired - please log in again",
+ code: "SESSION_EXPIRED",
+ });
+ }
+
+ if (!sessionId || !hostId) {
+ dockerLogger.warn("Missing Docker SSH connection parameters", {
+ operation: "docker_connect",
+ sessionId,
+ hasHostId: !!hostId,
+ });
+ return res.status(400).json({ error: "Missing sessionId or hostId" });
+ }
+
+ try {
+ const hosts = await SimpleDBOps.select(
+ getDb().select().from(sshData).where(eq(sshData.id, hostId)),
+ "ssh_data",
+ userId,
+ );
+
+ if (hosts.length === 0) {
+ return res.status(404).json({ error: "Host not found" });
+ }
+
+ const host = hosts[0] as unknown as SSHHost;
+
+ if (host.userId !== userId) {
+ const { PermissionManager } =
+ await import("../utils/permission-manager.js");
+ const permissionManager = PermissionManager.getInstance();
+ const accessInfo = await permissionManager.canAccessHost(
+ userId,
+ hostId,
+ "execute",
+ );
+
+ if (!accessInfo.hasAccess) {
+ dockerLogger.warn("User does not have access to host", {
+ operation: "docker_connect",
+ hostId,
+ userId,
+ });
+ return res.status(403).json({ error: "Access denied" });
+ }
+ }
+ if (typeof host.jumpHosts === "string" && host.jumpHosts) {
+ try {
+ host.jumpHosts = JSON.parse(host.jumpHosts);
+ } catch (e) {
+ dockerLogger.error("Failed to parse jump hosts", e, {
+ hostId: host.id,
+ });
+ host.jumpHosts = [];
+ }
+ }
+
+ if (!host.enableDocker) {
+ dockerLogger.warn("Docker not enabled for host", {
+ operation: "docker_connect",
+ hostId,
+ userId,
+ });
+ return res.status(403).json({
+ error:
+ "Docker is not enabled for this host. Enable it in Host Settings.",
+ code: "DOCKER_DISABLED",
+ });
+ }
+
+ if (sshSessions[sessionId]) {
+ cleanupSession(sessionId);
+ }
+
+ if (pendingTOTPSessions[sessionId]) {
+ try {
+ pendingTOTPSessions[sessionId].client.end();
+ } catch {}
+ delete pendingTOTPSessions[sessionId];
+ }
+
+ let resolvedCredentials: any = {
+ password: host.password,
+ sshKey: host.key,
+ keyPassword: host.keyPassword,
+ authType: host.authType,
+ };
+
+ if (userProvidedPassword) {
+ resolvedCredentials.password = userProvidedPassword;
+ }
+ if (userProvidedSshKey) {
+ resolvedCredentials.sshKey = userProvidedSshKey;
+ resolvedCredentials.authType = "key";
+ }
+ if (userProvidedKeyPassword) {
+ resolvedCredentials.keyPassword = userProvidedKeyPassword;
+ }
+
+ if (host.credentialId) {
+ const ownerId = host.userId;
+
+ if (userId !== ownerId) {
+ try {
+ const { SharedCredentialManager } =
+ await import("../utils/shared-credential-manager.js");
+ const sharedCredManager = SharedCredentialManager.getInstance();
+ const sharedCred = await sharedCredManager.getSharedCredentialForUser(
+ host.id,
+ userId,
+ );
+
+ if (sharedCred) {
+ resolvedCredentials = {
+ password: sharedCred.password,
+ sshKey: sharedCred.key,
+ keyPassword: sharedCred.keyPassword,
+ authType: sharedCred.authType,
+ };
+ }
+ } catch (error) {
+ dockerLogger.error("Failed to resolve shared credential", error, {
+ operation: "docker_connect",
+ hostId,
+ userId,
+ });
+ }
+ } else {
+ const credentials = await SimpleDBOps.select(
+ getDb()
+ .select()
+ .from(sshCredentials)
+ .where(
+ and(
+ eq(sshCredentials.id, host.credentialId as number),
+ eq(sshCredentials.userId, userId),
+ ),
+ ),
+ "ssh_credentials",
+ userId,
+ );
+
+ if (credentials.length > 0) {
+ const credential = credentials[0];
+ resolvedCredentials = {
+ password: credential.password,
+ sshKey:
+ credential.private_key || credential.privateKey || credential.key,
+ keyPassword: credential.key_password || credential.keyPassword,
+ authType: credential.auth_type || credential.authType,
+ };
+ }
+ }
+ }
+
+ const client = new SSHClient();
+
+ const config: any = {
+ host: host.ip,
+ port: host.port || 22,
+ username: host.username,
+ tryKeyboard: true,
+ keepaliveInterval: 30000,
+ keepaliveCountMax: 3,
+ readyTimeout: 60000,
+ tcpKeepAlive: true,
+ tcpKeepAliveInitialDelay: 30000,
+ };
+
+ if (resolvedCredentials.authType === "none") {
+ } else if (resolvedCredentials.authType === "password") {
+ if (resolvedCredentials.password) {
+ config.password = resolvedCredentials.password;
+ }
+ } else if (
+ resolvedCredentials.authType === "key" &&
+ resolvedCredentials.sshKey
+ ) {
+ try {
+ if (
+ !resolvedCredentials.sshKey.includes("-----BEGIN") ||
+ !resolvedCredentials.sshKey.includes("-----END")
+ ) {
+ dockerLogger.error("Invalid SSH key format", {
+ operation: "docker_connect",
+ sessionId,
+ hostId,
+ });
+ return res.status(400).json({
+ error: "Invalid private key format",
+ });
+ }
+
+ const cleanKey = resolvedCredentials.sshKey
+ .trim()
+ .replace(/\r\n/g, "\n")
+ .replace(/\r/g, "\n");
+ config.privateKey = Buffer.from(cleanKey, "utf8");
+ if (resolvedCredentials.keyPassword) {
+ config.passphrase = resolvedCredentials.keyPassword;
+ }
+ } catch (error) {
+ dockerLogger.error("SSH key processing error", error, {
+ operation: "docker_connect",
+ sessionId,
+ hostId,
+ });
+ return res.status(400).json({
+ error: "SSH key format error: Invalid private key format",
+ });
+ }
+ } else if (resolvedCredentials.authType === "key") {
+ dockerLogger.error(
+ "SSH key authentication requested but no key provided",
+ {
+ operation: "docker_connect",
+ sessionId,
+ hostId,
+ },
+ );
+ return res.status(400).json({
+ error: "SSH key authentication requested but no key provided",
+ });
+ }
+
+ let responseSent = false;
+ let keyboardInteractiveResponded = false;
+
+ client.on("ready", () => {
+ if (responseSent) return;
+ responseSent = true;
+
+ sshSessions[sessionId] = {
+ client,
+ isConnected: true,
+ lastActive: Date.now(),
+ activeOperations: 0,
+ hostId,
+ };
+
+ scheduleSessionCleanup(sessionId);
+
+ res.json({ success: true, message: "SSH connection established" });
+ });
+
+ client.on("error", (err) => {
+ if (responseSent) return;
+ responseSent = true;
+
+ dockerLogger.error("Docker SSH connection failed", err, {
+ operation: "docker_connect",
+ sessionId,
+ hostId,
+ userId,
+ });
+
+ if (
+ resolvedCredentials.authType === "none" &&
+ (err.message.includes("authentication") ||
+ err.message.includes("All configured authentication methods failed"))
+ ) {
+ res.json({
+ status: "auth_required",
+ reason: "no_keyboard",
+ });
+ } else {
+ res.status(500).json({
+ success: false,
+ message: err.message || "SSH connection failed",
+ });
+ }
+ });
+
+ client.on("close", () => {
+ if (sshSessions[sessionId]) {
+ sshSessions[sessionId].isConnected = false;
+ cleanupSession(sessionId);
+ }
+ });
+
+ client.on(
+ "keyboard-interactive",
+ (
+ name: string,
+ instructions: string,
+ instructionsLang: string,
+ prompts: Array<{ prompt: string; echo: boolean }>,
+ finish: (responses: string[]) => void,
+ ) => {
+ const totpPromptIndex = prompts.findIndex((p) =>
+ /verification code|verification_code|token|otp|2fa|authenticator|google.*auth/i.test(
+ p.prompt,
+ ),
+ );
+
+ if (totpPromptIndex !== -1) {
+ if (responseSent) {
+ const responses = prompts.map((p) => {
+ if (/password/i.test(p.prompt) && resolvedCredentials.password) {
+ return resolvedCredentials.password;
+ }
+ return "";
+ });
+ finish(responses);
+ return;
+ }
+ responseSent = true;
+
+ if (pendingTOTPSessions[sessionId]) {
+ const responses = prompts.map((p) => {
+ if (/password/i.test(p.prompt) && resolvedCredentials.password) {
+ return resolvedCredentials.password;
+ }
+ return "";
+ });
+ finish(responses);
+ return;
+ }
+
+ keyboardInteractiveResponded = true;
+
+ pendingTOTPSessions[sessionId] = {
+ client,
+ finish,
+ config,
+ createdAt: Date.now(),
+ sessionId,
+ hostId,
+ ip: host.ip,
+ port: host.port || 22,
+ username: host.username,
+ userId,
+ prompts,
+ totpPromptIndex,
+ resolvedPassword: resolvedCredentials.password,
+ totpAttempts: 0,
+ };
+
+ res.json({
+ requires_totp: true,
+ sessionId,
+ prompt: prompts[totpPromptIndex].prompt,
+ });
+ } else {
+ const passwordPromptIndex = prompts.findIndex((p) =>
+ /password/i.test(p.prompt),
+ );
+
+ if (
+ resolvedCredentials.authType === "none" &&
+ passwordPromptIndex !== -1
+ ) {
+ if (responseSent) return;
+ responseSent = true;
+ client.end();
+ res.json({
+ status: "auth_required",
+ reason: "no_keyboard",
+ });
+ return;
+ }
+
+ const hasStoredPassword =
+ resolvedCredentials.password &&
+ resolvedCredentials.authType !== "none";
+
+ if (!hasStoredPassword && passwordPromptIndex !== -1) {
+ if (responseSent) {
+ const responses = prompts.map((p) => {
+ if (
+ /password/i.test(p.prompt) &&
+ resolvedCredentials.password
+ ) {
+ return resolvedCredentials.password;
+ }
+ return "";
+ });
+ finish(responses);
+ return;
+ }
+ responseSent = true;
+
+ if (pendingTOTPSessions[sessionId]) {
+ const responses = prompts.map((p) => {
+ if (
+ /password/i.test(p.prompt) &&
+ resolvedCredentials.password
+ ) {
+ return resolvedCredentials.password;
+ }
+ return "";
+ });
+ finish(responses);
+ return;
+ }
+
+ keyboardInteractiveResponded = true;
+
+ pendingTOTPSessions[sessionId] = {
+ client,
+ finish,
+ config,
+ createdAt: Date.now(),
+ sessionId,
+ hostId,
+ ip: host.ip,
+ port: host.port || 22,
+ username: host.username,
+ userId,
+ prompts,
+ totpPromptIndex: passwordPromptIndex,
+ resolvedPassword: resolvedCredentials.password,
+ totpAttempts: 0,
+ };
+
+ res.json({
+ requires_totp: true,
+ sessionId,
+ prompt: prompts[passwordPromptIndex].prompt,
+ isPassword: true,
+ });
+ return;
+ }
+
+ const responses = prompts.map((p) => {
+ if (/password/i.test(p.prompt) && resolvedCredentials.password) {
+ return resolvedCredentials.password;
+ }
+ return "";
+ });
+ finish(responses);
+ }
+ },
+ );
+
+ if (
+ useSocks5 &&
+ (socks5Host || (socks5ProxyChain && (socks5ProxyChain as any).length > 0))
+ ) {
+ try {
+ const socks5Socket = await createSocks5Connection(
+ host.ip,
+ host.port || 22,
+ {
+ useSocks5,
+ socks5Host,
+ socks5Port,
+ socks5Username,
+ socks5Password,
+ socks5ProxyChain: socks5ProxyChain as any,
+ },
+ );
+
+ if (socks5Socket) {
+ config.sock = socks5Socket;
+ client.connect(config);
+ return;
+ }
+ } catch (socks5Error) {
+ dockerLogger.error("SOCKS5 connection failed", socks5Error, {
+ operation: "docker_socks5_connect",
+ sessionId,
+ hostId,
+ proxyHost: socks5Host,
+ proxyPort: socks5Port || 1080,
+ });
+ if (!responseSent) {
+ responseSent = true;
+ return res.status(500).json({
+ error:
+ "SOCKS5 proxy connection failed: " +
+ (socks5Error instanceof Error
+ ? socks5Error.message
+ : "Unknown error"),
+ });
+ }
+ return;
+ }
+ } else if (host.jumpHosts && host.jumpHosts.length > 0) {
+ const jumpClient = await createJumpHostChain(
+ host.jumpHosts as Array<{ hostId: number }>,
+ userId,
+ );
+
+ if (!jumpClient) {
+ return res.status(500).json({
+ error: "Failed to establish jump host chain",
+ });
+ }
+
+ jumpClient.forwardOut(
+ "127.0.0.1",
+ 0,
+ host.ip,
+ host.port || 22,
+ (err, stream) => {
+ if (err) {
+ dockerLogger.error("Failed to forward through jump host", err, {
+ operation: "docker_jump_forward",
+ sessionId,
+ hostId,
+ });
+ jumpClient.end();
+ if (!responseSent) {
+ responseSent = true;
+ return res.status(500).json({
+ error: "Failed to forward through jump host: " + err.message,
+ });
+ }
+ return;
+ }
+
+ config.sock = stream;
+ client.connect(config);
+ },
+ );
+ } else {
+ client.connect(config);
+ }
+ } catch (error) {
+ dockerLogger.error("Docker SSH connection error", error, {
+ operation: "docker_connect",
+ sessionId,
+ hostId,
+ userId,
+ });
+ res.status(500).json({
+ success: false,
+ message: error instanceof Error ? error.message : "Unknown error",
+ });
+ }
+});
+
+// POST /docker/ssh/disconnect - Close SSH session
+app.post("/docker/ssh/disconnect", async (req, res) => { // NOTE(review): no userId check here, unlike other /docker routes — confirm global auth middleware covers it
+  const { sessionId } = req.body;
+
+  if (!sessionId) {
+    return res.status(400).json({ error: "Session ID is required" });
+  }
+
+  cleanupSession(sessionId); // idempotent: safe to call even if the session is already gone
+
+  res.json({ success: true, message: "SSH session disconnected" });
+});
+
+// POST /docker/ssh/connect-totp - Verify TOTP and complete connection
+app.post("/docker/ssh/connect-totp", async (req, res) => { // completes a connect that paused in the keyboard-interactive handler
+  const { sessionId, totpCode } = req.body;
+  const userId = (req as any).userId;
+
+  if (!userId) {
+    dockerLogger.error("TOTP verification rejected: no authenticated user", {
+      operation: "docker_totp_auth",
+      sessionId,
+    });
+    return res.status(401).json({ error: "Authentication required" });
+  }
+
+  if (!sessionId || !totpCode) {
+    return res.status(400).json({ error: "Session ID and TOTP code required" });
+  }
+
+  const session = pendingTOTPSessions[sessionId];
+
+  if (!session) {
+    dockerLogger.warn("TOTP session not found or expired", {
+      operation: "docker_totp_verify",
+      sessionId,
+      userId,
+      availableSessions: Object.keys(pendingTOTPSessions), // NOTE(review): logs all pending session IDs — confirm acceptable for log hygiene
+    });
+    return res
+      .status(404)
+      .json({ error: "TOTP session expired. Please reconnect." });
+  }
+
+  if (Date.now() - session.createdAt > 180000) { // 3-minute window for the user to submit a code
+    delete pendingTOTPSessions[sessionId];
+    try {
+      session.client.end();
+    } catch {}
+    dockerLogger.warn("TOTP session timeout before code submission", {
+      operation: "docker_totp_verify",
+      sessionId,
+      userId,
+      age: Date.now() - session.createdAt,
+    });
+    return res
+      .status(408)
+      .json({ error: "TOTP session timeout. Please reconnect." });
+  }
+
+  const responses = (session.prompts || []).map((p, index) => { // answer every prompt: TOTP code at its index, stored password for password prompts, "" otherwise
+    if (index === session.totpPromptIndex) {
+      return totpCode;
+    }
+    if (/password/i.test(p.prompt) && session.resolvedPassword) {
+      return session.resolvedPassword;
+    }
+    return "";
+  });
+
+  let responseSent = false; // ensures exactly one HTTP response across ready/error/timeout
+  let responseTimeout: NodeJS.Timeout; // assigned below before finish() can trigger any handler
+
+  session.client.once("ready", () => {
+    if (responseSent) return;
+    responseSent = true;
+    clearTimeout(responseTimeout);
+
+    delete pendingTOTPSessions[sessionId];
+
+    setTimeout(() => { // short settle delay before registering the session — presumably lets ssh2 finish handshaking; TODO confirm why 200ms
+      sshSessions[sessionId] = {
+        client: session.client,
+        isConnected: true,
+        lastActive: Date.now(),
+        activeOperations: 0,
+        hostId: session.hostId,
+      };
+      scheduleSessionCleanup(sessionId);
+
+      res.json({
+        status: "success",
+        message: "TOTP verified, SSH connection established",
+      });
+
+      if (session.hostId && session.userId) {
+        (async () => { // fire-and-forget activity logging; failures only warn
+          try {
+            const hosts = await SimpleDBOps.select(
+              getDb()
+                .select()
+                .from(sshData)
+                .where(
+                  and(
+                    eq(sshData.id, session.hostId!),
+                    eq(sshData.userId, session.userId!),
+                  ),
+                ),
+              "ssh_data",
+              session.userId!,
+            );
+
+            const hostName =
+              hosts.length > 0 && hosts[0].name
+                ? hosts[0].name
+                : `${session.username}@${session.ip}:${session.port}`; // fall back to a synthetic label when the host row has no name
+
+            await axios.post(
+              "http://localhost:30006/activity/log",
+              {
+                type: "docker",
+                hostId: session.hostId,
+                hostName,
+              },
+              {
+                headers: {
+                  Authorization: `Bearer ${await authManager.generateJWTToken(session.userId!)}`,
+                },
+              },
+            );
+          } catch (error) {
+            dockerLogger.warn("Failed to log Docker activity (TOTP)", {
+              operation: "activity_log_error",
+              userId: session.userId,
+              hostId: session.hostId,
+              error: error instanceof Error ? error.message : "Unknown error",
+            });
+          }
+        })();
+      }
+    }, 200);
+  });
+
+  session.client.once("error", (err) => {
+    if (responseSent) return;
+    responseSent = true;
+    clearTimeout(responseTimeout);
+
+    delete pendingTOTPSessions[sessionId]; // NOTE(review): client is not end()ed here — confirm ssh2 tears the socket down after an auth failure
+
+    dockerLogger.error("TOTP verification failed", {
+      operation: "docker_totp_verify",
+      sessionId,
+      userId,
+      error: err.message,
+    });
+
+    res.status(401).json({ status: "error", message: "Invalid TOTP code" });
+  });
+
+  responseTimeout = setTimeout(() => { // 60s cap on waiting for the SSH ready/error events
+    if (!responseSent) {
+      responseSent = true;
+      delete pendingTOTPSessions[sessionId]; // NOTE(review): client also left open on timeout — consider session.client.end()
+      dockerLogger.warn("TOTP verification timeout", {
+        operation: "docker_totp_verify",
+        sessionId,
+        userId,
+      });
+      res.status(408).json({ error: "TOTP verification timeout" });
+    }
+  }, 60000);
+
+  session.finish(responses); // hand the answers back to the pending keyboard-interactive callback
+});
+
+// POST /docker/ssh/keepalive - Keep session alive
+app.post("/docker/ssh/keepalive", async (req, res) => { // NOTE(review): no userId check here, unlike other /docker routes — confirm middleware enforces auth
+  const { sessionId } = req.body;
+
+  if (!sessionId) {
+    return res.status(400).json({ error: "Session ID is required" });
+  }
+
+  const session = sshSessions[sessionId];
+
+  if (!session || !session.isConnected) {
+    return res.status(400).json({
+      error: "SSH session not found or not connected",
+      connected: false,
+    });
+  }
+
+  session.lastActive = Date.now();
+  scheduleSessionCleanup(sessionId); // reschedules the idle-cleanup timer from "now"
+
+  res.json({
+    success: true,
+    connected: true,
+    message: "Session keepalive successful",
+    lastActive: session.lastActive,
+  });
+});
+
+// GET /docker/ssh/status - Check session status
+app.get("/docker/ssh/status", async (req, res) => { // NOTE(review): no userId check — status of any sessionId is queryable; confirm intended
+  const sessionId = req.query.sessionId as string;
+
+  if (!sessionId) {
+    return res.status(400).json({ error: "Session ID is required" });
+  }
+
+  const isConnected = !!sshSessions[sessionId]?.isConnected; // coerce undefined session to false
+
+  res.json({ success: true, connected: isConnected });
+});
+
+// GET /docker/validate/:sessionId - Validate Docker availability
+app.get("/docker/validate/:sessionId", async (req, res) => { // reports installed/daemon-running/permission state of Docker on the remote host
+  const { sessionId } = req.params;
+  const userId = (req as any).userId;
+
+  if (!userId) {
+    return res.status(401).json({ error: "Authentication required" });
+  }
+
+  const session = sshSessions[sessionId];
+
+  if (!session || !session.isConnected) {
+    return res.status(400).json({
+      error: "SSH session not found or not connected",
+    });
+  }
+
+  session.lastActive = Date.now();
+  session.activeOperations++; // guards against idle cleanup while the checks run
+
+  try {
+    try {
+      const versionOutput = await executeDockerCommand( // throws when docker is not installed at all
+        session,
+        "docker --version",
+      );
+      const versionMatch = versionOutput.match(/Docker version ([^\s,]+)/);
+      const version = versionMatch ? versionMatch[1] : "unknown";
+
+      try {
+        await executeDockerCommand(session, "docker ps >/dev/null 2>&1"); // separates "installed" from "daemon reachable/permitted"
+
+        session.activeOperations--;
+        return res.json({
+          available: true,
+          version,
+        });
+      } catch (daemonError) {
+        session.activeOperations--;
+        const errorMsg =
+          daemonError instanceof Error ? daemonError.message : "";
+
+        if (errorMsg.includes("Cannot connect to the Docker daemon")) {
+          return res.json({
+            available: false,
+            error:
+              "Docker daemon is not running. Start it with: sudo systemctl start docker",
+            code: "DAEMON_NOT_RUNNING",
+          });
+        }
+
+        if (errorMsg.includes("permission denied")) {
+          return res.json({
+            available: false,
+            error:
+              "Permission denied. Add your user to the docker group: sudo usermod -aG docker $USER",
+            code: "PERMISSION_DENIED",
+          });
+        }
+
+        return res.json({
+          available: false,
+          error: errorMsg,
+          code: "DOCKER_ERROR",
+        });
+      }
+    } catch (installError) {
+      session.activeOperations--;
+      return res.json({ // 200 with available:false by design — "not installed" is a valid answer, not a server error
+        available: false,
+        error:
+          "Docker is not installed on this host. Please install Docker to use this feature.",
+        code: "NOT_INSTALLED",
+      });
+    }
+  } catch (error) {
+    session.activeOperations--;
+    dockerLogger.error("Docker validation error", error, {
+      operation: "docker_validate",
+      sessionId,
+      userId,
+    });
+
+    res.status(500).json({
+      available: false,
+      error: error instanceof Error ? error.message : "Validation failed",
+    });
+  }
+});
+
+// GET /docker/containers/:sessionId - List all containers
+app.get("/docker/containers/:sessionId", async (req, res) => {
+  const { sessionId } = req.params;
+  const all = req.query.all !== "false"; // defaults to true: include stopped containers unless explicitly ?all=false
+  const userId = (req as any).userId;
+
+  if (!userId) {
+    return res.status(401).json({ error: "Authentication required" });
+  }
+
+  const session = sshSessions[sessionId];
+
+  if (!session || !session.isConnected) {
+    return res.status(400).json({
+      error: "SSH session not found or not connected",
+    });
+  }
+
+  session.lastActive = Date.now();
+  session.activeOperations++; // guards against idle cleanup while the command runs
+
+  try {
+    const allFlag = all ? "-a " : "";
+    const command = `docker ps ${allFlag}--format '{"id":"{{.ID}}","name":"{{.Names}}","image":"{{.Image}}","status":"{{.Status}}","state":"{{.State}}","ports":"{{.Ports}}","created":"{{.CreatedAt}}"}'`; // one JSON object per output line
+
+    const output = await executeDockerCommand(session, command);
+
+    const containers = output
+      .split("\n")
+      .filter((line) => line.trim())
+      .map((line) => {
+        try {
+          return JSON.parse(line);
+        } catch (e) {
+          dockerLogger.warn("Failed to parse container line", { // a malformed line is dropped, not fatal
+            operation: "parse_container",
+            line,
+          });
+          return null;
+        }
+      })
+      .filter((c) => c !== null);
+
+    session.activeOperations--;
+
+    res.json(containers);
+  } catch (error) {
+    session.activeOperations--;
+    dockerLogger.error("Failed to list Docker containers", error, {
+      operation: "list_containers",
+      sessionId,
+      userId,
+    });
+
+    res.status(500).json({
+      error:
+        error instanceof Error ? error.message : "Failed to list containers",
+    });
+  }
+});
+
+// GET /docker/containers/:sessionId/:containerId - Get container details
+app.get("/docker/containers/:sessionId/:containerId", async (req, res) => {
+  const { sessionId, containerId } = req.params;
+  const userId = (req as any).userId;
+
+  if (!userId) {
+    return res.status(401).json({ error: "Authentication required" });
+  }
+
+  const session = sshSessions[sessionId];
+
+  if (!session || !session.isConnected) {
+    return res.status(400).json({
+      error: "SSH session not found or not connected",
+    });
+  }
+
+  session.lastActive = Date.now();
+  session.activeOperations++; // guards against idle cleanup while the command runs
+
+  try {
+    const command = `docker inspect ${containerId}`; // NOTE(review): containerId comes from the URL and is interpolated into a shell command — validate/escape to prevent injection
+    const output = await executeDockerCommand(session, command);
+    const details = JSON.parse(output); // docker inspect returns a JSON array
+
+    session.activeOperations--;
+
+    if (details && details.length > 0) {
+      res.json(details[0]);
+    } else {
+      res.status(404).json({
+        error: "Container not found",
+        code: "CONTAINER_NOT_FOUND",
+      });
+    }
+  } catch (error) {
+    session.activeOperations--;
+
+    const errorMsg = error instanceof Error ? error.message : "";
+    if (errorMsg.includes("No such container")) { // docker CLI's well-known not-found message
+      return res.status(404).json({
+        error: "Container not found",
+        code: "CONTAINER_NOT_FOUND",
+      });
+    }
+
+    dockerLogger.error("Failed to get container details", error, {
+      operation: "get_container_details",
+      sessionId,
+      containerId,
+      userId,
+    });
+
+    res.status(500).json({
+      error: errorMsg || "Failed to get container details",
+    });
+  }
+});
+
+// POST /docker/containers/:sessionId/:containerId/start - Start container
+app.post(
+  "/docker/containers/:sessionId/:containerId/start",
+  async (req, res) => {
+    const { sessionId, containerId } = req.params;
+    const userId = (req as any).userId;
+
+    if (!userId) {
+      return res.status(401).json({ error: "Authentication required" });
+    }
+
+    const session = sshSessions[sessionId];
+
+    if (!session || !session.isConnected) {
+      return res.status(400).json({
+        error: "SSH session not found or not connected",
+      });
+    }
+
+    session.lastActive = Date.now();
+    session.activeOperations++; // guards against idle cleanup while the command runs
+
+    try {
+      await executeDockerCommand(session, `docker start ${containerId}`); // NOTE(review): containerId from URL interpolated into shell — validate/escape
+
+      session.activeOperations--;
+
+      res.json({
+        success: true,
+        message: "Container started successfully",
+      });
+    } catch (error) {
+      session.activeOperations--;
+
+      const errorMsg = error instanceof Error ? error.message : "";
+      if (errorMsg.includes("No such container")) {
+        return res.status(404).json({
+          success: false,
+          error: "Container not found",
+          code: "CONTAINER_NOT_FOUND",
+        });
+      }
+
+      dockerLogger.error("Failed to start container", error, {
+        operation: "start_container",
+        sessionId,
+        containerId,
+        userId,
+      });
+
+      res.status(500).json({
+        success: false,
+        error: errorMsg || "Failed to start container",
+      });
+    }
+  },
+);
+
+// POST /docker/containers/:sessionId/:containerId/stop - Stop container
+app.post(
+  "/docker/containers/:sessionId/:containerId/stop",
+  async (req, res) => {
+    const { sessionId, containerId } = req.params;
+    const userId = (req as any).userId;
+
+    if (!userId) {
+      return res.status(401).json({ error: "Authentication required" });
+    }
+
+    const session = sshSessions[sessionId];
+
+    if (!session || !session.isConnected) {
+      return res.status(400).json({
+        error: "SSH session not found or not connected",
+      });
+    }
+
+    session.lastActive = Date.now();
+    session.activeOperations++; // guards against idle cleanup while the command runs
+
+    try {
+      await executeDockerCommand(session, `docker stop ${containerId}`); // NOTE(review): containerId from URL interpolated into shell — validate/escape
+
+      session.activeOperations--;
+
+      res.json({
+        success: true,
+        message: "Container stopped successfully",
+      });
+    } catch (error) {
+      session.activeOperations--;
+
+      const errorMsg = error instanceof Error ? error.message : "";
+      if (errorMsg.includes("No such container")) {
+        return res.status(404).json({
+          success: false,
+          error: "Container not found",
+          code: "CONTAINER_NOT_FOUND",
+        });
+      }
+
+      dockerLogger.error("Failed to stop container", error, {
+        operation: "stop_container",
+        sessionId,
+        containerId,
+        userId,
+      });
+
+      res.status(500).json({
+        success: false,
+        error: errorMsg || "Failed to stop container",
+      });
+    }
+  },
+);
+
+// POST /docker/containers/:sessionId/:containerId/restart - Restart container
+app.post(
+  "/docker/containers/:sessionId/:containerId/restart",
+  async (req, res) => {
+    const { sessionId, containerId } = req.params;
+    const userId = (req as any).userId;
+
+    if (!userId) {
+      return res.status(401).json({ error: "Authentication required" });
+    }
+
+    const session = sshSessions[sessionId];
+
+    if (!session || !session.isConnected) {
+      return res.status(400).json({
+        error: "SSH session not found or not connected",
+      });
+    }
+
+    session.lastActive = Date.now();
+    session.activeOperations++; // guards against idle cleanup while the command runs
+
+    try {
+      await executeDockerCommand(session, `docker restart ${containerId}`); // NOTE(review): containerId from URL interpolated into shell — validate/escape
+
+      session.activeOperations--;
+
+      res.json({
+        success: true,
+        message: "Container restarted successfully",
+      });
+    } catch (error) {
+      session.activeOperations--;
+
+      const errorMsg = error instanceof Error ? error.message : "";
+      if (errorMsg.includes("No such container")) {
+        return res.status(404).json({
+          success: false,
+          error: "Container not found",
+          code: "CONTAINER_NOT_FOUND",
+        });
+      }
+
+      dockerLogger.error("Failed to restart container", error, {
+        operation: "restart_container",
+        sessionId,
+        containerId,
+        userId,
+      });
+
+      res.status(500).json({
+        success: false,
+        error: errorMsg || "Failed to restart container",
+      });
+    }
+  },
+);
+
+// POST /docker/containers/:sessionId/:containerId/pause - Pause container
+app.post(
+  "/docker/containers/:sessionId/:containerId/pause",
+  async (req, res) => {
+    const { sessionId, containerId } = req.params;
+    const userId = (req as any).userId;
+
+    if (!userId) {
+      return res.status(401).json({ error: "Authentication required" });
+    }
+
+    const session = sshSessions[sessionId];
+
+    if (!session || !session.isConnected) {
+      return res.status(400).json({
+        error: "SSH session not found or not connected",
+      });
+    }
+
+    session.lastActive = Date.now();
+    session.activeOperations++; // guards against idle cleanup while the command runs
+
+    try {
+      await executeDockerCommand(session, `docker pause ${containerId}`); // NOTE(review): containerId from URL interpolated into shell — validate/escape
+
+      session.activeOperations--;
+
+      res.json({
+        success: true,
+        message: "Container paused successfully",
+      });
+    } catch (error) {
+      session.activeOperations--;
+
+      const errorMsg = error instanceof Error ? error.message : "";
+      if (errorMsg.includes("No such container")) {
+        return res.status(404).json({
+          success: false,
+          error: "Container not found",
+          code: "CONTAINER_NOT_FOUND",
+        });
+      }
+
+      dockerLogger.error("Failed to pause container", error, {
+        operation: "pause_container",
+        sessionId,
+        containerId,
+        userId,
+      });
+
+      res.status(500).json({
+        success: false,
+        error: errorMsg || "Failed to pause container",
+      });
+    }
+  },
+);
+
+// POST /docker/containers/:sessionId/:containerId/unpause - Unpause container
+app.post(
+  "/docker/containers/:sessionId/:containerId/unpause",
+  async (req, res) => {
+    const { sessionId, containerId } = req.params;
+    const userId = (req as any).userId;
+
+    if (!userId) {
+      return res.status(401).json({ error: "Authentication required" });
+    }
+
+    const session = sshSessions[sessionId];
+
+    if (!session || !session.isConnected) {
+      return res.status(400).json({
+        error: "SSH session not found or not connected",
+      });
+    }
+
+    session.lastActive = Date.now();
+    session.activeOperations++; // guards against idle cleanup while the command runs
+
+    try {
+      await executeDockerCommand(session, `docker unpause ${containerId}`); // NOTE(review): containerId from URL interpolated into shell — validate/escape
+
+      session.activeOperations--;
+
+      res.json({
+        success: true,
+        message: "Container unpaused successfully",
+      });
+    } catch (error) {
+      session.activeOperations--;
+
+      const errorMsg = error instanceof Error ? error.message : "";
+      if (errorMsg.includes("No such container")) {
+        return res.status(404).json({
+          success: false,
+          error: "Container not found",
+          code: "CONTAINER_NOT_FOUND",
+        });
+      }
+
+      dockerLogger.error("Failed to unpause container", error, {
+        operation: "unpause_container",
+        sessionId,
+        containerId,
+        userId,
+      });
+
+      res.status(500).json({
+        success: false,
+        error: errorMsg || "Failed to unpause container",
+      });
+    }
+  },
+);
+
+// DELETE /docker/containers/:sessionId/:containerId/remove - Remove container
+app.delete(
+  "/docker/containers/:sessionId/:containerId/remove",
+  async (req, res) => {
+    const { sessionId, containerId } = req.params;
+    const force = req.query.force === "true"; // only the literal string "true" enables force-remove
+    const userId = (req as any).userId;
+
+    if (!userId) {
+      return res.status(401).json({ error: "Authentication required" });
+    }
+
+    const session = sshSessions[sessionId];
+
+    if (!session || !session.isConnected) {
+      return res.status(400).json({
+        error: "SSH session not found or not connected",
+      });
+    }
+
+    session.lastActive = Date.now();
+    session.activeOperations++; // guards against idle cleanup while the command runs
+
+    try {
+      const forceFlag = force ? "-f " : "";
+      await executeDockerCommand( // NOTE(review): containerId from URL interpolated into shell — validate/escape
+        session,
+        `docker rm ${forceFlag}${containerId}`,
+      );
+
+      session.activeOperations--;
+
+      res.json({
+        success: true,
+        message: "Container removed successfully",
+      });
+    } catch (error) {
+      session.activeOperations--;
+
+      const errorMsg = error instanceof Error ? error.message : "";
+      if (errorMsg.includes("No such container")) {
+        return res.status(404).json({
+          success: false,
+          error: "Container not found",
+          code: "CONTAINER_NOT_FOUND",
+        });
+      }
+
+      if (errorMsg.includes("cannot remove a running container")) {
+        return res.status(400).json({
+          success: false,
+          error:
+            "Cannot remove a running container. Stop it first or use force.",
+          code: "CONTAINER_RUNNING",
+        });
+      }
+
+      dockerLogger.error("Failed to remove container", error, {
+        operation: "remove_container",
+        sessionId,
+        containerId,
+        userId,
+      });
+
+      res.status(500).json({
+        success: false,
+        error: errorMsg || "Failed to remove container",
+      });
+    }
+  },
+);
+
+// GET /docker/containers/:sessionId/:containerId/logs - Get container logs
+app.get("/docker/containers/:sessionId/:containerId/logs", async (req, res) => {
+  const { sessionId, containerId } = req.params;
+  const tail = req.query.tail ? parseInt(req.query.tail as string, 10) : 100; // explicit radix; NaN falls through the `tail > 0` guard below (no --tail flag)
+  const timestamps = req.query.timestamps === "true";
+  const since = req.query.since as string;
+  const until = req.query.until as string;
+  const userId = (req as any).userId;
+
+  if (!userId) {
+    return res.status(401).json({ error: "Authentication required" });
+  }
+
+  const session = sshSessions[sessionId];
+
+  if (!session || !session.isConnected) {
+    return res.status(400).json({
+      error: "SSH session not found or not connected",
+    });
+  }
+
+  session.lastActive = Date.now();
+  session.activeOperations++; // guards against idle cleanup while the command runs
+
+  try {
+    let command = `docker logs ${containerId}`; // NOTE(review): containerId from URL interpolated into shell — validate/escape
+
+    if (tail && tail > 0) {
+      command += ` --tail ${tail}`;
+    }
+
+    if (timestamps) {
+      command += " --timestamps";
+    }
+
+    if (since) {
+      command += ` --since ${since}`; // NOTE(review): since/until are raw query strings — validate/escape as well
+    }
+
+    if (until) {
+      command += ` --until ${until}`;
+    }
+
+    const logs = await executeDockerCommand(session, command);
+
+    session.activeOperations--;
+
+    res.json({
+      success: true,
+      logs,
+    });
+  } catch (error) {
+    session.activeOperations--;
+
+    const errorMsg = error instanceof Error ? error.message : "";
+    if (errorMsg.includes("No such container")) {
+      return res.status(404).json({
+        success: false,
+        error: "Container not found",
+        code: "CONTAINER_NOT_FOUND",
+      });
+    }
+
+    dockerLogger.error("Failed to get container logs", error, {
+      operation: "get_logs",
+      sessionId,
+      containerId,
+      userId,
+    });
+
+    res.status(500).json({
+      success: false,
+      error: errorMsg || "Failed to get container logs",
+    });
+  }
+});
+
+// GET /docker/containers/:sessionId/:containerId/stats - Get container stats
+app.get(
+  "/docker/containers/:sessionId/:containerId/stats",
+  async (req, res) => {
+    const { sessionId, containerId } = req.params;
+    const userId = (req as any).userId;
+
+    if (!userId) {
+      return res.status(401).json({ error: "Authentication required" });
+    }
+
+    const session = sshSessions[sessionId];
+
+    if (!session || !session.isConnected) {
+      return res.status(400).json({
+        error: "SSH session not found or not connected",
+      });
+    }
+
+    session.lastActive = Date.now();
+    session.activeOperations++; // guards against idle cleanup while the command runs
+
+    try {
+      const command = `docker stats ${containerId} --no-stream --format '{"cpu":"{{.CPUPerc}}","memory":"{{.MemUsage}}","memoryPercent":"{{.MemPerc}}","netIO":"{{.NetIO}}","blockIO":"{{.BlockIO}}","pids":"{{.PIDs}}"}'`; // NOTE(review): containerId from URL interpolated into shell — validate/escape
+
+      const output = await executeDockerCommand(session, command);
+      const rawStats = JSON.parse(output.trim()); // --no-stream yields a single JSON line
+
+      const memoryParts = rawStats.memory.split(" / "); // docker formats pairs as "used / limit"
+      const memoryUsed = memoryParts[0]?.trim() || "0B";
+      const memoryLimit = memoryParts[1]?.trim() || "0B";
+
+      const netIOParts = rawStats.netIO.split(" / ");
+      const netInput = netIOParts[0]?.trim() || "0B";
+      const netOutput = netIOParts[1]?.trim() || "0B";
+
+      const blockIOParts = rawStats.blockIO.split(" / ");
+      const blockRead = blockIOParts[0]?.trim() || "0B";
+      const blockWrite = blockIOParts[1]?.trim() || "0B";
+
+      const stats = { // all values remain human-readable strings (e.g. "1.5MiB"), not numbers
+        cpu: rawStats.cpu,
+        memoryUsed,
+        memoryLimit,
+        memoryPercent: rawStats.memoryPercent,
+        netInput,
+        netOutput,
+        blockRead,
+        blockWrite,
+        pids: rawStats.pids,
+      };
+
+      session.activeOperations--;
+
+      res.json(stats);
+    } catch (error) {
+      session.activeOperations--;
+
+      const errorMsg = error instanceof Error ? error.message : "";
+      if (errorMsg.includes("No such container")) {
+        return res.status(404).json({
+          success: false,
+          error: "Container not found",
+          code: "CONTAINER_NOT_FOUND",
+        });
+      }
+
+      dockerLogger.error("Failed to get container stats", error, {
+        operation: "get_stats",
+        sessionId,
+        containerId,
+        userId,
+      });
+
+      res.status(500).json({
+        success: false,
+        error: errorMsg || "Failed to get container stats",
+      });
+    }
+  },
+);
+
+const PORT = 30007; // hard-coded Docker-backend port (activity logger lives on 30006)
+
+app.listen(PORT, async () => {
+  try {
+    await authManager.initialize();
+  } catch (err) { // server keeps running on init failure; requests will then fail auth — TODO confirm intended
+    dockerLogger.error("Failed to initialize Docker backend", err, {
+      operation: "startup",
+    });
+  }
+});
+
+process.on("SIGINT", () => {
+  Object.keys(sshSessions).forEach((sessionId) => {
+    cleanupSession(sessionId); // close every live SSH client before exiting
+  });
+  process.exit(0);
+});
+
+process.on("SIGTERM", () => { // same teardown for orchestrator-initiated shutdown
+  Object.keys(sshSessions).forEach((sessionId) => {
+    cleanupSession(sessionId);
+  });
+  process.exit(0);
+});
diff --git a/src/backend/ssh/file-manager.ts b/src/backend/ssh/file-manager.ts
index b6344b68..2769ed8c 100644
--- a/src/backend/ssh/file-manager.ts
+++ b/src/backend/ssh/file-manager.ts
@@ -10,6 +10,7 @@ import { fileLogger, sshLogger } from "../utils/logger.js";
import { SimpleDBOps } from "../utils/simple-db-ops.js";
import { AuthManager } from "../utils/auth-manager.js";
import type { AuthenticatedRequest } from "../../types/index.js";
+import { createSocks5Connection } from "../utils/socks5-helper.js";
function isExecutableFile(permissions: string, fileName: string): boolean {
const hasExecutePermission =
@@ -278,6 +279,7 @@ interface PendingTOTPSession {
prompts?: Array<{ prompt: string; echo: boolean }>;
totpPromptIndex?: number;
resolvedPassword?: string;
+ totpAttempts: number;
}
const sshSessions: Record = {};
@@ -356,6 +358,12 @@ app.post("/ssh/file_manager/ssh/connect", async (req, res) => {
userProvidedPassword,
forceKeyboardInteractive,
jumpHosts,
+ useSocks5,
+ socks5Host,
+ socks5Port,
+ socks5Username,
+ socks5Password,
+ socks5ProxyChain,
} = req.body;
const userId = (req as AuthenticatedRequest).userId;
@@ -382,6 +390,15 @@ app.post("/ssh/file_manager/ssh/connect", async (req, res) => {
if (sshSessions[sessionId]?.isConnected) {
cleanupSession(sessionId);
}
+
+ // Clean up any stale pending TOTP sessions
+ if (pendingTOTPSessions[sessionId]) {
+ try {
+ pendingTOTPSessions[sessionId].client.end();
+ } catch {}
+ delete pendingTOTPSessions[sessionId];
+ }
+
const client = new SSHClient();
let resolvedCredentials = { password, sshKey, keyPassword, authType };
@@ -545,9 +562,7 @@ app.post("/ssh/file_manager/ssh/connect", async (req, res) => {
.json({ error: "Password required for password authentication" });
}
- if (!forceKeyboardInteractive) {
- config.password = resolvedCredentials.password;
- }
+ config.password = resolvedCredentials.password;
} else if (resolvedCredentials.authType === "none") {
} else {
fileLogger.warn(
@@ -713,6 +728,7 @@ app.post("/ssh/file_manager/ssh/connect", async (req, res) => {
prompts,
totpPromptIndex,
resolvedPassword: resolvedCredentials.password,
+ totpAttempts: 0,
};
res.json({
@@ -785,6 +801,7 @@ app.post("/ssh/file_manager/ssh/connect", async (req, res) => {
prompts,
totpPromptIndex: passwordPromptIndex,
resolvedPassword: resolvedCredentials.password,
+ totpAttempts: 0,
};
res.json({
@@ -808,7 +825,47 @@ app.post("/ssh/file_manager/ssh/connect", async (req, res) => {
},
);
- if (jumpHosts && jumpHosts.length > 0 && userId) {
+ if (
+ useSocks5 &&
+ (socks5Host || (socks5ProxyChain && (socks5ProxyChain as any).length > 0))
+ ) {
+ try {
+ const socks5Socket = await createSocks5Connection(ip, port, {
+ useSocks5,
+ socks5Host,
+ socks5Port,
+ socks5Username,
+ socks5Password,
+ socks5ProxyChain: socks5ProxyChain as any,
+ });
+
+ if (socks5Socket) {
+ config.sock = socks5Socket;
+ client.connect(config);
+ return;
+ } else {
+ fileLogger.error("SOCKS5 socket is null for SFTP", undefined, {
+ operation: "sftp_socks5_socket_null",
+ sessionId,
+ });
+ }
+ } catch (socks5Error) {
+ fileLogger.error("SOCKS5 connection failed", socks5Error, {
+ operation: "socks5_connect",
+ sessionId,
+ hostId,
+ proxyHost: socks5Host,
+ proxyPort: socks5Port || 1080,
+ });
+ return res.status(500).json({
+ error:
+ "SOCKS5 proxy connection failed: " +
+ (socks5Error instanceof Error
+ ? socks5Error.message
+ : "Unknown error"),
+ });
+ }
+ } else if (jumpHosts && jumpHosts.length > 0 && userId) {
try {
const jumpClient = await createJumpHostChain(jumpHosts, userId);
@@ -891,9 +948,7 @@ app.post("/ssh/file_manager/ssh/connect-totp", async (req, res) => {
delete pendingTOTPSessions[sessionId];
try {
session.client.end();
- } catch (error) {
- sshLogger.debug("Operation failed, continuing", { error });
- }
+ } catch (error) {}
fileLogger.warn("TOTP session timeout before code submission", {
operation: "file_totp_verify",
sessionId,
@@ -1385,7 +1440,7 @@ app.post("/ssh/file_manager/ssh/writeFile", async (req, res) => {
let fileBuffer;
try {
if (typeof content === "string") {
- fileBuffer = Buffer.from(content, "utf8");
+ fileBuffer = Buffer.from(content, "base64");
} else if (Buffer.isBuffer(content)) {
fileBuffer = content;
} else {
@@ -1461,7 +1516,22 @@ app.post("/ssh/file_manager/ssh/writeFile", async (req, res) => {
const tryFallbackMethod = () => {
try {
- const base64Content = Buffer.from(content, "utf8").toString("base64");
+ let contentBuffer: Buffer;
+ if (typeof content === "string") {
+ try {
+ contentBuffer = Buffer.from(content, "base64");
+ if (contentBuffer.toString("base64") !== content) {
+ contentBuffer = Buffer.from(content, "utf8");
+ }
+ } catch {
+ contentBuffer = Buffer.from(content, "utf8");
+ }
+ } else if (Buffer.isBuffer(content)) {
+ contentBuffer = content;
+ } else {
+ contentBuffer = Buffer.from(content);
+ }
+ const base64Content = contentBuffer.toString("base64");
const escapedPath = filePath.replace(/'/g, "'\"'\"'");
const writeCommand = `echo '${base64Content}' | base64 -d > '${escapedPath}' && echo "SUCCESS"`;
@@ -1579,7 +1649,7 @@ app.post("/ssh/file_manager/ssh/uploadFile", async (req, res) => {
let fileBuffer;
try {
if (typeof content === "string") {
- fileBuffer = Buffer.from(content, "utf8");
+ fileBuffer = Buffer.from(content, "base64");
} else if (Buffer.isBuffer(content)) {
fileBuffer = content;
} else {
@@ -1662,7 +1732,22 @@ app.post("/ssh/file_manager/ssh/uploadFile", async (req, res) => {
const tryFallbackMethod = () => {
try {
- const base64Content = Buffer.from(content, "utf8").toString("base64");
+ let contentBuffer: Buffer;
+ if (typeof content === "string") {
+ try {
+ contentBuffer = Buffer.from(content, "base64");
+ if (contentBuffer.toString("base64") !== content) {
+ contentBuffer = Buffer.from(content, "utf8");
+ }
+ } catch {
+ contentBuffer = Buffer.from(content, "utf8");
+ }
+ } else if (Buffer.isBuffer(content)) {
+ contentBuffer = content;
+ } else {
+ contentBuffer = Buffer.from(content);
+ }
+ const base64Content = contentBuffer.toString("base64");
const chunkSize = 1000000;
const chunks = [];
@@ -2940,21 +3025,10 @@ app.post("/ssh/file_manager/ssh/extractArchive", async (req, res) => {
let errorOutput = "";
- stream.on("data", (data: Buffer) => {
- fileLogger.debug("Extract stdout", {
- operation: "extract_archive",
- sessionId,
- output: data.toString(),
- });
- });
+ stream.on("data", (data: Buffer) => {});
stream.stderr.on("data", (data: Buffer) => {
errorOutput += data.toString();
- fileLogger.debug("Extract stderr", {
- operation: "extract_archive",
- sessionId,
- error: data.toString(),
- });
});
stream.on("close", (code: number) => {
@@ -3132,21 +3206,10 @@ app.post("/ssh/file_manager/ssh/compressFiles", async (req, res) => {
let errorOutput = "";
- stream.on("data", (data: Buffer) => {
- fileLogger.debug("Compress stdout", {
- operation: "compress_files",
- sessionId,
- output: data.toString(),
- });
- });
+ stream.on("data", (data: Buffer) => {});
stream.stderr.on("data", (data: Buffer) => {
errorOutput += data.toString();
- fileLogger.debug("Compress stderr", {
- operation: "compress_files",
- sessionId,
- error: data.toString(),
- });
});
stream.on("close", (code: number) => {
diff --git a/src/backend/ssh/server-stats.ts b/src/backend/ssh/server-stats.ts
index b4c958c9..e17f0491 100644
--- a/src/backend/ssh/server-stats.ts
+++ b/src/backend/ssh/server-stats.ts
@@ -9,7 +9,8 @@ import { eq, and } from "drizzle-orm";
import { statsLogger, sshLogger } from "../utils/logger.js";
import { SimpleDBOps } from "../utils/simple-db-ops.js";
import { AuthManager } from "../utils/auth-manager.js";
-import type { AuthenticatedRequest } from "../../types/index.js";
+import { PermissionManager } from "../utils/permission-manager.js";
+import type { AuthenticatedRequest, ProxyNode } from "../../types/index.js";
import { collectCpuMetrics } from "./widgets/cpu-collector.js";
import { collectMemoryMetrics } from "./widgets/memory-collector.js";
import { collectDiskMetrics } from "./widgets/disk-collector.js";
@@ -18,6 +19,7 @@ import { collectUptimeMetrics } from "./widgets/uptime-collector.js";
import { collectProcessesMetrics } from "./widgets/processes-collector.js";
import { collectSystemMetrics } from "./widgets/system-collector.js";
import { collectLoginStats } from "./widgets/login-stats-collector.js";
+import { createSocks5Connection } from "../utils/socks5-helper.js";
async function resolveJumpHost(
hostId: number,
@@ -193,6 +195,82 @@ interface PooledConnection {
hostKey: string;
}
+interface MetricsSession {
+ client: Client;
+ isConnected: boolean;
+ lastActive: number;
+ timeout?: NodeJS.Timeout;
+ activeOperations: number;
+ hostId: number;
+ userId: string;
+}
+
+interface PendingTOTPSession {
+ client: Client;
+ finish: (responses: string[]) => void;
+ config: ConnectConfig;
+ createdAt: number;
+ sessionId: string;
+ hostId: number;
+ userId: string;
+ prompts?: Array<{ prompt: string; echo: boolean }>;
+ totpPromptIndex?: number;
+ resolvedPassword?: string;
+ totpAttempts: number;
+}
+
+interface MetricsViewer {
+ sessionId: string;
+ userId: string;
+ hostId: number;
+ lastHeartbeat: number;
+}
+
+const metricsSessions: Record = {};
+const pendingTOTPSessions: Record = {};
+
+function cleanupMetricsSession(sessionId: string) {
+ const session = metricsSessions[sessionId];
+ if (session) {
+ if (session.activeOperations > 0) {
+ statsLogger.warn(
+ `Deferring metrics session cleanup - ${session.activeOperations} active operations`,
+ {
+ operation: "cleanup_deferred",
+ sessionId,
+ activeOperations: session.activeOperations,
+ },
+ );
+ scheduleMetricsSessionCleanup(sessionId);
+ return;
+ }
+
+ try {
+ session.client.end();
+ } catch (error) {}
+ clearTimeout(session.timeout);
+ delete metricsSessions[sessionId];
+ }
+}
+
+function scheduleMetricsSessionCleanup(sessionId: string) {
+ const session = metricsSessions[sessionId];
+ if (session) {
+ if (session.timeout) clearTimeout(session.timeout);
+
+ session.timeout = setTimeout(
+ () => {
+ cleanupMetricsSession(sessionId);
+ },
+ 30 * 60 * 1000,
+ );
+ }
+}
+
+function getSessionKey(hostId: number, userId: string): string {
+ return `${userId}:${hostId}`;
+}
+
class SSHConnectionPool {
private connections = new Map();
private maxConnectionsPerHost = 3;
@@ -204,23 +282,52 @@ class SSHConnectionPool {
() => {
this.cleanup();
},
- 5 * 60 * 1000,
+ 2 * 60 * 1000,
);
}
private getHostKey(host: SSHHostWithCredentials): string {
- return `${host.ip}:${host.port}:${host.username}`;
+ const socks5Key = host.useSocks5
+ ? `:socks5:${host.socks5Host}:${host.socks5Port}:${JSON.stringify(host.socks5ProxyChain || [])}`
+ : "";
+ return `${host.ip}:${host.port}:${host.username}${socks5Key}`;
+ }
+
+ private isConnectionHealthy(client: Client): boolean {
+ try {
+ const sock = (client as any)._sock;
+ if (sock && (sock.destroyed || !sock.writable)) {
+ return false;
+ }
+ return true;
+ } catch (error) {
+ return false;
+ }
}
async getConnection(host: SSHHostWithCredentials): Promise {
const hostKey = this.getHostKey(host);
- const connections = this.connections.get(hostKey) || [];
+ let connections = this.connections.get(hostKey) || [];
const available = connections.find((conn) => !conn.inUse);
if (available) {
- available.inUse = true;
- available.lastUsed = Date.now();
- return available.client;
+ if (!this.isConnectionHealthy(available.client)) {
+ statsLogger.warn("Removing unhealthy connection from pool", {
+ operation: "remove_dead_connection",
+ hostKey,
+ });
+ try {
+ available.client.end();
+ } catch (error) {
+ // Ignore cleanup errors
+ }
+ connections = connections.filter((c) => c !== available);
+ this.connections.set(hostKey, connections);
+ } else {
+ available.inUse = true;
+ available.lastUsed = Date.now();
+ return available.client;
+ }
}
if (connections.length < this.maxConnectionsPerHost) {
@@ -255,6 +362,7 @@ class SSHConnectionPool {
host: SSHHostWithCredentials,
): Promise {
return new Promise(async (resolve, reject) => {
+ const config = buildSshConfig(host);
const client = new Client();
const timeout = setTimeout(() => {
client.end();
@@ -280,20 +388,33 @@ class SSHConnectionPool {
prompts: Array<{ prompt: string; echo: boolean }>,
finish: (responses: string[]) => void,
) => {
- const totpPrompt = prompts.find((p) =>
+ const totpPromptIndex = prompts.findIndex((p) =>
/verification code|verification_code|token|otp|2fa|authenticator|google.*auth/i.test(
p.prompt,
),
);
- if (totpPrompt) {
- authFailureTracker.recordFailure(host.id, "TOTP", true);
- client.end();
- reject(
- new Error(
- "TOTP authentication required but not supported in Server Stats",
- ),
- );
+ if (totpPromptIndex !== -1) {
+ const sessionId = `totp-${Date.now()}-${Math.random().toString(36).substr(2, 9)}`;
+
+ pendingTOTPSessions[sessionId] = {
+ client,
+ finish,
+ config,
+ createdAt: Date.now(),
+ sessionId,
+ hostId: host.id,
+ userId: host.userId!,
+ prompts: prompts.map((p) => ({
+ prompt: p.prompt,
+ echo: p.echo ?? false,
+ })),
+ totpPromptIndex,
+ resolvedPassword: host.password,
+ totpAttempts: 0,
+ };
+
+ return;
} else if (host.password) {
const responses = prompts.map((p) => {
if (/password/i.test(p.prompt)) {
@@ -309,7 +430,54 @@ class SSHConnectionPool {
);
try {
- const config = buildSshConfig(host);
+ if (
+ host.useSocks5 &&
+ (host.socks5Host ||
+ (host.socks5ProxyChain && host.socks5ProxyChain.length > 0))
+ ) {
+ try {
+ const socks5Socket = await createSocks5Connection(
+ host.ip,
+ host.port,
+ {
+ useSocks5: host.useSocks5,
+ socks5Host: host.socks5Host,
+ socks5Port: host.socks5Port,
+ socks5Username: host.socks5Username,
+ socks5Password: host.socks5Password,
+ socks5ProxyChain: host.socks5ProxyChain,
+ },
+ );
+
+ if (socks5Socket) {
+ config.sock = socks5Socket;
+ client.connect(config);
+ return;
+ } else {
+ statsLogger.error("SOCKS5 socket is null", undefined, {
+ operation: "socks5_socket_null",
+ hostIp: host.ip,
+ });
+ }
+ } catch (socks5Error) {
+ clearTimeout(timeout);
+ statsLogger.error("SOCKS5 connection error", socks5Error, {
+ operation: "socks5_connection_error",
+ hostIp: host.ip,
+ errorMessage:
+ socks5Error instanceof Error ? socks5Error.message : "Unknown",
+ });
+ reject(
+ new Error(
+ "SOCKS5 proxy connection failed: " +
+ (socks5Error instanceof Error
+ ? socks5Error.message
+ : "Unknown error"),
+ ),
+ );
+ return;
+ }
+ }
if (host.jumpHosts && host.jumpHosts.length > 0 && host.userId) {
const jumpClient = await createJumpHostChain(
@@ -364,9 +532,28 @@ class SSHConnectionPool {
}
}
+ clearHostConnections(host: SSHHostWithCredentials): void {
+ const hostKey = this.getHostKey(host);
+ const connections = this.connections.get(hostKey) || [];
+
+ for (const conn of connections) {
+ try {
+ conn.client.end();
+ } catch (error) {
+ statsLogger.error("Error closing connection during cleanup", error, {
+ operation: "clear_connection_error",
+ });
+ }
+ }
+
+ this.connections.delete(hostKey);
+ }
+
private cleanup(): void {
const now = Date.now();
const maxAge = 10 * 60 * 1000;
+ let totalCleaned = 0;
+ let totalUnhealthy = 0;
for (const [hostKey, connections] of this.connections.entries()) {
const activeConnections = connections.filter((conn) => {
@@ -374,6 +561,19 @@ class SSHConnectionPool {
try {
conn.client.end();
} catch (error) {}
+ totalCleaned++;
+ return false;
+ }
+ if (!this.isConnectionHealthy(conn.client)) {
+ statsLogger.warn("Removing unhealthy connection during cleanup", {
+ operation: "cleanup_unhealthy",
+ hostKey,
+ inUse: conn.inUse,
+ });
+ try {
+ conn.client.end();
+ } catch (error) {}
+ totalUnhealthy++;
return false;
}
return true;
@@ -387,6 +587,26 @@ class SSHConnectionPool {
}
}
+ clearAllConnections(): void {
+ for (const [hostKey, connections] of this.connections.entries()) {
+ for (const conn of connections) {
+ try {
+ conn.client.end();
+ } catch (error) {
+ statsLogger.error(
+ "Error closing connection during full cleanup",
+ error,
+ {
+ operation: "clear_all_error",
+ hostKey,
+ },
+ );
+ }
+ }
+ }
+ this.connections.clear();
+ }
+
destroy(): void {
clearInterval(this.cleanupInterval);
for (const connections of this.connections.values()) {
@@ -403,18 +623,34 @@ class SSHConnectionPool {
class RequestQueue {
private queues = new Map Promise>>();
private processing = new Set();
+ private requestTimeout = 60000;
async queueRequest(hostId: number, request: () => Promise): Promise {
- return new Promise((resolve, reject) => {
- const queue = this.queues.get(hostId) || [];
- queue.push(async () => {
+ return new Promise((resolve, reject) => {
+ const wrappedRequest = async () => {
try {
- const result = await request();
+ const result = await Promise.race([
+ request(),
+ new Promise((_, rej) =>
+ setTimeout(
+ () =>
+ rej(
+ new Error(
+ `Request timeout after ${this.requestTimeout}ms for host ${hostId}`,
+ ),
+ ),
+ this.requestTimeout,
+ ),
+ ),
+ ]);
resolve(result);
} catch (error) {
reject(error);
}
- });
+ };
+
+ const queue = this.queues.get(hostId) || [];
+ queue.push(wrappedRequest);
this.queues.set(hostId, queue);
this.processQueue(hostId);
});
@@ -436,7 +672,8 @@ class RequestQueue {
}
this.processing.delete(hostId);
- if (queue.length > 0) {
+ const currentQueue = this.queues.get(hostId);
+ if (currentQueue && currentQueue.length > 0) {
this.processQueue(hostId);
}
}
@@ -487,7 +724,7 @@ interface AuthFailureRecord {
class AuthFailureTracker {
private failures = new Map();
private maxRetries = 3;
- private backoffBase = 60000;
+ private backoffBase = 5000;
recordFailure(
hostId: number,
@@ -571,11 +808,75 @@ class AuthFailureTracker {
}
}
+class PollingBackoff {
+ private failures = new Map();
+ private baseDelay = 30000;
+ private maxDelay = 600000;
+ private maxRetries = 5;
+
+ recordFailure(hostId: number): void {
+ const existing = this.failures.get(hostId) || { count: 0, nextRetry: 0 };
+ const delay = Math.min(
+ this.baseDelay * Math.pow(2, existing.count),
+ this.maxDelay,
+ );
+ this.failures.set(hostId, {
+ count: existing.count + 1,
+ nextRetry: Date.now() + delay,
+ });
+ }
+
+ shouldSkip(hostId: number): boolean {
+ const backoff = this.failures.get(hostId);
+ if (!backoff) return false;
+
+ if (backoff.count >= this.maxRetries) {
+ return true;
+ }
+
+ return Date.now() < backoff.nextRetry;
+ }
+
+ getBackoffInfo(hostId: number): string | null {
+ const backoff = this.failures.get(hostId);
+ if (!backoff) return null;
+
+ if (backoff.count >= this.maxRetries) {
+ return `Max retries exceeded (${backoff.count} failures) - polling suspended`;
+ }
+
+ const remainingMs = backoff.nextRetry - Date.now();
+ if (remainingMs > 0) {
+ const remainingSec = Math.ceil(remainingMs / 1000);
+ return `Retry in ${remainingSec}s (attempt ${backoff.count}/${this.maxRetries})`;
+ }
+
+ return null;
+ }
+
+ reset(hostId: number): void {
+ this.failures.delete(hostId);
+ }
+
+ cleanup(): void {
+ const maxAge = 60 * 60 * 1000;
+ const now = Date.now();
+
+ for (const [hostId, backoff] of this.failures.entries()) {
+ if (backoff.count < this.maxRetries && now - backoff.nextRetry > maxAge) {
+ this.failures.delete(hostId);
+ }
+ }
+ }
+}
+
const connectionPool = new SSHConnectionPool();
const requestQueue = new RequestQueue();
const metricsCache = new MetricsCache();
const authFailureTracker = new AuthFailureTracker();
+const pollingBackoff = new PollingBackoff();
const authManager = AuthManager.getInstance();
+const permissionManager = PermissionManager.getInstance();
type HostStatus = "online" | "offline";
@@ -604,6 +905,13 @@ interface SSHHostWithCredentials {
createdAt: string;
updatedAt: string;
userId: string;
+
+ useSocks5?: boolean;
+ socks5Host?: string;
+ socks5Port?: number;
+ socks5Username?: string;
+ socks5Password?: string;
+ socks5ProxyChain?: ProxyNode[];
}
type StatusEntry = {
@@ -632,6 +940,7 @@ interface HostPollingConfig {
statsConfig: StatsConfig;
statusTimer?: NodeJS.Timeout;
metricsTimer?: NodeJS.Timeout;
+ viewerUserId?: string;
}
class PollingManager {
@@ -644,6 +953,15 @@ class PollingManager {
timestamp: number;
}
>();
+ private activeViewers = new Map>();
+ private viewerDetails = new Map();
+ private viewerCleanupInterval: NodeJS.Timeout;
+
+ constructor() {
+ this.viewerCleanupInterval = setInterval(() => {
+ this.cleanupInactiveViewers();
+ }, 60000);
+ }
parseStatsConfig(statsConfigStr?: string | StatsConfig): StatsConfig {
if (!statsConfigStr) {
@@ -680,8 +998,13 @@ class PollingManager {
return result;
}
- async startPollingForHost(host: SSHHostWithCredentials): Promise {
+ async startPollingForHost(
+ host: SSHHostWithCredentials,
+ options?: { statusOnly?: boolean; viewerUserId?: string },
+ ): Promise {
const statsConfig = this.parseStatsConfig(host.statsConfig);
+ const statusOnly = options?.statusOnly ?? false;
+ const viewerUserId = options?.viewerUserId;
const existingConfig = this.pollingConfigs.get(host.id);
@@ -706,32 +1029,33 @@ class PollingManager {
const config: HostPollingConfig = {
host,
statsConfig,
+ viewerUserId,
};
if (statsConfig.statusCheckEnabled) {
const intervalMs = statsConfig.statusCheckInterval * 1000;
- this.pollHostStatus(host);
+ this.pollHostStatus(host, viewerUserId);
config.statusTimer = setInterval(() => {
const latestConfig = this.pollingConfigs.get(host.id);
if (latestConfig && latestConfig.statsConfig.statusCheckEnabled) {
- this.pollHostStatus(latestConfig.host);
+ this.pollHostStatus(latestConfig.host, latestConfig.viewerUserId);
}
}, intervalMs);
} else {
this.statusStore.delete(host.id);
}
- if (statsConfig.metricsEnabled) {
+ if (!statusOnly && statsConfig.metricsEnabled) {
const intervalMs = statsConfig.metricsInterval * 1000;
- this.pollHostMetrics(host);
+ await this.pollHostMetrics(host, viewerUserId);
config.metricsTimer = setInterval(() => {
const latestConfig = this.pollingConfigs.get(host.id);
if (latestConfig && latestConfig.statsConfig.metricsEnabled) {
- this.pollHostMetrics(latestConfig.host);
+ this.pollHostMetrics(latestConfig.host, latestConfig.viewerUserId);
}
}, intervalMs);
} else {
@@ -741,48 +1065,80 @@ class PollingManager {
this.pollingConfigs.set(host.id, config);
}
- private async pollHostStatus(host: SSHHostWithCredentials): Promise {
+ private async pollHostStatus(
+ host: SSHHostWithCredentials,
+ viewerUserId?: string,
+ ): Promise {
+ const userId = viewerUserId || host.userId;
+ const refreshedHost = await fetchHostById(host.id, userId);
+ if (!refreshedHost) {
+ return;
+ }
+
try {
- const isOnline = await tcpPing(host.ip, host.port, 5000);
+ const isOnline = await tcpPing(
+ refreshedHost.ip,
+ refreshedHost.port,
+ 5000,
+ );
const statusEntry: StatusEntry = {
status: isOnline ? "online" : "offline",
lastChecked: new Date().toISOString(),
};
- this.statusStore.set(host.id, statusEntry);
+ this.statusStore.set(refreshedHost.id, statusEntry);
} catch (error) {
const statusEntry: StatusEntry = {
status: "offline",
lastChecked: new Date().toISOString(),
};
- this.statusStore.set(host.id, statusEntry);
+ this.statusStore.set(refreshedHost.id, statusEntry);
}
}
- private async pollHostMetrics(host: SSHHostWithCredentials): Promise {
- const config = this.pollingConfigs.get(host.id);
+ private async pollHostMetrics(
+ host: SSHHostWithCredentials,
+ viewerUserId?: string,
+ ): Promise {
+ const userId = viewerUserId || host.userId;
+ const refreshedHost = await fetchHostById(host.id, userId);
+ if (!refreshedHost) {
+ return;
+ }
+
+ const config = this.pollingConfigs.get(refreshedHost.id);
if (!config || !config.statsConfig.metricsEnabled) {
return;
}
- const currentHost = config.host;
+ const hasExistingMetrics = this.metricsStore.has(refreshedHost.id);
+
+ if (hasExistingMetrics && pollingBackoff.shouldSkip(host.id)) {
+ const backoffInfo = pollingBackoff.getBackoffInfo(host.id);
+ return;
+ }
try {
- const metrics = await collectMetrics(currentHost);
- this.metricsStore.set(currentHost.id, {
+ const metrics = await collectMetrics(refreshedHost);
+ this.metricsStore.set(refreshedHost.id, {
data: metrics,
timestamp: Date.now(),
});
+ pollingBackoff.reset(refreshedHost.id);
} catch (error) {
const errorMessage =
error instanceof Error ? error.message : String(error);
- const latestConfig = this.pollingConfigs.get(currentHost.id);
+ pollingBackoff.recordFailure(refreshedHost.id);
+
+ const latestConfig = this.pollingConfigs.get(refreshedHost.id);
if (latestConfig && latestConfig.statsConfig.metricsEnabled) {
- statsLogger.warn("Failed to collect metrics for host", {
+ const backoffInfo = pollingBackoff.getBackoffInfo(refreshedHost.id);
+ statsLogger.error("Failed to collect metrics for host", {
operation: "metrics_poll_failed",
- hostId: currentHost.id,
- hostName: currentHost.name,
+ hostId: refreshedHost.id,
+ hostName: refreshedHost.name,
error: errorMessage,
+ backoff: backoffInfo,
});
}
}
@@ -807,6 +1163,14 @@ class PollingManager {
}
}
+ stopMetricsOnly(hostId: number): void {
+ const config = this.pollingConfigs.get(hostId);
+ if (config?.metricsTimer) {
+ clearInterval(config.metricsTimer);
+ config.metricsTimer = undefined;
+ }
+ }
+
getStatus(hostId: number): StatusEntry | undefined {
return this.statusStore.get(hostId);
}
@@ -827,7 +1191,7 @@ class PollingManager {
const hosts = await fetchAllHosts(userId);
for (const host of hosts) {
- await this.startPollingForHost(host);
+ await this.startPollingForHost(host, { statusOnly: true });
}
}
@@ -847,11 +1211,91 @@ class PollingManager {
}
for (const host of hosts) {
- await this.startPollingForHost(host);
+ await this.startPollingForHost(host, { statusOnly: true });
+ }
+ }
+
+ registerViewer(hostId: number, sessionId: string, userId: string): void {
+ if (!this.activeViewers.has(hostId)) {
+ this.activeViewers.set(hostId, new Set());
+ }
+ this.activeViewers.get(hostId)!.add(sessionId);
+
+ this.viewerDetails.set(sessionId, {
+ sessionId,
+ userId,
+ hostId,
+ lastHeartbeat: Date.now(),
+ });
+
+ if (this.activeViewers.get(hostId)!.size === 1) {
+ this.startMetricsForHost(hostId, userId);
+ }
+ }
+
+ updateHeartbeat(sessionId: string): boolean {
+ const viewer = this.viewerDetails.get(sessionId);
+ if (viewer) {
+ viewer.lastHeartbeat = Date.now();
+ return true;
+ }
+ return false;
+ }
+
+ unregisterViewer(hostId: number, sessionId: string): void {
+ const viewers = this.activeViewers.get(hostId);
+ if (viewers) {
+ viewers.delete(sessionId);
+
+ if (viewers.size === 0) {
+ this.activeViewers.delete(hostId);
+ this.stopMetricsForHost(hostId);
+ }
+ }
+ this.viewerDetails.delete(sessionId);
+ }
+
+ private async startMetricsForHost(
+ hostId: number,
+ userId: string,
+ ): Promise {
+ try {
+ const host = await fetchHostById(hostId, userId);
+ if (host) {
+ await this.startPollingForHost(host, { viewerUserId: userId });
+ }
+ } catch (error) {
+ statsLogger.error("Failed to start metrics polling", {
+ operation: "start_metrics_error",
+ hostId,
+ error: error instanceof Error ? error.message : String(error),
+ });
+ }
+ }
+
+ private stopMetricsForHost(hostId: number): void {
+ this.stopMetricsOnly(hostId);
+ }
+
+ private cleanupInactiveViewers(): void {
+ const now = Date.now();
+ const maxInactivity = 120000;
+
+ for (const [sessionId, viewer] of this.viewerDetails.entries()) {
+ if (now - viewer.lastHeartbeat > maxInactivity) {
+ statsLogger.warn("Cleaning up inactive viewer", {
+ operation: "cleanup_inactive_viewer",
+ sessionId,
+ hostId: viewer.hostId,
+ inactiveFor: Math.floor((now - viewer.lastHeartbeat) / 1000),
+ });
+ this.unregisterViewer(viewer.hostId, sessionId);
+ }
}
}
destroy(): void {
+ clearInterval(this.viewerCleanupInterval);
for (const hostId of this.pollingConfigs.keys()) {
this.stopPollingForHost(hostId);
}
@@ -954,11 +1398,23 @@ async function fetchHostById(
return undefined;
}
+ const accessInfo = await permissionManager.canAccessHost(
+ userId,
+ id,
+ "read",
+ );
+
+ if (!accessInfo.hasAccess) {
+ statsLogger.warn(`User ${userId} cannot access host ${id}`, {
+ operation: "fetch_host_access_denied",
+ userId,
+ hostId: id,
+ });
+ return undefined;
+ }
+
const hosts = await SimpleDBOps.select(
- getDb()
- .select()
- .from(sshData)
- .where(and(eq(sshData.id, id), eq(sshData.userId, userId))),
+ getDb().select().from(sshData).where(eq(sshData.id, id)),
"ssh_data",
userId,
);
@@ -1007,45 +1463,84 @@ async function resolveHostCredentials(
createdAt: host.createdAt,
updatedAt: host.updatedAt,
userId: host.userId,
+ useSocks5: !!host.useSocks5,
+ socks5Host: host.socks5Host || undefined,
+ socks5Port: host.socks5Port || undefined,
+ socks5Username: host.socks5Username || undefined,
+ socks5Password: host.socks5Password || undefined,
+ socks5ProxyChain: host.socks5ProxyChain
+ ? JSON.parse(host.socks5ProxyChain as string)
+ : undefined,
};
if (host.credentialId) {
try {
- const credentials = await SimpleDBOps.select(
- getDb()
- .select()
- .from(sshCredentials)
- .where(
- and(
- eq(sshCredentials.id, host.credentialId as number),
- eq(sshCredentials.userId, userId),
- ),
- ),
- "ssh_credentials",
- userId,
- );
+ const ownerId = host.userId;
+ const isSharedHost = userId !== ownerId;
- if (credentials.length > 0) {
- const credential = credentials[0];
- baseHost.credentialId = credential.id;
- baseHost.username = credential.username;
- baseHost.authType = credential.auth_type || credential.authType;
+ if (isSharedHost) {
+ const { SharedCredentialManager } =
+ await import("../utils/shared-credential-manager.js");
+ const sharedCredManager = SharedCredentialManager.getInstance();
+ const sharedCred = await sharedCredManager.getSharedCredentialForUser(
+ host.id as number,
+ userId,
+ );
- if (credential.password) {
- baseHost.password = credential.password;
+ baseHost.credentialId = host.credentialId;
+ baseHost.authType = sharedCred.authType;
+
+ if (!host.overrideCredentialUsername) {
+ baseHost.username = sharedCred.username;
}
- if (credential.key) {
- baseHost.key = credential.key;
+
+ if (sharedCred.password) {
+ baseHost.password = sharedCred.password;
}
- if (credential.key_password || credential.keyPassword) {
- baseHost.keyPassword =
- credential.key_password || credential.keyPassword;
+ if (sharedCred.key) {
+ baseHost.key = sharedCred.key;
}
- if (credential.key_type || credential.keyType) {
- baseHost.keyType = credential.key_type || credential.keyType;
+ if (sharedCred.keyPassword) {
+ baseHost.keyPassword = sharedCred.keyPassword;
+ }
+ if (sharedCred.keyType) {
+ baseHost.keyType = sharedCred.keyType;
}
} else {
- addLegacyCredentials(baseHost, host);
+ const credentials = await SimpleDBOps.select(
+ getDb()
+ .select()
+ .from(sshCredentials)
+ .where(eq(sshCredentials.id, host.credentialId as number)),
+ "ssh_credentials",
+ userId,
+ );
+
+ if (credentials.length > 0) {
+ const credential = credentials[0];
+ baseHost.credentialId = credential.id;
+ baseHost.authType = credential.auth_type || credential.authType;
+
+ if (!host.overrideCredentialUsername) {
+ baseHost.username = credential.username;
+ }
+
+ if (credential.password) {
+ baseHost.password = credential.password;
+ }
+ if (credential.key) {
+ baseHost.key = credential.key;
+ }
+ if (credential.key_password || credential.keyPassword) {
+ baseHost.keyPassword =
+ credential.key_password || credential.keyPassword;
+ }
+ if (credential.key_type || credential.keyType) {
+ baseHost.keyType = credential.key_type || credential.keyType;
+ }
+ } else {
+ addLegacyCredentials(baseHost, host);
+ }
}
} catch (error) {
statsLogger.warn(
@@ -1180,6 +1675,7 @@ function buildSshConfig(host: SSHHostWithCredentials): ConnectConfig {
);
throw new Error(`Invalid SSH key format for host ${host.ip}`);
}
+ } else if (host.authType === "none") {
} else {
throw new Error(
`Unsupported authentication type '${host.authType}' for host ${host.ip}`,
@@ -1194,6 +1690,7 @@ async function withSshConnection(
fn: (client: Client) => Promise,
): Promise {
const client = await connectionPool.getConnection(host);
+
try {
const result = await fn(client);
return result;
@@ -1262,8 +1759,11 @@ async function collectMetrics(host: SSHHostWithCredentials): Promise<{
}
return requestQueue.queueRequest(host.id, async () => {
+ const sessionKey = getSessionKey(host.id, host.userId!);
+ const existingSession = metricsSessions[sessionKey];
+
try {
- return await withSshConnection(host, async (client) => {
+ const collectFn = async (client: Client) => {
const cpu = await collectCpuMetrics(client);
const memory = await collectMemoryMetrics(client);
const disk = await collectDiskMetrics(client);
@@ -1280,12 +1780,7 @@ async function collectMetrics(host: SSHHostWithCredentials): Promise<{
};
try {
login_stats = await collectLoginStats(client);
- } catch (e) {
- statsLogger.debug("Failed to collect login stats", {
- operation: "login_stats_failed",
- error: e instanceof Error ? e.message : String(e),
- });
- }
+ } catch (e) {}
const result = {
cpu,
@@ -1300,7 +1795,20 @@ async function collectMetrics(host: SSHHostWithCredentials): Promise<{
metricsCache.set(host.id, result);
return result;
- });
+ };
+
+ if (existingSession && existingSession.isConnected) {
+ existingSession.activeOperations++;
+ try {
+ const result = await collectFn(existingSession.client);
+ existingSession.lastActive = Date.now();
+ return result;
+ } finally {
+ existingSession.activeOperations--;
+ }
+ } else {
+ return await withSshConnection(host, collectFn);
+ }
} catch (error) {
if (error instanceof Error) {
if (error.message.includes("TOTP authentication required")) {
@@ -1402,6 +1910,20 @@ app.get("/status/:id", validateHostId, async (req, res) => {
res.json(statusEntry);
});
+app.post("/clear-connections", async (req, res) => {
+ const userId = (req as AuthenticatedRequest).userId;
+
+ if (!SimpleDBOps.isUserDataUnlocked(userId)) {
+ return res.status(401).json({
+ error: "Session expired - please log in again",
+ code: "SESSION_EXPIRED",
+ });
+ }
+
+ connectionPool.clearAllConnections();
+ res.json({ message: "All SSH connections cleared" });
+});
+
app.post("/refresh", async (req, res) => {
const userId = (req as AuthenticatedRequest).userId;
@@ -1412,6 +1934,8 @@ app.post("/refresh", async (req, res) => {
});
}
+ connectionPool.clearAllConnections();
+
await pollingManager.refreshHostPolling(userId);
res.json({ message: "Polling refreshed" });
});
@@ -1434,6 +1958,8 @@ app.post("/host-updated", async (req, res) => {
try {
const host = await fetchHostById(hostId, userId);
if (host) {
+ connectionPool.clearHostConnections(host);
+
await pollingManager.startPollingForHost(host);
res.json({ message: "Host polling started" });
} else {
@@ -1514,6 +2040,457 @@ app.get("/metrics/:id", validateHostId, async (req, res) => {
});
});
+app.post("/metrics/start/:id", validateHostId, async (req, res) => {
+ const id = Number(req.params.id);
+ const userId = (req as AuthenticatedRequest).userId;
+
+ if (!SimpleDBOps.isUserDataUnlocked(userId)) {
+ return res.status(401).json({
+ error: "Session expired - please log in again",
+ code: "SESSION_EXPIRED",
+ });
+ }
+
+ try {
+ const host = await fetchHostById(id, userId);
+ if (!host) {
+ return res.status(404).json({ error: "Host not found" });
+ }
+
+ const sessionKey = getSessionKey(host.id, userId);
+
+ const existingSession = metricsSessions[sessionKey];
+ if (existingSession && existingSession.isConnected) {
+ return res.json({ success: true });
+ }
+
+ const config = buildSshConfig(host);
+ const client = new Client();
+
+ const connectionPromise = new Promise<{
+ success: boolean;
+ requires_totp?: boolean;
+ sessionId?: string;
+ prompt?: string;
+ viewerSessionId?: string;
+ }>((resolve, reject) => {
+ let isResolved = false;
+
+ const timeout = setTimeout(() => {
+ if (!isResolved) {
+ isResolved = true;
+ client.end();
+ reject(new Error("Connection timeout"));
+ }
+ }, 60000);
+
+ client.on(
+ "keyboard-interactive",
+ (name, instructions, instructionsLang, prompts, finish) => {
+ const totpPromptIndex = prompts.findIndex((p) =>
+ /verification code|verification_code|token|otp|2fa|authenticator|google.*auth/i.test(
+ p.prompt,
+ ),
+ );
+
+ if (totpPromptIndex !== -1) {
+ const sessionId = `totp-${Date.now()}-${Math.random().toString(36).substr(2, 9)}`;
+
+ pendingTOTPSessions[sessionId] = {
+ client,
+ finish,
+ config,
+ createdAt: Date.now(),
+ sessionId,
+ hostId: host.id,
+ userId: host.userId!,
+ prompts: prompts.map((p) => ({
+ prompt: p.prompt,
+ echo: p.echo ?? false,
+ })),
+ totpPromptIndex,
+ resolvedPassword: host.password,
+ totpAttempts: 0,
+ };
+
+ clearTimeout(timeout);
+ if (!isResolved) {
+ isResolved = true;
+ resolve({
+ success: false,
+ requires_totp: true,
+ sessionId,
+ prompt: prompts[totpPromptIndex].prompt,
+ });
+ }
+ return;
+ } else {
+ const responses = prompts.map((p) => {
+ if (/password/i.test(p.prompt) && host.password) {
+ return host.password;
+ }
+ return "";
+ });
+ finish(responses);
+ }
+ },
+ );
+
+ client.on("ready", () => {
+ clearTimeout(timeout);
+ if (!isResolved) {
+ isResolved = true;
+
+ metricsSessions[sessionKey] = {
+ client,
+ isConnected: true,
+ lastActive: Date.now(),
+ activeOperations: 0,
+ hostId: host.id,
+ userId,
+ };
+ scheduleMetricsSessionCleanup(sessionKey);
+
+ const viewerSessionId = `viewer-${Date.now()}-${Math.random().toString(36).substr(2, 9)}`;
+ pollingManager.registerViewer(host.id, viewerSessionId, userId);
+
+ resolve({ success: true, viewerSessionId });
+ }
+ });
+
+ client.on("error", (error) => {
+ clearTimeout(timeout);
+ if (!isResolved) {
+ isResolved = true;
+ statsLogger.error("SSH connection error in metrics/start", {
+ operation: "metrics_start_ssh_error",
+ hostId: host.id,
+ error: error instanceof Error ? error.message : String(error),
+ });
+ reject(error);
+ }
+ });
+
+ if (
+ host.useSocks5 &&
+ (host.socks5Host ||
+ (host.socks5ProxyChain && host.socks5ProxyChain.length > 0))
+ ) {
+ createSocks5Connection(host.ip, host.port, {
+ useSocks5: host.useSocks5,
+ socks5Host: host.socks5Host,
+ socks5Port: host.socks5Port,
+ socks5Username: host.socks5Username,
+ socks5Password: host.socks5Password,
+ socks5ProxyChain: host.socks5ProxyChain,
+ })
+ .then((socks5Socket) => {
+ if (socks5Socket) {
+ config.sock = socks5Socket;
+ }
+ client.connect(config);
+ })
+ .catch((error) => {
+ if (!isResolved) {
+ isResolved = true;
+ clearTimeout(timeout);
+ reject(error);
+ }
+ });
+ } else {
+ client.connect(config);
+ }
+ });
+
+ const result = await connectionPromise;
+ res.json(result);
+ } catch (error) {
+ statsLogger.error("Failed to start metrics collection", {
+ operation: "metrics_start_error",
+ hostId: id,
+ error: error instanceof Error ? error.message : String(error),
+ });
+ res.status(500).json({
+ error:
+ error instanceof Error
+ ? error.message
+ : "Failed to start metrics collection",
+ });
+ }
+});
+
+app.post("/metrics/stop/:id", validateHostId, async (req, res) => {
+ const id = Number(req.params.id);
+ const userId = (req as AuthenticatedRequest).userId;
+ const { viewerSessionId } = req.body;
+
+ if (!SimpleDBOps.isUserDataUnlocked(userId)) {
+ return res.status(401).json({
+ error: "Session expired - please log in again",
+ code: "SESSION_EXPIRED",
+ });
+ }
+
+ try {
+ const sessionKey = getSessionKey(id, userId);
+ const session = metricsSessions[sessionKey];
+
+ if (session) {
+ cleanupMetricsSession(sessionKey);
+ }
+
+ if (viewerSessionId && typeof viewerSessionId === "string") {
+ pollingManager.unregisterViewer(id, viewerSessionId);
+ } else {
+ pollingManager.stopMetricsOnly(id);
+ }
+
+ res.json({ success: true });
+ } catch (error) {
+ statsLogger.error("Failed to stop metrics collection", {
+ operation: "metrics_stop_error",
+ hostId: id,
+ error: error instanceof Error ? error.message : String(error),
+ });
+ res.status(500).json({
+ error:
+ error instanceof Error
+ ? error.message
+ : "Failed to stop metrics collection",
+ });
+ }
+});
+
+app.post("/metrics/connect-totp", async (req, res) => {
+ const { sessionId, totpCode } = req.body;
+ const userId = (req as AuthenticatedRequest).userId;
+
+ if (!SimpleDBOps.isUserDataUnlocked(userId)) {
+ return res.status(401).json({
+ error: "Session expired - please log in again",
+ code: "SESSION_EXPIRED",
+ });
+ }
+
+ if (!sessionId || !totpCode) {
+ return res.status(400).json({ error: "Missing sessionId or totpCode" });
+ }
+
+ const session = pendingTOTPSessions[sessionId];
+ if (!session) {
+ return res.status(404).json({ error: "TOTP session not found or expired" });
+ }
+
+ if (Date.now() - session.createdAt > 180000) {
+ delete pendingTOTPSessions[sessionId];
+ try {
+ session.client.end();
+ } catch {}
+ return res.status(408).json({ error: "TOTP session timeout" });
+ }
+
+ if (session.userId !== userId) {
+ return res.status(403).json({ error: "Unauthorized" });
+ }
+
+ session.totpAttempts++;
+ if (session.totpAttempts > 3) {
+ delete pendingTOTPSessions[sessionId];
+ try {
+ session.client.end();
+ } catch {}
+ return res.status(429).json({ error: "Too many TOTP attempts" });
+ }
+
+ try {
+ const responses = (session.prompts || []).map((p, idx) => {
+ if (idx === session.totpPromptIndex) {
+ return totpCode.trim();
+ } else if (/password/i.test(p.prompt) && session.resolvedPassword) {
+ return session.resolvedPassword;
+ }
+ return "";
+ });
+
+ const connectionPromise = new Promise((resolve, reject) => {
+ const timeout = setTimeout(() => {
+ reject(new Error("TOTP verification timeout"));
+ }, 30000);
+
+ session.client.once(
+ "keyboard-interactive",
+ (name, instructions, instructionsLang, prompts, finish) => {
+ statsLogger.warn("Second keyboard-interactive received after TOTP", {
+ operation: "totp_second_keyboard_interactive",
+ hostId: session.hostId,
+ sessionId,
+ prompts: prompts.map((p) => p.prompt),
+ });
+ const secondResponses = prompts.map((p) => {
+ if (/password/i.test(p.prompt) && session.resolvedPassword) {
+ return session.resolvedPassword;
+ }
+ return "";
+ });
+ finish(secondResponses);
+ },
+ );
+
+ session.client.once("ready", () => {
+ clearTimeout(timeout);
+ resolve();
+ });
+
+ session.client.once("error", (error) => {
+ clearTimeout(timeout);
+ statsLogger.error("SSH client error after TOTP", {
+ operation: "totp_client_error",
+ hostId: session.hostId,
+ sessionId,
+ error: error instanceof Error ? error.message : String(error),
+ });
+ reject(error);
+ });
+ });
+
+ session.finish(responses);
+
+ await connectionPromise;
+
+ const sessionKey = getSessionKey(session.hostId, userId);
+ metricsSessions[sessionKey] = {
+ client: session.client,
+ isConnected: true,
+ lastActive: Date.now(),
+ activeOperations: 0,
+ hostId: session.hostId,
+ userId,
+ };
+ scheduleMetricsSessionCleanup(sessionKey);
+
+ delete pendingTOTPSessions[sessionId];
+
+ const viewerSessionId = `viewer-${Date.now()}-${Math.random().toString(36).substr(2, 9)}`;
+ pollingManager.registerViewer(session.hostId, viewerSessionId, userId);
+
+ res.json({ success: true, viewerSessionId });
+ } catch (error) {
+ statsLogger.error("TOTP verification failed", {
+ operation: "totp_verification_failed",
+ hostId: session.hostId,
+ sessionId,
+ error: error instanceof Error ? error.message : String(error),
+ });
+
+ if (session.totpAttempts >= 3) {
+ delete pendingTOTPSessions[sessionId];
+ try {
+ session.client.end();
+ } catch {}
+ }
+
+ res.status(401).json({
+ error: "TOTP verification failed",
+ attemptsRemaining: Math.max(0, 3 - session.totpAttempts),
+ });
+ }
+});
+
+app.post("/metrics/heartbeat", async (req, res) => {
+ const { viewerSessionId } = req.body;
+ const userId = (req as AuthenticatedRequest).userId;
+
+ if (!SimpleDBOps.isUserDataUnlocked(userId)) {
+ return res.status(401).json({
+ error: "Session expired - please log in again",
+ code: "SESSION_EXPIRED",
+ });
+ }
+
+ if (!viewerSessionId || typeof viewerSessionId !== "string") {
+ return res.status(400).json({ error: "Invalid viewerSessionId" });
+ }
+
+ try {
+ const success = pollingManager.updateHeartbeat(viewerSessionId);
+ if (success) {
+ res.json({ success: true });
+ } else {
+ res.status(404).json({ error: "Viewer session not found" });
+ }
+ } catch (error) {
+ statsLogger.error("Failed to update heartbeat", {
+ operation: "heartbeat_error",
+ viewerSessionId,
+ error: error instanceof Error ? error.message : String(error),
+ });
+ res.status(500).json({ error: "Failed to update heartbeat" });
+ }
+});
+
+app.post("/metrics/register-viewer", async (req, res) => {
+ const { hostId } = req.body;
+ const userId = (req as AuthenticatedRequest).userId;
+
+ if (!SimpleDBOps.isUserDataUnlocked(userId)) {
+ return res.status(401).json({
+ error: "Session expired - please log in again",
+ code: "SESSION_EXPIRED",
+ });
+ }
+
+ if (!hostId || typeof hostId !== "number") {
+ return res.status(400).json({ error: "Invalid hostId" });
+ }
+
+ try {
+ const viewerSessionId = `viewer-${Date.now()}-${Math.random().toString(36).substr(2, 9)}`;
+ pollingManager.registerViewer(hostId, viewerSessionId, userId);
+ res.json({ success: true, viewerSessionId });
+ } catch (error) {
+ statsLogger.error("Failed to register viewer", {
+ operation: "register_viewer_error",
+ hostId,
+ userId,
+ error: error instanceof Error ? error.message : String(error),
+ });
+ res.status(500).json({ error: "Failed to register viewer" });
+ }
+});
+
+app.post("/metrics/unregister-viewer", async (req, res) => {
+ const { hostId, viewerSessionId } = req.body;
+ const userId = (req as AuthenticatedRequest).userId;
+
+ if (!SimpleDBOps.isUserDataUnlocked(userId)) {
+ return res.status(401).json({
+ error: "Session expired - please log in again",
+ code: "SESSION_EXPIRED",
+ });
+ }
+
+ if (!hostId || typeof hostId !== "number") {
+ return res.status(400).json({ error: "Invalid hostId" });
+ }
+
+ if (!viewerSessionId || typeof viewerSessionId !== "string") {
+ return res.status(400).json({ error: "Invalid viewerSessionId" });
+ }
+
+ try {
+ pollingManager.unregisterViewer(hostId, viewerSessionId);
+ res.json({ success: true });
+ } catch (error) {
+ statsLogger.error("Failed to unregister viewer", {
+ operation: "unregister_viewer_error",
+ hostId,
+ viewerSessionId,
+ error: error instanceof Error ? error.message : String(error),
+ });
+ res.status(500).json({ error: "Failed to unregister viewer" });
+ }
+});
+
process.on("SIGINT", () => {
pollingManager.destroy();
connectionPool.destroy();
@@ -1539,6 +2516,7 @@ app.listen(PORT, async () => {
setInterval(
() => {
authFailureTracker.cleanup();
+ pollingBackoff.cleanup();
},
10 * 60 * 1000,
);
diff --git a/src/backend/ssh/terminal.ts b/src/backend/ssh/terminal.ts
index 78c181e7..64223bfe 100644
--- a/src/backend/ssh/terminal.ts
+++ b/src/backend/ssh/terminal.ts
@@ -14,6 +14,7 @@ import { sshLogger } from "../utils/logger.js";
import { SimpleDBOps } from "../utils/simple-db-ops.js";
import { AuthManager } from "../utils/auth-manager.js";
import { UserCrypto } from "../utils/user-crypto.js";
+import { createSocks5Connection } from "../utils/socks5-helper.js";
interface ConnectToHostData {
cols: number;
@@ -32,6 +33,12 @@ interface ConnectToHostData {
userId?: string;
forceKeyboardInteractive?: boolean;
jumpHosts?: Array<{ hostId: number }>;
+ useSocks5?: boolean;
+ socks5Host?: string;
+ socks5Port?: number;
+ socks5Username?: string;
+ socks5Password?: string;
+ socks5ProxyChain?: unknown;
};
initialPath?: string;
executeCommand?: string;
@@ -130,10 +137,12 @@ async function createJumpHostChain(
const clients: Client[] = [];
try {
- for (let i = 0; i < jumpHosts.length; i++) {
- const jumpHostConfig = await resolveJumpHost(jumpHosts[i].hostId, userId);
+ const jumpHostConfigs = await Promise.all(
+ jumpHosts.map((jh) => resolveJumpHost(jh.hostId, userId)),
+ );
- if (!jumpHostConfig) {
+ for (let i = 0; i < jumpHostConfigs.length; i++) {
+ if (!jumpHostConfigs[i]) {
sshLogger.error(`Jump host ${i + 1} not found`, undefined, {
operation: "jump_host_chain",
hostId: jumpHosts[i].hostId,
@@ -141,6 +150,10 @@ async function createJumpHostChain(
clients.forEach((c) => c.end());
return null;
}
+ }
+
+ for (let i = 0; i < jumpHostConfigs.length; i++) {
+ const jumpHostConfig = jumpHostConfigs[i];
const jumpClient = new Client();
clients.push(jumpClient);
@@ -316,9 +329,10 @@ wss.on("connection", async (ws: WebSocket, req) => {
let sshConn: Client | null = null;
let sshStream: ClientChannel | null = null;
- let pingInterval: NodeJS.Timeout | null = null;
let keyboardInteractiveFinish: ((responses: string[]) => void) | null = null;
let totpPromptSent = false;
+ let totpAttempts = 0;
+ let totpTimeout: NodeJS.Timeout | null = null;
let isKeyboardInteractive = false;
let keyboardInteractiveResponded = false;
let isConnecting = false;
@@ -435,9 +449,15 @@ wss.on("connection", async (ws: WebSocket, req) => {
case "totp_response": {
const totpData = data as TOTPResponseData;
if (keyboardInteractiveFinish && totpData?.code) {
+ if (totpTimeout) {
+ clearTimeout(totpTimeout);
+ totpTimeout = null;
+ }
const totpCode = totpData.code;
+ totpAttempts++;
keyboardInteractiveFinish([totpCode]);
keyboardInteractiveFinish = null;
+ totpPromptSent = false;
} else {
sshLogger.warn("TOTP response received but no callback available", {
operation: "totp_response_error",
@@ -458,6 +478,10 @@ wss.on("connection", async (ws: WebSocket, req) => {
case "password_response": {
const passwordData = data as TOTPResponseData;
if (keyboardInteractiveFinish && passwordData?.code) {
+ if (totpTimeout) {
+ clearTimeout(totpTimeout);
+ totpTimeout = null;
+ }
const password = passwordData.code;
keyboardInteractiveFinish([password]);
keyboardInteractiveFinish = null;
@@ -597,6 +621,13 @@ wss.on("connection", async (ws: WebSocket, req) => {
isConnecting,
isConnected,
});
+ ws.send(
+ JSON.stringify({
+ type: "error",
+ message: "Connection already in progress",
+ code: "DUPLICATE_CONNECTION",
+ }),
+ );
return;
}
@@ -617,7 +648,7 @@ wss.on("connection", async (ws: WebSocket, req) => {
);
cleanupSSH(connectionTimeout);
}
- }, 120000);
+ }, 30000);
let resolvedCredentials = { password, key, keyPassword, keyType, authType };
let authMethodNotAvailable = false;
@@ -802,8 +833,6 @@ wss.on("connection", async (ws: WebSocket, req) => {
);
});
- setupPingInterval();
-
if (initialPath && initialPath.trim() !== "") {
const cdCommand = `cd "${initialPath.replace(/"/g, '\\"')}" && pwd\n`;
stream.write(cdCommand);
@@ -987,6 +1016,25 @@ wss.on("connection", async (ws: WebSocket, req) => {
finish(responses);
};
+
+ totpTimeout = setTimeout(() => {
+ if (keyboardInteractiveFinish) {
+ keyboardInteractiveFinish = null;
+ totpPromptSent = false;
+ sshLogger.warn("TOTP prompt timeout", {
+ operation: "totp_timeout",
+ hostId: id,
+ });
+ ws.send(
+ JSON.stringify({
+ type: "error",
+ message: "TOTP verification timeout. Please reconnect.",
+ }),
+ );
+ cleanupSSH(connectionTimeout);
+ }
+ }, 180000);
+
ws.send(
JSON.stringify({
type: "totp_required",
@@ -1021,6 +1069,24 @@ wss.on("connection", async (ws: WebSocket, req) => {
finish(responses);
};
+ totpTimeout = setTimeout(() => {
+ if (keyboardInteractiveFinish) {
+ keyboardInteractiveFinish = null;
+ keyboardInteractiveResponded = false;
+ sshLogger.warn("Password prompt timeout", {
+ operation: "password_timeout",
+ hostId: id,
+ });
+ ws.send(
+ JSON.stringify({
+ type: "error",
+ message: "Password verification timeout. Please reconnect.",
+ }),
+ );
+ cleanupSSH(connectionTimeout);
+ }
+ }, 180000);
+
ws.send(
JSON.stringify({
type: "password_required",
@@ -1049,10 +1115,10 @@ wss.on("connection", async (ws: WebSocket, req) => {
tryKeyboard: true,
keepaliveInterval: 30000,
keepaliveCountMax: 3,
- readyTimeout: 120000,
+ readyTimeout: 30000,
tcpKeepAlive: true,
tcpKeepAliveInitialDelay: 30000,
- timeout: 120000,
+ timeout: 30000,
env: {
TERM: "xterm-256color",
LANG: "en_US.UTF-8",
@@ -1128,9 +1194,7 @@ wss.on("connection", async (ws: WebSocket, req) => {
return;
}
- if (!hostConfig.forceKeyboardInteractive) {
- connectConfig.password = resolvedCredentials.password;
- }
+ connectConfig.password = resolvedCredentials.password;
} else if (
resolvedCredentials.authType === "key" &&
resolvedCredentials.key
@@ -1183,6 +1247,49 @@ wss.on("connection", async (ws: WebSocket, req) => {
return;
}
+ if (
+ hostConfig.useSocks5 &&
+ (hostConfig.socks5Host ||
+ (hostConfig.socks5ProxyChain &&
+ (hostConfig.socks5ProxyChain as any).length > 0))
+ ) {
+ try {
+ const socks5Socket = await createSocks5Connection(ip, port, {
+ useSocks5: hostConfig.useSocks5,
+ socks5Host: hostConfig.socks5Host,
+ socks5Port: hostConfig.socks5Port,
+ socks5Username: hostConfig.socks5Username,
+ socks5Password: hostConfig.socks5Password,
+ socks5ProxyChain: hostConfig.socks5ProxyChain as any,
+ });
+
+ if (socks5Socket) {
+ connectConfig.sock = socks5Socket;
+ sshConn.connect(connectConfig);
+ return;
+ }
+ } catch (socks5Error) {
+ sshLogger.error("SOCKS5 connection failed", socks5Error, {
+ operation: "socks5_connect",
+ hostId: id,
+ proxyHost: hostConfig.socks5Host,
+ proxyPort: hostConfig.socks5Port || 1080,
+ });
+ ws.send(
+ JSON.stringify({
+ type: "error",
+ message:
+ "SOCKS5 proxy connection failed: " +
+ (socks5Error instanceof Error
+ ? socks5Error.message
+ : "Unknown error"),
+ }),
+ );
+ cleanupSSH(connectionTimeout);
+ return;
+ }
+ }
+
if (
hostConfig.jumpHosts &&
hostConfig.jumpHosts.length > 0 &&
@@ -1279,9 +1386,9 @@ wss.on("connection", async (ws: WebSocket, req) => {
clearTimeout(timeoutId);
}
- if (pingInterval) {
- clearInterval(pingInterval);
- pingInterval = null;
+ if (totpTimeout) {
+ clearTimeout(totpTimeout);
+ totpTimeout = null;
}
if (sshStream) {
@@ -1309,35 +1416,21 @@ wss.on("connection", async (ws: WebSocket, req) => {
}
totpPromptSent = false;
+ totpAttempts = 0;
isKeyboardInteractive = false;
keyboardInteractiveResponded = false;
keyboardInteractiveFinish = null;
isConnecting = false;
isConnected = false;
-
- setTimeout(() => {
- isCleaningUp = false;
- }, 100);
+ isCleaningUp = false;
}
- function setupPingInterval() {
- pingInterval = setInterval(() => {
- if (sshConn && sshStream) {
- try {
- sshStream.write("\x00");
- } catch (e: unknown) {
- sshLogger.error(
- "SSH keepalive failed: " +
- (e instanceof Error ? e.message : "Unknown error"),
- );
- cleanupSSH();
- }
- } else if (!sshConn || !sshStream) {
- if (pingInterval) {
- clearInterval(pingInterval);
- pingInterval = null;
- }
- }
- }, 30000);
- }
+ // Note: PTY-level keepalive (writing \x00 to the stream) was removed.
+ // It was causing ^@ characters to appear in terminals with echoctl enabled.
+ // SSH-level keepalive is configured via connectConfig (keepaliveInterval,
+ // keepaliveCountMax, tcpKeepAlive), which handles connection health monitoring
+ // without producing visible output on the terminal.
+ //
+ // See: https://github.com/Termix-SSH/Support/issues/232
+ // See: https://github.com/Termix-SSH/Support/issues/309
});
diff --git a/src/backend/ssh/tunnel.ts b/src/backend/ssh/tunnel.ts
index 4365abb8..fbef615b 100644
--- a/src/backend/ssh/tunnel.ts
+++ b/src/backend/ssh/tunnel.ts
@@ -1,4 +1,4 @@
-import express from "express";
+import express, { type Response } from "express";
import cors from "cors";
import cookieParser from "cookie-parser";
import { Client } from "ssh2";
@@ -13,12 +13,16 @@ import type {
TunnelStatus,
VerificationData,
ErrorType,
+ AuthenticatedRequest,
} from "../../types/index.js";
import { CONNECTION_STATES } from "../../types/index.js";
import { tunnelLogger, sshLogger } from "../utils/logger.js";
import { SystemCrypto } from "../utils/system-crypto.js";
import { SimpleDBOps } from "../utils/simple-db-ops.js";
import { DataCrypto } from "../utils/data-crypto.js";
+import { createSocks5Connection } from "../utils/socks5-helper.js";
+import { AuthManager } from "../utils/auth-manager.js";
+import { PermissionManager } from "../utils/permission-manager.js";
const app = express();
app.use(
@@ -63,6 +67,10 @@ app.use(
app.use(cookieParser());
app.use(express.json());
+const authManager = AuthManager.getInstance();
+const permissionManager = PermissionManager.getInstance();
+const authenticateJWT = authManager.createAuthMiddleware();
+
const activeTunnels = new Map();
const retryCounters = new Map();
const connectionStatus = new Map();
@@ -77,6 +85,7 @@ const tunnelConnecting = new Set();
const tunnelConfigs = new Map();
const activeTunnelProcesses = new Map();
+const pendingTunnelOperations = new Map>();
function broadcastTunnelStatus(tunnelName: string, status: TunnelStatus): void {
if (
@@ -154,10 +163,75 @@ function getTunnelMarker(tunnelName: string) {
return `TUNNEL_MARKER_${tunnelName.replace(/[^a-zA-Z0-9]/g, "_")}`;
}
-function cleanupTunnelResources(
+function normalizeTunnelName(
+ hostId: number,
+ tunnelIndex: number,
+ displayName: string,
+ sourcePort: number,
+ endpointHost: string,
+ endpointPort: number,
+): string {
+ return `${hostId}::${tunnelIndex}::${displayName}::${sourcePort}::${endpointHost}::${endpointPort}`;
+}
+
+function parseTunnelName(tunnelName: string): {
+ hostId?: number;
+ tunnelIndex?: number;
+ displayName: string;
+ sourcePort: string;
+ endpointHost: string;
+ endpointPort: string;
+ isLegacyFormat: boolean;
+} {
+ const parts = tunnelName.split("::");
+
+ if (parts.length === 6) {
+ return {
+ hostId: parseInt(parts[0]),
+ tunnelIndex: parseInt(parts[1]),
+ displayName: parts[2],
+ sourcePort: parts[3],
+ endpointHost: parts[4],
+ endpointPort: parts[5],
+ isLegacyFormat: false,
+ };
+ }
+
+ tunnelLogger.warn(`Legacy tunnel name format: ${tunnelName}`);
+
+ const legacyParts = tunnelName.split("_");
+ return {
+ displayName: legacyParts[0] || "unknown",
+ sourcePort: legacyParts[legacyParts.length - 3] || "0",
+ endpointHost: legacyParts[legacyParts.length - 2] || "unknown",
+ endpointPort: legacyParts[legacyParts.length - 1] || "0",
+ isLegacyFormat: true,
+ };
+}
+
+function validateTunnelConfig(
+ tunnelName: string,
+ tunnelConfig: TunnelConfig,
+): boolean {
+ const parsed = parseTunnelName(tunnelName);
+
+ if (parsed.isLegacyFormat) {
+ return true;
+ }
+
+ return (
+ parsed.hostId === tunnelConfig.sourceHostId &&
+ parsed.tunnelIndex === tunnelConfig.tunnelIndex &&
+ String(parsed.sourcePort) === String(tunnelConfig.sourcePort) &&
+ parsed.endpointHost === tunnelConfig.endpointHost &&
+ String(parsed.endpointPort) === String(tunnelConfig.endpointPort)
+ );
+}
+
+async function cleanupTunnelResources(
tunnelName: string,
forceCleanup = false,
-): void {
+): Promise {
if (cleanupInProgress.has(tunnelName)) {
return;
}
@@ -170,13 +244,16 @@ function cleanupTunnelResources(
const tunnelConfig = tunnelConfigs.get(tunnelName);
if (tunnelConfig) {
- killRemoteTunnelByMarker(tunnelConfig, tunnelName, (err) => {
- cleanupInProgress.delete(tunnelName);
- if (err) {
- tunnelLogger.error(
- `Failed to kill remote tunnel for '${tunnelName}': ${err.message}`,
- );
- }
+ await new Promise((resolve) => {
+ killRemoteTunnelByMarker(tunnelConfig, tunnelName, (err) => {
+ cleanupInProgress.delete(tunnelName);
+ if (err) {
+ tunnelLogger.error(
+ `Failed to kill remote tunnel for '${tunnelName}': ${err.message}`,
+ );
+ }
+ resolve();
+ });
});
} else {
cleanupInProgress.delete(tunnelName);
@@ -272,11 +349,11 @@ function resetRetryState(tunnelName: string): void {
});
}
-function handleDisconnect(
+async function handleDisconnect(
tunnelName: string,
tunnelConfig: TunnelConfig | null,
shouldRetry = true,
-): void {
+): Promise {
if (tunnelVerifications.has(tunnelName)) {
try {
const verification = tunnelVerifications.get(tunnelName);
@@ -286,7 +363,11 @@ function handleDisconnect(
tunnelVerifications.delete(tunnelName);
}
- cleanupTunnelResources(tunnelName);
+ while (cleanupInProgress.has(tunnelName)) {
+ await new Promise((resolve) => setTimeout(resolve, 100));
+ }
+
+ await cleanupTunnelResources(tunnelName);
if (manualDisconnects.has(tunnelName)) {
resetRetryState(tunnelName);
@@ -490,43 +571,76 @@ async function connectSSHTunnel(
authMethod: tunnelConfig.sourceAuthMethod,
};
- if (tunnelConfig.sourceCredentialId && tunnelConfig.sourceUserId) {
- try {
- const userDataKey = DataCrypto.getUserDataKey(tunnelConfig.sourceUserId);
- if (userDataKey) {
- const credentials = await SimpleDBOps.select(
- getDb()
- .select()
- .from(sshCredentials)
- .where(
- and(
- eq(sshCredentials.id, tunnelConfig.sourceCredentialId),
- eq(sshCredentials.userId, tunnelConfig.sourceUserId),
- ),
- ),
- "ssh_credentials",
- tunnelConfig.sourceUserId,
- );
+ const effectiveUserId =
+ tunnelConfig.requestingUserId || tunnelConfig.sourceUserId;
- if (credentials.length > 0) {
- const credential = credentials[0];
- resolvedSourceCredentials = {
- password: credential.password as string | undefined,
- sshKey: (credential.private_key ||
- credential.privateKey ||
- credential.key) as string | undefined,
- keyPassword: (credential.key_password || credential.keyPassword) as
- | string
- | undefined,
- keyType: (credential.key_type || credential.keyType) as
- | string
- | undefined,
- authMethod: (credential.auth_type || credential.authType) as string,
- };
+ if (tunnelConfig.sourceCredentialId && effectiveUserId) {
+ try {
+ if (
+ tunnelConfig.requestingUserId &&
+ tunnelConfig.requestingUserId !== tunnelConfig.sourceUserId
+ ) {
+ const { SharedCredentialManager } =
+ await import("../utils/shared-credential-manager.js");
+ const sharedCredManager = SharedCredentialManager.getInstance();
+
+ if (tunnelConfig.sourceHostId) {
+ const sharedCred = await sharedCredManager.getSharedCredentialForUser(
+ tunnelConfig.sourceHostId,
+ tunnelConfig.requestingUserId,
+ );
+
+ if (sharedCred) {
+ resolvedSourceCredentials = {
+ password: sharedCred.password,
+ sshKey: sharedCred.key,
+ keyPassword: sharedCred.keyPassword,
+ keyType: sharedCred.keyType,
+ authMethod: sharedCred.authType,
+ };
+ } else {
+ const errorMessage = `Cannot connect tunnel '${tunnelName}': shared credentials not available`;
+ tunnelLogger.error(errorMessage);
+ broadcastTunnelStatus(tunnelName, {
+ connected: false,
+ status: CONNECTION_STATES.FAILED,
+ reason: errorMessage,
+ });
+ return;
+ }
+ }
+ } else {
+ const userDataKey = DataCrypto.getUserDataKey(effectiveUserId);
+ if (userDataKey) {
+ const credentials = await SimpleDBOps.select(
+ getDb()
+ .select()
+ .from(sshCredentials)
+ .where(eq(sshCredentials.id, tunnelConfig.sourceCredentialId)),
+ "ssh_credentials",
+ effectiveUserId,
+ );
+
+ if (credentials.length > 0) {
+ const credential = credentials[0];
+ resolvedSourceCredentials = {
+ password: credential.password as string | undefined,
+ sshKey: (credential.private_key ||
+ credential.privateKey ||
+ credential.key) as string | undefined,
+ keyPassword: (credential.key_password ||
+ credential.keyPassword) as string | undefined,
+ keyType: (credential.key_type || credential.keyType) as
+ | string
+ | undefined,
+ authMethod: (credential.auth_type ||
+ credential.authType) as string,
+ };
+ }
}
}
} catch (error) {
- tunnelLogger.warn("Failed to resolve source credentials from database", {
+ tunnelLogger.warn("Failed to resolve source credentials", {
operation: "tunnel_connect",
tunnelName,
credentialId: tunnelConfig.sourceCredentialId,
@@ -581,12 +695,7 @@ async function connectSSHTunnel(
getDb()
.select()
.from(sshCredentials)
- .where(
- and(
- eq(sshCredentials.id, tunnelConfig.endpointCredentialId),
- eq(sshCredentials.userId, tunnelConfig.endpointUserId),
- ),
- ),
+ .where(eq(sshCredentials.id, tunnelConfig.endpointCredentialId)),
"ssh_credentials",
tunnelConfig.endpointUserId,
);
@@ -1016,6 +1125,51 @@ async function connectSSHTunnel(
});
}
+ if (
+ tunnelConfig.useSocks5 &&
+ (tunnelConfig.socks5Host ||
+ (tunnelConfig.socks5ProxyChain &&
+ tunnelConfig.socks5ProxyChain.length > 0))
+ ) {
+ try {
+ const socks5Socket = await createSocks5Connection(
+ tunnelConfig.sourceIP,
+ tunnelConfig.sourceSSHPort,
+ {
+ useSocks5: tunnelConfig.useSocks5,
+ socks5Host: tunnelConfig.socks5Host,
+ socks5Port: tunnelConfig.socks5Port,
+ socks5Username: tunnelConfig.socks5Username,
+ socks5Password: tunnelConfig.socks5Password,
+ socks5ProxyChain: tunnelConfig.socks5ProxyChain,
+ },
+ );
+
+ if (socks5Socket) {
+ connOptions.sock = socks5Socket;
+ conn.connect(connOptions);
+ return;
+ }
+ } catch (socks5Error) {
+ tunnelLogger.error("SOCKS5 connection failed for tunnel", socks5Error, {
+ operation: "socks5_connect",
+ tunnelName,
+ proxyHost: tunnelConfig.socks5Host,
+ proxyPort: tunnelConfig.socks5Port || 1080,
+ });
+ broadcastTunnelStatus(tunnelName, {
+ connected: false,
+ status: CONNECTION_STATES.FAILED,
+ reason:
+ "SOCKS5 proxy connection failed: " +
+ (socks5Error instanceof Error
+ ? socks5Error.message
+ : "Unknown error"),
+ });
+ return;
+ }
+ }
+
conn.connect(connOptions);
}
@@ -1042,12 +1196,7 @@ async function killRemoteTunnelByMarker(
getDb()
.select()
.from(sshCredentials)
- .where(
- and(
- eq(sshCredentials.id, tunnelConfig.sourceCredentialId),
- eq(sshCredentials.userId, tunnelConfig.sourceUserId),
- ),
- ),
+ .where(eq(sshCredentials.id, tunnelConfig.sourceCredentialId)),
"ssh_credentials",
tunnelConfig.sourceUserId,
);
@@ -1248,7 +1397,57 @@ async function killRemoteTunnelByMarker(
callback(err);
});
- conn.connect(connOptions);
+ if (
+ tunnelConfig.useSocks5 &&
+ (tunnelConfig.socks5Host ||
+ (tunnelConfig.socks5ProxyChain &&
+ tunnelConfig.socks5ProxyChain.length > 0))
+ ) {
+ (async () => {
+ try {
+ const socks5Socket = await createSocks5Connection(
+ tunnelConfig.sourceIP,
+ tunnelConfig.sourceSSHPort,
+ {
+ useSocks5: tunnelConfig.useSocks5,
+ socks5Host: tunnelConfig.socks5Host,
+ socks5Port: tunnelConfig.socks5Port,
+ socks5Username: tunnelConfig.socks5Username,
+ socks5Password: tunnelConfig.socks5Password,
+ socks5ProxyChain: tunnelConfig.socks5ProxyChain,
+ },
+ );
+
+ if (socks5Socket) {
+ connOptions.sock = socks5Socket;
+ conn.connect(connOptions);
+ } else {
+ callback(new Error("Failed to create SOCKS5 connection"));
+ }
+ } catch (socks5Error) {
+ tunnelLogger.error(
+ "SOCKS5 connection failed for killing tunnel",
+ socks5Error,
+ {
+ operation: "socks5_connect_kill",
+ tunnelName,
+ proxyHost: tunnelConfig.socks5Host,
+ proxyPort: tunnelConfig.socks5Port || 1080,
+ },
+ );
+ callback(
+ new Error(
+ "SOCKS5 proxy connection failed: " +
+ (socks5Error instanceof Error
+ ? socks5Error.message
+ : "Unknown error"),
+ ),
+ );
+ }
+ })();
+ } else {
+ conn.connect(connOptions);
+ }
}
app.get("/ssh/tunnel/status", (req, res) => {
@@ -1266,103 +1465,291 @@ app.get("/ssh/tunnel/status/:tunnelName", (req, res) => {
res.json({ name: tunnelName, status });
});
-app.post("/ssh/tunnel/connect", (req, res) => {
- const tunnelConfig: TunnelConfig = req.body;
+app.post(
+ "/ssh/tunnel/connect",
+ authenticateJWT,
+ async (req: AuthenticatedRequest, res: Response) => {
+ const tunnelConfig: TunnelConfig = req.body;
+ const userId = req.userId;
- if (!tunnelConfig || !tunnelConfig.name) {
- return res.status(400).json({ error: "Invalid tunnel configuration" });
- }
+ if (!userId) {
+ return res.status(401).json({ error: "Authentication required" });
+ }
- const tunnelName = tunnelConfig.name;
+ if (!tunnelConfig || !tunnelConfig.name) {
+ return res.status(400).json({ error: "Invalid tunnel configuration" });
+ }
- cleanupTunnelResources(tunnelName);
+ const tunnelName = tunnelConfig.name;
- manualDisconnects.delete(tunnelName);
- retryCounters.delete(tunnelName);
- retryExhaustedTunnels.delete(tunnelName);
+ try {
+ if (!validateTunnelConfig(tunnelName, tunnelConfig)) {
+ tunnelLogger.error(`Tunnel config validation failed`, {
+ operation: "tunnel_connect",
+ tunnelName,
+ configHostId: tunnelConfig.sourceHostId,
+ configTunnelIndex: tunnelConfig.tunnelIndex,
+ });
+ return res.status(400).json({
+ error: "Tunnel configuration does not match tunnel name",
+ });
+ }
- tunnelConfigs.set(tunnelName, tunnelConfig);
+ if (tunnelConfig.sourceHostId) {
+ const accessInfo = await permissionManager.canAccessHost(
+ userId,
+ tunnelConfig.sourceHostId,
+ "read",
+ );
- connectSSHTunnel(tunnelConfig, 0).catch((error) => {
- tunnelLogger.error(
- `Failed to connect tunnel ${tunnelConfig.name}: ${error instanceof Error ? error.message : "Unknown error"}`,
- );
- });
+ if (!accessInfo.hasAccess) {
+ tunnelLogger.warn("User attempted tunnel connect without access", {
+ operation: "tunnel_connect_unauthorized",
+ userId,
+ hostId: tunnelConfig.sourceHostId,
+ tunnelName,
+ });
+ return res.status(403).json({ error: "Access denied to this host" });
+ }
- res.json({ message: "Connection request received", tunnelName });
-});
+ if (accessInfo.isShared && !accessInfo.isOwner) {
+ tunnelConfig.requestingUserId = userId;
+ }
+ }
-app.post("/ssh/tunnel/disconnect", (req, res) => {
- const { tunnelName } = req.body;
+ if (pendingTunnelOperations.has(tunnelName)) {
+ try {
+ await pendingTunnelOperations.get(tunnelName);
+ } catch (error) {
+ tunnelLogger.warn(`Previous tunnel operation failed`, { tunnelName });
+ }
+ }
- if (!tunnelName) {
- return res.status(400).json({ error: "Tunnel name required" });
- }
+ const operation = (async () => {
+ manualDisconnects.delete(tunnelName);
+ retryCounters.delete(tunnelName);
+ retryExhaustedTunnels.delete(tunnelName);
- manualDisconnects.add(tunnelName);
- retryCounters.delete(tunnelName);
- retryExhaustedTunnels.delete(tunnelName);
+ await cleanupTunnelResources(tunnelName);
- if (activeRetryTimers.has(tunnelName)) {
- clearTimeout(activeRetryTimers.get(tunnelName)!);
- activeRetryTimers.delete(tunnelName);
- }
+ if (tunnelConfigs.has(tunnelName)) {
+ const existingConfig = tunnelConfigs.get(tunnelName);
+ if (
+ existingConfig &&
+ (existingConfig.sourceHostId !== tunnelConfig.sourceHostId ||
+ existingConfig.tunnelIndex !== tunnelConfig.tunnelIndex)
+ ) {
+ throw new Error(`Tunnel name collision detected: ${tunnelName}`);
+ }
+ }
- cleanupTunnelResources(tunnelName, true);
+ if (!tunnelConfig.endpointIP || !tunnelConfig.endpointUsername) {
+ try {
+ const systemCrypto = SystemCrypto.getInstance();
+ const internalAuthToken = await systemCrypto.getInternalAuthToken();
- broadcastTunnelStatus(tunnelName, {
- connected: false,
- status: CONNECTION_STATES.DISCONNECTED,
- manualDisconnect: true,
- });
+ const allHostsResponse = await axios.get(
+ "http://localhost:30001/ssh/db/host/internal/all",
+ {
+ headers: {
+ "Content-Type": "application/json",
+ "X-Internal-Auth-Token": internalAuthToken,
+ },
+ },
+ );
- const tunnelConfig = tunnelConfigs.get(tunnelName) || null;
- handleDisconnect(tunnelName, tunnelConfig, false);
+ const allHosts: SSHHost[] = allHostsResponse.data || [];
+ const endpointHost = allHosts.find(
+ (h) =>
+ h.name === tunnelConfig.endpointHost ||
+ `${h.username}@${h.ip}` === tunnelConfig.endpointHost,
+ );
- setTimeout(() => {
- manualDisconnects.delete(tunnelName);
- }, 5000);
+ if (!endpointHost) {
+ throw new Error(
+ `Endpoint host '${tunnelConfig.endpointHost}' not found in database`,
+ );
+ }
- res.json({ message: "Disconnect request received", tunnelName });
-});
+ tunnelConfig.endpointIP = endpointHost.ip;
+ tunnelConfig.endpointSSHPort = endpointHost.port;
+ tunnelConfig.endpointUsername = endpointHost.username;
+ tunnelConfig.endpointPassword = endpointHost.password;
+ tunnelConfig.endpointAuthMethod = endpointHost.authType;
+ tunnelConfig.endpointSSHKey = endpointHost.key;
+ tunnelConfig.endpointKeyPassword = endpointHost.keyPassword;
+ tunnelConfig.endpointKeyType = endpointHost.keyType;
+ tunnelConfig.endpointCredentialId = endpointHost.credentialId;
+ tunnelConfig.endpointUserId = endpointHost.userId;
+ } catch (resolveError) {
+ tunnelLogger.error(
+ "Failed to resolve endpoint host",
+ resolveError,
+ {
+ operation: "tunnel_connect_resolve_endpoint_failed",
+ tunnelName,
+ endpointHost: tunnelConfig.endpointHost,
+ },
+ );
+ throw new Error(
+ `Failed to resolve endpoint host: ${resolveError instanceof Error ? resolveError.message : "Unknown error"}`,
+ );
+ }
+ }
-app.post("/ssh/tunnel/cancel", (req, res) => {
- const { tunnelName } = req.body;
+ tunnelConfigs.set(tunnelName, tunnelConfig);
+ await connectSSHTunnel(tunnelConfig, 0);
+ })();
- if (!tunnelName) {
- return res.status(400).json({ error: "Tunnel name required" });
- }
+ pendingTunnelOperations.set(tunnelName, operation);
- retryCounters.delete(tunnelName);
- retryExhaustedTunnels.delete(tunnelName);
+ res.json({ message: "Connection request received", tunnelName });
- if (activeRetryTimers.has(tunnelName)) {
- clearTimeout(activeRetryTimers.get(tunnelName)!);
- activeRetryTimers.delete(tunnelName);
- }
+ operation.finally(() => {
+ pendingTunnelOperations.delete(tunnelName);
+ });
+ } catch (error) {
+ tunnelLogger.error("Failed to process tunnel connect", error, {
+ operation: "tunnel_connect",
+ tunnelName,
+ userId,
+ });
+ res.status(500).json({ error: "Failed to connect tunnel" });
+ }
+ },
+);
- if (countdownIntervals.has(tunnelName)) {
- clearInterval(countdownIntervals.get(tunnelName)!);
- countdownIntervals.delete(tunnelName);
- }
+app.post(
+ "/ssh/tunnel/disconnect",
+ authenticateJWT,
+ async (req: AuthenticatedRequest, res: Response) => {
+ const { tunnelName } = req.body;
+ const userId = req.userId;
- cleanupTunnelResources(tunnelName, true);
+ if (!userId) {
+ return res.status(401).json({ error: "Authentication required" });
+ }
- broadcastTunnelStatus(tunnelName, {
- connected: false,
- status: CONNECTION_STATES.DISCONNECTED,
- manualDisconnect: true,
- });
+ if (!tunnelName) {
+ return res.status(400).json({ error: "Tunnel name required" });
+ }
- const tunnelConfig = tunnelConfigs.get(tunnelName) || null;
- handleDisconnect(tunnelName, tunnelConfig, false);
+ try {
+ const config = tunnelConfigs.get(tunnelName);
+ if (config && config.sourceHostId) {
+ const accessInfo = await permissionManager.canAccessHost(
+ userId,
+ config.sourceHostId,
+ "read",
+ );
+ if (!accessInfo.hasAccess) {
+ return res.status(403).json({ error: "Access denied" });
+ }
+ }
- setTimeout(() => {
- manualDisconnects.delete(tunnelName);
- }, 5000);
+ manualDisconnects.add(tunnelName);
+ retryCounters.delete(tunnelName);
+ retryExhaustedTunnels.delete(tunnelName);
- res.json({ message: "Cancel request received", tunnelName });
-});
+ if (activeRetryTimers.has(tunnelName)) {
+ clearTimeout(activeRetryTimers.get(tunnelName)!);
+ activeRetryTimers.delete(tunnelName);
+ }
+
+ await cleanupTunnelResources(tunnelName, true);
+
+ broadcastTunnelStatus(tunnelName, {
+ connected: false,
+ status: CONNECTION_STATES.DISCONNECTED,
+ manualDisconnect: true,
+ });
+
+ const tunnelConfig = tunnelConfigs.get(tunnelName) || null;
+ handleDisconnect(tunnelName, tunnelConfig, false);
+
+ setTimeout(() => {
+ manualDisconnects.delete(tunnelName);
+ }, 5000);
+
+ res.json({ message: "Disconnect request received", tunnelName });
+ } catch (error) {
+ tunnelLogger.error("Failed to disconnect tunnel", error, {
+ operation: "tunnel_disconnect",
+ tunnelName,
+ userId,
+ });
+ res.status(500).json({ error: "Failed to disconnect tunnel" });
+ }
+ },
+);
+
+app.post(
+ "/ssh/tunnel/cancel",
+ authenticateJWT,
+ async (req: AuthenticatedRequest, res: Response) => {
+ const { tunnelName } = req.body;
+ const userId = req.userId;
+
+ if (!userId) {
+ return res.status(401).json({ error: "Authentication required" });
+ }
+
+ if (!tunnelName) {
+ return res.status(400).json({ error: "Tunnel name required" });
+ }
+
+ try {
+ const config = tunnelConfigs.get(tunnelName);
+ if (config && config.sourceHostId) {
+ const accessInfo = await permissionManager.canAccessHost(
+ userId,
+ config.sourceHostId,
+ "read",
+ );
+ if (!accessInfo.hasAccess) {
+ return res.status(403).json({ error: "Access denied" });
+ }
+ }
+
+ retryCounters.delete(tunnelName);
+ retryExhaustedTunnels.delete(tunnelName);
+
+ if (activeRetryTimers.has(tunnelName)) {
+ clearTimeout(activeRetryTimers.get(tunnelName)!);
+ activeRetryTimers.delete(tunnelName);
+ }
+
+ if (countdownIntervals.has(tunnelName)) {
+ clearInterval(countdownIntervals.get(tunnelName)!);
+ countdownIntervals.delete(tunnelName);
+ }
+
+ await cleanupTunnelResources(tunnelName, true);
+
+ broadcastTunnelStatus(tunnelName, {
+ connected: false,
+ status: CONNECTION_STATES.DISCONNECTED,
+ manualDisconnect: true,
+ });
+
+ const tunnelConfig = tunnelConfigs.get(tunnelName) || null;
+ handleDisconnect(tunnelName, tunnelConfig, false);
+
+ setTimeout(() => {
+ manualDisconnects.delete(tunnelName);
+ }, 5000);
+
+ res.json({ message: "Cancel request received", tunnelName });
+ } catch (error) {
+ tunnelLogger.error("Failed to cancel tunnel retry", error, {
+ operation: "tunnel_cancel",
+ tunnelName,
+ userId,
+ });
+ res.status(500).json({ error: "Failed to cancel tunnel retry" });
+ }
+ },
+);
async function initializeAutoStartTunnels(): Promise {
try {
@@ -1408,12 +1795,19 @@ async function initializeAutoStartTunnels(): Promise {
);
if (endpointHost) {
+ const tunnelIndex =
+ host.tunnelConnections.indexOf(tunnelConnection);
const tunnelConfig: TunnelConfig = {
- name: `${host.name || `${host.username}@${host.ip}`}_${
- tunnelConnection.sourcePort
- }_${tunnelConnection.endpointHost}_${
- tunnelConnection.endpointPort
- }`,
+ name: normalizeTunnelName(
+ host.id,
+ tunnelIndex,
+ host.name || `${host.username}@${host.ip}`,
+ tunnelConnection.sourcePort,
+ tunnelConnection.endpointHost,
+ tunnelConnection.endpointPort,
+ ),
+ sourceHostId: host.id,
+ tunnelIndex: tunnelIndex,
hostName: host.name || `${host.username}@${host.ip}`,
sourceIP: host.ip,
sourceSSHPort: host.port,
@@ -1429,6 +1823,7 @@ async function initializeAutoStartTunnels(): Promise {
endpointIP: endpointHost.ip,
endpointSSHPort: endpointHost.port,
endpointUsername: endpointHost.username,
+ endpointHost: tunnelConnection.endpointHost,
endpointPassword:
tunnelConnection.endpointPassword ||
endpointHost.autostartPassword ||
@@ -1453,6 +1848,11 @@ async function initializeAutoStartTunnels(): Promise {
retryInterval: tunnelConnection.retryInterval * 1000,
autoStart: tunnelConnection.autoStart,
isPinned: host.pin,
+ useSocks5: host.useSocks5,
+ socks5Host: host.socks5Host,
+ socks5Port: host.socks5Port,
+ socks5Username: host.socks5Username,
+ socks5Password: host.socks5Password,
};
autoStartTunnels.push(tunnelConfig);
diff --git a/src/backend/ssh/widgets/common-utils.ts b/src/backend/ssh/widgets/common-utils.ts
index bf8bf6aa..5394ba56 100644
--- a/src/backend/ssh/widgets/common-utils.ts
+++ b/src/backend/ssh/widgets/common-utils.ts
@@ -3,28 +3,87 @@ import type { Client } from "ssh2";
export function execCommand(
client: Client,
command: string,
+ timeoutMs = 30000,
): Promise<{
stdout: string;
stderr: string;
code: number | null;
}> {
return new Promise((resolve, reject) => {
- client.exec(command, { pty: false }, (err, stream) => {
- if (err) return reject(err);
+ let settled = false;
+ let stream: any = null;
+
+ const timeout = setTimeout(() => {
+ if (!settled) {
+ settled = true;
+ cleanup();
+ reject(new Error(`Command timeout after ${timeoutMs}ms: ${command}`));
+ }
+ }, timeoutMs);
+
+ const cleanup = () => {
+ clearTimeout(timeout);
+ if (stream) {
+ try {
+ stream.removeAllListeners();
+ if (stream.stderr) {
+ stream.stderr.removeAllListeners();
+ }
+ stream.destroy();
+ } catch (error) {
+ // Ignore cleanup errors
+ }
+ }
+ };
+
+ client.exec(command, { pty: false }, (err, _stream) => {
+ if (err) {
+ if (!settled) {
+ settled = true;
+ cleanup();
+ reject(err);
+ }
+ return;
+ }
+
+ stream = _stream;
let stdout = "";
let stderr = "";
let exitCode: number | null = null;
+
stream
.on("close", (code: number | undefined) => {
- exitCode = typeof code === "number" ? code : null;
- resolve({ stdout, stderr, code: exitCode });
+ if (!settled) {
+ settled = true;
+ exitCode = typeof code === "number" ? code : null;
+ cleanup();
+ resolve({ stdout, stderr, code: exitCode });
+ }
})
.on("data", (data: Buffer) => {
stdout += data.toString("utf8");
})
- .stderr.on("data", (data: Buffer) => {
- stderr += data.toString("utf8");
+ .on("error", (streamErr: Error) => {
+ if (!settled) {
+ settled = true;
+ cleanup();
+ reject(streamErr);
+ }
});
+
+ if (stream.stderr) {
+ stream.stderr
+ .on("data", (data: Buffer) => {
+ stderr += data.toString("utf8");
+ })
+ .on("error", (stderrErr: Error) => {
+ if (!settled) {
+ settled = true;
+ cleanup();
+ reject(stderrErr);
+ }
+ });
+ }
});
});
}
diff --git a/src/backend/ssh/widgets/cpu-collector.ts b/src/backend/ssh/widgets/cpu-collector.ts
index 359ae6ad..90eb579b 100644
--- a/src/backend/ssh/widgets/cpu-collector.ts
+++ b/src/backend/ssh/widgets/cpu-collector.ts
@@ -26,12 +26,20 @@ export async function collectCpuMetrics(client: Client): Promise<{
let loadTriplet: [number, number, number] | null = null;
try {
- const [stat1, loadAvgOut, coresOut] = await Promise.all([
- execCommand(client, "cat /proc/stat"),
- execCommand(client, "cat /proc/loadavg"),
- execCommand(
- client,
- "nproc 2>/dev/null || grep -c ^processor /proc/cpuinfo",
+ const [stat1, loadAvgOut, coresOut] = await Promise.race([
+ Promise.all([
+ execCommand(client, "cat /proc/stat"),
+ execCommand(client, "cat /proc/loadavg"),
+ execCommand(
+ client,
+ "nproc 2>/dev/null || grep -c ^processor /proc/cpuinfo",
+ ),
+ ]),
+ new Promise((_, reject) =>
+ setTimeout(
+ () => reject(new Error("CPU metrics collection timeout")),
+ 25000,
+ ),
),
]);
diff --git a/src/backend/ssh/widgets/login-stats-collector.ts b/src/backend/ssh/widgets/login-stats-collector.ts
index b34f3d80..a3894e74 100644
--- a/src/backend/ssh/widgets/login-stats-collector.ts
+++ b/src/backend/ssh/widgets/login-stats-collector.ts
@@ -1,5 +1,6 @@
import type { Client } from "ssh2";
import { execCommand } from "./common-utils.js";
+import { statsLogger } from "../../utils/logger.js";
export interface LoginRecord {
user: string;
@@ -46,10 +47,20 @@ export async function collectLoginStats(client: Client): Promise {
const timeStr = parts.slice(timeStart, timeStart + 5).join(" ");
if (user && user !== "wtmp" && tty !== "system") {
+ let parsedTime: string;
+ try {
+ const date = new Date(timeStr);
+ parsedTime = isNaN(date.getTime())
+ ? new Date().toISOString()
+ : date.toISOString();
+ } catch (e) {
+ parsedTime = new Date().toISOString();
+ }
+
recentLogins.push({
user,
ip,
- time: new Date(timeStr).toISOString(),
+ time: parsedTime,
status: "success",
});
if (ip !== "local") {
@@ -59,9 +70,7 @@ export async function collectLoginStats(client: Client): Promise {
}
}
}
- } catch (e) {
- // Ignore errors
- }
+ } catch (e) {}
try {
const failedOut = await execCommand(
@@ -96,12 +105,20 @@ export async function collectLoginStats(client: Client): Promise {
}
if (user && ip) {
+ let parsedTime: string;
+ try {
+ const date = timeStr ? new Date(timeStr) : new Date();
+ parsedTime = isNaN(date.getTime())
+ ? new Date().toISOString()
+ : date.toISOString();
+ } catch (e) {
+ parsedTime = new Date().toISOString();
+ }
+
failedLogins.push({
user,
ip,
- time: timeStr
- ? new Date(timeStr).toISOString()
- : new Date().toISOString(),
+ time: parsedTime,
status: "failed",
});
if (ip !== "unknown") {
@@ -109,9 +126,7 @@ export async function collectLoginStats(client: Client): Promise {
}
}
}
- } catch (e) {
- // Ignore errors
- }
+ } catch (e) {}
return {
recentLogins: recentLogins.slice(0, 10),
diff --git a/src/backend/ssh/widgets/network-collector.ts b/src/backend/ssh/widgets/network-collector.ts
index bd3a3bd9..c24b75e6 100644
--- a/src/backend/ssh/widgets/network-collector.ts
+++ b/src/backend/ssh/widgets/network-collector.ts
@@ -68,12 +68,7 @@ export async function collectNetworkMetrics(client: Client): Promise<{
txBytes: null,
});
}
- } catch (e) {
- statsLogger.debug("Failed to collect network interface stats", {
- operation: "network_stats_failed",
- error: e instanceof Error ? e.message : String(e),
- });
- }
+ } catch (e) {}
return { interfaces };
}
diff --git a/src/backend/ssh/widgets/processes-collector.ts b/src/backend/ssh/widgets/processes-collector.ts
index 1ee0f51c..09d62612 100644
--- a/src/backend/ssh/widgets/processes-collector.ts
+++ b/src/backend/ssh/widgets/processes-collector.ts
@@ -33,11 +33,13 @@ export async function collectProcessesMetrics(client: Client): Promise<{
for (let i = 1; i < Math.min(psLines.length, 11); i++) {
const parts = psLines[i].split(/\s+/);
if (parts.length >= 11) {
+ const cpuVal = Number(parts[2]);
+ const memVal = Number(parts[3]);
topProcesses.push({
pid: parts[1],
user: parts[0],
- cpu: parts[2],
- mem: parts[3],
+ cpu: Number.isFinite(cpuVal) ? cpuVal.toString() : "0",
+ mem: Number.isFinite(memVal) ? memVal.toString() : "0",
command: parts.slice(10).join(" ").substring(0, 50),
});
}
@@ -46,14 +48,13 @@ export async function collectProcessesMetrics(client: Client): Promise<{
const procCount = await execCommand(client, "ps aux | wc -l");
const runningCount = await execCommand(client, "ps aux | grep -c ' R '");
- totalProcesses = Number(procCount.stdout.trim()) - 1;
- runningProcesses = Number(runningCount.stdout.trim());
- } catch (e) {
- statsLogger.debug("Failed to collect process stats", {
- operation: "process_stats_failed",
- error: e instanceof Error ? e.message : String(e),
- });
- }
+
+ const totalCount = Number(procCount.stdout.trim()) - 1;
+ totalProcesses = Number.isFinite(totalCount) ? totalCount : null;
+
+ const runningCount2 = Number(runningCount.stdout.trim());
+ runningProcesses = Number.isFinite(runningCount2) ? runningCount2 : null;
+ } catch (e) {}
return {
total: totalProcesses,
diff --git a/src/backend/ssh/widgets/system-collector.ts b/src/backend/ssh/widgets/system-collector.ts
index e62c3ed0..c5007d55 100644
--- a/src/backend/ssh/widgets/system-collector.ts
+++ b/src/backend/ssh/widgets/system-collector.ts
@@ -23,10 +23,7 @@ export async function collectSystemMetrics(client: Client): Promise<{
kernel = kernelOut.stdout.trim() || null;
os = osOut.stdout.trim() || null;
} catch (e) {
- statsLogger.debug("Failed to collect system info", {
- operation: "system_info_failed",
- error: e instanceof Error ? e.message : String(e),
- });
+ // No error log
}
return {
diff --git a/src/backend/ssh/widgets/uptime-collector.ts b/src/backend/ssh/widgets/uptime-collector.ts
index 87e8dfcc..3571b8a0 100644
--- a/src/backend/ssh/widgets/uptime-collector.ts
+++ b/src/backend/ssh/widgets/uptime-collector.ts
@@ -21,12 +21,7 @@ export async function collectUptimeMetrics(client: Client): Promise<{
uptimeFormatted = `${days}d ${hours}h ${minutes}m`;
}
}
- } catch (e) {
- statsLogger.debug("Failed to collect uptime", {
- operation: "uptime_failed",
- error: e instanceof Error ? e.message : String(e),
- });
- }
+ } catch (e) {}
return {
seconds: uptimeSeconds,
diff --git a/src/backend/starter.ts b/src/backend/starter.ts
index b74c9b11..10bb8802 100644
--- a/src/backend/starter.ts
+++ b/src/backend/starter.ts
@@ -102,6 +102,8 @@ import { systemLogger, versionLogger } from "./utils/logger.js";
await import("./ssh/tunnel.js");
await import("./ssh/file-manager.js");
await import("./ssh/server-stats.js");
+ await import("./ssh/docker.js");
+ await import("./ssh/docker-console.js");
await import("./dashboard.js");
process.on("SIGINT", () => {
diff --git a/src/backend/utils/auth-manager.ts b/src/backend/utils/auth-manager.ts
index fd706176..10dd5662 100644
--- a/src/backend/utils/auth-manager.ts
+++ b/src/backend/utils/auth-manager.ts
@@ -154,9 +154,8 @@ class AuthManager {
return;
}
- const { getSqlite, saveMemoryDatabaseToFile } = await import(
- "../database/db/index.js"
- );
+ const { getSqlite, saveMemoryDatabaseToFile } =
+ await import("../database/db/index.js");
const sqlite = getSqlite();
@@ -169,6 +168,23 @@ class AuthManager {
if (migrationResult.migrated) {
await saveMemoryDatabaseToFile();
}
+
+ try {
+ const { CredentialSystemEncryptionMigration } =
+ await import("./credential-system-encryption-migration.js");
+ const credMigration = new CredentialSystemEncryptionMigration();
+ const credResult = await credMigration.migrateUserCredentials(userId);
+
+ if (credResult.migrated > 0) {
+ await saveMemoryDatabaseToFile();
+ }
+ } catch (error) {
+ databaseLogger.warn("Credential migration failed during login", {
+ operation: "login_credential_migration_failed",
+ userId,
+ error: error instanceof Error ? error.message : "Unknown error",
+ });
+ }
} catch (error) {
databaseLogger.error("Lazy encryption migration failed", error, {
operation: "lazy_encryption_migration_error",
@@ -231,9 +247,8 @@ class AuthManager {
});
try {
- const { saveMemoryDatabaseToFile } = await import(
- "../database/db/index.js"
- );
+ const { saveMemoryDatabaseToFile } =
+ await import("../database/db/index.js");
await saveMemoryDatabaseToFile();
} catch (saveError) {
databaseLogger.error(
@@ -334,9 +349,8 @@ class AuthManager {
await db.delete(sessions).where(eq(sessions.id, sessionId));
try {
- const { saveMemoryDatabaseToFile } = await import(
- "../database/db/index.js"
- );
+ const { saveMemoryDatabaseToFile } =
+ await import("../database/db/index.js");
await saveMemoryDatabaseToFile();
} catch (saveError) {
databaseLogger.error(
@@ -387,9 +401,8 @@ class AuthManager {
}
try {
- const { saveMemoryDatabaseToFile } = await import(
- "../database/db/index.js"
- );
+ const { saveMemoryDatabaseToFile } =
+ await import("../database/db/index.js");
await saveMemoryDatabaseToFile();
} catch (saveError) {
databaseLogger.error(
@@ -430,9 +443,8 @@ class AuthManager {
.where(sql`${sessions.expiresAt} < datetime('now')`);
try {
- const { saveMemoryDatabaseToFile } = await import(
- "../database/db/index.js"
- );
+ const { saveMemoryDatabaseToFile } =
+ await import("../database/db/index.js");
await saveMemoryDatabaseToFile();
} catch (saveError) {
databaseLogger.error(
@@ -568,9 +580,8 @@ class AuthManager {
.where(eq(sessions.id, payload.sessionId))
.then(async () => {
try {
- const { saveMemoryDatabaseToFile } = await import(
- "../database/db/index.js"
- );
+ const { saveMemoryDatabaseToFile } =
+ await import("../database/db/index.js");
await saveMemoryDatabaseToFile();
const remainingSessions = await db
@@ -714,9 +725,8 @@ class AuthManager {
await db.delete(sessions).where(eq(sessions.id, sessionId));
try {
- const { saveMemoryDatabaseToFile } = await import(
- "../database/db/index.js"
- );
+ const { saveMemoryDatabaseToFile } =
+ await import("../database/db/index.js");
await saveMemoryDatabaseToFile();
} catch (saveError) {
databaseLogger.error(
diff --git a/src/backend/utils/credential-system-encryption-migration.ts b/src/backend/utils/credential-system-encryption-migration.ts
new file mode 100644
index 00000000..ffabd66a
--- /dev/null
+++ b/src/backend/utils/credential-system-encryption-migration.ts
@@ -0,0 +1,131 @@
+import { db } from "../database/db/index.js";
+import { sshCredentials } from "../database/db/schema.js";
+import { eq, and, or, isNull } from "drizzle-orm";
+import { DataCrypto } from "./data-crypto.js";
+import { SystemCrypto } from "./system-crypto.js";
+import { FieldCrypto } from "./field-crypto.js";
+import { databaseLogger } from "./logger.js";
+
+export class CredentialSystemEncryptionMigration {
+ async migrateUserCredentials(userId: string): Promise<{
+ migrated: number;
+ failed: number;
+ skipped: number;
+ }> {
+ try {
+ const userDEK = DataCrypto.getUserDataKey(userId);
+ if (!userDEK) {
+ throw new Error("User must be logged in to migrate credentials");
+ }
+
+ const systemCrypto = SystemCrypto.getInstance();
+ const CSKEK = await systemCrypto.getCredentialSharingKey();
+
+ const credentials = await db
+ .select()
+ .from(sshCredentials)
+ .where(
+ and(
+ eq(sshCredentials.userId, userId),
+ or(
+ isNull(sshCredentials.systemPassword),
+ isNull(sshCredentials.systemKey),
+ isNull(sshCredentials.systemKeyPassword),
+ ),
+ ),
+ );
+
+ let migrated = 0;
+ let failed = 0;
+ const skipped = 0;
+
+ for (const cred of credentials) {
+ try {
+ const plainPassword = cred.password
+ ? FieldCrypto.decryptField(
+ cred.password,
+ userDEK,
+ cred.id.toString(),
+ "password",
+ )
+ : null;
+
+ const plainKey = cred.key
+ ? FieldCrypto.decryptField(
+ cred.key,
+ userDEK,
+ cred.id.toString(),
+ "key",
+ )
+ : null;
+
+ const plainKeyPassword = cred.key_password
+ ? FieldCrypto.decryptField(
+ cred.key_password,
+ userDEK,
+ cred.id.toString(),
+ "key_password",
+ )
+ : null;
+
+ const systemPassword = plainPassword
+ ? FieldCrypto.encryptField(
+ plainPassword,
+ CSKEK,
+ cred.id.toString(),
+ "password",
+ )
+ : null;
+
+ const systemKey = plainKey
+ ? FieldCrypto.encryptField(
+ plainKey,
+ CSKEK,
+ cred.id.toString(),
+ "key",
+ )
+ : null;
+
+ const systemKeyPassword = plainKeyPassword
+ ? FieldCrypto.encryptField(
+ plainKeyPassword,
+ CSKEK,
+ cred.id.toString(),
+ "key_password",
+ )
+ : null;
+
+ await db
+ .update(sshCredentials)
+ .set({
+ systemPassword,
+ systemKey,
+ systemKeyPassword,
+ updatedAt: new Date().toISOString(),
+ })
+ .where(eq(sshCredentials.id, cred.id));
+
+ migrated++;
+ } catch (error) {
+ databaseLogger.error("Failed to migrate credential", error, {
+ credentialId: cred.id,
+ userId,
+ });
+ failed++;
+ }
+ }
+ return { migrated, failed, skipped };
+ } catch (error) {
+ databaseLogger.error(
+ "Credential system encryption migration failed",
+ error,
+ {
+ operation: "credential_migration_failed",
+ userId,
+ error: error instanceof Error ? error.message : "Unknown error",
+ },
+ );
+ throw error;
+ }
+ }
+}
diff --git a/src/backend/utils/data-crypto.ts b/src/backend/utils/data-crypto.ts
index 462d2956..4513b62c 100644
--- a/src/backend/utils/data-crypto.ts
+++ b/src/backend/utils/data-crypto.ts
@@ -475,6 +475,52 @@ class DataCrypto {
return false;
}
}
+
+ /**
+ * Encrypt sensitive credential fields with system key for offline sharing
+ * Returns an object with systemPassword, systemKey, systemKeyPassword fields
+ */
+ static async encryptRecordWithSystemKey>(
+ tableName: string,
+ record: T,
+ systemKey: Buffer,
+ ): Promise> {
+ const systemEncrypted: Record = {};
+ const recordId = record.id || "temp-" + Date.now();
+
+ if (tableName !== "ssh_credentials") {
+ return systemEncrypted as Partial;
+ }
+
+ if (record.password && typeof record.password === "string") {
+ systemEncrypted.systemPassword = FieldCrypto.encryptField(
+ record.password as string,
+ systemKey,
+ recordId as string,
+ "password",
+ );
+ }
+
+ if (record.key && typeof record.key === "string") {
+ systemEncrypted.systemKey = FieldCrypto.encryptField(
+ record.key as string,
+ systemKey,
+ recordId as string,
+ "key",
+ );
+ }
+
+ if (record.key_password && typeof record.key_password === "string") {
+ systemEncrypted.systemKeyPassword = FieldCrypto.encryptField(
+ record.key_password as string,
+ systemKey,
+ recordId as string,
+ "key_password",
+ );
+ }
+
+ return systemEncrypted as Partial;
+ }
}
export { DataCrypto };
diff --git a/src/backend/utils/database-file-encryption.ts b/src/backend/utils/database-file-encryption.ts
index f0adc96a..8ace6c46 100644
--- a/src/backend/utils/database-file-encryption.ts
+++ b/src/backend/utils/database-file-encryption.ts
@@ -327,11 +327,7 @@ class DatabaseFileEncryption {
fs.accessSync(envPath, fs.constants.R_OK);
envFileReadable = true;
}
- } catch (error) {
- databaseLogger.debug("Operation failed, continuing", {
- error: error instanceof Error ? error.message : String(error),
- });
- }
+ } catch (error) {}
databaseLogger.error(
"Database decryption authentication failed - possible causes: wrong DATABASE_KEY, corrupted files, or interrupted write",
diff --git a/src/backend/utils/logger.ts b/src/backend/utils/logger.ts
index 41f44982..cb5ff611 100644
--- a/src/backend/utils/logger.ts
+++ b/src/backend/utils/logger.ts
@@ -36,7 +36,7 @@ const SENSITIVE_FIELDS = [
const TRUNCATE_FIELDS = ["data", "content", "body", "response", "request"];
-class Logger {
+export class Logger {
private serviceName: string;
private serviceIcon: string;
private serviceColor: string;
diff --git a/src/backend/utils/permission-manager.ts b/src/backend/utils/permission-manager.ts
new file mode 100644
index 00000000..fdaafb2b
--- /dev/null
+++ b/src/backend/utils/permission-manager.ts
@@ -0,0 +1,436 @@
+import type { Request, Response, NextFunction } from "express";
+import { db } from "../database/db/index.js";
+import {
+ hostAccess,
+ roles,
+ userRoles,
+ sshData,
+ users,
+} from "../database/db/schema.js";
+import { eq, and, or, isNull, gte, sql } from "drizzle-orm";
+import { databaseLogger } from "./logger.js";
+
+interface AuthenticatedRequest extends Request {
+ userId?: string;
+ dataKey?: Buffer;
+}
+
+interface HostAccessInfo {
+ hasAccess: boolean;
+ isOwner: boolean;
+ isShared: boolean;
+ permissionLevel?: "view";
+ expiresAt?: string | null;
+}
+
+interface PermissionCheckResult {
+ allowed: boolean;
+ reason?: string;
+}
+
+class PermissionManager {
+ private static instance: PermissionManager;
+ private permissionCache: Map<
+ string,
+ { permissions: string[]; timestamp: number }
+ >;
+ private readonly CACHE_TTL = 5 * 60 * 1000;
+
+ private constructor() {
+ this.permissionCache = new Map();
+
+ setInterval(() => {
+ this.cleanupExpiredAccess().catch((error) => {
+ databaseLogger.error(
+ "Failed to run periodic host access cleanup",
+ error,
+ {
+ operation: "host_access_cleanup_periodic",
+ },
+ );
+ });
+ }, 60 * 1000);
+
+ setInterval(() => {
+ this.clearPermissionCache();
+ }, this.CACHE_TTL);
+ }
+
+ static getInstance(): PermissionManager {
+ if (!this.instance) {
+ this.instance = new PermissionManager();
+ }
+ return this.instance;
+ }
+
+ /**
+ * Clean up expired host access entries
+ */
+ private async cleanupExpiredAccess(): Promise<void> {
+ try {
+ const now = new Date().toISOString();
+ const result = await db
+ .delete(hostAccess)
+ .where(
+ and(
+ sql`${hostAccess.expiresAt} IS NOT NULL`,
+ sql`${hostAccess.expiresAt} <= ${now}`,
+ ),
+ )
+ .returning({ id: hostAccess.id });
+ } catch (error) {
+ databaseLogger.error("Failed to cleanup expired host access", error, {
+ operation: "host_access_cleanup_failed",
+ });
+ }
+ }
+
+ /**
+ * Clear permission cache
+ */
+ private clearPermissionCache(): void {
+ this.permissionCache.clear();
+ }
+
+ /**
+ * Invalidate permission cache for a specific user
+ */
+ invalidateUserPermissionCache(userId: string): void {
+ this.permissionCache.delete(userId);
+ }
+
+ /**
+ * Get user permissions from roles
+ */
+ async getUserPermissions(userId: string): Promise<string[]> {
+ const cached = this.permissionCache.get(userId);
+ if (cached && Date.now() - cached.timestamp < this.CACHE_TTL) {
+ return cached.permissions;
+ }
+
+ try {
+ const userRoleRecords = await db
+ .select({
+ permissions: roles.permissions,
+ })
+ .from(userRoles)
+ .innerJoin(roles, eq(userRoles.roleId, roles.id))
+ .where(eq(userRoles.userId, userId));
+
+ const allPermissions = new Set<string>();
+ for (const record of userRoleRecords) {
+ try {
+ const permissions = JSON.parse(record.permissions) as string[];
+ for (const perm of permissions) {
+ allPermissions.add(perm);
+ }
+ } catch (parseError) {
+ databaseLogger.warn("Failed to parse role permissions", {
+ operation: "get_user_permissions",
+ userId,
+ error: parseError,
+ });
+ }
+ }
+
+ const permissionsArray = Array.from(allPermissions);
+
+ this.permissionCache.set(userId, {
+ permissions: permissionsArray,
+ timestamp: Date.now(),
+ });
+
+ return permissionsArray;
+ } catch (error) {
+ databaseLogger.error("Failed to get user permissions", error, {
+ operation: "get_user_permissions",
+ userId,
+ });
+ return [];
+ }
+ }
+
+ /**
+ * Check if user has a specific permission
+ * Supports wildcards: "hosts.*", "*"
+ */
+ async hasPermission(userId: string, permission: string): Promise<boolean> {
+ const userPermissions = await this.getUserPermissions(userId);
+
+ if (userPermissions.includes("*")) {
+ return true;
+ }
+
+ if (userPermissions.includes(permission)) {
+ return true;
+ }
+
+ const parts = permission.split(".");
+ for (let i = parts.length; i > 0; i--) {
+ const wildcardPermission = parts.slice(0, i).join(".") + ".*";
+ if (userPermissions.includes(wildcardPermission)) {
+ return true;
+ }
+ }
+
+ return false;
+ }
+
+ /**
+ * Check if user can access a specific host
+ */
+ async canAccessHost(
+ userId: string,
+ hostId: number,
+ action: "read" | "write" | "execute" | "delete" | "share" = "read",
+ ): Promise<HostAccessInfo> {
+ try {
+ const host = await db
+ .select()
+ .from(sshData)
+ .where(and(eq(sshData.id, hostId), eq(sshData.userId, userId)))
+ .limit(1);
+
+ if (host.length > 0) {
+ return {
+ hasAccess: true,
+ isOwner: true,
+ isShared: false,
+ };
+ }
+
+ const userRoleIds = await db
+ .select({ roleId: userRoles.roleId })
+ .from(userRoles)
+ .where(eq(userRoles.userId, userId));
+ const roleIds = userRoleIds.map((r) => r.roleId);
+
+ const now = new Date().toISOString();
+ const sharedAccess = await db
+ .select()
+ .from(hostAccess)
+ .where(
+ and(
+ eq(hostAccess.hostId, hostId),
+ or(
+ eq(hostAccess.userId, userId),
+ roleIds.length > 0
+ ? sql`${hostAccess.roleId} IN (${sql.join(
+ roleIds.map((id) => sql`${id}`),
+ sql`, `,
+ )})`
+ : sql`false`,
+ ),
+ or(isNull(hostAccess.expiresAt), gte(hostAccess.expiresAt, now)),
+ ),
+ )
+ .limit(1);
+
+ if (sharedAccess.length > 0) {
+ const access = sharedAccess[0];
+
+ if (action === "write" || action === "delete") {
+ return {
+ hasAccess: false,
+ isOwner: false,
+ isShared: true,
+ permissionLevel: access.permissionLevel as "view",
+ expiresAt: access.expiresAt,
+ };
+ }
+
+ try {
+ await db
+ .update(hostAccess)
+ .set({
+ lastAccessedAt: now,
+ })
+ .where(eq(hostAccess.id, access.id));
+ } catch (error) {
+ databaseLogger.warn("Failed to update host access timestamp", {
+ operation: "update_host_access_timestamp",
+ error,
+ });
+ }
+
+ return {
+ hasAccess: true,
+ isOwner: false,
+ isShared: true,
+ permissionLevel: access.permissionLevel as "view",
+ expiresAt: access.expiresAt,
+ };
+ }
+
+ return {
+ hasAccess: false,
+ isOwner: false,
+ isShared: false,
+ };
+ } catch (error) {
+ databaseLogger.error("Failed to check host access", error, {
+ operation: "can_access_host",
+ userId,
+ hostId,
+ action,
+ });
+ return {
+ hasAccess: false,
+ isOwner: false,
+ isShared: false,
+ };
+ }
+ }
+
+ /**
+ * Check if user is admin (backward compatibility)
+ */
+ async isAdmin(userId: string): Promise<boolean> {
+ try {
+ const user = await db
+ .select({ isAdmin: users.is_admin })
+ .from(users)
+ .where(eq(users.id, userId))
+ .limit(1);
+
+ if (user.length > 0 && user[0].isAdmin) {
+ return true;
+ }
+
+ const adminRoles = await db
+ .select({ roleName: roles.name })
+ .from(userRoles)
+ .innerJoin(roles, eq(userRoles.roleId, roles.id))
+ .where(
+ and(
+ eq(userRoles.userId, userId),
+ or(eq(roles.name, "admin"), eq(roles.name, "super_admin")),
+ ),
+ );
+
+ return adminRoles.length > 0;
+ } catch (error) {
+ databaseLogger.error("Failed to check admin status", error, {
+ operation: "is_admin",
+ userId,
+ });
+ return false;
+ }
+ }
+
+ /**
+ * Middleware: Require specific permission
+ */
+ requirePermission(permission: string) {
+ return async (
+ req: AuthenticatedRequest,
+ res: Response,
+ next: NextFunction,
+ ) => {
+ const userId = req.userId;
+
+ if (!userId) {
+ return res.status(401).json({ error: "Not authenticated" });
+ }
+
+ const hasPermission = await this.hasPermission(userId, permission);
+
+ if (!hasPermission) {
+ databaseLogger.warn("Permission denied", {
+ operation: "permission_check",
+ userId,
+ permission,
+ path: req.path,
+ });
+
+ return res.status(403).json({
+ error: "Insufficient permissions",
+ required: permission,
+ });
+ }
+
+ next();
+ };
+ }
+
+ /**
+ * Middleware: Require host access
+ */
+ requireHostAccess(
+ hostIdParam: string = "id",
+ action: "read" | "write" | "execute" | "delete" | "share" = "read",
+ ) {
+ return async (
+ req: AuthenticatedRequest,
+ res: Response,
+ next: NextFunction,
+ ) => {
+ const userId = req.userId;
+
+ if (!userId) {
+ return res.status(401).json({ error: "Not authenticated" });
+ }
+
+ const hostId = parseInt(req.params[hostIdParam], 10);
+
+ if (isNaN(hostId)) {
+ return res.status(400).json({ error: "Invalid host ID" });
+ }
+
+ const accessInfo = await this.canAccessHost(userId, hostId, action);
+
+ if (!accessInfo.hasAccess) {
+ databaseLogger.warn("Host access denied", {
+ operation: "host_access_check",
+ userId,
+ hostId,
+ action,
+ });
+
+ return res.status(403).json({
+ error: "Access denied to host",
+ hostId,
+ action,
+ });
+ }
+
+ (req as any).hostAccessInfo = accessInfo;
+
+ next();
+ };
+ }
+
+ /**
+ * Middleware: Require admin role (backward compatible)
+ */
+ requireAdmin() {
+ return async (
+ req: AuthenticatedRequest,
+ res: Response,
+ next: NextFunction,
+ ) => {
+ const userId = req.userId;
+
+ if (!userId) {
+ return res.status(401).json({ error: "Not authenticated" });
+ }
+
+ const isAdmin = await this.isAdmin(userId);
+
+ if (!isAdmin) {
+ databaseLogger.warn("Admin access denied", {
+ operation: "admin_check",
+ userId,
+ path: req.path,
+ });
+
+ return res.status(403).json({ error: "Admin access required" });
+ }
+
+ next();
+ };
+ }
+}
+
+export { PermissionManager };
+export type { AuthenticatedRequest, HostAccessInfo, PermissionCheckResult };
diff --git a/src/backend/utils/shared-credential-manager.ts b/src/backend/utils/shared-credential-manager.ts
new file mode 100644
index 00000000..8fc0114a
--- /dev/null
+++ b/src/backend/utils/shared-credential-manager.ts
@@ -0,0 +1,700 @@
+import { db } from "../database/db/index.js";
+import {
+ sharedCredentials,
+ sshCredentials,
+ hostAccess,
+ users,
+ userRoles,
+ sshData,
+} from "../database/db/schema.js";
+import { eq, and } from "drizzle-orm";
+import { DataCrypto } from "./data-crypto.js";
+import { FieldCrypto } from "./field-crypto.js";
+import { databaseLogger } from "./logger.js";
+
+interface CredentialData {
+ username: string;
+ authType: string;
+ password?: string;
+ key?: string;
+ keyPassword?: string;
+ keyType?: string;
+}
+
+/**
+ * Manages shared credentials for RBAC host sharing.
+ * Creates per-user encrypted credential copies to enable credential sharing
+ * without requiring the credential owner to be online.
+ */
+class SharedCredentialManager {
+ private static instance: SharedCredentialManager;
+
+ private constructor() {}
+
+ static getInstance(): SharedCredentialManager {
+ if (!this.instance) {
+ this.instance = new SharedCredentialManager();
+ }
+ return this.instance;
+ }
+
+ /**
+ * Create shared credential for a specific user
+ * Called when sharing a host with a user
+ */
+ async createSharedCredentialForUser(
+ hostAccessId: number,
+ originalCredentialId: number,
+ targetUserId: string,
+ ownerId: string,
+ ): Promise<void> {
+ try {
+ const ownerDEK = DataCrypto.getUserDataKey(ownerId);
+
+ if (ownerDEK) {
+ const targetDEK = DataCrypto.getUserDataKey(targetUserId);
+ if (!targetDEK) {
+ await this.createPendingSharedCredential(
+ hostAccessId,
+ originalCredentialId,
+ targetUserId,
+ );
+ return;
+ }
+
+ const credentialData = await this.getDecryptedCredential(
+ originalCredentialId,
+ ownerId,
+ ownerDEK,
+ );
+
+ const encryptedForTarget = this.encryptCredentialForUser(
+ credentialData,
+ targetUserId,
+ targetDEK,
+ hostAccessId,
+ );
+
+ await db.insert(sharedCredentials).values({
+ hostAccessId,
+ originalCredentialId,
+ targetUserId,
+ ...encryptedForTarget,
+ needsReEncryption: false,
+ });
+ } else {
+ const targetDEK = DataCrypto.getUserDataKey(targetUserId);
+ if (!targetDEK) {
+ await this.createPendingSharedCredential(
+ hostAccessId,
+ originalCredentialId,
+ targetUserId,
+ );
+ return;
+ }
+
+ const credentialData =
+ await this.getDecryptedCredentialViaSystemKey(originalCredentialId);
+
+ const encryptedForTarget = this.encryptCredentialForUser(
+ credentialData,
+ targetUserId,
+ targetDEK,
+ hostAccessId,
+ );
+
+ await db.insert(sharedCredentials).values({
+ hostAccessId,
+ originalCredentialId,
+ targetUserId,
+ ...encryptedForTarget,
+ needsReEncryption: false,
+ });
+ }
+ } catch (error) {
+ databaseLogger.error("Failed to create shared credential", error, {
+ operation: "create_shared_credential",
+ hostAccessId,
+ targetUserId,
+ });
+ throw error;
+ }
+ }
+
+ /**
+ * Create shared credentials for all users in a role
+ * Called when sharing a host with a role
+ */
+ async createSharedCredentialsForRole(
+ hostAccessId: number,
+ originalCredentialId: number,
+ roleId: number,
+ ownerId: string,
+ ): Promise<void> {
+ try {
+ const roleUsers = await db
+ .select({ userId: userRoles.userId })
+ .from(userRoles)
+ .where(eq(userRoles.roleId, roleId));
+
+ for (const { userId } of roleUsers) {
+ try {
+ await this.createSharedCredentialForUser(
+ hostAccessId,
+ originalCredentialId,
+ userId,
+ ownerId,
+ );
+ } catch (error) {
+ databaseLogger.error(
+ "Failed to create shared credential for role member",
+ error,
+ {
+ operation: "create_shared_credentials_role",
+ hostAccessId,
+ roleId,
+ userId,
+ },
+ );
+ }
+ }
+ } catch (error) {
+ databaseLogger.error(
+ "Failed to create shared credentials for role",
+ error,
+ {
+ operation: "create_shared_credentials_role",
+ hostAccessId,
+ roleId,
+ },
+ );
+ throw error;
+ }
+ }
+
+ /**
+ * Get credential data for a shared user
+ * Called when a shared user connects to a host
+ */
+ async getSharedCredentialForUser(
+ hostId: number,
+ userId: string,
+ ): Promise<CredentialData | null> {
+ try {
+ const userDEK = DataCrypto.getUserDataKey(userId);
+ if (!userDEK) {
+ throw new Error(`User ${userId} data not unlocked`);
+ }
+
+ const sharedCred = await db
+ .select()
+ .from(sharedCredentials)
+ .innerJoin(
+ hostAccess,
+ eq(sharedCredentials.hostAccessId, hostAccess.id),
+ )
+ .where(
+ and(
+ eq(hostAccess.hostId, hostId),
+ eq(sharedCredentials.targetUserId, userId),
+ ),
+ )
+ .limit(1);
+
+ if (sharedCred.length === 0) {
+ return null;
+ }
+
+ const cred = sharedCred[0].shared_credentials;
+
+ if (cred.needsReEncryption) {
+ databaseLogger.warn(
+ "Shared credential needs re-encryption but cannot be accessed yet",
+ {
+ operation: "get_shared_credential_pending",
+ hostId,
+ userId,
+ },
+ );
+ return null;
+ }
+
+ return this.decryptSharedCredential(cred, userDEK);
+ } catch (error) {
+ databaseLogger.error("Failed to get shared credential", error, {
+ operation: "get_shared_credential",
+ hostId,
+ userId,
+ });
+ throw error;
+ }
+ }
+
+ /**
+ * Update all shared credentials when original credential is updated
+ * Called when credential owner updates credential
+ */
+ async updateSharedCredentialsForOriginal(
+ credentialId: number,
+ ownerId: string,
+ ): Promise<void> {
+ try {
+ const sharedCreds = await db
+ .select()
+ .from(sharedCredentials)
+ .where(eq(sharedCredentials.originalCredentialId, credentialId));
+
+ const ownerDEK = DataCrypto.getUserDataKey(ownerId);
+ let credentialData: CredentialData;
+
+ if (ownerDEK) {
+ credentialData = await this.getDecryptedCredential(
+ credentialId,
+ ownerId,
+ ownerDEK,
+ );
+ } else {
+ try {
+ credentialData =
+ await this.getDecryptedCredentialViaSystemKey(credentialId);
+ } catch (error) {
+ databaseLogger.warn(
+ "Cannot update shared credentials: owner offline and credential not migrated",
+ {
+ operation: "update_shared_credentials_failed",
+ credentialId,
+ ownerId,
+ error: error instanceof Error ? error.message : "Unknown error",
+ },
+ );
+ await db
+ .update(sharedCredentials)
+ .set({ needsReEncryption: true })
+ .where(eq(sharedCredentials.originalCredentialId, credentialId));
+ return;
+ }
+ }
+
+ for (const sharedCred of sharedCreds) {
+ const targetDEK = DataCrypto.getUserDataKey(sharedCred.targetUserId);
+
+ if (!targetDEK) {
+ await db
+ .update(sharedCredentials)
+ .set({ needsReEncryption: true })
+ .where(eq(sharedCredentials.id, sharedCred.id));
+ continue;
+ }
+
+ const encryptedForTarget = this.encryptCredentialForUser(
+ credentialData,
+ sharedCred.targetUserId,
+ targetDEK,
+ sharedCred.hostAccessId,
+ );
+
+ await db
+ .update(sharedCredentials)
+ .set({
+ ...encryptedForTarget,
+ needsReEncryption: false,
+ updatedAt: new Date().toISOString(),
+ })
+ .where(eq(sharedCredentials.id, sharedCred.id));
+ }
+ } catch (error) {
+ databaseLogger.error("Failed to update shared credentials", error, {
+ operation: "update_shared_credentials",
+ credentialId,
+ });
+ }
+ }
+
+ /**
+ * Delete shared credentials when original credential is deleted
+ * Called from credential deletion route
+ */
+ async deleteSharedCredentialsForOriginal(
+ credentialId: number,
+ ): Promise<void> {
+ try {
+ const result = await db
+ .delete(sharedCredentials)
+ .where(eq(sharedCredentials.originalCredentialId, credentialId))
+ .returning({ id: sharedCredentials.id });
+ } catch (error) {
+ databaseLogger.error("Failed to delete shared credentials", error, {
+ operation: "delete_shared_credentials",
+ credentialId,
+ });
+ }
+ }
+
+ /**
+ * Re-encrypt pending shared credentials for a user when they log in
+ * Called during user login
+ */
+ async reEncryptPendingCredentialsForUser(userId: string): Promise<void> {
+ try {
+ const userDEK = DataCrypto.getUserDataKey(userId);
+ if (!userDEK) {
+ return;
+ }
+
+ const pendingCreds = await db
+ .select()
+ .from(sharedCredentials)
+ .where(
+ and(
+ eq(sharedCredentials.targetUserId, userId),
+ eq(sharedCredentials.needsReEncryption, true),
+ ),
+ );
+
+ for (const cred of pendingCreds) {
+ await this.reEncryptSharedCredential(cred.id, userId);
+ }
+ } catch (error) {
+ databaseLogger.error("Failed to re-encrypt pending credentials", error, {
+ operation: "reencrypt_pending_credentials",
+ userId,
+ });
+ }
+ }
+
+ private async getDecryptedCredential(
+ credentialId: number,
+ ownerId: string,
+ ownerDEK: Buffer,
+ ): Promise<CredentialData> {
+ const creds = await db
+ .select()
+ .from(sshCredentials)
+ .where(
+ and(
+ eq(sshCredentials.id, credentialId),
+ eq(sshCredentials.userId, ownerId),
+ ),
+ )
+ .limit(1);
+
+ if (creds.length === 0) {
+ throw new Error(`Credential ${credentialId} not found`);
+ }
+
+ const cred = creds[0];
+
+ return {
+ username: cred.username,
+ authType: cred.authType,
+ password: cred.password
+ ? this.decryptField(cred.password, ownerDEK, credentialId, "password")
+ : undefined,
+ key: cred.key
+ ? this.decryptField(cred.key, ownerDEK, credentialId, "key")
+ : undefined,
+ keyPassword: cred.key_password
+ ? this.decryptField(
+ cred.key_password,
+ ownerDEK,
+ credentialId,
+ "key_password",
+ )
+ : undefined,
+ keyType: cred.keyType,
+ };
+ }
+
+ /**
+ * Decrypt credential using system key (for offline sharing when owner is offline)
+ */
+ private async getDecryptedCredentialViaSystemKey(
+ credentialId: number,
+ ): Promise<CredentialData> {
+ const creds = await db
+ .select()
+ .from(sshCredentials)
+ .where(eq(sshCredentials.id, credentialId))
+ .limit(1);
+
+ if (creds.length === 0) {
+ throw new Error(`Credential ${credentialId} not found`);
+ }
+
+ const cred = creds[0];
+
+ if (!cred.systemPassword && !cred.systemKey && !cred.systemKeyPassword) {
+ throw new Error(
+ "Credential not yet migrated for offline sharing. " +
+ "Please ask credential owner to log in to enable sharing.",
+ );
+ }
+
+ const { SystemCrypto } = await import("./system-crypto.js");
+ const systemCrypto = SystemCrypto.getInstance();
+ const CSKEK = await systemCrypto.getCredentialSharingKey();
+
+ return {
+ username: cred.username,
+ authType: cred.authType,
+ password: cred.systemPassword
+ ? this.decryptField(
+ cred.systemPassword,
+ CSKEK,
+ credentialId,
+ "password",
+ )
+ : undefined,
+ key: cred.systemKey
+ ? this.decryptField(cred.systemKey, CSKEK, credentialId, "key")
+ : undefined,
+ keyPassword: cred.systemKeyPassword
+ ? this.decryptField(
+ cred.systemKeyPassword,
+ CSKEK,
+ credentialId,
+ "key_password",
+ )
+ : undefined,
+ keyType: cred.keyType,
+ };
+ }
+
+ private encryptCredentialForUser(
+ credentialData: CredentialData,
+ targetUserId: string,
+ targetDEK: Buffer,
+ hostAccessId: number,
+ ): {
+ encryptedUsername: string;
+ encryptedAuthType: string;
+ encryptedPassword: string | null;
+ encryptedKey: string | null;
+ encryptedKeyPassword: string | null;
+ encryptedKeyType: string | null;
+ } {
+ const recordId = `shared-${hostAccessId}-${targetUserId}`;
+
+ return {
+ encryptedUsername: FieldCrypto.encryptField(
+ credentialData.username,
+ targetDEK,
+ recordId,
+ "username",
+ ),
+ encryptedAuthType: credentialData.authType,
+ encryptedPassword: credentialData.password
+ ? FieldCrypto.encryptField(
+ credentialData.password,
+ targetDEK,
+ recordId,
+ "password",
+ )
+ : null,
+ encryptedKey: credentialData.key
+ ? FieldCrypto.encryptField(
+ credentialData.key,
+ targetDEK,
+ recordId,
+ "key",
+ )
+ : null,
+ encryptedKeyPassword: credentialData.keyPassword
+ ? FieldCrypto.encryptField(
+ credentialData.keyPassword,
+ targetDEK,
+ recordId,
+ "key_password",
+ )
+ : null,
+ encryptedKeyType: credentialData.keyType || null,
+ };
+ }
+
+ private decryptSharedCredential(
+ sharedCred: typeof sharedCredentials.$inferSelect,
+ userDEK: Buffer,
+ ): CredentialData {
+ const recordId = `shared-${sharedCred.hostAccessId}-${sharedCred.targetUserId}`;
+
+ return {
+ username: FieldCrypto.decryptField(
+ sharedCred.encryptedUsername,
+ userDEK,
+ recordId,
+ "username",
+ ),
+ authType: sharedCred.encryptedAuthType,
+ password: sharedCred.encryptedPassword
+ ? FieldCrypto.decryptField(
+ sharedCred.encryptedPassword,
+ userDEK,
+ recordId,
+ "password",
+ )
+ : undefined,
+ key: sharedCred.encryptedKey
+ ? FieldCrypto.decryptField(
+ sharedCred.encryptedKey,
+ userDEK,
+ recordId,
+ "key",
+ )
+ : undefined,
+ keyPassword: sharedCred.encryptedKeyPassword
+ ? FieldCrypto.decryptField(
+ sharedCred.encryptedKeyPassword,
+ userDEK,
+ recordId,
+ "key_password",
+ )
+ : undefined,
+ keyType: sharedCred.encryptedKeyType || undefined,
+ };
+ }
+
+ private decryptField(
+ encryptedValue: string,
+ dek: Buffer,
+ recordId: number | string,
+ fieldName: string,
+ ): string {
+ try {
+ return FieldCrypto.decryptField(
+ encryptedValue,
+ dek,
+ recordId.toString(),
+ fieldName,
+ );
+ } catch (error) {
+ databaseLogger.warn("Field decryption failed, returning as-is", {
+ operation: "decrypt_field",
+ fieldName,
+ recordId,
+ });
+ return encryptedValue;
+ }
+ }
+
+ private async createPendingSharedCredential(
+ hostAccessId: number,
+ originalCredentialId: number,
+ targetUserId: string,
+ ): Promise<void> {
+ await db.insert(sharedCredentials).values({
+ hostAccessId,
+ originalCredentialId,
+ targetUserId,
+ encryptedUsername: "",
+ encryptedAuthType: "",
+ needsReEncryption: true,
+ });
+
+ databaseLogger.info("Created pending shared credential", {
+ operation: "create_pending_shared_credential",
+ hostAccessId,
+ targetUserId,
+ });
+ }
+
+ private async reEncryptSharedCredential(
+ sharedCredId: number,
+ userId: string,
+ ): Promise<void> {
+ try {
+ const sharedCred = await db
+ .select()
+ .from(sharedCredentials)
+ .where(eq(sharedCredentials.id, sharedCredId))
+ .limit(1);
+
+ if (sharedCred.length === 0) {
+ databaseLogger.warn("Re-encrypt: shared credential not found", {
+ operation: "reencrypt_not_found",
+ sharedCredId,
+ });
+ return;
+ }
+
+ const cred = sharedCred[0];
+
+ const access = await db
+ .select()
+ .from(hostAccess)
+ .innerJoin(sshData, eq(hostAccess.hostId, sshData.id))
+ .where(eq(hostAccess.id, cred.hostAccessId))
+ .limit(1);
+
+ if (access.length === 0) {
+ databaseLogger.warn("Re-encrypt: host access not found", {
+ operation: "reencrypt_access_not_found",
+ sharedCredId,
+ });
+ return;
+ }
+
+ const ownerId = access[0].ssh_data.userId;
+
+ const userDEK = DataCrypto.getUserDataKey(userId);
+ if (!userDEK) {
+ databaseLogger.warn("Re-encrypt: user DEK not available", {
+ operation: "reencrypt_user_offline",
+ sharedCredId,
+ userId,
+ });
+ return;
+ }
+
+ const ownerDEK = DataCrypto.getUserDataKey(ownerId);
+ let credentialData: CredentialData;
+
+ if (ownerDEK) {
+ credentialData = await this.getDecryptedCredential(
+ cred.originalCredentialId,
+ ownerId,
+ ownerDEK,
+ );
+ } else {
+ try {
+ credentialData = await this.getDecryptedCredentialViaSystemKey(
+ cred.originalCredentialId,
+ );
+ } catch (error) {
+ databaseLogger.warn(
+ "Re-encrypt: system key decryption failed, credential may not be migrated yet",
+ {
+ operation: "reencrypt_system_key_failed",
+ sharedCredId,
+ error: error instanceof Error ? error.message : "Unknown error",
+ },
+ );
+ return;
+ }
+ }
+
+ const encryptedForTarget = this.encryptCredentialForUser(
+ credentialData,
+ userId,
+ userDEK,
+ cred.hostAccessId,
+ );
+
+ await db
+ .update(sharedCredentials)
+ .set({
+ ...encryptedForTarget,
+ needsReEncryption: false,
+ updatedAt: new Date().toISOString(),
+ })
+ .where(eq(sharedCredentials.id, sharedCredId));
+ } catch (error) {
+ databaseLogger.error("Failed to re-encrypt shared credential", error, {
+ operation: "reencrypt_shared_credential",
+ sharedCredId,
+ userId,
+ });
+ }
+ }
+}
+
+export { SharedCredentialManager };
diff --git a/src/backend/utils/simple-db-ops.ts b/src/backend/utils/simple-db-ops.ts
index 6fbd7a63..12fbee1b 100644
--- a/src/backend/utils/simple-db-ops.ts
+++ b/src/backend/utils/simple-db-ops.ts
@@ -2,7 +2,12 @@ import { getDb, DatabaseSaveTrigger } from "../database/db/index.js";
import { DataCrypto } from "./data-crypto.js";
import type { SQLiteTable } from "drizzle-orm/sqlite-core";
-type TableName = "users" | "ssh_data" | "ssh_credentials" | "recent_activity";
+type TableName =
+ | "users"
+ | "ssh_data"
+ | "ssh_credentials"
+ | "recent_activity"
+ | "socks5_proxy_presets";
class SimpleDBOps {
static async insert>(
@@ -23,6 +28,20 @@ class SimpleDBOps {
userDataKey,
);
+ if (tableName === "ssh_credentials") {
+ const { SystemCrypto } = await import("./system-crypto.js");
+ const systemCrypto = SystemCrypto.getInstance();
+ const systemKey = await systemCrypto.getCredentialSharingKey();
+
+ const systemEncrypted = await DataCrypto.encryptRecordWithSystemKey(
+ tableName,
+ dataWithTempId,
+ systemKey,
+ );
+
+ Object.assign(encryptedData, systemEncrypted);
+ }
+
if (!data.id) {
delete encryptedData.id;
}
@@ -105,6 +124,20 @@ class SimpleDBOps {
userDataKey,
);
+ if (tableName === "ssh_credentials") {
+ const { SystemCrypto } = await import("./system-crypto.js");
+ const systemCrypto = SystemCrypto.getInstance();
+ const systemKey = await systemCrypto.getCredentialSharingKey();
+
+ const systemEncrypted = await DataCrypto.encryptRecordWithSystemKey(
+ tableName,
+ data,
+ systemKey,
+ );
+
+ Object.assign(encryptedData, systemEncrypted);
+ }
+
const result = await getDb()
.update(table)
.set(encryptedData)
diff --git a/src/backend/utils/socks5-helper.ts b/src/backend/utils/socks5-helper.ts
new file mode 100644
index 00000000..c02f375e
--- /dev/null
+++ b/src/backend/utils/socks5-helper.ts
@@ -0,0 +1,131 @@
+import { SocksClient } from "socks";
+import type { SocksClientOptions } from "socks";
+import net from "net";
+import { sshLogger } from "./logger.js";
+import type { ProxyNode } from "../../types/index.js";
+
+export interface SOCKS5Config {
+ useSocks5?: boolean;
+ socks5Host?: string;
+ socks5Port?: number;
+ socks5Username?: string;
+ socks5Password?: string;
+ socks5ProxyChain?: ProxyNode[];
+}
+
+/**
+ * Creates a SOCKS5 connection through a single proxy or a chain of proxies
+ * @param targetHost - Target SSH server hostname/IP
+ * @param targetPort - Target SSH server port
+ * @param socks5Config - SOCKS5 proxy configuration
+ * @returns Promise with connected socket or null if SOCKS5 is not enabled
+ */
+export async function createSocks5Connection(
+ targetHost: string,
+ targetPort: number,
+ socks5Config: SOCKS5Config,
+): Promise<net.Socket | null> {
+ if (!socks5Config.useSocks5) {
+ return null;
+ }
+
+ if (
+ socks5Config.socks5ProxyChain &&
+ socks5Config.socks5ProxyChain.length > 0
+ ) {
+ return createProxyChainConnection(
+ targetHost,
+ targetPort,
+ socks5Config.socks5ProxyChain,
+ );
+ }
+
+ if (socks5Config.socks5Host) {
+ return createSingleProxyConnection(targetHost, targetPort, socks5Config);
+ }
+
+ return null;
+}
+
+/**
+ * Creates a connection through a single SOCKS proxy
+ */
+async function createSingleProxyConnection(
+ targetHost: string,
+ targetPort: number,
+ socks5Config: SOCKS5Config,
+): Promise<net.Socket> {
+ const socksOptions: SocksClientOptions = {
+ proxy: {
+ host: socks5Config.socks5Host!,
+ port: socks5Config.socks5Port || 1080,
+ type: 5,
+ userId: socks5Config.socks5Username,
+ password: socks5Config.socks5Password,
+ },
+ command: "connect",
+ destination: {
+ host: targetHost,
+ port: targetPort,
+ },
+ };
+
+ try {
+ const info = await SocksClient.createConnection(socksOptions);
+
+ return info.socket;
+ } catch (error) {
+ sshLogger.error("SOCKS5 connection failed", error, {
+ operation: "socks5_connect_failed",
+ proxyHost: socks5Config.socks5Host,
+ proxyPort: socks5Config.socks5Port || 1080,
+ targetHost,
+ targetPort,
+ errorMessage: error instanceof Error ? error.message : "Unknown error",
+ });
+ throw error;
+ }
+}
+
+/**
+ * Creates a connection through a chain of SOCKS proxies
+ * Each proxy in the chain connects through the previous one
+ */
+async function createProxyChainConnection(
+ targetHost: string,
+ targetPort: number,
+ proxyChain: ProxyNode[],
+): Promise<net.Socket> {
+ if (proxyChain.length === 0) {
+ throw new Error("Proxy chain is empty");
+ }
+
+ const chainPath = proxyChain.map((p) => `${p.host}:${p.port}`).join(" → ");
+ try {
+ const info = await SocksClient.createConnectionChain({
+ proxies: proxyChain.map((p) => ({
+ host: p.host,
+ port: p.port,
+ type: p.type,
+ userId: p.username,
+ password: p.password,
+ timeout: 10000,
+ })),
+ command: "connect",
+ destination: {
+ host: targetHost,
+ port: targetPort,
+ },
+ });
+ return info.socket;
+ } catch (error) {
+ sshLogger.error("SOCKS proxy chain connection failed", error, {
+ operation: "socks5_chain_connect_failed",
+ chainLength: proxyChain.length,
+ targetHost,
+ targetPort,
+ errorMessage: error instanceof Error ? error.message : "Unknown error",
+ });
+ throw error;
+ }
+}
diff --git a/src/backend/utils/system-crypto.ts b/src/backend/utils/system-crypto.ts
index fdff0263..34f60cee 100644
--- a/src/backend/utils/system-crypto.ts
+++ b/src/backend/utils/system-crypto.ts
@@ -8,6 +8,7 @@ class SystemCrypto {
private jwtSecret: string | null = null;
private databaseKey: Buffer | null = null;
private internalAuthToken: string | null = null;
+ private credentialSharingKey: Buffer | null = null;
private constructor() {}
@@ -158,6 +159,48 @@ class SystemCrypto {
return this.internalAuthToken!;
}
+ async initializeCredentialSharingKey(): Promise<void> {
+ try {
+ const dataDir = process.env.DATA_DIR || "./db/data";
+ const envPath = path.join(dataDir, ".env");
+
+ const envKey = process.env.CREDENTIAL_SHARING_KEY;
+ if (envKey && envKey.length >= 64) {
+ this.credentialSharingKey = Buffer.from(envKey, "hex");
+ return;
+ }
+
+ try {
+ const envContent = await fs.readFile(envPath, "utf8");
+ const csKeyMatch = envContent.match(/^CREDENTIAL_SHARING_KEY=(.+)$/m);
+ if (csKeyMatch && csKeyMatch[1] && csKeyMatch[1].length >= 64) {
+ this.credentialSharingKey = Buffer.from(csKeyMatch[1], "hex");
+ process.env.CREDENTIAL_SHARING_KEY = csKeyMatch[1];
+ return;
+ }
+ } catch (fileError) {}
+
+ await this.generateAndGuideCredentialSharingKey();
+ } catch (error) {
+ databaseLogger.error(
+ "Failed to initialize credential sharing key",
+ error,
+ {
+ operation: "cred_sharing_key_init_failed",
+ dataDir: process.env.DATA_DIR || "./db/data",
+ },
+ );
+ throw new Error("Credential sharing key initialization failed");
+ }
+ }
+
+ async getCredentialSharingKey(): Promise<Buffer> {
+ if (!this.credentialSharingKey) {
+ await this.initializeCredentialSharingKey();
+ }
+ return this.credentialSharingKey!;
+ }
+
private async generateAndGuideUser(): Promise {
const newSecret = crypto.randomBytes(32).toString("hex");
const instanceId = crypto.randomBytes(8).toString("hex");
@@ -210,6 +253,26 @@ class SystemCrypto {
);
}
+ private async generateAndGuideCredentialSharingKey(): Promise {
+ const newKey = crypto.randomBytes(32);
+ const newKeyHex = newKey.toString("hex");
+ const instanceId = crypto.randomBytes(8).toString("hex");
+
+ this.credentialSharingKey = newKey;
+
+ await this.updateEnvFile("CREDENTIAL_SHARING_KEY", newKeyHex);
+
+ databaseLogger.success(
+ "Credential sharing key auto-generated and saved to .env",
+ {
+ operation: "cred_sharing_key_auto_generated",
+ instanceId,
+ envVarName: "CREDENTIAL_SHARING_KEY",
+ note: "Used for offline credential sharing - no restart required",
+ },
+ );
+ }
+
async validateJWTSecret(): Promise {
try {
const secret = await this.getJWTSecret();
diff --git a/src/components/ui/alert-dialog.tsx b/src/components/ui/alert-dialog.tsx
new file mode 100644
index 00000000..1c376739
--- /dev/null
+++ b/src/components/ui/alert-dialog.tsx
@@ -0,0 +1,155 @@
+import * as React from "react";
+import * as AlertDialogPrimitive from "@radix-ui/react-alert-dialog";
+
+import { cn } from "@/lib/utils";
+import { buttonVariants } from "@/components/ui/button";
+
+function AlertDialog({
+ ...props
+}: React.ComponentProps) {
+ return ;
+}
+
+function AlertDialogTrigger({
+ ...props
+}: React.ComponentProps) {
+ return (
+
+ );
+}
+
+function AlertDialogPortal({
+ ...props
+}: React.ComponentProps) {
+ return (
+
+ );
+}
+
+function AlertDialogOverlay({
+ className,
+ ...props
+}: React.ComponentProps) {
+ return (
+
+ );
+}
+
+function AlertDialogContent({
+ className,
+ ...props
+}: React.ComponentProps) {
+ return (
+
+
+
+
+ );
+}
+
+function AlertDialogHeader({
+ className,
+ ...props
+}: React.ComponentProps<"div">) {
+ return (
+
+ );
+}
+
+function AlertDialogFooter({
+ className,
+ ...props
+}: React.ComponentProps<"div">) {
+ return (
+
+ );
+}
+
+function AlertDialogTitle({
+ className,
+ ...props
+}: React.ComponentProps) {
+ return (
+
+ );
+}
+
+function AlertDialogDescription({
+ className,
+ ...props
+}: React.ComponentProps) {
+ return (
+
+ );
+}
+
+function AlertDialogAction({
+ className,
+ ...props
+}: React.ComponentProps) {
+ return (
+
+ );
+}
+
+function AlertDialogCancel({
+ className,
+ ...props
+}: React.ComponentProps) {
+ return (
+
+ );
+}
+
+export {
+ AlertDialog,
+ AlertDialogPortal,
+ AlertDialogOverlay,
+ AlertDialogTrigger,
+ AlertDialogContent,
+ AlertDialogHeader,
+ AlertDialogFooter,
+ AlertDialogTitle,
+ AlertDialogDescription,
+ AlertDialogAction,
+ AlertDialogCancel,
+};
diff --git a/src/components/ui/badge.tsx b/src/components/ui/badge.tsx
index b99be47d..dbc4719e 100644
--- a/src/components/ui/badge.tsx
+++ b/src/components/ui/badge.tsx
@@ -15,7 +15,7 @@ const badgeVariants = cva(
secondary:
"border-transparent bg-secondary text-secondary-foreground [a&]:hover:bg-secondary/90",
destructive:
- "border-transparent bg-destructive text-white [a&]:hover:bg-destructive/90 focus-visible:ring-destructive/20 dark:focus-visible:ring-destructive/40 dark:bg-destructive/60",
+ "border-transparent bg-destructive text-foreground [a&]:hover:bg-destructive/90 focus-visible:ring-destructive/20 dark:focus-visible:ring-destructive/40 dark:bg-destructive/60",
outline:
"text-foreground [a&]:hover:bg-accent [a&]:hover:text-accent-foreground",
},
diff --git a/src/components/ui/button.tsx b/src/components/ui/button.tsx
index fbdf6b8d..849574a3 100644
--- a/src/components/ui/button.tsx
+++ b/src/components/ui/button.tsx
@@ -13,7 +13,7 @@ const buttonVariants = cva(
default:
"bg-primary text-primary-foreground shadow-xs hover:bg-primary/90",
destructive:
- "bg-destructive text-white shadow-xs hover:bg-destructive/90 focus-visible:ring-destructive/20 dark:focus-visible:ring-destructive/40 dark:bg-destructive/60",
+ "bg-destructive text-foreground shadow-xs hover:bg-destructive/90 focus-visible:ring-destructive/20 dark:focus-visible:ring-destructive/40 dark:bg-destructive/60",
outline:
"border bg-background shadow-xs hover:bg-accent hover:text-accent-foreground dark:bg-input/30 dark:border-input dark:hover:bg-input/50",
secondary:
diff --git a/src/components/ui/card.tsx b/src/components/ui/card.tsx
index 113d66c7..5b8522b8 100644
--- a/src/components/ui/card.tsx
+++ b/src/components/ui/card.tsx
@@ -7,7 +7,7 @@ function Card({ className, ...props }: React.ComponentProps<"div">) {
) {
type={type}
data-slot="input"
className={cn(
- "file:text-foreground placeholder:text-muted-foreground selection:bg-primary selection:text-primary-foreground dark:bg-input/30 border-input flex h-9 w-full min-w-0 rounded-md border bg-transparent px-3 py-1 text-base shadow-xs transition-[color,box-shadow] duration-200 outline-none file:inline-flex file:h-7 file:border-0 file:bg-transparent file:text-sm file:font-medium disabled:pointer-events-none disabled:cursor-not-allowed disabled:opacity-50 md:text-sm",
+ "file:text-foreground placeholder:text-muted-foreground selection:bg-primary selection:text-primary-foreground bg-elevated dark:bg-input/30 border-input flex h-9 w-full min-w-0 rounded-md border px-3 py-1 text-base shadow-xs transition-[color,box-shadow] duration-200 outline-none file:inline-flex file:h-7 file:border-0 file:bg-transparent file:text-sm file:font-medium disabled:pointer-events-none disabled:cursor-not-allowed disabled:opacity-50 md:text-sm",
"focus-visible:border-ring focus-visible:ring-ring/50 focus-visible:ring-[3px]",
"aria-invalid:ring-destructive/20 dark:aria-invalid:ring-destructive/40 aria-invalid:border-destructive",
className,
diff --git a/src/components/ui/resizable.tsx b/src/components/ui/resizable.tsx
index 7909c556..2835db13 100644
--- a/src/components/ui/resizable.tsx
+++ b/src/components/ui/resizable.tsx
@@ -37,13 +37,13 @@ function ResizableHandle({
div]:rotate-90 bg-dark-border-hover hover:bg-dark-active active:bg-dark-pressed transition-colors duration-150",
+ "relative flex w-1 items-center justify-center after:absolute after:inset-y-0 after:left-1/2 after:w-1 after:-translate-x-1/2 focus-visible:ring-1 focus-visible:ring-offset-1 focus-visible:outline-hidden data-[panel-group-direction=vertical]:h-1 data-[panel-group-direction=vertical]:w-full data-[panel-group-direction=vertical]:after:left-0 data-[panel-group-direction=vertical]:after:h-1 data-[panel-group-direction=vertical]:after:w-full data-[panel-group-direction=vertical]:after:translate-x-0 data-[panel-group-direction=vertical]:after:-translate-y-1/2 [&[data-panel-group-direction=vertical]>div]:rotate-90 bg-edge-hover hover:bg-interact active:bg-pressed transition-colors duration-150",
className,
)}
{...props}
>
{withHandle && (
-
+
)}
diff --git a/src/components/ui/select.tsx b/src/components/ui/select.tsx
index 0c883e37..dadd3525 100644
--- a/src/components/ui/select.tsx
+++ b/src/components/ui/select.tsx
@@ -59,7 +59,7 @@ function SelectContent({
) {
data-slot="sidebar-content"
data-sidebar="content"
className={cn(
- "flex min-h-0 flex-1 flex-col gap-2 overflow-auto group-data-[collapsible=icon]:overflow-hidden",
+ "flex min-h-0 flex-1 flex-col gap-2 overflow-auto thin-scrollbar group-data-[collapsible=icon]:overflow-hidden",
className,
)}
{...props}
diff --git a/src/components/ui/slider.tsx b/src/components/ui/slider.tsx
index 74e41490..231c5a81 100644
--- a/src/components/ui/slider.tsx
+++ b/src/components/ui/slider.tsx
@@ -51,7 +51,7 @@ function Slider({
))}
diff --git a/src/components/ui/sonner.tsx b/src/components/ui/sonner.tsx
index 04e0013c..264f0503 100644
--- a/src/components/ui/sonner.tsx
+++ b/src/components/ui/sonner.tsx
@@ -1,4 +1,4 @@
-import { useTheme } from "next-themes";
+import { useTheme } from "@/components/theme-provider";
import { Toaster as Sonner, type ToasterProps, toast } from "sonner";
import { useRef } from "react";
diff --git a/src/components/ui/table.tsx b/src/components/ui/table.tsx
index 2ad27ce8..a198ece0 100644
--- a/src/components/ui/table.tsx
+++ b/src/components/ui/table.tsx
@@ -6,7 +6,7 @@ function Table({ className, ...props }: React.ComponentProps<"table">) {
return (