Compare commits
66 Commits
release-1.
...
dev-1.10.1
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
c7872770a1 | ||
|
|
004ddcb2bb | ||
|
|
8eeef84b8a | ||
|
|
dc88ae5e8b | ||
|
|
cb478477e9 | ||
|
|
b7bd1e50b3 | ||
|
|
230ab2f737 | ||
|
|
042bf255ef | ||
|
|
f7e99b5af5 | ||
|
|
8fa093ae60 | ||
|
|
dd62b77c79 | ||
|
|
264682c5ad | ||
|
|
7210381f17 | ||
|
|
c0f4f1d74b | ||
|
|
f957959a86 | ||
|
|
a54dbe5b46 | ||
|
|
ac1cf82bba | ||
|
|
de556e3911 | ||
|
|
9be6b945c8 | ||
|
|
1aebbee21e | ||
|
|
80c09aef7d | ||
|
|
115a1fd7f0 | ||
|
|
a1c260ad22 | ||
|
|
18aa4f4877 | ||
|
|
8fc038e59b | ||
|
|
aea87be4d3 | ||
|
|
7caa32b364 | ||
|
|
868ac39b71 | ||
|
|
8ae8520c44 | ||
|
|
8ce4c6f364 | ||
|
|
a9a1a4b3d5 | ||
|
|
2e3f7e10c7 | ||
|
|
d821373b15 | ||
|
|
816172d67b | ||
|
|
ceff07c685 | ||
|
|
1eb28dec8b | ||
|
|
2b6361cbb6 | ||
|
|
e6870f962a | ||
|
|
99b0181c45 | ||
|
|
58945288e0 | ||
|
|
afb66a1098 | ||
|
|
5f080be4ee | ||
|
|
4648549e74 | ||
|
|
f5d948aa45 | ||
|
|
81d506afba | ||
|
|
4150faa558 | ||
|
|
7ecfb4d685 | ||
|
|
614f2f84ec | ||
|
|
af63fe1b7b | ||
|
|
4896b71b01 | ||
|
|
69f3f88ae5 | ||
|
|
d632f2b91f | ||
|
|
5366cb24ef | ||
|
|
177e783f92 | ||
|
|
1a2179c345 | ||
|
|
bdf9ea282e | ||
|
|
6feb8405ce | ||
|
|
2ee1318ded | ||
|
|
51e6826c95 | ||
|
|
ad86c2040b | ||
|
|
0216a2d2fe | ||
|
|
7139290d14 | ||
|
|
8106999d1e | ||
|
|
f0647dc7c1 | ||
|
|
403800f42b | ||
|
|
84ca8080f0 |
2
.github/workflows/docker.yml
vendored
2
.github/workflows/docker.yml
vendored
@@ -17,7 +17,7 @@ on:
|
||||
|
||||
jobs:
|
||||
build:
|
||||
runs-on: ubuntu-latest
|
||||
runs-on: blacksmith-4vcpu-ubuntu-2404
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v5
|
||||
|
||||
423
.github/workflows/electron.yml
vendored
423
.github/workflows/electron.yml
vendored
@@ -27,7 +27,7 @@ on:
|
||||
jobs:
|
||||
build-windows:
|
||||
runs-on: windows-latest
|
||||
if: github.event.inputs.build_type == 'all' || github.event.inputs.build_type == 'windows' || github.event.inputs.build_type == ''
|
||||
if: (github.event.inputs.build_type == 'all' || github.event.inputs.build_type == 'windows' || github.event.inputs.build_type == '') && github.event.inputs.artifact_destination != 'submit'
|
||||
permissions:
|
||||
contents: write
|
||||
|
||||
@@ -72,10 +72,6 @@ jobs:
|
||||
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
run: npm run build && npx electron-builder --win --x64 --ia32
|
||||
|
||||
- name: List release files
|
||||
run: |
|
||||
dir release
|
||||
|
||||
- name: Upload Windows x64 NSIS Installer
|
||||
uses: actions/upload-artifact@v4
|
||||
if: hashFiles('release/termix_windows_x64_nsis.exe') != '' && github.event.inputs.artifact_destination != 'none'
|
||||
@@ -136,7 +132,7 @@ jobs:
|
||||
|
||||
build-linux:
|
||||
runs-on: blacksmith-4vcpu-ubuntu-2404
|
||||
if: github.event.inputs.build_type == 'all' || github.event.inputs.build_type == 'linux' || github.event.inputs.build_type == ''
|
||||
if: (github.event.inputs.build_type == 'all' || github.event.inputs.build_type == 'linux' || github.event.inputs.build_type == '') && github.event.inputs.artifact_destination != 'submit'
|
||||
permissions:
|
||||
contents: write
|
||||
|
||||
@@ -199,17 +195,6 @@ jobs:
|
||||
|
||||
cd ..
|
||||
|
||||
- name: List release files
|
||||
run: |
|
||||
ls -la release/
|
||||
|
||||
- name: Debug electron-builder output
|
||||
if: always()
|
||||
run: |
|
||||
if [ -f "release/builder-debug.yml" ]; then
|
||||
cat release/builder-debug.yml
|
||||
fi
|
||||
|
||||
- name: Upload Linux x64 AppImage
|
||||
uses: actions/upload-artifact@v4
|
||||
if: hashFiles('release/termix_linux_x64_appimage.AppImage') != '' && github.event.inputs.artifact_destination != 'none'
|
||||
@@ -282,9 +267,96 @@ jobs:
|
||||
path: release/termix_linux_armv7l_portable.tar.gz
|
||||
retention-days: 30
|
||||
|
||||
- name: Install Flatpak builder and dependencies
|
||||
run: |
|
||||
sudo apt-get update
|
||||
sudo apt-get install -y flatpak flatpak-builder imagemagick
|
||||
|
||||
- name: Add Flathub repository
|
||||
run: |
|
||||
sudo flatpak remote-add --if-not-exists flathub https://flathub.org/repo/flathub.flatpakrepo
|
||||
|
||||
- name: Install Flatpak runtime and SDK
|
||||
run: |
|
||||
sudo flatpak install -y flathub org.freedesktop.Platform//24.08
|
||||
sudo flatpak install -y flathub org.freedesktop.Sdk//24.08
|
||||
sudo flatpak install -y flathub org.electronjs.Electron2.BaseApp//24.08
|
||||
|
||||
- name: Get version for Flatpak
|
||||
id: flatpak-version
|
||||
run: |
|
||||
VERSION=$(node -p "require('./package.json').version")
|
||||
RELEASE_DATE=$(date +%Y-%m-%d)
|
||||
echo "version=$VERSION" >> $GITHUB_OUTPUT
|
||||
echo "release_date=$RELEASE_DATE" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Prepare Flatpak files
|
||||
run: |
|
||||
VERSION="${{ steps.flatpak-version.outputs.version }}"
|
||||
RELEASE_DATE="${{ steps.flatpak-version.outputs.release_date }}"
|
||||
|
||||
CHECKSUM_X64=$(sha256sum "release/termix_linux_x64_appimage.AppImage" | awk '{print $1}')
|
||||
CHECKSUM_ARM64=$(sha256sum "release/termix_linux_arm64_appimage.AppImage" | awk '{print $1}')
|
||||
|
||||
mkdir -p flatpak-build
|
||||
cp flatpak/com.karmaa.termix.yml flatpak-build/
|
||||
cp flatpak/com.karmaa.termix.desktop flatpak-build/
|
||||
cp flatpak/com.karmaa.termix.metainfo.xml flatpak-build/
|
||||
cp public/icon.svg flatpak-build/com.karmaa.termix.svg
|
||||
convert public/icon.png -resize 256x256 flatpak-build/icon-256.png
|
||||
convert public/icon.png -resize 128x128 flatpak-build/icon-128.png
|
||||
|
||||
cd flatpak-build
|
||||
sed -i "s|https://github.com/Termix-SSH/Termix/releases/download/release-VERSION_PLACEHOLDER-tag/termix_linux_x64_appimage.AppImage|file://$(realpath ../release/termix_linux_x64_appimage.AppImage)|g" com.karmaa.termix.yml
|
||||
sed -i "s|https://github.com/Termix-SSH/Termix/releases/download/release-VERSION_PLACEHOLDER-tag/termix_linux_arm64_appimage.AppImage|file://$(realpath ../release/termix_linux_arm64_appimage.AppImage)|g" com.karmaa.termix.yml
|
||||
sed -i "s/CHECKSUM_X64_PLACEHOLDER/$CHECKSUM_X64/g" com.karmaa.termix.yml
|
||||
sed -i "s/CHECKSUM_ARM64_PLACEHOLDER/$CHECKSUM_ARM64/g" com.karmaa.termix.yml
|
||||
sed -i "s/VERSION_PLACEHOLDER/$VERSION/g" com.karmaa.termix.metainfo.xml
|
||||
sed -i "s/DATE_PLACEHOLDER/$RELEASE_DATE/g" com.karmaa.termix.metainfo.xml
|
||||
|
||||
- name: Build Flatpak bundle
|
||||
run: |
|
||||
cd flatpak-build
|
||||
flatpak-builder --repo=repo --force-clean --disable-rofiles-fuse build-dir com.karmaa.termix.yml
|
||||
|
||||
# Determine the architecture
|
||||
ARCH=$(uname -m)
|
||||
if [ "$ARCH" = "x86_64" ]; then
|
||||
FLATPAK_ARCH="x86_64"
|
||||
elif [ "$ARCH" = "aarch64" ]; then
|
||||
FLATPAK_ARCH="aarch64"
|
||||
else
|
||||
FLATPAK_ARCH="$ARCH"
|
||||
fi
|
||||
|
||||
# Build bundle for the current architecture
|
||||
flatpak build-bundle repo ../release/termix_linux_flatpak.flatpak com.karmaa.termix --runtime-repo=https://flathub.org/repo/flathub.flatpakrepo
|
||||
|
||||
- name: Create flatpakref file
|
||||
run: |
|
||||
VERSION="${{ steps.flatpak-version.outputs.version }}"
|
||||
cp flatpak/com.karmaa.termix.flatpakref release/
|
||||
sed -i "s|VERSION_PLACEHOLDER|release-${VERSION}-tag|g" release/com.karmaa.termix.flatpakref
|
||||
|
||||
- name: Upload Flatpak bundle
|
||||
uses: actions/upload-artifact@v4
|
||||
if: hashFiles('release/termix_linux_flatpak.flatpak') != '' && github.event.inputs.artifact_destination != 'none'
|
||||
with:
|
||||
name: termix_linux_flatpak
|
||||
path: release/termix_linux_flatpak.flatpak
|
||||
retention-days: 30
|
||||
|
||||
- name: Upload Flatpakref
|
||||
uses: actions/upload-artifact@v4
|
||||
if: hashFiles('release/com.karmaa.termix.flatpakref') != '' && github.event.inputs.artifact_destination != 'none'
|
||||
with:
|
||||
name: termix_linux_flatpakref
|
||||
path: release/com.karmaa.termix.flatpakref
|
||||
retention-days: 30
|
||||
|
||||
build-macos:
|
||||
runs-on: macos-latest
|
||||
if: github.event.inputs.build_type == 'macos' || github.event.inputs.build_type == 'all'
|
||||
if: (github.event.inputs.build_type == 'macos' || github.event.inputs.build_type == 'all') && github.event.inputs.artifact_destination != 'submit'
|
||||
needs: []
|
||||
permissions:
|
||||
contents: write
|
||||
@@ -425,11 +497,6 @@ jobs:
|
||||
export GH_TOKEN="${{ secrets.GITHUB_TOKEN }}"
|
||||
npx electron-builder --mac dmg --universal --x64 --arm64 --publish never
|
||||
|
||||
- name: List release directory
|
||||
if: steps.check_certs.outputs.has_certs == 'true'
|
||||
run: |
|
||||
ls -R release/ || echo "Release directory not found"
|
||||
|
||||
- name: Upload macOS MAS PKG
|
||||
if: steps.check_certs.outputs.has_certs == 'true' && hashFiles('release/termix_macos_universal_mas.pkg') != '' && (github.event.inputs.artifact_destination == 'file' || github.event.inputs.artifact_destination == 'release' || github.event.inputs.artifact_destination == 'submit')
|
||||
uses: actions/upload-artifact@v4
|
||||
@@ -463,42 +530,51 @@ jobs:
|
||||
path: release/termix_macos_arm64_dmg.dmg
|
||||
retention-days: 30
|
||||
|
||||
- name: Check for App Store Connect API credentials
|
||||
if: steps.check_certs.outputs.has_certs == 'true'
|
||||
id: check_asc_creds
|
||||
- name: Get version for Homebrew
|
||||
id: homebrew-version
|
||||
run: |
|
||||
if [ -n "${{ secrets.APPLE_KEY_ID }}" ] && [ -n "${{ secrets.APPLE_ISSUER_ID }}" ] && [ -n "${{ secrets.APPLE_KEY_CONTENT }}" ]; then
|
||||
echo "has_credentials=true" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
VERSION=$(node -p "require('./package.json').version")
|
||||
echo "version=$VERSION" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Setup Ruby for Fastlane
|
||||
if: steps.check_asc_creds.outputs.has_credentials == 'true' && github.event.inputs.artifact_destination == 'submit'
|
||||
uses: ruby/setup-ruby@v1
|
||||
- name: Generate Homebrew Cask
|
||||
if: hashFiles('release/termix_macos_universal_dmg.dmg') != '' && (github.event.inputs.artifact_destination == 'file' || github.event.inputs.artifact_destination == 'release')
|
||||
run: |
|
||||
VERSION="${{ steps.homebrew-version.outputs.version }}"
|
||||
DMG_PATH="release/termix_macos_universal_dmg.dmg"
|
||||
|
||||
CHECKSUM=$(shasum -a 256 "$DMG_PATH" | awk '{print $1}')
|
||||
|
||||
mkdir -p homebrew-generated
|
||||
cp Casks/termix.rb homebrew-generated/termix.rb
|
||||
|
||||
sed -i '' "s/VERSION_PLACEHOLDER/$VERSION/g" homebrew-generated/termix.rb
|
||||
sed -i '' "s/CHECKSUM_PLACEHOLDER/$CHECKSUM/g" homebrew-generated/termix.rb
|
||||
sed -i '' "s|version \".*\"|version \"$VERSION\"|g" homebrew-generated/termix.rb
|
||||
sed -i '' "s|sha256 \".*\"|sha256 \"$CHECKSUM\"|g" homebrew-generated/termix.rb
|
||||
sed -i '' "s|release-[0-9.]*-tag|release-$VERSION-tag|g" homebrew-generated/termix.rb
|
||||
|
||||
- name: Upload Homebrew Cask as artifact
|
||||
uses: actions/upload-artifact@v4
|
||||
if: hashFiles('homebrew-generated/termix.rb') != '' && github.event.inputs.artifact_destination == 'file'
|
||||
with:
|
||||
ruby-version: "3.2"
|
||||
bundler-cache: false
|
||||
name: termix_macos_homebrew_cask
|
||||
path: homebrew-generated/termix.rb
|
||||
retention-days: 30
|
||||
|
||||
- name: Install Fastlane
|
||||
if: steps.check_asc_creds.outputs.has_credentials == 'true' && github.event.inputs.artifact_destination == 'submit'
|
||||
- name: Upload Homebrew Cask to release
|
||||
if: hashFiles('homebrew-generated/termix.rb') != '' && github.event.inputs.artifact_destination == 'release'
|
||||
env:
|
||||
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
run: |
|
||||
gem install fastlane -N
|
||||
VERSION="${{ steps.homebrew-version.outputs.version }}"
|
||||
RELEASE_TAG="release-$VERSION-tag"
|
||||
|
||||
- name: Deploy to App Store Connect (TestFlight)
|
||||
if: steps.check_asc_creds.outputs.has_credentials == 'true' && github.event.inputs.artifact_destination == 'submit'
|
||||
run: |
|
||||
PKG_FILE=$(find release -name "*.pkg" -type f | head -n 1)
|
||||
if [ -z "$PKG_FILE" ]; then
|
||||
gh release list --repo ${{ github.repository }} --limit 100 | grep -q "$RELEASE_TAG" || {
|
||||
echo "Release $RELEASE_TAG not found"
|
||||
exit 1
|
||||
fi
|
||||
}
|
||||
|
||||
mkdir -p ~/private_keys
|
||||
echo "${{ secrets.APPLE_KEY_CONTENT }}" | base64 --decode > ~/private_keys/AuthKey_${{ secrets.APPLE_KEY_ID }}.p8
|
||||
|
||||
xcrun altool --upload-app -f "$PKG_FILE" \
|
||||
--type macos \
|
||||
--apiKey "${{ secrets.APPLE_KEY_ID }}" \
|
||||
--apiIssuer "${{ secrets.APPLE_ISSUER_ID }}"
|
||||
continue-on-error: true
|
||||
gh release upload "$RELEASE_TAG" homebrew-generated/termix.rb --repo ${{ github.repository }} --clobber
|
||||
|
||||
- name: Clean up keychains
|
||||
if: always()
|
||||
@@ -508,8 +584,7 @@ jobs:
|
||||
|
||||
submit-to-chocolatey:
|
||||
runs-on: windows-latest
|
||||
if: github.event.inputs.artifact_destination == 'submit'
|
||||
needs: [build-windows]
|
||||
if: github.event.inputs.artifact_destination == 'submit' && (github.event.inputs.build_type == 'all' || github.event.inputs.build_type == 'windows' || github.event.inputs.build_type == '')
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
@@ -525,20 +600,25 @@ jobs:
|
||||
$VERSION = (Get-Content package.json | ConvertFrom-Json).version
|
||||
echo "version=$VERSION" >> $env:GITHUB_OUTPUT
|
||||
|
||||
- name: Download Windows x64 MSI artifact
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: termix_windows_x64_msi
|
||||
path: artifact
|
||||
|
||||
- name: Get MSI file info
|
||||
- name: Download and prepare MSI info from public release
|
||||
id: msi-info
|
||||
run: |
|
||||
$VERSION = "${{ steps.package-version.outputs.version }}"
|
||||
$MSI_FILE = Get-ChildItem -Path artifact -Filter "*.msi" | Select-Object -First 1
|
||||
$MSI_NAME = $MSI_FILE.Name
|
||||
$CHECKSUM = (Get-FileHash -Path $MSI_FILE.FullName -Algorithm SHA256).Hash
|
||||
$MSI_NAME = "termix_windows_x64_msi.msi"
|
||||
$DOWNLOAD_URL = "https://github.com/Termix-SSH/Termix/releases/download/release-$($VERSION)-tag/$($MSI_NAME)"
|
||||
|
||||
Write-Host "Downloading from $DOWNLOAD_URL"
|
||||
New-Item -ItemType Directory -Force -Path "release_asset"
|
||||
$DOWNLOAD_PATH = "release_asset\$MSI_NAME"
|
||||
|
||||
try {
|
||||
Invoke-WebRequest -Uri $DOWNLOAD_URL -OutFile $DOWNLOAD_PATH -UseBasicParsing
|
||||
} catch {
|
||||
Write-Error "Failed to download MSI from $DOWNLOAD_URL. Please ensure the release and asset exist."
|
||||
exit 1
|
||||
}
|
||||
|
||||
$CHECKSUM = (Get-FileHash -Path $DOWNLOAD_PATH -Algorithm SHA256).Hash
|
||||
echo "msi_name=$MSI_NAME" >> $env:GITHUB_OUTPUT
|
||||
echo "checksum=$CHECKSUM" >> $env:GITHUB_OUTPUT
|
||||
|
||||
@@ -609,8 +689,8 @@ jobs:
|
||||
|
||||
submit-to-flatpak:
|
||||
runs-on: ubuntu-latest
|
||||
if: github.event.inputs.artifact_destination == 'submit'
|
||||
needs: [build-linux]
|
||||
if: github.event.inputs.artifact_destination == 'submit' && (github.event.inputs.build_type == 'all' || github.event.inputs.build_type == 'linux' || github.event.inputs.build_type == '')
|
||||
needs: []
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
@@ -628,30 +708,27 @@ jobs:
|
||||
echo "version=$VERSION" >> $GITHUB_OUTPUT
|
||||
echo "release_date=$RELEASE_DATE" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Download Linux x64 AppImage artifact
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: termix_linux_x64_appimage
|
||||
path: artifact-x64
|
||||
|
||||
- name: Download Linux arm64 AppImage artifact
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: termix_linux_arm64_appimage
|
||||
path: artifact-arm64
|
||||
|
||||
- name: Get AppImage file info
|
||||
- name: Download and prepare AppImage info from public release
|
||||
id: appimage-info
|
||||
run: |
|
||||
VERSION="${{ steps.package-version.outputs.version }}"
|
||||
mkdir -p release_assets
|
||||
|
||||
APPIMAGE_X64_FILE=$(find artifact-x64 -name "*.AppImage" -type f | head -n 1)
|
||||
APPIMAGE_X64_NAME=$(basename "$APPIMAGE_X64_FILE")
|
||||
CHECKSUM_X64=$(sha256sum "$APPIMAGE_X64_FILE" | awk '{print $1}')
|
||||
APPIMAGE_X64_NAME="termix_linux_x64_appimage.AppImage"
|
||||
URL_X64="https://github.com/Termix-SSH/Termix/releases/download/release-$VERSION-tag/$APPIMAGE_X64_NAME"
|
||||
PATH_X64="release_assets/$APPIMAGE_X64_NAME"
|
||||
echo "Downloading x64 AppImage from $URL_X64"
|
||||
curl -L -o "$PATH_X64" "$URL_X64"
|
||||
chmod +x "$PATH_X64"
|
||||
CHECKSUM_X64=$(sha256sum "$PATH_X64" | awk '{print $1}')
|
||||
|
||||
APPIMAGE_ARM64_FILE=$(find artifact-arm64 -name "*.AppImage" -type f | head -n 1)
|
||||
APPIMAGE_ARM64_NAME=$(basename "$APPIMAGE_ARM64_FILE")
|
||||
CHECKSUM_ARM64=$(sha256sum "$APPIMAGE_ARM64_FILE" | awk '{print $1}')
|
||||
APPIMAGE_ARM64_NAME="termix_linux_arm64_appimage.AppImage"
|
||||
URL_ARM64="https://github.com/Termix-SSH/Termix/releases/download/release-$VERSION-tag/$APPIMAGE_ARM64_NAME"
|
||||
PATH_ARM64="release_assets/$APPIMAGE_ARM64_NAME"
|
||||
echo "Downloading arm64 AppImage from $URL_ARM64"
|
||||
curl -L -o "$PATH_ARM64" "$URL_ARM64"
|
||||
chmod +x "$PATH_ARM64"
|
||||
CHECKSUM_ARM64=$(sha256sum "$PATH_ARM64" | awk '{print $1}')
|
||||
|
||||
echo "appimage_x64_name=$APPIMAGE_X64_NAME" >> $GITHUB_OUTPUT
|
||||
echo "checksum_x64=$CHECKSUM_X64" >> $GITHUB_OUTPUT
|
||||
@@ -690,10 +767,6 @@ jobs:
|
||||
sed -i "s/VERSION_PLACEHOLDER/$VERSION/g" flatpak-submission/com.karmaa.termix.metainfo.xml
|
||||
sed -i "s/DATE_PLACEHOLDER/$RELEASE_DATE/g" flatpak-submission/com.karmaa.termix.metainfo.xml
|
||||
|
||||
- name: List submission files
|
||||
run: |
|
||||
ls -la flatpak-submission/
|
||||
|
||||
- name: Upload Flatpak submission as artifact
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
@@ -703,8 +776,8 @@ jobs:
|
||||
|
||||
submit-to-homebrew:
|
||||
runs-on: macos-latest
|
||||
if: github.event.inputs.artifact_destination == 'submit'
|
||||
needs: [build-macos]
|
||||
if: github.event.inputs.artifact_destination == 'submit' && (github.event.inputs.build_type == 'all' || github.event.inputs.build_type == 'macos')
|
||||
needs: []
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
@@ -720,19 +793,28 @@ jobs:
|
||||
VERSION=$(node -p "require('./package.json').version")
|
||||
echo "version=$VERSION" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Download macOS Universal DMG artifact
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: termix_macos_universal_dmg
|
||||
path: artifact
|
||||
|
||||
- name: Get DMG file info
|
||||
- name: Download and prepare DMG info from public release
|
||||
id: dmg-info
|
||||
run: |
|
||||
VERSION="${{ steps.package-version.outputs.version }}"
|
||||
DMG_FILE=$(find artifact -name "*.dmg" -type f | head -n 1)
|
||||
DMG_NAME=$(basename "$DMG_FILE")
|
||||
CHECKSUM=$(shasum -a 256 "$DMG_FILE" | awk '{print $1}')
|
||||
DMG_NAME="termix_macos_universal_dmg.dmg"
|
||||
URL="https://github.com/Termix-SSH/Termix/releases/download/release-$VERSION-tag/$DMG_NAME"
|
||||
|
||||
mkdir -p release_asset
|
||||
DOWNLOAD_PATH="release_asset/$DMG_NAME"
|
||||
echo "Downloading DMG from $URL"
|
||||
|
||||
if command -v curl &> /dev/null; then
|
||||
curl -L -o "$DOWNLOAD_PATH" "$URL"
|
||||
elif command -v wget &> /dev/null; then
|
||||
wget -O "$DOWNLOAD_PATH" "$URL"
|
||||
else
|
||||
echo "Neither curl nor wget is available, installing curl"
|
||||
brew install curl
|
||||
curl -L -o "$DOWNLOAD_PATH" "$URL"
|
||||
fi
|
||||
|
||||
CHECKSUM=$(shasum -a 256 "$DOWNLOAD_PATH" | awk '{print $1}')
|
||||
|
||||
echo "dmg_name=$DMG_NAME" >> $GITHUB_OUTPUT
|
||||
echo "checksum=$CHECKSUM" >> $GITHUB_OUTPUT
|
||||
@@ -745,23 +827,15 @@ jobs:
|
||||
|
||||
mkdir -p homebrew-submission/Casks/t
|
||||
|
||||
cp homebrew/termix.rb homebrew-submission/Casks/t/termix.rb
|
||||
cp Casks/termix.rb homebrew-submission/Casks/t/termix.rb
|
||||
|
||||
sed -i '' "s/VERSION_PLACEHOLDER/$VERSION/g" homebrew-submission/Casks/t/termix.rb
|
||||
sed -i '' "s/CHECKSUM_PLACEHOLDER/$CHECKSUM/g" homebrew-submission/Casks/t/termix.rb
|
||||
|
||||
- name: Verify Cask syntax
|
||||
run: |
|
||||
if ! command -v brew &> /dev/null; then
|
||||
/bin/bash -c "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/HEAD/install.sh)"
|
||||
fi
|
||||
|
||||
ruby -c homebrew-submission/Casks/t/termix.rb
|
||||
|
||||
- name: List submission files
|
||||
run: |
|
||||
find homebrew-submission -type f
|
||||
|
||||
- name: Upload Homebrew submission as artifact
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
@@ -789,10 +863,6 @@ jobs:
|
||||
env:
|
||||
GH_TOKEN: ${{ github.token }}
|
||||
|
||||
- name: Display artifact structure
|
||||
run: |
|
||||
ls -R artifacts/
|
||||
|
||||
- name: Upload artifacts to latest release
|
||||
run: |
|
||||
cd artifacts
|
||||
@@ -808,3 +878,130 @@ jobs:
|
||||
done
|
||||
env:
|
||||
GH_TOKEN: ${{ github.token }}
|
||||
|
||||
submit-to-testflight:
|
||||
runs-on: macos-latest
|
||||
if: github.event.inputs.artifact_destination == 'submit' && (github.event.inputs.build_type == 'all' || github.event.inputs.build_type == 'macos')
|
||||
needs: []
|
||||
permissions:
|
||||
contents: write
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v5
|
||||
with:
|
||||
fetch-depth: 1
|
||||
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: "20"
|
||||
cache: "npm"
|
||||
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
for i in 1 2 3;
|
||||
do
|
||||
if npm ci; then
|
||||
break
|
||||
else
|
||||
if [ $i -eq 3 ]; then
|
||||
exit 1
|
||||
fi
|
||||
sleep 10
|
||||
fi
|
||||
done
|
||||
npm install --force @rollup/rollup-darwin-arm64
|
||||
npm install dmg-license
|
||||
|
||||
- name: Check for Code Signing Certificates
|
||||
id: check_certs
|
||||
run: |
|
||||
if [ -n "${{ secrets.MAC_BUILD_CERTIFICATE_BASE64 }}" ] && [ -n "${{ secrets.MAC_P12_PASSWORD }}" ]; then
|
||||
echo "has_certs=true" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
- name: Import Code Signing Certificates
|
||||
if: steps.check_certs.outputs.has_certs == 'true'
|
||||
env:
|
||||
MAC_BUILD_CERTIFICATE_BASE64: ${{ secrets.MAC_BUILD_CERTIFICATE_BASE64 }}
|
||||
MAC_INSTALLER_CERTIFICATE_BASE64: ${{ secrets.MAC_INSTALLER_CERTIFICATE_BASE64 }}
|
||||
MAC_P12_PASSWORD: ${{ secrets.MAC_P12_PASSWORD }}
|
||||
MAC_KEYCHAIN_PASSWORD: ${{ secrets.MAC_KEYCHAIN_PASSWORD }}
|
||||
run: |
|
||||
APP_CERT_PATH=$RUNNER_TEMP/app_certificate.p12
|
||||
INSTALLER_CERT_PATH=$RUNNER_TEMP/installer_certificate.p12
|
||||
KEYCHAIN_PATH=$RUNNER_TEMP/app-signing.keychain-db
|
||||
|
||||
echo -n "$MAC_BUILD_CERTIFICATE_BASE64" | base64 --decode -o $APP_CERT_PATH
|
||||
|
||||
if [ -n "$MAC_INSTALLER_CERTIFICATE_BASE64" ]; then
|
||||
echo -n "$MAC_INSTALLER_CERTIFICATE_BASE64" | base64 --decode -o $INSTALLER_CERT_PATH
|
||||
fi
|
||||
|
||||
security create-keychain -p "$MAC_KEYCHAIN_PASSWORD" $KEYCHAIN_PATH
|
||||
security set-keychain-settings -lut 21600 $KEYCHAIN_PATH
|
||||
security unlock-keychain -p "$MAC_KEYCHAIN_PASSWORD" $KEYCHAIN_PATH
|
||||
|
||||
security import $APP_CERT_PATH -P "$MAC_P12_PASSWORD" -A -t cert -f pkcs12 -k $KEYCHAIN_PATH
|
||||
|
||||
if [ -f "$INSTALLER_CERT_PATH" ]; then
|
||||
security import $INSTALLER_CERT_PATH -P "$MAC_P12_PASSWORD" -A -t cert -f pkcs12 -k $KEYCHAIN_PATH
|
||||
fi
|
||||
|
||||
security list-keychain -d user -s $KEYCHAIN_PATH
|
||||
|
||||
security find-identity -v -p codesigning $KEYCHAIN_PATH
|
||||
|
||||
- name: Build macOS App Store Package
|
||||
if: steps.check_certs.outputs.has_certs == 'true'
|
||||
env:
|
||||
ELECTRON_BUILDER_ALLOW_UNRESOLVED_DEPENDENCIES: true
|
||||
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
run: |
|
||||
CURRENT_VERSION=$(node -p "require('./package.json').version")
|
||||
BUILD_VERSION="${{ github.run_number }}"
|
||||
|
||||
npm run build && npx electron-builder --mac mas --universal --config.buildVersion="$BUILD_VERSION"
|
||||
|
||||
- name: Check for App Store Connect API credentials
|
||||
id: check_asc_creds
|
||||
run: |
|
||||
if [ -n "${{ secrets.APPLE_KEY_ID }}" ] && [ -n "${{ secrets.APPLE_ISSUER_ID }}" ] && [ -n "${{ secrets.APPLE_KEY_CONTENT }}" ]; then
|
||||
echo "has_credentials=true" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
- name: Setup Ruby for Fastlane
|
||||
if: steps.check_asc_creds.outputs.has_credentials == 'true'
|
||||
uses: ruby/setup-ruby@v1
|
||||
with:
|
||||
ruby-version: "3.2"
|
||||
bundler-cache: false
|
||||
|
||||
- name: Install Fastlane
|
||||
if: steps.check_asc_creds.outputs.has_credentials == 'true'
|
||||
run: |
|
||||
gem install fastlane -N
|
||||
|
||||
- name: Deploy to App Store Connect (TestFlight)
|
||||
if: steps.check_asc_creds.outputs.has_credentials == 'true'
|
||||
run: |
|
||||
PKG_FILE=$(find release -name "termix_macos_universal_mas.pkg" -type f | head -n 1)
|
||||
if [ -z "$PKG_FILE" ]; then
|
||||
echo "PKG file not found, exiting."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
mkdir -p ~/private_keys
|
||||
echo "${{ secrets.APPLE_KEY_CONTENT }}" | base64 --decode > ~/private_keys/AuthKey_${{ secrets.APPLE_KEY_ID }}.p8
|
||||
|
||||
xcrun altool --upload-app -f "$PKG_FILE" \
|
||||
--type macos \
|
||||
--apiKey "${{ secrets.APPLE_KEY_ID }}" \
|
||||
--apiIssuer "${{ secrets.APPLE_ISSUER_ID }}"
|
||||
continue-on-error: true
|
||||
|
||||
- name: Clean up keychains
|
||||
if: always()
|
||||
run: |
|
||||
security delete-keychain $RUNNER_TEMP/app-signing.keychain-db || true
|
||||
|
||||
32
.github/workflows/openapi.yml
vendored
Normal file
32
.github/workflows/openapi.yml
vendored
Normal file
@@ -0,0 +1,32 @@
|
||||
name: Generate OpenAPI Specification
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
generate-openapi:
|
||||
name: Generate OpenAPI JSON
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: '20'
|
||||
cache: 'npm'
|
||||
|
||||
- name: Install dependencies
|
||||
run: npm ci
|
||||
|
||||
- name: Generate OpenAPI specification
|
||||
run: npm run generate:openapi
|
||||
|
||||
- name: Upload OpenAPI artifact
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: openapi-spec
|
||||
path: openapi.json
|
||||
retention-days: 90
|
||||
1
.gitignore
vendored
1
.gitignore
vendored
@@ -28,3 +28,4 @@ dist-ssr
|
||||
/.mcp.json
|
||||
/nul
|
||||
/.vscode/
|
||||
/CLAUDE.md
|
||||
|
||||
@@ -1,8 +1,8 @@
|
||||
cask "termix" do
|
||||
version "VERSION_PLACEHOLDER"
|
||||
sha256 "CHECKSUM_PLACEHOLDER"
|
||||
version "1.10.0"
|
||||
sha256 "327c5026006c949f992447835aa6754113f731065b410bedbfa5da5af7cb2386"
|
||||
|
||||
url "https://github.com/Termix-SSH/Termix/releases/download/release-#{version}-tag/termix_macos_universal_#{version}_dmg.dmg"
|
||||
url "https://github.com/Termix-SSH/Termix/releases/download/release-#{version}-tag/termix_macos_universal_dmg.dmg"
|
||||
name "Termix"
|
||||
desc "Web-based server management platform with SSH terminal, tunneling, and file editing"
|
||||
homepage "https://github.com/Termix-SSH/Termix"
|
||||
16
README-CN.md
16
README-CN.md
@@ -51,20 +51,22 @@ Termix 是一个开源、永久免费、自托管的一体化服务器管理平
|
||||
- **SSH 终端访问** - 功能齐全的终端,具有分屏支持(最多 4 个面板)和类似浏览器的选项卡系统。包括对自定义终端的支持,包括常见终端主题、字体和其他组件
|
||||
- **SSH 隧道管理** - 创建和管理 SSH 隧道,具有自动重新连接和健康监控功能
|
||||
- **远程文件管理器** - 直接在远程服务器上管理文件,支持查看和编辑代码、图像、音频和视频。无缝上传、下载、重命名、删除和移动文件
|
||||
- **Docker 管理** - 启动、停止、暂停、删除容器。查看容器统计信息。使用 docker exec 终端控制容器。它不是用来替代 Portainer 或 Dockge,而是用于简单管理你的容器而不是创建它们。
|
||||
- **SSH 主机管理器** - 保存、组织和管理您的 SSH 连接,支持标签和文件夹,并轻松保存可重用的登录信息,同时能够自动部署 SSH 密钥
|
||||
- **服务器统计** - 在任何 SSH 服务器上查看 CPU、内存和磁盘使用情况以及网络、正常运行时间和系统信息
|
||||
- **仪表板** - 在仪表板上一目了然地查看服务器信息
|
||||
- **RBAC** - 创建角色并在用户/角色之间共享主机
|
||||
- **用户认证** - 安全的用户管理,具有管理员控制以及 OIDC 和 2FA (TOTP) 支持。查看所有平台上的活动用户会话并撤销权限。将您的 OIDC/本地帐户链接在一起。
|
||||
- **数据库加密** - 后端存储为加密的 SQLite 数据库文件。查看[文档](https://docs.termix.site/security)了解更多信息。
|
||||
- **数据导出/导入** - 导出和导入 SSH 主机、凭据和文件管理器数据
|
||||
- **自动 SSL 设置** - 内置 SSL 证书生成和管理,支持 HTTPS 重定向
|
||||
- **现代用户界面** - 使用 React、Tailwind CSS 和 Shadcn 构建的简洁的桌面/移动设备友好界面
|
||||
- **语言** - 内置支持英语、中文、德语和葡萄牙语
|
||||
- **现代用户界面** - 使用 React、Tailwind CSS 和 Shadcn 构建的简洁的桌面/移动设备友好界面。可选择基于深色或浅色模式的用户界面。
|
||||
- **语言** - 内置支持约 30 种语言(通过 Google 翻译批量翻译,结果可能有所不同)
|
||||
- **平台支持** - 可作为 Web 应用程序、桌面应用程序(Windows、Linux 和 macOS)以及适用于 iOS 和 Android 的专用移动/平板电脑应用程序。
|
||||
- **SSH 工具** - 创建可重用的命令片段,单击即可执行。在多个打开的终端上同时运行一个命令。
|
||||
- **命令历史** - 自动完成并查看以前运行的 SSH 命令
|
||||
- **命令面板** - 双击左 Shift 键可快速使用键盘访问 SSH 连接
|
||||
- **SSH 功能丰富** - 支持跳板机、warpgate、基于 TOTP 的连接等。
|
||||
- **SSH 功能丰富** - 支持跳板机、warpgate、基于 TOTP 的连接、SOCKS5、密码自动填充等。
|
||||
|
||||
# 计划功能
|
||||
|
||||
@@ -140,6 +142,12 @@ volumes:
|
||||
|
||||
<p align="center">
|
||||
<img src="./repo-images/Image 7.png" width="400" alt="Termix Demo 7"/>
|
||||
<img src="./repo-images/Image 8.png" width="400" alt="Termix Demo 8"/>
|
||||
</p>
|
||||
|
||||
<p align="center">
|
||||
<img src="./repo-images/Image 9.png" width="400" alt="Termix Demo 9"/>
|
||||
<img src="./repo-images/Image 10.png" width="400" alt="Termix Demo 110"/>
|
||||
</p>
|
||||
|
||||
<p align="center">
|
||||
@@ -147,7 +155,7 @@ volumes:
|
||||
你的浏览器不支持 video 标签。
|
||||
</video>
|
||||
</p>
|
||||
视频和图像可能已过时。
|
||||
某些视频和图像可能已过时或可能无法完美展示功能。
|
||||
|
||||
# 许可证
|
||||
|
||||
|
||||
52
README.md
52
README.md
@@ -16,17 +16,6 @@
|
||||
<small style="color: #666;">Achieved on September 1st, 2025</small>
|
||||
</p>
|
||||
|
||||
#### Top Technologies
|
||||
|
||||
[](#)
|
||||
[](#)
|
||||
[](#)
|
||||
[](#)
|
||||
[](#)
|
||||
[](#)
|
||||
[](#)
|
||||
[](#)
|
||||
|
||||
<br />
|
||||
<p align="center">
|
||||
<a href="https://github.com/Termix-SSH/Termix">
|
||||
@@ -45,7 +34,7 @@ If you would like, you can support the project here!\
|
||||
|
||||
Termix is an open-source, forever-free, self-hosted all-in-one server management platform. It provides a multi-platform
|
||||
solution for managing your servers and infrastructure through a single, intuitive interface. Termix offers SSH terminal
|
||||
access, SSH tunneling capabilities, and remote file management, with many more tools to come. Termix is the perfect
|
||||
access, SSH tunneling capabilities, remote file management, and many other tools. Termix is the perfect
|
||||
free and self-hosted alternative to Termius available for all platforms.
|
||||
|
||||
# Features
|
||||
@@ -53,20 +42,22 @@ free and self-hosted alternative to Termius available for all platforms.
|
||||
- **SSH Terminal Access** - Full-featured terminal with split-screen support (up to 4 panels) with a browser-like tab system. Includes support for customizing the terminal including common terminal themes, fonts, and other components
|
||||
- **SSH Tunnel Management** - Create and manage SSH tunnels with automatic reconnection and health monitoring
|
||||
- **Remote File Manager** - Manage files directly on remote servers with support for viewing and editing code, images, audio, and video. Upload, download, rename, delete, and move files seamlessly
|
||||
- **Docker Management** - Start, stop, pause, remove containers. View container stats. Control container using docker exec terminal. It was not made to replace Portainer or Dockge but rather to simply manage your containers compared to creating them.
|
||||
- **SSH Host Manager** - Save, organize, and manage your SSH connections with tags and folders, and easily save reusable login info while being able to automate the deployment of SSH keys
|
||||
- **Server Stats** - View CPU, memory, and disk usage along with network, uptime, and system information on any SSH server
|
||||
- **Dashboard** - View server information at a glance on your dashboard
|
||||
- **RBAC** - Create roles and share hosts across users/roles
|
||||
- **User Authentication** - Secure user management with admin controls and OIDC and 2FA (TOTP) support. View active user sessions across all platforms and revoke permissions. Link your OIDC/Local accounts together.
|
||||
- **Database Encryption** - Backend stored as encrypted SQLite database files. View [docs](https://docs.termix.site/security) for more.
|
||||
- **Data Export/Import** - Export and import SSH hosts, credentials, and file manager data
|
||||
- **Automatic SSL Setup** - Built-in SSL certificate generation and management with HTTPS redirects
|
||||
- **Modern UI** - Clean desktop/mobile-friendly interface built with React, Tailwind CSS, and Shadcn
|
||||
- **Languages** - Built-in support for English, Chinese, German, and Portuguese
|
||||
- **Modern UI** - Clean desktop/mobile-friendly interface built with React, Tailwind CSS, and Shadcn. Choose between dark or light mode based UI.
|
||||
- **Languages** - Built-in support ~30 languages (bulk translated via Google Translate, results may vary ofc)
|
||||
- **Platform Support** - Available as a web app, desktop application (Windows, Linux, and macOS), and dedicated mobile/tablet app for iOS and Android.
|
||||
- **SSH Tools** - Create reusable command snippets that execute with a single click. Run one command simultaneously across multiple open terminals.
|
||||
- **Command History** - Auto-complete and view previously ran SSH commands
|
||||
- **Command Palette** - Double tap left shift to quickly access SSH connections with your keyboard
|
||||
- **SSH Feature Rich** - Supports jump hosts, warpgate, TOTP based connections, etc.
|
||||
- **SSH Feature Rich** - Supports jump hosts, warpgate, TOTP based connections, SOCKS5, password autofill, etc.
|
||||
|
||||
# Planned Features
|
||||
|
||||
@@ -80,16 +71,17 @@ Supported Devices:
|
||||
- Windows (x64/ia32)
|
||||
- Portable
|
||||
- MSI Installer
|
||||
- Chocolatey Package Manager (coming soon)
|
||||
- Chocolatey Package Manager
|
||||
- Linux (x64/ia32)
|
||||
- Portable
|
||||
- AUR
|
||||
- AppImage
|
||||
- Deb
|
||||
- Flatpak (coming soon)
|
||||
- Flatpak
|
||||
- macOS (x64/ia32 on v12.0+)
|
||||
- Apple App Store (coming soon)
|
||||
- Apple App Store
|
||||
- DMG
|
||||
- Homebrew (coming soon)
|
||||
- Homebrew
|
||||
- iOS/iPadOS (v15.1+)
|
||||
- Apple App Store
|
||||
- ISO
|
||||
@@ -118,13 +110,25 @@ volumes:
|
||||
driver: local
|
||||
```
|
||||
|
||||
# Sponsors
|
||||
|
||||
<p align="left">
|
||||
<a href="https://www.digitalocean.com/">
|
||||
<img src="https://opensource.nyc3.cdn.digitaloceanspaces.com/attribution/assets/SVG/DO_Logo_horizontal_blue.svg" height="50" alt="DigitalOcean">
|
||||
</a>
|
||||
|
||||
<a href="https://crowdin.com/">
|
||||
<img src="https://support.crowdin.com/assets/logos/core-logo/svg/crowdin-core-logo-cDark.svg" height="50" alt="Crowdin">
|
||||
</a>
|
||||
</p>
|
||||
|
||||
# Support
|
||||
|
||||
If you need help or want to request a feature with Termix, visit the [Issues](https://github.com/Termix-SSH/Support/issues) page, log in, and press `New Issue`.
|
||||
Please be as detailed as possible in your issue, preferably written in English. You can also join the [Discord](https://discord.gg/jVQGdvHDrf) server and visit the support
|
||||
channel, however, response times may be longer.
|
||||
|
||||
# Show-off
|
||||
# Screenshots
|
||||
|
||||
<p align="center">
|
||||
<img src="./repo-images/Image 1.png" width="400" alt="Termix Demo 1"/>
|
||||
@@ -143,6 +147,12 @@ channel, however, response times may be longer.
|
||||
|
||||
<p align="center">
|
||||
<img src="./repo-images/Image 7.png" width="400" alt="Termix Demo 7"/>
|
||||
<img src="./repo-images/Image 8.png" width="400" alt="Termix Demo 8"/>
|
||||
</p>
|
||||
|
||||
<p align="center">
|
||||
<img src="./repo-images/Image 9.png" width="400" alt="Termix Demo 9"/>
|
||||
<img src="./repo-images/Image 10.png" width="400" alt="Termix Demo 110"/>
|
||||
</p>
|
||||
|
||||
<p align="center">
|
||||
@@ -150,7 +160,7 @@ channel, however, response times may be longer.
|
||||
Your browser does not support the video tag.
|
||||
</video>
|
||||
</p>
|
||||
Videos and images may be out of date.
|
||||
Some videos and images may be out of date or may not perfectly showcase features.
|
||||
|
||||
# License
|
||||
|
||||
|
||||
3
crowdin.yml
Normal file
3
crowdin.yml
Normal file
@@ -0,0 +1,3 @@
|
||||
files:
|
||||
- source: /src/locales/en.json
|
||||
translation: /src/locales/translated/%two_letters_code%.json
|
||||
@@ -19,7 +19,7 @@ COPY . .
|
||||
RUN find public/fonts -name "*.ttf" ! -name "*Regular.ttf" ! -name "*Bold.ttf" ! -name "*Italic.ttf" -delete
|
||||
|
||||
RUN npm cache clean --force && \
|
||||
npm run build
|
||||
NODE_OPTIONS="--max-old-space-size=2048" npm run build
|
||||
|
||||
# Stage 3: Build backend
|
||||
FROM deps AS backend-builder
|
||||
@@ -53,16 +53,18 @@ ENV DATA_DIR=/app/data \
|
||||
|
||||
RUN apt-get update && apt-get install -y nginx gettext-base openssl && \
|
||||
rm -rf /var/lib/apt/lists/* && \
|
||||
mkdir -p /app/data /app/uploads && \
|
||||
chown -R node:node /app/data /app/uploads && \
|
||||
useradd -r -s /bin/false nginx
|
||||
mkdir -p /app/data /app/uploads /app/nginx /app/nginx/logs /app/nginx/cache /app/nginx/client_body && \
|
||||
chown -R node:node /app && \
|
||||
chmod 755 /app/data /app/uploads /app/nginx && \
|
||||
touch /app/nginx/nginx.conf && \
|
||||
chown node:node /app/nginx/nginx.conf
|
||||
|
||||
COPY docker/nginx.conf /etc/nginx/nginx.conf
|
||||
COPY docker/nginx-https.conf /etc/nginx/nginx-https.conf
|
||||
COPY docker/nginx.conf /app/nginx/nginx.conf.template
|
||||
COPY docker/nginx-https.conf /app/nginx/nginx-https.conf.template
|
||||
|
||||
COPY --chown=nginx:nginx --from=frontend-builder /app/dist /usr/share/nginx/html
|
||||
COPY --chown=nginx:nginx --from=frontend-builder /app/src/locales /usr/share/nginx/html/locales
|
||||
COPY --chown=nginx:nginx --from=frontend-builder /app/public/fonts /usr/share/nginx/html/fonts
|
||||
COPY --chown=node:node --from=frontend-builder /app/dist /app/html
|
||||
COPY --chown=node:node --from=frontend-builder /app/src/locales /app/html/locales
|
||||
COPY --chown=node:node --from=frontend-builder /app/public/fonts /app/html/fonts
|
||||
|
||||
COPY --chown=node:node --from=production-deps /app/node_modules /app/node_modules
|
||||
COPY --chown=node:node --from=backend-builder /app/dist/backend ./dist/backend
|
||||
@@ -72,6 +74,12 @@ VOLUME ["/app/data"]
|
||||
|
||||
EXPOSE ${PORT} 30001 30002 30003 30004 30005 30006
|
||||
|
||||
HEALTHCHECK --interval=30s --timeout=10s --start-period=30s --retries=3 \
|
||||
CMD node -e "require('http').get('http://localhost:30001/health', (r) => process.exit(r.statusCode === 200 ? 0 : 1)).on('error', () => process.exit(1))"
|
||||
|
||||
COPY docker/entrypoint.sh /entrypoint.sh
|
||||
RUN chmod +x /entrypoint.sh
|
||||
|
||||
USER node
|
||||
|
||||
CMD ["/entrypoint.sh"]
|
||||
|
||||
@@ -11,24 +11,21 @@ echo "Configuring web UI to run on port: $PORT"
|
||||
|
||||
if [ "$ENABLE_SSL" = "true" ]; then
|
||||
echo "SSL enabled - using HTTPS configuration with redirect"
|
||||
NGINX_CONF_SOURCE="/etc/nginx/nginx-https.conf"
|
||||
NGINX_CONF_SOURCE="/app/nginx/nginx-https.conf.template"
|
||||
else
|
||||
echo "SSL disabled - using HTTP-only configuration (default)"
|
||||
NGINX_CONF_SOURCE="/etc/nginx/nginx.conf"
|
||||
NGINX_CONF_SOURCE="/app/nginx/nginx.conf.template"
|
||||
fi
|
||||
|
||||
envsubst '${PORT} ${SSL_PORT} ${SSL_CERT_PATH} ${SSL_KEY_PATH}' < $NGINX_CONF_SOURCE > /etc/nginx/nginx.conf.tmp
|
||||
mv /etc/nginx/nginx.conf.tmp /etc/nginx/nginx.conf
|
||||
envsubst '${PORT} ${SSL_PORT} ${SSL_CERT_PATH} ${SSL_KEY_PATH}' < $NGINX_CONF_SOURCE > /app/nginx/nginx.conf
|
||||
|
||||
mkdir -p /app/data /app/uploads
|
||||
chown -R node:node /app/data /app/uploads
|
||||
chmod 755 /app/data /app/uploads
|
||||
chmod 755 /app/data /app/uploads 2>/dev/null || true
|
||||
|
||||
if [ "$ENABLE_SSL" = "true" ]; then
|
||||
echo "Checking SSL certificate configuration..."
|
||||
mkdir -p /app/data/ssl
|
||||
chown -R node:node /app/data/ssl
|
||||
chmod 755 /app/data/ssl
|
||||
chmod 755 /app/data/ssl 2>/dev/null || true
|
||||
|
||||
DOMAIN=${SSL_DOMAIN:-localhost}
|
||||
|
||||
@@ -84,7 +81,6 @@ EOF
|
||||
|
||||
chmod 600 /app/data/ssl/termix.key
|
||||
chmod 644 /app/data/ssl/termix.crt
|
||||
chown node:node /app/data/ssl/termix.key /app/data/ssl/termix.crt
|
||||
|
||||
rm -f /app/data/ssl/openssl.conf
|
||||
|
||||
@@ -93,7 +89,7 @@ EOF
|
||||
fi
|
||||
|
||||
echo "Starting nginx..."
|
||||
nginx
|
||||
nginx -c /app/nginx/nginx.conf
|
||||
|
||||
echo "Starting backend services..."
|
||||
cd /app
|
||||
@@ -110,11 +106,7 @@ else
|
||||
echo "Warning: package.json not found"
|
||||
fi
|
||||
|
||||
if command -v su-exec > /dev/null 2>&1; then
|
||||
su-exec node node dist/backend/backend/starter.js
|
||||
else
|
||||
su -s /bin/sh node -c "node dist/backend/backend/starter.js"
|
||||
fi
|
||||
node dist/backend/backend/starter.js
|
||||
|
||||
echo "All services started"
|
||||
|
||||
|
||||
@@ -1,11 +1,24 @@
|
||||
worker_processes 1;
|
||||
master_process off;
|
||||
pid /app/nginx/nginx.pid;
|
||||
error_log /app/nginx/logs/error.log warn;
|
||||
|
||||
events {
|
||||
worker_connections 1024;
|
||||
}
|
||||
|
||||
http {
|
||||
include mime.types;
|
||||
include /etc/nginx/mime.types;
|
||||
default_type application/octet-stream;
|
||||
|
||||
access_log /app/nginx/logs/access.log;
|
||||
|
||||
client_body_temp_path /app/nginx/client_body;
|
||||
proxy_temp_path /app/nginx/proxy_temp;
|
||||
fastcgi_temp_path /app/nginx/fastcgi_temp;
|
||||
uwsgi_temp_path /app/nginx/uwsgi_temp;
|
||||
scgi_temp_path /app/nginx/scgi_temp;
|
||||
|
||||
sendfile on;
|
||||
keepalive_timeout 65;
|
||||
client_header_timeout 300s;
|
||||
@@ -37,9 +50,17 @@ http {
|
||||
add_header X-Content-Type-Options nosniff always;
|
||||
add_header X-XSS-Protection "1; mode=block" always;
|
||||
|
||||
location ~* \.(js|css|png|jpg|jpeg|gif|ico|svg|woff|woff2|ttf|eot)$ {
|
||||
root /app/html;
|
||||
expires 1y;
|
||||
add_header Cache-Control "public, immutable";
|
||||
try_files $uri =404;
|
||||
}
|
||||
|
||||
location / {
|
||||
root /usr/share/nginx/html;
|
||||
root /app/html;
|
||||
index index.html index.htm;
|
||||
try_files $uri $uri/ /index.html;
|
||||
}
|
||||
|
||||
location ~* \.map$ {
|
||||
@@ -93,6 +114,15 @@ http {
|
||||
proxy_set_header X-Forwarded-Proto $scheme;
|
||||
}
|
||||
|
||||
location ~ ^/rbac(/.*)?$ {
|
||||
proxy_pass http://127.0.0.1:30001;
|
||||
proxy_http_version 1.1;
|
||||
proxy_set_header Host $host;
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_set_header X-Forwarded-Proto $scheme;
|
||||
}
|
||||
|
||||
location ~ ^/credentials(/.*)?$ {
|
||||
proxy_pass http://127.0.0.1:30001;
|
||||
proxy_http_version 1.1;
|
||||
@@ -171,6 +201,18 @@ http {
|
||||
proxy_set_header X-Forwarded-Proto $scheme;
|
||||
}
|
||||
|
||||
location /ssh/quick-connect {
|
||||
proxy_pass http://127.0.0.1:30001;
|
||||
proxy_http_version 1.1;
|
||||
proxy_set_header Upgrade $http_upgrade;
|
||||
proxy_set_header Connection 'upgrade';
|
||||
proxy_set_header Host $host;
|
||||
proxy_cache_bypass $http_upgrade;
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_set_header X-Forwarded-Proto $scheme;
|
||||
}
|
||||
|
||||
location /ssh/ {
|
||||
proxy_pass http://127.0.0.1:30001;
|
||||
proxy_http_version 1.1;
|
||||
@@ -258,6 +300,15 @@ http {
|
||||
proxy_buffering off;
|
||||
}
|
||||
|
||||
location ~ ^/network-topology(/.*)?$ {
|
||||
proxy_pass http://127.0.0.1:30001;
|
||||
proxy_http_version 1.1;
|
||||
proxy_set_header Host $host;
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_set_header X-Forwarded-Proto $scheme;
|
||||
}
|
||||
|
||||
location /health {
|
||||
proxy_pass http://127.0.0.1:30001;
|
||||
proxy_http_version 1.1;
|
||||
@@ -283,6 +334,10 @@ http {
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_set_header X-Forwarded-Proto $scheme;
|
||||
|
||||
proxy_connect_timeout 60s;
|
||||
proxy_send_timeout 60s;
|
||||
proxy_read_timeout 60s;
|
||||
}
|
||||
|
||||
location ~ ^/uptime(/.*)?$ {
|
||||
@@ -303,9 +358,54 @@ http {
|
||||
proxy_set_header X-Forwarded-Proto $scheme;
|
||||
}
|
||||
|
||||
location ~ ^/dashboard/preferences(/.*)?$ {
|
||||
proxy_pass http://127.0.0.1:30006;
|
||||
proxy_http_version 1.1;
|
||||
proxy_set_header Host $host;
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_set_header X-Forwarded-Proto $scheme;
|
||||
}
|
||||
|
||||
location ^~ /docker/console/ {
|
||||
proxy_pass http://127.0.0.1:30008/;
|
||||
proxy_http_version 1.1;
|
||||
|
||||
proxy_set_header Upgrade $http_upgrade;
|
||||
proxy_set_header Connection "upgrade";
|
||||
proxy_set_header Host $host;
|
||||
proxy_cache_bypass $http_upgrade;
|
||||
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_set_header X-Forwarded-Proto $scheme;
|
||||
|
||||
proxy_read_timeout 86400s;
|
||||
proxy_send_timeout 86400s;
|
||||
proxy_connect_timeout 10s;
|
||||
|
||||
proxy_buffering off;
|
||||
proxy_request_buffering off;
|
||||
|
||||
proxy_next_upstream error timeout invalid_header http_500 http_502 http_503;
|
||||
}
|
||||
|
||||
location ~ ^/docker(/.*)?$ {
|
||||
proxy_pass http://127.0.0.1:30007;
|
||||
proxy_http_version 1.1;
|
||||
proxy_set_header Host $host;
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_set_header X-Forwarded-Proto $scheme;
|
||||
|
||||
proxy_connect_timeout 60s;
|
||||
proxy_send_timeout 300s;
|
||||
proxy_read_timeout 300s;
|
||||
}
|
||||
|
||||
error_page 500 502 503 504 /50x.html;
|
||||
location = /50x.html {
|
||||
root /usr/share/nginx/html;
|
||||
root /app/html;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,11 +1,24 @@
|
||||
worker_processes 1;
|
||||
master_process off;
|
||||
pid /app/nginx/nginx.pid;
|
||||
error_log /app/nginx/logs/error.log warn;
|
||||
|
||||
events {
|
||||
worker_connections 1024;
|
||||
}
|
||||
|
||||
http {
|
||||
include mime.types;
|
||||
include /etc/nginx/mime.types;
|
||||
default_type application/octet-stream;
|
||||
|
||||
access_log /app/nginx/logs/access.log;
|
||||
|
||||
client_body_temp_path /app/nginx/client_body;
|
||||
proxy_temp_path /app/nginx/proxy_temp;
|
||||
fastcgi_temp_path /app/nginx/fastcgi_temp;
|
||||
uwsgi_temp_path /app/nginx/uwsgi_temp;
|
||||
scgi_temp_path /app/nginx/scgi_temp;
|
||||
|
||||
sendfile on;
|
||||
keepalive_timeout 65;
|
||||
client_header_timeout 300s;
|
||||
@@ -27,14 +40,14 @@ http {
|
||||
add_header X-XSS-Protection "1; mode=block" always;
|
||||
|
||||
location ~* \.(js|css|png|jpg|jpeg|gif|ico|svg|woff|woff2|ttf|eot)$ {
|
||||
root /usr/share/nginx/html;
|
||||
root /app/html;
|
||||
expires 1y;
|
||||
add_header Cache-Control "public, immutable";
|
||||
try_files $uri =404;
|
||||
}
|
||||
|
||||
location / {
|
||||
root /usr/share/nginx/html;
|
||||
root /app/html;
|
||||
index index.html index.htm;
|
||||
try_files $uri $uri/ /index.html;
|
||||
}
|
||||
@@ -90,6 +103,15 @@ http {
|
||||
proxy_set_header X-Forwarded-Proto $scheme;
|
||||
}
|
||||
|
||||
location ~ ^/rbac(/.*)?$ {
|
||||
proxy_pass http://127.0.0.1:30001;
|
||||
proxy_http_version 1.1;
|
||||
proxy_set_header Host $host;
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_set_header X-Forwarded-Proto $scheme;
|
||||
}
|
||||
|
||||
location ~ ^/credentials(/.*)?$ {
|
||||
proxy_pass http://127.0.0.1:30001;
|
||||
proxy_http_version 1.1;
|
||||
@@ -168,6 +190,18 @@ http {
|
||||
proxy_set_header X-Forwarded-Proto $scheme;
|
||||
}
|
||||
|
||||
location /ssh/quick-connect {
|
||||
proxy_pass http://127.0.0.1:30001;
|
||||
proxy_http_version 1.1;
|
||||
proxy_set_header Upgrade $http_upgrade;
|
||||
proxy_set_header Connection 'upgrade';
|
||||
proxy_set_header Host $host;
|
||||
proxy_cache_bypass $http_upgrade;
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_set_header X-Forwarded-Proto $scheme;
|
||||
}
|
||||
|
||||
location /ssh/ {
|
||||
proxy_pass http://127.0.0.1:30001;
|
||||
proxy_http_version 1.1;
|
||||
@@ -255,6 +289,15 @@ http {
|
||||
proxy_buffering off;
|
||||
}
|
||||
|
||||
location ~ ^/network-topology(/.*)?$ {
|
||||
proxy_pass http://127.0.0.1:30001;
|
||||
proxy_http_version 1.1;
|
||||
proxy_set_header Host $host;
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_set_header X-Forwarded-Proto $scheme;
|
||||
}
|
||||
|
||||
location /health {
|
||||
proxy_pass http://127.0.0.1:30001;
|
||||
proxy_http_version 1.1;
|
||||
@@ -280,6 +323,10 @@ http {
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_set_header X-Forwarded-Proto $scheme;
|
||||
|
||||
proxy_connect_timeout 60s;
|
||||
proxy_send_timeout 60s;
|
||||
proxy_read_timeout 60s;
|
||||
}
|
||||
|
||||
location ~ ^/uptime(/.*)?$ {
|
||||
@@ -300,9 +347,54 @@ http {
|
||||
proxy_set_header X-Forwarded-Proto $scheme;
|
||||
}
|
||||
|
||||
location ~ ^/dashboard/preferences(/.*)?$ {
|
||||
proxy_pass http://127.0.0.1:30006;
|
||||
proxy_http_version 1.1;
|
||||
proxy_set_header Host $host;
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_set_header X-Forwarded-Proto $scheme;
|
||||
}
|
||||
|
||||
location ^~ /docker/console/ {
|
||||
proxy_pass http://127.0.0.1:30008/;
|
||||
proxy_http_version 1.1;
|
||||
|
||||
proxy_set_header Upgrade $http_upgrade;
|
||||
proxy_set_header Connection "upgrade";
|
||||
proxy_set_header Host $host;
|
||||
proxy_cache_bypass $http_upgrade;
|
||||
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_set_header X-Forwarded-Proto $scheme;
|
||||
|
||||
proxy_read_timeout 86400s;
|
||||
proxy_send_timeout 86400s;
|
||||
proxy_connect_timeout 10s;
|
||||
|
||||
proxy_buffering off;
|
||||
proxy_request_buffering off;
|
||||
|
||||
proxy_next_upstream error timeout invalid_header http_500 http_502 http_503;
|
||||
}
|
||||
|
||||
location ~ ^/docker(/.*)?$ {
|
||||
proxy_pass http://127.0.0.1:30007;
|
||||
proxy_http_version 1.1;
|
||||
proxy_set_header Host $host;
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_set_header X-Forwarded-Proto $scheme;
|
||||
|
||||
proxy_connect_timeout 60s;
|
||||
proxy_send_timeout 300s;
|
||||
proxy_read_timeout 300s;
|
||||
}
|
||||
|
||||
error_page 500 502 503 504 /50x.html;
|
||||
location = /50x.html {
|
||||
root /usr/share/nginx/html;
|
||||
root /app/html;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -124,5 +124,6 @@
|
||||
"ITSAppUsesNonExemptEncryption": false,
|
||||
"NSAppleEventsUsageDescription": "Termix needs access to control other applications for terminal operations."
|
||||
}
|
||||
}
|
||||
},
|
||||
"generateUpdatesFilesForAllChannels": true
|
||||
}
|
||||
|
||||
@@ -11,13 +11,9 @@ const fs = require("fs");
|
||||
const os = require("os");
|
||||
|
||||
if (process.platform === "linux") {
|
||||
app.commandLine.appendSwitch("--no-sandbox");
|
||||
app.commandLine.appendSwitch("--disable-setuid-sandbox");
|
||||
app.commandLine.appendSwitch("--disable-dev-shm-usage");
|
||||
app.commandLine.appendSwitch("--ozone-platform-hint=auto");
|
||||
|
||||
app.disableHardwareAcceleration();
|
||||
app.commandLine.appendSwitch("--disable-gpu");
|
||||
app.commandLine.appendSwitch("--disable-gpu-compositing");
|
||||
app.commandLine.appendSwitch("--enable-features=VaapiVideoDecoder");
|
||||
}
|
||||
|
||||
app.commandLine.appendSwitch("--ignore-certificate-errors");
|
||||
|
||||
@@ -2,21 +2,6 @@ const { contextBridge, ipcRenderer } = require("electron");
|
||||
|
||||
contextBridge.exposeInMainWorld("electronAPI", {
|
||||
getAppVersion: () => ipcRenderer.invoke("get-app-version"),
|
||||
getPlatform: () => ipcRenderer.invoke("get-platform"),
|
||||
checkElectronUpdate: () => ipcRenderer.invoke("check-electron-update"),
|
||||
|
||||
getServerConfig: () => ipcRenderer.invoke("get-server-config"),
|
||||
saveServerConfig: (config) =>
|
||||
ipcRenderer.invoke("save-server-config", config),
|
||||
testServerConnection: (serverUrl) =>
|
||||
ipcRenderer.invoke("test-server-connection", serverUrl),
|
||||
|
||||
showSaveDialog: (options) => ipcRenderer.invoke("show-save-dialog", options),
|
||||
showOpenDialog: (options) => ipcRenderer.invoke("show-open-dialog", options),
|
||||
|
||||
onUpdateAvailable: (callback) => ipcRenderer.on("update-available", callback),
|
||||
onUpdateDownloaded: (callback) =>
|
||||
ipcRenderer.on("update-downloaded", callback),
|
||||
|
||||
removeAllListeners: (channel) => ipcRenderer.removeAllListeners(channel),
|
||||
isElectron: true,
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
[Desktop Entry]
|
||||
Name=Termix
|
||||
Comment=Web-based server management platform with SSH terminal, tunneling, and file editing
|
||||
Exec=termix %U
|
||||
Exec=run.sh %U
|
||||
Icon=com.karmaa.termix
|
||||
Terminal=false
|
||||
Type=Application
|
||||
|
||||
12
flatpak/com.karmaa.termix.flatpakref
Normal file
12
flatpak/com.karmaa.termix.flatpakref
Normal file
@@ -0,0 +1,12 @@
|
||||
[Flatpak Ref]
|
||||
Name=Termix
|
||||
Branch=stable
|
||||
Title=Termix - SSH Server Management Platform
|
||||
IsRuntime=false
|
||||
Url=https://github.com/Termix-SSH/Termix/releases/download/VERSION_PLACEHOLDER/termix_linux_flatpak.flatpak
|
||||
GPGKey=
|
||||
RuntimeRepo=https://flathub.org/repo/flathub.flatpakrepo
|
||||
Comment=Web-based server management platform with SSH terminal, tunneling, and file editing
|
||||
Description=Termix is an open-source, forever-free, self-hosted all-in-one server management platform. It provides SSH terminal access, tunneling capabilities, and remote file management.
|
||||
Icon=https://raw.githubusercontent.com/Termix-SSH/Termix/main/public/icon.png
|
||||
Homepage=https://github.com/Termix-SSH/Termix
|
||||
@@ -5,7 +5,7 @@
|
||||
<summary>Web-based server management platform with SSH terminal, tunneling, and file editing</summary>
|
||||
|
||||
<metadata_license>CC0-1.0</metadata_license>
|
||||
<project_license>GPL-3.0-or-later</project_license>
|
||||
<project_license>Apache-2.0</project_license>
|
||||
|
||||
<developer_name>bugattiguy527</developer_name>
|
||||
|
||||
|
||||
@@ -1,10 +1,10 @@
|
||||
app-id: com.karmaa.termix
|
||||
runtime: org.freedesktop.Platform
|
||||
runtime-version: "23.08"
|
||||
runtime-version: "24.08"
|
||||
sdk: org.freedesktop.Sdk
|
||||
base: org.electronjs.Electron2.BaseApp
|
||||
base-version: "23.08"
|
||||
command: termix
|
||||
base-version: "24.08"
|
||||
command: run.sh
|
||||
separate-locales: false
|
||||
|
||||
finish-args:
|
||||
@@ -16,8 +16,11 @@ finish-args:
|
||||
- --device=dri
|
||||
- --filesystem=home
|
||||
- --socket=ssh-auth
|
||||
- --talk-name=org.freedesktop.Notifications
|
||||
- --socket=session-bus
|
||||
- --talk-name=org.freedesktop.secrets
|
||||
- --env=ELECTRON_TRASH=gio
|
||||
- --env=XCURSOR_PATH=/run/host/user-share/icons:/run/host/share/icons
|
||||
- --env=ELECTRON_OZONE_PLATFORM_HINT=auto
|
||||
|
||||
modules:
|
||||
- name: termix
|
||||
@@ -30,6 +33,21 @@ modules:
|
||||
- cp -r squashfs-root/resources /app/bin/
|
||||
- cp -r squashfs-root/locales /app/bin/ || true
|
||||
|
||||
- cp squashfs-root/*.so /app/bin/ || true
|
||||
- cp squashfs-root/*.pak /app/bin/ || true
|
||||
- cp squashfs-root/*.bin /app/bin/ || true
|
||||
- cp squashfs-root/*.dat /app/bin/ || true
|
||||
- cp squashfs-root/*.json /app/bin/ || true
|
||||
|
||||
- |
|
||||
cat > run.sh << 'EOF'
|
||||
#!/bin/bash
|
||||
export TMPDIR="$XDG_RUNTIME_DIR/app/$FLATPAK_ID"
|
||||
exec zypak-wrapper /app/bin/termix "$@"
|
||||
EOF
|
||||
- chmod +x run.sh
|
||||
- install -Dm755 run.sh /app/bin/run.sh
|
||||
|
||||
- install -Dm644 com.karmaa.termix.desktop /app/share/applications/com.karmaa.termix.desktop
|
||||
|
||||
- install -Dm644 com.karmaa.termix.metainfo.xml /app/share/metainfo/com.karmaa.termix.metainfo.xml
|
||||
@@ -40,14 +58,14 @@ modules:
|
||||
|
||||
sources:
|
||||
- type: file
|
||||
url: https://github.com/Termix-SSH/Termix/releases/download/release-VERSION_PLACEHOLDER-tag/termix_linux_x64_VERSION_PLACEHOLDER_appimage.AppImage
|
||||
url: https://github.com/Termix-SSH/Termix/releases/download/release-VERSION_PLACEHOLDER-tag/termix_linux_x64_appimage.AppImage
|
||||
sha256: CHECKSUM_X64_PLACEHOLDER
|
||||
dest-filename: termix.AppImage
|
||||
only-arches:
|
||||
- x86_64
|
||||
|
||||
- type: file
|
||||
url: https://github.com/Termix-SSH/Termix/releases/download/release-VERSION_PLACEHOLDER-tag/termix_linux_arm64_VERSION_PLACEHOLDER_appimage.AppImage
|
||||
url: https://github.com/Termix-SSH/Termix/releases/download/release-VERSION_PLACEHOLDER-tag/termix_linux_arm64_appimage.AppImage
|
||||
sha256: CHECKSUM_ARM64_PLACEHOLDER
|
||||
dest-filename: termix.AppImage
|
||||
only-arches:
|
||||
|
||||
@@ -1,34 +0,0 @@
|
||||
#!/bin/bash
|
||||
set -e
|
||||
|
||||
VERSION="$1"
|
||||
CHECKSUM="$2"
|
||||
RELEASE_DATE="$3"
|
||||
|
||||
if [ -z "$VERSION" ] || [ -z "$CHECKSUM" ] || [ -z "$RELEASE_DATE" ]; then
|
||||
echo "Usage: $0 <version> <checksum> <release-date>"
|
||||
echo "Example: $0 1.8.0 abc123... 2025-10-26"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "Preparing Flatpak submission for version $VERSION"
|
||||
|
||||
cp public/icon.svg flatpak/com.karmaa.termix.svg
|
||||
echo "✓ Copied SVG icon"
|
||||
|
||||
if command -v convert &> /dev/null; then
|
||||
convert public/icon.png -resize 256x256 flatpak/icon-256.png
|
||||
convert public/icon.png -resize 128x128 flatpak/icon-128.png
|
||||
echo "✓ Generated PNG icons"
|
||||
else
|
||||
cp public/icon.png flatpak/icon-256.png
|
||||
cp public/icon.png flatpak/icon-128.png
|
||||
echo "⚠ ImageMagick not found, using original icon"
|
||||
fi
|
||||
|
||||
sed -i "s/VERSION_PLACEHOLDER/$VERSION/g" flatpak/com.karmaa.termix.yml
|
||||
sed -i "s/CHECKSUM_PLACEHOLDER/$CHECKSUM/g" flatpak/com.karmaa.termix.yml
|
||||
echo "✓ Updated manifest with version $VERSION"
|
||||
|
||||
sed -i "s/VERSION_PLACEHOLDER/$VERSION/g" flatpak/com.karmaa.termix.metainfo.xml
|
||||
sed -i "s/DATE_PLACEHOLDER/$RELEASE_DATE/g" flatpak/com.karmaa.termix.metainfo.xml
|
||||
@@ -4,6 +4,13 @@
|
||||
<meta charset="UTF-8" />
|
||||
<link rel="icon" type="image/svg+xml" href="/favicon.ico" />
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
|
||||
<!-- PWA Meta Tags -->
|
||||
<meta name="theme-color" content="#09090b" />
|
||||
<meta name="apple-mobile-web-app-capable" content="yes" />
|
||||
<meta name="apple-mobile-web-app-status-bar-style" content="black-translucent" />
|
||||
<meta name="apple-mobile-web-app-title" content="Termix" />
|
||||
<link rel="apple-touch-icon" href="/icons/512x512.png" />
|
||||
<link rel="manifest" href="/manifest.json" />
|
||||
<title>Termix</title>
|
||||
<style>
|
||||
.hide-scrollbar {
|
||||
|
||||
2305
openapi.json
2305
openapi.json
File diff suppressed because it is too large
Load Diff
1321
package-lock.json
generated
1321
package-lock.json
generated
File diff suppressed because it is too large
Load Diff
13
package.json
13
package.json
@@ -1,7 +1,7 @@
|
||||
{
|
||||
"name": "termix",
|
||||
"private": true,
|
||||
"version": "1.9.0",
|
||||
"version": "1.10.1",
|
||||
"description": "A web-based server management platform with SSH terminal, tunneling, and file editing capabilities",
|
||||
"author": "Karmaa",
|
||||
"main": "electron/main.cjs",
|
||||
@@ -17,6 +17,7 @@
|
||||
"build": "vite build && tsc -p tsconfig.node.json",
|
||||
"build:backend": "tsc -p tsconfig.node.json",
|
||||
"dev:backend": "tsc -p tsconfig.node.json && node ./dist/backend/backend/starter.js",
|
||||
"generate:openapi": "tsc -p tsconfig.node.json && node ./dist/backend/backend/swagger.js",
|
||||
"preview": "vite preview",
|
||||
"electron:dev": "concurrently \"npm run dev\" \"powershell -c \\\"Start-Sleep -Seconds 5\\\" && electron .\"",
|
||||
"build:win-portable": "npm run build && electron-builder --win --dir",
|
||||
@@ -35,6 +36,7 @@
|
||||
"@hookform/resolvers": "^5.1.1",
|
||||
"@monaco-editor/react": "^4.7.0",
|
||||
"@radix-ui/react-accordion": "^1.2.11",
|
||||
"@radix-ui/react-alert-dialog": "^1.1.15",
|
||||
"@radix-ui/react-checkbox": "^1.3.2",
|
||||
"@radix-ui/react-dialog": "^1.1.15",
|
||||
"@radix-ui/react-dropdown-menu": "^2.1.15",
|
||||
@@ -45,18 +47,20 @@
|
||||
"@radix-ui/react-select": "^2.2.5",
|
||||
"@radix-ui/react-separator": "^1.1.7",
|
||||
"@radix-ui/react-slider": "^1.3.6",
|
||||
"@radix-ui/react-slot": "^1.2.3",
|
||||
"@radix-ui/react-slot": "^1.2.4",
|
||||
"@radix-ui/react-switch": "^1.2.5",
|
||||
"@radix-ui/react-tabs": "^1.1.12",
|
||||
"@radix-ui/react-tooltip": "^1.2.8",
|
||||
"@tailwindcss/vite": "^4.1.14",
|
||||
"@types/bcryptjs": "^2.4.6",
|
||||
"@types/cookie-parser": "^1.4.9",
|
||||
"@types/cytoscape": "^3.21.9",
|
||||
"@types/jszip": "^3.4.0",
|
||||
"@types/multer": "^2.0.0",
|
||||
"@types/qrcode": "^1.5.5",
|
||||
"@types/speakeasy": "^2.0.10",
|
||||
"@uiw/codemirror-extensions-langs": "^4.24.1",
|
||||
"@uiw/codemirror-theme-github": "^4.25.4",
|
||||
"@uiw/react-codemirror": "^4.24.1",
|
||||
"@xterm/addon-clipboard": "^0.1.0",
|
||||
"@xterm/addon-fit": "^0.10.0",
|
||||
@@ -73,9 +77,11 @@
|
||||
"cmdk": "^1.1.1",
|
||||
"cookie-parser": "^1.4.7",
|
||||
"cors": "^2.8.5",
|
||||
"cytoscape": "^3.33.1",
|
||||
"dotenv": "^17.2.0",
|
||||
"drizzle-orm": "^0.44.3",
|
||||
"express": "^5.1.0",
|
||||
"i18n-auto-translation": "^2.2.3",
|
||||
"i18next": "^25.4.2",
|
||||
"i18next-browser-languagedetector": "^8.2.0",
|
||||
"jose": "^5.2.3",
|
||||
@@ -88,6 +94,7 @@
|
||||
"node-fetch": "^3.3.2",
|
||||
"qrcode": "^1.5.4",
|
||||
"react": "^19.1.0",
|
||||
"react-cytoscapejs": "^2.0.0",
|
||||
"react-dom": "^19.1.0",
|
||||
"react-h5-audio-player": "^3.10.1",
|
||||
"react-hook-form": "^7.60.0",
|
||||
@@ -103,6 +110,7 @@
|
||||
"react-xtermjs": "^1.0.10",
|
||||
"recharts": "^3.2.1",
|
||||
"remark-gfm": "^4.0.1",
|
||||
"socks": "^2.8.7",
|
||||
"sonner": "^2.0.7",
|
||||
"speakeasy": "^2.0.0",
|
||||
"ssh2": "^1.16.0",
|
||||
@@ -137,6 +145,7 @@
|
||||
"husky": "^9.1.7",
|
||||
"lint-staged": "^16.2.3",
|
||||
"prettier": "3.6.2",
|
||||
"swagger-jsdoc": "^6.2.8",
|
||||
"typescript": "~5.9.2",
|
||||
"typescript-eslint": "^8.40.0",
|
||||
"vite": "^7.1.5"
|
||||
|
||||
40
public/manifest.json
Normal file
40
public/manifest.json
Normal file
@@ -0,0 +1,40 @@
|
||||
{
|
||||
"name": "Termix",
|
||||
"short_name": "Termix",
|
||||
"description": "A web-based server management platform with SSH terminal, tunneling, and file editing capabilities",
|
||||
"theme_color": "#09090b",
|
||||
"background_color": "#09090b",
|
||||
"display": "standalone",
|
||||
"orientation": "any",
|
||||
"scope": "/",
|
||||
"start_url": "/",
|
||||
"icons": [
|
||||
{
|
||||
"src": "/icons/48x48.png",
|
||||
"sizes": "48x48",
|
||||
"type": "image/png"
|
||||
},
|
||||
{
|
||||
"src": "/icons/64x64.png",
|
||||
"sizes": "64x64",
|
||||
"type": "image/png"
|
||||
},
|
||||
{
|
||||
"src": "/icons/128x128.png",
|
||||
"sizes": "128x128",
|
||||
"type": "image/png"
|
||||
},
|
||||
{
|
||||
"src": "/icons/256x256.png",
|
||||
"sizes": "256x256",
|
||||
"type": "image/png"
|
||||
},
|
||||
{
|
||||
"src": "/icons/512x512.png",
|
||||
"sizes": "512x512",
|
||||
"type": "image/png",
|
||||
"purpose": "any maskable"
|
||||
}
|
||||
],
|
||||
"categories": ["utilities", "developer", "productivity"]
|
||||
}
|
||||
120
public/sw.js
Normal file
120
public/sw.js
Normal file
@@ -0,0 +1,120 @@
|
||||
/**
|
||||
* Termix Service Worker
|
||||
* Handles caching for offline PWA support
|
||||
*/
|
||||
|
||||
const CACHE_NAME = "termix-v1";
|
||||
const STATIC_ASSETS = [
|
||||
"/",
|
||||
"/index.html",
|
||||
"/manifest.json",
|
||||
"/favicon.ico",
|
||||
"/icons/48x48.png",
|
||||
"/icons/128x128.png",
|
||||
"/icons/256x256.png",
|
||||
"/icons/512x512.png",
|
||||
];
|
||||
|
||||
// Install event - cache static assets
|
||||
self.addEventListener("install", (event) => {
|
||||
event.waitUntil(
|
||||
caches
|
||||
.open(CACHE_NAME)
|
||||
.then((cache) => {
|
||||
console.log("[SW] Caching static assets");
|
||||
return cache.addAll(STATIC_ASSETS);
|
||||
})
|
||||
.then(() => {
|
||||
// Activate immediately without waiting
|
||||
return self.skipWaiting();
|
||||
}),
|
||||
);
|
||||
});
|
||||
|
||||
// Activate event - clean up old caches
|
||||
self.addEventListener("activate", (event) => {
|
||||
event.waitUntil(
|
||||
caches
|
||||
.keys()
|
||||
.then((cacheNames) => {
|
||||
return Promise.all(
|
||||
cacheNames
|
||||
.filter((name) => name !== CACHE_NAME)
|
||||
.map((name) => {
|
||||
console.log("[SW] Deleting old cache:", name);
|
||||
return caches.delete(name);
|
||||
}),
|
||||
);
|
||||
})
|
||||
.then(() => {
|
||||
// Take control of all pages immediately
|
||||
return self.clients.claim();
|
||||
}),
|
||||
);
|
||||
});
|
||||
|
||||
// Fetch event - serve from cache, fall back to network
|
||||
self.addEventListener("fetch", (event) => {
|
||||
const { request } = event;
|
||||
const url = new URL(request.url);
|
||||
|
||||
// Skip non-GET requests
|
||||
if (request.method !== "GET") {
|
||||
return;
|
||||
}
|
||||
|
||||
// Skip API requests - these must be online
|
||||
if (url.pathname.startsWith("/api/") || url.pathname.startsWith("/ws")) {
|
||||
return;
|
||||
}
|
||||
|
||||
// Skip cross-origin requests
|
||||
if (url.origin !== self.location.origin) {
|
||||
return;
|
||||
}
|
||||
|
||||
// For navigation requests (HTML), use network-first
|
||||
if (request.mode === "navigate") {
|
||||
event.respondWith(
|
||||
fetch(request)
|
||||
.then((response) => {
|
||||
// Clone and cache the response
|
||||
const responseClone = response.clone();
|
||||
caches.open(CACHE_NAME).then((cache) => {
|
||||
cache.put(request, responseClone);
|
||||
});
|
||||
return response;
|
||||
})
|
||||
.catch(() => {
|
||||
// Offline: return cached index.html
|
||||
return caches.match("/index.html");
|
||||
}),
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
// For all other assets, use cache-first
|
||||
event.respondWith(
|
||||
caches.match(request).then((cachedResponse) => {
|
||||
if (cachedResponse) {
|
||||
return cachedResponse;
|
||||
}
|
||||
|
||||
// Not in cache, fetch from network
|
||||
return fetch(request).then((response) => {
|
||||
// Don't cache non-successful responses
|
||||
if (!response || response.status !== 200 || response.type !== "basic") {
|
||||
return response;
|
||||
}
|
||||
|
||||
// Clone and cache the response
|
||||
const responseClone = response.clone();
|
||||
caches.open(CACHE_NAME).then((cache) => {
|
||||
cache.put(request, responseClone);
|
||||
});
|
||||
|
||||
return response;
|
||||
});
|
||||
}),
|
||||
);
|
||||
});
|
||||
BIN
repo-images/Image 10.png
Normal file
BIN
repo-images/Image 10.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 158 KiB |
Binary file not shown.
|
Before Width: | Height: | Size: 407 KiB After Width: | Height: | Size: 355 KiB |
BIN
repo-images/Image 8.png
Normal file
BIN
repo-images/Image 8.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 227 KiB |
BIN
repo-images/Image 9.png
Normal file
BIN
repo-images/Image 9.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 153 KiB |
@@ -1,9 +1,14 @@
|
||||
import express from "express";
|
||||
import cors from "cors";
|
||||
import cookieParser from "cookie-parser";
|
||||
import { getDb } from "./database/db/index.js";
|
||||
import { recentActivity, sshData } from "./database/db/schema.js";
|
||||
import { eq, and, desc } from "drizzle-orm";
|
||||
import { getDb, DatabaseSaveTrigger } from "./database/db/index.js";
|
||||
import {
|
||||
recentActivity,
|
||||
sshData,
|
||||
hostAccess,
|
||||
dashboardPreferences,
|
||||
} from "./database/db/schema.js";
|
||||
import { eq, and, desc, or, sql } from "drizzle-orm";
|
||||
import { dashboardLogger } from "./utils/logger.js";
|
||||
import { SimpleDBOps } from "./utils/simple-db-ops.js";
|
||||
import { AuthManager } from "./utils/auth-manager.js";
|
||||
@@ -15,7 +20,7 @@ const authManager = AuthManager.getInstance();
|
||||
const serverStartTime = Date.now();
|
||||
|
||||
const activityRateLimiter = new Map<string, number>();
|
||||
const RATE_LIMIT_MS = 1000; // 1 second window
|
||||
const RATE_LIMIT_MS = 1000;
|
||||
|
||||
app.use(
|
||||
cors({
|
||||
@@ -58,6 +63,31 @@ app.use(express.json({ limit: "1mb" }));
|
||||
|
||||
app.use(authManager.createAuthMiddleware());
|
||||
|
||||
/**
|
||||
* @openapi
|
||||
* /uptime:
|
||||
* get:
|
||||
* summary: Get server uptime
|
||||
* description: Returns the uptime of the server in various formats.
|
||||
* tags:
|
||||
* - Dashboard
|
||||
* responses:
|
||||
* 200:
|
||||
* description: Server uptime information.
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* type: object
|
||||
* properties:
|
||||
* uptimeMs:
|
||||
* type: number
|
||||
* uptimeSeconds:
|
||||
* type: number
|
||||
* formatted:
|
||||
* type: string
|
||||
* 500:
|
||||
* description: Failed to get uptime.
|
||||
*/
|
||||
app.get("/uptime", async (req, res) => {
|
||||
try {
|
||||
const uptimeMs = Date.now() - serverStartTime;
|
||||
@@ -77,6 +107,28 @@ app.get("/uptime", async (req, res) => {
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* @openapi
|
||||
* /activity/recent:
|
||||
* get:
|
||||
* summary: Get recent activity
|
||||
* description: Fetches the most recent activities for the authenticated user.
|
||||
* tags:
|
||||
* - Dashboard
|
||||
* parameters:
|
||||
* - in: query
|
||||
* name: limit
|
||||
* schema:
|
||||
* type: integer
|
||||
* description: The maximum number of activities to return.
|
||||
* responses:
|
||||
* 200:
|
||||
* description: A list of recent activities.
|
||||
* 401:
|
||||
* description: Session expired.
|
||||
* 500:
|
||||
* description: Failed to get recent activity.
|
||||
*/
|
||||
app.get("/activity/recent", async (req, res) => {
|
||||
try {
|
||||
const userId = (req as AuthenticatedRequest).userId;
|
||||
@@ -108,6 +160,40 @@ app.get("/activity/recent", async (req, res) => {
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* @openapi
|
||||
* /activity/log:
|
||||
* post:
|
||||
* summary: Log a new activity
|
||||
* description: Logs a new user activity, such as accessing a terminal or file manager. This endpoint is rate-limited.
|
||||
* tags:
|
||||
* - Dashboard
|
||||
* requestBody:
|
||||
* required: true
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* type: object
|
||||
* properties:
|
||||
* type:
|
||||
* type: string
|
||||
* enum: [terminal, file_manager, server_stats, tunnel, docker]
|
||||
* hostId:
|
||||
* type: integer
|
||||
* hostName:
|
||||
* type: string
|
||||
* responses:
|
||||
* 200:
|
||||
* description: Activity logged successfully or rate-limited.
|
||||
* 400:
|
||||
* description: Invalid request body.
|
||||
* 401:
|
||||
* description: Session expired.
|
||||
* 404:
|
||||
* description: Host not found or access denied.
|
||||
* 500:
|
||||
* description: Failed to log activity.
|
||||
*/
|
||||
app.post("/activity/log", async (req, res) => {
|
||||
try {
|
||||
const userId = (req as AuthenticatedRequest).userId;
|
||||
@@ -127,9 +213,18 @@ app.post("/activity/log", async (req, res) => {
|
||||
});
|
||||
}
|
||||
|
||||
if (type !== "terminal" && type !== "file_manager") {
|
||||
if (
|
||||
![
|
||||
"terminal",
|
||||
"file_manager",
|
||||
"server_stats",
|
||||
"tunnel",
|
||||
"docker",
|
||||
].includes(type)
|
||||
) {
|
||||
return res.status(400).json({
|
||||
error: "Invalid activity type. Must be 'terminal' or 'file_manager'",
|
||||
error:
|
||||
"Invalid activity type. Must be 'terminal', 'file_manager', 'server_stats', 'tunnel', or 'docker'",
|
||||
});
|
||||
}
|
||||
|
||||
@@ -155,7 +250,7 @@ app.post("/activity/log", async (req, res) => {
|
||||
entriesToDelete.forEach((key) => activityRateLimiter.delete(key));
|
||||
}
|
||||
|
||||
const hosts = await SimpleDBOps.select(
|
||||
const ownedHosts = await SimpleDBOps.select(
|
||||
getDb()
|
||||
.select()
|
||||
.from(sshData)
|
||||
@@ -164,8 +259,19 @@ app.post("/activity/log", async (req, res) => {
|
||||
userId,
|
||||
);
|
||||
|
||||
if (hosts.length === 0) {
|
||||
return res.status(404).json({ error: "Host not found" });
|
||||
if (ownedHosts.length === 0) {
|
||||
const sharedHosts = await getDb()
|
||||
.select()
|
||||
.from(hostAccess)
|
||||
.where(
|
||||
and(eq(hostAccess.hostId, hostId), eq(hostAccess.userId, userId)),
|
||||
);
|
||||
|
||||
if (sharedHosts.length === 0) {
|
||||
return res
|
||||
.status(404)
|
||||
.json({ error: "Host not found or access denied" });
|
||||
}
|
||||
}
|
||||
|
||||
const result = (await SimpleDBOps.insert(
|
||||
@@ -204,6 +310,22 @@ app.post("/activity/log", async (req, res) => {
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* @openapi
|
||||
* /activity/reset:
|
||||
* delete:
|
||||
* summary: Reset recent activity
|
||||
* description: Clears all recent activity for the authenticated user.
|
||||
* tags:
|
||||
* - Dashboard
|
||||
* responses:
|
||||
* 200:
|
||||
* description: Recent activity cleared.
|
||||
* 401:
|
||||
* description: Session expired.
|
||||
* 500:
|
||||
* description: Failed to reset activity.
|
||||
*/
|
||||
app.delete("/activity/reset", async (req, res) => {
|
||||
try {
|
||||
const userId = (req as AuthenticatedRequest).userId;
|
||||
@@ -233,6 +355,166 @@ app.delete("/activity/reset", async (req, res) => {
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* @openapi
|
||||
* /dashboard/preferences:
|
||||
* get:
|
||||
* summary: Get dashboard layout preferences
|
||||
* description: Returns the user's customized dashboard layout settings. If no preferences exist, returns default layout.
|
||||
* tags:
|
||||
* - Dashboard
|
||||
* responses:
|
||||
* 200:
|
||||
* description: Dashboard preferences retrieved
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* type: object
|
||||
* properties:
|
||||
* cards:
|
||||
* type: array
|
||||
* items:
|
||||
* type: object
|
||||
* properties:
|
||||
* id:
|
||||
* type: string
|
||||
* enabled:
|
||||
* type: boolean
|
||||
* order:
|
||||
* type: integer
|
||||
* gridColumns:
|
||||
* type: integer
|
||||
* 401:
|
||||
* description: Session expired
|
||||
* 500:
|
||||
* description: Failed to get preferences
|
||||
*/
|
||||
app.get("/dashboard/preferences", async (req, res) => {
|
||||
try {
|
||||
const userId = (req as AuthenticatedRequest).userId;
|
||||
|
||||
if (!SimpleDBOps.isUserDataUnlocked(userId)) {
|
||||
return res.status(401).json({
|
||||
error: "Session expired - please log in again",
|
||||
code: "SESSION_EXPIRED",
|
||||
});
|
||||
}
|
||||
|
||||
const preferences = await getDb()
|
||||
.select()
|
||||
.from(dashboardPreferences)
|
||||
.where(eq(dashboardPreferences.userId, userId));
|
||||
|
||||
if (preferences.length === 0) {
|
||||
const defaultLayout = {
|
||||
cards: [
|
||||
{ id: "server_overview", enabled: true, order: 1 },
|
||||
{ id: "recent_activity", enabled: true, order: 2 },
|
||||
{ id: "network_graph", enabled: false, order: 3 },
|
||||
{ id: "quick_actions", enabled: true, order: 4 },
|
||||
{ id: "server_stats", enabled: true, order: 5 },
|
||||
],
|
||||
gridColumns: 2,
|
||||
};
|
||||
return res.json(defaultLayout);
|
||||
}
|
||||
|
||||
const layout = JSON.parse(preferences[0].layout as string);
|
||||
res.json(layout);
|
||||
} catch (err) {
|
||||
dashboardLogger.error("Failed to get dashboard preferences", err);
|
||||
res.status(500).json({ error: "Failed to get dashboard preferences" });
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* @openapi
|
||||
* /dashboard/preferences:
|
||||
* post:
|
||||
* summary: Save dashboard layout preferences
|
||||
* description: Saves or updates the user's customized dashboard layout settings.
|
||||
* tags:
|
||||
* - Dashboard
|
||||
* requestBody:
|
||||
* required: true
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* type: object
|
||||
* properties:
|
||||
* cards:
|
||||
* type: array
|
||||
* items:
|
||||
* type: object
|
||||
* properties:
|
||||
* id:
|
||||
* type: string
|
||||
* enabled:
|
||||
* type: boolean
|
||||
* order:
|
||||
* type: integer
|
||||
* gridColumns:
|
||||
* type: integer
|
||||
* responses:
|
||||
* 200:
|
||||
* description: Preferences saved successfully
|
||||
* 400:
|
||||
* description: Invalid request body
|
||||
* 401:
|
||||
* description: Session expired
|
||||
* 500:
|
||||
* description: Failed to save preferences
|
||||
*/
|
||||
app.post("/dashboard/preferences", async (req, res) => {
|
||||
try {
|
||||
const userId = (req as AuthenticatedRequest).userId;
|
||||
|
||||
if (!SimpleDBOps.isUserDataUnlocked(userId)) {
|
||||
return res.status(401).json({
|
||||
error: "Session expired - please log in again",
|
||||
code: "SESSION_EXPIRED",
|
||||
});
|
||||
}
|
||||
|
||||
const { cards, gridColumns } = req.body;
|
||||
|
||||
if (!cards || !Array.isArray(cards) || typeof gridColumns !== "number") {
|
||||
return res.status(400).json({
|
||||
error:
|
||||
"Invalid request body. Expected { cards: Array, gridColumns: number }",
|
||||
});
|
||||
}
|
||||
|
||||
const layout = JSON.stringify({ cards, gridColumns });
|
||||
|
||||
const existing = await getDb()
|
||||
.select()
|
||||
.from(dashboardPreferences)
|
||||
.where(eq(dashboardPreferences.userId, userId));
|
||||
|
||||
if (existing.length > 0) {
|
||||
await getDb()
|
||||
.update(dashboardPreferences)
|
||||
.set({ layout, updatedAt: sql`CURRENT_TIMESTAMP` })
|
||||
.where(eq(dashboardPreferences.userId, userId));
|
||||
} else {
|
||||
await getDb().insert(dashboardPreferences).values({ userId, layout });
|
||||
}
|
||||
|
||||
await DatabaseSaveTrigger.triggerSave("dashboard_preferences_updated");
|
||||
|
||||
dashboardLogger.success("Dashboard preferences saved", {
|
||||
operation: "save_dashboard_preferences",
|
||||
userId,
|
||||
});
|
||||
|
||||
res.json({ success: true, message: "Dashboard preferences saved" });
|
||||
} catch (err) {
|
||||
dashboardLogger.error("Failed to save dashboard preferences", err);
|
||||
res.status(500).json({ error: "Failed to save dashboard preferences" });
|
||||
}
|
||||
});
|
||||
|
||||
const PORT = 30006;
|
||||
app.listen(PORT, async () => {
|
||||
try {
|
||||
|
||||
@@ -8,6 +8,8 @@ import alertRoutes from "./routes/alerts.js";
|
||||
import credentialsRoutes from "./routes/credentials.js";
|
||||
import snippetsRoutes from "./routes/snippets.js";
|
||||
import terminalRoutes from "./routes/terminal.js";
|
||||
import networkTopologyRoutes from "./routes/network-topology.js";
|
||||
import rbacRoutes from "./routes/rbac.js";
|
||||
import cors from "cors";
|
||||
import fetch from "node-fetch";
|
||||
import fs from "fs";
|
||||
@@ -204,10 +206,46 @@ app.use(bodyParser.urlencoded({ limit: "1gb", extended: true }));
|
||||
app.use(bodyParser.raw({ limit: "5gb", type: "application/octet-stream" }));
|
||||
app.use(cookieParser());
|
||||
|
||||
/**
|
||||
* @openapi
|
||||
* /health:
|
||||
* get:
|
||||
* summary: Health check
|
||||
* description: Returns the health status of the server.
|
||||
* tags:
|
||||
* - General
|
||||
* responses:
|
||||
* 200:
|
||||
* description: Server is healthy.
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* type: object
|
||||
* properties:
|
||||
* status:
|
||||
* type: string
|
||||
* example: ok
|
||||
*/
|
||||
app.get("/health", (req, res) => {
|
||||
res.json({ status: "ok" });
|
||||
});
|
||||
|
||||
/**
|
||||
* @openapi
|
||||
* /version:
|
||||
* get:
|
||||
* summary: Get version information
|
||||
* description: Returns the local and remote version of the application.
|
||||
* tags:
|
||||
* - General
|
||||
* responses:
|
||||
* 200:
|
||||
* description: Version information.
|
||||
* 404:
|
||||
* description: Local version not set.
|
||||
* 500:
|
||||
* description: Fetch error.
|
||||
*/
|
||||
app.get("/version", authenticateJWT, async (req, res) => {
|
||||
let localVersion = process.env.VERSION;
|
||||
|
||||
@@ -306,6 +344,31 @@ app.get("/version", authenticateJWT, async (req, res) => {
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* @openapi
|
||||
* /releases/rss:
|
||||
* get:
|
||||
* summary: Get releases in RSS format
|
||||
* description: Returns the latest releases from the GitHub repository in an RSS-like JSON format.
|
||||
* tags:
|
||||
* - General
|
||||
* parameters:
|
||||
* - in: query
|
||||
* name: page
|
||||
* schema:
|
||||
* type: integer
|
||||
* description: The page number of the releases to fetch.
|
||||
* - in: query
|
||||
* name: per_page
|
||||
* schema:
|
||||
* type: integer
|
||||
* description: The number of releases to fetch per page.
|
||||
* responses:
|
||||
* 200:
|
||||
* description: Releases in RSS format.
|
||||
* 500:
|
||||
* description: Failed to generate RSS format.
|
||||
*/
|
||||
app.get("/releases/rss", authenticateJWT, async (req, res) => {
|
||||
try {
|
||||
const page = parseInt(req.query.page as string) || 1;
|
||||
@@ -362,6 +425,20 @@ app.get("/releases/rss", authenticateJWT, async (req, res) => {
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* @openapi
|
||||
* /encryption/status:
|
||||
* get:
|
||||
* summary: Get encryption status
|
||||
* description: Returns the security status of the application.
|
||||
* tags:
|
||||
* - Encryption
|
||||
* responses:
|
||||
* 200:
|
||||
* description: Security status.
|
||||
* 500:
|
||||
* description: Failed to get security status.
|
||||
*/
|
||||
app.get("/encryption/status", requireAdmin, async (req, res) => {
|
||||
try {
|
||||
const securityStatus = {
|
||||
@@ -383,6 +460,20 @@ app.get("/encryption/status", requireAdmin, async (req, res) => {
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* @openapi
|
||||
* /encryption/initialize:
|
||||
* post:
|
||||
* summary: Initialize security system
|
||||
* description: Initializes the security system for the application.
|
||||
* tags:
|
||||
* - Encryption
|
||||
* responses:
|
||||
* 200:
|
||||
* description: Security system initialized successfully.
|
||||
* 500:
|
||||
* description: Failed to initialize security system.
|
||||
*/
|
||||
app.post("/encryption/initialize", requireAdmin, async (req, res) => {
|
||||
try {
|
||||
const authManager = AuthManager.getInstance();
|
||||
@@ -406,6 +497,20 @@ app.post("/encryption/initialize", requireAdmin, async (req, res) => {
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* @openapi
|
||||
* /encryption/regenerate:
|
||||
* post:
|
||||
* summary: Regenerate JWT secret
|
||||
* description: Regenerates the system JWT secret. This will invalidate all existing JWT tokens.
|
||||
* tags:
|
||||
* - Encryption
|
||||
* responses:
|
||||
* 200:
|
||||
* description: System JWT secret regenerated.
|
||||
* 500:
|
||||
* description: Failed to regenerate JWT secret.
|
||||
*/
|
||||
app.post("/encryption/regenerate", requireAdmin, async (req, res) => {
|
||||
try {
|
||||
apiLogger.warn("System JWT secret regenerated via API", {
|
||||
@@ -427,6 +532,20 @@ app.post("/encryption/regenerate", requireAdmin, async (req, res) => {
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* @openapi
|
||||
* /encryption/regenerate-jwt:
|
||||
* post:
|
||||
* summary: Regenerate JWT secret
|
||||
* description: Regenerates the JWT secret. This will invalidate all existing JWT tokens.
|
||||
* tags:
|
||||
* - Encryption
|
||||
* responses:
|
||||
* 200:
|
||||
* description: New JWT secret generated.
|
||||
* 500:
|
||||
* description: Failed to regenerate JWT secret.
|
||||
*/
|
||||
app.post("/encryption/regenerate-jwt", requireAdmin, async (req, res) => {
|
||||
try {
|
||||
apiLogger.warn("JWT secret regenerated via API", {
|
||||
@@ -447,6 +566,33 @@ app.post("/encryption/regenerate-jwt", requireAdmin, async (req, res) => {
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* @openapi
|
||||
* /database/export:
|
||||
* post:
|
||||
* summary: Export user data
|
||||
* description: Exports the user's data as a SQLite database file.
|
||||
* tags:
|
||||
* - Database
|
||||
* requestBody:
|
||||
* required: true
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* type: object
|
||||
* properties:
|
||||
* password:
|
||||
* type: string
|
||||
* responses:
|
||||
* 200:
|
||||
* description: User data exported successfully.
|
||||
* 400:
|
||||
* description: Password required for export.
|
||||
* 401:
|
||||
* description: Invalid password.
|
||||
* 500:
|
||||
* description: Failed to export user data.
|
||||
*/
|
||||
app.post("/database/export", authenticateJWT, async (req, res) => {
|
||||
try {
|
||||
const userId = (req as AuthenticatedRequest).userId;
|
||||
@@ -897,6 +1043,36 @@ app.post("/database/export", authenticateJWT, async (req, res) => {
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* @openapi
|
||||
* /database/import:
|
||||
* post:
|
||||
* summary: Import user data
|
||||
* description: Imports user data from a SQLite database file.
|
||||
* tags:
|
||||
* - Database
|
||||
* requestBody:
|
||||
* required: true
|
||||
* content:
|
||||
* multipart/form-data:
|
||||
* schema:
|
||||
* type: object
|
||||
* properties:
|
||||
* file:
|
||||
* type: string
|
||||
* format: binary
|
||||
* password:
|
||||
* type: string
|
||||
* responses:
|
||||
* 200:
|
||||
* description: Incremental import completed successfully.
|
||||
* 400:
|
||||
* description: No file uploaded or password required for import.
|
||||
* 401:
|
||||
* description: Invalid password.
|
||||
* 500:
|
||||
* description: Failed to import SQLite data.
|
||||
*/
|
||||
app.post(
|
||||
"/database/import",
|
||||
authenticateJWT,
|
||||
@@ -1361,6 +1537,31 @@ app.post(
|
||||
},
|
||||
);
|
||||
|
||||
/**
|
||||
* @openapi
|
||||
* /database/export/preview:
|
||||
* post:
|
||||
* summary: Preview user data export
|
||||
* description: Generates a preview of the user data export, including statistics about the data.
|
||||
* tags:
|
||||
* - Database
|
||||
* requestBody:
|
||||
* required: true
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* type: object
|
||||
* properties:
|
||||
* scope:
|
||||
* type: string
|
||||
* includeCredentials:
|
||||
* type: boolean
|
||||
* responses:
|
||||
* 200:
|
||||
* description: Export preview generated successfully.
|
||||
* 500:
|
||||
* description: Failed to generate export preview.
|
||||
*/
|
||||
app.post("/database/export/preview", authenticateJWT, async (req, res) => {
|
||||
try {
|
||||
const userId = (req as AuthenticatedRequest).userId;
|
||||
@@ -1396,6 +1597,33 @@ app.post("/database/export/preview", authenticateJWT, async (req, res) => {
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* @openapi
|
||||
* /database/restore:
|
||||
* post:
|
||||
* summary: Restore database from backup
|
||||
* description: Restores the database from an encrypted backup file.
|
||||
* tags:
|
||||
* - Database
|
||||
* requestBody:
|
||||
* required: true
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* type: object
|
||||
* properties:
|
||||
* backupPath:
|
||||
* type: string
|
||||
* targetPath:
|
||||
* type: string
|
||||
* responses:
|
||||
* 200:
|
||||
* description: Database restored successfully.
|
||||
* 400:
|
||||
* description: Backup path is required or invalid encrypted backup file.
|
||||
* 500:
|
||||
* description: Database restore failed.
|
||||
*/
|
||||
app.post("/database/restore", requireAdmin, async (req, res) => {
|
||||
try {
|
||||
const { backupPath, targetPath } = req.body;
|
||||
@@ -1436,6 +1664,8 @@ app.use("/alerts", alertRoutes);
|
||||
app.use("/credentials", credentialsRoutes);
|
||||
app.use("/snippets", snippetsRoutes);
|
||||
app.use("/terminal", terminalRoutes);
|
||||
app.use("/network-topology", networkTopologyRoutes);
|
||||
app.use("/rbac", rbacRoutes);
|
||||
|
||||
app.use(
|
||||
(
|
||||
@@ -1475,6 +1705,20 @@ async function initializeSecurity() {
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @openapi
|
||||
* /database/migration/status:
|
||||
* get:
|
||||
* summary: Get database migration status
|
||||
* description: Returns the status of the database migration.
|
||||
* tags:
|
||||
* - Database
|
||||
* responses:
|
||||
* 200:
|
||||
* description: Migration status.
|
||||
* 500:
|
||||
* description: Failed to get migration status.
|
||||
*/
|
||||
app.get(
|
||||
"/database/migration/status",
|
||||
authenticateJWT,
|
||||
@@ -1528,6 +1772,20 @@ app.get(
|
||||
},
|
||||
);
|
||||
|
||||
/**
|
||||
* @openapi
|
||||
* /database/migration/history:
|
||||
* get:
|
||||
* summary: Get database migration history
|
||||
* description: Returns the history of database migrations.
|
||||
* tags:
|
||||
* - Database
|
||||
* responses:
|
||||
* 200:
|
||||
* description: Migration history.
|
||||
* 500:
|
||||
* description: Failed to get migration history.
|
||||
*/
|
||||
app.get(
|
||||
"/database/migration/history",
|
||||
authenticateJWT,
|
||||
|
||||
@@ -201,13 +201,21 @@ async function initializeCompleteDatabase(): Promise<void> {
|
||||
enable_tunnel INTEGER NOT NULL DEFAULT 1,
|
||||
tunnel_connections TEXT,
|
||||
enable_file_manager INTEGER NOT NULL DEFAULT 1,
|
||||
enable_docker INTEGER NOT NULL DEFAULT 0,
|
||||
default_path TEXT,
|
||||
autostart_password TEXT,
|
||||
autostart_key TEXT,
|
||||
autostart_key_password TEXT,
|
||||
force_keyboard_interactive TEXT,
|
||||
stats_config TEXT,
|
||||
docker_config TEXT,
|
||||
terminal_config TEXT,
|
||||
notes TEXT,
|
||||
use_socks5 INTEGER,
|
||||
socks5_host TEXT,
|
||||
socks5_port INTEGER,
|
||||
socks5_username TEXT,
|
||||
socks5_password TEXT,
|
||||
created_at TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
updated_at TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
FOREIGN KEY (user_id) REFERENCES users (id) ON DELETE CASCADE
|
||||
@@ -328,6 +336,81 @@ async function initializeCompleteDatabase(): Promise<void> {
|
||||
FOREIGN KEY (host_id) REFERENCES ssh_data (id) ON DELETE CASCADE
|
||||
);
|
||||
|
||||
CREATE TABLE IF NOT EXISTS host_access (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
host_id INTEGER NOT NULL,
|
||||
user_id TEXT,
|
||||
role_id INTEGER,
|
||||
granted_by TEXT NOT NULL,
|
||||
permission_level TEXT NOT NULL DEFAULT 'use',
|
||||
expires_at TEXT,
|
||||
created_at TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
last_accessed_at TEXT,
|
||||
access_count INTEGER NOT NULL DEFAULT 0,
|
||||
FOREIGN KEY (host_id) REFERENCES ssh_data (id) ON DELETE CASCADE,
|
||||
FOREIGN KEY (user_id) REFERENCES users (id) ON DELETE CASCADE,
|
||||
FOREIGN KEY (role_id) REFERENCES roles (id) ON DELETE CASCADE,
|
||||
FOREIGN KEY (granted_by) REFERENCES users (id) ON DELETE CASCADE
|
||||
);
|
||||
|
||||
CREATE TABLE IF NOT EXISTS roles (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
name TEXT NOT NULL UNIQUE,
|
||||
display_name TEXT NOT NULL,
|
||||
description TEXT,
|
||||
is_system INTEGER NOT NULL DEFAULT 0,
|
||||
permissions TEXT,
|
||||
created_at TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
updated_at TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
|
||||
CREATE TABLE IF NOT EXISTS user_roles (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
user_id TEXT NOT NULL,
|
||||
role_id INTEGER NOT NULL,
|
||||
granted_by TEXT,
|
||||
granted_at TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
UNIQUE(user_id, role_id),
|
||||
FOREIGN KEY (user_id) REFERENCES users (id) ON DELETE CASCADE,
|
||||
FOREIGN KEY (role_id) REFERENCES roles (id) ON DELETE CASCADE,
|
||||
FOREIGN KEY (granted_by) REFERENCES users (id) ON DELETE SET NULL
|
||||
);
|
||||
|
||||
CREATE TABLE IF NOT EXISTS audit_logs (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
user_id TEXT NOT NULL,
|
||||
username TEXT NOT NULL,
|
||||
action TEXT NOT NULL,
|
||||
resource_type TEXT NOT NULL,
|
||||
resource_id TEXT,
|
||||
resource_name TEXT,
|
||||
details TEXT,
|
||||
ip_address TEXT,
|
||||
user_agent TEXT,
|
||||
success INTEGER NOT NULL,
|
||||
error_message TEXT,
|
||||
timestamp TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
FOREIGN KEY (user_id) REFERENCES users (id) ON DELETE CASCADE
|
||||
);
|
||||
|
||||
CREATE TABLE IF NOT EXISTS session_recordings (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
host_id INTEGER NOT NULL,
|
||||
user_id TEXT NOT NULL,
|
||||
access_id INTEGER,
|
||||
started_at TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
ended_at TEXT,
|
||||
duration INTEGER,
|
||||
commands TEXT,
|
||||
dangerous_actions TEXT,
|
||||
recording_path TEXT,
|
||||
terminated_by_owner INTEGER DEFAULT 0,
|
||||
termination_reason TEXT,
|
||||
FOREIGN KEY (host_id) REFERENCES ssh_data (id) ON DELETE CASCADE,
|
||||
FOREIGN KEY (user_id) REFERENCES users (id) ON DELETE CASCADE,
|
||||
FOREIGN KEY (access_id) REFERENCES host_access (id) ON DELETE SET NULL
|
||||
);
|
||||
|
||||
`);
|
||||
|
||||
try {
|
||||
@@ -486,11 +569,56 @@ const migrateSchema = () => {
|
||||
addColumnIfNotExists("ssh_data", "stats_config", "TEXT");
|
||||
addColumnIfNotExists("ssh_data", "terminal_config", "TEXT");
|
||||
addColumnIfNotExists("ssh_data", "quick_actions", "TEXT");
|
||||
addColumnIfNotExists(
|
||||
"ssh_data",
|
||||
"enable_docker",
|
||||
"INTEGER NOT NULL DEFAULT 0",
|
||||
);
|
||||
addColumnIfNotExists("ssh_data", "docker_config", "TEXT");
|
||||
|
||||
addColumnIfNotExists("ssh_data", "notes", "TEXT");
|
||||
|
||||
addColumnIfNotExists("ssh_data", "use_socks5", "INTEGER");
|
||||
addColumnIfNotExists("ssh_data", "socks5_host", "TEXT");
|
||||
addColumnIfNotExists("ssh_data", "socks5_port", "INTEGER");
|
||||
addColumnIfNotExists("ssh_data", "socks5_username", "TEXT");
|
||||
addColumnIfNotExists("ssh_data", "socks5_password", "TEXT");
|
||||
addColumnIfNotExists("ssh_data", "socks5_proxy_chain", "TEXT");
|
||||
|
||||
addColumnIfNotExists(
|
||||
"ssh_data",
|
||||
"show_terminal_in_sidebar",
|
||||
"INTEGER NOT NULL DEFAULT 1",
|
||||
);
|
||||
addColumnIfNotExists(
|
||||
"ssh_data",
|
||||
"show_file_manager_in_sidebar",
|
||||
"INTEGER NOT NULL DEFAULT 0",
|
||||
);
|
||||
addColumnIfNotExists(
|
||||
"ssh_data",
|
||||
"show_tunnel_in_sidebar",
|
||||
"INTEGER NOT NULL DEFAULT 0",
|
||||
);
|
||||
addColumnIfNotExists(
|
||||
"ssh_data",
|
||||
"show_docker_in_sidebar",
|
||||
"INTEGER NOT NULL DEFAULT 0",
|
||||
);
|
||||
addColumnIfNotExists(
|
||||
"ssh_data",
|
||||
"show_server_stats_in_sidebar",
|
||||
"INTEGER NOT NULL DEFAULT 0",
|
||||
);
|
||||
|
||||
addColumnIfNotExists("ssh_credentials", "private_key", "TEXT");
|
||||
addColumnIfNotExists("ssh_credentials", "public_key", "TEXT");
|
||||
addColumnIfNotExists("ssh_credentials", "detected_key_type", "TEXT");
|
||||
|
||||
addColumnIfNotExists("ssh_credentials", "system_password", "TEXT");
|
||||
addColumnIfNotExists("ssh_credentials", "system_key", "TEXT");
|
||||
addColumnIfNotExists("ssh_credentials", "system_key_password", "TEXT");
|
||||
|
||||
addColumnIfNotExists("file_manager_recent", "host_id", "INTEGER NOT NULL");
|
||||
addColumnIfNotExists("file_manager_pinned", "host_id", "INTEGER NOT NULL");
|
||||
addColumnIfNotExists("file_manager_shortcuts", "host_id", "INTEGER NOT NULL");
|
||||
@@ -551,6 +679,365 @@ const migrateSchema = () => {
|
||||
}
|
||||
}
|
||||
|
||||
try {
|
||||
sqlite
|
||||
.prepare("SELECT id FROM network_topology LIMIT 1")
|
||||
.get();
|
||||
} catch {
|
||||
try {
|
||||
sqlite.exec(`
|
||||
CREATE TABLE IF NOT EXISTS network_topology (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
user_id TEXT NOT NULL,
|
||||
topology TEXT,
|
||||
created_at TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
updated_at TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
FOREIGN KEY (user_id) REFERENCES users (id) ON DELETE CASCADE
|
||||
);
|
||||
`);
|
||||
} catch (createError) {
|
||||
databaseLogger.warn("Failed to create network_topology table", {
|
||||
operation: "schema_migration",
|
||||
error: createError,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
try {
|
||||
sqlite
|
||||
.prepare("SELECT id FROM dashboard_preferences LIMIT 1")
|
||||
.get();
|
||||
} catch {
|
||||
try {
|
||||
sqlite.exec(`
|
||||
CREATE TABLE IF NOT EXISTS dashboard_preferences (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
user_id TEXT NOT NULL UNIQUE,
|
||||
layout TEXT NOT NULL,
|
||||
created_at TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
updated_at TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
FOREIGN KEY (user_id) REFERENCES users (id) ON DELETE CASCADE
|
||||
);
|
||||
`);
|
||||
} catch (createError) {
|
||||
databaseLogger.warn("Failed to create dashboard_preferences table", {
|
||||
operation: "schema_migration",
|
||||
error: createError,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
try {
|
||||
sqlite.prepare("SELECT id FROM host_access LIMIT 1").get();
|
||||
} catch {
|
||||
try {
|
||||
sqlite.exec(`
|
||||
CREATE TABLE IF NOT EXISTS host_access (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
host_id INTEGER NOT NULL,
|
||||
user_id TEXT,
|
||||
role_id INTEGER,
|
||||
granted_by TEXT NOT NULL,
|
||||
permission_level TEXT NOT NULL DEFAULT 'use',
|
||||
expires_at TEXT,
|
||||
created_at TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
last_accessed_at TEXT,
|
||||
access_count INTEGER NOT NULL DEFAULT 0,
|
||||
FOREIGN KEY (host_id) REFERENCES ssh_data (id) ON DELETE CASCADE,
|
||||
FOREIGN KEY (user_id) REFERENCES users (id) ON DELETE CASCADE,
|
||||
FOREIGN KEY (role_id) REFERENCES roles (id) ON DELETE CASCADE,
|
||||
FOREIGN KEY (granted_by) REFERENCES users (id) ON DELETE CASCADE
|
||||
);
|
||||
`);
|
||||
} catch (createError) {
|
||||
databaseLogger.warn("Failed to create host_access table", {
|
||||
operation: "schema_migration",
|
||||
error: createError,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
try {
|
||||
sqlite.prepare("SELECT role_id FROM host_access LIMIT 1").get();
|
||||
} catch {
|
||||
try {
|
||||
sqlite.exec("ALTER TABLE host_access ADD COLUMN role_id INTEGER REFERENCES roles(id) ON DELETE CASCADE");
|
||||
} catch (alterError) {
|
||||
databaseLogger.warn("Failed to add role_id column", {
|
||||
operation: "schema_migration",
|
||||
error: alterError,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
try {
|
||||
sqlite.prepare("SELECT sudo_password FROM ssh_data LIMIT 1").get();
|
||||
} catch {
|
||||
try {
|
||||
sqlite.exec("ALTER TABLE ssh_data ADD COLUMN sudo_password TEXT");
|
||||
} catch (alterError) {
|
||||
databaseLogger.warn("Failed to add sudo_password column", {
|
||||
operation: "schema_migration",
|
||||
error: alterError,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
try {
|
||||
sqlite.prepare("SELECT id FROM roles LIMIT 1").get();
|
||||
} catch {
|
||||
try {
|
||||
sqlite.exec(`
|
||||
CREATE TABLE IF NOT EXISTS roles (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
name TEXT NOT NULL UNIQUE,
|
||||
display_name TEXT NOT NULL,
|
||||
description TEXT,
|
||||
is_system INTEGER NOT NULL DEFAULT 0,
|
||||
permissions TEXT,
|
||||
created_at TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
updated_at TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
`);
|
||||
} catch (createError) {
|
||||
databaseLogger.warn("Failed to create roles table", {
|
||||
operation: "schema_migration",
|
||||
error: createError,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
try {
|
||||
sqlite.prepare("SELECT id FROM user_roles LIMIT 1").get();
|
||||
} catch {
|
||||
try {
|
||||
sqlite.exec(`
|
||||
CREATE TABLE IF NOT EXISTS user_roles (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
user_id TEXT NOT NULL,
|
||||
role_id INTEGER NOT NULL,
|
||||
granted_by TEXT,
|
||||
granted_at TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
UNIQUE(user_id, role_id),
|
||||
FOREIGN KEY (user_id) REFERENCES users (id) ON DELETE CASCADE,
|
||||
FOREIGN KEY (role_id) REFERENCES roles (id) ON DELETE CASCADE,
|
||||
FOREIGN KEY (granted_by) REFERENCES users (id) ON DELETE SET NULL
|
||||
);
|
||||
`);
|
||||
} catch (createError) {
|
||||
databaseLogger.warn("Failed to create user_roles table", {
|
||||
operation: "schema_migration",
|
||||
error: createError,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
try {
|
||||
sqlite.prepare("SELECT id FROM audit_logs LIMIT 1").get();
|
||||
} catch {
|
||||
try {
|
||||
sqlite.exec(`
|
||||
CREATE TABLE IF NOT EXISTS audit_logs (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
user_id TEXT NOT NULL,
|
||||
username TEXT NOT NULL,
|
||||
action TEXT NOT NULL,
|
||||
resource_type TEXT NOT NULL,
|
||||
resource_id TEXT,
|
||||
resource_name TEXT,
|
||||
details TEXT,
|
||||
ip_address TEXT,
|
||||
user_agent TEXT,
|
||||
success INTEGER NOT NULL,
|
||||
error_message TEXT,
|
||||
timestamp TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
FOREIGN KEY (user_id) REFERENCES users (id) ON DELETE CASCADE
|
||||
);
|
||||
`);
|
||||
} catch (createError) {
|
||||
databaseLogger.warn("Failed to create audit_logs table", {
|
||||
operation: "schema_migration",
|
||||
error: createError,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
try {
|
||||
sqlite.prepare("SELECT id FROM session_recordings LIMIT 1").get();
|
||||
} catch {
|
||||
try {
|
||||
sqlite.exec(`
|
||||
CREATE TABLE IF NOT EXISTS session_recordings (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
host_id INTEGER NOT NULL,
|
||||
user_id TEXT NOT NULL,
|
||||
access_id INTEGER,
|
||||
started_at TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
ended_at TEXT,
|
||||
duration INTEGER,
|
||||
commands TEXT,
|
||||
dangerous_actions TEXT,
|
||||
recording_path TEXT,
|
||||
terminated_by_owner INTEGER DEFAULT 0,
|
||||
termination_reason TEXT,
|
||||
FOREIGN KEY (host_id) REFERENCES ssh_data (id) ON DELETE CASCADE,
|
||||
FOREIGN KEY (user_id) REFERENCES users (id) ON DELETE CASCADE,
|
||||
FOREIGN KEY (access_id) REFERENCES host_access (id) ON DELETE SET NULL
|
||||
);
|
||||
`);
|
||||
} catch (createError) {
|
||||
databaseLogger.warn("Failed to create session_recordings table", {
|
||||
operation: "schema_migration",
|
||||
error: createError,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
try {
|
||||
sqlite.prepare("SELECT id FROM shared_credentials LIMIT 1").get();
|
||||
} catch {
|
||||
try {
|
||||
sqlite.exec(`
|
||||
CREATE TABLE IF NOT EXISTS shared_credentials (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
host_access_id INTEGER NOT NULL,
|
||||
original_credential_id INTEGER NOT NULL,
|
||||
target_user_id TEXT NOT NULL,
|
||||
encrypted_username TEXT NOT NULL,
|
||||
encrypted_auth_type TEXT NOT NULL,
|
||||
encrypted_password TEXT,
|
||||
encrypted_key TEXT,
|
||||
encrypted_key_password TEXT,
|
||||
encrypted_key_type TEXT,
|
||||
created_at TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
updated_at TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
needs_re_encryption INTEGER NOT NULL DEFAULT 0,
|
||||
FOREIGN KEY (host_access_id) REFERENCES host_access (id) ON DELETE CASCADE,
|
||||
FOREIGN KEY (original_credential_id) REFERENCES ssh_credentials (id) ON DELETE CASCADE,
|
||||
FOREIGN KEY (target_user_id) REFERENCES users (id) ON DELETE CASCADE
|
||||
);
|
||||
`);
|
||||
} catch (createError) {
|
||||
databaseLogger.warn("Failed to create shared_credentials table", {
|
||||
operation: "schema_migration",
|
||||
error: createError,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
try {
|
||||
const existingRoles = sqlite.prepare("SELECT name, is_system FROM roles").all() as Array<{ name: string; is_system: number }>;
|
||||
|
||||
try {
|
||||
const validSystemRoles = ['admin', 'user'];
|
||||
const unwantedRoleNames = ['superAdmin', 'powerUser', 'readonly', 'member'];
|
||||
let deletedCount = 0;
|
||||
|
||||
const deleteByName = sqlite.prepare("DELETE FROM roles WHERE name = ?");
|
||||
for (const roleName of unwantedRoleNames) {
|
||||
const result = deleteByName.run(roleName);
|
||||
if (result.changes > 0) {
|
||||
deletedCount += result.changes;
|
||||
}
|
||||
}
|
||||
|
||||
const deleteOldSystemRole = sqlite.prepare("DELETE FROM roles WHERE name = ? AND is_system = 1");
|
||||
for (const role of existingRoles) {
|
||||
if (role.is_system === 1 && !validSystemRoles.includes(role.name) && !unwantedRoleNames.includes(role.name)) {
|
||||
const result = deleteOldSystemRole.run(role.name);
|
||||
if (result.changes > 0) {
|
||||
deletedCount += result.changes;
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch (cleanupError) {
|
||||
databaseLogger.warn("Failed to clean up old system roles", {
|
||||
operation: "schema_migration",
|
||||
error: cleanupError,
|
||||
});
|
||||
}
|
||||
|
||||
const systemRoles = [
|
||||
{
|
||||
name: "admin",
|
||||
displayName: "rbac.roles.admin",
|
||||
description: "Administrator with full access",
|
||||
permissions: null,
|
||||
},
|
||||
{
|
||||
name: "user",
|
||||
displayName: "rbac.roles.user",
|
||||
description: "Regular user",
|
||||
permissions: null,
|
||||
},
|
||||
];
|
||||
|
||||
for (const role of systemRoles) {
|
||||
const existingRole = sqlite.prepare("SELECT id FROM roles WHERE name = ?").get(role.name);
|
||||
if (!existingRole) {
|
||||
try {
|
||||
sqlite.prepare(`
|
||||
INSERT INTO roles (name, display_name, description, is_system, permissions)
|
||||
VALUES (?, ?, ?, 1, ?)
|
||||
`).run(role.name, role.displayName, role.description, role.permissions);
|
||||
} catch (insertError) {
|
||||
databaseLogger.warn(`Failed to create system role: ${role.name}`, {
|
||||
operation: "schema_migration",
|
||||
error: insertError,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
try {
|
||||
const adminUsers = sqlite.prepare("SELECT id FROM users WHERE is_admin = 1").all() as { id: string }[];
|
||||
const normalUsers = sqlite.prepare("SELECT id FROM users WHERE is_admin = 0").all() as { id: string }[];
|
||||
|
||||
const adminRole = sqlite.prepare("SELECT id FROM roles WHERE name = 'admin'").get() as { id: number } | undefined;
|
||||
const userRole = sqlite.prepare("SELECT id FROM roles WHERE name = 'user'").get() as { id: number } | undefined;
|
||||
|
||||
if (adminRole) {
|
||||
const insertUserRole = sqlite.prepare(`
|
||||
INSERT OR IGNORE INTO user_roles (user_id, role_id, granted_at)
|
||||
VALUES (?, ?, CURRENT_TIMESTAMP)
|
||||
`);
|
||||
|
||||
for (const admin of adminUsers) {
|
||||
try {
|
||||
insertUserRole.run(admin.id, adminRole.id);
|
||||
} catch (error) {
|
||||
// Ignore duplicate errors
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (userRole) {
|
||||
const insertUserRole = sqlite.prepare(`
|
||||
INSERT OR IGNORE INTO user_roles (user_id, role_id, granted_at)
|
||||
VALUES (?, ?, CURRENT_TIMESTAMP)
|
||||
`);
|
||||
|
||||
for (const user of normalUsers) {
|
||||
try {
|
||||
insertUserRole.run(user.id, userRole.id);
|
||||
} catch (error) {
|
||||
// Ignore duplicate errors
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch (migrationError) {
|
||||
databaseLogger.warn("Failed to migrate existing users to roles", {
|
||||
operation: "schema_migration",
|
||||
error: migrationError,
|
||||
});
|
||||
}
|
||||
} catch (seedError) {
|
||||
databaseLogger.warn("Failed to seed system roles", {
|
||||
operation: "schema_migration",
|
||||
error: seedError,
|
||||
});
|
||||
}
|
||||
|
||||
databaseLogger.success("Schema migration completed", {
|
||||
operation: "schema_migration",
|
||||
});
|
||||
|
||||
@@ -66,6 +66,7 @@ export const sshData = sqliteTable("ssh_data", {
|
||||
key: text("key", { length: 8192 }),
|
||||
key_password: text("key_password"),
|
||||
keyType: text("key_type"),
|
||||
sudoPassword: text("sudo_password"),
|
||||
|
||||
autostartPassword: text("autostart_password"),
|
||||
autostartKey: text("autostart_key", { length: 8192 }),
|
||||
@@ -86,10 +87,37 @@ export const sshData = sqliteTable("ssh_data", {
|
||||
enableFileManager: integer("enable_file_manager", { mode: "boolean" })
|
||||
.notNull()
|
||||
.default(true),
|
||||
enableDocker: integer("enable_docker", { mode: "boolean" })
|
||||
.notNull()
|
||||
.default(false),
|
||||
showTerminalInSidebar: integer("show_terminal_in_sidebar", { mode: "boolean" })
|
||||
.notNull()
|
||||
.default(true),
|
||||
showFileManagerInSidebar: integer("show_file_manager_in_sidebar", { mode: "boolean" })
|
||||
.notNull()
|
||||
.default(false),
|
||||
showTunnelInSidebar: integer("show_tunnel_in_sidebar", { mode: "boolean" })
|
||||
.notNull()
|
||||
.default(false),
|
||||
showDockerInSidebar: integer("show_docker_in_sidebar", { mode: "boolean" })
|
||||
.notNull()
|
||||
.default(false),
|
||||
showServerStatsInSidebar: integer("show_server_stats_in_sidebar", { mode: "boolean" })
|
||||
.notNull()
|
||||
.default(false),
|
||||
defaultPath: text("default_path"),
|
||||
statsConfig: text("stats_config"),
|
||||
terminalConfig: text("terminal_config"),
|
||||
quickActions: text("quick_actions"),
|
||||
notes: text("notes"),
|
||||
|
||||
useSocks5: integer("use_socks5", { mode: "boolean" }),
|
||||
socks5Host: text("socks5_host"),
|
||||
socks5Port: integer("socks5_port"),
|
||||
socks5Username: text("socks5_username"),
|
||||
socks5Password: text("socks5_password"),
|
||||
socks5ProxyChain: text("socks5_proxy_chain"),
|
||||
|
||||
createdAt: text("created_at")
|
||||
.notNull()
|
||||
.default(sql`CURRENT_TIMESTAMP`),
|
||||
@@ -172,6 +200,11 @@ export const sshCredentials = sqliteTable("ssh_credentials", {
|
||||
key_password: text("key_password"),
|
||||
keyType: text("key_type"),
|
||||
detectedKeyType: text("detected_key_type"),
|
||||
|
||||
systemPassword: text("system_password"),
|
||||
systemKey: text("system_key", { length: 16384 }),
|
||||
systemKeyPassword: text("system_key_password"),
|
||||
|
||||
usageCount: integer("usage_count").notNull().default(0),
|
||||
lastUsed: text("last_used"),
|
||||
createdAt: text("created_at")
|
||||
@@ -276,3 +309,185 @@ export const commandHistory = sqliteTable("command_history", {
|
||||
.notNull()
|
||||
.default(sql`CURRENT_TIMESTAMP`),
|
||||
});
|
||||
|
||||
export const networkTopology = sqliteTable("network_topology", {
|
||||
id: integer("id").primaryKey({ autoIncrement: true }),
|
||||
userId: text("user_id")
|
||||
.notNull()
|
||||
.references(() => users.id, { onDelete: "cascade" }),
|
||||
topology: text("topology"),
|
||||
createdAt: text("created_at")
|
||||
.notNull()
|
||||
.default(sql`CURRENT_TIMESTAMP`),
|
||||
updatedAt: text("updated_at")
|
||||
.notNull()
|
||||
.default(sql`CURRENT_TIMESTAMP`),
|
||||
});
|
||||
|
||||
export const dashboardPreferences = sqliteTable("dashboard_preferences", {
|
||||
id: integer("id").primaryKey({ autoIncrement: true }),
|
||||
userId: text("user_id")
|
||||
.notNull()
|
||||
.unique()
|
||||
.references(() => users.id, { onDelete: "cascade" }),
|
||||
layout: text("layout").notNull(),
|
||||
createdAt: text("created_at")
|
||||
.notNull()
|
||||
.default(sql`CURRENT_TIMESTAMP`),
|
||||
updatedAt: text("updated_at")
|
||||
.notNull()
|
||||
.default(sql`CURRENT_TIMESTAMP`),
|
||||
});
|
||||
|
||||
export const hostAccess = sqliteTable("host_access", {
|
||||
id: integer("id").primaryKey({ autoIncrement: true }),
|
||||
hostId: integer("host_id")
|
||||
.notNull()
|
||||
.references(() => sshData.id, { onDelete: "cascade" }),
|
||||
|
||||
userId: text("user_id")
|
||||
.references(() => users.id, { onDelete: "cascade" }),
|
||||
roleId: integer("role_id")
|
||||
.references(() => roles.id, { onDelete: "cascade" }),
|
||||
|
||||
grantedBy: text("granted_by")
|
||||
.notNull()
|
||||
.references(() => users.id, { onDelete: "cascade" }),
|
||||
|
||||
permissionLevel: text("permission_level")
|
||||
.notNull()
|
||||
.default("view"),
|
||||
|
||||
expiresAt: text("expires_at"),
|
||||
|
||||
createdAt: text("created_at")
|
||||
.notNull()
|
||||
.default(sql`CURRENT_TIMESTAMP`),
|
||||
lastAccessedAt: text("last_accessed_at"),
|
||||
accessCount: integer("access_count").notNull().default(0),
|
||||
});
|
||||
|
||||
export const sharedCredentials = sqliteTable("shared_credentials", {
|
||||
id: integer("id").primaryKey({ autoIncrement: true }),
|
||||
|
||||
hostAccessId: integer("host_access_id")
|
||||
.notNull()
|
||||
.references(() => hostAccess.id, { onDelete: "cascade" }),
|
||||
|
||||
originalCredentialId: integer("original_credential_id")
|
||||
.notNull()
|
||||
.references(() => sshCredentials.id, { onDelete: "cascade" }),
|
||||
|
||||
targetUserId: text("target_user_id")
|
||||
.notNull()
|
||||
.references(() => users.id, { onDelete: "cascade" }),
|
||||
|
||||
encryptedUsername: text("encrypted_username").notNull(),
|
||||
encryptedAuthType: text("encrypted_auth_type").notNull(),
|
||||
encryptedPassword: text("encrypted_password"),
|
||||
encryptedKey: text("encrypted_key", { length: 16384 }),
|
||||
encryptedKeyPassword: text("encrypted_key_password"),
|
||||
encryptedKeyType: text("encrypted_key_type"),
|
||||
|
||||
createdAt: text("created_at")
|
||||
.notNull()
|
||||
.default(sql`CURRENT_TIMESTAMP`),
|
||||
updatedAt: text("updated_at")
|
||||
.notNull()
|
||||
.default(sql`CURRENT_TIMESTAMP`),
|
||||
|
||||
needsReEncryption: integer("needs_re_encryption", { mode: "boolean" })
|
||||
.notNull()
|
||||
.default(false),
|
||||
});
|
||||
|
||||
export const roles = sqliteTable("roles", {
|
||||
id: integer("id").primaryKey({ autoIncrement: true }),
|
||||
name: text("name").notNull().unique(),
|
||||
displayName: text("display_name").notNull(),
|
||||
description: text("description"),
|
||||
|
||||
isSystem: integer("is_system", { mode: "boolean" })
|
||||
.notNull()
|
||||
.default(false),
|
||||
|
||||
permissions: text("permissions"),
|
||||
|
||||
createdAt: text("created_at")
|
||||
.notNull()
|
||||
.default(sql`CURRENT_TIMESTAMP`),
|
||||
updatedAt: text("updated_at")
|
||||
.notNull()
|
||||
.default(sql`CURRENT_TIMESTAMP`),
|
||||
});
|
||||
|
||||
export const userRoles = sqliteTable("user_roles", {
|
||||
id: integer("id").primaryKey({ autoIncrement: true }),
|
||||
userId: text("user_id")
|
||||
.notNull()
|
||||
.references(() => users.id, { onDelete: "cascade" }),
|
||||
roleId: integer("role_id")
|
||||
.notNull()
|
||||
.references(() => roles.id, { onDelete: "cascade" }),
|
||||
|
||||
grantedBy: text("granted_by").references(() => users.id, {
|
||||
onDelete: "set null",
|
||||
}),
|
||||
grantedAt: text("granted_at")
|
||||
.notNull()
|
||||
.default(sql`CURRENT_TIMESTAMP`),
|
||||
});
|
||||
|
||||
export const auditLogs = sqliteTable("audit_logs", {
|
||||
id: integer("id").primaryKey({ autoIncrement: true }),
|
||||
|
||||
userId: text("user_id")
|
||||
.notNull()
|
||||
.references(() => users.id, { onDelete: "cascade" }),
|
||||
username: text("username").notNull(),
|
||||
|
||||
action: text("action").notNull(),
|
||||
resourceType: text("resource_type").notNull(),
|
||||
resourceId: text("resource_id"),
|
||||
resourceName: text("resource_name"),
|
||||
|
||||
details: text("details"),
|
||||
ipAddress: text("ip_address"),
|
||||
userAgent: text("user_agent"),
|
||||
|
||||
success: integer("success", { mode: "boolean" }).notNull(),
|
||||
errorMessage: text("error_message"),
|
||||
|
||||
timestamp: text("timestamp")
|
||||
.notNull()
|
||||
.default(sql`CURRENT_TIMESTAMP`),
|
||||
});
|
||||
|
||||
export const sessionRecordings = sqliteTable("session_recordings", {
|
||||
id: integer("id").primaryKey({ autoIncrement: true }),
|
||||
|
||||
hostId: integer("host_id")
|
||||
.notNull()
|
||||
.references(() => sshData.id, { onDelete: "cascade" }),
|
||||
userId: text("user_id")
|
||||
.notNull()
|
||||
.references(() => users.id, { onDelete: "cascade" }),
|
||||
accessId: integer("access_id").references(() => hostAccess.id, {
|
||||
onDelete: "set null",
|
||||
}),
|
||||
|
||||
startedAt: text("started_at")
|
||||
.notNull()
|
||||
.default(sql`CURRENT_TIMESTAMP`),
|
||||
endedAt: text("ended_at"),
|
||||
duration: integer("duration"),
|
||||
|
||||
commands: text("commands"),
|
||||
dangerousActions: text("dangerous_actions"),
|
||||
|
||||
recordingPath: text("recording_path"),
|
||||
|
||||
terminatedByOwner: integer("terminated_by_owner", { mode: "boolean" })
|
||||
.default(false),
|
||||
terminationReason: text("termination_reason"),
|
||||
});
|
||||
|
||||
@@ -99,8 +99,20 @@ const router = express.Router();
|
||||
const authManager = AuthManager.getInstance();
|
||||
const authenticateJWT = authManager.createAuthMiddleware();
|
||||
|
||||
// Route: Get alerts for the authenticated user (excluding dismissed ones)
|
||||
// GET /alerts
|
||||
/**
|
||||
* @openapi
|
||||
* /alerts:
|
||||
* get:
|
||||
* summary: Get active alerts
|
||||
* description: Fetches active alerts for the authenticated user, excluding those that have been dismissed.
|
||||
* tags:
|
||||
* - Alerts
|
||||
* responses:
|
||||
* 200:
|
||||
* description: A list of active alerts.
|
||||
* 500:
|
||||
* description: Failed to fetch alerts.
|
||||
*/
|
||||
router.get("/", authenticateJWT, async (req, res) => {
|
||||
try {
|
||||
const userId = (req as AuthenticatedRequest).userId;
|
||||
@@ -131,8 +143,33 @@ router.get("/", authenticateJWT, async (req, res) => {
|
||||
}
|
||||
});
|
||||
|
||||
// Route: Dismiss an alert for the authenticated user
|
||||
// POST /alerts/dismiss
|
||||
/**
|
||||
* @openapi
|
||||
* /alerts/dismiss:
|
||||
* post:
|
||||
* summary: Dismiss an alert
|
||||
* description: Marks an alert as dismissed for the authenticated user.
|
||||
* tags:
|
||||
* - Alerts
|
||||
* requestBody:
|
||||
* required: true
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* type: object
|
||||
* properties:
|
||||
* alertId:
|
||||
* type: string
|
||||
* responses:
|
||||
* 200:
|
||||
* description: Alert dismissed successfully.
|
||||
* 400:
|
||||
* description: Alert ID is required.
|
||||
* 409:
|
||||
* description: Alert already dismissed.
|
||||
* 500:
|
||||
* description: Failed to dismiss alert.
|
||||
*/
|
||||
router.post("/dismiss", authenticateJWT, async (req, res) => {
|
||||
try {
|
||||
const { alertId } = req.body;
|
||||
@@ -170,8 +207,20 @@ router.post("/dismiss", authenticateJWT, async (req, res) => {
|
||||
}
|
||||
});
|
||||
|
||||
// Route: Get dismissed alerts for a user
|
||||
// GET /alerts/dismissed/:userId
|
||||
/**
|
||||
* @openapi
|
||||
* /alerts/dismissed:
|
||||
* get:
|
||||
* summary: Get dismissed alerts
|
||||
* description: Fetches a list of alerts that have been dismissed by the authenticated user.
|
||||
* tags:
|
||||
* - Alerts
|
||||
* responses:
|
||||
* 200:
|
||||
* description: A list of dismissed alerts.
|
||||
* 500:
|
||||
* description: Failed to fetch dismissed alerts.
|
||||
*/
|
||||
router.get("/dismissed", authenticateJWT, async (req, res) => {
|
||||
try {
|
||||
const userId = (req as AuthenticatedRequest).userId;
|
||||
@@ -194,8 +243,33 @@ router.get("/dismissed", authenticateJWT, async (req, res) => {
|
||||
}
|
||||
});
|
||||
|
||||
// Route: Undismiss an alert for the authenticated user (remove from dismissed list)
|
||||
// DELETE /alerts/dismiss
|
||||
/**
|
||||
* @openapi
|
||||
* /alerts/dismiss:
|
||||
* delete:
|
||||
* summary: Undismiss an alert
|
||||
* description: Removes an alert from the dismissed list for the authenticated user.
|
||||
* tags:
|
||||
* - Alerts
|
||||
* requestBody:
|
||||
* required: true
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* type: object
|
||||
* properties:
|
||||
* alertId:
|
||||
* type: string
|
||||
* responses:
|
||||
* 200:
|
||||
* description: Alert undismissed successfully.
|
||||
* 400:
|
||||
* description: Alert ID is required.
|
||||
* 404:
|
||||
* description: Dismissed alert not found.
|
||||
* 500:
|
||||
* description: Failed to undismiss alert.
|
||||
*/
|
||||
router.delete("/dismiss", authenticateJWT, async (req, res) => {
|
||||
try {
|
||||
const { alertId } = req.body;
|
||||
|
||||
@@ -1,7 +1,15 @@
|
||||
import type { AuthenticatedRequest } from "../../../types/index.js";
|
||||
import type {
|
||||
AuthenticatedRequest,
|
||||
CredentialBackend,
|
||||
} from "../../../types/index.js";
|
||||
import express from "express";
|
||||
import { db } from "../db/index.js";
|
||||
import { sshCredentials, sshCredentialUsage, sshData } from "../db/schema.js";
|
||||
import {
|
||||
sshCredentials,
|
||||
sshCredentialUsage,
|
||||
sshData,
|
||||
hostAccess,
|
||||
} from "../db/schema.js";
|
||||
import { eq, and, desc, sql } from "drizzle-orm";
|
||||
import type { Request, Response } from "express";
|
||||
import { authLogger } from "../../utils/logger.js";
|
||||
@@ -76,8 +84,52 @@ const authManager = AuthManager.getInstance();
|
||||
const authenticateJWT = authManager.createAuthMiddleware();
|
||||
const requireDataAccess = authManager.createDataAccessMiddleware();
|
||||
|
||||
// Create a new credential
|
||||
// POST /credentials
|
||||
/**
|
||||
* @openapi
|
||||
* /credentials:
|
||||
* post:
|
||||
* summary: Create a new credential
|
||||
* description: Creates a new SSH credential for the authenticated user.
|
||||
* tags:
|
||||
* - Credentials
|
||||
* requestBody:
|
||||
* required: true
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* type: object
|
||||
* properties:
|
||||
* name:
|
||||
* type: string
|
||||
* description:
|
||||
* type: string
|
||||
* folder:
|
||||
* type: string
|
||||
* tags:
|
||||
* type: array
|
||||
* items:
|
||||
* type: string
|
||||
* authType:
|
||||
* type: string
|
||||
* enum: [password, key]
|
||||
* username:
|
||||
* type: string
|
||||
* password:
|
||||
* type: string
|
||||
* key:
|
||||
* type: string
|
||||
* keyPassword:
|
||||
* type: string
|
||||
* keyType:
|
||||
* type: string
|
||||
* responses:
|
||||
* 201:
|
||||
* description: Credential created successfully.
|
||||
* 400:
|
||||
* description: Invalid request body.
|
||||
* 500:
|
||||
* description: Failed to create credential.
|
||||
*/
|
||||
router.post(
|
||||
"/",
|
||||
authenticateJWT,
|
||||
@@ -223,8 +275,22 @@ router.post(
|
||||
},
|
||||
);
|
||||
|
||||
// Get all credentials for the authenticated user
|
||||
// GET /credentials
|
||||
/**
|
||||
* @openapi
|
||||
* /credentials:
|
||||
* get:
|
||||
* summary: Get all credentials
|
||||
* description: Retrieves all SSH credentials for the authenticated user.
|
||||
* tags:
|
||||
* - Credentials
|
||||
* responses:
|
||||
* 200:
|
||||
* description: A list of credentials.
|
||||
* 400:
|
||||
* description: Invalid userId.
|
||||
* 500:
|
||||
* description: Failed to fetch credentials.
|
||||
*/
|
||||
router.get(
|
||||
"/",
|
||||
authenticateJWT,
|
||||
@@ -256,8 +322,22 @@ router.get(
|
||||
},
|
||||
);
|
||||
|
||||
// Get all unique credential folders for the authenticated user
|
||||
// GET /credentials/folders
|
||||
/**
|
||||
* @openapi
|
||||
* /credentials/folders:
|
||||
* get:
|
||||
* summary: Get credential folders
|
||||
* description: Retrieves all unique credential folders for the authenticated user.
|
||||
* tags:
|
||||
* - Credentials
|
||||
* responses:
|
||||
* 200:
|
||||
* description: A list of folder names.
|
||||
* 400:
|
||||
* description: Invalid userId.
|
||||
* 500:
|
||||
* description: Failed to fetch credential folders.
|
||||
*/
|
||||
router.get(
|
||||
"/folders",
|
||||
authenticateJWT,
|
||||
@@ -294,8 +374,30 @@ router.get(
|
||||
},
|
||||
);
|
||||
|
||||
// Get a specific credential by ID (with plain text secrets)
|
||||
// GET /credentials/:id
|
||||
/**
|
||||
* @openapi
|
||||
* /credentials/{id}:
|
||||
* get:
|
||||
* summary: Get a specific credential
|
||||
* description: Retrieves a specific credential by its ID, including secrets.
|
||||
* tags:
|
||||
* - Credentials
|
||||
* parameters:
|
||||
* - in: path
|
||||
* name: id
|
||||
* required: true
|
||||
* schema:
|
||||
* type: integer
|
||||
* responses:
|
||||
* 200:
|
||||
* description: The requested credential.
|
||||
* 400:
|
||||
* description: Invalid request.
|
||||
* 404:
|
||||
* description: Credential not found.
|
||||
* 500:
|
||||
* description: Failed to fetch credential.
|
||||
*/
|
||||
router.get(
|
||||
"/:id",
|
||||
authenticateJWT,
|
||||
@@ -358,8 +460,41 @@ router.get(
|
||||
},
|
||||
);
|
||||
|
||||
// Update a credential
|
||||
// PUT /credentials/:id
|
||||
/**
|
||||
* @openapi
|
||||
* /credentials/{id}:
|
||||
* put:
|
||||
* summary: Update a credential
|
||||
* description: Updates a specific credential by its ID.
|
||||
* tags:
|
||||
* - Credentials
|
||||
* parameters:
|
||||
* - in: path
|
||||
* name: id
|
||||
* required: true
|
||||
* schema:
|
||||
* type: integer
|
||||
* requestBody:
|
||||
* required: true
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* type: object
|
||||
* properties:
|
||||
* name:
|
||||
* type: string
|
||||
* description:
|
||||
* type: string
|
||||
* responses:
|
||||
* 200:
|
||||
* description: The updated credential.
|
||||
* 400:
|
||||
* description: Invalid request.
|
||||
* 404:
|
||||
* description: Credential not found.
|
||||
* 500:
|
||||
* description: Failed to update credential.
|
||||
*/
|
||||
router.put(
|
||||
"/:id",
|
||||
authenticateJWT,
|
||||
@@ -470,6 +605,14 @@ router.put(
|
||||
userId,
|
||||
);
|
||||
|
||||
const { SharedCredentialManager } =
|
||||
await import("../../utils/shared-credential-manager.js");
|
||||
const sharedCredManager = SharedCredentialManager.getInstance();
|
||||
await sharedCredManager.updateSharedCredentialsForOriginal(
|
||||
parseInt(id),
|
||||
userId,
|
||||
);
|
||||
|
||||
const credential = updated[0];
|
||||
authLogger.success(
|
||||
`SSH credential updated: ${credential.name} (${credential.authType}) by user ${userId}`,
|
||||
@@ -494,8 +637,30 @@ router.put(
|
||||
},
|
||||
);
|
||||
|
||||
// Delete a credential
|
||||
// DELETE /credentials/:id
|
||||
/**
|
||||
* @openapi
|
||||
* /credentials/{id}:
|
||||
* delete:
|
||||
* summary: Delete a credential
|
||||
* description: Deletes a specific credential by its ID.
|
||||
* tags:
|
||||
* - Credentials
|
||||
* parameters:
|
||||
* - in: path
|
||||
* name: id
|
||||
* required: true
|
||||
* schema:
|
||||
* type: integer
|
||||
* responses:
|
||||
* 200:
|
||||
* description: Credential deleted successfully.
|
||||
* 400:
|
||||
* description: Invalid request.
|
||||
* 404:
|
||||
* description: Credential not found.
|
||||
* 500:
|
||||
* description: Failed to delete credential.
|
||||
*/
|
||||
router.delete(
|
||||
"/:id",
|
||||
authenticateJWT,
|
||||
@@ -524,8 +689,6 @@ router.delete(
|
||||
return res.status(404).json({ error: "Credential not found" });
|
||||
}
|
||||
|
||||
// Update hosts using this credential to set credentialId to null
|
||||
// This prevents orphaned references before deletion
|
||||
const hostsUsingCredential = await db
|
||||
.select()
|
||||
.from(sshData)
|
||||
@@ -552,10 +715,32 @@ router.delete(
|
||||
eq(sshData.userId, userId),
|
||||
),
|
||||
);
|
||||
|
||||
for (const host of hostsUsingCredential) {
|
||||
const revokedShares = await db
|
||||
.delete(hostAccess)
|
||||
.where(eq(hostAccess.hostId, host.id))
|
||||
.returning({ id: hostAccess.id });
|
||||
|
||||
if (revokedShares.length > 0) {
|
||||
authLogger.info(
|
||||
"Auto-revoked host shares due to credential deletion",
|
||||
{
|
||||
operation: "auto_revoke_shares",
|
||||
hostId: host.id,
|
||||
credentialId: parseInt(id),
|
||||
revokedCount: revokedShares.length,
|
||||
reason: "credential_deleted",
|
||||
},
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// sshCredentialUsage will be automatically deleted by ON DELETE CASCADE
|
||||
// No need for manual deletion
|
||||
const { SharedCredentialManager } =
|
||||
await import("../../utils/shared-credential-manager.js");
|
||||
const sharedCredManager = SharedCredentialManager.getInstance();
|
||||
await sharedCredManager.deleteSharedCredentialsForOriginal(parseInt(id));
|
||||
|
||||
await db
|
||||
.delete(sshCredentials)
|
||||
@@ -590,8 +775,35 @@ router.delete(
|
||||
},
|
||||
);
|
||||
|
||||
// Apply a credential to an SSH host (for quick application)
|
||||
// POST /credentials/:id/apply-to-host/:hostId
|
||||
/**
|
||||
* @openapi
|
||||
* /credentials/{id}/apply-to-host/{hostId}:
|
||||
* post:
|
||||
* summary: Apply a credential to a host
|
||||
* description: Applies a credential to an SSH host for quick application.
|
||||
* tags:
|
||||
* - Credentials
|
||||
* parameters:
|
||||
* - in: path
|
||||
* name: id
|
||||
* required: true
|
||||
* schema:
|
||||
* type: integer
|
||||
* - in: path
|
||||
* name: hostId
|
||||
* required: true
|
||||
* schema:
|
||||
* type: integer
|
||||
* responses:
|
||||
* 200:
|
||||
* description: Credential applied to host successfully.
|
||||
* 400:
|
||||
* description: Invalid request.
|
||||
* 404:
|
||||
* description: Credential not found.
|
||||
* 500:
|
||||
* description: Failed to apply credential to host.
|
||||
*/
|
||||
router.post(
|
||||
"/:id/apply-to-host/:hostId",
|
||||
authenticateJWT,
|
||||
@@ -669,8 +881,28 @@ router.post(
|
||||
},
|
||||
);
|
||||
|
||||
// Get hosts using a specific credential
|
||||
// GET /credentials/:id/hosts
|
||||
/**
|
||||
* @openapi
|
||||
* /credentials/{id}/hosts:
|
||||
* get:
|
||||
* summary: Get hosts using a credential
|
||||
* description: Retrieves a list of hosts that are using a specific credential.
|
||||
* tags:
|
||||
* - Credentials
|
||||
* parameters:
|
||||
* - in: path
|
||||
* name: id
|
||||
* required: true
|
||||
* schema:
|
||||
* type: integer
|
||||
* responses:
|
||||
* 200:
|
||||
* description: A list of hosts.
|
||||
* 400:
|
||||
* description: Invalid request.
|
||||
* 500:
|
||||
* description: Failed to fetch hosts using credential.
|
||||
*/
|
||||
router.get(
|
||||
"/:id/hosts",
|
||||
authenticateJWT,
|
||||
@@ -764,8 +996,33 @@ function formatSSHHostOutput(
|
||||
};
|
||||
}
|
||||
|
||||
// Rename a credential folder
|
||||
// PUT /credentials/folders/rename
|
||||
/**
|
||||
* @openapi
|
||||
* /credentials/folders/rename:
|
||||
* put:
|
||||
* summary: Rename a credential folder
|
||||
* description: Renames a credential folder for the authenticated user.
|
||||
* tags:
|
||||
* - Credentials
|
||||
* requestBody:
|
||||
* required: true
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* type: object
|
||||
* properties:
|
||||
* oldName:
|
||||
* type: string
|
||||
* newName:
|
||||
* type: string
|
||||
* responses:
|
||||
* 200:
|
||||
* description: Folder renamed successfully.
|
||||
* 400:
|
||||
* description: Both oldName and newName are required.
|
||||
* 500:
|
||||
* description: Failed to rename folder.
|
||||
*/
|
||||
router.put(
|
||||
"/folders/rename",
|
||||
authenticateJWT,
|
||||
@@ -804,8 +1061,33 @@ router.put(
|
||||
},
|
||||
);
|
||||
|
||||
// Detect SSH key type endpoint
|
||||
// POST /credentials/detect-key-type
|
||||
/**
|
||||
* @openapi
|
||||
* /credentials/detect-key-type:
|
||||
* post:
|
||||
* summary: Detect SSH key type
|
||||
* description: Detects the type of an SSH private key.
|
||||
* tags:
|
||||
* - Credentials
|
||||
* requestBody:
|
||||
* required: true
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* type: object
|
||||
* properties:
|
||||
* privateKey:
|
||||
* type: string
|
||||
* keyPassword:
|
||||
* type: string
|
||||
* responses:
|
||||
* 200:
|
||||
* description: Key type detection result.
|
||||
* 400:
|
||||
* description: Private key is required.
|
||||
* 500:
|
||||
* description: Failed to detect key type.
|
||||
*/
|
||||
router.post(
|
||||
"/detect-key-type",
|
||||
authenticateJWT,
|
||||
@@ -838,8 +1120,31 @@ router.post(
|
||||
},
|
||||
);
|
||||
|
||||
// Detect SSH public key type endpoint
|
||||
// POST /credentials/detect-public-key-type
|
||||
/**
|
||||
* @openapi
|
||||
* /credentials/detect-public-key-type:
|
||||
* post:
|
||||
* summary: Detect SSH public key type
|
||||
* description: Detects the type of an SSH public key.
|
||||
* tags:
|
||||
* - Credentials
|
||||
* requestBody:
|
||||
* required: true
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* type: object
|
||||
* properties:
|
||||
* publicKey:
|
||||
* type: string
|
||||
* responses:
|
||||
* 200:
|
||||
* description: Key type detection result.
|
||||
* 400:
|
||||
* description: Public key is required.
|
||||
* 500:
|
||||
* description: Failed to detect public key type.
|
||||
*/
|
||||
router.post(
|
||||
"/detect-public-key-type",
|
||||
authenticateJWT,
|
||||
@@ -873,8 +1178,35 @@ router.post(
|
||||
},
|
||||
);
|
||||
|
||||
// Validate SSH key pair endpoint
|
||||
// POST /credentials/validate-key-pair
|
||||
/**
|
||||
* @openapi
|
||||
* /credentials/validate-key-pair:
|
||||
* post:
|
||||
* summary: Validate SSH key pair
|
||||
* description: Validates if a given SSH private key and public key match.
|
||||
* tags:
|
||||
* - Credentials
|
||||
* requestBody:
|
||||
* required: true
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* type: object
|
||||
* properties:
|
||||
* privateKey:
|
||||
* type: string
|
||||
* publicKey:
|
||||
* type: string
|
||||
* keyPassword:
|
||||
* type: string
|
||||
* responses:
|
||||
* 200:
|
||||
* description: Key pair validation result.
|
||||
* 400:
|
||||
* description: Private key and public key are required.
|
||||
* 500:
|
||||
* description: Failed to validate key pair.
|
||||
*/
|
||||
router.post(
|
||||
"/validate-key-pair",
|
||||
authenticateJWT,
|
||||
@@ -917,8 +1249,32 @@ router.post(
|
||||
},
|
||||
);
|
||||
|
||||
// Generate new SSH key pair endpoint
|
||||
// POST /credentials/generate-key-pair
|
||||
/**
|
||||
* @openapi
|
||||
* /credentials/generate-key-pair:
|
||||
* post:
|
||||
* summary: Generate new SSH key pair
|
||||
* description: Generates a new SSH key pair.
|
||||
* tags:
|
||||
* - Credentials
|
||||
* requestBody:
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* type: object
|
||||
* properties:
|
||||
* keyType:
|
||||
* type: string
|
||||
* keySize:
|
||||
* type: integer
|
||||
* passphrase:
|
||||
* type: string
|
||||
* responses:
|
||||
* 200:
|
||||
* description: The new key pair.
|
||||
* 500:
|
||||
* description: Failed to generate SSH key pair.
|
||||
*/
|
||||
router.post(
|
||||
"/generate-key-pair",
|
||||
authenticateJWT,
|
||||
@@ -960,8 +1316,33 @@ router.post(
|
||||
},
|
||||
);
|
||||
|
||||
// Generate public key from private key endpoint
|
||||
// POST /credentials/generate-public-key
|
||||
/**
|
||||
* @openapi
|
||||
* /credentials/generate-public-key:
|
||||
* post:
|
||||
* summary: Generate public key from private key
|
||||
* description: Generates a public key from a given private key.
|
||||
* tags:
|
||||
* - Credentials
|
||||
* requestBody:
|
||||
* required: true
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* type: object
|
||||
* properties:
|
||||
* privateKey:
|
||||
* type: string
|
||||
* keyPassword:
|
||||
* type: string
|
||||
* responses:
|
||||
* 200:
|
||||
* description: The generated public key.
|
||||
* 400:
|
||||
* description: Private key is required.
|
||||
* 500:
|
||||
* description: Failed to generate public key.
|
||||
*/
|
||||
router.post(
|
||||
"/generate-public-key",
|
||||
authenticateJWT,
|
||||
@@ -1124,10 +1505,9 @@ router.post(
|
||||
|
||||
async function deploySSHKeyToHost(
|
||||
hostConfig: Record<string, unknown>,
|
||||
publicKey: string,
|
||||
// eslint-disable-next-line @typescript-eslint/no-unused-vars
|
||||
_credentialData: Record<string, unknown>,
|
||||
credData: CredentialBackend,
|
||||
): Promise<{ success: boolean; message?: string; error?: string }> {
|
||||
const publicKey = credData.public_key as string;
|
||||
return new Promise((resolve) => {
|
||||
const conn = new Client();
|
||||
|
||||
@@ -1248,7 +1628,7 @@ async function deploySSHKeyToHost(
|
||||
.replace(/'/g, "'\\''");
|
||||
|
||||
conn.exec(
|
||||
`printf '%s\\n' '${escapedKey}' >> ~/.ssh/authorized_keys && chmod 600 ~/.ssh/authorized_keys`,
|
||||
`printf '%s\n' '${escapedKey} ${credData.name}@Termix' >> ~/.ssh/authorized_keys && chmod 600 ~/.ssh/authorized_keys`,
|
||||
(err, stream) => {
|
||||
if (err) {
|
||||
clearTimeout(addTimeout);
|
||||
@@ -1467,8 +1847,41 @@ async function deploySSHKeyToHost(
|
||||
});
|
||||
}
|
||||
|
||||
// Deploy SSH Key to Host endpoint
|
||||
// POST /credentials/:id/deploy-to-host
|
||||
/**
|
||||
* @openapi
|
||||
* /credentials/{id}/deploy-to-host:
|
||||
* post:
|
||||
* summary: Deploy SSH key to a host
|
||||
* description: Deploys an SSH public key to a target host's authorized_keys file.
|
||||
* tags:
|
||||
* - Credentials
|
||||
* parameters:
|
||||
* - in: path
|
||||
* name: id
|
||||
* required: true
|
||||
* schema:
|
||||
* type: integer
|
||||
* requestBody:
|
||||
* required: true
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* type: object
|
||||
* properties:
|
||||
* targetHostId:
|
||||
* type: integer
|
||||
* responses:
|
||||
* 200:
|
||||
* description: SSH key deployed successfully.
|
||||
* 400:
|
||||
* description: Credential ID and target host ID are required.
|
||||
* 401:
|
||||
* description: Authentication required.
|
||||
* 404:
|
||||
* description: Credential or target host not found.
|
||||
* 500:
|
||||
* description: Failed to deploy SSH key.
|
||||
*/
|
||||
router.post(
|
||||
"/:id/deploy-to-host",
|
||||
authenticateJWT,
|
||||
@@ -1510,7 +1923,7 @@ router.post(
|
||||
});
|
||||
}
|
||||
|
||||
const credData = credential[0];
|
||||
const credData = credential[0] as unknown as CredentialBackend;
|
||||
|
||||
if (credData.authType !== "key") {
|
||||
return res.status(400).json({
|
||||
@@ -1519,7 +1932,7 @@ router.post(
|
||||
});
|
||||
}
|
||||
|
||||
const publicKey = credData.public_key || credData.publicKey;
|
||||
const publicKey = credData.public_key;
|
||||
if (!publicKey) {
|
||||
return res.status(400).json({
|
||||
success: false,
|
||||
@@ -1599,11 +2012,7 @@ router.post(
|
||||
}
|
||||
}
|
||||
|
||||
const deployResult = await deploySSHKeyToHost(
|
||||
hostConfig,
|
||||
publicKey as string,
|
||||
credData,
|
||||
);
|
||||
const deployResult = await deploySSHKeyToHost(hostConfig, credData);
|
||||
|
||||
if (deployResult.success) {
|
||||
res.json({
|
||||
|
||||
142
src/backend/database/routes/network-topology.ts
Normal file
142
src/backend/database/routes/network-topology.ts
Normal file
@@ -0,0 +1,142 @@
|
||||
import express from "express";
|
||||
import { eq } from "drizzle-orm";
|
||||
import { getDb } from "../db/index.js";
|
||||
import { networkTopology } from "../db/schema.js";
|
||||
import { AuthManager } from "../../utils/auth-manager.js";
|
||||
import type { AuthenticatedRequest } from "../../../types/index.js";
|
||||
|
||||
// Express sub-router for the /network-topology endpoints (mounted by the parent app).
const router = express.Router();
// Shared auth singleton; source of the JWT middleware applied to every route below.
const authManager = AuthManager.getInstance();
// Middleware that rejects requests lacking a valid JWT before the handler runs.
const authenticateJWT = authManager.createAuthMiddleware();
|
||||
|
||||
/**
|
||||
* @openapi
|
||||
* /network-topology:
|
||||
* get:
|
||||
* summary: Get network topology
|
||||
* description: Retrieves the network topology for the authenticated user.
|
||||
* tags:
|
||||
* - Network Topology
|
||||
* responses:
|
||||
* 200:
|
||||
* description: The network topology.
|
||||
* 401:
|
||||
* description: User not authenticated.
|
||||
* 500:
|
||||
* description: Failed to fetch network topology.
|
||||
*/
|
||||
router.get(
|
||||
"/",
|
||||
authenticateJWT,
|
||||
async (req: express.Request, res: express.Response) => {
|
||||
try {
|
||||
const userId = (req as AuthenticatedRequest).userId;
|
||||
if (!userId) {
|
||||
return res.status(401).json({ error: "User not authenticated" });
|
||||
}
|
||||
|
||||
const db = getDb();
|
||||
const result = await db
|
||||
.select()
|
||||
.from(networkTopology)
|
||||
.where(eq(networkTopology.userId, userId));
|
||||
|
||||
if (result.length > 0) {
|
||||
const topologyStr = result[0].topology;
|
||||
const topology = topologyStr ? JSON.parse(topologyStr) : null;
|
||||
return res.json(topology);
|
||||
} else {
|
||||
return res.json(null);
|
||||
}
|
||||
} catch (error) {
|
||||
console.error("Error fetching network topology:", error);
|
||||
return res
|
||||
.status(500)
|
||||
.json({
|
||||
error: "Failed to fetch network topology",
|
||||
details: (error as Error).message,
|
||||
});
|
||||
}
|
||||
},
|
||||
);
|
||||
|
||||
/**
|
||||
* @openapi
|
||||
* /network-topology:
|
||||
* post:
|
||||
* summary: Save network topology
|
||||
* description: Saves the network topology for the authenticated user.
|
||||
* tags:
|
||||
* - Network Topology
|
||||
* requestBody:
|
||||
* required: true
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* type: object
|
||||
* properties:
|
||||
* topology:
|
||||
* type: object
|
||||
* responses:
|
||||
* 200:
|
||||
* description: Network topology saved successfully.
|
||||
* 400:
|
||||
* description: Topology data is required.
|
||||
* 401:
|
||||
* description: User not authenticated.
|
||||
* 500:
|
||||
* description: Failed to save network topology.
|
||||
*/
|
||||
router.post(
|
||||
"/",
|
||||
authenticateJWT,
|
||||
async (req: express.Request, res: express.Response) => {
|
||||
try {
|
||||
const userId = (req as AuthenticatedRequest).userId;
|
||||
if (!userId) {
|
||||
return res.status(401).json({ error: "User not authenticated" });
|
||||
}
|
||||
|
||||
const { topology } = req.body;
|
||||
if (!topology) {
|
||||
return res.status(400).json({ error: "Topology data is required" });
|
||||
}
|
||||
|
||||
const db = getDb();
|
||||
|
||||
// Ensure topology is a string
|
||||
const topologyStr =
|
||||
typeof topology === "string" ? topology : JSON.stringify(topology);
|
||||
|
||||
const existing = await db
|
||||
.select()
|
||||
.from(networkTopology)
|
||||
.where(eq(networkTopology.userId, userId));
|
||||
|
||||
if (existing.length > 0) {
|
||||
// Update existing record
|
||||
await db
|
||||
.update(networkTopology)
|
||||
.set({ topology: topologyStr })
|
||||
.where(eq(networkTopology.userId, userId));
|
||||
} else {
|
||||
// Insert new record
|
||||
await db
|
||||
.insert(networkTopology)
|
||||
.values({ userId, topology: topologyStr });
|
||||
}
|
||||
|
||||
return res.json({ success: true });
|
||||
} catch (error) {
|
||||
console.error("Error saving network topology:", error);
|
||||
return res
|
||||
.status(500)
|
||||
.json({
|
||||
error: "Failed to save network topology",
|
||||
details: (error as Error).message,
|
||||
});
|
||||
}
|
||||
},
|
||||
);
|
||||
|
||||
export default router;
|
||||
1148
src/backend/database/routes/rbac.ts
Normal file
1148
src/backend/database/routes/rbac.ts
Normal file
File diff suppressed because it is too large
Load Diff
@@ -17,8 +17,22 @@ const authManager = AuthManager.getInstance();
|
||||
const authenticateJWT = authManager.createAuthMiddleware();
|
||||
const requireDataAccess = authManager.createDataAccessMiddleware();
|
||||
|
||||
// Get all snippet folders
|
||||
// GET /snippets/folders
|
||||
/**
|
||||
* @openapi
|
||||
* /snippets/folders:
|
||||
* get:
|
||||
* summary: Get all snippet folders
|
||||
* description: Retrieves all snippet folders for the authenticated user.
|
||||
* tags:
|
||||
* - Snippets
|
||||
* responses:
|
||||
* 200:
|
||||
* description: A list of snippet folders.
|
||||
* 400:
|
||||
* description: Invalid userId.
|
||||
* 500:
|
||||
* description: Failed to fetch snippet folders.
|
||||
*/
|
||||
router.get(
|
||||
"/folders",
|
||||
authenticateJWT,
|
||||
@@ -46,8 +60,37 @@ router.get(
|
||||
},
|
||||
);
|
||||
|
||||
// Create a new snippet folder
|
||||
// POST /snippets/folders
|
||||
/**
|
||||
* @openapi
|
||||
* /snippets/folders:
|
||||
* post:
|
||||
* summary: Create a new snippet folder
|
||||
* description: Creates a new snippet folder for the authenticated user.
|
||||
* tags:
|
||||
* - Snippets
|
||||
* requestBody:
|
||||
* required: true
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* type: object
|
||||
* properties:
|
||||
* name:
|
||||
* type: string
|
||||
* color:
|
||||
* type: string
|
||||
* icon:
|
||||
* type: string
|
||||
* responses:
|
||||
* 201:
|
||||
* description: Snippet folder created successfully.
|
||||
* 400:
|
||||
* description: Folder name is required.
|
||||
* 409:
|
||||
* description: Folder with this name already exists.
|
||||
* 500:
|
||||
* description: Failed to create snippet folder.
|
||||
*/
|
||||
router.post(
|
||||
"/folders",
|
||||
authenticateJWT,
|
||||
@@ -110,8 +153,41 @@ router.post(
|
||||
},
|
||||
);
|
||||
|
||||
// Update snippet folder metadata (color, icon)
|
||||
// PUT /snippets/folders/:name/metadata
|
||||
/**
|
||||
* @openapi
|
||||
* /snippets/folders/{name}/metadata:
|
||||
* put:
|
||||
* summary: Update snippet folder metadata
|
||||
* description: Updates the metadata (color, icon) of a snippet folder.
|
||||
* tags:
|
||||
* - Snippets
|
||||
* parameters:
|
||||
* - in: path
|
||||
* name: name
|
||||
* required: true
|
||||
* schema:
|
||||
* type: string
|
||||
* requestBody:
|
||||
* required: true
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* type: object
|
||||
* properties:
|
||||
* color:
|
||||
* type: string
|
||||
* icon:
|
||||
* type: string
|
||||
* responses:
|
||||
* 200:
|
||||
* description: Snippet folder metadata updated successfully.
|
||||
* 400:
|
||||
* description: Invalid request.
|
||||
* 404:
|
||||
* description: Folder not found.
|
||||
* 500:
|
||||
* description: Failed to update snippet folder metadata.
|
||||
*/
|
||||
router.put(
|
||||
"/folders/:name/metadata",
|
||||
authenticateJWT,
|
||||
@@ -194,8 +270,37 @@ router.put(
|
||||
},
|
||||
);
|
||||
|
||||
// Rename snippet folder
|
||||
// PUT /snippets/folders/rename
|
||||
/**
|
||||
* @openapi
|
||||
* /snippets/folders/rename:
|
||||
* put:
|
||||
* summary: Rename a snippet folder
|
||||
* description: Renames a snippet folder for the authenticated user.
|
||||
* tags:
|
||||
* - Snippets
|
||||
* requestBody:
|
||||
* required: true
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* type: object
|
||||
* properties:
|
||||
* oldName:
|
||||
* type: string
|
||||
* newName:
|
||||
* type: string
|
||||
* responses:
|
||||
* 200:
|
||||
* description: Folder renamed successfully.
|
||||
* 400:
|
||||
* description: Invalid request.
|
||||
* 404:
|
||||
* description: Folder not found.
|
||||
* 409:
|
||||
* description: Folder with new name already exists.
|
||||
* 500:
|
||||
* description: Failed to rename snippet folder.
|
||||
*/
|
||||
router.put(
|
||||
"/folders/rename",
|
||||
authenticateJWT,
|
||||
@@ -282,8 +387,28 @@ router.put(
|
||||
},
|
||||
);
|
||||
|
||||
// Delete snippet folder
|
||||
// DELETE /snippets/folders/:name
|
||||
/**
|
||||
* @openapi
|
||||
* /snippets/folders/{name}:
|
||||
* delete:
|
||||
* summary: Delete a snippet folder
|
||||
* description: Deletes a snippet folder and moves its snippets to the root.
|
||||
* tags:
|
||||
* - Snippets
|
||||
* parameters:
|
||||
* - in: path
|
||||
* name: name
|
||||
* required: true
|
||||
* schema:
|
||||
* type: string
|
||||
* responses:
|
||||
* 200:
|
||||
* description: Snippet folder deleted successfully.
|
||||
* 400:
|
||||
* description: Invalid request.
|
||||
* 500:
|
||||
* description: Failed to delete snippet folder.
|
||||
*/
|
||||
router.delete(
|
||||
"/folders/:name",
|
||||
authenticateJWT,
|
||||
@@ -338,8 +463,40 @@ router.delete(
|
||||
},
|
||||
);
|
||||
|
||||
// Reorder snippets (bulk update)
|
||||
// PUT /snippets/reorder
|
||||
/**
|
||||
* @openapi
|
||||
* /snippets/reorder:
|
||||
* put:
|
||||
* summary: Reorder snippets
|
||||
* description: Bulk updates the order and folder of snippets.
|
||||
* tags:
|
||||
* - Snippets
|
||||
* requestBody:
|
||||
* required: true
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* type: object
|
||||
* properties:
|
||||
* snippets:
|
||||
* type: array
|
||||
* items:
|
||||
* type: object
|
||||
* properties:
|
||||
* id:
|
||||
* type: integer
|
||||
* order:
|
||||
* type: integer
|
||||
* folder:
|
||||
* type: string
|
||||
* responses:
|
||||
* 200:
|
||||
* description: Snippets reordered successfully.
|
||||
* 400:
|
||||
* description: Invalid request.
|
||||
* 500:
|
||||
* description: Failed to reorder snippets.
|
||||
*/
|
||||
router.put(
|
||||
"/reorder",
|
||||
authenticateJWT,
|
||||
@@ -405,8 +562,35 @@ router.put(
|
||||
},
|
||||
);
|
||||
|
||||
// Execute a snippet on a host
|
||||
// POST /snippets/execute
|
||||
/**
|
||||
* @openapi
|
||||
* /snippets/execute:
|
||||
* post:
|
||||
* summary: Execute a snippet on a host
|
||||
* description: Executes a snippet on a specified host.
|
||||
* tags:
|
||||
* - Snippets
|
||||
* requestBody:
|
||||
* required: true
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* type: object
|
||||
* properties:
|
||||
* snippetId:
|
||||
* type: integer
|
||||
* hostId:
|
||||
* type: integer
|
||||
* responses:
|
||||
* 200:
|
||||
* description: Snippet executed successfully.
|
||||
* 400:
|
||||
* description: Snippet ID and Host ID are required.
|
||||
* 404:
|
||||
* description: Snippet or host not found.
|
||||
* 500:
|
||||
* description: Failed to execute snippet.
|
||||
*/
|
||||
router.post(
|
||||
"/execute",
|
||||
authenticateJWT,
|
||||
@@ -662,8 +846,22 @@ router.post(
|
||||
},
|
||||
);
|
||||
|
||||
// Get all snippets for the authenticated user
|
||||
// GET /snippets
|
||||
/**
|
||||
* @openapi
|
||||
* /snippets:
|
||||
* get:
|
||||
* summary: Get all snippets
|
||||
* description: Retrieves all snippets for the authenticated user.
|
||||
* tags:
|
||||
* - Snippets
|
||||
* responses:
|
||||
* 200:
|
||||
* description: A list of snippets.
|
||||
* 400:
|
||||
* description: Invalid userId.
|
||||
* 500:
|
||||
* description: Failed to fetch snippets.
|
||||
*/
|
||||
router.get(
|
||||
"/",
|
||||
authenticateJWT,
|
||||
@@ -696,8 +894,30 @@ router.get(
|
||||
},
|
||||
);
|
||||
|
||||
// Get a specific snippet by ID
|
||||
// GET /snippets/:id
|
||||
/**
|
||||
* @openapi
|
||||
* /snippets/{id}:
|
||||
* get:
|
||||
* summary: Get a specific snippet
|
||||
* description: Retrieves a specific snippet by its ID.
|
||||
* tags:
|
||||
* - Snippets
|
||||
* parameters:
|
||||
* - in: path
|
||||
* name: id
|
||||
* required: true
|
||||
* schema:
|
||||
* type: integer
|
||||
* responses:
|
||||
* 200:
|
||||
* description: The requested snippet.
|
||||
* 400:
|
||||
* description: Invalid request parameters.
|
||||
* 404:
|
||||
* description: Snippet not found.
|
||||
* 500:
|
||||
* description: Failed to fetch snippet.
|
||||
*/
|
||||
router.get(
|
||||
"/:id",
|
||||
authenticateJWT,
|
||||
@@ -735,8 +955,39 @@ router.get(
|
||||
},
|
||||
);
|
||||
|
||||
// Create a new snippet
|
||||
// POST /snippets
|
||||
/**
|
||||
* @openapi
|
||||
* /snippets:
|
||||
* post:
|
||||
* summary: Create a new snippet
|
||||
* description: Creates a new snippet for the authenticated user.
|
||||
* tags:
|
||||
* - Snippets
|
||||
* requestBody:
|
||||
* required: true
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* type: object
|
||||
* properties:
|
||||
* name:
|
||||
* type: string
|
||||
* content:
|
||||
* type: string
|
||||
* description:
|
||||
* type: string
|
||||
* folder:
|
||||
* type: string
|
||||
* order:
|
||||
* type: integer
|
||||
* responses:
|
||||
* 201:
|
||||
* description: Snippet created successfully.
|
||||
* 400:
|
||||
* description: Name and content are required.
|
||||
* 500:
|
||||
* description: Failed to create snippet.
|
||||
*/
|
||||
router.post(
|
||||
"/",
|
||||
authenticateJWT,
|
||||
@@ -806,8 +1057,47 @@ router.post(
|
||||
},
|
||||
);
|
||||
|
||||
// Update a snippet
|
||||
// PUT /snippets/:id
|
||||
/**
|
||||
* @openapi
|
||||
* /snippets/{id}:
|
||||
* put:
|
||||
* summary: Update a snippet
|
||||
* description: Updates a specific snippet by its ID.
|
||||
* tags:
|
||||
* - Snippets
|
||||
* parameters:
|
||||
* - in: path
|
||||
* name: id
|
||||
* required: true
|
||||
* schema:
|
||||
* type: integer
|
||||
* requestBody:
|
||||
* required: true
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* type: object
|
||||
* properties:
|
||||
* name:
|
||||
* type: string
|
||||
* content:
|
||||
* type: string
|
||||
* description:
|
||||
* type: string
|
||||
* folder:
|
||||
* type: string
|
||||
* order:
|
||||
* type: integer
|
||||
* responses:
|
||||
* 200:
|
||||
* description: The updated snippet.
|
||||
* 400:
|
||||
* description: Invalid request.
|
||||
* 404:
|
||||
* description: Snippet not found.
|
||||
* 500:
|
||||
* description: Failed to update snippet.
|
||||
*/
|
||||
router.put(
|
||||
"/:id",
|
||||
authenticateJWT,
|
||||
@@ -883,8 +1173,30 @@ router.put(
|
||||
},
|
||||
);
|
||||
|
||||
// Delete a snippet
|
||||
// DELETE /snippets/:id
|
||||
/**
|
||||
* @openapi
|
||||
* /snippets/{id}:
|
||||
* delete:
|
||||
* summary: Delete a snippet
|
||||
* description: Deletes a specific snippet by its ID.
|
||||
* tags:
|
||||
* - Snippets
|
||||
* parameters:
|
||||
* - in: path
|
||||
* name: id
|
||||
* required: true
|
||||
* schema:
|
||||
* type: integer
|
||||
* responses:
|
||||
* 200:
|
||||
* description: Snippet deleted successfully.
|
||||
* 400:
|
||||
* description: Invalid request.
|
||||
* 404:
|
||||
* description: Snippet not found.
|
||||
* 500:
|
||||
* description: Failed to delete snippet.
|
||||
*/
|
||||
router.delete(
|
||||
"/:id",
|
||||
authenticateJWT,
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@@ -17,8 +17,33 @@ const authManager = AuthManager.getInstance();
|
||||
const authenticateJWT = authManager.createAuthMiddleware();
|
||||
const requireDataAccess = authManager.createDataAccessMiddleware();
|
||||
|
||||
// Save command to history
|
||||
// POST /terminal/command_history
|
||||
/**
|
||||
* @openapi
|
||||
* /terminal/command_history:
|
||||
* post:
|
||||
* summary: Save command to history
|
||||
* description: Saves a command to the command history for a specific host.
|
||||
* tags:
|
||||
* - Terminal
|
||||
* requestBody:
|
||||
* required: true
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* type: object
|
||||
* properties:
|
||||
* hostId:
|
||||
* type: integer
|
||||
* command:
|
||||
* type: string
|
||||
* responses:
|
||||
* 201:
|
||||
* description: Command saved successfully.
|
||||
* 400:
|
||||
* description: Missing required parameters.
|
||||
* 500:
|
||||
* description: Failed to save command.
|
||||
*/
|
||||
router.post(
|
||||
"/command_history",
|
||||
authenticateJWT,
|
||||
@@ -59,8 +84,28 @@ router.post(
|
||||
},
|
||||
);
|
||||
|
||||
// Get command history for a specific host
|
||||
// GET /terminal/command_history/:hostId
|
||||
/**
|
||||
* @openapi
|
||||
* /terminal/command_history/{hostId}:
|
||||
* get:
|
||||
* summary: Get command history
|
||||
* description: Retrieves the command history for a specific host.
|
||||
* tags:
|
||||
* - Terminal
|
||||
* parameters:
|
||||
* - in: path
|
||||
* name: hostId
|
||||
* required: true
|
||||
* schema:
|
||||
* type: integer
|
||||
* responses:
|
||||
* 200:
|
||||
* description: A list of commands.
|
||||
* 400:
|
||||
* description: Invalid request parameters.
|
||||
* 500:
|
||||
* description: Failed to fetch history.
|
||||
*/
|
||||
router.get(
|
||||
"/command_history/:hostId",
|
||||
authenticateJWT,
|
||||
@@ -107,8 +152,33 @@ router.get(
|
||||
},
|
||||
);
|
||||
|
||||
// Delete a specific command from history
|
||||
// POST /terminal/command_history/delete
|
||||
/**
|
||||
* @openapi
|
||||
* /terminal/command_history/delete:
|
||||
* post:
|
||||
* summary: Delete a specific command from history
|
||||
* description: Deletes a specific command from the history of a host.
|
||||
* tags:
|
||||
* - Terminal
|
||||
* requestBody:
|
||||
* required: true
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* type: object
|
||||
* properties:
|
||||
* hostId:
|
||||
* type: integer
|
||||
* command:
|
||||
* type: string
|
||||
* responses:
|
||||
* 200:
|
||||
* description: Command deleted successfully.
|
||||
* 400:
|
||||
* description: Missing required parameters.
|
||||
* 500:
|
||||
* description: Failed to delete command.
|
||||
*/
|
||||
router.post(
|
||||
"/command_history/delete",
|
||||
authenticateJWT,
|
||||
@@ -150,8 +220,28 @@ router.post(
|
||||
},
|
||||
);
|
||||
|
||||
// Clear command history for a specific host (optional feature)
|
||||
// DELETE /terminal/command_history/:hostId
|
||||
/**
|
||||
* @openapi
|
||||
* /terminal/command_history/{hostId}:
|
||||
* delete:
|
||||
* summary: Clear command history
|
||||
* description: Clears the entire command history for a specific host.
|
||||
* tags:
|
||||
* - Terminal
|
||||
* parameters:
|
||||
* - in: path
|
||||
* name: hostId
|
||||
* required: true
|
||||
* schema:
|
||||
* type: integer
|
||||
* responses:
|
||||
* 200:
|
||||
* description: Command history cleared successfully.
|
||||
* 400:
|
||||
* description: Invalid request.
|
||||
* 500:
|
||||
* description: Failed to clear history.
|
||||
*/
|
||||
router.delete(
|
||||
"/command_history/:hostId",
|
||||
authenticateJWT,
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
632
src/backend/ssh/docker-console.ts
Normal file
632
src/backend/ssh/docker-console.ts
Normal file
@@ -0,0 +1,632 @@
|
||||
import { Client as SSHClient } from "ssh2";
|
||||
import { WebSocketServer, WebSocket } from "ws";
|
||||
import { parse as parseUrl } from "url";
|
||||
import { AuthManager } from "../utils/auth-manager.js";
|
||||
import { sshData, sshCredentials } from "../database/db/schema.js";
|
||||
import { and, eq } from "drizzle-orm";
|
||||
import { getDb } from "../database/db/index.js";
|
||||
import { SimpleDBOps } from "../utils/simple-db-ops.js";
|
||||
import { systemLogger } from "../utils/logger.js";
|
||||
import type { SSHHost } from "../../types/index.js";
|
||||
|
||||
const dockerConsoleLogger = systemLogger;
|
||||
|
||||
interface SSHSession {
|
||||
client: SSHClient;
|
||||
stream: any;
|
||||
isConnected: boolean;
|
||||
containerId?: string;
|
||||
shell?: string;
|
||||
}
|
||||
|
||||
const activeSessions = new Map<string, SSHSession>();
|
||||
|
||||
const wss = new WebSocketServer({
|
||||
host: "0.0.0.0",
|
||||
port: 30008,
|
||||
verifyClient: async (info) => {
|
||||
try {
|
||||
const url = parseUrl(info.req.url || "", true);
|
||||
const token = url.query.token as string;
|
||||
|
||||
if (!token) {
|
||||
return false;
|
||||
}
|
||||
|
||||
const authManager = AuthManager.getInstance();
|
||||
const decoded = await authManager.verifyJWTToken(token);
|
||||
|
||||
if (!decoded || !decoded.userId) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
} catch (error) {
|
||||
return false;
|
||||
}
|
||||
},
|
||||
});
|
||||
|
||||
async function detectShell(
|
||||
session: SSHSession,
|
||||
containerId: string,
|
||||
): Promise<string> {
|
||||
const shells = ["bash", "sh", "ash"];
|
||||
|
||||
for (const shell of shells) {
|
||||
try {
|
||||
await new Promise<void>((resolve, reject) => {
|
||||
session.client.exec(
|
||||
`docker exec ${containerId} which ${shell}`,
|
||||
(err, stream) => {
|
||||
if (err) return reject(err);
|
||||
|
||||
let output = "";
|
||||
stream.on("data", (data: Buffer) => {
|
||||
output += data.toString();
|
||||
});
|
||||
|
||||
stream.on("close", (code: number) => {
|
||||
if (code === 0 && output.trim()) {
|
||||
resolve();
|
||||
} else {
|
||||
reject(new Error(`Shell ${shell} not found`));
|
||||
}
|
||||
});
|
||||
|
||||
stream.stderr.on("data", () => {
|
||||
// Ignore stderr
|
||||
});
|
||||
},
|
||||
);
|
||||
});
|
||||
|
||||
return shell;
|
||||
} catch {
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
return "sh";
|
||||
}
|
||||
|
||||
async function createJumpHostChain(
|
||||
jumpHosts: any[],
|
||||
userId: string,
|
||||
): Promise<SSHClient | null> {
|
||||
if (!jumpHosts || jumpHosts.length === 0) {
|
||||
return null;
|
||||
}
|
||||
|
||||
let currentClient: SSHClient | null = null;
|
||||
|
||||
for (let i = 0; i < jumpHosts.length; i++) {
|
||||
const jumpHostId = jumpHosts[i].hostId;
|
||||
|
||||
const jumpHostData = await SimpleDBOps.select(
|
||||
getDb()
|
||||
.select()
|
||||
.from(sshData)
|
||||
.where(and(eq(sshData.id, jumpHostId), eq(sshData.userId, userId))),
|
||||
"ssh_data",
|
||||
userId,
|
||||
);
|
||||
|
||||
if (jumpHostData.length === 0) {
|
||||
throw new Error(`Jump host ${jumpHostId} not found`);
|
||||
}
|
||||
|
||||
const jumpHost = jumpHostData[0] as unknown as SSHHost;
|
||||
if (typeof jumpHost.jumpHosts === "string" && jumpHost.jumpHosts) {
|
||||
try {
|
||||
jumpHost.jumpHosts = JSON.parse(jumpHost.jumpHosts);
|
||||
} catch (e) {
|
||||
dockerConsoleLogger.error("Failed to parse jump hosts", e, {
|
||||
hostId: jumpHost.id,
|
||||
});
|
||||
jumpHost.jumpHosts = [];
|
||||
}
|
||||
}
|
||||
|
||||
let resolvedCredentials: any = {
|
||||
password: jumpHost.password,
|
||||
sshKey: jumpHost.key,
|
||||
keyPassword: jumpHost.keyPassword,
|
||||
authType: jumpHost.authType,
|
||||
};
|
||||
|
||||
if (jumpHost.credentialId) {
|
||||
const credentials = await SimpleDBOps.select(
|
||||
getDb()
|
||||
.select()
|
||||
.from(sshCredentials)
|
||||
.where(
|
||||
and(
|
||||
eq(sshCredentials.id, jumpHost.credentialId as number),
|
||||
eq(sshCredentials.userId, userId),
|
||||
),
|
||||
),
|
||||
"ssh_credentials",
|
||||
userId,
|
||||
);
|
||||
|
||||
if (credentials.length > 0) {
|
||||
const credential = credentials[0];
|
||||
resolvedCredentials = {
|
||||
password: credential.password,
|
||||
sshKey:
|
||||
credential.private_key || credential.privateKey || credential.key,
|
||||
keyPassword: credential.key_password || credential.keyPassword,
|
||||
authType: credential.auth_type || credential.authType,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
const client = new SSHClient();
|
||||
|
||||
const config: any = {
|
||||
host: jumpHost.ip,
|
||||
port: jumpHost.port || 22,
|
||||
username: jumpHost.username,
|
||||
tryKeyboard: true,
|
||||
readyTimeout: 60000,
|
||||
keepaliveInterval: 30000,
|
||||
keepaliveCountMax: 120,
|
||||
tcpKeepAlive: true,
|
||||
tcpKeepAliveInitialDelay: 30000,
|
||||
};
|
||||
|
||||
if (
|
||||
resolvedCredentials.authType === "password" &&
|
||||
resolvedCredentials.password
|
||||
) {
|
||||
config.password = resolvedCredentials.password;
|
||||
} else if (
|
||||
resolvedCredentials.authType === "key" &&
|
||||
resolvedCredentials.sshKey
|
||||
) {
|
||||
const cleanKey = resolvedCredentials.sshKey
|
||||
.trim()
|
||||
.replace(/\r\n/g, "\n")
|
||||
.replace(/\r/g, "\n");
|
||||
config.privateKey = Buffer.from(cleanKey, "utf8");
|
||||
if (resolvedCredentials.keyPassword) {
|
||||
config.passphrase = resolvedCredentials.keyPassword;
|
||||
}
|
||||
}
|
||||
|
||||
if (currentClient) {
|
||||
await new Promise<void>((resolve, reject) => {
|
||||
currentClient!.forwardOut(
|
||||
"127.0.0.1",
|
||||
0,
|
||||
jumpHost.ip,
|
||||
jumpHost.port || 22,
|
||||
(err, stream) => {
|
||||
if (err) return reject(err);
|
||||
config.sock = stream;
|
||||
resolve();
|
||||
},
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
await new Promise<void>((resolve, reject) => {
|
||||
client.on("ready", () => resolve());
|
||||
client.on("error", reject);
|
||||
client.connect(config);
|
||||
});
|
||||
|
||||
currentClient = client;
|
||||
}
|
||||
|
||||
return currentClient;
|
||||
}
|
||||
|
||||
wss.on("connection", async (ws: WebSocket, req) => {
|
||||
const userId = (req as any).userId;
|
||||
const sessionId = `docker-console-${Date.now()}-${Math.random()}`;
|
||||
|
||||
let sshSession: SSHSession | null = null;
|
||||
|
||||
ws.on("message", async (data) => {
|
||||
try {
|
||||
const message = JSON.parse(data.toString());
|
||||
|
||||
switch (message.type) {
|
||||
case "connect": {
|
||||
const { hostConfig, containerId, shell, cols, rows } =
|
||||
message.data as {
|
||||
hostConfig: SSHHost;
|
||||
containerId: string;
|
||||
shell?: string;
|
||||
cols?: number;
|
||||
rows?: number;
|
||||
};
|
||||
|
||||
if (
|
||||
typeof hostConfig.jumpHosts === "string" &&
|
||||
hostConfig.jumpHosts
|
||||
) {
|
||||
try {
|
||||
hostConfig.jumpHosts = JSON.parse(hostConfig.jumpHosts);
|
||||
} catch (e) {
|
||||
dockerConsoleLogger.error("Failed to parse jump hosts", e, {
|
||||
hostId: hostConfig.id,
|
||||
});
|
||||
hostConfig.jumpHosts = [];
|
||||
}
|
||||
}
|
||||
|
||||
if (!hostConfig || !containerId) {
|
||||
ws.send(
|
||||
JSON.stringify({
|
||||
type: "error",
|
||||
message: "Host configuration and container ID are required",
|
||||
}),
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
if (!hostConfig.enableDocker) {
|
||||
ws.send(
|
||||
JSON.stringify({
|
||||
type: "error",
|
||||
message:
|
||||
"Docker is not enabled for this host. Enable it in Host Settings.",
|
||||
}),
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
let resolvedCredentials: any = {
|
||||
password: hostConfig.password,
|
||||
sshKey: hostConfig.key,
|
||||
keyPassword: hostConfig.keyPassword,
|
||||
authType: hostConfig.authType,
|
||||
};
|
||||
|
||||
if (hostConfig.credentialId) {
|
||||
const credentials = await SimpleDBOps.select(
|
||||
getDb()
|
||||
.select()
|
||||
.from(sshCredentials)
|
||||
.where(
|
||||
and(
|
||||
eq(sshCredentials.id, hostConfig.credentialId as number),
|
||||
eq(sshCredentials.userId, userId),
|
||||
),
|
||||
),
|
||||
"ssh_credentials",
|
||||
userId,
|
||||
);
|
||||
|
||||
if (credentials.length > 0) {
|
||||
const credential = credentials[0];
|
||||
resolvedCredentials = {
|
||||
password: credential.password,
|
||||
sshKey:
|
||||
credential.private_key ||
|
||||
credential.privateKey ||
|
||||
credential.key,
|
||||
keyPassword:
|
||||
credential.key_password || credential.keyPassword,
|
||||
authType: credential.auth_type || credential.authType,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
const client = new SSHClient();
|
||||
|
||||
const config: any = {
|
||||
host: hostConfig.ip,
|
||||
port: hostConfig.port || 22,
|
||||
username: hostConfig.username,
|
||||
tryKeyboard: true,
|
||||
readyTimeout: 60000,
|
||||
keepaliveInterval: 30000,
|
||||
keepaliveCountMax: 120,
|
||||
tcpKeepAlive: true,
|
||||
tcpKeepAliveInitialDelay: 30000,
|
||||
};
|
||||
|
||||
if (
|
||||
resolvedCredentials.authType === "password" &&
|
||||
resolvedCredentials.password
|
||||
) {
|
||||
config.password = resolvedCredentials.password;
|
||||
} else if (
|
||||
resolvedCredentials.authType === "key" &&
|
||||
resolvedCredentials.sshKey
|
||||
) {
|
||||
const cleanKey = resolvedCredentials.sshKey
|
||||
.trim()
|
||||
.replace(/\r\n/g, "\n")
|
||||
.replace(/\r/g, "\n");
|
||||
config.privateKey = Buffer.from(cleanKey, "utf8");
|
||||
if (resolvedCredentials.keyPassword) {
|
||||
config.passphrase = resolvedCredentials.keyPassword;
|
||||
}
|
||||
}
|
||||
|
||||
if (hostConfig.jumpHosts && hostConfig.jumpHosts.length > 0) {
|
||||
const jumpClient = await createJumpHostChain(
|
||||
hostConfig.jumpHosts,
|
||||
userId,
|
||||
);
|
||||
if (jumpClient) {
|
||||
const stream = await new Promise<any>((resolve, reject) => {
|
||||
jumpClient.forwardOut(
|
||||
"127.0.0.1",
|
||||
0,
|
||||
hostConfig.ip,
|
||||
hostConfig.port || 22,
|
||||
(err, stream) => {
|
||||
if (err) return reject(err);
|
||||
resolve(stream);
|
||||
},
|
||||
);
|
||||
});
|
||||
config.sock = stream;
|
||||
}
|
||||
}
|
||||
|
||||
await new Promise<void>((resolve, reject) => {
|
||||
client.on("ready", () => resolve());
|
||||
client.on("error", reject);
|
||||
client.connect(config);
|
||||
});
|
||||
|
||||
sshSession = {
|
||||
client,
|
||||
stream: null,
|
||||
isConnected: true,
|
||||
containerId,
|
||||
};
|
||||
|
||||
activeSessions.set(sessionId, sshSession);
|
||||
|
||||
let shellToUse = shell || "bash";
|
||||
|
||||
if (shell) {
|
||||
try {
|
||||
await new Promise<void>((resolve, reject) => {
|
||||
client.exec(
|
||||
`docker exec ${containerId} which ${shell}`,
|
||||
(err, stream) => {
|
||||
if (err) return reject(err);
|
||||
|
||||
let output = "";
|
||||
stream.on("data", (data: Buffer) => {
|
||||
output += data.toString();
|
||||
});
|
||||
|
||||
stream.on("close", (code: number) => {
|
||||
if (code === 0 && output.trim()) {
|
||||
resolve();
|
||||
} else {
|
||||
reject(new Error(`Shell ${shell} not available`));
|
||||
}
|
||||
});
|
||||
|
||||
stream.stderr.on("data", () => {
|
||||
// Ignore stderr
|
||||
});
|
||||
},
|
||||
);
|
||||
});
|
||||
} catch {
|
||||
dockerConsoleLogger.warn(
|
||||
`Requested shell ${shell} not found, detecting available shell`,
|
||||
{
|
||||
operation: "shell_validation",
|
||||
sessionId,
|
||||
containerId,
|
||||
requestedShell: shell,
|
||||
},
|
||||
);
|
||||
shellToUse = await detectShell(sshSession, containerId);
|
||||
}
|
||||
} else {
|
||||
shellToUse = await detectShell(sshSession, containerId);
|
||||
}
|
||||
|
||||
sshSession.shell = shellToUse;
|
||||
|
||||
const execCommand = `docker exec -it ${containerId} /bin/${shellToUse}`;
|
||||
|
||||
client.exec(
|
||||
execCommand,
|
||||
{
|
||||
pty: {
|
||||
term: "xterm-256color",
|
||||
cols: cols || 80,
|
||||
rows: rows || 24,
|
||||
},
|
||||
},
|
||||
(err, stream) => {
|
||||
if (err) {
|
||||
dockerConsoleLogger.error(
|
||||
"Failed to create docker exec",
|
||||
err,
|
||||
{
|
||||
operation: "docker_exec",
|
||||
sessionId,
|
||||
containerId,
|
||||
},
|
||||
);
|
||||
|
||||
ws.send(
|
||||
JSON.stringify({
|
||||
type: "error",
|
||||
message: `Failed to start console: ${err.message}`,
|
||||
}),
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
sshSession!.stream = stream;
|
||||
|
||||
stream.on("data", (data: Buffer) => {
|
||||
if (ws.readyState === WebSocket.OPEN) {
|
||||
ws.send(
|
||||
JSON.stringify({
|
||||
type: "output",
|
||||
data: data.toString("utf8"),
|
||||
}),
|
||||
);
|
||||
}
|
||||
});
|
||||
|
||||
stream.stderr.on("data", (data: Buffer) => {});
|
||||
|
||||
stream.on("close", () => {
|
||||
if (ws.readyState === WebSocket.OPEN) {
|
||||
ws.send(
|
||||
JSON.stringify({
|
||||
type: "disconnected",
|
||||
message: "Console session ended",
|
||||
}),
|
||||
);
|
||||
}
|
||||
|
||||
if (sshSession) {
|
||||
sshSession.client.end();
|
||||
activeSessions.delete(sessionId);
|
||||
}
|
||||
});
|
||||
|
||||
ws.send(
|
||||
JSON.stringify({
|
||||
type: "connected",
|
||||
data: {
|
||||
shell: shellToUse,
|
||||
requestedShell: shell,
|
||||
shellChanged: shell && shell !== shellToUse,
|
||||
},
|
||||
}),
|
||||
);
|
||||
},
|
||||
);
|
||||
} catch (error) {
|
||||
dockerConsoleLogger.error("Failed to connect to container", error, {
|
||||
operation: "console_connect",
|
||||
sessionId,
|
||||
containerId: message.data.containerId,
|
||||
});
|
||||
|
||||
ws.send(
|
||||
JSON.stringify({
|
||||
type: "error",
|
||||
message:
|
||||
error instanceof Error
|
||||
? error.message
|
||||
: "Failed to connect to container",
|
||||
}),
|
||||
);
|
||||
}
|
||||
break;
|
||||
}
|
||||
|
||||
case "input": {
|
||||
if (sshSession && sshSession.stream) {
|
||||
sshSession.stream.write(message.data);
|
||||
}
|
||||
break;
|
||||
}
|
||||
|
||||
case "resize": {
|
||||
if (sshSession && sshSession.stream) {
|
||||
const { cols, rows } = message.data;
|
||||
sshSession.stream.setWindow(rows, cols);
|
||||
}
|
||||
break;
|
||||
}
|
||||
|
||||
case "disconnect": {
|
||||
if (sshSession) {
|
||||
if (sshSession.stream) {
|
||||
sshSession.stream.end();
|
||||
}
|
||||
sshSession.client.end();
|
||||
activeSessions.delete(sessionId);
|
||||
|
||||
ws.send(
|
||||
JSON.stringify({
|
||||
type: "disconnected",
|
||||
message: "Disconnected from container",
|
||||
}),
|
||||
);
|
||||
}
|
||||
break;
|
||||
}
|
||||
|
||||
case "ping": {
|
||||
if (ws.readyState === WebSocket.OPEN) {
|
||||
ws.send(JSON.stringify({ type: "pong" }));
|
||||
}
|
||||
break;
|
||||
}
|
||||
|
||||
default:
|
||||
dockerConsoleLogger.warn("Unknown message type", {
|
||||
operation: "ws_message",
|
||||
type: message.type,
|
||||
});
|
||||
}
|
||||
} catch (error) {
|
||||
dockerConsoleLogger.error("WebSocket message error", error, {
|
||||
operation: "ws_message",
|
||||
sessionId,
|
||||
});
|
||||
|
||||
ws.send(
|
||||
JSON.stringify({
|
||||
type: "error",
|
||||
message: error instanceof Error ? error.message : "An error occurred",
|
||||
}),
|
||||
);
|
||||
}
|
||||
});
|
||||
|
||||
ws.on("close", () => {
|
||||
if (sshSession) {
|
||||
if (sshSession.stream) {
|
||||
sshSession.stream.end();
|
||||
}
|
||||
sshSession.client.end();
|
||||
activeSessions.delete(sessionId);
|
||||
}
|
||||
});
|
||||
|
||||
ws.on("error", (error) => {
|
||||
dockerConsoleLogger.error("WebSocket error", error, {
|
||||
operation: "ws_error",
|
||||
sessionId,
|
||||
});
|
||||
|
||||
if (sshSession) {
|
||||
if (sshSession.stream) {
|
||||
sshSession.stream.end();
|
||||
}
|
||||
sshSession.client.end();
|
||||
activeSessions.delete(sessionId);
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
process.on("SIGTERM", () => {
|
||||
activeSessions.forEach((session, sessionId) => {
|
||||
if (session.stream) {
|
||||
session.stream.end();
|
||||
}
|
||||
session.client.end();
|
||||
});
|
||||
|
||||
activeSessions.clear();
|
||||
|
||||
wss.close(() => {
|
||||
process.exit(0);
|
||||
});
|
||||
});
|
||||
2340
src/backend/ssh/docker.ts
Normal file
2340
src/backend/ssh/docker.ts
Normal file
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
@@ -14,6 +14,7 @@ import { sshLogger } from "../utils/logger.js";
|
||||
import { SimpleDBOps } from "../utils/simple-db-ops.js";
|
||||
import { AuthManager } from "../utils/auth-manager.js";
|
||||
import { UserCrypto } from "../utils/user-crypto.js";
|
||||
import { createSocks5Connection } from "../utils/socks5-helper.js";
|
||||
|
||||
interface ConnectToHostData {
|
||||
cols: number;
|
||||
@@ -32,6 +33,12 @@ interface ConnectToHostData {
|
||||
userId?: string;
|
||||
forceKeyboardInteractive?: boolean;
|
||||
jumpHosts?: Array<{ hostId: number }>;
|
||||
useSocks5?: boolean;
|
||||
socks5Host?: string;
|
||||
socks5Port?: number;
|
||||
socks5Username?: string;
|
||||
socks5Password?: string;
|
||||
socks5ProxyChain?: unknown;
|
||||
};
|
||||
initialPath?: string;
|
||||
executeCommand?: string;
|
||||
@@ -130,10 +137,12 @@ async function createJumpHostChain(
|
||||
const clients: Client[] = [];
|
||||
|
||||
try {
|
||||
for (let i = 0; i < jumpHosts.length; i++) {
|
||||
const jumpHostConfig = await resolveJumpHost(jumpHosts[i].hostId, userId);
|
||||
const jumpHostConfigs = await Promise.all(
|
||||
jumpHosts.map((jh) => resolveJumpHost(jh.hostId, userId)),
|
||||
);
|
||||
|
||||
if (!jumpHostConfig) {
|
||||
for (let i = 0; i < jumpHostConfigs.length; i++) {
|
||||
if (!jumpHostConfigs[i]) {
|
||||
sshLogger.error(`Jump host ${i + 1} not found`, undefined, {
|
||||
operation: "jump_host_chain",
|
||||
hostId: jumpHosts[i].hostId,
|
||||
@@ -141,6 +150,10 @@ async function createJumpHostChain(
|
||||
clients.forEach((c) => c.end());
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
for (let i = 0; i < jumpHostConfigs.length; i++) {
|
||||
const jumpHostConfig = jumpHostConfigs[i];
|
||||
|
||||
const jumpClient = new Client();
|
||||
clients.push(jumpClient);
|
||||
@@ -316,9 +329,10 @@ wss.on("connection", async (ws: WebSocket, req) => {
|
||||
|
||||
let sshConn: Client | null = null;
|
||||
let sshStream: ClientChannel | null = null;
|
||||
let pingInterval: NodeJS.Timeout | null = null;
|
||||
let keyboardInteractiveFinish: ((responses: string[]) => void) | null = null;
|
||||
let totpPromptSent = false;
|
||||
let totpAttempts = 0;
|
||||
let totpTimeout: NodeJS.Timeout | null = null;
|
||||
let isKeyboardInteractive = false;
|
||||
let keyboardInteractiveResponded = false;
|
||||
let isConnecting = false;
|
||||
@@ -435,9 +449,15 @@ wss.on("connection", async (ws: WebSocket, req) => {
|
||||
case "totp_response": {
|
||||
const totpData = data as TOTPResponseData;
|
||||
if (keyboardInteractiveFinish && totpData?.code) {
|
||||
if (totpTimeout) {
|
||||
clearTimeout(totpTimeout);
|
||||
totpTimeout = null;
|
||||
}
|
||||
const totpCode = totpData.code;
|
||||
totpAttempts++;
|
||||
keyboardInteractiveFinish([totpCode]);
|
||||
keyboardInteractiveFinish = null;
|
||||
totpPromptSent = false;
|
||||
} else {
|
||||
sshLogger.warn("TOTP response received but no callback available", {
|
||||
operation: "totp_response_error",
|
||||
@@ -458,6 +478,10 @@ wss.on("connection", async (ws: WebSocket, req) => {
|
||||
case "password_response": {
|
||||
const passwordData = data as TOTPResponseData;
|
||||
if (keyboardInteractiveFinish && passwordData?.code) {
|
||||
if (totpTimeout) {
|
||||
clearTimeout(totpTimeout);
|
||||
totpTimeout = null;
|
||||
}
|
||||
const password = passwordData.code;
|
||||
keyboardInteractiveFinish([password]);
|
||||
keyboardInteractiveFinish = null;
|
||||
@@ -597,6 +621,13 @@ wss.on("connection", async (ws: WebSocket, req) => {
|
||||
isConnecting,
|
||||
isConnected,
|
||||
});
|
||||
ws.send(
|
||||
JSON.stringify({
|
||||
type: "error",
|
||||
message: "Connection already in progress",
|
||||
code: "DUPLICATE_CONNECTION",
|
||||
}),
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
@@ -730,6 +761,36 @@ wss.on("connection", async (ws: WebSocket, req) => {
|
||||
return;
|
||||
}
|
||||
|
||||
sshLogger.info("Creating shell", {
|
||||
operation: "ssh_shell_start",
|
||||
hostId: id,
|
||||
ip,
|
||||
port,
|
||||
username,
|
||||
});
|
||||
|
||||
let shellCallbackReceived = false;
|
||||
const shellTimeout = setTimeout(() => {
|
||||
if (!shellCallbackReceived && isShellInitializing) {
|
||||
sshLogger.error("Shell creation timeout - no response from server", {
|
||||
operation: "ssh_shell_timeout",
|
||||
hostId: id,
|
||||
ip,
|
||||
port,
|
||||
username,
|
||||
});
|
||||
isShellInitializing = false;
|
||||
ws.send(
|
||||
JSON.stringify({
|
||||
type: "error",
|
||||
message:
|
||||
"Shell creation timeout. The server may not support interactive shells or the connection was interrupted.",
|
||||
}),
|
||||
);
|
||||
cleanupSSH(connectionTimeout);
|
||||
}
|
||||
}, 15000);
|
||||
|
||||
conn.shell(
|
||||
{
|
||||
rows: data.rows,
|
||||
@@ -737,6 +798,8 @@ wss.on("connection", async (ws: WebSocket, req) => {
|
||||
term: "xterm-256color",
|
||||
} as PseudoTtyOptions,
|
||||
(err, stream) => {
|
||||
shellCallbackReceived = true;
|
||||
clearTimeout(shellTimeout);
|
||||
isShellInitializing = false;
|
||||
|
||||
if (err) {
|
||||
@@ -753,6 +816,7 @@ wss.on("connection", async (ws: WebSocket, req) => {
|
||||
message: "Shell error: " + err.message,
|
||||
}),
|
||||
);
|
||||
cleanupSSH(connectionTimeout);
|
||||
return;
|
||||
}
|
||||
|
||||
@@ -802,8 +866,6 @@ wss.on("connection", async (ws: WebSocket, req) => {
|
||||
);
|
||||
});
|
||||
|
||||
setupPingInterval();
|
||||
|
||||
if (initialPath && initialPath.trim() !== "") {
|
||||
const cdCommand = `cd "${initialPath.replace(/"/g, '\\"')}" && pwd\n`;
|
||||
stream.write(cdCommand);
|
||||
@@ -940,6 +1002,31 @@ wss.on("connection", async (ws: WebSocket, req) => {
|
||||
|
||||
sshConn.on("close", () => {
|
||||
clearTimeout(connectionTimeout);
|
||||
if (isShellInitializing || (isConnected && !sshStream)) {
|
||||
sshLogger.warn("SSH connection closed during shell initialization", {
|
||||
operation: "ssh_close_during_init",
|
||||
hostId: id,
|
||||
ip,
|
||||
port,
|
||||
username,
|
||||
isShellInitializing,
|
||||
hasStream: !!sshStream,
|
||||
});
|
||||
ws.send(
|
||||
JSON.stringify({
|
||||
type: "error",
|
||||
message:
|
||||
"Connection closed during shell initialization. The server may have rejected the shell request.",
|
||||
}),
|
||||
);
|
||||
} else if (!sshStream) {
|
||||
ws.send(
|
||||
JSON.stringify({
|
||||
type: "disconnected",
|
||||
message: "Connection closed",
|
||||
}),
|
||||
);
|
||||
}
|
||||
cleanupSSH(connectionTimeout);
|
||||
});
|
||||
|
||||
@@ -987,6 +1074,25 @@ wss.on("connection", async (ws: WebSocket, req) => {
|
||||
|
||||
finish(responses);
|
||||
};
|
||||
|
||||
totpTimeout = setTimeout(() => {
|
||||
if (keyboardInteractiveFinish) {
|
||||
keyboardInteractiveFinish = null;
|
||||
totpPromptSent = false;
|
||||
sshLogger.warn("TOTP prompt timeout", {
|
||||
operation: "totp_timeout",
|
||||
hostId: id,
|
||||
});
|
||||
ws.send(
|
||||
JSON.stringify({
|
||||
type: "error",
|
||||
message: "TOTP verification timeout. Please reconnect.",
|
||||
}),
|
||||
);
|
||||
cleanupSSH(connectionTimeout);
|
||||
}
|
||||
}, 180000);
|
||||
|
||||
ws.send(
|
||||
JSON.stringify({
|
||||
type: "totp_required",
|
||||
@@ -1021,6 +1127,24 @@ wss.on("connection", async (ws: WebSocket, req) => {
|
||||
finish(responses);
|
||||
};
|
||||
|
||||
totpTimeout = setTimeout(() => {
|
||||
if (keyboardInteractiveFinish) {
|
||||
keyboardInteractiveFinish = null;
|
||||
keyboardInteractiveResponded = false;
|
||||
sshLogger.warn("Password prompt timeout", {
|
||||
operation: "password_timeout",
|
||||
hostId: id,
|
||||
});
|
||||
ws.send(
|
||||
JSON.stringify({
|
||||
type: "error",
|
||||
message: "Password verification timeout. Please reconnect.",
|
||||
}),
|
||||
);
|
||||
cleanupSSH(connectionTimeout);
|
||||
}
|
||||
}, 180000);
|
||||
|
||||
ws.send(
|
||||
JSON.stringify({
|
||||
type: "password_required",
|
||||
@@ -1128,9 +1252,7 @@ wss.on("connection", async (ws: WebSocket, req) => {
|
||||
return;
|
||||
}
|
||||
|
||||
if (!hostConfig.forceKeyboardInteractive) {
|
||||
connectConfig.password = resolvedCredentials.password;
|
||||
}
|
||||
connectConfig.password = resolvedCredentials.password;
|
||||
} else if (
|
||||
resolvedCredentials.authType === "key" &&
|
||||
resolvedCredentials.key
|
||||
@@ -1183,6 +1305,49 @@ wss.on("connection", async (ws: WebSocket, req) => {
|
||||
return;
|
||||
}
|
||||
|
||||
if (
|
||||
hostConfig.useSocks5 &&
|
||||
(hostConfig.socks5Host ||
|
||||
(hostConfig.socks5ProxyChain &&
|
||||
(hostConfig.socks5ProxyChain as any).length > 0))
|
||||
) {
|
||||
try {
|
||||
const socks5Socket = await createSocks5Connection(ip, port, {
|
||||
useSocks5: hostConfig.useSocks5,
|
||||
socks5Host: hostConfig.socks5Host,
|
||||
socks5Port: hostConfig.socks5Port,
|
||||
socks5Username: hostConfig.socks5Username,
|
||||
socks5Password: hostConfig.socks5Password,
|
||||
socks5ProxyChain: hostConfig.socks5ProxyChain as any,
|
||||
});
|
||||
|
||||
if (socks5Socket) {
|
||||
connectConfig.sock = socks5Socket;
|
||||
sshConn.connect(connectConfig);
|
||||
return;
|
||||
}
|
||||
} catch (socks5Error) {
|
||||
sshLogger.error("SOCKS5 connection failed", socks5Error, {
|
||||
operation: "socks5_connect",
|
||||
hostId: id,
|
||||
proxyHost: hostConfig.socks5Host,
|
||||
proxyPort: hostConfig.socks5Port || 1080,
|
||||
});
|
||||
ws.send(
|
||||
JSON.stringify({
|
||||
type: "error",
|
||||
message:
|
||||
"SOCKS5 proxy connection failed: " +
|
||||
(socks5Error instanceof Error
|
||||
? socks5Error.message
|
||||
: "Unknown error"),
|
||||
}),
|
||||
);
|
||||
cleanupSSH(connectionTimeout);
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
if (
|
||||
hostConfig.jumpHosts &&
|
||||
hostConfig.jumpHosts.length > 0 &&
|
||||
@@ -1279,9 +1444,9 @@ wss.on("connection", async (ws: WebSocket, req) => {
|
||||
clearTimeout(timeoutId);
|
||||
}
|
||||
|
||||
if (pingInterval) {
|
||||
clearInterval(pingInterval);
|
||||
pingInterval = null;
|
||||
if (totpTimeout) {
|
||||
clearTimeout(totpTimeout);
|
||||
totpTimeout = null;
|
||||
}
|
||||
|
||||
if (sshStream) {
|
||||
@@ -1309,35 +1474,21 @@ wss.on("connection", async (ws: WebSocket, req) => {
|
||||
}
|
||||
|
||||
totpPromptSent = false;
|
||||
totpAttempts = 0;
|
||||
isKeyboardInteractive = false;
|
||||
keyboardInteractiveResponded = false;
|
||||
keyboardInteractiveFinish = null;
|
||||
isConnecting = false;
|
||||
isConnected = false;
|
||||
|
||||
setTimeout(() => {
|
||||
isCleaningUp = false;
|
||||
}, 100);
|
||||
isCleaningUp = false;
|
||||
}
|
||||
|
||||
function setupPingInterval() {
|
||||
pingInterval = setInterval(() => {
|
||||
if (sshConn && sshStream) {
|
||||
try {
|
||||
sshStream.write("\x00");
|
||||
} catch (e: unknown) {
|
||||
sshLogger.error(
|
||||
"SSH keepalive failed: " +
|
||||
(e instanceof Error ? e.message : "Unknown error"),
|
||||
);
|
||||
cleanupSSH();
|
||||
}
|
||||
} else if (!sshConn || !sshStream) {
|
||||
if (pingInterval) {
|
||||
clearInterval(pingInterval);
|
||||
pingInterval = null;
|
||||
}
|
||||
}
|
||||
}, 30000);
|
||||
}
|
||||
// Note: PTY-level keepalive (writing \x00 to the stream) was removed.
|
||||
// It was causing ^@ characters to appear in terminals with echoctl enabled.
|
||||
// SSH-level keepalive is configured via connectConfig (keepaliveInterval,
|
||||
// keepaliveCountMax, tcpKeepAlive), which handles connection health monitoring
|
||||
// without producing visible output on the terminal.
|
||||
//
|
||||
// See: https://github.com/Termix-SSH/Support/issues/232
|
||||
// See: https://github.com/Termix-SSH/Support/issues/309
|
||||
});
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import express from "express";
|
||||
import express, { type Response } from "express";
|
||||
import cors from "cors";
|
||||
import cookieParser from "cookie-parser";
|
||||
import { Client } from "ssh2";
|
||||
@@ -13,12 +13,16 @@ import type {
|
||||
TunnelStatus,
|
||||
VerificationData,
|
||||
ErrorType,
|
||||
AuthenticatedRequest,
|
||||
} from "../../types/index.js";
|
||||
import { CONNECTION_STATES } from "../../types/index.js";
|
||||
import { tunnelLogger, sshLogger } from "../utils/logger.js";
|
||||
import { SystemCrypto } from "../utils/system-crypto.js";
|
||||
import { SimpleDBOps } from "../utils/simple-db-ops.js";
|
||||
import { DataCrypto } from "../utils/data-crypto.js";
|
||||
import { createSocks5Connection } from "../utils/socks5-helper.js";
|
||||
import { AuthManager } from "../utils/auth-manager.js";
|
||||
import { PermissionManager } from "../utils/permission-manager.js";
|
||||
|
||||
const app = express();
|
||||
app.use(
|
||||
@@ -63,6 +67,10 @@ app.use(
|
||||
app.use(cookieParser());
|
||||
app.use(express.json());
|
||||
|
||||
const authManager = AuthManager.getInstance();
|
||||
const permissionManager = PermissionManager.getInstance();
|
||||
const authenticateJWT = authManager.createAuthMiddleware();
|
||||
|
||||
const activeTunnels = new Map<string, Client>();
|
||||
const retryCounters = new Map<string, number>();
|
||||
const connectionStatus = new Map<string, TunnelStatus>();
|
||||
@@ -77,6 +85,7 @@ const tunnelConnecting = new Set<string>();
|
||||
|
||||
const tunnelConfigs = new Map<string, TunnelConfig>();
|
||||
const activeTunnelProcesses = new Map<string, ChildProcess>();
|
||||
const pendingTunnelOperations = new Map<string, Promise<void>>();
|
||||
|
||||
function broadcastTunnelStatus(tunnelName: string, status: TunnelStatus): void {
|
||||
if (
|
||||
@@ -154,10 +163,75 @@ function getTunnelMarker(tunnelName: string) {
|
||||
return `TUNNEL_MARKER_${tunnelName.replace(/[^a-zA-Z0-9]/g, "_")}`;
|
||||
}
|
||||
|
||||
function cleanupTunnelResources(
|
||||
function normalizeTunnelName(
|
||||
hostId: number,
|
||||
tunnelIndex: number,
|
||||
displayName: string,
|
||||
sourcePort: number,
|
||||
endpointHost: string,
|
||||
endpointPort: number,
|
||||
): string {
|
||||
return `${hostId}::${tunnelIndex}::${displayName}::${sourcePort}::${endpointHost}::${endpointPort}`;
|
||||
}
|
||||
|
||||
function parseTunnelName(tunnelName: string): {
|
||||
hostId?: number;
|
||||
tunnelIndex?: number;
|
||||
displayName: string;
|
||||
sourcePort: string;
|
||||
endpointHost: string;
|
||||
endpointPort: string;
|
||||
isLegacyFormat: boolean;
|
||||
} {
|
||||
const parts = tunnelName.split("::");
|
||||
|
||||
if (parts.length === 6) {
|
||||
return {
|
||||
hostId: parseInt(parts[0]),
|
||||
tunnelIndex: parseInt(parts[1]),
|
||||
displayName: parts[2],
|
||||
sourcePort: parts[3],
|
||||
endpointHost: parts[4],
|
||||
endpointPort: parts[5],
|
||||
isLegacyFormat: false,
|
||||
};
|
||||
}
|
||||
|
||||
tunnelLogger.warn(`Legacy tunnel name format: ${tunnelName}`);
|
||||
|
||||
const legacyParts = tunnelName.split("_");
|
||||
return {
|
||||
displayName: legacyParts[0] || "unknown",
|
||||
sourcePort: legacyParts[legacyParts.length - 3] || "0",
|
||||
endpointHost: legacyParts[legacyParts.length - 2] || "unknown",
|
||||
endpointPort: legacyParts[legacyParts.length - 1] || "0",
|
||||
isLegacyFormat: true,
|
||||
};
|
||||
}
|
||||
|
||||
function validateTunnelConfig(
|
||||
tunnelName: string,
|
||||
tunnelConfig: TunnelConfig,
|
||||
): boolean {
|
||||
const parsed = parseTunnelName(tunnelName);
|
||||
|
||||
if (parsed.isLegacyFormat) {
|
||||
return true;
|
||||
}
|
||||
|
||||
return (
|
||||
parsed.hostId === tunnelConfig.sourceHostId &&
|
||||
parsed.tunnelIndex === tunnelConfig.tunnelIndex &&
|
||||
String(parsed.sourcePort) === String(tunnelConfig.sourcePort) &&
|
||||
parsed.endpointHost === tunnelConfig.endpointHost &&
|
||||
String(parsed.endpointPort) === String(tunnelConfig.endpointPort)
|
||||
);
|
||||
}
|
||||
|
||||
async function cleanupTunnelResources(
|
||||
tunnelName: string,
|
||||
forceCleanup = false,
|
||||
): void {
|
||||
): Promise<void> {
|
||||
if (cleanupInProgress.has(tunnelName)) {
|
||||
return;
|
||||
}
|
||||
@@ -170,13 +244,16 @@ function cleanupTunnelResources(
|
||||
|
||||
const tunnelConfig = tunnelConfigs.get(tunnelName);
|
||||
if (tunnelConfig) {
|
||||
killRemoteTunnelByMarker(tunnelConfig, tunnelName, (err) => {
|
||||
cleanupInProgress.delete(tunnelName);
|
||||
if (err) {
|
||||
tunnelLogger.error(
|
||||
`Failed to kill remote tunnel for '${tunnelName}': ${err.message}`,
|
||||
);
|
||||
}
|
||||
await new Promise<void>((resolve) => {
|
||||
killRemoteTunnelByMarker(tunnelConfig, tunnelName, (err) => {
|
||||
cleanupInProgress.delete(tunnelName);
|
||||
if (err) {
|
||||
tunnelLogger.error(
|
||||
`Failed to kill remote tunnel for '${tunnelName}': ${err.message}`,
|
||||
);
|
||||
}
|
||||
resolve();
|
||||
});
|
||||
});
|
||||
} else {
|
||||
cleanupInProgress.delete(tunnelName);
|
||||
@@ -272,11 +349,11 @@ function resetRetryState(tunnelName: string): void {
|
||||
});
|
||||
}
|
||||
|
||||
function handleDisconnect(
|
||||
async function handleDisconnect(
|
||||
tunnelName: string,
|
||||
tunnelConfig: TunnelConfig | null,
|
||||
shouldRetry = true,
|
||||
): void {
|
||||
): Promise<void> {
|
||||
if (tunnelVerifications.has(tunnelName)) {
|
||||
try {
|
||||
const verification = tunnelVerifications.get(tunnelName);
|
||||
@@ -286,7 +363,11 @@ function handleDisconnect(
|
||||
tunnelVerifications.delete(tunnelName);
|
||||
}
|
||||
|
||||
cleanupTunnelResources(tunnelName);
|
||||
while (cleanupInProgress.has(tunnelName)) {
|
||||
await new Promise((resolve) => setTimeout(resolve, 100));
|
||||
}
|
||||
|
||||
await cleanupTunnelResources(tunnelName);
|
||||
|
||||
if (manualDisconnects.has(tunnelName)) {
|
||||
resetRetryState(tunnelName);
|
||||
@@ -490,43 +571,76 @@ async function connectSSHTunnel(
|
||||
authMethod: tunnelConfig.sourceAuthMethod,
|
||||
};
|
||||
|
||||
if (tunnelConfig.sourceCredentialId && tunnelConfig.sourceUserId) {
|
||||
try {
|
||||
const userDataKey = DataCrypto.getUserDataKey(tunnelConfig.sourceUserId);
|
||||
if (userDataKey) {
|
||||
const credentials = await SimpleDBOps.select(
|
||||
getDb()
|
||||
.select()
|
||||
.from(sshCredentials)
|
||||
.where(
|
||||
and(
|
||||
eq(sshCredentials.id, tunnelConfig.sourceCredentialId),
|
||||
eq(sshCredentials.userId, tunnelConfig.sourceUserId),
|
||||
),
|
||||
),
|
||||
"ssh_credentials",
|
||||
tunnelConfig.sourceUserId,
|
||||
);
|
||||
const effectiveUserId =
|
||||
tunnelConfig.requestingUserId || tunnelConfig.sourceUserId;
|
||||
|
||||
if (credentials.length > 0) {
|
||||
const credential = credentials[0];
|
||||
resolvedSourceCredentials = {
|
||||
password: credential.password as string | undefined,
|
||||
sshKey: (credential.private_key ||
|
||||
credential.privateKey ||
|
||||
credential.key) as string | undefined,
|
||||
keyPassword: (credential.key_password || credential.keyPassword) as
|
||||
| string
|
||||
| undefined,
|
||||
keyType: (credential.key_type || credential.keyType) as
|
||||
| string
|
||||
| undefined,
|
||||
authMethod: (credential.auth_type || credential.authType) as string,
|
||||
};
|
||||
if (tunnelConfig.sourceCredentialId && effectiveUserId) {
|
||||
try {
|
||||
if (
|
||||
tunnelConfig.requestingUserId &&
|
||||
tunnelConfig.requestingUserId !== tunnelConfig.sourceUserId
|
||||
) {
|
||||
const { SharedCredentialManager } =
|
||||
await import("../utils/shared-credential-manager.js");
|
||||
const sharedCredManager = SharedCredentialManager.getInstance();
|
||||
|
||||
if (tunnelConfig.sourceHostId) {
|
||||
const sharedCred = await sharedCredManager.getSharedCredentialForUser(
|
||||
tunnelConfig.sourceHostId,
|
||||
tunnelConfig.requestingUserId,
|
||||
);
|
||||
|
||||
if (sharedCred) {
|
||||
resolvedSourceCredentials = {
|
||||
password: sharedCred.password,
|
||||
sshKey: sharedCred.key,
|
||||
keyPassword: sharedCred.keyPassword,
|
||||
keyType: sharedCred.keyType,
|
||||
authMethod: sharedCred.authType,
|
||||
};
|
||||
} else {
|
||||
const errorMessage = `Cannot connect tunnel '${tunnelName}': shared credentials not available`;
|
||||
tunnelLogger.error(errorMessage);
|
||||
broadcastTunnelStatus(tunnelName, {
|
||||
connected: false,
|
||||
status: CONNECTION_STATES.FAILED,
|
||||
reason: errorMessage,
|
||||
});
|
||||
return;
|
||||
}
|
||||
}
|
||||
} else {
|
||||
const userDataKey = DataCrypto.getUserDataKey(effectiveUserId);
|
||||
if (userDataKey) {
|
||||
const credentials = await SimpleDBOps.select(
|
||||
getDb()
|
||||
.select()
|
||||
.from(sshCredentials)
|
||||
.where(eq(sshCredentials.id, tunnelConfig.sourceCredentialId)),
|
||||
"ssh_credentials",
|
||||
effectiveUserId,
|
||||
);
|
||||
|
||||
if (credentials.length > 0) {
|
||||
const credential = credentials[0];
|
||||
resolvedSourceCredentials = {
|
||||
password: credential.password as string | undefined,
|
||||
sshKey: (credential.private_key ||
|
||||
credential.privateKey ||
|
||||
credential.key) as string | undefined,
|
||||
keyPassword: (credential.key_password ||
|
||||
credential.keyPassword) as string | undefined,
|
||||
keyType: (credential.key_type || credential.keyType) as
|
||||
| string
|
||||
| undefined,
|
||||
authMethod: (credential.auth_type ||
|
||||
credential.authType) as string,
|
||||
};
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
tunnelLogger.warn("Failed to resolve source credentials from database", {
|
||||
tunnelLogger.warn("Failed to resolve source credentials", {
|
||||
operation: "tunnel_connect",
|
||||
tunnelName,
|
||||
credentialId: tunnelConfig.sourceCredentialId,
|
||||
@@ -581,12 +695,7 @@ async function connectSSHTunnel(
|
||||
getDb()
|
||||
.select()
|
||||
.from(sshCredentials)
|
||||
.where(
|
||||
and(
|
||||
eq(sshCredentials.id, tunnelConfig.endpointCredentialId),
|
||||
eq(sshCredentials.userId, tunnelConfig.endpointUserId),
|
||||
),
|
||||
),
|
||||
.where(eq(sshCredentials.id, tunnelConfig.endpointCredentialId)),
|
||||
"ssh_credentials",
|
||||
tunnelConfig.endpointUserId,
|
||||
);
|
||||
@@ -719,15 +828,22 @@ async function connectSSHTunnel(
|
||||
return;
|
||||
}
|
||||
|
||||
const tunnelType = tunnelConfig.tunnelType || "remote";
|
||||
const tunnelFlag = tunnelType === "local" ? "-L" : "-R";
|
||||
const portMapping =
|
||||
tunnelType === "local"
|
||||
? `${tunnelConfig.sourcePort}:${tunnelConfig.endpointIP}:${tunnelConfig.endpointPort}`
|
||||
: `${tunnelConfig.endpointPort}:localhost:${tunnelConfig.sourcePort}`;
|
||||
|
||||
let tunnelCmd: string;
|
||||
if (
|
||||
resolvedEndpointCredentials.authMethod === "key" &&
|
||||
resolvedEndpointCredentials.sshKey
|
||||
) {
|
||||
const keyFilePath = `/tmp/tunnel_key_${tunnelName.replace(/[^a-zA-Z0-9]/g, "_")}`;
|
||||
tunnelCmd = `echo '${resolvedEndpointCredentials.sshKey}' > ${keyFilePath} && chmod 600 ${keyFilePath} && exec -a "${tunnelMarker}" ssh -i ${keyFilePath} -N -o StrictHostKeyChecking=no -o ExitOnForwardFailure=yes -o ServerAliveInterval=30 -o ServerAliveCountMax=3 -o GatewayPorts=yes -R ${tunnelConfig.endpointPort}:localhost:${tunnelConfig.sourcePort} ${tunnelConfig.endpointUsername}@${tunnelConfig.endpointIP} && rm -f ${keyFilePath}`;
|
||||
tunnelCmd = `echo '${resolvedEndpointCredentials.sshKey}' > ${keyFilePath} && chmod 600 ${keyFilePath} && exec -a "${tunnelMarker}" ssh -i ${keyFilePath} -N -o StrictHostKeyChecking=no -o ExitOnForwardFailure=yes -o ServerAliveInterval=30 -o ServerAliveCountMax=3 -o GatewayPorts=yes ${tunnelFlag} ${portMapping} ${tunnelConfig.endpointUsername}@${tunnelConfig.endpointIP} && rm -f ${keyFilePath}`;
|
||||
} else {
|
||||
tunnelCmd = `exec -a "${tunnelMarker}" sshpass -p '${resolvedEndpointCredentials.password || ""}' ssh -N -o StrictHostKeyChecking=no -o ExitOnForwardFailure=yes -o ServerAliveInterval=30 -o ServerAliveCountMax=3 -o GatewayPorts=yes -R ${tunnelConfig.endpointPort}:localhost:${tunnelConfig.sourcePort} ${tunnelConfig.endpointUsername}@${tunnelConfig.endpointIP}`;
|
||||
tunnelCmd = `exec -a "${tunnelMarker}" sshpass -p '${resolvedEndpointCredentials.password || ""}' ssh -N -o StrictHostKeyChecking=no -o ExitOnForwardFailure=yes -o ServerAliveInterval=30 -o ServerAliveCountMax=3 -o GatewayPorts=yes ${tunnelFlag} ${portMapping} ${tunnelConfig.endpointUsername}@${tunnelConfig.endpointIP}`;
|
||||
}
|
||||
|
||||
conn.exec(tunnelCmd, (err, stream) => {
|
||||
@@ -1016,6 +1132,51 @@ async function connectSSHTunnel(
|
||||
});
|
||||
}
|
||||
|
||||
if (
|
||||
tunnelConfig.useSocks5 &&
|
||||
(tunnelConfig.socks5Host ||
|
||||
(tunnelConfig.socks5ProxyChain &&
|
||||
tunnelConfig.socks5ProxyChain.length > 0))
|
||||
) {
|
||||
try {
|
||||
const socks5Socket = await createSocks5Connection(
|
||||
tunnelConfig.sourceIP,
|
||||
tunnelConfig.sourceSSHPort,
|
||||
{
|
||||
useSocks5: tunnelConfig.useSocks5,
|
||||
socks5Host: tunnelConfig.socks5Host,
|
||||
socks5Port: tunnelConfig.socks5Port,
|
||||
socks5Username: tunnelConfig.socks5Username,
|
||||
socks5Password: tunnelConfig.socks5Password,
|
||||
socks5ProxyChain: tunnelConfig.socks5ProxyChain,
|
||||
},
|
||||
);
|
||||
|
||||
if (socks5Socket) {
|
||||
connOptions.sock = socks5Socket;
|
||||
conn.connect(connOptions);
|
||||
return;
|
||||
}
|
||||
} catch (socks5Error) {
|
||||
tunnelLogger.error("SOCKS5 connection failed for tunnel", socks5Error, {
|
||||
operation: "socks5_connect",
|
||||
tunnelName,
|
||||
proxyHost: tunnelConfig.socks5Host,
|
||||
proxyPort: tunnelConfig.socks5Port || 1080,
|
||||
});
|
||||
broadcastTunnelStatus(tunnelName, {
|
||||
connected: false,
|
||||
status: CONNECTION_STATES.FAILED,
|
||||
reason:
|
||||
"SOCKS5 proxy connection failed: " +
|
||||
(socks5Error instanceof Error
|
||||
? socks5Error.message
|
||||
: "Unknown error"),
|
||||
});
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
conn.connect(connOptions);
|
||||
}
|
||||
|
||||
@@ -1042,12 +1203,7 @@ async function killRemoteTunnelByMarker(
|
||||
getDb()
|
||||
.select()
|
||||
.from(sshCredentials)
|
||||
.where(
|
||||
and(
|
||||
eq(sshCredentials.id, tunnelConfig.sourceCredentialId),
|
||||
eq(sshCredentials.userId, tunnelConfig.sourceUserId),
|
||||
),
|
||||
),
|
||||
.where(eq(sshCredentials.id, tunnelConfig.sourceCredentialId)),
|
||||
"ssh_credentials",
|
||||
tunnelConfig.sourceUserId,
|
||||
);
|
||||
@@ -1153,7 +1309,9 @@ async function killRemoteTunnelByMarker(
|
||||
}
|
||||
|
||||
conn.on("ready", () => {
|
||||
const checkCmd = `ps aux | grep -E '(${tunnelMarker}|ssh.*-R.*${tunnelConfig.endpointPort}:localhost:${tunnelConfig.sourcePort}.*${tunnelConfig.endpointUsername}@${tunnelConfig.endpointIP}|sshpass.*ssh.*-R.*${tunnelConfig.endpointPort})' | grep -v grep`;
|
||||
const tunnelType = tunnelConfig.tunnelType || "remote";
|
||||
const tunnelFlag = tunnelType === "local" ? "-L" : "-R";
|
||||
const checkCmd = `ps aux | grep -E '(${tunnelMarker}|ssh.*${tunnelFlag}.*${tunnelConfig.endpointPort}:.*:${tunnelConfig.sourcePort}.*${tunnelConfig.endpointUsername}@${tunnelConfig.endpointIP}|sshpass.*ssh.*${tunnelFlag})' | grep -v grep`;
|
||||
|
||||
conn.exec(checkCmd, (_err, stream) => {
|
||||
let foundProcesses = false;
|
||||
@@ -1174,8 +1332,8 @@ async function killRemoteTunnelByMarker(
|
||||
|
||||
const killCmds = [
|
||||
`pkill -TERM -f '${tunnelMarker}'`,
|
||||
`sleep 1 && pkill -f 'ssh.*-R.*${tunnelConfig.endpointPort}:localhost:${tunnelConfig.sourcePort}.*${tunnelConfig.endpointUsername}@${tunnelConfig.endpointIP}'`,
|
||||
`sleep 1 && pkill -f 'sshpass.*ssh.*-R.*${tunnelConfig.endpointPort}'`,
|
||||
`sleep 1 && pkill -f 'ssh.*${tunnelFlag}.*${tunnelConfig.endpointPort}:.*:${tunnelConfig.sourcePort}.*${tunnelConfig.endpointUsername}@${tunnelConfig.endpointIP}'`,
|
||||
`sleep 1 && pkill -f 'sshpass.*ssh.*${tunnelFlag}.*${tunnelConfig.endpointPort}'`,
|
||||
`sleep 2 && pkill -9 -f '${tunnelMarker}'`,
|
||||
];
|
||||
|
||||
@@ -1248,13 +1406,95 @@ async function killRemoteTunnelByMarker(
|
||||
callback(err);
|
||||
});
|
||||
|
||||
conn.connect(connOptions);
|
||||
if (
|
||||
tunnelConfig.useSocks5 &&
|
||||
(tunnelConfig.socks5Host ||
|
||||
(tunnelConfig.socks5ProxyChain &&
|
||||
tunnelConfig.socks5ProxyChain.length > 0))
|
||||
) {
|
||||
(async () => {
|
||||
try {
|
||||
const socks5Socket = await createSocks5Connection(
|
||||
tunnelConfig.sourceIP,
|
||||
tunnelConfig.sourceSSHPort,
|
||||
{
|
||||
useSocks5: tunnelConfig.useSocks5,
|
||||
socks5Host: tunnelConfig.socks5Host,
|
||||
socks5Port: tunnelConfig.socks5Port,
|
||||
socks5Username: tunnelConfig.socks5Username,
|
||||
socks5Password: tunnelConfig.socks5Password,
|
||||
socks5ProxyChain: tunnelConfig.socks5ProxyChain,
|
||||
},
|
||||
);
|
||||
|
||||
if (socks5Socket) {
|
||||
connOptions.sock = socks5Socket;
|
||||
conn.connect(connOptions);
|
||||
} else {
|
||||
callback(new Error("Failed to create SOCKS5 connection"));
|
||||
}
|
||||
} catch (socks5Error) {
|
||||
tunnelLogger.error(
|
||||
"SOCKS5 connection failed for killing tunnel",
|
||||
socks5Error,
|
||||
{
|
||||
operation: "socks5_connect_kill",
|
||||
tunnelName,
|
||||
proxyHost: tunnelConfig.socks5Host,
|
||||
proxyPort: tunnelConfig.socks5Port || 1080,
|
||||
},
|
||||
);
|
||||
callback(
|
||||
new Error(
|
||||
"SOCKS5 proxy connection failed: " +
|
||||
(socks5Error instanceof Error
|
||||
? socks5Error.message
|
||||
: "Unknown error"),
|
||||
),
|
||||
);
|
||||
}
|
||||
})();
|
||||
} else {
|
||||
conn.connect(connOptions);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @openapi
|
||||
* /ssh/tunnel/status:
|
||||
* get:
|
||||
* summary: Get all tunnel statuses
|
||||
* description: Retrieves the status of all SSH tunnels.
|
||||
* tags:
|
||||
* - SSH Tunnels
|
||||
* responses:
|
||||
* 200:
|
||||
* description: A list of all tunnel statuses.
|
||||
*/
|
||||
app.get("/ssh/tunnel/status", (req, res) => {
|
||||
res.json(getAllTunnelStatus());
|
||||
});
|
||||
|
||||
/**
|
||||
* @openapi
|
||||
* /ssh/tunnel/status/{tunnelName}:
|
||||
* get:
|
||||
* summary: Get tunnel status by name
|
||||
* description: Retrieves the status of a specific SSH tunnel by its name.
|
||||
* tags:
|
||||
* - SSH Tunnels
|
||||
* parameters:
|
||||
* - in: path
|
||||
* name: tunnelName
|
||||
* required: true
|
||||
* schema:
|
||||
* type: string
|
||||
* responses:
|
||||
* 200:
|
||||
* description: Tunnel status.
|
||||
* 404:
|
||||
* description: Tunnel not found.
|
||||
*/
|
||||
app.get("/ssh/tunnel/status/:tunnelName", (req, res) => {
|
||||
const { tunnelName } = req.params;
|
||||
const status = connectionStatus.get(tunnelName);
|
||||
@@ -1266,103 +1506,382 @@ app.get("/ssh/tunnel/status/:tunnelName", (req, res) => {
|
||||
res.json({ name: tunnelName, status });
|
||||
});
|
||||
|
||||
app.post("/ssh/tunnel/connect", (req, res) => {
|
||||
const tunnelConfig: TunnelConfig = req.body;
|
||||
/**
|
||||
* @openapi
|
||||
* /ssh/tunnel/connect:
|
||||
* post:
|
||||
* summary: Connect SSH tunnel
|
||||
* description: Establishes an SSH tunnel connection with the specified configuration.
|
||||
* tags:
|
||||
* - SSH Tunnels
|
||||
* requestBody:
|
||||
* required: true
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* type: object
|
||||
* properties:
|
||||
* name:
|
||||
* type: string
|
||||
* sourceHostId:
|
||||
* type: integer
|
||||
* tunnelIndex:
|
||||
* type: integer
|
||||
* responses:
|
||||
* 200:
|
||||
* description: Connection request received.
|
||||
* 400:
|
||||
* description: Invalid tunnel configuration.
|
||||
* 401:
|
||||
* description: Authentication required.
|
||||
* 403:
|
||||
* description: Access denied to this host.
|
||||
* 500:
|
||||
* description: Failed to connect tunnel.
|
||||
*/
|
||||
app.post(
|
||||
"/ssh/tunnel/connect",
|
||||
authenticateJWT,
|
||||
async (req: AuthenticatedRequest, res: Response) => {
|
||||
const tunnelConfig: TunnelConfig = req.body;
|
||||
const userId = req.userId;
|
||||
|
||||
if (!tunnelConfig || !tunnelConfig.name) {
|
||||
return res.status(400).json({ error: "Invalid tunnel configuration" });
|
||||
}
|
||||
if (!userId) {
|
||||
return res.status(401).json({ error: "Authentication required" });
|
||||
}
|
||||
|
||||
const tunnelName = tunnelConfig.name;
|
||||
if (!tunnelConfig || !tunnelConfig.name) {
|
||||
return res.status(400).json({ error: "Invalid tunnel configuration" });
|
||||
}
|
||||
|
||||
cleanupTunnelResources(tunnelName);
|
||||
const tunnelName = tunnelConfig.name;
|
||||
|
||||
manualDisconnects.delete(tunnelName);
|
||||
retryCounters.delete(tunnelName);
|
||||
retryExhaustedTunnels.delete(tunnelName);
|
||||
try {
|
||||
if (!validateTunnelConfig(tunnelName, tunnelConfig)) {
|
||||
tunnelLogger.error(`Tunnel config validation failed`, {
|
||||
operation: "tunnel_connect",
|
||||
tunnelName,
|
||||
configHostId: tunnelConfig.sourceHostId,
|
||||
configTunnelIndex: tunnelConfig.tunnelIndex,
|
||||
});
|
||||
return res.status(400).json({
|
||||
error: "Tunnel configuration does not match tunnel name",
|
||||
});
|
||||
}
|
||||
|
||||
tunnelConfigs.set(tunnelName, tunnelConfig);
|
||||
if (tunnelConfig.sourceHostId) {
|
||||
const accessInfo = await permissionManager.canAccessHost(
|
||||
userId,
|
||||
tunnelConfig.sourceHostId,
|
||||
"read",
|
||||
);
|
||||
|
||||
connectSSHTunnel(tunnelConfig, 0).catch((error) => {
|
||||
tunnelLogger.error(
|
||||
`Failed to connect tunnel ${tunnelConfig.name}: ${error instanceof Error ? error.message : "Unknown error"}`,
|
||||
);
|
||||
});
|
||||
if (!accessInfo.hasAccess) {
|
||||
tunnelLogger.warn("User attempted tunnel connect without access", {
|
||||
operation: "tunnel_connect_unauthorized",
|
||||
userId,
|
||||
hostId: tunnelConfig.sourceHostId,
|
||||
tunnelName,
|
||||
});
|
||||
return res.status(403).json({ error: "Access denied to this host" });
|
||||
}
|
||||
|
||||
res.json({ message: "Connection request received", tunnelName });
|
||||
});
|
||||
if (accessInfo.isShared && !accessInfo.isOwner) {
|
||||
tunnelConfig.requestingUserId = userId;
|
||||
}
|
||||
}
|
||||
|
||||
app.post("/ssh/tunnel/disconnect", (req, res) => {
|
||||
const { tunnelName } = req.body;
|
||||
if (pendingTunnelOperations.has(tunnelName)) {
|
||||
try {
|
||||
await pendingTunnelOperations.get(tunnelName);
|
||||
} catch (error) {
|
||||
tunnelLogger.warn(`Previous tunnel operation failed`, { tunnelName });
|
||||
}
|
||||
}
|
||||
|
||||
if (!tunnelName) {
|
||||
return res.status(400).json({ error: "Tunnel name required" });
|
||||
}
|
||||
const operation = (async () => {
|
||||
manualDisconnects.delete(tunnelName);
|
||||
retryCounters.delete(tunnelName);
|
||||
retryExhaustedTunnels.delete(tunnelName);
|
||||
|
||||
manualDisconnects.add(tunnelName);
|
||||
retryCounters.delete(tunnelName);
|
||||
retryExhaustedTunnels.delete(tunnelName);
|
||||
await cleanupTunnelResources(tunnelName);
|
||||
|
||||
if (activeRetryTimers.has(tunnelName)) {
|
||||
clearTimeout(activeRetryTimers.get(tunnelName)!);
|
||||
activeRetryTimers.delete(tunnelName);
|
||||
}
|
||||
if (tunnelConfigs.has(tunnelName)) {
|
||||
const existingConfig = tunnelConfigs.get(tunnelName);
|
||||
if (
|
||||
existingConfig &&
|
||||
(existingConfig.sourceHostId !== tunnelConfig.sourceHostId ||
|
||||
existingConfig.tunnelIndex !== tunnelConfig.tunnelIndex)
|
||||
) {
|
||||
throw new Error(`Tunnel name collision detected: ${tunnelName}`);
|
||||
}
|
||||
}
|
||||
|
||||
cleanupTunnelResources(tunnelName, true);
|
||||
if (!tunnelConfig.endpointIP || !tunnelConfig.endpointUsername) {
|
||||
try {
|
||||
const systemCrypto = SystemCrypto.getInstance();
|
||||
const internalAuthToken = await systemCrypto.getInternalAuthToken();
|
||||
|
||||
broadcastTunnelStatus(tunnelName, {
|
||||
connected: false,
|
||||
status: CONNECTION_STATES.DISCONNECTED,
|
||||
manualDisconnect: true,
|
||||
});
|
||||
const allHostsResponse = await axios.get(
|
||||
"http://localhost:30001/ssh/db/host/internal/all",
|
||||
{
|
||||
headers: {
|
||||
"Content-Type": "application/json",
|
||||
"X-Internal-Auth-Token": internalAuthToken,
|
||||
},
|
||||
},
|
||||
);
|
||||
|
||||
const tunnelConfig = tunnelConfigs.get(tunnelName) || null;
|
||||
handleDisconnect(tunnelName, tunnelConfig, false);
|
||||
const allHosts: SSHHost[] = allHostsResponse.data || [];
|
||||
const endpointHost = allHosts.find(
|
||||
(h) =>
|
||||
h.name === tunnelConfig.endpointHost ||
|
||||
`${h.username}@${h.ip}` === tunnelConfig.endpointHost,
|
||||
);
|
||||
|
||||
setTimeout(() => {
|
||||
manualDisconnects.delete(tunnelName);
|
||||
}, 5000);
|
||||
if (!endpointHost) {
|
||||
throw new Error(
|
||||
`Endpoint host '${tunnelConfig.endpointHost}' not found in database`,
|
||||
);
|
||||
}
|
||||
|
||||
res.json({ message: "Disconnect request received", tunnelName });
|
||||
});
|
||||
tunnelConfig.endpointIP = endpointHost.ip;
|
||||
tunnelConfig.endpointSSHPort = endpointHost.port;
|
||||
tunnelConfig.endpointUsername = endpointHost.username;
|
||||
tunnelConfig.endpointPassword = endpointHost.password;
|
||||
tunnelConfig.endpointAuthMethod = endpointHost.authType;
|
||||
tunnelConfig.endpointSSHKey = endpointHost.key;
|
||||
tunnelConfig.endpointKeyPassword = endpointHost.keyPassword;
|
||||
tunnelConfig.endpointKeyType = endpointHost.keyType;
|
||||
tunnelConfig.endpointCredentialId = endpointHost.credentialId;
|
||||
tunnelConfig.endpointUserId = endpointHost.userId;
|
||||
} catch (resolveError) {
|
||||
tunnelLogger.error(
|
||||
"Failed to resolve endpoint host",
|
||||
resolveError,
|
||||
{
|
||||
operation: "tunnel_connect_resolve_endpoint_failed",
|
||||
tunnelName,
|
||||
endpointHost: tunnelConfig.endpointHost,
|
||||
},
|
||||
);
|
||||
throw new Error(
|
||||
`Failed to resolve endpoint host: ${resolveError instanceof Error ? resolveError.message : "Unknown error"}`,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
app.post("/ssh/tunnel/cancel", (req, res) => {
|
||||
const { tunnelName } = req.body;
|
||||
tunnelConfigs.set(tunnelName, tunnelConfig);
|
||||
await connectSSHTunnel(tunnelConfig, 0);
|
||||
})();
|
||||
|
||||
if (!tunnelName) {
|
||||
return res.status(400).json({ error: "Tunnel name required" });
|
||||
}
|
||||
pendingTunnelOperations.set(tunnelName, operation);
|
||||
|
||||
retryCounters.delete(tunnelName);
|
||||
retryExhaustedTunnels.delete(tunnelName);
|
||||
res.json({ message: "Connection request received", tunnelName });
|
||||
|
||||
if (activeRetryTimers.has(tunnelName)) {
|
||||
clearTimeout(activeRetryTimers.get(tunnelName)!);
|
||||
activeRetryTimers.delete(tunnelName);
|
||||
}
|
||||
operation.finally(() => {
|
||||
pendingTunnelOperations.delete(tunnelName);
|
||||
});
|
||||
} catch (error) {
|
||||
tunnelLogger.error("Failed to process tunnel connect", error, {
|
||||
operation: "tunnel_connect",
|
||||
tunnelName,
|
||||
userId,
|
||||
});
|
||||
res.status(500).json({ error: "Failed to connect tunnel" });
|
||||
}
|
||||
},
|
||||
);
|
||||
|
||||
if (countdownIntervals.has(tunnelName)) {
|
||||
clearInterval(countdownIntervals.get(tunnelName)!);
|
||||
countdownIntervals.delete(tunnelName);
|
||||
}
|
||||
/**
|
||||
* @openapi
|
||||
* /ssh/tunnel/disconnect:
|
||||
* post:
|
||||
* summary: Disconnect SSH tunnel
|
||||
* description: Disconnects an active SSH tunnel.
|
||||
* tags:
|
||||
* - SSH Tunnels
|
||||
* requestBody:
|
||||
* required: true
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* type: object
|
||||
* properties:
|
||||
* tunnelName:
|
||||
* type: string
|
||||
* responses:
|
||||
* 200:
|
||||
* description: Disconnect request received.
|
||||
* 400:
|
||||
* description: Tunnel name required.
|
||||
* 401:
|
||||
* description: Authentication required.
|
||||
* 403:
|
||||
* description: Access denied.
|
||||
* 500:
|
||||
* description: Failed to disconnect tunnel.
|
||||
*/
|
||||
app.post(
|
||||
"/ssh/tunnel/disconnect",
|
||||
authenticateJWT,
|
||||
async (req: AuthenticatedRequest, res: Response) => {
|
||||
const { tunnelName } = req.body;
|
||||
const userId = req.userId;
|
||||
|
||||
cleanupTunnelResources(tunnelName, true);
|
||||
if (!userId) {
|
||||
return res.status(401).json({ error: "Authentication required" });
|
||||
}
|
||||
|
||||
broadcastTunnelStatus(tunnelName, {
|
||||
connected: false,
|
||||
status: CONNECTION_STATES.DISCONNECTED,
|
||||
manualDisconnect: true,
|
||||
});
|
||||
if (!tunnelName) {
|
||||
return res.status(400).json({ error: "Tunnel name required" });
|
||||
}
|
||||
|
||||
const tunnelConfig = tunnelConfigs.get(tunnelName) || null;
|
||||
handleDisconnect(tunnelName, tunnelConfig, false);
|
||||
try {
|
||||
const config = tunnelConfigs.get(tunnelName);
|
||||
if (config && config.sourceHostId) {
|
||||
const accessInfo = await permissionManager.canAccessHost(
|
||||
userId,
|
||||
config.sourceHostId,
|
||||
"read",
|
||||
);
|
||||
if (!accessInfo.hasAccess) {
|
||||
return res.status(403).json({ error: "Access denied" });
|
||||
}
|
||||
}
|
||||
|
||||
setTimeout(() => {
|
||||
manualDisconnects.delete(tunnelName);
|
||||
}, 5000);
|
||||
manualDisconnects.add(tunnelName);
|
||||
retryCounters.delete(tunnelName);
|
||||
retryExhaustedTunnels.delete(tunnelName);
|
||||
|
||||
res.json({ message: "Cancel request received", tunnelName });
|
||||
});
|
||||
if (activeRetryTimers.has(tunnelName)) {
|
||||
clearTimeout(activeRetryTimers.get(tunnelName)!);
|
||||
activeRetryTimers.delete(tunnelName);
|
||||
}
|
||||
|
||||
await cleanupTunnelResources(tunnelName, true);
|
||||
|
||||
broadcastTunnelStatus(tunnelName, {
|
||||
connected: false,
|
||||
status: CONNECTION_STATES.DISCONNECTED,
|
||||
manualDisconnect: true,
|
||||
});
|
||||
|
||||
const tunnelConfig = tunnelConfigs.get(tunnelName) || null;
|
||||
handleDisconnect(tunnelName, tunnelConfig, false);
|
||||
|
||||
setTimeout(() => {
|
||||
manualDisconnects.delete(tunnelName);
|
||||
}, 5000);
|
||||
|
||||
res.json({ message: "Disconnect request received", tunnelName });
|
||||
} catch (error) {
|
||||
tunnelLogger.error("Failed to disconnect tunnel", error, {
|
||||
operation: "tunnel_disconnect",
|
||||
tunnelName,
|
||||
userId,
|
||||
});
|
||||
res.status(500).json({ error: "Failed to disconnect tunnel" });
|
||||
}
|
||||
},
|
||||
);
|
||||
|
||||
/**
|
||||
* @openapi
|
||||
* /ssh/tunnel/cancel:
|
||||
* post:
|
||||
* summary: Cancel tunnel retry
|
||||
* description: Cancels the retry mechanism for a failed SSH tunnel connection.
|
||||
* tags:
|
||||
* - SSH Tunnels
|
||||
* requestBody:
|
||||
* required: true
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* type: object
|
||||
* properties:
|
||||
* tunnelName:
|
||||
* type: string
|
||||
* responses:
|
||||
* 200:
|
||||
* description: Cancel request received.
|
||||
* 400:
|
||||
* description: Tunnel name required.
|
||||
* 401:
|
||||
* description: Authentication required.
|
||||
* 403:
|
||||
* description: Access denied.
|
||||
* 500:
|
||||
* description: Failed to cancel tunnel retry.
|
||||
*/
|
||||
app.post(
|
||||
"/ssh/tunnel/cancel",
|
||||
authenticateJWT,
|
||||
async (req: AuthenticatedRequest, res: Response) => {
|
||||
const { tunnelName } = req.body;
|
||||
const userId = req.userId;
|
||||
|
||||
if (!userId) {
|
||||
return res.status(401).json({ error: "Authentication required" });
|
||||
}
|
||||
|
||||
if (!tunnelName) {
|
||||
return res.status(400).json({ error: "Tunnel name required" });
|
||||
}
|
||||
|
||||
try {
|
||||
const config = tunnelConfigs.get(tunnelName);
|
||||
if (config && config.sourceHostId) {
|
||||
const accessInfo = await permissionManager.canAccessHost(
|
||||
userId,
|
||||
config.sourceHostId,
|
||||
"read",
|
||||
);
|
||||
if (!accessInfo.hasAccess) {
|
||||
return res.status(403).json({ error: "Access denied" });
|
||||
}
|
||||
}
|
||||
|
||||
retryCounters.delete(tunnelName);
|
||||
retryExhaustedTunnels.delete(tunnelName);
|
||||
|
||||
if (activeRetryTimers.has(tunnelName)) {
|
||||
clearTimeout(activeRetryTimers.get(tunnelName)!);
|
||||
activeRetryTimers.delete(tunnelName);
|
||||
}
|
||||
|
||||
if (countdownIntervals.has(tunnelName)) {
|
||||
clearInterval(countdownIntervals.get(tunnelName)!);
|
||||
countdownIntervals.delete(tunnelName);
|
||||
}
|
||||
|
||||
await cleanupTunnelResources(tunnelName, true);
|
||||
|
||||
broadcastTunnelStatus(tunnelName, {
|
||||
connected: false,
|
||||
status: CONNECTION_STATES.DISCONNECTED,
|
||||
manualDisconnect: true,
|
||||
});
|
||||
|
||||
const tunnelConfig = tunnelConfigs.get(tunnelName) || null;
|
||||
handleDisconnect(tunnelName, tunnelConfig, false);
|
||||
|
||||
setTimeout(() => {
|
||||
manualDisconnects.delete(tunnelName);
|
||||
}, 5000);
|
||||
|
||||
res.json({ message: "Cancel request received", tunnelName });
|
||||
} catch (error) {
|
||||
tunnelLogger.error("Failed to cancel tunnel retry", error, {
|
||||
operation: "tunnel_cancel",
|
||||
tunnelName,
|
||||
userId,
|
||||
});
|
||||
res.status(500).json({ error: "Failed to cancel tunnel retry" });
|
||||
}
|
||||
},
|
||||
);
|
||||
|
||||
async function initializeAutoStartTunnels(): Promise<void> {
|
||||
try {
|
||||
@@ -1408,12 +1927,20 @@ async function initializeAutoStartTunnels(): Promise<void> {
|
||||
);
|
||||
|
||||
if (endpointHost) {
|
||||
const tunnelIndex =
|
||||
host.tunnelConnections.indexOf(tunnelConnection);
|
||||
const tunnelConfig: TunnelConfig = {
|
||||
name: `${host.name || `${host.username}@${host.ip}`}_${
|
||||
tunnelConnection.sourcePort
|
||||
}_${tunnelConnection.endpointHost}_${
|
||||
tunnelConnection.endpointPort
|
||||
}`,
|
||||
name: normalizeTunnelName(
|
||||
host.id,
|
||||
tunnelIndex,
|
||||
host.name || `${host.username}@${host.ip}`,
|
||||
tunnelConnection.sourcePort,
|
||||
tunnelConnection.endpointHost,
|
||||
tunnelConnection.endpointPort,
|
||||
),
|
||||
tunnelType: tunnelConnection.tunnelType || "remote",
|
||||
sourceHostId: host.id,
|
||||
tunnelIndex: tunnelIndex,
|
||||
hostName: host.name || `${host.username}@${host.ip}`,
|
||||
sourceIP: host.ip,
|
||||
sourceSSHPort: host.port,
|
||||
@@ -1429,6 +1956,7 @@ async function initializeAutoStartTunnels(): Promise<void> {
|
||||
endpointIP: endpointHost.ip,
|
||||
endpointSSHPort: endpointHost.port,
|
||||
endpointUsername: endpointHost.username,
|
||||
endpointHost: tunnelConnection.endpointHost,
|
||||
endpointPassword:
|
||||
tunnelConnection.endpointPassword ||
|
||||
endpointHost.autostartPassword ||
|
||||
@@ -1453,6 +1981,11 @@ async function initializeAutoStartTunnels(): Promise<void> {
|
||||
retryInterval: tunnelConnection.retryInterval * 1000,
|
||||
autoStart: tunnelConnection.autoStart,
|
||||
isPinned: host.pin,
|
||||
useSocks5: host.useSocks5,
|
||||
socks5Host: host.socks5Host,
|
||||
socks5Port: host.socks5Port,
|
||||
socks5Username: host.socks5Username,
|
||||
socks5Password: host.socks5Password,
|
||||
};
|
||||
|
||||
autoStartTunnels.push(tunnelConfig);
|
||||
|
||||
@@ -3,28 +3,87 @@ import type { Client } from "ssh2";
|
||||
export function execCommand(
|
||||
client: Client,
|
||||
command: string,
|
||||
timeoutMs = 30000,
|
||||
): Promise<{
|
||||
stdout: string;
|
||||
stderr: string;
|
||||
code: number | null;
|
||||
}> {
|
||||
return new Promise((resolve, reject) => {
|
||||
client.exec(command, { pty: false }, (err, stream) => {
|
||||
if (err) return reject(err);
|
||||
let settled = false;
|
||||
let stream: any = null;
|
||||
|
||||
const timeout = setTimeout(() => {
|
||||
if (!settled) {
|
||||
settled = true;
|
||||
cleanup();
|
||||
reject(new Error(`Command timeout after ${timeoutMs}ms: ${command}`));
|
||||
}
|
||||
}, timeoutMs);
|
||||
|
||||
const cleanup = () => {
|
||||
clearTimeout(timeout);
|
||||
if (stream) {
|
||||
try {
|
||||
stream.removeAllListeners();
|
||||
if (stream.stderr) {
|
||||
stream.stderr.removeAllListeners();
|
||||
}
|
||||
stream.destroy();
|
||||
} catch (error) {
|
||||
// Ignore cleanup errors
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
client.exec(command, { pty: false }, (err, _stream) => {
|
||||
if (err) {
|
||||
if (!settled) {
|
||||
settled = true;
|
||||
cleanup();
|
||||
reject(err);
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
stream = _stream;
|
||||
let stdout = "";
|
||||
let stderr = "";
|
||||
let exitCode: number | null = null;
|
||||
|
||||
stream
|
||||
.on("close", (code: number | undefined) => {
|
||||
exitCode = typeof code === "number" ? code : null;
|
||||
resolve({ stdout, stderr, code: exitCode });
|
||||
if (!settled) {
|
||||
settled = true;
|
||||
exitCode = typeof code === "number" ? code : null;
|
||||
cleanup();
|
||||
resolve({ stdout, stderr, code: exitCode });
|
||||
}
|
||||
})
|
||||
.on("data", (data: Buffer) => {
|
||||
stdout += data.toString("utf8");
|
||||
})
|
||||
.stderr.on("data", (data: Buffer) => {
|
||||
stderr += data.toString("utf8");
|
||||
.on("error", (streamErr: Error) => {
|
||||
if (!settled) {
|
||||
settled = true;
|
||||
cleanup();
|
||||
reject(streamErr);
|
||||
}
|
||||
});
|
||||
|
||||
if (stream.stderr) {
|
||||
stream.stderr
|
||||
.on("data", (data: Buffer) => {
|
||||
stderr += data.toString("utf8");
|
||||
})
|
||||
.on("error", (stderrErr: Error) => {
|
||||
if (!settled) {
|
||||
settled = true;
|
||||
cleanup();
|
||||
reject(stderrErr);
|
||||
}
|
||||
});
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
@@ -26,12 +26,20 @@ export async function collectCpuMetrics(client: Client): Promise<{
|
||||
let loadTriplet: [number, number, number] | null = null;
|
||||
|
||||
try {
|
||||
const [stat1, loadAvgOut, coresOut] = await Promise.all([
|
||||
execCommand(client, "cat /proc/stat"),
|
||||
execCommand(client, "cat /proc/loadavg"),
|
||||
execCommand(
|
||||
client,
|
||||
"nproc 2>/dev/null || grep -c ^processor /proc/cpuinfo",
|
||||
const [stat1, loadAvgOut, coresOut] = await Promise.race([
|
||||
Promise.all([
|
||||
execCommand(client, "cat /proc/stat"),
|
||||
execCommand(client, "cat /proc/loadavg"),
|
||||
execCommand(
|
||||
client,
|
||||
"nproc 2>/dev/null || grep -c ^processor /proc/cpuinfo",
|
||||
),
|
||||
]),
|
||||
new Promise<never>((_, reject) =>
|
||||
setTimeout(
|
||||
() => reject(new Error("CPU metrics collection timeout")),
|
||||
25000,
|
||||
),
|
||||
),
|
||||
]);
|
||||
|
||||
|
||||
254
src/backend/ssh/widgets/firewall-collector.ts
Normal file
254
src/backend/ssh/widgets/firewall-collector.ts
Normal file
@@ -0,0 +1,254 @@
|
||||
import type { Client } from "ssh2";
|
||||
import { execCommand } from "./common-utils.js";
|
||||
import type {
|
||||
FirewallMetrics,
|
||||
FirewallChain,
|
||||
FirewallRule,
|
||||
} from "../../../types/stats-widgets.js";
|
||||
|
||||
function parseIptablesRule(line: string): FirewallRule | null {
|
||||
if (!line.startsWith("-A ")) return null;
|
||||
|
||||
const rule: FirewallRule = {
|
||||
chain: "",
|
||||
target: "",
|
||||
protocol: "all",
|
||||
source: "0.0.0.0/0",
|
||||
destination: "0.0.0.0/0",
|
||||
};
|
||||
|
||||
const chainMatch = line.match(/^-A\s+(\S+)/);
|
||||
if (chainMatch) {
|
||||
rule.chain = chainMatch[1];
|
||||
}
|
||||
|
||||
const targetMatch = line.match(/-j\s+(\S+)/);
|
||||
if (targetMatch) {
|
||||
rule.target = targetMatch[1];
|
||||
}
|
||||
|
||||
const protocolMatch = line.match(/-p\s+(\S+)/);
|
||||
if (protocolMatch) {
|
||||
rule.protocol = protocolMatch[1];
|
||||
}
|
||||
|
||||
const sourceMatch = line.match(/-s\s+(\S+)/);
|
||||
if (sourceMatch) {
|
||||
rule.source = sourceMatch[1];
|
||||
}
|
||||
|
||||
const destMatch = line.match(/-d\s+(\S+)/);
|
||||
if (destMatch) {
|
||||
rule.destination = destMatch[1];
|
||||
}
|
||||
|
||||
const dportMatch = line.match(/--dport\s+(\S+)/);
|
||||
if (dportMatch) {
|
||||
rule.dport = dportMatch[1];
|
||||
}
|
||||
|
||||
const sportMatch = line.match(/--sport\s+(\S+)/);
|
||||
if (sportMatch) {
|
||||
rule.sport = sportMatch[1];
|
||||
}
|
||||
|
||||
const stateMatch = line.match(/--state\s+(\S+)/);
|
||||
if (stateMatch) {
|
||||
rule.state = stateMatch[1];
|
||||
}
|
||||
|
||||
const interfaceMatch = line.match(/-i\s+(\S+)/);
|
||||
if (interfaceMatch) {
|
||||
rule.interface = interfaceMatch[1];
|
||||
}
|
||||
|
||||
return rule;
|
||||
}
|
||||
|
||||
function parseIptablesOutput(output: string): FirewallChain[] {
|
||||
const chains: Map<string, FirewallChain> = new Map();
|
||||
const lines = output.split("\n");
|
||||
|
||||
for (const line of lines) {
|
||||
const trimmed = line.trim();
|
||||
|
||||
const policyMatch = trimmed.match(/^:(\S+)\s+(\S+)/);
|
||||
if (policyMatch) {
|
||||
const [, chainName, policy] = policyMatch;
|
||||
chains.set(chainName, {
|
||||
name: chainName,
|
||||
policy: policy,
|
||||
rules: [],
|
||||
});
|
||||
continue;
|
||||
}
|
||||
|
||||
const rule = parseIptablesRule(trimmed);
|
||||
if (rule) {
|
||||
let chain = chains.get(rule.chain);
|
||||
if (!chain) {
|
||||
chain = {
|
||||
name: rule.chain,
|
||||
policy: "ACCEPT",
|
||||
rules: [],
|
||||
};
|
||||
chains.set(rule.chain, chain);
|
||||
}
|
||||
chain.rules.push(rule);
|
||||
}
|
||||
}
|
||||
|
||||
return Array.from(chains.values());
|
||||
}
|
||||
|
||||
/**
 * Parse `nft list ruleset` text output into FirewallChain objects.
 *
 * Line-oriented, stateful scan: a "chain NAME {" line opens a chain,
 * a "policy X" line sets its default verdict, and "}" closes it. Any
 * other non-empty line inside an open chain is heuristically probed for
 * a verdict (accept/drop/reject) plus common selectors; lines with no
 * recognized verdict are discarded.
 */
function parseNftablesOutput(output: string): FirewallChain[] {
  const chains: FirewallChain[] = [];
  let currentChain: FirewallChain | null = null;

  const lines = output.split("\n");

  for (const line of lines) {
    const trimmed = line.trim();

    // A new chain header implicitly closes the previous chain, so output
    // with a missing closing brace still yields the earlier chain.
    const chainMatch = trimmed.match(
      /chain\s+(\S+)\s*\{?\s*(?:type\s+\S+\s+hook\s+(\S+))?/,
    );
    if (chainMatch) {
      if (currentChain) {
        chains.push(currentChain);
      }
      currentChain = {
        name: chainMatch[1].toUpperCase(),
        policy: "ACCEPT", // default until an explicit "policy" line appears
        rules: [],
      };
      continue;
    }

    if (currentChain && trimmed.startsWith("policy ")) {
      const policyMatch = trimmed.match(/policy\s+(\S+)/);
      if (policyMatch) {
        currentChain.policy = policyMatch[1].toUpperCase();
      }
      continue;
    }

    // Any other content line inside a chain is a candidate rule; start from
    // permissive defaults and overwrite whatever selectors are present.
    if (currentChain && trimmed && !trimmed.startsWith("}")) {
      const rule: FirewallRule = {
        chain: currentChain.name,
        target: "",
        protocol: "all",
        source: "0.0.0.0/0",
        destination: "0.0.0.0/0",
      };

      // Substring checks, not token matches — "accept" anywhere in the
      // line counts. NOTE(review): a comment containing "accept" would
      // also match; acceptable for a monitoring heuristic.
      if (trimmed.includes("accept")) rule.target = "ACCEPT";
      else if (trimmed.includes("drop")) rule.target = "DROP";
      else if (trimmed.includes("reject")) rule.target = "REJECT";

      const tcpMatch = trimmed.match(/tcp\s+dport\s+(\S+)/);
      if (tcpMatch) {
        rule.protocol = "tcp";
        rule.dport = tcpMatch[1];
      }

      const udpMatch = trimmed.match(/udp\s+dport\s+(\S+)/);
      if (udpMatch) {
        rule.protocol = "udp";
        rule.dport = udpMatch[1];
      }

      const saddrMatch = trimmed.match(/saddr\s+(\S+)/);
      if (saddrMatch) {
        rule.source = saddrMatch[1];
      }

      const daddrMatch = trimmed.match(/daddr\s+(\S+)/);
      if (daddrMatch) {
        rule.destination = daddrMatch[1];
      }

      const iifMatch = trimmed.match(/iif\s+"?(\S+)"?/);
      if (iifMatch) {
        rule.interface = iifMatch[1].replace(/"/g, "");
      }

      const ctStateMatch = trimmed.match(/ct\s+state\s+(\S+)/);
      if (ctStateMatch) {
        rule.state = ctStateMatch[1].toUpperCase();
      }

      // Only keep lines where a verdict was actually recognized.
      if (rule.target) {
        currentChain.rules.push(rule);
      }
    }

    if (trimmed === "}") {
      if (currentChain) {
        chains.push(currentChain);
        currentChain = null;
      }
    }
  }

  // Flush a chain left open at end of output.
  if (currentChain) {
    chains.push(currentChain);
  }

  return chains;
}
|
||||
|
||||
export async function collectFirewallMetrics(
|
||||
client: Client,
|
||||
): Promise<FirewallMetrics> {
|
||||
try {
|
||||
const iptablesResult = await execCommand(
|
||||
client,
|
||||
"iptables-save 2>/dev/null",
|
||||
15000,
|
||||
);
|
||||
|
||||
if (iptablesResult.stdout && iptablesResult.stdout.includes("*filter")) {
|
||||
const chains = parseIptablesOutput(iptablesResult.stdout);
|
||||
const hasRules = chains.some((c) => c.rules.length > 0);
|
||||
|
||||
return {
|
||||
type: "iptables",
|
||||
status: hasRules ? "active" : "inactive",
|
||||
chains: chains.filter(
|
||||
(c) =>
|
||||
c.name === "INPUT" || c.name === "OUTPUT" || c.name === "FORWARD",
|
||||
),
|
||||
};
|
||||
}
|
||||
|
||||
const nftResult = await execCommand(
|
||||
client,
|
||||
"nft list ruleset 2>/dev/null",
|
||||
15000,
|
||||
);
|
||||
|
||||
if (nftResult.stdout && nftResult.stdout.trim()) {
|
||||
const chains = parseNftablesOutput(nftResult.stdout);
|
||||
const hasRules = chains.some((c) => c.rules.length > 0);
|
||||
|
||||
return {
|
||||
type: "nftables",
|
||||
status: hasRules ? "active" : "inactive",
|
||||
chains,
|
||||
};
|
||||
}
|
||||
|
||||
return {
|
||||
type: "none",
|
||||
status: "unknown",
|
||||
chains: [],
|
||||
};
|
||||
} catch {
|
||||
return {
|
||||
type: "none",
|
||||
status: "unknown",
|
||||
chains: [],
|
||||
};
|
||||
}
|
||||
}
|
||||
@@ -1,5 +1,6 @@
|
||||
import type { Client } from "ssh2";
|
||||
import { execCommand } from "./common-utils.js";
|
||||
import { statsLogger } from "../../utils/logger.js";
|
||||
|
||||
export interface LoginRecord {
|
||||
user: string;
|
||||
@@ -46,10 +47,20 @@ export async function collectLoginStats(client: Client): Promise<LoginStats> {
|
||||
const timeStr = parts.slice(timeStart, timeStart + 5).join(" ");
|
||||
|
||||
if (user && user !== "wtmp" && tty !== "system") {
|
||||
let parsedTime: string;
|
||||
try {
|
||||
const date = new Date(timeStr);
|
||||
parsedTime = isNaN(date.getTime())
|
||||
? new Date().toISOString()
|
||||
: date.toISOString();
|
||||
} catch (e) {
|
||||
parsedTime = new Date().toISOString();
|
||||
}
|
||||
|
||||
recentLogins.push({
|
||||
user,
|
||||
ip,
|
||||
time: new Date(timeStr).toISOString(),
|
||||
time: parsedTime,
|
||||
status: "success",
|
||||
});
|
||||
if (ip !== "local") {
|
||||
@@ -59,9 +70,7 @@ export async function collectLoginStats(client: Client): Promise<LoginStats> {
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch (e) {
|
||||
// Ignore errors
|
||||
}
|
||||
} catch (e) {}
|
||||
|
||||
try {
|
||||
const failedOut = await execCommand(
|
||||
@@ -96,12 +105,20 @@ export async function collectLoginStats(client: Client): Promise<LoginStats> {
|
||||
}
|
||||
|
||||
if (user && ip) {
|
||||
let parsedTime: string;
|
||||
try {
|
||||
const date = timeStr ? new Date(timeStr) : new Date();
|
||||
parsedTime = isNaN(date.getTime())
|
||||
? new Date().toISOString()
|
||||
: date.toISOString();
|
||||
} catch (e) {
|
||||
parsedTime = new Date().toISOString();
|
||||
}
|
||||
|
||||
failedLogins.push({
|
||||
user,
|
||||
ip,
|
||||
time: timeStr
|
||||
? new Date(timeStr).toISOString()
|
||||
: new Date().toISOString(),
|
||||
time: parsedTime,
|
||||
status: "failed",
|
||||
});
|
||||
if (ip !== "unknown") {
|
||||
@@ -109,9 +126,7 @@ export async function collectLoginStats(client: Client): Promise<LoginStats> {
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch (e) {
|
||||
// Ignore errors
|
||||
}
|
||||
} catch (e) {}
|
||||
|
||||
return {
|
||||
recentLogins: recentLogins.slice(0, 10),
|
||||
|
||||
@@ -68,12 +68,7 @@ export async function collectNetworkMetrics(client: Client): Promise<{
|
||||
txBytes: null,
|
||||
});
|
||||
}
|
||||
} catch (e) {
|
||||
statsLogger.debug("Failed to collect network interface stats", {
|
||||
operation: "network_stats_failed",
|
||||
error: e instanceof Error ? e.message : String(e),
|
||||
});
|
||||
}
|
||||
} catch (e) {}
|
||||
|
||||
return { interfaces };
|
||||
}
|
||||
|
||||
155
src/backend/ssh/widgets/ports-collector.ts
Normal file
155
src/backend/ssh/widgets/ports-collector.ts
Normal file
@@ -0,0 +1,155 @@
|
||||
import type { Client } from "ssh2";
|
||||
import { execCommand } from "./common-utils.js";
|
||||
import type { PortsMetrics, ListeningPort } from "../../../types/stats-widgets.js";
|
||||
|
||||
function parseSsOutput(output: string): ListeningPort[] {
|
||||
const ports: ListeningPort[] = [];
|
||||
const lines = output.split("\n").slice(1);
|
||||
|
||||
for (const line of lines) {
|
||||
const trimmed = line.trim();
|
||||
if (!trimmed) continue;
|
||||
|
||||
const parts = trimmed.split(/\s+/);
|
||||
if (parts.length < 5) continue;
|
||||
|
||||
const protocol = parts[0]?.toLowerCase();
|
||||
if (protocol !== "tcp" && protocol !== "udp") continue;
|
||||
|
||||
const state = parts[1];
|
||||
const localAddr = parts[4];
|
||||
|
||||
if (!localAddr) continue;
|
||||
|
||||
const lastColon = localAddr.lastIndexOf(":");
|
||||
if (lastColon === -1) continue;
|
||||
|
||||
const address = localAddr.substring(0, lastColon);
|
||||
const portStr = localAddr.substring(lastColon + 1);
|
||||
const port = parseInt(portStr, 10);
|
||||
|
||||
if (isNaN(port)) continue;
|
||||
|
||||
const portEntry: ListeningPort = {
|
||||
protocol: protocol as "tcp" | "udp",
|
||||
localAddress: address.replace(/^\[|\]$/g, ""),
|
||||
localPort: port,
|
||||
state: protocol === "tcp" ? state : undefined,
|
||||
};
|
||||
|
||||
const processInfo = parts[6];
|
||||
if (processInfo && processInfo.startsWith("users:")) {
|
||||
const pidMatch = processInfo.match(/pid=(\d+)/);
|
||||
const nameMatch = processInfo.match(/\("([^"]+)"/);
|
||||
if (pidMatch) portEntry.pid = parseInt(pidMatch[1], 10);
|
||||
if (nameMatch) portEntry.process = nameMatch[1];
|
||||
}
|
||||
|
||||
ports.push(portEntry);
|
||||
}
|
||||
|
||||
return ports;
|
||||
}
|
||||
|
||||
function parseNetstatOutput(output: string): ListeningPort[] {
|
||||
const ports: ListeningPort[] = [];
|
||||
const lines = output.split("\n");
|
||||
|
||||
for (const line of lines) {
|
||||
const trimmed = line.trim();
|
||||
if (!trimmed) continue;
|
||||
|
||||
const parts = trimmed.split(/\s+/);
|
||||
if (parts.length < 4) continue;
|
||||
|
||||
const proto = parts[0]?.toLowerCase();
|
||||
if (!proto) continue;
|
||||
|
||||
let protocol: "tcp" | "udp";
|
||||
if (proto.startsWith("tcp")) {
|
||||
protocol = "tcp";
|
||||
} else if (proto.startsWith("udp")) {
|
||||
protocol = "udp";
|
||||
} else {
|
||||
continue;
|
||||
}
|
||||
|
||||
const localAddr = parts[3];
|
||||
if (!localAddr) continue;
|
||||
|
||||
const lastColon = localAddr.lastIndexOf(":");
|
||||
if (lastColon === -1) continue;
|
||||
|
||||
const address = localAddr.substring(0, lastColon);
|
||||
const portStr = localAddr.substring(lastColon + 1);
|
||||
const port = parseInt(portStr, 10);
|
||||
|
||||
if (isNaN(port)) continue;
|
||||
|
||||
const portEntry: ListeningPort = {
|
||||
protocol,
|
||||
localAddress: address,
|
||||
localPort: port,
|
||||
};
|
||||
|
||||
if (protocol === "tcp" && parts.length >= 6) {
|
||||
portEntry.state = parts[5];
|
||||
}
|
||||
|
||||
const pidProgram = parts[parts.length - 1];
|
||||
if (pidProgram && pidProgram.includes("/")) {
|
||||
const [pidStr, process] = pidProgram.split("/");
|
||||
const pid = parseInt(pidStr, 10);
|
||||
if (!isNaN(pid)) portEntry.pid = pid;
|
||||
if (process) portEntry.process = process;
|
||||
}
|
||||
|
||||
ports.push(portEntry);
|
||||
}
|
||||
|
||||
return ports;
|
||||
}
|
||||
|
||||
export async function collectPortsMetrics(
|
||||
client: Client,
|
||||
): Promise<PortsMetrics> {
|
||||
try {
|
||||
const ssResult = await execCommand(
|
||||
client,
|
||||
"ss -tulnp 2>/dev/null",
|
||||
15000,
|
||||
);
|
||||
|
||||
if (ssResult.stdout && ssResult.stdout.includes("Local")) {
|
||||
const ports = parseSsOutput(ssResult.stdout);
|
||||
return {
|
||||
source: "ss",
|
||||
ports: ports.sort((a, b) => a.localPort - b.localPort),
|
||||
};
|
||||
}
|
||||
|
||||
const netstatResult = await execCommand(
|
||||
client,
|
||||
"netstat -tulnp 2>/dev/null",
|
||||
15000,
|
||||
);
|
||||
|
||||
if (netstatResult.stdout && netstatResult.stdout.includes("Local")) {
|
||||
const ports = parseNetstatOutput(netstatResult.stdout);
|
||||
return {
|
||||
source: "netstat",
|
||||
ports: ports.sort((a, b) => a.localPort - b.localPort),
|
||||
};
|
||||
}
|
||||
|
||||
return {
|
||||
source: "none",
|
||||
ports: [],
|
||||
};
|
||||
} catch {
|
||||
return {
|
||||
source: "none",
|
||||
ports: [],
|
||||
};
|
||||
}
|
||||
}
|
||||
@@ -33,11 +33,13 @@ export async function collectProcessesMetrics(client: Client): Promise<{
|
||||
for (let i = 1; i < Math.min(psLines.length, 11); i++) {
|
||||
const parts = psLines[i].split(/\s+/);
|
||||
if (parts.length >= 11) {
|
||||
const cpuVal = Number(parts[2]);
|
||||
const memVal = Number(parts[3]);
|
||||
topProcesses.push({
|
||||
pid: parts[1],
|
||||
user: parts[0],
|
||||
cpu: parts[2],
|
||||
mem: parts[3],
|
||||
cpu: Number.isFinite(cpuVal) ? cpuVal.toString() : "0",
|
||||
mem: Number.isFinite(memVal) ? memVal.toString() : "0",
|
||||
command: parts.slice(10).join(" ").substring(0, 50),
|
||||
});
|
||||
}
|
||||
@@ -46,14 +48,13 @@ export async function collectProcessesMetrics(client: Client): Promise<{
|
||||
|
||||
const procCount = await execCommand(client, "ps aux | wc -l");
|
||||
const runningCount = await execCommand(client, "ps aux | grep -c ' R '");
|
||||
totalProcesses = Number(procCount.stdout.trim()) - 1;
|
||||
runningProcesses = Number(runningCount.stdout.trim());
|
||||
} catch (e) {
|
||||
statsLogger.debug("Failed to collect process stats", {
|
||||
operation: "process_stats_failed",
|
||||
error: e instanceof Error ? e.message : String(e),
|
||||
});
|
||||
}
|
||||
|
||||
const totalCount = Number(procCount.stdout.trim()) - 1;
|
||||
totalProcesses = Number.isFinite(totalCount) ? totalCount : null;
|
||||
|
||||
const runningCount2 = Number(runningCount.stdout.trim());
|
||||
runningProcesses = Number.isFinite(runningCount2) ? runningCount2 : null;
|
||||
} catch (e) {}
|
||||
|
||||
return {
|
||||
total: totalProcesses,
|
||||
|
||||
@@ -23,10 +23,7 @@ export async function collectSystemMetrics(client: Client): Promise<{
|
||||
kernel = kernelOut.stdout.trim() || null;
|
||||
os = osOut.stdout.trim() || null;
|
||||
} catch (e) {
|
||||
statsLogger.debug("Failed to collect system info", {
|
||||
operation: "system_info_failed",
|
||||
error: e instanceof Error ? e.message : String(e),
|
||||
});
|
||||
// No error log
|
||||
}
|
||||
|
||||
return {
|
||||
|
||||
@@ -21,12 +21,7 @@ export async function collectUptimeMetrics(client: Client): Promise<{
|
||||
uptimeFormatted = `${days}d ${hours}h ${minutes}m`;
|
||||
}
|
||||
}
|
||||
} catch (e) {
|
||||
statsLogger.debug("Failed to collect uptime", {
|
||||
operation: "uptime_failed",
|
||||
error: e instanceof Error ? e.message : String(e),
|
||||
});
|
||||
}
|
||||
} catch (e) {}
|
||||
|
||||
return {
|
||||
seconds: uptimeSeconds,
|
||||
|
||||
@@ -102,6 +102,8 @@ import { systemLogger, versionLogger } from "./utils/logger.js";
|
||||
await import("./ssh/tunnel.js");
|
||||
await import("./ssh/file-manager.js");
|
||||
await import("./ssh/server-stats.js");
|
||||
await import("./ssh/docker.js");
|
||||
await import("./ssh/docker-console.js");
|
||||
await import("./dashboard.js");
|
||||
|
||||
process.on("SIGINT", () => {
|
||||
|
||||
145
src/backend/swagger.ts
Normal file
145
src/backend/swagger.ts
Normal file
@@ -0,0 +1,145 @@
|
||||
import swaggerJSDoc from "swagger-jsdoc";
|
||||
import path from "path";
|
||||
import { fileURLToPath } from "url";
|
||||
import { promises as fs } from "fs";
|
||||
|
||||
const __filename = fileURLToPath(import.meta.url);
|
||||
const __dirname = path.dirname(__filename);
|
||||
|
||||
const projectRoot = path.join(__dirname, "..", "..", "..");
|
||||
|
||||
const swaggerOptions: swaggerJSDoc.Options = {
|
||||
definition: {
|
||||
openapi: "3.0.3",
|
||||
info: {
|
||||
title: "Termix API",
|
||||
version: "0.0.0",
|
||||
description: "Termix Backend API Reference",
|
||||
},
|
||||
servers: [
|
||||
{
|
||||
url: "http://localhost:30001",
|
||||
description: "Main database and authentication server",
|
||||
},
|
||||
{
|
||||
url: "http://localhost:30003",
|
||||
description: "SSH tunnel management server",
|
||||
},
|
||||
{
|
||||
url: "http://localhost:30004",
|
||||
description: "SSH file manager server",
|
||||
},
|
||||
{
|
||||
url: "http://localhost:30005",
|
||||
description: "Server statistics and monitoring server",
|
||||
},
|
||||
{
|
||||
url: "http://localhost:30006",
|
||||
description: "Dashboard server",
|
||||
},
|
||||
{
|
||||
url: "http://localhost:30007",
|
||||
description: "Docker management server",
|
||||
},
|
||||
],
|
||||
components: {
|
||||
securitySchemes: {
|
||||
bearerAuth: {
|
||||
type: "http",
|
||||
scheme: "bearer",
|
||||
bearerFormat: "JWT",
|
||||
},
|
||||
},
|
||||
schemas: {
|
||||
Error: {
|
||||
type: "object",
|
||||
properties: {
|
||||
error: { type: "string" },
|
||||
details: { type: "string" },
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
security: [
|
||||
{
|
||||
bearerAuth: [],
|
||||
},
|
||||
],
|
||||
tags: [
|
||||
{
|
||||
name: "Alerts",
|
||||
description: "System alerts and notifications management",
|
||||
},
|
||||
{
|
||||
name: "Credentials",
|
||||
description: "SSH credential management",
|
||||
},
|
||||
{
|
||||
name: "Network Topology",
|
||||
description: "Network topology visualization and management",
|
||||
},
|
||||
{
|
||||
name: "RBAC",
|
||||
description: "Role-based access control for host sharing",
|
||||
},
|
||||
{
|
||||
name: "Snippets",
|
||||
description: "Command snippet management",
|
||||
},
|
||||
{
|
||||
name: "Terminal",
|
||||
description: "Terminal command history",
|
||||
},
|
||||
{
|
||||
name: "Users",
|
||||
description: "User management and authentication",
|
||||
},
|
||||
{
|
||||
name: "Dashboard",
|
||||
description: "Dashboard statistics and activity",
|
||||
},
|
||||
{
|
||||
name: "Docker",
|
||||
description: "Docker container management",
|
||||
},
|
||||
{
|
||||
name: "SSH Tunnels",
|
||||
description: "SSH tunnel connection management",
|
||||
},
|
||||
{
|
||||
name: "Server Stats",
|
||||
description: "Server status monitoring and metrics collection",
|
||||
},
|
||||
{
|
||||
name: "File Manager",
|
||||
description: "SSH file management operations",
|
||||
},
|
||||
],
|
||||
},
|
||||
apis: [
|
||||
path.join(projectRoot, "src", "backend", "database", "routes", "*.ts"),
|
||||
path.join(projectRoot, "src", "backend", "dashboard.ts"),
|
||||
path.join(projectRoot, "src", "backend", "ssh", "*.ts"),
|
||||
],
|
||||
};
|
||||
|
||||
/**
 * Render the JSDoc-annotated route files (per `swaggerOptions.apis`) into
 * an OpenAPI 3 document and write it to <projectRoot>/openapi.json.
 * Exits the process with a non-zero code on failure so a broken spec
 * fails the build rather than silently producing nothing.
 */
async function generateOpenAPISpec() {
  try {
    const swaggerSpec = swaggerJSDoc(swaggerOptions);

    const outputPath = path.join(projectRoot, "openapi.json");

    await fs.writeFile(
      outputPath,
      JSON.stringify(swaggerSpec, null, 2),
      "utf-8",
    );
  } catch (error) {
    console.error("Failed to generate OpenAPI specification:", error);
    process.exit(1);
  }
}

// Runs eagerly on import, so executing this module as a script emits the spec.
generateOpenAPISpec();

export { swaggerOptions, generateOpenAPISpec };
|
||||
@@ -154,9 +154,8 @@ class AuthManager {
|
||||
return;
|
||||
}
|
||||
|
||||
const { getSqlite, saveMemoryDatabaseToFile } = await import(
|
||||
"../database/db/index.js"
|
||||
);
|
||||
const { getSqlite, saveMemoryDatabaseToFile } =
|
||||
await import("../database/db/index.js");
|
||||
|
||||
const sqlite = getSqlite();
|
||||
|
||||
@@ -169,6 +168,23 @@ class AuthManager {
|
||||
if (migrationResult.migrated) {
|
||||
await saveMemoryDatabaseToFile();
|
||||
}
|
||||
|
||||
try {
|
||||
const { CredentialSystemEncryptionMigration } =
|
||||
await import("./credential-system-encryption-migration.js");
|
||||
const credMigration = new CredentialSystemEncryptionMigration();
|
||||
const credResult = await credMigration.migrateUserCredentials(userId);
|
||||
|
||||
if (credResult.migrated > 0) {
|
||||
await saveMemoryDatabaseToFile();
|
||||
}
|
||||
} catch (error) {
|
||||
databaseLogger.warn("Credential migration failed during login", {
|
||||
operation: "login_credential_migration_failed",
|
||||
userId,
|
||||
error: error instanceof Error ? error.message : "Unknown error",
|
||||
});
|
||||
}
|
||||
} catch (error) {
|
||||
databaseLogger.error("Lazy encryption migration failed", error, {
|
||||
operation: "lazy_encryption_migration_error",
|
||||
@@ -231,9 +247,8 @@ class AuthManager {
|
||||
});
|
||||
|
||||
try {
|
||||
const { saveMemoryDatabaseToFile } = await import(
|
||||
"../database/db/index.js"
|
||||
);
|
||||
const { saveMemoryDatabaseToFile } =
|
||||
await import("../database/db/index.js");
|
||||
await saveMemoryDatabaseToFile();
|
||||
} catch (saveError) {
|
||||
databaseLogger.error(
|
||||
@@ -334,9 +349,8 @@ class AuthManager {
|
||||
await db.delete(sessions).where(eq(sessions.id, sessionId));
|
||||
|
||||
try {
|
||||
const { saveMemoryDatabaseToFile } = await import(
|
||||
"../database/db/index.js"
|
||||
);
|
||||
const { saveMemoryDatabaseToFile } =
|
||||
await import("../database/db/index.js");
|
||||
await saveMemoryDatabaseToFile();
|
||||
} catch (saveError) {
|
||||
databaseLogger.error(
|
||||
@@ -387,9 +401,8 @@ class AuthManager {
|
||||
}
|
||||
|
||||
try {
|
||||
const { saveMemoryDatabaseToFile } = await import(
|
||||
"../database/db/index.js"
|
||||
);
|
||||
const { saveMemoryDatabaseToFile } =
|
||||
await import("../database/db/index.js");
|
||||
await saveMemoryDatabaseToFile();
|
||||
} catch (saveError) {
|
||||
databaseLogger.error(
|
||||
@@ -430,9 +443,8 @@ class AuthManager {
|
||||
.where(sql`${sessions.expiresAt} < datetime('now')`);
|
||||
|
||||
try {
|
||||
const { saveMemoryDatabaseToFile } = await import(
|
||||
"../database/db/index.js"
|
||||
);
|
||||
const { saveMemoryDatabaseToFile } =
|
||||
await import("../database/db/index.js");
|
||||
await saveMemoryDatabaseToFile();
|
||||
} catch (saveError) {
|
||||
databaseLogger.error(
|
||||
@@ -568,9 +580,8 @@ class AuthManager {
|
||||
.where(eq(sessions.id, payload.sessionId))
|
||||
.then(async () => {
|
||||
try {
|
||||
const { saveMemoryDatabaseToFile } = await import(
|
||||
"../database/db/index.js"
|
||||
);
|
||||
const { saveMemoryDatabaseToFile } =
|
||||
await import("../database/db/index.js");
|
||||
await saveMemoryDatabaseToFile();
|
||||
|
||||
const remainingSessions = await db
|
||||
@@ -714,9 +725,8 @@ class AuthManager {
|
||||
await db.delete(sessions).where(eq(sessions.id, sessionId));
|
||||
|
||||
try {
|
||||
const { saveMemoryDatabaseToFile } = await import(
|
||||
"../database/db/index.js"
|
||||
);
|
||||
const { saveMemoryDatabaseToFile } =
|
||||
await import("../database/db/index.js");
|
||||
await saveMemoryDatabaseToFile();
|
||||
} catch (saveError) {
|
||||
databaseLogger.error(
|
||||
|
||||
131
src/backend/utils/credential-system-encryption-migration.ts
Normal file
131
src/backend/utils/credential-system-encryption-migration.ts
Normal file
@@ -0,0 +1,131 @@
|
||||
import { db } from "../database/db/index.js";
|
||||
import { sshCredentials } from "../database/db/schema.js";
|
||||
import { eq, and, or, isNull } from "drizzle-orm";
|
||||
import { DataCrypto } from "./data-crypto.js";
|
||||
import { SystemCrypto } from "./system-crypto.js";
|
||||
import { FieldCrypto } from "./field-crypto.js";
|
||||
import { databaseLogger } from "./logger.js";
|
||||
|
||||
/**
 * Re-encrypts a user's SSH credential secrets under the system-wide
 * Credential Sharing KEK (CSKEK) so they can later be decrypted without
 * the owning user's session key. Only rows that are missing at least one
 * of the system-encrypted columns are processed; per-row failures are
 * logged and counted rather than aborting the whole batch.
 */
export class CredentialSystemEncryptionMigration {
  /**
   * Migrate all pending credentials for one user.
   *
   * @param userId owner of the credentials; must have an unlocked DEK
   *               (i.e. be logged in), otherwise this throws.
   * @returns counts of migrated / failed / skipped rows.
   * @throws when the user's DEK is unavailable or the batch itself fails.
   */
  async migrateUserCredentials(userId: string): Promise<{
    migrated: number;
    failed: number;
    skipped: number;
  }> {
    try {
      // The per-user data key is only held in memory while the user is
      // logged in; without it the original ciphertexts cannot be read.
      const userDEK = DataCrypto.getUserDataKey(userId);
      if (!userDEK) {
        throw new Error("User must be logged in to migrate credentials");
      }

      const systemCrypto = SystemCrypto.getInstance();
      const CSKEK = await systemCrypto.getCredentialSharingKey();

      // Only select rows still missing a system-encrypted copy of at
      // least one secret field.
      const credentials = await db
        .select()
        .from(sshCredentials)
        .where(
          and(
            eq(sshCredentials.userId, userId),
            or(
              isNull(sshCredentials.systemPassword),
              isNull(sshCredentials.systemKey),
              isNull(sshCredentials.systemKeyPassword),
            ),
          ),
        );

      let migrated = 0;
      let failed = 0;
      const skipped = 0; // reserved; nothing is currently skipped

      for (const cred of credentials) {
        try {
          // Step 1: decrypt each present secret with the user's DEK.
          // The credential id + field name form the decryption context.
          const plainPassword = cred.password
            ? FieldCrypto.decryptField(
                cred.password,
                userDEK,
                cred.id.toString(),
                "password",
              )
            : null;

          const plainKey = cred.key
            ? FieldCrypto.decryptField(
                cred.key,
                userDEK,
                cred.id.toString(),
                "key",
              )
            : null;

          const plainKeyPassword = cred.key_password
            ? FieldCrypto.decryptField(
                cred.key_password,
                userDEK,
                cred.id.toString(),
                "key_password",
              )
            : null;

          // Step 2: re-encrypt the plaintext under the system sharing key,
          // keeping the same per-field context.
          const systemPassword = plainPassword
            ? FieldCrypto.encryptField(
                plainPassword,
                CSKEK,
                cred.id.toString(),
                "password",
              )
            : null;

          const systemKey = plainKey
            ? FieldCrypto.encryptField(
                plainKey,
                CSKEK,
                cred.id.toString(),
                "key",
              )
            : null;

          const systemKeyPassword = plainKeyPassword
            ? FieldCrypto.encryptField(
                plainKeyPassword,
                CSKEK,
                cred.id.toString(),
                "key_password",
              )
            : null;

          // Step 3: persist the system-encrypted columns alongside the
          // untouched user-encrypted originals.
          await db
            .update(sshCredentials)
            .set({
              systemPassword,
              systemKey,
              systemKeyPassword,
              updatedAt: new Date().toISOString(),
            })
            .where(eq(sshCredentials.id, cred.id));

          migrated++;
        } catch (error) {
          // One bad row must not abort the remaining credentials.
          databaseLogger.error("Failed to migrate credential", error, {
            credentialId: cred.id,
            userId,
          });
          failed++;
        }
      }
      return { migrated, failed, skipped };
    } catch (error) {
      databaseLogger.error(
        "Credential system encryption migration failed",
        error,
        {
          operation: "credential_migration_failed",
          userId,
          error: error instanceof Error ? error.message : "Unknown error",
        },
      );
      throw error;
    }
  }
}
|
||||
@@ -475,6 +475,52 @@ class DataCrypto {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
  /**
   * Encrypt sensitive credential fields with system key for offline sharing.
   * Returns an object carrying only the systemPassword / systemKey /
   * systemKeyPassword fields that correspond to secrets actually present
   * on the record. Any table other than "ssh_credentials" yields an empty
   * object (the system-key scheme only applies to credentials).
   *
   * NOTE(review): a record without an id gets a "temp-<timestamp>" encryption
   * context, which will not decrypt later under the real row id — confirm
   * callers always pass persisted records.
   */
  static async encryptRecordWithSystemKey<T extends Record<string, unknown>>(
    tableName: string,
    record: T,
    systemKey: Buffer,
  ): Promise<Partial<T>> {
    const systemEncrypted: Record<string, unknown> = {};
    const recordId = record.id || "temp-" + Date.now();

    if (tableName !== "ssh_credentials") {
      return systemEncrypted as Partial<T>;
    }

    if (record.password && typeof record.password === "string") {
      systemEncrypted.systemPassword = FieldCrypto.encryptField(
        record.password as string,
        systemKey,
        recordId as string,
        "password",
      );
    }

    if (record.key && typeof record.key === "string") {
      systemEncrypted.systemKey = FieldCrypto.encryptField(
        record.key as string,
        systemKey,
        recordId as string,
        "key",
      );
    }

    if (record.key_password && typeof record.key_password === "string") {
      systemEncrypted.systemKeyPassword = FieldCrypto.encryptField(
        record.key_password as string,
        systemKey,
        recordId as string,
        "key_password",
      );
    }

    return systemEncrypted as Partial<T>;
  }
|
||||
}
|
||||
|
||||
export { DataCrypto };
|
||||
|
||||
@@ -327,11 +327,7 @@ class DatabaseFileEncryption {
|
||||
fs.accessSync(envPath, fs.constants.R_OK);
|
||||
envFileReadable = true;
|
||||
}
|
||||
} catch (error) {
|
||||
databaseLogger.debug("Operation failed, continuing", {
|
||||
error: error instanceof Error ? error.message : String(error),
|
||||
});
|
||||
}
|
||||
} catch (error) {}
|
||||
|
||||
databaseLogger.error(
|
||||
"Database decryption authentication failed - possible causes: wrong DATABASE_KEY, corrupted files, or interrupted write",
|
||||
|
||||
@@ -32,7 +32,6 @@ class FieldCrypto {
|
||||
"key",
|
||||
"key_password",
|
||||
"keyPassword",
|
||||
"keyType",
|
||||
"autostartPassword",
|
||||
"autostartKey",
|
||||
"autostartKeyPassword",
|
||||
@@ -46,7 +45,6 @@ class FieldCrypto {
|
||||
"key",
|
||||
"public_key",
|
||||
"publicKey",
|
||||
"keyType",
|
||||
]),
|
||||
};
|
||||
|
||||
|
||||
@@ -36,7 +36,7 @@ const SENSITIVE_FIELDS = [
|
||||
|
||||
const TRUNCATE_FIELDS = ["data", "content", "body", "response", "request"];
|
||||
|
||||
class Logger {
|
||||
export class Logger {
|
||||
private serviceName: string;
|
||||
private serviceIcon: string;
|
||||
private serviceColor: string;
|
||||
|
||||
@@ -7,11 +7,21 @@ interface LoginAttempt {
|
||||
class LoginRateLimiter {
|
||||
private ipAttempts = new Map<string, LoginAttempt>();
|
||||
private usernameAttempts = new Map<string, LoginAttempt>();
|
||||
private totpAttempts = new Map<string, LoginAttempt>();
|
||||
private resetCodeAttempts = new Map<string, LoginAttempt>();
|
||||
|
||||
private readonly MAX_ATTEMPTS = 5;
|
||||
private readonly WINDOW_MS = 10 * 60 * 1000;
|
||||
private readonly LOCKOUT_MS = 10 * 60 * 1000;
|
||||
|
||||
private readonly TOTP_MAX_ATTEMPTS = 5;
|
||||
private readonly TOTP_WINDOW_MS = 1 * 60 * 1000;
|
||||
private readonly TOTP_LOCKOUT_MS = 5 * 60 * 1000;
|
||||
|
||||
private readonly RESET_CODE_MAX_ATTEMPTS = 5;
|
||||
private readonly RESET_CODE_WINDOW_MS = 1 * 60 * 1000;
|
||||
private readonly RESET_CODE_LOCKOUT_MS = 5 * 60 * 1000;
|
||||
|
||||
constructor() {
|
||||
setInterval(() => this.cleanup(), 5 * 60 * 1000);
|
||||
}
|
||||
@@ -40,6 +50,28 @@ class LoginRateLimiter {
|
||||
this.usernameAttempts.delete(username);
|
||||
}
|
||||
}
|
||||
|
||||
for (const [userId, attempt] of this.totpAttempts.entries()) {
|
||||
if (attempt.lockedUntil && attempt.lockedUntil < now) {
|
||||
this.totpAttempts.delete(userId);
|
||||
} else if (
|
||||
!attempt.lockedUntil &&
|
||||
now - attempt.firstAttempt > this.TOTP_WINDOW_MS
|
||||
) {
|
||||
this.totpAttempts.delete(userId);
|
||||
}
|
||||
}
|
||||
|
||||
for (const [username, attempt] of this.resetCodeAttempts.entries()) {
|
||||
if (attempt.lockedUntil && attempt.lockedUntil < now) {
|
||||
this.resetCodeAttempts.delete(username);
|
||||
} else if (
|
||||
!attempt.lockedUntil &&
|
||||
now - attempt.firstAttempt > this.RESET_CODE_WINDOW_MS
|
||||
) {
|
||||
this.resetCodeAttempts.delete(username);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
recordFailedAttempt(ip: string, username?: string): void {
|
||||
@@ -141,6 +173,114 @@ class LoginRateLimiter {
|
||||
|
||||
return minRemaining;
|
||||
}
|
||||
|
||||
recordFailedTOTPAttempt(userId: string): void {
|
||||
const now = Date.now();
|
||||
|
||||
const totpAttempt = this.totpAttempts.get(userId);
|
||||
if (!totpAttempt) {
|
||||
this.totpAttempts.set(userId, {
|
||||
count: 1,
|
||||
firstAttempt: now,
|
||||
});
|
||||
} else if (now - totpAttempt.firstAttempt > this.TOTP_WINDOW_MS) {
|
||||
this.totpAttempts.set(userId, {
|
||||
count: 1,
|
||||
firstAttempt: now,
|
||||
});
|
||||
} else {
|
||||
totpAttempt.count++;
|
||||
if (totpAttempt.count >= this.TOTP_MAX_ATTEMPTS) {
|
||||
totpAttempt.lockedUntil = now + this.TOTP_LOCKOUT_MS;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
  /** Clear TOTP failure tracking for a user (e.g. after successful verification). */
  resetTOTPAttempts(userId: string): void {
    this.totpAttempts.delete(userId);
  }
|
||||
|
||||
isTOTPLocked(userId: string): { locked: boolean; remainingTime?: number } {
|
||||
const now = Date.now();
|
||||
|
||||
const totpAttempt = this.totpAttempts.get(userId);
|
||||
if (totpAttempt?.lockedUntil && totpAttempt.lockedUntil > now) {
|
||||
return {
|
||||
locked: true,
|
||||
remainingTime: Math.ceil((totpAttempt.lockedUntil - now) / 1000),
|
||||
};
|
||||
}
|
||||
|
||||
return { locked: false };
|
||||
}
|
||||
|
||||
getRemainingTOTPAttempts(userId: string): number {
|
||||
const now = Date.now();
|
||||
|
||||
const totpAttempt = this.totpAttempts.get(userId);
|
||||
if (totpAttempt && now - totpAttempt.firstAttempt <= this.TOTP_WINDOW_MS) {
|
||||
return Math.max(0, this.TOTP_MAX_ATTEMPTS - totpAttempt.count);
|
||||
}
|
||||
|
||||
return this.TOTP_MAX_ATTEMPTS;
|
||||
}
|
||||
|
||||
recordResetCodeAttempt(username: string): void {
|
||||
const now = Date.now();
|
||||
|
||||
const resetAttempt = this.resetCodeAttempts.get(username);
|
||||
if (!resetAttempt) {
|
||||
this.resetCodeAttempts.set(username, {
|
||||
count: 1,
|
||||
firstAttempt: now,
|
||||
});
|
||||
} else if (now - resetAttempt.firstAttempt > this.RESET_CODE_WINDOW_MS) {
|
||||
this.resetCodeAttempts.set(username, {
|
||||
count: 1,
|
||||
firstAttempt: now,
|
||||
});
|
||||
} else {
|
||||
resetAttempt.count++;
|
||||
if (resetAttempt.count >= this.RESET_CODE_MAX_ATTEMPTS) {
|
||||
resetAttempt.lockedUntil = now + this.RESET_CODE_LOCKOUT_MS;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
  /** Clear reset-code failure tracking for a username (e.g. after a successful reset). */
  resetResetCodeAttempts(username: string): void {
    this.resetCodeAttempts.delete(username);
  }
|
||||
|
||||
isResetCodeLocked(username: string): {
|
||||
locked: boolean;
|
||||
remainingTime?: number;
|
||||
} {
|
||||
const now = Date.now();
|
||||
|
||||
const resetAttempt = this.resetCodeAttempts.get(username);
|
||||
if (resetAttempt?.lockedUntil && resetAttempt.lockedUntil > now) {
|
||||
return {
|
||||
locked: true,
|
||||
remainingTime: Math.ceil((resetAttempt.lockedUntil - now) / 1000),
|
||||
};
|
||||
}
|
||||
|
||||
return { locked: false };
|
||||
}
|
||||
|
||||
getRemainingResetCodeAttempts(username: string): number {
|
||||
const now = Date.now();
|
||||
|
||||
const resetAttempt = this.resetCodeAttempts.get(username);
|
||||
if (
|
||||
resetAttempt &&
|
||||
now - resetAttempt.firstAttempt <= this.RESET_CODE_WINDOW_MS
|
||||
) {
|
||||
return Math.max(0, this.RESET_CODE_MAX_ATTEMPTS - resetAttempt.count);
|
||||
}
|
||||
|
||||
return this.RESET_CODE_MAX_ATTEMPTS;
|
||||
}
|
||||
}
|
||||
|
||||
export const loginRateLimiter = new LoginRateLimiter();
|
||||
|
||||
436
src/backend/utils/permission-manager.ts
Normal file
436
src/backend/utils/permission-manager.ts
Normal file
@@ -0,0 +1,436 @@
|
||||
import type { Request, Response, NextFunction } from "express";
|
||||
import { db } from "../database/db/index.js";
|
||||
import {
|
||||
hostAccess,
|
||||
roles,
|
||||
userRoles,
|
||||
sshData,
|
||||
users,
|
||||
} from "../database/db/schema.js";
|
||||
import { eq, and, or, isNull, gte, sql } from "drizzle-orm";
|
||||
import { databaseLogger } from "./logger.js";
|
||||
|
||||
interface AuthenticatedRequest extends Request {
|
||||
userId?: string;
|
||||
dataKey?: Buffer;
|
||||
}
|
||||
|
||||
interface HostAccessInfo {
|
||||
hasAccess: boolean;
|
||||
isOwner: boolean;
|
||||
isShared: boolean;
|
||||
permissionLevel?: "view";
|
||||
expiresAt?: string | null;
|
||||
}
|
||||
|
||||
interface PermissionCheckResult {
|
||||
allowed: boolean;
|
||||
reason?: string;
|
||||
}
|
||||
|
||||
class PermissionManager {
|
||||
private static instance: PermissionManager;
|
||||
private permissionCache: Map<
|
||||
string,
|
||||
{ permissions: string[]; timestamp: number }
|
||||
>;
|
||||
private readonly CACHE_TTL = 5 * 60 * 1000;
|
||||
|
||||
private constructor() {
|
||||
this.permissionCache = new Map();
|
||||
|
||||
setInterval(() => {
|
||||
this.cleanupExpiredAccess().catch((error) => {
|
||||
databaseLogger.error(
|
||||
"Failed to run periodic host access cleanup",
|
||||
error,
|
||||
{
|
||||
operation: "host_access_cleanup_periodic",
|
||||
},
|
||||
);
|
||||
});
|
||||
}, 60 * 1000);
|
||||
|
||||
setInterval(() => {
|
||||
this.clearPermissionCache();
|
||||
}, this.CACHE_TTL);
|
||||
}
|
||||
|
||||
static getInstance(): PermissionManager {
|
||||
if (!this.instance) {
|
||||
this.instance = new PermissionManager();
|
||||
}
|
||||
return this.instance;
|
||||
}
|
||||
|
||||
/**
|
||||
* Clean up expired host access entries
|
||||
*/
|
||||
private async cleanupExpiredAccess(): Promise<void> {
|
||||
try {
|
||||
const now = new Date().toISOString();
|
||||
const result = await db
|
||||
.delete(hostAccess)
|
||||
.where(
|
||||
and(
|
||||
sql`${hostAccess.expiresAt} IS NOT NULL`,
|
||||
sql`${hostAccess.expiresAt} <= ${now}`,
|
||||
),
|
||||
)
|
||||
.returning({ id: hostAccess.id });
|
||||
} catch (error) {
|
||||
databaseLogger.error("Failed to cleanup expired host access", error, {
|
||||
operation: "host_access_cleanup_failed",
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Clear permission cache
|
||||
*/
|
||||
private clearPermissionCache(): void {
|
||||
this.permissionCache.clear();
|
||||
}
|
||||
|
||||
/**
|
||||
* Invalidate permission cache for a specific user
|
||||
*/
|
||||
invalidateUserPermissionCache(userId: string): void {
|
||||
this.permissionCache.delete(userId);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get user permissions from roles
|
||||
*/
|
||||
async getUserPermissions(userId: string): Promise<string[]> {
|
||||
const cached = this.permissionCache.get(userId);
|
||||
if (cached && Date.now() - cached.timestamp < this.CACHE_TTL) {
|
||||
return cached.permissions;
|
||||
}
|
||||
|
||||
try {
|
||||
const userRoleRecords = await db
|
||||
.select({
|
||||
permissions: roles.permissions,
|
||||
})
|
||||
.from(userRoles)
|
||||
.innerJoin(roles, eq(userRoles.roleId, roles.id))
|
||||
.where(eq(userRoles.userId, userId));
|
||||
|
||||
const allPermissions = new Set<string>();
|
||||
for (const record of userRoleRecords) {
|
||||
try {
|
||||
const permissions = JSON.parse(record.permissions) as string[];
|
||||
for (const perm of permissions) {
|
||||
allPermissions.add(perm);
|
||||
}
|
||||
} catch (parseError) {
|
||||
databaseLogger.warn("Failed to parse role permissions", {
|
||||
operation: "get_user_permissions",
|
||||
userId,
|
||||
error: parseError,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
const permissionsArray = Array.from(allPermissions);
|
||||
|
||||
this.permissionCache.set(userId, {
|
||||
permissions: permissionsArray,
|
||||
timestamp: Date.now(),
|
||||
});
|
||||
|
||||
return permissionsArray;
|
||||
} catch (error) {
|
||||
databaseLogger.error("Failed to get user permissions", error, {
|
||||
operation: "get_user_permissions",
|
||||
userId,
|
||||
});
|
||||
return [];
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if user has a specific permission
|
||||
* Supports wildcards: "hosts.*", "*"
|
||||
*/
|
||||
async hasPermission(userId: string, permission: string): Promise<boolean> {
|
||||
const userPermissions = await this.getUserPermissions(userId);
|
||||
|
||||
if (userPermissions.includes("*")) {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (userPermissions.includes(permission)) {
|
||||
return true;
|
||||
}
|
||||
|
||||
const parts = permission.split(".");
|
||||
for (let i = parts.length; i > 0; i--) {
|
||||
const wildcardPermission = parts.slice(0, i).join(".") + ".*";
|
||||
if (userPermissions.includes(wildcardPermission)) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if user can access a specific host
|
||||
*/
|
||||
async canAccessHost(
|
||||
userId: string,
|
||||
hostId: number,
|
||||
action: "read" | "write" | "execute" | "delete" | "share" = "read",
|
||||
): Promise<HostAccessInfo> {
|
||||
try {
|
||||
const host = await db
|
||||
.select()
|
||||
.from(sshData)
|
||||
.where(and(eq(sshData.id, hostId), eq(sshData.userId, userId)))
|
||||
.limit(1);
|
||||
|
||||
if (host.length > 0) {
|
||||
return {
|
||||
hasAccess: true,
|
||||
isOwner: true,
|
||||
isShared: false,
|
||||
};
|
||||
}
|
||||
|
||||
const userRoleIds = await db
|
||||
.select({ roleId: userRoles.roleId })
|
||||
.from(userRoles)
|
||||
.where(eq(userRoles.userId, userId));
|
||||
const roleIds = userRoleIds.map((r) => r.roleId);
|
||||
|
||||
const now = new Date().toISOString();
|
||||
const sharedAccess = await db
|
||||
.select()
|
||||
.from(hostAccess)
|
||||
.where(
|
||||
and(
|
||||
eq(hostAccess.hostId, hostId),
|
||||
or(
|
||||
eq(hostAccess.userId, userId),
|
||||
roleIds.length > 0
|
||||
? sql`${hostAccess.roleId} IN (${sql.join(
|
||||
roleIds.map((id) => sql`${id}`),
|
||||
sql`, `,
|
||||
)})`
|
||||
: sql`false`,
|
||||
),
|
||||
or(isNull(hostAccess.expiresAt), gte(hostAccess.expiresAt, now)),
|
||||
),
|
||||
)
|
||||
.limit(1);
|
||||
|
||||
if (sharedAccess.length > 0) {
|
||||
const access = sharedAccess[0];
|
||||
|
||||
if (action === "write" || action === "delete") {
|
||||
return {
|
||||
hasAccess: false,
|
||||
isOwner: false,
|
||||
isShared: true,
|
||||
permissionLevel: access.permissionLevel as "view",
|
||||
expiresAt: access.expiresAt,
|
||||
};
|
||||
}
|
||||
|
||||
try {
|
||||
await db
|
||||
.update(hostAccess)
|
||||
.set({
|
||||
lastAccessedAt: now,
|
||||
})
|
||||
.where(eq(hostAccess.id, access.id));
|
||||
} catch (error) {
|
||||
databaseLogger.warn("Failed to update host access timestamp", {
|
||||
operation: "update_host_access_timestamp",
|
||||
error,
|
||||
});
|
||||
}
|
||||
|
||||
return {
|
||||
hasAccess: true,
|
||||
isOwner: false,
|
||||
isShared: true,
|
||||
permissionLevel: access.permissionLevel as "view",
|
||||
expiresAt: access.expiresAt,
|
||||
};
|
||||
}
|
||||
|
||||
return {
|
||||
hasAccess: false,
|
||||
isOwner: false,
|
||||
isShared: false,
|
||||
};
|
||||
} catch (error) {
|
||||
databaseLogger.error("Failed to check host access", error, {
|
||||
operation: "can_access_host",
|
||||
userId,
|
||||
hostId,
|
||||
action,
|
||||
});
|
||||
return {
|
||||
hasAccess: false,
|
||||
isOwner: false,
|
||||
isShared: false,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if user is admin (backward compatibility)
|
||||
*/
|
||||
async isAdmin(userId: string): Promise<boolean> {
|
||||
try {
|
||||
const user = await db
|
||||
.select({ isAdmin: users.is_admin })
|
||||
.from(users)
|
||||
.where(eq(users.id, userId))
|
||||
.limit(1);
|
||||
|
||||
if (user.length > 0 && user[0].isAdmin) {
|
||||
return true;
|
||||
}
|
||||
|
||||
const adminRoles = await db
|
||||
.select({ roleName: roles.name })
|
||||
.from(userRoles)
|
||||
.innerJoin(roles, eq(userRoles.roleId, roles.id))
|
||||
.where(
|
||||
and(
|
||||
eq(userRoles.userId, userId),
|
||||
or(eq(roles.name, "admin"), eq(roles.name, "super_admin")),
|
||||
),
|
||||
);
|
||||
|
||||
return adminRoles.length > 0;
|
||||
} catch (error) {
|
||||
databaseLogger.error("Failed to check admin status", error, {
|
||||
operation: "is_admin",
|
||||
userId,
|
||||
});
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Middleware: Require specific permission
|
||||
*/
|
||||
requirePermission(permission: string) {
|
||||
return async (
|
||||
req: AuthenticatedRequest,
|
||||
res: Response,
|
||||
next: NextFunction,
|
||||
) => {
|
||||
const userId = req.userId;
|
||||
|
||||
if (!userId) {
|
||||
return res.status(401).json({ error: "Not authenticated" });
|
||||
}
|
||||
|
||||
const hasPermission = await this.hasPermission(userId, permission);
|
||||
|
||||
if (!hasPermission) {
|
||||
databaseLogger.warn("Permission denied", {
|
||||
operation: "permission_check",
|
||||
userId,
|
||||
permission,
|
||||
path: req.path,
|
||||
});
|
||||
|
||||
return res.status(403).json({
|
||||
error: "Insufficient permissions",
|
||||
required: permission,
|
||||
});
|
||||
}
|
||||
|
||||
next();
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Middleware: Require host access
|
||||
*/
|
||||
requireHostAccess(
|
||||
hostIdParam: string = "id",
|
||||
action: "read" | "write" | "execute" | "delete" | "share" = "read",
|
||||
) {
|
||||
return async (
|
||||
req: AuthenticatedRequest,
|
||||
res: Response,
|
||||
next: NextFunction,
|
||||
) => {
|
||||
const userId = req.userId;
|
||||
|
||||
if (!userId) {
|
||||
return res.status(401).json({ error: "Not authenticated" });
|
||||
}
|
||||
|
||||
const hostId = parseInt(req.params[hostIdParam], 10);
|
||||
|
||||
if (isNaN(hostId)) {
|
||||
return res.status(400).json({ error: "Invalid host ID" });
|
||||
}
|
||||
|
||||
const accessInfo = await this.canAccessHost(userId, hostId, action);
|
||||
|
||||
if (!accessInfo.hasAccess) {
|
||||
databaseLogger.warn("Host access denied", {
|
||||
operation: "host_access_check",
|
||||
userId,
|
||||
hostId,
|
||||
action,
|
||||
});
|
||||
|
||||
return res.status(403).json({
|
||||
error: "Access denied to host",
|
||||
hostId,
|
||||
action,
|
||||
});
|
||||
}
|
||||
|
||||
(req as any).hostAccessInfo = accessInfo;
|
||||
|
||||
next();
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Middleware: Require admin role (backward compatible)
|
||||
*/
|
||||
requireAdmin() {
|
||||
return async (
|
||||
req: AuthenticatedRequest,
|
||||
res: Response,
|
||||
next: NextFunction,
|
||||
) => {
|
||||
const userId = req.userId;
|
||||
|
||||
if (!userId) {
|
||||
return res.status(401).json({ error: "Not authenticated" });
|
||||
}
|
||||
|
||||
const isAdmin = await this.isAdmin(userId);
|
||||
|
||||
if (!isAdmin) {
|
||||
databaseLogger.warn("Admin access denied", {
|
||||
operation: "admin_check",
|
||||
userId,
|
||||
path: req.path,
|
||||
});
|
||||
|
||||
return res.status(403).json({ error: "Admin access required" });
|
||||
}
|
||||
|
||||
next();
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
export { PermissionManager };
|
||||
export type { AuthenticatedRequest, HostAccessInfo, PermissionCheckResult };
|
||||
700
src/backend/utils/shared-credential-manager.ts
Normal file
700
src/backend/utils/shared-credential-manager.ts
Normal file
@@ -0,0 +1,700 @@
|
||||
import { db } from "../database/db/index.js";
|
||||
import {
|
||||
sharedCredentials,
|
||||
sshCredentials,
|
||||
hostAccess,
|
||||
users,
|
||||
userRoles,
|
||||
sshData,
|
||||
} from "../database/db/schema.js";
|
||||
import { eq, and } from "drizzle-orm";
|
||||
import { DataCrypto } from "./data-crypto.js";
|
||||
import { FieldCrypto } from "./field-crypto.js";
|
||||
import { databaseLogger } from "./logger.js";
|
||||
|
||||
interface CredentialData {
|
||||
username: string;
|
||||
authType: string;
|
||||
password?: string;
|
||||
key?: string;
|
||||
keyPassword?: string;
|
||||
keyType?: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* Manages shared credentials for RBAC host sharing.
|
||||
* Creates per-user encrypted credential copies to enable credential sharing
|
||||
* without requiring the credential owner to be online.
|
||||
*/
|
||||
class SharedCredentialManager {
|
||||
  // Lazily-created process-wide singleton instance.
  private static instance: SharedCredentialManager;

  // Private: construction only via getInstance().
  private constructor() {}

  /** Get (and lazily create) the singleton. */
  static getInstance(): SharedCredentialManager {
    if (!this.instance) {
      this.instance = new SharedCredentialManager();
    }
    return this.instance;
  }
|
||||
|
||||
/**
|
||||
* Create shared credential for a specific user
|
||||
* Called when sharing a host with a user
|
||||
*/
|
||||
async createSharedCredentialForUser(
|
||||
hostAccessId: number,
|
||||
originalCredentialId: number,
|
||||
targetUserId: string,
|
||||
ownerId: string,
|
||||
): Promise<void> {
|
||||
try {
|
||||
const ownerDEK = DataCrypto.getUserDataKey(ownerId);
|
||||
|
||||
if (ownerDEK) {
|
||||
const targetDEK = DataCrypto.getUserDataKey(targetUserId);
|
||||
if (!targetDEK) {
|
||||
await this.createPendingSharedCredential(
|
||||
hostAccessId,
|
||||
originalCredentialId,
|
||||
targetUserId,
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
const credentialData = await this.getDecryptedCredential(
|
||||
originalCredentialId,
|
||||
ownerId,
|
||||
ownerDEK,
|
||||
);
|
||||
|
||||
const encryptedForTarget = this.encryptCredentialForUser(
|
||||
credentialData,
|
||||
targetUserId,
|
||||
targetDEK,
|
||||
hostAccessId,
|
||||
);
|
||||
|
||||
await db.insert(sharedCredentials).values({
|
||||
hostAccessId,
|
||||
originalCredentialId,
|
||||
targetUserId,
|
||||
...encryptedForTarget,
|
||||
needsReEncryption: false,
|
||||
});
|
||||
} else {
|
||||
const targetDEK = DataCrypto.getUserDataKey(targetUserId);
|
||||
if (!targetDEK) {
|
||||
await this.createPendingSharedCredential(
|
||||
hostAccessId,
|
||||
originalCredentialId,
|
||||
targetUserId,
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
const credentialData =
|
||||
await this.getDecryptedCredentialViaSystemKey(originalCredentialId);
|
||||
|
||||
const encryptedForTarget = this.encryptCredentialForUser(
|
||||
credentialData,
|
||||
targetUserId,
|
||||
targetDEK,
|
||||
hostAccessId,
|
||||
);
|
||||
|
||||
await db.insert(sharedCredentials).values({
|
||||
hostAccessId,
|
||||
originalCredentialId,
|
||||
targetUserId,
|
||||
...encryptedForTarget,
|
||||
needsReEncryption: false,
|
||||
});
|
||||
}
|
||||
} catch (error) {
|
||||
databaseLogger.error("Failed to create shared credential", error, {
|
||||
operation: "create_shared_credential",
|
||||
hostAccessId,
|
||||
targetUserId,
|
||||
});
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Create shared credentials for all users in a role
|
||||
* Called when sharing a host with a role
|
||||
*/
|
||||
async createSharedCredentialsForRole(
|
||||
hostAccessId: number,
|
||||
originalCredentialId: number,
|
||||
roleId: number,
|
||||
ownerId: string,
|
||||
): Promise<void> {
|
||||
try {
|
||||
const roleUsers = await db
|
||||
.select({ userId: userRoles.userId })
|
||||
.from(userRoles)
|
||||
.where(eq(userRoles.roleId, roleId));
|
||||
|
||||
for (const { userId } of roleUsers) {
|
||||
try {
|
||||
await this.createSharedCredentialForUser(
|
||||
hostAccessId,
|
||||
originalCredentialId,
|
||||
userId,
|
||||
ownerId,
|
||||
);
|
||||
} catch (error) {
|
||||
databaseLogger.error(
|
||||
"Failed to create shared credential for role member",
|
||||
error,
|
||||
{
|
||||
operation: "create_shared_credentials_role",
|
||||
hostAccessId,
|
||||
roleId,
|
||||
userId,
|
||||
},
|
||||
);
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
databaseLogger.error(
|
||||
"Failed to create shared credentials for role",
|
||||
error,
|
||||
{
|
||||
operation: "create_shared_credentials_role",
|
||||
hostAccessId,
|
||||
roleId,
|
||||
},
|
||||
);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
  /**
   * Get credential data for a shared user.
   * Called when a shared user connects to a host.
   *
   * Returns null when no shared copy exists for (hostId, userId) or when
   * the copy is still pending re-encryption (it was written while the
   * target's DEK was unavailable). Throws when the requesting user's data
   * is locked or decryption fails.
   */
  async getSharedCredentialForUser(
    hostId: number,
    userId: string,
  ): Promise<CredentialData | null> {
    try {
      const userDEK = DataCrypto.getUserDataKey(userId);
      if (!userDEK) {
        throw new Error(`User ${userId} data not unlocked`);
      }

      // Resolve the shared copy through its host-access grant.
      const sharedCred = await db
        .select()
        .from(sharedCredentials)
        .innerJoin(
          hostAccess,
          eq(sharedCredentials.hostAccessId, hostAccess.id),
        )
        .where(
          and(
            eq(hostAccess.hostId, hostId),
            eq(sharedCredentials.targetUserId, userId),
          ),
        )
        .limit(1);

      if (sharedCred.length === 0) {
        return null;
      }

      // Joined row: pick out the shared_credentials columns.
      const cred = sharedCred[0].shared_credentials;

      // Pending rows hold no usable ciphertext for this user yet; they are
      // repaired by reEncryptPendingCredentialsForUser at login.
      if (cred.needsReEncryption) {
        databaseLogger.warn(
          "Shared credential needs re-encryption but cannot be accessed yet",
          {
            operation: "get_shared_credential_pending",
            hostId,
            userId,
          },
        );
        return null;
      }

      return this.decryptSharedCredential(cred, userDEK);
    } catch (error) {
      databaseLogger.error("Failed to get shared credential", error, {
        operation: "get_shared_credential",
        hostId,
        userId,
      });
      throw error;
    }
  }
|
||||
|
||||
  /**
   * Update all shared credentials when original credential is updated.
   * Called when credential owner updates credential.
   *
   * Plaintext is read via the owner's DEK when available, otherwise via the
   * system sharing key. If neither path works, or an individual target's
   * DEK is unavailable, the affected rows are flagged needsReEncryption for
   * later repair at login. Errors are logged, never rethrown.
   */
  async updateSharedCredentialsForOriginal(
    credentialId: number,
    ownerId: string,
  ): Promise<void> {
    try {
      const sharedCreds = await db
        .select()
        .from(sharedCredentials)
        .where(eq(sharedCredentials.originalCredentialId, credentialId));

      const ownerDEK = DataCrypto.getUserDataKey(ownerId);
      let credentialData: CredentialData;

      if (ownerDEK) {
        credentialData = await this.getDecryptedCredential(
          credentialId,
          ownerId,
          ownerDEK,
        );
      } else {
        try {
          credentialData =
            await this.getDecryptedCredentialViaSystemKey(credentialId);
        } catch (error) {
          // Owner offline AND credential never migrated to the system key:
          // the plaintext is unreadable, so flag every copy for later.
          databaseLogger.warn(
            "Cannot update shared credentials: owner offline and credential not migrated",
            {
              operation: "update_shared_credentials_failed",
              credentialId,
              ownerId,
              error: error instanceof Error ? error.message : "Unknown error",
            },
          );
          await db
            .update(sharedCredentials)
            .set({ needsReEncryption: true })
            .where(eq(sharedCredentials.originalCredentialId, credentialId));
          return;
        }
      }

      for (const sharedCred of sharedCreds) {
        const targetDEK = DataCrypto.getUserDataKey(sharedCred.targetUserId);

        if (!targetDEK) {
          // Target offline: mark only this row; repaired at their login.
          await db
            .update(sharedCredentials)
            .set({ needsReEncryption: true })
            .where(eq(sharedCredentials.id, sharedCred.id));
          continue;
        }

        const encryptedForTarget = this.encryptCredentialForUser(
          credentialData,
          sharedCred.targetUserId,
          targetDEK,
          sharedCred.hostAccessId,
        );

        await db
          .update(sharedCredentials)
          .set({
            ...encryptedForTarget,
            needsReEncryption: false,
            updatedAt: new Date().toISOString(),
          })
          .where(eq(sharedCredentials.id, sharedCred.id));
      }
    } catch (error) {
      databaseLogger.error("Failed to update shared credentials", error, {
        operation: "update_shared_credentials",
        credentialId,
      });
    }
  }
|
||||
|
||||
/**
|
||||
* Delete shared credentials when original credential is deleted
|
||||
* Called from credential deletion route
|
||||
*/
|
||||
async deleteSharedCredentialsForOriginal(
|
||||
credentialId: number,
|
||||
): Promise<void> {
|
||||
try {
|
||||
const result = await db
|
||||
.delete(sharedCredentials)
|
||||
.where(eq(sharedCredentials.originalCredentialId, credentialId))
|
||||
.returning({ id: sharedCredentials.id });
|
||||
} catch (error) {
|
||||
databaseLogger.error("Failed to delete shared credentials", error, {
|
||||
operation: "delete_shared_credentials",
|
||||
credentialId,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Re-encrypt pending shared credentials for a user when they log in
|
||||
* Called during user login
|
||||
*/
|
||||
async reEncryptPendingCredentialsForUser(userId: string): Promise<void> {
|
||||
try {
|
||||
const userDEK = DataCrypto.getUserDataKey(userId);
|
||||
if (!userDEK) {
|
||||
return;
|
||||
}
|
||||
|
||||
const pendingCreds = await db
|
||||
.select()
|
||||
.from(sharedCredentials)
|
||||
.where(
|
||||
and(
|
||||
eq(sharedCredentials.targetUserId, userId),
|
||||
eq(sharedCredentials.needsReEncryption, true),
|
||||
),
|
||||
);
|
||||
|
||||
for (const cred of pendingCreds) {
|
||||
await this.reEncryptSharedCredential(cred.id, userId);
|
||||
}
|
||||
} catch (error) {
|
||||
databaseLogger.error("Failed to re-encrypt pending credentials", error, {
|
||||
operation: "reencrypt_pending_credentials",
|
||||
userId,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
  /**
   * Load a credential owned by `ownerId` and decrypt its sensitive fields
   * with the owner's DEK. Absent/empty fields come back as undefined.
   *
   * @throws when the credential does not exist or is not owned by ownerId.
   */
  private async getDecryptedCredential(
    credentialId: number,
    ownerId: string,
    ownerDEK: Buffer,
  ): Promise<CredentialData> {
    const creds = await db
      .select()
      .from(sshCredentials)
      .where(
        and(
          eq(sshCredentials.id, credentialId),
          eq(sshCredentials.userId, ownerId),
        ),
      )
      .limit(1);

    if (creds.length === 0) {
      throw new Error(`Credential ${credentialId} not found`);
    }

    const cred = creds[0];

    return {
      username: cred.username,
      authType: cred.authType,
      password: cred.password
        ? this.decryptField(cred.password, ownerDEK, credentialId, "password")
        : undefined,
      key: cred.key
        ? this.decryptField(cred.key, ownerDEK, credentialId, "key")
        : undefined,
      keyPassword: cred.key_password
        ? this.decryptField(
            cred.key_password,
            ownerDEK,
            credentialId,
            "key_password",
          )
        : undefined,
      keyType: cred.keyType,
    };
  }
|
||||
|
||||
  /**
   * Decrypt credential using system key (for offline sharing when owner is offline).
   *
   * Requires the credential to have been migrated (at least one system*
   * column populated); otherwise throws with an actionable message asking
   * the owner to log in. Absent fields come back as undefined.
   */
  private async getDecryptedCredentialViaSystemKey(
    credentialId: number,
  ): Promise<CredentialData> {
    const creds = await db
      .select()
      .from(sshCredentials)
      .where(eq(sshCredentials.id, credentialId))
      .limit(1);

    if (creds.length === 0) {
      throw new Error(`Credential ${credentialId} not found`);
    }

    const cred = creds[0];

    if (!cred.systemPassword && !cred.systemKey && !cred.systemKeyPassword) {
      throw new Error(
        "Credential not yet migrated for offline sharing. " +
          "Please ask credential owner to log in to enable sharing.",
      );
    }

    // Dynamic import of SystemCrypto — NOTE(review): presumably to avoid a
    // circular dependency at module load; confirm against the import graph.
    const { SystemCrypto } = await import("./system-crypto.js");
    const systemCrypto = SystemCrypto.getInstance();
    const CSKEK = await systemCrypto.getCredentialSharingKey();

    return {
      username: cred.username,
      authType: cred.authType,
      password: cred.systemPassword
        ? this.decryptField(
            cred.systemPassword,
            CSKEK,
            credentialId,
            "password",
          )
        : undefined,
      key: cred.systemKey
        ? this.decryptField(cred.systemKey, CSKEK, credentialId, "key")
        : undefined,
      keyPassword: cred.systemKeyPassword
        ? this.decryptField(
            cred.systemKeyPassword,
            CSKEK,
            credentialId,
            "key_password",
          )
        : undefined,
      keyType: cred.keyType,
    };
  }
|
||||
|
||||
/**
 * Re-encrypt plaintext credential fields under the target user's DEK so the
 * share recipient can decrypt them with their own key.
 *
 * Optional fields encrypt to null when absent; authType and keyType are
 * stored unencrypted, mirroring the original row layout.
 */
private encryptCredentialForUser(
  credentialData: CredentialData,
  targetUserId: string,
  targetDEK: Buffer,
  hostAccessId: number,
): {
  encryptedUsername: string;
  encryptedAuthType: string;
  encryptedPassword: string | null;
  encryptedKey: string | null;
  encryptedKeyPassword: string | null;
  encryptedKeyType: string | null;
} {
  // Scopes every ciphertext to this particular share; decryption must use
  // the identical record id.
  const recordId = `shared-${hostAccessId}-${targetUserId}`;

  const encryptOptional = (value: string | undefined, field: string) =>
    value ? FieldCrypto.encryptField(value, targetDEK, recordId, field) : null;

  return {
    encryptedUsername: FieldCrypto.encryptField(
      credentialData.username,
      targetDEK,
      recordId,
      "username",
    ),
    encryptedAuthType: credentialData.authType,
    encryptedPassword: encryptOptional(credentialData.password, "password"),
    encryptedKey: encryptOptional(credentialData.key, "key"),
    encryptedKeyPassword: encryptOptional(
      credentialData.keyPassword,
      "key_password",
    ),
    encryptedKeyType: credentialData.keyType || null,
  };
}
|
||||
|
||||
/**
 * Decrypt a shared-credential row using the recipient user's DEK.
 *
 * Counterpart of encryptCredentialForUser: rebuilds the same record id from
 * the row so field decryption matches how the share was encrypted.
 */
private decryptSharedCredential(
  sharedCred: typeof sharedCredentials.$inferSelect,
  userDEK: Buffer,
): CredentialData {
  const recordId = `shared-${sharedCred.hostAccessId}-${sharedCred.targetUserId}`;

  // Optional columns may be null; map null -> undefined for CredentialData.
  const decryptOptional = (value: string | null, field: string) =>
    value ? FieldCrypto.decryptField(value, userDEK, recordId, field) : undefined;

  return {
    username: FieldCrypto.decryptField(
      sharedCred.encryptedUsername,
      userDEK,
      recordId,
      "username",
    ),
    authType: sharedCred.encryptedAuthType,
    password: decryptOptional(sharedCred.encryptedPassword, "password"),
    key: decryptOptional(sharedCred.encryptedKey, "key"),
    keyPassword: decryptOptional(
      sharedCred.encryptedKeyPassword,
      "key_password",
    ),
    keyType: sharedCred.encryptedKeyType || undefined,
  };
}
|
||||
|
||||
/**
 * Decrypt a single encrypted field with the given DEK.
 *
 * On failure the ENCRYPTED input is returned unchanged — presumably a
 * best-effort fallback for values that were never encrypted (TODO confirm);
 * callers must tolerate ciphertext passing through.
 */
private decryptField(
  encryptedValue: string,
  dek: Buffer,
  recordId: number | string,
  fieldName: string,
): string {
  try {
    return FieldCrypto.decryptField(
      encryptedValue,
      dek,
      recordId.toString(),
      fieldName,
    );
  } catch (error) {
    // Fix: include the failure reason — previously the error was swallowed,
    // making field-decryption problems undiagnosable from the logs.
    databaseLogger.warn("Field decryption failed, returning as-is", {
      operation: "decrypt_field",
      fieldName,
      recordId,
      error: error instanceof Error ? error.message : "Unknown error",
    });
    return encryptedValue;
  }
}
|
||||
|
||||
private async createPendingSharedCredential(
|
||||
hostAccessId: number,
|
||||
originalCredentialId: number,
|
||||
targetUserId: string,
|
||||
): Promise<void> {
|
||||
await db.insert(sharedCredentials).values({
|
||||
hostAccessId,
|
||||
originalCredentialId,
|
||||
targetUserId,
|
||||
encryptedUsername: "",
|
||||
encryptedAuthType: "",
|
||||
needsReEncryption: true,
|
||||
});
|
||||
|
||||
databaseLogger.info("Created pending shared credential", {
|
||||
operation: "create_pending_shared_credential",
|
||||
hostAccessId,
|
||||
targetUserId,
|
||||
});
|
||||
}
|
||||
|
||||
/**
 * Re-encrypt a pending shared credential under the recipient's DEK.
 *
 * Flow: load the shared row -> resolve the owning host/user via hostAccess ->
 * decrypt the original credential (owner DEK if the owner is online,
 * otherwise the system sharing key) -> re-encrypt for the recipient and clear
 * the needsReEncryption flag.
 *
 * All failure paths log and return without throwing; the row simply stays
 * pending until a later attempt succeeds.
 */
private async reEncryptSharedCredential(
  sharedCredId: number,
  userId: string,
): Promise<void> {
  try {
    const sharedCred = await db
      .select()
      .from(sharedCredentials)
      .where(eq(sharedCredentials.id, sharedCredId))
      .limit(1);

    if (sharedCred.length === 0) {
      databaseLogger.warn("Re-encrypt: shared credential not found", {
        operation: "reencrypt_not_found",
        sharedCredId,
      });
      return;
    }

    const cred = sharedCred[0];

    // Join through hostAccess to the host row to discover the credential owner.
    const access = await db
      .select()
      .from(hostAccess)
      .innerJoin(sshData, eq(hostAccess.hostId, sshData.id))
      .where(eq(hostAccess.id, cred.hostAccessId))
      .limit(1);

    if (access.length === 0) {
      databaseLogger.warn("Re-encrypt: host access not found", {
        operation: "reencrypt_access_not_found",
        sharedCredId,
      });
      return;
    }

    const ownerId = access[0].ssh_data.userId;

    // The recipient's DEK is mandatory: without it we cannot produce
    // ciphertext the recipient can read.
    const userDEK = DataCrypto.getUserDataKey(userId);
    if (!userDEK) {
      databaseLogger.warn("Re-encrypt: user DEK not available", {
        operation: "reencrypt_user_offline",
        sharedCredId,
        userId,
      });
      return;
    }

    const ownerDEK = DataCrypto.getUserDataKey(ownerId);
    let credentialData: CredentialData;

    if (ownerDEK) {
      // Owner online: decrypt with the owner's own key.
      credentialData = await this.getDecryptedCredential(
        cred.originalCredentialId,
        ownerId,
        ownerDEK,
      );
    } else {
      // Owner offline: fall back to the system sharing key; this throws if
      // the credential has no system-encrypted copies yet.
      try {
        credentialData = await this.getDecryptedCredentialViaSystemKey(
          cred.originalCredentialId,
        );
      } catch (error) {
        databaseLogger.warn(
          "Re-encrypt: system key decryption failed, credential may not be migrated yet",
          {
            operation: "reencrypt_system_key_failed",
            sharedCredId,
            error: error instanceof Error ? error.message : "Unknown error",
          },
        );
        return;
      }
    }

    // Produce recipient-readable ciphertext bound to this share.
    const encryptedForTarget = this.encryptCredentialForUser(
      credentialData,
      userId,
      userDEK,
      cred.hostAccessId,
    );

    await db
      .update(sharedCredentials)
      .set({
        ...encryptedForTarget,
        needsReEncryption: false,
        updatedAt: new Date().toISOString(),
      })
      .where(eq(sharedCredentials.id, sharedCredId));
  } catch (error) {
    // Unexpected failure: log with context; caller is not notified.
    databaseLogger.error("Failed to re-encrypt shared credential", error, {
      operation: "reencrypt_shared_credential",
      sharedCredId,
      userId,
    });
  }
}
|
||||
}
|
||||
|
||||
export { SharedCredentialManager };
|
||||
@@ -2,7 +2,12 @@ import { getDb, DatabaseSaveTrigger } from "../database/db/index.js";
|
||||
import { DataCrypto } from "./data-crypto.js";
|
||||
import type { SQLiteTable } from "drizzle-orm/sqlite-core";
|
||||
|
||||
type TableName = "users" | "ssh_data" | "ssh_credentials" | "recent_activity";
|
||||
type TableName =
|
||||
| "users"
|
||||
| "ssh_data"
|
||||
| "ssh_credentials"
|
||||
| "recent_activity"
|
||||
| "socks5_proxy_presets";
|
||||
|
||||
class SimpleDBOps {
|
||||
static async insert<T extends Record<string, unknown>>(
|
||||
@@ -23,6 +28,20 @@ class SimpleDBOps {
|
||||
userDataKey,
|
||||
);
|
||||
|
||||
if (tableName === "ssh_credentials") {
|
||||
const { SystemCrypto } = await import("./system-crypto.js");
|
||||
const systemCrypto = SystemCrypto.getInstance();
|
||||
const systemKey = await systemCrypto.getCredentialSharingKey();
|
||||
|
||||
const systemEncrypted = await DataCrypto.encryptRecordWithSystemKey(
|
||||
tableName,
|
||||
dataWithTempId,
|
||||
systemKey,
|
||||
);
|
||||
|
||||
Object.assign(encryptedData, systemEncrypted);
|
||||
}
|
||||
|
||||
if (!data.id) {
|
||||
delete encryptedData.id;
|
||||
}
|
||||
@@ -105,6 +124,20 @@ class SimpleDBOps {
|
||||
userDataKey,
|
||||
);
|
||||
|
||||
if (tableName === "ssh_credentials") {
|
||||
const { SystemCrypto } = await import("./system-crypto.js");
|
||||
const systemCrypto = SystemCrypto.getInstance();
|
||||
const systemKey = await systemCrypto.getCredentialSharingKey();
|
||||
|
||||
const systemEncrypted = await DataCrypto.encryptRecordWithSystemKey(
|
||||
tableName,
|
||||
data,
|
||||
systemKey,
|
||||
);
|
||||
|
||||
Object.assign(encryptedData, systemEncrypted);
|
||||
}
|
||||
|
||||
const result = await getDb()
|
||||
.update(table)
|
||||
.set(encryptedData)
|
||||
|
||||
131
src/backend/utils/socks5-helper.ts
Normal file
131
src/backend/utils/socks5-helper.ts
Normal file
@@ -0,0 +1,131 @@
|
||||
import { SocksClient } from "socks";
|
||||
import type { SocksClientOptions } from "socks";
|
||||
import net from "net";
|
||||
import { sshLogger } from "./logger.js";
|
||||
import type { ProxyNode } from "../../types/index.js";
|
||||
|
||||
/** Configuration for routing a connection through SOCKS5 proxies. */
export interface SOCKS5Config {
  // Master switch; when false/undefined no proxying is attempted.
  useSocks5?: boolean;
  // Single-proxy settings (used only when no proxy chain is given).
  socks5Host?: string;
  // Defaults to 1080 when omitted (see createSingleProxyConnection).
  socks5Port?: number;
  socks5Username?: string;
  socks5Password?: string;
  // Ordered proxy hops; a non-empty chain takes precedence over the
  // single-proxy fields above.
  socks5ProxyChain?: ProxyNode[];
}
|
||||
|
||||
/**
|
||||
* Creates a SOCKS5 connection through a single proxy or a chain of proxies
|
||||
* @param targetHost - Target SSH server hostname/IP
|
||||
* @param targetPort - Target SSH server port
|
||||
* @param socks5Config - SOCKS5 proxy configuration
|
||||
* @returns Promise with connected socket or null if SOCKS5 is not enabled
|
||||
*/
|
||||
export async function createSocks5Connection(
|
||||
targetHost: string,
|
||||
targetPort: number,
|
||||
socks5Config: SOCKS5Config,
|
||||
): Promise<net.Socket | null> {
|
||||
if (!socks5Config.useSocks5) {
|
||||
return null;
|
||||
}
|
||||
|
||||
if (
|
||||
socks5Config.socks5ProxyChain &&
|
||||
socks5Config.socks5ProxyChain.length > 0
|
||||
) {
|
||||
return createProxyChainConnection(
|
||||
targetHost,
|
||||
targetPort,
|
||||
socks5Config.socks5ProxyChain,
|
||||
);
|
||||
}
|
||||
|
||||
if (socks5Config.socks5Host) {
|
||||
return createSingleProxyConnection(targetHost, targetPort, socks5Config);
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a connection through a single SOCKS proxy
|
||||
*/
|
||||
async function createSingleProxyConnection(
|
||||
targetHost: string,
|
||||
targetPort: number,
|
||||
socks5Config: SOCKS5Config,
|
||||
): Promise<net.Socket> {
|
||||
const socksOptions: SocksClientOptions = {
|
||||
proxy: {
|
||||
host: socks5Config.socks5Host!,
|
||||
port: socks5Config.socks5Port || 1080,
|
||||
type: 5,
|
||||
userId: socks5Config.socks5Username,
|
||||
password: socks5Config.socks5Password,
|
||||
},
|
||||
command: "connect",
|
||||
destination: {
|
||||
host: targetHost,
|
||||
port: targetPort,
|
||||
},
|
||||
};
|
||||
|
||||
try {
|
||||
const info = await SocksClient.createConnection(socksOptions);
|
||||
|
||||
return info.socket;
|
||||
} catch (error) {
|
||||
sshLogger.error("SOCKS5 connection failed", error, {
|
||||
operation: "socks5_connect_failed",
|
||||
proxyHost: socks5Config.socks5Host,
|
||||
proxyPort: socks5Config.socks5Port || 1080,
|
||||
targetHost,
|
||||
targetPort,
|
||||
errorMessage: error instanceof Error ? error.message : "Unknown error",
|
||||
});
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a connection through a chain of SOCKS proxies
|
||||
* Each proxy in the chain connects through the previous one
|
||||
*/
|
||||
async function createProxyChainConnection(
|
||||
targetHost: string,
|
||||
targetPort: number,
|
||||
proxyChain: ProxyNode[],
|
||||
): Promise<net.Socket> {
|
||||
if (proxyChain.length === 0) {
|
||||
throw new Error("Proxy chain is empty");
|
||||
}
|
||||
|
||||
const chainPath = proxyChain.map((p) => `${p.host}:${p.port}`).join(" → ");
|
||||
try {
|
||||
const info = await SocksClient.createConnectionChain({
|
||||
proxies: proxyChain.map((p) => ({
|
||||
host: p.host,
|
||||
port: p.port,
|
||||
type: p.type,
|
||||
userId: p.username,
|
||||
password: p.password,
|
||||
timeout: 10000,
|
||||
})),
|
||||
command: "connect",
|
||||
destination: {
|
||||
host: targetHost,
|
||||
port: targetPort,
|
||||
},
|
||||
});
|
||||
return info.socket;
|
||||
} catch (error) {
|
||||
sshLogger.error("SOCKS proxy chain connection failed", error, {
|
||||
operation: "socks5_chain_connect_failed",
|
||||
chainLength: proxyChain.length,
|
||||
targetHost,
|
||||
targetPort,
|
||||
errorMessage: error instanceof Error ? error.message : "Unknown error",
|
||||
});
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
@@ -8,6 +8,7 @@ class SystemCrypto {
|
||||
private jwtSecret: string | null = null;
|
||||
private databaseKey: Buffer | null = null;
|
||||
private internalAuthToken: string | null = null;
|
||||
private credentialSharingKey: Buffer | null = null;
|
||||
|
||||
private constructor() {}
|
||||
|
||||
@@ -158,6 +159,48 @@ class SystemCrypto {
|
||||
return this.internalAuthToken!;
|
||||
}
|
||||
|
||||
/**
 * Load or create the credential-sharing key (CSKEK).
 *
 * Resolution order:
 *   1. CREDENTIAL_SHARING_KEY from the process environment;
 *   2. CREDENTIAL_SHARING_KEY line in the data-dir .env file;
 *   3. auto-generate a fresh key and persist it.
 *
 * @throws Error when none of the sources can produce a key.
 */
async initializeCredentialSharingKey(): Promise<void> {
  try {
    const dataDir = process.env.DATA_DIR || "./db/data";
    const envPath = path.join(dataDir, ".env");

    // 1) Environment variable. NOTE(review): the check is `>= 64` hex chars
    // rather than exactly 64, so an over-long value yields a key longer than
    // 32 bytes — confirm whether that is intended.
    const envKey = process.env.CREDENTIAL_SHARING_KEY;
    if (envKey && envKey.length >= 64) {
      this.credentialSharingKey = Buffer.from(envKey, "hex");
      return;
    }

    // 2) .env file in the data directory; on success also export the value
    // into the process environment for later lookups.
    try {
      const envContent = await fs.readFile(envPath, "utf8");
      const csKeyMatch = envContent.match(/^CREDENTIAL_SHARING_KEY=(.+)$/m);
      if (csKeyMatch && csKeyMatch[1] && csKeyMatch[1].length >= 64) {
        this.credentialSharingKey = Buffer.from(csKeyMatch[1], "hex");
        process.env.CREDENTIAL_SHARING_KEY = csKeyMatch[1];
        return;
      }
    } catch (fileError) {} // missing/unreadable .env is expected on first run

    // 3) No usable key found anywhere: generate and persist one.
    await this.generateAndGuideCredentialSharingKey();
  } catch (error) {
    databaseLogger.error(
      "Failed to initialize credential sharing key",
      error,
      {
        operation: "cred_sharing_key_init_failed",
        dataDir: process.env.DATA_DIR || "./db/data",
      },
    );
    throw new Error("Credential sharing key initialization failed");
  }
}
|
||||
|
||||
async getCredentialSharingKey(): Promise<Buffer> {
|
||||
if (!this.credentialSharingKey) {
|
||||
await this.initializeCredentialSharingKey();
|
||||
}
|
||||
return this.credentialSharingKey!;
|
||||
}
|
||||
|
||||
private async generateAndGuideUser(): Promise<void> {
|
||||
const newSecret = crypto.randomBytes(32).toString("hex");
|
||||
const instanceId = crypto.randomBytes(8).toString("hex");
|
||||
@@ -210,6 +253,26 @@ class SystemCrypto {
|
||||
);
|
||||
}
|
||||
|
||||
private async generateAndGuideCredentialSharingKey(): Promise<void> {
|
||||
const newKey = crypto.randomBytes(32);
|
||||
const newKeyHex = newKey.toString("hex");
|
||||
const instanceId = crypto.randomBytes(8).toString("hex");
|
||||
|
||||
this.credentialSharingKey = newKey;
|
||||
|
||||
await this.updateEnvFile("CREDENTIAL_SHARING_KEY", newKeyHex);
|
||||
|
||||
databaseLogger.success(
|
||||
"Credential sharing key auto-generated and saved to .env",
|
||||
{
|
||||
operation: "cred_sharing_key_auto_generated",
|
||||
instanceId,
|
||||
envVarName: "CREDENTIAL_SHARING_KEY",
|
||||
note: "Used for offline credential sharing - no restart required",
|
||||
},
|
||||
);
|
||||
}
|
||||
|
||||
async validateJWTSecret(): Promise<boolean> {
|
||||
try {
|
||||
const secret = await this.getJWTSecret();
|
||||
|
||||
@@ -177,30 +177,57 @@ class UserDataImport {
|
||||
continue;
|
||||
}
|
||||
|
||||
const tempId = `import-ssh-${targetUserId}-${Date.now()}-${imported}`;
|
||||
const newHostData = {
|
||||
const existing = await getDb()
|
||||
.select()
|
||||
.from(sshData)
|
||||
.where(
|
||||
and(
|
||||
eq(sshData.userId, targetUserId),
|
||||
eq(sshData.ip, host.ip as string),
|
||||
eq(sshData.port, host.port as number),
|
||||
eq(sshData.username, host.username as string),
|
||||
),
|
||||
);
|
||||
|
||||
if (existing.length > 0 && !options.replaceExisting) {
|
||||
skipped++;
|
||||
continue;
|
||||
}
|
||||
|
||||
const newHostData: any = {
|
||||
...host,
|
||||
id: tempId,
|
||||
userId: targetUserId,
|
||||
createdAt: new Date().toISOString(),
|
||||
updatedAt: new Date().toISOString(),
|
||||
};
|
||||
|
||||
let processedHostData = newHostData;
|
||||
if (existing.length === 0) {
|
||||
newHostData.createdAt = new Date().toISOString();
|
||||
}
|
||||
|
||||
let processedHostData: any = newHostData;
|
||||
if (options.userDataKey) {
|
||||
processedHostData = DataCrypto.encryptRecord(
|
||||
"ssh_data",
|
||||
newHostData,
|
||||
targetUserId,
|
||||
options.userDataKey,
|
||||
);
|
||||
) as Record<string, unknown>;
|
||||
}
|
||||
|
||||
delete processedHostData.id;
|
||||
|
||||
await getDb()
|
||||
.insert(sshData)
|
||||
.values(processedHostData as unknown as typeof sshData.$inferInsert);
|
||||
if (existing.length > 0 && options.replaceExisting) {
|
||||
await getDb()
|
||||
.update(sshData)
|
||||
.set(processedHostData as unknown as typeof sshData.$inferInsert)
|
||||
.where(eq(sshData.id, existing[0].id));
|
||||
} else {
|
||||
await getDb()
|
||||
.insert(sshData)
|
||||
.values(
|
||||
processedHostData as unknown as typeof sshData.$inferInsert,
|
||||
);
|
||||
}
|
||||
imported++;
|
||||
} catch (error) {
|
||||
errors.push(
|
||||
@@ -233,34 +260,59 @@ class UserDataImport {
|
||||
continue;
|
||||
}
|
||||
|
||||
const tempCredId = `import-cred-${targetUserId}-${Date.now()}-${imported}`;
|
||||
const newCredentialData = {
|
||||
const existing = await getDb()
|
||||
.select()
|
||||
.from(sshCredentials)
|
||||
.where(
|
||||
and(
|
||||
eq(sshCredentials.userId, targetUserId),
|
||||
eq(sshCredentials.name, credential.name as string),
|
||||
),
|
||||
);
|
||||
|
||||
if (existing.length > 0 && !options.replaceExisting) {
|
||||
skipped++;
|
||||
continue;
|
||||
}
|
||||
|
||||
const newCredentialData: any = {
|
||||
...credential,
|
||||
id: tempCredId,
|
||||
userId: targetUserId,
|
||||
usageCount: 0,
|
||||
lastUsed: null,
|
||||
createdAt: new Date().toISOString(),
|
||||
updatedAt: new Date().toISOString(),
|
||||
};
|
||||
|
||||
let processedCredentialData = newCredentialData;
|
||||
if (existing.length === 0) {
|
||||
newCredentialData.usageCount = 0;
|
||||
newCredentialData.lastUsed = null;
|
||||
newCredentialData.createdAt = new Date().toISOString();
|
||||
}
|
||||
|
||||
let processedCredentialData: any = newCredentialData;
|
||||
if (options.userDataKey) {
|
||||
processedCredentialData = DataCrypto.encryptRecord(
|
||||
"ssh_credentials",
|
||||
newCredentialData,
|
||||
targetUserId,
|
||||
options.userDataKey,
|
||||
);
|
||||
) as Record<string, unknown>;
|
||||
}
|
||||
|
||||
delete processedCredentialData.id;
|
||||
|
||||
await getDb()
|
||||
.insert(sshCredentials)
|
||||
.values(
|
||||
processedCredentialData as unknown as typeof sshCredentials.$inferInsert,
|
||||
);
|
||||
if (existing.length > 0 && options.replaceExisting) {
|
||||
await getDb()
|
||||
.update(sshCredentials)
|
||||
.set(
|
||||
processedCredentialData as unknown as typeof sshCredentials.$inferInsert,
|
||||
)
|
||||
.where(eq(sshCredentials.id, existing[0].id));
|
||||
} else {
|
||||
await getDb()
|
||||
.insert(sshCredentials)
|
||||
.values(
|
||||
processedCredentialData as unknown as typeof sshCredentials.$inferInsert,
|
||||
);
|
||||
}
|
||||
imported++;
|
||||
} catch (error) {
|
||||
errors.push(
|
||||
|
||||
155
src/components/ui/alert-dialog.tsx
Normal file
155
src/components/ui/alert-dialog.tsx
Normal file
@@ -0,0 +1,155 @@
|
||||
import * as React from "react";
|
||||
import * as AlertDialogPrimitive from "@radix-ui/react-alert-dialog";
|
||||
|
||||
import { cn } from "@/lib/utils";
|
||||
import { buttonVariants } from "@/components/ui/button";
|
||||
|
||||
// Accessible alert-dialog components: thin styled wrappers around the Radix
// UI AlertDialog primitives. Each wrapper tags its element with a `data-slot`
// attribute for styling/targeting.

// Root state container for the dialog.
function AlertDialog({
  ...props
}: React.ComponentProps<typeof AlertDialogPrimitive.Root>) {
  return <AlertDialogPrimitive.Root data-slot="alert-dialog" {...props} />;
}

// Element that opens the dialog when activated.
function AlertDialogTrigger({
  ...props
}: React.ComponentProps<typeof AlertDialogPrimitive.Trigger>) {
  return (
    <AlertDialogPrimitive.Trigger data-slot="alert-dialog-trigger" {...props} />
  );
}

// Portals dialog content out of the normal DOM flow.
function AlertDialogPortal({
  ...props
}: React.ComponentProps<typeof AlertDialogPrimitive.Portal>) {
  return (
    <AlertDialogPrimitive.Portal data-slot="alert-dialog-portal" {...props} />
  );
}

// Dimmed, animated backdrop rendered behind the dialog.
function AlertDialogOverlay({
  className,
  ...props
}: React.ComponentProps<typeof AlertDialogPrimitive.Overlay>) {
  return (
    <AlertDialogPrimitive.Overlay
      data-slot="alert-dialog-overlay"
      className={cn(
        "data-[state=open]:animate-in data-[state=closed]:animate-out data-[state=closed]:fade-out-0 data-[state=open]:fade-in-0 fixed inset-0 z-50 bg-overlay",
        className,
      )}
      {...props}
    />
  );
}

// Centered dialog panel; renders its own portal and overlay.
function AlertDialogContent({
  className,
  ...props
}: React.ComponentProps<typeof AlertDialogPrimitive.Content>) {
  return (
    <AlertDialogPortal>
      <AlertDialogOverlay />
      <AlertDialogPrimitive.Content
        data-slot="alert-dialog-content"
        className={cn(
          "bg-background data-[state=open]:animate-in data-[state=closed]:animate-out data-[state=closed]:fade-out-0 data-[state=open]:fade-in-0 data-[state=closed]:zoom-out-95 data-[state=open]:zoom-in-95 fixed top-[50%] left-[50%] z-50 grid w-full max-w-[calc(100%-2rem)] translate-x-[-50%] translate-y-[-50%] gap-4 rounded-lg border p-6 shadow-lg duration-200 sm:max-w-lg",
          className,
        )}
        {...props}
      />
    </AlertDialogPortal>
  );
}

// Layout helper: stacked title/description area.
function AlertDialogHeader({
  className,
  ...props
}: React.ComponentProps<"div">) {
  return (
    <div
      data-slot="alert-dialog-header"
      className={cn("flex flex-col gap-2 text-center sm:text-left", className)}
      {...props}
    />
  );
}

// Layout helper: action-button row (column-reversed on small screens).
function AlertDialogFooter({
  className,
  ...props
}: React.ComponentProps<"div">) {
  return (
    <div
      data-slot="alert-dialog-footer"
      className={cn(
        "flex flex-col-reverse gap-2 sm:flex-row sm:justify-end",
        className,
      )}
      {...props}
    />
  );
}

// Dialog heading.
function AlertDialogTitle({
  className,
  ...props
}: React.ComponentProps<typeof AlertDialogPrimitive.Title>) {
  return (
    <AlertDialogPrimitive.Title
      data-slot="alert-dialog-title"
      className={cn("text-lg font-semibold", className)}
      {...props}
    />
  );
}

// Supporting description text below the title.
function AlertDialogDescription({
  className,
  ...props
}: React.ComponentProps<typeof AlertDialogPrimitive.Description>) {
  return (
    <AlertDialogPrimitive.Description
      data-slot="alert-dialog-description"
      className={cn("text-muted-foreground text-sm", className)}
      {...props}
    />
  );
}

// Confirm button; styled with the default button variant.
function AlertDialogAction({
  className,
  ...props
}: React.ComponentProps<typeof AlertDialogPrimitive.Action>) {
  return (
    <AlertDialogPrimitive.Action
      className={cn(buttonVariants(), className)}
      {...props}
    />
  );
}

// Cancel button; styled with the outline button variant.
function AlertDialogCancel({
  className,
  ...props
}: React.ComponentProps<typeof AlertDialogPrimitive.Cancel>) {
  return (
    <AlertDialogPrimitive.Cancel
      className={cn(buttonVariants({ variant: "outline" }), className)}
      {...props}
    />
  );
}

export {
  AlertDialog,
  AlertDialogPortal,
  AlertDialogOverlay,
  AlertDialogTrigger,
  AlertDialogContent,
  AlertDialogHeader,
  AlertDialogFooter,
  AlertDialogTitle,
  AlertDialogDescription,
  AlertDialogAction,
  AlertDialogCancel,
};
|
||||
@@ -15,7 +15,7 @@ const badgeVariants = cva(
|
||||
secondary:
|
||||
"border-transparent bg-secondary text-secondary-foreground [a&]:hover:bg-secondary/90",
|
||||
destructive:
|
||||
"border-transparent bg-destructive text-white [a&]:hover:bg-destructive/90 focus-visible:ring-destructive/20 dark:focus-visible:ring-destructive/40 dark:bg-destructive/60",
|
||||
"border-transparent bg-destructive text-foreground [a&]:hover:bg-destructive/90 focus-visible:ring-destructive/20 dark:focus-visible:ring-destructive/40 dark:bg-destructive/60",
|
||||
outline:
|
||||
"text-foreground [a&]:hover:bg-accent [a&]:hover:text-accent-foreground",
|
||||
},
|
||||
|
||||
@@ -13,7 +13,7 @@ const buttonVariants = cva(
|
||||
default:
|
||||
"bg-primary text-primary-foreground shadow-xs hover:bg-primary/90",
|
||||
destructive:
|
||||
"bg-destructive text-white shadow-xs hover:bg-destructive/90 focus-visible:ring-destructive/20 dark:focus-visible:ring-destructive/40 dark:bg-destructive/60",
|
||||
"bg-destructive text-foreground shadow-xs hover:bg-destructive/90 focus-visible:ring-destructive/20 dark:focus-visible:ring-destructive/40 dark:bg-destructive/60",
|
||||
outline:
|
||||
"border bg-background shadow-xs hover:bg-accent hover:text-accent-foreground dark:bg-input/30 dark:border-input dark:hover:bg-input/50",
|
||||
secondary:
|
||||
|
||||
@@ -7,7 +7,7 @@ function Card({ className, ...props }: React.ComponentProps<"div">) {
|
||||
<div
|
||||
data-slot="card"
|
||||
className={cn(
|
||||
"bg-card text-card-foreground flex flex-col gap-6 rounded-xl border py-6 shadow-sm",
|
||||
"bg-elevated text-foreground flex flex-col gap-6 rounded-lg border-2 border-edge py-6 shadow-sm",
|
||||
className,
|
||||
)}
|
||||
{...props}
|
||||
|
||||
@@ -90,7 +90,7 @@ function CommandList({
|
||||
<CommandPrimitive.List
|
||||
data-slot="command-list"
|
||||
className={cn(
|
||||
"max-h-[300px] scroll-py-1 overflow-x-hidden overflow-y-auto",
|
||||
"max-h-[300px] scroll-py-1 overflow-x-hidden overflow-y-auto thin-scrollbar",
|
||||
className,
|
||||
)}
|
||||
{...props}
|
||||
|
||||
@@ -36,7 +36,7 @@ function DialogOverlay({
|
||||
<DialogPrimitive.Overlay
|
||||
data-slot="dialog-overlay"
|
||||
className={cn(
|
||||
"data-[state=open]:animate-in data-[state=closed]:animate-out data-[state=closed]:fade-out-0 data-[state=open]:fade-in-0 fixed inset-0 z-50 bg-black/50",
|
||||
"data-[state=open]:animate-in data-[state=closed]:animate-out data-[state=closed]:fade-out-0 data-[state=open]:fade-in-0 fixed inset-0 z-50 bg-overlay",
|
||||
className,
|
||||
)}
|
||||
{...props}
|
||||
|
||||
@@ -8,7 +8,7 @@ function Input({ className, type, ...props }: React.ComponentProps<"input">) {
|
||||
type={type}
|
||||
data-slot="input"
|
||||
className={cn(
|
||||
"file:text-foreground placeholder:text-muted-foreground selection:bg-primary selection:text-primary-foreground dark:bg-input/30 border-input flex h-9 w-full min-w-0 rounded-md border bg-transparent px-3 py-1 text-base shadow-xs transition-[color,box-shadow] duration-200 outline-none file:inline-flex file:h-7 file:border-0 file:bg-transparent file:text-sm file:font-medium disabled:pointer-events-none disabled:cursor-not-allowed disabled:opacity-50 md:text-sm",
|
||||
"file:text-foreground placeholder:text-muted-foreground selection:bg-primary selection:text-primary-foreground bg-elevated dark:bg-input/30 border-input flex h-9 w-full min-w-0 rounded-md border px-3 py-1 text-base shadow-xs transition-[color,box-shadow] duration-200 outline-none file:inline-flex file:h-7 file:border-0 file:bg-transparent file:text-sm file:font-medium disabled:pointer-events-none disabled:cursor-not-allowed disabled:opacity-50 md:text-sm",
|
||||
"focus-visible:border-ring focus-visible:ring-ring/50 focus-visible:ring-[3px]",
|
||||
"aria-invalid:ring-destructive/20 dark:aria-invalid:ring-destructive/40 aria-invalid:border-destructive",
|
||||
className,
|
||||
|
||||
@@ -37,13 +37,13 @@ function ResizableHandle({
|
||||
<ResizablePrimitive.PanelResizeHandle
|
||||
data-slot="resizable-handle"
|
||||
className={cn(
|
||||
"relative flex w-1 items-center justify-center after:absolute after:inset-y-0 after:left-1/2 after:w-1 after:-translate-x-1/2 focus-visible:ring-1 focus-visible:ring-offset-1 focus-visible:outline-hidden data-[panel-group-direction=vertical]:h-1 data-[panel-group-direction=vertical]:w-full data-[panel-group-direction=vertical]:after:left-0 data-[panel-group-direction=vertical]:after:h-1 data-[panel-group-direction=vertical]:after:w-full data-[panel-group-direction=vertical]:after:translate-x-0 data-[panel-group-direction=vertical]:after:-translate-y-1/2 [&[data-panel-group-direction=vertical]>div]:rotate-90 bg-dark-border-hover hover:bg-dark-active active:bg-dark-pressed transition-colors duration-150",
|
||||
"relative flex w-1 items-center justify-center after:absolute after:inset-y-0 after:left-1/2 after:w-1 after:-translate-x-1/2 focus-visible:ring-1 focus-visible:ring-offset-1 focus-visible:outline-hidden data-[panel-group-direction=vertical]:h-1 data-[panel-group-direction=vertical]:w-full data-[panel-group-direction=vertical]:after:left-0 data-[panel-group-direction=vertical]:after:h-1 data-[panel-group-direction=vertical]:after:w-full data-[panel-group-direction=vertical]:after:translate-x-0 data-[panel-group-direction=vertical]:after:-translate-y-1/2 [&[data-panel-group-direction=vertical]>div]:rotate-90 bg-edge-hover hover:bg-interact active:bg-pressed transition-colors duration-150",
|
||||
className,
|
||||
)}
|
||||
{...props}
|
||||
>
|
||||
{withHandle && (
|
||||
<div className="bg-dark-border-hover hover:bg-dark-active active:bg-dark-pressed z-10 flex h-4 w-3 items-center justify-center rounded-xs border transition-colors duration-150">
|
||||
<div className="bg-edge-hover hover:bg-interact active:bg-pressed z-10 flex h-4 w-3 items-center justify-center rounded-xs border transition-colors duration-150">
|
||||
<GripVerticalIcon className="size-2.5" />
|
||||
</div>
|
||||
)}
|
||||
|
||||
@@ -59,7 +59,7 @@ function SelectContent({
|
||||
<SelectPrimitive.Content
|
||||
data-slot="select-content"
|
||||
className={cn(
|
||||
"bg-popover text-popover-foreground data-[state=open]:animate-in data-[state=closed]:animate-out data-[state=closed]:fade-out-0 data-[state=open]:fade-in-0 data-[state=closed]:zoom-out-95 data-[state=open]:zoom-in-95 data-[side=bottom]:slide-in-from-top-2 data-[side=left]:slide-in-from-right-2 data-[side=right]:slide-in-from-left-2 data-[side=top]:slide-in-from-bottom-2 relative z-50 max-h-(--radix-select-content-available-height) min-w-[8rem] origin-(--radix-select-content-transform-origin) overflow-x-hidden overflow-y-auto rounded-md border shadow-md",
|
||||
"bg-popover text-popover-foreground data-[state=open]:animate-in data-[state=closed]:animate-out data-[state=closed]:fade-out-0 data-[state=open]:fade-in-0 data-[state=closed]:zoom-out-95 data-[state=open]:zoom-in-95 data-[side=bottom]:slide-in-from-top-2 data-[side=left]:slide-in-from-right-2 data-[side=right]:slide-in-from-left-2 data-[side=top]:slide-in-from-bottom-2 relative z-50 max-h-(--radix-select-content-available-height) min-w-[8rem] origin-(--radix-select-content-transform-origin) overflow-x-hidden overflow-y-auto thin-scrollbar rounded-md border shadow-md",
|
||||
position === "popper" &&
|
||||
"data-[side=bottom]:translate-y-1 data-[side=left]:-translate-x-1 data-[side=right]:translate-x-1 data-[side=top]:-translate-y-1",
|
||||
className,
|
||||
|
||||
@@ -34,7 +34,7 @@ function SheetOverlay({
|
||||
<SheetPrimitive.Overlay
|
||||
data-slot="sheet-overlay"
|
||||
className={cn(
|
||||
"data-[state=open]:animate-in data-[state=closed]:animate-out data-[state=closed]:fade-out-0 data-[state=open]:fade-in-0 fixed inset-0 z-50 bg-black/50 data-[state=closed]:pointer-events-none",
|
||||
"data-[state=open]:animate-in data-[state=closed]:animate-out data-[state=closed]:fade-out-0 data-[state=open]:fade-in-0 fixed inset-0 z-50 bg-overlay data-[state=closed]:pointer-events-none",
|
||||
className,
|
||||
)}
|
||||
{...props}
|
||||
|
||||
@@ -365,7 +365,7 @@ function SidebarContent({ className, ...props }: React.ComponentProps<"div">) {
|
||||
data-slot="sidebar-content"
|
||||
data-sidebar="content"
|
||||
className={cn(
|
||||
"flex min-h-0 flex-1 flex-col gap-2 overflow-auto group-data-[collapsible=icon]:overflow-hidden",
|
||||
"flex min-h-0 flex-1 flex-col gap-2 overflow-auto thin-scrollbar group-data-[collapsible=icon]:overflow-hidden",
|
||||
className,
|
||||
)}
|
||||
{...props}
|
||||
|
||||
@@ -51,7 +51,7 @@ function Slider({
|
||||
<SliderPrimitive.Thumb
|
||||
data-slot="slider-thumb"
|
||||
key={index}
|
||||
className="border-primary ring-ring/50 block size-4 shrink-0 rounded-full border bg-white shadow-sm transition-[color,box-shadow] hover:ring-4 focus-visible:ring-4 focus-visible:outline-hidden disabled:pointer-events-none disabled:opacity-50"
|
||||
className="border-primary ring-ring/50 block size-4 shrink-0 rounded-full border bg-elevated shadow-sm transition-[color,box-shadow] hover:ring-4 focus-visible:ring-4 focus-visible:outline-hidden disabled:pointer-events-none disabled:opacity-50"
|
||||
/>
|
||||
))}
|
||||
</SliderPrimitive.Root>
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { useTheme } from "next-themes";
|
||||
import { useTheme } from "@/components/theme-provider";
|
||||
import { Toaster as Sonner, type ToasterProps, toast } from "sonner";
|
||||
import { useRef } from "react";
|
||||
|
||||
|
||||
@@ -6,7 +6,7 @@ function Table({ className, ...props }: React.ComponentProps<"table">) {
|
||||
return (
|
||||
<div
|
||||
data-slot="table-container"
|
||||
className="relative w-full overflow-x-auto"
|
||||
className="relative w-full overflow-x-auto thin-scrollbar"
|
||||
>
|
||||
<table
|
||||
data-slot="table"
|
||||
|
||||
@@ -9,7 +9,7 @@ const Textarea = React.forwardRef<HTMLTextAreaElement, TextareaProps>(
|
||||
return (
|
||||
<textarea
|
||||
className={cn(
|
||||
"flex min-h-[80px] w-full rounded-md border border-input bg-background px-3 py-2 text-sm ring-offset-background placeholder:text-muted-foreground focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-ring focus-visible:ring-offset-2 disabled:cursor-not-allowed disabled:opacity-50",
|
||||
"flex min-h-[80px] w-full rounded-md border border-input bg-transparent dark:bg-input/30 px-3 py-2 text-sm shadow-xs transition-[color,box-shadow] duration-200 outline-none placeholder:text-muted-foreground focus-visible:border-ring focus-visible:ring-ring/50 focus-visible:ring-[3px] disabled:cursor-not-allowed disabled:opacity-50 disabled:pointer-events-none",
|
||||
className,
|
||||
)}
|
||||
ref={ref}
|
||||
|
||||
@@ -36,7 +36,7 @@ function TooltipTrigger({
|
||||
|
||||
function TooltipContent({
|
||||
className,
|
||||
sideOffset = 0,
|
||||
sideOffset = 4,
|
||||
children,
|
||||
...props
|
||||
}: React.ComponentProps<typeof TooltipPrimitive.Content>) {
|
||||
@@ -46,7 +46,7 @@ function TooltipContent({
|
||||
data-slot="tooltip-content"
|
||||
sideOffset={sideOffset}
|
||||
className={cn(
|
||||
"bg-primary text-primary-foreground animate-in fade-in-0 zoom-in-95 data-[state=closed]:animate-out data-[state=closed]:fade-out-0 data-[state=closed]:zoom-out-95 data-[side=bottom]:slide-in-from-top-2 data-[side=left]:slide-in-from-right-2 data-[side=right]:slide-in-from-left-2 data-[side=top]:slide-in-from-bottom-2 z-50 w-fit origin-(--radix-tooltip-content-transform-origin) rounded-md px-3 py-1.5 text-xs text-balance",
|
||||
"bg-elevated text-foreground border border-edge-medium shadow-lg animate-in fade-in-0 zoom-in-95 data-[state=closed]:animate-out data-[state=closed]:fade-out-0 data-[state=closed]:zoom-out-95 data-[side=bottom]:slide-in-from-top-2 data-[side=left]:slide-in-from-right-2 data-[side=right]:slide-in-from-left-2 data-[side=top]:slide-in-from-bottom-2 z-50 w-fit origin-(--radix-tooltip-content-transform-origin) rounded-md px-3 py-1.5 text-xs text-balance",
|
||||
className,
|
||||
)}
|
||||
{...props}
|
||||
|
||||
@@ -56,6 +56,62 @@ export const TERMINAL_THEMES: Record<string, TerminalTheme> = {
|
||||
},
|
||||
},
|
||||
|
||||
termixDark: {
|
||||
name: "Termix Dark",
|
||||
category: "dark",
|
||||
colors: {
|
||||
background: "#18181b",
|
||||
foreground: "#f7f7f7",
|
||||
cursor: "#f7f7f7",
|
||||
cursorAccent: "#18181b",
|
||||
selectionBackground: "#3a3a3d",
|
||||
black: "#2e3436",
|
||||
red: "#cc0000",
|
||||
green: "#4e9a06",
|
||||
yellow: "#c4a000",
|
||||
blue: "#3465a4",
|
||||
magenta: "#75507b",
|
||||
cyan: "#06989a",
|
||||
white: "#d3d7cf",
|
||||
brightBlack: "#555753",
|
||||
brightRed: "#ef2929",
|
||||
brightGreen: "#8ae234",
|
||||
brightYellow: "#fce94f",
|
||||
brightBlue: "#729fcf",
|
||||
brightMagenta: "#ad7fa8",
|
||||
brightCyan: "#34e2e2",
|
||||
brightWhite: "#eeeeec",
|
||||
},
|
||||
},
|
||||
|
||||
termixLight: {
|
||||
name: "Termix Light",
|
||||
category: "light",
|
||||
colors: {
|
||||
background: "#ffffff",
|
||||
foreground: "#18181b",
|
||||
cursor: "#18181b",
|
||||
cursorAccent: "#ffffff",
|
||||
selectionBackground: "#d1d5db",
|
||||
black: "#18181b",
|
||||
red: "#dc2626",
|
||||
green: "#16a34a",
|
||||
yellow: "#ca8a04",
|
||||
blue: "#2563eb",
|
||||
magenta: "#9333ea",
|
||||
cyan: "#0891b2",
|
||||
white: "#f4f4f5",
|
||||
brightBlack: "#71717a",
|
||||
brightRed: "#ef4444",
|
||||
brightGreen: "#22c55e",
|
||||
brightYellow: "#eab308",
|
||||
brightBlue: "#3b82f6",
|
||||
brightMagenta: "#a855f7",
|
||||
brightCyan: "#06b6d4",
|
||||
brightWhite: "#ffffff",
|
||||
},
|
||||
},
|
||||
|
||||
dracula: {
|
||||
name: "Dracula",
|
||||
category: "dark",
|
||||
@@ -689,7 +745,7 @@ export const DEFAULT_TERMINAL_CONFIG = {
|
||||
fontSize: 14,
|
||||
fontFamily: "Caskaydia Cove Nerd Font Mono",
|
||||
letterSpacing: 0,
|
||||
lineHeight: 1.2,
|
||||
lineHeight: 1.0,
|
||||
theme: "termix",
|
||||
|
||||
scrollback: 10000,
|
||||
@@ -705,6 +761,7 @@ export const DEFAULT_TERMINAL_CONFIG = {
|
||||
startupSnippetId: null as number | null,
|
||||
autoMosh: false,
|
||||
moshCommand: "mosh-server new -s -l LANG=en_US.UTF-8",
|
||||
sudoPasswordAutoFill: false,
|
||||
};
|
||||
|
||||
export type TerminalConfigType = typeof DEFAULT_TERMINAL_CONFIG;
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { useState } from "react";
|
||||
import { useState, useEffect, useCallback } from "react";
|
||||
import { toast } from "sonner";
|
||||
|
||||
interface ConfirmationOptions {
|
||||
@@ -9,10 +9,47 @@ interface ConfirmationOptions {
|
||||
variant?: "default" | "destructive";
|
||||
}
|
||||
|
||||
interface ToastConfirmOptions {
|
||||
confirmOnEnter?: boolean;
|
||||
duration?: number;
|
||||
}
|
||||
|
||||
export function useConfirmation() {
|
||||
const [isOpen, setIsOpen] = useState(false);
|
||||
const [options, setOptions] = useState<ConfirmationOptions | null>(null);
|
||||
const [onConfirm, setOnConfirm] = useState<(() => void) | null>(null);
|
||||
const [activeToastId, setActiveToastId] = useState<string | number | null>(null);
|
||||
const [pendingConfirmCallback, setPendingConfirmCallback] = useState<(() => void) | null>(null);
|
||||
const [pendingResolve, setPendingResolve] = useState<((value: boolean) => void) | null>(null);
|
||||
|
||||
const handleEnterKey = useCallback((event: KeyboardEvent) => {
|
||||
if (event.key === "Enter" && activeToastId !== null) {
|
||||
event.preventDefault();
|
||||
event.stopPropagation();
|
||||
|
||||
if (pendingConfirmCallback) {
|
||||
pendingConfirmCallback();
|
||||
}
|
||||
if (pendingResolve) {
|
||||
pendingResolve(true);
|
||||
}
|
||||
|
||||
toast.dismiss(activeToastId);
|
||||
setActiveToastId(null);
|
||||
setPendingConfirmCallback(null);
|
||||
setPendingResolve(null);
|
||||
}
|
||||
}, [activeToastId, pendingConfirmCallback, pendingResolve]);
|
||||
|
||||
useEffect(() => {
|
||||
if (activeToastId !== null) {
|
||||
// Use capture phase to intercept Enter before terminal receives it
|
||||
window.addEventListener("keydown", handleEnterKey, true);
|
||||
return () => {
|
||||
window.removeEventListener("keydown", handleEnterKey, true);
|
||||
};
|
||||
}
|
||||
}, [activeToastId, handleEnterKey]);
|
||||
|
||||
const confirm = (opts: ConfirmationOptions, callback: () => void) => {
|
||||
setOptions(opts);
|
||||
@@ -36,24 +73,69 @@ export function useConfirmation() {
|
||||
};
|
||||
|
||||
const confirmWithToast = (
|
||||
message: string,
|
||||
callback: () => void,
|
||||
variant: "default" | "destructive" = "default",
|
||||
) => {
|
||||
const actionText = variant === "destructive" ? "Delete" : "Confirm";
|
||||
const cancelText = "Cancel";
|
||||
opts: ConfirmationOptions | string,
|
||||
callback?: () => void,
|
||||
variantOrConfirmLabel: "default" | "destructive" | string = "Confirm",
|
||||
cancelLabel: string = "Cancel",
|
||||
toastOptions: ToastConfirmOptions = { confirmOnEnter: false },
|
||||
): Promise<boolean> => {
|
||||
return new Promise((resolve) => {
|
||||
const isVariant =
|
||||
variantOrConfirmLabel === "default" ||
|
||||
variantOrConfirmLabel === "destructive";
|
||||
const confirmLabel = isVariant ? "Confirm" : variantOrConfirmLabel;
|
||||
|
||||
toast(message, {
|
||||
action: {
|
||||
label: actionText,
|
||||
onClick: callback,
|
||||
},
|
||||
cancel: {
|
||||
label: cancelText,
|
||||
onClick: () => {},
|
||||
},
|
||||
duration: 10000,
|
||||
className: variant === "destructive" ? "border-red-500" : "",
|
||||
const { confirmOnEnter = false, duration = 8000 } = toastOptions;
|
||||
|
||||
const handleToastConfirm = () => {
|
||||
if (callback) callback();
|
||||
resolve(true);
|
||||
setActiveToastId(null);
|
||||
setPendingConfirmCallback(null);
|
||||
setPendingResolve(null);
|
||||
};
|
||||
|
||||
const handleToastCancel = () => {
|
||||
resolve(false);
|
||||
setActiveToastId(null);
|
||||
setPendingConfirmCallback(null);
|
||||
setPendingResolve(null);
|
||||
};
|
||||
|
||||
const message = typeof opts === "string" ? opts : opts.description;
|
||||
const actualConfirmLabel = typeof opts === "object" && opts.confirmText ? opts.confirmText : confirmLabel;
|
||||
const actualCancelLabel = typeof opts === "object" && opts.cancelText ? opts.cancelText : cancelLabel;
|
||||
|
||||
const toastId = toast(message, {
|
||||
duration,
|
||||
action: {
|
||||
label: confirmOnEnter ? `${actualConfirmLabel} ↵` : actualConfirmLabel,
|
||||
onClick: handleToastConfirm,
|
||||
},
|
||||
cancel: {
|
||||
label: actualCancelLabel,
|
||||
onClick: handleToastCancel,
|
||||
},
|
||||
onDismiss: () => {
|
||||
setActiveToastId(null);
|
||||
setPendingConfirmCallback(null);
|
||||
setPendingResolve(null);
|
||||
},
|
||||
onAutoClose: () => {
|
||||
resolve(false);
|
||||
setActiveToastId(null);
|
||||
setPendingConfirmCallback(null);
|
||||
setPendingResolve(null);
|
||||
},
|
||||
} as any);
|
||||
|
||||
if (confirmOnEnter) {
|
||||
setActiveToastId(toastId);
|
||||
setPendingConfirmCallback(() => () => {
|
||||
if (callback) callback();
|
||||
});
|
||||
setPendingResolve(() => resolve);
|
||||
}
|
||||
});
|
||||
};
|
||||
|
||||
|
||||
71
src/hooks/use-service-worker.ts
Normal file
71
src/hooks/use-service-worker.ts
Normal file
@@ -0,0 +1,71 @@
|
||||
import { useEffect, useState, useCallback } from "react";
|
||||
import { isElectron } from "@/ui/main-axios";
|
||||
|
||||
interface ServiceWorkerState {
|
||||
isSupported: boolean;
|
||||
isRegistered: boolean;
|
||||
updateAvailable: boolean;
|
||||
}
|
||||
|
||||
/**
|
||||
* Hook to manage PWA Service Worker registration.
|
||||
* Only registers in production web environment (not in Electron).
|
||||
*/
|
||||
export function useServiceWorker(): ServiceWorkerState {
|
||||
const [state, setState] = useState<ServiceWorkerState>({
|
||||
isSupported: false,
|
||||
isRegistered: false,
|
||||
updateAvailable: false,
|
||||
});
|
||||
|
||||
const handleUpdateFound = useCallback(
|
||||
(registration: ServiceWorkerRegistration) => {
|
||||
const newWorker = registration.installing;
|
||||
if (!newWorker) return;
|
||||
|
||||
newWorker.addEventListener("statechange", () => {
|
||||
if (
|
||||
newWorker.state === "installed" &&
|
||||
navigator.serviceWorker.controller
|
||||
) {
|
||||
setState((prev) => ({ ...prev, updateAvailable: true }));
|
||||
console.log("[SW] Update available");
|
||||
}
|
||||
});
|
||||
},
|
||||
[],
|
||||
);
|
||||
|
||||
useEffect(() => {
|
||||
const isSupported =
|
||||
"serviceWorker" in navigator && !isElectron() && import.meta.env.PROD;
|
||||
|
||||
setState((prev) => ({ ...prev, isSupported }));
|
||||
|
||||
if (!isSupported) return;
|
||||
|
||||
const registerSW = async () => {
|
||||
try {
|
||||
const registration = await navigator.serviceWorker.register("/sw.js");
|
||||
console.log("[SW] Registered:", registration.scope);
|
||||
|
||||
setState((prev) => ({ ...prev, isRegistered: true }));
|
||||
|
||||
registration.addEventListener("updatefound", () =>
|
||||
handleUpdateFound(registration),
|
||||
);
|
||||
} catch (error) {
|
||||
console.error("[SW] Registration failed:", error);
|
||||
}
|
||||
};
|
||||
|
||||
if (document.readyState === "complete") {
|
||||
registerSW();
|
||||
} else {
|
||||
window.addEventListener("load", registerSW);
|
||||
return () => window.removeEventListener("load", registerSW);
|
||||
}
|
||||
}, [handleUpdateFound]);
|
||||
|
||||
return state;
|
||||
}
|
||||
168
src/i18n/i18n.ts
168
src/i18n/i18n.ts
@@ -2,18 +2,79 @@ import i18n from "i18next";
|
||||
import { initReactI18next } from "react-i18next";
|
||||
import LanguageDetector from "i18next-browser-languagedetector";
|
||||
|
||||
import enTranslation from "../locales/en/translation.json";
|
||||
import zhTranslation from "../locales/zh/translation.json";
|
||||
import deTranslation from "../locales/de/translation.json";
|
||||
import ptbrTranslation from "../locales/pt-BR/translation.json";
|
||||
import ruTranslation from "../locales/ru/translation.json";
|
||||
import frTranslation from "../locales/fr/translation.json";
|
||||
import enTranslation from "../locales/en.json";
|
||||
import afTranslation from "../locales/translated/af.json";
|
||||
import arTranslation from "../locales/translated/ar.json";
|
||||
import bnTranslation from "../locales/translated/bn.json";
|
||||
import bgTranslation from "../locales/translated/bg.json";
|
||||
import caTranslation from "../locales/translated/ca.json";
|
||||
import csTranslation from "../locales/translated/cs.json";
|
||||
import daTranslation from "../locales/translated/da.json";
|
||||
import deTranslation from "../locales/translated/de.json";
|
||||
import elTranslation from "../locales/translated/el.json";
|
||||
import esTranslation from "../locales/translated/es.json";
|
||||
import fiTranslation from "../locales/translated/fi.json";
|
||||
import frTranslation from "../locales/translated/fr.json";
|
||||
import heTranslation from "../locales/translated/he.json";
|
||||
import hiTranslation from "../locales/translated/hi.json";
|
||||
import huTranslation from "../locales/translated/hu.json";
|
||||
import idTranslation from "../locales/translated/id.json";
|
||||
import itTranslation from "../locales/translated/it.json";
|
||||
import jaTranslation from "../locales/translated/ja.json";
|
||||
import koTranslation from "../locales/translated/ko.json";
|
||||
import nlTranslation from "../locales/translated/nl.json";
|
||||
import noTranslation from "../locales/translated/no.json";
|
||||
import plTranslation from "../locales/translated/pl.json";
|
||||
import ptTranslation from "../locales/translated/pt.json";
|
||||
import roTranslation from "../locales/translated/ro.json";
|
||||
import ruTranslation from "../locales/translated/ru.json";
|
||||
import srTranslation from "../locales/translated/sr.json";
|
||||
import svTranslation from "../locales/translated/sv.json";
|
||||
import thTranslation from "../locales/translated/th.json";
|
||||
import trTranslation from "../locales/translated/tr.json";
|
||||
import ukTranslation from "../locales/translated/uk.json";
|
||||
import viTranslation from "../locales/translated/vi.json";
|
||||
import zhTranslation from "../locales/translated/zh.json";
|
||||
|
||||
i18n
|
||||
.use(LanguageDetector)
|
||||
.use(initReactI18next)
|
||||
.init({
|
||||
supportedLngs: ["en", "zh", "de", "ptbr", "ru", "fr"],
|
||||
supportedLngs: [
|
||||
"en",
|
||||
"af",
|
||||
"ar",
|
||||
"bn",
|
||||
"bg",
|
||||
"ca",
|
||||
"cs",
|
||||
"da",
|
||||
"de",
|
||||
"el",
|
||||
"es",
|
||||
"fi",
|
||||
"fr",
|
||||
"he",
|
||||
"hi",
|
||||
"hu",
|
||||
"id",
|
||||
"it",
|
||||
"ja",
|
||||
"ko",
|
||||
"nl",
|
||||
"no",
|
||||
"pl",
|
||||
"pt",
|
||||
"ro",
|
||||
"ru",
|
||||
"sr",
|
||||
"sv",
|
||||
"th",
|
||||
"tr",
|
||||
"uk",
|
||||
"vi",
|
||||
"zh",
|
||||
],
|
||||
fallbackLng: "en",
|
||||
debug: false,
|
||||
|
||||
@@ -29,20 +90,101 @@ i18n
|
||||
en: {
|
||||
translation: enTranslation,
|
||||
},
|
||||
zh: {
|
||||
translation: zhTranslation,
|
||||
af: {
|
||||
translation: afTranslation,
|
||||
},
|
||||
ar: {
|
||||
translation: arTranslation,
|
||||
},
|
||||
bn: {
|
||||
translation: bnTranslation,
|
||||
},
|
||||
bg: {
|
||||
translation: bgTranslation,
|
||||
},
|
||||
ca: {
|
||||
translation: caTranslation,
|
||||
},
|
||||
cs: {
|
||||
translation: csTranslation,
|
||||
},
|
||||
da: {
|
||||
translation: daTranslation,
|
||||
},
|
||||
de: {
|
||||
translation: deTranslation,
|
||||
},
|
||||
ptbr: {
|
||||
translation: ptbrTranslation,
|
||||
el: {
|
||||
translation: elTranslation,
|
||||
},
|
||||
es: {
|
||||
translation: esTranslation,
|
||||
},
|
||||
fi: {
|
||||
translation: fiTranslation,
|
||||
},
|
||||
fr: {
|
||||
translation: frTranslation,
|
||||
},
|
||||
he: {
|
||||
translation: heTranslation,
|
||||
},
|
||||
hi: {
|
||||
translation: hiTranslation,
|
||||
},
|
||||
hu: {
|
||||
translation: huTranslation,
|
||||
},
|
||||
id: {
|
||||
translation: idTranslation,
|
||||
},
|
||||
it: {
|
||||
translation: itTranslation,
|
||||
},
|
||||
ja: {
|
||||
translation: jaTranslation,
|
||||
},
|
||||
ko: {
|
||||
translation: koTranslation,
|
||||
},
|
||||
nl: {
|
||||
translation: nlTranslation,
|
||||
},
|
||||
no: {
|
||||
translation: noTranslation,
|
||||
},
|
||||
pl: {
|
||||
translation: plTranslation,
|
||||
},
|
||||
pt: {
|
||||
translation: ptTranslation,
|
||||
},
|
||||
ro: {
|
||||
translation: roTranslation,
|
||||
},
|
||||
ru: {
|
||||
translation: ruTranslation,
|
||||
},
|
||||
fr: {
|
||||
translation: frTranslation,
|
||||
sr: {
|
||||
translation: srTranslation,
|
||||
},
|
||||
sv: {
|
||||
translation: svTranslation,
|
||||
},
|
||||
th: {
|
||||
translation: thTranslation,
|
||||
},
|
||||
tr: {
|
||||
translation: trTranslation,
|
||||
},
|
||||
uk: {
|
||||
translation: ukTranslation,
|
||||
},
|
||||
vi: {
|
||||
translation: viTranslation,
|
||||
},
|
||||
zh: {
|
||||
translation: zhTranslation,
|
||||
},
|
||||
},
|
||||
|
||||
|
||||
264
src/index.css
264
src/index.css
@@ -8,45 +8,76 @@
|
||||
font-weight: 400;
|
||||
|
||||
color-scheme: light dark;
|
||||
color: rgba(255, 255, 255, 0.87);
|
||||
background-color: #09090b;
|
||||
color: var(--foreground);
|
||||
background-color: var(--bg-base);
|
||||
|
||||
font-synthesis: none;
|
||||
text-rendering: optimizeLegibility;
|
||||
-webkit-font-smoothing: antialiased;
|
||||
-moz-osx-font-smoothing: grayscale;
|
||||
--radius: 0.625rem;
|
||||
--background: oklch(1 0 0);
|
||||
--foreground: oklch(0.141 0.005 285.823);
|
||||
--card: oklch(1 0 0);
|
||||
--card-foreground: oklch(0.141 0.005 285.823);
|
||||
--popover: oklch(1 0 0);
|
||||
--popover-foreground: oklch(0.141 0.005 285.823);
|
||||
--primary: oklch(0.21 0.006 285.885);
|
||||
--primary-foreground: oklch(0.985 0 0);
|
||||
--secondary: oklch(0.967 0.001 286.375);
|
||||
--secondary-foreground: oklch(0.21 0.006 285.885);
|
||||
--muted: oklch(0.967 0.001 286.375);
|
||||
--muted-foreground: oklch(0.552 0.016 285.938);
|
||||
--accent: oklch(0.967 0.001 286.375);
|
||||
--accent-foreground: oklch(0.21 0.006 285.885);
|
||||
--destructive: oklch(0.577 0.245 27.325);
|
||||
--border: oklch(0.92 0.004 286.32);
|
||||
--input: oklch(0.92 0.004 286.32);
|
||||
--ring: oklch(0.705 0.015 286.067);
|
||||
--chart-1: oklch(0.646 0.222 41.116);
|
||||
--chart-2: oklch(0.6 0.118 184.704);
|
||||
--chart-3: oklch(0.398 0.07 227.392);
|
||||
--chart-4: oklch(0.828 0.189 84.429);
|
||||
--chart-5: oklch(0.769 0.188 70.08);
|
||||
--sidebar: oklch(0.985 0 0);
|
||||
--sidebar-foreground: oklch(0.141 0.005 285.823);
|
||||
--sidebar-primary: oklch(0.21 0.006 285.885);
|
||||
--sidebar-primary-foreground: oklch(0.985 0 0);
|
||||
--sidebar-accent: oklch(0.967 0.001 286.375);
|
||||
--sidebar-accent-foreground: oklch(0.21 0.006 285.885);
|
||||
--sidebar-border: oklch(0.92 0.004 286.32);
|
||||
--sidebar-ring: oklch(0.705 0.015 286.067);
|
||||
--background: #ffffff;
|
||||
--foreground: #18181b;
|
||||
--card: #ffffff;
|
||||
--card-foreground: #18181b;
|
||||
--popover: #ffffff;
|
||||
--popover-foreground: #18181b;
|
||||
--primary: #27272a;
|
||||
--primary-foreground: #fafafa;
|
||||
--secondary: #f4f4f5;
|
||||
--secondary-foreground: #27272a;
|
||||
--muted: #f4f4f5;
|
||||
--muted-foreground: #71717a;
|
||||
--accent: #f4f4f5;
|
||||
--accent-foreground: #27272a;
|
||||
--destructive: #dc2626;
|
||||
--border: #e4e4e7;
|
||||
--input: #e4e4e7;
|
||||
--ring: #a1a1aa;
|
||||
--chart-1: #e76e50;
|
||||
--chart-2: #2a9d8f;
|
||||
--chart-3: #264653;
|
||||
--chart-4: #e9c46a;
|
||||
--chart-5: #f4a261;
|
||||
--sidebar: #f9f9f9;
|
||||
--sidebar-foreground: #18181b;
|
||||
--sidebar-primary: #27272a;
|
||||
--sidebar-primary-foreground: #fafafa;
|
||||
--sidebar-accent: #f4f4f5;
|
||||
--sidebar-accent-foreground: #27272a;
|
||||
--sidebar-border: #e4e4e7;
|
||||
--sidebar-ring: #a1a1aa;
|
||||
|
||||
--bg-base: #fcfcfc;
|
||||
--bg-elevated: #ffffff;
|
||||
--bg-surface: #f3f4f6;
|
||||
--bg-surface-hover: #e5e7eb;
|
||||
--bg-input: #ffffff;
|
||||
--bg-deepest: #e5e7eb;
|
||||
--bg-header: #eeeeef;
|
||||
--bg-button: #f3f4f6;
|
||||
--bg-active: #e5e7eb;
|
||||
--bg-light: #fafafa;
|
||||
--bg-subtle: #f5f5f5;
|
||||
--bg-interact: #d1d5db;
|
||||
--border-base: #e5e7eb;
|
||||
--border-panel: #d1d5db;
|
||||
--border-subtle: #f3f4f6;
|
||||
--border-medium: #d1d5db;
|
||||
--bg-hover: #f3f4f6;
|
||||
--bg-hover-alt: #e5e7eb;
|
||||
--bg-pressed: #d1d5db;
|
||||
--border-hover: #d1d5db;
|
||||
--border-active: #9ca3af;
|
||||
|
||||
--foreground-secondary: #334155;
|
||||
--foreground-subtle: #94a3b8;
|
||||
|
||||
--scrollbar-thumb: #c1c1c3;
|
||||
--scrollbar-thumb-hover: #a1a1a3;
|
||||
--scrollbar-track: #f3f4f6;
|
||||
|
||||
--bg-overlay: rgba(0, 0, 0, 0.5);
|
||||
}
|
||||
|
||||
@theme inline {
|
||||
@@ -107,40 +138,99 @@
|
||||
--color-dark-bg-panel: #1b1b1e;
|
||||
--color-dark-border-panel: #222224;
|
||||
--color-dark-bg-panel-hover: #232327;
|
||||
|
||||
--color-canvas: var(--bg-base);
|
||||
--color-elevated: var(--bg-elevated);
|
||||
--color-surface: var(--bg-surface);
|
||||
--color-surface-hover: var(--bg-surface-hover);
|
||||
--color-field: var(--bg-input);
|
||||
--color-deepest: var(--bg-deepest);
|
||||
--color-header: var(--bg-header);
|
||||
--color-button: var(--bg-button);
|
||||
--color-active: var(--bg-active);
|
||||
--color-light: var(--bg-light);
|
||||
--color-subtle: var(--bg-subtle);
|
||||
--color-interact: var(--bg-interact);
|
||||
--color-hover: var(--bg-hover);
|
||||
--color-hover-alt: var(--bg-hover-alt);
|
||||
--color-pressed: var(--bg-pressed);
|
||||
|
||||
--color-edge: var(--border-base);
|
||||
--color-edge-panel: var(--border-panel);
|
||||
--color-edge-subtle: var(--border-subtle);
|
||||
--color-edge-medium: var(--border-medium);
|
||||
--color-edge-hover: var(--border-hover);
|
||||
--color-edge-active: var(--border-active);
|
||||
|
||||
--color-foreground-secondary: var(--foreground-secondary);
|
||||
--color-foreground-subtle: var(--foreground-subtle);
|
||||
|
||||
--color-overlay: var(--bg-overlay);
|
||||
}
|
||||
|
||||
.dark {
|
||||
--background: oklch(0.141 0.005 285.823);
|
||||
--foreground: oklch(0.985 0 0);
|
||||
--card: oklch(0.21 0.006 285.885);
|
||||
--card-foreground: oklch(0.985 0 0);
|
||||
--popover: oklch(0.21 0.006 285.885);
|
||||
--popover-foreground: oklch(0.985 0 0);
|
||||
--primary: oklch(0.92 0.004 286.32);
|
||||
--primary-foreground: oklch(0.21 0.006 285.885);
|
||||
--secondary: oklch(0.274 0.006 286.033);
|
||||
--secondary-foreground: oklch(0.985 0 0);
|
||||
--muted: oklch(0.274 0.006 286.033);
|
||||
--muted-foreground: oklch(0.705 0.015 286.067);
|
||||
--accent: oklch(0.274 0.006 286.033);
|
||||
--accent-foreground: oklch(0.985 0 0);
|
||||
--destructive: oklch(0.704 0.191 22.216);
|
||||
--border: oklch(1 0 0 / 10%);
|
||||
--input: oklch(1 0 0 / 15%);
|
||||
--ring: oklch(0.552 0.016 285.938);
|
||||
--chart-1: oklch(0.488 0.243 264.376);
|
||||
--chart-2: oklch(0.696 0.17 162.48);
|
||||
--chart-3: oklch(0.769 0.188 70.08);
|
||||
--chart-4: oklch(0.627 0.265 303.9);
|
||||
--chart-5: oklch(0.645 0.246 16.439);
|
||||
--sidebar: oklch(0.21 0.006 285.885);
|
||||
--sidebar-foreground: oklch(0.985 0 0);
|
||||
--sidebar-primary: oklch(0.488 0.243 264.376);
|
||||
--sidebar-primary-foreground: oklch(0.985 0 0);
|
||||
--sidebar-accent: oklch(0.274 0.006 286.033);
|
||||
--sidebar-accent-foreground: oklch(0.985 0 0);
|
||||
--sidebar-border: oklch(1 0 0 / 10%);
|
||||
--sidebar-ring: oklch(0.552 0.016 285.938);
|
||||
--background: #09090b;
|
||||
--foreground: #fafafa;
|
||||
--card: #18181b;
|
||||
--card-foreground: #fafafa;
|
||||
--popover: #27272a;
|
||||
--popover-foreground: #fafafa;
|
||||
--primary: #e4e4e7;
|
||||
--primary-foreground: #27272a;
|
||||
--secondary: #3f3f46;
|
||||
--secondary-foreground: #fafafa;
|
||||
--muted: #27272a;
|
||||
--muted-foreground: #9ca3af;
|
||||
--accent: #3f3f46;
|
||||
--accent-foreground: #fafafa;
|
||||
--destructive: #f87171;
|
||||
--border: #ffffff1a;
|
||||
--input: #ffffff26;
|
||||
--ring: #71717a;
|
||||
--chart-1: #3b82f6;
|
||||
--chart-2: #34d399;
|
||||
--chart-3: #f4a261;
|
||||
--chart-4: #a855f7;
|
||||
--chart-5: #f43f5e;
|
||||
--sidebar: #18181b;
|
||||
--sidebar-foreground: #fafafa;
|
||||
--sidebar-primary: #3b82f6;
|
||||
--sidebar-primary-foreground: #fafafa;
|
||||
--sidebar-accent: #3f3f46;
|
||||
--sidebar-accent-foreground: #fafafa;
|
||||
--sidebar-border: #ffffff1a;
|
||||
--sidebar-ring: #71717a;
|
||||
|
||||
--bg-base: #18181b;
|
||||
--bg-elevated: #0e0e10;
|
||||
--bg-surface: #1b1b1e;
|
||||
--bg-surface-hover: #232327;
|
||||
--bg-input: #222225;
|
||||
--bg-deepest: #09090b;
|
||||
--bg-header: #131316;
|
||||
--bg-button: #23232a;
|
||||
--bg-active: #1d1d1f;
|
||||
--bg-light: #141416;
|
||||
--bg-subtle: #101014;
|
||||
--bg-interact: #2a2a2c;
|
||||
--border-base: #303032;
|
||||
--border-panel: #222224;
|
||||
--border-subtle: #5a5a5d;
|
||||
--border-medium: #373739;
|
||||
--bg-hover: #2d2d30;
|
||||
--bg-hover-alt: #2a2a2d;
|
||||
--bg-pressed: #1a1a1c;
|
||||
--border-hover: #434345;
|
||||
--border-active: #2d2d30;
|
||||
|
||||
--foreground-secondary: #d1d5db;
|
||||
--foreground-subtle: #6b7280;
|
||||
|
||||
--scrollbar-thumb: #434345;
|
||||
--scrollbar-thumb-hover: #5a5a5d;
|
||||
--scrollbar-track: #18181b;
|
||||
|
||||
--bg-overlay: rgba(0, 0, 0, 0.7);
|
||||
}
|
||||
|
||||
@layer base {
|
||||
@@ -160,23 +250,7 @@
|
||||
|
||||
.thin-scrollbar {
|
||||
scrollbar-width: thin;
|
||||
scrollbar-color: #303032 transparent;
|
||||
}
|
||||
|
||||
.thin-scrollbar::-webkit-scrollbar {
|
||||
height: 6px;
|
||||
width: 6px;
|
||||
}
|
||||
|
||||
.thin-scrollbar::-webkit-scrollbar-track {
|
||||
background: transparent;
|
||||
}
|
||||
|
||||
.thin-scrollbar::-webkit-scrollbar-thumb {
|
||||
background-color: #303032;
|
||||
border-radius: 9999px;
|
||||
border: 2px solid transparent;
|
||||
background-clip: content-box;
|
||||
scrollbar-color: var(--scrollbar-thumb) var(--scrollbar-track);
|
||||
}
|
||||
|
||||
.thin-scrollbar::-webkit-scrollbar {
|
||||
@@ -185,19 +259,37 @@
|
||||
}
|
||||
|
||||
.thin-scrollbar::-webkit-scrollbar-track {
|
||||
background: #18181b;
|
||||
background: var(--scrollbar-track);
|
||||
}
|
||||
|
||||
.thin-scrollbar::-webkit-scrollbar-thumb {
|
||||
background: #434345;
|
||||
background: var(--scrollbar-thumb);
|
||||
border-radius: 3px;
|
||||
}
|
||||
|
||||
.thin-scrollbar::-webkit-scrollbar-thumb:hover {
|
||||
background: #5a5a5d;
|
||||
background: var(--scrollbar-thumb-hover);
|
||||
}
|
||||
|
||||
.thin-scrollbar {
|
||||
.skinny-scrollbar {
|
||||
scrollbar-width: thin;
|
||||
scrollbar-color: #434345 #18181b;
|
||||
scrollbar-color: var(--scrollbar-thumb) transparent;
|
||||
}
|
||||
|
||||
.skinny-scrollbar::-webkit-scrollbar {
|
||||
width: 4px;
|
||||
height: 4px;
|
||||
}
|
||||
|
||||
.skinny-scrollbar::-webkit-scrollbar-track {
|
||||
background: transparent;
|
||||
}
|
||||
|
||||
.skinny-scrollbar::-webkit-scrollbar-thumb {
|
||||
background: var(--scrollbar-thumb);
|
||||
border-radius: 2px;
|
||||
}
|
||||
|
||||
.skinny-scrollbar::-webkit-scrollbar-thumb:hover {
|
||||
background: var(--scrollbar-thumb-hover);
|
||||
}
|
||||
|
||||
104
src/lib/db-health-monitor.ts
Normal file
104
src/lib/db-health-monitor.ts
Normal file
@@ -0,0 +1,104 @@
|
||||
type EventListener = (...args: any[]) => void;
|
||||
|
||||
class DatabaseHealthMonitor {
|
||||
private static instance: DatabaseHealthMonitor;
|
||||
private dbHealthy: boolean = true;
|
||||
private lastCheckTime: number = 0;
|
||||
private checkInProgress: boolean = false;
|
||||
private listeners: Map<string, EventListener[]> = new Map();
|
||||
|
||||
private constructor() {}
|
||||
|
||||
static getInstance(): DatabaseHealthMonitor {
|
||||
if (!DatabaseHealthMonitor.instance) {
|
||||
DatabaseHealthMonitor.instance = new DatabaseHealthMonitor();
|
||||
}
|
||||
return DatabaseHealthMonitor.instance;
|
||||
}
|
||||
|
||||
on(event: string, listener: EventListener): void {
|
||||
if (!this.listeners.has(event)) {
|
||||
this.listeners.set(event, []);
|
||||
}
|
||||
this.listeners.get(event)!.push(listener);
|
||||
}
|
||||
|
||||
off(event: string, listener: EventListener): void {
|
||||
const eventListeners = this.listeners.get(event);
|
||||
if (eventListeners) {
|
||||
const index = eventListeners.indexOf(listener);
|
||||
if (index !== -1) {
|
||||
eventListeners.splice(index, 1);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private emit(event: string, ...args: any[]): void {
|
||||
const eventListeners = this.listeners.get(event);
|
||||
if (eventListeners) {
|
||||
eventListeners.forEach((listener) => listener(...args));
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Classifies an error from an API call and, if it indicates the database or
 * backend is unavailable (or a previously valid session was invalidated),
 * flips the health flag and emits "database-connection-lost" exactly once
 * per outage.
 *
 * @param error            axios-style error (reads response.data / status / code)
 * @param wasAuthenticated true if the caller held a valid session when the
 *                         request was made; required for 401s to count as lost
 */
reportDatabaseError(error: any, wasAuthenticated: boolean = false) {
  const message = error?.response?.data?.error || error?.message || "";
  const code = error?.response?.data?.code || error?.code;
  const status = error?.response?.status;
  const lowered = message.toLowerCase();

  // Backend reported a database-layer failure.
  const databaseFailure =
    ["database", "sqlite", "drizzle"].some((kw) => lowered.includes(kw)) ||
    code === "DATABASE_ERROR" ||
    code === "DB_CONNECTION_FAILED";

  // The backend process itself could not be reached at all.
  const backendDown =
    code === "ERR_NETWORK" ||
    code === "ECONNREFUSED" ||
    (lowered.includes("network error") && error?.response === undefined);

  // A session that used to work came back 401 with an auth-specific reason.
  const authLost =
    wasAuthenticated &&
    status === 401 &&
    (["AUTH_REQUIRED", "SESSION_EXPIRED", "SESSION_NOT_FOUND"].includes(code) ||
      [
        "Missing authentication token",
        "Invalid token",
        "Authentication required",
      ].includes(message));

  // Only transition healthy -> unhealthy once; repeat errors stay silent.
  if ((databaseFailure || backendDown || authLost) && this.dbHealthy) {
    this.dbHealthy = false;
    this.emit("database-connection-lost", {
      error: message || "Backend server unreachable",
      code,
      timestamp: Date.now(),
    });
  }
}
|
||||
|
||||
/**
 * Marks the database healthy again after a successful call, emitting
 * "database-connection-restored" only on the unhealthy -> healthy transition.
 */
reportDatabaseSuccess() {
  if (this.dbHealthy) {
    return;
  }
  this.dbHealthy = true;
  this.emit("database-connection-restored", { timestamp: Date.now() });
}
|
||||
|
||||
// Returns the cached health flag; does not perform any new check.
isDatabaseHealthy(): boolean {
  return this.dbHealthy;
}
|
||||
|
||||
// Restores the monitor to its initial state: healthy, never checked,
// no check in progress. Registered listeners are intentionally kept.
reset() {
  this.dbHealthy = true;
  this.lastCheckTime = 0;
  this.checkInProgress = false;
}
|
||||
}
|
||||
|
||||
// Shared singleton instance used throughout the app.
export const dbHealthMonitor = DatabaseHealthMonitor.getInstance();
|
||||
263
src/lib/terminal-syntax-highlighter.ts
Normal file
263
src/lib/terminal-syntax-highlighter.ts
Normal file
@@ -0,0 +1,263 @@
|
||||
// ANSI SGR escape sequences used to colorize matched spans in terminal output.
const ANSI_CODES = {
  reset: "\x1b[0m", // clears all active colors/styles
  colors: {
    red: "\x1b[31m",
    green: "\x1b[32m",
    yellow: "\x1b[33m",
    blue: "\x1b[34m",
    magenta: "\x1b[35m",
    cyan: "\x1b[36m",
    white: "\x1b[37m",
    brightBlack: "\x1b[90m",
    brightRed: "\x1b[91m",
    brightGreen: "\x1b[92m",
    brightYellow: "\x1b[93m",
    brightBlue: "\x1b[94m",
    brightMagenta: "\x1b[95m",
    brightCyan: "\x1b[96m",
    brightWhite: "\x1b[97m",
  },
  styles: {
    bold: "\x1b[1m",
    dim: "\x1b[2m",
    italic: "\x1b[3m",
    underline: "\x1b[4m",
  },
} as const;
|
||||
|
||||
// A single highlighting rule applied to plain-text terminal output.
interface HighlightPattern {
  name: string; // human-readable identifier for the rule
  regex: RegExp; // global regex; lastIndex is reset before each scan
  ansiCode: string; // escape sequence(s) prepended to each match
  priority: number; // higher wins when matches overlap
  quickCheck?: string; // NOTE(review): declared but never read in this file — confirm before relying on it
}
|
||||
|
||||
// A resolved pattern match: half-open [start, end) offsets into the scanned text.
interface MatchResult {
  start: number;
  end: number;
  ansiCode: string; // color/style to wrap the span with
  priority: number; // carried over from the matching pattern
}
|
||||
|
||||
// Lines longer than this are returned unhighlighted to bound regex cost.
const MAX_LINE_LENGTH = 5000;

// A chunk containing more ANSI escape codes than this is considered already
// colorized and is left untouched.
const MAX_ANSI_CODES = 10;
|
||||
|
||||
// Highlighting rules. Overlapping matches are resolved by `priority`
// (higher wins); within equal priority, the earlier match wins.
const PATTERNS: HighlightPattern[] = [
  // IPv4 addresses (each octet 0-255), optionally followed by :port.
  {
    name: "ipv4",
    regex:
      /(?:(?:25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9]?[0-9])\.){3}(?:25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9]?[0-9])(?::\d{1,5})?/g,
    ansiCode: ANSI_CODES.colors.magenta,
    priority: 10,
  },

  // Error-level log keywords, case-insensitive, plus bracketed [ERROR] tags.
  {
    name: "log-error",
    regex:
      /\b(ERROR|FATAL|CRITICAL|FAIL(?:ED)?|denied|invalid|DENIED)\b|\[ERROR\]/gi,
    ansiCode: ANSI_CODES.colors.brightRed,
    priority: 9,
  },

  // Warning-level keywords and [WARN]/[WARNING] tags.
  {
    name: "log-warn",
    regex: /\b(WARN(?:ING)?|ALERT)\b|\[WARN(?:ING)?\]/gi,
    ansiCode: ANSI_CODES.colors.yellow,
    priority: 9,
  },

  // Success/healthy-state keywords (e.g. docker "Up", systemd "active").
  {
    name: "log-success",
    regex:
      /\b(SUCCESS|OK|PASS(?:ED)?|COMPLETE(?:D)?|connected|active|up|Up|UP|FULL)\b/gi,
    ansiCode: ANSI_CODES.colors.brightGreen,
    priority: 8,
  },

  // http/https URLs; stops at whitespace or closing bracket characters.
  {
    name: "url",
    regex: /https?:\/\/[^\s\])}]+/g,
    ansiCode: `${ANSI_CODES.colors.blue}${ANSI_CODES.styles.underline}`,
    priority: 8,
  },

  // Absolute filesystem paths with at least two components.
  {
    name: "path-absolute",
    regex: /\/[a-zA-Z][a-zA-Z0-9_\-@.]*(?:\/[a-zA-Z0-9_\-@.]+)+/g,
    ansiCode: ANSI_CODES.colors.cyan,
    priority: 7,
  },

  // Home-relative paths (~/...).
  {
    name: "path-home",
    regex: /~\/[a-zA-Z0-9_\-@./]+/g,
    ansiCode: ANSI_CODES.colors.cyan,
    priority: 7,
  },

  // Informational log level.
  {
    name: "log-info",
    regex: /\bINFO\b|\[INFO\]/gi,
    ansiCode: ANSI_CODES.colors.blue,
    priority: 6,
  },
  // Debug/trace log levels, rendered dim (bright black).
  {
    name: "log-debug",
    regex: /\b(?:DEBUG|TRACE)\b|\[(?:DEBUG|TRACE)\]/gi,
    ansiCode: ANSI_CODES.colors.brightBlack,
    priority: 6,
  },
];
|
||||
|
||||
function hasExistingAnsiCodes(text: string): boolean {
|
||||
const ansiCount = (
|
||||
text.match(
|
||||
/\x1b[\[\]()#;?]*(?:[0-9]{1,4}(?:;[0-9]{0,4})*)?[0-9A-PRZcf-nq-uy=><~]/g,
|
||||
) || []
|
||||
).length;
|
||||
return ansiCount > MAX_ANSI_CODES;
|
||||
}
|
||||
|
||||
function hasIncompleteAnsiSequence(text: string): boolean {
|
||||
return /\x1b(?:\[(?:[0-9;]*)?)?$/.test(text);
|
||||
}
|
||||
|
||||
// One run of terminal output: either a complete ANSI escape sequence
// (isAnsi = true) or a span of plain text between sequences.
interface TextSegment {
  isAnsi: boolean;
  content: string;
}
|
||||
|
||||
function parseAnsiSegments(text: string): TextSegment[] {
|
||||
const segments: TextSegment[] = [];
|
||||
const ansiRegex = /\x1b(?:[@-Z\\-_]|\[[0-9;]*[@-~])/g;
|
||||
let lastIndex = 0;
|
||||
let match;
|
||||
|
||||
while ((match = ansiRegex.exec(text)) !== null) {
|
||||
if (match.index > lastIndex) {
|
||||
segments.push({
|
||||
isAnsi: false,
|
||||
content: text.slice(lastIndex, match.index),
|
||||
});
|
||||
}
|
||||
|
||||
segments.push({
|
||||
isAnsi: true,
|
||||
content: match[0],
|
||||
});
|
||||
|
||||
lastIndex = ansiRegex.lastIndex;
|
||||
}
|
||||
|
||||
if (lastIndex < text.length) {
|
||||
segments.push({
|
||||
isAnsi: false,
|
||||
content: text.slice(lastIndex),
|
||||
});
|
||||
}
|
||||
|
||||
return segments;
|
||||
}
|
||||
|
||||
function highlightPlainText(text: string): string {
|
||||
if (text.length > MAX_LINE_LENGTH) {
|
||||
return text;
|
||||
}
|
||||
|
||||
if (!text.trim()) {
|
||||
return text;
|
||||
}
|
||||
|
||||
const matches: MatchResult[] = [];
|
||||
|
||||
for (const pattern of PATTERNS) {
|
||||
pattern.regex.lastIndex = 0;
|
||||
|
||||
let match;
|
||||
while ((match = pattern.regex.exec(text)) !== null) {
|
||||
matches.push({
|
||||
start: match.index,
|
||||
end: match.index + match[0].length,
|
||||
ansiCode: pattern.ansiCode,
|
||||
priority: pattern.priority,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
if (matches.length === 0) {
|
||||
return text;
|
||||
}
|
||||
|
||||
matches.sort((a, b) => {
|
||||
if (a.priority !== b.priority) {
|
||||
return b.priority - a.priority;
|
||||
}
|
||||
return a.start - b.start;
|
||||
});
|
||||
|
||||
const appliedRanges: Array<{ start: number; end: number }> = [];
|
||||
const finalMatches = matches.filter((match) => {
|
||||
const overlaps = appliedRanges.some(
|
||||
(range) =>
|
||||
(match.start >= range.start && match.start < range.end) ||
|
||||
(match.end > range.start && match.end <= range.end) ||
|
||||
(match.start <= range.start && match.end >= range.end),
|
||||
);
|
||||
|
||||
if (!overlaps) {
|
||||
appliedRanges.push({ start: match.start, end: match.end });
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
});
|
||||
|
||||
let result = text;
|
||||
finalMatches.reverse().forEach((match) => {
|
||||
const before = result.slice(0, match.start);
|
||||
const matched = result.slice(match.start, match.end);
|
||||
const after = result.slice(match.end);
|
||||
|
||||
result = before + match.ansiCode + matched + ANSI_CODES.reset + after;
|
||||
});
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
export function highlightTerminalOutput(text: string): string {
|
||||
if (!text || !text.trim()) {
|
||||
return text;
|
||||
}
|
||||
|
||||
if (hasIncompleteAnsiSequence(text)) {
|
||||
return text;
|
||||
}
|
||||
|
||||
if (hasExistingAnsiCodes(text)) {
|
||||
return text;
|
||||
}
|
||||
|
||||
const segments = parseAnsiSegments(text);
|
||||
|
||||
if (segments.length === 0) {
|
||||
return highlightPlainText(text);
|
||||
}
|
||||
|
||||
const highlightedSegments = segments.map((segment) => {
|
||||
if (segment.isAnsi) {
|
||||
return segment.content;
|
||||
} else {
|
||||
return highlightPlainText(segment.content);
|
||||
}
|
||||
});
|
||||
|
||||
return highlightedSegments.join("");
|
||||
}
|
||||
|
||||
export function isSyntaxHighlightingEnabled(): boolean {
|
||||
try {
|
||||
return localStorage.getItem("terminalSyntaxHighlighting") === "true";
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
File diff suppressed because it is too large
Load Diff
2563
src/locales/en.json
Normal file
2563
src/locales/en.json
Normal file
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user